diff --git a/.github/workflows/_test_template.yml b/.github/workflows/_test_template.yml
index e2401cab7f64..3a1f69243c39 100644
--- a/.github/workflows/_test_template.yml
+++ b/.github/workflows/_test_template.yml
@@ -51,7 +51,7 @@ jobs:
- name: Docker pull image
run: |
- docker pull nemoci.azurecr.io/nemo_container_${{ github.run_id }}
+ docker pull nemoci.azurecr.io/nemo_container:${{ github.run_id }}
- name: Start container
run: |
@@ -60,7 +60,7 @@ jobs:
ARG=("--runtime=nvidia --gpus all")
fi
- docker run --rm -d --name nemo_container_${{ github.run_id }} ${ARG[@]} --shm-size=64g --env TRANSFORMERS_OFFLINE=0 --env HYDRA_FULL_ERROR=1 --volume /mnt/datadrive/TestData:/home/TestData nemoci.azurecr.io/nemo_container_${{ github.run_id }} bash -c "sleep $(( ${{ inputs.TIMEOUT }} * 60 + 60 ))"
+ docker run --rm -d --name nemo_container_${{ github.run_id }} ${ARG[@]} --shm-size=64g --env TRANSFORMERS_OFFLINE=0 --env HYDRA_FULL_ERROR=1 --volume /mnt/datadrive/TestData:/home/TestData nemoci.azurecr.io/nemo_container:${{ github.run_id }} bash -c "sleep $(( ${{ inputs.TIMEOUT }} * 60 + 60 ))"
- id: main
name: Run main script
diff --git a/.github/workflows/build-test-publish-wheel.yml b/.github/workflows/build-test-publish-wheel.yml
new file mode 100644
index 000000000000..da940a94b638
--- /dev/null
+++ b/.github/workflows/build-test-publish-wheel.yml
@@ -0,0 +1,45 @@
+# Copyright (c) 2020-2021, NVIDIA CORPORATION.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: Build, test, and publish a PyPI wheel (to TestPyPI)
+
+on:
+ push:
+ branches:
+ - main
+ - 'r**'
+
+defaults:
+ run:
+ shell: bash -x -e -u -o pipefail {0}
+
+jobs:
+ build-test-publish-wheel:
+ uses: NVIDIA/NeMo-FW-CI-templates/.github/workflows/_build_test_publish_wheel.yml@v0.7.0
+ with:
+ image-name: nemo_container
+ dockerfile: Dockerfile.ci
+ image-label: nemo-core
+ build-args: |
+ IMAGE_LABEL=nemo-core
+ prune-filter-timerange: 24h
+ dry-run: true
+ python-package: nemo
+ container-workdir: /workspace
+ environment: public
+ secrets:
+ TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }}
+ TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
+ SLACK_WEBHOOK_ADMIN: ${{ secrets.SLACK_WEBHOOK_ADMIN }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
diff --git a/.github/workflows/cherry-pick-release-commit.yml b/.github/workflows/cherry-pick-release-commit.yml
index 67bc69b1f8a5..a1d300ca4f7a 100644
--- a/.github/workflows/cherry-pick-release-commit.yml
+++ b/.github/workflows/cherry-pick-release-commit.yml
@@ -6,133 +6,9 @@ on:
- main
jobs:
- main:
- runs-on: ubuntu-latest
- environment:
- name: main
- steps:
- - name: Checkout
- uses: actions/checkout@v3
- with:
- fetch-depth: 0
- token: ${{ secrets.PAT }}
-
-
- - name: Cherry pick
- env:
- GH_TOKEN: ${{ secrets.PAT }}
- run: |
- set -x
- set +e
-
- git config --global user.email "nemo-bot@nvidia.com"
- git config --global user.name "NeMo Bot"
-
- SHA=$(git rev-list --no-merges -n 1 HEAD)
- MESSAGE=$(git log -n 1 --pretty=format:%s $SHA)
- PR_ID=$(echo $MESSAGE | awk -F'#' '{print $2}' | awk -F')' '{print $1}' )
- USERNAME=$(git log -n 1 --pretty=format:%ae $SHA | awk -F'@' '{print $1}')
-
- PR=$(curl -L \
- -H "Accept: application/vnd.github+json" \
- -H "Authorization: Bearer $GH_TOKEN" \
- -H "X-GitHub-Api-Version: 2022-11-28" \
- https://api.github.com/repos/NVIDIA/NeMo/pulls/$PR_ID)
- PR_TITLE=$(echo -E $PR | jq '.title' | tr -d '"')
-
- LABELS=$(echo -E $PR | jq '.labels | [.[].name] | join(",")' | tr -d '"')
- AUTHOR=$(echo -E $PR | jq '.user.login' | tr -d '"')
-
- TARGET_BRANCHES=$(echo "$LABELS" | grep -o 'r[^,]*')
-
- if [[ $TARGET_BRANCHES == '' ]]; then
- echo Nothing to cherry-pick
- exit 0
- fi
-
- echo $TARGET_BRANCHES | while read -r RELEASE_BRANCH ; do
- TARGET_BRANCH_EXISTS_OK=$([[ "$(git ls-remote --heads origin refs/heads/$RELEASE_BRANCH)" != "" ]] && echo true || echo false)
-
- if [[ "$TARGET_BRANCH_EXISTS_OK" == "false" ]]; then
- echo Release branch does not yet exist, will not cherry-pick
- continue
- fi
-
- (
- git fetch origin $RELEASE_BRANCH:$RELEASE_BRANCH
- git switch --force-create cherry-pick-$PR_ID-$RELEASE_BRANCH $RELEASE_BRANCH
- git cherry-pick $SHA
- git push -u origin --force cherry-pick-$PR_ID-$RELEASE_BRANCH
- git checkout ${CI_DEFAULT_BRANCH:-main}
- )
-
- CHERRYPICK_SUCCESSFUL=$?
-
- if [[ $CHERRYPICK_SUCCESSFUL -eq 0 ]]; then
- PR_URL="https://github.com/NVIDIA/NeMo/pull/$PR_ID"
-
- PAYLOAD=$(jq \
- -n \
- -c \
- --arg TITLE "Cherry pick \`$PR_TITLE ($PR_ID)\` into \`$RELEASE_BRANCH\`" \
- --arg HEAD "cherry-pick-$PR_ID-$RELEASE_BRANCH" \
- --arg RELEASE_BRANCH "$RELEASE_BRANCH" \
- --arg BODY "[π€]: Hi @$AUTHOR π,
we've cherry picked #$PR_ID into \`$RELEASE_BRANCH\` for you! π
Please review and approve this cherry pick by your convenience\!" \
- '{
- "title": $TITLE,
- "head": $HEAD,
- "base": $RELEASE_BRANCH,
- "body": $BODY
- }'
- )
-
- NEW_PR=$(curl -L \
- -X POST \
- -H "Accept: application/vnd.github+json" \
- -H "Authorization: Bearer $GH_TOKEN" \
- -H "X-GitHub-Api-Version: 2022-11-28" \
- https://api.github.com/repos/NVIDIA/NeMo/pulls \
- -d "$PAYLOAD")
-
- NEW_PR_ID=$(echo -E $NEW_PR | jq '.number')
- curl -L \
- -X POST \
- -H "Accept: application/vnd.github+json" \
- -H "Authorization: Bearer $GH_TOKEN" \
- -H "X-GitHub-Api-Version: 2022-11-28" \
- https://api.github.com/repos/NVIDIA/NeMo/pulls/$NEW_PR_ID/requested_reviewers \
- -d '{"reviewers":["'$AUTHOR'"]}'
-
- curl -L \
- -X POST \
- -H "Accept: application/vnd.github+json" \
- -H "Authorization: Bearer $GH_TOKEN" \
- -H "X-GitHub-Api-Version: 2022-11-28" \
- https://api.github.com/repos/NVIDIA/NeMo/issues/$NEW_PR_ID/labels \
- -d '{"labels":["Run CICD", "cherry-pick"]}'
-
- else
- URL="https://github.com/NVIDIA/NeMo/pull/$PR_ID"
-
- MESSAGE='{
- "blocks": [
- {
- "type": "section",
- "text": {
- "type": "mrkdwn",
- "text": ":alert: Cherrypick bot π€: Hey <@'$USERNAME'>: Cherry-pick of <'$URL'|#'$PR_ID'> failed (3-way merge impossible). Please resolve manually and create a PR.\n\ncc: "
- }
- }
- ]
- }'
-
- curl -X POST -H "Content-type: application/json" --data "$MESSAGE" ${{ secrets.SLACK_WEBHOOK }}
-
- fi
-
- done
-
-
-
-env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
\ No newline at end of file
+ cherry-pick:
+ uses: NVIDIA/NeMo-FW-CI-templates/.github/workflows/_cherry_pick.yml@v0.6.0
+ secrets:
+ PAT: ${{ secrets.PAT }}
+ SLACK_WEBHOOK_ADMIN: ${{ secrets.SLACK_WEBHOOK_ADMIN }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
diff --git a/.github/workflows/cicd-main.yml b/.github/workflows/cicd-main.yml
index 22bbb3c1a447..a4b2baa59550 100644
--- a/.github/workflows/cicd-main.yml
+++ b/.github/workflows/cicd-main.yml
@@ -38,6 +38,7 @@ jobs:
outputs:
test_to_run: ${{ steps.test_to_run.outputs.main }}
all: ${{ steps.all.outputs.main }}
+ event_name: ${{ steps.github-event.outputs.main }}
steps:
- name: Parse test_to_run
id: test_to_run
@@ -47,70 +48,35 @@ jobs:
- name: Parse all
id: all
run: |
- echo "main=${{ contains(fromJSON(steps.test_to_run.outputs.main), 'all') }}" | tee -a "$GITHUB_OUTPUT"
-
- gpu-test:
- needs: [pre-flight]
- runs-on: self-hosted-azure
- if: ${{ github.event.label.name == 'Run CICD' || github.event_name == 'workflow_dispatch' }}
- steps:
- - name: Run nvidia-smi test
- run: |
- whoami
- nvidia-smi
+ echo "main=${{ contains(fromJSON(steps.test_to_run.outputs.main), 'all') }}" | tee -a "$GITHUB_OUTPUT"
+ - name: Infer github event
+ id: github-event
+ run: |
+ echo "main=${{ github.event_name }}" | tee -a "$GITHUB_OUTPUT"
-
- cicd-cluster-clean:
- runs-on: self-hosted-azure-builder
- needs: [pre-flight]
- if: ${{ github.event.label.name == 'Run CICD' || github.event_name == 'workflow_dispatch' }}
- steps:
- - name: Clean server from old files
- run: |
- docker system prune --filter "until=24h" --filter "label=nemo.library=nemo-core" --force
-
+ cicd-test-container-build:
+ if: ${{ github.event.label.name == 'Run CICD' || needs.pre-flight.outputs.event_name == 'workflow_dispatch' }}
+ uses: NVIDIA/NeMo-FW-CI-templates/.github/workflows/_build_container.yml@v0.14.0
+ needs: pre-flight
+ with:
+ image-name: nemo_container
+ dockerfile: Dockerfile.ci
+ image-label: nemo-core
+ build-args: |
+ IMAGE_LABEL=nemo-core
+ prune-filter-timerange: 24h
+
cicd-test-container-setup:
- needs: [cicd-cluster-clean, pre-flight]
+ needs: [cicd-test-container-build, pre-flight]
runs-on: self-hosted-azure-builder
if: ${{ github.event.label.name == 'Run CICD' || github.event_name == 'workflow_dispatch' }}
outputs:
test_to_run: ${{ needs.pre-flight.outputs.test_to_run }}
all: ${{ needs.pre-flight.outputs.all }}
steps:
- - name: Checkout repository
- uses: actions/checkout@v4
- with:
- path: ${{ github.run_id }}
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3
- with:
- # We use `docker` driver as this speeds things up for
- # trivial (non-multi-stage) builds.
- driver: docker
-
- - name: Restore cache
- run: |
- docker pull nemoci.azurecr.io/nemo_container:latest
- docker pull nemoci.azurecr.io/nemo_container_${{ github.event.number || 'noop' }} || true
-
- - name: Build and push
- uses: docker/build-push-action@v5
- with:
- file: Dockerfile.ci
- push: true
- cache-from: |
- nemoci.azurecr.io/nemo_container:latest
- nemoci.azurecr.io/nemo_container_${{ github.event.number || 'noop' }}
- cache-to: type=inline
- tags: |
- nemoci.azurecr.io/nemo_container_${{ github.run_id }}
- nemoci.azurecr.io/nemo_container_${{ github.event.number || 'noop' }}
- nemoci.azurecr.io/nemo_container:latest
-
- name: Run some checks
run: |
- docker run --rm --device=/dev/nvidia0 --gpus all --shm-size=8g --env TRANSFORMERS_OFFLINE=0 --env HYDRA_FULL_ERROR=1 --env PYTHONUNBUFFERED=1 nemoci.azurecr.io/nemo_container_${{ github.run_id }} bash -c '\
+ docker run --rm --device=/dev/nvidia0 --gpus all --shm-size=8g --env TRANSFORMERS_OFFLINE=0 --env HYDRA_FULL_ERROR=1 --env PYTHONUNBUFFERED=1 nemoci.azurecr.io/nemo_container:${{ github.run_id }} bash -c '\
# PyTorch Lightning version
python -c "import pytorch_lightning; print(pytorch_lightning.__version__)"
@@ -475,7 +441,7 @@ jobs:
# needs: [cicd-test-container-setup]
# runs-on: self-hosted-azure
# container:
- # image: nemoci.azurecr.io/nemo_container_${{ github.run_id }}
+ # image: nemoci.azurecr.io/nemo_container:${{ github.run_id }}
# options:
# # --user 0:128
# --device=/dev/nvidia0
@@ -495,18 +461,6 @@ jobs:
# - uses: "NVIDIA/NeMo/.github/actions/cancel-workflow@main"
# if: "failure()"
- L2_PTQ_Llama2_Export_Only:
- needs: [cicd-test-container-setup]
- uses: ./.github/workflows/_test_template.yml
- if: contains(fromJSON(needs.cicd-test-container-setup.outputs.test_to_run), 'L2_PTQ_Llama2_Export_Only') || needs.cicd-test-container-setup.outputs.all == 'true'
- with:
- RUNNER: self-hosted-azure
- SCRIPT: |
- python examples/nlp/language_modeling/megatron_gpt_ptq.py \
- model.restore_from_path=/home/TestData/nlp/megatron_llama/llama_ci.nemo \
- quantization.algorithm=null \
- export.save_path=/tmp/nlp_megatron_llama_export_only/ci_baseline
-
L2_PTQ_Llama2_FP8:
needs: [cicd-test-container-setup]
uses: ./.github/workflows/_test_template.yml
@@ -514,8 +468,13 @@ jobs:
with:
RUNNER: self-hosted-azure
SCRIPT: |
+ CUDA_VISIBLE_DEVICES=0 python scripts/checkpoint_converters/convert_llama_hf_to_nemo.py \
+ --input_name_or_path=/home/TestData/nlp/megatron_llama/llama-ci-hf-tiny \
+ --output_path=/tmp/nlp_megatron_llama/llama_ci.nemo \
+ --precision=16
+
python examples/nlp/language_modeling/megatron_gpt_ptq.py \
- model.restore_from_path=/home/TestData/nlp/megatron_llama/llama_ci.nemo \
+ model.restore_from_path=/tmp/nlp_megatron_llama/llama_ci.nemo \
model.tensor_model_parallel_size=2 \
trainer.devices=2 \
quantization.calib_dataset=/home/TestData/nlp/test_quantization/test.json \
@@ -526,62 +485,15 @@ jobs:
export.sample_output=False \
export.save_path=/tmp/nlp_megatron_llama_eo/ci_fp8.qnemo
- L2_PTQ_Llama2_INT8_SQ:
- needs: [cicd-test-container-setup]
- uses: ./.github/workflows/_test_template.yml
- if: contains(fromJSON(needs.cicd-test-container-setup.outputs.test_to_run), 'L2_PTQ_Llama2_INT8_SQ') || needs.cicd-test-container-setup.outputs.all == 'true'
- with:
- RUNNER: self-hosted-azure
- TIMEOUT: 15
- SCRIPT: |
- python examples/nlp/language_modeling/megatron_gpt_ptq.py \
- model.restore_from_path=/home/TestData/nlp/megatron_llama/llama_ci.nemo \
- quantization.calib_dataset=/home/TestData/nlp/test_quantization/test.json \
- quantization.algorithm=int8_sq \
- quantization.num_calib_size=8 \
- inference.batch_size=2 \
- export.sample_output=False \
- export.save_path=/tmp/nlp_megatron_llama_eo/ci_int8_sq.qnemo
-
- # TODO: investigate int4_awq stuck issues and restore the test
- #L2_PTQ_Llama2_INT4_AWQ:
- # needs: [cicd-test-container-setup]
- # runs-on: self-hosted-azure
- # timeout-minutes: 10
- # container:
- # image: nemoci.azurecr.io/nemo_container_${{ github.run_id }}
- # options:
- # # --user 0:128
- # --device=/dev/nvidia0
- # --gpus all
- # --shm-size=8g
- # --env TRANSFORMERS_OFFLINE=0
- # --env HYDRA_FULL_ERROR=1
- # --volume /mnt/datadrive/TestData:/home/TestData
- # steps:
- # - name: Checkout repository
- # uses: actions/checkout@v4
- # - run: |
- # python examples/nlp/language_modeling/megatron_gpt_ptq.py \
- # model.restore_from_path=/home/TestData/nlp/megatron_llama/llama_ci.nemo \
- # model.tensor_model_parallel_size=1 \
- # trainer.devices=1 \
- # quantization.calib_dataset=/home/TestData/nlp/test_quantization/test.json \
- # quantization.algorithm=int4_awq \
- # quantization.num_calib_size=8 \
- # inference.batch_size=2 \
- # export.save_path=/home/TestData/nlp/megatron_llama/ci_int4_awq.qnemo
- #
- # rm -rf /home/TestData/nlp/megatron_llama/ci_int4_awq.qnemo
- #- uses: "NVIDIA/NeMo/.github/actions/cancel-workflow@main"
- # if: "failure()"
+ AFTER_SCRIPT: |
+ rm -rf /tmp/nlp_megatron_llama_eo/ci_fp8.qnemo
# OPTIONAL_L2_QAT_Llama2_INT4:
# needs: [cicd-test-container-setup]
# runs-on: self-hosted-azure
# timeout-minutes: 10
# container:
- # image: nemoci.azurecr.io/nemo_container_${{ github.run_id }}
+ # image: nemoci.azurecr.io/nemo_container:${{ github.run_id }}
# options:
# # --user 0:128
# --device=/dev/nvidia0
@@ -2197,6 +2109,121 @@ jobs:
# }
# }
+ L2_Megatron_LM_To_NeMo_Conversion:
+ needs: [cicd-test-container-setup]
+ uses: ./.github/workflows/_test_template.yml
+ if: contains(fromJSON(needs.cicd-test-container-setup.outputs.test_to_run), 'L2_Megatron_LM_To_NeMo_Conversion') || needs.cicd-test-container-setup.outputs.all == 'true'
+ with:
+ RUNNER: self-hosted-azure
+ SCRIPT: |
+ CUDA_DEVICE_MAX_CONNECTIONS=1 torchrun --nproc_per_node=1 Megatron-LM/pretrain_gpt.py \
+ --mock-data \
+ --distributed-timeout-minutes 60 \
+ --use-mcore-models \
+ --no-mmap-bin-files \
+ --untie-embeddings-and-output-weights \
+ --disable-bias-linear \
+ --train-samples 80 \
+ --init-method-std 0.014 \
+ --position-embedding-type rope \
+ --rotary-base 1000000 \
+ --rotary-percent 1.0 \
+ --squared-relu \
+ --num-layers 4 \
+ --hidden-size 384 \
+ --num-attention-heads 8 \
+ --group-query-attention \
+ --num-query-groups 8 \
+ --ffn-hidden-size 1536 \
+ --kv-channels 128 \
+ --normalization RMSNorm \
+ --attention-dropout 0.0 \
+ --hidden-dropout 0.0 \
+ --exit-duration-in-mins 5750 \
+ --tensor-model-parallel-size 1 \
+ --pipeline-model-parallel-size 1 \
+ --seq-length 8192 \
+ --max-position-embeddings 8192 \
+ --micro-batch-size 1 \
+ --global-batch-size 8 \
+ --lr 6e-4 \
+ --min-lr 6e-6 \
+ --weight-decay 0.1 \
+ --clip-grad 1.0 \
+ --lr-decay-style cosine \
+ --log-interval 1 \
+ --eval-iters 1 \
+ --eval-interval 10 \
+ --tokenizer-type GPT2BPETokenizer \
+ --tokenizer-model /home/TestData/nlp/gpt2_tokenizer \
+ --vocab-file /home/TestData/nlp/gpt2_tokenizer/vocab.json \
+ --merge-file /home/TestData/nlp/gpt2_tokenizer/merges.txt \
+ --save /tmp/mlm_conversion_ckpt \
+ --save-interval 10 \
+ --ckpt-format torch_dist \
+ --ckpt-fully-parallel-save \
+ --ckpt-fully-parallel-load \
+ --async-save \
+ --ckpt-assume-constant-structure \
+ --timing-log-option minmax \
+ --log-params-norm \
+ --log-num-zeros-in-grad \
+ --log-throughput \
+ --bf16 \
+ --adam-beta1 0.9 \
+ --adam-beta2 0.95 \
+ --use-distributed-optimizer \
+ --overlap-grad-reduce \
+ --overlap-param-gather \
+ --manual-gc \
+ --num-workers 2
+
+ python examples/nlp/language_modeling/megatron_gpt_pretraining.py \
+ model.data.data_impl=mock \
+ model.data.data_prefix=[] \
+ model.skip_train=True \
+ model.transformer_engine=True \
+ model.use_flash_attention=False \
+ model.normalization=rmsnorm \
+ model.num_layers=4 \
+ model.hidden_size=384 \
+ model.ffn_hidden_size=1536 \
+ model.num_attention_heads=8 \
+ model.num_query_groups=8 \
+ model.bias=False \
+ model.bias_activation_fusion=False \
+ model.bias_dropout_add_fusion=True \
+ model.masked_softmax_fusion=True \
+ model.encoder_seq_length=8192 \
+ model.max_position_embeddings=8192 \
+ model.data.seq_length=8192 \
+ model.activation=squared-relu \
+ model.transformer_block_type=True \
+ model.micro_batch_size=1 \
+ model.global_batch_size=8 \
+ ++model.rotary_base=1000000 \
+ model.rotary_percentage=1.0 \
+ model.apply_query_key_layer_scaling=False \
+ ++model.group_query_attention=True \
+ model.apply_rope_fusion=True \
+ model.kv_channels=128 \
+ ++model.bert_binary_head=True \
+ ++model.position_embedding_type=rope \
+ ++model.add_position_embedding=True \
+ trainer.limit_val_batches=1 \
+ exp_manager.exp_dir=/tmp/nemo_conversion_ckpt
+
+ python -m torch.distributed.launch --nproc_per_node=1 examples/nlp/language_modeling/megatron_ckpt_to_nemo.py \
+ --checkpoint_folder /tmp/mlm_conversion_ckpt \
+ --checkpoint_name iter_0000010 \
+ --nemo_file_path /tmp/mlm_to_nemo_test.nemo \
+ --tensor_model_parallel_size 1 \
+ --pipeline_model_parallel_size 1 \
+ --gpus_per_node 1 \
+ --model_type gpt \
+ --hparams_file /tmp/nemo_conversion_ckpt/megatron_gpt/version_0/hparams.yaml \
+ --convert_mlm
+
L2_Megatron_GPT_with_ResetLR_Pretraining_and_Resume_Training_TP2:
needs: [cicd-test-container-setup]
uses: ./.github/workflows/_test_template.yml
@@ -2580,10 +2607,10 @@ jobs:
rm -rf examples/nlp/language_modeling/gpt_index_mappings
IS_OPTIONAL: true
- OPTIONAL_L2_Megatron_GPT_Auto_Configurator_TP1_PP1_MBS124:
+ L2_Megatron_GPT_Auto_Configurator_TP1_PP1_MBS124:
needs: [cicd-test-container-setup]
uses: ./.github/workflows/_test_template.yml
- if: contains(fromJSON(needs.cicd-test-container-setup.outputs.test_to_run), 'OPTIONAL_L2_Megatron_GPT_Auto_Configurator_TP1_PP1_MBS124') || needs.cicd-test-container-setup.outputs.all == 'true'
+ if: contains(fromJSON(needs.cicd-test-container-setup.outputs.test_to_run), 'L2_Megatron_GPT_Auto_Configurator_TP1_PP1_MBS124') || needs.cicd-test-container-setup.outputs.all == 'true'
with:
RUNNER: self-hosted-azure-gpus-1
SCRIPT: |
@@ -2606,7 +2633,6 @@ jobs:
--get_results
AFTER_SCRIPT: |
rm -rf examples/llm/auto_configurator/auto_conf_logs
- IS_OPTIONAL: true
L2_Megatron_GPT_Finetuning_PP2:
needs: [cicd-test-container-setup]
@@ -3038,7 +3064,7 @@ jobs:
with:
RUNNER: self-hosted-azure
SCRIPT: |
- NVTE_FUSED_ATTN=0 NVTE_FLASH_ATTN=0 python examples/nlp/language_modeling/megatron_t5_pretraining.py \
+ python examples/nlp/language_modeling/megatron_t5_pretraining.py \
trainer.devices=2 \
trainer.log_every_n_steps=1 \
trainer.max_epochs=null \
@@ -3070,7 +3096,7 @@ jobs:
+model.data.data_impl_kwargs.workers=null \
+model.data.data_impl_kwargs.sort_dataset_paths=False
- NVTE_FUSED_ATTN=0 NVTE_FLASH_ATTN=0 python examples/nlp/language_modeling/megatron_t5_pretraining.py \
+ python examples/nlp/language_modeling/megatron_t5_pretraining.py \
trainer.devices=2 \
trainer.log_every_n_steps=1 \
trainer.max_epochs=null \
@@ -3493,8 +3519,8 @@ jobs:
with:
RUNNER: self-hosted-azure
SCRIPT: |
- NVTE_FLASH_ATTN=0 NVTE_FUSED_ATTN=0 python examples/nlp/language_modeling/megatron_t5_eval.py \
- --model_file /home/TestData/nlp/megatron_t5/220m/megatron_mcore_t5_220m.nemo \
+ python examples/nlp/language_modeling/megatron_t5_eval.py \
+ --model_file /home/TestData/nlp/megatron_t5/220m/megatron_mcore_t5_220m_padding_attnmasktype.nemo \
--prompt "How do I fix my GPU memory issue? I am seeing out of memory." \
--tensor_model_parallel_size 1
@@ -3505,7 +3531,7 @@ jobs:
with:
RUNNER: self-hosted-azure
SCRIPT: |
- NVTE_FUSED_ATTN=0 NVTE_FLASH_ATTN=0 python examples/nlp/language_modeling/tuning/megatron_t5_finetuning.py \
+ python examples/nlp/language_modeling/tuning/megatron_t5_finetuning.py \
trainer.devices=2 \
trainer.log_every_n_steps=1 \
trainer.max_epochs=9999 \
@@ -3516,7 +3542,7 @@ jobs:
exp_manager.exp_dir=/tmp/nlp_mcore_t5_lora_tuning_tp2 \
model.pipeline_model_parallel_size=1 \
model.tensor_model_parallel_size=2 \
- model.restore_from_path=/home/TestData/nlp/megatron_t5/220m/megatron_mcore_t5_220m.nemo \
+ model.restore_from_path=/home/TestData/nlp/megatron_t5/220m/megatron_mcore_t5_220m_padding_attnmasktype.nemo \
model.peft.peft_scheme=lora \
model.answer_only_loss=True \
model.micro_batch_size=1 \
@@ -3528,8 +3554,8 @@ jobs:
model.data.validation_ds.file_names=[/home/TestData/nlp/megatron_sft/quarel.jsonl] \
model.data.validation_ds.names=[quarel]
- NVTE_FUSED_ATTN=0 NVTE_FLASH_ATTN=0 python examples/nlp/language_modeling/tuning/megatron_t5_generate.py \
- model.restore_from_path=/home/TestData/nlp/megatron_t5/220m/megatron_mcore_t5_220m.nemo \
+ python examples/nlp/language_modeling/tuning/megatron_t5_generate.py \
+ model.restore_from_path=/home/TestData/nlp/megatron_t5/220m/megatron_mcore_t5_220m_padding_attnmasktype.nemo \
model.peft.restore_from_path=/tmp/nlp_mcore_t5_lora_tuning_tp2/megatron_t5_peft_lora_tuning/checkpoints/megatron_t5_peft_lora_tuning.nemo \
model.peft.restore_from_ckpt_name=null \
model.peft.restore_from_hparams_path=null \
@@ -3546,7 +3572,20 @@ jobs:
inference.repetition_penalty=1.0 \
inference.outfile_path=/tmp/nlp_mcore_t5_lora_tuning_tp2/out.jsonl
- # L2: Megatron Mock Data Generation
+
+ L2_HF_Transformer_SFT_TE_Acceleration:
+ needs: [ cicd-test-container-setup ]
+ uses: ./.github/workflows/_test_template.yml
+ if: contains(fromJSON(needs.cicd-test-container-setup.outputs.test_to_run), 'L2_HF_Transformer_SFT_TE_Acceleration') || needs.cicd-test-container-setup.outputs.all == 'true'
+ with:
+ RUNNER: self-hosted-azure
+ SCRIPT: |
+ python examples/llm/sft/hf.py --model /home/TestData/nlp/hf_gemma/hf_gemma_2b --model-accelerator te
+ AFTER_SCRIPT: |
+ rm -rf nemo_experiments
+
+
+ # L2: Megatron Mock Data Generation
L2_Megatron_Mock_Data_Generation_MockGPTDataset:
needs: [cicd-test-container-setup]
uses: ./.github/workflows/_test_template.yml
@@ -3666,13 +3705,13 @@ jobs:
# runs-on: self-hosted-azure
# timeout-minutes: 10
# container:
- # image: nemoci.azurecr.io/nemo_container_${{ github.run_id }}
- # options:
+ # image: nemoci.azurecr.io/nemo_container:${{ github.run_id }}
+ # options:
# # --user 0:128
# --device=/dev/nvidia0
# --gpus all
# --shm-size=8g
- # --env TRANSFORMERS_OFFLINE=0
+ # --env TRANSFORMERS_OFFLINE=0
# --env HYDRA_FULL_ERROR=1
# --volume /mnt/datadrive/TestData:/home/TestData
# steps:
@@ -3703,32 +3742,6 @@ jobs:
# #- uses: "NVIDIA/NeMo/.github/actions/cancel-workflow@main"
# # if: "failure()"
- L2_TTS_Fast_dev_runs_1_Mixer-TTS:
- needs: [cicd-test-container-setup]
- uses: ./.github/workflows/_test_template.yml
- if: contains(fromJSON(needs.cicd-test-container-setup.outputs.test_to_run), 'L2_TTS_Fast_dev_runs_1_Mixer-TTS') || needs.cicd-test-container-setup.outputs.all == 'true'
- with:
- RUNNER: self-hosted-azure
- SCRIPT: |
- python examples/tts/mixer_tts.py \
- train_dataset=/home/TestData/an4_dataset/an4_train.json \
- validation_datasets=/home/TestData/an4_dataset/an4_val.json \
- sup_data_path=/home/TestData/an4_dataset/sup_data \
- trainer.devices="[0]" \
- +trainer.limit_train_batches=1 \
- +trainer.limit_val_batches=1 \
- trainer.max_epochs=1 \
- trainer.strategy=auto \
- model.pitch_mean=212.35873413085938 \
- model.pitch_std=68.52806091308594 \
- model.train_ds.dataloader_params.batch_size=4 \
- model.train_ds.dataloader_params.num_workers=0 \
- model.validation_ds.dataloader_params.batch_size=4 \
- model.validation_ds.dataloader_params.num_workers=0 \
- ~trainer.check_val_every_n_epoch \
- ~model.text_normalizer \
- ~model.text_normalizer_call_kwargs
-
L2_TTS_Fast_dev_runs_1_Hifigan:
needs: [cicd-test-container-setup]
uses: ./.github/workflows/_test_template.yml
@@ -3757,13 +3770,13 @@ jobs:
# needs: [cicd-test-container-setup]
# runs-on: self-hosted-azure
# container:
- # image: nemoci.azurecr.io/nemo_container_${{ github.run_id }}
- # options:
+ # image: nemoci.azurecr.io/nemo_container:${{ github.run_id }}
+ # options:
# # --user 0:128
# --device=/dev/nvidia0
# --gpus all
- # --shm-size=8g
- # --env TRANSFORMERS_OFFLINE=0
+ # --shm-size=8g
+ # --env TRANSFORMERS_OFFLINE=0
# --env HYDRA_FULL_ERROR=1
# --volume /mnt/datadrive/TestData:/home/TestData
# steps:
@@ -3973,14 +3986,14 @@ jobs:
with:
RUNNER: self-hosted-azure
SCRIPT: |
- NVTE_FUSED_ATTN=0 NVTE_FLASH_ATTN=0 python tests/collections/llm/megatron_t5_pretraining.py \
+ python tests/collections/llm/megatron_t5_pretraining.py \
--devices=2 \
--max-steps=3 \
--experiment-dir=tests/collections/llm/t5_pretrain_results/${{ github.run_id }} \
--data-path=/home/TestData/nlp/megatron_t5/data/pile_val_small_bert_tokenizer_text_document \
--index-mapping-dir=tests/collections/llm/t5_index_mappings/${{ github.run_id }}
- NVTE_FUSED_ATTN=0 NVTE_FLASH_ATTN=0 python tests/collections/llm/megatron_t5_pretraining.py \
+ python tests/collections/llm/megatron_t5_pretraining.py \
--devices=2 \
--max-steps=6 \
--experiment-dir=tests/collections/llm/t5_pretrain_results/${{ github.run_id }} \
@@ -3997,11 +4010,11 @@ jobs:
with:
RUNNER: self-hosted-azure
SCRIPT: |
- NVTE_FUSED_ATTN=0 NVTE_FLASH_ATTN=0 python tests/collections/llm/megatron_t5_finetuning.py \
+ python tests/collections/llm/megatron_t5_finetuning.py \
--devices=2 \
--max-steps=250 \
--experiment-dir=tests/collections/llm/t5_finetune_results/${{ github.run_id }} \
- --checkpoint-path=/home/TestData/nlp/megatron_t5/220m/nemo2.0_t5_220m_150steps
+ --checkpoint-path=/home/TestData/nlp/megatron_t5/220m/nemo2.0_t5_220m_padding_attnmasktype_150steps
AFTER_SCRIPT: |
rm -rf tests/collections/llm/t5_finetune_results/${{ github.run_id }}
@@ -4012,12 +4025,12 @@ jobs:
with:
RUNNER: self-hosted-azure
SCRIPT: |
- NVTE_FUSED_ATTN=0 NVTE_FLASH_ATTN=0 python tests/collections/llm/megatron_t5_finetuning.py \
+ python tests/collections/llm/megatron_t5_finetuning.py \
--devices=2 \
--max-steps=250 \
--peft=lora \
--experiment-dir=tests/collections/llm/t5_peft_results/${{ github.run_id }} \
- --checkpoint-path=/home/TestData/nlp/megatron_t5/220m/nemo2.0_t5_220m_150steps
+ --checkpoint-path=/home/TestData/nlp/megatron_t5/220m/nemo2.0_t5_220m_padding_attnmasktype_150steps
AFTER_SCRIPT: |
rm -rf tests/collections/llm/t5_peft_results/${{ github.run_id }}
@@ -4320,6 +4333,120 @@ jobs:
--pp_size 1 \
--mbs 1 --packed
+ L2_NeMo_2_GPT_DoRA_TP1PP1_MBS1_PACKED:
+ needs: [cicd-test-container-setup]
+ uses: ./.github/workflows/_test_template.yml
+ if: contains(fromJSON(needs.cicd-test-container-setup.outputs.test_to_run), 'L2_NeMo_2_GPT_DoRA_TP1PP1_MBS1_PACKED') || needs.cicd-test-container-setup.outputs.all == 'true'
+ with:
+ RUNNER: self-hosted-azure
+ SCRIPT: |
+
+ python tests/collections/llm/gpt_finetuning.py \
+ --restore_path /home/TestData/nemo2_ckpt/llama_68M \
+ --devices 2 \
+ --max_steps 3 \
+ --experiment_dir /tmp/nemo2_gpt_finetune/${{ github.run_id }} \
+ --peft dora \
+ --tp_size 1 \
+ --pp_size 1 \
+ --mbs 1 --packed
+
+ python tests/collections/llm/gpt_finetuning.py \
+ --restore_path /home/TestData/nemo2_ckpt/llama_68M \
+ --devices 2 \
+ --max_steps 6 \
+ --experiment_dir /tmp/nemo2_gpt_finetune/${{ github.run_id }} \
+ --peft dora \
+ --tp_size 1 \
+ --pp_size 1 \
+ --mbs 1 --packed
+
+ L2_NeMo_2_Mixtral_LoRA_EP2PP1_MBS2:
+ needs: [cicd-test-container-setup]
+ uses: ./.github/workflows/_test_template.yml
+ if: contains(fromJSON(needs.cicd-test-container-setup.outputs.test_to_run), 'L2_NeMo_2_Mixtral_LoRA_EP2PP1_MBS2') || needs.cicd-test-container-setup.outputs.all == 'true'
+ with:
+ RUNNER: self-hosted-azure
+ SCRIPT: |
+
+ python tests/collections/llm/lora_mistralai.py \
+ --max-steps 3 \
+ --ep 1 \
+ --mbs 2 \
+ --model mixtral
+
+ L2_NeMo_2_Mixtral_LoRA_TP1PP1_MBS1:
+ needs: [cicd-test-container-setup]
+ uses: ./.github/workflows/_test_template.yml
+ if: contains(fromJSON(needs.cicd-test-container-setup.outputs.test_to_run), 'L2_NeMo_2_Mixtral_LoRA_TP1PP1_MBS1') || needs.cicd-test-container-setup.outputs.all == 'true'
+ with:
+ RUNNER: self-hosted-azure
+ SCRIPT: |
+
+ python tests/collections/llm/lora_mistralai.py \
+ --max-steps 3 \
+ --tp 1 \
+ --mbs 1 \
+ --model mixtral \
+ --dist-opt
+
+ L2_NeMo_2_Mixtral_LoRA_TP2PP1_MBS1:
+ needs: [cicd-test-container-setup]
+ uses: ./.github/workflows/_test_template.yml
+ if: contains(fromJSON(needs.cicd-test-container-setup.outputs.test_to_run), 'L2_NeMo_2_Mixtral_LoRA_TP2PP1_MBS1') || needs.cicd-test-container-setup.outputs.all == 'true'
+ with:
+ RUNNER: self-hosted-azure
+ SCRIPT: |
+
+ python tests/collections/llm/lora_mistralai.py \
+ --max-steps 3 \
+ --tp 2 \
+ --mbs 1 \
+ --model mixtral \
+ --dist-opt
+
+ L2_NeMo_2_Mistral_LoRA_TP1PP1_MBS1:
+ needs: [cicd-test-container-setup]
+ uses: ./.github/workflows/_test_template.yml
+ if: contains(fromJSON(needs.cicd-test-container-setup.outputs.test_to_run), 'L2_NeMo_2_Mistral_LoRA_TP1PP1_MBS1') || needs.cicd-test-container-setup.outputs.all == 'true'
+ with:
+ RUNNER: self-hosted-azure
+ SCRIPT: |
+
+ python tests/collections/llm/lora_mistralai.py \
+ --max-steps 3 \
+ --tp 1 \
+ --mbs 1 \
+ --model mistral \
+ --dist-opt
+
+ L2_NeMo_2_Mistral_LoRA_TP2PP1_MBS1:
+ needs: [cicd-test-container-setup]
+ uses: ./.github/workflows/_test_template.yml
+ if: contains(fromJSON(needs.cicd-test-container-setup.outputs.test_to_run), 'L2_NeMo_2_Mistral_LoRA_TP2PP1_MBS1') || needs.cicd-test-container-setup.outputs.all == 'true'
+ with:
+ RUNNER: self-hosted-azure
+ SCRIPT: |
+
+ python tests/collections/llm/lora_mistralai.py \
+ --max-steps 3 \
+ --tp 2 \
+ --mbs 1 \
+ --model mistral \
+ --dist-opt
+
+ L2_NEMO_2_LoRA_MERGE:
+ needs: [cicd-test-container-setup]
+ uses: ./.github/workflows/_test_template.yml
+ if: contains(fromJSON(needs.cicd-test-container-setup.outputs.test_to_run), 'L2_NEMO_2_LoRA_MERGE') || needs.cicd-test-container-setup.outputs.all == 'true'
+ with:
+ RUNNER: self-hosted-azure
+ SCRIPT: |
+
+ python tests/collections/llm/peft/lora_merge.py \
+ --lora_checkpoint_path=/home/TestData/nemo2_ckpt/llama_lora_ci_checkpoint/ \
+ --output_path=/tmp/nemo2_lora_merge/${{ github.run_id }}
+
L2_NeMo_2_NeMo_Mcore_Mixtral_bitexact:
needs: [cicd-test-container-setup]
uses: ./.github/workflows/_test_template.yml
@@ -4345,9 +4472,8 @@ jobs:
rm -rf /tmp/nemo2_ptq_engine
Nemo_CICD_Test:
- needs:
+ needs:
- pre-flight
- - gpu-test
- cicd-test-container-setup
- L0_Unit_Tests_GPU_ASR
@@ -4361,7 +4487,7 @@ jobs:
- L0_Unit_Tests_GPU_Hydra
- L0_Unit_Tests_GPU_Lightning
- L0_Unit_Tests_GPU_Others
-
+
- L0_Unit_Tests_CPU_ASR
- L0_Unit_Tests_CPU_Audio
- L0_Unit_Tests_CPU_Common
@@ -4421,13 +4547,14 @@ jobs:
- L2_RAG_Pipeline_Generating
- L2_Megatron_GPT_Pretraining_and_Resume_Training_TP2
- L2_Megatron_GPT_Skip_Train
+ - L2_Megatron_LM_To_NeMo_Conversion
- L2_Megatron_GPT_with_Rope_Pretraining_and_Resume_Training_TP2
- L2_Megatron_GPT_with_ResetLR_Pretraining_and_Resume_Training_TP2
- L2_Megatron_GPT_with_Drop_Optimizer_States_TP2
- L2_Megatron_GPT_with_ALiBi_Pretraining_and_Resume_Training_TP2
- L2_Megatron_GPT_with_KERPLE_Pretraining_and_Resume_Training_TP2
# - Optional_L2_Megatron_GPT_Pretraining_and_Resume_Training_PP2
- #- OPTIONAL_L2_Megatron_GPT_Auto_Configurator_TP1_PP1_MBS124
+ - L2_Megatron_GPT_Auto_Configurator_TP1_PP1_MBS124
- L2_Megatron_GPT_Finetuning_PP2
- L2_Megatron_GPT_Finetuning_StarCoder_PP1
- L2_Megatron_GPT_Embedding
@@ -4453,14 +4580,14 @@ jobs:
- L2_TTS_Fast_dev_runs_1_WaveGlow
- L2_TTS_Fast_dev_runs_1_FastPitch
#- OPTIONAL_L2_TTS_Fast_dev_runs_1_RADTTS
- - L2_TTS_Fast_dev_runs_1_Mixer-TTS
- L2_TTS_Fast_dev_runs_1_Hifigan
- Speech_Checkpoints_tests
- L2_Stable_Diffusion_Training
- L2_NeMo_2_GPT_Pretraining_no_transformer_engine
- L2_NeMo_2_GPT_DDP_Param_Parity_check
- L2_NeMo_2_HF_MODEL_IMPORT
- - L2_NeMo_2_llama3_pretraining_recipe
+ - L2_NeMo_2_llama3_pretraining_recipe
+ - L2_HF_Transformer_SFT_TE_Acceleration
- L2_NeMo_2_SSM_Pretraining
- L2_NeMo_2_SSM_Finetuning
- L2_NeMo_2_T5_Pretraining
@@ -4476,11 +4603,16 @@ jobs:
- L2_NeMo_2_GPT_LoRA_TP1PP2_MBS2
- L2_NeMo_2_GPT_LoRA_TP2PP1_MBS2
- L2_NeMo_2_GPT_LoRA_TP1PP1_MBS1_PACKED
+ - L2_NeMo_2_GPT_DoRA_TP1PP1_MBS1_PACKED
+ - L2_NeMo_2_Mixtral_LoRA_EP2PP1_MBS2
+ - L2_NeMo_2_Mixtral_LoRA_TP1PP1_MBS1
+ - L2_NeMo_2_Mixtral_LoRA_TP2PP1_MBS1
+ - L2_NeMo_2_Mistral_LoRA_TP1PP1_MBS1
+ - L2_NeMo_2_Mistral_LoRA_TP2PP1_MBS1
+ - L2_NEMO_2_LoRA_MERGE
- L2_NeMo_2_Mixtral_Pretraining
- - L2_PTQ_Llama2_INT8_SQ
- L2_PTQ_Llama2_FP8
- L2_Community_LLM_Checkpoints_tests_Llama3
- - L2_PTQ_Llama2_Export_Only
- L2_Distill_Llama2
- L2_Prune_Width_Llama2
- L2_Speech_to_Text_AED
@@ -4495,7 +4627,7 @@ jobs:
- L2_NeMo_2_PTQ_Llama2_FP8
if: always()
runs-on: ubuntu-latest
- steps:
+ steps:
- name: Evaluate conclusion
if: ${{ always() }}
id: pipeline-conclusion
@@ -4509,14 +4641,14 @@ jobs:
echo "SUCCESS=$SUCCESS" >> $GITHUB_OUTPUT
# This should depend on all the tests so we block/unblock based on all tests passing
- - name: Pipeline successful, set exit code to 0
+ - name: Pipeline successful, set exit code to 0
if: ${{ always() && steps.pipeline-conclusion.outputs.SUCCESS == 'true' }}
run: exit 0
- - name: Pipeline successful, add PR comment
+ - name: Pipeline successful, add PR comment
if: ${{ always() && steps.pipeline-conclusion.outputs.SUCCESS == 'true' && github.event_name == 'pull_request' && env.SLACK_WEBHOOK != '' }}
uses: peter-evans/create-or-update-comment@v4
- env:
+ env:
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
REPOSITORY: ${{ github.repository }}
RUN_ID: ${{ github.run_id }}
@@ -4535,7 +4667,7 @@ jobs:
- name: "Pipeline not successful and not cancelled: Send Slack alert & create step summary"
if: ${{ always() && steps.pipeline-conclusion.outputs.FAILED == 'true' && env.SLACK_WEBHOOK != '' }}
- env:
+ env:
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
SLACK_WEBHOOK_ADMIN:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -4628,4 +4760,4 @@ jobs:
- name: "Pipeline not successful, set exit code to 1"
if: ${{ always() && steps.pipeline-conclusion.outputs.SUCCESS == 'false' }}
- run: exit 1
+ run: exit 1
\ No newline at end of file
diff --git a/.github/workflows/code-formatting.yml b/.github/workflows/code-formatting.yml
index a4b8cf3d4072..b08e9676aabd 100644
--- a/.github/workflows/code-formatting.yml
+++ b/.github/workflows/code-formatting.yml
@@ -1,4 +1,4 @@
-name: Isort and Black Formatting
+name: Isort and Black Formatting; PyLint Docs check
# Incrementally reformat only changed files with black, all files with isort
#
# Replaces pre-commit.ci, since it reformats all the files.
@@ -13,6 +13,11 @@ on:
pull_request_target:
paths:
- '**.py'
+ types: [ opened, synchronize, reopened, labeled, unlabeled ]
+
+defaults:
+ run:
+ shell: bash -x -e -u -o pipefail {0}
jobs:
reformat_with_isort_and_black:
@@ -46,6 +51,7 @@ jobs:
- name: black
uses: psf/black@stable
+ if: ${{ steps.changed-files.outputs.any_changed == 'true' }}
with:
options: "--verbose"
# apply only to changed files (pass explicitly the files)
@@ -54,6 +60,7 @@ jobs:
- name: isort
uses: isort/isort-action@v1
+ if: ${{ steps.changed-files.outputs.any_changed == 'true' }}
with:
isort-version: "5.13.2"
# reformat all files with isort - safe since the whole repo is already reformatted
@@ -64,3 +71,134 @@ jobs:
with:
message: Apply isort and black reformatting
commit: --signoff
+
+ check_pylint:
+ name: "check_pylint (strict-mode: ${{ matrix.strict-mode }})"
+ runs-on: ubuntu-latest
+ permissions:
+ contents: write
+ pull-requests: write
+ env:
+ THRESHOLD: 1730937600 # On this date (2024/11/07) we decided to add Pylint. It runs in strict mode only for files added after this date; for files added before it, we only post a PR comment with Pylint's output.
+ strategy:
+ matrix:
+ strict-mode: ["true", "false"]
+ steps:
+ - name: Checkout branch
+ uses: actions/checkout@v4
+ with:
+ # setup repository and ref for PRs, see
+ # https://github.com/EndBug/add-and-commit?tab=readme-ov-file#working-with-prs
+ repository: ${{ github.event.pull_request.head.repo.full_name }}
+ ref: ${{ github.event.pull_request.head.ref }}
+ fetch-depth: 0
+
+ # https://github.com/tj-actions/changed-files
+ - name: Get changed files
+ id: changed-files
+ uses: tj-actions/changed-files@v44
+ with:
+ files: |
+ **.py
+
+ - name: Setup Python env
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+
+ - name: pylint
+ if: ${{ steps.changed-files.outputs.any_changed == 'true' && !contains( github.event.pull_request.labels.*.name, 'skip-docs') }}
+ id: pylint
+ env:
+ # only *.py files included
+ STRICT_MODE: ${{ matrix.strict-mode }}
+ CHANGED_FILES: "${{ steps.changed-files.outputs.all_changed_files }}"
+ run: |
+ pip install pylint
+
+ FILTERED=()
+ for file in $CHANGED_FILES; do
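+ # first commit date of the file (Unix seconds); git log lists newest first, so tail -1 is the oldest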
+ DATE=$(git log --format=%ad --date=unix $file | tail -1)
+
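+ # strict mode covers files added after THRESHOLD; advisory mode covers the older ones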
+ if [[ "$STRICT_MODE" == "true" ]]; then
+ if [[ "$DATE" -gt "$THRESHOLD" ]]; then
+ FILTERED+=("$file")
+ fi
+ else
+ if [[ "$DATE" -le "$THRESHOLD" ]]; then
+ FILTERED+=("$file")
+ fi
+ fi
+ done
+
+ if [ ${#FILTERED[@]} -eq 0 ]; then
+ echo "No files to check."
+ exit 0
+ fi
+
+ echo "Will run on these files:
+ ${FILTERED[@]}"
+
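+ # relax exit-on-error (the default shell uses -e) so pylint's exit code can be captured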
+ set +e
+ LOG=$(pylint ${FILTERED[@]})
+ EXIT_CODE=$?
+ set -e
+
+ echo "$LOG"
+ echo "OUTPUT<> $GITHUB_ENV
+ echo "$LOG" >> $GITHUB_ENV
+ echo "EOF" >> $GITHUB_ENV
+ echo "log=$LOG"
+
+ if [[ "${{ matrix.strict-mode }}" == "true" ]]; then
+ HEADER="π¨ The following files must be fixed before merge!"
+ else
+ HEADER="π The following files have warnings. In case you are familiar with these, please try helping us to improve the code base."
+ fi
+ echo "header=$HEADER" | tee -a "$GITHUB_OUTPUT"
+
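+ # fail the job only in strict mode; the advisory run always exits 0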
+ exit $([[ "$EXIT_CODE" -ne 0 && "$STRICT_MODE" == "true" ]] && echo $EXIT_CODE || echo 0)
+
+ - name: Find Comment
+ if: ${{ always() && env.OUTPUT != '' }}
+ uses: peter-evans/find-comment@v3
+ id: fc
+ with:
+ issue-number: ${{ github.event.number }}
+ body-includes:
+
+ - name: Delete comment
+ if: ${{ always() && env.OUTPUT != '' && steps.fc.outputs.comment-id != '' }}
+ env:
+ GH_TOKEN: ${{ secrets.github_token }}
+ REPOSITORY: ${{ github.repository }}
+ COMMENT_ID: ${{ steps.fc.outputs.comment-id }}
+ run: |
+ curl -L \
+ -X DELETE \
+ -H "Accept: application/vnd.github+json" \
+ -H "Authorization: Bearer $GH_TOKEN" \
+ -H "X-GitHub-Api-Version: 2022-11-28" \
+ https://api.github.com/repos/$REPOSITORY/issues/comments/$COMMENT_ID
+
+ - name: Add PR comment for PyLint
+ if: ${{ always() && env.OUTPUT != '' }}
+ uses: peter-evans/create-or-update-comment@v4
+ with:
+ issue-number: ${{ github.event.number }}
+ body: |
+
+
+ beep boop 🤖: ${{ steps.pylint.outputs.header }}
+
+ ---
+
+ Your code was analyzed with PyLint. The following annotations have been identified:
+
+ ```
+ ${{ env.OUTPUT }}
+ ```
+
+ ---
+
+ Thank you for improving NeMo's documentation!
diff --git a/.github/workflows/copyright-check.yml b/.github/workflows/copyright-check.yml
index 724f3afb6177..ebd35c51dc44 100644
--- a/.github/workflows/copyright-check.yml
+++ b/.github/workflows/copyright-check.yml
@@ -11,49 +11,12 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+
name: Copyright check
on:
pull_request:
jobs:
- main:
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repository
- uses: actions/checkout@v4
- with:
- path: ${{ github.run_id }}
- fetch-depth: 0
-
- - name: Check files have copyright notice
- run: |
- cd ${{ github.run_id }}
-
- # Files ending with .py should have Copyright notice in the first 10 lines
- find_files_with_missing_copyright() {
- find ./ -type f -name '*.py' -not -path "./.git/*" -not -path "./*__init__.py" | while read path; do
- echo -en $path"\t"
- head -n 10 $path | tr '\n' '\t' | sed 's/\t$/\n/'
- done \
- | egrep -iv 'Copyright.*NVIDIA CORPORATION.*' \
- | egrep -iv '*MIT.*Licen.e.*' \
- | egrep -iv '*Copyright.*Apache.*' \
- | egrep -iv '*Apache.*License.*' \
- | while read line; do
- echo $line | cut -d' ' -f1
- done
- }
-
-
- declare RESULT=($(find_files_with_missing_copyright)) # (..) = array
-
- if [ "${#RESULT[@]}" -gt 0 ]; then
- echo "Error: Found files with missing copyright:"
- for (( i=0; i<"${#RESULT[@]}"; i++ )); do
- echo "path= ${RESULT[$i]}"
- done
- exit 1;
- else
- echo "Ok: All (Python) files start with copyright notice"
- fi
+ copyright-check:
+ uses: NVIDIA/NeMo-FW-CI-templates/.github/workflows/_copyright_check.yml@v0.2.0
\ No newline at end of file
diff --git a/.github/workflows/mcore-tag-bump-bot.yml b/.github/workflows/mcore-tag-bump-bot.yml
index 13f4059a3a6b..1b0712924101 100644
--- a/.github/workflows/mcore-tag-bump-bot.yml
+++ b/.github/workflows/mcore-tag-bump-bot.yml
@@ -6,54 +6,15 @@ on:
- cron: 0 0 * * *
jobs:
- main:
- runs-on: ubuntu-latest
- environment: main
- steps:
- - name: Checkout NVIDIA/Megatron-LM
- uses: actions/checkout@v4
- with:
- repository: NVIDIA/Megatron-LM
- ref: main
- path: ${{ github.run_id }}
-
- - name: Get latest mcore commit
- id: ref
- run: |
- cd ${{ github.run_id }}
- sha=$(git rev-parse origin/main)
- echo "sha=${sha}" >> "$GITHUB_OUTPUT"
- echo "short_sha=${sha:0:7}" >> "$GITHUB_OUTPUT"
- echo "date=$(date +%F)" >> "$GITHUB_OUTPUT"
-
- - name: Checkout ${{ github.repository }}
- uses: actions/checkout@v4
- with:
- path: ${{ github.run_id }}
- token: ${{ secrets.PAT }}
-
- - name: Bump MCORE_TAG
- run: |
- cd ${{ github.run_id }}
- sed -i 's/^ARG MCORE_TAG=.*$/ARG MCORE_TAG=${{ steps.ref.outputs.sha }}/' Dockerfile.ci
-
- - name: Create Bump PR
- uses: peter-evans/create-pull-request@v6
- id: create-pull-request
- with:
- path: ${{ github.run_id }}
- branch: bump-ci-container-${{ steps.ref.outputs.date }}
- base: main
- title: 'Bump `Dockerfile.ci` (${{ steps.ref.outputs.date }})'
- token: ${{ secrets.PAT }}
- body: |
- 🚀 PR to Bump `Dockerfile.ci`.
-
- 📝 Please remember the following to-do's before merge:
- - [ ] Verify the presubmit CI
-
- 🙏 Please merge this PR only if the CI workflow completed successfully.
- commit-message: "[🤖]: Howdy folks, let's bump `Dockerfile.ci` to ${{ steps.ref.outputs.short_sha }} !"
- signoff: true
- reviewers: 'pablo-garay'
- labels: 'Run CICD'
+ mcore:
+ uses: NVIDIA/NeMo-FW-CI-templates/.github/workflows/_bump_dockerfile.yml@v0.11.0
+ with:
+ source-repository: NVIDIA/Megatron-LM
+ source-ref: main
+ build-arg: MCORE_TAG
+ dockerfile: Dockerfile.ci
+ base-branch: main
+ cicd-label: Run CICD
+ pr-reviewers: 'pablo-garay'
+ secrets:
+ PAT: ${{ secrets.PAT }}
\ No newline at end of file
diff --git a/.github/workflows/monitor-vms.yml b/.github/workflows/monitor-vms.yml
index 6795f87abf68..0bb54524847a 100644
--- a/.github/workflows/monitor-vms.yml
+++ b/.github/workflows/monitor-vms.yml
@@ -27,7 +27,7 @@ jobs:
| jq -c '[
.runners[]
| select(.status == "online")
- | select(.name | contains("gpu"))
+ | select(.name | contains("cpu") | not)
| {
"vm": .name,
"n_gpus": [
diff --git a/.github/workflows/release-freeze.yml b/.github/workflows/release-freeze.yml
index 0097f0aa2f9f..70ecd73c2252 100644
--- a/.github/workflows/release-freeze.yml
+++ b/.github/workflows/release-freeze.yml
@@ -9,7 +9,6 @@ on:
options:
- major
- minor
- - pre_release
mcore_version:
description: 'Version of MCore to use (must be a valid git ref)'
required: true
@@ -17,14 +16,15 @@ on:
jobs:
code-freeze:
- uses: NVIDIA/NeMo-FW-CI-templates/.github/workflows/_code_freeze.yml
+ uses: NVIDIA/NeMo-FW-CI-templates/.github/workflows/_code_freeze.yml@v0.8.0
with:
- name_of_library: NeMo-Toolkit
- type_of_release: ${{ inputs.type_of_release }}
+ library_name: NeMo-Toolkit
python_package: nemo
+ type_of_release: ${{ inputs.type_of_release }}
secrets:
SLACK_RELEASE_ENDPOINT: ${{ secrets.SLACK_RELEASE_ENDPOINT }}
-
+ SLACK_WEBHOOK_ADMIN: ${{ secrets.SLACK_WEBHOOK_ADMIN }}
+
freeze-tags:
runs-on: ubuntu-latest
needs: [code-freeze]
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index ba1d877b03f9..03474251f995 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -1,105 +1,42 @@
-name: "NeMo Code release"
-
-on:
+# Copyright (c) 2020-2021, NVIDIA CORPORATION.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+name: "Release Neural Modules"
+
+on:
workflow_dispatch:
inputs:
- branch:
- description: Branch to release
+ release-ref:
+ description: Ref (SHA or branch name) to release
required: true
type: string
-jobs:
- main:
- if: contains(fromJSON('["ko3n1g"]'), github.actor)
- runs-on: ubuntu-latest
- environment:
- name: main
- steps:
-
- - name: Checkout repository
- uses: actions/checkout@v4
- with:
- path: ${{ github.run_id }}
- ref: ${{ inputs.branch }}
-
- - name: Create release
- id: version-number
- run: |
- cd ${{ github.run_id }}
- VERSION=$(python -c "import nemo; print(nemo.__version__)")
-
- NAME="NVIDIA Neural Modules ${VERSION}"
- CHANGELOG=$(awk '/^## '"$NAME"'/{flag=1; next} /^## /{flag=0} flag' CHANGELOG.md)
- CHANGELOG=$(echo "$CHANGELOG" | sed '/./,$!d' | sed ':a;N;$!ba;s/\n$//')
-
- PAYLOAD=$(jq \
- -n \
- -c \
- --arg TAG_NAME "v${VERSION}" \
- --arg CI_COMMIT_BRANCH "${{ inputs.branch }}" \
- --arg NAME "$NAME" \
- --arg BODY "$CHANGELOG" \
- '{
- "tag_name": $TAG_NAME,
- "target_commitish": $CI_COMMIT_BRANCH,
- "name": $NAME,
- "body": $BODY,
- "draft": false,
- "prerelease": false,
- "generate_release_notes": false
- }'
- )
-
- curl -L \
- -X POST \
- -H "Accept: application/vnd.github+json" \
- -H "Authorization: Bearer ${{ secrets.PAT }}" \
- -H "X-GitHub-Api-Version: 2022-11-28" \
- https://api.github.com/repos/NVIDIA/NeMo/releases \
- -d "$PAYLOAD"
-
- - name: Build, test, and release wheel
- env:
- TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }}
- TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
- run: |
- cd ${{ github.run_id }}
- EXPECTED_VERSION=$(python -c 'import nemo; print(nemo.__version__)')
-
- python3 -m pip install --upgrade build
- python3 -m build
-
- pip install dist/*.whl
- cd ../
-
- INSTALLED_VERSION=$(python -c 'import nemo; print(nemo.__version__)')
-
- if [[ "$INSTALLED_VERSION" != "$EXPECTED_VERSION" ]]; then
- echo 'Wheel has an outdated version, mission abort immediately!'
- exit 1
- fi
-
- echo Proceed with uploading wheel...
- cd ${{ github.run_id }}
- python3 -m pip install --upgrade twine
- python3 -m twine upload --repository pypi dist/*
-
- - name: notify
- run: |
- VERSION=${{ steps.version-number.outputs.VERSION }}
-
- URL="https://github.com/NVIDIA/NeMo/releases/tag/v$VERSION"
- MESSAGE='{
- "blocks": [
- {
- "type": "section",
- "text": {
- "type": "mrkdwn",
- "text": "Releasebot π€: NeMo-Toolkit released <'$URL'|`'$VERSION'`> π"
- }
- }
- ]
- }'
-
-
- curl -X POST -H "Content-type: application/json" --data "$MESSAGE" ${{ secrets.SLACK_RELEASE_ENDPOINT }}
\ No newline at end of file
+jobs:
+ release:
+ uses: NVIDIA/NeMo-FW-CI-templates/.github/workflows/_release_library.yml@v0.12.3
+ with:
+ release-ref: ${{ inputs.release-ref }}
+ image-name: nemo_container
+ dockerfile: Dockerfile.ci
+ image-label: nemo-core
+ build-args: |
+ IMAGE_LABEL=nemo-core
+ prune-filter-timerange: 24h
+ python-package: nemo
+ container-workdir: /workspace
+ library-name: Neural Modules
+ secrets:
+ TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }}
+ TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
+ SLACK_RELEASE_ENDPOINT: ${{ secrets.SLACK_RELEASE_ENDPOINT }}
+ PAT: ${{ secrets.PAT }}
diff --git a/.github/workflows/secrets-detector.yml b/.github/workflows/secrets-detector.yml
index cf8ccc189ab6..825ae7a653fc 100644
--- a/.github/workflows/secrets-detector.yml
+++ b/.github/workflows/secrets-detector.yml
@@ -14,7 +14,7 @@
name: Secrets detector
on:
- pull_request:
+ pull_request_target:
branches:
- 'main'
@@ -25,13 +25,24 @@ jobs:
- name: Checkout repository
uses: actions/checkout@v4
with:
- path: ${{ github.run_id }}
+ # setup repository and ref for PRs, see
+ # https://github.com/EndBug/add-and-commit?tab=readme-ov-file#working-with-prs
+ repository: ${{ github.event.pull_request.head.repo.full_name }}
+ ref: ${{ github.event.pull_request.head.ref }}
+ # custom token is required to trigger actions after reformatting + pushing
fetch-depth: 0
+ token: ${{ secrets.NEMO_REFORMAT_TOKEN }}
- name: Install secrets detector
run: pip install detect-secrets
- name: Run on change-set
run: |
- cd ${{ github.run_id }}
- git diff --name-only --diff-filter=d --merge-base origin/main -z | xargs -0 detect-secrets-hook --baseline .secrets.baseline
\ No newline at end of file
+ git diff --name-only --diff-filter=d --merge-base origin/main -z | xargs -0 detect-secrets-hook --baseline .secrets.baseline
+
+ - uses: EndBug/add-and-commit@v9
+ # Commit changes. Nothing is committed if no changes.
+ if: always()
+ with:
+ message: Update baseline
+ commit: --signoff
diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 000000000000..64a576751e5f
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,12 @@
+[MAIN]
+ignore-paths=tests
+max-line-length=119
+
+[MESSAGES CONTROL]
+disable=all
+
+enable=C0115,C0116,W0611,C0301
+# C0115: missing-class-docstring
+# C0116: missing-function-docstring
+# W0611: unused-import
+# C0301: line-too-long
diff --git a/.secrets.baseline b/.secrets.baseline
index c26f70775c5a..09fc7a78a6ca 100644
--- a/.secrets.baseline
+++ b/.secrets.baseline
@@ -90,6 +90,10 @@
{
"path": "detect_secrets.filters.allowlist.is_line_allowlisted"
},
+ {
+ "path": "detect_secrets.filters.common.is_baseline_file",
+ "filename": ".secrets.baseline"
+ },
{
"path": "detect_secrets.filters.common.is_ignored_due_to_verification_policies",
"min_level": 2
@@ -273,7 +277,7 @@
"filename": "scripts/checkpoint_converters/convert_mistral_7b_hf_to_nemo.py",
"hashed_secret": "e0308bd21bffc156d79208f9ecf130370a015002",
"is_verified": false,
- "line_number": 460
+ "line_number": 471
}
],
"scripts/dataset_processing/nlp/intent_and_slot/assistant_utils.py": [
@@ -1929,7 +1933,7 @@
"filename": "tutorials/speaker_tasks/Speaker_Diarization_Inference.ipynb",
"hashed_secret": "80903ddedcf4ec0a2ee5911cefa7e1ad52419dcc",
"is_verified": false,
- "line_number": 989
+ "line_number": 990
}
],
"tutorials/tools/DefinedCrowd_x_NeMo_ASR_Training_Tutorial.ipynb": [
@@ -2083,5 +2087,5 @@
}
]
},
- "generated_at": "2024-10-25T13:43:17Z"
+ "generated_at": "2024-11-14T09:37:19Z"
}
diff --git a/Dockerfile.ci b/Dockerfile.ci
index c881a4c830eb..e1b78547325a 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -15,9 +15,10 @@
# limitations under the License.
ARG BASE_IMAGE=nvcr.io/nvidia/pytorch:24.07-py3
-
+ARG IMAGE_LABEL
FROM ${BASE_IMAGE}
-LABEL "nemo.library"="nemo-core"
+
+# re-declare IMAGE_LABEL: ARGs defined before FROM are not in scope after it
+ARG IMAGE_LABEL
+LABEL "nemo.library"=${IMAGE_LABEL}
ENV TRANSFORMERS_OFFLINE=0
ENV HYDRA_FULL_ERROR=1
@@ -52,8 +53,8 @@ RUN pip install nemo_run@git+https://github.com/NVIDIA/NeMo-Run.git@${NEMO_RUN_T
# Install NeMo requirements
ARG TE_TAG=7d576ed25266a17a7b651f2c12e8498f67e0baea
-ARG MODELOPT_VERSION=0.17.0
-ARG MCORE_TAG=213c8a23fa9fe95d19eff0932a1e6e71767f0962
+ARG MODELOPT_VERSION=0.19.0
+ARG MCORE_TAG=c1728c12f1f1cdbb786e52f1ffe512295d76bef3
ARG APEX_TAG=810ffae374a2b9cb4b5c5e28eaeca7d7998fca0c
RUN \
diff --git a/docs/source/asr/api.rst b/docs/source/asr/api.rst
index c99d92c0371a..a35ea49ea2c4 100644
--- a/docs/source/asr/api.rst
+++ b/docs/source/asr/api.rst
@@ -276,6 +276,21 @@ RNNT Decoding
:show-inheritance:
:members:
+TDT Decoding
+~~~~~~~~~~~~~
+
+.. autoclass:: nemo.collections.asr.parts.submodules.rnnt_greedy_decoding.GreedyTDTInfer
+ :show-inheritance:
+ :members:
+
+.. autoclass:: nemo.collections.asr.parts.submodules.rnnt_greedy_decoding.GreedyBatchedTDTInfer
+ :show-inheritance:
+ :members:
+
+.. autoclass:: nemo.collections.asr.parts.submodules.tdt_beam_decoding.BeamTDTInfer
+ :show-inheritance:
+ :members:
+
Hypotheses
~~~~~~~~~~
diff --git a/docs/source/asr/asr_language_modeling_and_customization.rst b/docs/source/asr/asr_language_modeling_and_customization.rst
index 0b4f7a7e730f..a9d42772698c 100644
--- a/docs/source/asr/asr_language_modeling_and_customization.rst
+++ b/docs/source/asr/asr_language_modeling_and_customization.rst
@@ -99,15 +99,15 @@ Evaluate by Beam Search Decoding and N-gram LM
NeMo's beam search decoders are capable of using the KenLM's N-gram models to find the best candidates.
The script to evaluate an ASR model with beam search decoding and N-gram models can be found at
-`scripts/asr_language_modeling/ngram_lm/eval_beamsearch_ngram.py `__.
+`scripts/asr_language_modeling/ngram_lm/eval_beamsearch_ngram_ctc.py `__.
-This script has a large number of possible argument overrides; therefore, it is recommended that you use ``python eval_beamsearch_ngram.py --help`` to see the full list of arguments.
+This script has a large number of possible argument overrides; therefore, it is recommended that you use ``python eval_beamsearch_ngram_ctc.py --help`` to see the full list of arguments.
You can evaluate an ASR model using the following:
.. code-block::
- python eval_beamsearch_ngram.py nemo_model_file=<path to the .nemo file of the model> \
+ python eval_beamsearch_ngram_ctc.py nemo_model_file=<path to the .nemo file of the model> \
input_manifest=<path to the evaluation JSON manifest file> \
beam_width=[<list of the beam widths, separated with commas>] \
@@ -118,18 +118,18 @@ You can evaluate an ASR model using the following:
decoding_mode=beamsearch_ngram \
decoding_strategy=""
-It can evaluate a model in the following three modes by setting the argument `--decoding_mode`:
+It can evaluate a model in the following three modes by setting the argument ``--decoding_mode``:
* greedy: Just greedy decoding is done and no beam search decoding is performed.
* beamsearch: The beam search decoding is done, but without using the N-gram language model. Final results are equivalent to setting the weight of LM (beam_beta) to zero.
* beamsearch_ngram: The beam search decoding is done with N-gram LM.
-In `beamsearch` mode, the evaluation is performed using beam search decoding without any language model. The performance is reported in terms of Word Error Rate (WER) and Character Error Rate (CER). Moreover, when the best candidate is selected among the candidates, it is also reported as the best WER/CER. This can serve as an indicator of the quality of the predicted candidates.
+In ``beamsearch`` mode, the evaluation is performed using beam search decoding without any language model. The performance is reported in terms of Word Error Rate (WER) and Character Error Rate (CER). Moreover, when the best candidate is selected among the candidates, it is also reported as the best WER/CER. This can serve as an indicator of the quality of the predicted candidates.
The script initially loads the ASR model and predicts the outputs of the model's encoder as log probabilities. This part is computed in batches on a device specified by --device, which can be either a CPU (`--device=cpu`) or a single GPU (`--device=cuda:0`).
-The batch size for this part is specified by `--acoustic_batch_size`. Using the largest feasible batch size can speed up the calculation of log probabilities. Additionally, you can use `--use_amp` to accelerate the calculation and allow for larger --acoustic_batch_size values.
-Currently, multi-GPU support is not available for calculating log probabilities. However, using `--probs_cache_file` can help. This option stores the log probabilities produced by the model's encoder in a pickle file, allowing you to skip the first step in future runs.
+The batch size for this part is specified by ``--acoustic_batch_size``. Using the largest feasible batch size can speed up the calculation of log probabilities. Additionally, you can use ``--use_amp`` to accelerate the calculation and allow for larger ``--acoustic_batch_size`` values.
+Currently, multi-GPU support is not available for calculating log probabilities. However, using ``--probs_cache_file`` can help. This option stores the log probabilities produced by the model's encoder in a pickle file, allowing you to skip the first step in future runs.
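To make the batch-size/AMP trade-off concrete, here is a generic PyTorch sketch of this first stage: batched forward passes produce log probabilities under autocast (what ``--use_amp`` toggles), and the results are pickled the way ``--probs_cache_file`` caches them. The model and tensor shapes are stand-ins, not the script's actual internals.

.. code-block:: python

    # Generic sketch of the first stage (stand-in model and shapes, not NeMo internals):
    # batched forward passes yield log probabilities; autocast is what --use_amp enables,
    # and pickling mirrors what --probs_cache_file caches so future runs can skip this step.
    import pickle

    import torch

    model = torch.nn.LogSoftmax(dim=-1)  # stand-in for the ASR encoder + CTC head
    batches = [torch.randn(16, 100, 128) for _ in range(4)]  # acoustic_batch_size=16

    all_log_probs = []
    with torch.inference_mode(), torch.autocast(device_type="cpu"):
        for batch in batches:
            all_log_probs.append(model(batch).float().cpu())

    with open("probs_cache.pkl", "wb") as f:
        pickle.dump(all_log_probs, f)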
The following is the list of the important arguments for the evaluation script:
@@ -167,7 +167,7 @@ The following is the list of the important arguments for the evaluation script:
| decoding_strategy | str | beam | String argument for type of decoding strategy for the model. |
+--------------------------------------+----------+------------------+-------------------------------------------------------------------------+
| decoding | Dict | BeamCTC | Subdict of beam search configs. Values found via |
-| | Config | InferConfig | python eval_beamsearch_ngram.py --help |
+| | Config | InferConfig | python eval_beamsearch_ngram_ctc.py --help |
+--------------------------------------+----------+------------------+-------------------------------------------------------------------------+
| text_processing.do_lowercase | bool | ``False`` | Whether to make the training text all lower case. |
+--------------------------------------+----------+------------------+-------------------------------------------------------------------------+
@@ -178,11 +178,11 @@ The following is the list of the important arguments for the evaluation script:
| text_processing.separate_punctuation | bool | ``True`` | Whether to separate punctuation with the previous word by space. |
+--------------------------------------+----------+------------------+-------------------------------------------------------------------------+
-The width of the beam search (`--beam_width`) specifies the number of top candidates or predictions the beam search decoder will consider. Larger beam widths result in more accurate but slower predictions.
+The width of the beam search (``--beam_width``) specifies the number of top candidates or predictions the beam search decoder will consider. Larger beam widths result in more accurate but slower predictions.
.. note::
- The ``eval_beamsearch_ngram.py`` script contains the entire subconfig used for CTC Beam Decoding.
+ The ``eval_beamsearch_ngram_ctc.py`` script contains the entire subconfig used for CTC Beam Decoding.
Therefore it is possible to forward arguments for various beam search libraries such as ``flashlight``
and ``pyctcdecode`` via the ``decoding`` subconfig.
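The same beam search plus N-gram setup can also be configured programmatically through the model's ``change_decoding_strategy`` API. The sketch below is a minimal, non-authoritative example: the pretrained model name and KenLM path are placeholders, the ``decoding.beam`` field names follow the subconfig described above, and the beam search decoders must already be installed for it to run.

.. code-block:: python

    # Minimal sketch: configure CTC beam search with a KenLM N-gram LM via the decoding
    # subconfig. The model name and LM path are placeholders; this assumes the beam search
    # decoders from the install_beamsearch_decoders script are installed.
    from copy import deepcopy

    import nemo.collections.asr as nemo_asr

    asr_model = nemo_asr.models.EncDecCTCModelBPE.from_pretrained("stt_en_fastconformer_ctc_large")

    decoding_cfg = deepcopy(asr_model.cfg.decoding)
    decoding_cfg.strategy = "beam"
    decoding_cfg.beam.beam_size = 128  # beam width
    decoding_cfg.beam.beam_alpha = 1.0  # weight of the N-gram LM
    decoding_cfg.beam.beam_beta = 0.5  # word-insertion weight
    decoding_cfg.beam.kenlm_path = "/path/to/ngram_lm.bin"  # placeholder KenLM binary

    asr_model.change_decoding_strategy(decoding_cfg)
    print(asr_model.transcribe(["path/to/audio_file.wav"]))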
@@ -223,14 +223,14 @@ It supports several advanced features, such as lexicon-based decoding, lexicon-f
.. code-block::
# Lexicon-based decoding
- python eval_beamsearch_ngram.py ... \
+ python eval_beamsearch_ngram_ctc.py ... \
decoding_strategy="flashlight" \
decoding.beam.flashlight_cfg.lexicon_path='/path/to/lexicon.lexicon' \
decoding.beam.flashlight_cfg.beam_size_token = 32 \
decoding.beam.flashlight_cfg.beam_threshold = 25.0
# Lexicon-free decoding
- python eval_beamsearch_ngram.py ... \
+ python eval_beamsearch_ngram_ctc.py ... \
decoding_strategy="flashlight" \
decoding.beam.flashlight_cfg.beam_size_token = 32 \
decoding.beam.flashlight_cfg.beam_threshold = 25.0
@@ -256,7 +256,7 @@ It has advanced features, such as word boosting, which can be useful for transcr
.. code-block::
# PyCTCDecoding
- python eval_beamsearch_ngram.py ... \
+ python eval_beamsearch_ngram_ctc.py ... \
decoding_strategy="pyctcdecode" \
decoding.beam.pyctcdecode_cfg.beam_prune_logp = -10. \
decoding.beam.pyctcdecode_cfg.token_min_logp = -5. \
@@ -273,7 +273,7 @@ For example, the following set of parameters would result in 2*1*2=4 beam search d
.. code-block::
- python eval_beamsearch_ngram.py ... \
+ python eval_beamsearch_ngram_ctc.py ... \
beam_width=[64,128] \
beam_alpha=[1.0] \
beam_beta=[1.0,0.5]
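As a standalone illustration of how the sweep size arises, the following snippet (not part of the evaluation script) enumerates the same Cartesian product:

.. code-block:: python

    # The script sweeps the Cartesian product of the parameter lists:
    # 2 beam widths x 1 alpha x 2 betas = 4 beam search decodings.
    from itertools import product

    beam_widths, beam_alphas, beam_betas = [64, 128], [1.0], [1.0, 0.5]
    for width, alpha, beta in product(beam_widths, beam_alphas, beam_betas):
        print(f"beam_width={width}, beam_alpha={alpha}, beam_beta={beta}")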
@@ -330,7 +330,7 @@ Given a trained TransformerLMModel `.nemo` file or a pretrained HF model, the sc
can be used to re-score beams obtained with ASR model. You need the `.tsv` file containing the candidates produced
by the acoustic model and the beam search decoding to use this script. The candidates can be the result of just the beam
search decoding or the result of fusion with an N-gram LM. You can generate this file by specifying `--preds_output_folder` for
-`scripts/asr_language_modeling/ngram_lm/eval_beamsearch_ngram.py `__.
+`scripts/asr_language_modeling/ngram_lm/eval_beamsearch_ngram_ctc.py `__.
The neural rescorer would rescore the beams/candidates by using two parameters of `rescorer_alpha` and `rescorer_beta`, as follows:
@@ -345,7 +345,7 @@ Use the following steps to evaluate a neural LM:
#. Obtain `.tsv` file with beams and their corresponding scores. Scores can be from a regular beam search decoder or
in fusion with an N-gram LM scores. For a given beam size `beam_size` and a number of examples
for evaluation `num_eval_examples`, it should contain (`num_eval_examples` x `beam_size`) lines of
- form `beam_candidate_text \t score`. This file can be generated by `scripts/asr_language_modeling/ngram_lm/eval_beamsearch_ngram.py `__
+ form `beam_candidate_text \t score`. This file can be generated by `scripts/asr_language_modeling/ngram_lm/eval_beamsearch_ngram_ctc.py `__
#. Rescore the candidates by `scripts/asr_language_modeling/neural_rescorer/eval_neural_rescorer.py `__.
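For intuition, the rescoring step can be thought of as a linear combination of each candidate's beam search score with its neural LM score plus a length term, weighted by ``rescorer_alpha`` and ``rescorer_beta``. The toy snippet below is an illustrative stand-in with hypothetical scores, not the ``eval_neural_rescorer.py`` implementation:

.. code-block:: python

    # Illustrative stand-in for neural rescoring (hypothetical scores, simplified logic):
    # combine each candidate's beam search score with a neural LM score and a length term,
    # then pick the highest-scoring candidate.
    def rescore(beam_score, lm_score, seq_length, rescorer_alpha=0.5, rescorer_beta=0.1):
        return beam_score + rescorer_alpha * lm_score + rescorer_beta * seq_length

    # (text, beam_search_score, neural_lm_score, length) -- hypothetical values
    candidates = [
        ("i saw a cat", -12.3, -20.1, 4),
        ("eye saw a cat", -11.9, -25.4, 4),
    ]
    best = max(candidates, key=lambda c: rescore(c[1], c[2], c[3]))
    print(best[0])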
@@ -439,7 +439,7 @@ You can then pass this file to your Flashlight config object during decoding:
.. code-block::
# Lexicon-based decoding
- python eval_beamsearch_ngram.py ... \
+ python eval_beamsearch_ngram_ctc.py ... \
decoding_strategy="flashlight" \
decoding.beam.flashlight_cfg.lexicon_path='/path/to/lexicon.lexicon' \
decoding.beam.flashlight_cfg.boost_path='/path/to/my_boost_file.boost' \
diff --git a/docs/source/asr/intro.rst b/docs/source/asr/intro.rst
index aae372765a8a..7303d1698422 100644
--- a/docs/source/asr/intro.rst
+++ b/docs/source/asr/intro.rst
@@ -16,10 +16,39 @@ After :ref:`installing NeMo`, you can transcribe an audio file as
asr_model = nemo_asr.models.ASRModel.from_pretrained("stt_en_fastconformer_transducer_large")
transcript = asr_model.transcribe(["path/to/audio_file.wav"])
-Obtain word/segment timestamps
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Obtain timestamps
+^^^^^^^^^^^^^^^^^
-You can also obtain timestamps for each word or segment in the transcription as follows:
+Obtaining char (token), word, or segment timestamps is also possible with NeMo ASR models.
+
+Currently, timestamps are available for Parakeet models with all types of decoders (CTC/RNNT/TDT). Support for AED models will be added soon.
+
+There are two ways to obtain timestamps:
+1. By using the ``timestamps=True`` flag in the ``transcribe`` method.
+2. For more control, you can update the decoding config to specify the type of timestamps (char, word, segment) as well as the segment separators or word separator for segment-level and word-level timestamps.
+
+With the ``timestamps=True`` flag, you can obtain timestamps for each character, word, and segment in the transcription as follows:
+
+.. code-block:: python
+
+ # import nemo_asr and instantiate asr_model as above
+ import nemo.collections.asr as nemo_asr
+ asr_model = nemo_asr.models.ASRModel.from_pretrained("nvidia/parakeet-tdt_ctc-110m")
+
+ # specify flag `timestamps=True`
+ hypotheses = asr_model.transcribe(["path/to/audio_file.wav"], timestamps=True)
+
+ # by default, timestamps are enabled for char, word and segment level
+ word_timestamps = hypotheses[0][0].timestep['word'] # word level timestamps for first sample
+ segment_timestamps = hypotheses[0][0].timestep['segment'] # segment level timestamps
+ char_timestamps = hypotheses[0][0].timestep['char'] # char level timestamps
+
+ for stamp in segment_timestamps:
+ print(f"{stamp['start']}s - {stamp['end']}s : {stamp['segment']}")
+
+    # segment level timestamps (if the model supports punctuation and capitalization, segments are split based on punctuation; otherwise the complete transcription is treated as a single segment)
+
+For more control over the timestamps, you can update the decoding config to specify the type of timestamps (char, word, segment) and the segment separators or word separator for segment-level and word-level timestamps, as follows:
.. code-block:: python
@@ -98,8 +127,8 @@ You can get a good improvement in transcription accuracy even using a simple N-g
After :ref:`training ` an N-gram LM, you can use it for transcribing audio as follows:
-1. Install the OpenSeq2Seq beam search decoding and KenLM libraries using the `install_beamsearch_decoders script `_.
-2. Perform transcription using the `eval_beamsearch_ngram script `_:
+1. Install the OpenSeq2Seq beam search decoding and KenLM libraries using the `install_beamsearch_decoders script `_.
+2. Perform transcription using the `eval_beamsearch_ngram script `_:
.. code-block:: bash
diff --git a/docs/source/core/core.rst b/docs/source/core/core.rst
index 6bdd18559902..94706b639b5f 100644
--- a/docs/source/core/core.rst
+++ b/docs/source/core/core.rst
@@ -294,8 +294,8 @@ CLI
With NeMo and Hydra, every aspect of model training can be modified from the command-line. This is extremely helpful for running lots
of experiments on compute clusters or for quickly testing parameters during development.
-All NeMo `examples `_ come with instructions on how to
-run the training/inference script from the command-line (see `here `__
+All NeMo `examples `_ come with instructions on how to
+run the training/inference script from the command line (see `here `__
for an example).
With Hydra, arguments are set using the ``=`` operator:
diff --git a/docs/source/multimodal/mllm/configs.rst b/docs/source/multimodal/mllm/configs.rst
index 6e9f9b2b8d10..53b851867f65 100644
--- a/docs/source/multimodal/mllm/configs.rst
+++ b/docs/source/multimodal/mllm/configs.rst
@@ -5,14 +5,14 @@ This section provides a detailed overview of the NeMo configuration file setup s
Within the configuration files of the NeMo Multimodal Language Model, details concerning dataset(s), augmentation, optimization parameters, and model architectural specifications are central. This page explores each of these aspects.
-Discover exemplary configuration files for all NeMo Multimodal Language Model scripts in the `config directory of the examples `_.
+Find example configuration files for all NeMo Multimodal Language Model scripts in the `config directory of the examples `_.
Dataset Configuration
---------------------
The NeMo multimodal language model currently supports a conversation data format, inspired by and designed from https://github.com/haotian-liu/LLaVA/tree/main. To explore a sample dataset, visit https://github.com/haotian-liu/LLaVA/blob/main/docs/Data.md.
-The configuration file allows setting any initialization parameter accepted by the Dataset class used in the experiment. For a comprehensive list of Datasets and their parameters, visit the `Datasets <./api.html#Datasets>`__ section of the API.
+The configuration file allows setting any initialization parameter accepted by the Dataset class used in the experiment. For a comprehensive list of Datasets and their parameters, visit the :doc:`Datasets <./datasets>` section of the API.
A typical training configuration is as follows:
diff --git a/docs/source/multimodal/text2img/imagen.rst b/docs/source/multimodal/text2img/imagen.rst
index 844f68df747f..3134ffdc2747 100644
--- a/docs/source/multimodal/text2img/imagen.rst
+++ b/docs/source/multimodal/text2img/imagen.rst
@@ -31,7 +31,7 @@ Imagen has two types of UNet: Regular UNet and EfficientUNet.
Regular UNet
~~~~~~~~~~~~
Regular UNet is used for Imagen base64 model. You can also use regular UNet for SR models
-(see example config file `sr256-400m-edm.yaml `_), but this typically
+(see example config file `sr256-400m-edm.yaml `__), but this typically
results in a larger memory footprint during training for the same model size.
Recommended UNet size for base64 and SR256 models are listed below:
diff --git a/docs/source/multimodal/vlm/configs.rst b/docs/source/multimodal/vlm/configs.rst
index cc383cb64b62..711831121bd7 100644
--- a/docs/source/multimodal/vlm/configs.rst
+++ b/docs/source/multimodal/vlm/configs.rst
@@ -5,7 +5,7 @@ This section provides a detailed overview of the NeMo configuration file setup s
Within the configuration files of the NeMo Multimodal Language Model, details concerning dataset(s), augmentation, optimization parameters, and model architectural specifications are central. This page explores each of these aspects.
-Discover exemplary configuration files for all NeMo Multimodal Language Model scripts in the `config directory of the examples `_.
+Find example configuration files for all NeMo Multimodal Language Model scripts in the `config directories of the examples `__.
Dataset Configuration
=====================
diff --git a/docs/source/multimodal/vlm/datasets.rst b/docs/source/multimodal/vlm/datasets.rst
index 057c79109b08..0c32210d8b6f 100644
--- a/docs/source/multimodal/vlm/datasets.rst
+++ b/docs/source/multimodal/vlm/datasets.rst
@@ -32,4 +32,4 @@ For webdatasets already downloaded locally, sub-stages 4-6 can be used to precac
For models that encode image and text on-the-fly, only sub-stages 1-3 need to be run.
Instruction for configuring each sub-stage is provided as a comment next to each field in
-`download_multimodal.yaml `_
+`download_multimodal.yaml `__.
diff --git a/docs/source/nlp/distillation.rst b/docs/source/nlp/distillation.rst
deleted file mode 100644
index 22b2f3dd8a1c..000000000000
--- a/docs/source/nlp/distillation.rst
+++ /dev/null
@@ -1,58 +0,0 @@
-.. _megatron_distillation:
-
-Distillation
-==========================
-
-Knowledge Distillation (KD)
---------------------------------
-
-KD involves using information from an existing trained model to train a second (usually smaller, faster) model, thereby "distilling" knowledge from one to the other.
-
-Distillation has two primary benefits: faster convergence and higher end accuracy than traditional training.
-
-In NeMo, distillation is enabled by the `NVIDIA TensorRT Model Optimizer (ModelOpt) `_ library -- a library to optimize deep-learning models for inference on GPUs.
-
-The logits-distillation process consists of the following steps:
-
-1. Loading both student and teacher model checkpoints (must support same parallelism strategy, if any)
-2. Training until convergence, where forward passes are run on both models (and backward only on student), performing a specific loss function between the logits.
-3. Saving the final student model.
-
-
-Example
-^^^^^^^
-The example below shows how to run the distillation script for LLama models.
-
-The script must be launched correctly with the number of processes equal to tensor parallelism. This is achieved with the ``torchrun`` command below:
-
-.. code-block:: bash
-
- STUDENT_CKPT="path/to/student.nemo" # can also be None (will use default architecture found in examples/nlp/language_modeling/conf/megatron_llama_distill.yaml)
- TEACHER_CKPT="path/to/teacher.nemo"
- TOKENIZER="path/to/tokenizer.model"
- DATA_PATHS="[1.0,path/to/tokenized/data]"
- FINAL_SAVE_FILE="final_checkpoint.nemo"
- TP=4
-
- NPROC=$TP
- launch_config="torchrun --nproc_per_node=$NPROC"
-
- ${launch_config} examples/nlp/language_modeling/megatron_gpt_distillation.py \
- model.restore_from_path=$STUDENT_CKPT \
- model.kd_teacher_restore_from_path=$TEACHER_CKPT \
- model.tensor_model_parallel_size=$TP \
- model.tokenizer.model=$TOKENIZER \
- model.data.data_prefix=$DATA_PATHS \
- model.nemo_path=$FINAL_SAVE_FILE \
- trainer.precision=bf16 \
- trainer.devices=$NPROC
-
-For large models, the command can be used in multi-node setting. For example, this can be done with `NeMo Framework Launcher `_ using Slurm.
-
-
-Limitations
-^^^^^^^^^^^
-* Only Megatron Core-based GPT models are supported
-* Only logit-pair distillation is supported for now
-* Pipeline parallelism not yet supported
-* FSDP strategy not yet supported
diff --git a/docs/source/nlp/nemo_megatron/model_distillation/drop_layers.rst b/docs/source/nlp/nemo_megatron/model_distillation/drop_layers.rst
deleted file mode 100644
index aa1d6a1df6fe..000000000000
--- a/docs/source/nlp/nemo_megatron/model_distillation/drop_layers.rst
+++ /dev/null
@@ -1,67 +0,0 @@
-.. _drop_layers:
-
-Drop Model Laeyrs
------------------
-
-To trim the model layers, use the following script:
-
-.. code-block:: bash
-
- python -m torch.distributed.launch --nproc_per_node= * \
- /NeMo/examples/nlp/language_modeling/megatron_gpt_drop_layers.py \
- --path_to_nemo /path/to/model.nemo \
- --path_to_save /path/to/save/trimmed_model.nemo \
- --tensor_model_parallel_size \
- --pipeline_model_parallel_size \
- --gpus_per_node \
- --drop_layers 1 2 3 4
-
-**Note:** layer indices start from 1.
-
-To save trimmed model in ``zarr`` checkpoint format, add the following flag to the command above:
-
-.. code-block:: bash
-
- --zarr
-
-**Note:** the ``zarr`` checkpoint format is deprecated.
-
-Validate Trimmed Model
-----------------------
-
-To validate the trimmed model, use the following script:
-
-.. code-block:: bash
-
- python /NeMo/examples/nlp/language_modeling/megatron_gpt_pretraining.py \
- --config-path=/path/to/folder/with/model/config \
- --config-name=model_config.yaml \
- trainer.limit_val_batches= \
- model.restore_from_path=/path/to/trimmed_model.nemo \
- model.skip_train=True \
- model.data.data_impl=mock \
- model.data.data_prefix=[]
-
-To use a specific dataset instead of a mock dataset, modify the ``model.data`` parameters as follows:
-
-.. code-block:: bash
-
- model.data.data_impl=mmap \
- model.data.data_prefix=["path/to/datafile1", "path/to/datafile2"]
-
-Validate Original Model
------------------------
-
-To validate the original model without specific layers, use the following script:
-
-.. code-block:: bash
-
- python /NeMo/examples/nlp/language_modeling/megatron_gpt_pretraining.py \
- --config-path=/path/to/folder/with/model/config \
- --config-name=model_config.yaml \
- trainer.limit_val_batches= \
- model.restore_from_path=/path/to/original_model.nemo \
- model.skip_train=True \
- model.data.data_impl=mock \
- model.data.data_prefix=[] \
- model.drop_layers=[1,2,3,4]
diff --git a/docs/source/nlp/punctuation_and_capitalization.rst b/docs/source/nlp/punctuation_and_capitalization.rst
index 4be0d2151d8e..d67332eb00c1 100755
--- a/docs/source/nlp/punctuation_and_capitalization.rst
+++ b/docs/source/nlp/punctuation_and_capitalization.rst
@@ -240,7 +240,7 @@ An example of a config file is
- trainer config
-
- Parameters of
- `pytorch_lightning.Trainer `_.
+ `lightning.pytorch.Trainer `_.
* - **exp_manager**
- exp manager config
-
diff --git a/docs/source/nlp/text_normalization/nn_text_normalization.rst b/docs/source/nlp/text_normalization/nn_text_normalization.rst
index 87530dbcbc29..d4c172a4fab0 100644
--- a/docs/source/nlp/text_normalization/nn_text_normalization.rst
+++ b/docs/source/nlp/text_normalization/nn_text_normalization.rst
@@ -87,7 +87,7 @@ Data upsampling
---------------
Data upsampling is an effective way to increase the training data for better model performance, especially on the long tail of semiotic tokens.
-We used upsampling for training an English text normalization model, see `data/en/upsampling.py `__.
+We used upsampling for training an English text normalization model; see `data/en/upsampling.py `__.
Currently this script only upsamples a few classes, that are diverse in semiotic tokens but at the same time underrepresented in the training data.
Of all the input files in `train` folder created by `data/data_split.py `__. this script takes the first file and detects the class patterns that occur in it.
For those that are underrepresented, quantitatively defined as lower than `min_number`, the other files are scanned for sentences that have the missing patterns.
diff --git a/docs/source/nlp/text_normalization/wfst/wfst_customization.rst b/docs/source/nlp/text_normalization/wfst/wfst_customization.rst
index a199c1fb09d0..4af157489480 100644
--- a/docs/source/nlp/text_normalization/wfst/wfst_customization.rst
+++ b/docs/source/nlp/text_normalization/wfst/wfst_customization.rst
@@ -38,5 +38,5 @@ WFST TN/ITN resources could be found in :doc:`here `.
Riva resources
--------------
- - `Riva Text Normalization customization for TTS `_.
- - `Riva ASR/Inverse Text Normalization customization `_.
\ No newline at end of file
+ - `Riva Text Normalization customization for TTS `_.
+ - `Riva ASR/Inverse Text Normalization customization `_.
\ No newline at end of file
diff --git a/docs/source/starthere/fundamentals.rst b/docs/source/starthere/fundamentals.rst
index e3014e0f5a03..f486bf3d6e49 100644
--- a/docs/source/starthere/fundamentals.rst
+++ b/docs/source/starthere/fundamentals.rst
@@ -116,7 +116,7 @@ Below is an example training script for our ``ExampleEncDecModel`` model. We hig
:linenos:
:emphasize-lines: 10, 11, 12
- import pytorch_lightning as pl
+ import lightning.pytorch as pl
from nemo.collections.path_to_model_class import ExampleEncDecModel
from nemo.core.config import hydra_runner
diff --git a/docs/source/starthere/intro.rst b/docs/source/starthere/intro.rst
index c3c3304082dc..41c10dd5c6ea 100644
--- a/docs/source/starthere/intro.rst
+++ b/docs/source/starthere/intro.rst
@@ -32,95 +32,8 @@ Before using NeMo, make sure you meet the following prerequisites:
Installation
------------
-**Using NVIDIA PyTorch Container**
+Refer to the NeMo Framework `User Guide `__ for the latest installation instructions.
-To leverage all optimizations for LLM training, including 3D Model Parallel, fused kernels, FP8, and more, we recommend using the NVIDIA PyTorch container.
-
-.. code-block:: bash
-
- docker pull nvcr.io/nvidia/pytorch:24.01-py3
- docker run --gpus all -it nvcr.io/nvidia/pytorch:24.01-py3
-
-Within the container, you can install NeMo and its dependencies as follows:
-
-NeMo Installation
-
-.. code-block:: bash
-
- apt-get update && apt-get install -y libsndfile1 ffmpeg
- pip install Cython
- pip install nemo_toolkit['all']
-
-Transformer Engine Installation
-
-This step involves cloning the Transformer Engine repository, checking out a specific commit, and installing it with specific flags.
-
-.. code-block:: bash
-
- git clone https://github.com/NVIDIA/TransformerEngine.git && \
- cd TransformerEngine && \
- git fetch origin 8c9abbb80dba196f086b8b602a7cf1bce0040a6a && \
- git checkout FETCH_HEAD && \
- git submodule init && git submodule update && \
- NVTE_FRAMEWORK=pytorch NVTE_WITH_USERBUFFERS=1 MPI_HOME=/usr/local/mpi pip install .
-
-Apex Installation
-
-This step includes a bug fix for Apex in the PyTorch 23.11 container.
-
-.. code-block:: bash
-
- git clone https://github.com/NVIDIA/apex.git && \
- cd apex && \
- git checkout c07a4cf67102b9cd3f97d1ba36690f985bae4227 && \
- cp -R apex /usr/local/lib/python3.10/dist-packages
-
-PyTorch Lightning Installation
-
-This step involves installing a bug-fixed version of PyTorch Lightning from a specific branch.
-
-.. code-block:: bash
-
- git clone -b bug_fix https://github.com/athitten/pytorch-lightning.git && \
- cd pytorch-lightning && \
- PACKAGE_NAME=pytorch pip install -e .
-
-Megatron Core Installation
-
-This section details the steps to clone and install the Megatron Core.
-
-.. code-block:: bash
-
- git clone https://github.com/NVIDIA/Megatron-LM.git && \
- cd Megatron-LM && \
- git checkout a5415fcfacef2a37416259bd38b7c4b673583675 && \
- pip install .
-
-TensorRT Model Optimizer Installation
-
-This final step involves installing the TensorRT Model Optimizer package.
-
-.. code-block:: bash
-
- pip install nvidia-modelopt[torch]~=0.17.0 --extra-index-url https://pypi.nvidia.com
-
-
-.. code-block:: bash
-
- apt-get update && apt-get install -y libsndfile1 ffmpeg
- pip install Cython
- pip install nemo_toolkit['all']
-
-**Conda Installation**
-
-If you do not use the NVIDIA PyTorch container, we recommend installing NeMo in a clean Conda environment.
-
-.. code-block:: bash
-
- conda create --name nemo python==3.10.12
- conda activate nemo
-
-Refer to the PyTorch configurator for instructions on installing PyTorch. `configurator `_
Quick Start Guide
-----------------
diff --git a/docs/source/tools/speech_data_explorer.rst b/docs/source/tools/speech_data_explorer.rst
index a57cb442f468..ac13f3936746 100644
--- a/docs/source/tools/speech_data_explorer.rst
+++ b/docs/source/tools/speech_data_explorer.rst
@@ -18,16 +18,6 @@ Speech Data Explorer (SDE) is a `Dash `__-based web ap
| estimation of audio signal parameters [peak level, frequency bandwidth] |
+--------------------------------------------------------------------------------------------------------------------------+
-SDE Demo Instance
------------------
-
-To demonstrate both the :doc:`CTC-Segmentation <./ctc_segmentation>` and Speech Data Explorer tools, we re-segmenting the development set as of `the LibriSpeech corpus `_.
-We concatenated all audio files from the dev-clean split into a single file and set up the CTC-Segmentation tool to cut the long audio file into original utterances.
-We used the CTC-based `QuartzNet15x5Base-En ASR model `_.
-The segmented corpus has 3.82% WER and contains 300 out of the initial 323 minutes of audio.
-The remaining 23 minutes are the silence at the beginning and end of the audio removed during the segmentation.
-A `running instance of the SDE `_ demonstrates the re-segmented corpus.
-
Getting Started
---------------
SDE could be found in `NeMo/tools/speech_data_explorer `__.
diff --git a/docs/source/tts/datasets.rst b/docs/source/tts/datasets.rst
index 7efe116dcccc..e37c2176c41a 100644
--- a/docs/source/tts/datasets.rst
+++ b/docs/source/tts/datasets.rst
@@ -141,7 +141,7 @@ There are two German neutral datasets released by Thorsten Müller for now, 21.0
HUI Audio Corpus German
~~~~~~~~~~~~~~~~~~~~~~~
-* Dataset URL: https://opendata.iisys.de/datasets.html
+* Dataset URL: https://github.com/iisys-hof/HUI-Audio-Corpus-German
* Dataset Processing Script: https://github.com/NVIDIA/NeMo/tree/stable/scripts/dataset_processing/tts/hui_acg/get_data.py
* Command Line Instruction:
diff --git a/docs/source/vision/configs.rst b/docs/source/vision/configs.rst
index 92b7e5b45d12..1064f9569c3d 100644
--- a/docs/source/vision/configs.rst
+++ b/docs/source/vision/configs.rst
@@ -4,7 +4,7 @@ This section provides a detailed overview of the NeMo configuration file setup s
Within the configuration files of the NeMo vision models, details concerning dataset(s), augmentation, optimization parameters, and model architectural specifications are central. This page explores each of these aspects.
-Discover exemplary configuration files for all NeMo vision models scripts in the `config directory of the examples `_.
+Find example configuration files for all NeMo vision model scripts in the `config directory of the examples `__.
Dataset Configuration
=====================
@@ -12,7 +12,6 @@ Dataset Configuration
The configuration file delineates parameters for dataset path.
All initialization parameters supported by the Dataset class utilized in the experiment can be defined in the config file.
-.. For a comprehensive list of Datasets and their associated parameters, consult the `Datasets <./api.html#Datasets>`__ section of the API.
A representative training configuration appears as:
diff --git a/examples/asr/asr_adapters/eval_asr_adapter.py b/examples/asr/asr_adapters/eval_asr_adapter.py
index bc5947f26aaf..b35cf33a6c0e 100644
--- a/examples/asr/asr_adapters/eval_asr_adapter.py
+++ b/examples/asr/asr_adapters/eval_asr_adapter.py
@@ -36,7 +36,7 @@
"""
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import OmegaConf, open_dict
from nemo.collections.asr.models import ASRModel
diff --git a/examples/asr/asr_adapters/train_asr_adapter.py b/examples/asr/asr_adapters/train_asr_adapter.py
index 3f82ef8fe554..253672e3eb89 100644
--- a/examples/asr/asr_adapters/train_asr_adapter.py
+++ b/examples/asr/asr_adapters/train_asr_adapter.py
@@ -84,7 +84,7 @@
import os
from dataclasses import is_dataclass
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import DictConfig, OmegaConf, open_dict
from nemo.collections.asr.models import ASRModel
diff --git a/examples/asr/asr_chunked_inference/aed/speech_to_text_aed_chunked_infer.py b/examples/asr/asr_chunked_inference/aed/speech_to_text_aed_chunked_infer.py
index 0417522885b9..1e63a9d820be 100644
--- a/examples/asr/asr_chunked_inference/aed/speech_to_text_aed_chunked_infer.py
+++ b/examples/asr/asr_chunked_inference/aed/speech_to_text_aed_chunked_infer.py
@@ -13,11 +13,13 @@
# limitations under the License.
"""
-This script chunks long audios into non-overlapping segments of `chunk_len_in_secs` seconds and performs inference on each
+This script chunks long audio files into non-overlapping segments of `chunk_len_in_secs`
+seconds and performs inference on each
segment individually. The results are then concatenated to form the final output.
Below is an example of how to run this script with the Canary-1b model.
-It's recommended to use manifest input, otherwise the model will perform English ASR with punctuations and capitalizations.
+It's recommended to use manifest input; otherwise, the model will perform English ASR
+with punctuation and capitalization.
An example manifest line:
{
"audio_filepath": "/path/to/audio.wav", # path to the audio file
@@ -41,14 +43,13 @@
"""
-import contextlib
import copy
import glob
import os
-from dataclasses import dataclass, is_dataclass
+from dataclasses import dataclass
from typing import Optional
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
from omegaconf import OmegaConf
@@ -67,6 +68,10 @@
@dataclass
class TranscriptionConfig:
+ """
+ Transcription config
+ """
+
# Required configs
model_path: Optional[str] = None # Path to a .nemo file
pretrained_name: Optional[str] = None # Name of a pretrained model
@@ -116,6 +121,10 @@ class TranscriptionConfig:
@hydra_runner(config_name="TranscriptionConfig", schema=TranscriptionConfig)
def main(cfg: TranscriptionConfig) -> TranscriptionConfig:
+ """
+ Transcribes the input audio and can be used to infer long audio files by chunking
+ them into smaller segments.
+ """
logging.info(f'Hydra config: {OmegaConf.to_yaml(cfg)}')
torch.set_grad_enabled(False)
@@ -160,7 +169,8 @@ def main(cfg: TranscriptionConfig) -> TranscriptionConfig:
if model_cfg.preprocessor.normalize != "per_feature":
logging.error(
- "Only EncDecMultiTaskModel models trained with per_feature normalization are supported currently"
+            "Only EncDecMultiTaskModel models trained with per_feature normalization "
+            "are currently supported"
)
# Disable config overwriting
@@ -206,7 +216,7 @@ def main(cfg: TranscriptionConfig) -> TranscriptionConfig:
)
output_filename, pred_text_attr_name = write_transcription(
- hyps, cfg, model_name, filepaths=filepaths, compute_langs=False, compute_timestamps=False
+ hyps, cfg, model_name, filepaths=filepaths, compute_langs=False, timestamps=False
)
logging.info(f"Finished writing predictions to {output_filename}!")
diff --git a/examples/asr/asr_chunked_inference/ctc/speech_to_text_buffered_infer_ctc.py b/examples/asr/asr_chunked_inference/ctc/speech_to_text_buffered_infer_ctc.py
index 77b97e0ab516..ccea94f41f83 100644
--- a/examples/asr/asr_chunked_inference/ctc/speech_to_text_buffered_infer_ctc.py
+++ b/examples/asr/asr_chunked_inference/ctc/speech_to_text_buffered_infer_ctc.py
@@ -35,15 +35,14 @@
You can use `DEBUG=1 python speech_to_text_buffered_infer_ctc.py ...` to print out the
predictions of the model, and ground-truth text if presents in manifest.
"""
-import contextlib
import copy
import glob
import math
import os
-from dataclasses import dataclass, is_dataclass
+from dataclasses import dataclass
from typing import Optional
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
from omegaconf import OmegaConf
@@ -65,6 +64,10 @@
@dataclass
class TranscriptionConfig:
+ """
+ Transcription Configuration for buffered inference.
+ """
+
# Required configs
model_path: Optional[str] = None # Path to a .nemo file
pretrained_name: Optional[str] = None # Name of a pretrained model
@@ -114,6 +117,10 @@ class TranscriptionConfig:
@hydra_runner(config_name="TranscriptionConfig", schema=TranscriptionConfig)
def main(cfg: TranscriptionConfig) -> TranscriptionConfig:
+ """
+ Transcribes the input audio and can be used to infer long audio files by chunking
+ them into smaller segments.
+ """
logging.info(f'Hydra config: {OmegaConf.to_yaml(cfg)}')
torch.set_grad_enabled(False)
@@ -221,7 +228,7 @@ def main(cfg: TranscriptionConfig) -> TranscriptionConfig:
filepaths,
)
output_filename, pred_text_attr_name = write_transcription(
- hyps, cfg, model_name, filepaths=filepaths, compute_langs=False, compute_timestamps=False
+ hyps, cfg, model_name, filepaths=filepaths, compute_langs=False, timestamps=False
)
logging.info(f"Finished writing predictions to {output_filename}!")
diff --git a/examples/asr/asr_chunked_inference/rnnt/speech_to_text_buffered_infer_rnnt.py b/examples/asr/asr_chunked_inference/rnnt/speech_to_text_buffered_infer_rnnt.py
index 501ca525c1ed..c31fa2b9d812 100644
--- a/examples/asr/asr_chunked_inference/rnnt/speech_to_text_buffered_infer_rnnt.py
+++ b/examples/asr/asr_chunked_inference/rnnt/speech_to_text_buffered_infer_rnnt.py
@@ -61,10 +61,10 @@
import glob
import math
import os
-from dataclasses import dataclass, is_dataclass
+from dataclasses import dataclass
from typing import Optional
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
from omegaconf import OmegaConf, open_dict
@@ -87,6 +87,10 @@
@dataclass
class TranscriptionConfig:
+ """
+ Transcription Configuration for buffered inference.
+ """
+
# Required configs
model_path: Optional[str] = None # Path to a .nemo file
pretrained_name: Optional[str] = None # Name of a pretrained model
@@ -143,6 +147,10 @@ class TranscriptionConfig:
@hydra_runner(config_name="TranscriptionConfig", schema=TranscriptionConfig)
def main(cfg: TranscriptionConfig) -> TranscriptionConfig:
+ """
+ Transcribes the input audio and can be used to infer long audio files by chunking
+ them into smaller segments.
+ """
logging.info(f'Hydra config: {OmegaConf.to_yaml(cfg)}')
torch.set_grad_enabled(False)
@@ -274,7 +282,7 @@ def main(cfg: TranscriptionConfig) -> TranscriptionConfig:
)
output_filename, pred_text_attr_name = write_transcription(
- hyps, cfg, model_name, filepaths=filepaths, compute_langs=False, compute_timestamps=False
+ hyps, cfg, model_name, filepaths=filepaths, compute_langs=False, timestamps=False
)
logging.info(f"Finished writing predictions to {output_filename}!")
diff --git a/examples/asr/asr_ctc/speech_to_text_ctc.py b/examples/asr/asr_ctc/speech_to_text_ctc.py
index 87b1b11633f7..ccdf3a5e09ea 100644
--- a/examples/asr/asr_ctc/speech_to_text_ctc.py
+++ b/examples/asr/asr_ctc/speech_to_text_ctc.py
@@ -68,7 +68,7 @@
"""
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import OmegaConf
from nemo.collections.asr.models import EncDecCTCModel
diff --git a/examples/asr/asr_ctc/speech_to_text_ctc_bpe.py b/examples/asr/asr_ctc/speech_to_text_ctc_bpe.py
index b4e3be5f650a..997cd6e52d5b 100644
--- a/examples/asr/asr_ctc/speech_to_text_ctc_bpe.py
+++ b/examples/asr/asr_ctc/speech_to_text_ctc_bpe.py
@@ -64,7 +64,7 @@
"""
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import OmegaConf
from nemo.collections.asr.models.ctc_bpe_models import EncDecCTCModelBPE
diff --git a/examples/asr/asr_hybrid_transducer_ctc/speech_to_text_hybrid_rnnt_ctc_bpe.py b/examples/asr/asr_hybrid_transducer_ctc/speech_to_text_hybrid_rnnt_ctc_bpe.py
index 796005a8fcee..ffda4c554a49 100644
--- a/examples/asr/asr_hybrid_transducer_ctc/speech_to_text_hybrid_rnnt_ctc_bpe.py
+++ b/examples/asr/asr_hybrid_transducer_ctc/speech_to_text_hybrid_rnnt_ctc_bpe.py
@@ -58,7 +58,7 @@
"""
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import OmegaConf
from nemo.collections.asr.models import EncDecHybridRNNTCTCBPEModel
diff --git a/examples/asr/asr_hybrid_transducer_ctc/speech_to_text_hybrid_rnnt_ctc_char.py b/examples/asr/asr_hybrid_transducer_ctc/speech_to_text_hybrid_rnnt_ctc_char.py
index 423e005d8f02..02f43f93e2c7 100644
--- a/examples/asr/asr_hybrid_transducer_ctc/speech_to_text_hybrid_rnnt_ctc_char.py
+++ b/examples/asr/asr_hybrid_transducer_ctc/speech_to_text_hybrid_rnnt_ctc_char.py
@@ -69,7 +69,7 @@
"""
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import OmegaConf
from nemo.collections.asr.models import EncDecHybridRNNTCTCModel
diff --git a/examples/asr/asr_transducer/speech_to_text_rnnt.py b/examples/asr/asr_transducer/speech_to_text_rnnt.py
index 5b4f1e8a985d..2fab3ac137e6 100644
--- a/examples/asr/asr_transducer/speech_to_text_rnnt.py
+++ b/examples/asr/asr_transducer/speech_to_text_rnnt.py
@@ -67,7 +67,7 @@
"""
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import OmegaConf
from nemo.collections.asr.models import EncDecRNNTModel
diff --git a/examples/asr/asr_transducer/speech_to_text_rnnt_bpe.py b/examples/asr/asr_transducer/speech_to_text_rnnt_bpe.py
index 1fffea55686f..d18313acc9a6 100644
--- a/examples/asr/asr_transducer/speech_to_text_rnnt_bpe.py
+++ b/examples/asr/asr_transducer/speech_to_text_rnnt_bpe.py
@@ -59,7 +59,7 @@
"""
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import OmegaConf
from nemo.collections.asr.models import EncDecRNNTBPEModel
diff --git a/examples/asr/asr_with_tts/speech_to_text_bpe_with_text.py b/examples/asr/asr_with_tts/speech_to_text_bpe_with_text.py
index b435d418fda2..acd7a8632822 100644
--- a/examples/asr/asr_with_tts/speech_to_text_bpe_with_text.py
+++ b/examples/asr/asr_with_tts/speech_to_text_bpe_with_text.py
@@ -49,7 +49,7 @@
"""
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import OmegaConf
from nemo.collections.asr.models.hybrid_asr_tts_models import ASRWithTTSModel
diff --git a/examples/asr/asr_with_tts/speech_to_text_bpe_with_text_finetune.py b/examples/asr/asr_with_tts/speech_to_text_bpe_with_text_finetune.py
index 99bc41ba966b..c1692cf6234f 100644
--- a/examples/asr/asr_with_tts/speech_to_text_bpe_with_text_finetune.py
+++ b/examples/asr/asr_with_tts/speech_to_text_bpe_with_text_finetune.py
@@ -45,7 +45,7 @@
"""
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import OmegaConf
from nemo.collections.asr.models.hybrid_asr_tts_models import ASRWithTTSModel
diff --git a/examples/asr/conf/asr_adapters/asr_adaptation.yaml b/examples/asr/conf/asr_adapters/asr_adaptation.yaml
index b9a2a003217e..bae166d18782 100644
--- a/examples/asr/conf/asr_adapters/asr_adaptation.yaml
+++ b/examples/asr/conf/asr_adapters/asr_adaptation.yaml
@@ -182,7 +182,7 @@ trainer:
val_check_interval: 1.0 # Set to 0.25 to check 4 times per epoch, or an int for number of iterations
accelerator: auto
strategy:
- _target_: pytorch_lightning.strategies.DDPStrategy
+ _target_: lightning.pytorch.strategies.DDPStrategy
gradient_as_bucket_view: true
accumulate_grad_batches: 1
gradient_clip_val: null
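A side note on why these ``_target_`` strings must track the ``pytorch_lightning`` to ``lightning.pytorch`` rename: Hydra resolves the dotted path by importing it when the object is instantiated, so a stale path fails with an import error. A minimal sketch, assuming ``hydra-core``, ``omegaconf``, and ``lightning`` are installed:

.. code-block:: python

    # Minimal sketch: Hydra imports the dotted `_target_` path to build the object, so the
    # string must match the installed package path (lightning.pytorch, not pytorch_lightning).
    from hydra.utils import instantiate
    from omegaconf import OmegaConf

    cfg = OmegaConf.create(
        {"_target_": "lightning.pytorch.strategies.DDPStrategy", "gradient_as_bucket_view": True}
    )
    strategy = instantiate(cfg)
    print(type(strategy).__name__)  # DDPStrategy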
diff --git a/examples/asr/conf/asr_adapters/asr_adaptation_hp.yaml b/examples/asr/conf/asr_adapters/asr_adaptation_hp.yaml
index 958e6d23375c..d03b2eacfec4 100644
--- a/examples/asr/conf/asr_adapters/asr_adaptation_hp.yaml
+++ b/examples/asr/conf/asr_adapters/asr_adaptation_hp.yaml
@@ -182,7 +182,7 @@ trainer:
val_check_interval: 1.0 # Set to 0.25 to check 4 times per epoch, or an int for number of iterations
accelerator: auto
strategy:
- _target_: pytorch_lightning.strategies.DDPStrategy
+ _target_: lightning.pytorch.strategies.DDPStrategy
gradient_as_bucket_view: true
accumulate_grad_batches: 1
gradient_clip_val: null
diff --git a/examples/asr/conf/asr_finetune/speech_to_text_finetune.yaml b/examples/asr/conf/asr_finetune/speech_to_text_finetune.yaml
index 3b5717efddf9..1ae64a341e16 100644
--- a/examples/asr/conf/asr_finetune/speech_to_text_finetune.yaml
+++ b/examples/asr/conf/asr_finetune/speech_to_text_finetune.yaml
@@ -81,7 +81,7 @@ trainer:
val_check_interval: 1.0 # Set to 0.25 to check 4 times per epoch, or an int for number of iterations
accelerator: auto
strategy:
- _target_: pytorch_lightning.strategies.DDPStrategy
+ _target_: lightning.pytorch.strategies.DDPStrategy
gradient_as_bucket_view: true
accumulate_grad_batches: 1
gradient_clip_val: 0.0
diff --git a/examples/asr/conf/asr_finetune/speech_to_text_hf_finetune.yaml b/examples/asr/conf/asr_finetune/speech_to_text_hf_finetune.yaml
index f111573f21eb..c044d3c8d7a8 100644
--- a/examples/asr/conf/asr_finetune/speech_to_text_hf_finetune.yaml
+++ b/examples/asr/conf/asr_finetune/speech_to_text_hf_finetune.yaml
@@ -145,7 +145,7 @@ trainer:
val_check_interval: 1.0 # Set to 0.25 to check 4 times per epoch, or an int for number of iterations
accelerator: auto
strategy:
- _target_: pytorch_lightning.strategies.DDPStrategy
+ _target_: lightning.pytorch.strategies.DDPStrategy
gradient_as_bucket_view: true
accumulate_grad_batches: 1
gradient_clip_val: 0.0
diff --git a/examples/asr/conf/fastconformer/cache_aware_streaming/fastconformer_ctc_bpe_streaming.yaml b/examples/asr/conf/fastconformer/cache_aware_streaming/fastconformer_ctc_bpe_streaming.yaml
index 4c80d2f2e9d4..564f4b176e64 100644
--- a/examples/asr/conf/fastconformer/cache_aware_streaming/fastconformer_ctc_bpe_streaming.yaml
+++ b/examples/asr/conf/fastconformer/cache_aware_streaming/fastconformer_ctc_bpe_streaming.yaml
@@ -172,7 +172,7 @@ trainer:
val_check_interval: 1.0 # Set to 0.25 to check 4 times per epoch, or an int for number of iterations
accelerator: auto
strategy:
- _target_: pytorch_lightning.strategies.DDPStrategy
+ _target_: lightning.pytorch.strategies.DDPStrategy
gradient_as_bucket_view: true
accumulate_grad_batches: 1
gradient_clip_val: 1.0
diff --git a/examples/asr/conf/fastconformer/cache_aware_streaming/fastconformer_ctc_char_streaming.yaml b/examples/asr/conf/fastconformer/cache_aware_streaming/fastconformer_ctc_char_streaming.yaml
index 0796a60260a1..6962c03ebe60 100644
--- a/examples/asr/conf/fastconformer/cache_aware_streaming/fastconformer_ctc_char_streaming.yaml
+++ b/examples/asr/conf/fastconformer/cache_aware_streaming/fastconformer_ctc_char_streaming.yaml
@@ -177,7 +177,7 @@ trainer:
val_check_interval: 1.0 # Set to 0.25 to check 4 times per epoch, or an int for number of iterations
accelerator: auto
strategy:
- _target_: pytorch_lightning.strategies.DDPStrategy
+ _target_: lightning.pytorch.strategies.DDPStrategy
gradient_as_bucket_view: true
accumulate_grad_batches: 1
gradient_clip_val: 1.0
diff --git a/examples/asr/conf/fastconformer/cache_aware_streaming/fastconformer_transducer_bpe_streaming.yaml b/examples/asr/conf/fastconformer/cache_aware_streaming/fastconformer_transducer_bpe_streaming.yaml
index 4edcc38396fa..1531bf380b6d 100644
--- a/examples/asr/conf/fastconformer/cache_aware_streaming/fastconformer_transducer_bpe_streaming.yaml
+++ b/examples/asr/conf/fastconformer/cache_aware_streaming/fastconformer_transducer_bpe_streaming.yaml
@@ -228,7 +228,7 @@ trainer:
val_check_interval: 1.0 # Set to 0.25 to check 4 times per epoch, or an int for number of iterations
accelerator: auto
strategy:
- _target_: pytorch_lightning.strategies.DDPStrategy
+ _target_: lightning.pytorch.strategies.DDPStrategy
gradient_as_bucket_view: true
accumulate_grad_batches: 1
gradient_clip_val: 1.0
diff --git a/examples/asr/conf/fastconformer/cache_aware_streaming/fastconformer_transducer_char_streaming.yaml b/examples/asr/conf/fastconformer/cache_aware_streaming/fastconformer_transducer_char_streaming.yaml
index 97b64ef93402..4cb508b0aff3 100644
--- a/examples/asr/conf/fastconformer/cache_aware_streaming/fastconformer_transducer_char_streaming.yaml
+++ b/examples/asr/conf/fastconformer/cache_aware_streaming/fastconformer_transducer_char_streaming.yaml
@@ -234,7 +234,7 @@ trainer:
val_check_interval: 1.0 # Set to 0.25 to check 4 times per epoch, or an int for number of iterations
accelerator: auto
strategy:
- _target_: pytorch_lightning.strategies.DDPStrategy
+ _target_: lightning.pytorch.strategies.DDPStrategy
gradient_as_bucket_view: true
accumulate_grad_batches: 1
gradient_clip_val: 1.0
diff --git a/examples/asr/conf/fastconformer/fast-conformer_ctc_bpe.yaml b/examples/asr/conf/fastconformer/fast-conformer_ctc_bpe.yaml
index ea6094380856..fd5f34aa43cb 100644
--- a/examples/asr/conf/fastconformer/fast-conformer_ctc_bpe.yaml
+++ b/examples/asr/conf/fastconformer/fast-conformer_ctc_bpe.yaml
@@ -198,7 +198,7 @@ trainer:
val_check_interval: 1.0 # Set to 0.25 to check 4 times per epoch, or an int for number of iterations
accelerator: auto
strategy:
- _target_: pytorch_lightning.strategies.DDPStrategy
+ _target_: lightning.pytorch.strategies.DDPStrategy
gradient_as_bucket_view: true
accumulate_grad_batches: 1
gradient_clip_val: 0.0
diff --git a/examples/asr/conf/fastconformer/fast-conformer_transducer_bpe.yaml b/examples/asr/conf/fastconformer/fast-conformer_transducer_bpe.yaml
index 9e2c1a876864..deb7b7ca613a 100644
--- a/examples/asr/conf/fastconformer/fast-conformer_transducer_bpe.yaml
+++ b/examples/asr/conf/fastconformer/fast-conformer_transducer_bpe.yaml
@@ -251,7 +251,7 @@ trainer:
val_check_interval: 1.0 # Set to 0.25 to check 4 times per epoch, or an int for number of iterations
accelerator: auto
strategy:
- _target_: pytorch_lightning.strategies.DDPStrategy
+ _target_: lightning.pytorch.strategies.DDPStrategy
gradient_as_bucket_view: true
accumulate_grad_batches: 1
gradient_clip_val: 0.0
diff --git a/examples/asr/conf/fastconformer/hybrid_cache_aware_streaming/fastconformer_hybrid_transducer_ctc_bpe_streaming.yaml b/examples/asr/conf/fastconformer/hybrid_cache_aware_streaming/fastconformer_hybrid_transducer_ctc_bpe_streaming.yaml
index daef1ed67a9f..6d89a6a52dfb 100644
--- a/examples/asr/conf/fastconformer/hybrid_cache_aware_streaming/fastconformer_hybrid_transducer_ctc_bpe_streaming.yaml
+++ b/examples/asr/conf/fastconformer/hybrid_cache_aware_streaming/fastconformer_hybrid_transducer_ctc_bpe_streaming.yaml
@@ -245,7 +245,7 @@ trainer:
val_check_interval: 1.0 # Set to 0.25 to check 4 times per epoch, or an int for number of iterations
accelerator: auto
strategy:
- _target_: pytorch_lightning.strategies.DDPStrategy
+ _target_: lightning.pytorch.strategies.DDPStrategy
gradient_as_bucket_view: true
accumulate_grad_batches: 1
gradient_clip_val: 1.0
diff --git a/examples/asr/conf/fastconformer/hybrid_cache_aware_streaming/fastconformer_hybrid_transducer_ctc_char_streaming.yaml b/examples/asr/conf/fastconformer/hybrid_cache_aware_streaming/fastconformer_hybrid_transducer_ctc_char_streaming.yaml
index 96aee4af1803..7e6b9c4aa7b4 100644
--- a/examples/asr/conf/fastconformer/hybrid_cache_aware_streaming/fastconformer_hybrid_transducer_ctc_char_streaming.yaml
+++ b/examples/asr/conf/fastconformer/hybrid_cache_aware_streaming/fastconformer_hybrid_transducer_ctc_char_streaming.yaml
@@ -250,7 +250,7 @@ trainer:
val_check_interval: 1.0 # Set to 0.25 to check 4 times per epoch, or an int for number of iterations
accelerator: auto
strategy:
- _target_: pytorch_lightning.strategies.DDPStrategy
+ _target_: lightning.pytorch.strategies.DDPStrategy
gradient_as_bucket_view: true
accumulate_grad_batches: 1
gradient_clip_val: 1.0
diff --git a/examples/asr/conf/fastconformer/hybrid_transducer_ctc/fastconformer_hybrid_transducer_ctc_bpe.yaml b/examples/asr/conf/fastconformer/hybrid_transducer_ctc/fastconformer_hybrid_transducer_ctc_bpe.yaml
index 4ba55e368bb9..12a21c6fba6c 100644
--- a/examples/asr/conf/fastconformer/hybrid_transducer_ctc/fastconformer_hybrid_transducer_ctc_bpe.yaml
+++ b/examples/asr/conf/fastconformer/hybrid_transducer_ctc/fastconformer_hybrid_transducer_ctc_bpe.yaml
@@ -224,7 +224,7 @@ trainer:
val_check_interval: 1.0 # Set to 0.25 to check 4 times per epoch, or an int for number of iterations
accelerator: auto
strategy:
- _target_: pytorch_lightning.strategies.DDPStrategy
+ _target_: lightning.pytorch.strategies.DDPStrategy
gradient_as_bucket_view: true
accumulate_grad_batches: 1
gradient_clip_val: 1.0
diff --git a/examples/asr/conf/fastconformer/hybrid_transducer_ctc/fastconformer_hybrid_transducer_ctc_char.yaml b/examples/asr/conf/fastconformer/hybrid_transducer_ctc/fastconformer_hybrid_transducer_ctc_char.yaml
index ed2ad8ca9c0d..65f657b5416e 100644
--- a/examples/asr/conf/fastconformer/hybrid_transducer_ctc/fastconformer_hybrid_transducer_ctc_char.yaml
+++ b/examples/asr/conf/fastconformer/hybrid_transducer_ctc/fastconformer_hybrid_transducer_ctc_char.yaml
@@ -229,7 +229,7 @@ trainer:
val_check_interval: 1.0 # Set to 0.25 to check 4 times per epoch, or an int for number of iterations
accelerator: auto
strategy:
- _target_: pytorch_lightning.strategies.DDPStrategy
+ _target_: lightning.pytorch.strategies.DDPStrategy
gradient_as_bucket_view: true
accumulate_grad_batches: 1
gradient_clip_val: 1.0
diff --git a/examples/asr/conf/fastconformer/long_fastconformer/fast-conformer-long_ctc_bpe.yaml b/examples/asr/conf/fastconformer/long_fastconformer/fast-conformer-long_ctc_bpe.yaml
index 773a500ef2db..df511883ce80 100644
--- a/examples/asr/conf/fastconformer/long_fastconformer/fast-conformer-long_ctc_bpe.yaml
+++ b/examples/asr/conf/fastconformer/long_fastconformer/fast-conformer-long_ctc_bpe.yaml
@@ -169,7 +169,7 @@ trainer:
val_check_interval: 1.0 # Set to 0.25 to check 4 times per epoch, or an int for number of iterations
accelerator: auto
strategy:
- _target_: pytorch_lightning.strategies.DDPStrategy
+ _target_: lightning.pytorch.strategies.DDPStrategy
gradient_as_bucket_view: true
accumulate_grad_batches: 1
gradient_clip_val: 0.0
@@ -204,4 +204,4 @@ exp_manager:
create_wandb_logger: false
wandb_logger_kwargs:
name: null
- project: null
\ No newline at end of file
+ project: null
diff --git a/examples/asr/conf/fastconformer/long_fastconformer/fast-conformer-long_transducer_bpe.yaml b/examples/asr/conf/fastconformer/long_fastconformer/fast-conformer-long_transducer_bpe.yaml
index fec2a2839efa..0218136cbdbd 100644
--- a/examples/asr/conf/fastconformer/long_fastconformer/fast-conformer-long_transducer_bpe.yaml
+++ b/examples/asr/conf/fastconformer/long_fastconformer/fast-conformer-long_transducer_bpe.yaml
@@ -223,7 +223,7 @@ trainer:
val_check_interval: 1.0 # Set to 0.25 to check 4 times per epoch, or an int for number of iterations
accelerator: auto
strategy:
- _target_: pytorch_lightning.strategies.DDPStrategy
+ _target_: lightning.pytorch.strategies.DDPStrategy
gradient_as_bucket_view: true
accumulate_grad_batches: 1
gradient_clip_val: 0.0
diff --git a/examples/asr/conf/speech_multitask/fast-conformer_aed.yaml b/examples/asr/conf/speech_multitask/fast-conformer_aed.yaml
index 3d1a8c8bdf47..50446dfd9467 100644
--- a/examples/asr/conf/speech_multitask/fast-conformer_aed.yaml
+++ b/examples/asr/conf/speech_multitask/fast-conformer_aed.yaml
@@ -249,7 +249,7 @@ trainer:
val_check_interval: 1.0 # Set to 0.25 to check 4 times per epoch, or an int for number of iterations
accelerator: auto
strategy:
- _target_: pytorch_lightning.strategies.DDPStrategy
+ _target_: lightning.pytorch.strategies.DDPStrategy
gradient_as_bucket_view: true
accumulate_grad_batches: 1
gradient_clip_val: 0.0
diff --git a/examples/asr/experimental/k2/align_speech_parallel.py b/examples/asr/experimental/k2/align_speech_parallel.py
index abfffa0cdfdb..cf07fb998e95 100644
--- a/examples/asr/experimental/k2/align_speech_parallel.py
+++ b/examples/asr/experimental/k2/align_speech_parallel.py
@@ -77,7 +77,7 @@
from dataclasses import dataclass, field, is_dataclass
from typing import Optional
-import pytorch_lightning as ptl
+import lightning.pytorch as ptl
import torch
from omegaconf import MISSING, OmegaConf
diff --git a/examples/asr/experimental/k2/speech_to_text_bpe.py b/examples/asr/experimental/k2/speech_to_text_bpe.py
index ee3924c7b8ac..8a941200770f 100644
--- a/examples/asr/experimental/k2/speech_to_text_bpe.py
+++ b/examples/asr/experimental/k2/speech_to_text_bpe.py
@@ -74,7 +74,7 @@
model.graph_module_cfg.background_cfg.intersect_pruned=False \
model.graph_module_cfg.background_cfg.boost_coeff=0.0
"""
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import OmegaConf
from nemo.collections.asr.models.configs.k2_sequence_models_config import EncDecK2SeqModelConfig
diff --git a/examples/asr/experimental/k2/speech_to_text_rnnt_bpe.py b/examples/asr/experimental/k2/speech_to_text_rnnt_bpe.py
index a0031fba082d..973be0cbd477 100644
--- a/examples/asr/experimental/k2/speech_to_text_rnnt_bpe.py
+++ b/examples/asr/experimental/k2/speech_to_text_rnnt_bpe.py
@@ -63,7 +63,7 @@
"""
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import OmegaConf
from nemo.collections.asr.models import EncDecK2RnntSeqModelBPE
diff --git a/examples/asr/experimental/structured/speech_to_text_hybrid.py b/examples/asr/experimental/structured/speech_to_text_hybrid.py
index 26530631498f..e6126c47305f 100644
--- a/examples/asr/experimental/structured/speech_to_text_hybrid.py
+++ b/examples/asr/experimental/structured/speech_to_text_hybrid.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from nemo.collections.asr.models import EncDecCTCModel, configs
from nemo.core.config import hydra_runner
diff --git a/examples/asr/experimental/structured/speech_to_text_structured.py b/examples/asr/experimental/structured/speech_to_text_structured.py
index 366c6d831a7d..55934c00322e 100644
--- a/examples/asr/experimental/structured/speech_to_text_structured.py
+++ b/examples/asr/experimental/structured/speech_to_text_structured.py
@@ -14,7 +14,7 @@
from dataclasses import asdict
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import nemo.collections.asr as nemo_asr
from nemo.collections.asr.models import EncDecCTCModel, configs
@@ -64,7 +64,13 @@
),
# ... repeat 14 more times
nemo_asr.modules.conv_asr.JasperEncoderConfig(
- filters=1024, repeat=1, kernel=[1], stride=[1], dilation=[1], dropout=cfg.model.dropout, residual=False,
+ filters=1024,
+ repeat=1,
+ kernel=[1],
+ stride=[1],
+ dilation=[1],
+ dropout=cfg.model.dropout,
+ residual=False,
),
]
diff --git a/examples/asr/experimental/structured/speech_to_text_structured_v2.py b/examples/asr/experimental/structured/speech_to_text_structured_v2.py
index e8a865a9877a..146da425fb9b 100644
--- a/examples/asr/experimental/structured/speech_to_text_structured_v2.py
+++ b/examples/asr/experimental/structured/speech_to_text_structured_v2.py
@@ -14,7 +14,7 @@
from dataclasses import asdict
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from nemo.collections.asr.models import EncDecCTCModel, configs
from nemo.core.config import modelPT, optimizers, schedulers
diff --git a/examples/asr/speech_classification/speech_to_frame_label.py b/examples/asr/speech_classification/speech_to_frame_label.py
index 04fcbdd1b61c..39a8e4415de5 100644
--- a/examples/asr/speech_classification/speech_to_frame_label.py
+++ b/examples/asr/speech_classification/speech_to_frame_label.py
@@ -39,7 +39,7 @@
"""
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import OmegaConf
from nemo.collections.asr.models.classification_models import EncDecFrameClassificationModel
diff --git a/examples/asr/speech_classification/speech_to_label.py b/examples/asr/speech_classification/speech_to_label.py
index b3deb5a4e7e5..810d2b5e7bdf 100644
--- a/examples/asr/speech_classification/speech_to_label.py
+++ b/examples/asr/speech_classification/speech_to_label.py
@@ -143,7 +143,7 @@
https://docs.nvidia.com/deeplearning/nemo/user-guide/docs/en/main/asr/speech_classification/results.html#
"""
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
from omegaconf import OmegaConf
diff --git a/examples/asr/speech_multitask/speech_to_text_aed.py b/examples/asr/speech_multitask/speech_to_text_aed.py
index 0c13e5289d86..943ecee59bfc 100644
--- a/examples/asr/speech_multitask/speech_to_text_aed.py
+++ b/examples/asr/speech_multitask/speech_to_text_aed.py
@@ -50,7 +50,7 @@
"""
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import OmegaConf
from nemo.collections.asr.models import EncDecMultiTaskModel
diff --git a/examples/asr/speech_pretraining/downstream/speech_classification_mfa_train.py b/examples/asr/speech_pretraining/downstream/speech_classification_mfa_train.py
index 3a256c7ab2d3..8bd56aa63450 100644
--- a/examples/asr/speech_pretraining/downstream/speech_classification_mfa_train.py
+++ b/examples/asr/speech_pretraining/downstream/speech_classification_mfa_train.py
@@ -14,7 +14,7 @@
from collections import OrderedDict
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
from omegaconf import OmegaConf
diff --git a/examples/asr/speech_pretraining/masked_token_pred_pretrain.py b/examples/asr/speech_pretraining/masked_token_pred_pretrain.py
index 1ea88d696643..e1c740e66412 100644
--- a/examples/asr/speech_pretraining/masked_token_pred_pretrain.py
+++ b/examples/asr/speech_pretraining/masked_token_pred_pretrain.py
@@ -13,7 +13,7 @@
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import OmegaConf
from nemo.collections.asr.models.ssl_models import EncDecDenoiseMaskedTokenPredModel
diff --git a/examples/asr/speech_pretraining/speech_pre_training.py b/examples/asr/speech_pretraining/speech_pre_training.py
index cec9444096c3..0c94099442a6 100644
--- a/examples/asr/speech_pretraining/speech_pre_training.py
+++ b/examples/asr/speech_pretraining/speech_pre_training.py
@@ -13,7 +13,7 @@
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import OmegaConf
from nemo.collections.asr.models.ssl_models import SpeechEncDecSelfSupervisedModel
diff --git a/examples/asr/speech_to_text_finetune.py b/examples/asr/speech_to_text_finetune.py
index 36a7bdc3bbdc..6b53446622ee 100644
--- a/examples/asr/speech_to_text_finetune.py
+++ b/examples/asr/speech_to_text_finetune.py
@@ -54,7 +54,7 @@
https://docs.nvidia.com/deeplearning/nemo/user-guide/docs/en/main/asr/configs.html#fine-tuning-configurations
"""
import time
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import OmegaConf
from nemo.collections.asr.models import ASRModel
diff --git a/examples/asr/speech_translation/speech_to_text_transformer.py b/examples/asr/speech_translation/speech_to_text_transformer.py
index ac4dc4334164..bb7e0b3e4461 100644
--- a/examples/asr/speech_translation/speech_to_text_transformer.py
+++ b/examples/asr/speech_translation/speech_to_text_transformer.py
@@ -40,7 +40,7 @@
"""
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import OmegaConf
from nemo.collections.asr.models import EncDecTransfModelBPE
diff --git a/examples/asr/speech_translation/translate_speech.py b/examples/asr/speech_translation/translate_speech.py
index 47717f562774..76c8c096527f 100644
--- a/examples/asr/speech_translation/translate_speech.py
+++ b/examples/asr/speech_translation/translate_speech.py
@@ -12,13 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import contextlib
import json
import os
from dataclasses import dataclass, is_dataclass
from typing import List, Optional, Union
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
from omegaconf import OmegaConf
@@ -65,13 +64,19 @@
@dataclass
class ModelChangeConfig:
+ """
+ Sub-config for changes specific to the Conformer Encoder
+ """
- # Sub-config for changes specific to the Conformer Encoder
conformer: ConformerChangeConfig = ConformerChangeConfig()
@dataclass
class TranslationConfig:
+ """
+ Translation Configuration for audio to text translation.
+ """
+
# Required configs
model_path: Optional[str] = None # Path to a .nemo file
pretrained_name: Optional[str] = None # Name of a pretrained model
@@ -106,6 +111,9 @@ class TranslationConfig:
@hydra_runner(config_name="TranslationConfig", schema=TranslationConfig)
def main(cfg: TranslationConfig) -> Union[TranslationConfig, List[str]]:
+ """
+ Main function to translate audio to text using a pretrained/finetuned model.
+ """
logging.info(f'Hydra config: {OmegaConf.to_yaml(cfg)}')
for key in cfg:
diff --git a/examples/asr/transcribe_speech.py b/examples/asr/transcribe_speech.py
index e529c988779a..5c4a636e8b1c 100644
--- a/examples/asr/transcribe_speech.py
+++ b/examples/asr/transcribe_speech.py
@@ -12,14 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import contextlib
import json
import os
import time
from dataclasses import dataclass, field, is_dataclass
from typing import List, Optional, Union
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
from omegaconf import OmegaConf, open_dict
@@ -48,14 +47,9 @@
model_path: path to .nemo ASR checkpoint
pretrained_name: name of pretrained ASR model (from NGC registry)
audio_dir: path to directory with audio files
- dataset_manifest: path to dataset JSON manifest file (in NeMo format)
-
- compute_timestamps: Bool to request greedy time stamp information (if the model supports it)
+    dataset_manifest: path to dataset JSON manifest file (in NeMo format)
compute_langs: Bool to request language ID information (if the model supports it)
-
- (Optionally: You can limit the type of timestamp computations using below overrides)
- ctc_decoding.ctc_timestamp_type="all" # (default all, can be [all, char, word])
- rnnt_decoding.rnnt_timestamp_type="all" # (default all, can be [all, char, word])
+  timestamps: Bool to request greedy timestamp information (if the model supports it); None by default
(Optionally: You can limit the type of timestamp computations using below overrides)
ctc_decoding.ctc_timestamp_type="all" # (default all, can be [all, char, word, segment])
@@ -98,7 +92,7 @@
clean_groundtruth_text=True \
langid='en' \
batch_size=32 \
- compute_timestamps=False \
+ timestamps=False \
compute_langs=False \
cuda=0 \
amp=True \
@@ -109,13 +103,19 @@
@dataclass
class ModelChangeConfig:
+ """
+ Sub-config for changes specific to the Conformer Encoder
+ """
- # Sub-config for changes specific to the Conformer Encoder
conformer: ConformerChangeConfig = field(default_factory=ConformerChangeConfig)
@dataclass
class TranscriptionConfig:
+ """
+ Transcription Configuration for audio to text transcription.
+ """
+
# Required configs
model_path: Optional[str] = None # Path to a .nemo file
pretrained_name: Optional[str] = None # Name of a pretrained model
@@ -136,10 +136,11 @@ class TranscriptionConfig:
pred_name_postfix: Optional[str] = None # If you need to use another model name, rather than standard one.
random_seed: Optional[int] = None # seed number going to be used in seed_everything()
- # Set to True to output greedy timestamp information (only supported models)
- compute_timestamps: bool = False
- # set to True if need to return full alignment information
- preserve_alignment: bool = False
+    # Set to True to output greedy timestamp information (if the model supports it) and return full alignment hypotheses
+ timestamps: Optional[bool] = None
+
+ # Set to True to return hypotheses instead of text from the transcribe function
+ return_hypotheses: bool = False
# Set to True to output language ID information
compute_langs: bool = False
@@ -171,7 +172,8 @@ class TranscriptionConfig:
# Implicit single-turn assuming default role='user' (works with Canary-1B)
# +prompt.source_lang=en +prompt.target_lang=es +prompt.task=asr +prompt.pnc=yes
# Explicit single-turn prompt:
- # +prompt.role=user +prompt.slots.source_lang=en +prompt.slots.target_lang=es +prompt.slots.task=s2t_translation +prompt.slots.pnc=yes
+ # +prompt.role=user +prompt.slots.source_lang=en +prompt.slots.target_lang=es
+ # +prompt.slots.task=s2t_translation +prompt.slots.pnc=yes
# Explicit multi-turn prompt:
# +prompt.turns='[{role:user,slots:{source_lang:en,target_lang:es,task:asr,pnc:yes}}]'
prompt: dict = field(default_factory=dict)
@@ -194,9 +196,6 @@ class TranscriptionConfig:
# if True, will also skip writing anything to the output file
return_transcriptions: bool = False
- # Set to False to return text instead of hypotheses from the transcribe function, so as to save memory
- return_hypotheses: bool = True
-
# key for groundtruth text in manifest
gt_text_attr_name: str = "text"
gt_lang_attr_name: str = "lang"
@@ -208,6 +207,9 @@ class TranscriptionConfig:
@hydra_runner(config_name="TranscriptionConfig", schema=TranscriptionConfig)
def main(cfg: TranscriptionConfig) -> Union[TranscriptionConfig, List[Hypothesis]]:
+ """
+    Transcribes the input audio and can be used to run inference with Encoder-Decoder models.
+ """
logging.info(f'Hydra config: {OmegaConf.to_yaml(cfg)}')
for key in cfg:
@@ -272,10 +274,10 @@ def main(cfg: TranscriptionConfig) -> Union[TranscriptionConfig, List[Hypothesis
asr_model.to(getattr(torch, cfg.compute_dtype))
# we will adjust this flag if the model does not support it
- compute_timestamps = cfg.compute_timestamps
compute_langs = cfg.compute_langs
- # has to be True if timestamps are required
- preserve_alignment = True if cfg.compute_timestamps else cfg.preserve_alignment
+
+ if cfg.timestamps:
+ cfg.return_hypotheses = True
# Check whether model and decoder type match
if isinstance(asr_model, EncDecCTCModel):
@@ -295,7 +297,6 @@ def main(cfg: TranscriptionConfig) -> Union[TranscriptionConfig, List[Hypothesis
if hasattr(asr_model, 'change_decoding_strategy') and hasattr(asr_model, 'decoding'):
if isinstance(asr_model.decoding, MultiTaskDecoding):
cfg.multitask_decoding.compute_langs = cfg.compute_langs
- cfg.multitask_decoding.preserve_alignments = cfg.preserve_alignment
if cfg.extract_nbest:
cfg.multitask_decoding.beam.return_best_hypothesis = False
cfg.return_hypotheses = True
@@ -309,9 +310,6 @@ def main(cfg: TranscriptionConfig) -> Union[TranscriptionConfig, List[Hypothesis
if cfg.extract_nbest:
decoding_cfg.beam.return_best_hypothesis = False
cfg.return_hypotheses = True
- decoding_cfg.compute_timestamps = cfg.compute_timestamps # both ctc and rnnt support it
- if 'preserve_alignments' in decoding_cfg:
- decoding_cfg.preserve_alignments = preserve_alignment
if 'compute_langs' in decoding_cfg:
decoding_cfg.compute_langs = cfg.compute_langs
if hasattr(asr_model, 'cur_decoder'):
@@ -325,16 +323,12 @@ def main(cfg: TranscriptionConfig) -> Union[TranscriptionConfig, List[Hypothesis
cfg.rnnt_decoding.beam.return_best_hypothesis = False
cfg.return_hypotheses = True
cfg.rnnt_decoding.fused_batch_size = -1
- cfg.rnnt_decoding.compute_timestamps = cfg.compute_timestamps
cfg.rnnt_decoding.compute_langs = cfg.compute_langs
- if 'preserve_alignments' in cfg.rnnt_decoding:
- cfg.rnnt_decoding.preserve_alignments = preserve_alignment
asr_model.change_decoding_strategy(cfg.rnnt_decoding)
else:
if cfg.compute_langs:
raise ValueError("CTC models do not support `compute_langs` at the moment.")
- cfg.ctc_decoding.compute_timestamps = cfg.compute_timestamps
if cfg.extract_nbest:
cfg.ctc_decoding.beam.return_best_hypothesis = False
cfg.return_hypotheses = True
@@ -379,7 +373,8 @@ def main(cfg: TranscriptionConfig) -> Union[TranscriptionConfig, List[Hypothesis
item = json.loads(line)
if "duration" not in item:
raise ValueError(
- f"Requested calculate_rtfx=True, but line {line} in manifest {cfg.dataset_manifest} lacks a 'duration' field."
+                        f"Requested calculate_rtfx=True, but line {line} in manifest "
+                        f"{cfg.dataset_manifest} lacks a 'duration' field."
)
total_duration += item["duration"]
@@ -396,6 +391,7 @@ def main(cfg: TranscriptionConfig) -> Union[TranscriptionConfig, List[Hypothesis
override_cfg.augmentor = augmentor
override_cfg.text_field = cfg.gt_text_attr_name
override_cfg.lang_field = cfg.gt_lang_attr_name
+ override_cfg.timestamps = cfg.timestamps
if hasattr(override_cfg, "prompt"):
override_cfg.prompt = parse_multitask_prompt(OmegaConf.to_container(cfg.prompt))
@@ -433,7 +429,7 @@ def main(cfg: TranscriptionConfig) -> Union[TranscriptionConfig, List[Hypothesis
model_name,
filepaths=filepaths,
compute_langs=compute_langs,
- compute_timestamps=compute_timestamps,
+ timestamps=cfg.timestamps,
)
logging.info(f"Finished writing predictions to {output_filename}!")
diff --git a/examples/asr/transcribe_speech_parallel.py b/examples/asr/transcribe_speech_parallel.py
index eb905d3e91b0..bdf54ea67f7d 100644
--- a/examples/asr/transcribe_speech_parallel.py
+++ b/examples/asr/transcribe_speech_parallel.py
@@ -75,7 +75,7 @@
from dataclasses import dataclass, is_dataclass
from typing import Optional
-import pytorch_lightning as ptl
+import lightning.pytorch as ptl
import torch
from omegaconf import MISSING, OmegaConf
diff --git a/examples/audio/audio_to_audio_train.py b/examples/audio/audio_to_audio_train.py
index cef46dcf20b6..4d71e75176c9 100644
--- a/examples/audio/audio_to_audio_train.py
+++ b/examples/audio/audio_to_audio_train.py
@@ -28,7 +28,7 @@
"""
from enum import Enum
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
from omegaconf import OmegaConf
diff --git a/examples/audio/process_audio.py b/examples/audio/process_audio.py
index ec88bda34954..8657d53ef957 100644
--- a/examples/audio/process_audio.py
+++ b/examples/audio/process_audio.py
@@ -20,7 +20,7 @@
from pathlib import Path
from typing import List, Optional
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
from omegaconf import OmegaConf
diff --git a/examples/llm/peft/hf.py b/examples/llm/peft/hf.py
index c6dbbf90bf29..5b24c22ab79d 100644
--- a/examples/llm/peft/hf.py
+++ b/examples/llm/peft/hf.py
@@ -13,7 +13,7 @@
# limitations under the License.
import fiddle as fdl
-from pytorch_lightning.loggers import WandbLogger
+from lightning.pytorch.loggers import WandbLogger
from nemo import lightning as nl
from nemo.collections import llm
@@ -96,7 +96,7 @@ def formatting_prompts_func(examples):
use_distributed_sampler=use_dist_samp,
logger=wandb,
),
- optim=fdl.build(llm.adam.pytorch_adam_with_flat_lr(max_lr=1e-5, clip_grad=0.5)),
+ optim=fdl.build(llm.adam.pytorch_adam_with_flat_lr(lr=1e-5)),
log=None,
peft=llm.peft.LoRA(
target_modules=['*_proj'],
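As the hunk above suggests, `pytorch_adam_with_flat_lr` now takes the learning rate directly as `lr`, with gradient clipping handled by the Trainer rather than the optimizer factory. A minimal sketch under that assumption:

    import fiddle as fdl
    from nemo.collections import llm

    # Build the flat-LR Adam optimizer; `lr` replaces the former `max_lr`/`clip_grad` arguments.
    optim = fdl.build(llm.adam.pytorch_adam_with_flat_lr(lr=1e-5))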
diff --git a/examples/llm/sft/hf.py b/examples/llm/sft/hf.py
old mode 100644
new mode 100755
index b7e12d8fb2de..59b8b4ad3491
--- a/examples/llm/sft/hf.py
+++ b/examples/llm/sft/hf.py
@@ -13,16 +13,18 @@
# limitations under the License.
import fiddle as fdl
-import pytorch_lightning as pl
-from pytorch_lightning.loggers import WandbLogger
+import lightning.pytorch as pl
+from lightning.pytorch.loggers import WandbLogger
from torch.utils.data import DataLoader
from nemo import lightning as nl
from nemo.collections import llm
+from nemo.lightning.pytorch.accelerate.transformer_engine import is_te_accelerated, te_accelerate
+from nemo.lightning.pytorch.callbacks import ModelCallback
class SquadDataModuleWithPthDataloader(llm.SquadDataModule):
- def _create_dataloader(self, dataset, **kwargs) -> DataLoader:
+ def _create_dataloader(self, dataset, mode, **kwargs) -> DataLoader:
return DataLoader(
dataset,
num_workers=self.num_workers,
@@ -41,7 +43,7 @@ def squad(tokenizer) -> pl.LightningDataModule:
micro_batch_size=2,
global_batch_size=128, # assert gbs == mbs * accumulate_grad_batches
num_workers=0,
- sanity_check_dist_workers=False,
+ dataset_kwargs={"sanity_check_dist_workers": False},
)
@@ -53,8 +55,11 @@ def squad(tokenizer) -> pl.LightningDataModule:
parser.add_argument('--strategy', type=str, default='auto', choices=['auto', 'ddp', 'fsdp'])
parser.add_argument('--devices', default=1)
parser.add_argument('--accelerator', default='gpu', choices=['gpu'])
+ parser.add_argument('--model-accelerator', default=None, choices=['te'])
parser.add_argument('--max-steps', type=int, default=100)
+ parser.add_argument("--fp8-autocast", default=False, action='store_true')
parser.add_argument('--wandb-project', type=str, default=None)
+ parser.add_argument('--model-save-path', type=str, default=None)
args = parser.parse_args()
wandb = None
@@ -70,9 +75,20 @@ def squad(tokenizer) -> pl.LightningDataModule:
grad_clip = None
use_dist_samp = False
+ model = llm.HfAutoModelForCausalLM(args.model)
+ tokenizer = model.tokenizer
+
+ callbacks = []
+ if args.model_accelerator:
+ if args.model_accelerator == "te":
+ model_transform = ModelCallback(
+ on_train_start=lambda model: te_accelerate(model, fp8_autocast=args.fp8_autocast)
+ )
+ callbacks.append(model_transform)
+
llm.api.finetune(
- model=llm.HfAutoModelForCausalLM(args.model),
- data=squad(llm.HfAutoModelForCausalLM.configure_tokenizer(args.model)),
+ model=model,
+ data=squad(tokenizer),
trainer=nl.Trainer(
devices=args.devices,
max_steps=args.max_steps,
@@ -84,8 +100,18 @@ def squad(tokenizer) -> pl.LightningDataModule:
accumulate_grad_batches=10,
gradient_clip_val=grad_clip,
use_distributed_sampler=use_dist_samp,
+ callbacks=callbacks,
logger=wandb,
),
- optim=fdl.build(llm.adam.pytorch_adam_with_flat_lr(max_lr=1e-5, clip_grad=0.5)),
+ optim=fdl.build(llm.adam.pytorch_adam_with_flat_lr(lr=1e-5)),
log=None,
)
+
+ if args.model_accelerator:
+ if args.model_accelerator == "te":
+ te_acc = is_te_accelerated(model.model)
+ assert te_acc, "Transformer Engine acceleration was unsuccessful"
+ print("TE Accelerated: ", te_acc)
+
+ if args.model_save_path is not None:
+ model.save_pretrained(args.model_save_path)
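For reference, a minimal sketch of the Transformer Engine callback wiring introduced above, using only names that appear in this diff:

    from nemo.lightning.pytorch.accelerate.transformer_engine import is_te_accelerated, te_accelerate
    from nemo.lightning.pytorch.callbacks import ModelCallback

    # Swap supported layers for TE equivalents once training starts; fp8 autocast stays off unless requested.
    te_callback = ModelCallback(on_train_start=lambda model: te_accelerate(model, fp8_autocast=False))

    # After fine-tuning, is_te_accelerated(model.model) reports whether the swap took effect.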
diff --git a/examples/llm/sft/hf_vllm.py b/examples/llm/sft/hf_vllm.py
new file mode 100755
index 000000000000..8110c0fafc4f
--- /dev/null
+++ b/examples/llm/sft/hf_vllm.py
@@ -0,0 +1,62 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from nemo.deploy import DeployPyTriton
+from nemo.deploy.nlp import NemoQueryLLM
+
+try:
+ from nemo.export.vllm_hf_exporter import vLLMHFExporter
+except Exception:
+ raise Exception(
+ "vLLM should be installed in the environment or import "
+ "the vLLM environment in the NeMo FW container using "
+ "source /opt/venv/bin/activate command"
+ )
+
+
+if __name__ == '__main__':
+ import argparse
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--model', required=True, type=str, help="Local path or model name on Hugging Face")
+ parser.add_argument('--triton-model-name', required=True, type=str, help="Name for the service")
+ args = parser.parse_args()
+
+ exporter = vLLMHFExporter()
+ exporter.export(model=args.model)
+
+ nm = DeployPyTriton(
+ model=exporter,
+ triton_model_name=args.triton_model_name,
+ triton_model_version=1,
+ max_batch_size=64,
+ port=8000,
+ address="0.0.0.0",
+ )
+
+ nm.deploy()
+ nm.run()
+
+ nq = NemoQueryLLM(url="localhost:8000", model_name=args.triton_model_name)
+ output_deployed = nq.query_llm(
+ prompts=["How are you doing?"],
+ max_output_len=128,
+ top_k=1,
+ top_p=0.2,
+ temperature=1.0,
+ )
+
+ print("------------- Output: ", output_deployed)
+ nm.stop()
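A typical invocation of this script; the model and service names below are illustrative only:

    python examples/llm/sft/hf_vllm.py \
        --model meta-llama/Llama-2-7b-hf \
        --triton-model-name llama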
diff --git a/examples/multimodal/multimodal_llm/neva/convert_llava_to_neva.py b/examples/multimodal/multimodal_llm/neva/convert_llava_to_neva.py
index d02b737c750a..874d62dc63c9 100644
--- a/examples/multimodal/multimodal_llm/neva/convert_llava_to_neva.py
+++ b/examples/multimodal/multimodal_llm/neva/convert_llava_to_neva.py
@@ -34,10 +34,10 @@
from collections import OrderedDict
import torch
+from lightning.pytorch.core.saving import _load_state as ptl_load_state
+from lightning.pytorch.trainer.trainer import Trainer
from llava import LlavaLlamaForCausalLM
from omegaconf import OmegaConf
-from pytorch_lightning.core.saving import _load_state as ptl_load_state
-from pytorch_lightning.trainer.trainer import Trainer
from safetensors import safe_open
from transformers import LlamaTokenizer
diff --git a/examples/multimodal/speech_llm/export/extract_salm_weights.py b/examples/multimodal/speech_llm/export/extract_salm_weights.py
index 0698a411110e..24c7aec3bb4d 100644
--- a/examples/multimodal/speech_llm/export/extract_salm_weights.py
+++ b/examples/multimodal/speech_llm/export/extract_salm_weights.py
@@ -18,9 +18,9 @@
import tempfile
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from megatron.core import dist_checkpointing
from omegaconf import OmegaConf
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.multimodal.speech_llm.modules.perception_modules import AudioPerceptionModule
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
diff --git a/examples/multimodal/text_to_image/controlnet/controlnet_train.py b/examples/multimodal/text_to_image/controlnet/controlnet_train.py
index 2bb8b66cac1a..14e7e62a1cc7 100644
--- a/examples/multimodal/text_to_image/controlnet/controlnet_train.py
+++ b/examples/multimodal/text_to_image/controlnet/controlnet_train.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from pytorch_lightning import Trainer
+from lightning.pytorch import Trainer
from nemo.collections.multimodal.models.text_to_image.controlnet.controlnet import MegatronControlNet
from nemo.collections.multimodal.models.text_to_image.controlnet.util import ImageLogger
diff --git a/examples/multimodal/text_to_image/convert_hf_ckpt_to_nemo.py b/examples/multimodal/text_to_image/convert_hf_ckpt_to_nemo.py
index cebf159eb870..c50ad439eaec 100644
--- a/examples/multimodal/text_to_image/convert_hf_ckpt_to_nemo.py
+++ b/examples/multimodal/text_to_image/convert_hf_ckpt_to_nemo.py
@@ -27,10 +27,10 @@
from argparse import ArgumentParser
import torch
-from lightning_fabric.utilities.cloud_io import _load as pl_load
+from lightning.fabric.utilities.cloud_io import _load as pl_load
+from lightning.pytorch.plugins.environments import TorchElasticEnvironment
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf
-from pytorch_lightning.plugins.environments import TorchElasticEnvironment
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.multimodal.models.text_to_image.controlnet.controlnet import MegatronControlNet
from nemo.collections.multimodal.models.text_to_image.stable_diffusion.diffusion_engine import MegatronDiffusionEngine
diff --git a/examples/multimodal/text_to_image/dreambooth/dreambooth_lora_infer.py b/examples/multimodal/text_to_image/dreambooth/dreambooth_lora_infer.py
index 52f0aa2940d2..e1d050f83939 100644
--- a/examples/multimodal/text_to_image/dreambooth/dreambooth_lora_infer.py
+++ b/examples/multimodal/text_to_image/dreambooth/dreambooth_lora_infer.py
@@ -12,9 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
+from lightning.pytorch import Trainer
+from lightning.pytorch.plugins.environments import TorchElasticEnvironment
from omegaconf import open_dict
-from pytorch_lightning import Trainer
-from pytorch_lightning.plugins.environments import TorchElasticEnvironment
from nemo.collections.multimodal.models.text_to_image.stable_diffusion.ldm.ddpm import MegatronLatentDiffusion
from nemo.collections.multimodal.parts.stable_diffusion.pipeline import pipeline
@@ -48,7 +48,10 @@ def model_cfg_modifier(model_cfg):
plugins = []
plugins.append(TorchElasticEnvironment())
- strategy = NLPDDPStrategy(no_ddp_communication_hook=True, find_unused_parameters=False,)
+ strategy = NLPDDPStrategy(
+ no_ddp_communication_hook=True,
+ find_unused_parameters=False,
+ )
trainer = Trainer(plugins=plugins, strategy=strategy, **cfg.trainer)
model = MegatronLatentDiffusion(model_cfg, trainer=trainer)
diff --git a/examples/multimodal/text_to_image/imagen/generate_fid_images.py b/examples/multimodal/text_to_image/imagen/generate_fid_images.py
index ea743e3e1d06..7d2df372b545 100644
--- a/examples/multimodal/text_to_image/imagen/generate_fid_images.py
+++ b/examples/multimodal/text_to_image/imagen/generate_fid_images.py
@@ -15,7 +15,7 @@
import os
import torch
-from pytorch_lightning import Trainer
+from lightning.pytorch import Trainer
from nemo.collections.multimodal.models.text_to_image.imagen.imagen_pipeline import ImagenPipeline
from nemo.core.config import hydra_runner
@@ -79,7 +79,10 @@ def main(cfg):
seeds = [local_task_id * chunk_size + batch_idx * batch_size + idx for idx in range(len(batch_captions))]
with torch.no_grad():
images, all_res_images, *_ = pipeline(
- prompts=batch_captions, seed=seeds, single_batch_mode=True, classifier_free_guidance=current_node_cfg,
+ prompts=batch_captions,
+ seed=seeds,
+ single_batch_mode=True,
+ classifier_free_guidance=current_node_cfg,
)
if cfg.fid.save_all_res:
diff --git a/examples/multimodal/text_to_image/imagen/imagen_generate_images.py b/examples/multimodal/text_to_image/imagen/imagen_generate_images.py
index bc002052a989..06b324367a52 100644
--- a/examples/multimodal/text_to_image/imagen/imagen_generate_images.py
+++ b/examples/multimodal/text_to_image/imagen/imagen_generate_images.py
@@ -16,8 +16,8 @@
import pickle
import torch
+from lightning.pytorch import Trainer
from omegaconf import OmegaConf
-from pytorch_lightning import Trainer
from nemo.collections.multimodal.models.text_to_image.imagen.imagen_pipeline import (
ImagenPipeline,
@@ -65,7 +65,11 @@ def main(inference_config):
seed = batch_idx + chuncksize
with torch.no_grad():
- images, all_res_images, throughput = pipeline(prompts=batch_captions, seed=seeds, single_batch_mode=True,)
+ images, all_res_images, throughput = pipeline(
+ prompts=batch_captions,
+ seed=seeds,
+ single_batch_mode=True,
+ )
for outpath, one_res in zip(outpaths, all_res_images):
for idx, (caption, image) in enumerate(zip(batch_captions, one_res[0])):
diff --git a/examples/multimodal/text_to_image/imagen/imagen_infer.py b/examples/multimodal/text_to_image/imagen/imagen_infer.py
index 0fb291729596..9ce680cf4b09 100644
--- a/examples/multimodal/text_to_image/imagen/imagen_infer.py
+++ b/examples/multimodal/text_to_image/imagen/imagen_infer.py
@@ -14,8 +14,8 @@
import os
+from lightning.pytorch import Trainer
from omegaconf import OmegaConf
-from pytorch_lightning import Trainer
from nemo.collections.multimodal.models.text_to_image.imagen.imagen_pipeline import (
ImagenPipeline,
diff --git a/examples/multimodal/text_to_image/imagen/imagen_training.py b/examples/multimodal/text_to_image/imagen/imagen_training.py
index 23c1c9c1a1d7..211299156b69 100644
--- a/examples/multimodal/text_to_image/imagen/imagen_training.py
+++ b/examples/multimodal/text_to_image/imagen/imagen_training.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
from omegaconf.omegaconf import OmegaConf, open_dict
from torch._dynamo import disable
diff --git a/examples/multimodal/text_to_image/stable_diffusion/sd_lora_infer.py b/examples/multimodal/text_to_image/stable_diffusion/sd_lora_infer.py
index 0877d4eb4b2f..0d83a8daab9f 100644
--- a/examples/multimodal/text_to_image/stable_diffusion/sd_lora_infer.py
+++ b/examples/multimodal/text_to_image/stable_diffusion/sd_lora_infer.py
@@ -12,9 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
+from lightning.pytorch import Trainer
+from lightning.pytorch.plugins.environments import TorchElasticEnvironment
from omegaconf import open_dict
-from pytorch_lightning import Trainer
-from pytorch_lightning.plugins.environments import TorchElasticEnvironment
from nemo.collections.multimodal.models.text_to_image.stable_diffusion.ldm.ddpm import MegatronLatentDiffusion
from nemo.collections.multimodal.parts.stable_diffusion.pipeline import pipeline
@@ -45,7 +45,10 @@ def model_cfg_modifier(model_cfg):
plugins = []
plugins.append(TorchElasticEnvironment())
- strategy = NLPDDPStrategy(no_ddp_communication_hook=True, find_unused_parameters=False,)
+ strategy = NLPDDPStrategy(
+ no_ddp_communication_hook=True,
+ find_unused_parameters=False,
+ )
trainer = Trainer(plugins=plugins, strategy=strategy, **cfg.trainer)
model = MegatronLatentDiffusion(model_cfg, trainer=trainer)
diff --git a/examples/multimodal/text_to_image/stable_diffusion/sd_xl_train.py b/examples/multimodal/text_to_image/stable_diffusion/sd_xl_train.py
index 44412aee0d14..4ef22b69aa64 100644
--- a/examples/multimodal/text_to_image/stable_diffusion/sd_xl_train.py
+++ b/examples/multimodal/text_to_image/stable_diffusion/sd_xl_train.py
@@ -13,10 +13,11 @@
# limitations under the License.
import sys
+
import torch
import torch._dynamo.config as dynamo_config
+from lightning.pytorch import Trainer
from omegaconf.omegaconf import OmegaConf
-from pytorch_lightning import Trainer
from nemo.collections.multimodal.models.text_to_image.stable_diffusion.diffusion_engine import MegatronDiffusionEngine
from nemo.collections.nlp.parts.megatron_trainer_builder import MegatronTrainerBuilder
diff --git a/examples/multimodal/vision_language_foundation/clip/convert_external_clip_to_nemo.py b/examples/multimodal/vision_language_foundation/clip/convert_external_clip_to_nemo.py
index 178140aac828..abc987e07097 100644
--- a/examples/multimodal/vision_language_foundation/clip/convert_external_clip_to_nemo.py
+++ b/examples/multimodal/vision_language_foundation/clip/convert_external_clip_to_nemo.py
@@ -45,9 +45,9 @@
import einops
import open_clip
import torch
+from lightning.pytorch.plugins.environments import TorchElasticEnvironment
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf
-from pytorch_lightning.plugins.environments import TorchElasticEnvironment
-from pytorch_lightning.trainer.trainer import Trainer
from transformers import CLIPModel
from nemo.collections.multimodal.models.vision_language_foundation.clip.megatron_clip_models import MegatronCLIPModel
diff --git a/examples/multimodal/x_to_nerf/benchmark_callback.py b/examples/multimodal/x_to_nerf/benchmark_callback.py
index fd7d5afdc5bc..2db78d1d385a 100644
--- a/examples/multimodal/x_to_nerf/benchmark_callback.py
+++ b/examples/multimodal/x_to_nerf/benchmark_callback.py
@@ -15,7 +15,7 @@
import time
from typing import Optional
-from pytorch_lightning import Callback, LightningModule, Trainer
+from lightning.pytorch import Callback, LightningModule, Trainer
from nemo.utils import logging
diff --git a/examples/multimodal/x_to_nerf/data.py b/examples/multimodal/x_to_nerf/data.py
index fe7c47abc64b..b8dfd3aa536b 100644
--- a/examples/multimodal/x_to_nerf/data.py
+++ b/examples/multimodal/x_to_nerf/data.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf.omegaconf import DictConfig
from torch.utils.data import DataLoader
diff --git a/examples/multimodal/x_to_nerf/main.py b/examples/multimodal/x_to_nerf/main.py
index 5d7f616a3165..f3c8a6949867 100644
--- a/examples/multimodal/x_to_nerf/main.py
+++ b/examples/multimodal/x_to_nerf/main.py
@@ -13,8 +13,8 @@
# limitations under the License.
from hydra.utils import get_class, instantiate
+from lightning.pytorch import Trainer, seed_everything
from omegaconf.omegaconf import DictConfig, OmegaConf
-from pytorch_lightning import Trainer, seed_everything
from nemo.core.config import hydra_runner
from nemo.utils import logging
diff --git a/examples/multimodal_autoregressive/README.md b/examples/multimodal_autoregressive/README.md
new file mode 100644
index 000000000000..5934074a7d17
--- /dev/null
+++ b/examples/multimodal_autoregressive/README.md
@@ -0,0 +1,3 @@
+### MULTIMODAL AUTOREGRESSIVE GENERATION
+
+For information on how to get started with autoregressive generation for multimodal datasets using discrete tokenizers, follow this [guide](nemo/collections/multimodal_autoregressive/data/README.md).
diff --git a/examples/multimodal_autoregressive/conf/megatron_mm_ar_inference_image_generation.yaml b/examples/multimodal_autoregressive/conf/megatron_mm_ar_inference_image_generation.yaml
new file mode 100644
index 000000000000..806800c96155
--- /dev/null
+++ b/examples/multimodal_autoregressive/conf/megatron_mm_ar_inference_image_generation.yaml
@@ -0,0 +1,36 @@
+inference:
+  greedy: True # Whether to use greedy decoding; sampling is used otherwise
+  top_k: 0 # The number of highest-probability vocabulary tokens to keep for top-k filtering.
+  top_p: 0.9 # If set to float < 1, only the most probable tokens with probabilities that add up to top_p or higher are kept for generation.
+  temperature: 1.0 # sampling temperature
+  add_BOS: True # add the bos token at the beginning of the prompt
+  tokens_to_generate: 30 # The maximum number of tokens to generate.
+  all_probs: False # whether to return the log prob for all the tokens in the vocab
+  repetition_penalty: 1.2 # The parameter for repetition penalty. 1.0 means no penalty.
+  min_tokens_to_generate: 0 # The minimum length of the sequence to be generated.
+  compute_logprob: False # a flag used to compute the logprob of all the input text; a special case of running inference, default False
+ end_strings: ["<|extra_204|>"] # generation will stop when one of these tokens is generated
+
+trainer:
+ devices: 1
+ num_nodes: 1
+ accelerator: gpu
+ logger: False # logger provided by exp_manager
+ precision: bf16 # 16, 32, or bf16
+ use_distributed_sampler: False
+
+tensor_model_parallel_size: -1
+pipeline_model_parallel_size: -1
+pipeline_model_parallel_split_rank: -1 # used for encoder and decoder model (0 for others)
+megatron_amp_O2: False # Enable O2-level automatic mixed precision to save memory
+image_encoder: Cosmos-Tokenizer-DV8x16x16
+gpt_model_file: null # GPT nemo file path
+checkpoint_dir: null # checkpoint file dir. This is used to load the PTL checkpoint generated during the GPT training
+checkpoint_name: null # PTL checkpoint file name, only used for PTL checkpoint loading
+hparams_file: null # model configuration file, only used for PTL checkpoint loading
+captions: # prompts for GPT inference
+ - "a drawing of a green pokemon with red eyes"
+ - "a red pokemon with green eyes"
+ - "a cartoon fish with a big smile"
+images_output_path: null # Path to the directory to store the output images
+
diff --git a/examples/multimodal_autoregressive/conf/megatron_mm_ar_inference_vision_understanding.yaml b/examples/multimodal_autoregressive/conf/megatron_mm_ar_inference_vision_understanding.yaml
new file mode 100644
index 000000000000..c392f5dcc5c2
--- /dev/null
+++ b/examples/multimodal_autoregressive/conf/megatron_mm_ar_inference_vision_understanding.yaml
@@ -0,0 +1,32 @@
+inference:
+  greedy: True # Whether to use greedy decoding; sampling is used otherwise
+  top_k: 0 # The number of highest-probability vocabulary tokens to keep for top-k filtering.
+  top_p: 0.9 # If set to float < 1, only the most probable tokens with probabilities that add up to top_p or higher are kept for generation.
+  temperature: 1.0 # sampling temperature
+  add_BOS: False # add the bos token at the beginning of the prompt
+  tokens_to_generate: 30 # The maximum number of tokens to generate.
+  all_probs: False # whether to return the log prob for all the tokens in the vocab
+  repetition_penalty: 1.2 # The parameter for repetition penalty. 1.0 means no penalty.
+  min_tokens_to_generate: 0 # The minimum length of the sequence to be generated.
+  compute_logprob: False # a flag used to compute the logprob of all the input text; a special case of running inference, default False
+ end_strings: ["<|extra_204|>"] # generation will stop when one of these tokens is generated
+
+trainer:
+ devices: 1
+ num_nodes: 1
+ accelerator: gpu
+ logger: False # logger provided by exp_manager
+ precision: bf16 # 16, 32, or bf16
+ use_distributed_sampler: False
+
+tensor_model_parallel_size: -1
+pipeline_model_parallel_size: -1
+pipeline_model_parallel_split_rank: -1 # used for encoder and decoder model (0 for others)
+megatron_amp_O2: False # Enable O2-level automatic mixed precision to save memory
+gpt_model_file: null # GPT nemo file path
+checkpoint_dir: null # checkpoint file dir. This is used to load the PTL checkpoint generated during the GPT training
+checkpoint_name: null # PTL checkpoint file name, only used for PTL checkpoint loading
+hparams_file: null # model configuration file, only used for PTL checkpoint loading
+images_path: # paths to the input images for inference
+ - "/path/to/image1"
+ - "/path/to/image2"
diff --git a/examples/multimodal_autoregressive/megatron_mm_autoregressive_eval_image_generation.py b/examples/multimodal_autoregressive/megatron_mm_autoregressive_eval_image_generation.py
new file mode 100644
index 000000000000..ae8dddb29553
--- /dev/null
+++ b/examples/multimodal_autoregressive/megatron_mm_autoregressive_eval_image_generation.py
@@ -0,0 +1,196 @@
+# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+import math
+import os
+import re
+
+import torch
+import torchvision
+from examples.nlp.language_modeling.megatron_gpt_eval import (
+ load_model_from_config,
+ remove_padded_prompts,
+ round_to_mult,
+)
+from lightning.pytorch.trainer.trainer import Trainer
+
+# pylint: disable=line-too-long
+from nemo.collections.common.video_tokenizers.cosmos_tokenizer import CausalVideoTokenizer
+from nemo.collections.nlp.modules.common.transformer.text_generation import LengthParam, SamplingParam
+from nemo.collections.nlp.parts.nlp_overrides import CustomProgressBar, NLPDDPStrategy
+from nemo.core.config import hydra_runner
+
+"""
+This is the script to run multimodal autoregressive image generation.
+
+Make sure you install tiktoken==0.6.0
+
+Usage:
+ Assume the model has TP=1, PP=1 in the following use cases.
+ a. run greedy inference from a nemo file:
+        python megatron_mm_autoregressive_eval_image_generation.py \
+ gpt_model_file=PATH_TO_MODEL \
+ inference.greedy=True \
+ inference.add_BOS=True \
+ trainer.devices=1 \
+ trainer.num_nodes=1 \
+ tensor_model_parallel_size=-1 \
+ pipeline_model_parallel_size=-1 \
+ captions=[caption1,caption2]
+
+ b. run greedy inference from a PTL checkpoint file:
+        python megatron_mm_autoregressive_eval_image_generation.py \
+ checkpoint_dir=PATH_TO_CHECKPOINT_FILE \
+ checkpoint_name=CHECKPOINT_FILE_NAME \
+ hparams_file=HPARAMS_FILE \
+ inference.greedy=True \
+ inference.add_BOS=True \
+ trainer.devices=1 \
+ trainer.num_nodes=1 \
+ tensor_model_parallel_size=-1 \
+ pipeline_model_parallel_size=-1 \
+ captions=[caption1,caption2]
+
+ c. run top_p inference from a nemo file:
+        python megatron_mm_autoregressive_eval_image_generation.py \
+ gpt_model_file=PATH_TO_MODEL \
+ inference.greedy=False \
+ inference.top_k=0 \
+ inference.top_p=0.9 \
+ inference.repetition_penalty=1.2 \
+ inference.add_BOS=True \
+ trainer.devices=1 \
+ trainer.num_nodes=1 \
+ tensor_model_parallel_size=-1 \
+ pipeline_model_parallel_size=-1 \
+ captions=[caption1,caption2]
+
+    d. If you don't need to generate tokens and only need the model to compute logprobs:
+        python megatron_mm_autoregressive_eval_image_generation.py \
+ gpt_model_file=PATH_TO_MODEL \
+ inference.compute_logprob=True \
+ trainer.devices=1 \
+ trainer.num_nodes=1 \
+ tensor_model_parallel_size=-1 \
+ pipeline_model_parallel_size=-1 \
+ captions=[caption1,caption2]
+"""
+
+
+def to_img(tokens_string, image_tokenizer):
+ """Converts visual tokens to images
+
+    Given input visual tokens, we extract the indices and pass them to the decoder to get the image
+ """
+ visual_token_pattern = r"<\|visual token (\d+)\|>"
+ visual_tokens = [int(match) for match in re.findall(visual_token_pattern, tokens_string)]
+    # We assume the image is square, so if 64 tokens are present we reshape them to 8x8 before passing to the decoder
+ dim = int(math.sqrt(len(visual_tokens)))
+ visual_tokens_tensor = torch.tensor(visual_tokens[: dim * dim])
+ # Decoder accepts input of the following format [bs, channel_dim, h, w]
+ visual_tokens_tensor_reshaped = visual_tokens_tensor.reshape((dim, dim)).unsqueeze(0).unsqueeze(0)
+ visual_tokens_final = visual_tokens_tensor_reshaped.to(image_tokenizer._device)
+ img = image_tokenizer.decode(visual_tokens_final)
+
+    # Convert from bf16 to fp32 and to format [channel_dim, h, w]
+ image = torchvision.transforms.functional.to_pil_image(img.float().squeeze())
+ return image
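A worked instance of the square-grid assumption in to_img, shapes only (no tokenizer or GPU required):

    import math

    import torch

    # 64 visual tokens -> 8x8 grid -> [1, 1, 8, 8], the [bs, channel_dim, h, w] layout the decoder expects
    tokens = list(range(64))
    dim = int(math.sqrt(len(tokens)))  # 8
    grid = torch.tensor(tokens[: dim * dim]).reshape(dim, dim).unsqueeze(0).unsqueeze(0)
    assert grid.shape == (1, 1, 8, 8)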
+
+
+def load_prompts(cfg):
+ """Function to return the prompts passed into the model"""
+ prompts = []
+ for caption in cfg.captions:
+ prompt = f'You are a helpful assistant. Draw a picture for the caption given by the user. USER: {caption}. ASSISTANT: '
+ prompts.append(prompt)
+ return prompts
+
+
+if not torch.cuda.is_available():
+    raise EnvironmentError("A GPU is needed for inference")
+
+
+@hydra_runner(config_path="conf", config_name="megatron_mm_ar_inference_image_generation")
+def main(cfg) -> None:
+ """Main function"""
+
+ callbacks = []
+ # enable_progress_bar is True by default. If cfg.trainer.enable_progress_bar=False, CustomProgressBar is not appended to callbacks
+ if 'enable_progress_bar' not in cfg.trainer or cfg.trainer.enable_progress_bar:
+ callbacks.append(CustomProgressBar())
+ # trainer required for restoring model parallel models
+ trainer = Trainer(
+ strategy=NLPDDPStrategy(timeout=datetime.timedelta(seconds=18000)),
+ **cfg.trainer,
+ callbacks=callbacks,
+ )
+
+ image_tokenizer = CausalVideoTokenizer.from_pretrained(
+ tokenizer_type=cfg.image_encoder, load_encoder=False, load_decoder=True, load_full_model=False
+ )
+
+ model = load_model_from_config(trainer, cfg)
+ model.freeze()
+
+ # Have to turn off activations_checkpoint_method for inference
+ try:
+ model.model.language_model.encoder.activations_checkpoint_method = None
+ except AttributeError:
+ pass
+
+ length_params: LengthParam = {
+ "max_length": cfg.inference.tokens_to_generate,
+ "min_length": cfg.inference.min_tokens_to_generate,
+ }
+
+ sampling_params: SamplingParam = {
+ "use_greedy": cfg.inference.greedy,
+ "temperature": cfg.inference.temperature,
+ "top_k": cfg.inference.top_k,
+ "top_p": cfg.inference.top_p,
+ "repetition_penalty": cfg.inference.repetition_penalty,
+ "add_BOS": cfg.inference.add_BOS,
+ "all_probs": cfg.inference.all_probs,
+ "compute_logprob": cfg.inference.compute_logprob,
+ "end_strings": cfg.inference.end_strings,
+ }
+
+ prompts = []
+ with torch.no_grad():
+ prompts = load_prompts(cfg)
+
+ fp8_enabled = hasattr(model.cfg, "fp8") and (model.cfg.fp8 == True)
+ if fp8_enabled and len(prompts) > 0:
+ padded_len = round_to_mult(len(prompts), 8)
+ nb_paddings = padded_len - len(prompts)
+ if nb_paddings > 0:
+            prompts += [''] * nb_paddings  # pad the prompt list to a multiple of 8 for fp8 inference
+
+ # First method of running text generation, call model.generate method
+ response = model.generate(inputs=prompts, length_params=length_params, sampling_params=sampling_params)
+
+ if fp8_enabled:
+ response = remove_padded_prompts(response, nb_paddings)
+
+ output_tokens_strings = response['sentences']
+ for idx, output_token_string in enumerate(output_tokens_strings):
+ image = to_img(output_token_string, image_tokenizer)
+ image.save(os.path.join(cfg.images_output_path, f'{idx}.jpg'))
+
+ print(f'Images saved to {cfg.images_output_path}')
+
+
+if __name__ == '__main__':
+ main() # noqa pylint: disable=no-value-for-parameter
diff --git a/examples/multimodal_autoregressive/megatron_mm_autoregressive_eval_vision_understanding.py b/examples/multimodal_autoregressive/megatron_mm_autoregressive_eval_vision_understanding.py
new file mode 100644
index 000000000000..4aea4d9898ae
--- /dev/null
+++ b/examples/multimodal_autoregressive/megatron_mm_autoregressive_eval_vision_understanding.py
@@ -0,0 +1,220 @@
+# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import datetime
+
+import torch
+import torchvision
+from examples.nlp.language_modeling.megatron_gpt_eval import (
+ RequestDataSet,
+ load_model_from_config,
+ remove_padded_prompts,
+ round_to_mult,
+)
+from omegaconf import OmegaConf
+from PIL import Image
+from lightning.pytorch.trainer.trainer import Trainer
+from torch.utils.data import DataLoader
+from transformers import AutoModel, AutoTokenizer
+
+# pylint: disable=line-too-long
+from nemo.collections.nlp.modules.common.transformer.text_generation import LengthParam, SamplingParam
+from nemo.collections.nlp.parts.nlp_overrides import CustomProgressBar, NLPDDPStrategy
+from nemo.core.config import hydra_runner
+
+"""
+This is the script to run multimodal autoregressive text generation.
+
+Make sure you install tiktoken==0.6.0
+
+Usage:
+ Assume the model has TP=1, PP=1 in the following use cases.
+ a. run greedy inference from a nemo file:
+        python megatron_mm_autoregressive_eval_vision_understanding.py \
+ gpt_model_file=PATH_TO_MODEL \
+ inference.greedy=True \
+ inference.add_BOS=True \
+ trainer.devices=1 \
+ trainer.num_nodes=1 \
+ tensor_model_parallel_size=-1 \
+ pipeline_model_parallel_size=-1 \
+ images_path=[image_path1,image_path2]
+
+ b. run greedy inference from a PTL checkpoint file:
+        python megatron_mm_autoregressive_eval_vision_understanding.py \
+ checkpoint_dir=PATH_TO_CHECKPOINT_FILE \
+ checkpoint_name=CHECKPOINT_FILE_NAME \
+ hparams_file=HPARAMS_FILE \
+ inference.greedy=True \
+ inference.add_BOS=True \
+ trainer.devices=1 \
+ trainer.num_nodes=1 \
+ tensor_model_parallel_size=-1 \
+ pipeline_model_parallel_size=-1 \
+ images_path=[image_path1,image_path2]
+
+ c. run top_p inference from a nemo file:
+        python megatron_mm_autoregressive_eval_vision_understanding.py \
+ gpt_model_file=PATH_TO_MODEL \
+ inference.greedy=False \
+ inference.top_k=0 \
+ inference.top_p=0.9 \
+ inference.repetition_penalty=1.2 \
+ inference.add_BOS=True \
+ trainer.devices=1 \
+ trainer.num_nodes=1 \
+ tensor_model_parallel_size=-1 \
+ pipeline_model_parallel_size=-1 \
+ images_path=[image_path1,image_path2]
+
+    d. If you don't need to generate tokens and only need the model to compute logprobs:
+        python megatron_mm_autoregressive_eval_vision_understanding.py \
+ gpt_model_file=PATH_TO_MODEL \
+ inference.compute_logprob=True \
+ trainer.devices=1 \
+ trainer.num_nodes=1 \
+ tensor_model_parallel_size=-1 \
+ pipeline_model_parallel_size=-1 \
+ images_path=[image_path1,image_path2]
+"""
+
+EMU_HUB = "BAAI/Emu3-Gen"
+VQ_HUB = "BAAI/Emu3-VisionTokenizer"
+
+
+def to_imgstr(image_tokens, tokenizer):
+ """Convert integer image tokens to visual tokens string"""
+ image_tokens = image_tokens.cpu().numpy().tolist()
+ image_token_str = [
+ ['<|visual token {token_id:0>6d}|>'.format(token_id=token_id) for token_id in token_row]
+ for token_row in image_tokens
+ ]
+ image_row_str = ["".join(token_row) for token_row in image_token_str]
+ imgstr = tokenizer.eol_token.join(image_row_str)
+ return imgstr
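A small self-contained sketch of the row joining in to_imgstr; the end-of-line token value here is a placeholder, the real one comes from the Emu3 tokenizer:

    rows = [[1, 2], [3, 4]]
    row_strs = [''.join('<|visual token {:0>6d}|>'.format(t) for t in row) for row in rows]
    imgstr = '<|eol|>'.join(row_strs)  # '<|eol|>' stands in for tokenizer.eol_token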
+
+
+def load_prompts(cfg, image_tokenizer, tokenizer):
+ """Function to generate prompts
+
+ The prompts generated here are fed to the model.
+ """
+ prompts = []
+ text = "Please describe the image"
+ for image_path in cfg.images_path:
+ image = Image.open(image_path)
+ image_tensor = torchvision.transforms.functional.pil_to_tensor(image).unsqueeze(0)
+ image_tokens = image_tokenizer.encode(image_tensor.to(image_tokenizer.device, image_tokenizer.dtype))
+ bs, h, w = image_tokens.shape
+ imgstr = to_imgstr(image_tokens[0], tokenizer=tokenizer)
+ image_prompt = (
+ tokenizer.boi_token
+ + f'{h}*{w}'
+ + tokenizer.img_token
+ + imgstr
+ + tokenizer.eol_token
+ + tokenizer.eof_token
+ + tokenizer.eoi_token
+ )
+ prompt = f'{tokenizer.bos_token}You are a helpful assistant. USER: {image_prompt}{text}. ASSISTANT:'
+ prompts.append(prompt)
+ return prompts
+
+
+if not torch.cuda.is_available():
+    raise EnvironmentError("A GPU is needed for inference")
+
+
+@hydra_runner(config_path="conf", config_name="megatron_mm_ar_inference_vision_understanding")
+def main(cfg) -> None:
+ """Main function"""
+
+ callbacks = []
+ # enable_progress_bar is True by default. If cfg.trainer.enable_progress_bar=False, CustomProgressBar is not appended to callbacks
+ if 'enable_progress_bar' not in cfg.trainer or cfg.trainer.enable_progress_bar:
+ callbacks.append(CustomProgressBar())
+ # trainer required for restoring model parallel models
+ trainer = Trainer(
+ strategy=NLPDDPStrategy(timeout=datetime.timedelta(seconds=18000)),
+ **cfg.trainer,
+ callbacks=callbacks,
+ )
+
+ tokenizer = AutoTokenizer.from_pretrained(EMU_HUB, trust_remote_code=True)
+ image_tokenizer = AutoModel.from_pretrained(VQ_HUB, device_map="cuda", trust_remote_code=True).eval()
+
+ model = load_model_from_config(trainer, cfg)
+ model.freeze()
+
+ # Have to turn off activations_checkpoint_method for inference
+ try:
+ model.model.language_model.encoder.activations_checkpoint_method = None
+ except AttributeError:
+ pass
+
+ length_params: LengthParam = {
+ "max_length": cfg.inference.tokens_to_generate,
+ "min_length": cfg.inference.min_tokens_to_generate,
+ }
+
+ sampling_params: SamplingParam = {
+ "use_greedy": cfg.inference.greedy,
+ "temperature": cfg.inference.temperature,
+ "top_k": cfg.inference.top_k,
+ "top_p": cfg.inference.top_p,
+ "repetition_penalty": cfg.inference.repetition_penalty,
+ "add_BOS": cfg.inference.add_BOS,
+ "all_probs": cfg.inference.all_probs,
+ "compute_logprob": cfg.inference.compute_logprob,
+ "end_strings": cfg.inference.end_strings,
+ }
+
+ prompts = []
+ with torch.no_grad():
+ prompts = load_prompts(cfg, image_tokenizer, tokenizer)
+
+ fp8_enabled = hasattr(model.cfg, "fp8") and (model.cfg.fp8 == True)
+ if fp8_enabled and len(prompts) > 0:
+ padded_len = round_to_mult(len(prompts), 8)
+ nb_paddings = padded_len - len(prompts)
+ if nb_paddings > 0:
+            prompts += [''] * nb_paddings  # pad the prompt list to a multiple of 8 for fp8 inference
+
+ # First method of running text generation, call model.generate method
+ response = model.generate(inputs=prompts, length_params=length_params, sampling_params=sampling_params)
+
+ if fp8_enabled:
+ response = remove_padded_prompts(response, nb_paddings)
+ print("***************************")
+ print(response)
+ print("***************************")
+
+ # Second method of running text generation, call trainer.predict [recommended]
+ bs = 8 if fp8_enabled else 2
+ ds = RequestDataSet(prompts)
+ request_dl = DataLoader(dataset=ds, batch_size=bs)
+ config = OmegaConf.to_container(cfg.inference)
+ model.set_inference_config(config)
+ response = trainer.predict(model, request_dl)
+
+ if fp8_enabled:
+ response[-1] = remove_padded_prompts(response[-1], nb_paddings)
+ print("***************************")
+ print(response)
+ print("***************************")
+
+
+if __name__ == '__main__':
+ main() # noqa pylint: disable=no-value-for-parameter
diff --git a/examples/nlp/dialogue/dialogue.py b/examples/nlp/dialogue/dialogue.py
index 578895a2ad43..3f4c5581eb5a 100644
--- a/examples/nlp/dialogue/dialogue.py
+++ b/examples/nlp/dialogue/dialogue.py
@@ -42,7 +42,7 @@
import os
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import DictConfig, OmegaConf
from nemo.collections.nlp.models.dialogue.dialogue_gpt_classification_model import DialogueGPTClassificationModel
diff --git a/examples/nlp/duplex_text_normalization/helpers.py b/examples/nlp/duplex_text_normalization/helpers.py
index 6c1cfe37b90d..d9b8780fd787 100644
--- a/examples/nlp/duplex_text_normalization/helpers.py
+++ b/examples/nlp/duplex_text_normalization/helpers.py
@@ -14,7 +14,7 @@
import os
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import DictConfig
from nemo.collections.nlp.data.text_normalization import constants
@@ -29,7 +29,7 @@
def instantiate_model_and_trainer(cfg: DictConfig, model_name: str, do_training: bool):
- """ Function for instantiating a model and a trainer
+ """Function for instantiating a model and a trainer
Args:
cfg: The config used to instantiate the model and the trainer.
model_name: A str indicates whether the model to be instantiated is a tagger or a decoder (i.e., model_name should be either TAGGER_MODEL or DECODER_MODEL).
diff --git a/examples/nlp/entity_linking/self_alignment_pretraining.py b/examples/nlp/entity_linking/self_alignment_pretraining.py
index a1ac1ac327cb..58b20f384d04 100644
--- a/examples/nlp/entity_linking/self_alignment_pretraining.py
+++ b/examples/nlp/entity_linking/self_alignment_pretraining.py
@@ -16,8 +16,8 @@
# Please see tutorial at Nemo/tutorials/nlp/Entity_Linking_Medical.ipynb for
# more information on entity linking and self alignment pretraining.
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf
-from pytorch_lightning import Trainer
from nemo.collections.nlp.models import EntityLinkingModel
from nemo.core.config import hydra_runner
diff --git a/examples/nlp/glue_benchmark/glue_benchmark.py b/examples/nlp/glue_benchmark/glue_benchmark.py
index 3cb5f8e4af3e..28efb9520fbd 100644
--- a/examples/nlp/glue_benchmark/glue_benchmark.py
+++ b/examples/nlp/glue_benchmark/glue_benchmark.py
@@ -35,7 +35,7 @@
import os
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import DictConfig, OmegaConf
from nemo.collections.nlp.models import GLUEModel
diff --git a/examples/nlp/information_retrieval/bert_dpr.py b/examples/nlp/information_retrieval/bert_dpr.py
index 2d9cd962ff34..4fc791da04fd 100644
--- a/examples/nlp/information_retrieval/bert_dpr.py
+++ b/examples/nlp/information_retrieval/bert_dpr.py
@@ -13,7 +13,7 @@
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import DictConfig, OmegaConf
from nemo.collections.nlp.models import BertDPRModel
diff --git a/examples/nlp/information_retrieval/bert_joint_ir.py b/examples/nlp/information_retrieval/bert_joint_ir.py
index 1bb164e580d1..f95cdd04e036 100644
--- a/examples/nlp/information_retrieval/bert_joint_ir.py
+++ b/examples/nlp/information_retrieval/bert_joint_ir.py
@@ -13,7 +13,7 @@
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import DictConfig, OmegaConf
from nemo.collections.nlp.models import BertJointIRModel
diff --git a/examples/nlp/information_retrieval/megatron_gpt_embedding_finetuning.py b/examples/nlp/information_retrieval/megatron_gpt_embedding_finetuning.py
index e1fe28cc892f..9cb5cb5d3d19 100644
--- a/examples/nlp/information_retrieval/megatron_gpt_embedding_finetuning.py
+++ b/examples/nlp/information_retrieval/megatron_gpt_embedding_finetuning.py
@@ -15,8 +15,8 @@
from collections.abc import MutableMapping
import torch.multiprocessing as mp
+from lightning.pytorch.loggers import WandbLogger
from omegaconf.omegaconf import OmegaConf
-from pytorch_lightning.loggers import WandbLogger
from nemo.collections.nlp.models.information_retrieval.megatron_gpt_embedding_model import MegatronGPTEmbeddingModel
from nemo.collections.nlp.parts.megatron_trainer_builder import MegatronLMPPTrainerBuilder
diff --git a/examples/nlp/information_retrieval/megatron_gpt_reranker_finetuning.py b/examples/nlp/information_retrieval/megatron_gpt_reranker_finetuning.py
index cf65840bb843..be89e5bf5c43 100644
--- a/examples/nlp/information_retrieval/megatron_gpt_reranker_finetuning.py
+++ b/examples/nlp/information_retrieval/megatron_gpt_reranker_finetuning.py
@@ -15,8 +15,8 @@
from collections.abc import MutableMapping
import torch.multiprocessing as mp
+from lightning.pytorch.loggers import WandbLogger
from omegaconf.omegaconf import OmegaConf
-from pytorch_lightning.loggers import WandbLogger
from nemo.collections.nlp.models.information_retrieval.megatron_gpt_reranker_model import MegatronGPTRerankerModel
from nemo.collections.nlp.parts.megatron_trainer_builder import MegatronLMPPTrainerBuilder
diff --git a/examples/nlp/intent_slot_classification/intent_slot_classification.py b/examples/nlp/intent_slot_classification/intent_slot_classification.py
index a112ea7785f5..2025f48f330f 100644
--- a/examples/nlp/intent_slot_classification/intent_slot_classification.py
+++ b/examples/nlp/intent_slot_classification/intent_slot_classification.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import DictConfig, OmegaConf
from nemo.collections.nlp.models import IntentSlotClassificationModel
diff --git a/examples/nlp/intent_slot_classification/multi_label_intent_slot_classification.py b/examples/nlp/intent_slot_classification/multi_label_intent_slot_classification.py
index 2441885e2ed2..232aa7d4d230 100644
--- a/examples/nlp/intent_slot_classification/multi_label_intent_slot_classification.py
+++ b/examples/nlp/intent_slot_classification/multi_label_intent_slot_classification.py
@@ -27,7 +27,7 @@
"""
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import DictConfig, OmegaConf
from nemo.collections.nlp.models import MultiLabelIntentSlotClassificationModel
diff --git a/examples/nlp/language_modeling/bert_pretraining.py b/examples/nlp/language_modeling/bert_pretraining.py
index 75d0a1072e69..7cff43f7fc73 100644
--- a/examples/nlp/language_modeling/bert_pretraining.py
+++ b/examples/nlp/language_modeling/bert_pretraining.py
@@ -13,9 +13,9 @@
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
+from lightning.pytorch.strategies import DDPStrategy
from omegaconf import DictConfig, OmegaConf
-from pytorch_lightning.strategies import DDPStrategy
from nemo.collections.nlp.models.language_modeling import BERTLMModel
from nemo.core.config import hydra_runner
diff --git a/examples/nlp/language_modeling/mamba_change_num_partition.py b/examples/nlp/language_modeling/mamba_change_num_partition.py
index ced2b43cd312..349543de8e59 100644
--- a/examples/nlp/language_modeling/mamba_change_num_partition.py
+++ b/examples/nlp/language_modeling/mamba_change_num_partition.py
@@ -19,8 +19,8 @@
from argparse import ArgumentParser
import torch
+from lightning.pytorch import Trainer
from omegaconf import open_dict
-from pytorch_lightning import Trainer
from nemo.collections.nlp.models.language_modeling.megatron_mamba_model import MegatronMambaModel
from nemo.collections.nlp.parts.nlp_overrides import (
diff --git a/examples/nlp/language_modeling/megatron_bart_pretraining.py b/examples/nlp/language_modeling/megatron_bart_pretraining.py
index e45b5e04ca45..a6dd6f183d72 100644
--- a/examples/nlp/language_modeling/megatron_bart_pretraining.py
+++ b/examples/nlp/language_modeling/megatron_bart_pretraining.py
@@ -13,11 +13,11 @@
# limitations under the License.
+from lightning.pytorch import Trainer
+from lightning.pytorch.callbacks import ModelSummary
+from lightning.pytorch.plugins.environments import TorchElasticEnvironment
+from lightning.pytorch.trainer.connectors.checkpoint_connector import _CheckpointConnector
from omegaconf.omegaconf import OmegaConf, open_dict
-from pytorch_lightning import Trainer
-from pytorch_lightning.callbacks import ModelSummary
-from pytorch_lightning.plugins.environments import TorchElasticEnvironment
-from pytorch_lightning.trainer.connectors.checkpoint_connector import _CheckpointConnector
from nemo.collections.nlp.models.language_modeling.megatron_bart_model import MegatronBARTModel
from nemo.collections.nlp.parts.nlp_overrides import (
@@ -48,7 +48,7 @@ def main(cfg) -> None:
scaler = None
if cfg.trainer.precision in [16, '16', '16-mixed']:
scaler = GradScaler(
- init_scale=cfg.model.get('native_amp_init_scale', 2 ** 32),
+ init_scale=cfg.model.get('native_amp_init_scale', 2**32),
growth_interval=cfg.model.get('native_amp_growth_interval', 1000),
hysteresis=cfg.model.get('hysteresis', 2),
)
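The 2 ** 32 → 2**32 edit above is formatting only; the scale value is unchanged. For reference, a hedged sketch of building an equivalent scaler from a plain dict with the same config keys. Note that torch.cuda.amp.GradScaler accepts init_scale and growth_interval, while the hysteresis argument in the hunk belongs to NeMo's GradScaler subclass, not the stock PyTorch class:

from torch.cuda.amp import GradScaler

model_cfg = {}  # hypothetical stand-in for cfg.model
scaler = GradScaler(
    init_scale=model_cfg.get("native_amp_init_scale", 2**32),
    growth_interval=model_cfg.get("native_amp_growth_interval", 1000),
)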
diff --git a/examples/nlp/language_modeling/megatron_change_num_partitions.py b/examples/nlp/language_modeling/megatron_change_num_partitions.py
index c035346e3bf1..49d1ef0dcb57 100644
--- a/examples/nlp/language_modeling/megatron_change_num_partitions.py
+++ b/examples/nlp/language_modeling/megatron_change_num_partitions.py
@@ -21,8 +21,8 @@
import torch
import torch.nn as nn
+from lightning.pytorch import Trainer
from omegaconf import OmegaConf, open_dict
-from pytorch_lightning import Trainer
from nemo.collections.nlp.parts.nlp_overrides import (
NEMO_MEGATRON_MODEL_PARALLEL_APPSTATE_OVERRIDE,
@@ -922,7 +922,7 @@ def main():
scaler = None
if precision in [16, '16', '16-mixed']:
scaler = GradScaler(
- init_scale=tmp_cfg.get('native_amp_init_scale', 2 ** 32),
+ init_scale=tmp_cfg.get('native_amp_init_scale', 2**32),
growth_interval=tmp_cfg.get('native_amp_growth_interval', 1000),
hysteresis=tmp_cfg.get('hysteresis', 2),
)
@@ -943,7 +943,10 @@ def main():
if tp_size < 0 or pp_size < 0:
logging.info(f"Loading model config from {args.model_file} to get TP and PP size")
model_config_internal = cls.restore_from(
- restore_path=args.model_file, trainer=trainer, map_location=torch.device("cpu"), return_config=True,
+ restore_path=args.model_file,
+ trainer=trainer,
+ map_location=torch.device("cpu"),
+ return_config=True,
)
tp_size = model_config_internal.get('tensor_model_parallel_size', 1)
@@ -1137,7 +1140,9 @@ def main():
else:
model = cls.load_from_checkpoint(
- checkpoint_path=checkpoint_path, trainer=trainer, map_location=torch.device("cpu"),
+ checkpoint_path=checkpoint_path,
+ trainer=trainer,
+ map_location=torch.device("cpu"),
)
model.freeze()
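The call-site churn in this file (and in several hunks below) is Black's magic trailing comma at work: once a call carries a trailing comma inside the parentheses, the formatter keeps one argument per line. A small self-contained illustration with a hypothetical function:

def restore(path, trainer=None, map_location=None, return_config=False):
    """Hypothetical stand-in for the cls.restore_from call in the hunk above."""
    return {"path": path, "return_config": return_config}

# Before formatting the call fit on one line but ended with a trailing comma;
# after Black, that comma forces the exploded one-argument-per-line layout:
cfg = restore(
    "model.nemo",
    trainer=None,
    map_location="cpu",
    return_config=True,
)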
diff --git a/examples/nlp/language_modeling/megatron_ckpt_to_nemo.py b/examples/nlp/language_modeling/megatron_ckpt_to_nemo.py
index c81119489582..4b9fab987dc7 100644
--- a/examples/nlp/language_modeling/megatron_ckpt_to_nemo.py
+++ b/examples/nlp/language_modeling/megatron_ckpt_to_nemo.py
@@ -32,10 +32,10 @@
import torch
from genericpath import isdir
+from lightning.pytorch.plugins.environments import TorchElasticEnvironment
+from lightning.pytorch.trainer.trainer import Trainer
from megatron.core import parallel_state
from omegaconf import OmegaConf, open_dict
-from pytorch_lightning.plugins.environments import TorchElasticEnvironment
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.models.language_modeling.megatron_bart_model import MegatronBARTModel
from nemo.collections.nlp.models.language_modeling.megatron_bert_model import MegatronBertModel
@@ -112,6 +112,11 @@ def get_args():
choices=['32-true', '16-mixed', 'bf16-mixed'],
help="Precision value for the trainer that matches with precision of the ckpt",
)
+ parser.add_argument(
+ "--convert_mlm",
+ action="store_true",
+ help="Use this flag to convert megatron-lm checkpoints.",
+ )
args = parser.parse_args()
return args
@@ -195,7 +200,9 @@ def convert(local_rank, rank, world_size, args):
)
if args.model_type == 'gpt':
- model = MegatronGPTModel.load_from_checkpoint(checkpoint_path, hparams_file=args.hparams_file, trainer=trainer)
+ model = MegatronGPTModel.load_from_checkpoint(
+ checkpoint_path, hparams_file=args.hparams_file, trainer=trainer, load_mlm=args.convert_mlm
+ )
elif args.model_type == 'sft':
model = MegatronGPTSFTModel.load_from_checkpoint(
checkpoint_path, hparams_file=args.hparams_file, trainer=trainer
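The new --convert_mlm argument above is a standard store_true flag that gets forwarded to load_from_checkpoint as load_mlm. A minimal sketch of just the argparse half (the model call is quoted from the hunk, not re-run here):

from argparse import ArgumentParser

parser = ArgumentParser()
parser.add_argument(
    "--convert_mlm",
    action="store_true",
    help="Use this flag to convert megatron-lm checkpoints.",
)
args = parser.parse_args(["--convert_mlm"])
assert args.convert_mlm is True
# Forwarded downstream per the hunk:
#   MegatronGPTModel.load_from_checkpoint(..., load_mlm=args.convert_mlm)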
diff --git a/examples/nlp/language_modeling/megatron_export.py b/examples/nlp/language_modeling/megatron_export.py
index bf9157884bfc..b511a415d9b1 100644
--- a/examples/nlp/language_modeling/megatron_export.py
+++ b/examples/nlp/language_modeling/megatron_export.py
@@ -28,8 +28,8 @@
import os
+from lightning.pytorch import Trainer
from omegaconf import OmegaConf, open_dict
-from pytorch_lightning import Trainer
from nemo.collections.nlp.models.language_modeling.megatron_bart_model import MegatronBARTModel
from nemo.collections.nlp.models.language_modeling.megatron_bert_model import MegatronBertModel
diff --git a/examples/nlp/language_modeling/megatron_gpt_distillation.py b/examples/nlp/language_modeling/megatron_gpt_distillation.py
index dc8614be23b2..c00470c5c81e 100644
--- a/examples/nlp/language_modeling/megatron_gpt_distillation.py
+++ b/examples/nlp/language_modeling/megatron_gpt_distillation.py
@@ -19,8 +19,8 @@
import modelopt.torch.distill as mtd
import modelopt.torch.opt as mto
import torch.multiprocessing as mp
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import DictConfig, OmegaConf, open_dict
-from pytorch_lightning.trainer.trainer import Trainer
try:
from megatron.core import parallel_state, tensor_parallel
diff --git a/examples/nlp/language_modeling/megatron_gpt_eval.py b/examples/nlp/language_modeling/megatron_gpt_eval.py
index b9b0d2973094..4dbbee78e898 100644
--- a/examples/nlp/language_modeling/megatron_gpt_eval.py
+++ b/examples/nlp/language_modeling/megatron_gpt_eval.py
@@ -20,8 +20,8 @@
from functools import partial
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf, open_dict
-from pytorch_lightning.trainer.trainer import Trainer
from torch.utils.data import DataLoader, Dataset
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
diff --git a/examples/nlp/language_modeling/megatron_gpt_mcore_batch_eval.py b/examples/nlp/language_modeling/megatron_gpt_mcore_batch_eval.py
index 988a5f8588ff..ceb32d75f495 100644
--- a/examples/nlp/language_modeling/megatron_gpt_mcore_batch_eval.py
+++ b/examples/nlp/language_modeling/megatron_gpt_mcore_batch_eval.py
@@ -16,6 +16,7 @@
import os
from argparse import Namespace
+from lightning.pytorch.trainer.trainer import Trainer
from megatron.core.inference.common_inference_params import CommonInferenceParams
from megatron.core.inference.engines.mcore_engine import MCoreEngine
from megatron.core.inference.inference_model_wrappers.gpt.gpt_inference_wrapper import GPTInferenceWrapper
@@ -23,7 +24,6 @@
SimpleTextGenerationController,
)
from omegaconf import OmegaConf, open_dict
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
from nemo.collections.nlp.modules.common.megatron.megatron_init import fake_initialize_model_parallel
diff --git a/examples/nlp/language_modeling/megatron_gpt_prune.py b/examples/nlp/language_modeling/megatron_gpt_prune.py
index de12b861a1c0..44992873f362 100644
--- a/examples/nlp/language_modeling/megatron_gpt_prune.py
+++ b/examples/nlp/language_modeling/megatron_gpt_prune.py
@@ -16,8 +16,8 @@
import torch
import torch.multiprocessing as mp
from datasets import load_dataset
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf
-from pytorch_lightning.trainer.trainer import Trainer
from tqdm import tqdm
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
diff --git a/examples/nlp/language_modeling/megatron_gpt_ptq.py b/examples/nlp/language_modeling/megatron_gpt_ptq.py
index e41becc2d8e0..0ac0822c5fbe 100644
--- a/examples/nlp/language_modeling/megatron_gpt_ptq.py
+++ b/examples/nlp/language_modeling/megatron_gpt_ptq.py
@@ -15,8 +15,8 @@
import torch
import torch.multiprocessing as mp
from datasets import load_dataset
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf
-from pytorch_lightning.trainer.trainer import Trainer
from tqdm import tqdm
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
diff --git a/examples/nlp/language_modeling/megatron_gpt_test.py b/examples/nlp/language_modeling/megatron_gpt_test.py
index 62a1d40dbaed..03bc6735e891 100644
--- a/examples/nlp/language_modeling/megatron_gpt_test.py
+++ b/examples/nlp/language_modeling/megatron_gpt_test.py
@@ -12,8 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from lightning.pytorch import Trainer
from omegaconf.omegaconf import OmegaConf
-from pytorch_lightning import Trainer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
from nemo.collections.nlp.modules.common.megatron.megatron_utils import compute_model_parallel_rank
@@ -38,7 +38,7 @@ def main(cfg) -> None:
trainer = Trainer(
plugins=[
NLPMixedPrecisionPlugin(
- init_scale=cfg.model.get('native_amp_init_scale', 2 ** 32),
+ init_scale=cfg.model.get('native_amp_init_scale', 2**32),
growth_interval=cfg.model.get('native_amp_growth_interval', 1000),
),
],
@@ -46,7 +46,13 @@ def main(cfg) -> None:
**cfg.trainer,
)
elif cfg.trainer.precision in ['bf16', 'bf16-mixed']:
- trainer = Trainer(plugins=[NLPNativeBfloat16PrecisionPlugin(),], strategy=NLPDDPStrategy(), **cfg.trainer,)
+ trainer = Trainer(
+ plugins=[
+ NLPNativeBfloat16PrecisionPlugin(),
+ ],
+ strategy=NLPDDPStrategy(),
+ **cfg.trainer,
+ )
else:
trainer = Trainer(plugins=[NLPPrecisionPlugin()], strategy=NLPDDPStrategy(), **cfg.trainer)
@@ -55,7 +61,9 @@ def main(cfg) -> None:
app_state.model_parallel_rank = compute_model_parallel_rank(trainer.local_rank, app_state.model_parallel_size)
model = MegatronGPTModel.restore_from(
- cfg.restore_from_path, trainer=trainer, save_restore_connector=NLPSaveRestoreConnector(),
+ cfg.restore_from_path,
+ trainer=trainer,
+ save_restore_connector=NLPSaveRestoreConnector(),
)
# Note: most nemo models must have the data paths configured before instantiating the model
diff --git a/examples/nlp/language_modeling/megatron_gpt_validate.py b/examples/nlp/language_modeling/megatron_gpt_validate.py
index b5a61e627a14..fa0abb89421c 100644
--- a/examples/nlp/language_modeling/megatron_gpt_validate.py
+++ b/examples/nlp/language_modeling/megatron_gpt_validate.py
@@ -15,8 +15,8 @@
import os
import tempfile
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf, open_dict
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
from nemo.collections.nlp.modules.common.megatron.megatron_init import fake_initialize_model_parallel
@@ -140,7 +140,9 @@ def main(cfg) -> None:
with tempfile.NamedTemporaryFile(suffix='.yaml') as f:
OmegaConf.save(config=pretrained_cfg, f=f.name)
model = MegatronGPTModel.load_from_checkpoint(
- checkpoint_path=checkpoint_path, trainer=trainer, hparams_file=f.name,
+ checkpoint_path=checkpoint_path,
+ trainer=trainer,
+ hparams_file=f.name,
)
else:
raise ValueError("need at least a nemo file or checkpoint dir")
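The hunk above reformats a common restore pattern: the resolved config is serialized to a temporary YAML file so it can be handed to load_from_checkpoint as hparams_file. A standalone sketch of the temp-file half, assuming omegaconf is installed:

import tempfile

from omegaconf import OmegaConf

pretrained_cfg = OmegaConf.create({"hidden_size": 768, "num_layers": 12})
with tempfile.NamedTemporaryFile(suffix=".yaml") as f:
    OmegaConf.save(config=pretrained_cfg, f=f.name)
    # f.name would be passed as hparams_file= to load_from_checkpoint(...)
    print(open(f.name).read())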
diff --git a/examples/nlp/language_modeling/megatron_lm_ckpt_to_nemo.py b/examples/nlp/language_modeling/megatron_lm_ckpt_to_nemo.py
index 72252a03d5be..64ba2a51bb71 100644
--- a/examples/nlp/language_modeling/megatron_lm_ckpt_to_nemo.py
+++ b/examples/nlp/language_modeling/megatron_lm_ckpt_to_nemo.py
@@ -42,12 +42,12 @@
from typing import Any, Optional
import torch
-from lightning_fabric.utilities.cloud_io import _load as pl_load
+from lightning.fabric.utilities.cloud_io import _load as pl_load
+from lightning.pytorch.core.saving import _load_state as ptl_load_state
+from lightning.pytorch.core.saving import load_hparams_from_tags_csv, load_hparams_from_yaml
+from lightning.pytorch.trainer.trainer import Trainer
+from lightning.pytorch.utilities.migration import pl_legacy_patch
from megatron.core import parallel_state
-from pytorch_lightning.core.saving import _load_state as ptl_load_state
-from pytorch_lightning.core.saving import load_hparams_from_tags_csv, load_hparams_from_yaml
-from pytorch_lightning.trainer.trainer import Trainer
-from pytorch_lightning.utilities.migration import pl_legacy_patch
from nemo.collections.nlp.models.language_modeling.megatron_bert_model import MegatronBertModel
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
diff --git a/examples/nlp/language_modeling/megatron_mamba_eval.py b/examples/nlp/language_modeling/megatron_mamba_eval.py
index ed12e4b904ac..ba000e6bef63 100644
--- a/examples/nlp/language_modeling/megatron_mamba_eval.py
+++ b/examples/nlp/language_modeling/megatron_mamba_eval.py
@@ -20,8 +20,8 @@
from functools import partial
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf, open_dict
-from pytorch_lightning.trainer.trainer import Trainer
from torch.utils.data import DataLoader, Dataset
from nemo.collections.nlp.models.language_modeling.megatron_mamba_model import MegatronMambaModel
diff --git a/examples/nlp/language_modeling/megatron_retro_cal_shape.py b/examples/nlp/language_modeling/megatron_retro_cal_shape.py
index a57a927d2a36..f790d9471964 100644
--- a/examples/nlp/language_modeling/megatron_retro_cal_shape.py
+++ b/examples/nlp/language_modeling/megatron_retro_cal_shape.py
@@ -12,10 +12,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from lightning.pytorch import Trainer
+from lightning.pytorch.plugins.environments import TorchElasticEnvironment
+from lightning.pytorch.plugins.precision import MixedPrecisionPlugin
from omegaconf.omegaconf import OmegaConf, open_dict
-from pytorch_lightning import Trainer
-from pytorch_lightning.plugins.environments import TorchElasticEnvironment
-from pytorch_lightning.plugins.precision import MixedPrecisionPlugin
from nemo.collections.nlp.models.language_modeling.megatron_retrieval_model import MegatronRetrievalModel
from nemo.collections.nlp.modules.common.megatron.mup.shape import make_base_shapes
@@ -46,7 +46,7 @@ def main(cfg) -> None:
scaler = None
if cfg.trainer.precision in [16, '16', '16-mixed']:
scaler = GradScaler(
- init_scale=cfg.model.get('native_amp_init_scale', 2 ** 32),
+ init_scale=cfg.model.get('native_amp_init_scale', 2**32),
growth_interval=cfg.model.get('native_amp_growth_interval', 1000),
hysteresis=cfg.model.get('hysteresis', 2),
)
diff --git a/examples/nlp/language_modeling/megatron_retro_eval.py b/examples/nlp/language_modeling/megatron_retro_eval.py
index 89e3fe9c3ddb..ac946b2adf42 100644
--- a/examples/nlp/language_modeling/megatron_retro_eval.py
+++ b/examples/nlp/language_modeling/megatron_retro_eval.py
@@ -16,8 +16,8 @@
import os
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf
-from pytorch_lightning.trainer.trainer import Trainer
from torch.utils.data import DataLoader, Dataset
from nemo.collections.nlp.models.language_modeling.megatron_retro_model import MegatronRetroModel
@@ -60,7 +60,9 @@ def __init__(self, sentences, neighbors):
self.sentences = sentences
self.neighbors = neighbors
- def __len__(self,):
+ def __len__(
+ self,
+ ):
return len(self.sentences)
def __getitem__(self, idx):
diff --git a/examples/nlp/language_modeling/megatron_retro_eval_legacy.py b/examples/nlp/language_modeling/megatron_retro_eval_legacy.py
index 69222acedd34..c51a8f536cc1 100644
--- a/examples/nlp/language_modeling/megatron_retro_eval_legacy.py
+++ b/examples/nlp/language_modeling/megatron_retro_eval_legacy.py
@@ -15,8 +15,8 @@
import os
from examples.nlp.language_modeling.megatron_gpt_eval import RequestDataSet
+from lightning.pytorch import Trainer
from omegaconf.omegaconf import OmegaConf, open_dict
-from pytorch_lightning import Trainer
from torch.utils.data import DataLoader
from nemo.collections.nlp.models.language_modeling.megatron_retrieval_model import MegatronRetrievalModel
@@ -69,7 +69,10 @@ def main(cfg) -> None:
save_restore_connector.model_extracted_dir = model_path
model_cfg = MegatronRetrievalModel.restore_from(
- model_path, trainer=trainer, return_config=True, save_restore_connector=save_restore_connector,
+ model_path,
+ trainer=trainer,
+ return_config=True,
+ save_restore_connector=save_restore_connector,
)
with open_dict(model_cfg):
@@ -89,7 +92,10 @@ def main(cfg) -> None:
cfg.pipeline_model_parallel_split_rank = model_cfg.get('pipeline_model_parallel_split_rank', 0)
model = MegatronRetrievalModel.restore_from(
- model_path, trainer=trainer, save_restore_connector=save_restore_connector, override_config_path=model_cfg,
+ model_path,
+ trainer=trainer,
+ save_restore_connector=save_restore_connector,
+ override_config_path=model_cfg,
)
length_params: LengthParam = {
diff --git a/examples/nlp/language_modeling/megatron_retro_fine_tune.py b/examples/nlp/language_modeling/megatron_retro_fine_tune.py
index 3fcaec156d9c..153a4b581135 100644
--- a/examples/nlp/language_modeling/megatron_retro_fine_tune.py
+++ b/examples/nlp/language_modeling/megatron_retro_fine_tune.py
@@ -15,12 +15,12 @@
import datetime
import os
+from lightning.pytorch import Trainer
+from lightning.pytorch.callbacks.timer import Timer
+from lightning.pytorch.plugins.environments import TorchElasticEnvironment
+from lightning.pytorch.plugins.precision import MixedPrecisionPlugin
+from lightning.pytorch.trainer.connectors.checkpoint_connector import _CheckpointConnector
from omegaconf.omegaconf import OmegaConf, open_dict
-from pytorch_lightning import Trainer
-from pytorch_lightning.callbacks.timer import Timer
-from pytorch_lightning.plugins.environments import TorchElasticEnvironment
-from pytorch_lightning.plugins.precision import MixedPrecisionPlugin
-from pytorch_lightning.trainer.connectors.checkpoint_connector import _CheckpointConnector
from nemo.collections.nlp.models.language_modeling.megatron_retro_fine_tune_model import MegatronRetroFinetuneModel
from nemo.collections.nlp.parts.nlp_overrides import (
@@ -87,7 +87,7 @@ def main(cfg) -> None:
scaler = None
if cfg.trainer.precision in [16, '16', '16-mixed']:
scaler = GradScaler(
- init_scale=cfg.model.get('native_amp_init_scale', 2 ** 32),
+ init_scale=cfg.model.get('native_amp_init_scale', 2**32),
growth_interval=cfg.model.get('native_amp_growth_interval', 1000),
hysteresis=cfg.model.get('hysteresis', 2),
)
@@ -118,7 +118,9 @@ def main(cfg) -> None:
# Override timer callback to a stateless one
for idx, callback in enumerate(trainer.callbacks):
if isinstance(callback, Timer):
- trainer.callbacks[idx] = StatelessTimer(cfg.trainer.max_time,)
+ trainer.callbacks[idx] = StatelessTimer(
+ cfg.trainer.max_time,
+ )
# load existing or init new soft prompt GPT model
if cfg.model.get("restore_path", None):
diff --git a/examples/nlp/language_modeling/megatron_retro_mutransfer_pretrain.py b/examples/nlp/language_modeling/megatron_retro_mutransfer_pretrain.py
index af6e22035def..775b75680ee9 100644
--- a/examples/nlp/language_modeling/megatron_retro_mutransfer_pretrain.py
+++ b/examples/nlp/language_modeling/megatron_retro_mutransfer_pretrain.py
@@ -12,11 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from lightning.pytorch import Trainer
+from lightning.pytorch.plugins.environments import TorchElasticEnvironment
+from lightning.pytorch.plugins.precision import MixedPrecisionPlugin
+from lightning.pytorch.trainer.connectors.checkpoint_connector import _CheckpointConnector
from omegaconf.omegaconf import OmegaConf, open_dict
-from pytorch_lightning import Trainer
-from pytorch_lightning.plugins.environments import TorchElasticEnvironment
-from pytorch_lightning.plugins.precision import MixedPrecisionPlugin
-from pytorch_lightning.trainer.connectors.checkpoint_connector import _CheckpointConnector
from nemo.collections.nlp.models.language_modeling.megatron_retrieval_model import MegatronRetrievalModel
from nemo.collections.nlp.modules.common.megatron.mup.optim import MuAdam, MuAdamW
@@ -52,7 +52,7 @@ def main(cfg) -> None:
scaler = None
if cfg.trainer.precision in [16, '16', '16-mixed']:
scaler = GradScaler(
- init_scale=cfg.model.get('native_amp_init_scale', 2 ** 32),
+ init_scale=cfg.model.get('native_amp_init_scale', 2**32),
growth_interval=cfg.model.get('native_amp_growth_interval', 1000),
hysteresis=cfg.model.get('hysteresis', 2),
)
diff --git a/examples/nlp/language_modeling/megatron_retro_pretraining_legacy.py b/examples/nlp/language_modeling/megatron_retro_pretraining_legacy.py
index 4653222b3438..298deafabc1c 100644
--- a/examples/nlp/language_modeling/megatron_retro_pretraining_legacy.py
+++ b/examples/nlp/language_modeling/megatron_retro_pretraining_legacy.py
@@ -14,11 +14,11 @@
import os
+from lightning.pytorch import Trainer
+from lightning.pytorch.plugins.environments import TorchElasticEnvironment
+from lightning.pytorch.plugins.precision import MixedPrecisionPlugin
+from lightning.pytorch.trainer.connectors.checkpoint_connector import _CheckpointConnector
from omegaconf.omegaconf import OmegaConf, open_dict
-from pytorch_lightning import Trainer
-from pytorch_lightning.plugins.environments import TorchElasticEnvironment
-from pytorch_lightning.plugins.precision import MixedPrecisionPlugin
-from pytorch_lightning.trainer.connectors.checkpoint_connector import _CheckpointConnector
from nemo.collections.nlp.models.language_modeling.megatron_retrieval_model import MegatronRetrievalModel
from nemo.collections.nlp.modules.common.megatron.megatron_init import initialize_model_parallel_for_nemo
@@ -51,7 +51,7 @@ def main(cfg) -> None:
scaler = None
if cfg.trainer.precision in [16, '16', '16-mixed']:
scaler = GradScaler(
- init_scale=cfg.model.get('native_amp_init_scale', 2 ** 32),
+ init_scale=cfg.model.get('native_amp_init_scale', 2**32),
growth_interval=cfg.model.get('native_amp_growth_interval', 1000),
hysteresis=cfg.model.get('hysteresis', 2),
)
diff --git a/examples/nlp/language_modeling/megatron_retro_qatask_eval.py b/examples/nlp/language_modeling/megatron_retro_qatask_eval.py
index b99bcafbab02..4e47157d5150 100644
--- a/examples/nlp/language_modeling/megatron_retro_qatask_eval.py
+++ b/examples/nlp/language_modeling/megatron_retro_qatask_eval.py
@@ -17,8 +17,8 @@
import os
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf
-from pytorch_lightning.trainer.trainer import Trainer
from torch.utils.data import DataLoader, Dataset
from nemo.collections.nlp.data.question_answering.input_example.qa_input_example import QAExample
@@ -63,7 +63,9 @@ def __init__(self, sentences, neighbors):
self.sentences = sentences
self.neighbors = neighbors
- def __len__(self,):
+ def __len__(
+ self,
+ ):
return len(self.sentences)
def __getitem__(self, idx):
diff --git a/examples/nlp/language_modeling/megatron_t5_eval.py b/examples/nlp/language_modeling/megatron_t5_eval.py
index 0b6ea54b6b99..57b48134101f 100644
--- a/examples/nlp/language_modeling/megatron_t5_eval.py
+++ b/examples/nlp/language_modeling/megatron_t5_eval.py
@@ -17,8 +17,8 @@
from argparse import ArgumentParser
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf.omegaconf import OmegaConf, open_dict
-from pytorch_lightning.trainer.trainer import Trainer
from torch.utils.data import DataLoader
from nemo.collections.nlp.data.language_modeling.megatron.request_dataset import T5RequestDataset
@@ -40,13 +40,22 @@ def main():
"--tokens_to_generate", type=int, default="16", required=False, help="How many tokens to add to prompt"
)
parser.add_argument(
- "--tensor_model_parallel_size", type=int, default=-1, required=False,
+ "--tensor_model_parallel_size",
+ type=int,
+ default=-1,
+ required=False,
)
parser.add_argument(
- "--pipeline_model_parallel_size", type=int, default=-1, required=False,
+ "--pipeline_model_parallel_size",
+ type=int,
+ default=-1,
+ required=False,
)
parser.add_argument(
- "--pipeline_model_parallel_split_rank", type=int, default=-1, required=False,
+ "--pipeline_model_parallel_split_rank",
+ type=int,
+ default=-1,
+ required=False,
)
parser.add_argument("--precision", default="16", type=str, help="PyTorch Lightning Trainer precision flag")
parser.add_argument("--decoder_starts_with_pad", action="store_true", help="Decoder starts with pad token")
diff --git a/examples/nlp/language_modeling/megatron_t5_lm_adaptation_finetune.py b/examples/nlp/language_modeling/megatron_t5_lm_adaptation_finetune.py
index 9e392d913171..4137213023ee 100644
--- a/examples/nlp/language_modeling/megatron_t5_lm_adaptation_finetune.py
+++ b/examples/nlp/language_modeling/megatron_t5_lm_adaptation_finetune.py
@@ -13,11 +13,11 @@
# limitations under the License.
+from lightning.pytorch import Trainer
+from lightning.pytorch.callbacks import ModelSummary
+from lightning.pytorch.plugins.environments import TorchElasticEnvironment
+from lightning.pytorch.trainer.connectors.checkpoint_connector import _CheckpointConnector
from omegaconf.omegaconf import OmegaConf, open_dict
-from pytorch_lightning import Trainer
-from pytorch_lightning.callbacks import ModelSummary
-from pytorch_lightning.plugins.environments import TorchElasticEnvironment
-from pytorch_lightning.trainer.connectors.checkpoint_connector import _CheckpointConnector
from nemo.collections.nlp.models.language_modeling.megatron_t5_model import MegatronT5Model
from nemo.collections.nlp.parts.nlp_overrides import (
@@ -49,7 +49,7 @@ def main(cfg) -> None:
scaler = None
if cfg.trainer.precision in [16, '16', '16-mixed']:
scaler = GradScaler(
- init_scale=cfg.model.get('native_amp_init_scale', 2 ** 32),
+ init_scale=cfg.model.get('native_amp_init_scale', 2**32),
growth_interval=cfg.model.get('native_amp_growth_interval', 1000),
hysteresis=cfg.model.get('hysteresis', 2),
)
diff --git a/examples/nlp/language_modeling/megatron_t5_seq2seq_eval.py b/examples/nlp/language_modeling/megatron_t5_seq2seq_eval.py
index ba8ea6492da3..ae6e1744395d 100644
--- a/examples/nlp/language_modeling/megatron_t5_seq2seq_eval.py
+++ b/examples/nlp/language_modeling/megatron_t5_seq2seq_eval.py
@@ -12,11 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from lightning.pytorch import Trainer
+from lightning.pytorch.plugins.environments import TorchElasticEnvironment
+from lightning.pytorch.plugins.precision import MixedPrecisionPlugin
from megatron_t5_seq2seq_finetune import load_from_checkpoint_dir, load_from_nemo, validate_checkpoint_loading_args
from omegaconf.omegaconf import OmegaConf, open_dict
-from pytorch_lightning import Trainer
-from pytorch_lightning.plugins.environments import TorchElasticEnvironment
-from pytorch_lightning.plugins.precision import MixedPrecisionPlugin
from nemo.collections.nlp.models.language_modeling.megatron_glue_model import MegatronT5GLUEModel
from nemo.collections.nlp.models.language_modeling.megatron_t0_model import MegatronT0Model
@@ -82,7 +82,7 @@ def main(cfg) -> None:
scaler = None
if cfg.trainer.precision in [16, '16', '16-mixed']:
scaler = GradScaler(
- init_scale=cfg.model.get('native_amp_init_scale', 2 ** 32),
+ init_scale=cfg.model.get('native_amp_init_scale', 2**32),
growth_interval=cfg.model.get('native_amp_growth_interval', 1000),
hysteresis=cfg.model.get('hysteresis', 2),
)
diff --git a/examples/nlp/language_modeling/megatron_t5_seq2seq_finetune.py b/examples/nlp/language_modeling/megatron_t5_seq2seq_finetune.py
index 2409e99ad951..5f63289be27a 100644
--- a/examples/nlp/language_modeling/megatron_t5_seq2seq_finetune.py
+++ b/examples/nlp/language_modeling/megatron_t5_seq2seq_finetune.py
@@ -16,10 +16,10 @@
import tempfile
import torch.multiprocessing as mp
+from lightning.pytorch import Trainer
+from lightning.pytorch.plugins.environments import TorchElasticEnvironment
+from lightning.pytorch.trainer.connectors.checkpoint_connector import _CheckpointConnector
from omegaconf.omegaconf import OmegaConf, open_dict
-from pytorch_lightning import Trainer
-from pytorch_lightning.plugins.environments import TorchElasticEnvironment
-from pytorch_lightning.trainer.connectors.checkpoint_connector import _CheckpointConnector
from nemo.collections.nlp.models.language_modeling.megatron_glue_model import MegatronT5GLUEModel
from nemo.collections.nlp.models.language_modeling.megatron_t0_model import MegatronT0Model
diff --git a/examples/nlp/language_modeling/transformer_lm.py b/examples/nlp/language_modeling/transformer_lm.py
index caaa0e0d2935..3e97e28bb35e 100644
--- a/examples/nlp/language_modeling/transformer_lm.py
+++ b/examples/nlp/language_modeling/transformer_lm.py
@@ -13,7 +13,7 @@
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import DictConfig, OmegaConf
from nemo.collections.nlp.models.language_modeling import TransformerLMModel
diff --git a/examples/nlp/language_modeling/upcycle_dense_to_moe.py b/examples/nlp/language_modeling/upcycle_dense_to_moe.py
index a1f4b6000b6f..f4a5fc017d97 100644
--- a/examples/nlp/language_modeling/upcycle_dense_to_moe.py
+++ b/examples/nlp/language_modeling/upcycle_dense_to_moe.py
@@ -26,7 +26,7 @@
import torch
import torch.nn
-from pytorch_lightning.trainer.trainer import Trainer
+from lightning.pytorch.trainer.trainer import Trainer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
from nemo.collections.nlp.parts.nlp_overrides import NLPDDPStrategy, NLPSaveRestoreConnector
diff --git a/examples/nlp/machine_translation/enc_dec_nmt-bottleneck.py b/examples/nlp/machine_translation/enc_dec_nmt-bottleneck.py
index b1743e03188e..58c948f11458 100644
--- a/examples/nlp/machine_translation/enc_dec_nmt-bottleneck.py
+++ b/examples/nlp/machine_translation/enc_dec_nmt-bottleneck.py
@@ -15,8 +15,8 @@
from dataclasses import dataclass
from typing import Optional
+from lightning.pytorch import Trainer
from omegaconf import OmegaConf
-from pytorch_lightning import Trainer
from nemo.collections.nlp.data.machine_translation.preproc_mt_data import MTDataPreproc
from nemo.collections.nlp.models.machine_translation.mt_enc_dec_bottleneck_model import MTBottleneckModel
@@ -29,7 +29,6 @@
from nemo.utils.config_utils import update_model_config
from nemo.utils.exp_manager import ExpManagerConfig, exp_manager
-
"""
Usage:
1. If you need to start docker and install NeMo, otherwise skip this step:
diff --git a/examples/nlp/machine_translation/enc_dec_nmt.py b/examples/nlp/machine_translation/enc_dec_nmt.py
index 57b9f84c39ce..b901ba28a4db 100644
--- a/examples/nlp/machine_translation/enc_dec_nmt.py
+++ b/examples/nlp/machine_translation/enc_dec_nmt.py
@@ -15,8 +15,8 @@
from dataclasses import dataclass
from typing import Optional
+from lightning.pytorch import Trainer
from omegaconf import OmegaConf
-from pytorch_lightning import Trainer
from nemo.collections.nlp.data.machine_translation.preproc_mt_data import MTDataPreproc
from nemo.collections.nlp.models.machine_translation.mt_enc_dec_config import MTEncDecModelConfig
@@ -29,7 +29,6 @@
from nemo.utils.config_utils import update_model_config
from nemo.utils.exp_manager import ExpManagerConfig, exp_manager
-
"""
Usage:
1. If you need to start docker and install NeMo, otherwise skip this step:
diff --git a/examples/nlp/machine_translation/enc_dec_nmt_finetune.py b/examples/nlp/machine_translation/enc_dec_nmt_finetune.py
index 16a635d09dee..688461a7b491 100644
--- a/examples/nlp/machine_translation/enc_dec_nmt_finetune.py
+++ b/examples/nlp/machine_translation/enc_dec_nmt_finetune.py
@@ -15,9 +15,9 @@
from dataclasses import dataclass
from typing import Optional
+from lightning.pytorch import Trainer
from omegaconf import OmegaConf
from omegaconf.omegaconf import MISSING
-from pytorch_lightning import Trainer
from nemo.collections.nlp.models.machine_translation.mt_enc_dec_config import MTEncDecModelConfig
from nemo.collections.nlp.models.machine_translation.mt_enc_dec_model import MTEncDecModel
@@ -29,7 +29,6 @@
from nemo.utils.config_utils import update_model_config
from nemo.utils.exp_manager import ExpManagerConfig, exp_manager
-
"""
Usage:
python enc_dec_nmt_finetune.py \
diff --git a/examples/nlp/machine_translation/megatron_nmt_training.py b/examples/nlp/machine_translation/megatron_nmt_training.py
index 7946500f92e9..5ff70a7a863c 100644
--- a/examples/nlp/machine_translation/megatron_nmt_training.py
+++ b/examples/nlp/machine_translation/megatron_nmt_training.py
@@ -14,11 +14,11 @@
import torch.multiprocessing as mp
+from lightning.pytorch import Trainer
+from lightning.pytorch.callbacks import ModelSummary
+from lightning.pytorch.plugins.environments import TorchElasticEnvironment
+from lightning.pytorch.trainer.connectors.checkpoint_connector import _CheckpointConnector
from omegaconf.omegaconf import OmegaConf, open_dict
-from pytorch_lightning import Trainer
-from pytorch_lightning.callbacks import ModelSummary
-from pytorch_lightning.plugins.environments import TorchElasticEnvironment
-from pytorch_lightning.trainer.connectors.checkpoint_connector import _CheckpointConnector
from nemo.collections.nlp.models.language_modeling.megatron_bart_model import MegatronBARTModel
from nemo.collections.nlp.models.language_modeling.megatron_t5_model import MegatronT5Model
@@ -53,7 +53,7 @@ def main(cfg) -> None:
scaler = None
if cfg.trainer.precision in [16, '16', '16-mixed']:
scaler = GradScaler(
- init_scale=cfg.model.get('native_amp_init_scale', 2 ** 32),
+ init_scale=cfg.model.get('native_amp_init_scale', 2**32),
growth_interval=cfg.model.get('native_amp_growth_interval', 1000),
hysteresis=cfg.model.get('hysteresis', 2),
)
diff --git a/examples/nlp/machine_translation/nmt_transformer_infer_megatron.py b/examples/nlp/machine_translation/nmt_transformer_infer_megatron.py
index fcf1fb8d1796..349155101a5d 100644
--- a/examples/nlp/machine_translation/nmt_transformer_infer_megatron.py
+++ b/examples/nlp/machine_translation/nmt_transformer_infer_megatron.py
@@ -24,8 +24,8 @@
import os
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf.omegaconf import OmegaConf, open_dict
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.models.machine_translation.megatron_nmt_model import MegatronNMTModel
from nemo.collections.nlp.modules.common.megatron.megatron_init import fake_initialize_model_parallel
diff --git a/examples/nlp/question_answering/question_answering.py b/examples/nlp/question_answering/question_answering.py
index fcde03582e5c..37bd43a4b0fb 100644
--- a/examples/nlp/question_answering/question_answering.py
+++ b/examples/nlp/question_answering/question_answering.py
@@ -14,7 +14,7 @@
import os
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import DictConfig, OmegaConf
from nemo.collections.nlp.models.question_answering.qa_bert_model import BERTQAModel
diff --git a/examples/nlp/spellchecking_asr_customization/helpers.py b/examples/nlp/spellchecking_asr_customization/helpers.py
index 2db11b0e7d96..8e3957d34cc1 100644
--- a/examples/nlp/spellchecking_asr_customization/helpers.py
+++ b/examples/nlp/spellchecking_asr_customization/helpers.py
@@ -16,7 +16,7 @@
import os
from typing import Tuple
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import DictConfig
from nemo.collections.nlp.models import SpellcheckingAsrCustomizationModel
@@ -32,7 +32,7 @@
def instantiate_model_and_trainer(
cfg: DictConfig, model_name: str, do_training: bool
) -> Tuple[pl.Trainer, SpellcheckingAsrCustomizationModel]:
- """ Function for instantiating a model and a trainer
+ """Function for instantiating a model and a trainer
Args:
cfg: The config used to instantiate the model and the trainer.
model_name: A str indicating the model direction; currently only 'itn' is supported.
diff --git a/examples/nlp/text2sparql/evaluate_text2sparql.py b/examples/nlp/text2sparql/evaluate_text2sparql.py
index 52baa2a7e78c..774ced98e8ec 100644
--- a/examples/nlp/text2sparql/evaluate_text2sparql.py
+++ b/examples/nlp/text2sparql/evaluate_text2sparql.py
@@ -39,7 +39,7 @@
import os
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import DictConfig, OmegaConf
from nemo.collections.nlp.models.text2sparql import Text2SparqlModel
diff --git a/examples/nlp/text2sparql/text2sparql.py b/examples/nlp/text2sparql/text2sparql.py
index 1353a3967735..d70a7e616950 100644
--- a/examples/nlp/text2sparql/text2sparql.py
+++ b/examples/nlp/text2sparql/text2sparql.py
@@ -88,7 +88,7 @@
exp_manager.exp_dir=./NeMo_logs
"""
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import DictConfig, OmegaConf
from nemo.collections.nlp.models.text2sparql import Text2SparqlModel
diff --git a/examples/nlp/text_classification/model_parallel_text_classification_evaluation.py b/examples/nlp/text_classification/model_parallel_text_classification_evaluation.py
index ab3322f552c1..cf9b6d8dd2e4 100644
--- a/examples/nlp/text_classification/model_parallel_text_classification_evaluation.py
+++ b/examples/nlp/text_classification/model_parallel_text_classification_evaluation.py
@@ -15,7 +15,7 @@
"""
This script runs model parallel text classification evaluation.
"""
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import DictConfig, OmegaConf
from nemo.collections.nlp.models.text_classification import TextClassificationModel
diff --git a/examples/nlp/text_classification/text_classification_with_bert.py b/examples/nlp/text_classification/text_classification_with_bert.py
index 01e8fae9bba5..a6c84b4e337a 100644
--- a/examples/nlp/text_classification/text_classification_with_bert.py
+++ b/examples/nlp/text_classification/text_classification_with_bert.py
@@ -95,7 +95,7 @@
eval_model.set_trainer(eval_trainer)
eval_trainer.test(model=eval_model, verbose=False)
"""
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import DictConfig, OmegaConf
from nemo.collections.nlp.models.text_classification import TextClassificationModel
diff --git a/examples/nlp/text_normalization_as_tagging/helpers.py b/examples/nlp/text_normalization_as_tagging/helpers.py
index 347b05b25fba..de74794f8f40 100644
--- a/examples/nlp/text_normalization_as_tagging/helpers.py
+++ b/examples/nlp/text_normalization_as_tagging/helpers.py
@@ -16,7 +16,7 @@
import os
from typing import Tuple
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import DictConfig
from nemo.collections.nlp.models import ThutmoseTaggerModel
@@ -31,7 +31,7 @@
def instantiate_model_and_trainer(
cfg: DictConfig, model_name: str, do_training: bool
) -> Tuple[pl.Trainer, ThutmoseTaggerModel]:
- """ Function for instantiating a model and a trainer
+ """Function for instantiating a model and a trainer
Args:
cfg: The config used to instantiate the model and the trainer.
model_name: A str indicating the model direction; currently only 'itn' is supported.
diff --git a/examples/nlp/token_classification/punctuation_capitalization_lexical_audio_train_evaluate.py b/examples/nlp/token_classification/punctuation_capitalization_lexical_audio_train_evaluate.py
index 149a9a4515e2..508e434bb598 100644
--- a/examples/nlp/token_classification/punctuation_capitalization_lexical_audio_train_evaluate.py
+++ b/examples/nlp/token_classification/punctuation_capitalization_lexical_audio_train_evaluate.py
@@ -14,7 +14,7 @@
import os
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
from omegaconf import DictConfig, OmegaConf
diff --git a/examples/nlp/token_classification/punctuation_capitalization_train_evaluate.py b/examples/nlp/token_classification/punctuation_capitalization_train_evaluate.py
index e983540a68b2..b16e1ecd0bdc 100644
--- a/examples/nlp/token_classification/punctuation_capitalization_train_evaluate.py
+++ b/examples/nlp/token_classification/punctuation_capitalization_train_evaluate.py
@@ -14,7 +14,7 @@
import os
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
from omegaconf import DictConfig, OmegaConf
diff --git a/examples/nlp/token_classification/token_classification_evaluate.py b/examples/nlp/token_classification/token_classification_evaluate.py
index b69212f59de4..764aa90c8593 100644
--- a/examples/nlp/token_classification/token_classification_evaluate.py
+++ b/examples/nlp/token_classification/token_classification_evaluate.py
@@ -14,7 +14,7 @@
import os
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import DictConfig
from nemo.collections.nlp.models import TokenClassificationModel
diff --git a/examples/nlp/token_classification/token_classification_train.py b/examples/nlp/token_classification/token_classification_train.py
index 56c1487cf9c5..536327aff6da 100644
--- a/examples/nlp/token_classification/token_classification_train.py
+++ b/examples/nlp/token_classification/token_classification_train.py
@@ -14,7 +14,7 @@
import os
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import DictConfig, OmegaConf
from nemo.collections.nlp.models import TokenClassificationModel
diff --git a/examples/nlp/zero_shot_intent_recognition/zero_shot_intent_train.py b/examples/nlp/zero_shot_intent_recognition/zero_shot_intent_train.py
index 5b91049e965d..4dbbf01c935e 100644
--- a/examples/nlp/zero_shot_intent_recognition/zero_shot_intent_train.py
+++ b/examples/nlp/zero_shot_intent_recognition/zero_shot_intent_train.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import DictConfig, OmegaConf
from nemo.collections.nlp.models import ZeroShotIntentModel
diff --git a/examples/slu/speech_intent_slot/eval_utils/inference.py b/examples/slu/speech_intent_slot/eval_utils/inference.py
index 9bd76c76822d..241f6463ed76 100644
--- a/examples/slu/speech_intent_slot/eval_utils/inference.py
+++ b/examples/slu/speech_intent_slot/eval_utils/inference.py
@@ -21,7 +21,7 @@
from pathlib import Path
from typing import List, Optional
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
from omegaconf import OmegaConf
from tqdm.auto import tqdm
diff --git a/examples/slu/speech_intent_slot/speech_intent_slot_train.py b/examples/slu/speech_intent_slot/speech_intent_slot_train.py
index a9999d4d4682..f8732ec757e1 100644
--- a/examples/slu/speech_intent_slot/speech_intent_slot_train.py
+++ b/examples/slu/speech_intent_slot/speech_intent_slot_train.py
@@ -66,7 +66,7 @@
from pathlib import Path
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
from omegaconf import OmegaConf
diff --git a/examples/speaker_tasks/diarization/clustering_diarizer/offline_diar_infer.py b/examples/speaker_tasks/diarization/clustering_diarizer/offline_diar_infer.py
index 35077a5fe415..5c0f956c2e3c 100644
--- a/examples/speaker_tasks/diarization/clustering_diarizer/offline_diar_infer.py
+++ b/examples/speaker_tasks/diarization/clustering_diarizer/offline_diar_infer.py
@@ -12,8 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from lightning.pytorch import seed_everything
from omegaconf import OmegaConf
-from pytorch_lightning import seed_everything
from nemo.collections.asr.models import ClusteringDiarizer
from nemo.core.config import hydra_runner
diff --git a/examples/speaker_tasks/diarization/neural_diarizer/multiscale_diar_decoder.py b/examples/speaker_tasks/diarization/neural_diarizer/multiscale_diar_decoder.py
index 984b5ce93464..bc1db4dc1126 100644
--- a/examples/speaker_tasks/diarization/neural_diarizer/multiscale_diar_decoder.py
+++ b/examples/speaker_tasks/diarization/neural_diarizer/multiscale_diar_decoder.py
@@ -12,9 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
+from lightning.pytorch import seed_everything
from omegaconf import OmegaConf
-from pytorch_lightning import seed_everything
from nemo.collections.asr.models import EncDecDiarLabelModel
from nemo.core.config import hydra_runner
diff --git a/examples/speaker_tasks/recognition/speaker_identification_infer.py b/examples/speaker_tasks/recognition/speaker_identification_infer.py
index 90f930fcbfa6..7075a9f1f92a 100644
--- a/examples/speaker_tasks/recognition/speaker_identification_infer.py
+++ b/examples/speaker_tasks/recognition/speaker_identification_infer.py
@@ -16,8 +16,8 @@
import numpy as np
import torch
+from lightning.pytorch import seed_everything
from omegaconf import OmegaConf
-from pytorch_lightning import seed_everything
from nemo.collections.asr.data.audio_to_label import AudioToSpeechLabelDataset
from nemo.collections.asr.models import EncDecSpeakerLabelModel
@@ -55,10 +55,18 @@ def main(cfg):
speaker_model = EncDecSpeakerLabelModel.from_pretrained(model_path)
enroll_embs, _, enroll_truelabels, _ = speaker_model.batch_inference(
- enrollment_manifest, batch_size, sample_rate, device=device,
+ enrollment_manifest,
+ batch_size,
+ sample_rate,
+ device=device,
)
- test_embs, _, _, _ = speaker_model.batch_inference(test_manifest, batch_size, sample_rate, device=device,)
+ test_embs, _, _, _ = speaker_model.batch_inference(
+ test_manifest,
+ batch_size,
+ sample_rate,
+ device=device,
+ )
# length normalize
enroll_embs = enroll_embs / (np.linalg.norm(enroll_embs, ord=2, axis=-1, keepdims=True))
@@ -91,7 +99,12 @@ def main(cfg):
"number of labels mis match. Make sure you trained or finetuned neural classifier with labels from enrollement manifest_filepath"
)
- _, test_logits, _, _ = speaker_model.batch_inference(test_manifest, batch_size, sample_rate, device=device,)
+ _, test_logits, _, _ = speaker_model.batch_inference(
+ test_manifest,
+ batch_size,
+ sample_rate,
+ device=device,
+ )
matched_labels = test_logits.argmax(axis=-1)
with open(test_manifest, 'rb') as f1, open(out_manifest, 'w', encoding='utf-8') as f2:
diff --git a/examples/speaker_tasks/recognition/speaker_reco.py b/examples/speaker_tasks/recognition/speaker_reco.py
index a8acd4de4a3f..ac5cb12ac836 100644
--- a/examples/speaker_tasks/recognition/speaker_reco.py
+++ b/examples/speaker_tasks/recognition/speaker_reco.py
@@ -14,10 +14,10 @@
import os
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
+from lightning.pytorch import seed_everything
from omegaconf import OmegaConf
-from pytorch_lightning import seed_everything
from nemo.collections.asr.models import EncDecSpeakerLabelModel
from nemo.core.config import hydra_runner
diff --git a/examples/speaker_tasks/recognition/speaker_reco_finetune.py b/examples/speaker_tasks/recognition/speaker_reco_finetune.py
index 884e5a60bc59..502d016a920d 100644
--- a/examples/speaker_tasks/recognition/speaker_reco_finetune.py
+++ b/examples/speaker_tasks/recognition/speaker_reco_finetune.py
@@ -14,10 +14,10 @@
import os
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
+from lightning.pytorch import seed_everything
from omegaconf import OmegaConf
-from pytorch_lightning import seed_everything
from nemo.collections.asr.models import EncDecSpeakerLabelModel
from nemo.core.config import hydra_runner
diff --git a/examples/tts/aligner.py b/examples/tts/aligner.py
index e32c0444ca68..939b8dbcf11f 100644
--- a/examples/tts/aligner.py
+++ b/examples/tts/aligner.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from nemo.collections.common.callbacks import LogEpochTimeCallback
from nemo.collections.tts.models import AlignerModel
diff --git a/examples/tts/audio_codec.py b/examples/tts/audio_codec.py
index 5fc4b6fd0afd..d875a3037ba3 100644
--- a/examples/tts/audio_codec.py
+++ b/examples/tts/audio_codec.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import OmegaConf
from nemo.collections.tts.models import AudioCodecModel
diff --git a/examples/tts/fastpitch.py b/examples/tts/fastpitch.py
index a8e6ecdc902d..7fd584b773e4 100644
--- a/examples/tts/fastpitch.py
+++ b/examples/tts/fastpitch.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from nemo.collections.common.callbacks import LogEpochTimeCallback
from nemo.collections.tts.models import FastPitchModel
diff --git a/examples/tts/fastpitch_finetune.py b/examples/tts/fastpitch_finetune.py
index 64b5e8b90625..9bdf704c514c 100644
--- a/examples/tts/fastpitch_finetune.py
+++ b/examples/tts/fastpitch_finetune.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from nemo.collections.common.callbacks import LogEpochTimeCallback
from nemo.collections.tts.models import FastPitchModel
diff --git a/examples/tts/fastpitch_finetune_adapters.py b/examples/tts/fastpitch_finetune_adapters.py
index 1361d63fb4cf..9b50d70ab15e 100644
--- a/examples/tts/fastpitch_finetune_adapters.py
+++ b/examples/tts/fastpitch_finetune_adapters.py
@@ -15,7 +15,7 @@
import os
from dataclasses import is_dataclass
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import DictConfig, OmegaConf, open_dict
from nemo.collections.common.callbacks import LogEpochTimeCallback
diff --git a/examples/tts/fastpitch_ssl.py b/examples/tts/fastpitch_ssl.py
index 1101ac1eeaf7..b92983a4bfb1 100644
--- a/examples/tts/fastpitch_ssl.py
+++ b/examples/tts/fastpitch_ssl.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from nemo.collections.common.callbacks import LogEpochTimeCallback
from nemo.collections.tts.models import fastpitch_ssl, hifigan
diff --git a/examples/tts/g2p/g2p_heteronym_classification_inference.py b/examples/tts/g2p/g2p_heteronym_classification_inference.py
index 61262c41a340..89a563e9b683 100644
--- a/examples/tts/g2p/g2p_heteronym_classification_inference.py
+++ b/examples/tts/g2p/g2p_heteronym_classification_inference.py
@@ -18,7 +18,7 @@
from dataclasses import dataclass, is_dataclass
from typing import Optional
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
from omegaconf import OmegaConf
@@ -56,9 +56,9 @@ class TranscriptionConfig:
# path to .json manifest inference, if not provided, interactive mode will be enabled
manifest: Optional[str] = None # Path to .json manifest
- output_manifest: Optional[
- str
- ] = "predictions.json" # Path to .json manifest to save prediction, will be saved in "pred_text" field
+ output_manifest: Optional[str] = (
+ "predictions.json" # Path to .json manifest to save prediction, will be saved in "pred_text" field
+ )
grapheme_field: str = "text_graphemes" # name of the field in .json manifest for input grapheme text
# mapping from wordid predicted by the model to phonemes, e.g.,
@@ -132,9 +132,10 @@ def main(cfg):
save_errors = True
correct = 0
total = 0
- with open(cfg.output_manifest, "r", encoding="utf-8") as f_preds, open(
- cfg.errors_file, "w", encoding="utf-8"
- ) as f_errors:
+ with (
+ open(cfg.output_manifest, "r", encoding="utf-8") as f_preds,
+ open(cfg.errors_file, "w", encoding="utf-8") as f_errors,
+ ):
for line in f_preds:
line = json.loads(line)
predictions = line["pred_wordid"]
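The rewritten with statement above uses parenthesized context managers, which are officially supported from Python 3.10 onward; on older interpreters the original implicit continuation (or contextlib.ExitStack) is needed. A minimal standalone form:

# Parenthesized context managers (Python >= 3.10), as used in the hunk above.
with (
    open("preds.json", "w", encoding="utf-8") as f_preds,
    open("errors.txt", "w", encoding="utf-8") as f_errors,
):
    f_preds.write("{}\n")
    f_errors.write("ok\n")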
diff --git a/examples/tts/g2p/g2p_heteronym_classification_train_and_evaluate.py b/examples/tts/g2p/g2p_heteronym_classification_train_and_evaluate.py
index 613865618501..f86a0a3934e4 100644
--- a/examples/tts/g2p/g2p_heteronym_classification_train_and_evaluate.py
+++ b/examples/tts/g2p/g2p_heteronym_classification_train_and_evaluate.py
@@ -14,7 +14,7 @@
import os
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
from nemo.collections.common.callbacks import LogEpochTimeCallback
diff --git a/examples/tts/g2p/g2p_inference.py b/examples/tts/g2p/g2p_inference.py
index e7bffa888653..a9da11fcffdb 100644
--- a/examples/tts/g2p/g2p_inference.py
+++ b/examples/tts/g2p/g2p_inference.py
@@ -16,7 +16,7 @@
from dataclasses import dataclass, is_dataclass
from typing import Optional
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
from omegaconf import OmegaConf
from utils import get_metrics
@@ -41,23 +41,23 @@ class TranscriptionConfig:
# Required configs
pretrained_model: str # Path to a .nemo file or Name of a pretrained model
manifest_filepath: str # Path to .json manifest file
- phoneme_field: Optional[
- str
- ] = None # name of the field in manifest_filepath for ground truth phonemes, default during training "text"
+ phoneme_field: Optional[str] = (
+ None # name of the field in manifest_filepath for ground truth phonemes, default during training "text"
+ )
grapheme_field: Optional[str] = "text_graphemes" # name of the field in manifest_filepath for input grapheme text
# General configs
- output_file: Optional[
- str
- ] = None # Path to .json manifest file to save predictions, will be saved in "target_field"
+ output_file: Optional[str] = (
+ None # Path to .json manifest file to save predictions, will be saved in "target_field"
+ )
pred_field: Optional[str] = "pred_text" # name of the field in the output_file to save predictions
batch_size: int = 32 # Batch size to use for inference
num_workers: int = 0 # Number of workers to use for DataLoader during inference
# Config for heteronyms correction
- pretrained_heteronyms_model: Optional[
- str
- ] = None # Path to a .nemo file or a Name of a pretrained model to disambiguate heteronyms (Optional)
+ pretrained_heteronyms_model: Optional[str] = (
+ None # Path to a .nemo file or the name of a pretrained model to disambiguate heteronyms (optional)
+ )
@hydra_runner(config_name="TranscriptionConfig", schema=TranscriptionConfig)
diff --git a/examples/tts/g2p/g2p_train_and_evaluate.py b/examples/tts/g2p/g2p_train_and_evaluate.py
index ff7b2b0675ea..319e1fb6a776 100644
--- a/examples/tts/g2p/g2p_train_and_evaluate.py
+++ b/examples/tts/g2p/g2p_train_and_evaluate.py
@@ -14,7 +14,7 @@
import os
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
from utils import get_model
diff --git a/examples/tts/hifigan.py b/examples/tts/hifigan.py
index 5c3406a2f24c..6cf5c7a5aac4 100644
--- a/examples/tts/hifigan.py
+++ b/examples/tts/hifigan.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from nemo.collections.tts.models import HifiGanModel
from nemo.core.config import hydra_runner
diff --git a/examples/tts/hifigan_finetune.py b/examples/tts/hifigan_finetune.py
index f0e2513404fd..328e1f423903 100644
--- a/examples/tts/hifigan_finetune.py
+++ b/examples/tts/hifigan_finetune.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from nemo.collections.tts.models import HifiGanModel
from nemo.core.config import hydra_runner
diff --git a/examples/tts/mixer_tts.py b/examples/tts/mixer_tts.py
index 61a188f53969..53f55d93bcda 100644
--- a/examples/tts/mixer_tts.py
+++ b/examples/tts/mixer_tts.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from nemo.collections.common.callbacks import LogEpochTimeCallback
from nemo.collections.tts.models import MixerTTSModel
diff --git a/examples/tts/radtts.py b/examples/tts/radtts.py
index 09bf69a2d6e5..4b3b0e62da87 100644
--- a/examples/tts/radtts.py
+++ b/examples/tts/radtts.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from nemo.collections.common.callbacks import LogEpochTimeCallback
from nemo.collections.tts.models.radtts import RadTTSModel
diff --git a/examples/tts/spectrogram_enhancer.py b/examples/tts/spectrogram_enhancer.py
index 336729236d74..cd91ef3cb815 100644
--- a/examples/tts/spectrogram_enhancer.py
+++ b/examples/tts/spectrogram_enhancer.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from nemo.collections.tts.models import SpectrogramEnhancerModel
from nemo.core.config import hydra_runner
diff --git a/examples/tts/speechllm/conf/megatron_t5_speechllm_inference.yaml b/examples/tts/speechllm/conf/megatron_t5_speechllm_inference.yaml
new file mode 100644
index 000000000000..8b37077bfdd5
--- /dev/null
+++ b/examples/tts/speechllm/conf/megatron_t5_speechllm_inference.yaml
@@ -0,0 +1,160 @@
+name: megatron_t5_speechllm_tts_inference
+checkpoint_path: ???
+
+trainer:
+ devices: 1
+ accelerator: gpu
+ num_nodes: 1
+ precision: 32
+ logger: False
+ enable_checkpointing: False
+ use_distributed_sampler: False
+ max_epochs: 10000
+ max_steps: -1
+ log_every_n_steps: 10
+ val_check_interval: null
+ check_val_every_n_epoch: 3
+ gradient_clip_val: 1.0
+
+exp_manager:
+ exp_dir: null
+ name: ${name}
+ create_wandb_logger: False
+ resume_if_exists: False
+ resume_ignore_no_checkpoint: True
+ create_checkpoint_callback: True
+ checkpoint_callback_params:
+ monitor: val_loss
+ save_top_k: 2
+ mode: min
+ save_nemo_on_train_end: False # Should be False; the correct prompt-learning model file is saved at model.nemo_path, set below
+ filename: "megatron_t5_speechllm_tts--{${exp_manager.checkpoint_callback_params.monitor}:.3f}-{step}"
+ model_parallel_size: ${model.tensor_model_parallel_size}
+ save_best_model: True
+ create_early_stopping_callback: False
+ early_stopping_callback_params:
+ monitor: "val_loss"
+ mode: "min"
+ min_delta: 0.001
+ patience: 10
+ verbose: True
+
+model:
+ seed: 1234
+ nemo_path: ${name}.nemo # .nemo filename/absolute path to where the virtual prompt model parameters will be saved
+ virtual_prompt_style: "p-tuning" # one of 'prompt-tuning', 'p-tuning', or 'inference'
+ tensor_model_parallel_size: 1
+ pipeline_model_parallel_size: 1
+ global_batch_size: 16
+ micro_batch_size: 16 # micro batch size should equal global batch size when pipeline parallel = 1
+ validation_global_batch_size: ${model.global_batch_size}
+ validation_micro_batch_size: ${model.micro_batch_size}
+ validation_drop_last: False
+ report_validation_metric: False
+ validation_metric: accuracy
+ num_speech_tokens: 10112 # Vocabulary size pertaining to speech
+ seq_pattern: "parallel" # parallel, delay_parallel, flatten
+ temperature: 0.85 # Temperature to be used for inference
+ top_k: 80 # Top k to be used for inference
+ max_inference_timesteps: 1000 # Maximum number of timesteps to run inference for
+
+ restore_path: null # Path to an existing p-tuned/prompt tuned .nemo model you wish to add new tasks to or run inference with
+ language_model_path: ??? # Path to the pretrained T5 language model .nemo file, always required
+ save_nemo_on_validation_end: True # Saves an inference-ready .nemo file every time a checkpoint is saved during training.
+ existing_tasks: []
+ new_tasks: ["squad"]
+ codecmodel_type: nemo_codec
+ codecmodel_path: ???
+ english_only_model: true
+ context_conditioning: decoder
+ use_flash_attention: false
+ lm_vocab_size: 30000
+ task_templates:
+ - taskname: "squad"
+ prompt_template: "<|VIRTUAL_PROMPT_0|> {context} {question} {answer}"
+ total_virtual_tokens: 3
+ virtual_token_splits: [3]
+ truncate_field: context
+ answer_field: answer
+
+ p_tuning: # P-tuning specific params
+ encoder_type: "mlp" # Either "mlp" or "lstm"; "mlp" is the default
+ num_layers: 2 # 2 recommended for MLP, 1 recommended for LSTM; must be at least 2 for MLP
+ dropout: 0.0
+
+ prompt_tuning: # Prompt tuning specific params
+ new_prompt_init_methods: ['text'] # List of 'text' or 'random'; should correspond to the tasks listed in new_tasks
+ new_prompt_init_text: ['some init text goes here'] # init text if the init method is 'text'; None if the init method is 'random'
+
+ data:
+ grapheme_prefix: null
+ train_ds: null
+ validation_ds: null
+ test_ds: ???
+ max_seq_length: 1536
+ sample_rate: 24000
+ add_eos: true
+ add_bos: false
+ decoder_starts_with_pad: False
+ add_eos_to_decoder_output: True
+ add_sentinel_to_input: True
+ ul2_prompt_token: null # , ,
+ shuffle: true
+ num_workers: 4
+ pin_memory: true
+ speech_offset: 30000
+ train_task: all
+ sup_data_path: None
+ num_speech_codebooks: 8
+ codebook_fps: 86
+ context_duration_min: 2.9
+ context_duration_max: 2.9
+ context_slice_method: "fixed"
+ phoneme_probability: 1.0
+ g2p:
+ english:
+ _target_: nemo.collections.tts.g2p.models.i18n_ipa.IpaG2p
+ phoneme_dict: "scripts/tts_dataset_files/ipa_cmudict-0.7b_nv23.01.txt"
+ heteronyms: "scripts/tts_dataset_files/heteronyms-052722"
+ phoneme_probability: 0.8
+ ignore_ambiguous_words: False
+ use_chars: True
+ use_stresses: True
+ grapheme_prefix: ${model.data.grapheme_prefix}
+ spanish:
+ _target_: nemo.collections.tts.g2p.models.i18n_ipa.IpaG2p
+ phoneme_dict: "scripts/tts_dataset_files/es_ES/es_ES_nv230301.dict"
+ phoneme_probability: 0.8
+ use_chars: True
+ use_stresses: True
+ ignore_ambiguous_words: False
+ grapheme_prefix: ${model.data.grapheme_prefix}
+ locale: "es-ES"
+ mandarin:
+ _target_: nemo.collections.tts.g2p.models.zh_cn_pinyin.ChineseG2p
+ phoneme_dict: "scripts/tts_dataset_files/zh/36finals/ipa_dict_nv23.05.txt"
+ word_segmenter: "jieba"
+ phoneme_prefix: ""
+ phoneme_case: "lower"
+ tone_prefix: "#"
+ ascii_letter_prefix: ${model.data.grapheme_prefix}
+ ascii_letter_case: "upper"
+ german:
+ _target_: nemo.collections.tts.g2p.models.i18n_ipa.IpaG2p
+ phoneme_dict: "scripts/tts_dataset_files/de/de_nv230119.dict"
+ heteronyms: "scripts/tts_dataset_files/de/de_nv230119.heteronym"
+ phoneme_probability: 0.8
+ ignore_ambiguous_words: False
+ use_chars: True
+ use_stresses: True
+ grapheme_case: mixed
+ grapheme_prefix: ${model.data.grapheme_prefix}
+ locale: "de-DE"
+
+ optim:
+ name: fused_adam
+ lr: 5e-5
+ weight_decay: 0.01
+ betas:
+ - 0.9
+ - 0.98
\ No newline at end of file
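These configs rely heavily on OmegaConf interpolation (`${name}`, `${model.global_batch_size}`, `${model.data.grapheme_prefix}`, and so on). A self-contained sketch of how such references resolve; the values are illustrative only:

    from omegaconf import OmegaConf

    cfg = OmegaConf.create(
        """
        name: megatron_t5_speechllm_tts_inference
        exp_manager:
          name: ${name}  # resolves against the config root
        model:
          global_batch_size: 16
          validation_global_batch_size: ${model.global_batch_size}
        """
    )
    assert cfg.exp_manager.name == "megatron_t5_speechllm_tts_inference"
    assert cfg.model.validation_global_batch_size == 16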
diff --git a/examples/tts/speechllm/conf/megatron_t5_speechllm_inference_model.yaml b/examples/tts/speechllm/conf/megatron_t5_speechllm_inference_model.yaml
new file mode 100644
index 000000000000..1858edf9e667
--- /dev/null
+++ b/examples/tts/speechllm/conf/megatron_t5_speechllm_inference_model.yaml
@@ -0,0 +1,213 @@
+name: megatron_t5_speechllm_tts_inference
+checkpoint_path: ???
+
+trainer:
+ devices: 1
+ accelerator: gpu
+ num_nodes: 1
+ precision: 32
+ logger: False
+ enable_checkpointing: False
+ use_distributed_sampler: False
+ max_epochs: 10000
+ max_steps: -1
+ log_every_n_steps: 10
+ val_check_interval: null
+ check_val_every_n_epoch: 3
+ gradient_clip_val: 1.0
+
+exp_manager:
+ exp_dir: null
+ name: ${name}
+ create_wandb_logger: False
+ resume_if_exists: False
+ resume_ignore_no_checkpoint: True
+ create_checkpoint_callback: True
+ checkpoint_callback_params:
+ monitor: val_loss
+ save_top_k: 2
+ mode: min
+ save_nemo_on_train_end: False # Should be False; the correct prompt-learning model file is saved at model.nemo_path, set below
+ filename: "megatron_t5_speechllm_tts--{${exp_manager.checkpoint_callback_params.monitor}:.3f}-{step}"
+ model_parallel_size: ${model.tensor_model_parallel_size}
+ save_best_model: True
+ create_early_stopping_callback: False
+ early_stopping_callback_params:
+ monitor: "val_loss"
+ mode: "min"
+ min_delta: 0.001
+ patience: 10
+ verbose: True
+
+model:
+ seed: 1234
+ nemo_path: ${name}.nemo # .nemo filename/absolute path to where the virtual prompt model parameters will be saved
+ virtual_prompt_style: "p-tuning" # one of 'prompt-tuning', 'p-tuning', or 'inference'
+ tensor_model_parallel_size: 1
+ pipeline_model_parallel_size: 1
+ global_batch_size: 16
+ micro_batch_size: 16 # micro batch size should equal global batch size when pipeline parallel = 1
+ validation_global_batch_size: ${model.global_batch_size}
+ validation_micro_batch_size: ${model.micro_batch_size}
+ validation_drop_last: False
+ report_validation_metric: False
+ validation_metric: accuracy
+ num_speech_tokens: 10112 # Vocabulary size pertaining to speech
+ seq_pattern: "parallel" # parallel, delay_parallel, flatten
+ temperature: 0.85 # Temperature to be used for inference
+ top_k: 80 # Top k to be used for inference
+ max_inference_timesteps: 1000 # Maximum number of timesteps to run inference for
+
+ restore_path: null # Path to an existing p-tuned/prompt tuned .nemo model you wish to add new tasks to or run inference with
+ save_nemo_on_validation_end: True # Saves an inference-ready .nemo file every time a checkpoint is saved during training.
+ existing_tasks: []
+ new_tasks: ["squad"]
+ codecmodel_type: nemo_codec
+ codecmodel_path: ???
+ english_only_model: true
+ context_conditioning: decoder
+ train_from_scratch: true
+ override_tokenizer_vocab_file: ???
+ use_flash_attention: false
+ lm_vocab_size: 30000
+
+ frozen_model:
+ tensor_model_parallel_size: 1
+ pipeline_model_parallel_size: 1
+ pipeline_model_parallel_split_rank: 0
+ make_vocab_size_divisible_by: 128
+ pre_process: true
+ post_process: true
+ gradient_as_bucket_view: true
+ native_amp_init_scale: 4294967296
+ native_amp_growth_interval: 1000
+ fp16_lm_cross_entropy: false
+ seed: 1234
+ use_cpu_initialization: false
+ apex_transformer_log_level: 30
+ tokenizer:
+ library: megatron
+ type: BertWordPieceCase
+ model: null
+ vocab_file: null
+ merge_file: null
+ optim:
+ name: null
+ data:
+ dataset_type: t5
+ encoder:
+ arch: transformer
+ bias_activation_fusion: false
+ use_flash_attention: ${model.use_flash_attention}
+ num_layers: 12
+ hidden_size: 768
+ ffn_hidden_size: 2048
+ num_attention_heads: 12
+ init_method_std: 0.015
+ hidden_dropout: 0.1
+ attention_dropout: 0.1
+ kv_channels: 64
+ activation: geglu
+ decoder:
+ arch: transformer
+ bias_activation_fusion: false
+ use_flash_attention: ${model.use_flash_attention}
+ num_layers: 12
+ hidden_size: 768
+ ffn_hidden_size: 2048
+ num_attention_heads: 12
+ init_method_std: 0.015
+ hidden_dropout: 0.1
+ attention_dropout: 0.1
+ kv_channels: 64
+ activation: geglu
+
+ task_templates:
+ - taskname: "squad"
+ prompt_template: "<|VIRTUAL_PROMPT_0|> {context} {question} {answer}"
+ total_virtual_tokens: 3
+ virtual_token_splits: [3]
+ truncate_field: context
+ answer_field: answer
+
+ p_tuning: # P-tuning specific params
+ encoder_type: "mlp" # Either "mlp" or "lstm"; "mlp" is the default
+ num_layers: 2 # 2 recommended for MLP, 1 recommended for LSTM; must be at least 2 for MLP
+ dropout: 0.0
+
+ prompt_tuning: # Prompt tuning specific params
+ new_prompt_init_methods: ['text'] # List of 'text' or 'random'; should correspond to the tasks listed in new_tasks
+ new_prompt_init_text: ['some init text goes here'] # init text if the init method is 'text'; None if the init method is 'random'
+
+ data:
+ grapheme_prefix: null
+ train_ds: null
+ validation_ds: null
+ test_ds: ???
+ max_seq_length: 1536
+ sample_rate: 24000
+ add_eos: true
+ add_bos: false
+ decoder_starts_with_pad: False
+ add_eos_to_decoder_output: True
+ add_sentinel_to_input: True
+ ul2_prompt_token: null # , ,
+ shuffle: true
+ num_workers: 4
+ pin_memory: true
+ speech_offset: 30000
+ train_task: all
+ sup_data_path: None
+ num_speech_codebooks: 8
+ codebook_fps: 86
+ context_duration_min: 2.9
+ context_duration_max: 2.9
+ context_slice_method: "fixed"
+ phoneme_probability: 1.0
+ g2p:
+ english:
+ _target_: nemo.collections.tts.g2p.models.i18n_ipa.IpaG2p
+ phoneme_dict: "scripts/tts_dataset_files/ipa_cmudict-0.7b_nv23.01.txt"
+ heteronyms: "scripts/tts_dataset_files/heteronyms-052722"
+ phoneme_probability: 0.8
+ ignore_ambiguous_words: False
+ use_chars: True
+ use_stresses: True
+ grapheme_prefix: ${model.data.grapheme_prefix}
+ spanish:
+ _target_: nemo.collections.tts.g2p.models.i18n_ipa.IpaG2p
+ phoneme_dict: "scripts/tts_dataset_files/es_ES/es_ES_nv230301.dict"
+ phoneme_probability: 0.8
+ use_chars: True
+ use_stresses: True
+ ignore_ambiguous_words: False
+ grapheme_prefix: ${model.data.grapheme_prefix}
+ locale: "es-ES"
+ mandarin:
+ _target_: nemo.collections.tts.g2p.models.zh_cn_pinyin.ChineseG2p
+ phoneme_dict: "scripts/tts_dataset_files/zh/36finals/ipa_dict_nv23.05.txt"
+ word_segmenter: "jieba"
+ phoneme_prefix: ""
+ phoneme_case: "lower"
+ tone_prefix: "#"
+ ascii_letter_prefix: ${model.data.grapheme_prefix}
+ ascii_letter_case: "upper"
+ german:
+ _target_: nemo.collections.tts.g2p.models.i18n_ipa.IpaG2p
+ phoneme_dict: "scripts/tts_dataset_files/de/de_nv230119.dict"
+ heteronyms: "scripts/tts_dataset_files/de/de_nv230119.heteronym"
+ phoneme_probability: 0.8
+ ignore_ambiguous_words: False
+ use_chars: True
+ use_stresses: True
+ grapheme_case: mixed
+ grapheme_prefix: ${model.data.grapheme_prefix}
+ locale: "de-DE"
+
+ optim:
+ name: fused_adam
+ lr: 5e-5
+ weight_decay: 0.01
+ betas:
+ - 0.9
+ - 0.98
diff --git a/examples/tts/speechllm/conf/megatron_t5_speechllm_inference_multiencoder.yaml b/examples/tts/speechllm/conf/megatron_t5_speechllm_inference_multiencoder.yaml
new file mode 100644
index 000000000000..8ad967d20538
--- /dev/null
+++ b/examples/tts/speechllm/conf/megatron_t5_speechllm_inference_multiencoder.yaml
@@ -0,0 +1,218 @@
+name: megatron_t5_speechllm
+checkpoint_path: ???
+
+trainer:
+ devices: 1
+ accelerator: gpu
+ num_nodes: 1
+ precision: 32
+ logger: False
+ enable_checkpointing: False
+ use_distributed_sampler: False
+ max_epochs: -1
+ max_steps: 250000
+ log_every_n_steps: 10
+ val_check_interval: null
+ check_val_every_n_epoch: 1
+
+exp_manager:
+ explicit_log_dir: null
+ exp_dir: null
+ name: ${name}
+ create_wandb_logger: False
+ resume_if_exists: False
+ resume_ignore_no_checkpoint: True
+ create_checkpoint_callback: True
+ checkpoint_callback_params:
+ monitor: val_loss
+ save_top_k: 3
+ mode: min
+ save_nemo_on_train_end: False # Should be False; the correct prompt-learning model file is saved at model.nemo_path, set below
+ filename: "megatron_t5_speechllm_tts--{${exp_manager.checkpoint_callback_params.monitor}:.3f}-{step}"
+ model_parallel_size: ${model.tensor_model_parallel_size}
+ save_best_model: True
+ create_early_stopping_callback: False
+ early_stopping_callback_params:
+ monitor: "val_loss"
+ mode: "min"
+ min_delta: 0.001
+ patience: 10
+ verbose: True
+
+model:
+ seed: 1234
+ nemo_path: ${name}.nemo # .nemo filename/absolute path to where the virtual prompt model parameters will be saved
+ virtual_prompt_style: "p-tuning" # one of 'prompt-tuning', 'p-tuning', or 'inference'
+ tensor_model_parallel_size: 1
+ pipeline_model_parallel_size: 1
+ global_batch_size: 2
+ micro_batch_size: 2 # micro batch size should equal global batch size when pipeline parallel = 1
+ validation_global_batch_size: ${model.global_batch_size}
+ validation_micro_batch_size: ${model.micro_batch_size}
+ validation_drop_last: False
+ report_validation_metric: False
+ validation_metric: accuracy
+ num_speech_tokens: 10112 # Vocabulary size pertaining to speech
+ seq_pattern: "parallel" # parallel, delay_parallel, flatten
+ temperature: 0.85 # Temperature to be used for inference
+ top_k: 80 # Top k to be used for inference
+ max_inference_timesteps: 2000 # Maximum number of timesteps to run inference for
+ restore_path: null # Path to an existing p-tuned/prompt tuned .nemo model you wish to add new tasks to or run inference with
+ save_nemo_on_validation_end: True # Saves an inference-ready .nemo file every time a checkpoint is saved during training.
+ existing_tasks: []
+ new_tasks: ["squad"]
+ codecmodel_type: nemo_codec
+ codecmodel_path: ???
+ english_only_model: true
+ context_conditioning: encoder
+ train_from_scratch: true
+ override_tokenizer_vocab_file: ???
+ use_flash_attention: false
+ lm_vocab_size: 30000
+ enc_output_to_layers: [[0,1,2],[3,4,5,6,7,8]]
+
+ frozen_model:
+ tensor_model_parallel_size: 1
+ pipeline_model_parallel_size: 1
+ pipeline_model_parallel_split_rank: 0
+ make_vocab_size_divisible_by: 128
+ pre_process: true
+ post_process: true
+ gradient_as_bucket_view: true
+ native_amp_init_scale: 4294967296
+ native_amp_growth_interval: 1000
+ fp16_lm_cross_entropy: false
+ seed: 1234
+ use_cpu_initialization: false
+ apex_transformer_log_level: 30
+ tokenizer:
+ library: megatron
+ type: BertWordPieceCase
+ model: null
+ vocab_file: null
+ merge_file: null
+ optim:
+ name: null
+ data:
+ dataset_type: t5
+ encoder:
+ arch: multi_transformer
+ n_transformers: 2
+ bias_activation_fusion: false
+ use_flash_attention: ${model.use_flash_attention}
+ num_layers: 6
+ hidden_size: 768
+ ffn_hidden_size: 2048
+ num_attention_heads: 12
+ init_method_std: 0.015
+ hidden_dropout: 0.1
+ attention_dropout: 0.1
+ kv_channels: 64
+ activation: geglu
+ decoder:
+ arch: transformer
+ bias_activation_fusion: false
+ use_flash_attention: ${model.use_flash_attention}
+ num_layers: 12
+ hidden_size: 768
+ ffn_hidden_size: 2048
+ num_attention_heads: 12
+ init_method_std: 0.015
+ hidden_dropout: 0.1
+ attention_dropout: 0.1
+ kv_channels: 64
+ activation: geglu
+
+ task_templates:
+ - taskname: "squad"
+ prompt_template: "<|VIRTUAL_PROMPT_0|> {context} {question} {answer}"
+ total_virtual_tokens: 3
+ virtual_token_splits: [3]
+ truncate_field: context
+ answer_field: answer
+
+ p_tuning: # P-tuning specific params
+ encoder_type: "mlp" # Either "mlp" or "lstm"; "mlp" is the default
+ num_layers: 2 # 2 recommended for MLP, 1 recommended for LSTM; must be at least 2 for MLP
+ dropout: 0.0
+
+ prompt_tuning: # Prompt tuning specific params
+ new_prompt_init_methods: ['text'] # List of 'text' or 'random'; should correspond to the tasks listed in new_tasks
+ new_prompt_init_text: ['some init text goes here'] # init text if the init method is 'text'; None if the init method is 'random'
+
+ data:
+ grapheme_prefix: null
+ train_ds: null
+ validation_ds: null
+ test_ds: ???
+ max_seq_length: 2048
+ sample_rate: 24000
+ add_eos: true
+ add_bos: false
+ use_attention_prior: false
+ attention_prior_scaling_factor: 0.05
+ cross_attention_epsilon: 0.0
+ decoder_starts_with_pad: False
+ add_eos_to_decoder_output: True
+ add_sentinel_to_input: True
+ ul2_prompt_token: null # , ,
+ shuffle: true
+ num_workers: 4
+ pin_memory: true
+ speech_offset: 30128
+ train_task: all
+ sup_data_path: None
+ num_speech_codebooks: 8
+ codebook_fps: 86
+ context_duration_min: 2.9
+ context_duration_max: 2.9
+ context_slice_method: "fixed"
+ phoneme_probability: 1.0
+ encoder_type: ${model.frozen_model.encoder.arch}
+ g2p:
+ english:
+ _target_: nemo.collections.tts.g2p.models.i18n_ipa.IpaG2p
+ phoneme_dict: "scripts/tts_dataset_files/ipa_cmudict-0.7b_nv23.01.txt"
+ heteronyms: "scripts/tts_dataset_files/heteronyms-052722"
+ phoneme_probability: 0.8
+ ignore_ambiguous_words: False
+ use_chars: True
+ use_stresses: True
+ grapheme_prefix: ${model.data.grapheme_prefix}
+ spanish:
+ _target_: nemo.collections.tts.g2p.models.i18n_ipa.IpaG2p
+ phoneme_dict: "scripts/tts_dataset_files/es_ES/es_ES_nv230301.dict"
+ phoneme_probability: 0.8
+ use_chars: True
+ use_stresses: True
+ ignore_ambiguous_words: False
+ grapheme_prefix: ${model.data.grapheme_prefix}
+ locale: "es-ES"
+ mandarin:
+ _target_: nemo.collections.tts.g2p.models.zh_cn_pinyin.ChineseG2p
+ phoneme_dict: "scripts/tts_dataset_files/zh/36finals/ipa_dict_nv23.05.txt"
+ word_segmenter: "jieba"
+ phoneme_prefix: ""
+ phoneme_case: "lower"
+ tone_prefix: "#"
+ ascii_letter_prefix: ${model.data.grapheme_prefix}
+ ascii_letter_case: "upper"
+ german:
+ _target_: nemo.collections.tts.g2p.models.i18n_ipa.IpaG2p
+ phoneme_dict: "scripts/tts_dataset_files/de/de_nv230119.dict"
+ heteronyms: "scripts/tts_dataset_files/de/de_nv230119.heteronym"
+ phoneme_probability: 0.8
+ ignore_ambiguous_words: False
+ use_chars: True
+ use_stresses: True
+ grapheme_case: mixed
+ grapheme_prefix: ${model.data.grapheme_prefix}
+ locale: "de-DE"
+
+ optim:
+ name: fused_adam
+ lr: 1e-4
+ weight_decay: 0.01
+ betas:
+ - 0.9
+ - 0.98
diff --git a/examples/tts/speechllm/conf/megatron_t5_speechllm_medium.yaml b/examples/tts/speechllm/conf/megatron_t5_speechllm_medium.yaml
new file mode 100644
index 000000000000..bd31f0712fdf
--- /dev/null
+++ b/examples/tts/speechllm/conf/megatron_t5_speechllm_medium.yaml
@@ -0,0 +1,161 @@
+name: megatron_t5_speechllm_medium
+
+trainer:
+ devices: 1
+ accelerator: gpu
+ num_nodes: 1
+ precision: 32
+ logger: False
+ enable_checkpointing: False
+ use_distributed_sampler: False
+ max_epochs: -1
+ max_steps: 1000000
+ log_every_n_steps: 10
+ val_check_interval: null
+ check_val_every_n_epoch: 1
+
+exp_manager:
+ explicit_log_dir: null
+ exp_dir: null
+ name: ${name}
+ create_wandb_logger: False
+ resume_if_exists: True
+ resume_ignore_no_checkpoint: True
+ create_checkpoint_callback: True
+ checkpoint_callback_params:
+ monitor: val_loss
+ save_top_k: 3
+ mode: min
+ save_nemo_on_train_end: False
+ filename: "megatron_t5_speechllm_tts--{${exp_manager.checkpoint_callback_params.monitor}:.3f}-{step}"
+ model_parallel_size: ${model.tensor_model_parallel_size}
+ save_best_model: True
+ create_early_stopping_callback: False
+ early_stopping_callback_params:
+ monitor: "val_loss"
+ mode: "min"
+ min_delta: 0.001
+ patience: 10
+ verbose: True
+
+model:
+ seed: 1234
+ nemo_path: ${name}.nemo # .nemo filename/absolute path to where the virtual prompt model parameters will be saved
+ virtual_prompt_style: "p-tuning" # one of 'prompt-tuning', 'p-tuning', or 'inference'
+ tensor_model_parallel_size: 1
+ pipeline_model_parallel_size: 1
+ global_batch_size: 2
+ micro_batch_size: 2 # micro batch size should equal global batch size when pipeline parallel = 1
+ validation_global_batch_size: ${model.global_batch_size}
+ validation_micro_batch_size: ${model.micro_batch_size}
+ validation_drop_last: False
+ report_validation_metric: False
+ validation_metric: accuracy
+ num_speech_tokens: 10112 # Vocabulary size pertaining to speech
+ seq_pattern: "parallel" # parallel, delay_parallel, flatten
+ attn_prior_scaledown_start_step: 10000
+ attn_prior_end_step: 11000
+ return_all_crossattention_probs: True
+ num_cross_attention_heads: 12 # 12 for 220m, 16 for 3b.
+ restore_path: null # Path to an existing p-tuned/prompt tuned .nemo model you wish to add new tasks to or run inference with
+ language_model_path: ??? # Path to the pretrained T5 language model .nemo file, always required
+ save_nemo_on_validation_end: True # Saves an inference-ready .nemo file every time a checkpoint is saved during training.
+ existing_tasks: []
+ new_tasks: ["squad"]
+ freeze_model: false
+ use_alignment_loss: true
+ codecmodel_type: nemo_codec
+ codecmodel_path: ???
+ english_only_model: true
+ context_conditioning: decoder
+ use_flash_attention: false
+ lm_vocab_size: 30000
+
+ task_templates:
+ - taskname: "squad"
+ prompt_template: "<|VIRTUAL_PROMPT_0|> {context} {question} {answer}"
+ total_virtual_tokens: 3
+ virtual_token_splits: [3]
+ truncate_field: context
+ answer_field: answer
+
+ p_tuning: # P-tuning specific params
+ encoder_type: "mlp" # Either "mlp" or "lstm"; "mlp" is the default
+ num_layers: 2 # 2 recommended for MLP, 1 recommended for LSTM; must be at least 2 for MLP
+ dropout: 0.0
+
+ prompt_tuning: # Prompt tuning specific params
+ new_prompt_init_methods: ['text'] # List of 'text' or 'random'; should correspond to the tasks listed in new_tasks
+ new_prompt_init_text: ['some init text goes here'] # init text if the init method is 'text'; None if the init method is 'random'
+
+ data:
+ grapheme_prefix: null
+ train_ds: ???
+ validation_ds: ???
+ max_seq_length: 2048
+ sample_rate: 24000
+ add_eos: true
+ add_bos: false
+ use_attention_prior: true
+ attention_prior_scaling_factor: 0.05
+ cross_attention_epsilon: 0.0
+ decoder_starts_with_pad: False
+ add_eos_to_decoder_output: True
+ add_sentinel_to_input: True
+ ul2_prompt_token: null # , ,
+ shuffle: true
+ num_workers: 4
+ pin_memory: true
+ speech_offset: 30128
+ train_task: all
+ num_speech_codebooks: 8
+ codebook_fps: 86
+ context_duration_min: 2.9
+ context_duration_max: 2.9
+ g2p:
+ english:
+ _target_: nemo.collections.tts.g2p.models.i18n_ipa.IpaG2p
+ phoneme_dict: "scripts/tts_dataset_files/ipa_cmudict-0.7b_nv23.01.txt"
+ heteronyms: "scripts/tts_dataset_files/heteronyms-052722"
+ phoneme_probability: 0.8
+ ignore_ambiguous_words: False
+ use_chars: True
+ use_stresses: True
+ grapheme_prefix: ${model.data.grapheme_prefix}
+ spanish:
+ _target_: nemo.collections.tts.g2p.models.i18n_ipa.IpaG2p
+ phoneme_dict: "scripts/tts_dataset_files/es_ES/es_ES_nv230301.dict"
+ phoneme_probability: 0.8
+ use_chars: True
+ use_stresses: True
+ ignore_ambiguous_words: False
+ grapheme_prefix: ${model.data.grapheme_prefix}
+ locale: "es-ES"
+ mandarin:
+ _target_: nemo.collections.tts.g2p.models.zh_cn_pinyin.ChineseG2p
+ phoneme_dict: "scripts/tts_dataset_files/zh/36finals/ipa_dict_nv23.05.txt"
+ word_segmenter: "jieba"
+ phoneme_prefix: ""
+ phoneme_case: "lower"
+ tone_prefix: "#"
+ ascii_letter_prefix: ${model.data.grapheme_prefix}
+ ascii_letter_case: "upper"
+ german:
+ _target_: nemo.collections.tts.g2p.models.i18n_ipa.IpaG2p
+ phoneme_dict: "scripts/tts_dataset_files/de/de_nv230119.dict"
+ heteronyms: "scripts/tts_dataset_files/de/de_nv230119.heteronym"
+ phoneme_probability: 0.8
+ ignore_ambiguous_words: False
+ use_chars: True
+ use_stresses: True
+ grapheme_case: mixed
+ grapheme_prefix: ${model.data.grapheme_prefix}
+ locale: "de-DE"
+
+ optim:
+ name: fused_adam
+ lr: 1e-4
+ weight_decay: 0.01
+ betas:
+ - 0.9
+ - 0.98
\ No newline at end of file
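A quick way to sanity-check the codec numbers in this config: with the codec producing codebook_fps = 86 frames per second and a fixed 2.9 s context window, each context clip spans about 249 frames per codebook. A small sketch of that arithmetic:

    # Back-of-the-envelope arithmetic for the codec settings above (sketch only).
    codebook_fps = 86          # codec frames per second
    context_duration_s = 2.9   # context_duration_min == context_duration_max in this config
    num_speech_codebooks = 8

    frames_per_codebook = int(context_duration_s * codebook_fps)
    print(frames_per_codebook)                          # 249
    print(frames_per_codebook * num_speech_codebooks)   # 1992 codec tokens per context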
diff --git a/examples/tts/speechllm/conf/megatron_t5_speechllm_multiencoder.yaml b/examples/tts/speechllm/conf/megatron_t5_speechllm_multiencoder.yaml
new file mode 100644
index 000000000000..bf3f65ff9e00
--- /dev/null
+++ b/examples/tts/speechllm/conf/megatron_t5_speechllm_multiencoder.yaml
@@ -0,0 +1,223 @@
+name: megatron_t5_speechllm
+
+trainer:
+ devices: 1
+ accelerator: gpu
+ num_nodes: 1
+ precision: 32
+ logger: False
+ enable_checkpointing: False
+ use_distributed_sampler: False
+ max_epochs: -1
+ max_steps: 250000
+ log_every_n_steps: 10
+ val_check_interval: null
+ check_val_every_n_epoch: 1
+
+exp_manager:
+ explicit_log_dir: null
+ exp_dir: null
+ name: ${name}
+ create_wandb_logger: False
+ resume_if_exists: True
+ resume_ignore_no_checkpoint: True
+ create_checkpoint_callback: True
+ checkpoint_callback_params:
+ monitor: val_loss
+ save_top_k: 3
+ mode: min
+ save_nemo_on_train_end: False # Should be False; the correct prompt-learning model file is saved at model.nemo_path, set below
+ filename: "megatron_t5_speechllm_tts--{${exp_manager.checkpoint_callback_params.monitor}:.3f}-{step}"
+ model_parallel_size: ${model.tensor_model_parallel_size}
+ save_best_model: True
+ create_early_stopping_callback: False
+ early_stopping_callback_params:
+ monitor: "val_loss"
+ mode: "min"
+ min_delta: 0.001
+ patience: 10
+ verbose: True
+
+model:
+ seed: 1234
+ nemo_path: ${name}.nemo # .nemo filename/absolute path to where the virtual prompt model parameters will be saved
+ virtual_prompt_style: "p-tuning" # one of 'prompt-tuning', 'p-tuning', or 'inference'
+ tensor_model_parallel_size: 1
+ pipeline_model_parallel_size: 1
+ global_batch_size: 2
+ micro_batch_size: 2 # micro batch size should equal global batch size when pipeline parallel = 1
+ validation_global_batch_size: ${model.global_batch_size}
+ validation_micro_batch_size: ${model.micro_batch_size}
+ validation_drop_last: False
+ report_validation_metric: False
+ validation_metric: accuracy
+ num_speech_tokens: 10112 # Vocabulary size pertaining to speech
+ seq_pattern: "parallel" # parallel, delay_parallel, flatten
+ attn_prior_scaledown_start_step: 10000
+ attn_prior_end_step: 11000
+ return_all_crossattention_probs: True
+ num_cross_attention_heads: 12 # 12 for 220m, 16 for 3b.
+ restore_path: null # Path to an existing p-tuned/prompt tuned .nemo model you wish to add new tasks to or run inference with
+ save_nemo_on_validation_end: True # Saves an inference-ready .nemo file every time a checkpoint is saved during training.
+ existing_tasks: []
+ new_tasks: ["squad"]
+ freeze_model: false
+ use_alignment_loss: true
+ codecmodel_type: nemo_codec
+ codecmodel_path: ???
+ english_only_model: true
+ context_conditioning: encoder
+ train_from_scratch: true
+ override_tokenizer_vocab_file: ???
+ use_flash_attention: false
+ lm_vocab_size: 30000
+ enc_output_to_layers: [[0,1,2],[3,4,5,6,7,8]]
+
+ frozen_model:
+ tensor_model_parallel_size: 1
+ pipeline_model_parallel_size: 1
+ pipeline_model_parallel_split_rank: 0
+ make_vocab_size_divisible_by: 128
+ pre_process: true
+ post_process: true
+ gradient_as_bucket_view: true
+ native_amp_init_scale: 4294967296
+ native_amp_growth_interval: 1000
+ fp16_lm_cross_entropy: false
+ seed: 1234
+ use_cpu_initialization: false
+ apex_transformer_log_level: 30
+ tokenizer:
+ library: megatron
+ type: BertWordPieceCase
+ model: null
+ vocab_file: null
+ merge_file: null
+ optim:
+ name: null
+ data:
+ dataset_type: t5
+ encoder:
+ arch: multi_transformer
+ n_transformers: 2
+ bias_activation_fusion: false
+ use_flash_attention: ${model.use_flash_attention}
+ num_layers: 6
+ hidden_size: 768
+ ffn_hidden_size: 2048
+ num_attention_heads: 12
+ init_method_std: 0.015
+ hidden_dropout: 0.1
+ attention_dropout: 0.1
+ kv_channels: 64
+ activation: geglu
+ decoder:
+ arch: transformer
+ bias_activation_fusion: false
+ use_flash_attention: ${model.use_flash_attention}
+ num_layers: 12
+ hidden_size: 768
+ ffn_hidden_size: 2048
+ num_attention_heads: 12
+ init_method_std: 0.015
+ hidden_dropout: 0.1
+ attention_dropout: 0.1
+ kv_channels: 64
+ activation: geglu
+
+ task_templates:
+ - taskname: "squad"
+ prompt_template: "<|VIRTUAL_PROMPT_0|> {context} {question} {answer}"
+ total_virtual_tokens: 3
+ virtual_token_splits: [3]
+ truncate_field: context
+ answer_field: answer
+
+ p_tuning: # P-tuning specific params
+ encoder_type: "mlp" # Either "mlp" or "lstm"; "mlp" is the default
+ num_layers: 2 # 2 recommended for MLP, 1 recommended for LSTM; must be at least 2 for MLP
+ dropout: 0.0
+
+ prompt_tuning: # Prompt tuning specific params
+ new_prompt_init_methods: ['text'] # List of 'text' or 'random'; should correspond to the tasks listed in new_tasks
+ new_prompt_init_text: ['some init text goes here'] # init text if the init method is 'text'; None if the init method is 'random'
+
+ data:
+ grapheme_prefix: null
+ train_ds: ???
+ validation_ds: ???
+ max_seq_length: 2048
+ sample_rate: 24000
+ add_eos: true
+ add_bos: false
+ use_attention_prior: true
+ attention_prior_scaling_factor: 0.05
+ cross_attention_epsilon: 0.0
+ decoder_starts_with_pad: False
+ add_eos_to_decoder_output: True
+ add_sentinel_to_input: True
+ ul2_prompt_token: null # , ,
+ shuffle: true
+ num_workers: 4
+ pin_memory: true
+ speech_offset: 30128
+ train_task: all
+ num_speech_codebooks: 8
+ codebook_fps: 86
+ context_duration_min: 2.9
+ context_duration_max: 2.9
+ encoder_type: ${model.frozen_model.encoder.arch}
+ g2p:
+ english:
+ _target_: nemo.collections.tts.g2p.models.i18n_ipa.IpaG2p
+ phoneme_dict: "scripts/tts_dataset_files/ipa_cmudict-0.7b_nv23.01.txt"
+ heteronyms: "scripts/tts_dataset_files/heteronyms-052722"
+ phoneme_probability: 0.8
+ ignore_ambiguous_words: False
+ use_chars: True
+ use_stresses: True
+ grapheme_prefix: ${model.data.grapheme_prefix}
+ spanish:
+ _target_: nemo.collections.tts.g2p.models.i18n_ipa.IpaG2p
+ phoneme_dict: "scripts/tts_dataset_files/es_ES/es_ES_nv230301.dict"
+ phoneme_probability: 0.8
+ use_chars: True
+ use_stresses: True
+ ignore_ambiguous_words: False
+ grapheme_prefix: ${model.data.grapheme_prefix}
+ locale: "es-ES"
+ mandarin:
+ _target_: nemo.collections.tts.g2p.models.zh_cn_pinyin.ChineseG2p
+ phoneme_dict: "scripts/tts_dataset_files/zh/36finals/ipa_dict_nv23.05.txt"
+ word_segmenter: "jieba"
+ phoneme_prefix: ""
+ phoneme_case: "lower"
+ tone_prefix: "#"
+ ascii_letter_prefix: ${model.data.grapheme_prefix}
+ ascii_letter_case: "upper"
+ german:
+ _target_: nemo.collections.tts.g2p.models.i18n_ipa.IpaG2p
+ phoneme_dict: "scripts/tts_dataset_files/de/de_nv230119.dict"
+ heteronyms: "scripts/tts_dataset_files/de/de_nv230119.heteronym"
+ phoneme_probability: 0.8
+ ignore_ambiguous_words: False
+ use_chars: True
+ use_stresses: True
+ grapheme_case: mixed
+ grapheme_prefix: ${model.data.grapheme_prefix}
+ locale: "de-DE"
+
+ optim:
+ name: fused_adam
+ lr: 1e-4
+ weight_decay: 0.01
+ betas:
+ - 0.9
+ - 0.98
+ sched:
+ name: CosineAnnealing
+ warmup_steps: 1000
+ constant_steps: 0
+ min_lr: 1e-5
+ monitor: val_loss
+ reduce_on_plateau: false
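The multi-encoder configs add `enc_output_to_layers` alongside `arch: multi_transformer` and `n_transformers: 2`. Read purely from the config shape — an assumption, not a description of the model code — it looks like each sub-encoder's output is routed to a group of decoder layers:

    # Assumed reading of enc_output_to_layers (not verified against the model
    # implementation): encoder i cross-attends into the listed decoder layers.
    enc_output_to_layers = [[0, 1, 2], [3, 4, 5, 6, 7, 8]]
    for enc_idx, dec_layers in enumerate(enc_output_to_layers):
        print(f"encoder {enc_idx} -> decoder layers {dec_layers}")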
diff --git a/examples/tts/speechllm/conf/megatron_t5_speechlm_model.yaml b/examples/tts/speechllm/conf/megatron_t5_speechlm_model.yaml
new file mode 100644
index 000000000000..d69bfb979182
--- /dev/null
+++ b/examples/tts/speechllm/conf/megatron_t5_speechlm_model.yaml
@@ -0,0 +1,221 @@
+name: megatron_t5_speechllm
+
+trainer:
+ devices: 1
+ accelerator: gpu
+ num_nodes: 1
+ precision: 32
+ logger: False
+ enable_checkpointing: False
+ use_distributed_sampler: False
+ max_epochs: -1
+ max_steps: 250000
+ log_every_n_steps: 10
+ val_check_interval: null
+ check_val_every_n_epoch: 1
+
+exp_manager:
+ explicit_log_dir: null
+ exp_dir: null
+ name: ${name}
+ create_wandb_logger: False
+ resume_if_exists: True
+ resume_ignore_no_checkpoint: True
+ create_checkpoint_callback: True
+ checkpoint_callback_params:
+ monitor: val_loss
+ save_top_k: 3
+ mode: min
+ save_nemo_on_train_end: False # Should be False; the correct prompt-learning model file is saved at model.nemo_path, set below
+ filename: "megatron_t5_speechllm_tts--{${exp_manager.checkpoint_callback_params.monitor}:.3f}-{step}"
+ model_parallel_size: ${model.tensor_model_parallel_size}
+ save_best_model: True
+ create_early_stopping_callback: False
+ early_stopping_callback_params:
+ monitor: "val_loss"
+ mode: "min"
+ min_delta: 0.001
+ patience: 10
+ verbose: True
+
+model:
+ seed: 1234
+ nemo_path: ${name}.nemo # .nemo filename/absolute path to where the virtual prompt model parameters will be saved
+ virtual_prompt_style: "p-tuning" # one of 'prompt-tuning', 'p-tuning', or 'inference'
+ tensor_model_parallel_size: 1
+ pipeline_model_parallel_size: 1
+ global_batch_size: 2
+ micro_batch_size: 2 # micro batch size should equal global batch size when pipeline parallel = 1
+ validation_global_batch_size: ${model.global_batch_size}
+ validation_micro_batch_size: ${model.micro_batch_size}
+ validation_drop_last: False
+ report_validation_metric: False
+ validation_metric: accuracy
+ num_speech_tokens: 10112 # Vocabulary size pertaining to speech
+ seq_pattern: "parallel" # parallel, delay_parallel, flatten
+ attn_prior_scaledown_start_step: 10000
+ attn_prior_end_step: 11000
+ return_all_crossattention_probs: True
+ num_cross_attention_heads: 12 # 12 for 220m, 16 for 3b.
+ restore_path: null # Path to an existing p-tuned/prompt tuned .nemo model you wish to add new tasks to or run inference with
+ save_nemo_on_validation_end: True # Saves an inference-ready .nemo file every time a checkpoint is saved during training.
+ existing_tasks: []
+ new_tasks: ["squad"]
+ freeze_model: false
+ use_alignment_loss: true
+ codecmodel_type: nemo_codec
+ codecmodel_path: ???
+ english_only_model: true
+ context_conditioning: decoder
+ train_from_scratch: true
+ override_tokenizer_vocab_file: ???
+ use_flash_attention: false
+ lm_vocab_size: 30000
+
+ frozen_model:
+ tensor_model_parallel_size: 1
+ pipeline_model_parallel_size: 1
+ pipeline_model_parallel_split_rank: 0
+ make_vocab_size_divisible_by: 128
+ pre_process: true
+ post_process: true
+ gradient_as_bucket_view: true
+ native_amp_init_scale: 4294967296
+ native_amp_growth_interval: 1000
+ fp16_lm_cross_entropy: false
+ seed: 1234
+ use_cpu_initialization: false
+ apex_transformer_log_level: 30
+ tokenizer:
+ library: megatron
+ type: BertWordPieceCase
+ model: null
+ vocab_file: null
+ merge_file: null
+ optim:
+ name: null
+ data:
+ dataset_type: t5
+ encoder:
+ arch: transformer
+ bias_activation_fusion: false
+ use_flash_attention: ${model.use_flash_attention}
+ num_layers: 12
+ hidden_size: 768
+ ffn_hidden_size: 2048
+ num_attention_heads: 12
+ init_method_std: 0.015
+ hidden_dropout: 0.1
+ attention_dropout: 0.1
+ kv_channels: 64
+ activation: geglu
+ decoder:
+ arch: transformer
+ bias_activation_fusion: false
+ use_flash_attention: ${model.use_flash_attention}
+ num_layers: 12
+ hidden_size: 768
+ ffn_hidden_size: 2048
+ num_attention_heads: 12
+ init_method_std: 0.015
+ hidden_dropout: 0.1
+ attention_dropout: 0.1
+ kv_channels: 64
+ activation: geglu
+
+ task_templates:
+ - taskname: "squad"
+ prompt_template: "<|VIRTUAL_PROMPT_0|> {context} {question} {answer}"
+ total_virtual_tokens: 3
+ virtual_token_splits: [3]
+ truncate_field: context
+ answer_field: answer
+
+ p_tuning: # P-tuning specific params
+ encoder_type: "mlp" # Either "mlp" or "lstm"; "mlp" is the default
+ num_layers: 2 # 2 recommended for MLP, 1 recommended for LSTM; must be at least 2 for MLP
+ dropout: 0.0
+
+ prompt_tuning: # Prompt tuning specific params
+ new_prompt_init_methods: ['text'] # List of 'text' or 'random'; should correspond to the tasks listed in new_tasks
+ new_prompt_init_text: ['some init text goes here'] # init text if the init method is 'text'; None if the init method is 'random'
+
+ data:
+ use_ipa: false
+ grapheme_prefix: null
+ train_ds: ???
+ validation_ds: ???
+ max_seq_length: 2048
+ sample_rate: 24000
+ add_eos: true
+ add_bos: false
+ use_attention_prior: true
+ attention_prior_scaling_factor: 0.05
+ cross_attention_epsilon: 0.0
+ decoder_starts_with_pad: False
+ add_eos_to_decoder_output: True
+ add_sentinel_to_input: True
+ ul2_prompt_token: null # , ,
+ shuffle: true
+ num_workers: 4
+ pin_memory: true
+ speech_offset: 30128
+ train_task: all
+ num_speech_codebooks: 8
+ codebook_fps: 86
+ context_duration_min: 2.9
+ context_duration_max: 2.9
+ g2p:
+ english:
+ _target_: nemo.collections.tts.g2p.models.i18n_ipa.IpaG2p
+ phoneme_dict: "scripts/tts_dataset_files/ipa_cmudict-0.7b_nv23.01.txt"
+ heteronyms: "scripts/tts_dataset_files/heteronyms-052722"
+ phoneme_probability: 0.8
+ ignore_ambiguous_words: False
+ use_chars: True
+ use_stresses: True
+ grapheme_prefix: ${model.data.grapheme_prefix}
+ spanish:
+ _target_: nemo.collections.tts.g2p.models.i18n_ipa.IpaG2p
+ phoneme_dict: "scripts/tts_dataset_files/es_ES/es_ES_nv230301.dict"
+ phoneme_probability: 0.8
+ use_chars: True
+ use_stresses: True
+ ignore_ambiguous_words: False
+ grapheme_prefix: ${model.data.grapheme_prefix}
+ locale: "es-ES"
+ mandarin:
+ _target_: nemo.collections.tts.g2p.models.zh_cn_pinyin.ChineseG2p
+ phoneme_dict: "scripts/tts_dataset_files/zh/36finals/ipa_dict_nv23.05.txt"
+ word_segmenter: "jieba"
+ phoneme_prefix: ""
+ phoneme_case: "lower"
+ tone_prefix: "#"
+ ascii_letter_prefix: ${model.data.grapheme_prefix}
+ ascii_letter_case: "upper"
+ german:
+ _target_: nemo.collections.tts.g2p.models.i18n_ipa.IpaG2p
+ phoneme_dict: "scripts/tts_dataset_files/de/de_nv230119.dict"
+ heteronyms: "scripts/tts_dataset_files/de/de_nv230119.heteronym"
+ phoneme_probability: 0.8
+ ignore_ambiguous_words: False
+ use_chars: True
+ use_stresses: True
+ grapheme_case: mixed
+ grapheme_prefix: ${model.data.grapheme_prefix}
+ locale: "de-DE"
+
+ optim:
+ name: fused_adam
+ lr: 1e-4
+ weight_decay: 0.01
+ betas:
+ - 0.9
+ - 0.98
+ sched:
+ name: CosineAnnealing
+ warmup_steps: 1000
+ constant_steps: 0
+ min_lr: 1e-5
+ monitor: val_loss
+ reduce_on_plateau: false
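This config (like the multiencoder one above) pairs fused_adam with a CosineAnnealing schedule: 1000 warmup steps, then decay toward min_lr 1e-5 over the 250000-step run. A generic linear-warmup-plus-cosine-decay curve with those numbers, assumed to match NeMo's scheduler only in spirit:

    import math

    def lr_at(step, max_steps=250_000, warmup_steps=1_000, peak_lr=1e-4, min_lr=1e-5):
        """Generic warmup + cosine decay (sketch, not NeMo's implementation)."""
        if step < warmup_steps:
            return peak_lr * step / warmup_steps
        progress = (step - warmup_steps) / (max_steps - warmup_steps)
        return min_lr + 0.5 * (peak_lr - min_lr) * (1.0 + math.cos(math.pi * progress))

    print(lr_at(0), lr_at(1_000), lr_at(250_000))  # 0.0, then peak 1e-4, then ~1e-5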
diff --git a/examples/tts/speechllm/megatron_t5_speechllm.py b/examples/tts/speechllm/megatron_t5_speechllm.py
new file mode 100644
index 000000000000..c4ec1a77f944
--- /dev/null
+++ b/examples/tts/speechllm/megatron_t5_speechllm.py
@@ -0,0 +1,57 @@
+# Copyright (c) 2024, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from omegaconf.omegaconf import OmegaConf, open_dict
+
+from nemo.collections.nlp.parts.megatron_trainer_builder import MegatronTrainerBuilder
+from nemo.collections.nlp.parts.nlp_overrides import NLPSaveRestoreConnector
+from nemo.collections.tts.models.speechllm.megatron_t5_speechllm_model import MegatronT5SpeechLMModel
+from nemo.core.config import hydra_runner
+from nemo.utils import logging
+from nemo.utils.exp_manager import exp_manager
+
+
+@hydra_runner(config_path="conf", config_name="megatron_t5_speechllm_medium.yaml")
+def main(cfg) -> None:
+ logging.info("\n\n************** Experiment configuration ***********")
+ logging.info(f'\n{OmegaConf.to_yaml(cfg)}')
+
+ # MegatronTrainerBuilder compat checks
+ if "gradient_as_bucket_view" not in cfg.model:
+ with open_dict(cfg):
+ cfg.model.gradient_as_bucket_view = False
+
+ trainer = MegatronTrainerBuilder(cfg).create_trainer()
+ exp_manager(trainer, cfg.exp_manager)
+
+ # hydra interpolation does not work here as the interpolation key is lost when PTL saves hparams
+ with open_dict(cfg):
+ cfg.model.precision = cfg.trainer.precision
+
+ # load existing or init new soft prompt T5 model
+ if cfg.model.get("restore_path", None) is not None:
+ logging.info(f"cfg.model.restore_path {cfg.model.restore_path}")
+ model = MegatronT5SpeechLMModel.restore_from(
+ cfg.model.restore_path, cfg.model, trainer=trainer, save_restore_connector=NLPSaveRestoreConnector()
+ )
+ else:
+ logging.info(f"cfg.model.restore_path is None")
+ model = MegatronT5SpeechLMModel(cfg.model, trainer=trainer)
+ model.maybe_init_from_pretrained_checkpoint(cfg=cfg)
+
+ trainer.fit(model)
+
+
+if __name__ == '__main__':
+ main()
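Both new scripts wrap config mutations in open_dict before adding keys such as gradient_as_bucket_view or model.precision. A minimal sketch of why: Hydra/OmegaConf configs are struct-mode, so assigning a brand-new key raises unless the struct flag is lifted temporarily:

    from omegaconf import OmegaConf, open_dict

    cfg = OmegaConf.create({"model": {"precision": 32}})
    OmegaConf.set_struct(cfg, True)   # mimic a Hydra-produced struct config

    with open_dict(cfg):              # temporarily allows new keys
        cfg.model.gradient_as_bucket_view = False

    assert cfg.model.gradient_as_bucket_view is False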
diff --git a/examples/tts/speechllm/megatron_t5_speechllm_inference.py b/examples/tts/speechllm/megatron_t5_speechllm_inference.py
new file mode 100644
index 000000000000..48d46952a993
--- /dev/null
+++ b/examples/tts/speechllm/megatron_t5_speechllm_inference.py
@@ -0,0 +1,53 @@
+# Copyright (c) 2024, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from omegaconf.omegaconf import OmegaConf, open_dict
+
+from nemo.collections.nlp.parts.megatron_trainer_builder import MegatronTrainerBuilder
+from nemo.collections.tts.models.speechllm.megatron_t5_speechllm_model import MegatronT5SpeechLMModel
+from nemo.core.config import hydra_runner
+from nemo.utils import logging
+from nemo.utils.exp_manager import exp_manager
+
+
+@hydra_runner(config_path="conf", config_name="megatron_t5_speechllm_inference.yaml")
+def main(cfg) -> None:
+ logging.info("\n\n************** Experiment configuration ***********")
+ logging.info(f'\n{OmegaConf.to_yaml(cfg)}')
+
+ # MegatronTrainerBuilder compat checks
+ if "gradient_as_bucket_view" not in cfg.model:
+ with open_dict(cfg):
+ cfg.model.gradient_as_bucket_view = False
+
+ trainer = MegatronTrainerBuilder(cfg).create_trainer()
+ exp_manager(trainer, cfg.exp_manager)
+
+ # hydra interpolation does not work here as the interpolation key is lost when PTL saves hparams
+ with open_dict(cfg):
+ cfg.model.precision = cfg.trainer.precision
+
+ # load existing or init new soft prompt T5 model
+ checkpoint_path = cfg.get('checkpoint_path', None)
+ assert checkpoint_path is not None, "Please specify checkpoint_path in the config file"
+ model = MegatronT5SpeechLMModel.load_from_checkpoint(
+ checkpoint_path=checkpoint_path, trainer=trainer, cfg=cfg.model
+ )
+ model.eval()
+ model = model.cuda()
+ trainer.test(model)
+
+
+if __name__ == '__main__':
+ main()
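The inference config marks checkpoint_path, model.codecmodel_path, and model.data.test_ds as required (`???`), so they would be supplied as Hydra overrides on the command line. A hypothetical invocation, every path below being a placeholder rather than anything from this PR:

    # Hypothetical invocation (placeholder paths, for illustration only):
    #
    #   python examples/tts/speechllm/megatron_t5_speechllm_inference.py \
    #       checkpoint_path=/results/checkpoints/last.ckpt \
    #       model.codecmodel_path=/models/audio_codec.nemo \
    #       model.data.test_ds=/data/test_manifest.json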
diff --git a/examples/tts/ssl_tts.py b/examples/tts/ssl_tts.py
index a96dccb930ab..a50997a8f432 100644
--- a/examples/tts/ssl_tts.py
+++ b/examples/tts/ssl_tts.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from nemo.collections.common.callbacks import LogEpochTimeCallback
from nemo.collections.tts.models import ssl_tts
diff --git a/examples/tts/tacotron2.py b/examples/tts/tacotron2.py
index a5446c35f775..6c4a15d98ef2 100755
--- a/examples/tts/tacotron2.py
+++ b/examples/tts/tacotron2.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from nemo.collections.common.callbacks import LogEpochTimeCallback
from nemo.collections.tts.models import Tacotron2Model
diff --git a/examples/tts/tacotron2_finetune.py b/examples/tts/tacotron2_finetune.py
index a0531f1f2801..f8d4d1dcaad0 100644
--- a/examples/tts/tacotron2_finetune.py
+++ b/examples/tts/tacotron2_finetune.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from nemo.collections.common.callbacks import LogEpochTimeCallback
from nemo.collections.tts.models import Tacotron2Model
diff --git a/examples/tts/univnet.py b/examples/tts/univnet.py
index 91aafa661842..ac6949405fd5 100644
--- a/examples/tts/univnet.py
+++ b/examples/tts/univnet.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from nemo.collections.common.callbacks import LogEpochTimeCallback
from nemo.collections.tts.models import UnivNetModel
diff --git a/examples/tts/vits.py b/examples/tts/vits.py
index 75e0d827018a..6eeebd3ea15a 100644
--- a/examples/tts/vits.py
+++ b/examples/tts/vits.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from nemo.collections.tts.models.vits import VitsModel
from nemo.core.config import hydra_runner
diff --git a/examples/tts/waveglow.py b/examples/tts/waveglow.py
index 66b13491abd4..3bcd008ab5e0 100755
--- a/examples/tts/waveglow.py
+++ b/examples/tts/waveglow.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from nemo.collections.common.callbacks import LogEpochTimeCallback
from nemo.collections.tts.models import WaveGlowModel
diff --git a/examples/vision/convert_ckpt_to_nemo.py b/examples/vision/convert_ckpt_to_nemo.py
index 14876f6931f9..e0cf773f98c2 100644
--- a/examples/vision/convert_ckpt_to_nemo.py
+++ b/examples/vision/convert_ckpt_to_nemo.py
@@ -28,8 +28,8 @@
from argparse import ArgumentParser
import torch
-from pytorch_lightning.plugins.environments import TorchElasticEnvironment
-from pytorch_lightning.trainer.trainer import Trainer
+from lightning.pytorch.plugins.environments import TorchElasticEnvironment
+from lightning.pytorch.trainer.trainer import Trainer
from nemo.collections.nlp.parts.nlp_overrides import NLPSaveRestoreConnector
from nemo.collections.vision.models.megatron_vit_classification_models import MegatronVitClassificationModel
diff --git a/examples/vision/vision_transformer/megatron_vit_classification_evaluate.py b/examples/vision/vision_transformer/megatron_vit_classification_evaluate.py
index e827e4db73c7..f7c384809702 100644
--- a/examples/vision/vision_transformer/megatron_vit_classification_evaluate.py
+++ b/examples/vision/vision_transformer/megatron_vit_classification_evaluate.py
@@ -15,9 +15,9 @@
import os
import torch
+from lightning.pytorch import Trainer
+from lightning.pytorch.plugins.environments import TorchElasticEnvironment
from omegaconf.omegaconf import OmegaConf, open_dict
-from pytorch_lightning import Trainer
-from pytorch_lightning.plugins.environments import TorchElasticEnvironment
from torch.utils.data import DataLoader
from tqdm import tqdm
@@ -38,7 +38,8 @@ def main(cfg) -> None:
plugins = []
strategy = NLPDDPStrategy(
- no_ddp_communication_hook=True, find_unused_parameters=False, # we don't use DDP for async grad allreduce
+ no_ddp_communication_hook=True,
+ find_unused_parameters=False, # we don't use DDP for async grad allreduce
)
if cfg.get('cluster_type', None) == 'BCP':
plugins.append(TorchElasticEnvironment())
@@ -82,7 +83,10 @@ def main(cfg) -> None:
model.eval()
val_transform = ClassificationTransform(model.cfg, (model.cfg.img_h, model.cfg.img_w), train=False)
- val_data = ImageFolder(root=cfg.model.data.imagenet_val, transform=val_transform,)
+ val_data = ImageFolder(
+ root=cfg.model.data.imagenet_val,
+ transform=val_transform,
+ )
def dummy():
return
@@ -91,12 +95,20 @@ def dummy():
trainer.strategy.launcher.launch(dummy, trainer=trainer)
trainer.strategy.setup_environment()
- test_loader = DataLoader(val_data, batch_size=cfg.model.micro_batch_size, num_workers=cfg.model.data.num_workers,)
+ test_loader = DataLoader(
+ val_data,
+ batch_size=cfg.model.micro_batch_size,
+ num_workers=cfg.model.data.num_workers,
+ )
autocast_dtype = torch_dtype_from_precision(trainer.precision)
- with torch.no_grad(), torch.cuda.amp.autocast(
- enabled=autocast_dtype in (torch.half, torch.bfloat16), dtype=autocast_dtype,
+ with (
+ torch.no_grad(),
+ torch.cuda.amp.autocast(
+ enabled=autocast_dtype in (torch.half, torch.bfloat16),
+ dtype=autocast_dtype,
+ ),
):
total = correct = 0.0
for tokens, labels in tqdm(test_loader):
diff --git a/examples/vision/vision_transformer/megatron_vit_classification_infer.py b/examples/vision/vision_transformer/megatron_vit_classification_infer.py
index a757eb7a1c1f..f50ccf1c325c 100644
--- a/examples/vision/vision_transformer/megatron_vit_classification_infer.py
+++ b/examples/vision/vision_transformer/megatron_vit_classification_infer.py
@@ -16,10 +16,10 @@
import os
import torch
+from lightning.pytorch import Trainer
+from lightning.pytorch.plugins.environments import TorchElasticEnvironment
from omegaconf.omegaconf import OmegaConf, open_dict
from PIL import Image
-from pytorch_lightning import Trainer
-from pytorch_lightning.plugins.environments import TorchElasticEnvironment
from torch.utils.data import DataLoader, Dataset
from nemo.collections.nlp.parts.nlp_overrides import NLPDDPStrategy, NLPSaveRestoreConnector
@@ -63,7 +63,8 @@ def main(cfg) -> None:
plugins = []
strategy = NLPDDPStrategy(
- no_ddp_communication_hook=True, find_unused_parameters=False, # we don't use DDP for async grad allreduce
+ no_ddp_communication_hook=True,
+ find_unused_parameters=False, # we don't use DDP for async grad allreduce
)
if cfg.get('cluster_type', None) == 'BCP':
plugins.append(TorchElasticEnvironment())
@@ -107,7 +108,10 @@ def main(cfg) -> None:
model.eval()
test_transform = ClassificationTransform(cfg.model, (model_cfg.img_h, model_cfg.img_w), train=False)
- test_data = ImageFolderDataset(folder_path=cfg.data_path, transform=test_transform,)
+ test_data = ImageFolderDataset(
+ folder_path=cfg.data_path,
+ transform=test_transform,
+ )
test_loader = DataLoader(test_data, batch_size=8)
def dummy():
@@ -119,8 +123,12 @@ def dummy():
autocast_dtype = torch_dtype_from_precision(trainer.precision)
- with torch.no_grad(), torch.cuda.amp.autocast(
- enabled=autocast_dtype in (torch.half, torch.bfloat16), dtype=autocast_dtype,
+ with (
+ torch.no_grad(),
+ torch.cuda.amp.autocast(
+ enabled=autocast_dtype in (torch.half, torch.bfloat16),
+ dtype=autocast_dtype,
+ ),
):
class_names = []
for tokens in test_loader:
diff --git a/nemo/README.md b/nemo/README.md
index a6025e77822a..ebc23f4d5803 100644
--- a/nemo/README.md
+++ b/nemo/README.md
@@ -2,7 +2,12 @@ NeMo (**Ne**ural **Mo**dules) is a toolkit for creating AI applications built ar
**NeMo Core** provides common APIs all modules and models have to implement.
-**NeMo Collections**
+**NeMo 2.0 Collections**
+
+* LLM - A collection of data modules, models, configurations, and recipes for building training and parameter-efficient fine-tuning (PEFT) pipelines, including decoder-only models like those in the Llama, Gemma, and Mamba families.
+* VLM - A collection of data modules, models, configurations, and recipes for training and PEFT pipelines in vision-language models.
+
+**NeMo 1.0 Collections**
* ASR - collection of modules and models for building speech recognition networks
* TTS - collection of modules and models for building speech synthesis networks
diff --git a/nemo/collections/asr/data/audio_to_text.py b/nemo/collections/asr/data/audio_to_text.py
index d5ece6202da7..542f0fe70eca 100644
--- a/nemo/collections/asr/data/audio_to_text.py
+++ b/nemo/collections/asr/data/audio_to_text.py
@@ -42,6 +42,7 @@
is_datastore_path,
is_tarred_path,
)
+from nemo.utils.decorators import deprecated
from nemo.utils.distributed import webdataset_split_by_workers
from nemo.utils.get_rank import is_global_rank_zero
@@ -730,6 +731,9 @@ def __call__(self, *args):
)
+@deprecated(
+ explanation='Webdataset support will be removed in v2.1.0; please use the LhotseSpeechToTextBpeDataset class instead'
+)
class _TarredAudioToTextDataset(IterableDataset):
"""
A similar Dataset to the AudioToCharDataset/AudioToBPEDataset, but which loads tarred audio files.
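The @deprecated(explanation=...) decorator applied above comes from nemo.utils.decorators. As a rough sketch of the idea — an assumption, not NeMo's actual implementation — such a decorator can be written as:

    import functools
    import warnings

    def deprecated(explanation: str = ""):
        """Emit a DeprecationWarning when the wrapped callable is used (sketch)."""
        def decorator(obj):
            @functools.wraps(obj)
            def wrapped(*args, **kwargs):
                warnings.warn(
                    f"{obj.__name__} is deprecated. {explanation}",
                    DeprecationWarning,
                    stacklevel=2,
                )
                return obj(*args, **kwargs)
            return wrapped
        return decorator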
diff --git a/nemo/collections/asr/data/audio_to_text_dataset.py b/nemo/collections/asr/data/audio_to_text_dataset.py
index c63c73323797..76537a8b2b78 100644
--- a/nemo/collections/asr/data/audio_to_text_dataset.py
+++ b/nemo/collections/asr/data/audio_to_text_dataset.py
@@ -19,9 +19,9 @@
from typing import Any, List, Optional, Union
import torch
+from lightning.pytorch.callbacks import BasePredictionWriter
from omegaconf import DictConfig, OmegaConf, open_dict
from omegaconf.listconfig import ListConfig
-from pytorch_lightning.callbacks import BasePredictionWriter
from torch.utils.data import ChainDataset
from nemo.collections.asr.data import audio_to_text, audio_to_text_dali
diff --git a/nemo/collections/asr/models/aed_multitask_models.py b/nemo/collections/asr/models/aed_multitask_models.py
index fc3662b04bc8..969966839dde 100644
--- a/nemo/collections/asr/models/aed_multitask_models.py
+++ b/nemo/collections/asr/models/aed_multitask_models.py
@@ -21,8 +21,8 @@
import numpy as np
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, ListConfig, OmegaConf, open_dict
-from pytorch_lightning import Trainer
from torch.utils.data import DataLoader
from nemo.collections.asr.data.audio_to_text_lhotse_prompted import (
@@ -40,7 +40,6 @@
from nemo.collections.asr.parts.preprocessing.segment import ChannelSelectorType
from nemo.collections.asr.parts.submodules.multitask_decoding import MultiTaskDecoding, MultiTaskDecodingConfig
from nemo.collections.asr.parts.submodules.token_classifier import TokenClassifier
-from nemo.collections.asr.parts.utils import manifest_utils
from nemo.collections.asr.parts.utils.rnnt_utils import Hypothesis
from nemo.collections.common import tokenizers
from nemo.collections.common.data.lhotse.dataloader import get_lhotse_dataloader_from_config
@@ -61,11 +60,15 @@
SpectrogramType,
)
from nemo.utils import logging, model_utils
+from nemo.utils.decorators import deprecated
__all__ = ['EncDecMultiTaskModel']
def lens_to_mask(lens, max_length):
+ """
+ Create a mask from a tensor of lengths.
+ """
batch_size = lens.shape[0]
mask = torch.arange(max_length).repeat(batch_size, 1).to(lens.device) < lens[:, None]
return mask
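For reference, a tiny usage example of `lens_to_mask` as defined above, with the output worked out by hand:

    import torch

    lens = torch.tensor([2, 4])
    mask = lens_to_mask(lens, max_length=5)
    # mask:
    # tensor([[ True,  True, False, False, False],
    #         [ True,  True,  True,  True, False]])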
@@ -220,7 +223,8 @@ def __init__(self, cfg: DictConfig, trainer: Trainer = None):
self.val_loss = GlobalAverageLossMetric(dist_sync_on_step=False, take_avg_loss=True)
- # TODO: PytorchMetrics lets you join two metrics together to save compute. But need to make wer and bleu have same outputs first
+ # TODO: PytorchMetrics lets you join two metrics together to save compute.
+ # But need to make wer and bleu have same outputs first
self.wer = WER(self.decoding, log_prediction=self.cfg.get("log_prediction"))
self.bleu = BLEU(
self.decoding, tokenize=self.cfg.get('bleu_tokenizer', "13a"), log_prediction=False
@@ -268,13 +272,15 @@ def change_vocabulary(
prompt_format: Optional[str] = None,
):
"""
- Changes vocabulary used during AED decoding process. Use this method when fine-tuning on from pre-trained model.
- This method changes only decoder and leaves encoder and pre-processing modules unchanged. For example, you would
- use it if you want to use pretrained encoder when fine-tuning on data in another language, or when you'd need
- model to learn capitalization, punctuation and/or special characters.
+ Changes vocabulary used during AED decoding process. Use this method when fine-tuning
+ from a pre-trained model. This method changes only the decoder and leaves the encoder and pre-processing
+ modules unchanged. For example, you would use it if you want to use a pretrained encoder when
+ fine-tuning on data in another language, or when you'd need the model to learn capitalization,
+ punctuation and/or special characters.
Args:
- new_tokenizer_dir: Directory path to tokenizer or a config for a new tokenizer (if the tokenizer type is `agg`)
+ new_tokenizer_dir: Directory path to tokenizer or a config for a new tokenizer
+ (if the tokenizer type is `agg`)
new_tokenizer_type: Type of tokenizer. Can be either `agg`, `bpe` or `wpe`.
decoding_cfg: A config for the decoding, which is optional. If the decoding type
needs to be changed (from say Greedy to Beam decoding etc), the config can be passed here.
@@ -289,7 +295,8 @@ def change_vocabulary(
new_tokenizer_cfg = new_tokenizer_dir
else:
raise ValueError(
- f'New tokenizer dir should be a string unless the tokenizer is `agg`, but this tokenizer type is: {new_tokenizer_type}'
+ f'New tokenizer dir should be a string unless the tokenizer is `agg`, '
+ f'but this tokenizer type is: {new_tokenizer_type}'
)
else:
new_tokenizer_cfg = None
@@ -455,13 +462,15 @@ def transcribe(
channel_selector: Optional[ChannelSelectorType] = None,
augmentor: DictConfig = None,
verbose: bool = True,
+ timestamps: Optional[bool] = None,
override_config: Optional[MultiTaskTranscriptionConfig] = None,
**prompt,
) -> Union[List[str], List[Hypothesis]]:
"""
Uses greedy decoding to transcribe audio files. Use this method for debugging and prototyping.
Args:
- audio: (a single or list) of paths to audio files or a np.ndarray/tensor audio array or path to a manifest file.
+ audio: (a single or list) of paths to audio files or a np.ndarray/tensor audio array or path
+ to a manifest file.
Can also be a dataloader object that provides values that can be consumed by the model.
Recommended length per file is between 5 and 25 seconds. \
But it is possible to pass a few hours long file if enough GPU memory is available.
@@ -470,15 +479,30 @@ def transcribe(
return_hypotheses: (bool) Either return hypotheses or text
With hypotheses can do some postprocessing like getting timestamp or rescoring
num_workers: (int) number of workers for DataLoader
- channel_selector (int | Iterable[int] | str): select a single channel or a subset of channels from multi-channel audio. If set to `'average'`, it performs averaging across channels. Disabled if set to `None`. Defaults to `None`.
+ channel_selector (int | Iterable[int] | str): select a single channel or a subset of channels
+ from multi-channel audio. If set to `'average'`, it performs averaging across channels.
+ Disabled if set to `None`. Defaults to `None`.
augmentor: (DictConfig): Augment audio samples during transcription if augmentor is applied.
+ timestamps: Optional[bool]: timestamps will be returned if set to True as part of the hypothesis
+ object (output.timestep['segment']/output.timestep['word']). Refer to `Hypothesis` class
+ for more details. Default is None, which retains the previous state set by
+ self.change_decoding_strategy().
+ Note: Currently it is not supported for AED models.
verbose: (bool) whether to display tqdm progress bar
- override_config: (Optional[MultiTaskTranscriptionConfig]) A config to override the default config.
- **prompt: Optional input to construct the prompts for the model. Accepted formats are: 1) legacy Canary-1B API source_lang=, target_lang=, etc. 2) explicit single-turn role=, slots={: , ...} 3) explicit multi-turn: turns=[{"role": , "slots": {: , ...}}]
+ override_config: (Optional[MultiTaskTranscriptionConfig]) A config to override the
+ default config.
+ **prompt: Optional input to construct the prompts for the model. Accepted formats are:
+ 1) legacy Canary-1B API source_lang=, target_lang=, etc.
+ 2) explicit single-turn role=, slots={: , ...}
+ 3) explicit multi-turn: turns=[{"role": , "slots": {: , ...}}]
Returns:
- A list of transcriptions (or raw log probabilities if logprobs is True) in the same order as paths2audio_files
+ A list of transcriptions (or raw log probabilities if logprobs is True) in the same order
+ as paths2audio_files
"""
+ if timestamps:
+ raise NotImplementedError("Computing timestamps is not supported for this model yet.")
+
if override_config is None:
trcfg = MultiTaskTranscriptionConfig(
batch_size=batch_size,
@@ -886,6 +910,10 @@ def _transcribe_forward(
decoder_input_ids=decoder_input_ids,
)
+ @deprecated(
+ explanation='The return type of this method will be updated in the upcoming release to ensure a consistent '
+ 'output format across all decoder types, such that a Hypothesis object is always returned.'
+ )
def _transcribe_output_processing(self, outputs, trcfg: MultiTaskTranscriptionConfig) -> GenericTranscriptionType:
"""
Internal function to process the model's outputs to return the results to the user. This function is called by
diff --git a/nemo/collections/asr/models/classification_models.py b/nemo/collections/asr/models/classification_models.py
index 7b226f59e364..f84ece6d24ce 100644
--- a/nemo/collections/asr/models/classification_models.py
+++ b/nemo/collections/asr/models/classification_models.py
@@ -21,8 +21,8 @@
from typing import Any, Dict, List, Optional, Union
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, ListConfig, OmegaConf
-from pytorch_lightning import Trainer
from torch.utils.data import DataLoader
from torchmetrics import Accuracy
from torchmetrics.regression import MeanAbsoluteError, MeanSquaredError
@@ -39,6 +39,7 @@
from nemo.core.neural_types import *
from nemo.utils import logging, model_utils
from nemo.utils.cast_utils import cast_all
+from nemo.utils.decorators import deprecated
__all__ = ['EncDecClassificationModel', 'EncDecRegressionModel']
@@ -483,6 +484,7 @@ def get_transcribe_config(cls) -> ClassificationInferConfig:
return ClassificationInferConfig()
+@deprecated(explanation='EncDecClassificationModel will be merged with EncDecSpeakerLabelModel class.')
class EncDecClassificationModel(_EncDecBaseModel):
"""Encoder decoder Classification models."""
diff --git a/nemo/collections/asr/models/clustering_diarizer.py b/nemo/collections/asr/models/clustering_diarizer.py
index ddcc269bedcc..1f03cec59af7 100644
--- a/nemo/collections/asr/models/clustering_diarizer.py
+++ b/nemo/collections/asr/models/clustering_diarizer.py
@@ -22,8 +22,8 @@
from typing import Any, List, Optional, Union
import torch
+from lightning.pytorch.utilities import rank_zero_only
from omegaconf import DictConfig, OmegaConf
-from pytorch_lightning.utilities import rank_zero_only
from tqdm import tqdm
from nemo.collections.asr.metrics.der import score_labels
@@ -49,7 +49,6 @@
from nemo.core.classes import Model
from nemo.utils import logging, model_utils
-
__all__ = ['ClusteringDiarizer']
_MODEL_CONFIG_YAML = "model_config.yaml"
diff --git a/nemo/collections/asr/models/confidence_ensemble.py b/nemo/collections/asr/models/confidence_ensemble.py
index 9ae3bc3fbb5d..932d221be0f8 100644
--- a/nemo/collections/asr/models/confidence_ensemble.py
+++ b/nemo/collections/asr/models/confidence_ensemble.py
@@ -18,8 +18,8 @@
import joblib
import numpy as np
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, open_dict
-from pytorch_lightning import Trainer
from nemo.collections.asr.models.asr_model import ASRModel
from nemo.collections.asr.models.hybrid_rnnt_ctc_models import EncDecHybridRNNTCTCModel
@@ -33,6 +33,7 @@
from nemo.collections.asr.parts.utils.rnnt_utils import Hypothesis
from nemo.core.classes import ModelPT
from nemo.utils import model_utils
+from nemo.utils.decorators import deprecated
# frozen is required to allow hashing of this class and use it
@@ -151,6 +152,7 @@ def compute_confidence(hypothesis: Hypothesis, confidence_cfg: ConfidenceConfig)
return conf_value
+@deprecated(version='v2.1.0')
class ConfidenceEnsembleModel(ModelPT):
"""Implementation of the confidence ensemble model.
diff --git a/nemo/collections/asr/models/ctc_bpe_models.py b/nemo/collections/asr/models/ctc_bpe_models.py
index 2e313ce3c928..79c22794de01 100644
--- a/nemo/collections/asr/models/ctc_bpe_models.py
+++ b/nemo/collections/asr/models/ctc_bpe_models.py
@@ -209,12 +209,14 @@ def change_vocabulary(
"""
Changes vocabulary of the tokenizer used during CTC decoding process.
Use this method when fine-tuning on from pre-trained model.
- This method changes only decoder and leaves encoder and pre-processing modules unchanged. For example, you would
- use it if you want to use pretrained encoder when fine-tuning on a data in another language, or when you'd need
- model to learn capitalization, punctuation and/or special characters.
+ This method changes only the decoder and leaves the encoder and pre-processing modules unchanged.
+ For example, you would use it if you want to use a pretrained encoder when fine-tuning on
+ data in another language, or when you'd need the model to learn capitalization, punctuation
+ and/or special characters.
Args:
- new_tokenizer_dir: Directory path to tokenizer or a config for a new tokenizer (if the tokenizer type is `agg`)
+ new_tokenizer_dir: Directory path to tokenizer or a config for a new tokenizer
+ (if the tokenizer type is `agg`)
new_tokenizer_type: Either `agg`, `bpe` or `wpe`. `bpe` is used for SentencePiece tokenizers,
whereas `wpe` is used for `BertTokenizer`.
new_tokenizer_cfg: A config for the new tokenizer. if provided, pre-empts the dir and type
@@ -227,7 +229,8 @@ def change_vocabulary(
new_tokenizer_cfg = new_tokenizer_dir
else:
raise ValueError(
- f'New tokenizer dir should be a string unless the tokenizer is `agg`, but this tokenizer type is: {new_tokenizer_type}'
+ f'New tokenizer dir should be a string unless the tokenizer is `agg`, '
+ f'but this tokenizer type is: {new_tokenizer_type}'
)
else:
new_tokenizer_cfg = None
@@ -307,13 +310,14 @@ def change_vocabulary(
logging.info(f"Changed tokenizer to {self.decoder.vocabulary} vocabulary.")
- def change_decoding_strategy(self, decoding_cfg: DictConfig):
+ def change_decoding_strategy(self, decoding_cfg: DictConfig, verbose: bool = True):
"""
Changes decoding strategy used during CTC decoding process.
Args:
decoding_cfg: A config for the decoder, which is optional. If the decoding type
needs to be changed (from say Greedy to Beam decoding etc), the config can be passed here.
+ verbose: Whether to print the new config or not.
"""
if decoding_cfg is None:
# Assume same decoding config as before
@@ -343,7 +347,8 @@ def change_decoding_strategy(self, decoding_cfg: DictConfig):
with open_dict(self.cfg.decoding):
self.cfg.decoding = decoding_cfg
- logging.info(f"Changed decoding strategy to \n{OmegaConf.to_yaml(self.cfg.decoding)}")
+ if verbose:
+ logging.info(f"Changed decoding strategy to \n{OmegaConf.to_yaml(self.cfg.decoding)}")
@classmethod
def list_available_models(cls) -> List[PretrainedModelInfo]:
@@ -378,7 +383,7 @@ def list_available_models(cls) -> List[PretrainedModelInfo]:
model = PretrainedModelInfo(
pretrained_model_name="stt_en_citrinet_256_gamma_0_25",
- description="For details about this model, please visit https://ngc.nvidia.com/catalog/models/nvidia:nemo:stt_en_citrinet_256_gamma_0_25",
+ description="For details about this model, please visit https://ngc.nvidia.com/catalog/models/nvidia:\nemo:stt_en_citrinet_256_gamma_0_25",
location="https://api.ngc.nvidia.com/v2/models/nvidia/nemo/stt_en_citrinet_256_gamma_0_25/versions/1.0.0/files/stt_en_citrinet_256_gamma_0_25.nemo",
)
results.append(model)
diff --git a/nemo/collections/asr/models/ctc_models.py b/nemo/collections/asr/models/ctc_models.py
index 4976c09b44c1..76dcd13cca50 100644
--- a/nemo/collections/asr/models/ctc_models.py
+++ b/nemo/collections/asr/models/ctc_models.py
@@ -12,18 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
-import json
import os
-import tempfile
from math import ceil
from typing import Any, Dict, List, Optional, Union
import numpy as np
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf, open_dict
-from pytorch_lightning import Trainer
from torch.utils.data import DataLoader
-from tqdm.auto import tqdm
from nemo.collections.asr.data import audio_to_text_dataset
from nemo.collections.asr.data.audio_to_text import _AudioTextDataset
@@ -37,12 +34,14 @@
from nemo.collections.asr.parts.preprocessing.segment import ChannelSelectorType
from nemo.collections.asr.parts.submodules.ctc_decoding import CTCDecoding, CTCDecodingConfig
from nemo.collections.asr.parts.utils.asr_batching import get_semi_sorted_batch_sampler
+from nemo.collections.asr.parts.utils.transcribe_utils import process_timestamp_outputs
from nemo.collections.common.data.lhotse import get_lhotse_dataloader_from_config
from nemo.collections.common.parts.preprocessing.parsers import make_parser
from nemo.core.classes.common import PretrainedModelInfo, typecheck
from nemo.core.classes.mixins import AccessMixin
from nemo.core.neural_types import AudioSignal, LabelsType, LengthsType, LogprobsType, NeuralType, SpectrogramType
from nemo.utils import logging
+from nemo.utils.decorators import deprecated
__all__ = ['EncDecCTCModel']
@@ -127,13 +126,15 @@ def transcribe(
channel_selector: Optional[ChannelSelectorType] = None,
augmentor: DictConfig = None,
verbose: bool = True,
+ timestamps: Optional[bool] = None,
override_config: Optional[TranscribeConfig] = None,
) -> TranscriptionReturnType:
"""
Uses greedy decoding to transcribe audio files. Use this method for debugging and prototyping.
Args:
- audio: (a single or list) of paths to audio files or a np.ndarray/tensor audio array or path to a manifest file.
+ audio: (a single or list) of paths to audio files or a np.ndarray/tensor audio array or
+ path to a manifest file.
Can also be a dataloader object that provides values that can be consumed by the model.
Recommended length per file is between 5 and 25 seconds. \
But it is possible to pass a few hours long file if enough GPU memory is available.
@@ -142,16 +143,42 @@ def transcribe(
return_hypotheses: (bool) Either return hypotheses or text
With hypotheses can do some postprocessing like getting timestamp or rescoring
num_workers: (int) number of workers for DataLoader
- channel_selector (int | Iterable[int] | str): select a single channel or a subset of channels from multi-channel audio. If set to `'average'`, it performs averaging across channels. Disabled if set to `None`. Defaults to `None`.
+ channel_selector (int | Iterable[int] | str): select a single channel or a subset of channels
+ from multi-channel audio. If set to `'average'`, it performs averaging across channels.
+ Disabled if set to `None`. Defaults to `None`.
augmentor: (DictConfig): Augment audio samples during transcription if augmentor is applied.
+ timestamps: Optional[bool]: timestamps will be returned if set to True as part of the hypothesis
+ object (output.timestep['segment']/output.timestep['word']). Refer to `Hypothesis` class
+ for more details. Default is None, which retains the previous state set by
+ self.change_decoding_strategy().
verbose: (bool) whether to display tqdm progress bar
override_config: (Optional[TranscribeConfig]) override transcription config pre-defined by the user.
**Note**: All other arguments in the function will be ignored if override_config is passed.
You should call this argument as `model.transcribe(audio, override_config=TranscribeConfig(...))`.
Returns:
- A list of transcriptions (or raw log probabilities if logprobs is True) in the same order as paths2audio_files
+ A list of transcriptions (or raw log probabilities if logprobs is True) in the same order as
+ paths2audio_files
"""
+ timestamps = timestamps or (override_config.timestamps if override_config is not None else None)
+ if timestamps is not None:
+ # else retain the decoder state (users can set it using change_decoding_strategy)
+ if timestamps or (override_config is not None and override_config.timestamps):
+ logging.info(
+ "Timestamps requested, setting decoding timestamps to True. Capture them in Hypothesis object, \
+ with output[idx].timestep['word'/'segment'/'char']"
+ )
+ return_hypotheses = True
+ with open_dict(self.cfg.decoding):
+ self.cfg.decoding.compute_timestamps = True
+ self.cfg.decoding.preserve_alignments = True
+ self.change_decoding_strategy(self.cfg.decoding, verbose=False)
+ else: # This is done to ensure the state is preserved when decoding_strategy is set outside
+ with open_dict(self.cfg.decoding):
+ self.cfg.decoding.compute_timestamps = self.cfg.decoding.get('compute_timestamps', False)
+ self.cfg.decoding.preserve_alignments = self.cfg.decoding.get('preserve_alignments', False)
+ self.change_decoding_strategy(self.cfg.decoding, verbose=False)
+
return super().transcribe(
audio=audio,
batch_size=batch_size,
@@ -160,6 +187,7 @@ def transcribe(
channel_selector=channel_selector,
augmentor=augmentor,
verbose=verbose,
+ timestamps=timestamps,
override_config=override_config,
)
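Together with the decoding-config handling above, the new `timestamps` argument makes word- and segment-level timestamps available in a single call. A usage sketch, assuming a pretrained CTC checkpoint (the model name is illustrative):

    from nemo.collections.asr.models import EncDecCTCModelBPE

    model = EncDecCTCModelBPE.from_pretrained("stt_en_citrinet_256")  # illustrative checkpoint
    hyps = model.transcribe(["sample.wav"], timestamps=True)  # Hypothesis objects returned
    print(hyps[0].text)
    print(hyps[0].timestep["word"])     # per-word offsets
    print(hyps[0].timestep["segment"])  # per-segment offsets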
@@ -234,13 +262,14 @@ def change_vocabulary(self, new_vocabulary: List[str], decoding_cfg: Optional[Di
logging.info(f"Changed decoder to output to {self.decoder.vocabulary} vocabulary.")
- def change_decoding_strategy(self, decoding_cfg: DictConfig):
+ def change_decoding_strategy(self, decoding_cfg: DictConfig, verbose: bool = True):
"""
Changes decoding strategy used during CTC decoding process.
Args:
decoding_cfg: A config for the decoder, which is optional. If the decoding type
needs to be changed (from say Greedy to Beam decoding etc), the config can be passed here.
+ verbose: (bool) whether to display logging information
"""
if decoding_cfg is None:
# Assume same decoding config as before
@@ -269,7 +298,8 @@ def change_decoding_strategy(self, decoding_cfg: DictConfig):
with open_dict(self.cfg.decoding):
self.cfg.decoding = decoding_cfg
- logging.info(f"Changed decoding strategy to \n{OmegaConf.to_yaml(self.cfg.decoding)}")
+ if verbose:
+ logging.info(f"Changed decoding strategy to \n{OmegaConf.to_yaml(self.cfg.decoding)}")
def _setup_dataloader_from_config(self, config: Optional[Dict]):
# Automatically inject args from model config to dataloader config
@@ -668,6 +698,10 @@ def _transcribe_forward(self, batch: Any, trcfg: TranscribeConfig):
del greedy_predictions
return output
+ @deprecated(
+ explanation='The return type of this method will be updated in the upcoming release to ensure a consistent '
+ 'output format across all decoder types, such that a Hypothesis object is always returned.'
+ )
def _transcribe_output_processing(self, outputs, trcfg: TranscribeConfig) -> GenericTranscriptionType:
logits = outputs.pop('logits')
logits_len = outputs.pop('logits_len')
@@ -701,6 +735,14 @@ def _transcribe_output_processing(self, outputs, trcfg: TranscribeConfig) -> Gen
# cleanup memory
del logits, logits_len
+ if trcfg.timestamps:
+ current_hypotheses = process_timestamp_outputs(
+ current_hypotheses, self.encoder.subsampling_factor, self.cfg['preprocessor']['window_stride']
+ )
+ all_hyp = process_timestamp_outputs(
+ all_hyp, self.encoder.subsampling_factor, self.cfg['preprocessor']['window_stride']
+ )
+
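`process_timestamp_outputs` (imported above from `transcribe_utils`) converts frame-level offsets into audio time: each encoder output frame spans `subsampling_factor * window_stride` seconds. A back-of-the-envelope sketch of the assumed conversion, for orientation only:

    subsampling_factor = 4  # e.g. a convolutional encoder that downsamples 4x
    window_stride = 0.01    # seconds per feature frame, from the preprocessor config
    start_offset = 50       # frame index as reported in hyp.timestep['word']
    start_seconds = start_offset * subsampling_factor * window_stride  # -> 2.0 s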
hypotheses = []
if all_hyp is None:
hypotheses += current_hypotheses
@@ -763,7 +805,11 @@ def list_available_models(cls) -> List[PretrainedModelInfo]:
model = PretrainedModelInfo(
pretrained_model_name="QuartzNet15x5Base-En",
- description="QuartzNet15x5 model trained on six datasets: LibriSpeech, Mozilla Common Voice (validated clips from en_1488h_2019-12-10), WSJ, Fisher, Switchboard, and NSC Singapore English. It was trained with Apex/Amp optimization level O1 for 600 epochs. The model achieves a WER of 3.79% on LibriSpeech dev-clean, and a WER of 10.05% on dev-other. Please visit https://ngc.nvidia.com/catalog/models/nvidia:nemospeechmodels for further details.",
+ description="QuartzNet15x5 model trained on six datasets: LibriSpeech, Mozilla Common Voice \
+ (validated clips from en_1488h_2019-12-10), WSJ, Fisher, Switchboard, and NSC Singapore English. \
+ It was trained with Apex/Amp optimization level O1 for 600 epochs. The model achieves a WER of \
+ 3.79% on LibriSpeech dev-clean, and a WER of 10.05% on dev-other. Please visit \
+ https://ngc.nvidia.com/catalog/models/nvidia:nemospeechmodels for further details.",
location="https://api.ngc.nvidia.com/v2/models/nvidia/nemospeechmodels/versions/1.0.0a5/files/QuartzNet15x5Base-En.nemo",
)
results.append(model)
diff --git a/nemo/collections/asr/models/hybrid_asr_tts_models.py b/nemo/collections/asr/models/hybrid_asr_tts_models.py
index 628395e04f94..89a7e1289675 100644
--- a/nemo/collections/asr/models/hybrid_asr_tts_models.py
+++ b/nemo/collections/asr/models/hybrid_asr_tts_models.py
@@ -19,8 +19,8 @@
from typing import Any, Dict, List, Optional, Tuple, Union, cast
import torch
+from lightning.pytorch import Trainer
from omegaconf import MISSING, DictConfig, OmegaConf, open_dict
-from pytorch_lightning import Trainer
from torch.nn.utils.rnn import pad_sequence
from nemo.collections.asr.data.audio_to_text_dali import DALIOutputs
@@ -324,7 +324,9 @@ def __setattr__(self, name, value):
return super().__setattr__(name, value)
def setup_optimization(
- self, optim_config: Optional[Union[DictConfig, Dict]] = None, optim_kwargs: Optional[Dict[str, Any]] = None,
+ self,
+ optim_config: Optional[Union[DictConfig, Dict]] = None,
+ optim_kwargs: Optional[Dict[str, Any]] = None,
):
"""
Setup optimizer and scheduler. Ensure tts model is frozen.
@@ -430,7 +432,8 @@ def _get_batch_spect(self, batch: Union[TextToTextBatch, TextOrAudioToTextBatch,
elif isinstance(batch, TextOrAudioToTextBatch):
tts_spectrogram, tts_spectrogram_len = self._get_tts_spectrogram(batch.tts_texts, batch.speakers)
asr_spectrogram, asr_spectrogram_len = self.asr_model.preprocessor(
- input_signal=batch.audio_signals, length=batch.audio_signal_lengths,
+ input_signal=batch.audio_signals,
+ length=batch.audio_signal_lengths,
)
spectrogram = pad_sequence(
diff --git a/nemo/collections/asr/models/hybrid_rnnt_ctc_bpe_models.py b/nemo/collections/asr/models/hybrid_rnnt_ctc_bpe_models.py
index 089c34d98884..7e8720ee3ad8 100644
--- a/nemo/collections/asr/models/hybrid_rnnt_ctc_bpe_models.py
+++ b/nemo/collections/asr/models/hybrid_rnnt_ctc_bpe_models.py
@@ -17,8 +17,8 @@
from typing import Dict, List, Optional, Union
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, ListConfig, OmegaConf, open_dict
-from pytorch_lightning import Trainer
from nemo.collections.asr.data import audio_to_text_dataset
from nemo.collections.asr.data.audio_to_text import _AudioTextDataset
@@ -253,10 +253,11 @@ def change_vocabulary(
ctc_decoding_cfg: Optional[DictConfig] = None,
):
"""
- Changes vocabulary used during RNNT decoding process. Use this method when fine-tuning on from pre-trained model.
- This method changes only decoder and leaves encoder and pre-processing modules unchanged. For example, you would
- use it if you want to use pretrained encoder when fine-tuning on data in another language, or when you'd need
- model to learn capitalization, punctuation and/or special characters.
+ Changes vocabulary used during RNNT decoding process. Use this method when fine-tuning
+ from a pre-trained model. This method changes only the decoder and leaves the encoder and pre-processing
+ modules unchanged. For example, you would use it if you want to use a pretrained encoder when
+ fine-tuning on data in another language, or when you'd need the model to learn capitalization,
+ punctuation and/or special characters.
Args:
new_tokenizer_dir: Directory path to tokenizer or a config for a new tokenizer (if the tokenizer type is `agg`)
@@ -415,7 +416,9 @@ def change_vocabulary(
logging.info(f"Changed tokenizer of the CTC decoder to {self.ctc_decoder.vocabulary} vocabulary.")
- def change_decoding_strategy(self, decoding_cfg: DictConfig = None, decoder_type: str = None):
+ def change_decoding_strategy(
+ self, decoding_cfg: DictConfig = None, decoder_type: str = None, verbose: bool = True
+ ):
"""
Changes decoding strategy used during RNNT decoding process.
Args:
@@ -424,6 +427,7 @@ def change_decoding_strategy(self, decoding_cfg: DictConfig = None, decoder_type
decoder_type: (str) Can be set to 'rnnt' or 'ctc' to switch between appropriate decoder in a
model having both RNN-T and CTC decoders. Defaults to None, in which case RNN-T decoder is
used. If set to 'ctc', it raises error if 'ctc_decoder' is not an attribute of the model.
+ verbose: (bool) whether to log the change of the decoder config.
"""
if decoder_type is None or decoder_type == 'rnnt':
if decoding_cfg is None:
@@ -466,7 +470,10 @@ def change_decoding_strategy(self, decoding_cfg: DictConfig = None, decoder_type
self.cfg.decoding = decoding_cfg
self.cur_decoder = "rnnt"
- logging.info(f"Changed decoding strategy of the RNNT decoder to \n{OmegaConf.to_yaml(self.cfg.decoding)}")
+ if verbose:
+ logging.info(
+ f"Changed decoding strategy of the RNNT decoder to \n{OmegaConf.to_yaml(self.cfg.decoding)}"
+ )
elif decoder_type == 'ctc':
if not hasattr(self, 'ctc_decoding'):
@@ -497,9 +504,10 @@ def change_decoding_strategy(self, decoding_cfg: DictConfig = None, decoder_type
self.cfg.aux_ctc.decoding = decoding_cfg
self.cur_decoder = "ctc"
- logging.info(
- f"Changed decoding strategy of the CTC decoder to \n{OmegaConf.to_yaml(self.cfg.aux_ctc.decoding)}"
- )
+ if verbose:
+ logging.info(
+ f"Changed decoding strategy of the CTC decoder to \n{OmegaConf.to_yaml(self.cfg.aux_ctc.decoding)}"
+ )
else:
raise ValueError(f"decoder_type={decoder_type} is not supported. Supported values: [ctc,rnnt]")
diff --git a/nemo/collections/asr/models/hybrid_rnnt_ctc_models.py b/nemo/collections/asr/models/hybrid_rnnt_ctc_models.py
index c14265325985..34dd9aae5711 100644
--- a/nemo/collections/asr/models/hybrid_rnnt_ctc_models.py
+++ b/nemo/collections/asr/models/hybrid_rnnt_ctc_models.py
@@ -19,8 +19,8 @@
from typing import Any, List, Optional, Tuple
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf, open_dict
-from pytorch_lightning import Trainer
from tqdm.auto import tqdm
from nemo.collections.asr.data.audio_to_text_dali import DALIOutputs
@@ -31,6 +31,7 @@
from nemo.collections.asr.parts.mixins.transcription import TranscriptionReturnType
from nemo.collections.asr.parts.preprocessing.segment import ChannelSelectorType
from nemo.collections.asr.parts.submodules.ctc_decoding import CTCDecoding, CTCDecodingConfig
+from nemo.collections.asr.parts.utils.transcribe_utils import process_timestamp_outputs
from nemo.core.classes.common import PretrainedModelInfo
from nemo.core.classes.mixins import AccessMixin
from nemo.utils import logging, model_utils
@@ -104,6 +105,7 @@ def transcribe(
channel_selector: Optional[ChannelSelectorType] = None,
augmentor: DictConfig = None,
verbose: bool = True,
+ timestamps: Optional[bool] = None,
override_config: Optional[TranscribeConfig] = None,
) -> TranscriptionReturnType:
"""
@@ -120,8 +122,13 @@ def transcribe(
return_hypotheses: (bool) Either return hypotheses or text
With hypotheses can do some postprocessing like getting timestamp or rescoring
num_workers: (int) number of workers for DataLoader
- channel_selector (int | Iterable[int] | str): select a single channel or a subset of channels from multi-channel audio. If set to `'average'`, it performs averaging across channels. Disabled if set to `None`. Defaults to `None`. Uses zero-based indexing.
+ channel_selector (int | Iterable[int] | str): select a single channel or a subset of
+ channels from multi-channel audio. If set to `'average'`, it performs averaging across channels.
+ Disabled if set to `None`. Defaults to `None`. Uses zero-based indexing.
augmentor: (DictConfig): Augment audio samples during transcription if augmentor is applied.
+ timestamps: Optional[bool]: timestamps will be returned if set to True as part of the hypothesis object
+ (output.timestep['segment']/output.timestep['word']). Refer to `Hypothesis` class for more details.
+ Default is None, which retains the previous state set by self.change_decoding_strategy().
verbose: (bool) whether to display tqdm progress bar
logprobs: (bool) whether to return ctc logits instead of hypotheses
@@ -130,10 +137,29 @@ def transcribe(
* A list of greedy transcript texts / Hypothesis
* An optional list of beam search transcript texts / Hypothesis / NBestHypothesis.
"""
- if self.cur_decoder not in ["ctc", "rnnt"]:
- raise ValueError(
- f"{self.cur_decoder} is not supported for cur_decoder. Supported values are ['ctc', 'rnnt']"
- )
+
+ if timestamps is not None:
+ if self.cur_decoder not in ["ctc", "rnnt"]:
+ raise ValueError(
+ f"{self.cur_decoder} is not supported for cur_decoder. Supported values are ['ctc', 'rnnt']"
+ )
+ decoding_cfg = self.cfg.aux_ctc.decoding if self.cur_decoder == "ctc" else self.cfg.decoding
+ if timestamps or (override_config is not None and override_config.timestamps):
+ logging.info(
+ "Timestamps requested, setting decoding timestamps to True. Capture them in Hypothesis object, \
+ with output[idx].timestep['word'/'segment'/'char']"
+ )
+ return_hypotheses = True
+ with open_dict(decoding_cfg):
+ decoding_cfg.compute_timestamps = True
+ decoding_cfg.preserve_alignments = True
+ self.change_decoding_strategy(decoding_cfg, decoder_type=self.cur_decoder, verbose=False)
+ else:
+ return_hypotheses = False
+ with open_dict(decoding_cfg):
+ decoding_cfg.compute_timestamps = False
+ decoding_cfg.preserve_alignments = False
+ self.change_decoding_strategy(decoding_cfg, decoder_type=self.cur_decoder, verbose=False)
return super().transcribe(
audio=audio,
@@ -144,6 +170,7 @@ def transcribe(
channel_selector=channel_selector,
augmentor=augmentor,
verbose=verbose,
+ timestamps=timestamps,
override_config=override_config,
)
@@ -201,6 +228,14 @@ def _transcribe_output_processing(
# for logit, elen in zip(logits, encoded_len):
# logits_list.append(logit[:elen])
+ if trcfg.timestamps:
+ best_hyp = process_timestamp_outputs(
+ best_hyp, self.encoder.subsampling_factor, self.cfg['preprocessor']['window_stride']
+ )
+ all_hyp = process_timestamp_outputs(
+ all_hyp, self.encoder.subsampling_factor, self.cfg['preprocessor']['window_stride']
+ )
+
del logits, encoded_len
hypotheses = []
@@ -221,10 +256,11 @@ def change_vocabulary(
ctc_decoding_cfg: Optional[DictConfig] = None,
):
"""
- Changes vocabulary used during RNNT decoding process. Use this method when fine-tuning a pre-trained model.
- This method changes only decoder and leaves encoder and pre-processing modules unchanged. For example, you would
- use it if you want to use pretrained encoder when fine-tuning on data in another language, or when you'd need
- model to learn capitalization, punctuation and/or special characters.
+ Changes vocabulary used during RNNT decoding process. Use this method when fine-tuning a
+ pre-trained model. This method changes only the decoder and leaves the encoder and pre-processing
+ modules unchanged. For example, you would use it if you want to use a pretrained encoder
+ when fine-tuning on data in another language, or when you'd need the model to learn capitalization,
+ punctuation and/or special characters.
Args:
new_vocabulary: list with new vocabulary. Must contain at least 2 elements. Typically, \
@@ -295,7 +331,9 @@ def change_vocabulary(
logging.info(f"Changed the tokenizer of the CTC decoder to {self.ctc_decoder.vocabulary} vocabulary.")
- def change_decoding_strategy(self, decoding_cfg: DictConfig = None, decoder_type: str = None):
+ def change_decoding_strategy(
+ self, decoding_cfg: DictConfig = None, decoder_type: str = None, verbose: bool = True
+ ):
"""
Changes decoding strategy used during RNNT decoding process.
@@ -305,10 +343,11 @@ def change_decoding_strategy(self, decoding_cfg: DictConfig = None, decoder_type
decoder_type: (str) Can be set to 'rnnt' or 'ctc' to switch between appropriate decoder in a
model having RNN-T and CTC decoders. Defaults to None, in which case RNN-T decoder is
used. If set to 'ctc', it raises error if 'ctc_decoder' is not an attribute of the model.
+ verbose: (bool) whether to display logging information
"""
if decoder_type is None or decoder_type == 'rnnt':
self.cur_decoder = "rnnt"
- return super().change_decoding_strategy(decoding_cfg=decoding_cfg)
+ return super().change_decoding_strategy(decoding_cfg=decoding_cfg, verbose=verbose)
assert decoder_type == 'ctc' and hasattr(self, 'ctc_decoder')
if decoding_cfg is None:
@@ -337,7 +376,10 @@ def change_decoding_strategy(self, decoding_cfg: DictConfig = None, decoder_type
self.cfg.aux_ctc.decoding = decoding_cfg
self.cur_decoder = "ctc"
- logging.info(f"Changed decoding strategy to \n{OmegaConf.to_yaml(self.cfg.aux_ctc.decoding)}")
+ if verbose:
+ logging.info(f"Changed decoding strategy to \n{OmegaConf.to_yaml(self.cfg.aux_ctc.decoding)}")
+
+ return None
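With `verbose` threaded through both decoder branches, the active head of a hybrid model can now be switched quietly. A usage sketch, assuming a restored hybrid CTC/RNN-T model instance named `model`:

    model.change_decoding_strategy(decoding_cfg=None, decoder_type="ctc", verbose=False)
    assert model.cur_decoder == "ctc"
    model.change_decoding_strategy(decoding_cfg=None, decoder_type="rnnt", verbose=False)
    assert model.cur_decoder == "rnnt"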
# PTL-specific methods
def training_step(self, batch, batch_nb):
diff --git a/nemo/collections/asr/models/k2_sequence_models.py b/nemo/collections/asr/models/k2_sequence_models.py
index 087e9e41b85d..b60d08afe635 100644
--- a/nemo/collections/asr/models/k2_sequence_models.py
+++ b/nemo/collections/asr/models/k2_sequence_models.py
@@ -14,8 +14,8 @@
from typing import List, Optional
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from nemo.collections.asr.models.ctc_bpe_models import EncDecCTCModelBPE
from nemo.collections.asr.models.ctc_models import EncDecCTCModel
@@ -76,7 +76,11 @@ def change_vocabulary(self, new_vocabulary: List[str]):
@typecheck()
def forward(
- self, input_signal=None, input_signal_length=None, processed_signal=None, processed_signal_length=None,
+ self,
+ input_signal=None,
+ input_signal_length=None,
+ processed_signal=None,
+ processed_signal_length=None,
):
"""
Forward pass of the model.
@@ -159,7 +163,11 @@ def change_vocabulary(self, new_tokenizer_dir: str, new_tokenizer_type: str):
@typecheck()
def forward(
- self, input_signal=None, input_signal_length=None, processed_signal=None, processed_signal_length=None,
+ self,
+ input_signal=None,
+ input_signal_length=None,
+ processed_signal=None,
+ processed_signal_length=None,
):
"""
Forward pass of the model.
diff --git a/nemo/collections/asr/models/label_models.py b/nemo/collections/asr/models/label_models.py
index 08c304e4c52c..37391879547b 100644
--- a/nemo/collections/asr/models/label_models.py
+++ b/nemo/collections/asr/models/label_models.py
@@ -24,8 +24,8 @@
import soundfile as sf
import torch
from hydra.utils import instantiate
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf, open_dict
-from pytorch_lightning import Trainer
from sklearn.metrics import roc_curve
from torchmetrics import Accuracy
from tqdm import tqdm
diff --git a/nemo/collections/asr/models/msdd_models.py b/nemo/collections/asr/models/msdd_models.py
index c88275dcacd3..d30411f01bcc 100644
--- a/nemo/collections/asr/models/msdd_models.py
+++ b/nemo/collections/asr/models/msdd_models.py
@@ -25,11 +25,11 @@
import numpy as np
import torch
from hydra.utils import instantiate
+from lightning.pytorch import LightningModule, Trainer
+from lightning.pytorch.utilities import rank_zero_only
from omegaconf import DictConfig, open_dict
from pyannote.core import Annotation
from pyannote.metrics.diarization import DiarizationErrorRate
-from pytorch_lightning import LightningModule, Trainer
-from pytorch_lightning.utilities import rank_zero_only
from tqdm import tqdm
from nemo.collections.asr.data.audio_to_diar_label import AudioToSpeechMSDDInferDataset, AudioToSpeechMSDDTrainDataset
diff --git a/nemo/collections/asr/models/rnnt_bpe_models.py b/nemo/collections/asr/models/rnnt_bpe_models.py
index 9e09acd21a5d..c92bcfaaef7a 100644
--- a/nemo/collections/asr/models/rnnt_bpe_models.py
+++ b/nemo/collections/asr/models/rnnt_bpe_models.py
@@ -17,8 +17,8 @@
from typing import Dict, List, Optional, Union
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, ListConfig, OmegaConf, open_dict
-from pytorch_lightning import Trainer
from nemo.collections.asr.data import audio_to_text_dataset
from nemo.collections.asr.data.audio_to_text import _AudioTextDataset
@@ -344,13 +344,15 @@ def change_vocabulary(
decoding_cfg: Optional[DictConfig] = None,
):
"""
- Changes vocabulary used during RNNT decoding process. Use this method when fine-tuning on from pre-trained model.
- This method changes only decoder and leaves encoder and pre-processing modules unchanged. For example, you would
- use it if you want to use pretrained encoder when fine-tuning on data in another language, or when you'd need
- model to learn capitalization, punctuation and/or special characters.
+ Changes vocabulary used during RNNT decoding process. Use this method when fine-tuning
+ from a pre-trained model. This method changes only the decoder and leaves the encoder and pre-processing
+ modules unchanged. For example, you would use it if you want to use a pretrained encoder when
+ fine-tuning on data in another language, or when you'd need the model to learn capitalization,
+ punctuation and/or special characters.
Args:
- new_tokenizer_dir: Directory path to tokenizer or a config for a new tokenizer (if the tokenizer type is `agg`)
+ new_tokenizer_dir: Directory path to tokenizer or a config for a new tokenizer
+ (if the tokenizer type is `agg`)
new_tokenizer_type: Type of tokenizer. Can be either `agg`, `bpe` or `wpe`.
decoding_cfg: A config for the decoder, which is optional. If the decoding type
needs to be changed (from say Greedy to Beam decoding etc), the config can be passed here.
@@ -363,7 +365,8 @@ def change_vocabulary(
new_tokenizer_cfg = new_tokenizer_dir
else:
raise ValueError(
- f'New tokenizer dir should be a string unless the tokenizer is `agg`, but this tokenizer type is: {new_tokenizer_type}'
+ f'New tokenizer dir should be a string unless the tokenizer is `agg`, '
+ f'but this tokenizer type is: {new_tokenizer_type}'
)
else:
new_tokenizer_cfg = None
@@ -451,13 +454,14 @@ def change_vocabulary(
logging.info(f"Changed decoder to output to {self.joint.vocabulary} vocabulary.")
- def change_decoding_strategy(self, decoding_cfg: DictConfig):
+ def change_decoding_strategy(self, decoding_cfg: DictConfig, verbose: bool = True):
"""
Changes decoding strategy used during RNNT decoding process.
Args:
decoding_cfg: A config for the decoder, which is optional. If the decoding type
needs to be changed (from say Greedy to Beam decoding etc), the config can be passed here.
+ verbose: A flag to enable/disable logging.
"""
if decoding_cfg is None:
# Assume same decoding config as before
@@ -498,7 +502,8 @@ def change_decoding_strategy(self, decoding_cfg: DictConfig):
with open_dict(self.cfg.decoding):
self.cfg.decoding = decoding_cfg
- logging.info(f"Changed decoding strategy to \n{OmegaConf.to_yaml(self.cfg.decoding)}")
+ if verbose:
+ logging.info(f"Changed decoding strategy to \n{OmegaConf.to_yaml(self.cfg.decoding)}")
def _setup_dataloader_from_config(self, config: Optional[Dict]):
if config.get("use_lhotse"):
diff --git a/nemo/collections/asr/models/rnnt_models.py b/nemo/collections/asr/models/rnnt_models.py
index 0d1fbe651dc3..e4d1abd0b50c 100644
--- a/nemo/collections/asr/models/rnnt_models.py
+++ b/nemo/collections/asr/models/rnnt_models.py
@@ -19,8 +19,8 @@
import numpy as np
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf, open_dict
-from pytorch_lightning import Trainer
from torch.utils.data import DataLoader
from nemo.collections.asr.data import audio_to_text_dataset
@@ -40,12 +40,14 @@
from nemo.collections.asr.parts.preprocessing.segment import ChannelSelectorType
from nemo.collections.asr.parts.submodules.rnnt_decoding import RNNTDecoding, RNNTDecodingConfig
from nemo.collections.asr.parts.utils.asr_batching import get_semi_sorted_batch_sampler
+from nemo.collections.asr.parts.utils.transcribe_utils import process_timestamp_outputs
from nemo.collections.common.data.lhotse import get_lhotse_dataloader_from_config
from nemo.collections.common.parts.preprocessing.parsers import make_parser
from nemo.core.classes.common import PretrainedModelInfo, typecheck
from nemo.core.classes.mixins import AccessMixin
from nemo.core.neural_types import AcousticEncodedRepresentation, AudioSignal, LengthsType, NeuralType, SpectrogramType
from nemo.utils import logging
+from nemo.utils.decorators import deprecated
class EncDecRNNTModel(ASRModel, ASRModuleMixin, ExportableEncDecModel, ASRTranscriptionMixin):
@@ -246,13 +248,15 @@ def transcribe(
channel_selector: Optional[ChannelSelectorType] = None,
augmentor: DictConfig = None,
verbose: bool = True,
+ timestamps: Optional[bool] = None,
override_config: Optional[TranscribeConfig] = None,
) -> TranscriptionReturnType:
"""
Uses greedy decoding to transcribe audio files. Use this method for debugging and prototyping.
Args:
- audio: (a single or list) of paths to audio files or a np.ndarray/tensor audio array or path to a manifest file.
+ audio: (a single or list) of paths to audio files or a np.ndarray/tensor audio array or path
+ to a manifest file.
Can also be a dataloader object that provides values that can be consumed by the model.
Recommended length per file is between 5 and 25 seconds. \
But it is possible to pass a few hours long file if enough GPU memory is available.
@@ -264,9 +268,14 @@ def transcribe(
decoding. This is useful for streaming rnnt decoding. If this is not None, then the length of this
list should be equal to the length of the audio list.
num_workers: (int) number of workers for DataLoader
- channel_selector (int | Iterable[int] | str): select a single channel or a subset of channels from multi-channel audio. If set to `'average'`, it performs averaging across channels. Disabled if set to `None`. Defaults to `None`. Uses zero-based indexing.
+ channel_selector (int | Iterable[int] | str): select a single channel or a subset of channels
+ from multi-channel audio. If set to `'average'`, it performs averaging across channels.
+ Disabled if set to `None`. Defaults to `None`. Uses zero-based indexing.
augmentor: (DictConfig): Augment audio samples during transcription if augmentor is applied.
verbose: (bool) whether to display tqdm progress bar
+ timestamps: Optional[bool]: timestamps will be returned if set to True as part of the hypothesis object
+ (output.timestep['segment']/output.timestep['word']). Refer to `Hypothesis` class for more details.
+ Default is None, which retains the previous state set by self.change_decoding_strategy().
override_config: (Optional[TranscribeConfig]) override transcription config pre-defined by the user.
**Note**: All other arguments in the function will be ignored if override_config is passed.
You should call this argument as `model.transcribe(audio, override_config=TranscribeConfig(...))`.
@@ -276,6 +285,25 @@ def transcribe(
* A list of greedy transcript texts / Hypothesis
* An optional list of beam search transcript texts / Hypothesis / NBestHypothesis.
"""
+ timestamps = timestamps or (override_config.timestamps if override_config is not None else None)
+ if timestamps is not None:
+ if timestamps or (override_config is not None and override_config.timestamps):
+ logging.info(
+ "Timestamps requested, setting decoding timestamps to True. Capture them in Hypothesis object, \
+ with output[0][idx].timestep['word'/'segment'/'char']"
+ )
+ return_hypotheses = True
+ with open_dict(self.cfg.decoding):
+ self.cfg.decoding.compute_timestamps = True
+ self.cfg.decoding.preserve_alignments = True
+ self.change_decoding_strategy(self.cfg.decoding, verbose=False)
+ else:
+ return_hypotheses = False
+ with open_dict(self.cfg.decoding):
+ self.cfg.decoding.compute_timestamps = False
+ self.cfg.decoding.preserve_alignments = False
+ self.change_decoding_strategy(self.cfg.decoding, verbose=False)
+
return super().transcribe(
audio=audio,
batch_size=batch_size,
@@ -284,6 +312,7 @@ def transcribe(
channel_selector=channel_selector,
augmentor=augmentor,
verbose=verbose,
+ timestamps=timestamps,
override_config=override_config,
# Additional arguments
partial_hypothesis=partial_hypothesis,
@@ -291,10 +320,11 @@ def transcribe(
def change_vocabulary(self, new_vocabulary: List[str], decoding_cfg: Optional[DictConfig] = None):
"""
- Changes vocabulary used during RNNT decoding process. Use this method when fine-tuning a pre-trained model.
- This method changes only decoder and leaves encoder and pre-processing modules unchanged. For example, you would
- use it if you want to use pretrained encoder when fine-tuning on data in another language, or when you'd need
- model to learn capitalization, punctuation and/or special characters.
+ Changes vocabulary used during RNNT decoding process. Use this method when fine-tuning a
+ pre-trained model. This method changes only the decoder and leaves the encoder and pre-processing
+ modules unchanged. For example, you would use it if you want to use a pretrained encoder when
+ fine-tuning on data in another language, or when you'd need the model to learn capitalization,
+ punctuation and/or special characters.
Args:
new_vocabulary: list with new vocabulary. Must contain at least 2 elements. Typically, \
@@ -380,13 +410,14 @@ def change_vocabulary(self, new_vocabulary: List[str], decoding_cfg: Optional[Di
logging.info(f"Changed decoder to output to {self.joint.vocabulary} vocabulary.")
- def change_decoding_strategy(self, decoding_cfg: DictConfig):
+ def change_decoding_strategy(self, decoding_cfg: DictConfig, verbose: bool = True):
"""
Changes decoding strategy used during RNNT decoding process.
Args:
decoding_cfg: A config for the decoder, which is optional. If the decoding type
needs to be changed (from say Greedy to Beam decoding etc), the config can be passed here.
+ verbose: (bool) whether to display logging information
"""
if decoding_cfg is None:
# Assume same decoding config as before
@@ -427,7 +458,8 @@ def change_decoding_strategy(self, decoding_cfg: DictConfig):
with open_dict(self.cfg.decoding):
self.cfg.decoding = decoding_cfg
- logging.info(f"Changed decoding strategy to \n{OmegaConf.to_yaml(self.cfg.decoding)}")
+ if verbose:
+ logging.info(f"Changed decoding strategy to \n{OmegaConf.to_yaml(self.cfg.decoding)}")
def _setup_dataloader_from_config(self, config: Optional[Dict]):
# Automatically inject args from model config to dataloader config
@@ -899,6 +931,10 @@ def _transcribe_forward(self, batch: Any, trcfg: TranscribeConfig):
output = dict(encoded=encoded, encoded_len=encoded_len)
return output
+ @deprecated(
+ explanation='The return type of this method will be updated in the upcoming release to ensure a consistent '
+ 'output format across all decoder types, such that a "Hypothesis" object is always returned.'
+ )
def _transcribe_output_processing(
self, outputs, trcfg: TranscribeConfig
) -> Tuple[List['Hypothesis'], List['Hypothesis']]:
@@ -911,10 +947,17 @@ def _transcribe_output_processing(
return_hypotheses=trcfg.return_hypotheses,
partial_hypotheses=trcfg.partial_hypothesis,
)
-
# cleanup memory
del encoded, encoded_len
+ if trcfg.timestamps:
+ best_hyp = process_timestamp_outputs(
+ best_hyp, self.encoder.subsampling_factor, self.cfg['preprocessor']['window_stride']
+ )
+ all_hyp = process_timestamp_outputs(
+ all_hyp, self.encoder.subsampling_factor, self.cfg['preprocessor']['window_stride']
+ )
+
hypotheses = []
all_hypotheses = []
diff --git a/nemo/collections/asr/models/ssl_models.py b/nemo/collections/asr/models/ssl_models.py
index 633a00d73f5e..9150da7bf7c2 100644
--- a/nemo/collections/asr/models/ssl_models.py
+++ b/nemo/collections/asr/models/ssl_models.py
@@ -17,8 +17,8 @@
import torch
import torch.nn as nn
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from nemo.collections.asr.data import audio_to_text_dataset, ssl_dataset
from nemo.collections.asr.data.audio_to_text_dali import DALIOutputs
diff --git a/nemo/collections/asr/models/transformer_bpe_models.py b/nemo/collections/asr/models/transformer_bpe_models.py
index 089186e142bf..8d0f2b2223a3 100644
--- a/nemo/collections/asr/models/transformer_bpe_models.py
+++ b/nemo/collections/asr/models/transformer_bpe_models.py
@@ -22,8 +22,8 @@
import editdistance
import torch
import torch.distributed as dist
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf, open_dict
-from pytorch_lightning import Trainer
from torch.utils.data import DataLoader
from torchmetrics.text import SacreBLEUScore
from tqdm.auto import tqdm
diff --git a/nemo/collections/asr/modules/conv_asr.py b/nemo/collections/asr/modules/conv_asr.py
index 3cb9ec13109b..e48d76a9b7a3 100644
--- a/nemo/collections/asr/modules/conv_asr.py
+++ b/nemo/collections/asr/modules/conv_asr.py
@@ -133,6 +133,7 @@ def __init__(
residual_panes = []
encoder_layers = []
self.dense_residual = False
+ self._subsampling_factor = 1
for layer_idx, lcfg in enumerate(jasper):
dense_res = []
if lcfg.get('residual_dense', False):
@@ -181,6 +182,9 @@ def __init__(
)
)
feat_in = lcfg['filters']
+ self._subsampling_factor *= (
+ int(lcfg['stride'][0]) if isinstance(lcfg['stride'], List) else int(lcfg['stride'])
+ )
self._feat_out = feat_in
@@ -199,7 +203,9 @@ def forward(self, audio_signal, length):
return s_input[-1], length
def update_max_sequence_length(self, seq_length: int, device):
- # Find global max audio length across all nodes
+ """
+ Find the global max audio length across all nodes in distributed training and update max_audio_length accordingly.
+ """
if torch.distributed.is_initialized():
global_max_len = torch.tensor([seq_length], dtype=torch.float32, device=device)
@@ -229,6 +235,10 @@ def update_max_sequence_length(self, seq_length: int, device):
elif isinstance(m, SqueezeExcite):
m.set_max_len(self.max_audio_length, seq_range=self.seq_range)
+ @property
+ def subsampling_factor(self) -> int:
+ return self._subsampling_factor
+
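The accumulated `_subsampling_factor` is the product of the per-layer strides; downstream timestamp code multiplies it by the preprocessor's `window_stride` to map encoder frames to seconds. A worked sketch of the arithmetic with illustrative stride entries:

    from math import prod

    strides = [[2], 2, [1], [2]]  # per-layer 'stride' values, list or int (illustrative)
    factor = prod(int(s[0]) if isinstance(s, list) else int(s) for s in strides)
    assert factor == 8  # each encoder output frame covers 8 input feature frames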
class ParallelConvASREncoder(NeuralModule, Exportable):
"""
@@ -426,7 +436,8 @@ def __init__(self, feat_in, num_classes, init_mode="xavier_uniform", vocabulary=
if vocabulary is not None:
if num_classes != len(vocabulary):
raise ValueError(
- f"If vocabulary is specified, it's length should be equal to the num_classes. Instead got: num_classes={num_classes} and len(vocabulary)={len(vocabulary)}"
+ f"If vocabulary is specified, it's length should be equal to the num_classes. \
+ Instead got: num_classes={num_classes} and len(vocabulary)={len(vocabulary)}"
)
self.__vocabulary = vocabulary
self._feat_in = feat_in
@@ -765,8 +776,8 @@ class SpeakerDecoder(NeuralModule, Exportable):
Args:
feat_in (int): Number of channels being input to this module
num_classes (int): Number of unique speakers in dataset
- emb_sizes (list) : shapes of intermediate embedding layers (we consider speaker embbeddings from 1st of this layers)
- Defaults to [1024,1024]
+ emb_sizes (list) : shapes of intermediate embedding layers (we consider speaker embeddings
+ from the first of these layers). Defaults to [1024,1024]
pool_mode (str) : Pooling strategy type. options are 'xvector','tap', 'attention'
Defaults to 'xvector (mean and variance)'
tap (temporal average pooling: just mean)
diff --git a/nemo/collections/asr/parts/mixins/transcription.py b/nemo/collections/asr/parts/mixins/transcription.py
index 104e6bff81af..ac928fe99272 100644
--- a/nemo/collections/asr/parts/mixins/transcription.py
+++ b/nemo/collections/asr/parts/mixins/transcription.py
@@ -16,8 +16,7 @@
import os
import tempfile
from abc import ABC, abstractmethod
-from collections.abc import Iterable
-from dataclasses import dataclass, fields, is_dataclass
+from dataclasses import dataclass
from functools import partial
from typing import Any, Dict, List, Optional, Tuple, Union
@@ -61,6 +60,7 @@ class TranscribeConfig:
num_workers: Optional[int] = None
channel_selector: ChannelSelectorType = None
augmentor: Optional[DictConfig] = None
+ timestamps: Optional[bool] = None  # returns timestamps for each word and segment if the model supports punctuation
verbose: bool = True
# Utility
@@ -86,7 +86,8 @@ def get_value_from_transcription_config(trcfg, key, default):
return getattr(trcfg, key)
else:
logging.debug(
- f"Using default value of {default} for {key} because it is not present in the transcription config {trcfg}."
+ f"Using default value of {default} for {key} because it is not present \
+ in the transcription config {trcfg}."
)
return default
@@ -179,6 +180,7 @@ def transcribe(
channel_selector: Optional[ChannelSelectorType] = None,
augmentor: DictConfig = None,
verbose: bool = True,
+ timestamps: Optional[bool] = None,
override_config: Optional[TranscribeConfig] = None,
**config_kwargs,
) -> GenericTranscriptionType:
@@ -200,6 +202,9 @@ def transcribe(
to `None`. Defaults to `None`. Uses zero-based indexing.
augmentor: (DictConfig): Augment audio samples during transcription if augmentor is applied.
verbose: (bool) whether to display tqdm progress bar
+ timestamps: Optional[bool]: timestamps will be returned if set to True as part of the hypothesis object
+ (output.timestep['segment']/output.timestep['word']). Refer to `Hypothesis` class for more details.
+ Default is None, which retains the previous state set by self.change_decoding_strategy().
override_config: (Optional[TranscribeConfig]) override transcription config pre-defined by the user.
**Note**: All other arguments in the function will be ignored if override_config is passed.
You should call this argument as `model.transcribe(audio, override_config=TranscribeConfig(...))`.
@@ -229,6 +234,7 @@ def transcribe(
channel_selector=channel_selector,
augmentor=augmentor,
verbose=verbose,
+ timestamps=timestamps,
**config_kwargs,
)
else:
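A usage sketch for the new `timestamps` flag (the model name and audio path are placeholders, not part of this diff; the `.timestep` keys follow the docstring above):

    import nemo.collections.asr as nemo_asr

    asr_model = nemo_asr.models.ASRModel.from_pretrained("stt_en_fastconformer_tdt_large")  # placeholder
    hypotheses = asr_model.transcribe(["sample.wav"], timestamps=True)
    word_timestamps = hypotheses[0].timestep["word"]        # word-level timestamps
    segment_timestamps = hypotheses[0].timestep["segment"]  # segment-level timestamps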
diff --git a/nemo/collections/asr/parts/numba/rnnt_loss/rnnt_pytorch.py b/nemo/collections/asr/parts/numba/rnnt_loss/rnnt_pytorch.py
index 5960d5ab6b18..01f78c0675cd 100644
--- a/nemo/collections/asr/parts/numba/rnnt_loss/rnnt_pytorch.py
+++ b/nemo/collections/asr/parts/numba/rnnt_loss/rnnt_pytorch.py
@@ -80,15 +80,16 @@ def forward(ctx, acts, labels, act_lens, label_lens, blank, reduction, fastemit_
if grads is not None:
grads /= minibatch_size
- ctx.grads = grads
+ ctx.save_for_backward(grads)
return costs
@staticmethod
def backward(ctx, grad_output):
- if grad_output is not None and ctx.grads is not None:
- grad_output = grad_output.view(-1, 1, 1, 1).to(ctx.grads)
- return ctx.grads.mul_(grad_output), None, None, None, None, None, None, None
+ (grads,) = ctx.saved_tensors
+ if grad_output is not None and grads is not None:
+ grad_output = grad_output.view(-1, 1, 1, 1).to(grads)
+ return grads.mul_(grad_output), None, None, None, None, None, None, None
class _TDTNumba(Function):
@@ -170,18 +171,18 @@ def forward(
label_grads /= minibatch_size
duration_grads /= minibatch_size
- ctx.label_grads = label_grads
- ctx.duration_grads = duration_grads
+ ctx.save_for_backward(label_grads, duration_grads)
return costs
@staticmethod
def backward(ctx, grad_output):
- if grad_output is not None and ctx.label_grads is not None:
- grad_output = grad_output.view(-1, 1, 1, 1).to(ctx.label_grads)
+ label_grads, duration_grads = ctx.saved_tensors
+ if grad_output is not None and label_grads is not None:
+ grad_output = grad_output.view(-1, 1, 1, 1).to(label_grads)
return (
- ctx.label_grads.mul_(grad_output),
- ctx.duration_grads.mul_(grad_output),
+ label_grads.mul_(grad_output),
+ duration_grads.mul_(grad_output),
None,
None,
None,
@@ -251,15 +252,16 @@ def forward(
if grads is not None:
grads /= minibatch_size
- ctx.grads = grads
+ ctx.save_for_backward(grads)
return costs
@staticmethod
def backward(ctx, grad_output):
- if grad_output is not None and ctx.grads is not None:
- grad_output = grad_output.view(-1, 1, 1, 1).to(ctx.grads)
- return ctx.grads.mul_(grad_output), None, None, None, None, None, None, None, None, None, None
+ (grads,) = ctx.saved_tensors
+ if grad_output is not None and grads is not None:
+ grad_output = grad_output.view(-1, 1, 1, 1).to(grads)
+ return grads.mul_(grad_output), None, None, None, None, None, None, None, None, None, None
def rnnt_loss(
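The `ctx.save_for_backward` changes above follow the recommended autograd pattern: tensors stashed directly on `ctx` as attributes escape autograd's lifetime tracking and can keep memory alive longer than needed, whereas saved tensors are managed by autograd and retrieved through `ctx.saved_tensors`. A minimal self-contained sketch of the pattern (illustrative, not NeMo code):

    import torch
    from torch.autograd import Function

    class ToyLoss(Function):
        @staticmethod
        def forward(ctx, acts):
            grads = torch.ones_like(acts)  # stand-in for the real gradient computation
            ctx.save_for_backward(grads)   # instead of `ctx.grads = grads`
            return acts.sum()

        @staticmethod
        def backward(ctx, grad_output):
            (grads,) = ctx.saved_tensors   # retrieved exactly as in the diff above
            return grads.mul_(grad_output)

    x = torch.randn(4, requires_grad=True)
    ToyLoss.apply(x).backward()            # x.grad is populated via the saved tensor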
diff --git a/nemo/collections/asr/parts/numba/rnnt_loss/utils/cuda_utils/gpu_rnnt_kernel.py b/nemo/collections/asr/parts/numba/rnnt_loss/utils/cuda_utils/gpu_rnnt_kernel.py
index 4153af060941..219e9d0453b2 100644
--- a/nemo/collections/asr/parts/numba/rnnt_loss/utils/cuda_utils/gpu_rnnt_kernel.py
+++ b/nemo/collections/asr/parts/numba/rnnt_loss/utils/cuda_utils/gpu_rnnt_kernel.py
@@ -961,7 +961,9 @@ def compute_tdt_alphas_kernel(
if t > 0 and t < T:
alphas[offset + t * maxU + u] = -INF
- for i in range(1, num_durations): # skip 0 since blank emission has to advance by at least one
+ for i in range(num_durations):
+ if durations[i] == 0: # skip 0 since blank emission has to advance by at least one
+ continue
if t >= durations[i]:
alphas[offset + t * maxU + u] = rnnt_helper.log_sum_exp(
alphas[offset + t * maxU + u], # the current alpha value
@@ -981,21 +983,26 @@ def compute_tdt_alphas_kernel(
elif u < U:
# when t == 0, we only consider the non-blank emission.
if t == 0:
- alphas[offset + u] = (
- alphas[offset + u - 1] # alpha(t, u - 1)
- + logp(
- denom, acts, maxT, maxU, alphabet_size, b, t, u - 1, labels[u - 1]
- ) # logp of token emission
- - sigma # logit under-normalization
- + logp_duration(
- duration_acts, maxT, maxU, num_durations, b, t, u - 1, 0
- ) # t = 0, so it must be duration = 0. Therefore the last argument passed to logp_duration() is 0.
- )
+ if durations[0] == 0:
+ alphas[offset + u] = (
+ alphas[offset + u - 1] # alpha(t, u - 1)
+ + logp(
+ denom, acts, maxT, maxU, alphabet_size, b, t, u - 1, labels[u - 1]
+ ) # logp of token emission
+ - sigma # logit under-normalization
+ + logp_duration(
+ duration_acts, maxT, maxU, num_durations, b, t, u - 1, 0
+ ) # t = 0, so it must be duration = 0. Therefore the last argument passed to logp_duration() is 0.
+ )
+ else:
+ alphas[offset + u] = -INF
# now we have t != 0 and u != 0, and we need to consider both non-blank and blank emissions.
elif t > 0 and t < T:
no_emit = -INF # no_emit stores the score for all blank emissions.
- for i in range(1, num_durations):
+ for i in range(num_durations):
+ if durations[i] == 0:
+ continue
if t >= durations[i]:
no_emit = rnnt_helper.log_sum_exp(
no_emit, # current score
@@ -1012,7 +1019,7 @@ def compute_tdt_alphas_kernel(
break # we can exit the loop early here, same as the case for u == 0 above.
emit = -INF # emit stores the score for non-blank emissions.
- for i in range(0, num_durations):
+ for i in range(num_durations):
if t >= durations[i]:
emit = rnnt_helper.log_sum_exp(
emit, # current score
@@ -1037,16 +1044,21 @@ def compute_tdt_alphas_kernel(
# After final sync, the forward log-likelihood can be computed as the summation of
# alpha(T - duration, U - 1) + logp(blank, duration | t - duration, U - 1), over different durations.
if u == 0:
- # first we consider duration = 1
- loglike = (
- alphas[offset + (T - 1) * maxU + U - 1]
- + logp(denom, acts, maxT, maxU, alphabet_size, b, T - 1, U - 1, blank_)
- - sigma
- + logp_duration(duration_acts, maxT, maxU, num_durations, b, T - 1, U - 1, 1)
- )
+ # initialize with negative infinity and add terms later
+ loglike = -INF
# then we add the scores for all non-zero durations, if such durations are possible given the audio lengths.
- for i in range(2, num_durations):
+ for i in range(num_durations):
+ if durations[i] == 0:
+ continue
+ if durations[i] == 1:
+ loglike = (
+ alphas[offset + (T - 1) * maxU + U - 1]
+ + logp(denom, acts, maxT, maxU, alphabet_size, b, T - 1, U - 1, blank_)
+ - sigma
+ + logp_duration(duration_acts, maxT, maxU, num_durations, b, T - 1, U - 1, i)
+ )
+ continue
if T >= durations[i]:
big_blank_loglike = (
alphas[offset + (T - durations[i]) * maxU + U - 1]
@@ -1122,11 +1134,18 @@ def compute_tdt_betas_kernel(
# Initialize beta[b, t=T-1, u=U-1] for all b in B with log_probs[b, t=T-1, u=U-1, blank]
if u == 0:
- betas[offset + (T - 1) * maxU + U - 1] = (
- logp(denom, acts, maxT, maxU, alphabet_size, b, T - 1, U - 1, blank_)
- - sigma
- + logp_duration(duration_acts, maxT, maxU, num_durations, b, T - 1, U - 1, 1)
- )
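+ # durations are assumed sorted in increasing order, so a duration of 1, if present,
+ # can only sit at index 0, or at index 1 when duration 0 occupies index 0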
+ if durations[0] == 1:
+ betas[offset + (T - 1) * maxU + U - 1] = (
+ logp(denom, acts, maxT, maxU, alphabet_size, b, T - 1, U - 1, blank_)
+ - sigma
+ + logp_duration(duration_acts, maxT, maxU, num_durations, b, T - 1, U - 1, 0)
+ )
+ elif durations[1] == 1:
+ betas[offset + (T - 1) * maxU + U - 1] = (
+ logp(denom, acts, maxT, maxU, alphabet_size, b, T - 1, U - 1, blank_)
+ - sigma
+ + logp_duration(duration_acts, maxT, maxU, num_durations, b, T - 1, U - 1, 1)
+ )
# sync until all betas are initialized
cuda.syncthreads()
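The loop rewrites in this kernel stop assuming that index 0 of the duration array holds duration 0; zero durations are now skipped by value, so duration configurations with or without 0 (in any position) are handled. An illustrative reduction of the new control flow (plain Python, not kernel code):

    durations = [0, 2, 3]  # 0 need not be present, nor be the first entry
    t = 2
    for i in range(len(durations)):
        if durations[i] == 0:   # a blank emission must advance time by at least one frame
            continue
        if t >= durations[i]:
            pass                # accumulate the alpha/beta update here, as in the kernel above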
@@ -1140,11 +1159,12 @@ def compute_tdt_betas_kernel(
# u == U - 1, we only consider blank emissions.
if t >= 0 and t + 1 < T:
betas[offset + t * maxU + U - 1] = -INF
- for i in range(1, num_durations):
+ for i in range(num_durations):
# although similar, the computation for beta's is slightly more complex for boundary cases.
# the following two cases correspond to whether t is exactly certain duration away from T.
# and they have slightly different update rules.
-
+ if durations[i] == 0:
+ continue
if t + durations[i] < T:
betas[offset + t * maxU + U - 1] = rnnt_helper.log_sum_exp(
betas[offset + t * maxU + U - 1],
@@ -1172,17 +1192,24 @@ def compute_tdt_betas_kernel(
elif u < U - 1:
if t == T - 1:
# t == T - 1, so we only consider non-blank with duration 0. (Note, we can't have blank emissions with duration = 0)
- betas[offset + (T - 1) * maxU + u] = (
- betas[offset + (T - 1) * maxU + u + 1]
- + logp(denom, acts, maxT, maxU, alphabet_size, b, T - 1, u, labels[u]) # non-blank log prob
- + logp_duration(duration_acts, maxT, maxU, num_durations, b, T - 1, u, 0) # log prob of duration 0
- - sigma
- )
+ if durations[0] == 0:
+ betas[offset + (T - 1) * maxU + u] = (
+ betas[offset + (T - 1) * maxU + u + 1]
+ + logp(denom, acts, maxT, maxU, alphabet_size, b, T - 1, u, labels[u]) # non-blank log prob
+ + logp_duration(
+ duration_acts, maxT, maxU, num_durations, b, T - 1, u, 0
+ ) # log prob of duration 0
+ - sigma
+ )
+ else:
+ betas[offset + (T - 1) * maxU + u] = -INF
elif t >= 0 and t < T - 1:
# now we need to consider both blanks and non-blanks. Similar to alphas, we first compute them separately with no_emit and emit.
no_emit = -INF
- for i in range(1, num_durations):
+ for i in range(num_durations):
+ if durations[i] == 0:
+ continue
if t + durations[i] < T:
no_emit = rnnt_helper.log_sum_exp(
no_emit,
@@ -1193,7 +1220,7 @@ def compute_tdt_betas_kernel(
)
emit = -INF
- for i in range(0, num_durations):
+ for i in range(num_durations):
if t + durations[i] < T:
emit = rnnt_helper.log_sum_exp(
emit,
@@ -1304,10 +1331,10 @@ def compute_tdt_grad_kernel(
logpk_label = denom[col] + acts[col * alphabet_size + labels[u]] - sigma
grad -= math.exp(alphas[col] + betas[col + 1 + durations[idx] * maxU] + logpk_label - logll[mb])
- if t + durations[idx] < T and idx > 0: # for blank in the middle
+ if t + durations[idx] < T and durations[idx] > 0: # for blank in the middle
grad -= math.exp(alphas[col] + betas[col + durations[idx] * maxU] + logpk_blank - logll[mb])
- if t + durations[idx] == T and idx >= 1 and u == U - 1: # for blank as the last symbol
+ if t + durations[idx] == T and u == U - 1 and durations[idx] > 0: # for blank as the last symbol
grad -= math.exp(alphas[col] + logpk_blank - logll[mb])
grad = grad * math.exp(duration_acts[col * num_durations + idx])
@@ -1335,7 +1362,7 @@ def compute_tdt_grad_kernel(
if fastemit_lambda > 0.0 and u < U - 1:
fastemit_grad = 0.0
- for i in range(0, num_durations):
+ for i in range(num_durations):
if t + durations[i] < T:
fastemit_grad += fastemit_lambda * math.exp(
alphas[col] # alphas(t, u)
@@ -1355,7 +1382,9 @@ def compute_tdt_grad_kernel(
# grad to last blank transition
# grad[b, T-1, U-1, v=blank] -= exp(alphas[b, t, u] + logpk - sigma - logll[b] + logp(duration) for all possible non-zero durations.
if idx == blank_ and u == U - 1:
- for i in range(1, num_durations):
+ for i in range(num_durations):
+ if durations[i] == 0:
+ continue
if t == T - durations[i]:
grad -= math.exp(
alphas[col] + logpk - sigma - logll[mb] + duration_acts[col * num_durations + i]
@@ -1364,7 +1393,9 @@ def compute_tdt_grad_kernel(
# grad of blank across t < T;
# grad[b, t < T, ...]
diff --git a/nemo/collections/asr/parts/submodules/rnnt_beam_decoding.py b/nemo/collections/asr/parts/submodules/rnnt_beam_decoding.py
def pack_hypotheses(hypotheses: List[Hypothesis]) -> List[Hypothesis]:
+ """
+ Packs a list of hypotheses into a tensor and prepares decoder states.
+
+ This function takes a list of token sequences (hypotheses) and converts
+ it into a tensor format. If any decoder states are on the GPU, they
+ are moved to the CPU. Additionally, the function removes any timesteps
+ with a value of -1 from the sequences.
+
+ Args:
+ hypotheses (list): A list of token sequences representing hypotheses.
+
+ Returns:
+ list: A list of packed hypotheses in tensor format.
+ """
for idx, hyp in enumerate(hypotheses): # type: rnnt_utils.Hypothesis
hyp.y_sequence = torch.tensor(hyp.y_sequence, dtype=torch.long)
@@ -69,6 +83,18 @@ def pack_hypotheses(hypotheses: List[Hypothesis]) -> List[Hypothesis]:
def _states_to_device(dec_state, device='cpu'):
+ """
+ Transfers decoder states to the specified device.
+
+ This function moves the provided decoder states to the specified device (e.g., 'cpu' or 'cuda').
+
+ Args:
+ dec_state (Tensor): The decoder states to be transferred.
+ device (str): The target device to which the decoder states should be moved. Defaults to 'cpu'.
+
+ Returns:
+ Tensor: The decoder states on the specified device.
+ """
if torch.is_tensor(dec_state):
dec_state = dec_state.to(device)
@@ -106,7 +132,8 @@ class BeamRNNTInfer(Typing):
however the time required for the search also grows steadily.
`tsd` - time synchronous decoding. Please refer to the paper:
- [Alignment-Length Synchronous Decoding for RNN Transducer](https://ieeexplore.ieee.org/document/9053040)
+ [Alignment-Length Synchronous Decoding for RNN Transducer]
+ (https://ieeexplore.ieee.org/document/9053040)
for details on the algorithm implemented.
Time synchronous decoding (TSD) execution time grows by the factor T * max_symmetric_expansions.
@@ -114,7 +141,8 @@ class BeamRNNTInfer(Typing):
good results. This also requires greater memory to execute.
`alsd` - alignment-length synchronous decoding. Please refer to the paper:
- [Alignment-Length Synchronous Decoding for RNN Transducer](https://ieeexplore.ieee.org/document/9053040)
+ [Alignment-Length Synchronous Decoding for RNN Transducer]
+ (https://ieeexplore.ieee.org/document/9053040)
for details on the algorithm implemented.
Alignment-length synchronous decoding (ALSD) execution time is faster than TSD, with growth
@@ -127,7 +155,8 @@ class BeamRNNTInfer(Typing):
For a given decoding accuracy, it is possible to attain faster decoding via ALSD than TSD.
`maes` = modified adaptive expansion search. Please refer to the paper:
- [Accelerating RNN Transducer Inference via Adaptive Expansion Search](https://ieeexplore.ieee.org/document/9250505)
+ [Accelerating RNN Transducer Inference via Adaptive Expansion Search]
+ (https://ieeexplore.ieee.org/document/9250505)
Modified Adaptive Synchronous Decoding (mAES) execution time is adaptive w.r.t the
number of expansions (for tokens) required per timestep. The number of expansions can usually
@@ -169,10 +198,10 @@ class BeamRNNTInfer(Typing):
and affects the speed of inference since large values will perform large beam search in the next step.
maes_expansion_gamma: Float pruning threshold used in the prune-by-value step when computing the expansions.
- The default (2.3) is selected from the paper. It performs a comparison (max_log_prob - gamma <= log_prob[v])
- where v is all vocabulary indices in the Vocab set and max_log_prob is the "most" likely token to be
- predicted. Gamma therefore provides a margin of additional tokens which can be potential candidates for
- expansion apart from the "most likely" candidate.
+ The default (2.3) is selected from the paper. It performs a comparison
+ (max_log_prob - gamma <= log_prob[v]) where v is all vocabulary indices in the Vocab set and max_log_prob
+ is the "most" likely token to be predicted. Gamma therefore provides a margin of additional tokens which
+ can be potential candidates for expansion apart from the "most likely" candidate.
Lower values will reduce the number of expansions (by increasing pruning-by-value, thereby improving speed
but hurting accuracy). Higher values will increase the number of expansions (by reducing pruning-by-value,
thereby reducing speed but potentially improving accuracy). This is a hyper parameter to be experimentally
@@ -182,7 +211,7 @@ class BeamRNNTInfer(Typing):
preserve_alignments: Bool flag which preserves the history of alignments generated during
beam decoding (sample). When set to true, the Hypothesis will contain
- the non-null value for `alignments` in it. Here, `alignments` is a List of List of Tensor (of length V + 1).
+ the non-null value for `alignments` in it. Here, `alignments` is a List of List of Tensor (of length V + 1)
The length of the list corresponds to the Acoustic Length (T).
Each value in the list (Ti) is a torch.Tensor (U), representing 1 or more targets from a vocabulary.
@@ -1456,8 +1485,11 @@ def compute_ngram_score(self, current_lm_state: "kenlm.State", label: int) -> Tu
return lm_score, next_state
def set_decoding_type(self, decoding_type: str):
-
- # Please check train_kenlm.py in scripts/asr_language_modeling/ to find out why we need
+ """
+ Sets the decoding type. Please check train_kenlm.py in scripts/asr_language_modeling/ to find out
+ why we need TOKEN_OFFSET for BPE-based models.
+ Args:
+ decoding_type: decoding type
+ """
# TOKEN_OFFSET for BPE-based models
if decoding_type == 'subword':
from nemo.collections.asr.parts.submodules.ctc_beam_decoding import DEFAULT_TOKEN_OFFSET
@@ -1467,6 +1499,10 @@ def set_decoding_type(self, decoding_type: str):
@dataclass
class BeamRNNTInferConfig:
+ """
+ Beam RNNT Inference config.
+ """
+
beam_size: int
search_type: str = 'default'
score_norm: bool = True
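A construction sketch for the config dataclass documented above (only the fields visible in this diff are used):

    from nemo.collections.asr.parts.submodules.rnnt_beam_decoding import BeamRNNTInferConfig

    cfg = BeamRNNTInferConfig(beam_size=4, search_type='maes', score_norm=True)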
diff --git a/nemo/collections/asr/parts/submodules/rnnt_decoding.py b/nemo/collections/asr/parts/submodules/rnnt_decoding.py
index e52c3f46423e..d3a63467c485 100644
--- a/nemo/collections/asr/parts/submodules/rnnt_decoding.py
+++ b/nemo/collections/asr/parts/submodules/rnnt_decoding.py
@@ -23,7 +23,7 @@
import torch
from omegaconf import OmegaConf
-from nemo.collections.asr.parts.submodules import rnnt_beam_decoding, rnnt_greedy_decoding
+from nemo.collections.asr.parts.submodules import rnnt_beam_decoding, rnnt_greedy_decoding, tdt_beam_decoding
from nemo.collections.asr.parts.utils.asr_confidence_utils import ConfidenceConfig, ConfidenceMixin
from nemo.collections.asr.parts.utils.rnnt_utils import Hypothesis, NBestHypotheses
from nemo.collections.common.tokenizers.aggregate_tokenizer import AggregateTokenizer
@@ -67,13 +67,15 @@ class AbstractRNNTDecoding(ConfidenceMixin):
rnnt_timestamp_type: A str value, which represents the types of timestamps that should be calculated.
Can take the following values - "char" for character/subword time stamps, "word" for word level
- time stamps, "segment" for segment level time stamps and "all" (default), for character, word and segment level time stamps.
+ time stamps, "segment" for segment level time stamps and "all" (default), for character, word and
+ segment level time stamps.
word_seperator: Str token representing the separator between words.
segment_seperators: List containing tokens representing the separator(s) between segments.
- segment_gap_threshold: The threshold (in frames) that caps the gap between two words necessary for forming the segments.
+ segment_gap_threshold: The threshold (in frames) that caps the gap between two words necessary for forming
+ the segments.
preserve_frame_confidence: Bool flag which preserves the history of per-frame confidence scores
generated during decoding (sample / batched). When set to true, the Hypothesis will contain
@@ -102,8 +104,8 @@ class AbstractRNNTDecoding(ConfidenceMixin):
The length of the list corresponds to the number of recognized words.
exclude_blank: Bool flag indicating that blank token confidence scores are to be excluded
from the `token_confidence`.
- aggregation: Which aggregation type to use for collapsing per-token confidence into per-word confidence.
- Valid options are `mean`, `min`, `max`, `prod`.
+ aggregation: Which aggregation type to use for collapsing per-token confidence into per-word
+ confidence. Valid options are `mean`, `min`, `max`, `prod`.
tdt_include_duration: Bool flag indicating that the duration confidence scores are to be calculated and
attached to the regular frame confidence,
making TDT frame confidence element a pair: (`prediction_confidence`, `duration_confidence`).
@@ -177,22 +179,23 @@ class AbstractRNNTDecoding(ConfidenceMixin):
maes_num_steps: Number of adaptive steps to take. From the paper, 2 steps is generally sufficient,
and can be reduced to 1 to improve decoding speed while sacrificing some accuracy. int > 0.
- maes_prefix_alpha: Maximum prefix length in prefix search. Must be an integer, and is advised to keep this as 1
- in order to reduce expensive beam search cost later. int >= 0.
+ maes_prefix_alpha: Maximum prefix length in prefix search. Must be an integer, and is advised to keep
+ this as 1 in order to reduce expensive beam search cost later. int >= 0.
maes_expansion_beta: Maximum number of prefix expansions allowed, in addition to the beam size.
Effectively, the number of hypothesis = beam_size + maes_expansion_beta. Must be an int >= 0,
- and affects the speed of inference since large values will perform large beam search in the next step.
-
- maes_expansion_gamma: Float pruning threshold used in the prune-by-value step when computing the expansions.
- The default (2.3) is selected from the paper. It performs a comparison (max_log_prob - gamma <= log_prob[v])
- where v is all vocabulary indices in the Vocab set and max_log_prob is the "most" likely token to be
- predicted. Gamma therefore provides a margin of additional tokens which can be potential candidates for
- expansion apart from the "most likely" candidate.
- Lower values will reduce the number of expansions (by increasing pruning-by-value, thereby improving speed
- but hurting accuracy). Higher values will increase the number of expansions (by reducing pruning-by-value,
- thereby reducing speed but potentially improving accuracy). This is a hyper parameter to be experimentally
- tuned on a validation set.
+ and affects the speed of inference since large values will perform large beam search in the next
+ step.
+
+ maes_expansion_gamma: Float pruning threshold used in the prune-by-value step when computing the
+ expansions. The default (2.3) is selected from the paper. It performs a comparison
+ (max_log_prob - gamma <= log_prob[v]) where v is all vocabulary indices in the Vocab set and
+ max_log_prob is the "most" likely token to be predicted. Gamma therefore provides a margin of
+ additional tokens which can be potential candidates for expansion apart from the "most likely"
+ candidate. Lower values will reduce the number of expansions (by increasing pruning-by-value,
+ thereby improving speed but hurting accuracy). Higher values will increase the number of expansions
+ (by reducing pruning-by-value, thereby reducing speed but potentially improving accuracy). This is
+ a hyper parameter to be experimentally tuned on a validation set.
softmax_temperature: Scales the logits of the joint prior to computing log_softmax.
@@ -231,8 +234,10 @@ def __init__(self, decoding_cfg, decoder, joint, blank_id: int, supported_punctu
raise ValueError("blank_id must equal len(non_blank_vocabs) for TDT models")
if self.big_blank_durations is not None and self.big_blank_durations != []:
raise ValueError("duration and big_blank_durations can't both be not None")
- if self.cfg.strategy not in ['greedy', 'greedy_batch']:
- raise ValueError("currently only greedy and greedy_batch inference is supported for TDT models")
+ if self.cfg.strategy not in ['greedy', 'greedy_batch', 'beam', 'maes']:
+ raise ValueError(
+ "currently only greedy, greedy_batch, beam and maes inference is supported for TDT models"
+ )
if (
self.big_blank_durations is not None and self.big_blank_durations != []
@@ -383,20 +388,32 @@ def __init__(self, decoding_cfg, decoder, joint, blank_id: int, supported_punctu
)
elif self.cfg.strategy == 'beam':
-
- self.decoding = rnnt_beam_decoding.BeamRNNTInfer(
- decoder_model=decoder,
- joint_model=joint,
- beam_size=self.cfg.beam.beam_size,
- return_best_hypothesis=decoding_cfg.beam.get('return_best_hypothesis', True),
- search_type='default',
- score_norm=self.cfg.beam.get('score_norm', True),
- softmax_temperature=self.cfg.beam.get('softmax_temperature', 1.0),
- preserve_alignments=self.preserve_alignments,
- )
+ if self.big_blank_durations is None or self.big_blank_durations == []:
+ if not self._is_tdt:
+ self.decoding = rnnt_beam_decoding.BeamRNNTInfer(
+ decoder_model=decoder,
+ joint_model=joint,
+ beam_size=self.cfg.beam.beam_size,
+ return_best_hypothesis=decoding_cfg.beam.get('return_best_hypothesis', True),
+ search_type='default',
+ score_norm=self.cfg.beam.get('score_norm', True),
+ softmax_temperature=self.cfg.beam.get('softmax_temperature', 1.0),
+ preserve_alignments=self.preserve_alignments,
+ )
+ else:
+ self.decoding = tdt_beam_decoding.BeamTDTInfer(
+ decoder_model=decoder,
+ joint_model=joint,
+ durations=self.durations,
+ beam_size=self.cfg.beam.beam_size,
+ return_best_hypothesis=decoding_cfg.beam.get('return_best_hypothesis', True),
+ search_type='default',
+ score_norm=self.cfg.beam.get('score_norm', True),
+ softmax_temperature=self.cfg.beam.get('softmax_temperature', 1.0),
+ preserve_alignments=self.preserve_alignments,
+ )
elif self.cfg.strategy == 'tsd':
-
self.decoding = rnnt_beam_decoding.BeamRNNTInfer(
decoder_model=decoder,
joint_model=joint,
@@ -410,7 +427,6 @@ def __init__(self, decoding_cfg, decoder, joint, blank_id: int, supported_punctu
)
elif self.cfg.strategy == 'alsd':
-
self.decoding = rnnt_beam_decoding.BeamRNNTInfer(
decoder_model=decoder,
joint_model=joint,
@@ -424,26 +440,44 @@ def __init__(self, decoding_cfg, decoder, joint, blank_id: int, supported_punctu
)
elif self.cfg.strategy == 'maes':
-
- self.decoding = rnnt_beam_decoding.BeamRNNTInfer(
- decoder_model=decoder,
- joint_model=joint,
- beam_size=self.cfg.beam.beam_size,
- return_best_hypothesis=decoding_cfg.beam.get('return_best_hypothesis', True),
- search_type='maes',
- score_norm=self.cfg.beam.get('score_norm', True),
- maes_num_steps=self.cfg.beam.get('maes_num_steps', 2),
- maes_prefix_alpha=self.cfg.beam.get('maes_prefix_alpha', 1),
- maes_expansion_gamma=self.cfg.beam.get('maes_expansion_gamma', 2.3),
- maes_expansion_beta=self.cfg.beam.get('maes_expansion_beta', 2.0),
- softmax_temperature=self.cfg.beam.get('softmax_temperature', 1.0),
- preserve_alignments=self.preserve_alignments,
- ngram_lm_model=self.cfg.beam.get('ngram_lm_model', None),
- ngram_lm_alpha=self.cfg.beam.get('ngram_lm_alpha', 0.0),
- hat_subtract_ilm=self.cfg.beam.get('hat_subtract_ilm', False),
- hat_ilm_weight=self.cfg.beam.get('hat_ilm_weight', 0.0),
- )
-
+ if self.big_blank_durations is None or self.big_blank_durations == []:
+ if not self._is_tdt:
+ self.decoding = rnnt_beam_decoding.BeamRNNTInfer(
+ decoder_model=decoder,
+ joint_model=joint,
+ beam_size=self.cfg.beam.beam_size,
+ return_best_hypothesis=decoding_cfg.beam.get('return_best_hypothesis', True),
+ search_type='maes',
+ score_norm=self.cfg.beam.get('score_norm', True),
+ maes_num_steps=self.cfg.beam.get('maes_num_steps', 2),
+ maes_prefix_alpha=self.cfg.beam.get('maes_prefix_alpha', 1),
+ maes_expansion_gamma=self.cfg.beam.get('maes_expansion_gamma', 2.3),
+ maes_expansion_beta=self.cfg.beam.get('maes_expansion_beta', 2.0),
+ softmax_temperature=self.cfg.beam.get('softmax_temperature', 1.0),
+ preserve_alignments=self.preserve_alignments,
+ ngram_lm_model=self.cfg.beam.get('ngram_lm_model', None),
+ ngram_lm_alpha=self.cfg.beam.get('ngram_lm_alpha', 0.0),
+ hat_subtract_ilm=self.cfg.beam.get('hat_subtract_ilm', False),
+ hat_ilm_weight=self.cfg.beam.get('hat_ilm_weight', 0.0),
+ )
+ else:
+ self.decoding = tdt_beam_decoding.BeamTDTInfer(
+ decoder_model=decoder,
+ joint_model=joint,
+ durations=self.durations,
+ beam_size=self.cfg.beam.beam_size,
+ return_best_hypothesis=decoding_cfg.beam.get('return_best_hypothesis', True),
+ search_type='maes',
+ score_norm=self.cfg.beam.get('score_norm', True),
+ maes_num_steps=self.cfg.beam.get('maes_num_steps', 2),
+ maes_prefix_alpha=self.cfg.beam.get('maes_prefix_alpha', 1),
+ maes_expansion_gamma=self.cfg.beam.get('maes_expansion_gamma', 2.3),
+ maes_expansion_beta=self.cfg.beam.get('maes_expansion_beta', 2.0),
+ softmax_temperature=self.cfg.beam.get('softmax_temperature', 1.0),
+ preserve_alignments=self.preserve_alignments,
+ ngram_lm_model=self.cfg.beam.get('ngram_lm_model', None),
+ ngram_lm_alpha=self.cfg.beam.get('ngram_lm_alpha', 0.3),
+ )
else:
raise ValueError(
@@ -725,6 +759,15 @@ def decode_ids_to_langs(self, tokens: List[int]) -> List[str]:
raise NotImplementedError()
def update_joint_fused_batch_size(self):
+ """ "
+ Updates the fused batch size for the joint module if applicable.
+
+ If `joint_fused_batch_size` is set, verifies that the joint module has
+ the required `set_fused_batch_size` and `set_fuse_loss_wer` functions.
+ If present, updates the batch size; otherwise, logs a warning.
+
+ If `joint_fused_batch_size` is <= 0, disables fused batch processing.
+ """
if self.joint_fused_batch_size is None:
# do nothing and let the Joint itself handle setting up of the fused batch
return
@@ -751,6 +794,21 @@ def update_joint_fused_batch_size(self):
self.decoding.joint.set_fuse_loss_wer(False)
def compute_rnnt_timestamps(self, hypothesis: Hypothesis, timestamp_type: str = "all"):
+ """
+ Computes character, word, and segment timestamps for an RNN-T hypothesis.
+
+ This function generates timestamps for characters, words, and segments within
+ a hypothesis sequence. The type of timestamps computed depends on `timestamp_type`,
+ which can be 'char', 'word', 'segment', or 'all'.
+
+ Args:
+ hypothesis (Hypothesis): Hypothesis.
+ timestamp_type (str): Type of timestamps to compute. Options are 'char', 'word', 'segment', or 'all'.
+ Defaults to 'all'.
+
+ Returns:
+ Hypothesis: The updated hypothesis with computed timestamps for characters, words, and/or segments.
+ """
assert timestamp_type in ['char', 'word', 'segment', 'all']
# Unpack the temporary storage
@@ -950,7 +1008,8 @@ def _refine_timestamps_tdt(
# Check if token is a punctuation mark
# If so, set its start and end offset as start and end of the previous token
- # This is done because there was observed a behaviour, when punctuation marks are predicted long after preceding token (i.e. after silence)
+ # This is done because a behaviour was observed where punctuation marks are
+ # predicted long after the preceding token (i.e. after silence)
if offset['char'][0] in supported_punctuation and i > 0:
encoded_char_offsets[i]['start_offset'] = offset['start_offset'] = char_offsets[i - 1]['end_offset']
encoded_char_offsets[i]['end_offset'] = offset['end_offset'] = offset['start_offset']
@@ -1110,7 +1169,8 @@ def _get_segment_offsets(
offsets: A list of dictionaries, each containing "word", "start_offset" and "end_offset".
segments_delimiter_tokens: List containing tokens representing the separator(s) between segments.
supported_punctuation: Set containing punctuation marks in the vocabulary.
- segment_gap_threshold: Number of frames between 2 consecutive words necessary to form segments out of plain text.
+ segment_gap_threshold: Number of frames between 2 consecutive words necessary to form segments out of plain
+ text.
Returns:
A list of dictionaries containing the segment offsets. Each item contains "segment", "start_offset" and
"end_offset".
@@ -1237,7 +1297,8 @@ class RNNTDecoding(AbstractRNNTDecoding):
The length of the list corresponds to the number of recognized words.
exclude_blank: Bool flag indicating that blank token confidence scores are to be excluded
from the `token_confidence`.
- aggregation: Which aggregation type to use for collapsing per-token confidence into per-word confidence.
+ aggregation: Which aggregation type to use for collapsing per-token confidence into per-word
+ confidence.
Valid options are `mean`, `min`, `max`, `prod`.
tdt_include_duration: Bool flag indicating that the duration confidence scores are to be calculated and
attached to the regular frame confidence,
@@ -1313,8 +1374,8 @@ class RNNTDecoding(AbstractRNNTDecoding):
per timestep of the acoustic model. Larger values will allow longer sentences to be decoded,
at increased cost to execution time.
- alsd_max_target_len: optional int or float, determines the potential maximum target sequence length.
- If an integer is provided, it can decode sequences of that particular maximum length.
+ alsd_max_target_len: optional int or float, determines the potential maximum target sequence
+ length. If an integer is provided, it can decode sequences of that particular maximum length.
If a float is provided, it can decode sequences of int(alsd_max_target_len * seq_len),
where seq_len is the length of the acoustic model output (T).
@@ -1326,22 +1387,23 @@ class RNNTDecoding(AbstractRNNTDecoding):
maes_num_steps: Number of adaptive steps to take. From the paper, 2 steps is generally sufficient,
and can be reduced to 1 to improve decoding speed while sacrificing some accuracy. int > 0.
- maes_prefix_alpha: Maximum prefix length in prefix search. Must be an integer, and is advised to keep this as 1
- in order to reduce expensive beam search cost later. int >= 0.
+ maes_prefix_alpha: Maximum prefix length in prefix search. Must be an integer, and is advised to
+ keep this as 1 in order to reduce expensive beam search cost later. int >= 0.
maes_expansion_beta: Maximum number of prefix expansions allowed, in addition to the beam size.
Effectively, the number of hypothesis = beam_size + maes_expansion_beta. Must be an int >= 0,
- and affects the speed of inference since large values will perform large beam search in the next step.
-
- maes_expansion_gamma: Float pruning threshold used in the prune-by-value step when computing the expansions.
- The default (2.3) is selected from the paper. It performs a comparison (max_log_prob - gamma <= log_prob[v])
- where v is all vocabulary indices in the Vocab set and max_log_prob is the "most" likely token to be
- predicted. Gamma therefore provides a margin of additional tokens which can be potential candidates for
- expansion apart from the "most likely" candidate.
- Lower values will reduce the number of expansions (by increasing pruning-by-value, thereby improving speed
- but hurting accuracy). Higher values will increase the number of expansions (by reducing pruning-by-value,
- thereby reducing speed but potentially improving accuracy). This is a hyper parameter to be experimentally
- tuned on a validation set.
+ and affects the speed of inference since large values will perform large beam search in the
+ next step.
+
+ maes_expansion_gamma: Float pruning threshold used in the prune-by-value step when computing the
+ expansions. The default (2.3) is selected from the paper. It performs a comparison
+ (max_log_prob - gamma <= log_prob[v]) where v is all vocabulary indices in the Vocab set and
+ max_log_prob is the "most" likely token to be predicted. Gamma therefore provides a margin of
+ additional tokens which can be potential candidates for expansion apart from the "most likely"
+ candidate. Lower values will reduce the number of expansions (by increasing pruning-by-value,
+ thereby improving speed but hurting accuracy). Higher values will increase the number of
+ expansions (by reducing pruning-by-value, thereby reducing speed but potentially improving
+ accuracy). This is a hyper parameter to be experimentally tuned on a validation set.
softmax_temperature: Scales the logits of the joint prior to computing log_softmax.
@@ -1376,7 +1438,9 @@ def __init__(
supported_punctuation=supported_punctuation,
)
- if isinstance(self.decoding, rnnt_beam_decoding.BeamRNNTInfer):
+ if isinstance(self.decoding, rnnt_beam_decoding.BeamRNNTInfer) or isinstance(
+ self.decoding, tdt_beam_decoding.BeamTDTInfer
+ ):
self.decoding.set_decoding_type('char')
def _aggregate_token_confidence(self, hypothesis: Hypothesis) -> List[float]:
@@ -1492,7 +1556,8 @@ class RNNTBPEDecoding(AbstractRNNTDecoding):
segment_seperators: List containing tokens representing the separator(s) between segments.
- segment_gap_threshold: The threshold (in frames) that caps the gap between two words necessary for forming the segments.
+ segment_gap_threshold: The threshold (in frames) that caps the gap between two words necessary for forming
+ the segments.
preserve_frame_confidence: Bool flag which preserves the history of per-frame confidence scores
generated during decoding (sample / batched). When set to true, the Hypothesis will contain
@@ -1521,8 +1586,8 @@ class RNNTBPEDecoding(AbstractRNNTDecoding):
The length of the list corresponds to the number of recognized words.
exclude_blank: Bool flag indicating that blank token confidence scores are to be excluded
from the `token_confidence`.
- aggregation: Which aggregation type to use for collapsing per-token confidence into per-word confidence.
- Valid options are `mean`, `min`, `max`, `prod`.
+ aggregation: Which aggregation type to use for collapsing per-token confidence into per-word
+ confidence. Valid options are `mean`, `min`, `max`, `prod`.
tdt_include_duration: Bool flag indicating that the duration confidence scores are to be calculated and
attached to the regular frame confidence,
making TDT frame confidence element a pair: (`prediction_confidence`, `duration_confidence`).
@@ -1594,8 +1659,8 @@ class RNNTBPEDecoding(AbstractRNNTDecoding):
per timestep of the acoustic model. Larger values will allow longer sentences to be decoded,
at increased cost to execution time.
- alsd_max_target_len: optional int or float, determines the potential maximum target sequence length.
- If an integer is provided, it can decode sequences of that particular maximum length.
+ alsd_max_target_len: optional int or float, determines the potential maximum target sequence
+ length. If an integer is provided, it can decode sequences of that particular maximum length.
If a float is provided, it can decode sequences of int(alsd_max_target_len * seq_len),
where seq_len is the length of the acoustic model output (T).
@@ -1607,22 +1672,23 @@ class RNNTBPEDecoding(AbstractRNNTDecoding):
maes_num_steps: Number of adaptive steps to take. From the paper, 2 steps is generally sufficient,
and can be reduced to 1 to improve decoding speed while sacrificing some accuracy. int > 0.
- maes_prefix_alpha: Maximum prefix length in prefix search. Must be an integer, and is advised to keep this as 1
- in order to reduce expensive beam search cost later. int >= 0.
+ maes_prefix_alpha: Maximum prefix length in prefix search. Must be an integer, and is advised to
+ keep this as 1 in order to reduce expensive beam search cost later. int >= 0.
maes_expansion_beta: Maximum number of prefix expansions allowed, in addition to the beam size.
Effectively, the number of hypothesis = beam_size + maes_expansion_beta. Must be an int >= 0,
- and affects the speed of inference since large values will perform large beam search in the next step.
-
- maes_expansion_gamma: Float pruning threshold used in the prune-by-value step when computing the expansions.
- The default (2.3) is selected from the paper. It performs a comparison (max_log_prob - gamma <= log_prob[v])
- where v is all vocabulary indices in the Vocab set and max_log_prob is the "most" likely token to be
- predicted. Gamma therefore provides a margin of additional tokens which can be potential candidates for
- expansion apart from the "most likely" candidate.
- Lower values will reduce the number of expansions (by increasing pruning-by-value, thereby improving speed
- but hurting accuracy). Higher values will increase the number of expansions (by reducing pruning-by-value,
- thereby reducing speed but potentially improving accuracy). This is a hyper parameter to be experimentally
- tuned on a validation set.
+ and affects the speed of inference since large values will perform large beam search in the
+ next step.
+
+ maes_expansion_gamma: Float pruning threshold used in the prune-by-value step when computing the
+ expansions. The default (2.3) is selected from the paper. It performs a comparison
+ (max_log_prob - gamma <= log_prob[v]) where v is all vocabulary indices in the Vocab set and
+ max_log_prob is the "most" likely token to be predicted. Gamma therefore provides a margin of
+ additional tokens which can be potential candidates for expansion apart from the "most likely"
+ candidate. Lower values will reduce the number of expansions (by increasing pruning-by-value,
+ thereby improving speed but hurting accuracy). Higher values will increase the number of
+ expansions (by reducing pruning-by-value, thereby reducing speed but potentially improving
+ accuracy). This is a hyper parameter to be experimentally tuned on a validation set.
softmax_temperature: Scales the logits of the joint prior to computing log_softmax.
@@ -1649,7 +1715,9 @@ def __init__(self, decoding_cfg, decoder, joint, tokenizer: TokenizerSpec):
supported_punctuation=supported_punctuation,
)
- if isinstance(self.decoding, rnnt_beam_decoding.BeamRNNTInfer):
+ if isinstance(self.decoding, rnnt_beam_decoding.BeamRNNTInfer) or isinstance(
+ self.decoding, tdt_beam_decoding.BeamTDTInfer
+ ):
self.decoding.set_decoding_type('subword')
def _aggregate_token_confidence(self, hypothesis: Hypothesis) -> List[float]:
@@ -1750,7 +1818,8 @@ def decode_hypothesis(self, hypotheses_list: List[Hypothesis]) -> List[Union[Hyp
hypotheses[ind].langs_chars = self.decode_ids_to_langs(prediction)
else:
logging.warning(
- "Ignoring request for lang output in hypotheses since the model does not use an aggregate tokenizer"
+ "Ignoring request for lang output in hypotheses since the model does not use an aggregate \
+ tokenizer"
)
return hypotheses
@@ -1758,6 +1827,10 @@ def decode_hypothesis(self, hypotheses_list: List[Hypothesis]) -> List[Union[Hyp
@dataclass
class RNNTDecodingConfig:
+ """
+ RNNT Decoding config
+ """
+
model_type: str = "rnnt" # one of "rnnt", "multiblank" or "tdt"
strategy: str = "greedy_batch"
@@ -1815,4 +1888,8 @@ class RNNTDecodingConfig:
@dataclass
class RNNTBPEDecodingConfig(RNNTDecodingConfig):
+ """
+ RNNT BPE Decoding Config
+ """
+
pass
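With TDT beam and maes support wired in above, switching a loaded model over might look like the following sketch (`asr_model` is a placeholder for a loaded TDT model; the `beam` sub-config is assumed from the `self.cfg.beam.*` reads above):

    from nemo.collections.asr.parts.submodules.rnnt_decoding import RNNTDecodingConfig

    decoding_cfg = RNNTDecodingConfig(model_type='tdt', strategy='maes')
    decoding_cfg.beam.beam_size = 4
    asr_model.change_decoding_strategy(decoding_cfg)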
diff --git a/nemo/collections/asr/parts/submodules/rnnt_greedy_decoding.py b/nemo/collections/asr/parts/submodules/rnnt_greedy_decoding.py
index f9cf368fe405..bd169d0d224e 100644
--- a/nemo/collections/asr/parts/submodules/rnnt_greedy_decoding.py
+++ b/nemo/collections/asr/parts/submodules/rnnt_greedy_decoding.py
@@ -49,7 +49,20 @@ def pack_hypotheses(
hypotheses: List[rnnt_utils.Hypothesis],
logitlen: torch.Tensor,
) -> List[rnnt_utils.Hypothesis]:
+ """
+ Packs a list of hypotheses into a tensor and prepares decoder states.
+
+ This function takes a list of token sequences (hypotheses) and converts
+ it into a tensor format. If any decoder states are on the GPU, they
+ are moved to the CPU. Additionally, the function removes any timesteps
+ with a value of -1 from the sequences.
+
+ Args:
+ hypotheses (list): A list of token sequences representing hypotheses.
+ logitlen (torch.Tensor): Lengths of the corresponding encoder output sequences.
+
+ Returns:
+ list: A list of packed hypotheses in tensor format.
+ """
if hasattr(logitlen, 'cpu'):
logitlen_cpu = logitlen.to('cpu')
else:
@@ -578,7 +591,8 @@ class GreedyBatchedRNNTInfer(_GreedyRNNTInfer, WithOptionalCudaGraphs):
(evaluating Joint multiple times in inner loop); It uses a minimal possible amount of calls
to prediction network (with maximum possible batch size),
which makes it especially useful for scaling the prediction network.
- use_cuda_graph_decoder: if CUDA graphs should be enabled for decoding (currently recommended only for inference)
+ use_cuda_graph_decoder: if CUDA graphs should be enabled for decoding
+ (currently recommended only for inference)
"""
def __init__(
@@ -1169,6 +1183,10 @@ def _greedy_decode_masked(
class ExportedModelGreedyBatchedRNNTInfer:
+ """
+ Exported Model Greedy Batched RNNT Infer class
+ """
+
def __init__(self, encoder_model: str, decoder_joint_model: str, max_symbols_per_step: Optional[int] = None):
self.encoder_model_path = encoder_model
self.decoder_joint_model_path = decoder_joint_model
@@ -1344,9 +1362,25 @@ def _setup_blank_index(self):
raise NotImplementedError()
def run_encoder(self, audio_signal, length):
+ """
+ Runs the encoder network.
+
+ Args:
+ audio_signal: audio signal
+ length: audio length
+ """
raise NotImplementedError()
def run_decoder_joint(self, enc_logits, targets, target_length, *states):
+ """
+ Runs the decoder and joint networks.
+
+ Args:
+ enc_logits: encoder logits
+ targets: targets
+ target_length: target length
+ states: states
+ """
raise NotImplementedError()
def _get_initial_states(self, batchsize):
@@ -1354,6 +1388,10 @@ def _get_initial_states(self, batchsize):
class ONNXGreedyBatchedRNNTInfer(ExportedModelGreedyBatchedRNNTInfer):
+ """
+ ONNX Greedy Batched RNNT Infer class
+ """
+
def __init__(self, encoder_model: str, decoder_joint_model: str, max_symbols_per_step: Optional[int] = 10):
super().__init__(
encoder_model=encoder_model,
@@ -1433,7 +1471,8 @@ def _setup_blank_index(self):
self._blank_index = log_probs.shape[-1] - 1 # last token of vocab size is blank token
logging.info(
- f"Enc-Dec-Joint step was evaluated, blank token id = {self._blank_index}; vocab size = {log_probs.shape[-1]}"
+ f"Enc-Dec-Joint step was evaluated, \
+ blank token id = {self._blank_index}; vocab size = {log_probs.shape[-1]}"
)
def run_encoder(self, audio_signal, length):
@@ -1512,6 +1551,10 @@ def _get_initial_states(self, batchsize):
class TorchscriptGreedyBatchedRNNTInfer(ExportedModelGreedyBatchedRNNTInfer):
+ """
+ Torchscript Greedy Batched RNNT Infer
+ """
+
def __init__(
self,
encoder_model: str,
@@ -2336,6 +2379,8 @@ def _greedy_decode_masked(
@dataclass
class GreedyRNNTInferConfig:
+ """Greedy RNNT Infer Config"""
+
max_symbols_per_step: Optional[int] = 10
preserve_alignments: bool = False
preserve_frame_confidence: bool = False
@@ -2354,6 +2399,8 @@ def __post_init__(self):
@dataclass
class GreedyBatchedRNNTInferConfig:
+ """Greedy Batched RNNT Infer Config"""
+
max_symbols_per_step: Optional[int] = 10
preserve_alignments: bool = False
preserve_frame_confidence: bool = False
@@ -2708,7 +2755,8 @@ class GreedyBatchedTDTInfer(_GreedyRNNTInfer, WithOptionalCudaGraphs):
- 'lin' for using the linear mapping.
- 'exp' for using exponential mapping with linear shift.
- use_cuda_graph_decoder: if CUDA graphs should be enabled for decoding (currently recommended only for inference)
+ use_cuda_graph_decoder: if CUDA graphs should be enabled for decoding
+ (currently recommended only for inference)
"""
def __init__(
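A construction sketch for the exported-model decoder documented above (the file paths are placeholders for previously exported ONNX artifacts):

    from nemo.collections.asr.parts.submodules.rnnt_greedy_decoding import ONNXGreedyBatchedRNNTInfer

    decoder = ONNXGreedyBatchedRNNTInfer(
        encoder_model='encoder.onnx',
        decoder_joint_model='decoder_joint.onnx',
        max_symbols_per_step=10,
    )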
diff --git a/nemo/collections/asr/parts/submodules/tdt_beam_decoding.py b/nemo/collections/asr/parts/submodules/tdt_beam_decoding.py
new file mode 100644
index 000000000000..908fc1c13d19
--- /dev/null
+++ b/nemo/collections/asr/parts/submodules/tdt_beam_decoding.py
@@ -0,0 +1,800 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Copyright 2017 Johns Hopkins University (Shinji Watanabe)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import List, Optional, Tuple
+
+import numpy as np
+import torch
+from tqdm import tqdm
+
+from nemo.collections.asr.modules import rnnt_abstract
+from nemo.collections.asr.parts.submodules.rnnt_beam_decoding import pack_hypotheses
+from nemo.collections.asr.parts.utils.rnnt_utils import Hypothesis, NBestHypotheses, is_prefix
+from nemo.core.classes import Typing, typecheck
+from nemo.core.neural_types import AcousticEncodedRepresentation, HypothesisType, LengthsType, NeuralType
+from nemo.utils import logging
+
+try:
+ import kenlm
+
+ KENLM_AVAILABLE = True
+except (ImportError, ModuleNotFoundError):
+ KENLM_AVAILABLE = False
+
+
+class BeamTDTInfer(Typing):
+ """
+ Beam search implementation for Token-and-Duration Transducer (TDT) models.
+
+ Sequence-level beam decoding or batched beam decoding, performed autoregressively
+ depending on the search type chosen.
+
+ Args:
+ decoder_model: rnnt_utils.AbstractRNNTDecoder implementation.
+ joint_model: rnnt_utils.AbstractRNNTJoint implementation.
+ durations: list of duration values from TDT model.
+
+ beam_size: number of beams for beam search. Must be a positive integer >= 1.
+ If beam size is 1, defaults to stateful greedy search.
+ For accurate greedy results, please use GreedyRNNTInfer or GreedyBatchedRNNTInfer.
+
+ search_type: str representing the type of beam search to perform.
+ Must be one of ['beam', 'maes'].
+
+ Algorithm used:
+
+ `default` - basic beam search strategy. Larger beams generally result in better decoding,
+ however the time required for the search also grows steadily.
+
+ `maes` = modified adaptive expansion search. Please refer to the paper:
+ [Accelerating RNN Transducer Inference via Adaptive Expansion Search]
+ (https://ieeexplore.ieee.org/document/9250505)
+
+ Modified Adaptive Synchronous Decoding (mAES) execution time is adaptive w.r.t the
+ number of expansions (for tokens) required per timestep. The number of expansions can usually
+ be constrained to 1 or 2, and in most cases 2 is sufficient.
+
+ This beam search technique can possibly obtain superior WER while sacrificing some evaluation time.
+
+ score_norm: bool, whether to normalize the scores of the log probabilities.
+
+ return_best_hypothesis: bool, decides whether to return a single hypothesis (the best out of N),
+ or return all N hypotheses (sorted with best score first). The container class changes based on
+ this flag -
+ When set to True (default), returns a single Hypothesis.
+ When set to False, returns a NBestHypotheses container, which contains a list of Hypothesis.
+
+ # The following arguments are specific to the chosen `search_type`
+
+ # mAES flags
+ maes_num_steps: Number of adaptive steps to take. From the paper, 2 steps is generally sufficient. int > 0.
+
+ maes_prefix_alpha: Maximum prefix length in prefix search. Must be an integer, and is advised to keep this as 1
+ in order to reduce expensive beam search cost later. int >= 0.
+
+ maes_expansion_beta: Maximum number of prefix expansions allowed, in addition to the beam size.
+ Effectively, the number of hypothesis = beam_size + maes_expansion_beta. Must be an int >= 0,
+ and affects the speed of inference since large values will perform large beam search in the next step.
+
+ maes_expansion_gamma: Float pruning threshold used in the prune-by-value step when computing the expansions.
+ The default (2.3) is selected from the paper. It performs a comparison
+ (max_log_prob - gamma <= log_prob[v]) where v is all vocabulary indices in the Vocab set and max_log_prob
+ is the "most" likely token to be predicted. Gamma therefore provides a margin of additional tokens which
+ can be potential candidates for expansion apart from the "most likely" candidate.
+ Lower values will reduce the number of expansions (by increasing pruning-by-value, thereby improving speed
+ but hurting accuracy). Higher values will increase the number of expansions (by reducing pruning-by-value,
+ thereby reducing speed but potentially improving accuracy). This is a hyper parameter to be experimentally
+ tuned on a validation set.
+
+ softmax_temperature: Scales the logits of the joint prior to computing log_softmax.
+
+ preserve_alignments: Bool flag which preserves the history of alignments generated during
+ beam decoding (sample). When set to true, the Hypothesis will contain
+ the non-null value for `alignments` in it. Here, `alignments` is a List of List of Tensor (of length V + 1)
+
+ The length of the list corresponds to the Acoustic Length (T).
+ Each value in the list (Ti) is a torch.Tensor (U), representing 1 or more targets from a vocabulary.
+ U is the number of target tokens for the current timestep Ti.
+
+ NOTE: `preserve_alignments` is an invalid argument for any `search_type`
+ other than basic beam search.
+
+ ngram_lm_model: str
+ The path to the N-gram LM.
+ ngram_lm_alpha: float
+ Alpha weight of N-gram LM.
+ """
+
+ @property
+ def input_types(self):
+ """Returns definitions of module input ports."""
+ return {
+ "encoder_output": NeuralType(('B', 'D', 'T'), AcousticEncodedRepresentation()),
+ "encoded_lengths": NeuralType(tuple('B'), LengthsType()),
+ "partial_hypotheses": [NeuralType(elements_type=HypothesisType(), optional=True)], # must always be last
+ }
+
+ @property
+ def output_types(self):
+ """Returns definitions of module output ports."""
+ return {"predictions": [NeuralType(elements_type=HypothesisType())]}
+
+ def __init__(
+ self,
+ decoder_model: rnnt_abstract.AbstractRNNTDecoder,
+ joint_model: rnnt_abstract.AbstractRNNTJoint,
+ durations: list,
+ beam_size: int,
+ search_type: str = 'default',
+ score_norm: bool = True,
+ return_best_hypothesis: bool = True,
+ maes_num_steps: int = 2,
+ maes_prefix_alpha: int = 1,
+ maes_expansion_gamma: float = 2.3,
+ maes_expansion_beta: int = 2,
+ softmax_temperature: float = 1.0,
+ preserve_alignments: bool = False,
+ ngram_lm_model: Optional[str] = None,
+ ngram_lm_alpha: float = 0.3,
+ ):
+ self.joint = joint_model
+ self.decoder = decoder_model
+ self.durations = durations
+
+ self.token_offset = 0
+ self.search_type = search_type
+ self.blank = decoder_model.blank_idx
+ self.vocab_size = decoder_model.vocab_size
+ self.return_best_hypothesis = return_best_hypothesis
+
+ self.beam_size = beam_size
+ self.score_norm = score_norm
+ self.max_candidates = beam_size
+ self.softmax_temperature = softmax_temperature
+ self.preserve_alignments = preserve_alignments
+
+ if preserve_alignments:
+ raise ValueError("Alignment preservation has not been implemented.")
+ if beam_size < 1:
+ raise ValueError("Beam search size cannot be less than 1!")
+
+
+ if search_type == "default":
+ if self.beam_size == 1:
+ logging.info(
+ """If beam size is 1, defaults to stateful greedy search.
+ For accurate greedy results, please use GreedyTDTInfer or GreedyBatchedTDTInfer."""
+ )
+ self.search_algorithm = self.default_beam_search
+ elif search_type == "tsd":
+ raise NotImplementedError("`tsd` (Time Synchronous Decoding) has not been implemented.")
+ elif search_type == "alsd":
+ raise NotImplementedError("`alsd` (Alignment Length Synchronous Decoding) has not been implemented.")
+ elif search_type == "nsc":
+ raise NotImplementedError("`nsc` (Constrained Beam Search) has not been implemented.")
+ elif search_type == "maes":
+ self.search_algorithm = self.modified_adaptive_expansion_search
+ else:
+ raise NotImplementedError(
+ f"The search type ({search_type}) supplied is not supported!\n" f"Please use one of : (default, maes)"
+ )
+
+ if self.search_type == 'maes':
+ self.maes_num_steps = int(maes_num_steps)
+ self.maes_prefix_alpha = int(maes_prefix_alpha)
+ self.maes_expansion_beta = int(maes_expansion_beta)
+ self.maes_expansion_gamma = float(maes_expansion_gamma)
+
+ self.max_candidates += maes_expansion_beta
+
+ if self.maes_prefix_alpha < 0:
+ raise ValueError("`maes_prefix_alpha` must be a positive integer.")
+
+ if self.vocab_size < beam_size + maes_expansion_beta:
+ raise ValueError(
+ f"beam_size ({beam_size}) + expansion_beta ({maes_expansion_beta}) "
+ f"should be smaller or equal to vocabulary size ({self.vocab_size})."
+ )
+
+ if self.maes_num_steps < 1:
+ raise ValueError("`maes_num_steps` must be greater than 0.")
+
+ try:
+ self.zero_duration_idx = self.durations.index(0)
+ except ValueError:
+ self.zero_duration_idx = None
+ self.min_non_zero_duration_idx = int(
+ np.argmin(np.ma.masked_where(np.array(self.durations) == 0, self.durations))
+ )
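+ # e.g. with durations [0, 2, 3]: zero_duration_idx == 0, and the masked argmin over the
+ # non-zero entries gives min_non_zero_duration_idx == 1 (the smallest non-zero value, 2)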
+
+ if ngram_lm_model:
+ if search_type != "maes":
+ raise ValueError("For decoding with language model `maes` decoding strategy must be chosen.")
+
+ if KENLM_AVAILABLE:
+ self.ngram_lm = kenlm.Model(ngram_lm_model)
+ self.ngram_lm_alpha = ngram_lm_alpha
+ else:
+ raise ImportError(
+ "KenLM package (https://github.com/kpu/kenlm) is not installed. " "Use ngram_lm_model=None."
+ )
+ else:
+ self.ngram_lm = None
+
+ @typecheck()
+ def __call__(
+ self,
+ encoder_output: torch.Tensor,
+ encoded_lengths: torch.Tensor,
+ partial_hypotheses: Optional[tuple[list[Hypothesis | NBestHypotheses],]] = None,
+ ) -> tuple[list[Hypothesis | NBestHypotheses],]:
+ """Perform general beam search.
+
+ Args:
+ encoder_output: encoder outputs (batch, features, timesteps).
+ encoded_lengths: lengths of the encoder outputs.
+
+ Returns:
+ Either a list containing a single Hypothesis (when `return_best_hypothesis=True`),
+ otherwise a list containing a single NBestHypotheses, which itself contains a list of
+ Hypothesis. This list is sorted such that the best hypothesis is the first element.
+ """
+ # Preserve decoder and joint training state
+ decoder_training_state = self.decoder.training
+ joint_training_state = self.joint.training
+
+ with torch.inference_mode():
+ # Apply optional preprocessing
+ encoder_output = encoder_output.transpose(1, 2) # (B, T, D)
+
+ self.decoder.eval()
+ self.joint.eval()
+
+ hypotheses = []
+ with tqdm(
+ range(encoder_output.size(0)),
+ desc='Beam search progress:',
+ total=encoder_output.size(0),
+ unit='sample',
+ ) as idx_gen:
+
+ _p = next(self.joint.parameters())
+ dtype = _p.dtype
+
+ # Decode every sample in the batch independently.
+ for batch_idx in idx_gen:
+ inseq = encoder_output[batch_idx : batch_idx + 1, : encoded_lengths[batch_idx], :] # [1, T, D]
+ logitlen = encoded_lengths[batch_idx]
+
+ if inseq.dtype != dtype:
+ inseq = inseq.to(dtype=dtype)
+
+                    # Extract the partial hypothesis if it exists
+ partial_hypothesis = partial_hypotheses[batch_idx] if partial_hypotheses is not None else None
+
+ # Execute the specific search strategy
+ nbest_hyps = self.search_algorithm(
+ inseq, logitlen, partial_hypotheses=partial_hypothesis
+                    ) # sorted list of hypotheses
+
+ # Prepare the list of hypotheses
+ nbest_hyps = pack_hypotheses(nbest_hyps)
+
+ # Pack the result
+ if self.return_best_hypothesis:
+ best_hypothesis: Hypothesis = nbest_hyps[0]
+ else:
+ best_hypothesis: NBestHypotheses = NBestHypotheses(nbest_hyps)
+ hypotheses.append(best_hypothesis)
+
+ self.decoder.train(decoder_training_state)
+ self.joint.train(joint_training_state)
+
+ return (hypotheses,)
+
+ def default_beam_search(
+ self,
+ encoder_outputs: torch.Tensor,
+ encoded_lengths: torch.Tensor,
+ partial_hypotheses: Optional[Hypothesis] = None,
+ ) -> List[Hypothesis]:
+ """Default Beam search implementation for TDT models.
+
+ Args:
+ encoder_outputs: encoder outputs (batch, features, timesteps).
+ encoded_lengths: lengths of the encoder outputs.
+            partial_hypotheses: partial hypotheses.
+
+ Returns:
+ nbest_hyps: N-best decoding results
+ """
+ if partial_hypotheses is not None:
+ raise NotImplementedError("Support for `partial_hypotheses` is not implemented.")
+
+ beam = min(self.beam_size, self.vocab_size)
+ beam_k = min(beam, (self.vocab_size - 1))
+ durations_beam_k = min(beam, len(self.durations))
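+        # beam_k excludes the blank token, which is scored separately, hence the (vocab_size - 1) cap.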
+
+ # Initialize zero vector states.
+ decoder_state = self.decoder.initialize_state(encoder_outputs)
+ # Cache decoder results to avoid duplicate computations.
+ cache = {}
+
+ # Initialize hypothesis array with blank hypothesis.
+ start_hyp = Hypothesis(
+ score=0.0, y_sequence=[self.blank], dec_state=decoder_state, timestep=[-1], length=0, last_frame=0
+ )
+ kept_hyps = [start_hyp]
+
+ for time_idx in range(int(encoded_lengths)):
+ # Retrieve hypotheses for current and future frames
+ hyps = [hyp for hyp in kept_hyps if hyp.last_frame == time_idx] # hypotheses for current frame
+            kept_hyps = [hyp for hyp in kept_hyps if hyp.last_frame > time_idx] # hypotheses for future frames
+
+ # Loop over hypotheses of current frame
+ while len(hyps) > 0:
+ max_hyp = max(hyps, key=lambda x: x.score)
+ hyps.remove(max_hyp)
+
+ # Update decoder state and get probability distribution over vocabulary and durations.
+ encoder_output = encoder_outputs[:, time_idx : time_idx + 1, :] # [1, 1, D]
+ decoder_output, decoder_state, _ = self.decoder.score_hypothesis(max_hyp, cache) # [1, 1, D]
+ logits = (
+ self.joint.joint(encoder_output, decoder_output) / self.softmax_temperature
+ ) # [1, 1, 1, V + NUM_DURATIONS + 1]
+ logp = torch.log_softmax(logits[0, 0, 0, : -len(self.durations)], dim=-1) # [V + 1]
+ durations_logp = torch.log_softmax(logits[0, 0, 0, -len(self.durations) :], dim=-1) # [NUM_DURATIONS]
+
+                # Process non-blank tokens
+                # Retrieve the top `beam_k` most probable tokens and the top `durations_beam_k` most probable durations.
+ # Then, select the top `beam_k` pairs of (token, duration) based on the highest combined probabilities.
+ # Note that indices are obtained in the flattened array.
+ logp_topks, logp_topk_idxs = logp[:-1].topk(beam_k, dim=-1) # topk of tokens without blank token
+ durations_logp_topks, durations_logp_topk_idxs = durations_logp.topk(durations_beam_k, dim=-1)
+ total_logp_topks, total_logp_topk_idxs = (
+ torch.cartesian_prod(durations_logp_topks, logp_topks).sum(dim=-1).topk(beam_k, dim=-1)
+ )
+
+ # Loop over pairs of (token, duration) with highest combined log prob
+ for total_logp_topk, total_logp_topk_idx in zip(total_logp_topks, total_logp_topk_idxs):
+ # Restore indices from flattened array indices
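+                    # torch.cartesian_prod enumerates (duration, token) pairs with the token index varying
+                    # fastest, so flat_idx = duration_rank * beam_k + token_rank, which is inverted below.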
+ token_idx = int(logp_topk_idxs[total_logp_topk_idx % beam_k])
+ duration_idx = int(durations_logp_topk_idxs[total_logp_topk_idx // beam_k])
+
+ duration = self.durations[duration_idx]
+ # Construct hypothesis for non-blank token
+ new_hyp = Hypothesis(
+ score=float(max_hyp.score + total_logp_topk), # update score
+ y_sequence=max_hyp.y_sequence + [token_idx], # update hypothesis sequence
+ dec_state=decoder_state, # update decoder state
+ timestep=max_hyp.timestep + [time_idx + duration], # update timesteps
+ length=encoded_lengths,
+ last_frame=max_hyp.last_frame + duration,
+ ) # update frame idx where last token appeared
+
+ # Update current frame hypotheses if duration is zero and future frame hypotheses otherwise
+ if duration == 0:
+ hyps.append(new_hyp)
+ else:
+ kept_hyps.append(new_hyp)
+
+ # Update future frames with blank tokens
+ # Note: blank token can have only non-zero duration
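+                # (a zero-duration blank would neither emit a token nor advance time, stalling the search)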
+ for duration_idx in durations_logp_topk_idxs:
+ duration_idx = int(duration_idx)
+ # If zero is the only duration in topk, switch to closest non-zero duration to continue
+ if duration_idx == self.zero_duration_idx:
+ if durations_logp_topk_idxs.shape[0] == 1:
+ duration_idx = self.min_non_zero_duration_idx
+ else:
+ continue
+
+ duration = self.durations[duration_idx]
+ new_hyp = Hypothesis(
+ score=float(max_hyp.score + logp[self.blank] + durations_logp[duration_idx]), # update score
+ y_sequence=max_hyp.y_sequence[:], # no need to update sequence
+ dec_state=max_hyp.dec_state, # no need to update decoder state
+ timestep=max_hyp.timestep[:], # no need to update timesteps
+ length=encoded_lengths,
+ last_frame=max_hyp.last_frame + duration,
+ ) # update frame idx where last token appeared
+ kept_hyps.append(new_hyp)
+
+ # Merge duplicate hypotheses.
+ # If two consecutive blank tokens are predicted and their duration values sum up to the same number,
+ # it will produce two hypotheses with the same token sequence but different scores.
+ kept_hyps = self.merge_duplicate_hypotheses(kept_hyps)
+
+ if len(hyps) > 0:
+                    # Keep those hypotheses that have scores greater than the next search generation
+ hyps_max = float(max(hyps, key=lambda x: x.score).score)
+ kept_most_prob = sorted(
+ [hyp for hyp in kept_hyps if hyp.score > hyps_max],
+ key=lambda x: x.score,
+ )
+                    # If enough hypotheses have scores greater than the next search generation,
+ # stop beam search.
+ if len(kept_most_prob) >= beam:
+ kept_hyps = kept_most_prob
+ break
+ else:
+                    # If there are no hypotheses in the current frame,
+ # keep only `beam` best hypotheses for the next search generation.
+ kept_hyps = sorted(kept_hyps, key=lambda x: x.score, reverse=True)[:beam]
+ return self.sort_nbest(kept_hyps)
+
+ def modified_adaptive_expansion_search(
+ self,
+ encoder_outputs: torch.Tensor,
+ encoded_lengths: torch.Tensor,
+ partial_hypotheses: Optional[Hypothesis] = None,
+ ) -> List[Hypothesis]:
+ """
+        Modified Adaptive Expansion Search algorithm for TDT models.
+ Based on/modified from https://ieeexplore.ieee.org/document/9250505.
+ Supports N-gram language model shallow fusion.
+
+ Args:
+ encoder_outputs: encoder outputs (batch, features, timesteps).
+ encoded_lengths: lengths of the encoder outputs.
+ partial_hypotheses: partial hypotheses.
+
+ Returns:
+ nbest_hyps: N-best decoding results
+ """
+ if partial_hypotheses is not None:
+ raise NotImplementedError("Support for `partial_hypotheses` is not implemented.")
+
+ beam = min(self.beam_size, self.vocab_size)
+ beam_state = self.decoder.initialize_state(
+ torch.zeros(1, device=encoder_outputs.device, dtype=encoder_outputs.dtype)
+        ) # [L, B, H], [L, B, H] for LSTMs
+
+ # Initialize first hypothesis for the beam (blank).
+ start_hyp = Hypothesis(
+ y_sequence=[self.blank],
+ score=0.0,
+ dec_state=self.decoder.batch_select_state(beam_state, 0),
+ timestep=[-1],
+ length=0,
+ last_frame=0,
+ )
+ init_tokens = [start_hyp]
+
+ # Cache decoder results to avoid duplicate computations.
+ cache = {}
+
+ # Decode a batch of beam states and scores
+ beam_decoder_output, beam_state = self.decoder.batch_score_hypothesis(init_tokens, cache)
+ state = beam_state[0]
+
+ # Initialize first hypothesis for the beam (blank) for kept hypotheses
+ start_hyp_kept = Hypothesis(
+ y_sequence=[self.blank],
+ score=0.0,
+ dec_state=state,
+ dec_out=[beam_decoder_output[0]],
+ timestep=[-1],
+ length=0,
+ last_frame=0,
+ )
+
+ kept_hyps = [start_hyp_kept]
+
+ # Setup ngram LM:
+ if self.ngram_lm:
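+            # Initialize the LM state with KenLM's begin-of-sentence context.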
+ init_lm_state = kenlm.State()
+ self.ngram_lm.BeginSentenceWrite(init_lm_state)
+ start_hyp_kept.ngram_lm_state = init_lm_state
+
+ for time_idx in range(encoded_lengths):
+ # Select current iteration hypotheses
+ hyps = [x for x in kept_hyps if x.last_frame == time_idx]
+ kept_hyps = [x for x in kept_hyps if x.last_frame > time_idx]
+
+ if len(hyps) == 0:
+ continue
+
+ beam_encoder_output = encoder_outputs[:, time_idx : time_idx + 1] # [1, 1, D]
+ # Perform prefix search to update hypothesis scores.
+ if self.zero_duration_idx is not None:
+ hyps = self.prefix_search(
+ sorted(hyps, key=lambda x: len(x.y_sequence), reverse=True),
+ beam_encoder_output,
+ prefix_alpha=self.maes_prefix_alpha,
+ )
+
+ list_b = [] # List that contains the blank token emissions
+ list_nb = [] # List that contains the non-zero duration non-blank token emissions
+ # Repeat for number of mAES steps
+ for n in range(self.maes_num_steps):
+ # Pack the decoder logits for all current hypotheses
+ beam_decoder_output = torch.stack([h.dec_out[-1] for h in hyps]) # [H, 1, D]
+
+ # Extract the log probabilities
+ beam_logits = self.joint.joint(beam_encoder_output, beam_decoder_output) / self.softmax_temperature
+ beam_logp = torch.log_softmax(beam_logits[:, 0, 0, : -len(self.durations)], dim=-1)
+ beam_duration_logp = torch.log_softmax(beam_logits[:, 0, 0, -len(self.durations) :], dim=-1)
+
+                # Retrieve the top `max_candidates` most probable tokens.
+ # Then, select the top `max_candidates` pairs of (token, duration)
+ # based on the highest combined probabilities.
+ # Note that indices are obtained in flattened array.
+ beam_logp_topks, beam_idx_topks = beam_logp.topk(self.max_candidates, dim=-1)
+ beam_total_logp = (beam_duration_logp[:, :, None] + beam_logp_topks[:, None, :]).view(
+ len(hyps), -1
+                ) # [B, NUM_DURATIONS * MAX_CANDIDATES]
+ beam_total_logp_topks, beam_total_logp_topk_idxs = beam_total_logp.topk(
+ self.max_candidates, dim=-1
+ ) # [B, MAX_CANDIDATES]
+
+ # Prune hypothesis to obtain k expansions
+ beam_best_expansion_scores = beam_total_logp_topks.max(dim=-1, keepdim=True).values
+ beam_masks = beam_total_logp_topks >= beam_best_expansion_scores - self.maes_expansion_gamma
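+                # Adaptive pruning: keep only expansions scoring within `maes_expansion_gamma` of the best one.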
+ beam_kexpansions_idxs = [
+ sum_logp_topk_idxs[mask] for sum_logp_topk_idxs, mask in zip(beam_total_logp_topk_idxs, beam_masks)
+ ]
+
+                list_exp = [] # List of hypothesis expansions with zero-duration tokens
+                list_nb_exp = [] # List of hypothesis expansions with non-zero duration non-blank tokens
+                for hyp_idx, hyp in enumerate(hyps): # For each hypothesis
+ for idx in beam_kexpansions_idxs[hyp_idx]: # For all expansions within this hypothesis
+ # Restore indices in logp and durations_logp arrays from flattened indices.
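+                        # beam_total_logp was flattened as durations (outer) x top-k tokens (inner),
+                        # so token_rank = idx % max_candidates and duration_rank = idx // max_candidates.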
+ k = int(beam_idx_topks[hyp_idx][idx % self.max_candidates])
+ duration = self.durations[int(idx // self.max_candidates)]
+ total_logp = float(beam_total_logp[hyp_idx][idx])
+
+ # Forcing blank token to have non-zero duration
+ if k == self.blank and duration == 0:
+ duration = self.durations[self.min_non_zero_duration_idx]
+
+ new_hyp = Hypothesis(
+ score=hyp.score + total_logp,
+ y_sequence=hyp.y_sequence[:],
+ dec_out=hyp.dec_out[:],
+ dec_state=hyp.dec_state,
+ timestep=hyp.timestep[:],
+ length=time_idx,
+ last_frame=hyp.last_frame + duration,
+ )
+
+ if self.ngram_lm:
+ new_hyp.ngram_lm_state = hyp.ngram_lm_state
+
+ # If the expansion was for blank
+ if k == self.blank:
+ list_b.append(new_hyp)
+ else:
+ new_hyp.y_sequence.append(k)
+ new_hyp.timestep.append(time_idx + duration)
+
+ if self.ngram_lm:
+ lm_score, new_hyp.ngram_lm_state = self.compute_ngram_score(hyp.ngram_lm_state, int(k))
+ new_hyp.score += self.ngram_lm_alpha * lm_score
+
+                            # If the token duration is 0, add to the zero-duration expansions list
+ if duration == 0:
+ list_exp.append(new_hyp)
+ else:
+ list_nb_exp.append(new_hyp)
+
+                # Update decoder states for hypotheses that do not end with blank
+ hyps_to_update = list_nb_exp + list_exp
+ if len(hyps_to_update) > 0:
+ # Decode a batch of beam states and scores
+ beam_decoder_output, beam_state = self.decoder.batch_score_hypothesis(
+ hyps_to_update,
+ cache,
+ )
+ for hyp_idx, hyp in enumerate(hyps_to_update):
+ # Preserve the decoder logits for the current beam
+ hyp.dec_out.append(beam_decoder_output[hyp_idx])
+ hyp.dec_state = beam_state[hyp_idx]
+
+                # If there were no zero-duration token expansions in any of the hypotheses, exit early.
+ list_nb += list_nb_exp
+ if not list_exp:
+ kept_hyps = kept_hyps + list_b + list_nb
+ kept_hyps = self.merge_duplicate_hypotheses(kept_hyps)
+ kept_hyps = sorted(kept_hyps, key=lambda x: x.score, reverse=True)[:beam]
+
+ break
+ else:
+ # If this isn't the last mAES step
+ if n < (self.maes_num_steps - 1):
+                        # Carry the expanded hypotheses over to the next iteration
+ hyps = self.merge_duplicate_hypotheses(list_exp)
+ else:
+ # If this is the last mAES step add probabilities of the blank token to the end.
+ # Extract the log probabilities
+ beam_decoder_output = torch.stack([h.dec_out[-1] for h in list_exp]) # [H, 1, D]
+ beam_logits = (
+ self.joint.joint(beam_encoder_output, beam_decoder_output) / self.softmax_temperature
+ )
+ beam_logp = torch.log_softmax(beam_logits[:, 0, 0, : -len(self.durations)], dim=-1)
+
+ # Get most probable durations
+ beam_duration_logp = torch.log_softmax(beam_logits[:, 0, 0, -len(self.durations) :], dim=-1)
+ _, beam_max_duration_idx = torch.max(beam_duration_logp, dim=-1)
+
+ # For all expansions, add the score for the blank label
+ for hyp_idx, hyp in enumerate(list_exp):
+ # If zero duration was obtained, change to the closest non-zero duration
+ duration_idx = int(beam_max_duration_idx[hyp_idx])
+ if duration_idx == self.zero_duration_idx:
+ duration_idx = self.min_non_zero_duration_idx
+
+ total_logp = float(
+ beam_logp[hyp_idx, self.blank] + beam_duration_logp[hyp_idx, duration_idx]
+ )
+ hyp.score += total_logp
+ hyp.last_frame += self.durations[duration_idx]
+
+                    # Finally, update the kept hypotheses with the sorted top `beam` candidates
+ kept_hyps = kept_hyps + list_b + list_exp + list_nb
+ kept_hyps = self.merge_duplicate_hypotheses(kept_hyps)
+ kept_hyps = sorted(kept_hyps, key=lambda x: x.score, reverse=True)[:beam]
+
+        # Sort the hypotheses by their scores
+ return self.sort_nbest(kept_hyps)
+
+ def merge_duplicate_hypotheses(self, hypotheses):
+ """
+ Merges hypotheses with identical token sequences and lengths.
+ The combined hypothesis's probability is the sum of the probabilities of all duplicates.
+ Duplicate hypotheses occur when two consecutive blank tokens are predicted
+ and their duration values sum up to the same number.
+
+ Args:
+ hypotheses: list of hypotheses.
+
+ Returns:
+            hypotheses: list of hypotheses without duplicates.
+ """
+ sorted_hyps = sorted(hypotheses, key=lambda x: x.score, reverse=True)
+ kept_hyps = {}
+ for hyp in sorted_hyps:
+ hyp_key = (tuple(hyp.y_sequence), int(hyp.last_frame))
+ if hyp_key in kept_hyps:
+ kept_hyp = kept_hyps[hyp_key]
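+                # Scores are log-probabilities, so duplicates are merged by summing probabilities in log space.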
+ kept_hyp.score = float(torch.logaddexp(torch.tensor(kept_hyp.score), torch.tensor(hyp.score)))
+ else:
+ kept_hyps[hyp_key] = hyp
+ return list(kept_hyps.values())
+
+ def set_decoding_type(self, decoding_type: str):
+ """
+        Sets decoding type. Please check train_kenlm.py in scripts/asr_language_modeling/ to find out why this is needed.
+ Args:
+ decoding_type: decoding type
+ """
+ # TOKEN_OFFSET for BPE-based models
+ if decoding_type == 'subword':
+ from nemo.collections.asr.parts.submodules.ctc_beam_decoding import DEFAULT_TOKEN_OFFSET
+
+ self.token_offset = DEFAULT_TOKEN_OFFSET
+
+ def prefix_search(
+ self, hypotheses: List[Hypothesis], encoder_output: torch.Tensor, prefix_alpha: int
+ ) -> List[Hypothesis]:
+ """
+ Performs a prefix search and updates the scores of the hypotheses in place.
+ Based on https://arxiv.org/pdf/1211.3711.pdf.
+
+ Args:
+ hypotheses: a list of hypotheses sorted by the length from the longest to the shortest.
+ encoder_output: encoder output.
+ prefix_alpha: maximum allowable length difference between hypothesis and a prefix.
+
+ Returns:
+ hypotheses: list of hypotheses with updated scores.
+ """
+ # Iterate over hypotheses.
+ for curr_idx, curr_hyp in enumerate(hypotheses[:-1]):
+ # For each hypothesis, iterate over the subsequent hypotheses.
+ # If a hypothesis is a prefix of the current one, update current score.
+ for pref_hyp in hypotheses[(curr_idx + 1) :]:
+ curr_hyp_length = len(curr_hyp.y_sequence)
+ pref_hyp_length = len(pref_hyp.y_sequence)
+
+ if (
+ is_prefix(curr_hyp.y_sequence, pref_hyp.y_sequence)
+ and (curr_hyp_length - pref_hyp_length) <= prefix_alpha
+ ):
+ # Compute the score of the first token
+ # that follows the prefix hypothesis tokens in current hypothesis.
+ # Use the decoder output, which is stored in the prefix hypothesis.
+ logits = self.joint.joint(encoder_output, pref_hyp.dec_out[-1]) / self.softmax_temperature
+ logp = torch.log_softmax(logits[0, 0, 0, : -len(self.durations)], dim=-1)
+ duration_logp = torch.log_softmax(logits[0, 0, 0, -len(self.durations) :], dim=-1)
+ curr_score = pref_hyp.score + float(
+ logp[curr_hyp.y_sequence[pref_hyp_length]] + duration_logp[self.zero_duration_idx]
+ )
+
+ if self.ngram_lm:
+ lm_score, next_state = self.compute_ngram_score(
+ pref_hyp.ngram_lm_state, int(curr_hyp.y_sequence[pref_hyp_length])
+ )
+ curr_score += self.ngram_lm_alpha * lm_score
+
+ for k in range(pref_hyp_length, (curr_hyp_length - 1)):
+ # Compute the score of the next token.
+ # Approximate decoder output with the one that is stored in current hypothesis.
+ logits = self.joint.joint(encoder_output, curr_hyp.dec_out[k]) / self.softmax_temperature
+ logp = torch.log_softmax(logits[0, 0, 0, : -len(self.durations)], dim=-1)
+ duration_logp = torch.log_softmax(logits[0, 0, 0, -len(self.durations) :], dim=-1)
+ curr_score += float(logp[curr_hyp.y_sequence[k + 1]] + duration_logp[self.zero_duration_idx])
+
+ if self.ngram_lm:
+ lm_score, next_state = self.compute_ngram_score(
+ next_state, int(curr_hyp.y_sequence[k + 1])
+ )
+ curr_score += self.ngram_lm_alpha * lm_score
+
+ # Update current hypothesis score
+ curr_hyp.score = np.logaddexp(curr_hyp.score, curr_score)
+ return hypotheses
+
+ def compute_ngram_score(self, current_lm_state: "kenlm.State", label: int) -> Tuple[float, "kenlm.State"]:
+ """
+ Computes the score for KenLM Ngram language model.
+
+ Args:
+ current_lm_state: current state of the KenLM language model.
+ label: next label.
+
+ Returns:
+            lm_score: score for `label`.
+            next_state: the updated KenLM state after scoring `label`.
+ """
+ if self.token_offset:
+ label = chr(label + self.token_offset)
+ else:
+ label = str(label)
+
+ next_state = kenlm.State()
+ lm_score = self.ngram_lm.BaseScore(current_lm_state, label, next_state)
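+        # KenLM returns log10 probabilities; rescale to natural log to match the model's log-softmax scores.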
+ lm_score *= 1.0 / np.log10(np.e)
+
+ return lm_score, next_state
+
+ def sort_nbest(self, hyps: List[Hypothesis]) -> List[Hypothesis]:
+ """Sort hypotheses by score or score given sequence length.
+
+ Args:
+ hyps: list of hypotheses
+
+ Return:
+ hyps: sorted list of hypotheses
+ """
+ if self.score_norm:
+ return sorted(hyps, key=lambda x: x.score / len(x.y_sequence), reverse=True)
+ else:
+ return sorted(hyps, key=lambda x: x.score, reverse=True)
diff --git a/nemo/collections/asr/parts/utils/rnnt_utils.py b/nemo/collections/asr/parts/utils/rnnt_utils.py
index 76e9da6087ed..8d2755fcc0ae 100644
--- a/nemo/collections/asr/parts/utils/rnnt_utils.py
+++ b/nemo/collections/asr/parts/utils/rnnt_utils.py
@@ -85,6 +85,8 @@ class Hypothesis:
tokens: (Optional) A list of decoded tokens (can be characters or word-pieces.
last_token (Optional): A token or batch of tokens which was predicted in the last step.
+
+ last_frame (Optional): Index of the last decoding step at which the hypothesis was updated, including blank token predictions.
"""
score: float
@@ -105,6 +107,7 @@ class Hypothesis:
tokens: Optional[Union[List[int], torch.Tensor]] = None
last_token: Optional[torch.Tensor] = None
token_duration: Optional[List[int]] = None
+ last_frame: Optional[int] = None
@property
def non_blank_frame_confidence(self) -> List[float]:
@@ -244,7 +247,8 @@ def __init__(
Args:
batch_size: batch size for hypotheses
- init_length: initial estimate for the length of hypotheses (if the real length is higher, tensors will be reallocated)
+ init_length: initial estimate for the length of hypotheses (if the real length is higher,
+ tensors will be reallocated)
device: device for storing hypotheses
float_dtype: float type for scores
"""
@@ -274,6 +278,9 @@ def __init__(
self._ones_batch = torch.ones_like(self._batch_indices)
def clear_(self):
+ """
+ Clears batched hypotheses state.
+ """
self.current_lengths.fill_(0)
self.transcript.fill_(0)
self.timesteps.fill_(0)
@@ -497,6 +504,9 @@ def __init__(
self._batch_indices = torch.arange(batch_size, device=device)
def clear_(self):
+ """
+ Clears batched hypotheses state.
+ """
self.current_lengths.fill_(0)
self.timesteps.fill_(0)
self.logits.fill_(0.0)
diff --git a/nemo/collections/asr/parts/utils/streaming_utils.py b/nemo/collections/asr/parts/utils/streaming_utils.py
index 415096a0c9d5..cb272e3d0462 100644
--- a/nemo/collections/asr/parts/utils/streaming_utils.py
+++ b/nemo/collections/asr/parts/utils/streaming_utils.py
@@ -22,7 +22,7 @@
from torch.utils.data import DataLoader
from nemo.collections.asr.data.audio_to_text_lhotse_prompted import PromptedAudioToTextMiniBatch
-from nemo.collections.asr.models.ctc_bpe_models import EncDecCTCModelBPE
+from nemo.collections.asr.models import ASRModel
from nemo.collections.asr.parts.mixins.streaming import StreamingEncoder
from nemo.collections.asr.parts.preprocessing.features import normalize_batch
from nemo.collections.asr.parts.preprocessing.segment import get_samples
@@ -79,8 +79,8 @@ def longest_common_subsequence_merge(X, Y, filepath=None):
Assumption is that the two chunks are consecutive chunks, and there exists at least small overlap acoustically.
- It is a sub-word token merge algorithm, operating on the abstract notion of integer ids representing the subword ids.
- It is independent of text or character encoding.
+ It is a sub-word token merge algorithm, operating on the abstract notion of integer ids representing
+ the subword ids. It is independent of text or character encoding.
Since the algorithm is merge based, and depends on consecutive buffers, the very first buffer is processes using
the "middle tokens" algorithm.
@@ -292,8 +292,8 @@ def lcs_alignment_merge_buffer(buffer, data, delay, model, max_steps_per_timeste
Merges the new text from the current frame with the previous text contained in the buffer.
The alignment is based on a Longest Common Subsequence algorithm, with some additional heuristics leveraging
- the notion that the chunk size is >= the context window. In case this assumptio is violated, the results of the merge
- will be incorrect (or at least obtain worse WER overall).
+ the notion that the chunk size is >= the context window. In case this assumption is violated, the results of the
+ merge will be incorrect (or at least obtain worse WER overall).
"""
# If delay timesteps is 0, that means no future context was used. Simply concatenate the buffer with new data.
if delay < 1:
@@ -327,8 +327,8 @@ def inplace_buffer_merge(buffer, data, timesteps, model):
Merges the new text from the current frame with the previous text contained in the buffer.
The alignment is based on a Longest Common Subsequence algorithm, with some additional heuristics leveraging
- the notion that the chunk size is >= the context window. In case this assumptio is violated, the results of the merge
- will be incorrect (or at least obtain worse WER overall).
+ the notion that the chunk size is >= the context window. In case this assumption is violated, the results of
+ the merge will be incorrect (or at least obtain worse WER overall).
"""
# If delay timesteps is 0, that means no future context was used. Simply concatenate the buffer with new data.
if timesteps < 1:
@@ -391,7 +391,7 @@ def __init__(self, asr_model, chunk_size, buffer_size):
cfg.preprocessor.dither = 0.0
cfg.preprocessor.pad_to = 0
cfg.preprocessor.normalize = "None"
- self.raw_preprocessor = EncDecCTCModelBPE.from_config_dict(cfg.preprocessor)
+ self.raw_preprocessor = ASRModel.from_config_dict(cfg.preprocessor)
self.raw_preprocessor.to(asr_model.device)
def reset(self):
@@ -756,7 +756,7 @@ def __init__(
cfg.preprocessor.dither = 0.0
cfg.preprocessor.pad_to = 0
cfg.preprocessor.normalize = "None"
- self.raw_preprocessor = EncDecCTCModelBPE.from_config_dict(cfg.preprocessor)
+ self.raw_preprocessor = ASRModel.from_config_dict(cfg.preprocessor)
self.raw_preprocessor.to(asr_model.device)
self.preprocessor = self.raw_preprocessor
@@ -1091,12 +1091,15 @@ def _get_batch_preds(self):
- For all samples, determine if signal has finished.
- If so, skip calculation of mel-specs.
- If not, compute mel spec and length
- - Perform Encoder forward over this sub-batch of samples. Maintain the indices of samples that were processed.
- - If performing stateful decoding, prior to decoder forward, remove the states of samples that were not processed.
+ - Perform Encoder forward over this sub-batch of samples. Maintain the indices of samples that
+ were processed.
+ - If performing stateful decoding, prior to decoder forward, remove the states of samples that
+ were not processed.
- Perform Decoder + Joint forward for samples that were processed.
- For all output RNNT alignment matrix of the joint do:
- If signal has ended previously (this was last buffer of padding), skip alignment
- - Otherwise, recalculate global index of this sample from the sub-batch index, and preserve alignment.
+ - Otherwise, recalculate global index of this sample from the sub-batch index, and preserve
+ alignment.
- Same for preds
- Update indices of sub-batch with global index map.
- Redo steps until all samples were processed (sub-batch size == 0).
@@ -1362,15 +1365,17 @@ def transcribe(
class CacheAwareStreamingAudioBuffer:
"""
- A buffer to be used for cache-aware streaming. It can load a single or multiple audio files/processed signals, split them in chunks and return one on one.
- It can be used to simulate streaming audio or audios.
+ A buffer to be used for cache-aware streaming. It can load a single or multiple audio
+ files/processed signals, split them in chunks and return one on one. It can be used to
+ simulate streaming audio or audios.
"""
def __init__(self, model, online_normalization=None, pad_and_drop_preencoded=False):
'''
Args:
model: An ASR model.
- online_normalization (bool): whether to perform online normalization per chunk or normalize the whole audio before chunking
+ online_normalization (bool): whether to perform online normalization per chunk or
+ normalize the whole audio before chunking
pad_and_drop_preencoded (bool): if true pad first audio chunk and always drop preencoded
'''
self.model = model
@@ -1430,7 +1435,8 @@ def __iter__(self):
audio_chunk = self.buffer[:, :, self.buffer_idx : self.buffer_idx + chunk_size]
if self.sampling_frames is not None:
- # checking to make sure the audio chunk has enough frames to produce at least one output after downsampling
+ # checking to make sure the audio chunk has enough frames to produce at least one output after
+ # downsampling
if self.buffer_idx == 0 and isinstance(self.sampling_frames, list):
cur_sampling_frames = self.sampling_frames[0]
else:
diff --git a/nemo/collections/asr/parts/utils/transcribe_utils.py b/nemo/collections/asr/parts/utils/transcribe_utils.py
index 0d4f4c895bcf..189d98537d3f 100644
--- a/nemo/collections/asr/parts/utils/transcribe_utils.py
+++ b/nemo/collections/asr/parts/utils/transcribe_utils.py
@@ -199,7 +199,8 @@ def get_buffered_pred_feat_multitaskAED(
if filepaths:
logging.info(
- "Deteced audio files as input, default to English ASR with Punctuation and Capitalization output. Please use manifest input for other options."
+ "Deteced audio files as input, default to English ASR with Punctuation and Capitalization output. \
+ Please use manifest input for other options."
)
for audio_file in tqdm(filepaths, desc="Transcribing:", total=len(filepaths), ncols=80):
meta = {
@@ -281,12 +282,16 @@ def prepare_audio_data(cfg: DictConfig) -> Tuple[List[str], bool]:
- append_pred (bool): Flag indicating whether to append predictions to an existing dataset.
- audio_type (str): Type of audio files to consider.
- dataset_manifest (str): Path to the dataset manifest file.
- - audio_key (str, optional): Key in the manifest file specifying the audio file path. Defaults to 'audio_filepath'.
- - presort_manifest (bool, optional): Flag indicating whether to presort the manifest file. Defaults to True.
+ - audio_key (str, optional): Key in the manifest file specifying the audio file path.
+ Defaults to 'audio_filepath'.
+ - presort_manifest (bool, optional): Flag indicating whether to presort the manifest file.
+ Defaults to True.
Returns:
Tuple[List[str], bool]: A tuple containing the following:
- - filepaths (List[str]): List of filepaths to the audio files if path to the directory containing audio files is provided.
- - sorted_manifest_path (bool): Path to the sorted manifest file if path to the dataset manifest file is provided.
+ - filepaths (List[str]): List of filepaths to the audio files if path to the directory
+ containing audio files is provided.
+ - sorted_manifest_path (bool): Path to the sorted manifest file if path to the dataset
+ manifest file is provided.
"""
filepaths = None
@@ -308,7 +313,8 @@ def prepare_audio_data(cfg: DictConfig) -> Tuple[List[str], bool]:
item[audio_key] = get_full_path(item[audio_key], cfg.dataset_manifest)
if item.get("duration") is None and cfg.presort_manifest:
raise ValueError(
- f"Requested presort_manifest=True, but line {line} in manifest {cfg.dataset_manifest} lacks a 'duration' field."
+ f"Requested presort_manifest=True, but line {line} in manifest {cfg.dataset_manifest} \
+ lacks a 'duration' field."
)
with NamedTemporaryFile(mode='w', suffix='.json', delete=False) as f:
@@ -388,7 +394,7 @@ def write_transcription(
model_name: str,
filepaths: List[str] = None,
compute_langs: bool = False,
- compute_timestamps: bool = False,
+ timestamps: bool = False,
) -> Tuple[str, str]:
"""Write generated transcription to output file."""
if cfg.append_pred:
@@ -433,7 +439,7 @@ def write_transcription(
else: # transcription is Hypothesis
item = {'audio_filepath': filepaths[idx], pred_text_attr_name: transcription.text}
- if compute_timestamps:
+ if timestamps:
timestamps = transcription.timestep
if timestamps is not None and isinstance(timestamps, dict):
timestamps.pop(
@@ -441,7 +447,7 @@ def write_transcription(
) # Pytorch tensor calculating index of each token, not needed.
for key in timestamps.keys():
values = normalize_timestamp_output(timestamps[key])
- item[f'timestamps_{key}'] = values
+ item[f'{key}'] = values
if compute_langs:
item['pred_lang'] = transcription.langs
@@ -458,7 +464,7 @@ def write_transcription(
else: # transcription is Hypothesis
item[pred_text_attr_name] = best_hyps[idx].text
- if compute_timestamps:
+ if timestamps:
timestamps = best_hyps[idx].timestep
if timestamps is not None and isinstance(timestamps, dict):
timestamps.pop(
@@ -466,7 +472,7 @@ def write_transcription(
) # Pytorch tensor calculating index of each token, not needed.
for key in timestamps.keys():
values = normalize_timestamp_output(timestamps[key])
- item[f'timestamps_{key}'] = values
+ item[f'{key}'] = values
if compute_langs:
item['pred_lang'] = best_hyps[idx].langs
@@ -492,10 +498,14 @@ def compute_metrics_per_sample(
Args:
manifest_path: str, Required - path to dataset JSON manifest file (in NeMo format)
- reference_field: str, Optional - name of field in .json manifest with the reference text ("text" by default).
- hypothesis_field: str, Optional - name of field in .json manifest with the hypothesis text ("pred_text" by default).
- metrics: list[str], Optional - list of metrics to be computed (currently supported "wer", "cer", "punct_er")
- punctuation_marks: list[str], Optional - list of punctuation marks for computing punctuation error rate ([".", ",", "?"] by default).
+ reference_field: str, Optional - name of field in .json manifest with the reference text
+ ("text" by default).
+ hypothesis_field: str, Optional - name of field in .json manifest with the hypothesis text
+ ("pred_text" by default).
+ metrics: list[str], Optional - list of metrics to be computed
+ (currently supported "wer", "cer", "punct_er")
+ punctuation_marks: list[str], Optional - list of punctuation marks for computing
+ punctuation error rate ([".", ",", "?"] by default).
output_manifest_path: str, Optional - path where .json manifest with calculated metrics will be saved.
Returns:
@@ -568,6 +578,61 @@ def compute_metrics_per_sample(
return samples_with_metrics
+def process_timestamp_outputs(outputs, subsampling_factor: int = 1, window_stride: float = 0.01):
+ """
+ Process the timestamps from list of hypothesis to user friendly format.
+ Converts the start and end duration from frames to seconds.
+ Args:
+ outputs: List of Hypothesis objects.
+ subsampling_factor: int, Subsampling factor used in the model.
+ window_stride: float, Window stride used in the model. (sometimes referred to as hop length/shift)
+ Returns:
+ List of Hypothesis objects with processed timestamps
+
+ """
+
+ if outputs is None:
+ return outputs
+
+ if isinstance(outputs, rnnt_utils.Hypothesis):
+ outputs = [outputs]
+
+ if not isinstance(outputs[0], rnnt_utils.Hypothesis):
+ raise ValueError(f"Expected Hypothesis object, got {type(outputs[0])}")
+
+ def process_timestamp(timestamp, subsampling_factor, window_stride):
+ """
+ Process the timestamp for a single hypothesis.
+ return the start and end duration in seconds.
+ """
+ for idx, val in enumerate(timestamp):
+ start_offset = val['start_offset']
+ end_offset = val['end_offset']
+ start = start_offset * window_stride * subsampling_factor
+ end = end_offset * window_stride * subsampling_factor
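+            # e.g., start_offset=100 with window_stride=0.01 and subsampling_factor=8 -> 100 * 0.01 * 8 = 8.0 s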
+ val['start'] = start
+ val['end'] = end
+
+ return timestamp
+
+ for idx, hyp in enumerate(outputs):
+ if not hasattr(hyp, 'timestep'):
+ raise ValueError(
+ f"Expected Hypothesis object to have 'timestep' attribute, when compute_timestamps is \
+ enabled but got {hyp}"
+ )
+ timestep = hyp.timestep
+ if 'word' in timestep:
+ outputs[idx].timestep['word'] = process_timestamp(timestep['word'], subsampling_factor, window_stride)
+ if 'char' in timestep:
+ outputs[idx].timestep['char'] = process_timestamp(timestep['char'], subsampling_factor, window_stride)
+ if 'segment' in timestep:
+ outputs[idx].timestep['segment'] = process_timestamp(
+ timestep['segment'], subsampling_factor, window_stride
+ )
+ return outputs
+
+
class PunctuationCapitalization:
def __init__(self, punctuation_marks: str):
"""
diff --git a/nemo/collections/asr/parts/utils/vad_utils.py b/nemo/collections/asr/parts/utils/vad_utils.py
index 29b4f7b33898..aea04b8cafcf 100644
--- a/nemo/collections/asr/parts/utils/vad_utils.py
+++ b/nemo/collections/asr/parts/utils/vad_utils.py
@@ -23,7 +23,6 @@
from pathlib import Path
from typing import Dict, List, Optional, Tuple, Union
-import IPython.display as ipd
import librosa
import matplotlib.pyplot as plt
import numpy as np
@@ -40,6 +39,15 @@
from nemo.collections.common.parts.preprocessing.manifest import get_full_path
from nemo.utils import logging
+HAVE_IPYTHON = False
+try:
+ import IPython.display as ipd
+
+ HAVE_IPYTHON = True
+except (ImportError, ModuleNotFoundError):
+ HAVE_IPYTHON = False
+
+
"""
This file contains all the utility functions required for voice activity detection.
"""
@@ -66,7 +74,8 @@ def prepare_manifest(config: dict) -> str:
input_list = config['input']
else:
raise ValueError(
- "The input for manifest preparation would either be a string of the filepath to manifest or a list of {'audio_filepath': i, 'offset': 0, 'duration': null} "
+ "The input for manifest preparation would either be a string of the filepath to \
+ manifest or a list of {'audio_filepath': i, 'offset': 0, 'duration': null} "
)
args_func = {
@@ -195,7 +204,8 @@ def write_vad_infer_manifest(file: dict, args_func: dict) -> list:
def get_vad_stream_status(data: list) -> list:
"""
- Generate a list of status for each snippet in manifest. A snippet should be in single, start, next or end status.
+ Generate a list of status for each snippet in manifest.
+ A snippet should be in single, start, next or end status.
Used for concatenating to full audio file.
Args:
data (list): list of filepath of audio snippet
@@ -246,7 +256,8 @@ def generate_overlap_vad_seq(
out_dir: str = None,
) -> str:
"""
- Generate predictions with overlapping input windows/segments. Then a smoothing filter is applied to decide the label for a frame spanned by multiple windows.
+ Generate predictions with overlapping input windows/segments.
+ Then a smoothing filter is applied to decide the label for a frame spanned by multiple windows.
Two common smoothing filters are supported: majority vote (median) and average (mean).
This function uses multiprocessing to speed up.
Args:
@@ -310,7 +321,8 @@ def generate_overlap_vad_seq_per_tensor(
frame: torch.Tensor, per_args: Dict[str, float], smoothing_method: str
) -> torch.Tensor:
"""
- Use generated frame prediction (generated by shifting window of shift_length_in_sec (10ms)) to generate prediction with overlapping input window/segments
+ Use generated frame prediction (generated by shifting window of shift_length_in_sec (10ms))
+ to generate prediction with overlapping input window/segments
See description in generate_overlap_vad_seq.
Use this for single instance pipeline.
"""
@@ -472,7 +484,8 @@ def binarization(sequence: torch.Tensor, per_args: Dict[str, float]) -> torch.Te
Binarize predictions to speech and non-speech
Reference
- Paper: Gregory Gelly and Jean-Luc Gauvain. "Minimum Word Error Training of RNN-based Voice Activity Detection", InterSpeech 2015.
+ Paper: Gregory Gelly and Jean-Luc Gauvain. "Minimum Word Error Training of RNN-based Voice Activity Detection", \
+ InterSpeech 2015.
Implementation: https://github.com/pyannote/pyannote-audio/blob/master/pyannote/audio/utils/signal.py
Args:
@@ -485,7 +498,8 @@ def binarization(sequence: torch.Tensor, per_args: Dict[str, float]) -> torch.Te
frame_length_in_sec (float): length of frame.
Returns:
- speech_segments(torch.Tensor): A tensor of speech segment in torch.Tensor([[start1, end1], [start2, end2]]) format.
+ speech_segments(torch.Tensor): A tensor of speech segment in torch.Tensor([[start1, end1], [start2, end2]]) \
+ format.
"""
frame_length_in_sec = per_args.get('frame_length_in_sec', 0.01)
@@ -536,7 +550,8 @@ def remove_segments(original_segments: torch.Tensor, to_be_removed_segments: tor
"""
Remove speech segments list in to_be_removed_segments from original_segments.
For example,
- remove torch.Tensor([[start2, end2],[start4, end4]]) from torch.Tensor([[start1, end1],[start2, end2],[start3, end3], [start4, end4]]),
+ remove torch.Tensor([[start2, end2],[start4, end4]]) from torch.Tensor([[start1, end1],[start2, end2],\
+ [start3, end3], [start4, end4]]),
->
torch.Tensor([[start1, end1],[start3, end3]])
"""
@@ -562,17 +577,21 @@ def filtering(speech_segments: torch.Tensor, per_args: Dict[str, float]) -> torc
Filter out short non_speech and speech segments.
Reference
- Paper: Gregory Gelly and Jean-Luc Gauvain. "Minimum Word Error Training of RNN-based Voice Activity Detection", InterSpeech 2015.
+ Paper: Gregory Gelly and Jean-Luc Gauvain. "Minimum Word Error Training of RNN-based Voice Activity Detection", \
+ InterSpeech 2015.
Implementation: https://github.com/pyannote/pyannote-audio/blob/master/pyannote/audio/utils/signal.py
Args:
- speech_segments (torch.Tensor): A tensor of speech segment in torch.Tensor([[start1, end1], [start2, end2]]) format.
+ speech_segments (torch.Tensor): A tensor of speech segment in torch.Tensor([[start1, end1], \
+ [start2, end2]]) format.
per_args:
min_duration_on (float): threshold for small non_speech deletion
min_duration_off (float): threshold for short speech segment deletion
- filter_speech_first (float): Whether to perform short speech segment deletion first. Use 1.0 to represent True.
+ filter_speech_first (float): Whether to perform short speech segment deletion first. \
+ Use 1.0 to represent True.
Returns:
- speech_segments(torch.Tensor): A tensor of filtered speech segment in torch.Tensor([[start1, end1], [start2, end2]]) format.
+ speech_segments(torch.Tensor): A tensor of filtered speech segment in \
+ torch.Tensor([[start1, end1], [start2, end2]]) format.
"""
if speech_segments.shape == torch.Size([0]):
return speech_segments
@@ -709,7 +728,8 @@ def generate_vad_segment_table(
17,18, speech
Args:
vad_pred_dir (str): directory of prediction files to be processed.
- postprocessing_params (dict): dictionary of thresholds for prediction score. See details in binarization and filtering.
+ postprocessing_params (dict): dictionary of thresholds for prediction score.
+ See details in binarization and filtering.
frame_length_in_sec (float): frame length.
out_dir (str): output dir of generated table/csv file.
num_workers(float): number of process for multiprocessing
@@ -820,10 +840,12 @@ def vad_tune_threshold_on_dev(
num_workers: int = 20,
) -> Tuple[dict, dict]:
"""
- Tune thresholds on dev set. Return best thresholds which gives the lowest detection error rate (DetER) in thresholds.
+ Tune thresholds on dev set. Return best thresholds which gives the lowest detection error rate
+ (DetER) in thresholds.
Args:
params (dict): dictionary of parameters to be tuned on.
- vad_pred_method (str): suffix of prediction file. Use to locate file. Should be either in "frame", "mean" or "median".
+ vad_pred_method (str): suffix of prediction file. Use to locate file.
+ Should be either in "frame", "mean" or "median".
groundtruth_RTTM_dir (str): directory of ground-truth rttm files or a file contains the paths of them.
focus_metric (str): metrics we care most when tuning threshold. Should be either in "DetER", "FA", "MISS"
frame_length_in_sec (float): frame length.
@@ -914,7 +936,8 @@ def check_if_param_valid(params: dict) -> bool:
for j in params[i]:
if not j >= 0:
raise ValueError(
- "Invalid inputs! All float parameters except pad_onset and pad_offset should be larger than 0!"
+ "Invalid inputs! All float parameters except pad_onset and pad_offset should be \
+ larger than 0!"
)
if not (all(i <= 1 for i in params['onset']) and all(i <= 1 for i in params['offset'])):
@@ -972,7 +995,7 @@ def plot(
unit_frame_len: float = 0.01,
label_repeat: int = 1,
xticks_step: int = 5,
-) -> ipd.Audio:
+) -> "ipd.Audio":
"""
Plot Audio and/or VAD output and/or groundtruth labels for visualization
Args:
@@ -986,9 +1009,13 @@ def plot(
threshold (float): threshold for prediction score (from 0 to 1).
per_args(dict): a dict that stores the thresholds for postprocessing.
unit_frame_len (float): unit frame length in seconds for VAD predictions.
- label_repeat (int): repeat the label for this number of times to match different frame lengths in preds and labels.
+ label_repeat (int): repeat the label for this number of times to match different \
+ frame lengths in preds and labels.
xticks_step (int): step size for xticks.
"""
+ if HAVE_IPYTHON is False:
+ raise ImportError("IPython is not installed. Please install IPython to use this function.")
+
plt.figure(figsize=[20, 2])
audio, sample_rate = librosa.load(
@@ -1254,7 +1281,8 @@ def stitch_segmented_asr_output(
fout.flush()
logging.info(
- f"Finish stitch segmented ASR output to {stitched_output_manifest}, the speech segments info has been stored in directory {speech_segments_tensor_dir}"
+ f"Finish stitch segmented ASR output to {stitched_output_manifest}, \
+ the speech segments info has been stored in directory {speech_segments_tensor_dir}"
)
return stitched_output_manifest
@@ -1434,10 +1462,13 @@ def plot_sample_from_rttm(
show: bool = True,
offset: float = 0.0,
unit_frame_len: float = 0.01,
-):
+) -> "ipd.Audio":
"""
Plot audio signal and frame-level labels from RTTM file
"""
+ if HAVE_IPYTHON is False:
+ raise ImportError("IPython is not installed. Please install IPython to use this function.")
+
plt.figure(figsize=[20, 2])
audio, sample_rate = librosa.load(path=audio_file, sr=16000, mono=True, offset=offset, duration=max_duration)
@@ -1472,8 +1503,9 @@ def plot_sample_from_rttm(
def align_labels_to_frames(probs, labels, threshold=0.2):
"""
Aligns labels to frames when the frame length (e.g., 10ms) is different from the label length (e.g., 20ms).
- The threshold 0.2 is not important, since the actual ratio will always be close to an integer unless using frame/label
- lengths that are not multiples of each other (e.g., 15ms frame length and 20ms label length), which is not valid.
+ The threshold 0.2 is not important, since the actual ratio will always be close to an integer
+ unless using frame/label lengths that are not multiples of each other
+ (e.g., 15ms frame length and 20ms label length), which is not valid.
The value 0.2 here is just for easier unit testing.
Args:
probs (List[float]): list of probabilities
@@ -1511,11 +1543,13 @@ def align_labels_to_frames(probs, labels, threshold=0.2):
ratio = frames_len / labels_len
res = frames_len % labels_len
if ceil(ratio) - ratio < threshold:
- # e.g., ratio is 1.83, ceil(ratio) = 2, then we repeat labels to make it a multiple of 2, and discard the redundant labels
+ # e.g., ratio is 1.83, ceil(ratio) = 2, then we repeat labels to make it a
+ # multiple of 2, and discard the redundant labels
labels = labels.repeat_interleave(ceil(ratio), dim=0).long().tolist()
labels = labels[:frames_len]
else:
- # e.g., ratio is 2.02, floor(ratio) = 2, then we repeat labels to make it a multiple of 2 and add additional labels
+ # e.g., ratio is 2.02, floor(ratio) = 2, then we repeat labels to make it a multiple of
+ # 2 and add additional labels
labels = labels.repeat_interleave(floor(ratio), dim=0).long().tolist()
if res > 0:
labels += labels[-res:]
diff --git a/nemo/collections/asr/parts/utils/wfst_utils.py b/nemo/collections/asr/parts/utils/wfst_utils.py
index 31f394fb60ac..9dbb9fc751b2 100644
--- a/nemo/collections/asr/parts/utils/wfst_utils.py
+++ b/nemo/collections/asr/parts/utils/wfst_utils.py
@@ -32,7 +32,7 @@
import kaldifst
# check that kaldifst package is not empty
- # Note: pytorch_lightning.utilities.imports.package_available may not help here
+ # Note: lightning.pytorch.utilities.imports.package_available may not help here
kaldifst.StdVectorFst()
_KALDIFST_AVAILABLE = True
except (ImportError, ModuleNotFoundError, AttributeError):
diff --git a/nemo/collections/audio/models/audio_to_audio.py b/nemo/collections/audio/models/audio_to_audio.py
index e1732c1658b7..60c16f756f58 100644
--- a/nemo/collections/audio/models/audio_to_audio.py
+++ b/nemo/collections/audio/models/audio_to_audio.py
@@ -22,8 +22,8 @@
import librosa
import soundfile as sf
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf
-from pytorch_lightning import Trainer
from tqdm import tqdm
from nemo.collections.asr.data.audio_to_text_dataset import inject_dataloader_value_from_model_config
diff --git a/nemo/collections/audio/models/enhancement.py b/nemo/collections/audio/models/enhancement.py
index cd9f47b98096..8e2206afcef1 100644
--- a/nemo/collections/audio/models/enhancement.py
+++ b/nemo/collections/audio/models/enhancement.py
@@ -17,8 +17,8 @@
import einops
import hydra
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from nemo.collections.audio.models.audio_to_audio import AudioToAudioModel
from nemo.core.classes.common import PretrainedModelInfo, typecheck
diff --git a/nemo/collections/audio/parts/utils/callbacks.py b/nemo/collections/audio/parts/utils/callbacks.py
index 093d5a11f419..ff975c93ecc7 100644
--- a/nemo/collections/audio/parts/utils/callbacks.py
+++ b/nemo/collections/audio/parts/utils/callbacks.py
@@ -16,10 +16,10 @@
import einops
import torch
-from pytorch_lightning import Callback, LightningModule, Trainer
-from pytorch_lightning.loggers import TensorBoardLogger
-from pytorch_lightning.loggers.logger import Logger
-from pytorch_lightning.loggers.wandb import WandbLogger
+from lightning.pytorch import Callback, LightningModule, Trainer
+from lightning.pytorch.loggers import TensorBoardLogger
+from lightning.pytorch.loggers.logger import Logger
+from lightning.pytorch.loggers.wandb import WandbLogger
from nemo.utils import logging
from nemo.utils.decorators import experimental
diff --git a/nemo/collections/common/callbacks/callbacks.py b/nemo/collections/common/callbacks/callbacks.py
index 1a6c011c38df..754b33726faf 100644
--- a/nemo/collections/common/callbacks/callbacks.py
+++ b/nemo/collections/common/callbacks/callbacks.py
@@ -13,15 +13,14 @@
# limitations under the License.
import time
-from pytorch_lightning.callbacks import Callback
-from pytorch_lightning.utilities import rank_zero_only
+from lightning.pytorch.callbacks import Callback
+from lightning.pytorch.utilities import rank_zero_only
# from sacrebleu import corpus_bleu
class LogEpochTimeCallback(Callback):
- """Simple callback that logs how long each epoch takes, in seconds, to a pytorch lightning log
- """
+ """Simple callback that logs how long each epoch takes, in seconds, to a pytorch lightning log"""
@rank_zero_only
def on_train_epoch_start(self, trainer, pl_module):
diff --git a/nemo/collections/common/callbacks/ema.py b/nemo/collections/common/callbacks/ema.py
index 2f295bf67354..f866a2639d63 100644
--- a/nemo/collections/common/callbacks/ema.py
+++ b/nemo/collections/common/callbacks/ema.py
@@ -17,11 +17,11 @@
import threading
from typing import Any, Dict, Iterable
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
-from pytorch_lightning import Callback
-from pytorch_lightning.utilities.exceptions import MisconfigurationException
-from pytorch_lightning.utilities.rank_zero import rank_zero_info
+from lightning.pytorch import Callback
+from lightning.pytorch.utilities.exceptions import MisconfigurationException
+from lightning.pytorch.utilities.rank_zero import rank_zero_info
class EMA(Callback):
@@ -40,7 +40,11 @@ class EMA(Callback):
"""
def __init__(
- self, decay: float, validate_original_weights: bool = False, every_n_steps: int = 1, cpu_offload: bool = False,
+ self,
+ decay: float,
+ validate_original_weights: bool = False,
+ every_n_steps: int = 1,
+ cpu_offload: bool = False,
):
if not (0 <= decay <= 1):
raise MisconfigurationException("EMA decay value must be between 0 and 1")
@@ -149,7 +153,9 @@ def on_load_checkpoint(
def ema_update(ema_model_tuple, current_model_tuple, decay):
torch._foreach_mul_(ema_model_tuple, decay)
torch._foreach_add_(
- ema_model_tuple, current_model_tuple, alpha=(1.0 - decay),
+ ema_model_tuple,
+ current_model_tuple,
+ alpha=(1.0 - decay),
)
@@ -272,7 +278,13 @@ def update(self):
if self.device.type == 'cpu':
self.thread = threading.Thread(
- target=run_ema_update_cpu, args=(self.ema_params, current_model_state, self.decay, self.stream,),
+ target=run_ema_update_cpu,
+ args=(
+ self.ema_params,
+ current_model_state,
+ self.decay,
+ self.stream,
+ ),
)
self.thread.start()
diff --git a/nemo/collections/common/data/lhotse/nemo_adapters.py b/nemo/collections/common/data/lhotse/nemo_adapters.py
index ee623f617e26..a34a2c074a11 100644
--- a/nemo/collections/common/data/lhotse/nemo_adapters.py
+++ b/nemo/collections/common/data/lhotse/nemo_adapters.py
@@ -11,7 +11,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
import logging
import random
import re
@@ -398,40 +397,43 @@ def basename(d: dict) -> str:
shard_manifest: dict[str, list[dict]] = groupby(basename, self.shard_id_to_manifest[sid])
tar_path = self.shard_id_to_tar_path[sid]
- for data, raw_audio, tar_info in iter_fn(tar_path, shard_manifest, manifest_path):
- meta = soundfile.info(BytesIO(raw_audio))
- recording = Recording(
- id=tar_info.path,
- sources=[AudioSource(type="memory", channels=list(range(meta.channels)), source=raw_audio)],
- sampling_rate=int(meta.samplerate),
- num_samples=meta.frames,
- duration=meta.duration,
- )
- cuts_for_recording = []
- for data in sorted(shard_manifest[tar_info.name], key=lambda d: d["audio_filepath"]):
- # Cut the recording into corresponding segment and discard audio data outside the segment.
- cut = make_cut_with_subset_inmemory_recording(
- recording, offset=data.get("offset", 0.0), duration=data.get("duration")
+ try:
+ for data, raw_audio, tar_info in iter_fn(tar_path, shard_manifest, manifest_path):
+ meta = soundfile.info(BytesIO(raw_audio))
+ recording = Recording(
+ id=tar_info.path,
+ sources=[AudioSource(type="memory", channels=list(range(meta.channels)), source=raw_audio)],
+ sampling_rate=int(meta.samplerate),
+ num_samples=meta.frames,
+ duration=meta.duration,
)
- cut.supervisions.append(
- SupervisionSegment(
- id=cut.id,
- recording_id=cut.recording_id,
- start=0,
- duration=cut.duration,
- text=data.get(self.text_field),
- language=data.get(self.lang_field),
+ cuts_for_recording = []
+ for data in sorted(shard_manifest[tar_info.name], key=lambda d: d["audio_filepath"]):
+ # Cut the recording into corresponding segment and discard audio data outside the segment.
+ cut = make_cut_with_subset_inmemory_recording(
+ recording, offset=data.get("offset", 0.0), duration=data.get("duration")
)
- )
- cut.custom = _to_custom_attr_dict(data)
- cut.manifest_origin = manifest_path
- cut.tar_origin = tar_path
- for extra_field in extra_fields:
- extra_field.attach_to(cut)
- cuts_for_recording.append(cut)
- del recording # free the memory - helps with very large audio files
- del raw_audio
- yield from cuts_for_recording
+ cut.supervisions.append(
+ SupervisionSegment(
+ id=cut.id,
+ recording_id=cut.recording_id,
+ start=0,
+ duration=cut.duration,
+ text=data.get(self.text_field),
+ language=data.get(self.lang_field),
+ )
+ )
+ cut.custom = _to_custom_attr_dict(data)
+ cut.manifest_origin = manifest_path
+ cut.tar_origin = tar_path
+ for extra_field in extra_fields:
+ extra_field.attach_to(cut)
+ cuts_for_recording.append(cut)
+ del recording # free the memory - helps with very large audio files
+ del raw_audio
+ yield from cuts_for_recording
+ except tarfile.ReadError:
+ logging.warning(f"Skipping tar file due to read errors (unstable storage or bad file?): {tar_path=}")
def __len__(self) -> int:
return len(self.source)
diff --git a/nemo/collections/common/metrics/perf_metrics.py b/nemo/collections/common/metrics/perf_metrics.py
index d668d29c42ff..daad92ce95ea 100644
--- a/nemo/collections/common/metrics/perf_metrics.py
+++ b/nemo/collections/common/metrics/perf_metrics.py
@@ -15,7 +15,7 @@
from typing import Any, Dict, List, Optional
import numpy as np
-from pytorch_lightning.callbacks import Callback
+from lightning.pytorch.callbacks import Callback
from nemo.collections.common.parts.perf_metrics_utils import LLM_VOCAB_SIZE_MAP, read_tb_log
from nemo.utils import logging
diff --git a/nemo/collections/common/parts/preprocessing/collections.py b/nemo/collections/common/parts/preprocessing/collections.py
index b16ac50e4d56..915f406a3e88 100644
--- a/nemo/collections/common/parts/preprocessing/collections.py
+++ b/nemo/collections/common/parts/preprocessing/collections.py
@@ -308,6 +308,132 @@ def __init__(
super().__init__(data)
+class InstructionTuningAudioText(_Collection):
+ """`AudioText` collector from asr structured json files."""
+
+ OUTPUT_TYPE = collections.namedtuple(
+ typename='InstructionTuningText',
+ field_names='id context context_type context_duration question question_type answer answer_type answer_duration speaker',
+ )
+
+ def __init__(
+ self,
+ manifests_files: Union[str, List[str]],
+ min_duration: Optional[float] = None,
+ max_duration: Optional[float] = None,
+ max_seq_length: Optional[float] = None,
+ max_number: Optional[int] = None,
+ do_sort_by_duration: bool = False,
+ index_by_file_id: bool = False,
+ decoder_only_model: bool = False,
+ use_phoneme_tokenizer: bool = False,
+ ):
+ """Parse lists of audio files, durations and transcripts texts.
+ Args:
+ manifests_files: Either single string file or list of such -
+ manifests to yield items from.
+ *args: Args to pass to `AudioText` constructor.
+ **kwargs: Kwargs to pass to `AudioText` constructor.
+ """
+
+ output_type = self.OUTPUT_TYPE
+ self.use_phoneme_tokenizer = use_phoneme_tokenizer
+ data, duration_filtered, num_filtered, total_duration = [], 0.0, 0, 0.0
+ if index_by_file_id:
+ self.mapping = {}
+
+ for item in manifest.item_iter(manifests_files):
+
+ id = item['id']
+ context = item['context']
+ context_duration = item['context_duration']
+ context_type = item['context_type']
+ question = item['question']
+ question_type = item['question_type']
+ speaker = item['speaker']
+ answer = item['answer']
+ answer_duration = item['answer_duration']
+ answer_type = item['answer_type']
+ task = item['task']
+
+ task = 'tts' if task is None else task
+ duration = answer_duration if task == 'tts' else context_duration
+ if min_duration is not None and duration < min_duration:
+ duration_filtered += duration
+ num_filtered += 1
+ continue
+
+ if max_duration is not None and duration > max_duration:
+ duration_filtered += duration
+ num_filtered += 1
+ continue
+
+ # Check segment length
+ approx_context_len = min(self._get_len(context_type, context, context_duration) * 0.3, 400)
+ approx_question_len = self._get_len(question_type, question, None)
+ approx_answer_len = self._get_len(answer_type, answer, answer_duration)
+
+ if (
+ decoder_only_model and approx_context_len + approx_question_len + approx_answer_len >= max_seq_length
+ ) or (approx_context_len + approx_question_len >= max_seq_length or approx_answer_len >= max_seq_length):
+ duration_filtered += duration
+ num_filtered += 1
+ continue
+
+ total_duration += duration
+ data.append(
+ output_type(
+ id,
+ context,
+ context_type,
+ context_duration,
+ question,
+ question_type,
+ answer,
+ answer_type,
+ answer_duration,
+ speaker,
+ )
+ )
+
+ if index_by_file_id:
+ file_id, _ = os.path.splitext(os.path.basename(context))
+ if ".context" in file_id:
+ file_id = file_id[:-8]
+ if file_id not in self.mapping:
+ self.mapping[file_id] = []
+ self.mapping[file_id].append(len(data) - 1)
+
+ # Max number of entities filter.
+ if len(data) == max_number:
+ break
+
+ if do_sort_by_duration:
+ if index_by_file_id:
+ logging.warning("Tried to sort dataset by duration, but cannot since index_by_file_id is set.")
+ else:
+ # Entries have no single `duration` field; sort on whichever duration is present.
+ data.sort(key=lambda entity: entity.context_duration or entity.answer_duration or 0.0)
+
+ logging.info("Dataset loaded with %d files totalling %.2f hours", len(data), total_duration / 3600)
+ logging.info("%d files were filtered totalling %.2f hours", num_filtered, duration_filtered / 3600)
+
+ super().__init__(data)
+
+ def _get_len(self, field_type, data, duration_data):
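+ """Approximate the sequence length (in tokens) of a single field.
+
+ SPEECH lengths are estimated from audio duration, TEXT lengths from word
+ or character counts (depending on the tokenizer), and TOKENS lengths from
+ the number of tokens; the hardcoded factors and offsets are heuristics.
+ """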
+ if field_type == "SPEECH":
+ return duration_data * 76 # TODO: add explanation for the hardcoded value.
+ elif field_type == "TEXT":
+ if self.use_phoneme_tokenizer:
+ # Approx len is number of characters
+ return len(data)
+ else:
+ return len(data.split(' ')) + 3 # TODO: add explanation for the hardcoded value.
+ elif field_type == "TOKENS":
+ return len(data) + 3
+ else:
+ raise ValueError(f"Unknown field type {field_type}.")
+
+
class ASRAudioText(AudioText):
"""`AudioText` collector from asr structured json files."""
diff --git a/nemo/collections/common/parts/preprocessing/manifest.py b/nemo/collections/common/parts/preprocessing/manifest.py
index 1d49bd7c7019..e2ad08bd04c2 100644
--- a/nemo/collections/common/parts/preprocessing/manifest.py
+++ b/nemo/collections/common/parts/preprocessing/manifest.py
@@ -110,6 +110,8 @@ def __parse_item(line: str, manifest_file: str) -> Dict[str, Any]:
item['audio_file'] = item.pop('audio_filename')
elif 'audio_filepath' in item:
item['audio_file'] = item.pop('audio_filepath')
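+ # Instruction-tuning manifests provide the audio path via 'context'.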
+ elif 'context' in item:
+ item['audio_file'] = item['context']
# Video File
if 'video_filename' in item:
@@ -132,7 +134,9 @@ def __parse_item(line: str, manifest_file: str) -> Dict[str, Any]:
item['video_file'] = get_full_path(audio_file=item['video_file'], manifest_file=manifest_file)
# Duration.
- if 'duration' not in item:
+ if 'context_duration' in item and 'duration' not in item:
+ item['duration'] = item['context_duration']
+ elif 'duration' not in item:
raise ValueError(
f"Manifest file {manifest_file} has invalid json line structure: {line} without proper duration key."
)
@@ -184,6 +188,15 @@ def __parse_item(line: str, manifest_file: str) -> Dict[str, Any]:
orig_sr=item.get('orig_sample_rate', None),
token_labels=item.get('token_labels', None),
lang=item.get('lang', None),
+ context=item.get('context', None),
+ context_type=item.get('context_type', None),
+ context_duration=item.get('context_duration', None),
+ answer=item.get('answer', None),
+ answer_type=item.get('answer_type', None),
+ answer_duration=item.get('answer_duration', None),
+ question=item.get('question', None),
+ question_type=item.get('question_type', None),
+ task=item.get('task', None),
)
return item
@@ -247,7 +260,7 @@ def get_full_path(
if (
(len(audio_file) < audio_file_len_limit)
and not os.path.isabs(audio_file)
- and not os.path.isfile(audio_file)
+ # and not os.path.isfile(audio_file) # Commented out because it slows down dataloading
):
# If audio_file is not available and the path is not absolute, the full path is assumed
# to be relative to the manifest file parent directory or data directory.
diff --git a/nemo/collections/common/parts/ptl_overrides.py b/nemo/collections/common/parts/ptl_overrides.py
index 0225ecd50fee..263c865f8270 100644
--- a/nemo/collections/common/parts/ptl_overrides.py
+++ b/nemo/collections/common/parts/ptl_overrides.py
@@ -13,11 +13,11 @@
# limitations under the License.
import torch
-from pytorch_lightning.plugins.precision import MixedPrecisionPlugin
+from lightning.pytorch.plugins.precision import MixedPrecisionPlugin
class NeMoMixedPrecisionPlugin(MixedPrecisionPlugin):
- def __init__(self, init_scale: float = 2 ** 32, growth_interval: int = 1000) -> None:
+ def __init__(self, init_scale: float = 2**32, growth_interval: int = 1000) -> None:
super().__init__(precision=16)
self.scaler = torch.cuda.amp.GradScaler(init_scale=init_scale, growth_interval=growth_interval)
diff --git a/nemo/collections/common/tokenizers/huggingface/auto_tokenizer.py b/nemo/collections/common/tokenizers/huggingface/auto_tokenizer.py
index 43d377b73f34..14da2d13a030 100644
--- a/nemo/collections/common/tokenizers/huggingface/auto_tokenizer.py
+++ b/nemo/collections/common/tokenizers/huggingface/auto_tokenizer.py
@@ -298,3 +298,7 @@ def name(self):
def save_vocabulary(self, save_directory: str, filename_prefix: str = None):
"""Saves tokenizer's vocabulary and other artifacts to the specified directory"""
return self.tokenizer.save_vocabulary(save_directory=save_directory, filename_prefix=filename_prefix)
+
+ def save_pretrained(self, save_directory: str):
+ """Saves tokenizer's vocabulary and other artifacts to the specified directory"""
+ return self.tokenizer.save_pretrained(save_directory)
diff --git a/nemo/collections/common/tokenizers/sentencepiece_tokenizer.py b/nemo/collections/common/tokenizers/sentencepiece_tokenizer.py
index a8ea949019c1..56a4b04dfe0f 100644
--- a/nemo/collections/common/tokenizers/sentencepiece_tokenizer.py
+++ b/nemo/collections/common/tokenizers/sentencepiece_tokenizer.py
@@ -25,7 +25,7 @@
from nemo.collections.common.tokenizers.tokenizer_spec import TokenizerSpec
from nemo.utils import logging
-__all__ = ['SentencePieceTokenizer', 'create_spt_model']
+__all__ = ['SentencePieceTokenizer', 'SentencePieceSpeechLLMTTSTokenizer', 'create_spt_model']
class SentencePieceTokenizer(TokenizerSpec, ChatTemplateMixin):
@@ -315,6 +315,14 @@ def vocab(self):
return main_vocab + special_tokens
+class SentencePieceSpeechLLMTTSTokenizer(SentencePieceTokenizer):
+ def add_phone_tokens_to_special_tokens(self):
+ for i, word in enumerate(self.vocab):
+ if word.startswith("p{"):
+ self.special_token_to_id[word] = i
+ self.id_to_special_token[i] = word
+
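+# Example usage (hypothetical model path; assumes the SentencePiece vocabulary
+# stores phoneme pieces as "p{...}" entries):
+#     tokenizer = SentencePieceSpeechLLMTTSTokenizer("tokenizer.model")
+#     tokenizer.add_phone_tokens_to_special_tokens()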
+
def create_spt_model(
data_file: str,
vocab_size: int,
diff --git a/nemo/collections/common/video_tokenizers/README.md b/nemo/collections/common/video_tokenizers/README.md
new file mode 100644
index 000000000000..d1a5dfbb71f9
--- /dev/null
+++ b/nemo/collections/common/video_tokenizers/README.md
@@ -0,0 +1,31 @@
+# Cosmos Tokenizer
+
+This directory contains the NeMo implementation of the [NVIDIA Cosmos Tokenizers](https://github.com/NVIDIA/Cosmos-Tokenizer)
+that are hosted on the [Huggingface Hub (HF-Hub)](https://huggingface.co/nvidia/).
+
+## Usage
+
+The encoder, decoder and autoencoder models can be loaded directly from the HF-Hub using the `from_pretrained` class method
+of the `CausalVideoTokenizer` class:
+
+```python
+from nemo.collections.common.video_tokenizers.cosmos_tokenizer import CausalVideoTokenizer
+
+model = CausalVideoTokenizer.from_pretrained("Cosmos-Tokenizer-DV4x8x8")
+```
+By default, this will download all three (`{encoder, decoder, autoencoder}.jit`) models from `nvidia/Cosmos-Tokenizer-DV4x8x8`
+and will only load the encoder and decoder models.
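+
+If only end-to-end reconstruction is needed, the full autoencoder can be loaded
+instead, in which case `from_pretrained` skips loading the separate encoder and
+decoder (a sketch using the flags defined above):
+
+```python
+model = CausalVideoTokenizer.from_pretrained("Cosmos-Tokenizer-DV4x8x8", load_full_model=True)
+```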
+
+To encode an input tensor, users can run the following:
+```python
+import torch
+input_tensor = torch.randn(1, 3, 9, 512, 512).to('cuda').to(torch.bfloat16)
+(indices, codes) = model.encode(input_tensor)
+```
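+
+The resulting indices can be decoded back to video space with the decoder model
+(a sketch; per `decode`, the discrete tokenizer expects `Bxtxhxw` indices):
+```python
+reconstructed = model.decode(indices)
+```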
+
+Please see the official [NVIDIA Cosmos repository](https://github.com/NVIDIA/Cosmos-Tokenizer)
+for the complete list of supported tokenizers.
+
+## Examples
+1. Image generation using [discrete cosmos tokenizer](../../../../nemo/collections/multimodal_autoregressive/data/README.md)
+2. Image / Video Megatron Energon WebDataset preparation with [continuous cosmos tokenizer](../../diffusion/data/readme.rst)
diff --git a/nemo/collections/common/video_tokenizers/__init__.py b/nemo/collections/common/video_tokenizers/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/nemo/collections/common/video_tokenizers/cosmos_tokenizer.py b/nemo/collections/common/video_tokenizers/cosmos_tokenizer.py
new file mode 100644
index 000000000000..d81097e1e6e0
--- /dev/null
+++ b/nemo/collections/common/video_tokenizers/cosmos_tokenizer.py
@@ -0,0 +1,195 @@
+# -----------------------------------------------------------------------------
+# Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES.
+#
+# Code for building NVIDIA proprietary Edify APIs.
+# It cannot be released to the public in any form.
+# If you want to use the code for other NVIDIA proprietary products,
+# please contact the Deep Imagination Research Team (dir@exchange.nvidia.com).
+# -----------------------------------------------------------------------------
+
+"""A library for Causal Video Tokenizer inference."""
+
+from pathlib import Path
+from typing import Optional
+
+import numpy as np
+import torch
+from huggingface_hub import get_token as get_hf_token
+from huggingface_hub import hf_hub_download
+from omegaconf import DictConfig
+from tqdm import tqdm
+
+from nemo.collections.common.video_tokenizers.utils import (
+ load_jit_model,
+ numpy2tensor,
+ pad_video_batch,
+ tensor2numpy,
+ unpad_video_batch,
+)
+from nemo.core.classes.common import PretrainedModelInfo
+from nemo.core.classes.modelPT import ModelPT
+
+
+class CausalVideoTokenizer(ModelPT):
+ """Causal Video tokenization with the NVIDIA Cosmos Tokenizer"""
+
+ def __init__(self, cfg: DictConfig) -> None:
+ super().__init__(cfg)
+
+ checkpoint = Path(cfg.checkpoint_dir)
+ self._full_model_path = str(checkpoint / "autoencoder.jit")
+ self._enc_model_path = str(checkpoint / "encoder.jit")
+ self._dec_model_path = str(checkpoint / "decoder.jit")
+ self._dtype = getattr(torch, cfg.dtype)
+
+ self._device = "cuda"
+
+ self._full_model = load_jit_model(self._full_model_path, self._device) if cfg.load_full_model else None
+ self._enc_model = load_jit_model(self._enc_model_path, self._device) if cfg.load_enc_model else None
+ self._dec_model = load_jit_model(self._dec_model_path, self._device) if cfg.load_dec_model else None
+
+ @classmethod
+ def from_pretrained(
+ cls,
+ tokenizer_type="Cosmos-Tokenizer-DV4x8x8",
+ load_encoder=True,
+ load_decoder=True,
+ load_full_model=False,
+ ):
+ cls._hf_model_name = f"nvidia/{tokenizer_type}"
+
+ # Requires setting HF_TOKEN env variable
+ hf_token = get_hf_token()
+
+ full_model_path = hf_hub_download(
+ repo_id=cls._hf_model_name,
+ filename="autoencoder.jit",
+ token=hf_token,
+ )
+
+ _ = hf_hub_download(
+ repo_id=cls._hf_model_name,
+ filename="encoder.jit",
+ token=hf_token,
+ )
+
+ _ = hf_hub_download(
+ repo_id=cls._hf_model_name,
+ filename="decoder.jit",
+ token=hf_token,
+ )
+
+ # No need to load in encoder and decoder with full model loaded
+ if load_full_model:
+ load_encoder = False
+ load_decoder = False
+
+ # Assumes HF downloads all files to same local dir
+ ckpt_dir = str(Path(full_model_path).parent)
+ cfg = DictConfig(
+ {
+ 'checkpoint_dir': ckpt_dir,
+ 'dtype': 'bfloat16',
+ 'load_enc_model': load_encoder,
+ 'load_dec_model': load_decoder,
+ 'load_full_model': load_full_model,
+ }
+ )
+
+ return cls(cfg)
+
+ @torch.no_grad()
+ def autoencode(self, input_tensor: torch.Tensor) -> torch.Tensor:
+ """Reconstructs a batch of video tensors after embedding into a latent.
+
+ Args:
+ input_tensor: The input video tensor, Bx3xTxHxW layout, range [-1..1].
+ Returns:
+ The reconstructed video, layout Bx3xTxHxW, range [-1..1].
+ """
+ if self._full_model is not None:
+ output_tensor = self._full_model(input_tensor)
+ output_tensor = output_tensor[0] if isinstance(output_tensor, tuple) else output_tensor
+ else:
+ output_latent = self.encode(input_tensor)[0]
+ output_tensor = self.decode(output_latent)
+ return output_tensor
+
+ @torch.no_grad()
+ def encode(self, input_tensor: torch.Tensor) -> tuple[torch.Tensor]:
+ """Encodes a numpy video into a CausalVideo latent or code.
+
+ Args:
+ input_tensor: The input tensor Bx3xTxHxW layout, range [-1..1].
+ Returns:
+ For causal continuous video (CausalCV) tokenizer, the tuple contains:
+ - The latent embedding, Bx16x(t)x(h)x(w), where the compression
+ rate is (T/t x H/h x W/w), and channel dimension of 16.
+ For causal discrete video (CausalDV) tokenizer, the tuple contains:
+ 1) The indices, Bx(t)x(h)x(w), from a codebook of size 64K, which
+ is formed by FSQ levels of (8,8,8,5,5,5).
+ 2) The discrete code, Bx6x(t)x(h)x(w), where the compression rate
+ is again (T/t x H/h x W/w), and channel dimension of 6.
+ """
+ assert input_tensor.ndim == 5, "input video should be 5D."
+
+ output_latent = self._enc_model(input_tensor)
+ if isinstance(output_latent, torch.Tensor):
+ return output_latent
+ return output_latent[:-1]
+
+ @torch.no_grad()
+ def decode(self, input_latent: torch.Tensor) -> torch.Tensor:
+ """Encodes a numpy video into a CausalVideo latent.
+
+ Args:
+ input_latent: The continuous latent Bx16xtxhxw for CausalCV,
+ or the discrete indices Bxtxhxw for CausalDV.
+ Returns:
+ The reconstructed tensor, layout [B,3,1+(T-1)*8,H*16,W*16] in range [-1..1].
+ """
+ assert input_latent.ndim >= 4, "input latent should be 5D for continuous and 4D for discrete."
+ return self._dec_model(input_latent)
+
+ def forward(
+ self,
+ video: np.ndarray,
+ temporal_window: int = 17,
+ ) -> np.ndarray:
+ """Reconstructs video using a pre-trained CausalTokenizer autoencoder.
+ Given a video of arbitrary length, the forward invokes the CausalVideoTokenizer
+ in a sliding manner with a `temporal_window` size.
+
+ Args:
+ video: The input video BxTxHxWx3 layout, range [0..255].
+ temporal_window: The length of the temporal window to process, default=17.
+ Returns:
+ The reconstructed video in range [0..255], layout BxTxHxWx3.
+ """
+ assert video.ndim == 5, "input video should be 5D."
+ num_frames = video.shape[1] # can be of any length.
+ output_video_list = []
+ for idx in tqdm(range(0, (num_frames - 1) // temporal_window + 1)):
+ # Input video for the current window.
+ start, end = idx * temporal_window, (idx + 1) * temporal_window
+ input_video = video[:, start:end, ...]
+
+ # Spatio-temporally pad input_video so it's evenly divisible.
+ padded_input_video, crop_region = pad_video_batch(input_video)
+ input_tensor = numpy2tensor(padded_input_video, dtype=self._dtype, device=self._device)
+ output_tensor = self.autoencode(input_tensor)
+ padded_output_video = tensor2numpy(output_tensor)
+ output_video = unpad_video_batch(padded_output_video, crop_region)
+
+ output_video_list.append(output_video)
+ return np.concatenate(output_video_list, axis=1)
+
+ def setup_training_data(self, train_data_config: Optional[DictConfig]):
+ pass
+
+ def setup_validation_data(self, val_data_config: Optional[DictConfig]):
+ pass
+
+ @classmethod
+ def list_available_models(cls) -> Optional[PretrainedModelInfo]:
+ pass
diff --git a/nemo/collections/common/video_tokenizers/utils.py b/nemo/collections/common/video_tokenizers/utils.py
new file mode 100644
index 000000000000..255f0d2b5df4
--- /dev/null
+++ b/nemo/collections/common/video_tokenizers/utils.py
@@ -0,0 +1,319 @@
+# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Utility functions for the inference libraries."""
+
+import os
+from glob import glob
+
+import mediapy as media
+import numpy as np
+import torch
+
+_DTYPE, _DEVICE = torch.bfloat16, "cuda"
+_UINT8_MAX_F = float(torch.iinfo(torch.uint8).max)
+_SPATIAL_ALIGN = 16
+_TEMPORAL_ALIGN = 8
+
+
+def load_jit_model(jit_filepath: str = None, device: str = "cuda") -> torch.jit.ScriptModule:
+ """Loads a torch.jit.ScriptModule from a filepath.
+
+ Args:
+ jit_filepath: The filepath to the JIT-compiled model.
+ device: The device to load the model onto, default=cuda.
+ Returns:
+ The JIT compiled model loaded to device and on eval mode.
+ """
+ model = torch.jit.load(jit_filepath)
+ return model.eval().to(device)
+
+
+def save_jit_model(
+ model: torch.jit.ScriptModule | torch.jit.RecursiveScriptModule = None,
+ jit_filepath: str = None,
+) -> None:
+ """Saves a torch.jit.ScriptModule or torch.jit.RecursiveScriptModule to file.
+
+ Args:
+ model: JIT compiled model loaded onto `config.checkpoint.jit.device`.
+ jit_filepath: The filepath to the JIT-compiled model.
+ """
+ torch.jit.save(model, jit_filepath)
+
+
+def get_filepaths(input_pattern) -> list[str]:
+ """Returns a list of filepaths from a pattern."""
+ # Deduplicate before sorting so the returned order is deterministic.
+ filepaths = sorted(set(glob(str(input_pattern))))
+ return filepaths
+
+
+def get_output_filepath(filepath: str, output_dir: str = None) -> str:
+ """Returns the output filepath for the given input filepath."""
+ output_dir = output_dir or f"{os.path.dirname(filepath)}/reconstructions"
+ output_filepath = f"{output_dir}/{os.path.basename(filepath)}"
+ os.makedirs(output_dir, exist_ok=True)
+ return output_filepath
+
+
+def read_image(filepath: str) -> np.ndarray:
+ """Reads an image from a filepath.
+
+ Args:
+ filepath: The filepath to the image.
+
+ Returns:
+ The image as a numpy array, layout HxWxC, range [0..255], uint8 dtype.
+ """
+ image = media.read_image(filepath)
+ # convert the grey scale image to RGB
+ # since our tokenizers always assume 3-channel RGB image
+ if image.ndim == 2:
+ image = np.stack([image] * 3, axis=-1)
+ # convert RGBA to RGB
+ if image.shape[-1] == 4:
+ image = image[..., :3]
+ return image
+
+
+def read_video(filepath: str) -> np.ndarray:
+ """Reads a video from a filepath.
+
+ Args:
+ filepath: The filepath to the video.
+ Returns:
+ The video as a numpy array, layout TxHxWxC, range [0..255], uint8 dtype.
+ """
+ video = media.read_video(filepath)
+ # convert the grey scale frame to RGB
+ # since our tokenizers always assume 3-channel video
+ if video.ndim == 3:
+ video = np.stack([video] * 3, axis=-1)
+ # convert RGBA to RGB
+ if video.shape[-1] == 4:
+ video = video[..., :3]
+ return video
+
+
+def resize_image(image: np.ndarray, short_size: int = None) -> np.ndarray:
+ """Resizes an image to have the short side of `short_size`.
+
+ Args:
+ image: The image to resize, layout HxWxC, of any range.
+ short_size: The size of the short side.
+ Returns:
+ The resized image.
+ """
+ if short_size is None:
+ return image
+ height, width = image.shape[-3:-1]
+ if height <= width:
+ height_new, width_new = short_size, int(width * short_size / height + 0.5)
+ width_new = width_new if width_new % 2 == 0 else width_new + 1
+ else:
+ height_new, width_new = (
+ int(height * short_size / width + 0.5),
+ short_size,
+ )
+ height_new = height_new if height_new % 2 == 0 else height_new + 1
+ return media.resize_image(image, shape=(height_new, width_new))
+
+
+def resize_video(video: np.ndarray, short_size: int = None) -> np.ndarray:
+ """Resizes a video to have the short side of `short_size`.
+
+ Args:
+ video: The video to resize, layout TxHxWxC, of any range.
+ short_size: The size of the short side.
+ Returns:
+ The resized video.
+ """
+ if short_size is None:
+ return video
+ height, width = video.shape[-3:-1]
+ if height <= width:
+ height_new, width_new = short_size, int(width * short_size / height + 0.5)
+ width_new = width_new if width_new % 2 == 0 else width_new + 1
+ else:
+ height_new, width_new = (
+ int(height * short_size / width + 0.5),
+ short_size,
+ )
+ height_new = height_new if height_new % 2 == 0 else height_new + 1
+ return media.resize_video(video, shape=(height_new, width_new))
+
+
+def write_image(filepath: str, image: np.ndarray):
+ """Writes an image to a filepath."""
+ return media.write_image(filepath, image)
+
+
+def write_video(filepath: str, video: np.ndarray, fps: int = 24) -> None:
+ """Writes a video to a filepath."""
+ return media.write_video(filepath, video, fps=fps)
+
+
+def numpy2tensor(
+ input_image: np.ndarray,
+ dtype: torch.dtype = _DTYPE,
+ device: str = _DEVICE,
+ range_min: int = -1,
+) -> torch.Tensor:
+ """Converts image(dtype=np.uint8) to `dtype` in range [0..255].
+
+ Args:
+ input_image: A batch of images in range [0..255], BxHxWx3 layout.
+ Returns:
+ A torch.Tensor of layout Bx3xHxW in range [-1..1], dtype.
+ """
+ ndim = input_image.ndim
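+ # Move the channel axis (last) to just after the batch axis: Bx...xC -> BxCx...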
+ indices = list(range(1, ndim))[-1:] + list(range(1, ndim))[:-1]
+ image = input_image.transpose((0,) + tuple(indices)) / _UINT8_MAX_F
+ if range_min == -1:
+ image = 2.0 * image - 1.0
+ return torch.from_numpy(image).to(dtype).to(device)
+
+
+def tensor2numpy(input_tensor: torch.Tensor, range_min: int = -1) -> np.ndarray:
+ """Converts tensor in [-1,1] to image(dtype=np.uint8) in range [0..255].
+
+ Args:
+ input_tensor: Input image tensor of Bx3xHxW layout, range [-1..1].
+ Returns:
+ A numpy image of layout BxHxWx3, range [0..255], uint8 dtype.
+ """
+ if range_min == -1:
+ input_tensor = (input_tensor.float() + 1.0) / 2.0
+ ndim = input_tensor.ndim
+ output_image = input_tensor.clamp(0, 1).cpu().numpy()
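+ # Move the channel axis back to last: BxCx... -> Bx...xC.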
+ output_image = output_image.transpose((0,) + tuple(range(2, ndim)) + (1,))
+ return (output_image * _UINT8_MAX_F + 0.5).astype(np.uint8)
+
+
+def pad_image_batch(batch: np.ndarray, spatial_align: int = _SPATIAL_ALIGN) -> tuple[np.ndarray, list[int]]:
+ """Pads a batch of images to be divisible by `spatial_align`.
+
+ Args:
+ batch: The batch of images to pad, layout BxHxWx3, in any range.
+ spatial_align: The spatial alignment to pad to.
+ Returns:
+ The padded batch and the crop region.
+ """
+ height, width = batch.shape[1:3]
+ align = spatial_align
+ height_to_pad = (align - height % align) if height % align != 0 else 0
+ width_to_pad = (align - width % align) if width % align != 0 else 0
+
+ crop_region = [
+ height_to_pad >> 1,
+ width_to_pad >> 1,
+ height + (height_to_pad >> 1),
+ width + (width_to_pad >> 1),
+ ]
+ batch = np.pad(
+ batch,
+ (
+ (0, 0),
+ (height_to_pad >> 1, height_to_pad - (height_to_pad >> 1)),
+ (width_to_pad >> 1, width_to_pad - (width_to_pad >> 1)),
+ (0, 0),
+ ),
+ mode="constant",
+ )
+ return batch, crop_region
+
+
+def pad_video_batch(
+ batch: np.ndarray,
+ temporal_align: int = _TEMPORAL_ALIGN,
+ spatial_align: int = _SPATIAL_ALIGN,
+) -> tuple[np.ndarray, list[int]]:
+ """Pads a batch of videos to be divisible by `temporal_align` or `spatial_align`.
+
+ Zero-pads spatially and edge-pads (replicating edge frames) temporally to better handle causality.
+ Args:
+ batch: The batch of videos to pad, layout BxFxHxWx3, in any range.
+ temporal_align: The temporal alignment to pad to.
+ spatial_align: The spatial alignment to pad to.
+ Returns:
+ The padded batch and the crop region.
+ """
+ num_frames, height, width = batch.shape[-4:-1]
+ align = spatial_align
+ height_to_pad = (align - height % align) if height % align != 0 else 0
+ width_to_pad = (align - width % align) if width % align != 0 else 0
+
+ align = temporal_align
+ frames_to_pad = (align - (num_frames - 1) % align) if (num_frames - 1) % align != 0 else 0
+
+ crop_region = [
+ frames_to_pad >> 1,
+ height_to_pad >> 1,
+ width_to_pad >> 1,
+ num_frames + (frames_to_pad >> 1),
+ height + (height_to_pad >> 1),
+ width + (width_to_pad >> 1),
+ ]
+ batch = np.pad(
+ batch,
+ (
+ (0, 0),
+ (0, 0),
+ (height_to_pad >> 1, height_to_pad - (height_to_pad >> 1)),
+ (width_to_pad >> 1, width_to_pad - (width_to_pad >> 1)),
+ (0, 0),
+ ),
+ mode="constant",
+ )
+ batch = np.pad(
+ batch,
+ (
+ (0, 0),
+ (frames_to_pad >> 1, frames_to_pad - (frames_to_pad >> 1)),
+ (0, 0),
+ (0, 0),
+ (0, 0),
+ ),
+ mode="edge",
+ )
+ return batch, crop_region
+
+
+def unpad_video_batch(batch: np.ndarray, crop_region: list[int]) -> np.ndarray:
+ """Unpads video with `crop_region`.
+
+ Args:
+ batch: A batch of numpy videos, layout BxFxHxWxC.
+ crop_region: [f1,y1,x1,f2,y2,x2] first, top, left, last, bot, right crop indices.
+
+ Returns:
+ np.ndarray: Cropped numpy video, layout BxFxHxWxC.
+ """
+ assert len(crop_region) == 6, "crop_region should have length 6."
+ f1, y1, x1, f2, y2, x2 = crop_region
+ return batch[..., f1:f2, y1:y2, x1:x2, :]
+
+
+def unpad_image_batch(batch: np.ndarray, crop_region: list[int]) -> np.ndarray:
+ """Unpads image with `crop_region`.
+
+ Args:
+ batch: A batch of numpy images, layout BxHxWxC.
+ crop_region: [y1,x1,y2,x2] top, left, bot, right crop indices.
+
+ Returns:
+ np.ndarray: Cropped numpy image, layout BxHxWxC.
+ """
+ assert len(crop_region) == 4, "crop_region should have length 4."
+ y1, x1, y2, x2 = crop_region
+ return batch[..., y1:y2, x1:x2, :]
diff --git a/nemo/collections/diffusion/assets/mixed_training.png b/nemo/collections/diffusion/assets/mixed_training.png
new file mode 100644
index 000000000000..2226e4c4d5e9
Binary files /dev/null and b/nemo/collections/diffusion/assets/mixed_training.png differ
diff --git a/nemo/collections/diffusion/assets/pipeline_conditioning.png b/nemo/collections/diffusion/assets/pipeline_conditioning.png
new file mode 100644
index 000000000000..0856489abcbd
Binary files /dev/null and b/nemo/collections/diffusion/assets/pipeline_conditioning.png differ
diff --git a/nemo/collections/diffusion/assets/st_dit_hybrid_parallel.png b/nemo/collections/diffusion/assets/st_dit_hybrid_parallel.png
new file mode 100644
index 000000000000..dcbe19fadd26
Binary files /dev/null and b/nemo/collections/diffusion/assets/st_dit_hybrid_parallel.png differ
diff --git a/nemo/collections/diffusion/data/diffusion_energon_datamodule.py b/nemo/collections/diffusion/data/diffusion_energon_datamodule.py
index f18c828d9d45..07747528363a 100644
--- a/nemo/collections/diffusion/data/diffusion_energon_datamodule.py
+++ b/nemo/collections/diffusion/data/diffusion_energon_datamodule.py
@@ -15,8 +15,9 @@
import logging
from typing import Any, Dict, Literal
-from megatron.energon import DefaultTaskEncoder, get_train_dataset
-from pytorch_lightning.utilities.types import EVAL_DATALOADERS
+from lightning.pytorch.utilities.types import EVAL_DATALOADERS
+from megatron.core import parallel_state
+from megatron.energon import DefaultTaskEncoder, WorkerConfig, get_savable_loader, get_train_dataset
from nemo.collections.multimodal.data.energon.base import SimpleMultiModalDataModule
@@ -56,6 +57,9 @@ def __init__(
pin_memory: bool = True,
task_encoder: DefaultTaskEncoder = None,
use_train_split_for_val: bool = False,
+ virtual_epoch_length: int = 1_000_000_000, # a hack to avoid energon end of epoch warning
+ packing_buffer_size: int | None = None,
+ max_samples_per_sequence: int | None = None,
) -> None:
"""
Initialize the SimpleMultiModalDataModule.
@@ -82,6 +86,10 @@ def __init__(
task_encoder=task_encoder,
)
self.use_train_split_for_val = use_train_split_for_val
+ self.virtual_epoch_length = virtual_epoch_length
+ self.num_workers_val = 1
+ self.packing_buffer_size = packing_buffer_size
+ self.max_samples_per_sequence = max_samples_per_sequence
def datasets_provider(self, worker_config, split: Literal['train', 'val'] = 'val'):
"""
@@ -106,29 +114,55 @@ def datasets_provider(self, worker_config, split: Literal['train', 'val'] = 'val
batch_size=self.micro_batch_size,
task_encoder=self.task_encoder,
worker_config=worker_config,
- max_samples_per_sequence=None,
- shuffle_buffer_size=100,
+ max_samples_per_sequence=self.max_samples_per_sequence,
+ shuffle_buffer_size=None,
split_part=split,
- batch_drop_last=True,
- virtual_epoch_length=1_000_000_000, # a hack to avoid energon end of epoch warning
+ virtual_epoch_length=self.virtual_epoch_length,
+ packing_buffer_size=self.packing_buffer_size,
)
return _dataset
def val_dataloader(self) -> EVAL_DATALOADERS:
"""
- Configure the validation DataLoader.
+ Initialize and return the validation DataLoader.
- This method configures the DataLoader for validation data.
-
- Parameters:
- worker_config: Configuration for the data loader workers.
+ This method initializes the DataLoader for the validation dataset. It ensures that the parallel state
+ is initialized correctly for distributed training and returns a configured DataLoader object.
Returns:
- DataLoader: The DataLoader for validation data.
+ EVAL_DATALOADERS: The DataLoader for the validation dataset.
"""
if self.use_train_split_for_val:
return self.train_dataloader()
- return super().val_dataloader()
+ if self.val_dataloader_object:
+ return self.val_dataloader_object
+
+ if not parallel_state.is_initialized():
+ message = (
+ "Muiltimodal val data loader parallel state is not initialized "
+ f"using default worker config with no_workers {self.num_workers}"
+ )
+ logging.info(message)
+
+ worker_config = WorkerConfig.default_worker_config(self.num_workers_val)
+ else:
+ rank = parallel_state.get_data_parallel_rank()
+ world_size = parallel_state.get_data_parallel_world_size()
+ data_parallel_group = parallel_state.get_data_parallel_group()
+
+ logging.info(f"rank {rank} world_size {world_size} data_parallel_group {data_parallel_group}")
+ worker_config = WorkerConfig(
+ rank=rank,
+ world_size=world_size,
+ num_workers=self.num_workers_val,
+ data_parallel_group=data_parallel_group,
+ worker_debug_path=None,
+ worker_log_level=0,
+ )
+ val_dataset = self.datasets_provider(worker_config, split='val')
+ energon_loader = get_savable_loader(val_dataset, worker_config=worker_config)
+ self.val_dataloader_object = energon_loader
+ return self.val_dataloader_object
def load_state_dict(self, state_dict: Dict[str, Any]) -> None:
"""
diff --git a/nemo/collections/diffusion/data/diffusion_fake_datamodule.py b/nemo/collections/diffusion/data/diffusion_fake_datamodule.py
new file mode 100644
index 000000000000..a9fc7ad5b484
--- /dev/null
+++ b/nemo/collections/diffusion/data/diffusion_fake_datamodule.py
@@ -0,0 +1,218 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import lightning.pytorch as pl
+import torch
+from lightning.pytorch.utilities.types import EVAL_DATALOADERS, TRAIN_DATALOADERS
+from torch.utils.data import DataLoader
+
+from nemo.collections.diffusion.models.model import DiTConfig
+from nemo.lightning.pytorch.plugins import MegatronDataSampler
+
+from .diffusion_taskencoder import pos_id_3d
+
+
+class PosEmb3D:
+ """Generates and provides 3D positional embeddings for video data."""
+
+ def __init__(self, *, max_t=96, max_h=960, max_w=960):
+ self.max_t = max_t
+ self.max_h = max_h
+ self.max_w = max_w
+ self.generate_pos_id()
+
+ def generate_pos_id(self):
+ """Generates the positional ID grid based on max_t, max_h, and max_w."""
+ self.grid = torch.stack(
+ torch.meshgrid(
+ torch.arange(self.max_t, device='cpu'),
+ torch.arange(self.max_h, device='cpu'),
+ torch.arange(self.max_w, device='cpu'),
+ ),
+ dim=-1,
+ )
+
+ def get_pos_id_3d(self, *, t, h, w):
+ """Retrieves a subset of the positional IDs for the specified dimensions.
+
+ Parameters:
+ t (int): Number of time frames.
+ h (int): Height dimension.
+ w (int): Width dimension.
+
+ Returns:
+ torch.Tensor: The positional IDs tensor with shape (t, h, w, 3).
+ """
+ if t > self.max_t or h > self.max_h or w > self.max_w:
+ self.max_t = max(self.max_t, t)
+ self.max_h = max(self.max_h, h)
+ self.max_w = max(self.max_w, w)
+ self.generate_pos_id()
+ return self.grid[:t, :h, :w]
+
+
+class DiTVideoLatentFakeDataset(torch.utils.data.Dataset):
+ """A fake dataset for generating synthetic video latent data."""
+
+ def __init__(
+ self,
+ n_frames,
+ max_h,
+ max_w,
+ patch_size,
+ in_channels,
+ crossattn_emb_size,
+ max_text_seqlen=512,
+ seq_length=8192,
+ ):
+ self.max_t = n_frames
+ self.max_height = max_h
+ self.max_width = max_w
+ self.patch_size = patch_size
+ self.in_channels = in_channels
+ self.text_dim = crossattn_emb_size
+ self.text_seqlen = max_text_seqlen
+ self.seq_length = seq_length
+
+ def __len__(self):
+ """Returns the total number of samples."""
+ return 100_000_000
+
+ def __getitem__(self, idx):
+ """Generates a single sample of data.
+
+ Parameters:
+ idx (int): Index of the data sample.
+
+ Returns:
+ dict: A dictionary containing video latent data and related information.
+ """
+ t = self.max_t
+ h = self.max_height
+ w = self.max_width
+ p = self.patch_size
+ c = self.in_channels
+
+ video_latent = torch.ones(self.seq_length, c * p**2, dtype=torch.bfloat16) * 0.5
+ text_embedding = torch.randn(self.text_seqlen, self.text_dim, dtype=torch.bfloat16)
+ pos_emb = pos_id_3d.get_pos_id_3d(t=t, h=h // p, w=w // p).reshape(-1, 3)
+
+ return {
+ 'video': video_latent,
+ 't5_text_embeddings': text_embedding,
+ 'seq_len_q': torch.tensor([video_latent.shape[0]], dtype=torch.int32).squeeze(),
+ 'seq_len_kv': torch.tensor([self.text_seqlen], dtype=torch.int32).squeeze(),
+ 'pos_ids': torch.zeros((self.seq_length, 3), dtype=torch.int32),
+ 'loss_mask': torch.ones(video_latent.shape[0], dtype=torch.bfloat16),
+ }
+
+ def _collate_fn(self, batch):
+ """A default implementation of a collation function.
+
+ Users should override this method to define custom data loaders.
+ """
+ return torch.utils.data.dataloader.default_collate(batch)
+
+ def collate_fn(self, batch):
+ """Method that user passes as a functor to DataLoader.
+
+ The method optionally performs neural type checking and adds types to the outputs.
+
+ Please note, subclasses of Dataset should not implement `input_types`.
+
+ Usage:
+ dataloader = torch.utils.data.DataLoader(
+ ....,
+ collate_fn=dataset.collate_fn,
+ ....
+ )
+
+ Returns:
+ Collated batch, with or without types.
+ """
+ return self._collate_fn(batch)
+
+
+class VideoLatentFakeDataModule(pl.LightningDataModule):
+ """A LightningDataModule for generating fake video latent data for training."""
+
+ def __init__(
+ self,
+ model_config: DiTConfig,
+ seq_length: int = 2048,
+ micro_batch_size: int = 1,
+ global_batch_size: int = 8,
+ num_workers: int = 1,
+ pin_memory: bool = True,
+ task_encoder=None,
+ use_train_split_for_val: bool = False,
+ ) -> None:
+ super().__init__()
+ self.seq_length = seq_length
+ self.micro_batch_size = micro_batch_size
+ self.global_batch_size = global_batch_size
+ self.num_workers = num_workers
+ self.model_config = model_config
+ self.pin_memory = pin_memory
+
+ self.data_sampler = MegatronDataSampler(
+ seq_len=self.seq_length,
+ micro_batch_size=micro_batch_size,
+ global_batch_size=global_batch_size,
+ )
+
+ def setup(self, stage: str = "") -> None:
+ """Sets up the dataset for training and validation.
+
+ Parameters:
+ stage (str): Optional stage argument (unused).
+ """
+ self._train_ds = DiTVideoLatentFakeDataset(
+ n_frames=self.model_config.max_frames,
+ max_h=self.model_config.max_img_h,
+ max_w=self.model_config.max_img_w,
+ patch_size=self.model_config.patch_spatial,
+ in_channels=self.model_config.in_channels,
+ crossattn_emb_size=self.model_config.crossattn_emb_size,
+ )
+
+ def train_dataloader(self) -> TRAIN_DATALOADERS:
+ """Returns the training DataLoader."""
+ if not hasattr(self, "_train_ds"):
+ self.setup()
+ return self._create_dataloader(self._train_ds)
+
+ def val_dataloader(self) -> EVAL_DATALOADERS:
+ """Returns the validation DataLoader."""
+ if not hasattr(self, "_train_ds"):
+ self.setup()
+ return self._create_dataloader(self._train_ds)
+
+ def _create_dataloader(self, dataset, **kwargs) -> DataLoader:
+ """Creates a DataLoader for the given dataset.
+
+ Parameters:
+ dataset (Dataset): The dataset to load.
+ **kwargs: Additional arguments for DataLoader.
+
+ Returns:
+ DataLoader: The DataLoader instance.
+ """
+ return DataLoader(
+ dataset,
+ num_workers=self.num_workers,
+ pin_memory=self.pin_memory,
+ persistent_workers=True,
+ collate_fn=dataset.collate_fn,
+ **kwargs,
+ )
diff --git a/nemo/collections/diffusion/data/diffusion_taskencoder.py b/nemo/collections/diffusion/data/diffusion_taskencoder.py
index 57e4e4ec8673..2a42b15453b3 100644
--- a/nemo/collections/diffusion/data/diffusion_taskencoder.py
+++ b/nemo/collections/diffusion/data/diffusion_taskencoder.py
@@ -12,15 +12,96 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import warnings
+import random
+from dataclasses import dataclass
+from typing import Any, List, Optional
+
import torch
import torch.nn.functional as F
from einops import rearrange
-from megatron.core import parallel_state
-from megatron.energon import DefaultTaskEncoder, SkipSample
+from megatron.energon import DefaultTaskEncoder, Sample, SkipSample
+from megatron.energon.task_encoder.base import stateless
from megatron.energon.task_encoder.cooking import Cooker, basic_sample_keys
from nemo.lightning.io.mixin import IOMixin
+from nemo.utils.sequence_packing_utils import first_fit_decreasing
+
+
+@dataclass
+class DiffusionSample(Sample):
+ """
+ Data class representing a sample for diffusion tasks.
+
+ Attributes:
+ video (torch.Tensor): Video latents (C T H W).
+ t5_text_embeddings (torch.Tensor): Text embeddings (S D).
+ t5_text_mask (torch.Tensor): Mask for text embeddings.
+ loss_mask (torch.Tensor): Mask indicating valid positions for loss computation.
+ image_size (Optional[torch.Tensor]): Tensor containing image dimensions.
+ fps (Optional[torch.Tensor]): Frame rate of the video.
+ num_frames (Optional[torch.Tensor]): Number of frames in the video.
+ padding_mask (Optional[torch.Tensor]): Mask indicating padding positions.
+ seq_len_q (Optional[torch.Tensor]): Sequence length for query embeddings.
+ seq_len_kv (Optional[torch.Tensor]): Sequence length for key/value embeddings.
+ pos_ids (Optional[torch.Tensor]): Positional IDs.
+ latent_shape (Optional[torch.Tensor]): Shape of the latent tensor.
+ """
+
+ video: torch.Tensor # video latents (C T H W)
+ t5_text_embeddings: torch.Tensor # (S D)
+ t5_text_mask: torch.Tensor # 1
+ loss_mask: torch.Tensor
+ image_size: Optional[torch.Tensor] = None
+ fps: Optional[torch.Tensor] = None
+ num_frames: Optional[torch.Tensor] = None
+ padding_mask: Optional[torch.Tensor] = None
+ seq_len_q: Optional[torch.Tensor] = None
+ seq_len_kv: Optional[torch.Tensor] = None
+ pos_ids: Optional[torch.Tensor] = None
+ latent_shape: Optional[torch.Tensor] = None
+
+ def to_dict(self) -> dict:
+ """Converts the sample to a dictionary."""
+ return dict(
+ video=self.video,
+ t5_text_embeddings=self.t5_text_embeddings,
+ t5_text_mask=self.t5_text_mask,
+ loss_mask=self.loss_mask,
+ image_size=self.image_size,
+ fps=self.fps,
+ num_frames=self.num_frames,
+ padding_mask=self.padding_mask,
+ seq_len_q=self.seq_len_q,
+ seq_len_kv=self.seq_len_kv,
+ pos_ids=self.pos_ids,
+ latent_shape=self.latent_shape,
+ )
+
+ def __add__(self, other: Any) -> int:
+ """Adds the sequence length of this sample with another sample or integer."""
+ if isinstance(other, DiffusionSample):
+ # Combine the values of the two instances
+ return self.seq_len_q.item() + other.seq_len_q.item()
+ elif isinstance(other, int):
+ # Add an integer to the value
+ return self.seq_len_q.item() + other
+ raise NotImplementedError
+
+ def __radd__(self, other: Any) -> int:
+ """Handles reverse addition for summing with integers."""
+ # This is called if sum or other operations start with a non-DiffusionSample object.
+ # e.g., sum([DiffusionSample(1), DiffusionSample(2)]) -> the 0 + DiffusionSample(1) calls __radd__.
+ if isinstance(other, int):
+ return self.seq_len_q.item() + other
+ raise NotImplementedError
+
+ def __lt__(self, other: Any) -> bool:
+ """Compares this sample's sequence length with another sample or integer."""
+ if isinstance(other, DiffusionSample):
+ return self.seq_len_q.item() < other.seq_len_q.item()
+ elif isinstance(other, int):
+ return self.seq_len_q.item() < other
+ raise NotImplementedError
def cook(sample: dict) -> dict:
@@ -75,18 +156,26 @@ def __init__(
max_frames: int = None,
text_embedding_padding_size: int = 512,
seq_length: int = None,
+ max_seq_length: int = None,
patch_spatial: int = 2,
patch_temporal: int = 1,
+ aesthetic_score: float = 0.0,
**kwargs,
):
super().__init__(*args, **kwargs)
self.max_frames = max_frames
self.text_embedding_padding_size = text_embedding_padding_size
self.seq_length = seq_length
+ self.max_seq_length = max_seq_length
self.patch_spatial = patch_spatial
self.patch_temporal = patch_temporal
+ self.aesthetic_score = aesthetic_score
+ @stateless(restore_seeds=True)
def encode_sample(self, sample: dict) -> dict:
+ """
+ Encodes video / text sample.
+ """
video_latent = sample['pth']
if torch.isnan(video_latent).any() or torch.isinf(video_latent).any():
@@ -95,6 +184,9 @@ def encode_sample(self, sample: dict) -> dict:
raise SkipSample()
info = sample['json']
+ if info['aesthetic_score'] < self.aesthetic_score:
+ raise SkipSample()
+
C, T, H, W = video_latent.shape
seq_len = (
video_latent.shape[-1]
@@ -105,19 +197,14 @@ def encode_sample(self, sample: dict) -> dict:
)
is_image = T == 1
- if seq_len > self.seq_length:
+ if self.seq_length is not None and seq_len > self.seq_length:
+ raise SkipSample()
+ if self.max_seq_length is not None and seq_len > self.max_seq_length:
raise SkipSample()
if self.max_frames is not None:
video_latent = video_latent[:, : self.max_frames, :, :]
- tpcp_size = parallel_state.get_tensor_model_parallel_world_size()
- if parallel_state.get_context_parallel_world_size() > 1:
- tpcp_size *= parallel_state.get_context_parallel_world_size() * 2
- if (T * H * W) % tpcp_size != 0:
- warnings.warn(f'skipping {video_latent.shape=} not divisible by {tpcp_size=}')
- raise SkipSample()
-
video_latent = rearrange(
video_latent,
'C (T pt) (H ph) (W pw) -> (T H W) (ph pw pt C)',
@@ -161,7 +248,7 @@ def encode_sample(self, sample: dict) -> dict:
'T H W d -> (T H W) d',
)
- if self.seq_length is not None:
+ if self.seq_length is not None and self.max_seq_length is None:
pos_ids = F.pad(pos_ids, (0, 0, 0, self.seq_length - seq_len))
loss_mask = torch.zeros(self.seq_length, dtype=torch.bfloat16)
loss_mask[:seq_len] = 1
@@ -169,7 +256,11 @@ def encode_sample(self, sample: dict) -> dict:
else:
loss_mask = torch.ones(seq_len, dtype=torch.bfloat16)
- return dict(
+ return DiffusionSample(
+ __key__=sample['__key__'],
+ __restore_key__=sample['__restore_key__'],
+ __subflavor__=None,
+ __subflavors__=sample['__subflavors__'],
video=video_latent,
t5_text_embeddings=t5_text_embeddings,
t5_text_mask=t5_text_mask,
@@ -178,13 +269,86 @@ def encode_sample(self, sample: dict) -> dict:
num_frames=num_frames,
loss_mask=loss_mask,
seq_len_q=torch.tensor(seq_len, dtype=torch.int32),
- seq_len_kv=torch.tensor(t5_text_embeddings_seq_length, dtype=torch.int32),
+ seq_len_kv=torch.tensor(self.text_embedding_padding_size, dtype=torch.int32),
pos_ids=pos_ids,
latent_shape=torch.tensor([C, T, H, W], dtype=torch.int32),
)
+ def select_samples_to_pack(self, samples: List[DiffusionSample]) -> List[List[DiffusionSample]]:
+ """
+ Selects sequences to pack for mixed image-video training.
+ """
+ results = first_fit_decreasing(samples, self.max_seq_length)
+ random.shuffle(results)
+ return results
+
+ @stateless
+ def pack_selected_samples(self, samples: List[DiffusionSample]) -> DiffusionSample:
+ """Construct a new Diffusion sample by concatenating the sequences."""
+
+ def stack(attr):
+ return torch.stack([getattr(sample, attr) for sample in samples], dim=0)
+
+ def cat(attr):
+ return torch.cat([getattr(sample, attr) for sample in samples], dim=0)
+
+ video = concat_pad([i.video for i in samples], self.max_seq_length)
+ loss_mask = concat_pad([i.loss_mask for i in samples], self.max_seq_length)
+ pos_ids = concat_pad([i.pos_ids for i in samples], self.max_seq_length)
+
+ return DiffusionSample(
+ __key__=",".join([s.__key__ for s in samples]),
+ __restore_key__=(), # Will be set by energon based on `samples`
+ __subflavor__=None,
+ __subflavors__=samples[0].__subflavors__,
+ video=video,
+ t5_text_embeddings=cat('t5_text_embeddings'),
+ t5_text_mask=cat('t5_text_mask'),
+ # image_size=stack('image_size'),
+ # fps=stack('fps'),
+ # num_frames=stack('num_frames'),
+ loss_mask=loss_mask,
+ seq_len_q=stack('seq_len_q'),
+ seq_len_kv=stack('seq_len_kv'),
+ pos_ids=pos_ids,
+ latent_shape=stack('latent_shape'),
+ )
+
+ @stateless
+ def batch(self, samples: List[DiffusionSample]) -> dict:
+ """Return dictionary with data for batch."""
+ if self.max_seq_length is None:
+ # no packing
+ return super().batch(samples).to_dict()
+
+ # packing
+ sample = samples[0]
+ return dict(
+ video=sample.video.unsqueeze_(0),
+ t5_text_embeddings=sample.t5_text_embeddings.unsqueeze_(0),
+ t5_text_mask=sample.t5_text_mask.unsqueeze_(0),
+ loss_mask=sample.loss_mask.unsqueeze_(0),
+ # image_size=sample.image_size,
+ # fps=sample.fps,
+ # num_frames=sample.num_frames,
+ # padding_mask=sample.padding_mask.unsqueeze_(0),
+ seq_len_q=sample.seq_len_q,
+ seq_len_kv=sample.seq_len_kv,
+ pos_ids=sample.pos_ids.unsqueeze_(0),
+ latent_shape=sample.latent_shape,
+ )
+
class PosID3D:
+ """
+ Generates 3D positional IDs for video data.
+
+ Attributes:
+ max_t (int): Maximum number of time frames.
+ max_h (int): Maximum height dimension.
+ max_w (int): Maximum width dimension.
+ """
+
def __init__(self, *, max_t=32, max_h=128, max_w=128):
self.max_t = max_t
self.max_h = max_h
@@ -192,6 +356,7 @@ def __init__(self, *, max_t=32, max_h=128, max_w=128):
self.generate_pos_id()
def generate_pos_id(self):
+ """Generates a grid of positional IDs based on max_t, max_h, and max_w."""
self.grid = torch.stack(
torch.meshgrid(
torch.arange(self.max_t, device='cpu'),
@@ -202,6 +367,7 @@ def generate_pos_id(self):
)
def get_pos_id_3d(self, *, t, h, w):
+ """Retrieves positional IDs for specified dimensions."""
if t > self.max_t or h > self.max_h or w > self.max_w:
self.max_t = max(self.max_t, t)
self.max_h = max(self.max_h, h)
@@ -210,4 +376,70 @@ def get_pos_id_3d(self, *, t, h, w):
return self.grid[:t, :h, :w]
+def pad_divisible(x, padding_value=0):
+ """
+ Pads the input tensor to make its size divisible by a specified value.
+
+ Args:
+ x (torch.Tensor): Input tensor.
+ padding_value (int): The value to make the tensor size divisible by.
+
+ Returns:
+ torch.Tensor: Padded tensor.
+ """
+ if padding_value == 0:
+ return x
+ # Get the size of the first dimension
+ n = x.size(0)
+
+ # Compute the padding needed to make the first dimension divisible by 16
+ padding_needed = (padding_value - n % padding_value) % padding_value
+
+ if padding_needed <= 0:
+ return x
+
+ # Create a new shape with the padded first dimension
+ new_shape = list(x.shape)
+ new_shape[0] += padding_needed
+
+ # Create a new tensor filled with zeros
+ x_padded = torch.zeros(new_shape, dtype=x.dtype, device=x.device)
+
+ # Assign the original tensor to the beginning of the new tensor
+ x_padded[:n] = x
+ return x_padded
+
+
+def concat_pad(tensor_list, max_seq_length):
+ """
+ Efficiently concatenates a list of tensors along the first dimension and pads with zeros
+ to reach max_seq_length.
+
+ Args:
+ tensor_list (list of torch.Tensor): List of tensors to concatenate and pad.
+ max_seq_length (int): The desired size of the first dimension of the output tensor.
+
+ Returns:
+ torch.Tensor: A tensor of shape [max_seq_length, ...], where ... represents the remaining dimensions.
+ """
+
+ # Get common properties from the first tensor
+ other_shape = tensor_list[0].shape[1:]
+ dtype = tensor_list[0].dtype
+ device = tensor_list[0].device
+
+ # Initialize the result tensor with zeros
+ result = torch.zeros((max_seq_length, *other_shape), dtype=dtype, device=device)
+
+ current_index = 0
+ for tensor in tensor_list:
+ length = tensor.shape[0]
+ # Directly assign the tensor to the result tensor without checks
+ result[current_index : current_index + length] = tensor
+ current_index += length
+
+ return result
+
+
pos_id_3d = PosID3D()
diff --git a/nemo/collections/diffusion/data/prepare_energon_dataset.py b/nemo/collections/diffusion/data/prepare_energon_dataset.py
new file mode 100644
index 000000000000..eed6cde55d36
--- /dev/null
+++ b/nemo/collections/diffusion/data/prepare_energon_dataset.py
@@ -0,0 +1,278 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import pickle
+from typing import Callable
+
+import nemo_run as run
+import numpy as np
+import pandas as pd
+import torch
+import torch.distributed as dist
+import webdataset as wds
+from einops import rearrange
+from transformers import T5EncoderModel, T5TokenizerFast
+
+from nemo.collections.common.video_tokenizers.cosmos_tokenizer import CausalVideoTokenizer
+from nemo.collections.common.video_tokenizers.utils import read_image, resize_video
+
+
+def initialize_text_encoder(t5_cache_dir):
+ """
+ Initializes the T5 tokenizer and encoder model, loading them from a specified cache directory.
+
+ Args:
+ t5_cache_dir (str): Path to the cache directory for storing the pretrained model files.
+
+ Returns:
+ tuple: A tuple containing the tokenizer and encoder model instances.
+ """
+
+ # Load tokenizer and text encoder, save in cache directory
+ tokenizer = T5TokenizerFast.from_pretrained("google-t5/t5-11b", cache_dir=t5_cache_dir)
+ text_encoder = T5EncoderModel.from_pretrained("google-t5/t5-11b", cache_dir=t5_cache_dir)
+ text_encoder.to("cuda")
+ text_encoder.eval()
+
+ return tokenizer, text_encoder
+
+
+# Load dataset from HuggingFace
+df = pd.read_parquet("hf://datasets/huggan/smithsonian_butterflies_subset/data/train-00000-of-00001.parquet")
+# Load Cosmos tokenizer from HuggingFace
+autoencoder = CausalVideoTokenizer.from_pretrained("CosmosCausalCV_f4x8x8")
+# Load T5-XXL text encoder
+t5_cache_dir = '' # Use your own custom cache path
+tokenizer, text_encoder = initialize_text_encoder(t5_cache_dir)
+
+
+class EncodedSample:
+ """
+ A class representing an encoded sample, containing the text encoding, length,
+ attention mask, and offset mappings.
+
+ Attributes:
+ encoded_text (np.ndarray): Encoded text array.
+ length (int): Length of the encoding.
+ attn_mask (np.ndarray): Attention mask for the encoding.
+ offset_mappings (np.ndarray): Mappings for offset positions.
+ """
+
+ def __init__(self, encoded_text: np.ndarray, length: int, attn_mask: np.ndarray, offset_mappings: np.ndarray):
+ self.encoded_text = encoded_text
+ self.length = length
+ self.attn_mask = attn_mask
+ self.offset_mappings = offset_mappings
+
+ def truncate(self) -> None:
+ """
+ Truncates the encoded text, attention mask, and offset mappings to the specified length.
+ """
+ self.encoded_text = self.encoded_text[0 : self.length].astype(np.float16)
+ self.attn_mask = self.attn_mask[0 : self.length].astype(np.int32)
+ if self.offset_mappings is not None:
+ self.offset_mappings = self.offset_mappings[0 : self.length].astype(np.int32)
+
+
+@torch.no_grad()
+def encode_for_batch(
+ tokenizer, encoder, prompts: list[str], truncate: bool = True, max_length=512, output_mapping=True
+):
+ """
+ Encodes a batch of text prompts into T5 embeddings.
+
+ Args:
+ tokenizer: Tokenizer instance for encoding.
+ encoder: T5 encoder model instance.
+ prompts (list[str]): List of text prompts to encode.
+ truncate (bool): If True, truncates the output embeddings.
+ max_length (int): Maximum length for each encoded prompt.
+ output_mapping (bool): If True, returns offset mappings for each prompt.
+
+ Returns:
+ list[EncodedSample]: A list of encoded samples containing text encodings and masks.
+ """
+ batch_encoding = tokenizer.batch_encode_plus(
+ prompts,
+ return_tensors="pt",
+ truncation=True,
+ padding="max_length",
+ max_length=max_length,
+ return_length=True,
+ return_offsets_mapping=output_mapping,
+ )
+
+ # We expect all the processing is done in GPU.
+ input_ids = batch_encoding.input_ids.cuda()
+ attn_mask = batch_encoding.attention_mask.cuda()
+ if output_mapping:
+ offsets_mapping = batch_encoding["offset_mapping"]
+ offsets_mapping = offsets_mapping.cpu().numpy()
+ else:
+ offsets_mapping = None
+
+ outputs = encoder(input_ids=input_ids, attention_mask=attn_mask) # type: ignore
+ encoded_text = outputs.last_hidden_state
+
+ lengths = attn_mask.sum(dim=1).cpu()
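+ # Zero out encoder outputs beyond each prompt's true (unpadded) length.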
+ for batch_id in range(encoded_text.shape[0]):
+ encoded_text[batch_id][lengths[batch_id] :] = 0
+
+ encoded_text = encoded_text.cpu().numpy()
+ attn_mask = attn_mask.cpu().numpy()
+
+ encoded_text = encoded_text[:, :max_length]
+ attn_mask = attn_mask[:, :max_length]
+
+ out = []
+ for idx in range(encoded_text.shape[0]):
+ if output_mapping:
+ offsets = offsets_mapping[idx]
+ else:
+ offsets = None
+
+ out.append(EncodedSample(encoded_text[idx].astype(np.float16), lengths[idx], attn_mask[idx], offsets))
+ if truncate:
+ for x in out:
+ x.truncate()
+ return out
+
+
+def generate_t5_embed(tokenizer, text_encoder, prompt, t5_embedding_max_length=512):
+ """
+ Generates a T5 embedding for a single text prompt.
+
+ Args:
+ tokenizer: T5 tokenizer instance.
+ text_encoder: T5 encoder model instance.
+ prompt (str): The text prompt to encode.
+ t5_embedding_max_length (int): Maximum length for the embedding.
+
+ Returns:
+ torch.Tensor: Padded T5 embedding tensor.
+ """
+ # encode text to t5 embedding
+ out = encode_for_batch(tokenizer, text_encoder, [prompt])[0]
+ encoded_text = torch.tensor(out.encoded_text, dtype=torch.bfloat16)
+
+ # pad t5 embedding to t5_embedding_max_length
+ L, C = encoded_text.shape
+ t5_embed = torch.zeros(1, t5_embedding_max_length, C, dtype=torch.bfloat16)
+ t5_embed[0, :L] = encoded_text
+
+ return t5_embed
+
+
+def get_start_end_idx_for_this_rank(dataset_size, rank, world_size):
+ """
+ Calculates the start and end indices for distributed processing based on rank.
+
+ Args:
+ dataset_size (int): Total dataset size.
+ rank (int): Current process rank.
+ world_size (int): Total number of processes.
+
+ Returns:
+ tuple: (start index, end index) for the rank.
+ """
+ split_size = dataset_size // world_size
+ start_idx = rank * split_size
+ # The last rank takes the remainder
+ end_idx = start_idx + split_size if rank != world_size - 1 else dataset_size
+ return start_idx, end_idx
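+
+# Example: dataset_size=10, world_size=3 -> split_size=3, so
+# rank 0 gets (0, 3), rank 1 gets (3, 6), and the last rank gets (6, 10),
+# picking up the remainder.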
+
+
+def butterfly_process_func(index):
+ """
+ Generates a sample dictionary with image latent tensor, caption, and metadata.
+
+ Args:
+ index (int): Index of the dataset row.
+
+ Returns:
+ dict: Dictionary containing processed image latents, embeddings, and metadata.
+ """
+ # Access the data from the dataframe
+ row = df.iloc[index]
+ image_url = row["image_url"]
+ image_caption = row["name"]
+
+ # Process image
+ video = read_image(image_url)
+ video = rearrange(video, 'h w (t c) -> t h w c', t=1)
+ video = resize_video(video, short_size=512)
+ batch_video = video[np.newaxis, ...]
+
+ # Run autoencoder to get latents
+ _, image_latent = autoencoder(batch_video, temporal_window=1)
+
+ text_embedding = generate_t5_embed(tokenizer, text_encoder, image_caption)
+
+ # Construct sample dictionary
+ sample = {
+ "__key__": f"{index:06}",
+ ".pth": image_latent.to(dtype=torch.bfloat16),
+ ".pickle": pickle.dumps(text_embedding),
+ ".json": {
+ "image_height": batch_video.shape[2],
+ "image_width": batch_video.shape[3],
+            # Additional metadata (e.g., quality scores) can be added here
+ },
+ }
+ return sample
+
+
+@torch.no_grad()
+@run.cli.entrypoint
+def prepare(process_func: Callable, output_dir: str = 'output'):
+ """
+    Prepares a WebDataset using the specified processing function in a distributed setting.
+
+ Args:
+ process_func (Callable): Function to process each dataset entry.
+ output_dir (str): Output directory to save processed dataset.
+
+ """
+ rank = dist.get_rank()
+    world_size = dist.get_world_size()
+
+ start_idx, end_idx = get_start_end_idx_for_this_rank(len(df), rank, world_size)
+ os.makedirs(output_dir, exist_ok=True)
+ output_tar = os.path.join(output_dir, f"rank{rank}-%06d.tar")
+
+ with wds.ShardWriter(output_tar, maxcount=10000) as sink:
+ for i in range(start_idx, end_idx):
+ sample = process_func(i)
+ # Write sample to tar file
+ sink.write(sample)
+
+
+@run.cli.factory(target=prepare)
+def prepare_butterfly_dataset() -> run.Partial:
+ """
+ Prepares the butterfly dataset for distributed training.
+
+ Returns:
+ run.Partial: Partially configured run for WebDataset preparation.
+ """
+ recipe = run.Partial(prepare, process_func=butterfly_process_func, output_dir='butterfly_webdataset')
+ return recipe
+
+
+if __name__ == '__main__':
+ dist.init_process_group("nccl")
+ local_rank = int(os.environ['LOCAL_RANK'])
+ torch.cuda.set_device(local_rank)
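+    # Launch with torchrun so that rank/world size are set, e.g.:
+    #   torchrun --nproc_per_node=8 nemo/collections/diffusion/data/prepare_energon_dataset.py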
+ run.cli.main(prepare, default_factory=prepare_butterfly_dataset)
diff --git a/nemo/collections/diffusion/data/readme.rst b/nemo/collections/diffusion/data/readme.rst
new file mode 100644
index 000000000000..57a1737988da
--- /dev/null
+++ b/nemo/collections/diffusion/data/readme.rst
@@ -0,0 +1,26 @@
+Preparing Image / Video Megatron Energon WebDataset with Cosmos Tokenizer
+===========================================================================
+
+This script is an example of preparing a WebDataset for an image / video + text dataset using distributed processing with the Cosmos Tokenizer. It processes each sample by generating a **continuous** image / video latent using the Cosmos video tokenizer and a T5 embedding from the text caption. The processed data is then stored in a WebDataset-compatible format.
+
+Requirements
+------------
+- **Dependencies**:
+ - Please use the latest NeMo dev container: ``nvcr.io/nvidia/nemo:dev``
+ - You may also need to install ``jammy`` and ``mediapy`` depending on your dev container version.
+
+- **Data**:
+  - The script uses an example dataset that comes in parquet format. To use a custom dataset, you will need to write a custom ``process_func`` and create a new factory recipe that uses it (see the sketch in step 4 below).
+
+Usage
+-----
+1. **Set up your environment**:
+ Pull and launch the NeMo dev container to run your script.
+
+2. **Customize Cache Path**:
+   Set the T5 cache directory path in the script by specifying the ``t5_cache_dir`` variable.
+
+3. **Running the Script**:
+ To run the script on 8 GPUs, use the following command:
+
+   ``torchrun --nproc_per_node=8 nemo/collections/diffusion/data/prepare_energon_dataset.py``
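+
+4. **Custom Datasets**:
+   As a sketch of a custom ``process_func`` (illustrative only: ``my_process_func``, the ``encode_with_cosmos_tokenizer`` helper, and the ``media_path``/``caption`` columns are hypothetical), mirror the butterfly example in ``prepare_energon_dataset.py`` and register a new factory:
+
+   .. code-block:: python
+
+      import pickle
+      import torch
+
+      def my_process_func(index):
+          # Build one WebDataset sample from your own dataframe `df`, reusing
+          # the tokenizer / text_encoder / autoencoder set up in the script.
+          row = df.iloc[index]
+          latent = encode_with_cosmos_tokenizer(row["media_path"])  # hypothetical helper
+          text_embedding = generate_t5_embed(tokenizer, text_encoder, row["caption"])
+          return {
+              "__key__": f"{index:06}",
+              ".pth": latent.to(dtype=torch.bfloat16),
+              ".pickle": pickle.dumps(text_embedding),
+              ".json": {"caption": row["caption"]},
+          }
+
+      @run.cli.factory(target=prepare)
+      def prepare_my_dataset() -> run.Partial:
+          return run.Partial(prepare, process_func=my_process_func, output_dir='my_webdataset')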
diff --git a/nemo/collections/diffusion/models/dit/dit_embeddings.py b/nemo/collections/diffusion/models/dit/dit_embeddings.py
index ec8d095cbbd4..6303db43bba1 100644
--- a/nemo/collections/diffusion/models/dit/dit_embeddings.py
+++ b/nemo/collections/diffusion/models/dit/dit_embeddings.py
@@ -55,6 +55,12 @@ def __init__(self, in_channels: int, time_embed_dim: int, seed=None):
self.linear_1.reset_parameters()
self.linear_2.reset_parameters()
+ if parallel_state.get_pipeline_model_parallel_world_size() > 1:
+ setattr(self.linear_1.weight, "pipeline_parallel", True)
+ setattr(self.linear_1.bias, "pipeline_parallel", True)
+ setattr(self.linear_2.weight, "pipeline_parallel", True)
+ setattr(self.linear_2.bias, "pipeline_parallel", True)
+
def forward(self, x: torch.Tensor) -> torch.Tensor:
"""
Computes the positional embeddings for the input tensor.
@@ -152,10 +158,27 @@ def __init__(
self.emb_h = torch.nn.Embedding(h, config.hidden_size)
self.emb_w = torch.nn.Embedding(w, config.hidden_size)
- if config.perform_initialization:
- config.init_method(self.emb_t.weight)
- config.init_method(self.emb_h.weight)
- config.init_method(self.emb_w.weight)
+ if 'seed' in kwargs.keys():
+ seed = kwargs['seed']
+ with torch.random.fork_rng():
+ torch.manual_seed(seed)
+ if config.perform_initialization:
+ self.customize_init_param()
+ else:
+ self.reset_parameters()
+ else:
+ if config.perform_initialization:
+ self.customize_init_param()
+
+ def customize_init_param(self):
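+        """Initialize the t/h/w embedding tables with the configured init method."""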
+ self.config.init_method(self.emb_t.weight)
+ self.config.init_method(self.emb_h.weight)
+ self.config.init_method(self.emb_w.weight)
+
+ def reset_parameters(self):
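+        """Reset the t/h/w embedding tables to the PyTorch default initialization."""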
+ self.emb_t.reset_parameters()
+ self.emb_h.reset_parameters()
+ self.emb_w.reset_parameters()
def forward(self, pos_ids: torch.Tensor):
return self.emb_t(pos_ids[..., 0]) + self.emb_h(pos_ids[..., 1]) + self.emb_w(pos_ids[..., 2])
diff --git a/nemo/collections/diffusion/models/dit/dit_layer_spec.py b/nemo/collections/diffusion/models/dit/dit_layer_spec.py
index cb7c520493f0..2233ef3a7354 100644
--- a/nemo/collections/diffusion/models/dit/dit_layer_spec.py
+++ b/nemo/collections/diffusion/models/dit/dit_layer_spec.py
@@ -733,8 +733,8 @@ def get_stdit_adaln_block_with_transformer_engine_spec() -> ModuleSpec:
)
-def get_dit_adaln_block_with_transformer_engine_spec() -> ModuleSpec:
- params = {"attn_mask_type": AttnMaskType.padding}
+def get_dit_adaln_block_with_transformer_engine_spec(attn_mask_type=AttnMaskType.padding) -> ModuleSpec:
+ params = {"attn_mask_type": attn_mask_type}
return ModuleSpec(
module=DiTLayerWithAdaLN,
submodules=DiTWithAdaLNSubmodules(
diff --git a/nemo/collections/diffusion/models/dit/dit_model.py b/nemo/collections/diffusion/models/dit/dit_model.py
index 0c1c1abc82f2..24943de6d675 100644
--- a/nemo/collections/diffusion/models/dit/dit_model.py
+++ b/nemo/collections/diffusion/models/dit/dit_model.py
@@ -141,7 +141,7 @@ def __init__(
self.config: TransformerConfig = config
- self.transformer_decoder_layer_spec = transformer_decoder_layer_spec()
+ self.transformer_decoder_layer_spec = transformer_decoder_layer_spec(attn_mask_type=config.attn_mask_type)
self.pre_process = pre_process
self.post_process = post_process
self.add_encoder = True
@@ -173,19 +173,33 @@ def __init__(
dit_embeddings.ParallelTimestepEmbedding(self.config.hidden_size, self.config.hidden_size, seed=1234),
)
+ self.fps_embedder = nn.Sequential(
+ Timesteps(num_channels=256, flip_sin_to_cos=False, downscale_freq_shift=1),
+ ParallelTimestepEmbedding(256, 256, seed=1234),
+ )
+
if self.pre_process:
self.x_embedder = torch.nn.Linear(in_channels * patch_spatial**2, self.config.hidden_size)
+ if pos_embedder is dit_embeddings.SinCosPosEmb3D:
+ if self.pre_process:
+ self.pos_embedder = pos_embedder(
+ config,
+ t=max_frames // patch_temporal,
+ h=max_img_h // patch_spatial,
+ w=max_img_w // patch_spatial,
+ )
+ else:
self.pos_embedder = pos_embedder(
config,
t=max_frames // patch_temporal,
h=max_img_h // patch_spatial,
w=max_img_w // patch_spatial,
+ seed=1234,
)
- self.fps_embedder = nn.Sequential(
- Timesteps(num_channels=256, flip_sin_to_cos=False, downscale_freq_shift=1),
- ParallelTimestepEmbedding(256, 256),
- )
+ if parallel_state.get_pipeline_model_parallel_world_size() > 1:
+ for p in self.pos_embedder.parameters():
+ setattr(p, "pipeline_parallel", True)
if self.post_process:
self.final_layer_linear = torch.nn.Linear(
@@ -194,6 +208,8 @@ def __init__(
)
self.affline_norm = RMSNorm(self.config.hidden_size)
+ if parallel_state.get_pipeline_model_parallel_world_size() > 1:
+ setattr(self.affline_norm.weight, "pipeline_parallel", True)
def forward(
self,
@@ -223,6 +239,7 @@ def forward(
]
* B,
dtype=torch.bfloat16,
+ device=x.device,
),
).view(-1)
if self.pre_process:
@@ -234,10 +251,16 @@ def forward(
else:
pos_emb = self.pos_embedder(pos_ids)
pos_emb = rearrange(pos_emb, "B S D -> S B D")
- x_S_B_D = rearrange(x_B_S_D, "B S D -> S B D")
+ x_S_B_D = rearrange(x_B_S_D, "B S D -> S B D").contiguous()
else:
# intermediate stage of pipeline
x_S_B_D = None ### should it take encoder_hidden_states
+ if (not hasattr(self, "pos_embedder")) or isinstance(self.pos_embedder, dit_embeddings.SinCosPosEmb3D):
+ pos_emb = None
+ else:
+ ## if transformer blocks need pos_emb, then pos_embedder should
+ ## be replicated across pp ranks.
+ pos_emb = rearrange(self.pos_embedder(pos_ids), "B S D -> S B D").contiguous()
timesteps_B_D = self.t_embedder(timesteps.flatten()).to(torch.bfloat16) # (b d_text_embedding)
@@ -245,12 +268,17 @@ def forward(
fps_B_D = self.fps_embedder(fps)
fps_B_D = nn.functional.pad(fps_B_D, (0, self.config.hidden_size - fps_B_D.shape[1]))
affline_emb_B_D += fps_B_D
+ affline_emb_B_D = self.affline_norm(affline_emb_B_D)
- crossattn_emb = rearrange(crossattn_emb, 'B S D -> S B D')
+ crossattn_emb = rearrange(crossattn_emb, 'B S D -> S B D').contiguous()
if self.config.sequence_parallel:
if self.pre_process:
x_S_B_D = tensor_parallel.scatter_to_sequence_parallel_region(x_S_B_D)
+ if hasattr(self, "pos_embedder") and isinstance(
+ self.pos_embedder, dit_embeddings.FactorizedLearnable3DEmbedding
+ ):
+ pos_emb = tensor_parallel.scatter_to_sequence_parallel_region(pos_emb)
crossattn_emb = tensor_parallel.scatter_to_sequence_parallel_region(crossattn_emb)
# `scatter_to_sequence_parallel_region` returns a view, which prevents
# the original tensor from being garbage collected. Clone to facilitate GC.
@@ -309,51 +337,41 @@ def sharded_state_dict(
"""
sharded_state_dict = super().sharded_state_dict(prefix, sharded_offsets, metadata)
- for param_name, param in self.t_embedder.named_parameters():
- weight_key = f'{prefix}t_embedder.{param_name}'
- self.tie_embeddings_weights_state_dict(param, sharded_state_dict, weight_key, weight_key)
-
- for param_name, param in self.affline_norm.named_parameters():
- weight_key = f'{prefix}affline_norm.{param_name}'
- self.tie_embeddings_weights_state_dict(param, sharded_state_dict, weight_key, weight_key)
-
+ for module in ['t_embedder']:
+ for param_name, param in getattr(self, module).named_parameters():
+ weight_key = f'{prefix}{module}.{param_name}'
+ self._set_embedder_weights_replica_id(param, sharded_state_dict, weight_key)
return sharded_state_dict
- def tie_embeddings_weights_state_dict(
- self,
- tensor,
- sharded_state_dict: ShardedStateDict,
- output_layer_weight_key: str,
- first_stage_word_emb_key: str,
+ def _set_embedder_weights_replica_id(
+ self, tensor: Tensor, sharded_state_dict: ShardedStateDict, embedder_weight_key: str
) -> None:
- """Ties the embedding and output weights in a given sharded state dict.
+ """set replica ids of the weights in t_embedder for sharded state dict.
Args:
sharded_state_dict (ShardedStateDict): state dict with the weight to tie
- output_layer_weight_key (str): key of the output layer weight in the state dict.
+            embedder_weight_key (str): key of the embedder weight in the state dict.
This entry will be replaced with a tied version
- first_stage_word_emb_key (str): this must be the same as the
- ShardedTensor.key of the first stage word embeddings.
Returns: None, acts in-place
"""
- if self.pre_process and parallel_state.get_tensor_model_parallel_rank() == 0:
- # Output layer is equivalent to the embedding already
- return
-
- # Replace the default output layer with a one sharing the weights with the embedding
- del sharded_state_dict[output_layer_weight_key]
- last_stage_word_emb_replica_id = (
- 0, # copy of first stage embedding
- parallel_state.get_tensor_model_parallel_rank()
- + parallel_state.get_pipeline_model_parallel_rank()
- * parallel_state.get_pipeline_model_parallel_world_size(),
+ tp_rank = parallel_state.get_tensor_model_parallel_rank()
+ vpp_rank = parallel_state.get_virtual_pipeline_model_parallel_rank()
+ vpp_rank = vpp_rank if vpp_rank else 0
+ vpp_world = parallel_state.get_virtual_pipeline_model_parallel_world_size()
+ vpp_world = vpp_world if vpp_world else 1
+ pp_rank = parallel_state.get_pipeline_model_parallel_rank()
+ if embedder_weight_key in sharded_state_dict:
+ del sharded_state_dict[embedder_weight_key]
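+        # Descriptive note (per Megatron-Core distributed checkpointing semantics,
+        # stated as an assumption): replica_id identifies identical copies of this
+        # tensor across (TP, PP*VPP, DP) ranks; only the copy with an all-zero
+        # replica_id is written to the checkpoint, the rest are treated as replicas.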
+ replica_id = (
+ tp_rank,
+ (vpp_rank + pp_rank * vpp_world),
parallel_state.get_data_parallel_rank(with_context_parallel=True),
)
- sharded_state_dict[output_layer_weight_key] = make_sharded_tensor_for_checkpoint(
+ sharded_state_dict[embedder_weight_key] = make_sharded_tensor_for_checkpoint(
tensor=tensor,
- key=first_stage_word_emb_key,
- replica_id=last_stage_word_emb_replica_id,
+ key=embedder_weight_key,
+ replica_id=replica_id,
allow_shape_mismatch=False,
)
diff --git a/nemo/collections/diffusion/models/dit_llama/dit_llama_layer_spec.py b/nemo/collections/diffusion/models/dit_llama/dit_llama_layer_spec.py
index 80bed5878e1b..305db1f2c993 100644
--- a/nemo/collections/diffusion/models/dit_llama/dit_llama_layer_spec.py
+++ b/nemo/collections/diffusion/models/dit_llama/dit_llama_layer_spec.py
@@ -13,7 +13,7 @@
# limitations under the License.
import copy
-from typing import Literal
+from typing import Literal, Optional
from megatron.core.transformer.attention import (
CrossAttention,
@@ -22,13 +22,18 @@
SelfAttentionSubmodules,
)
from megatron.core.transformer.custom_layers.transformer_engine import (
+ TEColumnParallelGroupedLinear,
TEColumnParallelLinear,
TEDotProductAttention,
+ TENorm,
+ TERowParallelGroupedLinear,
TERowParallelLinear,
)
from megatron.core.transformer.enums import AttnMaskType
from megatron.core.transformer.identity_op import IdentityOp
from megatron.core.transformer.mlp import MLP, MLPSubmodules
+from megatron.core.transformer.moe.moe_layer import MoELayer, MoESubmodules
+from megatron.core.transformer.moe.shared_experts import SharedExpertMLP
from megatron.core.transformer.spec_utils import ModuleSpec, build_module
from megatron.core.transformer.transformer_block import TransformerConfig
from megatron.core.transformer.transformer_config import TransformerConfig
@@ -78,7 +83,7 @@ def _replace_no_cp_submodules(submodules):
layer_number=layer_number,
)
- self.adaLN = AdaLN(config=self.config, n_adaln_chunks=6) # , norm=TENorm)
+ self.adaLN = AdaLN(config=self.config, n_adaln_chunks=6, norm=TENorm)
def forward(
self,
@@ -138,8 +143,57 @@ def forward(
return output, context
-def get_dit_llama_spec() -> ModuleSpec:
- params = {"attn_mask_type": AttnMaskType.padding}
+def _get_mlp_module_spec(
+ use_te: Optional[bool] = True,
+ num_experts: Optional[int] = None,
+ moe_grouped_gemm: Optional[bool] = False,
+ fp8: Optional[str] = None,
+) -> ModuleSpec:
+ """Helper function to get module spec for MLP/MoE"""
+ if num_experts is None:
+ # Dense MLP w/ or w/o TE modules.
+ return ModuleSpec(
+ module=MLP,
+ submodules=MLPSubmodules(
+ linear_fc1=TEColumnParallelLinear,
+ linear_fc2=TERowParallelLinear,
+ ),
+ )
+ else:
+ # Mixture of experts with modules in megatron core.
+ if use_te and moe_grouped_gemm:
+ linear_fc1 = TEColumnParallelGroupedLinear
+ linear_fc2 = TERowParallelGroupedLinear
+ elif use_te and fp8:
+ linear_fc1 = TEColumnParallelLinear
+ linear_fc2 = TERowParallelLinear
+ else:
+ raise ValueError("Invalid combination of use_te and moe_grouped_gemm")
+
+ use_te_grouped_gemm = use_te and TEColumnParallelGroupedLinear is not None
+
+ return ModuleSpec(
+ module=MoELayer,
+ submodules=MoESubmodules(
+ experts=(
+ MLPSubmodules(linear_fc1=linear_fc1, linear_fc2=linear_fc2)
+ if not moe_grouped_gemm or use_te_grouped_gemm
+ else None
+ ),
+ shared_experts=ModuleSpec(
+ module=SharedExpertMLP,
+ params={"gate": False},
+ submodules=MLPSubmodules(
+ linear_fc1=TEColumnParallelLinear,
+ linear_fc2=TERowParallelLinear,
+ ),
+ ),
+ ),
+ )
+
+
+def get_dit_llama_spec(num_experts=None, attn_mask_type=AttnMaskType.padding) -> ModuleSpec:
+ params = {"attn_mask_type": attn_mask_type}
return ModuleSpec(
module=MoviegGenLayer,
submodules=TransformerLayerSubmodules(
@@ -162,12 +216,6 @@ def get_dit_llama_spec() -> ModuleSpec:
linear_proj=TERowParallelLinear,
),
),
- mlp=ModuleSpec(
- module=MLP,
- submodules=MLPSubmodules(
- linear_fc1=TEColumnParallelLinear,
- linear_fc2=TERowParallelLinear,
- ),
- ),
+ mlp=_get_mlp_module_spec(use_te=True, num_experts=num_experts, moe_grouped_gemm=True, fp8=None),
),
)
diff --git a/nemo/collections/diffusion/models/dit_llama/dit_llama_model.py b/nemo/collections/diffusion/models/dit_llama/dit_llama_model.py
index bfa79e366cac..8ec0c7097c63 100644
--- a/nemo/collections/diffusion/models/dit_llama/dit_llama_model.py
+++ b/nemo/collections/diffusion/models/dit_llama/dit_llama_model.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-
+from functools import partial
from typing import Literal
from megatron.core.transformer.transformer_config import TransformerConfig
@@ -54,7 +54,9 @@ def __init__(
patch_temporal=patch_temporal,
in_channels=in_channels,
out_channels=out_channels,
- transformer_decoder_layer_spec=get_dit_llama_spec,
+ transformer_decoder_layer_spec=partial(
+ get_dit_llama_spec, num_experts=config.num_moe_experts, attn_mask_type=config.attn_mask_type
+ ),
pos_embedder=dit_embeddings.FactorizedLearnable3DEmbedding,
**kwargs,
)
diff --git a/nemo/collections/diffusion/models/model.py b/nemo/collections/diffusion/models/model.py
index 8cc6be860585..9ee0ab441700 100644
--- a/nemo/collections/diffusion/models/model.py
+++ b/nemo/collections/diffusion/models/model.py
@@ -14,7 +14,7 @@
import importlib
import warnings
-from dataclasses import dataclass, field
+from dataclasses import dataclass
from typing import Any, Callable, Dict, Optional, Tuple
import numpy as np
@@ -24,6 +24,7 @@
from einops import rearrange
from megatron.core import parallel_state
from megatron.core.packed_seq_params import PackedSeqParams
+from megatron.core.transformer.enums import AttnMaskType
from megatron.core.transformer.transformer_config import TransformerConfig
from torch import nn
from typing_extensions import override
@@ -39,10 +40,12 @@
def dit_forward_step(model, batch) -> torch.Tensor:
+ """Forward pass of DiT."""
return model(**batch)
def dit_data_step(module, dataloader_iter):
+ """DiT data batch preparation."""
batch = next(dataloader_iter)[0]
batch = get_batch_on_this_cp_rank(batch)
batch = {k: v.to(device='cuda', non_blocking=True) if torch.is_tensor(v) else v for k, v in batch.items()}
@@ -58,12 +61,12 @@ def dit_data_step(module, dataloader_iter):
'self_attention': PackedSeqParams(
cu_seqlens_q=cu_seqlens,
cu_seqlens_kv=cu_seqlens,
- qkv_format='sbhd',
+ qkv_format=module.qkv_format,
),
'cross_attention': PackedSeqParams(
cu_seqlens_q=cu_seqlens,
cu_seqlens_kv=cu_seqlens_kv,
- qkv_format='sbhd',
+ qkv_format=module.qkv_format,
),
}
@@ -77,9 +80,7 @@ def get_batch_on_this_cp_rank(data: Dict):
cp_size = mpu.get_context_parallel_world_size()
cp_rank = mpu.get_context_parallel_rank()
- t = 16
if cp_size > 1:
- assert t % cp_size == 0, "t must divisibly by cp_size"
num_valid_tokens_in_ub = None
if 'loss_mask' in data and data['loss_mask'] is not None:
num_valid_tokens_in_ub = data['loss_mask'].sum()
@@ -88,9 +89,13 @@ def get_batch_on_this_cp_rank(data: Dict):
if (value is not None) and (key in ['video', 'video_latent', 'noise_latent', 'pos_ids']):
if len(value.shape) > 5:
value = value.squeeze(0)
- B, C, T, H, W = value.shape
+ if len(value.shape) == 5:
+ B, C, T, H, W = value.shape
+ data[key] = value.view(B, C, cp_size, T // cp_size, H, W)[:, :, cp_rank, ...].contiguous()
+ else:
+ B, S, D = value.shape
+ data[key] = value.view(B, cp_size, S // cp_size, D)[:, cp_rank, ...].contiguous()
# TODO: sequence packing
- data[key] = value.view(B, C, cp_size, T // cp_size, H, W)[:, :, cp_rank, ...].contiguous()
loss_mask = data["loss_mask"]
data["loss_mask"] = loss_mask.view(loss_mask.shape[0], cp_size, loss_mask.shape[1] // cp_size)[
:, cp_rank, ...
@@ -142,8 +147,16 @@ class DiTConfig(TransformerConfig, io.IOMixin):
data_step_fn = dit_data_step
forward_step_fn = dit_forward_step
+ replicated_t_embedder = True
+
+ seq_length: int = 2048
+
+ qkv_format: str = 'sbhd'
+ attn_mask_type: AttnMaskType = AttnMaskType.no_mask
+
@override
def configure_model(self, tokenizer=None) -> DiTCrossAttentionModel:
+ """Configure DiT Model from MCore."""
vp_size = self.virtual_pipeline_model_parallel_size
if vp_size:
p_size = self.pipeline_model_parallel_size
@@ -168,11 +181,14 @@ def configure_model(self, tokenizer=None) -> DiTCrossAttentionModel:
)
def configure_vae(self):
+ """Dynamically import video tokenizer."""
return dynamic_import(self.vae_module)(self.vae_path)
@dataclass
class DiTBConfig(DiTConfig):
+ """DiT-B"""
+
num_layers: int = 12
hidden_size: int = 768
num_attention_heads: int = 12
@@ -180,6 +196,8 @@ class DiTBConfig(DiTConfig):
@dataclass
class DiTLConfig(DiTConfig):
+ """DiT-L"""
+
num_layers: int = 24
hidden_size: int = 1024
num_attention_heads: int = 16
@@ -187,6 +205,8 @@ class DiTLConfig(DiTConfig):
@dataclass
class DiTXLConfig(DiTConfig):
+ """DiT-XL"""
+
num_layers: int = 28
hidden_size: int = 1152
num_attention_heads: int = 16
@@ -194,6 +214,8 @@ class DiTXLConfig(DiTConfig):
@dataclass
class DiT7BConfig(DiTConfig):
+ """DiT-7B"""
+
num_layers: int = 32
hidden_size: int = 3072
num_attention_heads: int = 24
@@ -201,6 +223,8 @@ class DiT7BConfig(DiTConfig):
@dataclass
class DiTLlama30BConfig(DiTConfig):
+ """MovieGen 30B"""
+
num_layers: int = 48
hidden_size: int = 6144
ffn_hidden_size: int = 16384
@@ -228,13 +252,42 @@ class DiTLlama30BConfig(DiTConfig):
@dataclass
class DiTLlama5BConfig(DiTLlama30BConfig):
+ """MovieGen 5B"""
+
num_layers: int = 32
hidden_size: int = 3072
ffn_hidden_size: int = 8192
num_attention_heads: int = 24
+@dataclass
+class DiTLlama1BConfig(DiTLlama30BConfig):
+ """MovieGen 1B"""
+
+ num_layers: int = 16
+ hidden_size: int = 2048
+ ffn_hidden_size: int = 8192
+ num_attention_heads: int = 32
+
+
+@dataclass
+class ECDiTLlama1BConfig(DiTLlama1BConfig):
+ "EC-DiT 1B"
+ moe_router_load_balancing_type: str = 'expert_choice'
+ moe_token_dispatcher_type: str = 'alltoall'
+ moe_grouped_gemm: bool = True
+ moe_expert_capacity_factor: float = 8
+ moe_pad_expert_input_to_capacity: bool = True
+ moe_router_topk: int = 1
+ num_moe_experts: int = 64
+ ffn_hidden_size: int = 1024
+
+
class DiTModel(GPTModel):
+ """
+ Diffusion Transformer Model
+ """
+
def __init__(
self,
config: Optional[DiTConfig] = None,
@@ -256,6 +309,9 @@ def __init__(
self.vae = None
+    def load_state_dict(self, state_dict, strict=False):
+        """Load weights into the wrapped MCore module; non-strict loading by design."""
+        self.module.load_state_dict(state_dict, strict=False)
+
def data_step(self, dataloader_iter) -> Dict[str, Any]:
return self.config.data_step_fn(dataloader_iter)
@@ -284,10 +340,12 @@ def on_validation_start(self):
self.vae.to('cuda')
def on_validation_end(self):
+ """Move video tokenizer to CPU after validation."""
if self.vae is not None:
self.vae.to('cpu')
def validation_step(self, batch, batch_idx=None) -> torch.Tensor:
+ """Generated validation video sample and logs to wandb."""
# In mcore the loss-function is part of the forward-pass (when labels are provided)
state_shape = batch['video'].shape
sample = self.diffusion_pipeline.generate_samples_from_batch(
@@ -304,7 +362,7 @@ def validation_step(self, batch, batch_idx=None) -> torch.Tensor:
seq_len_q = batch['seq_len_q'][0]
sample = rearrange(
- sample[:, :seq_len_q],
+ sample[0, None, :seq_len_q],
'B (T H W) (ph pw pt C) -> B C (T pt) (H ph) (W pw)',
ph=self.config.patch_spatial,
pw=self.config.patch_spatial,
@@ -318,13 +376,7 @@ def validation_step(self, batch, batch_idx=None) -> torch.Tensor:
video = (video * 255).to(torch.uint8).cpu().numpy().astype(np.uint8)
- T = video.shape[2]
- if T == 1:
- image = rearrange(video, 'b c t h w -> (b t h) w c')
- result = image
- else:
- # result = wandb.Video(video, fps=float(batch['fps'])) # (batch, time, channel, height width)
- result = video
+ result = rearrange(video, 'b c t h w -> (b t) c h w')
# wandb is on the last rank for megatron, first rank for nemo
wandb_rank = 0
@@ -340,11 +392,12 @@ def validation_step(self, batch, batch_idx=None) -> torch.Tensor:
if gather_list is not None:
videos = []
for video in gather_list:
- if len(video.shape) == 3:
- videos.append(wandb.Image(video))
- else:
- videos.append(wandb.Video(video, fps=30))
- wandb.log({'prediction': videos}, step=self.global_step)
+ try:
+ videos.append(wandb.Video(video, fps=24, format='mp4'))
+ except Exception as e:
+ warnings.warn(f'Error saving video as mp4: {e}')
+ videos.append(wandb.Video(video, fps=24))
+ wandb.log({'prediction': videos})
return None
@@ -375,6 +428,10 @@ def on_validation_model_zero_grad(self) -> None:
class DummyLossReduction(MegatronLossReduction):
+ """
+ Diffusion Loss Reduction
+ """
+
def __init__(self, validation_step: bool = False, val_drop_last: bool = True) -> None:
super().__init__()
self.validation_step = validation_step
diff --git a/nemo/collections/diffusion/readme.rst b/nemo/collections/diffusion/readme.rst
index 871527948708..38df88c13955 100644
--- a/nemo/collections/diffusion/readme.rst
+++ b/nemo/collections/diffusion/readme.rst
@@ -11,45 +11,98 @@ Some of the features we currently support include:
- Energon Dataloader for Webscale Dataloading
- Model and Data Parallelism
-- Model Architectures: DiT 30B parameters or even more
+- Model Architectures: Original DiT, MovieGen 30B+ parameters, Spatio-Temporal DiT
-Features Status
+Performance
---------------
-We support image diffusion training. Video training incoming.
+We benchmarked 7B and 28B DiT cross-attention models with 8k and 64k+ context lengths on 32 H100 DGX nodes.
+
+- 8k context length corresponds to the latent of 256 frames of 256px video.
+- 64k context length corresponds to the latent of 256 frames of 1024px video.
+
+.. list-table::
+ :header-rows: 1
+ :widths: 15 15 25 15 15
+
+ * - Model size
+ - Context length
+ - Training config
+     - GPU util. (TFLOPS)
+ - Throughput (token/s/GPU)
+ * - DiT 7B
+ - 8k
+ - baseline, no optimization
+ - OOM
+ -
+ * - DiT 7B
+ - 8k
+ - TP=2 SP
+ - 519
+ - 10052
+ * - DiT 7B
+ - 74k
+ - TP=2 SP CP=4
+ - 439
+ - 3409
+ * - DiT 28B
+ - 8k
+     - TP=4 PP=4
+ - 468
+ - 2510
+ * - DiT 28B
+ - 64k
+ - FSDP act ckpt
+ - 445
+ - 1386
+**Legend:**
+
+- **FSDP**: Fully Sharded Data Parallelism
+- **CP**: Context Parallelism
+- **TP**: Tensor Parallelism
+- **SP**: Sequence Parallelism
+- **PP**: Pipeline Parallelism
+- **EP**: Expert Parallelism
+- **distopt**: Mcore distributed optimizer
+- **act ckpt**: activation checkpointing
+
+Features Status:
+^^^^^^^^^^^^^^^^
+
+We support image/video diffusion training with all parallelism strategies.

+---------------------------+------------------+
| Parallelism               | Status           |
+===========================+==================+
| FSDP                      | ✅ Supported      |
+---------------------------+------------------+
+| CP+TP+SP+FSDP             | ✅ Supported      |
++---------------------------+------------------+
| CP+TP+SP+distopt          | ✅ Supported      |
+---------------------------+------------------+
| CP+TP+SP+PP+distopt       | ✅ Supported      |
+---------------------------+------------------+
-| CP+TP+SP+FSDP             | 🚀 Coming Soon    |
+| CP+TP+SP+PP+distopt+EP    | ✅ Supported      |
++---------------------------+------------------+
+| CP+TP+SP+FSDP+EP          | ✅ Supported      |
+---------------------------+------------------+
-**Legend:**
-- **FSDP**: Fully Sharded Data Parallelism
-- **CP**: Context Parallelism
-- **TP**: Tensor Parallelism
-- **SP**: Sequence Parallelism
-- **PP**: Pipeline Parallelism
-- **distop**: mcore distributed optmizer
-+--------------+-------------------+-----------------+
-| Model Size   | Modality          | Status          |
-+==============+===================+=================+
-| DiT 30B+     | 256px image       | ✅ Supported     |
-+--------------+-------------------+-----------------+
-| DiT 30B+     | 256px image+video | 🚀 Coming Soon   |
-+--------------+-------------------+-----------------+
-| DiT 30B+     | 768px image+video | 🚀 Coming Soon   |
-+--------------+-------------------+-----------------+
+Training Stages:
+^^^^^^^^^^^^^^^^
+
++---------------+----------------------+-----------------+-----------------+
+| Model Size    | Modality             | Sequence Length | Status          |
++===============+======================+=================+=================+
+| DiT 5B, 30B+  | 256px image          | 256             | ✅ Supported     |
++---------------+----------------------+-----------------+-----------------+
+| DiT 5B, 30B+  | 256px image+video    | 8k              | ✅ Supported     |
++---------------+----------------------+-----------------+-----------------+
+| DiT 5B, 30B+  | 768px image+video    | 74k+            | ✅ Supported     |
++---------------+----------------------+-----------------+-----------------+
+
Energon Dataloader for Webscale Dataloading
@@ -58,7 +111,7 @@ Energon Dataloader for Webscale Dataloading
Webscale Dataloading
^^^^^^^^^^^^^^^^^^^^
-Megatron-Energon is an optimized multi-modal dataloader for large-scale deep learning with Megatron. Energon allows for distributed loading of large training training data for multi-modal model training. Energon allows for blending many datasets together and distributing the dataloading workflow across multiple cluster nodes/processes while ensuring reproducibility and resumability.
+Megatron-Energon is an optimized multi-modal dataloader for large-scale deep learning with Megatron. Energon allows for distributed loading of large training data for multi-modal model training. It also supports blending many datasets together and distributing the dataloading workflow across multiple cluster nodes/processes while ensuring reproducibility and resumability. You can learn more about how to prepare your own image / video webdataset for diffusion training `here `_.
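+
+As a minimal sketch (mirroring the ``multimodal_datamodule`` factory in ``nemo/collections/diffusion/train.py``; the batch sizes are illustrative), the dataloader can be configured like this:
+
+.. code-block:: python
+
+   import nemo_run as run
+   from nemo.collections.diffusion.data.diffusion_energon_datamodule import DiffusionDataModule
+   from nemo.collections.diffusion.data.diffusion_taskencoder import BasicDiffusionTaskEncoder
+
+   data_module = DiffusionDataModule(
+       seq_length=2048,
+       task_encoder=run.Config(BasicDiffusionTaskEncoder, seq_length=2048),
+       micro_batch_size=1,
+       global_batch_size=32,
+   )
+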
Dataloader Checkpointing
^^^^^^^^^^^^^^^^^^^^^^^^
@@ -71,7 +124,7 @@ Parallel Configuration
Energon's architecture allows it to efficiently distribute data across multiple processing units, ensuring that each GPU or node receives a balanced workload. This parallelization not only increases the overall throughput of data processing but also helps in maintaining high utilization of available computational resources.
-Mixed Image-Video Training (comming soon)
+Mixed Image-Video Training
------------------------------
Our dataloader provides support for mixed image-video training by using the NeMo packed sequence feature to pack together images and videos of varying length into the same microbatch. The sequence packing mechanism uses the THD attention kernel, which allows us to increase the model FLOPs utilization (MFU) and efficiently process data with varying length.
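+
+A minimal sketch of enabling packed-sequence training on top of an existing recipe (mirroring the ``pretrain_7b_pack`` factory in ``nemo/collections/diffusion/train.py``; the buffer size is illustrative):
+
+.. code-block:: python
+
+   recipe = pretrain_7b()
+   recipe.data.packing_buffer_size = 1000        # buffer from which samples are packed
+   recipe.data.task_encoder.seq_length = None    # keep native per-sample lengths
+   recipe.data.task_encoder.max_seq_length = recipe.data.seq_length
+   recipe.model.config.qkv_format = 'thd'        # THD attention kernel for packed sequences
+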
@@ -79,7 +132,7 @@ Our dataloader provides support for mixed image-video training by using the NeMo
.. image:: assets/mixed_training.png
:alt: Mixed image-video dataloading strategy
- :width: 300px
+ :scale: 50%
:align: center
Model and Data Parallelism
@@ -88,7 +141,7 @@ NeMo provides support for training models using tensor parallelism, sequence par
.. image:: assets/pipeline_conditioning.png
:alt: Conditioning mechanism for pipeline parallelism
- :width: 300px
+ :scale: 50%
:align: center
Model Architectures
@@ -96,79 +149,97 @@ Model Architectures
DiT
^^^
-We implement an efficient version of the diffusion transformer (DiT) [1]_. Our DiT is slightly modified from the original paper as we use cross attention and adaptive layernorm together in the same architecture. We also use a QK-layernorm for training stability. Our framework allows for customizing the DiT architecture while maintaining its scalability, enabling training large DiT models on long sequence lengths.
-
+We implement an efficient version of the Diffusion Transformer (DiT) [1]_ with several variants to provide users with flexibility in exploring various model architectures.
+The currently supported architectures include:
-Data preparation
---------------------------
+- DiT adaLN-Zero (original DiT) [1]_
+- DiT adaLN-Zero with Cross attention
+- MovieGen [2]_
+- Spatio-Temporal DiT (ST-DiT)
-We expect data to be in this webdataset format. For more information about webdataset and energon dataset, please refer to https://github.com/NVIDIA/Megatron-Energon
+In the architectures using DiT adaLN-Zero, we also use QK-layernorm to stabilize video diffusion training. In addition, we provide an option to use cross-attention with additional conditioning information (e.g., text embeddings) for text-to-video training with the original DiT formulation.
-Here we demonstrate a step by step example of how to prepare a dummy image dataset.
+We also support MovieGen [2]_ training with a Llama-based model architecture that leverages FSDP for large model training (e.g., 30B+ parameters).
-.. code-block:: bash
- torchrun --nproc-per-node 2 nemo/collections/diffusion/data/prepare_energon_dataset.py --factory prepare_dummy_image_dataset
+Our framework allows for customizing the DiT architecture while maintaining its scalability, enabling the training of large DiT models on long sequence lengths. We provide functionality for ST-DiT, which utilizes spatial self attention and temporal self attention blocks operating on the spatial and temporal sequence dimensions, respectively.

+There are various challenges that emerge with specialized architectures. In the case of ST-DiT, one major challenge is that the spatial and temporal context lengths are much smaller than the full input sequence length. This leads to a large communication cost when using CP for a small amount of computation: the P2P communication in context parallelism is exposed and lengthens the training step time. For efficient training of ST-DiT, we propose a novel hybrid parallelism strategy that leverages A2A communication and local attention computation for spatial and temporal self attention, while using P2P communication with context parallelism in a ring topology. This approach reduces the bandwidth requirement by a factor of hw/cp for temporal attention and t/cp for spatial attention, while still enjoying the benefits of context parallelism to split the workload of computing full self attention.
-this will generate a folder a tar files. .pth contains image/video latent representations encode by image/video tokenizer, .json contains metadata including text caption, resolution, aspection ratio, and .pickle contains text embeddings encoded by language model like T5.
-
-.. code-block:: bash
-
- shard_000.tar
- βββ samples/sample_0000.pth
- βββ samples/sample_0000.pickle
- βββ samples/sample_0000.json
- βββ samples/sample_0001.pth
- βββ samples/sample_0001.pickle
- βββ samples/sample_0001.json
- βββ ...
- shard_001.tar
-
-The following is a sample command to prepare prepare webdataset into energon dataset:
-
-.. code-block:: bash
+.. image:: assets/st_dit_hybrid_parallel.png
+ :alt: Hybrid Parallelism for ST-DiT
+ :scale: 50%
+ :align: center
- # energon prepare . --num-workers 192
- Found 369057 tar files in total. The first and last ones are:
- - 0.tar
- - 99999.tar
- If you want to exclude some of them, cancel with ctrl+c and specify an exclude filter in the command line.
- Please enter a desired train/val/test split like "0.5, 0.2, 0.3" or "8,1,1": 1,0,0
- Indexing shards [####################################] 369057/369057
- Sample 0, keys:
- - .json
- - .pickle
- - .pth
- Sample 1, keys:
- - .json
- - .pickle
- - .pth
- Found the following part types in the dataset: .json, .pth, .pickle
- Do you want to create a dataset.yaml interactively? [Y/n]: Y
- The following dataset classes are available:
- 0. CaptioningWebdataset
- 1. CrudeWebdataset
- 2. ImageClassificationWebdataset
- 3. ImageWebdataset
- 4. InterleavedWebdataset
- 5. MultiChoiceVQAWebdataset
- 6. OCRWebdataset
- 7. SimilarityInterleavedWebdataset
- 8. TextWebdataset
- 9. VQAOCRWebdataset
- 10. VQAWebdataset
- 11. VidQAWebdataset
- Please enter a number to choose a class: 1
- The dataset you selected uses the following sample type:
-
- class CrudeSample(dict):
- """Generic sample type to be processed later."""
-
- CrudeWebdataset does not need a field map. You will need to provide a `Cooker` for your dataset samples in your `TaskEncoder`.
- Furthermore, you might want to add `subflavors` in your meta dataset specification.
-
-training
+.. Data preparation
+.. --------------------------
+
+.. We expect data to be in this webdataset format. For more information about webdataset and energon dataset, please refer to https://github.com/NVIDIA/Megatron-Energon
+
+.. Here we demonstrate a step by step example of how to prepare a dummy image dataset.
+
+.. .. code-block:: bash
+
+.. torchrun --nproc-per-node 2 nemo/collections/diffusion/data/prepare_energon_dataset.py --factory prepare_dummy_image_dataset
+
+.. this will generate a folder of tar files. .pth contains image/video latent representations encoded by the image/video tokenizer, .json contains metadata including text caption, resolution, and aspect ratio, and .pickle contains text embeddings encoded by a language model like T5.
+
+.. .. code-block:: bash
+
+.. shard_000.tar
+.. βββ samples/sample_0000.pth
+.. βββ samples/sample_0000.pickle
+.. βββ samples/sample_0000.json
+.. βββ samples/sample_0001.pth
+.. βββ samples/sample_0001.pickle
+.. βββ samples/sample_0001.json
+.. βββ ...
+.. shard_001.tar
+
+.. The following is a sample command to prepare the webdataset into an energon dataset:
+
+.. .. code-block:: bash
+
+.. # energon prepare . --num-workers 192
+.. Found 369057 tar files in total. The first and last ones are:
+.. - 0.tar
+.. - 99999.tar
+.. If you want to exclude some of them, cancel with ctrl+c and specify an exclude filter in the command line.
+.. Please enter a desired train/val/test split like "0.5, 0.2, 0.3" or "8,1,1": 1,0,0
+.. Indexing shards [####################################] 369057/369057
+.. Sample 0, keys:
+.. - .json
+.. - .pickle
+.. - .pth
+.. Sample 1, keys:
+.. - .json
+.. - .pickle
+.. - .pth
+.. Found the following part types in the dataset: .json, .pth, .pickle
+.. Do you want to create a dataset.yaml interactively? [Y/n]: Y
+.. The following dataset classes are available:
+.. 0. CaptioningWebdataset
+.. 1. CrudeWebdataset
+.. 2. ImageClassificationWebdataset
+.. 3. ImageWebdataset
+.. 4. InterleavedWebdataset
+.. 5. MultiChoiceVQAWebdataset
+.. 6. OCRWebdataset
+.. 7. SimilarityInterleavedWebdataset
+.. 8. TextWebdataset
+.. 9. VQAOCRWebdataset
+.. 10. VQAWebdataset
+.. 11. VidQAWebdataset
+.. Please enter a number to choose a class: 1
+.. The dataset you selected uses the following sample type:
+
+.. class CrudeSample(dict):
+.. """Generic sample type to be processed later."""
+
+.. CrudeWebdataset does not need a field map. You will need to provide a `Cooker` for your dataset samples in your `TaskEncoder`.
+.. Furthermore, you might want to add `subflavors` in your meta dataset specification.
+
+Model Training
--------------------------
To launch training on one node
@@ -187,4 +258,5 @@ To launch training on multiple nodes using Slurm
Citations
---------
-.. [1] William Peebles and Saining Xie, "Scalable Diffusion Models with Transformers," *arXiv preprint arXiv:2212.09748*, 2022.
\ No newline at end of file
+.. [1] William Peebles and Saining Xie, "Scalable Diffusion Models with Transformers," *arXiv preprint arXiv:2212.09748*, 2022.
+.. [2] The Movie Gen team @ Meta, "Movie Gen: A Cast of Media Foundation Models", *arXiv preprint arXiv:2410.13720*, 2024.
\ No newline at end of file
diff --git a/nemo/collections/diffusion/sampler/edm/edm_pipeline.py b/nemo/collections/diffusion/sampler/edm/edm_pipeline.py
index 6e1be1f6f2a6..16d3177088a9 100644
--- a/nemo/collections/diffusion/sampler/edm/edm_pipeline.py
+++ b/nemo/collections/diffusion/sampler/edm/edm_pipeline.py
@@ -427,8 +427,13 @@ def get_data_and_condition(self, data_batch: dict[str, Tensor], dropout_rate=0.2
latent_state = raw_state
# Condition
- data_batch['crossattn_emb'] = self.random_dropout_input(
+ condition = {} # Create a new dictionary for condition
+ # Copy all keys from data_batch except 'video'
+ for key, value in data_batch.items():
+ if key not in ['video', 't5_text_embeddings']:
+ condition[key] = value
+ condition['crossattn_emb'] = self.random_dropout_input(
data_batch['t5_text_embeddings'], dropout_rate=dropout_rate
)
- return raw_state, latent_state, data_batch
+ return raw_state, latent_state, condition
diff --git a/nemo/collections/diffusion/train.py b/nemo/collections/diffusion/train.py
index 43a0a5dcb536..404602084b85 100644
--- a/nemo/collections/diffusion/train.py
+++ b/nemo/collections/diffusion/train.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,34 +14,43 @@
import os
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.loggers import WandbLogger
from megatron.core.distributed import DistributedDataParallelConfig
from megatron.core.optimizer import OptimizerConfig
-from pytorch_lightning.loggers import WandbLogger
+from megatron.core.transformer.enums import AttnMaskType
from nemo import lightning as nl
from nemo.collections import llm
from nemo.collections.diffusion.data.diffusion_energon_datamodule import DiffusionDataModule
+from nemo.collections.diffusion.data.diffusion_fake_datamodule import VideoLatentFakeDataModule
from nemo.collections.diffusion.data.diffusion_taskencoder import BasicDiffusionTaskEncoder
from nemo.collections.diffusion.models.model import (
DiT7BConfig,
DiTConfig,
DiTLConfig,
+ DiTLlama1BConfig,
DiTLlama5BConfig,
DiTLlama30BConfig,
DiTModel,
DiTXLConfig,
+ ECDiTLlama1BConfig,
)
+from nemo.collections.multimodal.data.energon.base import SimpleMultiModalDataModule
from nemo.lightning.pytorch.callbacks import ModelCheckpoint, PreemptionCallback
+from nemo.lightning.pytorch.callbacks.megatron_comm_overlap import MegatronCommOverlapCallback
from nemo.lightning.pytorch.callbacks.model_transform import ModelTransform
+from nemo.lightning.pytorch.callbacks.nsys import NsysCallback
from nemo.lightning.pytorch.strategies.utils import RestoreConfig
+from nemo.utils.exp_manager import TimingCallback
@run.cli.factory
@run.autoconvert
def multimodal_datamodule() -> pl.LightningDataModule:
+ """Multimodal Datamodule Initialization"""
data_module = DiffusionDataModule(
seq_length=2048,
task_encoder=run.Config(BasicDiffusionTaskEncoder, seq_length=2048),
@@ -51,9 +60,39 @@ def multimodal_datamodule() -> pl.LightningDataModule:
return data_module
+@run.cli.factory
+@run.autoconvert
+def simple_datamodule() -> pl.LightningDataModule:
+ """Simple Datamodule Initialization"""
+ data_module = SimpleMultiModalDataModule(
+ seq_length=2048,
+ micro_batch_size=1,
+ global_batch_size=32,
+ num_workers=16,
+ tokenizer=None,
+ image_processor=None,
+ task_encoder=run.Config(BasicDiffusionTaskEncoder, seq_length=2048),
+ )
+ return data_module
+
+
+@run.cli.factory
+@run.autoconvert
+def multimodal_fake_datamodule() -> pl.LightningDataModule:
+ """Multimodal Mock Datamodule Initialization"""
+ data_module = VideoLatentFakeDataModule(
+        seq_length=None,  # Set None to detect the sequence length automatically.
+ task_encoder=run.Config(BasicDiffusionTaskEncoder, seq_length=2048),
+ micro_batch_size=1,
+ global_batch_size=32,
+ )
+ return data_module
+
+
@run.cli.factory
@run.autoconvert
def peft(args) -> ModelTransform:
+ """Parameter Efficient Fine Tuning"""
return llm.peft.LoRA(
target_modules=['linear_qkv', 'linear_proj'], # , 'linear_fc1', 'linear_fc2'],
dim=args.lora_dim,
@@ -62,6 +101,7 @@ def peft(args) -> ModelTransform:
@run.cli.factory(target=llm.train)
def pretrain() -> run.Partial:
+ """Base Pretraining Config"""
return run.Partial(
llm.train,
model=run.Config(
@@ -85,6 +125,8 @@ def pretrain() -> run.Partial:
DistributedDataParallelConfig,
check_for_nan_in_grad=True,
grad_reduce_in_fp32=True,
+ overlap_grad_reduce=True,
+ overlap_param_gather=True,
),
),
plugins=nl.MegatronMixedPrecision(precision="bf16-mixed"),
@@ -96,12 +138,18 @@ def pretrain() -> run.Partial:
callbacks=[
run.Config(
ModelCheckpoint,
- monitor='reduced_train_loss',
- filename='{epoch}-{step}',
+ monitor='global_step',
+ filename='{global_step}',
every_n_train_steps=1000,
- save_top_k=-1,
+ save_top_k=3,
+ mode='max',
),
run.Config(PreemptionCallback),
+ run.Config(TimingCallback),
+ run.Config(
+ MegatronCommOverlapCallback,
+ tp_comm_overlap=False,
+ ),
],
),
log=nl.NeMoLogger(wandb=(WandbLogger() if "WANDB_API_KEY" in os.environ else None)),
@@ -129,6 +177,7 @@ def pretrain() -> run.Partial:
@run.cli.factory(target=llm.train)
def pretrain_xl() -> run.Partial:
+ """DiT-XL Pretraining Recipe"""
recipe = pretrain()
recipe.model.config = run.Config(DiTXLConfig)
return recipe
@@ -136,13 +185,89 @@ def pretrain_xl() -> run.Partial:
@run.cli.factory(target=llm.train)
def pretrain_l() -> run.Partial:
+ """DiT-L Pretraining Recipe"""
recipe = pretrain()
recipe.model.config = run.Config(DiTLConfig)
return recipe
+@run.cli.factory(target=llm.train)
+def train_mock() -> run.Partial:
+ """DiT Mock Pretraining Recipe"""
+ recipe = pretrain()
+ recipe.model.config = run.Config(DiTLlama5BConfig, max_frames=1)
+ recipe.data = multimodal_fake_datamodule()
+ recipe.model.config.num_layers = 16
+ recipe.data.seq_length = 73728
+ recipe.data.task_encoder.seq_length = 73728
+ recipe.trainer.strategy.tensor_model_parallel_size = 4
+ recipe.trainer.strategy.sequence_parallel = True
+ recipe.trainer.strategy.context_parallel_size = 2
+ recipe.data.micro_batch_size = 1
+ recipe.data.global_batch_size = 1
+ recipe.trainer.limit_val_batches = 0
+ recipe.trainer.val_check_interval = 1.0
+ recipe.data.model_config = recipe.model.config
+ recipe.log.log_dir = 'nemo_experiments/train_mock'
+
+ recipe.trainer.strategy.ddp.with_megatron_fsdp_code_path = True
+ recipe.trainer.strategy.ddp.data_parallel_sharding_strategy = 'MODEL_AND_OPTIMIZER_STATES'
+ recipe.trainer.strategy.ddp.overlap_param_gather = True
+ recipe.trainer.strategy.ddp.overlap_grad_reduce = True
+ recipe.model.config.use_cpu_initialization = True
+
+ return recipe
+
+
+@run.cli.factory(target=llm.train)
+def mock_ditllama5b_8k() -> run.Partial:
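+    """Mock-data benchmark recipe for MovieGen 5B at 8k context length (with Nsys profiling)."""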
+ recipe = pretrain()
+ recipe.model.config = run.Config(DiTLlama5BConfig, max_frames=1)
+ recipe.data = multimodal_fake_datamodule()
+ recipe.data.seq_length = recipe.data.task_encoder.seq_length = 8192
+ recipe.trainer.strategy.tensor_model_parallel_size = 2
+ recipe.trainer.strategy.sequence_parallel = True
+ recipe.trainer.strategy.context_parallel_size = 1
+ recipe.data.micro_batch_size = 1
+ recipe.data.global_batch_size = 32
+ recipe.trainer.limit_val_batches = 0
+ recipe.trainer.val_check_interval = 1.0
+ recipe.data.model_config = recipe.model.config
+ recipe.log.log_dir = 'nemo_experiments/mock_ditllama5b_8k'
+ recipe.model.config.attn_mask_type = AttnMaskType.no_mask
+ recipe.trainer.strategy.ddp.with_megatron_fsdp_code_path = True
+ recipe.trainer.strategy.ddp.data_parallel_sharding_strategy = 'MODEL_AND_OPTIMIZER_STATES'
+ recipe.trainer.strategy.ddp.overlap_param_gather = True
+ recipe.trainer.strategy.ddp.overlap_grad_reduce = True
+ recipe.model.config.use_cpu_initialization = True
+ recipe.trainer.max_steps = 15
+ recipe.trainer.callbacks.pop(0)
+ recipe.trainer.enable_checkpointing = False
+ recipe.trainer.callbacks.append(
+ run.Config(
+ NsysCallback,
+ start_step=10,
+ end_step=11,
+ )
+ )
+ recipe.resume = None
+ return recipe
+
+
+@run.cli.factory(target=llm.train)
+def mock_dit7b_8k() -> run.Partial:
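+    """Mock-data benchmark recipe for DiT-7B at 8k context length."""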
+ recipe = mock_ditllama5b_8k()
+ recipe.model.config = run.Config(DiT7BConfig, max_frames=1)
+ recipe.data.model_config = recipe.model.config
+ recipe.model.config.attn_mask_type = AttnMaskType.no_mask
+ recipe.model.config.use_cpu_initialization = True
+ recipe.log.log_dir = 'nemo_experiments/mock_dit7b_8k'
+ return recipe
+
+
@run.cli.factory(target=llm.train)
def pretrain_7b() -> run.Partial:
+ """DiT-7B Pretraining Recipe"""
recipe = pretrain()
recipe.model.config = run.Config(DiT7BConfig)
recipe.data.global_batch_size = 4608
@@ -161,8 +286,59 @@ def pretrain_7b() -> run.Partial:
return recipe
+@run.cli.factory(target=llm.train)
+def pretrain_7b_pack() -> run.Partial:
+ """DiT-7B Pretraining Recipe with Packing"""
+ recipe = pretrain_7b()
+ recipe.data.global_batch_size = 4608 // 9
+ recipe.data.micro_batch_size = 1
+ recipe.data.num_workers = 15
+ recipe.data.use_train_split_for_val = True
+ recipe.data.seq_length = 256 * 9
+ recipe.data.packing_buffer_size = 1000
+ recipe.data.task_encoder.seq_length = None
+ recipe.data.task_encoder.max_seq_length = recipe.data.seq_length
+ recipe.model.config.qkv_format = 'thd'
+ return recipe
+
+
+@run.cli.factory(target=llm.train)
+def pretrain_7b_256p_joint() -> run.Partial:
+ """DiT-7B Pretraining Recipe 256p Stage 1"""
+ recipe = pretrain_7b()
+ recipe.data.global_batch_size = 256 # 768
+ recipe.data.micro_batch_size = 1
+ recipe.data.seq_length = 8192
+ recipe.data.task_encoder.seq_length = 8192
+ recipe.model.config.seq_length = 8192
+
+ recipe.optim.config.lr = 6e-5
+ recipe.trainer.strategy.tensor_model_parallel_size = 2
+ recipe.trainer.strategy.sequence_parallel = True
+ recipe.trainer.strategy.ddp.overlap_grad_reduce = True
+ # recipe.resume.restore_config = run.Config(RestoreConfig, path='', load_optim_state=True)
+ recipe.log.log_dir = 'nemo_experiments/pretrain_7b_256p_joint'
+ return recipe
+
+
+@run.cli.factory(target=llm.train)
+def pretrain_7b_256p_joint_pack() -> run.Partial:
+ """DiT-7B Pretraining Recipe 256p Stage 1 with Packing"""
+ recipe = pretrain_7b_256p_joint()
+ recipe.data.global_batch_size = 128
+ recipe.data.micro_batch_size = 1
+ recipe.data.num_workers = 10
+ recipe.data.seq_length = recipe.model.config.seq_length = recipe.data.task_encoder.max_seq_length = 10240
+ recipe.data.task_encoder.seq_length = None
+ recipe.data.packing_buffer_size = 1000
+ recipe.data.virtual_epoch_length = 0
+ recipe.model.config.qkv_format = 'thd'
+ return recipe
+
+
@run.cli.factory(target=llm.train)
def pretrain_ditllama5b() -> run.Partial:
+ """MovieGen 5B Training"""
recipe = pretrain_7b()
recipe.data.micro_batch_size = 12
recipe.model.config = run.Config(DiTLlama5BConfig)
@@ -172,30 +348,200 @@ def pretrain_ditllama5b() -> run.Partial:
@run.cli.factory(target=llm.train)
def pretrain_ditllama30b() -> run.Partial:
+ """MovieGen 30B Stage 1 Training"""
recipe = pretrain_ditllama5b()
recipe.model.config = run.Config(DiTLlama30BConfig)
recipe.data.global_batch_size = 9216
recipe.data.micro_batch_size = 6
- recipe.log.log_dir = 'nemo_experiments/ditllama30b'
+ recipe.data.task_encoder.aethetic_score = 4.0
+ recipe.data.seq_length = 256
+ recipe.data.task_encoder.seq_length = 256
+ recipe.data.virtual_epoch_length = 0
+ recipe.log.log_dir = 'nemo_experiments/ditllama30b_stage1_mock'
+ recipe.trainer.strategy.ddp.with_megatron_fsdp_code_path = True
+ recipe.trainer.strategy.ddp.data_parallel_sharding_strategy = 'MODEL_AND_OPTIMIZER_STATES'
+ recipe.trainer.strategy.ddp.overlap_param_gather = True
+ recipe.trainer.strategy.ddp.overlap_grad_reduce = True
+ recipe.model.config.use_cpu_initialization = True
+ return recipe
+
+
+@run.cli.factory(target=llm.train)
+def pretrain_ditllama30b_stage2_mock() -> run.Partial:
+ """MovieGen 30B Stage 2 Training"""
+ recipe = pretrain_ditllama5b()
+ recipe.model.config = run.Config(DiTLlama30BConfig)
+ recipe.data = multimodal_fake_datamodule()
+ recipe.data.model_config = recipe.model.config
+ recipe.data.seq_length = 8192
+ recipe.data.task_encoder.seq_length = 8192
+ recipe.data.global_batch_size = 256
+ recipe.data.micro_batch_size = 1
+ recipe.trainer.strategy.tensor_model_parallel_size = 2
+ recipe.trainer.strategy.context_parallel_size = 4
+ recipe.trainer.strategy.sequence_parallel = True
+ recipe.trainer.limit_val_batches = 0
+ recipe.trainer.val_check_interval = 1.0
+ recipe.data.model_config = recipe.model.config
+ recipe.log.log_dir = 'nemo_experiments/ditllama30b_stage2_mock'
+ recipe.trainer.strategy.ddp.with_megatron_fsdp_code_path = True
+ recipe.trainer.strategy.ddp.data_parallel_sharding_strategy = 'MODEL_AND_OPTIMIZER_STATES'
+ recipe.trainer.strategy.ddp.overlap_param_gather = True
+ recipe.trainer.strategy.ddp.overlap_grad_reduce = True
+ recipe.model.config.use_cpu_initialization = True
+ return recipe
+
+
+@run.cli.factory(target=llm.train)
+def pretrain_ditllama30b_stage3_mock() -> run.Partial:
+ """MovieGen 30B Stage 3 Training"""
+ recipe = pretrain_ditllama5b()
+ recipe.model.config = run.Config(DiTLlama30BConfig)
+ recipe.data = multimodal_fake_datamodule()
+ recipe.data.model_config = recipe.model.config
+ recipe.data.seq_length = 73728
+ recipe.data.task_encoder.seq_length = 73728
+ recipe.data.global_batch_size = 256
+ recipe.data.micro_batch_size = 1
+ recipe.trainer.strategy.tensor_model_parallel_size = 2
+ recipe.trainer.strategy.context_parallel_size = 8
+ recipe.trainer.strategy.sequence_parallel = True
+ recipe.trainer.limit_val_batches = 0
+ recipe.trainer.val_check_interval = 1.0
+ recipe.data.model_config = recipe.model.config
+ recipe.log.log_dir = 'nemo_experiments/ditllama30b_stage3_mock'
+ recipe.trainer.strategy.ddp.with_megatron_fsdp_code_path = True
+ recipe.trainer.strategy.ddp.data_parallel_sharding_strategy = 'MODEL_AND_OPTIMIZER_STATES'
+ recipe.trainer.strategy.ddp.overlap_param_gather = True
+ recipe.trainer.strategy.ddp.overlap_grad_reduce = True
+ recipe.model.config.use_cpu_initialization = True
+ return recipe
+
+
+@run.cli.factory(target=llm.train)
+def pretrain_ditllama5b_stage3_mock_with_pp() -> run.Partial:
+ """MovieGen 30B Stage 3 Training"""
+ recipe = pretrain_ditllama5b()
+ recipe.data = multimodal_fake_datamodule()
+ recipe.data.model_config = recipe.model.config
+ recipe.data.seq_length = 8192
+ recipe.data.task_encoder.seq_length = 8192
+ recipe.data.global_batch_size = 1
+ recipe.data.micro_batch_size = 1
+ recipe.trainer.strategy.tensor_model_parallel_size = 2
+ recipe.trainer.strategy.pipeline_model_parallel_size = 2
+ recipe.trainer.strategy.context_parallel_size = 2
+ recipe.trainer.strategy.sequence_parallel = True
+ recipe.trainer.limit_val_batches = 0
+ recipe.trainer.val_check_interval = 1.0
+ recipe.data.model_config = recipe.model.config
+    recipe.log.log_dir = 'nemo_experiments/ditllama5b_stage3_mock_with_pp'
+ return recipe
+
+
+@run.cli.factory(target=llm.train)
+def pretrain_ditllama30b_stage3_mock_with_pp() -> run.Partial:
+ """MovieGen 30B Stage 3 Training with Pipeline Parallelism"""
+ recipe = pretrain_ditllama5b()
+ recipe.model.config = run.Config(DiTLlama30BConfig)
+ recipe.data = multimodal_fake_datamodule()
+ recipe.data.model_config = recipe.model.config
+ recipe.data.seq_length = 73728
+ recipe.data.task_encoder.seq_length = 73728
+ recipe.data.global_batch_size = 256
+ recipe.data.micro_batch_size = 1
+ recipe.trainer.strategy.tensor_model_parallel_size = 4
+ recipe.trainer.strategy.pipeline_model_parallel_size = 4
+ recipe.trainer.strategy.context_parallel_size = 8
+ recipe.trainer.strategy.sequence_parallel = True
+ recipe.trainer.limit_val_batches = 0
+ recipe.trainer.val_check_interval = 1.0
+ recipe.data.model_config = recipe.model.config
+ recipe.log.log_dir = 'nemo_experiments/ditllama30b_stage3_mock_with_pp'
+ return recipe
+
+
+@run.cli.factory(target=llm.train)
+def pretrain_ditllama1b() -> run.Partial:
+ """MovieGen 1B Stage 1 Training"""
+ recipe = pretrain_ditllama5b()
+ recipe.model.config = run.Config(DiTLlama1BConfig)
+ recipe.data.task_encoder.aethetic_score = 4.0
+ recipe.data.seq_length = 256
+ recipe.data.task_encoder.seq_length = 256
+ recipe.model.config.seq_length = 256
+ recipe.data.global_batch_size = 1536
+ recipe.data.micro_batch_size = 96
+ recipe.trainer.strategy.ddp.overlap_grad_reduce = True
+ recipe.log.log_dir = 'nemo_experiments/ditllama1b'
+ recipe.trainer.val_check_interval = 3000
+ recipe.trainer.callbacks[0].every_n_train_steps = 3000
+ recipe.trainer.callbacks[0].monitor = 'global_step'
+ recipe.trainer.callbacks[0].save_top_k = 3
+ recipe.trainer.callbacks[0].mode = 'max'
+ return recipe
+
+
+@run.cli.factory(target=llm.train)
+def pretrain_ditllama3b() -> run.Partial:
+ """MovieGen 3B Stage 1 Training"""
+ recipe = pretrain_ditllama1b()
+ recipe.data.micro_batch_size = 48
+ recipe.model.config = run.Config(
+ DiTLlama1BConfig,
+ hidden_size=3072,
+ num_layers=28,
+ num_attention_heads=24,
+ ffn_hidden_size=8192,
+ )
+ recipe.log.log_dir = 'nemo_experiments/ditllama3b'
+
+ return recipe
+
+
+@run.cli.factory(target=llm.train)
+def pretrain_ecditllama1b() -> run.Partial:
+ """EC-DiT 1B Training"""
+ recipe = pretrain_ditllama1b()
+ recipe.data.task_encoder.aethetic_score = 5.0
+ recipe.data.micro_batch_size = 72
+ recipe.data.global_batch_size = 2304
+ recipe.model.config = run.Config(ECDiTLlama1BConfig)
+ recipe.log.log_dir = 'nemo_experiments/ecditllama1b'
+ recipe.trainer.val_check_interval = 3000
+
+ recipe.trainer.strategy.ddp.with_megatron_fsdp_code_path = True
+ recipe.trainer.strategy.ddp.data_parallel_sharding_strategy = 'MODEL_AND_OPTIMIZER_STATES'
+ recipe.trainer.strategy.ddp.overlap_param_gather = True
+ recipe.trainer.strategy.ddp.overlap_grad_reduce = True
+ recipe.model.config.use_cpu_initialization = True
+
return recipe
@run.cli.factory(target=llm.train)
def dreambooth() -> run.Partial:
+ """Dreambooth Fine Tuning"""
recipe = pretrain()
recipe.optim.config.lr = 1e-6
recipe.data = multimodal_datamodule()
recipe.model.config = run.Config(DiTConfig)
-
recipe.trainer.max_steps = 1000
recipe.trainer.strategy.tensor_model_parallel_size = 8
recipe.trainer.strategy.sequence_parallel = True
-
recipe.resume.restore_config = run.Config(RestoreConfig)
recipe.resume.resume_if_exists = False
-
return recipe
if __name__ == "__main__":
+ OOM_DEBUG = False
+ if OOM_DEBUG:
+ torch.cuda.memory._record_memory_history(
+ True,
+ # Keep 100,000 alloc/free events from before the snapshot
+ trace_alloc_max_entries=100000,
+ # Record stack information for the trace events
+ trace_alloc_record_context=True,
+ )
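+ # After an OOM, the recorded history can be dumped for offline inspection,
+ # e.g. torch.cuda.memory._dump_snapshot("oom_snapshot.pickle") (private PyTorch API),
+ # and loaded into PyTorch's memory-trace visualizer.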
run.cli.main(llm.train, default_factory=dreambooth)
diff --git a/nemo/collections/llm/__init__.py b/nemo/collections/llm/__init__.py
index 3fe20173cba2..c36da39b43c7 100644
--- a/nemo/collections/llm/__init__.py
+++ b/nemo/collections/llm/__init__.py
@@ -19,6 +19,7 @@
from nemo.collections.llm import peft
from nemo.collections.llm.gpt.data import (
+ AlpacaDataModule,
DollyDataModule,
FineTuningDataModule,
HfDatasetDataModule,
@@ -26,7 +27,7 @@
PreTrainingDataModule,
SquadDataModule,
)
-from nemo.collections.llm.gpt.data.api import dolly, mock, squad
+from nemo.collections.llm.gpt.data.api import dolly, hf_dataset, mock, squad
from nemo.collections.llm.gpt.model import (
Baichuan2Config,
Baichuan2Config7B,
@@ -72,6 +73,8 @@
Llama31Config8B,
Llama31Config70B,
Llama31Config405B,
+ Llama32Config1B,
+ Llama32Config3B,
LlamaConfig,
LlamaModel,
MaskedTokenLossReduction,
@@ -85,13 +88,16 @@
MixtralModel,
Nemotron3Config4B,
Nemotron3Config8B,
+ Nemotron3Config22B,
Nemotron4Config15B,
- Nemotron4Config22B,
Nemotron4Config340B,
NemotronConfig,
NemotronModel,
NVIDIAMambaConfig8B,
NVIDIAMambaHybridConfig8B,
+ Phi3Config,
+ Phi3ConfigMini,
+ Phi3Model,
Qwen2Config,
Qwen2Config1P5B,
Qwen2Config7B,
@@ -111,10 +117,15 @@
gpt_forward_step,
)
from nemo.collections.llm.quantization import Quantizer, get_calib_data_iter
+from nemo.collections.llm.t5.data import FineTuningDataModule as T5FineTuningDataModule
+from nemo.collections.llm.t5.data import MockDataModule as T5MockDataModule
+from nemo.collections.llm.t5.data import PreTrainingDataModule as T5PreTrainingDataModule
+from nemo.collections.llm.t5.data import SquadDataModule as T5SquadDataModule
from nemo.collections.llm.t5.model import T5Config, T5Model, t5_data_step, t5_forward_step
__all__ = [
"MockDataModule",
+ "T5MockDataModule",
"GPTModel",
"GPTConfig",
"gpt_data_step",
@@ -138,10 +149,13 @@
"NemotronModel",
"Nemotron3Config4B",
"Nemotron3Config8B",
+ "Nemotron3Config22B",
"Nemotron4Config15B",
- "Nemotron4Config22B",
"Nemotron4Config340B",
"NemotronConfig",
+ "Phi3Config",
+ "Phi3ConfigMini",
+ "Phi3Model",
"SSMConfig",
"BaseMambaConfig130M",
"BaseMambaConfig370M",
@@ -159,6 +173,8 @@
"Llama31Config8B",
"Llama31Config70B",
"Llama31Config405B",
+ "Llama32Config1B",
+ "Llama32Config3B",
"CodeLlamaConfig7B",
"CodeLlamaConfig13B",
"CodeLlamaConfig34B",
@@ -191,12 +207,17 @@
"PreTrainingDataModule",
"FineTuningDataModule",
"SquadDataModule",
+ "T5PreTrainingDataModule",
+ "T5FineTuningDataModule",
+ "T5SquadDataModule",
+ "T5MockDataModule",
"DollyDataModule",
"tokenizer",
"mock",
"squad",
"dolly",
"peft",
+ "hf_dataset",
"HfAutoModelForCausalLM",
]
@@ -206,7 +227,7 @@
try:
import nemo_run as run
- from nemo.collections.llm.api import export_ckpt, finetune, generate, import_ckpt, pretrain, train, validate
+ from nemo.collections.llm.api import export_ckpt, finetune, generate, import_ckpt, pretrain, ptq, train, validate
from nemo.collections.llm.recipes import * # noqa
__all__.extend(
@@ -218,6 +239,7 @@
"validate",
"finetune",
"generate",
+ "ptq",
]
)
except ImportError as error:
@@ -229,3 +251,10 @@
__all__.append("deploy")
except ImportError as error:
logging.warning(f"The deploy module could not be imported: {error}")
+
+try:
+ from nemo.collections.llm.api import evaluate
+
+ __all__.append("evaluate")
+except ImportError as error:
+ logging.warning(f"The evaluate module could not be imported: {error}")
diff --git a/nemo/collections/llm/api.py b/nemo/collections/llm/api.py
index 4f47f5c4bc73..4bafdd97ba21 100644
--- a/nemo/collections/llm/api.py
+++ b/nemo/collections/llm/api.py
@@ -11,24 +11,35 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
import json
import os
from copy import deepcopy
from pathlib import Path
from typing import TYPE_CHECKING, Any, Callable, Optional, Union
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from megatron.core import parallel_state
from rich.console import Console
+from torch.distributed import all_gather_object
from typing_extensions import Annotated
import nemo.lightning as nl
-from nemo.lightning import AutoResume, NeMoLogger, OptimizerModule, Trainer, io
+from nemo.collections.llm.quantization import ExportConfig, QuantizationConfig
+from nemo.lightning import (
+ AutoResume,
+ NeMoLogger,
+ OptimizerModule,
+ Trainer,
+ configure_no_restart_validation_training_loop,
+ io,
+)
from nemo.lightning.base import NEMO_MODELS_CACHE
from nemo.lightning.pytorch.callbacks import PEFT, ModelTransform
from nemo.utils import logging
+from nemo.utils.get_rank import is_global_rank_zero
+
if TYPE_CHECKING:
from megatron.core.inference.common_inference_params import CommonInferenceParams
@@ -61,7 +72,8 @@ def train(
resume (Optional[Union[AutoResume, Resume]]): Resume training from a checkpoint.
optim (Optional[OptimizerModule]): The optimizer module to be used. If not provided, the default optimizer
from the model will be used.
- tokenizer (Optional[TokenizerType]): Tokenizer setting to be applied. Can be 'data' or 'model' or an instance of TokenizerSpec.
+ tokenizer (Optional[TokenizerType]): Tokenizer setting to be applied. Can be 'data' or 'model'
+ or an instance of TokenizerSpec.
export (Optional[str]): Filename to save the exported checkpoint after training.
model_transform (Optional[Union[Callable[[nn.Module], nn.Module], PEFT]]): A model transform to be applied.
@@ -77,7 +89,7 @@ def train(
>>> data = llm.SquadDataModule(seq_length=4096, global_batch_size=16, micro_batch_size=2)
>>> precision = nl.MegatronMixedPrecision(precision="bf16-mixed")
>>> trainer = nl.Trainer(strategy=nl.MegatronStrategy(tensor_model_parallel_size=2), plugins=precision)
- >>> train(model, data, trainer, tokenizer="data")
+ >>> llm.train(model, data, trainer, tokenizer="data")
PosixPath('/path/to/log_dir')
"""
app_state = _setup(
@@ -179,7 +191,7 @@ def finetune(
>>> data = llm.SquadDataModule(seq_length=4096, global_batch_size=16, micro_batch_size=2)
>>> precision = nl.MegatronMixedPrecision(precision="bf16-mixed")
>>> trainer = nl.Trainer(strategy=nl.MegatronStrategy(tensor_model_parallel_size=2), plugins=precision)
- >>> finetune(model, data, trainer, peft=llm.peft.LoRA()])
+ >>> llm.finetune(model, data, trainer, peft=llm.peft.LoRA()])
PosixPath('/path/to/log_dir')
"""
@@ -217,7 +229,8 @@ def validate(
resume (Optional[AutoResume]): Resume from a checkpoint for validation.
optim (Optional[OptimizerModule]): The optimizer module to be used. If not provided, the default optimizer
from the model will be used.
- tokenizer (Optional[TokenizerType]): Tokenizer setting to be applied. Can be 'data' or 'model' or an instance of TokenizerSpec.
+ tokenizer (Optional[TokenizerType]): Tokenizer setting to be applied. Can be 'data' or 'model'
+ or an instance of TokenizerSpec.
model_transform (Optional[Union[Callable[[nn.Module], nn.Module], PEFT]]): A model transform to be applied.
Returns:
@@ -230,7 +243,7 @@ def validate(
>>> data = llm.SquadDataModule(seq_length=4096, global_batch_size=16, micro_batch_size=2)
>>> precision = nl.MegatronMixedPrecision(precision="bf16-mixed")
>>> trainer = nl.Trainer(strategy=nl.MegatronStrategy(tensor_model_parallel_size=2), plugins=precision)
- >>> validate(model, data, trainer, tokenizer="data")
+ >>> llm.validate(model, data, trainer, tokenizer="data")
PosixPath('/path/to/log_dir')
"""
app_state = _setup(
@@ -249,84 +262,67 @@ def validate(
return app_state.exp_dir
-def get_trtllm_deployable(
- nemo_checkpoint,
- model_type,
- triton_model_repository,
- num_gpus,
- tensor_parallelism_size,
- pipeline_parallelism_size,
- max_input_len,
- max_output_len,
- max_batch_size,
- dtype,
-):
- from nemo.export.tensorrt_llm import TensorRTLLM
+@run.cli.entrypoint(name="ptq", namespace="llm")
+def ptq(
+ nemo_checkpoint: str,
+ calib_tp: int = 1,
+ calib_pp: int = 1,
+ quantization_config: Annotated[Optional[QuantizationConfig], run.Config[QuantizationConfig]] = None,
+ export_config: Optional[Union[ExportConfig, run.Config[ExportConfig]]] = None,
+) -> Path:
+ # TODO: Fix "nemo_run.cli.cli_parser.CLIException: An unexpected error occurred (Argument: , Context: {})"
+ """
+ Applies Post-Training Quantization (PTQ) to a model using the specified quantization and export configs. It runs
+ calibration on a small dataset to collect the scaling factors for the low-precision GEMMs used by the desired
+ quantization method. This function produces a TensorRT-LLM checkpoint ready for deployment using the nemo.export
+ and nemo.deploy modules or directly using the TensorRT-LLM library.
+ The function can be used through the NeMo CLI in the following way:
+ ```bash
+ # Run calibration with tensor parallelism 8 and export the quantized checkpoint with tensor parallelism 2
+ nemo llm ptq nemo_checkpoint=/models/Llama-3-70B \
+ export_config.path=/models/Llama-3-70B-FP8 \
+ calib_tp=8 \
+ export_config.inference_tensor_parallel=2
+ # Choose a different quantization method, for example INT8 SmoothQuant
+ nemo llm ptq nemo_checkpoint=/models/Llama-3-8B \
+ export_config.path=/models/Llama-3-8B-INT8_SQ \
+ quantization_config.algorithm=int8_sq
+ ```
+ Args:
+ nemo_checkpoint (str): The path to model to be quantized.
+ calib_tp (int): Calibration tensor parallelism.
+ calib_pp (int): Calibration pipeline parallelism.
+ quantization_config (QuantizationConfig): Configuration for quantization algorithm.
+ export_config (ExportConfig): Export configuration for TensorRT-LLM checkpoint.
+ Returns:
+ Path: The path where the quantized checkpoint has been saved after calibration.
+ """
+ if export_config.path is None:
+ raise ValueError("The export_config.path needs to be specified, got None.")
- if triton_model_repository is None:
- trt_llm_path = "/tmp/trt_llm_model_dir/"
- Path(trt_llm_path).mkdir(parents=True, exist_ok=True)
- else:
- trt_llm_path = triton_model_repository
+ from nemo.collections.llm import quantization
- if nemo_checkpoint is None and triton_model_repository is None:
- raise ValueError(
- "The provided model repository is not a valid TensorRT-LLM model "
- "directory. Please provide a --nemo_checkpoint or a TensorRT-LLM engine."
- )
+ quantizer = quantization.Quantizer(quantization_config, export_config)
- if nemo_checkpoint is None and not os.path.isdir(triton_model_repository):
- raise ValueError(
- "The provided model repository is not a valid TensorRT-LLM model "
- "directory. Please provide a --nemo_checkpoint or a valid TensorRT-LLM engine."
- )
+ model = quantization.load_with_modelopt_layer_spec(nemo_checkpoint, calib_tp, calib_pp)
- if nemo_checkpoint is not None and model_type is None:
- raise ValueError("Model type is required to be defined if a nemo checkpoint is provided.")
+ model = quantizer.quantize(model)
- trt_llm_exporter = TensorRTLLM(
- model_dir=trt_llm_path,
- load_model=(nemo_checkpoint is None),
- )
+ quantizer.export(model, nemo_checkpoint)
- if nemo_checkpoint is not None:
- try:
- logging.info("Export operation will be started to export the nemo checkpoint to TensorRT-LLM.")
- trt_llm_exporter.export(
- nemo_checkpoint_path=nemo_checkpoint,
- model_type=model_type,
- n_gpus=num_gpus,
- tensor_parallelism_size=tensor_parallelism_size,
- pipeline_parallelism_size=pipeline_parallelism_size,
- max_input_len=max_input_len,
- max_output_len=max_output_len,
- max_batch_size=max_batch_size,
- dtype=dtype,
- )
- except Exception as error:
- raise RuntimeError("An error has occurred during the model export. Error message: " + str(error))
-
- return trt_llm_exporter
-
-
-def store_args_to_json(triton_http_address, triton_port, triton_request_timeout, openai_format_response):
- args_dict = {
- "triton_service_ip": triton_http_address,
- "triton_service_port": triton_port,
- "triton_request_timeout": triton_request_timeout,
- "openai_format_response": openai_format_response,
- }
- with open("nemo/deploy/service/config.json", "w") as f:
- json.dump(args_dict, f)
+ console = Console()
+ console.print(f"[green]β PTQ succeded, quantized checkpoint exported to {export_config.path}[/green]")
+
+ return export_config.path
@run.cli.entrypoint(namespace="llm")
def deploy(
nemo_checkpoint: Path = None,
model_type: str = "llama",
- triton_model_name: str = "xxx",
+ triton_model_name: str = 'triton_model',
triton_model_version: Optional[int] = 1,
- triton_port: int = 8080,
+ triton_port: int = 8000,
triton_http_address: str = "0.0.0.0",
triton_request_timeout: int = 60,
triton_model_repository: Path = None,
@@ -337,21 +333,61 @@ def deploy(
max_input_len: int = 256,
max_output_len: int = 256,
max_batch_size: int = 8,
- start_rest_service: bool = False,
+ start_rest_service: bool = True,
rest_service_http_address: str = "0.0.0.0",
- rest_service_port: int = 8000,
- openai_format_response: bool = False,
+ rest_service_port: int = 8080,
+ openai_format_response: bool = True,
+ output_generation_logits: bool = True,
):
+ """
+ Deploys a nemo model on a PyTriton server by converting the nemo ckpt to trtllm.
+ Also starts a REST service that is used to send OpenAI-API-compatible input requests
+ to the PyTriton server.
+
+ Args:
+ nemo_checkpoint (Path): Path for nemo checkpoint.
+ model_type (str): Type of the model. Choices: gpt, llama, falcon, starcoder. Default: llama.
+ triton_model_name (str): Name for the model that gets deployed on PyTriton. Please ensure that the same model
+ name is passed to the evaluate method for the model to be accessible while sending evaluation requests.
+ Default: 'triton_model'.
+ triton_model_version (Optional[int]): Version for the triton model. Default: 1.
+ triton_port (int): Port for the PyTriton server. Default: 8000.
+ triton_http_address (str): HTTP address for the PyTriton server. Default: "0.0.0.0".
+ triton_request_timeout (int): Timeout in seconds for Triton server. Default: 60.
+ triton_model_repository (Path): Folder for the trt-llm conversion, trt-llm engine gets saved in this specified
+ path. If None, saves it in /tmp dir. Default: None.
+ num_gpus (int): Number of GPUs for export to trtllm and deploy. Default: 1.
+ tensor_parallelism_size (int): Tensor parallelism size. Default: 1.
+ pipeline_parallelism_size (int): Pipeline parallelism size. Default: 1.
+ dtype (str): dtype of the TensorRT-LLM model. Default: "bfloat16".
+ max_input_len (int): Max input length of the model. Default: 256.
+ max_output_len (int): Max output length of the model. Default: 256.
+ max_batch_size (int): Max batch size of the model. Default: 8.
+ start_rest_service (bool): Start rest service that is used to send evaluation requests to the PyTriton server.
+ Needs to be True to be able to run evaluation. Default: True.
+ rest_service_http_address (str): HTTP address for the rest service. Default: "0.0.0.0".
+ rest_service_port (int): Port for the rest service. Default: 8080.
+ openai_format_response (bool): Return the response from PyTriton server in OpenAI compatible format. Needs to
+ be True while running evaluation. Default: True.
+ output_generation_logits (bool): If True builds trtllm engine with gather_generation_logits set to True.
+ generation_logits are used to compute the logProb of the output token. Default: True.
+ """
+ from nemo.collections.llm import deploy
from nemo.deploy import DeployPyTriton
+ deploy.unset_environment_variables()
if start_rest_service:
if triton_port == rest_service_port:
logging.error("REST service port and Triton server port cannot use the same port.")
return
- # Store triton ip, port and other args relevant for REST API in config.json to be accessible by rest_model_api.py
- store_args_to_json(triton_http_address, triton_port, triton_request_timeout, openai_format_response)
-
- triton_deployable = get_trtllm_deployable(
+ # Store triton ip, port and other args relevant for REST API as env vars to be accessible by rest_model_api.py
+ os.environ['TRITON_HTTP_ADDRESS'] = triton_http_address
+ os.environ['TRITON_PORT'] = str(triton_port)
+ os.environ['TRITON_REQUEST_TIMEOUT'] = str(triton_request_timeout)
+ os.environ['OPENAI_FORMAT_RESPONSE'] = str(openai_format_response)
+ os.environ['OUTPUT_GENERATION_LOGITS'] = str(output_generation_logits)
+
+ triton_deployable = deploy.get_trtllm_deployable(
nemo_checkpoint,
model_type,
triton_model_repository,
@@ -362,6 +398,7 @@ def deploy(
max_output_len,
max_batch_size,
dtype,
+ output_generation_logits,
)
try:
@@ -376,6 +413,7 @@ def deploy(
logging.info("Triton deploy function will be called.")
nm.deploy()
+ nm.run()
except Exception as error:
logging.error("Error message has occurred during deploy function. Error message: " + str(error))
return
@@ -409,6 +447,81 @@ def deploy(
nm.stop()
+def evaluate(
+ nemo_checkpoint_path: Path,
+ url: str = "http://0.0.0.0:8080/v1",
+ model_name: str = "triton_model",
+ eval_task: str = "gsm8k",
+ num_fewshot: Optional[int] = None,
+ limit: Optional[Union[int, float]] = None,
+ bootstrap_iters: int = 100000,
+ # inference params
+ max_tokens_to_generate: Optional[int] = 256,
+ temperature: Optional[float] = 0.000000001,
+ top_p: Optional[float] = 0.0,
+ top_k: Optional[int] = 1,
+ add_bos: Optional[bool] = False,
+):
+ """
+ Evaluates nemo model deployed on PyTriton server (via trtllm) using lm-evaluation-harness
+ (https://github.com/EleutherAI/lm-evaluation-harness/tree/main).
+
+ Args:
+ nemo_checkpoint_path (Path): Path for nemo 2.0 checkpoint. This is used to get the tokenizer from the ckpt
+ which is required to tokenize the evaluation input and output prompts.
+ url (str): rest service url and port that were used in the deploy method above in the format:
+ http://{rest_service_http}:{rest_service_port}. Post requests with evaluation input prompts
+ (from lm-eval-harness) are sent to this url which is then passed to the model deployed on PyTriton server.
+ The rest service url and port serve as the entry point to evaluate model deployed on PyTriton server.
+ model_name (str): Name of the model that is deployed on the PyTriton server. It should be the same as the
+ triton_model_name passed to the deploy method above to be able to launch evaluation. Default: "triton_model".
+ eval_task (str): task to be evaluated on. For ex: "gsm8k", "gsm8k_cot", "mmlu", "lambada". Default: "gsm8k".
+ These are the tasks that are supported currently. Any other task of type generate_until or loglikelihood from
+ lm-evaluation-harness can be run, but only the above mentioned ones are tested. Tasks of type
+ loglikelihood_rolling are not supported yet.
+ num_fewshot (int): number of examples in few-shot context. Default: None.
+ limit (Union[int, float]): Limit the number of examples per task. If <1 (i.e. a float between 0 and 1), limit
+ is a fraction of the total number of examples. If an int x, evaluation runs on only x samples from the eval
+ dataset. Default: None, which means eval is run on the entire dataset.
+ bootstrap_iters (int): Number of iterations for bootstrap statistics, used when calculating stderrs. Set to 0
+ for no stderr calculations to be performed. Default: 100000.
+ # inference params
+ max_tokens_to_generate (int): max tokens to generate. Default: 256.
+ temperature: Optional[float]: float value between 0 and 1. A temp of 0 indicates greedy decoding, where the token
+ with the highest prob is chosen. Temperature can't be set to 0.0 currently, due to a bug with TRTLLM
+ (# TODO to be investigated). Hence a very small value is used as the default. Default: 0.000000001.
+ top_p: Optional[float]: float value between 0 and 1. limits to the top tokens within a certain probability.
+ top_p=0 means the model will only consider the single most likely token for the next prediction. Default: 0.0.
+ top_k: Optional[int]: limits to a certain number (K) of the top tokens to consider. top_k=1 means the model
+ will only consider the single most likely token for the next prediction. Default: 1
+ add_bos: Optional[bool]: whether a special token representing the beginning of a sequence should be added when
+ encoding a string. Default: False, since typically for CausalLM it's set to False. If needed, set add_bos to True.
+ """
+ try:
+ # lm-evaluation-harness import
+ from lm_eval import evaluator
+ except ImportError:
+ raise ImportError(
+ "Please ensure that lm-evaluation-harness is installed in your env as it is required " "to run evaluations"
+ )
+
+ from nemo.collections.llm import evaluation
+
+ # Get tokenizer from nemo ckpt. This works only with NeMo 2.0 ckpt.
+ tokenizer = io.load_context(Path(nemo_checkpoint_path) / 'context', subpath="model").tokenizer
+ # Wait for rest service to be ready before starting evaluation
+ evaluation.wait_for_rest_service(rest_url=f"{url}/v1/health")
+ # Create an object of the NeMoFWLM which is passed as a model to evaluator.simple_evaluate
+ model = evaluation.NeMoFWLMEval(
+ model_name, url, tokenizer, max_tokens_to_generate, temperature, top_p, top_k, add_bos
+ )
+ results = evaluator.simple_evaluate(
+ model=model, tasks=eval_task, limit=limit, num_fewshot=num_fewshot, bootstrap_iters=bootstrap_iters
+ )
+
+ print("score", results['results'][eval_task])
+
+
@run.cli.entrypoint(name="import", namespace="llm")
def import_ckpt(
model: pl.LightningModule,
@@ -553,9 +666,10 @@ def export_ckpt(
@run.cli.entrypoint(name="generate", namespace="llm")
def generate(
path: Union[Path, str],
- prompts: list[str],
trainer: nl.Trainer,
+ prompts: Optional[list[str]] = None,
encoder_prompts: Optional[list[str]] = None,
+ input_dataset: Optional[Union[pl.LightningDataModule, str]] = None,
params_dtype: torch.dtype = torch.bfloat16,
add_BOS: bool = False,
max_batch_size: int = 4,
@@ -563,27 +677,137 @@ def generate(
inference_batch_times_seqlen_threshold: int = 1000,
inference_params: Optional["CommonInferenceParams"] = None,
text_only: bool = False,
+ output_path: Optional[Union[Path, str]] = None,
) -> list[Union["InferenceRequest", str]]:
+ """
+ Generates text using a NeMo LLM model.
+
+ This function takes a checkpoint path and a list of prompts,
+ and generates text based on the loaded model and parameters.
+ It returns a list of generated text, either as a string or as an InferenceRequest object.
+
+ Python Usage:
+ ```python
+ strategy = nl.MegatronStrategy(
+ tensor_model_parallel_size=2,
+ pipeline_model_parallel_size=1,
+ context_parallel_size=1,
+ sequence_parallel=False,
+ setup_optimizers=False,
+ store_optimizer_states=False,
+ )
+
+ trainer = nl.Trainer(
+ accelerator="gpu",
+ devices=2,
+ num_nodes=1,
+ strategy=strategy,
+ plugins=nl.MegatronMixedPrecision(
+ precision="bf16-mixed",
+ params_dtype=torch.bfloat16,
+ pipeline_dtype=torch.bfloat16,
+ autocast_enabled=False,
+ grad_reduce_in_fp32=False,
+ ),
+ )
+ prompts = [
+ "Hello, how are you?",
+ "How many r's are in the word 'strawberry'?",
+ "Which number is bigger? 10.119 or 10.19?",
+ ]
+
+ if __name__ == "__main__":
+ results = api.generate(
+ path=os.path.join(os.environ["NEMO_HOME"], "models", "meta-llama/Meta-Llama-3-8B"),
+ prompts=prompts,
+ trainer=trainer,
+ inference_params=CommonInferenceParams(temperature=0.1, top_k=10, num_tokens_to_generate=512),
+ text_only=True,
+ )
+ ```
+
+ Args:
+ path (Union[Path, str]): The path to the model checkpoint.
+ prompts (list[str]): The list of prompts to generate text for.
+ trainer (nl.Trainer): The trainer object.
+ encoder_prompts (Optional[list[str]], optional): The list of encoder prompts. Defaults to None.
+ input_dataset (Optional[Union[pl.LightningDataModule, str]], optional): The input data module or jsonl file.
+ If a data module is passed, its test set is used for generation. Defaults to None.
+ params_dtype (torch.dtype, optional): The data type of the model parameters. Defaults to torch.bfloat16.
+ add_BOS (bool, optional): Whether to add the beginning of sequence token. Defaults to False.
+ max_batch_size (int, optional): The maximum batch size. Defaults to 4.
+ random_seed (Optional[int], optional): The random seed. Defaults to None.
+ inference_batch_times_seqlen_threshold (int, optional): If batch-size times sequence-length is smaller than
+ this threshold then we will not use pipelining, otherwise we will. Defaults to 1000.
+ inference_params (Optional["CommonInferenceParams"], optional): The inference parameters defined in
+ Mcore's CommonInferenceParams. Defaults to None.
+ text_only (bool, optional): Whether to return only the generated text as a string. Defaults to False.
+ output_path (Optional[Union[Path, str]], optional): The path to save the generated text or test dataset
+ predictions. Defaults to None.
+
+ Returns:
+ list[Union["InferenceRequest", str]]: A list of generated text,
+ either as a string or as an InferenceRequest object.
+ """
from nemo.collections.llm import inference
+ if input_dataset is not None:
+ input_path = input_dataset if isinstance(input_dataset, str) else input_dataset.test_path
+ with open(input_path) as f:
+ dataset = [json.loads(sample) for sample in f.readlines()]
+ inputs = [sample["input"] for sample in dataset]
+ elif prompts is not None:
+ inputs = prompts
+ else:
+ raise ValueError("Either prompts or input_dataset must be provided.")
+
inference_wrapped_model, mcore_tokenizer = inference.setup_model_and_tokenizer(
path=path,
trainer=trainer,
params_dtype=params_dtype,
inference_batch_times_seqlen_threshold=inference_batch_times_seqlen_threshold,
)
- results = inference.generate(
+
+ dp_size = trainer.strategy.distributed_sampler_kwargs['num_replicas']
+ dp_rank = trainer.strategy.distributed_sampler_kwargs['rank']
+ chunk_size = (len(inputs) + dp_size - 1) // dp_size
+ start_idx = dp_rank * chunk_size
+ end_idx = min(start_idx + chunk_size, len(inputs))
+ inputs_on_this_dp_rank = inputs[start_idx:end_idx]
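+ # e.g. 10 inputs on dp_size=4 -> chunk_size=3; ranks take slices [0:3], [3:6], [6:9], [9:10]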
+
+ results_on_this_dp_rank = inference.generate(
model=inference_wrapped_model,
tokenizer=mcore_tokenizer,
- prompts=prompts,
+ prompts=inputs_on_this_dp_rank,
encoder_prompts=encoder_prompts,
add_BOS=add_BOS,
max_batch_size=max_batch_size,
random_seed=random_seed,
inference_params=inference_params,
)
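+ # All-gather generations across data-parallel ranks so every rank ends up with the full result list.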
+ gathered_results = [None] * dp_size
+
+ all_gather_object(
+ gathered_results,
+ [r.generated_text if text_only else r for r in results_on_this_dp_rank],
+ group=parallel_state.get_data_parallel_group(),
+ )
+ gathered_results = [result for sublist in gathered_results for result in sublist]
+
+ assert len(gathered_results) == len(inputs)
+
+ if output_path is not None and is_global_rank_zero():
+ with open(output_path, "w") as f:
+ for sample, pred in zip(dataset if input_dataset else inputs, gathered_results):
+ if isinstance(sample, dict):
+ sample["label"] = sample.pop("output", None)
+ sample["prediction"] = pred if text_only else pred.generated_text
+ elif isinstance(sample, str):
+ sample = {"input": sample, "prediction": pred if text_only else pred.generated_text}
+ f.write(json.dumps(sample) + "\n")
+ logging.info(f"Predictions written to {output_path}")
- return [r.generated_text if text_only else r for r in results]
+ return gathered_results
def _use_tokenizer(model: pl.LightningModule, data: pl.LightningDataModule, tokenizer: TokenizerType) -> None:
@@ -614,6 +838,7 @@ def _setup(
tokenizer: Optional[TokenizerType],
model_transform: Optional[Union[PEFT, ModelTransform, Callable]],
) -> Any: # Return type is Any because app_state's type is not specified
+ configure_no_restart_validation_training_loop(trainer)
_log = log or NeMoLogger()
if resume and isinstance(model_transform, PEFT) and _log.ckpt:
logging.info("Disabling try_restore_best_ckpt restoration for adapters")
diff --git a/nemo/collections/llm/deploy/__init__.py b/nemo/collections/llm/deploy/__init__.py
new file mode 100644
index 000000000000..24c102bfa0d2
--- /dev/null
+++ b/nemo/collections/llm/deploy/__init__.py
@@ -0,0 +1,3 @@
+from nemo.collections.llm.deploy.base import get_trtllm_deployable, unset_environment_variables
+
+__all__ = ["unset_environment_variables", "get_trtllm_deployable"]
diff --git a/nemo/collections/llm/deploy/base.py b/nemo/collections/llm/deploy/base.py
new file mode 100644
index 000000000000..e21198f5884b
--- /dev/null
+++ b/nemo/collections/llm/deploy/base.py
@@ -0,0 +1,117 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import subprocess
+from pathlib import Path
+
+from nemo.utils import logging
+
+
+def unset_environment_variables() -> None:
+ """
+ SLURM_, PMI_, and PMIX_ environment variables need to be unset for trtllm export to work
+ on clusters. This method takes care of unsetting them.
+ """
+ logging.info("Unsetting all SLURM_, PMI_, PMIX_ Variables")
+
+ # Function to unset variables with a specific prefix
+ def unset_vars_with_prefix(prefix):
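+ # Shelling out to `env` mirrors the current process environment; a pure-Python
+ # sketch of the same lookup would be: [k for k in list(os.environ) if k.startswith(prefix)].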
+ unset_vars = []
+ cmd = f"env | grep ^{prefix} | cut -d= -f1"
+ result = subprocess.run(cmd, shell=True, capture_output=True, text=True)
+ vars_to_unset = result.stdout.strip().split('\n')
+ for var in vars_to_unset:
+ if var: # Check if the variable name is not empty
+ os.environ.pop(var, None)
+ unset_vars.append(var)
+ return unset_vars
+
+ # Collect all unset variables across all prefixes
+ all_unset_vars = []
+
+ # Unset variables for each prefix
+ for prefix in ['SLURM_', 'PMI_', 'PMIX_']:
+ unset_vars = unset_vars_with_prefix(prefix)
+ all_unset_vars.extend(unset_vars)
+
+ if all_unset_vars:
+ logging.info(f"Unset env variables: {', '.join(all_unset_vars)}")
+ else:
+ logging.info("No env variables were unset.")
+
+
+def get_trtllm_deployable(
+ nemo_checkpoint,
+ model_type,
+ triton_model_repository,
+ num_gpus,
+ tensor_parallelism_size,
+ pipeline_parallelism_size,
+ max_input_len,
+ max_output_len,
+ max_batch_size,
+ dtype,
+ output_generation_logits,
+):
+ """
+ Exports the nemo checkpoint to trtllm and returns trt_llm_exporter that is used to deploy on PyTriton.
+ """
+ from nemo.export.tensorrt_llm import TensorRTLLM
+
+ if triton_model_repository is None:
+ trt_llm_path = "/tmp/trt_llm_model_dir/"
+ Path(trt_llm_path).mkdir(parents=True, exist_ok=True)
+ else:
+ trt_llm_path = triton_model_repository
+
+ if nemo_checkpoint is None and triton_model_repository is None:
+ raise ValueError(
+ "The provided model repository is not a valid TensorRT-LLM model "
+ "directory. Please provide a --nemo_checkpoint or a TensorRT-LLM engine."
+ )
+
+ if nemo_checkpoint is None and not os.path.isdir(triton_model_repository):
+ raise ValueError(
+ "The provided model repository is not a valid TensorRT-LLM model "
+ "directory. Please provide a --nemo_checkpoint or a valid TensorRT-LLM engine."
+ )
+
+ if nemo_checkpoint is not None and model_type is None:
+ raise ValueError("Model type is required to be defined if a nemo checkpoint is provided.")
+
+ trt_llm_exporter = TensorRTLLM(
+ model_dir=trt_llm_path,
+ load_model=(nemo_checkpoint is None),
+ )
+
+ if nemo_checkpoint is not None:
+ try:
+ logging.info("Export operation will be started to export the nemo checkpoint to TensorRT-LLM.")
+ trt_llm_exporter.export(
+ nemo_checkpoint_path=nemo_checkpoint,
+ model_type=model_type,
+ n_gpus=num_gpus,
+ tensor_parallelism_size=tensor_parallelism_size,
+ pipeline_parallelism_size=pipeline_parallelism_size,
+ max_input_len=max_input_len,
+ max_output_len=max_output_len,
+ max_batch_size=max_batch_size,
+ dtype=dtype,
+ gather_generation_logits=output_generation_logits,
+ )
+ except Exception as error:
+ raise RuntimeError("An error has occurred during the model export. Error message: " + str(error))
+
+ return trt_llm_exporter
diff --git a/nemo/collections/llm/evaluation/__init__.py b/nemo/collections/llm/evaluation/__init__.py
new file mode 100644
index 000000000000..3012689bb8da
--- /dev/null
+++ b/nemo/collections/llm/evaluation/__init__.py
@@ -0,0 +1,3 @@
+from nemo.collections.llm.evaluation.base import NeMoFWLMEval, wait_for_rest_service
+
+__all__ = ["NeMoFWLMEval", "wait_for_rest_service"]
diff --git a/nemo/collections/llm/evaluation/base.py b/nemo/collections/llm/evaluation/base.py
new file mode 100644
index 000000000000..b1734d6f4d43
--- /dev/null
+++ b/nemo/collections/llm/evaluation/base.py
@@ -0,0 +1,210 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import time
+
+import requests
+import torch
+import torch.nn.functional as F
+from lm_eval.api.instance import Instance
+from lm_eval.api.model import LM
+from requests.exceptions import RequestException
+
+from nemo.collections.common.tokenizers.huggingface.auto_tokenizer import AutoTokenizer
+from nemo.collections.common.tokenizers.sentencepiece_tokenizer import SentencePieceTokenizer
+from nemo.utils import logging
+
+
+class NeMoFWLMEval(LM):
+ """
+ NeMoFWLMEval is a wrapper class subclassing lm_eval.api.model.LM that defines how lm_eval interfaces with
+ a NeMo model deployed on a PyTriton server.
+ Created based on: https://github.com/EleutherAI/lm-evaluation-harness/blob/v0.4.4/docs/model_guide.md
+ """
+
+ def __init__(self, model_name, api_url, tokenizer, max_tokens_to_generate, temperature, top_p, top_k, add_bos):
+ self.model_name = model_name
+ self.api_url = api_url
+ self.tokenizer = tokenizer
+ self.max_tokens_to_generate = max_tokens_to_generate
+ self.temperature = temperature
+ self.top_p = top_p
+ self.top_k = top_k
+ self.add_bos = add_bos
+ super().__init__()
+
+ def _generate_tokens_logits(self, payload, return_text: bool = False, return_logits: bool = False):
+ """
+ A private method that sends a POST request to the model on the PyTriton server and returns either the
+ generated text or the logits.
+ """
+ # send a post request to /v1/completions/ endpoint with the payload
+ response = requests.post(f"{self.api_url}/v1/completions/", json=payload)
+ response_data = response.json()
+
+ if 'error' in response_data:
+ raise Exception(f"API Error: {response_data['error']}")
+
+ # Assuming the response is in OpenAI format
+ if return_text:
+ # in case of generate_until tasks return just the text
+ return response_data['choices'][0]['text']
+
+ if return_logits:
+ # in case of loglikelihood tasks return the logits
+ return response_data['choices'][0]['generation_logits']
+
+ def tokenizer_type(self, tokenizer):
+ """
+ Returns the type of the tokenizer.
+ """
+ if isinstance(tokenizer, AutoTokenizer):
+ return "AutoTokenizer"
+ elif isinstance(tokenizer, SentencePieceTokenizer):
+ return "SentencePieceTokenizer"
+ else:
+ raise ValueError(
+ "Tokenizer type is not one of SentencePieceTokenizer or HF's AutoTokenizer. Please check "
+ "how to handle special tokens for this tokenizer"
+ )
+
+ def loglikelihood(self, requests: list[Instance]):
+ """
+ Defines the loglikelihood request. Takes input requests of type list[Instance] where Instance is a dataclass
+ defined in lm_eval.api.instance. Each Instance consists of the input prompt, output prompt, request type (here
+ loglikelihood) and other relevant args like few shot samples.
+ """
+ special_tokens_kwargs = {}
+ tokenizer_type = self.tokenizer_type(self.tokenizer)
+ if tokenizer_type == "SentencePieceTokenizer":
+ special_tokens_kwargs['add_bos'] = self.add_bos
+ elif tokenizer_type == "AutoTokenizer":
+ special_tokens_kwargs['add_special_tokens'] = self.add_bos
+
+ results = []
+ for request in requests:
+ # get the input prompt from the request
+ context = request.arguments[0]
+ # get the output prompt from the request
+ continuation = request.arguments[1]
+ # get encoded tokens of continuation
+ continuation_enc = self.tokenizer.tokenizer.encode(continuation, **special_tokens_kwargs)
+ # For SentencePiece, drop the first encoded token, since it is a space.
+ if tokenizer_type == "SentencePieceTokenizer":
+ continuation_enc = continuation_enc[1:]
+ num_cont_tokens = len(continuation_enc)
+ # Update self.max_tokens_to_generate with number of continuation tokens (or output tokens) in the request
+ self.max_tokens_to_generate = num_cont_tokens
+ # Create payload to query the model deployed on PyTriton server
+ payload = {
+ "model": self.model_name,
+ "prompt": context,
+ "max_tokens": self.max_tokens_to_generate,
+ "temperature": self.temperature,
+ "top_p": self.top_p,
+ "top_k": self.top_k,
+ }
+ # Get the logits from the model
+ generation_logits = self._generate_tokens_logits(payload, return_logits=True)
+ # Convert generation_logits to a torch tensor to get logprobs without manually implementing log_softmax
+ multi_logits = F.log_softmax(torch.tensor(generation_logits[0]), dim=-1)
+ # Convert encoded continuation tokens to torch tensor
+ cont_toks = torch.tensor(continuation_enc, dtype=torch.long).unsqueeze(0)
+ # Get the greedy token from the logits (i.e token with the highest prob)
+ greedy_tokens = multi_logits.argmax(dim=-1)
+ # Check if all greedy_tokens match the actual continuation tokens
+ is_greedy = (greedy_tokens == cont_toks).all()
+ # Get the logits corresponding to the actual continuation tokens
+ logits = torch.gather(multi_logits, 2, cont_toks.unsqueeze(-1)).squeeze(-1)
+ # result is tuple of logProb of generating the continuation token and is_greedy
+ result = (float(logits.sum()), bool(is_greedy))
+
+ results.append(result)
+
+ return results
+
+ def loglikelihood_rolling(self, requests: list[Instance]):
+ """
+ Defines the loglikelihood_rolling request type. Yet to be implemented.
+ """
+ pass
+
+ def generate_until(self, inputs: list[Instance]):
+ """
+ Defines the generate_until request type. Takes input requests of type list[Instance] where Instance is a
+ dataclass defined in lm_eval.api.instance. Each Instance consists of the input prompt, output prompt, request
+ type (here generate_until) and other relevant args like few shot samples.
+ """
+ results = []
+ for instance in inputs:
+ # Access the 'arguments' attribute of the Instance which contains the input prompt string
+ prompt = instance.arguments[0]
+ # Create payload to query the model deployed on PyTriton server
+ payload = {
+ "model": self.model_name,
+ "prompt": prompt,
+ "max_tokens": self.max_tokens_to_generate,
+ "temperature": self.temperature,
+ "top_p": self.top_p,
+ "top_k": self.top_k,
+ }
+ # Get the text generated by the model
+ generated_text = self._generate_tokens_logits(payload, return_text=True)
+
+ results.append(generated_text)
+
+ return results
+
+
+def wait_for_rest_service(rest_url, max_retries=60, retry_interval=2):
+ """
+ Wait for REST service to be ready.
+
+ Args:
+ rest_url (str): URL of the REST service's health endpoint
+ max_retries (int): Maximum number of retry attempts. Default: 60.
+ retry_interval (int): Time to wait between retries in seconds. Default: 2.
+
+ Returns:
+ bool: True if rest service is ready, False otherwise
+ """
+
+ def check_service(url):
+ """
+ Check if the service is ready by making a GET request to its health endpoint.
+
+ Args:
+ url (str): URL of the service's health endpoint
+
+ Returns:
+ bool: True if the service is ready, False otherwise
+ """
+ try:
+ response = requests.get(url, timeout=5)
+ return response.status_code == 200
+ except RequestException:
+ return False
+
+ for _ in range(max_retries):
+ rest_ready = check_service(rest_url)
+
+ if rest_ready:
+ logging.info("REST service is ready.")
+ return True
+
+ logging.info(f"REST Service not ready yet. Retrying in {retry_interval} seconds...")
+ time.sleep(retry_interval)
+
+ logging.info("Timeout: REST service did not become ready.")
+ return False
diff --git a/nemo/collections/llm/fn/activation.py b/nemo/collections/llm/fn/activation.py
index 50e076a79d36..db82f95b4bcc 100644
--- a/nemo/collections/llm/fn/activation.py
+++ b/nemo/collections/llm/fn/activation.py
@@ -13,6 +13,7 @@
# limitations under the License.
import torch
+from megatron.core.jit import jit_fuser
@torch.jit.script
@@ -25,7 +26,12 @@ def openai_gelu(x):
return gelu_impl(x)
-@torch.jit.script
+@jit_fuser
+def quick_gelu(x: torch.Tensor) -> torch.Tensor:
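+ """Sigmoid approximation of GELU: x * sigmoid(1.702 * x)."""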
+ return x * torch.sigmoid(1.702 * x)
+
+
+# @torch.jit.script  # disabled until we have serialization
def squared_relu(x):
"""Squared ReLU activation function."""
return torch.pow(torch.nn.functional.relu(x), 2)
diff --git a/nemo/collections/llm/gpt/data/__init__.py b/nemo/collections/llm/gpt/data/__init__.py
index f4e97d91e5cd..b42c350bcaba 100644
--- a/nemo/collections/llm/gpt/data/__init__.py
+++ b/nemo/collections/llm/gpt/data/__init__.py
@@ -12,18 +12,21 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from nemo.collections.llm.gpt.data.alpaca import AlpacaDataModule
from nemo.collections.llm.gpt.data.dolly import DollyDataModule
from nemo.collections.llm.gpt.data.fine_tuning import FineTuningDataModule
from nemo.collections.llm.gpt.data.hf_dataset import HfDatasetDataModule
from nemo.collections.llm.gpt.data.mock import MockDataModule
-from nemo.collections.llm.gpt.data.pre_training import PreTrainingDataModule
+from nemo.collections.llm.gpt.data.pre_training import PreTrainingDataModule, build_pretraining_datamodule
from nemo.collections.llm.gpt.data.squad import SquadDataModule
__all__ = [
"FineTuningDataModule",
+ "AlpacaDataModule",
"SquadDataModule",
"DollyDataModule",
"MockDataModule",
"PreTrainingDataModule",
+ "build_pretraining_datamodule",
"HfDatasetDataModule",
]
diff --git a/nemo/collections/llm/gpt/data/alpaca.py b/nemo/collections/llm/gpt/data/alpaca.py
new file mode 100644
index 000000000000..8b75bb48a04e
--- /dev/null
+++ b/nemo/collections/llm/gpt/data/alpaca.py
@@ -0,0 +1,126 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import shutil
+from typing import TYPE_CHECKING, Any, Dict, List, Optional
+
+from datasets import load_dataset
+
+from nemo.collections.llm.gpt.data.core import get_dataset_root
+from nemo.collections.llm.gpt.data.fine_tuning import FineTuningDataModule
+from nemo.lightning.io.mixin import IOMixin
+from nemo.utils import logging
+
+if TYPE_CHECKING:
+ from nemo.collections.common.tokenizers import TokenizerSpec
+ from nemo.collections.llm.gpt.data.packed_sequence import PackedSequenceSpecs
+
+
+class AlpacaDataModule(FineTuningDataModule, IOMixin):
+ """A data module for fine-tuning on the Alpaca Python dataset.
+
+ This class inherits from the `FineTuningDataModule` class and is specifically designed for fine-tuning models
+ on the "iamtarun/python_code_instructions_18k_alpaca" dataset. It handles data download, preprocessing, splitting,
+ and preparing the data in a format suitable for training, validation, and testing.
+
+ Args:
+ force_redownload (bool, optional): Whether to force re-download the dataset even if it exists locally.
+ Defaults to False.
+ delete_raw (bool, optional): Whether to delete the raw downloaded dataset after preprocessing.
+ Defaults to True.
+ See FineTuningDataModule for the other args
+ """
+
+ def __init__(
+ self,
+ seq_length: int = 2048,
+ tokenizer: Optional["TokenizerSpec"] = None,
+ micro_batch_size: int = 4,
+ global_batch_size: int = 8,
+ rampup_batch_size: Optional[List[int]] = None,
+ force_redownload: bool = False,
+ delete_raw: bool = True,
+ seed: int = 1234,
+ memmap_workers: int = 1,
+ num_workers: int = 8,
+ pin_memory: bool = True,
+ persistent_workers: bool = False,
+ packed_sequence_specs: Optional["PackedSequenceSpecs"] = None,
+ dataset_kwargs: Optional[Dict[str, Any]] = None,
+ ):
+ self.force_redownload = force_redownload
+ self.delete_raw = delete_raw
+
+ super().__init__(
+ dataset_root=get_dataset_root("alpaca"),
+ seq_length=seq_length,
+ tokenizer=tokenizer,
+ micro_batch_size=micro_batch_size,
+ global_batch_size=global_batch_size,
+ rampup_batch_size=rampup_batch_size,
+ seed=seed,
+ memmap_workers=memmap_workers,
+ num_workers=num_workers,
+ pin_memory=pin_memory,
+ persistent_workers=persistent_workers,
+ packed_sequence_specs=packed_sequence_specs,
+ dataset_kwargs=dataset_kwargs,
+ )
+
+ def prepare_data(self) -> None:
+ # if the train file already exists and re-download is not forced, no need to do anything
+ if not self.train_path.exists() or self.force_redownload:
+ dset = self._download_data()
+ self._preprocess_and_split_data(dset)
+ super().prepare_data()
+
+ def _download_data(self):
+ logging.info(f"Downloading {self.__class__.__name__}...")
+ return load_dataset(
+ "iamtarun/python_code_instructions_18k_alpaca",
+ cache_dir=str(self.dataset_root),
+ download_mode="force_redownload" if self.force_redownload else None,
+ )
+
+ def _preprocess_and_split_data(self, dset, train_ratio: float = 0.80, val_ratio: float = 0.15):
+ logging.info(f"Preprocessing {self.__class__.__name__} to jsonl format and splitting...")
+
+ test_ratio = 1 - train_ratio - val_ratio
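+ # e.g. train_ratio=0.80, val_ratio=0.15 -> test_ratio=0.05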
+ save_splits = {}
+ dataset = dset.get('train')
+ split_dataset = dataset.train_test_split(test_size=val_ratio + test_ratio, seed=self.seed)
+ split_dataset2 = split_dataset['test'].train_test_split(
+ test_size=test_ratio / (val_ratio + test_ratio), seed=self.seed
+ )
+ save_splits['training'] = split_dataset['train']
+ save_splits['validation'] = split_dataset2['train']
+ save_splits['test'] = split_dataset2['test']
+
+ for split_name, dataset in save_splits.items():
+ output_file = self.dataset_root / f"{split_name}.jsonl"
+ with output_file.open("w", encoding="utf-8") as f:
+ for o in dataset:
+ prompt = o['prompt'][: o['prompt'].find('### Output')]
+ completion = o['output']
+ f.write(json.dumps({"input": prompt, "output": completion}) + "\n")
+
+ logging.info(f"{split_name} split saved to {output_file}")
+
+ if self.delete_raw:
+ for p in self.dataset_root.iterdir():
+ if p.is_dir():
+ shutil.rmtree(p)
+ elif '.jsonl' not in str(p.name):
+ p.unlink()
diff --git a/nemo/collections/llm/gpt/data/api.py b/nemo/collections/llm/gpt/data/api.py
index a7fde4cfc8d8..2ebb30e781d1 100644
--- a/nemo/collections/llm/gpt/data/api.py
+++ b/nemo/collections/llm/gpt/data/api.py
@@ -12,27 +12,37 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
+import nemo_run as run
from nemo.collections.llm.gpt.data.dolly import DollyDataModule
+from nemo.collections.llm.gpt.data.hf_dataset import HfDatasetDataModule
from nemo.collections.llm.gpt.data.mock import MockDataModule
from nemo.collections.llm.gpt.data.squad import SquadDataModule
-from nemo.collections.llm.utils import factory
-@factory
+@run.cli.factory
+@run.autoconvert
def mock() -> pl.LightningDataModule:
return MockDataModule(seq_length=4096, global_batch_size=16, micro_batch_size=2)
-@factory
+@run.cli.factory
+@run.autoconvert
def squad() -> pl.LightningDataModule:
return SquadDataModule(seq_length=4096, global_batch_size=16, micro_batch_size=2)
-@factory
+@run.cli.factory
+@run.autoconvert
def dolly() -> pl.LightningDataModule:
return DollyDataModule(seq_length=4096, global_batch_size=16, micro_batch_size=2)
-__all__ = ["mock", "squad", "dolly"]
+@run.cli.factory
+@run.autoconvert
+def hf_dataset(dataset: str) -> pl.LightningDataModule:
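+ # `dataset` is a Hugging Face hub id; "rajpurkar/squad" here is illustrative:
+ #   hf_dataset(dataset="rajpurkar/squad")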
+ return HfDatasetDataModule(dataset=dataset, global_batch_size=16, micro_batch_size=2)
+
+
+__all__ = ["mock", "squad", "dolly", "hf_dataset"]
diff --git a/nemo/collections/llm/gpt/data/dolly.py b/nemo/collections/llm/gpt/data/dolly.py
index fb8cf9fd5da0..c241580db8e3 100644
--- a/nemo/collections/llm/gpt/data/dolly.py
+++ b/nemo/collections/llm/gpt/data/dolly.py
@@ -14,7 +14,7 @@
import json
import shutil
-from typing import TYPE_CHECKING, List, Optional
+from typing import TYPE_CHECKING, Any, Dict, List, Optional
import numpy as np
from datasets import load_dataset
@@ -56,8 +56,8 @@ def __init__(
num_workers: int = 8,
pin_memory: bool = True,
persistent_workers: bool = False,
- pad_to_max_length: bool = False,
packed_sequence_specs: Optional["PackedSequenceSpecs"] = None,
+ dataset_kwargs: Optional[Dict[str, Any]] = None,
):
self.force_redownload = force_redownload
self.delete_raw = delete_raw
@@ -74,8 +74,8 @@ def __init__(
num_workers=num_workers,
pin_memory=pin_memory,
persistent_workers=persistent_workers,
- pad_to_max_length=pad_to_max_length,
packed_sequence_specs=packed_sequence_specs,
+ dataset_kwargs=dataset_kwargs,
)
def prepare_data(self) -> None:
diff --git a/nemo/collections/llm/gpt/data/fine_tuning.py b/nemo/collections/llm/gpt/data/fine_tuning.py
index 2545bbc93f1d..8fcef72f3bd9 100644
--- a/nemo/collections/llm/gpt/data/fine_tuning.py
+++ b/nemo/collections/llm/gpt/data/fine_tuning.py
@@ -15,13 +15,14 @@
import math
from functools import lru_cache
from pathlib import Path
-from typing import TYPE_CHECKING, List, Optional, Union
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from torch.utils.data import DataLoader
from nemo.collections.common.tokenizers import AutoTokenizer
from nemo.collections.llm.gpt.data.core import create_sft_dataset
+from nemo.lightning.data import WrappedDataLoader
from nemo.lightning.pytorch.plugins import MegatronDataSampler
from nemo.utils import logging
@@ -34,25 +35,28 @@ class FineTuningDataModule(pl.LightningDataModule):
"""Base class for fine-tuning an LLM.
This class provides a foundation for building custom data modules for fine-tuning Nemo NLP models. It inherits from
- `pl.LightningDataModule` from the PyTorch Lightning library and handles data loading, preprocessing, and batch creation
- for training, validation, and testing.
+ `pl.LightningDataModule` from the PyTorch Lightning library and handles data loading, preprocessing, and batch
+ creation for training, validation, and testing.
Args:
dataset_root (Union[str, Path]): The root directory containing the training, validation, and test data.
seq_length (int, optional): The maximum sequence length for the input and output text. Defaults to 2048.
- tokenizer (Optional[TokenizerSpec], optional): The tokenizer to use for preprocessing the text. Defaults to None.
+ tokenizer (Optional[TokenizerSpec], optional): The tokenizer to use for preprocessing the text.
If not provided, a Megatron GPT2 BPE tokenizer will be used.
micro_batch_size (int, optional): The micro batch size for training. Defaults to 4.
global_batch_size (int, optional): The global batch size for training. Defaults to 8.
- rampup_batch_size (Optional[List[int]], optional): A list of batch sizes for ramping up during training. Defaults to None.
+ rampup_batch_size (Optional[List[int]], optional): A list of batch sizes for ramping up during training.
+ Defaults to None.
seed (int, optional): The random seed for data shuffling. Defaults to 1234.
- memmap_workers (int, optional): The number of worker processes for loading data using TextMemMapDataset. Defaults to 1.
+ memmap_workers (int, optional): The number of worker processes for loading data using TextMemMapDataset.
+ Defaults to 1.
num_workers (int, optional): The number of worker processes for data loading. Defaults to 8.
- pin_memory (bool, optional): Whether to pin memory during data loading for faster GPU training. Defaults to True.
- persistent_workers (bool, optional): Whether to keep data loading workers persistent across epochs. Defaults to False.
- max_train_steps (int, optional): Maximum number of steps to train. Used to calculate samples mapping for the mmap dataset
- pad_to_max_length (bool, optional): Whether to pad the input to the max sequence length. If False, will pad to the max length of the current batch.
+ pin_memory (bool, optional): Whether to pin memory during data loading for faster GPU training.
+ Defaults to True.
+ persistent_workers (bool, optional): Whether to keep data loading workers persistent across epochs.
+ Defaults to False.
packed_sequence_specs (PackedSequenceSpecs, optional): See PackedSequenceSpecs for details
+ dataset_kwargs (Optional[Dict[str, Any]], optional): Keyword arguments to pass into the GPTSFTDataset class
"""
def __init__(
@@ -68,9 +72,8 @@ def __init__(
num_workers: int = 8,
pin_memory: bool = True,
persistent_workers: bool = False,
- pad_to_max_length: bool = False,
packed_sequence_specs: Optional["PackedSequenceSpecs"] = None,
- sanity_check_dist_workers: bool = True,
+ dataset_kwargs: Optional[Dict[str, Any]] = None,
):
super().__init__()
self.seq_length = seq_length
@@ -86,25 +89,34 @@ def __init__(
self.rampup_batch_size = rampup_batch_size
self.data_sampler = None
self.max_train_samples = None
- self.pad_to_max_length = pad_to_max_length
self.packed_sequence_specs = packed_sequence_specs
self.packed_sequence_size = -1 if not packed_sequence_specs else packed_sequence_specs.packed_sequence_size
self.validate_batch_size_for_packed_sequence()
- self._sanity_check_dist_workers = sanity_check_dist_workers
+ self.dataset_kwargs = dataset_kwargs or {}
def validate_batch_size_for_packed_sequence(self):
+ """
+ Validate that the micro batch size is 1 when training with packed sequences.
+ """
if self.packed_sequence_size > 0 and self.micro_batch_size > 1:
raise ValueError(
"Micro batch size should be 1 when training with packed sequence, but your micro batch size "
f"is {self.micro_batch_size}. \nThe following config is equivalent to your current setting for "
f"a packed dataset. Please update your config to the following: \n"
f"Set micro batch size to 1 (currently {self.micro_batch_size})\n"
- f"Set global batch size to {self.global_batch_size // self.micro_batch_size} (currently {self.global_batch_size}) \n"
- f"Set packed sequence length to {self.packed_sequence_size*self.micro_batch_size} (currently {self.packed_sequence_size}) \n"
- f"For details please visit https://docs.nvidia.com/nemo-framework/user-guide/latest/nemotoolkit/features/optimizations/sequence_packing.html"
+ f"Set global batch size to {self.global_batch_size // self.micro_batch_size} "
+ f"(currently {self.global_batch_size}) \n"
+ f"Set packed sequence length to {self.packed_sequence_size*self.micro_batch_size} "
+ f"(currently {self.packed_sequence_size}) \n"
+ f"For details please visit "
+ f"https://docs.nvidia.com/nemo-framework/user-guide/latest/nemotoolkit/features/optimizations/"
+ f"sequence_packing.html"
)
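# Editor's sketch (not part of the patch): the equivalent packed-sequence
# config the error message above computes, for hypothetical values
# micro_batch_size=4, global_batch_size=8, packed_sequence_size=2048:
#     new_micro_batch_size = 1
#     new_global_batch_size = 8 // 4        # -> 2
#     new_packed_sequence_size = 2048 * 4   # -> 8192
# Tokens per optimizer step are unchanged: 8 * 2048 == 2 * 8192.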
def prepare_data(self) -> None:
+ """
+ Prepare packed sequence data
+ """
if self.packed_sequence_size > 0 and not self.train_path_packed.is_file():
from nemo.collections.llm.gpt.data.packed_sequence import prepare_packed_sequence_data
@@ -118,6 +130,9 @@ def prepare_data(self) -> None:
)
def setup(self, stage: str):
+ """Called by pytorch lightning in datamodule setup"""
+
+ # data_sampler is used in `setup_data_sampler` in MegatronStrategy.setup
self.data_sampler = MegatronDataSampler(
seq_len=self.seq_length,
micro_batch_size=self.micro_batch_size,
@@ -130,39 +145,78 @@ def setup(self, stage: str):
# base_dataset_utils.get_datasets_weights_and_num_samples
self.max_train_samples = int(math.ceil(self.global_batch_size * self.trainer.max_steps * 1.005))
+ def state_dict(self) -> Dict[str, Any]:
+ """Called when saving a checkpoint, implement to generate and save datamodule state.
+
+ Returns:
+ A dictionary containing datamodule state.
+
+ """
+ consumed_samples = self.data_sampler.compute_consumed_samples(
+ self.trainer.global_step - self.data_sampler.init_global_step
+ )
+ return {"consumed_samples": consumed_samples}
+
+ def load_state_dict(self, state_dict: Dict[str, Any]) -> None:
+ """Called when loading a checkpoint, implement to reload datamodule state given datamodule stat
+
+ Args:
+ state_dict: the datamodule state returned by ``state_dict``.
+
+ """
+ try:
+ from megatron.core.num_microbatches_calculator import update_num_microbatches
+
+ except (ImportError, ModuleNotFoundError):
+ logging.warning("Megatron num_microbatches_calculator not found, using Apex version.")
+ from apex.transformer.pipeline_parallel.utils import update_num_microbatches
+ consumed_samples = state_dict["consumed_samples"]
+ self.data_sampler.init_consumed_samples = consumed_samples
+ self.data_sampler.prev_consumed_samples = consumed_samples
+
+ update_num_microbatches(
+ consumed_samples=consumed_samples,
+ consistency_check=False,
+ )
+ self.data_sampler.if_first_step = 1
+
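# Editor's sketch (not part of the patch): the consumed-samples round trip the
# two hooks above implement. Progress is reduced to a single integer on save,
# and the sampler plus the microbatch calculator are rewound to it on load.
# Hypothetical numbers, assuming global_batch_size=8 and 100 completed steps:
#     state = datamodule.state_dict()       # {"consumed_samples": 800}
#     datamodule.load_state_dict(state)     # resume at sample 800, not 0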
def train_dataloader(self) -> DataLoader:
+ # pylint: disable=C0115,C0116
return self._create_dataloader(
self._create_dataset(
self.train_path if self.packed_sequence_size <= 0 else self.train_path_packed,
max_num_samples=self.max_train_samples,
- pad_to_max_length=self.pad_to_max_length,
- sanity_check_dist_workers=self._sanity_check_dist_workers,
- )
+ **self.dataset_kwargs,
+ ),
+ mode="train",
)
def val_dataloader(self) -> DataLoader:
+ # pylint: disable=C0115,C0116
return self._create_dataloader(
self._create_dataset(
self.validation_path,
is_test=True,
- pad_to_max_length=self.pad_to_max_length,
- sanity_check_dist_workers=self._sanity_check_dist_workers,
+ **self.dataset_kwargs,
),
+ mode="validation",
)
def test_dataloader(self) -> DataLoader:
+ # pylint: disable=C0115,C0116
return self._create_dataloader(
self._create_dataset(
self.test_path,
tokens_to_generate=32,
is_test=True,
- pad_to_max_length=self.pad_to_max_length,
- sanity_check_dist_workers=self._sanity_check_dist_workers,
- )
+ **self.dataset_kwargs,
+ ),
+ mode="test",
)
@lru_cache
def _create_dataset(self, path, is_test=False, **kwargs):
+ # pylint: disable=C0115,C0116
return create_sft_dataset(
path,
tokenizer=self.tokenizer,
@@ -173,9 +227,11 @@ def _create_dataset(self, path, is_test=False, **kwargs):
**kwargs,
)
- def _create_dataloader(self, dataset, **kwargs) -> DataLoader:
- return DataLoader(
- dataset,
+ def _create_dataloader(self, dataset, mode, **kwargs) -> DataLoader:
+ # pylint: disable=C0115,C0116
+ return WrappedDataLoader(
+ mode=mode,
+ dataset=dataset,
num_workers=self.num_workers,
pin_memory=self.pin_memory,
persistent_workers=self.persistent_workers,
@@ -185,10 +241,13 @@ def _create_dataloader(self, dataset, **kwargs) -> DataLoader:
@property
def train_path(self) -> Path:
+ """Path to training dataset file"""
return self.dataset_root / "training.jsonl"
@property
def train_path_packed(self) -> Path:
+ """Path to training dataset file for packed sequence. The file path contains a reference to the
+ tokenizer/model name since packed sequence dataset consists of tokenized indices."""
if self.packed_sequence_size > 0:
if self.packed_sequence_specs.packed_data_path is not None:
return self.packed_sequence_specs.packed_data_path
@@ -201,18 +260,24 @@ def train_path_packed(self) -> Path:
@property
def validation_path(self) -> Path:
+ """Path to validation dataset file"""
return self.dataset_root / "validation.jsonl"
@property
def test_path(self) -> Path:
+ """Path to test dataset file"""
return self.dataset_root / "test.jsonl"
def _extract_tokenizer_model_name(self) -> str:
+ """Automatically get the model name from model path."""
if self.packed_sequence_specs.tokenizer_model_name is not None:
tokenizer_model_name = self.packed_sequence_specs.tokenizer_model_name
elif isinstance(self.tokenizer, AutoTokenizer):
name = self.tokenizer.tokenizer.name_or_path
- if name.endswith("nemo_tokenizer"):
+ if name.endswith("context/nemo_tokenizer"):
+ # NEMO_HOME/hf_org/hf_model/context/nemo_tokenizer => hf_org--hf_model
+ tokenizer_model_name = '--'.join(name.split("/")[-4:-2])
+ elif name.endswith("nemo_tokenizer"):
# NEMO_HOME/hf_org/hf_model/nemo_tokenizer => hf_org--hf_model
tokenizer_model_name = '--'.join(name.split("/")[-3:-1])
else:
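# Editor's sketch (not part of the patch): what the two endswith() branches in
# _extract_tokenizer_model_name recover from hypothetical cache paths.
name = "/nemo_home/meta-llama/Llama-3.1-8B/context/nemo_tokenizer"
assert "--".join(name.split("/")[-4:-2]) == "meta-llama--Llama-3.1-8B"
name = "/nemo_home/meta-llama/Llama-3.1-8B/nemo_tokenizer"
assert "--".join(name.split("/")[-3:-1]) == "meta-llama--Llama-3.1-8B"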
diff --git a/nemo/collections/llm/gpt/data/hf_dataset.py b/nemo/collections/llm/gpt/data/hf_dataset.py
index 7e70a970913e..46562b6e72c8 100644
--- a/nemo/collections/llm/gpt/data/hf_dataset.py
+++ b/nemo/collections/llm/gpt/data/hf_dataset.py
@@ -12,9 +12,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
from torch.utils.data import DataLoader
+from nemo.lightning.pytorch.plugins import MegatronDataSampler
class HfDatasetDataModule(pl.LightningDataModule):
@@ -24,6 +25,7 @@ def __init__(
num_workers=2,
pin_memory=True,
persistent_workers=True,
+ seq_length=1024,
micro_batch_size=2,
global_batch_size=2,
pad_token_id=0,
@@ -37,6 +39,7 @@ def __init__(
self.num_workers = num_workers
self.pin_memory = pin_memory
self.persistent_workers = persistent_workers
+ self.seq_length = seq_length
self.micro_batch_size = micro_batch_size
self.global_batch_size = global_batch_size
self.pad_token_id = pad_token_id
@@ -58,6 +61,7 @@ def pad_within_micro(batch, pad_token_id):
max_len = max(map(len, batch))
return [item + [pad_token_id] * (max_len - len(item)) for item in batch]
+ keys = list(filter(lambda x: x in batch[0], ['tokens', 'labels', 'position_ids', 'loss_mask']))
return {
key: batchify(
torch.LongTensor(
@@ -67,16 +71,26 @@ def pad_within_micro(batch, pad_token_id):
)
)
)
- for key in ['tokens', 'labels']
+ for key in keys
}
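# Editor's sketch (not part of the patch): pad_within_micro on a hypothetical
# ragged micro-batch, with pad_token_id=0:
#     pad_within_micro([[1, 2, 3], [4]], 0)  ->  [[1, 2, 3], [4, 0, 0]]
# The keys filter above also lets batches that carry 'position_ids' or
# 'loss_mask' collate, instead of hard-coding only 'tokens' and 'labels'.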
+ def setup(self, stage: str):
+ if not self.use_mcore_sampler:
+ return
+ self.data_sampler = MegatronDataSampler(
+ seq_len=self.seq_length,
+ micro_batch_size=self.micro_batch_size,
+ global_batch_size=self.global_batch_size,
+ dataloader_type=self.mcore_dataloader_type,
+ )
+
def train_dataloader(self, collate_fn=None):
from nemo.lightning.data import add_megatron_sampler
if collate_fn is None:
collate_fn = lambda x: HfDatasetDataModule.collate_fn(x, pad_token_id=self.pad_token_id)
- dataloader = DataLoader(
+ return DataLoader(
self.dataset,
num_workers=self.num_workers,
pin_memory=self.pin_memory,
@@ -84,20 +98,3 @@ def train_dataloader(self, collate_fn=None):
collate_fn=collate_fn,
batch_size=self.micro_batch_size,
)
- if not self.use_mcore_sampler:
- return dataloader
-
- rank = 0
- world_size = 1
- if torch.distributed.is_initialized():
- rank = torch.distributed.get_rank()
- world_size = torch.distributed.get_world_size()
-
- return add_megatron_sampler(
- dataloader,
- self.micro_batch_size,
- self.global_batch_size,
- dataloader_type=self.mcore_dataloader_type,
- rank=rank,
- world_size=world_size,
- )
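# Editor's sketch (not part of the patch): with the mcore sampler now attached
# as a data_sampler in setup() instead of wrapping the dataloader, a usage
# sketch might look like this (first positional argument assumed to be the
# dataset; use_mcore_sampler assumed to default to False):
from nemo.collections.llm.gpt.data.hf_dataset import HfDatasetDataModule

toy = [{"tokens": [1, 2, 3], "labels": [1, 2, 3], "loss_mask": [1, 1, 1]}] * 16
dm = HfDatasetDataModule(toy, seq_length=1024, micro_batch_size=2, global_batch_size=8)
dm.setup("fit")                  # no-op unless use_mcore_sampler was enabled
loader = dm.train_dataloader()   # plain DataLoader; the strategy reads dm.data_sampler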
diff --git a/nemo/collections/llm/gpt/data/mock.py b/nemo/collections/llm/gpt/data/mock.py
index 5678597eda0b..f6b4e26ca355 100644
--- a/nemo/collections/llm/gpt/data/mock.py
+++ b/nemo/collections/llm/gpt/data/mock.py
@@ -14,10 +14,10 @@
from typing import TYPE_CHECKING, Dict, List, Optional
+import lightning.pytorch as pl
import numpy as np
-import pytorch_lightning as pl
import torch
-from pytorch_lightning.utilities.types import EVAL_DATALOADERS, TRAIN_DATALOADERS
+from lightning.pytorch.utilities.types import EVAL_DATALOADERS, TRAIN_DATALOADERS
from torch.utils import data
from torch.utils.data import DataLoader, Dataset
diff --git a/nemo/collections/llm/gpt/data/pre_training.py b/nemo/collections/llm/gpt/data/pre_training.py
index 534922efe3a3..f659ce72796c 100644
--- a/nemo/collections/llm/gpt/data/pre_training.py
+++ b/nemo/collections/llm/gpt/data/pre_training.py
@@ -16,10 +16,10 @@
import os
import warnings
from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, List, Optional
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
-import pytorch_lightning as pl
-from pytorch_lightning.utilities.types import EVAL_DATALOADERS, TRAIN_DATALOADERS
+import lightning.pytorch as pl
+from lightning.pytorch.utilities.types import EVAL_DATALOADERS, TRAIN_DATALOADERS
from torch.utils import data
from nemo.lightning.data import WrappedDataLoader
@@ -77,7 +77,7 @@ def validate_dataset_asset_accessibility(paths):
raise ValueError("Expected path to be of string or Path type.")
path = Path(paths)
- suffices = ('.bin', '.idx')
+ suffices = (".bin", ".idx")
if path.is_dir():
if not os.access(path, os.R_OK):
raise PermissionError(f"Expected {str(path)} to be readable.")
@@ -133,6 +133,9 @@ class PreTrainingDataModule(pl.LightningDataModule, IOMixin):
to allocate to train, validation, and test sets, respectively. Unused if ``paths`` is a dict.
index_mapping_dir (Optional[str]): Path to a directory to write index mapping files.
num_dataset_builder_threads (int): The number of threads to use for dataset building.
+ num_train_samples (Optional[int]): The number of samples to use for training, defaults to total train steps times global batch size.
+ num_val_samples (Optional[int]): The number of samples to use for validation, defaults to total validation steps times global batch size.
+ num_test_samples (Optional[int]): The number of samples to use for testing, defaults to total test steps times global batch size.
"""
def __init__(
@@ -154,6 +157,9 @@ def __init__(
split: str = "900,50,50",
index_mapping_dir: Optional[str] = None,
num_dataset_builder_threads: int = 1,
+ num_train_samples: Optional[int] = None,
+ num_val_samples: Optional[int] = None,
+ num_test_samples: Optional[int] = None,
) -> None:
super().__init__()
if not isinstance(paths, (list, tuple, dict)):
@@ -196,6 +202,9 @@ def __init__(
self.index_mapping_dir = index_mapping_dir
self.num_dataset_builder_threads = num_dataset_builder_threads
self.init_global_step = 0
+ self.num_train_samples = num_train_samples
+ self.num_val_samples = num_val_samples
+ self.num_test_samples = num_test_samples
from nemo.collections.nlp.modules.common.tokenizer_utils import get_nmt_tokenizer
@@ -207,27 +216,46 @@ def __init__(
rampup_batch_size=rampup_batch_size,
)
- def setup(self, stage: str = "") -> None:
+ def build(
+ self,
+ trainer_max_steps: int,
+ trainer_val_check_interval: int,
+ trainer_limit_val_batches: Union[int, float],
+ trainer_limit_test_batches: Union[int, float],
+ ):
from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder
from megatron.core.datasets.gpt_dataset import GPTDataset
- assert (
- hasattr(self, "trainer") and self.trainer is not None
- ), "Setup should be completed when trainer and config are attached."
+ train_iters = trainer_max_steps
+ assert train_iters > 0, f"max_steps {train_iters} should be greater than 0"
+ num_train_samples = int(train_iters * self.data_sampler.global_batch_size)
+
+ if self.num_train_samples is not None:
+ assert (
+ self.num_train_samples >= num_train_samples
+ ), f"num_train_samples must be greater than or equal to {num_train_samples}."
+ num_train_samples = self.num_train_samples
+ train_iters = int(num_train_samples / self.data_sampler.global_batch_size)
- # Trainer API
- max_train_steps = self.trainer.max_steps
- assert max_train_steps > 0, "Please specify trainer.max_steps"
- eval_iters = (max_train_steps // self.trainer.val_check_interval + 1) * self.trainer.limit_val_batches
- test_iters = self.trainer.limit_test_batches
- num_train_samples = int(max_train_steps * self.data_sampler.global_batch_size)
+ eval_iters = (train_iters // trainer_val_check_interval + 1) * trainer_limit_val_batches
num_val_samples = int(eval_iters * self.data_sampler.global_batch_size)
+
+ test_iters = trainer_limit_test_batches
num_test_samples = int(test_iters * self.data_sampler.global_batch_size)
+ if self.num_val_samples is not None:
+ assert self.num_val_samples > num_val_samples, f"num_val_samples must be greater than {num_val_samples}."
+ num_val_samples = self.num_val_samples
+ if self.num_test_samples is not None:
+ assert (
+ self.num_test_samples > num_test_samples
+ ), f"num_test_samples must be greater than {num_test_samples}."
+ num_test_samples = self.num_test_samples
+
if (
- self.trainer.limit_val_batches > 0.0
- and self.trainer.limit_val_batches <= 1.0
- and isinstance(self.trainer.limit_val_batches, float)
+ trainer_limit_val_batches > 0.0
+ and trainer_limit_val_batches <= 1.0
+ and isinstance(trainer_limit_val_batches, float)
):
assert "blend" not in self.build_kwargs, (
"When using a single data distribution, limit_val_batches <= 1.0 is not supported. If you'd "
@@ -251,6 +279,18 @@ def setup(self, stage: str = "") -> None:
config=self.gpt_dataset_config,
).build()
+ def setup(self, stage: str = "") -> None:
+ assert (
+ hasattr(self, "trainer") and self.trainer is not None
+ ), "Setup should be completed when trainer and config are attached."
+
+ self.build(
+ trainer_max_steps=self.trainer.max_steps,
+ trainer_val_check_interval=self.trainer.val_check_interval,
+ trainer_limit_val_batches=self.trainer.limit_val_batches,
+ trainer_limit_test_batches=self.trainer.limit_test_batches,
+ )
+
# uncomment once fabric API is merged
# def fabric_setup(
# self,
@@ -269,13 +309,13 @@ def setup(self, stage: str = "") -> None:
# ).build()
def train_dataloader(self) -> TRAIN_DATALOADERS:
- return self._create_dataloader(self._train_ds, mode='train')
+ return self._create_dataloader(self._train_ds, mode="train")
def val_dataloader(self) -> EVAL_DATALOADERS:
- return self._create_dataloader(self._validation_ds, mode='validation')
+ return self._create_dataloader(self._validation_ds, mode="validation")
def test_dataloader(self) -> EVAL_DATALOADERS:
- return self._create_dataloader(self._test_ds, mode='test')
+ return self._create_dataloader(self._test_ds, mode="test")
def _create_dataloader(self, dataset, mode, **kwargs) -> WrappedDataLoader:
self.init_global_step = self.trainer.global_step
@@ -286,7 +326,7 @@ def _create_dataloader(self, dataset, mode, **kwargs) -> WrappedDataLoader:
num_workers=self.num_workers,
pin_memory=self.pin_memory,
persistent_workers=self.persistent_workers,
- collate_fn=getattr(dataset, 'collate_fn', data.dataloader.default_collate),
+ collate_fn=getattr(dataset, "collate_fn", data.dataloader.default_collate),
**kwargs,
)
return dataloader
@@ -316,7 +356,7 @@ def state_dict(self) -> Dict[str, Any]:
"""
consumed_samples = self.data_sampler.compute_consumed_samples(self.trainer.global_step - self.init_global_step)
- return {'consumed_samples': consumed_samples}
+ return {"consumed_samples": consumed_samples}
def load_state_dict(self, state_dict: Dict[str, Any]) -> None:
"""Called when loading a checkpoint, implement to reload datamodule state given datamodule stat
@@ -332,7 +372,7 @@ def load_state_dict(self, state_dict: Dict[str, Any]) -> None:
logging.warning("Megatron num_microbatches_calculator not found, using Apex version.")
from apex.transformer.pipeline_parallel.utils import update_num_microbatches
- consumed_samples = state_dict['consumed_samples']
+ consumed_samples = state_dict["consumed_samples"]
self.data_sampler.init_consumed_samples = consumed_samples
self.data_sampler.prev_consumed_samples = consumed_samples
@@ -344,9 +384,9 @@ def load_state_dict(self, state_dict: Dict[str, Any]) -> None:
def reconfigure_limit_batches(self):
# Override limit_train_batches in terms of num of microbatches
- self._reconfigure_limit_batches(self.trainer.limit_train_batches, self._train_ds, 'train')
+ self._reconfigure_limit_batches(self.trainer.limit_train_batches, self._train_ds, "train")
# Override limit_val_batches to be a multiple of num microbatches to prevent val_step from exiting mid-step
- self._reconfigure_limit_batches(self.trainer.limit_val_batches, self._validation_ds, 'val')
+ self._reconfigure_limit_batches(self.trainer.limit_val_batches, self._validation_ds, "val")
def _reconfigure_limit_batches(self, limit_batches, dataloader, mode):
"""
@@ -388,10 +428,47 @@ def _reconfigure_limit_batches(self, limit_batches, dataloader, mode):
else:
limit_batches = limit_batches - limit_batches % get_num_microbatches()
- if mode == 'train':
+ if mode == "train":
self.trainer.limit_train_batches = limit_batches
else:
self.trainer.limit_val_batches = limit_batches
# Override num sanity steps to be a multiple of num of microbatches
self.trainer.num_sanity_val_steps *= get_num_microbatches()
+
+
+def build_pretraining_datamodule(
+ datamodule: PreTrainingDataModule,
+ trainer_max_steps: int,
+ trainer_val_check_interval: int,
+ trainer_limit_val_batches: Union[int, float],
+ trainer_limit_test_batches: Union[int, float],
+):
+ """
+ Builds the index mapping cache for nemo.collections.llm.gpt.data.PreTrainingDataModule.
+
+ Args:
+ datamodule (PreTrainingDataModule): The pre-training data module to build.
+ trainer_max_steps (int): The max_steps set in your trainer.
+ trainer_val_check_interval (int): The interval at which to perform validation in your trainer.
+ trainer_limit_val_batches (Union[int, float]): The number of validation batches to use in your trainer.
+ trainer_limit_test_batches (Union[int, float]): The number of test batches to use in your trainer.
+
+ Returns:
+ None
+ """
+ import torch.distributed as dist
+
+ assert not dist.is_initialized(), "This function cannot be called inside an existing torch.distributed job."
+ # The indices in Megatron are built on rank 0, so we set the world size to 1 here.
+ dist.init_process_group(world_size=1, rank=0)
+
+ from nemo.utils import logging
+
+ logging.info(f"Building {datamodule}")
+ datamodule.build(
+ trainer_max_steps=trainer_max_steps,
+ trainer_val_check_interval=trainer_val_check_interval,
+ trainer_limit_val_batches=trainer_limit_val_batches,
+ trainer_limit_test_batches=trainer_limit_test_batches,
+ )
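# Editor's sketch (not part of the patch): pre-building the index mapping cache
# offline, before launching the distributed job, via the new helper above.
# Paths and numbers are hypothetical; the helper spins up a single-rank
# process group because Megatron builds the indices on rank 0.
from nemo.collections.llm.gpt.data.pre_training import (
    PreTrainingDataModule,
    build_pretraining_datamodule,
)

datamodule = PreTrainingDataModule(
    paths=["/data/my_corpus_text_document"],
    seq_length=2048,
    global_batch_size=256,
    index_mapping_dir="/data/index_cache",
)
build_pretraining_datamodule(
    datamodule,
    trainer_max_steps=10_000,
    trainer_val_check_interval=1_000,
    trainer_limit_val_batches=32,
    trainer_limit_test_batches=32,
)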
diff --git a/nemo/collections/llm/gpt/data/squad.py b/nemo/collections/llm/gpt/data/squad.py
index cabbd444c0cf..c359925cb2f6 100644
--- a/nemo/collections/llm/gpt/data/squad.py
+++ b/nemo/collections/llm/gpt/data/squad.py
@@ -13,7 +13,7 @@
# limitations under the License.
import json
import shutil
-from typing import TYPE_CHECKING, List, Optional
+from typing import TYPE_CHECKING, Any, Dict, List, Optional
from datasets import DatasetDict, load_dataset
@@ -54,9 +54,8 @@ def __init__(
num_workers: int = 8,
pin_memory: bool = True,
persistent_workers: bool = False,
- pad_to_max_length: bool = False,
packed_sequence_specs: Optional["PackedSequenceSpecs"] = None,
- sanity_check_dist_workers: bool = True,
+ dataset_kwargs: Optional[Dict[str, Any]] = None,
):
self.force_redownload = force_redownload
self.delete_raw = delete_raw
@@ -73,9 +72,8 @@ def __init__(
num_workers=num_workers,
pin_memory=pin_memory,
persistent_workers=persistent_workers,
- pad_to_max_length=pad_to_max_length,
packed_sequence_specs=packed_sequence_specs,
- sanity_check_dist_workers=sanity_check_dist_workers,
+ dataset_kwargs=dataset_kwargs,
)
def prepare_data(self) -> None:
diff --git a/nemo/collections/llm/gpt/model/__init__.py b/nemo/collections/llm/gpt/model/__init__.py
index 6c7d159dd5cf..9f186ebba90f 100644
--- a/nemo/collections/llm/gpt/model/__init__.py
+++ b/nemo/collections/llm/gpt/model/__init__.py
@@ -59,6 +59,8 @@
Llama31Config8B,
Llama31Config70B,
Llama31Config405B,
+ Llama32Config1B,
+ Llama32Config3B,
LlamaConfig,
LlamaModel,
)
@@ -73,12 +75,13 @@
from nemo.collections.llm.gpt.model.nemotron import (
Nemotron3Config4B,
Nemotron3Config8B,
+ Nemotron3Config22B,
Nemotron4Config15B,
- Nemotron4Config22B,
Nemotron4Config340B,
NemotronConfig,
NemotronModel,
)
+from nemo.collections.llm.gpt.model.phi3mini import Phi3Config, Phi3ConfigMini, Phi3Model
from nemo.collections.llm.gpt.model.qwen2 import (
Qwen2Config,
Qwen2Config1P5B,
@@ -133,13 +136,18 @@
"Llama31Config8B",
"Llama31Config70B",
"Llama31Config405B",
+ "Llama32Config1B",
+ "Llama32Config3B",
"NemotronConfig",
"Nemotron3Config4B",
"Nemotron3Config8B",
"Nemotron4Config15B",
- "Nemotron4Config22B",
+ "Nemotron3Config22B",
"Nemotron4Config340B",
"NemotronModel",
+ "Phi3Config",
+ "Phi3ConfigMini",
+ "Phi3Model",
"CodeLlamaConfig7B",
"CodeLlamaConfig13B",
"CodeLlamaConfig34B",
diff --git a/nemo/collections/llm/gpt/model/base.py b/nemo/collections/llm/gpt/model/base.py
index 5b25e0ca9b62..e411077aca31 100644
--- a/nemo/collections/llm/gpt/model/base.py
+++ b/nemo/collections/llm/gpt/model/base.py
@@ -15,11 +15,12 @@
from dataclasses import dataclass
from typing import TYPE_CHECKING, Callable, Dict, Literal, Optional, Union
-import pytorch_lightning as L
+import lightning.pytorch as L
import torch
import torch.distributed
from megatron.core.inference.model_inference_wrappers.gpt.gpt_inference_wrapper import GPTInferenceWrapper
from megatron.core.inference.model_inference_wrappers.inference_wrapper_config import InferenceWrapperConfig
+from megatron.core.models.gpt.gpt_model import GPTModel as MCoreGPTModel
from megatron.core.optimizer import OptimizerConfig
from megatron.core.transformer.spec_utils import ModuleSpec
from megatron.core.transformer.transformer_config import TransformerConfig
@@ -44,8 +45,6 @@
_grad_accum_fusion_available = False
if TYPE_CHECKING:
- from megatron.core.models.gpt.gpt_model import GPTModel as MCoreGPTModel
-
from nemo.collections.common.tokenizers.tokenizer_spec import TokenizerSpec
@@ -147,6 +146,15 @@ def default_layer_spec(config: "GPTConfig") -> ModuleSpec:
return local_layer_spec(config)
+def torch_dtype_from_mcore_config(config: TransformerConfig):
+ if config.fp16:
+ return torch.float16
+ elif config.bf16:
+ return torch.bfloat16
+ else:
+ return torch.float
+
+
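# Editor's sketch (not part of the patch): dtype resolution for the exporters
# below, assuming a hypothetical config instance:
#     cfg = GPTConfig126M(bf16=True)
#     torch_dtype_from_mcore_config(cfg)   # -> torch.bfloat16 (fp16 -> float16,
#                                          #    neither flag -> torch.float)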
@dataclass
class GPTConfig(TransformerConfig, io.IOMixin):
# From megatron.core.models.gpt.gpt_model.GPTModel
@@ -171,7 +179,7 @@ class GPTConfig(TransformerConfig, io.IOMixin):
forward_step_fn: Callable = gpt_forward_step
data_step_fn: Callable = gpt_data_step
- def configure_model(self, tokenizer) -> "MCoreGPTModel":
+ def configure_model(self, tokenizer, pre_process=None, post_process=None) -> "MCoreGPTModel":
vp_size = self.virtual_pipeline_model_parallel_size
if vp_size:
p_size = self.pipeline_model_parallel_size
@@ -180,7 +188,6 @@ def configure_model(self, tokenizer) -> "MCoreGPTModel":
) % vp_size == 0, "Make sure the number of model chunks is the same across all pipeline stages."
from megatron.core import parallel_state
- from megatron.core.models.gpt.gpt_model import GPTModel as MCoreGPTModel
transformer_layer_spec = self.transformer_layer_spec
if not isinstance(transformer_layer_spec, ModuleSpec):
@@ -207,8 +214,8 @@ def configure_model(self, tokenizer) -> "MCoreGPTModel":
rotary_percent=self.rotary_percent,
rotary_base=self.rotary_base,
seq_len_interpolation_factor=self.seq_len_interpolation_factor,
- pre_process=parallel_state.is_pipeline_first_stage(),
- post_process=parallel_state.is_pipeline_last_stage(),
+ pre_process=pre_process or parallel_state.is_pipeline_first_stage(),
+ post_process=post_process or parallel_state.is_pipeline_last_stage(),
)
# If using full TE layer, need to set TP, CP group since the module call
@@ -248,6 +255,9 @@ class GPTConfig126M(GPTConfig):
hidden_size: int = 768
ffn_hidden_size: int = 3072
num_attention_heads: int = 12
+ bias_activation_fusion: bool = True
+ bias_dropout_add_fusion: bool = True
+ use_transformer_engine_full_layer_spec: bool = True
@dataclass
@@ -257,9 +267,9 @@ class GPTConfig5B(GPTConfig):
hidden_size: int = 4096
ffn_hidden_size: int = 16384
num_attention_heads: int = 32
-
bias_activation_fusion: bool = True
bias_dropout_add_fusion: bool = True
+ use_transformer_engine_full_layer_spec: bool = True
@dataclass
@@ -269,6 +279,9 @@ class GPTConfig7B(GPTConfig):
hidden_size: int = 4096
ffn_hidden_size: int = 10880
num_attention_heads: int = 32
+ bias_activation_fusion: bool = True
+ bias_dropout_add_fusion: bool = True
+ use_transformer_engine_full_layer_spec: bool = True
@dataclass
@@ -278,9 +291,9 @@ class GPTConfig20B(GPTConfig):
hidden_size: int = 6144
ffn_hidden_size: int = 24576
num_attention_heads: int = 48
-
bias_activation_fusion: bool = True
bias_dropout_add_fusion: bool = True
+ use_transformer_engine_full_layer_spec: bool = True
@dataclass
@@ -290,6 +303,9 @@ class GPTConfig40B(GPTConfig):
hidden_size: int = 8192
ffn_hidden_size: int = 32768
num_attention_heads: int = 64
+ bias_activation_fusion: bool = True
+ bias_dropout_add_fusion: bool = True
+ use_transformer_engine_full_layer_spec: bool = True
@dataclass
@@ -301,9 +317,10 @@ class GPTConfig175B(GPTConfig):
num_attention_heads: int = 96
hidden_dropout: float = 0.0
attention_dropout: float = 0.0
- ffn_dropout: float = 0.0
bias_activation_fusion: bool = True
bias_dropout_add_fusion: bool = True
+ use_transformer_engine_full_layer_spec: bool = True
+ layernorm_zero_centered_gamma: bool = True
class GPTModel(L.LightningModule, io.IOMixin, io.ConnectorMixin, fn.FNMixin):
diff --git a/nemo/collections/llm/gpt/model/chatglm.py b/nemo/collections/llm/gpt/model/chatglm.py
index e7450a8db28d..f9ad8fc6010c 100644
--- a/nemo/collections/llm/gpt/model/chatglm.py
+++ b/nemo/collections/llm/gpt/model/chatglm.py
@@ -20,7 +20,7 @@
import torch.nn.functional as F
from torch import nn
-from nemo.collections.llm.gpt.model.base import GPTConfig, GPTModel
+from nemo.collections.llm.gpt.model.base import GPTConfig, GPTModel, torch_dtype_from_mcore_config
from nemo.collections.llm.utils import Config
from nemo.lightning import OptimizerModule, io, teardown
from nemo.lightning.pytorch.utils import dtype_from_hf
@@ -139,16 +139,16 @@ def config(self) -> ChatGLMConfig:
@io.model_exporter(ChatGLMModel, "hf")
class HFChatGLMExporter(io.ModelConnector[ChatGLMModel, "AutoModelForCausalLM"]):
- def init(self) -> "AutoModelForCausalLM":
+ def init(self, dtype=torch.bfloat16) -> "AutoModelForCausalLM":
from transformers import AutoModelForCausalLM
from transformers.modeling_utils import no_init_weights
with no_init_weights(True):
- return AutoModelForCausalLM.from_config(self.config, trust_remote_code=True)
+ return AutoModelForCausalLM.from_config(self.config, trust_remote_code=True, torch_dtype=dtype)
def apply(self, output_path: Path) -> Path:
- target = self.init()
source, _ = self.nemo_load(str(self))
+ target = self.init(torch_dtype_from_mcore_config(source.config))
target = self.convert_state(source, target)
target = target.cpu()
@@ -159,14 +159,12 @@ def apply(self, output_path: Path) -> Path:
def convert_state(self, source, target):
mapping = {
- "embedding.word_embeddings.weight": "transformer.embedding.word_embeddings.weight",
"decoder.layers.*.self_attention.linear_proj.weight": "transformer.encoder.layers.*.self_attention.dense.weight",
"decoder.layers.*.mlp.linear_fc1.weight": "transformer.encoder.layers.*.mlp.dense_h_to_4h.weight",
"decoder.layers.*.mlp.linear_fc2.weight": "transformer.encoder.layers.*.mlp.dense_4h_to_h.weight",
"decoder.layers.*.self_attention.linear_qkv.layer_norm_weight": "transformer.encoder.layers.*.input_layernorm.weight",
"decoder.layers.*.mlp.linear_fc1.layer_norm_weight": "transformer.encoder.layers.*.post_attention_layernorm.weight",
"decoder.final_layernorm.weight": "transformer.encoder.final_layernorm.weight",
- "output_layer.weight": "transformer.output_layer.weight",
}
return io.apply_transforms(
@@ -176,6 +174,8 @@ def convert_state(self, source, target):
transforms=[
_export_qkv_weight,
_export_qkv_bias,
+ _export_embedding,
+ _export_head,
],
)
@@ -198,6 +198,26 @@ def config(self) -> "AutoConfig":
)
+@io.state_transform(
+ source_key="embedding.word_embeddings.weight",
+ target_key="transformer.embedding.word_embeddings.weight",
+)
+def _export_embedding(ctx: io.TransformCTX, embedding):
+ megatron_config = ctx.target.config
+ # prune padding.
+ return embedding[: megatron_config.vocab_size, :]
+
+
+@io.state_transform(
+ source_key="output_layer.weight",
+ target_key="transformer.output_layer.weight",
+)
+def _export_head(ctx: io.TransformCTX, embedding):
+ megatron_config = ctx.target.config
+ # prune padding.
+ return embedding[: megatron_config.vocab_size, :]
+
+
@io.state_transform(
source_key="transformer.encoder.layers.*.self_attention.query_key_value.weight",
target_key="decoder.layers.*.self_attention.linear_qkv.weight",
diff --git a/nemo/collections/llm/gpt/model/hf_auto_model_for_causal_lm.py b/nemo/collections/llm/gpt/model/hf_auto_model_for_causal_lm.py
index f29756dc05a7..c0f02d706ceb 100644
--- a/nemo/collections/llm/gpt/model/hf_auto_model_for_causal_lm.py
+++ b/nemo/collections/llm/gpt/model/hf_auto_model_for_causal_lm.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
import torch.nn.functional as F
from transformers import AutoModelForCausalLM
@@ -20,10 +20,7 @@
from nemo.collections.common.tokenizers.huggingface.auto_tokenizer import AutoTokenizer
from nemo.collections.llm import fn
from nemo.lightning import io
-
-
-def _extract_non_bias_params(model):
- return list(map(lambda x: x[1], filter(lambda x: not 'bias' in x[0], model.named_parameters())))
+from nemo.utils import logging
def masked_cross_entropy(logits, targets, mask=None):
@@ -35,7 +32,15 @@ def masked_cross_entropy(logits, targets, mask=None):
class HfAutoModelForCausalLM(pl.LightningModule, io.IOMixin, fn.FNMixin):
- def __init__(self, model_name='gpt2', load_pretrained_weights=True, tokenizer=None, loss_fn=masked_cross_entropy):
+ def __init__(
+ self,
+ model_name='gpt2',
+ load_pretrained_weights=True,
+ tokenizer=None,
+ loss_fn=masked_cross_entropy,
+ model_transform=None,
+ trust_remote_code=False,
+ ):
super().__init__()
self.save_hyperparameters()
self.model_name = model_name
@@ -44,11 +49,13 @@ def __init__(self, model_name='gpt2', load_pretrained_weights=True, tokenizer=No
self.loss_fn = loss_fn
self.load_pretrained_weights = load_pretrained_weights
self.is_hf_model = True
+ self.model_transform = model_transform
+ self.trust_remote_code = trust_remote_code
@property
def tokenizer(self):
if self._tokenizer is None:
- self._tokenizer = HfAutoModelForCausalLM.configure_tokenizer(self.model_name)
+ self._tokenizer = HfAutoModelForCausalLM.configure_tokenizer(self.model_name, self.trust_remote_code)
return self._tokenizer
@tokenizer.setter
@@ -57,18 +64,20 @@ def tokenizer(self, value):
self._tokenizer = value
@staticmethod
- def configure_tokenizer(model_name):
- return AutoTokenizer(model_name)
+ def configure_tokenizer(model_name, trust_remote_code=False):
+ return AutoTokenizer(model_name, trust_remote_code=trust_remote_code)
def configure_model(self):
# create all your layers here
if self.load_pretrained_weights:
- self.model = AutoModelForCausalLM.from_pretrained(self.model_name, torch_dtype='auto')
+ self.model = AutoModelForCausalLM.from_pretrained(
+ self.model_name, torch_dtype='auto', trust_remote_code=self.trust_remote_code
+ )
else:
from transformers import AutoConfig
- config = AutoConfig.from_pretained(self.model_name)
- self.model = AutoModelForCausalLM.from_config(config)
+ config = AutoConfig.from_pretrained(self.model_name, trust_remote_code=self.trust_remote_code)
+ self.model = AutoModelForCausalLM.from_config(config, trust_remote_code=self.trust_remote_code)
self.model.train()
def forward(self, input_ids, attention_mask=None, labels=None, loss_mask=None):
@@ -107,3 +116,11 @@ def validation_step(self, batch, batch_idx):
loss = output.loss
self.log('val_loss', loss, on_step=True, on_epoch=True, prog_bar=True)
+
+ def save_pretrained(self, path):
+ assert self.model is not None, "Model has to be created first."
+ self.model.save_pretrained(path)
+ if self._tokenizer is not None:
+ self._tokenizer.save_pretrained(path)
+ else:
+ logging.warning("A tokenizer wasn't created before to save.")
diff --git a/nemo/collections/llm/gpt/model/llama.py b/nemo/collections/llm/gpt/model/llama.py
index 5bc45b1049f3..a7e995addb83 100644
--- a/nemo/collections/llm/gpt/model/llama.py
+++ b/nemo/collections/llm/gpt/model/llama.py
@@ -14,6 +14,7 @@
import math
from dataclasses import dataclass
+from functools import partial
from pathlib import Path
from typing import TYPE_CHECKING, Annotated, Callable, Optional
@@ -21,7 +22,7 @@
import torch.nn.functional as F
from torch import nn
-from nemo.collections.llm.gpt.model.base import GPTConfig, GPTModel
+from nemo.collections.llm.gpt.model.base import GPTConfig, GPTModel, torch_dtype_from_mcore_config
from nemo.collections.llm.utils import Config
from nemo.lightning import OptimizerModule, io, teardown
from nemo.lightning.pytorch.utils import dtype_from_hf
@@ -86,7 +87,7 @@ class Llama2Config70B(LlamaConfig):
@dataclass
-class Llama3Config(GPTConfig):
+class Llama3Config(LlamaConfig):
num_query_groups: int = 8
hidden_dropout: float = 0.0
attention_dropout: float = 0.0
@@ -115,8 +116,8 @@ class Llama31Config(Llama3Config):
old_context_len: int = 8192
init_method_std: float = 0.02
- def configure_model(self, tokenizer) -> "MCoreGPTModel":
- model = super().configure_model(tokenizer)
+ def configure_model(self, tokenizer, pre_process=None, post_process=None) -> "MCoreGPTModel":
+ model = super().configure_model(tokenizer, pre_process, post_process)
# Apply rope scaling for Llama3.1 model
model.rotary_pos_emb.inv_freq = apply_rope_scaling(
model.rotary_pos_emb.inv_freq,
@@ -182,6 +183,32 @@ class Llama31Config405B(Llama31Config):
make_vocab_size_divisible_by: int = 128
+@dataclass
+class Llama32Config1B(Llama31Config):
+ scale_factor: int = 32
+ share_embeddings_and_output_weights: bool = True
+ rotary_base: int = 500_000
+ num_layers: int = 16
+ hidden_size: int = 2048
+ ffn_hidden_size: int = 8192
+ num_attention_heads: int = 32
+ num_query_groups: int = 8
+ make_vocab_size_divisible_by: int = 128
+
+
+@dataclass
+class Llama32Config3B(Llama31Config):
+ scale_factor: int = 32
+ share_embeddings_and_output_weights: bool = True
+ rotary_base: int = 500_000
+ num_layers: int = 28
+ hidden_size: int = 3072
+ ffn_hidden_size: int = 8192
+ num_attention_heads: int = 24
+ num_query_groups: int = 8
+ make_vocab_size_divisible_by: int = 128
+
+
@dataclass
class CodeLlamaConfig7B(Llama2Config7B):
rotary_base: int = 1_000_000
@@ -252,6 +279,9 @@ def convert_state(self, source, target):
"model.norm.weight": "decoder.final_layernorm.weight",
"lm_head.weight": "output_layer.weight",
}
+ if getattr(source.config, "tie_word_embeddings", False):
+ # Llama 3.2 1B and 3B models share input and output embeddings, so the HF checkpoint has no separate lm_head weight
+ del mapping["lm_head.weight"]
return io.apply_transforms(source, target, mapping=mapping, transforms=[_import_qkv, _import_linear_fc1])
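# Editor's note (not part of the patch): for Llama 3.2 1B/3B the HF checkpoint
# sets tie_word_embeddings=True and ships no lm_head.weight tensor, so the
# importer drops that mapping entry and the embedding is imported once:
#     getattr(source.config, "tie_word_embeddings", False)  # True -> del mapping["lm_head.weight"]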
@@ -273,7 +303,12 @@ def make_vocab_size_divisible_by(vocab_size):
base //= 2
return base
- output = LlamaConfig(
+ if getattr(source, 'rope_scaling', None) is not None and source.rope_scaling.get('rope_type') == 'llama3':
+ # Apply Llama 3.1's custom rope scaling
+ cls = partial(Llama31Config, scale_factor=source.rope_scaling.get("factor", 8.0))
+ else:
+ cls = LlamaConfig
+ output = cls(
num_layers=source.num_hidden_layers,
hidden_size=source.hidden_size,
ffn_hidden_size=source.intermediate_size,
@@ -284,7 +319,7 @@ def make_vocab_size_divisible_by(vocab_size):
rotary_base=source.rope_theta,
gated_linear_unit=True,
make_vocab_size_divisible_by=make_vocab_size_divisible_by(source.vocab_size),
- share_embeddings_and_output_weights=False,
+ share_embeddings_and_output_weights=getattr(source, "tie_word_embeddings", False),
fp16=(dtype_from_hf(source) == torch.float16),
bf16=(dtype_from_hf(source) == torch.bfloat16),
params_dtype=dtype_from_hf(source),
@@ -295,16 +330,16 @@ def make_vocab_size_divisible_by(vocab_size):
@io.model_exporter(LlamaModel, "hf")
class HFLlamaExporter(io.ModelConnector[LlamaModel, "LlamaForCausalLM"]):
- def init(self) -> "LlamaForCausalLM":
+ def init(self, dtype=torch.bfloat16) -> "LlamaForCausalLM":
from transformers import AutoModelForCausalLM
from transformers.modeling_utils import no_init_weights
with no_init_weights(True):
- return AutoModelForCausalLM.from_config(self.config)
+ return AutoModelForCausalLM.from_config(self.config, torch_dtype=dtype)
def apply(self, output_path: Path) -> Path:
- target = self.init()
source, _ = self.nemo_load(str(self))
+ target = self.init(torch_dtype_from_mcore_config(source.config))
target = self.convert_state(source, target)
target = target.cpu()
@@ -315,16 +350,19 @@ def apply(self, output_path: Path) -> Path:
def convert_state(self, source, target):
mapping = {
- "embedding.word_embeddings.weight": "model.embed_tokens.weight",
"decoder.layers.*.self_attention.linear_proj.weight": "model.layers.*.self_attn.o_proj.weight",
"decoder.layers.*.mlp.linear_fc2.weight": "model.layers.*.mlp.down_proj.weight",
"decoder.layers.*.self_attention.linear_qkv.layer_norm_weight": "model.layers.*.input_layernorm.weight",
"decoder.layers.*.mlp.linear_fc1.layer_norm_weight": "model.layers.*.post_attention_layernorm.weight",
"decoder.final_layernorm.weight": "model.norm.weight",
- "output_layer.weight": "lm_head.weight",
}
- return io.apply_transforms(source, target, mapping=mapping, transforms=[_export_qkv, _export_linear_fc1])
+ return io.apply_transforms(
+ source,
+ target,
+ mapping=mapping,
+ transforms=[_export_qkv, _export_linear_fc1, _export_embedding, _export_head],
+ )
@property
def tokenizer(self):
@@ -347,6 +385,7 @@ def config(self) -> "HFLlamaConfig":
num_key_value_heads=source.num_query_groups,
rope_theta=source.rotary_base,
vocab_size=self.tokenizer.vocab_size,
+ tie_word_embeddings=source.share_embeddings_and_output_weights,
)
@@ -426,6 +465,26 @@ def _export_qkv(ctx: io.TransformCTX, linear_qkv):
return q_proj, k_proj, v_proj
+@io.state_transform(
+ source_key="embedding.word_embeddings.weight",
+ target_key="model.embed_tokens.weight",
+)
+def _export_embedding(ctx: io.TransformCTX, embedding):
+ megatron_config = ctx.target.config
+ # prune padding.
+ return embedding[: megatron_config.vocab_size, :]
+
+
+@io.state_transform(
+ source_key="output_layer.weight",
+ target_key="lm_head.weight",
+)
+def _export_head(ctx: io.TransformCTX, embedding):
+ megatron_config = ctx.target.config
+ # prune padding.
+ return embedding[: megatron_config.vocab_size, :]
+
+
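# Editor's sketch (not part of the patch): why both export transforms slice to
# vocab_size. Megatron pads the embedding rows so the vocabulary divides evenly
# across tensor-parallel ranks; HF checkpoints carry no such padding rows.
# Hypothetical numbers:
#     padded_rows, true_vocab = 32064, 32000
#     embedding[:true_vocab, :].shape   # -> (32000, hidden_size)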
@io.state_transform(
source_key=("model.layers.*.mlp.gate_proj.weight", "model.layers.*.mlp.up_proj.weight"),
target_key="decoder.layers.*.mlp.linear_fc1.weight",
@@ -481,6 +540,8 @@ def apply_rope_scaling(
"Llama31Config8B",
"Llama31Config70B",
"Llama31Config405B",
+ "Llama32Config1B",
+ "Llama32Config3B",
"CodeLlamaConfig7B",
"CodeLlamaConfig13B",
"CodeLlamaConfig34B",
diff --git a/nemo/collections/llm/gpt/model/mistral.py b/nemo/collections/llm/gpt/model/mistral.py
index b9f4b6fb8f65..0aa611b4454e 100644
--- a/nemo/collections/llm/gpt/model/mistral.py
+++ b/nemo/collections/llm/gpt/model/mistral.py
@@ -16,13 +16,13 @@
from pathlib import Path
from typing import TYPE_CHECKING, Callable, List, Optional
-import pytorch_lightning as pl
import torch
import torch.nn.functional as F
from torch import nn
from typing_extensions import Annotated
-from nemo.collections.llm.gpt.model.base import GPTConfig, GPTModel
+from nemo.collections.llm.gpt.model.base import GPTConfig, GPTModel, torch_dtype_from_mcore_config
+from nemo.collections.llm.gpt.model.llama import _export_embedding, _export_head
from nemo.collections.llm.utils import Config
from nemo.lightning import io, teardown
from nemo.lightning.pytorch.optim import OptimizerModule
@@ -186,19 +186,19 @@ def make_vocab_size_divisible_by(mistral_vocab_size):
@io.model_exporter(MistralModel, "hf")
class HFMistralExporter(io.ModelConnector[MistralModel, "MistralForCausalLM"]):
- def init(self) -> "MistralForCausalLM":
+ def init(self, dtype=torch.bfloat16) -> "MistralForCausalLM":
from transformers import AutoModelForCausalLM
from transformers.modeling_utils import no_init_weights
with no_init_weights(True):
- return AutoModelForCausalLM.from_config(self.config)
+ return AutoModelForCausalLM.from_config(self.config, torch_dtype=dtype)
def apply(self, output_path: Path) -> Path:
# TODO: Make it work with lazy init
# with torch.device("meta"):
# target = self.init()
- target = self.init()
source, _ = self.nemo_load(str(self))
+ target = self.init(torch_dtype_from_mcore_config(source.config))
target = self.convert_state(source, target)
# TODO: Make sure we don't need to do this
@@ -210,16 +210,19 @@ def apply(self, output_path: Path) -> Path:
def convert_state(self, source, target):
mapping = {
- "embedding.word_embeddings.weight": "model.embed_tokens.weight",
"decoder.layers.*.self_attention.linear_proj.weight": "model.layers.*.self_attn.o_proj.weight",
"decoder.layers.*.mlp.linear_fc2.weight": "model.layers.*.mlp.down_proj.weight",
"decoder.layers.*.self_attention.linear_qkv.layer_norm_weight": "model.layers.*.input_layernorm.weight",
"decoder.layers.*.mlp.linear_fc1.layer_norm_weight": "model.layers.*.post_attention_layernorm.weight",
"decoder.final_layernorm.weight": "model.norm.weight",
- "output_layer.weight": "lm_head.weight",
}
- return io.apply_transforms(source, target, mapping=mapping, transforms=[_export_qkv, _export_linear_fc1])
+ return io.apply_transforms(
+ source,
+ target,
+ mapping=mapping,
+ transforms=[_export_qkv, _export_linear_fc1, _export_embedding, _export_head],
+ )
@property
def tokenizer(self):
diff --git a/nemo/collections/llm/gpt/model/mixtral.py b/nemo/collections/llm/gpt/model/mixtral.py
index 23b83960a9ec..29361c38fda5 100644
--- a/nemo/collections/llm/gpt/model/mixtral.py
+++ b/nemo/collections/llm/gpt/model/mixtral.py
@@ -21,6 +21,7 @@
from torch import nn
from nemo.collections.llm.gpt.model.base import GPTConfig, GPTModel
+from nemo.collections.llm.gpt.model.llama import _export_embedding, _export_head
from nemo.lightning import io, teardown
from nemo.lightning.pytorch.optim import OptimizerModule
@@ -290,7 +291,6 @@ def apply(self, output_path: Path) -> Path:
def convert_state(self, source, target):
mapping = {
- "embedding.word_embeddings.weight": "model.embed_tokens.weight",
"decoder.layers.*.self_attention.linear_proj.weight": "model.layers.*.self_attn.o_proj.weight",
"decoder.layers.*.self_attention.linear_qkv.layer_norm_weight": "model.layers.*.input_layernorm.weight",
"decoder.layers.*.pre_mlp_layernorm.weight": "model.layers.*.post_attention_layernorm.weight",
@@ -299,10 +299,14 @@ def convert_state(self, source, target):
"decoder.layers.*.mlp.router.weight": "model.layers.*.block_sparse_moe.gate.weight",
# lm-head
"decoder.final_layernorm.weight": "model.norm.weight",
- "output_layer.weight": "lm_head.weight",
}
- return io.apply_transforms(source, target, mapping=mapping, transforms=[_export_qkv, _export_moe_w1_w3])
+ return io.apply_transforms(
+ source,
+ target,
+ mapping=mapping,
+ transforms=[_export_qkv, _export_moe_w1_w3, _export_embedding, _export_head],
+ )
@property
def tokenizer(self):
diff --git a/nemo/collections/llm/gpt/model/nemotron.py b/nemo/collections/llm/gpt/model/nemotron.py
index c5ffbf9b52d2..c45c7fcdbb15 100644
--- a/nemo/collections/llm/gpt/model/nemotron.py
+++ b/nemo/collections/llm/gpt/model/nemotron.py
@@ -20,7 +20,8 @@
from torch import nn
from nemo.collections.llm.fn.activation import squared_relu
-from nemo.collections.llm.gpt.model.base import GPTConfig, GPTModel
+from nemo.collections.llm.gpt.model.base import GPTConfig, GPTModel, torch_dtype_from_mcore_config
+from nemo.collections.llm.gpt.model.llama import _export_embedding, _export_head
from nemo.collections.llm.utils import Config
from nemo.lightning import OptimizerModule, io, teardown
from nemo.lightning.pytorch.utils import dtype_from_hf
@@ -49,6 +50,7 @@ class NemotronConfig(GPTConfig):
persist_layer_norm: bool = True
bias_dropout_add_fusion: bool = False
layernorm_zero_centered_gamma: bool = True
+ cross_entropy_loss_fusion: bool = True
# Nemotron3Config4B as default configs
num_layers: int = 32
@@ -86,27 +88,27 @@ class Nemotron3Config8B(NemotronConfig):
@dataclass
-class Nemotron4Config15B(NemotronConfig):
- num_layers: int = 32
+class Nemotron3Config22B(NemotronConfig):
+ num_layers: int = 40
seq_length: int = 4096
hidden_size: int = 6144
ffn_hidden_size: int = 24576
num_attention_heads: int = 48
- num_query_groups: Optional[int] = 8
+ num_query_groups: Optional[int] = None
kv_channels: Optional[int] = None
- init_method_std: float = 0.0134
+ init_method_std: float = 0.008
@dataclass
-class Nemotron4Config22B(NemotronConfig):
- num_layers: int = 40
+class Nemotron4Config15B(NemotronConfig):
+ num_layers: int = 32
seq_length: int = 4096
hidden_size: int = 6144
ffn_hidden_size: int = 24576
num_attention_heads: int = 48
- num_query_groups: Optional[int] = None
+ num_query_groups: Optional[int] = 8
kv_channels: Optional[int] = None
- init_method_std: float = 0.008
+ init_method_std: float = 0.0134
@dataclass
@@ -140,6 +142,7 @@ def init(self) -> NemotronModel:
def apply(self, output_path: Path) -> Path:
from transformers import NemotronForCausalLM
+ print('Start converting Nemotron model...')
source = NemotronForCausalLM.from_pretrained(str(self), torch_dtype='auto')
target = self.init()
trainer = self.nemo_setup(target)
@@ -211,15 +214,15 @@ def make_vocab_size_divisible_by(vocab_size):
@io.model_exporter(NemotronModel, "hf")
class HFNemotronExporter(io.ModelConnector[NemotronModel, "NemotronForCausalLM"]):
- def init(self) -> "NemotronForCausalLM":
+ def init(self, dtype=torch.bfloat16) -> "NemotronForCausalLM":
from transformers.modeling_utils import no_init_weights
with no_init_weights(True):
- return NemotronForCausalLM.from_config(self.config)
+ return NemotronForCausalLM.from_config(self.config, torch_dtype=dtype)
def apply(self, output_path: Path) -> Path:
- target = self.init()
source, _ = self.nemo_load(str(self))
+ target = self.init(torch_dtype_from_mcore_config(source.config))
target = self.convert_state(source, target)
target = target.cpu()
@@ -230,7 +233,6 @@ def apply(self, output_path: Path) -> Path:
def convert_state(self, source, target):
mapping = {
- "embedding.word_embeddings.weight": "model.embed_tokens.weight",
"decoder.layers.*.self_attention.linear_proj.weight": "model.layers.*.self_attn.o_proj.weight",
"decoder.layers.*.mlp.linear_fc1.weight": "model.layers.*.mlp.up_proj.weight",
"decoder.layers.*.mlp.linear_fc2.weight": "model.layers.*.mlp.down_proj.weight",
@@ -240,10 +242,11 @@ def convert_state(self, source, target):
"decoder.layers.*.mlp.linear_fc1.layer_norm_bias": "model.layers.*.post_attention_layernorm.bias",
"decoder.final_layernorm.weight": "model.norm.weight",
"decoder.final_layernorm.bias": "model.norm.bias",
- "output_layer.weight": "lm_head.weight",
}
- return io.apply_transforms(source, target, mapping=mapping, transforms=[_export_qkv])
+ return io.apply_transforms(
+ source, target, mapping=mapping, transforms=[_export_qkv, _export_embedding, _export_head]
+ )
@property
def tokenizer(self):
@@ -356,8 +359,8 @@ def _export_qkv(ctx: io.TransformCTX, linear_qkv):
"NemotronConfig",
"Nemotron3Config4B",
"Nemotron3Config8B",
+ "Nemotron3Config22B",
"Nemotron4Config15B",
- "Nemotron4Config22B",
"Nemotron4Config340B",
"NemotronModel",
]
diff --git a/nemo/collections/llm/gpt/model/phi3mini.py b/nemo/collections/llm/gpt/model/phi3mini.py
new file mode 100644
index 000000000000..eb0b9c758dd7
--- /dev/null
+++ b/nemo/collections/llm/gpt/model/phi3mini.py
@@ -0,0 +1,258 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Callable, Optional
+
+import torch
+import torch.nn.functional as F
+from torch import nn
+
+from nemo.collections.llm.gpt.model.base import GPTConfig, GPTModel
+from nemo.lightning import OptimizerModule, io, teardown
+from nemo.lightning.pytorch.utils import dtype_from_hf
+
+
+@dataclass
+class Phi3Config(GPTConfig):
+ # pylint: disable=C0115,C0116
+ normalization: str = "RMSNorm"
+ activation_func: Callable = F.silu
+ gated_linear_unit: bool = True
+ position_embedding_type: str = "rope"
+ add_bias_linear: bool = False
+ seq_length: int = 4096
+ attention_dropout: float = 0.0
+ hidden_dropout: float = 0.0
+ share_embeddings_and_output_weights: bool = False
+
+
+@dataclass
+class Phi3ConfigMini(Phi3Config):
+ # pylint: disable=C0115,C0116
+ num_layers: int = 32
+ hidden_size: int = 3072
+ ffn_hidden_size: int = 8192
+ num_attention_heads: int = 32
+ num_query_groups: int = 32
+ rotary_base: float = 10000.0
+ vocab_size: int = 32064
+
+
+class Phi3Model(GPTModel):
+ # pylint: disable=C0115,C0116
+ def __init__(
+ self,
+ config: Optional[Phi3Config] = None,
+ optim: Optional[OptimizerModule] = None,
+ tokenizer: Optional["TokenizerSpec"] = None,
+ model_transform: Optional[Callable[[nn.Module], nn.Module]] = None,
+ ):
+ super().__init__(config or Phi3Config(), optim=optim, tokenizer=tokenizer, model_transform=model_transform)
+
+
+@io.model_importer(Phi3Model, "hf")
+class HFPhi3Importer(io.ModelConnector["Phi3ForCausalLM", Phi3Model]):
+ # pylint: disable=C0115,C0116
+ def init(self) -> Phi3Model:
+ return Phi3Model(self.config, tokenizer=self.tokenizer)
+
+ def apply(self, output_path: Path) -> Path:
+ from transformers import Phi3ForCausalLM
+
+ # Check that the source is a valid model identifier or path
+ try:
+ source = Phi3ForCausalLM.from_pretrained(str(self), torch_dtype='auto')
+ except Exception as e:
+ raise ValueError(f"Failed to load the model from source '{self}': {e}")
+
+ target = self.init()
+ trainer = self.nemo_setup(target)
+ self.convert_state(source, target)
+ self.nemo_save(output_path, trainer)
+
+ print(f"Converted Phi3 model to Nemo, model saved to {output_path} in {source.dtype}.")
+
+ teardown(trainer, target)
+ del trainer, target
+
+ return output_path
+
+ def convert_state(self, source, target):
+ # pylint: disable=C0115,C0116
+ # Define mapping for mini-4k-instruct
+ mapping = {
+ "model.embed_tokens.weight": "embedding.word_embeddings.weight",
+ "model.layers.*.self_attn.o_proj.weight": "decoder.layers.*.self_attention.linear_proj.weight",
+ "model.layers.*.self_attn.qkv_proj.weight": "decoder.layers.*.self_attention.linear_qkv.weight",
+ "model.layers.*.mlp.gate_up_proj.weight": "decoder.layers.*.mlp.linear_fc1.weight",
+ "model.layers.*.mlp.down_proj.weight": "decoder.layers.*.mlp.linear_fc2.weight",
+ "model.layers.*.input_layernorm.weight": "decoder.layers.*.self_attention.linear_qkv.layer_norm_weight",
+ "model.layers.*.post_attention_layernorm.weight": "decoder.layers.*.mlp.linear_fc1.layer_norm_weight",
+ "model.norm.weight": "decoder.final_layernorm.weight",
+ "lm_head.weight": "output_layer.weight",
+ }
+
+ return io.apply_transforms(source, target, mapping=mapping, transforms=[_import_qkv, _import_linear_fc1])
+
+ @property
+ def tokenizer(self):
+ # pylint: disable=C0115,C0116
+ from nemo.collections.common.tokenizers.huggingface.auto_tokenizer import AutoTokenizer
+
+ return AutoTokenizer(self.save_hf_tokenizer_assets(str(self)))
+
+ @property
+ def config(self) -> Phi3Config:
+ # pylint: disable=C0115,C0116
+ from transformers import Phi3Config as HFPhi3Config
+
+ source = HFPhi3Config.from_pretrained(str(self))
+
+ def make_vocab_size_divisible_by(vocab_size):
+ base = 128
+ while vocab_size % base != 0:
+ base //= 2
+ return base
+
+ output = Phi3Config(
+ num_layers=source.num_hidden_layers,
+ hidden_size=source.hidden_size,
+ ffn_hidden_size=source.intermediate_size,
+ num_attention_heads=source.num_attention_heads,
+ init_method_std=source.initializer_range,
+ layernorm_epsilon=source.rms_norm_eps,
+ rotary_base=source.rope_theta,
+ gated_linear_unit=True,
+ make_vocab_size_divisible_by=make_vocab_size_divisible_by(source.vocab_size),
+ share_embeddings_and_output_weights=False,
+ fp16=(dtype_from_hf(source) == torch.float16),
+ bf16=(dtype_from_hf(source) == torch.bfloat16),
+ params_dtype=dtype_from_hf(source),
+ )
+ print("output:", output)
+ return output
+
+
+@io.model_exporter(Phi3Model, "hf")
+class HFPhi3Exporter(io.ModelConnector[Phi3Model, "Phi3ForCausalLM"]):
+ # pylint: disable=C0115,C0116
+ def init(self) -> "Phi3ForCausalLM":
+ from transformers import AutoModelForCausalLM
+
+ return AutoModelForCausalLM.from_config(self.config)
+
+ def apply(self, output_path: Path) -> Path:
+ target = self.init()
+ source, _ = self.nemo_load(str(self))
+ target = self.convert_state(source, target)
+
+ target.cpu().save_pretrained(output_path)
+ self.tokenizer.save_pretrained(output_path)
+
+ return output_path
+
+ def convert_state(self, source, target):
+ # pylint: disable=C0115,C0116
+ mapping = {
+ "embedding.word_embeddings.weight": "model.embed_tokens.weight",
+ "decoder.layers.*.self_attention.linear_proj.weight": "model.layers.*.self_attn.o_proj.weight",
+ "decoder.layers.*.mlp.linear_fc2.weight": "model.layers.*.mlp.down_proj.weight",
+ "decoder.layers.*.self_attention.linear_qkv.layer_norm_weight": "model.layers.*.input_layernorm.weight",
+ "decoder.layers.*.mlp.linear_fc1.layer_norm_weight": "model.layers.*.post_attention_layernorm.weight",
+ "decoder.final_layernorm.weight": "model.norm.weight",
+ "output_layer.weight": "lm_head.weight",
+ }
+
+ # Convert source weights to target dtype if needed
+ for name, param in source.state_dict().items():
+ if param.dtype != target.state_dict()[name].dtype:
+ param.data = param.data.to(target.state_dict()[name].dtype)
+
+ return io.apply_transforms(source, target, mapping=mapping)
+
+ @property
+ def tokenizer(self):
+ # pylint: disable=C0115,C0116
+ return io.load_context(str(self)).model.tokenizer.tokenizer
+
+ @property
+ def config(self) -> "HFPhi3Config":
+ # pylint: disable=C0115,C0116
+ source: Phi3Config = io.load_context(str(self)).model.config
+
+ from transformers import Phi3Config as HFPhi3Config
+
+ return HFPhi3Config(
+ num_hidden_layers=source.num_layers,
+ hidden_size=source.hidden_size,
+ intermediate_size=source.ffn_hidden_size,
+ num_attention_heads=source.num_attention_heads,
+ max_position_embeddings=source.seq_length,
+ initializer_range=0.02,
+ rms_norm_eps=1e-05,
+ num_key_value_heads=source.num_query_groups,
+ rope_theta=source.rotary_base,
+ vocab_size=self.tokenizer.vocab_size,
+ )
+
+
+@io.state_transform(
+ source_key="model.layers.*.self_attn.qkv_proj.weight",
+ target_key="decoder.layers.*.self_attention.linear_qkv.weight",
+)
+def _import_qkv(ctx: io.TransformCTX, qkv_weight):
+ megatron_config = ctx.target.config
+
+ head_num = megatron_config.num_attention_heads
+ num_query_groups = megatron_config.num_query_groups
+ heads_per_group = head_num // num_query_groups
+ hidden_size = megatron_config.hidden_size
+ head_size = megatron_config.kv_channels
+
+ old_tensor_shape = qkv_weight.size()
+ new_q_tensor_shape = (head_num, head_size, old_tensor_shape[1])
+ new_kv_tensor_shape = (num_query_groups, head_size, old_tensor_shape[1])
+ q, k, v = qkv_weight.split(
+ [head_num * head_size, num_query_groups * head_size, num_query_groups * head_size], dim=0
+ )
+ q = q.view(*new_q_tensor_shape)
+ k = k.view(*new_kv_tensor_shape)
+ v = v.view(*new_kv_tensor_shape)
+
+ qkv_weights = torch.empty((0, head_size, old_tensor_shape[1])).type_as(qkv_weight)
+ for i in range(num_query_groups):
+ qkv_weights = torch.cat((qkv_weights, q[i * heads_per_group : (i + 1) * heads_per_group, :, :]))
+ qkv_weights = torch.cat((qkv_weights, k[i : i + 1, :, :]))
+ qkv_weights = torch.cat((qkv_weights, v[i : i + 1, :, :]))
+ assert qkv_weights.ndim == 3, qkv_weights.shape
+ assert qkv_weights.shape[0] == (heads_per_group + 2) * num_query_groups, qkv_weights.shape
+ assert qkv_weights.shape[1] == head_size, qkv_weights.shape
+ assert qkv_weights.shape[2] == old_tensor_shape[1], qkv_weights.shape
+
+ qkv_weights = qkv_weights.reshape([head_size * (head_num + 2 * num_query_groups), hidden_size])
+
+ return qkv_weights
+
+
+@io.state_transform(
+ source_key=("model.layers.*.mlp.gate_proj.weight", "model.layers.*.mlp.up_proj.weight"), # phi-3-mini-4k-instruct
+ target_key="decoder.layers.*.mlp.linear_fc1.weight",
+)
+def _import_linear_fc1(gate, up):
+ return torch.cat((gate, up), dim=0)
+
+
+__all__ = ["Phi3Config", "Phi3ConfigMini", "Phi3Model"]
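
The `_import_qkv` transform above interleaves query heads with their grouped key/value heads to build Megatron's fused QKV layout. A minimal sketch of that layout with hypothetical sizes (4 heads, 2 query groups, head size 2, hidden size 8):

    import torch

    head_num, num_query_groups, head_size, hidden = 4, 2, 2, 8  # hypothetical sizes
    heads_per_group = head_num // num_query_groups
    q = torch.randn(head_num, head_size, hidden)
    k = torch.randn(num_query_groups, head_size, hidden)
    v = torch.randn(num_query_groups, head_size, hidden)

    # Megatron expects [q_g0 ..., k_g0, v_g0, q_g1 ..., k_g1, v_g1] along dim 0.
    chunks = []
    for g in range(num_query_groups):
        chunks += [q[g * heads_per_group : (g + 1) * heads_per_group], k[g : g + 1], v[g : g + 1]]
    qkv = torch.cat(chunks).reshape(head_size * (head_num + 2 * num_query_groups), hidden)
    assert qkv.shape == (16, 8)
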
diff --git a/nemo/collections/llm/gpt/model/qwen2.py b/nemo/collections/llm/gpt/model/qwen2.py
index 75f436aa9536..4cf0292d1a6a 100644
--- a/nemo/collections/llm/gpt/model/qwen2.py
+++ b/nemo/collections/llm/gpt/model/qwen2.py
@@ -20,7 +20,8 @@
import torch.nn.functional as F
from torch import nn
-from nemo.collections.llm.gpt.model.base import GPTConfig, GPTModel
+from nemo.collections.llm.gpt.model.base import GPTConfig, GPTModel, torch_dtype_from_mcore_config
+from nemo.collections.llm.gpt.model.llama import _export_embedding, _export_head
from nemo.collections.llm.utils import Config
from nemo.lightning import OptimizerModule, io, teardown
from nemo.lightning.pytorch.utils import dtype_from_hf
@@ -172,16 +173,16 @@ def config(self) -> Qwen2Config:
@io.model_exporter(Qwen2Model, "hf")
class HFQwen2Exporter(io.ModelConnector[Qwen2Model, "AutoModelForCausalLM"]):
- def init(self) -> "AutoModelForCausalLM":
+ def init(self, dtype=torch.bfloat16) -> "AutoModelForCausalLM":
from transformers import AutoModelForCausalLM
from transformers.modeling_utils import no_init_weights
with no_init_weights(True):
- return AutoModelForCausalLM.from_config(self.config, trust_remote_code=True)
+ return AutoModelForCausalLM.from_config(self.config, trust_remote_code=True, torch_dtype=dtype)
def apply(self, output_path: Path) -> Path:
- target = self.init()
source, _ = self.nemo_load(str(self))
+ target = self.init(torch_dtype_from_mcore_config(source.config))
target = self.convert_state(source, target)
target = target.cpu()
@@ -192,17 +193,18 @@ def apply(self, output_path: Path) -> Path:
def convert_state(self, source, target):
mapping = {
- "embedding.word_embeddings.weight": "model.embed_tokens.weight",
"decoder.layers.*.self_attention.linear_proj.weight": "model.layers.*.self_attn.o_proj.weight",
"decoder.layers.*.mlp.linear_fc2.weight": "model.layers.*.mlp.down_proj.weight",
"decoder.layers.*.self_attention.linear_qkv.layer_norm_weight": "model.layers.*.input_layernorm.weight",
"decoder.layers.*.mlp.linear_fc1.layer_norm_weight": "model.layers.*.post_attention_layernorm.weight",
"decoder.final_layernorm.weight": "model.norm.weight",
- "output_layer.weight": "lm_head.weight",
}
return io.apply_transforms(
- source, target, mapping=mapping, transforms=[_export_qkv, _export_qkv_bias, _export_linear_fc1]
+ source,
+ target,
+ mapping=mapping,
+ transforms=[_export_qkv, _export_qkv_bias, _export_linear_fc1, _export_embedding, _export_head],
)
@property
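
Initializing the HF model directly in the checkpoint's dtype, as the exporter now does, avoids materializing fp32 weights that convert_state would overwrite anyway. A sketch of the pattern, assuming only that `transformers` is installed:

    import torch
    from transformers import AutoModelForCausalLM
    from transformers.modeling_utils import no_init_weights

    def init_hf_model(hf_config, dtype=torch.bfloat16):
        # Skip random weight initialization; every tensor is replaced during conversion.
        with no_init_weights(True):
            return AutoModelForCausalLM.from_config(hf_config, torch_dtype=dtype)
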
diff --git a/nemo/collections/llm/gpt/model/ssm.py b/nemo/collections/llm/gpt/model/ssm.py
index d38a690cb4ad..f4190114042e 100644
--- a/nemo/collections/llm/gpt/model/ssm.py
+++ b/nemo/collections/llm/gpt/model/ssm.py
@@ -86,7 +86,7 @@ class SSMConfig(TransformerConfig, io.IOMixin):
data_step_fn: Callable = gpt_data_step
tokenizer_model_path: str = None
- def configure_model(self, tokenizer) -> "MCoreMambaModel":
+ def configure_model(self, tokenizer, pre_process=None, post_process=None) -> "MCoreMambaModel":
return MCoreMambaModel(
self,
@@ -101,8 +101,8 @@ def configure_model(self, tokenizer) -> "MCoreMambaModel":
rotary_percent=self.rotary_percent,
rotary_base=self.rotary_base,
seq_len_interpolation_factor=self.seq_len_interpolation_factor,
- pre_process=parallel_state.is_pipeline_first_stage(),
- post_process=parallel_state.is_pipeline_last_stage(),
+ pre_process=pre_process or parallel_state.is_pipeline_first_stage(),
+ post_process=post_process or parallel_state.is_pipeline_last_stage(),
)
@@ -290,6 +290,7 @@ class BaseMambaConfig2_7B(SSMConfig):
@dataclass
class NVIDIAMambaConfig8B(SSMConfig):
hybrid_override_pattern: str = "M" * 56
+ num_attention_heads: int = 32
num_layers: int = 56
seq_length: int = 4096
hidden_size: int = 4096
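
One subtlety in the `pre_process or parallel_state.is_pipeline_first_stage()` fallback above: `or` treats an explicit `False` the same as `None`, so callers can opt in to pre/post processing but cannot opt out. A two-assert illustration:

    def resolve(flag, default):
        return flag or default

    assert resolve(None, True) is True   # falls back to the default, as intended
    assert resolve(False, True) is True  # an explicit False is silently overridden
    # `flag if flag is not None else default` would distinguish the two cases.
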
diff --git a/nemo/collections/llm/gpt/model/starcoder.py b/nemo/collections/llm/gpt/model/starcoder.py
index 34bff1aa613d..b3e7b25f705b 100644
--- a/nemo/collections/llm/gpt/model/starcoder.py
+++ b/nemo/collections/llm/gpt/model/starcoder.py
@@ -16,10 +16,11 @@
from pathlib import Path
from typing import TYPE_CHECKING, Annotated, Callable, Optional
+import torch
import torch.nn.functional as F
from torch import nn
-from nemo.collections.llm.gpt.model.base import GPTConfig, GPTModel
+from nemo.collections.llm.gpt.model.base import GPTConfig, GPTModel, torch_dtype_from_mcore_config
from nemo.collections.llm.utils import Config
from nemo.lightning import OptimizerModule, io, teardown
from nemo.lightning.pytorch.utils import dtype_from_hf
@@ -157,16 +158,16 @@ def make_vocab_size_divisible_by(vocab_size):
@io.model_exporter(StarcoderModel, "hf")
class HFStarcoderExporter(io.ModelConnector[StarcoderModel, "GPTBigCodeForCausalLM"]):
- def init(self) -> "GPTBigCodeForCausalLM":
+ def init(self, dtype=torch.bfloat16) -> "GPTBigCodeForCausalLM":
from transformers import GPTBigCodeForCausalLM
from transformers.modeling_utils import no_init_weights
with no_init_weights(True):
- return GPTBigCodeForCausalLM._from_config(self.config)
+ return GPTBigCodeForCausalLM._from_config(self.config, torch_dtype=dtype)
def apply(self, output_path: Path) -> Path:
- target = self.init()
source, _ = self.nemo_load(str(self))
+ target = self.init(torch_dtype_from_mcore_config(source.config))
target = self.convert_state(source, target)
target = target.cpu()
@@ -177,7 +178,6 @@ def apply(self, output_path: Path) -> Path:
def convert_state(self, source, target):
mapping = {
- "embedding.word_embeddings.weight": "transformer.wte.weight",
"embedding.position_embeddings.weight": "transformer.wpe.weight",
"decoder.layers.*.self_attention.linear_proj.weight": "transformer.h.*.attn.c_proj.weight",
"decoder.layers.*.self_attention.linear_proj.bias": "transformer.h.*.attn.c_proj.bias",
@@ -193,10 +193,9 @@ def convert_state(self, source, target):
"decoder.layers.*.mlp.linear_fc1.layer_norm_bias": "transformer.h.*.ln_2.bias",
"decoder.final_layernorm.weight": "transformer.ln_f.weight",
"decoder.final_layernorm.bias": "transformer.ln_f.bias",
- "output_layer.weight": "lm_head.weight",
}
- return io.apply_transforms(source, target, mapping=mapping)
+ return io.apply_transforms(source, target, mapping=mapping, transforms=[_export_embedding, _export_head])
@property
def tokenizer(self):
@@ -225,3 +224,23 @@ def config(self) -> "HFStarcoderConfig":
num_key_value_heads=source.num_query_groups,
vocab_size=self.tokenizer.vocab_size,
)
+
+
+@io.state_transform(
+ source_key="embedding.word_embeddings.weight",
+ target_key="transformer.wte.weight",
+)
+def _export_embedding(ctx: io.TransformCTX, embedding):
+ megatron_config = ctx.target.config
+ # prune padding.
+ return embedding[: megatron_config.vocab_size, :]
+
+
+@io.state_transform(
+ source_key="output_layer.weight",
+ target_key="lm_head.weight",
+)
+def _export_head(ctx: io.TransformCTX, embedding):
+ megatron_config = ctx.target.config
+ # prune padding.
+ return embedding[: megatron_config.vocab_size, :]
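
`_export_embedding` and `_export_head` exist because NeMo pads the embedding and output matrices for vocabulary divisibility (see `make_vocab_size_divisible_by`), while HF checkpoints store the exact vocabulary. A small sketch with hypothetical sizes:

    import torch

    true_vocab, padded_vocab, hidden = 50257, 50304, 16  # hypothetical sizes
    padded_embedding = torch.randn(padded_vocab, hidden)
    exported = padded_embedding[:true_vocab, :]  # prune the padding rows
    assert exported.shape == (true_vocab, hidden)
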
diff --git a/nemo/collections/llm/gpt/model/starcoder2.py b/nemo/collections/llm/gpt/model/starcoder2.py
index 5df3fd224613..544721fa4e86 100644
--- a/nemo/collections/llm/gpt/model/starcoder2.py
+++ b/nemo/collections/llm/gpt/model/starcoder2.py
@@ -202,7 +202,6 @@ def apply(self, output_path: Path) -> Path:
def convert_state(self, source, target):
mapping = {
- "embedding.word_embeddings.weight": "model.embed_tokens.weight",
"decoder.layers.*.self_attention.linear_proj.weight": "model.layers.*.self_attn.o_proj.weight",
"decoder.layers.*.self_attention.linear_proj.bias": "model.layers.*.self_attn.o_proj.bias",
"decoder.layers.*.mlp.linear_fc1.weight": "model.layers.*.mlp.c_fc.weight",
@@ -215,7 +214,6 @@ def convert_state(self, source, target):
"decoder.layers.*.mlp.linear_fc1.layer_norm_bias": "model.layers.*.post_attention_layernorm.bias",
"decoder.final_layernorm.weight": "model.norm.weight",
"decoder.final_layernorm.bias": "model.norm.bias",
- "output_layer.weight": "lm_head.weight",
}
return io.apply_transforms(source, target, mapping=mapping, transforms=[_export_qkv_weight, _export_qkv_bias])
diff --git a/nemo/collections/llm/inference/base.py b/nemo/collections/llm/inference/base.py
index f3d202451c60..795d6efadd3a 100644
--- a/nemo/collections/llm/inference/base.py
+++ b/nemo/collections/llm/inference/base.py
@@ -16,9 +16,10 @@
from pathlib import Path
from typing import Optional, Union
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
import torch.distributed
+from lightning.pytorch.trainer.states import TrainerFn
from megatron.core.inference.common_inference_params import CommonInferenceParams
from megatron.core.inference.engines.mcore_engine import MCoreEngine
from megatron.core.inference.model_inference_wrappers.abstract_model_inference_wrapper import (
@@ -31,51 +32,109 @@
SimpleTextGenerationController,
)
from megatron.core.transformer.module import MegatronModule
-from pytorch_lightning.trainer.states import TrainerFn
import nemo.lightning as nl
-from nemo.collections.llm.peft import LoRA
from nemo.lightning import io
-from nemo.lightning.ckpt_utils import ADAPTER_META_FILENAME, ckpt_to_context_subdir, ckpt_to_weights_subdir
+from nemo.lightning.ckpt_utils import ADAPTER_META_FILENAME, ckpt_to_context_subdir
+from nemo.lightning.io.pl import ckpt_to_weights_subdir
+from nemo.lightning.pytorch.callbacks import PEFT
from nemo.lightning.pytorch.strategies.megatron_strategy import MegatronStrategy
from nemo.lightning.pytorch.strategies.utils import RestoreConfig
-# We need this wrapper since mcore generate uses methods/properties such as tokenizer.detokenize, tokenizer.tokenize, tokenizer.bos, tokenizer.pad, etc. to encode and decode prompts
class MCoreTokenizerWrappper:
+ """
+ We need this wrapper since mcore generate uses methods/properties such as
+ tokenizer.detokenize, tokenizer.tokenize, tokenizer.bos, tokenizer.pad, etc. to encode and decode prompts.
+ """
+
def __init__(self, tokenizer):
self.tokenizer = tokenizer
self.eod = tokenizer.eod
self.vocab_size = tokenizer.vocab_size
def detokenize(self, tokens, remove_special_tokens=False):
+ """
+ Detokenizes a list of tokens into a string.
+
+ Args:
+ tokens (list): The list of tokens to detokenize.
+ remove_special_tokens (bool, optional): Whether to remove special tokens. Defaults to False.
+
+ Returns:
+ str: The detokenized string.
+ """
return self.tokenizer.ids_to_text(tokens, remove_special_tokens)
def tokenize(self, prompt):
+ """
+ Tokenizes a prompt into a list of tokens.
+
+ Args:
+ prompt (str): The prompt to tokenize.
+
+ Returns:
+ list: The list of tokens.
+ """
return self.tokenizer.text_to_ids(prompt)
@property
def additional_special_tokens_ids(self):
+ """
+ Gets the IDs of additional special tokens.
+
+ Returns:
+ list: The IDs of additional special tokens.
+ """
return self.tokenizer.additional_special_tokens_ids
@property
def bos(self):
+ """
+ Gets the ID of the beginning of sequence token.
+
+ Returns:
+ int: The ID of the beginning of sequence token.
+ """
return self.tokenizer.bos_id
@property
def pad(self):
+ """
+ Gets the ID of the padding token.
+
+ Returns:
+ int: The ID of the padding token.
+ """
return self.tokenizer.pad_id
# TODO: Move to lightning Fabric API.
def _setup_trainer_and_restore_model(path: Path, trainer: nl.Trainer, model: pl.LightningModule):
+ """
+ Sets up the trainer and restores the model from the given checkpoint path.
+
+ It does the following:
+ - Defines a RestoreConfig to restore only model weights
+ - Disables setting up optimizers in the Trainer
+ - Calls strategy.setup_environment(), model.configure_model() and strategy.setup_megatron_parallel(trainer=trainer)
+ - Finally loads the model weights
+
+ Args:
+ path (Path): The path to the checkpoint file.
+ trainer (nl.Trainer): The trainer object.
+ model (pl.LightningModule): The model object.
+
+ Returns:
+ None
+ """
assert isinstance(trainer.strategy, MegatronStrategy), "Only MegatronStrategy is supported for trainer.strategy."
assert trainer.strategy.context_parallel_size <= 1, "Context parallelism is not supported for inference."
- if (adapter_meta_path := ckpt_to_weights_subdir(path) / ADAPTER_META_FILENAME).exists():
+ if (adapter_meta_path := ckpt_to_weights_subdir(path, is_saving=False) / ADAPTER_META_FILENAME).exists():
with open(adapter_meta_path, "r") as f:
metadata = json.load(f)
restore_config = RestoreConfig(
- path=metadata['model_ckpt_path'],
+ path=metadata["model_ckpt_path"],
load_model_state=True,
load_optim_state=False,
)
@@ -102,12 +161,12 @@ def _setup_trainer_and_restore_model(path: Path, trainer: nl.Trainer, model: pl.
trainer.strategy.trainer = trainer
trainer.strategy.selective_restore()
- lora: Union[io.TrainerContext, LoRA] = io.load_context(ckpt_to_context_subdir(path), "model.model_transform")
- if isinstance(lora, LoRA):
- model = lora(model)
+ peft: Union[io.TrainerContext, PEFT] = io.load_context(ckpt_to_context_subdir(path), "model.model_transform")
+ if isinstance(peft, PEFT):
+ model = peft(model)
adapter_sharded_state_dict = {k: v for k, v in model.sharded_state_dict().items() if ".adapter." in k}
adapter_state = trainer.strategy.checkpoint_io.load_checkpoint(
- ckpt_to_weights_subdir(path), sharded_state_dict=adapter_sharded_state_dict
+ ckpt_to_weights_subdir(path, is_saving=False), sharded_state_dict=adapter_sharded_state_dict
)
trainer.strategy.load_model_state_dict(adapter_state, strict=False)
@@ -118,6 +177,24 @@ def setup_model_and_tokenizer(
params_dtype: torch.dtype = torch.bfloat16,
inference_batch_times_seqlen_threshold: int = 1000,
) -> tuple[MegatronModule, MCoreTokenizerWrappper]:
+ """
+ Sets up the model and tokenizer for inference.
+
+ This function loads the model and tokenizer from the given checkpoint path,
+ sets up the trainer, and returns the Megatron inference-wrapped model and tokenizer.
+
+ Args:
+ path (Path): The path to the checkpoint file.
+ trainer (nl.Trainer): The trainer object.
+ params_dtype (torch.dtype, optional): The data type of the model parameters.
+ Defaults to torch.bfloat16.
+ inference_batch_times_seqlen_threshold (int, optional): If batch size times sequence length is below
+ this threshold, pipelining is not used; otherwise it is.
+
+ Returns:
+ tuple[MegatronModule, MCoreTokenizerWrappper]:
+ A tuple containing the inference-wrapped model and the MCore-wrapped tokenizer.
+ """
model: io.TrainerContext = io.load_context(path=ckpt_to_context_subdir(path), subpath="model")
_setup_trainer_and_restore_model(path=path, trainer=trainer, model=model)
@@ -135,6 +212,26 @@ def generate(
random_seed: Optional[int] = None,
inference_params: Optional[CommonInferenceParams] = None,
) -> dict:
+ """
+ Runs generate on the model with the given prompts.
+
+ This function uses the loaded model, loaded tokenizer, and prompts to generate text.
+ It returns a dictionary containing the generated text.
+
+ Args:
+ model (AbstractModelInferenceWrapper): The inference-wrapped model.
+ tokenizer (MCoreTokenizerWrappper): The tokenizer.
+ prompts (list[str]): The list of prompts to generate text for.
+ encoder_prompts (Optional[list[str]], optional): The list of encoder prompts. Defaults to None.
+ add_BOS (bool, optional): Whether to add the beginning of sequence token. Defaults to False.
+ max_batch_size (int, optional): The maximum batch size. Defaults to 4.
+ random_seed (Optional[int], optional): The random seed. Defaults to None.
+ inference_params (Optional[CommonInferenceParams], optional): The inference parameters defined in
+ Mcore's CommonInferenceParams. Defaults to None.
+
+ Returns:
+ dict: A dictionary containing the generated results.
+ """
if encoder_prompts is not None:
text_generation_controller = EncoderDecoderTextGenerationController(
inference_wrapped_model=model, tokenizer=tokenizer
@@ -145,7 +242,7 @@ def generate(
text_generation_controller=text_generation_controller, max_batch_size=max_batch_size, random_seed=random_seed
)
- common_inference_params = inference_params or CommonInferenceParams(num_tokens_to_generate=512)
+ common_inference_params = inference_params or CommonInferenceParams(num_tokens_to_generate=512, top_k=1)
results = mcore_engine.generate(
prompts=prompts,
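
Together, `setup_model_and_tokenizer` and `generate` give a compact inference path for NeMo 2.0 checkpoints. A usage sketch, assuming a single-device MegatronStrategy trainer and a hypothetical checkpoint path:

    from pathlib import Path

    import nemo.lightning as nl
    from nemo.collections.llm.inference.base import generate, setup_model_and_tokenizer

    trainer = nl.Trainer(devices=1, strategy=nl.MegatronStrategy())  # minimal assumed setup
    model, tokenizer = setup_model_and_tokenizer(Path("/path/to/nemo_ckpt"), trainer)
    results = generate(model, tokenizer, prompts=["Hello, world!"], max_batch_size=1)
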
diff --git a/nemo/collections/llm/peft/__init__.py b/nemo/collections/llm/peft/__init__.py
index 3dae5622b733..1dcc070a5a97 100644
--- a/nemo/collections/llm/peft/__init__.py
+++ b/nemo/collections/llm/peft/__init__.py
@@ -12,7 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from nemo.collections.llm.peft.api import gpt_lora
+from nemo.collections.llm.peft.api import gpt_lora, merge_lora
+from nemo.collections.llm.peft.dora import DoRA
from nemo.collections.llm.peft.lora import LoRA
-__all__ = ["LoRA", "gpt_lora"]
+PEFT_STR2CLS = {
+ "LoRA": LoRA,
+ "lora": LoRA,
+ "DoRA": DoRA,
+ "dora": DoRA,
+}
+
+__all__ = ["LoRA", "DoRA", "gpt_lora", "PEFT_STR2CLS", "merge_lora"]
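
`PEFT_STR2CLS` maps both capitalized and lowercase scheme names to their classes, which is what the recipe `peft_scheme` arguments below rely on. A quick sketch:

    from nemo.collections.llm.peft import PEFT_STR2CLS

    peft_cls = PEFT_STR2CLS["dora"]       # same class as PEFT_STR2CLS["DoRA"]
    adapter = peft_cls(dim=16, alpha=32)  # dataclass fields as defined in dora.py/lora.py
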
diff --git a/nemo/collections/llm/peft/api.py b/nemo/collections/llm/peft/api.py
index 85c0ae6cae41..a089a6d17515 100644
--- a/nemo/collections/llm/peft/api.py
+++ b/nemo/collections/llm/peft/api.py
@@ -12,9 +12,24 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from nemo.collections.llm.peft.lora import LoRA
+import json
+from pathlib import Path
+from typing import Tuple, Union
+
+import pytorch_lightning as pl
+from megatron.core import dist_checkpointing
+from pytorch_lightning.trainer.states import TrainerFn
+
+from nemo.collections.common.tokenizers.huggingface.auto_tokenizer import AutoTokenizer
+from nemo.collections.llm.peft.lora import LoRA, LoRAMerge
from nemo.collections.llm.utils import factory
+from nemo.lightning import MegatronStrategy, Trainer, _strategy_lib, io
+from nemo.lightning.ckpt_utils import ADAPTER_META_FILENAME, ckpt_to_context_subdir
+from nemo.lightning.io.pl import TrainerContext, ckpt_to_weights_subdir
+from nemo.lightning.pytorch.callbacks import PEFT
from nemo.lightning.pytorch.callbacks.peft import PEFT
+from nemo.lightning.pytorch.strategies.utils import RestoreConfig
+from nemo.utils import logging
@factory
@@ -22,4 +37,108 @@ def gpt_lora() -> PEFT:
return LoRA()
-__all__ = ["gpt_lora"]
+def merge_lora(
+ lora_checkpoint_path: str,
+ output_path: str,
+) -> None:
+ """
+ Merges the LoRA adapter weights into the base model's weights.
+
+ Python Usage:
+ ```python
+ if __name__ == '__main__':
+ llm.peft.merge_lora(
+ lora_checkpoint_path=your_lora_checkpoint_path,
+ output_path=your_output_path,
+ )
+ ```
+
+ Args:
+ lora_checkpoint_path: The path to the LoRA checkpoint.
+ output_path: The path to save the merged checkpoint.
+
+ """
+ from nemo.collections.llm.recipes.precision.mixed_precision import bf16_mixed
+
+ trainer = Trainer(
+ devices=1,
+ accelerator="cpu",
+ strategy=MegatronStrategy(ddp="pytorch", setup_optimizers=False, plugins=bf16_mixed()),
+ )
+
+ model, lora = _load_base_model_and_lora(lora_checkpoint_path)
+ _setup_trainer_and_restore_model_and_adapter(Path(lora_checkpoint_path), trainer, model, lora)
+
+ lora_merge = LoRAMerge()
+ merged_model = lora_merge(trainer.strategy.megatron_parallel)
+ merged_weights = {k: v for k, v in merged_model.sharded_state_dict().items() if ".adapter." not in k}
+ _save_merged_weight(output_path, merged_weights, model, trainer)
+
+
+def _load_base_model_and_lora(lora_checkpoint_path: Path) -> Tuple[pl.LightningModule, LoRA]:
+ model = io.load_context(ckpt_to_context_subdir(lora_checkpoint_path), "model")
+ model.model_transform, model.__io__.model_transform = None, None
+ model.config.bf16 = False
+ lora: Union[io.TrainerContext, LoRA] = io.load_context(
+ ckpt_to_context_subdir(lora_checkpoint_path), "model.model_transform"
+ )
+ assert isinstance(lora, LoRA), "LoRA config not found in checkpoint"
+ return model, lora
+
+
+def _setup_trainer_and_restore_model_and_adapter(
+ lora_checkpoint_path: Path, trainer: Trainer, model: pl.LightningModule, lora: LoRA
+) -> None:
+ if (
+ adapter_meta_path := ckpt_to_weights_subdir(lora_checkpoint_path, is_saving=False) / ADAPTER_META_FILENAME
+ ).exists():
+ with open(adapter_meta_path, "r") as f:
+ metadata = json.load(f)
+ restore_config = RestoreConfig(
+ path=metadata["model_ckpt_path"],
+ load_model_state=True,
+ load_optim_state=False,
+ )
+ else:
+ raise ValueError(f"Cannot find adapter meta file in {lora_checkpoint_path}")
+
+ trainer.strategy.restore_config = restore_config
+ trainer.strategy._setup_optimizers = False
+ trainer.ckpt_path = None
+ trainer.strategy.connect(model)
+ trainer.strategy.setup_environment()
+
+ if not model.state_dict():
+ with _strategy_lib.megatron_cpu_init_context(model.config):
+ model.configure_model()
+
+ trainer.strategy.setup(trainer) # load base model ckpt
+ trainer.state.fn = TrainerFn.TESTING
+ trainer.strategy.setup_megatron_parallel(trainer=trainer)
+ trainer.strategy.trainer = trainer
+ model.trainer = trainer
+
+ lora(model)
+ adapter_sharded_state_dict = {
+ k: v for k, v in trainer.strategy.megatron_parallel.sharded_state_dict().items() if ".adapter." in k
+ }
+ adapter_state = trainer.strategy.checkpoint_io.load_checkpoint(
+ ckpt_to_weights_subdir(lora_checkpoint_path, is_saving=False), sharded_state_dict=adapter_sharded_state_dict
+ )
+ trainer.strategy.load_model_state_dict(adapter_state, strict=False)
+
+
+def _save_merged_weight(output_path: str, merged_weights: dict, model: pl.LightningModule, trainer: Trainer):
+ weight_path = ckpt_to_weights_subdir(output_path, is_saving=True)
+ Path(weight_path).mkdir(parents=True, exist_ok=True)
+ dist_checkpointing.save(merged_weights, str(ckpt_to_weights_subdir(output_path, is_saving=True)))
+ if hasattr(model.tokenizer, "save_pretrained"):
+ model.tokenizer.save_pretrained("/tmp/nemo_tokenizer")
+ model.tokenizer = AutoTokenizer("/tmp/nemo_tokenizer")
+ if hasattr(trainer.model, "__io__") and hasattr(trainer.model.tokenizer, '__io__'):
+ trainer.model.__io__.tokenizer = trainer.model.tokenizer.__io__
+ TrainerContext.from_trainer(trainer).io_dump(ckpt_to_context_subdir(output_path), yaml_attrs=["model"])
+ logging.info(f"Merged checkpoint saved to {output_path}")
+
+
+__all__ = ["gpt_lora", "merge_lora"]
diff --git a/nemo/collections/llm/peft/dora.py b/nemo/collections/llm/peft/dora.py
new file mode 100644
index 000000000000..d77d2a4dc0d4
--- /dev/null
+++ b/nemo/collections/llm/peft/dora.py
@@ -0,0 +1,261 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import re
+from dataclasses import dataclass, field
+from typing import List, Literal, Optional
+
+import torch
+from megatron.core import parallel_state
+from megatron.core.dist_checkpointing.mapping import ShardedStateDict
+from megatron.core.tensor_parallel import (
+ ColumnParallelLinear,
+ RowParallelLinear,
+ gather_from_tensor_model_parallel_region,
+)
+from megatron.core.utils import make_sharded_tensor_for_checkpoint, make_tp_sharded_tensor_for_checkpoint
+from torch import nn
+
+from nemo.collections.llm.peft.lora import LinearAdapter
+from nemo.collections.nlp.modules.common.megatron.adapters.parallel_adapters import ParallelLinearAdapter
+from nemo.lightning.pytorch.callbacks.peft import PEFT, AdapterWrapper
+from nemo.utils import logging
+from nemo.utils.import_utils import safe_import_from
+
+TEColumnParallelLinear, HAVE_TE_COL_LINEAR = safe_import_from(
+ "megatron.core.extensions.transformer_engine", "TEColumnParallelLinear"
+)
+TELayerNormColumnParallelLinear, HAVE_TE_LN_COL_LINEAR = safe_import_from(
+ "megatron.core.extensions.transformer_engine",
+ "TELayerNormColumnParallelLinear",
+)
+TERowParallelLinear, HAVE_TE_ROW_LINEAR = safe_import_from(
+ "megatron.core.extensions.transformer_engine", "TERowParallelLinear"
+)
+HAVE_TE = all((HAVE_TE_COL_LINEAR, HAVE_TE_LN_COL_LINEAR, HAVE_TE_ROW_LINEAR))
+
+
+class ParallelLinearDoRAAdapter(ParallelLinearAdapter):
+ """
+ Adapter class for DoRA to handle the additional weight_magnitude parameter
+ """
+
+ def init_weight_magnitude(self, value):
+ """
+ Initialize weight_magnitude with shape (d,), where d is the output dim of the linear layer
+ """
+ self.weight_magnitude = nn.Parameter(value, requires_grad=True)
+
+ def get_weight_magnitude(self):
+ """
+ Public function to get the weight magnitude parameter
+ """
+ return self.weight_magnitude
+
+ def sharded_state_dict(
+ self, prefix: str = '', sharded_offsets: tuple = (), metadata: Optional[dict] = None
+ ) -> ShardedStateDict:
+ """
+ Sharded state dict implementation for DoRA adapter.
+ Weight magnitude is TP sharded for linear_qkv and linear_fc1 only.
+ """
+ sharded_state_dict = super().sharded_state_dict(prefix, sharded_offsets, metadata)
+
+ magnitude_key = f"{prefix}weight_magnitude"
+ if self.input_is_parallel:
+ # RPL output is gathered, so weight_magnitude is not sharded for TP
+ magnitude_sharded_tensor = make_sharded_tensor_for_checkpoint(
+ self.weight_magnitude, magnitude_key, prepend_offsets=sharded_offsets
+ )
+ else:
+ # CPL output is sharded, so weight_magnitude is sharded for TP
+ magnitude_sharded_tensor = make_tp_sharded_tensor_for_checkpoint(
+ self.weight_magnitude, magnitude_key, 0, prepend_offsets=sharded_offsets
+ )
+ sharded_state_dict[magnitude_key] = magnitude_sharded_tensor
+
+ return sharded_state_dict
+
+
+class DoRALinear(AdapterWrapper):
+ """
+ An adapter wrapper that is designed to be used with DoRA.
+ It extends the AdapterWrapper class to provide a specific implementation of the forward method.
+ """
+
+ def __init__(self, to_wrap: nn.Module, adapter: ParallelLinearDoRAAdapter):
+ super().__init__(to_wrap, adapter)
+ self.adapter: ParallelLinearDoRAAdapter
+ self.scaling = adapter.alpha / adapter.dim
+ self.adapter.init_weight_magnitude(self._get_weight_norm())
+
+ def _get_weight_norm(self):
+ if self.adapter.input_is_parallel:
+ linear_out_weight = gather_from_tensor_model_parallel_region(self.adapter.linear_out.weight.T).T
+ linear_in_weight = self.adapter.linear_in.weight
+ else:
+ linear_out_weight = self.adapter.linear_out.weight
+ linear_in_weight = gather_from_tensor_model_parallel_region(self.adapter.linear_in.weight.T).T
+
+ weight = self.to_wrap.weight + self.scaling * linear_out_weight @ linear_in_weight
+ return torch.linalg.norm(weight, dim=1).to(weight.dtype).detach()
+
+ def forward(self, x):
+ """
+ Forward method for DoRA
+
+ mag_norm_scale * (linear_output + adapter_output)
+ = ||W_0 + B_0 A_0|| / ||W_0 + B A|| * (W_0 x + B A x)
+ = ||W_0 + B_0 A_0|| ((W_0 + B A) / ||W_0 + B A||) x
+ = m ((W_0 + B A) / ||W_0 + B A||) x
+ = equation 5 in DoRA paper
+
+ When dropout is used, equation becomes
+ W_0 x + (m /||W_0 + B A|| - 1) W_0 dropout(x) + m /||W_0 + B A|| B A dropout(x)
+ = ...
+ = m /||W_0 + B A|| (W_0 x + B A dropout(x)) + (m /||W_0 + B A|| - 1) W_0 (dropout(x) - x)
+
+ """
+ linear_output, bias, layernorm_output = self.base_linear_forward(x)
+ adapter_output = self.adapter(layernorm_output.contiguous())
+
+ # mag_norm_scale is ||W_0 + B_0 A_0|| / ||W_0 + B A|| (scaling in front of BA not shown)
+ mag_norm_scale = (self.adapter.get_weight_magnitude() / self._get_weight_norm()).view(1, 1, -1)
+ if self.adapter.dropout is None or not self.training:
+ dropout_correction = 0
+ else:
+ dropout_correction = (mag_norm_scale - 1) * self.base_linear_forward(
+ self.adapter.dropout(layernorm_output) - layernorm_output
+ )[0]
+
+ return mag_norm_scale * (linear_output + adapter_output) + dropout_correction, bias
+
+
+@dataclass
+class DoRA(PEFT):
+ """
+ Implements the DoRA (Weight-Decomposed Low-Rank Adaptation) module for parameter-efficient fine-tuning.
+
+ DoRA decomposes pre-trained weight into magnitude and direction, and uses a low-rank projection in the
+ directional component to adapt the weights of a pre-trained model to a new downstream task.
+ This class facilitates the application of DoRA to specific modules within the model architecture.
+
+ Args:
+ See LoRA class for a detailed explanation of the arguments.
+
+ Example:
+ --------
+ >>> from nemo.collections import llm
+ >>> dora = llm.peft.DoRA(target_modules=['linear_qkv', 'linear_proj'], dim=32, alpha=64)
+ >>> model = llm.Mistral7BModel(model_transform=dora)
+ >>> # (set up trainer and data)
+ >>> trainer.fit(model, data)
+
+ References:
+ -----------
+ Shih-Yang Liu, Chien-Yi Wang, Hongxu Yin, Pavlo Molchanov, Yu-Chiang Frank Wang, Kwang-Ting Cheng,
+ Min-Hung Chen (2024). DoRA: Weight-Decomposed Low-Rank Adaptation. arXiv preprint arXiv:2402.09353.
+ https://arxiv.org/abs/2402.09353
+ """
+
+ target_modules: List[str] = field(
+ default_factory=lambda: ['linear_qkv', 'linear_proj', 'linear_fc1', 'linear_fc2']
+ )
+ dim: int = 32
+ alpha: int = 64
+ dropout: float = 0.0
+ dropout_position: Literal['pre', 'post'] = 'pre'
+ lora_A_init_method: str = "xavier"
+ lora_B_init_method: str = "zero"
+
+ def __post_init__(self):
+ assert self.dropout_position == "pre", (
+ "DoRA only supports pre-adapter dropout at this time." "Please set DoRA(..., dropout_position='pre')"
+ )
+
+ def transform(self, m: nn.Module, name=None, prefix=None):
+ """
+ Applies DoRA to a specific module within the model architecture.
+
+ Args:
+ m (nn.Module): The module to apply DoRA to.
+ name (str, optional): Name of the module (if applicable). Defaults to None.
+ prefix (str, optional): Prefix for the module name (if applicable). Defaults to None.
+
+ Returns:
+ nn.Module: The modified module with DoRA applied, or the original module if not a target.
+ """
+
+ def wildcard_match(pattern, key):
+ if key is None:
+ return None
+ regex_pattern = re.compile("^" + pattern.replace("*", "(.*)") + "$")
+ match = regex_pattern.match(key)
+ return match is not None
+
+ full_name = f"{prefix}.{name}" if prefix else name
+ if name in self.target_modules or any(wildcard_match(pattern, full_name) for pattern in self.target_modules):
+ if HAVE_TE and (isinstance(m, TEColumnParallelLinear) or isinstance(m, TELayerNormColumnParallelLinear)):
+ input_is_parallel = False
+ # m.in_features and m.out_features are divided by tp_size already,
+ # but in_features and out_features passed to ParallelLinearAdapter are not.
+ tp_size = parallel_state.get_tensor_model_parallel_world_size()
+ in_features = m.in_features
+ out_features = m.out_features * tp_size
+ # DoRA is applied after layernorm, so layernorm output must be returned
+ m.return_layernorm_output = True
+ # perf optimization for DoRA + SP (to check!)
+ if m.config.sequence_parallel and not m.ub_overlap_ag:
+ m.return_layernorm_output_gathered = True
+ elif HAVE_TE and isinstance(m, TERowParallelLinear):
+ input_is_parallel = True
+ tp_size = parallel_state.get_tensor_model_parallel_world_size()
+ in_features = m.in_features * tp_size
+ out_features = m.out_features
+ elif isinstance(m, ColumnParallelLinear):
+ input_is_parallel = False
+ in_features = m.input_size
+ out_features = m.output_size
+ elif isinstance(m, RowParallelLinear):
+ input_is_parallel = True
+ in_features = m.input_size
+ out_features = m.output_size
+ elif isinstance(m, nn.Linear):
+ return LinearAdapter(
+ m, dim=self.dim, alpha=self.alpha, dropout=self.dropout, lora_A_init_method=self.lora_A_init_method
+ )
+ else:
+ raise NotImplementedError(f"Layer type is unrecognized for DoRA: {type(m)}")
+
+ logging.info(f"Adding DoRA to: {full_name}")
+ adapter = ParallelLinearDoRAAdapter(
+ in_features,
+ out_features,
+ self.dim,
+ activation='identity',
+ norm_position=None,
+ norm_type=None,
+ column_init_method=self.lora_A_init_method,
+ row_init_method=self.lora_B_init_method,
+ gather_output=False,
+ input_is_parallel=input_is_parallel,
+ dropout=self.dropout,
+ dropout_position=self.dropout_position,
+ model_parallel_config=getattr(m, "config", None),
+ alpha=self.alpha,
+ )
+ return DoRALinear(m, adapter)
+ return m
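
As a numeric check of the decomposition implemented by `DoRALinear` (equation 5 above): with the zero-initialized `linear_out`, the initial magnitude equals the row norms of the base weight, so the adapted weight starts exactly at W_0. A sketch with hypothetical shapes:

    import torch

    d_out, d_in, rank, alpha = 6, 4, 2, 4  # hypothetical shapes
    scaling = alpha / rank
    W0 = torch.randn(d_out, d_in)
    B = torch.zeros(d_out, rank)           # linear_out, zero-initialized
    A = torch.randn(rank, d_in)            # linear_in

    direction = W0 + scaling * B @ A
    m = torch.linalg.norm(direction, dim=1)  # initial weight magnitude, shape (d_out,)
    W_adapted = (m / torch.linalg.norm(direction, dim=1)).unsqueeze(1) * direction
    assert torch.allclose(W_adapted, W0)     # identity at initialization
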
diff --git a/nemo/collections/llm/peft/lora.py b/nemo/collections/llm/peft/lora.py
index ecebf696a42c..57cdda3a2871 100644
--- a/nemo/collections/llm/peft/lora.py
+++ b/nemo/collections/llm/peft/lora.py
@@ -48,26 +48,8 @@ class to provide a specific implementation of the forward method.
"""
def forward(self, x):
- linear_output = self.to_wrap(x)
- assert isinstance(
- linear_output, tuple
- ), f"{self.to_wrap} should return a tuple but instead returns {linear_output}"
- """ Four cases for the wrapped module's return values
- 1. nothing: (out, None)
- 2. return_bias: (out, bias)
- 2. return_layernorm_output: ((out, ln_out), None)
- 3. both: (out, bias, ln_out)
- """
- if len(linear_output) == 2:
- linear_output, bias = linear_output
- if isinstance(linear_output, tuple) and len(linear_output) == 2:
- linear_output, layernorm_output = linear_output
- x = layernorm_output
- elif len(linear_output) == 3:
- linear_output, bias, layernorm_output = linear_output
- x = layernorm_output
-
- adapter_output = self.adapter(x.contiguous())
+ linear_output, bias, layernorm_output = self.base_linear_forward(x)
+ adapter_output = self.adapter(layernorm_output.contiguous())
return linear_output + adapter_output, bias
@@ -114,6 +96,10 @@ def forward(self, x):
return res + lora_res
+def is_expert_linear(fqn):
+ """Return whether `fqn` names an expert linear layer inside an MoE MLP."""
+ return re.match(r'.*mlp\.experts\.local_experts\.[0-9]+\.linear_fc[1-2]$', fqn) is not None
+
+
@dataclass
class LoRA(PEFT):
"""
@@ -126,8 +112,8 @@ class LoRA(PEFT):
target_modules (List[str], optional): A list of module names to apply LoRA to.
Defaults to all linear layers ['linear_qkv', 'linear_proj', 'linear_fc1', 'linear_fc2'].
- 'linear_qkv': Apply LoRA to the fused linear layer used for query, key, and value projections
- in self-attention modules.
- - 'linear_proj': Apply LoRA to the linear layer used for projecting the output of self-attention modules.
+ in self-attention.
+ - 'linear_proj': Apply LoRA to the linear layer used for projecting the output of self-attention.
- 'linear_fc1': Apply LoRA to the first fully-connected layer in MLP.
- 'linear_fc2': Apply LoRA to the second fully-connected layer in MLP.
Target modules can also contain wildcards. For example, you can specify
@@ -237,6 +223,47 @@ def wildcard_match(pattern, key):
dropout_position=self.dropout_position,
model_parallel_config=getattr(m, "config", None),
alpha=self.alpha,
+ is_expert=is_expert_linear(full_name),
)
return AdapterParallelAdd(m, adapter)
return m
+
+
+class LoRAMerge(PEFT):
+ """
+ Implements the LoRA weight merge for parameter-efficient fine-tuning.
+
+ Example:
+ --------
+ >>> from nemo.collections.llm.peft.lora import LoRAMerge
+ >>> lora_merge = LoRAMerge()
+ >>> merged_model = lora_merge(trainer.strategy.megatron_parallel)
+ """
+
+ @torch.no_grad()
+ def transform(self, m: nn.Module, name=None, prefix=None):
+ """
+ Merges the LoRA adapter with the base model weights.
+
+ Args:
+ m (nn.Module): The module to apply LoRA merge to.
+ name (str, optional): Name of the module to merge. Defaults to None.
+ prefix (str, optional): Prefix for the module name. Defaults to None.
+
+ Returns:
+ nn.Module: The modified module with the LoRA adapter merged into the base model weights.
+ """
+
+ if not isinstance(m, AdapterParallelAdd):
+ return m
+ logging.info(f'merging {(prefix if prefix else "") + "." + (name if name else "")}')
+ base_weight = m.to_wrap.weight
+ lora_weight = (
+ m.adapter.alpha
+ / m.adapter.dim
+ * m.adapter.linear_out.weight.to(base_weight.device)
+ @ m.adapter.linear_in.weight.to(base_weight.device)
+ )
+ merged_weight = base_weight + lora_weight
+ m.to_wrap.weight.data = merged_weight
+ return m
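
The merge performed by `LoRAMerge` is exact: folding (alpha / dim) * B @ A into the base weight reproduces the base-plus-adapter forward pass. A small verification sketch:

    import torch

    d, rank, alpha = 8, 4, 8   # hypothetical shapes
    W0 = torch.randn(d, d)
    A = torch.randn(rank, d)   # adapter.linear_in.weight
    B = torch.randn(d, rank)   # adapter.linear_out.weight
    x = torch.randn(3, d)

    merged = W0 + (alpha / rank) * B @ A
    unmerged = x @ W0.T + (alpha / rank) * (x @ A.T) @ B.T
    assert torch.allclose(x @ merged.T, unmerged, atol=1e-5)
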
diff --git a/nemo/collections/llm/quantization/quantizer.py b/nemo/collections/llm/quantization/quantizer.py
index 15367cb25aba..45f72f06741e 100644
--- a/nemo/collections/llm/quantization/quantizer.py
+++ b/nemo/collections/llm/quantization/quantizer.py
@@ -13,7 +13,9 @@
# limitations under the License.
import os
+import shutil
from dataclasses import dataclass
+from pathlib import Path
from typing import Optional, Union
import torch
@@ -22,6 +24,7 @@
from tqdm import tqdm
from nemo.collections import llm
+from nemo.lightning.ckpt_utils import CONTEXT_PATH
from nemo.utils import logging
from .utils import get_unwrapped_mcore_model
@@ -73,17 +76,20 @@ class QuantizationConfig:
@dataclass
class ExportConfig:
- """Inference configuration for the quantized TensorRT-LLM engine"""
+ """Inference configuration for the quantized TensorRT-LLM checkpoint."""
- path: str
+ path: Union[Path, str]
dtype: Union[str, int] = "bf16"
decoder_type: Optional[str] = None
inference_tensor_parallel: int = 1
inference_pipeline_parallel: int = 1
+ def __post_init__(self):
+ self.path = Path(self.path)
+
def get_modelopt_decoder_type(config: llm.GPTConfig) -> str:
- """Infers the modelopt decoder type from GPTConfig class"""
+ """Infers the modelopt decoder type from GPTConfig class."""
mapping = [
(llm.Baichuan2Config, "baichuan"),
(llm.ChatGLMConfig, "chatglm"),
@@ -107,17 +113,17 @@ def get_modelopt_decoder_type(config: llm.GPTConfig) -> str:
class Quantizer:
- """Post-training quantization (PTQ) and TRT-LLM export of NeMo 2.0 checkpoints.
+ """Post-training quantization (PTQ) and TensorRT-LLM export of NeMo 2.0 checkpoints.
PTQ converts selected model layers to low-precision format (e.g., INT4, FP8) for efficient serving.
The process consists of several steps:
1. Loading a Nemo model from disk using appropriate parallelism strategy
2. Calibrating the model to obtain appropriate algorithm-specific scaling factors
- 3. Producing output directory
+ 3. Producing an output directory with a quantized checkpoint and a tokenizer
The output directory produced is intended to be consumed by TensorRT-LLM toolbox
- for efficient inference. This can be achieved using NeMo inference containers.
+ for efficient inference. This can be achieved using nemo.export.tensorrt_llm module.
"""
def __init__(self, quantization_config: QuantizationConfig, export_config: ExportConfig):
@@ -229,6 +235,7 @@ def quantize(self, model: llm.GPTModel, forward_loop=None):
def create_megatron_forward_loop(
self, get_dataloader, num_batches, seq_length=None, micro_batch_size=None, decoder_seq_length=None
):
+ """Create a forward loop for over a given data iterator."""
from megatron.core.pipeline_parallel.schedules import get_forward_backward_func
forward_backward_func = get_forward_backward_func()
@@ -259,14 +266,14 @@ def loop(model):
return loop
- def export(self, model: llm.GPTModel) -> None:
+ def export(self, model: llm.GPTModel, model_dir: str) -> None:
+ """Export model to a TensorRT-LLM checkpoint."""
assert self.export_config is not None, "Export config is not set"
# TODO: Add sample generate
# TODO: Support megatron_amp_O2
export_dir = self.export_config.path
- use_nfs_workspace = (model.trainer._fabric.__io__.num_nodes > 1) or (
- model.config.pipeline_model_parallel_size > 1
- )
+
+ use_nfs_workspace = model.config.pipeline_model_parallel_size > 1
export_tensorrt_llm_checkpoint(
model=get_unwrapped_mcore_model(model),
decoder_type=self._get_decoder_type(model.config),
@@ -276,22 +283,25 @@ def export(self, model: llm.GPTModel) -> None:
inference_pipeline_parallel=self.export_config.inference_pipeline_parallel,
use_nfs_workspace=use_nfs_workspace,
)
+ dist.barrier()
- dist.barrier() # Wait until all ranks complete export_model_config step
- logging.info(f"Export succeeded, model has been exported to {export_dir}. Saving tokenizer if possible...")
-
+ # Save the model context in order to restore its tokenizer later. The destination
+ # path is "nemo_context" as this name is used in nemo.export to setup tokenizer.
if dist.get_rank() == 0:
- try:
- tokenizer_dst = os.path.join(export_dir, 'tokenizer')
- model.tokenizer.tokenizer.save_pretrained(tokenizer_dst)
- except Exception as err:
- logging.warning("Could not save the tokenizer: " + str(err))
+ shutil.copytree(
+ os.path.join(model_dir, CONTEXT_PATH),
+ os.path.join(export_dir, "nemo_context"),
+ dirs_exist_ok=True,
+ )
+ logging.info("Model context saved.")
+
+ logging.info(f"Export succeeded, model has been exported to {export_dir}.")
def get_calib_data_iter(
data: str = "cnn_dailymail", batch_size: int = 64, calib_size: int = 512, max_sequence_length: int = 512
):
- """Creates a sample data iterator for calibration"""
+ """Creates a sample data iterator for calibration."""
if data == "wikitext":
dataset = load_dataset("wikitext", "wikitext-103-v1", split="train")
text_column = "text"
@@ -311,6 +321,8 @@ def get_calib_data_iter(
def create_data_iterator_getter(model, dataset, seq_len, batch_size, calibration_size):
+ """Create a function that provides iterator over a given dataset."""
+
def _iterator():
CHARACTERS_PER_TOKEN = 4
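
`ExportConfig.__post_init__` normalizes `path` to a `pathlib.Path`, so callers may pass either a string or a Path. A minimal check (the output directory is hypothetical):

    from pathlib import Path

    from nemo.collections.llm.quantization.quantizer import ExportConfig

    cfg = ExportConfig(path="/tmp/trtllm_ckpt")
    assert isinstance(cfg.path, Path)
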
diff --git a/nemo/collections/llm/quantization/utils.py b/nemo/collections/llm/quantization/utils.py
index c4c533fe38d0..bdfccb208d06 100644
--- a/nemo/collections/llm/quantization/utils.py
+++ b/nemo/collections/llm/quantization/utils.py
@@ -18,6 +18,7 @@
from nemo import lightning as nl
from nemo.collections import llm
+from nemo.collections.llm.inference.base import _setup_trainer_and_restore_model
from nemo.lightning.ckpt_utils import ckpt_to_context_subdir
from nemo.utils import logging
@@ -42,25 +43,44 @@ def quantizable_model_config(model_cfg: llm.GPTConfig) -> llm.GPTConfig:
return model_cfg
-def load_with_modelopt_layer_spec(nemo_checkpoint_path: str, calib_tp: int = 1, calib_pp: int = 1) -> llm.GPTModel:
+def load_with_modelopt_layer_spec(
+ nemo_checkpoint_path: str, calib_tp: int = 1, calib_pp: int = 1, inference_only: bool = True
+):
+ # TODO: setting ddp="pytorch" and manually deleting model.optim is a hackish way to disable DDP initialization. Needs a systematic solution.
+ if inference_only:
+ strategy = nl.MegatronStrategy(
+ tensor_model_parallel_size=calib_tp,
+ pipeline_model_parallel_size=calib_pp,
+ pipeline_dtype=torch.bfloat16,
+ ckpt_load_optimizer=False,
+ ckpt_parallel_save_optim=False,
+ setup_optimizers=False,
+ lazy_init=True,
+ ddp="pytorch",
+ )
+ else:
+ strategy = nl.MegatronStrategy(
+ tensor_model_parallel_size=calib_tp, pipeline_model_parallel_size=calib_pp, pipeline_dtype=torch.bfloat16
+ )
+
trainer = nl.Trainer(
devices=calib_tp,
num_nodes=calib_pp,
- strategy=nl.MegatronStrategy(
- tensor_model_parallel_size=calib_tp, pipeline_model_parallel_size=calib_pp, pipeline_dtype=torch.bfloat16
- ),
- plugins=nl.MegatronMixedPrecision(precision='bf16', pipeline_dtype=torch.bfloat16, autocast_enabled=True),
+ strategy=strategy,
+ plugins=nl.MegatronMixedPrecision(precision='bf16', params_dtype=torch.bfloat16, autocast_enabled=True),
)
- fabric = trainer.to_fabric()
- fabric.launch()
-
model_path = Path(nemo_checkpoint_path)
- model = nl.io.load_context(ckpt_to_context_subdir(model_path)).model
+ model = nl.io.load_context(path=ckpt_to_context_subdir(model_path), subpath="model")
model.config = quantizable_model_config(model.config)
- return fabric.load_model(nemo_checkpoint_path, model=model)
+
+ if inference_only:
+ del model.optim
+
+ _setup_trainer_and_restore_model(nemo_checkpoint_path, trainer, model)
+ return model
-def get_unwrapped_mcore_model(model: llm.GPTModel):
+def get_unwrapped_mcore_model(model):
from megatron.core.models.gpt import GPTModel as MCoreGPTModel
unwrapped_model = model
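
`get_unwrapped_mcore_model` follows the usual Megatron pattern of peeling wrapper modules until the core `MCoreGPTModel` is reached. A generic sketch of that loop, assuming each wrapper (DDP, Float16Module, ...) exposes its inner model as `.module`:

    def unwrap(model, target_cls):
        while not isinstance(model, target_cls):
            model = model.module  # assumed wrapper attribute
        return model
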
diff --git a/nemo/collections/llm/recipes/__init__.py b/nemo/collections/llm/recipes/__init__.py
index 551de93cce43..1db88f633e89 100644
--- a/nemo/collections/llm/recipes/__init__.py
+++ b/nemo/collections/llm/recipes/__init__.py
@@ -30,7 +30,11 @@
llama3_70b,
llama3_70b_16k,
llama3_70b_64k,
+ llama31_8b,
+ llama31_70b,
llama31_405b,
+ llama32_1b,
+ llama32_3b,
mamba2_1_3b,
mamba2_2_7b,
mamba2_8b,
@@ -47,29 +51,31 @@
nemotron,
nemotron3_4b,
nemotron3_8b,
+ nemotron3_22b,
+ nemotron3_22b_16k,
+ nemotron3_22b_64k,
nemotron4_15b,
nemotron4_15b_16k,
nemotron4_15b_64k,
- nemotron4_22b,
- nemotron4_22b_16k,
- nemotron4_22b_64k,
nemotron4_340b,
+ phi3_mini_4k_instruct,
qwen2,
qwen2_1p5b,
qwen2_7b,
qwen2_72b,
qwen2_500m,
- starcoder,
starcoder2,
starcoder2_3b,
starcoder2_7b,
starcoder2_15b,
+ starcoder_15b,
t5_3b,
t5_11b,
t5_220m,
)
from nemo.collections.llm.recipes.log.default import default_log, default_resume
from nemo.collections.llm.recipes.optim import adam
+from nemo.collections.llm.recipes.run.executor import torchrun
__all__ = [
"baichuan2_7b",
@@ -82,7 +88,11 @@
"llama3_70b",
"llama3_70b_16k",
"llama3_70b_64k",
+ "llama31_8b",
+ "llama31_70b",
"llama31_405b",
+ "llama32_1b",
+ "llama32_3b",
"mamba2_130m",
"mamba2_370m",
"mamba2_780m",
@@ -100,17 +110,18 @@
"nemotron",
"nemotron3_4b",
"nemotron3_8b",
+ "nemotron3_22b",
+ "nemotron3_22b_16k",
+ "nemotron3_22b_64k",
"nemotron4_15b",
"nemotron4_15b_16k",
"nemotron4_15b_64k",
- "nemotron4_22b",
- "nemotron4_22b_16k",
- "nemotron4_22b_64k",
"nemotron4_340b",
+ "phi3_mini_4k_instruct",
"t5_220m",
"t5_3b",
"t5_11b",
- "starcoder",
+ "starcoder_15b",
"starcoder2",
"starcoder2_3b",
"starcoder2_7b",
@@ -128,4 +139,5 @@
"adam",
"default_log",
"default_resume",
+ "torchrun",
]
diff --git a/nemo/collections/llm/recipes/baichuan2_7b.py b/nemo/collections/llm/recipes/baichuan2_7b.py
index f56c16afcf6a..1350cbaa7edd 100644
--- a/nemo/collections/llm/recipes/baichuan2_7b.py
+++ b/nemo/collections/llm/recipes/baichuan2_7b.py
@@ -15,17 +15,17 @@
from typing import Callable, Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.distributed import DistributedDataParallelConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections.llm import Baichuan2Config7B, Baichuan2Model
from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
-from nemo.collections.llm.peft.lora import LoRA
+from nemo.collections.llm.peft import PEFT_STR2CLS
from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
@@ -254,7 +254,10 @@ def finetune_recipe(
name (str): Name of the fine-tuning run.
num_nodes (int): Number of compute nodes to use.
num_gpus_per_node (int): Number of GPUs per node.
- peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora', 'none'/None.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+ packed_sequence (Optional[bool]): Whether to pack multiple training sequences into one long sequence
+ for training efficiency. The default packed sequence length is 2048.
Returns:
run.Partial: Partial configuration for fine-tuning.
@@ -278,8 +281,8 @@ def finetune_recipe(
if peft_scheme is None or peft_scheme.lower() == 'none':
recipe.trainer.strategy.tensor_model_parallel_size = 2
recipe.optim.config.lr = 5e-6
- elif peft_scheme.lower() == 'lora':
- recipe.peft = run.Config(LoRA)
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
recipe.optim.config.lr = 1e-4
else:
raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
diff --git a/nemo/collections/llm/recipes/callbacks/__init__.py b/nemo/collections/llm/recipes/callbacks/__init__.py
new file mode 100644
index 000000000000..d9155f923f18
--- /dev/null
+++ b/nemo/collections/llm/recipes/callbacks/__init__.py
@@ -0,0 +1,13 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/nemo/collections/llm/recipes/callbacks/common.py b/nemo/collections/llm/recipes/callbacks/common.py
new file mode 100644
index 000000000000..72a1b3a0c640
--- /dev/null
+++ b/nemo/collections/llm/recipes/callbacks/common.py
@@ -0,0 +1,53 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Optional
+
+from nemo_run import Config, cli
+
+from nemo.utils.import_utils import safe_import
+
+res_module, HAVE_RES = safe_import('nvidia_resiliency_ext.ptl_resiliency')
+
+
+@cli.factory(is_target_default=True)
+def straggler_det_callback(
+ straggler_report_time_interval: Optional[int] = 300, stop_if_detected_straggler: Optional[bool] = True
+) -> Config[res_module.StragglerDetectionCallback]:
+ """
+ This callback is used to detect slower ranks participating in a PyTorch distributed workload.
+ This callback is obtained from nvidia-resiliency-ext.
+ Performance scores are scalar values from 0.0 (worst) to 1.0 (best), reflecting each rank's performance.
+ A performance score can be interpreted as the ratio of current performance to reference performance.
+ Depending on the reference used, there are two types of performance scores:
+ Relative performance score: The best-performing rank in the workload is used as a reference.
+ Individual performance score: The best historical performance of the rank is used as a reference.
+ If a rank's performance score drops below the threshold, which is set to 0.7 here, it is deemed a straggler.
+ To detect stragglers, users can enable this callback, which reports the performance scores every 5 minutes by default.
+ Args:
+ straggler_report_time_interval (int): Performance score reporting frequency in seconds. Defaults to 300.
+ stop_if_detected_straggler (bool): Whether to stop training if a straggler is detected. Defaults to True.
+ """
+
+ return Config(
+ res_module.StragglerDetectionCallback,
+ report_time_interval=straggler_report_time_interval,
+ calc_relative_gpu_perf=True,
+ calc_individual_gpu_perf=True,
+ num_gpu_perf_scores_to_print=5,
+ gpu_relative_perf_threshold=0.7,
+ gpu_individual_perf_threshold=0.7,
+ stop_if_detected=stop_if_detected_straggler,
+ enable_ptl_logging=True,
+ )
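
Because `straggler_det_callback` returns a `run.Config`, it can be appended to any recipe's trainer callbacks. A usage sketch; the recipe module and its arguments are hypothetical:

    from nemo.collections.llm.recipes import llama3_8b
    from nemo.collections.llm.recipes.callbacks.common import straggler_det_callback

    recipe = llama3_8b.pretrain_recipe(name="demo", num_nodes=1)  # hypothetical arguments
    recipe.trainer.callbacks.append(straggler_det_callback(stop_if_detected_straggler=False))
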
diff --git a/nemo/collections/llm/recipes/chatglm3_6b.py b/nemo/collections/llm/recipes/chatglm3_6b.py
index 5ced78916c29..2cd424ce5bf6 100644
--- a/nemo/collections/llm/recipes/chatglm3_6b.py
+++ b/nemo/collections/llm/recipes/chatglm3_6b.py
@@ -15,17 +15,17 @@
from typing import Callable, Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.distributed import DistributedDataParallelConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections.llm import ChatGLM3Config6B, ChatGLMModel
from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
-from nemo.collections.llm.peft.lora import LoRA
+from nemo.collections.llm.peft import PEFT_STR2CLS
from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
@@ -254,7 +254,10 @@ def finetune_recipe(
name (str): Name of the fine-tuning run.
num_nodes (int): Number of compute nodes to use.
num_gpus_per_node (int): Number of GPUs per node.
- peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora', 'none'/None.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+ packed_sequence (Optional[bool]): Whether to pack multiple training sequences into one long sequence
+ for training efficiency. The default packed sequence length is 2048.
Returns:
run.Partial: Partial configuration for fine-tuning.
@@ -278,8 +281,8 @@ def finetune_recipe(
if peft_scheme is None or peft_scheme.lower() == 'none':
recipe.trainer.strategy.tensor_model_parallel_size = 2
recipe.optim.config.lr = 5e-6
- elif peft_scheme.lower() == 'lora':
- recipe.peft = run.Config(LoRA)
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
recipe.optim.config.lr = 1e-4
else:
raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
diff --git a/nemo/collections/llm/recipes/finetune_default.py b/nemo/collections/llm/recipes/finetune_default.py
index 5a1ff58e8661..e8af7f67bdbd 100644
--- a/nemo/collections/llm/recipes/finetune_default.py
+++ b/nemo/collections/llm/recipes/finetune_default.py
@@ -14,15 +14,18 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
+import torch
import nemo.lightning as nl
from nemo.collections import llm
from nemo.collections.llm.gpt.data.packed_sequence import PackedSequenceSpecs
+from nemo.collections.llm.peft import DoRA, LoRA
from nemo.collections.llm.recipes.log.default import tensorboard_logger
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
from nemo.collections.llm.recipes.precision.mixed_precision import bf16_mixed
+from nemo.lightning.pytorch.callbacks import PEFT
def default_finetune_recipe(
@@ -42,7 +45,7 @@ def default_finetune_recipe(
Args:
model (run.Config[pl.LightningModule]): Configuration for a NeMo model.
- resume_path (str): Path to the Huggingface model.
+ resume_path (str): Path to the Huggingface model or a pretrained distributed checkpoint to resume from.
dir (Optional[str]): Directory for saving logs and checkpoints.
name (str): Name of the fine-tuning run.
num_nodes (int): Number of compute nodes to use.
@@ -82,7 +85,7 @@ def default_finetune_recipe(
def default_finetune_trainer(
tensor_parallelism=1,
pipeline_parallelism=1,
- pipeline_parallelism_type=None,
+ pipeline_parallelism_type=torch.bfloat16,
virtual_pipeline_parallelism=None,
context_parallelism=1,
sequence_parallelism=False,
@@ -93,6 +96,19 @@ def default_finetune_trainer(
limit_val_batches=None,
val_check_interval=30,
):
+ """
+ Create a default fine-tuning trainer for any model.
+
+ This function sets up a template for strategy and trainer.
+
+ Args:
+        See the docstrings of MegatronStrategy and Trainer for the individual parameters.
+
+ Returns:
+ run.Config: Config for a finetuning trainer.
+
+ See usages of this in recipes for further details.
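+
+    Example (illustrative, with assumed values):
+        >>> trainer_cfg = default_finetune_trainer(tensor_parallelism=2, pipeline_parallelism=1)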
+ """
strategy = run.Config(
nl.MegatronStrategy,
tensor_model_parallel_size=tensor_parallelism,
@@ -125,7 +141,8 @@ def default_finetune_trainer(
def nemo_resume(model_id: str) -> run.Config[nl.AutoResume]:
"""
- Configure automatic resumption from a NeMo checkpoint converted from Huggingface for https://huggingface.co/{model_id}.
+ Configure automatic resumption from a NeMo checkpoint converted from Huggingface for
+ https://huggingface.co/{model_id}.
This NeMo checkpoint should be converted from Huggingface beforehand, using nemo.collections.llm.import_ckpt.
When converting the checkpoint, the NeMo checkpoint will be saved in NEMO_HOME (set to ~/.cache/nemo by default).
@@ -134,7 +151,7 @@ def nemo_resume(model_id: str) -> run.Config[nl.AutoResume]:
This translates to the full path {NEMO_HOME}/models/{model_id}.
Args:
- model_id (str): The Huggingface model to resume.
+        model_id (str): The Huggingface model ID or path to a pretrained distributed checkpoint to resume from.
Returns:
run.Config[nl.AutoResume]: Configuration for resuming from NeMo checkpoint.
@@ -143,3 +160,41 @@ def nemo_resume(model_id: str) -> run.Config[nl.AutoResume]:
nl.AutoResume,
restore_config=run.Config(nl.RestoreConfig, path=f"nemo://{model_id}"),
)
+
+
+@run.cli.factory(name='lora')
+def lora() -> run.Config[PEFT]:
+ """
+ Factory function to create a LoRA configuration.
+
+ Returns:
+ run.Config[PEFT]: Configuration for the LoRA class.
+
+ Examples:
+ CLI usage:
+ $ nemo llm finetune -f llama3_8b peft=lora
+
+ Python API usage:
+ >>> lora_config = lora()
+ >>> print(lora_config)
+ """
+ return run.Config(LoRA)
+
+
+@run.cli.factory(name='dora')
+def dora() -> run.Config[PEFT]:
+ """
+ Factory function to create a DoRA configuration.
+
+ Returns:
+ run.Config[PEFT]: Configuration for the DoRA class.
+
+ Examples:
+ CLI usage:
+ $ nemo llm finetune -f llama3_8b peft=dora
+
+ Python API usage:
+ >>> dora_config = dora()
+ >>> print(dora_config)
+ """
+ return run.Config(DoRA)
diff --git a/nemo/collections/llm/recipes/gemma2.py b/nemo/collections/llm/recipes/gemma2.py
index 6fd1be83c183..2a690dc556d8 100644
--- a/nemo/collections/llm/recipes/gemma2.py
+++ b/nemo/collections/llm/recipes/gemma2.py
@@ -14,11 +14,11 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.distributed import DistributedDataParallelConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections.llm.gpt.model.gemma2 import Gemma2Config2B, Gemma2Config9B, Gemma2Config27B, Gemma2Model
diff --git a/nemo/collections/llm/recipes/gemma2_27b.py b/nemo/collections/llm/recipes/gemma2_27b.py
index 4b7c09e30bfc..d6b41c0a221c 100644
--- a/nemo/collections/llm/recipes/gemma2_27b.py
+++ b/nemo/collections/llm/recipes/gemma2_27b.py
@@ -14,13 +14,13 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
-from nemo.collections.llm.peft.lora import LoRA
+from nemo.collections.llm.peft import PEFT_STR2CLS
from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.gemma2 import gemma2_model, gemma2_trainer
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
@@ -177,6 +177,7 @@ def finetune_recipe(
num_nodes: int = 1,
num_gpus_per_node: int = 8,
peft_scheme: Optional[str] = 'lora',
+ packed_sequence: bool = False,
) -> run.Partial:
"""
Create a fine-tuning recipe for Gemma2 27B model.
@@ -190,7 +191,10 @@ def finetune_recipe(
name (str): Name of the fine-tuning run.
num_nodes (int): Number of compute nodes to use.
num_gpus_per_node (int): Number of GPUs per node.
- peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora', 'none'/None.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+        packed_sequence (bool): If True, pack multiple training sequences into one long sequence for training
+            efficiency. The default sequence length is 2048.
Returns:
run.Partial: Partial configuration for fine-tuning.
@@ -208,13 +212,18 @@ def finetune_recipe(
on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
`examples/llm/finetune/` directory.
"""
- recipe = default_finetune_recipe(model(), "google/gemma-2-27b", dir, name, num_nodes, num_gpus_per_node)
+ recipe = default_finetune_recipe(
+ model(), "google/gemma-2-27b", dir, name, num_nodes, num_gpus_per_node, packed_sequence
+ )
+ # Gemma requires BOS
+ recipe.data.dataset_kwargs = {'add_bos': True}
+
if peft_scheme is None or peft_scheme.lower() == 'none':
recipe.optim.config.lr = 5e-6
recipe.trainer.strategy.tensor_model_parallel_size = 8
recipe.trainer.strategy.pipeline_model_parallel_size = 2
- elif peft_scheme.lower() == 'lora':
- recipe.peft = run.Config(LoRA)
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
recipe.trainer.strategy.tensor_model_parallel_size = 4
recipe.optim.config.lr = 1e-4
else:
diff --git a/nemo/collections/llm/recipes/gemma2_2b.py b/nemo/collections/llm/recipes/gemma2_2b.py
index 952d08184168..138140d0515d 100644
--- a/nemo/collections/llm/recipes/gemma2_2b.py
+++ b/nemo/collections/llm/recipes/gemma2_2b.py
@@ -14,13 +14,13 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
-from nemo.collections.llm.peft.lora import LoRA
+from nemo.collections.llm.peft import PEFT_STR2CLS
from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.gemma2 import gemma2_model, gemma2_trainer
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
@@ -177,6 +177,7 @@ def finetune_recipe(
num_nodes: int = 1,
num_gpus_per_node: int = 8,
peft_scheme: Optional[str] = 'lora',
+ packed_sequence: bool = False,
) -> run.Partial:
"""
Create a fine-tuning recipe for Gemma2 2B model.
@@ -190,7 +191,10 @@ def finetune_recipe(
name (str): Name of the fine-tuning run.
num_nodes (int): Number of compute nodes to use.
num_gpus_per_node (int): Number of GPUs per node.
- peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora', 'none'/None.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+        packed_sequence (bool): If True, pack multiple training sequences into one long sequence for training
+            efficiency. The default sequence length is 2048.
Returns:
run.Partial: Partial configuration for fine-tuning.
@@ -208,11 +212,16 @@ def finetune_recipe(
on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
`examples/llm/finetune/` directory.
"""
- recipe = default_finetune_recipe(model(), "google/gemma-2-2b", dir, name, num_nodes, num_gpus_per_node)
+ recipe = default_finetune_recipe(
+ model(), "google/gemma-2-2b", dir, name, num_nodes, num_gpus_per_node, packed_sequence
+ )
+ # Gemma requires BOS
+ recipe.data.dataset_kwargs = {'add_bos': True}
+
if peft_scheme is None or peft_scheme.lower() == 'none':
recipe.optim.config.lr = 5e-6
- elif peft_scheme.lower() == 'lora':
- recipe.peft = run.Config(LoRA)
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
recipe.optim.config.lr = 1e-4
else:
raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
diff --git a/nemo/collections/llm/recipes/gemma2_9b.py b/nemo/collections/llm/recipes/gemma2_9b.py
index 8f004c5a2a8d..c49ac0246307 100644
--- a/nemo/collections/llm/recipes/gemma2_9b.py
+++ b/nemo/collections/llm/recipes/gemma2_9b.py
@@ -14,13 +14,13 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
-from nemo.collections.llm.peft.lora import LoRA
+from nemo.collections.llm.peft import PEFT_STR2CLS
from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.gemma2 import gemma2_model, gemma2_trainer
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
@@ -177,6 +177,7 @@ def finetune_recipe(
num_nodes: int = 1,
num_gpus_per_node: int = 8,
peft_scheme: Optional[str] = 'lora',
+ packed_sequence: bool = False,
) -> run.Partial:
"""
Create a fine-tuning recipe for Gemma2 9B model.
@@ -190,7 +191,10 @@ def finetune_recipe(
name (str): Name of the fine-tuning run.
num_nodes (int): Number of compute nodes to use.
num_gpus_per_node (int): Number of GPUs per node.
- peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora', 'none'/None.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+        packed_sequence (bool): If True, pack multiple training sequences into one long sequence for training
+            efficiency. The default sequence length is 2048.
Returns:
run.Partial: Partial configuration for fine-tuning.
@@ -208,12 +212,17 @@ def finetune_recipe(
on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
`examples/llm/finetune/` directory.
"""
- recipe = default_finetune_recipe(model(), "google/gemma-2-9b", dir, name, num_nodes, num_gpus_per_node)
+ recipe = default_finetune_recipe(
+ model(), "google/gemma-2-9b", dir, name, num_nodes, num_gpus_per_node, packed_sequence
+ )
+ # Gemma requires BOS
+ recipe.data.dataset_kwargs = {'add_bos': True}
+
if peft_scheme is None or peft_scheme.lower() == 'none':
recipe.optim.config.lr = 5e-6
recipe.trainer.strategy.tensor_model_parallel_size = 4
- elif peft_scheme.lower() == 'lora':
- recipe.peft = run.Config(LoRA)
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
recipe.optim.config.lr = 1e-4
else:
raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
diff --git a/nemo/collections/llm/recipes/gemma_2b.py b/nemo/collections/llm/recipes/gemma_2b.py
index cead1f2e5689..8bdf89696d56 100644
--- a/nemo/collections/llm/recipes/gemma_2b.py
+++ b/nemo/collections/llm/recipes/gemma_2b.py
@@ -14,17 +14,17 @@
import os
from typing import Callable, Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.distributed import DistributedDataParallelConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections.llm import GemmaConfig2B, GemmaModel
from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
-from nemo.collections.llm.peft.lora import LoRA
+from nemo.collections.llm.peft import PEFT_STR2CLS
from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
@@ -253,7 +253,10 @@ def finetune_recipe(
name (str): Name of the fine-tuning run.
num_nodes (int): Number of compute nodes to use.
num_gpus_per_node (int): Number of GPUs per node.
- peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora', 'none'/None.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+        packed_sequence (Optional[bool]): If True, pack multiple training sequences into one long sequence for
+            training efficiency. The default sequence length is 2048.
Returns:
run.Partial: Partial configuration for fine-tuning.
@@ -277,11 +280,14 @@ def finetune_recipe(
recipe = default_finetune_recipe(
model(), "google/gemma-2b", dir, name, num_nodes, num_gpus_per_node, packed_sequence
)
+ # Gemma requires BOS
+ recipe.data.dataset_kwargs = {'add_bos': True}
+
if peft_scheme is None or peft_scheme.lower() == 'none':
- recipe.trainer.strategy.tensor_model_parallel_size = 2
+ recipe.trainer.strategy.context_parallel_size = 2
recipe.optim.config.lr = 5e-6
- elif peft_scheme.lower() == 'lora':
- recipe.peft = run.Config(LoRA)
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
recipe.optim.config.lr = 1e-4
else:
raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
diff --git a/nemo/collections/llm/recipes/gemma_7b.py b/nemo/collections/llm/recipes/gemma_7b.py
index ba6458af20d2..46c91e27575a 100644
--- a/nemo/collections/llm/recipes/gemma_7b.py
+++ b/nemo/collections/llm/recipes/gemma_7b.py
@@ -14,17 +14,17 @@
import os
from typing import Callable, Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.distributed import DistributedDataParallelConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections.llm import GemmaConfig7B, GemmaModel
from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
-from nemo.collections.llm.peft.lora import LoRA
+from nemo.collections.llm.peft import PEFT_STR2CLS
from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
@@ -55,7 +55,7 @@ def model() -> run.Config[pl.LightningModule]:
def trainer(
- tensor_parallelism: int = 1,
+ tensor_parallelism: int = 2,
pipeline_parallelism: int = 1,
pipeline_parallelism_type: Optional[torch.dtype] = None,
virtual_pipeline_parallelism: Optional[int] = None,
@@ -171,6 +171,9 @@ def pretrain_recipe(
For more details on pre-training LLMs with NeMo, see the pre-training
guide in the `examples/llm/pretrain/` directory.
"""
+ # Disable cuDNN attention since TE 1.8 does not support head dim > 128
+ os.environ['NVTE_FUSED_ATTN'] = "0"
+
return run.Partial(
fn,
model=model(),
@@ -253,7 +256,10 @@ def finetune_recipe(
name (str): Name of the fine-tuning run.
num_nodes (int): Number of compute nodes to use.
num_gpus_per_node (int): Number of GPUs per node.
- peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora', 'none'/None.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+        packed_sequence (Optional[bool]): If True, pack multiple training sequences into one long sequence for
+            training efficiency. The default sequence length is 2048.
Returns:
run.Partial: Partial configuration for fine-tuning.
@@ -277,11 +283,14 @@ def finetune_recipe(
recipe = default_finetune_recipe(
model(), "google/gemma-7b", dir, name, num_nodes, num_gpus_per_node, packed_sequence
)
+ # Gemma requires BOS
+ recipe.data.dataset_kwargs = {'add_bos': True}
+
if peft_scheme is None or peft_scheme.lower() == 'none':
recipe.trainer.strategy.tensor_model_parallel_size = 2
recipe.optim.config.lr = 5e-6
- elif peft_scheme.lower() == 'lora':
- recipe.peft = run.Config(LoRA)
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
recipe.optim.config.lr = 1e-4
else:
raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
diff --git a/nemo/collections/llm/recipes/gpt3_175b.py b/nemo/collections/llm/recipes/gpt3_175b.py
index 1abe8a218e82..189f0ca6baf1 100644
--- a/nemo/collections/llm/recipes/gpt3_175b.py
+++ b/nemo/collections/llm/recipes/gpt3_175b.py
@@ -15,11 +15,11 @@
from typing import Callable, Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.distributed import DistributedDataParallelConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections.llm.api import pretrain
@@ -229,7 +229,7 @@ def pretrain_performance_optimizations(recipe: run.Partial) -> run.Partial:
tp_comm_overlap_cfg=userbuffers_bf16_h100_h12288_tp4_mbs1_seqlen2048,
defer_embedding_wgrad_compute=True,
wgrad_deferral_limit=50,
- overlap_param_gather_with_optimizer_step=True,
+ overlap_param_gather_with_optimizer_step=False, # Currently disabled due to an issue with checkpointing
align_param_gather=True,
)
)
diff --git a/nemo/collections/llm/recipes/hf_auto_model_for_causal_lm.py b/nemo/collections/llm/recipes/hf_auto_model_for_causal_lm.py
index f3ac1d6975bc..d93b167b45b6 100644
--- a/nemo/collections/llm/recipes/hf_auto_model_for_causal_lm.py
+++ b/nemo/collections/llm/recipes/hf_auto_model_for_causal_lm.py
@@ -15,15 +15,16 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
-from pytorch_lightning.callbacks.callback import Callback
+from lightning.pytorch.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
from nemo.collections.llm.gpt.model.hf_auto_model_for_causal_lm import HfAutoModelForCausalLM
+from nemo.collections.llm.peft.lora import LoRA
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.optim.adam import pytorch_adam_with_cosine_annealing
from nemo.utils.exp_manager import TimingCallback
@@ -32,7 +33,7 @@
@run.cli.factory(name=NAME)
-def model(model_name) -> run.Config[pl.LightningModule]:
+def model(model_name, load_pretrained_weights) -> run.Config[pl.LightningModule]:
"""
Factory function to create HfAutoModelForCausalLM model configurations.
@@ -50,7 +51,7 @@ def model(model_name) -> run.Config[pl.LightningModule]:
>>> model_config = model(model_name="mistralai/Mistral-Nemo-Instruct-2407")
>>> print(model_config)
"""
- return run.Config(HfAutoModelForCausalLM, model_name=model_name)
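+    # load_pretrained_weights selects between loading the HF checkpoint weights and config-only initialization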
+ return run.Config(HfAutoModelForCausalLM, model_name=model_name, load_pretrained_weights=load_pretrained_weights)
def trainer(
@@ -223,7 +224,7 @@ def finetune_recipe(
if peft_scheme is None or peft_scheme.lower() == 'none':
recipe.optim.config.lr = 5e-6
elif peft_scheme.lower() == 'lora':
- recipe.peft = run.Config(LoRA)
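+        # '*_proj' is a wildcard over Hugging Face projection-module names (e.g. q_proj, o_proj, up_proj)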
+ recipe.peft = run.Config(LoRA, target_modules=['*_proj'])
recipe.optim.config.lr = 1e-4
else:
raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
diff --git a/nemo/collections/llm/recipes/llama31_405b.py b/nemo/collections/llm/recipes/llama31_405b.py
index 055e9a06fcba..5f08d82bd888 100644
--- a/nemo/collections/llm/recipes/llama31_405b.py
+++ b/nemo/collections/llm/recipes/llama31_405b.py
@@ -15,22 +15,26 @@
from typing import Callable, Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.distributed import DistributedDataParallelConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo import lightning as nl
-from nemo.collections.llm.api import pretrain
+from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
+from nemo.collections.llm.gpt.data.packed_sequence import PackedSequenceSpecs
from nemo.collections.llm.gpt.model.llama import Llama31Config405B, LlamaModel
+from nemo.collections.llm.peft import PEFT_STR2CLS
+from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
from nemo.collections.llm.recipes.precision.mixed_precision import bf16_mixed
from nemo.collections.llm.recipes.tp_overlap_configs.userbuffers import (
userbuffers_bf16_h100_h16384_tp8_cp2_mbs1_seqlen8192,
)
+from nemo.lightning.pytorch.callbacks import GarbageCollectionCallback
from nemo.lightning.pytorch.callbacks.megatron_comm_overlap import MegatronCommOverlapCallback
from nemo.utils.exp_manager import TimingCallback
@@ -231,9 +235,170 @@ def pretrain_performance_optimizations(recipe: run.Partial) -> run.Partial:
tp_comm_overlap_cfg=userbuffers_bf16_h100_h16384_tp8_cp2_mbs1_seqlen8192,
defer_embedding_wgrad_compute=True,
wgrad_deferral_limit=50,
- overlap_param_gather_with_optimizer_step=True,
+ overlap_param_gather_with_optimizer_step=False, # Currently disabled due to an issue with checkpointing
align_param_gather=True,
)
)
return recipe
+
+
+@run.cli.factory(target=finetune, name=NAME)
+def finetune_recipe(
+ dir: Optional[str] = None,
+ name: str = "default",
+ num_nodes: int = 3,
+ num_gpus_per_node: int = 8,
+ peft_scheme: Optional[str] = 'lora',
+ seq_length: Optional[int] = None,
+ packed_sequence: Optional[bool] = None,
+ performance_mode: bool = False,
+) -> run.Partial:
+ """
+ Create a fine-tuning recipe for Llama3.1 405B model.
+
+ This function sets up a complete configuration for fine-tuning, including
+ model, trainer, data, logging, optimization, and resumption settings.
+ The recipe uses LoRA (Low-Rank Adaptation) for efficient fine-tuning, unless peft_scheme is set to None.
+
+ Args:
+ dir (Optional[str]): Directory for saving logs and checkpoints.
+ name (str): Name of the fine-tuning run.
+ num_nodes (int): Number of compute nodes to use.
+ num_gpus_per_node (int): Number of GPUs per node.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+        seq_length (Optional[int]): Maximum number of tokens per microbatch.
+ packed_sequence (Optional[bool]): If true, fine-tuning sequences will be packed into batches up to the given
+ maximum seq_length for better efficiency. By default, this value equals performance_mode.
+ performance_mode (bool): If true, enables optimizations for maximum performance.
+ Returns:
+ run.Partial: Partial configuration for fine-tuning.
+
+ Examples:
+ CLI usage:
+ $ nemo llm finetune --factory llama31_405b
+ $ nemo llm finetune --factory "llama31_405b(num_nodes=3, name='my_llama31_405b_finetune')"
+
+ Python API usage:
+ >>> recipe = finetune_recipe(name="llama31_405b_finetune", num_nodes=3)
+ >>> print(recipe)
+
+ Note:
+ This recipe uses the SQuAD dataset for fine-tuning. Be aware that fine-tuning a 405B model
+ requires substantial computational resources.
+ """
+ if packed_sequence is None:
+ packed_sequence = performance_mode
+
+ if seq_length is None:
+ seq_length = 2048
+
+ if num_nodes is None:
+ if peft_scheme is None or peft_scheme.lower() == 'none':
+ num_nodes = 12
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ num_nodes = 3
+
+ recipe = default_finetune_recipe(
+ model(), "meta-llama/Llama-3.1-405B", dir, name, num_nodes, num_gpus_per_node, packed_sequence
+ )
+ if peft_scheme is None or peft_scheme.lower() == 'none':
+ recipe.trainer.strategy.tensor_model_parallel_size = 8
+ recipe.trainer.strategy.pipeline_model_parallel_size = 14
+ recipe.data.global_batch_size = 6
+ recipe.optim.config.lr = 5e-6
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
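+        # Adapter rank (dim) and scaling factor alpha; LoRA scales its update by alpha / dim (here 32 / 16 = 2)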
+ recipe.peft.dim = 16
+ recipe.peft.alpha = 32
+ recipe.optim.config.use_distributed_optimizer = False
+
+ # some settings currently do not function correctly with LoRA
+ recipe.model.config.cross_entropy_loss_fusion = False
+ recipe.trainer.strategy.tensor_model_parallel_size = 4
+ recipe.trainer.strategy.pipeline_model_parallel_size = 6
+ recipe.trainer.strategy.virtual_pipeline_model_parallel_size = 7
+ recipe.data.global_batch_size = 6
+ recipe.optim.config.lr = 1e-4
+ else:
+ raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
+
+ # Sequence length settings in the model and dataset must agree
+ recipe.model.config.seq_length = seq_length
+ recipe.data.seq_length = seq_length
+ if packed_sequence:
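+        # Padding each packed sample to packed_sequence_size keeps batch shapes static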
+ recipe.data.dataset_kwargs = {'pad_to_max_length': True}
+ recipe.data.packed_sequence_specs = run.Config(PackedSequenceSpecs, packed_sequence_size=seq_length)
+
+ if performance_mode:
+ recipe = finetune_performance_optimizations(recipe, peft_scheme)
+
+ return recipe
+
+
+def finetune_performance_optimizations(
+ recipe: run.Partial,
+ peft_scheme: str,
+) -> run.Partial:
+ """
+ Modify the given recipe to optimize settings for performance.
+
+ This method enables performance optimizations that may not be suitable for all use cases.
+ Intended to build upon the standard fine-tuning recipe.
+
+ Args:
+ recipe (run.Partial): Base fine-tuning recipe to which performance optimizations will be added
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+
+ Returns:
+ run.Partial: Partial configuration for performance-optimized fine-tuning.
+
+ Note:
+ Use this method with caution and only when you need maximum performance.
+ It may not be suitable for all hardware configurations or use cases.
+ """
+
+ if not hasattr(recipe.trainer, "callbacks"):
+ recipe.trainer.callbacks = []
+
+ if peft_scheme is None or peft_scheme.lower() == 'none':
+ # Note: limited support. This is not necessarily the most optimized setting
+ recipe.trainer.strategy.tensor_model_parallel_size = 8
+ recipe.trainer.strategy.pipeline_model_parallel_size = 14
+ recipe.trainer.plugins.grad_reduce_in_fp32 = False
+ recipe.trainer.strategy.ddp = run.Config(
+ DistributedDataParallelConfig,
+ check_for_nan_in_grad=True,
+ grad_reduce_in_fp32=False,
+ overlap_grad_reduce=True,
+ overlap_param_gather=True,
+ average_in_collective=True,
+ )
+ recipe.trainer.callbacks.append(
+ run.Config(
+ MegatronCommOverlapCallback,
+ tp_comm_overlap=True,
+ defer_embedding_wgrad_compute=True,
+ wgrad_deferral_limit=22,
+ )
+ )
+ else:
+ recipe.trainer.strategy.tensor_model_parallel_size = 4
+ recipe.trainer.strategy.pipeline_model_parallel_size = 6
+ recipe.trainer.strategy.virtual_pipeline_model_parallel_size = 7
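+        # Restrict adapters to Megatron's fused QKV projection to keep adapter overhead low in performance mode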
+ recipe.peft.target_modules = ['linear_qkv']
+
+ recipe.trainer.strategy.sequence_parallel = True
+
+ recipe.trainer.callbacks.append(run.Config(TimingCallback))
+ recipe.trainer.callbacks.append(
+ run.Config(
+ GarbageCollectionCallback,
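+            # Positional args are assumed to be the GC intervals: every 100 train steps and every 100 val steps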
+ 100,
+ 100,
+ )
+ )
+
+ return recipe
diff --git a/nemo/collections/llm/recipes/llama31_70b.py b/nemo/collections/llm/recipes/llama31_70b.py
new file mode 100644
index 000000000000..3120fedd7923
--- /dev/null
+++ b/nemo/collections/llm/recipes/llama31_70b.py
@@ -0,0 +1,405 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from typing import Callable, Optional
+
+import lightning.pytorch as pl
+import nemo_run as run
+import torch
+from lightning.pytorch.callbacks.callback import Callback
+from megatron.core.distributed import DistributedDataParallelConfig
+
+from nemo import lightning as nl
+from nemo.collections.llm.api import finetune, pretrain
+from nemo.collections.llm.gpt.data.mock import MockDataModule
+from nemo.collections.llm.gpt.data.packed_sequence import PackedSequenceSpecs
+from nemo.collections.llm.gpt.model.llama import Llama31Config70B, LlamaModel
+from nemo.collections.llm.peft import PEFT_STR2CLS
+from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
+from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
+from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
+from nemo.collections.llm.recipes.precision.mixed_precision import bf16_mixed
+from nemo.collections.llm.recipes.tp_overlap_configs.userbuffers import (
+ userbuffers_bf16_h100_h16384_tp8_cp2_mbs1_seqlen8192,
+)
+from nemo.lightning.pytorch.callbacks import GarbageCollectionCallback
+from nemo.lightning.pytorch.callbacks.megatron_comm_overlap import MegatronCommOverlapCallback
+from nemo.utils.exp_manager import TimingCallback
+
+NAME = "llama31_70b"
+
+
+@run.cli.factory(name=NAME)
+def model() -> run.Config[pl.LightningModule]:
+ """
+ Factory function to create a Llama3.1 70B model configuration.
+
+ Returns:
+ run.Config[pl.LightningModule]: Configuration for the Llama3.1 70B model.
+
+ Examples:
+ CLI usage:
+ $ nemo llm pretrain model=llama31_70b ...
+
+ Python API usage:
+ >>> model_config = model()
+ >>> print(model_config)
+ """
+ conf = run.Config(Llama31Config70B)
+ conf.seq_length = 8192
+ return run.Config(LlamaModel, config=conf)
+
+
+def trainer(
+ tensor_parallelism: int = 4,
+ pipeline_parallelism: int = 4,
+ pipeline_parallelism_type: Optional[torch.dtype] = torch.bfloat16,
+ virtual_pipeline_parallelism: Optional[int] = 5,
+ context_parallelism: int = 2,
+ sequence_parallelism: bool = True,
+ num_nodes: int = 4,
+ num_gpus_per_node: int = 8,
+ max_steps: int = 1168251,
+ callbacks: Optional[list[run.Config[Callback]]] = None,
+) -> run.Config[nl.Trainer]:
+ """
+ Configure the NeMo Lightning Trainer for Llama3.1 70B model.
+
+ This function sets up the distributed training strategy optimized for the large 70B model.
+
+ Args:
+ tensor_parallelism (int): Degree of tensor model parallelism.
+ pipeline_parallelism (int): Degree of pipeline model parallelism.
+ pipeline_parallelism_type (Optional[torch.dtype]): Data type for pipeline parallelism.
+ virtual_pipeline_parallelism (Optional[int]): Size of virtual pipeline parallelism.
+ context_parallelism (int): Degree of context parallelism.
+ sequence_parallelism (bool): Whether to use sequence parallelism.
+ num_nodes (int): Number of compute nodes to use.
+ num_gpus_per_node (int): Number of GPUs per node.
+ max_steps (int): Maximum number of training steps.
+ callbacks (Optional[list[run.Config[Callback]]]): List of callback configurations.
+
+ Returns:
+ run.Config[nl.Trainer]: Configuration for the NeMo Lightning Trainer.
+
+ Examples:
+ CLI usage:
+ $ nemo llm pretrain trainer=llama31_70b ...
+
+ Python API usage:
+ >>> trainer_config = trainer(num_nodes=4, num_gpus_per_node=8)
+ >>> print(trainer_config)
+
+ Note:
+ This configuration uses extensive parallelism to handle the large model size efficiently.
+ """
+ strategy = run.Config(
+ nl.MegatronStrategy,
+ tensor_model_parallel_size=tensor_parallelism,
+ pipeline_model_parallel_size=pipeline_parallelism,
+ pipeline_dtype=pipeline_parallelism_type,
+ virtual_pipeline_model_parallel_size=virtual_pipeline_parallelism,
+ context_parallel_size=context_parallelism,
+ sequence_parallel=sequence_parallelism,
+ gradient_as_bucket_view=True,
+ ckpt_async_save=True,
+ ckpt_parallel_load=True,
+ ddp=run.Config(
+ DistributedDataParallelConfig,
+ check_for_nan_in_grad=True,
+ grad_reduce_in_fp32=True,
+ overlap_grad_reduce=True,
+ overlap_param_gather=True,
+ average_in_collective=True,
+ ),
+ )
+
+ trainer = run.Config(
+ nl.Trainer,
+ accelerator="gpu",
+ accumulate_grad_batches=1,
+ callbacks=callbacks,
+ devices=num_gpus_per_node,
+ limit_test_batches=50,
+ limit_val_batches=32,
+ log_every_n_steps=10,
+ max_steps=max_steps,
+ num_nodes=num_nodes,
+ plugins=bf16_mixed(),
+ strategy=strategy,
+ use_distributed_sampler=False,
+ val_check_interval=2000,
+ )
+
+ return trainer
+
+
+@run.cli.factory(target=pretrain, name=NAME)
+def pretrain_recipe(
+ dir: Optional[str] = None,
+ name: str = "default",
+ num_nodes: int = 1,
+ num_gpus_per_node: int = 8,
+ performance_mode: bool = False,
+ fn: Callable = pretrain,
+) -> run.Partial:
+ """
+ Create a pre-training recipe for Llama3.1 70B model.
+
+ This function sets up a complete configuration for pre-training, including
+ model, trainer, data, logging, optimization, and resumption settings.
+
+ Args:
+ dir (Optional[str]): Directory for saving logs and checkpoints.
+ name (str): Name of the pre-training run.
+ num_nodes (int): Number of compute nodes to use.
+ num_gpus_per_node (int): Number of GPUs per node.
+ performance_mode (bool): If true, enables optimizations for maximum performance.
+ fn (Callable): The pre-training function to use.
+
+ Returns:
+ run.Partial: Partial configuration for pre-training.
+
+ Examples:
+ CLI usage:
+ $ nemo llm pretrain --factory llama31_70b
+ $ nemo llm pretrain --factory "llama31_70b(num_nodes=4, name='my_70b_pretrain')"
+
+ Python API usage:
+ >>> recipe = pretrain_recipe(name="llama31_70b_pretrain", num_nodes=4)
+ >>> print(recipe)
+
+ Note:
+ This recipe is optimized for the large 70B model and requires significant computational resources.
+ """
+ recipe = run.Partial(
+ fn,
+ model=model(),
+ trainer=trainer(
+ num_nodes=num_nodes,
+ num_gpus_per_node=num_gpus_per_node,
+ callbacks=[run.Config(TimingCallback)],
+ ),
+ data=run.Config(MockDataModule, seq_length=8192, global_batch_size=512, micro_batch_size=1),
+ log=default_log(dir=dir, name=name, tensorboard_logger=tensorboard_logger(name=name)),
+ optim=distributed_fused_adam_with_cosine_annealing(max_lr=3e-4),
+ resume=default_resume(),
+ )
+
+ if performance_mode:
+ recipe = pretrain_performance_optimizations(recipe)
+
+ return recipe
+
+
+def pretrain_performance_optimizations(recipe: run.Partial) -> run.Partial:
+ """
+ Create a performance-optimized pre-training recipe for Llama3.1 70B model.
+
+ This method enables performance optimizations that may not be suitable for all use cases.
+ It builds upon the standard pre-training recipe and adds additional performance enhancements.
+
+ Args:
+ recipe (run.Partial): Base pre-train recipe to which performance optimizations will be added
+
+ Returns:
+ run.Partial: Partial configuration for performance-optimized pre-training.
+
+ Note:
+ Use this method with caution and only when you need maximum performance.
+ It may not be suitable for all hardware configurations or use cases.
+ """
+
+    # 'overlap_param_gather_with_optimizer_step' and 'align_param_gather' params are set automatically
+    # by MegatronCommOverlapCallback. They are listed here for the user's reference.
+    # overlap_param_gather_with_optimizer_step: overlap the param all-gather of the first bucket with the
+    # optimizer step.
+    # align_param_gather: if true, all PP stages launch param all-gathers simultaneously; otherwise each
+    # PP stage launches independently, as needed.
+
+ recipe.trainer.callbacks.append(
+ run.Config(
+ MegatronCommOverlapCallback,
+ tp_comm_overlap=True,
+ tp_comm_overlap_cfg=userbuffers_bf16_h100_h16384_tp8_cp2_mbs1_seqlen8192,
+ defer_embedding_wgrad_compute=True,
+ wgrad_deferral_limit=50,
+ overlap_param_gather_with_optimizer_step=False, # Currently disabled due to an issue with checkpointing
+ align_param_gather=True,
+ )
+ )
+
+ return recipe
+
+
+@run.cli.factory(target=finetune, name=NAME)
+def finetune_recipe(
+ dir: Optional[str] = None,
+ name: str = "default",
+    num_nodes: Optional[int] = None,
+ num_gpus_per_node: int = 8,
+ peft_scheme: Optional[str] = 'lora',
+ seq_length: Optional[int] = None,
+ packed_sequence: Optional[bool] = None,
+ performance_mode: bool = False,
+) -> run.Partial:
+ """
+ Create a fine-tuning recipe for Llama3.1 70B model.
+
+ This function sets up a complete configuration for fine-tuning, including
+ model, trainer, data, logging, optimization, and resumption settings.
+ The recipe uses LoRA (Low-Rank Adaptation) for efficient fine-tuning, unless peft_scheme is set to None.
+
+ Args:
+ dir (Optional[str]): Directory for saving logs and checkpoints.
+ name (str): Name of the fine-tuning run.
+        num_nodes (Optional[int]): Number of compute nodes to use. If None, chosen based on peft_scheme.
+ num_gpus_per_node (int): Number of GPUs per node.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+        seq_length (Optional[int]): Maximum number of tokens per microbatch.
+ packed_sequence (Optional[bool]): If true, fine-tuning sequences will be packed into batches up to the given
+ maximum seq_length for better efficiency. By default, this value equals performance_mode.
+ performance_mode (bool): If true, enables optimizations for maximum performance.
+
+ Returns:
+ run.Partial: Partial configuration for fine-tuning.
+
+ Examples:
+ CLI usage:
+ $ nemo llm finetune --factory llama31_70b
+ $ nemo llm finetune --factory "llama31_70b(num_nodes=4, name='my_70b_finetune')"
+
+ Python API usage:
+ >>> recipe = finetune_recipe(name="llama31_70b_finetune", num_nodes=4)
+ >>> print(recipe)
+
+ Note:
+ This recipe uses the SQuAD dataset for fine-tuning. Be aware that fine-tuning a 70B model
+ requires substantial computational resources.
+ """
+    # Default to unpacked data in normal mode and packed data in performance mode.
+    # Once the packing recipe is well tested, change this default to True.
+ if packed_sequence is None:
+ packed_sequence = performance_mode
+
+    # For unpacked sequences, most samples in the SQuAD dataset are shorter than 2K tokens
+ if seq_length is None:
+ seq_length = 4096 if packed_sequence else 2048
+
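+    # Pick the node count automatically when unspecified: full fine-tuning needs more nodes than LoRA/DoRA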
+ if num_nodes is None:
+ if peft_scheme is None or peft_scheme.lower() == 'none':
+ num_nodes = 4
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ num_nodes = 1
+
+ recipe = default_finetune_recipe(
+ model(), "meta-llama/Llama-3.1-70B", dir, name, num_nodes, num_gpus_per_node, packed_sequence
+ )
+ if peft_scheme is None or peft_scheme.lower() == 'none':
+ recipe.trainer.strategy.tensor_model_parallel_size = 8
+ recipe.trainer.strategy.pipeline_model_parallel_size = 4
+ recipe.optim.config.lr = 5e-6
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
+ recipe.peft.dim = 16
+ recipe.peft.alpha = 32
+ recipe.optim.config.use_distributed_optimizer = False
+
+ # some settings currently do not function correctly with LoRA
+ recipe.model.config.cross_entropy_loss_fusion = False
+
+ recipe.trainer.strategy.tensor_model_parallel_size = 8
+ recipe.optim.config.lr = 1e-4
+ else:
+ raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
+
+ # Sequence length settings in the model and dataset must agree
+ recipe.model.config.seq_length = seq_length
+ recipe.data.seq_length = seq_length
+ if packed_sequence:
+ recipe.data.dataset_kwargs = {'pad_to_max_length': True}
+ recipe.data.packed_sequence_specs = run.Config(PackedSequenceSpecs, packed_sequence_size=seq_length)
+
+ if performance_mode:
+ recipe = finetune_performance_optimizations(recipe, peft_scheme)
+
+ return recipe
+
+
+def finetune_performance_optimizations(
+ recipe: run.Partial,
+ peft_scheme: str,
+) -> run.Partial:
+ """
+ Modify the given recipe to optimize settings for performance.
+
+ This method enables performance optimizations that may not be suitable for all use cases.
+ Intended to build upon the standard fine-tuning recipe.
+
+ Args:
+ recipe (run.Partial): Base fine-tuning recipe to which performance optimizations will be added
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+
+ Returns:
+ run.Partial: Partial configuration for performance-optimized fine-tuning.
+
+ Note:
+ Use this method with caution and only when you need maximum performance.
+ It may not be suitable for all hardware configurations or use cases.
+ """
+
+ if not hasattr(recipe.trainer, "callbacks"):
+ recipe.trainer.callbacks = []
+
+ if peft_scheme is None or peft_scheme.lower() == 'none':
+ recipe.trainer.strategy.tensor_model_parallel_size = 4
+ recipe.trainer.strategy.pipeline_model_parallel_size = 4
+ recipe.trainer.strategy.virtual_pipeline_model_parallel_size = 5
+ recipe.trainer.plugins.grad_reduce_in_fp32 = False
+ recipe.trainer.strategy.ddp = run.Config(
+ DistributedDataParallelConfig,
+ check_for_nan_in_grad=True,
+ grad_reduce_in_fp32=False,
+ overlap_grad_reduce=True,
+ overlap_param_gather=True,
+ average_in_collective=True,
+ )
+ recipe.trainer.callbacks.append(
+ run.Config(
+ MegatronCommOverlapCallback,
+ tp_comm_overlap=True,
+ defer_embedding_wgrad_compute=True,
+ wgrad_deferral_limit=22,
+ )
+ )
+ else:
+ recipe.trainer.strategy.tensor_model_parallel_size = 2
+ recipe.trainer.strategy.pipeline_model_parallel_size = 4
+ recipe.trainer.strategy.virtual_pipeline_model_parallel_size = 5
+ recipe.peft.target_modules = ['linear_qkv']
+
+ recipe.trainer.strategy.sequence_parallel = True
+
+ recipe.trainer.callbacks.append(run.Config(TimingCallback))
+ recipe.trainer.callbacks.append(
+ run.Config(
+ GarbageCollectionCallback,
+ 100,
+ 100,
+ )
+ )
+
+ return recipe
diff --git a/nemo/collections/llm/recipes/llama31_8b.py b/nemo/collections/llm/recipes/llama31_8b.py
new file mode 100644
index 000000000000..62514940b678
--- /dev/null
+++ b/nemo/collections/llm/recipes/llama31_8b.py
@@ -0,0 +1,388 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from typing import Callable, Optional
+
+import lightning.pytorch as pl
+import nemo_run as run
+import torch
+from lightning.pytorch.callbacks.callback import Callback
+from megatron.core.distributed import DistributedDataParallelConfig
+
+from nemo import lightning as nl
+from nemo.collections.llm.api import finetune, pretrain
+from nemo.collections.llm.gpt.data.mock import MockDataModule
+from nemo.collections.llm.gpt.data.packed_sequence import PackedSequenceSpecs
+from nemo.collections.llm.gpt.model.llama import Llama31Config8B, LlamaModel
+from nemo.collections.llm.peft import PEFT_STR2CLS
+from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
+from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
+from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
+from nemo.collections.llm.recipes.precision.mixed_precision import bf16_mixed
+from nemo.collections.llm.recipes.tp_overlap_configs.userbuffers import (
+ userbuffers_bf16_h100_h16384_tp8_cp2_mbs1_seqlen8192,
+)
+from nemo.lightning.pytorch.callbacks import GarbageCollectionCallback
+from nemo.lightning.pytorch.callbacks.megatron_comm_overlap import MegatronCommOverlapCallback
+from nemo.utils.exp_manager import TimingCallback
+
+NAME = "llama31_8b"
+
+
+@run.cli.factory(name=NAME)
+def model() -> run.Config[pl.LightningModule]:
+ """
+ Factory function to create a Llama3.1 8B model configuration.
+
+ Returns:
+ run.Config[pl.LightningModule]: Configuration for the Llama3.1 8B model.
+
+ Examples:
+ CLI usage:
+ $ nemo llm pretrain model=llama31_8b ...
+
+ Python API usage:
+ >>> model_config = model()
+ >>> print(model_config)
+ """
+ conf = run.Config(Llama31Config8B)
+ conf.seq_length = 8192
+ return run.Config(LlamaModel, config=conf)
+
+
+def trainer(
+ tensor_parallelism: int = 1,
+ pipeline_parallelism: int = 1,
+ pipeline_parallelism_type: Optional[torch.dtype] = None,
+ virtual_pipeline_parallelism: Optional[int] = None,
+ context_parallelism: int = 2,
+ sequence_parallelism: bool = False,
+ num_nodes: int = 1,
+ num_gpus_per_node: int = 8,
+ max_steps: int = 1168251,
+ callbacks: Optional[list[run.Config[Callback]]] = None,
+) -> run.Config[nl.Trainer]:
+ """
+ Configure the NeMo Lightning Trainer for Llama3.1 8B model.
+
+    This function sets up the distributed training strategy for the Llama3.1 8B model.
+
+ Args:
+ tensor_parallelism (int): Degree of tensor model parallelism.
+ pipeline_parallelism (int): Degree of pipeline model parallelism.
+ pipeline_parallelism_type (Optional[torch.dtype]): Data type for pipeline parallelism.
+ virtual_pipeline_parallelism (Optional[int]): Size of virtual pipeline parallelism.
+ context_parallelism (int): Degree of context parallelism.
+ sequence_parallelism (bool): Whether to use sequence parallelism.
+ num_nodes (int): Number of compute nodes to use.
+ num_gpus_per_node (int): Number of GPUs per node.
+ max_steps (int): Maximum number of training steps.
+ callbacks (Optional[list[run.Config[Callback]]]): List of callback configurations.
+
+ Returns:
+ run.Config[nl.Trainer]: Configuration for the NeMo Lightning Trainer.
+
+ Examples:
+ CLI usage:
+ $ nemo llm pretrain trainer=llama31_8b ...
+
+ Python API usage:
+ >>> trainer_config = trainer(num_nodes=2, num_gpus_per_node=8)
+ >>> print(trainer_config)
+
+ Note:
+        The default configuration uses context parallelism only (CP=2); adjust the parallelism settings to
+        match your hardware.
+ """
+ strategy = run.Config(
+ nl.MegatronStrategy,
+ tensor_model_parallel_size=tensor_parallelism,
+ pipeline_model_parallel_size=pipeline_parallelism,
+ pipeline_dtype=pipeline_parallelism_type,
+ virtual_pipeline_model_parallel_size=virtual_pipeline_parallelism,
+ context_parallel_size=context_parallelism,
+ sequence_parallel=sequence_parallelism,
+ gradient_as_bucket_view=True,
+ ckpt_async_save=True,
+ ckpt_parallel_load=True,
+ ddp=run.Config(
+ DistributedDataParallelConfig,
+ check_for_nan_in_grad=True,
+ grad_reduce_in_fp32=True,
+ overlap_grad_reduce=True,
+ overlap_param_gather=True,
+ average_in_collective=True,
+ ),
+ )
+
+ trainer = run.Config(
+ nl.Trainer,
+ accelerator="gpu",
+ accumulate_grad_batches=1,
+ callbacks=callbacks,
+ devices=num_gpus_per_node,
+ limit_test_batches=50,
+ limit_val_batches=32,
+ log_every_n_steps=10,
+ max_steps=max_steps,
+ num_nodes=num_nodes,
+ plugins=bf16_mixed(),
+ strategy=strategy,
+ use_distributed_sampler=False,
+ val_check_interval=2000,
+ )
+
+ return trainer
+
+
+@run.cli.factory(target=pretrain, name=NAME)
+def pretrain_recipe(
+ dir: Optional[str] = None,
+ name: str = "default",
+ num_nodes: int = 1,
+ num_gpus_per_node: int = 8,
+ performance_mode: bool = False,
+ fn: Callable = pretrain,
+) -> run.Partial:
+ """
+ Create a pre-training recipe for Llama3.1 8B model.
+
+ This function sets up a complete configuration for pre-training, including
+ model, trainer, data, logging, optimization, and resumption settings.
+
+ Args:
+ dir (Optional[str]): Directory for saving logs and checkpoints.
+ name (str): Name of the pre-training run.
+ num_nodes (int): Number of compute nodes to use.
+ num_gpus_per_node (int): Number of GPUs per node.
+ performance_mode (bool): If true, enables optimizations for maximum performance.
+ fn (Callable): The pre-training function to use.
+
+ Returns:
+ run.Partial: Partial configuration for pre-training.
+
+ Examples:
+ CLI usage:
+ $ nemo llm pretrain --factory llama31_8b
+ $ nemo llm pretrain --factory "llama31_8b(num_nodes=4, name='my_8b_pretrain')"
+
+ Python API usage:
+ >>> recipe = pretrain_recipe(name="llama31_8b_pretrain", num_nodes=4)
+ >>> print(recipe)
+
+ Note:
+        This recipe is optimized for the 8B model and requires significant computational resources.
+ """
+ recipe = run.Partial(
+ fn,
+ model=model(),
+ trainer=trainer(
+ num_nodes=num_nodes,
+ num_gpus_per_node=num_gpus_per_node,
+ callbacks=[run.Config(TimingCallback)],
+ ),
+ data=run.Config(MockDataModule, seq_length=8192, global_batch_size=512, micro_batch_size=1),
+ log=default_log(dir=dir, name=name, tensorboard_logger=tensorboard_logger(name=name)),
+ optim=distributed_fused_adam_with_cosine_annealing(max_lr=3e-4),
+ resume=default_resume(),
+ )
+
+ if performance_mode:
+ recipe = pretrain_performance_optimizations(recipe)
+
+ return recipe
+
+
+def pretrain_performance_optimizations(recipe: run.Partial) -> run.Partial:
+ """
+ Create a performance-optimized pre-training recipe for Llama3.1 8B model.
+
+ This method enables performance optimizations that may not be suitable for all use cases.
+ It builds upon the standard pre-training recipe and adds additional performance enhancements.
+
+ Args:
+ recipe (run.Partial): Base pre-train recipe to which performance optimizations will be added
+
+ Returns:
+ run.Partial: Partial configuration for performance-optimized pre-training.
+
+ Note:
+ Use this method with caution and only when you need maximum performance.
+ It may not be suitable for all hardware configurations or use cases.
+ """
+
+    # 'overlap_param_gather_with_optimizer_step' and 'align_param_gather' params are set automatically
+    # by MegatronCommOverlapCallback. They are listed here for the user's reference.
+    # overlap_param_gather_with_optimizer_step: overlap the param all-gather of the first bucket with the
+    # optimizer step.
+    # align_param_gather: if true, all PP stages launch param all-gathers simultaneously; otherwise each
+    # PP stage launches independently, as needed.
+
+ recipe.trainer.callbacks.append(
+ run.Config(
+ MegatronCommOverlapCallback,
+ tp_comm_overlap=True,
+ tp_comm_overlap_cfg=userbuffers_bf16_h100_h16384_tp8_cp2_mbs1_seqlen8192,
+ defer_embedding_wgrad_compute=True,
+ wgrad_deferral_limit=50,
+ overlap_param_gather_with_optimizer_step=False, # Currently disabled due to an issue with checkpointing
+ align_param_gather=True,
+ )
+ )
+
+ return recipe
+
+
+@run.cli.factory(target=finetune, name=NAME)
+def finetune_recipe(
+ dir: Optional[str] = None,
+ name: str = "default",
+ num_nodes: int = 1,
+ num_gpus_per_node: int = 8,
+ peft_scheme: Optional[str] = 'lora',
+ seq_length: Optional[int] = None,
+ packed_sequence: Optional[bool] = None,
+ performance_mode: bool = False,
+) -> run.Partial:
+ """
+ Create a fine-tuning recipe for Llama3.1 8B model.
+
+ This function sets up a complete configuration for fine-tuning, including
+ model, trainer, data, logging, optimization, and resumption settings.
+ The recipe uses LoRA (Low-Rank Adaptation) for efficient fine-tuning, unless peft_scheme is set to None.
+
+ Args:
+ dir (Optional[str]): Directory for saving logs and checkpoints.
+ name (str): Name of the fine-tuning run.
+ num_nodes (int): Number of compute nodes to use.
+ num_gpus_per_node (int): Number of GPUs per node.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+        seq_length (Optional[int]): Maximum number of tokens per microbatch.
+ packed_sequence (Optional[bool]): If true, fine-tuning sequences will be packed into batches up to the given
+ maximum seq_length for better efficiency. By default, this value equals performance_mode.
+ performance_mode (bool): If true, enables optimizations for maximum performance.
+
+ Returns:
+ run.Partial: Partial configuration for fine-tuning.
+
+ Examples:
+ CLI usage:
+ $ nemo llm finetune --factory llama31_8b
+
+ Python API usage:
+ >>> recipe = finetune_recipe(name="llama31_8b_finetune", num_nodes=2)
+ >>> print(recipe)
+
+ Note:
+ This recipe uses the SQuAD dataset for fine-tuning. For more information
+ on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
+ `examples/llm/finetune/` directory.
+ """
+    # Default to unpacked data in normal mode and packed data in performance mode.
+    # Once the packing recipe is well tested, change this default to True.
+ if packed_sequence is None:
+ packed_sequence = performance_mode
+
+    # For unpacked sequences, most samples in the SQuAD dataset are shorter than 2K tokens
+ if seq_length is None:
+ seq_length = 4096 if packed_sequence else 2048
+
+ recipe = default_finetune_recipe(
+ model(), "meta-llama/Meta-Llama-3.1-8B", dir, name, num_nodes, num_gpus_per_node, packed_sequence
+ )
+ if peft_scheme is None or peft_scheme.lower() == 'none':
+ recipe.trainer.strategy.tensor_model_parallel_size = 2
+ recipe.optim.config.lr = 5e-6
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
+ recipe.peft.dim = 8
+ recipe.peft.alpha = 16
+ recipe.optim.config.use_distributed_optimizer = False
+
+ # some settings currently do not function correctly with LoRA
+ recipe.model.config.cross_entropy_loss_fusion = False
+
+ recipe.optim.config.lr = 1e-4
+ else:
+ raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
+
+ # Sequence length settings in the model and dataset must agree
+ recipe.model.config.seq_length = seq_length
+ recipe.data.seq_length = seq_length
+ if packed_sequence:
+ recipe.data.dataset_kwargs = {'pad_to_max_length': True}
+ recipe.data.packed_sequence_specs = run.Config(PackedSequenceSpecs, packed_sequence_size=seq_length)
+
+ if performance_mode:
+ recipe = finetune_performance_optimizations(recipe, peft_scheme)
+
+ return recipe
+
+
+def finetune_performance_optimizations(
+ recipe: run.Partial,
+ peft_scheme: str,
+) -> run.Partial:
+ """
+ Modify the given recipe to optimize settings for performance.
+
+ This method enables performance optimizations that may not be suitable for all use cases.
+ Intended to build upon the standard fine-tuning recipe.
+
+ Args:
+ recipe (run.Partial): Base fine-tuning recipe to which performance optimizations will be added
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+
+ Returns:
+ run.Partial: Partial configuration for performance-optimized fine-tuning.
+
+ Note:
+ Use this method with caution and only when you need maximum performance.
+ It may not be suitable for all hardware configurations or use cases.
+ """
+ recipe.trainer.strategy.tensor_model_parallel_size = 1
+
+ if not hasattr(recipe.trainer, "callbacks"):
+ recipe.trainer.callbacks = []
+
+ if peft_scheme is None or peft_scheme.lower() == 'none':
+ recipe.trainer.plugins.grad_reduce_in_fp32 = False
+ recipe.trainer.strategy.ddp = run.Config(
+ DistributedDataParallelConfig,
+ check_for_nan_in_grad=True,
+ grad_reduce_in_fp32=False,
+ overlap_grad_reduce=True,
+ overlap_param_gather=True,
+ average_in_collective=True,
+ )
+ recipe.trainer.callbacks.append(
+ run.Config(
+ MegatronCommOverlapCallback,
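+                # With tensor_model_parallel_size=1 there is no TP communication to overlap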
+ tp_comm_overlap=False,
+ )
+ )
+ else:
+ recipe.peft.target_modules = ['linear_qkv']
+
+ recipe.trainer.callbacks.append(run.Config(TimingCallback))
+ recipe.trainer.callbacks.append(
+ run.Config(
+ GarbageCollectionCallback,
+ 100,
+ 100,
+ )
+ )
+
+ return recipe
diff --git a/nemo/collections/llm/recipes/llama32_1b.py b/nemo/collections/llm/recipes/llama32_1b.py
new file mode 100644
index 000000000000..32675adf3686
--- /dev/null
+++ b/nemo/collections/llm/recipes/llama32_1b.py
@@ -0,0 +1,270 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from typing import Callable, Optional
+
+import lightning.pytorch as pl
+import nemo_run as run
+import torch
+from lightning.pytorch.callbacks.callback import Callback
+from megatron.core.distributed import DistributedDataParallelConfig
+
+from nemo import lightning as nl
+from nemo.collections.llm.api import finetune, pretrain
+from nemo.collections.llm.gpt.data.mock import MockDataModule
+from nemo.collections.llm.gpt.data.packed_sequence import PackedSequenceSpecs
+from nemo.collections.llm.gpt.model.llama import Llama32Config1B, LlamaModel
+from nemo.collections.llm.peft import PEFT_STR2CLS
+from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
+from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
+from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
+from nemo.collections.llm.recipes.precision.mixed_precision import bf16_mixed
+from nemo.utils.exp_manager import TimingCallback
+
+NAME = "llama32_1b"
+
+
+@run.cli.factory(name=NAME)
+def model() -> run.Config[pl.LightningModule]:
+ """
+ Factory function to create a Llama3.2 1B model configuration.
+
+ Returns:
+ run.Config[pl.LightningModule]: Configuration for the Llama3.2 1B model.
+
+ Examples:
+ CLI usage:
+ $ nemo llm pretrain model=llama32_1b ...
+
+ Python API usage:
+ >>> model_config = model()
+ >>> print(model_config)
+ """
+ conf = run.Config(Llama32Config1B)
+ conf.seq_length = 8192
+ return run.Config(LlamaModel, config=conf)
+
+
+def trainer(
+ tensor_parallelism: int = 1,
+ pipeline_parallelism: int = 1,
+ pipeline_parallelism_type: Optional[torch.dtype] = None,
+ virtual_pipeline_parallelism: Optional[int] = None,
+ context_parallelism: int = 1,
+ sequence_parallelism: bool = False,
+ num_nodes: int = 1,
+ num_gpus_per_node: int = 8,
+ max_steps: int = 1168251,
+ callbacks: Optional[list[run.Config[Callback]]] = None,
+) -> run.Config[nl.Trainer]:
+ """
+ Configure the NeMo Lightning Trainer for Llama3.2 1B model.
+
+ Args:
+ tensor_parallelism (int): Degree of tensor model parallelism.
+ pipeline_parallelism (int): Degree of pipeline model parallelism.
+ pipeline_parallelism_type (Optional[torch.dtype]): Data type for pipeline parallelism.
+ virtual_pipeline_parallelism (Optional[int]): Size of virtual pipeline parallelism.
+ context_parallelism (int): Degree of context parallelism.
+ sequence_parallelism (bool): Whether to use sequence parallelism.
+ num_nodes (int): Number of compute nodes to use.
+ num_gpus_per_node (int): Number of GPUs per node.
+ max_steps (int): Maximum number of training steps.
+ callbacks (Optional[list[run.Config[Callback]]]): List of callback configurations.
+
+ Returns:
+ run.Config[nl.Trainer]: Configuration for the NeMo Lightning Trainer.
+
+ Examples:
+ CLI usage:
+ $ nemo llm pretrain trainer=llama32_1b ...
+
+ Python API usage:
+ >>> trainer_config = trainer(num_nodes=1, num_gpus_per_node=1)
+ >>> print(trainer_config)
+
+ Note:
+ This configuration keeps all parallelism degrees at 1 by default, which is sufficient for the 1B model.
+ """
+ strategy = run.Config(
+ nl.MegatronStrategy,
+ tensor_model_parallel_size=tensor_parallelism,
+ pipeline_model_parallel_size=pipeline_parallelism,
+ pipeline_dtype=pipeline_parallelism_type,
+ virtual_pipeline_model_parallel_size=virtual_pipeline_parallelism,
+ context_parallel_size=context_parallelism,
+ sequence_parallel=sequence_parallelism,
+ gradient_as_bucket_view=True,
+ ckpt_async_save=True,
+ ckpt_parallel_load=True,
+ ddp=run.Config(
+ DistributedDataParallelConfig,
+ check_for_nan_in_grad=True,
+ grad_reduce_in_fp32=True,
+ overlap_grad_reduce=True,
+ overlap_param_gather=True,
+ average_in_collective=True,
+ ),
+ )
+
+ trainer = run.Config(
+ nl.Trainer,
+ accelerator="gpu",
+ accumulate_grad_batches=1,
+ callbacks=callbacks,
+ devices=num_gpus_per_node,
+ limit_test_batches=50,
+ limit_val_batches=32,
+ log_every_n_steps=10,
+ max_steps=max_steps,
+ num_nodes=num_nodes,
+ plugins=bf16_mixed(),
+ strategy=strategy,
+ use_distributed_sampler=False,
+ val_check_interval=2000,
+ )
+
+ return trainer
+
+
+@run.cli.factory(target=pretrain, name=NAME)
+def pretrain_recipe(
+ dir: Optional[str] = None,
+ name: str = "default",
+ num_nodes: int = 1,
+ num_gpus_per_node: int = 8,
+ fn: Callable = pretrain,
+) -> run.Partial:
+ """
+ Create a pre-training recipe for Llama3.2 1B model.
+
+ This function sets up a complete configuration for pre-training, including
+ model, trainer, data, logging, optimization, and resumption settings.
+
+ Args:
+ dir (Optional[str]): Directory for saving logs and checkpoints.
+ name (str): Name of the pre-training run.
+ num_nodes (int): Number of compute nodes to use.
+ num_gpus_per_node (int): Number of GPUs per node.
+ fn (Callable): The pre-training function to use.
+
+ Returns:
+ run.Partial: Partial configuration for pre-training.
+
+ Examples:
+ CLI usage:
+ $ nemo llm pretrain --factory llama32_1b
+ $ nemo llm pretrain --factory "llama32_1b(num_nodes=1, name='my_1b_pretrain')"
+
+ Python API usage:
+ >>> recipe = pretrain_recipe(name="llama32_1b_pretrain", num_nodes=1)
+ >>> print(recipe)
+
+ Note:
+ This recipe is sized for the 1B model and does not require the multi-node resources of the larger Llama recipes.
+ """
+ recipe = run.Partial(
+ fn,
+ model=model(),
+ trainer=trainer(
+ num_nodes=num_nodes,
+ num_gpus_per_node=num_gpus_per_node,
+ callbacks=[run.Config(TimingCallback)],
+ ),
+ data=run.Config(MockDataModule, seq_length=8192, global_batch_size=512, micro_batch_size=1),
+ log=default_log(dir=dir, name=name, tensorboard_logger=tensorboard_logger(name=name)),
+ optim=distributed_fused_adam_with_cosine_annealing(max_lr=3e-4),
+ resume=default_resume(),
+ )
+
+ return recipe
+
+
+@run.cli.factory(target=finetune, name=NAME)
+def finetune_recipe(
+ dir: Optional[str] = None,
+ name: str = "default",
+ num_nodes: int = 1,
+ num_gpus_per_node: int = 8,
+ peft_scheme: Optional[str] = 'lora',
+ seq_length: Optional[int] = None,
+ packed_sequence: Optional[bool] = None,
+) -> run.Partial:
+ """
+ Create a fine-tuning recipe for Llama3.2 1B model.
+
+ This function sets up a complete configuration for fine-tuning, including
+ model, trainer, data, logging, optimization, and resumption settings.
+ The recipe uses LoRA (Low-Rank Adaptation) for efficient fine-tuning, unless peft_scheme is set to None.
+
+ Args:
+ dir (Optional[str]): Directory for saving logs and checkpoints.
+ name (str): Name of the fine-tuning run.
+ num_nodes (int): Number of compute nodes to use.
+ num_gpus_per_node (int): Number of GPUs per node.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+ seq_length (Optional[int]): Maximum number of tokens per microbatch. Defaults to 2048, or 4096 when packing is enabled.
+ packed_sequence (Optional[bool]): If true, fine-tuning sequences will be packed into batches up to the given
+ maximum seq_length for better efficiency.
+
+ Returns:
+ run.Partial: Partial configuration for fine-tuning.
+
+ Examples:
+ CLI usage:
+ $ nemo llm finetune --factory llama32_1b
+
+ Python API usage:
+ >>> recipe = finetune_recipe(name="llama32_1b_finetune", num_nodes=1)
+ >>> print(recipe)
+
+ Note:
+ This recipe uses the SQuAD dataset for fine-tuning. For more information
+ on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
+ `examples/llm/finetune/` directory.
+ """
+
+ # For unpacked sequences, most samples in the SQuAD dataset are shorter than 2K tokens
+ if seq_length is None:
+ seq_length = 4096 if packed_sequence else 2048
+
+ recipe = default_finetune_recipe(
+ model(), "meta-llama/Llama-3.2-1B", dir, name, num_nodes, num_gpus_per_node, packed_sequence
+ )
+ if peft_scheme is None or peft_scheme.lower() == 'none':
+ recipe.trainer.strategy.tensor_model_parallel_size = 1
+ recipe.optim.config.lr = 5e-6
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
+ recipe.peft.dim = 8
+ recipe.peft.alpha = 16
+ recipe.optim.config.use_distributed_optimizer = False
+
+ # some settings currently do not function correctly with LoRA
+ recipe.model.config.cross_entropy_loss_fusion = False
+
+ recipe.optim.config.lr = 1e-4
+ else:
+ raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
+
+ # Sequence length settings in the model and dataset must agree
+ recipe.model.config.seq_length = seq_length
+ recipe.data.seq_length = seq_length
+ if packed_sequence:
+ recipe.data.dataset_kwargs = {'pad_to_max_length': True}
+ recipe.data.packed_sequence_specs = run.Config(PackedSequenceSpecs, packed_sequence_size=seq_length)
+
+ return recipe
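
The defaults above interact: leaving `seq_length` unset picks 2048 for unpacked and 4096 for packed runs. A short usage sketch for the new 1B recipe:

    from nemo.collections.llm.recipes import llama32_1b

    # Packed-sequence LoRA fine-tuning on a single GPU.
    recipe = llama32_1b.finetune_recipe(
        name="llama32_1b_packed",
        num_nodes=1,
        num_gpus_per_node=1,
        peft_scheme="lora",
        packed_sequence=True,
    )
    # seq_length defaulted to 4096, and padding is forwarded as a dataset kwarg.
    assert recipe.data.seq_length == 4096
    assert recipe.data.dataset_kwargs == {'pad_to_max_length': True}
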
diff --git a/nemo/collections/llm/recipes/llama32_3b.py b/nemo/collections/llm/recipes/llama32_3b.py
new file mode 100644
index 000000000000..d78ea0b50983
--- /dev/null
+++ b/nemo/collections/llm/recipes/llama32_3b.py
@@ -0,0 +1,270 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from typing import Callable, Optional
+
+import lightning.pytorch as pl
+import nemo_run as run
+import torch
+from lightning.pytorch.callbacks.callback import Callback
+from megatron.core.distributed import DistributedDataParallelConfig
+
+from nemo import lightning as nl
+from nemo.collections.llm.api import finetune, pretrain
+from nemo.collections.llm.gpt.data.mock import MockDataModule
+from nemo.collections.llm.gpt.data.packed_sequence import PackedSequenceSpecs
+from nemo.collections.llm.gpt.model.llama import Llama32Config3B, LlamaModel
+from nemo.collections.llm.peft import PEFT_STR2CLS
+from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
+from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
+from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
+from nemo.collections.llm.recipes.precision.mixed_precision import bf16_mixed
+from nemo.utils.exp_manager import TimingCallback
+
+NAME = "llama32_3b"
+
+
+@run.cli.factory(name=NAME)
+def model() -> run.Config[pl.LightningModule]:
+ """
+ Factory function to create a Llama3.2 3B model configuration.
+
+ Returns:
+ run.Config[pl.LightningModule]: Configuration for the Llama3.2 3B model.
+
+ Examples:
+ CLI usage:
+ $ nemo llm pretrain model=llama32_3b ...
+
+ Python API usage:
+ >>> model_config = model()
+ >>> print(model_config)
+ """
+ conf = run.Config(Llama32Config3B)
+ conf.seq_length = 8192
+ return run.Config(LlamaModel, config=conf)
+
+
+def trainer(
+ tensor_parallelism: int = 1,
+ pipeline_parallelism: int = 1,
+ pipeline_parallelism_type: Optional[torch.dtype] = None,
+ virtual_pipeline_parallelism: Optional[int] = None,
+ context_parallelism: int = 1,
+ sequence_parallelism: bool = False,
+ num_nodes: int = 1,
+ num_gpus_per_node: int = 8,
+ max_steps: int = 1168251,
+ callbacks: Optional[list[run.Config[Callback]]] = None,
+) -> run.Config[nl.Trainer]:
+ """
+ Configure the NeMo Lightning Trainer for Llama3.2 3B model.
+
+ Args:
+ tensor_parallelism (int): Degree of tensor model parallelism.
+ pipeline_parallelism (int): Degree of pipeline model parallelism.
+ pipeline_parallelism_type (Optional[torch.dtype]): Data type for pipeline parallelism.
+ virtual_pipeline_parallelism (Optional[int]): Size of virtual pipeline parallelism.
+ context_parallelism (int): Degree of context parallelism.
+ sequence_parallelism (bool): Whether to use sequence parallelism.
+ num_nodes (int): Number of compute nodes to use.
+ num_gpus_per_node (int): Number of GPUs per node.
+ max_steps (int): Maximum number of training steps.
+ callbacks (Optional[list[run.Config[Callback]]]): List of callback configurations.
+
+ Returns:
+ run.Config[nl.Trainer]: Configuration for the NeMo Lightning Trainer.
+
+ Examples:
+ CLI usage:
+ $ nemo llm pretrain trainer=llama32_3b ...
+
+ Python API usage:
+ >>> trainer_config = trainer(num_nodes=1, num_gpus_per_node=1)
+ >>> print(trainer_config)
+
+ Note:
+ This configuration keeps all parallelism degrees at 1 by default, which is sufficient for the 3B model.
+ """
+ strategy = run.Config(
+ nl.MegatronStrategy,
+ tensor_model_parallel_size=tensor_parallelism,
+ pipeline_model_parallel_size=pipeline_parallelism,
+ pipeline_dtype=pipeline_parallelism_type,
+ virtual_pipeline_model_parallel_size=virtual_pipeline_parallelism,
+ context_parallel_size=context_parallelism,
+ sequence_parallel=sequence_parallelism,
+ gradient_as_bucket_view=True,
+ ckpt_async_save=True,
+ ckpt_parallel_load=True,
+ ddp=run.Config(
+ DistributedDataParallelConfig,
+ check_for_nan_in_grad=True,
+ grad_reduce_in_fp32=True,
+ overlap_grad_reduce=True,
+ overlap_param_gather=True,
+ average_in_collective=True,
+ ),
+ )
+
+ trainer = run.Config(
+ nl.Trainer,
+ accelerator="gpu",
+ accumulate_grad_batches=1,
+ callbacks=callbacks,
+ devices=num_gpus_per_node,
+ limit_test_batches=50,
+ limit_val_batches=32,
+ log_every_n_steps=10,
+ max_steps=max_steps,
+ num_nodes=num_nodes,
+ plugins=bf16_mixed(),
+ strategy=strategy,
+ use_distributed_sampler=False,
+ val_check_interval=2000,
+ )
+
+ return trainer
+
+
+@run.cli.factory(target=pretrain, name=NAME)
+def pretrain_recipe(
+ dir: Optional[str] = None,
+ name: str = "default",
+ num_nodes: int = 1,
+ num_gpus_per_node: int = 8,
+ fn: Callable = pretrain,
+) -> run.Partial:
+ """
+ Create a pre-training recipe for Llama3.2 3B model.
+
+ This function sets up a complete configuration for pre-training, including
+ model, trainer, data, logging, optimization, and resumption settings.
+
+ Args:
+ dir (Optional[str]): Directory for saving logs and checkpoints.
+ name (str): Name of the pre-training run.
+ num_nodes (int): Number of compute nodes to use.
+ num_gpus_per_node (int): Number of GPUs per node.
+ fn (Callable): The pre-training function to use.
+
+ Returns:
+ run.Partial: Partial configuration for pre-training.
+
+ Examples:
+ CLI usage:
+ $ nemo llm pretrain --factory llama32_3b
+ $ nemo llm pretrain --factory "llama32_3b(num_nodes=1, name='my_3b_pretrain')"
+
+ Python API usage:
+ >>> recipe = pretrain_recipe(name="llama32_3b_pretrain", num_nodes=1)
+ >>> print(recipe)
+
+ Note:
+ This recipe is sized for the 3B model and does not require the multi-node resources of the larger Llama recipes.
+ """
+ recipe = run.Partial(
+ fn,
+ model=model(),
+ trainer=trainer(
+ num_nodes=num_nodes,
+ num_gpus_per_node=num_gpus_per_node,
+ callbacks=[run.Config(TimingCallback)],
+ ),
+ data=run.Config(MockDataModule, seq_length=8192, global_batch_size=512, micro_batch_size=1),
+ log=default_log(dir=dir, name=name, tensorboard_logger=tensorboard_logger(name=name)),
+ optim=distributed_fused_adam_with_cosine_annealing(max_lr=3e-4),
+ resume=default_resume(),
+ )
+
+ return recipe
+
+
+@run.cli.factory(target=finetune, name=NAME)
+def finetune_recipe(
+ dir: Optional[str] = None,
+ name: str = "default",
+ num_nodes: int = 1,
+ num_gpus_per_node: int = 8,
+ peft_scheme: Optional[str] = 'lora',
+ seq_length: Optional[int] = None,
+ packed_sequence: Optional[bool] = None,
+) -> run.Partial:
+ """
+ Create a fine-tuning recipe for Llama3.2 3B model.
+
+ This function sets up a complete configuration for fine-tuning, including
+ model, trainer, data, logging, optimization, and resumption settings.
+ The recipe uses LoRA (Low-Rank Adaptation) for efficient fine-tuning, unless peft_scheme is set to None.
+
+ Args:
+ dir (Optional[str]): Directory for saving logs and checkpoints.
+ name (str): Name of the fine-tuning run.
+ num_nodes (int): Number of compute nodes to use.
+ num_gpus_per_node (int): Number of GPUs per node.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+ seq_length (Optional[int]): Maximum number of tokens per microbatch. Defaults to 2048, or 4096 when packing is enabled.
+ packed_sequence (Optional[bool]): If true, fine-tuning sequences will be packed into batches up to the given
+ maximum seq_length for better efficiency.
+
+ Returns:
+ run.Partial: Partial configuration for fine-tuning.
+
+ Examples:
+ CLI usage:
+ $ nemo llm finetune --factory llama32_3b
+
+ Python API usage:
+ >>> recipe = finetune_recipe(name="llama32_3b_finetune", num_nodes=1)
+ >>> print(recipe)
+
+ Note:
+ This recipe uses the SQuAD dataset for fine-tuning. For more information
+ on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
+ `examples/llm/finetune/` directory.
+ """
+
+ # For unpacked sequences, most samples in the SQuAD dataset are shorter than 2K tokens
+ if seq_length is None:
+ seq_length = 4096 if packed_sequence else 2048
+
+ recipe = default_finetune_recipe(
+ model(), "meta-llama/Llama-3.2-3B", dir, name, num_nodes, num_gpus_per_node, packed_sequence
+ )
+ if peft_scheme is None or peft_scheme.lower() == 'none':
+ recipe.trainer.strategy.tensor_model_parallel_size = 1
+ recipe.optim.config.lr = 5e-6
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
+ recipe.peft.dim = 8
+ recipe.peft.alpha = 16
+ recipe.optim.config.use_distributed_optimizer = False
+
+ # some settings currently do not function correctly with LoRA
+ recipe.model.config.cross_entropy_loss_fusion = False
+
+ recipe.optim.config.lr = 1e-4
+ else:
+ raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
+
+ # Sequence length settings in the model and dataset must agree
+ recipe.model.config.seq_length = seq_length
+ recipe.data.seq_length = seq_length
+ if packed_sequence:
+ recipe.data.dataset_kwargs = {'pad_to_max_length': True}
+ recipe.data.packed_sequence_specs = run.Config(PackedSequenceSpecs, packed_sequence_size=seq_length)
+
+ return recipe
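
Because the scheme string is resolved through `PEFT_STR2CLS`, DoRA is now selectable by name wherever LoRA is; a sketch against the 3B recipe:

    from nemo.collections.llm.recipes import llama32_3b

    # 'dora' shares the dim/alpha defaults that 'lora' gets in this recipe.
    recipe = llama32_3b.finetune_recipe(name="llama32_3b_dora", peft_scheme="dora")
    assert recipe.peft.dim == 8 and recipe.peft.alpha == 16

    # Anything outside 'lora'/'dora'/'none'/None fails fast.
    try:
        llama32_3b.finetune_recipe(peft_scheme="ia3")
    except ValueError as err:
        print(err)  # Unrecognized peft scheme: ia3
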
diff --git a/nemo/collections/llm/recipes/llama3_70b.py b/nemo/collections/llm/recipes/llama3_70b.py
index cb862bf50ee4..8b61bff80e01 100644
--- a/nemo/collections/llm/recipes/llama3_70b.py
+++ b/nemo/collections/llm/recipes/llama3_70b.py
@@ -15,18 +15,18 @@
from typing import Callable, Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.distributed import DistributedDataParallelConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
from nemo.collections.llm.gpt.data.packed_sequence import PackedSequenceSpecs
from nemo.collections.llm.gpt.model.llama import Llama3Config70B, LlamaModel
-from nemo.collections.llm.peft.lora import LoRA
+from nemo.collections.llm.peft import PEFT_STR2CLS
from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
@@ -232,7 +232,7 @@ def pretrain_performance_optimizations(recipe: run.Partial) -> run.Partial:
tp_comm_overlap_cfg=userbuffers_bf16_h100_h8192_tp4_mbs1_seqlen8192,
defer_embedding_wgrad_compute=True,
wgrad_deferral_limit=22,
- overlap_param_gather_with_optimizer_step=True,
+ overlap_param_gather_with_optimizer_step=False, # Currently disabled due to an issue with checkpointing.
align_param_gather=True,
)
)
@@ -244,7 +244,7 @@ def pretrain_performance_optimizations(recipe: run.Partial) -> run.Partial:
def finetune_recipe(
dir: Optional[str] = None,
name: str = "default",
- num_nodes: int = 1,
+ num_nodes: Optional[int] = None,
num_gpus_per_node: int = 8,
peft_scheme: Optional[str] = 'lora',
seq_length: Optional[int] = None,
@@ -263,9 +263,11 @@ def finetune_recipe(
name (str): Name of the fine-tuning run.
num_nodes (int): Number of compute nodes to use.
num_gpus_per_node (int): Number of GPUs per node.
- peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora', 'none'/None.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
seq_length (int): Maximum number of tokens per microbatch.
- packed_sequence (Optional[bool]): If true, fine-tuning sequences will be packed into batches up to the given maximum seq_length for better efficiency. By default, this value equals performance_mode.
+ packed_sequence (Optional[bool]): If true, fine-tuning sequences will be packed into batches up to the given
+ maximum seq_length for better efficiency. By default, this value equals performance_mode.
performance_mode (bool): If true, enables optimizations for maximum performance.
Returns:
@@ -293,19 +295,23 @@ def finetune_recipe(
if seq_length is None:
seq_length = 4096 if packed_sequence else 2048
+ if num_nodes is None:
+ if peft_scheme is None or peft_scheme.lower() == 'none':
+ num_nodes = 4
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ num_nodes = 1
+
recipe = default_finetune_recipe(
model(), "meta-llama/Meta-Llama-3-70B", dir, name, num_nodes, num_gpus_per_node, packed_sequence
)
if peft_scheme is None or peft_scheme.lower() == 'none':
- assert num_nodes >= 4
recipe.trainer.strategy.tensor_model_parallel_size = 8
recipe.trainer.strategy.pipeline_model_parallel_size = 4
recipe.optim.config.lr = 5e-6
- elif peft_scheme.lower() == 'lora':
- recipe.peft = run.Config(LoRA)
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
recipe.peft.dim = 16
recipe.peft.alpha = 32
- recipe.peft.target_modules = ['linear_qkv']
recipe.optim.config.use_distributed_optimizer = False
# some settings currently do not function correctly with LoRA
@@ -320,7 +326,7 @@ def finetune_recipe(
recipe.model.config.seq_length = seq_length
recipe.data.seq_length = seq_length
if packed_sequence:
- recipe.data.pad_to_max_length = True
+ recipe.data.dataset_kwargs = {'pad_to_max_length': True}
recipe.data.packed_sequence_specs = run.Config(PackedSequenceSpecs, packed_sequence_size=seq_length)
if performance_mode:
@@ -341,7 +347,8 @@ def finetune_performance_optimizations(
Args:
recipe (run.Partial): Base fine-tuning recipe to which performance optimizations will be added
- peft_scheme (str): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora', 'none'/None.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
Returns:
run.Partial: Partial configuration for performance-optimized fine-tuning.
@@ -379,6 +386,7 @@ def finetune_performance_optimizations(
recipe.trainer.strategy.tensor_model_parallel_size = 2
recipe.trainer.strategy.pipeline_model_parallel_size = 4
recipe.trainer.strategy.virtual_pipeline_model_parallel_size = 5
+ recipe.peft.target_modules = ['linear_qkv']
recipe.trainer.strategy.sequence_parallel = True
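
The `num_nodes=None` default replaces the old `assert num_nodes >= 4`: when left unset, the node count is derived from the PEFT scheme. A sketch of the resulting dispatch (assuming `default_finetune_recipe` forwards `num_nodes` to the trainer config):

    from nemo.collections.llm.recipes import llama3_70b

    # Full fine-tuning (no PEFT) defaults to 4 nodes to fit TP=8 x PP=4.
    full_ft = llama3_70b.finetune_recipe(peft_scheme=None)
    assert full_ft.trainer.num_nodes == 4

    # LoRA/DoRA fit on a single node.
    lora_ft = llama3_70b.finetune_recipe(peft_scheme="lora")
    assert lora_ft.trainer.num_nodes == 1
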
diff --git a/nemo/collections/llm/recipes/llama3_70b_16k.py b/nemo/collections/llm/recipes/llama3_70b_16k.py
index 928f961f7cf3..0a394d386afd 100644
--- a/nemo/collections/llm/recipes/llama3_70b_16k.py
+++ b/nemo/collections/llm/recipes/llama3_70b_16k.py
@@ -15,8 +15,8 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
from nemo.collections.llm.api import finetune, pretrain
diff --git a/nemo/collections/llm/recipes/llama3_70b_64k.py b/nemo/collections/llm/recipes/llama3_70b_64k.py
index ffadf5ca8084..e035424d3506 100644
--- a/nemo/collections/llm/recipes/llama3_70b_64k.py
+++ b/nemo/collections/llm/recipes/llama3_70b_64k.py
@@ -15,8 +15,8 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
from nemo.collections.llm.api import finetune, pretrain
diff --git a/nemo/collections/llm/recipes/llama3_8b.py b/nemo/collections/llm/recipes/llama3_8b.py
index 1030ad8799a1..36b20c12ddb2 100644
--- a/nemo/collections/llm/recipes/llama3_8b.py
+++ b/nemo/collections/llm/recipes/llama3_8b.py
@@ -15,19 +15,18 @@
from typing import Callable, Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.distributed import DistributedDataParallelConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
from nemo.collections.llm.gpt.data.packed_sequence import PackedSequenceSpecs
-from nemo.collections.llm.gpt.data.squad import SquadDataModule
from nemo.collections.llm.gpt.model.llama import Llama3Config8B, LlamaModel
-from nemo.collections.llm.peft.lora import LoRA
+from nemo.collections.llm.peft import PEFT_STR2CLS
from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
@@ -251,9 +250,11 @@ def finetune_recipe(
name (str): Name of the fine-tuning run.
num_nodes (int): Number of compute nodes to use.
num_gpus_per_node (int): Number of GPUs per node.
- peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora', 'none'/None.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
seq_length (int): Maximum number of tokens per microbatch.
- packed_sequence (Optional[bool]): If true, fine-tuning sequences will be packed into batches up to the given maximum seq_length for better efficiency. By default, this value equals performance_mode.
+ packed_sequence (Optional[bool]): If true, fine-tuning sequences will be packed into batches up to the given
+ maximum seq_length for better efficiency. By default, this value equals performance_mode.
performance_mode (bool): If true, enables optimizations for maximum performance.
Returns:
@@ -287,11 +288,10 @@ def finetune_recipe(
if peft_scheme is None or peft_scheme.lower() == 'none':
recipe.trainer.strategy.tensor_model_parallel_size = 2
recipe.optim.config.lr = 5e-6
- elif peft_scheme.lower() == 'lora':
- recipe.peft = run.Config(LoRA)
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
recipe.peft.dim = 8
recipe.peft.alpha = 16
- recipe.peft.target_modules = ['linear_qkv']
recipe.optim.config.use_distributed_optimizer = False
# some settings currently do not function correctly with LoRA
@@ -305,7 +305,7 @@ def finetune_recipe(
recipe.model.config.seq_length = seq_length
recipe.data.seq_length = seq_length
if packed_sequence:
- recipe.data.pad_to_max_length = True
+ recipe.data.dataset_kwargs = {'pad_to_max_length': True}
recipe.data.packed_sequence_specs = run.Config(PackedSequenceSpecs, packed_sequence_size=seq_length)
if performance_mode:
@@ -326,7 +326,8 @@ def finetune_performance_optimizations(
Args:
recipe (run.Partial): Base fine-tuning recipe to which performance optimizations will be added
- peft_scheme (str): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora', 'none'/None.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
Returns:
run.Partial: Partial configuration for performance-optimized fine-tuning.
@@ -356,6 +357,8 @@ def finetune_performance_optimizations(
tp_comm_overlap=False,
)
)
+ else:
+ recipe.peft.target_modules = ['linear_qkv']
recipe.trainer.callbacks.append(run.Config(TimingCallback))
recipe.trainer.callbacks.append(
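
The packed-sequence plumbing also changed shape in this PR: `pad_to_max_length` is no longer set as a data-module attribute but travels through `dataset_kwargs`, which the SQuAD data module is assumed to forward to its underlying dataset. A sketch with the 8B recipe, assuming it shares the 4096 packed-sequence default shown in the other llama recipes:

    from nemo.collections.llm.recipes import llama3_8b

    recipe = llama3_8b.finetune_recipe(peft_scheme="lora", packed_sequence=True)
    # Padding rides along as a dataset kwarg, and the packing size tracks
    # seq_length (4096 by default when packing is enabled).
    assert recipe.data.dataset_kwargs == {'pad_to_max_length': True}
    assert recipe.data.packed_sequence_specs.packed_sequence_size == 4096
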
diff --git a/nemo/collections/llm/recipes/llama3_8b_16k.py b/nemo/collections/llm/recipes/llama3_8b_16k.py
index d6c1677a3b4b..b81d01c6ec9a 100644
--- a/nemo/collections/llm/recipes/llama3_8b_16k.py
+++ b/nemo/collections/llm/recipes/llama3_8b_16k.py
@@ -15,8 +15,8 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
from nemo.collections.llm.api import finetune, pretrain
diff --git a/nemo/collections/llm/recipes/llama3_8b_64k.py b/nemo/collections/llm/recipes/llama3_8b_64k.py
index 692347ea8dd0..ff176fb372bb 100644
--- a/nemo/collections/llm/recipes/llama3_8b_64k.py
+++ b/nemo/collections/llm/recipes/llama3_8b_64k.py
@@ -15,8 +15,8 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
from nemo.collections.llm.api import finetune, pretrain
diff --git a/nemo/collections/llm/recipes/log/default.py b/nemo/collections/llm/recipes/log/default.py
index d83580a1a543..023e4e459d5f 100644
--- a/nemo/collections/llm/recipes/log/default.py
+++ b/nemo/collections/llm/recipes/log/default.py
@@ -16,8 +16,8 @@
from datetime import timedelta
from typing import Optional
+from lightning.pytorch.loggers import TensorBoardLogger, WandbLogger
from nemo_run import Config, cli
-from pytorch_lightning.loggers import TensorBoardLogger, WandbLogger
from nemo import lightning as nl
diff --git a/nemo/collections/llm/recipes/mamba2_130m.py b/nemo/collections/llm/recipes/mamba2_130m.py
index 08640604a112..e70fec03b3fb 100644
--- a/nemo/collections/llm/recipes/mamba2_130m.py
+++ b/nemo/collections/llm/recipes/mamba2_130m.py
@@ -15,11 +15,11 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.distributed import DistributedDataParallelConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections import llm
@@ -67,6 +67,7 @@ def model(tokenizer_model: str = None) -> run.Config[pl.LightningModule]:
)
+@run.cli.factory(target=finetune, name=NAME)
def trainer(
tensor_parallelism: int = 1,
pipeline_parallelism: int = 1,
@@ -76,7 +77,11 @@ def trainer(
sequence_parallelism: bool = False,
num_nodes: int = 1,
num_gpus_per_node: int = 8,
- max_steps: int = 1168251,
+ max_steps: int = 100,
+ val_check_interval: int = 100,
+ limit_test_batches: int = 50,
+ limit_val_batches: int = 32,
+ log_every_n_steps: int = 10,
callbacks: Optional[list[run.Config[Callback]]] = None,
) -> run.Config[nl.Trainer]:
"""
@@ -137,15 +142,15 @@ def trainer(
accumulate_grad_batches=1,
callbacks=callbacks,
devices=num_gpus_per_node,
- limit_test_batches=50,
- limit_val_batches=32,
- log_every_n_steps=10,
max_steps=max_steps,
num_nodes=num_nodes,
plugins=bf16_mixed(),
strategy=strategy,
use_distributed_sampler=False,
- val_check_interval=2000,
+ val_check_interval=val_check_interval,
+ limit_test_batches=limit_test_batches,
+ limit_val_batches=limit_val_batches,
+ log_every_n_steps=log_every_n_steps,
)
return trainer
@@ -158,6 +163,16 @@ def pretrain_recipe(
tokenizer_model: str = None,
num_nodes: int = 1,
num_gpus_per_node: int = 8,
+ tensor_parallelism: int = 1,
+ pipeline_parallelism: int = 1,
+ max_steps: int = 100,
+ val_check_interval: int = 100,
+ limit_test_batches: int = 50,
+ limit_val_batches: int = 32,
+ log_every_n_steps: int = 10,
+ seq_length: int = 4096,
+ gbs: int = 8,
+ mbs: int = 1,
fn=pretrain,
) -> run.Partial:
"""
@@ -193,16 +208,23 @@ def pretrain_recipe(
fn,
model=model(),
trainer=trainer(
+ max_steps=max_steps,
num_nodes=num_nodes,
+ tensor_parallelism=tensor_parallelism,
+ pipeline_parallelism=pipeline_parallelism,
num_gpus_per_node=num_gpus_per_node,
+ val_check_interval=val_check_interval,
+ limit_test_batches=limit_test_batches,
+ limit_val_batches=limit_val_batches,
+ log_every_n_steps=log_every_n_steps,
callbacks=[run.Config(TimingCallback)],
),
data=run.Config(
MockDataModule,
- seq_length=4096,
- global_batch_size=8,
- micro_batch_size=1,
- tokenizer=tokenizer(tokenizer_model=tokenizer_model),
+ seq_length=seq_length,
+ global_batch_size=gbs,
+ micro_batch_size=mbs,
+ tokenizer=tokenizer(),
),
log=default_log(dir=dir, name=name, tensorboard_logger=tensorboard_logger(name=name)),
optim=distributed_fused_adam_with_cosine_annealing(max_lr=3e-4),
@@ -218,6 +240,14 @@ def finetune_recipe(
tokenizer_model: str = None,
num_nodes: int = 1,
num_gpus_per_node: int = 8,
+ tensor_model_parallel_size: int = 1,
+ pipeline_model_parallel_size: int = 1,
+ seq_length: int = 4096,
+ max_steps: int = 100,
+ val_check_interval: int = 100,
+ limit_test_batches: int = 50,
+ limit_val_batches: int = 32,
+ log_every_n_steps: int = 10,
gbs: int = 8,
mbs: int = 1,
peft_scheme: Optional[str] = 'none',
@@ -266,8 +296,8 @@ def finetune_recipe(
)
strategy = run.Config(
nl.MegatronStrategy,
- tensor_model_parallel_size=1,
- pipeline_model_parallel_size=1,
+ tensor_model_parallel_size=tensor_model_parallel_size,
+ pipeline_model_parallel_size=pipeline_model_parallel_size,
gradient_as_bucket_view=True,
ckpt_load_optimizer=False,
ckpt_save_optimizer=False,
@@ -283,10 +313,11 @@ def finetune_recipe(
accelerator="gpu",
accumulate_grad_batches=1,
devices=num_gpus_per_node,
- limit_test_batches=10,
- limit_val_batches=10,
- log_every_n_steps=20,
- max_steps=100,
+ max_steps=max_steps,
+ val_check_interval=val_check_interval,
+ limit_test_batches=limit_test_batches,
+ limit_val_batches=limit_val_batches,
+ log_every_n_steps=log_every_n_steps,
num_nodes=num_nodes,
plugins=run.Config(
nl.MegatronMixedPrecision,
@@ -296,7 +327,6 @@ def finetune_recipe(
callbacks=[checkpoint_callback],
strategy=strategy,
use_distributed_sampler=False,
- val_check_interval=20,
)
recipe = run.Partial(
llm.finetune,
@@ -304,7 +334,7 @@ def finetune_recipe(
trainer=trainer,
data=run.Config(
llm.SquadDataModule,
- seq_length=2048,
+ seq_length=seq_length,
global_batch_size=gbs,
micro_batch_size=mbs,
tokenizer=tokenizer(tokenizer_model=tokenizer_model),
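
The mamba recipes now surface the trainer's step budget and evaluation cadence as arguments instead of hard-coding them. A short sketch with the 130M recipe; the tokenizer path is a placeholder, not shipped with this PR:

    from nemo.collections.llm.recipes import mamba2_130m

    recipe = mamba2_130m.finetune_recipe(
        tokenizer_model="/path/to/tokenizer.model",  # hypothetical path
        max_steps=200,
        val_check_interval=50,
        gbs=16,
    )
    assert recipe.trainer.max_steps == 200
    assert recipe.data.global_batch_size == 16
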
diff --git a/nemo/collections/llm/recipes/mamba2_1_3b.py b/nemo/collections/llm/recipes/mamba2_1_3b.py
index 58eaf049b059..aaa263078686 100644
--- a/nemo/collections/llm/recipes/mamba2_1_3b.py
+++ b/nemo/collections/llm/recipes/mamba2_1_3b.py
@@ -15,11 +15,11 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.distributed import DistributedDataParallelConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections import llm
@@ -67,6 +67,7 @@ def model(tokenizer_model: str = None) -> run.Config[pl.LightningModule]:
)
+@run.cli.factory(target=finetune, name=NAME)
def trainer(
tensor_parallelism: int = 1,
pipeline_parallelism: int = 1,
@@ -76,7 +77,11 @@ def trainer(
sequence_parallelism: bool = False,
num_nodes: int = 1,
num_gpus_per_node: int = 8,
- max_steps: int = 1168251,
+ max_steps: int = 100,
+ val_check_interval: int = 100,
+ limit_test_batches: int = 50,
+ limit_val_batches: int = 32,
+ log_every_n_steps: int = 10,
callbacks: Optional[list[run.Config[Callback]]] = None,
) -> run.Config[nl.Trainer]:
"""
@@ -137,15 +142,15 @@ def trainer(
accumulate_grad_batches=1,
callbacks=callbacks,
devices=num_gpus_per_node,
- limit_test_batches=50,
- limit_val_batches=32,
- log_every_n_steps=10,
max_steps=max_steps,
num_nodes=num_nodes,
plugins=bf16_mixed(),
strategy=strategy,
use_distributed_sampler=False,
- val_check_interval=2000,
+ val_check_interval=val_check_interval,
+ limit_test_batches=limit_test_batches,
+ limit_val_batches=limit_val_batches,
+ log_every_n_steps=log_every_n_steps,
)
return trainer
@@ -157,7 +162,17 @@ def pretrain_recipe(
name: str = "default",
tokenizer_model: str = None,
num_nodes: int = 1,
- num_gpus_per_node: int = 8,
+ num_gpus_per_node: int = 1,
+ tensor_parallelism: int = 1,
+ pipeline_parallelism: int = 1,
+ max_steps: int = 100,
+ val_check_interval: int = 100,
+ limit_test_batches: int = 50,
+ limit_val_batches: int = 32,
+ log_every_n_steps: int = 10,
+ seq_length: int = 4096,
+ gbs: int = 8,
+ mbs: int = 1,
fn=pretrain,
) -> run.Partial:
"""
@@ -191,17 +206,24 @@ def pretrain_recipe(
"""
return run.Partial(
fn,
- model=model(),
+ model=model(tokenizer_model=tokenizer_model),
trainer=trainer(
+ max_steps=max_steps,
num_nodes=num_nodes,
+ tensor_parallelism=tensor_parallelism,
+ pipeline_parallelism=pipeline_parallelism,
num_gpus_per_node=num_gpus_per_node,
+ val_check_interval=val_check_interval,
+ limit_test_batches=limit_test_batches,
+ limit_val_batches=limit_val_batches,
+ log_every_n_steps=log_every_n_steps,
callbacks=[run.Config(TimingCallback)],
),
data=run.Config(
MockDataModule,
- seq_length=4096,
- global_batch_size=8,
- micro_batch_size=1,
+ seq_length=seq_length,
+ global_batch_size=gbs,
+ micro_batch_size=mbs,
tokenizer=tokenizer(tokenizer_model=tokenizer_model),
),
log=default_log(dir=dir, name=name, tensorboard_logger=tensorboard_logger(name=name)),
@@ -217,7 +239,15 @@ def finetune_recipe(
resume_path: str = None,
tokenizer_model: str = None,
num_nodes: int = 1,
- num_gpus_per_node: int = 8,
+ num_gpus_per_node: int = 1,
+ tensor_model_parallel_size: int = 1,
+ pipeline_model_parallel_size: int = 1,
+ seq_length: int = 4096,
+ max_steps: int = 100,
+ val_check_interval: int = 100,
+ limit_test_batches: int = 50,
+ limit_val_batches: int = 32,
+ log_every_n_steps: int = 10,
gbs: int = 8,
mbs: int = 1,
peft_scheme: Optional[str] = 'none',
@@ -266,8 +296,8 @@ def finetune_recipe(
)
strategy = run.Config(
nl.MegatronStrategy,
- tensor_model_parallel_size=1,
- pipeline_model_parallel_size=1,
+ tensor_model_parallel_size=tensor_model_parallel_size,
+ pipeline_model_parallel_size=pipeline_model_parallel_size,
gradient_as_bucket_view=True,
ckpt_load_optimizer=False,
ckpt_save_optimizer=False,
@@ -283,10 +313,11 @@ def finetune_recipe(
accelerator="gpu",
accumulate_grad_batches=1,
devices=num_gpus_per_node,
- limit_test_batches=10,
- limit_val_batches=10,
- log_every_n_steps=20,
- max_steps=100,
+ max_steps=max_steps,
+ val_check_interval=val_check_interval,
+ limit_test_batches=limit_test_batches,
+ limit_val_batches=limit_val_batches,
+ log_every_n_steps=log_every_n_steps,
num_nodes=num_nodes,
plugins=run.Config(
nl.MegatronMixedPrecision,
@@ -296,7 +327,6 @@ def finetune_recipe(
callbacks=[checkpoint_callback],
strategy=strategy,
use_distributed_sampler=False,
- val_check_interval=20,
)
recipe = run.Partial(
llm.finetune,
@@ -304,7 +334,7 @@ def finetune_recipe(
trainer=trainer,
data=run.Config(
llm.SquadDataModule,
- seq_length=2048,
+ seq_length=seq_length,
global_batch_size=gbs,
micro_batch_size=mbs,
tokenizer=tokenizer(tokenizer_model=tokenizer_model),
diff --git a/nemo/collections/llm/recipes/mamba2_2_7b.py b/nemo/collections/llm/recipes/mamba2_2_7b.py
index 5cb37c6a02a5..b4fd5b487b6a 100644
--- a/nemo/collections/llm/recipes/mamba2_2_7b.py
+++ b/nemo/collections/llm/recipes/mamba2_2_7b.py
@@ -15,11 +15,11 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.distributed import DistributedDataParallelConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections import llm
@@ -67,6 +67,7 @@ def model(tokenizer_model: str = None) -> run.Config[pl.LightningModule]:
)
+@run.cli.factory(target=finetune, name=NAME)
def trainer(
tensor_parallelism: int = 1,
pipeline_parallelism: int = 1,
@@ -76,7 +77,11 @@ def trainer(
sequence_parallelism: bool = False,
num_nodes: int = 1,
num_gpus_per_node: int = 8,
- max_steps: int = 1168251,
+ max_steps: int = 100,
+ val_check_interval: int = 100,
+ limit_test_batches: int = 50,
+ limit_val_batches: int = 32,
+ log_every_n_steps: int = 10,
callbacks: Optional[list[run.Config[Callback]]] = None,
) -> run.Config[nl.Trainer]:
"""
@@ -137,15 +142,15 @@ def trainer(
accumulate_grad_batches=1,
callbacks=callbacks,
devices=num_gpus_per_node,
- limit_test_batches=50,
- limit_val_batches=32,
- log_every_n_steps=10,
max_steps=max_steps,
num_nodes=num_nodes,
plugins=bf16_mixed(),
strategy=strategy,
use_distributed_sampler=False,
- val_check_interval=2000,
+ val_check_interval=val_check_interval,
+ limit_test_batches=limit_test_batches,
+ limit_val_batches=limit_val_batches,
+ log_every_n_steps=log_every_n_steps,
)
return trainer
@@ -158,6 +163,16 @@ def pretrain_recipe(
tokenizer_model: str = None,
num_nodes: int = 1,
num_gpus_per_node: int = 8,
+ tensor_parallelism: int = 1,
+ pipeline_parallelism: int = 1,
+ max_steps: int = 100,
+ val_check_interval: int = 100,
+ limit_test_batches: int = 50,
+ limit_val_batches: int = 32,
+ log_every_n_steps: int = 10,
+ seq_length: int = 4096,
+ gbs: int = 8,
+ mbs: int = 1,
fn=pretrain,
) -> run.Partial:
"""
@@ -193,16 +208,23 @@ def pretrain_recipe(
fn,
model=model(),
trainer=trainer(
+ max_steps=max_steps,
num_nodes=num_nodes,
+ tensor_parallelism=tensor_parallelism,
+ pipeline_parallelism=pipeline_parallelism,
num_gpus_per_node=num_gpus_per_node,
+ val_check_interval=val_check_interval,
+ limit_test_batches=limit_test_batches,
+ limit_val_batches=limit_val_batches,
+ log_every_n_steps=log_every_n_steps,
callbacks=[run.Config(TimingCallback)],
),
data=run.Config(
MockDataModule,
- seq_length=4096,
- global_batch_size=8,
- micro_batch_size=1,
- tokenizer=tokenizer(tokenizer_model=tokenizer_model),
+ seq_length=seq_length,
+ global_batch_size=gbs,
+ micro_batch_size=mbs,
+ tokenizer=tokenizer(),
),
log=default_log(dir=dir, name=name, tensorboard_logger=tensorboard_logger(name=name)),
optim=distributed_fused_adam_with_cosine_annealing(max_lr=3e-4),
@@ -218,6 +240,14 @@ def finetune_recipe(
tokenizer_model: str = None,
num_nodes: int = 1,
num_gpus_per_node: int = 8,
+ tensor_model_parallel_size: int = 1,
+ pipeline_model_parallel_size: int = 1,
+ seq_length: int = 4096,
+ max_steps: int = 100,
+ val_check_interval: int = 100,
+ limit_test_batches: int = 50,
+ limit_val_batches: int = 32,
+ log_every_n_steps: int = 10,
gbs: int = 8,
mbs: int = 1,
peft_scheme: Optional[str] = 'none',
@@ -266,8 +296,8 @@ def finetune_recipe(
)
strategy = run.Config(
nl.MegatronStrategy,
- tensor_model_parallel_size=1,
- pipeline_model_parallel_size=1,
+ tensor_model_parallel_size=tensor_model_parallel_size,
+ pipeline_model_parallel_size=pipeline_model_parallel_size,
gradient_as_bucket_view=True,
ckpt_load_optimizer=False,
ckpt_save_optimizer=False,
@@ -283,10 +313,11 @@ def finetune_recipe(
accelerator="gpu",
accumulate_grad_batches=1,
devices=num_gpus_per_node,
- limit_test_batches=10,
- limit_val_batches=10,
- log_every_n_steps=20,
- max_steps=100,
+ max_steps=max_steps,
+ val_check_interval=val_check_interval,
+ limit_test_batches=limit_test_batches,
+ limit_val_batches=limit_val_batches,
+ log_every_n_steps=log_every_n_steps,
num_nodes=num_nodes,
plugins=run.Config(
nl.MegatronMixedPrecision,
@@ -296,7 +327,6 @@ def finetune_recipe(
callbacks=[checkpoint_callback],
strategy=strategy,
use_distributed_sampler=False,
- val_check_interval=20,
)
recipe = run.Partial(
llm.finetune,
@@ -304,7 +334,7 @@ def finetune_recipe(
trainer=trainer,
data=run.Config(
llm.SquadDataModule,
- seq_length=2048,
+ seq_length=seq_length,
global_batch_size=gbs,
micro_batch_size=mbs,
tokenizer=tokenizer(tokenizer_model=tokenizer_model),
diff --git a/nemo/collections/llm/recipes/mamba2_370m.py b/nemo/collections/llm/recipes/mamba2_370m.py
index bb8bddc4045a..6fa619b33486 100644
--- a/nemo/collections/llm/recipes/mamba2_370m.py
+++ b/nemo/collections/llm/recipes/mamba2_370m.py
@@ -15,11 +15,11 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.distributed import DistributedDataParallelConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections import llm
@@ -67,6 +67,7 @@ def model(tokenizer_model: str = None) -> run.Config[pl.LightningModule]:
)
+@run.cli.factory(target=finetune, name=NAME)
def trainer(
tensor_parallelism: int = 1,
pipeline_parallelism: int = 1,
@@ -76,7 +77,11 @@ def trainer(
sequence_parallelism: bool = False,
num_nodes: int = 1,
num_gpus_per_node: int = 8,
- max_steps: int = 1168251,
+ max_steps: int = 100,
+ val_check_interval: int = 100,
+ limit_test_batches: int = 50,
+ limit_val_batches: int = 32,
+ log_every_n_steps: int = 10,
callbacks: Optional[list[run.Config[Callback]]] = None,
) -> run.Config[nl.Trainer]:
"""
@@ -137,15 +142,15 @@ def trainer(
accumulate_grad_batches=1,
callbacks=callbacks,
devices=num_gpus_per_node,
- limit_test_batches=50,
- limit_val_batches=32,
- log_every_n_steps=10,
max_steps=max_steps,
num_nodes=num_nodes,
plugins=bf16_mixed(),
strategy=strategy,
use_distributed_sampler=False,
- val_check_interval=2000,
+ val_check_interval=val_check_interval,
+ limit_test_batches=limit_test_batches,
+ limit_val_batches=limit_val_batches,
+ log_every_n_steps=log_every_n_steps,
)
return trainer
@@ -158,6 +163,16 @@ def pretrain_recipe(
tokenizer_model: str = None,
num_nodes: int = 1,
num_gpus_per_node: int = 8,
+ tensor_parallelism: int = 1,
+ pipeline_parallelism: int = 1,
+ max_steps: int = 100,
+ val_check_interval: int = 100,
+ limit_test_batches: int = 50,
+ limit_val_batches: int = 32,
+ log_every_n_steps: int = 10,
+ seq_length: int = 4096,
+ gbs: int = 8,
+ mbs: int = 1,
fn=pretrain,
) -> run.Partial:
"""
@@ -193,16 +208,23 @@ def pretrain_recipe(
fn,
model=model(),
trainer=trainer(
+ max_steps=max_steps,
num_nodes=num_nodes,
+ tensor_parallelism=tensor_parallelism,
+ pipeline_parallelism=pipeline_parallelism,
num_gpus_per_node=num_gpus_per_node,
+ val_check_interval=val_check_interval,
+ limit_test_batches=limit_test_batches,
+ limit_val_batches=limit_val_batches,
+ log_every_n_steps=log_every_n_steps,
callbacks=[run.Config(TimingCallback)],
),
data=run.Config(
MockDataModule,
- seq_length=4096,
- global_batch_size=8,
- micro_batch_size=1,
- tokenizer=tokenizer(tokenizer_model=tokenizer_model),
+ seq_length=seq_length,
+ global_batch_size=gbs,
+ micro_batch_size=mbs,
+ tokenizer=tokenizer(),
),
log=default_log(dir=dir, name=name, tensorboard_logger=tensorboard_logger(name=name)),
optim=distributed_fused_adam_with_cosine_annealing(max_lr=3e-4),
@@ -218,6 +240,14 @@ def finetune_recipe(
tokenizer_model: str = None,
num_nodes: int = 1,
num_gpus_per_node: int = 8,
+ tensor_model_parallel_size: int = 1,
+ pipeline_model_parallel_size: int = 1,
+ seq_length: int = 4096,
+ max_steps: int = 100,
+ val_check_interval: int = 100,
+ limit_test_batches: int = 50,
+ limit_val_batches: int = 32,
+ log_every_n_steps: int = 10,
gbs: int = 8,
mbs: int = 1,
peft_scheme: Optional[str] = 'none',
@@ -266,8 +296,8 @@ def finetune_recipe(
)
strategy = run.Config(
nl.MegatronStrategy,
- tensor_model_parallel_size=1,
- pipeline_model_parallel_size=1,
+ tensor_model_parallel_size=tensor_model_parallel_size,
+ pipeline_model_parallel_size=pipeline_model_parallel_size,
gradient_as_bucket_view=True,
ckpt_load_optimizer=False,
ckpt_save_optimizer=False,
@@ -283,10 +313,11 @@ def finetune_recipe(
accelerator="gpu",
accumulate_grad_batches=1,
devices=num_gpus_per_node,
- limit_test_batches=10,
- limit_val_batches=10,
- log_every_n_steps=20,
- max_steps=100,
+ max_steps=max_steps,
+ val_check_interval=val_check_interval,
+ limit_test_batches=limit_test_batches,
+ limit_val_batches=limit_val_batches,
+ log_every_n_steps=log_every_n_steps,
num_nodes=num_nodes,
plugins=run.Config(
nl.MegatronMixedPrecision,
@@ -296,7 +327,6 @@ def finetune_recipe(
callbacks=[checkpoint_callback],
strategy=strategy,
use_distributed_sampler=False,
- val_check_interval=20,
)
recipe = run.Partial(
llm.finetune,
@@ -304,7 +334,7 @@ def finetune_recipe(
trainer=trainer,
data=run.Config(
llm.SquadDataModule,
- seq_length=2048,
+ seq_length=seq_length,
global_batch_size=gbs,
micro_batch_size=mbs,
tokenizer=tokenizer(tokenizer_model=tokenizer_model),
diff --git a/nemo/collections/llm/recipes/mamba2_780m.py b/nemo/collections/llm/recipes/mamba2_780m.py
index 2f6ab6717ae1..45d28f82f779 100644
--- a/nemo/collections/llm/recipes/mamba2_780m.py
+++ b/nemo/collections/llm/recipes/mamba2_780m.py
@@ -15,11 +15,11 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.distributed import DistributedDataParallelConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections import llm
@@ -67,6 +67,7 @@ def model(tokenizer_model: str = None) -> run.Config[pl.LightningModule]:
)
+@run.cli.factory(target=finetune, name=NAME)
def trainer(
tensor_parallelism: int = 1,
pipeline_parallelism: int = 1,
@@ -76,7 +77,11 @@ def trainer(
sequence_parallelism: bool = False,
num_nodes: int = 1,
num_gpus_per_node: int = 8,
- max_steps: int = 1168251,
+ max_steps: int = 100,
+ val_check_interval: int = 100,
+ limit_test_batches: int = 50,
+ limit_val_batches: int = 32,
+ log_every_n_steps: int = 10,
callbacks: Optional[list[run.Config[Callback]]] = None,
) -> run.Config[nl.Trainer]:
"""
@@ -137,15 +142,15 @@ def trainer(
accumulate_grad_batches=1,
callbacks=callbacks,
devices=num_gpus_per_node,
- limit_test_batches=50,
- limit_val_batches=32,
- log_every_n_steps=10,
max_steps=max_steps,
num_nodes=num_nodes,
plugins=bf16_mixed(),
strategy=strategy,
use_distributed_sampler=False,
- val_check_interval=2000,
+ val_check_interval=val_check_interval,
+ limit_test_batches=limit_test_batches,
+ limit_val_batches=limit_val_batches,
+ log_every_n_steps=log_every_n_steps,
)
return trainer
@@ -158,6 +163,16 @@ def pretrain_recipe(
tokenizer_model: str = None,
num_nodes: int = 1,
num_gpus_per_node: int = 8,
+ tensor_parallelism: int = 1,
+ pipeline_parallelism: int = 1,
+ max_steps: int = 100,
+ val_check_interval: int = 100,
+ limit_test_batches: int = 50,
+ limit_val_batches: int = 32,
+ log_every_n_steps: int = 10,
+ seq_length: int = 4096,
+ gbs: int = 8,
+ mbs: int = 1,
fn=pretrain,
) -> run.Partial:
"""
@@ -193,16 +208,23 @@ def pretrain_recipe(
fn,
model=model(),
trainer=trainer(
+ max_steps=max_steps,
num_nodes=num_nodes,
+ tensor_parallelism=tensor_parallelism,
+ pipeline_parallelism=pipeline_parallelism,
num_gpus_per_node=num_gpus_per_node,
+ val_check_interval=val_check_interval,
+ limit_test_batches=limit_test_batches,
+ limit_val_batches=limit_val_batches,
+ log_every_n_steps=log_every_n_steps,
callbacks=[run.Config(TimingCallback)],
),
data=run.Config(
MockDataModule,
- seq_length=4096,
- global_batch_size=8,
- micro_batch_size=1,
- tokenizer=tokenizer(tokenizer_model=tokenizer_model),
+ seq_length=seq_length,
+ global_batch_size=gbs,
+ micro_batch_size=mbs,
+ tokenizer=tokenizer(),
),
log=default_log(dir=dir, name=name, tensorboard_logger=tensorboard_logger(name=name)),
optim=distributed_fused_adam_with_cosine_annealing(max_lr=3e-4),
@@ -218,6 +240,14 @@ def finetune_recipe(
tokenizer_model: str = None,
num_nodes: int = 1,
num_gpus_per_node: int = 8,
+ tensor_model_parallel_size: int = 1,
+ pipeline_model_parallel_size: int = 1,
+ seq_length: int = 4096,
+ max_steps: int = 100,
+ val_check_interval: int = 100,
+ limit_test_batches: int = 50,
+ limit_val_batches: int = 32,
+ log_every_n_steps: int = 10,
gbs: int = 8,
mbs: int = 1,
peft_scheme: Optional[str] = 'none',
@@ -266,8 +296,8 @@ def finetune_recipe(
)
strategy = run.Config(
nl.MegatronStrategy,
- tensor_model_parallel_size=1,
- pipeline_model_parallel_size=1,
+ tensor_model_parallel_size=tensor_model_parallel_size,
+ pipeline_model_parallel_size=pipeline_model_parallel_size,
gradient_as_bucket_view=True,
ckpt_load_optimizer=False,
ckpt_save_optimizer=False,
@@ -283,10 +313,11 @@ def finetune_recipe(
accelerator="gpu",
accumulate_grad_batches=1,
devices=num_gpus_per_node,
- limit_test_batches=10,
- limit_val_batches=10,
- log_every_n_steps=20,
- max_steps=100,
+ max_steps=max_steps,
+ val_check_interval=val_check_interval,
+ limit_test_batches=limit_test_batches,
+ limit_val_batches=limit_val_batches,
+ log_every_n_steps=log_every_n_steps,
num_nodes=num_nodes,
plugins=run.Config(
nl.MegatronMixedPrecision,
@@ -296,7 +327,6 @@ def finetune_recipe(
callbacks=[checkpoint_callback],
strategy=strategy,
use_distributed_sampler=False,
- val_check_interval=20,
)
recipe = run.Partial(
llm.finetune,
@@ -304,7 +334,7 @@ def finetune_recipe(
trainer=trainer,
data=run.Config(
llm.SquadDataModule,
- seq_length=2048,
+ seq_length=seq_length,
global_batch_size=gbs,
micro_batch_size=mbs,
tokenizer=tokenizer(tokenizer_model=tokenizer_model),
diff --git a/nemo/collections/llm/recipes/mamba2_8b.py b/nemo/collections/llm/recipes/mamba2_8b.py
index 58883deba732..8f8384b45059 100644
--- a/nemo/collections/llm/recipes/mamba2_8b.py
+++ b/nemo/collections/llm/recipes/mamba2_8b.py
@@ -15,11 +15,11 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.distributed import DistributedDataParallelConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections import llm
@@ -67,6 +67,7 @@ def model(tokenizer_model: str = None) -> run.Config[pl.LightningModule]:
)
+@run.cli.factory(name=NAME)
def trainer(
tensor_parallelism: int = 8,
pipeline_parallelism: int = 1,
@@ -76,7 +77,11 @@ def trainer(
sequence_parallelism: bool = False,
num_nodes: int = 1,
num_gpus_per_node: int = 8,
- max_steps: int = 1168251,
+ max_steps: int = 100,
+ val_check_interval: int = 100,
+ limit_test_batches: int = 50,
+ limit_val_batches: int = 32,
+ log_every_n_steps: int = 10,
callbacks: Optional[list[run.Config[Callback]]] = None,
) -> run.Config[nl.Trainer]:
"""
@@ -137,15 +142,15 @@ def trainer(
accumulate_grad_batches=1,
callbacks=callbacks,
devices=num_gpus_per_node,
- limit_test_batches=50,
- limit_val_batches=32,
- log_every_n_steps=10,
max_steps=max_steps,
num_nodes=num_nodes,
plugins=bf16_mixed(),
strategy=strategy,
use_distributed_sampler=False,
- val_check_interval=2000,
+ val_check_interval=val_check_interval,
+ limit_test_batches=limit_test_batches,
+ limit_val_batches=limit_val_batches,
+ log_every_n_steps=log_every_n_steps,
)
return trainer
@@ -158,6 +163,16 @@ def pretrain_recipe(
tokenizer_model: str = None,
num_nodes: int = 1,
num_gpus_per_node: int = 8,
+ tensor_parallelism: int = 8,
+ pipeline_parallelism: int = 1,
+ max_steps: int = 100,
+ val_check_interval: int = 100,
+ limit_test_batches: int = 50,
+ limit_val_batches: int = 32,
+ log_every_n_steps: int = 10,
+ seq_length: int = 4096,
+ gbs: int = 8,
+ mbs: int = 1,
fn=pretrain,
) -> run.Partial:
"""
@@ -191,17 +206,24 @@ def pretrain_recipe(
"""
return run.Partial(
fn,
- model=model(),
+ model=model(tokenizer_model=tokenizer_model),
trainer=trainer(
+ max_steps=max_steps,
num_nodes=num_nodes,
+ tensor_parallelism=tensor_parallelism,
+ pipeline_parallelism=pipeline_parallelism,
num_gpus_per_node=num_gpus_per_node,
+ val_check_interval=val_check_interval,
+ limit_test_batches=limit_test_batches,
+ limit_val_batches=limit_val_batches,
+ log_every_n_steps=log_every_n_steps,
callbacks=[run.Config(TimingCallback)],
),
data=run.Config(
MockDataModule,
- seq_length=4096,
- global_batch_size=8,
- micro_batch_size=1,
+ seq_length=seq_length,
+ global_batch_size=gbs,
+ micro_batch_size=mbs,
tokenizer=tokenizer(tokenizer_model=tokenizer_model),
),
log=default_log(dir=dir, name=name, tensorboard_logger=tensorboard_logger(name=name)),
@@ -218,6 +240,14 @@ def finetune_recipe(
name: str = "default",
num_nodes: int = 1,
num_gpus_per_node: int = 8,
+ tensor_model_parallel_size: int = 8,
+ pipeline_model_parallel_size: int = 1,
+ seq_length: int = 4096,
+ max_steps: int = 100,
+ val_check_interval: int = 100,
+ limit_test_batches: int = 50,
+ limit_val_batches: int = 32,
+ log_every_n_steps: int = 10,
gbs: int = 8,
mbs: int = 1,
peft_scheme: Optional[str] = 'none',
@@ -266,8 +296,8 @@ def finetune_recipe(
)
strategy = run.Config(
nl.MegatronStrategy,
- tensor_model_parallel_size=8,
- pipeline_model_parallel_size=1,
+ tensor_model_parallel_size=tensor_model_parallel_size,
+ pipeline_model_parallel_size=pipeline_model_parallel_size,
gradient_as_bucket_view=True,
ckpt_load_optimizer=False,
ckpt_save_optimizer=False,
@@ -283,10 +313,11 @@ def finetune_recipe(
accelerator="gpu",
accumulate_grad_batches=1,
devices=num_gpus_per_node,
- limit_test_batches=10,
- limit_val_batches=10,
- log_every_n_steps=20,
- max_steps=100,
+ max_steps=max_steps,
+ val_check_interval=val_check_interval,
+ limit_test_batches=limit_test_batches,
+ limit_val_batches=limit_val_batches,
+ log_every_n_steps=log_every_n_steps,
num_nodes=num_nodes,
plugins=run.Config(
nl.MegatronMixedPrecision,
@@ -296,7 +327,6 @@ def finetune_recipe(
callbacks=[checkpoint_callback],
strategy=strategy,
use_distributed_sampler=False,
- val_check_interval=20,
)
recipe = run.Partial(
llm.finetune,
@@ -304,7 +334,7 @@ def finetune_recipe(
trainer=trainer,
data=run.Config(
llm.SquadDataModule,
- seq_length=2048,
+ seq_length=seq_length,
global_batch_size=gbs,
micro_batch_size=mbs,
tokenizer=tokenizer(tokenizer_model=tokenizer_model),
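The refactor above promotes previously hard-coded trainer and data settings (max_steps, val_check_interval, batch sizes, sequence length) to recipe arguments. A minimal usage sketch, assuming the import path matches the file location and using a placeholder tokenizer path:

```python
# Sketch: overriding the newly exposed knobs on the mamba2_8b pretraining
# recipe via the Python API. Argument names follow the signature above.
from nemo.collections.llm.recipes import mamba2_8b

recipe = mamba2_8b.pretrain_recipe(
    name="mamba2_8b_smoke_test",
    tokenizer_model="/path/to/tokenizer.model",  # placeholder path
    num_nodes=1,
    num_gpus_per_node=8,
    max_steps=200,           # default is now 100; was hard-coded to 1168251
    val_check_interval=50,   # was hard-coded to 2000
    limit_val_batches=8,
    seq_length=4096,
    gbs=8,   # global batch size
    mbs=1,   # micro batch size
)
```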
diff --git a/nemo/collections/llm/recipes/mamba2_hybrid_8b.py b/nemo/collections/llm/recipes/mamba2_hybrid_8b.py
index eff37da46fca..b91c8e228bc9 100644
--- a/nemo/collections/llm/recipes/mamba2_hybrid_8b.py
+++ b/nemo/collections/llm/recipes/mamba2_hybrid_8b.py
@@ -15,11 +15,11 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.distributed import DistributedDataParallelConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections import llm
@@ -39,7 +39,7 @@ def tokenizer(tokenizer_model: str = None) -> run.Config[pl.LightningModule]:
return run.Config(
get_nmt_tokenizer,
- library='megatronNVIDIAMambaConfig8B',
+ library='megatron',
model_name="GPTSentencePieceTokenizer",
tokenizer_model=tokenizer_model,
use_fast=True,
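The one-line fix above matters because get_nmt_tokenizer dispatches on the `library` string; the old value ('megatronNVIDIAMambaConfig8B') reads like a pasted config class name and would not match any tokenizer backend. A sketch of what the corrected factory resolves to at run time, with the import path assumed from NeMo's tokenizer utilities:

```python
# Sketch, assuming the usual NeMo import location for get_nmt_tokenizer.
from nemo.collections.nlp.modules.common.tokenizer_utils import get_nmt_tokenizer

tokenizer = get_nmt_tokenizer(
    library="megatron",                          # backend selector
    model_name="GPTSentencePieceTokenizer",      # Megatron tokenizer class name
    tokenizer_model="/path/to/tokenizer.model",  # placeholder path
    use_fast=True,
)
```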
@@ -69,6 +69,7 @@ def model(tokenizer_model: str = None) -> run.Config[pl.LightningModule]:
)
+@run.cli.factory(target=finetune, name=NAME)
def trainer(
tensor_parallelism: int = 8,
pipeline_parallelism: int = 1,
@@ -78,7 +79,11 @@ def trainer(
sequence_parallelism: bool = False,
num_nodes: int = 1,
num_gpus_per_node: int = 8,
- max_steps: int = 1168251,
+ max_steps: int = 100,
+ val_check_interval: int = 100,
+ limit_test_batches: int = 50,
+ limit_val_batches: int = 32,
+ log_every_n_steps: int = 10,
callbacks: Optional[list[run.Config[Callback]]] = None,
) -> run.Config[nl.Trainer]:
"""
@@ -139,15 +144,15 @@ def trainer(
accumulate_grad_batches=1,
callbacks=callbacks,
devices=num_gpus_per_node,
- limit_test_batches=50,
- limit_val_batches=32,
- log_every_n_steps=10,
max_steps=max_steps,
num_nodes=num_nodes,
plugins=bf16_mixed(),
strategy=strategy,
use_distributed_sampler=False,
- val_check_interval=2000,
+ val_check_interval=val_check_interval,
+ limit_test_batches=limit_test_batches,
+ limit_val_batches=limit_val_batches,
+ log_every_n_steps=log_every_n_steps,
)
return trainer
@@ -160,6 +165,16 @@ def pretrain_recipe(
tokenizer_model: str = None,
num_nodes: int = 1,
num_gpus_per_node: int = 8,
+ tensor_parallelism: int = 8,
+ pipeline_parallelism: int = 1,
+ max_steps: int = 100,
+ val_check_interval: int = 100,
+ limit_test_batches: int = 50,
+ limit_val_batches: int = 32,
+ log_every_n_steps: int = 10,
+ seq_length: int = 4096,
+ gbs: int = 8,
+ mbs: int = 1,
fn=pretrain,
) -> run.Partial:
"""
@@ -193,17 +208,24 @@ def pretrain_recipe(
"""
return run.Partial(
fn,
- model=model(),
+ model=model(tokenizer_model=tokenizer_model),
trainer=trainer(
+ max_steps=max_steps,
num_nodes=num_nodes,
+ tensor_parallelism=tensor_parallelism,
+ pipeline_parallelism=pipeline_parallelism,
num_gpus_per_node=num_gpus_per_node,
+ val_check_interval=val_check_interval,
+ limit_test_batches=limit_test_batches,
+ limit_val_batches=limit_val_batches,
+ log_every_n_steps=log_every_n_steps,
callbacks=[run.Config(TimingCallback)],
),
data=run.Config(
MockDataModule,
- seq_length=4096,
- global_batch_size=8,
- micro_batch_size=1,
+ seq_length=seq_length,
+ global_batch_size=gbs,
+ micro_batch_size=mbs,
tokenizer=tokenizer(tokenizer_model=tokenizer_model),
),
log=default_log(dir=dir, name=name, tensorboard_logger=tensorboard_logger(name=name)),
@@ -220,6 +242,14 @@ def finetune_recipe(
name: str = "default",
num_nodes: int = 1,
num_gpus_per_node: int = 8,
+ tensor_model_parallel_size: int = 8,
+ pipeline_model_parallel_size: int = 1,
+ seq_length: int = 4096,
+ max_steps: int = 100,
+ val_check_interval: int = 100,
+ limit_test_batches: int = 50,
+ limit_val_batches: int = 32,
+ log_every_n_steps: int = 10,
gbs: int = 8,
mbs: int = 1,
peft_scheme: Optional[str] = 'none',
@@ -268,8 +298,8 @@ def finetune_recipe(
)
strategy = run.Config(
nl.MegatronStrategy,
- tensor_model_parallel_size=8,
- pipeline_model_parallel_size=1,
+ tensor_model_parallel_size=tensor_model_parallel_size,
+ pipeline_model_parallel_size=pipeline_model_parallel_size,
gradient_as_bucket_view=True,
ckpt_load_optimizer=False,
ckpt_save_optimizer=False,
@@ -285,10 +315,11 @@ def finetune_recipe(
accelerator="gpu",
accumulate_grad_batches=1,
devices=num_gpus_per_node,
- limit_test_batches=10,
- limit_val_batches=10,
- log_every_n_steps=20,
- max_steps=100,
+ max_steps=max_steps,
+ val_check_interval=val_check_interval,
+ limit_test_batches=limit_test_batches,
+ limit_val_batches=limit_val_batches,
+ log_every_n_steps=log_every_n_steps,
num_nodes=num_nodes,
plugins=run.Config(
nl.MegatronMixedPrecision,
@@ -298,7 +329,6 @@ def finetune_recipe(
callbacks=[checkpoint_callback],
strategy=strategy,
use_distributed_sampler=False,
- val_check_interval=20,
)
recipe = run.Partial(
llm.finetune,
@@ -306,7 +336,7 @@ def finetune_recipe(
trainer=trainer,
data=run.Config(
llm.SquadDataModule,
- seq_length=2048,
+ seq_length=seq_length,
global_batch_size=gbs,
micro_batch_size=mbs,
tokenizer=tokenizer(tokenizer_model=tokenizer_model),
diff --git a/nemo/collections/llm/recipes/mistral_7b.py b/nemo/collections/llm/recipes/mistral_7b.py
index dfcc671cc61f..9e2d2e256fbe 100644
--- a/nemo/collections/llm/recipes/mistral_7b.py
+++ b/nemo/collections/llm/recipes/mistral_7b.py
@@ -15,18 +15,17 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.distributed import DistributedDataParallelConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
-from nemo.collections.llm.gpt.data.squad import SquadDataModule
from nemo.collections.llm.gpt.model.mistral import MistralConfig7B, MistralModel
-from nemo.collections.llm.peft.lora import LoRA
+from nemo.collections.llm.peft import PEFT_STR2CLS
from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
@@ -207,7 +206,10 @@ def finetune_recipe(
name (str): Name of the fine-tuning run.
num_nodes (int): Number of compute nodes to use.
num_gpus_per_node (int): Number of GPUs per node.
- peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora', 'none'/None.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+ packed_sequence (Optional[bool]): If True, pack multiple training sequences into one long sequence for training
+ efficiency. The default packed sequence length is 2048.
Returns:
run.Partial: Partial configuration for fine-tuning.
@@ -236,8 +238,8 @@ def finetune_recipe(
if peft_scheme is None or peft_scheme.lower() == 'none':
recipe.trainer.strategy.tensor_model_parallel_size = 2
recipe.optim.config.lr = 5e-6
- elif peft_scheme.lower() == 'lora':
- recipe.peft = run.Config(LoRA)
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
recipe.optim.config.lr = 1e-4
else:
raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
diff --git a/nemo/collections/llm/recipes/mistral_nemo_12b.py b/nemo/collections/llm/recipes/mistral_nemo_12b.py
index 3f1878edb59d..a10f8ae804b8 100644
--- a/nemo/collections/llm/recipes/mistral_nemo_12b.py
+++ b/nemo/collections/llm/recipes/mistral_nemo_12b.py
@@ -15,18 +15,17 @@
from typing import Callable, Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.distributed import DistributedDataParallelConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
-from nemo.collections.llm.gpt.data.squad import SquadDataModule
from nemo.collections.llm.gpt.model.mistral import MistralModel, MistralNeMoConfig12B
-from nemo.collections.llm.peft.lora import LoRA
+from nemo.collections.llm.peft import PEFT_STR2CLS
from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
@@ -255,7 +254,10 @@ def finetune_recipe(
name (str): Name of the fine-tuning run.
num_nodes (int): Number of compute nodes to use.
num_gpus_per_node (int): Number of GPUs per node.
- peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora', 'none'/None.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+ packed_sequence (Optional[bool]): If True, pack multiple training sequences into one long sequence for training
+ efficiency. The default packed sequence length is 2048.
Returns:
run.Partial: Partial configuration for fine-tuning.
@@ -284,8 +286,10 @@ def finetune_recipe(
)
if peft_scheme is None or peft_scheme.lower() == 'none':
recipe.optim.config.lr = 5e-6
- elif peft_scheme.lower() == 'lora':
- recipe.peft = run.Config(LoRA, target_modules=['linear_qkv', 'linear_proj'], dim=32)
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(
+ PEFT_STR2CLS[peft_scheme.lower()], target_modules=['linear_qkv', 'linear_proj'], dim=32
+ )
recipe.optim.config.lr = 1e-4
else:
raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
diff --git a/nemo/collections/llm/recipes/mixtral_8x22b.py b/nemo/collections/llm/recipes/mixtral_8x22b.py
index e9e6a27c24b7..ec1641a08d80 100644
--- a/nemo/collections/llm/recipes/mixtral_8x22b.py
+++ b/nemo/collections/llm/recipes/mixtral_8x22b.py
@@ -15,18 +15,17 @@
from typing import Callable, Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.distributed import DistributedDataParallelConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
-from nemo.collections.llm.gpt.data.squad import SquadDataModule
from nemo.collections.llm.gpt.model.mixtral import MixtralConfig8x22B, MixtralModel
-from nemo.collections.llm.peft.lora import LoRA
+from nemo.collections.llm.peft import PEFT_STR2CLS
from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
@@ -226,7 +225,9 @@ def pretrain_performance_optimizations(recipe: run.Partial) -> run.Partial:
MegatronTokenDropCallback,
),
run.Config(
- MegatronCommOverlapCallback, overlap_param_gather_with_optimizer_step=True, align_param_gather=True
+ MegatronCommOverlapCallback,
+ overlap_param_gather_with_optimizer_step=False, # Currently disabled due to an issue with checkpointing
+ align_param_gather=True,
),
]
)
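overlap_param_gather_with_optimizer_step is switched off here because of a checkpointing issue, not for performance reasons. A run that never saves or restores checkpoints could opt back in after building the recipe. A hedged sketch, relying on two assumptions: run.Config attributes can be reassigned after construction (as the finetune recipes in this diff do for strategy fields), and nemo_run configs expose the wrapped class via the fiddle-style `__fn_or_cls__` handle:

```python
from nemo.collections.llm.recipes import mixtral_8x22b

recipe = mixtral_8x22b.pretrain_recipe()
recipe = mixtral_8x22b.pretrain_performance_optimizations(recipe)
for cb in recipe.trainer.callbacks:
    fn = getattr(cb, "__fn_or_cls__", None)  # wrapped class, if exposed
    if fn is not None and fn.__name__ == "MegatronCommOverlapCallback":
        cb.overlap_param_gather_with_optimizer_step = True  # at your own risk
```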
@@ -257,7 +258,10 @@ def finetune_recipe(
name (str): Name of the fine-tuning run.
num_nodes (int): Number of compute nodes to use.
num_gpus_per_node (int): Number of GPUs per node.
- peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora', 'none'/None.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+ packed_sequence (Optional[bool]): If True, fine-tuning sequences will be packed into batches up to the given
+ maximum seq_length for better efficiency.
Returns:
run.Partial: Partial configuration for fine-tuning.
@@ -283,8 +287,10 @@ def finetune_recipe(
recipe.trainer.strategy.pipeline_model_parallel_size = 4
recipe.trainer.strategy.virtual_pipeline_model_parallel_size = 14
recipe.optim.config.lr = 5e-6
- elif peft_scheme.lower() == 'lora':
- recipe.peft = run.Config(LoRA, target_modules=['linear_qkv', 'linear_proj'], dim=32)
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(
+ PEFT_STR2CLS[peft_scheme.lower()], target_modules=['linear_qkv', 'linear_proj'], dim=32
+ )
recipe.optim.config.lr = 1e-4
else:
raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
diff --git a/nemo/collections/llm/recipes/mixtral_8x7b.py b/nemo/collections/llm/recipes/mixtral_8x7b.py
index 9b41e03882a1..d06e22fc2180 100644
--- a/nemo/collections/llm/recipes/mixtral_8x7b.py
+++ b/nemo/collections/llm/recipes/mixtral_8x7b.py
@@ -15,18 +15,17 @@
from typing import Callable, Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.distributed import DistributedDataParallelConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
-from nemo.collections.llm.gpt.data.squad import SquadDataModule
from nemo.collections.llm.gpt.model.mixtral import MixtralConfig8x7B, MixtralModel
-from nemo.collections.llm.peft.lora import LoRA
+from nemo.collections.llm.peft import PEFT_STR2CLS
from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
@@ -222,7 +221,7 @@ def pretrain_performance_optimizations(recipe: run.Partial) -> run.Partial:
run.Config(MegatronTokenDropCallback),
run.Config(
MegatronCommOverlapCallback,
- overlap_param_gather_with_optimizer_step=True,
+ overlap_param_gather_with_optimizer_step=False, # Currently disabled due to an issue with checkpointing.
align_param_gather=True,
),
]
@@ -254,7 +253,10 @@ def finetune_recipe(
name (str): Name of the fine-tuning run.
num_nodes (int): Number of compute nodes to use.
num_gpus_per_node (int): Number of GPUs per node.
- peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora', 'none'/None.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+ packed_sequence (Optional[bool]): If True, pack multiple training sequences into one long sequence for training
+ efficiency. The default packed sequence length is 2048.
Returns:
run.Partial: Partial configuration for fine-tuning.
@@ -279,8 +281,10 @@ def finetune_recipe(
recipe.trainer.strategy.pipeline_model_parallel_size = 4
recipe.trainer.strategy.virtual_pipeline_model_parallel_size = 8
recipe.optim.config.lr = 5e-6
- elif peft_scheme.lower() == 'lora':
- recipe.peft = run.Config(LoRA, target_modules=['linear_qkv', 'linear_proj'], dim=32)
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(
+ PEFT_STR2CLS[peft_scheme.lower()], target_modules=['linear_qkv', 'linear_proj'], dim=32
+ )
recipe.optim.config.lr = 1e-4
else:
raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
diff --git a/nemo/collections/llm/recipes/mixtral_8x7b_16k.py b/nemo/collections/llm/recipes/mixtral_8x7b_16k.py
index 7cbfaf723544..499280cc8542 100644
--- a/nemo/collections/llm/recipes/mixtral_8x7b_16k.py
+++ b/nemo/collections/llm/recipes/mixtral_8x7b_16k.py
@@ -15,8 +15,8 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
from nemo.collections.llm.api import finetune, pretrain
diff --git a/nemo/collections/llm/recipes/mixtral_8x7b_64k.py b/nemo/collections/llm/recipes/mixtral_8x7b_64k.py
index 3606be5ec12b..e0702f7b2a63 100644
--- a/nemo/collections/llm/recipes/mixtral_8x7b_64k.py
+++ b/nemo/collections/llm/recipes/mixtral_8x7b_64k.py
@@ -15,8 +15,8 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
from nemo.collections.llm.api import finetune, pretrain
diff --git a/nemo/collections/llm/recipes/nemotron.py b/nemo/collections/llm/recipes/nemotron.py
index aedf3fcf2954..7982665eb3d5 100644
--- a/nemo/collections/llm/recipes/nemotron.py
+++ b/nemo/collections/llm/recipes/nemotron.py
@@ -14,18 +14,18 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.distributed import DistributedDataParallelConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections.llm.gpt.model.nemotron import (
Nemotron3Config4B,
Nemotron3Config8B,
+ Nemotron3Config22B,
Nemotron4Config15B,
- Nemotron4Config22B,
Nemotron4Config340B,
NemotronModel,
)
@@ -37,9 +37,9 @@ def nemotron_model(version: str) -> run.Config[pl.LightningModule]:
A function to create Nemotron models.
Args:
- version (str): The version of the Nemotron model to create. one of ["nemotron3_4b", "nemotron3_8b",
+ version (str): The version of the Nemotron model to create. One of ["nemotron3_4b", "nemotron3_8b",
+ "nemotron3_22b", "nemotron3_22b_16k", "nemotron3_22b_64k",
"nemotron4_15b", "nemotron4_15b_16k", "nemotron4_15b_64k",
- "nemotron4_22b", "nemotron4_22b_16k", "nemotron4_22b_64k",
"nemotron4_340b"].
Returns:
@@ -50,18 +50,18 @@ def nemotron_model(version: str) -> run.Config[pl.LightningModule]:
config = run.Config(Nemotron3Config4B)
elif version == "nemotron3_8b":
config = run.Config(Nemotron3Config8B)
+ elif version == "nemotron3_22b":
+ config = run.Config(Nemotron3Config22B)
+ elif version == "nemotron3_22b_16k":
+ config = run.Config(Nemotron3Config22B, seq_length=16384)
+ elif version == "nemotron3_22b_64k":
+ config = run.Config(Nemotron3Config22B, seq_length=65536)
elif version == "nemotron4_15b":
config = run.Config(Nemotron4Config15B)
elif version == "nemotron4_15b_16k":
config = run.Config(Nemotron4Config15B, seq_length=16384)
elif version == "nemotron4_15b_64k":
config = run.Config(Nemotron4Config15B, seq_length=65536)
- elif version == "nemotron4_22b":
- config = run.Config(Nemotron4Config22B)
- elif version == "nemotron4_22b_16k":
- config = run.Config(Nemotron4Config22B, seq_length=16384)
- elif version == "nemotron4_22b_64k":
- config = run.Config(Nemotron4Config22B, seq_length=65536)
elif version == "nemotron4_340b":
config = run.Config(Nemotron4Config340B)
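The dispatch table above reflects the renaming: the 22B checkpoints are Nemotron-3 generation models, so the three 22B variants now resolve to Nemotron3Config22B, with the long-context versions overriding seq_length. A quick sketch of the resulting behavior:

```python
from nemo.collections.llm.recipes.nemotron import nemotron_model

base = nemotron_model("nemotron3_22b")          # Nemotron3Config22B, default seq_length
long_ctx = nemotron_model("nemotron3_22b_64k")  # same config with seq_length=65536
```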
diff --git a/nemo/collections/llm/recipes/nemotron4_22b.py b/nemo/collections/llm/recipes/nemotron3_22b.py
similarity index 71%
rename from nemo/collections/llm/recipes/nemotron4_22b.py
rename to nemo/collections/llm/recipes/nemotron3_22b.py
index a20afedfea56..4c763301bc52 100644
--- a/nemo/collections/llm/recipes/nemotron4_22b.py
+++ b/nemo/collections/llm/recipes/nemotron3_22b.py
@@ -12,34 +12,36 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from typing import Callable, Optional
+from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
-from nemo.collections.llm.api import pretrain
+from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
+from nemo.collections.llm.peft import PEFT_STR2CLS
+from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.nemotron import nemotron_model, nemotron_trainer
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
from nemo.lightning.pytorch.callbacks.megatron_comm_overlap import MegatronCommOverlapCallback
from nemo.utils.exp_manager import TimingCallback
-NAME = "nemotron4_22b"
+NAME = "nemotron3_22b"
@run.cli.factory(name=NAME)
def model() -> run.Config[pl.LightningModule]:
"""
- Factory function to create a Nemotron4 22b model configuration.
+ Factory function to create a Nemotron3 22B model configuration.
Returns:
- run.Config[pl.LightningModule]: Configuration for the Nemotron4 22b model.
+ run.Config[pl.LightningModule]: Configuration for the Nemotron3 22B model.
Examples:
CLI usage:
- $ nemo llm pretrain model=nemotron4_22b ...
+ $ nemo llm pretrain model=nemotron3_22b ...
Python API usage:
>>> model_config = model()
@@ -85,7 +87,7 @@ def pretrain_recipe(
fn=pretrain,
) -> run.Partial:
"""
- Create a pre-training recipe for Nemotron4 22b model.
+ Create a pre-training recipe for Nemotron3 22B model.
This function sets up a complete configuration for pre-training, including
model, trainer, data, logging, optimization, and resumption settings.
@@ -124,8 +126,8 @@ def pretrain_recipe(
Examples:
CLI usage:
- $ nemo llm pretrain --factory nemotron4_22b
- $ nemo llm pretrain --factory "nemotron4_22b(num_nodes=1, name='my_nemotron_pretrain')"
+ $ nemo llm pretrain --factory nemotron3_22b
+ $ nemo llm pretrain --factory "nemotron3_22b(num_nodes=1, name='my_nemotron_pretrain')"
Python API usage:
>>> recipe = pretrain_recipe(name="nemotron_pretrain", num_nodes=1)
@@ -181,7 +183,7 @@ def pretrain_recipe(
def pretrain_performance_optimizations(recipe: run.Partial) -> run.Partial:
"""
- Create a performance-optimized pre-training recipe for Nemotron4 22B model.
+ Create a performance-optimized pre-training recipe for Nemotron3 22B model.
This method enables performance optimizations that may not be suitable for all use cases.
It builds upon the standard pre-training recipe and adds additional performance enhancements.
@@ -209,8 +211,68 @@ def pretrain_performance_optimizations(recipe: run.Partial) -> run.Partial:
tp_comm_overlap=True,
defer_embedding_wgrad_compute=True,
wgrad_deferral_limit=22,
- overlap_param_gather_with_optimizer_step=True,
+ overlap_param_gather_with_optimizer_step=False, # Currently disabled due to an issue with checkpointing
align_param_gather=True,
)
)
return recipe
+
+
+@run.cli.factory(target=finetune, name=NAME)
+def finetune_recipe(
+ dir: Optional[str] = None,
+ name: str = "default",
+ num_nodes: int = 1,
+ num_gpus_per_node: int = 8,
+ peft_scheme: Optional[str] = 'lora',
+ packed_sequence: bool = False,
+) -> run.Partial:
+ """
+ Create a fine-tuning recipe for Nemotron3 22B model.
+
+ This function sets up a complete configuration for fine-tuning, including
+ model, trainer, data, logging, optimization, and resumption settings.
+ The recipe uses LoRA (Low-Rank Adaptation) for efficient fine-tuning, unless peft_scheme is set to None.
+
+ Args:
+ dir (Optional[str]): Directory for saving logs and checkpoints.
+ name (str): Name of the fine-tuning run.
+ num_nodes (int): Number of compute nodes to use.
+ num_gpus_per_node (int): Number of GPUs per node.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+ packed_sequence (Optional[bool]): If True, pack multiple training sequences into one long sequence for training
+ efficiency. The default packed sequence length is 2048.
+
+ Returns:
+ run.Partial: Partial configuration for fine-tuning.
+
+ Examples:
+ CLI usage:
+ $ nemo llm finetune --factory nemotron3_22b
+
+ Python API usage:
+ >>> recipe = finetune_recipe(name="nemotron3_22b_finetune", num_nodes=8)
+ >>> print(recipe)
+
+ Note:
+ This recipe uses the SQuAD dataset for fine-tuning. For more information
+ on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
+ `examples/llm/finetune/` directory.
+ """
+
+ recipe = default_finetune_recipe(
+ model(), "thhaus/nemotron3-22b-hf", dir, name, num_nodes, num_gpus_per_node, packed_sequence
+ )
+ if peft_scheme is None or peft_scheme.lower() == 'none':
+ recipe.trainer.strategy.tensor_model_parallel_size = 8
+ recipe.optim.config.lr = 5e-6
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
+ recipe.optim.config.lr = 1e-4
+ else:
+ raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
+
+ # some settings currently do not function correctly with finetuning
+ recipe.model.config.cross_entropy_loss_fusion = False
+ return recipe
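A usage sketch for the new factory. With peft_scheme=None the recipe switches to full-parameter fine-tuning, which the branch above configures with tensor parallelism 8 and a 5e-6 learning rate:

```python
from nemo.collections.llm.recipes import nemotron3_22b

recipe = nemotron3_22b.finetune_recipe(
    name="nemotron3_22b_sft",
    num_nodes=1,
    num_gpus_per_node=8,
    peft_scheme=None,  # full fine-tuning; 'lora'/'dora' select PEFT instead
)
assert recipe.trainer.strategy.tensor_model_parallel_size == 8
```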
diff --git a/nemo/collections/llm/recipes/nemotron4_22b_16k.py b/nemo/collections/llm/recipes/nemotron3_22b_16k.py
similarity index 93%
rename from nemo/collections/llm/recipes/nemotron4_22b_16k.py
rename to nemo/collections/llm/recipes/nemotron3_22b_16k.py
index 42f258c6057d..5ae58d1a757d 100644
--- a/nemo/collections/llm/recipes/nemotron4_22b_16k.py
+++ b/nemo/collections/llm/recipes/nemotron3_22b_16k.py
@@ -14,8 +14,8 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
from nemo.collections.llm.api import pretrain
@@ -25,20 +25,20 @@
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
from nemo.utils.exp_manager import TimingCallback
-NAME = "nemotron4_22b_16k"
+NAME = "nemotron3_22b_16k"
@run.cli.factory(name=NAME)
def model() -> run.Config[pl.LightningModule]:
"""
- Factory function to create a Nemotron4 22b model with 16k sequence length.
+ Factory function to create a Nemotron3 22B model with 16k sequence length.
Returns:
- run.Config[pl.LightningModule]: Configuration for the Nemotron4 22b and 16k sequence length model.
+ run.Config[pl.LightningModule]: Configuration for the Nemotron3 22B model with 16k sequence length.
Examples:
CLI usage:
- $ nemo llm pretrain model=nemotron4_22b_16k ...
+ $ nemo llm pretrain model=nemotron3_22b_16k ...
Python API usage:
>>> model_config = model()
@@ -83,7 +83,7 @@ def pretrain_recipe(
fn=pretrain,
) -> run.Partial:
"""
- Create a pre-training recipe for Nemotron4 22b model with 16k sequence length.
+ Create a pre-training recipe for Nemotron3 22B model with 16k sequence length.
This function sets up a complete configuration for pre-training, including
model, trainer, data, logging, optimization, and resumption settings.
@@ -121,8 +121,8 @@ def pretrain_recipe(
Examples:
CLI usage:
- $ nemo llm pretrain --factory nemotron4_22b_16k
- $ nemo llm pretrain --factory "nemotron4_22b_16k(num_nodes=1, name='my_nemotron_pretrain')"
+ $ nemo llm pretrain --factory nemotron3_22b_16k
+ $ nemo llm pretrain --factory "nemotron3_22b_16k(num_nodes=1, name='my_nemotron_pretrain')"
Python API usage:
>>> recipe = pretrain_recipe(name="nemotron_pretrain", num_nodes=1)
diff --git a/nemo/collections/llm/recipes/nemotron4_22b_64k.py b/nemo/collections/llm/recipes/nemotron3_22b_64k.py
similarity index 93%
rename from nemo/collections/llm/recipes/nemotron4_22b_64k.py
rename to nemo/collections/llm/recipes/nemotron3_22b_64k.py
index 67d60a6e1c90..22f6291cfadb 100644
--- a/nemo/collections/llm/recipes/nemotron4_22b_64k.py
+++ b/nemo/collections/llm/recipes/nemotron3_22b_64k.py
@@ -14,8 +14,8 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
from nemo.collections.llm.api import pretrain
@@ -25,20 +25,20 @@
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
from nemo.utils.exp_manager import TimingCallback
-NAME = "nemotron4_22b_64k"
+NAME = "nemotron3_22b_64k"
@run.cli.factory(name=NAME)
def model() -> run.Config[pl.LightningModule]:
"""
- Factory function to create a Nemotron4 22b model with 64k sequence length.
+ Factory function to create a Nemotron3 22B model with 64k sequence length.
Returns:
- run.Config[pl.LightningModule]: Configuration for the Nemotron4 22b and 64k sequence length model.
+ run.Config[pl.LightningModule]: Configuration for the Nemotron3 22B model with 64k sequence length.
Examples:
CLI usage:
- $ nemo llm pretrain model=nemotron4_22b_64k ...
+ $ nemo llm pretrain model=nemotron3_22b_64k ...
Python API usage:
>>> model_config = model()
@@ -83,7 +83,7 @@ def pretrain_recipe(
fn=pretrain,
) -> run.Partial:
"""
- Create a pre-training recipe for Nemotron4 22b model with 16k sequence length.
+ Create a pre-training recipe for Nemotron3 22B model with 64k sequence length.
This function sets up a complete configuration for pre-training, including
model, trainer, data, logging, optimization, and resumption settings.
@@ -121,8 +121,8 @@ def pretrain_recipe(
Examples:
CLI usage:
- $ nemo llm pretrain --factory nemotron4_22b_64k
- $ nemo llm pretrain --factory "nemotron4_22b_64k(num_nodes=2, name='my_nemotron_pretrain')"
+ $ nemo llm pretrain --factory nemotron3_22b_64k
+ $ nemo llm pretrain --factory "nemotron3_22b_64k(num_nodes=2, name='my_nemotron_pretrain')"
Python API usage:
>>> recipe = pretrain_recipe(name="nemotron_pretrain", num_nodes=2)
diff --git a/nemo/collections/llm/recipes/nemotron3_4b.py b/nemo/collections/llm/recipes/nemotron3_4b.py
index b5521c0d087a..fc6f09a09358 100644
--- a/nemo/collections/llm/recipes/nemotron3_4b.py
+++ b/nemo/collections/llm/recipes/nemotron3_4b.py
@@ -14,12 +14,14 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
-from nemo.collections.llm.api import pretrain
+from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
+from nemo.collections.llm.peft import PEFT_STR2CLS
+from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.nemotron import nemotron_model, nemotron_trainer
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
@@ -166,3 +168,62 @@ def pretrain_recipe(
),
resume=default_resume(),
)
+
+
+@run.cli.factory(target=finetune, name=NAME)
+def finetune_recipe(
+ dir: Optional[str] = None,
+ name: str = "default",
+ num_nodes: int = 1,
+ num_gpus_per_node: int = 8,
+ peft_scheme: Optional[str] = 'lora',
+ packed_sequence: bool = False,
+) -> run.Partial:
+ """
+ Create a fine-tuning recipe for Nemotron3 4B model.
+
+ This function sets up a complete configuration for fine-tuning, including
+ model, trainer, data, logging, optimization, and resumption settings.
+ The recipe uses LoRA (Low-Rank Adaptation) for efficient fine-tuning, unless peft_scheme is set to None.
+
+ Args:
+ dir (Optional[str]): Directory for saving logs and checkpoints.
+ name (str): Name of the fine-tuning run.
+ num_nodes (int): Number of compute nodes to use.
+ num_gpus_per_node (int): Number of GPUs per node.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+ packed_sequence (Optional[bool]): If True, pack multiple training sequences into one long sequence for training
+ efficiency. The default packed sequence length is 2048.
+
+ Returns:
+ run.Partial: Partial configuration for fine-tuning.
+
+ Examples:
+ CLI usage:
+ $ nemo llm finetune --factory nemotron3_4b
+
+ Python API usage:
+ >>> recipe = finetune_recipe(name="nemotron3_4b_finetune", num_nodes=2)
+ >>> print(recipe)
+
+ Note:
+ This recipe uses the SQuAD dataset for fine-tuning. For more information
+ on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
+ `examples/llm/finetune/` directory.
+ """
+
+ recipe = default_finetune_recipe(
+ model(), "nvidia/Minitron-4B-Base", dir, name, num_nodes, num_gpus_per_node, packed_sequence
+ )
+ if peft_scheme is None or peft_scheme.lower() == 'none':
+ recipe.optim.config.lr = 5e-6
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
+ recipe.optim.config.lr = 1e-4
+ else:
+ raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
+
+ # some settings currently do not function correctly with finetuning
+ recipe.model.config.cross_entropy_loss_fusion = False
+ return recipe
diff --git a/nemo/collections/llm/recipes/nemotron3_8b.py b/nemo/collections/llm/recipes/nemotron3_8b.py
index 7dcebe17f872..f60463330cad 100644
--- a/nemo/collections/llm/recipes/nemotron3_8b.py
+++ b/nemo/collections/llm/recipes/nemotron3_8b.py
@@ -12,17 +12,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from typing import Callable, Optional
+from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
from nemo import lightning as nl
from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
-from nemo.collections.llm.gpt.data.squad import SquadDataModule
-from nemo.collections.llm.peft.lora import LoRA
+from nemo.collections.llm.peft import PEFT_STR2CLS
+from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.nemotron import nemotron_model, nemotron_trainer
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
@@ -236,71 +236,29 @@ def nemo_resume() -> run.Config[nl.AutoResume]:
@run.cli.factory(target=finetune, name=NAME)
def finetune_recipe(
- # General
dir: Optional[str] = None,
name: str = "default",
- # Trainer
- tensor_parallelism: int = 2,
- pipeline_parallelism: int = 1,
- pipeline_parallelism_type: Optional[torch.dtype] = None,
- virtual_pipeline_parallelism: Optional[int] = None,
- context_parallelism: int = 1,
- sequence_parallelism: bool = False,
num_nodes: int = 1,
num_gpus_per_node: int = 8,
- max_steps: int = 300000,
- precision: str = "bf16-mixed",
- accumulate_grad_batches: int = 1,
- gradient_clip_val: float = 1.0,
- limit_test_batches: int = 32,
- limit_val_batches: int = 32,
- log_every_n_steps: int = 10,
- val_check_interval: int = 2000,
- # Data
- global_batch_size=32,
- micro_batch_size=2,
- seq_length=4096,
- # Optimizer
- warmup_steps=500,
- constant_steps=0,
- min_lr=3.0e-5,
- max_lr=3e-4,
- # Training function
- fn=finetune,
+ peft_scheme: Optional[str] = 'lora',
+ packed_sequence: bool = False,
) -> run.Partial:
"""
Create a fine-tuning recipe for Nemotron3 8B model.
This function sets up a complete configuration for fine-tuning, including
- model, trainer, and data settings.
+ model, trainer, data, logging, optimization, and resumption settings.
+ The recipe uses LoRA (Low-Rank Adaptation) for efficient fine-tuning, unless peft_scheme is set to None.
Args:
dir (Optional[str]): Directory for saving logs and checkpoints.
- name (str): Name of the pre-training run.
- tensor_parallelism (int): Degree of tensor model parallelism.
- pipeline_parallelism (int): Degree of pipeline model parallelism.
- pipeline_parallelism_type (Optional[torch.dtype]): Data type for pipeline parallelism.
- virtual_pipeline_parallelism (Optional[int]): Size of virtual pipeline parallelism.
- context_parallelism (int): Degree of context parallelism.
- sequence_parallelism (bool): Whether to use sequence parallelism.
+ name (str): Name of the fine-tuning run.
num_nodes (int): Number of compute nodes to use.
num_gpus_per_node (int): Number of GPUs per node.
- max_steps (int): Maximum number of training steps.
- precision (str): Precision configuration, one of fp32, 16-mixed or bf16-mixed.
- accumulate_grad_batches (int): Number of steps per gradient accumulation.
- gradient_clip_val (float): Value for gradient clipping.
- limit_test_batches (int): Limit the number of test batches.
- limit_val_batches (int): Limit the number of validation batches.
- log_every_n_steps (int): Log every n steps.
- val_check_interval (int): Run validation every N steps.
- global_batch_size (int): Global batch size.
- micro_batch_size (int): Micro batch size.
- seq_length (int): Sequence length.
- warmup_steps (int): Number of warmup steps.
- constant_steps (int): Number of constant steps.
- min_lr (float): Minimum learning rate.
- max_lr (float): Maximum learning rate.
- fn (Callable): The pre-training function to use.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+ packed_sequence (Optional[bool]): If True, pack multiple training sequences into one long sequence for training
+ efficiency. The default packed sequence length is 2048.
Returns:
run.Partial: Partial configuration for fine-tuning.
@@ -308,47 +266,29 @@ def finetune_recipe(
Examples:
CLI usage:
$ nemo llm finetune --factory nemotron3_8b
- $ nemo llm finetune --factory "nemotron3_8b(name='my_nemotron3_8b_finetune', num_nodes=4)"
Python API usage:
- >>> recipe = finetune_recipe(name="my_nemotron3_8b_finetune", num_nodes=4)
+ >>> recipe = finetune_recipe(name="nemotron3_8b_finetune", num_nodes=2)
>>> print(recipe)
Note:
- This recipe is optimized for fine-tuning Nemotron3 8b model.
- This recipe uses the SQuAD dataset.
+ This recipe uses the SQuAD dataset for fine-tuning. For more information
+ on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
+ `examples/llm/finetune/` directory.
"""
- recipe = pretrain_recipe(
- dir=dir,
- name=name,
- tensor_parallelism=tensor_parallelism,
- pipeline_parallelism=pipeline_parallelism,
- pipeline_parallelism_type=pipeline_parallelism_type,
- virtual_pipeline_parallelism=virtual_pipeline_parallelism,
- context_parallelism=context_parallelism,
- sequence_parallelism=sequence_parallelism,
- num_nodes=num_nodes,
- num_gpus_per_node=num_gpus_per_node,
- max_steps=max_steps,
- precision=precision,
- accumulate_grad_batches=accumulate_grad_batches,
- gradient_clip_val=gradient_clip_val,
- limit_test_batches=limit_test_batches,
- limit_val_batches=limit_val_batches,
- log_every_n_steps=log_every_n_steps,
- val_check_interval=val_check_interval,
- global_batch_size=global_batch_size,
- micro_batch_size=micro_batch_size,
- seq_length=seq_length,
- warmup_steps=warmup_steps,
- constant_steps=constant_steps,
- min_lr=min_lr,
- max_lr=max_lr,
- fn=fn,
- )
- recipe.resume = nemo_resume()
- recipe.peft = run.Config(LoRA)
- recipe.data = run.Config(
- SquadDataModule, seq_length=seq_length, global_batch_size=global_batch_size, micro_batch_size=micro_batch_size
+
+ recipe = default_finetune_recipe(
+ model(), "thhaus/nemotron3-8b", dir, name, num_nodes, num_gpus_per_node, packed_sequence
)
+ if peft_scheme is None or peft_scheme.lower() == 'none':
+ recipe.trainer.strategy.tensor_model_parallel_size = 2
+ recipe.optim.config.lr = 5e-6
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
+ recipe.optim.config.lr = 1e-4
+ else:
+ raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
+
+ # some settings currently do not function correctly with finetuning
+ recipe.model.config.cross_entropy_loss_fusion = False
return recipe
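The rewrite collapses the old copy of the pretraining argument surface into a call to default_finetune_recipe, so the public signature shrinks to the options that actually differ per run. A sketch of the slimmed-down entry point, with packed sequences enabled (default packing length 2048 per the docstring):

```python
from nemo.collections.llm.recipes import nemotron3_8b

recipe = nemotron3_8b.finetune_recipe(
    name="nemotron3_8b_lora_packed",
    num_nodes=1,
    num_gpus_per_node=8,
    peft_scheme="lora",    # default; applies LoRA via PEFT_STR2CLS
    packed_sequence=True,  # pack multiple samples per sequence
)
```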
diff --git a/nemo/collections/llm/recipes/nemotron4_15b.py b/nemo/collections/llm/recipes/nemotron4_15b.py
index 16ae7b2b1e79..49f92fcc1616 100644
--- a/nemo/collections/llm/recipes/nemotron4_15b.py
+++ b/nemo/collections/llm/recipes/nemotron4_15b.py
@@ -12,14 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from typing import Callable, Optional
+from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
-from nemo.collections.llm.api import pretrain
+from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
+from nemo.collections.llm.peft import PEFT_STR2CLS
+from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.nemotron import nemotron_model, nemotron_trainer
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
@@ -203,3 +205,63 @@ def pretrain_performance_optimizations(recipe: run.Partial) -> run.Partial:
)
)
return recipe
+
+
+@run.cli.factory(target=finetune, name=NAME)
+def finetune_recipe(
+ dir: Optional[str] = None,
+ name: str = "default",
+ num_nodes: int = 1,
+ num_gpus_per_node: int = 8,
+ peft_scheme: Optional[str] = 'lora',
+ packed_sequence: bool = False,
+) -> run.Partial:
+ """
+ Create a fine-tuning recipe for Nemotron4 15B model.
+
+ This function sets up a complete configuration for fine-tuning, including
+ model, trainer, data, logging, optimization, and resumption settings.
+ The recipe uses LoRA (Low-Rank Adaptation) for efficient fine-tuning, unless peft_scheme is set to None.
+
+ Args:
+ dir (Optional[str]): Directory for saving logs and checkpoints.
+ name (str): Name of the fine-tuning run.
+ num_nodes (int): Number of compute nodes to use.
+ num_gpus_per_node (int): Number of GPUs per node.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+ packed_sequence (Optional[bool]): If True, pack multiple training sequences into one long sequence for training
+ efficiency. The default packed sequence length is 2048.
+
+ Returns:
+ run.Partial: Partial configuration for fine-tuning.
+
+ Examples:
+ CLI usage:
+ $ nemo llm finetune --factory nemotron4_15b
+
+ Python API usage:
+ >>> recipe = finetune_recipe(name="nemotron4_15b_finetune", num_nodes=2)
+ >>> print(recipe)
+
+ Note:
+ This recipe uses the SQuAD dataset for fine-tuning. For more information
+ on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
+ `examples/llm/finetune/` directory.
+ """
+
+ recipe = default_finetune_recipe(
+ model(), "thhaus/nemotron4-15b-base-hf", dir, name, num_nodes, num_gpus_per_node, packed_sequence
+ )
+ if peft_scheme is None or peft_scheme.lower() == 'none':
+ recipe.trainer.strategy.tensor_model_parallel_size = 4
+ recipe.optim.config.lr = 5e-6
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
+ recipe.optim.config.lr = 1e-4
+ else:
+ raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
+
+ # some settings currently do not function correctly with finetuning
+ recipe.model.config.cross_entropy_loss_fusion = False
+ return recipe
diff --git a/nemo/collections/llm/recipes/nemotron4_15b_16k.py b/nemo/collections/llm/recipes/nemotron4_15b_16k.py
index 75eced72761f..e16c2b03b032 100644
--- a/nemo/collections/llm/recipes/nemotron4_15b_16k.py
+++ b/nemo/collections/llm/recipes/nemotron4_15b_16k.py
@@ -14,8 +14,8 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
from nemo.collections.llm.api import pretrain
diff --git a/nemo/collections/llm/recipes/nemotron4_15b_64k.py b/nemo/collections/llm/recipes/nemotron4_15b_64k.py
index 8286778aa7ba..2cedfbed398b 100644
--- a/nemo/collections/llm/recipes/nemotron4_15b_64k.py
+++ b/nemo/collections/llm/recipes/nemotron4_15b_64k.py
@@ -14,8 +14,8 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
from nemo.collections.llm.api import pretrain
diff --git a/nemo/collections/llm/recipes/nemotron4_340b.py b/nemo/collections/llm/recipes/nemotron4_340b.py
index 8268b2a87791..14d4c0f32d11 100644
--- a/nemo/collections/llm/recipes/nemotron4_340b.py
+++ b/nemo/collections/llm/recipes/nemotron4_340b.py
@@ -12,17 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from typing import Callable, Optional
+from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
-from nemo import lightning as nl
from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
-from nemo.collections.llm.gpt.data.squad import SquadDataModule
-from nemo.collections.llm.peft.lora import LoRA
+from nemo.collections.llm.peft import PEFT_STR2CLS
+from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.nemotron import nemotron_model, nemotron_trainer
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
@@ -62,8 +61,8 @@ def pretrain_recipe(
pipeline_parallelism: int = 12,
pipeline_parallelism_type: Optional[torch.dtype] = torch.bfloat16,
virtual_pipeline_parallelism: Optional[int] = 8,
- context_parallelism: int = 1,
- sequence_parallelism: bool = False,
+ context_parallelism: int = 2,
+ sequence_parallelism: bool = True,
num_nodes: int = 768,
num_gpus_per_node: int = 8,
max_steps: int = 100000,
@@ -212,153 +211,72 @@ def pretrain_performance_optimizations(recipe: run.Partial) -> run.Partial:
tp_comm_overlap=True,
defer_embedding_wgrad_compute=True,
wgrad_deferral_limit=22,
- overlap_param_gather_with_optimizer_step=True,
+ overlap_param_gather_with_optimizer_step=False, # Currently disabled due to an issue with checkpointing
align_param_gather=True,
)
)
return recipe
-@run.cli.factory(name=NAME + "_nemo")
-def nemo_resume() -> run.Config[nl.AutoResume]:
- """
- Configure automatic resumption from a NeMo checkpoint converted from Huggingface for Nemotron4 340B model.
-
- More info about the Huggingface model can be found at: https://huggingface.co/nvidia/Nemotron-4-340B-Base.
-
- This NeMo checkpoint should be converted from Huggingface beforehand, using nemo.collections.llm.import_ckpt.
- When converting the checkpoint, the NeMo checkpoint will be saved in NEMO_HOME (set to ~/.cache/nemo by default).
-
- This function sets up the configuration to resume training from path nemo://nvidia/Nemotron-4-340B-Base.
- This translates to the full path {NEMO_HOME}/models/nvidia/Nemotron-4-340B-Base.
-
- Returns:
- run.Config[nl.AutoResume]: Configuration for resuming from NeMo checkpoint.
-
- Note:
- This is particularly useful for fine-tuning scenarios where you want to
- start from the pre-trained Nemotron4 340B model.
- """
- return run.Config(
- nl.AutoResume, restore_config=run.Config(nl.RestoreConfig, path="nemo://nvidia/Nemotron-4-340B-Base")
- )
-
-
@run.cli.factory(target=finetune, name=NAME)
def finetune_recipe(
- # General
dir: Optional[str] = None,
name: str = "default",
- # Trainer
- tensor_parallelism: int = 8,
- pipeline_parallelism: int = 12,
- pipeline_parallelism_type: Optional[torch.dtype] = torch.bfloat16,
- virtual_pipeline_parallelism: Optional[int] = 8,
- context_parallelism: int = 1,
- sequence_parallelism: bool = False,
- num_nodes: int = 768,
+ num_nodes: int = 4,
num_gpus_per_node: int = 8,
- max_steps: int = 100000,
- precision: str = "bf16-mixed",
- accumulate_grad_batches: int = 1,
- gradient_clip_val: float = 1.0,
- limit_test_batches: int = 32,
- limit_val_batches: int = 32,
- log_every_n_steps: int = 10,
- val_check_interval: int = 2000,
- # Data
- global_batch_size=2304,
- micro_batch_size=1,
- seq_length=4096,
- # Optimizer
- warmup_steps=500,
- constant_steps=0,
- min_lr=1.0e-5,
- max_lr=1.0e-4,
- # Training function
- fn=finetune,
+ peft_scheme: Optional[str] = 'lora',
+ packed_sequence: bool = False,
) -> run.Partial:
"""
Create a fine-tuning recipe for Nemotron4 340B model.
This function sets up a complete configuration for fine-tuning, including
- model, trainer, and data settings.
+ model, trainer, data, logging, optimization, and resumption settings.
+ The recipe uses LoRA (Low-Rank Adaptation) for efficient fine-tuning, unless peft_scheme is set to None.
Args:
dir (Optional[str]): Directory for saving logs and checkpoints.
- name (str): Name of the pre-training run.
- tensor_parallelism (int): Degree of tensor model parallelism.
- pipeline_parallelism (int): Degree of pipeline model parallelism.
- pipeline_parallelism_type (Optional[torch.dtype]): Data type for pipeline parallelism.
- virtual_pipeline_parallelism (Optional[int]): Size of virtual pipeline parallelism.
- context_parallelism (int): Degree of context parallelism.
- sequence_parallelism (bool): Whether to use sequence parallelism.
+ name (str): Name of the fine-tuning run.
num_nodes (int): Number of compute nodes to use.
num_gpus_per_node (int): Number of GPUs per node.
- max_steps (int): Maximum number of training steps.
- precision (str): Precision configuration, one of fp32, 16-mixed or bf16-mixed.
- accumulate_grad_batches (int): Number of steps per gradient accumulation.
- gradient_clip_val (float): Value for gradient clipping.
- limit_test_batches (int): Limit the number of test batches.
- limit_val_batches (int): Limit the number of validation batches.
- log_every_n_steps (int): Log every n steps.
- val_check_interval (int): Run validation every N steps.
- global_batch_size (int): Global batch size.
- micro_batch_size (int): Micro batch size.
- seq_length (int): Sequence length.
- warmup_steps (int): Number of warmup steps.
- constant_steps (int): Number of constant steps.
- min_lr (float): Minimum learning rate.
- max_lr (float): Maximum learning rate.
- fn (Callable): The pre-training function to use.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+ packed_sequence (Optional[bool]): If True, pack multiple training sequences into one long sequence for training
+ efficiency. The default packed sequence length is 2048.
Returns:
run.Partial: Partial configuration for fine-tuning.
Examples:
CLI usage:
- $ nemo llm finetune --factory nemotron4_340b
- $ nemo llm finetune --factory "nemotron4_340b(name='my_nemotron4_340_finetune', num_nodes=4)"
+ $ nemo llm finetune --factory nemotron4_340b
Python API usage:
- >>> recipe = finetune_recipe(name="my_nemotron4_340_finetune", num_nodes=4)
+ >>> recipe = finetune_recipe(name="nemotron4_340b_finetune", num_nodes=2)
>>> print(recipe)
Note:
- This recipe is optimized for fine-tuning Nemotron4 8b model.
- This recipe uses the SQuAD dataset.
+ This recipe uses the SQuAD dataset for fine-tuning. For more information
+ on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
+ `examples/llm/finetune/` directory.
"""
- recipe = pretrain_recipe(
- dir=dir,
- name=name,
- tensor_parallelism=tensor_parallelism,
- pipeline_parallelism=pipeline_parallelism,
- pipeline_parallelism_type=pipeline_parallelism_type,
- virtual_pipeline_parallelism=virtual_pipeline_parallelism,
- context_parallelism=context_parallelism,
- sequence_parallelism=sequence_parallelism,
- num_nodes=num_nodes,
- num_gpus_per_node=num_gpus_per_node,
- max_steps=max_steps,
- precision=precision,
- accumulate_grad_batches=accumulate_grad_batches,
- gradient_clip_val=gradient_clip_val,
- limit_test_batches=limit_test_batches,
- limit_val_batches=limit_val_batches,
- log_every_n_steps=log_every_n_steps,
- val_check_interval=val_check_interval,
- global_batch_size=global_batch_size,
- micro_batch_size=micro_batch_size,
- seq_length=seq_length,
- warmup_steps=warmup_steps,
- constant_steps=constant_steps,
- min_lr=min_lr,
- max_lr=max_lr,
- fn=fn,
- )
- recipe.resume = nemo_resume()
- recipe.peft = run.Config(LoRA)
- recipe.data = run.Config(
- SquadDataModule, seq_length=seq_length, global_batch_size=global_batch_size, micro_batch_size=micro_batch_size
+
+ recipe = default_finetune_recipe(
+ model(), "mgoin/Nemotron-4-340B-Base-hf", dir, name, num_nodes, num_gpus_per_node, packed_sequence
)
+ if peft_scheme is None or peft_scheme.lower() == 'none':
+ assert num_nodes >= 12
+ recipe.trainer.strategy.tensor_model_parallel_size = 8
+ recipe.trainer.strategy.pipeline_model_parallel_size = 12
+ recipe.optim.config.lr = 5e-6
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
+ recipe.trainer.strategy.tensor_model_parallel_size = 8
+ recipe.trainer.strategy.pipeline_model_parallel_size = 4
+ recipe.optim.config.lr = 1e-4
+ else:
+ raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
+
+ # some settings currently do not function correctly with finetuning
+ recipe.model.config.cross_entropy_loss_fusion = False
return recipe
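The `assert num_nodes >= 12` in the full fine-tuning branch is a resource check: sharding over tensor parallel 8 and pipeline parallel 12 occupies 8 * 12 = 96 GPUs, which at 8 GPUs per node is exactly 12 nodes. As arithmetic:

```python
tensor_parallel = 8
pipeline_parallel = 12
gpus_per_node = 8

min_gpus = tensor_parallel * pipeline_parallel  # 96 GPUs minimum
min_nodes = min_gpus // gpus_per_node           # 96 / 8 = 12 nodes
assert min_nodes == 12
```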
diff --git a/nemo/collections/llm/recipes/optim/adam.py b/nemo/collections/llm/recipes/optim/adam.py
index 4148d19c6635..b5a60b6f8b3f 100644
--- a/nemo/collections/llm/recipes/optim/adam.py
+++ b/nemo/collections/llm/recipes/optim/adam.py
@@ -17,12 +17,7 @@
import nemo_run as run
from megatron.core.optimizer import OptimizerConfig
-from nemo.lightning.pytorch.optim import (
- CosineAnnealingScheduler,
- MegatronOptimizerModule,
- OptimizerModule,
- PytorchOptimizerModule,
-)
+from nemo.lightning.pytorch.optim import CosineAnnealingScheduler, MegatronOptimizerModule, PytorchOptimizerModule
@run.cli.factory
@@ -35,7 +30,7 @@ def distributed_fused_adam_with_cosine_annealing(
max_lr: float = 1e-4,
min_lr: Optional[float] = None,
clip_grad: float = 1.0,
-) -> run.Config[OptimizerModule]:
+) -> run.Config[PytorchOptimizerModule]:
opt_cfg = run.Config(
OptimizerConfig,
@@ -68,19 +63,17 @@ def distributed_fused_adam_with_cosine_annealing(
@run.cli.factory
def pytorch_adam_with_cosine_annealing(
- precision: str = "bf16-mixed", # or "16-mixed"
warmup_steps: int = 2000,
constant_steps: int = 0,
max_lr: float = 1e-5,
min_lr: Optional[float] = None,
- clip_grad: float = 1.0,
-) -> run.Config[OptimizerModule]:
+) -> run.Config[PytorchOptimizerModule]:
from torch.optim import Adam
return run.Config(
PytorchOptimizerModule,
- optim_cls=Adam,
- config=dict(
+ optimizer_fn=run.Partial(
+ Adam,
lr=max_lr,
weight_decay=0.1,
betas=(0.9, 0.95),
@@ -97,20 +90,15 @@ def pytorch_adam_with_cosine_annealing(
@run.cli.factory
def pytorch_adam_with_flat_lr(
- precision: str = "bf16-mixed", # or "16-mixed"
- warmup_steps: int = 2000,
- constant_steps: int = 0,
- max_lr: float = 1e-5,
- min_lr: Optional[float] = None,
- clip_grad: float = 1.0,
-) -> run.Config[OptimizerModule]:
+ lr: float = 1e-5,
+) -> run.Config[PytorchOptimizerModule]:
from torch.optim import Adam
return run.Config(
PytorchOptimizerModule,
- optim_cls=Adam,
- config=dict(
- lr=max_lr,
+ optimizer_fn=run.Partial(
+ Adam,
+ lr=lr,
weight_decay=0.1,
betas=(0.9, 0.95),
eps=1e-8,
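The optimizer refactor replaces the old `optim_cls` plus config-dict construction with `optimizer_fn=run.Partial(...)`, so the optimizer and its hyperparameters become one serializable, overridable object. The pattern in isolation, as a sketch:

```python
import nemo_run as run
from torch.optim import Adam

from nemo.lightning.pytorch.optim import PytorchOptimizerModule

# The optimizer is now a Partial over the torch class; nemo_run can
# serialize it and override individual arguments from the CLI.
optim = run.Config(
    PytorchOptimizerModule,
    optimizer_fn=run.Partial(Adam, lr=1e-5, weight_decay=0.1, betas=(0.9, 0.95), eps=1e-8),
)
```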
diff --git a/nemo/collections/llm/recipes/optim/sgd.py b/nemo/collections/llm/recipes/optim/sgd.py
new file mode 100644
index 000000000000..7c55c6915ee1
--- /dev/null
+++ b/nemo/collections/llm/recipes/optim/sgd.py
@@ -0,0 +1,62 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Optional
+
+import nemo_run as run
+
+from nemo.lightning.pytorch.optim import CosineAnnealingScheduler, PytorchOptimizerModule
+
+
+@run.cli.factory
+def pytorch_sgd_with_cosine_annealing(
+ warmup_steps: int = 2000,
+ constant_steps: int = 0,
+ max_lr: float = 1e-5,
+ min_lr: Optional[float] = None,
+ wd: float = 1e-4,
+) -> run.Config[PytorchOptimizerModule]:
+ from torch.optim import SGD
+
+ return run.Config(
+ PytorchOptimizerModule,
+ optimizer_fn=run.Partial(
+ SGD,
+ lr=max_lr,
+ weight_decay=wd,
+ ),
+ lr_scheduler=run.Config(
+ CosineAnnealingScheduler,
+ warmup_steps=warmup_steps,
+ constant_steps=constant_steps,
+ min_lr=min_lr or (0.1 * max_lr),
+ ),
+ )
+
+
+@run.cli.factory
+def pytorch_sgd_with_flat_lr(
+ lr: float = 1e-5,
+ wd: float = 1e-4,
+) -> run.Config[PytorchOptimizerModule]:
+ from torch.optim import SGD
+
+ return run.Config(
+ PytorchOptimizerModule,
+ optimizer_fn=run.Partial(
+ SGD,
+ lr=lr,
+ weight_decay=wd,
+ ),
+ )
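Note (illustrative, not part of the patch): a usage sketch for the new SGD factories; the hyperparameter values are arbitrary.

```python
from nemo.collections.llm.recipes.optim.sgd import (
    pytorch_sgd_with_cosine_annealing,
    pytorch_sgd_with_flat_lr,
)

# Cosine-annealed SGD; min_lr defaults to 0.1 * max_lr when not given.
annealed = pytorch_sgd_with_cosine_annealing(max_lr=1e-4, warmup_steps=500)

# Constant-LR SGD, e.g. for quick ablations.
flat = pytorch_sgd_with_flat_lr(lr=1e-4, wd=1e-4)
```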
diff --git a/nemo/collections/llm/recipes/phi3_mini_4k_instruct.py b/nemo/collections/llm/recipes/phi3_mini_4k_instruct.py
new file mode 100644
index 000000000000..73bbe4735adb
--- /dev/null
+++ b/nemo/collections/llm/recipes/phi3_mini_4k_instruct.py
@@ -0,0 +1,283 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Callable, Optional
+
+import lightning.pytorch as pl
+import nemo_run as run
+import torch
+from lightning.pytorch.callbacks.callback import Callback
+from megatron.core.distributed import DistributedDataParallelConfig
+
+from nemo import lightning as nl
+from nemo.collections.llm.api import finetune, pretrain
+from nemo.collections.llm.gpt.data.mock import MockDataModule
+from nemo.collections.llm.gpt.data.packed_sequence import PackedSequenceSpecs
+from nemo.collections.llm.gpt.model.phi3mini import Phi3ConfigMini, Phi3Model
+from nemo.collections.llm.peft import PEFT_STR2CLS
+from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
+from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
+from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
+from nemo.collections.llm.recipes.precision.mixed_precision import bf16_mixed
+from nemo.utils.exp_manager import TimingCallback
+
+NAME = "phi3_mini_4k_instruct"
+
+
+@run.cli.factory(name=NAME)
+def model() -> run.Config[pl.LightningModule]:
+ """
+ Factory function to create a Phi3 Mini 4k instruct model configuration.
+
+ Returns:
+ run.Config[pl.LightningModule]: Configuration for the Phi3 mini 4k instruct model.
+
+ Examples:
+ CLI usage:
+ $ nemo llm pretrain model=phi3_mini_4k_instruct ...
+
+ Python API usage:
+ >>> model_config = model()
+ >>> print(model_config)
+ """
+ return run.Config(Phi3Model, config=run.Config(Phi3ConfigMini))
+
+
+def trainer(
+ tensor_parallelism: int = 1,
+ pipeline_parallelism: int = 1,
+ pipeline_parallelism_type: Optional[torch.dtype] = None,
+ virtual_pipeline_parallelism: Optional[int] = None,
+ context_parallelism: int = 1,
+ sequence_parallelism: bool = False,
+ num_nodes: int = 1,
+ num_gpus_per_node: int = 1,
+ max_steps: int = 1168251,
+ callbacks: Optional[list[run.Config[Callback]]] = None,
+) -> run.Config[nl.Trainer]:
+ """
+ Configure the NeMo Lightning Trainer for Phi3 mini 4k instruct model.
+
+ This function sets up the distributed training strategy and other training parameters.
+
+ Args:
+ tensor_parallelism (int): Degree of tensor model parallelism.
+ pipeline_parallelism (int): Degree of pipeline model parallelism.
+ pipeline_parallelism_type (Optional[torch.dtype]): Data type for pipeline parallelism.
+ virtual_pipeline_parallelism (Optional[int]): Size of virtual pipeline parallelism.
+ context_parallelism (int): Degree of context parallelism.
+ sequence_parallelism (bool): Whether to use sequence parallelism.
+ num_nodes (int): Number of compute nodes to use.
+ num_gpus_per_node (int): Number of GPUs per node.
+ max_steps (int): Maximum number of training steps.
+ callbacks (Optional[list[run.Config[Callback]]]): List of callback configurations.
+
+ Returns:
+ run.Config[nl.Trainer]: Configuration for the NeMo Lightning Trainer.
+
+ Examples:
+ CLI usage:
+ $ nemo llm pretrain trainer=phi3_mini_4k_instruct ...
+
+ Python API usage:
+ >>> trainer_config = trainer(num_nodes=2, num_gpus_per_node=8)
+ >>> print(trainer_config)
+
+ Note:
+ For more information on distributed training strategies, refer to the
+ NeMo documentation on multi-GPU and multi-node training.
+ """
+ strategy = run.Config(
+ nl.MegatronStrategy,
+ tensor_model_parallel_size=tensor_parallelism,
+ pipeline_model_parallel_size=pipeline_parallelism,
+ pipeline_dtype=pipeline_parallelism_type,
+ virtual_pipeline_model_parallel_size=virtual_pipeline_parallelism,
+ context_parallel_size=context_parallelism,
+ sequence_parallel=sequence_parallelism,
+ gradient_as_bucket_view=True,
+ ckpt_async_save=True,
+ ckpt_parallel_load=True,
+ ddp=run.Config(
+ DistributedDataParallelConfig,
+ check_for_nan_in_grad=True,
+ grad_reduce_in_fp32=True,
+ overlap_grad_reduce=True,
+ overlap_param_gather=True,
+ average_in_collective=True,
+ ),
+ )
+
+ trainer = run.Config(
+ nl.Trainer,
+ accelerator="gpu",
+ accumulate_grad_batches=1,
+ callbacks=callbacks,
+ devices=num_gpus_per_node,
+ limit_test_batches=50,
+ limit_val_batches=32,
+ log_every_n_steps=10,
+ max_steps=max_steps,
+ num_nodes=num_nodes,
+ plugins=bf16_mixed(),
+ strategy=strategy,
+ use_distributed_sampler=False,
+ val_check_interval=2000,
+ )
+
+ return trainer
+
+
+@run.cli.factory(target=pretrain, name=NAME)
+def pretrain_recipe(
+ dir: Optional[str] = None,
+ name: str = "default",
+ num_nodes: int = 1,
+ tensor_parallelism: int = 1,
+ num_gpus_per_node: int = 1,
+ max_steps: int = 1168251,
+ performance_mode: bool = False,
+ fn: Callable = pretrain,
+) -> run.Partial:
+ """
+ Create a pre-training recipe for phi3_mini_4k_instruct model.
+
+ This function sets up a complete configuration for pre-training, including
+ model, trainer, data, logging, optimization, and resumption settings.
+
+ Args:
+ dir (Optional[str]): Directory for saving logs and checkpoints.
+ name (str): Name of the pre-training run.
+ num_nodes (int): Number of compute nodes to use.
+ tensor_parallelism (int): Degree of tensor model parallelism.
+ num_gpus_per_node (int): Number of GPUs per node.
+ max_steps (int): Maximum number of training steps.
+ performance_mode (bool): If true, enables optimizations for maximum performance.
+ fn (Callable): The pre-training function to use.
+
+ Returns:
+ run.Partial: Partial configuration for pre-training.
+
+ Examples:
+ CLI usage:
+ $ nemo llm pretrain --factory phi3_mini_4k_instruct
+ $ nemo llm pretrain --factory "phi3_mini_4k_instruct(num_nodes=1, name='my_pretrain')"
+
+ Python API usage:
+ >>> recipe = pretrain_recipe(name="phi3_mini_4k_instruct", num_nodes=1)
+ >>> print(recipe)
+
+ Note:
+ For more details on pre-training LLMs with NeMo, see the pre-training
+ guide in the `examples/llm/pretrain/` directory.
+ """
+ recipe = run.Partial(
+ fn,
+ model=model(),
+ trainer=trainer(
+ tensor_parallelism=tensor_parallelism,
+ num_nodes=num_nodes,
+ num_gpus_per_node=num_gpus_per_node,
+ max_steps=max_steps,
+ callbacks=[run.Config(TimingCallback)],
+ ),
+ data=run.Config(MockDataModule, seq_length=4096, global_batch_size=512, micro_batch_size=1),
+ log=default_log(dir=dir, name=name, tensorboard_logger=tensorboard_logger(name=name)),
+ optim=distributed_fused_adam_with_cosine_annealing(max_lr=3e-4),
+ resume=default_resume(),
+ )
+
+ return recipe
+
+
+@run.cli.factory(target=finetune, name=NAME)
+def finetune_recipe(
+ dir: Optional[str] = None,
+ name: str = "default",
+ num_nodes: int = 1,
+ num_gpus_per_node: int = 1,
+ tensor_parallelism: int = 1,
+ max_steps: int = 116825,
+ peft_scheme: Optional[str] = 'lora',
+ seq_length: Optional[int] = None,
+ packed_sequence: Optional[bool] = None,
+ performance_mode: bool = False,
+) -> run.Partial:
+ """
+ Create a fine-tuning recipe for Phi3 mini-4k-instruct model.
+
+ This function sets up a complete configuration for fine-tuning, including
+ model, trainer, data, logging, optimization, and resumption settings.
+ The recipe uses LoRA (Low-Rank Adaptation) for efficient fine-tuning, unless peft_scheme is set to None.
+
+ Args:
+ dir (Optional[str]): Directory for saving logs and checkpoints.
+ name (str): Name of the fine-tuning run.
+ num_nodes (int): Number of compute nodes to use.
+ num_gpus_per_node (int): Number of GPUs per node.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+ seq_length (Optional[int]): Maximum number of tokens per microbatch.
+ packed_sequence (Optional[bool]): If true, fine-tuning sequences will be packed into batches up to the given
+ maximum seq_length for better efficiency. By default, this value equals performance_mode.
+ performance_mode (bool): If true, enables optimizations for maximum performance.
+
+ Returns:
+ run.Partial: Partial configuration for fine-tuning.
+
+ Examples:
+ CLI usage:
+ $ nemo llm finetune --factory phi3_mini_4k_instruct
+
+ Python API usage:
+ >>> recipe = finetune_recipe(name="phi3_mini_4k_instruct", num_nodes=1)
+ >>> print(recipe)
+
+ Note:
+ This recipe uses the SQuAD dataset for fine-tuning. For more information
+ on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
+ `examples/llm/finetune/` directory.
+ """
+ # Default to unpacked data in normal mode and packed data in performance mode;
+ # once the packing recipe is well tested, change this default to True.
+ if packed_sequence is None:
+ packed_sequence = performance_mode
+
+ # For unpacked sequences, most samples in the SQuAD dataset are shorter than 2K tokens
+ if seq_length is None:
+ seq_length = 4096 if packed_sequence else 2048
+
+ recipe = default_finetune_recipe(
+ model(), "microsoft/Phi-3-mini-4k-instruct", dir, name, num_nodes, num_gpus_per_node, packed_sequence
+ )
+ if peft_scheme is None or peft_scheme.lower() == 'none':
+ recipe.trainer.strategy.tensor_model_parallel_size = 1
+ recipe.optim.config.lr = 5e-6
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
+ recipe.peft.dim = 8
+ recipe.peft.alpha = 16
+ recipe.optim.config.use_distributed_optimizer = False
+
+ # some settings currently do not function correctly with LoRA
+ recipe.model.config.cross_entropy_loss_fusion = False
+
+ recipe.optim.config.lr = 1e-4
+ else:
+ raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
+
+ # Sequence length settings in the model and dataset must agree
+ recipe.model.config.seq_length = seq_length
+ recipe.data.seq_length = seq_length
+ if packed_sequence:
+ recipe.data.pad_to_max_length = True
+ recipe.data.packed_sequence_specs = run.Config(PackedSequenceSpecs, packed_sequence_size=seq_length)
+
+ return recipe
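Note (illustrative, not part of the patch): a sketch of launching the new Phi-3 Mini recipe from Python. The executor mirrors the `torchrun` factory added later in this patch; all argument values are examples.

```python
import nemo_run as run

from nemo.collections.llm.recipes import phi3_mini_4k_instruct

recipe = phi3_mini_4k_instruct.finetune_recipe(
    name="phi3_mini_lora",
    num_nodes=1,
    num_gpus_per_node=1,
    peft_scheme="lora",  # or 'dora'; use None/'none' for full fine-tuning
)
run.run(recipe, executor=run.LocalExecutor(ntasks_per_node=1, launcher="torchrun"))
```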
diff --git a/nemo/collections/llm/recipes/qwen2.py b/nemo/collections/llm/recipes/qwen2.py
index ff0c76a714f1..db9dcfc88865 100644
--- a/nemo/collections/llm/recipes/qwen2.py
+++ b/nemo/collections/llm/recipes/qwen2.py
@@ -14,10 +14,10 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
-from pytorch_lightning.callbacks.callback import Callback
+from lightning.pytorch.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections.llm.gpt.model.qwen2 import (
diff --git a/nemo/collections/llm/recipes/qwen2_1p5b.py b/nemo/collections/llm/recipes/qwen2_1p5b.py
index 80ed957e3b48..99ba5cd907fc 100644
--- a/nemo/collections/llm/recipes/qwen2_1p5b.py
+++ b/nemo/collections/llm/recipes/qwen2_1p5b.py
@@ -14,13 +14,13 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
-from nemo.collections.llm.peft.lora import LoRA
+from nemo.collections.llm.peft import PEFT_STR2CLS
from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
@@ -180,6 +180,7 @@ def finetune_recipe(
num_nodes: int = 1,
num_gpus_per_node: int = 8,
peft_scheme: Optional[str] = 'lora',
+ packed_sequence: bool = False,
) -> run.Partial:
"""
Create a fine-tuning recipe for Qwen2 1.5b model.
@@ -193,7 +194,10 @@ def finetune_recipe(
name (str): Name of the fine-tuning run.
num_nodes (int): Number of compute nodes to use.
num_gpus_per_node (int): Number of GPUs per node.
- peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora', 'none'/None.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+ packed_sequence (bool): If True, packs multiple training sequences into one long sequence for training
+ efficiency. The default sequence length is 2048.
Returns:
run.Partial: Partial configuration for fine-tuning.
@@ -211,11 +215,13 @@ def finetune_recipe(
on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
`examples/llm/finetune/` directory.
"""
- recipe = default_finetune_recipe(model(), "Qwen/Qwen2-1.5B", dir, name, num_nodes, num_gpus_per_node)
+ recipe = default_finetune_recipe(
+ model(), "Qwen/Qwen2-1.5B", dir, name, num_nodes, num_gpus_per_node, packed_sequence
+ )
if peft_scheme is None or peft_scheme.lower() == 'none':
recipe.optim.config.lr = 5e-6
- elif peft_scheme.lower() == 'lora':
- recipe.peft = run.Config(LoRA)
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
recipe.optim.config.lr = 1e-4
else:
raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
diff --git a/nemo/collections/llm/recipes/qwen2_500m.py b/nemo/collections/llm/recipes/qwen2_500m.py
index 677fc066c047..96d99c271c85 100644
--- a/nemo/collections/llm/recipes/qwen2_500m.py
+++ b/nemo/collections/llm/recipes/qwen2_500m.py
@@ -14,13 +14,13 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
-from nemo.collections.llm.peft.lora import LoRA
+from nemo.collections.llm.peft import PEFT_STR2CLS
from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
@@ -180,6 +180,7 @@ def finetune_recipe(
num_nodes: int = 1,
num_gpus_per_node: int = 8,
peft_scheme: Optional[str] = 'lora',
+ packed_sequence: bool = False,
) -> run.Partial:
"""
Create a fine-tuning recipe for Qwen2 500m model.
@@ -193,7 +194,10 @@ def finetune_recipe(
name (str): Name of the fine-tuning run.
num_nodes (int): Number of compute nodes to use.
num_gpus_per_node (int): Number of GPUs per node.
- peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora', 'none'/None.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+ packed_sequence (bool): If True, packs multiple training sequences into one long sequence for training
+ efficiency. The default sequence length is 2048.
Returns:
run.Partial: Partial configuration for fine-tuning.
@@ -211,11 +215,13 @@ def finetune_recipe(
on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
`examples/llm/finetune/` directory.
"""
- recipe = default_finetune_recipe(model(), "Qwen/Qwen2-0.5B", dir, name, num_nodes, num_gpus_per_node)
+ recipe = default_finetune_recipe(
+ model(), "Qwen/Qwen2-0.5B", dir, name, num_nodes, num_gpus_per_node, packed_sequence
+ )
if peft_scheme is None or peft_scheme.lower() == 'none':
recipe.optim.config.lr = 5e-6
- elif peft_scheme.lower() == 'lora':
- recipe.peft = run.Config(LoRA)
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
recipe.optim.config.lr = 1e-4
else:
raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
diff --git a/nemo/collections/llm/recipes/qwen2_72b.py b/nemo/collections/llm/recipes/qwen2_72b.py
index d93be1b9257a..33bb0dd40835 100644
--- a/nemo/collections/llm/recipes/qwen2_72b.py
+++ b/nemo/collections/llm/recipes/qwen2_72b.py
@@ -14,13 +14,13 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
-from nemo.collections.llm.peft.lora import LoRA
+from nemo.collections.llm.peft import PEFT_STR2CLS
from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
@@ -180,6 +180,7 @@ def finetune_recipe(
num_nodes: int = 1,
num_gpus_per_node: int = 8,
peft_scheme: Optional[str] = 'lora',
+ packed_sequence: bool = False,
) -> run.Partial:
"""
Create a fine-tuning recipe for Qwen2 72b model.
@@ -193,7 +194,10 @@ def finetune_recipe(
name (str): Name of the fine-tuning run.
num_nodes (int): Number of compute nodes to use.
num_gpus_per_node (int): Number of GPUs per node.
- peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora', 'none'/None.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+ packed_sequence (bool): If True, packs multiple training sequences into one long sequence for training
+ efficiency. The default sequence length is 2048.
Returns:
run.Partial: Partial configuration for fine-tuning.
@@ -211,14 +215,16 @@ def finetune_recipe(
on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
`examples/llm/finetune/` directory.
"""
- recipe = default_finetune_recipe(model(), "Qwen/Qwen2-72B", dir, name, num_nodes, num_gpus_per_node)
+ recipe = default_finetune_recipe(
+ model(), "Qwen/Qwen2-72B", dir, name, num_nodes, num_gpus_per_node, packed_sequence
+ )
if peft_scheme is None or peft_scheme.lower() == 'none':
assert num_nodes >= 4
recipe.trainer.strategy.tensor_model_parallel_size = 8
recipe.trainer.strategy.pipeline_model_parallel_size = 4
recipe.optim.config.lr = 5e-6
- elif peft_scheme.lower() == 'lora':
- recipe.peft = run.Config(LoRA)
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
recipe.trainer.strategy.tensor_model_parallel_size = 8
recipe.optim.config.lr = 1e-4
else:
diff --git a/nemo/collections/llm/recipes/qwen2_7b.py b/nemo/collections/llm/recipes/qwen2_7b.py
index 57ccd48e9fe1..2e62176a408e 100644
--- a/nemo/collections/llm/recipes/qwen2_7b.py
+++ b/nemo/collections/llm/recipes/qwen2_7b.py
@@ -14,13 +14,13 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
-from nemo.collections.llm.peft.lora import LoRA
+from nemo.collections.llm.peft import PEFT_STR2CLS
from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
@@ -180,6 +180,7 @@ def finetune_recipe(
num_nodes: int = 1,
num_gpus_per_node: int = 8,
peft_scheme: Optional[str] = 'lora',
+ packed_sequence: bool = False,
) -> run.Partial:
"""
Create a fine-tuning recipe for Qwen2 7b model.
@@ -193,7 +194,10 @@ def finetune_recipe(
name (str): Name of the fine-tuning run.
num_nodes (int): Number of compute nodes to use.
num_gpus_per_node (int): Number of GPUs per node.
- peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora', 'none'/None.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+ packed_sequence (bool): If True, packs multiple training sequences into one long sequence for training
+ efficiency. The default sequence length is 2048.
Returns:
run.Partial: Partial configuration for fine-tuning.
@@ -211,12 +215,14 @@ def finetune_recipe(
on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
`examples/llm/finetune/` directory.
"""
- recipe = default_finetune_recipe(model(), "Qwen/Qwen2-7B", dir, name, num_nodes, num_gpus_per_node)
+ recipe = default_finetune_recipe(
+ model(), "Qwen/Qwen2-7B", dir, name, num_nodes, num_gpus_per_node, packed_sequence
+ )
if peft_scheme is None or peft_scheme.lower() == 'none':
recipe.trainer.strategy.tensor_model_parallel_size = 2
recipe.optim.config.lr = 5e-6
- elif peft_scheme.lower() == 'lora':
- recipe.peft = run.Config(LoRA)
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
recipe.optim.config.lr = 1e-4
else:
raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
diff --git a/nemo/collections/llm/recipes/run/__init__.py b/nemo/collections/llm/recipes/run/__init__.py
new file mode 100644
index 000000000000..d9155f923f18
--- /dev/null
+++ b/nemo/collections/llm/recipes/run/__init__.py
@@ -0,0 +1,13 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/nemo/collections/llm/recipes/run/executor.py b/nemo/collections/llm/recipes/run/executor.py
new file mode 100644
index 000000000000..305fa6b0a3c7
--- /dev/null
+++ b/nemo/collections/llm/recipes/run/executor.py
@@ -0,0 +1,35 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import nemo_run as run
+
+
+@run.cli.factory
+def torchrun(devices: int = 8) -> run.Config[run.LocalExecutor]:
+ """Local executor using torchrun."""
+ env_vars = {
+ "TRANSFORMERS_OFFLINE": "1",
+ "TORCH_NCCL_AVOID_RECORD_STREAMS": "1",
+ "NCCL_NVLS_ENABLE": "0",
+ "NVTE_DP_AMAX_REDUCE_INTERVAL": "0",
+ "NVTE_ASYNC_AMAX_REDUCTION": "1",
+ }
+
+ executor = run.Config(
+ run.LocalExecutor,
+ ntasks_per_node=devices,
+ launcher="torchrun",
+ env_vars=env_vars,
+ )
+
+ return executor
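Note (illustrative, not part of the patch): the factory returns a `run.Config[run.LocalExecutor]`, not an executor instance, so it has to be materialized before use. One way, assuming `run.Config` resolves through fiddle as it does elsewhere in NeMo:

```python
import fiddle as fdl

from nemo.collections.llm.recipes.run.executor import torchrun

executor_cfg = torchrun(devices=2)  # run.Config[run.LocalExecutor]
executor = fdl.build(executor_cfg)  # concrete LocalExecutor with the env vars above
```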
diff --git a/nemo/collections/llm/recipes/starcoder2.py b/nemo/collections/llm/recipes/starcoder2.py
index c3a19326585c..b090ce1cf9ef 100644
--- a/nemo/collections/llm/recipes/starcoder2.py
+++ b/nemo/collections/llm/recipes/starcoder2.py
@@ -14,10 +14,11 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
-from pytorch_lightning.callbacks.callback import Callback
+from lightning.pytorch.callbacks.callback import Callback
+
from nemo import lightning as nl
from nemo.collections.llm.gpt.model.starcoder2 import (
Starcoder2Config3B,
diff --git a/nemo/collections/llm/recipes/starcoder2_15b.py b/nemo/collections/llm/recipes/starcoder2_15b.py
index 5faebb9460f3..e424cb67dba4 100644
--- a/nemo/collections/llm/recipes/starcoder2_15b.py
+++ b/nemo/collections/llm/recipes/starcoder2_15b.py
@@ -14,13 +14,13 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
-from nemo.collections.llm.peft.lora import LoRA
+from nemo.collections.llm.peft import PEFT_STR2CLS
from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
@@ -58,7 +58,7 @@ def pretrain_recipe(
# Trainer
tensor_parallelism: int = 4,
pipeline_parallelism: int = 2,
- pipeline_parallelism_type: Optional[torch.dtype] = None,
+ pipeline_parallelism_type: Optional[torch.dtype] = torch.bfloat16,
virtual_pipeline_parallelism: Optional[int] = None,
context_parallelism: int = 1,
sequence_parallelism: bool = False,
@@ -180,6 +180,7 @@ def finetune_recipe(
num_nodes: int = 1,
num_gpus_per_node: int = 8,
peft_scheme: Optional[str] = 'lora',
+ packed_sequence: bool = False,
) -> run.Partial:
"""
Create a fine-tuning recipe for Starcoder2 15B model.
@@ -193,7 +194,10 @@ def finetune_recipe(
name (str): Name of the fine-tuning run.
num_nodes (int): Number of compute nodes to use.
num_gpus_per_node (int): Number of GPUs per node.
- peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora', 'none'/None.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+ packed_sequence (bool): If True, packs multiple training sequences into one long sequence for training
+ efficiency. The default sequence length is 2048.
Returns:
run.Partial: Partial configuration for fine-tuning.
@@ -211,12 +215,14 @@ def finetune_recipe(
on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
`examples/llm/finetune/` directory.
"""
- recipe = default_finetune_recipe(model(), "bigcode/starcoder2-15b", dir, name, num_nodes, num_gpus_per_node)
+ recipe = default_finetune_recipe(
+ model(), "bigcode/starcoder2-15b", dir, name, num_nodes, num_gpus_per_node, packed_sequence
+ )
if peft_scheme is None or peft_scheme.lower() == 'none':
recipe.trainer.strategy.tensor_model_parallel_size = 4
recipe.optim.config.lr = 5e-6
- elif peft_scheme.lower() == 'lora':
- recipe.peft = run.Config(LoRA)
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
recipe.optim.config.lr = 1e-4
else:
raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
diff --git a/nemo/collections/llm/recipes/starcoder2_3b.py b/nemo/collections/llm/recipes/starcoder2_3b.py
index 232f5842ff84..faf0b416c56a 100644
--- a/nemo/collections/llm/recipes/starcoder2_3b.py
+++ b/nemo/collections/llm/recipes/starcoder2_3b.py
@@ -14,13 +14,13 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
-from nemo.collections.llm.peft.lora import LoRA
+from nemo.collections.llm.peft import PEFT_STR2CLS
from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
@@ -180,6 +180,7 @@ def finetune_recipe(
num_nodes: int = 1,
num_gpus_per_node: int = 8,
peft_scheme: Optional[str] = 'lora',
+ packed_sequence: bool = False,
) -> run.Partial:
"""
Create a fine-tuning recipe for Starcoder2 3B model.
@@ -193,7 +194,10 @@ def finetune_recipe(
name (str): Name of the fine-tuning run.
num_nodes (int): Number of compute nodes to use.
num_gpus_per_node (int): Number of GPUs per node.
- peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora', 'none'/None.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+ packed_sequence (bool): If True, packs multiple training sequences into one long sequence for training
+ efficiency. The default sequence length is 2048.
Returns:
run.Partial: Partial configuration for fine-tuning.
@@ -211,12 +215,14 @@ def finetune_recipe(
on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
`examples/llm/finetune/` directory.
"""
- recipe = default_finetune_recipe(model(), "bigcode/starcoder2-3b", dir, name, num_nodes, num_gpus_per_node)
+ recipe = default_finetune_recipe(
+ model(), "bigcode/starcoder2-3b", dir, name, num_nodes, num_gpus_per_node, packed_sequence
+ )
if peft_scheme is None or peft_scheme.lower() == 'none':
recipe.trainer.strategy.tensor_model_parallel_size = 2
recipe.optim.config.lr = 5e-6
- elif peft_scheme.lower() == 'lora':
- recipe.peft = run.Config(LoRA)
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
recipe.optim.config.lr = 1e-4
else:
raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
diff --git a/nemo/collections/llm/recipes/starcoder2_7b.py b/nemo/collections/llm/recipes/starcoder2_7b.py
index ee6dacdc98e9..091e882cd932 100644
--- a/nemo/collections/llm/recipes/starcoder2_7b.py
+++ b/nemo/collections/llm/recipes/starcoder2_7b.py
@@ -14,13 +14,13 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
-from nemo.collections.llm.peft.lora import LoRA
+from nemo.collections.llm.peft import PEFT_STR2CLS
from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
@@ -180,6 +180,7 @@ def finetune_recipe(
num_nodes: int = 1,
num_gpus_per_node: int = 8,
peft_scheme: Optional[str] = 'lora',
+ packed_sequence: bool = False,
) -> run.Partial:
"""
Create a fine-tuning recipe for Starcoder2 7B model.
@@ -193,7 +194,10 @@ def finetune_recipe(
name (str): Name of the fine-tuning run.
num_nodes (int): Number of compute nodes to use.
num_gpus_per_node (int): Number of GPUs per node.
- peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora', 'none'/None.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+ packed_sequence (bool): If True, packs multiple training sequences into one long sequence for training
+ efficiency. The default sequence length is 2048.
Returns:
run.Partial: Partial configuration for fine-tuning.
@@ -211,12 +215,14 @@ def finetune_recipe(
on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
`examples/llm/finetune/` directory.
"""
- recipe = default_finetune_recipe(model(), "bigcode/starcoder2-7b", dir, name, num_nodes, num_gpus_per_node)
+ recipe = default_finetune_recipe(
+ model(), "bigcode/starcoder2-7b", dir, name, num_nodes, num_gpus_per_node, packed_sequence
+ )
if peft_scheme is None or peft_scheme.lower() == 'none':
recipe.trainer.strategy.tensor_model_parallel_size = 2
recipe.optim.config.lr = 5e-6
- elif peft_scheme.lower() == 'lora':
- recipe.peft = run.Config(LoRA)
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
recipe.optim.config.lr = 1e-4
else:
raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
diff --git a/nemo/collections/llm/recipes/starcoder.py b/nemo/collections/llm/recipes/starcoder_15b.py
similarity index 95%
rename from nemo/collections/llm/recipes/starcoder.py
rename to nemo/collections/llm/recipes/starcoder_15b.py
index b90cec0fbd7e..382d0eb4d8ca 100644
--- a/nemo/collections/llm/recipes/starcoder.py
+++ b/nemo/collections/llm/recipes/starcoder_15b.py
@@ -14,16 +14,16 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
-from pytorch_lightning.callbacks.callback import Callback
+from lightning.pytorch.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections.llm.api import finetune, pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
from nemo.collections.llm.gpt.model.starcoder import StarcoderConfig15B, StarcoderModel
-from nemo.collections.llm.peft.lora import LoRA
+from nemo.collections.llm.peft import PEFT_STR2CLS
from nemo.collections.llm.recipes.finetune_default import default_finetune_recipe
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
@@ -143,9 +143,9 @@ def pretrain_recipe(
dir: Optional[str] = None,
name: str = "default",
# Trainer
- tensor_parallelism: int = 2,
- pipeline_parallelism: int = 1,
- pipeline_parallelism_type: Optional[torch.dtype] = None,
+ tensor_parallelism: int = 1,
+ pipeline_parallelism: int = 8,
+ pipeline_parallelism_type: Optional[torch.dtype] = torch.bfloat16,
virtual_pipeline_parallelism: Optional[int] = None,
context_parallelism: int = 1,
sequence_parallelism: bool = False,
@@ -280,7 +280,8 @@ def finetune_recipe(
name (str): Name of the fine-tuning run.
num_nodes (int): Number of compute nodes to use.
num_gpus_per_node (int): Number of GPUs per node.
- peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora', 'none'/None.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
Returns:
run.Partial: Partial configuration for fine-tuning.
@@ -300,10 +301,10 @@ def finetune_recipe(
"""
recipe = default_finetune_recipe(model(), "bigcode/starcoder", dir, name, num_nodes, num_gpus_per_node)
if peft_scheme is None or peft_scheme.lower() == 'none':
- recipe.trainer.strategy.tensor_model_parallel_size = 4
+ recipe.trainer.strategy.pipeline_model_parallel_size = 8
recipe.optim.config.lr = 5e-6
- elif peft_scheme.lower() == 'lora':
- recipe.peft = run.Config(LoRA)
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
recipe.optim.config.lr = 1e-4
else:
raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
diff --git a/nemo/collections/llm/recipes/t5_11b.py b/nemo/collections/llm/recipes/t5_11b.py
index 09d469879364..ee7323aa044f 100644
--- a/nemo/collections/llm/recipes/t5_11b.py
+++ b/nemo/collections/llm/recipes/t5_11b.py
@@ -15,18 +15,21 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.distributed import DistributedDataParallelConfig
from megatron.core.optimizer import OptimizerConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections.llm.api import finetune, pretrain
+from nemo.collections.llm.peft import PEFT_STR2CLS
+from nemo.collections.llm.recipes.finetune_default import default_finetune_trainer, nemo_resume
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.precision.mixed_precision import bf16_mixed
from nemo.collections.llm.t5.data.mock import MockDataModule
+from nemo.collections.llm.t5.data.squad import SquadDataModule
from nemo.collections.llm.t5.model.t5 import T5Config11B, T5Model
from nemo.lightning.pytorch.optim.lr_scheduler import WarmupAnnealingScheduler
from nemo.lightning.pytorch.optim.megatron import MegatronOptimizerModule
@@ -202,3 +205,84 @@ def pretrain_recipe(
optim=MegatronOptimizerModule(config=opt_config, lr_scheduler=lr_scheduler),
resume=default_resume(),
)
+
+
+@run.cli.factory(target=finetune, name=NAME)
+def finetune_recipe(
+ checkpoint_path: str = "",
+ dir: Optional[str] = None,
+ name: str = "default",
+ num_nodes: int = 1,
+ num_gpus_per_node: int = 8,
+ peft_scheme: Optional[str] = 'lora',
+) -> run.Partial:
+ """
+ Create a fine-tuning recipe for T5 11B model.
+
+ This function sets up a complete configuration for fine-tuning, including
+ model, trainer, data, logging, optimization, and resumption settings.
+ The recipe uses LoRA (Low-Rank Adaptation) for efficient fine-tuning, unless peft_scheme is set to None.
+
+ Args:
+ checkpoint_path (str): Path to the pretrained checkpoint.
+ dir (Optional[str]): Directory for saving logs and checkpoints.
+ name (str): Name of the fine-tuning run.
+ num_nodes (int): Number of compute nodes to use.
+ num_gpus_per_node (int): Number of GPUs per node.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+
+ Returns:
+ run.Partial: Partial configuration for fine-tuning.
+
+ Examples:
+ CLI usage:
+ $ nemo llm finetune --factory t5_11b
+
+ Python API usage:
+ >>> recipe = finetune_recipe(name="t5_11b_finetune", num_nodes=1)
+ >>> print(recipe)
+
+ Note:
+ This recipe uses the SQuAD dataset for fine-tuning. For more information
+ on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
+ `examples/llm/finetune/` directory.
+ """
+ opt_config = OptimizerConfig(
+ optimizer='adam',
+ lr=1e-4,
+ use_distributed_optimizer=True,
+ bf16=True,
+ weight_decay=0.01,
+ )
+
+ lr_scheduler = WarmupAnnealingScheduler(
+ warmup_steps=50,
+ max_steps=2000,
+ min_lr=0.00001,
+ )
+
+ recipe = run.Partial(
+ finetune,
+ model=model(),
+ trainer=default_finetune_trainer(
+ num_nodes=num_nodes,
+ num_gpus_per_node=num_gpus_per_node,
+ ),
+ data=run.Config(
+ SquadDataModule, seq_length=512, seq_length_dec=128, global_batch_size=128, micro_batch_size=1
+ ),
+ log=default_log(dir=dir, name=name, tensorboard_logger=tensorboard_logger(name=name)),
+ optim=MegatronOptimizerModule(config=opt_config, lr_scheduler=lr_scheduler),
+ resume=nemo_resume(checkpoint_path),
+ )
+
+ if peft_scheme is None or peft_scheme.lower() == 'none':
+ recipe.trainer.strategy.tensor_model_parallel_size = 4
+ recipe.optim.config.lr = 5e-6
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
+ recipe.optim.config.lr = 1e-4
+ else:
+ raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
+ return recipe
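Note (illustrative, not part of the patch): unlike the GPT recipes, the T5 finetune recipes resume from an explicit pretrained checkpoint path via `nemo_resume`. A usage sketch with a placeholder path:

```python
from nemo.collections.llm.recipes import t5_11b

recipe = t5_11b.finetune_recipe(
    checkpoint_path="/checkpoints/t5_11b",  # placeholder; a pretrained NeMo checkpoint
    name="t5_11b_squad_lora",
    num_nodes=1,
    num_gpus_per_node=8,
    peft_scheme="lora",
)
```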
diff --git a/nemo/collections/llm/recipes/t5_220m.py b/nemo/collections/llm/recipes/t5_220m.py
index a3b2b761b65b..975ac5519859 100644
--- a/nemo/collections/llm/recipes/t5_220m.py
+++ b/nemo/collections/llm/recipes/t5_220m.py
@@ -15,18 +15,21 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.distributed import DistributedDataParallelConfig
from megatron.core.optimizer import OptimizerConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections.llm.api import finetune, pretrain
+from nemo.collections.llm.peft import PEFT_STR2CLS
+from nemo.collections.llm.recipes.finetune_default import default_finetune_trainer, nemo_resume
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.precision.mixed_precision import bf16_mixed
from nemo.collections.llm.t5.data.mock import MockDataModule
+from nemo.collections.llm.t5.data.squad import SquadDataModule
from nemo.collections.llm.t5.model.t5 import T5Config220M, T5Model
from nemo.lightning.pytorch.optim.lr_scheduler import WarmupAnnealingScheduler
from nemo.lightning.pytorch.optim.megatron import MegatronOptimizerModule
@@ -132,7 +135,6 @@ def trainer(
plugins=bf16_mixed(),
strategy=strategy,
use_distributed_sampler=False,
- # DEBUGGING
val_check_interval=2000,
)
@@ -173,7 +175,8 @@ def pretrain_recipe(
guide in the `examples/llm/pretrain/` directory.
"""
- opt_config = OptimizerConfig(
+ opt_config = run.Config(
+ OptimizerConfig,
optimizer='adam',
lr=0.0001,
use_distributed_optimizer=True,
@@ -181,7 +184,8 @@ def pretrain_recipe(
weight_decay=0.01,
)
- lr_scheduler = WarmupAnnealingScheduler(
+ lr_scheduler = run.Config(
+ WarmupAnnealingScheduler,
warmup_steps=None,
warmup_ratio=0.01,
max_steps=1000000,
@@ -198,6 +202,90 @@ def pretrain_recipe(
),
data=run.Config(MockDataModule, seq_length=512, seq_length_dec=128, global_batch_size=512, micro_batch_size=1),
log=default_log(dir=dir, name=name, tensorboard_logger=tensorboard_logger(name=name)),
- optim=MegatronOptimizerModule(config=opt_config, lr_scheduler=lr_scheduler),
+ optim=run.Config(MegatronOptimizerModule, config=opt_config, lr_scheduler=lr_scheduler),
resume=default_resume(),
)
+
+
+@run.cli.factory(target=finetune, name=NAME)
+def finetune_recipe(
+ checkpoint_path: str = "",
+ dir: Optional[str] = None,
+ name: str = "default",
+ num_nodes: int = 1,
+ num_gpus_per_node: int = 8,
+ peft_scheme: Optional[str] = 'lora',
+) -> run.Partial:
+ """
+ Create a fine-tuning recipe for T5 220M model.
+
+ This function sets up a complete configuration for fine-tuning, including
+ model, trainer, data, logging, optimization, and resumption settings.
+ The recipe uses LoRA (Low-Rank Adaptation) for efficient fine-tuning, unless peft_scheme is set to None.
+
+ Args:
+ checkpoint_path (str): Path to the pretrained checkpoint.
+ dir (Optional[str]): Directory for saving logs and checkpoints.
+ name (str): Name of the fine-tuning run.
+ num_nodes (int): Number of compute nodes to use.
+ num_gpus_per_node (int): Number of GPUs per node.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+
+ Returns:
+ run.Partial: Partial configuration for fine-tuning.
+
+ Examples:
+ CLI usage:
+ $ nemo llm finetune --factory t5_220m
+
+ Python API usage:
+ >>> recipe = finetune_recipe(name="t5_220m_finetune", num_nodes=1)
+ >>> print(recipe)
+
+ Note:
+ This recipe uses the SQuAD dataset for fine-tuning. For more information
+ on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
+ `examples/llm/finetune/` directory.
+ """
+ opt_config = run.Config(
+ OptimizerConfig,
+ optimizer='adam',
+ lr=0.0001,
+ use_distributed_optimizer=True,
+ bf16=True,
+ weight_decay=0.01,
+ )
+
+ lr_scheduler = run.Config(
+ WarmupAnnealingScheduler,
+ warmup_steps=50,
+ max_steps=2000,
+ min_lr=0.00001,
+ )
+
+ recipe = run.Partial(
+ finetune,
+ model=model(),
+ trainer=default_finetune_trainer(
+ num_nodes=num_nodes,
+ num_gpus_per_node=num_gpus_per_node,
+ ),
+ data=run.Config(
+ SquadDataModule, seq_length=512, seq_length_dec=128, global_batch_size=128, micro_batch_size=1
+ ),
+ log=default_log(dir=dir, name=name, tensorboard_logger=tensorboard_logger(name=name)),
+ optim=run.Config(MegatronOptimizerModule, config=opt_config, lr_scheduler=lr_scheduler),
+ resume=nemo_resume(checkpoint_path),
+ )
+
+ if peft_scheme is None or peft_scheme.lower() == 'none':
+ recipe.trainer.strategy.tensor_model_parallel_size = 1
+ recipe.optim.config.lr = 5e-6
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
+ recipe.optim.config.lr = 1e-4
+ else:
+ raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
+
+ return recipe
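Note (illustrative, not part of the patch): the hunks above replace eagerly constructed optimizer objects with `run.Config` declarations. The difference, sketched below, is that the declarative form stays a serializable configuration that `nemo_run` builds only at launch time (and that the CLI can override), whereas the eager form instantiates real objects as soon as the recipe function runs:

```python
import nemo_run as run
from megatron.core.optimizer import OptimizerConfig

from nemo.lightning.pytorch.optim.megatron import MegatronOptimizerModule

# Eager: objects exist immediately; harder to serialize or override per run.
eager_optim = MegatronOptimizerModule(config=OptimizerConfig(optimizer='adam', lr=1e-4))

# Declarative: a pure config, built by nemo_run when the recipe executes.
lazy_optim = run.Config(
    MegatronOptimizerModule,
    config=run.Config(OptimizerConfig, optimizer='adam', lr=1e-4),
)
```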
diff --git a/nemo/collections/llm/recipes/t5_3b.py b/nemo/collections/llm/recipes/t5_3b.py
index 08bcae895c3e..82772e1b865a 100644
--- a/nemo/collections/llm/recipes/t5_3b.py
+++ b/nemo/collections/llm/recipes/t5_3b.py
@@ -15,18 +15,21 @@
from typing import Optional
+import lightning.pytorch as pl
import nemo_run as run
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.distributed import DistributedDataParallelConfig
from megatron.core.optimizer import OptimizerConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo import lightning as nl
from nemo.collections.llm.api import finetune, pretrain
+from nemo.collections.llm.peft import PEFT_STR2CLS
+from nemo.collections.llm.recipes.finetune_default import default_finetune_trainer, nemo_resume
from nemo.collections.llm.recipes.log.default import default_log, default_resume, tensorboard_logger
from nemo.collections.llm.recipes.precision.mixed_precision import bf16_mixed
from nemo.collections.llm.t5.data.mock import MockDataModule
+from nemo.collections.llm.t5.data.squad import SquadDataModule
from nemo.collections.llm.t5.model.t5 import T5Config3B, T5Model
from nemo.lightning.pytorch.optim.lr_scheduler import WarmupAnnealingScheduler
from nemo.lightning.pytorch.optim.megatron import MegatronOptimizerModule
@@ -202,3 +205,84 @@ def pretrain_recipe(
optim=MegatronOptimizerModule(config=opt_config, lr_scheduler=lr_scheduler),
resume=default_resume(),
)
+
+
+@run.cli.factory(target=finetune, name=NAME)
+def finetune_recipe(
+ checkpoint_path: str = "",
+ dir: Optional[str] = None,
+ name: str = "default",
+ num_nodes: int = 1,
+ num_gpus_per_node: int = 8,
+ peft_scheme: Optional[str] = 'lora',
+) -> run.Partial:
+ """
+ Create a fine-tuning recipe for T5 3B model.
+
+ This function sets up a complete configuration for fine-tuning, including
+ model, trainer, data, logging, optimization, and resumption settings.
+ The recipe uses LoRA (Low-Rank Adaptation) for efficient fine-tuning, unless peft_scheme is set to None.
+
+ Args:
+ checkpoint_path (str): Path to the pretrained checkpoint.
+ dir (Optional[str]): Directory for saving logs and checkpoints.
+ name (str): Name of the fine-tuning run.
+ num_nodes (int): Number of compute nodes to use.
+ num_gpus_per_node (int): Number of GPUs per node.
+ peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning.
+ Allowed values: 'lora'/'dora'/'none'/None.
+
+ Returns:
+ run.Partial: Partial configuration for fine-tuning.
+
+ Examples:
+ CLI usage:
+ $ nemo llm finetune --factory t5_3b
+
+ Python API usage:
+ >>> recipe = finetune_recipe(name="t5_3b_finetune", num_nodes=1)
+ >>> print(recipe)
+
+ Note:
+ This recipe uses the SQuAD dataset for fine-tuning. For more information
+ on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
+ `examples/llm/finetune/` directory.
+ """
+ opt_config = OptimizerConfig(
+ optimizer='adam',
+ lr=1e-4,
+ use_distributed_optimizer=True,
+ bf16=True,
+ weight_decay=0.01,
+ )
+
+ lr_scheduler = WarmupAnnealingScheduler(
+ warmup_steps=50,
+ max_steps=2000,
+ min_lr=0.00001,
+ )
+
+ recipe = run.Partial(
+ finetune,
+ model=model(),
+ trainer=default_finetune_trainer(
+ num_nodes=num_nodes,
+ num_gpus_per_node=num_gpus_per_node,
+ ),
+ data=run.Config(
+ SquadDataModule, seq_length=512, seq_length_dec=128, global_batch_size=128, micro_batch_size=1
+ ),
+ log=default_log(dir=dir, name=name, tensorboard_logger=tensorboard_logger(name=name)),
+ optim=MegatronOptimizerModule(config=opt_config, lr_scheduler=lr_scheduler),
+ resume=nemo_resume(checkpoint_path),
+ )
+
+ if peft_scheme is None or peft_scheme.lower() == 'none':
+ recipe.trainer.strategy.tensor_model_parallel_size = 2
+ recipe.optim.config.lr = 5e-6
+ elif peft_scheme.lower() in ['lora', 'dora']:
+ recipe.peft = run.Config(PEFT_STR2CLS[peft_scheme.lower()])
+ recipe.optim.config.lr = 1e-4
+ else:
+ raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
+ return recipe
diff --git a/nemo/collections/llm/t5/data/__init__.py b/nemo/collections/llm/t5/data/__init__.py
index d65f6923033f..e4f879b76a06 100644
--- a/nemo/collections/llm/t5/data/__init__.py
+++ b/nemo/collections/llm/t5/data/__init__.py
@@ -1,5 +1,6 @@
from nemo.collections.llm.t5.data.fine_tuning import FineTuningDataModule
+from nemo.collections.llm.t5.data.mock import MockDataModule
from nemo.collections.llm.t5.data.pre_training import PreTrainingDataModule
from nemo.collections.llm.t5.data.squad import SquadDataModule
-__all__ = ["FineTuningDataModule", "PreTrainingDataModule", "SquadDataModule"]
+__all__ = ["FineTuningDataModule", "PreTrainingDataModule", "SquadDataModule", "MockDataModule"]
diff --git a/nemo/collections/llm/t5/data/fine_tuning.py b/nemo/collections/llm/t5/data/fine_tuning.py
index 4e2a88e5712c..ced4ea1a0b37 100644
--- a/nemo/collections/llm/t5/data/fine_tuning.py
+++ b/nemo/collections/llm/t5/data/fine_tuning.py
@@ -17,7 +17,7 @@
from pathlib import Path
from typing import TYPE_CHECKING, List, Optional, Union
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from torch.utils.data import DataLoader
from nemo.collections.llm.t5.data.core import create_sft_dataset
@@ -71,10 +71,18 @@ def __init__(
self.seed = seed
self.dataset_root = Path(dataset_root)
- # add additional tokens for T5 tokenizer
- from nemo.collections.nlp.modules.common.tokenizer_utils import get_nmt_tokenizer
+ # create tokenizer if tokenizer is None
+ if tokenizer is None:
+ from nemo.collections.nlp.modules.common.tokenizer_utils import get_nmt_tokenizer
- self.tokenizer = tokenizer or get_nmt_tokenizer("megatron", "BertWordPieceCase")
+ special_tokens = {}
+ special_tokens['additional_special_tokens'] = [f'<extra_id_{i}>' for i in range(100)]
+ tokenizer = get_nmt_tokenizer(
+ "megatron",
+ "BertWordPieceCase",
+ special_tokens=special_tokens,
+ )
+ self.tokenizer = tokenizer
self.memmap_workers = memmap_workers
self.num_workers = num_workers
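Note (illustrative, not part of the patch): the 100 additional special tokens created above are T5's sentinel tokens for span corruption; the list looks like this:

```python
special = [f'<extra_id_{i}>' for i in range(100)]
assert special[0] == '<extra_id_0>' and special[-1] == '<extra_id_99>'
```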
diff --git a/nemo/collections/llm/t5/data/mock.py b/nemo/collections/llm/t5/data/mock.py
index eaf41d290da4..31198a4446e9 100644
--- a/nemo/collections/llm/t5/data/mock.py
+++ b/nemo/collections/llm/t5/data/mock.py
@@ -14,10 +14,10 @@
from typing import TYPE_CHECKING, Dict, List, Optional
+import lightning.pytorch as pl
import numpy as np
-import pytorch_lightning as pl
import torch
-from pytorch_lightning.utilities.types import EVAL_DATALOADERS, TRAIN_DATALOADERS
+from lightning.pytorch.utilities.types import EVAL_DATALOADERS, TRAIN_DATALOADERS
from torch.utils import data
from torch.utils.data import DataLoader, Dataset
@@ -125,13 +125,11 @@ def __init__(
self.seed = seed
self.create_attention_mask = create_attention_mask
- self.mask_encoder = torch.ones((self.seq_length, self.seq_length), device='cpu')
- self.mask_decoder = torch.tril(torch.ones((self.seq_length_dec, self.seq_length_dec), device='cpu'))
- self.mask_encoder_decoder = torch.ones((self.seq_length_dec, self.seq_length), device='cpu')
+ # updated: T5 now uses Flash/Fused attention, which takes per-token padding
+ # masks that are later broadcast to (b, 1, 1, s) in t5_data_step
+ self.mask_encoder = torch.ones(self.seq_length, device='cpu')
+ self.mask_decoder = torch.ones(self.seq_length_dec, device='cpu')
self.mask_encoder = self.mask_encoder < 0.5
self.mask_decoder = self.mask_decoder < 0.5
- self.mask_encoder_decoder = self.mask_encoder_decoder < 0.5
-
self.loss_mask = torch.ones(self.seq_length_dec, dtype=torch.float)
def __len__(self) -> int:
@@ -156,7 +154,6 @@ def __getitem__(self, idx) -> Dict[str, torch.Tensor]:
"truncated": 0,
"enc_mask": self.mask_encoder,
"dec_mask": self.mask_decoder,
- "enc_dec_mask": self.mask_encoder_decoder,
}
return batch
diff --git a/nemo/collections/llm/t5/data/pre_training.py b/nemo/collections/llm/t5/data/pre_training.py
index 9f6cb27933d5..4bd6e5ed5e93 100644
--- a/nemo/collections/llm/t5/data/pre_training.py
+++ b/nemo/collections/llm/t5/data/pre_training.py
@@ -17,8 +17,8 @@
from pathlib import Path
from typing import TYPE_CHECKING, Any, Dict, List, Optional
-import pytorch_lightning as pl
-from pytorch_lightning.utilities.types import EVAL_DATALOADERS, TRAIN_DATALOADERS
+import lightning.pytorch as pl
+from lightning.pytorch.utilities.types import EVAL_DATALOADERS, TRAIN_DATALOADERS
from torch.utils import data
from nemo.lightning.data import WrappedDataLoader
@@ -141,8 +141,18 @@ def __init__(
self.index_mapping_dir = index_mapping_dir
self.init_global_step = 0
- # add additional tokens for T5 tokenizer
- from nemo.collections.nlp.modules.common.tokenizer_utils import get_nmt_tokenizer
+ # create tokenizer if tokenizer is None
+ if tokenizer is None:
+ from nemo.collections.nlp.modules.common.tokenizer_utils import get_nmt_tokenizer
+
+ special_tokens = {}
+ special_tokens['additional_special_tokens'] = [f'<extra_id_{i}>' for i in range(100)]
+ tokenizer = get_nmt_tokenizer(
+ "megatron",
+ "BertWordPieceCase",
+ special_tokens=special_tokens,
+ )
+ self.tokenizer = tokenizer
self.data_sampler = MegatronDataSampler(
seq_len=self.seq_length,
diff --git a/nemo/collections/llm/t5/data/squad.py b/nemo/collections/llm/t5/data/squad.py
index 3e413919211c..4e90b09e622e 100644
--- a/nemo/collections/llm/t5/data/squad.py
+++ b/nemo/collections/llm/t5/data/squad.py
@@ -42,6 +42,7 @@ class SquadDataModule(FineTuningDataModule, IOMixin):
def __init__(
self,
+ dataset_root: Optional[str] = None,
seq_length: int = 512,
seq_length_dec: int = 128,
tokenizer: Optional["TokenizerSpec"] = None,
@@ -60,7 +61,7 @@ def __init__(
self.delete_raw = delete_raw
super().__init__(
- dataset_root=get_dataset_root("squad"),
+ dataset_root=get_dataset_root("squad") if dataset_root is None else dataset_root,
seq_length=seq_length,
seq_length_dec=seq_length_dec,
tokenizer=tokenizer,
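Note (illustrative, not part of the patch): with the new `dataset_root` override, the SQuAD module can point at a pre-downloaded copy instead of the default lookup. The local path below is a placeholder:

```python
from nemo.collections.llm.t5.data.squad import SquadDataModule

# Default behaviour: resolve the dataset under get_dataset_root("squad").
data = SquadDataModule(seq_length=512, seq_length_dec=128)

# New: point at an existing local copy instead.
data = SquadDataModule(dataset_root="/data/squad", seq_length=512, seq_length_dec=128)
```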
diff --git a/nemo/collections/llm/t5/model/t5.py b/nemo/collections/llm/t5/model/t5.py
index 058acaaec7b0..940c0e51ee92 100644
--- a/nemo/collections/llm/t5/model/t5.py
+++ b/nemo/collections/llm/t5/model/t5.py
@@ -16,11 +16,12 @@
from dataclasses import dataclass
from typing import TYPE_CHECKING, Callable, Dict, Literal, Optional, Union
-import pytorch_lightning as L
+import lightning.pytorch as L
import torch
import torch.distributed
from megatron.core.inference.model_inference_wrappers.inference_wrapper_config import InferenceWrapperConfig
from megatron.core.inference.model_inference_wrappers.t5.t5_inference_wrapper import T5InferenceWrapper
+from megatron.core.models.T5.t5_model import T5Model as MCoreT5Model
from megatron.core.optimizer import OptimizerConfig
from megatron.core.transformer.spec_utils import ModuleSpec
from megatron.core.transformer.transformer_config import TransformerConfig
@@ -38,8 +39,6 @@
HAVE_TE = False
if TYPE_CHECKING:
- from megatron.core.models.T5.t5_model import T5Model as MCoreT5Model
-
from nemo.collections.common.tokenizers.tokenizer_spec import TokenizerSpec
@@ -58,22 +57,32 @@ def t5_data_step(dataloader_iter) -> Dict[str, torch.Tensor]:
else:
_batch = batch
- # if Dataset object is NeMo 1.0's T5SFTDataset (e.g. when finetuning with SQUAD)
- if 'enc_dec_mask' not in _batch:
- encoder_attn_mask_3d = build_attention_mask_3d(_batch['enc_mask'], _batch['enc_mask'], AttnMaskType.padding)
- decoder_attn_mask_3d = build_attention_mask_3d(_batch['dec_mask'], _batch['dec_mask'], AttnMaskType.causal)
- enc_dec_attn_mask_3d = build_attention_mask_3d(_batch['dec_mask'], _batch['enc_mask'], AttnMaskType.padding)
- _batch['enc_mask'] = encoder_attn_mask_3d
- _batch['dec_mask'] = decoder_attn_mask_3d
- _batch['enc_dec_mask'] = enc_dec_attn_mask_3d
-
- # if Dataset object is Mcore T5 dataset (e.g. pretraining)
- else:
- # convert attention mask values from int to True/False
- _batch['enc_mask'] = _batch['enc_mask'] < 0.5
- _batch['dec_mask'] = _batch['dec_mask'] < 0.5
- _batch['enc_dec_mask'] = _batch['enc_dec_mask'] < 0.5
-
+ # works for both Megatron Core's T5 pre-training dataset and NeMo's T5SFTDataset:
+ # convert the 0/1 padding masks to boolean (True means masked)
+ enc_mask = _batch['enc_mask'] < 0.5
+ dec_mask = _batch['dec_mask'] < 0.5
+ # reshape for Flash/Fused attention kernels: (b, s) -> (b, 1, 1, s)
+ enc_mask = enc_mask.unsqueeze(1).unsqueeze(1)
+ dec_mask = dec_mask.unsqueeze(1).unsqueeze(1)
+ enc_dec_mask = (
+ dec_mask,
+ enc_mask,
+ )
+ # set dec_mask to None because decoder uses AttnMaskType.causal
+ dec_mask = None
+ _batch['enc_mask'] = enc_mask
+ _batch['dec_mask'] = dec_mask
+ _batch['enc_dec_mask'] = enc_dec_mask
+
+ # bring to device
+ for key in _batch.keys():
+        if key == "enc_dec_mask":  # enc_dec_mask is a tuple of (dec_mask, enc_mask)
+            _batch[key] = (_batch[key][0].cuda(non_blocking=True), _batch[key][1].cuda(non_blocking=True))
+        elif key == "dec_mask":  # dec_mask is None since the decoder uses AttnMaskType.causal
+ continue
+ else:
+ _batch[key] = _batch[key].cuda(non_blocking=True)
+
+ # set up forward arguments for pipeline parallelism
required_keys = set()
required_keys.update(["enc_mask", "dec_mask", "enc_dec_mask"])
if parallel_state.is_pipeline_first_stage():
@@ -81,7 +90,7 @@ def t5_data_step(dataloader_iter) -> Dict[str, torch.Tensor]:
if parallel_state.is_pipeline_last_stage():
required_keys.update(("labels", "loss_mask"))
- output = {key: val.cuda(non_blocking=True) if key in required_keys else None for key, val in _batch.items()}
+ output = {key: val if key in required_keys else None for key, val in _batch.items()}
return output
@@ -139,9 +148,12 @@ class T5Config(TransformerConfig, io.IOMixin):
share_embeddings_and_output_weights: bool = True
make_vocab_size_divisible_by: int = 128
position_embedding_type: Literal["learned_absolute", "rope"] = "learned_absolute"
+ apply_rope_fusion: bool = True
max_position_embeddings: int = 512
rotary_percent: float = 1.0
seq_len_interpolation_factor: Optional[float] = None
+ seq_length: int = 512
+ seq_length_dec: int = 128
encoder_pipeline_model_parallel_size: int = 0
attention_softmax_in_fp32: float = False
bias_activation_fusion: bool = True
@@ -168,7 +180,6 @@ def configure_model(self, tokenizer) -> "MCoreT5Model":
) % vp_size == 0, "Make sure the number of model chunks is the same across all pipeline stages."
from megatron.core import parallel_state
- from megatron.core.models.T5.t5_model import T5Model as MCoreT5Model
encoder_config = copy.deepcopy(self)
encoder_config.num_layers = self.encoder_num_layers
diff --git a/nemo/collections/multimodal/data/dreambooth/dreambooth_dataset.py b/nemo/collections/multimodal/data/dreambooth/dreambooth_dataset.py
index 1c39b1a72216..baead0c47962 100644
--- a/nemo/collections/multimodal/data/dreambooth/dreambooth_dataset.py
+++ b/nemo/collections/multimodal/data/dreambooth/dreambooth_dataset.py
@@ -15,8 +15,8 @@
from pathlib import Path
import torch
+from lightning.pytorch.utilities import rank_zero_only
from PIL import Image
-from pytorch_lightning.utilities import rank_zero_only
from torch.utils.data import Dataset
from tqdm import tqdm
diff --git a/nemo/collections/multimodal/data/energon/base.py b/nemo/collections/multimodal/data/energon/base.py
index 34752c878b1d..4e90dce55c7a 100644
--- a/nemo/collections/multimodal/data/energon/base.py
+++ b/nemo/collections/multimodal/data/energon/base.py
@@ -11,23 +11,24 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from typing import TYPE_CHECKING, Any, Dict, Literal, Optional
-import pytorch_lightning as pl
+from copy import deepcopy
+from typing import Any, Dict, Literal, Optional
+
+import fiddle as fdl
+import lightning.pytorch as pl
+from lightning.pytorch.utilities.types import EVAL_DATALOADERS, TRAIN_DATALOADERS
from megatron.core import parallel_state
from megatron.energon import WorkerConfig, get_savable_loader, get_train_dataset
-from pytorch_lightning.utilities.types import EVAL_DATALOADERS, TRAIN_DATALOADERS
from torch.utils.data import DataLoader
+from typing_extensions import Self
from nemo.collections.multimodal.data.energon.config import MultiModalSampleConfig
from nemo.collections.multimodal.data.energon.task_encoder import MultiModalTaskEncoder
-from nemo.lightning.io.mixin import IOMixin
+from nemo.lightning.io.mixin import IOMixin, serialization, track_io
from nemo.lightning.pytorch.plugins import MegatronDataSampler
from nemo.utils import logging
-if TYPE_CHECKING:
- from nemo.collections.common.tokenizers.tokenizer_spec import TokenizerSpec
-
class SimpleMultiModalDataModule(pl.LightningDataModule, IOMixin):
"""
@@ -66,6 +67,7 @@ def __init__(
pin_memory: bool = True,
multimodal_sample_config: Optional[MultiModalSampleConfig] = MultiModalSampleConfig(),
task_encoder: Optional[MultiModalTaskEncoder] = None,
+ decoder_seq_length: Optional[int] = None,
) -> None:
"""
Initialize the SimpleMultiModalDataModule.
@@ -87,6 +89,7 @@ def __init__(
self.tokenizer = tokenizer
self.image_processor = image_processor
self.seq_length = seq_length
+ self.decoder_seq_length = decoder_seq_length
self.micro_batch_size = micro_batch_size
self.global_batch_size = global_batch_size
self.num_workers = num_workers
@@ -99,11 +102,24 @@ def __init__(
)
self.init_global_step = 0
self.data_sampler = SequentialMegatronSampler(
- seq_len=self.seq_length, micro_batch_size=self.micro_batch_size, global_batch_size=self.global_batch_size
+ seq_len=self.seq_length,
+ decoder_seq_len=self.decoder_seq_length,
+ micro_batch_size=self.micro_batch_size,
+ global_batch_size=self.global_batch_size,
)
self.train_dataloader_object = None
self.val_dataloader_object = None
+ def io_init(self, **kwargs) -> fdl.Config[Self]:
+ # (pleasefixme) image_processor and task_encoder are problematic with Fiddle so we skip serializing them for now
+ cfg_kwargs = {k: deepcopy(v) for k, v in kwargs.items() if k not in ['image_processor', 'task_encoder']}
+
+ for val in cfg_kwargs.values():
+ if not serialization.find_node_traverser(type(val)):
+ track_io(type(val))
+ cfg = fdl.Config(type(self), **cfg_kwargs)
+ return cfg
+
def datasets_provider(self, worker_config, split: Literal['train', 'val'] = 'val'):
"""
Provide the dataset for training or validation.
@@ -315,6 +331,7 @@ def __init__(
micro_batch_size: int = 4,
global_batch_size: int = 8,
init_consumed_samples: int = 0,
+ decoder_seq_len: Optional[int] = None,
init_global_step=0,
):
"""
@@ -328,6 +345,7 @@ def __init__(
"""
super().__init__(
seq_len=seq_len,
+ decoder_seq_len=decoder_seq_len,
micro_batch_size=micro_batch_size,
global_batch_size=global_batch_size,
init_consumed_samples=init_consumed_samples,
diff --git a/nemo/collections/multimodal/data/energon/config.py b/nemo/collections/multimodal/data/energon/config.py
index 45ca8e9db800..c145c5e51019 100644
--- a/nemo/collections/multimodal/data/energon/config.py
+++ b/nemo/collections/multimodal/data/energon/config.py
@@ -15,7 +15,7 @@
from dataclasses import dataclass, field
from typing import List
import torch
-from nemo.collections.multimodal.data.energon.conversation import BaseConversationTemplateConfig
+from nemo.collections.multimodal.data.energon.conversation import LLaVATemplateConfig
@dataclass
@@ -56,12 +56,6 @@ class ImageTextRawBatch:
loss_mask: torch.Tensor = field(default_factory=lambda: torch.empty(0, dtype=torch.float))
-class LLaVATemplateConfig(BaseConversationTemplateConfig):
- """LLava specific template configuration which extends the base config"""
-
- pass
-
-
@dataclass
class MultiModalSampleConfig:
image_token: ImageToken = field(default_factory=ImageToken)
diff --git a/nemo/collections/multimodal/data/energon/conversation.py b/nemo/collections/multimodal/data/energon/conversation.py
index 3342b7e9a411..f0749e47dc12 100644
--- a/nemo/collections/multimodal/data/energon/conversation.py
+++ b/nemo/collections/multimodal/data/energon/conversation.py
@@ -19,6 +19,15 @@
class BaseConversationTemplateConfig:
"""Conversation template config related parameters"""
+ system: Optional[str] = "".format() # fmt: off
+ roles: List[str] = field(default_factory=lambda: ['user', 'assistant'])
+ stop_string: Optional[str] = None
+ chat_template = None
+
+
+class LLaVATemplateConfig(BaseConversationTemplateConfig):
+ """LLava specific template configuration which extends the base config"""
+
system: Optional[str] = (
"A chat between a curious user and artificial assistant agent. The assistant gives helpful, detailed and polite answers to user's questions.".format()
) # fmt: off
@@ -36,3 +45,14 @@ class BaseConversationTemplateConfig:
{%- endif %}
{%- endfor -%}
"""
+
+
+class MLlamaTemplateConfig(BaseConversationTemplateConfig):
+    """MLlama specific template configuration which extends the base config"""
+
+    system: Optional[str] = None
+    roles: List[str] = field(default_factory=lambda: ['user', 'assistant'])
+    stop_string: Optional[str] = None
+ chat_template = """
+    '{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not date_string is defined %}\n {%- if strftime_now is defined %}\n {%- set date_string = strftime_now("%d %b %Y") %}\n {%- else %}\n {%- set date_string = "26 Jul 2024" %}\n {%- endif %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- This block extracts the system message, so we can slot it into the right place. #}\n{%- if messages[0][\'role\'] == \'system\' %}\n {%- set system_message = messages[0][\'content\']|trim %}\n {%- set messages = messages[1:] %}\n{%- else %}\n {%- set system_message = "" %}\n{%- endif %}\n\n{#- Find out if there are any images #}\n{% set image_ns = namespace(has_images=false) %} \n{%- for message in messages %}\n {%- for content in message[\'content\'] %}\n {%- if content[\'type\'] == \'image\' %}\n {%- set image_ns.has_images = true %}\n {%- endif %}\n {%- endfor %}\n{%- endfor %}\n\n{#- Error out if there are images and system message #}\n{%- if image_ns.has_images and not system_message == "" %}\n {{- raise_exception("Prompting with images is incompatible with system messages.") }}\n{%- endif %}\n\n{#- System message if there are no images #}\n{%- if not image_ns.has_images %}\n {{- "<|start_header_id|>system<|end_header_id|>\\n\\n" }}\n {%- if tools is not none %}\n {{- "Environment: ipython\\n" }}\n {%- endif %}\n {{- "Cutting Knowledge Date: December 2023\\n" }}\n {{- "Today Date: " + date_string + "\\n\\n" }}\n {%- if tools is not none and not tools_in_user_message %}\n {{- "You have access to the following functions. To call a function, please respond with JSON for a function call." }}\n {{- \'Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}.\' }}\n {{- "Do not use variables.\\n\\n" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- "\\n\\n" }}\n {%- endfor %}\n {%- endif %}\n {{- system_message }}\n {{- "<|eot_id|>" }}\n{%- endif %}\n\n{#- Custom tools are passed in a user message with some extra guidance #}\n{%- if tools_in_user_message and not tools is none %}\n {#- Extract the first user message so we can plug it in here #}\n {%- if messages | length != 0 %}\n {%- set first_user_message = messages[0][\'content\']|trim %}\n {%- set messages = messages[1:] %}\n {%- else %}\n {{- raise_exception("Cannot put tools in the first user message when there\'s no first user message!") }}\n{%- endif %}\n {{- \'<|start_header_id|>user<|end_header_id|>\\n\\n\' -}}\n {{- "Given the following functions, please respond with a JSON for a function call " }}\n {{- "with its proper arguments that best answers the given prompt.\\n\\n" }}\n {{- \'Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}.\' }}\n {{- "Do not use variables.\\n\\n" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- "\\n\\n" }}\n {%- endfor %}\n {{- first_user_message + "<|eot_id|>"}}\n{%- endif %}\n\n{%- for message in messages %}\n {%- if not (message.role == \'ipython\' or message.role == \'tool\' or \'tool_calls\' in message) %}\n {{- \'<|start_header_id|>\' + message[\'role\'] + \'<|end_header_id|>\\n\\n\' }}\n {%- if message[\'content\'] is string %}\n {{- message[\'content\'] }}\n {%- else %}\n {%- for content in message[\'content\'] %}\n {%- if content[\'type\'] == \'image\' %}\n {{- \'<|image|>\' }}\n {%- elif content[\'type\'] == \'text\' %}\n {{- content[\'text\'] }}\n {%- endif %}\n {%- endfor %}\n {%- endif %}\n {{- \'<|eot_id|>\' }}\n {%- elif \'tool_calls\' in message %}\n {%- if not message.tool_calls|length == 1 %}\n {{- raise_exception("This model only supports single tool-calls at once!") }}\n {%- endif %}\n {%- set tool_call = message.tool_calls[0].function %}\n {{- \'<|start_header_id|>assistant<|end_header_id|>\\n\\n\' -}}\n {{- \'{"name": "\' + tool_call.name + \'", \' }}\n {{- \'"parameters": \' }}\n {{- tool_call.arguments | tojson }}\n {{- "}" }}\n {{- "<|eot_id|>" }}\n {%- elif message.role == "tool" or message.role == "ipython" %}\n {{- "<|start_header_id|>ipython<|end_header_id|>\\n\\n" }}\n {%- if message.content is mapping or message.content is iterable %}\n {{- message.content | tojson }}\n {%- else %}\n {{- message.content }}\n {%- endif %}\n {{- "<|eot_id|>" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- \'<|start_header_id|>assistant<|end_header_id|>\\n\\n\' }}\n{%- endif %}\n'
+ """
diff --git a/nemo/collections/multimodal/data/energon/task_encoder.py b/nemo/collections/multimodal/data/energon/task_encoder.py
index 5989ecad879b..23758b3a43db 100644
--- a/nemo/collections/multimodal/data/energon/task_encoder.py
+++ b/nemo/collections/multimodal/data/energon/task_encoder.py
@@ -62,7 +62,7 @@ def __init__(self, tokenizer, image_processor, multimodal_sample_config):
image_processor (ImageProcessor): The image processor used for preprocessing images across different sample types.
multimodal_sample_config (MultiModalSampleConfig): Configuration object for multimodal samples, including tokens and placeholders.
"""
-
+ self.tokenizer = tokenizer
self.encoders: Dict[str, SampleEncoder] = {
VQASample.__name__: VQASampleEncoder(
tokenizer=tokenizer,
diff --git a/nemo/collections/multimodal/models/multimodal_llm/neva/neva_model.py b/nemo/collections/multimodal/models/multimodal_llm/neva/neva_model.py
index 5291497f92c3..5d19b8544305 100644
--- a/nemo/collections/multimodal/models/multimodal_llm/neva/neva_model.py
+++ b/nemo/collections/multimodal/models/multimodal_llm/neva/neva_model.py
@@ -22,8 +22,8 @@
import torch
import torch.nn.functional as F
from einops import rearrange, reduce, repeat
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import DictConfig, ListConfig, OmegaConf
-from pytorch_lightning.trainer.trainer import Trainer
from transformers import CLIPVisionModel, SiglipVisionModel
from nemo.collections.common.parts.utils import extend_instance
diff --git a/nemo/collections/multimodal/models/text_to_image/controlnet/controlnet.py b/nemo/collections/multimodal/models/text_to_image/controlnet/controlnet.py
index 158fa7595782..981600fcc3a1 100644
--- a/nemo/collections/multimodal/models/text_to_image/controlnet/controlnet.py
+++ b/nemo/collections/multimodal/models/text_to_image/controlnet/controlnet.py
@@ -18,9 +18,9 @@
import torch
import torch.nn as nn
from einops import rearrange, repeat
+from lightning.pytorch import Trainer
+from lightning.pytorch.utilities.rank_zero import rank_zero_only
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
-from pytorch_lightning.utilities.rank_zero import rank_zero_only
from torch._inductor import config as inductor_config
from nemo.collections.multimodal.data.controlnet.controlnet_dataset import build_train_valid_datasets
diff --git a/nemo/collections/multimodal/models/text_to_image/controlnet/util.py b/nemo/collections/multimodal/models/text_to_image/controlnet/util.py
index 3d9a7d16b1c3..f890426c98f4 100644
--- a/nemo/collections/multimodal/models/text_to_image/controlnet/util.py
+++ b/nemo/collections/multimodal/models/text_to_image/controlnet/util.py
@@ -17,9 +17,9 @@
import numpy as np
import torch
import torchvision
+from lightning.pytorch import Callback
+from lightning.pytorch.utilities.rank_zero import rank_zero_only
from PIL import Image
-from pytorch_lightning import Callback
-from pytorch_lightning.utilities.rank_zero import rank_zero_only
class ImageLogger(Callback):
diff --git a/nemo/collections/multimodal/models/text_to_image/dreambooth/dreambooth.py b/nemo/collections/multimodal/models/text_to_image/dreambooth/dreambooth.py
index 47548b02961d..8906263faeba 100644
--- a/nemo/collections/multimodal/models/text_to_image/dreambooth/dreambooth.py
+++ b/nemo/collections/multimodal/models/text_to_image/dreambooth/dreambooth.py
@@ -15,8 +15,8 @@
from typing import Any, Optional
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from torch._inductor import config as inductor_config
from nemo.collections.multimodal.data.dreambooth.dreambooth_dataset import DreamBoothDataset
diff --git a/nemo/collections/multimodal/models/text_to_image/imagen/imagen.py b/nemo/collections/multimodal/models/text_to_image/imagen/imagen.py
index ed9be58178c4..1772e465f604 100644
--- a/nemo/collections/multimodal/models/text_to_image/imagen/imagen.py
+++ b/nemo/collections/multimodal/models/text_to_image/imagen/imagen.py
@@ -17,8 +17,8 @@
from typing import Any
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, open_dict
-from pytorch_lightning import Trainer
from nemo.collections.multimodal.data.imagen.imagen_dataset import build_train_valid_datasets
from nemo.collections.multimodal.models.text_to_image.imagen.precond import ContinousDDPMPrecond, EDMPrecond
diff --git a/nemo/collections/multimodal/models/text_to_image/imagen/imagen_pipeline.py b/nemo/collections/multimodal/models/text_to_image/imagen/imagen_pipeline.py
index 43660c9000a1..63963321fcf7 100644
--- a/nemo/collections/multimodal/models/text_to_image/imagen/imagen_pipeline.py
+++ b/nemo/collections/multimodal/models/text_to_image/imagen/imagen_pipeline.py
@@ -17,8 +17,8 @@
from typing import Callable, List, Optional, Union
import torch
+from lightning.pytorch import Trainer
from omegaconf.omegaconf import OmegaConf
-from pytorch_lightning import Trainer
from torch.cuda.amp import autocast
from nemo.collections.multimodal.models.text_to_image.imagen.imagen import Imagen, MegatronImagen
@@ -73,7 +73,9 @@ def _load_model(model_ckpt: str, model_cfg: str, eval_mode: bool = True, trainer
model_cfg.micro_batch_size = 1
model_cfg.global_batch_size = 1
model = MegatronImagen.restore_from(
- restore_path=model_ckpt, override_config_path=model_cfg, trainer=trainer,
+ restore_path=model_ckpt,
+ override_config_path=model_cfg,
+ trainer=trainer,
)
elif model_ckpt.endswith('.ckpt'):
model_cfg = OmegaConf.load(model_cfg)
@@ -128,7 +130,9 @@ def model_cfg_modifier(model_cfg):
models = []
print('Load base model.')
model = ImagenPipeline._load_model(
- model_ckpt=customized_models.base_ckpt, model_cfg=customized_models.base_cfg, trainer=trainer,
+ model_ckpt=customized_models.base_ckpt,
+ model_cfg=customized_models.base_cfg,
+ trainer=trainer,
)
models.append(model)
diff --git a/nemo/collections/multimodal/models/text_to_image/stable_diffusion/diffusion_engine.py b/nemo/collections/multimodal/models/text_to_image/stable_diffusion/diffusion_engine.py
index 8b18fe2b25fe..c7e8795a749c 100644
--- a/nemo/collections/multimodal/models/text_to_image/stable_diffusion/diffusion_engine.py
+++ b/nemo/collections/multimodal/models/text_to_image/stable_diffusion/diffusion_engine.py
@@ -17,14 +17,14 @@
from typing import Any, Dict, List, Tuple, Union
import hydra
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
import torch._dynamo
import torch.nn as nn
from einops import rearrange
+from lightning.pytorch import Trainer
+from lightning.pytorch.utilities import rank_zero_only
from omegaconf import DictConfig, ListConfig, OmegaConf
-from pytorch_lightning import Trainer
-from pytorch_lightning.utilities import rank_zero_only
from safetensors.torch import load_file as load_safetensors
from torch._dynamo import optimize
from torch.optim.lr_scheduler import LambdaLR
diff --git a/nemo/collections/multimodal/models/text_to_image/stable_diffusion/ldm/autoencoder.py b/nemo/collections/multimodal/models/text_to_image/stable_diffusion/ldm/autoencoder.py
index d79d85c2e026..311ebc0f06f5 100644
--- a/nemo/collections/multimodal/models/text_to_image/stable_diffusion/ldm/autoencoder.py
+++ b/nemo/collections/multimodal/models/text_to_image/stable_diffusion/ldm/autoencoder.py
@@ -13,7 +13,7 @@
# limitations under the License.
from contextlib import contextmanager
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
import torch.nn.functional as F
from nemo.utils import logging
diff --git a/nemo/collections/multimodal/models/text_to_image/stable_diffusion/ldm/ddpm.py b/nemo/collections/multimodal/models/text_to_image/stable_diffusion/ldm/ddpm.py
index 744dc6945394..163b2fb27e0f 100644
--- a/nemo/collections/multimodal/models/text_to_image/stable_diffusion/ldm/ddpm.py
+++ b/nemo/collections/multimodal/models/text_to_image/stable_diffusion/ldm/ddpm.py
@@ -17,18 +17,18 @@
from functools import partial
from typing import Any, Dict, List, Optional, Union
+import lightning.pytorch as pl
import numpy as np
-import pytorch_lightning as pl
import torch
import torch.nn as nn
from einops import rearrange, repeat
-from lightning_fabric.utilities.cloud_io import _load as pl_load
+from lightning.fabric.utilities.cloud_io import _load as pl_load
+from lightning.pytorch import Trainer
+from lightning.pytorch.core.saving import _load_state as ptl_load_state
+from lightning.pytorch.core.saving import load_hparams_from_tags_csv, load_hparams_from_yaml
+from lightning.pytorch.utilities.migration import pl_legacy_patch
+from lightning.pytorch.utilities.rank_zero import rank_zero_only
from omegaconf import DictConfig, open_dict
-from pytorch_lightning import Trainer
-from pytorch_lightning.core.saving import _load_state as ptl_load_state
-from pytorch_lightning.core.saving import load_hparams_from_tags_csv, load_hparams_from_yaml
-from pytorch_lightning.utilities.migration import pl_legacy_patch
-from pytorch_lightning.utilities.rank_zero import rank_zero_only
from torch._inductor import config as inductor_config
from torchvision.utils import make_grid
from tqdm import tqdm
diff --git a/nemo/collections/multimodal/models/vision_language_foundation/clip/megatron_clip_models.py b/nemo/collections/multimodal/models/vision_language_foundation/clip/megatron_clip_models.py
index a9e51610bedd..84718f99262f 100644
--- a/nemo/collections/multimodal/models/vision_language_foundation/clip/megatron_clip_models.py
+++ b/nemo/collections/multimodal/models/vision_language_foundation/clip/megatron_clip_models.py
@@ -23,10 +23,10 @@
import numpy as np
import torch
import torch.nn.functional as F
+from lightning.pytorch.accelerators import CPUAccelerator
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf
from omegaconf.dictconfig import DictConfig
-from pytorch_lightning.accelerators import CPUAccelerator
-from pytorch_lightning.trainer.trainer import Trainer
from tqdm import tqdm
from nemo.collections.multimodal.data.clip.clip_dataset import (
diff --git a/nemo/collections/multimodal/models/vision_language_foundation/megatron_nsfw_clip_models.py b/nemo/collections/multimodal/models/vision_language_foundation/megatron_nsfw_clip_models.py
index 79c0f3910be0..37e33f892890 100644
--- a/nemo/collections/multimodal/models/vision_language_foundation/megatron_nsfw_clip_models.py
+++ b/nemo/collections/multimodal/models/vision_language_foundation/megatron_nsfw_clip_models.py
@@ -19,11 +19,11 @@
import torch
import torch.nn as nn
import torch.nn.functional as F
+from lightning.pytorch.accelerators import CPUAccelerator
+from lightning.pytorch.trainer.trainer import Trainer
from megatron.core import parallel_state
from megatron.core.pipeline_parallel.schedules import get_forward_backward_func
from omegaconf.dictconfig import DictConfig
-from pytorch_lightning.accelerators import CPUAccelerator
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.multimodal.data.clip.clip_dataset import tokenize
from nemo.collections.multimodal.data.nsfw.nsfw_dataset import build_dataset
@@ -38,7 +38,6 @@
from nemo.core.classes.common import PretrainedModelInfo
from nemo.utils import logging
-
try:
from megatron.core.num_microbatches_calculator import get_num_microbatches
diff --git a/nemo/collections/multimodal/parts/utils.py b/nemo/collections/multimodal/parts/utils.py
index 6ba2e8ca91f9..8773b47025bc 100644
--- a/nemo/collections/multimodal/parts/utils.py
+++ b/nemo/collections/multimodal/parts/utils.py
@@ -17,10 +17,10 @@
import numpy as np
import torch
+from lightning.pytorch import Trainer
+from lightning.pytorch.plugins.environments import TorchElasticEnvironment
from omegaconf import DictConfig, OmegaConf, open_dict
from PIL import Image
-from pytorch_lightning import Trainer
-from pytorch_lightning.plugins.environments import TorchElasticEnvironment
from transformers import CLIPImageProcessor, SiglipImageProcessor
from nemo.collections.multimodal.data.clip.augmentations.augmentations import image_transform
diff --git a/nemo/collections/multimodal/speech_cv/models/visual_ctc_models.py b/nemo/collections/multimodal/speech_cv/models/visual_ctc_models.py
index 50b4d29c05a4..53ae4a2dfb65 100644
--- a/nemo/collections/multimodal/speech_cv/models/visual_ctc_models.py
+++ b/nemo/collections/multimodal/speech_cv/models/visual_ctc_models.py
@@ -20,8 +20,8 @@
from typing import Dict, List, Optional, Union
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf, open_dict
-from pytorch_lightning import Trainer
from tqdm.auto import tqdm
from nemo.collections.asr.data import audio_to_text_dataset
diff --git a/nemo/collections/multimodal/speech_cv/models/visual_hybrid_rnnt_ctc_bpe_models.py b/nemo/collections/multimodal/speech_cv/models/visual_hybrid_rnnt_ctc_bpe_models.py
index 106fbc432926..8249e5d8a7f8 100644
--- a/nemo/collections/multimodal/speech_cv/models/visual_hybrid_rnnt_ctc_bpe_models.py
+++ b/nemo/collections/multimodal/speech_cv/models/visual_hybrid_rnnt_ctc_bpe_models.py
@@ -17,8 +17,8 @@
from typing import Dict, Optional, Union
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, ListConfig, OmegaConf, open_dict
-from pytorch_lightning import Trainer
from nemo.collections.asr.losses.ctc import CTCLoss
from nemo.collections.asr.losses.rnnt import RNNTLoss
@@ -90,7 +90,10 @@ def __init__(self, cfg: DictConfig, trainer: Trainer = None):
# Setup decoding object
self.decoding = RNNTBPEDecoding(
- decoding_cfg=self.cfg.decoding, decoder=self.decoder, joint=self.joint, tokenizer=self.tokenizer,
+ decoding_cfg=self.cfg.decoding,
+ decoder=self.decoder,
+ joint=self.joint,
+ tokenizer=self.tokenizer,
)
# Setup wer object
@@ -282,7 +285,10 @@ def change_vocabulary(
decoding_cfg = OmegaConf.merge(decoding_cls, decoding_cfg)
self.decoding = RNNTBPEDecoding(
- decoding_cfg=decoding_cfg, decoder=self.decoder, joint=self.joint, tokenizer=self.tokenizer,
+ decoding_cfg=decoding_cfg,
+ decoder=self.decoder,
+ joint=self.joint,
+ tokenizer=self.tokenizer,
)
self.wer = WER(
@@ -388,7 +394,10 @@ def change_decoding_strategy(self, decoding_cfg: DictConfig, decoder_type: str =
decoding_cfg = OmegaConf.merge(decoding_cls, decoding_cfg)
self.decoding = RNNTBPEDecoding(
- decoding_cfg=decoding_cfg, decoder=self.decoder, joint=self.joint, tokenizer=self.tokenizer,
+ decoding_cfg=decoding_cfg,
+ decoder=self.decoder,
+ joint=self.joint,
+ tokenizer=self.tokenizer,
)
self.wer = WER(
diff --git a/nemo/collections/multimodal/speech_cv/models/visual_hybrid_rnnt_ctc_models.py b/nemo/collections/multimodal/speech_cv/models/visual_hybrid_rnnt_ctc_models.py
index 1b30263985da..158bfaddcc96 100644
--- a/nemo/collections/multimodal/speech_cv/models/visual_hybrid_rnnt_ctc_models.py
+++ b/nemo/collections/multimodal/speech_cv/models/visual_hybrid_rnnt_ctc_models.py
@@ -19,8 +19,8 @@
from typing import List, Optional
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf, open_dict
-from pytorch_lightning import Trainer
from tqdm.auto import tqdm
from nemo.collections.asr.losses.ctc import CTCLoss
diff --git a/nemo/collections/multimodal/speech_cv/models/visual_rnnt_bpe_models.py b/nemo/collections/multimodal/speech_cv/models/visual_rnnt_bpe_models.py
index eeffb906981a..11e9d43e1737 100644
--- a/nemo/collections/multimodal/speech_cv/models/visual_rnnt_bpe_models.py
+++ b/nemo/collections/multimodal/speech_cv/models/visual_rnnt_bpe_models.py
@@ -17,8 +17,8 @@
from typing import Dict, List, Optional, Union
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, ListConfig, OmegaConf, open_dict
-from pytorch_lightning import Trainer
from nemo.collections.asr.losses.rnnt import RNNTLoss
from nemo.collections.asr.metrics.wer import WER
@@ -68,7 +68,10 @@ def __init__(self, cfg: DictConfig, trainer: Trainer = None):
# Setup decoding object
self.decoding = RNNTBPEDecoding(
- decoding_cfg=self.cfg.decoding, decoder=self.decoder, joint=self.joint, tokenizer=self.tokenizer,
+ decoding_cfg=self.cfg.decoding,
+ decoder=self.decoder,
+ joint=self.joint,
+ tokenizer=self.tokenizer,
)
# Setup wer object
@@ -165,7 +168,10 @@ def change_vocabulary(
decoding_cfg = OmegaConf.merge(decoding_cls, decoding_cfg)
self.decoding = RNNTBPEDecoding(
- decoding_cfg=decoding_cfg, decoder=self.decoder, joint=self.joint, tokenizer=self.tokenizer,
+ decoding_cfg=decoding_cfg,
+ decoder=self.decoder,
+ joint=self.joint,
+ tokenizer=self.tokenizer,
)
self.wer = WER(
@@ -214,7 +220,10 @@ def change_decoding_strategy(self, decoding_cfg: DictConfig):
decoding_cfg = OmegaConf.merge(decoding_cls, decoding_cfg)
self.decoding = RNNTBPEDecoding(
- decoding_cfg=decoding_cfg, decoder=self.decoder, joint=self.joint, tokenizer=self.tokenizer,
+ decoding_cfg=decoding_cfg,
+ decoder=self.decoder,
+ joint=self.joint,
+ tokenizer=self.tokenizer,
)
self.wer = WER(
diff --git a/nemo/collections/multimodal/speech_cv/models/visual_rnnt_models.py b/nemo/collections/multimodal/speech_cv/models/visual_rnnt_models.py
index 5a86eed93019..75202238d2d0 100644
--- a/nemo/collections/multimodal/speech_cv/models/visual_rnnt_models.py
+++ b/nemo/collections/multimodal/speech_cv/models/visual_rnnt_models.py
@@ -20,8 +20,8 @@
from typing import Dict, List, Optional, Tuple, Union
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf, open_dict
-from pytorch_lightning import Trainer
from tqdm.auto import tqdm
from nemo.collections.asr.data import audio_to_text_dataset
diff --git a/nemo/collections/multimodal/speech_llm/models/modular_models.py b/nemo/collections/multimodal/speech_llm/models/modular_models.py
index 46b2ca3e26fd..aab27cf2d908 100644
--- a/nemo/collections/multimodal/speech_llm/models/modular_models.py
+++ b/nemo/collections/multimodal/speech_llm/models/modular_models.py
@@ -21,11 +21,11 @@
import sacrebleu
import torch
from hydra.utils import get_class
+from lightning.pytorch.trainer.trainer import Trainer
+from lightning.pytorch.utilities import rank_zero_only
from omegaconf import ListConfig
from omegaconf.dictconfig import DictConfig
from omegaconf.omegaconf import OmegaConf, open_dict
-from pytorch_lightning.trainer.trainer import Trainer
-from pytorch_lightning.utilities import rank_zero_only
from nemo.collections.asr.models import ASRModel, EncDecSpeakerLabelModel
from nemo.collections.asr.parts.utils.eval_utils import remove_punctuations
diff --git a/nemo/collections/multimodal/speech_llm/models/modular_t5_models.py b/nemo/collections/multimodal/speech_llm/models/modular_t5_models.py
index 79fc0468e819..a99f5c346831 100644
--- a/nemo/collections/multimodal/speech_llm/models/modular_t5_models.py
+++ b/nemo/collections/multimodal/speech_llm/models/modular_t5_models.py
@@ -21,10 +21,10 @@
import sacrebleu
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import ListConfig
from omegaconf.dictconfig import DictConfig
from omegaconf.omegaconf import OmegaConf, open_dict
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.asr.models import ASRModel, SpeechEncDecSelfSupervisedModel
from nemo.collections.common.data.utils import move_data_to_device
diff --git a/nemo/collections/multimodal_autoregressive/__init__.py b/nemo/collections/multimodal_autoregressive/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/nemo/collections/multimodal_autoregressive/data/README.md b/nemo/collections/multimodal_autoregressive/data/README.md
new file mode 100644
index 000000000000..3f6d5a6c6a81
--- /dev/null
+++ b/nemo/collections/multimodal_autoregressive/data/README.md
@@ -0,0 +1,106 @@
+## MULTIMODAL AUTOREGRESSIVE GENERATION
+This is an example of how to do autoregressive generation across multiple modalities using discrete tokenizers. The two walkthroughs below cover vision understanding (i.e., image to captions) and image generation; the same recipe can be extended to other modalities depending on how you preprocess the data.
+
+
+1. Vision Understanding using EMU3 Tokenizer
+2. Image generation using Cosmos Tokenizer
+
+### 1. Vision Understanding using EMU3 Tokenizer
+
+#### Download and Extract data
+Download the [COYO700M dataset](https://github.com/kakaobrain/coyo-dataset)
+
+Once downloaded, extract the data using tar utilities (a minimal sketch follows).
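+
+A minimal extraction sketch, assuming the download arrives as `.tar` shards (all paths below are placeholders):
+
+```
+# hypothetical helper, not part of this PR
+import glob
+import tarfile
+
+for shard in glob.glob("/path/to/downloads/*.tar"):
+    with tarfile.open(shard) as tf:
+        tf.extractall("/path/to/images")  # unpack every shard into the images directory
+```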
+
+
+#### Preprocess data
+The preprocessing script does the following:
+1. Convert images to discrete vision tokens using the [EMU3 Tokenizer](https://github.com/baaivision/Emu3)
+2. Create input data of the format `You are a helpful assistant. USER: {image_tokens}Please describe the image. ASSISTANT: {caption}`
+3. Store the result as an indexed dataset (i.e., `.bin` and `.idx` files)
+
+Run the preprocessing script as follows:
+```
+NUM_GPUS=2
+IMAGES_DIR=/path/to/images
+CAPTIONS_DIR=/path/to/captions
+OUTPUT_PREFIX=/path/to/bin/idx/file
+
+
+# Make sure you have tiktoken==0.6.0 installed
+torchrun --nproc-per-node $NUM_GPUS nemo/collections/multimodal_autoregressive/data/preprocess_coyo_emu3_tokenizer.py --input_image_dir $IMAGES_DIR --input_captions_dir $CAPTIONS_DIR --output_prefix $OUTPUT_PREFIX
+```
+
+*NOTE*: Images should be `.jpg` files, and each image must have a caption file of type `.pkl` with the same base name (a quick pairing check is sketched below).
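+
+A quick pairing check before preprocessing (a minimal sketch; paths are placeholders, and it only prints images that are missing a caption):
+
+```
+# hypothetical helper, not part of this PR
+from glob import glob
+from pathlib import Path
+
+for image_path in glob("/path/to/images/*.jpg"):
+    caption_path = Path("/path/to/captions") / (Path(image_path).stem + ".pkl")
+    if not caption_path.is_file():
+        print(f"missing caption for {image_path}")
+```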
+
+#### Train model
+Follow the usual NeMo instructions to train any autoregressive model.
+1. Make sure you have tiktoken installed (`pip install tiktoken==0.6.0`)
+2. For the tokenizer, use this:
+```
+ tokenizer:
+ library: huggingface
+ type: BAAI/Emu3-Gen
+ model: null
+ delimiter: null
+ vocab_file: null
+ merge_file: null
+ sentencepiece_legacy: false
+ trust_remote_code: true
+```
+
+#### Inference
+To run inference, edit the [inference config file](examples/multimodal_autoregressive/conf/megatron_mm_ar_inference_vision_understanding.yaml).
+*NOTE*: Make sure you have a `.nemo` file (checkpoint). If you only have a regular Megatron checkpoint, convert it first as shown in [this doc](https://docs.nvidia.com/nemo-framework/user-guide/latest/llms/gpt/checkpointconversion.html?highlight=convert).
+
+Run inference as follows:
+
+```
+torchrun --nproc-per-node 2 examples/multimodal_autoregressive/megatron_mm_autoregressive_eval_vision_understanding.py
+```
+
+
+### 2. Image generation using Cosmos Tokenizer
+
+#### Preprocess data
+The preprocessing script for image generation does the following:
+1. Download the Pokemon image-captions dataset from Hugging Face
+2. Convert images to discrete vision tokens using the [Cosmos Tokenizer](../../../../nemo/collections/common/video_tokenizers/cosmos_tokenizer.py)
+3. Create input data of the format `You are a helpful assistant. Draw a picture for the caption given by the user. USER: {caption}. ASSISTANT: {image_tokens}`
+4. Store the result as an indexed dataset (i.e., `.bin` and `.idx` files)
+
+Run the preprocessing script as follows:
+```
+# Make sure you have tiktoken == 0.6.0 installed
+
+MULTIMODAL_TOKENIZER_PATH=/path/to/nemo/collections/multimodal_autoregressive/tokenizer
+OUTPUT_PREFIX=/path/to/bin/idx/file
+
+python nemo/collections/multimodal_autoregressive/data/preprocess_pokemon_blip_cosmos_tokenizer.py --output_prefix $OUTPUT_PREFIX --multimodal_tokenizer_path $MULTIMODAL_TOKENIZER_PATH
+```
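+
+As a quick sanity check once it finishes, confirm that both output files exist and are non-empty (a minimal sketch; the prefix is a placeholder):
+
+```
+# hypothetical helper, not part of this PR
+import os
+
+prefix = "/path/to/bin/idx/file"
+for ext in (".bin", ".idx"):
+    path = prefix + ext
+    assert os.path.isfile(path) and os.path.getsize(path) > 0, f"missing or empty: {path}"
+    print(path, os.path.getsize(path), "bytes")
+```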
+
+#### Train model
+Follow the usual NeMo instructions to train any autoregressive model.
+1. Make sure you have tiktoken installed (`pip install tiktoken==0.6.0`)
+2. For the tokenizer, use this:
+```
+ tokenizer:
+ library: huggingface
+ type: /path/to/nemo/collections/multimodal_autoregressive/tokenizer
+ model: null
+ delimiter: null
+ vocab_file: null
+ merge_file: null
+ sentencepiece_legacy: false
+ trust_remote_code: true
+```
+
+#### Inference
+To run inference, edit the [inference config file](examples/multimodal_autoregressive/conf/megatron_mm_ar_inference_image_generation.yaml).
+*NOTE*: Make sure you have a `.nemo` file (checkpoint). If you only have a regular Megatron checkpoint, convert it first as shown in [this doc](https://docs.nvidia.com/nemo-framework/user-guide/latest/llms/gpt/checkpointconversion.html?highlight=convert).
+
+Run inference as follows:
+
+```
+torchrun --nproc-per-node 2 examples/multimodal_autoregressive/megatron_mm_autoregressive_eval_image_generation.py
+```
\ No newline at end of file
diff --git a/nemo/collections/multimodal_autoregressive/data/__init__.py b/nemo/collections/multimodal_autoregressive/data/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/nemo/collections/multimodal_autoregressive/data/preprocess_coyo_emu3_tokenizer.py b/nemo/collections/multimodal_autoregressive/data/preprocess_coyo_emu3_tokenizer.py
new file mode 100644
index 000000000000..3ab977ca9f5b
--- /dev/null
+++ b/nemo/collections/multimodal_autoregressive/data/preprocess_coyo_emu3_tokenizer.py
@@ -0,0 +1,244 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import math
+import os
+import pickle
+from argparse import ArgumentParser
+from glob import glob
+from pathlib import Path
+
+import torch
+import torchvision
+from PIL import Image
+from tqdm import tqdm
+from transformers import AutoModel, AutoTokenizer
+
+from nemo.collections.nlp.data.language_modeling.megatron import indexed_dataset
+
+"""
+You can run this script as follows
+
+torchrun --nproc-per-node 8 preprocess_coyo_emu3_tokenizer.py \
+    --input_image_dir /path/to/images \
+    --input_captions_dir /path/to/captions \
+    --output_prefix /path/to/output/prefix
+
+NOTE : Make sure you install tiktoken==0.6.0
+NOTE : Make sure the images and captions have the same filename (Images should be .jpg and Captions .pkl)
+"""
+
+EMU_HUB = "BAAI/Emu3-Gen"
+VQ_HUB = "BAAI/Emu3-VisionTokenizer"
+
+
+def smart_resize(image, factor: int = 8, min_pixels: int = 512 * 512, max_pixels: int = 1024 * 1024):
+ """Rescales the image so that the following conditions are met:
+
+ 1. Both dimensions (height and width) are divisible by 'factor'.
+
+ 2. The total number of pixels is within the range ['min_pixels', 'max_pixels'].
+
+ 3. The aspect ratio of the image is maintained as closely as possible.
+
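+    For example, with the defaults a 513x513 image rounds to 512x512:
+    both sides are divisible by factor=8, and 512*512 pixels meets min_pixels.
+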
+ """
+    width, height = image.size  # PIL's Image.size is (width, height)
+ if height < factor or width < factor:
+ raise ValueError(f"height:{height} or width:{width} must be larger than factor:{factor}")
+ elif max(height, width) / min(height, width) > 5:
+ raise ValueError(
+ f"absolute aspect ratio must be smaller than 5, got {max(height, width) / min(height, width)}"
+ )
+
+ h_bar = round(height / factor) * factor
+ w_bar = round(width / factor) * factor
+ if h_bar * w_bar > max_pixels:
+ beta = math.sqrt((height * width) / max_pixels)
+ h_bar = math.floor(height / beta / factor) * factor
+ w_bar = math.floor(width / beta / factor) * factor
+ elif h_bar * w_bar < min_pixels:
+ beta = math.sqrt(min_pixels / (height * width))
+ h_bar = math.ceil(height * beta / factor) * factor
+ w_bar = math.ceil(width * beta / factor) * factor
+
+    image = image.resize((w_bar, h_bar))
+ return image
+
+
+def to_imgstr(image_tokens, tokenizer):
+ """Convert the image tokens to string
+
+ Given image tokens e.g [1,5,32] as input, this produces the appropriate string tokens
+ e.g., "<|visual token 000001|><|visual token 000005|><|visual token 000032|>"
+
+ Args:
+ image_tokens : The image tokens as an integer list
+ tokenizer: EMU3 Tokenizer
+
+ Returns:
+ str: The image token converted to string
+ """
+ image_tokens = image_tokens.cpu().numpy().tolist()
+ image_token_str = [
+ ['<|visual token {token_id:0>6d}|>'.format(token_id=token_id) for token_id in token_row]
+ for token_row in image_tokens
+ ]
+ image_row_str = ["".join(token_row) for token_row in image_token_str]
+ imgstr = tokenizer.eol_token.join(image_row_str)
+ return imgstr
+
+
+def main(args):
+ """Main Function"""
+
+ gpu_rank = torch.cuda.current_device()
+ world_size = torch.cuda.device_count()
+
+ tokenizer = AutoTokenizer.from_pretrained(EMU_HUB, trust_remote_code=True)
+ image_tokenizer = AutoModel.from_pretrained(VQ_HUB, device_map="cuda", trust_remote_code=True).eval()
+
+ # prepare input
+ text = "Please describe the image"
+
+ builders = {}
+ key = 'text'
+ builders[key] = indexed_dataset.make_builder(
+ f'{args.output_prefix}.bin',
+ impl=args.dataset_impl,
+ chunk_size=args.chunk_size,
+ pad_id=tokenizer.pad_id if getattr(tokenizer, "pad_id", None) is not None else 0,
+ retrieval_db=None,
+ vocab_size=tokenizer.vocab_size,
+ stride=args.chunk_stride_size,
+ )
+
+ filepaths_final = glob(f'{args.input_image_dir}/*.jpg')
+
+ pbar = tqdm(filepaths_final)
+ total_images_to_process = len(filepaths_final)
+ total_images_to_process_per_gpu = total_images_to_process // torch.cuda.device_count()
+ if total_images_to_process_per_gpu > 30000:
+ print(
+ 'WARNING : Found more than 30k images to process per GPU. '
+ 'This job might take more than 3 hours to process as tested on H100 gpus'
+ )
+ print(
+        f'Total images to process : {total_images_to_process}. '
+        f'Each GPU will get {total_images_to_process_per_gpu} files'
+ )
+
+    for idx, filepath in enumerate(pbar):  # iterating the tqdm object advances the bar itself
+ if idx % world_size != gpu_rank:
+ continue
+ try:
+ image = Image.open(filepath)
+ caption_filename = filepath.split('/')[-1].replace('.jpg', '.pkl')
+ caption_path = Path(args.input_captions_dir).joinpath(caption_filename)
+ if not os.path.isfile(caption_path):
+ print(f'WARNING : Caption file does not exist {caption_path}. So skipping')
+ continue
+ if image.mode == 'L':
+ print(f'WARNING : Image {filepath} is gray scale. So skipping')
+ continue
+ image = smart_resize(image)
+ image_tensor = torchvision.transforms.functional.pil_to_tensor(image).unsqueeze(0)
+ image_tokens = image_tokenizer.encode(image_tensor.to(image_tokenizer.device, image_tokenizer.dtype))
+ bs, h, w = image_tokens.shape
+
+ imgstr = to_imgstr(image_tokens[0], tokenizer=tokenizer)
+ image_prompt = (
+ tokenizer.boi_token
+ + f'{h}*{w}'
+ + tokenizer.img_token
+ + imgstr
+ + tokenizer.eol_token
+ + tokenizer.eof_token
+ + tokenizer.eoi_token
+ )
+
+ caption = ""
+ with open(caption_path, 'rb') as f:
+ caption_data = pickle.load(f)
+ caption = caption_data['captions']['llava']
+
+ prompt = (
+ f'{tokenizer.bos_token}You are a helpful assistant. '
+ f'USER: {image_prompt}{text}. ASSISTANT: {caption}{tokenizer.eos_token}'
+ )
+ int_tokens = tokenizer(prompt).input_ids
+ builders[key].add_item(torch.IntTensor(int_tokens))
+ builders[key].end_document()
+ except Exception as e:
+ print(f'Error in handling {filepath}. Exception {e} raised. Continuing to next file')
+ continue
+
+ builders[key].finalize(
+ f'{args.output_prefix}.idx',
+ )
+
+
+if __name__ == '__main__':
+ parser = ArgumentParser()
+ parser.add_argument("--input_image_dir", required=True, type=str, help="The directory which contains images.")
+ parser.add_argument(
+ "--input_captions_dir",
+ required=True,
+ type=str,
+ help="The directory which contains captions (as .pkl file with same names as the image names).",
+ )
+ parser.add_argument(
+ "--output_prefix",
+ required=True,
+ type=str,
+ help="The directory along with the output file name to "
+ "write the .idx and .bin files (e.g /path/to/output/sample)",
+ )
+ parser.add_argument('--dataset_impl', type=str, default='mmap', choices=['lazy', 'cached', 'mmap', 'retmmap'])
+ parser.add_argument('--chunk_size', type=int, default=64, help='chunk size used for retrieval')
+ parser.add_argument(
+        '--resize_image', type=bool, default=True, help='Resizes the image to be between min_pixels and max_pixels'
+ )
+ parser.add_argument(
+ '--spatial_factor',
+ type=int,
+ default=8,
+        help='The spatial downsample factor the image will be downsampled/upsampled '
+        'to fit between min_pixels and max_pixels if resize_image is set to True',
+ )
+ parser.add_argument(
+ '--min_pixels',
+ type=int,
+ default=512 * 512,
+ help='The minimum number of pixels in the image. '
+ 'Picture will be upsampled if smaller and resize_image is set to True',
+ )
+ parser.add_argument(
+ '--max_pixels',
+ type=int,
+ default=1024 * 1024,
+ help='The maximum number of pixels in the image. '
+        'Picture will be downsampled if larger and resize_image is set to True',
+ )
+ parser.add_argument(
+ '--chunk_stride_size', type=int, default=64, help='the stride size for neighbor chunks used for retrieval'
+ )
+
+ args = parser.parse_args()
+
+ rank = int(os.environ['LOCAL_RANK'])
+ torch.cuda.set_device(rank)
+
+ with torch.no_grad():
+ main(args)
diff --git a/nemo/collections/multimodal_autoregressive/data/preprocess_pokemon_blip_cosmos_tokenizer.py b/nemo/collections/multimodal_autoregressive/data/preprocess_pokemon_blip_cosmos_tokenizer.py
new file mode 100644
index 000000000000..1d68e3f6a300
--- /dev/null
+++ b/nemo/collections/multimodal_autoregressive/data/preprocess_pokemon_blip_cosmos_tokenizer.py
@@ -0,0 +1,142 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from argparse import ArgumentParser
+
+import numpy as np
+import torch
+from datasets import load_dataset
+from einops import rearrange
+from tqdm import tqdm
+
+from nemo.collections.common.video_tokenizers.cosmos_tokenizer import CausalVideoTokenizer
+from nemo.collections.common.video_tokenizers.utils import numpy2tensor, pad_video_batch
+from nemo.collections.multimodal_autoregressive.tokenizer.cosmos_multimodal_tokenizer import CosmosMultiModalTokenizer
+from nemo.collections.nlp.data.language_modeling.megatron import indexed_dataset
+
+"""
+You can run this script as follows
+
+python3 nemo/collections/multimodal_autoregressive/data/preprocess_pokemon_blip_cosmos_tokenizer.py \
+    --output_prefix /path/to/output/prefix \
+    --multimodal_tokenizer_path /path/to/nemo/collections/multimodal_autoregressive/tokenizer
+
+NOTE : Make sure you install tiktoken==0.6.0
+"""
+
+
+def to_imgstr(image_tokens_flattened):
+ """Convert the image tokens to string
+
+ Given image tokens e.g [1,5,32] as input, this produces the appropriate string tokens
+ e.g., "<|visual token 000001|><|visual token 000005|><|visual token 000032|>"
+
+ Args:
+        image_tokens_flattened : The image tokens as a flattened integer tensor
+
+ Returns:
+ str: The image token converted to string
+ """
+ image_tokens_flattened = image_tokens_flattened.cpu().numpy().tolist()
+ visual_tokens = [
+ '<|visual token {token_id:0>6d}|>'.format(token_id=token_id) for token_id in image_tokens_flattened
+ ]
+ visual_tokens_str = "".join(visual_tokens)
+ return visual_tokens_str
+
+
+def main(args):
+ """Main function"""
+
+ dataset = load_dataset(args.dataset)
+
+ text_tokenizer = CosmosMultiModalTokenizer.from_pretrained(args.multimodal_tokenizer_path)
+ image_tokenizer = CausalVideoTokenizer.from_pretrained(
+ tokenizer_type=args.image_encoder, load_encoder=True, load_decoder=False, load_full_model=False
+ )
+
+ builders = {}
+ key = 'text'
+ builders[key] = indexed_dataset.make_builder(
+ f'{args.output_prefix}.bin',
+ impl='mmap',
+ chunk_size=64,
+        pad_id=text_tokenizer.pad_token_id if getattr(text_tokenizer, "pad_token_id", None) is not None else 0,
+ retrieval_db=None,
+ vocab_size=text_tokenizer.vocab_size,
+ stride=64,
+ )
+
+ dataset = dataset['train']
+
+ for data in tqdm(dataset):
+ image, caption = data['image'], data['text']
+ image = image.resize((512, 512))
+ image_numpy_array = np.array(image)
+ image_numpy_array = rearrange(image_numpy_array, 'h w (t c) -> t h w c', t=1)
+ batch_image_array = image_numpy_array[np.newaxis, ...]
+ padded_input_image_batch, crop_region = pad_video_batch(batch_image_array)
+ input_tensor = numpy2tensor(
+ padded_input_image_batch, dtype=image_tokenizer._dtype, device=image_tokenizer._device
+ )
+ output_indices, output_latent_vectors = image_tokenizer.encode(input_tensor)
+ output_indices_flattened = output_indices.reshape(-1)
+
+ imgstr = to_imgstr(output_indices_flattened)
+ image_prompt = text_tokenizer.boi_token + text_tokenizer.img_token + imgstr + text_tokenizer.eoi_token
+
+ prompt = (
+ f'{text_tokenizer.bos_token}You are a helpful assistant. '
+ 'Draw a picture for the caption given by the user. '
+ f'USER: {caption}. ASSISTANT: {image_prompt}{text_tokenizer.eos_token}'
+ )
+
+ int_tokens = text_tokenizer(prompt).input_ids
+ builders[key].add_item(torch.IntTensor(int_tokens))
+ builders[key].end_document()
+
+ builders[key].finalize(
+ f'{args.output_prefix}.idx',
+ )
+ print(f' Output .bin and .idx files saved to {args.output_prefix}')
+
+
+if __name__ == '__main__':
+
+ parser = ArgumentParser()
+ parser.add_argument(
+ "--output_prefix",
+ required=True,
+ type=str,
+ help="The directory along with the output file name to write "
+ "the .idx and .bin files (e.g /path/to/output/sample)",
+ )
+ parser.add_argument(
+ "--image_encoder",
+ type=str,
+ help="Discrete image encoder. Options are (Cosmos-Tokenizer-DV8x16x16/Cosmos-Tokenizer-DV4x8x8)",
+ default='Cosmos-Tokenizer-DV8x16x16',
+ )
+ parser.add_argument(
+ "--dataset", type=str, help="The hugging face dataset", default='reach-vb/pokemon-blip-captions'
+ )
+ parser.add_argument(
+ "--multimodal_tokenizer_path",
+ required=True,
+ type=str,
+ help="The path to the multimodal tokenizer. (nemo/collections/multimodal_autoregressive/tokenizer)",
+ )
+ args = parser.parse_args()
+
+ with torch.no_grad():
+ main(args)
diff --git a/nemo/collections/multimodal_autoregressive/tokenizer/__init__.py b/nemo/collections/multimodal_autoregressive/tokenizer/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/nemo/collections/multimodal_autoregressive/tokenizer/cosmos_multimodal_tokenizer.py b/nemo/collections/multimodal_autoregressive/tokenizer/cosmos_multimodal_tokenizer.py
new file mode 100644
index 000000000000..b06145bcc208
--- /dev/null
+++ b/nemo/collections/multimodal_autoregressive/tokenizer/cosmos_multimodal_tokenizer.py
@@ -0,0 +1,297 @@
+# coding=utf-8
+# Copyright 2024 The Emu team, BAAI and The HuggingFace Inc. team. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tokenization classes Cosmos (Visual tokens and Text tokens)."""
+
+import base64
+import logging
+import os
+import unicodedata
+from typing import Collection, Dict, List, Optional, Set, Tuple, Union
+
+import tiktoken
+from transformers import AddedToken, PreTrainedTokenizer
+
+logger = logging.getLogger(__name__)
+
+
+VOCAB_FILES_NAMES = {
+ "vocab_file": "emu3.tiktoken",
+ "special_tokens_file": "cosmos_vision_tokens.txt",
+}
+
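+# GPT-style BPE pre-tokenization regex: contractions, letter runs, single digits, punctuation runs, and whitespace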
+PAT_STR = r"""(?i:'s|'t|'re|'ve|'m|'ll|'d)|[^\r\n\p{L}\p{N}]?\p{L}+|\p{N}| ?[^\s\p{L}\p{N}]+[\r\n]*|\s*[\r\n]+|\s+(?!\S)|\s+""" # pylint: disable=line-too-long
+ENDOFTEXT = "<|endoftext|>"
+IMSTART = "<|im_start|>"
+IMEND = "<|im_end|>"
+# as the default behavior is changed to allow special tokens in
+# regular texts, the surface forms of special tokens need to be
+# as different as possible to minimize the impact
+EXTRAS = tuple((f"<|extra_{i}|>" for i in range(205)))
+# changed to use actual index to avoid misconfiguration with vocabulary expansion
+SPECIAL_START_ID = 151643
+
+
+def _load_tiktoken_bpe(tiktoken_bpe_file: str) -> Dict[bytes, int]:
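+    # each non-empty line of the tiktoken vocab file is "<base64-encoded token> <rank>"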
+ with open(tiktoken_bpe_file, "rb") as f:
+ contents = f.read()
+ return {
+ base64.b64decode(token): int(rank) for token, rank in (line.split() for line in contents.splitlines() if line)
+ }
+
+
+class CosmosMultiModalTokenizer(PreTrainedTokenizer):
+    """Cosmos multimodal tokenizer (text plus discrete visual tokens), adapted from the Emu3 tokenizer."""
+
+ vocab_files_names = VOCAB_FILES_NAMES
+
+ def __init__(
+ self,
+ vocab_file,
+ special_tokens_file,
+ errors="replace",
+ bos_token="<|extra_203|>",
+ eos_token="<|extra_204|>",
+ pad_token="<|endoftext|>",
+ img_token="<|image token|>",
+ boi_token="<|image start|>",
+ eoi_token="<|image end|>",
+ eol_token="<|extra_200|>",
+ eof_token="<|extra_201|>",
+ **kwargs,
+ ):
+ super().__init__(**kwargs)
+
+ # how to handle errors in decoding UTF-8 byte sequences
+ # use ignore if you are in streaming inference
+ self.errors = errors
+
+ self.mergeable_ranks = _load_tiktoken_bpe(vocab_file)
+
+        with open(special_tokens_file) as f:
+            vision_tokens = [t.strip() for t in f.readlines() if len(t.strip()) > 0]
+ SPECIAL_TOKENS = tuple(
+ enumerate(
+ (
+ (
+ ENDOFTEXT,
+ IMSTART,
+ IMEND,
+ )
+ + EXTRAS
+ + tuple(vision_tokens)
+ ),
+ start=SPECIAL_START_ID,
+ )
+ )
+ self.special_tokens = {token: index for index, token in SPECIAL_TOKENS}
+ self.special_tokens_set = set(t for _, t in SPECIAL_TOKENS)
+
+ enc = tiktoken.Encoding(
+ "Emu3",
+ pat_str=PAT_STR,
+ mergeable_ranks=self.mergeable_ranks,
+ special_tokens=self.special_tokens,
+ )
+
+ assert (
+ len(self.mergeable_ranks) + len(self.special_tokens) == enc.n_vocab
+ ), f"{len(self.mergeable_ranks) + len(self.special_tokens)} != {enc.n_vocab} in encoding"
+
+ self.decoder = {v: k for k, v in self.mergeable_ranks.items()}
+ self.decoder.update({v: k for k, v in self.special_tokens.items()})
+
+ self.tokenizer = enc
+
+ self.eod_id = self.tokenizer.eot_token
+ self.bos_token = bos_token
+ self.eos_token = eos_token
+ self.pad_token = pad_token
+ self.img_token = img_token
+ self.boi_token = boi_token
+ self.eoi_token = eoi_token
+ self.eol_token = eol_token
+ self.eof_token = eof_token
+
+ def __getstate__(self):
+ # for pickle lovers
+ state = self.__dict__.copy()
+ del state["tokenizer"]
+ return state
+
+ def __setstate__(self, state):
+ # tokenizer is not python native; don't pass it; rebuild it
+ self.__dict__.update(state)
+ enc = tiktoken.Encoding(
+ "Emu3",
+ pat_str=PAT_STR,
+ mergeable_ranks=self.mergeable_ranks,
+ special_tokens=self.special_tokens,
+ )
+ self.tokenizer = enc
+
+ def __len__(self) -> int:
+ return self.tokenizer.n_vocab
+
+ def get_vocab(self) -> Dict[bytes, int]:
+ """Returns the tokenizer vocab"""
+ return self.mergeable_ranks
+
+    def convert_tokens_to_ids(self, tokens: Union[bytes, str, List[Union[bytes, str]]]) -> Union[int, List[int]]:
+ """Converts the computed tokens to their corresponding ids"""
+ if isinstance(tokens, (str, bytes)):
+ if tokens in self.special_tokens:
+ return self.special_tokens[tokens]
+ else:
+ return self.mergeable_ranks.get(tokens)
+
+ ids = []
+ for token in tokens:
+ if token in self.special_tokens:
+ ids.append(self.special_tokens[token])
+ else:
+ ids.append(self.mergeable_ranks.get(token))
+ return ids
+
+ def _add_tokens(
+ self,
+ new_tokens: Union[List[str], List[AddedToken]],
+ special_tokens: bool = False,
+ ) -> int:
+ if not special_tokens and new_tokens:
+ raise ValueError("Adding regular tokens is not supported")
+
+ for token in new_tokens:
+ surface_form = token.content if isinstance(token, AddedToken) else token
+ if surface_form not in self.special_tokens_set:
+ raise ValueError("Adding unknown special tokens is not supported")
+
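+        # the HF base class registers bos/eos/pad/etc. through _add_tokens at init;
+        # they already exist in the tiktoken vocab, so nothing new is ever added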
+ return 0
+
+    def save_vocabulary(self, save_directory: str, **kwargs) -> Tuple[str, str]:
+        """
+        Save the tokenizer's vocabulary file and special-tokens file.
+
+        Returns:
+            `Tuple[str, str]`: Paths to the files saved.
+        """
+ regular_file_path = os.path.join(save_directory, self.vocab_files_names["vocab_file"])
+ with open(regular_file_path, 'w', encoding="utf8") as w:
+ for k, v in self.mergeable_ranks.items():
+ line = base64.b64encode(k).decode("utf8") + " " + str(v) + "\n"
+ w.write(line)
+
+ excluded_special_tokens = set(
+ (
+ ENDOFTEXT,
+ IMSTART,
+ IMEND,
+ )
+ + EXTRAS
+ )
+ special_file_path = os.path.join(save_directory, self.vocab_files_names["special_tokens_file"])
+ with open(special_file_path, 'w', encoding="utf8") as w:
+ for k in self.special_tokens:
+ if k not in excluded_special_tokens:
+ print(k, file=w)
+
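+        # these two files are exactly what __init__ consumes as vocab_file and
+        # special_tokens_file, so a saved tokenizer can be reloaded from them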
+ return (regular_file_path, special_file_path)
+
+ def tokenize(
+ self,
+ text: str,
+ allowed_special: Union[Set, str] = "all",
+ disallowed_special: Union[Collection, str] = (),
+ **kwargs,
+ ) -> List[Union[bytes, str]]:
+ """
+        Converts a string into a sequence of tokens.
+
+        Args:
+            text (`str`):
+                The sequence to be encoded.
+            allowed_special (`Literal["all"]` or `set`):
+                The surface forms of the tokens to be encoded as special tokens in regular texts.
+                Defaults to "all".
+            disallowed_special (`Literal["all"]` or `Collection`):
+                The surface forms of the tokens that should not appear in regular texts and that trigger errors.
+                Defaults to an empty tuple.
+
+            kwargs (additional keyword arguments, *optional*):
+                Will be passed to the underlying model-specific encode method.
+
+ Returns:
+ `List[bytes|str]`: The list of tokens.
+ """
+ tokens = []
+ text = unicodedata.normalize("NFC", text)
+
+ # this implementation takes a detour: text -> token id -> token surface forms
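+        # regular tokens are surfaced as bytes, special tokens as str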
+ for t in self.tokenizer.encode(text, allowed_special=allowed_special, disallowed_special=disallowed_special):
+ tokens.append(self.decoder[t])
+
+ return tokens
+
+ def convert_tokens_to_string(self, tokens: List[Union[bytes, str]]) -> str:
+ """
+        Converts a sequence of tokens into a single string.
+ """
+ text = ""
+ temp = b""
+ for t in tokens:
+ if isinstance(t, str):
+ if temp:
+ text += temp.decode("utf-8", errors=self.errors)
+ temp = b""
+ text += t
+ elif isinstance(t, bytes):
+ temp += t
+ else:
+            raise TypeError("token should only be of type bytes or str")
+ if temp:
+ text += temp.decode("utf-8", errors=self.errors)
+ return text
+
+ @property
+ def vocab_size(self):
+ """Returns the vocab size"""
+ return self.tokenizer.n_vocab
+
+ def _convert_id_to_token(self, index: int) -> Union[bytes, str]:
+ """Converts an id to a token, special tokens included"""
+ if index in self.decoder:
+ return self.decoder[index]
+ raise ValueError("unknown ids")
+
+ def _convert_token_to_id(self, token: Union[bytes, str]) -> int:
+ """Converts a token to an id using the vocab, special tokens included"""
+ if token in self.special_tokens:
+ return self.special_tokens[token]
+ if token in self.mergeable_ranks:
+ return self.mergeable_ranks[token]
+ raise ValueError("unknown token")
+
+ def _decode(
+ self,
+ token_ids: Union[int, List[int]],
+ skip_special_tokens: bool = False,
+ errors: Optional[str] = None,
+ **kwargs,
+ ) -> str:
+ if isinstance(token_ids, int):
+ token_ids = [token_ids]
+
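+        # special tokens occupy ids >= self.eod_id (ENDOFTEXT is the first special),
+        # so an id comparison is enough to drop them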
+ if skip_special_tokens:
+ token_ids = [i for i in token_ids if i < self.eod_id]
+
+ return self.tokenizer.decode(token_ids, errors=errors or self.errors)
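+
+
+# A minimal usage sketch (file names here are hypothetical placeholders for the
+# tiktoken vocab file and the vision special-tokens file shipped alongside):
+#
+#   tokenizer = CosmosMultiModalTokenizer(
+#       vocab_file="emu3.tiktoken",
+#       special_tokens_file="cosmos_vision_tokens.txt",
+#   )
+#   tokens = tokenizer.tokenize("<|image start|><|visual token 000000|><|image end|>")
+#   ids = tokenizer.convert_tokens_to_ids(tokens)
+#   text = tokenizer.decode(ids)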
diff --git a/nemo/collections/multimodal_autoregressive/tokenizer/cosmos_vision_tokens.txt b/nemo/collections/multimodal_autoregressive/tokenizer/cosmos_vision_tokens.txt
new file mode 100644
index 000000000000..7ace0f0d7e8f
--- /dev/null
+++ b/nemo/collections/multimodal_autoregressive/tokenizer/cosmos_vision_tokens.txt
@@ -0,0 +1,65003 @@
+<|image token|>
+<|image start|>
+<|image end|>
+<|visual token 000000|>
+<|visual token 000001|>
+<|visual token 000002|>
+<|visual token 000003|>
+<|visual token 000004|>
+<|visual token 000005|>
+<|visual token 000006|>
+<|visual token 000007|>
+<|visual token 000008|>
+<|visual token 000009|>
+<|visual token 000010|>
+<|visual token 000011|>
+<|visual token 000012|>
+<|visual token 000013|>
+<|visual token 000014|>
+<|visual token 000015|>
+<|visual token 000016|>
+<|visual token 000017|>
+<|visual token 000018|>
+<|visual token 000019|>
+<|visual token 000020|>
+<|visual token 000021|>
+<|visual token 000022|>
+<|visual token 000023|>
+<|visual token 000024|>
+<|visual token 000025|>
+<|visual token 000026|>
+<|visual token 000027|>
+<|visual token 000028|>
+<|visual token 000029|>
+<|visual token 000030|>
+<|visual token 000031|>
+<|visual token 000032|>
+<|visual token 000033|>
+<|visual token 000034|>
+<|visual token 000035|>
+<|visual token 000036|>
+<|visual token 000037|>
+<|visual token 000038|>
+<|visual token 000039|>
+<|visual token 000040|>
+<|visual token 000041|>
+<|visual token 000042|>
+<|visual token 000043|>
+<|visual token 000044|>
+<|visual token 000045|>
+<|visual token 000046|>
+<|visual token 000047|>
+<|visual token 000048|>
+<|visual token 000049|>
+<|visual token 000050|>
+<|visual token 000051|>
+<|visual token 000052|>
+<|visual token 000053|>
+<|visual token 000054|>
+<|visual token 000055|>
+<|visual token 000056|>
+<|visual token 000057|>
+<|visual token 000058|>
+<|visual token 000059|>
+<|visual token 000060|>
+<|visual token 000061|>
+<|visual token 000062|>
+<|visual token 000063|>
+<|visual token 000064|>
+<|visual token 000065|>
+<|visual token 000066|>
+<|visual token 000067|>
+<|visual token 000068|>
+<|visual token 000069|>
+<|visual token 000070|>
+<|visual token 000071|>
+<|visual token 000072|>
+<|visual token 000073|>
+<|visual token 000074|>
+<|visual token 000075|>
+<|visual token 000076|>
+<|visual token 000077|>
+<|visual token 000078|>
+<|visual token 000079|>
+<|visual token 000080|>
+<|visual token 000081|>
+<|visual token 000082|>
+<|visual token 000083|>
+<|visual token 000084|>
+<|visual token 000085|>
+<|visual token 000086|>
+<|visual token 000087|>
+<|visual token 000088|>
+<|visual token 000089|>
+<|visual token 000090|>
+<|visual token 000091|>
+<|visual token 000092|>
+<|visual token 000093|>
+<|visual token 000094|>
+<|visual token 000095|>
+<|visual token 000096|>
+<|visual token 000097|>
+<|visual token 000098|>
+<|visual token 000099|>
+<|visual token 000100|>
+<|visual token 000101|>
+<|visual token 000102|>
+<|visual token 000103|>
+<|visual token 000104|>
+<|visual token 000105|>
+<|visual token 000106|>
+<|visual token 000107|>
+<|visual token 000108|>
+<|visual token 000109|>
+<|visual token 000110|>
+<|visual token 000111|>
+<|visual token 000112|>
+<|visual token 000113|>
+<|visual token 000114|>
+<|visual token 000115|>
+<|visual token 000116|>
+<|visual token 000117|>
+<|visual token 000118|>
+<|visual token 000119|>
+<|visual token 000120|>
+<|visual token 000121|>
+<|visual token 000122|>
+<|visual token 000123|>
+<|visual token 000124|>
+<|visual token 000125|>
+<|visual token 000126|>
+<|visual token 000127|>
+<|visual token 000128|>
+<|visual token 000129|>
+<|visual token 000130|>
+<|visual token 000131|>
+<|visual token 000132|>
+<|visual token 000133|>
+<|visual token 000134|>
+<|visual token 000135|>
+<|visual token 000136|>
+<|visual token 000137|>
+<|visual token 000138|>
+<|visual token 000139|>
+<|visual token 000140|>
+<|visual token 000141|>
+<|visual token 000142|>
+<|visual token 000143|>
+<|visual token 000144|>
+<|visual token 000145|>
+<|visual token 000146|>
+<|visual token 000147|>
+<|visual token 000148|>
+<|visual token 000149|>
+<|visual token 000150|>
+<|visual token 000151|>
+<|visual token 000152|>
+<|visual token 000153|>
+<|visual token 000154|>
+<|visual token 000155|>
+<|visual token 000156|>
+<|visual token 000157|>
+<|visual token 000158|>
+<|visual token 000159|>
+<|visual token 000160|>
+<|visual token 000161|>
+<|visual token 000162|>
+<|visual token 000163|>
+<|visual token 000164|>
+<|visual token 000165|>
+<|visual token 000166|>
+<|visual token 000167|>
+<|visual token 000168|>
+<|visual token 000169|>
+<|visual token 000170|>
+<|visual token 000171|>
+<|visual token 000172|>
+<|visual token 000173|>
+<|visual token 000174|>
+<|visual token 000175|>
+<|visual token 000176|>
+<|visual token 000177|>
+<|visual token 000178|>
+<|visual token 000179|>
+<|visual token 000180|>
+<|visual token 000181|>
+<|visual token 000182|>
+<|visual token 000183|>
+<|visual token 000184|>
+<|visual token 000185|>
+<|visual token 000186|>
+<|visual token 000187|>
+<|visual token 000188|>
+<|visual token 000189|>
+<|visual token 000190|>
+<|visual token 000191|>
+<|visual token 000192|>
+<|visual token 000193|>
+<|visual token 000194|>
+<|visual token 000195|>
+<|visual token 000196|>
+<|visual token 000197|>
+<|visual token 000198|>
+<|visual token 000199|>
+<|visual token 000200|>
+<|visual token 000201|>
+<|visual token 000202|>
+<|visual token 000203|>
+<|visual token 000204|>
+<|visual token 000205|>
+<|visual token 000206|>
+<|visual token 000207|>
+<|visual token 000208|>
+<|visual token 000209|>
+<|visual token 000210|>
+<|visual token 000211|>
+<|visual token 000212|>
+<|visual token 000213|>
+<|visual token 000214|>
+<|visual token 000215|>
+<|visual token 000216|>
+<|visual token 000217|>
+<|visual token 000218|>
+<|visual token 000219|>
+<|visual token 000220|>
+<|visual token 000221|>
+<|visual token 000222|>
+<|visual token 000223|>
+<|visual token 000224|>
+<|visual token 000225|>
+<|visual token 000226|>
+<|visual token 000227|>
+<|visual token 000228|>
+<|visual token 000229|>
+<|visual token 000230|>
+<|visual token 000231|>
+<|visual token 000232|>
+<|visual token 000233|>
+<|visual token 000234|>
+<|visual token 000235|>
+<|visual token 000236|>
+<|visual token 000237|>
+<|visual token 000238|>
+<|visual token 000239|>
+<|visual token 000240|>
+<|visual token 000241|>
+<|visual token 000242|>
+<|visual token 000243|>
+<|visual token 000244|>
+<|visual token 000245|>
+<|visual token 000246|>
+<|visual token 000247|>
+<|visual token 000248|>
+<|visual token 000249|>
+<|visual token 000250|>
+<|visual token 000251|>
+<|visual token 000252|>
+<|visual token 000253|>
+<|visual token 000254|>
+<|visual token 000255|>
+<|visual token 000256|>
+<|visual token 000257|>
+<|visual token 000258|>
+<|visual token 000259|>
+<|visual token 000260|>
+<|visual token 000261|>
+<|visual token 000262|>
+<|visual token 000263|>
+<|visual token 000264|>
+<|visual token 000265|>
+<|visual token 000266|>
+<|visual token 000267|>
+<|visual token 000268|>
+<|visual token 000269|>
+<|visual token 000270|>
+<|visual token 000271|>
+<|visual token 000272|>
+<|visual token 000273|>
+<|visual token 000274|>
+<|visual token 000275|>
+<|visual token 000276|>
+<|visual token 000277|>
+<|visual token 000278|>
+<|visual token 000279|>
+<|visual token 000280|>
+<|visual token 000281|>
+<|visual token 000282|>
+<|visual token 000283|>
+<|visual token 000284|>
+<|visual token 000285|>
+<|visual token 000286|>
+<|visual token 000287|>
+<|visual token 000288|>
+<|visual token 000289|>
+<|visual token 000290|>
+<|visual token 000291|>
+<|visual token 000292|>
+<|visual token 000293|>
+<|visual token 000294|>
+<|visual token 000295|>
+<|visual token 000296|>
+<|visual token 000297|>
+<|visual token 000298|>
+<|visual token 000299|>
+<|visual token 000300|>
+<|visual token 000301|>
+<|visual token 000302|>
+<|visual token 000303|>
+<|visual token 000304|>
+<|visual token 000305|>
+<|visual token 000306|>
+<|visual token 000307|>
+<|visual token 000308|>
+<|visual token 000309|>
+<|visual token 000310|>
+<|visual token 000311|>
+<|visual token 000312|>
+<|visual token 000313|>
+<|visual token 000314|>
+<|visual token 000315|>
+<|visual token 000316|>
+<|visual token 000317|>
+<|visual token 000318|>
+<|visual token 000319|>
+<|visual token 000320|>
+<|visual token 000321|>
+<|visual token 000322|>
+<|visual token 000323|>
+<|visual token 000324|>
+<|visual token 000325|>
+<|visual token 000326|>
+<|visual token 000327|>
+<|visual token 000328|>
+<|visual token 000329|>
+<|visual token 000330|>
+<|visual token 000331|>
+<|visual token 000332|>
+<|visual token 000333|>
+<|visual token 000334|>
+<|visual token 000335|>
+<|visual token 000336|>
+<|visual token 000337|>
+<|visual token 000338|>
+<|visual token 000339|>
+<|visual token 000340|>
+<|visual token 000341|>
+<|visual token 000342|>
+<|visual token 000343|>
+<|visual token 000344|>
+<|visual token 000345|>
+<|visual token 000346|>
+<|visual token 000347|>
+<|visual token 000348|>
+<|visual token 000349|>
+<|visual token 000350|>
+<|visual token 000351|>
+<|visual token 000352|>
+<|visual token 000353|>
+<|visual token 000354|>
+<|visual token 000355|>
+<|visual token 000356|>
+<|visual token 000357|>
+<|visual token 000358|>
+<|visual token 000359|>
+<|visual token 000360|>
+<|visual token 000361|>
+<|visual token 000362|>
+<|visual token 000363|>
+<|visual token 000364|>
+<|visual token 000365|>
+<|visual token 000366|>
+<|visual token 000367|>
+<|visual token 000368|>
+<|visual token 000369|>
+<|visual token 000370|>
+<|visual token 000371|>
+<|visual token 000372|>
+<|visual token 000373|>
+<|visual token 000374|>
+<|visual token 000375|>
+<|visual token 000376|>
+<|visual token 000377|>
+<|visual token 000378|>
+<|visual token 000379|>
+<|visual token 000380|>
+<|visual token 000381|>
+<|visual token 000382|>
+<|visual token 000383|>
+<|visual token 000384|>
+<|visual token 000385|>
+<|visual token 000386|>
+<|visual token 000387|>
+<|visual token 000388|>
+<|visual token 000389|>
+<|visual token 000390|>
+<|visual token 000391|>
+<|visual token 000392|>
+<|visual token 000393|>
+<|visual token 000394|>
+<|visual token 000395|>
+<|visual token 000396|>
+<|visual token 000397|>
+<|visual token 000398|>
+<|visual token 000399|>
+<|visual token 000400|>
+<|visual token 000401|>
+<|visual token 000402|>
+<|visual token 000403|>
+<|visual token 000404|>
+<|visual token 000405|>
+<|visual token 000406|>
+<|visual token 000407|>
+<|visual token 000408|>
+<|visual token 000409|>
+<|visual token 000410|>
+<|visual token 000411|>
+<|visual token 000412|>
+<|visual token 000413|>
+<|visual token 000414|>
+<|visual token 000415|>
+<|visual token 000416|>
+<|visual token 000417|>
+<|visual token 000418|>
+<|visual token 000419|>
+<|visual token 000420|>
+<|visual token 000421|>
+<|visual token 000422|>
+<|visual token 000423|>
+<|visual token 000424|>
+<|visual token 000425|>
+<|visual token 000426|>
+<|visual token 000427|>
+<|visual token 000428|>
+<|visual token 000429|>
+<|visual token 000430|>
+<|visual token 000431|>
+<|visual token 000432|>
+<|visual token 000433|>
+<|visual token 000434|>
+<|visual token 000435|>
+<|visual token 000436|>
+<|visual token 000437|>
+<|visual token 000438|>
+<|visual token 000439|>
+<|visual token 000440|>
+<|visual token 000441|>
+<|visual token 000442|>
+<|visual token 000443|>
+<|visual token 000444|>
+<|visual token 000445|>
+<|visual token 000446|>
+<|visual token 000447|>
+<|visual token 000448|>
+<|visual token 000449|>
+<|visual token 000450|>
+<|visual token 000451|>
+<|visual token 000452|>
+<|visual token 000453|>
+<|visual token 000454|>
+<|visual token 000455|>
+<|visual token 000456|>
+<|visual token 000457|>
+<|visual token 000458|>
+<|visual token 000459|>
+<|visual token 000460|>
+<|visual token 000461|>
+<|visual token 000462|>
+<|visual token 000463|>
+<|visual token 000464|>
+<|visual token 000465|>
+<|visual token 000466|>
+<|visual token 000467|>
+<|visual token 000468|>
+<|visual token 000469|>
+<|visual token 000470|>
+<|visual token 000471|>
+<|visual token 000472|>
+<|visual token 000473|>
+<|visual token 000474|>
+<|visual token 000475|>
+<|visual token 000476|>
+<|visual token 000477|>
+<|visual token 000478|>
+<|visual token 000479|>
+<|visual token 000480|>
+<|visual token 000481|>
+<|visual token 000482|>
+<|visual token 000483|>
+<|visual token 000484|>
+<|visual token 000485|>
+<|visual token 000486|>
+<|visual token 000487|>
+<|visual token 000488|>
+<|visual token 000489|>
+<|visual token 000490|>
+<|visual token 000491|>
+<|visual token 000492|>
+<|visual token 000493|>
+<|visual token 000494|>
+<|visual token 000495|>
+<|visual token 000496|>
+<|visual token 000497|>
+<|visual token 000498|>
+<|visual token 000499|>
+<|visual token 000500|>
+<|visual token 000501|>
+<|visual token 000502|>
+<|visual token 000503|>
+<|visual token 000504|>
+<|visual token 000505|>
+<|visual token 000506|>
+<|visual token 000507|>
+<|visual token 000508|>
+<|visual token 000509|>
+<|visual token 000510|>
+<|visual token 000511|>
+<|visual token 000512|>
+<|visual token 000513|>
+<|visual token 000514|>
+<|visual token 000515|>
+<|visual token 000516|>
+<|visual token 000517|>
+<|visual token 000518|>
+<|visual token 000519|>
+<|visual token 000520|>
+<|visual token 000521|>
+<|visual token 000522|>
+<|visual token 000523|>
+<|visual token 000524|>
+<|visual token 000525|>
+<|visual token 000526|>
+<|visual token 000527|>
+<|visual token 000528|>
+<|visual token 000529|>
+<|visual token 000530|>
+<|visual token 000531|>
+<|visual token 000532|>
+<|visual token 000533|>
+<|visual token 000534|>
+<|visual token 000535|>
+<|visual token 000536|>
+<|visual token 000537|>
+<|visual token 000538|>
+<|visual token 000539|>
+<|visual token 000540|>
+<|visual token 000541|>
+<|visual token 000542|>
+<|visual token 000543|>
+<|visual token 000544|>
+<|visual token 000545|>
+<|visual token 000546|>
+<|visual token 000547|>
+<|visual token 000548|>
+<|visual token 000549|>
+<|visual token 000550|>
+<|visual token 000551|>
+<|visual token 000552|>
+<|visual token 000553|>
+<|visual token 000554|>
+<|visual token 000555|>
+<|visual token 000556|>
+<|visual token 000557|>
+<|visual token 000558|>
+<|visual token 000559|>
+<|visual token 000560|>
+<|visual token 000561|>
+<|visual token 000562|>
+<|visual token 000563|>
+<|visual token 000564|>
+<|visual token 000565|>
+<|visual token 000566|>
+<|visual token 000567|>
+<|visual token 000568|>
+<|visual token 000569|>
+<|visual token 000570|>
+<|visual token 000571|>
+<|visual token 000572|>
+<|visual token 000573|>
+<|visual token 000574|>
+<|visual token 000575|>
+<|visual token 000576|>
+<|visual token 000577|>
+<|visual token 000578|>
+<|visual token 000579|>
+<|visual token 000580|>
+<|visual token 000581|>
+<|visual token 000582|>
+<|visual token 000583|>
+<|visual token 000584|>
+<|visual token 000585|>
+<|visual token 000586|>
+<|visual token 000587|>
+<|visual token 000588|>
+<|visual token 000589|>
+<|visual token 000590|>
+<|visual token 000591|>
+<|visual token 000592|>
+<|visual token 000593|>
+<|visual token 000594|>
+<|visual token 000595|>
+<|visual token 000596|>
+<|visual token 000597|>
+<|visual token 000598|>
+<|visual token 000599|>
+<|visual token 000600|>
+<|visual token 000601|>
+<|visual token 000602|>
+<|visual token 000603|>
+<|visual token 000604|>
+<|visual token 000605|>
+<|visual token 000606|>
+<|visual token 000607|>
+<|visual token 000608|>
+<|visual token 000609|>
+<|visual token 000610|>
+<|visual token 000611|>
+<|visual token 000612|>
+<|visual token 000613|>
+<|visual token 000614|>
+<|visual token 000615|>
+<|visual token 000616|>
+<|visual token 000617|>
+<|visual token 000618|>
+<|visual token 000619|>
+<|visual token 000620|>
+<|visual token 000621|>
+<|visual token 000622|>
+<|visual token 000623|>
+<|visual token 000624|>
+<|visual token 000625|>
+<|visual token 000626|>
+<|visual token 000627|>
+<|visual token 000628|>
+<|visual token 000629|>
+<|visual token 000630|>
+<|visual token 000631|>
+<|visual token 000632|>
+<|visual token 000633|>
+<|visual token 000634|>
+<|visual token 000635|>
+<|visual token 000636|>
+<|visual token 000637|>
+<|visual token 000638|>
+<|visual token 000639|>
+<|visual token 000640|>
+<|visual token 000641|>
+<|visual token 000642|>
+<|visual token 000643|>
+<|visual token 000644|>
+<|visual token 000645|>
+<|visual token 000646|>
+<|visual token 000647|>
+<|visual token 000648|>
+<|visual token 000649|>
+<|visual token 000650|>
+<|visual token 000651|>
+<|visual token 000652|>
+<|visual token 000653|>
+<|visual token 000654|>
+<|visual token 000655|>
+<|visual token 000656|>
+<|visual token 000657|>
+<|visual token 000658|>
+<|visual token 000659|>
+<|visual token 000660|>
+<|visual token 000661|>
+<|visual token 000662|>
+<|visual token 000663|>
+<|visual token 000664|>
+<|visual token 000665|>
+<|visual token 000666|>
+<|visual token 000667|>
+<|visual token 000668|>
+<|visual token 000669|>
+<|visual token 000670|>
+<|visual token 000671|>
+<|visual token 000672|>
+<|visual token 000673|>
+<|visual token 000674|>
+<|visual token 000675|>
+<|visual token 000676|>
+<|visual token 000677|>
+<|visual token 000678|>
+<|visual token 000679|>
+<|visual token 000680|>
+<|visual token 000681|>
+<|visual token 000682|>
+<|visual token 000683|>
+<|visual token 000684|>
+<|visual token 000685|>
+<|visual token 000686|>
+<|visual token 000687|>
+<|visual token 000688|>
+<|visual token 000689|>
+<|visual token 000690|>
+<|visual token 000691|>
+<|visual token 000692|>
+<|visual token 000693|>
+<|visual token 000694|>
+<|visual token 000695|>
+<|visual token 000696|>
+<|visual token 000697|>
+<|visual token 000698|>
+<|visual token 000699|>
+<|visual token 000700|>
+<|visual token 000701|>
+<|visual token 000702|>
+<|visual token 000703|>
+<|visual token 000704|>
+<|visual token 000705|>
+<|visual token 000706|>
+<|visual token 000707|>
+<|visual token 000708|>
+<|visual token 000709|>
+<|visual token 000710|>
+<|visual token 000711|>
+<|visual token 000712|>
+<|visual token 000713|>
+<|visual token 000714|>
+<|visual token 000715|>
+<|visual token 000716|>
+<|visual token 000717|>
+<|visual token 000718|>
+<|visual token 000719|>
+<|visual token 000720|>
+<|visual token 000721|>
+<|visual token 000722|>
+<|visual token 000723|>
+<|visual token 000724|>
+<|visual token 000725|>
+<|visual token 000726|>
+<|visual token 000727|>
+<|visual token 000728|>
+<|visual token 000729|>
+<|visual token 000730|>
+<|visual token 000731|>
+<|visual token 000732|>
+<|visual token 000733|>
+<|visual token 000734|>
+<|visual token 000735|>
+<|visual token 000736|>
+<|visual token 000737|>
+<|visual token 000738|>
+<|visual token 000739|>
+<|visual token 000740|>
+<|visual token 000741|>
+<|visual token 000742|>
+<|visual token 000743|>
+<|visual token 000744|>
+<|visual token 000745|>
+<|visual token 000746|>
+<|visual token 000747|>
+<|visual token 000748|>
+<|visual token 000749|>
+<|visual token 000750|>
+<|visual token 000751|>
+<|visual token 000752|>
+<|visual token 000753|>
+<|visual token 000754|>
+<|visual token 000755|>
+<|visual token 000756|>
+<|visual token 000757|>
+<|visual token 000758|>
+<|visual token 000759|>
+<|visual token 000760|>
+<|visual token 000761|>
+<|visual token 000762|>
+<|visual token 000763|>
+<|visual token 000764|>
+<|visual token 000765|>
+<|visual token 000766|>
+<|visual token 000767|>
+<|visual token 000768|>
+<|visual token 000769|>
+<|visual token 000770|>
+<|visual token 000771|>
+<|visual token 000772|>
+<|visual token 000773|>
+<|visual token 000774|>
+<|visual token 000775|>
+<|visual token 000776|>
+<|visual token 000777|>
+<|visual token 000778|>
+<|visual token 000779|>
+<|visual token 000780|>
+<|visual token 000781|>
+<|visual token 000782|>
+<|visual token 000783|>
+<|visual token 000784|>
+<|visual token 000785|>
+<|visual token 000786|>
+<|visual token 000787|>
+<|visual token 000788|>
+<|visual token 000789|>
+<|visual token 000790|>
+<|visual token 000791|>
+<|visual token 000792|>
+<|visual token 000793|>
+<|visual token 000794|>
+<|visual token 000795|>
+<|visual token 000796|>
+<|visual token 000797|>
+<|visual token 000798|>
+<|visual token 000799|>
+<|visual token 000800|>
+<|visual token 000801|>
+<|visual token 000802|>
+<|visual token 000803|>
+<|visual token 000804|>
+<|visual token 000805|>
+<|visual token 000806|>
+<|visual token 000807|>
+<|visual token 000808|>
+<|visual token 000809|>
+<|visual token 000810|>
+<|visual token 000811|>
+<|visual token 000812|>
+<|visual token 000813|>
+<|visual token 000814|>
+<|visual token 000815|>
+<|visual token 000816|>
+<|visual token 000817|>
+<|visual token 000818|>
+<|visual token 000819|>
+<|visual token 000820|>
+<|visual token 000821|>
+<|visual token 000822|>
+<|visual token 000823|>
+<|visual token 000824|>
+<|visual token 000825|>
+<|visual token 000826|>
+<|visual token 000827|>
+<|visual token 000828|>
+<|visual token 000829|>
+<|visual token 000830|>
+<|visual token 000831|>
+<|visual token 000832|>
+<|visual token 000833|>
+<|visual token 000834|>
+<|visual token 000835|>
+<|visual token 000836|>
+<|visual token 000837|>
+<|visual token 000838|>
+<|visual token 000839|>
+<|visual token 000840|>
+<|visual token 000841|>
+<|visual token 000842|>
+<|visual token 000843|>
+<|visual token 000844|>
+<|visual token 000845|>
+<|visual token 000846|>
+<|visual token 000847|>
+<|visual token 000848|>
+<|visual token 000849|>
+<|visual token 000850|>
+<|visual token 000851|>
+<|visual token 000852|>
+<|visual token 000853|>
+<|visual token 000854|>
+<|visual token 000855|>
+<|visual token 000856|>
+<|visual token 000857|>
+<|visual token 000858|>
+<|visual token 000859|>
+<|visual token 000860|>
+<|visual token 000861|>
+<|visual token 000862|>
+<|visual token 000863|>
+<|visual token 000864|>
+<|visual token 000865|>
+<|visual token 000866|>
+<|visual token 000867|>
+<|visual token 000868|>
+<|visual token 000869|>
+<|visual token 000870|>
+<|visual token 000871|>
+<|visual token 000872|>
+<|visual token 000873|>
+<|visual token 000874|>
+<|visual token 000875|>
+<|visual token 000876|>
+<|visual token 000877|>
+<|visual token 000878|>
+<|visual token 000879|>
+<|visual token 000880|>
+<|visual token 000881|>
+<|visual token 000882|>
+<|visual token 000883|>
+<|visual token 000884|>
+<|visual token 000885|>
+<|visual token 000886|>
+<|visual token 000887|>
+<|visual token 000888|>
+<|visual token 000889|>
+<|visual token 000890|>
+<|visual token 000891|>
+<|visual token 000892|>
+<|visual token 000893|>
+<|visual token 000894|>
+<|visual token 000895|>
+<|visual token 000896|>
+<|visual token 000897|>
+<|visual token 000898|>
+<|visual token 000899|>
+<|visual token 000900|>
+<|visual token 000901|>
+<|visual token 000902|>
+<|visual token 000903|>
+<|visual token 000904|>
+<|visual token 000905|>
+<|visual token 000906|>
+<|visual token 000907|>
+<|visual token 000908|>
+<|visual token 000909|>
+<|visual token 000910|>
+<|visual token 000911|>
+<|visual token 000912|>
+<|visual token 000913|>
+<|visual token 000914|>
+<|visual token 000915|>
+<|visual token 000916|>
+<|visual token 000917|>
+<|visual token 000918|>
+<|visual token 000919|>
+<|visual token 000920|>
+<|visual token 000921|>
+<|visual token 000922|>
+<|visual token 000923|>
+<|visual token 000924|>
+<|visual token 000925|>
+<|visual token 000926|>
+<|visual token 000927|>
+<|visual token 000928|>
+<|visual token 000929|>
+<|visual token 000930|>
+<|visual token 000931|>
+<|visual token 000932|>
+<|visual token 000933|>
+<|visual token 000934|>
+<|visual token 000935|>
+<|visual token 000936|>
+<|visual token 000937|>
+<|visual token 000938|>
+<|visual token 000939|>
+<|visual token 000940|>
+<|visual token 000941|>
+<|visual token 000942|>
+<|visual token 000943|>
+<|visual token 000944|>
+<|visual token 000945|>
+<|visual token 000946|>
+<|visual token 000947|>
+<|visual token 000948|>
+<|visual token 000949|>
+<|visual token 000950|>
+<|visual token 000951|>
+<|visual token 000952|>
+<|visual token 000953|>
+<|visual token 000954|>
+<|visual token 000955|>
+<|visual token 000956|>
+<|visual token 000957|>
+<|visual token 000958|>
+<|visual token 000959|>
+<|visual token 000960|>
+<|visual token 000961|>
+<|visual token 000962|>
+<|visual token 000963|>
+<|visual token 000964|>
+<|visual token 000965|>
+<|visual token 000966|>
+<|visual token 000967|>
+<|visual token 000968|>
+<|visual token 000969|>
+<|visual token 000970|>
+<|visual token 000971|>
+<|visual token 000972|>
+<|visual token 000973|>
+<|visual token 000974|>
+<|visual token 000975|>
+<|visual token 000976|>
+<|visual token 000977|>
+<|visual token 000978|>
+<|visual token 000979|>
+<|visual token 000980|>
+<|visual token 000981|>
+<|visual token 000982|>
+<|visual token 000983|>
+<|visual token 000984|>
+<|visual token 000985|>
+<|visual token 000986|>
+<|visual token 000987|>
+<|visual token 000988|>
+<|visual token 000989|>
+<|visual token 000990|>
+<|visual token 000991|>
+<|visual token 000992|>
+<|visual token 000993|>
+<|visual token 000994|>
+<|visual token 000995|>
+<|visual token 000996|>
+<|visual token 000997|>
+<|visual token 000998|>
+<|visual token 000999|>
+<|visual token 001000|>
+<|visual token 001001|>
+<|visual token 001002|>
+<|visual token 001003|>
+<|visual token 001004|>
+<|visual token 001005|>
+<|visual token 001006|>
+<|visual token 001007|>
+<|visual token 001008|>
+<|visual token 001009|>
+<|visual token 001010|>
+<|visual token 001011|>
+<|visual token 001012|>
+<|visual token 001013|>
+<|visual token 001014|>
+<|visual token 001015|>
+<|visual token 001016|>
+<|visual token 001017|>
+<|visual token 001018|>
+<|visual token 001019|>
+<|visual token 001020|>
+<|visual token 001021|>
+<|visual token 001022|>
+<|visual token 001023|>
+<|visual token 001024|>
+<|visual token 001025|>
+<|visual token 001026|>
+<|visual token 001027|>
+<|visual token 001028|>
+<|visual token 001029|>
+<|visual token 001030|>
+<|visual token 001031|>
+<|visual token 001032|>
+<|visual token 001033|>
+<|visual token 001034|>
+<|visual token 001035|>
+<|visual token 001036|>
+<|visual token 001037|>
+<|visual token 001038|>
+<|visual token 001039|>
+<|visual token 001040|>
+<|visual token 001041|>
+<|visual token 001042|>
+<|visual token 001043|>
+<|visual token 001044|>
+<|visual token 001045|>
+<|visual token 001046|>
+<|visual token 001047|>
+<|visual token 001048|>
+<|visual token 001049|>
+<|visual token 001050|>
+<|visual token 001051|>
+<|visual token 001052|>
+<|visual token 001053|>
+<|visual token 001054|>
+<|visual token 001055|>
+<|visual token 001056|>
+<|visual token 001057|>
+<|visual token 001058|>
+<|visual token 001059|>
+<|visual token 001060|>
+<|visual token 001061|>
+<|visual token 001062|>
+<|visual token 001063|>
+<|visual token 001064|>
+<|visual token 001065|>
+<|visual token 001066|>
+<|visual token 001067|>
+<|visual token 001068|>
+<|visual token 001069|>
+<|visual token 001070|>
+<|visual token 001071|>
+<|visual token 001072|>
+<|visual token 001073|>
+<|visual token 001074|>
+<|visual token 001075|>
+<|visual token 001076|>
+<|visual token 001077|>
+<|visual token 001078|>
+<|visual token 001079|>
+<|visual token 001080|>
+<|visual token 001081|>
+<|visual token 001082|>
+<|visual token 001083|>
+<|visual token 001084|>
+<|visual token 001085|>
+<|visual token 001086|>
+<|visual token 001087|>
+<|visual token 001088|>
+<|visual token 001089|>
+<|visual token 001090|>
+<|visual token 001091|>
+<|visual token 001092|>
+<|visual token 001093|>
+<|visual token 001094|>
+<|visual token 001095|>
+<|visual token 001096|>
+<|visual token 001097|>
+<|visual token 001098|>
+<|visual token 001099|>
+<|visual token 001100|>
+<|visual token 001101|>
+<|visual token 001102|>
+<|visual token 001103|>
+<|visual token 001104|>
+<|visual token 001105|>
+<|visual token 001106|>
+<|visual token 001107|>
+<|visual token 001108|>
+<|visual token 001109|>
+<|visual token 001110|>
+<|visual token 001111|>
+<|visual token 001112|>
+<|visual token 001113|>
+<|visual token 001114|>
+<|visual token 001115|>
+<|visual token 001116|>
+<|visual token 001117|>
+<|visual token 001118|>
+<|visual token 001119|>
+<|visual token 001120|>
+<|visual token 001121|>
+<|visual token 001122|>
+<|visual token 001123|>
+<|visual token 001124|>
+<|visual token 001125|>
+<|visual token 001126|>
+<|visual token 001127|>
+<|visual token 001128|>
+<|visual token 001129|>
+<|visual token 001130|>
+<|visual token 001131|>
+<|visual token 001132|>
+<|visual token 001133|>
+<|visual token 001134|>
+<|visual token 001135|>
+<|visual token 001136|>
+<|visual token 001137|>
+<|visual token 001138|>
+<|visual token 001139|>
+<|visual token 001140|>
+<|visual token 001141|>
+<|visual token 001142|>
+<|visual token 001143|>
+<|visual token 001144|>
+<|visual token 001145|>
+<|visual token 001146|>
+<|visual token 001147|>
+<|visual token 001148|>
+<|visual token 001149|>
+<|visual token 001150|>
+<|visual token 001151|>
+<|visual token 001152|>
+<|visual token 001153|>
+<|visual token 001154|>
+<|visual token 001155|>
+<|visual token 001156|>
+<|visual token 001157|>
+<|visual token 001158|>
+<|visual token 001159|>
+<|visual token 001160|>
+<|visual token 001161|>
+<|visual token 001162|>
+<|visual token 001163|>
+<|visual token 001164|>
+<|visual token 001165|>
+<|visual token 001166|>
+<|visual token 001167|>
+<|visual token 001168|>
+<|visual token 001169|>
+<|visual token 001170|>
+<|visual token 001171|>
+<|visual token 001172|>
+<|visual token 001173|>
+<|visual token 001174|>
+<|visual token 001175|>
+<|visual token 001176|>
+<|visual token 001177|>
+<|visual token 001178|>
+<|visual token 001179|>
+<|visual token 001180|>
+<|visual token 001181|>
+<|visual token 001182|>
+<|visual token 001183|>
+<|visual token 001184|>
+<|visual token 001185|>
+<|visual token 001186|>
+<|visual token 001187|>
+<|visual token 001188|>
+<|visual token 001189|>
+<|visual token 001190|>
+<|visual token 001191|>
+<|visual token 001192|>
+<|visual token 001193|>
+<|visual token 001194|>
+<|visual token 001195|>
+<|visual token 001196|>
+<|visual token 001197|>
+<|visual token 001198|>
+<|visual token 001199|>
+<|visual token 001200|>
+<|visual token 001201|>
+<|visual token 001202|>
+<|visual token 001203|>
+<|visual token 001204|>
+<|visual token 001205|>
+<|visual token 001206|>
+<|visual token 001207|>
+<|visual token 001208|>
+<|visual token 001209|>
+<|visual token 001210|>
+<|visual token 001211|>
+<|visual token 001212|>
+<|visual token 001213|>
+<|visual token 001214|>
+<|visual token 001215|>
+<|visual token 001216|>
+<|visual token 001217|>
+<|visual token 001218|>
+<|visual token 001219|>
+<|visual token 001220|>
+<|visual token 001221|>
+<|visual token 001222|>
+<|visual token 001223|>
+<|visual token 001224|>
+<|visual token 001225|>
+<|visual token 001226|>
+<|visual token 001227|>
+<|visual token 001228|>
+<|visual token 001229|>
+<|visual token 001230|>
+<|visual token 001231|>
+<|visual token 001232|>
+<|visual token 001233|>
+<|visual token 001234|>
+<|visual token 001235|>
+<|visual token 001236|>
+<|visual token 001237|>
+<|visual token 001238|>
+<|visual token 001239|>
+<|visual token 001240|>
+<|visual token 001241|>
+<|visual token 001242|>
+<|visual token 001243|>
+<|visual token 001244|>
+<|visual token 001245|>
+<|visual token 001246|>
+<|visual token 001247|>
+<|visual token 001248|>
+<|visual token 001249|>
+<|visual token 001250|>
+<|visual token 001251|>
+<|visual token 001252|>
+<|visual token 001253|>
+<|visual token 001254|>
+<|visual token 001255|>
+<|visual token 001256|>
+<|visual token 001257|>
+<|visual token 001258|>
+<|visual token 001259|>
+<|visual token 001260|>
+<|visual token 001261|>
+<|visual token 001262|>
+<|visual token 001263|>
+<|visual token 001264|>
+<|visual token 001265|>
+<|visual token 001266|>
+<|visual token 001267|>
+<|visual token 001268|>
+<|visual token 001269|>
+<|visual token 001270|>
+<|visual token 001271|>
+<|visual token 001272|>
+<|visual token 001273|>
+<|visual token 001274|>
+<|visual token 001275|>
+<|visual token 001276|>
+<|visual token 001277|>
+<|visual token 001278|>
+<|visual token 001279|>
+<|visual token 001280|>
+<|visual token 001281|>
+<|visual token 001282|>
+<|visual token 001283|>
+<|visual token 001284|>
+<|visual token 001285|>
+<|visual token 001286|>
+<|visual token 001287|>
+<|visual token 001288|>
+<|visual token 001289|>
+<|visual token 001290|>
+<|visual token 001291|>
+<|visual token 001292|>
+<|visual token 001293|>
+<|visual token 001294|>
+<|visual token 001295|>
+<|visual token 001296|>
+<|visual token 001297|>
+<|visual token 001298|>
+<|visual token 001299|>
+<|visual token 001300|>
+<|visual token 001301|>
+<|visual token 001302|>
+<|visual token 001303|>
+<|visual token 001304|>
+<|visual token 001305|>
+<|visual token 001306|>
+<|visual token 001307|>
+<|visual token 001308|>
+<|visual token 001309|>
+<|visual token 001310|>
+<|visual token 001311|>
+<|visual token 001312|>
+<|visual token 001313|>
+<|visual token 001314|>
+<|visual token 001315|>
+<|visual token 001316|>
+<|visual token 001317|>
+<|visual token 001318|>
+<|visual token 001319|>
+<|visual token 001320|>
+<|visual token 001321|>
+<|visual token 001322|>
+<|visual token 001323|>
+<|visual token 001324|>
+<|visual token 001325|>
+<|visual token 001326|>
+<|visual token 001327|>
+<|visual token 001328|>
+<|visual token 001329|>
+<|visual token 001330|>
+<|visual token 001331|>
+<|visual token 001332|>
+<|visual token 001333|>
+<|visual token 001334|>
+<|visual token 001335|>
+<|visual token 001336|>
+<|visual token 001337|>
+<|visual token 001338|>
+<|visual token 001339|>
+<|visual token 001340|>
+<|visual token 001341|>
+<|visual token 001342|>
+<|visual token 001343|>
+<|visual token 001344|>
+<|visual token 001345|>
+<|visual token 001346|>
+<|visual token 001347|>
+<|visual token 001348|>
+<|visual token 001349|>
+<|visual token 001350|>
+<|visual token 001351|>
+<|visual token 001352|>
+<|visual token 001353|>
+<|visual token 001354|>
+<|visual token 001355|>
+<|visual token 001356|>
+<|visual token 001357|>
+<|visual token 001358|>
+<|visual token 001359|>
+<|visual token 001360|>
+<|visual token 001361|>
+<|visual token 001362|>
+<|visual token 001363|>
+<|visual token 001364|>
+<|visual token 001365|>
+<|visual token 001366|>
+<|visual token 001367|>
+<|visual token 001368|>
+<|visual token 001369|>
+<|visual token 001370|>
+<|visual token 001371|>
+<|visual token 001372|>
+<|visual token 001373|>
+<|visual token 001374|>
+<|visual token 001375|>
+<|visual token 001376|>
+<|visual token 001377|>
+<|visual token 001378|>
+<|visual token 001379|>
+<|visual token 001380|>
+<|visual token 001381|>
+<|visual token 001382|>
+<|visual token 001383|>
+<|visual token 001384|>
+<|visual token 001385|>
+<|visual token 001386|>
+<|visual token 001387|>
+<|visual token 001388|>
+<|visual token 001389|>
+<|visual token 001390|>
+<|visual token 001391|>
+<|visual token 001392|>
+<|visual token 001393|>
+<|visual token 001394|>
+<|visual token 001395|>
+<|visual token 001396|>
+<|visual token 001397|>
+<|visual token 001398|>
+<|visual token 001399|>
+<|visual token 001400|>
+<|visual token 001401|>
+<|visual token 001402|>
+<|visual token 001403|>
+<|visual token 001404|>
+<|visual token 001405|>
+<|visual token 001406|>
+<|visual token 001407|>
+<|visual token 001408|>
+<|visual token 001409|>
+<|visual token 001410|>
+<|visual token 001411|>
+<|visual token 001412|>
+<|visual token 001413|>
+<|visual token 001414|>
+<|visual token 001415|>
+<|visual token 001416|>
+<|visual token 001417|>
+<|visual token 001418|>
+<|visual token 001419|>
+<|visual token 001420|>
+<|visual token 001421|>
+<|visual token 001422|>
+<|visual token 001423|>
+<|visual token 001424|>
+<|visual token 001425|>
+<|visual token 001426|>
+<|visual token 001427|>
+<|visual token 001428|>
+<|visual token 001429|>
+<|visual token 001430|>
+<|visual token 001431|>
+<|visual token 001432|>
+<|visual token 001433|>
+<|visual token 001434|>
+<|visual token 001435|>
+<|visual token 001436|>
+<|visual token 001437|>
+<|visual token 001438|>
+<|visual token 001439|>
+<|visual token 001440|>
+<|visual token 001441|>
+<|visual token 001442|>
+<|visual token 001443|>
+<|visual token 001444|>
+<|visual token 001445|>
+<|visual token 001446|>
+<|visual token 001447|>
+<|visual token 001448|>
+<|visual token 001449|>
+<|visual token 001450|>
+<|visual token 001451|>
+<|visual token 001452|>
+<|visual token 001453|>
+<|visual token 001454|>
+<|visual token 001455|>
+<|visual token 001456|>
+<|visual token 001457|>
+<|visual token 001458|>
+<|visual token 001459|>
+<|visual token 001460|>
+<|visual token 001461|>
+<|visual token 001462|>
+<|visual token 001463|>
+<|visual token 001464|>
+<|visual token 001465|>
+<|visual token 001466|>
+<|visual token 001467|>
+<|visual token 001468|>
+<|visual token 001469|>
+<|visual token 001470|>
+<|visual token 001471|>
+<|visual token 001472|>
+<|visual token 001473|>
+<|visual token 001474|>
+<|visual token 001475|>
+<|visual token 001476|>
+<|visual token 001477|>
+<|visual token 001478|>
+<|visual token 001479|>
+<|visual token 001480|>
+<|visual token 001481|>
+<|visual token 001482|>
+<|visual token 001483|>
+<|visual token 001484|>
+<|visual token 001485|>
+<|visual token 001486|>
+<|visual token 001487|>
+<|visual token 001488|>
+<|visual token 001489|>
+<|visual token 001490|>
+<|visual token 001491|>
+<|visual token 001492|>
+<|visual token 001493|>
+<|visual token 001494|>
+<|visual token 001495|>
+<|visual token 001496|>
+<|visual token 001497|>
+<|visual token 001498|>
+<|visual token 001499|>
+<|visual token 001500|>
+<|visual token 001501|>
+<|visual token 001502|>
+<|visual token 001503|>
+<|visual token 001504|>
+<|visual token 001505|>
+<|visual token 001506|>
+<|visual token 001507|>
+<|visual token 001508|>
+<|visual token 001509|>
+<|visual token 001510|>
+<|visual token 001511|>
+<|visual token 001512|>
+<|visual token 001513|>
+<|visual token 001514|>
+<|visual token 001515|>
+<|visual token 001516|>
+<|visual token 001517|>
+<|visual token 001518|>
+<|visual token 001519|>
+<|visual token 001520|>
+<|visual token 001521|>
+<|visual token 001522|>
+<|visual token 001523|>
+<|visual token 001524|>
+<|visual token 001525|>
+<|visual token 001526|>
+<|visual token 001527|>
+<|visual token 001528|>
+<|visual token 001529|>
+<|visual token 001530|>
+<|visual token 001531|>
+<|visual token 001532|>
+<|visual token 001533|>
+<|visual token 001534|>
+<|visual token 001535|>
+<|visual token 001536|>
+<|visual token 001537|>
+<|visual token 001538|>
+<|visual token 001539|>
+<|visual token 001540|>
+<|visual token 001541|>
+<|visual token 001542|>
+<|visual token 001543|>
+<|visual token 001544|>
+<|visual token 001545|>
+<|visual token 001546|>
+<|visual token 001547|>
+<|visual token 001548|>
+<|visual token 001549|>
+<|visual token 001550|>
+<|visual token 001551|>
+<|visual token 001552|>
+<|visual token 001553|>
+<|visual token 001554|>
+<|visual token 001555|>
+<|visual token 001556|>
+<|visual token 001557|>
+<|visual token 001558|>
+<|visual token 001559|>
+<|visual token 001560|>
+<|visual token 001561|>
+<|visual token 001562|>
+<|visual token 001563|>
+<|visual token 001564|>
+<|visual token 001565|>
+<|visual token 001566|>
+<|visual token 001567|>
+<|visual token 001568|>
+<|visual token 001569|>
+<|visual token 001570|>
+<|visual token 001571|>
+<|visual token 001572|>
+<|visual token 001573|>
+<|visual token 001574|>
+<|visual token 001575|>
+<|visual token 001576|>
+<|visual token 001577|>
+<|visual token 001578|>
+<|visual token 001579|>
+<|visual token 001580|>
+<|visual token 001581|>
+<|visual token 001582|>
+<|visual token 001583|>
+<|visual token 001584|>
+<|visual token 001585|>
+<|visual token 001586|>
+<|visual token 001587|>
+<|visual token 001588|>
+<|visual token 001589|>
+<|visual token 001590|>
+<|visual token 001591|>
+<|visual token 001592|>
+<|visual token 001593|>
+<|visual token 001594|>
+<|visual token 001595|>
+<|visual token 001596|>
+<|visual token 001597|>
+<|visual token 001598|>
+<|visual token 001599|>
+<|visual token 001600|>
+<|visual token 001601|>
+<|visual token 001602|>
+<|visual token 001603|>
+<|visual token 001604|>
+<|visual token 001605|>
+<|visual token 001606|>
+<|visual token 001607|>
+<|visual token 001608|>
+<|visual token 001609|>
+<|visual token 001610|>
+<|visual token 001611|>
+<|visual token 001612|>
+<|visual token 001613|>
+<|visual token 001614|>
+<|visual token 001615|>
+<|visual token 001616|>
+<|visual token 001617|>
+<|visual token 001618|>
+<|visual token 001619|>
+<|visual token 001620|>
+<|visual token 001621|>
+<|visual token 001622|>
+<|visual token 001623|>
+<|visual token 001624|>
+<|visual token 001625|>
+<|visual token 001626|>
+<|visual token 001627|>
+<|visual token 001628|>
+<|visual token 001629|>
+<|visual token 001630|>
+<|visual token 001631|>
+<|visual token 001632|>
+<|visual token 001633|>
+<|visual token 001634|>
+<|visual token 001635|>
+<|visual token 001636|>
+<|visual token 001637|>
+<|visual token 001638|>
+<|visual token 001639|>
+<|visual token 001640|>
+<|visual token 001641|>
+<|visual token 001642|>
+<|visual token 001643|>
+<|visual token 001644|>
+<|visual token 001645|>
+<|visual token 001646|>
+<|visual token 001647|>
+<|visual token 001648|>
+<|visual token 001649|>
+<|visual token 001650|>
+<|visual token 001651|>
+<|visual token 001652|>
+<|visual token 001653|>
+<|visual token 001654|>
+<|visual token 001655|>
+<|visual token 001656|>
+<|visual token 001657|>
+<|visual token 001658|>
+<|visual token 001659|>
+<|visual token 001660|>
+<|visual token 001661|>
+<|visual token 001662|>
+<|visual token 001663|>
+<|visual token 001664|>
+<|visual token 001665|>
+<|visual token 001666|>
+<|visual token 001667|>
+<|visual token 001668|>
+<|visual token 001669|>
+<|visual token 001670|>
+<|visual token 001671|>
+<|visual token 001672|>
+<|visual token 001673|>
+<|visual token 001674|>
+<|visual token 001675|>
+<|visual token 001676|>
+<|visual token 001677|>
+<|visual token 001678|>
+<|visual token 001679|>
+<|visual token 001680|>
+<|visual token 001681|>
+<|visual token 001682|>
+<|visual token 001683|>
+<|visual token 001684|>
+<|visual token 001685|>
+<|visual token 001686|>
+<|visual token 001687|>
+<|visual token 001688|>
+<|visual token 001689|>
+<|visual token 001690|>
+<|visual token 001691|>
+<|visual token 001692|>
+<|visual token 001693|>
+<|visual token 001694|>
+<|visual token 001695|>
+<|visual token 001696|>
+<|visual token 001697|>
+<|visual token 001698|>
+<|visual token 001699|>
+<|visual token 001700|>
+<|visual token 001701|>
+<|visual token 001702|>
+<|visual token 001703|>
+<|visual token 001704|>
+<|visual token 001705|>
+<|visual token 001706|>
+<|visual token 001707|>
+<|visual token 001708|>
+<|visual token 001709|>
+<|visual token 001710|>
+<|visual token 001711|>
+<|visual token 001712|>
+<|visual token 001713|>
+<|visual token 001714|>
+<|visual token 001715|>
+<|visual token 001716|>
+<|visual token 001717|>
+<|visual token 001718|>
+<|visual token 001719|>
+<|visual token 001720|>
+<|visual token 001721|>
+<|visual token 001722|>
+<|visual token 001723|>
+<|visual token 001724|>
+<|visual token 001725|>
+<|visual token 001726|>
+<|visual token 001727|>
+<|visual token 001728|>
+<|visual token 001729|>
+<|visual token 001730|>
+<|visual token 001731|>
+<|visual token 001732|>
+<|visual token 001733|>
+<|visual token 001734|>
+<|visual token 001735|>
+<|visual token 001736|>
+<|visual token 001737|>
+<|visual token 001738|>
+<|visual token 001739|>
+<|visual token 001740|>
+<|visual token 001741|>
+<|visual token 001742|>
+<|visual token 001743|>
+<|visual token 001744|>
+<|visual token 001745|>
+<|visual token 001746|>
+<|visual token 001747|>
+<|visual token 001748|>
+<|visual token 001749|>
+<|visual token 001750|>
+<|visual token 001751|>
+<|visual token 001752|>
+<|visual token 001753|>
+<|visual token 001754|>
+<|visual token 001755|>
+<|visual token 001756|>
+<|visual token 001757|>
+<|visual token 001758|>
+<|visual token 001759|>
+<|visual token 001760|>
+<|visual token 001761|>
+<|visual token 001762|>
+<|visual token 001763|>
+<|visual token 001764|>
+<|visual token 001765|>
+<|visual token 001766|>
+<|visual token 001767|>
+<|visual token 001768|>
+<|visual token 001769|>
+<|visual token 001770|>
+<|visual token 001771|>
+<|visual token 001772|>
+<|visual token 001773|>
+<|visual token 001774|>
+<|visual token 001775|>
+<|visual token 001776|>
+<|visual token 001777|>
+<|visual token 001778|>
+<|visual token 001779|>
+<|visual token 001780|>
+<|visual token 001781|>
+<|visual token 001782|>
+<|visual token 001783|>
+<|visual token 001784|>
+<|visual token 001785|>
+<|visual token 001786|>
+<|visual token 001787|>
+<|visual token 001788|>
+<|visual token 001789|>
+<|visual token 001790|>
+<|visual token 001791|>
+<|visual token 001792|>
+<|visual token 001793|>
+<|visual token 001794|>
+<|visual token 001795|>
+<|visual token 001796|>
+<|visual token 001797|>
+<|visual token 001798|>
+<|visual token 001799|>
+<|visual token 001800|>
+<|visual token 001801|>
+<|visual token 001802|>
+<|visual token 001803|>
+<|visual token 001804|>
+<|visual token 001805|>
+<|visual token 001806|>
+<|visual token 001807|>
+<|visual token 001808|>
+<|visual token 001809|>
+<|visual token 001810|>
+<|visual token 001811|>
+<|visual token 001812|>
+<|visual token 001813|>
+<|visual token 001814|>
+<|visual token 001815|>
+<|visual token 001816|>
+<|visual token 001817|>
+<|visual token 001818|>
+<|visual token 001819|>
+<|visual token 001820|>
+<|visual token 001821|>
+<|visual token 001822|>
+<|visual token 001823|>
+<|visual token 001824|>
+<|visual token 001825|>
+<|visual token 001826|>
+<|visual token 001827|>
+<|visual token 001828|>
+<|visual token 001829|>
+<|visual token 001830|>
+<|visual token 001831|>
+<|visual token 001832|>
+<|visual token 001833|>
+<|visual token 001834|>
+<|visual token 001835|>
+<|visual token 001836|>
+<|visual token 001837|>
+<|visual token 001838|>
+<|visual token 001839|>
+<|visual token 001840|>
+<|visual token 001841|>
+<|visual token 001842|>
+<|visual token 001843|>
+<|visual token 001844|>
+<|visual token 001845|>
+<|visual token 001846|>
+<|visual token 001847|>
+<|visual token 001848|>
+<|visual token 001849|>
+<|visual token 001850|>
+<|visual token 001851|>
+<|visual token 001852|>
+<|visual token 001853|>
+<|visual token 001854|>
+<|visual token 001855|>
+<|visual token 001856|>
+<|visual token 001857|>
+<|visual token 001858|>
+<|visual token 001859|>
+<|visual token 001860|>
+<|visual token 001861|>
+<|visual token 001862|>
+<|visual token 001863|>
+<|visual token 001864|>
+<|visual token 001865|>
+<|visual token 001866|>
+<|visual token 001867|>
+<|visual token 001868|>
+<|visual token 001869|>
+<|visual token 001870|>
+<|visual token 001871|>
+<|visual token 001872|>
+<|visual token 001873|>
+<|visual token 001874|>
+<|visual token 001875|>
+<|visual token 001876|>
+<|visual token 001877|>
+<|visual token 001878|>
+<|visual token 001879|>
+<|visual token 001880|>
+<|visual token 001881|>
+<|visual token 001882|>
+<|visual token 001883|>
+<|visual token 001884|>
+<|visual token 001885|>
+<|visual token 001886|>
+<|visual token 001887|>
+<|visual token 001888|>
+<|visual token 001889|>
+<|visual token 001890|>
+<|visual token 001891|>
+<|visual token 001892|>
+<|visual token 001893|>
+<|visual token 001894|>
+<|visual token 001895|>
+<|visual token 001896|>
+<|visual token 001897|>
+<|visual token 001898|>
+<|visual token 001899|>
+<|visual token 001900|>
+<|visual token 001901|>
+<|visual token 001902|>
+<|visual token 001903|>
+<|visual token 001904|>
+<|visual token 001905|>
+<|visual token 001906|>
+<|visual token 001907|>
+<|visual token 001908|>
+<|visual token 001909|>
+<|visual token 001910|>
+<|visual token 001911|>
+<|visual token 001912|>
+<|visual token 001913|>
+<|visual token 001914|>
+<|visual token 001915|>
+<|visual token 001916|>
+<|visual token 001917|>
+<|visual token 001918|>
+<|visual token 001919|>
+<|visual token 001920|>
+<|visual token 001921|>
+<|visual token 001922|>
+<|visual token 001923|>
+<|visual token 001924|>
+<|visual token 001925|>
+<|visual token 001926|>
+<|visual token 001927|>
+<|visual token 001928|>
+<|visual token 001929|>
+<|visual token 001930|>
+<|visual token 001931|>
+<|visual token 001932|>
+<|visual token 001933|>
+<|visual token 001934|>
+<|visual token 001935|>
+<|visual token 001936|>
+<|visual token 001937|>
+<|visual token 001938|>
+<|visual token 001939|>
+<|visual token 001940|>
+<|visual token 001941|>
+<|visual token 001942|>
+<|visual token 001943|>
+<|visual token 001944|>
+<|visual token 001945|>
+<|visual token 001946|>
+<|visual token 001947|>
+<|visual token 001948|>
+<|visual token 001949|>
+<|visual token 001950|>
+<|visual token 001951|>
+<|visual token 001952|>
+<|visual token 001953|>
+<|visual token 001954|>
+<|visual token 001955|>
+<|visual token 001956|>
+<|visual token 001957|>
+<|visual token 001958|>
+<|visual token 001959|>
+<|visual token 001960|>
+<|visual token 001961|>
+<|visual token 001962|>
+<|visual token 001963|>
+<|visual token 001964|>
+<|visual token 001965|>
+<|visual token 001966|>
+<|visual token 001967|>
+<|visual token 001968|>
+<|visual token 001969|>
+<|visual token 001970|>
+<|visual token 001971|>
+<|visual token 001972|>
+<|visual token 001973|>
+<|visual token 001974|>
+[... 2,560 further consecutive entries, <|visual token 001975|> through <|visual token 004534|>, one per line, elided ...]
+<|visual token 004535|>
+<|visual token 004536|>
+<|visual token 004537|>
+<|visual token 004538|>
+<|visual token 004539|>
+<|visual token 004540|>
+<|visual token 004541|>
+<|visual token 004542|>
+<|visual token 004543|>
+<|visual token 004544|>
+<|visual token 004545|>
+<|visual token 004546|>
+<|visual token 004547|>
+<|visual token 004548|>
+<|visual token 004549|>
+<|visual token 004550|>
+<|visual token 004551|>
+<|visual token 004552|>
+<|visual token 004553|>
+<|visual token 004554|>
+<|visual token 004555|>
+<|visual token 004556|>
+<|visual token 004557|>
+<|visual token 004558|>
+<|visual token 004559|>
+<|visual token 004560|>
+<|visual token 004561|>
+<|visual token 004562|>
+<|visual token 004563|>
+<|visual token 004564|>
+<|visual token 004565|>
+<|visual token 004566|>
+<|visual token 004567|>
+<|visual token 004568|>
+<|visual token 004569|>
+<|visual token 004570|>
+<|visual token 004571|>
+<|visual token 004572|>
+<|visual token 004573|>
+<|visual token 004574|>
+<|visual token 004575|>
+<|visual token 004576|>
+<|visual token 004577|>
+<|visual token 004578|>
+<|visual token 004579|>
+<|visual token 004580|>
+<|visual token 004581|>
+<|visual token 004582|>
+<|visual token 004583|>
+<|visual token 004584|>
+<|visual token 004585|>
+<|visual token 004586|>
+<|visual token 004587|>
+<|visual token 004588|>
+<|visual token 004589|>
+<|visual token 004590|>
+<|visual token 004591|>
+<|visual token 004592|>
+<|visual token 004593|>
+<|visual token 004594|>
+<|visual token 004595|>
+<|visual token 004596|>
+<|visual token 004597|>
+<|visual token 004598|>
+<|visual token 004599|>
+<|visual token 004600|>
+<|visual token 004601|>
+<|visual token 004602|>
+<|visual token 004603|>
+<|visual token 004604|>
+<|visual token 004605|>
+<|visual token 004606|>
+<|visual token 004607|>
+<|visual token 004608|>
+<|visual token 004609|>
+<|visual token 004610|>
+<|visual token 004611|>
+<|visual token 004612|>
+<|visual token 004613|>
+<|visual token 004614|>
+<|visual token 004615|>
+<|visual token 004616|>
+<|visual token 004617|>
+<|visual token 004618|>
+<|visual token 004619|>
+<|visual token 004620|>
+<|visual token 004621|>
+<|visual token 004622|>
+<|visual token 004623|>
+<|visual token 004624|>
+<|visual token 004625|>
+<|visual token 004626|>
+<|visual token 004627|>
+<|visual token 004628|>
+<|visual token 004629|>
+<|visual token 004630|>
+<|visual token 004631|>
+<|visual token 004632|>
+<|visual token 004633|>
+<|visual token 004634|>
+<|visual token 004635|>
+<|visual token 004636|>
+<|visual token 004637|>
+<|visual token 004638|>
+<|visual token 004639|>
+<|visual token 004640|>
+<|visual token 004641|>
+<|visual token 004642|>
+<|visual token 004643|>
+<|visual token 004644|>
+<|visual token 004645|>
+<|visual token 004646|>
+<|visual token 004647|>
+<|visual token 004648|>
+<|visual token 004649|>
+<|visual token 004650|>
+<|visual token 004651|>
+<|visual token 004652|>
+<|visual token 004653|>
+<|visual token 004654|>
+<|visual token 004655|>
+<|visual token 004656|>
+<|visual token 004657|>
+<|visual token 004658|>
+<|visual token 004659|>
+<|visual token 004660|>
+<|visual token 004661|>
+<|visual token 004662|>
+<|visual token 004663|>
+<|visual token 004664|>
+<|visual token 004665|>
+<|visual token 004666|>
+<|visual token 004667|>
+<|visual token 004668|>
+<|visual token 004669|>
+<|visual token 004670|>
+<|visual token 004671|>
+<|visual token 004672|>
+<|visual token 004673|>
+<|visual token 004674|>
+<|visual token 004675|>
+<|visual token 004676|>
+<|visual token 004677|>
+<|visual token 004678|>
+<|visual token 004679|>
+<|visual token 004680|>
+<|visual token 004681|>
+<|visual token 004682|>
+<|visual token 004683|>
+<|visual token 004684|>
+<|visual token 004685|>
+<|visual token 004686|>
+<|visual token 004687|>
+<|visual token 004688|>
+<|visual token 004689|>
+<|visual token 004690|>
+<|visual token 004691|>
+<|visual token 004692|>
+<|visual token 004693|>
+<|visual token 004694|>
+<|visual token 004695|>
+<|visual token 004696|>
+<|visual token 004697|>
+<|visual token 004698|>
+<|visual token 004699|>
+<|visual token 004700|>
+<|visual token 004701|>
+<|visual token 004702|>
+<|visual token 004703|>
+<|visual token 004704|>
+<|visual token 004705|>
+<|visual token 004706|>
+<|visual token 004707|>
+<|visual token 004708|>
+<|visual token 004709|>
+<|visual token 004710|>
+<|visual token 004711|>
+<|visual token 004712|>
+<|visual token 004713|>
+<|visual token 004714|>
+<|visual token 004715|>
+<|visual token 004716|>
+<|visual token 004717|>
+<|visual token 004718|>
+<|visual token 004719|>
+<|visual token 004720|>
+<|visual token 004721|>
+<|visual token 004722|>
+<|visual token 004723|>
+<|visual token 004724|>
+<|visual token 004725|>
+<|visual token 004726|>
+<|visual token 004727|>
+<|visual token 004728|>
+<|visual token 004729|>
+<|visual token 004730|>
+<|visual token 004731|>
+<|visual token 004732|>
+<|visual token 004733|>
+<|visual token 004734|>
+<|visual token 004735|>
+<|visual token 004736|>
+<|visual token 004737|>
+<|visual token 004738|>
+<|visual token 004739|>
+<|visual token 004740|>
+<|visual token 004741|>
+<|visual token 004742|>
+<|visual token 004743|>
+<|visual token 004744|>
+<|visual token 004745|>
+<|visual token 004746|>
+<|visual token 004747|>
+<|visual token 004748|>
+<|visual token 004749|>
+<|visual token 004750|>
+<|visual token 004751|>
+<|visual token 004752|>
+<|visual token 004753|>
+<|visual token 004754|>
+<|visual token 004755|>
+<|visual token 004756|>
+<|visual token 004757|>
+<|visual token 004758|>
+<|visual token 004759|>
+<|visual token 004760|>
+<|visual token 004761|>
+<|visual token 004762|>
+<|visual token 004763|>
+<|visual token 004764|>
+<|visual token 004765|>
+<|visual token 004766|>
+<|visual token 004767|>
+<|visual token 004768|>
+<|visual token 004769|>
+<|visual token 004770|>
+<|visual token 004771|>
+<|visual token 004772|>
+<|visual token 004773|>
+<|visual token 004774|>
+<|visual token 004775|>
+<|visual token 004776|>
+<|visual token 004777|>
+<|visual token 004778|>
+<|visual token 004779|>
+<|visual token 004780|>
+<|visual token 004781|>
+<|visual token 004782|>
+<|visual token 004783|>
+<|visual token 004784|>
+<|visual token 004785|>
+<|visual token 004786|>
+<|visual token 004787|>
+<|visual token 004788|>
+<|visual token 004789|>
+<|visual token 004790|>
+<|visual token 004791|>
+<|visual token 004792|>
+<|visual token 004793|>
+<|visual token 004794|>
+<|visual token 004795|>
+<|visual token 004796|>
+<|visual token 004797|>
+<|visual token 004798|>
+<|visual token 004799|>
+<|visual token 004800|>
+<|visual token 004801|>
+<|visual token 004802|>
+<|visual token 004803|>
+<|visual token 004804|>
+<|visual token 004805|>
+<|visual token 004806|>
+<|visual token 004807|>
+<|visual token 004808|>
+<|visual token 004809|>
+<|visual token 004810|>
+<|visual token 004811|>
+<|visual token 004812|>
+<|visual token 004813|>
+<|visual token 004814|>
+<|visual token 004815|>
+<|visual token 004816|>
+<|visual token 004817|>
+<|visual token 004818|>
+<|visual token 004819|>
+<|visual token 004820|>
+<|visual token 004821|>
+<|visual token 004822|>
+<|visual token 004823|>
+<|visual token 004824|>
+<|visual token 004825|>
+<|visual token 004826|>
+<|visual token 004827|>
+<|visual token 004828|>
+<|visual token 004829|>
+<|visual token 004830|>
+<|visual token 004831|>
+<|visual token 004832|>
+<|visual token 004833|>
+<|visual token 004834|>
+<|visual token 004835|>
+<|visual token 004836|>
+<|visual token 004837|>
+<|visual token 004838|>
+<|visual token 004839|>
+<|visual token 004840|>
+<|visual token 004841|>
+<|visual token 004842|>
+<|visual token 004843|>
+<|visual token 004844|>
+<|visual token 004845|>
+<|visual token 004846|>
+<|visual token 004847|>
+<|visual token 004848|>
+<|visual token 004849|>
+<|visual token 004850|>
+<|visual token 004851|>
+<|visual token 004852|>
+<|visual token 004853|>
+<|visual token 004854|>
+<|visual token 004855|>
+<|visual token 004856|>
+<|visual token 004857|>
+<|visual token 004858|>
+<|visual token 004859|>
+<|visual token 004860|>
+<|visual token 004861|>
+<|visual token 004862|>
+<|visual token 004863|>
+<|visual token 004864|>
+<|visual token 004865|>
+<|visual token 004866|>
+<|visual token 004867|>
+<|visual token 004868|>
+<|visual token 004869|>
+<|visual token 004870|>
+<|visual token 004871|>
+<|visual token 004872|>
+<|visual token 004873|>
+<|visual token 004874|>
+<|visual token 004875|>
+<|visual token 004876|>
+<|visual token 004877|>
+<|visual token 004878|>
+<|visual token 004879|>
+<|visual token 004880|>
+<|visual token 004881|>
+<|visual token 004882|>
+<|visual token 004883|>
+<|visual token 004884|>
+<|visual token 004885|>
+<|visual token 004886|>
+<|visual token 004887|>
+<|visual token 004888|>
+<|visual token 004889|>
+<|visual token 004890|>
+<|visual token 004891|>
+<|visual token 004892|>
+<|visual token 004893|>
+<|visual token 004894|>
+<|visual token 004895|>
+<|visual token 004896|>
+<|visual token 004897|>
+<|visual token 004898|>
+<|visual token 004899|>
+<|visual token 004900|>
+<|visual token 004901|>
+<|visual token 004902|>
+<|visual token 004903|>
+<|visual token 004904|>
+<|visual token 004905|>
+<|visual token 004906|>
+<|visual token 004907|>
+<|visual token 004908|>
+<|visual token 004909|>
+<|visual token 004910|>
+<|visual token 004911|>
+<|visual token 004912|>
+<|visual token 004913|>
+<|visual token 004914|>
+<|visual token 004915|>
+<|visual token 004916|>
+<|visual token 004917|>
+<|visual token 004918|>
+<|visual token 004919|>
+<|visual token 004920|>
+<|visual token 004921|>
+<|visual token 004922|>
+<|visual token 004923|>
+<|visual token 004924|>
+<|visual token 004925|>
+<|visual token 004926|>
+<|visual token 004927|>
+<|visual token 004928|>
+<|visual token 004929|>
+<|visual token 004930|>
+<|visual token 004931|>
+<|visual token 004932|>
+<|visual token 004933|>
+<|visual token 004934|>
+<|visual token 004935|>
+<|visual token 004936|>
+<|visual token 004937|>
+<|visual token 004938|>
+<|visual token 004939|>
+<|visual token 004940|>
+<|visual token 004941|>
+<|visual token 004942|>
+<|visual token 004943|>
+<|visual token 004944|>
+<|visual token 004945|>
+<|visual token 004946|>
+<|visual token 004947|>
+<|visual token 004948|>
+<|visual token 004949|>
+<|visual token 004950|>
+<|visual token 004951|>
+<|visual token 004952|>
+<|visual token 004953|>
+<|visual token 004954|>
+<|visual token 004955|>
+<|visual token 004956|>
+<|visual token 004957|>
+<|visual token 004958|>
+<|visual token 004959|>
+<|visual token 004960|>
+<|visual token 004961|>
+<|visual token 004962|>
+<|visual token 004963|>
+<|visual token 004964|>
+<|visual token 004965|>
+<|visual token 004966|>
+<|visual token 004967|>
+<|visual token 004968|>
+<|visual token 004969|>
+<|visual token 004970|>
+<|visual token 004971|>
+<|visual token 004972|>
+<|visual token 004973|>
+<|visual token 004974|>
+<|visual token 004975|>
+<|visual token 004976|>
+<|visual token 004977|>
+<|visual token 004978|>
+<|visual token 004979|>
+<|visual token 004980|>
+<|visual token 004981|>
+<|visual token 004982|>
+<|visual token 004983|>
+<|visual token 004984|>
+<|visual token 004985|>
+<|visual token 004986|>
+<|visual token 004987|>
+<|visual token 004988|>
+<|visual token 004989|>
+<|visual token 004990|>
+<|visual token 004991|>
+<|visual token 004992|>
+<|visual token 004993|>
+<|visual token 004994|>
+<|visual token 004995|>
+<|visual token 004996|>
+<|visual token 004997|>
+<|visual token 004998|>
+<|visual token 004999|>
+<|visual token 005000|>
+<|visual token 005001|>
+<|visual token 005002|>
+<|visual token 005003|>
+<|visual token 005004|>
+<|visual token 005005|>
+<|visual token 005006|>
+<|visual token 005007|>
+<|visual token 005008|>
+<|visual token 005009|>
+<|visual token 005010|>
+<|visual token 005011|>
+<|visual token 005012|>
+<|visual token 005013|>
+<|visual token 005014|>
+<|visual token 005015|>
+<|visual token 005016|>
+<|visual token 005017|>
+<|visual token 005018|>
+<|visual token 005019|>
+<|visual token 005020|>
+<|visual token 005021|>
+<|visual token 005022|>
+<|visual token 005023|>
+<|visual token 005024|>
+<|visual token 005025|>
+<|visual token 005026|>
+<|visual token 005027|>
+<|visual token 005028|>
+<|visual token 005029|>
+<|visual token 005030|>
+<|visual token 005031|>
+<|visual token 005032|>
+<|visual token 005033|>
+<|visual token 005034|>
+<|visual token 005035|>
+<|visual token 005036|>
+<|visual token 005037|>
+<|visual token 005038|>
+<|visual token 005039|>
+<|visual token 005040|>
+<|visual token 005041|>
+<|visual token 005042|>
+<|visual token 005043|>
+<|visual token 005044|>
+<|visual token 005045|>
+<|visual token 005046|>
+<|visual token 005047|>
+<|visual token 005048|>
+<|visual token 005049|>
+<|visual token 005050|>
+<|visual token 005051|>
+<|visual token 005052|>
+<|visual token 005053|>
+<|visual token 005054|>
+<|visual token 005055|>
+<|visual token 005056|>
+<|visual token 005057|>
+<|visual token 005058|>
+<|visual token 005059|>
+<|visual token 005060|>
+<|visual token 005061|>
+<|visual token 005062|>
+<|visual token 005063|>
+<|visual token 005064|>
+<|visual token 005065|>
+<|visual token 005066|>
+<|visual token 005067|>
+<|visual token 005068|>
+<|visual token 005069|>
+<|visual token 005070|>
+<|visual token 005071|>
+<|visual token 005072|>
+<|visual token 005073|>
+<|visual token 005074|>
+<|visual token 005075|>
+<|visual token 005076|>
+<|visual token 005077|>
+<|visual token 005078|>
+<|visual token 005079|>
+<|visual token 005080|>
+<|visual token 005081|>
+<|visual token 005082|>
+<|visual token 005083|>
+<|visual token 005084|>
+<|visual token 005085|>
+<|visual token 005086|>
+<|visual token 005087|>
+<|visual token 005088|>
+<|visual token 005089|>
+<|visual token 005090|>
+<|visual token 005091|>
+<|visual token 005092|>
+<|visual token 005093|>
+<|visual token 005094|>
+<|visual token 005095|>
+<|visual token 005096|>
+<|visual token 005097|>
+<|visual token 005098|>
+<|visual token 005099|>
+<|visual token 005100|>
+<|visual token 005101|>
+<|visual token 005102|>
+<|visual token 005103|>
+<|visual token 005104|>
+<|visual token 005105|>
+<|visual token 005106|>
+<|visual token 005107|>
+<|visual token 005108|>
+<|visual token 005109|>
+<|visual token 005110|>
+<|visual token 005111|>
+<|visual token 005112|>
+<|visual token 005113|>
+<|visual token 005114|>
+<|visual token 005115|>
+<|visual token 005116|>
+<|visual token 005117|>
+<|visual token 005118|>
+<|visual token 005119|>
+<|visual token 005120|>
+<|visual token 005121|>
+<|visual token 005122|>
+<|visual token 005123|>
+<|visual token 005124|>
+<|visual token 005125|>
+<|visual token 005126|>
+<|visual token 005127|>
+<|visual token 005128|>
+<|visual token 005129|>
+<|visual token 005130|>
+<|visual token 005131|>
+<|visual token 005132|>
+<|visual token 005133|>
+<|visual token 005134|>
+<|visual token 005135|>
+<|visual token 005136|>
+<|visual token 005137|>
+<|visual token 005138|>
+<|visual token 005139|>
+<|visual token 005140|>
+<|visual token 005141|>
+<|visual token 005142|>
+<|visual token 005143|>
+<|visual token 005144|>
+<|visual token 005145|>
+<|visual token 005146|>
+<|visual token 005147|>
+<|visual token 005148|>
+<|visual token 005149|>
+<|visual token 005150|>
+<|visual token 005151|>
+<|visual token 005152|>
+<|visual token 005153|>
+<|visual token 005154|>
+<|visual token 005155|>
+<|visual token 005156|>
+<|visual token 005157|>
+<|visual token 005158|>
+<|visual token 005159|>
+<|visual token 005160|>
+<|visual token 005161|>
+<|visual token 005162|>
+<|visual token 005163|>
+<|visual token 005164|>
+<|visual token 005165|>
+<|visual token 005166|>
+<|visual token 005167|>
+<|visual token 005168|>
+<|visual token 005169|>
+<|visual token 005170|>
+<|visual token 005171|>
+<|visual token 005172|>
+<|visual token 005173|>
+<|visual token 005174|>
+<|visual token 005175|>
+<|visual token 005176|>
+<|visual token 005177|>
+<|visual token 005178|>
+<|visual token 005179|>
+<|visual token 005180|>
+<|visual token 005181|>
+<|visual token 005182|>
+<|visual token 005183|>
+<|visual token 005184|>
+<|visual token 005185|>
+<|visual token 005186|>
+<|visual token 005187|>
+<|visual token 005188|>
+<|visual token 005189|>
+<|visual token 005190|>
+<|visual token 005191|>
+<|visual token 005192|>
+<|visual token 005193|>
+<|visual token 005194|>
+<|visual token 005195|>
+<|visual token 005196|>
+<|visual token 005197|>
+<|visual token 005198|>
+<|visual token 005199|>
+<|visual token 005200|>
+<|visual token 005201|>
+<|visual token 005202|>
+<|visual token 005203|>
+<|visual token 005204|>
+<|visual token 005205|>
+<|visual token 005206|>
+<|visual token 005207|>
+<|visual token 005208|>
+<|visual token 005209|>
+<|visual token 005210|>
+<|visual token 005211|>
+<|visual token 005212|>
+<|visual token 005213|>
+<|visual token 005214|>
+<|visual token 005215|>
+<|visual token 005216|>
+<|visual token 005217|>
+<|visual token 005218|>
+<|visual token 005219|>
+<|visual token 005220|>
+<|visual token 005221|>
+<|visual token 005222|>
+<|visual token 005223|>
+<|visual token 005224|>
+<|visual token 005225|>
+<|visual token 005226|>
+<|visual token 005227|>
+<|visual token 005228|>
+<|visual token 005229|>
+<|visual token 005230|>
+<|visual token 005231|>
+<|visual token 005232|>
+<|visual token 005233|>
+<|visual token 005234|>
+<|visual token 005235|>
+<|visual token 005236|>
+<|visual token 005237|>
+<|visual token 005238|>
+<|visual token 005239|>
+<|visual token 005240|>
+<|visual token 005241|>
+<|visual token 005242|>
+<|visual token 005243|>
+<|visual token 005244|>
+<|visual token 005245|>
+<|visual token 005246|>
+<|visual token 005247|>
+<|visual token 005248|>
+<|visual token 005249|>
+<|visual token 005250|>
+<|visual token 005251|>
+<|visual token 005252|>
+<|visual token 005253|>
+<|visual token 005254|>
+<|visual token 005255|>
+<|visual token 005256|>
+<|visual token 005257|>
+<|visual token 005258|>
+<|visual token 005259|>
+<|visual token 005260|>
+<|visual token 005261|>
+<|visual token 005262|>
+<|visual token 005263|>
+<|visual token 005264|>
+<|visual token 005265|>
+<|visual token 005266|>
+<|visual token 005267|>
+<|visual token 005268|>
+<|visual token 005269|>
+<|visual token 005270|>
+<|visual token 005271|>
+<|visual token 005272|>
+<|visual token 005273|>
+<|visual token 005274|>
+<|visual token 005275|>
+<|visual token 005276|>
+<|visual token 005277|>
+<|visual token 005278|>
+<|visual token 005279|>
+<|visual token 005280|>
+<|visual token 005281|>
+<|visual token 005282|>
+<|visual token 005283|>
+<|visual token 005284|>
+<|visual token 005285|>
+<|visual token 005286|>
+<|visual token 005287|>
+<|visual token 005288|>
+<|visual token 005289|>
+<|visual token 005290|>
+<|visual token 005291|>
+<|visual token 005292|>
+<|visual token 005293|>
+<|visual token 005294|>
+<|visual token 005295|>
+<|visual token 005296|>
+<|visual token 005297|>
+<|visual token 005298|>
+<|visual token 005299|>
+<|visual token 005300|>
+<|visual token 005301|>
+<|visual token 005302|>
+<|visual token 005303|>
+<|visual token 005304|>
+<|visual token 005305|>
+<|visual token 005306|>
+<|visual token 005307|>
+<|visual token 005308|>
+<|visual token 005309|>
+<|visual token 005310|>
+<|visual token 005311|>
+<|visual token 005312|>
+<|visual token 005313|>
+<|visual token 005314|>
+<|visual token 005315|>
+<|visual token 005316|>
+<|visual token 005317|>
+<|visual token 005318|>
+<|visual token 005319|>
+<|visual token 005320|>
+<|visual token 005321|>
+<|visual token 005322|>
+<|visual token 005323|>
+<|visual token 005324|>
+<|visual token 005325|>
+<|visual token 005326|>
+<|visual token 005327|>
+<|visual token 005328|>
+<|visual token 005329|>
+<|visual token 005330|>
+<|visual token 005331|>
+<|visual token 005332|>
+<|visual token 005333|>
+<|visual token 005334|>
+<|visual token 005335|>
+<|visual token 005336|>
+<|visual token 005337|>
+<|visual token 005338|>
+<|visual token 005339|>
+<|visual token 005340|>
+<|visual token 005341|>
+<|visual token 005342|>
+<|visual token 005343|>
+<|visual token 005344|>
+<|visual token 005345|>
+<|visual token 005346|>
+<|visual token 005347|>
+<|visual token 005348|>
+<|visual token 005349|>
+<|visual token 005350|>
+<|visual token 005351|>
+<|visual token 005352|>
+<|visual token 005353|>
+<|visual token 005354|>
+<|visual token 005355|>
+<|visual token 005356|>
+<|visual token 005357|>
+<|visual token 005358|>
+<|visual token 005359|>
+<|visual token 005360|>
+<|visual token 005361|>
+<|visual token 005362|>
+<|visual token 005363|>
+<|visual token 005364|>
+<|visual token 005365|>
+<|visual token 005366|>
+<|visual token 005367|>
+<|visual token 005368|>
+<|visual token 005369|>
+<|visual token 005370|>
+<|visual token 005371|>
+<|visual token 005372|>
+<|visual token 005373|>
+<|visual token 005374|>
+<|visual token 005375|>
+<|visual token 005376|>
+<|visual token 005377|>
+<|visual token 005378|>
+<|visual token 005379|>
+<|visual token 005380|>
+<|visual token 005381|>
+<|visual token 005382|>
+<|visual token 005383|>
+<|visual token 005384|>
+<|visual token 005385|>
+<|visual token 005386|>
+<|visual token 005387|>
+<|visual token 005388|>
+<|visual token 005389|>
+<|visual token 005390|>
+<|visual token 005391|>
+<|visual token 005392|>
+<|visual token 005393|>
+<|visual token 005394|>
+<|visual token 005395|>
+<|visual token 005396|>
+<|visual token 005397|>
+<|visual token 005398|>
+<|visual token 005399|>
+<|visual token 005400|>
+<|visual token 005401|>
+<|visual token 005402|>
+<|visual token 005403|>
+<|visual token 005404|>
+<|visual token 005405|>
+<|visual token 005406|>
+<|visual token 005407|>
+<|visual token 005408|>
+<|visual token 005409|>
+<|visual token 005410|>
+<|visual token 005411|>
+<|visual token 005412|>
+<|visual token 005413|>
+<|visual token 005414|>
+<|visual token 005415|>
+<|visual token 005416|>
+<|visual token 005417|>
+<|visual token 005418|>
+<|visual token 005419|>
+<|visual token 005420|>
+<|visual token 005421|>
+<|visual token 005422|>
+<|visual token 005423|>
+<|visual token 005424|>
+<|visual token 005425|>
+<|visual token 005426|>
+<|visual token 005427|>
+<|visual token 005428|>
+<|visual token 005429|>
+<|visual token 005430|>
+<|visual token 005431|>
+<|visual token 005432|>
+<|visual token 005433|>
+<|visual token 005434|>
+<|visual token 005435|>
+<|visual token 005436|>
+<|visual token 005437|>
+<|visual token 005438|>
+<|visual token 005439|>
+<|visual token 005440|>
+<|visual token 005441|>
+<|visual token 005442|>
+<|visual token 005443|>
+<|visual token 005444|>
+<|visual token 005445|>
+<|visual token 005446|>
+<|visual token 005447|>
+<|visual token 005448|>
+<|visual token 005449|>
+<|visual token 005450|>
+<|visual token 005451|>
+<|visual token 005452|>
+<|visual token 005453|>
+<|visual token 005454|>
+<|visual token 005455|>
+<|visual token 005456|>
+<|visual token 005457|>
+<|visual token 005458|>
+<|visual token 005459|>
+<|visual token 005460|>
+<|visual token 005461|>
+<|visual token 005462|>
+<|visual token 005463|>
+<|visual token 005464|>
+<|visual token 005465|>
+<|visual token 005466|>
+<|visual token 005467|>
+<|visual token 005468|>
+<|visual token 005469|>
+<|visual token 005470|>
+<|visual token 005471|>
+<|visual token 005472|>
+<|visual token 005473|>
+<|visual token 005474|>
+<|visual token 005475|>
+<|visual token 005476|>
+<|visual token 005477|>
+<|visual token 005478|>
+<|visual token 005479|>
+<|visual token 005480|>
+<|visual token 005481|>
+<|visual token 005482|>
+<|visual token 005483|>
+<|visual token 005484|>
+<|visual token 005485|>
+<|visual token 005486|>
+<|visual token 005487|>
+<|visual token 005488|>
+<|visual token 005489|>
+<|visual token 005490|>
+<|visual token 005491|>
+<|visual token 005492|>
+<|visual token 005493|>
+<|visual token 005494|>
+<|visual token 005495|>
+<|visual token 005496|>
+<|visual token 005497|>
+<|visual token 005498|>
+<|visual token 005499|>
+<|visual token 005500|>
+<|visual token 005501|>
+<|visual token 005502|>
+<|visual token 005503|>
+<|visual token 005504|>
+<|visual token 005505|>
+<|visual token 005506|>
+<|visual token 005507|>
+<|visual token 005508|>
+<|visual token 005509|>
+<|visual token 005510|>
+<|visual token 005511|>
+<|visual token 005512|>
+<|visual token 005513|>
+<|visual token 005514|>
+<|visual token 005515|>
+<|visual token 005516|>
+<|visual token 005517|>
+<|visual token 005518|>
+<|visual token 005519|>
+<|visual token 005520|>
+<|visual token 005521|>
+<|visual token 005522|>
+<|visual token 005523|>
+<|visual token 005524|>
+<|visual token 005525|>
+<|visual token 005526|>
+<|visual token 005527|>
+<|visual token 005528|>
+<|visual token 005529|>
+<|visual token 005530|>
+<|visual token 005531|>
+<|visual token 005532|>
+<|visual token 005533|>
+<|visual token 005534|>
+<|visual token 005535|>
+<|visual token 005536|>
+<|visual token 005537|>
+<|visual token 005538|>
+<|visual token 005539|>
+<|visual token 005540|>
+<|visual token 005541|>
+<|visual token 005542|>
+<|visual token 005543|>
+<|visual token 005544|>
+<|visual token 005545|>
+<|visual token 005546|>
+<|visual token 005547|>
+<|visual token 005548|>
+<|visual token 005549|>
+<|visual token 005550|>
+<|visual token 005551|>
+<|visual token 005552|>
+<|visual token 005553|>
+<|visual token 005554|>
+<|visual token 005555|>
+<|visual token 005556|>
+<|visual token 005557|>
+<|visual token 005558|>
+<|visual token 005559|>
+<|visual token 005560|>
+<|visual token 005561|>
+<|visual token 005562|>
+<|visual token 005563|>
+<|visual token 005564|>
+<|visual token 005565|>
+<|visual token 005566|>
+<|visual token 005567|>
+<|visual token 005568|>
+<|visual token 005569|>
+<|visual token 005570|>
+<|visual token 005571|>
+<|visual token 005572|>
+<|visual token 005573|>
+<|visual token 005574|>
+<|visual token 005575|>
+<|visual token 005576|>
+<|visual token 005577|>
+<|visual token 005578|>
+<|visual token 005579|>
+<|visual token 005580|>
+<|visual token 005581|>
+<|visual token 005582|>
+<|visual token 005583|>
+<|visual token 005584|>
+<|visual token 005585|>
+<|visual token 005586|>
+<|visual token 005587|>
+<|visual token 005588|>
+<|visual token 005589|>
+<|visual token 005590|>
+<|visual token 005591|>
+<|visual token 005592|>
+<|visual token 005593|>
+<|visual token 005594|>
+<|visual token 005595|>
+<|visual token 005596|>
+<|visual token 005597|>
+<|visual token 005598|>
+<|visual token 005599|>
+<|visual token 005600|>
+<|visual token 005601|>
+<|visual token 005602|>
+<|visual token 005603|>
+<|visual token 005604|>
+<|visual token 005605|>
+<|visual token 005606|>
+<|visual token 005607|>
+<|visual token 005608|>
+<|visual token 005609|>
+<|visual token 005610|>
+<|visual token 005611|>
+<|visual token 005612|>
+<|visual token 005613|>
+<|visual token 005614|>
+<|visual token 005615|>
+<|visual token 005616|>
+<|visual token 005617|>
+<|visual token 005618|>
+<|visual token 005619|>
+<|visual token 005620|>
+<|visual token 005621|>
+<|visual token 005622|>
+<|visual token 005623|>
+<|visual token 005624|>
+<|visual token 005625|>
+<|visual token 005626|>
+<|visual token 005627|>
+<|visual token 005628|>
+<|visual token 005629|>
+<|visual token 005630|>
+<|visual token 005631|>
+<|visual token 005632|>
+<|visual token 005633|>
+<|visual token 005634|>
+<|visual token 005635|>
+<|visual token 005636|>
+<|visual token 005637|>
+<|visual token 005638|>
+<|visual token 005639|>
+<|visual token 005640|>
+<|visual token 005641|>
+<|visual token 005642|>
+<|visual token 005643|>
+<|visual token 005644|>
+<|visual token 005645|>
+<|visual token 005646|>
+<|visual token 005647|>
+<|visual token 005648|>
+<|visual token 005649|>
+<|visual token 005650|>
+<|visual token 005651|>
+<|visual token 005652|>
+<|visual token 005653|>
+<|visual token 005654|>
+<|visual token 005655|>
+<|visual token 005656|>
+<|visual token 005657|>
+<|visual token 005658|>
+<|visual token 005659|>
+<|visual token 005660|>
+<|visual token 005661|>
+<|visual token 005662|>
+<|visual token 005663|>
+<|visual token 005664|>
+<|visual token 005665|>
+<|visual token 005666|>
+<|visual token 005667|>
+<|visual token 005668|>
+<|visual token 005669|>
+<|visual token 005670|>
+<|visual token 005671|>
+<|visual token 005672|>
+<|visual token 005673|>
+<|visual token 005674|>
+<|visual token 005675|>
+<|visual token 005676|>
+<|visual token 005677|>
+<|visual token 005678|>
+<|visual token 005679|>
+<|visual token 005680|>
+<|visual token 005681|>
+<|visual token 005682|>
+<|visual token 005683|>
+<|visual token 005684|>
+<|visual token 005685|>
+<|visual token 005686|>
+<|visual token 005687|>
+<|visual token 005688|>
+<|visual token 005689|>
+<|visual token 005690|>
+<|visual token 005691|>
+<|visual token 005692|>
+<|visual token 005693|>
+<|visual token 005694|>
+<|visual token 005695|>
+<|visual token 005696|>
+<|visual token 005697|>
+<|visual token 005698|>
+<|visual token 005699|>
+<|visual token 005700|>
+<|visual token 005701|>
+<|visual token 005702|>
+<|visual token 005703|>
+<|visual token 005704|>
+<|visual token 005705|>
+<|visual token 005706|>
+<|visual token 005707|>
+<|visual token 005708|>
+<|visual token 005709|>
+<|visual token 005710|>
+<|visual token 005711|>
+<|visual token 005712|>
+<|visual token 005713|>
+<|visual token 005714|>
+<|visual token 005715|>
+<|visual token 005716|>
+<|visual token 005717|>
+<|visual token 005718|>
+<|visual token 005719|>
+<|visual token 005720|>
+<|visual token 005721|>
+<|visual token 005722|>
+<|visual token 005723|>
+<|visual token 005724|>
+<|visual token 005725|>
+<|visual token 005726|>
+<|visual token 005727|>
+<|visual token 005728|>
+<|visual token 005729|>
+<|visual token 005730|>
+<|visual token 005731|>
+<|visual token 005732|>
+<|visual token 005733|>
+<|visual token 005734|>
+<|visual token 005735|>
+<|visual token 005736|>
+<|visual token 005737|>
+<|visual token 005738|>
+<|visual token 005739|>
+<|visual token 005740|>
+<|visual token 005741|>
+<|visual token 005742|>
+<|visual token 005743|>
+<|visual token 005744|>
+<|visual token 005745|>
+<|visual token 005746|>
+<|visual token 005747|>
+<|visual token 005748|>
+<|visual token 005749|>
+<|visual token 005750|>
+<|visual token 005751|>
+<|visual token 005752|>
+<|visual token 005753|>
+<|visual token 005754|>
+<|visual token 005755|>
+<|visual token 005756|>
+<|visual token 005757|>
+<|visual token 005758|>
+<|visual token 005759|>
+<|visual token 005760|>
+<|visual token 005761|>
+<|visual token 005762|>
+<|visual token 005763|>
+<|visual token 005764|>
+<|visual token 005765|>
+<|visual token 005766|>
+<|visual token 005767|>
+<|visual token 005768|>
+<|visual token 005769|>
+<|visual token 005770|>
+<|visual token 005771|>
+<|visual token 005772|>
+<|visual token 005773|>
+<|visual token 005774|>
+<|visual token 005775|>
+<|visual token 005776|>
+<|visual token 005777|>
+<|visual token 005778|>
+<|visual token 005779|>
+<|visual token 005780|>
+<|visual token 005781|>
+<|visual token 005782|>
+<|visual token 005783|>
+<|visual token 005784|>
+<|visual token 005785|>
+<|visual token 005786|>
+<|visual token 005787|>
+<|visual token 005788|>
+<|visual token 005789|>
+<|visual token 005790|>
+<|visual token 005791|>
+<|visual token 005792|>
+<|visual token 005793|>
+<|visual token 005794|>
+<|visual token 005795|>
+<|visual token 005796|>
+<|visual token 005797|>
+<|visual token 005798|>
+<|visual token 005799|>
+<|visual token 005800|>
+<|visual token 005801|>
+<|visual token 005802|>
+<|visual token 005803|>
+<|visual token 005804|>
+<|visual token 005805|>
+<|visual token 005806|>
+<|visual token 005807|>
+<|visual token 005808|>
+<|visual token 005809|>
+<|visual token 005810|>
+<|visual token 005811|>
+<|visual token 005812|>
+<|visual token 005813|>
+<|visual token 005814|>
+<|visual token 005815|>
+<|visual token 005816|>
+<|visual token 005817|>
+<|visual token 005818|>
+<|visual token 005819|>
+<|visual token 005820|>
+<|visual token 005821|>
+<|visual token 005822|>
+<|visual token 005823|>
+<|visual token 005824|>
+<|visual token 005825|>
+<|visual token 005826|>
+<|visual token 005827|>
+<|visual token 005828|>
+<|visual token 005829|>
+<|visual token 005830|>
+<|visual token 005831|>
+<|visual token 005832|>
+<|visual token 005833|>
+<|visual token 005834|>
+<|visual token 005835|>
+<|visual token 005836|>
+<|visual token 005837|>
+<|visual token 005838|>
+<|visual token 005839|>
+<|visual token 005840|>
+<|visual token 005841|>
+<|visual token 005842|>
+<|visual token 005843|>
+<|visual token 005844|>
+<|visual token 005845|>
+<|visual token 005846|>
+<|visual token 005847|>
+<|visual token 005848|>
+<|visual token 005849|>
+<|visual token 005850|>
+<|visual token 005851|>
+<|visual token 005852|>
+<|visual token 005853|>
+<|visual token 005854|>
+<|visual token 005855|>
+<|visual token 005856|>
+<|visual token 005857|>
+<|visual token 005858|>
+<|visual token 005859|>
+<|visual token 005860|>
+<|visual token 005861|>
+<|visual token 005862|>
+<|visual token 005863|>
+<|visual token 005864|>
+<|visual token 005865|>
+<|visual token 005866|>
+<|visual token 005867|>
+<|visual token 005868|>
+<|visual token 005869|>
+<|visual token 005870|>
+<|visual token 005871|>
+<|visual token 005872|>
+<|visual token 005873|>
+<|visual token 005874|>
+<|visual token 005875|>
+<|visual token 005876|>
+<|visual token 005877|>
+<|visual token 005878|>
+<|visual token 005879|>
+<|visual token 005880|>
+<|visual token 005881|>
+<|visual token 005882|>
+<|visual token 005883|>
+<|visual token 005884|>
+<|visual token 005885|>
+<|visual token 005886|>
+<|visual token 005887|>
+<|visual token 005888|>
+<|visual token 005889|>
+<|visual token 005890|>
+<|visual token 005891|>
+<|visual token 005892|>
+<|visual token 005893|>
+<|visual token 005894|>
+<|visual token 005895|>
+<|visual token 005896|>
+<|visual token 005897|>
+<|visual token 005898|>
+<|visual token 005899|>
+<|visual token 005900|>
+<|visual token 005901|>
+<|visual token 005902|>
+<|visual token 005903|>
+<|visual token 005904|>
+<|visual token 005905|>
+<|visual token 005906|>
+<|visual token 005907|>
+<|visual token 005908|>
+<|visual token 005909|>
+<|visual token 005910|>
+<|visual token 005911|>
+<|visual token 005912|>
+<|visual token 005913|>
+<|visual token 005914|>
+<|visual token 005915|>
+<|visual token 005916|>
+<|visual token 005917|>
+<|visual token 005918|>
+<|visual token 005919|>
+<|visual token 005920|>
+<|visual token 005921|>
+<|visual token 005922|>
+<|visual token 005923|>
+<|visual token 005924|>
+<|visual token 005925|>
+<|visual token 005926|>
+<|visual token 005927|>
+<|visual token 005928|>
+<|visual token 005929|>
+<|visual token 005930|>
+<|visual token 005931|>
+<|visual token 005932|>
+<|visual token 005933|>
+<|visual token 005934|>
+<|visual token 005935|>
+<|visual token 005936|>
+<|visual token 005937|>
+<|visual token 005938|>
+<|visual token 005939|>
+<|visual token 005940|>
+<|visual token 005941|>
+<|visual token 005942|>
+<|visual token 005943|>
+<|visual token 005944|>
+<|visual token 005945|>
+<|visual token 005946|>
+<|visual token 005947|>
+<|visual token 005948|>
+<|visual token 005949|>
+<|visual token 005950|>
+<|visual token 005951|>
+<|visual token 005952|>
+<|visual token 005953|>
+<|visual token 005954|>
+<|visual token 005955|>
+<|visual token 005956|>
+<|visual token 005957|>
+<|visual token 005958|>
+<|visual token 005959|>
+<|visual token 005960|>
+<|visual token 005961|>
+<|visual token 005962|>
+<|visual token 005963|>
+<|visual token 005964|>
+<|visual token 005965|>
+<|visual token 005966|>
+<|visual token 005967|>
+<|visual token 005968|>
+<|visual token 005969|>
+<|visual token 005970|>
+<|visual token 005971|>
+<|visual token 005972|>
+<|visual token 005973|>
+<|visual token 005974|>
+<|visual token 005975|>
+<|visual token 005976|>
+<|visual token 005977|>
+<|visual token 005978|>
+<|visual token 005979|>
+<|visual token 005980|>
+<|visual token 005981|>
+<|visual token 005982|>
+<|visual token 005983|>
+<|visual token 005984|>
+<|visual token 005985|>
+<|visual token 005986|>
+<|visual token 005987|>
+<|visual token 005988|>
+<|visual token 005989|>
+<|visual token 005990|>
+<|visual token 005991|>
+<|visual token 005992|>
+<|visual token 005993|>
+<|visual token 005994|>
+<|visual token 005995|>
+<|visual token 005996|>
+<|visual token 005997|>
+<|visual token 005998|>
+<|visual token 005999|>
+<|visual token 006000|>
+<|visual token 006001|>
+<|visual token 006002|>
+<|visual token 006003|>
+<|visual token 006004|>
+<|visual token 006005|>
+<|visual token 006006|>
+<|visual token 006007|>
+<|visual token 006008|>
+<|visual token 006009|>
+<|visual token 006010|>
+<|visual token 006011|>
+<|visual token 006012|>
+<|visual token 006013|>
+<|visual token 006014|>
+<|visual token 006015|>
+<|visual token 006016|>
+<|visual token 006017|>
+<|visual token 006018|>
+<|visual token 006019|>
+<|visual token 006020|>
+<|visual token 006021|>
+<|visual token 006022|>
+<|visual token 006023|>
+<|visual token 006024|>
+<|visual token 006025|>
+<|visual token 006026|>
+<|visual token 006027|>
+<|visual token 006028|>
+<|visual token 006029|>
+<|visual token 006030|>
+<|visual token 006031|>
+<|visual token 006032|>
+<|visual token 006033|>
+<|visual token 006034|>
+<|visual token 006035|>
+<|visual token 006036|>
+<|visual token 006037|>
+<|visual token 006038|>
+<|visual token 006039|>
+<|visual token 006040|>
+<|visual token 006041|>
+<|visual token 006042|>
+<|visual token 006043|>
+<|visual token 006044|>
+<|visual token 006045|>
+<|visual token 006046|>
+<|visual token 006047|>
+<|visual token 006048|>
+<|visual token 006049|>
+<|visual token 006050|>
+<|visual token 006051|>
+<|visual token 006052|>
+<|visual token 006053|>
+<|visual token 006054|>
+<|visual token 006055|>
+<|visual token 006056|>
+<|visual token 006057|>
+<|visual token 006058|>
+<|visual token 006059|>
+<|visual token 006060|>
+<|visual token 006061|>
+<|visual token 006062|>
+<|visual token 006063|>
+<|visual token 006064|>
+<|visual token 006065|>
+<|visual token 006066|>
+<|visual token 006067|>
+<|visual token 006068|>
+<|visual token 006069|>
+<|visual token 006070|>
+<|visual token 006071|>
+<|visual token 006072|>
+<|visual token 006073|>
+<|visual token 006074|>
+<|visual token 006075|>
+<|visual token 006076|>
+<|visual token 006077|>
+<|visual token 006078|>
+<|visual token 006079|>
+<|visual token 006080|>
+<|visual token 006081|>
+<|visual token 006082|>
+<|visual token 006083|>
+<|visual token 006084|>
+<|visual token 006085|>
+<|visual token 006086|>
+<|visual token 006087|>
+<|visual token 006088|>
+<|visual token 006089|>
+<|visual token 006090|>
+<|visual token 006091|>
+<|visual token 006092|>
+<|visual token 006093|>
+<|visual token 006094|>
+<|visual token 006095|>
+<|visual token 006096|>
+<|visual token 006097|>
+<|visual token 006098|>
+<|visual token 006099|>
+<|visual token 006100|>
+<|visual token 006101|>
+<|visual token 006102|>
+<|visual token 006103|>
+<|visual token 006104|>
+<|visual token 006105|>
+<|visual token 006106|>
+<|visual token 006107|>
+<|visual token 006108|>
+<|visual token 006109|>
+<|visual token 006110|>
+<|visual token 006111|>
+<|visual token 006112|>
+<|visual token 006113|>
+<|visual token 006114|>
+<|visual token 006115|>
+<|visual token 006116|>
+<|visual token 006117|>
+<|visual token 006118|>
+<|visual token 006119|>
+<|visual token 006120|>
+<|visual token 006121|>
+<|visual token 006122|>
+<|visual token 006123|>
+<|visual token 006124|>
+<|visual token 006125|>
+<|visual token 006126|>
+<|visual token 006127|>
+<|visual token 006128|>
+<|visual token 006129|>
+<|visual token 006130|>
+<|visual token 006131|>
+<|visual token 006132|>
+<|visual token 006133|>
+<|visual token 006134|>
+<|visual token 006135|>
+<|visual token 006136|>
+<|visual token 006137|>
+<|visual token 006138|>
+<|visual token 006139|>
+<|visual token 006140|>
+<|visual token 006141|>
+<|visual token 006142|>
+<|visual token 006143|>
+<|visual token 006144|>
+<|visual token 006145|>
+<|visual token 006146|>
+<|visual token 006147|>
+<|visual token 006148|>
+<|visual token 006149|>
+<|visual token 006150|>
+<|visual token 006151|>
+<|visual token 006152|>
+<|visual token 006153|>
+<|visual token 006154|>
+<|visual token 006155|>
+<|visual token 006156|>
+<|visual token 006157|>
+<|visual token 006158|>
+<|visual token 006159|>
+<|visual token 006160|>
+<|visual token 006161|>
+<|visual token 006162|>
+<|visual token 006163|>
+<|visual token 006164|>
+<|visual token 006165|>
+<|visual token 006166|>
+<|visual token 006167|>
+<|visual token 006168|>
+<|visual token 006169|>
+<|visual token 006170|>
+<|visual token 006171|>
+<|visual token 006172|>
+<|visual token 006173|>
+<|visual token 006174|>
+<|visual token 006175|>
+<|visual token 006176|>
+<|visual token 006177|>
+<|visual token 006178|>
+<|visual token 006179|>
+<|visual token 006180|>
+<|visual token 006181|>
+<|visual token 006182|>
+<|visual token 006183|>
+<|visual token 006184|>
+<|visual token 006185|>
+<|visual token 006186|>
+<|visual token 006187|>
+<|visual token 006188|>
+<|visual token 006189|>
+<|visual token 006190|>
+<|visual token 006191|>
+<|visual token 006192|>
+<|visual token 006193|>
+<|visual token 006194|>
+<|visual token 006195|>
+<|visual token 006196|>
+<|visual token 006197|>
+<|visual token 006198|>
+<|visual token 006199|>
+<|visual token 006200|>
+<|visual token 006201|>
+<|visual token 006202|>
+<|visual token 006203|>
+<|visual token 006204|>
+<|visual token 006205|>
+<|visual token 006206|>
+<|visual token 006207|>
+<|visual token 006208|>
+<|visual token 006209|>
+<|visual token 006210|>
+<|visual token 006211|>
+<|visual token 006212|>
+<|visual token 006213|>
+<|visual token 006214|>
+<|visual token 006215|>
+<|visual token 006216|>
+<|visual token 006217|>
+<|visual token 006218|>
+<|visual token 006219|>
+<|visual token 006220|>
+<|visual token 006221|>
+<|visual token 006222|>
+<|visual token 006223|>
+<|visual token 006224|>
+<|visual token 006225|>
+<|visual token 006226|>
+<|visual token 006227|>
+<|visual token 006228|>
+<|visual token 006229|>
+<|visual token 006230|>
+<|visual token 006231|>
+<|visual token 006232|>
+<|visual token 006233|>
+<|visual token 006234|>
+<|visual token 006235|>
+<|visual token 006236|>
+<|visual token 006237|>
+<|visual token 006238|>
+<|visual token 006239|>
+<|visual token 006240|>
+<|visual token 006241|>
+<|visual token 006242|>
+<|visual token 006243|>
+<|visual token 006244|>
+<|visual token 006245|>
+<|visual token 006246|>
+<|visual token 006247|>
+<|visual token 006248|>
+<|visual token 006249|>
+<|visual token 006250|>
+<|visual token 006251|>
+<|visual token 006252|>
+<|visual token 006253|>
+<|visual token 006254|>
+<|visual token 006255|>
+<|visual token 006256|>
+<|visual token 006257|>
+<|visual token 006258|>
+<|visual token 006259|>
+<|visual token 006260|>
+<|visual token 006261|>
+<|visual token 006262|>
+<|visual token 006263|>
+<|visual token 006264|>
+<|visual token 006265|>
+<|visual token 006266|>
+<|visual token 006267|>
+<|visual token 006268|>
+<|visual token 006269|>
+<|visual token 006270|>
+<|visual token 006271|>
+<|visual token 006272|>
+<|visual token 006273|>
+<|visual token 006274|>
+<|visual token 006275|>
+<|visual token 006276|>
+<|visual token 006277|>
+<|visual token 006278|>
+<|visual token 006279|>
+<|visual token 006280|>
+<|visual token 006281|>
+<|visual token 006282|>
+<|visual token 006283|>
+<|visual token 006284|>
+<|visual token 006285|>
+<|visual token 006286|>
+<|visual token 006287|>
+<|visual token 006288|>
+<|visual token 006289|>
+<|visual token 006290|>
+<|visual token 006291|>
+<|visual token 006292|>
+<|visual token 006293|>
+<|visual token 006294|>
+<|visual token 006295|>
+<|visual token 006296|>
+<|visual token 006297|>
+<|visual token 006298|>
+<|visual token 006299|>
+<|visual token 006300|>
+<|visual token 006301|>
+<|visual token 006302|>
+<|visual token 006303|>
+<|visual token 006304|>
+<|visual token 006305|>
+<|visual token 006306|>
+<|visual token 006307|>
+<|visual token 006308|>
+<|visual token 006309|>
+<|visual token 006310|>
+<|visual token 006311|>
+<|visual token 006312|>
+<|visual token 006313|>
+<|visual token 006314|>
+<|visual token 006315|>
+<|visual token 006316|>
+<|visual token 006317|>
+<|visual token 006318|>
+<|visual token 006319|>
+<|visual token 006320|>
+<|visual token 006321|>
+<|visual token 006322|>
+<|visual token 006323|>
+<|visual token 006324|>
+<|visual token 006325|>
+<|visual token 006326|>
+<|visual token 006327|>
+<|visual token 006328|>
+<|visual token 006329|>
+<|visual token 006330|>
+<|visual token 006331|>
+<|visual token 006332|>
+<|visual token 006333|>
+<|visual token 006334|>
+<|visual token 006335|>
+<|visual token 006336|>
+<|visual token 006337|>
+<|visual token 006338|>
+<|visual token 006339|>
+<|visual token 006340|>
+<|visual token 006341|>
+<|visual token 006342|>
+<|visual token 006343|>
+<|visual token 006344|>
+<|visual token 006345|>
+<|visual token 006346|>
+<|visual token 006347|>
+<|visual token 006348|>
+<|visual token 006349|>
+<|visual token 006350|>
+<|visual token 006351|>
+<|visual token 006352|>
+<|visual token 006353|>
+<|visual token 006354|>
+<|visual token 006355|>
+<|visual token 006356|>
+<|visual token 006357|>
+<|visual token 006358|>
+<|visual token 006359|>
+<|visual token 006360|>
+<|visual token 006361|>
+<|visual token 006362|>
+<|visual token 006363|>
+<|visual token 006364|>
+<|visual token 006365|>
+<|visual token 006366|>
+<|visual token 006367|>
+<|visual token 006368|>
+<|visual token 006369|>
+<|visual token 006370|>
+<|visual token 006371|>
+<|visual token 006372|>
+<|visual token 006373|>
+<|visual token 006374|>
+<|visual token 006375|>
+<|visual token 006376|>
+<|visual token 006377|>
+<|visual token 006378|>
+<|visual token 006379|>
+<|visual token 006380|>
+<|visual token 006381|>
+<|visual token 006382|>
+<|visual token 006383|>
+<|visual token 006384|>
+<|visual token 006385|>
+<|visual token 006386|>
+<|visual token 006387|>
+<|visual token 006388|>
+<|visual token 006389|>
+<|visual token 006390|>
+<|visual token 006391|>
+<|visual token 006392|>
+<|visual token 006393|>
+<|visual token 006394|>
+<|visual token 006395|>
+<|visual token 006396|>
+<|visual token 006397|>
+<|visual token 006398|>
+<|visual token 006399|>
+<|visual token 006400|>
+<|visual token 006401|>
+<|visual token 006402|>
+<|visual token 006403|>
+<|visual token 006404|>
+<|visual token 006405|>
+<|visual token 006406|>
+<|visual token 006407|>
+<|visual token 006408|>
+<|visual token 006409|>
+<|visual token 006410|>
+<|visual token 006411|>
+<|visual token 006412|>
+<|visual token 006413|>
+<|visual token 006414|>
+<|visual token 006415|>
+<|visual token 006416|>
+<|visual token 006417|>
+<|visual token 006418|>
+<|visual token 006419|>
+<|visual token 006420|>
+<|visual token 006421|>
+<|visual token 006422|>
+<|visual token 006423|>
+<|visual token 006424|>
+<|visual token 006425|>
+<|visual token 006426|>
+<|visual token 006427|>
+<|visual token 006428|>
+<|visual token 006429|>
+<|visual token 006430|>
+<|visual token 006431|>
+<|visual token 006432|>
+<|visual token 006433|>
+<|visual token 006434|>
+<|visual token 006435|>
+<|visual token 006436|>
+<|visual token 006437|>
+<|visual token 006438|>
+<|visual token 006439|>
+<|visual token 006440|>
+<|visual token 006441|>
+<|visual token 006442|>
+<|visual token 006443|>
+<|visual token 006444|>
+<|visual token 006445|>
+<|visual token 006446|>
+<|visual token 006447|>
+<|visual token 006448|>
+<|visual token 006449|>
+<|visual token 006450|>
+<|visual token 006451|>
+<|visual token 006452|>
+<|visual token 006453|>
+<|visual token 006454|>
+<|visual token 006455|>
+<|visual token 006456|>
+<|visual token 006457|>
+<|visual token 006458|>
+<|visual token 006459|>
+<|visual token 006460|>
+<|visual token 006461|>
+<|visual token 006462|>
+<|visual token 006463|>
+<|visual token 006464|>
+<|visual token 006465|>
+<|visual token 006466|>
+<|visual token 006467|>
+<|visual token 006468|>
+<|visual token 006469|>
+<|visual token 006470|>
+<|visual token 006471|>
+<|visual token 006472|>
+<|visual token 006473|>
+<|visual token 006474|>
+<|visual token 006475|>
+<|visual token 006476|>
+<|visual token 006477|>
+<|visual token 006478|>
+<|visual token 006479|>
+<|visual token 006480|>
+<|visual token 006481|>
+<|visual token 006482|>
+<|visual token 006483|>
+<|visual token 006484|>
+<|visual token 006485|>
+<|visual token 006486|>
+<|visual token 006487|>
+<|visual token 006488|>
+<|visual token 006489|>
+<|visual token 006490|>
+<|visual token 006491|>
+<|visual token 006492|>
+<|visual token 006493|>
+<|visual token 006494|>
+<|visual token 006495|>
+<|visual token 006496|>
+<|visual token 006497|>
+<|visual token 006498|>
+<|visual token 006499|>
+<|visual token 006500|>
+<|visual token 006501|>
+<|visual token 006502|>
+<|visual token 006503|>
+<|visual token 006504|>
+<|visual token 006505|>
+<|visual token 006506|>
+<|visual token 006507|>
+<|visual token 006508|>
+<|visual token 006509|>
+<|visual token 006510|>
+<|visual token 006511|>
+<|visual token 006512|>
+<|visual token 006513|>
+<|visual token 006514|>
+<|visual token 006515|>
+<|visual token 006516|>
+<|visual token 006517|>
+<|visual token 006518|>
+<|visual token 006519|>
+<|visual token 006520|>
+<|visual token 006521|>
+<|visual token 006522|>
+<|visual token 006523|>
+<|visual token 006524|>
+<|visual token 006525|>
+<|visual token 006526|>
+<|visual token 006527|>
+<|visual token 006528|>
+<|visual token 006529|>
+<|visual token 006530|>
+<|visual token 006531|>
+<|visual token 006532|>
+<|visual token 006533|>
+<|visual token 006534|>
+<|visual token 006535|>
+<|visual token 006536|>
+<|visual token 006537|>
+<|visual token 006538|>
+<|visual token 006539|>
+<|visual token 006540|>
+<|visual token 006541|>
+<|visual token 006542|>
+<|visual token 006543|>
+<|visual token 006544|>
+<|visual token 006545|>
+<|visual token 006546|>
+<|visual token 006547|>
+<|visual token 006548|>
+<|visual token 006549|>
+<|visual token 006550|>
+<|visual token 006551|>
+<|visual token 006552|>
+<|visual token 006553|>
+<|visual token 006554|>
+<|visual token 006555|>
+<|visual token 006556|>
+<|visual token 006557|>
+<|visual token 006558|>
+<|visual token 006559|>
+<|visual token 006560|>
+<|visual token 006561|>
+<|visual token 006562|>
+<|visual token 006563|>
+<|visual token 006564|>
+<|visual token 006565|>
+<|visual token 006566|>
+<|visual token 006567|>
+<|visual token 006568|>
+<|visual token 006569|>
+<|visual token 006570|>
+<|visual token 006571|>
+<|visual token 006572|>
+<|visual token 006573|>
+<|visual token 006574|>
+<|visual token 006575|>
+<|visual token 006576|>
+<|visual token 006577|>
+<|visual token 006578|>
+<|visual token 006579|>
+<|visual token 006580|>
+<|visual token 006581|>
+<|visual token 006582|>
+<|visual token 006583|>
+<|visual token 006584|>
+<|visual token 006585|>
+<|visual token 006586|>
+<|visual token 006587|>
+<|visual token 006588|>
+<|visual token 006589|>
+<|visual token 006590|>
+<|visual token 006591|>
+<|visual token 006592|>
+<|visual token 006593|>
+<|visual token 006594|>
+<|visual token 006595|>
+<|visual token 006596|>
+<|visual token 006597|>
+<|visual token 006598|>
+<|visual token 006599|>
+<|visual token 006600|>
+<|visual token 006601|>
+<|visual token 006602|>
+<|visual token 006603|>
+<|visual token 006604|>
+<|visual token 006605|>
+<|visual token 006606|>
+<|visual token 006607|>
+<|visual token 006608|>
+<|visual token 006609|>
+<|visual token 006610|>
+<|visual token 006611|>
+<|visual token 006612|>
+<|visual token 006613|>
+<|visual token 006614|>
+<|visual token 006615|>
+<|visual token 006616|>
+<|visual token 006617|>
+<|visual token 006618|>
+<|visual token 006619|>
+<|visual token 006620|>
+<|visual token 006621|>
+<|visual token 006622|>
+<|visual token 006623|>
+<|visual token 006624|>
+<|visual token 006625|>
+<|visual token 006626|>
+<|visual token 006627|>
+<|visual token 006628|>
+<|visual token 006629|>
+<|visual token 006630|>
+<|visual token 006631|>
+<|visual token 006632|>
+<|visual token 006633|>
+<|visual token 006634|>
+<|visual token 006635|>
+<|visual token 006636|>
+<|visual token 006637|>
+<|visual token 006638|>
+<|visual token 006639|>
+<|visual token 006640|>
+<|visual token 006641|>
+<|visual token 006642|>
+<|visual token 006643|>
+<|visual token 006644|>
+<|visual token 006645|>
+<|visual token 006646|>
+<|visual token 006647|>
+<|visual token 006648|>
+<|visual token 006649|>
+<|visual token 006650|>
+<|visual token 006651|>
+<|visual token 006652|>
+<|visual token 006653|>
+<|visual token 006654|>
+<|visual token 006655|>
+<|visual token 006656|>
+<|visual token 006657|>
+<|visual token 006658|>
+<|visual token 006659|>
+<|visual token 006660|>
+<|visual token 006661|>
+<|visual token 006662|>
+<|visual token 006663|>
+<|visual token 006664|>
+<|visual token 006665|>
+<|visual token 006666|>
+<|visual token 006667|>
+<|visual token 006668|>
+<|visual token 006669|>
+<|visual token 006670|>
+<|visual token 006671|>
+<|visual token 006672|>
+<|visual token 006673|>
+<|visual token 006674|>
+<|visual token 006675|>
+<|visual token 006676|>
+<|visual token 006677|>
+<|visual token 006678|>
+<|visual token 006679|>
+<|visual token 006680|>
+<|visual token 006681|>
+<|visual token 006682|>
+<|visual token 006683|>
+<|visual token 006684|>
+<|visual token 006685|>
+<|visual token 006686|>
+<|visual token 006687|>
+<|visual token 006688|>
+<|visual token 006689|>
+<|visual token 006690|>
+<|visual token 006691|>
+<|visual token 006692|>
+<|visual token 006693|>
+<|visual token 006694|>
+<|visual token 006695|>
+<|visual token 006696|>
+<|visual token 006697|>
+<|visual token 006698|>
+<|visual token 006699|>
+<|visual token 006700|>
+<|visual token 006701|>
+<|visual token 006702|>
+<|visual token 006703|>
+<|visual token 006704|>
+<|visual token 006705|>
+<|visual token 006706|>
+<|visual token 006707|>
+<|visual token 006708|>
+<|visual token 006709|>
+<|visual token 006710|>
+<|visual token 006711|>
+<|visual token 006712|>
+<|visual token 006713|>
+<|visual token 006714|>
+<|visual token 006715|>
+<|visual token 006716|>
+<|visual token 006717|>
+<|visual token 006718|>
+<|visual token 006719|>
+<|visual token 006720|>
+<|visual token 006721|>
+<|visual token 006722|>
+<|visual token 006723|>
+<|visual token 006724|>
+<|visual token 006725|>
+<|visual token 006726|>
+<|visual token 006727|>
+<|visual token 006728|>
+<|visual token 006729|>
+<|visual token 006730|>
+<|visual token 006731|>
+<|visual token 006732|>
+<|visual token 006733|>
+<|visual token 006734|>
+<|visual token 006735|>
+<|visual token 006736|>
+<|visual token 006737|>
+<|visual token 006738|>
+<|visual token 006739|>
+<|visual token 006740|>
+<|visual token 006741|>
+<|visual token 006742|>
+<|visual token 006743|>
+<|visual token 006744|>
+<|visual token 006745|>
+<|visual token 006746|>
+<|visual token 006747|>
+<|visual token 006748|>
+<|visual token 006749|>
+<|visual token 006750|>
+<|visual token 006751|>
+<|visual token 006752|>
+<|visual token 006753|>
+<|visual token 006754|>
+<|visual token 006755|>
+<|visual token 006756|>
+<|visual token 006757|>
+<|visual token 006758|>
+<|visual token 006759|>
+<|visual token 006760|>
+<|visual token 006761|>
+<|visual token 006762|>
+<|visual token 006763|>
+<|visual token 006764|>
+<|visual token 006765|>
+<|visual token 006766|>
+<|visual token 006767|>
+<|visual token 006768|>
+<|visual token 006769|>
+<|visual token 006770|>
+<|visual token 006771|>
+<|visual token 006772|>
+<|visual token 006773|>
+<|visual token 006774|>
+<|visual token 006775|>
+<|visual token 006776|>
+<|visual token 006777|>
+<|visual token 006778|>
+<|visual token 006779|>
+<|visual token 006780|>
+<|visual token 006781|>
+<|visual token 006782|>
+<|visual token 006783|>
+<|visual token 006784|>
+<|visual token 006785|>
+<|visual token 006786|>
+<|visual token 006787|>
+<|visual token 006788|>
+<|visual token 006789|>
+<|visual token 006790|>
+<|visual token 006791|>
+<|visual token 006792|>
+<|visual token 006793|>
+<|visual token 006794|>
+<|visual token 006795|>
+<|visual token 006796|>
+<|visual token 006797|>
+<|visual token 006798|>
+<|visual token 006799|>
+<|visual token 006800|>
+<|visual token 006801|>
+<|visual token 006802|>
+<|visual token 006803|>
+<|visual token 006804|>
+<|visual token 006805|>
+<|visual token 006806|>
+<|visual token 006807|>
+<|visual token 006808|>
+<|visual token 006809|>
+<|visual token 006810|>
+<|visual token 006811|>
+<|visual token 006812|>
+<|visual token 006813|>
+<|visual token 006814|>
+<|visual token 006815|>
+<|visual token 006816|>
+<|visual token 006817|>
+<|visual token 006818|>
+<|visual token 006819|>
+<|visual token 006820|>
+<|visual token 006821|>
+<|visual token 006822|>
+<|visual token 006823|>
+<|visual token 006824|>
+<|visual token 006825|>
+<|visual token 006826|>
+<|visual token 006827|>
+<|visual token 006828|>
+<|visual token 006829|>
+<|visual token 006830|>
+<|visual token 006831|>
+<|visual token 006832|>
+<|visual token 006833|>
+<|visual token 006834|>
+<|visual token 006835|>
+<|visual token 006836|>
+<|visual token 006837|>
+<|visual token 006838|>
+<|visual token 006839|>
+<|visual token 006840|>
+<|visual token 006841|>
+<|visual token 006842|>
+<|visual token 006843|>
+<|visual token 006844|>
+<|visual token 006845|>
+<|visual token 006846|>
+<|visual token 006847|>
+<|visual token 006848|>
+<|visual token 006849|>
+<|visual token 006850|>
+<|visual token 006851|>
+<|visual token 006852|>
+<|visual token 006853|>
+<|visual token 006854|>
+<|visual token 006855|>
+<|visual token 006856|>
+<|visual token 006857|>
+<|visual token 006858|>
+<|visual token 006859|>
+<|visual token 006860|>
+<|visual token 006861|>
+<|visual token 006862|>
+<|visual token 006863|>
+<|visual token 006864|>
+<|visual token 006865|>
+<|visual token 006866|>
+<|visual token 006867|>
+<|visual token 006868|>
+<|visual token 006869|>
+<|visual token 006870|>
+<|visual token 006871|>
+<|visual token 006872|>
+<|visual token 006873|>
+<|visual token 006874|>
+<|visual token 006875|>
+<|visual token 006876|>
+<|visual token 006877|>
+<|visual token 006878|>
+<|visual token 006879|>
+<|visual token 006880|>
+<|visual token 006881|>
+<|visual token 006882|>
+<|visual token 006883|>
+<|visual token 006884|>
+<|visual token 006885|>
+<|visual token 006886|>
+<|visual token 006887|>
+<|visual token 006888|>
+<|visual token 006889|>
+<|visual token 006890|>
+<|visual token 006891|>
+<|visual token 006892|>
+<|visual token 006893|>
+<|visual token 006894|>
+<|visual token 006895|>
+<|visual token 006896|>
+<|visual token 006897|>
+<|visual token 006898|>
+<|visual token 006899|>
+<|visual token 006900|>
+<|visual token 006901|>
+<|visual token 006902|>
+<|visual token 006903|>
+<|visual token 006904|>
+<|visual token 006905|>
+<|visual token 006906|>
+<|visual token 006907|>
+<|visual token 006908|>
+<|visual token 006909|>
+<|visual token 006910|>
+<|visual token 006911|>
+<|visual token 006912|>
+<|visual token 006913|>
+<|visual token 006914|>
+<|visual token 006915|>
+<|visual token 006916|>
+<|visual token 006917|>
+<|visual token 006918|>
+<|visual token 006919|>
+<|visual token 006920|>
+<|visual token 006921|>
+<|visual token 006922|>
+<|visual token 006923|>
+<|visual token 006924|>
+<|visual token 006925|>
+<|visual token 006926|>
+<|visual token 006927|>
+<|visual token 006928|>
+<|visual token 006929|>
+<|visual token 006930|>
+<|visual token 006931|>
+<|visual token 006932|>
+<|visual token 006933|>
+<|visual token 006934|>
+<|visual token 006935|>
+<|visual token 006936|>
+<|visual token 006937|>
+<|visual token 006938|>
+<|visual token 006939|>
+<|visual token 006940|>
+<|visual token 006941|>
+<|visual token 006942|>
+<|visual token 006943|>
+<|visual token 006944|>
+<|visual token 006945|>
+<|visual token 006946|>
+<|visual token 006947|>
+<|visual token 006948|>
+<|visual token 006949|>
+<|visual token 006950|>
+<|visual token 006951|>
+<|visual token 006952|>
+<|visual token 006953|>
+<|visual token 006954|>
+<|visual token 006955|>
+<|visual token 006956|>
+<|visual token 006957|>
+<|visual token 006958|>
+<|visual token 006959|>
+<|visual token 006960|>
+<|visual token 006961|>
+<|visual token 006962|>
+<|visual token 006963|>
+<|visual token 006964|>
+<|visual token 006965|>
+<|visual token 006966|>
+<|visual token 006967|>
+<|visual token 006968|>
+<|visual token 006969|>
+<|visual token 006970|>
+<|visual token 006971|>
+<|visual token 006972|>
+<|visual token 006973|>
+<|visual token 006974|>
+<|visual token 006975|>
+<|visual token 006976|>
+<|visual token 006977|>
+<|visual token 006978|>
+<|visual token 006979|>
+<|visual token 006980|>
+<|visual token 006981|>
+<|visual token 006982|>
+<|visual token 006983|>
+<|visual token 006984|>
+<|visual token 006985|>
+<|visual token 006986|>
+<|visual token 006987|>
+<|visual token 006988|>
+<|visual token 006989|>
+<|visual token 006990|>
+<|visual token 006991|>
+<|visual token 006992|>
+<|visual token 006993|>
+<|visual token 006994|>
+<|visual token 006995|>
+<|visual token 006996|>
+<|visual token 006997|>
+<|visual token 006998|>
+<|visual token 006999|>
+<|visual token 007000|>
+<|visual token 007001|>
+<|visual token 007002|>
+<|visual token 007003|>
+<|visual token 007004|>
+<|visual token 007005|>
+<|visual token 007006|>
+<|visual token 007007|>
+<|visual token 007008|>
+<|visual token 007009|>
+<|visual token 007010|>
+<|visual token 007011|>
+<|visual token 007012|>
+<|visual token 007013|>
+<|visual token 007014|>
+<|visual token 007015|>
+<|visual token 007016|>
+<|visual token 007017|>
+<|visual token 007018|>
+<|visual token 007019|>
+<|visual token 007020|>
+<|visual token 007021|>
+<|visual token 007022|>
+<|visual token 007023|>
+<|visual token 007024|>
+<|visual token 007025|>
+<|visual token 007026|>
+<|visual token 007027|>
+<|visual token 007028|>
+<|visual token 007029|>
+<|visual token 007030|>
+<|visual token 007031|>
+<|visual token 007032|>
+<|visual token 007033|>
+<|visual token 007034|>
+<|visual token 007035|>
+<|visual token 007036|>
+<|visual token 007037|>
+<|visual token 007038|>
+<|visual token 007039|>
+<|visual token 007040|>
+<|visual token 007041|>
+<|visual token 007042|>
+<|visual token 007043|>
+<|visual token 007044|>
+<|visual token 007045|>
+<|visual token 007046|>
+<|visual token 007047|>
+<|visual token 007048|>
+<|visual token 007049|>
+<|visual token 007050|>
+<|visual token 007051|>
+<|visual token 007052|>
+<|visual token 007053|>
+<|visual token 007054|>
+<|visual token 007055|>
+<|visual token 007056|>
+<|visual token 007057|>
+<|visual token 007058|>
+<|visual token 007059|>
+<|visual token 007060|>
+<|visual token 007061|>
+<|visual token 007062|>
+<|visual token 007063|>
+<|visual token 007064|>
+<|visual token 007065|>
+<|visual token 007066|>
+<|visual token 007067|>
+<|visual token 007068|>
+<|visual token 007069|>
+<|visual token 007070|>
+<|visual token 007071|>
+<|visual token 007072|>
+<|visual token 007073|>
+<|visual token 007074|>
+<|visual token 007075|>
+<|visual token 007076|>
+<|visual token 007077|>
+<|visual token 007078|>
+<|visual token 007079|>
+<|visual token 007080|>
+<|visual token 007081|>
+<|visual token 007082|>
+<|visual token 007083|>
+<|visual token 007084|>
+<|visual token 007085|>
+<|visual token 007086|>
+<|visual token 007087|>
+<|visual token 007088|>
+<|visual token 007089|>
+<|visual token 007090|>
+<|visual token 007091|>
+<|visual token 007092|>
+<|visual token 007093|>
+<|visual token 007094|>
+<|visual token 007095|>
+<|visual token 007096|>
+<|visual token 007097|>
+<|visual token 007098|>
+<|visual token 007099|>
+<|visual token 007100|>
+<|visual token 007101|>
+<|visual token 007102|>
+<|visual token 007103|>
+<|visual token 007104|>
+<|visual token 007105|>
+<|visual token 007106|>
+<|visual token 007107|>
+<|visual token 007108|>
+<|visual token 007109|>
+<|visual token 007110|>
+<|visual token 007111|>
+<|visual token 007112|>
+<|visual token 007113|>
+<|visual token 007114|>
+<|visual token 007115|>
+<|visual token 007116|>
+<|visual token 007117|>
+<|visual token 007118|>
+<|visual token 007119|>
+<|visual token 007120|>
+<|visual token 007121|>
+<|visual token 007122|>
+<|visual token 007123|>
+<|visual token 007124|>
+<|visual token 007125|>
+<|visual token 007126|>
+<|visual token 007127|>
+<|visual token 007128|>
+<|visual token 007129|>
+<|visual token 007130|>
+<|visual token 007131|>
+<|visual token 007132|>
+<|visual token 007133|>
+<|visual token 007134|>
+<|visual token 007135|>
+<|visual token 007136|>
+<|visual token 007137|>
+<|visual token 007138|>
+<|visual token 007139|>
+<|visual token 007140|>
+<|visual token 007141|>
+<|visual token 007142|>
+<|visual token 007143|>
+<|visual token 007144|>
+<|visual token 007145|>
+<|visual token 007146|>
+<|visual token 007147|>
+<|visual token 007148|>
+<|visual token 007149|>
+<|visual token 007150|>
+<|visual token 007151|>
+<|visual token 007152|>
+<|visual token 007153|>
+<|visual token 007154|>
+<|visual token 007155|>
+<|visual token 007156|>
+<|visual token 007157|>
+<|visual token 007158|>
+<|visual token 007159|>
+<|visual token 007160|>
+<|visual token 007161|>
+<|visual token 007162|>
+<|visual token 007163|>
+<|visual token 007164|>
+<|visual token 007165|>
+<|visual token 007166|>
+<|visual token 007167|>
+<|visual token 007168|>
+<|visual token 007169|>
+<|visual token 007170|>
+<|visual token 007171|>
+<|visual token 007172|>
+<|visual token 007173|>
+<|visual token 007174|>
+<|visual token 007175|>
+<|visual token 007176|>
+<|visual token 007177|>
+<|visual token 007178|>
+<|visual token 007179|>
+<|visual token 007180|>
+<|visual token 007181|>
+<|visual token 007182|>
+<|visual token 007183|>
+<|visual token 007184|>
+<|visual token 007185|>
+<|visual token 007186|>
+<|visual token 007187|>
+<|visual token 007188|>
+<|visual token 007189|>
+<|visual token 007190|>
+<|visual token 007191|>
+<|visual token 007192|>
+<|visual token 007193|>
+<|visual token 007194|>
+<|visual token 007195|>
+<|visual token 007196|>
+<|visual token 007197|>
+<|visual token 007198|>
+<|visual token 007199|>
+<|visual token 007200|>
+<|visual token 007201|>
+<|visual token 007202|>
+<|visual token 007203|>
+<|visual token 007204|>
+<|visual token 007205|>
+<|visual token 007206|>
+<|visual token 007207|>
+<|visual token 007208|>
+<|visual token 007209|>
+<|visual token 007210|>
+<|visual token 007211|>
+<|visual token 007212|>
+<|visual token 007213|>
+<|visual token 007214|>
+<|visual token 007215|>
+<|visual token 007216|>
+<|visual token 007217|>
+<|visual token 007218|>
+<|visual token 007219|>
+<|visual token 007220|>
+<|visual token 007221|>
+<|visual token 007222|>
+<|visual token 007223|>
+<|visual token 007224|>
+<|visual token 007225|>
+<|visual token 007226|>
+<|visual token 007227|>
+<|visual token 007228|>
+<|visual token 007229|>
+<|visual token 007230|>
+<|visual token 007231|>
+<|visual token 007232|>
+<|visual token 007233|>
+<|visual token 007234|>
+<|visual token 007235|>
+<|visual token 007236|>
+<|visual token 007237|>
+<|visual token 007238|>
+<|visual token 007239|>
+<|visual token 007240|>
+<|visual token 007241|>
+<|visual token 007242|>
+<|visual token 007243|>
+<|visual token 007244|>
+<|visual token 007245|>
+<|visual token 007246|>
+<|visual token 007247|>
+<|visual token 007248|>
+<|visual token 007249|>
+<|visual token 007250|>
+<|visual token 007251|>
+<|visual token 007252|>
+<|visual token 007253|>
+<|visual token 007254|>
+<|visual token 007255|>
+<|visual token 007256|>
+<|visual token 007257|>
+<|visual token 007258|>
+<|visual token 007259|>
+<|visual token 007260|>
+<|visual token 007261|>
+<|visual token 007262|>
+<|visual token 007263|>
+<|visual token 007264|>
+<|visual token 007265|>
+<|visual token 007266|>
+<|visual token 007267|>
+<|visual token 007268|>
+<|visual token 007269|>
+<|visual token 007270|>
+<|visual token 007271|>
+<|visual token 007272|>
+<|visual token 007273|>
+<|visual token 007274|>
+<|visual token 007275|>
+<|visual token 007276|>
+<|visual token 007277|>
+<|visual token 007278|>
+<|visual token 007279|>
+<|visual token 007280|>
+<|visual token 007281|>
+<|visual token 007282|>
+<|visual token 007283|>
+<|visual token 007284|>
+<|visual token 007285|>
+<|visual token 007286|>
+<|visual token 007287|>
+<|visual token 007288|>
+<|visual token 007289|>
+<|visual token 007290|>
+<|visual token 007291|>
+<|visual token 007292|>
+<|visual token 007293|>
+<|visual token 007294|>
+<|visual token 007295|>
+<|visual token 007296|>
+<|visual token 007297|>
+<|visual token 007298|>
+<|visual token 007299|>
+<|visual token 007300|>
+<|visual token 007301|>
+<|visual token 007302|>
+<|visual token 007303|>
+<|visual token 007304|>
+<|visual token 007305|>
+<|visual token 007306|>
+<|visual token 007307|>
+<|visual token 007308|>
+<|visual token 007309|>
+<|visual token 007310|>
+<|visual token 007311|>
+<|visual token 007312|>
+<|visual token 007313|>
+<|visual token 007314|>
+<|visual token 007315|>
+<|visual token 007316|>
+<|visual token 007317|>
+<|visual token 007318|>
+<|visual token 007319|>
+<|visual token 007320|>
+<|visual token 007321|>
+<|visual token 007322|>
+<|visual token 007323|>
+<|visual token 007324|>
+<|visual token 007325|>
+<|visual token 007326|>
+<|visual token 007327|>
+<|visual token 007328|>
+<|visual token 007329|>
+<|visual token 007330|>
+<|visual token 007331|>
+<|visual token 007332|>
+<|visual token 007333|>
+<|visual token 007334|>
+<|visual token 007335|>
+<|visual token 007336|>
+<|visual token 007337|>
+<|visual token 007338|>
+<|visual token 007339|>
+<|visual token 007340|>
+<|visual token 007341|>
+<|visual token 007342|>
+<|visual token 007343|>
+<|visual token 007344|>
+<|visual token 007345|>
+<|visual token 007346|>
+<|visual token 007347|>
+<|visual token 007348|>
+<|visual token 007349|>
+<|visual token 007350|>
+<|visual token 007351|>
+<|visual token 007352|>
+<|visual token 007353|>
+<|visual token 007354|>
+<|visual token 007355|>
+<|visual token 007356|>
+<|visual token 007357|>
+<|visual token 007358|>
+<|visual token 007359|>
+<|visual token 007360|>
+<|visual token 007361|>
+<|visual token 007362|>
+<|visual token 007363|>
+<|visual token 007364|>
+<|visual token 007365|>
+<|visual token 007366|>
+<|visual token 007367|>
+<|visual token 007368|>
+<|visual token 007369|>
+<|visual token 007370|>
+<|visual token 007371|>
+<|visual token 007372|>
+<|visual token 007373|>
+<|visual token 007374|>
+<|visual token 007375|>
+<|visual token 007376|>
+<|visual token 007377|>
+<|visual token 007378|>
+<|visual token 007379|>
+<|visual token 007380|>
+<|visual token 007381|>
+<|visual token 007382|>
+<|visual token 007383|>
+<|visual token 007384|>
+<|visual token 007385|>
+<|visual token 007386|>
+<|visual token 007387|>
+<|visual token 007388|>
+<|visual token 007389|>
+<|visual token 007390|>
+<|visual token 007391|>
+<|visual token 007392|>
+<|visual token 007393|>
+<|visual token 007394|>
+<|visual token 007395|>
+<|visual token 007396|>
+<|visual token 007397|>
+<|visual token 007398|>
+<|visual token 007399|>
+<|visual token 007400|>
+<|visual token 007401|>
+<|visual token 007402|>
+<|visual token 007403|>
+<|visual token 007404|>
+<|visual token 007405|>
+<|visual token 007406|>
+<|visual token 007407|>
+<|visual token 007408|>
+<|visual token 007409|>
+<|visual token 007410|>
+<|visual token 007411|>
+<|visual token 007412|>
+<|visual token 007413|>
+<|visual token 007414|>
+<|visual token 007415|>
+<|visual token 007416|>
+<|visual token 007417|>
+<|visual token 007418|>
+<|visual token 007419|>
+<|visual token 007420|>
+<|visual token 007421|>
+<|visual token 007422|>
+<|visual token 007423|>
+<|visual token 007424|>
+<|visual token 007425|>
+<|visual token 007426|>
+<|visual token 007427|>
+<|visual token 007428|>
+<|visual token 007429|>
+<|visual token 007430|>
+<|visual token 007431|>
+<|visual token 007432|>
+<|visual token 007433|>
+<|visual token 007434|>
+<|visual token 007435|>
+<|visual token 007436|>
+<|visual token 007437|>
+<|visual token 007438|>
+<|visual token 007439|>
+<|visual token 007440|>
+<|visual token 007441|>
+<|visual token 007442|>
+<|visual token 007443|>
+<|visual token 007444|>
+<|visual token 007445|>
+<|visual token 007446|>
+<|visual token 007447|>
+<|visual token 007448|>
+<|visual token 007449|>
+<|visual token 007450|>
+<|visual token 007451|>
+<|visual token 007452|>
+<|visual token 007453|>
+<|visual token 007454|>
+<|visual token 007455|>
+<|visual token 007456|>
+<|visual token 007457|>
+<|visual token 007458|>
+<|visual token 007459|>
+<|visual token 007460|>
+<|visual token 007461|>
+<|visual token 007462|>
+<|visual token 007463|>
+<|visual token 007464|>
+<|visual token 007465|>
+<|visual token 007466|>
+<|visual token 007467|>
+<|visual token 007468|>
+<|visual token 007469|>
+<|visual token 007470|>
+<|visual token 007471|>
+<|visual token 007472|>
+<|visual token 007473|>
+<|visual token 007474|>
+<|visual token 007475|>
+<|visual token 007476|>
+<|visual token 007477|>
+<|visual token 007478|>
+<|visual token 007479|>
+<|visual token 007480|>
+<|visual token 007481|>
+<|visual token 007482|>
+<|visual token 007483|>
+<|visual token 007484|>
+<|visual token 007485|>
+<|visual token 007486|>
+<|visual token 007487|>
+<|visual token 007488|>
+<|visual token 007489|>
+<|visual token 007490|>
+<|visual token 007491|>
+<|visual token 007492|>
+<|visual token 007493|>
+<|visual token 007494|>
+<|visual token 007495|>
+<|visual token 007496|>
+<|visual token 007497|>
+<|visual token 007498|>
+<|visual token 007499|>
+<|visual token 007500|>
+<|visual token 007501|>
+<|visual token 007502|>
+<|visual token 007503|>
+<|visual token 007504|>
+<|visual token 007505|>
+<|visual token 007506|>
+<|visual token 007507|>
+<|visual token 007508|>
+<|visual token 007509|>
+<|visual token 007510|>
+<|visual token 007511|>
+<|visual token 007512|>
+<|visual token 007513|>
+<|visual token 007514|>
+<|visual token 007515|>
+<|visual token 007516|>
+<|visual token 007517|>
+<|visual token 007518|>
+<|visual token 007519|>
+<|visual token 007520|>
+<|visual token 007521|>
+<|visual token 007522|>
+<|visual token 007523|>
+<|visual token 007524|>
+<|visual token 007525|>
+<|visual token 007526|>
+<|visual token 007527|>
+<|visual token 007528|>
+<|visual token 007529|>
+<|visual token 007530|>
+<|visual token 007531|>
+<|visual token 007532|>
+<|visual token 007533|>
+<|visual token 007534|>
+<|visual token 007535|>
+<|visual token 007536|>
+<|visual token 007537|>
+<|visual token 007538|>
+<|visual token 007539|>
+<|visual token 007540|>
+<|visual token 007541|>
+<|visual token 007542|>
+<|visual token 007543|>
+<|visual token 007544|>
+<|visual token 007545|>
+<|visual token 007546|>
+<|visual token 007547|>
+<|visual token 007548|>
+<|visual token 007549|>
+<|visual token 007550|>
+<|visual token 007551|>
+<|visual token 007552|>
+<|visual token 007553|>
+<|visual token 007554|>
+<|visual token 007555|>
+<|visual token 007556|>
+<|visual token 007557|>
+<|visual token 007558|>
+<|visual token 007559|>
+<|visual token 007560|>
+<|visual token 007561|>
+<|visual token 007562|>
+<|visual token 007563|>
+<|visual token 007564|>
+<|visual token 007565|>
+<|visual token 007566|>
+<|visual token 007567|>
+<|visual token 007568|>
+<|visual token 007569|>
+<|visual token 007570|>
+<|visual token 007571|>
+<|visual token 007572|>
+<|visual token 007573|>
+<|visual token 007574|>
+<|visual token 007575|>
+<|visual token 007576|>
+<|visual token 007577|>
+<|visual token 007578|>
+<|visual token 007579|>
+<|visual token 007580|>
+<|visual token 007581|>
+<|visual token 007582|>
+<|visual token 007583|>
+<|visual token 007584|>
+<|visual token 007585|>
+<|visual token 007586|>
+<|visual token 007587|>
+<|visual token 007588|>
+<|visual token 007589|>
+<|visual token 007590|>
+<|visual token 007591|>
+<|visual token 007592|>
+<|visual token 007593|>
+<|visual token 007594|>
+<|visual token 007595|>
+<|visual token 007596|>
+<|visual token 007597|>
+<|visual token 007598|>
+<|visual token 007599|>
+<|visual token 007600|>
+<|visual token 007601|>
+<|visual token 007602|>
+<|visual token 007603|>
+<|visual token 007604|>
+<|visual token 007605|>
+<|visual token 007606|>
+<|visual token 007607|>
+<|visual token 007608|>
+<|visual token 007609|>
+<|visual token 007610|>
+<|visual token 007611|>
+<|visual token 007612|>
+<|visual token 007613|>
+<|visual token 007614|>
+<|visual token 007615|>
+<|visual token 007616|>
+<|visual token 007617|>
+<|visual token 007618|>
+<|visual token 007619|>
+<|visual token 007620|>
+<|visual token 007621|>
+<|visual token 007622|>
+<|visual token 007623|>
+<|visual token 007624|>
+<|visual token 007625|>
+<|visual token 007626|>
+<|visual token 007627|>
+<|visual token 007628|>
+<|visual token 007629|>
+<|visual token 007630|>
+<|visual token 007631|>
+<|visual token 007632|>
+<|visual token 007633|>
+<|visual token 007634|>
+<|visual token 007635|>
+<|visual token 007636|>
+<|visual token 007637|>
+<|visual token 007638|>
+<|visual token 007639|>
+<|visual token 007640|>
+<|visual token 007641|>
+<|visual token 007642|>
+<|visual token 007643|>
+<|visual token 007644|>
+<|visual token 007645|>
+<|visual token 007646|>
+<|visual token 007647|>
+<|visual token 007648|>
+<|visual token 007649|>
+<|visual token 007650|>
+<|visual token 007651|>
+<|visual token 007652|>
+<|visual token 007653|>
+<|visual token 007654|>
+<|visual token 007655|>
+<|visual token 007656|>
+<|visual token 007657|>
+<|visual token 007658|>
+<|visual token 007659|>
+<|visual token 007660|>
+<|visual token 007661|>
+<|visual token 007662|>
+<|visual token 007663|>
+<|visual token 007664|>
+<|visual token 007665|>
+<|visual token 007666|>
+<|visual token 007667|>
+<|visual token 007668|>
+<|visual token 007669|>
+<|visual token 007670|>
+<|visual token 007671|>
+<|visual token 007672|>
+<|visual token 007673|>
+<|visual token 007674|>
+<|visual token 007675|>
+<|visual token 007676|>
+<|visual token 007677|>
+<|visual token 007678|>
+<|visual token 007679|>
+<|visual token 007680|>
+<|visual token 007681|>
+<|visual token 007682|>
+<|visual token 007683|>
+<|visual token 007684|>
+<|visual token 007685|>
+<|visual token 007686|>
+<|visual token 007687|>
+<|visual token 007688|>
+<|visual token 007689|>
+<|visual token 007690|>
+<|visual token 007691|>
+<|visual token 007692|>
+<|visual token 007693|>
+<|visual token 007694|>
+<|visual token 007695|>
+<|visual token 007696|>
+<|visual token 007697|>
+<|visual token 007698|>
+<|visual token 007699|>
+<|visual token 007700|>
+<|visual token 007701|>
+<|visual token 007702|>
+<|visual token 007703|>
+<|visual token 007704|>
+<|visual token 007705|>
+<|visual token 007706|>
+<|visual token 007707|>
+<|visual token 007708|>
+<|visual token 007709|>
+<|visual token 007710|>
+<|visual token 007711|>
+<|visual token 007712|>
+<|visual token 007713|>
+<|visual token 007714|>
+<|visual token 007715|>
+<|visual token 007716|>
+<|visual token 007717|>
+<|visual token 007718|>
+<|visual token 007719|>
+<|visual token 007720|>
+<|visual token 007721|>
+<|visual token 007722|>
+<|visual token 007723|>
+<|visual token 007724|>
+<|visual token 007725|>
+<|visual token 007726|>
+<|visual token 007727|>
+<|visual token 007728|>
+<|visual token 007729|>
+<|visual token 007730|>
+<|visual token 007731|>
+<|visual token 007732|>
+<|visual token 007733|>
+<|visual token 007734|>
+<|visual token 007735|>
+<|visual token 007736|>
+<|visual token 007737|>
+<|visual token 007738|>
+<|visual token 007739|>
+<|visual token 007740|>
+<|visual token 007741|>
+<|visual token 007742|>
+<|visual token 007743|>
+<|visual token 007744|>
+<|visual token 007745|>
+<|visual token 007746|>
+<|visual token 007747|>
+<|visual token 007748|>
+<|visual token 007749|>
+<|visual token 007750|>
+<|visual token 007751|>
+<|visual token 007752|>
+<|visual token 007753|>
+<|visual token 007754|>
+<|visual token 007755|>
+<|visual token 007756|>
+<|visual token 007757|>
+<|visual token 007758|>
+<|visual token 007759|>
+<|visual token 007760|>
+<|visual token 007761|>
+<|visual token 007762|>
+<|visual token 007763|>
+<|visual token 007764|>
+<|visual token 007765|>
+<|visual token 007766|>
+<|visual token 007767|>
+<|visual token 007768|>
+<|visual token 007769|>
+<|visual token 007770|>
+<|visual token 007771|>
+<|visual token 007772|>
+<|visual token 007773|>
+<|visual token 007774|>
+<|visual token 007775|>
+<|visual token 007776|>
+<|visual token 007777|>
+<|visual token 007778|>
+<|visual token 007779|>
+<|visual token 007780|>
+<|visual token 007781|>
+<|visual token 007782|>
+<|visual token 007783|>
+<|visual token 007784|>
+<|visual token 007785|>
+<|visual token 007786|>
+<|visual token 007787|>
+<|visual token 007788|>
+<|visual token 007789|>
+<|visual token 007790|>
+<|visual token 007791|>
+<|visual token 007792|>
+<|visual token 007793|>
+<|visual token 007794|>
+<|visual token 007795|>
+<|visual token 007796|>
+<|visual token 007797|>
+<|visual token 007798|>
+<|visual token 007799|>
+<|visual token 007800|>
+<|visual token 007801|>
+<|visual token 007802|>
+<|visual token 007803|>
+<|visual token 007804|>
+<|visual token 007805|>
+<|visual token 007806|>
+<|visual token 007807|>
+<|visual token 007808|>
+<|visual token 007809|>
+<|visual token 007810|>
+<|visual token 007811|>
+<|visual token 007812|>
+<|visual token 007813|>
+<|visual token 007814|>
+<|visual token 007815|>
+<|visual token 007816|>
+<|visual token 007817|>
+<|visual token 007818|>
+<|visual token 007819|>
+<|visual token 007820|>
+<|visual token 007821|>
+<|visual token 007822|>
+<|visual token 007823|>
+<|visual token 007824|>
+<|visual token 007825|>
+<|visual token 007826|>
+<|visual token 007827|>
+<|visual token 007828|>
+<|visual token 007829|>
+<|visual token 007830|>
+<|visual token 007831|>
+<|visual token 007832|>
+<|visual token 007833|>
+<|visual token 007834|>
+<|visual token 007835|>
+<|visual token 007836|>
+<|visual token 007837|>
+<|visual token 007838|>
+<|visual token 007839|>
+<|visual token 007840|>
+<|visual token 007841|>
+<|visual token 007842|>
+<|visual token 007843|>
+<|visual token 007844|>
+<|visual token 007845|>
+<|visual token 007846|>
+<|visual token 007847|>
+<|visual token 007848|>
+<|visual token 007849|>
+<|visual token 007850|>
+<|visual token 007851|>
+<|visual token 007852|>
+<|visual token 007853|>
+<|visual token 007854|>
+<|visual token 007855|>
+<|visual token 007856|>
+<|visual token 007857|>
+<|visual token 007858|>
+<|visual token 007859|>
+<|visual token 007860|>
+<|visual token 007861|>
+<|visual token 007862|>
+<|visual token 007863|>
+<|visual token 007864|>
+<|visual token 007865|>
+<|visual token 007866|>
+<|visual token 007867|>
+<|visual token 007868|>
+<|visual token 007869|>
+<|visual token 007870|>
+<|visual token 007871|>
+<|visual token 007872|>
+<|visual token 007873|>
+<|visual token 007874|>
+<|visual token 007875|>
+<|visual token 007876|>
+<|visual token 007877|>
+<|visual token 007878|>
+<|visual token 007879|>
+<|visual token 007880|>
+<|visual token 007881|>
+<|visual token 007882|>
+<|visual token 007883|>
+<|visual token 007884|>
+<|visual token 007885|>
+<|visual token 007886|>
+<|visual token 007887|>
+<|visual token 007888|>
+<|visual token 007889|>
+<|visual token 007890|>
+<|visual token 007891|>
+<|visual token 007892|>
+<|visual token 007893|>
+<|visual token 007894|>
+<|visual token 007895|>
+<|visual token 007896|>
+<|visual token 007897|>
+<|visual token 007898|>
+<|visual token 007899|>
+<|visual token 007900|>
+<|visual token 007901|>
+<|visual token 007902|>
+<|visual token 007903|>
+<|visual token 007904|>
+<|visual token 007905|>
+<|visual token 007906|>
+<|visual token 007907|>
+<|visual token 007908|>
+<|visual token 007909|>
+<|visual token 007910|>
+<|visual token 007911|>
+<|visual token 007912|>
+<|visual token 007913|>
+<|visual token 007914|>
+<|visual token 007915|>
+<|visual token 007916|>
+<|visual token 007917|>
+<|visual token 007918|>
+<|visual token 007919|>
+<|visual token 007920|>
+<|visual token 007921|>
+<|visual token 007922|>
+<|visual token 007923|>
+<|visual token 007924|>
+<|visual token 007925|>
+<|visual token 007926|>
+<|visual token 007927|>
+<|visual token 007928|>
+<|visual token 007929|>
+<|visual token 007930|>
+<|visual token 007931|>
+<|visual token 007932|>
+<|visual token 007933|>
+<|visual token 007934|>
+<|visual token 007935|>
+<|visual token 007936|>
+<|visual token 007937|>
+<|visual token 007938|>
+<|visual token 007939|>
+<|visual token 007940|>
+<|visual token 007941|>
+<|visual token 007942|>
+<|visual token 007943|>
+<|visual token 007944|>
+<|visual token 007945|>
+<|visual token 007946|>
+<|visual token 007947|>
+<|visual token 007948|>
+<|visual token 007949|>
+<|visual token 007950|>
+<|visual token 007951|>
+<|visual token 007952|>
+<|visual token 007953|>
+<|visual token 007954|>
+<|visual token 007955|>
+<|visual token 007956|>
+<|visual token 007957|>
+<|visual token 007958|>
+<|visual token 007959|>
+<|visual token 007960|>
+<|visual token 007961|>
+<|visual token 007962|>
+<|visual token 007963|>
+<|visual token 007964|>
+<|visual token 007965|>
+<|visual token 007966|>
+<|visual token 007967|>
+<|visual token 007968|>
+<|visual token 007969|>
+<|visual token 007970|>
+<|visual token 007971|>
+<|visual token 007972|>
+<|visual token 007973|>
+<|visual token 007974|>
+<|visual token 007975|>
+<|visual token 007976|>
+<|visual token 007977|>
+<|visual token 007978|>
+<|visual token 007979|>
+<|visual token 007980|>
+<|visual token 007981|>
+<|visual token 007982|>
+<|visual token 007983|>
+<|visual token 007984|>
+<|visual token 007985|>
+<|visual token 007986|>
+<|visual token 007987|>
+<|visual token 007988|>
+<|visual token 007989|>
+<|visual token 007990|>
+<|visual token 007991|>
+<|visual token 007992|>
+<|visual token 007993|>
+<|visual token 007994|>
+<|visual token 007995|>
+<|visual token 007996|>
+<|visual token 007997|>
+<|visual token 007998|>
+<|visual token 007999|>
+<|visual token 008000|>
+<|visual token 008001|>
+<|visual token 008002|>
+<|visual token 008003|>
+<|visual token 008004|>
+<|visual token 008005|>
+<|visual token 008006|>
+<|visual token 008007|>
+<|visual token 008008|>
+<|visual token 008009|>
+<|visual token 008010|>
+<|visual token 008011|>
+<|visual token 008012|>
+<|visual token 008013|>
+<|visual token 008014|>
+<|visual token 008015|>
+<|visual token 008016|>
+<|visual token 008017|>
+<|visual token 008018|>
+<|visual token 008019|>
+<|visual token 008020|>
+<|visual token 008021|>
+<|visual token 008022|>
+<|visual token 008023|>
+<|visual token 008024|>
+<|visual token 008025|>
+<|visual token 008026|>
+<|visual token 008027|>
+<|visual token 008028|>
+<|visual token 008029|>
+<|visual token 008030|>
+<|visual token 008031|>
+<|visual token 008032|>
+<|visual token 008033|>
+<|visual token 008034|>
+<|visual token 008035|>
+<|visual token 008036|>
+<|visual token 008037|>
+<|visual token 008038|>
+<|visual token 008039|>
+<|visual token 008040|>
+<|visual token 008041|>
+<|visual token 008042|>
+<|visual token 008043|>
+<|visual token 008044|>
+<|visual token 008045|>
+<|visual token 008046|>
+<|visual token 008047|>
+<|visual token 008048|>
+<|visual token 008049|>
+<|visual token 008050|>
+<|visual token 008051|>
+<|visual token 008052|>
+<|visual token 008053|>
+<|visual token 008054|>
+<|visual token 008055|>
+<|visual token 008056|>
+<|visual token 008057|>
+<|visual token 008058|>
+<|visual token 008059|>
+<|visual token 008060|>
+<|visual token 008061|>
+<|visual token 008062|>
+<|visual token 008063|>
+<|visual token 008064|>
+<|visual token 008065|>
+<|visual token 008066|>
+<|visual token 008067|>
+<|visual token 008068|>
+<|visual token 008069|>
+<|visual token 008070|>
+<|visual token 008071|>
+<|visual token 008072|>
+<|visual token 008073|>
+<|visual token 008074|>
+<|visual token 008075|>
+<|visual token 008076|>
+<|visual token 008077|>
+<|visual token 008078|>
+<|visual token 008079|>
+<|visual token 008080|>
+<|visual token 008081|>
+<|visual token 008082|>
+<|visual token 008083|>
+<|visual token 008084|>
+<|visual token 008085|>
+<|visual token 008086|>
+<|visual token 008087|>
+<|visual token 008088|>
+<|visual token 008089|>
+<|visual token 008090|>
+<|visual token 008091|>
+<|visual token 008092|>
+<|visual token 008093|>
+<|visual token 008094|>
+<|visual token 008095|>
+<|visual token 008096|>
+<|visual token 008097|>
+<|visual token 008098|>
+<|visual token 008099|>
+<|visual token 008100|>
+<|visual token 008101|>
+<|visual token 008102|>
+<|visual token 008103|>
+<|visual token 008104|>
+<|visual token 008105|>
+<|visual token 008106|>
+<|visual token 008107|>
+<|visual token 008108|>
+<|visual token 008109|>
+<|visual token 008110|>
+<|visual token 008111|>
+<|visual token 008112|>
+<|visual token 008113|>
+<|visual token 008114|>
+<|visual token 008115|>
+<|visual token 008116|>
+<|visual token 008117|>
+<|visual token 008118|>
+<|visual token 008119|>
+<|visual token 008120|>
+<|visual token 008121|>
+<|visual token 008122|>
+<|visual token 008123|>
+<|visual token 008124|>
+<|visual token 008125|>
+<|visual token 008126|>
+<|visual token 008127|>
+<|visual token 008128|>
+<|visual token 008129|>
+<|visual token 008130|>
+<|visual token 008131|>
+<|visual token 008132|>
+<|visual token 008133|>
+<|visual token 008134|>
+<|visual token 008135|>
+<|visual token 008136|>
+<|visual token 008137|>
+<|visual token 008138|>
+<|visual token 008139|>
+<|visual token 008140|>
+<|visual token 008141|>
+<|visual token 008142|>
+<|visual token 008143|>
+<|visual token 008144|>
+<|visual token 008145|>
+<|visual token 008146|>
+<|visual token 008147|>
+<|visual token 008148|>
+<|visual token 008149|>
+<|visual token 008150|>
+<|visual token 008151|>
+<|visual token 008152|>
+<|visual token 008153|>
+<|visual token 008154|>
+<|visual token 008155|>
+<|visual token 008156|>
+<|visual token 008157|>
+<|visual token 008158|>
+<|visual token 008159|>
+<|visual token 008160|>
+<|visual token 008161|>
+<|visual token 008162|>
+<|visual token 008163|>
+<|visual token 008164|>
+<|visual token 008165|>
+<|visual token 008166|>
+<|visual token 008167|>
+<|visual token 008168|>
+<|visual token 008169|>
+<|visual token 008170|>
+<|visual token 008171|>
+<|visual token 008172|>
+<|visual token 008173|>
+<|visual token 008174|>
+<|visual token 008175|>
+<|visual token 008176|>
+<|visual token 008177|>
+<|visual token 008178|>
+<|visual token 008179|>
+<|visual token 008180|>
+<|visual token 008181|>
+<|visual token 008182|>
+<|visual token 008183|>
+<|visual token 008184|>
+<|visual token 008185|>
+<|visual token 008186|>
+<|visual token 008187|>
+<|visual token 008188|>
+<|visual token 008189|>
+<|visual token 008190|>
+<|visual token 008191|>
+<|visual token 008192|>
+<|visual token 008193|>
+<|visual token 008194|>
+<|visual token 008195|>
+<|visual token 008196|>
+<|visual token 008197|>
+<|visual token 008198|>
+<|visual token 008199|>
+<|visual token 008200|>
+<|visual token 008201|>
+<|visual token 008202|>
+<|visual token 008203|>
+<|visual token 008204|>
+<|visual token 008205|>
+<|visual token 008206|>
+<|visual token 008207|>
+<|visual token 008208|>
+<|visual token 008209|>
+<|visual token 008210|>
+<|visual token 008211|>
+<|visual token 008212|>
+<|visual token 008213|>
+<|visual token 008214|>
+<|visual token 008215|>
+<|visual token 008216|>
+<|visual token 008217|>
+<|visual token 008218|>
+<|visual token 008219|>
+<|visual token 008220|>
+<|visual token 008221|>
+<|visual token 008222|>
+<|visual token 008223|>
+<|visual token 008224|>
+<|visual token 008225|>
+<|visual token 008226|>
+<|visual token 008227|>
+<|visual token 008228|>
+<|visual token 008229|>
+<|visual token 008230|>
+<|visual token 008231|>
+<|visual token 008232|>
+<|visual token 008233|>
+<|visual token 008234|>
+<|visual token 008235|>
+<|visual token 008236|>
+<|visual token 008237|>
+<|visual token 008238|>
+<|visual token 008239|>
+<|visual token 008240|>
+<|visual token 008241|>
+<|visual token 008242|>
+<|visual token 008243|>
+<|visual token 008244|>
+<|visual token 008245|>
+<|visual token 008246|>
+<|visual token 008247|>
+<|visual token 008248|>
+<|visual token 008249|>
+<|visual token 008250|>
+<|visual token 008251|>
+<|visual token 008252|>
+<|visual token 008253|>
+<|visual token 008254|>
+<|visual token 008255|>
+<|visual token 008256|>
+<|visual token 008257|>
+<|visual token 008258|>
+<|visual token 008259|>
+<|visual token 008260|>
+<|visual token 008261|>
+<|visual token 008262|>
+<|visual token 008263|>
+<|visual token 008264|>
+<|visual token 008265|>
+<|visual token 008266|>
+<|visual token 008267|>
+<|visual token 008268|>
+<|visual token 008269|>
+<|visual token 008270|>
+<|visual token 008271|>
+<|visual token 008272|>
+<|visual token 008273|>
+<|visual token 008274|>
+<|visual token 008275|>
+<|visual token 008276|>
+<|visual token 008277|>
+<|visual token 008278|>
+<|visual token 008279|>
+<|visual token 008280|>
+<|visual token 008281|>
+<|visual token 008282|>
+<|visual token 008283|>
+<|visual token 008284|>
+<|visual token 008285|>
+<|visual token 008286|>
+<|visual token 008287|>
+<|visual token 008288|>
+<|visual token 008289|>
+<|visual token 008290|>
+<|visual token 008291|>
+<|visual token 008292|>
+<|visual token 008293|>
+<|visual token 008294|>
+<|visual token 008295|>
+<|visual token 008296|>
+<|visual token 008297|>
+<|visual token 008298|>
+<|visual token 008299|>
+<|visual token 008300|>
+<|visual token 008301|>
+<|visual token 008302|>
+<|visual token 008303|>
+<|visual token 008304|>
+<|visual token 008305|>
+<|visual token 008306|>
+<|visual token 008307|>
+<|visual token 008308|>
+<|visual token 008309|>
+<|visual token 008310|>
+<|visual token 008311|>
+<|visual token 008312|>
+<|visual token 008313|>
+<|visual token 008314|>
+<|visual token 008315|>
+<|visual token 008316|>
+<|visual token 008317|>
+<|visual token 008318|>
+<|visual token 008319|>
+<|visual token 008320|>
+<|visual token 008321|>
+<|visual token 008322|>
+<|visual token 008323|>
+<|visual token 008324|>
+<|visual token 008325|>
+<|visual token 008326|>
+<|visual token 008327|>
+<|visual token 008328|>
+<|visual token 008329|>
+<|visual token 008330|>
+<|visual token 008331|>
+<|visual token 008332|>
+<|visual token 008333|>
+<|visual token 008334|>
+<|visual token 008335|>
+<|visual token 008336|>
+<|visual token 008337|>
+<|visual token 008338|>
+<|visual token 008339|>
+<|visual token 008340|>
+<|visual token 008341|>
+<|visual token 008342|>
+<|visual token 008343|>
+<|visual token 008344|>
+<|visual token 008345|>
+<|visual token 008346|>
+<|visual token 008347|>
+<|visual token 008348|>
+<|visual token 008349|>
+<|visual token 008350|>
+<|visual token 008351|>
+<|visual token 008352|>
+<|visual token 008353|>
+<|visual token 008354|>
+<|visual token 008355|>
+<|visual token 008356|>
+<|visual token 008357|>
+<|visual token 008358|>
+<|visual token 008359|>
+<|visual token 008360|>
+<|visual token 008361|>
+<|visual token 008362|>
+<|visual token 008363|>
+<|visual token 008364|>
+<|visual token 008365|>
+<|visual token 008366|>
+<|visual token 008367|>
+<|visual token 008368|>
+<|visual token 008369|>
+<|visual token 008370|>
+<|visual token 008371|>
+<|visual token 008372|>
+<|visual token 008373|>
+<|visual token 008374|>
+<|visual token 008375|>
+<|visual token 008376|>
+<|visual token 008377|>
+<|visual token 008378|>
+<|visual token 008379|>
+<|visual token 008380|>
+<|visual token 008381|>
+<|visual token 008382|>
+<|visual token 008383|>
+<|visual token 008384|>
+<|visual token 008385|>
+<|visual token 008386|>
+<|visual token 008387|>
+<|visual token 008388|>
+<|visual token 008389|>
+<|visual token 008390|>
+<|visual token 008391|>
+<|visual token 008392|>
+<|visual token 008393|>
+<|visual token 008394|>
+<|visual token 008395|>
+<|visual token 008396|>
+<|visual token 008397|>
+<|visual token 008398|>
+<|visual token 008399|>
+<|visual token 008400|>
+<|visual token 008401|>
+<|visual token 008402|>
+<|visual token 008403|>
+<|visual token 008404|>
+<|visual token 008405|>
+<|visual token 008406|>
+<|visual token 008407|>
+<|visual token 008408|>
+<|visual token 008409|>
+<|visual token 008410|>
+<|visual token 008411|>
+<|visual token 008412|>
+<|visual token 008413|>
+<|visual token 008414|>
+<|visual token 008415|>
+<|visual token 008416|>
+<|visual token 008417|>
+<|visual token 008418|>
+<|visual token 008419|>
+<|visual token 008420|>
+<|visual token 008421|>
+<|visual token 008422|>
+<|visual token 008423|>
+<|visual token 008424|>
+<|visual token 008425|>
+<|visual token 008426|>
+<|visual token 008427|>
+<|visual token 008428|>
+<|visual token 008429|>
+<|visual token 008430|>
+<|visual token 008431|>
+<|visual token 008432|>
+<|visual token 008433|>
+<|visual token 008434|>
+<|visual token 008435|>
+<|visual token 008436|>
+<|visual token 008437|>
+<|visual token 008438|>
+<|visual token 008439|>
+<|visual token 008440|>
+<|visual token 008441|>
+<|visual token 008442|>
+<|visual token 008443|>
+<|visual token 008444|>
+<|visual token 008445|>
+<|visual token 008446|>
+<|visual token 008447|>
+<|visual token 008448|>
+<|visual token 008449|>
+<|visual token 008450|>
+<|visual token 008451|>
+<|visual token 008452|>
+<|visual token 008453|>
+<|visual token 008454|>
+<|visual token 008455|>
+<|visual token 008456|>
+<|visual token 008457|>
+<|visual token 008458|>
+<|visual token 008459|>
+<|visual token 008460|>
+<|visual token 008461|>
+<|visual token 008462|>
+<|visual token 008463|>
+<|visual token 008464|>
+<|visual token 008465|>
+<|visual token 008466|>
+<|visual token 008467|>
+<|visual token 008468|>
+<|visual token 008469|>
+<|visual token 008470|>
+<|visual token 008471|>
+<|visual token 008472|>
+<|visual token 008473|>
+<|visual token 008474|>
+<|visual token 008475|>
+<|visual token 008476|>
+<|visual token 008477|>
+<|visual token 008478|>
+<|visual token 008479|>
+<|visual token 008480|>
+<|visual token 008481|>
+<|visual token 008482|>
+<|visual token 008483|>
+<|visual token 008484|>
+<|visual token 008485|>
+<|visual token 008486|>
+<|visual token 008487|>
+<|visual token 008488|>
+<|visual token 008489|>
+<|visual token 008490|>
+<|visual token 008491|>
+<|visual token 008492|>
+<|visual token 008493|>
+<|visual token 008494|>
+<|visual token 008495|>
+<|visual token 008496|>
+<|visual token 008497|>
+<|visual token 008498|>
+<|visual token 008499|>
+<|visual token 008500|>
+<|visual token 008501|>
+<|visual token 008502|>
+<|visual token 008503|>
+<|visual token 008504|>
+<|visual token 008505|>
+<|visual token 008506|>
+<|visual token 008507|>
+<|visual token 008508|>
+<|visual token 008509|>
+<|visual token 008510|>
+<|visual token 008511|>
+<|visual token 008512|>
+<|visual token 008513|>
+<|visual token 008514|>
+<|visual token 008515|>
+<|visual token 008516|>
+<|visual token 008517|>
+<|visual token 008518|>
+<|visual token 008519|>
+<|visual token 008520|>
+<|visual token 008521|>
+<|visual token 008522|>
+<|visual token 008523|>
+<|visual token 008524|>
+<|visual token 008525|>
+<|visual token 008526|>
+<|visual token 008527|>
+<|visual token 008528|>
+<|visual token 008529|>
+<|visual token 008530|>
+<|visual token 008531|>
+<|visual token 008532|>
+<|visual token 008533|>
+<|visual token 008534|>
+<|visual token 008535|>
+<|visual token 008536|>
+<|visual token 008537|>
+<|visual token 008538|>
+<|visual token 008539|>
+<|visual token 008540|>
+<|visual token 008541|>
+<|visual token 008542|>
+<|visual token 008543|>
+<|visual token 008544|>
+<|visual token 008545|>
+<|visual token 008546|>
+<|visual token 008547|>
+<|visual token 008548|>
+<|visual token 008549|>
+<|visual token 008550|>
+<|visual token 008551|>
+<|visual token 008552|>
+<|visual token 008553|>
+<|visual token 008554|>
+<|visual token 008555|>
+<|visual token 008556|>
+<|visual token 008557|>
+<|visual token 008558|>
+<|visual token 008559|>
+<|visual token 008560|>
+<|visual token 008561|>
+<|visual token 008562|>
+<|visual token 008563|>
+<|visual token 008564|>
+<|visual token 008565|>
+<|visual token 008566|>
+<|visual token 008567|>
+<|visual token 008568|>
+<|visual token 008569|>
+<|visual token 008570|>
+<|visual token 008571|>
+<|visual token 008572|>
+<|visual token 008573|>
+<|visual token 008574|>
+<|visual token 008575|>
+<|visual token 008576|>
+<|visual token 008577|>
+<|visual token 008578|>
+<|visual token 008579|>
+<|visual token 008580|>
+<|visual token 008581|>
+<|visual token 008582|>
+<|visual token 008583|>
+<|visual token 008584|>
+<|visual token 008585|>
+<|visual token 008586|>
+<|visual token 008587|>
+<|visual token 008588|>
+<|visual token 008589|>
+<|visual token 008590|>
+<|visual token 008591|>
+<|visual token 008592|>
+<|visual token 008593|>
+<|visual token 008594|>
+<|visual token 008595|>
+<|visual token 008596|>
+<|visual token 008597|>
+<|visual token 008598|>
+<|visual token 008599|>
+<|visual token 008600|>
+<|visual token 008601|>
+<|visual token 008602|>
+<|visual token 008603|>
+<|visual token 008604|>
+<|visual token 008605|>
+<|visual token 008606|>
+<|visual token 008607|>
+<|visual token 008608|>
+<|visual token 008609|>
+<|visual token 008610|>
+<|visual token 008611|>
+<|visual token 008612|>
+<|visual token 008613|>
+<|visual token 008614|>
+<|visual token 008615|>
+<|visual token 008616|>
+<|visual token 008617|>
+<|visual token 008618|>
+<|visual token 008619|>
+<|visual token 008620|>
+<|visual token 008621|>
+<|visual token 008622|>
+<|visual token 008623|>
+<|visual token 008624|>
+<|visual token 008625|>
+<|visual token 008626|>
+<|visual token 008627|>
+<|visual token 008628|>
+<|visual token 008629|>
+<|visual token 008630|>
+<|visual token 008631|>
+<|visual token 008632|>
+<|visual token 008633|>
+<|visual token 008634|>
+<|visual token 008635|>
+<|visual token 008636|>
+<|visual token 008637|>
+<|visual token 008638|>
+<|visual token 008639|>
+<|visual token 008640|>
+<|visual token 008641|>
+<|visual token 008642|>
+<|visual token 008643|>
+<|visual token 008644|>
+<|visual token 008645|>
+<|visual token 008646|>
+<|visual token 008647|>
+<|visual token 008648|>
+<|visual token 008649|>
+<|visual token 008650|>
+<|visual token 008651|>
+<|visual token 008652|>
+<|visual token 008653|>
+<|visual token 008654|>
+<|visual token 008655|>
+<|visual token 008656|>
+<|visual token 008657|>
+<|visual token 008658|>
+<|visual token 008659|>
+<|visual token 008660|>
+<|visual token 008661|>
+<|visual token 008662|>
+<|visual token 008663|>
+<|visual token 008664|>
+<|visual token 008665|>
+<|visual token 008666|>
+<|visual token 008667|>
+<|visual token 008668|>
+<|visual token 008669|>
+<|visual token 008670|>
+<|visual token 008671|>
+<|visual token 008672|>
+<|visual token 008673|>
+<|visual token 008674|>
+<|visual token 008675|>
+<|visual token 008676|>
+<|visual token 008677|>
+<|visual token 008678|>
+<|visual token 008679|>
+<|visual token 008680|>
+<|visual token 008681|>
+<|visual token 008682|>
+<|visual token 008683|>
+<|visual token 008684|>
+<|visual token 008685|>
+<|visual token 008686|>
+<|visual token 008687|>
+<|visual token 008688|>
+<|visual token 008689|>
+<|visual token 008690|>
+<|visual token 008691|>
+<|visual token 008692|>
+<|visual token 008693|>
+<|visual token 008694|>
+<|visual token 008695|>
+<|visual token 008696|>
+<|visual token 008697|>
+<|visual token 008698|>
+<|visual token 008699|>
+<|visual token 008700|>
+<|visual token 008701|>
+<|visual token 008702|>
+<|visual token 008703|>
+<|visual token 008704|>
+<|visual token 008705|>
+<|visual token 008706|>
+<|visual token 008707|>
+<|visual token 008708|>
+<|visual token 008709|>
+<|visual token 008710|>
+<|visual token 008711|>
+<|visual token 008712|>
+<|visual token 008713|>
+<|visual token 008714|>
+<|visual token 008715|>
+<|visual token 008716|>
+<|visual token 008717|>
+<|visual token 008718|>
+<|visual token 008719|>
+<|visual token 008720|>
+<|visual token 008721|>
+<|visual token 008722|>
+<|visual token 008723|>
+<|visual token 008724|>
+<|visual token 008725|>
+<|visual token 008726|>
+<|visual token 008727|>
+<|visual token 008728|>
+<|visual token 008729|>
+<|visual token 008730|>
+<|visual token 008731|>
+<|visual token 008732|>
+<|visual token 008733|>
+<|visual token 008734|>
+<|visual token 008735|>
+<|visual token 008736|>
+<|visual token 008737|>
+<|visual token 008738|>
+<|visual token 008739|>
+<|visual token 008740|>
+<|visual token 008741|>
+<|visual token 008742|>
+<|visual token 008743|>
+<|visual token 008744|>
+<|visual token 008745|>
+<|visual token 008746|>
+<|visual token 008747|>
+<|visual token 008748|>
+<|visual token 008749|>
+<|visual token 008750|>
+<|visual token 008751|>
+<|visual token 008752|>
+<|visual token 008753|>
+<|visual token 008754|>
+<|visual token 008755|>
+<|visual token 008756|>
+<|visual token 008757|>
+<|visual token 008758|>
+<|visual token 008759|>
+<|visual token 008760|>
+<|visual token 008761|>
+<|visual token 008762|>
+<|visual token 008763|>
+<|visual token 008764|>
+<|visual token 008765|>
+<|visual token 008766|>
+<|visual token 008767|>
+<|visual token 008768|>
+<|visual token 008769|>
+<|visual token 008770|>
+<|visual token 008771|>
+<|visual token 008772|>
+<|visual token 008773|>
+<|visual token 008774|>
+<|visual token 008775|>
+<|visual token 008776|>
+<|visual token 008777|>
+<|visual token 008778|>
+<|visual token 008779|>
+<|visual token 008780|>
+<|visual token 008781|>
+<|visual token 008782|>
+<|visual token 008783|>
+<|visual token 008784|>
+<|visual token 008785|>
+<|visual token 008786|>
+<|visual token 008787|>
+<|visual token 008788|>
+<|visual token 008789|>
+<|visual token 008790|>
+<|visual token 008791|>
+<|visual token 008792|>
+<|visual token 008793|>
+<|visual token 008794|>
+<|visual token 008795|>
+<|visual token 008796|>
+<|visual token 008797|>
+<|visual token 008798|>
+<|visual token 008799|>
+<|visual token 008800|>
+<|visual token 008801|>
+<|visual token 008802|>
+… (<|visual token 008803|> through <|visual token 011362|> continue in strict sequence; 2,560 lines elided) …
+<|visual token 011363|>
+<|visual token 011364|>
+<|visual token 011365|>
+<|visual token 011366|>
+<|visual token 011367|>
+<|visual token 011368|>
+<|visual token 011369|>
+<|visual token 011370|>
+<|visual token 011371|>
+<|visual token 011372|>
+<|visual token 011373|>
+<|visual token 011374|>
+<|visual token 011375|>
+<|visual token 011376|>
+<|visual token 011377|>
+<|visual token 011378|>
+<|visual token 011379|>
+<|visual token 011380|>
+<|visual token 011381|>
+<|visual token 011382|>
+<|visual token 011383|>
+<|visual token 011384|>
+<|visual token 011385|>
+<|visual token 011386|>
+<|visual token 011387|>
+<|visual token 011388|>
+<|visual token 011389|>
+<|visual token 011390|>
+<|visual token 011391|>
+<|visual token 011392|>
+<|visual token 011393|>
+<|visual token 011394|>
+<|visual token 011395|>
+<|visual token 011396|>
+<|visual token 011397|>
+<|visual token 011398|>
+<|visual token 011399|>
+<|visual token 011400|>
+<|visual token 011401|>
+<|visual token 011402|>
+<|visual token 011403|>
+<|visual token 011404|>
+<|visual token 011405|>
+<|visual token 011406|>
+<|visual token 011407|>
+<|visual token 011408|>
+<|visual token 011409|>
+<|visual token 011410|>
+<|visual token 011411|>
+<|visual token 011412|>
+<|visual token 011413|>
+<|visual token 011414|>
+<|visual token 011415|>
+<|visual token 011416|>
+<|visual token 011417|>
+<|visual token 011418|>
+<|visual token 011419|>
+<|visual token 011420|>
+<|visual token 011421|>
+<|visual token 011422|>
+<|visual token 011423|>
+<|visual token 011424|>
+<|visual token 011425|>
+<|visual token 011426|>
+<|visual token 011427|>
+<|visual token 011428|>
+<|visual token 011429|>
+<|visual token 011430|>
+<|visual token 011431|>
+<|visual token 011432|>
+<|visual token 011433|>
+<|visual token 011434|>
+<|visual token 011435|>
+<|visual token 011436|>
+<|visual token 011437|>
+<|visual token 011438|>
+<|visual token 011439|>
+<|visual token 011440|>
+<|visual token 011441|>
+<|visual token 011442|>
+<|visual token 011443|>
+<|visual token 011444|>
+<|visual token 011445|>
+<|visual token 011446|>
+<|visual token 011447|>
+<|visual token 011448|>
+<|visual token 011449|>
+<|visual token 011450|>
+<|visual token 011451|>
+<|visual token 011452|>
+<|visual token 011453|>
+<|visual token 011454|>
+<|visual token 011455|>
+<|visual token 011456|>
+<|visual token 011457|>
+<|visual token 011458|>
+<|visual token 011459|>
+<|visual token 011460|>
+<|visual token 011461|>
+<|visual token 011462|>
+<|visual token 011463|>
+<|visual token 011464|>
+<|visual token 011465|>
+<|visual token 011466|>
+<|visual token 011467|>
+<|visual token 011468|>
+<|visual token 011469|>
+<|visual token 011470|>
+<|visual token 011471|>
+<|visual token 011472|>
+<|visual token 011473|>
+<|visual token 011474|>
+<|visual token 011475|>
+<|visual token 011476|>
+<|visual token 011477|>
+<|visual token 011478|>
+<|visual token 011479|>
+<|visual token 011480|>
+<|visual token 011481|>
+<|visual token 011482|>
+<|visual token 011483|>
+<|visual token 011484|>
+<|visual token 011485|>
+<|visual token 011486|>
+<|visual token 011487|>
+<|visual token 011488|>
+<|visual token 011489|>
+<|visual token 011490|>
+<|visual token 011491|>
+<|visual token 011492|>
+<|visual token 011493|>
+<|visual token 011494|>
+<|visual token 011495|>
+<|visual token 011496|>
+<|visual token 011497|>
+<|visual token 011498|>
+<|visual token 011499|>
+<|visual token 011500|>
+<|visual token 011501|>
+<|visual token 011502|>
+<|visual token 011503|>
+<|visual token 011504|>
+<|visual token 011505|>
+<|visual token 011506|>
+<|visual token 011507|>
+<|visual token 011508|>
+<|visual token 011509|>
+<|visual token 011510|>
+<|visual token 011511|>
+<|visual token 011512|>
+<|visual token 011513|>
+<|visual token 011514|>
+<|visual token 011515|>
+<|visual token 011516|>
+<|visual token 011517|>
+<|visual token 011518|>
+<|visual token 011519|>
+<|visual token 011520|>
+<|visual token 011521|>
+<|visual token 011522|>
+<|visual token 011523|>
+<|visual token 011524|>
+<|visual token 011525|>
+<|visual token 011526|>
+<|visual token 011527|>
+<|visual token 011528|>
+<|visual token 011529|>
+<|visual token 011530|>
+<|visual token 011531|>
+<|visual token 011532|>
+<|visual token 011533|>
+<|visual token 011534|>
+<|visual token 011535|>
+<|visual token 011536|>
+<|visual token 011537|>
+<|visual token 011538|>
+<|visual token 011539|>
+<|visual token 011540|>
+<|visual token 011541|>
+<|visual token 011542|>
+<|visual token 011543|>
+<|visual token 011544|>
+<|visual token 011545|>
+<|visual token 011546|>
+<|visual token 011547|>
+<|visual token 011548|>
+<|visual token 011549|>
+<|visual token 011550|>
+<|visual token 011551|>
+<|visual token 011552|>
+<|visual token 011553|>
+<|visual token 011554|>
+<|visual token 011555|>
+<|visual token 011556|>
+<|visual token 011557|>
+<|visual token 011558|>
+<|visual token 011559|>
+<|visual token 011560|>
+<|visual token 011561|>
+<|visual token 011562|>
+<|visual token 011563|>
+<|visual token 011564|>
+<|visual token 011565|>
+<|visual token 011566|>
+<|visual token 011567|>
+<|visual token 011568|>
+<|visual token 011569|>
+<|visual token 011570|>
+<|visual token 011571|>
+<|visual token 011572|>
+<|visual token 011573|>
+<|visual token 011574|>
+<|visual token 011575|>
+<|visual token 011576|>
+<|visual token 011577|>
+<|visual token 011578|>
+<|visual token 011579|>
+<|visual token 011580|>
+<|visual token 011581|>
+<|visual token 011582|>
+<|visual token 011583|>
+<|visual token 011584|>
+<|visual token 011585|>
+<|visual token 011586|>
+<|visual token 011587|>
+<|visual token 011588|>
+<|visual token 011589|>
+<|visual token 011590|>
+<|visual token 011591|>
+<|visual token 011592|>
+<|visual token 011593|>
+<|visual token 011594|>
+<|visual token 011595|>
+<|visual token 011596|>
+<|visual token 011597|>
+<|visual token 011598|>
+<|visual token 011599|>
+<|visual token 011600|>
+<|visual token 011601|>
+<|visual token 011602|>
+<|visual token 011603|>
+<|visual token 011604|>
+<|visual token 011605|>
+<|visual token 011606|>
+<|visual token 011607|>
+<|visual token 011608|>
+<|visual token 011609|>
+<|visual token 011610|>
+<|visual token 011611|>
+<|visual token 011612|>
+<|visual token 011613|>
+<|visual token 011614|>
+<|visual token 011615|>
+<|visual token 011616|>
+<|visual token 011617|>
+<|visual token 011618|>
+<|visual token 011619|>
+<|visual token 011620|>
+<|visual token 011621|>
+<|visual token 011622|>
+<|visual token 011623|>
+<|visual token 011624|>
+<|visual token 011625|>
+<|visual token 011626|>
+<|visual token 011627|>
+<|visual token 011628|>
+<|visual token 011629|>
+<|visual token 011630|>
+<|visual token 011631|>
+<|visual token 011632|>
+<|visual token 011633|>
+<|visual token 011634|>
+<|visual token 011635|>
+<|visual token 011636|>
+<|visual token 011637|>
+<|visual token 011638|>
+<|visual token 011639|>
+<|visual token 011640|>
+<|visual token 011641|>
+<|visual token 011642|>
+<|visual token 011643|>
+<|visual token 011644|>
+<|visual token 011645|>
+<|visual token 011646|>
+<|visual token 011647|>
+<|visual token 011648|>
+<|visual token 011649|>
+<|visual token 011650|>
+<|visual token 011651|>
+<|visual token 011652|>
+<|visual token 011653|>
+<|visual token 011654|>
+<|visual token 011655|>
+<|visual token 011656|>
+<|visual token 011657|>
+<|visual token 011658|>
+<|visual token 011659|>
+<|visual token 011660|>
+<|visual token 011661|>
+<|visual token 011662|>
+<|visual token 011663|>
+<|visual token 011664|>
+<|visual token 011665|>
+<|visual token 011666|>
+<|visual token 011667|>
+<|visual token 011668|>
+<|visual token 011669|>
+<|visual token 011670|>
+<|visual token 011671|>
+<|visual token 011672|>
+<|visual token 011673|>
+<|visual token 011674|>
+<|visual token 011675|>
+<|visual token 011676|>
+<|visual token 011677|>
+<|visual token 011678|>
+<|visual token 011679|>
+<|visual token 011680|>
+<|visual token 011681|>
+<|visual token 011682|>
+<|visual token 011683|>
+<|visual token 011684|>
+<|visual token 011685|>
+<|visual token 011686|>
+<|visual token 011687|>
+<|visual token 011688|>
+<|visual token 011689|>
+<|visual token 011690|>
+<|visual token 011691|>
+<|visual token 011692|>
+<|visual token 011693|>
+<|visual token 011694|>
+<|visual token 011695|>
+<|visual token 011696|>
+<|visual token 011697|>
+<|visual token 011698|>
+<|visual token 011699|>
+<|visual token 011700|>
+<|visual token 011701|>
+<|visual token 011702|>
+<|visual token 011703|>
+<|visual token 011704|>
+<|visual token 011705|>
+<|visual token 011706|>
+<|visual token 011707|>
+<|visual token 011708|>
+<|visual token 011709|>
+<|visual token 011710|>
+<|visual token 011711|>
+<|visual token 011712|>
+<|visual token 011713|>
+<|visual token 011714|>
+<|visual token 011715|>
+<|visual token 011716|>
+<|visual token 011717|>
+<|visual token 011718|>
+<|visual token 011719|>
+<|visual token 011720|>
+<|visual token 011721|>
+<|visual token 011722|>
+<|visual token 011723|>
+<|visual token 011724|>
+<|visual token 011725|>
+<|visual token 011726|>
+<|visual token 011727|>
+<|visual token 011728|>
+<|visual token 011729|>
+<|visual token 011730|>
+<|visual token 011731|>
+<|visual token 011732|>
+<|visual token 011733|>
+<|visual token 011734|>
+<|visual token 011735|>
+<|visual token 011736|>
+<|visual token 011737|>
+<|visual token 011738|>
+<|visual token 011739|>
+<|visual token 011740|>
+<|visual token 011741|>
+<|visual token 011742|>
+<|visual token 011743|>
+<|visual token 011744|>
+<|visual token 011745|>
+<|visual token 011746|>
+<|visual token 011747|>
+<|visual token 011748|>
+<|visual token 011749|>
+<|visual token 011750|>
+<|visual token 011751|>
+<|visual token 011752|>
+<|visual token 011753|>
+<|visual token 011754|>
+<|visual token 011755|>
+<|visual token 011756|>
+<|visual token 011757|>
+<|visual token 011758|>
+<|visual token 011759|>
+<|visual token 011760|>
+<|visual token 011761|>
+<|visual token 011762|>
+<|visual token 011763|>
+<|visual token 011764|>
+<|visual token 011765|>
+<|visual token 011766|>
+<|visual token 011767|>
+<|visual token 011768|>
+<|visual token 011769|>
+<|visual token 011770|>
+<|visual token 011771|>
+<|visual token 011772|>
+<|visual token 011773|>
+<|visual token 011774|>
+<|visual token 011775|>
+<|visual token 011776|>
+<|visual token 011777|>
+<|visual token 011778|>
+<|visual token 011779|>
+<|visual token 011780|>
+<|visual token 011781|>
+<|visual token 011782|>
+<|visual token 011783|>
+<|visual token 011784|>
+<|visual token 011785|>
+<|visual token 011786|>
+<|visual token 011787|>
+<|visual token 011788|>
+<|visual token 011789|>
+<|visual token 011790|>
+<|visual token 011791|>
+<|visual token 011792|>
+<|visual token 011793|>
+<|visual token 011794|>
+<|visual token 011795|>
+<|visual token 011796|>
+<|visual token 011797|>
+<|visual token 011798|>
+<|visual token 011799|>
+<|visual token 011800|>
+<|visual token 011801|>
+<|visual token 011802|>
+<|visual token 011803|>
+<|visual token 011804|>
+<|visual token 011805|>
+<|visual token 011806|>
+<|visual token 011807|>
+<|visual token 011808|>
+<|visual token 011809|>
+<|visual token 011810|>
+<|visual token 011811|>
+<|visual token 011812|>
+<|visual token 011813|>
+<|visual token 011814|>
+<|visual token 011815|>
+<|visual token 011816|>
+<|visual token 011817|>
+<|visual token 011818|>
+<|visual token 011819|>
+<|visual token 011820|>
+<|visual token 011821|>
+<|visual token 011822|>
+<|visual token 011823|>
+<|visual token 011824|>
+<|visual token 011825|>
+<|visual token 011826|>
+<|visual token 011827|>
+<|visual token 011828|>
+<|visual token 011829|>
+<|visual token 011830|>
+<|visual token 011831|>
+<|visual token 011832|>
+<|visual token 011833|>
+<|visual token 011834|>
+<|visual token 011835|>
+<|visual token 011836|>
+<|visual token 011837|>
+<|visual token 011838|>
+<|visual token 011839|>
+<|visual token 011840|>
+<|visual token 011841|>
+<|visual token 011842|>
+<|visual token 011843|>
+<|visual token 011844|>
+<|visual token 011845|>
+<|visual token 011846|>
+<|visual token 011847|>
+<|visual token 011848|>
+<|visual token 011849|>
+<|visual token 011850|>
+<|visual token 011851|>
+<|visual token 011852|>
+<|visual token 011853|>
+<|visual token 011854|>
+<|visual token 011855|>
+<|visual token 011856|>
+<|visual token 011857|>
+<|visual token 011858|>
+<|visual token 011859|>
+<|visual token 011860|>
+<|visual token 011861|>
+<|visual token 011862|>
+<|visual token 011863|>
+<|visual token 011864|>
+<|visual token 011865|>
+<|visual token 011866|>
+<|visual token 011867|>
+<|visual token 011868|>
+<|visual token 011869|>
+<|visual token 011870|>
+<|visual token 011871|>
+<|visual token 011872|>
+<|visual token 011873|>
+<|visual token 011874|>
+<|visual token 011875|>
+<|visual token 011876|>
+<|visual token 011877|>
+<|visual token 011878|>
+<|visual token 011879|>
+<|visual token 011880|>
+<|visual token 011881|>
+<|visual token 011882|>
+<|visual token 011883|>
+<|visual token 011884|>
+<|visual token 011885|>
+<|visual token 011886|>
+<|visual token 011887|>
+<|visual token 011888|>
+<|visual token 011889|>
+<|visual token 011890|>
+<|visual token 011891|>
+<|visual token 011892|>
+<|visual token 011893|>
+<|visual token 011894|>
+<|visual token 011895|>
+<|visual token 011896|>
+<|visual token 011897|>
+<|visual token 011898|>
+<|visual token 011899|>
+<|visual token 011900|>
+<|visual token 011901|>
+<|visual token 011902|>
+<|visual token 011903|>
+<|visual token 011904|>
+<|visual token 011905|>
+<|visual token 011906|>
+<|visual token 011907|>
+<|visual token 011908|>
+<|visual token 011909|>
+<|visual token 011910|>
+<|visual token 011911|>
+<|visual token 011912|>
+<|visual token 011913|>
+<|visual token 011914|>
+<|visual token 011915|>
+<|visual token 011916|>
+<|visual token 011917|>
+<|visual token 011918|>
+<|visual token 011919|>
+<|visual token 011920|>
+<|visual token 011921|>
+<|visual token 011922|>
+<|visual token 011923|>
+<|visual token 011924|>
+<|visual token 011925|>
+<|visual token 011926|>
+<|visual token 011927|>
+<|visual token 011928|>
+<|visual token 011929|>
+<|visual token 011930|>
+<|visual token 011931|>
+<|visual token 011932|>
+<|visual token 011933|>
+<|visual token 011934|>
+<|visual token 011935|>
+<|visual token 011936|>
+<|visual token 011937|>
+<|visual token 011938|>
+<|visual token 011939|>
+<|visual token 011940|>
+<|visual token 011941|>
+<|visual token 011942|>
+<|visual token 011943|>
+<|visual token 011944|>
+<|visual token 011945|>
+<|visual token 011946|>
+<|visual token 011947|>
+<|visual token 011948|>
+<|visual token 011949|>
+<|visual token 011950|>
+<|visual token 011951|>
+<|visual token 011952|>
+<|visual token 011953|>
+<|visual token 011954|>
+<|visual token 011955|>
+<|visual token 011956|>
+<|visual token 011957|>
+<|visual token 011958|>
+<|visual token 011959|>
+<|visual token 011960|>
+<|visual token 011961|>
+<|visual token 011962|>
+<|visual token 011963|>
+<|visual token 011964|>
+<|visual token 011965|>
+<|visual token 011966|>
+<|visual token 011967|>
+<|visual token 011968|>
+<|visual token 011969|>
+<|visual token 011970|>
+<|visual token 011971|>
+<|visual token 011972|>
+<|visual token 011973|>
+<|visual token 011974|>
+<|visual token 011975|>
+<|visual token 011976|>
+<|visual token 011977|>
+<|visual token 011978|>
+<|visual token 011979|>
+<|visual token 011980|>
+<|visual token 011981|>
+<|visual token 011982|>
+<|visual token 011983|>
+<|visual token 011984|>
+<|visual token 011985|>
+<|visual token 011986|>
+<|visual token 011987|>
+<|visual token 011988|>
+<|visual token 011989|>
+<|visual token 011990|>
+<|visual token 011991|>
+<|visual token 011992|>
+<|visual token 011993|>
+<|visual token 011994|>
+<|visual token 011995|>
+<|visual token 011996|>
+<|visual token 011997|>
+<|visual token 011998|>
+<|visual token 011999|>
+<|visual token 012000|>
+<|visual token 012001|>
+<|visual token 012002|>
+<|visual token 012003|>
+<|visual token 012004|>
+<|visual token 012005|>
+<|visual token 012006|>
+<|visual token 012007|>
+<|visual token 012008|>
+<|visual token 012009|>
+<|visual token 012010|>
+<|visual token 012011|>
+<|visual token 012012|>
+<|visual token 012013|>
+<|visual token 012014|>
+<|visual token 012015|>
+<|visual token 012016|>
+<|visual token 012017|>
+<|visual token 012018|>
+<|visual token 012019|>
+<|visual token 012020|>
+<|visual token 012021|>
+<|visual token 012022|>
+<|visual token 012023|>
+<|visual token 012024|>
+<|visual token 012025|>
+<|visual token 012026|>
+<|visual token 012027|>
+<|visual token 012028|>
+<|visual token 012029|>
+<|visual token 012030|>
+<|visual token 012031|>
+<|visual token 012032|>
+<|visual token 012033|>
+<|visual token 012034|>
+<|visual token 012035|>
+<|visual token 012036|>
+<|visual token 012037|>
+<|visual token 012038|>
+<|visual token 012039|>
+<|visual token 012040|>
+<|visual token 012041|>
+<|visual token 012042|>
+<|visual token 012043|>
+<|visual token 012044|>
+<|visual token 012045|>
+<|visual token 012046|>
+<|visual token 012047|>
+<|visual token 012048|>
+<|visual token 012049|>
+<|visual token 012050|>
+<|visual token 012051|>
+<|visual token 012052|>
+<|visual token 012053|>
+<|visual token 012054|>
+<|visual token 012055|>
+<|visual token 012056|>
+<|visual token 012057|>
+<|visual token 012058|>
+<|visual token 012059|>
+<|visual token 012060|>
+<|visual token 012061|>
+<|visual token 012062|>
+<|visual token 012063|>
+<|visual token 012064|>
+<|visual token 012065|>
+<|visual token 012066|>
+<|visual token 012067|>
+<|visual token 012068|>
+<|visual token 012069|>
+<|visual token 012070|>
+<|visual token 012071|>
+<|visual token 012072|>
+<|visual token 012073|>
+<|visual token 012074|>
+<|visual token 012075|>
+<|visual token 012076|>
+<|visual token 012077|>
+<|visual token 012078|>
+<|visual token 012079|>
+<|visual token 012080|>
+<|visual token 012081|>
+<|visual token 012082|>
+<|visual token 012083|>
+<|visual token 012084|>
+<|visual token 012085|>
+<|visual token 012086|>
+<|visual token 012087|>
+<|visual token 012088|>
+<|visual token 012089|>
+<|visual token 012090|>
+<|visual token 012091|>
+<|visual token 012092|>
+<|visual token 012093|>
+<|visual token 012094|>
+<|visual token 012095|>
+<|visual token 012096|>
+<|visual token 012097|>
+<|visual token 012098|>
+<|visual token 012099|>
+<|visual token 012100|>
+<|visual token 012101|>
+<|visual token 012102|>
+<|visual token 012103|>
+<|visual token 012104|>
+<|visual token 012105|>
+<|visual token 012106|>
+<|visual token 012107|>
+<|visual token 012108|>
+<|visual token 012109|>
+<|visual token 012110|>
+<|visual token 012111|>
+<|visual token 012112|>
+<|visual token 012113|>
+<|visual token 012114|>
+<|visual token 012115|>
+<|visual token 012116|>
+<|visual token 012117|>
+<|visual token 012118|>
+<|visual token 012119|>
+<|visual token 012120|>
+<|visual token 012121|>
+<|visual token 012122|>
+<|visual token 012123|>
+<|visual token 012124|>
+<|visual token 012125|>
+<|visual token 012126|>
+<|visual token 012127|>
+<|visual token 012128|>
+<|visual token 012129|>
+<|visual token 012130|>
+<|visual token 012131|>
+<|visual token 012132|>
+<|visual token 012133|>
+<|visual token 012134|>
+<|visual token 012135|>
+<|visual token 012136|>
+<|visual token 012137|>
+<|visual token 012138|>
+<|visual token 012139|>
+<|visual token 012140|>
+<|visual token 012141|>
+<|visual token 012142|>
+<|visual token 012143|>
+<|visual token 012144|>
+<|visual token 012145|>
+<|visual token 012146|>
+<|visual token 012147|>
+<|visual token 012148|>
+<|visual token 012149|>
+<|visual token 012150|>
+<|visual token 012151|>
+<|visual token 012152|>
+<|visual token 012153|>
+<|visual token 012154|>
+<|visual token 012155|>
+<|visual token 012156|>
+<|visual token 012157|>
+<|visual token 012158|>
+<|visual token 012159|>
+<|visual token 012160|>
+<|visual token 012161|>
+<|visual token 012162|>
+<|visual token 012163|>
+<|visual token 012164|>
+<|visual token 012165|>
+<|visual token 012166|>
+<|visual token 012167|>
+<|visual token 012168|>
+<|visual token 012169|>
+<|visual token 012170|>
+<|visual token 012171|>
+<|visual token 012172|>
+<|visual token 012173|>
+<|visual token 012174|>
+<|visual token 012175|>
+<|visual token 012176|>
+<|visual token 012177|>
+<|visual token 012178|>
+<|visual token 012179|>
+<|visual token 012180|>
+<|visual token 012181|>
+<|visual token 012182|>
+<|visual token 012183|>
+<|visual token 012184|>
+<|visual token 012185|>
+<|visual token 012186|>
+<|visual token 012187|>
+<|visual token 012188|>
+<|visual token 012189|>
+<|visual token 012190|>
+<|visual token 012191|>
+<|visual token 012192|>
+<|visual token 012193|>
+<|visual token 012194|>
+<|visual token 012195|>
+<|visual token 012196|>
+<|visual token 012197|>
+<|visual token 012198|>
+<|visual token 012199|>
+<|visual token 012200|>
+<|visual token 012201|>
+<|visual token 012202|>
+<|visual token 012203|>
+<|visual token 012204|>
+<|visual token 012205|>
+<|visual token 012206|>
+<|visual token 012207|>
+<|visual token 012208|>
+<|visual token 012209|>
+<|visual token 012210|>
+<|visual token 012211|>
+<|visual token 012212|>
+<|visual token 012213|>
+<|visual token 012214|>
+<|visual token 012215|>
+<|visual token 012216|>
+<|visual token 012217|>
+<|visual token 012218|>
+<|visual token 012219|>
+<|visual token 012220|>
+<|visual token 012221|>
+<|visual token 012222|>
+<|visual token 012223|>
+<|visual token 012224|>
+<|visual token 012225|>
+<|visual token 012226|>
+<|visual token 012227|>
+<|visual token 012228|>
+<|visual token 012229|>
+<|visual token 012230|>
+<|visual token 012231|>
+<|visual token 012232|>
+<|visual token 012233|>
+<|visual token 012234|>
+<|visual token 012235|>
+<|visual token 012236|>
+<|visual token 012237|>
+<|visual token 012238|>
+<|visual token 012239|>
+<|visual token 012240|>
+<|visual token 012241|>
+<|visual token 012242|>
+<|visual token 012243|>
+<|visual token 012244|>
+<|visual token 012245|>
+<|visual token 012246|>
+<|visual token 012247|>
+<|visual token 012248|>
+<|visual token 012249|>
+<|visual token 012250|>
+<|visual token 012251|>
+<|visual token 012252|>
+<|visual token 012253|>
+<|visual token 012254|>
+<|visual token 012255|>
+<|visual token 012256|>
+<|visual token 012257|>
+<|visual token 012258|>
+<|visual token 012259|>
+<|visual token 012260|>
+<|visual token 012261|>
+<|visual token 012262|>
+<|visual token 012263|>
+<|visual token 012264|>
+<|visual token 012265|>
+<|visual token 012266|>
+<|visual token 012267|>
+<|visual token 012268|>
+<|visual token 012269|>
+<|visual token 012270|>
+<|visual token 012271|>
+<|visual token 012272|>
+<|visual token 012273|>
+<|visual token 012274|>
+<|visual token 012275|>
+<|visual token 012276|>
+<|visual token 012277|>
+<|visual token 012278|>
+<|visual token 012279|>
+<|visual token 012280|>
+<|visual token 012281|>
+<|visual token 012282|>
+<|visual token 012283|>
+<|visual token 012284|>
+<|visual token 012285|>
+<|visual token 012286|>
+<|visual token 012287|>
+<|visual token 012288|>
+<|visual token 012289|>
+<|visual token 012290|>
+<|visual token 012291|>
+<|visual token 012292|>
+<|visual token 012293|>
+<|visual token 012294|>
+<|visual token 012295|>
+<|visual token 012296|>
+<|visual token 012297|>
+<|visual token 012298|>
+<|visual token 012299|>
+<|visual token 012300|>
+<|visual token 012301|>
+<|visual token 012302|>
+<|visual token 012303|>
+<|visual token 012304|>
+<|visual token 012305|>
+<|visual token 012306|>
+<|visual token 012307|>
+<|visual token 012308|>
+<|visual token 012309|>
+<|visual token 012310|>
+<|visual token 012311|>
+<|visual token 012312|>
+<|visual token 012313|>
+<|visual token 012314|>
+<|visual token 012315|>
+<|visual token 012316|>
+<|visual token 012317|>
+<|visual token 012318|>
+<|visual token 012319|>
+<|visual token 012320|>
+<|visual token 012321|>
+<|visual token 012322|>
+<|visual token 012323|>
+<|visual token 012324|>
+<|visual token 012325|>
+<|visual token 012326|>
+<|visual token 012327|>
+<|visual token 012328|>
+<|visual token 012329|>
+<|visual token 012330|>
+<|visual token 012331|>
+<|visual token 012332|>
+<|visual token 012333|>
+<|visual token 012334|>
+<|visual token 012335|>
+<|visual token 012336|>
+<|visual token 012337|>
+<|visual token 012338|>
+<|visual token 012339|>
+<|visual token 012340|>
+<|visual token 012341|>
+<|visual token 012342|>
+<|visual token 012343|>
+<|visual token 012344|>
+<|visual token 012345|>
+<|visual token 012346|>
+<|visual token 012347|>
+<|visual token 012348|>
+<|visual token 012349|>
+<|visual token 012350|>
+<|visual token 012351|>
+<|visual token 012352|>
+<|visual token 012353|>
+<|visual token 012354|>
+<|visual token 012355|>
+<|visual token 012356|>
+<|visual token 012357|>
+<|visual token 012358|>
+<|visual token 012359|>
+<|visual token 012360|>
+<|visual token 012361|>
+<|visual token 012362|>
+<|visual token 012363|>
+<|visual token 012364|>
+<|visual token 012365|>
+<|visual token 012366|>
+<|visual token 012367|>
+<|visual token 012368|>
+<|visual token 012369|>
+<|visual token 012370|>
+<|visual token 012371|>
+<|visual token 012372|>
+<|visual token 012373|>
+<|visual token 012374|>
+<|visual token 012375|>
+<|visual token 012376|>
+<|visual token 012377|>
+<|visual token 012378|>
+<|visual token 012379|>
+<|visual token 012380|>
+<|visual token 012381|>
+<|visual token 012382|>
+<|visual token 012383|>
+<|visual token 012384|>
+<|visual token 012385|>
+<|visual token 012386|>
+<|visual token 012387|>
+<|visual token 012388|>
+<|visual token 012389|>
+<|visual token 012390|>
+<|visual token 012391|>
+<|visual token 012392|>
+<|visual token 012393|>
+<|visual token 012394|>
+<|visual token 012395|>
+<|visual token 012396|>
+<|visual token 012397|>
+<|visual token 012398|>
+<|visual token 012399|>
+<|visual token 012400|>
+<|visual token 012401|>
+<|visual token 012402|>
+<|visual token 012403|>
+<|visual token 012404|>
+<|visual token 012405|>
+<|visual token 012406|>
+<|visual token 012407|>
+<|visual token 012408|>
+<|visual token 012409|>
+<|visual token 012410|>
+<|visual token 012411|>
+<|visual token 012412|>
+<|visual token 012413|>
+<|visual token 012414|>
+<|visual token 012415|>
+<|visual token 012416|>
+<|visual token 012417|>
+<|visual token 012418|>
+<|visual token 012419|>
+<|visual token 012420|>
+<|visual token 012421|>
+<|visual token 012422|>
+<|visual token 012423|>
+<|visual token 012424|>
+<|visual token 012425|>
+<|visual token 012426|>
+<|visual token 012427|>
+<|visual token 012428|>
+<|visual token 012429|>
+<|visual token 012430|>
+<|visual token 012431|>
+<|visual token 012432|>
+<|visual token 012433|>
+<|visual token 012434|>
+<|visual token 012435|>
+<|visual token 012436|>
+<|visual token 012437|>
+<|visual token 012438|>
+<|visual token 012439|>
+<|visual token 012440|>
+<|visual token 012441|>
+<|visual token 012442|>
+<|visual token 012443|>
+<|visual token 012444|>
+<|visual token 012445|>
+<|visual token 012446|>
+<|visual token 012447|>
+<|visual token 012448|>
+<|visual token 012449|>
+<|visual token 012450|>
+<|visual token 012451|>
+<|visual token 012452|>
+<|visual token 012453|>
+<|visual token 012454|>
+<|visual token 012455|>
+<|visual token 012456|>
+<|visual token 012457|>
+<|visual token 012458|>
+<|visual token 012459|>
+<|visual token 012460|>
+<|visual token 012461|>
+<|visual token 012462|>
+<|visual token 012463|>
+<|visual token 012464|>
+<|visual token 012465|>
+<|visual token 012466|>
+<|visual token 012467|>
+<|visual token 012468|>
+<|visual token 012469|>
+<|visual token 012470|>
+<|visual token 012471|>
+<|visual token 012472|>
+<|visual token 012473|>
+<|visual token 012474|>
+<|visual token 012475|>
+<|visual token 012476|>
+<|visual token 012477|>
+<|visual token 012478|>
+<|visual token 012479|>
+<|visual token 012480|>
+<|visual token 012481|>
+<|visual token 012482|>
+<|visual token 012483|>
+<|visual token 012484|>
+<|visual token 012485|>
+<|visual token 012486|>
+<|visual token 012487|>
+<|visual token 012488|>
+<|visual token 012489|>
+<|visual token 012490|>
+<|visual token 012491|>
+<|visual token 012492|>
+<|visual token 012493|>
+<|visual token 012494|>
+<|visual token 012495|>
+<|visual token 012496|>
+<|visual token 012497|>
+<|visual token 012498|>
+<|visual token 012499|>
+<|visual token 012500|>
+<|visual token 012501|>
+<|visual token 012502|>
+<|visual token 012503|>
+<|visual token 012504|>
+<|visual token 012505|>
+<|visual token 012506|>
+<|visual token 012507|>
+<|visual token 012508|>
+<|visual token 012509|>
+<|visual token 012510|>
+<|visual token 012511|>
+<|visual token 012512|>
+<|visual token 012513|>
+<|visual token 012514|>
+<|visual token 012515|>
+<|visual token 012516|>
+<|visual token 012517|>
+<|visual token 012518|>
+<|visual token 012519|>
+<|visual token 012520|>
+<|visual token 012521|>
+<|visual token 012522|>
+<|visual token 012523|>
+<|visual token 012524|>
+<|visual token 012525|>
+<|visual token 012526|>
+<|visual token 012527|>
+<|visual token 012528|>
+<|visual token 012529|>
+<|visual token 012530|>
+<|visual token 012531|>
+<|visual token 012532|>
+<|visual token 012533|>
+<|visual token 012534|>
+<|visual token 012535|>
+<|visual token 012536|>
+<|visual token 012537|>
+<|visual token 012538|>
+<|visual token 012539|>
+<|visual token 012540|>
+<|visual token 012541|>
+<|visual token 012542|>
+<|visual token 012543|>
+<|visual token 012544|>
+<|visual token 012545|>
+<|visual token 012546|>
+<|visual token 012547|>
+<|visual token 012548|>
+<|visual token 012549|>
+<|visual token 012550|>
+<|visual token 012551|>
+<|visual token 012552|>
+<|visual token 012553|>
+<|visual token 012554|>
+<|visual token 012555|>
+<|visual token 012556|>
+<|visual token 012557|>
+<|visual token 012558|>
+<|visual token 012559|>
+<|visual token 012560|>
+<|visual token 012561|>
+<|visual token 012562|>
+<|visual token 012563|>
+<|visual token 012564|>
+<|visual token 012565|>
+<|visual token 012566|>
+<|visual token 012567|>
+<|visual token 012568|>
+<|visual token 012569|>
+<|visual token 012570|>
+<|visual token 012571|>
+<|visual token 012572|>
+<|visual token 012573|>
+<|visual token 012574|>
+<|visual token 012575|>
+<|visual token 012576|>
+<|visual token 012577|>
+<|visual token 012578|>
+<|visual token 012579|>
+<|visual token 012580|>
+<|visual token 012581|>
+<|visual token 012582|>
+<|visual token 012583|>
+<|visual token 012584|>
+<|visual token 012585|>
+<|visual token 012586|>
+<|visual token 012587|>
+<|visual token 012588|>
+<|visual token 012589|>
+<|visual token 012590|>
+<|visual token 012591|>
+<|visual token 012592|>
+<|visual token 012593|>
+<|visual token 012594|>
+<|visual token 012595|>
+<|visual token 012596|>
+<|visual token 012597|>
+<|visual token 012598|>
+<|visual token 012599|>
+<|visual token 012600|>
+<|visual token 012601|>
+<|visual token 012602|>
+<|visual token 012603|>
+<|visual token 012604|>
+<|visual token 012605|>
+<|visual token 012606|>
+<|visual token 012607|>
+<|visual token 012608|>
+<|visual token 012609|>
+<|visual token 012610|>
+<|visual token 012611|>
+<|visual token 012612|>
+<|visual token 012613|>
+<|visual token 012614|>
+<|visual token 012615|>
+<|visual token 012616|>
+<|visual token 012617|>
+<|visual token 012618|>
+<|visual token 012619|>
+<|visual token 012620|>
+<|visual token 012621|>
+<|visual token 012622|>
+<|visual token 012623|>
+<|visual token 012624|>
+<|visual token 012625|>
+<|visual token 012626|>
+<|visual token 012627|>
+<|visual token 012628|>
+<|visual token 012629|>
+<|visual token 012630|>
+<|visual token 012631|>
+<|visual token 012632|>
+<|visual token 012633|>
+<|visual token 012634|>
+<|visual token 012635|>
+<|visual token 012636|>
+<|visual token 012637|>
+<|visual token 012638|>
+<|visual token 012639|>
+<|visual token 012640|>
+<|visual token 012641|>
+<|visual token 012642|>
+<|visual token 012643|>
+<|visual token 012644|>
+<|visual token 012645|>
+<|visual token 012646|>
+<|visual token 012647|>
+<|visual token 012648|>
+<|visual token 012649|>
+<|visual token 012650|>
+<|visual token 012651|>
+<|visual token 012652|>
+<|visual token 012653|>
+<|visual token 012654|>
+<|visual token 012655|>
+<|visual token 012656|>
+<|visual token 012657|>
+<|visual token 012658|>
+<|visual token 012659|>
+<|visual token 012660|>
+<|visual token 012661|>
+<|visual token 012662|>
+<|visual token 012663|>
+<|visual token 012664|>
+<|visual token 012665|>
+<|visual token 012666|>
+<|visual token 012667|>
+<|visual token 012668|>
+<|visual token 012669|>
+<|visual token 012670|>
+<|visual token 012671|>
+<|visual token 012672|>
+<|visual token 012673|>
+<|visual token 012674|>
+<|visual token 012675|>
+<|visual token 012676|>
+<|visual token 012677|>
+<|visual token 012678|>
+<|visual token 012679|>
+<|visual token 012680|>
+<|visual token 012681|>
+<|visual token 012682|>
+<|visual token 012683|>
+<|visual token 012684|>
+<|visual token 012685|>
+<|visual token 012686|>
+<|visual token 012687|>
+<|visual token 012688|>
+<|visual token 012689|>
+<|visual token 012690|>
+<|visual token 012691|>
+<|visual token 012692|>
+<|visual token 012693|>
+<|visual token 012694|>
+<|visual token 012695|>
+<|visual token 012696|>
+<|visual token 012697|>
+<|visual token 012698|>
+<|visual token 012699|>
+<|visual token 012700|>
+<|visual token 012701|>
+<|visual token 012702|>
+<|visual token 012703|>
+<|visual token 012704|>
+<|visual token 012705|>
+<|visual token 012706|>
+<|visual token 012707|>
+<|visual token 012708|>
+<|visual token 012709|>
+<|visual token 012710|>
+<|visual token 012711|>
+<|visual token 012712|>
+<|visual token 012713|>
+<|visual token 012714|>
+<|visual token 012715|>
+<|visual token 012716|>
+<|visual token 012717|>
+<|visual token 012718|>
+<|visual token 012719|>
+<|visual token 012720|>
+<|visual token 012721|>
+<|visual token 012722|>
+<|visual token 012723|>
+<|visual token 012724|>
+<|visual token 012725|>
+<|visual token 012726|>
+<|visual token 012727|>
+<|visual token 012728|>
+<|visual token 012729|>
+<|visual token 012730|>
+<|visual token 012731|>
+<|visual token 012732|>
+<|visual token 012733|>
+<|visual token 012734|>
+<|visual token 012735|>
+<|visual token 012736|>
+<|visual token 012737|>
+<|visual token 012738|>
+<|visual token 012739|>
+<|visual token 012740|>
+<|visual token 012741|>
+<|visual token 012742|>
+<|visual token 012743|>
+<|visual token 012744|>
+<|visual token 012745|>
+<|visual token 012746|>
+<|visual token 012747|>
+<|visual token 012748|>
+<|visual token 012749|>
+<|visual token 012750|>
+<|visual token 012751|>
+<|visual token 012752|>
+<|visual token 012753|>
+<|visual token 012754|>
+<|visual token 012755|>
+<|visual token 012756|>
+<|visual token 012757|>
+<|visual token 012758|>
+<|visual token 012759|>
+<|visual token 012760|>
+<|visual token 012761|>
+<|visual token 012762|>
+<|visual token 012763|>
+<|visual token 012764|>
+<|visual token 012765|>
+<|visual token 012766|>
+<|visual token 012767|>
+<|visual token 012768|>
+<|visual token 012769|>
+<|visual token 012770|>
+<|visual token 012771|>
+<|visual token 012772|>
+<|visual token 012773|>
+<|visual token 012774|>
+<|visual token 012775|>
+<|visual token 012776|>
+<|visual token 012777|>
+<|visual token 012778|>
+<|visual token 012779|>
+<|visual token 012780|>
+<|visual token 012781|>
+<|visual token 012782|>
+<|visual token 012783|>
+<|visual token 012784|>
+<|visual token 012785|>
+<|visual token 012786|>
+<|visual token 012787|>
+<|visual token 012788|>
+<|visual token 012789|>
+<|visual token 012790|>
+<|visual token 012791|>
+<|visual token 012792|>
+<|visual token 012793|>
+<|visual token 012794|>
+<|visual token 012795|>
+<|visual token 012796|>
+<|visual token 012797|>
+<|visual token 012798|>
+<|visual token 012799|>
+<|visual token 012800|>
+<|visual token 012801|>
+<|visual token 012802|>
+<|visual token 012803|>
+<|visual token 012804|>
+<|visual token 012805|>
+<|visual token 012806|>
+<|visual token 012807|>
+<|visual token 012808|>
+<|visual token 012809|>
+<|visual token 012810|>
+<|visual token 012811|>
+<|visual token 012812|>
+<|visual token 012813|>
+<|visual token 012814|>
+<|visual token 012815|>
+<|visual token 012816|>
+<|visual token 012817|>
+<|visual token 012818|>
+<|visual token 012819|>
+<|visual token 012820|>
+<|visual token 012821|>
+<|visual token 012822|>
+<|visual token 012823|>
+<|visual token 012824|>
+<|visual token 012825|>
+<|visual token 012826|>
+<|visual token 012827|>
+<|visual token 012828|>
+<|visual token 012829|>
+<|visual token 012830|>
+<|visual token 012831|>
+<|visual token 012832|>
+<|visual token 012833|>
+<|visual token 012834|>
+<|visual token 012835|>
+<|visual token 012836|>
+<|visual token 012837|>
+<|visual token 012838|>
+<|visual token 012839|>
+<|visual token 012840|>
+<|visual token 012841|>
+<|visual token 012842|>
+<|visual token 012843|>
+<|visual token 012844|>
+<|visual token 012845|>
+<|visual token 012846|>
+<|visual token 012847|>
+<|visual token 012848|>
+<|visual token 012849|>
+<|visual token 012850|>
+<|visual token 012851|>
+<|visual token 012852|>
+<|visual token 012853|>
+<|visual token 012854|>
+<|visual token 012855|>
+<|visual token 012856|>
+<|visual token 012857|>
+<|visual token 012858|>
+<|visual token 012859|>
+<|visual token 012860|>
+<|visual token 012861|>
+<|visual token 012862|>
+<|visual token 012863|>
+<|visual token 012864|>
+<|visual token 012865|>
+<|visual token 012866|>
+<|visual token 012867|>
+<|visual token 012868|>
+<|visual token 012869|>
+<|visual token 012870|>
+<|visual token 012871|>
+<|visual token 012872|>
+<|visual token 012873|>
+<|visual token 012874|>
+<|visual token 012875|>
+<|visual token 012876|>
+<|visual token 012877|>
+<|visual token 012878|>
+<|visual token 012879|>
+<|visual token 012880|>
+<|visual token 012881|>
+<|visual token 012882|>
+<|visual token 012883|>
+<|visual token 012884|>
+<|visual token 012885|>
+<|visual token 012886|>
+<|visual token 012887|>
+<|visual token 012888|>
+<|visual token 012889|>
+<|visual token 012890|>
+<|visual token 012891|>
+<|visual token 012892|>
+<|visual token 012893|>
+<|visual token 012894|>
+<|visual token 012895|>
+<|visual token 012896|>
+<|visual token 012897|>
+<|visual token 012898|>
+<|visual token 012899|>
+<|visual token 012900|>
+<|visual token 012901|>
+<|visual token 012902|>
+<|visual token 012903|>
+<|visual token 012904|>
+<|visual token 012905|>
+<|visual token 012906|>
+<|visual token 012907|>
+<|visual token 012908|>
+<|visual token 012909|>
+<|visual token 012910|>
+<|visual token 012911|>
+<|visual token 012912|>
+<|visual token 012913|>
+<|visual token 012914|>
+<|visual token 012915|>
+<|visual token 012916|>
+<|visual token 012917|>
+<|visual token 012918|>
+<|visual token 012919|>
+<|visual token 012920|>
+<|visual token 012921|>
+<|visual token 012922|>
+<|visual token 012923|>
+<|visual token 012924|>
+<|visual token 012925|>
+<|visual token 012926|>
+<|visual token 012927|>
+<|visual token 012928|>
+<|visual token 012929|>
+<|visual token 012930|>
+<|visual token 012931|>
+<|visual token 012932|>
+<|visual token 012933|>
+<|visual token 012934|>
+<|visual token 012935|>
+<|visual token 012936|>
+<|visual token 012937|>
+<|visual token 012938|>
+<|visual token 012939|>
+<|visual token 012940|>
+<|visual token 012941|>
+<|visual token 012942|>
+<|visual token 012943|>
+<|visual token 012944|>
+<|visual token 012945|>
+<|visual token 012946|>
+<|visual token 012947|>
+<|visual token 012948|>
+<|visual token 012949|>
+<|visual token 012950|>
+<|visual token 012951|>
+<|visual token 012952|>
+<|visual token 012953|>
+<|visual token 012954|>
+<|visual token 012955|>
+<|visual token 012956|>
+<|visual token 012957|>
+<|visual token 012958|>
+<|visual token 012959|>
+<|visual token 012960|>
+<|visual token 012961|>
+<|visual token 012962|>
+<|visual token 012963|>
+<|visual token 012964|>
+<|visual token 012965|>
+<|visual token 012966|>
+<|visual token 012967|>
+<|visual token 012968|>
+<|visual token 012969|>
+<|visual token 012970|>
+<|visual token 012971|>
+<|visual token 012972|>
+<|visual token 012973|>
+<|visual token 012974|>
+<|visual token 012975|>
+<|visual token 012976|>
+<|visual token 012977|>
+<|visual token 012978|>
+<|visual token 012979|>
+<|visual token 012980|>
+<|visual token 012981|>
+<|visual token 012982|>
+<|visual token 012983|>
+<|visual token 012984|>
+<|visual token 012985|>
+<|visual token 012986|>
+<|visual token 012987|>
+<|visual token 012988|>
+<|visual token 012989|>
+<|visual token 012990|>
+<|visual token 012991|>
+<|visual token 012992|>
+<|visual token 012993|>
+<|visual token 012994|>
+<|visual token 012995|>
+<|visual token 012996|>
+<|visual token 012997|>
+<|visual token 012998|>
+<|visual token 012999|>
+<|visual token 013000|>
+<|visual token 013001|>
+<|visual token 013002|>
+<|visual token 013003|>
+<|visual token 013004|>
+<|visual token 013005|>
+<|visual token 013006|>
+<|visual token 013007|>
+<|visual token 013008|>
+<|visual token 013009|>
+<|visual token 013010|>
+<|visual token 013011|>
+<|visual token 013012|>
+<|visual token 013013|>
+<|visual token 013014|>
+<|visual token 013015|>
+<|visual token 013016|>
+<|visual token 013017|>
+<|visual token 013018|>
+<|visual token 013019|>
+<|visual token 013020|>
+<|visual token 013021|>
+<|visual token 013022|>
+<|visual token 013023|>
+<|visual token 013024|>
+<|visual token 013025|>
+<|visual token 013026|>
+<|visual token 013027|>
+<|visual token 013028|>
+<|visual token 013029|>
+<|visual token 013030|>
+<|visual token 013031|>
+<|visual token 013032|>
+<|visual token 013033|>
+<|visual token 013034|>
+<|visual token 013035|>
+<|visual token 013036|>
+<|visual token 013037|>
+<|visual token 013038|>
+<|visual token 013039|>
+<|visual token 013040|>
+<|visual token 013041|>
+<|visual token 013042|>
+<|visual token 013043|>
+<|visual token 013044|>
+<|visual token 013045|>
+<|visual token 013046|>
+<|visual token 013047|>
+<|visual token 013048|>
+<|visual token 013049|>
+<|visual token 013050|>
+<|visual token 013051|>
+<|visual token 013052|>
+<|visual token 013053|>
+<|visual token 013054|>
+<|visual token 013055|>
+<|visual token 013056|>
+<|visual token 013057|>
+<|visual token 013058|>
+<|visual token 013059|>
+<|visual token 013060|>
+<|visual token 013061|>
+<|visual token 013062|>
+<|visual token 013063|>
+<|visual token 013064|>
+<|visual token 013065|>
+<|visual token 013066|>
+<|visual token 013067|>
+<|visual token 013068|>
+<|visual token 013069|>
+<|visual token 013070|>
+<|visual token 013071|>
+<|visual token 013072|>
+<|visual token 013073|>
+<|visual token 013074|>
+<|visual token 013075|>
+<|visual token 013076|>
+<|visual token 013077|>
+<|visual token 013078|>
+<|visual token 013079|>
+<|visual token 013080|>
+<|visual token 013081|>
+<|visual token 013082|>
+<|visual token 013083|>
+<|visual token 013084|>
+<|visual token 013085|>
+<|visual token 013086|>
+<|visual token 013087|>
+<|visual token 013088|>
+<|visual token 013089|>
+<|visual token 013090|>
+<|visual token 013091|>
+<|visual token 013092|>
+<|visual token 013093|>
+<|visual token 013094|>
+<|visual token 013095|>
+<|visual token 013096|>
+<|visual token 013097|>
+<|visual token 013098|>
+<|visual token 013099|>
+<|visual token 013100|>
+<|visual token 013101|>
+<|visual token 013102|>
+<|visual token 013103|>
+<|visual token 013104|>
+<|visual token 013105|>
+<|visual token 013106|>
+<|visual token 013107|>
+<|visual token 013108|>
+<|visual token 013109|>
+<|visual token 013110|>
+<|visual token 013111|>
+<|visual token 013112|>
+<|visual token 013113|>
+<|visual token 013114|>
+<|visual token 013115|>
+<|visual token 013116|>
+<|visual token 013117|>
+<|visual token 013118|>
+<|visual token 013119|>
+<|visual token 013120|>
+<|visual token 013121|>
+<|visual token 013122|>
+<|visual token 013123|>
+<|visual token 013124|>
+<|visual token 013125|>
+<|visual token 013126|>
+<|visual token 013127|>
+<|visual token 013128|>
+<|visual token 013129|>
+<|visual token 013130|>
+<|visual token 013131|>
+<|visual token 013132|>
+<|visual token 013133|>
+<|visual token 013134|>
+<|visual token 013135|>
+<|visual token 013136|>
+<|visual token 013137|>
+<|visual token 013138|>
+<|visual token 013139|>
+<|visual token 013140|>
+<|visual token 013141|>
+<|visual token 013142|>
+<|visual token 013143|>
+<|visual token 013144|>
+<|visual token 013145|>
+<|visual token 013146|>
+<|visual token 013147|>
+<|visual token 013148|>
+<|visual token 013149|>
+<|visual token 013150|>
+<|visual token 013151|>
+<|visual token 013152|>
+<|visual token 013153|>
+<|visual token 013154|>
+<|visual token 013155|>
+<|visual token 013156|>
+<|visual token 013157|>
+<|visual token 013158|>
+<|visual token 013159|>
+<|visual token 013160|>
+<|visual token 013161|>
+<|visual token 013162|>
+<|visual token 013163|>
+<|visual token 013164|>
+<|visual token 013165|>
+<|visual token 013166|>
+<|visual token 013167|>
+<|visual token 013168|>
+<|visual token 013169|>
+<|visual token 013170|>
+<|visual token 013171|>
+<|visual token 013172|>
+<|visual token 013173|>
+<|visual token 013174|>
+<|visual token 013175|>
+<|visual token 013176|>
+<|visual token 013177|>
+<|visual token 013178|>
+<|visual token 013179|>
+<|visual token 013180|>
+<|visual token 013181|>
+<|visual token 013182|>
+<|visual token 013183|>
+<|visual token 013184|>
+<|visual token 013185|>
+<|visual token 013186|>
+<|visual token 013187|>
+<|visual token 013188|>
+<|visual token 013189|>
+<|visual token 013190|>
+<|visual token 013191|>
+<|visual token 013192|>
+<|visual token 013193|>
+<|visual token 013194|>
+<|visual token 013195|>
+<|visual token 013196|>
+<|visual token 013197|>
+<|visual token 013198|>
+<|visual token 013199|>
+<|visual token 013200|>
+<|visual token 013201|>
+<|visual token 013202|>
+<|visual token 013203|>
+<|visual token 013204|>
+<|visual token 013205|>
+<|visual token 013206|>
+<|visual token 013207|>
+<|visual token 013208|>
+<|visual token 013209|>
+<|visual token 013210|>
+<|visual token 013211|>
+<|visual token 013212|>
+<|visual token 013213|>
+<|visual token 013214|>
+<|visual token 013215|>
+<|visual token 013216|>
+<|visual token 013217|>
+<|visual token 013218|>
+<|visual token 013219|>
+<|visual token 013220|>
+<|visual token 013221|>
+<|visual token 013222|>
+<|visual token 013223|>
+<|visual token 013224|>
+<|visual token 013225|>
+<|visual token 013226|>
+<|visual token 013227|>
+<|visual token 013228|>
+<|visual token 013229|>
+<|visual token 013230|>
+<|visual token 013231|>
+<|visual token 013232|>
+<|visual token 013233|>
+<|visual token 013234|>
+<|visual token 013235|>
+<|visual token 013236|>
+<|visual token 013237|>
+<|visual token 013238|>
+<|visual token 013239|>
+<|visual token 013240|>
+<|visual token 013241|>
+<|visual token 013242|>
+<|visual token 013243|>
+<|visual token 013244|>
+<|visual token 013245|>
+<|visual token 013246|>
+<|visual token 013247|>
+<|visual token 013248|>
+<|visual token 013249|>
+<|visual token 013250|>
+<|visual token 013251|>
+<|visual token 013252|>
+<|visual token 013253|>
+<|visual token 013254|>
+<|visual token 013255|>
+<|visual token 013256|>
+<|visual token 013257|>
+<|visual token 013258|>
+<|visual token 013259|>
+<|visual token 013260|>
+<|visual token 013261|>
+<|visual token 013262|>
+<|visual token 013263|>
+<|visual token 013264|>
+<|visual token 013265|>
+<|visual token 013266|>
+<|visual token 013267|>
+<|visual token 013268|>
+<|visual token 013269|>
+<|visual token 013270|>
+<|visual token 013271|>
+<|visual token 013272|>
+<|visual token 013273|>
+<|visual token 013274|>
+<|visual token 013275|>
+<|visual token 013276|>
+<|visual token 013277|>
+<|visual token 013278|>
+<|visual token 013279|>
+<|visual token 013280|>
+<|visual token 013281|>
+<|visual token 013282|>
+<|visual token 013283|>
+<|visual token 013284|>
+<|visual token 013285|>
+<|visual token 013286|>
+<|visual token 013287|>
+<|visual token 013288|>
+<|visual token 013289|>
+<|visual token 013290|>
+<|visual token 013291|>
+<|visual token 013292|>
+<|visual token 013293|>
+<|visual token 013294|>
+<|visual token 013295|>
+<|visual token 013296|>
+<|visual token 013297|>
+<|visual token 013298|>
+<|visual token 013299|>
+<|visual token 013300|>
+<|visual token 013301|>
+<|visual token 013302|>
+<|visual token 013303|>
+<|visual token 013304|>
+<|visual token 013305|>
+<|visual token 013306|>
+<|visual token 013307|>
+<|visual token 013308|>
+<|visual token 013309|>
+<|visual token 013310|>
+<|visual token 013311|>
+<|visual token 013312|>
+<|visual token 013313|>
+<|visual token 013314|>
+<|visual token 013315|>
+<|visual token 013316|>
+<|visual token 013317|>
+<|visual token 013318|>
+<|visual token 013319|>
+<|visual token 013320|>
+<|visual token 013321|>
+<|visual token 013322|>
+<|visual token 013323|>
+<|visual token 013324|>
+<|visual token 013325|>
+<|visual token 013326|>
+<|visual token 013327|>
+<|visual token 013328|>
+<|visual token 013329|>
+<|visual token 013330|>
+<|visual token 013331|>
+<|visual token 013332|>
+<|visual token 013333|>
+<|visual token 013334|>
+<|visual token 013335|>
+<|visual token 013336|>
+<|visual token 013337|>
+<|visual token 013338|>
+<|visual token 013339|>
+<|visual token 013340|>
+<|visual token 013341|>
+<|visual token 013342|>
+<|visual token 013343|>
+<|visual token 013344|>
+<|visual token 013345|>
+<|visual token 013346|>
+<|visual token 013347|>
+<|visual token 013348|>
+<|visual token 013349|>
+<|visual token 013350|>
+<|visual token 013351|>
+<|visual token 013352|>
+<|visual token 013353|>
+<|visual token 013354|>
[... 2,560 added lines elided: visual tokens 013355 through 015914 continue consecutively, one `+<|visual token NNNNNN|>` entry per line ...]
+<|visual token 015915|>
+<|visual token 015916|>
+<|visual token 015917|>
+<|visual token 015918|>
+<|visual token 015919|>
+<|visual token 015920|>
+<|visual token 015921|>
+<|visual token 015922|>
+<|visual token 015923|>
+<|visual token 015924|>
+<|visual token 015925|>
+<|visual token 015926|>
+<|visual token 015927|>
+<|visual token 015928|>
+<|visual token 015929|>
+<|visual token 015930|>
+<|visual token 015931|>
+<|visual token 015932|>
+<|visual token 015933|>
+<|visual token 015934|>
+<|visual token 015935|>
+<|visual token 015936|>
+<|visual token 015937|>
+<|visual token 015938|>
+<|visual token 015939|>
+<|visual token 015940|>
+<|visual token 015941|>
+<|visual token 015942|>
+<|visual token 015943|>
+<|visual token 015944|>
+<|visual token 015945|>
+<|visual token 015946|>
+<|visual token 015947|>
+<|visual token 015948|>
+<|visual token 015949|>
+<|visual token 015950|>
+<|visual token 015951|>
+<|visual token 015952|>
+<|visual token 015953|>
+<|visual token 015954|>
+<|visual token 015955|>
+<|visual token 015956|>
+<|visual token 015957|>
+<|visual token 015958|>
+<|visual token 015959|>
+<|visual token 015960|>
+<|visual token 015961|>
+<|visual token 015962|>
+<|visual token 015963|>
+<|visual token 015964|>
+<|visual token 015965|>
+<|visual token 015966|>
+<|visual token 015967|>
+<|visual token 015968|>
+<|visual token 015969|>
+<|visual token 015970|>
+<|visual token 015971|>
+<|visual token 015972|>
+<|visual token 015973|>
+<|visual token 015974|>
+<|visual token 015975|>
+<|visual token 015976|>
+<|visual token 015977|>
+<|visual token 015978|>
+<|visual token 015979|>
+<|visual token 015980|>
+<|visual token 015981|>
+<|visual token 015982|>
+<|visual token 015983|>
+<|visual token 015984|>
+<|visual token 015985|>
+<|visual token 015986|>
+<|visual token 015987|>
+<|visual token 015988|>
+<|visual token 015989|>
+<|visual token 015990|>
+<|visual token 015991|>
+<|visual token 015992|>
+<|visual token 015993|>
+<|visual token 015994|>
+<|visual token 015995|>
+<|visual token 015996|>
+<|visual token 015997|>
+<|visual token 015998|>
+<|visual token 015999|>
+<|visual token 016000|>
+<|visual token 016001|>
+<|visual token 016002|>
+<|visual token 016003|>
+<|visual token 016004|>
+<|visual token 016005|>
+<|visual token 016006|>
+<|visual token 016007|>
+<|visual token 016008|>
+<|visual token 016009|>
+<|visual token 016010|>
+<|visual token 016011|>
+<|visual token 016012|>
+<|visual token 016013|>
+<|visual token 016014|>
+<|visual token 016015|>
+<|visual token 016016|>
+<|visual token 016017|>
+<|visual token 016018|>
+<|visual token 016019|>
+<|visual token 016020|>
+<|visual token 016021|>
+<|visual token 016022|>
+<|visual token 016023|>
+<|visual token 016024|>
+<|visual token 016025|>
+<|visual token 016026|>
+<|visual token 016027|>
+<|visual token 016028|>
+<|visual token 016029|>
+<|visual token 016030|>
+<|visual token 016031|>
+<|visual token 016032|>
+<|visual token 016033|>
+<|visual token 016034|>
+<|visual token 016035|>
+<|visual token 016036|>
+<|visual token 016037|>
+<|visual token 016038|>
+<|visual token 016039|>
+<|visual token 016040|>
+<|visual token 016041|>
+<|visual token 016042|>
+<|visual token 016043|>
+<|visual token 016044|>
+<|visual token 016045|>
+<|visual token 016046|>
+<|visual token 016047|>
+<|visual token 016048|>
+<|visual token 016049|>
+<|visual token 016050|>
+<|visual token 016051|>
+<|visual token 016052|>
+<|visual token 016053|>
+<|visual token 016054|>
+<|visual token 016055|>
+<|visual token 016056|>
+<|visual token 016057|>
+<|visual token 016058|>
+<|visual token 016059|>
+<|visual token 016060|>
+<|visual token 016061|>
+<|visual token 016062|>
+<|visual token 016063|>
+<|visual token 016064|>
+<|visual token 016065|>
+<|visual token 016066|>
+<|visual token 016067|>
+<|visual token 016068|>
+<|visual token 016069|>
+<|visual token 016070|>
+<|visual token 016071|>
+<|visual token 016072|>
+<|visual token 016073|>
+<|visual token 016074|>
+<|visual token 016075|>
+<|visual token 016076|>
+<|visual token 016077|>
+<|visual token 016078|>
+<|visual token 016079|>
+<|visual token 016080|>
+<|visual token 016081|>
+<|visual token 016082|>
+<|visual token 016083|>
+<|visual token 016084|>
+<|visual token 016085|>
+<|visual token 016086|>
+<|visual token 016087|>
+<|visual token 016088|>
+<|visual token 016089|>
+<|visual token 016090|>
+<|visual token 016091|>
+<|visual token 016092|>
+<|visual token 016093|>
+<|visual token 016094|>
+<|visual token 016095|>
+<|visual token 016096|>
+<|visual token 016097|>
+<|visual token 016098|>
+<|visual token 016099|>
+<|visual token 016100|>
+<|visual token 016101|>
+<|visual token 016102|>
+<|visual token 016103|>
+<|visual token 016104|>
+<|visual token 016105|>
+<|visual token 016106|>
+<|visual token 016107|>
+<|visual token 016108|>
+<|visual token 016109|>
+<|visual token 016110|>
+<|visual token 016111|>
+<|visual token 016112|>
+<|visual token 016113|>
+<|visual token 016114|>
+<|visual token 016115|>
+<|visual token 016116|>
+<|visual token 016117|>
+<|visual token 016118|>
+<|visual token 016119|>
+<|visual token 016120|>
+<|visual token 016121|>
+<|visual token 016122|>
+<|visual token 016123|>
+<|visual token 016124|>
+<|visual token 016125|>
+<|visual token 016126|>
+<|visual token 016127|>
+<|visual token 016128|>
+<|visual token 016129|>
+<|visual token 016130|>
+<|visual token 016131|>
+<|visual token 016132|>
+<|visual token 016133|>
+<|visual token 016134|>
+<|visual token 016135|>
+<|visual token 016136|>
+<|visual token 016137|>
+<|visual token 016138|>
+<|visual token 016139|>
+<|visual token 016140|>
+<|visual token 016141|>
+<|visual token 016142|>
+<|visual token 016143|>
+<|visual token 016144|>
+<|visual token 016145|>
+<|visual token 016146|>
+<|visual token 016147|>
+<|visual token 016148|>
+<|visual token 016149|>
+<|visual token 016150|>
+<|visual token 016151|>
+<|visual token 016152|>
+<|visual token 016153|>
+<|visual token 016154|>
+<|visual token 016155|>
+<|visual token 016156|>
+<|visual token 016157|>
+<|visual token 016158|>
+<|visual token 016159|>
+<|visual token 016160|>
+<|visual token 016161|>
+<|visual token 016162|>
+<|visual token 016163|>
+<|visual token 016164|>
+<|visual token 016165|>
+<|visual token 016166|>
+<|visual token 016167|>
+<|visual token 016168|>
+<|visual token 016169|>
+<|visual token 016170|>
+<|visual token 016171|>
+<|visual token 016172|>
+<|visual token 016173|>
+<|visual token 016174|>
+<|visual token 016175|>
+<|visual token 016176|>
+<|visual token 016177|>
+<|visual token 016178|>
+<|visual token 016179|>
+<|visual token 016180|>
+<|visual token 016181|>
+<|visual token 016182|>
+<|visual token 016183|>
+<|visual token 016184|>
+<|visual token 016185|>
+<|visual token 016186|>
+<|visual token 016187|>
+<|visual token 016188|>
+<|visual token 016189|>
+<|visual token 016190|>
+<|visual token 016191|>
+<|visual token 016192|>
+<|visual token 016193|>
+<|visual token 016194|>
+<|visual token 016195|>
+<|visual token 016196|>
+<|visual token 016197|>
+<|visual token 016198|>
+<|visual token 016199|>
+<|visual token 016200|>
+<|visual token 016201|>
+<|visual token 016202|>
+<|visual token 016203|>
+<|visual token 016204|>
+<|visual token 016205|>
+<|visual token 016206|>
+<|visual token 016207|>
+<|visual token 016208|>
+<|visual token 016209|>
+<|visual token 016210|>
+<|visual token 016211|>
+<|visual token 016212|>
+<|visual token 016213|>
+<|visual token 016214|>
+<|visual token 016215|>
+<|visual token 016216|>
+<|visual token 016217|>
+<|visual token 016218|>
+<|visual token 016219|>
+<|visual token 016220|>
+<|visual token 016221|>
+<|visual token 016222|>
+<|visual token 016223|>
+<|visual token 016224|>
+<|visual token 016225|>
+<|visual token 016226|>
+<|visual token 016227|>
+<|visual token 016228|>
+<|visual token 016229|>
+<|visual token 016230|>
+<|visual token 016231|>
+<|visual token 016232|>
+<|visual token 016233|>
+<|visual token 016234|>
+<|visual token 016235|>
+<|visual token 016236|>
+<|visual token 016237|>
+<|visual token 016238|>
+<|visual token 016239|>
+<|visual token 016240|>
+<|visual token 016241|>
+<|visual token 016242|>
+<|visual token 016243|>
+<|visual token 016244|>
+<|visual token 016245|>
+<|visual token 016246|>
+<|visual token 016247|>
+<|visual token 016248|>
+<|visual token 016249|>
+<|visual token 016250|>
+<|visual token 016251|>
+<|visual token 016252|>
+<|visual token 016253|>
+<|visual token 016254|>
+<|visual token 016255|>
+<|visual token 016256|>
+<|visual token 016257|>
+<|visual token 016258|>
+<|visual token 016259|>
+<|visual token 016260|>
+<|visual token 016261|>
+<|visual token 016262|>
+<|visual token 016263|>
+<|visual token 016264|>
+<|visual token 016265|>
+<|visual token 016266|>
+<|visual token 016267|>
+<|visual token 016268|>
+<|visual token 016269|>
+<|visual token 016270|>
+<|visual token 016271|>
+<|visual token 016272|>
+<|visual token 016273|>
+<|visual token 016274|>
+<|visual token 016275|>
+<|visual token 016276|>
+<|visual token 016277|>
+<|visual token 016278|>
+<|visual token 016279|>
+<|visual token 016280|>
+<|visual token 016281|>
+<|visual token 016282|>
+<|visual token 016283|>
+<|visual token 016284|>
+<|visual token 016285|>
+<|visual token 016286|>
+<|visual token 016287|>
+<|visual token 016288|>
+<|visual token 016289|>
+<|visual token 016290|>
+<|visual token 016291|>
+<|visual token 016292|>
+<|visual token 016293|>
+<|visual token 016294|>
+<|visual token 016295|>
+<|visual token 016296|>
+<|visual token 016297|>
+<|visual token 016298|>
+<|visual token 016299|>
+<|visual token 016300|>
+<|visual token 016301|>
+<|visual token 016302|>
+<|visual token 016303|>
+<|visual token 016304|>
+<|visual token 016305|>
+<|visual token 016306|>
+<|visual token 016307|>
+<|visual token 016308|>
+<|visual token 016309|>
+<|visual token 016310|>
+<|visual token 016311|>
+<|visual token 016312|>
+<|visual token 016313|>
+<|visual token 016314|>
+<|visual token 016315|>
+<|visual token 016316|>
+<|visual token 016317|>
+<|visual token 016318|>
+<|visual token 016319|>
+<|visual token 016320|>
+<|visual token 016321|>
+<|visual token 016322|>
+<|visual token 016323|>
+<|visual token 016324|>
+<|visual token 016325|>
+<|visual token 016326|>
+<|visual token 016327|>
+<|visual token 016328|>
+<|visual token 016329|>
+<|visual token 016330|>
+<|visual token 016331|>
+<|visual token 016332|>
+<|visual token 016333|>
+<|visual token 016334|>
+<|visual token 016335|>
+<|visual token 016336|>
+<|visual token 016337|>
+<|visual token 016338|>
+<|visual token 016339|>
+<|visual token 016340|>
+<|visual token 016341|>
+<|visual token 016342|>
+<|visual token 016343|>
+<|visual token 016344|>
+<|visual token 016345|>
+<|visual token 016346|>
+<|visual token 016347|>
+<|visual token 016348|>
+<|visual token 016349|>
+<|visual token 016350|>
+<|visual token 016351|>
+<|visual token 016352|>
+<|visual token 016353|>
+<|visual token 016354|>
+<|visual token 016355|>
+<|visual token 016356|>
+<|visual token 016357|>
+<|visual token 016358|>
+<|visual token 016359|>
+<|visual token 016360|>
+<|visual token 016361|>
+<|visual token 016362|>
+<|visual token 016363|>
+<|visual token 016364|>
+<|visual token 016365|>
+<|visual token 016366|>
+<|visual token 016367|>
+<|visual token 016368|>
+<|visual token 016369|>
+<|visual token 016370|>
+<|visual token 016371|>
+<|visual token 016372|>
+<|visual token 016373|>
+<|visual token 016374|>
+<|visual token 016375|>
+<|visual token 016376|>
+<|visual token 016377|>
+<|visual token 016378|>
+<|visual token 016379|>
+<|visual token 016380|>
+<|visual token 016381|>
+<|visual token 016382|>
+<|visual token 016383|>
+<|visual token 016384|>
+<|visual token 016385|>
+<|visual token 016386|>
+<|visual token 016387|>
+<|visual token 016388|>
+<|visual token 016389|>
+<|visual token 016390|>
+<|visual token 016391|>
+<|visual token 016392|>
+<|visual token 016393|>
+<|visual token 016394|>
+<|visual token 016395|>
+<|visual token 016396|>
+<|visual token 016397|>
+<|visual token 016398|>
+<|visual token 016399|>
+<|visual token 016400|>
+<|visual token 016401|>
+<|visual token 016402|>
+<|visual token 016403|>
+<|visual token 016404|>
+<|visual token 016405|>
+<|visual token 016406|>
+<|visual token 016407|>
+<|visual token 016408|>
+<|visual token 016409|>
+<|visual token 016410|>
+<|visual token 016411|>
+<|visual token 016412|>
+<|visual token 016413|>
+<|visual token 016414|>
+<|visual token 016415|>
+<|visual token 016416|>
+<|visual token 016417|>
+<|visual token 016418|>
+<|visual token 016419|>
+<|visual token 016420|>
+<|visual token 016421|>
+<|visual token 016422|>
+<|visual token 016423|>
+<|visual token 016424|>
+<|visual token 016425|>
+<|visual token 016426|>
+<|visual token 016427|>
+<|visual token 016428|>
+<|visual token 016429|>
+<|visual token 016430|>
+<|visual token 016431|>
+<|visual token 016432|>
+<|visual token 016433|>
+<|visual token 016434|>
+<|visual token 016435|>
+<|visual token 016436|>
+<|visual token 016437|>
+<|visual token 016438|>
+<|visual token 016439|>
+<|visual token 016440|>
+<|visual token 016441|>
+<|visual token 016442|>
+<|visual token 016443|>
+<|visual token 016444|>
+<|visual token 016445|>
+<|visual token 016446|>
+<|visual token 016447|>
+<|visual token 016448|>
+<|visual token 016449|>
+<|visual token 016450|>
+<|visual token 016451|>
+<|visual token 016452|>
+<|visual token 016453|>
+<|visual token 016454|>
+<|visual token 016455|>
+<|visual token 016456|>
+<|visual token 016457|>
+<|visual token 016458|>
+<|visual token 016459|>
+<|visual token 016460|>
+<|visual token 016461|>
+<|visual token 016462|>
+<|visual token 016463|>
+<|visual token 016464|>
+<|visual token 016465|>
+<|visual token 016466|>
+<|visual token 016467|>
+<|visual token 016468|>
+<|visual token 016469|>
+<|visual token 016470|>
+<|visual token 016471|>
+<|visual token 016472|>
+<|visual token 016473|>
+<|visual token 016474|>
+<|visual token 016475|>
+<|visual token 016476|>
+<|visual token 016477|>
+<|visual token 016478|>
+<|visual token 016479|>
+<|visual token 016480|>
+<|visual token 016481|>
+<|visual token 016482|>
+<|visual token 016483|>
+<|visual token 016484|>
+<|visual token 016485|>
+<|visual token 016486|>
+<|visual token 016487|>
+<|visual token 016488|>
+<|visual token 016489|>
+<|visual token 016490|>
+<|visual token 016491|>
+<|visual token 016492|>
+<|visual token 016493|>
+<|visual token 016494|>
+<|visual token 016495|>
+<|visual token 016496|>
+<|visual token 016497|>
+<|visual token 016498|>
+<|visual token 016499|>
+<|visual token 016500|>
+<|visual token 016501|>
+<|visual token 016502|>
+<|visual token 016503|>
+<|visual token 016504|>
+<|visual token 016505|>
+<|visual token 016506|>
+<|visual token 016507|>
+<|visual token 016508|>
+<|visual token 016509|>
+<|visual token 016510|>
+<|visual token 016511|>
+<|visual token 016512|>
+<|visual token 016513|>
+<|visual token 016514|>
+<|visual token 016515|>
+<|visual token 016516|>
+<|visual token 016517|>
+<|visual token 016518|>
+<|visual token 016519|>
+<|visual token 016520|>
+<|visual token 016521|>
+<|visual token 016522|>
+<|visual token 016523|>
+<|visual token 016524|>
+<|visual token 016525|>
+<|visual token 016526|>
+<|visual token 016527|>
+<|visual token 016528|>
+<|visual token 016529|>
+<|visual token 016530|>
+<|visual token 016531|>
+<|visual token 016532|>
+<|visual token 016533|>
+<|visual token 016534|>
+<|visual token 016535|>
+<|visual token 016536|>
+<|visual token 016537|>
+<|visual token 016538|>
+<|visual token 016539|>
+<|visual token 016540|>
+<|visual token 016541|>
+<|visual token 016542|>
+<|visual token 016543|>
+<|visual token 016544|>
+<|visual token 016545|>
+<|visual token 016546|>
+<|visual token 016547|>
+<|visual token 016548|>
+<|visual token 016549|>
+<|visual token 016550|>
+<|visual token 016551|>
+<|visual token 016552|>
+<|visual token 016553|>
+<|visual token 016554|>
+<|visual token 016555|>
+<|visual token 016556|>
+<|visual token 016557|>
+<|visual token 016558|>
+<|visual token 016559|>
+<|visual token 016560|>
+<|visual token 016561|>
+<|visual token 016562|>
+<|visual token 016563|>
+<|visual token 016564|>
+<|visual token 016565|>
+<|visual token 016566|>
+<|visual token 016567|>
+<|visual token 016568|>
+<|visual token 016569|>
+<|visual token 016570|>
+<|visual token 016571|>
+<|visual token 016572|>
+<|visual token 016573|>
+<|visual token 016574|>
+<|visual token 016575|>
+<|visual token 016576|>
+<|visual token 016577|>
+<|visual token 016578|>
+<|visual token 016579|>
+<|visual token 016580|>
+<|visual token 016581|>
+<|visual token 016582|>
+<|visual token 016583|>
+<|visual token 016584|>
+<|visual token 016585|>
+<|visual token 016586|>
+<|visual token 016587|>
+<|visual token 016588|>
+<|visual token 016589|>
+<|visual token 016590|>
+<|visual token 016591|>
+<|visual token 016592|>
+<|visual token 016593|>
+<|visual token 016594|>
+<|visual token 016595|>
+<|visual token 016596|>
+<|visual token 016597|>
+<|visual token 016598|>
+<|visual token 016599|>
+<|visual token 016600|>
+<|visual token 016601|>
+<|visual token 016602|>
+<|visual token 016603|>
+<|visual token 016604|>
+<|visual token 016605|>
+<|visual token 016606|>
+<|visual token 016607|>
+<|visual token 016608|>
+<|visual token 016609|>
+<|visual token 016610|>
+<|visual token 016611|>
+<|visual token 016612|>
+<|visual token 016613|>
+<|visual token 016614|>
+<|visual token 016615|>
+<|visual token 016616|>
+<|visual token 016617|>
+<|visual token 016618|>
+<|visual token 016619|>
+<|visual token 016620|>
+<|visual token 016621|>
+<|visual token 016622|>
+<|visual token 016623|>
+<|visual token 016624|>
+<|visual token 016625|>
+<|visual token 016626|>
+<|visual token 016627|>
+<|visual token 016628|>
+<|visual token 016629|>
+<|visual token 016630|>
+<|visual token 016631|>
+<|visual token 016632|>
+<|visual token 016633|>
+<|visual token 016634|>
+<|visual token 016635|>
+<|visual token 016636|>
+<|visual token 016637|>
+<|visual token 016638|>
+<|visual token 016639|>
+<|visual token 016640|>
+<|visual token 016641|>
+<|visual token 016642|>
+<|visual token 016643|>
+<|visual token 016644|>
+<|visual token 016645|>
+<|visual token 016646|>
+<|visual token 016647|>
+<|visual token 016648|>
+<|visual token 016649|>
+<|visual token 016650|>
+<|visual token 016651|>
+<|visual token 016652|>
+<|visual token 016653|>
+<|visual token 016654|>
+<|visual token 016655|>
+<|visual token 016656|>
+<|visual token 016657|>
+<|visual token 016658|>
+<|visual token 016659|>
+<|visual token 016660|>
+<|visual token 016661|>
+<|visual token 016662|>
+<|visual token 016663|>
+<|visual token 016664|>
+<|visual token 016665|>
+<|visual token 016666|>
+<|visual token 016667|>
+<|visual token 016668|>
+<|visual token 016669|>
+<|visual token 016670|>
+<|visual token 016671|>
+<|visual token 016672|>
+<|visual token 016673|>
+<|visual token 016674|>
+<|visual token 016675|>
+<|visual token 016676|>
+<|visual token 016677|>
+<|visual token 016678|>
+<|visual token 016679|>
+<|visual token 016680|>
+<|visual token 016681|>
+<|visual token 016682|>
+<|visual token 016683|>
+<|visual token 016684|>
+<|visual token 016685|>
+<|visual token 016686|>
+<|visual token 016687|>
+<|visual token 016688|>
+<|visual token 016689|>
+<|visual token 016690|>
+<|visual token 016691|>
+<|visual token 016692|>
+<|visual token 016693|>
+<|visual token 016694|>
+<|visual token 016695|>
+<|visual token 016696|>
+<|visual token 016697|>
+<|visual token 016698|>
+<|visual token 016699|>
+<|visual token 016700|>
+<|visual token 016701|>
+<|visual token 016702|>
+<|visual token 016703|>
+<|visual token 016704|>
+<|visual token 016705|>
+<|visual token 016706|>
+<|visual token 016707|>
+<|visual token 016708|>
+<|visual token 016709|>
+<|visual token 016710|>
+<|visual token 016711|>
+<|visual token 016712|>
+<|visual token 016713|>
+<|visual token 016714|>
+<|visual token 016715|>
+<|visual token 016716|>
+<|visual token 016717|>
+<|visual token 016718|>
+<|visual token 016719|>
+<|visual token 016720|>
+<|visual token 016721|>
+<|visual token 016722|>
+<|visual token 016723|>
+<|visual token 016724|>
+<|visual token 016725|>
+<|visual token 016726|>
+<|visual token 016727|>
+<|visual token 016728|>
+<|visual token 016729|>
+<|visual token 016730|>
+<|visual token 016731|>
+<|visual token 016732|>
+<|visual token 016733|>
+<|visual token 016734|>
+<|visual token 016735|>
+<|visual token 016736|>
+<|visual token 016737|>
+<|visual token 016738|>
+<|visual token 016739|>
+<|visual token 016740|>
+<|visual token 016741|>
+<|visual token 016742|>
+<|visual token 016743|>
+<|visual token 016744|>
+<|visual token 016745|>
+<|visual token 016746|>
+<|visual token 016747|>
+<|visual token 016748|>
+<|visual token 016749|>
+<|visual token 016750|>
+<|visual token 016751|>
+<|visual token 016752|>
+<|visual token 016753|>
+<|visual token 016754|>
+<|visual token 016755|>
+<|visual token 016756|>
+<|visual token 016757|>
+<|visual token 016758|>
+<|visual token 016759|>
+<|visual token 016760|>
+<|visual token 016761|>
+<|visual token 016762|>
+<|visual token 016763|>
+<|visual token 016764|>
+<|visual token 016765|>
+<|visual token 016766|>
+<|visual token 016767|>
+<|visual token 016768|>
+<|visual token 016769|>
+<|visual token 016770|>
+<|visual token 016771|>
+<|visual token 016772|>
+<|visual token 016773|>
+<|visual token 016774|>
+<|visual token 016775|>
+<|visual token 016776|>
+<|visual token 016777|>
+<|visual token 016778|>
+<|visual token 016779|>
+<|visual token 016780|>
+<|visual token 016781|>
+<|visual token 016782|>
+<|visual token 016783|>
+<|visual token 016784|>
+<|visual token 016785|>
+<|visual token 016786|>
+<|visual token 016787|>
+<|visual token 016788|>
+<|visual token 016789|>
+<|visual token 016790|>
+<|visual token 016791|>
+<|visual token 016792|>
+<|visual token 016793|>
+<|visual token 016794|>
+<|visual token 016795|>
+<|visual token 016796|>
+<|visual token 016797|>
+<|visual token 016798|>
+<|visual token 016799|>
+<|visual token 016800|>
+<|visual token 016801|>
+<|visual token 016802|>
+<|visual token 016803|>
+<|visual token 016804|>
+<|visual token 016805|>
+<|visual token 016806|>
+<|visual token 016807|>
+<|visual token 016808|>
+<|visual token 016809|>
+<|visual token 016810|>
+<|visual token 016811|>
+<|visual token 016812|>
+<|visual token 016813|>
+<|visual token 016814|>
+<|visual token 016815|>
+<|visual token 016816|>
+<|visual token 016817|>
+<|visual token 016818|>
+<|visual token 016819|>
+<|visual token 016820|>
+<|visual token 016821|>
+<|visual token 016822|>
+<|visual token 016823|>
+<|visual token 016824|>
+<|visual token 016825|>
+<|visual token 016826|>
+<|visual token 016827|>
+<|visual token 016828|>
+<|visual token 016829|>
+<|visual token 016830|>
+<|visual token 016831|>
+<|visual token 016832|>
+<|visual token 016833|>
+<|visual token 016834|>
+<|visual token 016835|>
+<|visual token 016836|>
+<|visual token 016837|>
+<|visual token 016838|>
+<|visual token 016839|>
+<|visual token 016840|>
+<|visual token 016841|>
+<|visual token 016842|>
+<|visual token 016843|>
+<|visual token 016844|>
+<|visual token 016845|>
+<|visual token 016846|>
+<|visual token 016847|>
+<|visual token 016848|>
+<|visual token 016849|>
+<|visual token 016850|>
+<|visual token 016851|>
+<|visual token 016852|>
+<|visual token 016853|>
+<|visual token 016854|>
+<|visual token 016855|>
+<|visual token 016856|>
+<|visual token 016857|>
+<|visual token 016858|>
+<|visual token 016859|>
+<|visual token 016860|>
+<|visual token 016861|>
+<|visual token 016862|>
+<|visual token 016863|>
+<|visual token 016864|>
+<|visual token 016865|>
+<|visual token 016866|>
+<|visual token 016867|>
+<|visual token 016868|>
+<|visual token 016869|>
+<|visual token 016870|>
+<|visual token 016871|>
+<|visual token 016872|>
+<|visual token 016873|>
+<|visual token 016874|>
+<|visual token 016875|>
+<|visual token 016876|>
+<|visual token 016877|>
+<|visual token 016878|>
+<|visual token 016879|>
+<|visual token 016880|>
+<|visual token 016881|>
+<|visual token 016882|>
+<|visual token 016883|>
+<|visual token 016884|>
+<|visual token 016885|>
+<|visual token 016886|>
+<|visual token 016887|>
+<|visual token 016888|>
+<|visual token 016889|>
+<|visual token 016890|>
+<|visual token 016891|>
+<|visual token 016892|>
+<|visual token 016893|>
+<|visual token 016894|>
+<|visual token 016895|>
+<|visual token 016896|>
+<|visual token 016897|>
+<|visual token 016898|>
+<|visual token 016899|>
+<|visual token 016900|>
+<|visual token 016901|>
+<|visual token 016902|>
+<|visual token 016903|>
+<|visual token 016904|>
+<|visual token 016905|>
+<|visual token 016906|>
+<|visual token 016907|>
+<|visual token 016908|>
+<|visual token 016909|>
+<|visual token 016910|>
+<|visual token 016911|>
+<|visual token 016912|>
+<|visual token 016913|>
+<|visual token 016914|>
+<|visual token 016915|>
+<|visual token 016916|>
+<|visual token 016917|>
+<|visual token 016918|>
+<|visual token 016919|>
+<|visual token 016920|>
+<|visual token 016921|>
+<|visual token 016922|>
+<|visual token 016923|>
+<|visual token 016924|>
+<|visual token 016925|>
+<|visual token 016926|>
+<|visual token 016927|>
+<|visual token 016928|>
+<|visual token 016929|>
+<|visual token 016930|>
+<|visual token 016931|>
+<|visual token 016932|>
+<|visual token 016933|>
+<|visual token 016934|>
+<|visual token 016935|>
+<|visual token 016936|>
+<|visual token 016937|>
+<|visual token 016938|>
+<|visual token 016939|>
+<|visual token 016940|>
+<|visual token 016941|>
+<|visual token 016942|>
+<|visual token 016943|>
+<|visual token 016944|>
+<|visual token 016945|>
+<|visual token 016946|>
+<|visual token 016947|>
+<|visual token 016948|>
+<|visual token 016949|>
+<|visual token 016950|>
+<|visual token 016951|>
+<|visual token 016952|>
+<|visual token 016953|>
+<|visual token 016954|>
+<|visual token 016955|>
+<|visual token 016956|>
+<|visual token 016957|>
+<|visual token 016958|>
+<|visual token 016959|>
+<|visual token 016960|>
+<|visual token 016961|>
+<|visual token 016962|>
+<|visual token 016963|>
+<|visual token 016964|>
+<|visual token 016965|>
+<|visual token 016966|>
+<|visual token 016967|>
+<|visual token 016968|>
+<|visual token 016969|>
+<|visual token 016970|>
+<|visual token 016971|>
+<|visual token 016972|>
+<|visual token 016973|>
+<|visual token 016974|>
+<|visual token 016975|>
+<|visual token 016976|>
+<|visual token 016977|>
+<|visual token 016978|>
+<|visual token 016979|>
+<|visual token 016980|>
+<|visual token 016981|>
+<|visual token 016982|>
+<|visual token 016983|>
+<|visual token 016984|>
+<|visual token 016985|>
+<|visual token 016986|>
+<|visual token 016987|>
+<|visual token 016988|>
+<|visual token 016989|>
+<|visual token 016990|>
+<|visual token 016991|>
+<|visual token 016992|>
+<|visual token 016993|>
+<|visual token 016994|>
+<|visual token 016995|>
+<|visual token 016996|>
+<|visual token 016997|>
+<|visual token 016998|>
+<|visual token 016999|>
+<|visual token 017000|>
+<|visual token 017001|>
+<|visual token 017002|>
+<|visual token 017003|>
+<|visual token 017004|>
+<|visual token 017005|>
+<|visual token 017006|>
+<|visual token 017007|>
+<|visual token 017008|>
+<|visual token 017009|>
+<|visual token 017010|>
+<|visual token 017011|>
+<|visual token 017012|>
+<|visual token 017013|>
+<|visual token 017014|>
+<|visual token 017015|>
+<|visual token 017016|>
+<|visual token 017017|>
+<|visual token 017018|>
+<|visual token 017019|>
+<|visual token 017020|>
+<|visual token 017021|>
+<|visual token 017022|>
+<|visual token 017023|>
+<|visual token 017024|>
+<|visual token 017025|>
+<|visual token 017026|>
+<|visual token 017027|>
+<|visual token 017028|>
+<|visual token 017029|>
+<|visual token 017030|>
+<|visual token 017031|>
+<|visual token 017032|>
+<|visual token 017033|>
+<|visual token 017034|>
+<|visual token 017035|>
+<|visual token 017036|>
+<|visual token 017037|>
+<|visual token 017038|>
+<|visual token 017039|>
+<|visual token 017040|>
+<|visual token 017041|>
+<|visual token 017042|>
+<|visual token 017043|>
+<|visual token 017044|>
+<|visual token 017045|>
+<|visual token 017046|>
+<|visual token 017047|>
+<|visual token 017048|>
+<|visual token 017049|>
+<|visual token 017050|>
+<|visual token 017051|>
+<|visual token 017052|>
+<|visual token 017053|>
+<|visual token 017054|>
+<|visual token 017055|>
+<|visual token 017056|>
+<|visual token 017057|>
+<|visual token 017058|>
+<|visual token 017059|>
+<|visual token 017060|>
+<|visual token 017061|>
+<|visual token 017062|>
+<|visual token 017063|>
+<|visual token 017064|>
+<|visual token 017065|>
+<|visual token 017066|>
+<|visual token 017067|>
+<|visual token 017068|>
+<|visual token 017069|>
+<|visual token 017070|>
+<|visual token 017071|>
+<|visual token 017072|>
+<|visual token 017073|>
+<|visual token 017074|>
+<|visual token 017075|>
+<|visual token 017076|>
+<|visual token 017077|>
+<|visual token 017078|>
+<|visual token 017079|>
+<|visual token 017080|>
+<|visual token 017081|>
+<|visual token 017082|>
+<|visual token 017083|>
+<|visual token 017084|>
+<|visual token 017085|>
+<|visual token 017086|>
+<|visual token 017087|>
+<|visual token 017088|>
+<|visual token 017089|>
+<|visual token 017090|>
+<|visual token 017091|>
+<|visual token 017092|>
+<|visual token 017093|>
+<|visual token 017094|>
+<|visual token 017095|>
+<|visual token 017096|>
+<|visual token 017097|>
+<|visual token 017098|>
+<|visual token 017099|>
+<|visual token 017100|>
+<|visual token 017101|>
+<|visual token 017102|>
+<|visual token 017103|>
+<|visual token 017104|>
+<|visual token 017105|>
+<|visual token 017106|>
+<|visual token 017107|>
+<|visual token 017108|>
+<|visual token 017109|>
+<|visual token 017110|>
+<|visual token 017111|>
+<|visual token 017112|>
+<|visual token 017113|>
+<|visual token 017114|>
+<|visual token 017115|>
+<|visual token 017116|>
+<|visual token 017117|>
+<|visual token 017118|>
+<|visual token 017119|>
+<|visual token 017120|>
+<|visual token 017121|>
+<|visual token 017122|>
+<|visual token 017123|>
+<|visual token 017124|>
+<|visual token 017125|>
+<|visual token 017126|>
+<|visual token 017127|>
+<|visual token 017128|>
+<|visual token 017129|>
+<|visual token 017130|>
+<|visual token 017131|>
+<|visual token 017132|>
+<|visual token 017133|>
+<|visual token 017134|>
+<|visual token 017135|>
+<|visual token 017136|>
+<|visual token 017137|>
+<|visual token 017138|>
+<|visual token 017139|>
+<|visual token 017140|>
+<|visual token 017141|>
+<|visual token 017142|>
+<|visual token 017143|>
+<|visual token 017144|>
+<|visual token 017145|>
+<|visual token 017146|>
+<|visual token 017147|>
+<|visual token 017148|>
+<|visual token 017149|>
+<|visual token 017150|>
+<|visual token 017151|>
+<|visual token 017152|>
+<|visual token 017153|>
+<|visual token 017154|>
+<|visual token 017155|>
+<|visual token 017156|>
+<|visual token 017157|>
+<|visual token 017158|>
+<|visual token 017159|>
+<|visual token 017160|>
+<|visual token 017161|>
+<|visual token 017162|>
+<|visual token 017163|>
+<|visual token 017164|>
+<|visual token 017165|>
+<|visual token 017166|>
+<|visual token 017167|>
+<|visual token 017168|>
+<|visual token 017169|>
+<|visual token 017170|>
+<|visual token 017171|>
+<|visual token 017172|>
+<|visual token 017173|>
+<|visual token 017174|>
+<|visual token 017175|>
+<|visual token 017176|>
+<|visual token 017177|>
+<|visual token 017178|>
+<|visual token 017179|>
+<|visual token 017180|>
+<|visual token 017181|>
+<|visual token 017182|>
+<|visual token 017183|>
+<|visual token 017184|>
+<|visual token 017185|>
+<|visual token 017186|>
+<|visual token 017187|>
+<|visual token 017188|>
+<|visual token 017189|>
+<|visual token 017190|>
+<|visual token 017191|>
+<|visual token 017192|>
+<|visual token 017193|>
+<|visual token 017194|>
+<|visual token 017195|>
+<|visual token 017196|>
+<|visual token 017197|>
+<|visual token 017198|>
+<|visual token 017199|>
+<|visual token 017200|>
+<|visual token 017201|>
+<|visual token 017202|>
+<|visual token 017203|>
+<|visual token 017204|>
+<|visual token 017205|>
+<|visual token 017206|>
+<|visual token 017207|>
+<|visual token 017208|>
+<|visual token 017209|>
+<|visual token 017210|>
+<|visual token 017211|>
+<|visual token 017212|>
+<|visual token 017213|>
+<|visual token 017214|>
+<|visual token 017215|>
+<|visual token 017216|>
+<|visual token 017217|>
+<|visual token 017218|>
+<|visual token 017219|>
+<|visual token 017220|>
+<|visual token 017221|>
+<|visual token 017222|>
+<|visual token 017223|>
+<|visual token 017224|>
+<|visual token 017225|>
+<|visual token 017226|>
+<|visual token 017227|>
+<|visual token 017228|>
+<|visual token 017229|>
+<|visual token 017230|>
+<|visual token 017231|>
+<|visual token 017232|>
+<|visual token 017233|>
+<|visual token 017234|>
+<|visual token 017235|>
+<|visual token 017236|>
+<|visual token 017237|>
+<|visual token 017238|>
+<|visual token 017239|>
+<|visual token 017240|>
+<|visual token 017241|>
+<|visual token 017242|>
+<|visual token 017243|>
+<|visual token 017244|>
+<|visual token 017245|>
+<|visual token 017246|>
+<|visual token 017247|>
+<|visual token 017248|>
+<|visual token 017249|>
+<|visual token 017250|>
+<|visual token 017251|>
+<|visual token 017252|>
+<|visual token 017253|>
+<|visual token 017254|>
+<|visual token 017255|>
+<|visual token 017256|>
+<|visual token 017257|>
+<|visual token 017258|>
+<|visual token 017259|>
+<|visual token 017260|>
+<|visual token 017261|>
+<|visual token 017262|>
+<|visual token 017263|>
+<|visual token 017264|>
+<|visual token 017265|>
+<|visual token 017266|>
+<|visual token 017267|>
+<|visual token 017268|>
+<|visual token 017269|>
+<|visual token 017270|>
+<|visual token 017271|>
+<|visual token 017272|>
+<|visual token 017273|>
+<|visual token 017274|>
+<|visual token 017275|>
+<|visual token 017276|>
+<|visual token 017277|>
+<|visual token 017278|>
+<|visual token 017279|>
+<|visual token 017280|>
+<|visual token 017281|>
+<|visual token 017282|>
+<|visual token 017283|>
+<|visual token 017284|>
+<|visual token 017285|>
+<|visual token 017286|>
+<|visual token 017287|>
+<|visual token 017288|>
+<|visual token 017289|>
+<|visual token 017290|>
+<|visual token 017291|>
+<|visual token 017292|>
+<|visual token 017293|>
+<|visual token 017294|>
+<|visual token 017295|>
+<|visual token 017296|>
+<|visual token 017297|>
+<|visual token 017298|>
+<|visual token 017299|>
+<|visual token 017300|>
+<|visual token 017301|>
+<|visual token 017302|>
+<|visual token 017303|>
+<|visual token 017304|>
+<|visual token 017305|>
+<|visual token 017306|>
+<|visual token 017307|>
+<|visual token 017308|>
+<|visual token 017309|>
+<|visual token 017310|>
+<|visual token 017311|>
+<|visual token 017312|>
+<|visual token 017313|>
+<|visual token 017314|>
+<|visual token 017315|>
+<|visual token 017316|>
+<|visual token 017317|>
+<|visual token 017318|>
+<|visual token 017319|>
+<|visual token 017320|>
+<|visual token 017321|>
+<|visual token 017322|>
+<|visual token 017323|>
+<|visual token 017324|>
+<|visual token 017325|>
+<|visual token 017326|>
+<|visual token 017327|>
+<|visual token 017328|>
+<|visual token 017329|>
+<|visual token 017330|>
+<|visual token 017331|>
+<|visual token 017332|>
+<|visual token 017333|>
+<|visual token 017334|>
+<|visual token 017335|>
+<|visual token 017336|>
+<|visual token 017337|>
+<|visual token 017338|>
+<|visual token 017339|>
+<|visual token 017340|>
+<|visual token 017341|>
+<|visual token 017342|>
+<|visual token 017343|>
+<|visual token 017344|>
+<|visual token 017345|>
+<|visual token 017346|>
+<|visual token 017347|>
+<|visual token 017348|>
+<|visual token 017349|>
+<|visual token 017350|>
+<|visual token 017351|>
+<|visual token 017352|>
+<|visual token 017353|>
+<|visual token 017354|>
+<|visual token 017355|>
+<|visual token 017356|>
+<|visual token 017357|>
+<|visual token 017358|>
+<|visual token 017359|>
+<|visual token 017360|>
+<|visual token 017361|>
+<|visual token 017362|>
+<|visual token 017363|>
+<|visual token 017364|>
+<|visual token 017365|>
+<|visual token 017366|>
+<|visual token 017367|>
+<|visual token 017368|>
+<|visual token 017369|>
+<|visual token 017370|>
+<|visual token 017371|>
+<|visual token 017372|>
+<|visual token 017373|>
+<|visual token 017374|>
+<|visual token 017375|>
+<|visual token 017376|>
+<|visual token 017377|>
+<|visual token 017378|>
+<|visual token 017379|>
+<|visual token 017380|>
+<|visual token 017381|>
+<|visual token 017382|>
+<|visual token 017383|>
+<|visual token 017384|>
+<|visual token 017385|>
+<|visual token 017386|>
+<|visual token 017387|>
+<|visual token 017388|>
+<|visual token 017389|>
+<|visual token 017390|>
+<|visual token 017391|>
+<|visual token 017392|>
+<|visual token 017393|>
+<|visual token 017394|>
+<|visual token 017395|>
+<|visual token 017396|>
+<|visual token 017397|>
+<|visual token 017398|>
+<|visual token 017399|>
+<|visual token 017400|>
+<|visual token 017401|>
+<|visual token 017402|>
+<|visual token 017403|>
+<|visual token 017404|>
+<|visual token 017405|>
+<|visual token 017406|>
+<|visual token 017407|>
+<|visual token 017408|>
+<|visual token 017409|>
+<|visual token 017410|>
+<|visual token 017411|>
+<|visual token 017412|>
+<|visual token 017413|>
+<|visual token 017414|>
+<|visual token 017415|>
+<|visual token 017416|>
+<|visual token 017417|>
+<|visual token 017418|>
+<|visual token 017419|>
+<|visual token 017420|>
+<|visual token 017421|>
+<|visual token 017422|>
+<|visual token 017423|>
+<|visual token 017424|>
+<|visual token 017425|>
+<|visual token 017426|>
+<|visual token 017427|>
+<|visual token 017428|>
+<|visual token 017429|>
+<|visual token 017430|>
+<|visual token 017431|>
+<|visual token 017432|>
+<|visual token 017433|>
+<|visual token 017434|>
+<|visual token 017435|>
+<|visual token 017436|>
+<|visual token 017437|>
+<|visual token 017438|>
+<|visual token 017439|>
+<|visual token 017440|>
+<|visual token 017441|>
+<|visual token 017442|>
+<|visual token 017443|>
+<|visual token 017444|>
+<|visual token 017445|>
+<|visual token 017446|>
+<|visual token 017447|>
+<|visual token 017448|>
+<|visual token 017449|>
+<|visual token 017450|>
+<|visual token 017451|>
+<|visual token 017452|>
+<|visual token 017453|>
+<|visual token 017454|>
+<|visual token 017455|>
+<|visual token 017456|>
+<|visual token 017457|>
+<|visual token 017458|>
+<|visual token 017459|>
+<|visual token 017460|>
+<|visual token 017461|>
+<|visual token 017462|>
+<|visual token 017463|>
+<|visual token 017464|>
+<|visual token 017465|>
+<|visual token 017466|>
+<|visual token 017467|>
+<|visual token 017468|>
+<|visual token 017469|>
+<|visual token 017470|>
+<|visual token 017471|>
+<|visual token 017472|>
+<|visual token 017473|>
+<|visual token 017474|>
+<|visual token 017475|>
+<|visual token 017476|>
+<|visual token 017477|>
+<|visual token 017478|>
+<|visual token 017479|>
+<|visual token 017480|>
+<|visual token 017481|>
+<|visual token 017482|>
+<|visual token 017483|>
+<|visual token 017484|>
+<|visual token 017485|>
+<|visual token 017486|>
+<|visual token 017487|>
+<|visual token 017488|>
+<|visual token 017489|>
+<|visual token 017490|>
+<|visual token 017491|>
+<|visual token 017492|>
+<|visual token 017493|>
+<|visual token 017494|>
+<|visual token 017495|>
+<|visual token 017496|>
+<|visual token 017497|>
+<|visual token 017498|>
+<|visual token 017499|>
+<|visual token 017500|>
+<|visual token 017501|>
+<|visual token 017502|>
+<|visual token 017503|>
+<|visual token 017504|>
+<|visual token 017505|>
+<|visual token 017506|>
+<|visual token 017507|>
+<|visual token 017508|>
+<|visual token 017509|>
+<|visual token 017510|>
+<|visual token 017511|>
+<|visual token 017512|>
+<|visual token 017513|>
+<|visual token 017514|>
+<|visual token 017515|>
+<|visual token 017516|>
+<|visual token 017517|>
+<|visual token 017518|>
+<|visual token 017519|>
+<|visual token 017520|>
+<|visual token 017521|>
+<|visual token 017522|>
+<|visual token 017523|>
+<|visual token 017524|>
+<|visual token 017525|>
+<|visual token 017526|>
+<|visual token 017527|>
+<|visual token 017528|>
+<|visual token 017529|>
+<|visual token 017530|>
+<|visual token 017531|>
+<|visual token 017532|>
+<|visual token 017533|>
+<|visual token 017534|>
+<|visual token 017535|>
+<|visual token 017536|>
+<|visual token 017537|>
+<|visual token 017538|>
+<|visual token 017539|>
+<|visual token 017540|>
+<|visual token 017541|>
+<|visual token 017542|>
+<|visual token 017543|>
+<|visual token 017544|>
+<|visual token 017545|>
+<|visual token 017546|>
+<|visual token 017547|>
+<|visual token 017548|>
+<|visual token 017549|>
+<|visual token 017550|>
+<|visual token 017551|>
+<|visual token 017552|>
+<|visual token 017553|>
+<|visual token 017554|>
+<|visual token 017555|>
+<|visual token 017556|>
+<|visual token 017557|>
+<|visual token 017558|>
+<|visual token 017559|>
+<|visual token 017560|>
+<|visual token 017561|>
+<|visual token 017562|>
+<|visual token 017563|>
+<|visual token 017564|>
+<|visual token 017565|>
+<|visual token 017566|>
+<|visual token 017567|>
+<|visual token 017568|>
+<|visual token 017569|>
+<|visual token 017570|>
+<|visual token 017571|>
+<|visual token 017572|>
+<|visual token 017573|>
+<|visual token 017574|>
+<|visual token 017575|>
+<|visual token 017576|>
+<|visual token 017577|>
+<|visual token 017578|>
+<|visual token 017579|>
+<|visual token 017580|>
+<|visual token 017581|>
+<|visual token 017582|>
+<|visual token 017583|>
+<|visual token 017584|>
+<|visual token 017585|>
+<|visual token 017586|>
+<|visual token 017587|>
+<|visual token 017588|>
+<|visual token 017589|>
+<|visual token 017590|>
+<|visual token 017591|>
+<|visual token 017592|>
+<|visual token 017593|>
+<|visual token 017594|>
+<|visual token 017595|>
+<|visual token 017596|>
+<|visual token 017597|>
+<|visual token 017598|>
+<|visual token 017599|>
+<|visual token 017600|>
+<|visual token 017601|>
+<|visual token 017602|>
+<|visual token 017603|>
+<|visual token 017604|>
+<|visual token 017605|>
+<|visual token 017606|>
+<|visual token 017607|>
+<|visual token 017608|>
+<|visual token 017609|>
+<|visual token 017610|>
+<|visual token 017611|>
+<|visual token 017612|>
+<|visual token 017613|>
+<|visual token 017614|>
+<|visual token 017615|>
+<|visual token 017616|>
+<|visual token 017617|>
+<|visual token 017618|>
+<|visual token 017619|>
+<|visual token 017620|>
+<|visual token 017621|>
+<|visual token 017622|>
+<|visual token 017623|>
+<|visual token 017624|>
+<|visual token 017625|>
+<|visual token 017626|>
+<|visual token 017627|>
+<|visual token 017628|>
+<|visual token 017629|>
+<|visual token 017630|>
+<|visual token 017631|>
+<|visual token 017632|>
+<|visual token 017633|>
+<|visual token 017634|>
+<|visual token 017635|>
+<|visual token 017636|>
+<|visual token 017637|>
+<|visual token 017638|>
+<|visual token 017639|>
+<|visual token 017640|>
+<|visual token 017641|>
+<|visual token 017642|>
+<|visual token 017643|>
+<|visual token 017644|>
+<|visual token 017645|>
+<|visual token 017646|>
+<|visual token 017647|>
+<|visual token 017648|>
+<|visual token 017649|>
+<|visual token 017650|>
+<|visual token 017651|>
+<|visual token 017652|>
+<|visual token 017653|>
+<|visual token 017654|>
+<|visual token 017655|>
+<|visual token 017656|>
+<|visual token 017657|>
+<|visual token 017658|>
+<|visual token 017659|>
+<|visual token 017660|>
+<|visual token 017661|>
+<|visual token 017662|>
+<|visual token 017663|>
+<|visual token 017664|>
+<|visual token 017665|>
+<|visual token 017666|>
+<|visual token 017667|>
+<|visual token 017668|>
+<|visual token 017669|>
+<|visual token 017670|>
+<|visual token 017671|>
+<|visual token 017672|>
+<|visual token 017673|>
+<|visual token 017674|>
+<|visual token 017675|>
+<|visual token 017676|>
+<|visual token 017677|>
+<|visual token 017678|>
+<|visual token 017679|>
+<|visual token 017680|>
+<|visual token 017681|>
+<|visual token 017682|>
+<|visual token 017683|>
+<|visual token 017684|>
+<|visual token 017685|>
+<|visual token 017686|>
+<|visual token 017687|>
+<|visual token 017688|>
+<|visual token 017689|>
+<|visual token 017690|>
+<|visual token 017691|>
+<|visual token 017692|>
+<|visual token 017693|>
+<|visual token 017694|>
+<|visual token 017695|>
+<|visual token 017696|>
+<|visual token 017697|>
+<|visual token 017698|>
+<|visual token 017699|>
+<|visual token 017700|>
+<|visual token 017701|>
+<|visual token 017702|>
+<|visual token 017703|>
+<|visual token 017704|>
+<|visual token 017705|>
+<|visual token 017706|>
+<|visual token 017707|>
+<|visual token 017708|>
+<|visual token 017709|>
+<|visual token 017710|>
+<|visual token 017711|>
+<|visual token 017712|>
+<|visual token 017713|>
+<|visual token 017714|>
+<|visual token 017715|>
+<|visual token 017716|>
+<|visual token 017717|>
+<|visual token 017718|>
+<|visual token 017719|>
+<|visual token 017720|>
+<|visual token 017721|>
+<|visual token 017722|>
+<|visual token 017723|>
+<|visual token 017724|>
+<|visual token 017725|>
+<|visual token 017726|>
+<|visual token 017727|>
+<|visual token 017728|>
+<|visual token 017729|>
+<|visual token 017730|>
+<|visual token 017731|>
+<|visual token 017732|>
+<|visual token 017733|>
+<|visual token 017734|>
+<|visual token 017735|>
+<|visual token 017736|>
+<|visual token 017737|>
+<|visual token 017738|>
+<|visual token 017739|>
+<|visual token 017740|>
+<|visual token 017741|>
+<|visual token 017742|>
+<|visual token 017743|>
+<|visual token 017744|>
+<|visual token 017745|>
+<|visual token 017746|>
+<|visual token 017747|>
+<|visual token 017748|>
+<|visual token 017749|>
+<|visual token 017750|>
+<|visual token 017751|>
+<|visual token 017752|>
+<|visual token 017753|>
+<|visual token 017754|>
+<|visual token 017755|>
+<|visual token 017756|>
+<|visual token 017757|>
+<|visual token 017758|>
+<|visual token 017759|>
+<|visual token 017760|>
+<|visual token 017761|>
+<|visual token 017762|>
+<|visual token 017763|>
+<|visual token 017764|>
+<|visual token 017765|>
+<|visual token 017766|>
+<|visual token 017767|>
+<|visual token 017768|>
+<|visual token 017769|>
+<|visual token 017770|>
+<|visual token 017771|>
+<|visual token 017772|>
+<|visual token 017773|>
+<|visual token 017774|>
+<|visual token 017775|>
+<|visual token 017776|>
+<|visual token 017777|>
+<|visual token 017778|>
+<|visual token 017779|>
+<|visual token 017780|>
+<|visual token 017781|>
+<|visual token 017782|>
+<|visual token 017783|>
+<|visual token 017784|>
+<|visual token 017785|>
+<|visual token 017786|>
+<|visual token 017787|>
+<|visual token 017788|>
+<|visual token 017789|>
+<|visual token 017790|>
+<|visual token 017791|>
+<|visual token 017792|>
+<|visual token 017793|>
+<|visual token 017794|>
+<|visual token 017795|>
+<|visual token 017796|>
+<|visual token 017797|>
+<|visual token 017798|>
+<|visual token 017799|>
+<|visual token 017800|>
+<|visual token 017801|>
+<|visual token 017802|>
+<|visual token 017803|>
+<|visual token 017804|>
+<|visual token 017805|>
+<|visual token 017806|>
+<|visual token 017807|>
+<|visual token 017808|>
+<|visual token 017809|>
+<|visual token 017810|>
+<|visual token 017811|>
+<|visual token 017812|>
+<|visual token 017813|>
+<|visual token 017814|>
+<|visual token 017815|>
+<|visual token 017816|>
+<|visual token 017817|>
+<|visual token 017818|>
+<|visual token 017819|>
+<|visual token 017820|>
+<|visual token 017821|>
+<|visual token 017822|>
+<|visual token 017823|>
+<|visual token 017824|>
+<|visual token 017825|>
+<|visual token 017826|>
+<|visual token 017827|>
+<|visual token 017828|>
+<|visual token 017829|>
+<|visual token 017830|>
+<|visual token 017831|>
+<|visual token 017832|>
+<|visual token 017833|>
+<|visual token 017834|>
+<|visual token 017835|>
+<|visual token 017836|>
+<|visual token 017837|>
+<|visual token 017838|>
+<|visual token 017839|>
+<|visual token 017840|>
+<|visual token 017841|>
+<|visual token 017842|>
+<|visual token 017843|>
+<|visual token 017844|>
+<|visual token 017845|>
+<|visual token 017846|>
+<|visual token 017847|>
+<|visual token 017848|>
+<|visual token 017849|>
+<|visual token 017850|>
+<|visual token 017851|>
+<|visual token 017852|>
+<|visual token 017853|>
+<|visual token 017854|>
+<|visual token 017855|>
+<|visual token 017856|>
+<|visual token 017857|>
+<|visual token 017858|>
+<|visual token 017859|>
+<|visual token 017860|>
+<|visual token 017861|>
+<|visual token 017862|>
+<|visual token 017863|>
+<|visual token 017864|>
+<|visual token 017865|>
+<|visual token 017866|>
+<|visual token 017867|>
+<|visual token 017868|>
+<|visual token 017869|>
+<|visual token 017870|>
+<|visual token 017871|>
+<|visual token 017872|>
+<|visual token 017873|>
+<|visual token 017874|>
+<|visual token 017875|>
+<|visual token 017876|>
+<|visual token 017877|>
+<|visual token 017878|>
+<|visual token 017879|>
+<|visual token 017880|>
+<|visual token 017881|>
+<|visual token 017882|>
+<|visual token 017883|>
+<|visual token 017884|>
+<|visual token 017885|>
+<|visual token 017886|>
+<|visual token 017887|>
+<|visual token 017888|>
+<|visual token 017889|>
+<|visual token 017890|>
+<|visual token 017891|>
+<|visual token 017892|>
+<|visual token 017893|>
+<|visual token 017894|>
+<|visual token 017895|>
+<|visual token 017896|>
+<|visual token 017897|>
+<|visual token 017898|>
+<|visual token 017899|>
+<|visual token 017900|>
+<|visual token 017901|>
+<|visual token 017902|>
+<|visual token 017903|>
+<|visual token 017904|>
+<|visual token 017905|>
+<|visual token 017906|>
+<|visual token 017907|>
+[... 2,558 further sequential entries elided: one `+<|visual token NNNNNN|>` line per ID, 017908 through 020465 ...]
+<|visual token 020466|>
+<|visual token 020467|>
+<|visual token 020468|>
+<|visual token 020469|>
+<|visual token 020470|>
+<|visual token 020471|>
+<|visual token 020472|>
+<|visual token 020473|>
+<|visual token 020474|>
+<|visual token 020475|>
+<|visual token 020476|>
+<|visual token 020477|>
+<|visual token 020478|>
+<|visual token 020479|>
+<|visual token 020480|>
+<|visual token 020481|>
+<|visual token 020482|>
+<|visual token 020483|>
+<|visual token 020484|>
+<|visual token 020485|>
+<|visual token 020486|>
+<|visual token 020487|>
+<|visual token 020488|>
+<|visual token 020489|>
+<|visual token 020490|>
+<|visual token 020491|>
+<|visual token 020492|>
+<|visual token 020493|>
+<|visual token 020494|>
+<|visual token 020495|>
+<|visual token 020496|>
+<|visual token 020497|>
+<|visual token 020498|>
+<|visual token 020499|>
+<|visual token 020500|>
+<|visual token 020501|>
+<|visual token 020502|>
+<|visual token 020503|>
+<|visual token 020504|>
+<|visual token 020505|>
+<|visual token 020506|>
+<|visual token 020507|>
+<|visual token 020508|>
+<|visual token 020509|>
+<|visual token 020510|>
+<|visual token 020511|>
+<|visual token 020512|>
+<|visual token 020513|>
+<|visual token 020514|>
+<|visual token 020515|>
+<|visual token 020516|>
+<|visual token 020517|>
+<|visual token 020518|>
+<|visual token 020519|>
+<|visual token 020520|>
+<|visual token 020521|>
+<|visual token 020522|>
+<|visual token 020523|>
+<|visual token 020524|>
+<|visual token 020525|>
+<|visual token 020526|>
+<|visual token 020527|>
+<|visual token 020528|>
+<|visual token 020529|>
+<|visual token 020530|>
+<|visual token 020531|>
+<|visual token 020532|>
+<|visual token 020533|>
+<|visual token 020534|>
+<|visual token 020535|>
+<|visual token 020536|>
+<|visual token 020537|>
+<|visual token 020538|>
+<|visual token 020539|>
+<|visual token 020540|>
+<|visual token 020541|>
+<|visual token 020542|>
+<|visual token 020543|>
+<|visual token 020544|>
+<|visual token 020545|>
+<|visual token 020546|>
+<|visual token 020547|>
+<|visual token 020548|>
+<|visual token 020549|>
+<|visual token 020550|>
+<|visual token 020551|>
+<|visual token 020552|>
+<|visual token 020553|>
+<|visual token 020554|>
+<|visual token 020555|>
+<|visual token 020556|>
+<|visual token 020557|>
+<|visual token 020558|>
+<|visual token 020559|>
+<|visual token 020560|>
+<|visual token 020561|>
+<|visual token 020562|>
+<|visual token 020563|>
+<|visual token 020564|>
+<|visual token 020565|>
+<|visual token 020566|>
+<|visual token 020567|>
+<|visual token 020568|>
+<|visual token 020569|>
+<|visual token 020570|>
+<|visual token 020571|>
+<|visual token 020572|>
+<|visual token 020573|>
+<|visual token 020574|>
+<|visual token 020575|>
+<|visual token 020576|>
+<|visual token 020577|>
+<|visual token 020578|>
+<|visual token 020579|>
+<|visual token 020580|>
+<|visual token 020581|>
+<|visual token 020582|>
+<|visual token 020583|>
+<|visual token 020584|>
+<|visual token 020585|>
+<|visual token 020586|>
+<|visual token 020587|>
+<|visual token 020588|>
+<|visual token 020589|>
+<|visual token 020590|>
+<|visual token 020591|>
+<|visual token 020592|>
+<|visual token 020593|>
+<|visual token 020594|>
+<|visual token 020595|>
+<|visual token 020596|>
+<|visual token 020597|>
+<|visual token 020598|>
+<|visual token 020599|>
+<|visual token 020600|>
+<|visual token 020601|>
+<|visual token 020602|>
+<|visual token 020603|>
+<|visual token 020604|>
+<|visual token 020605|>
+<|visual token 020606|>
+<|visual token 020607|>
+<|visual token 020608|>
+<|visual token 020609|>
+<|visual token 020610|>
+<|visual token 020611|>
+<|visual token 020612|>
+<|visual token 020613|>
+<|visual token 020614|>
+<|visual token 020615|>
+<|visual token 020616|>
+<|visual token 020617|>
+<|visual token 020618|>
+<|visual token 020619|>
+<|visual token 020620|>
+<|visual token 020621|>
+<|visual token 020622|>
+<|visual token 020623|>
+<|visual token 020624|>
+<|visual token 020625|>
+<|visual token 020626|>
+<|visual token 020627|>
+<|visual token 020628|>
+<|visual token 020629|>
+<|visual token 020630|>
+<|visual token 020631|>
+<|visual token 020632|>
+<|visual token 020633|>
+<|visual token 020634|>
+<|visual token 020635|>
+<|visual token 020636|>
+<|visual token 020637|>
+<|visual token 020638|>
+<|visual token 020639|>
+<|visual token 020640|>
+<|visual token 020641|>
+<|visual token 020642|>
+<|visual token 020643|>
+<|visual token 020644|>
+<|visual token 020645|>
+<|visual token 020646|>
+<|visual token 020647|>
+<|visual token 020648|>
+<|visual token 020649|>
+<|visual token 020650|>
+<|visual token 020651|>
+<|visual token 020652|>
+<|visual token 020653|>
+<|visual token 020654|>
+<|visual token 020655|>
+<|visual token 020656|>
+<|visual token 020657|>
+<|visual token 020658|>
+<|visual token 020659|>
+<|visual token 020660|>
+<|visual token 020661|>
+<|visual token 020662|>
+<|visual token 020663|>
+<|visual token 020664|>
+<|visual token 020665|>
+<|visual token 020666|>
+<|visual token 020667|>
+<|visual token 020668|>
+<|visual token 020669|>
+<|visual token 020670|>
+<|visual token 020671|>
+<|visual token 020672|>
+<|visual token 020673|>
+<|visual token 020674|>
+<|visual token 020675|>
+<|visual token 020676|>
+<|visual token 020677|>
+<|visual token 020678|>
+<|visual token 020679|>
+<|visual token 020680|>
+<|visual token 020681|>
+<|visual token 020682|>
+<|visual token 020683|>
+<|visual token 020684|>
+<|visual token 020685|>
+<|visual token 020686|>
+<|visual token 020687|>
+<|visual token 020688|>
+<|visual token 020689|>
+<|visual token 020690|>
+<|visual token 020691|>
+<|visual token 020692|>
+<|visual token 020693|>
+<|visual token 020694|>
+<|visual token 020695|>
+<|visual token 020696|>
+<|visual token 020697|>
+<|visual token 020698|>
+<|visual token 020699|>
+<|visual token 020700|>
+<|visual token 020701|>
+<|visual token 020702|>
+<|visual token 020703|>
+<|visual token 020704|>
+<|visual token 020705|>
+<|visual token 020706|>
+<|visual token 020707|>
+<|visual token 020708|>
+<|visual token 020709|>
+<|visual token 020710|>
+<|visual token 020711|>
+<|visual token 020712|>
+<|visual token 020713|>
+<|visual token 020714|>
+<|visual token 020715|>
+<|visual token 020716|>
+<|visual token 020717|>
+<|visual token 020718|>
+<|visual token 020719|>
+<|visual token 020720|>
+<|visual token 020721|>
+<|visual token 020722|>
+<|visual token 020723|>
+<|visual token 020724|>
+<|visual token 020725|>
+<|visual token 020726|>
+<|visual token 020727|>
+<|visual token 020728|>
+<|visual token 020729|>
+<|visual token 020730|>
+<|visual token 020731|>
+<|visual token 020732|>
+<|visual token 020733|>
+<|visual token 020734|>
+<|visual token 020735|>
+<|visual token 020736|>
+<|visual token 020737|>
+<|visual token 020738|>
+<|visual token 020739|>
+<|visual token 020740|>
+<|visual token 020741|>
+<|visual token 020742|>
+<|visual token 020743|>
+<|visual token 020744|>
+<|visual token 020745|>
+<|visual token 020746|>
+<|visual token 020747|>
+<|visual token 020748|>
+<|visual token 020749|>
+<|visual token 020750|>
+<|visual token 020751|>
+<|visual token 020752|>
+<|visual token 020753|>
+<|visual token 020754|>
+<|visual token 020755|>
+<|visual token 020756|>
+<|visual token 020757|>
+<|visual token 020758|>
+<|visual token 020759|>
+<|visual token 020760|>
+<|visual token 020761|>
+<|visual token 020762|>
+<|visual token 020763|>
+<|visual token 020764|>
+<|visual token 020765|>
+<|visual token 020766|>
+<|visual token 020767|>
+<|visual token 020768|>
+<|visual token 020769|>
+<|visual token 020770|>
+<|visual token 020771|>
+<|visual token 020772|>
+<|visual token 020773|>
+<|visual token 020774|>
+<|visual token 020775|>
+<|visual token 020776|>
+<|visual token 020777|>
+<|visual token 020778|>
+<|visual token 020779|>
+<|visual token 020780|>
+<|visual token 020781|>
+<|visual token 020782|>
+<|visual token 020783|>
+<|visual token 020784|>
+<|visual token 020785|>
+<|visual token 020786|>
+<|visual token 020787|>
+<|visual token 020788|>
+<|visual token 020789|>
+<|visual token 020790|>
+<|visual token 020791|>
+<|visual token 020792|>
+<|visual token 020793|>
+<|visual token 020794|>
+<|visual token 020795|>
+<|visual token 020796|>
+<|visual token 020797|>
+<|visual token 020798|>
+<|visual token 020799|>
+<|visual token 020800|>
+<|visual token 020801|>
+<|visual token 020802|>
+<|visual token 020803|>
+<|visual token 020804|>
+<|visual token 020805|>
+<|visual token 020806|>
+<|visual token 020807|>
+<|visual token 020808|>
+<|visual token 020809|>
+<|visual token 020810|>
+<|visual token 020811|>
+<|visual token 020812|>
+<|visual token 020813|>
+<|visual token 020814|>
+<|visual token 020815|>
+<|visual token 020816|>
+<|visual token 020817|>
+<|visual token 020818|>
+<|visual token 020819|>
+<|visual token 020820|>
+<|visual token 020821|>
+<|visual token 020822|>
+<|visual token 020823|>
+<|visual token 020824|>
+<|visual token 020825|>
+<|visual token 020826|>
+<|visual token 020827|>
+<|visual token 020828|>
+<|visual token 020829|>
+<|visual token 020830|>
+<|visual token 020831|>
+<|visual token 020832|>
+<|visual token 020833|>
+<|visual token 020834|>
+<|visual token 020835|>
+<|visual token 020836|>
+<|visual token 020837|>
+<|visual token 020838|>
+<|visual token 020839|>
+<|visual token 020840|>
+<|visual token 020841|>
+<|visual token 020842|>
+<|visual token 020843|>
+<|visual token 020844|>
+<|visual token 020845|>
+<|visual token 020846|>
+<|visual token 020847|>
+<|visual token 020848|>
+<|visual token 020849|>
+<|visual token 020850|>
+<|visual token 020851|>
+<|visual token 020852|>
+<|visual token 020853|>
+<|visual token 020854|>
+<|visual token 020855|>
+<|visual token 020856|>
+<|visual token 020857|>
+<|visual token 020858|>
+<|visual token 020859|>
+<|visual token 020860|>
+<|visual token 020861|>
+<|visual token 020862|>
+<|visual token 020863|>
+<|visual token 020864|>
+<|visual token 020865|>
+<|visual token 020866|>
+<|visual token 020867|>
+<|visual token 020868|>
+<|visual token 020869|>
+<|visual token 020870|>
+<|visual token 020871|>
+<|visual token 020872|>
+<|visual token 020873|>
+<|visual token 020874|>
+<|visual token 020875|>
+<|visual token 020876|>
+<|visual token 020877|>
+<|visual token 020878|>
+<|visual token 020879|>
+<|visual token 020880|>
+<|visual token 020881|>
+<|visual token 020882|>
+<|visual token 020883|>
+<|visual token 020884|>
+<|visual token 020885|>
+<|visual token 020886|>
+<|visual token 020887|>
+<|visual token 020888|>
+<|visual token 020889|>
+<|visual token 020890|>
+<|visual token 020891|>
+<|visual token 020892|>
+<|visual token 020893|>
+<|visual token 020894|>
+<|visual token 020895|>
+<|visual token 020896|>
+<|visual token 020897|>
+<|visual token 020898|>
+<|visual token 020899|>
+<|visual token 020900|>
+<|visual token 020901|>
+<|visual token 020902|>
+<|visual token 020903|>
+<|visual token 020904|>
+<|visual token 020905|>
+<|visual token 020906|>
+<|visual token 020907|>
+<|visual token 020908|>
+<|visual token 020909|>
+<|visual token 020910|>
+<|visual token 020911|>
+<|visual token 020912|>
+<|visual token 020913|>
+<|visual token 020914|>
+<|visual token 020915|>
+<|visual token 020916|>
+<|visual token 020917|>
+<|visual token 020918|>
+<|visual token 020919|>
+<|visual token 020920|>
+<|visual token 020921|>
+<|visual token 020922|>
+<|visual token 020923|>
+<|visual token 020924|>
+<|visual token 020925|>
+<|visual token 020926|>
+<|visual token 020927|>
+<|visual token 020928|>
+<|visual token 020929|>
+<|visual token 020930|>
+<|visual token 020931|>
+<|visual token 020932|>
+<|visual token 020933|>
+<|visual token 020934|>
+<|visual token 020935|>
+<|visual token 020936|>
+<|visual token 020937|>
+<|visual token 020938|>
+<|visual token 020939|>
+<|visual token 020940|>
+<|visual token 020941|>
+<|visual token 020942|>
+<|visual token 020943|>
+<|visual token 020944|>
+<|visual token 020945|>
+<|visual token 020946|>
+<|visual token 020947|>
+<|visual token 020948|>
+<|visual token 020949|>
+<|visual token 020950|>
+<|visual token 020951|>
+<|visual token 020952|>
+<|visual token 020953|>
+<|visual token 020954|>
+<|visual token 020955|>
+<|visual token 020956|>
+<|visual token 020957|>
+<|visual token 020958|>
+<|visual token 020959|>
+<|visual token 020960|>
+<|visual token 020961|>
+<|visual token 020962|>
+<|visual token 020963|>
+<|visual token 020964|>
+<|visual token 020965|>
+<|visual token 020966|>
+<|visual token 020967|>
+<|visual token 020968|>
+<|visual token 020969|>
+<|visual token 020970|>
+<|visual token 020971|>
+<|visual token 020972|>
+<|visual token 020973|>
+<|visual token 020974|>
+<|visual token 020975|>
+<|visual token 020976|>
+<|visual token 020977|>
+<|visual token 020978|>
+<|visual token 020979|>
+<|visual token 020980|>
+<|visual token 020981|>
+<|visual token 020982|>
+<|visual token 020983|>
+<|visual token 020984|>
+<|visual token 020985|>
+<|visual token 020986|>
+<|visual token 020987|>
+<|visual token 020988|>
+<|visual token 020989|>
+<|visual token 020990|>
+<|visual token 020991|>
+<|visual token 020992|>
+<|visual token 020993|>
+<|visual token 020994|>
+<|visual token 020995|>
+<|visual token 020996|>
+<|visual token 020997|>
+<|visual token 020998|>
+<|visual token 020999|>
+<|visual token 021000|>
+<|visual token 021001|>
+<|visual token 021002|>
+<|visual token 021003|>
+<|visual token 021004|>
+<|visual token 021005|>
+<|visual token 021006|>
+<|visual token 021007|>
+<|visual token 021008|>
+<|visual token 021009|>
+<|visual token 021010|>
+<|visual token 021011|>
+<|visual token 021012|>
+<|visual token 021013|>
+<|visual token 021014|>
+<|visual token 021015|>
+<|visual token 021016|>
+<|visual token 021017|>
+<|visual token 021018|>
+<|visual token 021019|>
+<|visual token 021020|>
+<|visual token 021021|>
+<|visual token 021022|>
+<|visual token 021023|>
+<|visual token 021024|>
+<|visual token 021025|>
+<|visual token 021026|>
+<|visual token 021027|>
+<|visual token 021028|>
+<|visual token 021029|>
+<|visual token 021030|>
+<|visual token 021031|>
+<|visual token 021032|>
+<|visual token 021033|>
+<|visual token 021034|>
+<|visual token 021035|>
+<|visual token 021036|>
+<|visual token 021037|>
+<|visual token 021038|>
+<|visual token 021039|>
+<|visual token 021040|>
+<|visual token 021041|>
+<|visual token 021042|>
+<|visual token 021043|>
+<|visual token 021044|>
+<|visual token 021045|>
+<|visual token 021046|>
+<|visual token 021047|>
+<|visual token 021048|>
+<|visual token 021049|>
+<|visual token 021050|>
+<|visual token 021051|>
+<|visual token 021052|>
+<|visual token 021053|>
+<|visual token 021054|>
+<|visual token 021055|>
+<|visual token 021056|>
+<|visual token 021057|>
+<|visual token 021058|>
+<|visual token 021059|>
+<|visual token 021060|>
+<|visual token 021061|>
+<|visual token 021062|>
+<|visual token 021063|>
+<|visual token 021064|>
+<|visual token 021065|>
+<|visual token 021066|>
+<|visual token 021067|>
+<|visual token 021068|>
+<|visual token 021069|>
+<|visual token 021070|>
+<|visual token 021071|>
+<|visual token 021072|>
+<|visual token 021073|>
+<|visual token 021074|>
+<|visual token 021075|>
+<|visual token 021076|>
+<|visual token 021077|>
+<|visual token 021078|>
+<|visual token 021079|>
+<|visual token 021080|>
+<|visual token 021081|>
+<|visual token 021082|>
+<|visual token 021083|>
+<|visual token 021084|>
+<|visual token 021085|>
+<|visual token 021086|>
+<|visual token 021087|>
+<|visual token 021088|>
+<|visual token 021089|>
+<|visual token 021090|>
+<|visual token 021091|>
+<|visual token 021092|>
+<|visual token 021093|>
+<|visual token 021094|>
+<|visual token 021095|>
+<|visual token 021096|>
+<|visual token 021097|>
+<|visual token 021098|>
+<|visual token 021099|>
+<|visual token 021100|>
+<|visual token 021101|>
+<|visual token 021102|>
+<|visual token 021103|>
+<|visual token 021104|>
+<|visual token 021105|>
+<|visual token 021106|>
+<|visual token 021107|>
+<|visual token 021108|>
+<|visual token 021109|>
+<|visual token 021110|>
+<|visual token 021111|>
+<|visual token 021112|>
+<|visual token 021113|>
+<|visual token 021114|>
+<|visual token 021115|>
+<|visual token 021116|>
+<|visual token 021117|>
+<|visual token 021118|>
+<|visual token 021119|>
+<|visual token 021120|>
+<|visual token 021121|>
+<|visual token 021122|>
+<|visual token 021123|>
+<|visual token 021124|>
+<|visual token 021125|>
+<|visual token 021126|>
+<|visual token 021127|>
+<|visual token 021128|>
+<|visual token 021129|>
+<|visual token 021130|>
+<|visual token 021131|>
+<|visual token 021132|>
+<|visual token 021133|>
+<|visual token 021134|>
+<|visual token 021135|>
+<|visual token 021136|>
+<|visual token 021137|>
+<|visual token 021138|>
+<|visual token 021139|>
+<|visual token 021140|>
+<|visual token 021141|>
+<|visual token 021142|>
+<|visual token 021143|>
+<|visual token 021144|>
+<|visual token 021145|>
+<|visual token 021146|>
+<|visual token 021147|>
+<|visual token 021148|>
+<|visual token 021149|>
+<|visual token 021150|>
+<|visual token 021151|>
+<|visual token 021152|>
+<|visual token 021153|>
+<|visual token 021154|>
+<|visual token 021155|>
+<|visual token 021156|>
+<|visual token 021157|>
+<|visual token 021158|>
+<|visual token 021159|>
+<|visual token 021160|>
+<|visual token 021161|>
+<|visual token 021162|>
+<|visual token 021163|>
+<|visual token 021164|>
+<|visual token 021165|>
+<|visual token 021166|>
+<|visual token 021167|>
+<|visual token 021168|>
+<|visual token 021169|>
+<|visual token 021170|>
+<|visual token 021171|>
+<|visual token 021172|>
+<|visual token 021173|>
+<|visual token 021174|>
+<|visual token 021175|>
+<|visual token 021176|>
+<|visual token 021177|>
+<|visual token 021178|>
+<|visual token 021179|>
+<|visual token 021180|>
+<|visual token 021181|>
+<|visual token 021182|>
+<|visual token 021183|>
+<|visual token 021184|>
+<|visual token 021185|>
+<|visual token 021186|>
+<|visual token 021187|>
+<|visual token 021188|>
+<|visual token 021189|>
+<|visual token 021190|>
+<|visual token 021191|>
+<|visual token 021192|>
+<|visual token 021193|>
+<|visual token 021194|>
+<|visual token 021195|>
+<|visual token 021196|>
+<|visual token 021197|>
+<|visual token 021198|>
+<|visual token 021199|>
+<|visual token 021200|>
+<|visual token 021201|>
+<|visual token 021202|>
+<|visual token 021203|>
+<|visual token 021204|>
+<|visual token 021205|>
+<|visual token 021206|>
+<|visual token 021207|>
+<|visual token 021208|>
+<|visual token 021209|>
+<|visual token 021210|>
+<|visual token 021211|>
+<|visual token 021212|>
+<|visual token 021213|>
+<|visual token 021214|>
+<|visual token 021215|>
+<|visual token 021216|>
+<|visual token 021217|>
+<|visual token 021218|>
+<|visual token 021219|>
+<|visual token 021220|>
+<|visual token 021221|>
+<|visual token 021222|>
+<|visual token 021223|>
+<|visual token 021224|>
+<|visual token 021225|>
+<|visual token 021226|>
+<|visual token 021227|>
+<|visual token 021228|>
+<|visual token 021229|>
+<|visual token 021230|>
+<|visual token 021231|>
+<|visual token 021232|>
+<|visual token 021233|>
+<|visual token 021234|>
+<|visual token 021235|>
+<|visual token 021236|>
+<|visual token 021237|>
+<|visual token 021238|>
+<|visual token 021239|>
+<|visual token 021240|>
+<|visual token 021241|>
+<|visual token 021242|>
+<|visual token 021243|>
+<|visual token 021244|>
+<|visual token 021245|>
+<|visual token 021246|>
+<|visual token 021247|>
+<|visual token 021248|>
+<|visual token 021249|>
+<|visual token 021250|>
+<|visual token 021251|>
+<|visual token 021252|>
+<|visual token 021253|>
+<|visual token 021254|>
+<|visual token 021255|>
+<|visual token 021256|>
+<|visual token 021257|>
+<|visual token 021258|>
+<|visual token 021259|>
+<|visual token 021260|>
+<|visual token 021261|>
+<|visual token 021262|>
+<|visual token 021263|>
+<|visual token 021264|>
+<|visual token 021265|>
+<|visual token 021266|>
+<|visual token 021267|>
+<|visual token 021268|>
+<|visual token 021269|>
+<|visual token 021270|>
+<|visual token 021271|>
+<|visual token 021272|>
+<|visual token 021273|>
+<|visual token 021274|>
+<|visual token 021275|>
+<|visual token 021276|>
+<|visual token 021277|>
+<|visual token 021278|>
+<|visual token 021279|>
+<|visual token 021280|>
+<|visual token 021281|>
+<|visual token 021282|>
+<|visual token 021283|>
+<|visual token 021284|>
+<|visual token 021285|>
+<|visual token 021286|>
+<|visual token 021287|>
+<|visual token 021288|>
+<|visual token 021289|>
+<|visual token 021290|>
+<|visual token 021291|>
+<|visual token 021292|>
+<|visual token 021293|>
+<|visual token 021294|>
+<|visual token 021295|>
+<|visual token 021296|>
+<|visual token 021297|>
+<|visual token 021298|>
+<|visual token 021299|>
+<|visual token 021300|>
+<|visual token 021301|>
+<|visual token 021302|>
+<|visual token 021303|>
+<|visual token 021304|>
+<|visual token 021305|>
+<|visual token 021306|>
+<|visual token 021307|>
+<|visual token 021308|>
+<|visual token 021309|>
+<|visual token 021310|>
+<|visual token 021311|>
+<|visual token 021312|>
+<|visual token 021313|>
+<|visual token 021314|>
+<|visual token 021315|>
+<|visual token 021316|>
+<|visual token 021317|>
+<|visual token 021318|>
+<|visual token 021319|>
+<|visual token 021320|>
+<|visual token 021321|>
+<|visual token 021322|>
+<|visual token 021323|>
+<|visual token 021324|>
+<|visual token 021325|>
+<|visual token 021326|>
+<|visual token 021327|>
+<|visual token 021328|>
+<|visual token 021329|>
+<|visual token 021330|>
+<|visual token 021331|>
+<|visual token 021332|>
+<|visual token 021333|>
+<|visual token 021334|>
+<|visual token 021335|>
+<|visual token 021336|>
+<|visual token 021337|>
+<|visual token 021338|>
+<|visual token 021339|>
+<|visual token 021340|>
+<|visual token 021341|>
+<|visual token 021342|>
+<|visual token 021343|>
+<|visual token 021344|>
+<|visual token 021345|>
+<|visual token 021346|>
+<|visual token 021347|>
+<|visual token 021348|>
+<|visual token 021349|>
+<|visual token 021350|>
+<|visual token 021351|>
+<|visual token 021352|>
+<|visual token 021353|>
+<|visual token 021354|>
+<|visual token 021355|>
+<|visual token 021356|>
+<|visual token 021357|>
+<|visual token 021358|>
+<|visual token 021359|>
+<|visual token 021360|>
+<|visual token 021361|>
+<|visual token 021362|>
+<|visual token 021363|>
+<|visual token 021364|>
+<|visual token 021365|>
+<|visual token 021366|>
+<|visual token 021367|>
+<|visual token 021368|>
+<|visual token 021369|>
+<|visual token 021370|>
+<|visual token 021371|>
+<|visual token 021372|>
+<|visual token 021373|>
+<|visual token 021374|>
+<|visual token 021375|>
+<|visual token 021376|>
+<|visual token 021377|>
+<|visual token 021378|>
+<|visual token 021379|>
+<|visual token 021380|>
+<|visual token 021381|>
+<|visual token 021382|>
+<|visual token 021383|>
+<|visual token 021384|>
+<|visual token 021385|>
+<|visual token 021386|>
+<|visual token 021387|>
+<|visual token 021388|>
+<|visual token 021389|>
+<|visual token 021390|>
+<|visual token 021391|>
+<|visual token 021392|>
+<|visual token 021393|>
+<|visual token 021394|>
+<|visual token 021395|>
+<|visual token 021396|>
+<|visual token 021397|>
+<|visual token 021398|>
+<|visual token 021399|>
+<|visual token 021400|>
+<|visual token 021401|>
+<|visual token 021402|>
+<|visual token 021403|>
+<|visual token 021404|>
+<|visual token 021405|>
+<|visual token 021406|>
+<|visual token 021407|>
+<|visual token 021408|>
+<|visual token 021409|>
+<|visual token 021410|>
+<|visual token 021411|>
+<|visual token 021412|>
+<|visual token 021413|>
+<|visual token 021414|>
+<|visual token 021415|>
+<|visual token 021416|>
+<|visual token 021417|>
+<|visual token 021418|>
+<|visual token 021419|>
+<|visual token 021420|>
+<|visual token 021421|>
+<|visual token 021422|>
+<|visual token 021423|>
+<|visual token 021424|>
+<|visual token 021425|>
+<|visual token 021426|>
+<|visual token 021427|>
+<|visual token 021428|>
+<|visual token 021429|>
+<|visual token 021430|>
+<|visual token 021431|>
+<|visual token 021432|>
+<|visual token 021433|>
+<|visual token 021434|>
+<|visual token 021435|>
+<|visual token 021436|>
+<|visual token 021437|>
+<|visual token 021438|>
+<|visual token 021439|>
+<|visual token 021440|>
+<|visual token 021441|>
+<|visual token 021442|>
+<|visual token 021443|>
+<|visual token 021444|>
+<|visual token 021445|>
+<|visual token 021446|>
+<|visual token 021447|>
+<|visual token 021448|>
+<|visual token 021449|>
+<|visual token 021450|>
+<|visual token 021451|>
+<|visual token 021452|>
+<|visual token 021453|>
+<|visual token 021454|>
+<|visual token 021455|>
+<|visual token 021456|>
+<|visual token 021457|>
+<|visual token 021458|>
+<|visual token 021459|>
+<|visual token 021460|>
+<|visual token 021461|>
+<|visual token 021462|>
+<|visual token 021463|>
+<|visual token 021464|>
+<|visual token 021465|>
+<|visual token 021466|>
+<|visual token 021467|>
+<|visual token 021468|>
+<|visual token 021469|>
+<|visual token 021470|>
+<|visual token 021471|>
+<|visual token 021472|>
+<|visual token 021473|>
+<|visual token 021474|>
+<|visual token 021475|>
+<|visual token 021476|>
+<|visual token 021477|>
+<|visual token 021478|>
+<|visual token 021479|>
+<|visual token 021480|>
+<|visual token 021481|>
+<|visual token 021482|>
+<|visual token 021483|>
+<|visual token 021484|>
+<|visual token 021485|>
+<|visual token 021486|>
+<|visual token 021487|>
+<|visual token 021488|>
+<|visual token 021489|>
+<|visual token 021490|>
+<|visual token 021491|>
+<|visual token 021492|>
+<|visual token 021493|>
+<|visual token 021494|>
+<|visual token 021495|>
+<|visual token 021496|>
+<|visual token 021497|>
+<|visual token 021498|>
+<|visual token 021499|>
+<|visual token 021500|>
+<|visual token 021501|>
+<|visual token 021502|>
+<|visual token 021503|>
+<|visual token 021504|>
+<|visual token 021505|>
+<|visual token 021506|>
+<|visual token 021507|>
+<|visual token 021508|>
+<|visual token 021509|>
+<|visual token 021510|>
+<|visual token 021511|>
+<|visual token 021512|>
+<|visual token 021513|>
+<|visual token 021514|>
+<|visual token 021515|>
+<|visual token 021516|>
+<|visual token 021517|>
+<|visual token 021518|>
+<|visual token 021519|>
+<|visual token 021520|>
+<|visual token 021521|>
+<|visual token 021522|>
+<|visual token 021523|>
+<|visual token 021524|>
+<|visual token 021525|>
+<|visual token 021526|>
+<|visual token 021527|>
+<|visual token 021528|>
+<|visual token 021529|>
+<|visual token 021530|>
+<|visual token 021531|>
+<|visual token 021532|>
+<|visual token 021533|>
+<|visual token 021534|>
+<|visual token 021535|>
+<|visual token 021536|>
+<|visual token 021537|>
+<|visual token 021538|>
+<|visual token 021539|>
+<|visual token 021540|>
+<|visual token 021541|>
+<|visual token 021542|>
+<|visual token 021543|>
+<|visual token 021544|>
+<|visual token 021545|>
+<|visual token 021546|>
+<|visual token 021547|>
+<|visual token 021548|>
+<|visual token 021549|>
+<|visual token 021550|>
+<|visual token 021551|>
+<|visual token 021552|>
+<|visual token 021553|>
+<|visual token 021554|>
+<|visual token 021555|>
+<|visual token 021556|>
+<|visual token 021557|>
+<|visual token 021558|>
+<|visual token 021559|>
+<|visual token 021560|>
+<|visual token 021561|>
+<|visual token 021562|>
+<|visual token 021563|>
+<|visual token 021564|>
+<|visual token 021565|>
+<|visual token 021566|>
+<|visual token 021567|>
+<|visual token 021568|>
+<|visual token 021569|>
+<|visual token 021570|>
+<|visual token 021571|>
+<|visual token 021572|>
+<|visual token 021573|>
+<|visual token 021574|>
+<|visual token 021575|>
+<|visual token 021576|>
+<|visual token 021577|>
+<|visual token 021578|>
+<|visual token 021579|>
+<|visual token 021580|>
+<|visual token 021581|>
+<|visual token 021582|>
+<|visual token 021583|>
+<|visual token 021584|>
+<|visual token 021585|>
+<|visual token 021586|>
+<|visual token 021587|>
+<|visual token 021588|>
+<|visual token 021589|>
+<|visual token 021590|>
+<|visual token 021591|>
+<|visual token 021592|>
+<|visual token 021593|>
+<|visual token 021594|>
+<|visual token 021595|>
+<|visual token 021596|>
+<|visual token 021597|>
+<|visual token 021598|>
+<|visual token 021599|>
+<|visual token 021600|>
+<|visual token 021601|>
+<|visual token 021602|>
+<|visual token 021603|>
+<|visual token 021604|>
+<|visual token 021605|>
+<|visual token 021606|>
+<|visual token 021607|>
+<|visual token 021608|>
+<|visual token 021609|>
+<|visual token 021610|>
+<|visual token 021611|>
+<|visual token 021612|>
+<|visual token 021613|>
+<|visual token 021614|>
+<|visual token 021615|>
+<|visual token 021616|>
+<|visual token 021617|>
+<|visual token 021618|>
+<|visual token 021619|>
+<|visual token 021620|>
+<|visual token 021621|>
+<|visual token 021622|>
+<|visual token 021623|>
+<|visual token 021624|>
+<|visual token 021625|>
+<|visual token 021626|>
+<|visual token 021627|>
+<|visual token 021628|>
+<|visual token 021629|>
+<|visual token 021630|>
+<|visual token 021631|>
+<|visual token 021632|>
+<|visual token 021633|>
+<|visual token 021634|>
+<|visual token 021635|>
+<|visual token 021636|>
+<|visual token 021637|>
+<|visual token 021638|>
+<|visual token 021639|>
+<|visual token 021640|>
+<|visual token 021641|>
+<|visual token 021642|>
+<|visual token 021643|>
+<|visual token 021644|>
+<|visual token 021645|>
+<|visual token 021646|>
+<|visual token 021647|>
+<|visual token 021648|>
+<|visual token 021649|>
+<|visual token 021650|>
+<|visual token 021651|>
+<|visual token 021652|>
+<|visual token 021653|>
+<|visual token 021654|>
+<|visual token 021655|>
+<|visual token 021656|>
+<|visual token 021657|>
+<|visual token 021658|>
+<|visual token 021659|>
+<|visual token 021660|>
+<|visual token 021661|>
+<|visual token 021662|>
+<|visual token 021663|>
+<|visual token 021664|>
+<|visual token 021665|>
+<|visual token 021666|>
+<|visual token 021667|>
+<|visual token 021668|>
+<|visual token 021669|>
+<|visual token 021670|>
+<|visual token 021671|>
+<|visual token 021672|>
+<|visual token 021673|>
+<|visual token 021674|>
+<|visual token 021675|>
+<|visual token 021676|>
+<|visual token 021677|>
+<|visual token 021678|>
+<|visual token 021679|>
+<|visual token 021680|>
+<|visual token 021681|>
+<|visual token 021682|>
+<|visual token 021683|>
+<|visual token 021684|>
+<|visual token 021685|>
+<|visual token 021686|>
+<|visual token 021687|>
+<|visual token 021688|>
+<|visual token 021689|>
+<|visual token 021690|>
+<|visual token 021691|>
+<|visual token 021692|>
+<|visual token 021693|>
+<|visual token 021694|>
+<|visual token 021695|>
+<|visual token 021696|>
+<|visual token 021697|>
+<|visual token 021698|>
+<|visual token 021699|>
+<|visual token 021700|>
+<|visual token 021701|>
+<|visual token 021702|>
+<|visual token 021703|>
+<|visual token 021704|>
+<|visual token 021705|>
+<|visual token 021706|>
+<|visual token 021707|>
+<|visual token 021708|>
+<|visual token 021709|>
+<|visual token 021710|>
+<|visual token 021711|>
+<|visual token 021712|>
+<|visual token 021713|>
+<|visual token 021714|>
+<|visual token 021715|>
+<|visual token 021716|>
+<|visual token 021717|>
+<|visual token 021718|>
+<|visual token 021719|>
+<|visual token 021720|>
+<|visual token 021721|>
+<|visual token 021722|>
+<|visual token 021723|>
+<|visual token 021724|>
+<|visual token 021725|>
+<|visual token 021726|>
+<|visual token 021727|>
+<|visual token 021728|>
+<|visual token 021729|>
+<|visual token 021730|>
+<|visual token 021731|>
+<|visual token 021732|>
+<|visual token 021733|>
+<|visual token 021734|>
+<|visual token 021735|>
+<|visual token 021736|>
+<|visual token 021737|>
+<|visual token 021738|>
+<|visual token 021739|>
+<|visual token 021740|>
+<|visual token 021741|>
+<|visual token 021742|>
+<|visual token 021743|>
+<|visual token 021744|>
+<|visual token 021745|>
+<|visual token 021746|>
+<|visual token 021747|>
+<|visual token 021748|>
+<|visual token 021749|>
+<|visual token 021750|>
+<|visual token 021751|>
+<|visual token 021752|>
+<|visual token 021753|>
+<|visual token 021754|>
+<|visual token 021755|>
+<|visual token 021756|>
+<|visual token 021757|>
+<|visual token 021758|>
+<|visual token 021759|>
+<|visual token 021760|>
+<|visual token 021761|>
+<|visual token 021762|>
+<|visual token 021763|>
+<|visual token 021764|>
+<|visual token 021765|>
+<|visual token 021766|>
+<|visual token 021767|>
+<|visual token 021768|>
+<|visual token 021769|>
+<|visual token 021770|>
+<|visual token 021771|>
+<|visual token 021772|>
+<|visual token 021773|>
+<|visual token 021774|>
+<|visual token 021775|>
+<|visual token 021776|>
+<|visual token 021777|>
+<|visual token 021778|>
+<|visual token 021779|>
+<|visual token 021780|>
+<|visual token 021781|>
+<|visual token 021782|>
+<|visual token 021783|>
+<|visual token 021784|>
+<|visual token 021785|>
+<|visual token 021786|>
+<|visual token 021787|>
+<|visual token 021788|>
+<|visual token 021789|>
+<|visual token 021790|>
+<|visual token 021791|>
+<|visual token 021792|>
+<|visual token 021793|>
+<|visual token 021794|>
+<|visual token 021795|>
+<|visual token 021796|>
+<|visual token 021797|>
+<|visual token 021798|>
+<|visual token 021799|>
+<|visual token 021800|>
+<|visual token 021801|>
+<|visual token 021802|>
+<|visual token 021803|>
+<|visual token 021804|>
+<|visual token 021805|>
+<|visual token 021806|>
+<|visual token 021807|>
+<|visual token 021808|>
+<|visual token 021809|>
+<|visual token 021810|>
+<|visual token 021811|>
+<|visual token 021812|>
+<|visual token 021813|>
+<|visual token 021814|>
+<|visual token 021815|>
+<|visual token 021816|>
+<|visual token 021817|>
+<|visual token 021818|>
+<|visual token 021819|>
+<|visual token 021820|>
+<|visual token 021821|>
+<|visual token 021822|>
+<|visual token 021823|>
+<|visual token 021824|>
+<|visual token 021825|>
+<|visual token 021826|>
+<|visual token 021827|>
+<|visual token 021828|>
+<|visual token 021829|>
+<|visual token 021830|>
+<|visual token 021831|>
+<|visual token 021832|>
+<|visual token 021833|>
+<|visual token 021834|>
+<|visual token 021835|>
+<|visual token 021836|>
+<|visual token 021837|>
+<|visual token 021838|>
+<|visual token 021839|>
+<|visual token 021840|>
+<|visual token 021841|>
+<|visual token 021842|>
+<|visual token 021843|>
+<|visual token 021844|>
+<|visual token 021845|>
+<|visual token 021846|>
+<|visual token 021847|>
+<|visual token 021848|>
+<|visual token 021849|>
+<|visual token 021850|>
+<|visual token 021851|>
+<|visual token 021852|>
+<|visual token 021853|>
+<|visual token 021854|>
+<|visual token 021855|>
+<|visual token 021856|>
+<|visual token 021857|>
+<|visual token 021858|>
+<|visual token 021859|>
+<|visual token 021860|>
+<|visual token 021861|>
+<|visual token 021862|>
+<|visual token 021863|>
+<|visual token 021864|>
+<|visual token 021865|>
+<|visual token 021866|>
+<|visual token 021867|>
+<|visual token 021868|>
+<|visual token 021869|>
+<|visual token 021870|>
+<|visual token 021871|>
+<|visual token 021872|>
+<|visual token 021873|>
+<|visual token 021874|>
+<|visual token 021875|>
+<|visual token 021876|>
+<|visual token 021877|>
+<|visual token 021878|>
+<|visual token 021879|>
+<|visual token 021880|>
+<|visual token 021881|>
+<|visual token 021882|>
+<|visual token 021883|>
+<|visual token 021884|>
+<|visual token 021885|>
+<|visual token 021886|>
+<|visual token 021887|>
+<|visual token 021888|>
+<|visual token 021889|>
+<|visual token 021890|>
+<|visual token 021891|>
+<|visual token 021892|>
+<|visual token 021893|>
+<|visual token 021894|>
+<|visual token 021895|>
+<|visual token 021896|>
+<|visual token 021897|>
+<|visual token 021898|>
+<|visual token 021899|>
+<|visual token 021900|>
+<|visual token 021901|>
+<|visual token 021902|>
+<|visual token 021903|>
+<|visual token 021904|>
+<|visual token 021905|>
+<|visual token 021906|>
+<|visual token 021907|>
+<|visual token 021908|>
+<|visual token 021909|>
+<|visual token 021910|>
+<|visual token 021911|>
+<|visual token 021912|>
+<|visual token 021913|>
+<|visual token 021914|>
+<|visual token 021915|>
+<|visual token 021916|>
+<|visual token 021917|>
+<|visual token 021918|>
+<|visual token 021919|>
+<|visual token 021920|>
+<|visual token 021921|>
+<|visual token 021922|>
+<|visual token 021923|>
+<|visual token 021924|>
+<|visual token 021925|>
+<|visual token 021926|>
+<|visual token 021927|>
+<|visual token 021928|>
+<|visual token 021929|>
+<|visual token 021930|>
+<|visual token 021931|>
+<|visual token 021932|>
+<|visual token 021933|>
+<|visual token 021934|>
+<|visual token 021935|>
+<|visual token 021936|>
+<|visual token 021937|>
+<|visual token 021938|>
+<|visual token 021939|>
+<|visual token 021940|>
+<|visual token 021941|>
+<|visual token 021942|>
+<|visual token 021943|>
+<|visual token 021944|>
+<|visual token 021945|>
+<|visual token 021946|>
+<|visual token 021947|>
+<|visual token 021948|>
+<|visual token 021949|>
+<|visual token 021950|>
+<|visual token 021951|>
+<|visual token 021952|>
+<|visual token 021953|>
+<|visual token 021954|>
+<|visual token 021955|>
+<|visual token 021956|>
+<|visual token 021957|>
+<|visual token 021958|>
+<|visual token 021959|>
+<|visual token 021960|>
+<|visual token 021961|>
+<|visual token 021962|>
+<|visual token 021963|>
+<|visual token 021964|>
+<|visual token 021965|>
+<|visual token 021966|>
+<|visual token 021967|>
+<|visual token 021968|>
+<|visual token 021969|>
+<|visual token 021970|>
+<|visual token 021971|>
+<|visual token 021972|>
+<|visual token 021973|>
+<|visual token 021974|>
+<|visual token 021975|>
+<|visual token 021976|>
+<|visual token 021977|>
+<|visual token 021978|>
+<|visual token 021979|>
+<|visual token 021980|>
+<|visual token 021981|>
+<|visual token 021982|>
+<|visual token 021983|>
+<|visual token 021984|>
+<|visual token 021985|>
+<|visual token 021986|>
+<|visual token 021987|>
+<|visual token 021988|>
+<|visual token 021989|>
+<|visual token 021990|>
+<|visual token 021991|>
+<|visual token 021992|>
+<|visual token 021993|>
+<|visual token 021994|>
+<|visual token 021995|>
+<|visual token 021996|>
+<|visual token 021997|>
+<|visual token 021998|>
+<|visual token 021999|>
+<|visual token 022000|>
+<|visual token 022001|>
+<|visual token 022002|>
+<|visual token 022003|>
+<|visual token 022004|>
+<|visual token 022005|>
+<|visual token 022006|>
+<|visual token 022007|>
+<|visual token 022008|>
+<|visual token 022009|>
+<|visual token 022010|>
+<|visual token 022011|>
+<|visual token 022012|>
+<|visual token 022013|>
+<|visual token 022014|>
+<|visual token 022015|>
+<|visual token 022016|>
+<|visual token 022017|>
+<|visual token 022018|>
+<|visual token 022019|>
+<|visual token 022020|>
+<|visual token 022021|>
+<|visual token 022022|>
+<|visual token 022023|>
+<|visual token 022024|>
+<|visual token 022025|>
+<|visual token 022026|>
+<|visual token 022027|>
+<|visual token 022028|>
+<|visual token 022029|>
+<|visual token 022030|>
+<|visual token 022031|>
+<|visual token 022032|>
+<|visual token 022033|>
+<|visual token 022034|>
+<|visual token 022035|>
+<|visual token 022036|>
+<|visual token 022037|>
+<|visual token 022038|>
+<|visual token 022039|>
+<|visual token 022040|>
+<|visual token 022041|>
+<|visual token 022042|>
+<|visual token 022043|>
+<|visual token 022044|>
+<|visual token 022045|>
+<|visual token 022046|>
+<|visual token 022047|>
+<|visual token 022048|>
+<|visual token 022049|>
+<|visual token 022050|>
+<|visual token 022051|>
+<|visual token 022052|>
+<|visual token 022053|>
+<|visual token 022054|>
+<|visual token 022055|>
+<|visual token 022056|>
+<|visual token 022057|>
+<|visual token 022058|>
+<|visual token 022059|>
+<|visual token 022060|>
+<|visual token 022061|>
+<|visual token 022062|>
+<|visual token 022063|>
+<|visual token 022064|>
+<|visual token 022065|>
+<|visual token 022066|>
+<|visual token 022067|>
+<|visual token 022068|>
+<|visual token 022069|>
+<|visual token 022070|>
+<|visual token 022071|>
+<|visual token 022072|>
+<|visual token 022073|>
+<|visual token 022074|>
+<|visual token 022075|>
+<|visual token 022076|>
+<|visual token 022077|>
+<|visual token 022078|>
+<|visual token 022079|>
+<|visual token 022080|>
+<|visual token 022081|>
+<|visual token 022082|>
+<|visual token 022083|>
+<|visual token 022084|>
+<|visual token 022085|>
+<|visual token 022086|>
+<|visual token 022087|>
+<|visual token 022088|>
+<|visual token 022089|>
+<|visual token 022090|>
+<|visual token 022091|>
+<|visual token 022092|>
+<|visual token 022093|>
+<|visual token 022094|>
+<|visual token 022095|>
+<|visual token 022096|>
+<|visual token 022097|>
+<|visual token 022098|>
+<|visual token 022099|>
+<|visual token 022100|>
+<|visual token 022101|>
+<|visual token 022102|>
+<|visual token 022103|>
+<|visual token 022104|>
+<|visual token 022105|>
+<|visual token 022106|>
+<|visual token 022107|>
+<|visual token 022108|>
+<|visual token 022109|>
+<|visual token 022110|>
+<|visual token 022111|>
+<|visual token 022112|>
+<|visual token 022113|>
+<|visual token 022114|>
+<|visual token 022115|>
+<|visual token 022116|>
+<|visual token 022117|>
+<|visual token 022118|>
+<|visual token 022119|>
+<|visual token 022120|>
+<|visual token 022121|>
+<|visual token 022122|>
+<|visual token 022123|>
+<|visual token 022124|>
+<|visual token 022125|>
+<|visual token 022126|>
+<|visual token 022127|>
+<|visual token 022128|>
+<|visual token 022129|>
+<|visual token 022130|>
+<|visual token 022131|>
+<|visual token 022132|>
+<|visual token 022133|>
+<|visual token 022134|>
+<|visual token 022135|>
+<|visual token 022136|>
+<|visual token 022137|>
+<|visual token 022138|>
+<|visual token 022139|>
+<|visual token 022140|>
+<|visual token 022141|>
+<|visual token 022142|>
+<|visual token 022143|>
+<|visual token 022144|>
+<|visual token 022145|>
+<|visual token 022146|>
+<|visual token 022147|>
+<|visual token 022148|>
+<|visual token 022149|>
+<|visual token 022150|>
+<|visual token 022151|>
+<|visual token 022152|>
+<|visual token 022153|>
+<|visual token 022154|>
+<|visual token 022155|>
+<|visual token 022156|>
+<|visual token 022157|>
+<|visual token 022158|>
+<|visual token 022159|>
+<|visual token 022160|>
+<|visual token 022161|>
+<|visual token 022162|>
+<|visual token 022163|>
+<|visual token 022164|>
+<|visual token 022165|>
+<|visual token 022166|>
+<|visual token 022167|>
+<|visual token 022168|>
+<|visual token 022169|>
+<|visual token 022170|>
+<|visual token 022171|>
+<|visual token 022172|>
+<|visual token 022173|>
+<|visual token 022174|>
+<|visual token 022175|>
+<|visual token 022176|>
+<|visual token 022177|>
+<|visual token 022178|>
+<|visual token 022179|>
+<|visual token 022180|>
+<|visual token 022181|>
+<|visual token 022182|>
+<|visual token 022183|>
+<|visual token 022184|>
+<|visual token 022185|>
+<|visual token 022186|>
+<|visual token 022187|>
+<|visual token 022188|>
+<|visual token 022189|>
+<|visual token 022190|>
+<|visual token 022191|>
+<|visual token 022192|>
+<|visual token 022193|>
+<|visual token 022194|>
+<|visual token 022195|>
+<|visual token 022196|>
+<|visual token 022197|>
+<|visual token 022198|>
+<|visual token 022199|>
+<|visual token 022200|>
+<|visual token 022201|>
+<|visual token 022202|>
+<|visual token 022203|>
+<|visual token 022204|>
+<|visual token 022205|>
+<|visual token 022206|>
+<|visual token 022207|>
+<|visual token 022208|>
+<|visual token 022209|>
+<|visual token 022210|>
+<|visual token 022211|>
+<|visual token 022212|>
+<|visual token 022213|>
+<|visual token 022214|>
+<|visual token 022215|>
+<|visual token 022216|>
+<|visual token 022217|>
+<|visual token 022218|>
+<|visual token 022219|>
+<|visual token 022220|>
+<|visual token 022221|>
+<|visual token 022222|>
+<|visual token 022223|>
+<|visual token 022224|>
+<|visual token 022225|>
+<|visual token 022226|>
+<|visual token 022227|>
+<|visual token 022228|>
+<|visual token 022229|>
+<|visual token 022230|>
+<|visual token 022231|>
+<|visual token 022232|>
+<|visual token 022233|>
+<|visual token 022234|>
+<|visual token 022235|>
+<|visual token 022236|>
+<|visual token 022237|>
+<|visual token 022238|>
+<|visual token 022239|>
+<|visual token 022240|>
+<|visual token 022241|>
+<|visual token 022242|>
+<|visual token 022243|>
+<|visual token 022244|>
+<|visual token 022245|>
+<|visual token 022246|>
+<|visual token 022247|>
+<|visual token 022248|>
+<|visual token 022249|>
+<|visual token 022250|>
+<|visual token 022251|>
+<|visual token 022252|>
+<|visual token 022253|>
+<|visual token 022254|>
+<|visual token 022255|>
+<|visual token 022256|>
+<|visual token 022257|>
+<|visual token 022258|>
+<|visual token 022259|>
+<|visual token 022260|>
+<|visual token 022261|>
+<|visual token 022262|>
+<|visual token 022263|>
+<|visual token 022264|>
+<|visual token 022265|>
+<|visual token 022266|>
+<|visual token 022267|>
+<|visual token 022268|>
+<|visual token 022269|>
+<|visual token 022270|>
+<|visual token 022271|>
+<|visual token 022272|>
+<|visual token 022273|>
+<|visual token 022274|>
+<|visual token 022275|>
+<|visual token 022276|>
+<|visual token 022277|>
+<|visual token 022278|>
+<|visual token 022279|>
+<|visual token 022280|>
+<|visual token 022281|>
+<|visual token 022282|>
+<|visual token 022283|>
+<|visual token 022284|>
+<|visual token 022285|>
+<|visual token 022286|>
+<|visual token 022287|>
+<|visual token 022288|>
+<|visual token 022289|>
+<|visual token 022290|>
+<|visual token 022291|>
+<|visual token 022292|>
+<|visual token 022293|>
+<|visual token 022294|>
+<|visual token 022295|>
+<|visual token 022296|>
+<|visual token 022297|>
+<|visual token 022298|>
+<|visual token 022299|>
+<|visual token 022300|>
+<|visual token 022301|>
+<|visual token 022302|>
+<|visual token 022303|>
+<|visual token 022304|>
+<|visual token 022305|>
+<|visual token 022306|>
+<|visual token 022307|>
+<|visual token 022308|>
+<|visual token 022309|>
+<|visual token 022310|>
+<|visual token 022311|>
+<|visual token 022312|>
+<|visual token 022313|>
+<|visual token 022314|>
+<|visual token 022315|>
+<|visual token 022316|>
+<|visual token 022317|>
+<|visual token 022318|>
+<|visual token 022319|>
+<|visual token 022320|>
+<|visual token 022321|>
+<|visual token 022322|>
+<|visual token 022323|>
+<|visual token 022324|>
+<|visual token 022325|>
+<|visual token 022326|>
+<|visual token 022327|>
+<|visual token 022328|>
+<|visual token 022329|>
+<|visual token 022330|>
+<|visual token 022331|>
+<|visual token 022332|>
+<|visual token 022333|>
+<|visual token 022334|>
+<|visual token 022335|>
+<|visual token 022336|>
+<|visual token 022337|>
+<|visual token 022338|>
+<|visual token 022339|>
+<|visual token 022340|>
+<|visual token 022341|>
+<|visual token 022342|>
+<|visual token 022343|>
+<|visual token 022344|>
+<|visual token 022345|>
+<|visual token 022346|>
+<|visual token 022347|>
+<|visual token 022348|>
+<|visual token 022349|>
+<|visual token 022350|>
+<|visual token 022351|>
+<|visual token 022352|>
+<|visual token 022353|>
+<|visual token 022354|>
+<|visual token 022355|>
+<|visual token 022356|>
+<|visual token 022357|>
+<|visual token 022358|>
+<|visual token 022359|>
+<|visual token 022360|>
+<|visual token 022361|>
+<|visual token 022362|>
+<|visual token 022363|>
+<|visual token 022364|>
+<|visual token 022365|>
+<|visual token 022366|>
+<|visual token 022367|>
+<|visual token 022368|>
+<|visual token 022369|>
+<|visual token 022370|>
+<|visual token 022371|>
+<|visual token 022372|>
+<|visual token 022373|>
+<|visual token 022374|>
+<|visual token 022375|>
+<|visual token 022376|>
+<|visual token 022377|>
+<|visual token 022378|>
+<|visual token 022379|>
+<|visual token 022380|>
+<|visual token 022381|>
+<|visual token 022382|>
+<|visual token 022383|>
+<|visual token 022384|>
+<|visual token 022385|>
+<|visual token 022386|>
+<|visual token 022387|>
+<|visual token 022388|>
+<|visual token 022389|>
+<|visual token 022390|>
+<|visual token 022391|>
+<|visual token 022392|>
+<|visual token 022393|>
+<|visual token 022394|>
+<|visual token 022395|>
+<|visual token 022396|>
+<|visual token 022397|>
+<|visual token 022398|>
+<|visual token 022399|>
+<|visual token 022400|>
+<|visual token 022401|>
+<|visual token 022402|>
+<|visual token 022403|>
+<|visual token 022404|>
+<|visual token 022405|>
+<|visual token 022406|>
+<|visual token 022407|>
+<|visual token 022408|>
+<|visual token 022409|>
+<|visual token 022410|>
+<|visual token 022411|>
+<|visual token 022412|>
+<|visual token 022413|>
+<|visual token 022414|>
+<|visual token 022415|>
+<|visual token 022416|>
+<|visual token 022417|>
+<|visual token 022418|>
+<|visual token 022419|>
+<|visual token 022420|>
+<|visual token 022421|>
+<|visual token 022422|>
+<|visual token 022423|>
+<|visual token 022424|>
+<|visual token 022425|>
+<|visual token 022426|>
+<|visual token 022427|>
+<|visual token 022428|>
+<|visual token 022429|>
+<|visual token 022430|>
+<|visual token 022431|>
+<|visual token 022432|>
+<|visual token 022433|>
+<|visual token 022434|>
+<|visual token 022435|>
+<|visual token 022436|>
+<|visual token 022437|>
+<|visual token 022438|>
+<|visual token 022439|>
+<|visual token 022440|>
+<|visual token 022441|>
+<|visual token 022442|>
+<|visual token 022443|>
+<|visual token 022444|>
+<|visual token 022445|>
+<|visual token 022446|>
+<|visual token 022447|>
+<|visual token 022448|>
+<|visual token 022449|>
+<|visual token 022450|>
+<|visual token 022451|>
+<|visual token 022452|>
+<|visual token 022453|>
+<|visual token 022454|>
+<|visual token 022455|>
+<|visual token 022456|>
+<|visual token 022457|>
+<|visual token 022458|>
+<|visual token 022459|>
+<|visual token 022460|>
+<|visual token 022461|>
+<|visual token 022462|>
+<|visual token 022463|>
+<|visual token 022464|>
+<|visual token 022465|>
+<|visual token 022466|>
+<|visual token 022467|>
+<|visual token 022468|>
+<|visual token 022469|>
+<|visual token 022470|>
+<|visual token 022471|>
+<|visual token 022472|>
+<|visual token 022473|>
+<|visual token 022474|>
+<|visual token 022475|>
+<|visual token 022476|>
+<|visual token 022477|>
+<|visual token 022478|>
+<|visual token 022479|>
+<|visual token 022480|>
+<|visual token 022481|>
+<|visual token 022482|>
+<|visual token 022483|>
+<|visual token 022484|>
+<|visual token 022485|>
+<|visual token 022486|>
+<|visual token 022487|>
+<|visual token 022488|>
+<|visual token 022489|>
+<|visual token 022490|>
+<|visual token 022491|>
+<|visual token 022492|>
+<|visual token 022493|>
+<|visual token 022494|>
+<|visual token 022495|>
+<|visual token 022496|>
+<|visual token 022497|>
+<|visual token 022498|>
+<|visual token 022499|>
+<|visual token 022500|>
+<|visual token 022501|>
+<|visual token 022502|>
+<|visual token 022503|>
+<|visual token 022504|>
+<|visual token 022505|>
+<|visual token 022506|>
+<|visual token 022507|>
+<|visual token 022508|>
+<|visual token 022509|>
+<|visual token 022510|>
+<|visual token 022511|>
+<|visual token 022512|>
+<|visual token 022513|>
+<|visual token 022514|>
+<|visual token 022515|>
+<|visual token 022516|>
+<|visual token 022517|>
+<|visual token 022518|>
+<|visual token 022519|>
+<|visual token 022520|>
+<|visual token 022521|>
+<|visual token 022522|>
+<|visual token 022523|>
+<|visual token 022524|>
+<|visual token 022525|>
+<|visual token 022526|>
+<|visual token 022527|>
+<|visual token 022528|>
+<|visual token 022529|>
+<|visual token 022530|>
+<|visual token 022531|>
+<|visual token 022532|>
+<|visual token 022533|>
+<|visual token 022534|>
+<|visual token 022535|>
+<|visual token 022536|>
+<|visual token 022537|>
+<|visual token 022538|>
+<|visual token 022539|>
+<|visual token 022540|>
+<|visual token 022541|>
+<|visual token 022542|>
+<|visual token 022543|>
+<|visual token 022544|>
+<|visual token 022545|>
+<|visual token 022546|>
+<|visual token 022547|>
+<|visual token 022548|>
+<|visual token 022549|>
+<|visual token 022550|>
+<|visual token 022551|>
+<|visual token 022552|>
+<|visual token 022553|>
+<|visual token 022554|>
+<|visual token 022555|>
+<|visual token 022556|>
+<|visual token 022557|>
+<|visual token 022558|>
+<|visual token 022559|>
+<|visual token 022560|>
+<|visual token 022561|>
+<|visual token 022562|>
+<|visual token 022563|>
+<|visual token 022564|>
+<|visual token 022565|>
+<|visual token 022566|>
+<|visual token 022567|>
+<|visual token 022568|>
+<|visual token 022569|>
+<|visual token 022570|>
+<|visual token 022571|>
+<|visual token 022572|>
+<|visual token 022573|>
+<|visual token 022574|>
+<|visual token 022575|>
+<|visual token 022576|>
+<|visual token 022577|>
+<|visual token 022578|>
+<|visual token 022579|>
+<|visual token 022580|>
+<|visual token 022581|>
+<|visual token 022582|>
+<|visual token 022583|>
+<|visual token 022584|>
+<|visual token 022585|>
+<|visual token 022586|>
+<|visual token 022587|>
+<|visual token 022588|>
+<|visual token 022589|>
+<|visual token 022590|>
+<|visual token 022591|>
+<|visual token 022592|>
+<|visual token 022593|>
+<|visual token 022594|>
+<|visual token 022595|>
+<|visual token 022596|>
+<|visual token 022597|>
+<|visual token 022598|>
+<|visual token 022599|>
+<|visual token 022600|>
+<|visual token 022601|>
+<|visual token 022602|>
+<|visual token 022603|>
+<|visual token 022604|>
+<|visual token 022605|>
+<|visual token 022606|>
+<|visual token 022607|>
+<|visual token 022608|>
+<|visual token 022609|>
+<|visual token 022610|>
+<|visual token 022611|>
+<|visual token 022612|>
+<|visual token 022613|>
+<|visual token 022614|>
+<|visual token 022615|>
+<|visual token 022616|>
+<|visual token 022617|>
+<|visual token 022618|>
+<|visual token 022619|>
+<|visual token 022620|>
+<|visual token 022621|>
+<|visual token 022622|>
+<|visual token 022623|>
+<|visual token 022624|>
+<|visual token 022625|>
+<|visual token 022626|>
+<|visual token 022627|>
+<|visual token 022628|>
+<|visual token 022629|>
+<|visual token 022630|>
+<|visual token 022631|>
+<|visual token 022632|>
+<|visual token 022633|>
+<|visual token 022634|>
+<|visual token 022635|>
+<|visual token 022636|>
+<|visual token 022637|>
+<|visual token 022638|>
+<|visual token 022639|>
+<|visual token 022640|>
+<|visual token 022641|>
+<|visual token 022642|>
+<|visual token 022643|>
+<|visual token 022644|>
+<|visual token 022645|>
+<|visual token 022646|>
+<|visual token 022647|>
+<|visual token 022648|>
+<|visual token 022649|>
+<|visual token 022650|>
+<|visual token 022651|>
+<|visual token 022652|>
+<|visual token 022653|>
+<|visual token 022654|>
+<|visual token 022655|>
+<|visual token 022656|>
+<|visual token 022657|>
+<|visual token 022658|>
+<|visual token 022659|>
+<|visual token 022660|>
+<|visual token 022661|>
+<|visual token 022662|>
+<|visual token 022663|>
+<|visual token 022664|>
+<|visual token 022665|>
+<|visual token 022666|>
+<|visual token 022667|>
+<|visual token 022668|>
+<|visual token 022669|>
+<|visual token 022670|>
+<|visual token 022671|>
+<|visual token 022672|>
+<|visual token 022673|>
+<|visual token 022674|>
+<|visual token 022675|>
+<|visual token 022676|>
+<|visual token 022677|>
+<|visual token 022678|>
+<|visual token 022679|>
+<|visual token 022680|>
+<|visual token 022681|>
+<|visual token 022682|>
+<|visual token 022683|>
+<|visual token 022684|>
+<|visual token 022685|>
+<|visual token 022686|>
+<|visual token 022687|>
+<|visual token 022688|>
+<|visual token 022689|>
+<|visual token 022690|>
+<|visual token 022691|>
+<|visual token 022692|>
+<|visual token 022693|>
+<|visual token 022694|>
+<|visual token 022695|>
+<|visual token 022696|>
+<|visual token 022697|>
+<|visual token 022698|>
+<|visual token 022699|>
+<|visual token 022700|>
+<|visual token 022701|>
+<|visual token 022702|>
+<|visual token 022703|>
+<|visual token 022704|>
+<|visual token 022705|>
+<|visual token 022706|>
+<|visual token 022707|>
+<|visual token 022708|>
+<|visual token 022709|>
+<|visual token 022710|>
+<|visual token 022711|>
+<|visual token 022712|>
+<|visual token 022713|>
+<|visual token 022714|>
+<|visual token 022715|>
+<|visual token 022716|>
+<|visual token 022717|>
+<|visual token 022718|>
+<|visual token 022719|>
+<|visual token 022720|>
+<|visual token 022721|>
+<|visual token 022722|>
+<|visual token 022723|>
+<|visual token 022724|>
+<|visual token 022725|>
+<|visual token 022726|>
+<|visual token 022727|>
+<|visual token 022728|>
+<|visual token 022729|>
+<|visual token 022730|>
+<|visual token 022731|>
+<|visual token 022732|>
+<|visual token 022733|>
+<|visual token 022734|>
+<|visual token 022735|>
+<|visual token 022736|>
+<|visual token 022737|>
+<|visual token 022738|>
+<|visual token 022739|>
+<|visual token 022740|>
+<|visual token 022741|>
+<|visual token 022742|>
+<|visual token 022743|>
+<|visual token 022744|>
+<|visual token 022745|>
+<|visual token 022746|>
+<|visual token 022747|>
+<|visual token 022748|>
+<|visual token 022749|>
+<|visual token 022750|>
+<|visual token 022751|>
+<|visual token 022752|>
+<|visual token 022753|>
+<|visual token 022754|>
+<|visual token 022755|>
+<|visual token 022756|>
+<|visual token 022757|>
+<|visual token 022758|>
+<|visual token 022759|>
+<|visual token 022760|>
+<|visual token 022761|>
+<|visual token 022762|>
+<|visual token 022763|>
+<|visual token 022764|>
+<|visual token 022765|>
+<|visual token 022766|>
+<|visual token 022767|>
+<|visual token 022768|>
+<|visual token 022769|>
+<|visual token 022770|>
+<|visual token 022771|>
+<|visual token 022772|>
+<|visual token 022773|>
+<|visual token 022774|>
+<|visual token 022775|>
+<|visual token 022776|>
+<|visual token 022777|>
+<|visual token 022778|>
+<|visual token 022779|>
+<|visual token 022780|>
+<|visual token 022781|>
+<|visual token 022782|>
+<|visual token 022783|>
+<|visual token 022784|>
+<|visual token 022785|>
+<|visual token 022786|>
+<|visual token 022787|>
+<|visual token 022788|>
+<|visual token 022789|>
+<|visual token 022790|>
+<|visual token 022791|>
+<|visual token 022792|>
+<|visual token 022793|>
+<|visual token 022794|>
+<|visual token 022795|>
+<|visual token 022796|>
+<|visual token 022797|>
+<|visual token 022798|>
+<|visual token 022799|>
+<|visual token 022800|>
+<|visual token 022801|>
+<|visual token 022802|>
+<|visual token 022803|>
+<|visual token 022804|>
+<|visual token 022805|>
+<|visual token 022806|>
+<|visual token 022807|>
+<|visual token 022808|>
+<|visual token 022809|>
+<|visual token 022810|>
+<|visual token 022811|>
+<|visual token 022812|>
+<|visual token 022813|>
+<|visual token 022814|>
+<|visual token 022815|>
+<|visual token 022816|>
+<|visual token 022817|>
+<|visual token 022818|>
+<|visual token 022819|>
+<|visual token 022820|>
+<|visual token 022821|>
+<|visual token 022822|>
+<|visual token 022823|>
+<|visual token 022824|>
+<|visual token 022825|>
+<|visual token 022826|>
+<|visual token 022827|>
+<|visual token 022828|>
+<|visual token 022829|>
+<|visual token 022830|>
+<|visual token 022831|>
+<|visual token 022832|>
+<|visual token 022833|>
+<|visual token 022834|>
+<|visual token 022835|>
+<|visual token 022836|>
+<|visual token 022837|>
+<|visual token 022838|>
+<|visual token 022839|>
+<|visual token 022840|>
+<|visual token 022841|>
+<|visual token 022842|>
+<|visual token 022843|>
+<|visual token 022844|>
+<|visual token 022845|>
+<|visual token 022846|>
+<|visual token 022847|>
+<|visual token 022848|>
+<|visual token 022849|>
+<|visual token 022850|>
+<|visual token 022851|>
+<|visual token 022852|>
+<|visual token 022853|>
+<|visual token 022854|>
+<|visual token 022855|>
+<|visual token 022856|>
+<|visual token 022857|>
+<|visual token 022858|>
+<|visual token 022859|>
+<|visual token 022860|>
+<|visual token 022861|>
+<|visual token 022862|>
+<|visual token 022863|>
+<|visual token 022864|>
+<|visual token 022865|>
+<|visual token 022866|>
+<|visual token 022867|>
+<|visual token 022868|>
+<|visual token 022869|>
+<|visual token 022870|>
+<|visual token 022871|>
+<|visual token 022872|>
+<|visual token 022873|>
+<|visual token 022874|>
+<|visual token 022875|>
+<|visual token 022876|>
+<|visual token 022877|>
+<|visual token 022878|>
+<|visual token 022879|>
+<|visual token 022880|>
+<|visual token 022881|>
+<|visual token 022882|>
+<|visual token 022883|>
+<|visual token 022884|>
+<|visual token 022885|>
+<|visual token 022886|>
+<|visual token 022887|>
+<|visual token 022888|>
+<|visual token 022889|>
+<|visual token 022890|>
+<|visual token 022891|>
+<|visual token 022892|>
+<|visual token 022893|>
+<|visual token 022894|>
+<|visual token 022895|>
+<|visual token 022896|>
+<|visual token 022897|>
+<|visual token 022898|>
+<|visual token 022899|>
+<|visual token 022900|>
+<|visual token 022901|>
+<|visual token 022902|>
+<|visual token 022903|>
+<|visual token 022904|>
+<|visual token 022905|>
+<|visual token 022906|>
+<|visual token 022907|>
+<|visual token 022908|>
+<|visual token 022909|>
+<|visual token 022910|>
+<|visual token 022911|>
+<|visual token 022912|>
+<|visual token 022913|>
+<|visual token 022914|>
+<|visual token 022915|>
+<|visual token 022916|>
+<|visual token 022917|>
+<|visual token 022918|>
+<|visual token 022919|>
+<|visual token 022920|>
+<|visual token 022921|>
+<|visual token 022922|>
+<|visual token 022923|>
+<|visual token 022924|>
+<|visual token 022925|>
+<|visual token 022926|>
+<|visual token 022927|>
+<|visual token 022928|>
+<|visual token 022929|>
+<|visual token 022930|>
+<|visual token 022931|>
+<|visual token 022932|>
+<|visual token 022933|>
+<|visual token 022934|>
+<|visual token 022935|>
+<|visual token 022936|>
+<|visual token 022937|>
+<|visual token 022938|>
+<|visual token 022939|>
+<|visual token 022940|>
+<|visual token 022941|>
+<|visual token 022942|>
+<|visual token 022943|>
+<|visual token 022944|>
+<|visual token 022945|>
+<|visual token 022946|>
+<|visual token 022947|>
+<|visual token 022948|>
+<|visual token 022949|>
+<|visual token 022950|>
+<|visual token 022951|>
+<|visual token 022952|>
+<|visual token 022953|>
+<|visual token 022954|>
+<|visual token 022955|>
+<|visual token 022956|>
+<|visual token 022957|>
+<|visual token 022958|>
+<|visual token 022959|>
+<|visual token 022960|>
+<|visual token 022961|>
+<|visual token 022962|>
+<|visual token 022963|>
+<|visual token 022964|>
+<|visual token 022965|>
+<|visual token 022966|>
+<|visual token 022967|>
+<|visual token 022968|>
+<|visual token 022969|>
+<|visual token 022970|>
+<|visual token 022971|>
+<|visual token 022972|>
+<|visual token 022973|>
+<|visual token 022974|>
+<|visual token 022975|>
+<|visual token 022976|>
+<|visual token 022977|>
+<|visual token 022978|>
+<|visual token 022979|>
+<|visual token 022980|>
+<|visual token 022981|>
+<|visual token 022982|>
+<|visual token 022983|>
+<|visual token 022984|>
+<|visual token 022985|>
+<|visual token 022986|>
+<|visual token 022987|>
+<|visual token 022988|>
+<|visual token 022989|>
+<|visual token 022990|>
+<|visual token 022991|>
+<|visual token 022992|>
+<|visual token 022993|>
+<|visual token 022994|>
+<|visual token 022995|>
+<|visual token 022996|>
+<|visual token 022997|>
+<|visual token 022998|>
+<|visual token 022999|>
+<|visual token 023000|>
+<|visual token 023001|>
+<|visual token 023002|>
+<|visual token 023003|>
+<|visual token 023004|>
+<|visual token 023005|>
+<|visual token 023006|>
+<|visual token 023007|>
+<|visual token 023008|>
+<|visual token 023009|>
+<|visual token 023010|>
+<|visual token 023011|>
+<|visual token 023012|>
+<|visual token 023013|>
+<|visual token 023014|>
+<|visual token 023015|>
+<|visual token 023016|>
+<|visual token 023017|>
+<|visual token 023018|>
+<|visual token 023019|>
+<|visual token 023020|>
+<|visual token 023021|>
+<|visual token 023022|>
+<|visual token 023023|>
+<|visual token 023024|>
+<|visual token 023025|>
+<|visual token 023026|>
+<|visual token 023027|>
+<|visual token 023028|>
+<|visual token 023029|>
+<|visual token 023030|>
+<|visual token 023031|>
+<|visual token 023032|>
+<|visual token 023033|>
+<|visual token 023034|>
+<|visual token 023035|>
+<|visual token 023036|>
+<|visual token 023037|>
+<|visual token 023038|>
+<|visual token 023039|>
+<|visual token 023040|>
+<|visual token 023041|>
+<|visual token 023042|>
+<|visual token 023043|>
+<|visual token 023044|>
+<|visual token 023045|>
+<|visual token 023046|>
+<|visual token 023047|>
+<|visual token 023048|>
+<|visual token 023049|>
+<|visual token 023050|>
+<|visual token 023051|>
+<|visual token 023052|>
+<|visual token 023053|>
+<|visual token 023054|>
+<|visual token 023055|>
+<|visual token 023056|>
+<|visual token 023057|>
+<|visual token 023058|>
+<|visual token 023059|>
+<|visual token 023060|>
+<|visual token 023061|>
+<|visual token 023062|>
+<|visual token 023063|>
+<|visual token 023064|>
+<|visual token 023065|>
+<|visual token 023066|>
+<|visual token 023067|>
+<|visual token 023068|>
+<|visual token 023069|>
+<|visual token 023070|>
+<|visual token 023071|>
+<|visual token 023072|>
+<|visual token 023073|>
+<|visual token 023074|>
+<|visual token 023075|>
+<|visual token 023076|>
+<|visual token 023077|>
+<|visual token 023078|>
+<|visual token 023079|>
+<|visual token 023080|>
+<|visual token 023081|>
+<|visual token 023082|>
+<|visual token 023083|>
+<|visual token 023084|>
+<|visual token 023085|>
+<|visual token 023086|>
+<|visual token 023087|>
+<|visual token 023088|>
+<|visual token 023089|>
+<|visual token 023090|>
+<|visual token 023091|>
+<|visual token 023092|>
+<|visual token 023093|>
+<|visual token 023094|>
+<|visual token 023095|>
+<|visual token 023096|>
+<|visual token 023097|>
+<|visual token 023098|>
+<|visual token 023099|>
+<|visual token 023100|>
+<|visual token 023101|>
+<|visual token 023102|>
+<|visual token 023103|>
+<|visual token 023104|>
+<|visual token 023105|>
+<|visual token 023106|>
+<|visual token 023107|>
+<|visual token 023108|>
+<|visual token 023109|>
+<|visual token 023110|>
+<|visual token 023111|>
+<|visual token 023112|>
+<|visual token 023113|>
+<|visual token 023114|>
+<|visual token 023115|>
+<|visual token 023116|>
+<|visual token 023117|>
+<|visual token 023118|>
+<|visual token 023119|>
+<|visual token 023120|>
+<|visual token 023121|>
+<|visual token 023122|>
+<|visual token 023123|>
+<|visual token 023124|>
+<|visual token 023125|>
+<|visual token 023126|>
+<|visual token 023127|>
+<|visual token 023128|>
+<|visual token 023129|>
+<|visual token 023130|>
+<|visual token 023131|>
+<|visual token 023132|>
+<|visual token 023133|>
+<|visual token 023134|>
+<|visual token 023135|>
+<|visual token 023136|>
+<|visual token 023137|>
+<|visual token 023138|>
+<|visual token 023139|>
+<|visual token 023140|>
+<|visual token 023141|>
+<|visual token 023142|>
+<|visual token 023143|>
+<|visual token 023144|>
+<|visual token 023145|>
+<|visual token 023146|>
+<|visual token 023147|>
+<|visual token 023148|>
+<|visual token 023149|>
+<|visual token 023150|>
+<|visual token 023151|>
+<|visual token 023152|>
+<|visual token 023153|>
+<|visual token 023154|>
+<|visual token 023155|>
+<|visual token 023156|>
+<|visual token 023157|>
+<|visual token 023158|>
+<|visual token 023159|>
+<|visual token 023160|>
+<|visual token 023161|>
+<|visual token 023162|>
+<|visual token 023163|>
+<|visual token 023164|>
+<|visual token 023165|>
+<|visual token 023166|>
+<|visual token 023167|>
+<|visual token 023168|>
+<|visual token 023169|>
+<|visual token 023170|>
+<|visual token 023171|>
+<|visual token 023172|>
+<|visual token 023173|>
+<|visual token 023174|>
+<|visual token 023175|>
+<|visual token 023176|>
+<|visual token 023177|>
+<|visual token 023178|>
+<|visual token 023179|>
+<|visual token 023180|>
+<|visual token 023181|>
+<|visual token 023182|>
+<|visual token 023183|>
+<|visual token 023184|>
+<|visual token 023185|>
+<|visual token 023186|>
+<|visual token 023187|>
+<|visual token 023188|>
+<|visual token 023189|>
+<|visual token 023190|>
+<|visual token 023191|>
+<|visual token 023192|>
+<|visual token 023193|>
+<|visual token 023194|>
+<|visual token 023195|>
+<|visual token 023196|>
+<|visual token 023197|>
+<|visual token 023198|>
+<|visual token 023199|>
+<|visual token 023200|>
+<|visual token 023201|>
+<|visual token 023202|>
+<|visual token 023203|>
+<|visual token 023204|>
+<|visual token 023205|>
+<|visual token 023206|>
+<|visual token 023207|>
+<|visual token 023208|>
+<|visual token 023209|>
+<|visual token 023210|>
+<|visual token 023211|>
+<|visual token 023212|>
+<|visual token 023213|>
+<|visual token 023214|>
+<|visual token 023215|>
+<|visual token 023216|>
+<|visual token 023217|>
+<|visual token 023218|>
+<|visual token 023219|>
+<|visual token 023220|>
+<|visual token 023221|>
+<|visual token 023222|>
+<|visual token 023223|>
+<|visual token 023224|>
+<|visual token 023225|>
+<|visual token 023226|>
+<|visual token 023227|>
+<|visual token 023228|>
+<|visual token 023229|>
+<|visual token 023230|>
+<|visual token 023231|>
+<|visual token 023232|>
+<|visual token 023233|>
+<|visual token 023234|>
+<|visual token 023235|>
+<|visual token 023236|>
+<|visual token 023237|>
+<|visual token 023238|>
+<|visual token 023239|>
+<|visual token 023240|>
+<|visual token 023241|>
+<|visual token 023242|>
+<|visual token 023243|>
+<|visual token 023244|>
+<|visual token 023245|>
+<|visual token 023246|>
+<|visual token 023247|>
+<|visual token 023248|>
+<|visual token 023249|>
+<|visual token 023250|>
+<|visual token 023251|>
+<|visual token 023252|>
+<|visual token 023253|>
+<|visual token 023254|>
+<|visual token 023255|>
+<|visual token 023256|>
+<|visual token 023257|>
+<|visual token 023258|>
+<|visual token 023259|>
+<|visual token 023260|>
+<|visual token 023261|>
+<|visual token 023262|>
+<|visual token 023263|>
+<|visual token 023264|>
+<|visual token 023265|>
+<|visual token 023266|>
+<|visual token 023267|>
+<|visual token 023268|>
+<|visual token 023269|>
+<|visual token 023270|>
+<|visual token 023271|>
+<|visual token 023272|>
+<|visual token 023273|>
+<|visual token 023274|>
+<|visual token 023275|>
+<|visual token 023276|>
+<|visual token 023277|>
+<|visual token 023278|>
+<|visual token 023279|>
+<|visual token 023280|>
+<|visual token 023281|>
+<|visual token 023282|>
+<|visual token 023283|>
+<|visual token 023284|>
+<|visual token 023285|>
+<|visual token 023286|>
+<|visual token 023287|>
+<|visual token 023288|>
+<|visual token 023289|>
+<|visual token 023290|>
+<|visual token 023291|>
+<|visual token 023292|>
+<|visual token 023293|>
+<|visual token 023294|>
+<|visual token 023295|>
+<|visual token 023296|>
+<|visual token 023297|>
+<|visual token 023298|>
+<|visual token 023299|>
+<|visual token 023300|>
+<|visual token 023301|>
+<|visual token 023302|>
+<|visual token 023303|>
+<|visual token 023304|>
+<|visual token 023305|>
+<|visual token 023306|>
+<|visual token 023307|>
+<|visual token 023308|>
+<|visual token 023309|>
+<|visual token 023310|>
+<|visual token 023311|>
+<|visual token 023312|>
+<|visual token 023313|>
+<|visual token 023314|>
+<|visual token 023315|>
+<|visual token 023316|>
+<|visual token 023317|>
+<|visual token 023318|>
+<|visual token 023319|>
+<|visual token 023320|>
+<|visual token 023321|>
+<|visual token 023322|>
+<|visual token 023323|>
+<|visual token 023324|>
+<|visual token 023325|>
+<|visual token 023326|>
+<|visual token 023327|>
+<|visual token 023328|>
+<|visual token 023329|>
+<|visual token 023330|>
+<|visual token 023331|>
+<|visual token 023332|>
+<|visual token 023333|>
+<|visual token 023334|>
+<|visual token 023335|>
+<|visual token 023336|>
+<|visual token 023337|>
+<|visual token 023338|>
+<|visual token 023339|>
+<|visual token 023340|>
+<|visual token 023341|>
+<|visual token 023342|>
+<|visual token 023343|>
+<|visual token 023344|>
+<|visual token 023345|>
+<|visual token 023346|>
+<|visual token 023347|>
+<|visual token 023348|>
+<|visual token 023349|>
+<|visual token 023350|>
+<|visual token 023351|>
+<|visual token 023352|>
+<|visual token 023353|>
+<|visual token 023354|>
+<|visual token 023355|>
+<|visual token 023356|>
+<|visual token 023357|>
+<|visual token 023358|>
+<|visual token 023359|>
+<|visual token 023360|>
+<|visual token 023361|>
+<|visual token 023362|>
+<|visual token 023363|>
+<|visual token 023364|>
+<|visual token 023365|>
+<|visual token 023366|>
+<|visual token 023367|>
+<|visual token 023368|>
+<|visual token 023369|>
+<|visual token 023370|>
+<|visual token 023371|>
+<|visual token 023372|>
+<|visual token 023373|>
+<|visual token 023374|>
+<|visual token 023375|>
+<|visual token 023376|>
+<|visual token 023377|>
+<|visual token 023378|>
+<|visual token 023379|>
+<|visual token 023380|>
+<|visual token 023381|>
+<|visual token 023382|>
+<|visual token 023383|>
+<|visual token 023384|>
+<|visual token 023385|>
+<|visual token 023386|>
+<|visual token 023387|>
+<|visual token 023388|>
+<|visual token 023389|>
+<|visual token 023390|>
+<|visual token 023391|>
+<|visual token 023392|>
+<|visual token 023393|>
+<|visual token 023394|>
+<|visual token 023395|>
+<|visual token 023396|>
+<|visual token 023397|>
+<|visual token 023398|>
+<|visual token 023399|>
+<|visual token 023400|>
+<|visual token 023401|>
+<|visual token 023402|>
+<|visual token 023403|>
+<|visual token 023404|>
+<|visual token 023405|>
+<|visual token 023406|>
+<|visual token 023407|>
+<|visual token 023408|>
+<|visual token 023409|>
+<|visual token 023410|>
+<|visual token 023411|>
+<|visual token 023412|>
+<|visual token 023413|>
+<|visual token 023414|>
+<|visual token 023415|>
+<|visual token 023416|>
+<|visual token 023417|>
+<|visual token 023418|>
+<|visual token 023419|>
+<|visual token 023420|>
+<|visual token 023421|>
+<|visual token 023422|>
+<|visual token 023423|>
+<|visual token 023424|>
+<|visual token 023425|>
+<|visual token 023426|>
+<|visual token 023427|>
+<|visual token 023428|>
+<|visual token 023429|>
+<|visual token 023430|>
+<|visual token 023431|>
+<|visual token 023432|>
+<|visual token 023433|>
+<|visual token 023434|>
+<|visual token 023435|>
+<|visual token 023436|>
+<|visual token 023437|>
+<|visual token 023438|>
+<|visual token 023439|>
+<|visual token 023440|>
+<|visual token 023441|>
+<|visual token 023442|>
+<|visual token 023443|>
+<|visual token 023444|>
+<|visual token 023445|>
+<|visual token 023446|>
+<|visual token 023447|>
+<|visual token 023448|>
+<|visual token 023449|>
+<|visual token 023450|>
+<|visual token 023451|>
+<|visual token 023452|>
+<|visual token 023453|>
+<|visual token 023454|>
+<|visual token 023455|>
+<|visual token 023456|>
+<|visual token 023457|>
+<|visual token 023458|>
+<|visual token 023459|>
+<|visual token 023460|>
+<|visual token 023461|>
+<|visual token 023462|>
+<|visual token 023463|>
+<|visual token 023464|>
+<|visual token 023465|>
+<|visual token 023466|>
+<|visual token 023467|>
+<|visual token 023468|>
+<|visual token 023469|>
+<|visual token 023470|>
+<|visual token 023471|>
+<|visual token 023472|>
+<|visual token 023473|>
+<|visual token 023474|>
+<|visual token 023475|>
+<|visual token 023476|>
+<|visual token 023477|>
+<|visual token 023478|>
+<|visual token 023479|>
+<|visual token 023480|>
+<|visual token 023481|>
+<|visual token 023482|>
+<|visual token 023483|>
+<|visual token 023484|>
+<|visual token 023485|>
+<|visual token 023486|>
+<|visual token 023487|>
+<|visual token 023488|>
+<|visual token 023489|>
+<|visual token 023490|>
+<|visual token 023491|>
+<|visual token 023492|>
+<|visual token 023493|>
+<|visual token 023494|>
+<|visual token 023495|>
+<|visual token 023496|>
+<|visual token 023497|>
+<|visual token 023498|>
+<|visual token 023499|>
+<|visual token 023500|>
+<|visual token 023501|>
+<|visual token 023502|>
+<|visual token 023503|>
+<|visual token 023504|>
+<|visual token 023505|>
+<|visual token 023506|>
+<|visual token 023507|>
+<|visual token 023508|>
+<|visual token 023509|>
+<|visual token 023510|>
+<|visual token 023511|>
+<|visual token 023512|>
+<|visual token 023513|>
+<|visual token 023514|>
+<|visual token 023515|>
+<|visual token 023516|>
+<|visual token 023517|>
+<|visual token 023518|>
+<|visual token 023519|>
+<|visual token 023520|>
+<|visual token 023521|>
+<|visual token 023522|>
+<|visual token 023523|>
+<|visual token 023524|>
+<|visual token 023525|>
+<|visual token 023526|>
+<|visual token 023527|>
+<|visual token 023528|>
+<|visual token 023529|>
+<|visual token 023530|>
+<|visual token 023531|>
+<|visual token 023532|>
+<|visual token 023533|>
+<|visual token 023534|>
+<|visual token 023535|>
+<|visual token 023536|>
+<|visual token 023537|>
+<|visual token 023538|>
+<|visual token 023539|>
+<|visual token 023540|>
+<|visual token 023541|>
+<|visual token 023542|>
+<|visual token 023543|>
+<|visual token 023544|>
+<|visual token 023545|>
+<|visual token 023546|>
+<|visual token 023547|>
+<|visual token 023548|>
+<|visual token 023549|>
+<|visual token 023550|>
+<|visual token 023551|>
+<|visual token 023552|>
+<|visual token 023553|>
+<|visual token 023554|>
+<|visual token 023555|>
+<|visual token 023556|>
+<|visual token 023557|>
+<|visual token 023558|>
+<|visual token 023559|>
+<|visual token 023560|>
+<|visual token 023561|>
+<|visual token 023562|>
+<|visual token 023563|>
+<|visual token 023564|>
+<|visual token 023565|>
+<|visual token 023566|>
+<|visual token 023567|>
+<|visual token 023568|>
+<|visual token 023569|>
+<|visual token 023570|>
+<|visual token 023571|>
+<|visual token 023572|>
+<|visual token 023573|>
+<|visual token 023574|>
+<|visual token 023575|>
+<|visual token 023576|>
+<|visual token 023577|>
+<|visual token 023578|>
+<|visual token 023579|>
+<|visual token 023580|>
+<|visual token 023581|>
+<|visual token 023582|>
+<|visual token 023583|>
+<|visual token 023584|>
+<|visual token 023585|>
+<|visual token 023586|>
+<|visual token 023587|>
+<|visual token 023588|>
+<|visual token 023589|>
+<|visual token 023590|>
+<|visual token 023591|>
+<|visual token 023592|>
+<|visual token 023593|>
+<|visual token 023594|>
+<|visual token 023595|>
+<|visual token 023596|>
+<|visual token 023597|>
+<|visual token 023598|>
+<|visual token 023599|>
+<|visual token 023600|>
+<|visual token 023601|>
+<|visual token 023602|>
+<|visual token 023603|>
+<|visual token 023604|>
+<|visual token 023605|>
+<|visual token 023606|>
+<|visual token 023607|>
+<|visual token 023608|>
+<|visual token 023609|>
+<|visual token 023610|>
+<|visual token 023611|>
+<|visual token 023612|>
+<|visual token 023613|>
+<|visual token 023614|>
+<|visual token 023615|>
+<|visual token 023616|>
+<|visual token 023617|>
+<|visual token 023618|>
+<|visual token 023619|>
+<|visual token 023620|>
+<|visual token 023621|>
+<|visual token 023622|>
+<|visual token 023623|>
+<|visual token 023624|>
+<|visual token 023625|>
+<|visual token 023626|>
+<|visual token 023627|>
+<|visual token 023628|>
+<|visual token 023629|>
+<|visual token 023630|>
+<|visual token 023631|>
+<|visual token 023632|>
+<|visual token 023633|>
+<|visual token 023634|>
+<|visual token 023635|>
+<|visual token 023636|>
+<|visual token 023637|>
+<|visual token 023638|>
+<|visual token 023639|>
+<|visual token 023640|>
+<|visual token 023641|>
+<|visual token 023642|>
+<|visual token 023643|>
+<|visual token 023644|>
+<|visual token 023645|>
+<|visual token 023646|>
+<|visual token 023647|>
+<|visual token 023648|>
+<|visual token 023649|>
+<|visual token 023650|>
+<|visual token 023651|>
+<|visual token 023652|>
+<|visual token 023653|>
+<|visual token 023654|>
+<|visual token 023655|>
+<|visual token 023656|>
+<|visual token 023657|>
+<|visual token 023658|>
+<|visual token 023659|>
+<|visual token 023660|>
+<|visual token 023661|>
+<|visual token 023662|>
+<|visual token 023663|>
+<|visual token 023664|>
+<|visual token 023665|>
+<|visual token 023666|>
+<|visual token 023667|>
+<|visual token 023668|>
+<|visual token 023669|>
+<|visual token 023670|>
+<|visual token 023671|>
+<|visual token 023672|>
+<|visual token 023673|>
+<|visual token 023674|>
+<|visual token 023675|>
+<|visual token 023676|>
+<|visual token 023677|>
+<|visual token 023678|>
+<|visual token 023679|>
+<|visual token 023680|>
+<|visual token 023681|>
+<|visual token 023682|>
+<|visual token 023683|>
+<|visual token 023684|>
+<|visual token 023685|>
+<|visual token 023686|>
+<|visual token 023687|>
+<|visual token 023688|>
+<|visual token 023689|>
+<|visual token 023690|>
+<|visual token 023691|>
+<|visual token 023692|>
+<|visual token 023693|>
+<|visual token 023694|>
+<|visual token 023695|>
+<|visual token 023696|>
+<|visual token 023697|>
+<|visual token 023698|>
+<|visual token 023699|>
+<|visual token 023700|>
+<|visual token 023701|>
+<|visual token 023702|>
+<|visual token 023703|>
+<|visual token 023704|>
+<|visual token 023705|>
+<|visual token 023706|>
+<|visual token 023707|>
+<|visual token 023708|>
+<|visual token 023709|>
+<|visual token 023710|>
+<|visual token 023711|>
+<|visual token 023712|>
+<|visual token 023713|>
+<|visual token 023714|>
+<|visual token 023715|>
+<|visual token 023716|>
+<|visual token 023717|>
+<|visual token 023718|>
+<|visual token 023719|>
+<|visual token 023720|>
+<|visual token 023721|>
+<|visual token 023722|>
+<|visual token 023723|>
+<|visual token 023724|>
+<|visual token 023725|>
+<|visual token 023726|>
+<|visual token 023727|>
+<|visual token 023728|>
+<|visual token 023729|>
+<|visual token 023730|>
+<|visual token 023731|>
+<|visual token 023732|>
+<|visual token 023733|>
+<|visual token 023734|>
+<|visual token 023735|>
+<|visual token 023736|>
+<|visual token 023737|>
+<|visual token 023738|>
+<|visual token 023739|>
+<|visual token 023740|>
+<|visual token 023741|>
+<|visual token 023742|>
+<|visual token 023743|>
+<|visual token 023744|>
+<|visual token 023745|>
+<|visual token 023746|>
+<|visual token 023747|>
+<|visual token 023748|>
+<|visual token 023749|>
+<|visual token 023750|>
+<|visual token 023751|>
+<|visual token 023752|>
+<|visual token 023753|>
+<|visual token 023754|>
+<|visual token 023755|>
+<|visual token 023756|>
+<|visual token 023757|>
+<|visual token 023758|>
+<|visual token 023759|>
+<|visual token 023760|>
+<|visual token 023761|>
+<|visual token 023762|>
+<|visual token 023763|>
+<|visual token 023764|>
+<|visual token 023765|>
+<|visual token 023766|>
+<|visual token 023767|>
+<|visual token 023768|>
+<|visual token 023769|>
+<|visual token 023770|>
+<|visual token 023771|>
+<|visual token 023772|>
+<|visual token 023773|>
+<|visual token 023774|>
+<|visual token 023775|>
+<|visual token 023776|>
+<|visual token 023777|>
+<|visual token 023778|>
+<|visual token 023779|>
+<|visual token 023780|>
+<|visual token 023781|>
+<|visual token 023782|>
+<|visual token 023783|>
+<|visual token 023784|>
+<|visual token 023785|>
+<|visual token 023786|>
+<|visual token 023787|>
+<|visual token 023788|>
+<|visual token 023789|>
+<|visual token 023790|>
+<|visual token 023791|>
+<|visual token 023792|>
+<|visual token 023793|>
+<|visual token 023794|>
+<|visual token 023795|>
+<|visual token 023796|>
+<|visual token 023797|>
+<|visual token 023798|>
+<|visual token 023799|>
+<|visual token 023800|>
+<|visual token 023801|>
+<|visual token 023802|>
+<|visual token 023803|>
+<|visual token 023804|>
+<|visual token 023805|>
+<|visual token 023806|>
+<|visual token 023807|>
+<|visual token 023808|>
+<|visual token 023809|>
+<|visual token 023810|>
+<|visual token 023811|>
+<|visual token 023812|>
+<|visual token 023813|>
+<|visual token 023814|>
+<|visual token 023815|>
+<|visual token 023816|>
+<|visual token 023817|>
+<|visual token 023818|>
+<|visual token 023819|>
+<|visual token 023820|>
+<|visual token 023821|>
+<|visual token 023822|>
+<|visual token 023823|>
+<|visual token 023824|>
+<|visual token 023825|>
+<|visual token 023826|>
+<|visual token 023827|>
+<|visual token 023828|>
+<|visual token 023829|>
+<|visual token 023830|>
+<|visual token 023831|>
+<|visual token 023832|>
+<|visual token 023833|>
+<|visual token 023834|>
+<|visual token 023835|>
+<|visual token 023836|>
+<|visual token 023837|>
+<|visual token 023838|>
+<|visual token 023839|>
+<|visual token 023840|>
+<|visual token 023841|>
+<|visual token 023842|>
+<|visual token 023843|>
+<|visual token 023844|>
+<|visual token 023845|>
+<|visual token 023846|>
+<|visual token 023847|>
+<|visual token 023848|>
+<|visual token 023849|>
+<|visual token 023850|>
+<|visual token 023851|>
+<|visual token 023852|>
+<|visual token 023853|>
+<|visual token 023854|>
+<|visual token 023855|>
+<|visual token 023856|>
+<|visual token 023857|>
+<|visual token 023858|>
+<|visual token 023859|>
+<|visual token 023860|>
+<|visual token 023861|>
+<|visual token 023862|>
+<|visual token 023863|>
+<|visual token 023864|>
+<|visual token 023865|>
+<|visual token 023866|>
+<|visual token 023867|>
+<|visual token 023868|>
+<|visual token 023869|>
+<|visual token 023870|>
+<|visual token 023871|>
+<|visual token 023872|>
+<|visual token 023873|>
+<|visual token 023874|>
+<|visual token 023875|>
+<|visual token 023876|>
+<|visual token 023877|>
+<|visual token 023878|>
+<|visual token 023879|>
+<|visual token 023880|>
+<|visual token 023881|>
+<|visual token 023882|>
+<|visual token 023883|>
+<|visual token 023884|>
+<|visual token 023885|>
+<|visual token 023886|>
+<|visual token 023887|>
+<|visual token 023888|>
+<|visual token 023889|>
+<|visual token 023890|>
+<|visual token 023891|>
+<|visual token 023892|>
+<|visual token 023893|>
+<|visual token 023894|>
+<|visual token 023895|>
+<|visual token 023896|>
+<|visual token 023897|>
+<|visual token 023898|>
+<|visual token 023899|>
+<|visual token 023900|>
+<|visual token 023901|>
+<|visual token 023902|>
+<|visual token 023903|>
+<|visual token 023904|>
+<|visual token 023905|>
+<|visual token 023906|>
+<|visual token 023907|>
+<|visual token 023908|>
+<|visual token 023909|>
+<|visual token 023910|>
+<|visual token 023911|>
+<|visual token 023912|>
+<|visual token 023913|>
+<|visual token 023914|>
+<|visual token 023915|>
+<|visual token 023916|>
+<|visual token 023917|>
+<|visual token 023918|>
+<|visual token 023919|>
+<|visual token 023920|>
+<|visual token 023921|>
+<|visual token 023922|>
+<|visual token 023923|>
+<|visual token 023924|>
+<|visual token 023925|>
+<|visual token 023926|>
+<|visual token 023927|>
+<|visual token 023928|>
+<|visual token 023929|>
+<|visual token 023930|>
+<|visual token 023931|>
+<|visual token 023932|>
+<|visual token 023933|>
+<|visual token 023934|>
+<|visual token 023935|>
+<|visual token 023936|>
+<|visual token 023937|>
+<|visual token 023938|>
+<|visual token 023939|>
+<|visual token 023940|>
+<|visual token 023941|>
+<|visual token 023942|>
+<|visual token 023943|>
+<|visual token 023944|>
+<|visual token 023945|>
+<|visual token 023946|>
+<|visual token 023947|>
+<|visual token 023948|>
+<|visual token 023949|>
+<|visual token 023950|>
+<|visual token 023951|>
+<|visual token 023952|>
+<|visual token 023953|>
+<|visual token 023954|>
+<|visual token 023955|>
+<|visual token 023956|>
+<|visual token 023957|>
+<|visual token 023958|>
+<|visual token 023959|>
+<|visual token 023960|>
+<|visual token 023961|>
+<|visual token 023962|>
+<|visual token 023963|>
+<|visual token 023964|>
+<|visual token 023965|>
+<|visual token 023966|>
+<|visual token 023967|>
+<|visual token 023968|>
+<|visual token 023969|>
+<|visual token 023970|>
+<|visual token 023971|>
+<|visual token 023972|>
+<|visual token 023973|>
+<|visual token 023974|>
+<|visual token 023975|>
+<|visual token 023976|>
+<|visual token 023977|>
+<|visual token 023978|>
+<|visual token 023979|>
+<|visual token 023980|>
+<|visual token 023981|>
+<|visual token 023982|>
+<|visual token 023983|>
+<|visual token 023984|>
+<|visual token 023985|>
+<|visual token 023986|>
+<|visual token 023987|>
+<|visual token 023988|>
+<|visual token 023989|>
+<|visual token 023990|>
+<|visual token 023991|>
+<|visual token 023992|>
+<|visual token 023993|>
+<|visual token 023994|>
+<|visual token 023995|>
+<|visual token 023996|>
+<|visual token 023997|>
+<|visual token 023998|>
+<|visual token 023999|>
+<|visual token 024000|>
+<|visual token 024001|>
+<|visual token 024002|>
+<|visual token 024003|>
+<|visual token 024004|>
+<|visual token 024005|>
+<|visual token 024006|>
+<|visual token 024007|>
+<|visual token 024008|>
+<|visual token 024009|>
+<|visual token 024010|>
+<|visual token 024011|>
+<|visual token 024012|>
+<|visual token 024013|>
+<|visual token 024014|>
+<|visual token 024015|>
+<|visual token 024016|>
+<|visual token 024017|>
+<|visual token 024018|>
+<|visual token 024019|>
+<|visual token 024020|>
+<|visual token 024021|>
+<|visual token 024022|>
+<|visual token 024023|>
+<|visual token 024024|>
+<|visual token 024025|>
+<|visual token 024026|>
+<|visual token 024027|>
+<|visual token 024028|>
+<|visual token 024029|>
+<|visual token 024030|>
+<|visual token 024031|>
+<|visual token 024032|>
+<|visual token 024033|>
+<|visual token 024034|>
+<|visual token 024035|>
+<|visual token 024036|>
+<|visual token 024037|>
+<|visual token 024038|>
+<|visual token 024039|>
+<|visual token 024040|>
+<|visual token 024041|>
+<|visual token 024042|>
+<|visual token 024043|>
+<|visual token 024044|>
+<|visual token 024045|>
+<|visual token 024046|>
+<|visual token 024047|>
+<|visual token 024048|>
+<|visual token 024049|>
+<|visual token 024050|>
+<|visual token 024051|>
+<|visual token 024052|>
+<|visual token 024053|>
+<|visual token 024054|>
+<|visual token 024055|>
+<|visual token 024056|>
+<|visual token 024057|>
+<|visual token 024058|>
+<|visual token 024059|>
+<|visual token 024060|>
+<|visual token 024061|>
+<|visual token 024062|>
+<|visual token 024063|>
+<|visual token 024064|>
+<|visual token 024065|>
+<|visual token 024066|>
+<|visual token 024067|>
+<|visual token 024068|>
+<|visual token 024069|>
+<|visual token 024070|>
+<|visual token 024071|>
+<|visual token 024072|>
+<|visual token 024073|>
+<|visual token 024074|>
+<|visual token 024075|>
+<|visual token 024076|>
+<|visual token 024077|>
+<|visual token 024078|>
+<|visual token 024079|>
+<|visual token 024080|>
+<|visual token 024081|>
+<|visual token 024082|>
+<|visual token 024083|>
+<|visual token 024084|>
+<|visual token 024085|>
+<|visual token 024086|>
+<|visual token 024087|>
+<|visual token 024088|>
+<|visual token 024089|>
+<|visual token 024090|>
+<|visual token 024091|>
+<|visual token 024092|>
+<|visual token 024093|>
+<|visual token 024094|>
+<|visual token 024095|>
+<|visual token 024096|>
+<|visual token 024097|>
+<|visual token 024098|>
+<|visual token 024099|>
+<|visual token 024100|>
+<|visual token 024101|>
+<|visual token 024102|>
+<|visual token 024103|>
+<|visual token 024104|>
+<|visual token 024105|>
+<|visual token 024106|>
+<|visual token 024107|>
+<|visual token 024108|>
+<|visual token 024109|>
+<|visual token 024110|>
+<|visual token 024111|>
+<|visual token 024112|>
+<|visual token 024113|>
+<|visual token 024114|>
+<|visual token 024115|>
+<|visual token 024116|>
+<|visual token 024117|>
+<|visual token 024118|>
+<|visual token 024119|>
+<|visual token 024120|>
+<|visual token 024121|>
+<|visual token 024122|>
+<|visual token 024123|>
+<|visual token 024124|>
+<|visual token 024125|>
+<|visual token 024126|>
+<|visual token 024127|>
+<|visual token 024128|>
+<|visual token 024129|>
+<|visual token 024130|>
+<|visual token 024131|>
+<|visual token 024132|>
+<|visual token 024133|>
+<|visual token 024134|>
+<|visual token 024135|>
+<|visual token 024136|>
+<|visual token 024137|>
+<|visual token 024138|>
+<|visual token 024139|>
+<|visual token 024140|>
+<|visual token 024141|>
+<|visual token 024142|>
+<|visual token 024143|>
+<|visual token 024144|>
+<|visual token 024145|>
+<|visual token 024146|>
+<|visual token 024147|>
+<|visual token 024148|>
+<|visual token 024149|>
+<|visual token 024150|>
+<|visual token 024151|>
+<|visual token 024152|>
+<|visual token 024153|>
+<|visual token 024154|>
+<|visual token 024155|>
+<|visual token 024156|>
+<|visual token 024157|>
+<|visual token 024158|>
+<|visual token 024159|>
+<|visual token 024160|>
+<|visual token 024161|>
+<|visual token 024162|>
+<|visual token 024163|>
+<|visual token 024164|>
+<|visual token 024165|>
+<|visual token 024166|>
+<|visual token 024167|>
+<|visual token 024168|>
+<|visual token 024169|>
+<|visual token 024170|>
+<|visual token 024171|>
+<|visual token 024172|>
+<|visual token 024173|>
+<|visual token 024174|>
+<|visual token 024175|>
+<|visual token 024176|>
+<|visual token 024177|>
+<|visual token 024178|>
+<|visual token 024179|>
+<|visual token 024180|>
+<|visual token 024181|>
+<|visual token 024182|>
+<|visual token 024183|>
+<|visual token 024184|>
+<|visual token 024185|>
+<|visual token 024186|>
+<|visual token 024187|>
+<|visual token 024188|>
+<|visual token 024189|>
+<|visual token 024190|>
+<|visual token 024191|>
+<|visual token 024192|>
+<|visual token 024193|>
+<|visual token 024194|>
+<|visual token 024195|>
+<|visual token 024196|>
+<|visual token 024197|>
+<|visual token 024198|>
+<|visual token 024199|>
+<|visual token 024200|>
+<|visual token 024201|>
+<|visual token 024202|>
+<|visual token 024203|>
+<|visual token 024204|>
+<|visual token 024205|>
+<|visual token 024206|>
+<|visual token 024207|>
+<|visual token 024208|>
+<|visual token 024209|>
+<|visual token 024210|>
+<|visual token 024211|>
+<|visual token 024212|>
+<|visual token 024213|>
+<|visual token 024214|>
+<|visual token 024215|>
+<|visual token 024216|>
+<|visual token 024217|>
+<|visual token 024218|>
+<|visual token 024219|>
+<|visual token 024220|>
+<|visual token 024221|>
+<|visual token 024222|>
+<|visual token 024223|>
+<|visual token 024224|>
+<|visual token 024225|>
+<|visual token 024226|>
+<|visual token 024227|>
+<|visual token 024228|>
+<|visual token 024229|>
+<|visual token 024230|>
+<|visual token 024231|>
+<|visual token 024232|>
+<|visual token 024233|>
+<|visual token 024234|>
+<|visual token 024235|>
+<|visual token 024236|>
+<|visual token 024237|>
+<|visual token 024238|>
+<|visual token 024239|>
+<|visual token 024240|>
+<|visual token 024241|>
+<|visual token 024242|>
+<|visual token 024243|>
+<|visual token 024244|>
+<|visual token 024245|>
+<|visual token 024246|>
+<|visual token 024247|>
+<|visual token 024248|>
+<|visual token 024249|>
+<|visual token 024250|>
+<|visual token 024251|>
+<|visual token 024252|>
+<|visual token 024253|>
+<|visual token 024254|>
+<|visual token 024255|>
+<|visual token 024256|>
+<|visual token 024257|>
+<|visual token 024258|>
+<|visual token 024259|>
+<|visual token 024260|>
+<|visual token 024261|>
+<|visual token 024262|>
+<|visual token 024263|>
+<|visual token 024264|>
+<|visual token 024265|>
+<|visual token 024266|>
+<|visual token 024267|>
+<|visual token 024268|>
+<|visual token 024269|>
+<|visual token 024270|>
+<|visual token 024271|>
+<|visual token 024272|>
+<|visual token 024273|>
+<|visual token 024274|>
+<|visual token 024275|>
+<|visual token 024276|>
+<|visual token 024277|>
+<|visual token 024278|>
+<|visual token 024279|>
+<|visual token 024280|>
+<|visual token 024281|>
+<|visual token 024282|>
+<|visual token 024283|>
+<|visual token 024284|>
+<|visual token 024285|>
+<|visual token 024286|>
+<|visual token 024287|>
+<|visual token 024288|>
+<|visual token 024289|>
+<|visual token 024290|>
+<|visual token 024291|>
+<|visual token 024292|>
+<|visual token 024293|>
+<|visual token 024294|>
+<|visual token 024295|>
+<|visual token 024296|>
+<|visual token 024297|>
+<|visual token 024298|>
+<|visual token 024299|>
+<|visual token 024300|>
+<|visual token 024301|>
+<|visual token 024302|>
+<|visual token 024303|>
+<|visual token 024304|>
+<|visual token 024305|>
+<|visual token 024306|>
+<|visual token 024307|>
+<|visual token 024308|>
+<|visual token 024309|>
+<|visual token 024310|>
+<|visual token 024311|>
+<|visual token 024312|>
+<|visual token 024313|>
+<|visual token 024314|>
+<|visual token 024315|>
+<|visual token 024316|>
+<|visual token 024317|>
+<|visual token 024318|>
+<|visual token 024319|>
+<|visual token 024320|>
+<|visual token 024321|>
+<|visual token 024322|>
+<|visual token 024323|>
+<|visual token 024324|>
+<|visual token 024325|>
+<|visual token 024326|>
+<|visual token 024327|>
+<|visual token 024328|>
+<|visual token 024329|>
+<|visual token 024330|>
+<|visual token 024331|>
+<|visual token 024332|>
+<|visual token 024333|>
+<|visual token 024334|>
+<|visual token 024335|>
+<|visual token 024336|>
+<|visual token 024337|>
+<|visual token 024338|>
+<|visual token 024339|>
+<|visual token 024340|>
+<|visual token 024341|>
+<|visual token 024342|>
+<|visual token 024343|>
+<|visual token 024344|>
+<|visual token 024345|>
+<|visual token 024346|>
+<|visual token 024347|>
+<|visual token 024348|>
+<|visual token 024349|>
+<|visual token 024350|>
+<|visual token 024351|>
+<|visual token 024352|>
+<|visual token 024353|>
+<|visual token 024354|>
+<|visual token 024355|>
+<|visual token 024356|>
+<|visual token 024357|>
+<|visual token 024358|>
+<|visual token 024359|>
+<|visual token 024360|>
+<|visual token 024361|>
+<|visual token 024362|>
+<|visual token 024363|>
+<|visual token 024364|>
+<|visual token 024365|>
+<|visual token 024366|>
+<|visual token 024367|>
+<|visual token 024368|>
+<|visual token 024369|>
+<|visual token 024370|>
+<|visual token 024371|>
+<|visual token 024372|>
+<|visual token 024373|>
+<|visual token 024374|>
+<|visual token 024375|>
+<|visual token 024376|>
+<|visual token 024377|>
+<|visual token 024378|>
+<|visual token 024379|>
+<|visual token 024380|>
+<|visual token 024381|>
+<|visual token 024382|>
+<|visual token 024383|>
+<|visual token 024384|>
+<|visual token 024385|>
+<|visual token 024386|>
+<|visual token 024387|>
+<|visual token 024388|>
+<|visual token 024389|>
+<|visual token 024390|>
+<|visual token 024391|>
+<|visual token 024392|>
+<|visual token 024393|>
+<|visual token 024394|>
+<|visual token 024395|>
+<|visual token 024396|>
+<|visual token 024397|>
+<|visual token 024398|>
+<|visual token 024399|>
+<|visual token 024400|>
+<|visual token 024401|>
+<|visual token 024402|>
+<|visual token 024403|>
+<|visual token 024404|>
+<|visual token 024405|>
+<|visual token 024406|>
+<|visual token 024407|>
+<|visual token 024408|>
+<|visual token 024409|>
+<|visual token 024410|>
+<|visual token 024411|>
+<|visual token 024412|>
+<|visual token 024413|>
+<|visual token 024414|>
+<|visual token 024415|>
+<|visual token 024416|>
+<|visual token 024417|>
+<|visual token 024418|>
+<|visual token 024419|>
+<|visual token 024420|>
+<|visual token 024421|>
+<|visual token 024422|>
+<|visual token 024423|>
+<|visual token 024424|>
+<|visual token 024425|>
+<|visual token 024426|>
+<|visual token 024427|>
+<|visual token 024428|>
+<|visual token 024429|>
+<|visual token 024430|>
+<|visual token 024431|>
+<|visual token 024432|>
+<|visual token 024433|>
+<|visual token 024434|>
+<|visual token 024435|>
+<|visual token 024436|>
+<|visual token 024437|>
+<|visual token 024438|>
+<|visual token 024439|>
+<|visual token 024440|>
+<|visual token 024441|>
+<|visual token 024442|>
+<|visual token 024443|>
+<|visual token 024444|>
+<|visual token 024445|>
+<|visual token 024446|>
+<|visual token 024447|>
+<|visual token 024448|>
+<|visual token 024449|>
+<|visual token 024450|>
+<|visual token 024451|>
+<|visual token 024452|>
+<|visual token 024453|>
+<|visual token 024454|>
+<|visual token 024455|>
+<|visual token 024456|>
+<|visual token 024457|>
+<|visual token 024458|>
+<|visual token 024459|>
+<|visual token 024460|>
+<|visual token 024461|>
+<|visual token 024462|>
+<|visual token 024463|>
+<|visual token 024464|>
+<|visual token 024465|>
+<|visual token 024466|>
+<|visual token 024467|>
+<|visual token 024468|>
+<|visual token 024469|>
+<|visual token 024470|>
+<|visual token 024471|>
+<|visual token 024472|>
+<|visual token 024473|>
+<|visual token 024474|>
+<|visual token 024475|>
+<|visual token 024476|>
+<|visual token 024477|>
+<|visual token 024478|>
+<|visual token 024479|>
+<|visual token 024480|>
+<|visual token 024481|>
+<|visual token 024482|>
+<|visual token 024483|>
+<|visual token 024484|>
+<|visual token 024485|>
+<|visual token 024486|>
+<|visual token 024487|>
+<|visual token 024488|>
+<|visual token 024489|>
+<|visual token 024490|>
+<|visual token 024491|>
+<|visual token 024492|>
+<|visual token 024493|>
+<|visual token 024494|>
+<|visual token 024495|>
+<|visual token 024496|>
+<|visual token 024497|>
+<|visual token 024498|>
+<|visual token 024499|>
+<|visual token 024500|>
+<|visual token 024501|>
+<|visual token 024502|>
+<|visual token 024503|>
+<|visual token 024504|>
+<|visual token 024505|>
+<|visual token 024506|>
+<|visual token 024507|>
+<|visual token 024508|>
+<|visual token 024509|>
+<|visual token 024510|>
+<|visual token 024511|>
+<|visual token 024512|>
+<|visual token 024513|>
+<|visual token 024514|>
+<|visual token 024515|>
+<|visual token 024516|>
+<|visual token 024517|>
+<|visual token 024518|>
+<|visual token 024519|>
+<|visual token 024520|>
+<|visual token 024521|>
+<|visual token 024522|>
+<|visual token 024523|>
+<|visual token 024524|>
+<|visual token 024525|>
+<|visual token 024526|>
+<|visual token 024527|>
+<|visual token 024528|>
+<|visual token 024529|>
+<|visual token 024530|>
+<|visual token 024531|>
+<|visual token 024532|>
+<|visual token 024533|>
+<|visual token 024534|>
+<|visual token 024535|>
+<|visual token 024536|>
+<|visual token 024537|>
+<|visual token 024538|>
+<|visual token 024539|>
+<|visual token 024540|>
+<|visual token 024541|>
+<|visual token 024542|>
+<|visual token 024543|>
+<|visual token 024544|>
+<|visual token 024545|>
+<|visual token 024546|>
+<|visual token 024547|>
+<|visual token 024548|>
+<|visual token 024549|>
+<|visual token 024550|>
+<|visual token 024551|>
+<|visual token 024552|>
+<|visual token 024553|>
+<|visual token 024554|>
+<|visual token 024555|>
+<|visual token 024556|>
+<|visual token 024557|>
+<|visual token 024558|>
+<|visual token 024559|>
+<|visual token 024560|>
+<|visual token 024561|>
+<|visual token 024562|>
+<|visual token 024563|>
+<|visual token 024564|>
+<|visual token 024565|>
+<|visual token 024566|>
+<|visual token 024567|>
+<|visual token 024568|>
+<|visual token 024569|>
+<|visual token 024570|>
+<|visual token 024571|>
+<|visual token 024572|>
+<|visual token 024573|>
+<|visual token 024574|>
+<|visual token 024575|>
+<|visual token 024576|>
+<|visual token 024577|>
+<|visual token 024578|>
+<|visual token 024579|>
+<|visual token 024580|>
+<|visual token 024581|>
+<|visual token 024582|>
+<|visual token 024583|>
+<|visual token 024584|>
+<|visual token 024585|>
+<|visual token 024586|>
+<|visual token 024587|>
+<|visual token 024588|>
+<|visual token 024589|>
+<|visual token 024590|>
+<|visual token 024591|>
+<|visual token 024592|>
+<|visual token 024593|>
+<|visual token 024594|>
+<|visual token 024595|>
+<|visual token 024596|>
+<|visual token 024597|>
+<|visual token 024598|>
+<|visual token 024599|>
+<|visual token 024600|>
+<|visual token 024601|>
+<|visual token 024602|>
+<|visual token 024603|>
+<|visual token 024604|>
+<|visual token 024605|>
+<|visual token 024606|>
+<|visual token 024607|>
+<|visual token 024608|>
+<|visual token 024609|>
+<|visual token 024610|>
+<|visual token 024611|>
+<|visual token 024612|>
+<|visual token 024613|>
+<|visual token 024614|>
+<|visual token 024615|>
+<|visual token 024616|>
+<|visual token 024617|>
+<|visual token 024618|>
+<|visual token 024619|>
+<|visual token 024620|>
+<|visual token 024621|>
+<|visual token 024622|>
+<|visual token 024623|>
+<|visual token 024624|>
+<|visual token 024625|>
+<|visual token 024626|>
+<|visual token 024627|>
+<|visual token 024628|>
+<|visual token 024629|>
+<|visual token 024630|>
+<|visual token 024631|>
+<|visual token 024632|>
+<|visual token 024633|>
+<|visual token 024634|>
+<|visual token 024635|>
+<|visual token 024636|>
+<|visual token 024637|>
+<|visual token 024638|>
+<|visual token 024639|>
+<|visual token 024640|>
+<|visual token 024641|>
+<|visual token 024642|>
+<|visual token 024643|>
+<|visual token 024644|>
+<|visual token 024645|>
+<|visual token 024646|>
+<|visual token 024647|>
+<|visual token 024648|>
+<|visual token 024649|>
+<|visual token 024650|>
+<|visual token 024651|>
+<|visual token 024652|>
+<|visual token 024653|>
+<|visual token 024654|>
+<|visual token 024655|>
+<|visual token 024656|>
+<|visual token 024657|>
+<|visual token 024658|>
+<|visual token 024659|>
+<|visual token 024660|>
+<|visual token 024661|>
+<|visual token 024662|>
+<|visual token 024663|>
+<|visual token 024664|>
+<|visual token 024665|>
+<|visual token 024666|>
+<|visual token 024667|>
+<|visual token 024668|>
+<|visual token 024669|>
+<|visual token 024670|>
+<|visual token 024671|>
+<|visual token 024672|>
+<|visual token 024673|>
+<|visual token 024674|>
+<|visual token 024675|>
+<|visual token 024676|>
+<|visual token 024677|>
+<|visual token 024678|>
+<|visual token 024679|>
+<|visual token 024680|>
+<|visual token 024681|>
+<|visual token 024682|>
+<|visual token 024683|>
+<|visual token 024684|>
+<|visual token 024685|>
+<|visual token 024686|>
+<|visual token 024687|>
+<|visual token 024688|>
+<|visual token 024689|>
+<|visual token 024690|>
+<|visual token 024691|>
+<|visual token 024692|>
+<|visual token 024693|>
+<|visual token 024694|>
+<|visual token 024695|>
+<|visual token 024696|>
+<|visual token 024697|>
+<|visual token 024698|>
+<|visual token 024699|>
+<|visual token 024700|>
+<|visual token 024701|>
+<|visual token 024702|>
+<|visual token 024703|>
+<|visual token 024704|>
+<|visual token 024705|>
+<|visual token 024706|>
+<|visual token 024707|>
+<|visual token 024708|>
+<|visual token 024709|>
+<|visual token 024710|>
+<|visual token 024711|>
+<|visual token 024712|>
+<|visual token 024713|>
+<|visual token 024714|>
+<|visual token 024715|>
+<|visual token 024716|>
+<|visual token 024717|>
+<|visual token 024718|>
+<|visual token 024719|>
+<|visual token 024720|>
+<|visual token 024721|>
+<|visual token 024722|>
+<|visual token 024723|>
+<|visual token 024724|>
+<|visual token 024725|>
+<|visual token 024726|>
+<|visual token 024727|>
+<|visual token 024728|>
+<|visual token 024729|>
+<|visual token 024730|>
+<|visual token 024731|>
+<|visual token 024732|>
+<|visual token 024733|>
+<|visual token 024734|>
+<|visual token 024735|>
+<|visual token 024736|>
+<|visual token 024737|>
+<|visual token 024738|>
+<|visual token 024739|>
+<|visual token 024740|>
+<|visual token 024741|>
+<|visual token 024742|>
+<|visual token 024743|>
+<|visual token 024744|>
+<|visual token 024745|>
+<|visual token 024746|>
+<|visual token 024747|>
+<|visual token 024748|>
+<|visual token 024749|>
+<|visual token 024750|>
+<|visual token 024751|>
+<|visual token 024752|>
+<|visual token 024753|>
+<|visual token 024754|>
+<|visual token 024755|>
+<|visual token 024756|>
+<|visual token 024757|>
+<|visual token 024758|>
+<|visual token 024759|>
+<|visual token 024760|>
+<|visual token 024761|>
+<|visual token 024762|>
+<|visual token 024763|>
+<|visual token 024764|>
+<|visual token 024765|>
+<|visual token 024766|>
+<|visual token 024767|>
+<|visual token 024768|>
+<|visual token 024769|>
+<|visual token 024770|>
+<|visual token 024771|>
+<|visual token 024772|>
+<|visual token 024773|>
+<|visual token 024774|>
+<|visual token 024775|>
+<|visual token 024776|>
+<|visual token 024777|>
+<|visual token 024778|>
+<|visual token 024779|>
+<|visual token 024780|>
+<|visual token 024781|>
+<|visual token 024782|>
+<|visual token 024783|>
+<|visual token 024784|>
+<|visual token 024785|>
+<|visual token 024786|>
+<|visual token 024787|>
+<|visual token 024788|>
+<|visual token 024789|>
+<|visual token 024790|>
+<|visual token 024791|>
+<|visual token 024792|>
+<|visual token 024793|>
+<|visual token 024794|>
+<|visual token 024795|>
+<|visual token 024796|>
+<|visual token 024797|>
+<|visual token 024798|>
+<|visual token 024799|>
+<|visual token 024800|>
+<|visual token 024801|>
+<|visual token 024802|>
+<|visual token 024803|>
+<|visual token 024804|>
+<|visual token 024805|>
+<|visual token 024806|>
+<|visual token 024807|>
+<|visual token 024808|>
+<|visual token 024809|>
+<|visual token 024810|>
+<|visual token 024811|>
+<|visual token 024812|>
+<|visual token 024813|>
+<|visual token 024814|>
+<|visual token 024815|>
+<|visual token 024816|>
+<|visual token 024817|>
+<|visual token 024818|>
+<|visual token 024819|>
+<|visual token 024820|>
+<|visual token 024821|>
+<|visual token 024822|>
+<|visual token 024823|>
+<|visual token 024824|>
+<|visual token 024825|>
+<|visual token 024826|>
+<|visual token 024827|>
+<|visual token 024828|>
+<|visual token 024829|>
+<|visual token 024830|>
+<|visual token 024831|>
+<|visual token 024832|>
+<|visual token 024833|>
+<|visual token 024834|>
+<|visual token 024835|>
+<|visual token 024836|>
+<|visual token 024837|>
+<|visual token 024838|>
+<|visual token 024839|>
+<|visual token 024840|>
+<|visual token 024841|>
+<|visual token 024842|>
+<|visual token 024843|>
+<|visual token 024844|>
+<|visual token 024845|>
+<|visual token 024846|>
+<|visual token 024847|>
+<|visual token 024848|>
+<|visual token 024849|>
+<|visual token 024850|>
+<|visual token 024851|>
+<|visual token 024852|>
+<|visual token 024853|>
+<|visual token 024854|>
+<|visual token 024855|>
+<|visual token 024856|>
+<|visual token 024857|>
+<|visual token 024858|>
+<|visual token 024859|>
+<|visual token 024860|>
+<|visual token 024861|>
+<|visual token 024862|>
+<|visual token 024863|>
+<|visual token 024864|>
+<|visual token 024865|>
+<|visual token 024866|>
+<|visual token 024867|>
+<|visual token 024868|>
+<|visual token 024869|>
+<|visual token 024870|>
+<|visual token 024871|>
+<|visual token 024872|>
+<|visual token 024873|>
+<|visual token 024874|>
+<|visual token 024875|>
+<|visual token 024876|>
+<|visual token 024877|>
+<|visual token 024878|>
+<|visual token 024879|>
+<|visual token 024880|>
+<|visual token 024881|>
+<|visual token 024882|>
+<|visual token 024883|>
+<|visual token 024884|>
+<|visual token 024885|>
+<|visual token 024886|>
+<|visual token 024887|>
+<|visual token 024888|>
+<|visual token 024889|>
+<|visual token 024890|>
+<|visual token 024891|>
+<|visual token 024892|>
+<|visual token 024893|>
+<|visual token 024894|>
+<|visual token 024895|>
+<|visual token 024896|>
+<|visual token 024897|>
+<|visual token 024898|>
+<|visual token 024899|>
+<|visual token 024900|>
+<|visual token 024901|>
+<|visual token 024902|>
+<|visual token 024903|>
+<|visual token 024904|>
+<|visual token 024905|>
+<|visual token 024906|>
+<|visual token 024907|>
+<|visual token 024908|>
+<|visual token 024909|>
+<|visual token 024910|>
+<|visual token 024911|>
+<|visual token 024912|>
+<|visual token 024913|>
+<|visual token 024914|>
+<|visual token 024915|>
+<|visual token 024916|>
+<|visual token 024917|>
+<|visual token 024918|>
+<|visual token 024919|>
+<|visual token 024920|>
+<|visual token 024921|>
+<|visual token 024922|>
+<|visual token 024923|>
+<|visual token 024924|>
+<|visual token 024925|>
+<|visual token 024926|>
+<|visual token 024927|>
+<|visual token 024928|>
+<|visual token 024929|>
+<|visual token 024930|>
+<|visual token 024931|>
+<|visual token 024932|>
+<|visual token 024933|>
+<|visual token 024934|>
+<|visual token 024935|>
+<|visual token 024936|>
+<|visual token 024937|>
+<|visual token 024938|>
+<|visual token 024939|>
+<|visual token 024940|>
+<|visual token 024941|>
+<|visual token 024942|>
+<|visual token 024943|>
+<|visual token 024944|>
+<|visual token 024945|>
+<|visual token 024946|>
+<|visual token 024947|>
+<|visual token 024948|>
+<|visual token 024949|>
+<|visual token 024950|>
+<|visual token 024951|>
+<|visual token 024952|>
+<|visual token 024953|>
+<|visual token 024954|>
+<|visual token 024955|>
+<|visual token 024956|>
+<|visual token 024957|>
+<|visual token 024958|>
+<|visual token 024959|>
+<|visual token 024960|>
+<|visual token 024961|>
+<|visual token 024962|>
+<|visual token 024963|>
+<|visual token 024964|>
+<|visual token 024965|>
+<|visual token 024966|>
+<|visual token 024967|>
+<|visual token 024968|>
+<|visual token 024969|>
+<|visual token 024970|>
+<|visual token 024971|>
+<|visual token 024972|>
+<|visual token 024973|>
+<|visual token 024974|>
+<|visual token 024975|>
+<|visual token 024976|>
+<|visual token 024977|>
+<|visual token 024978|>
+<|visual token 024979|>
+<|visual token 024980|>
+<|visual token 024981|>
+<|visual token 024982|>
+<|visual token 024983|>
+<|visual token 024984|>
+<|visual token 024985|>
+<|visual token 024986|>
+<|visual token 024987|>
+<|visual token 024988|>
+<|visual token 024989|>
+<|visual token 024990|>
+<|visual token 024991|>
+<|visual token 024992|>
+<|visual token 024993|>
+<|visual token 024994|>
+<|visual token 024995|>
+<|visual token 024996|>
+<|visual token 024997|>
+<|visual token 024998|>
+<|visual token 024999|>
+<|visual token 025000|>
+<|visual token 025001|>
+<|visual token 025002|>
+<|visual token 025003|>
+<|visual token 025004|>
+<|visual token 025005|>
+<|visual token 025006|>
+<|visual token 025007|>
+<|visual token 025008|>
+<|visual token 025009|>
+<|visual token 025010|>
+<|visual token 025011|>
+<|visual token 025012|>
+<|visual token 025013|>
+<|visual token 025014|>
+<|visual token 025015|>
+<|visual token 025016|>
+<|visual token 025017|>
+<|visual token 025018|>
+<|visual token 025019|>
+<|visual token 025020|>
+<|visual token 025021|>
+<|visual token 025022|>
+<|visual token 025023|>
+<|visual token 025024|>
+<|visual token 025025|>
+<|visual token 025026|>
+<|visual token 025027|>
+<|visual token 025028|>
+<|visual token 025029|>
+<|visual token 025030|>
+<|visual token 025031|>
+<|visual token 025032|>
+<|visual token 025033|>
+<|visual token 025034|>
+<|visual token 025035|>
+<|visual token 025036|>
+<|visual token 025037|>
+<|visual token 025038|>
+<|visual token 025039|>
+<|visual token 025040|>
+<|visual token 025041|>
+<|visual token 025042|>
+<|visual token 025043|>
+<|visual token 025044|>
+<|visual token 025045|>
+<|visual token 025046|>
+<|visual token 025047|>
+<|visual token 025048|>
+<|visual token 025049|>
+<|visual token 025050|>
+<|visual token 025051|>
+<|visual token 025052|>
+<|visual token 025053|>
+<|visual token 025054|>
+<|visual token 025055|>
+<|visual token 025056|>
+<|visual token 025057|>
+<|visual token 025058|>
+<|visual token 025059|>
+<|visual token 025060|>
+<|visual token 025061|>
+<|visual token 025062|>
+<|visual token 025063|>
+<|visual token 025064|>
+<|visual token 025065|>
+<|visual token 025066|>
+<|visual token 025067|>
+<|visual token 025068|>
+<|visual token 025069|>
+<|visual token 025070|>
+<|visual token 025071|>
+<|visual token 025072|>
+<|visual token 025073|>
+<|visual token 025074|>
+<|visual token 025075|>
+<|visual token 025076|>
+<|visual token 025077|>
+<|visual token 025078|>
+<|visual token 025079|>
+<|visual token 025080|>
+<|visual token 025081|>
+<|visual token 025082|>
+<|visual token 025083|>
+<|visual token 025084|>
+<|visual token 025085|>
+<|visual token 025086|>
+<|visual token 025087|>
+<|visual token 025088|>
+<|visual token 025089|>
+<|visual token 025090|>
+<|visual token 025091|>
+<|visual token 025092|>
+<|visual token 025093|>
+<|visual token 025094|>
+<|visual token 025095|>
+<|visual token 025096|>
+<|visual token 025097|>
+<|visual token 025098|>
+<|visual token 025099|>
+<|visual token 025100|>
+<|visual token 025101|>
+<|visual token 025102|>
+<|visual token 025103|>
+<|visual token 025104|>
+<|visual token 025105|>
+<|visual token 025106|>
+<|visual token 025107|>
+<|visual token 025108|>
+<|visual token 025109|>
+<|visual token 025110|>
+<|visual token 025111|>
+<|visual token 025112|>
+<|visual token 025113|>
+<|visual token 025114|>
+<|visual token 025115|>
+<|visual token 025116|>
+<|visual token 025117|>
+<|visual token 025118|>
+<|visual token 025119|>
+<|visual token 025120|>
+<|visual token 025121|>
+<|visual token 025122|>
+<|visual token 025123|>
+<|visual token 025124|>
+<|visual token 025125|>
+<|visual token 025126|>
+<|visual token 025127|>
+<|visual token 025128|>
+<|visual token 025129|>
+<|visual token 025130|>
+<|visual token 025131|>
+<|visual token 025132|>
+<|visual token 025133|>
+<|visual token 025134|>
+<|visual token 025135|>
+<|visual token 025136|>
+<|visual token 025137|>
+<|visual token 025138|>
+<|visual token 025139|>
+<|visual token 025140|>
+<|visual token 025141|>
+<|visual token 025142|>
+<|visual token 025143|>
+<|visual token 025144|>
+<|visual token 025145|>
+<|visual token 025146|>
+<|visual token 025147|>
+<|visual token 025148|>
+<|visual token 025149|>
+<|visual token 025150|>
+<|visual token 025151|>
+<|visual token 025152|>
+<|visual token 025153|>
+<|visual token 025154|>
+<|visual token 025155|>
+<|visual token 025156|>
+<|visual token 025157|>
+<|visual token 025158|>
+<|visual token 025159|>
+<|visual token 025160|>
+<|visual token 025161|>
+<|visual token 025162|>
+<|visual token 025163|>
+<|visual token 025164|>
+<|visual token 025165|>
+<|visual token 025166|>
+<|visual token 025167|>
+<|visual token 025168|>
+<|visual token 025169|>
+<|visual token 025170|>
+<|visual token 025171|>
+<|visual token 025172|>
+<|visual token 025173|>
+<|visual token 025174|>
+<|visual token 025175|>
+<|visual token 025176|>
+<|visual token 025177|>
+<|visual token 025178|>
+<|visual token 025179|>
+<|visual token 025180|>
+<|visual token 025181|>
+<|visual token 025182|>
+<|visual token 025183|>
+<|visual token 025184|>
+<|visual token 025185|>
+<|visual token 025186|>
+<|visual token 025187|>
+<|visual token 025188|>
+<|visual token 025189|>
+<|visual token 025190|>
+<|visual token 025191|>
+<|visual token 025192|>
+<|visual token 025193|>
+<|visual token 025194|>
+<|visual token 025195|>
+<|visual token 025196|>
+<|visual token 025197|>
+<|visual token 025198|>
+<|visual token 025199|>
+<|visual token 025200|>
+<|visual token 025201|>
+<|visual token 025202|>
+<|visual token 025203|>
+<|visual token 025204|>
+<|visual token 025205|>
+<|visual token 025206|>
+<|visual token 025207|>
+<|visual token 025208|>
+<|visual token 025209|>
+<|visual token 025210|>
+<|visual token 025211|>
+<|visual token 025212|>
+<|visual token 025213|>
+<|visual token 025214|>
+<|visual token 025215|>
+<|visual token 025216|>
+<|visual token 025217|>
+<|visual token 025218|>
+<|visual token 025219|>
+<|visual token 025220|>
+<|visual token 025221|>
+<|visual token 025222|>
+<|visual token 025223|>
+<|visual token 025224|>
+<|visual token 025225|>
+<|visual token 025226|>
+<|visual token 025227|>
+<|visual token 025228|>
+<|visual token 025229|>
+<|visual token 025230|>
+<|visual token 025231|>
+<|visual token 025232|>
+<|visual token 025233|>
+<|visual token 025234|>
+<|visual token 025235|>
+<|visual token 025236|>
+<|visual token 025237|>
+<|visual token 025238|>
+<|visual token 025239|>
+<|visual token 025240|>
+<|visual token 025241|>
+<|visual token 025242|>
+<|visual token 025243|>
+<|visual token 025244|>
+<|visual token 025245|>
+<|visual token 025246|>
+<|visual token 025247|>
+<|visual token 025248|>
+<|visual token 025249|>
+<|visual token 025250|>
+<|visual token 025251|>
+<|visual token 025252|>
+<|visual token 025253|>
+<|visual token 025254|>
+<|visual token 025255|>
+<|visual token 025256|>
+<|visual token 025257|>
+<|visual token 025258|>
+<|visual token 025259|>
+<|visual token 025260|>
+<|visual token 025261|>
+<|visual token 025262|>
+<|visual token 025263|>
+<|visual token 025264|>
+<|visual token 025265|>
+<|visual token 025266|>
+<|visual token 025267|>
+<|visual token 025268|>
+<|visual token 025269|>
+<|visual token 025270|>
+<|visual token 025271|>
+<|visual token 025272|>
+<|visual token 025273|>
+<|visual token 025274|>
+<|visual token 025275|>
+<|visual token 025276|>
+<|visual token 025277|>
+<|visual token 025278|>
+<|visual token 025279|>
+<|visual token 025280|>
+<|visual token 025281|>
+<|visual token 025282|>
+<|visual token 025283|>
+<|visual token 025284|>
+<|visual token 025285|>
+<|visual token 025286|>
+<|visual token 025287|>
+<|visual token 025288|>
+<|visual token 025289|>
+<|visual token 025290|>
+<|visual token 025291|>
+<|visual token 025292|>
+<|visual token 025293|>
+<|visual token 025294|>
+<|visual token 025295|>
+<|visual token 025296|>
+<|visual token 025297|>
+<|visual token 025298|>
+<|visual token 025299|>
+<|visual token 025300|>
+<|visual token 025301|>
+<|visual token 025302|>
+<|visual token 025303|>
+<|visual token 025304|>
+<|visual token 025305|>
+<|visual token 025306|>
+<|visual token 025307|>
+<|visual token 025308|>
+<|visual token 025309|>
+<|visual token 025310|>
+<|visual token 025311|>
+<|visual token 025312|>
+<|visual token 025313|>
+<|visual token 025314|>
+<|visual token 025315|>
+<|visual token 025316|>
+<|visual token 025317|>
+<|visual token 025318|>
+<|visual token 025319|>
+<|visual token 025320|>
+<|visual token 025321|>
+<|visual token 025322|>
+<|visual token 025323|>
+<|visual token 025324|>
+<|visual token 025325|>
+<|visual token 025326|>
+<|visual token 025327|>
+<|visual token 025328|>
+<|visual token 025329|>
+<|visual token 025330|>
+<|visual token 025331|>
+<|visual token 025332|>
+<|visual token 025333|>
+<|visual token 025334|>
+<|visual token 025335|>
+<|visual token 025336|>
+<|visual token 025337|>
+<|visual token 025338|>
+<|visual token 025339|>
+<|visual token 025340|>
+<|visual token 025341|>
+<|visual token 025342|>
+<|visual token 025343|>
+<|visual token 025344|>
+<|visual token 025345|>
+<|visual token 025346|>
+<|visual token 025347|>
+<|visual token 025348|>
+<|visual token 025349|>
+<|visual token 025350|>
+<|visual token 025351|>
+<|visual token 025352|>
+<|visual token 025353|>
+<|visual token 025354|>
+<|visual token 025355|>
+<|visual token 025356|>
+<|visual token 025357|>
+<|visual token 025358|>
+<|visual token 025359|>
+<|visual token 025360|>
+<|visual token 025361|>
+<|visual token 025362|>
+<|visual token 025363|>
+<|visual token 025364|>
+<|visual token 025365|>
+<|visual token 025366|>
+<|visual token 025367|>
+<|visual token 025368|>
+<|visual token 025369|>
+<|visual token 025370|>
+<|visual token 025371|>
+<|visual token 025372|>
+<|visual token 025373|>
+<|visual token 025374|>
+<|visual token 025375|>
+<|visual token 025376|>
+<|visual token 025377|>
+<|visual token 025378|>
+<|visual token 025379|>
+<|visual token 025380|>
+<|visual token 025381|>
+<|visual token 025382|>
+<|visual token 025383|>
+<|visual token 025384|>
+<|visual token 025385|>
+<|visual token 025386|>
+<|visual token 025387|>
+<|visual token 025388|>
+<|visual token 025389|>
+<|visual token 025390|>
+<|visual token 025391|>
+<|visual token 025392|>
+<|visual token 025393|>
+<|visual token 025394|>
+<|visual token 025395|>
+<|visual token 025396|>
+<|visual token 025397|>
+<|visual token 025398|>
+<|visual token 025399|>
+<|visual token 025400|>
+<|visual token 025401|>
+<|visual token 025402|>
+<|visual token 025403|>
+<|visual token 025404|>
+<|visual token 025405|>
+<|visual token 025406|>
+<|visual token 025407|>
+<|visual token 025408|>
+<|visual token 025409|>
+<|visual token 025410|>
+<|visual token 025411|>
+<|visual token 025412|>
+<|visual token 025413|>
+<|visual token 025414|>
+<|visual token 025415|>
+<|visual token 025416|>
+<|visual token 025417|>
+<|visual token 025418|>
+<|visual token 025419|>
+<|visual token 025420|>
+<|visual token 025421|>
+<|visual token 025422|>
+<|visual token 025423|>
+<|visual token 025424|>
+<|visual token 025425|>
+<|visual token 025426|>
+<|visual token 025427|>
+<|visual token 025428|>
+<|visual token 025429|>
+<|visual token 025430|>
+<|visual token 025431|>
+<|visual token 025432|>
+<|visual token 025433|>
+<|visual token 025434|>
+<|visual token 025435|>
+<|visual token 025436|>
+<|visual token 025437|>
+<|visual token 025438|>
+<|visual token 025439|>
+<|visual token 025440|>
+<|visual token 025441|>
+<|visual token 025442|>
+<|visual token 025443|>
+<|visual token 025444|>
+<|visual token 025445|>
+<|visual token 025446|>
+<|visual token 025447|>
+<|visual token 025448|>
+<|visual token 025449|>
+<|visual token 025450|>
+<|visual token 025451|>
+<|visual token 025452|>
+<|visual token 025453|>
+<|visual token 025454|>
+<|visual token 025455|>
+<|visual token 025456|>
+<|visual token 025457|>
+<|visual token 025458|>
+<|visual token 025459|>
+<|visual token 025460|>
+<|visual token 025461|>
+<|visual token 025462|>
+<|visual token 025463|>
+<|visual token 025464|>
+<|visual token 025465|>
+<|visual token 025466|>
+<|visual token 025467|>
+<|visual token 025468|>
+<|visual token 025469|>
+<|visual token 025470|>
+<|visual token 025471|>
+<|visual token 025472|>
+<|visual token 025473|>
+<|visual token 025474|>
+<|visual token 025475|>
+<|visual token 025476|>
+<|visual token 025477|>
+<|visual token 025478|>
+<|visual token 025479|>
+<|visual token 025480|>
+<|visual token 025481|>
+<|visual token 025482|>
+<|visual token 025483|>
+<|visual token 025484|>
+<|visual token 025485|>
+<|visual token 025486|>
+<|visual token 025487|>
+<|visual token 025488|>
+<|visual token 025489|>
+<|visual token 025490|>
+<|visual token 025491|>
+<|visual token 025492|>
+<|visual token 025493|>
+<|visual token 025494|>
+<|visual token 025495|>
+<|visual token 025496|>
+<|visual token 025497|>
+<|visual token 025498|>
+<|visual token 025499|>
+<|visual token 025500|>
+<|visual token 025501|>
+<|visual token 025502|>
+<|visual token 025503|>
+<|visual token 025504|>
+<|visual token 025505|>
+<|visual token 025506|>
+<|visual token 025507|>
+<|visual token 025508|>
+<|visual token 025509|>
+<|visual token 025510|>
+<|visual token 025511|>
+<|visual token 025512|>
+<|visual token 025513|>
+<|visual token 025514|>
+<|visual token 025515|>
+<|visual token 025516|>
+<|visual token 025517|>
+<|visual token 025518|>
+<|visual token 025519|>
+<|visual token 025520|>
+<|visual token 025521|>
+<|visual token 025522|>
+<|visual token 025523|>
+<|visual token 025524|>
+<|visual token 025525|>
+<|visual token 025526|>
+<|visual token 025527|>
+<|visual token 025528|>
+<|visual token 025529|>
+<|visual token 025530|>
+<|visual token 025531|>
+<|visual token 025532|>
+<|visual token 025533|>
+<|visual token 025534|>
+<|visual token 025535|>
+<|visual token 025536|>
+<|visual token 025537|>
+<|visual token 025538|>
+<|visual token 025539|>
+<|visual token 025540|>
+<|visual token 025541|>
+<|visual token 025542|>
+<|visual token 025543|>
+<|visual token 025544|>
+<|visual token 025545|>
+<|visual token 025546|>
+<|visual token 025547|>
+<|visual token 025548|>
+<|visual token 025549|>
+<|visual token 025550|>
+<|visual token 025551|>
+<|visual token 025552|>
+<|visual token 025553|>
+<|visual token 025554|>
+<|visual token 025555|>
+<|visual token 025556|>
+<|visual token 025557|>
+<|visual token 025558|>
+<|visual token 025559|>
+<|visual token 025560|>
+<|visual token 025561|>
+<|visual token 025562|>
+<|visual token 025563|>
+<|visual token 025564|>
+<|visual token 025565|>
+<|visual token 025566|>
+<|visual token 025567|>
+<|visual token 025568|>
+<|visual token 025569|>
+<|visual token 025570|>
+<|visual token 025571|>
+<|visual token 025572|>
+<|visual token 025573|>
+<|visual token 025574|>
+<|visual token 025575|>
+<|visual token 025576|>
+<|visual token 025577|>
+<|visual token 025578|>
+<|visual token 025579|>
+<|visual token 025580|>
+<|visual token 025581|>
+<|visual token 025582|>
+<|visual token 025583|>
+<|visual token 025584|>
+<|visual token 025585|>
+<|visual token 025586|>
+<|visual token 025587|>
+<|visual token 025588|>
+<|visual token 025589|>
+<|visual token 025590|>
+<|visual token 025591|>
+<|visual token 025592|>
+<|visual token 025593|>
+<|visual token 025594|>
+<|visual token 025595|>
+<|visual token 025596|>
+<|visual token 025597|>
+<|visual token 025598|>
+<|visual token 025599|>
+<|visual token 025600|>
+<|visual token 025601|>
+<|visual token 025602|>
+<|visual token 025603|>
+<|visual token 025604|>
+<|visual token 025605|>
+<|visual token 025606|>
+<|visual token 025607|>
+<|visual token 025608|>
+<|visual token 025609|>
+<|visual token 025610|>
+<|visual token 025611|>
+<|visual token 025612|>
+<|visual token 025613|>
+<|visual token 025614|>
+<|visual token 025615|>
+<|visual token 025616|>
+<|visual token 025617|>
+<|visual token 025618|>
+<|visual token 025619|>
+<|visual token 025620|>
+<|visual token 025621|>
+<|visual token 025622|>
+<|visual token 025623|>
+<|visual token 025624|>
+<|visual token 025625|>
+<|visual token 025626|>
+<|visual token 025627|>
+<|visual token 025628|>
+<|visual token 025629|>
+<|visual token 025630|>
+<|visual token 025631|>
+<|visual token 025632|>
+<|visual token 025633|>
+<|visual token 025634|>
+<|visual token 025635|>
+<|visual token 025636|>
+<|visual token 025637|>
+<|visual token 025638|>
+<|visual token 025639|>
+<|visual token 025640|>
+<|visual token 025641|>
+<|visual token 025642|>
+<|visual token 025643|>
+<|visual token 025644|>
+<|visual token 025645|>
+<|visual token 025646|>
+<|visual token 025647|>
+<|visual token 025648|>
+<|visual token 025649|>
+<|visual token 025650|>
+<|visual token 025651|>
+<|visual token 025652|>
+<|visual token 025653|>
+<|visual token 025654|>
+<|visual token 025655|>
+<|visual token 025656|>
+<|visual token 025657|>
+<|visual token 025658|>
+<|visual token 025659|>
+<|visual token 025660|>
+<|visual token 025661|>
+<|visual token 025662|>
+<|visual token 025663|>
+<|visual token 025664|>
+<|visual token 025665|>
+<|visual token 025666|>
+<|visual token 025667|>
+<|visual token 025668|>
+<|visual token 025669|>
+<|visual token 025670|>
+<|visual token 025671|>
+<|visual token 025672|>
+<|visual token 025673|>
+<|visual token 025674|>
+<|visual token 025675|>
+<|visual token 025676|>
+<|visual token 025677|>
+<|visual token 025678|>
+<|visual token 025679|>
+<|visual token 025680|>
+<|visual token 025681|>
+<|visual token 025682|>
+<|visual token 025683|>
+<|visual token 025684|>
+<|visual token 025685|>
+<|visual token 025686|>
+<|visual token 025687|>
+<|visual token 025688|>
+<|visual token 025689|>
+<|visual token 025690|>
+<|visual token 025691|>
+<|visual token 025692|>
+<|visual token 025693|>
+<|visual token 025694|>
+<|visual token 025695|>
+<|visual token 025696|>
+<|visual token 025697|>
+<|visual token 025698|>
+<|visual token 025699|>
+<|visual token 025700|>
+<|visual token 025701|>
+<|visual token 025702|>
+<|visual token 025703|>
+<|visual token 025704|>
+<|visual token 025705|>
+<|visual token 025706|>
+<|visual token 025707|>
+<|visual token 025708|>
+<|visual token 025709|>
+<|visual token 025710|>
+<|visual token 025711|>
+<|visual token 025712|>
+<|visual token 025713|>
+<|visual token 025714|>
+<|visual token 025715|>
+<|visual token 025716|>
+<|visual token 025717|>
+<|visual token 025718|>
+<|visual token 025719|>
+<|visual token 025720|>
+<|visual token 025721|>
+<|visual token 025722|>
+<|visual token 025723|>
+<|visual token 025724|>
+<|visual token 025725|>
+<|visual token 025726|>
+<|visual token 025727|>
+<|visual token 025728|>
+<|visual token 025729|>
+<|visual token 025730|>
+<|visual token 025731|>
+<|visual token 025732|>
+<|visual token 025733|>
+<|visual token 025734|>
+<|visual token 025735|>
+<|visual token 025736|>
+<|visual token 025737|>
+<|visual token 025738|>
+<|visual token 025739|>
+<|visual token 025740|>
+<|visual token 025741|>
+<|visual token 025742|>
+<|visual token 025743|>
+<|visual token 025744|>
+<|visual token 025745|>
+<|visual token 025746|>
+<|visual token 025747|>
+<|visual token 025748|>
+<|visual token 025749|>
+<|visual token 025750|>
+<|visual token 025751|>
+<|visual token 025752|>
+<|visual token 025753|>
+<|visual token 025754|>
+<|visual token 025755|>
+<|visual token 025756|>
+<|visual token 025757|>
+<|visual token 025758|>
+<|visual token 025759|>
+<|visual token 025760|>
+<|visual token 025761|>
+<|visual token 025762|>
+<|visual token 025763|>
+<|visual token 025764|>
+<|visual token 025765|>
+<|visual token 025766|>
+<|visual token 025767|>
+<|visual token 025768|>
+<|visual token 025769|>
+<|visual token 025770|>
+<|visual token 025771|>
+<|visual token 025772|>
+<|visual token 025773|>
+<|visual token 025774|>
+<|visual token 025775|>
+<|visual token 025776|>
+<|visual token 025777|>
+<|visual token 025778|>
+<|visual token 025779|>
+<|visual token 025780|>
+<|visual token 025781|>
+<|visual token 025782|>
+<|visual token 025783|>
+<|visual token 025784|>
+<|visual token 025785|>
+<|visual token 025786|>
+<|visual token 025787|>
+<|visual token 025788|>
+<|visual token 025789|>
+<|visual token 025790|>
+<|visual token 025791|>
+<|visual token 025792|>
+<|visual token 025793|>
+<|visual token 025794|>
+<|visual token 025795|>
+<|visual token 025796|>
+<|visual token 025797|>
+<|visual token 025798|>
+<|visual token 025799|>
+<|visual token 025800|>
+<|visual token 025801|>
+<|visual token 025802|>
+<|visual token 025803|>
+<|visual token 025804|>
+<|visual token 025805|>
+<|visual token 025806|>
+<|visual token 025807|>
+<|visual token 025808|>
+<|visual token 025809|>
+<|visual token 025810|>
+<|visual token 025811|>
+<|visual token 025812|>
+<|visual token 025813|>
+<|visual token 025814|>
+<|visual token 025815|>
+<|visual token 025816|>
+<|visual token 025817|>
+<|visual token 025818|>
+<|visual token 025819|>
+<|visual token 025820|>
+<|visual token 025821|>
+<|visual token 025822|>
+<|visual token 025823|>
+<|visual token 025824|>
+<|visual token 025825|>
+<|visual token 025826|>
+<|visual token 025827|>
+<|visual token 025828|>
+<|visual token 025829|>
+<|visual token 025830|>
+<|visual token 025831|>
+<|visual token 025832|>
+<|visual token 025833|>
+<|visual token 025834|>
+<|visual token 025835|>
+<|visual token 025836|>
+<|visual token 025837|>
+<|visual token 025838|>
+<|visual token 025839|>
+<|visual token 025840|>
+<|visual token 025841|>
+<|visual token 025842|>
+<|visual token 025843|>
+<|visual token 025844|>
+<|visual token 025845|>
+<|visual token 025846|>
+<|visual token 025847|>
+<|visual token 025848|>
+<|visual token 025849|>
+<|visual token 025850|>
+<|visual token 025851|>
+<|visual token 025852|>
+<|visual token 025853|>
+<|visual token 025854|>
+<|visual token 025855|>
+<|visual token 025856|>
+<|visual token 025857|>
+<|visual token 025858|>
+<|visual token 025859|>
+<|visual token 025860|>
+<|visual token 025861|>
+<|visual token 025862|>
+<|visual token 025863|>
+<|visual token 025864|>
+<|visual token 025865|>
+<|visual token 025866|>
+<|visual token 025867|>
+<|visual token 025868|>
+<|visual token 025869|>
+<|visual token 025870|>
+<|visual token 025871|>
+<|visual token 025872|>
+<|visual token 025873|>
+<|visual token 025874|>
+<|visual token 025875|>
+<|visual token 025876|>
+<|visual token 025877|>
+<|visual token 025878|>
+<|visual token 025879|>
+<|visual token 025880|>
+<|visual token 025881|>
+<|visual token 025882|>
+<|visual token 025883|>
+<|visual token 025884|>
+<|visual token 025885|>
+<|visual token 025886|>
+<|visual token 025887|>
+<|visual token 025888|>
+<|visual token 025889|>
+<|visual token 025890|>
+<|visual token 025891|>
+<|visual token 025892|>
+<|visual token 025893|>
+<|visual token 025894|>
+<|visual token 025895|>
+<|visual token 025896|>
+<|visual token 025897|>
+<|visual token 025898|>
+<|visual token 025899|>
+<|visual token 025900|>
+<|visual token 025901|>
+<|visual token 025902|>
+<|visual token 025903|>
+<|visual token 025904|>
+<|visual token 025905|>
+<|visual token 025906|>
+<|visual token 025907|>
+<|visual token 025908|>
+<|visual token 025909|>
+<|visual token 025910|>
+<|visual token 025911|>
+<|visual token 025912|>
+<|visual token 025913|>
+<|visual token 025914|>
+<|visual token 025915|>
+<|visual token 025916|>
+<|visual token 025917|>
+<|visual token 025918|>
+<|visual token 025919|>
+<|visual token 025920|>
+<|visual token 025921|>
+<|visual token 025922|>
+<|visual token 025923|>
+<|visual token 025924|>
+<|visual token 025925|>
+<|visual token 025926|>
+<|visual token 025927|>
+<|visual token 025928|>
+<|visual token 025929|>
+<|visual token 025930|>
+<|visual token 025931|>
+<|visual token 025932|>
+<|visual token 025933|>
+<|visual token 025934|>
+<|visual token 025935|>
+<|visual token 025936|>
+<|visual token 025937|>
+<|visual token 025938|>
+<|visual token 025939|>
+<|visual token 025940|>
+<|visual token 025941|>
+<|visual token 025942|>
+<|visual token 025943|>
+<|visual token 025944|>
+<|visual token 025945|>
+<|visual token 025946|>
+<|visual token 025947|>
+<|visual token 025948|>
+<|visual token 025949|>
+<|visual token 025950|>
+<|visual token 025951|>
+<|visual token 025952|>
+<|visual token 025953|>
+<|visual token 025954|>
+<|visual token 025955|>
+<|visual token 025956|>
+<|visual token 025957|>
+<|visual token 025958|>
+<|visual token 025959|>
+<|visual token 025960|>
+<|visual token 025961|>
+<|visual token 025962|>
+<|visual token 025963|>
+<|visual token 025964|>
+<|visual token 025965|>
+<|visual token 025966|>
+<|visual token 025967|>
+<|visual token 025968|>
+<|visual token 025969|>
+<|visual token 025970|>
+<|visual token 025971|>
+<|visual token 025972|>
+<|visual token 025973|>
+<|visual token 025974|>
+<|visual token 025975|>
+<|visual token 025976|>
+<|visual token 025977|>
+<|visual token 025978|>
+<|visual token 025979|>
+<|visual token 025980|>
+<|visual token 025981|>
+<|visual token 025982|>
+<|visual token 025983|>
+<|visual token 025984|>
+<|visual token 025985|>
+<|visual token 025986|>
+<|visual token 025987|>
+<|visual token 025988|>
+<|visual token 025989|>
+<|visual token 025990|>
+<|visual token 025991|>
+<|visual token 025992|>
+<|visual token 025993|>
+<|visual token 025994|>
+<|visual token 025995|>
+<|visual token 025996|>
+<|visual token 025997|>
+<|visual token 025998|>
+<|visual token 025999|>
+<|visual token 026000|>
+<|visual token 026001|>
+<|visual token 026002|>
+<|visual token 026003|>
+<|visual token 026004|>
+<|visual token 026005|>
+<|visual token 026006|>
+<|visual token 026007|>
+<|visual token 026008|>
+<|visual token 026009|>
+<|visual token 026010|>
+<|visual token 026011|>
+<|visual token 026012|>
+<|visual token 026013|>
+<|visual token 026014|>
+<|visual token 026015|>
+<|visual token 026016|>
+<|visual token 026017|>
+<|visual token 026018|>
+<|visual token 026019|>
+<|visual token 026020|>
+<|visual token 026021|>
+<|visual token 026022|>
+<|visual token 026023|>
+<|visual token 026024|>
+<|visual token 026025|>
+<|visual token 026026|>
+<|visual token 026027|>
+<|visual token 026028|>
+<|visual token 026029|>
+<|visual token 026030|>
+<|visual token 026031|>
+<|visual token 026032|>
+<|visual token 026033|>
+<|visual token 026034|>
+<|visual token 026035|>
+<|visual token 026036|>
+<|visual token 026037|>
+<|visual token 026038|>
+<|visual token 026039|>
+<|visual token 026040|>
+<|visual token 026041|>
+<|visual token 026042|>
+<|visual token 026043|>
+<|visual token 026044|>
+<|visual token 026045|>
+<|visual token 026046|>
+<|visual token 026047|>
+<|visual token 026048|>
+<|visual token 026049|>
+<|visual token 026050|>
+<|visual token 026051|>
+<|visual token 026052|>
+<|visual token 026053|>
+<|visual token 026054|>
+<|visual token 026055|>
+<|visual token 026056|>
+<|visual token 026057|>
+<|visual token 026058|>
+<|visual token 026059|>
+<|visual token 026060|>
+<|visual token 026061|>
+<|visual token 026062|>
+<|visual token 026063|>
+<|visual token 026064|>
+<|visual token 026065|>
+<|visual token 026066|>
+<|visual token 026067|>
+<|visual token 026068|>
+<|visual token 026069|>
+<|visual token 026070|>
+<|visual token 026071|>
+<|visual token 026072|>
+<|visual token 026073|>
+<|visual token 026074|>
+<|visual token 026075|>
+<|visual token 026076|>
+<|visual token 026077|>
+<|visual token 026078|>
+<|visual token 026079|>
+<|visual token 026080|>
+<|visual token 026081|>
+<|visual token 026082|>
+<|visual token 026083|>
+<|visual token 026084|>
+<|visual token 026085|>
+<|visual token 026086|>
+<|visual token 026087|>
+<|visual token 026088|>
+<|visual token 026089|>
+<|visual token 026090|>
+<|visual token 026091|>
+<|visual token 026092|>
+<|visual token 026093|>
+<|visual token 026094|>
+<|visual token 026095|>
+<|visual token 026096|>
+<|visual token 026097|>
+<|visual token 026098|>
+<|visual token 026099|>
+<|visual token 026100|>
+<|visual token 026101|>
+<|visual token 026102|>
+<|visual token 026103|>
+<|visual token 026104|>
+<|visual token 026105|>
+<|visual token 026106|>
+<|visual token 026107|>
+<|visual token 026108|>
+<|visual token 026109|>
+<|visual token 026110|>
+<|visual token 026111|>
+<|visual token 026112|>
+<|visual token 026113|>
+<|visual token 026114|>
+<|visual token 026115|>
+<|visual token 026116|>
+<|visual token 026117|>
+<|visual token 026118|>
+<|visual token 026119|>
+<|visual token 026120|>
+<|visual token 026121|>
+<|visual token 026122|>
+<|visual token 026123|>
+<|visual token 026124|>
+<|visual token 026125|>
+<|visual token 026126|>
+<|visual token 026127|>
+<|visual token 026128|>
+<|visual token 026129|>
+<|visual token 026130|>
+<|visual token 026131|>
+<|visual token 026132|>
+<|visual token 026133|>
+<|visual token 026134|>
+<|visual token 026135|>
+<|visual token 026136|>
+<|visual token 026137|>
+<|visual token 026138|>
+<|visual token 026139|>
+<|visual token 026140|>
+<|visual token 026141|>
+<|visual token 026142|>
+<|visual token 026143|>
+<|visual token 026144|>
+<|visual token 026145|>
+<|visual token 026146|>
+<|visual token 026147|>
+<|visual token 026148|>
+<|visual token 026149|>
+<|visual token 026150|>
+<|visual token 026151|>
+<|visual token 026152|>
+<|visual token 026153|>
+<|visual token 026154|>
+<|visual token 026155|>
+<|visual token 026156|>
+<|visual token 026157|>
+<|visual token 026158|>
+<|visual token 026159|>
+<|visual token 026160|>
+<|visual token 026161|>
+<|visual token 026162|>
+<|visual token 026163|>
+<|visual token 026164|>
+<|visual token 026165|>
+<|visual token 026166|>
+<|visual token 026167|>
+<|visual token 026168|>
+<|visual token 026169|>
+<|visual token 026170|>
+<|visual token 026171|>
+<|visual token 026172|>
+<|visual token 026173|>
+<|visual token 026174|>
+<|visual token 026175|>
+<|visual token 026176|>
+<|visual token 026177|>
+<|visual token 026178|>
+<|visual token 026179|>
+<|visual token 026180|>
+<|visual token 026181|>
+<|visual token 026182|>
+<|visual token 026183|>
+<|visual token 026184|>
+<|visual token 026185|>
+<|visual token 026186|>
+<|visual token 026187|>
+<|visual token 026188|>
+<|visual token 026189|>
+<|visual token 026190|>
+<|visual token 026191|>
+<|visual token 026192|>
+<|visual token 026193|>
+<|visual token 026194|>
+<|visual token 026195|>
+<|visual token 026196|>
+<|visual token 026197|>
+<|visual token 026198|>
+<|visual token 026199|>
+<|visual token 026200|>
+<|visual token 026201|>
+<|visual token 026202|>
+<|visual token 026203|>
+<|visual token 026204|>
+<|visual token 026205|>
+<|visual token 026206|>
+<|visual token 026207|>
+<|visual token 026208|>
+<|visual token 026209|>
+<|visual token 026210|>
+<|visual token 026211|>
+<|visual token 026212|>
+<|visual token 026213|>
+<|visual token 026214|>
+<|visual token 026215|>
+<|visual token 026216|>
+<|visual token 026217|>
+<|visual token 026218|>
+<|visual token 026219|>
+<|visual token 026220|>
+<|visual token 026221|>
+<|visual token 026222|>
+<|visual token 026223|>
+<|visual token 026224|>
+<|visual token 026225|>
+<|visual token 026226|>
+<|visual token 026227|>
+<|visual token 026228|>
+<|visual token 026229|>
+<|visual token 026230|>
+<|visual token 026231|>
+<|visual token 026232|>
+<|visual token 026233|>
+<|visual token 026234|>
+<|visual token 026235|>
+<|visual token 026236|>
+<|visual token 026237|>
+<|visual token 026238|>
+<|visual token 026239|>
+<|visual token 026240|>
+<|visual token 026241|>
+<|visual token 026242|>
+<|visual token 026243|>
+<|visual token 026244|>
+<|visual token 026245|>
+<|visual token 026246|>
+<|visual token 026247|>
+<|visual token 026248|>
+<|visual token 026249|>
+<|visual token 026250|>
+<|visual token 026251|>
+<|visual token 026252|>
+<|visual token 026253|>
+<|visual token 026254|>
+<|visual token 026255|>
+<|visual token 026256|>
+<|visual token 026257|>
+<|visual token 026258|>
+<|visual token 026259|>
+<|visual token 026260|>
+<|visual token 026261|>
+<|visual token 026262|>
+<|visual token 026263|>
+<|visual token 026264|>
+<|visual token 026265|>
+<|visual token 026266|>
+<|visual token 026267|>
+<|visual token 026268|>
+<|visual token 026269|>
+<|visual token 026270|>
+<|visual token 026271|>
+<|visual token 026272|>
+<|visual token 026273|>
+<|visual token 026274|>
+<|visual token 026275|>
+<|visual token 026276|>
+<|visual token 026277|>
+<|visual token 026278|>
+<|visual token 026279|>
+<|visual token 026280|>
+<|visual token 026281|>
+<|visual token 026282|>
+<|visual token 026283|>
+<|visual token 026284|>
+<|visual token 026285|>
+<|visual token 026286|>
+<|visual token 026287|>
+<|visual token 026288|>
+<|visual token 026289|>
+<|visual token 026290|>
+<|visual token 026291|>
+<|visual token 026292|>
+<|visual token 026293|>
+<|visual token 026294|>
+<|visual token 026295|>
+<|visual token 026296|>
+<|visual token 026297|>
+<|visual token 026298|>
+<|visual token 026299|>
+<|visual token 026300|>
+<|visual token 026301|>
+<|visual token 026302|>
+<|visual token 026303|>
+<|visual token 026304|>
+<|visual token 026305|>
+<|visual token 026306|>
+<|visual token 026307|>
+<|visual token 026308|>
+<|visual token 026309|>
+<|visual token 026310|>
+<|visual token 026311|>
+<|visual token 026312|>
+<|visual token 026313|>
+<|visual token 026314|>
+<|visual token 026315|>
+<|visual token 026316|>
+<|visual token 026317|>
+<|visual token 026318|>
+<|visual token 026319|>
+<|visual token 026320|>
+<|visual token 026321|>
+<|visual token 026322|>
+<|visual token 026323|>
+<|visual token 026324|>
+<|visual token 026325|>
+<|visual token 026326|>
+<|visual token 026327|>
+<|visual token 026328|>
+<|visual token 026329|>
+<|visual token 026330|>
+<|visual token 026331|>
+<|visual token 026332|>
+<|visual token 026333|>
+<|visual token 026334|>
+<|visual token 026335|>
+<|visual token 026336|>
+<|visual token 026337|>
+<|visual token 026338|>
+<|visual token 026339|>
+<|visual token 026340|>
+<|visual token 026341|>
+<|visual token 026342|>
+<|visual token 026343|>
+<|visual token 026344|>
+<|visual token 026345|>
+<|visual token 026346|>
+<|visual token 026347|>
+<|visual token 026348|>
+<|visual token 026349|>
+<|visual token 026350|>
+<|visual token 026351|>
+<|visual token 026352|>
+<|visual token 026353|>
+<|visual token 026354|>
+<|visual token 026355|>
+<|visual token 026356|>
+<|visual token 026357|>
+<|visual token 026358|>
+<|visual token 026359|>
+<|visual token 026360|>
+<|visual token 026361|>
+<|visual token 026362|>
+<|visual token 026363|>
+<|visual token 026364|>
+<|visual token 026365|>
+<|visual token 026366|>
+<|visual token 026367|>
+<|visual token 026368|>
+<|visual token 026369|>
+<|visual token 026370|>
+<|visual token 026371|>
+<|visual token 026372|>
+<|visual token 026373|>
+<|visual token 026374|>
+<|visual token 026375|>
+<|visual token 026376|>
+<|visual token 026377|>
+<|visual token 026378|>
+<|visual token 026379|>
+<|visual token 026380|>
+<|visual token 026381|>
+<|visual token 026382|>
+<|visual token 026383|>
+<|visual token 026384|>
+<|visual token 026385|>
+<|visual token 026386|>
+<|visual token 026387|>
+<|visual token 026388|>
+<|visual token 026389|>
+<|visual token 026390|>
+<|visual token 026391|>
+<|visual token 026392|>
+<|visual token 026393|>
+<|visual token 026394|>
+<|visual token 026395|>
+<|visual token 026396|>
+<|visual token 026397|>
+<|visual token 026398|>
+<|visual token 026399|>
+<|visual token 026400|>
+<|visual token 026401|>
+<|visual token 026402|>
+<|visual token 026403|>
+<|visual token 026404|>
+<|visual token 026405|>
+<|visual token 026406|>
+<|visual token 026407|>
+<|visual token 026408|>
+<|visual token 026409|>
+<|visual token 026410|>
+<|visual token 026411|>
+<|visual token 026412|>
+<|visual token 026413|>
+<|visual token 026414|>
+<|visual token 026415|>
+<|visual token 026416|>
+<|visual token 026417|>
+<|visual token 026418|>
+<|visual token 026419|>
+<|visual token 026420|>
+<|visual token 026421|>
+<|visual token 026422|>
+<|visual token 026423|>
+<|visual token 026424|>
+<|visual token 026425|>
+<|visual token 026426|>
+<|visual token 026427|>
+<|visual token 026428|>
+<|visual token 026429|>
+<|visual token 026430|>
+<|visual token 026431|>
+<|visual token 026432|>
+<|visual token 026433|>
+<|visual token 026434|>
+<|visual token 026435|>
+<|visual token 026436|>
+<|visual token 026437|>
+<|visual token 026438|>
+<|visual token 026439|>
+<|visual token 026440|>
+<|visual token 026441|>
+<|visual token 026442|>
+<|visual token 026443|>
+<|visual token 026444|>
+<|visual token 026445|>
+<|visual token 026446|>
+<|visual token 026447|>
+<|visual token 026448|>
+<|visual token 026449|>
+<|visual token 026450|>
+<|visual token 026451|>
+<|visual token 026452|>
+<|visual token 026453|>
+<|visual token 026454|>
+<|visual token 026455|>
+<|visual token 026456|>
+<|visual token 026457|>
+<|visual token 026458|>
+<|visual token 026459|>
+<|visual token 026460|>
+<|visual token 026461|>
+<|visual token 026462|>
+<|visual token 026463|>
+<|visual token 026464|>
+<|visual token 026465|>
+<|visual token 026466|>
+<|visual token 026467|>
+<|visual token 026468|>
+<|visual token 026469|>
+<|visual token 026470|>
+<|visual token 026471|>
+<|visual token 026472|>
+<|visual token 026473|>
+<|visual token 026474|>
+<|visual token 026475|>
+<|visual token 026476|>
+<|visual token 026477|>
+<|visual token 026478|>
+<|visual token 026479|>
+<|visual token 026480|>
+<|visual token 026481|>
+<|visual token 026482|>
+<|visual token 026483|>
+<|visual token 026484|>
+<|visual token 026485|>
+<|visual token 026486|>
+<|visual token 026487|>
+<|visual token 026488|>
+<|visual token 026489|>
+<|visual token 026490|>
+<|visual token 026491|>
+<|visual token 026492|>
+<|visual token 026493|>
+<|visual token 026494|>
+<|visual token 026495|>
+<|visual token 026496|>
+<|visual token 026497|>
+<|visual token 026498|>
+<|visual token 026499|>
+<|visual token 026500|>
+<|visual token 026501|>
+<|visual token 026502|>
+<|visual token 026503|>
+<|visual token 026504|>
+<|visual token 026505|>
+<|visual token 026506|>
+<|visual token 026507|>
+<|visual token 026508|>
+<|visual token 026509|>
+<|visual token 026510|>
+<|visual token 026511|>
+<|visual token 026512|>
+<|visual token 026513|>
+<|visual token 026514|>
+<|visual token 026515|>
+<|visual token 026516|>
+<|visual token 026517|>
+<|visual token 026518|>
+<|visual token 026519|>
+<|visual token 026520|>
+<|visual token 026521|>
+<|visual token 026522|>
+<|visual token 026523|>
+<|visual token 026524|>
+<|visual token 026525|>
+<|visual token 026526|>
+<|visual token 026527|>
+<|visual token 026528|>
+<|visual token 026529|>
+<|visual token 026530|>
+<|visual token 026531|>
+<|visual token 026532|>
+<|visual token 026533|>
+<|visual token 026534|>
+<|visual token 026535|>
+<|visual token 026536|>
+<|visual token 026537|>
+<|visual token 026538|>
+<|visual token 026539|>
+<|visual token 026540|>
+<|visual token 026541|>
+<|visual token 026542|>
+<|visual token 026543|>
+<|visual token 026544|>
+<|visual token 026545|>
+<|visual token 026546|>
+<|visual token 026547|>
+<|visual token 026548|>
+<|visual token 026549|>
+<|visual token 026550|>
+<|visual token 026551|>
+<|visual token 026552|>
+<|visual token 026553|>
+<|visual token 026554|>
+<|visual token 026555|>
+<|visual token 026556|>
+<|visual token 026557|>
+<|visual token 026558|>
+<|visual token 026559|>
+<|visual token 026560|>
+<|visual token 026561|>
+<|visual token 026562|>
+<|visual token 026563|>
+<|visual token 026564|>
+<|visual token 026565|>
+<|visual token 026566|>
+<|visual token 026567|>
+<|visual token 026568|>
+<|visual token 026569|>
+<|visual token 026570|>
+<|visual token 026571|>
+<|visual token 026572|>
+<|visual token 026573|>
+<|visual token 026574|>
+<|visual token 026575|>
+<|visual token 026576|>
+<|visual token 026577|>
+<|visual token 026578|>
+<|visual token 026579|>
+<|visual token 026580|>
+<|visual token 026581|>
+<|visual token 026582|>
+<|visual token 026583|>
+<|visual token 026584|>
+<|visual token 026585|>
+<|visual token 026586|>
+<|visual token 026587|>
+<|visual token 026588|>
+<|visual token 026589|>
+<|visual token 026590|>
+<|visual token 026591|>
+<|visual token 026592|>
+<|visual token 026593|>
+<|visual token 026594|>
+<|visual token 026595|>
+<|visual token 026596|>
+<|visual token 026597|>
+<|visual token 026598|>
+<|visual token 026599|>
+<|visual token 026600|>
+<|visual token 026601|>
+<|visual token 026602|>
+<|visual token 026603|>
+<|visual token 026604|>
+<|visual token 026605|>
+<|visual token 026606|>
+<|visual token 026607|>
+<|visual token 026608|>
+<|visual token 026609|>
+<|visual token 026610|>
+<|visual token 026611|>
+<|visual token 026612|>
+<|visual token 026613|>
+<|visual token 026614|>
+<|visual token 026615|>
+<|visual token 026616|>
+<|visual token 026617|>
+<|visual token 026618|>
+<|visual token 026619|>
+<|visual token 026620|>
+<|visual token 026621|>
+<|visual token 026622|>
+<|visual token 026623|>
+<|visual token 026624|>
+<|visual token 026625|>
+<|visual token 026626|>
+<|visual token 026627|>
+<|visual token 026628|>
+<|visual token 026629|>
+<|visual token 026630|>
+<|visual token 026631|>
+<|visual token 026632|>
+<|visual token 026633|>
+<|visual token 026634|>
+<|visual token 026635|>
+<|visual token 026636|>
+<|visual token 026637|>
+<|visual token 026638|>
+<|visual token 026639|>
+<|visual token 026640|>
+<|visual token 026641|>
+<|visual token 026642|>
+<|visual token 026643|>
+<|visual token 026644|>
+<|visual token 026645|>
+<|visual token 026646|>
+<|visual token 026647|>
+<|visual token 026648|>
+<|visual token 026649|>
+<|visual token 026650|>
+<|visual token 026651|>
+<|visual token 026652|>
+<|visual token 026653|>
+<|visual token 026654|>
+<|visual token 026655|>
+<|visual token 026656|>
+<|visual token 026657|>
+<|visual token 026658|>
+<|visual token 026659|>
+<|visual token 026660|>
+<|visual token 026661|>
+<|visual token 026662|>
+<|visual token 026663|>
+<|visual token 026664|>
+<|visual token 026665|>
+<|visual token 026666|>
+<|visual token 026667|>
+<|visual token 026668|>
+<|visual token 026669|>
+<|visual token 026670|>
+<|visual token 026671|>
+<|visual token 026672|>
+<|visual token 026673|>
+<|visual token 026674|>
+<|visual token 026675|>
+<|visual token 026676|>
+<|visual token 026677|>
+<|visual token 026678|>
+<|visual token 026679|>
+<|visual token 026680|>
+<|visual token 026681|>
+<|visual token 026682|>
+<|visual token 026683|>
+<|visual token 026684|>
+<|visual token 026685|>
+<|visual token 026686|>
+<|visual token 026687|>
+<|visual token 026688|>
+<|visual token 026689|>
+<|visual token 026690|>
+<|visual token 026691|>
+<|visual token 026692|>
+<|visual token 026693|>
+<|visual token 026694|>
+<|visual token 026695|>
+<|visual token 026696|>
+<|visual token 026697|>
+<|visual token 026698|>
+<|visual token 026699|>
+<|visual token 026700|>
+<|visual token 026701|>
+<|visual token 026702|>
+<|visual token 026703|>
+<|visual token 026704|>
+<|visual token 026705|>
+<|visual token 026706|>
+<|visual token 026707|>
+<|visual token 026708|>
+<|visual token 026709|>
+<|visual token 026710|>
+<|visual token 026711|>
+<|visual token 026712|>
+<|visual token 026713|>
+<|visual token 026714|>
+<|visual token 026715|>
+<|visual token 026716|>
+<|visual token 026717|>
+<|visual token 026718|>
+<|visual token 026719|>
+<|visual token 026720|>
+<|visual token 026721|>
+<|visual token 026722|>
+<|visual token 026723|>
+<|visual token 026724|>
+<|visual token 026725|>
+<|visual token 026726|>
+<|visual token 026727|>
+<|visual token 026728|>
+<|visual token 026729|>
+<|visual token 026730|>
+<|visual token 026731|>
+<|visual token 026732|>
+<|visual token 026733|>
+<|visual token 026734|>
+<|visual token 026735|>
+<|visual token 026736|>
+<|visual token 026737|>
+<|visual token 026738|>
+<|visual token 026739|>
+<|visual token 026740|>
+<|visual token 026741|>
+<|visual token 026742|>
+<|visual token 026743|>
+<|visual token 026744|>
+<|visual token 026745|>
+<|visual token 026746|>
+<|visual token 026747|>
+<|visual token 026748|>
+<|visual token 026749|>
+<|visual token 026750|>
+<|visual token 026751|>
+<|visual token 026752|>
+<|visual token 026753|>
+<|visual token 026754|>
+<|visual token 026755|>
+<|visual token 026756|>
+<|visual token 026757|>
+<|visual token 026758|>
+<|visual token 026759|>
+<|visual token 026760|>
+<|visual token 026761|>
+<|visual token 026762|>
+<|visual token 026763|>
+<|visual token 026764|>
+<|visual token 026765|>
+<|visual token 026766|>
+<|visual token 026767|>
+<|visual token 026768|>
+<|visual token 026769|>
+<|visual token 026770|>
+<|visual token 026771|>
+<|visual token 026772|>
+<|visual token 026773|>
+<|visual token 026774|>
+<|visual token 026775|>
+<|visual token 026776|>
+<|visual token 026777|>
+<|visual token 026778|>
+<|visual token 026779|>
+<|visual token 026780|>
+<|visual token 026781|>
+<|visual token 026782|>
+<|visual token 026783|>
+<|visual token 026784|>
+<|visual token 026785|>
+<|visual token 026786|>
+<|visual token 026787|>
+<|visual token 026788|>
+<|visual token 026789|>
+<|visual token 026790|>
+<|visual token 026791|>
+<|visual token 026792|>
+<|visual token 026793|>
+<|visual token 026794|>
+<|visual token 026795|>
+<|visual token 026796|>
+<|visual token 026797|>
+<|visual token 026798|>
+<|visual token 026799|>
+<|visual token 026800|>
+<|visual token 026801|>
+<|visual token 026802|>
+<|visual token 026803|>
+<|visual token 026804|>
+<|visual token 026805|>
+<|visual token 026806|>
+<|visual token 026807|>
+<|visual token 026808|>
+<|visual token 026809|>
+<|visual token 026810|>
+<|visual token 026811|>
+<|visual token 026812|>
+<|visual token 026813|>
+<|visual token 026814|>
+<|visual token 026815|>
+<|visual token 026816|>
+<|visual token 026817|>
+<|visual token 026818|>
+<|visual token 026819|>
+<|visual token 026820|>
+<|visual token 026821|>
+<|visual token 026822|>
+<|visual token 026823|>
+<|visual token 026824|>
+<|visual token 026825|>
+<|visual token 026826|>
+<|visual token 026827|>
+<|visual token 026828|>
+<|visual token 026829|>
+<|visual token 026830|>
+<|visual token 026831|>
+<|visual token 026832|>
+<|visual token 026833|>
+<|visual token 026834|>
+<|visual token 026835|>
+<|visual token 026836|>
+<|visual token 026837|>
+<|visual token 026838|>
+<|visual token 026839|>
+<|visual token 026840|>
+<|visual token 026841|>
+<|visual token 026842|>
+<|visual token 026843|>
+<|visual token 026844|>
+<|visual token 026845|>
+<|visual token 026846|>
+<|visual token 026847|>
+<|visual token 026848|>
+<|visual token 026849|>
+<|visual token 026850|>
+<|visual token 026851|>
+<|visual token 026852|>
+<|visual token 026853|>
+<|visual token 026854|>
+<|visual token 026855|>
+<|visual token 026856|>
+<|visual token 026857|>
+<|visual token 026858|>
+<|visual token 026859|>
+<|visual token 026860|>
+<|visual token 026861|>
+<|visual token 026862|>
+<|visual token 026863|>
+<|visual token 026864|>
+<|visual token 026865|>
+<|visual token 026866|>
+<|visual token 026867|>
+<|visual token 026868|>
+<|visual token 026869|>
+<|visual token 026870|>
+<|visual token 026871|>
+<|visual token 026872|>
+<|visual token 026873|>
+<|visual token 026874|>
+<|visual token 026875|>
+<|visual token 026876|>
+<|visual token 026877|>
+<|visual token 026878|>
+<|visual token 026879|>
+<|visual token 026880|>
+<|visual token 026881|>
+<|visual token 026882|>
+<|visual token 026883|>
+<|visual token 026884|>
+<|visual token 026885|>
+<|visual token 026886|>
+<|visual token 026887|>
+<|visual token 026888|>
+<|visual token 026889|>
+<|visual token 026890|>
+<|visual token 026891|>
+<|visual token 026892|>
+<|visual token 026893|>
+<|visual token 026894|>
+<|visual token 026895|>
+<|visual token 026896|>
+<|visual token 026897|>
+<|visual token 026898|>
+<|visual token 026899|>
+<|visual token 026900|>
+<|visual token 026901|>
+<|visual token 026902|>
+<|visual token 026903|>
+<|visual token 026904|>
+<|visual token 026905|>
+<|visual token 026906|>
+<|visual token 026907|>
+<|visual token 026908|>
+<|visual token 026909|>
+<|visual token 026910|>
+<|visual token 026911|>
+<|visual token 026912|>
+<|visual token 026913|>
+<|visual token 026914|>
+<|visual token 026915|>
+<|visual token 026916|>
+<|visual token 026917|>
+<|visual token 026918|>
+<|visual token 026919|>
+<|visual token 026920|>
+<|visual token 026921|>
+<|visual token 026922|>
+<|visual token 026923|>
+<|visual token 026924|>
+<|visual token 026925|>
+<|visual token 026926|>
+<|visual token 026927|>
+<|visual token 026928|>
+<|visual token 026929|>
+<|visual token 026930|>
+<|visual token 026931|>
+<|visual token 026932|>
+<|visual token 026933|>
+<|visual token 026934|>
+<|visual token 026935|>
+<|visual token 026936|>
+<|visual token 026937|>
+<|visual token 026938|>
+<|visual token 026939|>
+<|visual token 026940|>
+<|visual token 026941|>
+<|visual token 026942|>
+<|visual token 026943|>
+<|visual token 026944|>
+<|visual token 026945|>
+<|visual token 026946|>
+<|visual token 026947|>
+<|visual token 026948|>
+<|visual token 026949|>
+<|visual token 026950|>
+<|visual token 026951|>
+<|visual token 026952|>
+<|visual token 026953|>
+<|visual token 026954|>
+<|visual token 026955|>
+<|visual token 026956|>
+<|visual token 026957|>
+<|visual token 026958|>
+<|visual token 026959|>
+<|visual token 026960|>
+<|visual token 026961|>
+<|visual token 026962|>
+<|visual token 026963|>
+<|visual token 026964|>
+<|visual token 026965|>
+<|visual token 026966|>
+<|visual token 026967|>
+<|visual token 026968|>
+<|visual token 026969|>
+<|visual token 026970|>
+<|visual token 026971|>
+<|visual token 026972|>
+<|visual token 026973|>
+<|visual token 026974|>
+<|visual token 026975|>
+<|visual token 026976|>
+<|visual token 026977|>
+<|visual token 026978|>
+<|visual token 026979|>
+<|visual token 026980|>
+<|visual token 026981|>
+<|visual token 026982|>
+<|visual token 026983|>
+<|visual token 026984|>
+<|visual token 026985|>
+<|visual token 026986|>
+<|visual token 026987|>
+<|visual token 026988|>
+<|visual token 026989|>
+<|visual token 026990|>
+<|visual token 026991|>
+<|visual token 026992|>
+<|visual token 026993|>
+<|visual token 026994|>
+<|visual token 026995|>
+<|visual token 026996|>
+<|visual token 026997|>
+<|visual token 026998|>
+<|visual token 026999|>
+<|visual token 027000|>
+<|visual token 027001|>
+<|visual token 027002|>
+<|visual token 027003|>
+<|visual token 027004|>
+<|visual token 027005|>
+<|visual token 027006|>
+<|visual token 027007|>
+<|visual token 027008|>
+<|visual token 027009|>
+[... 2,562 consecutive added lines omitted: sequential visual-token placeholders <|visual token 027010|> through <|visual token 029571|>, one per line ...]
+<|visual token 029572|>
+<|visual token 029573|>
+<|visual token 029574|>
+<|visual token 029575|>
+<|visual token 029576|>
+<|visual token 029577|>
+<|visual token 029578|>
+<|visual token 029579|>
+<|visual token 029580|>
+<|visual token 029581|>
+<|visual token 029582|>
+<|visual token 029583|>
+<|visual token 029584|>
+<|visual token 029585|>
+<|visual token 029586|>
+<|visual token 029587|>
+<|visual token 029588|>
+<|visual token 029589|>
+<|visual token 029590|>
+<|visual token 029591|>
+<|visual token 029592|>
+<|visual token 029593|>
+<|visual token 029594|>
+<|visual token 029595|>
+<|visual token 029596|>
+<|visual token 029597|>
+<|visual token 029598|>
+<|visual token 029599|>
+<|visual token 029600|>
+<|visual token 029601|>
+<|visual token 029602|>
+<|visual token 029603|>
+<|visual token 029604|>
+<|visual token 029605|>
+<|visual token 029606|>
+<|visual token 029607|>
+<|visual token 029608|>
+<|visual token 029609|>
+<|visual token 029610|>
+<|visual token 029611|>
+<|visual token 029612|>
+<|visual token 029613|>
+<|visual token 029614|>
+<|visual token 029615|>
+<|visual token 029616|>
+<|visual token 029617|>
+<|visual token 029618|>
+<|visual token 029619|>
+<|visual token 029620|>
+<|visual token 029621|>
+<|visual token 029622|>
+<|visual token 029623|>
+<|visual token 029624|>
+<|visual token 029625|>
+<|visual token 029626|>
+<|visual token 029627|>
+<|visual token 029628|>
+<|visual token 029629|>
+<|visual token 029630|>
+<|visual token 029631|>
+<|visual token 029632|>
+<|visual token 029633|>
+<|visual token 029634|>
+<|visual token 029635|>
+<|visual token 029636|>
+<|visual token 029637|>
+<|visual token 029638|>
+<|visual token 029639|>
+<|visual token 029640|>
+<|visual token 029641|>
+<|visual token 029642|>
+<|visual token 029643|>
+<|visual token 029644|>
+<|visual token 029645|>
+<|visual token 029646|>
+<|visual token 029647|>
+<|visual token 029648|>
+<|visual token 029649|>
+<|visual token 029650|>
+<|visual token 029651|>
+<|visual token 029652|>
+<|visual token 029653|>
+<|visual token 029654|>
+<|visual token 029655|>
+<|visual token 029656|>
+<|visual token 029657|>
+<|visual token 029658|>
+<|visual token 029659|>
+<|visual token 029660|>
+<|visual token 029661|>
+<|visual token 029662|>
+<|visual token 029663|>
+<|visual token 029664|>
+<|visual token 029665|>
+<|visual token 029666|>
+<|visual token 029667|>
+<|visual token 029668|>
+<|visual token 029669|>
+<|visual token 029670|>
+<|visual token 029671|>
+<|visual token 029672|>
+<|visual token 029673|>
+<|visual token 029674|>
+<|visual token 029675|>
+<|visual token 029676|>
+<|visual token 029677|>
+<|visual token 029678|>
+<|visual token 029679|>
+<|visual token 029680|>
+<|visual token 029681|>
+<|visual token 029682|>
+<|visual token 029683|>
+<|visual token 029684|>
+<|visual token 029685|>
+<|visual token 029686|>
+<|visual token 029687|>
+<|visual token 029688|>
+<|visual token 029689|>
+<|visual token 029690|>
+<|visual token 029691|>
+<|visual token 029692|>
+<|visual token 029693|>
+<|visual token 029694|>
+<|visual token 029695|>
+<|visual token 029696|>
+<|visual token 029697|>
+<|visual token 029698|>
+<|visual token 029699|>
+<|visual token 029700|>
+<|visual token 029701|>
+<|visual token 029702|>
+<|visual token 029703|>
+<|visual token 029704|>
+<|visual token 029705|>
+<|visual token 029706|>
+<|visual token 029707|>
+<|visual token 029708|>
+<|visual token 029709|>
+<|visual token 029710|>
+<|visual token 029711|>
+<|visual token 029712|>
+<|visual token 029713|>
+<|visual token 029714|>
+<|visual token 029715|>
+<|visual token 029716|>
+<|visual token 029717|>
+<|visual token 029718|>
+<|visual token 029719|>
+<|visual token 029720|>
+<|visual token 029721|>
+<|visual token 029722|>
+<|visual token 029723|>
+<|visual token 029724|>
+<|visual token 029725|>
+<|visual token 029726|>
+<|visual token 029727|>
+<|visual token 029728|>
+<|visual token 029729|>
+<|visual token 029730|>
+<|visual token 029731|>
+<|visual token 029732|>
+<|visual token 029733|>
+<|visual token 029734|>
+<|visual token 029735|>
+<|visual token 029736|>
+<|visual token 029737|>
+<|visual token 029738|>
+<|visual token 029739|>
+<|visual token 029740|>
+<|visual token 029741|>
+<|visual token 029742|>
+<|visual token 029743|>
+<|visual token 029744|>
+<|visual token 029745|>
+<|visual token 029746|>
+<|visual token 029747|>
+<|visual token 029748|>
+<|visual token 029749|>
+<|visual token 029750|>
+<|visual token 029751|>
+<|visual token 029752|>
+<|visual token 029753|>
+<|visual token 029754|>
+<|visual token 029755|>
+<|visual token 029756|>
+<|visual token 029757|>
+<|visual token 029758|>
+<|visual token 029759|>
+<|visual token 029760|>
+<|visual token 029761|>
+<|visual token 029762|>
+<|visual token 029763|>
+<|visual token 029764|>
+<|visual token 029765|>
+<|visual token 029766|>
+<|visual token 029767|>
+<|visual token 029768|>
+<|visual token 029769|>
+<|visual token 029770|>
+<|visual token 029771|>
+<|visual token 029772|>
+<|visual token 029773|>
+<|visual token 029774|>
+<|visual token 029775|>
+<|visual token 029776|>
+<|visual token 029777|>
+<|visual token 029778|>
+<|visual token 029779|>
+<|visual token 029780|>
+<|visual token 029781|>
+<|visual token 029782|>
+<|visual token 029783|>
+<|visual token 029784|>
+<|visual token 029785|>
+<|visual token 029786|>
+<|visual token 029787|>
+<|visual token 029788|>
+<|visual token 029789|>
+<|visual token 029790|>
+<|visual token 029791|>
+<|visual token 029792|>
+<|visual token 029793|>
+<|visual token 029794|>
+<|visual token 029795|>
+<|visual token 029796|>
+<|visual token 029797|>
+<|visual token 029798|>
+<|visual token 029799|>
+<|visual token 029800|>
+<|visual token 029801|>
+<|visual token 029802|>
+<|visual token 029803|>
+<|visual token 029804|>
+<|visual token 029805|>
+<|visual token 029806|>
+<|visual token 029807|>
+<|visual token 029808|>
+<|visual token 029809|>
+<|visual token 029810|>
+<|visual token 029811|>
+<|visual token 029812|>
+<|visual token 029813|>
+<|visual token 029814|>
+<|visual token 029815|>
+<|visual token 029816|>
+<|visual token 029817|>
+<|visual token 029818|>
+<|visual token 029819|>
+<|visual token 029820|>
+<|visual token 029821|>
+<|visual token 029822|>
+<|visual token 029823|>
+<|visual token 029824|>
+<|visual token 029825|>
+<|visual token 029826|>
+<|visual token 029827|>
+<|visual token 029828|>
+<|visual token 029829|>
+<|visual token 029830|>
+<|visual token 029831|>
+<|visual token 029832|>
+<|visual token 029833|>
+<|visual token 029834|>
+<|visual token 029835|>
+<|visual token 029836|>
+<|visual token 029837|>
+<|visual token 029838|>
+<|visual token 029839|>
+<|visual token 029840|>
+<|visual token 029841|>
+<|visual token 029842|>
+<|visual token 029843|>
+<|visual token 029844|>
+<|visual token 029845|>
+<|visual token 029846|>
+<|visual token 029847|>
+<|visual token 029848|>
+<|visual token 029849|>
+<|visual token 029850|>
+<|visual token 029851|>
+<|visual token 029852|>
+<|visual token 029853|>
+<|visual token 029854|>
+<|visual token 029855|>
+<|visual token 029856|>
+<|visual token 029857|>
+<|visual token 029858|>
+<|visual token 029859|>
+<|visual token 029860|>
+<|visual token 029861|>
+<|visual token 029862|>
+<|visual token 029863|>
+<|visual token 029864|>
+<|visual token 029865|>
+<|visual token 029866|>
+<|visual token 029867|>
+<|visual token 029868|>
+<|visual token 029869|>
+<|visual token 029870|>
+<|visual token 029871|>
+<|visual token 029872|>
+<|visual token 029873|>
+<|visual token 029874|>
+<|visual token 029875|>
+<|visual token 029876|>
+<|visual token 029877|>
+<|visual token 029878|>
+<|visual token 029879|>
+<|visual token 029880|>
+<|visual token 029881|>
+<|visual token 029882|>
+<|visual token 029883|>
+<|visual token 029884|>
+<|visual token 029885|>
+<|visual token 029886|>
+<|visual token 029887|>
+<|visual token 029888|>
+<|visual token 029889|>
+<|visual token 029890|>
+<|visual token 029891|>
+<|visual token 029892|>
+<|visual token 029893|>
+<|visual token 029894|>
+<|visual token 029895|>
+<|visual token 029896|>
+<|visual token 029897|>
+<|visual token 029898|>
+<|visual token 029899|>
+<|visual token 029900|>
+<|visual token 029901|>
+<|visual token 029902|>
+<|visual token 029903|>
+<|visual token 029904|>
+<|visual token 029905|>
+<|visual token 029906|>
+<|visual token 029907|>
+<|visual token 029908|>
+<|visual token 029909|>
+<|visual token 029910|>
+<|visual token 029911|>
+<|visual token 029912|>
+<|visual token 029913|>
+<|visual token 029914|>
+<|visual token 029915|>
+<|visual token 029916|>
+<|visual token 029917|>
+<|visual token 029918|>
+<|visual token 029919|>
+<|visual token 029920|>
+<|visual token 029921|>
+<|visual token 029922|>
+<|visual token 029923|>
+<|visual token 029924|>
+<|visual token 029925|>
+<|visual token 029926|>
+<|visual token 029927|>
+<|visual token 029928|>
+<|visual token 029929|>
+<|visual token 029930|>
+<|visual token 029931|>
+<|visual token 029932|>
+<|visual token 029933|>
+<|visual token 029934|>
+<|visual token 029935|>
+<|visual token 029936|>
+<|visual token 029937|>
+<|visual token 029938|>
+<|visual token 029939|>
+<|visual token 029940|>
+<|visual token 029941|>
+<|visual token 029942|>
+<|visual token 029943|>
+<|visual token 029944|>
+<|visual token 029945|>
+<|visual token 029946|>
+<|visual token 029947|>
+<|visual token 029948|>
+<|visual token 029949|>
+<|visual token 029950|>
+<|visual token 029951|>
+<|visual token 029952|>
+<|visual token 029953|>
+<|visual token 029954|>
+<|visual token 029955|>
+<|visual token 029956|>
+<|visual token 029957|>
+<|visual token 029958|>
+<|visual token 029959|>
+<|visual token 029960|>
+<|visual token 029961|>
+<|visual token 029962|>
+<|visual token 029963|>
+<|visual token 029964|>
+<|visual token 029965|>
+<|visual token 029966|>
+<|visual token 029967|>
+<|visual token 029968|>
+<|visual token 029969|>
+<|visual token 029970|>
+<|visual token 029971|>
+<|visual token 029972|>
+<|visual token 029973|>
+<|visual token 029974|>
+<|visual token 029975|>
+<|visual token 029976|>
+<|visual token 029977|>
+<|visual token 029978|>
+<|visual token 029979|>
+<|visual token 029980|>
+<|visual token 029981|>
+<|visual token 029982|>
+<|visual token 029983|>
+<|visual token 029984|>
+<|visual token 029985|>
+<|visual token 029986|>
+<|visual token 029987|>
+<|visual token 029988|>
+<|visual token 029989|>
+<|visual token 029990|>
+<|visual token 029991|>
+<|visual token 029992|>
+<|visual token 029993|>
+<|visual token 029994|>
+<|visual token 029995|>
+<|visual token 029996|>
+<|visual token 029997|>
+<|visual token 029998|>
+<|visual token 029999|>
+<|visual token 030000|>
+<|visual token 030001|>
+<|visual token 030002|>
+<|visual token 030003|>
+<|visual token 030004|>
+<|visual token 030005|>
+<|visual token 030006|>
+<|visual token 030007|>
+<|visual token 030008|>
+<|visual token 030009|>
+<|visual token 030010|>
+<|visual token 030011|>
+<|visual token 030012|>
+<|visual token 030013|>
+<|visual token 030014|>
+<|visual token 030015|>
+<|visual token 030016|>
+<|visual token 030017|>
+<|visual token 030018|>
+<|visual token 030019|>
+<|visual token 030020|>
+<|visual token 030021|>
+<|visual token 030022|>
+<|visual token 030023|>
+<|visual token 030024|>
+<|visual token 030025|>
+<|visual token 030026|>
+<|visual token 030027|>
+<|visual token 030028|>
+<|visual token 030029|>
+<|visual token 030030|>
+<|visual token 030031|>
+<|visual token 030032|>
+<|visual token 030033|>
+<|visual token 030034|>
+<|visual token 030035|>
+<|visual token 030036|>
+<|visual token 030037|>
+<|visual token 030038|>
+<|visual token 030039|>
+<|visual token 030040|>
+<|visual token 030041|>
+<|visual token 030042|>
+<|visual token 030043|>
+<|visual token 030044|>
+<|visual token 030045|>
+<|visual token 030046|>
+<|visual token 030047|>
+<|visual token 030048|>
+<|visual token 030049|>
+<|visual token 030050|>
+<|visual token 030051|>
+<|visual token 030052|>
+<|visual token 030053|>
+<|visual token 030054|>
+<|visual token 030055|>
+<|visual token 030056|>
+<|visual token 030057|>
+<|visual token 030058|>
+<|visual token 030059|>
+<|visual token 030060|>
+<|visual token 030061|>
+<|visual token 030062|>
+<|visual token 030063|>
+<|visual token 030064|>
+<|visual token 030065|>
+<|visual token 030066|>
+<|visual token 030067|>
+<|visual token 030068|>
+<|visual token 030069|>
+<|visual token 030070|>
+<|visual token 030071|>
+<|visual token 030072|>
+<|visual token 030073|>
+<|visual token 030074|>
+<|visual token 030075|>
+<|visual token 030076|>
+<|visual token 030077|>
+<|visual token 030078|>
+<|visual token 030079|>
+<|visual token 030080|>
+<|visual token 030081|>
+<|visual token 030082|>
+<|visual token 030083|>
+<|visual token 030084|>
+<|visual token 030085|>
+<|visual token 030086|>
+<|visual token 030087|>
+<|visual token 030088|>
+<|visual token 030089|>
+<|visual token 030090|>
+<|visual token 030091|>
+<|visual token 030092|>
+<|visual token 030093|>
+<|visual token 030094|>
+<|visual token 030095|>
+<|visual token 030096|>
+<|visual token 030097|>
+<|visual token 030098|>
+<|visual token 030099|>
+<|visual token 030100|>
+<|visual token 030101|>
+<|visual token 030102|>
+<|visual token 030103|>
+<|visual token 030104|>
+<|visual token 030105|>
+<|visual token 030106|>
+<|visual token 030107|>
+<|visual token 030108|>
+<|visual token 030109|>
+<|visual token 030110|>
+<|visual token 030111|>
+<|visual token 030112|>
+<|visual token 030113|>
+<|visual token 030114|>
+<|visual token 030115|>
+<|visual token 030116|>
+<|visual token 030117|>
+<|visual token 030118|>
+<|visual token 030119|>
+<|visual token 030120|>
+<|visual token 030121|>
+<|visual token 030122|>
+<|visual token 030123|>
+<|visual token 030124|>
+<|visual token 030125|>
+<|visual token 030126|>
+<|visual token 030127|>
+<|visual token 030128|>
+<|visual token 030129|>
+<|visual token 030130|>
+<|visual token 030131|>
+<|visual token 030132|>
+<|visual token 030133|>
+<|visual token 030134|>
+<|visual token 030135|>
+<|visual token 030136|>
+<|visual token 030137|>
+<|visual token 030138|>
+<|visual token 030139|>
+<|visual token 030140|>
+<|visual token 030141|>
+<|visual token 030142|>
+<|visual token 030143|>
+<|visual token 030144|>
+<|visual token 030145|>
+<|visual token 030146|>
+<|visual token 030147|>
+<|visual token 030148|>
+<|visual token 030149|>
+<|visual token 030150|>
+<|visual token 030151|>
+<|visual token 030152|>
+<|visual token 030153|>
+<|visual token 030154|>
+<|visual token 030155|>
+<|visual token 030156|>
+<|visual token 030157|>
+<|visual token 030158|>
+<|visual token 030159|>
+<|visual token 030160|>
+<|visual token 030161|>
+<|visual token 030162|>
+<|visual token 030163|>
+<|visual token 030164|>
+<|visual token 030165|>
+<|visual token 030166|>
+<|visual token 030167|>
+<|visual token 030168|>
+<|visual token 030169|>
+<|visual token 030170|>
+<|visual token 030171|>
+<|visual token 030172|>
+<|visual token 030173|>
+<|visual token 030174|>
+<|visual token 030175|>
+<|visual token 030176|>
+<|visual token 030177|>
+<|visual token 030178|>
+<|visual token 030179|>
+<|visual token 030180|>
+<|visual token 030181|>
+<|visual token 030182|>
+<|visual token 030183|>
+<|visual token 030184|>
+<|visual token 030185|>
+<|visual token 030186|>
+<|visual token 030187|>
+<|visual token 030188|>
+<|visual token 030189|>
+<|visual token 030190|>
+<|visual token 030191|>
+<|visual token 030192|>
+<|visual token 030193|>
+<|visual token 030194|>
+<|visual token 030195|>
+<|visual token 030196|>
+<|visual token 030197|>
+<|visual token 030198|>
+<|visual token 030199|>
+<|visual token 030200|>
+<|visual token 030201|>
+<|visual token 030202|>
+<|visual token 030203|>
+<|visual token 030204|>
+<|visual token 030205|>
+<|visual token 030206|>
+<|visual token 030207|>
+<|visual token 030208|>
+<|visual token 030209|>
+<|visual token 030210|>
+<|visual token 030211|>
+<|visual token 030212|>
+<|visual token 030213|>
+<|visual token 030214|>
+<|visual token 030215|>
+<|visual token 030216|>
+<|visual token 030217|>
+<|visual token 030218|>
+<|visual token 030219|>
+<|visual token 030220|>
+<|visual token 030221|>
+<|visual token 030222|>
+<|visual token 030223|>
+<|visual token 030224|>
+<|visual token 030225|>
+<|visual token 030226|>
+<|visual token 030227|>
+<|visual token 030228|>
+<|visual token 030229|>
+<|visual token 030230|>
+<|visual token 030231|>
+<|visual token 030232|>
+<|visual token 030233|>
+<|visual token 030234|>
+<|visual token 030235|>
+<|visual token 030236|>
+<|visual token 030237|>
+<|visual token 030238|>
+<|visual token 030239|>
+<|visual token 030240|>
+<|visual token 030241|>
+<|visual token 030242|>
+<|visual token 030243|>
+<|visual token 030244|>
+<|visual token 030245|>
+<|visual token 030246|>
+<|visual token 030247|>
+<|visual token 030248|>
+<|visual token 030249|>
+<|visual token 030250|>
+<|visual token 030251|>
+<|visual token 030252|>
+<|visual token 030253|>
+<|visual token 030254|>
+<|visual token 030255|>
+<|visual token 030256|>
+<|visual token 030257|>
+<|visual token 030258|>
+<|visual token 030259|>
+<|visual token 030260|>
+<|visual token 030261|>
+<|visual token 030262|>
+<|visual token 030263|>
+<|visual token 030264|>
+<|visual token 030265|>
+<|visual token 030266|>
+<|visual token 030267|>
+<|visual token 030268|>
+<|visual token 030269|>
+<|visual token 030270|>
+<|visual token 030271|>
+<|visual token 030272|>
+<|visual token 030273|>
+<|visual token 030274|>
+<|visual token 030275|>
+<|visual token 030276|>
+<|visual token 030277|>
+<|visual token 030278|>
+<|visual token 030279|>
+<|visual token 030280|>
+<|visual token 030281|>
+<|visual token 030282|>
+<|visual token 030283|>
+<|visual token 030284|>
+<|visual token 030285|>
+<|visual token 030286|>
+<|visual token 030287|>
+<|visual token 030288|>
+<|visual token 030289|>
+<|visual token 030290|>
+<|visual token 030291|>
+<|visual token 030292|>
+<|visual token 030293|>
+<|visual token 030294|>
+<|visual token 030295|>
+<|visual token 030296|>
+<|visual token 030297|>
+<|visual token 030298|>
+<|visual token 030299|>
+<|visual token 030300|>
+<|visual token 030301|>
+<|visual token 030302|>
+<|visual token 030303|>
+<|visual token 030304|>
+<|visual token 030305|>
+<|visual token 030306|>
+<|visual token 030307|>
+<|visual token 030308|>
+<|visual token 030309|>
+<|visual token 030310|>
+<|visual token 030311|>
+<|visual token 030312|>
+<|visual token 030313|>
+<|visual token 030314|>
+<|visual token 030315|>
+<|visual token 030316|>
+<|visual token 030317|>
+<|visual token 030318|>
+<|visual token 030319|>
+<|visual token 030320|>
+<|visual token 030321|>
+<|visual token 030322|>
+<|visual token 030323|>
+<|visual token 030324|>
+<|visual token 030325|>
+<|visual token 030326|>
+<|visual token 030327|>
+<|visual token 030328|>
+<|visual token 030329|>
+<|visual token 030330|>
+<|visual token 030331|>
+<|visual token 030332|>
+<|visual token 030333|>
+<|visual token 030334|>
+<|visual token 030335|>
+<|visual token 030336|>
+<|visual token 030337|>
+<|visual token 030338|>
+<|visual token 030339|>
+<|visual token 030340|>
+<|visual token 030341|>
+<|visual token 030342|>
+<|visual token 030343|>
+<|visual token 030344|>
+<|visual token 030345|>
+<|visual token 030346|>
+<|visual token 030347|>
+<|visual token 030348|>
+<|visual token 030349|>
+<|visual token 030350|>
+<|visual token 030351|>
+<|visual token 030352|>
+<|visual token 030353|>
+<|visual token 030354|>
+<|visual token 030355|>
+<|visual token 030356|>
+<|visual token 030357|>
+<|visual token 030358|>
+<|visual token 030359|>
+<|visual token 030360|>
+<|visual token 030361|>
+<|visual token 030362|>
+<|visual token 030363|>
+<|visual token 030364|>
+<|visual token 030365|>
+<|visual token 030366|>
+<|visual token 030367|>
+<|visual token 030368|>
+<|visual token 030369|>
+<|visual token 030370|>
+<|visual token 030371|>
+<|visual token 030372|>
+<|visual token 030373|>
+<|visual token 030374|>
+<|visual token 030375|>
+<|visual token 030376|>
+<|visual token 030377|>
+<|visual token 030378|>
+<|visual token 030379|>
+<|visual token 030380|>
+<|visual token 030381|>
+<|visual token 030382|>
+<|visual token 030383|>
+<|visual token 030384|>
+<|visual token 030385|>
+<|visual token 030386|>
+<|visual token 030387|>
+<|visual token 030388|>
+<|visual token 030389|>
+<|visual token 030390|>
+<|visual token 030391|>
+<|visual token 030392|>
+<|visual token 030393|>
+<|visual token 030394|>
+<|visual token 030395|>
+<|visual token 030396|>
+<|visual token 030397|>
+<|visual token 030398|>
+<|visual token 030399|>
+<|visual token 030400|>
+<|visual token 030401|>
+<|visual token 030402|>
+<|visual token 030403|>
+<|visual token 030404|>
+<|visual token 030405|>
+<|visual token 030406|>
+<|visual token 030407|>
+<|visual token 030408|>
+<|visual token 030409|>
+<|visual token 030410|>
+<|visual token 030411|>
+<|visual token 030412|>
+<|visual token 030413|>
+<|visual token 030414|>
+<|visual token 030415|>
+<|visual token 030416|>
+<|visual token 030417|>
+<|visual token 030418|>
+<|visual token 030419|>
+<|visual token 030420|>
+<|visual token 030421|>
+<|visual token 030422|>
+<|visual token 030423|>
+<|visual token 030424|>
+<|visual token 030425|>
+<|visual token 030426|>
+<|visual token 030427|>
+<|visual token 030428|>
+<|visual token 030429|>
+<|visual token 030430|>
+<|visual token 030431|>
+<|visual token 030432|>
+<|visual token 030433|>
+<|visual token 030434|>
+<|visual token 030435|>
+<|visual token 030436|>
+<|visual token 030437|>
+<|visual token 030438|>
+<|visual token 030439|>
+<|visual token 030440|>
+<|visual token 030441|>
+<|visual token 030442|>
+<|visual token 030443|>
+<|visual token 030444|>
+<|visual token 030445|>
+<|visual token 030446|>
+<|visual token 030447|>
+<|visual token 030448|>
+<|visual token 030449|>
+<|visual token 030450|>
+<|visual token 030451|>
+<|visual token 030452|>
+<|visual token 030453|>
+<|visual token 030454|>
+<|visual token 030455|>
+<|visual token 030456|>
+<|visual token 030457|>
+<|visual token 030458|>
+<|visual token 030459|>
+<|visual token 030460|>
+<|visual token 030461|>
+<|visual token 030462|>
+<|visual token 030463|>
+<|visual token 030464|>
+<|visual token 030465|>
+<|visual token 030466|>
+<|visual token 030467|>
+<|visual token 030468|>
+<|visual token 030469|>
+<|visual token 030470|>
+<|visual token 030471|>
+<|visual token 030472|>
+<|visual token 030473|>
+<|visual token 030474|>
+<|visual token 030475|>
+<|visual token 030476|>
+<|visual token 030477|>
+<|visual token 030478|>
+<|visual token 030479|>
+<|visual token 030480|>
+<|visual token 030481|>
+<|visual token 030482|>
+<|visual token 030483|>
+<|visual token 030484|>
+<|visual token 030485|>
+<|visual token 030486|>
+<|visual token 030487|>
+<|visual token 030488|>
+<|visual token 030489|>
+<|visual token 030490|>
+<|visual token 030491|>
+<|visual token 030492|>
+<|visual token 030493|>
+<|visual token 030494|>
+<|visual token 030495|>
+<|visual token 030496|>
+<|visual token 030497|>
+<|visual token 030498|>
+<|visual token 030499|>
+<|visual token 030500|>
+<|visual token 030501|>
+<|visual token 030502|>
+<|visual token 030503|>
+<|visual token 030504|>
+<|visual token 030505|>
+<|visual token 030506|>
+<|visual token 030507|>
+<|visual token 030508|>
+<|visual token 030509|>
+<|visual token 030510|>
+<|visual token 030511|>
+<|visual token 030512|>
+<|visual token 030513|>
+<|visual token 030514|>
+<|visual token 030515|>
+<|visual token 030516|>
+<|visual token 030517|>
+<|visual token 030518|>
+<|visual token 030519|>
+<|visual token 030520|>
+<|visual token 030521|>
+<|visual token 030522|>
+<|visual token 030523|>
+<|visual token 030524|>
+<|visual token 030525|>
+<|visual token 030526|>
+<|visual token 030527|>
+<|visual token 030528|>
+<|visual token 030529|>
+<|visual token 030530|>
+<|visual token 030531|>
+<|visual token 030532|>
+<|visual token 030533|>
+<|visual token 030534|>
+<|visual token 030535|>
+<|visual token 030536|>
+<|visual token 030537|>
+<|visual token 030538|>
+<|visual token 030539|>
+<|visual token 030540|>
+<|visual token 030541|>
+<|visual token 030542|>
+<|visual token 030543|>
+<|visual token 030544|>
+<|visual token 030545|>
+<|visual token 030546|>
+<|visual token 030547|>
+<|visual token 030548|>
+<|visual token 030549|>
+<|visual token 030550|>
+<|visual token 030551|>
+<|visual token 030552|>
+<|visual token 030553|>
+<|visual token 030554|>
+<|visual token 030555|>
+<|visual token 030556|>
+<|visual token 030557|>
+<|visual token 030558|>
+<|visual token 030559|>
+<|visual token 030560|>
+<|visual token 030561|>
+<|visual token 030562|>
+<|visual token 030563|>
+<|visual token 030564|>
+<|visual token 030565|>
+<|visual token 030566|>
+<|visual token 030567|>
+<|visual token 030568|>
+<|visual token 030569|>
+<|visual token 030570|>
+<|visual token 030571|>
+<|visual token 030572|>
+<|visual token 030573|>
+<|visual token 030574|>
+<|visual token 030575|>
+<|visual token 030576|>
+<|visual token 030577|>
+<|visual token 030578|>
+<|visual token 030579|>
+<|visual token 030580|>
+<|visual token 030581|>
+<|visual token 030582|>
+<|visual token 030583|>
+<|visual token 030584|>
+<|visual token 030585|>
+<|visual token 030586|>
+<|visual token 030587|>
+<|visual token 030588|>
+<|visual token 030589|>
+<|visual token 030590|>
+<|visual token 030591|>
+<|visual token 030592|>
+<|visual token 030593|>
+<|visual token 030594|>
+<|visual token 030595|>
+<|visual token 030596|>
+<|visual token 030597|>
+<|visual token 030598|>
+<|visual token 030599|>
+<|visual token 030600|>
+<|visual token 030601|>
+<|visual token 030602|>
+<|visual token 030603|>
+<|visual token 030604|>
+<|visual token 030605|>
+<|visual token 030606|>
+<|visual token 030607|>
+<|visual token 030608|>
+<|visual token 030609|>
+<|visual token 030610|>
+<|visual token 030611|>
+<|visual token 030612|>
+<|visual token 030613|>
+<|visual token 030614|>
+<|visual token 030615|>
+<|visual token 030616|>
+<|visual token 030617|>
+<|visual token 030618|>
+<|visual token 030619|>
+<|visual token 030620|>
+<|visual token 030621|>
+<|visual token 030622|>
+<|visual token 030623|>
+<|visual token 030624|>
+<|visual token 030625|>
+<|visual token 030626|>
+<|visual token 030627|>
+<|visual token 030628|>
+<|visual token 030629|>
+<|visual token 030630|>
+<|visual token 030631|>
+<|visual token 030632|>
+<|visual token 030633|>
+<|visual token 030634|>
+<|visual token 030635|>
+<|visual token 030636|>
+<|visual token 030637|>
+<|visual token 030638|>
+<|visual token 030639|>
+<|visual token 030640|>
+<|visual token 030641|>
+<|visual token 030642|>
+<|visual token 030643|>
+<|visual token 030644|>
+<|visual token 030645|>
+<|visual token 030646|>
+<|visual token 030647|>
+<|visual token 030648|>
+<|visual token 030649|>
+<|visual token 030650|>
+<|visual token 030651|>
+<|visual token 030652|>
+<|visual token 030653|>
+<|visual token 030654|>
+<|visual token 030655|>
+<|visual token 030656|>
+<|visual token 030657|>
+<|visual token 030658|>
+<|visual token 030659|>
+<|visual token 030660|>
+<|visual token 030661|>
+<|visual token 030662|>
+<|visual token 030663|>
+<|visual token 030664|>
+<|visual token 030665|>
+<|visual token 030666|>
+<|visual token 030667|>
+<|visual token 030668|>
+<|visual token 030669|>
+<|visual token 030670|>
+<|visual token 030671|>
+<|visual token 030672|>
+<|visual token 030673|>
+<|visual token 030674|>
+<|visual token 030675|>
+<|visual token 030676|>
+<|visual token 030677|>
+<|visual token 030678|>
+<|visual token 030679|>
+<|visual token 030680|>
+<|visual token 030681|>
+<|visual token 030682|>
+<|visual token 030683|>
+<|visual token 030684|>
+<|visual token 030685|>
+<|visual token 030686|>
+<|visual token 030687|>
+<|visual token 030688|>
+<|visual token 030689|>
+<|visual token 030690|>
+<|visual token 030691|>
+<|visual token 030692|>
+<|visual token 030693|>
+<|visual token 030694|>
+<|visual token 030695|>
+<|visual token 030696|>
+<|visual token 030697|>
+<|visual token 030698|>
+<|visual token 030699|>
+<|visual token 030700|>
+<|visual token 030701|>
+<|visual token 030702|>
+<|visual token 030703|>
+<|visual token 030704|>
+<|visual token 030705|>
+<|visual token 030706|>
+<|visual token 030707|>
+<|visual token 030708|>
+<|visual token 030709|>
+<|visual token 030710|>
+<|visual token 030711|>
+<|visual token 030712|>
+<|visual token 030713|>
+<|visual token 030714|>
+<|visual token 030715|>
+<|visual token 030716|>
+<|visual token 030717|>
+<|visual token 030718|>
+<|visual token 030719|>
+<|visual token 030720|>
+<|visual token 030721|>
+<|visual token 030722|>
+<|visual token 030723|>
+<|visual token 030724|>
+<|visual token 030725|>
+<|visual token 030726|>
+<|visual token 030727|>
+<|visual token 030728|>
+<|visual token 030729|>
+<|visual token 030730|>
+<|visual token 030731|>
+<|visual token 030732|>
+<|visual token 030733|>
+<|visual token 030734|>
+<|visual token 030735|>
+<|visual token 030736|>
+<|visual token 030737|>
+<|visual token 030738|>
+<|visual token 030739|>
+<|visual token 030740|>
+<|visual token 030741|>
+<|visual token 030742|>
+<|visual token 030743|>
+<|visual token 030744|>
+<|visual token 030745|>
+<|visual token 030746|>
+<|visual token 030747|>
+<|visual token 030748|>
+<|visual token 030749|>
+<|visual token 030750|>
+<|visual token 030751|>
+<|visual token 030752|>
+<|visual token 030753|>
+<|visual token 030754|>
+<|visual token 030755|>
+<|visual token 030756|>
+<|visual token 030757|>
+<|visual token 030758|>
+<|visual token 030759|>
+<|visual token 030760|>
+<|visual token 030761|>
+<|visual token 030762|>
+<|visual token 030763|>
+<|visual token 030764|>
+<|visual token 030765|>
+<|visual token 030766|>
+<|visual token 030767|>
+<|visual token 030768|>
+<|visual token 030769|>
+<|visual token 030770|>
+<|visual token 030771|>
+<|visual token 030772|>
+<|visual token 030773|>
+<|visual token 030774|>
+<|visual token 030775|>
+<|visual token 030776|>
+<|visual token 030777|>
+<|visual token 030778|>
+<|visual token 030779|>
+<|visual token 030780|>
+<|visual token 030781|>
+<|visual token 030782|>
+<|visual token 030783|>
+<|visual token 030784|>
+<|visual token 030785|>
+<|visual token 030786|>
+<|visual token 030787|>
+<|visual token 030788|>
+<|visual token 030789|>
+<|visual token 030790|>
+<|visual token 030791|>
+<|visual token 030792|>
+<|visual token 030793|>
+<|visual token 030794|>
+<|visual token 030795|>
+<|visual token 030796|>
+<|visual token 030797|>
+<|visual token 030798|>
+<|visual token 030799|>
+<|visual token 030800|>
+<|visual token 030801|>
+<|visual token 030802|>
+<|visual token 030803|>
+<|visual token 030804|>
+<|visual token 030805|>
+<|visual token 030806|>
+<|visual token 030807|>
+<|visual token 030808|>
+<|visual token 030809|>
+<|visual token 030810|>
+<|visual token 030811|>
+<|visual token 030812|>
+<|visual token 030813|>
+<|visual token 030814|>
+<|visual token 030815|>
+<|visual token 030816|>
+<|visual token 030817|>
+<|visual token 030818|>
+<|visual token 030819|>
+<|visual token 030820|>
+<|visual token 030821|>
+<|visual token 030822|>
+<|visual token 030823|>
+<|visual token 030824|>
+<|visual token 030825|>
+<|visual token 030826|>
+<|visual token 030827|>
+<|visual token 030828|>
+<|visual token 030829|>
+<|visual token 030830|>
+<|visual token 030831|>
+<|visual token 030832|>
+<|visual token 030833|>
+<|visual token 030834|>
+<|visual token 030835|>
+<|visual token 030836|>
+<|visual token 030837|>
+<|visual token 030838|>
+<|visual token 030839|>
+<|visual token 030840|>
+<|visual token 030841|>
+<|visual token 030842|>
+<|visual token 030843|>
+<|visual token 030844|>
+<|visual token 030845|>
+<|visual token 030846|>
+<|visual token 030847|>
+<|visual token 030848|>
+<|visual token 030849|>
+<|visual token 030850|>
+<|visual token 030851|>
+<|visual token 030852|>
+<|visual token 030853|>
+<|visual token 030854|>
+<|visual token 030855|>
+<|visual token 030856|>
+<|visual token 030857|>
+<|visual token 030858|>
+<|visual token 030859|>
+<|visual token 030860|>
+<|visual token 030861|>
+<|visual token 030862|>
+<|visual token 030863|>
+<|visual token 030864|>
+<|visual token 030865|>
+<|visual token 030866|>
+<|visual token 030867|>
+<|visual token 030868|>
+<|visual token 030869|>
+<|visual token 030870|>
+<|visual token 030871|>
+<|visual token 030872|>
+<|visual token 030873|>
+<|visual token 030874|>
+<|visual token 030875|>
+<|visual token 030876|>
+<|visual token 030877|>
+<|visual token 030878|>
+<|visual token 030879|>
+<|visual token 030880|>
+<|visual token 030881|>
+<|visual token 030882|>
+<|visual token 030883|>
+<|visual token 030884|>
+<|visual token 030885|>
+<|visual token 030886|>
+<|visual token 030887|>
+<|visual token 030888|>
+<|visual token 030889|>
+<|visual token 030890|>
+<|visual token 030891|>
+<|visual token 030892|>
+<|visual token 030893|>
+<|visual token 030894|>
+<|visual token 030895|>
+<|visual token 030896|>
+<|visual token 030897|>
+<|visual token 030898|>
+<|visual token 030899|>
+<|visual token 030900|>
+<|visual token 030901|>
+<|visual token 030902|>
+<|visual token 030903|>
+<|visual token 030904|>
+<|visual token 030905|>
+<|visual token 030906|>
+<|visual token 030907|>
+<|visual token 030908|>
+<|visual token 030909|>
+<|visual token 030910|>
+<|visual token 030911|>
+<|visual token 030912|>
+<|visual token 030913|>
+<|visual token 030914|>
+<|visual token 030915|>
+<|visual token 030916|>
+<|visual token 030917|>
+<|visual token 030918|>
+<|visual token 030919|>
+<|visual token 030920|>
+<|visual token 030921|>
+<|visual token 030922|>
+<|visual token 030923|>
+<|visual token 030924|>
+<|visual token 030925|>
+<|visual token 030926|>
+<|visual token 030927|>
+<|visual token 030928|>
+<|visual token 030929|>
+<|visual token 030930|>
+<|visual token 030931|>
+<|visual token 030932|>
+<|visual token 030933|>
+<|visual token 030934|>
+<|visual token 030935|>
+<|visual token 030936|>
+<|visual token 030937|>
+<|visual token 030938|>
+<|visual token 030939|>
+<|visual token 030940|>
+<|visual token 030941|>
+<|visual token 030942|>
+<|visual token 030943|>
+<|visual token 030944|>
+<|visual token 030945|>
+<|visual token 030946|>
+<|visual token 030947|>
+<|visual token 030948|>
+<|visual token 030949|>
+<|visual token 030950|>
+<|visual token 030951|>
+<|visual token 030952|>
+<|visual token 030953|>
+<|visual token 030954|>
+<|visual token 030955|>
+<|visual token 030956|>
+<|visual token 030957|>
+<|visual token 030958|>
+<|visual token 030959|>
+<|visual token 030960|>
+<|visual token 030961|>
+<|visual token 030962|>
+<|visual token 030963|>
+<|visual token 030964|>
+<|visual token 030965|>
+<|visual token 030966|>
+<|visual token 030967|>
+<|visual token 030968|>
+<|visual token 030969|>
+<|visual token 030970|>
+<|visual token 030971|>
+<|visual token 030972|>
+<|visual token 030973|>
+<|visual token 030974|>
+<|visual token 030975|>
+<|visual token 030976|>
+<|visual token 030977|>
+<|visual token 030978|>
+<|visual token 030979|>
+<|visual token 030980|>
+<|visual token 030981|>
+<|visual token 030982|>
+<|visual token 030983|>
+<|visual token 030984|>
+<|visual token 030985|>
+<|visual token 030986|>
+<|visual token 030987|>
+<|visual token 030988|>
+<|visual token 030989|>
+<|visual token 030990|>
+<|visual token 030991|>
+<|visual token 030992|>
+<|visual token 030993|>
+<|visual token 030994|>
+<|visual token 030995|>
+<|visual token 030996|>
+<|visual token 030997|>
+<|visual token 030998|>
+<|visual token 030999|>
+<|visual token 031000|>
+<|visual token 031001|>
+<|visual token 031002|>
+<|visual token 031003|>
+<|visual token 031004|>
+<|visual token 031005|>
+<|visual token 031006|>
+<|visual token 031007|>
+<|visual token 031008|>
+<|visual token 031009|>
+<|visual token 031010|>
+<|visual token 031011|>
+<|visual token 031012|>
+<|visual token 031013|>
+<|visual token 031014|>
+<|visual token 031015|>
+<|visual token 031016|>
+<|visual token 031017|>
+<|visual token 031018|>
+<|visual token 031019|>
+<|visual token 031020|>
+<|visual token 031021|>
+<|visual token 031022|>
+<|visual token 031023|>
+<|visual token 031024|>
+<|visual token 031025|>
+<|visual token 031026|>
+<|visual token 031027|>
+<|visual token 031028|>
+<|visual token 031029|>
+<|visual token 031030|>
+<|visual token 031031|>
+<|visual token 031032|>
+<|visual token 031033|>
+<|visual token 031034|>
+<|visual token 031035|>
+<|visual token 031036|>
+<|visual token 031037|>
+<|visual token 031038|>
+<|visual token 031039|>
+<|visual token 031040|>
+<|visual token 031041|>
+<|visual token 031042|>
+<|visual token 031043|>
+<|visual token 031044|>
+<|visual token 031045|>
+<|visual token 031046|>
+<|visual token 031047|>
+<|visual token 031048|>
+<|visual token 031049|>
+<|visual token 031050|>
+<|visual token 031051|>
+<|visual token 031052|>
+<|visual token 031053|>
+<|visual token 031054|>
+<|visual token 031055|>
+<|visual token 031056|>
+<|visual token 031057|>
+<|visual token 031058|>
+<|visual token 031059|>
+<|visual token 031060|>
+<|visual token 031061|>
+<|visual token 031062|>
+<|visual token 031063|>
+<|visual token 031064|>
+<|visual token 031065|>
+<|visual token 031066|>
+<|visual token 031067|>
+<|visual token 031068|>
+<|visual token 031069|>
+<|visual token 031070|>
+<|visual token 031071|>
+<|visual token 031072|>
+<|visual token 031073|>
+<|visual token 031074|>
+<|visual token 031075|>
+<|visual token 031076|>
+<|visual token 031077|>
+<|visual token 031078|>
+<|visual token 031079|>
+<|visual token 031080|>
+<|visual token 031081|>
+<|visual token 031082|>
+<|visual token 031083|>
+<|visual token 031084|>
+<|visual token 031085|>
+<|visual token 031086|>
+<|visual token 031087|>
+<|visual token 031088|>
+<|visual token 031089|>
+<|visual token 031090|>
+<|visual token 031091|>
+<|visual token 031092|>
+<|visual token 031093|>
+<|visual token 031094|>
+<|visual token 031095|>
+<|visual token 031096|>
+<|visual token 031097|>
+<|visual token 031098|>
+<|visual token 031099|>
+<|visual token 031100|>
+<|visual token 031101|>
+<|visual token 031102|>
+<|visual token 031103|>
+<|visual token 031104|>
+<|visual token 031105|>
+<|visual token 031106|>
+<|visual token 031107|>
+<|visual token 031108|>
+<|visual token 031109|>
+<|visual token 031110|>
+<|visual token 031111|>
+<|visual token 031112|>
+<|visual token 031113|>
+<|visual token 031114|>
+<|visual token 031115|>
+<|visual token 031116|>
+<|visual token 031117|>
+<|visual token 031118|>
+<|visual token 031119|>
+<|visual token 031120|>
+<|visual token 031121|>
+<|visual token 031122|>
+<|visual token 031123|>
+<|visual token 031124|>
+<|visual token 031125|>
+<|visual token 031126|>
+<|visual token 031127|>
+<|visual token 031128|>
+<|visual token 031129|>
+<|visual token 031130|>
+<|visual token 031131|>
+<|visual token 031132|>
+<|visual token 031133|>
+<|visual token 031134|>
+<|visual token 031135|>
+<|visual token 031136|>
+<|visual token 031137|>
+<|visual token 031138|>
+<|visual token 031139|>
+<|visual token 031140|>
+<|visual token 031141|>
+<|visual token 031142|>
+<|visual token 031143|>
+<|visual token 031144|>
+<|visual token 031145|>
+<|visual token 031146|>
+<|visual token 031147|>
+<|visual token 031148|>
+<|visual token 031149|>
+<|visual token 031150|>
+<|visual token 031151|>
+<|visual token 031152|>
+<|visual token 031153|>
+<|visual token 031154|>
+<|visual token 031155|>
+<|visual token 031156|>
+<|visual token 031157|>
+<|visual token 031158|>
+<|visual token 031159|>
+<|visual token 031160|>
+<|visual token 031161|>
+<|visual token 031162|>
+<|visual token 031163|>
+<|visual token 031164|>
+<|visual token 031165|>
+<|visual token 031166|>
+<|visual token 031167|>
+<|visual token 031168|>
+<|visual token 031169|>
+<|visual token 031170|>
+<|visual token 031171|>
+<|visual token 031172|>
+<|visual token 031173|>
+<|visual token 031174|>
+<|visual token 031175|>
+<|visual token 031176|>
+<|visual token 031177|>
+<|visual token 031178|>
+<|visual token 031179|>
+<|visual token 031180|>
+<|visual token 031181|>
+<|visual token 031182|>
+<|visual token 031183|>
+<|visual token 031184|>
+<|visual token 031185|>
+<|visual token 031186|>
+<|visual token 031187|>
+<|visual token 031188|>
+<|visual token 031189|>
+<|visual token 031190|>
+<|visual token 031191|>
+<|visual token 031192|>
+<|visual token 031193|>
+<|visual token 031194|>
+<|visual token 031195|>
+<|visual token 031196|>
+<|visual token 031197|>
+<|visual token 031198|>
+<|visual token 031199|>
+<|visual token 031200|>
+<|visual token 031201|>
+<|visual token 031202|>
+<|visual token 031203|>
+<|visual token 031204|>
+<|visual token 031205|>
+<|visual token 031206|>
+<|visual token 031207|>
+<|visual token 031208|>
+<|visual token 031209|>
+<|visual token 031210|>
+<|visual token 031211|>
+<|visual token 031212|>
+<|visual token 031213|>
+<|visual token 031214|>
+<|visual token 031215|>
+<|visual token 031216|>
+<|visual token 031217|>
+<|visual token 031218|>
+<|visual token 031219|>
+<|visual token 031220|>
+<|visual token 031221|>
+<|visual token 031222|>
+<|visual token 031223|>
+<|visual token 031224|>
+<|visual token 031225|>
+<|visual token 031226|>
+<|visual token 031227|>
+<|visual token 031228|>
+<|visual token 031229|>
+<|visual token 031230|>
+<|visual token 031231|>
+<|visual token 031232|>
+<|visual token 031233|>
+<|visual token 031234|>
+<|visual token 031235|>
+<|visual token 031236|>
+<|visual token 031237|>
+<|visual token 031238|>
+<|visual token 031239|>
+<|visual token 031240|>
+<|visual token 031241|>
+<|visual token 031242|>
+<|visual token 031243|>
+<|visual token 031244|>
+<|visual token 031245|>
+<|visual token 031246|>
+<|visual token 031247|>
+<|visual token 031248|>
+<|visual token 031249|>
+<|visual token 031250|>
+<|visual token 031251|>
+<|visual token 031252|>
+<|visual token 031253|>
+<|visual token 031254|>
+<|visual token 031255|>
+<|visual token 031256|>
+<|visual token 031257|>
+<|visual token 031258|>
+<|visual token 031259|>
+<|visual token 031260|>
+<|visual token 031261|>
+<|visual token 031262|>
+<|visual token 031263|>
+<|visual token 031264|>
+<|visual token 031265|>
+<|visual token 031266|>
+<|visual token 031267|>
+<|visual token 031268|>
+<|visual token 031269|>
+<|visual token 031270|>
+<|visual token 031271|>
+<|visual token 031272|>
+<|visual token 031273|>
+<|visual token 031274|>
+<|visual token 031275|>
+<|visual token 031276|>
+<|visual token 031277|>
+<|visual token 031278|>
+<|visual token 031279|>
+<|visual token 031280|>
+<|visual token 031281|>
+<|visual token 031282|>
+<|visual token 031283|>
+<|visual token 031284|>
+<|visual token 031285|>
+<|visual token 031286|>
+<|visual token 031287|>
+<|visual token 031288|>
+<|visual token 031289|>
+<|visual token 031290|>
+<|visual token 031291|>
+<|visual token 031292|>
+<|visual token 031293|>
+<|visual token 031294|>
+<|visual token 031295|>
+<|visual token 031296|>
+<|visual token 031297|>
+<|visual token 031298|>
+<|visual token 031299|>
+<|visual token 031300|>
+<|visual token 031301|>
+<|visual token 031302|>
+<|visual token 031303|>
+<|visual token 031304|>
+<|visual token 031305|>
+<|visual token 031306|>
+<|visual token 031307|>
+<|visual token 031308|>
+<|visual token 031309|>
+<|visual token 031310|>
+<|visual token 031311|>
+<|visual token 031312|>
+<|visual token 031313|>
+<|visual token 031314|>
+<|visual token 031315|>
+<|visual token 031316|>
+<|visual token 031317|>
+<|visual token 031318|>
+<|visual token 031319|>
+<|visual token 031320|>
+<|visual token 031321|>
+<|visual token 031322|>
+<|visual token 031323|>
+<|visual token 031324|>
+<|visual token 031325|>
+<|visual token 031326|>
+<|visual token 031327|>
+<|visual token 031328|>
+<|visual token 031329|>
+<|visual token 031330|>
+<|visual token 031331|>
+<|visual token 031332|>
+<|visual token 031333|>
+<|visual token 031334|>
+<|visual token 031335|>
+<|visual token 031336|>
+<|visual token 031337|>
+<|visual token 031338|>
+<|visual token 031339|>
+<|visual token 031340|>
+<|visual token 031341|>
+<|visual token 031342|>
+<|visual token 031343|>
+<|visual token 031344|>
+<|visual token 031345|>
+<|visual token 031346|>
+<|visual token 031347|>
+<|visual token 031348|>
+<|visual token 031349|>
+<|visual token 031350|>
+<|visual token 031351|>
+<|visual token 031352|>
+<|visual token 031353|>
+<|visual token 031354|>
+<|visual token 031355|>
+<|visual token 031356|>
+<|visual token 031357|>
+<|visual token 031358|>
+<|visual token 031359|>
+<|visual token 031360|>
+<|visual token 031361|>
+<|visual token 031362|>
+<|visual token 031363|>
+<|visual token 031364|>
+<|visual token 031365|>
+<|visual token 031366|>
+<|visual token 031367|>
+<|visual token 031368|>
+<|visual token 031369|>
+<|visual token 031370|>
+<|visual token 031371|>
+<|visual token 031372|>
+<|visual token 031373|>
+<|visual token 031374|>
+<|visual token 031375|>
+<|visual token 031376|>
+<|visual token 031377|>
+<|visual token 031378|>
+<|visual token 031379|>
+<|visual token 031380|>
+<|visual token 031381|>
+<|visual token 031382|>
+<|visual token 031383|>
+<|visual token 031384|>
+<|visual token 031385|>
+<|visual token 031386|>
+<|visual token 031387|>
+<|visual token 031388|>
+<|visual token 031389|>
+<|visual token 031390|>
+<|visual token 031391|>
+<|visual token 031392|>
+<|visual token 031393|>
+<|visual token 031394|>
+<|visual token 031395|>
+<|visual token 031396|>
+<|visual token 031397|>
+<|visual token 031398|>
+<|visual token 031399|>
+<|visual token 031400|>
+<|visual token 031401|>
+<|visual token 031402|>
+<|visual token 031403|>
+<|visual token 031404|>
+<|visual token 031405|>
+<|visual token 031406|>
+<|visual token 031407|>
+<|visual token 031408|>
+<|visual token 031409|>
+<|visual token 031410|>
+<|visual token 031411|>
+<|visual token 031412|>
+<|visual token 031413|>
+<|visual token 031414|>
+<|visual token 031415|>
+<|visual token 031416|>
+<|visual token 031417|>
+<|visual token 031418|>
+<|visual token 031419|>
+<|visual token 031420|>
+<|visual token 031421|>
+<|visual token 031422|>
+<|visual token 031423|>
+<|visual token 031424|>
+<|visual token 031425|>
+<|visual token 031426|>
+<|visual token 031427|>
+<|visual token 031428|>
+<|visual token 031429|>
+<|visual token 031430|>
+<|visual token 031431|>
+<|visual token 031432|>
+<|visual token 031433|>
+<|visual token 031434|>
+<|visual token 031435|>
+<|visual token 031436|>
+<|visual token 031437|>
+<|visual token 031438|>
+<|visual token 031439|>
+<|visual token 031440|>
+<|visual token 031441|>
+<|visual token 031442|>
+<|visual token 031443|>
+<|visual token 031444|>
+<|visual token 031445|>
+<|visual token 031446|>
+<|visual token 031447|>
+<|visual token 031448|>
+<|visual token 031449|>
+<|visual token 031450|>
+<|visual token 031451|>
+<|visual token 031452|>
+<|visual token 031453|>
+<|visual token 031454|>
+<|visual token 031455|>
+<|visual token 031456|>
+<|visual token 031457|>
+<|visual token 031458|>
+<|visual token 031459|>
+<|visual token 031460|>
+<|visual token 031461|>
+<|visual token 031462|>
+<|visual token 031463|>
+<|visual token 031464|>
+<|visual token 031465|>
+<|visual token 031466|>
+<|visual token 031467|>
+<|visual token 031468|>
+<|visual token 031469|>
+<|visual token 031470|>
+<|visual token 031471|>
+<|visual token 031472|>
+<|visual token 031473|>
+<|visual token 031474|>
+<|visual token 031475|>
+<|visual token 031476|>
+<|visual token 031477|>
+<|visual token 031478|>
+<|visual token 031479|>
+<|visual token 031480|>
+<|visual token 031481|>
+<|visual token 031482|>
+<|visual token 031483|>
+<|visual token 031484|>
+<|visual token 031485|>
+<|visual token 031486|>
+<|visual token 031487|>
+<|visual token 031488|>
+<|visual token 031489|>
+<|visual token 031490|>
+<|visual token 031491|>
+<|visual token 031492|>
+<|visual token 031493|>
+<|visual token 031494|>
+<|visual token 031495|>
+<|visual token 031496|>
+<|visual token 031497|>
+<|visual token 031498|>
+<|visual token 031499|>
+<|visual token 031500|>
+<|visual token 031501|>
+<|visual token 031502|>
+<|visual token 031503|>
+<|visual token 031504|>
+<|visual token 031505|>
+<|visual token 031506|>
+<|visual token 031507|>
+<|visual token 031508|>
+<|visual token 031509|>
+<|visual token 031510|>
+<|visual token 031511|>
+<|visual token 031512|>
+<|visual token 031513|>
+<|visual token 031514|>
+<|visual token 031515|>
+<|visual token 031516|>
+<|visual token 031517|>
+<|visual token 031518|>
+<|visual token 031519|>
+<|visual token 031520|>
+<|visual token 031521|>
+<|visual token 031522|>
+<|visual token 031523|>
+<|visual token 031524|>
+<|visual token 031525|>
+<|visual token 031526|>
+<|visual token 031527|>
+<|visual token 031528|>
+<|visual token 031529|>
+<|visual token 031530|>
+<|visual token 031531|>
+<|visual token 031532|>
+<|visual token 031533|>
+<|visual token 031534|>
+<|visual token 031535|>
+<|visual token 031536|>
+<|visual token 031537|>
+<|visual token 031538|>
+<|visual token 031539|>
+<|visual token 031540|>
+<|visual token 031541|>
+<|visual token 031542|>
+<|visual token 031543|>
+<|visual token 031544|>
+<|visual token 031545|>
+<|visual token 031546|>
+<|visual token 031547|>
+<|visual token 031548|>
+<|visual token 031549|>
+<|visual token 031550|>
+<|visual token 031551|>
+<|visual token 031552|>
+<|visual token 031553|>
+<|visual token 031554|>
+<|visual token 031555|>
+<|visual token 031556|>
+<|visual token 031557|>
+<|visual token 031558|>
+<|visual token 031559|>
+<|visual token 031560|>
+<|visual token 031561|>
+<|visual token 031562|>
+<|visual token 031563|>
+<|visual token 031564|>
+<|visual token 031565|>
+<|visual token 031566|>
+<|visual token 031567|>
+<|visual token 031568|>
+<|visual token 031569|>
+<|visual token 031570|>
+<|visual token 031571|>
+<|visual token 031572|>
+<|visual token 031573|>
+<|visual token 031574|>
+<|visual token 031575|>
+<|visual token 031576|>
+<|visual token 031577|>
+<|visual token 031578|>
+<|visual token 031579|>
+<|visual token 031580|>
+<|visual token 031581|>
+<|visual token 031582|>
+<|visual token 031583|>
+<|visual token 031584|>
+<|visual token 031585|>
+<|visual token 031586|>
+<|visual token 031587|>
+<|visual token 031588|>
+<|visual token 031589|>
+<|visual token 031590|>
+<|visual token 031591|>
+<|visual token 031592|>
+<|visual token 031593|>
+<|visual token 031594|>
+<|visual token 031595|>
+<|visual token 031596|>
+<|visual token 031597|>
+<|visual token 031598|>
+<|visual token 031599|>
+<|visual token 031600|>
+<|visual token 031601|>
+<|visual token 031602|>
+<|visual token 031603|>
+<|visual token 031604|>
+<|visual token 031605|>
+<|visual token 031606|>
+<|visual token 031607|>
+<|visual token 031608|>
+<|visual token 031609|>
+<|visual token 031610|>
+<|visual token 031611|>
+<|visual token 031612|>
+<|visual token 031613|>
+<|visual token 031614|>
+<|visual token 031615|>
+<|visual token 031616|>
+<|visual token 031617|>
+<|visual token 031618|>
+<|visual token 031619|>
+<|visual token 031620|>
+<|visual token 031621|>
+<|visual token 031622|>
+<|visual token 031623|>
+<|visual token 031624|>
+<|visual token 031625|>
+<|visual token 031626|>
+<|visual token 031627|>
+<|visual token 031628|>
+<|visual token 031629|>
+<|visual token 031630|>
+<|visual token 031631|>
+<|visual token 031632|>
+<|visual token 031633|>
+<|visual token 031634|>
+<|visual token 031635|>
+<|visual token 031636|>
+<|visual token 031637|>
+<|visual token 031638|>
+<|visual token 031639|>
+<|visual token 031640|>
+<|visual token 031641|>
+<|visual token 031642|>
+<|visual token 031643|>
+<|visual token 031644|>
+<|visual token 031645|>
+<|visual token 031646|>
+<|visual token 031647|>
+<|visual token 031648|>
+<|visual token 031649|>
+<|visual token 031650|>
+<|visual token 031651|>
+<|visual token 031652|>
+<|visual token 031653|>
+<|visual token 031654|>
+<|visual token 031655|>
+<|visual token 031656|>
+<|visual token 031657|>
+<|visual token 031658|>
+<|visual token 031659|>
+<|visual token 031660|>
+<|visual token 031661|>
+<|visual token 031662|>
+<|visual token 031663|>
+<|visual token 031664|>
+<|visual token 031665|>
+<|visual token 031666|>
+<|visual token 031667|>
+<|visual token 031668|>
+<|visual token 031669|>
+<|visual token 031670|>
+<|visual token 031671|>
+<|visual token 031672|>
+<|visual token 031673|>
+<|visual token 031674|>
+<|visual token 031675|>
+<|visual token 031676|>
+<|visual token 031677|>
+<|visual token 031678|>
+<|visual token 031679|>
+<|visual token 031680|>
+<|visual token 031681|>
+<|visual token 031682|>
+<|visual token 031683|>
+<|visual token 031684|>
+<|visual token 031685|>
+<|visual token 031686|>
+<|visual token 031687|>
+<|visual token 031688|>
+<|visual token 031689|>
+<|visual token 031690|>
+<|visual token 031691|>
+<|visual token 031692|>
+<|visual token 031693|>
+<|visual token 031694|>
+<|visual token 031695|>
+<|visual token 031696|>
+<|visual token 031697|>
+<|visual token 031698|>
+<|visual token 031699|>
+<|visual token 031700|>
+<|visual token 031701|>
+<|visual token 031702|>
+<|visual token 031703|>
+<|visual token 031704|>
+<|visual token 031705|>
+<|visual token 031706|>
+<|visual token 031707|>
+<|visual token 031708|>
+<|visual token 031709|>
+<|visual token 031710|>
+<|visual token 031711|>
+<|visual token 031712|>
+<|visual token 031713|>
+<|visual token 031714|>
+<|visual token 031715|>
+<|visual token 031716|>
+<|visual token 031717|>
+<|visual token 031718|>
+<|visual token 031719|>
+<|visual token 031720|>
+<|visual token 031721|>
+<|visual token 031722|>
+<|visual token 031723|>
+<|visual token 031724|>
+<|visual token 031725|>
+<|visual token 031726|>
+<|visual token 031727|>
+<|visual token 031728|>
+<|visual token 031729|>
+<|visual token 031730|>
+<|visual token 031731|>
+<|visual token 031732|>
+<|visual token 031733|>
+<|visual token 031734|>
+<|visual token 031735|>
+<|visual token 031736|>
+<|visual token 031737|>
+<|visual token 031738|>
+<|visual token 031739|>
+<|visual token 031740|>
+<|visual token 031741|>
+<|visual token 031742|>
+<|visual token 031743|>
+<|visual token 031744|>
+<|visual token 031745|>
+<|visual token 031746|>
+<|visual token 031747|>
+<|visual token 031748|>
+<|visual token 031749|>
+<|visual token 031750|>
+<|visual token 031751|>
+<|visual token 031752|>
+<|visual token 031753|>
+<|visual token 031754|>
+<|visual token 031755|>
+<|visual token 031756|>
+<|visual token 031757|>
+<|visual token 031758|>
+<|visual token 031759|>
+<|visual token 031760|>
+<|visual token 031761|>
+<|visual token 031762|>
+<|visual token 031763|>
+<|visual token 031764|>
+<|visual token 031765|>
+<|visual token 031766|>
+<|visual token 031767|>
+<|visual token 031768|>
+<|visual token 031769|>
+<|visual token 031770|>
+<|visual token 031771|>
+<|visual token 031772|>
+<|visual token 031773|>
+<|visual token 031774|>
+<|visual token 031775|>
+<|visual token 031776|>
+<|visual token 031777|>
+<|visual token 031778|>
+<|visual token 031779|>
+<|visual token 031780|>
+<|visual token 031781|>
+<|visual token 031782|>
+<|visual token 031783|>
+<|visual token 031784|>
+<|visual token 031785|>
+<|visual token 031786|>
+<|visual token 031787|>
+<|visual token 031788|>
+<|visual token 031789|>
+<|visual token 031790|>
+<|visual token 031791|>
+<|visual token 031792|>
+<|visual token 031793|>
+<|visual token 031794|>
+<|visual token 031795|>
+<|visual token 031796|>
+<|visual token 031797|>
+<|visual token 031798|>
+<|visual token 031799|>
+<|visual token 031800|>
+<|visual token 031801|>
+<|visual token 031802|>
+<|visual token 031803|>
+<|visual token 031804|>
+<|visual token 031805|>
+<|visual token 031806|>
+<|visual token 031807|>
+<|visual token 031808|>
+<|visual token 031809|>
+<|visual token 031810|>
+<|visual token 031811|>
+<|visual token 031812|>
+<|visual token 031813|>
+<|visual token 031814|>
+<|visual token 031815|>
+<|visual token 031816|>
+<|visual token 031817|>
+<|visual token 031818|>
+<|visual token 031819|>
+<|visual token 031820|>
+<|visual token 031821|>
+<|visual token 031822|>
+<|visual token 031823|>
+<|visual token 031824|>
+<|visual token 031825|>
+<|visual token 031826|>
+<|visual token 031827|>
+<|visual token 031828|>
+<|visual token 031829|>
+<|visual token 031830|>
+<|visual token 031831|>
+<|visual token 031832|>
+<|visual token 031833|>
+<|visual token 031834|>
+<|visual token 031835|>
+<|visual token 031836|>
+<|visual token 031837|>
+<|visual token 031838|>
+<|visual token 031839|>
+<|visual token 031840|>
+<|visual token 031841|>
+<|visual token 031842|>
+<|visual token 031843|>
+<|visual token 031844|>
+<|visual token 031845|>
+<|visual token 031846|>
+<|visual token 031847|>
+<|visual token 031848|>
+<|visual token 031849|>
+<|visual token 031850|>
+<|visual token 031851|>
+<|visual token 031852|>
+<|visual token 031853|>
+<|visual token 031854|>
+<|visual token 031855|>
+<|visual token 031856|>
+<|visual token 031857|>
+<|visual token 031858|>
+<|visual token 031859|>
+<|visual token 031860|>
+<|visual token 031861|>
+<|visual token 031862|>
+<|visual token 031863|>
+<|visual token 031864|>
+<|visual token 031865|>
+<|visual token 031866|>
+<|visual token 031867|>
+<|visual token 031868|>
+<|visual token 031869|>
+<|visual token 031870|>
+<|visual token 031871|>
+<|visual token 031872|>
+<|visual token 031873|>
+<|visual token 031874|>
+<|visual token 031875|>
+<|visual token 031876|>
+<|visual token 031877|>
+<|visual token 031878|>
+<|visual token 031879|>
+<|visual token 031880|>
+<|visual token 031881|>
+<|visual token 031882|>
+<|visual token 031883|>
+<|visual token 031884|>
+<|visual token 031885|>
+<|visual token 031886|>
+<|visual token 031887|>
+<|visual token 031888|>
+<|visual token 031889|>
+<|visual token 031890|>
+<|visual token 031891|>
+<|visual token 031892|>
+<|visual token 031893|>
+<|visual token 031894|>
+<|visual token 031895|>
+<|visual token 031896|>
+<|visual token 031897|>
+<|visual token 031898|>
+<|visual token 031899|>
+<|visual token 031900|>
+<|visual token 031901|>
+<|visual token 031902|>
+<|visual token 031903|>
+<|visual token 031904|>
+<|visual token 031905|>
+<|visual token 031906|>
+<|visual token 031907|>
+<|visual token 031908|>
+<|visual token 031909|>
+<|visual token 031910|>
+<|visual token 031911|>
+<|visual token 031912|>
+<|visual token 031913|>
+<|visual token 031914|>
+<|visual token 031915|>
+<|visual token 031916|>
+<|visual token 031917|>
+<|visual token 031918|>
+<|visual token 031919|>
+<|visual token 031920|>
+<|visual token 031921|>
+<|visual token 031922|>
+<|visual token 031923|>
+<|visual token 031924|>
+<|visual token 031925|>
+<|visual token 031926|>
+<|visual token 031927|>
+<|visual token 031928|>
+<|visual token 031929|>
+<|visual token 031930|>
+<|visual token 031931|>
+<|visual token 031932|>
+<|visual token 031933|>
+<|visual token 031934|>
+<|visual token 031935|>
+<|visual token 031936|>
+<|visual token 031937|>
+<|visual token 031938|>
+<|visual token 031939|>
+<|visual token 031940|>
+<|visual token 031941|>
+<|visual token 031942|>
+<|visual token 031943|>
+<|visual token 031944|>
+<|visual token 031945|>
+<|visual token 031946|>
+<|visual token 031947|>
+<|visual token 031948|>
+<|visual token 031949|>
+<|visual token 031950|>
+<|visual token 031951|>
+<|visual token 031952|>
+<|visual token 031953|>
+<|visual token 031954|>
+<|visual token 031955|>
+<|visual token 031956|>
+<|visual token 031957|>
+<|visual token 031958|>
+<|visual token 031959|>
+<|visual token 031960|>
+<|visual token 031961|>
+<|visual token 031962|>
+<|visual token 031963|>
+<|visual token 031964|>
+<|visual token 031965|>
+<|visual token 031966|>
+<|visual token 031967|>
+<|visual token 031968|>
+<|visual token 031969|>
+<|visual token 031970|>
+<|visual token 031971|>
+<|visual token 031972|>
+<|visual token 031973|>
+<|visual token 031974|>
+<|visual token 031975|>
+<|visual token 031976|>
+<|visual token 031977|>
+<|visual token 031978|>
+<|visual token 031979|>
+<|visual token 031980|>
+<|visual token 031981|>
+<|visual token 031982|>
+<|visual token 031983|>
+<|visual token 031984|>
+<|visual token 031985|>
+<|visual token 031986|>
+<|visual token 031987|>
+<|visual token 031988|>
+<|visual token 031989|>
+<|visual token 031990|>
+<|visual token 031991|>
+<|visual token 031992|>
+<|visual token 031993|>
+<|visual token 031994|>
+<|visual token 031995|>
+<|visual token 031996|>
+<|visual token 031997|>
+<|visual token 031998|>
+<|visual token 031999|>
+<|visual token 032000|>
+<|visual token 032001|>
+<|visual token 032002|>
+<|visual token 032003|>
+<|visual token 032004|>
+<|visual token 032005|>
+<|visual token 032006|>
+<|visual token 032007|>
+<|visual token 032008|>
+<|visual token 032009|>
+<|visual token 032010|>
+<|visual token 032011|>
+<|visual token 032012|>
+<|visual token 032013|>
+<|visual token 032014|>
+<|visual token 032015|>
+<|visual token 032016|>
+<|visual token 032017|>
+<|visual token 032018|>
+<|visual token 032019|>
+<|visual token 032020|>
+<|visual token 032021|>
+<|visual token 032022|>
+<|visual token 032023|>
+<|visual token 032024|>
+<|visual token 032025|>
+<|visual token 032026|>
+<|visual token 032027|>
+<|visual token 032028|>
+<|visual token 032029|>
+<|visual token 032030|>
+<|visual token 032031|>
+<|visual token 032032|>
+<|visual token 032033|>
+<|visual token 032034|>
+<|visual token 032035|>
+<|visual token 032036|>
+<|visual token 032037|>
+<|visual token 032038|>
+<|visual token 032039|>
+<|visual token 032040|>
+<|visual token 032041|>
+<|visual token 032042|>
+<|visual token 032043|>
+<|visual token 032044|>
+<|visual token 032045|>
+<|visual token 032046|>
+<|visual token 032047|>
+<|visual token 032048|>
+<|visual token 032049|>
+<|visual token 032050|>
+<|visual token 032051|>
+<|visual token 032052|>
+<|visual token 032053|>
+<|visual token 032054|>
+<|visual token 032055|>
+<|visual token 032056|>
+<|visual token 032057|>
+<|visual token 032058|>
+<|visual token 032059|>
+<|visual token 032060|>
+<|visual token 032061|>
+<|visual token 032062|>
+<|visual token 032063|>
+<|visual token 032064|>
+<|visual token 032065|>
+<|visual token 032066|>
+<|visual token 032067|>
+<|visual token 032068|>
+<|visual token 032069|>
+<|visual token 032070|>
+<|visual token 032071|>
+<|visual token 032072|>
+<|visual token 032073|>
+<|visual token 032074|>
+<|visual token 032075|>
+<|visual token 032076|>
+<|visual token 032077|>
+<|visual token 032078|>
+<|visual token 032079|>
+<|visual token 032080|>
+<|visual token 032081|>
+<|visual token 032082|>
+<|visual token 032083|>
+<|visual token 032084|>
+<|visual token 032085|>
+<|visual token 032086|>
+<|visual token 032087|>
+<|visual token 032088|>
+<|visual token 032089|>
+<|visual token 032090|>
+<|visual token 032091|>
+<|visual token 032092|>
+<|visual token 032093|>
+<|visual token 032094|>
+<|visual token 032095|>
+<|visual token 032096|>
+<|visual token 032097|>
+<|visual token 032098|>
+<|visual token 032099|>
+<|visual token 032100|>
+<|visual token 032101|>
+<|visual token 032102|>
+<|visual token 032103|>
+<|visual token 032104|>
+<|visual token 032105|>
+<|visual token 032106|>
+<|visual token 032107|>
+<|visual token 032108|>
+<|visual token 032109|>
+<|visual token 032110|>
+<|visual token 032111|>
+<|visual token 032112|>
+<|visual token 032113|>
+<|visual token 032114|>
+<|visual token 032115|>
+<|visual token 032116|>
+<|visual token 032117|>
+<|visual token 032118|>
+<|visual token 032119|>
+<|visual token 032120|>
+<|visual token 032121|>
+<|visual token 032122|>
+<|visual token 032123|>
+<|visual token 032124|>
+<|visual token 032125|>
+<|visual token 032126|>
+<|visual token 032127|>
+<|visual token 032128|>
+<|visual token 032129|>
+<|visual token 032130|>
+<|visual token 032131|>
+<|visual token 032132|>
+<|visual token 032133|>
+<|visual token 032134|>
+<|visual token 032135|>
+<|visual token 032136|>
+<|visual token 032137|>
+<|visual token 032138|>
+<|visual token 032139|>
+<|visual token 032140|>
+<|visual token 032141|>
+<|visual token 032142|>
+<|visual token 032143|>
+<|visual token 032144|>
+<|visual token 032145|>
+<|visual token 032146|>
+<|visual token 032147|>
+<|visual token 032148|>
+<|visual token 032149|>
+<|visual token 032150|>
+<|visual token 032151|>
+<|visual token 032152|>
+<|visual token 032153|>
+<|visual token 032154|>
+<|visual token 032155|>
+<|visual token 032156|>
+<|visual token 032157|>
+<|visual token 032158|>
+<|visual token 032159|>
+<|visual token 032160|>
+<|visual token 032161|>
+<|visual token 032162|>
+<|visual token 032163|>
+<|visual token 032164|>
+<|visual token 032165|>
+<|visual token 032166|>
+<|visual token 032167|>
+<|visual token 032168|>
+<|visual token 032169|>
+<|visual token 032170|>
+<|visual token 032171|>
+<|visual token 032172|>
+<|visual token 032173|>
+<|visual token 032174|>
+<|visual token 032175|>
+<|visual token 032176|>
+<|visual token 032177|>
+<|visual token 032178|>
+<|visual token 032179|>
+<|visual token 032180|>
+<|visual token 032181|>
+<|visual token 032182|>
+<|visual token 032183|>
+<|visual token 032184|>
+<|visual token 032185|>
+<|visual token 032186|>
+<|visual token 032187|>
+<|visual token 032188|>
+<|visual token 032189|>
+<|visual token 032190|>
+<|visual token 032191|>
+<|visual token 032192|>
+<|visual token 032193|>
+<|visual token 032194|>
+<|visual token 032195|>
+<|visual token 032196|>
+<|visual token 032197|>
+<|visual token 032198|>
+<|visual token 032199|>
+<|visual token 032200|>
+<|visual token 032201|>
+<|visual token 032202|>
+<|visual token 032203|>
+<|visual token 032204|>
+<|visual token 032205|>
+<|visual token 032206|>
+<|visual token 032207|>
+<|visual token 032208|>
+<|visual token 032209|>
+<|visual token 032210|>
+<|visual token 032211|>
+<|visual token 032212|>
+<|visual token 032213|>
+<|visual token 032214|>
+<|visual token 032215|>
+<|visual token 032216|>
+<|visual token 032217|>
+<|visual token 032218|>
+<|visual token 032219|>
+<|visual token 032220|>
+<|visual token 032221|>
+<|visual token 032222|>
+<|visual token 032223|>
+<|visual token 032224|>
+<|visual token 032225|>
+<|visual token 032226|>
+<|visual token 032227|>
+<|visual token 032228|>
+<|visual token 032229|>
+<|visual token 032230|>
+<|visual token 032231|>
+<|visual token 032232|>
+<|visual token 032233|>
+<|visual token 032234|>
+<|visual token 032235|>
+<|visual token 032236|>
+<|visual token 032237|>
+<|visual token 032238|>
+<|visual token 032239|>
+<|visual token 032240|>
+<|visual token 032241|>
+<|visual token 032242|>
+<|visual token 032243|>
+<|visual token 032244|>
+<|visual token 032245|>
+<|visual token 032246|>
+<|visual token 032247|>
+<|visual token 032248|>
+<|visual token 032249|>
+<|visual token 032250|>
+<|visual token 032251|>
+<|visual token 032252|>
+<|visual token 032253|>
+<|visual token 032254|>
+<|visual token 032255|>
+<|visual token 032256|>
+<|visual token 032257|>
+<|visual token 032258|>
+<|visual token 032259|>
+<|visual token 032260|>
+<|visual token 032261|>
+<|visual token 032262|>
+<|visual token 032263|>
+<|visual token 032264|>
+<|visual token 032265|>
+<|visual token 032266|>
+<|visual token 032267|>
+<|visual token 032268|>
+<|visual token 032269|>
+<|visual token 032270|>
+<|visual token 032271|>
+<|visual token 032272|>
+<|visual token 032273|>
+<|visual token 032274|>
+<|visual token 032275|>
+<|visual token 032276|>
+<|visual token 032277|>
+<|visual token 032278|>
+<|visual token 032279|>
+<|visual token 032280|>
+<|visual token 032281|>
+<|visual token 032282|>
+<|visual token 032283|>
+<|visual token 032284|>
+<|visual token 032285|>
+<|visual token 032286|>
+<|visual token 032287|>
+<|visual token 032288|>
+<|visual token 032289|>
+<|visual token 032290|>
+<|visual token 032291|>
+<|visual token 032292|>
+<|visual token 032293|>
+<|visual token 032294|>
+<|visual token 032295|>
+<|visual token 032296|>
+<|visual token 032297|>
+<|visual token 032298|>
+<|visual token 032299|>
+<|visual token 032300|>
+<|visual token 032301|>
+<|visual token 032302|>
+<|visual token 032303|>
+<|visual token 032304|>
+<|visual token 032305|>
+<|visual token 032306|>
+<|visual token 032307|>
+<|visual token 032308|>
+<|visual token 032309|>
+<|visual token 032310|>
+<|visual token 032311|>
+<|visual token 032312|>
+<|visual token 032313|>
+<|visual token 032314|>
+<|visual token 032315|>
+<|visual token 032316|>
+<|visual token 032317|>
+<|visual token 032318|>
+<|visual token 032319|>
+<|visual token 032320|>
+<|visual token 032321|>
+<|visual token 032322|>
+<|visual token 032323|>
+<|visual token 032324|>
+<|visual token 032325|>
+<|visual token 032326|>
+<|visual token 032327|>
+<|visual token 032328|>
+<|visual token 032329|>
+<|visual token 032330|>
+<|visual token 032331|>
+<|visual token 032332|>
+<|visual token 032333|>
+<|visual token 032334|>
+<|visual token 032335|>
+<|visual token 032336|>
+<|visual token 032337|>
+<|visual token 032338|>
+<|visual token 032339|>
+<|visual token 032340|>
+<|visual token 032341|>
+<|visual token 032342|>
+<|visual token 032343|>
+<|visual token 032344|>
+<|visual token 032345|>
+<|visual token 032346|>
+<|visual token 032347|>
+<|visual token 032348|>
+<|visual token 032349|>
+<|visual token 032350|>
+<|visual token 032351|>
+<|visual token 032352|>
+<|visual token 032353|>
+<|visual token 032354|>
+<|visual token 032355|>
+<|visual token 032356|>
+<|visual token 032357|>
+<|visual token 032358|>
+<|visual token 032359|>
+<|visual token 032360|>
+<|visual token 032361|>
+<|visual token 032362|>
+<|visual token 032363|>
+<|visual token 032364|>
+<|visual token 032365|>
+<|visual token 032366|>
+<|visual token 032367|>
+<|visual token 032368|>
+<|visual token 032369|>
+<|visual token 032370|>
+<|visual token 032371|>
+<|visual token 032372|>
+<|visual token 032373|>
+<|visual token 032374|>
+<|visual token 032375|>
+<|visual token 032376|>
+<|visual token 032377|>
+<|visual token 032378|>
+<|visual token 032379|>
+<|visual token 032380|>
+<|visual token 032381|>
+<|visual token 032382|>
+<|visual token 032383|>
+<|visual token 032384|>
+<|visual token 032385|>
+<|visual token 032386|>
+<|visual token 032387|>
+<|visual token 032388|>
+<|visual token 032389|>
+<|visual token 032390|>
+<|visual token 032391|>
+<|visual token 032392|>
+<|visual token 032393|>
+<|visual token 032394|>
+<|visual token 032395|>
+<|visual token 032396|>
+<|visual token 032397|>
+<|visual token 032398|>
+<|visual token 032399|>
+<|visual token 032400|>
+<|visual token 032401|>
+<|visual token 032402|>
+<|visual token 032403|>
+<|visual token 032404|>
+<|visual token 032405|>
+<|visual token 032406|>
+<|visual token 032407|>
+<|visual token 032408|>
+<|visual token 032409|>
+<|visual token 032410|>
+<|visual token 032411|>
+<|visual token 032412|>
+<|visual token 032413|>
+<|visual token 032414|>
+<|visual token 032415|>
+<|visual token 032416|>
+<|visual token 032417|>
+<|visual token 032418|>
+<|visual token 032419|>
+<|visual token 032420|>
+<|visual token 032421|>
+<|visual token 032422|>
+<|visual token 032423|>
+<|visual token 032424|>
+<|visual token 032425|>
+<|visual token 032426|>
+<|visual token 032427|>
+<|visual token 032428|>
+<|visual token 032429|>
+<|visual token 032430|>
+<|visual token 032431|>
+<|visual token 032432|>
+<|visual token 032433|>
+<|visual token 032434|>
+<|visual token 032435|>
+<|visual token 032436|>
+<|visual token 032437|>
+<|visual token 032438|>
+<|visual token 032439|>
+<|visual token 032440|>
+<|visual token 032441|>
+<|visual token 032442|>
+<|visual token 032443|>
+<|visual token 032444|>
+<|visual token 032445|>
+<|visual token 032446|>
+<|visual token 032447|>
+<|visual token 032448|>
+<|visual token 032449|>
+<|visual token 032450|>
+<|visual token 032451|>
+<|visual token 032452|>
+<|visual token 032453|>
+<|visual token 032454|>
+<|visual token 032455|>
+<|visual token 032456|>
+<|visual token 032457|>
+<|visual token 032458|>
+<|visual token 032459|>
+<|visual token 032460|>
+<|visual token 032461|>
+<|visual token 032462|>
+<|visual token 032463|>
+<|visual token 032464|>
+<|visual token 032465|>
+<|visual token 032466|>
+<|visual token 032467|>
+<|visual token 032468|>
+<|visual token 032469|>
+<|visual token 032470|>
+<|visual token 032471|>
+<|visual token 032472|>
+<|visual token 032473|>
+<|visual token 032474|>
+<|visual token 032475|>
+<|visual token 032476|>
+<|visual token 032477|>
+<|visual token 032478|>
+<|visual token 032479|>
+<|visual token 032480|>
+<|visual token 032481|>
+<|visual token 032482|>
+<|visual token 032483|>
+<|visual token 032484|>
+<|visual token 032485|>
+<|visual token 032486|>
+<|visual token 032487|>
+<|visual token 032488|>
+<|visual token 032489|>
+<|visual token 032490|>
+<|visual token 032491|>
+<|visual token 032492|>
+<|visual token 032493|>
+<|visual token 032494|>
+<|visual token 032495|>
+<|visual token 032496|>
+<|visual token 032497|>
+<|visual token 032498|>
+<|visual token 032499|>
+<|visual token 032500|>
+<|visual token 032501|>
+<|visual token 032502|>
+<|visual token 032503|>
+<|visual token 032504|>
+<|visual token 032505|>
+<|visual token 032506|>
+<|visual token 032507|>
+<|visual token 032508|>
+<|visual token 032509|>
+<|visual token 032510|>
+<|visual token 032511|>
+<|visual token 032512|>
+<|visual token 032513|>
+<|visual token 032514|>
+<|visual token 032515|>
+<|visual token 032516|>
+<|visual token 032517|>
+<|visual token 032518|>
+<|visual token 032519|>
+<|visual token 032520|>
+<|visual token 032521|>
+<|visual token 032522|>
+<|visual token 032523|>
+<|visual token 032524|>
+<|visual token 032525|>
+<|visual token 032526|>
+<|visual token 032527|>
+<|visual token 032528|>
+<|visual token 032529|>
+<|visual token 032530|>
+<|visual token 032531|>
+<|visual token 032532|>
+<|visual token 032533|>
+<|visual token 032534|>
+<|visual token 032535|>
+<|visual token 032536|>
+<|visual token 032537|>
+<|visual token 032538|>
+<|visual token 032539|>
+<|visual token 032540|>
+<|visual token 032541|>
+<|visual token 032542|>
+<|visual token 032543|>
+<|visual token 032544|>
+<|visual token 032545|>
+<|visual token 032546|>
+<|visual token 032547|>
+<|visual token 032548|>
+<|visual token 032549|>
+<|visual token 032550|>
+<|visual token 032551|>
+<|visual token 032552|>
+<|visual token 032553|>
+<|visual token 032554|>
+<|visual token 032555|>
+<|visual token 032556|>
+<|visual token 032557|>
+<|visual token 032558|>
+<|visual token 032559|>
+<|visual token 032560|>
+<|visual token 032561|>
+<|visual token 032562|>
+<|visual token 032563|>
+<|visual token 032564|>
+<|visual token 032565|>
+<|visual token 032566|>
+<|visual token 032567|>
+<|visual token 032568|>
+<|visual token 032569|>
+<|visual token 032570|>
+<|visual token 032571|>
+<|visual token 032572|>
+<|visual token 032573|>
+<|visual token 032574|>
+<|visual token 032575|>
+<|visual token 032576|>
+<|visual token 032577|>
+<|visual token 032578|>
+<|visual token 032579|>
+<|visual token 032580|>
+<|visual token 032581|>
+<|visual token 032582|>
+<|visual token 032583|>
+<|visual token 032584|>
+<|visual token 032585|>
+<|visual token 032586|>
+<|visual token 032587|>
+<|visual token 032588|>
+<|visual token 032589|>
+<|visual token 032590|>
+<|visual token 032591|>
+<|visual token 032592|>
+<|visual token 032593|>
+<|visual token 032594|>
+<|visual token 032595|>
+<|visual token 032596|>
+<|visual token 032597|>
+<|visual token 032598|>
+<|visual token 032599|>
+<|visual token 032600|>
+<|visual token 032601|>
+<|visual token 032602|>
+<|visual token 032603|>
+<|visual token 032604|>
+<|visual token 032605|>
+<|visual token 032606|>
+<|visual token 032607|>
+<|visual token 032608|>
+<|visual token 032609|>
+<|visual token 032610|>
+<|visual token 032611|>
+<|visual token 032612|>
+<|visual token 032613|>
+<|visual token 032614|>
+<|visual token 032615|>
+<|visual token 032616|>
+<|visual token 032617|>
+<|visual token 032618|>
+<|visual token 032619|>
+<|visual token 032620|>
+<|visual token 032621|>
+<|visual token 032622|>
+<|visual token 032623|>
+<|visual token 032624|>
+<|visual token 032625|>
+<|visual token 032626|>
+<|visual token 032627|>
+<|visual token 032628|>
+<|visual token 032629|>
+<|visual token 032630|>
+<|visual token 032631|>
+<|visual token 032632|>
+<|visual token 032633|>
+<|visual token 032634|>
+<|visual token 032635|>
+<|visual token 032636|>
+<|visual token 032637|>
+<|visual token 032638|>
+<|visual token 032639|>
+<|visual token 032640|>
+<|visual token 032641|>
+<|visual token 032642|>
+<|visual token 032643|>
+<|visual token 032644|>
+<|visual token 032645|>
+<|visual token 032646|>
+<|visual token 032647|>
+<|visual token 032648|>
+<|visual token 032649|>
+<|visual token 032650|>
+<|visual token 032651|>
+<|visual token 032652|>
+<|visual token 032653|>
+<|visual token 032654|>
+<|visual token 032655|>
+<|visual token 032656|>
+<|visual token 032657|>
+<|visual token 032658|>
+<|visual token 032659|>
+<|visual token 032660|>
+<|visual token 032661|>
+<|visual token 032662|>
+<|visual token 032663|>
+<|visual token 032664|>
+<|visual token 032665|>
+<|visual token 032666|>
+<|visual token 032667|>
+<|visual token 032668|>
+<|visual token 032669|>
+<|visual token 032670|>
+<|visual token 032671|>
+<|visual token 032672|>
+<|visual token 032673|>
+<|visual token 032674|>
+<|visual token 032675|>
+<|visual token 032676|>
+<|visual token 032677|>
+<|visual token 032678|>
+<|visual token 032679|>
+<|visual token 032680|>
+<|visual token 032681|>
+<|visual token 032682|>
+<|visual token 032683|>
+<|visual token 032684|>
+<|visual token 032685|>
+<|visual token 032686|>
+<|visual token 032687|>
+<|visual token 032688|>
+<|visual token 032689|>
+<|visual token 032690|>
+<|visual token 032691|>
+<|visual token 032692|>
+<|visual token 032693|>
+<|visual token 032694|>
+<|visual token 032695|>
+<|visual token 032696|>
+<|visual token 032697|>
+<|visual token 032698|>
+<|visual token 032699|>
+<|visual token 032700|>
+<|visual token 032701|>
+<|visual token 032702|>
+<|visual token 032703|>
+<|visual token 032704|>
+<|visual token 032705|>
+<|visual token 032706|>
+<|visual token 032707|>
+<|visual token 032708|>
+<|visual token 032709|>
+<|visual token 032710|>
+<|visual token 032711|>
+<|visual token 032712|>
+<|visual token 032713|>
+<|visual token 032714|>
+<|visual token 032715|>
+<|visual token 032716|>
+<|visual token 032717|>
+<|visual token 032718|>
+<|visual token 032719|>
+<|visual token 032720|>
+<|visual token 032721|>
+<|visual token 032722|>
+<|visual token 032723|>
+<|visual token 032724|>
+<|visual token 032725|>
+<|visual token 032726|>
+<|visual token 032727|>
+<|visual token 032728|>
+<|visual token 032729|>
+<|visual token 032730|>
+<|visual token 032731|>
+<|visual token 032732|>
+<|visual token 032733|>
+<|visual token 032734|>
+<|visual token 032735|>
+<|visual token 032736|>
+<|visual token 032737|>
+<|visual token 032738|>
+<|visual token 032739|>
+<|visual token 032740|>
+<|visual token 032741|>
+<|visual token 032742|>
+<|visual token 032743|>
+<|visual token 032744|>
+<|visual token 032745|>
+<|visual token 032746|>
+<|visual token 032747|>
+<|visual token 032748|>
+<|visual token 032749|>
+<|visual token 032750|>
+<|visual token 032751|>
+<|visual token 032752|>
+<|visual token 032753|>
+<|visual token 032754|>
+<|visual token 032755|>
+<|visual token 032756|>
+<|visual token 032757|>
+<|visual token 032758|>
+<|visual token 032759|>
+<|visual token 032760|>
+<|visual token 032761|>
+<|visual token 032762|>
+<|visual token 032763|>
+<|visual token 032764|>
+<|visual token 032765|>
+<|visual token 032766|>
+<|visual token 032767|>
+<|visual token 032768|>
+<|visual token 032769|>
+<|visual token 032770|>
+<|visual token 032771|>
+<|visual token 032772|>
+<|visual token 032773|>
+<|visual token 032774|>
+<|visual token 032775|>
+<|visual token 032776|>
+<|visual token 032777|>
+<|visual token 032778|>
+<|visual token 032779|>
+<|visual token 032780|>
+<|visual token 032781|>
+<|visual token 032782|>
+<|visual token 032783|>
+<|visual token 032784|>
+<|visual token 032785|>
+<|visual token 032786|>
+<|visual token 032787|>
+<|visual token 032788|>
+<|visual token 032789|>
+<|visual token 032790|>
+<|visual token 032791|>
+<|visual token 032792|>
+<|visual token 032793|>
+<|visual token 032794|>
+<|visual token 032795|>
+<|visual token 032796|>
+<|visual token 032797|>
+<|visual token 032798|>
+<|visual token 032799|>
+<|visual token 032800|>
+<|visual token 032801|>
+<|visual token 032802|>
+<|visual token 032803|>
+<|visual token 032804|>
+<|visual token 032805|>
+<|visual token 032806|>
+<|visual token 032807|>
+<|visual token 032808|>
+<|visual token 032809|>
+<|visual token 032810|>
+<|visual token 032811|>
+<|visual token 032812|>
+<|visual token 032813|>
+<|visual token 032814|>
+<|visual token 032815|>
+<|visual token 032816|>
+<|visual token 032817|>
+<|visual token 032818|>
+<|visual token 032819|>
+<|visual token 032820|>
+<|visual token 032821|>
+<|visual token 032822|>
+<|visual token 032823|>
+<|visual token 032824|>
+<|visual token 032825|>
+<|visual token 032826|>
+<|visual token 032827|>
+<|visual token 032828|>
+<|visual token 032829|>
+<|visual token 032830|>
+<|visual token 032831|>
+<|visual token 032832|>
+<|visual token 032833|>
+<|visual token 032834|>
+<|visual token 032835|>
+<|visual token 032836|>
+<|visual token 032837|>
+<|visual token 032838|>
+<|visual token 032839|>
+<|visual token 032840|>
+<|visual token 032841|>
+<|visual token 032842|>
+<|visual token 032843|>
+<|visual token 032844|>
+<|visual token 032845|>
+<|visual token 032846|>
+<|visual token 032847|>
+<|visual token 032848|>
+<|visual token 032849|>
+<|visual token 032850|>
+<|visual token 032851|>
+<|visual token 032852|>
+<|visual token 032853|>
+<|visual token 032854|>
+<|visual token 032855|>
+<|visual token 032856|>
+<|visual token 032857|>
+<|visual token 032858|>
+<|visual token 032859|>
+<|visual token 032860|>
+<|visual token 032861|>
+<|visual token 032862|>
+<|visual token 032863|>
+<|visual token 032864|>
+<|visual token 032865|>
+<|visual token 032866|>
+<|visual token 032867|>
+<|visual token 032868|>
+<|visual token 032869|>
+<|visual token 032870|>
+<|visual token 032871|>
+<|visual token 032872|>
+<|visual token 032873|>
+<|visual token 032874|>
+<|visual token 032875|>
+<|visual token 032876|>
+<|visual token 032877|>
+<|visual token 032878|>
+<|visual token 032879|>
+<|visual token 032880|>
+<|visual token 032881|>
+<|visual token 032882|>
+<|visual token 032883|>
+<|visual token 032884|>
+<|visual token 032885|>
+<|visual token 032886|>
+<|visual token 032887|>
+<|visual token 032888|>
+<|visual token 032889|>
+<|visual token 032890|>
+<|visual token 032891|>
+<|visual token 032892|>
+<|visual token 032893|>
+<|visual token 032894|>
+<|visual token 032895|>
+<|visual token 032896|>
+<|visual token 032897|>
+<|visual token 032898|>
+<|visual token 032899|>
+<|visual token 032900|>
+<|visual token 032901|>
+<|visual token 032902|>
+<|visual token 032903|>
+<|visual token 032904|>
+<|visual token 032905|>
+<|visual token 032906|>
+<|visual token 032907|>
+<|visual token 032908|>
+<|visual token 032909|>
+<|visual token 032910|>
+<|visual token 032911|>
+<|visual token 032912|>
+<|visual token 032913|>
+<|visual token 032914|>
+<|visual token 032915|>
+<|visual token 032916|>
+<|visual token 032917|>
+<|visual token 032918|>
+<|visual token 032919|>
+<|visual token 032920|>
+<|visual token 032921|>
+<|visual token 032922|>
+<|visual token 032923|>
+<|visual token 032924|>
+<|visual token 032925|>
+<|visual token 032926|>
+<|visual token 032927|>
+<|visual token 032928|>
+<|visual token 032929|>
+<|visual token 032930|>
+<|visual token 032931|>
+<|visual token 032932|>
+<|visual token 032933|>
+<|visual token 032934|>
+<|visual token 032935|>
+<|visual token 032936|>
+<|visual token 032937|>
+<|visual token 032938|>
+<|visual token 032939|>
+<|visual token 032940|>
+<|visual token 032941|>
+<|visual token 032942|>
+<|visual token 032943|>
+<|visual token 032944|>
+<|visual token 032945|>
+<|visual token 032946|>
+<|visual token 032947|>
+<|visual token 032948|>
+<|visual token 032949|>
+<|visual token 032950|>
+<|visual token 032951|>
+<|visual token 032952|>
+<|visual token 032953|>
+<|visual token 032954|>
+<|visual token 032955|>
+<|visual token 032956|>
+<|visual token 032957|>
+<|visual token 032958|>
+<|visual token 032959|>
+<|visual token 032960|>
+<|visual token 032961|>
+<|visual token 032962|>
+<|visual token 032963|>
+<|visual token 032964|>
+<|visual token 032965|>
+<|visual token 032966|>
+<|visual token 032967|>
+<|visual token 032968|>
+<|visual token 032969|>
+<|visual token 032970|>
+<|visual token 032971|>
+<|visual token 032972|>
+<|visual token 032973|>
+<|visual token 032974|>
+<|visual token 032975|>
+<|visual token 032976|>
+<|visual token 032977|>
+<|visual token 032978|>
+<|visual token 032979|>
+<|visual token 032980|>
+<|visual token 032981|>
+<|visual token 032982|>
+<|visual token 032983|>
+<|visual token 032984|>
+<|visual token 032985|>
+<|visual token 032986|>
+<|visual token 032987|>
+<|visual token 032988|>
+<|visual token 032989|>
+<|visual token 032990|>
+<|visual token 032991|>
+<|visual token 032992|>
+<|visual token 032993|>
+<|visual token 032994|>
+<|visual token 032995|>
+<|visual token 032996|>
+<|visual token 032997|>
+<|visual token 032998|>
+<|visual token 032999|>
+<|visual token 033000|>
+<|visual token 033001|>
+<|visual token 033002|>
+<|visual token 033003|>
+<|visual token 033004|>
+<|visual token 033005|>
+<|visual token 033006|>
+<|visual token 033007|>
+<|visual token 033008|>
+<|visual token 033009|>
+<|visual token 033010|>
+<|visual token 033011|>
+<|visual token 033012|>
+<|visual token 033013|>
+<|visual token 033014|>
+<|visual token 033015|>
+<|visual token 033016|>
+<|visual token 033017|>
+<|visual token 033018|>
+<|visual token 033019|>
+<|visual token 033020|>
+<|visual token 033021|>
+<|visual token 033022|>
+<|visual token 033023|>
+<|visual token 033024|>
+<|visual token 033025|>
+<|visual token 033026|>
+<|visual token 033027|>
+<|visual token 033028|>
+<|visual token 033029|>
+<|visual token 033030|>
+<|visual token 033031|>
+<|visual token 033032|>
+<|visual token 033033|>
+<|visual token 033034|>
+<|visual token 033035|>
+<|visual token 033036|>
+<|visual token 033037|>
+<|visual token 033038|>
+<|visual token 033039|>
+<|visual token 033040|>
+<|visual token 033041|>
+<|visual token 033042|>
+<|visual token 033043|>
+<|visual token 033044|>
+<|visual token 033045|>
+<|visual token 033046|>
+<|visual token 033047|>
+<|visual token 033048|>
+<|visual token 033049|>
+<|visual token 033050|>
+<|visual token 033051|>
+<|visual token 033052|>
+<|visual token 033053|>
+<|visual token 033054|>
+<|visual token 033055|>
+<|visual token 033056|>
+<|visual token 033057|>
+<|visual token 033058|>
+<|visual token 033059|>
+<|visual token 033060|>
+<|visual token 033061|>
+<|visual token 033062|>
+<|visual token 033063|>
+<|visual token 033064|>
+<|visual token 033065|>
+<|visual token 033066|>
+<|visual token 033067|>
+<|visual token 033068|>
+<|visual token 033069|>
+<|visual token 033070|>
+<|visual token 033071|>
+<|visual token 033072|>
+<|visual token 033073|>
+<|visual token 033074|>
+<|visual token 033075|>
+<|visual token 033076|>
+<|visual token 033077|>
+<|visual token 033078|>
+<|visual token 033079|>
+<|visual token 033080|>
+<|visual token 033081|>
+<|visual token 033082|>
+<|visual token 033083|>
+<|visual token 033084|>
+<|visual token 033085|>
+<|visual token 033086|>
+<|visual token 033087|>
+<|visual token 033088|>
+<|visual token 033089|>
+<|visual token 033090|>
+<|visual token 033091|>
+<|visual token 033092|>
+<|visual token 033093|>
+<|visual token 033094|>
+<|visual token 033095|>
+<|visual token 033096|>
+<|visual token 033097|>
+<|visual token 033098|>
+<|visual token 033099|>
+<|visual token 033100|>
+<|visual token 033101|>
+<|visual token 033102|>
+<|visual token 033103|>
+<|visual token 033104|>
+<|visual token 033105|>
+<|visual token 033106|>
+<|visual token 033107|>
+<|visual token 033108|>
+<|visual token 033109|>
+<|visual token 033110|>
+<|visual token 033111|>
+<|visual token 033112|>
+<|visual token 033113|>
+<|visual token 033114|>
+<|visual token 033115|>
+<|visual token 033116|>
+<|visual token 033117|>
+<|visual token 033118|>
+<|visual token 033119|>
+<|visual token 033120|>
+<|visual token 033121|>
+<|visual token 033122|>
+<|visual token 033123|>
+<|visual token 033124|>
+<|visual token 033125|>
+<|visual token 033126|>
+<|visual token 033127|>
+<|visual token 033128|>
+<|visual token 033129|>
+<|visual token 033130|>
+<|visual token 033131|>
+<|visual token 033132|>
+<|visual token 033133|>
+<|visual token 033134|>
+<|visual token 033135|>
+<|visual token 033136|>
+<|visual token 033137|>
+<|visual token 033138|>
+<|visual token 033139|>
+<|visual token 033140|>
+<|visual token 033141|>
+<|visual token 033142|>
+<|visual token 033143|>
+<|visual token 033144|>
+<|visual token 033145|>
+<|visual token 033146|>
+<|visual token 033147|>
+<|visual token 033148|>
+<|visual token 033149|>
+<|visual token 033150|>
+<|visual token 033151|>
+<|visual token 033152|>
+<|visual token 033153|>
+<|visual token 033154|>
+<|visual token 033155|>
+<|visual token 033156|>
+<|visual token 033157|>
+<|visual token 033158|>
+<|visual token 033159|>
+<|visual token 033160|>
+<|visual token 033161|>
+<|visual token 033162|>
+<|visual token 033163|>
+<|visual token 033164|>
+<|visual token 033165|>
+<|visual token 033166|>
+<|visual token 033167|>
+<|visual token 033168|>
+<|visual token 033169|>
+<|visual token 033170|>
+<|visual token 033171|>
+<|visual token 033172|>
+<|visual token 033173|>
+<|visual token 033174|>
+<|visual token 033175|>
+<|visual token 033176|>
+<|visual token 033177|>
+<|visual token 033178|>
+<|visual token 033179|>
+<|visual token 033180|>
+<|visual token 033181|>
+<|visual token 033182|>
+<|visual token 033183|>
+<|visual token 033184|>
+<|visual token 033185|>
+<|visual token 033186|>
+<|visual token 033187|>
+<|visual token 033188|>
+<|visual token 033189|>
+<|visual token 033190|>
+<|visual token 033191|>
+<|visual token 033192|>
+<|visual token 033193|>
+<|visual token 033194|>
+<|visual token 033195|>
+<|visual token 033196|>
+<|visual token 033197|>
+<|visual token 033198|>
+<|visual token 033199|>
+<|visual token 033200|>
+<|visual token 033201|>
+<|visual token 033202|>
+<|visual token 033203|>
+<|visual token 033204|>
+<|visual token 033205|>
+<|visual token 033206|>
+<|visual token 033207|>
+<|visual token 033208|>
+<|visual token 033209|>
+<|visual token 033210|>
+<|visual token 033211|>
+<|visual token 033212|>
+<|visual token 033213|>
+<|visual token 033214|>
+<|visual token 033215|>
+<|visual token 033216|>
+<|visual token 033217|>
+<|visual token 033218|>
+<|visual token 033219|>
+<|visual token 033220|>
+<|visual token 033221|>
+<|visual token 033222|>
+<|visual token 033223|>
+<|visual token 033224|>
+<|visual token 033225|>
+<|visual token 033226|>
+<|visual token 033227|>
+<|visual token 033228|>
+<|visual token 033229|>
+<|visual token 033230|>
+<|visual token 033231|>
+<|visual token 033232|>
+<|visual token 033233|>
+<|visual token 033234|>
+<|visual token 033235|>
+<|visual token 033236|>
+<|visual token 033237|>
+<|visual token 033238|>
+<|visual token 033239|>
+<|visual token 033240|>
+<|visual token 033241|>
+<|visual token 033242|>
+<|visual token 033243|>
+<|visual token 033244|>
+<|visual token 033245|>
+<|visual token 033246|>
+<|visual token 033247|>
+<|visual token 033248|>
+<|visual token 033249|>
+<|visual token 033250|>
+<|visual token 033251|>
+<|visual token 033252|>
+<|visual token 033253|>
+<|visual token 033254|>
+<|visual token 033255|>
+<|visual token 033256|>
+<|visual token 033257|>
+<|visual token 033258|>
+<|visual token 033259|>
+<|visual token 033260|>
+<|visual token 033261|>
+<|visual token 033262|>
+<|visual token 033263|>
+<|visual token 033264|>
+<|visual token 033265|>
+<|visual token 033266|>
+<|visual token 033267|>
+<|visual token 033268|>
+<|visual token 033269|>
+<|visual token 033270|>
+<|visual token 033271|>
+<|visual token 033272|>
+<|visual token 033273|>
+<|visual token 033274|>
+<|visual token 033275|>
+<|visual token 033276|>
+<|visual token 033277|>
+<|visual token 033278|>
+<|visual token 033279|>
+<|visual token 033280|>
+<|visual token 033281|>
+<|visual token 033282|>
+<|visual token 033283|>
+<|visual token 033284|>
+<|visual token 033285|>
+<|visual token 033286|>
+<|visual token 033287|>
+<|visual token 033288|>
+<|visual token 033289|>
+<|visual token 033290|>
+<|visual token 033291|>
+<|visual token 033292|>
+<|visual token 033293|>
+<|visual token 033294|>
+<|visual token 033295|>
+<|visual token 033296|>
+<|visual token 033297|>
+<|visual token 033298|>
+<|visual token 033299|>
+<|visual token 033300|>
+<|visual token 033301|>
+<|visual token 033302|>
+<|visual token 033303|>
+<|visual token 033304|>
+<|visual token 033305|>
+<|visual token 033306|>
+<|visual token 033307|>
+<|visual token 033308|>
+<|visual token 033309|>
+<|visual token 033310|>
+<|visual token 033311|>
+<|visual token 033312|>
+<|visual token 033313|>
+<|visual token 033314|>
+<|visual token 033315|>
+<|visual token 033316|>
+<|visual token 033317|>
+<|visual token 033318|>
+<|visual token 033319|>
+<|visual token 033320|>
+<|visual token 033321|>
+<|visual token 033322|>
+<|visual token 033323|>
+<|visual token 033324|>
+<|visual token 033325|>
+<|visual token 033326|>
+<|visual token 033327|>
+<|visual token 033328|>
+<|visual token 033329|>
+<|visual token 033330|>
+<|visual token 033331|>
+<|visual token 033332|>
+<|visual token 033333|>
+<|visual token 033334|>
+<|visual token 033335|>
+<|visual token 033336|>
+<|visual token 033337|>
+<|visual token 033338|>
+<|visual token 033339|>
+<|visual token 033340|>
+<|visual token 033341|>
+<|visual token 033342|>
+<|visual token 033343|>
+<|visual token 033344|>
+<|visual token 033345|>
+<|visual token 033346|>
+<|visual token 033347|>
+<|visual token 033348|>
+<|visual token 033349|>
+<|visual token 033350|>
+<|visual token 033351|>
+<|visual token 033352|>
+<|visual token 033353|>
+<|visual token 033354|>
+<|visual token 033355|>
+<|visual token 033356|>
+<|visual token 033357|>
+<|visual token 033358|>
+<|visual token 033359|>
+<|visual token 033360|>
+<|visual token 033361|>
+<|visual token 033362|>
+<|visual token 033363|>
+<|visual token 033364|>
+<|visual token 033365|>
+<|visual token 033366|>
+<|visual token 033367|>
+<|visual token 033368|>
+<|visual token 033369|>
+<|visual token 033370|>
+<|visual token 033371|>
+<|visual token 033372|>
+<|visual token 033373|>
+<|visual token 033374|>
+<|visual token 033375|>
+<|visual token 033376|>
+<|visual token 033377|>
+<|visual token 033378|>
+<|visual token 033379|>
+<|visual token 033380|>
+<|visual token 033381|>
+<|visual token 033382|>
+<|visual token 033383|>
+<|visual token 033384|>
+<|visual token 033385|>
+<|visual token 033386|>
+<|visual token 033387|>
+<|visual token 033388|>
+<|visual token 033389|>
+<|visual token 033390|>
+<|visual token 033391|>
+<|visual token 033392|>
+<|visual token 033393|>
+<|visual token 033394|>
+<|visual token 033395|>
+<|visual token 033396|>
+<|visual token 033397|>
+<|visual token 033398|>
+<|visual token 033399|>
+<|visual token 033400|>
+<|visual token 033401|>
+<|visual token 033402|>
+<|visual token 033403|>
+<|visual token 033404|>
+<|visual token 033405|>
+<|visual token 033406|>
+<|visual token 033407|>
+<|visual token 033408|>
+<|visual token 033409|>
+<|visual token 033410|>
+<|visual token 033411|>
+<|visual token 033412|>
+<|visual token 033413|>
+<|visual token 033414|>
+<|visual token 033415|>
+<|visual token 033416|>
+<|visual token 033417|>
+<|visual token 033418|>
+<|visual token 033419|>
+<|visual token 033420|>
+<|visual token 033421|>
+<|visual token 033422|>
+<|visual token 033423|>
+<|visual token 033424|>
+<|visual token 033425|>
+<|visual token 033426|>
+<|visual token 033427|>
+<|visual token 033428|>
+<|visual token 033429|>
+<|visual token 033430|>
+<|visual token 033431|>
+<|visual token 033432|>
+<|visual token 033433|>
+<|visual token 033434|>
+<|visual token 033435|>
+<|visual token 033436|>
+<|visual token 033437|>
+<|visual token 033438|>
+<|visual token 033439|>
+<|visual token 033440|>
+<|visual token 033441|>
+<|visual token 033442|>
+<|visual token 033443|>
+<|visual token 033444|>
+<|visual token 033445|>
+<|visual token 033446|>
+<|visual token 033447|>
+<|visual token 033448|>
+<|visual token 033449|>
+<|visual token 033450|>
+<|visual token 033451|>
+<|visual token 033452|>
+<|visual token 033453|>
+<|visual token 033454|>
+<|visual token 033455|>
+<|visual token 033456|>
+<|visual token 033457|>
+<|visual token 033458|>
+<|visual token 033459|>
+<|visual token 033460|>
+<|visual token 033461|>
+<|visual token 033462|>
+<|visual token 033463|>
+<|visual token 033464|>
+<|visual token 033465|>
+<|visual token 033466|>
+<|visual token 033467|>
+<|visual token 033468|>
+<|visual token 033469|>
+<|visual token 033470|>
+<|visual token 033471|>
+<|visual token 033472|>
+<|visual token 033473|>
+<|visual token 033474|>
+<|visual token 033475|>
+<|visual token 033476|>
+<|visual token 033477|>
+<|visual token 033478|>
+<|visual token 033479|>
+<|visual token 033480|>
+<|visual token 033481|>
+<|visual token 033482|>
+<|visual token 033483|>
+<|visual token 033484|>
+<|visual token 033485|>
+<|visual token 033486|>
+<|visual token 033487|>
+<|visual token 033488|>
+<|visual token 033489|>
+<|visual token 033490|>
+<|visual token 033491|>
+<|visual token 033492|>
+<|visual token 033493|>
+<|visual token 033494|>
+<|visual token 033495|>
+<|visual token 033496|>
+<|visual token 033497|>
+<|visual token 033498|>
+<|visual token 033499|>
+<|visual token 033500|>
+<|visual token 033501|>
+<|visual token 033502|>
+<|visual token 033503|>
+<|visual token 033504|>
+<|visual token 033505|>
+<|visual token 033506|>
+<|visual token 033507|>
+<|visual token 033508|>
+<|visual token 033509|>
+<|visual token 033510|>
+<|visual token 033511|>
+<|visual token 033512|>
+<|visual token 033513|>
+<|visual token 033514|>
+<|visual token 033515|>
+<|visual token 033516|>
+<|visual token 033517|>
+<|visual token 033518|>
+<|visual token 033519|>
+<|visual token 033520|>
+<|visual token 033521|>
+<|visual token 033522|>
+<|visual token 033523|>
+<|visual token 033524|>
+<|visual token 033525|>
+<|visual token 033526|>
+<|visual token 033527|>
+<|visual token 033528|>
+<|visual token 033529|>
+<|visual token 033530|>
+<|visual token 033531|>
+<|visual token 033532|>
+<|visual token 033533|>
+<|visual token 033534|>
+<|visual token 033535|>
+<|visual token 033536|>
+<|visual token 033537|>
+<|visual token 033538|>
+<|visual token 033539|>
+<|visual token 033540|>
+<|visual token 033541|>
+<|visual token 033542|>
+<|visual token 033543|>
+<|visual token 033544|>
+<|visual token 033545|>
+<|visual token 033546|>
+<|visual token 033547|>
+<|visual token 033548|>
+<|visual token 033549|>
+<|visual token 033550|>
+<|visual token 033551|>
+<|visual token 033552|>
+<|visual token 033553|>
+<|visual token 033554|>
+<|visual token 033555|>
+<|visual token 033556|>
+<|visual token 033557|>
+<|visual token 033558|>
+<|visual token 033559|>
+<|visual token 033560|>
+<|visual token 033561|>
+<|visual token 033562|>
+<|visual token 033563|>
+<|visual token 033564|>
+<|visual token 033565|>
+<|visual token 033566|>
+<|visual token 033567|>
+<|visual token 033568|>
+<|visual token 033569|>
+<|visual token 033570|>
+<|visual token 033571|>
+<|visual token 033572|>
+<|visual token 033573|>
+<|visual token 033574|>
+<|visual token 033575|>
+<|visual token 033576|>
+<|visual token 033577|>
+<|visual token 033578|>
+<|visual token 033579|>
+<|visual token 033580|>
+<|visual token 033581|>
+<|visual token 033582|>
+<|visual token 033583|>
+<|visual token 033584|>
+<|visual token 033585|>
+<|visual token 033586|>
+<|visual token 033587|>
+<|visual token 033588|>
+<|visual token 033589|>
+<|visual token 033590|>
+<|visual token 033591|>
+<|visual token 033592|>
+<|visual token 033593|>
+<|visual token 033594|>
+<|visual token 033595|>
+<|visual token 033596|>
+<|visual token 033597|>
+<|visual token 033598|>
+<|visual token 033599|>
+<|visual token 033600|>
+<|visual token 033601|>
+<|visual token 033602|>
+<|visual token 033603|>
+<|visual token 033604|>
+<|visual token 033605|>
+<|visual token 033606|>
+<|visual token 033607|>
+<|visual token 033608|>
+<|visual token 033609|>
+<|visual token 033610|>
+<|visual token 033611|>
+<|visual token 033612|>
+<|visual token 033613|>
+<|visual token 033614|>
+<|visual token 033615|>
+<|visual token 033616|>
+<|visual token 033617|>
+<|visual token 033618|>
+<|visual token 033619|>
+<|visual token 033620|>
+<|visual token 033621|>
+<|visual token 033622|>
+<|visual token 033623|>
+<|visual token 033624|>
+<|visual token 033625|>
+<|visual token 033626|>
+<|visual token 033627|>
+<|visual token 033628|>
+<|visual token 033629|>
+<|visual token 033630|>
+<|visual token 033631|>
+<|visual token 033632|>
+<|visual token 033633|>
+<|visual token 033634|>
+<|visual token 033635|>
+<|visual token 033636|>
+<|visual token 033637|>
+<|visual token 033638|>
+<|visual token 033639|>
+<|visual token 033640|>
+<|visual token 033641|>
+<|visual token 033642|>
+<|visual token 033643|>
+<|visual token 033644|>
+<|visual token 033645|>
+<|visual token 033646|>
+<|visual token 033647|>
+<|visual token 033648|>
+<|visual token 033649|>
+<|visual token 033650|>
+<|visual token 033651|>
+<|visual token 033652|>
+<|visual token 033653|>
+<|visual token 033654|>
+<|visual token 033655|>
+<|visual token 033656|>
+<|visual token 033657|>
+<|visual token 033658|>
+<|visual token 033659|>
+<|visual token 033660|>
+<|visual token 033661|>
+<|visual token 033662|>
+<|visual token 033663|>
+<|visual token 033664|>
+<|visual token 033665|>
+<|visual token 033666|>
+<|visual token 033667|>
+<|visual token 033668|>
+<|visual token 033669|>
+<|visual token 033670|>
+<|visual token 033671|>
+<|visual token 033672|>
+<|visual token 033673|>
+<|visual token 033674|>
+<|visual token 033675|>
+<|visual token 033676|>
+<|visual token 033677|>
+<|visual token 033678|>
+<|visual token 033679|>
+<|visual token 033680|>
+<|visual token 033681|>
+<|visual token 033682|>
+<|visual token 033683|>
+<|visual token 033684|>
+<|visual token 033685|>
+<|visual token 033686|>
+<|visual token 033687|>
+<|visual token 033688|>
+<|visual token 033689|>
+<|visual token 033690|>
+<|visual token 033691|>
+<|visual token 033692|>
+<|visual token 033693|>
+<|visual token 033694|>
+<|visual token 033695|>
+<|visual token 033696|>
+<|visual token 033697|>
+<|visual token 033698|>
+<|visual token 033699|>
+<|visual token 033700|>
+<|visual token 033701|>
+<|visual token 033702|>
+<|visual token 033703|>
+<|visual token 033704|>
+<|visual token 033705|>
+<|visual token 033706|>
+<|visual token 033707|>
+<|visual token 033708|>
+<|visual token 033709|>
+<|visual token 033710|>
+<|visual token 033711|>
+<|visual token 033712|>
+<|visual token 033713|>
+<|visual token 033714|>
+<|visual token 033715|>
+<|visual token 033716|>
+<|visual token 033717|>
+<|visual token 033718|>
+<|visual token 033719|>
+<|visual token 033720|>
+<|visual token 033721|>
+<|visual token 033722|>
+<|visual token 033723|>
+<|visual token 033724|>
+<|visual token 033725|>
+<|visual token 033726|>
+<|visual token 033727|>
+<|visual token 033728|>
+<|visual token 033729|>
+<|visual token 033730|>
+<|visual token 033731|>
+<|visual token 033732|>
+<|visual token 033733|>
+<|visual token 033734|>
+<|visual token 033735|>
+<|visual token 033736|>
+<|visual token 033737|>
+<|visual token 033738|>
+<|visual token 033739|>
+<|visual token 033740|>
+<|visual token 033741|>
+<|visual token 033742|>
+<|visual token 033743|>
+<|visual token 033744|>
+<|visual token 033745|>
+<|visual token 033746|>
+<|visual token 033747|>
+<|visual token 033748|>
+<|visual token 033749|>
+<|visual token 033750|>
+<|visual token 033751|>
+<|visual token 033752|>
+<|visual token 033753|>
+<|visual token 033754|>
+<|visual token 033755|>
+<|visual token 033756|>
+<|visual token 033757|>
+<|visual token 033758|>
+<|visual token 033759|>
+<|visual token 033760|>
+<|visual token 033761|>
+<|visual token 033762|>
+<|visual token 033763|>
+<|visual token 033764|>
+<|visual token 033765|>
+<|visual token 033766|>
+<|visual token 033767|>
+<|visual token 033768|>
+<|visual token 033769|>
+<|visual token 033770|>
+<|visual token 033771|>
+<|visual token 033772|>
+<|visual token 033773|>
+<|visual token 033774|>
+<|visual token 033775|>
+<|visual token 033776|>
+<|visual token 033777|>
+<|visual token 033778|>
+<|visual token 033779|>
+<|visual token 033780|>
+<|visual token 033781|>
+<|visual token 033782|>
+<|visual token 033783|>
+<|visual token 033784|>
+<|visual token 033785|>
+<|visual token 033786|>
+<|visual token 033787|>
+<|visual token 033788|>
+<|visual token 033789|>
+<|visual token 033790|>
+<|visual token 033791|>
+<|visual token 033792|>
+<|visual token 033793|>
+<|visual token 033794|>
+<|visual token 033795|>
+<|visual token 033796|>
+<|visual token 033797|>
+<|visual token 033798|>
+<|visual token 033799|>
+<|visual token 033800|>
+<|visual token 033801|>
+<|visual token 033802|>
+<|visual token 033803|>
+<|visual token 033804|>
+<|visual token 033805|>
+<|visual token 033806|>
+<|visual token 033807|>
+<|visual token 033808|>
+<|visual token 033809|>
+<|visual token 033810|>
+<|visual token 033811|>
+<|visual token 033812|>
+<|visual token 033813|>
+<|visual token 033814|>
+<|visual token 033815|>
+<|visual token 033816|>
+<|visual token 033817|>
+<|visual token 033818|>
+<|visual token 033819|>
+<|visual token 033820|>
+<|visual token 033821|>
+<|visual token 033822|>
+<|visual token 033823|>
+<|visual token 033824|>
+<|visual token 033825|>
+<|visual token 033826|>
+<|visual token 033827|>
+<|visual token 033828|>
+<|visual token 033829|>
+<|visual token 033830|>
+<|visual token 033831|>
+<|visual token 033832|>
+<|visual token 033833|>
+<|visual token 033834|>
+<|visual token 033835|>
+<|visual token 033836|>
+<|visual token 033837|>
+<|visual token 033838|>
+[… 2,560 further sequential entries, <|visual token 033839|> through <|visual token 036398|>, elided for brevity …]
+<|visual token 036399|>
+<|visual token 036400|>
+<|visual token 036401|>
+<|visual token 036402|>
+<|visual token 036403|>
+<|visual token 036404|>
+<|visual token 036405|>
+<|visual token 036406|>
+<|visual token 036407|>
+<|visual token 036408|>
+<|visual token 036409|>
+<|visual token 036410|>
+<|visual token 036411|>
+<|visual token 036412|>
+<|visual token 036413|>
+<|visual token 036414|>
+<|visual token 036415|>
+<|visual token 036416|>
+<|visual token 036417|>
+<|visual token 036418|>
+<|visual token 036419|>
+<|visual token 036420|>
+<|visual token 036421|>
+<|visual token 036422|>
+<|visual token 036423|>
+<|visual token 036424|>
+<|visual token 036425|>
+<|visual token 036426|>
+<|visual token 036427|>
+<|visual token 036428|>
+<|visual token 036429|>
+<|visual token 036430|>
+<|visual token 036431|>
+<|visual token 036432|>
+<|visual token 036433|>
+<|visual token 036434|>
+<|visual token 036435|>
+<|visual token 036436|>
+<|visual token 036437|>
+<|visual token 036438|>
+<|visual token 036439|>
+<|visual token 036440|>
+<|visual token 036441|>
+<|visual token 036442|>
+<|visual token 036443|>
+<|visual token 036444|>
+<|visual token 036445|>
+<|visual token 036446|>
+<|visual token 036447|>
+<|visual token 036448|>
+<|visual token 036449|>
+<|visual token 036450|>
+<|visual token 036451|>
+<|visual token 036452|>
+<|visual token 036453|>
+<|visual token 036454|>
+<|visual token 036455|>
+<|visual token 036456|>
+<|visual token 036457|>
+<|visual token 036458|>
+<|visual token 036459|>
+<|visual token 036460|>
+<|visual token 036461|>
+<|visual token 036462|>
+<|visual token 036463|>
+<|visual token 036464|>
+<|visual token 036465|>
+<|visual token 036466|>
+<|visual token 036467|>
+<|visual token 036468|>
+<|visual token 036469|>
+<|visual token 036470|>
+<|visual token 036471|>
+<|visual token 036472|>
+<|visual token 036473|>
+<|visual token 036474|>
+<|visual token 036475|>
+<|visual token 036476|>
+<|visual token 036477|>
+<|visual token 036478|>
+<|visual token 036479|>
+<|visual token 036480|>
+<|visual token 036481|>
+<|visual token 036482|>
+<|visual token 036483|>
+<|visual token 036484|>
+<|visual token 036485|>
+<|visual token 036486|>
+<|visual token 036487|>
+<|visual token 036488|>
+<|visual token 036489|>
+<|visual token 036490|>
+<|visual token 036491|>
+<|visual token 036492|>
+<|visual token 036493|>
+<|visual token 036494|>
+<|visual token 036495|>
+<|visual token 036496|>
+<|visual token 036497|>
+<|visual token 036498|>
+<|visual token 036499|>
+<|visual token 036500|>
+<|visual token 036501|>
+<|visual token 036502|>
+<|visual token 036503|>
+<|visual token 036504|>
+<|visual token 036505|>
+<|visual token 036506|>
+<|visual token 036507|>
+<|visual token 036508|>
+<|visual token 036509|>
+<|visual token 036510|>
+<|visual token 036511|>
+<|visual token 036512|>
+<|visual token 036513|>
+<|visual token 036514|>
+<|visual token 036515|>
+<|visual token 036516|>
+<|visual token 036517|>
+<|visual token 036518|>
+<|visual token 036519|>
+<|visual token 036520|>
+<|visual token 036521|>
+<|visual token 036522|>
+<|visual token 036523|>
+<|visual token 036524|>
+<|visual token 036525|>
+<|visual token 036526|>
+<|visual token 036527|>
+<|visual token 036528|>
+<|visual token 036529|>
+<|visual token 036530|>
+<|visual token 036531|>
+<|visual token 036532|>
+<|visual token 036533|>
+<|visual token 036534|>
+<|visual token 036535|>
+<|visual token 036536|>
+<|visual token 036537|>
+<|visual token 036538|>
+<|visual token 036539|>
+<|visual token 036540|>
+<|visual token 036541|>
+<|visual token 036542|>
+<|visual token 036543|>
+<|visual token 036544|>
+<|visual token 036545|>
+<|visual token 036546|>
+<|visual token 036547|>
+<|visual token 036548|>
+<|visual token 036549|>
+<|visual token 036550|>
+<|visual token 036551|>
+<|visual token 036552|>
+<|visual token 036553|>
+<|visual token 036554|>
+<|visual token 036555|>
+<|visual token 036556|>
+<|visual token 036557|>
+<|visual token 036558|>
+<|visual token 036559|>
+<|visual token 036560|>
+<|visual token 036561|>
+<|visual token 036562|>
+<|visual token 036563|>
+<|visual token 036564|>
+<|visual token 036565|>
+<|visual token 036566|>
+<|visual token 036567|>
+<|visual token 036568|>
+<|visual token 036569|>
+<|visual token 036570|>
+<|visual token 036571|>
+<|visual token 036572|>
+<|visual token 036573|>
+<|visual token 036574|>
+<|visual token 036575|>
+<|visual token 036576|>
+<|visual token 036577|>
+<|visual token 036578|>
+<|visual token 036579|>
+<|visual token 036580|>
+<|visual token 036581|>
+<|visual token 036582|>
+<|visual token 036583|>
+<|visual token 036584|>
+<|visual token 036585|>
+<|visual token 036586|>
+<|visual token 036587|>
+<|visual token 036588|>
+<|visual token 036589|>
+<|visual token 036590|>
+<|visual token 036591|>
+<|visual token 036592|>
+<|visual token 036593|>
+<|visual token 036594|>
+<|visual token 036595|>
+<|visual token 036596|>
+<|visual token 036597|>
+<|visual token 036598|>
+<|visual token 036599|>
+<|visual token 036600|>
+<|visual token 036601|>
+<|visual token 036602|>
+<|visual token 036603|>
+<|visual token 036604|>
+<|visual token 036605|>
+<|visual token 036606|>
+<|visual token 036607|>
+<|visual token 036608|>
+<|visual token 036609|>
+<|visual token 036610|>
+<|visual token 036611|>
+<|visual token 036612|>
+<|visual token 036613|>
+<|visual token 036614|>
+<|visual token 036615|>
+<|visual token 036616|>
+<|visual token 036617|>
+<|visual token 036618|>
+<|visual token 036619|>
+<|visual token 036620|>
+<|visual token 036621|>
+<|visual token 036622|>
+<|visual token 036623|>
+<|visual token 036624|>
+<|visual token 036625|>
+<|visual token 036626|>
+<|visual token 036627|>
+<|visual token 036628|>
+<|visual token 036629|>
+<|visual token 036630|>
+<|visual token 036631|>
+<|visual token 036632|>
+<|visual token 036633|>
+<|visual token 036634|>
+<|visual token 036635|>
+<|visual token 036636|>
+<|visual token 036637|>
+<|visual token 036638|>
+<|visual token 036639|>
+<|visual token 036640|>
+<|visual token 036641|>
+<|visual token 036642|>
+<|visual token 036643|>
+<|visual token 036644|>
+<|visual token 036645|>
+<|visual token 036646|>
+<|visual token 036647|>
+<|visual token 036648|>
+<|visual token 036649|>
+<|visual token 036650|>
+<|visual token 036651|>
+<|visual token 036652|>
+<|visual token 036653|>
+<|visual token 036654|>
+<|visual token 036655|>
+<|visual token 036656|>
+<|visual token 036657|>
+<|visual token 036658|>
+<|visual token 036659|>
+<|visual token 036660|>
+<|visual token 036661|>
+<|visual token 036662|>
+<|visual token 036663|>
+<|visual token 036664|>
+<|visual token 036665|>
+<|visual token 036666|>
+<|visual token 036667|>
+<|visual token 036668|>
+<|visual token 036669|>
+<|visual token 036670|>
+<|visual token 036671|>
+<|visual token 036672|>
+<|visual token 036673|>
+<|visual token 036674|>
+<|visual token 036675|>
+<|visual token 036676|>
+<|visual token 036677|>
+<|visual token 036678|>
+<|visual token 036679|>
+<|visual token 036680|>
+<|visual token 036681|>
+<|visual token 036682|>
+<|visual token 036683|>
+<|visual token 036684|>
+<|visual token 036685|>
+<|visual token 036686|>
+<|visual token 036687|>
+<|visual token 036688|>
+<|visual token 036689|>
+<|visual token 036690|>
+<|visual token 036691|>
+<|visual token 036692|>
+<|visual token 036693|>
+<|visual token 036694|>
+<|visual token 036695|>
+<|visual token 036696|>
+<|visual token 036697|>
+<|visual token 036698|>
+<|visual token 036699|>
+<|visual token 036700|>
+<|visual token 036701|>
+<|visual token 036702|>
+<|visual token 036703|>
+<|visual token 036704|>
+<|visual token 036705|>
+<|visual token 036706|>
+<|visual token 036707|>
+<|visual token 036708|>
+<|visual token 036709|>
+<|visual token 036710|>
+<|visual token 036711|>
+<|visual token 036712|>
+<|visual token 036713|>
+<|visual token 036714|>
+<|visual token 036715|>
+<|visual token 036716|>
+<|visual token 036717|>
+<|visual token 036718|>
+<|visual token 036719|>
+<|visual token 036720|>
+<|visual token 036721|>
+<|visual token 036722|>
+<|visual token 036723|>
+<|visual token 036724|>
+<|visual token 036725|>
+<|visual token 036726|>
+<|visual token 036727|>
+<|visual token 036728|>
+<|visual token 036729|>
+<|visual token 036730|>
+<|visual token 036731|>
+<|visual token 036732|>
+<|visual token 036733|>
+<|visual token 036734|>
+<|visual token 036735|>
+<|visual token 036736|>
+<|visual token 036737|>
+<|visual token 036738|>
+<|visual token 036739|>
+<|visual token 036740|>
+<|visual token 036741|>
+<|visual token 036742|>
+<|visual token 036743|>
+<|visual token 036744|>
+<|visual token 036745|>
+<|visual token 036746|>
+<|visual token 036747|>
+<|visual token 036748|>
+<|visual token 036749|>
+<|visual token 036750|>
+<|visual token 036751|>
+<|visual token 036752|>
+<|visual token 036753|>
+<|visual token 036754|>
+<|visual token 036755|>
+<|visual token 036756|>
+<|visual token 036757|>
+<|visual token 036758|>
+<|visual token 036759|>
+<|visual token 036760|>
+<|visual token 036761|>
+<|visual token 036762|>
+<|visual token 036763|>
+<|visual token 036764|>
+<|visual token 036765|>
+<|visual token 036766|>
+<|visual token 036767|>
+<|visual token 036768|>
+<|visual token 036769|>
+<|visual token 036770|>
+<|visual token 036771|>
+<|visual token 036772|>
+<|visual token 036773|>
+<|visual token 036774|>
+<|visual token 036775|>
+<|visual token 036776|>
+<|visual token 036777|>
+<|visual token 036778|>
+<|visual token 036779|>
+<|visual token 036780|>
+<|visual token 036781|>
+<|visual token 036782|>
+<|visual token 036783|>
+<|visual token 036784|>
+<|visual token 036785|>
+<|visual token 036786|>
+<|visual token 036787|>
+<|visual token 036788|>
+<|visual token 036789|>
+<|visual token 036790|>
+<|visual token 036791|>
+<|visual token 036792|>
+<|visual token 036793|>
+<|visual token 036794|>
+<|visual token 036795|>
+<|visual token 036796|>
+<|visual token 036797|>
+<|visual token 036798|>
+<|visual token 036799|>
+<|visual token 036800|>
+<|visual token 036801|>
+<|visual token 036802|>
+<|visual token 036803|>
+<|visual token 036804|>
+<|visual token 036805|>
+<|visual token 036806|>
+<|visual token 036807|>
+<|visual token 036808|>
+<|visual token 036809|>
+<|visual token 036810|>
+<|visual token 036811|>
+<|visual token 036812|>
+<|visual token 036813|>
+<|visual token 036814|>
+<|visual token 036815|>
+<|visual token 036816|>
+<|visual token 036817|>
+<|visual token 036818|>
+<|visual token 036819|>
+<|visual token 036820|>
+<|visual token 036821|>
+<|visual token 036822|>
+<|visual token 036823|>
+<|visual token 036824|>
+<|visual token 036825|>
+<|visual token 036826|>
+<|visual token 036827|>
+<|visual token 036828|>
+<|visual token 036829|>
+<|visual token 036830|>
+<|visual token 036831|>
+<|visual token 036832|>
+<|visual token 036833|>
+<|visual token 036834|>
+<|visual token 036835|>
+<|visual token 036836|>
+<|visual token 036837|>
+<|visual token 036838|>
+<|visual token 036839|>
+<|visual token 036840|>
+<|visual token 036841|>
+<|visual token 036842|>
+<|visual token 036843|>
+<|visual token 036844|>
+<|visual token 036845|>
+<|visual token 036846|>
+<|visual token 036847|>
+<|visual token 036848|>
+<|visual token 036849|>
+<|visual token 036850|>
+<|visual token 036851|>
+<|visual token 036852|>
+<|visual token 036853|>
+<|visual token 036854|>
+<|visual token 036855|>
+<|visual token 036856|>
+<|visual token 036857|>
+<|visual token 036858|>
+<|visual token 036859|>
+<|visual token 036860|>
+<|visual token 036861|>
+<|visual token 036862|>
+<|visual token 036863|>
+<|visual token 036864|>
+<|visual token 036865|>
+<|visual token 036866|>
+<|visual token 036867|>
+<|visual token 036868|>
+<|visual token 036869|>
+<|visual token 036870|>
+<|visual token 036871|>
+<|visual token 036872|>
+<|visual token 036873|>
+<|visual token 036874|>
+<|visual token 036875|>
+<|visual token 036876|>
+<|visual token 036877|>
+<|visual token 036878|>
+<|visual token 036879|>
+<|visual token 036880|>
+<|visual token 036881|>
+<|visual token 036882|>
+<|visual token 036883|>
+<|visual token 036884|>
+<|visual token 036885|>
+<|visual token 036886|>
+<|visual token 036887|>
+<|visual token 036888|>
+<|visual token 036889|>
+<|visual token 036890|>
+<|visual token 036891|>
+<|visual token 036892|>
+<|visual token 036893|>
+<|visual token 036894|>
+<|visual token 036895|>
+<|visual token 036896|>
+<|visual token 036897|>
+<|visual token 036898|>
+<|visual token 036899|>
+<|visual token 036900|>
+<|visual token 036901|>
+<|visual token 036902|>
+<|visual token 036903|>
+<|visual token 036904|>
+<|visual token 036905|>
+<|visual token 036906|>
+<|visual token 036907|>
+<|visual token 036908|>
+<|visual token 036909|>
+<|visual token 036910|>
+<|visual token 036911|>
+<|visual token 036912|>
+<|visual token 036913|>
+<|visual token 036914|>
+<|visual token 036915|>
+<|visual token 036916|>
+<|visual token 036917|>
+<|visual token 036918|>
+<|visual token 036919|>
+<|visual token 036920|>
+<|visual token 036921|>
+<|visual token 036922|>
+<|visual token 036923|>
+<|visual token 036924|>
+<|visual token 036925|>
+<|visual token 036926|>
+<|visual token 036927|>
+<|visual token 036928|>
+<|visual token 036929|>
+<|visual token 036930|>
+<|visual token 036931|>
+<|visual token 036932|>
+<|visual token 036933|>
+<|visual token 036934|>
+<|visual token 036935|>
+<|visual token 036936|>
+<|visual token 036937|>
+<|visual token 036938|>
+<|visual token 036939|>
+<|visual token 036940|>
+<|visual token 036941|>
+<|visual token 036942|>
+<|visual token 036943|>
+<|visual token 036944|>
+<|visual token 036945|>
+<|visual token 036946|>
+<|visual token 036947|>
+<|visual token 036948|>
+<|visual token 036949|>
+<|visual token 036950|>
+<|visual token 036951|>
+<|visual token 036952|>
+<|visual token 036953|>
+<|visual token 036954|>
+<|visual token 036955|>
+<|visual token 036956|>
+<|visual token 036957|>
+<|visual token 036958|>
+<|visual token 036959|>
+<|visual token 036960|>
+<|visual token 036961|>
+<|visual token 036962|>
+<|visual token 036963|>
+<|visual token 036964|>
+<|visual token 036965|>
+<|visual token 036966|>
+<|visual token 036967|>
+<|visual token 036968|>
+<|visual token 036969|>
+<|visual token 036970|>
+<|visual token 036971|>
+<|visual token 036972|>
+<|visual token 036973|>
+<|visual token 036974|>
+<|visual token 036975|>
+<|visual token 036976|>
+<|visual token 036977|>
+<|visual token 036978|>
+<|visual token 036979|>
+<|visual token 036980|>
+<|visual token 036981|>
+<|visual token 036982|>
+<|visual token 036983|>
+<|visual token 036984|>
+<|visual token 036985|>
+<|visual token 036986|>
+<|visual token 036987|>
+<|visual token 036988|>
+<|visual token 036989|>
+<|visual token 036990|>
+<|visual token 036991|>
+<|visual token 036992|>
+<|visual token 036993|>
+<|visual token 036994|>
+<|visual token 036995|>
+<|visual token 036996|>
+<|visual token 036997|>
+<|visual token 036998|>
+<|visual token 036999|>
+<|visual token 037000|>
+<|visual token 037001|>
+<|visual token 037002|>
+<|visual token 037003|>
+<|visual token 037004|>
+<|visual token 037005|>
+<|visual token 037006|>
+<|visual token 037007|>
+<|visual token 037008|>
+<|visual token 037009|>
+<|visual token 037010|>
+<|visual token 037011|>
+<|visual token 037012|>
+<|visual token 037013|>
+<|visual token 037014|>
+<|visual token 037015|>
+<|visual token 037016|>
+<|visual token 037017|>
+<|visual token 037018|>
+<|visual token 037019|>
+<|visual token 037020|>
+<|visual token 037021|>
+<|visual token 037022|>
+<|visual token 037023|>
+<|visual token 037024|>
+<|visual token 037025|>
+<|visual token 037026|>
+<|visual token 037027|>
+<|visual token 037028|>
+<|visual token 037029|>
+<|visual token 037030|>
+<|visual token 037031|>
+<|visual token 037032|>
+<|visual token 037033|>
+<|visual token 037034|>
+<|visual token 037035|>
+<|visual token 037036|>
+<|visual token 037037|>
+<|visual token 037038|>
+<|visual token 037039|>
+<|visual token 037040|>
+<|visual token 037041|>
+<|visual token 037042|>
+<|visual token 037043|>
+<|visual token 037044|>
+<|visual token 037045|>
+<|visual token 037046|>
+<|visual token 037047|>
+<|visual token 037048|>
+<|visual token 037049|>
+<|visual token 037050|>
+<|visual token 037051|>
+<|visual token 037052|>
+<|visual token 037053|>
+<|visual token 037054|>
+<|visual token 037055|>
+<|visual token 037056|>
+<|visual token 037057|>
+<|visual token 037058|>
+<|visual token 037059|>
+<|visual token 037060|>
+<|visual token 037061|>
+<|visual token 037062|>
+<|visual token 037063|>
+<|visual token 037064|>
+<|visual token 037065|>
+<|visual token 037066|>
+<|visual token 037067|>
+<|visual token 037068|>
+<|visual token 037069|>
+<|visual token 037070|>
+<|visual token 037071|>
+<|visual token 037072|>
+<|visual token 037073|>
+<|visual token 037074|>
+<|visual token 037075|>
+<|visual token 037076|>
+<|visual token 037077|>
+<|visual token 037078|>
+<|visual token 037079|>
+<|visual token 037080|>
+<|visual token 037081|>
+<|visual token 037082|>
+<|visual token 037083|>
+<|visual token 037084|>
+<|visual token 037085|>
+<|visual token 037086|>
+<|visual token 037087|>
+<|visual token 037088|>
+<|visual token 037089|>
+<|visual token 037090|>
+<|visual token 037091|>
+<|visual token 037092|>
+<|visual token 037093|>
+<|visual token 037094|>
+<|visual token 037095|>
+<|visual token 037096|>
+<|visual token 037097|>
+<|visual token 037098|>
+<|visual token 037099|>
+<|visual token 037100|>
+<|visual token 037101|>
+<|visual token 037102|>
+<|visual token 037103|>
+<|visual token 037104|>
+<|visual token 037105|>
+<|visual token 037106|>
+<|visual token 037107|>
+<|visual token 037108|>
+<|visual token 037109|>
+<|visual token 037110|>
+<|visual token 037111|>
+<|visual token 037112|>
+<|visual token 037113|>
+<|visual token 037114|>
+<|visual token 037115|>
+<|visual token 037116|>
+<|visual token 037117|>
+<|visual token 037118|>
+<|visual token 037119|>
+<|visual token 037120|>
+<|visual token 037121|>
+<|visual token 037122|>
+<|visual token 037123|>
+<|visual token 037124|>
+<|visual token 037125|>
+<|visual token 037126|>
+<|visual token 037127|>
+<|visual token 037128|>
+<|visual token 037129|>
+<|visual token 037130|>
+<|visual token 037131|>
+<|visual token 037132|>
+<|visual token 037133|>
+<|visual token 037134|>
+<|visual token 037135|>
+<|visual token 037136|>
+<|visual token 037137|>
+<|visual token 037138|>
+<|visual token 037139|>
+<|visual token 037140|>
+<|visual token 037141|>
+<|visual token 037142|>
+<|visual token 037143|>
+<|visual token 037144|>
+<|visual token 037145|>
+<|visual token 037146|>
+<|visual token 037147|>
+<|visual token 037148|>
+<|visual token 037149|>
+<|visual token 037150|>
+<|visual token 037151|>
+<|visual token 037152|>
+<|visual token 037153|>
+<|visual token 037154|>
+<|visual token 037155|>
+<|visual token 037156|>
+<|visual token 037157|>
+<|visual token 037158|>
+<|visual token 037159|>
+<|visual token 037160|>
+<|visual token 037161|>
+<|visual token 037162|>
+<|visual token 037163|>
+<|visual token 037164|>
+<|visual token 037165|>
+<|visual token 037166|>
+<|visual token 037167|>
+<|visual token 037168|>
+<|visual token 037169|>
+<|visual token 037170|>
+<|visual token 037171|>
+<|visual token 037172|>
+<|visual token 037173|>
+<|visual token 037174|>
+<|visual token 037175|>
+<|visual token 037176|>
+<|visual token 037177|>
+<|visual token 037178|>
+<|visual token 037179|>
+<|visual token 037180|>
+<|visual token 037181|>
+<|visual token 037182|>
+<|visual token 037183|>
+<|visual token 037184|>
+<|visual token 037185|>
+<|visual token 037186|>
+<|visual token 037187|>
+<|visual token 037188|>
+<|visual token 037189|>
+<|visual token 037190|>
+<|visual token 037191|>
+<|visual token 037192|>
+<|visual token 037193|>
+<|visual token 037194|>
+<|visual token 037195|>
+<|visual token 037196|>
+<|visual token 037197|>
+<|visual token 037198|>
+<|visual token 037199|>
+<|visual token 037200|>
+<|visual token 037201|>
+<|visual token 037202|>
+<|visual token 037203|>
+<|visual token 037204|>
+<|visual token 037205|>
+<|visual token 037206|>
+<|visual token 037207|>
+<|visual token 037208|>
+<|visual token 037209|>
+<|visual token 037210|>
+<|visual token 037211|>
+<|visual token 037212|>
+<|visual token 037213|>
+<|visual token 037214|>
+<|visual token 037215|>
+<|visual token 037216|>
+<|visual token 037217|>
+<|visual token 037218|>
+<|visual token 037219|>
+<|visual token 037220|>
+<|visual token 037221|>
+<|visual token 037222|>
+<|visual token 037223|>
+<|visual token 037224|>
+<|visual token 037225|>
+<|visual token 037226|>
+<|visual token 037227|>
+<|visual token 037228|>
+<|visual token 037229|>
+<|visual token 037230|>
+<|visual token 037231|>
+<|visual token 037232|>
+<|visual token 037233|>
+<|visual token 037234|>
+<|visual token 037235|>
+<|visual token 037236|>
+<|visual token 037237|>
+<|visual token 037238|>
+<|visual token 037239|>
+<|visual token 037240|>
+<|visual token 037241|>
+<|visual token 037242|>
+<|visual token 037243|>
+<|visual token 037244|>
+<|visual token 037245|>
+<|visual token 037246|>
+<|visual token 037247|>
+<|visual token 037248|>
+<|visual token 037249|>
+<|visual token 037250|>
+<|visual token 037251|>
+<|visual token 037252|>
+<|visual token 037253|>
+<|visual token 037254|>
+<|visual token 037255|>
+<|visual token 037256|>
+<|visual token 037257|>
+<|visual token 037258|>
+<|visual token 037259|>
+<|visual token 037260|>
+<|visual token 037261|>
+<|visual token 037262|>
+<|visual token 037263|>
+<|visual token 037264|>
+<|visual token 037265|>
+<|visual token 037266|>
+<|visual token 037267|>
+<|visual token 037268|>
+<|visual token 037269|>
+<|visual token 037270|>
+<|visual token 037271|>
+<|visual token 037272|>
+<|visual token 037273|>
+<|visual token 037274|>
+<|visual token 037275|>
+<|visual token 037276|>
+<|visual token 037277|>
+<|visual token 037278|>
+<|visual token 037279|>
+<|visual token 037280|>
+<|visual token 037281|>
+<|visual token 037282|>
+<|visual token 037283|>
+<|visual token 037284|>
+<|visual token 037285|>
+<|visual token 037286|>
+<|visual token 037287|>
+<|visual token 037288|>
+<|visual token 037289|>
+<|visual token 037290|>
+<|visual token 037291|>
+<|visual token 037292|>
+<|visual token 037293|>
+<|visual token 037294|>
+<|visual token 037295|>
+<|visual token 037296|>
+<|visual token 037297|>
+<|visual token 037298|>
+<|visual token 037299|>
+<|visual token 037300|>
+<|visual token 037301|>
+<|visual token 037302|>
+<|visual token 037303|>
+<|visual token 037304|>
+<|visual token 037305|>
+<|visual token 037306|>
+<|visual token 037307|>
+<|visual token 037308|>
+<|visual token 037309|>
+<|visual token 037310|>
+<|visual token 037311|>
+<|visual token 037312|>
+<|visual token 037313|>
+<|visual token 037314|>
+<|visual token 037315|>
+<|visual token 037316|>
+<|visual token 037317|>
+<|visual token 037318|>
+<|visual token 037319|>
+<|visual token 037320|>
+<|visual token 037321|>
+<|visual token 037322|>
+<|visual token 037323|>
+<|visual token 037324|>
+<|visual token 037325|>
+<|visual token 037326|>
+<|visual token 037327|>
+<|visual token 037328|>
+<|visual token 037329|>
+<|visual token 037330|>
+<|visual token 037331|>
+<|visual token 037332|>
+<|visual token 037333|>
+<|visual token 037334|>
+<|visual token 037335|>
+<|visual token 037336|>
+<|visual token 037337|>
+<|visual token 037338|>
+<|visual token 037339|>
+<|visual token 037340|>
+<|visual token 037341|>
+<|visual token 037342|>
+<|visual token 037343|>
+<|visual token 037344|>
+<|visual token 037345|>
+<|visual token 037346|>
+<|visual token 037347|>
+<|visual token 037348|>
+<|visual token 037349|>
+<|visual token 037350|>
+<|visual token 037351|>
+<|visual token 037352|>
+<|visual token 037353|>
+<|visual token 037354|>
+<|visual token 037355|>
+<|visual token 037356|>
+<|visual token 037357|>
+<|visual token 037358|>
+<|visual token 037359|>
+<|visual token 037360|>
+<|visual token 037361|>
+<|visual token 037362|>
+<|visual token 037363|>
+<|visual token 037364|>
+<|visual token 037365|>
+<|visual token 037366|>
+<|visual token 037367|>
+<|visual token 037368|>
+<|visual token 037369|>
+<|visual token 037370|>
+<|visual token 037371|>
+<|visual token 037372|>
+<|visual token 037373|>
+<|visual token 037374|>
+<|visual token 037375|>
+<|visual token 037376|>
+<|visual token 037377|>
+<|visual token 037378|>
+<|visual token 037379|>
+<|visual token 037380|>
+<|visual token 037381|>
+<|visual token 037382|>
+<|visual token 037383|>
+<|visual token 037384|>
+<|visual token 037385|>
+<|visual token 037386|>
+<|visual token 037387|>
+<|visual token 037388|>
+<|visual token 037389|>
+<|visual token 037390|>
+<|visual token 037391|>
+<|visual token 037392|>
+<|visual token 037393|>
+<|visual token 037394|>
+<|visual token 037395|>
+<|visual token 037396|>
+<|visual token 037397|>
+<|visual token 037398|>
+<|visual token 037399|>
+<|visual token 037400|>
+<|visual token 037401|>
+<|visual token 037402|>
+<|visual token 037403|>
+<|visual token 037404|>
+<|visual token 037405|>
+<|visual token 037406|>
+<|visual token 037407|>
+<|visual token 037408|>
+<|visual token 037409|>
+<|visual token 037410|>
+<|visual token 037411|>
+<|visual token 037412|>
+<|visual token 037413|>
+<|visual token 037414|>
+<|visual token 037415|>
+<|visual token 037416|>
+<|visual token 037417|>
+<|visual token 037418|>
+<|visual token 037419|>
+<|visual token 037420|>
+<|visual token 037421|>
+<|visual token 037422|>
+<|visual token 037423|>
+<|visual token 037424|>
+<|visual token 037425|>
+<|visual token 037426|>
+<|visual token 037427|>
+<|visual token 037428|>
+<|visual token 037429|>
+<|visual token 037430|>
+<|visual token 037431|>
+<|visual token 037432|>
+<|visual token 037433|>
+<|visual token 037434|>
+<|visual token 037435|>
+<|visual token 037436|>
+<|visual token 037437|>
+<|visual token 037438|>
+<|visual token 037439|>
+<|visual token 037440|>
+<|visual token 037441|>
+<|visual token 037442|>
+<|visual token 037443|>
+<|visual token 037444|>
+<|visual token 037445|>
+<|visual token 037446|>
+<|visual token 037447|>
+<|visual token 037448|>
+<|visual token 037449|>
+<|visual token 037450|>
+<|visual token 037451|>
+<|visual token 037452|>
+<|visual token 037453|>
+<|visual token 037454|>
+<|visual token 037455|>
+<|visual token 037456|>
+<|visual token 037457|>
+<|visual token 037458|>
+<|visual token 037459|>
+<|visual token 037460|>
+<|visual token 037461|>
+<|visual token 037462|>
+<|visual token 037463|>
+<|visual token 037464|>
+<|visual token 037465|>
+<|visual token 037466|>
+<|visual token 037467|>
+<|visual token 037468|>
+<|visual token 037469|>
+<|visual token 037470|>
+<|visual token 037471|>
+<|visual token 037472|>
+<|visual token 037473|>
+<|visual token 037474|>
+<|visual token 037475|>
+<|visual token 037476|>
+<|visual token 037477|>
+<|visual token 037478|>
+<|visual token 037479|>
+<|visual token 037480|>
+<|visual token 037481|>
+<|visual token 037482|>
+<|visual token 037483|>
+<|visual token 037484|>
+<|visual token 037485|>
+<|visual token 037486|>
+<|visual token 037487|>
+<|visual token 037488|>
+<|visual token 037489|>
+<|visual token 037490|>
+<|visual token 037491|>
+<|visual token 037492|>
+<|visual token 037493|>
+<|visual token 037494|>
+<|visual token 037495|>
+<|visual token 037496|>
+<|visual token 037497|>
+<|visual token 037498|>
+<|visual token 037499|>
+<|visual token 037500|>
+<|visual token 037501|>
+<|visual token 037502|>
+<|visual token 037503|>
+<|visual token 037504|>
+<|visual token 037505|>
+<|visual token 037506|>
+<|visual token 037507|>
+<|visual token 037508|>
+<|visual token 037509|>
+<|visual token 037510|>
+<|visual token 037511|>
+<|visual token 037512|>
+<|visual token 037513|>
+<|visual token 037514|>
+<|visual token 037515|>
+<|visual token 037516|>
+<|visual token 037517|>
+<|visual token 037518|>
+<|visual token 037519|>
+<|visual token 037520|>
+<|visual token 037521|>
+<|visual token 037522|>
+<|visual token 037523|>
+<|visual token 037524|>
+<|visual token 037525|>
+<|visual token 037526|>
+<|visual token 037527|>
+<|visual token 037528|>
+<|visual token 037529|>
+<|visual token 037530|>
+<|visual token 037531|>
+<|visual token 037532|>
+<|visual token 037533|>
+<|visual token 037534|>
+<|visual token 037535|>
+<|visual token 037536|>
+<|visual token 037537|>
+<|visual token 037538|>
+<|visual token 037539|>
+<|visual token 037540|>
+<|visual token 037541|>
+<|visual token 037542|>
+<|visual token 037543|>
+<|visual token 037544|>
+<|visual token 037545|>
+<|visual token 037546|>
+<|visual token 037547|>
+<|visual token 037548|>
+<|visual token 037549|>
+<|visual token 037550|>
+<|visual token 037551|>
+<|visual token 037552|>
+<|visual token 037553|>
+<|visual token 037554|>
+<|visual token 037555|>
+<|visual token 037556|>
+<|visual token 037557|>
+<|visual token 037558|>
+<|visual token 037559|>
+<|visual token 037560|>
+<|visual token 037561|>
+<|visual token 037562|>
+<|visual token 037563|>
+<|visual token 037564|>
+<|visual token 037565|>
+<|visual token 037566|>
+<|visual token 037567|>
+<|visual token 037568|>
+<|visual token 037569|>
+<|visual token 037570|>
+<|visual token 037571|>
+<|visual token 037572|>
+<|visual token 037573|>
+<|visual token 037574|>
+<|visual token 037575|>
+<|visual token 037576|>
+<|visual token 037577|>
+<|visual token 037578|>
+<|visual token 037579|>
+<|visual token 037580|>
+<|visual token 037581|>
+<|visual token 037582|>
+<|visual token 037583|>
+<|visual token 037584|>
+<|visual token 037585|>
+<|visual token 037586|>
+<|visual token 037587|>
+<|visual token 037588|>
+<|visual token 037589|>
+<|visual token 037590|>
+<|visual token 037591|>
+<|visual token 037592|>
+<|visual token 037593|>
+<|visual token 037594|>
+<|visual token 037595|>
+<|visual token 037596|>
+<|visual token 037597|>
+<|visual token 037598|>
+<|visual token 037599|>
+<|visual token 037600|>
+<|visual token 037601|>
+<|visual token 037602|>
+<|visual token 037603|>
+<|visual token 037604|>
+<|visual token 037605|>
+<|visual token 037606|>
+<|visual token 037607|>
+<|visual token 037608|>
+<|visual token 037609|>
+<|visual token 037610|>
+<|visual token 037611|>
+<|visual token 037612|>
+<|visual token 037613|>
+<|visual token 037614|>
+<|visual token 037615|>
+<|visual token 037616|>
+<|visual token 037617|>
+<|visual token 037618|>
+<|visual token 037619|>
+<|visual token 037620|>
+<|visual token 037621|>
+<|visual token 037622|>
+<|visual token 037623|>
+<|visual token 037624|>
+<|visual token 037625|>
+<|visual token 037626|>
+<|visual token 037627|>
+<|visual token 037628|>
+<|visual token 037629|>
+<|visual token 037630|>
+<|visual token 037631|>
+<|visual token 037632|>
+<|visual token 037633|>
+<|visual token 037634|>
+<|visual token 037635|>
+<|visual token 037636|>
+<|visual token 037637|>
+<|visual token 037638|>
+<|visual token 037639|>
+<|visual token 037640|>
+<|visual token 037641|>
+<|visual token 037642|>
+<|visual token 037643|>
+<|visual token 037644|>
+<|visual token 037645|>
+<|visual token 037646|>
+<|visual token 037647|>
+<|visual token 037648|>
+<|visual token 037649|>
+<|visual token 037650|>
+<|visual token 037651|>
+<|visual token 037652|>
+<|visual token 037653|>
+<|visual token 037654|>
+<|visual token 037655|>
+<|visual token 037656|>
+<|visual token 037657|>
+<|visual token 037658|>
+<|visual token 037659|>
+<|visual token 037660|>
+<|visual token 037661|>
+<|visual token 037662|>
+<|visual token 037663|>
+<|visual token 037664|>
+<|visual token 037665|>
+<|visual token 037666|>
+<|visual token 037667|>
+<|visual token 037668|>
+<|visual token 037669|>
+<|visual token 037670|>
+<|visual token 037671|>
+<|visual token 037672|>
+<|visual token 037673|>
+<|visual token 037674|>
+<|visual token 037675|>
+<|visual token 037676|>
+<|visual token 037677|>
+<|visual token 037678|>
+<|visual token 037679|>
+<|visual token 037680|>
+<|visual token 037681|>
+<|visual token 037682|>
+<|visual token 037683|>
+<|visual token 037684|>
+<|visual token 037685|>
+<|visual token 037686|>
+<|visual token 037687|>
+<|visual token 037688|>
+<|visual token 037689|>
+<|visual token 037690|>
+<|visual token 037691|>
+<|visual token 037692|>
+<|visual token 037693|>
+<|visual token 037694|>
+<|visual token 037695|>
+<|visual token 037696|>
+<|visual token 037697|>
+<|visual token 037698|>
+<|visual token 037699|>
+<|visual token 037700|>
+<|visual token 037701|>
+<|visual token 037702|>
+<|visual token 037703|>
+<|visual token 037704|>
+<|visual token 037705|>
+<|visual token 037706|>
+<|visual token 037707|>
+<|visual token 037708|>
+<|visual token 037709|>
+<|visual token 037710|>
+<|visual token 037711|>
+<|visual token 037712|>
+<|visual token 037713|>
+<|visual token 037714|>
+<|visual token 037715|>
+<|visual token 037716|>
+<|visual token 037717|>
+<|visual token 037718|>
+<|visual token 037719|>
+<|visual token 037720|>
+<|visual token 037721|>
+<|visual token 037722|>
+<|visual token 037723|>
+<|visual token 037724|>
+<|visual token 037725|>
+<|visual token 037726|>
+<|visual token 037727|>
+<|visual token 037728|>
+<|visual token 037729|>
+<|visual token 037730|>
+<|visual token 037731|>
+<|visual token 037732|>
+<|visual token 037733|>
+<|visual token 037734|>
+<|visual token 037735|>
+<|visual token 037736|>
+<|visual token 037737|>
+<|visual token 037738|>
+<|visual token 037739|>
+<|visual token 037740|>
+<|visual token 037741|>
+<|visual token 037742|>
+<|visual token 037743|>
+<|visual token 037744|>
+<|visual token 037745|>
+<|visual token 037746|>
+<|visual token 037747|>
+<|visual token 037748|>
+<|visual token 037749|>
+<|visual token 037750|>
+<|visual token 037751|>
+<|visual token 037752|>
+<|visual token 037753|>
+<|visual token 037754|>
+<|visual token 037755|>
+<|visual token 037756|>
+<|visual token 037757|>
+<|visual token 037758|>
+<|visual token 037759|>
+<|visual token 037760|>
+<|visual token 037761|>
+<|visual token 037762|>
+<|visual token 037763|>
+<|visual token 037764|>
+<|visual token 037765|>
+<|visual token 037766|>
+<|visual token 037767|>
+<|visual token 037768|>
+<|visual token 037769|>
+<|visual token 037770|>
+<|visual token 037771|>
+<|visual token 037772|>
+<|visual token 037773|>
+<|visual token 037774|>
+<|visual token 037775|>
+<|visual token 037776|>
+<|visual token 037777|>
+<|visual token 037778|>
+<|visual token 037779|>
+<|visual token 037780|>
+<|visual token 037781|>
+<|visual token 037782|>
+<|visual token 037783|>
+<|visual token 037784|>
+<|visual token 037785|>
+<|visual token 037786|>
+<|visual token 037787|>
+<|visual token 037788|>
+<|visual token 037789|>
+<|visual token 037790|>
+<|visual token 037791|>
+<|visual token 037792|>
+<|visual token 037793|>
+<|visual token 037794|>
+<|visual token 037795|>
+<|visual token 037796|>
+<|visual token 037797|>
+<|visual token 037798|>
+<|visual token 037799|>
+<|visual token 037800|>
+<|visual token 037801|>
+<|visual token 037802|>
+<|visual token 037803|>
+<|visual token 037804|>
+<|visual token 037805|>
+<|visual token 037806|>
+<|visual token 037807|>
+<|visual token 037808|>
+<|visual token 037809|>
+<|visual token 037810|>
+<|visual token 037811|>
+<|visual token 037812|>
+<|visual token 037813|>
+<|visual token 037814|>
+<|visual token 037815|>
+<|visual token 037816|>
+<|visual token 037817|>
+<|visual token 037818|>
+<|visual token 037819|>
+<|visual token 037820|>
+<|visual token 037821|>
+<|visual token 037822|>
+<|visual token 037823|>
+<|visual token 037824|>
+<|visual token 037825|>
+<|visual token 037826|>
+<|visual token 037827|>
+<|visual token 037828|>
+<|visual token 037829|>
+<|visual token 037830|>
+<|visual token 037831|>
+<|visual token 037832|>
+<|visual token 037833|>
+<|visual token 037834|>
+<|visual token 037835|>
+<|visual token 037836|>
+<|visual token 037837|>
+<|visual token 037838|>
+<|visual token 037839|>
+<|visual token 037840|>
+<|visual token 037841|>
+<|visual token 037842|>
+<|visual token 037843|>
+<|visual token 037844|>
+<|visual token 037845|>
+<|visual token 037846|>
+<|visual token 037847|>
+<|visual token 037848|>
+<|visual token 037849|>
+<|visual token 037850|>
+<|visual token 037851|>
+<|visual token 037852|>
+<|visual token 037853|>
+<|visual token 037854|>
+<|visual token 037855|>
+<|visual token 037856|>
+<|visual token 037857|>
+<|visual token 037858|>
+<|visual token 037859|>
+<|visual token 037860|>
+<|visual token 037861|>
+<|visual token 037862|>
+<|visual token 037863|>
+<|visual token 037864|>
+<|visual token 037865|>
+<|visual token 037866|>
+<|visual token 037867|>
+<|visual token 037868|>
+<|visual token 037869|>
+<|visual token 037870|>
+<|visual token 037871|>
+<|visual token 037872|>
+<|visual token 037873|>
+<|visual token 037874|>
+<|visual token 037875|>
+<|visual token 037876|>
+<|visual token 037877|>
+<|visual token 037878|>
+<|visual token 037879|>
+<|visual token 037880|>
+<|visual token 037881|>
+<|visual token 037882|>
+<|visual token 037883|>
+<|visual token 037884|>
+<|visual token 037885|>
+<|visual token 037886|>
+<|visual token 037887|>
+<|visual token 037888|>
+<|visual token 037889|>
+<|visual token 037890|>
+<|visual token 037891|>
+<|visual token 037892|>
+<|visual token 037893|>
+<|visual token 037894|>
+<|visual token 037895|>
+<|visual token 037896|>
+<|visual token 037897|>
+<|visual token 037898|>
+<|visual token 037899|>
+<|visual token 037900|>
+<|visual token 037901|>
+<|visual token 037902|>
+<|visual token 037903|>
+<|visual token 037904|>
+<|visual token 037905|>
+<|visual token 037906|>
+<|visual token 037907|>
+<|visual token 037908|>
+<|visual token 037909|>
+<|visual token 037910|>
+<|visual token 037911|>
+<|visual token 037912|>
+<|visual token 037913|>
+<|visual token 037914|>
+<|visual token 037915|>
+<|visual token 037916|>
+<|visual token 037917|>
+<|visual token 037918|>
+<|visual token 037919|>
+<|visual token 037920|>
+<|visual token 037921|>
+<|visual token 037922|>
+<|visual token 037923|>
+<|visual token 037924|>
+<|visual token 037925|>
+<|visual token 037926|>
+<|visual token 037927|>
+<|visual token 037928|>
+<|visual token 037929|>
+<|visual token 037930|>
+<|visual token 037931|>
+<|visual token 037932|>
+<|visual token 037933|>
+<|visual token 037934|>
+<|visual token 037935|>
+<|visual token 037936|>
+<|visual token 037937|>
+<|visual token 037938|>
+<|visual token 037939|>
+<|visual token 037940|>
+<|visual token 037941|>
+<|visual token 037942|>
+<|visual token 037943|>
+<|visual token 037944|>
+<|visual token 037945|>
+<|visual token 037946|>
+<|visual token 037947|>
+<|visual token 037948|>
+<|visual token 037949|>
+<|visual token 037950|>
+<|visual token 037951|>
+<|visual token 037952|>
+<|visual token 037953|>
+<|visual token 037954|>
+<|visual token 037955|>
+<|visual token 037956|>
+<|visual token 037957|>
+<|visual token 037958|>
+<|visual token 037959|>
+<|visual token 037960|>
+<|visual token 037961|>
+<|visual token 037962|>
+<|visual token 037963|>
+<|visual token 037964|>
+<|visual token 037965|>
+<|visual token 037966|>
+<|visual token 037967|>
+<|visual token 037968|>
+<|visual token 037969|>
+<|visual token 037970|>
+<|visual token 037971|>
+<|visual token 037972|>
+<|visual token 037973|>
+<|visual token 037974|>
+<|visual token 037975|>
+<|visual token 037976|>
+<|visual token 037977|>
+<|visual token 037978|>
+<|visual token 037979|>
+<|visual token 037980|>
+<|visual token 037981|>
+<|visual token 037982|>
+<|visual token 037983|>
+<|visual token 037984|>
+<|visual token 037985|>
+<|visual token 037986|>
+<|visual token 037987|>
+<|visual token 037988|>
+<|visual token 037989|>
+<|visual token 037990|>
+<|visual token 037991|>
+<|visual token 037992|>
+<|visual token 037993|>
+<|visual token 037994|>
+<|visual token 037995|>
+<|visual token 037996|>
+<|visual token 037997|>
+<|visual token 037998|>
+<|visual token 037999|>
+<|visual token 038000|>
+<|visual token 038001|>
+<|visual token 038002|>
+<|visual token 038003|>
+<|visual token 038004|>
+<|visual token 038005|>
+<|visual token 038006|>
+<|visual token 038007|>
+<|visual token 038008|>
+<|visual token 038009|>
+<|visual token 038010|>
+<|visual token 038011|>
+<|visual token 038012|>
+<|visual token 038013|>
+<|visual token 038014|>
+<|visual token 038015|>
+<|visual token 038016|>
+<|visual token 038017|>
+<|visual token 038018|>
+<|visual token 038019|>
+<|visual token 038020|>
+<|visual token 038021|>
+<|visual token 038022|>
+<|visual token 038023|>
+<|visual token 038024|>
+<|visual token 038025|>
+<|visual token 038026|>
+<|visual token 038027|>
+<|visual token 038028|>
+<|visual token 038029|>
+<|visual token 038030|>
+<|visual token 038031|>
+<|visual token 038032|>
+<|visual token 038033|>
+<|visual token 038034|>
+<|visual token 038035|>
+<|visual token 038036|>
+<|visual token 038037|>
+<|visual token 038038|>
+<|visual token 038039|>
+<|visual token 038040|>
+<|visual token 038041|>
+<|visual token 038042|>
+<|visual token 038043|>
+<|visual token 038044|>
+<|visual token 038045|>
+<|visual token 038046|>
+<|visual token 038047|>
+<|visual token 038048|>
+<|visual token 038049|>
+<|visual token 038050|>
+<|visual token 038051|>
+<|visual token 038052|>
+<|visual token 038053|>
+<|visual token 038054|>
+<|visual token 038055|>
+<|visual token 038056|>
+<|visual token 038057|>
+<|visual token 038058|>
+<|visual token 038059|>
+<|visual token 038060|>
+<|visual token 038061|>
+<|visual token 038062|>
+<|visual token 038063|>
+<|visual token 038064|>
+<|visual token 038065|>
+<|visual token 038066|>
+<|visual token 038067|>
+<|visual token 038068|>
+<|visual token 038069|>
+<|visual token 038070|>
+<|visual token 038071|>
+<|visual token 038072|>
+<|visual token 038073|>
+<|visual token 038074|>
+<|visual token 038075|>
+<|visual token 038076|>
+<|visual token 038077|>
+<|visual token 038078|>
+<|visual token 038079|>
+<|visual token 038080|>
+<|visual token 038081|>
+<|visual token 038082|>
+<|visual token 038083|>
+<|visual token 038084|>
+<|visual token 038085|>
+<|visual token 038086|>
+<|visual token 038087|>
+<|visual token 038088|>
+<|visual token 038089|>
+<|visual token 038090|>
+<|visual token 038091|>
+<|visual token 038092|>
+<|visual token 038093|>
+<|visual token 038094|>
+<|visual token 038095|>
+<|visual token 038096|>
+<|visual token 038097|>
+<|visual token 038098|>
+<|visual token 038099|>
+<|visual token 038100|>
+<|visual token 038101|>
+<|visual token 038102|>
+<|visual token 038103|>
+<|visual token 038104|>
+<|visual token 038105|>
+<|visual token 038106|>
+<|visual token 038107|>
+<|visual token 038108|>
+<|visual token 038109|>
+<|visual token 038110|>
+<|visual token 038111|>
+<|visual token 038112|>
+<|visual token 038113|>
+<|visual token 038114|>
+<|visual token 038115|>
+<|visual token 038116|>
+<|visual token 038117|>
+<|visual token 038118|>
+<|visual token 038119|>
+<|visual token 038120|>
+<|visual token 038121|>
+<|visual token 038122|>
+<|visual token 038123|>
+<|visual token 038124|>
+<|visual token 038125|>
+<|visual token 038126|>
+<|visual token 038127|>
+<|visual token 038128|>
+<|visual token 038129|>
+<|visual token 038130|>
+<|visual token 038131|>
+<|visual token 038132|>
+<|visual token 038133|>
+<|visual token 038134|>
+<|visual token 038135|>
+<|visual token 038136|>
+<|visual token 038137|>
+<|visual token 038138|>
+<|visual token 038139|>
+<|visual token 038140|>
+<|visual token 038141|>
+<|visual token 038142|>
+<|visual token 038143|>
+<|visual token 038144|>
+<|visual token 038145|>
+<|visual token 038146|>
+<|visual token 038147|>
+<|visual token 038148|>
+<|visual token 038149|>
+<|visual token 038150|>
+<|visual token 038151|>
+<|visual token 038152|>
+<|visual token 038153|>
+<|visual token 038154|>
+<|visual token 038155|>
+<|visual token 038156|>
+<|visual token 038157|>
+<|visual token 038158|>
+<|visual token 038159|>
+<|visual token 038160|>
+<|visual token 038161|>
+<|visual token 038162|>
+<|visual token 038163|>
+<|visual token 038164|>
+<|visual token 038165|>
+<|visual token 038166|>
+<|visual token 038167|>
+<|visual token 038168|>
+<|visual token 038169|>
+<|visual token 038170|>
+<|visual token 038171|>
+<|visual token 038172|>
+<|visual token 038173|>
+<|visual token 038174|>
+<|visual token 038175|>
+<|visual token 038176|>
+<|visual token 038177|>
+<|visual token 038178|>
+<|visual token 038179|>
+<|visual token 038180|>
+<|visual token 038181|>
+<|visual token 038182|>
+<|visual token 038183|>
+<|visual token 038184|>
+<|visual token 038185|>
+<|visual token 038186|>
+<|visual token 038187|>
+<|visual token 038188|>
+<|visual token 038189|>
+<|visual token 038190|>
+<|visual token 038191|>
+<|visual token 038192|>
+<|visual token 038193|>
+<|visual token 038194|>
+<|visual token 038195|>
+<|visual token 038196|>
+<|visual token 038197|>
+<|visual token 038198|>
+<|visual token 038199|>
+<|visual token 038200|>
+<|visual token 038201|>
+<|visual token 038202|>
+<|visual token 038203|>
+<|visual token 038204|>
+<|visual token 038205|>
+<|visual token 038206|>
+<|visual token 038207|>
+<|visual token 038208|>
+<|visual token 038209|>
+<|visual token 038210|>
+<|visual token 038211|>
+<|visual token 038212|>
+<|visual token 038213|>
+<|visual token 038214|>
+<|visual token 038215|>
+<|visual token 038216|>
+<|visual token 038217|>
+<|visual token 038218|>
+<|visual token 038219|>
+<|visual token 038220|>
+<|visual token 038221|>
+<|visual token 038222|>
+<|visual token 038223|>
+<|visual token 038224|>
+<|visual token 038225|>
+<|visual token 038226|>
+<|visual token 038227|>
+<|visual token 038228|>
+<|visual token 038229|>
+<|visual token 038230|>
+<|visual token 038231|>
+<|visual token 038232|>
+<|visual token 038233|>
+<|visual token 038234|>
+<|visual token 038235|>
+<|visual token 038236|>
+<|visual token 038237|>
+<|visual token 038238|>
+<|visual token 038239|>
+<|visual token 038240|>
+<|visual token 038241|>
+<|visual token 038242|>
+<|visual token 038243|>
+<|visual token 038244|>
+<|visual token 038245|>
+<|visual token 038246|>
+<|visual token 038247|>
+<|visual token 038248|>
+<|visual token 038249|>
+<|visual token 038250|>
+<|visual token 038251|>
+<|visual token 038252|>
+<|visual token 038253|>
+<|visual token 038254|>
+<|visual token 038255|>
+<|visual token 038256|>
+<|visual token 038257|>
+<|visual token 038258|>
+<|visual token 038259|>
+<|visual token 038260|>
+<|visual token 038261|>
+<|visual token 038262|>
+<|visual token 038263|>
+<|visual token 038264|>
+<|visual token 038265|>
+<|visual token 038266|>
+<|visual token 038267|>
+<|visual token 038268|>
+<|visual token 038269|>
+<|visual token 038270|>
+<|visual token 038271|>
+<|visual token 038272|>
+<|visual token 038273|>
+<|visual token 038274|>
+<|visual token 038275|>
+<|visual token 038276|>
+<|visual token 038277|>
+<|visual token 038278|>
+<|visual token 038279|>
+<|visual token 038280|>
+<|visual token 038281|>
+<|visual token 038282|>
+<|visual token 038283|>
+<|visual token 038284|>
+<|visual token 038285|>
+<|visual token 038286|>
+<|visual token 038287|>
+<|visual token 038288|>
+<|visual token 038289|>
+<|visual token 038290|>
+<|visual token 038291|>
+<|visual token 038292|>
+<|visual token 038293|>
+<|visual token 038294|>
+<|visual token 038295|>
+<|visual token 038296|>
+<|visual token 038297|>
+<|visual token 038298|>
+<|visual token 038299|>
+<|visual token 038300|>
+<|visual token 038301|>
+<|visual token 038302|>
+<|visual token 038303|>
+<|visual token 038304|>
+<|visual token 038305|>
+<|visual token 038306|>
+<|visual token 038307|>
+<|visual token 038308|>
+<|visual token 038309|>
+<|visual token 038310|>
+<|visual token 038311|>
+<|visual token 038312|>
+<|visual token 038313|>
+<|visual token 038314|>
+<|visual token 038315|>
+<|visual token 038316|>
+<|visual token 038317|>
+<|visual token 038318|>
+<|visual token 038319|>
+<|visual token 038320|>
+<|visual token 038321|>
+<|visual token 038322|>
+<|visual token 038323|>
+<|visual token 038324|>
+<|visual token 038325|>
+<|visual token 038326|>
+<|visual token 038327|>
+<|visual token 038328|>
+<|visual token 038329|>
+<|visual token 038330|>
+<|visual token 038331|>
+<|visual token 038332|>
+<|visual token 038333|>
+<|visual token 038334|>
+<|visual token 038335|>
+<|visual token 038336|>
+<|visual token 038337|>
+<|visual token 038338|>
+<|visual token 038339|>
+<|visual token 038340|>
+<|visual token 038341|>
+<|visual token 038342|>
+<|visual token 038343|>
+<|visual token 038344|>
+<|visual token 038345|>
+<|visual token 038346|>
+<|visual token 038347|>
+<|visual token 038348|>
+<|visual token 038349|>
+<|visual token 038350|>
+<|visual token 038351|>
+<|visual token 038352|>
+<|visual token 038353|>
+<|visual token 038354|>
+<|visual token 038355|>
+<|visual token 038356|>
+<|visual token 038357|>
+<|visual token 038358|>
+<|visual token 038359|>
+<|visual token 038360|>
+<|visual token 038361|>
+<|visual token 038362|>
+<|visual token 038363|>
+<|visual token 038364|>
+<|visual token 038365|>
+<|visual token 038366|>
+<|visual token 038367|>
+<|visual token 038368|>
+<|visual token 038369|>
+<|visual token 038370|>
+<|visual token 038371|>
+<|visual token 038372|>
+<|visual token 038373|>
+<|visual token 038374|>
+<|visual token 038375|>
+<|visual token 038376|>
+<|visual token 038377|>
+<|visual token 038378|>
+<|visual token 038379|>
+<|visual token 038380|>
+<|visual token 038381|>
+<|visual token 038382|>
+<|visual token 038383|>
+<|visual token 038384|>
+<|visual token 038385|>
+<|visual token 038386|>
+<|visual token 038387|>
+<|visual token 038388|>
+<|visual token 038389|>
+<|visual token 038390|>
+⋮ (sequential visual tokens <|visual token 038391|> through <|visual token 040950|> continue one per line)
+<|visual token 040951|>
+<|visual token 040952|>
+<|visual token 040953|>
+<|visual token 040954|>
+<|visual token 040955|>
+<|visual token 040956|>
+<|visual token 040957|>
+<|visual token 040958|>
+<|visual token 040959|>
+<|visual token 040960|>
+<|visual token 040961|>
+<|visual token 040962|>
+<|visual token 040963|>
+<|visual token 040964|>
+<|visual token 040965|>
+<|visual token 040966|>
+<|visual token 040967|>
+<|visual token 040968|>
+<|visual token 040969|>
+<|visual token 040970|>
+<|visual token 040971|>
+<|visual token 040972|>
+<|visual token 040973|>
+<|visual token 040974|>
+<|visual token 040975|>
+<|visual token 040976|>
+<|visual token 040977|>
+<|visual token 040978|>
+<|visual token 040979|>
+<|visual token 040980|>
+<|visual token 040981|>
+<|visual token 040982|>
+<|visual token 040983|>
+<|visual token 040984|>
+<|visual token 040985|>
+<|visual token 040986|>
+<|visual token 040987|>
+<|visual token 040988|>
+<|visual token 040989|>
+<|visual token 040990|>
+<|visual token 040991|>
+<|visual token 040992|>
+<|visual token 040993|>
+<|visual token 040994|>
+<|visual token 040995|>
+<|visual token 040996|>
+<|visual token 040997|>
+<|visual token 040998|>
+<|visual token 040999|>
+<|visual token 041000|>
+<|visual token 041001|>
+<|visual token 041002|>
+<|visual token 041003|>
+<|visual token 041004|>
+<|visual token 041005|>
+<|visual token 041006|>
+<|visual token 041007|>
+<|visual token 041008|>
+<|visual token 041009|>
+<|visual token 041010|>
+<|visual token 041011|>
+<|visual token 041012|>
+<|visual token 041013|>
+<|visual token 041014|>
+<|visual token 041015|>
+<|visual token 041016|>
+<|visual token 041017|>
+<|visual token 041018|>
+<|visual token 041019|>
+<|visual token 041020|>
+<|visual token 041021|>
+<|visual token 041022|>
+<|visual token 041023|>
+<|visual token 041024|>
+<|visual token 041025|>
+<|visual token 041026|>
+<|visual token 041027|>
+<|visual token 041028|>
+<|visual token 041029|>
+<|visual token 041030|>
+<|visual token 041031|>
+<|visual token 041032|>
+<|visual token 041033|>
+<|visual token 041034|>
+<|visual token 041035|>
+<|visual token 041036|>
+<|visual token 041037|>
+<|visual token 041038|>
+<|visual token 041039|>
+<|visual token 041040|>
+<|visual token 041041|>
+<|visual token 041042|>
+<|visual token 041043|>
+<|visual token 041044|>
+<|visual token 041045|>
+<|visual token 041046|>
+<|visual token 041047|>
+<|visual token 041048|>
+<|visual token 041049|>
+<|visual token 041050|>
+<|visual token 041051|>
+<|visual token 041052|>
+<|visual token 041053|>
+<|visual token 041054|>
+<|visual token 041055|>
+<|visual token 041056|>
+<|visual token 041057|>
+<|visual token 041058|>
+<|visual token 041059|>
+<|visual token 041060|>
+<|visual token 041061|>
+<|visual token 041062|>
+<|visual token 041063|>
+<|visual token 041064|>
+<|visual token 041065|>
+<|visual token 041066|>
+<|visual token 041067|>
+<|visual token 041068|>
+<|visual token 041069|>
+<|visual token 041070|>
+<|visual token 041071|>
+<|visual token 041072|>
+<|visual token 041073|>
+<|visual token 041074|>
+<|visual token 041075|>
+<|visual token 041076|>
+<|visual token 041077|>
+<|visual token 041078|>
+<|visual token 041079|>
+<|visual token 041080|>
+<|visual token 041081|>
+<|visual token 041082|>
+<|visual token 041083|>
+<|visual token 041084|>
+<|visual token 041085|>
+<|visual token 041086|>
+<|visual token 041087|>
+<|visual token 041088|>
+<|visual token 041089|>
+<|visual token 041090|>
+<|visual token 041091|>
+<|visual token 041092|>
+<|visual token 041093|>
+<|visual token 041094|>
+<|visual token 041095|>
+<|visual token 041096|>
+<|visual token 041097|>
+<|visual token 041098|>
+<|visual token 041099|>
+<|visual token 041100|>
+<|visual token 041101|>
+<|visual token 041102|>
+<|visual token 041103|>
+<|visual token 041104|>
+<|visual token 041105|>
+<|visual token 041106|>
+<|visual token 041107|>
+<|visual token 041108|>
+<|visual token 041109|>
+<|visual token 041110|>
+<|visual token 041111|>
+<|visual token 041112|>
+<|visual token 041113|>
+<|visual token 041114|>
+<|visual token 041115|>
+<|visual token 041116|>
+<|visual token 041117|>
+<|visual token 041118|>
+<|visual token 041119|>
+<|visual token 041120|>
+<|visual token 041121|>
+<|visual token 041122|>
+<|visual token 041123|>
+<|visual token 041124|>
+<|visual token 041125|>
+<|visual token 041126|>
+<|visual token 041127|>
+<|visual token 041128|>
+<|visual token 041129|>
+<|visual token 041130|>
+<|visual token 041131|>
+<|visual token 041132|>
+<|visual token 041133|>
+<|visual token 041134|>
+<|visual token 041135|>
+<|visual token 041136|>
+<|visual token 041137|>
+<|visual token 041138|>
+<|visual token 041139|>
+<|visual token 041140|>
+<|visual token 041141|>
+<|visual token 041142|>
+<|visual token 041143|>
+<|visual token 041144|>
+<|visual token 041145|>
+<|visual token 041146|>
+<|visual token 041147|>
+<|visual token 041148|>
+<|visual token 041149|>
+<|visual token 041150|>
+<|visual token 041151|>
+<|visual token 041152|>
+<|visual token 041153|>
+<|visual token 041154|>
+<|visual token 041155|>
+<|visual token 041156|>
+<|visual token 041157|>
+<|visual token 041158|>
+<|visual token 041159|>
+<|visual token 041160|>
+<|visual token 041161|>
+<|visual token 041162|>
+<|visual token 041163|>
+<|visual token 041164|>
+<|visual token 041165|>
+<|visual token 041166|>
+<|visual token 041167|>
+<|visual token 041168|>
+<|visual token 041169|>
+<|visual token 041170|>
+<|visual token 041171|>
+<|visual token 041172|>
+<|visual token 041173|>
+<|visual token 041174|>
+<|visual token 041175|>
+<|visual token 041176|>
+<|visual token 041177|>
+<|visual token 041178|>
+<|visual token 041179|>
+<|visual token 041180|>
+<|visual token 041181|>
+<|visual token 041182|>
+<|visual token 041183|>
+<|visual token 041184|>
+<|visual token 041185|>
+<|visual token 041186|>
+<|visual token 041187|>
+<|visual token 041188|>
+<|visual token 041189|>
+<|visual token 041190|>
+<|visual token 041191|>
+<|visual token 041192|>
+<|visual token 041193|>
+<|visual token 041194|>
+<|visual token 041195|>
+<|visual token 041196|>
+<|visual token 041197|>
+<|visual token 041198|>
+<|visual token 041199|>
+<|visual token 041200|>
+<|visual token 041201|>
+<|visual token 041202|>
+<|visual token 041203|>
+<|visual token 041204|>
+<|visual token 041205|>
+<|visual token 041206|>
+<|visual token 041207|>
+<|visual token 041208|>
+<|visual token 041209|>
+<|visual token 041210|>
+<|visual token 041211|>
+<|visual token 041212|>
+<|visual token 041213|>
+<|visual token 041214|>
+<|visual token 041215|>
+<|visual token 041216|>
+<|visual token 041217|>
+<|visual token 041218|>
+<|visual token 041219|>
+<|visual token 041220|>
+<|visual token 041221|>
+<|visual token 041222|>
+<|visual token 041223|>
+<|visual token 041224|>
+<|visual token 041225|>
+<|visual token 041226|>
+<|visual token 041227|>
+<|visual token 041228|>
+<|visual token 041229|>
+<|visual token 041230|>
+<|visual token 041231|>
+<|visual token 041232|>
+<|visual token 041233|>
+<|visual token 041234|>
+<|visual token 041235|>
+<|visual token 041236|>
+<|visual token 041237|>
+<|visual token 041238|>
+<|visual token 041239|>
+<|visual token 041240|>
+<|visual token 041241|>
+<|visual token 041242|>
+<|visual token 041243|>
+<|visual token 041244|>
+<|visual token 041245|>
+<|visual token 041246|>
+<|visual token 041247|>
+<|visual token 041248|>
+<|visual token 041249|>
+<|visual token 041250|>
+<|visual token 041251|>
+<|visual token 041252|>
+<|visual token 041253|>
+<|visual token 041254|>
+<|visual token 041255|>
+<|visual token 041256|>
+<|visual token 041257|>
+<|visual token 041258|>
+<|visual token 041259|>
+<|visual token 041260|>
+<|visual token 041261|>
+<|visual token 041262|>
+<|visual token 041263|>
+<|visual token 041264|>
+<|visual token 041265|>
+<|visual token 041266|>
+<|visual token 041267|>
+<|visual token 041268|>
+<|visual token 041269|>
+<|visual token 041270|>
+<|visual token 041271|>
+<|visual token 041272|>
+<|visual token 041273|>
+<|visual token 041274|>
+<|visual token 041275|>
+<|visual token 041276|>
+<|visual token 041277|>
+<|visual token 041278|>
+<|visual token 041279|>
+<|visual token 041280|>
+<|visual token 041281|>
+<|visual token 041282|>
+<|visual token 041283|>
+<|visual token 041284|>
+<|visual token 041285|>
+<|visual token 041286|>
+<|visual token 041287|>
+<|visual token 041288|>
+<|visual token 041289|>
+<|visual token 041290|>
+<|visual token 041291|>
+<|visual token 041292|>
+<|visual token 041293|>
+<|visual token 041294|>
+<|visual token 041295|>
+<|visual token 041296|>
+<|visual token 041297|>
+<|visual token 041298|>
+<|visual token 041299|>
+<|visual token 041300|>
+<|visual token 041301|>
+<|visual token 041302|>
+<|visual token 041303|>
+<|visual token 041304|>
+<|visual token 041305|>
+<|visual token 041306|>
+<|visual token 041307|>
+<|visual token 041308|>
+<|visual token 041309|>
+<|visual token 041310|>
+<|visual token 041311|>
+<|visual token 041312|>
+<|visual token 041313|>
+<|visual token 041314|>
+<|visual token 041315|>
+<|visual token 041316|>
+<|visual token 041317|>
+<|visual token 041318|>
+<|visual token 041319|>
+<|visual token 041320|>
+<|visual token 041321|>
+<|visual token 041322|>
+<|visual token 041323|>
+<|visual token 041324|>
+<|visual token 041325|>
+<|visual token 041326|>
+<|visual token 041327|>
+<|visual token 041328|>
+<|visual token 041329|>
+<|visual token 041330|>
+<|visual token 041331|>
+<|visual token 041332|>
+<|visual token 041333|>
+<|visual token 041334|>
+<|visual token 041335|>
+<|visual token 041336|>
+<|visual token 041337|>
+<|visual token 041338|>
+<|visual token 041339|>
+<|visual token 041340|>
+<|visual token 041341|>
+<|visual token 041342|>
+<|visual token 041343|>
+<|visual token 041344|>
+<|visual token 041345|>
+<|visual token 041346|>
+<|visual token 041347|>
+<|visual token 041348|>
+<|visual token 041349|>
+<|visual token 041350|>
+<|visual token 041351|>
+<|visual token 041352|>
+<|visual token 041353|>
+<|visual token 041354|>
+<|visual token 041355|>
+<|visual token 041356|>
+<|visual token 041357|>
+<|visual token 041358|>
+<|visual token 041359|>
+<|visual token 041360|>
+<|visual token 041361|>
+<|visual token 041362|>
+<|visual token 041363|>
+<|visual token 041364|>
+<|visual token 041365|>
+<|visual token 041366|>
+<|visual token 041367|>
+<|visual token 041368|>
+<|visual token 041369|>
+<|visual token 041370|>
+<|visual token 041371|>
+<|visual token 041372|>
+<|visual token 041373|>
+<|visual token 041374|>
+<|visual token 041375|>
+<|visual token 041376|>
+<|visual token 041377|>
+<|visual token 041378|>
+<|visual token 041379|>
+<|visual token 041380|>
+<|visual token 041381|>
+<|visual token 041382|>
+<|visual token 041383|>
+<|visual token 041384|>
+<|visual token 041385|>
+<|visual token 041386|>
+<|visual token 041387|>
+<|visual token 041388|>
+<|visual token 041389|>
+<|visual token 041390|>
+<|visual token 041391|>
+<|visual token 041392|>
+<|visual token 041393|>
+<|visual token 041394|>
+<|visual token 041395|>
+<|visual token 041396|>
+<|visual token 041397|>
+<|visual token 041398|>
+<|visual token 041399|>
+<|visual token 041400|>
+<|visual token 041401|>
+<|visual token 041402|>
+<|visual token 041403|>
+<|visual token 041404|>
+<|visual token 041405|>
+<|visual token 041406|>
+<|visual token 041407|>
+<|visual token 041408|>
+<|visual token 041409|>
+<|visual token 041410|>
+<|visual token 041411|>
+<|visual token 041412|>
+<|visual token 041413|>
+<|visual token 041414|>
+<|visual token 041415|>
+<|visual token 041416|>
+<|visual token 041417|>
+<|visual token 041418|>
+<|visual token 041419|>
+<|visual token 041420|>
+<|visual token 041421|>
+<|visual token 041422|>
+<|visual token 041423|>
+<|visual token 041424|>
+<|visual token 041425|>
+<|visual token 041426|>
+<|visual token 041427|>
+<|visual token 041428|>
+<|visual token 041429|>
+<|visual token 041430|>
+<|visual token 041431|>
+<|visual token 041432|>
+<|visual token 041433|>
+<|visual token 041434|>
+<|visual token 041435|>
+<|visual token 041436|>
+<|visual token 041437|>
+<|visual token 041438|>
+<|visual token 041439|>
+<|visual token 041440|>
+<|visual token 041441|>
+<|visual token 041442|>
+<|visual token 041443|>
+<|visual token 041444|>
+<|visual token 041445|>
+<|visual token 041446|>
+<|visual token 041447|>
+<|visual token 041448|>
+<|visual token 041449|>
+<|visual token 041450|>
+<|visual token 041451|>
+<|visual token 041452|>
+<|visual token 041453|>
+<|visual token 041454|>
+<|visual token 041455|>
+<|visual token 041456|>
+<|visual token 041457|>
+<|visual token 041458|>
+<|visual token 041459|>
+<|visual token 041460|>
+<|visual token 041461|>
+<|visual token 041462|>
+<|visual token 041463|>
+<|visual token 041464|>
+<|visual token 041465|>
+<|visual token 041466|>
+<|visual token 041467|>
+<|visual token 041468|>
+<|visual token 041469|>
+<|visual token 041470|>
+<|visual token 041471|>
+<|visual token 041472|>
+<|visual token 041473|>
+<|visual token 041474|>
+<|visual token 041475|>
+<|visual token 041476|>
+<|visual token 041477|>
+<|visual token 041478|>
+<|visual token 041479|>
+<|visual token 041480|>
+<|visual token 041481|>
+<|visual token 041482|>
+<|visual token 041483|>
+<|visual token 041484|>
+<|visual token 041485|>
+<|visual token 041486|>
+<|visual token 041487|>
+<|visual token 041488|>
+<|visual token 041489|>
+<|visual token 041490|>
+<|visual token 041491|>
+<|visual token 041492|>
+<|visual token 041493|>
+<|visual token 041494|>
+<|visual token 041495|>
+<|visual token 041496|>
+<|visual token 041497|>
+<|visual token 041498|>
+<|visual token 041499|>
+<|visual token 041500|>
+<|visual token 041501|>
+<|visual token 041502|>
+<|visual token 041503|>
+<|visual token 041504|>
+<|visual token 041505|>
+<|visual token 041506|>
+<|visual token 041507|>
+<|visual token 041508|>
+<|visual token 041509|>
+<|visual token 041510|>
+<|visual token 041511|>
+<|visual token 041512|>
+<|visual token 041513|>
+<|visual token 041514|>
+<|visual token 041515|>
+<|visual token 041516|>
+<|visual token 041517|>
+<|visual token 041518|>
+<|visual token 041519|>
+<|visual token 041520|>
+<|visual token 041521|>
+<|visual token 041522|>
+<|visual token 041523|>
+<|visual token 041524|>
+<|visual token 041525|>
+<|visual token 041526|>
+<|visual token 041527|>
+<|visual token 041528|>
+<|visual token 041529|>
+<|visual token 041530|>
+<|visual token 041531|>
+<|visual token 041532|>
+<|visual token 041533|>
+<|visual token 041534|>
+<|visual token 041535|>
+<|visual token 041536|>
+<|visual token 041537|>
+<|visual token 041538|>
+<|visual token 041539|>
+<|visual token 041540|>
+<|visual token 041541|>
+<|visual token 041542|>
+<|visual token 041543|>
+<|visual token 041544|>
+<|visual token 041545|>
+<|visual token 041546|>
+<|visual token 041547|>
+<|visual token 041548|>
+<|visual token 041549|>
+<|visual token 041550|>
+<|visual token 041551|>
+<|visual token 041552|>
+<|visual token 041553|>
+<|visual token 041554|>
+<|visual token 041555|>
+<|visual token 041556|>
+<|visual token 041557|>
+<|visual token 041558|>
+<|visual token 041559|>
+<|visual token 041560|>
+<|visual token 041561|>
+<|visual token 041562|>
+<|visual token 041563|>
+<|visual token 041564|>
+<|visual token 041565|>
+<|visual token 041566|>
+<|visual token 041567|>
+<|visual token 041568|>
+<|visual token 041569|>
+<|visual token 041570|>
+<|visual token 041571|>
+<|visual token 041572|>
+<|visual token 041573|>
+<|visual token 041574|>
+<|visual token 041575|>
+<|visual token 041576|>
+<|visual token 041577|>
+<|visual token 041578|>
+<|visual token 041579|>
+<|visual token 041580|>
+<|visual token 041581|>
+<|visual token 041582|>
+<|visual token 041583|>
+<|visual token 041584|>
+<|visual token 041585|>
+<|visual token 041586|>
+<|visual token 041587|>
+<|visual token 041588|>
+<|visual token 041589|>
+<|visual token 041590|>
+<|visual token 041591|>
+<|visual token 041592|>
+<|visual token 041593|>
+<|visual token 041594|>
+<|visual token 041595|>
+<|visual token 041596|>
+<|visual token 041597|>
+<|visual token 041598|>
+<|visual token 041599|>
+<|visual token 041600|>
+<|visual token 041601|>
+<|visual token 041602|>
+<|visual token 041603|>
+<|visual token 041604|>
+<|visual token 041605|>
+<|visual token 041606|>
+<|visual token 041607|>
+<|visual token 041608|>
+<|visual token 041609|>
+<|visual token 041610|>
+<|visual token 041611|>
+<|visual token 041612|>
+<|visual token 041613|>
+<|visual token 041614|>
+<|visual token 041615|>
+<|visual token 041616|>
+<|visual token 041617|>
+<|visual token 041618|>
+<|visual token 041619|>
+<|visual token 041620|>
+<|visual token 041621|>
+<|visual token 041622|>
+<|visual token 041623|>
+<|visual token 041624|>
+<|visual token 041625|>
+<|visual token 041626|>
+<|visual token 041627|>
+<|visual token 041628|>
+<|visual token 041629|>
+<|visual token 041630|>
+<|visual token 041631|>
+<|visual token 041632|>
+<|visual token 041633|>
+<|visual token 041634|>
+<|visual token 041635|>
+<|visual token 041636|>
+<|visual token 041637|>
+<|visual token 041638|>
+<|visual token 041639|>
+<|visual token 041640|>
+<|visual token 041641|>
+<|visual token 041642|>
+<|visual token 041643|>
+<|visual token 041644|>
+<|visual token 041645|>
+<|visual token 041646|>
+<|visual token 041647|>
+<|visual token 041648|>
+<|visual token 041649|>
+<|visual token 041650|>
+<|visual token 041651|>
+<|visual token 041652|>
+<|visual token 041653|>
+<|visual token 041654|>
+<|visual token 041655|>
+<|visual token 041656|>
+<|visual token 041657|>
+<|visual token 041658|>
+<|visual token 041659|>
+<|visual token 041660|>
+<|visual token 041661|>
+<|visual token 041662|>
+<|visual token 041663|>
+<|visual token 041664|>
+<|visual token 041665|>
+<|visual token 041666|>
+<|visual token 041667|>
+<|visual token 041668|>
+<|visual token 041669|>
+<|visual token 041670|>
+<|visual token 041671|>
+<|visual token 041672|>
+<|visual token 041673|>
+<|visual token 041674|>
+<|visual token 041675|>
+<|visual token 041676|>
+<|visual token 041677|>
+<|visual token 041678|>
+<|visual token 041679|>
+<|visual token 041680|>
+<|visual token 041681|>
+<|visual token 041682|>
+<|visual token 041683|>
+<|visual token 041684|>
+<|visual token 041685|>
+<|visual token 041686|>
+<|visual token 041687|>
+<|visual token 041688|>
+<|visual token 041689|>
+<|visual token 041690|>
+<|visual token 041691|>
+<|visual token 041692|>
+<|visual token 041693|>
+<|visual token 041694|>
+<|visual token 041695|>
+<|visual token 041696|>
+<|visual token 041697|>
+<|visual token 041698|>
+<|visual token 041699|>
+<|visual token 041700|>
+<|visual token 041701|>
+<|visual token 041702|>
+<|visual token 041703|>
+<|visual token 041704|>
+<|visual token 041705|>
+<|visual token 041706|>
+<|visual token 041707|>
+<|visual token 041708|>
+<|visual token 041709|>
+<|visual token 041710|>
+<|visual token 041711|>
+<|visual token 041712|>
+<|visual token 041713|>
+<|visual token 041714|>
+<|visual token 041715|>
+<|visual token 041716|>
+<|visual token 041717|>
+<|visual token 041718|>
+<|visual token 041719|>
+<|visual token 041720|>
+<|visual token 041721|>
+<|visual token 041722|>
+<|visual token 041723|>
+<|visual token 041724|>
+<|visual token 041725|>
+<|visual token 041726|>
+<|visual token 041727|>
+<|visual token 041728|>
+<|visual token 041729|>
+<|visual token 041730|>
+<|visual token 041731|>
+<|visual token 041732|>
+<|visual token 041733|>
+<|visual token 041734|>
+<|visual token 041735|>
+<|visual token 041736|>
+<|visual token 041737|>
+<|visual token 041738|>
+<|visual token 041739|>
+<|visual token 041740|>
+<|visual token 041741|>
+<|visual token 041742|>
+<|visual token 041743|>
+<|visual token 041744|>
+<|visual token 041745|>
+<|visual token 041746|>
+<|visual token 041747|>
+<|visual token 041748|>
+<|visual token 041749|>
+<|visual token 041750|>
+<|visual token 041751|>
+<|visual token 041752|>
+<|visual token 041753|>
+<|visual token 041754|>
+<|visual token 041755|>
+<|visual token 041756|>
+<|visual token 041757|>
+<|visual token 041758|>
+<|visual token 041759|>
+<|visual token 041760|>
+<|visual token 041761|>
+<|visual token 041762|>
+<|visual token 041763|>
+<|visual token 041764|>
+<|visual token 041765|>
+<|visual token 041766|>
+<|visual token 041767|>
+<|visual token 041768|>
+<|visual token 041769|>
+<|visual token 041770|>
+<|visual token 041771|>
+<|visual token 041772|>
+<|visual token 041773|>
+<|visual token 041774|>
+<|visual token 041775|>
+<|visual token 041776|>
+<|visual token 041777|>
+<|visual token 041778|>
+<|visual token 041779|>
+<|visual token 041780|>
+<|visual token 041781|>
+<|visual token 041782|>
+<|visual token 041783|>
+<|visual token 041784|>
+<|visual token 041785|>
+<|visual token 041786|>
+<|visual token 041787|>
+<|visual token 041788|>
+<|visual token 041789|>
+<|visual token 041790|>
+<|visual token 041791|>
+<|visual token 041792|>
+<|visual token 041793|>
+<|visual token 041794|>
+<|visual token 041795|>
+<|visual token 041796|>
+<|visual token 041797|>
+<|visual token 041798|>
+<|visual token 041799|>
+<|visual token 041800|>
+<|visual token 041801|>
+<|visual token 041802|>
+<|visual token 041803|>
+<|visual token 041804|>
+<|visual token 041805|>
+<|visual token 041806|>
+<|visual token 041807|>
+<|visual token 041808|>
+<|visual token 041809|>
+<|visual token 041810|>
+<|visual token 041811|>
+<|visual token 041812|>
+<|visual token 041813|>
+<|visual token 041814|>
+<|visual token 041815|>
+<|visual token 041816|>
+<|visual token 041817|>
+<|visual token 041818|>
+<|visual token 041819|>
+<|visual token 041820|>
+<|visual token 041821|>
+<|visual token 041822|>
+<|visual token 041823|>
+<|visual token 041824|>
+<|visual token 041825|>
+<|visual token 041826|>
+<|visual token 041827|>
+<|visual token 041828|>
+<|visual token 041829|>
+<|visual token 041830|>
+<|visual token 041831|>
+<|visual token 041832|>
+<|visual token 041833|>
+<|visual token 041834|>
+<|visual token 041835|>
+<|visual token 041836|>
+<|visual token 041837|>
+<|visual token 041838|>
+<|visual token 041839|>
+<|visual token 041840|>
+<|visual token 041841|>
+<|visual token 041842|>
+<|visual token 041843|>
+<|visual token 041844|>
+<|visual token 041845|>
+<|visual token 041846|>
+<|visual token 041847|>
+<|visual token 041848|>
+<|visual token 041849|>
+<|visual token 041850|>
+<|visual token 041851|>
+<|visual token 041852|>
+<|visual token 041853|>
+<|visual token 041854|>
+<|visual token 041855|>
+<|visual token 041856|>
+<|visual token 041857|>
+<|visual token 041858|>
+<|visual token 041859|>
+<|visual token 041860|>
+<|visual token 041861|>
+<|visual token 041862|>
+<|visual token 041863|>
+<|visual token 041864|>
+<|visual token 041865|>
+<|visual token 041866|>
+<|visual token 041867|>
+<|visual token 041868|>
+<|visual token 041869|>
+<|visual token 041870|>
+<|visual token 041871|>
+<|visual token 041872|>
+<|visual token 041873|>
+<|visual token 041874|>
+<|visual token 041875|>
+<|visual token 041876|>
+<|visual token 041877|>
+<|visual token 041878|>
+<|visual token 041879|>
+<|visual token 041880|>
+<|visual token 041881|>
+<|visual token 041882|>
+<|visual token 041883|>
+<|visual token 041884|>
+<|visual token 041885|>
+<|visual token 041886|>
+<|visual token 041887|>
+<|visual token 041888|>
+<|visual token 041889|>
+<|visual token 041890|>
+<|visual token 041891|>
+<|visual token 041892|>
+<|visual token 041893|>
+<|visual token 041894|>
+<|visual token 041895|>
+<|visual token 041896|>
+<|visual token 041897|>
+<|visual token 041898|>
+<|visual token 041899|>
+<|visual token 041900|>
+<|visual token 041901|>
+<|visual token 041902|>
+<|visual token 041903|>
+<|visual token 041904|>
+<|visual token 041905|>
+<|visual token 041906|>
+<|visual token 041907|>
+<|visual token 041908|>
+<|visual token 041909|>
+<|visual token 041910|>
+<|visual token 041911|>
+<|visual token 041912|>
+<|visual token 041913|>
+<|visual token 041914|>
+<|visual token 041915|>
+<|visual token 041916|>
+<|visual token 041917|>
+<|visual token 041918|>
+<|visual token 041919|>
+<|visual token 041920|>
+<|visual token 041921|>
+<|visual token 041922|>
+<|visual token 041923|>
+<|visual token 041924|>
+<|visual token 041925|>
+<|visual token 041926|>
+<|visual token 041927|>
+<|visual token 041928|>
+<|visual token 041929|>
+<|visual token 041930|>
+<|visual token 041931|>
+<|visual token 041932|>
+<|visual token 041933|>
+<|visual token 041934|>
+<|visual token 041935|>
+<|visual token 041936|>
+<|visual token 041937|>
+<|visual token 041938|>
+<|visual token 041939|>
+<|visual token 041940|>
+<|visual token 041941|>
+<|visual token 041942|>
+<|visual token 041943|>
+<|visual token 041944|>
+<|visual token 041945|>
+<|visual token 041946|>
+<|visual token 041947|>
+<|visual token 041948|>
+<|visual token 041949|>
+<|visual token 041950|>
+<|visual token 041951|>
+<|visual token 041952|>
+<|visual token 041953|>
+<|visual token 041954|>
+<|visual token 041955|>
+<|visual token 041956|>
+<|visual token 041957|>
+<|visual token 041958|>
+<|visual token 041959|>
+<|visual token 041960|>
+<|visual token 041961|>
+<|visual token 041962|>
+<|visual token 041963|>
+<|visual token 041964|>
+<|visual token 041965|>
+<|visual token 041966|>
+<|visual token 041967|>
+<|visual token 041968|>
+<|visual token 041969|>
+<|visual token 041970|>
+<|visual token 041971|>
+<|visual token 041972|>
+<|visual token 041973|>
+<|visual token 041974|>
+<|visual token 041975|>
+<|visual token 041976|>
+<|visual token 041977|>
+<|visual token 041978|>
+<|visual token 041979|>
+<|visual token 041980|>
+<|visual token 041981|>
+<|visual token 041982|>
+<|visual token 041983|>
+<|visual token 041984|>
+<|visual token 041985|>
+<|visual token 041986|>
+<|visual token 041987|>
+<|visual token 041988|>
+<|visual token 041989|>
+<|visual token 041990|>
+<|visual token 041991|>
+<|visual token 041992|>
+<|visual token 041993|>
+<|visual token 041994|>
+<|visual token 041995|>
+<|visual token 041996|>
+<|visual token 041997|>
+<|visual token 041998|>
+<|visual token 041999|>
+<|visual token 042000|>
+<|visual token 042001|>
+<|visual token 042002|>
+<|visual token 042003|>
+<|visual token 042004|>
+<|visual token 042005|>
+<|visual token 042006|>
+<|visual token 042007|>
+<|visual token 042008|>
+<|visual token 042009|>
+<|visual token 042010|>
+<|visual token 042011|>
+<|visual token 042012|>
+<|visual token 042013|>
+<|visual token 042014|>
+<|visual token 042015|>
+<|visual token 042016|>
+<|visual token 042017|>
+<|visual token 042018|>
+<|visual token 042019|>
+<|visual token 042020|>
+<|visual token 042021|>
+<|visual token 042022|>
+<|visual token 042023|>
+<|visual token 042024|>
+<|visual token 042025|>
+<|visual token 042026|>
+<|visual token 042027|>
+<|visual token 042028|>
+<|visual token 042029|>
+<|visual token 042030|>
+<|visual token 042031|>
+<|visual token 042032|>
+<|visual token 042033|>
+<|visual token 042034|>
+<|visual token 042035|>
+<|visual token 042036|>
+<|visual token 042037|>
+<|visual token 042038|>
+<|visual token 042039|>
+<|visual token 042040|>
+<|visual token 042041|>
+<|visual token 042042|>
+<|visual token 042043|>
+<|visual token 042044|>
+<|visual token 042045|>
+<|visual token 042046|>
+<|visual token 042047|>
+<|visual token 042048|>
+<|visual token 042049|>
+<|visual token 042050|>
+<|visual token 042051|>
+<|visual token 042052|>
+<|visual token 042053|>
+<|visual token 042054|>
+<|visual token 042055|>
+<|visual token 042056|>
+<|visual token 042057|>
+<|visual token 042058|>
+<|visual token 042059|>
+<|visual token 042060|>
+<|visual token 042061|>
+<|visual token 042062|>
+<|visual token 042063|>
+<|visual token 042064|>
+<|visual token 042065|>
+<|visual token 042066|>
+<|visual token 042067|>
+<|visual token 042068|>
+<|visual token 042069|>
+<|visual token 042070|>
+<|visual token 042071|>
+<|visual token 042072|>
+<|visual token 042073|>
+<|visual token 042074|>
+<|visual token 042075|>
+<|visual token 042076|>
+<|visual token 042077|>
+<|visual token 042078|>
+<|visual token 042079|>
+<|visual token 042080|>
+<|visual token 042081|>
+<|visual token 042082|>
+<|visual token 042083|>
+<|visual token 042084|>
+<|visual token 042085|>
+<|visual token 042086|>
+<|visual token 042087|>
+<|visual token 042088|>
+<|visual token 042089|>
+<|visual token 042090|>
+<|visual token 042091|>
+<|visual token 042092|>
+<|visual token 042093|>
+<|visual token 042094|>
+<|visual token 042095|>
+<|visual token 042096|>
+<|visual token 042097|>
+<|visual token 042098|>
+<|visual token 042099|>
+<|visual token 042100|>
+<|visual token 042101|>
+<|visual token 042102|>
+<|visual token 042103|>
+<|visual token 042104|>
+<|visual token 042105|>
+<|visual token 042106|>
+<|visual token 042107|>
+<|visual token 042108|>
+<|visual token 042109|>
+<|visual token 042110|>
+<|visual token 042111|>
+<|visual token 042112|>
+<|visual token 042113|>
+<|visual token 042114|>
+<|visual token 042115|>
+<|visual token 042116|>
+<|visual token 042117|>
+<|visual token 042118|>
+<|visual token 042119|>
+<|visual token 042120|>
+<|visual token 042121|>
+<|visual token 042122|>
+<|visual token 042123|>
+<|visual token 042124|>
+<|visual token 042125|>
+<|visual token 042126|>
+<|visual token 042127|>
+<|visual token 042128|>
+<|visual token 042129|>
+<|visual token 042130|>
+<|visual token 042131|>
+<|visual token 042132|>
+<|visual token 042133|>
+<|visual token 042134|>
+<|visual token 042135|>
+<|visual token 042136|>
+<|visual token 042137|>
+<|visual token 042138|>
+<|visual token 042139|>
+<|visual token 042140|>
+<|visual token 042141|>
+<|visual token 042142|>
+<|visual token 042143|>
+<|visual token 042144|>
+<|visual token 042145|>
+<|visual token 042146|>
+<|visual token 042147|>
+<|visual token 042148|>
+<|visual token 042149|>
+<|visual token 042150|>
+<|visual token 042151|>
+<|visual token 042152|>
+<|visual token 042153|>
+<|visual token 042154|>
+<|visual token 042155|>
+<|visual token 042156|>
+<|visual token 042157|>
+<|visual token 042158|>
+<|visual token 042159|>
+<|visual token 042160|>
+<|visual token 042161|>
+<|visual token 042162|>
+<|visual token 042163|>
+<|visual token 042164|>
+<|visual token 042165|>
+<|visual token 042166|>
+<|visual token 042167|>
+<|visual token 042168|>
+<|visual token 042169|>
+<|visual token 042170|>
+<|visual token 042171|>
+<|visual token 042172|>
+<|visual token 042173|>
+<|visual token 042174|>
+<|visual token 042175|>
+<|visual token 042176|>
+<|visual token 042177|>
+<|visual token 042178|>
+<|visual token 042179|>
+<|visual token 042180|>
+<|visual token 042181|>
+<|visual token 042182|>
+<|visual token 042183|>
+<|visual token 042184|>
+<|visual token 042185|>
+<|visual token 042186|>
+<|visual token 042187|>
+<|visual token 042188|>
+<|visual token 042189|>
+<|visual token 042190|>
+<|visual token 042191|>
+<|visual token 042192|>
+<|visual token 042193|>
+<|visual token 042194|>
+<|visual token 042195|>
+<|visual token 042196|>
+<|visual token 042197|>
+<|visual token 042198|>
+<|visual token 042199|>
+<|visual token 042200|>
+<|visual token 042201|>
+<|visual token 042202|>
+<|visual token 042203|>
+<|visual token 042204|>
+<|visual token 042205|>
+<|visual token 042206|>
+<|visual token 042207|>
+<|visual token 042208|>
+<|visual token 042209|>
+<|visual token 042210|>
+<|visual token 042211|>
+<|visual token 042212|>
+<|visual token 042213|>
+<|visual token 042214|>
+<|visual token 042215|>
+<|visual token 042216|>
+<|visual token 042217|>
+<|visual token 042218|>
+<|visual token 042219|>
+<|visual token 042220|>
+<|visual token 042221|>
+<|visual token 042222|>
+<|visual token 042223|>
+<|visual token 042224|>
+<|visual token 042225|>
+<|visual token 042226|>
+<|visual token 042227|>
+<|visual token 042228|>
+<|visual token 042229|>
+<|visual token 042230|>
+<|visual token 042231|>
+<|visual token 042232|>
+<|visual token 042233|>
+<|visual token 042234|>
+<|visual token 042235|>
+<|visual token 042236|>
+<|visual token 042237|>
+<|visual token 042238|>
+<|visual token 042239|>
+<|visual token 042240|>
+<|visual token 042241|>
+<|visual token 042242|>
+<|visual token 042243|>
+<|visual token 042244|>
+<|visual token 042245|>
+<|visual token 042246|>
+<|visual token 042247|>
+<|visual token 042248|>
+<|visual token 042249|>
+<|visual token 042250|>
+<|visual token 042251|>
+<|visual token 042252|>
+<|visual token 042253|>
+<|visual token 042254|>
+<|visual token 042255|>
+<|visual token 042256|>
+<|visual token 042257|>
+<|visual token 042258|>
+<|visual token 042259|>
+<|visual token 042260|>
+<|visual token 042261|>
+<|visual token 042262|>
+<|visual token 042263|>
+<|visual token 042264|>
+<|visual token 042265|>
+<|visual token 042266|>
+<|visual token 042267|>
+<|visual token 042268|>
+<|visual token 042269|>
+<|visual token 042270|>
+<|visual token 042271|>
+<|visual token 042272|>
+<|visual token 042273|>
+<|visual token 042274|>
+<|visual token 042275|>
+<|visual token 042276|>
+<|visual token 042277|>
+<|visual token 042278|>
+<|visual token 042279|>
+<|visual token 042280|>
+<|visual token 042281|>
+<|visual token 042282|>
+<|visual token 042283|>
+<|visual token 042284|>
+<|visual token 042285|>
+<|visual token 042286|>
+<|visual token 042287|>
+<|visual token 042288|>
+<|visual token 042289|>
+<|visual token 042290|>
+<|visual token 042291|>
+<|visual token 042292|>
+<|visual token 042293|>
+<|visual token 042294|>
+<|visual token 042295|>
+<|visual token 042296|>
+<|visual token 042297|>
+<|visual token 042298|>
+<|visual token 042299|>
+<|visual token 042300|>
+<|visual token 042301|>
+<|visual token 042302|>
+<|visual token 042303|>
+<|visual token 042304|>
+<|visual token 042305|>
+<|visual token 042306|>
+<|visual token 042307|>
+<|visual token 042308|>
+<|visual token 042309|>
+<|visual token 042310|>
+<|visual token 042311|>
+<|visual token 042312|>
+<|visual token 042313|>
+<|visual token 042314|>
+<|visual token 042315|>
+<|visual token 042316|>
+<|visual token 042317|>
+<|visual token 042318|>
+<|visual token 042319|>
+<|visual token 042320|>
+<|visual token 042321|>
+<|visual token 042322|>
+<|visual token 042323|>
+<|visual token 042324|>
+<|visual token 042325|>
+<|visual token 042326|>
+<|visual token 042327|>
+<|visual token 042328|>
+<|visual token 042329|>
+<|visual token 042330|>
+<|visual token 042331|>
+<|visual token 042332|>
+<|visual token 042333|>
+<|visual token 042334|>
+<|visual token 042335|>
+<|visual token 042336|>
+<|visual token 042337|>
+<|visual token 042338|>
+<|visual token 042339|>
+<|visual token 042340|>
+<|visual token 042341|>
+<|visual token 042342|>
+<|visual token 042343|>
+<|visual token 042344|>
+<|visual token 042345|>
+<|visual token 042346|>
+<|visual token 042347|>
+<|visual token 042348|>
+<|visual token 042349|>
+<|visual token 042350|>
+<|visual token 042351|>
+<|visual token 042352|>
+<|visual token 042353|>
+<|visual token 042354|>
+<|visual token 042355|>
+<|visual token 042356|>
+<|visual token 042357|>
+<|visual token 042358|>
+<|visual token 042359|>
+<|visual token 042360|>
+<|visual token 042361|>
+<|visual token 042362|>
+<|visual token 042363|>
+<|visual token 042364|>
+<|visual token 042365|>
+<|visual token 042366|>
+<|visual token 042367|>
+<|visual token 042368|>
+<|visual token 042369|>
+<|visual token 042370|>
+<|visual token 042371|>
+<|visual token 042372|>
+<|visual token 042373|>
+<|visual token 042374|>
+<|visual token 042375|>
+<|visual token 042376|>
+<|visual token 042377|>
+<|visual token 042378|>
+<|visual token 042379|>
+<|visual token 042380|>
+<|visual token 042381|>
+<|visual token 042382|>
+<|visual token 042383|>
+<|visual token 042384|>
+<|visual token 042385|>
+<|visual token 042386|>
+<|visual token 042387|>
+<|visual token 042388|>
+<|visual token 042389|>
+<|visual token 042390|>
+<|visual token 042391|>
+<|visual token 042392|>
+<|visual token 042393|>
+<|visual token 042394|>
+<|visual token 042395|>
+<|visual token 042396|>
+<|visual token 042397|>
+<|visual token 042398|>
+<|visual token 042399|>
+<|visual token 042400|>
+<|visual token 042401|>
+<|visual token 042402|>
+<|visual token 042403|>
+<|visual token 042404|>
+<|visual token 042405|>
+<|visual token 042406|>
+<|visual token 042407|>
+<|visual token 042408|>
+<|visual token 042409|>
+<|visual token 042410|>
+<|visual token 042411|>
+<|visual token 042412|>
+<|visual token 042413|>
+<|visual token 042414|>
+<|visual token 042415|>
+<|visual token 042416|>
+<|visual token 042417|>
+<|visual token 042418|>
+<|visual token 042419|>
+<|visual token 042420|>
+<|visual token 042421|>
+<|visual token 042422|>
+<|visual token 042423|>
+<|visual token 042424|>
+<|visual token 042425|>
+<|visual token 042426|>
+<|visual token 042427|>
+<|visual token 042428|>
+<|visual token 042429|>
+<|visual token 042430|>
+<|visual token 042431|>
+<|visual token 042432|>
+<|visual token 042433|>
+<|visual token 042434|>
+<|visual token 042435|>
+<|visual token 042436|>
+<|visual token 042437|>
+<|visual token 042438|>
+<|visual token 042439|>
+<|visual token 042440|>
+<|visual token 042441|>
+<|visual token 042442|>
+<|visual token 042443|>
+<|visual token 042444|>
+<|visual token 042445|>
+<|visual token 042446|>
+<|visual token 042447|>
+<|visual token 042448|>
+<|visual token 042449|>
+<|visual token 042450|>
+<|visual token 042451|>
+<|visual token 042452|>
+<|visual token 042453|>
+<|visual token 042454|>
+<|visual token 042455|>
+<|visual token 042456|>
+<|visual token 042457|>
+<|visual token 042458|>
+<|visual token 042459|>
+<|visual token 042460|>
+<|visual token 042461|>
+<|visual token 042462|>
+<|visual token 042463|>
+<|visual token 042464|>
+<|visual token 042465|>
+<|visual token 042466|>
+<|visual token 042467|>
+<|visual token 042468|>
+<|visual token 042469|>
+<|visual token 042470|>
+<|visual token 042471|>
+<|visual token 042472|>
+<|visual token 042473|>
+<|visual token 042474|>
+<|visual token 042475|>
+<|visual token 042476|>
+<|visual token 042477|>
+<|visual token 042478|>
+<|visual token 042479|>
+<|visual token 042480|>
+<|visual token 042481|>
+<|visual token 042482|>
+<|visual token 042483|>
+<|visual token 042484|>
+<|visual token 042485|>
+<|visual token 042486|>
+<|visual token 042487|>
+<|visual token 042488|>
+<|visual token 042489|>
+<|visual token 042490|>
+<|visual token 042491|>
+<|visual token 042492|>
+<|visual token 042493|>
+<|visual token 042494|>
+<|visual token 042495|>
+<|visual token 042496|>
+<|visual token 042497|>
+<|visual token 042498|>
+<|visual token 042499|>
+<|visual token 042500|>
+<|visual token 042501|>
+<|visual token 042502|>
+<|visual token 042503|>
+<|visual token 042504|>
+<|visual token 042505|>
+<|visual token 042506|>
+<|visual token 042507|>
+<|visual token 042508|>
+<|visual token 042509|>
+<|visual token 042510|>
+<|visual token 042511|>
+<|visual token 042512|>
+<|visual token 042513|>
+<|visual token 042514|>
+<|visual token 042515|>
+<|visual token 042516|>
+<|visual token 042517|>
+<|visual token 042518|>
+<|visual token 042519|>
+<|visual token 042520|>
+<|visual token 042521|>
+<|visual token 042522|>
+<|visual token 042523|>
+<|visual token 042524|>
+<|visual token 042525|>
+<|visual token 042526|>
+<|visual token 042527|>
+<|visual token 042528|>
+<|visual token 042529|>
+<|visual token 042530|>
+<|visual token 042531|>
+<|visual token 042532|>
+<|visual token 042533|>
+<|visual token 042534|>
+<|visual token 042535|>
+<|visual token 042536|>
+<|visual token 042537|>
+<|visual token 042538|>
+<|visual token 042539|>
+<|visual token 042540|>
+<|visual token 042541|>
+<|visual token 042542|>
+<|visual token 042543|>
+<|visual token 042544|>
+<|visual token 042545|>
+<|visual token 042546|>
+<|visual token 042547|>
+<|visual token 042548|>
+<|visual token 042549|>
+<|visual token 042550|>
+<|visual token 042551|>
+<|visual token 042552|>
+<|visual token 042553|>
+<|visual token 042554|>
+<|visual token 042555|>
+<|visual token 042556|>
+<|visual token 042557|>
+<|visual token 042558|>
+<|visual token 042559|>
+<|visual token 042560|>
+<|visual token 042561|>
+<|visual token 042562|>
+<|visual token 042563|>
+<|visual token 042564|>
+<|visual token 042565|>
+<|visual token 042566|>
+<|visual token 042567|>
+<|visual token 042568|>
+<|visual token 042569|>
+<|visual token 042570|>
+<|visual token 042571|>
+<|visual token 042572|>
+<|visual token 042573|>
+<|visual token 042574|>
+<|visual token 042575|>
+<|visual token 042576|>
+<|visual token 042577|>
+<|visual token 042578|>
+<|visual token 042579|>
+<|visual token 042580|>
+<|visual token 042581|>
+<|visual token 042582|>
+<|visual token 042583|>
+<|visual token 042584|>
+<|visual token 042585|>
+<|visual token 042586|>
+<|visual token 042587|>
+<|visual token 042588|>
+<|visual token 042589|>
+<|visual token 042590|>
+<|visual token 042591|>
+<|visual token 042592|>
+<|visual token 042593|>
+<|visual token 042594|>
+<|visual token 042595|>
+<|visual token 042596|>
+<|visual token 042597|>
+<|visual token 042598|>
+<|visual token 042599|>
+<|visual token 042600|>
+<|visual token 042601|>
+<|visual token 042602|>
+<|visual token 042603|>
+<|visual token 042604|>
+<|visual token 042605|>
+<|visual token 042606|>
+<|visual token 042607|>
+<|visual token 042608|>
+<|visual token 042609|>
+<|visual token 042610|>
+<|visual token 042611|>
+<|visual token 042612|>
+<|visual token 042613|>
+<|visual token 042614|>
+<|visual token 042615|>
+<|visual token 042616|>
+<|visual token 042617|>
+<|visual token 042618|>
+<|visual token 042619|>
+<|visual token 042620|>
+<|visual token 042621|>
+<|visual token 042622|>
+<|visual token 042623|>
+<|visual token 042624|>
+<|visual token 042625|>
+<|visual token 042626|>
+<|visual token 042627|>
+<|visual token 042628|>
+<|visual token 042629|>
+<|visual token 042630|>
+<|visual token 042631|>
+<|visual token 042632|>
+<|visual token 042633|>
+<|visual token 042634|>
+<|visual token 042635|>
+<|visual token 042636|>
+<|visual token 042637|>
+<|visual token 042638|>
+<|visual token 042639|>
+<|visual token 042640|>
+<|visual token 042641|>
+<|visual token 042642|>
+<|visual token 042643|>
+<|visual token 042644|>
+<|visual token 042645|>
+<|visual token 042646|>
+<|visual token 042647|>
+<|visual token 042648|>
+<|visual token 042649|>
+<|visual token 042650|>
+<|visual token 042651|>
+<|visual token 042652|>
+<|visual token 042653|>
+<|visual token 042654|>
+<|visual token 042655|>
+<|visual token 042656|>
+<|visual token 042657|>
+<|visual token 042658|>
+<|visual token 042659|>
+<|visual token 042660|>
+<|visual token 042661|>
+<|visual token 042662|>
+<|visual token 042663|>
+<|visual token 042664|>
+<|visual token 042665|>
+<|visual token 042666|>
+<|visual token 042667|>
+<|visual token 042668|>
+<|visual token 042669|>
+<|visual token 042670|>
+<|visual token 042671|>
+<|visual token 042672|>
+<|visual token 042673|>
+<|visual token 042674|>
+<|visual token 042675|>
+<|visual token 042676|>
+<|visual token 042677|>
+<|visual token 042678|>
+<|visual token 042679|>
+<|visual token 042680|>
+<|visual token 042681|>
+<|visual token 042682|>
+<|visual token 042683|>
+<|visual token 042684|>
+<|visual token 042685|>
+<|visual token 042686|>
+<|visual token 042687|>
+<|visual token 042688|>
+<|visual token 042689|>
+<|visual token 042690|>
+<|visual token 042691|>
+<|visual token 042692|>
+<|visual token 042693|>
+<|visual token 042694|>
+<|visual token 042695|>
+<|visual token 042696|>
+<|visual token 042697|>
+<|visual token 042698|>
+<|visual token 042699|>
+<|visual token 042700|>
+<|visual token 042701|>
+<|visual token 042702|>
+<|visual token 042703|>
+<|visual token 042704|>
+<|visual token 042705|>
+<|visual token 042706|>
+<|visual token 042707|>
+<|visual token 042708|>
+<|visual token 042709|>
+<|visual token 042710|>
+<|visual token 042711|>
+<|visual token 042712|>
+<|visual token 042713|>
+<|visual token 042714|>
+<|visual token 042715|>
+<|visual token 042716|>
+<|visual token 042717|>
+<|visual token 042718|>
+<|visual token 042719|>
+<|visual token 042720|>
+<|visual token 042721|>
+<|visual token 042722|>
+<|visual token 042723|>
+<|visual token 042724|>
+<|visual token 042725|>
+<|visual token 042726|>
+<|visual token 042727|>
+<|visual token 042728|>
+<|visual token 042729|>
+<|visual token 042730|>
+<|visual token 042731|>
+<|visual token 042732|>
+<|visual token 042733|>
+<|visual token 042734|>
+<|visual token 042735|>
+<|visual token 042736|>
+<|visual token 042737|>
+<|visual token 042738|>
+<|visual token 042739|>
+<|visual token 042740|>
+<|visual token 042741|>
+<|visual token 042742|>
+<|visual token 042743|>
+<|visual token 042744|>
+<|visual token 042745|>
+<|visual token 042746|>
+<|visual token 042747|>
+<|visual token 042748|>
+<|visual token 042749|>
+<|visual token 042750|>
+<|visual token 042751|>
+<|visual token 042752|>
+<|visual token 042753|>
+<|visual token 042754|>
+<|visual token 042755|>
+<|visual token 042756|>
+<|visual token 042757|>
+<|visual token 042758|>
+<|visual token 042759|>
+<|visual token 042760|>
+<|visual token 042761|>
+<|visual token 042762|>
+<|visual token 042763|>
+<|visual token 042764|>
+<|visual token 042765|>
+<|visual token 042766|>
+<|visual token 042767|>
+<|visual token 042768|>
+<|visual token 042769|>
+<|visual token 042770|>
+<|visual token 042771|>
+<|visual token 042772|>
+<|visual token 042773|>
+<|visual token 042774|>
+<|visual token 042775|>
+<|visual token 042776|>
+<|visual token 042777|>
+<|visual token 042778|>
+<|visual token 042779|>
+<|visual token 042780|>
+<|visual token 042781|>
+<|visual token 042782|>
+<|visual token 042783|>
+<|visual token 042784|>
+<|visual token 042785|>
+<|visual token 042786|>
+<|visual token 042787|>
+<|visual token 042788|>
+<|visual token 042789|>
+<|visual token 042790|>
+<|visual token 042791|>
+<|visual token 042792|>
+<|visual token 042793|>
+<|visual token 042794|>
+<|visual token 042795|>
+<|visual token 042796|>
+<|visual token 042797|>
+<|visual token 042798|>
+<|visual token 042799|>
+<|visual token 042800|>
+<|visual token 042801|>
+<|visual token 042802|>
+<|visual token 042803|>
+<|visual token 042804|>
+<|visual token 042805|>
+<|visual token 042806|>
+<|visual token 042807|>
+<|visual token 042808|>
+<|visual token 042809|>
+<|visual token 042810|>
+<|visual token 042811|>
+<|visual token 042812|>
+<|visual token 042813|>
+<|visual token 042814|>
+<|visual token 042815|>
+<|visual token 042816|>
+<|visual token 042817|>
+<|visual token 042818|>
+<|visual token 042819|>
+<|visual token 042820|>
+<|visual token 042821|>
+<|visual token 042822|>
+<|visual token 042823|>
+<|visual token 042824|>
+<|visual token 042825|>
+<|visual token 042826|>
+<|visual token 042827|>
+<|visual token 042828|>
+<|visual token 042829|>
+<|visual token 042830|>
+<|visual token 042831|>
+<|visual token 042832|>
+<|visual token 042833|>
+<|visual token 042834|>
+<|visual token 042835|>
+<|visual token 042836|>
+<|visual token 042837|>
+<|visual token 042838|>
+<|visual token 042839|>
+<|visual token 042840|>
+<|visual token 042841|>
+<|visual token 042842|>
+<|visual token 042843|>
+<|visual token 042844|>
+<|visual token 042845|>
+<|visual token 042846|>
+<|visual token 042847|>
+<|visual token 042848|>
+<|visual token 042849|>
+<|visual token 042850|>
+<|visual token 042851|>
+<|visual token 042852|>
+<|visual token 042853|>
+<|visual token 042854|>
+<|visual token 042855|>
+<|visual token 042856|>
+<|visual token 042857|>
+<|visual token 042858|>
+<|visual token 042859|>
+<|visual token 042860|>
+<|visual token 042861|>
+<|visual token 042862|>
+<|visual token 042863|>
+<|visual token 042864|>
+<|visual token 042865|>
+<|visual token 042866|>
+<|visual token 042867|>
+<|visual token 042868|>
+<|visual token 042869|>
+<|visual token 042870|>
+<|visual token 042871|>
+<|visual token 042872|>
+<|visual token 042873|>
+<|visual token 042874|>
+<|visual token 042875|>
+<|visual token 042876|>
+<|visual token 042877|>
+<|visual token 042878|>
+<|visual token 042879|>
+<|visual token 042880|>
+<|visual token 042881|>
+<|visual token 042882|>
+<|visual token 042883|>
+<|visual token 042884|>
+<|visual token 042885|>
+<|visual token 042886|>
+<|visual token 042887|>
+<|visual token 042888|>
+<|visual token 042889|>
+<|visual token 042890|>
+<|visual token 042891|>
+<|visual token 042892|>
+<|visual token 042893|>
+<|visual token 042894|>
+<|visual token 042895|>
+<|visual token 042896|>
+<|visual token 042897|>
+<|visual token 042898|>
+<|visual token 042899|>
+<|visual token 042900|>
+<|visual token 042901|>
+<|visual token 042902|>
+<|visual token 042903|>
+<|visual token 042904|>
+<|visual token 042905|>
+<|visual token 042906|>
+<|visual token 042907|>
+<|visual token 042908|>
+<|visual token 042909|>
+<|visual token 042910|>
+<|visual token 042911|>
+<|visual token 042912|>
+<|visual token 042913|>
+<|visual token 042914|>
+<|visual token 042915|>
+<|visual token 042916|>
+<|visual token 042917|>
+<|visual token 042918|>
+<|visual token 042919|>
+<|visual token 042920|>
+<|visual token 042921|>
+<|visual token 042922|>
+<|visual token 042923|>
+<|visual token 042924|>
+<|visual token 042925|>
+<|visual token 042926|>
+<|visual token 042927|>
+<|visual token 042928|>
+<|visual token 042929|>
+<|visual token 042930|>
+<|visual token 042931|>
+<|visual token 042932|>
+<|visual token 042933|>
+<|visual token 042934|>
+<|visual token 042935|>
+<|visual token 042936|>
+<|visual token 042937|>
+<|visual token 042938|>
+<|visual token 042939|>
+<|visual token 042940|>
+<|visual token 042941|>
+<|visual token 045504|>
+<|visual token 045505|>
+<|visual token 045506|>
+<|visual token 045507|>
+<|visual token 045508|>
+<|visual token 045509|>
+<|visual token 045510|>
+<|visual token 045511|>
+<|visual token 045512|>
+<|visual token 045513|>
+<|visual token 045514|>
+<|visual token 045515|>
+<|visual token 045516|>
+<|visual token 045517|>
+<|visual token 045518|>
+<|visual token 045519|>
+<|visual token 045520|>
+<|visual token 045521|>
+<|visual token 045522|>
+<|visual token 045523|>
+<|visual token 045524|>
+<|visual token 045525|>
+<|visual token 045526|>
+<|visual token 045527|>
+<|visual token 045528|>
+<|visual token 045529|>
+<|visual token 045530|>
+<|visual token 045531|>
+<|visual token 045532|>
+<|visual token 045533|>
+<|visual token 045534|>
+<|visual token 045535|>
+<|visual token 045536|>
+<|visual token 045537|>
+<|visual token 045538|>
+<|visual token 045539|>
+<|visual token 045540|>
+<|visual token 045541|>
+<|visual token 045542|>
+<|visual token 045543|>
+<|visual token 045544|>
+<|visual token 045545|>
+<|visual token 045546|>
+<|visual token 045547|>
+<|visual token 045548|>
+<|visual token 045549|>
+<|visual token 045550|>
+<|visual token 045551|>
+<|visual token 045552|>
+<|visual token 045553|>
+<|visual token 045554|>
+<|visual token 045555|>
+<|visual token 045556|>
+<|visual token 045557|>
+<|visual token 045558|>
+<|visual token 045559|>
+<|visual token 045560|>
+<|visual token 045561|>
+<|visual token 045562|>
+<|visual token 045563|>
+<|visual token 045564|>
+<|visual token 045565|>
+<|visual token 045566|>
+<|visual token 045567|>
+<|visual token 045568|>
+<|visual token 045569|>
+<|visual token 045570|>
+<|visual token 045571|>
+<|visual token 045572|>
+<|visual token 045573|>
+<|visual token 045574|>
+<|visual token 045575|>
+<|visual token 045576|>
+<|visual token 045577|>
+<|visual token 045578|>
+<|visual token 045579|>
+<|visual token 045580|>
+<|visual token 045581|>
+<|visual token 045582|>
+<|visual token 045583|>
+<|visual token 045584|>
+<|visual token 045585|>
+<|visual token 045586|>
+<|visual token 045587|>
+<|visual token 045588|>
+<|visual token 045589|>
+<|visual token 045590|>
+<|visual token 045591|>
+<|visual token 045592|>
+<|visual token 045593|>
+<|visual token 045594|>
+<|visual token 045595|>
+<|visual token 045596|>
+<|visual token 045597|>
+<|visual token 045598|>
+<|visual token 045599|>
+<|visual token 045600|>
+<|visual token 045601|>
+<|visual token 045602|>
+<|visual token 045603|>
+<|visual token 045604|>
+<|visual token 045605|>
+<|visual token 045606|>
+<|visual token 045607|>
+<|visual token 045608|>
+<|visual token 045609|>
+<|visual token 045610|>
+<|visual token 045611|>
+<|visual token 045612|>
+<|visual token 045613|>
+<|visual token 045614|>
+<|visual token 045615|>
+<|visual token 045616|>
+<|visual token 045617|>
+<|visual token 045618|>
+<|visual token 045619|>
+<|visual token 045620|>
+<|visual token 045621|>
+<|visual token 045622|>
+<|visual token 045623|>
+<|visual token 045624|>
+<|visual token 045625|>
+<|visual token 045626|>
+<|visual token 045627|>
+<|visual token 045628|>
+<|visual token 045629|>
+<|visual token 045630|>
+<|visual token 045631|>
+<|visual token 045632|>
+<|visual token 045633|>
+<|visual token 045634|>
+<|visual token 045635|>
+<|visual token 045636|>
+<|visual token 045637|>
+<|visual token 045638|>
+<|visual token 045639|>
+<|visual token 045640|>
+<|visual token 045641|>
+<|visual token 045642|>
+<|visual token 045643|>
+<|visual token 045644|>
+<|visual token 045645|>
+<|visual token 045646|>
+<|visual token 045647|>
+<|visual token 045648|>
+<|visual token 045649|>
+<|visual token 045650|>
+<|visual token 045651|>
+<|visual token 045652|>
+<|visual token 045653|>
+<|visual token 045654|>
+<|visual token 045655|>
+<|visual token 045656|>
+<|visual token 045657|>
+<|visual token 045658|>
+<|visual token 045659|>
+<|visual token 045660|>
+<|visual token 045661|>
+<|visual token 045662|>
+<|visual token 045663|>
+<|visual token 045664|>
+<|visual token 045665|>
+<|visual token 045666|>
+<|visual token 045667|>
+<|visual token 045668|>
+<|visual token 045669|>
+<|visual token 045670|>
+<|visual token 045671|>
+<|visual token 045672|>
+<|visual token 045673|>
+<|visual token 045674|>
+<|visual token 045675|>
+<|visual token 045676|>
+<|visual token 045677|>
+<|visual token 045678|>
+<|visual token 045679|>
+<|visual token 045680|>
+<|visual token 045681|>
+<|visual token 045682|>
+<|visual token 045683|>
+<|visual token 045684|>
+<|visual token 045685|>
+<|visual token 045686|>
+<|visual token 045687|>
+<|visual token 045688|>
+<|visual token 045689|>
+<|visual token 045690|>
+<|visual token 045691|>
+<|visual token 045692|>
+<|visual token 045693|>
+<|visual token 045694|>
+<|visual token 045695|>
+<|visual token 045696|>
+<|visual token 045697|>
+<|visual token 045698|>
+<|visual token 045699|>
+<|visual token 045700|>
+<|visual token 045701|>
+<|visual token 045702|>
+<|visual token 045703|>
+<|visual token 045704|>
+<|visual token 045705|>
+<|visual token 045706|>
+<|visual token 045707|>
+<|visual token 045708|>
+<|visual token 045709|>
+<|visual token 045710|>
+<|visual token 045711|>
+<|visual token 045712|>
+<|visual token 045713|>
+<|visual token 045714|>
+<|visual token 045715|>
+<|visual token 045716|>
+<|visual token 045717|>
+<|visual token 045718|>
+<|visual token 045719|>
+<|visual token 045720|>
+<|visual token 045721|>
+<|visual token 045722|>
+<|visual token 045723|>
+<|visual token 045724|>
+<|visual token 045725|>
+<|visual token 045726|>
+<|visual token 045727|>
+<|visual token 045728|>
+<|visual token 045729|>
+<|visual token 045730|>
+<|visual token 045731|>
+<|visual token 045732|>
+<|visual token 045733|>
+<|visual token 045734|>
+<|visual token 045735|>
+<|visual token 045736|>
+<|visual token 045737|>
+<|visual token 045738|>
+<|visual token 045739|>
+<|visual token 045740|>
+<|visual token 045741|>
+<|visual token 045742|>
+<|visual token 045743|>
+<|visual token 045744|>
+<|visual token 045745|>
+<|visual token 045746|>
+<|visual token 045747|>
+<|visual token 045748|>
+<|visual token 045749|>
+<|visual token 045750|>
+<|visual token 045751|>
+<|visual token 045752|>
+<|visual token 045753|>
+<|visual token 045754|>
+<|visual token 045755|>
+<|visual token 045756|>
+<|visual token 045757|>
+<|visual token 045758|>
+<|visual token 045759|>
+<|visual token 045760|>
+<|visual token 045761|>
+<|visual token 045762|>
+<|visual token 045763|>
+<|visual token 045764|>
+<|visual token 045765|>
+<|visual token 045766|>
+<|visual token 045767|>
+<|visual token 045768|>
+<|visual token 045769|>
+<|visual token 045770|>
+<|visual token 045771|>
+<|visual token 045772|>
+<|visual token 045773|>
+<|visual token 045774|>
+<|visual token 045775|>
+<|visual token 045776|>
+<|visual token 045777|>
+<|visual token 045778|>
+<|visual token 045779|>
+<|visual token 045780|>
+<|visual token 045781|>
+<|visual token 045782|>
+<|visual token 045783|>
+<|visual token 045784|>
+<|visual token 045785|>
+<|visual token 045786|>
+<|visual token 045787|>
+<|visual token 045788|>
+<|visual token 045789|>
+<|visual token 045790|>
+<|visual token 045791|>
+<|visual token 045792|>
+<|visual token 045793|>
+<|visual token 045794|>
+<|visual token 045795|>
+<|visual token 045796|>
+<|visual token 045797|>
+<|visual token 045798|>
+<|visual token 045799|>
+<|visual token 045800|>
+<|visual token 045801|>
+<|visual token 045802|>
+<|visual token 045803|>
+<|visual token 045804|>
+<|visual token 045805|>
+<|visual token 045806|>
+<|visual token 045807|>
+<|visual token 045808|>
+<|visual token 045809|>
+<|visual token 045810|>
+<|visual token 045811|>
+<|visual token 045812|>
+<|visual token 045813|>
+<|visual token 045814|>
+<|visual token 045815|>
+<|visual token 045816|>
+<|visual token 045817|>
+<|visual token 045818|>
+<|visual token 045819|>
+<|visual token 045820|>
+<|visual token 045821|>
+<|visual token 045822|>
+<|visual token 045823|>
+<|visual token 045824|>
+<|visual token 045825|>
+<|visual token 045826|>
+<|visual token 045827|>
+<|visual token 045828|>
+<|visual token 045829|>
+<|visual token 045830|>
+<|visual token 045831|>
+<|visual token 045832|>
+<|visual token 045833|>
+<|visual token 045834|>
+<|visual token 045835|>
+<|visual token 045836|>
+<|visual token 045837|>
+<|visual token 045838|>
+<|visual token 045839|>
+<|visual token 045840|>
+<|visual token 045841|>
+<|visual token 045842|>
+<|visual token 045843|>
+<|visual token 045844|>
+<|visual token 045845|>
+<|visual token 045846|>
+<|visual token 045847|>
+<|visual token 045848|>
+<|visual token 045849|>
+<|visual token 045850|>
+<|visual token 045851|>
+<|visual token 045852|>
+<|visual token 045853|>
+<|visual token 045854|>
+<|visual token 045855|>
+<|visual token 045856|>
+<|visual token 045857|>
+<|visual token 045858|>
+<|visual token 045859|>
+<|visual token 045860|>
+<|visual token 045861|>
+<|visual token 045862|>
+<|visual token 045863|>
+<|visual token 045864|>
+<|visual token 045865|>
+<|visual token 045866|>
+<|visual token 045867|>
+<|visual token 045868|>
+<|visual token 045869|>
+<|visual token 045870|>
+<|visual token 045871|>
+<|visual token 045872|>
+<|visual token 045873|>
+<|visual token 045874|>
+<|visual token 045875|>
+<|visual token 045876|>
+<|visual token 045877|>
+<|visual token 045878|>
+<|visual token 045879|>
+<|visual token 045880|>
+<|visual token 045881|>
+<|visual token 045882|>
+<|visual token 045883|>
+<|visual token 045884|>
+<|visual token 045885|>
+<|visual token 045886|>
+<|visual token 045887|>
+<|visual token 045888|>
+<|visual token 045889|>
+<|visual token 045890|>
+<|visual token 045891|>
+<|visual token 045892|>
+<|visual token 045893|>
+<|visual token 045894|>
+<|visual token 045895|>
+<|visual token 045896|>
+<|visual token 045897|>
+<|visual token 045898|>
+<|visual token 045899|>
+<|visual token 045900|>
+<|visual token 045901|>
+<|visual token 045902|>
+<|visual token 045903|>
+<|visual token 045904|>
+<|visual token 045905|>
+<|visual token 045906|>
+<|visual token 045907|>
+<|visual token 045908|>
+<|visual token 045909|>
+<|visual token 045910|>
+<|visual token 045911|>
+<|visual token 045912|>
+<|visual token 045913|>
+<|visual token 045914|>
+<|visual token 045915|>
+<|visual token 045916|>
+<|visual token 045917|>
+<|visual token 045918|>
+<|visual token 045919|>
+<|visual token 045920|>
+<|visual token 045921|>
+<|visual token 045922|>
+<|visual token 045923|>
+<|visual token 045924|>
+<|visual token 045925|>
+<|visual token 045926|>
+<|visual token 045927|>
+<|visual token 045928|>
+<|visual token 045929|>
+<|visual token 045930|>
+<|visual token 045931|>
+<|visual token 045932|>
+<|visual token 045933|>
+<|visual token 045934|>
+<|visual token 045935|>
+<|visual token 045936|>
+<|visual token 045937|>
+<|visual token 045938|>
+<|visual token 045939|>
+<|visual token 045940|>
+<|visual token 045941|>
+<|visual token 045942|>
+<|visual token 045943|>
+<|visual token 045944|>
+<|visual token 045945|>
+<|visual token 045946|>
+<|visual token 045947|>
+<|visual token 045948|>
+<|visual token 045949|>
+<|visual token 045950|>
+<|visual token 045951|>
+<|visual token 045952|>
+<|visual token 045953|>
+<|visual token 045954|>
+<|visual token 045955|>
+<|visual token 045956|>
+<|visual token 045957|>
+<|visual token 045958|>
+<|visual token 045959|>
+<|visual token 045960|>
+<|visual token 045961|>
+<|visual token 045962|>
+<|visual token 045963|>
+<|visual token 045964|>
+<|visual token 045965|>
+<|visual token 045966|>
+<|visual token 045967|>
+<|visual token 045968|>
+<|visual token 045969|>
+<|visual token 045970|>
+<|visual token 045971|>
+<|visual token 045972|>
+<|visual token 045973|>
+<|visual token 045974|>
+<|visual token 045975|>
+<|visual token 045976|>
+<|visual token 045977|>
+<|visual token 045978|>
+<|visual token 045979|>
+<|visual token 045980|>
+<|visual token 045981|>
+<|visual token 045982|>
+<|visual token 045983|>
+<|visual token 045984|>
+<|visual token 045985|>
+<|visual token 045986|>
+<|visual token 045987|>
+<|visual token 045988|>
+<|visual token 045989|>
+<|visual token 045990|>
+<|visual token 045991|>
+<|visual token 045992|>
+<|visual token 045993|>
+<|visual token 045994|>
+<|visual token 045995|>
+<|visual token 045996|>
+<|visual token 045997|>
+<|visual token 045998|>
+<|visual token 045999|>
+<|visual token 046000|>
+<|visual token 046001|>
+<|visual token 046002|>
+<|visual token 046003|>
+<|visual token 046004|>
+<|visual token 046005|>
+<|visual token 046006|>
+<|visual token 046007|>
+<|visual token 046008|>
+<|visual token 046009|>
+<|visual token 046010|>
+<|visual token 046011|>
+<|visual token 046012|>
+<|visual token 046013|>
+<|visual token 046014|>
+<|visual token 046015|>
+<|visual token 046016|>
+<|visual token 046017|>
+<|visual token 046018|>
+<|visual token 046019|>
+<|visual token 046020|>
+<|visual token 046021|>
+<|visual token 046022|>
+<|visual token 046023|>
+<|visual token 046024|>
+<|visual token 046025|>
+<|visual token 046026|>
+<|visual token 046027|>
+<|visual token 046028|>
+<|visual token 046029|>
+<|visual token 046030|>
+<|visual token 046031|>
+<|visual token 046032|>
+<|visual token 046033|>
+<|visual token 046034|>
+<|visual token 046035|>
+<|visual token 046036|>
+<|visual token 046037|>
+<|visual token 046038|>
+<|visual token 046039|>
+<|visual token 046040|>
+<|visual token 046041|>
+<|visual token 046042|>
+<|visual token 046043|>
+<|visual token 046044|>
+<|visual token 046045|>
+<|visual token 046046|>
+<|visual token 046047|>
+<|visual token 046048|>
+<|visual token 046049|>
+<|visual token 046050|>
+<|visual token 046051|>
+<|visual token 046052|>
+<|visual token 046053|>
+<|visual token 046054|>
+<|visual token 046055|>
+<|visual token 046056|>
+<|visual token 046057|>
+<|visual token 046058|>
+<|visual token 046059|>
+<|visual token 046060|>
+<|visual token 046061|>
+<|visual token 046062|>
+<|visual token 046063|>
+<|visual token 046064|>
+<|visual token 046065|>
+<|visual token 046066|>
+<|visual token 046067|>
+<|visual token 046068|>
+<|visual token 046069|>
+<|visual token 046070|>
+<|visual token 046071|>
+<|visual token 046072|>
+<|visual token 046073|>
+<|visual token 046074|>
+<|visual token 046075|>
+<|visual token 046076|>
+<|visual token 046077|>
+<|visual token 046078|>
+<|visual token 046079|>
+<|visual token 046080|>
+<|visual token 046081|>
+<|visual token 046082|>
+<|visual token 046083|>
+<|visual token 046084|>
+<|visual token 046085|>
+<|visual token 046086|>
+<|visual token 046087|>
+<|visual token 046088|>
+<|visual token 046089|>
+<|visual token 046090|>
+<|visual token 046091|>
+<|visual token 046092|>
+<|visual token 046093|>
+<|visual token 046094|>
+<|visual token 046095|>
+<|visual token 046096|>
+<|visual token 046097|>
+<|visual token 046098|>
+<|visual token 046099|>
+<|visual token 046100|>
+<|visual token 046101|>
+<|visual token 046102|>
+<|visual token 046103|>
+<|visual token 046104|>
+<|visual token 046105|>
+<|visual token 046106|>
+<|visual token 046107|>
+<|visual token 046108|>
+<|visual token 046109|>
+<|visual token 046110|>
+<|visual token 046111|>
+<|visual token 046112|>
+<|visual token 046113|>
+<|visual token 046114|>
+<|visual token 046115|>
+<|visual token 046116|>
+<|visual token 046117|>
+<|visual token 046118|>
+<|visual token 046119|>
+<|visual token 046120|>
+<|visual token 046121|>
+<|visual token 046122|>
+<|visual token 046123|>
+<|visual token 046124|>
+<|visual token 046125|>
+<|visual token 046126|>
+<|visual token 046127|>
+<|visual token 046128|>
+<|visual token 046129|>
+<|visual token 046130|>
+<|visual token 046131|>
+<|visual token 046132|>
+<|visual token 046133|>
+<|visual token 046134|>
+<|visual token 046135|>
+<|visual token 046136|>
+<|visual token 046137|>
+<|visual token 046138|>
+<|visual token 046139|>
+<|visual token 046140|>
+<|visual token 046141|>
+<|visual token 046142|>
+<|visual token 046143|>
+<|visual token 046144|>
+<|visual token 046145|>
+<|visual token 046146|>
+<|visual token 046147|>
+<|visual token 046148|>
+<|visual token 046149|>
+<|visual token 046150|>
+<|visual token 046151|>
+<|visual token 046152|>
+<|visual token 046153|>
+<|visual token 046154|>
+<|visual token 046155|>
+<|visual token 046156|>
+<|visual token 046157|>
+<|visual token 046158|>
+<|visual token 046159|>
+<|visual token 046160|>
+<|visual token 046161|>
+<|visual token 046162|>
+<|visual token 046163|>
+<|visual token 046164|>
+<|visual token 046165|>
+<|visual token 046166|>
+<|visual token 046167|>
+<|visual token 046168|>
+<|visual token 046169|>
+<|visual token 046170|>
+<|visual token 046171|>
+<|visual token 046172|>
+<|visual token 046173|>
+<|visual token 046174|>
+<|visual token 046175|>
+<|visual token 046176|>
+<|visual token 046177|>
+<|visual token 046178|>
+<|visual token 046179|>
+<|visual token 046180|>
+<|visual token 046181|>
+<|visual token 046182|>
+<|visual token 046183|>
+<|visual token 046184|>
+<|visual token 046185|>
+<|visual token 046186|>
+<|visual token 046187|>
+<|visual token 046188|>
+<|visual token 046189|>
+<|visual token 046190|>
+<|visual token 046191|>
+<|visual token 046192|>
+<|visual token 046193|>
+<|visual token 046194|>
+<|visual token 046195|>
+<|visual token 046196|>
+<|visual token 046197|>
+<|visual token 046198|>
+<|visual token 046199|>
+<|visual token 046200|>
+<|visual token 046201|>
+<|visual token 046202|>
+<|visual token 046203|>
+<|visual token 046204|>
+<|visual token 046205|>
+<|visual token 046206|>
+<|visual token 046207|>
+<|visual token 046208|>
+<|visual token 046209|>
+<|visual token 046210|>
+<|visual token 046211|>
+<|visual token 046212|>
+<|visual token 046213|>
+<|visual token 046214|>
+<|visual token 046215|>
+<|visual token 046216|>
+<|visual token 046217|>
+<|visual token 046218|>
+<|visual token 046219|>
+<|visual token 046220|>
+<|visual token 046221|>
+<|visual token 046222|>
+<|visual token 046223|>
+<|visual token 046224|>
+<|visual token 046225|>
+<|visual token 046226|>
+<|visual token 046227|>
+<|visual token 046228|>
+<|visual token 046229|>
+<|visual token 046230|>
+<|visual token 046231|>
+<|visual token 046232|>
+<|visual token 046233|>
+<|visual token 046234|>
+<|visual token 046235|>
+<|visual token 046236|>
+<|visual token 046237|>
+<|visual token 046238|>
+<|visual token 046239|>
+<|visual token 046240|>
+<|visual token 046241|>
+<|visual token 046242|>
+<|visual token 046243|>
+<|visual token 046244|>
+<|visual token 046245|>
+<|visual token 046246|>
+<|visual token 046247|>
+<|visual token 046248|>
+<|visual token 046249|>
+<|visual token 046250|>
+<|visual token 046251|>
+<|visual token 046252|>
+<|visual token 046253|>
+<|visual token 046254|>
+<|visual token 046255|>
+<|visual token 046256|>
+<|visual token 046257|>
+<|visual token 046258|>
+<|visual token 046259|>
+<|visual token 046260|>
+<|visual token 046261|>
+<|visual token 046262|>
+<|visual token 046263|>
+<|visual token 046264|>
+<|visual token 046265|>
+<|visual token 046266|>
+<|visual token 046267|>
+<|visual token 046268|>
+<|visual token 046269|>
+<|visual token 046270|>
+<|visual token 046271|>
+<|visual token 046272|>
+<|visual token 046273|>
+<|visual token 046274|>
+<|visual token 046275|>
+<|visual token 046276|>
+<|visual token 046277|>
+<|visual token 046278|>
+<|visual token 046279|>
+<|visual token 046280|>
+<|visual token 046281|>
+<|visual token 046282|>
+<|visual token 046283|>
+<|visual token 046284|>
+<|visual token 046285|>
+<|visual token 046286|>
+<|visual token 046287|>
+<|visual token 046288|>
+<|visual token 046289|>
+<|visual token 046290|>
+<|visual token 046291|>
+<|visual token 046292|>
+<|visual token 046293|>
+<|visual token 046294|>
+<|visual token 046295|>
+<|visual token 046296|>
+<|visual token 046297|>
+<|visual token 046298|>
+<|visual token 046299|>
+<|visual token 046300|>
+<|visual token 046301|>
+<|visual token 046302|>
+<|visual token 046303|>
+<|visual token 046304|>
+<|visual token 046305|>
+<|visual token 046306|>
+<|visual token 046307|>
+<|visual token 046308|>
+<|visual token 046309|>
+<|visual token 046310|>
+<|visual token 046311|>
+<|visual token 046312|>
+<|visual token 046313|>
+<|visual token 046314|>
+<|visual token 046315|>
+<|visual token 046316|>
+<|visual token 046317|>
+<|visual token 046318|>
+<|visual token 046319|>
+<|visual token 046320|>
+<|visual token 046321|>
+<|visual token 046322|>
+<|visual token 046323|>
+<|visual token 046324|>
+<|visual token 046325|>
+<|visual token 046326|>
+<|visual token 046327|>
+<|visual token 046328|>
+<|visual token 046329|>
+<|visual token 046330|>
+<|visual token 046331|>
+<|visual token 046332|>
+<|visual token 046333|>
+<|visual token 046334|>
+<|visual token 046335|>
+<|visual token 046336|>
+<|visual token 046337|>
+<|visual token 046338|>
+<|visual token 046339|>
+<|visual token 046340|>
+<|visual token 046341|>
+<|visual token 046342|>
+<|visual token 046343|>
+<|visual token 046344|>
+<|visual token 046345|>
+<|visual token 046346|>
+<|visual token 046347|>
+<|visual token 046348|>
+<|visual token 046349|>
+<|visual token 046350|>
+<|visual token 046351|>
+<|visual token 046352|>
+<|visual token 046353|>
+<|visual token 046354|>
+<|visual token 046355|>
+<|visual token 046356|>
+<|visual token 046357|>
+<|visual token 046358|>
+<|visual token 046359|>
+<|visual token 046360|>
+<|visual token 046361|>
+<|visual token 046362|>
+<|visual token 046363|>
+<|visual token 046364|>
+<|visual token 046365|>
+<|visual token 046366|>
+<|visual token 046367|>
+<|visual token 046368|>
+<|visual token 046369|>
+<|visual token 046370|>
+<|visual token 046371|>
+<|visual token 046372|>
+<|visual token 046373|>
+<|visual token 046374|>
+<|visual token 046375|>
+<|visual token 046376|>
+<|visual token 046377|>
+<|visual token 046378|>
+<|visual token 046379|>
+<|visual token 046380|>
+<|visual token 046381|>
+<|visual token 046382|>
+<|visual token 046383|>
+<|visual token 046384|>
+<|visual token 046385|>
+<|visual token 046386|>
+<|visual token 046387|>
+<|visual token 046388|>
+<|visual token 046389|>
+<|visual token 046390|>
+<|visual token 046391|>
+<|visual token 046392|>
+<|visual token 046393|>
+<|visual token 046394|>
+<|visual token 046395|>
+<|visual token 046396|>
+<|visual token 046397|>
+<|visual token 046398|>
+<|visual token 046399|>
+<|visual token 046400|>
+<|visual token 046401|>
+<|visual token 046402|>
+<|visual token 046403|>
+<|visual token 046404|>
+<|visual token 046405|>
+<|visual token 046406|>
+<|visual token 046407|>
+<|visual token 046408|>
+<|visual token 046409|>
+<|visual token 046410|>
+<|visual token 046411|>
+<|visual token 046412|>
+<|visual token 046413|>
+<|visual token 046414|>
+<|visual token 046415|>
+<|visual token 046416|>
+<|visual token 046417|>
+<|visual token 046418|>
+<|visual token 046419|>
+<|visual token 046420|>
+<|visual token 046421|>
+<|visual token 046422|>
+<|visual token 046423|>
+<|visual token 046424|>
+<|visual token 046425|>
+<|visual token 046426|>
+<|visual token 046427|>
+<|visual token 046428|>
+<|visual token 046429|>
+<|visual token 046430|>
+<|visual token 046431|>
+<|visual token 046432|>
+<|visual token 046433|>
+<|visual token 046434|>
+<|visual token 046435|>
+<|visual token 046436|>
+<|visual token 046437|>
+<|visual token 046438|>
+<|visual token 046439|>
+<|visual token 046440|>
+<|visual token 046441|>
+<|visual token 046442|>
+<|visual token 046443|>
+<|visual token 046444|>
+<|visual token 046445|>
+<|visual token 046446|>
+<|visual token 046447|>
+<|visual token 046448|>
+<|visual token 046449|>
+<|visual token 046450|>
+<|visual token 046451|>
+<|visual token 046452|>
+<|visual token 046453|>
+<|visual token 046454|>
+<|visual token 046455|>
+<|visual token 046456|>
+<|visual token 046457|>
+<|visual token 046458|>
+<|visual token 046459|>
+<|visual token 046460|>
+<|visual token 046461|>
+<|visual token 046462|>
+<|visual token 046463|>
+<|visual token 046464|>
+<|visual token 046465|>
+<|visual token 046466|>
+<|visual token 046467|>
+<|visual token 046468|>
+<|visual token 046469|>
+<|visual token 046470|>
+<|visual token 046471|>
+<|visual token 046472|>
+<|visual token 046473|>
+<|visual token 046474|>
+<|visual token 046475|>
+<|visual token 046476|>
+<|visual token 046477|>
+<|visual token 046478|>
+<|visual token 046479|>
+<|visual token 046480|>
+<|visual token 046481|>
+<|visual token 046482|>
+<|visual token 046483|>
+<|visual token 046484|>
+<|visual token 046485|>
+<|visual token 046486|>
+<|visual token 046487|>
+<|visual token 046488|>
+<|visual token 046489|>
+<|visual token 046490|>
+<|visual token 046491|>
+<|visual token 046492|>
+<|visual token 046493|>
+<|visual token 046494|>
+<|visual token 046495|>
+<|visual token 046496|>
+<|visual token 046497|>
+<|visual token 046498|>
+<|visual token 046499|>
+<|visual token 046500|>
+<|visual token 046501|>
+<|visual token 046502|>
+<|visual token 046503|>
+<|visual token 046504|>
+<|visual token 046505|>
+<|visual token 046506|>
+<|visual token 046507|>
+<|visual token 046508|>
+<|visual token 046509|>
+<|visual token 046510|>
+<|visual token 046511|>
+<|visual token 046512|>
+<|visual token 046513|>
+<|visual token 046514|>
+<|visual token 046515|>
+<|visual token 046516|>
+<|visual token 046517|>
+<|visual token 046518|>
+<|visual token 046519|>
+<|visual token 046520|>
+<|visual token 046521|>
+<|visual token 046522|>
+<|visual token 046523|>
+<|visual token 046524|>
+<|visual token 046525|>
+<|visual token 046526|>
+<|visual token 046527|>
+<|visual token 046528|>
+<|visual token 046529|>
+<|visual token 046530|>
+<|visual token 046531|>
+<|visual token 046532|>
+<|visual token 046533|>
+<|visual token 046534|>
+<|visual token 046535|>
+<|visual token 046536|>
+<|visual token 046537|>
+<|visual token 046538|>
+<|visual token 046539|>
+<|visual token 046540|>
+<|visual token 046541|>
+<|visual token 046542|>
+<|visual token 046543|>
+<|visual token 046544|>
+<|visual token 046545|>
+<|visual token 046546|>
+<|visual token 046547|>
+<|visual token 046548|>
+<|visual token 046549|>
+<|visual token 046550|>
+<|visual token 046551|>
+<|visual token 046552|>
+<|visual token 046553|>
+<|visual token 046554|>
+<|visual token 046555|>
+<|visual token 046556|>
+<|visual token 046557|>
+<|visual token 046558|>
+<|visual token 046559|>
+<|visual token 046560|>
+<|visual token 046561|>
+<|visual token 046562|>
+<|visual token 046563|>
+<|visual token 046564|>
+<|visual token 046565|>
+<|visual token 046566|>
+<|visual token 046567|>
+<|visual token 046568|>
+<|visual token 046569|>
+<|visual token 046570|>
+<|visual token 046571|>
+<|visual token 046572|>
+<|visual token 046573|>
+<|visual token 046574|>
+<|visual token 046575|>
+<|visual token 046576|>
+<|visual token 046577|>
+<|visual token 046578|>
+<|visual token 046579|>
+<|visual token 046580|>
+<|visual token 046581|>
+<|visual token 046582|>
+<|visual token 046583|>
+<|visual token 046584|>
+<|visual token 046585|>
+<|visual token 046586|>
+<|visual token 046587|>
+<|visual token 046588|>
+<|visual token 046589|>
+<|visual token 046590|>
+<|visual token 046591|>
+<|visual token 046592|>
+<|visual token 046593|>
+<|visual token 046594|>
+<|visual token 046595|>
+<|visual token 046596|>
+<|visual token 046597|>
+<|visual token 046598|>
+<|visual token 046599|>
+<|visual token 046600|>
+<|visual token 046601|>
+<|visual token 046602|>
+<|visual token 046603|>
+<|visual token 046604|>
+<|visual token 046605|>
+<|visual token 046606|>
+<|visual token 046607|>
+<|visual token 046608|>
+<|visual token 046609|>
+<|visual token 046610|>
+<|visual token 046611|>
+<|visual token 046612|>
+<|visual token 046613|>
+<|visual token 046614|>
+<|visual token 046615|>
+<|visual token 046616|>
+<|visual token 046617|>
+<|visual token 046618|>
+<|visual token 046619|>
+<|visual token 046620|>
+<|visual token 046621|>
+<|visual token 046622|>
+<|visual token 046623|>
+<|visual token 046624|>
+<|visual token 046625|>
+<|visual token 046626|>
+<|visual token 046627|>
+<|visual token 046628|>
+<|visual token 046629|>
+<|visual token 046630|>
+<|visual token 046631|>
+<|visual token 046632|>
+<|visual token 046633|>
+<|visual token 046634|>
+<|visual token 046635|>
+<|visual token 046636|>
+<|visual token 046637|>
+<|visual token 046638|>
+<|visual token 046639|>
+<|visual token 046640|>
+<|visual token 046641|>
+<|visual token 046642|>
+<|visual token 046643|>
+<|visual token 046644|>
+<|visual token 046645|>
+<|visual token 046646|>
+<|visual token 046647|>
+<|visual token 046648|>
+<|visual token 046649|>
+<|visual token 046650|>
+<|visual token 046651|>
+<|visual token 046652|>
+<|visual token 046653|>
+<|visual token 046654|>
+<|visual token 046655|>
+<|visual token 046656|>
+<|visual token 046657|>
+<|visual token 046658|>
+<|visual token 046659|>
+<|visual token 046660|>
+<|visual token 046661|>
+<|visual token 046662|>
+<|visual token 046663|>
+<|visual token 046664|>
+<|visual token 046665|>
+<|visual token 046666|>
+<|visual token 046667|>
+<|visual token 046668|>
+<|visual token 046669|>
+<|visual token 046670|>
+<|visual token 046671|>
+<|visual token 046672|>
+<|visual token 046673|>
+<|visual token 046674|>
+<|visual token 046675|>
+<|visual token 046676|>
+<|visual token 046677|>
+<|visual token 046678|>
+<|visual token 046679|>
+<|visual token 046680|>
+<|visual token 046681|>
+<|visual token 046682|>
+<|visual token 046683|>
+<|visual token 046684|>
+<|visual token 046685|>
+<|visual token 046686|>
+<|visual token 046687|>
+<|visual token 046688|>
+<|visual token 046689|>
+<|visual token 046690|>
+<|visual token 046691|>
+<|visual token 046692|>
+<|visual token 046693|>
+<|visual token 046694|>
+<|visual token 046695|>
+<|visual token 046696|>
+<|visual token 046697|>
+<|visual token 046698|>
+<|visual token 046699|>
+<|visual token 046700|>
+<|visual token 046701|>
+<|visual token 046702|>
+<|visual token 046703|>
+<|visual token 046704|>
+<|visual token 046705|>
+<|visual token 046706|>
+<|visual token 046707|>
+<|visual token 046708|>
+<|visual token 046709|>
+<|visual token 046710|>
+<|visual token 046711|>
+<|visual token 046712|>
+<|visual token 046713|>
+<|visual token 046714|>
+<|visual token 046715|>
+<|visual token 046716|>
+<|visual token 046717|>
+<|visual token 046718|>
+<|visual token 046719|>
+<|visual token 046720|>
+<|visual token 046721|>
+<|visual token 046722|>
+<|visual token 046723|>
+<|visual token 046724|>
+<|visual token 046725|>
+<|visual token 046726|>
+<|visual token 046727|>
+<|visual token 046728|>
+<|visual token 046729|>
+<|visual token 046730|>
+<|visual token 046731|>
+<|visual token 046732|>
+<|visual token 046733|>
+<|visual token 046734|>
+<|visual token 046735|>
+<|visual token 046736|>
+<|visual token 046737|>
+<|visual token 046738|>
+<|visual token 046739|>
+<|visual token 046740|>
+<|visual token 046741|>
+<|visual token 046742|>
+<|visual token 046743|>
+<|visual token 046744|>
+<|visual token 046745|>
+<|visual token 046746|>
+<|visual token 046747|>
+<|visual token 046748|>
+<|visual token 046749|>
+<|visual token 046750|>
+<|visual token 046751|>
+<|visual token 046752|>
+<|visual token 046753|>
+<|visual token 046754|>
+<|visual token 046755|>
+<|visual token 046756|>
+<|visual token 046757|>
+<|visual token 046758|>
+<|visual token 046759|>
+<|visual token 046760|>
+<|visual token 046761|>
+<|visual token 046762|>
+<|visual token 046763|>
+<|visual token 046764|>
+<|visual token 046765|>
+<|visual token 046766|>
+<|visual token 046767|>
+<|visual token 046768|>
+<|visual token 046769|>
+<|visual token 046770|>
+<|visual token 046771|>
+<|visual token 046772|>
+<|visual token 046773|>
+<|visual token 046774|>
+<|visual token 046775|>
+<|visual token 046776|>
+<|visual token 046777|>
+<|visual token 046778|>
+<|visual token 046779|>
+<|visual token 046780|>
+<|visual token 046781|>
+<|visual token 046782|>
+<|visual token 046783|>
+<|visual token 046784|>
+<|visual token 046785|>
+<|visual token 046786|>
+<|visual token 046787|>
+<|visual token 046788|>
+<|visual token 046789|>
+<|visual token 046790|>
+<|visual token 046791|>
+<|visual token 046792|>
+<|visual token 046793|>
+<|visual token 046794|>
+<|visual token 046795|>
+<|visual token 046796|>
+<|visual token 046797|>
+<|visual token 046798|>
+<|visual token 046799|>
+<|visual token 046800|>
+<|visual token 046801|>
+<|visual token 046802|>
+<|visual token 046803|>
+<|visual token 046804|>
+<|visual token 046805|>
+<|visual token 046806|>
+<|visual token 046807|>
+<|visual token 046808|>
+<|visual token 046809|>
+<|visual token 046810|>
+<|visual token 046811|>
+<|visual token 046812|>
+<|visual token 046813|>
+<|visual token 046814|>
+<|visual token 046815|>
+<|visual token 046816|>
+<|visual token 046817|>
+<|visual token 046818|>
+<|visual token 046819|>
+<|visual token 046820|>
+<|visual token 046821|>
+<|visual token 046822|>
+<|visual token 046823|>
+<|visual token 046824|>
+<|visual token 046825|>
+<|visual token 046826|>
+<|visual token 046827|>
+<|visual token 046828|>
+<|visual token 046829|>
+<|visual token 046830|>
+<|visual token 046831|>
+<|visual token 046832|>
+<|visual token 046833|>
+<|visual token 046834|>
+<|visual token 046835|>
+<|visual token 046836|>
+<|visual token 046837|>
+<|visual token 046838|>
+<|visual token 046839|>
+<|visual token 046840|>
+<|visual token 046841|>
+<|visual token 046842|>
+<|visual token 046843|>
+<|visual token 046844|>
+<|visual token 046845|>
+<|visual token 046846|>
+<|visual token 046847|>
+<|visual token 046848|>
+<|visual token 046849|>
+<|visual token 046850|>
+<|visual token 046851|>
+<|visual token 046852|>
+<|visual token 046853|>
+<|visual token 046854|>
+<|visual token 046855|>
+<|visual token 046856|>
+<|visual token 046857|>
+<|visual token 046858|>
+<|visual token 046859|>
+<|visual token 046860|>
+<|visual token 046861|>
+<|visual token 046862|>
+<|visual token 046863|>
+<|visual token 046864|>
+<|visual token 046865|>
+<|visual token 046866|>
+<|visual token 046867|>
+<|visual token 046868|>
+<|visual token 046869|>
+<|visual token 046870|>
+<|visual token 046871|>
+<|visual token 046872|>
+<|visual token 046873|>
+<|visual token 046874|>
+<|visual token 046875|>
+<|visual token 046876|>
+<|visual token 046877|>
+<|visual token 046878|>
+<|visual token 046879|>
+<|visual token 046880|>
+<|visual token 046881|>
+<|visual token 046882|>
+<|visual token 046883|>
+<|visual token 046884|>
+<|visual token 046885|>
+<|visual token 046886|>
+<|visual token 046887|>
+<|visual token 046888|>
+<|visual token 046889|>
+<|visual token 046890|>
+<|visual token 046891|>
+<|visual token 046892|>
+<|visual token 046893|>
+<|visual token 046894|>
+<|visual token 046895|>
+<|visual token 046896|>
+<|visual token 046897|>
+<|visual token 046898|>
+<|visual token 046899|>
+<|visual token 046900|>
+<|visual token 046901|>
+<|visual token 046902|>
+<|visual token 046903|>
+<|visual token 046904|>
+<|visual token 046905|>
+<|visual token 046906|>
+<|visual token 046907|>
+<|visual token 046908|>
+<|visual token 046909|>
+<|visual token 046910|>
+<|visual token 046911|>
+<|visual token 046912|>
+<|visual token 046913|>
+<|visual token 046914|>
+<|visual token 046915|>
+<|visual token 046916|>
+<|visual token 046917|>
+<|visual token 046918|>
+<|visual token 046919|>
+<|visual token 046920|>
+<|visual token 046921|>
+<|visual token 046922|>
+<|visual token 046923|>
+<|visual token 046924|>
+<|visual token 046925|>
+<|visual token 046926|>
+<|visual token 046927|>
+<|visual token 046928|>
+<|visual token 046929|>
+<|visual token 046930|>
+<|visual token 046931|>
+<|visual token 046932|>
+<|visual token 046933|>
+<|visual token 046934|>
+<|visual token 046935|>
+<|visual token 046936|>
+<|visual token 046937|>
+<|visual token 046938|>
+<|visual token 046939|>
+<|visual token 046940|>
+<|visual token 046941|>
+<|visual token 046942|>
+<|visual token 046943|>
+<|visual token 046944|>
+<|visual token 046945|>
+<|visual token 046946|>
+<|visual token 046947|>
+<|visual token 046948|>
+<|visual token 046949|>
+<|visual token 046950|>
+<|visual token 046951|>
+<|visual token 046952|>
+<|visual token 046953|>
+<|visual token 046954|>
+<|visual token 046955|>
+<|visual token 046956|>
+<|visual token 046957|>
+<|visual token 046958|>
+<|visual token 046959|>
+<|visual token 046960|>
+<|visual token 046961|>
+<|visual token 046962|>
+<|visual token 046963|>
+<|visual token 046964|>
+<|visual token 046965|>
+<|visual token 046966|>
+<|visual token 046967|>
+<|visual token 046968|>
+<|visual token 046969|>
+<|visual token 046970|>
+<|visual token 046971|>
+<|visual token 046972|>
+<|visual token 046973|>
+<|visual token 046974|>
+<|visual token 046975|>
+<|visual token 046976|>
+<|visual token 046977|>
+<|visual token 046978|>
+<|visual token 046979|>
+<|visual token 046980|>
+<|visual token 046981|>
+<|visual token 046982|>
+<|visual token 046983|>
+<|visual token 046984|>
+<|visual token 046985|>
+<|visual token 046986|>
+<|visual token 046987|>
+<|visual token 046988|>
+<|visual token 046989|>
+<|visual token 046990|>
+<|visual token 046991|>
+<|visual token 046992|>
+<|visual token 046993|>
+<|visual token 046994|>
+<|visual token 046995|>
+<|visual token 046996|>
+<|visual token 046997|>
+<|visual token 046998|>
+<|visual token 046999|>
+<|visual token 047000|>
+<|visual token 047001|>
+<|visual token 047002|>
+<|visual token 047003|>
+<|visual token 047004|>
+<|visual token 047005|>
+<|visual token 047006|>
+<|visual token 047007|>
+<|visual token 047008|>
+<|visual token 047009|>
+<|visual token 047010|>
+<|visual token 047011|>
+<|visual token 047012|>
+<|visual token 047013|>
+<|visual token 047014|>
+<|visual token 047015|>
+<|visual token 047016|>
+<|visual token 047017|>
+<|visual token 047018|>
+<|visual token 047019|>
+<|visual token 047020|>
+<|visual token 047021|>
+<|visual token 047022|>
+<|visual token 047023|>
+<|visual token 047024|>
+<|visual token 047025|>
+<|visual token 047026|>
+<|visual token 047027|>
+<|visual token 047028|>
+<|visual token 047029|>
+<|visual token 047030|>
+<|visual token 047031|>
+<|visual token 047032|>
+<|visual token 047033|>
+<|visual token 047034|>
+<|visual token 047035|>
+<|visual token 047036|>
+<|visual token 047037|>
+<|visual token 047038|>
+<|visual token 047039|>
+<|visual token 047040|>
+<|visual token 047041|>
+<|visual token 047042|>
+<|visual token 047043|>
+<|visual token 047044|>
+<|visual token 047045|>
+<|visual token 047046|>
+<|visual token 047047|>
+<|visual token 047048|>
+<|visual token 047049|>
+<|visual token 047050|>
+<|visual token 047051|>
+<|visual token 047052|>
+<|visual token 047053|>
+<|visual token 047054|>
+<|visual token 047055|>
+<|visual token 047056|>
+<|visual token 047057|>
+<|visual token 047058|>
+<|visual token 047059|>
+<|visual token 047060|>
+<|visual token 047061|>
+<|visual token 047062|>
+<|visual token 047063|>
+<|visual token 047064|>
+<|visual token 047065|>
+<|visual token 047066|>
+<|visual token 047067|>
+<|visual token 047068|>
+<|visual token 047069|>
+<|visual token 047070|>
+<|visual token 047071|>
+<|visual token 047072|>
+<|visual token 047073|>
+<|visual token 047074|>
+<|visual token 047075|>
+<|visual token 047076|>
+<|visual token 047077|>
+<|visual token 047078|>
+<|visual token 047079|>
+<|visual token 047080|>
+<|visual token 047081|>
+<|visual token 047082|>
+<|visual token 047083|>
+<|visual token 047084|>
+<|visual token 047085|>
+<|visual token 047086|>
+<|visual token 047087|>
+<|visual token 047088|>
+<|visual token 047089|>
+<|visual token 047090|>
+<|visual token 047091|>
+<|visual token 047092|>
+<|visual token 047093|>
+<|visual token 047094|>
+<|visual token 047095|>
+<|visual token 047096|>
+<|visual token 047097|>
+<|visual token 047098|>
+<|visual token 047099|>
+<|visual token 047100|>
+<|visual token 047101|>
+<|visual token 047102|>
+<|visual token 047103|>
+<|visual token 047104|>
+<|visual token 047105|>
+<|visual token 047106|>
+<|visual token 047107|>
+<|visual token 047108|>
+<|visual token 047109|>
+<|visual token 047110|>
+<|visual token 047111|>
+<|visual token 047112|>
+<|visual token 047113|>
+<|visual token 047114|>
+<|visual token 047115|>
+<|visual token 047116|>
+<|visual token 047117|>
+<|visual token 047118|>
+<|visual token 047119|>
+<|visual token 047120|>
+<|visual token 047121|>
+<|visual token 047122|>
+<|visual token 047123|>
+<|visual token 047124|>
+<|visual token 047125|>
+<|visual token 047126|>
+<|visual token 047127|>
+<|visual token 047128|>
+<|visual token 047129|>
+<|visual token 047130|>
+<|visual token 047131|>
+<|visual token 047132|>
+<|visual token 047133|>
+<|visual token 047134|>
+<|visual token 047135|>
+<|visual token 047136|>
+<|visual token 047137|>
+<|visual token 047138|>
+<|visual token 047139|>
+<|visual token 047140|>
+<|visual token 047141|>
+<|visual token 047142|>
+<|visual token 047143|>
+<|visual token 047144|>
+<|visual token 047145|>
+<|visual token 047146|>
+<|visual token 047147|>
+<|visual token 047148|>
+<|visual token 047149|>
+<|visual token 047150|>
+<|visual token 047151|>
+<|visual token 047152|>
+<|visual token 047153|>
+<|visual token 047154|>
+<|visual token 047155|>
+<|visual token 047156|>
+<|visual token 047157|>
+<|visual token 047158|>
+<|visual token 047159|>
+<|visual token 047160|>
+<|visual token 047161|>
+<|visual token 047162|>
+<|visual token 047163|>
+<|visual token 047164|>
+<|visual token 047165|>
+<|visual token 047166|>
+<|visual token 047167|>
+<|visual token 047168|>
+<|visual token 047169|>
+<|visual token 047170|>
+<|visual token 047171|>
+<|visual token 047172|>
+<|visual token 047173|>
+<|visual token 047174|>
+<|visual token 047175|>
+<|visual token 047176|>
+<|visual token 047177|>
+<|visual token 047178|>
+<|visual token 047179|>
+<|visual token 047180|>
+<|visual token 047181|>
+<|visual token 047182|>
+<|visual token 047183|>
+<|visual token 047184|>
+<|visual token 047185|>
+<|visual token 047186|>
+<|visual token 047187|>
+<|visual token 047188|>
+<|visual token 047189|>
+<|visual token 047190|>
+<|visual token 047191|>
+<|visual token 047192|>
+<|visual token 047193|>
+<|visual token 047194|>
+<|visual token 047195|>
+<|visual token 047196|>
+<|visual token 047197|>
+<|visual token 047198|>
+<|visual token 047199|>
+<|visual token 047200|>
+<|visual token 047201|>
+<|visual token 047202|>
+<|visual token 047203|>
+<|visual token 047204|>
+<|visual token 047205|>
+<|visual token 047206|>
+<|visual token 047207|>
+<|visual token 047208|>
+<|visual token 047209|>
+<|visual token 047210|>
+<|visual token 047211|>
+<|visual token 047212|>
+<|visual token 047213|>
+<|visual token 047214|>
+<|visual token 047215|>
+<|visual token 047216|>
+<|visual token 047217|>
+<|visual token 047218|>
+<|visual token 047219|>
+<|visual token 047220|>
+<|visual token 047221|>
+<|visual token 047222|>
+<|visual token 047223|>
+<|visual token 047224|>
+<|visual token 047225|>
+<|visual token 047226|>
+<|visual token 047227|>
+<|visual token 047228|>
+<|visual token 047229|>
+<|visual token 047230|>
+<|visual token 047231|>
+<|visual token 047232|>
+<|visual token 047233|>
+<|visual token 047234|>
+<|visual token 047235|>
+<|visual token 047236|>
+<|visual token 047237|>
+<|visual token 047238|>
+<|visual token 047239|>
+<|visual token 047240|>
+<|visual token 047241|>
+<|visual token 047242|>
+<|visual token 047243|>
+<|visual token 047244|>
+<|visual token 047245|>
+<|visual token 047246|>
+<|visual token 047247|>
+<|visual token 047248|>
+<|visual token 047249|>
+<|visual token 047250|>
+<|visual token 047251|>
+<|visual token 047252|>
+<|visual token 047253|>
+<|visual token 047254|>
+<|visual token 047255|>
+<|visual token 047256|>
+<|visual token 047257|>
+<|visual token 047258|>
+<|visual token 047259|>
+<|visual token 047260|>
+<|visual token 047261|>
+<|visual token 047262|>
+<|visual token 047263|>
+<|visual token 047264|>
+<|visual token 047265|>
+<|visual token 047266|>
+<|visual token 047267|>
+<|visual token 047268|>
+<|visual token 047269|>
+<|visual token 047270|>
+<|visual token 047271|>
+<|visual token 047272|>
+<|visual token 047273|>
+<|visual token 047274|>
+<|visual token 047275|>
+<|visual token 047276|>
+<|visual token 047277|>
+<|visual token 047278|>
+<|visual token 047279|>
+<|visual token 047280|>
+<|visual token 047281|>
+<|visual token 047282|>
+<|visual token 047283|>
+<|visual token 047284|>
+<|visual token 047285|>
+<|visual token 047286|>
+<|visual token 047287|>
+<|visual token 047288|>
+<|visual token 047289|>
+<|visual token 047290|>
+<|visual token 047291|>
+<|visual token 047292|>
+<|visual token 047293|>
+<|visual token 047294|>
+<|visual token 047295|>
+<|visual token 047296|>
+<|visual token 047297|>
+<|visual token 047298|>
+<|visual token 047299|>
+<|visual token 047300|>
+<|visual token 047301|>
+<|visual token 047302|>
+<|visual token 047303|>
+<|visual token 047304|>
+<|visual token 047305|>
+<|visual token 047306|>
+<|visual token 047307|>
+<|visual token 047308|>
+<|visual token 047309|>
+<|visual token 047310|>
+<|visual token 047311|>
+<|visual token 047312|>
+<|visual token 047313|>
+<|visual token 047314|>
+<|visual token 047315|>
+<|visual token 047316|>
+<|visual token 047317|>
+<|visual token 047318|>
+<|visual token 047319|>
+<|visual token 047320|>
+<|visual token 047321|>
+<|visual token 047322|>
+<|visual token 047323|>
+<|visual token 047324|>
+<|visual token 047325|>
+<|visual token 047326|>
+<|visual token 047327|>
+<|visual token 047328|>
+<|visual token 047329|>
+<|visual token 047330|>
+<|visual token 047331|>
+<|visual token 047332|>
+<|visual token 047333|>
+<|visual token 047334|>
+<|visual token 047335|>
+<|visual token 047336|>
+<|visual token 047337|>
+<|visual token 047338|>
+<|visual token 047339|>
+<|visual token 047340|>
+<|visual token 047341|>
+<|visual token 047342|>
+<|visual token 047343|>
+<|visual token 047344|>
+<|visual token 047345|>
+<|visual token 047346|>
+<|visual token 047347|>
+<|visual token 047348|>
+<|visual token 047349|>
+<|visual token 047350|>
+<|visual token 047351|>
+<|visual token 047352|>
+<|visual token 047353|>
+<|visual token 047354|>
+<|visual token 047355|>
+<|visual token 047356|>
+<|visual token 047357|>
+<|visual token 047358|>
+<|visual token 047359|>
+<|visual token 047360|>
+<|visual token 047361|>
+<|visual token 047362|>
+<|visual token 047363|>
+<|visual token 047364|>
+<|visual token 047365|>
+<|visual token 047366|>
+<|visual token 047367|>
+<|visual token 047368|>
+<|visual token 047369|>
+<|visual token 047370|>
+<|visual token 047371|>
+<|visual token 047372|>
+<|visual token 047373|>
+<|visual token 047374|>
+<|visual token 047375|>
+<|visual token 047376|>
+<|visual token 047377|>
+<|visual token 047378|>
+<|visual token 047379|>
+<|visual token 047380|>
+<|visual token 047381|>
+<|visual token 047382|>
+<|visual token 047383|>
+<|visual token 047384|>
+<|visual token 047385|>
+<|visual token 047386|>
+<|visual token 047387|>
+<|visual token 047388|>
+<|visual token 047389|>
+<|visual token 047390|>
+<|visual token 047391|>
+<|visual token 047392|>
+<|visual token 047393|>
+<|visual token 047394|>
+<|visual token 047395|>
+<|visual token 047396|>
+<|visual token 047397|>
+<|visual token 047398|>
+<|visual token 047399|>
+<|visual token 047400|>
+<|visual token 047401|>
+<|visual token 047402|>
+<|visual token 047403|>
+<|visual token 047404|>
+<|visual token 047405|>
+<|visual token 047406|>
+<|visual token 047407|>
+<|visual token 047408|>
+<|visual token 047409|>
+<|visual token 047410|>
+<|visual token 047411|>
+<|visual token 047412|>
+<|visual token 047413|>
+<|visual token 047414|>
+<|visual token 047415|>
+<|visual token 047416|>
+<|visual token 047417|>
+<|visual token 047418|>
+<|visual token 047419|>
+<|visual token 047420|>
+<|visual token 047421|>
+<|visual token 047422|>
+<|visual token 047423|>
+<|visual token 047424|>
+<|visual token 047425|>
+<|visual token 047426|>
+<|visual token 047427|>
+<|visual token 047428|>
+<|visual token 047429|>
+<|visual token 047430|>
+<|visual token 047431|>
+<|visual token 047432|>
+<|visual token 047433|>
+<|visual token 047434|>
+<|visual token 047435|>
+<|visual token 047436|>
+<|visual token 047437|>
+<|visual token 047438|>
+<|visual token 047439|>
+<|visual token 047440|>
+<|visual token 047441|>
+<|visual token 047442|>
+<|visual token 047443|>
+<|visual token 047444|>
+<|visual token 047445|>
+<|visual token 047446|>
+<|visual token 047447|>
+<|visual token 047448|>
+<|visual token 047449|>
+<|visual token 047450|>
+<|visual token 047451|>
+<|visual token 047452|>
+<|visual token 047453|>
+<|visual token 047454|>
+<|visual token 047455|>
+<|visual token 047456|>
+<|visual token 047457|>
+<|visual token 047458|>
+<|visual token 047459|>
+<|visual token 047460|>
+<|visual token 047461|>
+<|visual token 047462|>
+<|visual token 047463|>
+<|visual token 047464|>
+<|visual token 047465|>
+<|visual token 047466|>
+<|visual token 047467|>
+<|visual token 047468|>
+<|visual token 047469|>
+<|visual token 047470|>
+<|visual token 047471|>
+<|visual token 047472|>
+<|visual token 047473|>
+<|visual token 047474|>
+<|visual token 047475|>
+<|visual token 047476|>
+<|visual token 047477|>
+<|visual token 047478|>
+<|visual token 047479|>
+<|visual token 047480|>
+<|visual token 047481|>
+<|visual token 047482|>
+<|visual token 047483|>
+<|visual token 047484|>
+<|visual token 047485|>
+<|visual token 047486|>
+<|visual token 047487|>
+<|visual token 047488|>
+<|visual token 047489|>
+<|visual token 047490|>
+<|visual token 047491|>
+<|visual token 047492|>
+<|visual token 047493|>
+<|visual token 047494|>
+<|visual token 047495|>
+... (2,558 further sequential entries, <|visual token 047496|> through <|visual token 050053|>, elided for brevity)
+<|visual token 050054|>
+<|visual token 050055|>
+<|visual token 050056|>
+<|visual token 050057|>
+<|visual token 050058|>
+<|visual token 050059|>
+<|visual token 050060|>
+<|visual token 050061|>
+<|visual token 050062|>
+<|visual token 050063|>
+<|visual token 050064|>
+<|visual token 050065|>
+<|visual token 050066|>
+<|visual token 050067|>
+<|visual token 050068|>
+<|visual token 050069|>
+<|visual token 050070|>
+<|visual token 050071|>
+<|visual token 050072|>
+<|visual token 050073|>
+<|visual token 050074|>
+<|visual token 050075|>
+<|visual token 050076|>
+<|visual token 050077|>
+<|visual token 050078|>
+<|visual token 050079|>
+<|visual token 050080|>
+<|visual token 050081|>
+<|visual token 050082|>
+<|visual token 050083|>
+<|visual token 050084|>
+<|visual token 050085|>
+<|visual token 050086|>
+<|visual token 050087|>
+<|visual token 050088|>
+<|visual token 050089|>
+<|visual token 050090|>
+<|visual token 050091|>
+<|visual token 050092|>
+<|visual token 050093|>
+<|visual token 050094|>
+<|visual token 050095|>
+<|visual token 050096|>
+<|visual token 050097|>
+<|visual token 050098|>
+<|visual token 050099|>
+<|visual token 050100|>
+<|visual token 050101|>
+<|visual token 050102|>
+<|visual token 050103|>
+<|visual token 050104|>
+<|visual token 050105|>
+<|visual token 050106|>
+<|visual token 050107|>
+<|visual token 050108|>
+<|visual token 050109|>
+<|visual token 050110|>
+<|visual token 050111|>
+<|visual token 050112|>
+<|visual token 050113|>
+<|visual token 050114|>
+<|visual token 050115|>
+<|visual token 050116|>
+<|visual token 050117|>
+<|visual token 050118|>
+<|visual token 050119|>
+<|visual token 050120|>
+<|visual token 050121|>
+<|visual token 050122|>
+<|visual token 050123|>
+<|visual token 050124|>
+<|visual token 050125|>
+<|visual token 050126|>
+<|visual token 050127|>
+<|visual token 050128|>
+<|visual token 050129|>
+<|visual token 050130|>
+<|visual token 050131|>
+<|visual token 050132|>
+<|visual token 050133|>
+<|visual token 050134|>
+<|visual token 050135|>
+<|visual token 050136|>
+<|visual token 050137|>
+<|visual token 050138|>
+<|visual token 050139|>
+<|visual token 050140|>
+<|visual token 050141|>
+<|visual token 050142|>
+<|visual token 050143|>
+<|visual token 050144|>
+<|visual token 050145|>
+<|visual token 050146|>
+<|visual token 050147|>
+<|visual token 050148|>
+<|visual token 050149|>
+<|visual token 050150|>
+<|visual token 050151|>
+<|visual token 050152|>
+<|visual token 050153|>
+<|visual token 050154|>
+<|visual token 050155|>
+<|visual token 050156|>
+<|visual token 050157|>
+<|visual token 050158|>
+<|visual token 050159|>
+<|visual token 050160|>
+<|visual token 050161|>
+<|visual token 050162|>
+<|visual token 050163|>
+<|visual token 050164|>
+<|visual token 050165|>
+<|visual token 050166|>
+<|visual token 050167|>
+<|visual token 050168|>
+<|visual token 050169|>
+<|visual token 050170|>
+<|visual token 050171|>
+<|visual token 050172|>
+<|visual token 050173|>
+<|visual token 050174|>
+<|visual token 050175|>
+<|visual token 050176|>
+<|visual token 050177|>
+<|visual token 050178|>
+<|visual token 050179|>
+<|visual token 050180|>
+<|visual token 050181|>
+<|visual token 050182|>
+<|visual token 050183|>
+<|visual token 050184|>
+<|visual token 050185|>
+<|visual token 050186|>
+<|visual token 050187|>
+<|visual token 050188|>
+<|visual token 050189|>
+<|visual token 050190|>
+<|visual token 050191|>
+<|visual token 050192|>
+<|visual token 050193|>
+<|visual token 050194|>
+<|visual token 050195|>
+<|visual token 050196|>
+<|visual token 050197|>
+<|visual token 050198|>
+<|visual token 050199|>
+<|visual token 050200|>
+<|visual token 050201|>
+<|visual token 050202|>
+<|visual token 050203|>
+<|visual token 050204|>
+<|visual token 050205|>
+<|visual token 050206|>
+<|visual token 050207|>
+<|visual token 050208|>
+<|visual token 050209|>
+<|visual token 050210|>
+<|visual token 050211|>
+<|visual token 050212|>
+<|visual token 050213|>
+<|visual token 050214|>
+<|visual token 050215|>
+<|visual token 050216|>
+<|visual token 050217|>
+<|visual token 050218|>
+<|visual token 050219|>
+<|visual token 050220|>
+<|visual token 050221|>
+<|visual token 050222|>
+<|visual token 050223|>
+<|visual token 050224|>
+<|visual token 050225|>
+<|visual token 050226|>
+<|visual token 050227|>
+<|visual token 050228|>
+<|visual token 050229|>
+<|visual token 050230|>
+<|visual token 050231|>
+<|visual token 050232|>
+<|visual token 050233|>
+<|visual token 050234|>
+<|visual token 050235|>
+<|visual token 050236|>
+<|visual token 050237|>
+<|visual token 050238|>
+<|visual token 050239|>
+<|visual token 050240|>
+<|visual token 050241|>
+<|visual token 050242|>
+<|visual token 050243|>
+<|visual token 050244|>
+<|visual token 050245|>
+<|visual token 050246|>
+<|visual token 050247|>
+<|visual token 050248|>
+<|visual token 050249|>
+<|visual token 050250|>
+<|visual token 050251|>
+<|visual token 050252|>
+<|visual token 050253|>
+<|visual token 050254|>
+<|visual token 050255|>
+<|visual token 050256|>
+<|visual token 050257|>
+<|visual token 050258|>
+<|visual token 050259|>
+<|visual token 050260|>
+<|visual token 050261|>
+<|visual token 050262|>
+<|visual token 050263|>
+<|visual token 050264|>
+<|visual token 050265|>
+<|visual token 050266|>
+<|visual token 050267|>
+<|visual token 050268|>
+<|visual token 050269|>
+<|visual token 050270|>
+<|visual token 050271|>
+<|visual token 050272|>
+<|visual token 050273|>
+<|visual token 050274|>
+<|visual token 050275|>
+<|visual token 050276|>
+<|visual token 050277|>
+<|visual token 050278|>
+<|visual token 050279|>
+<|visual token 050280|>
+<|visual token 050281|>
+<|visual token 050282|>
+<|visual token 050283|>
+<|visual token 050284|>
+<|visual token 050285|>
+<|visual token 050286|>
+<|visual token 050287|>
+<|visual token 050288|>
+<|visual token 050289|>
+<|visual token 050290|>
+<|visual token 050291|>
+<|visual token 050292|>
+<|visual token 050293|>
+<|visual token 050294|>
+<|visual token 050295|>
+<|visual token 050296|>
+<|visual token 050297|>
+<|visual token 050298|>
+<|visual token 050299|>
+<|visual token 050300|>
+<|visual token 050301|>
+<|visual token 050302|>
+<|visual token 050303|>
+<|visual token 050304|>
+<|visual token 050305|>
+<|visual token 050306|>
+<|visual token 050307|>
+<|visual token 050308|>
+<|visual token 050309|>
+<|visual token 050310|>
+<|visual token 050311|>
+<|visual token 050312|>
+<|visual token 050313|>
+<|visual token 050314|>
+<|visual token 050315|>
+<|visual token 050316|>
+<|visual token 050317|>
+<|visual token 050318|>
+<|visual token 050319|>
+<|visual token 050320|>
+<|visual token 050321|>
+<|visual token 050322|>
+<|visual token 050323|>
+<|visual token 050324|>
+<|visual token 050325|>
+<|visual token 050326|>
+<|visual token 050327|>
+<|visual token 050328|>
+<|visual token 050329|>
+<|visual token 050330|>
+<|visual token 050331|>
+<|visual token 050332|>
+<|visual token 050333|>
+<|visual token 050334|>
+<|visual token 050335|>
+<|visual token 050336|>
+<|visual token 050337|>
+<|visual token 050338|>
+<|visual token 050339|>
+<|visual token 050340|>
+<|visual token 050341|>
+<|visual token 050342|>
+<|visual token 050343|>
+<|visual token 050344|>
+<|visual token 050345|>
+<|visual token 050346|>
+<|visual token 050347|>
+<|visual token 050348|>
+<|visual token 050349|>
+<|visual token 050350|>
+<|visual token 050351|>
+<|visual token 050352|>
+<|visual token 050353|>
+<|visual token 050354|>
+<|visual token 050355|>
+<|visual token 050356|>
+<|visual token 050357|>
+<|visual token 050358|>
+<|visual token 050359|>
+<|visual token 050360|>
+<|visual token 050361|>
+<|visual token 050362|>
+<|visual token 050363|>
+<|visual token 050364|>
+<|visual token 050365|>
+<|visual token 050366|>
+<|visual token 050367|>
+<|visual token 050368|>
+<|visual token 050369|>
+<|visual token 050370|>
+<|visual token 050371|>
+<|visual token 050372|>
+<|visual token 050373|>
+<|visual token 050374|>
+<|visual token 050375|>
+<|visual token 050376|>
+<|visual token 050377|>
+<|visual token 050378|>
+<|visual token 050379|>
+<|visual token 050380|>
+<|visual token 050381|>
+<|visual token 050382|>
+<|visual token 050383|>
+<|visual token 050384|>
+<|visual token 050385|>
+<|visual token 050386|>
+<|visual token 050387|>
+<|visual token 050388|>
+<|visual token 050389|>
+<|visual token 050390|>
+<|visual token 050391|>
+<|visual token 050392|>
+<|visual token 050393|>
+<|visual token 050394|>
+<|visual token 050395|>
+<|visual token 050396|>
+<|visual token 050397|>
+<|visual token 050398|>
+<|visual token 050399|>
+<|visual token 050400|>
+<|visual token 050401|>
+<|visual token 050402|>
+<|visual token 050403|>
+<|visual token 050404|>
+<|visual token 050405|>
+<|visual token 050406|>
+<|visual token 050407|>
+<|visual token 050408|>
+<|visual token 050409|>
+<|visual token 050410|>
+<|visual token 050411|>
+<|visual token 050412|>
+<|visual token 050413|>
+<|visual token 050414|>
+<|visual token 050415|>
+<|visual token 050416|>
+<|visual token 050417|>
+<|visual token 050418|>
+<|visual token 050419|>
+<|visual token 050420|>
+<|visual token 050421|>
+<|visual token 050422|>
+<|visual token 050423|>
+<|visual token 050424|>
+<|visual token 050425|>
+<|visual token 050426|>
+<|visual token 050427|>
+<|visual token 050428|>
+<|visual token 050429|>
+<|visual token 050430|>
+<|visual token 050431|>
+<|visual token 050432|>
+<|visual token 050433|>
+<|visual token 050434|>
+<|visual token 050435|>
+<|visual token 050436|>
+<|visual token 050437|>
+<|visual token 050438|>
+<|visual token 050439|>
+<|visual token 050440|>
+<|visual token 050441|>
+<|visual token 050442|>
+<|visual token 050443|>
+<|visual token 050444|>
+<|visual token 050445|>
+<|visual token 050446|>
+<|visual token 050447|>
+<|visual token 050448|>
+<|visual token 050449|>
+<|visual token 050450|>
+<|visual token 050451|>
+<|visual token 050452|>
+<|visual token 050453|>
+<|visual token 050454|>
+<|visual token 050455|>
+<|visual token 050456|>
+<|visual token 050457|>
+<|visual token 050458|>
+<|visual token 050459|>
+<|visual token 050460|>
+<|visual token 050461|>
+<|visual token 050462|>
+<|visual token 050463|>
+<|visual token 050464|>
+<|visual token 050465|>
+<|visual token 050466|>
+<|visual token 050467|>
+<|visual token 050468|>
+<|visual token 050469|>
+<|visual token 050470|>
+<|visual token 050471|>
+<|visual token 050472|>
+<|visual token 050473|>
+<|visual token 050474|>
+<|visual token 050475|>
+<|visual token 050476|>
+<|visual token 050477|>
+<|visual token 050478|>
+<|visual token 050479|>
+<|visual token 050480|>
+<|visual token 050481|>
+<|visual token 050482|>
+<|visual token 050483|>
+<|visual token 050484|>
+<|visual token 050485|>
+<|visual token 050486|>
+<|visual token 050487|>
+<|visual token 050488|>
+<|visual token 050489|>
+<|visual token 050490|>
+<|visual token 050491|>
+<|visual token 050492|>
+<|visual token 050493|>
+<|visual token 050494|>
+<|visual token 050495|>
+<|visual token 050496|>
+<|visual token 050497|>
+<|visual token 050498|>
+<|visual token 050499|>
+<|visual token 050500|>
+<|visual token 050501|>
+<|visual token 050502|>
+<|visual token 050503|>
+<|visual token 050504|>
+<|visual token 050505|>
+<|visual token 050506|>
+<|visual token 050507|>
+<|visual token 050508|>
+<|visual token 050509|>
+<|visual token 050510|>
+<|visual token 050511|>
+<|visual token 050512|>
+<|visual token 050513|>
+<|visual token 050514|>
+<|visual token 050515|>
+<|visual token 050516|>
+<|visual token 050517|>
+<|visual token 050518|>
+<|visual token 050519|>
+<|visual token 050520|>
+<|visual token 050521|>
+<|visual token 050522|>
+<|visual token 050523|>
+<|visual token 050524|>
+<|visual token 050525|>
+<|visual token 050526|>
+<|visual token 050527|>
+<|visual token 050528|>
+<|visual token 050529|>
+<|visual token 050530|>
+<|visual token 050531|>
+<|visual token 050532|>
+<|visual token 050533|>
+<|visual token 050534|>
+<|visual token 050535|>
+<|visual token 050536|>
+<|visual token 050537|>
+<|visual token 050538|>
+<|visual token 050539|>
+<|visual token 050540|>
+<|visual token 050541|>
+<|visual token 050542|>
+<|visual token 050543|>
+<|visual token 050544|>
+<|visual token 050545|>
+<|visual token 050546|>
+<|visual token 050547|>
+<|visual token 050548|>
+<|visual token 050549|>
+<|visual token 050550|>
+<|visual token 050551|>
+<|visual token 050552|>
+<|visual token 050553|>
+<|visual token 050554|>
+<|visual token 050555|>
+<|visual token 050556|>
+<|visual token 050557|>
+<|visual token 050558|>
+<|visual token 050559|>
+<|visual token 050560|>
+<|visual token 050561|>
+<|visual token 050562|>
+<|visual token 050563|>
+<|visual token 050564|>
+<|visual token 050565|>
+<|visual token 050566|>
+<|visual token 050567|>
+<|visual token 050568|>
+<|visual token 050569|>
+<|visual token 050570|>
+<|visual token 050571|>
+<|visual token 050572|>
+<|visual token 050573|>
+<|visual token 050574|>
+<|visual token 050575|>
+<|visual token 050576|>
+<|visual token 050577|>
+<|visual token 050578|>
+<|visual token 050579|>
+<|visual token 050580|>
+<|visual token 050581|>
+<|visual token 050582|>
+<|visual token 050583|>
+<|visual token 050584|>
+<|visual token 050585|>
+<|visual token 050586|>
+<|visual token 050587|>
+<|visual token 050588|>
+<|visual token 050589|>
+<|visual token 050590|>
+<|visual token 050591|>
+<|visual token 050592|>
+<|visual token 050593|>
+<|visual token 050594|>
+<|visual token 050595|>
+<|visual token 050596|>
+<|visual token 050597|>
+<|visual token 050598|>
+<|visual token 050599|>
+<|visual token 050600|>
+<|visual token 050601|>
+<|visual token 050602|>
+<|visual token 050603|>
+<|visual token 050604|>
+<|visual token 050605|>
+<|visual token 050606|>
+<|visual token 050607|>
+<|visual token 050608|>
+<|visual token 050609|>
+<|visual token 050610|>
+<|visual token 050611|>
+<|visual token 050612|>
+<|visual token 050613|>
+<|visual token 050614|>
+<|visual token 050615|>
+<|visual token 050616|>
+<|visual token 050617|>
+<|visual token 050618|>
+<|visual token 050619|>
+<|visual token 050620|>
+<|visual token 050621|>
+<|visual token 050622|>
+<|visual token 050623|>
+<|visual token 050624|>
+<|visual token 050625|>
+<|visual token 050626|>
+<|visual token 050627|>
+<|visual token 050628|>
+<|visual token 050629|>
+<|visual token 050630|>
+<|visual token 050631|>
+<|visual token 050632|>
+<|visual token 050633|>
+<|visual token 050634|>
+<|visual token 050635|>
+<|visual token 050636|>
+<|visual token 050637|>
+<|visual token 050638|>
+<|visual token 050639|>
+<|visual token 050640|>
+<|visual token 050641|>
+<|visual token 050642|>
+<|visual token 050643|>
+<|visual token 050644|>
+<|visual token 050645|>
+<|visual token 050646|>
+<|visual token 050647|>
+<|visual token 050648|>
+<|visual token 050649|>
+<|visual token 050650|>
+<|visual token 050651|>
+<|visual token 050652|>
+<|visual token 050653|>
+<|visual token 050654|>
+<|visual token 050655|>
+<|visual token 050656|>
+<|visual token 050657|>
+<|visual token 050658|>
+<|visual token 050659|>
+<|visual token 050660|>
+<|visual token 050661|>
+<|visual token 050662|>
+<|visual token 050663|>
+<|visual token 050664|>
+<|visual token 050665|>
+<|visual token 050666|>
+<|visual token 050667|>
+<|visual token 050668|>
+<|visual token 050669|>
+<|visual token 050670|>
+<|visual token 050671|>
+<|visual token 050672|>
+<|visual token 050673|>
+<|visual token 050674|>
+<|visual token 050675|>
+<|visual token 050676|>
+<|visual token 050677|>
+<|visual token 050678|>
+<|visual token 050679|>
+<|visual token 050680|>
+<|visual token 050681|>
+<|visual token 050682|>
+<|visual token 050683|>
+<|visual token 050684|>
+<|visual token 050685|>
+<|visual token 050686|>
+<|visual token 050687|>
+<|visual token 050688|>
+<|visual token 050689|>
+<|visual token 050690|>
+<|visual token 050691|>
+<|visual token 050692|>
+<|visual token 050693|>
+<|visual token 050694|>
+<|visual token 050695|>
+<|visual token 050696|>
+<|visual token 050697|>
+<|visual token 050698|>
+<|visual token 050699|>
+<|visual token 050700|>
+<|visual token 050701|>
+<|visual token 050702|>
+<|visual token 050703|>
+<|visual token 050704|>
+<|visual token 050705|>
+<|visual token 050706|>
+<|visual token 050707|>
+<|visual token 050708|>
+<|visual token 050709|>
+<|visual token 050710|>
+<|visual token 050711|>
+<|visual token 050712|>
+<|visual token 050713|>
+<|visual token 050714|>
+<|visual token 050715|>
+<|visual token 050716|>
+<|visual token 050717|>
+<|visual token 050718|>
+<|visual token 050719|>
+<|visual token 050720|>
+<|visual token 050721|>
+<|visual token 050722|>
+<|visual token 050723|>
+<|visual token 050724|>
+<|visual token 050725|>
+<|visual token 050726|>
+<|visual token 050727|>
+<|visual token 050728|>
+<|visual token 050729|>
+<|visual token 050730|>
+<|visual token 050731|>
+<|visual token 050732|>
+<|visual token 050733|>
+<|visual token 050734|>
+<|visual token 050735|>
+<|visual token 050736|>
+<|visual token 050737|>
+<|visual token 050738|>
+<|visual token 050739|>
+<|visual token 050740|>
+<|visual token 050741|>
+<|visual token 050742|>
+<|visual token 050743|>
+<|visual token 050744|>
+<|visual token 050745|>
+<|visual token 050746|>
+<|visual token 050747|>
+<|visual token 050748|>
+<|visual token 050749|>
+<|visual token 050750|>
+<|visual token 050751|>
+<|visual token 050752|>
+<|visual token 050753|>
+<|visual token 050754|>
+<|visual token 050755|>
+<|visual token 050756|>
+<|visual token 050757|>
+<|visual token 050758|>
+<|visual token 050759|>
+<|visual token 050760|>
+<|visual token 050761|>
+<|visual token 050762|>
+<|visual token 050763|>
+<|visual token 050764|>
+<|visual token 050765|>
+<|visual token 050766|>
+<|visual token 050767|>
+<|visual token 050768|>
+<|visual token 050769|>
+<|visual token 050770|>
+<|visual token 050771|>
+<|visual token 050772|>
+<|visual token 050773|>
+<|visual token 050774|>
+<|visual token 050775|>
+<|visual token 050776|>
+<|visual token 050777|>
+<|visual token 050778|>
+<|visual token 050779|>
+<|visual token 050780|>
+<|visual token 050781|>
+<|visual token 050782|>
+<|visual token 050783|>
+<|visual token 050784|>
+<|visual token 050785|>
+<|visual token 050786|>
+<|visual token 050787|>
+<|visual token 050788|>
+<|visual token 050789|>
+<|visual token 050790|>
+<|visual token 050791|>
+<|visual token 050792|>
+<|visual token 050793|>
+<|visual token 050794|>
+<|visual token 050795|>
+<|visual token 050796|>
+<|visual token 050797|>
+<|visual token 050798|>
+<|visual token 050799|>
+<|visual token 050800|>
+<|visual token 050801|>
+<|visual token 050802|>
+<|visual token 050803|>
+<|visual token 050804|>
+<|visual token 050805|>
+<|visual token 050806|>
+<|visual token 050807|>
+<|visual token 050808|>
+<|visual token 050809|>
+<|visual token 050810|>
+<|visual token 050811|>
+<|visual token 050812|>
+<|visual token 050813|>
+<|visual token 050814|>
+<|visual token 050815|>
+<|visual token 050816|>
+<|visual token 050817|>
+<|visual token 050818|>
+<|visual token 050819|>
+<|visual token 050820|>
+<|visual token 050821|>
+<|visual token 050822|>
+<|visual token 050823|>
+<|visual token 050824|>
+<|visual token 050825|>
+<|visual token 050826|>
+<|visual token 050827|>
+<|visual token 050828|>
+<|visual token 050829|>
+<|visual token 050830|>
+<|visual token 050831|>
+<|visual token 050832|>
+<|visual token 050833|>
+<|visual token 050834|>
+<|visual token 050835|>
+<|visual token 050836|>
+<|visual token 050837|>
+<|visual token 050838|>
+<|visual token 050839|>
+<|visual token 050840|>
+<|visual token 050841|>
+<|visual token 050842|>
+<|visual token 050843|>
+<|visual token 050844|>
+<|visual token 050845|>
+<|visual token 050846|>
+<|visual token 050847|>
+<|visual token 050848|>
+<|visual token 050849|>
+<|visual token 050850|>
+<|visual token 050851|>
+<|visual token 050852|>
+<|visual token 050853|>
+<|visual token 050854|>
+<|visual token 050855|>
+<|visual token 050856|>
+<|visual token 050857|>
+<|visual token 050858|>
+<|visual token 050859|>
+<|visual token 050860|>
+<|visual token 050861|>
+<|visual token 050862|>
+<|visual token 050863|>
+<|visual token 050864|>
+<|visual token 050865|>
+<|visual token 050866|>
+<|visual token 050867|>
+<|visual token 050868|>
+<|visual token 050869|>
+<|visual token 050870|>
+<|visual token 050871|>
+<|visual token 050872|>
+<|visual token 050873|>
+<|visual token 050874|>
+<|visual token 050875|>
+<|visual token 050876|>
+<|visual token 050877|>
+<|visual token 050878|>
+<|visual token 050879|>
+<|visual token 050880|>
+<|visual token 050881|>
+<|visual token 050882|>
+<|visual token 050883|>
+<|visual token 050884|>
+<|visual token 050885|>
+<|visual token 050886|>
+<|visual token 050887|>
+<|visual token 050888|>
+<|visual token 050889|>
+<|visual token 050890|>
+<|visual token 050891|>
+<|visual token 050892|>
+<|visual token 050893|>
+<|visual token 050894|>
+<|visual token 050895|>
+<|visual token 050896|>
+<|visual token 050897|>
+<|visual token 050898|>
+<|visual token 050899|>
+<|visual token 050900|>
+<|visual token 050901|>
+<|visual token 050902|>
+<|visual token 050903|>
+<|visual token 050904|>
+<|visual token 050905|>
+<|visual token 050906|>
+<|visual token 050907|>
+<|visual token 050908|>
+<|visual token 050909|>
+<|visual token 050910|>
+<|visual token 050911|>
+<|visual token 050912|>
+<|visual token 050913|>
+<|visual token 050914|>
+<|visual token 050915|>
+<|visual token 050916|>
+<|visual token 050917|>
+<|visual token 050918|>
+<|visual token 050919|>
+<|visual token 050920|>
+<|visual token 050921|>
+<|visual token 050922|>
+<|visual token 050923|>
+<|visual token 050924|>
+<|visual token 050925|>
+<|visual token 050926|>
+<|visual token 050927|>
+<|visual token 050928|>
+<|visual token 050929|>
+<|visual token 050930|>
+<|visual token 050931|>
+<|visual token 050932|>
+<|visual token 050933|>
+<|visual token 050934|>
+<|visual token 050935|>
+<|visual token 050936|>
+<|visual token 050937|>
+<|visual token 050938|>
+<|visual token 050939|>
+<|visual token 050940|>
+<|visual token 050941|>
+<|visual token 050942|>
+<|visual token 050943|>
+<|visual token 050944|>
+<|visual token 050945|>
+<|visual token 050946|>
+<|visual token 050947|>
+<|visual token 050948|>
+<|visual token 050949|>
+<|visual token 050950|>
+<|visual token 050951|>
+<|visual token 050952|>
+<|visual token 050953|>
+<|visual token 050954|>
+<|visual token 050955|>
+<|visual token 050956|>
+<|visual token 050957|>
+<|visual token 050958|>
+<|visual token 050959|>
+<|visual token 050960|>
+<|visual token 050961|>
+<|visual token 050962|>
+<|visual token 050963|>
+<|visual token 050964|>
+<|visual token 050965|>
+<|visual token 050966|>
+<|visual token 050967|>
+<|visual token 050968|>
+<|visual token 050969|>
+<|visual token 050970|>
+<|visual token 050971|>
+<|visual token 050972|>
+<|visual token 050973|>
+<|visual token 050974|>
+<|visual token 050975|>
+<|visual token 050976|>
+<|visual token 050977|>
+<|visual token 050978|>
+<|visual token 050979|>
+<|visual token 050980|>
+<|visual token 050981|>
+<|visual token 050982|>
+<|visual token 050983|>
+<|visual token 050984|>
+<|visual token 050985|>
+<|visual token 050986|>
+<|visual token 050987|>
+<|visual token 050988|>
+<|visual token 050989|>
+<|visual token 050990|>
+<|visual token 050991|>
+<|visual token 050992|>
+<|visual token 050993|>
+<|visual token 050994|>
+<|visual token 050995|>
+<|visual token 050996|>
+<|visual token 050997|>
+<|visual token 050998|>
+<|visual token 050999|>
+<|visual token 051000|>
+<|visual token 051001|>
+<|visual token 051002|>
+<|visual token 051003|>
+<|visual token 051004|>
+<|visual token 051005|>
+<|visual token 051006|>
+<|visual token 051007|>
+<|visual token 051008|>
+<|visual token 051009|>
+<|visual token 051010|>
+<|visual token 051011|>
+<|visual token 051012|>
+<|visual token 051013|>
+<|visual token 051014|>
+<|visual token 051015|>
+<|visual token 051016|>
+<|visual token 051017|>
+<|visual token 051018|>
+<|visual token 051019|>
+<|visual token 051020|>
+<|visual token 051021|>
+<|visual token 051022|>
+<|visual token 051023|>
+<|visual token 051024|>
+<|visual token 051025|>
+<|visual token 051026|>
+<|visual token 051027|>
+<|visual token 051028|>
+<|visual token 051029|>
+<|visual token 051030|>
+<|visual token 051031|>
+<|visual token 051032|>
+<|visual token 051033|>
+<|visual token 051034|>
+<|visual token 051035|>
+<|visual token 051036|>
+<|visual token 051037|>
+<|visual token 051038|>
+<|visual token 051039|>
+<|visual token 051040|>
+<|visual token 051041|>
+<|visual token 051042|>
+<|visual token 051043|>
+<|visual token 051044|>
+<|visual token 051045|>
+<|visual token 051046|>
+<|visual token 051047|>
+<|visual token 051048|>
+<|visual token 051049|>
+<|visual token 051050|>
+<|visual token 051051|>
+<|visual token 051052|>
+<|visual token 051053|>
+<|visual token 051054|>
+<|visual token 051055|>
+<|visual token 051056|>
+<|visual token 051057|>
+<|visual token 051058|>
+<|visual token 051059|>
+<|visual token 051060|>
+<|visual token 051061|>
+<|visual token 051062|>
+<|visual token 051063|>
+<|visual token 051064|>
+<|visual token 051065|>
+<|visual token 051066|>
+<|visual token 051067|>
+<|visual token 051068|>
+<|visual token 051069|>
+<|visual token 051070|>
+<|visual token 051071|>
+<|visual token 051072|>
+<|visual token 051073|>
+<|visual token 051074|>
+<|visual token 051075|>
+<|visual token 051076|>
+<|visual token 051077|>
+<|visual token 051078|>
+<|visual token 051079|>
+<|visual token 051080|>
+<|visual token 051081|>
+<|visual token 051082|>
+<|visual token 051083|>
+<|visual token 051084|>
+<|visual token 051085|>
+<|visual token 051086|>
+<|visual token 051087|>
+<|visual token 051088|>
+<|visual token 051089|>
+<|visual token 051090|>
+<|visual token 051091|>
+<|visual token 051092|>
+<|visual token 051093|>
+<|visual token 051094|>
+<|visual token 051095|>
+<|visual token 051096|>
+<|visual token 051097|>
+<|visual token 051098|>
+<|visual token 051099|>
+<|visual token 051100|>
+<|visual token 051101|>
+<|visual token 051102|>
+<|visual token 051103|>
+<|visual token 051104|>
+<|visual token 051105|>
+<|visual token 051106|>
+<|visual token 051107|>
+<|visual token 051108|>
+<|visual token 051109|>
+<|visual token 051110|>
+<|visual token 051111|>
+<|visual token 051112|>
+<|visual token 051113|>
+<|visual token 051114|>
+<|visual token 051115|>
+<|visual token 051116|>
+<|visual token 051117|>
+<|visual token 051118|>
+<|visual token 051119|>
+<|visual token 051120|>
+<|visual token 051121|>
+<|visual token 051122|>
+<|visual token 051123|>
+<|visual token 051124|>
+<|visual token 051125|>
+<|visual token 051126|>
+<|visual token 051127|>
+<|visual token 051128|>
+<|visual token 051129|>
+<|visual token 051130|>
+<|visual token 051131|>
+<|visual token 051132|>
+<|visual token 051133|>
+<|visual token 051134|>
+<|visual token 051135|>
+<|visual token 051136|>
+<|visual token 051137|>
+<|visual token 051138|>
+<|visual token 051139|>
+<|visual token 051140|>
+<|visual token 051141|>
+<|visual token 051142|>
+<|visual token 051143|>
+<|visual token 051144|>
+<|visual token 051145|>
+<|visual token 051146|>
+<|visual token 051147|>
+<|visual token 051148|>
+<|visual token 051149|>
+<|visual token 051150|>
+<|visual token 051151|>
+<|visual token 051152|>
+<|visual token 051153|>
+<|visual token 051154|>
+<|visual token 051155|>
+<|visual token 051156|>
+<|visual token 051157|>
+<|visual token 051158|>
+<|visual token 051159|>
+<|visual token 051160|>
+<|visual token 051161|>
+<|visual token 051162|>
+<|visual token 051163|>
+<|visual token 051164|>
+<|visual token 051165|>
+<|visual token 051166|>
+<|visual token 051167|>
+<|visual token 051168|>
+<|visual token 051169|>
+<|visual token 051170|>
+<|visual token 051171|>
+<|visual token 051172|>
+<|visual token 051173|>
+<|visual token 051174|>
+<|visual token 051175|>
+<|visual token 051176|>
+<|visual token 051177|>
+<|visual token 051178|>
+<|visual token 051179|>
+<|visual token 051180|>
+<|visual token 051181|>
+<|visual token 051182|>
+<|visual token 051183|>
+<|visual token 051184|>
+<|visual token 051185|>
+<|visual token 051186|>
+<|visual token 051187|>
+<|visual token 051188|>
+<|visual token 051189|>
+<|visual token 051190|>
+<|visual token 051191|>
+<|visual token 051192|>
+<|visual token 051193|>
+<|visual token 051194|>
+<|visual token 051195|>
+<|visual token 051196|>
+<|visual token 051197|>
+<|visual token 051198|>
+<|visual token 051199|>
+<|visual token 051200|>
+<|visual token 051201|>
+<|visual token 051202|>
+<|visual token 051203|>
+<|visual token 051204|>
+<|visual token 051205|>
+<|visual token 051206|>
+<|visual token 051207|>
+<|visual token 051208|>
+<|visual token 051209|>
+<|visual token 051210|>
+<|visual token 051211|>
+<|visual token 051212|>
+<|visual token 051213|>
+<|visual token 051214|>
+<|visual token 051215|>
+<|visual token 051216|>
+<|visual token 051217|>
+<|visual token 051218|>
+<|visual token 051219|>
+<|visual token 051220|>
+<|visual token 051221|>
+<|visual token 051222|>
+<|visual token 051223|>
+<|visual token 051224|>
+<|visual token 051225|>
+<|visual token 051226|>
+<|visual token 051227|>
+<|visual token 051228|>
+<|visual token 051229|>
+<|visual token 051230|>
+<|visual token 051231|>
+<|visual token 051232|>
+<|visual token 051233|>
+<|visual token 051234|>
+<|visual token 051235|>
+<|visual token 051236|>
+<|visual token 051237|>
+<|visual token 051238|>
+<|visual token 051239|>
+<|visual token 051240|>
+<|visual token 051241|>
+<|visual token 051242|>
+<|visual token 051243|>
+<|visual token 051244|>
+<|visual token 051245|>
+<|visual token 051246|>
+<|visual token 051247|>
+<|visual token 051248|>
+<|visual token 051249|>
+<|visual token 051250|>
+<|visual token 051251|>
+<|visual token 051252|>
+<|visual token 051253|>
+<|visual token 051254|>
+<|visual token 051255|>
+<|visual token 051256|>
+<|visual token 051257|>
+<|visual token 051258|>
+<|visual token 051259|>
+<|visual token 051260|>
+<|visual token 051261|>
+<|visual token 051262|>
+<|visual token 051263|>
+<|visual token 051264|>
+<|visual token 051265|>
+<|visual token 051266|>
+<|visual token 051267|>
+<|visual token 051268|>
+<|visual token 051269|>
+<|visual token 051270|>
+<|visual token 051271|>
+<|visual token 051272|>
+<|visual token 051273|>
+<|visual token 051274|>
+<|visual token 051275|>
+<|visual token 051276|>
+<|visual token 051277|>
+<|visual token 051278|>
+<|visual token 051279|>
+<|visual token 051280|>
+<|visual token 051281|>
+<|visual token 051282|>
+<|visual token 051283|>
+<|visual token 051284|>
+<|visual token 051285|>
+<|visual token 051286|>
+<|visual token 051287|>
+<|visual token 051288|>
+<|visual token 051289|>
+<|visual token 051290|>
+<|visual token 051291|>
+<|visual token 051292|>
+<|visual token 051293|>
+<|visual token 051294|>
+<|visual token 051295|>
+<|visual token 051296|>
+<|visual token 051297|>
+<|visual token 051298|>
+<|visual token 051299|>
+<|visual token 051300|>
+<|visual token 051301|>
+<|visual token 051302|>
+<|visual token 051303|>
+<|visual token 051304|>
+<|visual token 051305|>
+<|visual token 051306|>
+<|visual token 051307|>
+<|visual token 051308|>
+<|visual token 051309|>
+<|visual token 051310|>
+<|visual token 051311|>
+<|visual token 051312|>
+<|visual token 051313|>
+<|visual token 051314|>
+<|visual token 051315|>
+<|visual token 051316|>
+<|visual token 051317|>
+<|visual token 051318|>
+<|visual token 051319|>
+<|visual token 051320|>
+<|visual token 051321|>
+<|visual token 051322|>
+<|visual token 051323|>
+<|visual token 051324|>
+<|visual token 051325|>
+<|visual token 051326|>
+<|visual token 051327|>
+<|visual token 051328|>
+<|visual token 051329|>
+<|visual token 051330|>
+<|visual token 051331|>
+<|visual token 051332|>
+<|visual token 051333|>
+<|visual token 051334|>
+<|visual token 051335|>
+<|visual token 051336|>
+<|visual token 051337|>
+<|visual token 051338|>
+<|visual token 051339|>
+<|visual token 051340|>
+<|visual token 051341|>
+<|visual token 051342|>
+<|visual token 051343|>
+<|visual token 051344|>
+<|visual token 051345|>
+<|visual token 051346|>
+<|visual token 051347|>
+<|visual token 051348|>
+<|visual token 051349|>
+<|visual token 051350|>
+<|visual token 051351|>
+<|visual token 051352|>
+<|visual token 051353|>
+<|visual token 051354|>
+<|visual token 051355|>
+<|visual token 051356|>
+<|visual token 051357|>
+<|visual token 051358|>
+<|visual token 051359|>
+<|visual token 051360|>
+<|visual token 051361|>
+<|visual token 051362|>
+<|visual token 051363|>
+<|visual token 051364|>
+<|visual token 051365|>
+<|visual token 051366|>
+<|visual token 051367|>
+<|visual token 051368|>
+<|visual token 051369|>
+<|visual token 051370|>
+<|visual token 051371|>
+<|visual token 051372|>
+<|visual token 051373|>
+<|visual token 051374|>
+<|visual token 051375|>
+<|visual token 051376|>
+<|visual token 051377|>
+<|visual token 051378|>
+<|visual token 051379|>
+<|visual token 051380|>
+<|visual token 051381|>
+<|visual token 051382|>
+<|visual token 051383|>
+<|visual token 051384|>
+<|visual token 051385|>
+<|visual token 051386|>
+<|visual token 051387|>
+<|visual token 051388|>
+<|visual token 051389|>
+<|visual token 051390|>
+<|visual token 051391|>
+<|visual token 051392|>
+<|visual token 051393|>
+<|visual token 051394|>
+<|visual token 051395|>
+<|visual token 051396|>
+<|visual token 051397|>
+<|visual token 051398|>
+<|visual token 051399|>
+<|visual token 051400|>
+<|visual token 051401|>
+<|visual token 051402|>
+<|visual token 051403|>
+<|visual token 051404|>
+<|visual token 051405|>
+<|visual token 051406|>
+<|visual token 051407|>
+<|visual token 051408|>
+<|visual token 051409|>
+<|visual token 051410|>
+<|visual token 051411|>
+<|visual token 051412|>
+<|visual token 051413|>
+<|visual token 051414|>
+<|visual token 051415|>
+<|visual token 051416|>
+<|visual token 051417|>
+<|visual token 051418|>
+<|visual token 051419|>
+<|visual token 051420|>
+<|visual token 051421|>
+<|visual token 051422|>
+<|visual token 051423|>
+<|visual token 051424|>
+<|visual token 051425|>
+<|visual token 051426|>
+<|visual token 051427|>
+<|visual token 051428|>
+<|visual token 051429|>
+<|visual token 051430|>
+<|visual token 051431|>
+<|visual token 051432|>
+<|visual token 051433|>
+<|visual token 051434|>
+<|visual token 051435|>
+<|visual token 051436|>
+<|visual token 051437|>
+<|visual token 051438|>
+<|visual token 051439|>
+<|visual token 051440|>
+<|visual token 051441|>
+<|visual token 051442|>
+<|visual token 051443|>
+<|visual token 051444|>
+<|visual token 051445|>
+<|visual token 051446|>
+<|visual token 051447|>
+<|visual token 051448|>
+<|visual token 051449|>
+<|visual token 051450|>
+<|visual token 051451|>
+<|visual token 051452|>
+<|visual token 051453|>
+<|visual token 051454|>
+<|visual token 051455|>
+<|visual token 051456|>
+<|visual token 051457|>
+<|visual token 051458|>
+<|visual token 051459|>
+<|visual token 051460|>
+<|visual token 051461|>
+<|visual token 051462|>
+<|visual token 051463|>
+<|visual token 051464|>
+<|visual token 051465|>
+<|visual token 051466|>
+<|visual token 051467|>
+<|visual token 051468|>
+<|visual token 051469|>
+<|visual token 051470|>
+<|visual token 051471|>
+<|visual token 051472|>
+<|visual token 051473|>
+<|visual token 051474|>
+<|visual token 051475|>
+<|visual token 051476|>
+<|visual token 051477|>
+<|visual token 051478|>
+<|visual token 051479|>
+<|visual token 051480|>
+<|visual token 051481|>
+<|visual token 051482|>
+<|visual token 051483|>
+<|visual token 051484|>
+<|visual token 051485|>
+<|visual token 051486|>
+<|visual token 051487|>
+<|visual token 051488|>
+<|visual token 051489|>
+<|visual token 051490|>
+<|visual token 051491|>
+<|visual token 051492|>
+<|visual token 051493|>
+<|visual token 051494|>
+<|visual token 051495|>
+<|visual token 051496|>
+<|visual token 051497|>
+<|visual token 051498|>
+<|visual token 051499|>
+<|visual token 051500|>
+<|visual token 051501|>
+<|visual token 051502|>
+<|visual token 051503|>
+<|visual token 051504|>
+<|visual token 051505|>
+<|visual token 051506|>
+<|visual token 051507|>
+<|visual token 051508|>
+<|visual token 051509|>
+<|visual token 051510|>
+<|visual token 051511|>
+<|visual token 051512|>
+<|visual token 051513|>
+<|visual token 051514|>
+<|visual token 051515|>
+<|visual token 051516|>
+<|visual token 051517|>
+<|visual token 051518|>
+<|visual token 051519|>
+<|visual token 051520|>
+<|visual token 051521|>
+<|visual token 051522|>
+<|visual token 051523|>
+<|visual token 051524|>
+<|visual token 051525|>
+<|visual token 051526|>
+<|visual token 051527|>
+<|visual token 051528|>
+<|visual token 051529|>
+<|visual token 051530|>
+<|visual token 051531|>
+<|visual token 051532|>
+<|visual token 051533|>
+<|visual token 051534|>
+<|visual token 051535|>
+<|visual token 051536|>
+<|visual token 051537|>
+<|visual token 051538|>
+<|visual token 051539|>
+<|visual token 051540|>
+<|visual token 051541|>
+<|visual token 051542|>
+<|visual token 051543|>
+<|visual token 051544|>
+<|visual token 051545|>
+<|visual token 051546|>
+<|visual token 051547|>
+<|visual token 051548|>
+<|visual token 051549|>
+<|visual token 051550|>
+<|visual token 051551|>
+<|visual token 051552|>
+<|visual token 051553|>
+<|visual token 051554|>
+<|visual token 051555|>
+<|visual token 051556|>
+<|visual token 051557|>
+<|visual token 051558|>
+<|visual token 051559|>
+<|visual token 051560|>
+<|visual token 051561|>
+<|visual token 051562|>
+<|visual token 051563|>
+<|visual token 051564|>
+<|visual token 051565|>
+<|visual token 051566|>
+<|visual token 051567|>
+<|visual token 051568|>
+<|visual token 051569|>
+<|visual token 051570|>
+<|visual token 051571|>
+<|visual token 051572|>
+<|visual token 051573|>
+<|visual token 051574|>
+<|visual token 051575|>
+<|visual token 051576|>
+<|visual token 051577|>
+<|visual token 051578|>
+<|visual token 051579|>
+<|visual token 051580|>
+<|visual token 051581|>
+<|visual token 051582|>
+<|visual token 051583|>
+<|visual token 051584|>
+<|visual token 051585|>
+<|visual token 051586|>
+<|visual token 051587|>
+<|visual token 051588|>
+<|visual token 051589|>
+<|visual token 051590|>
+<|visual token 051591|>
+<|visual token 051592|>
+<|visual token 051593|>
+<|visual token 051594|>
+<|visual token 051595|>
+<|visual token 051596|>
+<|visual token 051597|>
+<|visual token 051598|>
+<|visual token 051599|>
+<|visual token 051600|>
+<|visual token 051601|>
+<|visual token 051602|>
+<|visual token 051603|>
+<|visual token 051604|>
+<|visual token 051605|>
+<|visual token 051606|>
+<|visual token 051607|>
+<|visual token 051608|>
+<|visual token 051609|>
+<|visual token 051610|>
+<|visual token 051611|>
+<|visual token 051612|>
+<|visual token 051613|>
+<|visual token 051614|>
+<|visual token 051615|>
+<|visual token 051616|>
+<|visual token 051617|>
+<|visual token 051618|>
+<|visual token 051619|>
+<|visual token 051620|>
+<|visual token 051621|>
+<|visual token 051622|>
+<|visual token 051623|>
+<|visual token 051624|>
+<|visual token 051625|>
+<|visual token 051626|>
+<|visual token 051627|>
+<|visual token 051628|>
+<|visual token 051629|>
+<|visual token 051630|>
+<|visual token 051631|>
+<|visual token 051632|>
+<|visual token 051633|>
+<|visual token 051634|>
+<|visual token 051635|>
+<|visual token 051636|>
+<|visual token 051637|>
+<|visual token 051638|>
+<|visual token 051639|>
+<|visual token 051640|>
+<|visual token 051641|>
+<|visual token 051642|>
+<|visual token 051643|>
+<|visual token 051644|>
+<|visual token 051645|>
+<|visual token 051646|>
+<|visual token 051647|>
+<|visual token 051648|>
+<|visual token 051649|>
+<|visual token 051650|>
+<|visual token 051651|>
+<|visual token 051652|>
+<|visual token 051653|>
+<|visual token 051654|>
+<|visual token 051655|>
+<|visual token 051656|>
+<|visual token 051657|>
+<|visual token 051658|>
+<|visual token 051659|>
+<|visual token 051660|>
+<|visual token 051661|>
+<|visual token 051662|>
+<|visual token 051663|>
+<|visual token 051664|>
+<|visual token 051665|>
+<|visual token 051666|>
+<|visual token 051667|>
+<|visual token 051668|>
+<|visual token 051669|>
+<|visual token 051670|>
+<|visual token 051671|>
+<|visual token 051672|>
+<|visual token 051673|>
+<|visual token 051674|>
+<|visual token 051675|>
+<|visual token 051676|>
+<|visual token 051677|>
+<|visual token 051678|>
+<|visual token 051679|>
+<|visual token 051680|>
+<|visual token 051681|>
+<|visual token 051682|>
+<|visual token 051683|>
+<|visual token 051684|>
+<|visual token 051685|>
+<|visual token 051686|>
+<|visual token 051687|>
+<|visual token 051688|>
+<|visual token 051689|>
+<|visual token 051690|>
+<|visual token 051691|>
+<|visual token 051692|>
+<|visual token 051693|>
+<|visual token 051694|>
+<|visual token 051695|>
+<|visual token 051696|>
+<|visual token 051697|>
+<|visual token 051698|>
+<|visual token 051699|>
+<|visual token 051700|>
+<|visual token 051701|>
+<|visual token 051702|>
+<|visual token 051703|>
+<|visual token 051704|>
+<|visual token 051705|>
+<|visual token 051706|>
+<|visual token 051707|>
+<|visual token 051708|>
+<|visual token 051709|>
+<|visual token 051710|>
+<|visual token 051711|>
+<|visual token 051712|>
+<|visual token 051713|>
+<|visual token 051714|>
+<|visual token 051715|>
+<|visual token 051716|>
+<|visual token 051717|>
+<|visual token 051718|>
+<|visual token 051719|>
+<|visual token 051720|>
+<|visual token 051721|>
+<|visual token 051722|>
+<|visual token 051723|>
+<|visual token 051724|>
+<|visual token 051725|>
+<|visual token 051726|>
+<|visual token 051727|>
+<|visual token 051728|>
+<|visual token 051729|>
+<|visual token 051730|>
+<|visual token 051731|>
+<|visual token 051732|>
+<|visual token 051733|>
+<|visual token 051734|>
+<|visual token 051735|>
+<|visual token 051736|>
+<|visual token 051737|>
+<|visual token 051738|>
+<|visual token 051739|>
+<|visual token 051740|>
+<|visual token 051741|>
+<|visual token 051742|>
+<|visual token 051743|>
+<|visual token 051744|>
+<|visual token 051745|>
+<|visual token 051746|>
+<|visual token 051747|>
+<|visual token 051748|>
+<|visual token 051749|>
+<|visual token 051750|>
+<|visual token 051751|>
+<|visual token 051752|>
+<|visual token 051753|>
+<|visual token 051754|>
+<|visual token 051755|>
+<|visual token 051756|>
+<|visual token 051757|>
+<|visual token 051758|>
+<|visual token 051759|>
+<|visual token 051760|>
+<|visual token 051761|>
+<|visual token 051762|>
+<|visual token 051763|>
+<|visual token 051764|>
+<|visual token 051765|>
+<|visual token 051766|>
+<|visual token 051767|>
+<|visual token 051768|>
+<|visual token 051769|>
+<|visual token 051770|>
+<|visual token 051771|>
+<|visual token 051772|>
+<|visual token 051773|>
+<|visual token 051774|>
+<|visual token 051775|>
+<|visual token 051776|>
+<|visual token 051777|>
+<|visual token 051778|>
+<|visual token 051779|>
+<|visual token 051780|>
+<|visual token 051781|>
+<|visual token 051782|>
+<|visual token 051783|>
+<|visual token 051784|>
+<|visual token 051785|>
+<|visual token 051786|>
+<|visual token 051787|>
+<|visual token 051788|>
+<|visual token 051789|>
+<|visual token 051790|>
+<|visual token 051791|>
+<|visual token 051792|>
+<|visual token 051793|>
+<|visual token 051794|>
+<|visual token 051795|>
+<|visual token 051796|>
+<|visual token 051797|>
+<|visual token 051798|>
+<|visual token 051799|>
+<|visual token 051800|>
+<|visual token 051801|>
+<|visual token 051802|>
+<|visual token 051803|>
+<|visual token 051804|>
+<|visual token 051805|>
+<|visual token 051806|>
+<|visual token 051807|>
+<|visual token 051808|>
+<|visual token 051809|>
+<|visual token 051810|>
+<|visual token 051811|>
+<|visual token 051812|>
+<|visual token 051813|>
+<|visual token 051814|>
+<|visual token 051815|>
+<|visual token 051816|>
+<|visual token 051817|>
+<|visual token 051818|>
+<|visual token 051819|>
+<|visual token 051820|>
+<|visual token 051821|>
+<|visual token 051822|>
+<|visual token 051823|>
+<|visual token 051824|>
+<|visual token 051825|>
+<|visual token 051826|>
+<|visual token 051827|>
+<|visual token 051828|>
+<|visual token 051829|>
+<|visual token 051830|>
+<|visual token 051831|>
+<|visual token 051832|>
+<|visual token 051833|>
+<|visual token 051834|>
+<|visual token 051835|>
+<|visual token 051836|>
+<|visual token 051837|>
+<|visual token 051838|>
+<|visual token 051839|>
+<|visual token 051840|>
+<|visual token 051841|>
+<|visual token 051842|>
+<|visual token 051843|>
+<|visual token 051844|>
+<|visual token 051845|>
+<|visual token 051846|>
+<|visual token 051847|>
+<|visual token 051848|>
+<|visual token 051849|>
+<|visual token 051850|>
+<|visual token 051851|>
+<|visual token 051852|>
+<|visual token 051853|>
+<|visual token 051854|>
+<|visual token 051855|>
+<|visual token 051856|>
+<|visual token 051857|>
+<|visual token 051858|>
+<|visual token 051859|>
+<|visual token 051860|>
+<|visual token 051861|>
+<|visual token 051862|>
+<|visual token 051863|>
+<|visual token 051864|>
+<|visual token 051865|>
+<|visual token 051866|>
+<|visual token 051867|>
+<|visual token 051868|>
+<|visual token 051869|>
+<|visual token 051870|>
+<|visual token 051871|>
+<|visual token 051872|>
+<|visual token 051873|>
+<|visual token 051874|>
+<|visual token 051875|>
+<|visual token 051876|>
+<|visual token 051877|>
+<|visual token 051878|>
+<|visual token 051879|>
+<|visual token 051880|>
+<|visual token 051881|>
+<|visual token 051882|>
+<|visual token 051883|>
+<|visual token 051884|>
+<|visual token 051885|>
+<|visual token 051886|>
+<|visual token 051887|>
+<|visual token 051888|>
+<|visual token 051889|>
+<|visual token 051890|>
+<|visual token 051891|>
+<|visual token 051892|>
+<|visual token 051893|>
+<|visual token 051894|>
+<|visual token 051895|>
+<|visual token 051896|>
+<|visual token 051897|>
+<|visual token 051898|>
+<|visual token 051899|>
+<|visual token 051900|>
+<|visual token 051901|>
+<|visual token 051902|>
+<|visual token 051903|>
+<|visual token 051904|>
+<|visual token 051905|>
+<|visual token 051906|>
+<|visual token 051907|>
+<|visual token 051908|>
+<|visual token 051909|>
+<|visual token 051910|>
+<|visual token 051911|>
+<|visual token 051912|>
+<|visual token 051913|>
+<|visual token 051914|>
+<|visual token 051915|>
+<|visual token 051916|>
+<|visual token 051917|>
+<|visual token 051918|>
+<|visual token 051919|>
+<|visual token 051920|>
+<|visual token 051921|>
+<|visual token 051922|>
+<|visual token 051923|>
+<|visual token 051924|>
+<|visual token 051925|>
+<|visual token 051926|>
+<|visual token 051927|>
+<|visual token 051928|>
+<|visual token 051929|>
+<|visual token 051930|>
+<|visual token 051931|>
+<|visual token 051932|>
+<|visual token 051933|>
+<|visual token 051934|>
+<|visual token 051935|>
+<|visual token 051936|>
+<|visual token 051937|>
+<|visual token 051938|>
+<|visual token 051939|>
+<|visual token 051940|>
+<|visual token 051941|>
+<|visual token 051942|>
+<|visual token 051943|>
+<|visual token 051944|>
+<|visual token 051945|>
+<|visual token 051946|>
+<|visual token 051947|>
+<|visual token 051948|>
+<|visual token 051949|>
+<|visual token 051950|>
+<|visual token 051951|>
+<|visual token 051952|>
+<|visual token 051953|>
+<|visual token 051954|>
+<|visual token 051955|>
+<|visual token 051956|>
+<|visual token 051957|>
+<|visual token 051958|>
+<|visual token 051959|>
+<|visual token 051960|>
+<|visual token 051961|>
+<|visual token 051962|>
+<|visual token 051963|>
+<|visual token 051964|>
+<|visual token 051965|>
+<|visual token 051966|>
+<|visual token 051967|>
+<|visual token 051968|>
+<|visual token 051969|>
+<|visual token 051970|>
+<|visual token 051971|>
+<|visual token 051972|>
+<|visual token 051973|>
+<|visual token 051974|>
+<|visual token 051975|>
+<|visual token 051976|>
+<|visual token 051977|>
+<|visual token 051978|>
+<|visual token 051979|>
+<|visual token 051980|>
+<|visual token 051981|>
+<|visual token 051982|>
+<|visual token 051983|>
+<|visual token 051984|>
+<|visual token 051985|>
+<|visual token 051986|>
+<|visual token 051987|>
+<|visual token 051988|>
+<|visual token 051989|>
+<|visual token 051990|>
+<|visual token 051991|>
+<|visual token 051992|>
+<|visual token 051993|>
+<|visual token 051994|>
+<|visual token 051995|>
+<|visual token 051996|>
+<|visual token 051997|>
+<|visual token 051998|>
+<|visual token 051999|>
+<|visual token 052000|>
+<|visual token 052001|>
+<|visual token 052002|>
+<|visual token 052003|>
+<|visual token 052004|>
+<|visual token 052005|>
+<|visual token 052006|>
+<|visual token 052007|>
+<|visual token 052008|>
+<|visual token 052009|>
+<|visual token 052010|>
+<|visual token 052011|>
+<|visual token 052012|>
+<|visual token 052013|>
+<|visual token 052014|>
+<|visual token 052015|>
+<|visual token 052016|>
+<|visual token 052017|>
+<|visual token 052018|>
+<|visual token 052019|>
+<|visual token 052020|>
+<|visual token 052021|>
+<|visual token 052022|>
+<|visual token 052023|>
+<|visual token 052024|>
+<|visual token 052025|>
+<|visual token 052026|>
+<|visual token 052027|>
+<|visual token 052028|>
+<|visual token 052029|>
+<|visual token 052030|>
+<|visual token 052031|>
+<|visual token 052032|>
+<|visual token 052033|>
+<|visual token 052034|>
+<|visual token 052035|>
+<|visual token 052036|>
+<|visual token 052037|>
+<|visual token 052038|>
+<|visual token 052039|>
+<|visual token 052040|>
+<|visual token 052041|>
+<|visual token 052042|>
+<|visual token 052043|>
+<|visual token 052044|>
+<|visual token 052045|>
+<|visual token 054608|>
+<|visual token 054609|>
+<|visual token 054610|>
+<|visual token 054611|>
+<|visual token 054612|>
+<|visual token 054613|>
+<|visual token 054614|>
+<|visual token 054615|>
+<|visual token 054616|>
+<|visual token 054617|>
+<|visual token 054618|>
+<|visual token 054619|>
+<|visual token 054620|>
+<|visual token 054621|>
+<|visual token 054622|>
+<|visual token 054623|>
+<|visual token 054624|>
+<|visual token 054625|>
+<|visual token 054626|>
+<|visual token 054627|>
+<|visual token 054628|>
+<|visual token 054629|>
+<|visual token 054630|>
+<|visual token 054631|>
+<|visual token 054632|>
+<|visual token 054633|>
+<|visual token 054634|>
+<|visual token 054635|>
+<|visual token 054636|>
+<|visual token 054637|>
+<|visual token 054638|>
+<|visual token 054639|>
+<|visual token 054640|>
+<|visual token 054641|>
+<|visual token 054642|>
+<|visual token 054643|>
+<|visual token 054644|>
+<|visual token 054645|>
+<|visual token 054646|>
+<|visual token 054647|>
+<|visual token 054648|>
+<|visual token 054649|>
+<|visual token 054650|>
+<|visual token 054651|>
+<|visual token 054652|>
+<|visual token 054653|>
+<|visual token 054654|>
+<|visual token 054655|>
+<|visual token 054656|>
+<|visual token 054657|>
+<|visual token 054658|>
+<|visual token 054659|>
+<|visual token 054660|>
+<|visual token 054661|>
+<|visual token 054662|>
+<|visual token 054663|>
+<|visual token 054664|>
+<|visual token 054665|>
+<|visual token 054666|>
+<|visual token 054667|>
+<|visual token 054668|>
+<|visual token 054669|>
+<|visual token 054670|>
+<|visual token 054671|>
+<|visual token 054672|>
+<|visual token 054673|>
+<|visual token 054674|>
+<|visual token 054675|>
+<|visual token 054676|>
+<|visual token 054677|>
+<|visual token 054678|>
+<|visual token 054679|>
+<|visual token 054680|>
+<|visual token 054681|>
+<|visual token 054682|>
+<|visual token 054683|>
+<|visual token 054684|>
+<|visual token 054685|>
+<|visual token 054686|>
+<|visual token 054687|>
+<|visual token 054688|>
+<|visual token 054689|>
+<|visual token 054690|>
+<|visual token 054691|>
+<|visual token 054692|>
+<|visual token 054693|>
+<|visual token 054694|>
+<|visual token 054695|>
+<|visual token 054696|>
+<|visual token 054697|>
+<|visual token 054698|>
+<|visual token 054699|>
+<|visual token 054700|>
+<|visual token 054701|>
+<|visual token 054702|>
+<|visual token 054703|>
+<|visual token 054704|>
+<|visual token 054705|>
+<|visual token 054706|>
+<|visual token 054707|>
+<|visual token 054708|>
+<|visual token 054709|>
+<|visual token 054710|>
+<|visual token 054711|>
+<|visual token 054712|>
+<|visual token 054713|>
+<|visual token 054714|>
+<|visual token 054715|>
+<|visual token 054716|>
+<|visual token 054717|>
+<|visual token 054718|>
+<|visual token 054719|>
+<|visual token 054720|>
+<|visual token 054721|>
+<|visual token 054722|>
+<|visual token 054723|>
+<|visual token 054724|>
+<|visual token 054725|>
+<|visual token 054726|>
+<|visual token 054727|>
+<|visual token 054728|>
+<|visual token 054729|>
+<|visual token 054730|>
+<|visual token 054731|>
+<|visual token 054732|>
+<|visual token 054733|>
+<|visual token 054734|>
+<|visual token 054735|>
+<|visual token 054736|>
+<|visual token 054737|>
+<|visual token 054738|>
+<|visual token 054739|>
+<|visual token 054740|>
+<|visual token 054741|>
+<|visual token 054742|>
+<|visual token 054743|>
+<|visual token 054744|>
+<|visual token 054745|>
+<|visual token 054746|>
+<|visual token 054747|>
+<|visual token 054748|>
+<|visual token 054749|>
+<|visual token 054750|>
+<|visual token 054751|>
+<|visual token 054752|>
+<|visual token 054753|>
+<|visual token 054754|>
+<|visual token 054755|>
+<|visual token 054756|>
+<|visual token 054757|>
+<|visual token 054758|>
+<|visual token 054759|>
+<|visual token 054760|>
+<|visual token 054761|>
+<|visual token 054762|>
+<|visual token 054763|>
+<|visual token 054764|>
+<|visual token 054765|>
+<|visual token 054766|>
+<|visual token 054767|>
+<|visual token 054768|>
+<|visual token 054769|>
+<|visual token 054770|>
+<|visual token 054771|>
+<|visual token 054772|>
+<|visual token 054773|>
+<|visual token 054774|>
+<|visual token 054775|>
+<|visual token 054776|>
+<|visual token 054777|>
+<|visual token 054778|>
+<|visual token 054779|>
+<|visual token 054780|>
+<|visual token 054781|>
+<|visual token 054782|>
+<|visual token 054783|>
+<|visual token 054784|>
+<|visual token 054785|>
+<|visual token 054786|>
+<|visual token 054787|>
+<|visual token 054788|>
+<|visual token 054789|>
+<|visual token 054790|>
+<|visual token 054791|>
+<|visual token 054792|>
+<|visual token 054793|>
+<|visual token 054794|>
+<|visual token 054795|>
+<|visual token 054796|>
+<|visual token 054797|>
+<|visual token 054798|>
+<|visual token 054799|>
+<|visual token 054800|>
+<|visual token 054801|>
+<|visual token 054802|>
+<|visual token 054803|>
+<|visual token 054804|>
+<|visual token 054805|>
+<|visual token 054806|>
+<|visual token 054807|>
+<|visual token 054808|>
+<|visual token 054809|>
+<|visual token 054810|>
+<|visual token 054811|>
+<|visual token 054812|>
+<|visual token 054813|>
+<|visual token 054814|>
+<|visual token 054815|>
+<|visual token 054816|>
+<|visual token 054817|>
+<|visual token 054818|>
+<|visual token 054819|>
+<|visual token 054820|>
+<|visual token 054821|>
+<|visual token 054822|>
+<|visual token 054823|>
+<|visual token 054824|>
+<|visual token 054825|>
+<|visual token 054826|>
+<|visual token 054827|>
+<|visual token 054828|>
+<|visual token 054829|>
+<|visual token 054830|>
+<|visual token 054831|>
+<|visual token 054832|>
+<|visual token 054833|>
+<|visual token 054834|>
+<|visual token 054835|>
+<|visual token 054836|>
+<|visual token 054837|>
+<|visual token 054838|>
+<|visual token 054839|>
+<|visual token 054840|>
+<|visual token 054841|>
+<|visual token 054842|>
+<|visual token 054843|>
+<|visual token 054844|>
+<|visual token 054845|>
+<|visual token 054846|>
+<|visual token 054847|>
+<|visual token 054848|>
+<|visual token 054849|>
+<|visual token 054850|>
+<|visual token 054851|>
+<|visual token 054852|>
+<|visual token 054853|>
+<|visual token 054854|>
+<|visual token 054855|>
+<|visual token 054856|>
+<|visual token 054857|>
+<|visual token 054858|>
+<|visual token 054859|>
+<|visual token 054860|>
+<|visual token 054861|>
+<|visual token 054862|>
+<|visual token 054863|>
+<|visual token 054864|>
+<|visual token 054865|>
+<|visual token 054866|>
+<|visual token 054867|>
+<|visual token 054868|>
+<|visual token 054869|>
+<|visual token 054870|>
+<|visual token 054871|>
+<|visual token 054872|>
+<|visual token 054873|>
+<|visual token 054874|>
+<|visual token 054875|>
+<|visual token 054876|>
+<|visual token 054877|>
+<|visual token 054878|>
+<|visual token 054879|>
+<|visual token 054880|>
+<|visual token 054881|>
+<|visual token 054882|>
+<|visual token 054883|>
+<|visual token 054884|>
+<|visual token 054885|>
+<|visual token 054886|>
+<|visual token 054887|>
+<|visual token 054888|>
+<|visual token 054889|>
+<|visual token 054890|>
+<|visual token 054891|>
+<|visual token 054892|>
+<|visual token 054893|>
+<|visual token 054894|>
+<|visual token 054895|>
+<|visual token 054896|>
+<|visual token 054897|>
+<|visual token 054898|>
+<|visual token 054899|>
+<|visual token 054900|>
+<|visual token 054901|>
+<|visual token 054902|>
+<|visual token 054903|>
+<|visual token 054904|>
+<|visual token 054905|>
+<|visual token 054906|>
+<|visual token 054907|>
+<|visual token 054908|>
+<|visual token 054909|>
+<|visual token 054910|>
+<|visual token 054911|>
+<|visual token 054912|>
+<|visual token 054913|>
+<|visual token 054914|>
+<|visual token 054915|>
+<|visual token 054916|>
+<|visual token 054917|>
+<|visual token 054918|>
+<|visual token 054919|>
+<|visual token 054920|>
+<|visual token 054921|>
+<|visual token 054922|>
+<|visual token 054923|>
+<|visual token 054924|>
+<|visual token 054925|>
+<|visual token 054926|>
+<|visual token 054927|>
+<|visual token 054928|>
+<|visual token 054929|>
+<|visual token 054930|>
+<|visual token 054931|>
+<|visual token 054932|>
+<|visual token 054933|>
+<|visual token 054934|>
+<|visual token 054935|>
+<|visual token 054936|>
+<|visual token 054937|>
+<|visual token 054938|>
+<|visual token 054939|>
+<|visual token 054940|>
+<|visual token 054941|>
+<|visual token 054942|>
+<|visual token 054943|>
+<|visual token 054944|>
+<|visual token 054945|>
+<|visual token 054946|>
+<|visual token 054947|>
+<|visual token 054948|>
+<|visual token 054949|>
+<|visual token 054950|>
+<|visual token 054951|>
+<|visual token 054952|>
+<|visual token 054953|>
+<|visual token 054954|>
+<|visual token 054955|>
+<|visual token 054956|>
+<|visual token 054957|>
+<|visual token 054958|>
+<|visual token 054959|>
+<|visual token 054960|>
+<|visual token 054961|>
+<|visual token 054962|>
+<|visual token 054963|>
+<|visual token 054964|>
+<|visual token 054965|>
+<|visual token 054966|>
+<|visual token 054967|>
+<|visual token 054968|>
+<|visual token 054969|>
+<|visual token 054970|>
+<|visual token 054971|>
+<|visual token 054972|>
+<|visual token 054973|>
+<|visual token 054974|>
+<|visual token 054975|>
+<|visual token 054976|>
+<|visual token 054977|>
+<|visual token 054978|>
+<|visual token 054979|>
+<|visual token 054980|>
+<|visual token 054981|>
+<|visual token 054982|>
+<|visual token 054983|>
+<|visual token 054984|>
+<|visual token 054985|>
+<|visual token 054986|>
+<|visual token 054987|>
+<|visual token 054988|>
+<|visual token 054989|>
+<|visual token 054990|>
+<|visual token 054991|>
+<|visual token 054992|>
+<|visual token 054993|>
+<|visual token 054994|>
+<|visual token 054995|>
+<|visual token 054996|>
+<|visual token 054997|>
+<|visual token 054998|>
+<|visual token 054999|>
+<|visual token 055000|>
+<|visual token 055001|>
+<|visual token 055002|>
+<|visual token 055003|>
+<|visual token 055004|>
+<|visual token 055005|>
+<|visual token 055006|>
+<|visual token 055007|>
+<|visual token 055008|>
+<|visual token 055009|>
+<|visual token 055010|>
+<|visual token 055011|>
+<|visual token 055012|>
+<|visual token 055013|>
+<|visual token 055014|>
+<|visual token 055015|>
+<|visual token 055016|>
+<|visual token 055017|>
+<|visual token 055018|>
+<|visual token 055019|>
+<|visual token 055020|>
+<|visual token 055021|>
+<|visual token 055022|>
+<|visual token 055023|>
+<|visual token 055024|>
+<|visual token 055025|>
+<|visual token 055026|>
+<|visual token 055027|>
+<|visual token 055028|>
+<|visual token 055029|>
+<|visual token 055030|>
+<|visual token 055031|>
+<|visual token 055032|>
+<|visual token 055033|>
+<|visual token 055034|>
+<|visual token 055035|>
+<|visual token 055036|>
+<|visual token 055037|>
+<|visual token 055038|>
+<|visual token 055039|>
+<|visual token 055040|>
+<|visual token 055041|>
+<|visual token 055042|>
+<|visual token 055043|>
+<|visual token 055044|>
+<|visual token 055045|>
+<|visual token 055046|>
+<|visual token 055047|>
+<|visual token 055048|>
+<|visual token 055049|>
+<|visual token 055050|>
+<|visual token 055051|>
+<|visual token 055052|>
+<|visual token 055053|>
+<|visual token 055054|>
+<|visual token 055055|>
+<|visual token 055056|>
+<|visual token 055057|>
+<|visual token 055058|>
+<|visual token 055059|>
+<|visual token 055060|>
+<|visual token 055061|>
+<|visual token 055062|>
+<|visual token 055063|>
+<|visual token 055064|>
+<|visual token 055065|>
+<|visual token 055066|>
+<|visual token 055067|>
+<|visual token 055068|>
+<|visual token 055069|>
+<|visual token 055070|>
+<|visual token 055071|>
+<|visual token 055072|>
+<|visual token 055073|>
+<|visual token 055074|>
+<|visual token 055075|>
+<|visual token 055076|>
+<|visual token 055077|>
+<|visual token 055078|>
+<|visual token 055079|>
+<|visual token 055080|>
+<|visual token 055081|>
+<|visual token 055082|>
+<|visual token 055083|>
+<|visual token 055084|>
+<|visual token 055085|>
+<|visual token 055086|>
+<|visual token 055087|>
+<|visual token 055088|>
+<|visual token 055089|>
+<|visual token 055090|>
+<|visual token 055091|>
+<|visual token 055092|>
+<|visual token 055093|>
+<|visual token 055094|>
+<|visual token 055095|>
+<|visual token 055096|>
+<|visual token 055097|>
+<|visual token 055098|>
+<|visual token 055099|>
+<|visual token 055100|>
+<|visual token 055101|>
+<|visual token 055102|>
+<|visual token 055103|>
+<|visual token 055104|>
+<|visual token 055105|>
+<|visual token 055106|>
+<|visual token 055107|>
+<|visual token 055108|>
+<|visual token 055109|>
+<|visual token 055110|>
+<|visual token 055111|>
+<|visual token 055112|>
+<|visual token 055113|>
+<|visual token 055114|>
+<|visual token 055115|>
+<|visual token 055116|>
+<|visual token 055117|>
+<|visual token 055118|>
+<|visual token 055119|>
+<|visual token 055120|>
+<|visual token 055121|>
+<|visual token 055122|>
+<|visual token 055123|>
+<|visual token 055124|>
+<|visual token 055125|>
+<|visual token 055126|>
+<|visual token 055127|>
+<|visual token 055128|>
+<|visual token 055129|>
+<|visual token 055130|>
+<|visual token 055131|>
+<|visual token 055132|>
+<|visual token 055133|>
+<|visual token 055134|>
+<|visual token 055135|>
+<|visual token 055136|>
+<|visual token 055137|>
+<|visual token 055138|>
+<|visual token 055139|>
+<|visual token 055140|>
+<|visual token 055141|>
+<|visual token 055142|>
+<|visual token 055143|>
+<|visual token 055144|>
+<|visual token 055145|>
+<|visual token 055146|>
+<|visual token 055147|>
+<|visual token 055148|>
+<|visual token 055149|>
+<|visual token 055150|>
+<|visual token 055151|>
+<|visual token 055152|>
+<|visual token 055153|>
+<|visual token 055154|>
+<|visual token 055155|>
+<|visual token 055156|>
+<|visual token 055157|>
+<|visual token 055158|>
+<|visual token 055159|>
+<|visual token 055160|>
+<|visual token 055161|>
+<|visual token 055162|>
+<|visual token 055163|>
+<|visual token 055164|>
+<|visual token 055165|>
+<|visual token 055166|>
+<|visual token 055167|>
+<|visual token 055168|>
+<|visual token 055169|>
+<|visual token 055170|>
+<|visual token 055171|>
+<|visual token 055172|>
+<|visual token 055173|>
+<|visual token 055174|>
+<|visual token 055175|>
+<|visual token 055176|>
+<|visual token 055177|>
+<|visual token 055178|>
+<|visual token 055179|>
+<|visual token 055180|>
+<|visual token 055181|>
+<|visual token 055182|>
+<|visual token 055183|>
+<|visual token 055184|>
+<|visual token 055185|>
+<|visual token 055186|>
+<|visual token 055187|>
+<|visual token 055188|>
+<|visual token 055189|>
+<|visual token 055190|>
+<|visual token 055191|>
+<|visual token 055192|>
+<|visual token 055193|>
+<|visual token 055194|>
+<|visual token 055195|>
+<|visual token 055196|>
+<|visual token 055197|>
+<|visual token 055198|>
+<|visual token 055199|>
+<|visual token 055200|>
+<|visual token 055201|>
+<|visual token 055202|>
+<|visual token 055203|>
+<|visual token 055204|>
+<|visual token 055205|>
+<|visual token 055206|>
+<|visual token 055207|>
+<|visual token 055208|>
+<|visual token 055209|>
+<|visual token 055210|>
+<|visual token 055211|>
+<|visual token 055212|>
+<|visual token 055213|>
+<|visual token 055214|>
+<|visual token 055215|>
+<|visual token 055216|>
+<|visual token 055217|>
+<|visual token 055218|>
+<|visual token 055219|>
+<|visual token 055220|>
+<|visual token 055221|>
+<|visual token 055222|>
+<|visual token 055223|>
+<|visual token 055224|>
+<|visual token 055225|>
+<|visual token 055226|>
+<|visual token 055227|>
+<|visual token 055228|>
+<|visual token 055229|>
+<|visual token 055230|>
+<|visual token 055231|>
+<|visual token 055232|>
+<|visual token 055233|>
+<|visual token 055234|>
+<|visual token 055235|>
+<|visual token 055236|>
+<|visual token 055237|>
+<|visual token 055238|>
+<|visual token 055239|>
+<|visual token 055240|>
+<|visual token 055241|>
+<|visual token 055242|>
+<|visual token 055243|>
+<|visual token 055244|>
+<|visual token 055245|>
+<|visual token 055246|>
+<|visual token 055247|>
+<|visual token 055248|>
+<|visual token 055249|>
+<|visual token 055250|>
+<|visual token 055251|>
+<|visual token 055252|>
+<|visual token 055253|>
+<|visual token 055254|>
+<|visual token 055255|>
+<|visual token 055256|>
+<|visual token 055257|>
+<|visual token 055258|>
+<|visual token 055259|>
+<|visual token 055260|>
+<|visual token 055261|>
+<|visual token 055262|>
+<|visual token 055263|>
+<|visual token 055264|>
+<|visual token 055265|>
+<|visual token 055266|>
+<|visual token 055267|>
+<|visual token 055268|>
+<|visual token 055269|>
+<|visual token 055270|>
+<|visual token 055271|>
+<|visual token 055272|>
+<|visual token 055273|>
+<|visual token 055274|>
+<|visual token 055275|>
+<|visual token 055276|>
+<|visual token 055277|>
+<|visual token 055278|>
+<|visual token 055279|>
+<|visual token 055280|>
+<|visual token 055281|>
+<|visual token 055282|>
+<|visual token 055283|>
+<|visual token 055284|>
+<|visual token 055285|>
+<|visual token 055286|>
+<|visual token 055287|>
+<|visual token 055288|>
+<|visual token 055289|>
+<|visual token 055290|>
+<|visual token 055291|>
+<|visual token 055292|>
+<|visual token 055293|>
+<|visual token 055294|>
+<|visual token 055295|>
+<|visual token 055296|>
+<|visual token 055297|>
+<|visual token 055298|>
+<|visual token 055299|>
+<|visual token 055300|>
+<|visual token 055301|>
+<|visual token 055302|>
+<|visual token 055303|>
+<|visual token 055304|>
+<|visual token 055305|>
+<|visual token 055306|>
+<|visual token 055307|>
+<|visual token 055308|>
+<|visual token 055309|>
+<|visual token 055310|>
+<|visual token 055311|>
+<|visual token 055312|>
+<|visual token 055313|>
+<|visual token 055314|>
+<|visual token 055315|>
+<|visual token 055316|>
+<|visual token 055317|>
+<|visual token 055318|>
+<|visual token 055319|>
+<|visual token 055320|>
+<|visual token 055321|>
+<|visual token 055322|>
+<|visual token 055323|>
+<|visual token 055324|>
+<|visual token 055325|>
+<|visual token 055326|>
+<|visual token 055327|>
+<|visual token 055328|>
+<|visual token 055329|>
+<|visual token 055330|>
+<|visual token 055331|>
+<|visual token 055332|>
+<|visual token 055333|>
+<|visual token 055334|>
+<|visual token 055335|>
+<|visual token 055336|>
+<|visual token 055337|>
+<|visual token 055338|>
+<|visual token 055339|>
+<|visual token 055340|>
+<|visual token 055341|>
+<|visual token 055342|>
+<|visual token 055343|>
+<|visual token 055344|>
+<|visual token 055345|>
+<|visual token 055346|>
+<|visual token 055347|>
+<|visual token 055348|>
+<|visual token 055349|>
+<|visual token 055350|>
+<|visual token 055351|>
+<|visual token 055352|>
+<|visual token 055353|>
+<|visual token 055354|>
+<|visual token 055355|>
+<|visual token 055356|>
+<|visual token 055357|>
+<|visual token 055358|>
+<|visual token 055359|>
+<|visual token 055360|>
+<|visual token 055361|>
+<|visual token 055362|>
+<|visual token 055363|>
+<|visual token 055364|>
+<|visual token 055365|>
+<|visual token 055366|>
+<|visual token 055367|>
+<|visual token 055368|>
+<|visual token 055369|>
+<|visual token 055370|>
+<|visual token 055371|>
+<|visual token 055372|>
+<|visual token 055373|>
+<|visual token 055374|>
+<|visual token 055375|>
+<|visual token 055376|>
+<|visual token 055377|>
+<|visual token 055378|>
+<|visual token 055379|>
+<|visual token 055380|>
+<|visual token 055381|>
+<|visual token 055382|>
+<|visual token 055383|>
+<|visual token 055384|>
+<|visual token 055385|>
+<|visual token 055386|>
+<|visual token 055387|>
+<|visual token 055388|>
+<|visual token 055389|>
+<|visual token 055390|>
+<|visual token 055391|>
+<|visual token 055392|>
+<|visual token 055393|>
+<|visual token 055394|>
+<|visual token 055395|>
+<|visual token 055396|>
+<|visual token 055397|>
+<|visual token 055398|>
+<|visual token 055399|>
+<|visual token 055400|>
+<|visual token 055401|>
+<|visual token 055402|>
+<|visual token 055403|>
+<|visual token 055404|>
+<|visual token 055405|>
+<|visual token 055406|>
+<|visual token 055407|>
+<|visual token 055408|>
+<|visual token 055409|>
+<|visual token 055410|>
+<|visual token 055411|>
+<|visual token 055412|>
+<|visual token 055413|>
+<|visual token 055414|>
+<|visual token 055415|>
+<|visual token 055416|>
+<|visual token 055417|>
+<|visual token 055418|>
+<|visual token 055419|>
+<|visual token 055420|>
+<|visual token 055421|>
+<|visual token 055422|>
+<|visual token 055423|>
+<|visual token 055424|>
+<|visual token 055425|>
+<|visual token 055426|>
+<|visual token 055427|>
+<|visual token 055428|>
+<|visual token 055429|>
+<|visual token 055430|>
+<|visual token 055431|>
+<|visual token 055432|>
+<|visual token 055433|>
+<|visual token 055434|>
+<|visual token 055435|>
+<|visual token 055436|>
+<|visual token 055437|>
+<|visual token 055438|>
+<|visual token 055439|>
+<|visual token 055440|>
+<|visual token 055441|>
+<|visual token 055442|>
+<|visual token 055443|>
+<|visual token 055444|>
+<|visual token 055445|>
+<|visual token 055446|>
+<|visual token 055447|>
+<|visual token 055448|>
+<|visual token 055449|>
+<|visual token 055450|>
+<|visual token 055451|>
+<|visual token 055452|>
+<|visual token 055453|>
+<|visual token 055454|>
+<|visual token 055455|>
+<|visual token 055456|>
+<|visual token 055457|>
+<|visual token 055458|>
+<|visual token 055459|>
+<|visual token 055460|>
+<|visual token 055461|>
+<|visual token 055462|>
+<|visual token 055463|>
+<|visual token 055464|>
+<|visual token 055465|>
+<|visual token 055466|>
+<|visual token 055467|>
+<|visual token 055468|>
+<|visual token 055469|>
+<|visual token 055470|>
+<|visual token 055471|>
+<|visual token 055472|>
+<|visual token 055473|>
+<|visual token 055474|>
+<|visual token 055475|>
+<|visual token 055476|>
+<|visual token 055477|>
+<|visual token 055478|>
+<|visual token 055479|>
+<|visual token 055480|>
+<|visual token 055481|>
+<|visual token 055482|>
+<|visual token 055483|>
+<|visual token 055484|>
+<|visual token 055485|>
+<|visual token 055486|>
+<|visual token 055487|>
+<|visual token 055488|>
+<|visual token 055489|>
+<|visual token 055490|>
+<|visual token 055491|>
+<|visual token 055492|>
+<|visual token 055493|>
+<|visual token 055494|>
+<|visual token 055495|>
+<|visual token 055496|>
+<|visual token 055497|>
+<|visual token 055498|>
+<|visual token 055499|>
+<|visual token 055500|>
+<|visual token 055501|>
+<|visual token 055502|>
+<|visual token 055503|>
+<|visual token 055504|>
+<|visual token 055505|>
+<|visual token 055506|>
+<|visual token 055507|>
+<|visual token 055508|>
+<|visual token 055509|>
+<|visual token 055510|>
+<|visual token 055511|>
+<|visual token 055512|>
+<|visual token 055513|>
+<|visual token 055514|>
+<|visual token 055515|>
+<|visual token 055516|>
+<|visual token 055517|>
+<|visual token 055518|>
+<|visual token 055519|>
+<|visual token 055520|>
+<|visual token 055521|>
+<|visual token 055522|>
+<|visual token 055523|>
+<|visual token 055524|>
+<|visual token 055525|>
+<|visual token 055526|>
+<|visual token 055527|>
+<|visual token 055528|>
+<|visual token 055529|>
+<|visual token 055530|>
+<|visual token 055531|>
+<|visual token 055532|>
+<|visual token 055533|>
+<|visual token 055534|>
+<|visual token 055535|>
+<|visual token 055536|>
+<|visual token 055537|>
+<|visual token 055538|>
+<|visual token 055539|>
+<|visual token 055540|>
+<|visual token 055541|>
+<|visual token 055542|>
+<|visual token 055543|>
+<|visual token 055544|>
+<|visual token 055545|>
+<|visual token 055546|>
+<|visual token 055547|>
+<|visual token 055548|>
+<|visual token 055549|>
+<|visual token 055550|>
+<|visual token 055551|>
+<|visual token 055552|>
+<|visual token 055553|>
+<|visual token 055554|>
+<|visual token 055555|>
+<|visual token 055556|>
+<|visual token 055557|>
+<|visual token 055558|>
+<|visual token 055559|>
+<|visual token 055560|>
+<|visual token 055561|>
+<|visual token 055562|>
+<|visual token 055563|>
+<|visual token 055564|>
+<|visual token 055565|>
+<|visual token 055566|>
+<|visual token 055567|>
+<|visual token 055568|>
+<|visual token 055569|>
+<|visual token 055570|>
+<|visual token 055571|>
+<|visual token 055572|>
+<|visual token 055573|>
+<|visual token 055574|>
+<|visual token 055575|>
+<|visual token 055576|>
+<|visual token 055577|>
+<|visual token 055578|>
+<|visual token 055579|>
+<|visual token 055580|>
+<|visual token 055581|>
+<|visual token 055582|>
+<|visual token 055583|>
+<|visual token 055584|>
+<|visual token 055585|>
+<|visual token 055586|>
+<|visual token 055587|>
+<|visual token 055588|>
+<|visual token 055589|>
+<|visual token 055590|>
+<|visual token 055591|>
+<|visual token 055592|>
+<|visual token 055593|>
+<|visual token 055594|>
+<|visual token 055595|>
+<|visual token 055596|>
+<|visual token 055597|>
+<|visual token 055598|>
+<|visual token 055599|>
+<|visual token 055600|>
+<|visual token 055601|>
+<|visual token 055602|>
+<|visual token 055603|>
+<|visual token 055604|>
+<|visual token 055605|>
+<|visual token 055606|>
+<|visual token 055607|>
+<|visual token 055608|>
+<|visual token 055609|>
+<|visual token 055610|>
+<|visual token 055611|>
+<|visual token 055612|>
+<|visual token 055613|>
+<|visual token 055614|>
+<|visual token 055615|>
+<|visual token 055616|>
+<|visual token 055617|>
+<|visual token 055618|>
+<|visual token 055619|>
+<|visual token 055620|>
+<|visual token 055621|>
+<|visual token 055622|>
+<|visual token 055623|>
+<|visual token 055624|>
+<|visual token 055625|>
+<|visual token 055626|>
+<|visual token 055627|>
+<|visual token 055628|>
+<|visual token 055629|>
+<|visual token 055630|>
+<|visual token 055631|>
+<|visual token 055632|>
+<|visual token 055633|>
+<|visual token 055634|>
+<|visual token 055635|>
+<|visual token 055636|>
+<|visual token 055637|>
+<|visual token 055638|>
+<|visual token 055639|>
+<|visual token 055640|>
+<|visual token 055641|>
+<|visual token 055642|>
+<|visual token 055643|>
+<|visual token 055644|>
+<|visual token 055645|>
+<|visual token 055646|>
+<|visual token 055647|>
+<|visual token 055648|>
+<|visual token 055649|>
+<|visual token 055650|>
+<|visual token 055651|>
+<|visual token 055652|>
+<|visual token 055653|>
+<|visual token 055654|>
+<|visual token 055655|>
+<|visual token 055656|>
+<|visual token 055657|>
+<|visual token 055658|>
+<|visual token 055659|>
+<|visual token 055660|>
+<|visual token 055661|>
+<|visual token 055662|>
+<|visual token 055663|>
+<|visual token 055664|>
+<|visual token 055665|>
+<|visual token 055666|>
+<|visual token 055667|>
+<|visual token 055668|>
+<|visual token 055669|>
+<|visual token 055670|>
+<|visual token 055671|>
+<|visual token 055672|>
+<|visual token 055673|>
+<|visual token 055674|>
+<|visual token 055675|>
+<|visual token 055676|>
+<|visual token 055677|>
+<|visual token 055678|>
+<|visual token 055679|>
+<|visual token 055680|>
+<|visual token 055681|>
+<|visual token 055682|>
+<|visual token 055683|>
+<|visual token 055684|>
+<|visual token 055685|>
+<|visual token 055686|>
+<|visual token 055687|>
+<|visual token 055688|>
+<|visual token 055689|>
+<|visual token 055690|>
+<|visual token 055691|>
+<|visual token 055692|>
+<|visual token 055693|>
+<|visual token 055694|>
+<|visual token 055695|>
+<|visual token 055696|>
+<|visual token 055697|>
+<|visual token 055698|>
+<|visual token 055699|>
+<|visual token 055700|>
+<|visual token 055701|>
+<|visual token 055702|>
+<|visual token 055703|>
+<|visual token 055704|>
+<|visual token 055705|>
+<|visual token 055706|>
+<|visual token 055707|>
+<|visual token 055708|>
+<|visual token 055709|>
+<|visual token 055710|>
+<|visual token 055711|>
+<|visual token 055712|>
+<|visual token 055713|>
+<|visual token 055714|>
+<|visual token 055715|>
+<|visual token 055716|>
+<|visual token 055717|>
+<|visual token 055718|>
+<|visual token 055719|>
+<|visual token 055720|>
+<|visual token 055721|>
+<|visual token 055722|>
+<|visual token 055723|>
+<|visual token 055724|>
+<|visual token 055725|>
+<|visual token 055726|>
+<|visual token 055727|>
+<|visual token 055728|>
+<|visual token 055729|>
+<|visual token 055730|>
+<|visual token 055731|>
+<|visual token 055732|>
+<|visual token 055733|>
+<|visual token 055734|>
+<|visual token 055735|>
+<|visual token 055736|>
+<|visual token 055737|>
+<|visual token 055738|>
+<|visual token 055739|>
+<|visual token 055740|>
+<|visual token 055741|>
+<|visual token 055742|>
+<|visual token 055743|>
+<|visual token 055744|>
+<|visual token 055745|>
+<|visual token 055746|>
+<|visual token 055747|>
+<|visual token 055748|>
+<|visual token 055749|>
+<|visual token 055750|>
+<|visual token 055751|>
+<|visual token 055752|>
+<|visual token 055753|>
+<|visual token 055754|>
+<|visual token 055755|>
+<|visual token 055756|>
+<|visual token 055757|>
+<|visual token 055758|>
+<|visual token 055759|>
+<|visual token 055760|>
+<|visual token 055761|>
+<|visual token 055762|>
+<|visual token 055763|>
+<|visual token 055764|>
+<|visual token 055765|>
+<|visual token 055766|>
+<|visual token 055767|>
+<|visual token 055768|>
+<|visual token 055769|>
+<|visual token 055770|>
+<|visual token 055771|>
+<|visual token 055772|>
+<|visual token 055773|>
+<|visual token 055774|>
+<|visual token 055775|>
+<|visual token 055776|>
+<|visual token 055777|>
+<|visual token 055778|>
+<|visual token 055779|>
+<|visual token 055780|>
+<|visual token 055781|>
+<|visual token 055782|>
+<|visual token 055783|>
+<|visual token 055784|>
+<|visual token 055785|>
+<|visual token 055786|>
+<|visual token 055787|>
+<|visual token 055788|>
+<|visual token 055789|>
+<|visual token 055790|>
+<|visual token 055791|>
+<|visual token 055792|>
+<|visual token 055793|>
+<|visual token 055794|>
+<|visual token 055795|>
+<|visual token 055796|>
+<|visual token 055797|>
+<|visual token 055798|>
+<|visual token 055799|>
+<|visual token 055800|>
+<|visual token 055801|>
+<|visual token 055802|>
+<|visual token 055803|>
+<|visual token 055804|>
+<|visual token 055805|>
+<|visual token 055806|>
+<|visual token 055807|>
+<|visual token 055808|>
+<|visual token 055809|>
+<|visual token 055810|>
+<|visual token 055811|>
+<|visual token 055812|>
+<|visual token 055813|>
+<|visual token 055814|>
+<|visual token 055815|>
+<|visual token 055816|>
+<|visual token 055817|>
+<|visual token 055818|>
+<|visual token 055819|>
+<|visual token 055820|>
+<|visual token 055821|>
+<|visual token 055822|>
+<|visual token 055823|>
+<|visual token 055824|>
+<|visual token 055825|>
+<|visual token 055826|>
+<|visual token 055827|>
+<|visual token 055828|>
+<|visual token 055829|>
+<|visual token 055830|>
+<|visual token 055831|>
+<|visual token 055832|>
+<|visual token 055833|>
+<|visual token 055834|>
+<|visual token 055835|>
+<|visual token 055836|>
+<|visual token 055837|>
+<|visual token 055838|>
+<|visual token 055839|>
+<|visual token 055840|>
+<|visual token 055841|>
+<|visual token 055842|>
+<|visual token 055843|>
+<|visual token 055844|>
+<|visual token 055845|>
+<|visual token 055846|>
+<|visual token 055847|>
+<|visual token 055848|>
+<|visual token 055849|>
+<|visual token 055850|>
+<|visual token 055851|>
+<|visual token 055852|>
+<|visual token 055853|>
+<|visual token 055854|>
+<|visual token 055855|>
+<|visual token 055856|>
+<|visual token 055857|>
+<|visual token 055858|>
+<|visual token 055859|>
+<|visual token 055860|>
+<|visual token 055861|>
+<|visual token 055862|>
+<|visual token 055863|>
+<|visual token 055864|>
+<|visual token 055865|>
+<|visual token 055866|>
+<|visual token 055867|>
+<|visual token 055868|>
+<|visual token 055869|>
+<|visual token 055870|>
+<|visual token 055871|>
+<|visual token 055872|>
+<|visual token 055873|>
+<|visual token 055874|>
+<|visual token 055875|>
+<|visual token 055876|>
+<|visual token 055877|>
+<|visual token 055878|>
+<|visual token 055879|>
+<|visual token 055880|>
+<|visual token 055881|>
+<|visual token 055882|>
+<|visual token 055883|>
+<|visual token 055884|>
+<|visual token 055885|>
+<|visual token 055886|>
+<|visual token 055887|>
+<|visual token 055888|>
+<|visual token 055889|>
+<|visual token 055890|>
+<|visual token 055891|>
+<|visual token 055892|>
+<|visual token 055893|>
+<|visual token 055894|>
+<|visual token 055895|>
+<|visual token 055896|>
+<|visual token 055897|>
+<|visual token 055898|>
+<|visual token 055899|>
+<|visual token 055900|>
+<|visual token 055901|>
+<|visual token 055902|>
+<|visual token 055903|>
+<|visual token 055904|>
+<|visual token 055905|>
+<|visual token 055906|>
+<|visual token 055907|>
+<|visual token 055908|>
+<|visual token 055909|>
+<|visual token 055910|>
+<|visual token 055911|>
+<|visual token 055912|>
+<|visual token 055913|>
+<|visual token 055914|>
+<|visual token 055915|>
+<|visual token 055916|>
+<|visual token 055917|>
+<|visual token 055918|>
+<|visual token 055919|>
+<|visual token 055920|>
+<|visual token 055921|>
+<|visual token 055922|>
+<|visual token 055923|>
+<|visual token 055924|>
+<|visual token 055925|>
+<|visual token 055926|>
+<|visual token 055927|>
+<|visual token 055928|>
+<|visual token 055929|>
+<|visual token 055930|>
+<|visual token 055931|>
+<|visual token 055932|>
+<|visual token 055933|>
+<|visual token 055934|>
+<|visual token 055935|>
+<|visual token 055936|>
+<|visual token 055937|>
+<|visual token 055938|>
+<|visual token 055939|>
+<|visual token 055940|>
+<|visual token 055941|>
+<|visual token 055942|>
+<|visual token 055943|>
+<|visual token 055944|>
+<|visual token 055945|>
+<|visual token 055946|>
+<|visual token 055947|>
+<|visual token 055948|>
+<|visual token 055949|>
+<|visual token 055950|>
+<|visual token 055951|>
+<|visual token 055952|>
+<|visual token 055953|>
+<|visual token 055954|>
+<|visual token 055955|>
+<|visual token 055956|>
+<|visual token 055957|>
+<|visual token 055958|>
+<|visual token 055959|>
+<|visual token 055960|>
+<|visual token 055961|>
+<|visual token 055962|>
+<|visual token 055963|>
+<|visual token 055964|>
+<|visual token 055965|>
+<|visual token 055966|>
+<|visual token 055967|>
+<|visual token 055968|>
+<|visual token 055969|>
+<|visual token 055970|>
+<|visual token 055971|>
+<|visual token 055972|>
+<|visual token 055973|>
+<|visual token 055974|>
+<|visual token 055975|>
+<|visual token 055976|>
+<|visual token 055977|>
+<|visual token 055978|>
+<|visual token 055979|>
+<|visual token 055980|>
+<|visual token 055981|>
+<|visual token 055982|>
+<|visual token 055983|>
+<|visual token 055984|>
+<|visual token 055985|>
+<|visual token 055986|>
+<|visual token 055987|>
+<|visual token 055988|>
+<|visual token 055989|>
+<|visual token 055990|>
+<|visual token 055991|>
+<|visual token 055992|>
+<|visual token 055993|>
+<|visual token 055994|>
+<|visual token 055995|>
+<|visual token 055996|>
+<|visual token 055997|>
+<|visual token 055998|>
+<|visual token 055999|>
+<|visual token 056000|>
+<|visual token 056001|>
+<|visual token 056002|>
+<|visual token 056003|>
+<|visual token 056004|>
+<|visual token 056005|>
+<|visual token 056006|>
+<|visual token 056007|>
+<|visual token 056008|>
+<|visual token 056009|>
+<|visual token 056010|>
+<|visual token 056011|>
+<|visual token 056012|>
+<|visual token 056013|>
+<|visual token 056014|>
+<|visual token 056015|>
+<|visual token 056016|>
+<|visual token 056017|>
+<|visual token 056018|>
+<|visual token 056019|>
+<|visual token 056020|>
+<|visual token 056021|>
+<|visual token 056022|>
+<|visual token 056023|>
+<|visual token 056024|>
+<|visual token 056025|>
+<|visual token 056026|>
+<|visual token 056027|>
+<|visual token 056028|>
+<|visual token 056029|>
+<|visual token 056030|>
+<|visual token 056031|>
+<|visual token 056032|>
+<|visual token 056033|>
+<|visual token 056034|>
+<|visual token 056035|>
+<|visual token 056036|>
+<|visual token 056037|>
+<|visual token 056038|>
+<|visual token 056039|>
+<|visual token 056040|>
+<|visual token 056041|>
+<|visual token 056042|>
+<|visual token 056043|>
+<|visual token 056044|>
+<|visual token 056045|>
+<|visual token 056046|>
+<|visual token 056047|>
+<|visual token 056048|>
+<|visual token 056049|>
+<|visual token 056050|>
+<|visual token 056051|>
+<|visual token 056052|>
+<|visual token 056053|>
+<|visual token 056054|>
+<|visual token 056055|>
+<|visual token 056056|>
+<|visual token 056057|>
+<|visual token 056058|>
+<|visual token 056059|>
+<|visual token 056060|>
+<|visual token 056061|>
+<|visual token 056062|>
+<|visual token 056063|>
+<|visual token 056064|>
+<|visual token 056065|>
+<|visual token 056066|>
+<|visual token 056067|>
+<|visual token 056068|>
+<|visual token 056069|>
+<|visual token 056070|>
+<|visual token 056071|>
+<|visual token 056072|>
+<|visual token 056073|>
+<|visual token 056074|>
+<|visual token 056075|>
+<|visual token 056076|>
+<|visual token 056077|>
+<|visual token 056078|>
+<|visual token 056079|>
+<|visual token 056080|>
+<|visual token 056081|>
+<|visual token 056082|>
+<|visual token 056083|>
+<|visual token 056084|>
+<|visual token 056085|>
+<|visual token 056086|>
+<|visual token 056087|>
+<|visual token 056088|>
+<|visual token 056089|>
+<|visual token 056090|>
+<|visual token 056091|>
+<|visual token 056092|>
+<|visual token 056093|>
+<|visual token 056094|>
+<|visual token 056095|>
+<|visual token 056096|>
+<|visual token 056097|>
+<|visual token 056098|>
+<|visual token 056099|>
+<|visual token 056100|>
+<|visual token 056101|>
+<|visual token 056102|>
+<|visual token 056103|>
+<|visual token 056104|>
+<|visual token 056105|>
+<|visual token 056106|>
+<|visual token 056107|>
+<|visual token 056108|>
+<|visual token 056109|>
+<|visual token 056110|>
+<|visual token 056111|>
+<|visual token 056112|>
+<|visual token 056113|>
+<|visual token 056114|>
+<|visual token 056115|>
+<|visual token 056116|>
+<|visual token 056117|>
+<|visual token 056118|>
+<|visual token 056119|>
+<|visual token 056120|>
+<|visual token 056121|>
+<|visual token 056122|>
+<|visual token 056123|>
+<|visual token 056124|>
+<|visual token 056125|>
+<|visual token 056126|>
+<|visual token 056127|>
+<|visual token 056128|>
+<|visual token 056129|>
+<|visual token 056130|>
+<|visual token 056131|>
+<|visual token 056132|>
+<|visual token 056133|>
+<|visual token 056134|>
+<|visual token 056135|>
+<|visual token 056136|>
+<|visual token 056137|>
+<|visual token 056138|>
+<|visual token 056139|>
+<|visual token 056140|>
+<|visual token 056141|>
+<|visual token 056142|>
+<|visual token 056143|>
+<|visual token 056144|>
+<|visual token 056145|>
+<|visual token 056146|>
+<|visual token 056147|>
+<|visual token 056148|>
+<|visual token 056149|>
+<|visual token 056150|>
+<|visual token 056151|>
+<|visual token 056152|>
+<|visual token 056153|>
+<|visual token 056154|>
+<|visual token 056155|>
+<|visual token 056156|>
+<|visual token 056157|>
+<|visual token 056158|>
+<|visual token 056159|>
+<|visual token 056160|>
+<|visual token 056161|>
+<|visual token 056162|>
+<|visual token 056163|>
+<|visual token 056164|>
+<|visual token 056165|>
+<|visual token 056166|>
+<|visual token 056167|>
+<|visual token 056168|>
+<|visual token 056169|>
+<|visual token 056170|>
+<|visual token 056171|>
+<|visual token 056172|>
+<|visual token 056173|>
+<|visual token 056174|>
+<|visual token 056175|>
+<|visual token 056176|>
+<|visual token 056177|>
+<|visual token 056178|>
+<|visual token 056179|>
+<|visual token 056180|>
+<|visual token 056181|>
+<|visual token 056182|>
+<|visual token 056183|>
+<|visual token 056184|>
+<|visual token 056185|>
+<|visual token 056186|>
+<|visual token 056187|>
+<|visual token 056188|>
+<|visual token 056189|>
+<|visual token 056190|>
+<|visual token 056191|>
+<|visual token 056192|>
+<|visual token 056193|>
+<|visual token 056194|>
+<|visual token 056195|>
+<|visual token 056196|>
+<|visual token 056197|>
+<|visual token 056198|>
+<|visual token 056199|>
+<|visual token 056200|>
+<|visual token 056201|>
+<|visual token 056202|>
+<|visual token 056203|>
+<|visual token 056204|>
+<|visual token 056205|>
+<|visual token 056206|>
+<|visual token 056207|>
+<|visual token 056208|>
+<|visual token 056209|>
+<|visual token 056210|>
+<|visual token 056211|>
+<|visual token 056212|>
+<|visual token 056213|>
+<|visual token 056214|>
+<|visual token 056215|>
+<|visual token 056216|>
+<|visual token 056217|>
+<|visual token 056218|>
+<|visual token 056219|>
+<|visual token 056220|>
+<|visual token 056221|>
+<|visual token 056222|>
+<|visual token 056223|>
+<|visual token 056224|>
+<|visual token 056225|>
+<|visual token 056226|>
+<|visual token 056227|>
+<|visual token 056228|>
+<|visual token 056229|>
+<|visual token 056230|>
+<|visual token 056231|>
+<|visual token 056232|>
+<|visual token 056233|>
+<|visual token 056234|>
+<|visual token 056235|>
+<|visual token 056236|>
+<|visual token 056237|>
+<|visual token 056238|>
+<|visual token 056239|>
+<|visual token 056240|>
+<|visual token 056241|>
+<|visual token 056242|>
+<|visual token 056243|>
+<|visual token 056244|>
+<|visual token 056245|>
+<|visual token 056246|>
+<|visual token 056247|>
+<|visual token 056248|>
+<|visual token 056249|>
+<|visual token 056250|>
+<|visual token 056251|>
+<|visual token 056252|>
+<|visual token 056253|>
+<|visual token 056254|>
+<|visual token 056255|>
+<|visual token 056256|>
+<|visual token 056257|>
+<|visual token 056258|>
+<|visual token 056259|>
+<|visual token 056260|>
+<|visual token 056261|>
+<|visual token 056262|>
+<|visual token 056263|>
+<|visual token 056264|>
+<|visual token 056265|>
+<|visual token 056266|>
+<|visual token 056267|>
+<|visual token 056268|>
+<|visual token 056269|>
+<|visual token 056270|>
+<|visual token 056271|>
+<|visual token 056272|>
+<|visual token 056273|>
+<|visual token 056274|>
+<|visual token 056275|>
+<|visual token 056276|>
+<|visual token 056277|>
+<|visual token 056278|>
+<|visual token 056279|>
+<|visual token 056280|>
+<|visual token 056281|>
+<|visual token 056282|>
+<|visual token 056283|>
+<|visual token 056284|>
+<|visual token 056285|>
+<|visual token 056286|>
+<|visual token 056287|>
+<|visual token 056288|>
+<|visual token 056289|>
+<|visual token 056290|>
+<|visual token 056291|>
+<|visual token 056292|>
+<|visual token 056293|>
+<|visual token 056294|>
+<|visual token 056295|>
+<|visual token 056296|>
+<|visual token 056297|>
+<|visual token 056298|>
+<|visual token 056299|>
+<|visual token 056300|>
+<|visual token 056301|>
+<|visual token 056302|>
+<|visual token 056303|>
+<|visual token 056304|>
+<|visual token 056305|>
+<|visual token 056306|>
+<|visual token 056307|>
+<|visual token 056308|>
+<|visual token 056309|>
+<|visual token 056310|>
+<|visual token 056311|>
+<|visual token 056312|>
+<|visual token 056313|>
+<|visual token 056314|>
+<|visual token 056315|>
+<|visual token 056316|>
+<|visual token 056317|>
+<|visual token 056318|>
+<|visual token 056319|>
+<|visual token 056320|>
+<|visual token 056321|>
+<|visual token 056322|>
+<|visual token 056323|>
+<|visual token 056324|>
+<|visual token 056325|>
+<|visual token 056326|>
+<|visual token 056327|>
+<|visual token 056328|>
+<|visual token 056329|>
+<|visual token 056330|>
+<|visual token 056331|>
+<|visual token 056332|>
+<|visual token 056333|>
+<|visual token 056334|>
+<|visual token 056335|>
+<|visual token 056336|>
+<|visual token 056337|>
+<|visual token 056338|>
+<|visual token 056339|>
+<|visual token 056340|>
+<|visual token 056341|>
+<|visual token 056342|>
+<|visual token 056343|>
+<|visual token 056344|>
+<|visual token 056345|>
+<|visual token 056346|>
+<|visual token 056347|>
+<|visual token 056348|>
+<|visual token 056349|>
+<|visual token 056350|>
+<|visual token 056351|>
+<|visual token 056352|>
+<|visual token 056353|>
+<|visual token 056354|>
+<|visual token 056355|>
+<|visual token 056356|>
+<|visual token 056357|>
+<|visual token 056358|>
+<|visual token 056359|>
+<|visual token 056360|>
+<|visual token 056361|>
+<|visual token 056362|>
+<|visual token 056363|>
+<|visual token 056364|>
+<|visual token 056365|>
+<|visual token 056366|>
+<|visual token 056367|>
+<|visual token 056368|>
+<|visual token 056369|>
+<|visual token 056370|>
+<|visual token 056371|>
+<|visual token 056372|>
+<|visual token 056373|>
+<|visual token 056374|>
+<|visual token 056375|>
+<|visual token 056376|>
+<|visual token 056377|>
+<|visual token 056378|>
+<|visual token 056379|>
+<|visual token 056380|>
+<|visual token 056381|>
+<|visual token 056382|>
+<|visual token 056383|>
+<|visual token 056384|>
+<|visual token 056385|>
+<|visual token 056386|>
+<|visual token 056387|>
+<|visual token 056388|>
+<|visual token 056389|>
+<|visual token 056390|>
+<|visual token 056391|>
+<|visual token 056392|>
+<|visual token 056393|>
+<|visual token 056394|>
+<|visual token 056395|>
+<|visual token 056396|>
+<|visual token 056397|>
+<|visual token 056398|>
+<|visual token 056399|>
+<|visual token 056400|>
+<|visual token 056401|>
+<|visual token 056402|>
+<|visual token 056403|>
+<|visual token 056404|>
+<|visual token 056405|>
+<|visual token 056406|>
+<|visual token 056407|>
+<|visual token 056408|>
+<|visual token 056409|>
+<|visual token 056410|>
+<|visual token 056411|>
+<|visual token 056412|>
+<|visual token 056413|>
+<|visual token 056414|>
+<|visual token 056415|>
+<|visual token 056416|>
+<|visual token 056417|>
+<|visual token 056418|>
+<|visual token 056419|>
+<|visual token 056420|>
+<|visual token 056421|>
+<|visual token 056422|>
+<|visual token 056423|>
+<|visual token 056424|>
+<|visual token 056425|>
+<|visual token 056426|>
+<|visual token 056427|>
+<|visual token 056428|>
+<|visual token 056429|>
+<|visual token 056430|>
+<|visual token 056431|>
+<|visual token 056432|>
+<|visual token 056433|>
+<|visual token 056434|>
+<|visual token 056435|>
+<|visual token 056436|>
+<|visual token 056437|>
+<|visual token 056438|>
+<|visual token 056439|>
+<|visual token 056440|>
+<|visual token 056441|>
+<|visual token 056442|>
+<|visual token 056443|>
+<|visual token 056444|>
+<|visual token 056445|>
+<|visual token 056446|>
+<|visual token 056447|>
+<|visual token 056448|>
+<|visual token 056449|>
+<|visual token 056450|>
+<|visual token 056451|>
+<|visual token 056452|>
+<|visual token 056453|>
+<|visual token 056454|>
+<|visual token 056455|>
+<|visual token 056456|>
+<|visual token 056457|>
+<|visual token 056458|>
+<|visual token 056459|>
+<|visual token 056460|>
+<|visual token 056461|>
+<|visual token 056462|>
+<|visual token 056463|>
+<|visual token 056464|>
+<|visual token 056465|>
+<|visual token 056466|>
+<|visual token 056467|>
+<|visual token 056468|>
+<|visual token 056469|>
+<|visual token 056470|>
+<|visual token 056471|>
+<|visual token 056472|>
+<|visual token 056473|>
+<|visual token 056474|>
+<|visual token 056475|>
+<|visual token 056476|>
+<|visual token 056477|>
+<|visual token 056478|>
+<|visual token 056479|>
+<|visual token 056480|>
+<|visual token 056481|>
+<|visual token 056482|>
+<|visual token 056483|>
+<|visual token 056484|>
+<|visual token 056485|>
+<|visual token 056486|>
+<|visual token 056487|>
+<|visual token 056488|>
+<|visual token 056489|>
+<|visual token 056490|>
+<|visual token 056491|>
+<|visual token 056492|>
+<|visual token 056493|>
+<|visual token 056494|>
+<|visual token 056495|>
+<|visual token 056496|>
+<|visual token 056497|>
+<|visual token 056498|>
+<|visual token 056499|>
+<|visual token 056500|>
+<|visual token 056501|>
+<|visual token 056502|>
+<|visual token 056503|>
+<|visual token 056504|>
+<|visual token 056505|>
+<|visual token 056506|>
+<|visual token 056507|>
+<|visual token 056508|>
+<|visual token 056509|>
+<|visual token 056510|>
+<|visual token 056511|>
+<|visual token 056512|>
+<|visual token 056513|>
+<|visual token 056514|>
+<|visual token 056515|>
+<|visual token 056516|>
+<|visual token 056517|>
+<|visual token 056518|>
+<|visual token 056519|>
+<|visual token 056520|>
+<|visual token 056521|>
+<|visual token 056522|>
+<|visual token 056523|>
+<|visual token 056524|>
+<|visual token 056525|>
+<|visual token 056526|>
+<|visual token 056527|>
+<|visual token 056528|>
+<|visual token 056529|>
+<|visual token 056530|>
+<|visual token 056531|>
+<|visual token 056532|>
+<|visual token 056533|>
+<|visual token 056534|>
+<|visual token 056535|>
+<|visual token 056536|>
+<|visual token 056537|>
+<|visual token 056538|>
+<|visual token 056539|>
+<|visual token 056540|>
+<|visual token 056541|>
+<|visual token 056542|>
+<|visual token 056543|>
+<|visual token 056544|>
+<|visual token 056545|>
+<|visual token 056546|>
+<|visual token 056547|>
+<|visual token 056548|>
+<|visual token 056549|>
+<|visual token 056550|>
+<|visual token 056551|>
+<|visual token 056552|>
+<|visual token 056553|>
+<|visual token 056554|>
+<|visual token 056555|>
+<|visual token 056556|>
+<|visual token 056557|>
+<|visual token 056558|>
+<|visual token 056559|>
+<|visual token 056560|>
+<|visual token 056561|>
+<|visual token 056562|>
+<|visual token 056563|>
+<|visual token 056564|>
+<|visual token 056565|>
+<|visual token 056566|>
+<|visual token 056567|>
+<|visual token 056568|>
+<|visual token 056569|>
+<|visual token 056570|>
+<|visual token 056571|>
+<|visual token 056572|>
+<|visual token 056573|>
+<|visual token 056574|>
+<|visual token 056575|>
+<|visual token 056576|>
+<|visual token 056577|>
+<|visual token 056578|>
+<|visual token 056579|>
+<|visual token 056580|>
+<|visual token 056581|>
+<|visual token 056582|>
+<|visual token 056583|>
+<|visual token 056584|>
+<|visual token 056585|>
+<|visual token 056586|>
+<|visual token 056587|>
+<|visual token 056588|>
+<|visual token 056589|>
+<|visual token 056590|>
+<|visual token 056591|>
+<|visual token 056592|>
+<|visual token 056593|>
+<|visual token 056594|>
+<|visual token 056595|>
+<|visual token 056596|>
+<|visual token 056597|>
+<|visual token 059160|>
+<|visual token 059161|>
+<|visual token 059162|>
+<|visual token 059163|>
+<|visual token 059164|>
+<|visual token 059165|>
+<|visual token 059166|>
+<|visual token 059167|>
+<|visual token 059168|>
+<|visual token 059169|>
+<|visual token 059170|>
+<|visual token 059171|>
+<|visual token 059172|>
+<|visual token 059173|>
+<|visual token 059174|>
+<|visual token 059175|>
+<|visual token 059176|>
+<|visual token 059177|>
+<|visual token 059178|>
+<|visual token 059179|>
+<|visual token 059180|>
+<|visual token 059181|>
+<|visual token 059182|>
+<|visual token 059183|>
+<|visual token 059184|>
+<|visual token 059185|>
+<|visual token 059186|>
+<|visual token 059187|>
+<|visual token 059188|>
+<|visual token 059189|>
+<|visual token 059190|>
+<|visual token 059191|>
+<|visual token 059192|>
+<|visual token 059193|>
+<|visual token 059194|>
+<|visual token 059195|>
+<|visual token 059196|>
+<|visual token 059197|>
+<|visual token 059198|>
+<|visual token 059199|>
+<|visual token 059200|>
+<|visual token 059201|>
+<|visual token 059202|>
+<|visual token 059203|>
+<|visual token 059204|>
+<|visual token 059205|>
+<|visual token 059206|>
+<|visual token 059207|>
+<|visual token 059208|>
+<|visual token 059209|>
+<|visual token 059210|>
+<|visual token 059211|>
+<|visual token 059212|>
+<|visual token 059213|>
+<|visual token 059214|>
+<|visual token 059215|>
+<|visual token 059216|>
+<|visual token 059217|>
+<|visual token 059218|>
+<|visual token 059219|>
+<|visual token 059220|>
+<|visual token 059221|>
+<|visual token 059222|>
+<|visual token 059223|>
+<|visual token 059224|>
+<|visual token 059225|>
+<|visual token 059226|>
+<|visual token 059227|>
+<|visual token 059228|>
+<|visual token 059229|>
+<|visual token 059230|>
+<|visual token 059231|>
+<|visual token 059232|>
+<|visual token 059233|>
+<|visual token 059234|>
+<|visual token 059235|>
+<|visual token 059236|>
+<|visual token 059237|>
+<|visual token 059238|>
+<|visual token 059239|>
+<|visual token 059240|>
+<|visual token 059241|>
+<|visual token 059242|>
+<|visual token 059243|>
+<|visual token 059244|>
+<|visual token 059245|>
+<|visual token 059246|>
+<|visual token 059247|>
+<|visual token 059248|>
+<|visual token 059249|>
+<|visual token 059250|>
+<|visual token 059251|>
+<|visual token 059252|>
+<|visual token 059253|>
+<|visual token 059254|>
+<|visual token 059255|>
+<|visual token 059256|>
+<|visual token 059257|>
+<|visual token 059258|>
+<|visual token 059259|>
+<|visual token 059260|>
+<|visual token 059261|>
+<|visual token 059262|>
+<|visual token 059263|>
+<|visual token 059264|>
+<|visual token 059265|>
+<|visual token 059266|>
+<|visual token 059267|>
+<|visual token 059268|>
+<|visual token 059269|>
+<|visual token 059270|>
+<|visual token 059271|>
+<|visual token 059272|>
+<|visual token 059273|>
+<|visual token 059274|>
+<|visual token 059275|>
+<|visual token 059276|>
+<|visual token 059277|>
+<|visual token 059278|>
+<|visual token 059279|>
+<|visual token 059280|>
+<|visual token 059281|>
+<|visual token 059282|>
+<|visual token 059283|>
+<|visual token 059284|>
+<|visual token 059285|>
+<|visual token 059286|>
+<|visual token 059287|>
+<|visual token 059288|>
+<|visual token 059289|>
+<|visual token 059290|>
+<|visual token 059291|>
+<|visual token 059292|>
+<|visual token 059293|>
+<|visual token 059294|>
+<|visual token 059295|>
+<|visual token 059296|>
+<|visual token 059297|>
+<|visual token 059298|>
+<|visual token 059299|>
+<|visual token 059300|>
+<|visual token 059301|>
+<|visual token 059302|>
+<|visual token 059303|>
+<|visual token 059304|>
+<|visual token 059305|>
+<|visual token 059306|>
+<|visual token 059307|>
+<|visual token 059308|>
+<|visual token 059309|>
+<|visual token 059310|>
+<|visual token 059311|>
+<|visual token 059312|>
+<|visual token 059313|>
+<|visual token 059314|>
+<|visual token 059315|>
+<|visual token 059316|>
+<|visual token 059317|>
+<|visual token 059318|>
+<|visual token 059319|>
+<|visual token 059320|>
+<|visual token 059321|>
+<|visual token 059322|>
+<|visual token 059323|>
+<|visual token 059324|>
+<|visual token 059325|>
+<|visual token 059326|>
+<|visual token 059327|>
+<|visual token 059328|>
+<|visual token 059329|>
+<|visual token 059330|>
+<|visual token 059331|>
+<|visual token 059332|>
+<|visual token 059333|>
+<|visual token 059334|>
+<|visual token 059335|>
+<|visual token 059336|>
+<|visual token 059337|>
+<|visual token 059338|>
+<|visual token 059339|>
+<|visual token 059340|>
+<|visual token 059341|>
+<|visual token 059342|>
+<|visual token 059343|>
+<|visual token 059344|>
+<|visual token 059345|>
+<|visual token 059346|>
+<|visual token 059347|>
+<|visual token 059348|>
+<|visual token 059349|>
+<|visual token 059350|>
+<|visual token 059351|>
+<|visual token 059352|>
+<|visual token 059353|>
+<|visual token 059354|>
+<|visual token 059355|>
+<|visual token 059356|>
+<|visual token 059357|>
+<|visual token 059358|>
+<|visual token 059359|>
+<|visual token 059360|>
+<|visual token 059361|>
+<|visual token 059362|>
+<|visual token 059363|>
+<|visual token 059364|>
+<|visual token 059365|>
+<|visual token 059366|>
+<|visual token 059367|>
+<|visual token 059368|>
+<|visual token 059369|>
+<|visual token 059370|>
+<|visual token 059371|>
+<|visual token 059372|>
+<|visual token 059373|>
+<|visual token 059374|>
+<|visual token 059375|>
+<|visual token 059376|>
+<|visual token 059377|>
+<|visual token 059378|>
+<|visual token 059379|>
+<|visual token 059380|>
+<|visual token 059381|>
+<|visual token 059382|>
+<|visual token 059383|>
+<|visual token 059384|>
+<|visual token 059385|>
+<|visual token 059386|>
+<|visual token 059387|>
+<|visual token 059388|>
+<|visual token 059389|>
+<|visual token 059390|>
+<|visual token 059391|>
+<|visual token 059392|>
+<|visual token 059393|>
+<|visual token 059394|>
+<|visual token 059395|>
+<|visual token 059396|>
+<|visual token 059397|>
+<|visual token 059398|>
+<|visual token 059399|>
+<|visual token 059400|>
+<|visual token 059401|>
+<|visual token 059402|>
+<|visual token 059403|>
+<|visual token 059404|>
+<|visual token 059405|>
+<|visual token 059406|>
+<|visual token 059407|>
+<|visual token 059408|>
+<|visual token 059409|>
+<|visual token 059410|>
+<|visual token 059411|>
+<|visual token 059412|>
+<|visual token 059413|>
+<|visual token 059414|>
+<|visual token 059415|>
+<|visual token 059416|>
+<|visual token 059417|>
+<|visual token 059418|>
+<|visual token 059419|>
+<|visual token 059420|>
+<|visual token 059421|>
+<|visual token 059422|>
+<|visual token 059423|>
+<|visual token 059424|>
+<|visual token 059425|>
+<|visual token 059426|>
+<|visual token 059427|>
+<|visual token 059428|>
+<|visual token 059429|>
+<|visual token 059430|>
+<|visual token 059431|>
+<|visual token 059432|>
+<|visual token 059433|>
+<|visual token 059434|>
+<|visual token 059435|>
+<|visual token 059436|>
+<|visual token 059437|>
+<|visual token 059438|>
+<|visual token 059439|>
+<|visual token 059440|>
+<|visual token 059441|>
+<|visual token 059442|>
+<|visual token 059443|>
+<|visual token 059444|>
+<|visual token 059445|>
+<|visual token 059446|>
+<|visual token 059447|>
+<|visual token 059448|>
+<|visual token 059449|>
+<|visual token 059450|>
+<|visual token 059451|>
+<|visual token 059452|>
+<|visual token 059453|>
+<|visual token 059454|>
+<|visual token 059455|>
+<|visual token 059456|>
+<|visual token 059457|>
+<|visual token 059458|>
+<|visual token 059459|>
+<|visual token 059460|>
+<|visual token 059461|>
+<|visual token 059462|>
+<|visual token 059463|>
+<|visual token 059464|>
+<|visual token 059465|>
+<|visual token 059466|>
+<|visual token 059467|>
+<|visual token 059468|>
+<|visual token 059469|>
+<|visual token 059470|>
+<|visual token 059471|>
+<|visual token 059472|>
+<|visual token 059473|>
+<|visual token 059474|>
+<|visual token 059475|>
+<|visual token 059476|>
+<|visual token 059477|>
+<|visual token 059478|>
+<|visual token 059479|>
+<|visual token 059480|>
+<|visual token 059481|>
+<|visual token 059482|>
+<|visual token 059483|>
+<|visual token 059484|>
+<|visual token 059485|>
+<|visual token 059486|>
+<|visual token 059487|>
+<|visual token 059488|>
+<|visual token 059489|>
+<|visual token 059490|>
+<|visual token 059491|>
+<|visual token 059492|>
+<|visual token 059493|>
+<|visual token 059494|>
+<|visual token 059495|>
+<|visual token 059496|>
+<|visual token 059497|>
+<|visual token 059498|>
+<|visual token 059499|>
+<|visual token 059500|>
+<|visual token 059501|>
+<|visual token 059502|>
+<|visual token 059503|>
+<|visual token 059504|>
+<|visual token 059505|>
+<|visual token 059506|>
+<|visual token 059507|>
+<|visual token 059508|>
+<|visual token 059509|>
+<|visual token 059510|>
+<|visual token 059511|>
+<|visual token 059512|>
+<|visual token 059513|>
+<|visual token 059514|>
+<|visual token 059515|>
+<|visual token 059516|>
+<|visual token 059517|>
+<|visual token 059518|>
+<|visual token 059519|>
+<|visual token 059520|>
+<|visual token 059521|>
+<|visual token 059522|>
+<|visual token 059523|>
+<|visual token 059524|>
+<|visual token 059525|>
+<|visual token 059526|>
+<|visual token 059527|>
+<|visual token 059528|>
+<|visual token 059529|>
+<|visual token 059530|>
+<|visual token 059531|>
+<|visual token 059532|>
+<|visual token 059533|>
+<|visual token 059534|>
+<|visual token 059535|>
+<|visual token 059536|>
+<|visual token 059537|>
+<|visual token 059538|>
+<|visual token 059539|>
+<|visual token 059540|>
+<|visual token 059541|>
+<|visual token 059542|>
+<|visual token 059543|>
+<|visual token 059544|>
+<|visual token 059545|>
+<|visual token 059546|>
+<|visual token 059547|>
+<|visual token 059548|>
+<|visual token 059549|>
+<|visual token 059550|>
+<|visual token 059551|>
+<|visual token 059552|>
+<|visual token 059553|>
+<|visual token 059554|>
+<|visual token 059555|>
+<|visual token 059556|>
+<|visual token 059557|>
+<|visual token 059558|>
+<|visual token 059559|>
+<|visual token 059560|>
+<|visual token 059561|>
+<|visual token 059562|>
+<|visual token 059563|>
+<|visual token 059564|>
+<|visual token 059565|>
+<|visual token 059566|>
+<|visual token 059567|>
+<|visual token 059568|>
+<|visual token 059569|>
+<|visual token 059570|>
+<|visual token 059571|>
+<|visual token 059572|>
+<|visual token 059573|>
+<|visual token 059574|>
+<|visual token 059575|>
+<|visual token 059576|>
+<|visual token 059577|>
+<|visual token 059578|>
+<|visual token 059579|>
+<|visual token 059580|>
+<|visual token 059581|>
+<|visual token 059582|>
+<|visual token 059583|>
+<|visual token 059584|>
+<|visual token 059585|>
+<|visual token 059586|>
+<|visual token 059587|>
+<|visual token 059588|>
+<|visual token 059589|>
+<|visual token 059590|>
+<|visual token 059591|>
+<|visual token 059592|>
+<|visual token 059593|>
+<|visual token 059594|>
+<|visual token 059595|>
+<|visual token 059596|>
+<|visual token 059597|>
+<|visual token 059598|>
+<|visual token 059599|>
+<|visual token 059600|>
+<|visual token 059601|>
+<|visual token 059602|>
+<|visual token 059603|>
+<|visual token 059604|>
+<|visual token 059605|>
+<|visual token 059606|>
+<|visual token 059607|>
+<|visual token 059608|>
+<|visual token 059609|>
+<|visual token 059610|>
+<|visual token 059611|>
+<|visual token 059612|>
+<|visual token 059613|>
+<|visual token 059614|>
+<|visual token 059615|>
+<|visual token 059616|>
+<|visual token 059617|>
+<|visual token 059618|>
+<|visual token 059619|>
+<|visual token 059620|>
+<|visual token 059621|>
+<|visual token 059622|>
+<|visual token 059623|>
+<|visual token 059624|>
+<|visual token 059625|>
+<|visual token 059626|>
+<|visual token 059627|>
+<|visual token 059628|>
+<|visual token 059629|>
+<|visual token 059630|>
+<|visual token 059631|>
+<|visual token 059632|>
+<|visual token 059633|>
+<|visual token 059634|>
+<|visual token 059635|>
+<|visual token 059636|>
+<|visual token 059637|>
+<|visual token 059638|>
+<|visual token 059639|>
+<|visual token 059640|>
+<|visual token 059641|>
+<|visual token 059642|>
+<|visual token 059643|>
+<|visual token 059644|>
+<|visual token 059645|>
+<|visual token 059646|>
+<|visual token 059647|>
+<|visual token 059648|>
+<|visual token 059649|>
+<|visual token 059650|>
+<|visual token 059651|>
+<|visual token 059652|>
+<|visual token 059653|>
+<|visual token 059654|>
+<|visual token 059655|>
+<|visual token 059656|>
+<|visual token 059657|>
+<|visual token 059658|>
+<|visual token 059659|>
+<|visual token 059660|>
+<|visual token 059661|>
+<|visual token 059662|>
+<|visual token 059663|>
+<|visual token 059664|>
+<|visual token 059665|>
+<|visual token 059666|>
+<|visual token 059667|>
+<|visual token 059668|>
+<|visual token 059669|>
+<|visual token 059670|>
+<|visual token 059671|>
+<|visual token 059672|>
+<|visual token 059673|>
+<|visual token 059674|>
+<|visual token 059675|>
+<|visual token 059676|>
+<|visual token 059677|>
+<|visual token 059678|>
+<|visual token 059679|>
+<|visual token 059680|>
+<|visual token 059681|>
+<|visual token 059682|>
+<|visual token 059683|>
+<|visual token 059684|>
+<|visual token 059685|>
+<|visual token 059686|>
+<|visual token 059687|>
+<|visual token 059688|>
+<|visual token 059689|>
+<|visual token 059690|>
+<|visual token 059691|>
+<|visual token 059692|>
+<|visual token 059693|>
+<|visual token 059694|>
+<|visual token 059695|>
+<|visual token 059696|>
+<|visual token 059697|>
+<|visual token 059698|>
+<|visual token 059699|>
+<|visual token 059700|>
+<|visual token 059701|>
+<|visual token 059702|>
+<|visual token 059703|>
+<|visual token 059704|>
+<|visual token 059705|>
+<|visual token 059706|>
+<|visual token 059707|>
+<|visual token 059708|>
+<|visual token 059709|>
+<|visual token 059710|>
+<|visual token 059711|>
+<|visual token 059712|>
+<|visual token 059713|>
+<|visual token 059714|>
+<|visual token 059715|>
+<|visual token 059716|>
+<|visual token 059717|>
+<|visual token 059718|>
+<|visual token 059719|>
+<|visual token 059720|>
+<|visual token 059721|>
+<|visual token 059722|>
+<|visual token 059723|>
+<|visual token 059724|>
+<|visual token 059725|>
+<|visual token 059726|>
+<|visual token 059727|>
+<|visual token 059728|>
+<|visual token 059729|>
+<|visual token 059730|>
+<|visual token 059731|>
+<|visual token 059732|>
+<|visual token 059733|>
+<|visual token 059734|>
+<|visual token 059735|>
+<|visual token 059736|>
+<|visual token 059737|>
+<|visual token 059738|>
+<|visual token 059739|>
+<|visual token 059740|>
+<|visual token 059741|>
+<|visual token 059742|>
+<|visual token 059743|>
+<|visual token 059744|>
+<|visual token 059745|>
+<|visual token 059746|>
+<|visual token 059747|>
+<|visual token 059748|>
+<|visual token 059749|>
+<|visual token 059750|>
+<|visual token 059751|>
+<|visual token 059752|>
+<|visual token 059753|>
+<|visual token 059754|>
+<|visual token 059755|>
+<|visual token 059756|>
+<|visual token 059757|>
+<|visual token 059758|>
+<|visual token 059759|>
+<|visual token 059760|>
+<|visual token 059761|>
+<|visual token 059762|>
+<|visual token 059763|>
+<|visual token 059764|>
+<|visual token 059765|>
+<|visual token 059766|>
+<|visual token 059767|>
+<|visual token 059768|>
+<|visual token 059769|>
+<|visual token 059770|>
+<|visual token 059771|>
+<|visual token 059772|>
+<|visual token 059773|>
+<|visual token 059774|>
+<|visual token 059775|>
+<|visual token 059776|>
+<|visual token 059777|>
+<|visual token 059778|>
+<|visual token 059779|>
+<|visual token 059780|>
+<|visual token 059781|>
+<|visual token 059782|>
+<|visual token 059783|>
+<|visual token 059784|>
+<|visual token 059785|>
+<|visual token 059786|>
+<|visual token 059787|>
+<|visual token 059788|>
+<|visual token 059789|>
+<|visual token 059790|>
+<|visual token 059791|>
+<|visual token 059792|>
+<|visual token 059793|>
+<|visual token 059794|>
+<|visual token 059795|>
+<|visual token 059796|>
+<|visual token 059797|>
+<|visual token 059798|>
+<|visual token 059799|>
+<|visual token 059800|>
+<|visual token 059801|>
+<|visual token 059802|>
+<|visual token 059803|>
+<|visual token 059804|>
+<|visual token 059805|>
+<|visual token 059806|>
+<|visual token 059807|>
+<|visual token 059808|>
+<|visual token 059809|>
+<|visual token 059810|>
+<|visual token 059811|>
+<|visual token 059812|>
+<|visual token 059813|>
+<|visual token 059814|>
+<|visual token 059815|>
+<|visual token 059816|>
+<|visual token 059817|>
+<|visual token 059818|>
+<|visual token 059819|>
+<|visual token 059820|>
+<|visual token 059821|>
+<|visual token 059822|>
+<|visual token 059823|>
+<|visual token 059824|>
+<|visual token 059825|>
+<|visual token 059826|>
+<|visual token 059827|>
+<|visual token 059828|>
+<|visual token 059829|>
+<|visual token 059830|>
+<|visual token 059831|>
+<|visual token 059832|>
+<|visual token 059833|>
+<|visual token 059834|>
+<|visual token 059835|>
+<|visual token 059836|>
+<|visual token 059837|>
+<|visual token 059838|>
+<|visual token 059839|>
+<|visual token 059840|>
+<|visual token 059841|>
+<|visual token 059842|>
+<|visual token 059843|>
+<|visual token 059844|>
+<|visual token 059845|>
+<|visual token 059846|>
+<|visual token 059847|>
+<|visual token 059848|>
+<|visual token 059849|>
+<|visual token 059850|>
+<|visual token 059851|>
+<|visual token 059852|>
+<|visual token 059853|>
+<|visual token 059854|>
+<|visual token 059855|>
+<|visual token 059856|>
+<|visual token 059857|>
+<|visual token 059858|>
+<|visual token 059859|>
+<|visual token 059860|>
+<|visual token 059861|>
+<|visual token 059862|>
+<|visual token 059863|>
+<|visual token 059864|>
+<|visual token 059865|>
+<|visual token 059866|>
+<|visual token 059867|>
+<|visual token 059868|>
+<|visual token 059869|>
+<|visual token 059870|>
+<|visual token 059871|>
+<|visual token 059872|>
+<|visual token 059873|>
+<|visual token 059874|>
+<|visual token 059875|>
+<|visual token 059876|>
+<|visual token 059877|>
+<|visual token 059878|>
+<|visual token 059879|>
+<|visual token 059880|>
+<|visual token 059881|>
+<|visual token 059882|>
+<|visual token 059883|>
+<|visual token 059884|>
+<|visual token 059885|>
+<|visual token 059886|>
+<|visual token 059887|>
+<|visual token 059888|>
+<|visual token 059889|>
+<|visual token 059890|>
+<|visual token 059891|>
+<|visual token 059892|>
+<|visual token 059893|>
+<|visual token 059894|>
+<|visual token 059895|>
+<|visual token 059896|>
+<|visual token 059897|>
+<|visual token 059898|>
+<|visual token 059899|>
+<|visual token 059900|>
+<|visual token 059901|>
+<|visual token 059902|>
+<|visual token 059903|>
+<|visual token 059904|>
+<|visual token 059905|>
+<|visual token 059906|>
+<|visual token 059907|>
+<|visual token 059908|>
+<|visual token 059909|>
+<|visual token 059910|>
+<|visual token 059911|>
+<|visual token 059912|>
+<|visual token 059913|>
+<|visual token 059914|>
+<|visual token 059915|>
+<|visual token 059916|>
+<|visual token 059917|>
+<|visual token 059918|>
+<|visual token 059919|>
+<|visual token 059920|>
+<|visual token 059921|>
+<|visual token 059922|>
+<|visual token 059923|>
+<|visual token 059924|>
+<|visual token 059925|>
+<|visual token 059926|>
+<|visual token 059927|>
+<|visual token 059928|>
+<|visual token 059929|>
+<|visual token 059930|>
+<|visual token 059931|>
+<|visual token 059932|>
+<|visual token 059933|>
+<|visual token 059934|>
+<|visual token 059935|>
+<|visual token 059936|>
+<|visual token 059937|>
+<|visual token 059938|>
+<|visual token 059939|>
+<|visual token 059940|>
+<|visual token 059941|>
+<|visual token 059942|>
+<|visual token 059943|>
+<|visual token 059944|>
+<|visual token 059945|>
+<|visual token 059946|>
+<|visual token 059947|>
+<|visual token 059948|>
+<|visual token 059949|>
+<|visual token 059950|>
+<|visual token 059951|>
+<|visual token 059952|>
+<|visual token 059953|>
+<|visual token 059954|>
+<|visual token 059955|>
+<|visual token 059956|>
+<|visual token 059957|>
+<|visual token 059958|>
+<|visual token 059959|>
+<|visual token 059960|>
+<|visual token 059961|>
+<|visual token 059962|>
+<|visual token 059963|>
+<|visual token 059964|>
+<|visual token 059965|>
+<|visual token 059966|>
+<|visual token 059967|>
+<|visual token 059968|>
+<|visual token 059969|>
+<|visual token 059970|>
+<|visual token 059971|>
+<|visual token 059972|>
+<|visual token 059973|>
+<|visual token 059974|>
+<|visual token 059975|>
+<|visual token 059976|>
+<|visual token 059977|>
+<|visual token 059978|>
+<|visual token 059979|>
+<|visual token 059980|>
+<|visual token 059981|>
+<|visual token 059982|>
+<|visual token 059983|>
+<|visual token 059984|>
+<|visual token 059985|>
+<|visual token 059986|>
+<|visual token 059987|>
+<|visual token 059988|>
+<|visual token 059989|>
+<|visual token 059990|>
+<|visual token 059991|>
+<|visual token 059992|>
+<|visual token 059993|>
+<|visual token 059994|>
+<|visual token 059995|>
+<|visual token 059996|>
+<|visual token 059997|>
+<|visual token 059998|>
+<|visual token 059999|>
+<|visual token 060000|>
+<|visual token 060001|>
+<|visual token 060002|>
+<|visual token 060003|>
+<|visual token 060004|>
+<|visual token 060005|>
+<|visual token 060006|>
+<|visual token 060007|>
+<|visual token 060008|>
+<|visual token 060009|>
+<|visual token 060010|>
+<|visual token 060011|>
+<|visual token 060012|>
+<|visual token 060013|>
+<|visual token 060014|>
+<|visual token 060015|>
+<|visual token 060016|>
+<|visual token 060017|>
+<|visual token 060018|>
+<|visual token 060019|>
+<|visual token 060020|>
+<|visual token 060021|>
+<|visual token 060022|>
+<|visual token 060023|>
+<|visual token 060024|>
+<|visual token 060025|>
+<|visual token 060026|>
+<|visual token 060027|>
+<|visual token 060028|>
+<|visual token 060029|>
+<|visual token 060030|>
+<|visual token 060031|>
+<|visual token 060032|>
+<|visual token 060033|>
+<|visual token 060034|>
+<|visual token 060035|>
+<|visual token 060036|>
+<|visual token 060037|>
+<|visual token 060038|>
+<|visual token 060039|>
+<|visual token 060040|>
+<|visual token 060041|>
+<|visual token 060042|>
+<|visual token 060043|>
+<|visual token 060044|>
+<|visual token 060045|>
+<|visual token 060046|>
+<|visual token 060047|>
+<|visual token 060048|>
+<|visual token 060049|>
+<|visual token 060050|>
+<|visual token 060051|>
+<|visual token 060052|>
+<|visual token 060053|>
+<|visual token 060054|>
+<|visual token 060055|>
+<|visual token 060056|>
+<|visual token 060057|>
+<|visual token 060058|>
+<|visual token 060059|>
+<|visual token 060060|>
+<|visual token 060061|>
+<|visual token 060062|>
+<|visual token 060063|>
+<|visual token 060064|>
+<|visual token 060065|>
+<|visual token 060066|>
+<|visual token 060067|>
+<|visual token 060068|>
+<|visual token 060069|>
+<|visual token 060070|>
+<|visual token 060071|>
+<|visual token 060072|>
+<|visual token 060073|>
+<|visual token 060074|>
+<|visual token 060075|>
+<|visual token 060076|>
+<|visual token 060077|>
+<|visual token 060078|>
+<|visual token 060079|>
+<|visual token 060080|>
+<|visual token 060081|>
+<|visual token 060082|>
+<|visual token 060083|>
+<|visual token 060084|>
+<|visual token 060085|>
+<|visual token 060086|>
+<|visual token 060087|>
+<|visual token 060088|>
+<|visual token 060089|>
+<|visual token 060090|>
+<|visual token 060091|>
+<|visual token 060092|>
+<|visual token 060093|>
+<|visual token 060094|>
+<|visual token 060095|>
+<|visual token 060096|>
+<|visual token 060097|>
+<|visual token 060098|>
+<|visual token 060099|>
+<|visual token 060100|>
+<|visual token 060101|>
+<|visual token 060102|>
+<|visual token 060103|>
+<|visual token 060104|>
+<|visual token 060105|>
+<|visual token 060106|>
+<|visual token 060107|>
+<|visual token 060108|>
+<|visual token 060109|>
+<|visual token 060110|>
+<|visual token 060111|>
+<|visual token 060112|>
+<|visual token 060113|>
+<|visual token 060114|>
+<|visual token 060115|>
+<|visual token 060116|>
+<|visual token 060117|>
+<|visual token 060118|>
+<|visual token 060119|>
+<|visual token 060120|>
+<|visual token 060121|>
+<|visual token 060122|>
+<|visual token 060123|>
+<|visual token 060124|>
+<|visual token 060125|>
+<|visual token 060126|>
+<|visual token 060127|>
+<|visual token 060128|>
+<|visual token 060129|>
+<|visual token 060130|>
+<|visual token 060131|>
+<|visual token 060132|>
+<|visual token 060133|>
+<|visual token 060134|>
+<|visual token 060135|>
+<|visual token 060136|>
+<|visual token 060137|>
+<|visual token 060138|>
+<|visual token 060139|>
+<|visual token 060140|>
+<|visual token 060141|>
+<|visual token 060142|>
+<|visual token 060143|>
+<|visual token 060144|>
+<|visual token 060145|>
+<|visual token 060146|>
+<|visual token 060147|>
+<|visual token 060148|>
+<|visual token 060149|>
+<|visual token 060150|>
+<|visual token 060151|>
+<|visual token 060152|>
+<|visual token 060153|>
+<|visual token 060154|>
+<|visual token 060155|>
+<|visual token 060156|>
+<|visual token 060157|>
+<|visual token 060158|>
+<|visual token 060159|>
+<|visual token 060160|>
+<|visual token 060161|>
+<|visual token 060162|>
+<|visual token 060163|>
+<|visual token 060164|>
+<|visual token 060165|>
+<|visual token 060166|>
+<|visual token 060167|>
+<|visual token 060168|>
+<|visual token 060169|>
+<|visual token 060170|>
+<|visual token 060171|>
+<|visual token 060172|>
+<|visual token 060173|>
+<|visual token 060174|>
+<|visual token 060175|>
+<|visual token 060176|>
+<|visual token 060177|>
+<|visual token 060178|>
+<|visual token 060179|>
+<|visual token 060180|>
+<|visual token 060181|>
+<|visual token 060182|>
+<|visual token 060183|>
+<|visual token 060184|>
+<|visual token 060185|>
+<|visual token 060186|>
+<|visual token 060187|>
+<|visual token 060188|>
+<|visual token 060189|>
+<|visual token 060190|>
+<|visual token 060191|>
+<|visual token 060192|>
+<|visual token 060193|>
+<|visual token 060194|>
+<|visual token 060195|>
+<|visual token 060196|>
+<|visual token 060197|>
+<|visual token 060198|>
+<|visual token 060199|>
+<|visual token 060200|>
+<|visual token 060201|>
+<|visual token 060202|>
+<|visual token 060203|>
+<|visual token 060204|>
+<|visual token 060205|>
+<|visual token 060206|>
+<|visual token 060207|>
+<|visual token 060208|>
+<|visual token 060209|>
+<|visual token 060210|>
+<|visual token 060211|>
+<|visual token 060212|>
+<|visual token 060213|>
+<|visual token 060214|>
+<|visual token 060215|>
+<|visual token 060216|>
+<|visual token 060217|>
+<|visual token 060218|>
+<|visual token 060219|>
+<|visual token 060220|>
+<|visual token 060221|>
+<|visual token 060222|>
+<|visual token 060223|>
+<|visual token 060224|>
+<|visual token 060225|>
+<|visual token 060226|>
+<|visual token 060227|>
+<|visual token 060228|>
+<|visual token 060229|>
+<|visual token 060230|>
+<|visual token 060231|>
+<|visual token 060232|>
+<|visual token 060233|>
+<|visual token 060234|>
+<|visual token 060235|>
+<|visual token 060236|>
+<|visual token 060237|>
+<|visual token 060238|>
+<|visual token 060239|>
+<|visual token 060240|>
+<|visual token 060241|>
+<|visual token 060242|>
+<|visual token 060243|>
+<|visual token 060244|>
+<|visual token 060245|>
+<|visual token 060246|>
+<|visual token 060247|>
+<|visual token 060248|>
+<|visual token 060249|>
+<|visual token 060250|>
+<|visual token 060251|>
+<|visual token 060252|>
+<|visual token 060253|>
+<|visual token 060254|>
+<|visual token 060255|>
+<|visual token 060256|>
+<|visual token 060257|>
+<|visual token 060258|>
+<|visual token 060259|>
+<|visual token 060260|>
+<|visual token 060261|>
+<|visual token 060262|>
+<|visual token 060263|>
+<|visual token 060264|>
+<|visual token 060265|>
+<|visual token 060266|>
+<|visual token 060267|>
+<|visual token 060268|>
+<|visual token 060269|>
+<|visual token 060270|>
+<|visual token 060271|>
+<|visual token 060272|>
+<|visual token 060273|>
+<|visual token 060274|>
+<|visual token 060275|>
+<|visual token 060276|>
+<|visual token 060277|>
+<|visual token 060278|>
+<|visual token 060279|>
+<|visual token 060280|>
+<|visual token 060281|>
+<|visual token 060282|>
+<|visual token 060283|>
+<|visual token 060284|>
+<|visual token 060285|>
+<|visual token 060286|>
+<|visual token 060287|>
+<|visual token 060288|>
+<|visual token 060289|>
+<|visual token 060290|>
+<|visual token 060291|>
+<|visual token 060292|>
+<|visual token 060293|>
+<|visual token 060294|>
+<|visual token 060295|>
+<|visual token 060296|>
+<|visual token 060297|>
+<|visual token 060298|>
+<|visual token 060299|>
+<|visual token 060300|>
+<|visual token 060301|>
+<|visual token 060302|>
+<|visual token 060303|>
+<|visual token 060304|>
+<|visual token 060305|>
+<|visual token 060306|>
+<|visual token 060307|>
+<|visual token 060308|>
+<|visual token 060309|>
+<|visual token 060310|>
+<|visual token 060311|>
+<|visual token 060312|>
+<|visual token 060313|>
+<|visual token 060314|>
+<|visual token 060315|>
+<|visual token 060316|>
+<|visual token 060317|>
+<|visual token 060318|>
+<|visual token 060319|>
+<|visual token 060320|>
+<|visual token 060321|>
+<|visual token 060322|>
+<|visual token 060323|>
+<|visual token 060324|>
+<|visual token 060325|>
+<|visual token 060326|>
+<|visual token 060327|>
+<|visual token 060328|>
+<|visual token 060329|>
+<|visual token 060330|>
+<|visual token 060331|>
+<|visual token 060332|>
+<|visual token 060333|>
+<|visual token 060334|>
+<|visual token 060335|>
+<|visual token 060336|>
+<|visual token 060337|>
+<|visual token 060338|>
+<|visual token 060339|>
+<|visual token 060340|>
+<|visual token 060341|>
+<|visual token 060342|>
+<|visual token 060343|>
+<|visual token 060344|>
+<|visual token 060345|>
+<|visual token 060346|>
+<|visual token 060347|>
+<|visual token 060348|>
+<|visual token 060349|>
+<|visual token 060350|>
+<|visual token 060351|>
+<|visual token 060352|>
+<|visual token 060353|>
+<|visual token 060354|>
+<|visual token 060355|>
+<|visual token 060356|>
+<|visual token 060357|>
+<|visual token 060358|>
+<|visual token 060359|>
+<|visual token 060360|>
+<|visual token 060361|>
+<|visual token 060362|>
+<|visual token 060363|>
+<|visual token 060364|>
+<|visual token 060365|>
+<|visual token 060366|>
+<|visual token 060367|>
+<|visual token 060368|>
+<|visual token 060369|>
+<|visual token 060370|>
+<|visual token 060371|>
+<|visual token 060372|>
+<|visual token 060373|>
+<|visual token 060374|>
+<|visual token 060375|>
+<|visual token 060376|>
+<|visual token 060377|>
+<|visual token 060378|>
+<|visual token 060379|>
+<|visual token 060380|>
+<|visual token 060381|>
+<|visual token 060382|>
+<|visual token 060383|>
+<|visual token 060384|>
+<|visual token 060385|>
+<|visual token 060386|>
+<|visual token 060387|>
+<|visual token 060388|>
+<|visual token 060389|>
+<|visual token 060390|>
+<|visual token 060391|>
+<|visual token 060392|>
+<|visual token 060393|>
+<|visual token 060394|>
+<|visual token 060395|>
+<|visual token 060396|>
+<|visual token 060397|>
+<|visual token 060398|>
+<|visual token 060399|>
+<|visual token 060400|>
+<|visual token 060401|>
+<|visual token 060402|>
+<|visual token 060403|>
+<|visual token 060404|>
+<|visual token 060405|>
+<|visual token 060406|>
+<|visual token 060407|>
+<|visual token 060408|>
+<|visual token 060409|>
+<|visual token 060410|>
+<|visual token 060411|>
+<|visual token 060412|>
+<|visual token 060413|>
+<|visual token 060414|>
+<|visual token 060415|>
+<|visual token 060416|>
+<|visual token 060417|>
+<|visual token 060418|>
+<|visual token 060419|>
+<|visual token 060420|>
+<|visual token 060421|>
+<|visual token 060422|>
+<|visual token 060423|>
+<|visual token 060424|>
+<|visual token 060425|>
+<|visual token 060426|>
+<|visual token 060427|>
+<|visual token 060428|>
+<|visual token 060429|>
+<|visual token 060430|>
+<|visual token 060431|>
+<|visual token 060432|>
+<|visual token 060433|>
+<|visual token 060434|>
+<|visual token 060435|>
+<|visual token 060436|>
+<|visual token 060437|>
+<|visual token 060438|>
+<|visual token 060439|>
+<|visual token 060440|>
+<|visual token 060441|>
+<|visual token 060442|>
+<|visual token 060443|>
+<|visual token 060444|>
+<|visual token 060445|>
+<|visual token 060446|>
+<|visual token 060447|>
+<|visual token 060448|>
+<|visual token 060449|>
+<|visual token 060450|>
+<|visual token 060451|>
+<|visual token 060452|>
+<|visual token 060453|>
+<|visual token 060454|>
+<|visual token 060455|>
+<|visual token 060456|>
+<|visual token 060457|>
+<|visual token 060458|>
+<|visual token 060459|>
+<|visual token 060460|>
+<|visual token 060461|>
+<|visual token 060462|>
+<|visual token 060463|>
+<|visual token 060464|>
+<|visual token 060465|>
+<|visual token 060466|>
+<|visual token 060467|>
+<|visual token 060468|>
+<|visual token 060469|>
+<|visual token 060470|>
+<|visual token 060471|>
+<|visual token 060472|>
+<|visual token 060473|>
+<|visual token 060474|>
+<|visual token 060475|>
+<|visual token 060476|>
+<|visual token 060477|>
+<|visual token 060478|>
+<|visual token 060479|>
+<|visual token 060480|>
+<|visual token 060481|>
+<|visual token 060482|>
+<|visual token 060483|>
+<|visual token 060484|>
+<|visual token 060485|>
+<|visual token 060486|>
+<|visual token 060487|>
+<|visual token 060488|>
+<|visual token 060489|>
+<|visual token 060490|>
+<|visual token 060491|>
+<|visual token 060492|>
+<|visual token 060493|>
+<|visual token 060494|>
+<|visual token 060495|>
+<|visual token 060496|>
+<|visual token 060497|>
+<|visual token 060498|>
+<|visual token 060499|>
+<|visual token 060500|>
+<|visual token 060501|>
+<|visual token 060502|>
+<|visual token 060503|>
+<|visual token 060504|>
+<|visual token 060505|>
+<|visual token 060506|>
+<|visual token 060507|>
+<|visual token 060508|>
+<|visual token 060509|>
+<|visual token 060510|>
+<|visual token 060511|>
+<|visual token 060512|>
+<|visual token 060513|>
+<|visual token 060514|>
+<|visual token 060515|>
+<|visual token 060516|>
+<|visual token 060517|>
+<|visual token 060518|>
+<|visual token 060519|>
+<|visual token 060520|>
+<|visual token 060521|>
+<|visual token 060522|>
+<|visual token 060523|>
+<|visual token 060524|>
+<|visual token 060525|>
+<|visual token 060526|>
+<|visual token 060527|>
+<|visual token 060528|>
+<|visual token 060529|>
+<|visual token 060530|>
+<|visual token 060531|>
+<|visual token 060532|>
+<|visual token 060533|>
+<|visual token 060534|>
+<|visual token 060535|>
+<|visual token 060536|>
+<|visual token 060537|>
+<|visual token 060538|>
+<|visual token 060539|>
+<|visual token 060540|>
+<|visual token 060541|>
+<|visual token 060542|>
+<|visual token 060543|>
+<|visual token 060544|>
+<|visual token 060545|>
+<|visual token 060546|>
+<|visual token 060547|>
+<|visual token 060548|>
+<|visual token 060549|>
+<|visual token 060550|>
+<|visual token 060551|>
+<|visual token 060552|>
+<|visual token 060553|>
+<|visual token 060554|>
+<|visual token 060555|>
+<|visual token 060556|>
+<|visual token 060557|>
+<|visual token 060558|>
+<|visual token 060559|>
+<|visual token 060560|>
+<|visual token 060561|>
+<|visual token 060562|>
+<|visual token 060563|>
+<|visual token 060564|>
+<|visual token 060565|>
+<|visual token 060566|>
+<|visual token 060567|>
+<|visual token 060568|>
+<|visual token 060569|>
+<|visual token 060570|>
+<|visual token 060571|>
+<|visual token 060572|>
+<|visual token 060573|>
+<|visual token 060574|>
+<|visual token 060575|>
+<|visual token 060576|>
+<|visual token 060577|>
+<|visual token 060578|>
+<|visual token 060579|>
+<|visual token 060580|>
+<|visual token 060581|>
+<|visual token 060582|>
+<|visual token 060583|>
+<|visual token 060584|>
+<|visual token 060585|>
+<|visual token 060586|>
+<|visual token 060587|>
+<|visual token 060588|>
+<|visual token 060589|>
+<|visual token 060590|>
+<|visual token 060591|>
+<|visual token 060592|>
+<|visual token 060593|>
+<|visual token 060594|>
+<|visual token 060595|>
+<|visual token 060596|>
+<|visual token 060597|>
+<|visual token 060598|>
+<|visual token 060599|>
+<|visual token 060600|>
+<|visual token 060601|>
+<|visual token 060602|>
+<|visual token 060603|>
+<|visual token 060604|>
+<|visual token 060605|>
+<|visual token 060606|>
+<|visual token 060607|>
+<|visual token 060608|>
+<|visual token 060609|>
+<|visual token 060610|>
+<|visual token 060611|>
+<|visual token 060612|>
+<|visual token 060613|>
+<|visual token 060614|>
+<|visual token 060615|>
+<|visual token 060616|>
+<|visual token 060617|>
+<|visual token 060618|>
+<|visual token 060619|>
+<|visual token 060620|>
+<|visual token 060621|>
+<|visual token 060622|>
+<|visual token 060623|>
+<|visual token 060624|>
+<|visual token 060625|>
+<|visual token 060626|>
+<|visual token 060627|>
+<|visual token 060628|>
+<|visual token 060629|>
+<|visual token 060630|>
+<|visual token 060631|>
+<|visual token 060632|>
+<|visual token 060633|>
+<|visual token 060634|>
+<|visual token 060635|>
+<|visual token 060636|>
+<|visual token 060637|>
+<|visual token 060638|>
+<|visual token 060639|>
+<|visual token 060640|>
+<|visual token 060641|>
+<|visual token 060642|>
+<|visual token 060643|>
+<|visual token 060644|>
+<|visual token 060645|>
+<|visual token 060646|>
+<|visual token 060647|>
+<|visual token 060648|>
+<|visual token 060649|>
+<|visual token 060650|>
+<|visual token 060651|>
+<|visual token 060652|>
+<|visual token 060653|>
+<|visual token 060654|>
+<|visual token 060655|>
+<|visual token 060656|>
+<|visual token 060657|>
+<|visual token 060658|>
+<|visual token 060659|>
+<|visual token 060660|>
+<|visual token 060661|>
+<|visual token 060662|>
+<|visual token 060663|>
+<|visual token 060664|>
+<|visual token 060665|>
+<|visual token 060666|>
+<|visual token 060667|>
+<|visual token 060668|>
+<|visual token 060669|>
+<|visual token 060670|>
+<|visual token 060671|>
+<|visual token 060672|>
+<|visual token 060673|>
+<|visual token 060674|>
+<|visual token 060675|>
+<|visual token 060676|>
+<|visual token 060677|>
+<|visual token 060678|>
+<|visual token 060679|>
+<|visual token 060680|>
+<|visual token 060681|>
+<|visual token 060682|>
+<|visual token 060683|>
+<|visual token 060684|>
+<|visual token 060685|>
+<|visual token 060686|>
+<|visual token 060687|>
+<|visual token 060688|>
+<|visual token 060689|>
+<|visual token 060690|>
+<|visual token 060691|>
+<|visual token 060692|>
+<|visual token 060693|>
+<|visual token 060694|>
+<|visual token 060695|>
+<|visual token 060696|>
+<|visual token 060697|>
+<|visual token 060698|>
+<|visual token 060699|>
+<|visual token 060700|>
+<|visual token 060701|>
+<|visual token 060702|>
+<|visual token 060703|>
+<|visual token 060704|>
+<|visual token 060705|>
+<|visual token 060706|>
+<|visual token 060707|>
+<|visual token 060708|>
+<|visual token 060709|>
+<|visual token 060710|>
+<|visual token 060711|>
+<|visual token 060712|>
+<|visual token 060713|>
+<|visual token 060714|>
+<|visual token 060715|>
+<|visual token 060716|>
+<|visual token 060717|>
+<|visual token 060718|>
+<|visual token 060719|>
+<|visual token 060720|>
+<|visual token 060721|>
+<|visual token 060722|>
+<|visual token 060723|>
+<|visual token 060724|>
+<|visual token 060725|>
+<|visual token 060726|>
+<|visual token 060727|>
+<|visual token 060728|>
+<|visual token 060729|>
+<|visual token 060730|>
+<|visual token 060731|>
+<|visual token 060732|>
+<|visual token 060733|>
+<|visual token 060734|>
+<|visual token 060735|>
+<|visual token 060736|>
+<|visual token 060737|>
+<|visual token 060738|>
+<|visual token 060739|>
+<|visual token 060740|>
+<|visual token 060741|>
+<|visual token 060742|>
+<|visual token 060743|>
+<|visual token 060744|>
+<|visual token 060745|>
+<|visual token 060746|>
+<|visual token 060747|>
+<|visual token 060748|>
+<|visual token 060749|>
+<|visual token 060750|>
+<|visual token 060751|>
+<|visual token 060752|>
+<|visual token 060753|>
+<|visual token 060754|>
+<|visual token 060755|>
+<|visual token 060756|>
+<|visual token 060757|>
+<|visual token 060758|>
+<|visual token 060759|>
+<|visual token 060760|>
+<|visual token 060761|>
+<|visual token 060762|>
+<|visual token 060763|>
+<|visual token 060764|>
+<|visual token 060765|>
+<|visual token 060766|>
+<|visual token 060767|>
+<|visual token 060768|>
+<|visual token 060769|>
+<|visual token 060770|>
+<|visual token 060771|>
+<|visual token 060772|>
+<|visual token 060773|>
+<|visual token 060774|>
+<|visual token 060775|>
+<|visual token 060776|>
+<|visual token 060777|>
+<|visual token 060778|>
+<|visual token 060779|>
+<|visual token 060780|>
+<|visual token 060781|>
+<|visual token 060782|>
+<|visual token 060783|>
+<|visual token 060784|>
+<|visual token 060785|>
+<|visual token 060786|>
+<|visual token 060787|>
+<|visual token 060788|>
+<|visual token 060789|>
+<|visual token 060790|>
+<|visual token 060791|>
+<|visual token 060792|>
+<|visual token 060793|>
+<|visual token 060794|>
+<|visual token 060795|>
+<|visual token 060796|>
+<|visual token 060797|>
+<|visual token 060798|>
+<|visual token 060799|>
+<|visual token 060800|>
+<|visual token 060801|>
+<|visual token 060802|>
+<|visual token 060803|>
+<|visual token 060804|>
+<|visual token 060805|>
+<|visual token 060806|>
+<|visual token 060807|>
+<|visual token 060808|>
+<|visual token 060809|>
+<|visual token 060810|>
+<|visual token 060811|>
+<|visual token 060812|>
+<|visual token 060813|>
+<|visual token 060814|>
+<|visual token 060815|>
+<|visual token 060816|>
+<|visual token 060817|>
+<|visual token 060818|>
+<|visual token 060819|>
+<|visual token 060820|>
+<|visual token 060821|>
+<|visual token 060822|>
+<|visual token 060823|>
+<|visual token 060824|>
+<|visual token 060825|>
+<|visual token 060826|>
+<|visual token 060827|>
+<|visual token 060828|>
+<|visual token 060829|>
+<|visual token 060830|>
+<|visual token 060831|>
+<|visual token 060832|>
+<|visual token 060833|>
+<|visual token 060834|>
+<|visual token 060835|>
+<|visual token 060836|>
+<|visual token 060837|>
+<|visual token 060838|>
+<|visual token 060839|>
+<|visual token 060840|>
+<|visual token 060841|>
+<|visual token 060842|>
+<|visual token 060843|>
+<|visual token 060844|>
+<|visual token 060845|>
+<|visual token 060846|>
+<|visual token 060847|>
+<|visual token 060848|>
+<|visual token 060849|>
+<|visual token 060850|>
+<|visual token 060851|>
+<|visual token 060852|>
+<|visual token 060853|>
+<|visual token 060854|>
+<|visual token 060855|>
+<|visual token 060856|>
+<|visual token 060857|>
+<|visual token 060858|>
+<|visual token 060859|>
+<|visual token 060860|>
+<|visual token 060861|>
+<|visual token 060862|>
+<|visual token 060863|>
+<|visual token 060864|>
+<|visual token 060865|>
+<|visual token 060866|>
+<|visual token 060867|>
+<|visual token 060868|>
+<|visual token 060869|>
+<|visual token 060870|>
+<|visual token 060871|>
+<|visual token 060872|>
+<|visual token 060873|>
+<|visual token 060874|>
+<|visual token 060875|>
+<|visual token 060876|>
+<|visual token 060877|>
+<|visual token 060878|>
+<|visual token 060879|>
+<|visual token 060880|>
+<|visual token 060881|>
+<|visual token 060882|>
+<|visual token 060883|>
+<|visual token 060884|>
+<|visual token 060885|>
+<|visual token 060886|>
+<|visual token 060887|>
+<|visual token 060888|>
+<|visual token 060889|>
+<|visual token 060890|>
+<|visual token 060891|>
+<|visual token 060892|>
+<|visual token 060893|>
+<|visual token 060894|>
+<|visual token 060895|>
+<|visual token 060896|>
+<|visual token 060897|>
+<|visual token 060898|>
+<|visual token 060899|>
+<|visual token 060900|>
+<|visual token 060901|>
+<|visual token 060902|>
+<|visual token 060903|>
+<|visual token 060904|>
+<|visual token 060905|>
+<|visual token 060906|>
+<|visual token 060907|>
+<|visual token 060908|>
+<|visual token 060909|>
+<|visual token 060910|>
+<|visual token 060911|>
+<|visual token 060912|>
+<|visual token 060913|>
+<|visual token 060914|>
+<|visual token 060915|>
+<|visual token 060916|>
+<|visual token 060917|>
+<|visual token 060918|>
+<|visual token 060919|>
+<|visual token 060920|>
+<|visual token 060921|>
+<|visual token 060922|>
+<|visual token 060923|>
+<|visual token 060924|>
+<|visual token 060925|>
+<|visual token 060926|>
+<|visual token 060927|>
+<|visual token 060928|>
+<|visual token 060929|>
+<|visual token 060930|>
+<|visual token 060931|>
+<|visual token 060932|>
+<|visual token 060933|>
+<|visual token 060934|>
+<|visual token 060935|>
+<|visual token 060936|>
+<|visual token 060937|>
+<|visual token 060938|>
+<|visual token 060939|>
+<|visual token 060940|>
+<|visual token 060941|>
+<|visual token 060942|>
+<|visual token 060943|>
+<|visual token 060944|>
+<|visual token 060945|>
+<|visual token 060946|>
+<|visual token 060947|>
+<|visual token 060948|>
+<|visual token 060949|>
+<|visual token 060950|>
+<|visual token 060951|>
+<|visual token 060952|>
+<|visual token 060953|>
+<|visual token 060954|>
+<|visual token 060955|>
+<|visual token 060956|>
+<|visual token 060957|>
+<|visual token 060958|>
+<|visual token 060959|>
+<|visual token 060960|>
+<|visual token 060961|>
+<|visual token 060962|>
+<|visual token 060963|>
+<|visual token 060964|>
+<|visual token 060965|>
+<|visual token 060966|>
+<|visual token 060967|>
+<|visual token 060968|>
+<|visual token 060969|>
+<|visual token 060970|>
+<|visual token 060971|>
+<|visual token 060972|>
+<|visual token 060973|>
+<|visual token 060974|>
+<|visual token 060975|>
+<|visual token 060976|>
+<|visual token 060977|>
+<|visual token 060978|>
+<|visual token 060979|>
+<|visual token 060980|>
+<|visual token 060981|>
+<|visual token 060982|>
+<|visual token 060983|>
+<|visual token 060984|>
+<|visual token 060985|>
+<|visual token 060986|>
+<|visual token 060987|>
+<|visual token 060988|>
+<|visual token 060989|>
+<|visual token 060990|>
+<|visual token 060991|>
+<|visual token 060992|>
+<|visual token 060993|>
+<|visual token 060994|>
+<|visual token 060995|>
+<|visual token 060996|>
+<|visual token 060997|>
+<|visual token 060998|>
+<|visual token 060999|>
+<|visual token 061000|>
+<|visual token 061001|>
+<|visual token 061002|>
+<|visual token 061003|>
+<|visual token 061004|>
+<|visual token 061005|>
+<|visual token 061006|>
+<|visual token 061007|>
+<|visual token 061008|>
+<|visual token 061009|>
+<|visual token 061010|>
+<|visual token 061011|>
+<|visual token 061012|>
+<|visual token 061013|>
+<|visual token 061014|>
+<|visual token 061015|>
+<|visual token 061016|>
+<|visual token 061017|>
+<|visual token 061018|>
+<|visual token 061019|>
+<|visual token 061020|>
+<|visual token 061021|>
+<|visual token 061022|>
+<|visual token 061023|>
+<|visual token 061024|>
+<|visual token 061025|>
+<|visual token 061026|>
+<|visual token 061027|>
+<|visual token 061028|>
+<|visual token 061029|>
+<|visual token 061030|>
+<|visual token 061031|>
+<|visual token 061032|>
+<|visual token 061033|>
+<|visual token 061034|>
+<|visual token 061035|>
+<|visual token 061036|>
+<|visual token 061037|>
+<|visual token 061038|>
+<|visual token 061039|>
+<|visual token 061040|>
+<|visual token 061041|>
+<|visual token 061042|>
+<|visual token 061043|>
+<|visual token 061044|>
+<|visual token 061045|>
+<|visual token 061046|>
+<|visual token 061047|>
+<|visual token 061048|>
+<|visual token 061049|>
+<|visual token 061050|>
+<|visual token 061051|>
+<|visual token 061052|>
+<|visual token 061053|>
+<|visual token 061054|>
+<|visual token 061055|>
+<|visual token 061056|>
+<|visual token 061057|>
+<|visual token 061058|>
+<|visual token 061059|>
+<|visual token 061060|>
+<|visual token 061061|>
+<|visual token 061062|>
+<|visual token 061063|>
+<|visual token 061064|>
+<|visual token 061065|>
+<|visual token 061066|>
+<|visual token 061067|>
+<|visual token 061068|>
+<|visual token 061069|>
+<|visual token 061070|>
+<|visual token 061071|>
+<|visual token 061072|>
+<|visual token 061073|>
+<|visual token 061074|>
+<|visual token 061075|>
+<|visual token 061076|>
+<|visual token 061077|>
+<|visual token 061078|>
+<|visual token 061079|>
+<|visual token 061080|>
+<|visual token 061081|>
+<|visual token 061082|>
+<|visual token 061083|>
+<|visual token 061084|>
+<|visual token 061085|>
+<|visual token 061086|>
+<|visual token 061087|>
+<|visual token 061088|>
+<|visual token 061089|>
+<|visual token 061090|>
+<|visual token 061091|>
+<|visual token 061092|>
+<|visual token 061093|>
+<|visual token 061094|>
+<|visual token 061095|>
+<|visual token 061096|>
+<|visual token 061097|>
+<|visual token 061098|>
+<|visual token 061099|>
+<|visual token 061100|>
+<|visual token 061101|>
+<|visual token 061102|>
+<|visual token 061103|>
+<|visual token 061104|>
+<|visual token 061105|>
+<|visual token 061106|>
+<|visual token 061107|>
+<|visual token 061108|>
+<|visual token 061109|>
+<|visual token 061110|>
+<|visual token 061111|>
+<|visual token 061112|>
+<|visual token 061113|>
+<|visual token 061114|>
+<|visual token 061115|>
+<|visual token 061116|>
+<|visual token 061117|>
+<|visual token 061118|>
+<|visual token 061119|>
+<|visual token 061120|>
+<|visual token 061121|>
+<|visual token 061122|>
+<|visual token 061123|>
+<|visual token 061124|>
+<|visual token 061125|>
+<|visual token 061126|>
+<|visual token 061127|>
+<|visual token 061128|>
+<|visual token 061129|>
+<|visual token 061130|>
+<|visual token 061131|>
+<|visual token 061132|>
+<|visual token 061133|>
+<|visual token 061134|>
+<|visual token 061135|>
+<|visual token 061136|>
+<|visual token 061137|>
+<|visual token 061138|>
+<|visual token 061139|>
+<|visual token 061140|>
+<|visual token 061141|>
+<|visual token 061142|>
+<|visual token 061143|>
+<|visual token 061144|>
+<|visual token 061145|>
+<|visual token 061146|>
+<|visual token 061147|>
+<|visual token 061148|>
+<|visual token 061149|>
+<|visual token 061150|>
+... (sequential entries <|visual token 061151|> through <|visual token 063710|> continue, one added line each) ...
+<|visual token 063711|>
+<|visual token 063712|>
+<|visual token 063713|>
+<|visual token 063714|>
+<|visual token 063715|>
+<|visual token 063716|>
+<|visual token 063717|>
+<|visual token 063718|>
+<|visual token 063719|>
+<|visual token 063720|>
+<|visual token 063721|>
+<|visual token 063722|>
+<|visual token 063723|>
+<|visual token 063724|>
+<|visual token 063725|>
+<|visual token 063726|>
+<|visual token 063727|>
+<|visual token 063728|>
+<|visual token 063729|>
+<|visual token 063730|>
+<|visual token 063731|>
+<|visual token 063732|>
+<|visual token 063733|>
+<|visual token 063734|>
+<|visual token 063735|>
+<|visual token 063736|>
+<|visual token 063737|>
+<|visual token 063738|>
+<|visual token 063739|>
+<|visual token 063740|>
+<|visual token 063741|>
+<|visual token 063742|>
+<|visual token 063743|>
+<|visual token 063744|>
+<|visual token 063745|>
+<|visual token 063746|>
+<|visual token 063747|>
+<|visual token 063748|>
+<|visual token 063749|>
+<|visual token 063750|>
+<|visual token 063751|>
+<|visual token 063752|>
+<|visual token 063753|>
+<|visual token 063754|>
+<|visual token 063755|>
+<|visual token 063756|>
+<|visual token 063757|>
+<|visual token 063758|>
+<|visual token 063759|>
+<|visual token 063760|>
+<|visual token 063761|>
+<|visual token 063762|>
+<|visual token 063763|>
+<|visual token 063764|>
+<|visual token 063765|>
+<|visual token 063766|>
+<|visual token 063767|>
+<|visual token 063768|>
+<|visual token 063769|>
+<|visual token 063770|>
+<|visual token 063771|>
+<|visual token 063772|>
+<|visual token 063773|>
+<|visual token 063774|>
+<|visual token 063775|>
+<|visual token 063776|>
+<|visual token 063777|>
+<|visual token 063778|>
+<|visual token 063779|>
+<|visual token 063780|>
+<|visual token 063781|>
+<|visual token 063782|>
+<|visual token 063783|>
+<|visual token 063784|>
+<|visual token 063785|>
+<|visual token 063786|>
+<|visual token 063787|>
+<|visual token 063788|>
+<|visual token 063789|>
+<|visual token 063790|>
+<|visual token 063791|>
+<|visual token 063792|>
+<|visual token 063793|>
+<|visual token 063794|>
+<|visual token 063795|>
+<|visual token 063796|>
+<|visual token 063797|>
+<|visual token 063798|>
+<|visual token 063799|>
+<|visual token 063800|>
+<|visual token 063801|>
+<|visual token 063802|>
+<|visual token 063803|>
+<|visual token 063804|>
+<|visual token 063805|>
+<|visual token 063806|>
+<|visual token 063807|>
+<|visual token 063808|>
+<|visual token 063809|>
+<|visual token 063810|>
+<|visual token 063811|>
+<|visual token 063812|>
+<|visual token 063813|>
+<|visual token 063814|>
+<|visual token 063815|>
+<|visual token 063816|>
+<|visual token 063817|>
+<|visual token 063818|>
+<|visual token 063819|>
+<|visual token 063820|>
+<|visual token 063821|>
+<|visual token 063822|>
+<|visual token 063823|>
+<|visual token 063824|>
+<|visual token 063825|>
+<|visual token 063826|>
+<|visual token 063827|>
+<|visual token 063828|>
+<|visual token 063829|>
+<|visual token 063830|>
+<|visual token 063831|>
+<|visual token 063832|>
+<|visual token 063833|>
+<|visual token 063834|>
+<|visual token 063835|>
+<|visual token 063836|>
+<|visual token 063837|>
+<|visual token 063838|>
+<|visual token 063839|>
+<|visual token 063840|>
+<|visual token 063841|>
+<|visual token 063842|>
+<|visual token 063843|>
+<|visual token 063844|>
+<|visual token 063845|>
+<|visual token 063846|>
+<|visual token 063847|>
+<|visual token 063848|>
+<|visual token 063849|>
+<|visual token 063850|>
+<|visual token 063851|>
+<|visual token 063852|>
+<|visual token 063853|>
+<|visual token 063854|>
+<|visual token 063855|>
+<|visual token 063856|>
+<|visual token 063857|>
+<|visual token 063858|>
+<|visual token 063859|>
+<|visual token 063860|>
+<|visual token 063861|>
+<|visual token 063862|>
+<|visual token 063863|>
+<|visual token 063864|>
+<|visual token 063865|>
+<|visual token 063866|>
+<|visual token 063867|>
+<|visual token 063868|>
+<|visual token 063869|>
+<|visual token 063870|>
+<|visual token 063871|>
+<|visual token 063872|>
+<|visual token 063873|>
+<|visual token 063874|>
+<|visual token 063875|>
+<|visual token 063876|>
+<|visual token 063877|>
+<|visual token 063878|>
+<|visual token 063879|>
+<|visual token 063880|>
+<|visual token 063881|>
+<|visual token 063882|>
+<|visual token 063883|>
+<|visual token 063884|>
+<|visual token 063885|>
+<|visual token 063886|>
+<|visual token 063887|>
+<|visual token 063888|>
+<|visual token 063889|>
+<|visual token 063890|>
+<|visual token 063891|>
+<|visual token 063892|>
+<|visual token 063893|>
+<|visual token 063894|>
+<|visual token 063895|>
+<|visual token 063896|>
+<|visual token 063897|>
+<|visual token 063898|>
+<|visual token 063899|>
+<|visual token 063900|>
+<|visual token 063901|>
+<|visual token 063902|>
+<|visual token 063903|>
+<|visual token 063904|>
+<|visual token 063905|>
+<|visual token 063906|>
+<|visual token 063907|>
+<|visual token 063908|>
+<|visual token 063909|>
+<|visual token 063910|>
+<|visual token 063911|>
+<|visual token 063912|>
+<|visual token 063913|>
+<|visual token 063914|>
+<|visual token 063915|>
+<|visual token 063916|>
+<|visual token 063917|>
+<|visual token 063918|>
+<|visual token 063919|>
+<|visual token 063920|>
+<|visual token 063921|>
+<|visual token 063922|>
+<|visual token 063923|>
+<|visual token 063924|>
+<|visual token 063925|>
+<|visual token 063926|>
+<|visual token 063927|>
+<|visual token 063928|>
+<|visual token 063929|>
+<|visual token 063930|>
+<|visual token 063931|>
+<|visual token 063932|>
+<|visual token 063933|>
+<|visual token 063934|>
+<|visual token 063935|>
+<|visual token 063936|>
+<|visual token 063937|>
+<|visual token 063938|>
+<|visual token 063939|>
+<|visual token 063940|>
+<|visual token 063941|>
+<|visual token 063942|>
+<|visual token 063943|>
+<|visual token 063944|>
+<|visual token 063945|>
+<|visual token 063946|>
+<|visual token 063947|>
+<|visual token 063948|>
+<|visual token 063949|>
+<|visual token 063950|>
+<|visual token 063951|>
+<|visual token 063952|>
+<|visual token 063953|>
+<|visual token 063954|>
+<|visual token 063955|>
+<|visual token 063956|>
+<|visual token 063957|>
+<|visual token 063958|>
+<|visual token 063959|>
+<|visual token 063960|>
+<|visual token 063961|>
+<|visual token 063962|>
+<|visual token 063963|>
+<|visual token 063964|>
+<|visual token 063965|>
+<|visual token 063966|>
+<|visual token 063967|>
+<|visual token 063968|>
+<|visual token 063969|>
+<|visual token 063970|>
+<|visual token 063971|>
+<|visual token 063972|>
+<|visual token 063973|>
+<|visual token 063974|>
+<|visual token 063975|>
+<|visual token 063976|>
+<|visual token 063977|>
+<|visual token 063978|>
+<|visual token 063979|>
+<|visual token 063980|>
+<|visual token 063981|>
+<|visual token 063982|>
+<|visual token 063983|>
+<|visual token 063984|>
+<|visual token 063985|>
+<|visual token 063986|>
+<|visual token 063987|>
+<|visual token 063988|>
+<|visual token 063989|>
+<|visual token 063990|>
+<|visual token 063991|>
+<|visual token 063992|>
+<|visual token 063993|>
+<|visual token 063994|>
+<|visual token 063995|>
+<|visual token 063996|>
+<|visual token 063997|>
+<|visual token 063998|>
+<|visual token 063999|>
+<|visual token 064000|>
+<|visual token 064001|>
+<|visual token 064002|>
+<|visual token 064003|>
+<|visual token 064004|>
+<|visual token 064005|>
+<|visual token 064006|>
+<|visual token 064007|>
+<|visual token 064008|>
+<|visual token 064009|>
+<|visual token 064010|>
+<|visual token 064011|>
+<|visual token 064012|>
+<|visual token 064013|>
+<|visual token 064014|>
+<|visual token 064015|>
+<|visual token 064016|>
+<|visual token 064017|>
+<|visual token 064018|>
+<|visual token 064019|>
+<|visual token 064020|>
+<|visual token 064021|>
+<|visual token 064022|>
+<|visual token 064023|>
+<|visual token 064024|>
+<|visual token 064025|>
+<|visual token 064026|>
+<|visual token 064027|>
+<|visual token 064028|>
+<|visual token 064029|>
+<|visual token 064030|>
+<|visual token 064031|>
+<|visual token 064032|>
+<|visual token 064033|>
+<|visual token 064034|>
+<|visual token 064035|>
+<|visual token 064036|>
+<|visual token 064037|>
+<|visual token 064038|>
+<|visual token 064039|>
+<|visual token 064040|>
+<|visual token 064041|>
+<|visual token 064042|>
+<|visual token 064043|>
+<|visual token 064044|>
+<|visual token 064045|>
+<|visual token 064046|>
+<|visual token 064047|>
+<|visual token 064048|>
+<|visual token 064049|>
+<|visual token 064050|>
+<|visual token 064051|>
+<|visual token 064052|>
+<|visual token 064053|>
+<|visual token 064054|>
+<|visual token 064055|>
+<|visual token 064056|>
+<|visual token 064057|>
+<|visual token 064058|>
+<|visual token 064059|>
+<|visual token 064060|>
+<|visual token 064061|>
+<|visual token 064062|>
+<|visual token 064063|>
+<|visual token 064064|>
+<|visual token 064065|>
+<|visual token 064066|>
+<|visual token 064067|>
+<|visual token 064068|>
+<|visual token 064069|>
+<|visual token 064070|>
+<|visual token 064071|>
+<|visual token 064072|>
+<|visual token 064073|>
+<|visual token 064074|>
+<|visual token 064075|>
+<|visual token 064076|>
+<|visual token 064077|>
+<|visual token 064078|>
+<|visual token 064079|>
+<|visual token 064080|>
+<|visual token 064081|>
+<|visual token 064082|>
+<|visual token 064083|>
+<|visual token 064084|>
+<|visual token 064085|>
+<|visual token 064086|>
+<|visual token 064087|>
+<|visual token 064088|>
+<|visual token 064089|>
+<|visual token 064090|>
+<|visual token 064091|>
+<|visual token 064092|>
+<|visual token 064093|>
+<|visual token 064094|>
+<|visual token 064095|>
+<|visual token 064096|>
+<|visual token 064097|>
+<|visual token 064098|>
+<|visual token 064099|>
+<|visual token 064100|>
+<|visual token 064101|>
+<|visual token 064102|>
+<|visual token 064103|>
+<|visual token 064104|>
+<|visual token 064105|>
+<|visual token 064106|>
+<|visual token 064107|>
+<|visual token 064108|>
+<|visual token 064109|>
+<|visual token 064110|>
+<|visual token 064111|>
+<|visual token 064112|>
+<|visual token 064113|>
+<|visual token 064114|>
+<|visual token 064115|>
+<|visual token 064116|>
+<|visual token 064117|>
+<|visual token 064118|>
+<|visual token 064119|>
+<|visual token 064120|>
+<|visual token 064121|>
+<|visual token 064122|>
+<|visual token 064123|>
+<|visual token 064124|>
+<|visual token 064125|>
+<|visual token 064126|>
+<|visual token 064127|>
+<|visual token 064128|>
+<|visual token 064129|>
+<|visual token 064130|>
+<|visual token 064131|>
+<|visual token 064132|>
+<|visual token 064133|>
+<|visual token 064134|>
+<|visual token 064135|>
+<|visual token 064136|>
+<|visual token 064137|>
+<|visual token 064138|>
+<|visual token 064139|>
+<|visual token 064140|>
+<|visual token 064141|>
+<|visual token 064142|>
+<|visual token 064143|>
+<|visual token 064144|>
+<|visual token 064145|>
+<|visual token 064146|>
+<|visual token 064147|>
+<|visual token 064148|>
+<|visual token 064149|>
+<|visual token 064150|>
+<|visual token 064151|>
+<|visual token 064152|>
+<|visual token 064153|>
+<|visual token 064154|>
+<|visual token 064155|>
+<|visual token 064156|>
+<|visual token 064157|>
+<|visual token 064158|>
+<|visual token 064159|>
+<|visual token 064160|>
+<|visual token 064161|>
+<|visual token 064162|>
+<|visual token 064163|>
+<|visual token 064164|>
+<|visual token 064165|>
+<|visual token 064166|>
+<|visual token 064167|>
+<|visual token 064168|>
+<|visual token 064169|>
+<|visual token 064170|>
+<|visual token 064171|>
+<|visual token 064172|>
+<|visual token 064173|>
+<|visual token 064174|>
+<|visual token 064175|>
+<|visual token 064176|>
+<|visual token 064177|>
+<|visual token 064178|>
+<|visual token 064179|>
+<|visual token 064180|>
+<|visual token 064181|>
+<|visual token 064182|>
+<|visual token 064183|>
+<|visual token 064184|>
+<|visual token 064185|>
+<|visual token 064186|>
+<|visual token 064187|>
+<|visual token 064188|>
+<|visual token 064189|>
+<|visual token 064190|>
+<|visual token 064191|>
+<|visual token 064192|>
+<|visual token 064193|>
+<|visual token 064194|>
+<|visual token 064195|>
+<|visual token 064196|>
+<|visual token 064197|>
+<|visual token 064198|>
+<|visual token 064199|>
+<|visual token 064200|>
+<|visual token 064201|>
+<|visual token 064202|>
+<|visual token 064203|>
+<|visual token 064204|>
+<|visual token 064205|>
+<|visual token 064206|>
+<|visual token 064207|>
+<|visual token 064208|>
+<|visual token 064209|>
+<|visual token 064210|>
+<|visual token 064211|>
+<|visual token 064212|>
+<|visual token 064213|>
+<|visual token 064214|>
+<|visual token 064215|>
+<|visual token 064216|>
+<|visual token 064217|>
+<|visual token 064218|>
+<|visual token 064219|>
+<|visual token 064220|>
+<|visual token 064221|>
+<|visual token 064222|>
+<|visual token 064223|>
+<|visual token 064224|>
+<|visual token 064225|>
+<|visual token 064226|>
+<|visual token 064227|>
+<|visual token 064228|>
+<|visual token 064229|>
+<|visual token 064230|>
+<|visual token 064231|>
+<|visual token 064232|>
+<|visual token 064233|>
+<|visual token 064234|>
+<|visual token 064235|>
+<|visual token 064236|>
+<|visual token 064237|>
+<|visual token 064238|>
+<|visual token 064239|>
+<|visual token 064240|>
+<|visual token 064241|>
+<|visual token 064242|>
+<|visual token 064243|>
+<|visual token 064244|>
+<|visual token 064245|>
+<|visual token 064246|>
+<|visual token 064247|>
+<|visual token 064248|>
+<|visual token 064249|>
+<|visual token 064250|>
+<|visual token 064251|>
+<|visual token 064252|>
+<|visual token 064253|>
+<|visual token 064254|>
+<|visual token 064255|>
+<|visual token 064256|>
+<|visual token 064257|>
+<|visual token 064258|>
+<|visual token 064259|>
+<|visual token 064260|>
+<|visual token 064261|>
+<|visual token 064262|>
+<|visual token 064263|>
+<|visual token 064264|>
+<|visual token 064265|>
+<|visual token 064266|>
+<|visual token 064267|>
+<|visual token 064268|>
+<|visual token 064269|>
+<|visual token 064270|>
+<|visual token 064271|>
+<|visual token 064272|>
+<|visual token 064273|>
+<|visual token 064274|>
+<|visual token 064275|>
+<|visual token 064276|>
+<|visual token 064277|>
+<|visual token 064278|>
+<|visual token 064279|>
+<|visual token 064280|>
+<|visual token 064281|>
+<|visual token 064282|>
+<|visual token 064283|>
+<|visual token 064284|>
+<|visual token 064285|>
+<|visual token 064286|>
+<|visual token 064287|>
+<|visual token 064288|>
+<|visual token 064289|>
+<|visual token 064290|>
+<|visual token 064291|>
+<|visual token 064292|>
+<|visual token 064293|>
+<|visual token 064294|>
+<|visual token 064295|>
+<|visual token 064296|>
+<|visual token 064297|>
+<|visual token 064298|>
+<|visual token 064299|>
+<|visual token 064300|>
+<|visual token 064301|>
+<|visual token 064302|>
+<|visual token 064303|>
+<|visual token 064304|>
+<|visual token 064305|>
+<|visual token 064306|>
+<|visual token 064307|>
+<|visual token 064308|>
+<|visual token 064309|>
+<|visual token 064310|>
+<|visual token 064311|>
+<|visual token 064312|>
+<|visual token 064313|>
+<|visual token 064314|>
+<|visual token 064315|>
+<|visual token 064316|>
+<|visual token 064317|>
+<|visual token 064318|>
+<|visual token 064319|>
+<|visual token 064320|>
+<|visual token 064321|>
+<|visual token 064322|>
+<|visual token 064323|>
+<|visual token 064324|>
+<|visual token 064325|>
+<|visual token 064326|>
+<|visual token 064327|>
+<|visual token 064328|>
+<|visual token 064329|>
+<|visual token 064330|>
+<|visual token 064331|>
+<|visual token 064332|>
+<|visual token 064333|>
+<|visual token 064334|>
+<|visual token 064335|>
+<|visual token 064336|>
+<|visual token 064337|>
+<|visual token 064338|>
+<|visual token 064339|>
+<|visual token 064340|>
+<|visual token 064341|>
+<|visual token 064342|>
+<|visual token 064343|>
+<|visual token 064344|>
+<|visual token 064345|>
+<|visual token 064346|>
+<|visual token 064347|>
+<|visual token 064348|>
+<|visual token 064349|>
+<|visual token 064350|>
+<|visual token 064351|>
+<|visual token 064352|>
+<|visual token 064353|>
+<|visual token 064354|>
+<|visual token 064355|>
+<|visual token 064356|>
+<|visual token 064357|>
+<|visual token 064358|>
+<|visual token 064359|>
+<|visual token 064360|>
+<|visual token 064361|>
+<|visual token 064362|>
+<|visual token 064363|>
+<|visual token 064364|>
+<|visual token 064365|>
+<|visual token 064366|>
+<|visual token 064367|>
+<|visual token 064368|>
+<|visual token 064369|>
+<|visual token 064370|>
+<|visual token 064371|>
+<|visual token 064372|>
+<|visual token 064373|>
+<|visual token 064374|>
+<|visual token 064375|>
+<|visual token 064376|>
+<|visual token 064377|>
+<|visual token 064378|>
+<|visual token 064379|>
+<|visual token 064380|>
+<|visual token 064381|>
+<|visual token 064382|>
+<|visual token 064383|>
+<|visual token 064384|>
+<|visual token 064385|>
+<|visual token 064386|>
+<|visual token 064387|>
+<|visual token 064388|>
+<|visual token 064389|>
+<|visual token 064390|>
+<|visual token 064391|>
+<|visual token 064392|>
+<|visual token 064393|>
+<|visual token 064394|>
+<|visual token 064395|>
+<|visual token 064396|>
+<|visual token 064397|>
+<|visual token 064398|>
+<|visual token 064399|>
+<|visual token 064400|>
+<|visual token 064401|>
+<|visual token 064402|>
+<|visual token 064403|>
+<|visual token 064404|>
+<|visual token 064405|>
+<|visual token 064406|>
+<|visual token 064407|>
+<|visual token 064408|>
+<|visual token 064409|>
+<|visual token 064410|>
+<|visual token 064411|>
+<|visual token 064412|>
+<|visual token 064413|>
+<|visual token 064414|>
+<|visual token 064415|>
+<|visual token 064416|>
+<|visual token 064417|>
+<|visual token 064418|>
+<|visual token 064419|>
+<|visual token 064420|>
+<|visual token 064421|>
+<|visual token 064422|>
+<|visual token 064423|>
+<|visual token 064424|>
+<|visual token 064425|>
+<|visual token 064426|>
+<|visual token 064427|>
+<|visual token 064428|>
+<|visual token 064429|>
+<|visual token 064430|>
+<|visual token 064431|>
+<|visual token 064432|>
+<|visual token 064433|>
+<|visual token 064434|>
+<|visual token 064435|>
+<|visual token 064436|>
+<|visual token 064437|>
+<|visual token 064438|>
+<|visual token 064439|>
+<|visual token 064440|>
+<|visual token 064441|>
+<|visual token 064442|>
+<|visual token 064443|>
+<|visual token 064444|>
+<|visual token 064445|>
+<|visual token 064446|>
+<|visual token 064447|>
+<|visual token 064448|>
+<|visual token 064449|>
+<|visual token 064450|>
+<|visual token 064451|>
+<|visual token 064452|>
+<|visual token 064453|>
+<|visual token 064454|>
+<|visual token 064455|>
+<|visual token 064456|>
+<|visual token 064457|>
+<|visual token 064458|>
+<|visual token 064459|>
+<|visual token 064460|>
+<|visual token 064461|>
+<|visual token 064462|>
+<|visual token 064463|>
+<|visual token 064464|>
+<|visual token 064465|>
+<|visual token 064466|>
+<|visual token 064467|>
+<|visual token 064468|>
+<|visual token 064469|>
+<|visual token 064470|>
+<|visual token 064471|>
+<|visual token 064472|>
+<|visual token 064473|>
+<|visual token 064474|>
+<|visual token 064475|>
+<|visual token 064476|>
+<|visual token 064477|>
+<|visual token 064478|>
+<|visual token 064479|>
+<|visual token 064480|>
+<|visual token 064481|>
+<|visual token 064482|>
+<|visual token 064483|>
+<|visual token 064484|>
+<|visual token 064485|>
+<|visual token 064486|>
+<|visual token 064487|>
+<|visual token 064488|>
+<|visual token 064489|>
+<|visual token 064490|>
+<|visual token 064491|>
+<|visual token 064492|>
+<|visual token 064493|>
+<|visual token 064494|>
+<|visual token 064495|>
+<|visual token 064496|>
+<|visual token 064497|>
+<|visual token 064498|>
+<|visual token 064499|>
+<|visual token 064500|>
+<|visual token 064501|>
+<|visual token 064502|>
+<|visual token 064503|>
+<|visual token 064504|>
+<|visual token 064505|>
+<|visual token 064506|>
+<|visual token 064507|>
+<|visual token 064508|>
+<|visual token 064509|>
+<|visual token 064510|>
+<|visual token 064511|>
+<|visual token 064512|>
+<|visual token 064513|>
+<|visual token 064514|>
+<|visual token 064515|>
+<|visual token 064516|>
+<|visual token 064517|>
+<|visual token 064518|>
+<|visual token 064519|>
+<|visual token 064520|>
+<|visual token 064521|>
+<|visual token 064522|>
+<|visual token 064523|>
+<|visual token 064524|>
+<|visual token 064525|>
+<|visual token 064526|>
+<|visual token 064527|>
+<|visual token 064528|>
+<|visual token 064529|>
+<|visual token 064530|>
+<|visual token 064531|>
+<|visual token 064532|>
+<|visual token 064533|>
+<|visual token 064534|>
+<|visual token 064535|>
+<|visual token 064536|>
+<|visual token 064537|>
+<|visual token 064538|>
+<|visual token 064539|>
+<|visual token 064540|>
+<|visual token 064541|>
+<|visual token 064542|>
+<|visual token 064543|>
+<|visual token 064544|>
+<|visual token 064545|>
+<|visual token 064546|>
+<|visual token 064547|>
+<|visual token 064548|>
+<|visual token 064549|>
+<|visual token 064550|>
+<|visual token 064551|>
+<|visual token 064552|>
+<|visual token 064553|>
+<|visual token 064554|>
+<|visual token 064555|>
+<|visual token 064556|>
+<|visual token 064557|>
+<|visual token 064558|>
+<|visual token 064559|>
+<|visual token 064560|>
+<|visual token 064561|>
+<|visual token 064562|>
+<|visual token 064563|>
+<|visual token 064564|>
+<|visual token 064565|>
+<|visual token 064566|>
+<|visual token 064567|>
+<|visual token 064568|>
+<|visual token 064569|>
+<|visual token 064570|>
+<|visual token 064571|>
+<|visual token 064572|>
+<|visual token 064573|>
+<|visual token 064574|>
+<|visual token 064575|>
+<|visual token 064576|>
+<|visual token 064577|>
+<|visual token 064578|>
+<|visual token 064579|>
+<|visual token 064580|>
+<|visual token 064581|>
+<|visual token 064582|>
+<|visual token 064583|>
+<|visual token 064584|>
+<|visual token 064585|>
+<|visual token 064586|>
+<|visual token 064587|>
+<|visual token 064588|>
+<|visual token 064589|>
+<|visual token 064590|>
+<|visual token 064591|>
+<|visual token 064592|>
+<|visual token 064593|>
+<|visual token 064594|>
+<|visual token 064595|>
+<|visual token 064596|>
+<|visual token 064597|>
+<|visual token 064598|>
+<|visual token 064599|>
+<|visual token 064600|>
+<|visual token 064601|>
+<|visual token 064602|>
+<|visual token 064603|>
+<|visual token 064604|>
+<|visual token 064605|>
+<|visual token 064606|>
+<|visual token 064607|>
+<|visual token 064608|>
+<|visual token 064609|>
+<|visual token 064610|>
+<|visual token 064611|>
+<|visual token 064612|>
+<|visual token 064613|>
+<|visual token 064614|>
+<|visual token 064615|>
+<|visual token 064616|>
+<|visual token 064617|>
+<|visual token 064618|>
+<|visual token 064619|>
+<|visual token 064620|>
+<|visual token 064621|>
+<|visual token 064622|>
+<|visual token 064623|>
+<|visual token 064624|>
+<|visual token 064625|>
+<|visual token 064626|>
+<|visual token 064627|>
+<|visual token 064628|>
+<|visual token 064629|>
+<|visual token 064630|>
+<|visual token 064631|>
+<|visual token 064632|>
+<|visual token 064633|>
+<|visual token 064634|>
+<|visual token 064635|>
+<|visual token 064636|>
+<|visual token 064637|>
+<|visual token 064638|>
+<|visual token 064639|>
+<|visual token 064640|>
+<|visual token 064641|>
+<|visual token 064642|>
+<|visual token 064643|>
+<|visual token 064644|>
+<|visual token 064645|>
+<|visual token 064646|>
+<|visual token 064647|>
+<|visual token 064648|>
+<|visual token 064649|>
+<|visual token 064650|>
+<|visual token 064651|>
+<|visual token 064652|>
+<|visual token 064653|>
+<|visual token 064654|>
+<|visual token 064655|>
+<|visual token 064656|>
+<|visual token 064657|>
+<|visual token 064658|>
+<|visual token 064659|>
+<|visual token 064660|>
+<|visual token 064661|>
+<|visual token 064662|>
+<|visual token 064663|>
+<|visual token 064664|>
+<|visual token 064665|>
+<|visual token 064666|>
+<|visual token 064667|>
+<|visual token 064668|>
+<|visual token 064669|>
+<|visual token 064670|>
+<|visual token 064671|>
+<|visual token 064672|>
+<|visual token 064673|>
+<|visual token 064674|>
+<|visual token 064675|>
+<|visual token 064676|>
+<|visual token 064677|>
+<|visual token 064678|>
+<|visual token 064679|>
+<|visual token 064680|>
+<|visual token 064681|>
+<|visual token 064682|>
+<|visual token 064683|>
+<|visual token 064684|>
+<|visual token 064685|>
+<|visual token 064686|>
+<|visual token 064687|>
+<|visual token 064688|>
+<|visual token 064689|>
+<|visual token 064690|>
+<|visual token 064691|>
+<|visual token 064692|>
+<|visual token 064693|>
+<|visual token 064694|>
+<|visual token 064695|>
+<|visual token 064696|>
+<|visual token 064697|>
+<|visual token 064698|>
+<|visual token 064699|>
+<|visual token 064700|>
+<|visual token 064701|>
+<|visual token 064702|>
+<|visual token 064703|>
+<|visual token 064704|>
+<|visual token 064705|>
+<|visual token 064706|>
+<|visual token 064707|>
+<|visual token 064708|>
+<|visual token 064709|>
+<|visual token 064710|>
+<|visual token 064711|>
+<|visual token 064712|>
+<|visual token 064713|>
+<|visual token 064714|>
+<|visual token 064715|>
+<|visual token 064716|>
+<|visual token 064717|>
+<|visual token 064718|>
+<|visual token 064719|>
+<|visual token 064720|>
+<|visual token 064721|>
+<|visual token 064722|>
+<|visual token 064723|>
+<|visual token 064724|>
+<|visual token 064725|>
+<|visual token 064726|>
+<|visual token 064727|>
+<|visual token 064728|>
+<|visual token 064729|>
+<|visual token 064730|>
+<|visual token 064731|>
+<|visual token 064732|>
+<|visual token 064733|>
+<|visual token 064734|>
+<|visual token 064735|>
+<|visual token 064736|>
+<|visual token 064737|>
+<|visual token 064738|>
+<|visual token 064739|>
+<|visual token 064740|>
+<|visual token 064741|>
+<|visual token 064742|>
+<|visual token 064743|>
+<|visual token 064744|>
+<|visual token 064745|>
+<|visual token 064746|>
+<|visual token 064747|>
+<|visual token 064748|>
+<|visual token 064749|>
+<|visual token 064750|>
+<|visual token 064751|>
+<|visual token 064752|>
+<|visual token 064753|>
+<|visual token 064754|>
+<|visual token 064755|>
+<|visual token 064756|>
+<|visual token 064757|>
+<|visual token 064758|>
+<|visual token 064759|>
+<|visual token 064760|>
+<|visual token 064761|>
+<|visual token 064762|>
+<|visual token 064763|>
+<|visual token 064764|>
+<|visual token 064765|>
+<|visual token 064766|>
+<|visual token 064767|>
+<|visual token 064768|>
+<|visual token 064769|>
+<|visual token 064770|>
+<|visual token 064771|>
+<|visual token 064772|>
+<|visual token 064773|>
+<|visual token 064774|>
+<|visual token 064775|>
+<|visual token 064776|>
+<|visual token 064777|>
+<|visual token 064778|>
+<|visual token 064779|>
+<|visual token 064780|>
+<|visual token 064781|>
+<|visual token 064782|>
+<|visual token 064783|>
+<|visual token 064784|>
+<|visual token 064785|>
+<|visual token 064786|>
+<|visual token 064787|>
+<|visual token 064788|>
+<|visual token 064789|>
+<|visual token 064790|>
+<|visual token 064791|>
+<|visual token 064792|>
+<|visual token 064793|>
+<|visual token 064794|>
+<|visual token 064795|>
+<|visual token 064796|>
+<|visual token 064797|>
+<|visual token 064798|>
+<|visual token 064799|>
+<|visual token 064800|>
+<|visual token 064801|>
+<|visual token 064802|>
+<|visual token 064803|>
+<|visual token 064804|>
+<|visual token 064805|>
+<|visual token 064806|>
+<|visual token 064807|>
+<|visual token 064808|>
+<|visual token 064809|>
+<|visual token 064810|>
+<|visual token 064811|>
+<|visual token 064812|>
+<|visual token 064813|>
+<|visual token 064814|>
+<|visual token 064815|>
+<|visual token 064816|>
+<|visual token 064817|>
+<|visual token 064818|>
+<|visual token 064819|>
+<|visual token 064820|>
+<|visual token 064821|>
+<|visual token 064822|>
+<|visual token 064823|>
+<|visual token 064824|>
+<|visual token 064825|>
+<|visual token 064826|>
+<|visual token 064827|>
+<|visual token 064828|>
+<|visual token 064829|>
+<|visual token 064830|>
+<|visual token 064831|>
+<|visual token 064832|>
+<|visual token 064833|>
+<|visual token 064834|>
+<|visual token 064835|>
+<|visual token 064836|>
+<|visual token 064837|>
+<|visual token 064838|>
+<|visual token 064839|>
+<|visual token 064840|>
+<|visual token 064841|>
+<|visual token 064842|>
+<|visual token 064843|>
+<|visual token 064844|>
+<|visual token 064845|>
+<|visual token 064846|>
+<|visual token 064847|>
+<|visual token 064848|>
+<|visual token 064849|>
+<|visual token 064850|>
+<|visual token 064851|>
+<|visual token 064852|>
+<|visual token 064853|>
+<|visual token 064854|>
+<|visual token 064855|>
+<|visual token 064856|>
+<|visual token 064857|>
+<|visual token 064858|>
+<|visual token 064859|>
+<|visual token 064860|>
+<|visual token 064861|>
+<|visual token 064862|>
+<|visual token 064863|>
+<|visual token 064864|>
+<|visual token 064865|>
+<|visual token 064866|>
+<|visual token 064867|>
+<|visual token 064868|>
+<|visual token 064869|>
+<|visual token 064870|>
+<|visual token 064871|>
+<|visual token 064872|>
+<|visual token 064873|>
+<|visual token 064874|>
+<|visual token 064875|>
+<|visual token 064876|>
+<|visual token 064877|>
+<|visual token 064878|>
+<|visual token 064879|>
+<|visual token 064880|>
+<|visual token 064881|>
+<|visual token 064882|>
+<|visual token 064883|>
+<|visual token 064884|>
+<|visual token 064885|>
+<|visual token 064886|>
+<|visual token 064887|>
+<|visual token 064888|>
+<|visual token 064889|>
+<|visual token 064890|>
+<|visual token 064891|>
+<|visual token 064892|>
+<|visual token 064893|>
+<|visual token 064894|>
+<|visual token 064895|>
+<|visual token 064896|>
+<|visual token 064897|>
+<|visual token 064898|>
+<|visual token 064899|>
+<|visual token 064900|>
+<|visual token 064901|>
+<|visual token 064902|>
+<|visual token 064903|>
+<|visual token 064904|>
+<|visual token 064905|>
+<|visual token 064906|>
+<|visual token 064907|>
+<|visual token 064908|>
+<|visual token 064909|>
+<|visual token 064910|>
+<|visual token 064911|>
+<|visual token 064912|>
+<|visual token 064913|>
+<|visual token 064914|>
+<|visual token 064915|>
+<|visual token 064916|>
+<|visual token 064917|>
+<|visual token 064918|>
+<|visual token 064919|>
+<|visual token 064920|>
+<|visual token 064921|>
+<|visual token 064922|>
+<|visual token 064923|>
+<|visual token 064924|>
+<|visual token 064925|>
+<|visual token 064926|>
+<|visual token 064927|>
+<|visual token 064928|>
+<|visual token 064929|>
+<|visual token 064930|>
+<|visual token 064931|>
+<|visual token 064932|>
+<|visual token 064933|>
+<|visual token 064934|>
+<|visual token 064935|>
+<|visual token 064936|>
+<|visual token 064937|>
+<|visual token 064938|>
+<|visual token 064939|>
+<|visual token 064940|>
+<|visual token 064941|>
+<|visual token 064942|>
+<|visual token 064943|>
+<|visual token 064944|>
+<|visual token 064945|>
+<|visual token 064946|>
+<|visual token 064947|>
+<|visual token 064948|>
+<|visual token 064949|>
+<|visual token 064950|>
+<|visual token 064951|>
+<|visual token 064952|>
+<|visual token 064953|>
+<|visual token 064954|>
+<|visual token 064955|>
+<|visual token 064956|>
+<|visual token 064957|>
+<|visual token 064958|>
+<|visual token 064959|>
+<|visual token 064960|>
+<|visual token 064961|>
+<|visual token 064962|>
+<|visual token 064963|>
+<|visual token 064964|>
+<|visual token 064965|>
+<|visual token 064966|>
+<|visual token 064967|>
+<|visual token 064968|>
+<|visual token 064969|>
+<|visual token 064970|>
+<|visual token 064971|>
+<|visual token 064972|>
+<|visual token 064973|>
+<|visual token 064974|>
+<|visual token 064975|>
+<|visual token 064976|>
+<|visual token 064977|>
+<|visual token 064978|>
+<|visual token 064979|>
+<|visual token 064980|>
+<|visual token 064981|>
+<|visual token 064982|>
+<|visual token 064983|>
+<|visual token 064984|>
+<|visual token 064985|>
+<|visual token 064986|>
+<|visual token 064987|>
+<|visual token 064988|>
+<|visual token 064989|>
+<|visual token 064990|>
+<|visual token 064991|>
+<|visual token 064992|>
+<|visual token 064993|>
+<|visual token 064994|>
+<|visual token 064995|>
+<|visual token 064996|>
+<|visual token 064997|>
+<|visual token 064998|>
+<|visual token 064999|>
diff --git a/nemo/collections/multimodal_autoregressive/tokenizer/emu3.tiktoken b/nemo/collections/multimodal_autoregressive/tokenizer/emu3.tiktoken
new file mode 100644
index 000000000000..9b9b0e0416d8
--- /dev/null
+++ b/nemo/collections/multimodal_autoregressive/tokenizer/emu3.tiktoken
@@ -0,0 +1,151643 @@
+IQ== 0
+Ig== 1
+Iw== 2
+JA== 3
+JQ== 4
+Jg== 5
+Jw== 6
+KA== 7
+KQ== 8
+Kg== 9
+Kw== 10
+LA== 11
+LQ== 12
+Lg== 13
+Lw== 14
+MA== 15
+MQ== 16
+Mg== 17
+Mw== 18
+NA== 19
+NQ== 20
+Ng== 21
+Nw== 22
+OA== 23
+OQ== 24
+Og== 25
+Ow== 26
+PA== 27
+PQ== 28
+Pg== 29
+Pw== 30
+QA== 31
+QQ== 32
+Qg== 33
+Qw== 34
+RA== 35
+RQ== 36
+Rg== 37
+Rw== 38
+SA== 39
+SQ== 40
+Sg== 41
+Sw== 42
+TA== 43
+TQ== 44
+Tg== 45
+Tw== 46
+UA== 47
+UQ== 48
+Ug== 49
+Uw== 50
+VA== 51
+VQ== 52
+Vg== 53
+Vw== 54
+WA== 55
+WQ== 56
+Wg== 57
+Ww== 58
+XA== 59
+XQ== 60
+Xg== 61
+Xw== 62
+YA== 63
+YQ== 64
+Yg== 65
+Yw== 66
+ZA== 67
+ZQ== 68
+Zg== 69
+Zw== 70
+aA== 71
+aQ== 72
+ag== 73
+aw== 74
+bA== 75
+bQ== 76
+bg== 77
+bw== 78
+cA== 79
+cQ== 80
+cg== 81
+cw== 82
+dA== 83
+dQ== 84
+dg== 85
+dw== 86
+eA== 87
+eQ== 88
+eg== 89
+ew== 90
+fA== 91
+fQ== 92
+fg== 93
+oQ== 94
+og== 95
+ow== 96
+pA== 97
+pQ== 98
+pg== 99
+pw== 100
+qA== 101
+qQ== 102
+qg== 103
+qw== 104
+rA== 105
+rg== 106
+rw== 107
+sA== 108
+sQ== 109
+sg== 110
+sw== 111
+tA== 112
+tQ== 113
+tg== 114
+tw== 115
+uA== 116
+uQ== 117
+ug== 118
+uw== 119
+vA== 120
+vQ== 121
+vg== 122
+vw== 123
+wA== 124
+wQ== 125
+wg== 126
+ww== 127
+xA== 128
+xQ== 129
+xg== 130
+xw== 131
+yA== 132
+yQ== 133
+yg== 134
+yw== 135
+zA== 136
+zQ== 137
+zg== 138
+zw== 139
+0A== 140
+0Q== 141
+0g== 142
+0w== 143
+1A== 144
+1Q== 145
+1g== 146
+1w== 147
+2A== 148
+2Q== 149
+2g== 150
+2w== 151
+3A== 152
+3Q== 153
+3g== 154
+3w== 155
+4A== 156
+4Q== 157
+4g== 158
+4w== 159
+5A== 160
+5Q== 161
+5g== 162
+5w== 163
+6A== 164
+6Q== 165
+6g== 166
+6w== 167
+7A== 168
+7Q== 169
+7g== 170
+7w== 171
+8A== 172
+8Q== 173
+8g== 174
+8w== 175
+9A== 176
+9Q== 177
+9g== 178
+9w== 179
++A== 180
++Q== 181
++g== 182
++w== 183
+/A== 184
+/Q== 185
+/g== 186
+/w== 187
+AA== 188
+AQ== 189
+Ag== 190
+Aw== 191
+BA== 192
+BQ== 193
+Bg== 194
+Bw== 195
+CA== 196
+CQ== 197
+Cg== 198
+Cw== 199
+DA== 200
+DQ== 201
+Dg== 202
+Dw== 203
+EA== 204
+EQ== 205
+Eg== 206
+Ew== 207
+FA== 208
+FQ== 209
+Fg== 210
+Fw== 211
+GA== 212
+GQ== 213
+Gg== 214
+Gw== 215
+HA== 216
+HQ== 217
+Hg== 218
+Hw== 219
+IA== 220
+fw== 221
+gA== 222
+gQ== 223
+gg== 224
+gw== 225
+hA== 226
+hQ== 227
+hg== 228
+hw== 229
+iA== 230
+iQ== 231
+ig== 232
+iw== 233
+jA== 234
+jQ== 235
+jg== 236
+jw== 237
+kA== 238
+kQ== 239
+kg== 240
+kw== 241
+lA== 242
+lQ== 243
+lg== 244
+lw== 245
+mA== 246
+mQ== 247
+mg== 248
+mw== 249
+nA== 250
+nQ== 251
+ng== 252
+nw== 253
+oA== 254
+rQ== 255
+ICA= 256
+ICAgIA== 257
+aW4= 258
+IHQ= 259
+ICAgICAgICA= 260
+ZXI= 261
+ICAg 262
+b24= 263
+IGE= 264
+cmU= 265
+YXQ= 266
+c3Q= 267
+ZW4= 268
+b3I= 269
+IHRo 270
+Cgo= 271
+IGM= 272
+bGU= 273
+IHM= 274
+aXQ= 275
+YW4= 276
+YXI= 277
+YWw= 278
+IHRoZQ== 279
+Owo= 280
+IHA= 281
+IGY= 282
+b3U= 283
+ID0= 284
+aXM= 285
+ICAgICAgIA== 286
+aW5n 287
+ZXM= 288
+IHc= 289
+aW9u 290
+ZWQ= 291
+aWM= 292
+IGI= 293
+IGQ= 294
+ZXQ= 295
+IG0= 296
+IG8= 297
+CQk= 298
+cm8= 299
+YXM= 300
+ZWw= 301
+Y3Q= 302
+bmQ= 303
+IGlu 304
+IGg= 305
+ZW50 306
+aWQ= 307
+IG4= 308
+YW0= 309
+ICAgICAgICAgICA= 310
+IHRv 311
+IHJl 312
+LS0= 313
+IHs= 314
+IG9m 315
+b20= 316
+KTsK 317
+aW0= 318
+DQo= 319
+ICg= 320
+aWw= 321
+Ly8= 322
+IGFuZA== 323
+dXI= 324
+c2U= 325
+IGw= 326
+ZXg= 327
+IFM= 328
+YWQ= 329
+ICI= 330
+Y2g= 331
+dXQ= 332
+aWY= 333
+Kio= 334
+IH0= 335
+ZW0= 336
+b2w= 337
+ICAgICAgICAgICAgICAgIA== 338
+dGg= 339
+KQo= 340
+IHsK 341
+IGc= 342
+aWc= 343
+aXY= 344
+LAo= 345
+Y2U= 346
+b2Q= 347
+IHY= 348
+YXRl 349
+IFQ= 350
+YWc= 351
+YXk= 352
+ICo= 353
+b3Q= 354
+dXM= 355
+IEM= 356
+IHN0 357
+IEk= 358
+dW4= 359
+dWw= 360
+dWU= 361
+IEE= 362
+b3c= 363
+ICc= 364
+ZXc= 365
+IDw= 366
+YXRpb24= 367
+KCk= 368
+IGZvcg== 369
+YWI= 370
+b3J0 371
+dW0= 372
+YW1l 373
+IGlz 374
+cGU= 375
+dHI= 376
+Y2s= 377
+4oA= 378
+IHk= 379
+aXN0 380
+LS0tLQ== 381
+LgoK 382
+aGU= 383
+IGU= 384
+bG8= 385
+IE0= 386
+IGJl 387
+ZXJz 388
+IG9u 389
+IGNvbg== 390
+YXA= 391
+dWI= 392
+IFA= 393
+ICAgICAgICAgICAgICAg 394
+YXNz 395
+aW50 396
+Pgo= 397
+bHk= 398
+dXJu 399
+ICQ= 400
+OwoK 401
+YXY= 402
+cG9ydA== 403
+aXI= 404
+LT4= 405
+bnQ= 406
+Y3Rpb24= 407
+ZW5k 408
+IGRl 409
+aXRo 410
+b3V0 411
+dHVybg== 412
+b3Vy 413
+ICAgICA= 414
+bGlj 415
+cmVz 416
+cHQ= 417
+PT0= 418
+IHRoaXM= 419
+IHdo 420
+IGlm 421
+IEQ= 422
+dmVy 423
+YWdl 424
+IEI= 425
+aHQ= 426
+ZXh0 427
+PSI= 428
+IHRoYXQ= 429
+KioqKg== 430
+IFI= 431
+IGl0 432
+ZXNz 433
+IEY= 434
+IHI= 435
+b3M= 436
+YW5k 437
+IGFz 438
+ZWN0 439
+a2U= 440
+cm9t 441
+IC8v 442
+Y29u 443
+IEw= 444
+KCI= 445
+cXU= 446
+bGFzcw== 447
+IHdpdGg= 448
+aXo= 449
+ZGU= 450
+IE4= 451
+IGFs 452
+b3A= 453
+dXA= 454
+Z2V0 455
+IH0K 456
+aWxl 457
+IGFu 458
+YXRh 459
+b3Jl 460
+cmk= 461
+IHBybw== 462
+Ow0K 463
+CQkJCQ== 464
+dGVy 465
+YWlu 466
+IFc= 467
+IEU= 468
+IGNvbQ== 469
+IHJldHVybg== 470
+YXJ0 471
+IEg= 472
+YWNr 473
+aW1wb3J0 474
+dWJsaWM= 475
+IG9y 476
+ZXN0 477
+bWVudA== 478
+IEc= 479
+YWJsZQ== 480
+IC0= 481
+aW5l 482
+aWxs 483
+aW5k 484
+ZXJl 485
+Ojo= 486
+aXR5 487
+ICs= 488
+IHRy 489
+ZWxm 490
+aWdodA== 491
+KCc= 492
+b3Jt 493
+dWx0 494
+c3Ry 495
+Li4= 496
+Iiw= 497
+IHlvdQ== 498
+eXBl 499
+cGw= 500
+IG5ldw== 501
+IGo= 502
+ICAgICAgICAgICAgICAgICAgIA== 503
+IGZyb20= 504
+IGV4 505
+IE8= 506
+bGQ= 507
+IFs= 508
+b2M= 509
+Ogo= 510
+IHNl 511
+IGxl 512
+LS0tLS0tLS0= 513
+LnM= 514
+ewo= 515
+Jyw= 516
+YW50 517
+IGF0 518
+YXNl 519
+LmM= 520
+IGNo 521
+PC8= 522
+YXZl 523
+YW5n 524
+IGFyZQ== 525
+IGludA== 526
+4oCZ 527
+X3Q= 528
+ZXJ0 529
+aWFs 530
+YWN0 531
+fQo= 532
+aXZl 533
+b2Rl 534
+b3N0 535
+IGNsYXNz 536
+IG5vdA== 537
+b2c= 538
+b3Jk 539
+YWx1ZQ== 540
+YWxs 541
+ZmY= 542
+KCk7Cg== 543
+b250 544
+aW1l 545
+YXJl 546
+IFU= 547
+IHBy 548
+IDo= 549
+aWVz 550
+aXpl 551
+dXJl 552
+IGJ5 553
+aXJl 554
+IH0KCg== 555
+LnA= 556
+IHNo 557
+aWNl 558
+YXN0 559
+cHRpb24= 560
+dHJpbmc= 561
+b2s= 562
+X18= 563
+Y2w= 564
+IyM= 565
+IGhl 566
+YXJk 567
+KS4= 568
+IEA= 569
+aWV3 570
+CQkJ 571
+IHdhcw== 572
+aXA= 573
+dGhpcw== 574
+IHU= 575
+IFRoZQ== 576
+aWRl 577
+YWNl 578
+aWI= 579
+YWM= 580
+cm91 581
+IHdl 582
+amVjdA== 583
+IHB1YmxpYw== 584
+YWs= 585
+dmU= 586
+YXRo 587
+b2lk 588
+ID0+ 589
+dXN0 590
+cXVl 591
+IHJlcw== 592
+KSk= 593
+J3M= 594
+IGs= 595
+YW5z 596
+eXN0 597
+dW5jdGlvbg== 598
+KioqKioqKio= 599
+IGk= 600
+IHVz 601
+cHA= 602
+b25l 603
+YWls 604
+PT09PQ== 605
+bmFtZQ== 606
+IHN0cg== 607
+IC8= 608
+ICY= 609
+YWNo 610
+ZGl2 611
+eXN0ZW0= 612
+ZWxs 613
+IGhhdmU= 614
+ZXJy 615
+b3VsZA== 616
+dWxs 617
+cG9u 618
+IEo= 619
+X3A= 620
+ID09 621
+aWdu 622
+U3Q= 623
+Lgo= 624
+IHBs 625
+KTsKCg== 626
+Zm9ybQ== 627
+cHV0 628
+b3VudA== 629
+fQoK 630
+ZGQ= 631
+aXRl 632
+IGdldA== 633
+cnI= 634
+b21l 635
+IOKA 636
+YXJhbQ== 637
+Y2M= 638
+ICov 639
+RVI= 640
+SW4= 641
+bGVz 642
+X3M= 643
+b25n 644
+aWU= 645
+IGNhbg== 646
+IFY= 647
+ZXJ2 648
+cHI= 649
+IHVu 650
+cm93 651
+YmVy 652
+IGRv 653
+bGw= 654
+IGVs 655
+IHNlbGY= 656
+YXRlZA== 657
+YXJ5 658
+IC4= 659
+J10= 660
+dWQ= 661
+IGVu 662
+IFRo 663
+ICAgICAgICAgICAgICAgICAgICAgICA= 664
+dGU= 665
+X2M= 666
+dWN0 667
+IGFi 668
+b3Jr 669
+LmdldA== 670
+ICM= 671
+YXc= 672
+cmVzcw== 673
+b2I= 674
+TmFtZQ== 675
+YXBw 676
+Wyc= 677
+IGFsbA== 678
+b3J5 679
+aXRpb24= 680
+YW5jZQ== 681
+ZWFy 682
+IGNvbnQ= 683
+dmVudA== 684
+aWE= 685
+IHdpbGw= 686
+SU4= 687
+ICAgICAgICAg 688
+cmV0dXJu 689
+IDwv 690
+ZGF0YQ== 691
+KQoK 692
+UmU= 693
+cGxl 694
+aWxk 695
+dGhlcg== 696
+IHlvdXI= 697
+Igo= 698
+KCQ= 699
+IG91dA== 700
+KSw= 701
+IGhhcw== 702
+U3RyaW5n 703
+c28= 704
+IHVw 705
+YXg= 706
+IGRlZg== 707
+IGJv 708
+Z2U= 709
+YWxzZQ== 710
+T04= 711
+cGVy 712
+aWNo 713
+IGJ1dA== 714
+IAo= 715
+IF8= 716
+X20= 717
+YWRk 718
+cXVlc3Q= 719
+b2RlbA== 720
+c2VsZg== 721
+ZXJ5 722
+ZnQ= 723
+ZW5z 724
+Ly8vLw== 725
+YWtl 726
+LkM= 727
+IGdv 728
+IGZ1bmN0aW9u 729
+IEs= 730
+aXZhdGU= 731
+IGlt 732
+IGNvbnN0 733
+LnQ= 734
+ICovCg== 735
+KTsNCg== 736
+IHZvaWQ= 737
+IHNldA== 738
+IFN5c3RlbQ== 739
+Y3Jp 740
+KCkK 741
+bGk= 742
+CWlm 743
+Lm0= 744
+YWxseQ== 745
+c2V0 746
+ZXA= 747
+4oCZcw== 748
+Ym8= 749
+ZGVm 750
+JywK 751
+IG1l 752
+ICE= 753
+YXRjaA== 754
+Ij4= 755
+IiwK 756
+ZWM= 757
+IElu 758
+cGg= 759
+IHw= 760
+X2Y= 761
+IHZhcg== 762
+ZW5jZQ== 763
+SWQ= 764
+cmVl 765
+aW5r 766
+bGVjdA== 767
+dWc= 768
+ZXRo 769
+IGVsc2U= 770
+LS0tLS0tLS0tLS0tLS0tLQ== 771
+Y29udA== 772
+IHNv 773
+YXRpYw== 774
+IGxv 775
+cHJv 776
+dG9u 777
+c3M= 778
+b3du 779
+YWJlbA== 780
+b2ludA== 781
+b3Vz 782
+ZWxk 783
+U1Q= 784
+VGhl 785
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA= 786
+UkU= 787
+Ijo= 788
+b2xvcg== 789
+dHA= 790
+ZWc= 791
+a2V5 792
+dWRl 793
+IFN0 794
+b3VuZA== 795
+IGFy 796
+Iik7Cg== 797
+ZW5lcg== 798
+c2Vy 799
+YmplY3Q= 800
+ZXNzYWdl 801
+ZmVy 802
+IG1vcmU= 803
+YXRpb25z 804
+ZW50cw== 805
+IGhpcw== 806
+IHRoZXk= 807
+LlM= 808
+IFk= 809
+dXNl 810
+bmU= 811
+aXNo 812
+b2xk 813
+X2Q= 814
+aW8= 815
+aWVsZA== 816
+IHBlcg== 817
+Q29udA== 818
+aW5ncw== 819
+IyMjIw== 820
+IGRhdGE= 821
+IHNh 822
+ZWY= 823
+Zm8= 824
+IG9uZQ== 825
+ZW5n 826
+IGRpcw== 827
+QVQ= 828
+IG5hbWU= 829
+IHRydWU= 830
+dmFs 831
+bGVk 832
+LmY= 833
+IG5l 834
+IGVuZA== 835
+LlQ= 836
+Y3Jl 837
+YXJr 838
+bG9n 839
+RXg= 840
+ZXJyb3I= 841
+X2lk 842
+dXJyZQ== 843
+YW5nZQ== 844
+IG51bGw= 845
+cnJheQ== 846
+IG15 847
+cGFu 848
+aWN0 849
+YXRvcg== 850
+Vmlldw== 851
+TGlzdA== 852
+CXJldHVybg== 853
+4oCd 854
+IHByZQ== 855
+IHg= 856
+Y2x1ZGU= 857
+YXJn 858
+b3Y= 859
+Lmg= 860
+ID4= 861
+IHRoZWly 862
+Jyk= 863
+aXJzdA== 864
+aWNr 865
+Z2g= 866
+TEU= 867
+T1I= 868
+IHByaXZhdGU= 869
+dGVt 870
+DQoNCg== 871
+dXNlcg== 872
+ICk= 873
+Y29t 874
+LkE= 875
+IjsK 876
+IGlk 877
+cmVhZA== 878
+IHdobw== 879
+X2I= 880
+Ij4K 881
+IHRpbWU= 882
+IG1hbg== 883
+cnk= 884
+PT09PT09PT0= 885
+cm91cA== 886
+cm9w 887
+cHVibGlj 888
+dmVs 889
+dW1iZXI= 890
+Ymxl 891
+IHdoaWNo 892
+KioqKioqKioqKioqKioqKg== 893
+IGFueQ== 894
+IGZhbHNl 895
+d2U= 896
+IHZhbHVl 897
+IGxp 898
+Iik= 899
+bmRlcg== 900
+Z3I= 901
+IG5v 902
+cGFyYW0= 903
+Zmln 904
+LmNvbQ== 905
+IGFwcA== 906
+X2w= 907
+aW9ucw== 908
+LkQ= 909
+IENo 910
+IGFib3V0 911
+IGFkZA== 912
+IHN1 913
+IHN0cmluZw== 914
+SUQ= 915
+IG92ZXI= 916
+c3RyaW5n 917
+Lmw= 918
+b3VyY2U= 919
+X0M= 920
+XQo= 921
+IHF1 922
+IFN0cmluZw== 923
+Y2E= 924
+U0U= 925
+IHJv 926
+c2g= 927
+dWFs 928
+VHlwZQ== 929
+c29u 930
+bmV3 931
+ZXJu 932
+IGFn 933
+QVI= 934
+XTsK 935
+XS4= 936
+ID8= 937
+aWNhbA== 938
+IGRlcw== 939
+dXRo 940
+aXg= 941
+YXlz 942
+IHR5cGU= 943
+J3Q= 944
+YXVsdA== 945
+IGludGVy 946
+dmFy 947
+LmI= 948
+IHBhcnQ= 949
+LmQ= 950
+dXJyZW50 951
+SVQ= 952
+RU4= 953
+ZW5j 954
+KGY= 955
+cmE= 956
+dmFsdWU= 957
+Y2hv 958
+dXR0b24= 959
+b3Nl 960
+ICE9 961
+YXRlcg== 962
+w6k= 963
+cmVhdGU= 964
+b2xs 965
+cG9z 966
+eWxl 967
+bmc= 968
+QUw= 969
+dXNpbmc= 970
+YW1lcw== 971
+IHsNCg== 972
+YXRlcw== 973
+ZWx5 974
+IHdvcms= 975
+IGVt 976
+aW5hbA== 977
+IHNw 978
+IHdoZW4= 979
+LnNldA== 980
+ICAgICAg 981
+KToK 982
+dG8= 983
+cXVpcmU= 984
+aW5kb3c= 985
+bGVtZW50 986
+cGVjdA== 987
+YXNo 988
+W2k= 989
+IHVzZQ== 990
+LkY= 991
+cGVj 992
+IGFk 993
+b3Zl 994
+Y2VwdGlvbg== 995
+ZW5ndGg= 996
+aW5jbHVkZQ== 997
+YWRlcg== 998
+ICAgICAgICAgICAgICAgICAgICAgICAgICAg 999
+YXR1cw== 1000
+VGg= 1001
+aXRsZQ== 1002
+cml0 1003
+dm9pZA== 1004
+KCku 1005
+KAo= 1006
+IG9mZg== 1007
+IG90aGVy 1008
+ICYm 1009
+JzsK 1010
+bXM= 1011
+IGJlZW4= 1012
+IHRl 1013
+bWw= 1014
+Y28= 1015
+bmM= 1016
+ZXJ2aWNl 1017
+ICU= 1018
+KioK 1019
+YW5u 1020
+YWRl 1021
+CgoKCg== 1022
+bG9jaw== 1023
+Y29uc3Q= 1024
+cG9uc2U= 1025
+IHN1cA== 1026
+Kys= 1027
+ZGF0ZQ== 1028
+IGFjYw== 1029
+IGhhZA== 1030
+IGJ1 1031
+IFJl 1032
+IHdlcmU= 1033
+IGZpbGU= 1034
+IHdvdWxk 1035
+IOKAnA== 1036
+dmVu 1037
+aXNz 1038
+IG91cg== 1039
+Y2xhc3M= 1040
+cmF3 1041
+IHllYXI= 1042
+RGF0YQ== 1043
+IHZhbA== 1044
+IHNvbWU= 1045
+ZnRlcg== 1046
+eXM= 1047
+IC8vLw== 1048
+cm91bmQ= 1049
+dmlldw== 1050
+IHBl 1051
+IHRoZXJl 1052
+IHNhaWQ= 1053
+ZHU= 1054
+b2Y= 1055
+bGluZQ== 1056
+Lyo= 1057
+ZHVjdA== 1058
+IGhlcg== 1059
+ICAgICAgICAgICAgIA== 1060
+UmVz 1061
+IGNv 1062
+IGNvbW0= 1063
+aXNl 1064
+bWlu 1065
+ICAgIAo= 1066
+I2luY2x1ZGU= 1067
+ZXRob2Q= 1068
+LlA= 1069
+dXRl 1070
+IGFzcw== 1071
+SW50 1072
+YXNr 1073
+bG9j 1074
+IGxpa2U= 1075
+b2R5 1076
+IGxldA== 1077
+bG9hZA== 1078
+IGFt 1079
+cm9s 1080
+IGdy 1081
+eXA= 1082
+IGFsc28= 1083
+IEl0 1084
+dXJs 1085
+aWZpYw== 1086
+b3Jz 1087
+X1A= 1088
+X24= 1089
+aWdo 1090
+IHRoYW4= 1091
+Q29t 1092
+QU4= 1093
+VUw= 1094
+YXRpbmc= 1095
+IFRoaXM= 1096
+cmVm 1097
+X1M= 1098
+IHN0YXRpYw== 1099
+cm9sbA== 1100
+IGp1c3Q= 1101
+IHJlc3VsdA== 1102
+aWFu 1103
+aWR0aA== 1104
+IHRoZW0= 1105
+KSk7Cg== 1106
+ZGVy 1107
+cmVhaw== 1108
+Q29u 1109
+Oi8v 1110
+dWxl 1111
+Li4u 1112
+YXJjaA== 1113
+ZW1lbnQ= 1114
+IDw8 1115
+dXNo 1116
+ZW5zZQ== 1117
+YXJy 1118
+IGludG8= 1119
+Y2Vzcw== 1120
+YW1w 1121
+aWVk 1122
+dW1lbnQ= 1123
+IFw= 1124
+XSw= 1125
+d28= 1126
+YWxz 1127
+IHdoYXQ= 1128
+YW5j 1129
+VmFsdWU= 1130
+PSc= 1131
+b2x1bQ== 1132
+IHBvcw== 1133
+YWdlcw== 1134
+YXllcg== 1135
+IHNj 1136
+dWVz 1137
+IikK 1138
+X1Q= 1139
+IGxpc3Q= 1140
+KHM= 1141
+IGNhc2U= 1142
+Q2g= 1143
+CQkJCQk= 1144
+Ly8vLy8vLy8= 1145
+cG9uZW50 1146
+IHo= 1147
+IGtu 1148
+bGV0 1149
+REU= 1150
+cmVk 1151
+IGZl 1152
+IH0sCg== 1153
+ICw= 1154
+KHQ= 1155
+IGZpcnN0 1156
+Jyk7Cg== 1157
+d29yZA== 1158
+IGltcG9ydA== 1159
+IGFjdA== 1160
+IGNoYXI= 1161
+Q1Q= 1162
+IFRy 1163
+b3BsZQ== 1164
+PXs= 1165
+CWY= 1166
+aWVudA== 1167
+Y2VudA== 1168
+Lmo= 1169
+bGVjdGlvbg== 1170
+KSkK 1171
+IG9ubHk= 1172
+IHByaW50 1173
+bWVy 1174
+Llc= 1175
+b2Nr 1176
+IC0t 1177
+VGV4dA== 1178
+IG9w 1179
+YW5r 1180
+IGl0cw== 1181
+IGJhY2s= 1182
+WyI= 1183
+IG5lZWQ= 1184
+IGNs 1185
+IHN1Yg== 1186
+IGxh 1187
+KCg= 1188
+LiI= 1189
+T2JqZWN0 1190
+IHN0YXJ0 1191
+ZmlsZQ== 1192
+KHNlbGY= 1193
+bmVy 1194
+ZXk= 1195
+IHVzZXI= 1196
+IGVudA== 1197
+IENvbQ== 1198
+aXRz 1199
+IENvbg== 1200
+b3VibGU= 1201
+b3dlcg== 1202
+aXRlbQ== 1203
+dmVyeQ== 1204
+IFdl 1205
+bGljaw== 1206
+IFE= 1207
+cGhw 1208
+dHRw 1209
+Jzo= 1210
+aWNz 1211
+IHVuZGVy 1212
+ICoK 1213
+Lkw= 1214
+KTs= 1215
+aWNlcw== 1216
+IHJlZw== 1217
+KQ0K 1218
+CXB1YmxpYw== 1219
+U1M= 1220
+IHRoZW4= 1221
+cmVhdA== 1222
+aW91cw== 1223
+Lkc= 1224
+ZWs= 1225
+aXJlY3Q= 1226
+aGVjaw== 1227
+Y3JpcHQ= 1228
+bmluZw== 1229
+IFVu 1230
+IG1heQ== 1231
+IFdo 1232
+Qm8= 1233
+SXRlbQ== 1234
+c3RydWN0 1235
+LnN0 1236
+cmVhbQ== 1237
+aWJsZQ== 1238
+bG9hdA== 1239
+IG9yZw== 1240
+dW5k 1241
+c3Vt 1242
+X2lu 1243
+Li4v 1244
+X00= 1245
+IGhvdw== 1246
+cml0ZQ== 1247
+Jwo= 1248
+VG8= 1249
+d3c= 1250
+IHBlb3BsZQ== 1251
+aW5kZXg= 1252
+Lm4= 1253
+aHR0cA== 1254
+KG0= 1255
+ZWN0b3I= 1256
+IGluZA== 1257
+IGphdg== 1258
+XSwK 1259
+IEhl 1260
+X3N0 1261
+ZnVs 1262
+b2xl 1263
+KXsK 1264
+IHNob3VsZA== 1265
+b3B5 1266
+ZWxw 1267
+aWVy 1268
+X25hbWU= 1269
+ZXJzb24= 1270
+SU9O 1271
+b3Rl 1272
+IHRlc3Q= 1273
+IGJldA== 1274
+cnJvcg== 1275
+dWxhcg== 1276
+44A= 1277
+INA= 1278
+YnM= 1279
+dGluZw== 1280
+IG1ha2U= 1281
+VHI= 1282
+IGFmdGVy 1283
+YXJnZXQ= 1284
+Uk8= 1285
+b2x1bW4= 1286
+cmM= 1287
+X3Jl 1288
+ZGVmaW5l 1289
+IHJpZ2h0 1290
+cmlnaHQ= 1291
+ZGF5 1292
+IGxvbmc= 1293
+W10= 1294
+KHA= 1295
+dGQ= 1296
+Y29uZA== 1297
+IFBybw== 1298
+IHJlbQ== 1299
+cHRpb25z 1300
+dmlk 1301
+Lmc= 1302
+IGV4dA== 1303
+IF9f 1304
+JykK 1305
+cGFjZQ== 1306
+bXA= 1307
+IG1pbg== 1308
+c3RhbmNl 1309
+YWly 1310
+YWN0aW9u 1311
+d2g= 1312
+dHlwZQ== 1313
+dXRpbA== 1314
+YWl0 1315
+PD8= 1316
+SUM= 1317
+dGV4dA== 1318
+IHBo 1319
+IGZs 1320
+Lk0= 1321
+Y2Nlc3M= 1322
+YnI= 1323
+Zm9yZQ== 1324
+ZXJzaW9u 1325
+KSwK 1326
+LnJl 1327
+YXRlZw== 1328
+IGxvYw== 1329
+aW5z 1330
+LXM= 1331
+dHJpYg== 1332
+IEludA== 1333
+IGFycmF5 1334
+LCI= 1335
+UHJv 1336
+KGM= 1337
+ZXNzaW9u 1338
+PgoK 1339
+IHNoZQ== 1340
+Il0= 1341
+YXBo 1342
+IGV4cA== 1343
+ZXJ0eQ== 1344
+IFNl 1345
+IHBhcg== 1346
+dW5j 1347
+RVQ= 1348
+IHJlYWQ= 1349
+cHJpbnQ= 1350
+IHJlbA== 1351
+IGZvcm0= 1352
+IGRy 1353
+RXhjZXB0aW9u 1354
+aW5wdXQ= 1355
+IHRyYW5z 1356
+IyMjIyMjIyM= 1357
+b3JkZXI= 1358
+Qnk= 1359
+IGF3 1360
+aXRpZXM= 1361
+dWZm 1362
+cGxheQ== 1363
+LmFkZA== 1364
+IOKAkw== 1365
+IHdhbnQ= 1366
+IGNvbXA= 1367
+bWVudHM= 1368
+IHx8 1369
+YXo= 1370
+YmU= 1371
+IG51bWJlcg== 1372
+IHJlcXVpcmU= 1373
+IEV4 1374
+IGNvbA== 1375
+IGtleQ== 1376
+ZW1iZXI= 1377
+IHR3bw== 1378
+IHNpemU= 1379
+IHdoZXJl 1380
+VVQ= 1381
+cmVzdWx0 1382
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA== 1383
+b3VnaA== 1384
+b3JsZA== 1385
+b29k 1386
+dWNo 1387
+YXRpdmU= 1388
+Z2Vy 1389
+YXJlbnQ= 1390
+IC8q 1391
+IGFyZw== 1392
+IHdoaWxl 1393
+KHRoaXM= 1394
+IHJlYw== 1395
+IGRpZg== 1396
+U3RhdGU= 1397
+IHNwZWM= 1398
+cmlkZQ== 1399
+X0Y= 1400
+IGxvb2s= 1401
+QU0= 1402
+aWxpdHk= 1403
+ZXRlcg== 1404
+4oCZdA== 1405
+CgoK 1406
+YXlvdXQ= 1407
+LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0= 1408
+YWdlcg== 1409
+IGNvdWxk 1410
+IGJy 1411
+ZW5kcw== 1412
+dXJlcw== 1413
+IGtub3c= 1414
+ZXRz 1415
+IElm 1416
+IFNo 1417
+Lnc= 1418
+YmFjaw== 1419
+IHNlcg== 1420
+ICs9 1421
+IGZy 1422
+KCkpOwo= 1423
+IGhhbmQ= 1424
+SW5k 1425
+VUxM 1426
+SW0= 1427
+KCk7Cgo= 1428
+IG1vc3Q= 1429
+IHRyeQ== 1430
+IG5vdw== 1431
+cm91Z2g= 1432
+Pg0K 1433
+YWNrYWdl 1434
+IGhpbQ== 1435
+Ll8= 1436
+aWZ5 1437
+IGJyZWFr 1438
+ICk7Cg== 1439
+cmVu 1440
+I2RlZmluZQ== 1441
+aXR0 1442
+IGFw 1443
+CWM= 1444
+KG4= 1445
+IFlvdQ== 1446
+OgoK 1447
+LW0= 1448
+IGV2ZXJ5 1449
+dXN0b20= 1450
+bGllbnQ= 1451
+b2N1bWVudA== 1452
+Y3JpcHRpb24= 1453
+RXJyb3I= 1454
+LWI= 1455
+0L4= 1456
+XVs= 1457
+dHJhbnM= 1458
+IHBvaW50 1459
+IHN0ZA== 1460
+IGZpbA== 1461
+VGltZQ== 1462
+IG1vZA== 1463
+IC0+ 1464
+IGVycm9y 1465
+YWg= 1466
+IHRleHQ= 1467
+cm9sbGVy 1468
+bG9zZQ== 1469
+cWw= 1470
+IHBvbA== 1471
+Pjwv 1472
+IHNob3c= 1473
+VXNlcg== 1474
+YXNlZA== 1475
+IHsKCg== 1476
+IGZpbmQ= 1477
+0LA= 1478
+RUQ= 1479
+c3Bhbg== 1480
+ZW51 1481
+IGN1cnJlbnQ= 1482
+IHVzZWQ= 1483
+Y2VwdA== 1484
+Y2x1ZA== 1485
+IHBsYXk= 1486
+IGxvZw== 1487
+dXRpb24= 1488
+Zmw= 1489
+IHNlZQ== 1490
+aW5kb3dz 1491
+IGhlbHA= 1492
+IHRoZXNl 1493
+IHBhc3M= 1494
+IGRvd24= 1495
+IGV2ZW4= 1496
+YXNvbg== 1497
+dWlsZA== 1498
+ZnJvbQ== 1499
+KGQ= 1500
+IGJs 1501
+bGFiZWw= 1502
+ZWxzZQ== 1503
+0LU= 1504
+ICgh 1505
+aXplZA== 1506
+KCks 1507
+IG9i 1508
+IGl0ZW0= 1509
+dW1w 1510
+VVI= 1511
+b3Ju 1512
+IGRvbg== 1513
+U2U= 1514
+bWFu 1515
+YW1wbGU= 1516
+dG4= 1517
+PT09PT09PT09PT09PT09PQ== 1518
+SGU= 1519
+Z3JhbQ== 1520
+IGRpZA== 1521
+d24= 1522
+X2g= 1523
+aXZlcg== 1524
+IHNt 1525
+IHRocm91Z2g= 1526
+IEFu 1527
+Y2hl 1528
+IGludg== 1529
+b3VzZQ== 1530
+IGVz 1531
+IE5ldw== 1532
+ZXhwb3J0 1533
+bWFyeQ== 1534
+dXRv 1535
+bGVy 1536
+IGxhc3Q= 1537
+IGV2ZW50 1538
+dHJ5 1539
+77w= 1540
+aWx5 1541
+aWduZWQ= 1542
+aW5lcw== 1543
+b2xsb3c= 1544
+aWNlbnNl 1545
+c29sZQ== 1546
+bGVhcg== 1547
+KGludA== 1548
+IGFnYWlu 1549
+IGhpZ2g= 1550
+aHRtbA== 1551
+SW5kZXg= 1552
+dXRob3I= 1553
+IC8qKgo= 1554
+IGxpbmU= 1555
+RXZlbnQ= 1556
+X0Q= 1557
+IGRvZXM= 1558
+aXRpYWw= 1559
+IGNy 1560
+YXJz 1561
+IHRlbQ== 1562
+Y2F1c2U= 1563
+ZmFjZQ== 1564
+IGA= 1565
+X0E= 1566
+QnV0dG9u 1567
+YXR1cmU= 1568
+ZWN0ZWQ= 1569
+RVM= 1570
+aXN0ZXI= 1571
+CQo= 1572
+IGJlZm9yZQ== 1573
+YWxl 1574
+b3RoZXI= 1575
+IGJlY2F1c2U= 1576
+cm9pZA== 1577
+IGVk 1578
+aWs= 1579
+cmVn 1580
+IERl 1581
+IGRpc3Q= 1582
+fSwK 1583
+IHN0YXRl 1584
+IGNvbnM= 1585
+cmludA== 1586
+YXR0 1587
+IGhlcmU= 1588
+aW5lZA== 1589
+IGZpbmFs 1590
+ICIi 1591
+S2V5 1592
+TE8= 1593
+IGRlbA== 1594
+cHR5 1595
+dGhpbmc= 1596
+IEFuZA== 1597
+IHJ1bg== 1598
+IFg= 1599
+eW0= 1600
+LmFwcA== 1601
+IHZlcnk= 1602
+Y2Vz 1603
+X04= 1604
+YXJlZA== 1605
+d2FyZA== 1606
+bGlzdA== 1607
+aXRlZA== 1608
+b2xvZw== 1609
+aXRjaA== 1610
+Qm94 1611
+aWZl 1612
+IGFj 1613
+IG1vZGVs 1614
+IG1vbg== 1615
+IHdheQ== 1616
+bGV0ZQ== 1617
+IGNhbGw= 1618
+IGF0dA== 1619
+IGNhbA== 1620
+dmVydA== 1621
+IGRlYw== 1622
+bGVhc2U= 1623
+b3Vu 1624
+IH0pOwo= 1625
+ZnI= 1626
+Zm9ybWF0aW9u 1627
+ZXRhaWw= 1628
+IG51bQ== 1629
+YWo= 1630
+cXVlcnk= 1631
+IHdlbGw= 1632
+IG9iamVjdA== 1633
+IEFz 1634
+IHllYXJz 1635
+Q29sb3I= 1636
+SVM= 1637
+IGRlZmF1bHQ= 1638
+V2g= 1639
+IGlucw== 1640
+YWludA== 1641
+IGphdmE= 1642
+IHNpbQ== 1643
+IEFy 1644
+bW9u 1645
+dGls 1646
+KCk7DQo= 1647
+KTo= 1648
+U2V0 1649
+YXR0ZXI= 1650
+IHZpZXc= 1651
+IHByZXM= 1652
+YXJyYXk= 1653
+V2U= 1654
+QXQ= 1655
+IGJlbA== 1656
+IG1hbnk= 1657
+TWFu 1658
+ZW5kZXI= 1659
+IGJlaW5n 1660
+IGdvb2Q= 1661
+CQkJCQkJ 1662
+YXRpb25hbA== 1663
+d2FyZQ== 1664
+LmxvZw== 1665
+ew0K 1666
+IHVzaW5n 1667
+X0I= 1668
+IDo9 1669
+X3c= 1670
+aXN0cw== 1671
+bGlzaA== 1672
+IHN0dWQ= 1673
+IEFs 1674
+IGd1 1675
+Y29uZmln 1676
+dXJpbmc= 1677
+dGltZQ== 1678
+b2tlbg== 1679
+YW1lc3BhY2U= 1680
+IHJlcXVlc3Q= 1681
+IGNoaWxk 1682
+IMM= 1683
+bG9i 1684
+IHBhcmFt 1685
+IH0NCg== 1686
+IGVjaG8= 1687
+ZnVuY3Rpb24= 1688
+KioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKio= 1689
+cHM= 1690
+RWxlbWVudA== 1691
+YWxr 1692
+bGljYXRpb24= 1693
+Ynk= 1694
+U2l6ZQ== 1695
+cmF3aW5n 1696
+IHBlcnNvbg== 1697
+ICAgICAgICAgICAgICAgICA= 1698
+XG4= 1699
+b2JqZWN0 1700
+aW5jZQ== 1701
+RW4= 1702
+RmlsZQ== 1703
+dWY= 1704
+ZmZlY3Q= 1705
+QUM= 1706
+IHN0eWxl 1707
+c3VtbWFyeQ== 1708
+IHF1ZQ== 1709
+X3I= 1710
+ICgk 1711
+TW9kZWw= 1712
+aWRlbnQ= 1713
+IG1ldGhvZA== 1714
+SUw= 1715
+b3R0 1716
+bGVzcw== 1717
+SU5H 1718
+ICgp 1719
+IGV4cGVjdA== 1720
+eW5j 1721
+cGFja2FnZQ== 1722
+dXJz 1723
+IHByb3Q= 1724
+Li8= 1725
+cHJl 1726
+ICkK 1727
+bWE= 1728
+IHN1cg== 1729
+IGZvdW5k 1730
+SW5mbw== 1731
+cGFy 1732
+aW1lcw== 1733
+LmU= 1734
+YWlucw== 1735
+IHBvc3Q= 1736
+LWQ= 1737
+b2xlYW4= 1738
+IHNs 1739
+UEU= 1740
+IHN1Y2g= 1741
+c2VsZWN0 1742
+YWluZXI= 1743
+IHRoaW5r 1744
+IGRpZmZlcg== 1745
+LnI= 1746
+LyoqCg== 1747
+RkY= 1748
+b29s 1749
+cGxhdGU= 1750
+cXVhbA== 1751
+IEZvcg== 1752
+IG11Y2g= 1753
+dWM= 1754
+KG5ldw== 1755
+b2R1bGU= 1756
+IHNvbQ== 1757
+IGh0dHA= 1758
+IExpc3Q= 1759
+IGNvdW50 1760
+IGluc3Q= 1761
+Y2hhcg== 1762
+bWl0 1763
+Lmlk 1764
+YWtpbmc= 1765
+IGdlbmVy 1766
+cHg= 1767
+dmljZQ== 1768
+X2RhdGE= 1769
+IE5VTEw= 1770
+fQ0K 1771
+aWRk 1772
+44CC 1773
+IG1lZA== 1774
+b3Jn 1775
+aWRlcg== 1776
+YWNoZQ== 1777
+d29yaw== 1778
+IGNoZWNr 1779
+d2Vlbg== 1780
+ICgo 1781
+dGhl 1782
+YW50cw== 1783
+Pjw= 1784
+LkI= 1785
+LWM= 1786
+IG9wZW4= 1787
+IGVzdA== 1788
+ICAgICAgICAK 1789
+IG5leHQ= 1790
+SU0= 1791
+0YI= 1792
+T1Q= 1793
+w7M= 1794
+IGZvbGxvdw== 1795
+Y29udGVudA== 1796
+ICAgICAgICAgICAg 1797
+IGluY2x1ZA== 1798
+SEU= 1799
+IFJlcw== 1800
+IGhyZWY= 1801
+0Lg= 1802
+IGNhcg== 1803
+eXBlcw== 1804
+aW1hZ2U= 1805
+VW4= 1806
+IGJvb2w= 1807
+QUQ= 1808
+IGdhbWU= 1809
+LkZvcm0= 1810
+cm93cw== 1811
+Ki8= 1812
+dmVsb3A= 1813
+LkRyYXdpbmc= 1814
+IHBhdGg= 1815
+aXNpb24= 1816
+IGVhY2g= 1817
+IFBs 1818
+X3R5cGU= 1819
+UGF0aA== 1820
+bmVjdGlvbg== 1821
+IGF2 1822
+Jyku 1823
+IHN1cHBvcnQ= 1824
+RU5U 1825
+cmVt 1826
+Iiku 1827
+IG93bg== 1828
+IGNvcg== 1829
+Y291bnQ= 1830
+bWlzcw== 1831
+dWFsbHk= 1832
+IG1lbQ== 1833
+c3Rk 1834
+aWVuY2U= 1835
+c2VhcmNo 1836
+IgoK 1837
+Rm9ybQ== 1838
+IHNleA== 1839
+ZW5hbWU= 1840
+IHNpZ24= 1841
+IGV0 1842
+ICAgICAgICAgIA== 1843
+Jywn 1844
+IEFwcA== 1845
+IHRob3Nl 1846
+b2Zm 1847
+IGVycg== 1848
+IHN5c3RlbQ== 1849
+IGJlc3Q= 1850
+Y29kZQ== 1851
+IHNhbWU= 1852
+IGRp 1853
+dXNz 1854
+IGNyZWF0ZQ== 1855
+YXRoZXI= 1856
+QXJyYXk= 1857
+Lmlu 1858
+ZmU= 1859
+U2VydmljZQ== 1860
+VU4= 1861
+YXRz 1862
+IFo= 1863
+YWx0aA== 1864
+IG1hZGU= 1865
+dHJ1ZQ== 1866
+QUI= 1867
+IG1hcms= 1868
+cmlk 1869
+aWZpZWQ= 1870
+LA0K 1871
+eW4= 1872
+cHJlc3M= 1873
+IGdyb3Vw 1874
+IGZpbg== 1875
+IExpY2Vuc2U= 1876
+RmllbGQ= 1877
+ZWdlcg== 1878
+IHdvcmxk 1879
+aW5lc3M= 1880
+dHk= 1881
+IHByb2Nlc3M= 1882
+KGI= 1883
+IGNyZQ== 1884
+YXJu 1885
+aXZlcw== 1886
+IG1haW4= 1887
+aWRlbw== 1888
+X2c= 1889
+QUc= 1890
+dmFsaWQ= 1891
+aW1n 1892
+UEk= 1893
+IGNvbG9y 1894
+IHJlcG9ydA== 1895
+IHRha2U= 1896
+cmli 1897
+T00= 1898
+IGRheQ== 1899
+UmVxdWVzdA== 1900
+IHNr 1901
+YmVycw== 1902
+CXM= 1903
+LkFkZA== 1904
+b290 1905
+SW1hZ2U= 1906
+IGNvbXBsZQ== 1907
+b2xsZWN0aW9u 1908
+IHRvcA== 1909
+IGZyZWU= 1910
+QVM= 1911
+RGU= 1912
+IE9u 1913
+SUc= 1914
+ZXRh 1915
+RGF0ZQ== 1916
+IGFjdGlvbg== 1917
+T3Zlcg== 1918
+aXRvcg== 1919
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA= 1920
+bm90 1921
+IGluZGV4 1922
+aGVy 1923
+aWNvbg== 1924
+T24= 1925
+Ow0KDQo= 1926
+aXZpdHk= 1927
+bWFuZA== 1928
+LldpbmRvd3M= 1929
+T0w= 1930
+IHJlYWw= 1931
+IG1heA== 1932
+bGFuZA== 1933
+Li4uLg== 1934
+cmFwaA== 1935
+IGJ1aWxk 1936
+bGVn 1937
+YXNzd29yZA== 1938
+PwoK 1939
+4oCm 1940
+b29r 1941
+dWNr 1942
+IG1lc3NhZ2U= 1943
+dGVzdA== 1944
+aXZlcnM= 1945
+IGlucHV0 1946
+IGFydA== 1947
+IGJldHdlZW4= 1948
+R2V0 1949
+ZW50ZXI= 1950
+Z3JvdW5k 1951
+ZW5l 1952
+w6E= 1953
+Lmxlbmd0aA== 1954
+Tm9kZQ== 1955
+KGk= 1956
+Q2xhc3M= 1957
+Zm9y 1958
+IOKAlA== 1959
+dGVu 1960
+b2lu 1961
+IGtl 1962
+dWk= 1963
+IElO 1964
+IHRhYmxl 1965
+c3Vi 1966
+IExl 1967
+IGhlYWQ= 1968
+IG11c3Q= 1969
+Ly8vLy8vLy8vLy8vLy8vLw== 1970
+LnV0aWw= 1971
+Q29udGV4dA== 1972
+IG9yZGVy 1973
+IG1vdg== 1974
+b3Zlcg== 1975
+IGNvbnRpbg== 1976
+IHNheQ== 1977
+c3RhdGlj 1978
+LlRleHQ= 1979
+IGNsYXNzTmFtZQ== 1980
+cGFueQ== 1981
+IHRlcg== 1982
+aGVhZA== 1983
+cmc= 1984
+IHByb2R1Y3Q= 1985
+VGhpcw== 1986
+LuKAnQ== 1987
+IEJ1dA== 1988
+bG95 1989
+IGRvdWJsZQ== 1990
+c2c= 1991
+IHBsYWNl 1992
+Lng= 1993
+bWVzc2FnZQ== 1994
+IGluZm9ybWF0aW9u 1995
+cHJpdmF0ZQ== 1996
+IG9wZXI= 1997
+Y2Vk 1998
+ZGI= 1999
+Ij48Lw== 2000
+UGFyYW0= 2001
+aWNsZQ== 2002
+IHdlZWs= 2003
+IHByb3A= 2004
+dGFibGU= 2005
+aWRnZXQ= 2006
+cGxhY2U= 2007
+UHJvcA== 2008
+IEFsbA== 2009
+ZWxz 2010
+Ym94 2011
+LgoKCgo= 2012
+LlI= 2013
+IFRv 2014
+aXRlcg== 2015
+U2g= 2016
+dXJhdGlvbg== 2017
+b2xkZXI= 2018
+X2xpc3Q= 2019
+Y29tZQ== 2020
+IHN3 2021
+aXphdGlvbg== 2022
+CWZvcg== 2023
+Ymw= 2024
+IHByb2dyYW0= 2025
+KGU= 2026
+YXBl 2027
+Y2hlY2s= 2028
+LkZvcm1z 2029
+IHVuZA== 2030
+YXRlZ29yeQ== 2031
+YWdz 2032
+IHJlc3BvbnNl 2033
+VVM= 2034
+cmVxdWVzdA== 2035
+IHN0cnVjdA== 2036
+ZXNjcmlwdGlvbg== 2037
+IGNvZGU= 2038
+X0g= 2039
+dWZmZXI= 2040
+IHdpdGhvdXQ= 2041
+bG9iYWw= 2042
+TWFuYWdlcg== 2043
+aWx0ZXI= 2044
+UE8= 2045
+CXRoaXM= 2046
+b3B0aW9u 2047
+IHNvbA== 2048
+ID09PQ== 2049
+YWtlcw== 2050
+Q29udHJvbGxlcg== 2051
+TWVzc2FnZQ== 2052
+IHJlZg== 2053
+ZXZlcg== 2054
+IFNv 2055
+YWluaW5n 2056
+LmFwcGVuZA== 2057
+IHN0aWxs 2058
+IHByb3ZpZA== 2059
+IGFzc2VydA== 2060
+bWVk 2061
+IGNhcA== 2062
+dXNpbmVzcw== 2063
+IHJlcA== 2064
+dGluZ3M= 2065
+dmVk 2066
+Lk4= 2067
+YXBp 2068
+T0Q= 2069
+IGZpZWxk 2070
+aXZlbg== 2071
+b3Rv 2072
+4oCc 2073
+Y29s 2074
+KHg= 2075
+Z2h0 2076
+UmVzdWx0 2077
+Q29kZQ== 2078
+Lmlz 2079
+bGluaw== 2080
+IGNvdXI= 2081
+QW4= 2082
+IHRlYW0= 2083
+CWludA== 2084
+aWZ0 2085
+IHNlY29uZA== 2086
+IGdvaW5n 2087
+IHJhbmdl 2088
+X0U= 2089
+bmVzcw== 2090
+IGZhbQ== 2091
+IG5pbA== 2092
+IENvbnQ= 2093
+YWlsYWJsZQ== 2094
+dXRlcw== 2095
+YXRhYg== 2096
+IGZhY3Q= 2097
+IHZpcw== 2098
+KCY= 2099
+IEFO 2100
+QWw= 2101
+dGl0bGU= 2102
+IGFuZHJvaWQ= 2103
+Q0U= 2104
+XCI= 2105
+aXJ0 2106
+IHdyaXQ= 2107
+0L0= 2108
+CW0= 2109
+ZnR3YXJl 2110
+b25k 2111
+IHJldA== 2112
+b3NpdGlvbg== 2113
+IGhvbWU= 2114
+IGxlZnQ= 2115
+YXJncw== 2116
+bWVyaWM= 2117
+IGRpcmVjdA== 2118
+b2Np 2119
+UGw= 2120
+QXM= 2121
+cmV0 2122
+YWRv 2123
+T2Y= 2124
+Y2hu 2125
+IEdldA== 2126
+ZWU= 2127
+cm9zcw== 2128
+KCk7 2129
+X19fXw== 2130
+LnBo 2131
+SXQ= 2132
+b3V0ZQ== 2133
+IGV4cGVy 2134
+Y2hvb2w= 2135
+d3d3 2136
+fSw= 2137
+IGFsbG93 2138
+IMI= 2139
+KCkp 2140
+c2l6ZQ== 2141
+aXNt 2142
+YWk= 2143
+dHJhY3Q= 2144
+YW5l 2145
+Li4uCgo= 2146
+Y29udGV4dA== 2147
+IGJlZw== 2148
+Q0g= 2149
+IHBhZ2U= 2150
+aGlw 2151
+bm8= 2152
+Y29yZQ== 2153
+c3A= 2154
+IGRpZmZlcmVudA== 2155
+aWFibGU= 2156
+IE1l 2157
+X0lO 2158
+YnV0dG9u 2159
+IElz 2160
+ZXJ2aWNlcw== 2161
+IGNh 2162
+IGFyb3VuZA== 2163
+QXBw 2164
+cmF0aW9u 2165
+IHJlY2U= 2166
+IHJlYWxseQ== 2167
+IGltYWdl 2168
+IHRhcmdldA== 2169
+IGRlcA== 2170
+b3B5cmlnaHQ= 2171
+dHJh 2172
+aW5nbGU= 2173
+aXRhbA== 2174
+TGF5b3V0 2175
+IGJvdGg= 2176
+T3ZlcnJpZGU= 2177
+YXJt 2178
+PT4= 2179
+YXRlcmlhbA== 2180
+aWxlZA== 2181
+IHB1dA== 2182
+UXU= 2183
+0YA= 2184
+dW5n 2185
+bWFw 2186
+CQkJCQkJCQk= 2187
+IGxldmVs 2188
+Q29tcG9uZW50 2189
+Ym9vaw== 2190
+Y3JlZW4= 2191
+X1JF 2192
+IGNvbmZpZw== 2193
+44E= 2194
+T3I= 2195
+LmRhdGE= 2196
+IGRvY3VtZW50 2197
+Iiwi 2198
+dHJpYnV0ZQ== 2199
+dXg= 2200
+TG9n 2201
+ZmVyZW5jZQ== 2202
+cG9zdA== 2203
+X2U= 2204
+IGxvY2Fs 2205
+YW5kb20= 2206
+YXNzZXJ0 2207
+VmFs 2208
+bGVjdGVk 2209
+aW5h 2210
+YXRhYmFzZQ== 2211
+QWRk 2212
+IGNvbnRlbnQ= 2213
+LnByaW50 2214
+c2lnbmVk 2215
+cmlj 2216
+LiIKCg== 2217
+IGZh 2218
+IQoK 2219
+LWY= 2220
+aXZlZA== 2221
+IHF1ZXN0 2222
+LmV4 2223
+IGZsb2F0 2224
+IGRldmVsb3A= 2225
+0L7Q 2226
+TWFw 2227
+YWRpbmc= 2228
+IHBvc3M= 2229
+VUU= 2230
+bmFtZXNwYWNl 2231
+X08= 2232
+CWI= 2233
+LkdldA== 2234
+Pig= 2235
+anNvbg== 2236
+ZXRhaWxz 2237
+IHRvbw== 2238
+IGV4dGVuZHM= 2239
+IE5vbmU= 2240
+IGZvcmU= 2241
+KFN0cmluZw== 2242
+Zm9ybWF0 2243
+IGdyZWF0 2244
+aW50ZXI= 2245
+Y2FsZQ== 2246
+0YE= 2247
+cm9u 2248
+aXZpbmc= 2249
+RW50 2250
+ZW5jeQ== 2251
+eHQ= 2252
+b3k= 2253
+IG1vbnRo 2254
+IGhhcHA= 2255
+IHN1cGVy 2256
+YmFy 2257
+ZGVmYXVsdA== 2258
+X2Rl 2259
+b3Jkcw== 2260
+bG4= 2261
+KHsK 2262
+IEluZA== 2263
+YXNlcw== 2264
+IHRpdGxl 2265
+IGNvbnRleHQ= 2266
+b2g= 2267
+LXA= 2268
+RW0= 2269
+IG1ldA== 2270
+VGVzdA== 2271
+IGxpZmU= 2272
+X3Y= 2273
+IFVT 2274
+VUk= 2275
+b2NhdGlvbg== 2276
+bWQ= 2277
+IFsK 2278
+IF0= 2279
+c3c= 2280
+IGluY3Jl 2281
+c2NyaXB0 2282
+ZW50aWFs 2283
+d2F5cw== 2284
+LmRl 2285
+IHNyYw== 2286
+IGNhdGNo 2287
+IEFtZXJpYw== 2288
+Ly8K 2289
+ICAgICAgICAgICAgICA= 2290
+IHBheQ== 2291
+cGxpdA== 2292
+4oCU 2293
+IGNvdW4= 2294
+b2Jq 2295
+LnBocA== 2296
+IGNoYW5nZQ== 2297
+ZXRoaW5n 2298
+J3Jl 2299
+YXN0ZXI= 2300
+bG9z 2301
+bGF0aW9u 2302
+ICAK 2303
+TGU= 2304
+w6Q= 2305
+KHs= 2306
+cmVhZHk= 2307
+IE5v 2308
+IHBvc2l0aW9u 2309
+IG9sZA== 2310
+IGJvb2s= 2311
+YWJsZWQ= 2312
+YnVn 2313
+SGFuZA== 2314
+fTsKCg== 2315
+aXNwbGF5 2316
+YXZpbmc= 2317
+IGdvdmVy 2318
+IHZlcnNpb24= 2319
+U3lzdGVt 2320
+bmVjdA== 2321
+cmVzcG9uc2U= 2322
+U3R5bGU= 2323
+VXA= 2324
+YW5ndQ== 2325
+IHRocmVl 2326
+aW5pdA== 2327
+ZXJv 2328
+IGxhdw== 2329
+ZW5kaWY= 2330
+IGJhc2U= 2331
+ZW1haWw= 2332
+KGw= 2333
+X1Y= 2334
+IGNvbmY= 2335
+QVRF 2336
+IGR1cmluZw== 2337
+dGVz 2338
+IGNvbnNvbGU= 2339
+IFBy 2340
+IHNwZQ== 2341
+dmVz 2342
+cGF0aA== 2343
+aWFsb2c= 2344
+ZGl0aW9u 2345
+X3Rv 2346
+YXJkcw== 2347
+IGFnYWluc3Q= 2348
+ZXR3b3Jr 2349
+IFBo 2350
+X0w= 2351
+Y3Vy 2352
+aW1pdA== 2353
+V2l0aA== 2354
+IHBvd2Vy 2355
+aXVt 2356
+JzsKCg== 2357
+IHdvbQ== 2358
+bGVmdA== 2359
+b3VyY2Vz 2360
+YXRyaQ== 2361
+IElt 2362
+IE1hbg== 2363
+b3J0aA== 2364
+JHs= 2365
+cXVhbHM= 2366
+ZXNl 2367
+X3NpemU= 2368
+IGlzcw== 2369
+b3RhbA== 2370
+LWc= 2371
+aXF1ZQ== 2372
+cmFtZQ== 2373
+IHdpZHRo 2374
+ZXJn 2375
+KSg= 2376
+aXR0bGU= 2377
+VFI= 2378
+IFRoZXk= 2379
+ZW5jZXM= 2380
+cmw= 2381
+b25z 2382
+IGxhYmVs 2383
+Lnk= 2384
+LXQ= 2385
+dXBkYXRl 2386
+YW5lbA== 2387
+c2M= 2388
+LnRv 2389
+IHByb2plY3Q= 2390
+w7w= 2391
+IGVsZW1lbnQ= 2392
+IHN1Y2Nlc3M= 2393
+CQkK 2394
+LnNo 2395
+cmFt 2396
+Y2hlZA== 2397
+KCkpCg== 2398
+ICgK 2399
+IGRhdGU= 2400
+IHRvdA== 2401
+X1NU 2402
+QWxs 2403
+aWZpY2F0aW9u 2404
+CXZhcg== 2405
+IHRyaQ== 2406
+Y2hlbQ== 2407
+bXk= 2408
+IGJpZw== 2409
+IEFk 2410
+IEF0 2411
+b3Rz 2412
+bnVt 2413
+QWN0 2414
+IG1hcA== 2415
+ZXJh 2416
+Y29wZQ== 2417
+LiQ= 2418
+LOKAnQ== 2419
+IHBvcA== 2420
+IGZldw== 2421
+IGxlbg== 2422
+dWlk 2423
+ZXRlcnM= 2424
+dWxlcw== 2425
+w60= 2426
+c291cmNl 2427
+aHR0cHM= 2428
+IGRlbQ== 2429
+IGVhcg== 2430
+IyMjIyMjIyMjIyMjIyMjIw== 2431
+IG1hdGNo 2432
+b3JpZXM= 2433
+YWNlcw== 2434
+IENs 2435
+IG5vZGU= 2436
+aXJj 2437
+bG9jYWw= 2438
+dW5pdHk= 2439
+fTsK 2440
+IGFub3RoZXI= 2441
+PDw= 2442
+b2dsZQ== 2443
+IHNpdA== 2444
+ZXdvcms= 2445
+VEU= 2446
+Lkk= 2447
+TlM= 2448
+b2xvZ3k= 2449
+b3VnaHQ= 2450
+LkNvbnQ= 2451
+Pj4= 2452
+IGNhcmU= 2453
+c3RhdGU= 2454
+CXByaXZhdGU= 2455
+IGVmZmVjdA== 2456
+Kysp 2457
+X2ZpbGU= 2458
+ZW5kaW5n 2459
+TGluZQ== 2460
+Rm9y 2461
+aW9y 2462
+IFNj 2463
+IGZ1bg== 2464
+LlNpemU= 2465
+CWVsc2U= 2466
+XSk= 2467
+c3RhcnQ= 2468
+dmlvdXM= 2469
+IH0s 2470
+b3Vycw== 2471
+IGxlZw== 2472
+IHNlcnZpY2U= 2473
+IHNpbmNl 2474
+aXJvbg== 2475
+TGFiZWw= 2476
+IG5vbg== 2477
+IGxvcw== 2478
+aWN0aW9u 2479
+IGZ1bGw= 2480
+YWN0ZXI= 2481
+Ym9hcmQ= 2482
+Z3Jlc3M= 2483
+IHR1cm4= 2484
+aXRoZXI= 2485
+LnNpemU= 2486
+IGJvZHk= 2487
+cmVzaA== 2488
+ZXR1cm4= 2489
+KF8= 2490
+eWxlcw== 2491
+b3JtYWw= 2492
+cGk= 2493
+IHNvbWV0aGluZw== 2494
+IS0t 2495
+dWludA== 2496
+IHByb2R1 2497
+IHN0YW5k 2498
+IHByb2JsZQ== 2499
+IGF2YWlsYWJsZQ== 2500
+bXQ= 2501
+IEJs 2502
+IC4uLg== 2503
+IGJsb2Nr 2504
+SW5wdXQ= 2505
+IGtlZXA= 2506
+Q291bnQ= 2507
+b3Blbg== 2508
+IFsn 2509
+IHRocm93 2510
+dWlsZGVy 2511
+QWN0aW9u 2512
+IHRoaW5ncw== 2513
+VHJ1ZQ== 2514
+IHVybA== 2515
+IEJv 2516
+cHJpbnRm 2517
+IHJlZA== 2518
+anM= 2519
+LmNyZWF0ZQ== 2520
+IE9y 2521
+U3RhdHVz 2522
+SW5zdGFuY2U= 2523
+IGNvbnRyb2w= 2524
+IGNvbWU= 2525
+IGN1c3RvbQ== 2526
+bG9jYXRpb24= 2527
+bW9kZWw= 2528
+IA0K 2529
+IHNvdXJjZQ== 2530
+IGVhcw== 2531
+Lm91dA== 2532
+XQoK 2533
+b25leQ== 2534
+IGF3YWl0 2535
+IHBhcnRpYw== 2536
+QVA= 2537
+dWJsaXNo 2538
+b2Rlcw== 2539
+X3Bybw== 2540
+cGx5 2541
+cml0ZXI= 2542
+IHByb3Y= 2543
+IG1pbGw= 2544
+SFQ= 2545
+XSkK 2546
+IGNoYW5n 2547
+IGFzaw== 2548
+ICAgICAgICAgICAgICAgICAgICAg 2549
+IG91dHB1dA== 2550
+IGVtYWls 2551
+LnB1c2g= 2552
+IH0NCg0K 2553
+aW5hdGlvbg== 2554
+YXRyaXg= 2555
+VGFibGU= 2556
+dWNjZXNz 2557
+XSk7Cg== 2558
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg 2559
+IGRpc2M= 2560
+KFs= 2561
+IGJ1c2luZXNz 2562
+aGVpZ2h0 2563
+Lmh0bWw= 2564
+dGE= 2565
+ZmllbGQ= 2566
+IHJlcXVpcmVk 2567
+X1I= 2568
+IGdvdmVybg== 2569
+fQ0KDQo= 2570
+bGV4 2571
+Liw= 2572
+IFNldA== 2573
+dXJjaA== 2574
+Ly8v 2575
+dHM= 2576
+YWY= 2577
+IG1pZ2h0 2578
+aXN0b3J5 2579
+U3Ry 2580
+IG5ldmVy 2581
+UmVzcG9uc2U= 2582
+YXJzZQ== 2583
+YWRh 2584
+IEhvdw== 2585
+ICop 2586
+IDs= 2587
+IGhhcmQ= 2588
+QWQ= 2589
+IGludGVybg== 2590
+dXNlZA== 2591
+KGRhdGE= 2592
+bW9k 2593
+YW5uZWw= 2594
+IG5w 2595
+dWdn 2596
+IC8+Cg== 2597
+IGNhbGxlZA== 2598
+Ym9keQ== 2599
+IGNobw== 2600
+KHI= 2601
+X3NldA== 2602
+aXJk 2603
+ID49 2604
+IH07Cg== 2605
+IG9wdGlvbnM= 2606
+IEdlbmVy 2607
+IGhlaWdodA== 2608
+UG9pbnQ= 2609
+WW91 2610
+ZXR5 2611
+Q2xpY2s= 2612
+IHNtYWxs 2613
+IGlkZQ== 2614
+IGFjY2Vzcw== 2615
+YW5ndWFnZQ== 2616
+IHByb3RlY3RlZA== 2617
+IGpvYg== 2618
+IFRoZXJl 2619
+RGVm 2620
+IGFkZHJlc3M= 2621
+IHVpbnQ= 2622
+Tm90 2623
+b28= 2624
+YXBz 2625
+PGRpdg== 2626
+YWluZWQ= 2627
+YXR1cg== 2628
+IHN1bQ== 2629
+LXc= 2630
+IERhdGU= 2631
+IGxpdHRsZQ== 2632
+IGZyaQ== 2633
+WVBF 2634
+IHBvcnQ= 2635
+ZWg= 2636
+cHJpbmc= 2637
+X3BhdGg= 2638
+IHN0YXR1cw== 2639
+YWlt 2640
+Ym9vbA== 2641
+IGFwcGU= 2642
+IG9z 2643
+Lm5hbWU= 2644
+ZW5zaW9u 2645
+X0c= 2646
+IHVwZGF0ZQ== 2647
+Q29uZmln 2648
+YWZm 2649
+RVJS 2650
+IDw9 2651
+YXRlbHk= 2652
+I2lm 2653
+dWN0aW9u 2654
+IFRl 2655
+IGxpbms= 2656
+IFVzZXI= 2657
+LmZpbmQ= 2658
+Lm9yZw== 2659
+bWU= 2660
+IGdpdmVu 2661
+T3V0 2662
+I2VuZGlm 2663
+IGJldHRlcg== 2664
+UGFnZQ== 2665
+IGZlZWw= 2666
+ZW5u 2667
+TUw= 2668
+IGFscmVhZHk= 2669
+IGluY2x1ZGluZw== 2670
+b29nbGU= 2671
+cnU= 2672
+aWNhbGx5 2673
+cHJvcA== 2674
+bGVhbg== 2675
+b3V0ZXI= 2676
+IGFsd2F5cw== 2677
+b3JkaW5n 2678
+SWY= 2679
+b3JhZ2U= 2680
+IHBhcmVudA== 2681
+dmlz 2682
+CQkJCQkJCQ== 2683
+IGdvdA== 2684
+c3RhbmQ= 2685
+IGxlc3M= 2686
+L3M= 2687
+IEFzcw== 2688
+YXB0 2689
+aXJlZA== 2690
+IEFkZA== 2691
+IGFjY291bnQ= 2692
+cGxveQ== 2693
+IGRlcg== 2694
+cmVzZW50 2695
+IGxvdA== 2696
+IHZhbGlk 2697
+CWQ= 2698
+IGJpdA== 2699
+cG9uZW50cw== 2700
+IGZvbGxvd2luZw== 2701
+X2V4 2702
+U09O 2703
+IHN1cmU= 2704
+b2NpYWw= 2705
+IHByb20= 2706
+ZXJ0aWVz 2707
+aGVhZGVy 2708
+LnBybw== 2709
+IGJvb2xlYW4= 2710
+IHNlYXJjaA== 2711
+a2Vu 2712
+IG9yaWc= 2713
+IGVy 2714
+RWQ= 2715
+RU0= 2716
+YXV0 2717
+bGluZw== 2718
+YWxpdHk= 2719
+QnlJZA== 2720
+YmVk 2721
+CWNhc2U= 2722
+ZXRoZXI= 2723
+cG9zaXQ= 2724
+IGludmVzdA== 2725
+IE9S 2726
+IHNheXM= 2727
+bWlzc2lvbg== 2728
+QU1F 2729
+IHRlbXA= 2730
+b2Fk 2731
+IHJlc3Q= 2732
+aW5mbw== 2733
+IGludGVyZXN0 2734
+QXJn 2735
+IHBlcmZvcm0= 2736
+cG9ucw== 2737
+IFZpZXc= 2738
+IHZlcg== 2739
+bGli 2740
+KGNvbnN0 2741
+VXRpbA== 2742
+TGlzdGVuZXI= 2743
+YXJnZQ== 2744
+IG11bHQ= 2745
+IGRpZQ== 2746
+IHNpdGU= 2747
+Li4vLi4v 2748
+RUw= 2749
+IHZhbHVlcw== 2750
+IH0pCg== 2751
+cGVu 2752
+Tm8= 2753
+aWNybw== 2754
+IGJlaA== 2755
+ICcuLw== 2756
+YWN5 2757
+cmVj 2758
+KCktPg== 2759
+CSAgIA== 2760
+Iikp 2761
+Q29udGVudA== 2762
+X1c= 2763
+cGxlbWVudA== 2764
+IHdvbg== 2765
+IHZpZGVv 2766
+YWRp 2767
+cG9pbnQ= 2768
+JSU= 2769
+IGds 2770
+ZXJ2ZWQ= 2771
+dmlyb24= 2772
+SUY= 2773
+dXRlZA== 2774
+44M= 2775
+J20= 2776
+IGNlcnQ= 2777
+IHByb2Y= 2778
+IGNlbGw= 2779
+YXJp 2780
+IHBsYXllcg== 2781
+YWlz 2782
+IGNvc3Q= 2783
+IGh1bQ== 2784
+KFI= 2785
+IG9mZmlj 2786
+a3M= 2787
+LnRleHQ= 2788
+YXR1cmVz 2789
+IHRvdGFs 2790
+ICovCgo= 2791
+b3Bl 2792
+IHN0YXQ= 2793
+VU0= 2794
+IGxvYWQ= 2795
+aWdodHM= 2796
+IGNsZWFy 2797
+dXJv 2798
+IHRlY2hu 2799
+dXBwb3J0 2800
+SVI= 2801
+IHJvdw== 2802
+IHNlZW0= 2803
+IHE= 2804
+IHNob3J0 2805
+IE5vdA== 2806
+aXBw 2807
+R3JvdXA= 2808
+c2VjdGlvbg== 2809
+bWF4 2810
+aXJs 2811
+IG92ZXJyaWRl 2812
+IGNvbXBhbnk= 2813
+IGRvbmU= 2814
+Iik7DQo= 2815
+IGdyZQ== 2816
+LlJl 2817
+IGJlbGll 2818
+cmlzdA== 2819
+IGhlYWx0aA== 2820
+QU5U 2821
+KCkKCg== 2822
+IEJl 2823
+LnZhbHVl 2824
+IEdy 2825
+b3R0b20= 2826
+IGFyZ3M= 2827
+UFQ= 2828
+c3RhdHVz 2829
+ZnVuYw== 2830
+dW1lbnRz 2831
+LWg= 2832
+TnVtYmVy 2833
+Og0K 2834
+IExvZw== 2835
+ZXJ2ZXI= 2836
+ICksCg== 2837
+YW1lbnQ= 2838
+IG9iag== 2839
+aW5j 2840
+IGNoaWxkcmVu 2841
+aWN5 2842
+SVo= 2843
+YW5kcw== 2844
+YWJseQ== 2845
+IGRpc3RyaWI= 2846
+IGN1cg== 2847
+ZXJpYWw= 2848
+IGRheXM= 2849
+cmVhdGVk 2850
+cmVjdA== 2851
+LWw= 2852
+aXJt 2853
+aWRkZW4= 2854
+b21i 2855
+IGluaXRpYWw= 2856
+Lmpz 2857
+IOI= 2858
+UXVlcnk= 2859
+IG9ubGluZQ== 2860
+aW1hbA== 2861
+LmNvbg== 2862
+YXU= 2863
+VXJs 2864
+Y29udHJvbA== 2865
+aXJlY3Rpb24= 2866
+IGluc3RhbmNl 2867
+T1JU 2868
+IEZy 2869
+d2hlcmU= 2870
+IGphdmF4 2871
+IG9yZ2Fu 2872
+YXB0ZXI= 2873
+IHJlYXNvbg== 2874
+b3B0aW9ucw== 2875
+IE1hcg== 2876
+KGE= 2877
+IHdpdGhpbg== 2878
+LuKAnQoK 2879
+T0RF 2880
+X0RF 2881
+YWRtaW4= 2882
+ZW5kZWQ= 2883
+IGRlc2lnbg== 2884
+IERhdGE= 2885
+dW5l 2886
+IEZpbGU= 2887
+cm9vdA== 2888
+IGNlbnQ= 2889
+IGFycg== 2890
+X2FkZA== 2891
+bGVu 2892
+cGFnZQ== 2893
+LCc= 2894
+X3N0cg== 2895
+IGJybw== 2896
+YWJpbGl0eQ== 2897
+b3V0aA== 2898
+L2M= 2899
+cG9zZQ== 2900
+aXJ0dWFs 2901
+ZWFyY2g= 2902
+X3VybA== 2903
+YXJnaW4= 2904
+SHR0cA== 2905
+IHNjaG9vbA== 2906
+YXZh 2907
+IGNvbnNpZGVy 2908
+LmxhYmVs 2909
+IEFycmF5 2910
+d2Vi 2911
+b3B0 2912
+LnByaW50bG4= 2913
+dWxhdGlvbg== 2914
+IGZ1bmM= 2915
+UEw= 2916
+ICJc 2917
+IFRleHQ= 2918
+YWN0b3J5 2919
+KGZ1bmN0aW9u 2920
+bnVsbA== 2921
+IGVuZw== 2922
+ZG93bg== 2923
+IGluY2x1ZGU= 2924
+IEVu 2925
+IERy 2926
+IGRi 2927
+ISE= 2928
+c2lkZQ== 2929
+IGluaXQ= 2930
+cXVpcmVk 2931
+IFNoZQ== 2932
+Q29sdW1u 2933
+cmVhY3Q= 2934
+IGFubg== 2935
+IHN0b3A= 2936
+IGxhdGVy 2937
+IFRoYXQ= 2938
+ZW50aW9u 2939
+ZGY= 2940
+VUc= 2941
+SUxF 2942
+IGNsaWVudA== 2943
+cmFmdA== 2944
+ZmZlcg== 2945
+UE9TVA== 2946
+ZWxwZXI= 2947
+IGxvdmU= 2948
+cXVvdGU= 2949
+b3Vk 2950
+IGpzb24= 2951
+IGFibGU= 2952
+IG1lbg== 2953
+QVg= 2954
+IENvcHlyaWdodA== 2955
+w7Y= 2956
+YXZpZw== 2957
+cmVx 2958
+Q2xpZW50 2959
+fSk7Cg== 2960
+LkNvbQ== 2961
+ZXJj 2962
+aWx0 2963
+cGVjaWFs 2964
+X2NvbQ== 2965
+cm9vbQ== 2966
+Lk5hbWU= 2967
+IGdpdmU= 2968
+YW1i 2969
+aWtl 2970
+IGNvbmRpdGlvbg== 2971
+Y2xpZW50 2972
+YXRvcnM= 2973
+OiI= 2974
+IGNvcHk= 2975
+dXR1cmU= 2976
+aXZlcnNpdHk= 2977
+ZXJuYWw= 2978
+e3s= 2979
+IENhbg== 2980
+b3VuYw== 2981
+ZG8= 2982
+IG9jYw== 2983
+IGFwcHJv 2984
+dGhlcnM= 2985
+emU= 2986
+IGVpdGhlcg== 2987
+IEZs 2988
+IGltcG9ydGFudA== 2989
+IGxlYWQ= 2990
+YXR0cg== 2991
+QVJU 2992
+RXF1YWw= 2993
+IGRh 2994
+ZXRjaA== 2995
+ZW50aXR5 2996
+IGZhbWlseQ== 2997
+YWRkaW5n 2998
+IG9wdGlvbg== 2999
+IGV4aXN0 3000
+aWNh 3001
+IE9iamVjdA== 3002
+J3Zl 3003
+dmVycw== 3004
+aXRpb25hbA== 3005
+b3V0cHV0 3006
+IFRydWU= 3007
+IE9G 3008
+X3RpbWU= 3009
+IG9mZmVy 3010
+IH0pOwoK 3011
+SEVS 3012
+ZWdpbg== 3013
+IiI= 3014
+IHdhdGVy 3015
+IGNoZQ== 3016
+IE15 3017
+b3JlZA== 3018
+IHN0ZXA= 3019
+YW5jZXM= 3020
+Q0s= 3021
+QVk= 3022
+4Lg= 3023
+c3RydWN0aW9u 3024
+KEM= 3025
+b3VjaA== 3026
+U3RyZWFt 3027
+YWN0aXZl 3028
+YW1h 3029
+RW50aXR5 3030
+cHJvZHVjdA== 3031
+KCl7Cg== 3032
+IGdvdmVybm1lbnQ= 3033
+IElE 3034
+YWpvcg== 3035
+QW5k 3036
+IGRpc3BsYXk= 3037
+0Ls= 3038
+IHRpbWVz 3039
+IGZvdXI= 3040
+IGZhcg== 3041
+IHByZXNlbnQ= 3042
+IE5T 3043
+IFwK 3044
+dWVzdA== 3045
+IGJhcw== 3046
+ZWNobw== 3047
+Y2hpbGQ= 3048
+aWZpZXI= 3049
+SGFuZGxlcg== 3050
+IGxpYg== 3051
+UHJvcGVydHk= 3052
+dHJhbnNsYXRpb24= 3053
+IHJvb20= 3054
+IG9uY2U= 3055
+IFtd 3056
+Y2VudGVy 3057
+PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT0= 3058
+IHJlc3VsdHM= 3059
+IGNvbnRpbnVl 3060
+IHRhbGs= 3061
+X2dldA== 3062
+IGdyb3c= 3063
+LnN3 3064
+ZWI= 3065
+IFB1YmxpYw== 3066
+T1A= 3067
+ZWN1dGU= 3068
+b2xz 3069
+ICoq 3070
+Iik7Cgo= 3071
+IG1hc3M= 3072
+dXJlZA== 3073
+LmNsYXNz 3074
+b21pYw== 3075
+IG1lYW4= 3076
+aXBz 3077
+IGF1dA== 3078
+KTsNCg0K 3079
+IHVudGls 3080
+IG1hcmtldA== 3081
+IGFyZWE= 3082
+dWl0 3083
+IGxlbmd0aA== 3084
+IFdpdGg= 3085
+c3RydWN0b3I= 3086
+ZXZlbnQ= 3087
+Ij48 3088
+IFNw 3089
+SVY= 3090
+IG11cw== 3091
+aWZm 3092
+IGtpbmQ= 3093
+YXV0aG9y 3094
+b3VuZHM= 3095
+bWI= 3096
+X2tleQ== 3097
+d2lkdGg= 3098
+cG9zaXRvcnk= 3099
+IGxpZ2h0 3100
+dWs= 3101
+Um93 3102
+b2hu 3103
+YWxm 3104
+dmlyb25tZW50 3105
+YXBwZXI= 3106
+b2xsZWN0aW9ucw== 3107
+IHNpZGU= 3108
+X2luZm8= 3109
+IGV4YW1wbGU= 3110
+aW1hcnk= 3111
+IHdy 3112
+IGNhbXA= 3113
+Y3JpYmU= 3114
+Ii8= 3115
+IG1pc3M= 3116
+d2F5 3117
+IGJhc2Vk 3118
+IHBsYW4= 3119
+Vmlz 3120
+b21haW4= 3121
+dW5r 3122
+IGF3YXk= 3123
+VVA= 3124
+PFQ= 3125
+T1M= 3126
+aW9k 3127
+IE1vbg== 3128
+4oCZcmU= 3129
+IGxpaw== 3130
+w6c= 3131
+aXZlbHk= 3132
+LnY= 3133
+aW1lcg== 3134
+aXplcg== 3135
+U3Vi 3136
+IGJ1dHRvbg== 3137
+IFVw 3138
+IGV4cGVyaWVuY2U= 3139
+Q0w= 3140
+IHJlbmRlcg== 3141
+X3ZhbHVl 3142
+IG5lYXI= 3143
+VVJM 3144
+YWx0 3145
+IGNvdW50cnk= 3146
+aWJpbGl0eQ== 3147
+KCksCg== 3148
+ZWFk 3149
+IGF1dGhvcg== 3150
+IHNwZWNpZmlj 3151
+YmFzZQ== 3152
+KG5hbWU= 3153
+b25lcw== 3154
+IERv 3155
+IGFsb25n 3156
+eWVhcg== 3157
+IGV4cHJlc3M= 3158
+Lic= 3159
+ZW52 3160
+IGJlZ2lu 3161
+IHNvZnR3YXJl 3162
+IGltcA== 3163
+IHdpbg== 3164
+w7Nu 3165
+IHRoaW5n 3166
+VHJhbnM= 3167
+IFRIRQ== 3168
+IDw/ 3169
+IHdoeQ== 3170
+IGRvZXNu 3171
+aWo= 3172
+Z2luZw== 3173
+CWc= 3174
+IHNpbmdsZQ== 3175
+b2Zmc2V0 3176
+YXJuaW5n 3177
+b2dyYXBo 3178
+bGV5 3179
+X2NvdW50 3180
+IGFuYWw= 3181
+Y3JlYXRl 3182
+L20= 3183
+IFJlZw== 3184
+dW5jaA== 3185
+PSQ= 3186
+aXNr 3187
+IHJpZ2h0cw== 3188
+KE0= 3189
+ICIiIgo= 3190
+YXBlcg== 3191
+Lm1vZGVs 3192
+IHBv 3193
+ZW1wdHk= 3194
+YXJ0bWVudA== 3195
+IGFudA== 3196
+IFdoZW4= 3197
+IHdvbWVu 3198
+IEVk 3199
+IHNlYXNvbg== 3200
+IGRlc3Q= 3201
+w6M= 3202
+KGg= 3203
+IHBvc3NpYmxl 3204
+IHNldmVy 3205
+IGJ0bg== 3206
+IGRpZG4= 3207
+IHNlbnQ= 3208
+IGVuYw== 3209
+IGNvbW1hbmQ= 3210
+IF0sCg== 3211
+X3g= 3212
+IHJlY2VudA== 3213
+b2x1dGlvbg== 3214
+dmVjdG9y 3215
+IEJ5 3216
+IE1heQ== 3217
+IEFjdA== 3218
+u78= 3219
+IG1vbmV5 3220
+SU5U 3221
+YnNpdGU= 3222
+CXA= 3223
+Lg0K 3224
+77u/ 3225
+c2w= 3226
+YXR0ZXJu 3227
+IENsYXNz 3228
+IHRvbGQ= 3229
+dWRpbw== 3230
+Y3VycmVudA== 3231
+IGVxdQ== 3232
+IGF1dG8= 3233
+IFN0YXRl 3234
+ZGE= 3235
+bXNn 3236
+KSk7Cgo= 3237
+IHdvcmtpbmc= 3238
+IHF1ZXJ5 3239
+IEJy 3240
+IHdpbmRvdw== 3241
+YXV0aA== 3242
+b25seQ== 3243
+CXQ= 3244
+IGxlYXN0 3245
+YWdu 3246
+IGV4cGw= 3247
+aXR0ZXI= 3248
+YXJpbmc= 3249
+IGNvbHVtbg== 3250
+IEdlbmVyYWw= 3251
+Ijoi 3252
+ZXJhbA== 3253
+cmlvcg== 3254
+IHJlY29yZA== 3255
+SUI= 3256
+RVg= 3257
+IGRhdA== 3258
+IG1ha2luZw== 3259
+dWVk 3260
+IENhcg== 3261
+ZW1w 3262
+Ii4= 3263
+IE1lZA== 3264
+IGNsb3Nl 3265
+IHBlcmNlbnQ= 3266
+IHBhc3Q= 3267
+KGc= 3268
+Oig= 3269
+IHdyaXRl 3270
+IG1vdmU= 3271
+IHBhdA== 3272
+Q29udHJvbA== 3273
+LlRv 3274
+IHZp 3275
+Ki8K 3276
+aW5hdGU= 3277
+J2xs 3278
+YWdlZA== 3279
+TnVsbA== 3280
+IHNwZWNpYWw= 3281
+SVpF 3282
+IGNpdHk= 3283
+LyoK 3284
+IEVuZw== 3285
+aXhlZA== 3286
+aW5hcnk= 3287
+cHk= 3288
+IGVmZg== 3289
+YXJpbw== 3290
+IHRlbGw= 3291
+YXZvcg== 3292
+IHNlbGVjdA== 3293
+bGV2ZWw= 3294
+aW11bQ== 3295
+b3Blcg== 3296
+QnVpbGRlcg== 3297
+SVA= 3298
+JyksCg== 3299
+ZXNj 3300
+IGZvbnQ= 3301
+IjsKCg== 3302
+IEFt 3303
+aXNoZWQ= 3304
+aWxscw== 3305
+SW50ZXI= 3306
+T1c= 3307
+IGNvdXJzZQ== 3308
+IGxhdGU= 3309
+aWRkbGU= 3310
+IGFtb3VudA== 3311
+IGFzeW5j 3312
+aW5v 3313
+Y3Vs 3314
+IOw= 3315
+YW5kbGU= 3316
+X3VzZXI= 3317
+IGJlbg== 3318
+IENhbA== 3319
+ICRf 3320
+IFJlcA== 3321
+IGVub3VnaA== 3322
+VG9rZW4= 3323
+LnVzZXI= 3324
+KGo= 3325
+U2M= 3326
+V2lkdGg= 3327
+bm93 3328
+YXRmb3Jt 3329
+IGxvb2tpbmc= 3330
+IGhvbGQ= 3331
+TW9kdWxl 3332
+SVRZ 3333
+dm8= 3334
+aXNvbg== 3335
+LkRhdGE= 3336
+eWM= 3337
+IHBvdA== 3338
+IFRydW1w 3339
+aWR1YWw= 3340
+aWRlcw== 3341
+cnQ= 3342
+IHByb3BlcnR5 3343
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA== 3344
+YW1ld29yaw== 3345
+Z28= 3346
+IGxvdw== 3347
+IHBhcmE= 3348
+IHByaWNl 3349
+dXJ5 3350
+IHRvZGF5 3351
+cm95 3352
+ICcv 3353
+IHBvbGl0 3354
+ICcn 3355
+eW1i 3356
+UGg= 3357
+IGFkdg== 3358
+IGF0dGFjaw== 3359
+IFN0ZQ== 3360
+Uk9N 3361
+YW5h 3362
+IG1lYW5z 3363
+IHN0b3J5 3364
+aWRz 3365
+YWtlbg== 3366
+IG1lZXQ= 3367
+IG1vbQ== 3368
+IOKAmA== 3369
+ID8+ 3370
+IGRlbg== 3371
+b2JpbGU= 3372
+Y2hhbmdl 3373
+ICAgICAgICAgICAgCg== 3374
+aWNp 3375
+bmE= 3376
+IEZvcm0= 3377
+IHNvcnQ= 3378
+U2VsZWN0 3379
+cGFyZQ== 3380
+IHRob3VnaHQ= 3381
+X2Nvbg== 3382
+IHRhc2s= 3383
+b2N1cw== 3384
+IERF 3385
+IE1pbg== 3386
+IG9wdA== 3387
+CWJyZWFr 3388
+dW1lcg== 3389
+S0U= 3390
+dGhlbg== 3391
+IGRldA== 3392
+IFRlc3Q= 3393
+cG9ydHM= 3394
+IHJldmlldw== 3395
+KCcv 3396
+bW92ZQ== 3397
+IHN3aXRjaA== 3398
+RVJU 3399
+cGF0Y2g= 3400
+YW5ub3Q= 3401
+44I= 3402
+IGFib3Zl 3403
+aXRpdmU= 3404
+IHF1ZXN0aW9u 3405
+IFF1 3406
+44CCCgo= 3407
+Z2xl 3408
+IHdvcmQ= 3409
+IHByb3ZpZGU= 3410
+IFJldHVybg== 3411
+IHJlc2VhcmNo 3412
+w6Nv 3413
+dXN0cg== 3414
+IHB1Ymxpc2g= 3415
+Y2hlbWE= 3416
+fX0= 3417
+IENPTg== 3418
+LWlu 3419
+YWxsYmFjaw== 3420
+IGNvdmVy 3421
+XFw= 3422
+Y29sb3I= 3423
+IElT 3424
+IHdoZXRoZXI= 3425
+aW1hdGU= 3426
+aXNj 3427
+QmFy 3428
+IGRpdg== 3429
+QmU= 3430
+b3Vybg== 3431
+IGhhdmluZw== 3432
+bGVt 3433
+cGxheWVy 3434
+YWJz 3435
+YW1lcmE= 3436
+bmV5 3437
+IGV4Yw== 3438
+Z2V0aGVy 3439
+cGxpZWQ= 3440
+YW8= 3441
+WyQ= 3442
+ICsr 3443
+aXBl 3444
+c2hvdw== 3445
+L2Q= 3446
+Wzo= 3447
+YWdlbWVudA== 3448
+bGV2 3449
+X0lE 3450
+cmFyeQ== 3451
+YWRlcw== 3452
+X3Nl 3453
+YXVzZQ== 3454
+IGVtcGxveQ== 3455
+ICovDQo= 3456
+IGZyZQ== 3457
+ICdA 3458
+IGNvbXBsZXQ= 3459
+IGxhcmdl 3460
+cmFs 3461
+XHg= 3462
+IGZhYw== 3463
+PFN0cmluZw== 3464
+IGNyZWF0ZWQ= 3465
+dXBlcg== 3466
+LnN0YXRl 3467
+IGhvc3Q= 3468
+ZW5lcmlj 3469
+L2I= 3470
+KCE= 3471
+d2hpbGU= 3472
+aWFz 3473
+QlVH 3474
+ICk7Cgo= 3475
+IHJvbGU= 3476
+UmVn 3477
+IENvbG9y 3478
+U3RhcnQ= 3479
+IHBvcm4= 3480
+dG9w 3481
+IHdlYg== 3482
+IGRldg== 3483
+IGRlYWw= 3484
+KyspCg== 3485
+SW50ZWdlcg== 3486
+cG9zaXRpb24= 3487
+Lm9u 3488
+ICgi 3489
+5Lg= 3490
+IHByb2JsZW0= 3491
+c3Y= 3492
+IHByZXNz 3493
+QUJMRQ== 3494
+QVRJT04= 3495
+IFNlZQ== 3496
+YW5jaA== 3497
+IHRob3VnaA== 3498
+bGVlcA== 3499
+IDwhLS0= 3500
+IHBvaW50cw== 3501
+ICAgICAgICAgICAgICAgICAgICAgICAgIA== 3502
+Lko= 3503
+IDo6 3504
+cHRy 3505
+REI= 3506
+Kys7Cg== 3507
+LnBuZw== 3508
+bm9kZQ== 3509
+c29mdA== 3510
+cG9uZA== 3511
+IGV2ZXI= 3512
+LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQ== 3513
+TWVudQ== 3514
+KCcj 3515
+IHNlcnZpY2Vz 3516
+cGc= 3517
+fSkK 3518
+cGFyYW1z 3519
+IGFjdHVhbGx5 3520
+ICIv 3521
+RW1wdHk= 3522
+TWV0aG9k 3523
+IGlkZW50 3524
+dW5pYw== 3525
+IG1pbGxpb24= 3526
+IGFmZg== 3527
+c3R5bGU= 3528
+IGNvbmM= 3529
+aW9z 3530
+aWdubWVudA== 3531
+VUxU 3532
+UHI= 3533
+IjsNCg== 3534
+IHVuZGVyc3RhbmQ= 3535
+dWFyeQ== 3536
+IGhhcHBlbg== 3537
+IHNlcnZlcg== 3538
+IENv 3539
+U0M= 3540
+IGxlcw== 3541
+IGZpbGVz 3542
+R3JpZA== 3543
+c3Fs 3544
+IG9mdGVu 3545
+IGluZm8= 3546
+X3Ry 3547
+c3Jj 3548
+b255 3549
+IHNwYWNl 3550
+dW1i 3551
+IHBhc3N3b3Jk 3552
+IHN0b3Jl 3553
+LAoK 3554
+IFdoYXQ= 3555
+Z2Vk 3556
+IEZhbHNl 3557
+VXM= 3558
+c3dlcg== 3559
+X2luZGV4 3560
+IGZvcm1hdA== 3561
+bW9zdA== 3562
+c20= 3563
+TmV3 3564
+IGRldGFpbHM= 3565
+IHByb2I= 3566
+IEFORA== 3567
+KCkNCg== 3568
+aWxhcg== 3569
+ICR7 3570
+cnlwdA== 3571
+LkNvbGxlY3Rpb25z 3572
+JHRoaXM= 3573
+IEZyZWU= 3574
+X29m 3575
+KGZhbHNl 3576
+ZGF0ZWQ= 3577
+ID4+ 3578
+IGZhY2U= 3579
+Q1RJT04= 3580
+IHNhdmU= 3581
+IHR5cA== 3582
+ZGV2 3583
+KCIj 3584
+QUdF 3585
+Y29udGFpbmVy 3586
+ZWRpdA== 3587
+UUw= 3588
+IGl0ZW1z 3589
+IHNvY2lhbA== 3590
+aWVu 3591
+IFJlYWN0 3592
+KS4KCg== 3593
+IG1hcg== 3594
+IHJlZHU= 3595
+IFJF 3596
+LnB1dA== 3597
+IG1ham9y 3598
+Q2VsbA== 3599
+bmV4dA== 3600
+IGV4cGVjdGVk 3601
+IHlldA== 3602
+IGluZGl2 3603
+dHJpYnV0ZXM= 3604
+YXRpcw== 3605
+YW1lZA== 3606
+IGZvb2Q= 3607
+U291cmNl 3608
+KHN0cmluZw== 3609
+ICsK 3610
+aXRlcw== 3611
+ZHI= 3612
+IG1lbWJlcnM= 3613
+IGNvbWI= 3614
+aXRlbXM= 3615
+IFBlcg== 3616
+VEg= 3617
+PVRydWU= 3618
+IGJhcg== 3619
+X1NF 3620
+Y29tbQ== 3621
+KHc= 3622
+KQoKCg== 3623
+IHNlbmQ= 3624
+IGluYw== 3625
+dW5zaWduZWQ= 3626
+RkE= 3627
+IHBhcmFtcw== 3628
+YXBwaW5n 3629
+cm9z 3630
+dWdpbg== 3631
+ZmE= 3632
+IGNvbm5lY3Rpb24= 3633
+IH07Cgo= 3634
+IGJlY29tZQ== 3635
+TW9kZQ== 3636
+IGV2 3637
+IGRpZmY= 3638
+IFVuaXRlZA== 3639
+SGVpZ2h0 3640
+ZnVsbHk= 3641
+aW1hZ2Vz 3642
+IG1ha2Vz 3643
+IGdsb2JhbA== 3644
+IGNvbnRhY3Q= 3645
+JzoK 3646
+IGFicw== 3647
+0LDQ 3648
+ZmxvYXQ= 3649
+IGV4Y2VwdA== 3650
+IFBvbA== 3651
+Q2hpbGQ= 3652
+dHlw 3653
+IGNlcnRhaW4= 3654
+acOzbg== 3655
+T1VU 3656
+IGltcHJv 3657
+aWxlcw== 3658
+IC0tPgo= 3659
+IFBhcnQ= 3660
+dmFsdWVz 3661
+b3Nz 3662
+Lyoq 3663
+aWxpdA== 3664
+IEV2ZW50 3665
+Y3VyaXR5 3666
+c3Rlcg== 3667
+IGNoYXJhY3Rlcg== 3668
+IG5ld3M= 3669
+ICIs 3670
+IGRldmljZQ== 3671
+Y2Vs 3672
+bG9naW4= 3673
+aGVldA== 3674
+RGVmYXVsdA== 3675
+QCI= 3676
+CSA= 3677
+Y2xpY2s= 3678
+KHZhbHVl 3679
+IEFi 3680
+IHByZXZpb3Vz 3681
+RVJST1I= 3682
+b2NhbA== 3683
+IG1hdGVyaWFs 3684
+IGJlbG93 3685
+IENocmlzdA== 3686
+IG1lZGlh 3687
+Y292ZXI= 3688
+IFVJ 3689
+IGZhaWw= 3690
+IGJsYWNr 3691
+IGNvbXBvbmVudA== 3692
+IEFtZXJpY2Fu 3693
+IGFkZGVk 3694
+IGJ1eQ== 3695
+c3RpdA== 3696
+IGNhbWU= 3697
+IGRlbGV0ZQ== 3698
+cHJvcGVydHk= 3699
+b2Rpbmc= 3700
+IGNhcmQ= 3701
+cm9wcw== 3702
+IGh0dHBz 3703
+IHJvb3Q= 3704
+IGhhbmRsZQ== 3705
+Q0M= 3706
+QmFjaw== 3707
+ZW1wbGF0ZQ== 3708
+IGdldHRpbmc= 3709
+X2J5 3710
+bWFpbA== 3711
+X3No 3712
+LmFzc2VydA== 3713
+IERlYw== 3714
+KHRydWU= 3715
+IGNvbXB1dA== 3716
+IGNsYWlt 3717
+Jz0+ 3718
+IFN1Yg== 3719
+IGFpcg== 3720
+b3Bz 3721
+bmF2 3722
+ZW1lbnRz 3723
+KGlk 3724
+IGVudGVy 3725
+YW5nZWQ= 3726
+RW5k 3727
+IGxvY2F0aW9u 3728
+IG5pZ2h0 3729
+IGRvaW5n 3730
+IFJlZA== 3731
+bGlu 3732
+fQoKCg== 3733
+dmlkZXI= 3734
+IHBpY2s= 3735
+IHdhdGNo 3736
+ZXNzYWdlcw== 3737
+IGh1bWFu 3738
+IGRhbQ== 3739
+cGVuZA== 3740
+ZGly 3741
+IHRheA== 3742
+IGdpcmw= 3743
+cmVldA== 3744
+IGJveA== 3745
+IHN0cm9uZw== 3746
+KHY= 3747
+cmVs 3748
+IGludGVyZmFjZQ== 3749
+IG1zZw== 3750
+ZmVjdA== 3751
+X2F0 3752
+IGhvdXNl 3753
+IHRyYWNr 3754
+Jyk7Cgo= 3755
+amU= 3756
+IEpvaG4= 3757
+aXN0cg== 3758
+KFM= 3759
+dWJl 3760
+IGNl 3761
+aXR0ZWQ= 3762
+VkVS 3763
+Kik= 3764
+cGFyZW50 3765
+IGFwcGxpY2F0aW9u 3766
+YW55 3767
+LnN3aW5n 3768
+IHBhY2s= 3769
+XHU= 3770
+IHByYWN0 3771
+IHNlY3Rpb24= 3772
+Y3R4 3773
+IHVuc2lnbmVk 3774
+LlBvaW50 3775
+IE9uZQ== 3776
+xLE= 3777
+aXBsZQ== 3778
+YWlk 3779
+0YM= 3780
+VmVjdG9y 3781
+Ynl0ZQ== 3782
+IHdhaXQ= 3783
+IMOg 3784
+w6U= 3785
+IHRvZ2V0aGVy 3786
+IHRocm93cw== 3787
+Rk8= 3788
+Jykp 3789
+aG9zdA== 3790
+aXNpbmc= 3791
+LnZpZXc= 3792
+IHRlcm1z 3793
+ZnJhbWV3b3Jr 3794
+LXI= 3795
+IGFwcGx5 3796
+IHNlc3Npb24= 3797
+T3B0aW9ucw== 3798
+dWdnZXN0 3799
+IG90aGVycw== 3800
+d2l0dGVy 3801
+IGZ1bmQ= 3802
+SW5pdA== 3803
+X18o 3804
+ZW5zb3I= 3805
+R0VU 3806
+IHNldmVyYWw= 3807
+aWk= 3808
+W2o= 3809
+SU8= 3810
+IHRlbXBsYXRl 3811
+UG9zaXRpb24= 3812
+IGVjb24= 3813
+YWNoaW5l 3814
+IGls 3815
+LnNwcmluZw== 3816
+bWFpbg== 3817
+ZWx0 3818
+aW1lbnQ= 3819
+UmVj 3820
+bW0= 3821
+IFVuaXZlcnNpdHk= 3822
+dXJzb3I= 3823
+ICAgICAgICAgICAgICAgICAgICA= 3824
+R0w= 3825
+aWN0dXJl 3826
+aXRodWI= 3827
+Y2Vy 3828
+Y2FzdA== 3829
+RnJvbQ== 3830
+YWxlcw== 3831
+IHN1YmplY3Q= 3832
+cGFzc3dvcmQ= 3833
+bnk= 3834
+IGVzYw== 3835
+LndyaXRl 3836
+77yM 3837
+V2hhdA== 3838
+Lkg= 3839
+IGhpc3Rvcnk= 3840
+IEZl 3841
+IGluZGl2aWR1YWw= 3842
+dW5pdA== 3843
+IC0tPg== 3844
+IGR1 3845
+SVNU 3846
+IHVzZXJz 3847
+ZnM= 3848
+ZmFsc2U= 3849
+dW50 3850
+VGl0bGU= 3851
+IG1vdA== 3852
+IGZ1dHVyZQ== 3853
+YWNoZWQ= 3854
+IHN0YXJ0ZWQ= 3855
+IG1vZGU= 3856
+ICc8 3857
+X2FycmF5 3858
+IGF4 3859
+J107Cg== 3860
+aXJlcw== 3861
+VGhlcmU= 3862
+dWdodA== 3863
+dG1s 3864
+cG9zZWQ= 3865
+aWN1bHQ= 3866
+IHRvb2s= 3867
+IGdhbWVz 3868
+IH19 3869
+ID8+Cg== 3870
+IHByb2R1Y3Rz 3871
+SXM= 3872
+IGJhZA== 3873
+IERlcw== 3874
+LnBhdGg= 3875
+JwoK 3876
+IFBvc3Q= 3877
+YXZlbA== 3878
+KDo= 3879
+IG5lZWRz 3880
+IGtub3du 3881
+Rmw= 3882
+IGV4ZWM= 3883
+IHNlZW4= 3884
+dW1l 3885
+IGJvcmRlcg== 3886
+IGxpdmU= 3887
+dGVtcA== 3888
+UGVy 3889
+IHZhcmlhYmxl 3890
+aWV0 3891
+IERlZg== 3892
+IGdl 3893
+ZW1l 3894
+X2JhY2s= 3895
+Zmlyc3Q= 3896
+IHByb3ZpZGVk 3897
+Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8= 3898
+IGZpbGVuYW1l 3899
+IGhvcGU= 3900
+dWx5 3901
+YXV0bw== 3902
+ZmluZA== 3903
+X3N0cmluZw== 3904
+YnRu 3905
+aXR1ZGU= 3906
+QXR0cmlidXRl 3907
+IHlvdW5n 3908
+LnR4dA== 3909
+IHdlYnNpdGU= 3910
+IFByb3A= 3911
+IGV5 3912
+PigpOwo= 3913
+aW9uYWw= 3914
+QVJS 3915
+aWN0aW9uYXJ5 3916
+dXJ0aGVy 3917
+Ljwv 3918
+QUxM 3919
+IHN0dWR5 3920
+aWxp 3921
+IG5ldHdvcms= 3922
+eWw= 3923
+aXN0YW5jZQ== 3924
+T0s= 3925
+TlU= 3926
+cmVzdA== 3927
+IFNU 3928
+aWNyb3NvZnQ= 3929
+IGxpbWl0 3930
+IGN1dA== 3931
+KCk6Cg== 3932
+IGNvdQ== 3933
+b2du 3934
+IHNpemVvZg== 3935
+aXZhbA== 3936
+IHdlbnQ= 3937
+Lno= 3938
+TGluaw== 3939
+IGZpcmU= 3940
+IGFjcm9zcw== 3941
+IGNvbW11bml0eQ== 3942
+cmVnaW9u 3943
+TkU= 3944
+UmVm 3945
+IG9mZmljaWFs 3946
+IHZpc2l0 3947
+b2x2ZQ== 3948
+IHJlY2VpdmVk 3949
+IHRva2Vu 3950
+IG1vbnRocw== 3951
+IGFuaW0= 3952
+IHBhcnRpY3VsYXI= 3953
+c3R5bGVz 3954
+aWNv 3955
+IGVzcw== 3956
+LkNvbnRyb2w= 3957
+IMOp 3958
+YmFsbA== 3959
+IGxlYXJu 3960
+aW5kaW5n 3961
+VmFy 3962
+IGRlY2w= 3963
+KGVycg== 3964
+TEVDVA== 3965
+T25l 3966
+cGhh 3967
+IH4= 3968
+Zm9ydA== 3969
+YXN1cmU= 3970
+IG1pbmQ= 3971
+IEVuZA== 3972
+Q2hlY2s= 3973
+IHF1aWNr 3974
+Iiks 3975
+QU5E 3976
+dXRpb25z 3977
+QmFzZQ== 3978
+X19fX19fX18= 3979
+IGNvbW1lbnQ= 3980
+SU5F 3981
+4oCZdmU= 3982
+QnV0 3983
+IEVs 3984
+IFVz 3985
+IGFkbWlu 3986
+bWFyaw== 3987
+IE5hbWU= 3988
+YAo= 3989
+IFR5cGU= 3990
+YW1pYw== 3991
+cGM= 3992
+bG9vcg== 3993
+RlQ= 3994
+IG9wcA== 3995
+Y2tldA== 3996
+KS0+ 3997
+dHg= 3998
+IHB1cg== 3999
+dWVs 4000
+eW1ib2w= 4001
+dWF0aW9u 4002
+YW5nZXI= 4003
+IGJhY2tncm91bmQ= 4004
+ZWNlc3M= 4005
+ZWZpbmVk 4006
+Li4uLi4uLi4= 4007
+IGRlc2NyaXB0aW9u 4008
+IHJlcHJlc2VudA== 4009
+IikpOwo= 4010
+cHJlc3Npb24= 4011
+cm93c2Vy 4012
+IHNlcmllcw== 4013
+d2FyZHM= 4014
+KCRf 4015
+YWlzZQ== 4016
+IGhvdA== 4017
+YWNpdHk= 4018
+cmllcw== 4019
+YWN0aW9ucw== 4020
+Q3JlYXRl 4021
+YWRpbw== 4022
+YW1wbGVz 4023
+IG9yaWdpbmFs 4024
+ZW5zaXZl 4025
+Zm9udA== 4026
+c3RyZWFt 4027
+77u/dXNpbmc= 4028
+LnNwcmluZ2ZyYW1ld29yaw== 4029
+c2VydmVy 4030
+IGJpbGw= 4031
+QUNL 4032
+aWxlbmFtZQ== 4033
+IGZyYW1l 4034
+ID0K 4035
+RWRpdA== 4036
+YWRpdXM= 4037
+IGRyYXc= 4038
+YW5rcw== 4039
+IGRldGVy 4040
+IGNvbWVz 4041
+X2ludA== 4042
+IGZvcmVhY2g= 4043
+YW5nbGU= 4044
+IGVsZWN0 4045
+cGVjdGVk 4046
+SGVhZGVy 4047
+aXN0cmF0aW9u 4048
+RmFsc2U= 4049
+IEdhbWU= 4050
+IGZpbHRlcg== 4051
+QWN0aXZpdHk= 4052
+IGxhcmc= 4053
+aW5pdGlvbg== 4054
+ICI8 4055
+aXNlZA== 4056
+IHJlbW92ZQ== 4057
+IFRyYW5z 4058
+bWV0 4059
+c2Vl 4060
+Rm9ybWF0 4061
+Q29tbWFuZA== 4062
+IEVY 4063
+Tm9uZQ== 4064
+IGZyb250 4065
+QVNF 4066
+IFJlYw== 4067
+b3VuZGF0aW9u 4068
+IHZv 4069
+PVwi 4070
+KCo= 4071
+Q2hhbmdl 4072
+LldyaXRl 4073
+Z3JvdXA= 4074
+aWVudHM= 4075
+dXk= 4076
+KioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKg== 4077
+IGRpZw== 4078
+aHI= 4079
+KC0= 4080
+IGdlbg== 4081
+bnVtYmVy 4082
+dmVj 4083
+dXJvcGU= 4084
+ZW50cnk= 4085
+TEw= 4086
+IHN0ZQ== 4087
+VmFsaWQ= 4088
+J10s 4089
+X3BhcmFt 4090
+IHNlbGVjdGVk 4091
+IGFjY29yZGluZw== 4092
+IERpcw== 4093
+IHV0aWw= 4094
+QnVmZmVy 4095
+X2Vycm9y 4096
+IGFzc29jaQ== 4097
+X1NJWkU= 4098
+IHdvcg== 4099
+IHByaW50Zg== 4100
+cmFn 4101
+wqA= 4102
+REQ= 4103
+IFZhbA== 4104
+IGFjdGl2 4105
+RW5n 4106
+ZXRpbWU= 4107
+IHZpcnR1YWw= 4108
+YWlnbg== 4109
+YXVy 4110
+IFByZXM= 4111
+IEV4Y2VwdGlvbg== 4112
+IGFueXRoaW5n 4113
+IE9mZg== 4114
+IGhvdXJz 4115
+IHdhcg== 4116
+QXJncw== 4117
+YWdpbmc= 4118
+IG1vZGVscw== 4119
+IFRpbWU= 4120
+T2I= 4121
+YW1z 4122
+am95 4123
+IGVhcmx5 4124
+LnJlYWQ= 4125
+IGNlbnRlcg== 4126
+IEluaXRpYWw= 4127
+IGxhbmd1YWdl 4128
+bGVuZ3Ro 4129
+eHk= 4130
+IHNu 4131
+IGluZg== 4132
+UG9zdA== 4133
+IGFnbw== 4134
+IGVhc3k= 4135
+X2NvZGU= 4136
+IEFOWQ== 4137
+X2No 4138
+IGRvd25sb2Fk 4139
+KFQ= 4140
+YXZlZA== 4141
+4oCT 4142
+IHN0dWRlbnRz 4143
+IGZpZw== 4144
+bGlnaHQ= 4145
+eHg= 4146
+IGJ1ZmZlcg== 4147
+IERlcA== 4148
+IE1hdGg= 4149
+SVRI 4150
+IHZhcmk= 4151
+IGR1ZQ== 4152
+RmFjdG9yeQ== 4153
+IHBvcg== 4154
+IGVw 4155
+b3R5cGU= 4156
+IGNhbm5vdA== 4157
+IHdoaXRl 4158
+PGludA== 4159
+dGVybg== 4160
+IHJlZ2lzdGVy 4161
+IHByZWQ= 4162
+Y2x1cw== 4163
+X2RhdGU= 4164
+IC8qKg== 4165
+IGF1dGg= 4166
+IFtdCg== 4167
+IHBlcmlvZA== 4168
+bm93bg== 4169
+IHZvdA== 4170
+IHNjcmVlbg== 4171
+J2Q= 4172
+VHlwZXM= 4173
+IHRtcA== 4174
+0LXQ 4175
+dXJhbA== 4176
+IGJlbmVm 4177
+X3k= 4178
+IG5ldA== 4179
+IFN0YXRlcw== 4180
+J11bJw== 4181
+IE5l 4182
+IE5PVA== 4183
+IG5lZw== 4184
+IGNvbW1vbg== 4185
+c2NvcGU= 4186
+IGNyZWQ= 4187
+Z2Vz 4188
+X1RZUEU= 4189
+IHN1Z2dlc3Q= 4190
+b29t 4191
+LgoKCg== 4192
+IGFjY2VwdA== 4193
+IHJhbmRvbQ== 4194
+ZXJt 4195
+IFZlY3Rvcg== 4196
+d2l0aA== 4197
+VEVS 4198
+KHN0cg== 4199
+IHJlc3BvbnM= 4200
+IGhpdA== 4201
+LlNldA== 4202
+Z3JpZA== 4203
+cmlh 4204
+IGNsaWNr 4205
+dW5kbGU= 4206
+Q2FzZQ== 4207
+aW5zZXJ0 4208
+VXRpbHM= 4209
+ICIiIg== 4210
+IGltcGxlbWVudA== 4211
+YXRhbA== 4212
+dGVtcHQ= 4213
+dGVtcGxhdGU= 4214
+b2Ny 4215
+cmV0dXJucw== 4216
+IHBsYXllcnM= 4217
+dXNlcnM= 4218
+ZWRlZg== 4219
+IFRoZXNl 4220
+IGFtb25n 4221
+IGRlYg== 4222
+aGE= 4223
+LmdldEVsZW1lbnQ= 4224
+IGNpcmM= 4225
+IGFuc3dlcg== 4226
+IHdhbGs= 4227
+IHRyZWF0 4228
+IEdl 4229
+IENyZWF0ZQ== 4230
+IGFnZQ== 4231
+IHJlcQ== 4232
+T1NU 4233
+YW5ndWxhcg== 4234
+0Y8= 4235
+IGZpdmU= 4236
+IGRpc3RyaWJ1dGVk 4237
+IGZyaWVuZA== 4238
+VFA= 4239
+IGNsZWFu 4240
+b3dz 4241
+LkNvbnRyb2xz 4242
+ZGlz 4243
+IHdvcmRz 4244
+Lmlv 4245
+enk= 4246
+IGhlYWRlcg== 4247
+IENoZWNr 4248
+4oCZbQ== 4249
+anVzdA== 4250
+aG9sZGVy 4251
+PSI8Pw== 4252
+IEdOVQ== 4253
+IENvbA== 4254
+aW1lc3Q= 4255
+ZW50aWM= 4256
+ewoK 4257
+IHRyZQ== 4258
+bGFzdA== 4259
+bGE= 4260
+IFlvcms= 4261
+TG8= 4262
+IGRpc2N1c3M= 4263
+IEdvZA== 4264
+IGlzc3Vl 4265
+cmV3 4266
+V2luZG93 4267
+IGxhbmQ= 4268
+IHN0cmVhbQ== 4269
+IFBhcg== 4270
+IHF1YWxpdHk= 4271
+UGFy 4272
+X251bQ== 4273
+IHNhbA== 4274
+ZWx2ZXM= 4275
+T1JE 4276
+KHVzZXI= 4277
+IHdvcmtz 4278
+IGhhbGY= 4279
+ZW5zZXM= 4280
+dmFz 4281
+IHBvbGljZQ== 4282
+KCIv 4283
+dWE= 4284
+IHNpbXBsZQ== 4285
+QWRkcmVzcw== 4286
+IGVtcHR5 4287
+ZXNo 4288
+VXBkYXRl 4289
+IENyZWF0ZWQ= 4290
+KCcu 4291
+KS4K 4292
+ICAgICAgICAgICAgICAgICAg 4293
+IGFncmU= 4294
+IEZST00= 4295
+IGNvb2s= 4296
+IGV2ZXJ5dGhpbmc= 4297
+aWxpdGllcw== 4298
+LnN0YXR1cw== 4299
+IHJlbGF0aW9ucw== 4300
+ZXh0ZXJu 4301
+IG5vdGhpbmc= 4302
+IHJ1bm5pbmc= 4303
+CXZvaWQ= 4304
+Ukk= 4305
+X2E= 4306
+X0NPTg== 4307
+cG9y 4308
+LnN1Yg== 4309
+cmVxdWlyZQ== 4310
+IENpdHk= 4311
+IFdlc3Q= 4312
+IG1vcg== 4313
+c3RvcmU= 4314
+RXF1YWxz 4315
+b2Rlcg== 4316
+IG5h 4317
+IFtb 4318
+ICgn 4319
+IERvbg== 4320
+RVJT 4321
+L3A= 4322
+Lmpzb24= 4323
+YWJvcg== 4324
+IHNvbWVvbmU= 4325
+X3RleHQ= 4326
+LmNzcw== 4327
+LlRhYg== 4328
+IFNvbWU= 4329
+YXRv 4330
+ZG91Ymxl 4331
+IHNoYXJl 4332
+KHZvaWQ= 4333
+X2Rpcg== 4334
+IHVy 4335
+U3RhY2s= 4336
+IFdvcmxk 4337
+Llg= 4338
+c3RyYWN0 4339
+SG93 4340
+LkdlbmVyaWM= 4341
+aWNsZXM= 4342
+IGVudHJ5 4343
+IGNoYW5nZXM= 4344
+IHBlcnNvbmFs 4345
+KEE= 4346
+IG9mZnNldA== 4347
+X3B0cg== 4348
+IHBpZQ== 4349
+IEphbg== 4350
+LWdyb3Vw 4351
+bW9kdWxl 4352
+SXRlbXM= 4353
+IEhvd2V2ZXI= 4354
+dmVyYWdl 4355
+LkZvbnQ= 4356
+IGV2ZW50cw== 4357
+Lm1pbg== 4358
+IGludm9s 4359
+emE= 4360
+IHdob2xl 4361
+IG5lZWRlZA== 4362
+IGxpa2VseQ== 4363
+cmllZg== 4364
+T1JN 4365
+dmVyc2lvbg== 4366
+IGZpZ2h0 4367
+IGVpbg== 4368
+RnJhbWU= 4369
+Z2Vu 4370
+IE91dA== 4371
+YXZpZ2F0aW9u 4372
+TGVuZ3Ro 4373
+aWxsZWQ= 4374
+cXVlbmNl 4375
+ICE9PQ== 4376
+IFNvZnR3YXJl 4377
+IHdyaXRpbmc= 4378
+IHJhdGU= 4379
+J10sCg== 4380
+UGFuZWw= 4381
+aW5uZXI= 4382
+IFsi 4383
+IHR3 4384
+Y2Q= 4385
+IDsK 4386
+X3N0YXRl 4387
+IFNt 4388
+IE1hcms= 4389
+KSkKCg== 4390
+cHJvdA== 4391
+IE1y 4392
+bWV0aG9k 4393
+dXN0b21lcg== 4394
+SWNvbg== 4395
+IGNvcnJlY3Q= 4396
+KG9iamVjdA== 4397
+IE1vcmU= 4398
+IGZhbGw= 4399
+IHZvbA== 4400
+IGRldmVsb3BtZW50 4401
+ZW50bHk= 4402
+IHNp 4403
+bWVkaQ== 4404
+dmluZw== 4405
+UFA= 4406
+YWtlcg== 4407
+IGluZHU= 4408
+IGVsaWY= 4409
+IHByZXQ= 4410
+IGJlbGlldmU= 4411
+bnM= 4412
+b21ldA== 4413
+IEludGVybg== 4414
+UmVjdA== 4415
+U28= 4416
+LmVycm9y 4417
+UmVhZA== 4418
+IGZlYXR1cmVz 4419
+IG1pbnV0ZXM= 4420
+LS0t 4421
+YXNpbmc= 4422
+Y3JldA== 4423
+Ij4NCg== 4424
+LmFubm90 4425
+IGNvbGxlY3Rpb24= 4426
+Jy4= 4427
+IHNpbWlsYXI= 4428
+IHRha2Vu 4429
+KCIl 4430
+T3JkZXI= 4431
+J10K 4432
+LW1k 4433
+IFRI 4434
+YWNlZA== 4435
+IGlzbg== 4436
+L2o= 4437
+IHNvbg== 4438
+Z3JhcGg= 4439
+IEludGVnZXI= 4440
+IG5lY2Vzcw== 4441
+cmVlbg== 4442
+IHVt 4443
+IFw8 4444
+IG1vbWVudA== 4445
+IGJyaW5n 4446
+IGluZGlj 4447
+eXNpcw== 4448
+TGV2ZWw= 4449
+dmVyc2U= 4450
+dXJyZW5j 4451
+X3Rlc3Q= 4452
+IGVudGlyZQ== 4453
+RG93bg== 4454
+IH0KCgo= 4455
+KHJlc3VsdA== 4456
+IFJlYWQ= 4457
+w6g= 4458
+TW9k 4459
+IHRyeWluZw== 4460
+IiksCg== 4461
+IG1lbWJlcg== 4462
+IENvcg== 4463
+T0RP 4464
+LWNvbnRyb2w= 4465
+dW50aW1l 4466
+IFNpbQ== 4467
+RGlhbG9n 4468
+cGxvdA== 4469
+X29u 4470
+IHBoeXM= 4471
+fS8= 4472
+IG5hbWVzcGFjZQ== 4473
+CQ0K 4474
+YWNj 4475
+UGxheWVy 4476
+QVJF 4477
+IGZvb3Q= 4478
+IGJvYXJk 4479
+cGFydA== 4480
+IHN1cw== 4481
+d2lzZQ== 4482
+IE1j 4483
+IHB1c2g= 4484
+QVRB 4485
+IHBsZWFzZQ== 4486
+cmllZA== 4487
+d2VldA== 4488
+Yml0 4489
+aWRlZA== 4490
+VkU= 4491
+IFN3 4492
+VUI= 4493
+IHR5cGVz 4494
+ZWRpYQ== 4495
+IGNsb3M= 4496
+YWNlYm9vaw== 4497
+V2hlbg== 4498
+IGVkaXQ= 4499
+aWdnZXI= 4500
+IGVuZXJn 4501
+Q29udGFpbmVy 4502
+IHBob3Q= 4503
+IENvdW50 4504
+IEV1cm9wZQ== 4505
+Lklz 4506
+IFJ1c3M= 4507
+cGVlZA== 4508
+IFN0cg== 4509
+IHB5 4510
+IGN1bHQ= 4511
+IGRlZmluZWQ= 4512
+Y2NvdW50 4513
+IG9idA== 4514
+LkxvY2F0aW9u 4515
+IHRocmVhZA== 4516
+aWxsZQ== 4517
+IGluc3RlYWQ= 4518
+c3Ryb25n 4519
+IFNlYw== 4520
+VVJF 4521
+IGlkZWE= 4522
+LnNl 4523
+ZW15 4524
+c2VsZWN0ZWQ= 4525
+Q29ubmVjdGlvbg== 4526
+YWNpbmc= 4527
+dGhyZWFk 4528
+Lm5leHQ= 4529
+IGNvbGw= 4530
+IGZpbG0= 4531
+aXN0aWM= 4532
+IGNvbXBldA== 4533
+IGNvbm4= 4534
+dGhvdWdo 4535
+IGNvbXBhbg== 4536
+b2NrZXQ= 4537
+IHRlYWNo 4538
+PSg= 4539
+IHBob25l 4540
+IGFjdGl2ZQ== 4541
+ZGVsZXRl 4542
+dHJpZXM= 4543
+IG1v 4544
+IGRlYXRo 4545
+fSk7Cgo= 4546
+b2NvbA== 4547
+V2lkZ2V0 4548
+IGFydGljbGU= 4549
+cm9kdQ== 4550
+YW5kaWQ= 4551
+0Ys= 4552
+IENy 4553
+a2E= 4554
+KCk6 4555
+bG9vZA== 4556
+CQkJCg== 4557
+IGFsbW9zdA== 4558
+IHNlbGw= 4559
+ZXJ2bGV0 4560
+cmlw 4561
+VW5pdA== 4562
+IGFwcGxpYw== 4563
+IGNvbm5lY3Q= 4564
+IGZlYXR1cmU= 4565
+IHZpYQ== 4566
+Jyks 4567
+IGxpbQ== 4568
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA= 4569
+IEd1 4570
+RW5naW5l 4571
+IGVucw== 4572
+IGVudmlyb25tZW50 4573
+YmxvY2s= 4574
+SEVSRQ== 4575
+TlVMTA== 4576
+Z3k= 4577
+dGFn 4578
+KSku 4579
+ZXhw 4580
+IGNvbXBs 4581
+IGluc3RhbGw= 4582
+IGNvbXBsZXRl 4583
+cXVldWU= 4584
+YXR1cmFs 4585
+IGdlbmVyYWw= 4586
+dGhvbg== 4587
+IGFza2Vk 4588
+b3Jlcw== 4589
+KHJlcw== 4590
+IHJlc2VydmVk 4591
+U1A= 4592
+IOKApg== 4593
+xYI= 4594
+IHNpZ25pZmlj 4595
+T2Zm 4596
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICA= 4597
+IEFn 4598
+IEp1c3Q= 4599
+IEVycm9y 4600
+IGluZmw= 4601
+YWRhdGE= 4602
+IGljb24= 4603
+YXNrcw== 4604
+Jyc= 4605
+X0xP 4606
+Py4= 4607
+YWNjb3VudA== 4608
+ICgq 4609
+JykKCg== 4610
+cmFw 4611
+X3Zhcg== 4612
+IEZPUg== 4613
+IHBhcnR5 4614
+IFlvdXI= 4615
+Y2F0 4616
+c3RyeQ== 4617
+Lm5ldw== 4618
+Ym9vdA== 4619
+IE5vdg== 4620
+IHZlY3Rvcg== 4621
+IG5vcm1hbA== 4622
+IGZ1cnRoZXI= 4623
+UmVwb3NpdG9yeQ== 4624
+IGRhdGFiYXNl 4625
+YXR0bGU= 4626
+IG11c2lj 4627
+IHNwZWVk 4628
+IGRvYw== 4629
+cHJvY2Vzcw== 4630
+SUdIVA== 4631
+LnBhcnNl 4632
+IHRha2luZw== 4633
+IHZpb2w= 4634
+Y2VlZA== 4635
+IEFmdGVy 4636
+IGZvcndhcmQ= 4637
+IGNyaXQ= 4638
+Ii8+Cg== 4639
+cm90 4640
+IGZhaWxlZA== 4641
+ZWZvcmU= 4642
+IGNvbmNlcm4= 4643
+b2U= 4644
+YmE= 4645
+IHNlbmRlcg== 4646
+IHRlcm0= 4647
+aGFz 4648
+PSIj 4649
+IHBvdGVudGlhbA== 4650
+TnVt 4651
+IHB1Ymxpc2hlZA== 4652
+LmNsb3Nl 4653
+IEltYWdl 4654
+c3RyYWludA== 4655
+VUQ= 4656
+IE9i 4657
+IHByb2JhYmx5 4658
+bGlt 4659
+IjoK 4660
+b2x1bWU= 4661
+IGNvbnN1bQ== 4662
+YWd1ZQ== 4663
+ZW5zaW9ucw== 4664
+IGludmVzdGln 4665
+LXllYXI= 4666
+Jyk7 4667
+LXNt 4668
+IGVuam95 4669
+b3JpZw== 4670
+ZXJpbmc= 4671
+Y3A= 4672
+bGVhc2Vk 4673
+cGxlbWVudHM= 4674
+IHJldHVybnM= 4675
+cGF0 4676
+Qk8= 4677
+IEhvdXNl 4678
+LkxhYmVs 4679
+IHdlaWdodA== 4680
+aWdoYg== 4681
+IGNvbmRpdGlvbnM= 4682
+IGV4Y2VwdGlvbg== 4683
+ZGVzY3JpcHRpb24= 4684
+IHRyYWQ= 4685
+LXRv 4686
+IHt9 4687
+IG1vZHVsZQ== 4688
+RU5E 4689
+LmFw 4690
+LnByb3Bz 4691
+IGNvbnN0cnVjdG9y 4692
+YXZlcw== 4693
+IGZhdm9y 4694
+IE5vdw== 4695
+O2k= 4696
+IE1haW4= 4697
+X2s= 4698
+ZXJpZXM= 4699
+4oCZbGw= 4700
+dHJhbnNmb3Jt 4701
+aW1lc3RhbXA= 4702
+UHJl 4703
+IG1lcg== 4704
+LnJlcw== 4705
+c3RhbnQ= 4706
+TG9jYXRpb24= 4707
+X05BTUU= 4708
+IGxvc3M= 4709
+IAoK 4710
+bmV0 4711
+IGVuZ2luZQ== 4712
+QmxvY2s= 4713
+IGlzc3Vlcw== 4714
+IHBhcnNl 4715
+IEJhcg== 4716
+IHN0YXk= 4717
+IEpTT04= 4718
+IGRvbQ== 4719
+YWlycw== 4720
+d25lcg== 4721
+IGxvd2Vy 4722
+IiwNCg== 4723
+IERlbQ== 4724
+dWZhY3Q= 4725
+IHBz 4726
+IHBlcmZlY3Q= 4727
+Ukw= 4728
+IGVkdWM= 4729
+bHM= 4730
+ZW1vcnk= 4731
+QVJSQU5U 4732
+dWdl 4733
+IGV4YWN0 4734
+LmtleQ== 4735
+YWxsZWQ= 4736
+ZWNo 4737
+aWVm 4738
+XC8= 4739
+b2tl 4740
+IGZvcm1lcg== 4741
+YWxsb2M= 4742
+IHNpeA== 4743
+aWRh 4744
+IG1hcmdpbg== 4745
+IGhlYXJ0 4746
+YWxk 4747
+cGFjaw== 4748
+LmdldEVsZW1lbnRCeUlk 4749
+IFdBUlJBTlQ= 4750
+IHJhdGhlcg== 4751
+IGJ1aWxkaW5n 4752
+ZXJtYW4= 4753
+bGljZQ== 4754
+IHF1ZXN0aW9ucw== 4755
+aXplcw== 4756
+bGVnZQ== 4757
+aXJlY3Rvcnk= 4758
+IGpl 4759
+IGNhcw== 4760
+cHJvcHM= 4761
+dXRm 4762
+IHNlY3VyaXR5 4763
+IGhvd2V2ZXI= 4764
+d2VpZ2h0 4765
+IGluc2lkZQ== 4766
+IHByZXNpZGVudA== 4767
+Q2hhcg== 4768
+IFdJVEg= 4769
+Lm1hcA== 4770
+IGdyYXBo 4771
+IHRhZw== 4772
+X3N0YXR1cw== 4773
+IGF0dGVtcHQ= 4774
+b3Bw 4775
+dXNlcw== 4776
+CWNvbnN0 4777
+IHJvdW5k 4778
+LCQ= 4779
+IGZyaWVuZHM= 4780
+RW1haWw= 4781
+Pz4= 4782
+UmVzb3VyY2U= 4783
+S0VZ 4784
+b3Nw 4785
+LnF1ZXJ5 4786
+IE5vcnRo 4787
+YWJsZXM= 4788
+aXN0cmli 4789
+X2NsYXNz 4790
+ZWxsbw== 4791
+VGhhdA== 4792
+0Lo= 4793
+cGVjaWFsbHk= 4794
+IFByZXNpZGVudA== 4795
+IGNhbXBhaWdu 4796
+IGFsdA== 4797
+YXJlYQ== 4798
+IGNoYWxs 4799
+IG9wcG9ydA== 4800
+LkNvbg== 4801
+IGVuZXJneQ== 4802
+bGlrZQ== 4803
+LnN0cmluZw== 4804
+aW5ndG9u 4805
+KSo= 4806
+eXk= 4807
+IHByb2Zlc3Npb24= 4808
+aXJ0aA== 4809
+IHNlZw== 4810
+5pw= 4811
+IGhvcg== 4812
+aWVycw== 4813
+Y2Fu 4814
+IGJlaGluZA== 4815
+UHJvZHVjdA== 4816
+Zmc= 4817
+IFNr 4818
+LmpwZw== 4819
+Pzo= 4820
+XTsKCg== 4821
+IGNhbGxiYWNr 4822
+IEh0dHA= 4823
+0Yw= 4824
+bG9uZw== 4825
+TVM= 4826
+QVRI 4827
+IHJhaXNl 4828
+IHdhbnRlZA== 4829
+cm93bg== 4830
+dXRvcg== 4831
+bHQ= 4832
+XT0= 4833
+ZWxpbmU= 4834
+TUE= 4835
+IHNlcGFy 4836
+Y3M= 4837
+c2VtYg== 4838
+RGlz 4839
+YnNlcnY= 4840
+IFdpbGw= 4841
+IHBvbGljeQ== 4842
+IHRoaXJk 4843
+cGhvbmU= 4844
+IGJlZA== 4845
+L2c= 4846
+Ll9f 4847
+IEluYw== 4848
+aXppbmc= 4849
+LnJlbW92ZQ== 4850
+aW5zdGFuY2U= 4851
+LnR5cGU= 4852
+IHNlcnY= 4853
+RWFjaA== 4854
+IGhhcg== 4855
+IE1lc3NhZ2U= 4856
+KGtleQ== 4857
+U0VMRUNU 4858
+UG9z 4859
+KSk7DQo= 4860
+IHJlY29tbQ== 4861
+IHRyYWluaW5n 4862
+IEVudA== 4863
+IENoYXI= 4864
+aWNodA== 4865
+KGZpbGU= 4866
+IHByaW9y 4867
+R2FtZQ== 4868
+IGV4aXQ= 4869
+UGFyYW1z 4870
+LmNvcmU= 4871
+UEM= 4872
+bmVz 4873
+YW5jZWQ= 4874
+KHJlcXVlc3Q= 4875
+UGFzc3dvcmQ= 4876
+fT4K 4877
+IG1hZw== 4878
+IHJlbGVhc2U= 4879
+IHNoYWxs 4880
+dWRlbnQ= 4881
+IFNvdXRo 4882
+YW5kbw== 4883
+Oic= 4884
+LlRhYkluZGV4 4885
+c2s= 4886
+YW5uZXI= 4887
+aXNzZXQ= 4888
+IG91dHNpZGU= 4889
+bGVkZ2U= 4890
+IOU= 4891
+IFJvYg== 4892
+IGltbQ== 4893
+IQo= 4894
+IFdlYg== 4895
+RGVz 4896
+QkM= 4897
+YW5jaWFs 4898
+Um91dGU= 4899
+RGVj 4900
+ZmVyZW5jZXM= 4901
+IHB1cmNo 4902
+IE1vZGVs 4903
+Y3Rvcg== 4904
+Z24= 4905
+X3N0YXJ0 4906
+X3Vu 4907
+Lio= 4908
+aXNlcw== 4909
+IGdyb3VuZA== 4910
+IHVuaXF1ZQ== 4911
+IGJlYXV0 4912
+eyI= 4913
+IHBvdXI= 4914
+IE9jdA== 4915
+IHRyZWU= 4916
+c2V0cw== 4917
+X3Jlcw== 4918
+JyktPg== 4919
+X3JlZw== 4920
+KCJc 4921
+IGJ5dGU= 4922
+Qmw= 4923
+IGRhdGluZw== 4924
+IG1hdHRlcg== 4925
+IFJlbQ== 4926
+ICcuLi8= 4927
+IEF1Zw== 4928
+IExh 4929
+ICQo 4930
+b3VybmFs 4931
+aWFt 4932
+IHNob3dz 4933
+d3JpdGU= 4934
+IGJhbGw= 4935
+IHNpbXBseQ== 4936
+IGZhc3Q= 4937
+IG1lbW9yeQ== 4938
+QVNT 4939
+IE9m 4940
+b3ZlZA== 4941
+YW50ZQ== 4942
+YXVs 4943
+aXN0cnk= 4944
+KSkpOwo= 4945
+IGZpdA== 4946
+PHN0cmluZw== 4947
+IHBvbGl0aWNhbA== 4948
+YW5jZWw= 4949
+Xy4= 4950
+Y2FyZA== 4951
+LmN1cnJlbnQ= 4952
+b2No 4953
+X2ltYWdl 4954
+XHQ= 4955
+Iwo= 4956
+KEw= 4957
+IGluZHVzdHJ5 4958
+Y29taW5n 4959
+IGV4dHJh 4960
+IHJlcG9ydGVk 4961
+LnN0YXJ0 4962
+IHJlc291cmNlcw== 4963
+IGltZw== 4964
+Zmxvdw== 4965
+X0VY 4966
+KG51bGw= 4967
+IFByZQ== 4968
+IHdyb25n 4969
+aW50ZXJmYWNl 4970
+UGFyYW1ldGVy 4971
+bmVycw== 4972
+4bs= 4973
+dHVyZQ== 4974
+ZXJzaXN0 4975
+b3VudHJ5 4976
+IHNlZW1z 4977
+YWxhbmNl 4978
+ZGVzdA== 4979
+CVN0cmluZw== 4980
+IG1haW50 4981
+IHVuaXQ= 4982
+YWN0ZXJz 4983
+IFRS 4984
+aWZ1bA== 4985
+ZXhwb3J0cw== 4986
+cHJvamVjdA== 4987
+QXBwbGljYXRpb24= 4988
+bGVnYXRl 4989
+IHRha2Vz 4990
+dGVybQ== 4991
+IGV0Yw== 4992
+dXN0ZXI= 4993
+IGFwcGVhcg== 4994
+YWRkcmVzcw== 4995
+IGZlbQ== 4996
+aHM= 4997
+IGhvbQ== 4998
+LC0= 4999
+IGRpZmZpY3VsdA== 5000
+IGNvbWluZw== 5001
+T3Blbg== 5002
+IHNldHRpbmdz 5003
+IFdhcg== 5004
+IFRoZW4= 5005
+IGF1dG9t 5006
+IEZvdW5kYXRpb24= 5007
+IHF1aXRl 5008
+RGVzY3JpcHRpb24= 5009
+IGJsb2c= 5010
+aXF1 5011
+UFM= 5012
+X2ZpZWxk 5013
+SnNvbg== 5014
+U1NJT04= 5015
+IFNjaA== 5016
+IExP 5017
+IGRlc2NyaQ== 5018
+IGV2ZXJ5b25l 5019
+IHByZXR0eQ== 5020
+IGxvbmdlcg== 5021
+IG1lbnU= 5022
+IGN1cnJlbnRseQ== 5023
+c2Vj 5024
+IHJlbGF0aW9uc2hpcA== 5025
+IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyM= 5026
+IE1hcA== 5027
+YXNldA== 5028
+IHBhcmFtZXRlcnM= 5029
+IGNydXNo 5030
+Ig0K 5031
+SUxJVFk= 5032
+aWdyYXRpb24= 5033
+IGNvdXQ= 5034
+dG90YWw= 5035
+IG5hbWVz 5036
+bmRlZg== 5037
+Iik7 5038
+cmllbmQ= 5039
+eW5hbWlj 5040
+IGVmZm9ydA== 5041
+IGFjdHVhbA== 5042
+IGZpZWxkcw== 5043
+T1VO 5044
+dGVycw== 5045
+IGZpeA== 5046
+X21vZGVs 5047
+IGNhc2Vz 5048
+Q0E= 5049
+TXk= 5050
+SW50ZXJmYWNl 5051
+IFNF 5052
+XV0= 5053
+YWxsZQ== 5054
+IE5hdGlvbmFs 5055
+IEFycmF5TGlzdA== 5056
+aW5saW5l 5057
+LlY= 5058
+YXJh 5059
+cmVmaXg= 5060
+YXNj 5061
+UmVhZGVy 5062
+INC/ 5063
+YXN0aWM= 5064
+KCgp 5065
+Q2w= 5066
+LmFubm90YXRpb24= 5067
+IHBlcmZvcm1hbmNl 5068
+YWlseQ== 5069
+LnRvU3RyaW5n 5070
+Lm5ldA== 5071
+dmlld3M= 5072
+LmVuZA== 5073
+YXllcnM= 5074
+bGF0ZQ== 5075
+IEFwcg== 5076
+ZWRlcmFs 5077
+J10p 5078
+LmJvZHk= 5079
+IGhpZ2hlcg== 5080
+X2Zs 5081
+Y3I= 5082
+YWxlcnQ= 5083
+X25vZGU= 5084
+IEdvb2dsZQ== 5085
+IGl0c2VsZg== 5086
+QXV0aA== 5087
+dXJyZW5jeQ== 5088
+IHNpZ25pZmljYW50 5089
+YXBwZW5k 5090
+IHJlc3BlY3Q= 5091
+c3RyYXA= 5092
+IHVuYQ== 5093
+cml0ZXJpYQ== 5094
+UE9SVA== 5095
+LmFwYWNoZQ== 5096
+T3V0cHV0 5097
+IHByb2dyZXNz 5098
+IG1pZA== 5099
+IE1pY3Jvc29mdA== 5100
+IHJlc291cmNl 5101
+YWJsaXNo 5102
+IGRpbQ== 5103
+LmxvYWQ= 5104
+LkFwcA== 5105
+IGRpcmVjdGlvbg== 5106
+IGFkZGl0aW9uYWw= 5107
+ICAgICAgICAgICAgICAgICAgICAgICAg 5108
+IG51bWJlcnM= 5109
+IGNvbXBhbmllcw== 5110
+LlRo 5111
+IHNvdW5k 5112
+dXNlcm5hbWU= 5113
+IHN0YXRlbWVudA== 5114
+IGFsZXJ0 5115
+IGNvbnRyYWN0 5116
+aG9tZQ== 5117
+X2xlbmd0aA== 5118
+LkNvbXBvbmVudA== 5119
+ZXY= 5120
+LkV4 5121
+77ya 5122
+Ijs= 5123
+IEhpZ2g= 5124
+ICkKCg== 5125
+IFBvaW50 5126
+b3Bo 5127
+IGxpbmVz 5128
+LT5f 5129
+IikKCg== 5130
+b3g= 5131
+YXBwbGljYXRpb24= 5132
+IF0K 5133
+CgoKCgoK 5134
+IHNvb24= 5135
+Y3Rpb25z 5136
+aW5nZXI= 5137
+IGpvaW4= 5138
+IFBl 5139
+IOs= 5140
+IGxhcw== 5141
+LkU= 5142
+Y3Nz 5143
+L29y 5144
+IFN0YXJ0 5145
+IFRP 5146
+IHN1YnM= 5147
+Y29ubg== 5148
+Y29tcG9uZW50cw== 5149
+REVCVUc= 5150
+cXVhcmU= 5151
+RnVuY3Rpb24= 5152
+ZW5kYXI= 5153
+LmluZGV4 5154
+IGZpbGw= 5155
+xJk= 5156
+IGNob29zZQ== 5157
+aG93 5158
+IEFtZXJpY2E= 5159
+YXNzZXRz 5160
+LS0tLS0tLS0tLS0t 5161
+IFZhbHVl 5162
+IG9mZmljZQ== 5163
+IHZlaA== 5164
+IHRyYW5zZm9ybQ== 5165
+IEFydA== 5166
+IGluZGU= 5167
+IGZu 5168
+IGltcGxlbWVudHM= 5169
+YW5nbw== 5170
+cGxldGU= 5171
+KyI= 5172
+dG1w 5173
+YW1pbHk= 5174
+IGhhc2g= 5175
+bWlzc2lvbnM= 5176
+RVNU 5177
+Z3Q= 5178
+UHJvdmlkZXI= 5179
+ICAgICAgICAgICAgICAgICAgICAgIA== 5180
+IGZsYWc= 5181
+IHBhcnRpY2lw 5182
+ZGVu 5183
+IFJldHVybnM= 5184
+IG5vdGU= 5185
+w7xy 5186
+cG0= 5187
+aWRlb3M= 5188
+IHNwZWNpZmllZA== 5189
+IEVO 5190
+ZXN0ZXI= 5191
+b2xpZA== 5192
+IHVwb24= 5193
+KHN0ZA== 5194
+CXY= 5195
+ICdc 5196
+dXo= 5197
+IHZlcnQ= 5198
+IHZpY3Q= 5199
+CXNlbGY= 5200
+ICIk 5201
+Lms= 5202
+IGdyb3Vwcw== 5203
+Z2l0aHVi 5204
+bGFuZw== 5205
+IG11dA== 5206
+VE8= 5207
+IHZl 5208
+IFBsZWFzZQ== 5209
+OwoKCg== 5210
+YWNjZXNz 5211
+IHsi 5212
+cmVh 5213
+IHJpc2s= 5214
+aWNrZXI= 5215
+b2dnbGU= 5216
+CXdoaWxl 5217
+QU5H 5218
+LnNlbmQ= 5219
+IHdvbWFu 5220
+IGdldHM= 5221
+IGlnbg== 5222
+IElk 5223
+X2xvZw== 5224
+T05F 5225
+IGV2aWQ= 5226
+IEhhcg== 5227
+X3N1Yg== 5228
+IGVuZGw= 5229
+IGluY2x1ZGVk 5230
+KCkpOwoK 5231
+IEFw 5232
+aWdy 5233
+IHNlbQ== 5234
+IEJsYWNr 5235
+ZG9j 5236
+X3RhYmxl 5237
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA== 5238
+LXVw 5239
+IGNhdXNl 5240
+IC4u 5241
+IHZhbg== 5242
+X2RpY3Q= 5243
+IGZvY3Vz 5244
+SU5E 5245
+Q0VTUw== 5246
+LkxvZw== 5247
+IG11bHRpcGxl 5248
+aWRv 5249
+IHJlZ2FyZA== 5250
+LU0= 5251
+YW5kbGVy 5252
+b3Vyc2U= 5253
+IGRlZw== 5254
+LlU= 5255
+IGFkZGl0aW9u 5256
+IHZhcmlvdXM= 5257
+IHJlY2VpdmU= 5258
+0LXQvQ== 5259
+IEhU 5260
+T2Jq 5261
+REY= 5262
+IGluY3JlYXNl 5263
+IE9wZW4= 5264
+XTs= 5265
+IGNvbW1pdA== 5266
+Pwo= 5267
+YXRlZ29yaWVz 5268
+YXRvcnk= 5269
+c2hpcA== 5270
+IE1pY2g= 5271
+IGh0bWw= 5272
+cm9taXNl 5273
+IGxlYXZl 5274
+IHN0cmF0ZWc= 5275
+YXZlbg== 5276
+IENvbnNvbGU= 5277
+a25vd24= 5278
+LW4= 5279
+X0xF 5280
+LmNvbXBvbmVudA== 5281
+IGJyZQ== 5282
+U2Vzc2lvbg== 5283
+aWFuY2U= 5284
+IGFsaWdu 5285
+dHlwZWRlZg== 5286
+X3Jlc3VsdA== 5287
+IFdIRVJF 5288
+LnNwbGl0 5289
+IHJlYWRpbmc= 5290
+RkFVTFQ= 5291
+IGNsbw== 5292
+IG5vdGljZQ== 5293
+X3By 5294
+YXJ0ZXI= 5295
+IGxvY2s= 5296
+IHN0YW5kYXJk 5297
+ZXRpYw== 5298
+ZWxsb3c= 5299
+IHBhZGRpbmc= 5300
+IEhpcw== 5301
+IHN0YXRlcw== 5302
+X2Nhc3Q= 5303
+KFA= 5304
+YWE= 5305
+IGludGVybmFs 5306
+ZWFu 5307
+IFBSTw== 5308
+IEtleQ== 5309
+IGVzcGVjaWFsbHk= 5310
+bWluZw== 5311
+IGNyb3Nz 5312
+IG5hdGlvbmFs 5313
+X29iamVjdA== 5314
+ZmlsdGVy 5315
+IHNjcmlwdA== 5316
+LnVwZGF0ZQ== 5317
+X2k= 5318
+IEFzc2VydA== 5319
+L2NvcmU= 5320
+JSUlJQ== 5321
+IHByb2JsZW1z 5322
+aXN0b3I= 5323
+IC49 5324
+IGFyY2g= 5325
+IHdyaXR0ZW4= 5326
+IG1pbGl0 5327
+TUVOVA== 5328
+LmNo 5329
+Y2FwZQ== 5330
+IE11cw== 5331
+X2NvbmZpZw== 5332
+IEFQSQ== 5333
+Zm9vdA== 5334
+IGltYWdlcw== 5335
+ZW5kbA== 5336
+Lklu 5337
+Rmlyc3Q= 5338
+IHBsYXRmb3Jt 5339
+LnByb3Q= 5340
+T3B0aW9u 5341
+c3Rl 5342
+IFRPRE8= 5343
+IGZvcmNl 5344
+LmNvbnQ= 5345
+CWVjaG8= 5346
+IERhdg== 5347
+UHRy 5348
+KEI= 5349
+UlQ= 5350
+IEJhc2U= 5351
+XVsn 5352
+IGFubm91bmM= 5353
+Y29uc29sZQ== 5354
+IFB5 5355
+ZHM= 5356
+LmFz 5357
+IHByZXZlbnQ= 5358
+YXBhbg== 5359
+IHsn 5360
+fTwv 5361
+IFNlcnZpY2U= 5362
+IFNlbg== 5363
+YWRvcg== 5364
+cHJvZmlsZQ== 5365
+VG9w 5366
+IGl0ZXI= 5367
+cG8= 5368
+SUVT 5369
+SlNPTg== 5370
+SUU= 5371
+aWFudA== 5372
+44CB 5373
+X2o= 5374
+IFNlcHQ= 5375
+X21hcA== 5376
+YnVt 5377
+KGNvbnRleHQ= 5378
+IEhvbWU= 5379
+aWFucw== 5380
+R0I= 5381
+IGxpdmluZw== 5382
+IHBhdHRlcm4= 5383
+KGlucHV0 5384
+aWNpZW50 5385
+Q29yZQ== 5386
+IGVudGl0eQ== 5387
+IGludGVn 5388
+Q2hhbmdlZA== 5389
+IHVzZWZ1bA== 5390
+LmluZm8= 5391
+IHRvb2w= 5392
+KGl0ZW0= 5393
+IG9r 5394
+IGZlZWQ= 5395
+SVg= 5396
+w6lz 5397
+IE5ld3M= 5398
+cmVtb3Zl 5399
+ZXJyeQ== 5400
+CQkJCQkJCQkJ 5401
+aXBtZW50 5402
+YXJlcw== 5403
+RG8= 5404
+Q3VycmVudA== 5405
+LmNvbnRlbnQ= 5406
+Lkdyb3Vw 5407
+dXN0cmFs 5408
+INGB 5409
+fSk= 5410
+IHBvcHVsYXI= 5411
+IHN0cmU= 5412
+IG1ldGhvZHM= 5413
+X0VSUk9S 5414
+TGVmdA== 5415
+Y2Fs 5416
+YnNw 5417
+LlRvU3RyaW5n 5418
+IGRpcg== 5419
+IGFsbG93ZWQ= 5420
+IGltcGFjdA== 5421
+IildCg== 5422
+LmNvbmZpZw== 5423
+IGVsZW1lbnRz 5424
+IHByb3Rl 5425
+IHRyYWlu 5426
+LnRy 5427
+cnM= 5428
+IFJlcHVibGlj 5429
+IFRhc2s= 5430
+YXJpZXM= 5431
+KEQ= 5432
+KGdldA== 5433
+4oCmCgo= 5434
+IHJlbGF0ZWQ= 5435
+IHZlcnM= 5436
+IHNpbA== 5437
+ICIiOwo= 5438
+IGNtZA== 5439
+IHRlY2hub2xvZ3k= 5440
+LndpZHRo 5441
+RmxvYXQ= 5442
+IFVzZQ== 5443
+Qm9keQ== 5444
+c2hvdWxk 5445
+LmpvaW4= 5446
+Rm9udA== 5447
+bGx1bQ== 5448
+eWNsZQ== 5449
+IEJyaXQ= 5450
+IG1pdA== 5451
+IHNjYWxl 5452
+IChf 5453
+ZXJuZWw= 5454
+IikpCg== 5455
+IHNjb3Jl 5456
+L3Y= 5457
+IHN0dWRlbnQ= 5458
+VUM= 5459
+LnNob3c= 5460
+IGF2ZXJhZ2U= 5461
+RW5hYmxlZA== 5462
+KGV4 5463
+Y29tbW9u 5464
+aW1hdGlvbg== 5465
+OkAi 5466
+Y2hpZQ== 5467
+IC4uLgoK 5468
+cml2ZXI= 5469
+IE1hcmNo 5470
+Y2F0ZWdvcnk= 5471
+Zmlu 5472
+IGNvdXJ0 5473
+0LI= 5474
+U2VydmVy 5475
+IGNvbnRhaW5lcg== 5476
+LXN0 5477
+X2Zvcg== 5478
+IHBhcnRz 5479
+IGRlY2lzaW9u 5480
+b2Jz 5481
+b3Vi 5482
+bWl0dGVk 5483
+ICQoJyM= 5484
+IHNhdw== 5485
+IGFwcHJvYWNo 5486
+SUNF 5487
+IHNheWluZw== 5488
+IGFueW9uZQ== 5489
+bWV0YQ== 5490
+U0Q= 5491
+IHNvbmc= 5492
+ZGlzcGxheQ== 5493
+T3Blcg== 5494
+b3V0ZXM= 5495
+IGNoYW5uZWw= 5496
+IGNoYW5nZWQ= 5497
+w6o= 5498
+IGZpbmFsbHk= 5499
+X251bWJlcg== 5500
+UGxlYXNl 5501
+4KQ= 5502
+b3Jpbmc= 5503
+LXJl 5504
+IGtpbGw= 5505
+IGRydWc= 5506
+d2luZG93 5507
+IGNvbnZlcnQ= 5508
+b21icmU= 5509
+IHdheXM= 5510
+SGVscGVy 5511
+IEZpcnN0 5512
+KF9f 5513
+dXJpdHk= 5514
+IFdpbmRvd3M= 5515
+ZWVz 5516
+IG1hdA== 5517
+cmFwcGVy 5518
+IHBsdXM= 5519
+YW5nZXM= 5520
+Il0u 5521
+YXpvbg== 5522
+L3Q= 5523
+bGF0 5524
+YXN0ZQ== 5525
+IHByb2ZpbGU= 5526
+IHJlYWR5 5527
+I2lmbmRlZg== 5528
+cm90ZQ== 5529
+IHNlbnNl 5530
+R2VuZXI= 5531
+IENvbmZpZw== 5532
+b215 5533
+IEp1bmU= 5534
+IGxhdGVzdA== 5535
+IHNhZg== 5536
+IHJlZ2lvbg== 5537
+IGRlZXA= 5538
+d2l0Y2g= 5539
+IFBhcms= 5540
+fWA= 5541
+IEZyb20= 5542
+SUk= 5543
+IGN2 5544
+IHJlYWNo 5545
+IGNvdW50ZXI= 5546
+IFdvcms= 5547
+IFVSTA== 5548
+IFVwZGF0ZQ== 5549
+JywNCg== 5550
+IGltbWVkaQ== 5551
+Y2xvc2U= 5552
+YWRvcw== 5553
+ZmVycmVk 5554
+IHdlZWtz 5555
+dXJn 5556
+IGRhbWFnZQ== 5557
+IGxvc3Q= 5558
+YW5p 5559
+X2xv 5560
+IGhpbXNlbGY= 5561
+IGRvZw== 5562
+KV0K 5563
+778= 5564
+cGly 5565
+dHQ= 5566
+IHBhcGVy 5567
+IHRoZW1z 5568
+c2Vjb25k 5569
+IHN0YWZm 5570
+IElucHV0 5571
+Iis= 5572
+IEZhY2Vib29r 5573
+IGFsbG9j 5574
+IHNjaGVk 5575
+QUNF 5576
+IHRoZW1zZWx2ZXM= 5577
+IENvbXBvbmVudA== 5578
+IGRyaXZlcg== 5579
+amE= 5580
+KHBhdGg= 5581
+IGNhdGVnb3J5 5582
+YWxscw== 5583
+cHU= 5584
+bGx1bWluYXRl 5585
+IEFjdGlvbg== 5586
+LmJ1dHRvbg== 5587
+IEdM 5588
+aXN0aWNz 5589
+IG9pbA== 5590
+IHN0b2Nr 5591
+Pic= 5592
+IGRlYWQ= 5593
+VkFM 5594
+UVVF 5595
+KioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq 5596
+IGNoYXJn 5597
+UmV0dXJu 5598
+IGZ1bA== 5599
+ZG9t 5600
+IHJ1bGVz 5601
+IG1vZGlmeQ== 5602
+IGV2YWw= 5603
+aGFt 5604
+YXRlbWVudA== 5605
+XDw= 5606
+dWxh 5607
+PUZhbHNl 5608
+UkE= 5609
+IGNvbnRhaW5z 5610
+IHN0YWNr 5611
+bWFy 5612
+IHt9Cg== 5613
+IHVuZGVmaW5lZA== 5614
+QXNz 5615
+IENoaW5h 5616
+dmV5 5617
+Kgo= 5618
+IHBsYXlpbmc= 5619
+KS8= 5620
+YWN0b3I= 5621
+IGJvdHRvbQ== 5622
+bGllcg== 5623
+IE51bWJlcg== 5624
+IGNvdXBsZQ== 5625
+REM= 5626
+IFNP 5627
+Z29y 5628
+LnNldFRleHQ= 5629
+c3VjY2Vzcw== 5630
+Y29tbWFuZA== 5631
+RmlsdGVy 5632
+IE91cg== 5633
+X2l0ZW0= 5634
+IGN0eA== 5635
+IHJvYWQ= 5636
+VmVyc2lvbg== 5637
+Y2FzZQ== 5638
+dXJ0 5639
+YXZpb3I= 5640
+eWNo 5641
+c2VtYmx5 5642
+IFByb2R1Y3Q= 5643
+IGhlbGQ= 5644
+YWZl 5645
+IGluY2x1ZGVz 5646
+PHF1b3Rl 5647
+IGF2b2lk 5648
+IEZpbg== 5649
+IE1vZA== 5650
+IHRhYg== 5651
+YW5v 5652
+w7E= 5653
+aXBwaW5n 5654
+LWU= 5655
+IGluc2VydA== 5656
+dGFyZ2V0 5657
+Y2hhbg== 5658
+Lk1vZGVs 5659
+SU1F 5660
+XAo= 5661
+IG1hY2hpbmU= 5662
+YXZ5 5663
+IE5P 5664
+IEludGVy 5665
+IG9wZXJhdGlvbg== 5666
+bW9kYWw= 5667
+VGFn 5668
+XTo= 5669
+IHByb2R1Y3Rpb24= 5670
+IGFyZWFz 5671
+IHJlbg== 5672
+X2Zyb20= 5673
+bmJzcA== 5674
+IG9wZXJhdG9y 5675
+bWVu 5676
+YXBwZWQ= 5677
+X3Blcg== 5678
+emVu 5679
+KCIu 5680
+LnNhdmU= 5681
+PSJ7ew== 5682
+IHRvcg== 5683
+KHJlc3BvbnNl 5684
+IGNhbmRpZA== 5685
+IGNvbnY= 5686
+YWlsZWQ= 5687
+IExpYg== 5688
+Y29tcA== 5689
+dXJh 5690
+77+9 5691
+IEhlcmU= 5692
+IGFyZ3VtZW50 5693
+aG9vZA== 5694
+IGVzdGFibGlzaA== 5695
+b2dyYXBoeQ== 5696
+IG9uQ2xpY2s= 5697
+YW1iZGE= 5698
+IHNjaA== 5699
+IG1vdmll 5700
+IHNlYw== 5701
+IGFjdGl2aXR5 5702
+2Kc= 5703
+IHNxbA== 5704
+X2FsbA== 5705
+aW5jaXA= 5706
+IHByb3ZpZGVz 5707
+IHN5cw== 5708
+YWNrZXQ= 5709
+IHdhc24= 5710
+IHVzZXM= 5711
+IEZ1bmN0aW9u 5712
+Lmdvb2dsZQ== 5713
+IFJlc3VsdA== 5714
+VmlzaWJsZQ== 5715
+YWdtYQ== 5716
+ZWxjb21l 5717
+IFN5 5718
+IENlbnQ= 5719
+QUxTRQ== 5720
+YWNpw7Nu 5721
+RVhU 5722
+IGxpY2Vuc2U= 5723
+IExvbmc= 5724
+IGFjY29t 5725
+IGFiaWxpdHk= 5726
+LmhlaWdodA== 5727
+QWN0aXZl 5728
+b2xvZ2ljYWw= 5729
+b2x5 5730
+KSks 5731
+LlNl 5732
+IHBhcmFtZXRlcg== 5733
+cHJpdGU= 5734
+QUJJTElUWQ== 5735
+LnNlcnZpY2U= 5736
+IEdyb3Vw 5737
+X3F1ZXJ5 5738
+IEl0ZW0= 5739
+aW5pbmc= 5740
+IGp1ZA== 5741
+aW1z 5742
+Zml4 5743
+aW5kZXI= 5744
+YWdyYW0= 5745
+IGZ1bmN0aW9ucw== 5746
+IGV4cGVyaQ== 5747
+IEVt 5748
+IHJvdA== 5749
+IHBlbg== 5750
+LmJ0bg== 5751
+IEFT 5752
+I2lmZGVm 5753
+IGNob2ljZQ== 5754
+IFBhZ2U= 5755
+X1BSTw== 5756
+UVU= 5757
+5Y8= 5758
+YW50aXR5 5759
+wq0= 5760
+d29yZHM= 5761
+IHJlYWRvbmx5 5762
+IGZsZXg= 5763
+cHJvdGVjdGVk 5764
+IEFueQ== 5765
+IGNoYXJhY3RlcnM= 5766
+ZW5jZWQ= 5767
+IEp1bHk= 5768
+aWxlcg== 5769
+Q2FyZA== 5770
+dXJhbmNl 5771
+IHJldg== 5772
+LmV2ZW50 5773
+YWx5 5774
+IHdvbmRlcg== 5775
+IFBvcnQ= 5776
+IGxlZ2Fs 5777
+cm9sZQ== 5778
+IHRlbg== 5779
+IGdvZXM= 5780
+TVA= 5781
+d2hpdGU= 5782
+KToNCg== 5783
+KSkNCg== 5784
+IHJlZmVyZW5jZQ== 5785
+IG1pcw== 5786
+IFByb2plY3Q= 5787
+aWNrcw== 5788
+PiY= 5789
+Q09O 5790
+IHJlcGw= 5791
+IHJlZ3VsYXI= 5792
+U3RvcmFnZQ== 5793
+cmFtZXdvcms= 5794
+IGdvYWw= 5795
+IHRvdWNo 5796
+LndpZGdldA== 5797
+IGJ1aWx0 5798
+ZGVz 5799
+UGFydA== 5800
+KHJl 5801
+IHdvcnRo 5802
+aGli 5803
+Z2FtZQ== 5804
+INCy 5805
+YWNpb24= 5806
+IFdoaXRl 5807
+KHR5cGU= 5808
+KGA= 5809
+IG5hdHVyYWw= 5810
+IGluag== 5811
+IGNhbGN1bA== 5812
+IEFwcmls 5813
+Lkxpc3Q= 5814
+IGFzc29jaWF0ZWQ= 5815
+CVN5c3RlbQ== 5816
+fn4= 5817
+PVs= 5818
+IHN0b3JhZ2U= 5819
+IGJ5dGVz 5820
+IHRyYXZlbA== 5821
+IHNvdQ== 5822
+IHBhc3NlZA== 5823
+IT0= 5824
+YXNjcmlwdA== 5825
+Lm9wZW4= 5826
+IGdyaWQ= 5827
+IGJ1cw== 5828
+IHJlY29nbg== 5829
+QWI= 5830
+IGhvbg== 5831
+IENlbnRlcg== 5832
+IHByZWM= 5833
+YnVpbGQ= 5834
+SFRNTA== 5835
+IFNhbg== 5836
+IGNvdW50cmllcw== 5837
+YWxlZA== 5838
+dG9rZW4= 5839
+a3Q= 5840
+IHF1YWw= 5841
+TGFzdA== 5842
+YWRvdw== 5843
+IG1hbnVmYWN0 5844
+aWRhZA== 5845
+amFuZ28= 5846
+TmV4dA== 5847
+eGY= 5848
+LmE= 5849
+IHBvcm5v 5850
+IFBN 5851
+ZXJ2ZQ== 5852
+aXRpbmc= 5853
+X3Ro 5854
+Y2k= 5855
+PU5vbmU= 5856
+Z3M= 5857
+IGxvZ2lu 5858
+YXRpdmVz 5859
+J10pOwo= 5860
+xIU= 5861
+IGlsbA== 5862
+SUE= 5863
+Y2hpbGRyZW4= 5864
+RE8= 5865
+IGxldmVscw== 5866
+IHt7 5867
+IGxvb2tz 5868
+ICIj 5869
+VG9TdHJpbmc= 5870
+IG5lY2Vzc2FyeQ== 5871
+ICAgCg== 5872
+Y2VsbA== 5873
+RW50cnk= 5874
+ICcj 5875
+IGV4dHJlbQ== 5876
+U2VsZWN0b3I= 5877
+IHBsYWNlaG9sZGVy 5878
+TG9hZA== 5879
+IHJlbGVhc2Vk 5880
+T1JF 5881
+RW51bWVy 5882
+IFRW 5883
+U0VU 5884
+aW5x 5885
+UHJlc3M= 5886
+IERlcGFydG1lbnQ= 5887
+IHByb3BlcnRpZXM= 5888
+IHJlc3BvbmQ= 5889
+U2VhcmNo 5890
+YWVs 5891
+IHJlcXU= 5892
+IEJvb2s= 5893
+Lwo= 5894
+KHN0 5895
+IGZpbmFuY2lhbA== 5896
+aWNrZXQ= 5897
+X2lucHV0 5898
+IHRocmVhdA== 5899
+KGlu 5900
+U3RyaXA= 5901
+7J0= 5902
+w6fDo28= 5903
+IGV2aWRlbmNl 5904
+KSk7 5905
+IEJybw== 5906
+IFtdOwo= 5907
+IG91 5908
+YnVm 5909
+U2NyaXB0 5910
+ZGF0 5911
+IHJ1bGU= 5912
+I2ltcG9ydA== 5913
+PSIv 5914
+U2VyaWFs 5915
+IHN0YXJ0aW5n 5916
+W2luZGV4 5917
+YWU= 5918
+IGNvbnRyaWI= 5919
+c2Vzc2lvbg== 5920
+X25ldw== 5921
+dXRhYmxl 5922
+b2Jlcg== 5923
+ICIuLw== 5924
+IGxvZ2dlcg== 5925
+IHJlY2VudGx5 5926
+IHJldHVybmVk 5927
+DQ0K 5928
+KSkpCg== 5929
+aXRpb25z 5930
+IHNlZWs= 5931
+IGNvbW11bmlj 5932
+ICIu 5933
+IHVzZXJuYW1l 5934
+RUNU 5935
+RFM= 5936
+IG90aGVyd2lzZQ== 5937
+IEdlcm1hbg== 5938
+LmF3 5939
+QWRhcHRlcg== 5940
+aXhlbA== 5941
+IHN5c3RlbXM= 5942
+IGRyb3A= 5943
+IHN0cnVjdHVyZQ== 5944
+ICQoIiM= 5945
+ZW5jaWVz 5946
+YW5uaW5n 5947
+IExpbms= 5948
+IFJlc3BvbnNl 5949
+IHN0cmk= 5950
+xbw= 5951
+IERC 5952
+5pc= 5953
+YW5kcm9pZA== 5954
+c3VibWl0 5955
+b3Rpb24= 5956
+KEA= 5957
+LnRlc3Q= 5958
+CgoKCgoKCgo= 5959
+XTsNCg== 5960
+IGRpcmVjdGx5 5961
+ICIl 5962
+cmlz 5963
+ZWx0YQ== 5964
+QUlM 5965
+KXsNCg== 5966
+bWluZQ== 5967
+ICAgICAgICAgICAgICAgICAgICAgICAgICA= 5968
+KGs= 5969
+Ym9u 5970
+YXNpYw== 5971
+cGl0ZQ== 5972
+X19f 5973
+TWF4 5974
+IGVycm9ycw== 5975
+IFdoaWxl 5976
+IGFyZ3VtZW50cw== 5977
+IGVuc3VyZQ== 5978
+UmlnaHQ= 5979
+LWJhc2Vk 5980
+V2Vi 5981
+IC09 5982
+IGludHJvZHU= 5983
+IEluc3Q= 5984
+IFdhc2g= 5985
+b3JkaW4= 5986
+am9pbg== 5987
+RGF0YWJhc2U= 5988
+IGdyYWQ= 5989
+IHVzdWFsbHk= 5990
+SVRF 5991
+UHJvcHM= 5992
+Pz4K 5993
+IEdv 5994
+QE92ZXJyaWRl 5995
+UkVG 5996
+IGlw 5997
+IEF1c3RyYWw= 5998
+IGlzdA== 5999
+Vmlld0J5SWQ= 6000
+IHNlcmlvdXM= 6001
+IGN1c3RvbWVy 6002
+LnByb3RvdHlwZQ== 6003
+b2Rv 6004
+Y29y 6005
+IGRvb3I= 6006
+IFdJVEhPVVQ= 6007
+IHBsYW50 6008
+IGJlZ2Fu 6009
+IGRpc3RhbmNl 6010
+KCkpLg== 6011
+IGNoYW5jZQ== 6012
+IG9yZA== 6013
+Y2FtZQ== 6014
+cHJhZ21h 6015
+IHByb3RlY3Q= 6016
+cmFnbWVudA== 6017
+IE5vZGU= 6018
+ZW5pbmc= 6019
+0Yc= 6020
+IHJvdXRl 6021
+IFNjaG9vbA== 6022
+aGk= 6023
+IG5laWdoYg== 6024
+QWZ0ZXI= 6025
+bGljaXQ= 6026
+IGNvbnRy 6027
+IHByaW1hcnk= 6028
+QUE= 6029
+LldyaXRlTGluZQ== 6030
+dXRpbHM= 6031
+IGJp 6032
+UmVk 6033
+LkxpbnE= 6034
+Lm9iamVjdA== 6035
+IGxlYWRlcnM= 6036
+dW5pdGllcw== 6037
+IGd1bg== 6038
+b250aA== 6039
+IERldg== 6040
+RklMRQ== 6041
+IGNvbW1lbnRz 6042
+X2xlbg== 6043
+YXJyb3c= 6044
+YW1vdW50 6045
+UmFuZ2U= 6046
+c2VydA== 6047
+R3JpZFZpZXc= 6048
+IHVwZGF0ZWQ= 6049
+IE1v 6050
+IGluZm9ybQ== 6051
+b2NpZXR5 6052
+YWxh 6053
+QWNjZXNz 6054
+IGhhYg== 6055
+IGNyZWF0 6056
+X2FyZw== 6057
+IEphbnVhcnk= 6058
+IERheQ== 6059
+IikNCg== 6060
+dXBsZQ== 6061
+ZG9jdW1lbnQ= 6062
+Z29yaXRo 6063
+bWVudQ== 6064
+IE92ZXI= 6065
+YmI= 6066
+LnRpdGxl 6067
+X291dA== 6068
+IGxlZA== 6069
+dXJp 6070
+ID8+PC8= 6071
+Z2w= 6072
+IGJhbms= 6073
+YXltZW50 6074
+CXByaW50Zg== 6075
+TUQ= 6076
+IHNhbXBsZQ== 6077
+IGhhbmRz 6078
+IFZlcnNpb24= 6079
+dWFyaW8= 6080
+IG9mZmVycw== 6081
+aXR5RW5naW5l 6082
+IHNoYXBl 6083
+IHNsZWVw 6084
+X3BvaW50 6085
+U2V0dGluZ3M= 6086
+IGFjaGll 6087
+IHNvbGQ= 6088
+b3Rh 6089
+LmJpbmQ= 6090
+QW0= 6091
+IHNhZmU= 6092
+U3RvcmU= 6093
+IHNoYXJlZA== 6094
+IHByaXY= 6095
+X1ZBTA== 6096
+IHNlbnM= 6097
+KXs= 6098
+IHJlbWVtYmVy 6099
+c2hhcmVk 6100
+ZWxlbWVudA== 6101
+IHNob290 6102
+VmVydA== 6103
+Y291dA== 6104
+IGVudg== 6105
+X2xhYmVs 6106
+ID4K 6107
+cnVu 6108
+IHNjZW5l 6109
+KGFycmF5 6110
+ZGV2aWNl 6111
+X3RpdGxl 6112
+YWdvbg== 6113
+XQ0K 6114
+YWJ5 6115
+IGJlY2FtZQ== 6116
+Ym9vbGVhbg== 6117
+IHBhcms= 6118
+IENvZGU= 6119
+dXBsb2Fk 6120
+cmlkYXk= 6121
+IFNlcHRlbWJlcg== 6122
+RmU= 6123
+IHNlbg== 6124
+Y2luZw== 6125
+Rkw= 6126
+Q29s 6127
+dXRz 6128
+X3BhZ2U= 6129
+aW5u 6130
+IGltcGxpZWQ= 6131
+YWxpbmc= 6132
+IHlvdXJzZWxm 6133
+LkNvdW50 6134
+Y29uZg== 6135
+IGF1ZA== 6136
+X2luaXQ= 6137
+Lik= 6138
+IHdyb3Rl 6139
+Tkc= 6140
+LkVycm9y 6141
+5Ls= 6142
+LmZvcg== 6143
+IGVxdWFs 6144
+IFJlcXVlc3Q= 6145
+IHNlcmlhbA== 6146
+IGFsbG93cw== 6147
+WFg= 6148
+IG1pZGRsZQ== 6149
+Y2hvcg== 6150
+w7g= 6151
+ZXJ2YWw= 6152
+LkNvbHVtbg== 6153
+cmVhZGluZw== 6154
+IGVzY29ydA== 6155
+IEF1Z3VzdA== 6156
+IHF1aWNrbHk= 6157
+IHdlYXA= 6158
+IENH 6159
+cm9wcmk= 6160
+aG8= 6161
+IGNvcA== 6162
+KHN0cnVjdA== 6163
+IEJpZw== 6164
+IHZz 6165
+IGZyZXF1 6166
+LlZhbHVl 6167
+IGFjdGlvbnM= 6168
+IHByb3Blcg== 6169
+IGlubg== 6170
+IG9iamVjdHM= 6171
+IG1hdHJpeA== 6172
+YXZhc2NyaXB0 6173
+IG9uZXM= 6174
+Lmdyb3Vw 6175
+IGdyZWVu 6176
+IHBhaW50 6177
+b29scw== 6178
+eWNs 6179
+ZW5jb2Rl 6180
+b2x0 6181
+Y29tbWVudA== 6182
+LmFwaQ== 6183
+RGly 6184
+IHVuZQ== 6185
+aXpvbnQ= 6186
+LnBvc2l0aW9u 6187
+IGRlc2lnbmVk 6188
+X3ZhbA== 6189
+YXZp 6190
+aXJpbmc= 6191
+dGFi 6192
+IGxheWVy 6193
+IHZpZXdz 6194
+IHJldmU= 6195
+cmFlbA== 6196
+IE9O 6197
+cmljcw== 6198
+bnA= 6199
+IGNvcmU= 6200
+KCkpOw0K 6201
+TWFpbg== 6202
+IGV4cGVydA== 6203
+CQkNCg== 6204
+X2Vu 6205
+IC8+ 6206
+dXR0ZXI= 6207
+SUFM 6208
+YWlscw== 6209
+IEtpbmc= 6210
+Ki8KCg== 6211
+IE1ldA== 6212
+X2VuZA== 6213
+YWRkcg== 6214
+b3Jh 6215
+IGly 6216
+TWlu 6217
+IHN1cnBy 6218
+IHJlcGU= 6219
+IGRpcmVjdG9yeQ== 6220
+UFVU 6221
+LVM= 6222
+IGVsZWN0aW9u 6223
+aGFwcw== 6224
+LnByZQ== 6225
+Y20= 6226
+VmFsdWVz 6227
+ICIK 6228
+Y29sdW1u 6229
+aXZpbA== 6230
+TG9naW4= 6231
+aW51ZQ== 6232
+IGJlYXV0aWZ1bA== 6233
+IHNlY3JldA== 6234
+KGV2ZW50 6235
+IGNoYXQ= 6236
+dW1z 6237
+IG9yaWdpbg== 6238
+IGVmZmVjdHM= 6239
+IG1hbmFnZW1lbnQ= 6240
+aWxsYQ== 6241
+dGs= 6242
+IHNldHRpbmc= 6243
+IENvdXI= 6244
+IG1hc3NhZ2U= 6245
+CWVuZA== 6246
+IGhhcHB5 6247
+IGZpbmlzaA== 6248
+IGNhbWVyYQ== 6249
+IFZlcg== 6250
+IERlbW9jcg== 6251
+IEhlcg== 6252
+KFE= 6253
+Y29ucw== 6254
+aXRh 6255
+ICcu 6256
+e30= 6257
+CUM= 6258
+IHN0dWZm 6259
+IDoK 6260
+IEFS 6261
+VGFzaw== 6262
+aGlkZGVu 6263
+ZXJvcw== 6264
+SUdO 6265
+YXRpbw== 6266
+IEhlYWx0aA== 6267
+b2x1dGU= 6268
+RW50ZXI= 6269
+Jz4= 6270
+IFR3aXR0ZXI= 6271
+IENvdW50eQ== 6272
+c2NyaWJl 6273
+ID0+Cg== 6274
+IGh5 6275
+Zml0 6276
+IG1pbGl0YXJ5 6277
+IHNhbGU= 6278
+cmVxdWlyZWQ= 6279
+bm9u 6280
+Ym9vdHN0cmFw 6281
+aG9sZA== 6282
+cmlt 6283
+LW9sZA== 6284
+IERvd24= 6285
+IG1lbnRpb24= 6286
+Y29udGFjdA== 6287
+X2dyb3Vw 6288
+b2RheQ== 6289
+IHRvd24= 6290
+IHNvbHV0aW9u 6291
+dWF0ZQ== 6292
+ZWxsaW5n 6293
+XS0+ 6294
+b3Rlcw== 6295
+ZW50YWw= 6296
+b21lbg== 6297
+b3NwaXRhbA== 6298
+IFN1cA== 6299
+X0VO 6300
+IHNsb3c= 6301
+U0VTU0lPTg== 6302
+IGJsdWU= 6303
+YWdv 6304
+IGxpdmVz 6305
+IF4= 6306
+LnVu 6307
+aW5zdA== 6308
+ZW5nZQ== 6309
+IGN1c3RvbWVycw== 6310
+IGNhc3Q= 6311
+dWRnZXQ= 6312
+77yB 6313
+aWNlbnM= 6314
+IGRldGVybWlu 6315
+U2VsZWN0ZWQ= 6316
+X3Bs 6317
+dWV1ZQ== 6318
+IGRhcms= 6319
+Ly8KCg== 6320
+c2k= 6321
+dGhlcm4= 6322
+IEphcGFu 6323
+L3c= 6324
+UFU= 6325
+IEVhc3Q= 6326
+b3ZpZQ== 6327
+IHBhY2thZ2U= 6328
+IG5vcg== 6329
+IGFwaQ== 6330
+Ym90 6331
+Il07Cg== 6332
+X3Bvc3Q= 6333
+dWxhdGU= 6334
+IGNsdWI= 6335
+JykpOwo= 6336
+IGxvb3A= 6337
+UElP 6338
+aW9uZQ== 6339
+c2hvdA== 6340
+SW5pdGlhbA== 6341
+IHBsYXllZA== 6342
+cmVnaXN0ZXI= 6343
+cm91Z2h0 6344
+X21heA== 6345
+YWNlbWVudA== 6346
+bWF0Y2g= 6347
+cmFwaGljcw== 6348
+QVNU 6349
+IGV4aXN0aW5n 6350
+IGNvbXBsZXg= 6351
+REE= 6352
+LkNo 6353
+LmNvbW1vbg== 6354
+bW8= 6355
+ICcuLi8uLi8= 6356
+aXRv 6357
+IGFuYWx5c2lz 6358
+IGRlbGl2ZXI= 6359
+ICAgICAgICAgICAgICAgIAo= 6360
+aWR4 6361
+w6A= 6362
+b25nbw== 6363
+IEVuZ2xpc2g= 6364
+PCEtLQ== 6365
+IGNvbXB1dGVy 6366
+RU5TRQ== 6367
+IHBhcw== 6368
+IHJhaXM= 6369
+SGFzaA== 6370
+IG1vYmlsZQ== 6371
+IG93bmVy 6372
+RklH 6373
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg 6374
+dGhlcw== 6375
+IGF0dHI= 6376
+d2Q= 6377
+LnRpbWU= 6378
+YXdu 6379
+IHRyZWF0bWVudA== 6380
+IEFj 6381
+LlZpZXc= 6382
+aW1wbA== 6383
+bW9yZQ== 6384
+cGFzcw== 6385
+IGhh 6386
+LmZyb20= 6387
+IGxlYWRpbmc= 6388
+RkZGRg== 6389
+KGVycm9y 6390
+LnVp 6391
+YXRhcg== 6392
+YWRlcnM= 6393
+ZGF0ZXM= 6394
+IHp1 6395
+IGZsb3c= 6396
+VGFyZ2V0 6397
+IGludm9sdmVk 6398
+IGlv 6399
+cGFyc2U= 6400
+JF8= 6401
+aGVzdA== 6402
+LmludA== 6403
+LWl0ZW0= 6404
+YXN5 6405
+U3A= 6406
+IHNoaWZ0 6407
+TlQ= 6408
+IHRm 6409
+X1RS 6410
+LndlYg== 6411
+Q1M= 6412
+IH0p 6413
+IGV5ZXM= 6414
+X3o= 6415
+Jyk7DQo= 6416
+aWZvcm4= 6417
+IHtA 6418
+IG5pY2U= 6419
+Lmxpc3Q= 6420
+ICAgIA0K 6421
+IGZsb29y 6422
+IHJlZGlyZWN0 6423
+IFVL 6424
+KFsn 6425
+IHdpc2g= 6426
+IGNhcHQ= 6427
+bGVnYWw= 6428
+IElP 6429
+IHN0YWdl 6430
+LlN0cmluZw== 6431
+IEFmcg== 6432
+aWdlbg== 6433
+IFNI 6434
+RGVsZXRl 6435
+ZWxscw== 6436
+IHNvbGlk 6437
+IG1lZXRpbmc= 6438
+IHdvcmtlZA== 6439
+IGVkaXRvcg== 6440
+aW55 6441
+0Lw= 6442
+X3JlYWQ= 6443
+Lklk 6444
+ZWZm 6445
+T2Zmc2V0 6446
+Y2hh 6447
+VVNFUg== 6448
+CQkgICA= 6449
+aXBwZWQ= 6450
+IGRpY3Q= 6451
+IFJ1bg== 6452
+LmhwcA== 6453
+IGFuZw== 6454
+eG1s 6455
+aW1wbGU= 6456
+IG1lZGljYWw= 6457
+X3Rva2Vu 6458
+Y29ubmVjdA== 6459
+IGhvdXI= 6460
+IGNvbnRyb2xsZXI= 6461
+X21lc3NhZ2U= 6462
+VUlE 6463
+R3I= 6464
+YW5kZWQ= 6465
+X0NI 6466
+IGJvb2tz 6467
+IHNwZWFr 6468
+YW1pbmc= 6469
+IG1vdW50 6470
+UmVjb3Jk 6471
+CXN0cnVjdA== 6472
+LldlYg== 6473
+b25kb24= 6474
+IC8vCg== 6475
+IGZlbHQ= 6476
+LkF1dG8= 6477
+aWRnZQ== 6478
+X3Bvcw== 6479
+UFI= 6480
+IG1vZGVybg== 6481
+Q29sbGVjdGlvbg== 6482
+X21zZw== 6483
+Q0Q= 6484
+IExv 6485
+IHNlY29uZHM= 6486
+aWJseQ== 6487
+LmVxdWFscw== 6488
+IGludGVybmF0aW9uYWw= 6489
+I3ByYWdtYQ== 6490
+b290aA== 6491
+V3JpdGVy 6492
+aWF0ZQ== 6493
+IGNlbGU= 6494
+IEJpdA== 6495
+aXZv 6496
+aXZlcnk= 6497
+cmQ= 6498
+SEVDSw== 6499
+IGNhY2hl 6500
+LmNvdW50 6501
+IHJvbGw= 6502
+LlJlYWQ= 6503
+UkVE 6504
+IHNldHVw 6505
+aXpvbnRhbA== 6506
+bW9kZWxz 6507
+YXJndg== 6508
+IGNvbnNpZGVyZWQ= 6509
+PSIuLi8= 6510
+c2V0dGluZ3M= 6511
+IFJlbA== 6512
+IGdyb3d0aA== 6513
+IG1peA== 6514
+IFdhc2hpbmd0b24= 6515
+IHBsdA== 6516
+IElN 6517
+4bo= 6518
+IHR1cm5lZA== 6519
+IERhdGVUaW1l 6520
+IFdlZA== 6521
+KHVybA== 6522
+ICIt 6523
+IGxldHRlcg== 6524
+QXN5bmM= 6525
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgIA== 6526
+IE9jdG9iZXI= 6527
+X2xpbmU= 6528
+IGF0dGVudGlvbg== 6529
+IGNvbGxlY3Q= 6530
+IEhhc2g= 6531
+IGltYWc= 6532
+VHJlZQ== 6533
+IHNpdHVhdGlvbg== 6534
+ZXR0ZQ== 6535
+X25v 6536
+SVZF 6537
+IHZvbg== 6538
+LnRhcmdldA== 6539
+IGtub3dsZWRnZQ== 6540
+IGRyaXZl 6541
+LnBvc3Q= 6542
+IGJsb29k 6543
+IGNpdA== 6544
+cHJpbWFyeQ== 6545
+IGNvbmZpZ3VyYXRpb24= 6546
+dGVl 6547
+IHBob3Rv 6548
+aXNvZGU= 6549
+VHJhY2U= 6550
+IGdhdmU= 6551
+IHNob3Q= 6552
+IEFpcg== 6553
+IG1vdGhlcg== 6554
+cHJpY2U= 6555
+IG1vcm5pbmc= 6556
+KSl7Cg== 6557
+LXg= 6558
+IHRyYWRl 6559
+IGRlc2M= 6560
+ICYmCg== 6561
+IHBhcmVudHM= 6562
+QXBp 6563
+5Yg= 6564
+dGVk 6565
+d2Vy 6566
+IOY= 6567
+IHN5 6568
+IEtl 6569
+UGFyc2Vy 6570
+5YU= 6571
+YW5jeQ== 6572
+IHBpZWNl 6573
+aWZvcm5pYQ== 6574
+dG9TdHJpbmc= 6575
+cmFu 6576
+aWRpbmc= 6577
+UFRJT04= 6578
+Y29tZXM= 6579
+L2xpYw== 6580
+LmNsaWVudA== 6581
+RWw= 6582
+TG9uZw== 6583
+IHByb2Zlc3Npb25hbA== 6584
+cnVwdA== 6585
+dmE= 6586
+IGNvbXBsZXRlbHk= 6587
+IHByYWN0aWNl 6588
+IHNlbGVjdGlvbg== 6589
+UmVt 6590
+aW5p 6591
+IGNhbQ== 6592
+UkVF 6593
+IHNpdGVz 6594
+cGE= 6595
+QVRVUw== 6596
+0YHRgg== 6597
+YXJyYW50 6598
+Kig= 6599
+X0tFWQ== 6600
+IEJ1dHRvbg== 6601
+IEZyaWRheQ== 6602
+c2VxdQ== 6603
+IHJlYWRlcg== 6604
+IG1lc3NhZ2Vz 6605
+6K8= 6606
+IGJ1Zg== 6607
+S2U= 6608
+IG5vdg== 6609
+SFA= 6610
+TXNn 6611
+YWxpZ24= 6612
+YXJpbHk= 6613
+ICcs 6614
+X3dpdGg= 6615
+IGRhcw== 6616
+IGhlYXJk 6617
+YXRvbWlj 6618
+cmlhbA== 6619
+KVs= 6620
+IGRpc2U= 6621
+QGVuZA== 6622
+IGdvbGQ= 6623
+IGZhaXI= 6624
+IHNhbGVz 6625
+LkJ1dHRvbg== 6626
+c3RyaWN0 6627
+c2F2ZQ== 6628
+IG1lYXN1cmU= 6629
+ICIr 6630
+ZWNhdXNl 6631
+Vmlld0NvbnRyb2xsZXI= 6632
+IFRhYmxl 6633
+LnBhcmFt 6634
+IGRlY2lkZWQ= 6635
+KCgo 6636
+SU5GTw== 6637
+IG9wcG9ydHVuaXR5 6638
+VGU= 6639
+SUNFTlNF 6640
+Y2NvcmRpbmc= 6641
+a2k= 6642
+IFVO 6643
+IGNvbnRhaW4= 6644
+IG1hbmFnZXI= 6645
+IHBhaW4= 6646
+IEZpcmU= 6647
+cm9tZQ== 6648
+IHBsYW5z 6649
+Rm91bmQ= 6650
+bGF5 6651
+IERlY2VtYmVy 6652
+IGluZmx1 6653
+w7o= 6654
+cmVuY2g= 6655
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg 6656
+YXppbmc= 6657
+YnJpZWY= 6658
+Y2FsbA== 6659
+d29vZA== 6660
+IGxvYWRlZA== 6661
+IGdyYW5k 6662
+L2Y= 6663
+aW1w 6664
+X1U= 6665
+U1RS 6666
+4oCi 6667
+IGNyZWRpdA== 6668
+LkNvbG9y 6669
+b3JnZQ== 6670
+UVVFU1Q= 6671
+IGRpZmZlcmVuY2U= 6672
+IFBD 6673
+d2FyZ3M= 6674
+IHB1Yg== 6675
+dW5kYXk= 6676
+IGZyYQ== 6677
+Lm1heA== 6678
+IHRyaWVk 6679
+YW5uZWxz 6680
+c2VuZA== 6681
+IHJlcG9ydHM= 6682
+IGFkdWx0 6683
+5Lo= 6684
+IGNvbnNpc3Q= 6685
+IFN0cmVldA== 6686
+IFByb2dyYW0= 6687
+U1FM 6688
+TWF0cml4 6689
+b3VuY2ls 6690
+LUE= 6691
+CXc= 6692
+IHdob3Nl 6693
+IHJlbGln 6694
+IFNleA== 6695
+IGdpdmVz 6696
+bm9uZQ== 6697
+Lm1lc3NhZ2U= 6698
+KEc= 6699
+LmF3dA== 6700
+LXJpZ2h0 6701
+IE5vdmVtYmVy 6702
+ZWxsaWc= 6703
+dXRpdmU= 6704
+xIM= 6705
+b3Zlcm4= 6706
+IGVhc2lseQ== 6707
+IGlkZWFz 6708
+INC9 6709
+L2Nzcw== 6710
+bHlpbmc= 6711
+ZWxsZQ== 6712
+Q2Fu 6713
+X2NvbG9y 6714
+0L7Qsg== 6715
+IHBhaXI= 6716
+bmd0aA== 6717
+IHNwbGl0 6718
+ZHJvcA== 6719
+YXJ0eQ== 6720
+b25h 6721
+IGNhcGl0YWw= 6722
+IGhlYXI= 6723
+IGV4aXN0cw== 6724
+CWxvZw== 6725
+ZW1v 6726
+UnVu 6727
+b2k= 6728
+IHBhcnNlcg== 6729
+IE1ldGhvZA== 6730
+IGVkdWNhdGlvbg== 6731
+W2s= 6732
+IGxpYnJhcnk= 6733
+PiI7Cg== 6734
+X1VO 6735
+CXN0ZA== 6736
+b2RlZA== 6737
+IGNhbGxz 6738
+aGVyZQ== 6739
+UmVs 6740
+IGJyYW5k 6741
+YmFja2dyb3VuZA== 6742
+Z2E= 6743
+X2FkZHJlc3M= 6744
+X3BhcmFtcw== 6745
+Q2F0ZWdvcnk= 6746
+IEluZGlh 6747
+X2V2ZW50 6748
+IGluZw== 6749
+UmVuZGVy 6750
+LmNs 6751
+dW1weQ== 6752
+IHBldA== 6753
+RkM= 6754
+IEFudA== 6755
+RXh0 6756
+IGNoYXJnZQ== 6757
+ZW5lZA== 6758
+Z3JhZA== 6759
+RU8= 6760
+IGRlcGVuZA== 6761
+IC4KCg== 6762
+ZnJhbWU= 6763
+IGRm 6764
+IGh1Z2U= 6765
+IFBBUlQ= 6766
+ZWRz 6767
+Ozs= 6768
+IEFN 6769
+IGJhc2lj 6770
+IExldA== 6771
+bGljaA== 6772
+IGFybQ== 6773
+IHN0YXI= 6774
+IGZlZGVyYWw= 6775
+V29yaw== 6776
+IGNhcnJ5 6777
+IElzcmFlbA== 6778
+KG9iag== 6779
+PXt7 6780
+IHNhdmVk 6781
+IHN5bg== 6782
+IGNvbnN0YW50 6783
+VkVOVA== 6784
+IHBvc2l0aXZl 6785
+IGNvbmR1Y3Q= 6786
+IHNraW4= 6787
+IGVhcmxpZXI= 6788
+IGxheW91dA== 6789
+IElQ 6790
+T1VS 6791
+IHRpbQ== 6792
+c3R5bGVzaGVldA== 6793
+X2Ns 6794
+IENhcmQ= 6795
+Kyspewo= 6796
+IHRlbXBlcg== 6797
+IERhdmlk 6798
+CXRyeQ== 6799
+LmRhcnQ= 6800
+IHdhbnRz 6801
+IHBpY3R1cmU= 6802
+IHZpZGVvcw== 6803
+IENvbW0= 6804
+aXNpb25z 6805
+X01BWA== 6806
+TWFwcGluZw== 6807
+LWNvbnRlbnQ= 6808
+IEVhcg== 6809
+LWRl 6810
+IHByZW0= 6811
+YnJ1YXJ5 6812
+IGNvbXBvbmVudHM= 6813
+IHRocm91Z2hvdXQ= 6814
+IHB1bGw= 6815
+IHBhZ2Vz 6816
+ZW50ZQ== 6817
+cmVzcG9uZA== 6818
+IGdhcw== 6819
+Y3JpcHRvcg== 6820
+IGVkZ2U= 6821
+IGJvdW5k 6822
+QUNU 6823
+KioqKioq 6824
+IGNyZWF0aW5n 6825
+IENI 6826
+IG51bGxwdHI= 6827
+QnI= 6828
+Kyc= 6829
+LmNv 6830
+Pjo6 6831
+IGxlYXJuaW5n 6832
+Lkxlbmd0aA== 6833
+X1NI 6834
+IHBhdGllbnRz 6835
+QUlO 6836
+IGtpZHM= 6837
+IGNvbWZvcnQ= 6838
+IHNob3du 6839
+dWdpbnM= 6840
+IEJhY2s= 6841
+ZWxsYQ== 6842
+X0NM 6843
+IGxhdA== 6844
+IGRpc3BhdGNo 6845
+IGNsYXNzZXM= 6846
+LmF0 6847
+LmJlZ2lu 6848
+IHN1Y2Nlc3NmdWw= 6849
+YmFu 6850
+IG9idGFpbg== 6851
+IFNs 6852
+IGxhY2s= 6853
+aXRlcmF0b3I= 6854
+VGhyZWFk 6855
+KHNpemU= 6856
+IG5vbmU= 6857
+Lmhhcw== 6858
+X1g= 6859
+c29ydA== 6860
+bmFw 6861
+cGV0 6862
+Ymlu 6863
+IENhbmFkYQ== 6864
+VGhleQ== 6865
+IGRhbnM= 6866
+IE1hdA== 6867
+PHRk 6868
+IGhhaXI= 6869
+ICcnLAo= 6870
+IGN1 6871
+IGxhd3M= 6872
+bGV0ZWQ= 6873
+cGVk 6874
+IHBvdw== 6875
+IGtuZXc= 6876
+X0NPTQ== 6877
+Xyw= 6878
+IE1hZw== 6879
+aWRlbnRz 6880
+KHJlcQ== 6881
+ICks 6882
+LWNlbnRlcg== 6883
+IHdpZGU= 6884
+IEF1dGhvcg== 6885
+c3RhbnRz 6886
+IGpvYnM= 6887
+IG1hdGg= 6888
+ZXRpbWVz 6889
+Qm9vbGVhbg== 6890
+IHNjb3Bl 6891
+X2lz 6892
+IG1lYXM= 6893
+IGtleXM= 6894
+ZWxheQ== 6895
+IGV4YWN0bHk= 6896
+Jz0+Jw== 6897
+IFBhdWw= 6898
+bWFz 6899
+CXByaW50 6900
+KGxlbg== 6901
+ZmQ= 6902
+ICk7 6903
+LkV2ZW50 6904
+cWxp 6905
+aXJpdA== 6906
+aWVsZHM= 6907
+b21hbg== 6908
+IFRvcA== 6909
+IHZvdGU= 6910
+IG1hc2s= 6911
+IHRoZW1l 6912
+LQo= 6913
+IHByb3Bz 6914
+IGZpbmU= 6915
+IHdyaXRlcg== 6916
+X29mZnNldA== 6917
+Y2Fy 6918
+IGFsdGVybg== 6919
+IGNvcHlyaWdodA== 6920
+IGRlc3Ryb3k= 6921
+cHBlcg== 6922
+IGdlbmVyYXRl 6923
+cHBlZA== 6924
+4oCZZA== 6925
+ICAgICAgCg== 6926
+bWFrZQ== 6927
+IFNob3c= 6928
+IGJyb3dzZXI= 6929
+IGZhdm9yaXRl 6930
+IGNhcmVlcg== 6931
+IGhhcHBlbmVk 6932
+KGNoYXI= 6933
+IHJlY29tbWVuZA== 6934
+IGxpdGVy 6935
+LmZpbHRlcg== 6936
+Z3JhZGU= 6937
+IMKj 6938
+UGhvbmU= 6939
+b21z 6940
+IG5hbWVk 6941
+LWxhYmVs 6942
+aXBv 6943
+IE90aGVy 6944
+IHBhbmVs 6945
+IHJvY2s= 6946
+U2NhbGU= 6947
+CWFzc2VydA== 6948
+0LQ= 6949
+IHRydXN0 6950
+ZnJvbnQ= 6951
+IGRlbW9u 6952
+QXI= 6953
+TmV0 6954
+IGVjb25vbWlj 6955
+Zm9vdGVy 6956
+IHJhY2U= 6957
+KG5vZGU= 6958
+IE9wdGlvbg== 6959
+c3BsaXQ= 6960
+IHBoeXNpY2Fs 6961
+aWZlc3Q= 6962
+IHJlbW92ZWQ= 6963
+Lmh0dHA= 6964
+KSksCg== 6965
+IGxvb2tlZA== 6966
+Jzs= 6967
+ZGluZw== 6968
+Z2VzdA== 6969
+YXR1cmRheQ== 6970
+L2xpY2Vuc2Vz 6971
+UHJpY2U= 6972
+IGRybw== 6973
+IHRvd2FyZHM= 6974
+IHVucw== 6975
+IENM 6976
+CXN0YXRpYw== 6977
+IHJvd3M= 6978
+IGRlZmluZQ== 6979
+LnJlcGxhY2U= 6980
+IGZhdGhlcg== 6981
+IERlc2lnbg== 6982
+YXNzaWdu 6983
+bXV0 6984
+RGV2aWNl 6985
+RGlk 6986
+JykpCg== 6987
+b21ldHJ5 6988
+YXlsb2Fk 6989
+IGhpc3Rvcg== 6990
+IFBhcmFt 6991
+IEJvb2xlYW4= 6992
+IG5hdHVyZQ== 6993
+IGpz 6994
+IG5hdGlvbg== 6995
+aWg= 6996
+IGRpc2NvdmVy 6997
+c2Vt 6998
+SGFuZGxl 6999
+CXI= 7000
+IFRlY2hu 7001
+IHdhbGw= 7002
+eyQ= 7003
+QHByb3BlcnR5 7004
+ICIuLi8= 7005
+IGV4YW0= 7006
+LmRyYXc= 7007
+b3BwaW5n 7008
+IG5lYXJseQ== 7009
+IGNvb2w= 7010
+IGluZGVwZW5k 7011
+UkVT 7012
+IGhhbmRsZXI= 7013
+IE1vbmRheQ== 7014
+IHN1bg== 7015
+U3R5bGVz 7016
+b3VzbHk= 7017
+IAk= 7018
+dmVzdA== 7019
+RGlzcGxheQ== 7020
+KHk= 7021
+YXRpY2FsbHk= 7022
+IHByZWRpY3Q= 7023
+eWluZw== 7024
+IHNvbWV0aW1lcw== 7025
+Il0K 7026
+IGRyaW5r 7027
+IGJ1bA== 7028
+aWZpY2F0aW9ucw== 7029
+Lmluc2VydA== 7030
+LnJlZw== 7031
+IHRlc3Rz 7032
+QWxpZ25tZW50 7033
+IGFsbGVn 7034
+IGF0dHJpYnV0ZQ== 7035
+IE5vdGU= 7036
+IG15c2VsZg== 7037
+YXJ0cw== 7038
+Tm93 7039
+IGludGVyZXN0aW5n 7040
+bGllbnRz 7041
+IHBvcHVsYXRpb24= 7042
+IENhbGlmb3JuaWE= 7043
+Ikk= 7044
+5bk= 7045
+IGdyZWF0ZXI= 7046
+dWVzZGF5 7047
+IHRob3Vz 7048
+IGNvc3Rz 7049
+IGxhdW5jaA== 7050
+XEh0dHA= 7051
+a2Vy 7052
+YmFuZA== 7053
+IFBsYXk= 7054
+IGJhbmQ= 7055
+LnNoYXBl 7056
+ZXNvbWU= 7057
+YXJ0aWNsZQ== 7058
+LnJm 7059
+IHdlcg== 7060
+w6Fz 7061
+ZW1iZXJz 7062
+dXNy 7063
+QkE= 7064
+aWNhbg== 7065
+ZXR0 7066
+dmFsaWRhdGU= 7067
+dWx0aQ== 7068
+IGltbWVkaWF0ZWx5 7069
+emVy 7070
+IGZpZ3VyZQ== 7071
+b2Vz 7072
+ZWxsZXI= 7073
+aXJjbGU= 7074
+IFNpZ24= 7075
+LmRi 7076
+IHJhbms= 7077
+Qnl0ZXM= 7078
+IHByb2plY3Rz 7079
+X3JlYw== 7080
+VUxBUg== 7081
+QVBJ 7082
+IExpbmU= 7083
+UG9ydA== 7084
+IHBvbGw= 7085
+IGdpdmluZw== 7086
+aWRlbmNl 7087
+LS0K 7088
+IHBsb3Q= 7089
+aWNpYWw= 7090
+IHdhcnJhbnQ= 7091
+SVRJT04= 7092
+IERvdWJsZQ== 7093
+IGJpbGxpb24= 7094
+Z29yaXRobQ== 7095
+IGVxdWlwbWVudA== 7096
+REFURQ== 7097
+IEAi 7098
+RUU= 7099
+IHBsZQ== 7100
+aWF0aW9u 7101
+IGhlYWRlcnM= 7102
+IHByb2NlZA== 7103
+LkNvbXBvbmVudE1vZGVs 7104
+IE9iYW1h 7105
+IHBh 7106
+IEJlc3Q= 7107
+aW1hdGVseQ== 7108
+LmdldFN0cmluZw== 7109
+Llw= 7110
+bXBsb3k= 7111
+IHJhdw== 7112
+X2Jsb2Nr 7113
+dW5kcmVk 7114
+In0sCg== 7115
+Lkdyb3VwTGF5b3V0 7116
+IGJyb3VnaHQ= 7117
+TlNTdHJpbmc= 7118
+dGhyb3c= 7119
+Y3JlYXRlZA== 7120
+Lk5ldw== 7121
+X3ZpZXc= 7122
+Q1A= 7123
+ZXBz 7124
+T3A= 7125
+IGdyYXRpcw== 7126
+ICci 7127
+IGludGVydmlldw== 7128
+IiIiCg== 7129
+IHBhcnRpYWw= 7130
+IGFyaWE= 7131
+YmluZw== 7132
+QXV0aG9y 7133
+Qm9vaw== 7134
+IFBhdA== 7135
+dW1hbg== 7136
+VXNlcnM= 7137
+cGx1cw== 7138
+IERpcmVjdA== 7139
+dmVudWU= 7140
+YWxwaGE= 7141
+VUNDRVNT 7142
+IENhbGw= 7143
+ICk7DQo= 7144
+aW1hdGVk 7145
+IHJlbWFpbg== 7146
+IGFudGk= 7147
+IExvbmRvbg== 7148
+IHNhZmV0eQ== 7149
+UE9TRQ== 7150
+b2xlcw== 7151
+Y29udHJvbGxlcg== 7152
+Qnl0ZQ== 7153
+IENvdXJ0 7154
+IFBoaWw= 7155
+IEFzc29jaQ== 7156
+ZW5h 7157
+5ZA= 7158
+X1NUUg== 7159
+Y29pbg== 7160
+cmVzaG9sZA== 7161
+IGJhdGNo 7162
+X0NsaWNr 7163
+ZW50aWNhdGlvbg== 7164
+Pic7Cg== 7165
+ZW50eQ== 7166
+IGJlZ2lubmluZw== 7167
+IHplcm8= 7168
+IENvbnZlcnQ= 7169
+IHRlcnI= 7170
+IHBhaWQ= 7171
+IGluY3JlYXNlZA== 7172
+Y2F0Y2g= 7173
+LXNpemU= 7174
+YWN0aXZpdHk= 7175
+ZXF1YWxz 7176
+IHF1ZXVl 7177
+ICIn 7178
+IEludGVybmF0aW9uYWw= 7179
+IGbDvHI= 7180
+dXJzZGF5 7181
+IHNjaWVudA== 7182
+YWxsb3c= 7183
+YXhpcw== 7184
+IGFwcHJvcHJp 7185
+ZWRnZQ== 7186
+IGlkeA== 7187
+U3VjY2Vzcw== 7188
+ZW50aWZpZXI= 7189
+Olw= 7190
+eGlz 7191
+IG1heGltdW0= 7192
+YXJrcw== 7193
+IGJpcnRo 7194
+KGluZGV4 7195
+IG1heWJl 7196
+LnB5 7197
+ZmlsZXM= 7198
+IGxpbWl0ZWQ= 7199
+X2NoZWNr 7200
+bG9vaw== 7201
+cGxpZXM= 7202
+IG1vdmVtZW50 7203
+J10u 7204
+IGJyb2Fk 7205
+IEJF 7206
+IFVuaXR5RW5naW5l 7207
+LmNwcA== 7208
+IEV2ZXJ5 7209
+QWRtaW4= 7210
+IGZhbnM= 7211
+cGFyZWQ= 7212
+CiAgICAK 7213
+IGZvcmVpZ24= 7214
+IHBhbg== 7215
+IHRvdXI= 7216
+IE9yZGVy 7217
+IG1vdmluZw== 7218
+IGF1Zg== 7219
+Q2FsbA== 7220
+Y2I= 7221
+xZ8= 7222
+dmVudG9yeQ== 7223
+IFNxbA== 7224
+IGZ1bGx5 7225
+Q2xpY2tMaXN0ZW5lcg== 7226
+V09SRA== 7227
+IGFubm91bmNlZA== 7228
+KQ0KDQo= 7229
+IGFncmVlZA== 7230
+cmll 7231
+IGVhcm4= 7232
+X2xpbms= 7233
+LmFycmF5 7234
+KHRleHQ= 7235
+IG1hdGVyaWFscw== 7236
+LHA= 7237
+ZmZmZg== 7238
+dmc= 7239
+IMKp 7240
+IHVubGVzcw== 7241
+YWpheA== 7242
+TE9H 7243
+IHNleHVhbA== 7244
+IFwi 7245
+LXRpbWU= 7246
+IGNvYWNo 7247
+IHN1cHBvcnRlZA== 7248
+IHBob3Rvcw== 7249
+aWZvcm0= 7250
+LkNyZWF0ZQ== 7251
+KV0= 7252
+cmllcg== 7253
+IGRpYWxvZw== 7254
+YXZlcg== 7255
+aWdl 7256
+KSs= 7257
+X2lkeA== 7258
+Ols= 7259
+X21pbg== 7260
+IENvbmc= 7261
+IHByZXNzdXJl 7262
+IHRlYW1z 7263
+U2lnbg== 7264
+YmVnaW4= 7265
+cmlhbg== 7266
+TkVTUw== 7267
+TFM= 7268
+IGltcHJvdmU= 7269
+IFN1bmRheQ== 7270
+IGRlZmluaXRpb24= 7271
+aWdlcg== 7272
+cm9sbGVycw== 7273
+IHRoaW5raW5n 7274
+VGVtcGxhdGU= 7275
+LUY= 7276
+IGVtZXJn 7277
+cGxhdGVz 7278
+IFVTQQ== 7279
+LnNldFN0YXRl 7280
+IEFsc28= 7281
+cmV2 7282
+IGVuYWJsZQ== 7283
+IENP 7284
+UEVDVA== 7285
+IGNvbmNlcHQ= 7286
+KS0= 7287
+IOKAog== 7288
+IHNldHM= 7289
+IG1lYW5pbmc= 7290
+ZW1vbg== 7291
+IENvbnM= 7292
+Y21w 7293
+ZWRlcg== 7294
+YW5uZWQ= 7295
+aWNlbnNlZA== 7296
+IFN1cGVy 7297
+IGRhaWx5 7298
+IG11bHRp 7299
+X3U= 7300
+IGNoYWxsZW5n 7301
+X21vZGU= 7302
+IFByb21pc2U= 7303
+IHN0cmljdA== 7304
+am8= 7305
+aW50b24= 7306
+KGxpc3Q= 7307
+T25seQ== 7308
+Pns= 7309
+IHZlaGljbGU= 7310
+7ZU= 7311
+IFBsYXllcg== 7312
+IERlbA== 7313
+IHBvb2w= 7314
+LnVybA== 7315
+bmVzZGF5 7316
+KCk7DQoNCg== 7317
+ICIpOwo= 7318
+TG9jYWw= 7319
+LiIpOwo= 7320
+IG9yZ2FuaXphdGlvbg== 7321
+cmVuZGVy 7322
+IEFwcGxpY2F0aW9u 7323
+IHN1bW1lcg== 7324
+ZXhwZWN0ZWQ= 7325
+TkE= 7326
+IHJhcA== 7327
+X29iag== 7328
+IHN1cmZhY2U= 7329
+IFBVUg== 7330
+IH0sCgo= 7331
+IHZhcmlhYmxlcw== 7332
+KG1lc3NhZ2U= 7333
+IG9waW4= 7334
+LmJhY2s= 7335
+0LDQvQ== 7336
+IHdvcmtlcnM= 7337
+dm0= 7338
+Q28= 7339
+dWdodGVy 7340
+IG1hc3Rlcg== 7341
+ICIiLA== 7342
+IHN0b3JpZXM= 7343
+LlVzZXI= 7344
+IGNlbGVicg== 7345
+aW5lc2U= 7346
+QlM= 7347
+IENvbW1hbmQ= 7348
+YXNoYm9hcmQ= 7349
+IG9n 7350
+a2c= 7351
+LmltYWdl 7352
+LnN0eWxl 7353
+IHN0ZXBz 7354
+IEJlbg== 7355
+KGFyZ3M= 7356
+IFBlcnNvbg== 7357
+LHk= 7358
+IG9mZmljaWFscw== 7359
+fAo= 7360
+IHNraWxscw== 7361
+dmM= 7362
+IGJ1aWxkZXI= 7363
+IGdhcg== 7364
+QWNjb3VudA== 7365
+IEF1dGg= 7366
+55Q= 7367
+J10pCg== 7368
+IEFU 7369
+bm4= 7370
+LkludA== 7371
+U1NFUlQ= 7372
+IGVmZmVjdGl2ZQ== 7373
+TEVURQ== 7374
+IHRvb2xz 7375
+QVJE 7376
+IGRpZ2l0YWw= 7377
+RG91Ymxl 7378
+IEZpbmQ= 7379
+UkM= 7380
+IGlubGluZQ== 7381
+L3I= 7382
+QVJBTQ== 7383
+QVNL 7384
+IGludGVudA== 7385
+YWlnaHQ= 7386
+X2FkZHI= 7387
+IHJlcXVlc3Rz 7388
+LmZpcnN0 7389
+IGRlYnVn 7390
+IHNwZW50 7391
+KCkpKTsK 7392
+xZs= 7393
+IHByaW5jaXA= 7394
+TG9nZ2Vy 7395
+Y2x1ZGVz 7396
+LnVzZQ== 7397
+IHN1cnY= 7398
+bWVkaWE= 7399
+IEZlYnJ1YXJ5 7400
+IE1hYw== 7401
+IG1pc3Npbmc= 7402
+IHdpZmU= 7403
+IHRhbGtpbmc= 7404
+IE1ha2U= 7405
+IGNhcnQ= 7406
+IGxvY2F0ZWQ= 7407
+RW5j 7408
+LWE= 7409
+Y2hyb24= 7410
+IGNhcmRz 7411
+IGd1eQ== 7412
+IHBlcnM= 7413
+IFllcw== 7414
+YXRldmVy 7415
+IEFuZw== 7416
+b2xhcg== 7417
+IEV2ZW4= 7418
+IGFjY3Vy 7419
+IFBvd2Vy 7420
+IEdvbGQ= 7421
+Y2xlYXI= 7422
+UHJvY2Vzcw== 7423
+IHJlY29yZHM= 7424
+IGtpbGxlZA== 7425
+LmNsZWFy 7426
+IFdBUlJBTlRJRVM= 7427
+IHB1cnBvc2U= 7428
+cGFuZWw= 7429
+SkVDVA== 7430
+w61h 7431
+IGV4ZXJj 7432
+V1M= 7433
+L0w= 7434
+LmV4cG9ydHM= 7435
+IF9fXw== 7436
+IHNpbg== 7437
+U2VydmxldA== 7438
+IGTDqQ== 7439
+LmRlbGV0ZQ== 7440
+cm9rZQ== 7441
+U2w= 7442
+dWdo 7443
+ZWFycw== 7444
+IHBvaW50ZXI= 7445
+IGhvcA== 7446
+YWxsZXJ5 7447
+IG9icw== 7448
+Y292ZXJ5 7449
+CWNoYXI= 7450
+CQkJCQkJCQkJCQ== 7451
+CWRlZg== 7452
+b2NpdHk= 7453
+aXRjaGVu 7454
+dWxhdGlvbnM= 7455
+IEZJVA== 7456
+ICku 7457
+c3RyYWludHM= 7458
+dmVudGlvbg== 7459
+IHJlcXVpcmVz 7460
+IE9wZXI= 7461
+TUU= 7462
+T1VOVA== 7463
+YWxsZXQ= 7464
+IG5vcm0= 7465
+SVJF 7466
+ZXhhcw== 7467
+IHByb2dyYW1z 7468
+IHdlYWs= 7469
+Jy4k 7470
+dWluZw== 7471
+CSAgICAgICA= 7472
+IG1pbA== 7473
+IGZpcm0= 7474
+aW5pdGVseQ== 7475
+X1ZBTFVF 7476
+YXBzZQ== 7477
+YXRpc2Y= 7478
+IGRlbWFuZA== 7479
+X21vZA== 7480
+IGRlc2NyaWJlZA== 7481
+IHBsYWNlcw== 7482
+VklE 7483
+IGFsb25l 7484
+IGV4cG9ydA== 7485
+IHZlYw== 7486
+IE1heA== 7487
+IGFjdGl2aXRpZXM= 7488
+aWN0dXJlcw== 7489
+Z2VuZXI= 7490
+IG1h 7491
+gqw= 7492
+IGV4cHJlc3Npb24= 7493
+Q2FsbGJhY2s= 7494
+X2NvbnRlbnQ= 7495
+IE1vc3Q= 7496
+IHRlc3Rpbmc= 7497
+RUM= 7498
+Q0hBTlQ= 7499
+IGFkanVzdA== 7500
+LlRocmVhZGluZw== 7501
+KGN0eA== 7502
+IGFncmVl 7503
+aWdoZXN0 7504
+IHVp 7505
+IExhdw== 7506
+Llk= 7507
+Pjw/ 7508
+IHBvZA== 7509
+LWxn 7510
+4oCdCgo= 7511
+IGRlc2NyaWJl 7512
+IEV1cm9wZWFu 7513
+LXNo 7514
+IFBVUlBPU0U= 7515
+T1JZ 7516
+IGNvbnZlcnM= 7517
+IElsbHVtaW5hdGU= 7518
+IEF2 7519
+KGNo 7520
+PyI= 7521
+Y2hlbg== 7522
+aW1h 7523
+RG9jdW1lbnQ= 7524
+IG9wZXJhdGlvbnM= 7525
+d2lu 7526
+CWZ1bmN0aW9u 7527
+LkltYWdl 7528
+IHNjZW4= 7529
+L2g= 7530
+IFND 7531
+IGV4cGxv 7532
+OiU= 7533
+LyoqDQo= 7534
+TkFNRQ== 7535
+5og= 7536
+KHZhcg== 7537
+IGRpcmVjdG9y 7538
+T05H 7539
+IHlpZWxk 7540
+IGZlZXQ= 7541
+IFNlYXJjaA== 7542
+IEls 7543
+IHJlc3RhdXI= 7544
+ZHVj 7545
+IGludGVnZXI= 7546
+ICcnOwo= 7547
+IGhpZ2hseQ== 7548
+Y2hlY2tlZA== 7549
+IFBBUlRJQw== 7550
+RVJDSEFOVA== 7551
+77yJ 7552
+IG9wdGlt 7553
+UXVldWU= 7554
+IExJ 7555
+aXRhdGlvbg== 7556
+IHRyYW5zcG9ydA== 7557
+aXNzaW9u 7558
+ZmlsbA== 7559
+dXNpb24= 7560
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg 7561
+CWJvb2w= 7562
+LXRo 7563
+dXB0 7564
+IGVzc2VudGlhbA== 7565
+YW50ZWQ= 7566
+IGJlbmVmaXRz 7567
+CVM= 7568
+JzsNCg== 7569
+aWtp 7570
+IGdpcmxz 7571
+aWNlZA== 7572
+YnVmZmVy 7573
+XSs= 7574
+IHNvY2tldA== 7575
+IHByaWNlcw== 7576
+IEZyZQ== 7577
+IHNhdA== 7578
+IHdvb2Q= 7579
+TWVudUl0ZW0= 7580
+QVJH 7581
+IEFkbWlu 7582
+T1dO 7583
+ZGs= 7584
+IHJlc2V0 7585
+IGZvcm1z 7586
+INC4 7587
+5pY= 7588
+IFR1ZXNkYXk= 7589
+IEluaXRpYWxpemVk 7590
+X3RyYWlu 7591
+b3Jhcnk= 7592
+YXRlZ29y 7593
+IGR0 7594
+VG90YWw= 7595
+Y29uc3RydWN0 7596
+aWxpZXM= 7597
+IGd1eXM= 7598
+0LXRgA== 7599
+IGluc3RydWN0aW9u 7600
+eWxlZA== 7601
+IGludGVybmV0 7602
+ZXRhZGF0YQ== 7603
+YWR5 7604
+ZmFjZXM= 7605
+amVjdGlvbg== 7606
+IEphY2s= 7607
+IHJlY3Q= 7608
+Wy0= 7609
+IExlZw== 7610
+IGRldmljZXM= 7611
+T0M= 7612
+ICoNCg== 7613
+b3JhdGlvbg== 7614
+ZXJ0YWlu 7615
+IGd1YXJk 7616
+b3N0cmVhbQ== 7617
+IGVudW0= 7618
+LmxheW91dA== 7619
+ICI7Cg== 7620
+dm9rZQ== 7621
+IE9r 7622
+SG9tZQ== 7623
+KHRy 7624
+RVRI 7625
+IGRlbGF5 7626
+IHB1cmNoYXNl 7627
+ZGM= 7628
+IGFyZW4= 7629
+X29uY2U= 7630
+CQkJCQo= 7631
+cm9y 7632
+ZHJhdw== 7633
+LnJ1bg== 7634
+KG1vZGVs 7635
+VGltZW91dA== 7636
+bGlr 7637
+IEFyZw== 7638
+LmVu 7639
+IGZpc2g= 7640
+Y3B5 7641
+X2Zl 7642
+RVJDSEFOVEFCSUxJVFk= 7643
+KFg= 7644
+X291dHB1dA== 7645
+Pz8= 7646
+IGpv 7647
+YW5kYXJk 7648
+IGRvbGw= 7649
+ZXJyb3Jz 7650
+X2Jhc2U= 7651
+IFBBUlRJQ1VMQVI= 7652
+IGxlYWRlcg== 7653
+IGNvbXBhcg== 7654
+IGRvdWI= 7655
+IFZpcw== 7656
+U3RhY2tUcmFjZQ== 7657
+LUM= 7658
+IFN0dWQ= 7659
+c3RpdHV0ZQ== 7660
+TW9yZQ== 7661
+IERlc2NyaXB0aW9u 7662
+V0FSRQ== 7663
+YWRz 7664
+INC6 7665
+YmluZA== 7666
+PXNlbGY= 7667
+ZW1wbG95 7668
+W24= 7669
+LmFsbA== 7670
+LUI= 7671
+JiY= 7672
+YWxt 7673
+IGN1bHR1cmU= 7674
+aG91c2U= 7675
+IHN1ZmZlcg== 7676
+ICcl 7677
+IHN0cmFpZ2h0 7678
+IFN0YXI= 7679
+dWRv 7680
+IGRlZA== 7681
+IENPTQ== 7682
+IGNvbmZpcm0= 7683
+IEdvb2Q= 7684
+LnNj 7685
+X19fX19fX19fX19fX19fXw== 7686
+RFI= 7687
+Q29uZmlndXJhdGlvbg== 7688
+RGF0ZVRpbWU= 7689
+IGFkdmVydA== 7690
+IGNvdWxkbg== 7691
+YXN5bmM= 7692
+c3RhY2s= 7693
+JykNCg== 7694
+S2l0 7695
+IGhvdXM= 7696
+IG1lY2hhbg== 7697
+cmF0ZQ== 7698
+IGF1ZGlv 7699
+CWNvdXQ= 7700
+Y29yZXM= 7701
+IHNwb3Q= 7702
+IGluY3JlYXNpbmc= 7703
+ICMj 7704
+KSkp 7705
+cG9pbnRz 7706
+IGNvbXBhcmVk 7707
+bGln 7708
+IGJlaGF2aW9y 7709
+IEJZ 7710
+IEF0dA== 7711
+Y3JhZnQ= 7712
+aGVhZGVycw== 7713
+ZXRl 7714
+ZW5kcmVnaW9u 7715
+IGRldGFpbA== 7716
+VUxF 7717
+IENvbW1vbg== 7718
+CXByb3RlY3RlZA== 7719
+c3Rvbg== 7720
+IEZJVE5FU1M= 7721
+IGZyZXNo 7722
+Ij4KCg== 7723
+LmV4YW1wbGU= 7724
+YmVyZw== 7725
+IG1vdmVk 7726
+CWU= 7727
+IFNhdHVyZGF5 7728
+IHBheWxvYWQ= 7729
+xIc= 7730
+KToKCg== 7731
+IGJleQ== 7732
+dXJlcg== 7733
+PHNjcmlwdA== 7734
+IHN5bWJvbA== 7735
+IGFzc3Vt 7736
+IHB1bA== 7737
+RWZmZWN0 7738
+IGh1bmRyZWQ= 7739
+VG9vbA== 7740
+YWtlZA== 7741
+Y29ubmVjdGlvbg== 7742
+IHZvaWNl 7743
+IHBk 7744
+IHRyYW5zYWN0aW9u 7745
+IGxpbmtz 7746
+RXJy 7747
+IEluZGlhbg== 7748
+VEM= 7749
+YXRhbG9n 7750
+bmk= 7751
+c2lnbg== 7752
+PDwi 7753
+amk= 7754
+eWE= 7755
+IGRlbW9uc3Ry 7756
+dWxhdGVk 7757
+LlN0 7758
+IGluc3RpdA== 7759
+IGJvb3N0 7760
+IGNlbGxz 7761
+b2xpYw== 7762
+LlBybw== 7763
+Ojwv 7764
+RXZlbnRMaXN0ZW5lcg== 7765
+aWZ5aW5n 7766
+IERp 7767
+b3Jyb3c= 7768
+LmV4ZWN1dGU= 7769
+IGNvbGxlZ2U= 7770
+WW91cg== 7771
+IGxhcmdlc3Q= 7772
+LmRpcw== 7773
+IHF1aQ== 7774
+IGluZGl2aWR1YWxz 7775
+X2J1ZmZlcg== 7776
+IG5n 7777
+U0E= 7778
+IENvbnRyb2w= 7779
+IHNpbmc= 7780
+IHN1aXQ= 7781
+ICAgIAk= 7782
+U0c= 7783
+IGp1bXA= 7784
+IHNtYXJ0 7785
+b21h 7786
+IEV4cA== 7787
+ICct 7788
+IGFzc2lzdA== 7789
+IHN1Y2Nlc3NmdWxseQ== 7790
+c3lz 7791
+IENyZQ== 7792
+X3JlZg== 7793
+IFRodXJzZGF5 7794
+IGJ1cg== 7795
+INC0 7796
+IGJleW9uZA== 7797
+IG5vZGVz 7798
+RGV0YWlscw== 7799
+aW5jdA== 7800
+IEphbWVz 7801
+IGFmZmVjdA== 7802
+ZXhjZXB0aW9u 7803
+IHR5cGVvZg== 7804
+KA0K 7805
+LXNl 7806
+IGZldGNo 7807
+YCw= 7808
+IGNydXNoZXI= 7809
+fS4= 7810
+IEJP 7811
+U2hvdw== 7812
+IHJhdGVz 7813
+IGJvbg== 7814
+LWljb24= 7815
+IE1lZGlh 7816
+UkVTUw== 7817
+IFZhbGlk 7818
+0L7Quw== 7819
+IGZ1Y2s= 7820
+YWNrcw== 7821
+IHN0dWRpZXM= 7822
+TWU= 7823
+IG93bmVycw== 7824
+fWVsc2U= 7825
+IGdyb3dpbmc= 7826
+VmFyaWFibGU= 7827
+IEJlbA== 7828
+LnJhbmRvbQ== 7829
+dmVtZW50 7830
+b255bQ== 7831
+KEY= 7832
+IEZBTFNF 7833
+IHRvcmNo 7834
+KHJvdw== 7835
+aWdv 7836
+c3RydWN0dXJl 7837
+IGNlcnRhaW5seQ== 7838
+RGVw 7839
+IEdyZWVu 7840
+cXVlc3Rpb24= 7841
+IGFkZGluZw== 7842
+IERldmVsb3A= 7843
+X2RlZg== 7844
+IG1hY2g= 7845
+PSU= 7846
+CQkg 7847
+Y29uZHM= 7848
+UHJvamVjdA== 7849
+IHJlamVjdA== 7850
+IM4= 7851
+IHBvb3I= 7852
+IGF3YXJl 7853
+IEJ1aWxk 7854
+IEJyaXRpc2g= 7855
+IE5F 7856
+IG51bWVy 7857
+cmVlcw== 7858
+Y2xhaW0= 7859
+IG1vY2s= 7860
+IG9t 7861
+IHNjcmU= 7862
+T0xE 7863
+LnBs 7864
+ZWxlcg== 7865
+IGNvcnJlc3BvbmQ= 7866
+X0hF 7867
+IGJpbmFyeQ== 7868
+X29yZGVy 7869
+IFNRTA== 7870
+IGFkdmFudA== 7871
+IHByZXY= 7872
+Lls= 7873
+LmFzc2VydEVxdWFs 7874
+cGxpZXI= 7875
+YXJw 7876
+IGNsb3NlZA== 7877
+IGVuY291cg== 7878
+IFFTdHJpbmc= 7879
+YXVk 7880
+IGRldmVsb3BlZA== 7881
+IHBlcm1pc3Npb24= 7882
+LmRlYnVn 7883
+b3BlcmF0b3I= 7884
+ICcK 7885
+IHN5bQ== 7886
+YXRpdmVseQ== 7887
+w6ll 7888
+LWNvbG9y 7889
+IEdFVA== 7890
+a3k= 7891
+IGFsdGhvdWdo 7892
+X3JlcXVlc3Q= 7893
+X2VsZW1lbnQ= 7894
+Li4uLi4uLi4uLi4uLi4uLg== 7895
+X0RBVEE= 7896
+IGFtYXppbmc= 7897
+IHNi 7898
+IERlZmF1bHQ= 7899
+RXZlbnRz 7900
+IGZhaWx1cmU= 7901
+YWNsZQ== 7902
+UHJvcGVydGllcw== 7903
+IGRyZWFt 7904
+IGRpc3Ry 7905
+IGF1 7906
+IGdlbmVyYXRlZA== 7907
+5pU= 7908
+IFRlYW0= 7909
+VVNF 7910
+IGluY29tZQ== 7911
+IGV5ZQ== 7912
+X25vdA== 7913
+Il0s 7914
+X2Zvcm0= 7915
+U3VwcG9ydA== 7916
+b3JkZXJz 7917
+LlByaW50 7918
+dmlsbGU= 7919
+IFdlZG5lc2RheQ== 7920
+b2x2ZXI= 7921
+IG9wcG9z 7922
+aXNhdGlvbg== 7923
+b2xh 7924
+Q2xvc2U= 7925
+PHA= 7926
+X3dpZHRo 7927
+SW52YWxpZA== 7928
+eGI= 7929
+IHN0cnVnZw== 7930
+X2FjdGlvbg== 7931
+IHR4dA== 7932
+IFBhdGg= 7933
+YWxhcg== 7934
+IE1FUkNIQU5UQUJJTElUWQ== 7935
+c2VydmljZQ== 7936
+IE1pY2hhZWw= 7937
+YWJsZVZpZXc= 7938
+RGVidWc= 7939
+b2tlcw== 7940
+U2hl 7941
+IGd1ZXNz 7942
+IEphdmE= 7943
+X1BBVEg= 7944
+IHBhcnRpY3VsYXJseQ== 7945
+IElJ 7946
+IGRvbWFpbg== 7947
+5bm0 7948
+IHJlZHVjZQ== 7949
+LWxlZnQ= 7950
+cmVhbA== 7951
+IGFwcGVhcnM= 7952
+IGNvbW8= 7953
+IFVuaXQ= 7954
+IEdvdmVybg== 7955
+YWxp 7956
+YWxsZWw= 7957
+IEpldw== 7958
+X0k= 7959
+IGNvcw== 7960
+LmNvbG9y 7961
+IEdsb2JhbA== 7962
+IHRlbGU= 7963
+YmVu 7964
+X3RyYW5z 7965
+IHJlYXNvbnM= 7966
+IGVtYg== 7967
+ZW5zaXR5 7968
+bGluZXM= 7969
+b21pbg== 7970
+U2NyZWVu 7971
+0LDRgg== 7972
+cGVjdHM= 7973
+Y2xpcA== 7974
+Zm9v 7975
+cmVudA== 7976
+IGFm 7977
+IGRhbmdlcg== 7978
+aWxpbmc= 7979
+TmFtZXM= 7980
+T3Vy 7981
+IGRpc3RyaWJ1dGlvbg== 7982
+V2hpbGU= 7983
+U0w= 7984
+V3JpdGU= 7985
+IGdvdG8= 7986
+IGNvbG9ycw== 7987
+IHBvd2VyZnVs 7988
+a2lu 7989
+IGRlcHRo 7990
+ZXJjaWFs 7991
+IENvbmdyZXNz 7992
+IE1hcmtldA== 7993
+RGI= 7994
+dW5kZXI= 7995
+IExhc3Q= 7996
+w58= 7997
+Z3JlZw== 7998
+IHBvc3Rz 7999
+X1VSTA== 8000
+b3Rvcw== 8001
+RG9u 8002
+IG1pY3Jv 8003
+IGFycmVzdA== 8004
+0L8= 8005
+IChA 8006
+IEhvdA== 8007
+IEluZGV4 8008
+OyY= 8009
+IyE= 8010
+IE5vcg== 8011
+IENhcA== 8012
+LSg= 8013
+IGludGVyZXN0ZWQ= 8014
+cGVhcg== 8015
+IHJlbnQ= 8016
+IGFsYnVt 8017
+b2xpY3k= 8018
+Lmxhbmc= 8019
+LnRyYW5z 8020
+LmZvcm1hdA== 8021
+IHsNCg0K 8022
+cGhlcmU= 8023
+IGF4aXM= 8024
+IEJ1c2luZXNz 8025
+ZXJzaXN0ZW5jZQ== 8026
+dXJy 8027
+IG1pbmltdW0= 8028
+ZW5kb3I= 8029
+IFNE 8030
+IEludGVybmV0 8031
+5aQ= 8032
+RXhw 8033
+aXZlcnNl 8034
+TU0= 8035
+IG9idmlvdXM= 8036
+IGJhc2lz 8037
+IHNjaWVuY2U= 8038
+IGJ1ZGdldA== 8039
+aXphdGlvbnM= 8040
+UEE= 8041
+IGZsYWdz 8042
+cHJldA== 8043
+TE9DSw== 8044
+IHZhcmlldHk= 8045
+IHRydXRo 8046
+ZHQ= 8047
+IGdvbmU= 8048
+IGJhdHRsZQ== 8049
+PHN0ZA== 8050
+IFNpbA== 8051
+cmY= 8052
+dWRh 8053
+IGVyb3Q= 8054
+IENhbQ== 8055
+IHN0YXRpb24= 8056
+ICc8Lw== 8057
+Y2hlbWU= 8058
+IFN1bg== 8059
+IGZpbmlzaGVk 8060
+IHNob3A= 8061
+IEtvcmU= 8062
+IGVpZ2h0 8063
+X1JFRw== 8064
+TkQ= 8065
+Piw= 8066
+Ij48Pw== 8067
+KG51bQ== 8068
+CWlubGluZQ== 8069
+VHJhbnNhY3Rpb24= 8070
+Lk9u 8071
+IG1haWw= 8072
+cmV5 8073
+cmVzdWx0cw== 8074
+IG5hdg== 8075
+SU1JVA== 8076
+X2lkcw== 8077
+TWFrZQ== 8078
+5Yo= 8079
+TW9kYWw= 8080
+IExPRw== 8081
+IFN1cg== 8082
+IGluc3RhbmNlb2Y= 8083
+IG92ZXJhbGw= 8084
+IEluZm9ybWF0aW9u 8085
+IGNvbnN0cnVjdGlvbg== 8086
+X0ZJTEU= 8087
+YnV0 8088
+IG1lZGlj 8089
+IGR1cmF0aW9u 8090
+aXRuZXNz 8091
+YWdlbnQ= 8092
+QVY= 8093
+IHNldmVu 8094
+b2xm 8095
+IH19Cg== 8096
+Il0sCg== 8097
+IGNhbGxpbmc= 8098
+IGFucw== 8099
+dGhyb3dz 8100
+b3Jpem9udGFs 8101
+IHVzZVN0YXRl 8102
+LmZs 8103
+IFN0YXR1cw== 8104
+IE9ubGluZQ== 8105
+UlI= 8106
+IFJpY2g= 8107
+IEhpbGw= 8108
+IGJyYWlu 8109
+IGZvbGxvd2Vk 8110
+ZW1pYw== 8111
+IHNsaWdodA== 8112
+IGluc3VyYW5jZQ== 8113
+LkFycmF5 8114
+IGFic3RyYWN0 8115
+IFN1bQ== 8116
+cmVkaXJlY3Q= 8117
+b3duZXI= 8118
+KG1zZw== 8119
+IENsaW50b24= 8120
+Tm9u 8121
+CWV4 8122
+IHZvbHVtZQ== 8123
+IEV2ZW50QXJncw== 8124
+LUw= 8125
+IERpbQ== 8126
+IE1hcnQ= 8127
+IGN1cnNvcg== 8128
+IGltcGxlbWVudGF0aW9u 8129
+dXJyZWQ= 8130
+IGxhcmdlcg== 8131
+KTsKCgo= 8132
+Jys= 8133
+LnRyYW5zZm9ybQ== 8134
+IHVwbG9hZA== 8135
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA== 8136
+RHJhdw== 8137
+bmVs 8138
+CWZsb2F0 8139
+cXJ0 8140
+IE5ldHdvcms= 8141
+IHRpdA== 8142
+QXhpcw== 8143
+LmFuZHJvaWQ= 8144
+IGNvbXBsZXRlZA== 8145
+IG11cg== 8146
+IGNvbHVtbnM= 8147
+eGM= 8148
+IHN1cHBseQ== 8149
+aW1pbmFs 8150
+IHNwcg== 8151
+PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PQ== 8152
+IHVuaXRz 8153
+KHU= 8154
+bWk= 8155
+cmVwbGFjZQ== 8156
+W2tleQ== 8157
+4Lk= 8158
+YW50aWM= 8159
+IHBheW1lbnQ= 8160
+LEI= 8161
+IEFwcGxl 8162
+Z2lu 8163
+UmVxdWlyZWQ= 8164
+Iys= 8165
+bGFuZHM= 8166
+IHNxdQ== 8167
+IGZhY3Rvcg== 8168
+ZGVj 8169
+IHN0cmVuZ3Ro 8170
+IGJveQ== 8171
+IGJhbGFuY2U= 8172
+IHNvdXJjZXM= 8173
+c2NyZWVu 8174
+LXRvcA== 8175
+IEFtYXpvbg== 8176
+IGhpZGRlbg== 8177
+0LXRgg== 8178
+X2NsaWVudA== 8179
+IGVhdA== 8180
+LmRpc3BsYXk= 8181
+IMK7 8182
+IHRyaWdnZXI= 8183
+YW5hZ2Vy 8184
+IHRybw== 8185
+IGNsYWltcw== 8186
+Zm9yZA== 8187
+IENvbXBhbnk= 8188
+IGdpZnQ= 8189
+LDo= 8190
+X2FwcA== 8191
+aGFuZGxl 8192
+IHByb2R1Y2U= 8193
+L2xpYg== 8194
+IC0q 8195
+CXNldA== 8196
+J107 8197
+YXJj 8198
+YW5kZXI= 8199
+IEVuZ2luZQ== 8200
+IGF0dHJpYnV0ZXM= 8201
+dGFzaw== 8202
+PD0= 8203
+KE4= 8204
+IHdhcm0= 8205
+d2hpY2g= 8206
+IEZvcmU= 8207
+YWdub3N0 8208
+bXlz 8209
+IHRhbA== 8210
+IFNhbA== 8211
+Z2k= 8212
+IFByaW50 8213
+IFRSVUU= 8214
+INC+ 8215
+LlVJ 8216
+IGZsYXNo 8217
+cm9wZXJ0eQ== 8218
+LmxvY2F0aW9u 8219
+IE1pbGw= 8220
+Ymk= 8221
+Y29udHI= 8222
+LnJlcXVlc3Q= 8223
+IFNhbQ== 8224
+IG5lZ2F0aXZl 8225
+a2l0 8226
+IHNldHQ= 8227
+LnByaW50U3RhY2tUcmFjZQ== 8228
+YWJl 8229
+CWk= 8230
+IGJ1cm4= 8231
+IHNvY2lldHk= 8232
+Q2FjaGU= 8233
+IFNlY3VyaXR5 8234
+Lm1vZGVscw== 8235
+IFdBUlJBTlRZ 8236
+X3Vw 8237
+Y2VpdmU= 8238
+IGNsaWVudHM= 8239
+LlRy 8240
+IHByb3ZpZGluZw== 8241
+IHJvdXQ= 8242
+bWF0ZXJpYWw= 8243
+IHx8Cg== 8244
+IFNlcg== 8245
+IE9mZmljZQ== 8246
+RlRXQVJF 8247
+ICck 8248
+IGZvYw== 8249
+IGV4Y2VsbA== 8250
+IGNhdA== 8251
+bm9ybWFs 8252
+IGRldGVybWluZQ== 8253
+CXVpbnQ= 8254
+UGFuZQ== 8255
+IGVtcGxveWVlcw== 8256
+IFRleGFz 8257
+IHRyYWZm 8258
+IFJlcG9ydA== 8259
+YW50YQ== 8260
+IEJveA== 8261
+IGRqYW5nbw== 8262
+IHBhcnRuZXI= 8263
+RUI= 8264
+TElORQ== 8265
+IGZlZWxpbmc= 8266
+IGNpdmls 8267
+KGZsb2F0 8268
+U3Fs 8269
+IHdvdWxkbg== 8270
+LmluaXQ= 8271
+LmxlZnQ= 8272
+LXY= 8273
+X2xldmVs 8274
+J30= 8275
+QUY= 8276
+IGxvYWRpbmc= 8277
+IE9ubHk= 8278
+IGNvb2tpZXM= 8279
+IEds 8280
+Q08= 8281
+IHN0cmF0ZWd5 8282
+KCcuLw== 8283
+IHNoaXA= 8284
+cG9zZXM= 8285
+IHNpZ25hbA== 8286
+IGFscGhh 8287
+LnBvcA== 8288
+UmFkaXVz 8289
+IHJlcGxhY2U= 8290
+X0RJUg== 8291
+Y291bnRlcg== 8292
+YnNlcnZhYmxl 8293
+ZWxh 8294
+V2VpZ2h0 8295
+aGFzaA== 8296
+Ym9zZQ== 8297
+Zng= 8298
+IEVtYWls 8299
+IHJlZmVy 8300
+bG9jYWxob3N0 8301
+X1JP 8302
+aXF1ZXM= 8303
+U3RlcA== 8304
+IGFoZWFk 8305
+KFZpZXc= 8306
+IFNlcnZpY2Vz 8307
+IEpzb24= 8308
+ZXNzb3I= 8309
+IHB1bg== 8310
+IGFwcHJvcHJpYXRl 8311
+YWtlcnM= 8312
+b3Nlbg== 8313
+cG9zaW5n 8314
+IGFnZW50 8315
+ZmM= 8316
+IHRyYW5zZmVy 8317
+IGludmFsaWQ= 8318
+IFJlc2VhcmNo 8319
+VmVydGV4 8320
+IGdheQ== 8321
+IGpvdXJuYWw= 8322
+W3g= 8323
+ICIiLAo= 8324
+IFdlbGw= 8325
+LlRhc2tz 8326
+U3BlYw== 8327
+IG9s 8328
+IHNwZW5k 8329
+IEF1c3RyYWxpYQ== 8330
+TWF0Y2g= 8331
+Lmp1bml0 8332
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA== 8333
+IE1BWA== 8334
+aXphYmxl 8335
+Y2x1c2l2ZQ== 8336
+X3ZhbGlk 8337
+IHF1YXJ0ZXI= 8338
+eWFu 8339
+IEVkaXQ= 8340
+YXJkZW4= 8341
+PW5ldw== 8342
+IGZyYWc= 8343
+Qml0 8344
+emk= 8345
+YWluZQ== 8346
+dWRk 8347
+Lk9iamVjdA== 8348
+ZGVidWc= 8349
+IGNhc2g= 8350
+X0lN 8351
+IGVlbg== 8352
+IGNvbW1lcmNpYWw= 8353
+IFZpZGVv 8354
+bG9hZGVy 8355
+IGZpeGVk 8356
+IGFwcGxpY2F0aW9ucw== 8357
+IF8s 8358
+IFJ1c3NpYQ== 8359
+aXRlY3Q= 8360
+Xyg= 8361
+IEJsb2Nr 8362
+IHNhbg== 8363
+IFRvbQ== 8364
+IHBlcmhhcHM= 8365
+IHNpZw== 8366
+bGV2YW50 8367
+IGNvcnBvcg== 8368
+YXRhc2V0 8369
+cm9uaWM= 8370
+eGU= 8371
+IGV0aA== 8372
+U29tZQ== 8373
+cG9w 8374
+X09L 8375
+IHRlbmQ= 8376
+LlJlcw== 8377
+X2FuZA== 8378
+IHJldmlld3M= 8379
+IHdpbGQ= 8380
+IGRlZ3JlZQ== 8381
+Lk8= 8382
+Lm9iamVjdHM= 8383
+X2FyZ3M= 8384
+bmls 8385
+IGRpc2FibGVk 8386
+UGFyZW50 8387
+IG5vdGVz 8388
+ICIiCg== 8389
+KHN0YXRl 8390
+aXN0cmljdA== 8391
+IGxvZ2dpbmc= 8392
+LklP 8393
+IE1hbA== 8394
+RE0= 8395
+IHhtbA== 8396
+IFJvYmVydA== 8397
+ZWxlbg== 8398
+bGF5b3V0 8399
+Zm9s 8400
+J10pKQ== 8401
+LGI= 8402
+IEplcg== 8403
+ZmlsZW5hbWU= 8404
+IGZhbg== 8405
+IEN1c3RvbQ== 8406
+PSIi 8407
+IERpZQ== 8408
+QnVuZGxl 8409
+LnV0aWxz 8410
+IHRyaXA= 8411
+TUI= 8412
+IHNvZnQ= 8413
+X01PREU= 8414
+IGFwcGxpY2FibGU= 8415
+IHVwcGVy 8416
+RVJWRVI= 8417
+X2Fs 8418
+X0xPRw== 8419
+SGVyZQ== 8420
+d3A= 8421
+IFNlcnZlcg== 8422
+IENsaWVudA== 8423
+IGNoZW0= 8424
+U2Nyb2xs 8425
+IGhpZ2hlc3Q= 8426
+IFNlbGVjdA== 8427
+ICJA 8428
+IFdoeQ== 8429
+U2Vj 8430
+aGVlbA== 8431
+T3BlcmF0aW9u 8432
+IGNvbm5lY3RlZA== 8433
+aXJtZWQ= 8434
+IGNpdGl6 8435
+IENoZQ== 8436
+IGZvcmNlcw== 8437
+IHd3dw== 8438
+Um9vdA== 8439
+QU5DRQ== 8440
+TWFueQ== 8441
+aWNpcA== 8442
+cmdhbg== 8443
+IFRvcg== 8444
+IFByZXNz 8445
+IE1vcg== 8446
+LWxpbmU= 8447
+dWxlZA== 8448
+Plw= 8449
+IHRodXM= 8450
+IFJlZ2lzdGVy 8451
+aG9s 8452
+IENoaW5lc2U= 8453
+IHBvc3RlZA== 8454
+IG1hZ24= 8455
+YWJpbGl0aWVz 8456
+IGRpc2Vhc2U= 8457
+IHJlbWFpbnM= 8458
+IFByb2Y= 8459
+LWZvcm0= 8460
+IGNpbg== 8461
+b3JnYW4= 8462
+aWNhdGU= 8463
+IHN0cmVzcw== 8464
+XSo= 8465
+IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0= 8466
+X2NvbnRleHQ= 8467
+b3JyeQ== 8468
+IGRpZWQ= 8469
+bWF0 8470
+IHN0YXJ0cw== 8471
+Lk1lc3NhZ2U= 8472
+IHJ1bnM= 8473
+IGd1aWRl 8474
+IHdhcnJhbnR5 8475
+ZW50aWFscw== 8476
+ZGljdA== 8477
+IFNpemU= 8478
+dWxlcg== 8479
+IHJlc3BvbnNpYmxl 8480
+X1NFVA== 8481
+IGNvbnRhaW5pbmc= 8482
+IFByaWNl 8483
+fHw= 8484
+RlM= 8485
+IGVtcA== 8486
+X2J1dHRvbg== 8487
+KHVpbnQ= 8488
+IHN1ZmY= 8489
+cHRo 8490
+IGRlZmluaXRlbHk= 8491
+cHV0ZQ== 8492
+IG1hcmtldGluZw== 8493
+IFdI 8494
+IFNpZQ== 8495
+Kz0= 8496
+T0xPUg== 8497
+IGNvbnN1bHQ= 8498
+IHNpZ25lZA== 8499
+IHNlcXVlbmNl 8500
+bGVl 8501
+IHJlcXVpcmVtZW50cw== 8502
+aHk= 8503
+RXhwcmVzcw== 8504
+TVQ= 8505
+c2V5 8506
+IHVsdA== 8507
+5a4= 8508
+ZWxsaWdlbmNl 8509
+IGFuYWx5 8510
+IGRyZXNz 8511
+ZW5naW5l 8512
+IEdyZWF0 8513
+IEFuZHJvaWQ= 8514
+IEFsZXg= 8515
+bW9kZQ== 8516
+RGljdGlvbmFyeQ== 8517
+LkRhdGU= 8518
+5L0= 8519
+VklDRQ== 8520
+IGZhbWlsaWVz 8521
+IFJ1c3NpYW4= 8522
+IFRpbWVz 8523
+LmNhbGw= 8524
+JCg= 8525
+UHJvZmlsZQ== 8526
+IGZvbGRlcg== 8527
+Y2hlcw== 8528
+IGxlZ2lz 8529
+X3Jvdw== 8530
+dW5lcw== 8531
+2YQ= 8532
+IH0pLg== 8533
+QXNzZXJ0 8534
+YWdlbg== 8535
+IEhhbmQ= 8536
+SXRlcg== 8537
+IGJpZ2dlc3Q= 8538
+b3JlYWNo 8539
+IHBvbGlj 8540
+IHBlcm1pc3Npb25z 8541
+IHNob3dlZA== 8542
+IEVsZW1lbnQ= 8543
+IHRvcGlj 8544
+4oCU4oCU 8545
+cm9hZA== 8546
+IEJhbms= 8547
+cmVjb3Jk 8548
+IHBhcnRuZXJz 8549
+IFJlZg== 8550
+ZXNzaW9ucw== 8551
+IGFzc2Vzcw== 8552
+VVNU 8553
+IFBhcnR5 8554
+cHJvZHU= 8555
+TEM= 8556
+IHVs 8557
+LmZvcm0= 8558
+aGlkZQ== 8559
+Y29weQ== 8560
+VVRG 8561
+IFNPRlRXQVJF 8562
+DQoNCg0K 8563
+IExpbg== 8564
+dW5h 8565
+dWdhcg== 8566
+IGFkbWluaXN0cmF0aW9u 8567
+IG9wZW5pbmc= 8568
+IHNjYW4= 8569
+IGNvbnRpbnVlZA== 8570
+Y29tcG9uZW50 8571
+LnNw 8572
+IGhhcHBlbnM= 8573
+dW1teQ== 8574
+IFBS 8575
+LkZpbGU= 8576
+IERvd25sb2Fk 8577
+TG9hZGluZw== 8578
+ZGk= 8579
+IHdhaXRpbmc= 8580
+X0FERA== 8581
+VGFi 8582
+LnF1ZXJ5U2VsZWN0b3I= 8583
+IGVjb25vbXk= 8584
+IEZyZW5jaA== 8585
+dHh0 8586
+IGZhbnQ= 8587
+XzsK 8588
+SG9sZGVy 8589
+U0g= 8590
+IG51bXB5 8591
+IHN0cmVldA== 8592
+IG1hbGU= 8593
+XE1vZGVs 8594
+YW5naW5n 8595
+IEJpbGw= 8596
+IHByZXZpb3VzbHk= 8597
+Qkk= 8598
+IFNlY3JldA== 8599
+IG1pc3Q= 8600
+IEZpZWxk 8601
+dXBz 8602
+IFByb2Nlc3M= 8603
+IGtlcHQ= 8604
+IE9U 8605
+IHRyYWRpdGlvbmFs 8606
+Lmk= 8607
+YW1pbg== 8608
+IGhlbHBz 8609
+QW55 8610
+b3JpZ2lu 8611
+aWx0ZXJz 8612
+anU= 8613
+ZGVzYw== 8614
+IEFjY291bnQ= 8615
+ICkNCg== 8616
+a3RvcA== 8617
+b2xseQ== 8618
+IGZz 8619
+IOo= 8620
+IHV0 8621
+IGNlbnRyYWw= 8622
+KHRlc3Q= 8623
+LkFu 8624
+IHNhdGlzZg== 8625
+R1I= 8626
+IEZ1bGw= 8627
+IGhlYXQ= 8628
+aWJlcg== 8629
+IG9udG8= 8630
+bW9z 8631
+U2NoZW1h 8632
+IGZhY3Rvcnk= 8633
+Ii4k 8634
+YXdz 8635
+U3RhdGVtZW50 8636
+KHRhcmdldA== 8637
+CW5ldw== 8638
+LmJl 8639
+IGd1ZXN0 8640
+IG1hbA== 8641
+QVJZ 8642
+IHJlYWNoZWQ= 8643
+IG1vdXNl 8644
+IGNoYWxsZW5nZQ== 8645
+CWRvdWJsZQ== 8646
+IFRlbQ== 8647
+IHRlcnJvcg== 8648
+IGV4dHJhY3Q= 8649
+X1RP 8650
+IHNlcGFyYXRl 8651
+IG1pcg== 8652
+aGVscA== 8653
+IGNhcGFjaXR5 8654
+IFByb3BlcnR5 8655
+a2Fu 8656
+X2NyZWF0ZQ== 8657
+IExpZ2h0 8658
+LnBhcmVudA== 8659
+IHVuZGVyc3RhbmRpbmc= 8660
+IGVhc2llcg== 8661
+IHw9 8662
+IGVuaA== 8663
+IGZhdA== 8664
+IHByb3Rlc3Q= 8665
+YW1t 8666
+X0FU 8667
+LW9m 8668
+aWxz 8669
+IE9o 8670
+IHBzeWNo 8671
+ICQu 8672
+aW5kcw== 8673
+IHJlbGF0aXZl 8674
+c2hvcA== 8675
+c2hvcnQ= 8676
+IFNhbmQ= 8677
+dWVzdGlvbg== 8678
+IGZlYXI= 8679
+LwoK 8680
+LmNvbnRleHQ= 8681
+IHNjaG9vbHM= 8682
+IHNlcnZl 8683
+em9uZQ== 8684
+X2Ri 8685
+IG1ham9yaXR5 8686
+ZXhhbXBsZQ== 8687
+IGxhbmc= 8688
+CSAg 8689
+UmVnaXN0ZXI= 8690
+ZW5kbw== 8691
+IHByb2Nlc3Npbmc= 8692
+X3RlbXBsYXRl 8693
+LXVzZXI= 8694
+IGVn 8695
+Q09N 8696
+IEJsdWU= 8697
+aXJv 8698
+IHJlbW90ZQ== 8699
+IElU 8700
+IyEv 8701
+IHJlZGlzdHJpYg== 8702
+cmF6 8703
+IFNpbmNl 8704
+IFR1cg== 8705
+QmFja2dyb3VuZA== 8706
+PT09 8707
+IHJlZmxlY3Q= 8708
+IHByb3M= 8709
+Y21k 8710
+IHdob20= 8711
+Q29tcGF0 8712
+IEFyZQ== 8713
+SWRlbnRpZmllcg== 8714
+IFRob20= 8715
+X3BvcnQ= 8716
+Z3U= 8717
+IG1vbml0b3I= 8718
+cm0= 8719
+IHBhdGllbnQ= 8720
+dmVydGVy 8721
+IGdhaW4= 8722
+LXVp 8723
+SW5zdA== 8724
+IGRpZXM= 8725
+QXJlYQ== 8726
+X2ZpbHRlcg== 8727
+IGdyYXQ= 8728
+IHJlYWxpdHk= 8729
+b3JkaW5hdGU= 8730
+b2x2ZWQ= 8731
+Q29udGFjdA== 8732
+IGNvbXBsaWFuY2U= 8733
+X29y 8734
+IFZhcg== 8735
+ZGw= 8736
+IGFwcGVuZA== 8737
+R0VS 8738
+KG1heA== 8739
+LnJlbmRlcg== 8740
+IGR5bmFtaWM= 8741
+b3JkaW5hdGVz 8742
+X29wdGlvbnM= 8743
+X2NvbHVtbg== 8744
+IGJhdHRlcg== 8745
+c3BhY2U= 8746
+TGE= 8747
+IFNvdXJjZQ== 8748
+L2Jpbg== 8749
+IGRvcw== 8750
+IEJvYXJk 8751
+IFRocmVhZA== 8752
+IEFM 8753
+KGNvbmZpZw== 8754
+IE1lcg== 8755
+IG1pbGVz 8756
+X2hlYWRlcg== 8757
+RVRIT0Q= 8758
+aXp6 8759
+IGJlbmVmaXQ= 8760
+IGludGVncg== 8761
+KGN1cnJlbnQ= 8762
+dWxv 8763
+LmRlZmF1bHQ= 8764
+IERpdg== 8765
+IHRvbg== 8766
+b3Ro 8767
+ZXJ2YXRpb24= 8768
+ZWRvbQ== 8769
+IGJhYnk= 8770
+Y2VpdmVk 8771
+LnRvcA== 8772
+cmlvcml0eQ== 8773
+IExvY2Fs 8774
+cmlhZ2U= 8775
+IGF0dGFja3M= 8776
+IGhvc3BpdGFs 8777
+IGZlbWFsZQ== 8778
+IExvZ2lu 8779
+IEZsb3I= 8780
+IGNoYWlu 8781
+YXNoaW9u 8782
+VGV4dHVyZQ== 8783
+U2F2ZQ== 8784
+IGZhcm0= 8785
+LmNvbnRhaW5z 8786
+LlRlc3Q= 8787
+IGtub3dz 8788
+IGdlbmVyYWxseQ== 8789
+aXBlbGluZQ== 8790
+IG1lYW50 8791
+ZW5jaWE= 8792
+IG5pY2h0 8793
+IGNvbnRlbnRz 8794
+UE0= 8795
+Y2hlZHVsZQ== 8796
+KGxpbmU= 8797
+Q0c= 8798
+am9i 8799
+IFJlYWw= 8800
+dWVy 8801
+ZmlybQ== 8802
+INg= 8803
+ZXRybw== 8804
+ImAK 8805
+IHNwZWVjaA== 8806
+IHRocg== 8807
+Zm9yZWFjaA== 8808
+IHdhcm4= 8809
+CWw= 8810
+IGhlYXZ5 8811
+PGxp 8812
+TmU= 8813
+IGludmVzdGlnYXRpb24= 8814
+TWF0aA== 8815
+LXRpdGxl 8816
+IGNodXJjaA== 8817
+IGRlc3BpdGU= 8818
+Y2hhaW4= 8819
+IHdoYXRldmVy 8820
+YXJpYW4= 8821
+Zm4= 8822
+IG1ldGE= 8823
+fSkKCg== 8824
+VUZG 8825
+IHJlZ2FyZGluZw== 8826
+X1NVQ0NFU1M= 8827
+bWVz 8828
+IEludGVudA== 8829
+IHJlc29sdmU= 8830
+cG9zcw== 8831
+aXJh 8832
+Zm9yY2U= 8833
+b2ljZQ== 8834
+w6I= 8835
+IHBt 8836
+IHVwZGF0ZXM= 8837
+QXJy 8838
+INE= 8839
+dGVzdGluZw== 8840
+IHRvd2FyZA== 8841
+bnRheA== 8842
+64s= 8843
+IGxpc3Rlbg== 8844
+IGdvYWxz 8845
+SW5zdGFuY2VTdGF0ZQ== 8846
+RHI= 8847
+IHJhcmU= 8848
+IHRyYWls 8849
+S2V5cw== 8850
+Q2Fs 8851
+Q2Fy 8852
+IFBlb3BsZQ== 8853
+CWxvY2Fs 8854
+Y2xhc3Nlcw== 8855
+UmVmZXJlbmNl 8856
+LmZvckVhY2g= 8857
+ZW1i 8858
+YWN0aXY= 8859
+IHByaW0= 8860
+cmVkaWN0 8861
+IHJhZA== 8862
+5pWw 8863
+LkJhY2s= 8864
+IHNwcmVhZA== 8865
+IGNsb2Nr 8866
+IHZpcg== 8867
+ZWRpdG9y 8868
+IGVmZm9ydHM= 8869
+IGJyYW5jaA== 8870
+IGluZHVzdA== 8871
+IG1vdG9y 8872
+IGFtYg== 8873
+IGRhdGV0aW1l 8874
+IHJlbmNvbnQ= 8875
+IENocmlzdGlhbg== 8876
+IEFtZXJpY2Fucw== 8877
+ZnVsbA== 8878
+IGZtdA== 8879
+Lm1haW4= 8880
+IGNhdXNlZA== 8881
+X3VwZGF0ZQ== 8882
+IENvbnRlbnQ= 8883
+QVRDSA== 8884
+IGJhdGg= 8885
+IEVhY2g= 8886
+IHJhZGlv 8887
+YWNobWVudA== 8888
+dXp6 8889
+U3VibWl0 8890
+IHJlc3RyaWN0 8891
+YWJpbg== 8892
+IExvYWQ= 8893
+IGV4dGVuc2lvbg== 8894
+IGVzc2F5 8895
+IGhhdA== 8896
+YXZpb3Vy 8897
+dG9CZQ== 8898
+Ijpb 8899
+IG9mZmVyZWQ= 8900
+IHZpbGw= 8901
+KGRvdWJsZQ== 8902
+5pel 8903
+YmM= 8904
+X2ZyZWU= 8905
+IE1pc3M= 8906
+IEJlcg== 8907
+IOg= 8908
+IExpa2U= 8909
+IGhlbHBlZA== 8910
+LmdldE5hbWU= 8911
+X0FM 8912
+IHNwaXJpdA== 8913
+IEFwYWNoZQ== 8914
+d3M= 8915
+IHRoZXJlZm9yZQ== 8916
+KHBhcmFtcw== 8917
+X2ltZw== 8918
+IHBlYWNl 8919
+IGluY29y 8920
+IEVYUEVDVA== 8921
+IG1pbm9y 8922
+aXBlcw== 8923
+CWRhdGE= 8924
+c2VsZWN0b3I= 8925
+Y2l0eQ== 8926
+dHJpZQ== 8927
+LmJhc2U= 8928
+X2ZyYW1l 8929
+IG9wZW5lZA== 8930
+L2pzb24= 8931
+TFk= 8932
+bnU= 8933
+LkRl 8934
+dGY= 8935
+bWFyZ2lu 8936
+LlBhcnNl 8937
+IHBp 8938
+IGVx 8939
+YmQ= 8940
+RmllbGRz 8941
+IFRyZWU= 8942
+IGJhbg== 8943
+aXN0YW4= 8944
+CiAgICAgICAgCg== 8945
+CWds 8946
+IHByb2R1Y2Vk 8947
+c3lzdGVt 8948
+TWFyaw== 8949
+X2hhc2g= 8950
+IGJn 8951
+IGNvbnN0aXQ= 8952
+IExlYWd1ZQ== 8953
+IG1pc3Npb24= 8954
+X2Zvcm1hdA== 8955
+KFsK 8956
+Y2x1c2lvbg== 8957
+ISI= 8958
+0Lc= 8959
+YnJlYWs= 8960
+CXN3aXRjaA== 8961
+IHRoZXI= 8962
+VHJhbnNmb3Jt 8963
+IGZvb3RiYWxs 8964
+LWxpbms= 8965
+cm91dGU= 8966
+LmF1dGg= 8967
+IGJhZw== 8968
+b3ZlcnM= 8969
+IGVuYWJsZWQ= 8970
+IHJhYw== 8971
+KEk= 8972
+Q1I= 8973
+YW5jaW5n 8974
+IG1hbmFnZWQ= 8975
+X3E= 8976
+TkdUSA== 8977
+IG1hYw== 8978
+IEF1dG8= 8979
+YW1lbnRl 8980
+ICcnLA== 8981
+LkFwcGVuZA== 8982
+IHBpbg== 8983
+Lml0ZW0= 8984
+YWNraW5n 8985
+IG9jY2Fz 8986
+cGVyc29u 8987
+IHRp 8988
+LlJlZw== 8989
+IGhhdmVu 8990
+IGdsYXNz 8991
+ICI8Lw== 8992
+IFNpbXBsZQ== 8993
+UHJpbnQ= 8994
+IHN1cnJvdW5k 8995
+Tk8= 8996
+44CCCg== 8997
+ICAgICAgICANCg== 8998
+IE1hbnk= 8999
+ICJf 9000
+IHdlZWtlbmQ= 9001
+IHNvbWV3 9002
+LnBhcmFtcw== 9003
+c21hbGw= 9004
+QVRFRA== 9005
+IHBsdWdpbg== 9006
+ZmllbGRz 9007
+IEluaXRpYWxpemU= 9008
+b29u 9009
+YXRpbGU= 9010
+eWU= 9011
+IHZvdXM= 9012
+TEFH 9013
+IG9sZGVy 9014
+IGdhbQ== 9015
+IGV4dHJlbWVseQ== 9016
+IGhldA== 9017
+ZW51bQ== 9018
+IFNFVA== 9019
+eGZm 9020
+IHRpbWVy 9021
+L2luZGV4 9022
+IGNyaXRpY2Fs 9023
+Um93cw== 9024
+X2FyZ3VtZW50 9025
+IGV4ZWN1dGU= 9026
+IHNob3dpbmc= 9027
+LnhtbA== 9028
+LWxpc3Q= 9029
+Um9sZQ== 9030
+dHlwZW5hbWU= 9031
+X21ldGhvZA== 9032
+dGhhdA== 9033
+Y2hlcg== 9034
+IOKG 9035
+WFQ= 9036
+IHRob3VzYW5kcw== 9037
+CW4= 9038
+IHJlc3A= 9039
+X3ByaWNl 9040
+b2x1dA== 9041
+QWc= 9042
+IFR3bw== 9043
+IGJlY29tZXM= 9044
+IGh1cw== 9045
+LlVzZQ== 9046
+dGhlbWU= 9047
+dXJi 9048
+IC8qCg== 9049
+ZXJpYWxpemU= 9050
+QVJO 9051
+IGxvc2U= 9052
+TG93ZXI= 9053
+IHZlbA== 9054
+IGRlZmVuc2U= 9055
+Y29uZGl0aW9u 9056
+IGJlcw== 9057
+IGRyeQ== 9058
+IHNjcm9sbA== 9059
+LlNob3c= 9060
+SUVM 9061
+0L7RgA== 9062
+IFJlc3Q= 9063
+V2hlcmU= 9064
+b29kcw== 9065
+IEplcw== 9066
+IHdpcmU= 9067
+X0lORk8= 9068
+IHN0cmluZ3M= 9069
+Z21lbnQ= 9070
+IG1hdGNoZXM= 9071
+IGVsZWN0cmlj 9072
+IGV4Y2VsbGVudA== 9073
+IENvdW5jaWw= 9074
+aWRhZGU= 9075
+IHd4 9076
+cHVzaA== 9077
+X2VudHJ5 9078
+IHRhc2tz 9079
+IHJpY2g= 9080
+c2E= 9081
+IFNtaXRo 9082
+VU5DVElPTg== 9083
+UG9pbnRlcg== 9084
+cGVjdGl2ZQ== 9085
+IHdpZGdldA== 9086
+aXN0YQ== 9087
+IGFnZW5jeQ== 9088
+IHNpY2g= 9089
+b2xvZ2llcw== 9090
+IHRyaWFs 9091
+YWx5c2lz 9092
+LmNoZWNr 9093
+QVJL 9094
+IG9uQ2hhbmdl 9095
+YWJvdXQ= 9096
+Jywk 9097
+KHZhbA== 9098
+IHBsYWNlZA== 9099
+X05P 9100
+IGRhbg== 9101
+LmVxdWFs 9102
+CSAgICAg 9103
+IHdlYXRoZXI= 9104
+LmdhbWU= 9105
+IGRlc3RpbmF0aW9u 9106
+X1VTRVI= 9107
+aWVjZQ== 9108
+IHByb3ZpZGVy 9109
+Lmxhc3Q= 9110
+cGxleA== 9111
+Tm90ZQ== 9112
+L2pz 9113
+IHDDpQ== 9114
+IHBsYW5uaW5n 9115
+YXR0cmlidXRl 9116
+UFJP 9117
+YXRjaGVz 9118
+IDwt 9119
+IHNlZWluZw== 9120
+IGNhbmNlbA== 9121
+X2luZA== 9122
+LmtleXM= 9123
+IHZpc3VhbA== 9124
+IEN1cnJlbnQ= 9125
+IENvbGxlZ2U= 9126
+IFJvY2s= 9127
+IGFncmVlbWVudA== 9128
+IFN0b3Jl 9129
+b3Zpbmc= 9130
+IGNvcm5lcg== 9131
+YW1waW9ucw== 9132
+SVNF 9133
+Rmlu 9134
+IHByb3RlY3Rpb24= 9135
+IGZp 9136
+UGxheQ== 9137
+cGx1Z2lu 9138
+KX0= 9139
+LmZyYW1l 9140
+LXo= 9141
+IHRyYW5zaXRpb24= 9142
+aWdpbg== 9143
+IGNhbmRpZGF0ZQ== 9144
+IFVuaW9u 9145
+X3ZhbHVlcw== 9146
+KG1hcA== 9147
+Y2xl 9148
+IHRyZW5k 9149
+d2lkZQ== 9150
+YXJlbg== 9151
+TG9j 9152
+VVRI 9153
+IEJheQ== 9154
+IHNtYWxsZXI= 9155
+aXVz 9156
+d2VsbA== 9157
+IGNyaW1pbmFs 9158
+IGNvbmZsaWM= 9159
+YmVydA== 9160
+X0lOVA== 9161
+IGludmVzdG1lbnQ= 9162
+Y3VzdG9t 9163
+IFNlc3Npb24= 9164
+X3dyaXRl 9165
+YW5pYQ== 9166
+IE1hc3M= 9167
+X0VR 9168
+X05PVA== 9169
+IHZpb2xlbmNl 9170
+QXJndW1lbnQ= 9171
+X2VtYWls 9172
+IGJlbG9uZw== 9173
+X2Z1bmN0aW9u 9174
+IGVuZW15 9175
+ZW1h 9176
+IEFkZHJlc3M= 9177
+LmVtcHR5 9178
+IGlubmVy 9179
+IENvbnRhY3Q= 9180
+TG9hZGVy 9181
+PGlucHV0 9182
+IENB 9183
+bG90 9184
+IHBpY3R1cmVz 9185
+IFN1cHBvcnQ= 9186
+X25hbWVz 9187
+TGF5ZXI= 9188
+IENsaWNr 9189
+U3Vt 9190
+w6Y= 9191
+IExvb2s= 9192
+dW91cw== 9193
+TGli 9194
+RmxhZ3M= 9195
+dGVhbQ== 9196
+RVA= 9197
+aGF0 9198
+b3ZlcnJpZGU= 9199
+YXBzZWQ= 9200
+IGxhYmVscw== 9201
+cXVpcw== 9202
+IFN0cmVhbQ== 9203
+X2RldmljZQ== 9204
+IENvbW1pdA== 9205
+KHJvb3Q= 9206
+In0= 9207
+LmlzRW1wdHk= 9208
+CU0= 9209
+IGFuZ2xl 9210
+IEJlY2F1c2U= 9211
+JSUlJSUlJSU= 9212
+IGFpbQ== 9213
+IHN0aWNr 9214
+c3RtdA== 9215
+YWdyYXBo 9216
+YW5zd2Vy 9217
+IGNsaW4= 9218
+IElzbA== 9219
+LmV4dA== 9220
+IElOVA== 9221
+IHN0eWxlcw== 9222
+IGJvcm4= 9223
+IHNjcg== 9224
+IGV4cGFuZA== 9225
+IHJhaXNlZA== 9226
+VGV4dEJveA== 9227
+SUxM 9228
+LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0t 9229
+SFRUUA== 9230
+Pik= 9231
+X2NoYXI= 9232
+cmVzb3VyY2U= 9233
+IGVwaXNvZGU= 9234
+ICdf 9235
+IEVz 9236
+IEVhcnRo 9237
+wqDCoA== 9238
+VVBEQVRF 9239
+IFNvdQ== 9240
+dWlz 9241
+dHlwZXM= 9242
+IG1hcw== 9243
+IGZhdg== 9244
+IGNvbnN0cnVjdA== 9245
+X3JhdGU= 9246
+ZXJhcw== 9247
+IHwK 9248
+cm9wZXJ0aWVz 9249
+IGV4dGVybmFs 9250
+IGFwcGxpZWQ= 9251
+IHByZWZpeA== 9252
+b3RlZA== 9253
+bGVycw== 9254
+IGNvbGQ= 9255
+IFNQ 9256
+IENodXJjaA== 9257
+IE91dHB1dA== 9258
+bG9zZWQ= 9259
+55o= 9260
+aWZpY2F0ZQ== 9261
+b3BlcmF0aW9u 9262
+aGVyaXQ= 9263
+eEZG 9264
+LmVudg== 9265
+X2Vycg== 9266
+b3No 9267
+RGlyZWN0aW9u 9268
+Q2FuY2Vs 9269
+IEZyYW5r 9270
+IGZpbmRpbmc= 9271
+LikKCg== 9272
+IHJvdXRlcg== 9273
+44O7 9274
+c2Vz 9275
+IGNyb3c= 9276
+PT0n 9277
+IHNhbmQ= 9278
+IHJpZA== 9279
+aXR1cmU= 9280
+IGVudHJl 9281
+IG9ic2Vydg== 9282
+IHZhYw== 9283
+8J8= 9284
+LVQ= 9285
+QXJ0 9286
+bmlnaHQ= 9287
+LnNlYXJjaA== 9288
+IGV4Y2hhbmdl 9289
+IGRpc3RyaWN0 9290
+Lm9z 9291
+IGRlcGFydG1lbnQ= 9292
+IGRvY3VtZW50cw== 9293
+IGNlbnR1cnk= 9294
+IE5leHQ= 9295
+SG9zdA== 9296
+IEtJTkQ= 9297
+IHN1c3A= 9298
+LVA= 9299
+cmVuZA== 9300
+LmVt 9301
+dWl0ZQ== 9302
+aXN0ZXJz 9303
+KGpzb24= 9304
+IEFubg== 9305
+d3Q= 9306
+YXRp 9307
+IEhUTUw= 9308
+d2hlbg== 9309
+RGlyZWN0b3J5 9310
+IHNodXQ= 9311
+PGE= 9312
+ZWR5 9313
+IGhlYWx0aHk= 9314
+IHRlbXBlcmF0dXJl 9315
+IEdlbg== 9316
+IG1ldGFs 9317
+IHN1Ym1pdA== 9318
+IERP 9319
+IGF0dHJhY3Q= 9320
+IHt9Owo= 9321
+IFdvcmQ= 9322
+IGxs 9323
+IHNlZW1lZA== 9324
+a28= 9325
+SUVE 9326
+IGxhYm9y 9327
+LkNvbnRleHQ= 9328
+IGFzc2V0 9329
+eW91 9330
+IGNhcnM= 9331
+IENvbHVtbg== 9332
+IHLDqQ== 9333
+IHNxdWFyZQ== 9334
+IE5TU3RyaW5n 9335
+4oCdLA== 9336
+YXBlcw== 9337
+Li4uCg== 9338
+IHRoYW5rcw== 9339
+KHByb3Bz 9340
+IHRpY2s= 9341
+IGV4cGVyaW1lbnQ= 9342
+IHByaXNvbg== 9343
+dHJlZQ== 9344
+LXRleHQ= 9345
+IElPRXhjZXB0aW9u 9346
+LXdpZHRo 9347
+X1NUQVRVUw== 9348
+ZmFzdA== 9349
+LWJvZHk= 9350
+LWhlYWRlcg== 9351
+IGd1YXI= 9352
+Y3JldGU= 9353
+IFRpbQ== 9354
+IGNsZWFybHk= 9355
+IFJlcHVibGljYW4= 9356
+IGp1c3RpZnk= 9357
+0LjRgg== 9358
+CSAgICA= 9359
+Y2FjaGU= 9360
+Oy8v 9361
+IHByZXNlbmNl 9362
+IGZhY3RvcnM= 9363
+IGVtcGxveWVl 9364
+XSkp 9365
+TWVtYmVy 9366
+IHNlbGVjdG9y 9367
+Ym9y 9368
+IE1leA== 9369
+55qE 9370
+dXRleA== 9371
+X3RhZw== 9372
+YWlsdXJl 9373
+IE5ldA== 9374
+IHJlbGk= 9375
+RUc= 9376
+IGZwcmludGY= 9377
+IHRlZW4= 9378
+bG9zcw== 9379
+IGxlYXZpbmc= 9380
+RGVsZWdhdGU= 9381
+IGJlYXQ= 9382
+IG1pbnV0ZQ== 9383
+c3Vic2NyaWJl 9384
+IHJlZGlzdHJpYnV0ZQ== 9385
+Q29uc3RhbnRz 9386
+IGNhbmNlcg== 9387
+L3s= 9388
+Qkw= 9389
+IHNwYW4= 9390
+IENoaWxk 9391
+Q2VudGVy 9392
+IGVhcnRo 9393
+WVM= 9394
+IExldmVs 9395
+IHNlYQ== 9396
+LnN1cHBvcnQ= 9397
+LmlubmVy 9398
+Lkl0ZW0= 9399
+aWxsaW5n 9400
+ICAgIAogICAgCg== 9401
+IExhYmVs 9402
+IEVzdA== 9403
+KGFyZw== 9404
+Ym9Cb3g= 9405
+CWZvcmVhY2g= 9406
+Y29z 9407
+RmFpbGVk 9408
+c3dlcnM= 9409
+RWRpdG9y 9410
+cm9udA== 9411
+IE1Q 9412
+ZXhwcg== 9413
+IExpZmU= 9414
+ID8/ 9415
+w7Zy 9416
+IGF0dGVuZA== 9417
+IFF1ZQ== 9418
+IHNwZWNpZXM= 9419
+LUQ= 9420
+IGF1cw== 9421
+U3RydWN0 9422
+IGFkdmFudGFnZQ== 9423
+b3N0b24= 9424
+LWJsb2Nr 9425
+aW5pdGlhbA== 9426
+Q1JF 9427
+IHRydWx5 9428
+IGNvbXBhcmU= 9429
+b3JuZXk= 9430
+IHNwZWN0 9431
+RnVsbA== 9432
+YmVz 9433
+IHZpc2libGU= 9434
+IG1lc3M= 9435
+c3RhbmNlcw== 9436
+IGNsb3Vk 9437
+X3ZlcnNpb24= 9438
+IGZ1cm4= 9439
+aWNhZ28= 9440
+TE9X 9441
+IHRyYWZmaWM= 9442
+IGZvbA== 9443
+cnlwdG8= 9444
+IGRlY2xhcg== 9445
+IHNsb3Q= 9446
+IEV4dA== 9447
+IEVuZ2xhbmQ= 9448
+IFVuZGVy 9449
+IHRh 9450
+bGV0dGVy 9451
+IG9mZmljZXI= 9452
+IERvbmFsZA== 9453
+WWVz 9454
+X2pzb24= 9455
+SVRhYmxlVmlldw== 9456
+IFVTRQ== 9457
+bXBsb3llZQ== 9458
+IG9waW5pb24= 9459
+IEF1dA== 9460
+Ym9yZGVy 9461
+IGFkdmljZQ== 9462
+IGF1dG9tYXRpY2FsbHk= 9463
+aXNjbw== 9464
+IG1t 9465
+LnZpcw== 9466
+YW1s 9467
+IGluaXRpYWxpemU= 9468
+ICh7 9469
+IDsKCg== 9470
+IGdlbmVyYXRpb24= 9471
+IGJpdHM= 9472
+Y2xpcHNl 9473
+IHVuZg== 9474
+dXRvcnM= 9475
+cGx0 9476
+IGRlbHRh 9477
+ZXN0cm95 9478
+aXNpcw== 9479
+PGJy 9480
+IGxpbWl0YXRpb25z 9481
+IGVuZGVk 9482
+IE1hZA== 9483
+aWxt 9484
+VGhlc2U= 9485
+IE1pbmlzdGVy 9486
+IGNoYXJ0 9487
+RnJhZ21lbnQ= 9488
+IGluZGVwZW5kZW50 9489
+WWVhcg== 9490
+IGluc3Ry 9491
+IHRhZ3M= 9492
+QVZF 9493
+IEFyY2g= 9494
+c3RvcA== 9495
+UHJvZ3Jlc3M= 9496
+IG1p 9497
+IGxlYXJuZWQ= 9498
+R2U= 9499
+IGhvdGVs 9500
+U00= 9501
+VFlQRQ== 9502
+IGN5 9503
+RVJTSU9O 9504
+dW5hdGVseQ== 9505
+bGltaXQ= 9506
+c2Vs 9507
+IG1vdmllcw== 9508
+IHN0ZWVs 9509
+b3o= 9510
+Z2I= 9511
+IENhbXA= 9512
+c2l0ZQ== 9513
+IExvZ2dlcg== 9514
+UExF 9515
+0L7QtA== 9516
+LnJpZ2h0 9517
+IENvcmU= 9518
+IG1peGVk 9519
+c3RlcA== 9520
+IHB1dHM= 9521
+c3VwZXI= 9522
+Um91dGVy 9523
+Lkh0dHA= 9524
+bHlwaA== 9525
+IENvbG9ycw== 9526
+IGFuZHJvaWR4 9527
+LnN0cg== 9528
+IGlubm92 9529
+IGRlY2s= 9530
+Jz4K 9531
+YXBlcnM= 9532
+XSg= 9533
+Y29udGludWU= 9534
+c3BlYw== 9535
+IFJvYWQ= 9536
+QVNI 9537
+aWxpYXI= 9538
+IGNvbnRpbnVlcw== 9539
+IGFwcG9pbnQ= 9540
+ICMK 9541
+IFZpcg== 9542
+ID8+Ig== 9543
+IGJpbg== 9544
+fSIs 9545
+Z29pbmc= 9546
+ZWFjaA== 9547
+QkQ= 9548
+IEFjY2Vzcw== 9549
+RG9j 9550
+IE1hbmFnZW1lbnQ= 9551
+QkVS 9552
+YXNrZXQ= 9553
+LmdldEluc3RhbmNl 9554
+IGVzdGFibGlzaGVk 9555
+c29ja2V0 9556
+SU5T 9557
+CXZpcnR1YWw= 9558
+CXJlc3VsdA== 9559
+UkVBRA== 9560
+X2hlaWdodA== 9561
+IEZvbnQ= 9562
+ICgpOwo= 9563
+X2h0bWw= 9564
+IG5laWdoYm9y 9565
+bG9y 9566
+IGdhdGhlcg== 9567
+IH0pCgo= 9568
+IGlkZW50aXR5 9569
+IGZhYg== 9570
+cGFkZGluZw== 9571
+IFJvdXRl 9572
+RW51bWVyYWJsZQ== 9573
+w7Q= 9574
+IGZvcmNlZA== 9575
+L2pxdWVyeQ== 9576
+LgoKCgoKCg== 9577
+cmVzZW50cw== 9578
+X2xlZnQ= 9579
+LlBhcmFt 9580
+CXRocm93 9581
+IEhhbQ== 9582
+IGV2ZW50dWFsbHk= 9583
+YWNlcg== 9584
+cHVi 9585
+IHRyYQ== 9586
+dW5pcXVl 9587
+ZGVs 9588
+IEZsb3JpZGE= 9589
+IENsZWFu 9590
+eGE= 9591
+IMK3 9592
+IHZhbGlkYXRl 9593
+VmlzdWFs 9594
+RXhwcmVzc2lvbg== 9595
+X2Z1bmM= 9596
+bWVtYmVy 9597
+CWg= 9598
+dHJs 9599
+CUc= 9600
+bmFwc2hvdA== 9601
+IFByb3BUeXBlcw== 9602
+dmlu 9603
+XSkKCg== 9604
+b3ds 9605
+aWZpZXM= 9606
+ICQoJy4= 9607
+IENvbnRleHQ= 9608
+IFRvYXN0 9609
+LktleQ== 9610
+IG9mZmljZXJz 9611
+L24= 9612
+c24= 9613
+dW5kZWZpbmVk 9614
+Lml0ZW1z 9615
+dXRvdw== 9616
+YW1hZ2U= 9617
+IGFjY291bnRz 9618
+b29raWU= 9619
+U2VjdGlvbg== 9620
+aWNpYW5z 9621
+IGFkdmlz 9622
+KGlz 9623
+Wzos 9624
+IEZyYW5jZQ== 9625
+RnVuYw== 9626
+aWNpb3Vz 9627
+IHRvaw== 9628
+Q2hhbm5lbA== 9629
+IEFE 9630
+X05VTQ== 9631
+IHRpbWVvdXQ= 9632
+bGVtbWE= 9633
+cmVtZQ== 9634
+dWo= 9635
+LkFs 9636
+dWNsZWFy 9637
+KG9z 9638
+KCI8 9639
+Wwo= 9640
+ZmV0Y2g= 9641
+IGJhbA== 9642
+IGd1aWQ= 9643
+LWFsaWdu 9644
+IFdyaXRl 9645
+IE9uY2U= 9646
+dXRvd2lyZWQ= 9647
+T0RVTEU= 9648
+IHBpdGNo 9649
+Q0Y= 9650
+Ynl0ZXM= 9651
+IENvbW1pc3Npb24= 9652
+IGluY3JlZA== 9653
+UEVS 9654
+X3Jlc3BvbnNl 9655
+IExvcw== 9656
+cGFyc2Vy 9657
+IGFzc3VtZQ== 9658
+LlJlcXVlc3Q= 9659
+IFRva2Vu 9660
+X3Bvc2l0aW9u 9661
+IG5vbQ== 9662
+LXRlcm0= 9663
+IHJlbWFpbmluZw== 9664
+aW9zdHJlYW0= 9665
+IHBpZWNlcw== 9666
+YXB5 9667
+IExlc3M= 9668
+cmFuZ2U= 9669
+dW1ibg== 9670
+cHJpc2U= 9671
+X29wdGlvbg== 9672
+SW1wbA== 9673
+a3dhcmdz 9674
+IGJ1c2luZXNzZXM= 9675
+QWxlcnQ= 9676
+IHBhcnRpZXM= 9677
+IENvbnRhaW5lcg== 9678
+IFByaXZhdGU= 9679
+IFBsYW4= 9680
+IHJlZ2lzdGVyZWQ= 9681
+IGpvdXI= 9682
+YWNrZXI= 9683
+0LXQvdC4 9684
+Lz4= 9685
+Y2hhdA== 9686
+c2VjdA== 9687
+IGNyZWF0aW9u 9688
+b2x1dGVseQ== 9689
+IGluc3RhbnQ= 9690
+IGRlbGl2ZXJ5 9691
+aWNrZW4= 9692
+eWVz 9693
+IEZyYW5j 9694
+Ymxpbmc= 9695
+ZW5kYQ== 9696
+Wyg= 9697
+X3Jhbmdl 9698
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA== 9699
+IHNjaGVkdWxl 9700
+Q29ubg== 9701
+IHRoYW5r 9702
+eGQ= 9703
+IGhvb2s= 9704
+IGRvY3VtZW50YXRpb24= 9705
+UGFyYW1ldGVycw== 9706
+SGVsbG8= 9707
+dnQ= 9708
+IGFydGljbGVz 9709
+IHdlc3Q= 9710
+ZGVmaW5lZA== 9711
+LnNlbGVjdA== 9712
+b2tlbnM= 9713
+IFZBTA== 9714
+LmZpbGU= 9715
+cmVzZXQ= 9716
+IG15cw== 9717
+IE1B 9718
+XSks 9719
+IGNpdGllcw== 9720
+cmVsYXRlZA== 9721
+5Zs= 9722
+IGFwcGVhcmVk 9723
+IHdpZA== 9724
+LnBhbmVs 9725
+IElucw== 9726
+LmVudGl0eQ== 9727
+IGRlY3Jl 9728
+IExvdQ== 9729
+KHRpbWU= 9730
+IFRoYW5r 9731
+LmNyZWF0ZUVsZW1lbnQ= 9732
+IG1lbnRpb25lZA== 9733
+b3VuY2U= 9734
+IFRyeQ== 9735
+IFdhbGw= 9736
+L2ltYWdlcw== 9737
+IE1lbnU= 9738
+Jw0K 9739
+IEVy 9740
+IGNyaXRpYw== 9741
+IFllYXI= 9742
+KHBhcmFt 9743
+IGZsbw== 9744
+Tk4= 9745
+b290ZXI= 9746
+IF07Cg== 9747
+IEFmZg== 9748
+ImdpdGh1Yg== 9749
+cm9vbXM= 9750
+IGh5cA== 9751
+Z2xvYmFs 9752
+IGF2ZWM= 9753
+5pyI 9754
+IGNvbXBsZXRpb24= 9755
+IGNvbmQ= 9756
+b255bW91cw== 9757
+KHRlbXA= 9758
+IHN0YXJz 9759
+IHJlbGV2YW50 9760
+IGNvdmVyZWQ= 9761
+IGVsaW0= 9762
+X3R5cGVz 9763
+KGJvb2w= 9764
+IHR1 9765
+X2V4aXN0cw== 9766
+IHNlY3VyZQ== 9767
+IHN0b3JlZA== 9768
+XS8= 9769
+eEY= 9770
+IENvbnRyb2xsZXI= 9771
+IG1pZ3I= 9772
+TUk= 9773
+IERlbg== 9774
+IGFubnVhbA== 9775
+VUlM 9776
+LWFuZA== 9777
+IGNyaW1l 9778
+YmVs 9779
+IGtpdGNoZW4= 9780
+QGc= 9781
+X3Bo 9782
+b3VybmFtZW50 9783
+IFNvY2lhbA== 9784
+IFNwZWNpYWw= 9785
+bG9nZ2Vy 9786
+IHRhaWw= 9787
+IHVua25vd24= 9788
+ZGVk 9789
+IGFwcHJlYw== 9790
+KGRi 9791
+Y2Y= 9792
+IGFzc2lnbg== 9793
+LW91dA== 9794
+IE1vbnQ= 9795
+ZHA= 9796
+d2lkZ2V0 9797
+IHN0b25l 9798
+LXByaW1hcnk= 9799
+LmdyaWQ= 9800
+UmVzdWx0cw== 9801
+YXp6 9802
+IGRhdWdodGVy 9803
+IGN1cnI= 9804
+IGxpbg== 9805
+IHNvdXRo 9806
+Zm9ybXM= 9807
+IE9VVA== 9808
+bGV0dGU= 9809
+YWtz 9810
+aWd1cmU= 9811
+IEVV 9812
+dmFyaWFibGU= 9813
+IGJyaWVm 9814
+IFNjb3R0 9815
+IGNvbmZlcmVuY2U= 9816
+YW5kYQ== 9817
+X2xvY2s= 9818
+b3JhbA== 9819
+IGVpbmU= 9820
+T1JT 9821
+Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLw== 9822
+ZXNzbw== 9823
+IHJpcw== 9824
+IGdlbmRlcg== 9825
+ZXN0aWM= 9826
+TGljZW5zZQ== 9827
+KG91dA== 9828
+IG1z 9829
+U2Vl 9830
+IHdpbGxpbmc= 9831
+YXpl 9832
+IHNwb3J0cw== 9833
+IHllcw== 9834
+bHU= 9835
+IHB1cnM= 9836
+L2phdmFzY3JpcHQ= 9837
+LXBybw== 9838
+bmF2YmFy 9839
+X3Byb2R1Y3Q= 9840
+L2Jvb3RzdHJhcA== 9841
+IGRyaXZpbmc= 9842
+IMQ= 9843
+IHByb3Bvcw== 9844
+dWx0aXA= 9845
+dXBsaWM= 9846
+LmVtYWls 9847
+IGFwcHJveA== 9848
+KGNs 9849
+IHdlYXI= 9850
+IHJlcGx5 9851
+YXNzZXQ= 9852
+IGljZQ== 9853
+IHR4 9854
+a3I= 9855
+IEdlcm1hbnk= 9856
+IEdlb3JnZQ== 9857
+IGNi 9858
+CWVycg== 9859
+TW92ZQ== 9860
+IHBvbHk= 9861
+dm9pY2U= 9862
+fSI= 9863
+IGFuaW1hbA== 9864
+QXY= 9865
+IExvY2F0aW9u 9866
+IG5hdGl2ZQ== 9867
+XVsi 9868
+PGRvdWJsZQ== 9869
+IG1haXM= 9870
+LGludA== 9871
+IHByZXBhcg== 9872
+IGludGVydmFs 9873
+cGxlbWVudGF0aW9u 9874
+X0VSUg== 9875
+IGJ1Zw== 9876
+PiI= 9877
+c3RhdA== 9878
+IH0sDQo= 9879
+PHNwYW4= 9880
+IGZhaXRo 9881
+IHJvbQ== 9882
+cHJldg== 9883
+IEVsZWN0 9884
+RmluZA== 9885
+IGdvZA== 9886
+b3Rvcg== 9887
+Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0t 9888
+b3JpZ2luYWw= 9889
+Q3Bw 9890
+IFNlbmF0ZQ== 9891
+IHBvc2l0aW9ucw== 9892
+IHdlYXBvbnM= 9893
+IGNvZmY= 9894
+IHB1cnBvc2Vz 9895
+cG9s 9896
+IGltcHJlc3M= 9897
+IGFuaW1hbHM= 9898
+LkVudGl0eQ== 9899
+KG5w 9900
+IG11cmRlcg== 9901
+IGBg 9902
+ZmxhZw== 9903
+IHNvbHV0aW9ucw== 9904
+IEFjdGl2ZQ== 9905
+IGJyaWdodA== 9906
+LmRhdGU= 9907
+IHNpdHU= 9908
+77yI 9909
+LklE 9910
+IHNpZQ== 9911
+KSwNCg== 9912
+YWt0 9913
+U3BhY2U= 9914
+LmRhdA== 9915
+LmluZGV4T2Y= 9916
+aGFu 9917
+YXppbmU= 9918
+IFpl 9919
+IGNyYXNo 9920
+KC8= 9921
+Pj0= 9922
+0LE= 9923
+aXZh 9924
+LkF1dG9TaXpl 9925
+IExhdA== 9926
+X2V4dA== 9927
+SW5pdGlhbGl6ZQ== 9928
+LnJlZ2lzdGVy 9929
+T1BZ 9930
+IHJldmVyc2U= 9931
+X2Rpcw== 9932
+J11b 9933
+IHByb21wdA== 9934
+b250bw== 9935
+IEpvdXJuYWw= 9936
+cm91dGVy 9937
+IG15c3FsaQ== 9938
+I2Vsc2U= 9939
+KSI= 9940
+LXhz 9941
+bGV0cw== 9942
+cGhhbg== 9943
+LkxF 9944
+V2lsbA== 9945
+IGFmZm9yZA== 9946
+IHNraWxs 9947
+LXRvZ2dsZQ== 9948
+TkM= 9949
+QmluZA== 9950
+VFM= 9951
+SnVzdA== 9952
+aXRlcmFs 9953
+WVA= 9954
+CXVuc2lnbmVk 9955
+IHdpbmQ= 9956
+KSk6Cg== 9957
+IHdhcm5pbmc= 9958
+IFdhdGVy 9959
+IGRyYWZ0 9960
+IGNt 9961
+IHNhbQ== 9962
+IGhvbGRpbmc= 9963
+emlw 9964
+IFNjaWVuY2U= 9965
+IHN1cHBvc2Vk 9966
+R2Vu 9967
+IGRpZXQ= 9968
+PGg= 9969
+IFBhc3M= 9970
+dmk= 9971
+IGh1c2JhbmQ= 9972
+77+977+9 9973
+bm90ZQ== 9974
+IEFib3V0 9975
+IEluc3RpdHV0ZQ== 9976
+IGNsaW1hdGU= 9977
+LkZvcm1hdA== 9978
+IG51dA== 9979
+ZXN0ZWQ= 9980
+IGFwcGFyZW50 9981
+IGhvbGRz 9982
+Zmk= 9983
+bmV3cw== 9984
+Q00= 9985
+dmlkZW8= 9986
+Jzon 9987
+RElUSU9O 9988
+cGluZw== 9989
+IHNlbmlvcg== 9990
+d2E= 9991
+LS0+Cg== 9992
+X2RlZmF1bHQ= 9993
+IERhdGFiYXNl 9994
+cmVw 9995
+RVNT 9996
+bmVyZ3k= 9997
+LkZpbmQ= 9998
+X21hc2s= 9999
+IHJpc2U= 10000
+IGtlcm5lbA== 10001
+Ojok 10002
+LlE= 10003
+IG9mZmVyaW5n 10004
+ZGVjbA== 10005
+IENT 10006
+IGxpc3RlZA== 10007
+IG1vc3RseQ== 10008
+ZW5nZXI= 10009
+IGJsb2Nrcw== 10010
+b2xv 10011
+IGdvdmVybmluZw== 10012
+XEY= 10013
+IGNvbmNlbnQ= 10014
+LmdldFRleHQ= 10015
+IG1i 10016
+IG9jY3VycmVk 10017
+IGNoYW5naW5n 10018
+U2NlbmU= 10019
+X0NPREU= 10020
+QmVo 10021
+IlRoZQ== 10022
+IHRpbGU= 10023
+IEFzc29jaWF0aW9u 10024
+CVA= 10025
+YWx0eQ== 10026
+X2Fk 10027
+b2RpZXM= 10028
+aWF0ZWQ= 10029
+IHByZXBhcmVk 10030
+cG9zc2libGU= 10031
+IG1vcnQ= 10032
+VEVTVA== 10033
+IGlnbm9yZQ== 10034
+IGNhbGM= 10035
+IHJz 10036
+IGFzc2VydEVxdWFscw== 10037
+IHN6 10038
+IFRISVM= 10039
+LiIK 10040
+IGNhbnZhcw== 10041
+amF2YQ== 10042
+IGR1dA== 10043
+VkFMSUQ= 10044
+LnNxbA== 10045
+LmlucHV0 10046
+IGF1eA== 10047
+U3Vw 10048
+IGFydGlzdA== 10049
+VmVj 10050
+X1RJTUU= 10051
+LnN0cmluZ2lmeQ== 10052
+ZXR3ZWVu 10053
+IENhdGVnb3J5 10054
+IFst 10055
+IERldkV4cHJlc3M= 10056
+IEp1bA== 10057
+IHJpbmc= 10058
+LmVk 10059
+WVk= 10060
+TGV0 10061
+VGV4dEZpZWxk 10062
+IGZsYXQ= 10063
+X3ByaW50 10064
+IE9USEVS 10065
+YWRpYW4= 10066
+IGNoZWNrZWQ= 10067
+ZWxl 10068
+QWxpZ24= 10069
+c3RhbmRpbmc= 10070
+IFtdLA== 10071
+IGxhYg== 10072
+dWNreQ== 10073
+IENocmlzdG1hcw== 10074
+KGltYWdl 10075
+Lm1vZHVsZQ== 10076
+IGxvdHM= 10077
+IHNsaWdodGx5 10078
+KGZpbmFs 10079
+ZXJnZQ== 10080
+6L8= 10081
+IFBvbGljZQ== 10082
+IFJpZ2h0 10083
+IGF3YXJk 10084
+IE9T 10085
+IHt9Cgo= 10086
+IHB0cg== 10087
+b3Zlcw== 10088
+aWNhdGVk 10089
+0LXQvA== 10090
+IG1hbmFnZQ== 10091
+b2xpZGF5 10092
+QW1vdW50 10093
+b29sU3RyaXA= 10094
+dGJvZHk= 10095
+TmF2 10096
+d3JhcA== 10097
+QkI= 10098
+IHdhdGNoaW5n 10099
+YXJpb3M= 10100
+IG9wdGlvbmFs 10101
+X0s= 10102
+IExpY2Vuc2Vk 10103
+Lk1hcA== 10104
+VGltZXI= 10105
+IEFQ 10106
+IFJldg== 10107
+KG8= 10108
+LGM= 10109
+dW1pbg== 10110
+ZXRhaWxlZA== 10111
+IEh5 10112
+IGJsYW5r 10113
+YWdnZXI= 10114
+IFNlbGY= 10115
+KClb 10116
+Lm1ha2U= 10117
+ZWFybg== 10118
+Y2hhbm5lbA== 10119
+PHByZQ== 10120
+YmxlbQ== 10121
+X3Bhc3N3b3Jk 10122
+X3Nw 10123
+aWNpbmc= 10124
+ZXo= 10125
+IHRoZW9yeQ== 10126
+IFRlcg== 10127
+LG4= 10128
+bG9nbw== 10129
+IEhUVFA= 10130
+KCkpKQ== 10131
+LmhhbmRsZQ== 10132
+PjsK 10133
+V29ybGQ= 10134
+IHB5dGhvbg== 10135
+IGxpZg== 10136
+IHRyYXY= 10137
+IGNvbnZlbg== 10138
+Y29tcGFueQ== 10139
+IENsdWI= 10140
+VmVy 10141
+QnRu 10142
+IHpvbmU= 10143
+cHJvZHVjdHM= 10144
+IEVkdWM= 10145
+IHZlcmlmeQ== 10146
+IE1pbA== 10147
+b25v 10148
+XSk7Cgo= 10149
+RU5DRQ== 10150
+IHBhY2tldA== 10151
+IGNlcg== 10152
+IGVudW1lcg== 10153
+IHBhcnM= 10154
+Zm9ybWVk 10155
+IG9jY3Vw 10156
+dHJl 10157
+IGV4ZXJjaXNl 10158
+RGF5 10159
+X3N1bQ== 10160
+IGFza2luZw== 10161
+YXB0aW9u 10162
+IG9yZGVycw== 10163
+IHNwZW5kaW5n 10164
+IEVSUg== 10165
+LkRpcw== 10166
+IFV0aWw= 10167
+4oCcSQ== 10168
+XCc= 10169
+Pyk= 10170
+Lz4K 10171
+IGVtb3Q= 10172
+IGluZmx1ZW5jZQ== 10173
+IEFmcmljYQ== 10174
+YXR0ZXJz 10175
+2YU= 10176
+LnNlc3Npb24= 10177
+IGNoaWVm 10178
+CQkJCQkJCQkJCQk= 10179
+IHRvbQ== 10180
+Y2x1ZGVk 10181
+c2VyaWFs 10182
+X2hhbmRsZXI= 10183
+LlR5cGU= 10184
+YXBlZA== 10185
+IHBvbGljaWVz 10186
+LWV4 10187
+LXRy 10188
+Ymxhbms= 10189
+bWVyY2U= 10190
+IGNvdmVyYWdl 10191
+IHJj 10192
+X21hdHJpeA== 10193
+X2JveA== 10194
+IGNoYXJnZXM= 10195
+IEJvc3Rvbg== 10196
+UGU= 10197
+IGNpcmN1bQ== 10198
+IGZpbGxlZA== 10199
+IG5vcnRo 10200
+aWN0dXJlQm94 10201
+CXJlcw== 10202
+6K4= 10203
+IHRlcm1pbg== 10204
+IFvigKY= 10205
+SVJFQ1Q= 10206
+IGJlcg== 10207
+ICIuLi8uLi8= 10208
+cmV0Y2g= 10209
+LmNvZGU= 10210
+X2NvbA== 10211
+IEdvdmVybm1lbnQ= 10212
+IGFyZ3Y= 10213
+IExvcmQ= 10214
+YXNp 10215
+RXhlYw== 10216
+CWxldA== 10217
+dmVydGlz 10218
+IGRpc2N1c3Npb24= 10219
+ZW5hbmNl 10220
+b3V0dWJl 10221
+dHlwZW9m 10222
+IHNlcnZlZA== 10223
+IFB1dA== 10224
+CXg= 10225
+IHN3ZWV0 10226
+QmVmb3Jl 10227
+YXRlZ3k= 10228
+Lm9m 10229
+IE1hdGVyaWFs 10230
+U29ydA== 10231
+T05U 10232
+aWdpdGFs 10233
+V2h5 10234
+IHN1c3Q= 10235
+IOc= 10236
+YWJldA== 10237
+IHNlZ21lbnQ= 10238
+IFtdLAo= 10239
+IE11c2xpbQ== 10240
+IGZpbmRWaWV3QnlJZA== 10241
+Y3V0 10242
+X1RFWFQ= 10243
+IE1hcnk= 10244
+IGxvdmVk 10245
+IGxpZQ== 10246
+IEpP 10247
+IGlzc2V0 10248
+bW9udGg= 10249
+IHByaW1l 10250
+dGk= 10251
+IENhcm9s 10252
+VXNl 10253
+IFBvcA== 10254
+IFNhdmU= 10255
+SW50ZXJ2YWw= 10256
+ZXhlY3V0ZQ== 10257
+ZHk= 10258
+IElyYW4= 10259
+X2NvbnQ= 10260
+CVQ= 10261
+IHBoYXNl 10262
+Y2hlY2tib3g= 10263
+d2Vlaw== 10264
+IGhpZGU= 10265
+IHRpbA== 10266
+IGp1 10267
+Q3VzdG9t 10268
+YnVyZw== 10269
+L00= 10270
+VE9O 10271
+IHF1YW50 10272
+IHJ1Yg== 10273
+aXhlbHM= 10274
+IGluc3RhbGxlZA== 10275
+IGR1bXA= 10276
+IHByb3Blcmx5 10277
+KExpc3Q= 10278
+IGRlY2lkZQ== 10279
+YXBwbHk= 10280
+SGFz 10281
+IGtlZXBpbmc= 10282
+IGNpdGl6ZW5z 10283
+IGpvaW50 10284
+cG9vbA== 10285
+U29ja2V0 10286
+X29w 10287
+IHdlYXBvbg== 10288
+Z25vcmU= 10289
+IEV4ZWM= 10290
+b3R0ZW4= 10291
+IE1T 10292
+ICgt 10293
+IFJldmlldw== 10294
+IGV4YW1wbGVz 10295
+IHRpZ2h0 10296
+ISg= 10297
+RFA= 10298
+IE1lc3NhZ2VCb3g= 10299
+IHBob3RvZ3JhcGg= 10300
+VVJJ 10301
+w6l0 10302
+bG93 10303
+IEdyYW5k 10304
+LnBlcnNpc3RlbmNl 10305
+IG1haW50YWlu 10306
+IG51bXM= 10307
+IHppcA== 10308
+aWFscw== 10309
+IEdldHM= 10310
+cGVn 10311
+IEJ1ZmZlcg== 10312
+fn5+fg== 10313
+cmFzdHJ1Y3R1cmU= 10314
+IFBM 10315
+dWVu 10316
+b2JieQ== 10317
+c2l6ZW9m 10318
+IHBpYw== 10319
+IHNlZWQ= 10320
+IGV4cGVyaWVuY2Vk 10321
+IG9kZA== 10322
+IGtpY2s= 10323
+IHByb2NlZHVyZQ== 10324
+YXZpZ2F0b3I= 10325
+LW9u 10326
+LGo= 10327
+IEFsdGhvdWdo 10328
+IHVzZXJJZA== 10329
+YWNjZXB0 10330
+Qmx1ZQ== 10331
+SUNvbG9y 10332
+bGF5ZXI= 10333
+YXZhaWxhYmxl 10334
+IGVuZHM= 10335
+LnRhYmxl 10336
+IGRhdGFzZXQ= 10337
+YnVz 10338
+IGV4cGxhaW4= 10339
+KHBybw== 10340
+IENvbW1pdHRlZQ== 10341
+IG5vdGVk 10342
+XToK 10343
+RGlt 10344
+c3RkaW8= 10345
+LiIsCg== 10346
+X3NvdXJjZQ== 10347
+IFdlZWs= 10348
+IEVkZ2U= 10349
+IG9wZXJhdGluZw== 10350
+IGVzdGU= 10351
+aXBs 10352
+YWdpbmF0aW9u 10353
+IHByb2NlZWQ= 10354
+IGFuaW1hdGlvbg== 10355
+Lk1vZGVscw== 10356
+IFdhdGNo 10357
+aWF0 10358
+IG9wcG9u 10359
+L0E= 10360
+UmVwb3J0 10361
+IHNvdW5kcw== 10362
+X2J1Zg== 10363
+SUVMRA== 10364
+IGJ1bmQ= 10365
+CWdldA== 10366
+LnBy 10367
+KHRtcA== 10368
+IGtpZA== 10369
+PgoKCg== 10370
+IHlhbmc= 10371
+Tm90Rm91bmQ= 10372
+0YY= 10373
+bWF0aA== 10374
+QGdtYWls 10375
+IExJTUlU 10376
+cmVkaWVudHM= 10377
+IHZlbnQ= 10378
+YXZpZ2F0ZQ== 10379
+TG9vaw== 10380
+IHJlbGlnaW91cw== 10381
+IHJhbmQ= 10382
+cmlv 10383
+KEdM 10384
+X2lw 10385
+dWFu 10386
+aWNpZW5jeQ== 10387
+IENoYW5nZQ== 10388
+Pg0KDQo= 10389
+IEVudGl0eQ== 10390
+IHJlbmNvbnRyZQ== 10391
+IFJldA== 10392
+cGxhbg== 10393
+w6lu 10394
+Qk9PTA== 10395
+dXJpZXM= 10396
+dHJhaW4= 10397
+RGVmaW5pdGlvbg== 10398
+PT09PT09PT09PT09 10399
+eno= 10400
+QW5pbWF0aW9u 10401
+IE9L 10402
+X21lbnU= 10403
+LmJs 10404
+X3Njb3Jl 10405
+IGFjYWQ= 10406
+KFN5c3RlbQ== 10407
+IHJlZnJlc2g= 10408
+Jz0+JA== 10409
+LkdyYXBoaWNz 10410
+YW1lbnRv 10411
+cGlk 10412
+dGM= 10413
+IHRpcHM= 10414
+IGhvbWVz 10415
+IGZ1ZWw= 10416
+4pY= 10417
+X2hlbHBlcg== 10418
+ICANCg== 10419
+IFJvb20= 10420
+LkNsb3Nl 10421
+X2F0dHI= 10422
+IE1vdW50 10423
+IEV2 10424
+YXJzZXI= 10425
+X3RvcA== 10426
+ZWFo 10427
+IERlbGV0ZQ== 10428
+44CN 10429
+dWtl 10430
+IHVzYWdl 10431
+YXJpYQ== 10432
+X2Rldg== 10433
+IHRleHR1cmU= 10434
+IGNvbnZlcnNhdGlvbg== 10435
+ZXBlcg== 10436
+QmVhbg== 10437
+ZG9uZQ== 10438
+bm9uYXRvbWlj 10439
+IFNlY29uZA== 10440
+IHNob290aW5n 10441
+X3ByZQ== 10442
+Q29tcG9uZW50cw== 10443
+IF0KCg== 10444
+X18s 10445
+c3RpdHV0aW9u 10446
+LkNoYXI= 10447
+PigpOwoK 10448
+IHByZXNlbnRlZA== 10449
+IHdh 10450
+b2tlcg== 10451
+LQoK 10452
+aW5lcg== 10453
+IGJlY29taW5n 10454
+IGluY2lkZW50 10455
+QXR0 10456
+IHJldmVhbGVk 10457
+Zm9yYw== 10458
+IGJvb3Q= 10459
+LnBhZ2U= 10460
+RW51bWVyYXRvcg== 10461
+Xy0+ 10462
+UGhvdG8= 10463
+IHNwcmluZw== 10464
+LiIs 10465
+IERpY3Rpb25hcnk= 10466
+QkpFQ1Q= 10467
+IGxvY2F0aW9ucw== 10468
+IHNhbXBsZXM= 10469
+SW5wdXRTdHJlYW0= 10470
+IEJyb3du 10471
+IHN0YXRz 10472
+cXVhbGl0eQ== 10473
+0YU= 10474
+LWRpcw== 10475
+IGhlbHBpbmc= 10476
+IHBlZA== 10477
+KHNl 10478
+IFdobw== 10479
+YWxpYW4= 10480
+aW50ZXJuYWw= 10481
+IGZ0 10482
+PigpLg== 10483
+LT57 10484
+IG1pbmU= 10485
+IHNlY3Rvcg== 10486
+IGdybw== 10487
+IG9wcG9ydHVuaXRpZXM= 10488
+IMO8 10489
+IG1w 10490
+IGFsbGVnZWQ= 10491
+IGRvdWJ0 10492
+TW91c2U= 10493
+QWJvdXQ= 10494
+X3BhcnQ= 10495
+IGNoYWly 10496
+IHN0b3BwZWQ= 10497
+bG9vcA== 10498
+ZW50aXRpZXM= 10499
+IGFwcHM= 10500
+YW5zaW9u 10501
+IG1lbnRhbA== 10502
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA= 10503
+RlI= 10504
+IGRlZmVuZA== 10505
+Y2FyZQ== 10506
+IGlkZWFs 10507
+L2FwaQ== 10508
+dXJmYWNl 10509
+IGVsZQ== 10510
+dWxhdG9y 10511
+IFJpZ2h0cw== 10512
+YW5ndWFnZXM= 10513
+IGZ1bmRz 10514
+IGFkYXB0 10515
+QXR0cmlidXRlcw== 10516
+IGRlcGxveQ== 10517
+b3B0cw== 10518
+IHZhbGlkYXRpb24= 10519
+IGNvbmNlcm5z 10520
+dWNl 10521
+Lm51bQ== 10522
+dWx0dXJl 10523
+aWxh 10524
+IGN1cA== 10525
+IHB1cmU= 10526
+LkZvcmU= 10527
+IEhhc2hNYXA= 10528
+LnZhbHVlT2Y= 10529
+YXNt 10530
+TU8= 10531
+IGNz 10532
+IHN0b3Jlcw== 10533
+ICoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKg== 10534
+IGNvbW11bmljYXRpb24= 10535
+bWVt 10536
+LkV2ZW50SGFuZGxlcg== 10537
+LlN0YXR1cw== 10538
+X3JpZ2h0 10539
+LnNldE9u 10540
+U2hlZXQ= 10541
+IGlkZW50aWZ5 10542
+ZW5lcmF0ZWQ= 10543
+b3JkZXJlZA== 10544
+ICJb 10545
+IHN3ZQ== 10546
+Q29uZGl0aW9u 10547
+IEFjY29yZGluZw== 10548
+IHByZXBhcmU= 10549
+IHJvYg== 10550
+UG9vbA== 10551
+IHNwb3J0 10552
+cnY= 10553
+IFJvdXRlcg== 10554
+IGFsdGVybmF0aXZl 10555
+KFtd 10556
+IENoaWNhZ28= 10557
+aXBoZXI= 10558
+aXNjaGU= 10559
+IERpcmVjdG9y 10560
+a2w= 10561
+IFdpbA== 10562
+a2V5cw== 10563
+IG15c3Fs 10564
+IHdlbGNvbWU= 10565
+a2luZw== 10566
+IE1hbmFnZXI= 10567
+IGNhdWdodA== 10568
+KX0K 10569
+U2NvcmU= 10570
+X1BS 10571
+IHN1cnZleQ== 10572
+aGFi 10573
+SGVhZGVycw== 10574
+QURFUg== 10575
+IGRlY29y 10576
+IHR1cm5z 10577
+IHJhZGl1cw== 10578
+ZXJydXB0 10579
+Q29y 10580
+IG1lbA== 10581
+IGludHI= 10582
+KHE= 10583
+IEFD 10584
+YW1vcw== 10585
+TUFY 10586
+IEdyaWQ= 10587
+IEplc3Vz 10588
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg 10589
+LkRF 10590
+IHRz 10591
+IGxpbmtlZA== 10592
+ZnJlZQ== 10593
+IFF0 10594
+IC8qKg0K 10595
+IGZhc3Rlcg== 10596
+Y3Ry 10597
+X0o= 10598
+RFQ= 10599
+LkNoZWNr 10600
+IGNvbWJpbmF0aW9u 10601
+IGludGVuZGVk 10602
+LXRoZQ== 10603
+LXR5cGU= 10604
+ZWN0b3Jz 10605
+YW1p 10606
+dXRpbmc= 10607
+IHVtYQ== 10608
+WE1M 10609
+VUNU 10610
+QXA= 10611
+IFJhbmRvbQ== 10612
+IHJhbg== 10613
+LnNvcnQ= 10614
+IHNvcnRlZA== 10615
+LlVu 10616
+X1BFUg== 10617
+aXRvcnk= 10618
+IHByaW9yaXR5 10619
+IEdhbA== 10620
+IE9sZA== 10621
+aG90 10622
+IERpc3BsYXk= 10623
+KHN1Yg== 10624
+X1RI 10625
+X1k= 10626
+IENhcmU= 10627
+bG9hZGluZw== 10628
+S2luZA== 10629
+X2hhbmRsZQ== 10630
+LCw= 10631
+cmFzZQ== 10632
+X3JlcGxhY2U= 10633
+LmFkZEV2ZW50TGlzdGVuZXI= 10634
+IFJU 10635
+IGVudGVyZWQ= 10636
+Z2Vycw== 10637
+IGljaA== 10638
+KHN0YXJ0 10639
+L2FwcA== 10640
+IGJyb3RoZXI= 10641
+TWVtb3J5 10642
+T3V0bGV0 10643
+IHV0Zg== 10644
+cHJlYw== 10645
+IG5hdmlnYXRpb24= 10646
+T1JL 10647
+IGRzdA== 10648
+RGV0YWls 10649
+IGF1ZGllbmNl 10650
+IGR1cg== 10651
+IGNsdXN0ZXI= 10652
+dW5jaGVk 10653
+IF0s 10654
+IGNvbWZvcnRhYmxl 10655
+LnZhbHVlcw== 10656
+IFRvdGFs 10657
+IHNuYXA= 10658
+IHN0YW5kYXJkcw== 10659
+IHBlcmZvcm1lZA== 10660
+aGFuZA== 10661
+KCJA 10662
+5a0= 10663
+IHBoaWw= 10664
+aWJy 10665
+dHJpbQ== 10666
+IGZvcmdldA== 10667
+IGRvY3Rvcg== 10668
+LlRleHRCb3g= 10669
+aWNvbnM= 10670
+LHM= 10671
+IE9w 10672
+U20= 10673
+U3RvcA== 10674
+CUxpc3Q= 10675
+CXU= 10676
+Q29tbWVudA== 10677
+X1ZFUlNJT04= 10678
+Llh0cmE= 10679
+UGVyc29u 10680
+cmI= 10681
+TE9C 10682
+ICAgICAgICAgICAgICAgICAgICAK 10683
+IENlbnRyYWw= 10684
+SUNL 10685
+cmFx 10686
+IHB1dHRpbmc= 10687
+IG1k 10688
+IExvdmU= 10689
+UHJvZ3JhbQ== 10690
+Qm9yZGVy 10691
+b29y 10692
+IGFsbG93aW5n 10693
+YWZ0ZXI= 10694
+IGVudHJpZXM= 10695
+IE1heWJl 10696
+XSku 10697
+IFNob3J0 10698
+KVw= 10699
+Lm5vdw== 10700
+ZnJpZW5k 10701
+IHByZWZlcg== 10702
+IEdQSU8= 10703
+b3Npcw== 10704
+IEdhbWVPYmplY3Q= 10705
+IHNraXA= 10706
+IGNvbXBldGl0aW9u 10707
+X21hdGNo 10708
+bGljYXRpb25z 10709
+X0NPTlQ= 10710
+Lmdyb3VwQm94 10711
+IGFscw== 10712
+Ildl 10713
+X2Vx 10714
+bGFu 10715
+X3NlYXJjaA== 10716
+IE11c2lj 10717
+YXNpcw== 10718
+IGJpbmQ= 10719
+IElzbGFuZA== 10720
+cnVt 10721
+KEU= 10722
+IHNlYXQ= 10723
+VmlkZW8= 10724
+IGFjaw== 10725
+cmVlaw== 10726
+PXsoKQ== 10727
+IHJhdGluZw== 10728
+IHJlc3RhdXJhbnQ= 10729
+REVY 10730
+KGJ1Zg== 10731
+cHBpbmc= 10732
+dWFsaXR5 10733
+IGxlYWd1ZQ== 10734
+IGZvY3VzZWQ= 10735
+YXBvbg== 10736
+JGRhdGE= 10737
+Q0xVRA== 10738
+Q0xVRElORw== 10739
+IGFic29sdXRl 10740
+KHF1ZXJ5 10741
+IHRlbGxz 10742
+QW5n 10743
+IGNvbW11bml0aWVz 10744
+IGhvbmVzdA== 10745
+b2tpbmc= 10746
+IGFwYXJ0 10747
+YXJpdHk= 10748
+LyQ= 10749
+X21vZHVsZQ== 10750
+IEVuYw== 10751
+LmFu 10752
+LkNvbmZpZw== 10753
+Q3Jl 10754
+IHNob2Nr 10755
+IEFyYWI= 10756
+SUVOVA== 10757
+L3Jl 10758
+IHJldHJpZQ== 10759
+eWNsZXI= 10760
+aXNh 10761
+IE9yZ2Fu 10762
+LmdyYXBo 10763
+IO0= 10764
+IEJBUw== 10765
+RW51bQ== 10766
+IHBvc3NpYmx5 10767
+0YDQsNA= 10768
+IEphcGFuZXNl 10769
+IGNyYWZ0 10770
+IFBsYWNl 10771
+IHRhbGVudA== 10772
+IGZ1bmRpbmc= 10773
+IGNvbmZpcm1lZA== 10774
+IGN5Y2xl 10775
+L3g= 10776
+R0U= 10777
+IGhlYXJpbmc= 10778
+IHBsYW50cw== 10779
+IG1vdXRo 10780
+cGFnZXM= 10781
+b3JpYQ== 10782
+IFJlbW92ZQ== 10783
+X3RvdGFs 10784
+IG9k 10785
+b2xsYXBzZQ== 10786
+ZG9vcg== 10787
+IGJvdWdodA== 10788
+IGFkZHI= 10789
+QVJDSA== 10790
+X2RpbQ== 10791
+ZGRlbg== 10792
+IGRlY2FkZXM= 10793
+UkVRVUVTVA== 10794
+IHZlcnNpb25z 10795
+ZmlyZQ== 10796
+IG1vdmVz 10797
+ZmI= 10798
+IGNvZmZlZQ== 10799
+LmNvbm5lY3Q= 10800
+IFJvdw== 10801
+IHNjaGVtYQ== 10802
+U2NvcGU= 10803
+LVR5cGU= 10804
+IGZpZ2h0aW5n 10805
+IHJldGFpbA== 10806
+IG1vZGlmaWVk 10807
+VEY= 10808
+RmlsZXM= 10809
+bmll 10810
+X2NvbW1hbmQ= 10811
+c3RvbmU= 10812
+INGC 10813
+X3RocmVhZA== 10814
+IGJvbmQ= 10815
+IERldmVsb3BtZW50 10816
+IHB0 10817
+Rk9STQ== 10818
+cGxldA== 10819
+IGlkZW50aWZpZWQ= 10820
+Y3Bw 10821
+IGNvZGluZw== 10822
+b2tlZA== 10823
+IE1hc3Rlcg== 10824
+SURUSA== 10825
+IHJlc2lkZW50cw== 10826
+cmVkaXQ= 10827
+IFBob3Rv 10828
+PS0= 10829
+dW50ZQ== 10830
+YXRldXI= 10831
+X1NUQVRF 10832
+IFNpbmc= 10833
+IHNoZWV0 10834
+LnZhbA== 10835
+b3JzZQ== 10836
+IGhlcnM= 10837
+IGRldGVybWluZWQ= 10838
+Q29tbW9u 10839
+IHdlZA== 10840
+X3F1ZXVl 10841
+UEg= 10842
+IEF0bA== 10843
+Y3JlZA== 10844
+L0xJQ0VOU0U= 10845
+IG1lcw== 10846
+IGFkdmFuY2Vk 10847
+LmphdmE= 10848
+LlNo 10849
+R28= 10850
+a2lsbA== 10851
+ZnA= 10852
+X3NldHRpbmdz 10853
+IHBhbA== 10854
+IHRydWNr 10855
+IGNvbWJpbmVk 10856
+ICIkew== 10857
+IENvcnBvcg== 10858
+IGpvaW5lZA== 10859
+IEpvc2U= 10860
+IEN1cA== 10861
+dW5z 10862
+ZXN0aXZhbA== 10863
+bGV2aXNpb24= 10864
+IGJyb2tlbg== 10865
+IG1hcnJpYWdl 10866
+IFdlc3Rlcm4= 10867
+IHJlcHJlc2VudHM= 10868
+IFRpdGxl 10869
+IHNz 10870
+LkFzcw== 10871
+b25nb29zZQ== 10872
+aWVudG8= 10873
+PD4oKTsK 10874
+IGFic29sdXRlbHk= 10875
+IHNtb290aA== 10876
+VEVSTg== 10877
+IFVubGVzcw== 10878
+V29yZA== 10879
+IG1lcmdl 10880
+aWdhbg== 10881
+IFZvbA== 10882
+IG5u 10883
+LmdldElk 10884
+INC3 10885
+IHNleHk= 10886
+IHNlZWtpbmc= 10887
+U2luZ2xl 10888
+LnRoaXM= 10889
+IGtvbQ== 10890
+Ym91bmQ= 10891
+OyI= 10892
+IGZvbnRTaXpl 10893
+X2Rm 10894
+IGluanVyeQ== 10895
+KEg= 10896
+IGlzc3VlZA== 10897
+X0VORA== 10898
+OnNlbGY= 10899
+IHBhdGNo 10900
+IGxlYXZlcw== 10901
+IGFkb3B0 10902
+RmlsZU5hbWU= 10903
+44CQ 10904
+IGV4ZWN1dGl2ZQ== 10905
+IEJ5dGU= 10906
+XSkpCg== 10907
+IG51 10908
+b3V0aW5n 10909
+Y2x1ZGluZw== 10910
+LVI= 10911
+Lm9wdGlvbnM= 10912
+IHN1YnN0YW50 10913
+YXZheA== 10914
+IEJVVA== 10915
+IHRlY2huaWNhbA== 10916
+IHR3aWNl 10917
+IG3DoXM= 10918
+IHVuaXZlcnM= 10919
+eXI= 10920
+IGRyYWc= 10921
+IERD 10922
+IHNlZA== 10923
+IGJvdA== 10924
+IFBhbA== 10925
+IEhhbGw= 10926
+Zm9yY2VtZW50 10927
+IGF1Y2g= 10928
+Lm1vZA== 10929
+bm90YXRpb24= 10930
+X2ZpbGVz 10931
+LmxpbmU= 10932
+X2ZsYWc= 10933
+W25hbWU= 10934
+IHJlc29sdXRpb24= 10935
+IGJvdHQ= 10936
+KCJb 10937
+ZW5kZQ== 10938
+KGFycg== 10939
+RnJlZQ== 10940
+KEAi 10941
+IERpc3RyaWN0 10942
+UEVD 10943
+Oi0= 10944
+UGlja2Vy 10945
+IEpv 10946
+ICAgICAK 10947
+IFJpdmVy 10948
+X3Jvd3M= 10949
+IGhlbHBmdWw= 10950
+IG1hc3NpdmU= 10951
+LS0tCg== 10952
+IG1lYXN1cmVz 10953
+IFJ1bnRpbWU= 10954
+IHdvcnJ5 10955
+IFNwZWM= 10956
+CUQ= 10957
+44CR 10958
+ICl7Cg== 10959
+IHdvcnNl 10960
+KGZpbGVuYW1l 10961
+IGxheQ== 10962
+IG1hZ2lj 10963
+IFRoZWly 10964
+b3Vs 10965
+c3Ryb3k= 10966
+IFdoZXJl 10967
+IHN1ZGRlbg== 10968
+IGRlZmU= 10969
+IGJpbmRpbmc= 10970
+IGZsaWdodA== 10971
+IE9uSW5pdA== 10972
+IFdvbWVu 10973
+IFBvbGljeQ== 10974
+IGRydWdz 10975
+aXNoaW5n 10976
+KCcuLi8= 10977
+IE1lbA== 10978
+cGVhdA== 10979
+dG9y 10980
+IHByb3Bvc2Vk 10981
+IHN0YXRlZA== 10982
+X1JFUw== 10983
+IGVhc3Q= 10984
+IENPTkRJVElPTg== 10985
+X2Rlc2M= 10986
+IHdpbm5pbmc= 10987
+Zm9saW8= 10988
+TWFwcGVy 10989
+IFBhbg== 10990
+IEFuZ2U= 10991
+LnNlcnZsZXQ= 10992
+IGNvcGllcw== 10993
+TE0= 10994
+IHZt 10995
+5Y0= 10996
+IGRpY3Rpb25hcnk= 10997
+U2Vn 10998
+ZWxpbmVz 10999
+IFNlbmQ= 11000
+IGlyb24= 11001
+IEZvcnQ= 11002
+LmRvbWFpbg== 11003
+IGRlYmF0ZQ== 11004
+Tm90TnVsbA== 11005
+ZXE= 11006
+YWNoZXI= 11007
+bGY= 11008
+CWZtdA== 11009
+IGxhd3k= 11010
+xJ8= 11011
+IE1lbg== 11012
+IHRyaW0= 11013
+KE5VTEw= 11014
+ICEh 11015
+IHBhZA== 11016
+IGZvbGxvd3M= 11017
+Il1bIg== 11018
+cmVxdQ== 11019
+IEVw 11020
+LmdpdGh1Yg== 11021
+KGltZw== 11022
+ZXRv 11023
+KCdc 11024
+U2VydmljZXM= 11025
+dW1ibmFpbA== 11026
+X21haW4= 11027
+cGxldGVk 11028
+Zm9ydHVuYXRlbHk= 11029
+IHdpbmRvd3M= 11030
+IHBsYW5l 11031
+IENvbm5lY3Rpb24= 11032
+LmxvY2Fs 11033
+dWFyZA== 11034
+fVw= 11035
+PT0i 11036
+YW5kb24= 11037
+IFJveQ== 11038
+d2VzdA== 11039
+aWdpbmFs 11040
+ZW1pZXM= 11041
+aXR6 11042
+Jyk6Cg== 11043
+IFBldGVy 11044
+IHRvdWdo 11045
+IHJlZHVjZWQ= 11046
+IGNhbGN1bGF0ZQ== 11047
+IHJhcGlk 11048
+Y3VzdG9tZXI= 11049
+IGVmZmljaWVudA== 11050
+IG1lZGl1bQ== 11051
+IGZlbGw= 11052
+LnJlZg== 11053
+IENhcw== 11054
+IGZlZWRiYWNr 11055
+U3BlZWQ= 11056
+KG91dHB1dA== 11057
+YWpl 11058
+IGNhdGVnb3JpZXM= 11059
+IGZlZQ== 11060
+fTs= 11061
+IGRlbGV0ZWQ= 11062
+cmVo 11063
+IHByb29m 11064
+RGVzYw== 11065
+QnVpbGQ= 11066
+IHNpZGVz 11067
+LkFycmF5TGlzdA== 11068
+LSU= 11069
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA= 11070
+2LE= 11071
+Lm1hdGNo 11072
+0LvQuA== 11073
+IGZlZWxz 11074
+IGFjaGlldmU= 11075
+IGNsaW0= 11076
+X09O 11077
+IENE 11078
+IHRlYWNoZXI= 11079
+X2N1cnJlbnQ= 11080
+Ym4= 11081
+X1BM 11082
+aXN0aW5n 11083
+RW5hYmxl 11084
+R0VO 11085
+IHR2 11086
+IHNvY2s= 11087
+IHBsYXlz 11088
+IGRpc2NvdW50 11089
+IEtF 11090
+IERlYnVn 11091
+Rm9yZQ== 11092
+IElyYXE= 11093
+IGFwcGVhcmFuY2U= 11094
+TW9u 11095
+IHN0eWxlZA== 11096
+IEh1bWFu 11097
+aW90 11098
+IEhpc3Rvcnk= 11099
+IHNhYw== 11100
+IENvbGxlY3Rpb24= 11101
+IHJlY29tbWVuZGVk 11102
+LlNlbGVjdGVk 11103
+IG9yZ2FuaXphdGlvbnM= 11104
+IGRpc2NvdmVyZWQ= 11105
+Y29ob2w= 11106
+YWRhcw== 11107
+IFRob21hcw== 11108
+TWF5 11109
+IGNvbnNlcnY= 11110
+IGRvbWlu 11111
+IEZvbGxvdw== 11112
+IFNlY3Rpb24= 11113
+IFRoYW5rcw== 11114
+VXNlcm5hbWU= 11115
+IHJlY2lwZQ== 11116
+IHdvbmRlcmZ1bA== 11117
+LnNsZWVw 11118
+X2lm 11119
+CQoJCg== 11120
+b3Jubw== 11121
+IHJ1 11122
+X3RhcmdldA== 11123
+LiIi 11124
+4KY= 11125
+RXZlbnRBcmdz 11126
+IGlucHV0cw== 11127
+IGZpZg== 11128
+IHZpc2lvbg== 11129
+Y3k= 11130
+IFNlcmllcw== 11131
+KSgoKA== 11132
+IHRyYWRpbmc= 11133
+IG1hcmtlcg== 11134
+QmVnaW4= 11135
+IHR5cGljYWxseQ== 11136
+IGNhdXNlcw== 11137
+ZHJvcGRvd24= 11138
+X0RFQlVH 11139
+IGRldGVjdA== 11140
+Y291bnRyeQ== 11141
+ISIpOwo= 11142
+CVI= 11143
+YXBweQ== 11144
+IGNyZWY= 11145
+KCc8 11146
+Ij0+ 11147
+IExF 11148
+cmVhZGVy 11149
+IGFkbWluaXN0cg== 11150
+w7U= 11151
+dWNrZXQ= 11152
+IGZhc2hpb24= 11153
+LmNoYXI= 11154
+aXphcg== 11155
+IGRpc2FibGU= 11156
+IHN1Yw== 11157
+IExpdmU= 11158
+aXNzdWU= 11159
+IG1ldGFkYXRh 11160
+ZmxhZ3M= 11161
+IPCf 11162
+IGNvbW1pdHRlZA== 11163
+IHZh 11164
+IHJvdWdo 11165
+ICcnJwo= 11166
+IGhpZ2hsaWdodA== 11167
+X3ZhcnM= 11168
+Vk8= 11169
+IGVuY29kaW5n 11170
+LVo= 11171
+X3NpZ24= 11172
+JCgiIw== 11173
+IHJhaW4= 11174
+cmVhdGVzdA== 11175
+IEVORA== 11176
+U2VsZWN0aW9u 11177
+IGNhbmRpZGF0ZXM= 11178
+IHNhdg== 11179
+LkVtcHR5 11180
+IGRlY2lzaW9ucw== 11181
+IGNvbGxhYm9y 11182
+cmlkZ2U= 11183
+ZmVlZA== 11184
+cmVzc2lvbg== 11185
+IHBlcnNvbnM= 11186
+Vk0= 11187
+ZWdh 11188
+X0JJVA== 11189
+QWNjb3JkaW5n 11190
+YWNrZWQ= 11191
+IGRvbGxhcnM= 11192
+X2xvc3M= 11193
+IENvc3Q= 11194
+fSIK 11195
+Tm90aWZpY2F0aW9u 11196
+IHByb3N0aXQ= 11197
+IGF1dGhvcml0eQ== 11198
+LnJlYw== 11199
+IHNwb2tlcw== 11200
+IFRvZGF5 11201
+aXN0YW50 11202
+IEhlYWQ= 11203
+4oCdLg== 11204
+ZXJ0YWlubWVudA== 11205
+Y2Vhbg== 11206
+Y3VsYXRl 11207
+IHZlbg== 11208
+SG93ZXZlcg== 11209
+X2Fycg== 11210
+IHRva2Vucw== 11211
+R3JhcGg= 11212
+IEp1ZA== 11213
+IFZpcmdpbg== 11214
+IFNlcmlhbA== 11215
+dW5uaW5n 11216
+TXV0YWJsZQ== 11217
+YWdlcnM= 11218
+LmNzdg== 11219
+IGRldmVsb3Bpbmc= 11220
+IGluc3RydWN0aW9ucw== 11221
+IHByb21pc2U= 11222
+IHJlcXVlc3RlZA== 11223
+X2VuY29kZQ== 11224
+LyI= 11225
+IEljb24= 11226
+dWlsdA== 11227
+LWRheQ== 11228
+IGludGVsbGlnZW5jZQ== 11229
+LklT 11230
+IE9ic2VydmFibGU= 11231
+IEhhcmQ= 11232
+Qm9vbA== 11233
+aWRlbnRpYWw= 11234
+LkFuY2hvcg== 11235
+IHNlbGxpbmc= 11236
+Q0k= 11237
+QUdFUw== 11238
+dGxl 11239
+YnVy 11240
+VUZGRVI= 11241
+Ulk= 11242
+IGJpZ2dlcg== 11243
+IHJhdA== 11244
+IGZhbW91cw== 11245
+IHR5cGVuYW1l 11246
+IGV4cGxhaW5lZA== 11247
+fX0K 11248
+IG51Y2xlYXI= 11249
+LU4= 11250
+IGNyaXNpcw== 11251
+IEVudGVy 11252
+IGFuc3dlcnM= 11253
+LyR7 11254
+L3Bs 11255
+IHNlcXU= 11256
+X25leHQ= 11257
+bWFzaw== 11258
+IHN0YW5kaW5n 11259
+IHBsZW50eQ== 11260
+IENyb3Nz 11261
+CXJldA== 11262
+ZHJv 11263
+IENhc3Q= 11264
+PXRydWU= 11265
+IENocmlz 11266
+aWNpbw== 11267
+IE1pa2U= 11268
+RGVjaW1hbA== 11269
+YWRkQ29tcG9uZW50 11270
+TGVu 11271
+IGNvY2s= 11272
+ICN7 11273
+VVJO 11274
+PHRy 11275
+IGF1dGhvcml0aWVz 11276
+UmVzb3VyY2Vz 11277
+LUg= 11278
+Qm90dG9t 11279
+X3F1 11280
+cHV0ZXI= 11281
+ZXN0ZXJkYXk= 11282
+RGlzcGF0Y2g= 11283
+c2luY2U= 11284
+IGZhbWlsaWFy 11285
+LGk= 11286
+VkM= 11287
+IG1lbnQ= 11288
+LEM= 11289
+IGZyZWVkb20= 11290
+IHJvdXRlcw== 11291
+IEJ1eQ== 11292
+IGNvbW1hbmRz 11293
+IG1lc2g= 11294
+L0M= 11295
+IFNldHRpbmdz 11296
+LXN0eWxl 11297
+IHdpdG5lc3M= 11298
+IGNsZQ== 11299
+IHVuaW9u 11300
+ZWZhdWx0 11301
+YXJldA== 11302
+IHRob3VnaHRz 11303
+IC0tLS0= 11304
+X3Byb2Nlc3M= 11305
+X3Vz 11306
+aW5nbHk= 11307
+VUVT 11308
+VG91Y2g= 11309
+INC8 11310
+X29wZW4= 11311
+IFZlYw== 11312
+IHJld2FyZA== 11313
+LkNsaWNr 11314
+Lzo= 11315
+IG5pZQ== 11316
+Q2hhbmdlcw== 11317
+TW9udGg= 11318
+77yf 11319
+IGV4ZWN1dGlvbg== 11320
+IGJlYWNo 11321
+KEludGVnZXI= 11322
+CWE= 11323
+Lyc= 11324
+LkZvbnRTdHlsZQ== 11325
+IGFib3J0 11326
+IFNpbmdsZQ== 11327
+KGlzc2V0 11328
+IGRw 11329
+IH19PC8= 11330
+IE1h 11331
+LlJvd3M= 11332
+IFBldA== 11333
+JSk= 11334
+cmFuZA== 11335
+6YA= 11336
+UnVsZQ== 11337
+IGhlbA== 11338
+UklURQ== 11339
+IHF1aWV0 11340
+IHJhdGlv 11341
+IENPTkRJVElPTlM= 11342
+b3NvcGg= 11343
+IElM 11344
+IGFkdmVudA== 11345
+Y2Fw 11346
+Ozwv 11347
+IFVTQg== 11348
+RHJpdmVy 11349
+IG91cnM= 11350
+IEpvaG5zb24= 11351
+Lks= 11352
+X2RlbGV0ZQ== 11353
+LnE= 11354
+CXN0cg== 11355
+L2NvbW1vbg== 11356
+CXN0cmluZw== 11357
+IFBERg== 11358
+YWN0cw== 11359
+LkFjdGlvbg== 11360
+IFF1ZXJ5 11361
+LnJlc3BvbnNl 11362
+IEdpcmw= 11363
+IHByb2Nlc3Nlcw== 11364
+PEludGVnZXI= 11365
+aW1v 11366
+IGFkZHM= 11367
+IGVudGlyZWx5 11368
+IHdhc2g= 11369
+LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKg== 11370
+IGFuaW1hdGVk 11371
+IHByb2ZpdA== 11372
+ZW5jaW5n 11373
+L1M= 11374
+IFN5bQ== 11375
+IG1hbnVhbA== 11376
+RG93bmxvYWQ= 11377
+ICghJA== 11378
+IG1vdGlvbg== 11379
+d2VicGFjaw== 11380
+LWJvdHRvbQ== 11381
+IGdyYXR1aXQ= 11382
+UEc= 11383
+KDos 11384
+IGVyYQ== 11385
+IGhv 11386
+IEppbQ== 11387
+cXVpcg== 11388
+IEJBU0lT 11389
+w6Fu 11390
+REVS 11391
+IGV4cGVuc2l2ZQ== 11392
+X2Nv 11393
+Qm91bmRz 11394
+V2VsbA== 11395
+IERlbW9jcmF0aWM= 11396
+IOKGkg== 11397
+LlJlbQ== 11398
+X1NZ 11399
+bmFtZXM= 11400
+IFZp 11401
+IGlzaW5zdGFuY2U= 11402
+XCI+ 11403
+ICo9 11404
+IFBT 11405
+IGRhbmdlcm91cw== 11406
+W3A= 11407
+T01F 11408
+T3RoZXI= 11409
+IFN0cmluZ0J1aWxkZXI= 11410
+UG9pbnRz 11411
+aGVhZGluZw== 11412
+IGN1cnJlbmN5 11413
+IHBlcmNlbnRhZ2U= 11414
+X0FQSQ== 11415
+IGNsYXNzaWM= 11416
+dGhlYWQ= 11417
+IE1P 11418
+RkU= 11419
+SWR4 11420
+YXdhaXQ= 11421
+IMOo 11422
+IGFjY2lkZW50 11423
+IHZhcmlhbnQ= 11424
+IG15c3Q= 11425
+IExhbmQ= 11426
+IEJyZQ== 11427
+IGhhcm0= 11428
+IEFjYw== 11429
+IGNoYXJnZWQ= 11430
+aW9uZXM= 11431
+VmlzaWJpbGl0eQ== 11432
+YXJyeQ== 11433
+IExhbmd1YWdl 11434
+IHdhbGtpbmc= 11435
+Ii4KCg== 11436
+aWZlcg== 11437
+IGxlYWRlcnNoaXA= 11438
+LkZyb20= 11439
+eW5hbQ== 11440
+IHRpbWVzdGFtcA== 11441
+aXB0 11442
+IEhhcw== 11443
+UkVGRVI= 11444
+IEl0cw== 11445
+IGxpc3RlbmVy 11446
+VVRF 11447
+X2Rlc2NyaXB0aW9u 11448
+IGV4cGVyaWVuY2Vz 11449
+IGNyZWF0ZXM= 11450
+UlM= 11451
+Y2FydA== 11452
+YmxhY2s= 11453
+IGNob2ljZXM= 11454
+d2Fy 11455
+ICcnJw== 11456
+IG9yZGVyZWQ= 11457
+IGV2ZW5pbmc= 11458
+IHBpbA== 11459
+IHR1bg== 11460
+IEJhZA== 11461
+KGFwcA== 11462
+cmFuZG9t 11463
+IGV4cGxpY2l0 11464
+IGFycml2ZWQ= 11465
+IGZseQ== 11466
+IGVjb25vbQ== 11467
+LW1haWw= 11468
+IGxpc3Rz 11469
+IGFyY2hpdGVjdA== 11470
+IFBheQ== 11471
+IGRz 11472
+IFNvbA== 11473
+IHZlaGljbGVz 11474
+SHo= 11475
+LWNvbQ== 11476
+IGtpbmc= 11477
+X2VxdWFs 11478
+IEhlbHA= 11479
+IGFidXNl 11480
+LS07Cg== 11481
+IGV4dHI= 11482
+IGNoZW1pY2Fs 11483
+5L8= 11484
+IG9yaWVudA== 11485
+IGJyZWF0aA== 11486
+IFNwYWNl 11487
+KGVsZW1lbnQ= 11488
+d2FpdA== 11489
+REVE 11490
+aWdtYQ== 11491
+IGVudHI= 11492
+IHNvYg== 11493
+LW5hbWU= 11494
+IGFmZmVjdGVk 11495
+aWth 11496
+IGNvYWw= 11497
+X3dvcms= 11498
+IGh1bmRyZWRz 11499
+IHBvbGl0aWNz 11500
+c3ViamVjdA== 11501
+IGNvbnN1bWVy 11502
+QU5HRQ== 11503
+IHJlcGVhdGVk 11504
+U2VuZA== 11505
+ICNb 11506
+IHByb3RvY29s 11507
+IGxlYWRz 11508
+dXNldW0= 11509
+RXZlcnk= 11510
+SW1wb3J0 11511
+KGNvdW50 11512
+IGNoYWxsZW5nZXM= 11513
+IG5vdmVs 11514
+IGRlcGFydA== 11515
+Yml0cw== 11516
+LkN1cnJlbnQ= 11517
+IGAkew== 11518
+b3Rpbmc= 11519
+KFw= 11520
+IGNyZWF0aXZl 11521
+IGJ1ZmY= 11522
+IGludHJvZHVjZWQ= 11523
+dXNpYw== 11524
+bW9kdWxlcw== 11525
+QXJl 11526
+LWRvYw== 11527
+bGFuZ3VhZ2U= 11528
+X2NhY2hl 11529
+IHRvZA== 11530
+Pz48Lw== 11531
+b21ldGhpbmc= 11532
+IGh1bg== 11533
+5bo= 11534
+YXRlcnM= 11535
+SW50ZW50 11536
+IGltcGxlbWVudGVk 11537
+IENhc2U= 11538
+Q2hpbGRyZW4= 11539
+IG5vdGlmaWNhdGlvbg== 11540
+UmVuZGVyZXI= 11541
+V3JhcHBlcg== 11542
+T2JqZWN0cw== 11543
+dGw= 11544
+LkNvbnRhaW5z 11545
+UGx1Z2lu 11546
+LnJvdw== 11547
+IGZvcmc= 11548
+IHBlcm1pdA== 11549
+IHRhcmdldHM= 11550
+IElG 11551
+IHRpcA== 11552
+c2V4 11553
+IHN1cHBvcnRz 11554
+IGZvbGQ= 11555
+cGhvdG8= 11556
+fSwNCg== 11557
+IGdvb2dsZQ== 11558
+JCgnIw== 11559
+IHNoYXJpbmc= 11560
+IGdvb2Rz 11561
+dnM= 11562
+IERhbg== 11563
+UmF0ZQ== 11564
+IE1hcnRpbg== 11565
+IG1hbm5lcg== 11566
+bGll 11567
+LlRoZQ== 11568
+SW50ZXJuYWw= 11569
+IENPTlRS 11570
+TW9jaw== 11571
+UklHSFQ= 11572
+ICd7 11573
+IGNvbnRyb2xz 11574
+TWF0 11575
+IG1hbmQ= 11576
+IGV4dGVuZGVk 11577
+T2s= 11578
+IGVtYmVk 11579
+IHBsYW5ldA== 11580
+IE5vbg== 11581
+LWNo 11582
+KSIs 11583
+ZXBhcg== 11584
+IGJlbGlldmVk 11585
+IEVudmlyb25tZW50 11586
+IEZyaWVuZA== 11587
+LXJlcw== 11588
+IGhhbmRsaW5n 11589
+bmlj 11590
+LWxldmVs 11591
+c2NyaQ== 11592
+WG1s 11593
+QkU= 11594
+dW5nZW4= 11595
+IGFsdGVy 11596
+W2lkeA== 11597
+UG9w 11598
+Y2Ft 11599
+ICgoKA== 11600
+IHNoaXBwaW5n 11601
+IGJhdHRlcnk= 11602
+aWRkbGV3YXJl 11603
+TUM= 11604
+IGltcGw= 11605
+b3RhdGlvbg== 11606
+IExhYg== 11607
+PGZvcm0= 11608
+CW5hbWU= 11609
+IEdhbWVz 11610
+cmF5 11611
+RXh0cmE= 11612
+VHdv 11613
+KHBsYXllcg== 11614
+IExlcw== 11615
+wrA= 11616
+IGNoYXJzZXQ= 11617
+IGpvdXJuZXk= 11618
+ZXRpbmc= 11619
+5pg= 11620
+4pQ= 11621
+55So 11622
+IGRpbg== 11623
+IHBlcm1hbg== 11624
+IHNvbHZl 11625
+IGxhdW5jaGVk 11626
+IG5pbmU= 11627
+IHNlbmRpbmc= 11628
+IHRlbGxpbmc= 11629
+LnBhc3N3b3Jk 11630
+IE1hdHJpeA== 11631
+ZXJpYw== 11632
+IGdyYWI= 11633
+LnU= 11634
+IExpYnJhcnk= 11635
+IGRlYnQ= 11636
+SU5L 11637
+LmZpbmRWaWV3QnlJZA== 11638
+IGZyZXF1ZW5jeQ== 11639
+LmFk 11640
+X1RFU1Q= 11641
+IG5lZ290 11642
+IEFmcmljYW4= 11643
+c2VuZGVy 11644
+xaE= 11645
+R2xvYmFs 11646
+IGV4cGVydHM= 11647
+KyspDQo= 11648
+IGRlcGVuZGluZw== 11649
+Z3JheQ== 11650
+IGp1ZGdl 11651
+IHNlbnRlbmNl 11652
+bG9zdXJl 11653
+QWM= 11654
+IHRyYWNl 11655
+RWRnZQ== 11656
+IGZyaWVuZGx5 11657
+IGNvbmNlcm5lZA== 11658
+YmxvZw== 11659
+IGNsYWltZWQ= 11660
+fSc= 11661
+aW50ZWdlcg== 11662
+X3RyZWU= 11663
+CWNvbnRpbnVl 11664
+eGk= 11665
+IGFjY2VwdGVk 11666
+X29uZQ== 11667
+IEVkdWNhdGlvbg== 11668
+dWJsaXNoZWQ= 11669
+Z29u 11670
+YXBwb2ludA== 11671
+b3V0cw== 11672
+IG1pbmluZw== 11673
+IHNvbmdz 11674
+IGhlcnNlbGY= 11675
+IGdyYW50ZWQ= 11676
+IHBhc3Npb24= 11677
+IExha2U= 11678
+IGxvYW4= 11679
+dWVudA== 11680
+Y2hhbnQ= 11681
+IGRldGFpbGVk 11682
+ZXhjZXB0 11683
+X2NtZA== 11684
+IEhF 11685
+UmVsYXRlZA== 11686
+enQ= 11687
+J30sCg== 11688
+IHNwZWNpZmljYWxseQ== 11689
+U3RhdGlj 11690
+IGNhcnJpZWQ= 11691
+QU5T 11692
+XCI6 11693
+Q3JlYXRlZA== 11694
+IGN1bA== 11695
+XS0= 11696
+X2FwaQ== 11697
+RlA= 11698
+IHNpdHRpbmc= 11699
+ICIiKQ== 11700
+CWdvdG8= 11701
+IEVxdQ== 11702
+IGFzc2F1bHQ= 11703
+a2lucw== 11704
+YW5jZXI= 11705
+b2dlbg== 11706
+IHZvdGVycw== 11707
+IFByb3Q= 11708
+RGVzY3JpcHRvcg== 11709
+44O8 11710
+LkFzc2VydA== 11711
+YnNpdGVz 11712
+b3N0ZXI= 11713
+LW1lbnU= 11714
+IGFybXM= 11715
+LkNsaWVudA== 11716
+LmJhY2tncm91bmQ= 11717
+YXZpdHk= 11718
+IHZ1bA== 11719
+X01BU0s= 11720
+IGhvdXNpbmc= 11721
+IGJlYXI= 11722
+X2l0ZXI= 11723
+cGlyZWQ= 11724
+IG1hcmtldHM= 11725
+IFN0dWRlbnQ= 11726
+IHRpY2tldA== 11727
+IG1pbGxpb25z 11728
+ZmxhdGVy 11729
+KT0= 11730
+IHJlY292ZXI= 11731
+IEZvcmNl 11732
+IEJvdGg= 11733
+IHZpY3RpbQ== 11734
+IERpc2M= 11735
+cmVwb3J0 11736
+IGZvdXJ0aA== 11737
+IEFzc2VtYmx5 11738
+L3VzZXI= 11739
+TnVsbE9y 11740
+dGV4dGFyZWE= 11741
+IGF0aA== 11742
+IChb 11743
+IGNoYW5uZWxz 11744
+IEp1c3RpY2U= 11745
+Y2hvaWNl 11746
+TE9CQUw= 11747
+ZXhlYw== 11748
+ZW1hbGU= 11749
+IGVsZW0= 11750
+X2xl 11751
+IHJlc3BvbnNpYmlsaXR5 11752
+IFR3 11753
+SUNBVElPTg== 11754
+IGVsc2VpZg== 11755
+IGZv 11756
+YXN0cw== 11757
+IHRyZWF0ZWQ= 11758
+c2Vu 11759
+IFZpY3Q= 11760
+c3VtZXI= 11761
+X0JBU0U= 11762
+IGFzdA== 11763
+Pnt7 11764
+IFJlc291cmNl 11765
+IFN0YW5kYXJk 11766
+IFByZW0= 11767
+dXBkYXRlZA== 11768
+aXZhbGVudA== 11769
+IGFzc2V0cw== 11770
+X3RlbXA= 11771
+IGludGVyZXN0cw== 11772
+IGhhcmR3YXJl 11773
+IFJvbQ== 11774
+IFNoYXJl 11775
+ICcnCg== 11776
+ICos 11777
+IFRha2U= 11778
+IEltYWdlcw== 11779
+X0NIRUNL 11780
+KHR5cGVvZg== 11781
+IEp1bg== 11782
+XDxe 11783
+IGxpcXU= 11784
+IHdvcnN0 11785
+eW1ib2xz 11786
+CQkJICAg 11787
+IGRyaXZlcnM= 11788
+IERvY3VtZW50 11789
+ZW5v 11790
+IFRlY2hub2xvZ3k= 11791
+IGFwcHJvdmVk 11792
+dW1wcw== 11793
+IHNub3c= 11794
+Zm9ybWFuY2U= 11795
+X0FTU0VSVA== 11796
+dWl0cw== 11797
+2YY= 11798
+IGRpZmZlcmVuY2Vz 11799
+LlZpc2libGU= 11800
+CQkJDQo= 11801
+IFBz 11802
+X2ZldGNo 11803
+IHRvZG8= 11804
+LicsCg== 11805
+IHNlbA== 11806
+dXJlcnM= 11807
+aW52YWxpZA== 11808
+IHR3ZWV0 11809
+VkVM 11810
+IHJlc2VhcmNoZXJz 11811
+IHNwcmludGY= 11812
+IFJP 11813
+IHBlbA== 11814
+LlRyYW5z 11815
+IGlsbGVnYWw= 11816
+ZGlhbG9n 11817
+c21hcnR5 11818
+bGc= 11819
+X01JTg== 11820
+IGhlcm8= 11821
+ZmluYWw= 11822
+IHBw 11823
+Lkxl 11824
+IGNp 11825
+CVJU 11826
+IHN1Z2dlc3RlZA== 11827
+cGRm 11828
+YWNoaW5n 11829
+IFJv 11830
+IFByb3BlcnRpZXM= 11831
+IFNp 11832
+IGJ1eWluZw== 11833
+IG11 11834
+IGxhbmRz 11835
+aWZpZXJz 11836
+IEZJTEU= 11837
+Uk9VUA== 11838
+IGhvbGRlcg== 11839
+IFNvbg== 11840
+IHN5bXB0 11841
+LnJvdXRl 11842
+KT8= 11843
+IGFyZ2M= 11844
+IGZvcnQ= 11845
+IGNhc2lubw== 11846
+X2NhdGVnb3J5 11847
+IGZvcnVt 11848
+cHJlZml4 11849
+YXB0dXJl 11850
+VHViZQ== 11851
+ZW1z 11852
+aW1pemU= 11853
+IG51ZQ== 11854
+YXVz 11855
+Y291cnNl 11856
+QVRPUg== 11857
+KCkpLA== 11858
+QWR2ZXJ0aXM= 11859
+SU5HUw== 11860
+IGFja25vdw== 11861
+IEtvcmVh 11862
+cGxpbmc= 11863
+IHdvcmtlcg== 11864
+UExJRUQ= 11865
+aGFs 11866
+IFJpY2hhcmQ= 11867
+RWxlbWVudHM= 11868
+CQkJIA== 11869
+c3Rhcg== 11870
+IHJlbGF0aW9uc2hpcHM= 11871
+IGNoZWFw 11872
+QUNI 11873
+IFhNTA== 11874
+LCY= 11875
+IExvdWlz 11876
+IHJpZGU= 11877
+X0ZBSUw= 11878
+IGNodW5r 11879
+W3M= 11880
+X09VVA== 11881
+IGNob3Nlbg== 11882
+X1s= 11883
+Lyg= 11884
+IEplZmY= 11885
+X3Ns 11886
+cHJpdg== 11887
+IENhbmFkaWFu 11888
+IHVuYWJsZQ== 11889
+X0ZMQUc= 11890
+IG5vcw== 11891
+aGlnaA== 11892
+IGxpZnQ= 11893
+ZnVu 11894
+KCl7 11895
+ZWxseQ== 11896
+eWNsZXJWaWV3 11897
+X2Fz 11898
+X0xJU1Q= 11899
+IHJhZGk= 11900
+LmdldFZhbHVl 11901
+IEFuZ2VsZXM= 11902
+IFNwYW4= 11903
+X2luc3RhbmNl 11904
+aXRvcnM= 11905
+IG1pZ3JhdGlvbg== 11906
+QUs= 11907
+T2g= 11908
+wq4= 11909
+LnNlbGVjdGVk 11910
+IEdU 11911
+IGFkdmFuY2U= 11912
+IFN0eWxl 11913
+LkRhdGFHcmlkVmlldw== 11914
+ZWN0aW9u 11915
+0Y4= 11916
+cGlv 11917
+cm9n 11918
+IHNob3BwaW5n 11919
+IFJlY3Q= 11920
+SWxsdW1pbmF0ZQ== 11921
+T1U= 11922
+CWFycmF5 11923
+IHN1YnN0YW50aWFs 11924
+IHByZWdu 11925
+IHByb21vdGU= 11926
+SUVX 11927
+LkxheW91dA== 11928
+IHNpZ25z 11929
+Ly4= 11930
+IGxldHRlcnM= 11931
+Qm9hcmQ= 11932
+Y3RybA== 11933
+Ilw= 11934
+IEpvbmVz 11935
+IHZlcnRleA== 11936
+IGph 11937
+IGFmZmlsaQ== 11938
+IHdlYWx0aA== 11939
+CWRlZmF1bHQ= 11940
+IHNpZ25pZmljYW50bHk= 11941
+IGVj 11942
+IHhz 11943
+YWN0dWFs 11944
+LnBlcg== 11945
+X3N0ZXA= 11946
+YW52YXM= 11947
+bWFj 11948
+IHRyYW5zbA== 11949
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA= 11950
+SXRlcmF0b3I= 11951
+IG9jaA== 11952
+YWdub3N0aWM= 11953
+IER1cmluZw== 11954
+IERFRkFVTFQ= 11955
+IHRpbGw= 11956
+IHNpZ25hdHVyZQ== 11957
+IGJpcmQ= 11958
+IE9s 11959
+IEly 11960
+SFM= 11961
+YXZhdGFy 11962
+RVNTQUdF 11963
+IGVsZXY= 11964
+IG10 11965
+IE5hdg== 11966
+IHJlbGF4 11967
+IHBsYXRl 11968
+SVRFTQ== 11969
+KGRhdGU= 11970
+Lm5vdA== 11971
+IGdyYWRl 11972
+IH0pLAo= 11973
+PyIKCg== 11974
+aWVuY2Vz 11975
+SGlnaA== 11976
+IERJUw== 11977
+ZGlzYWJsZWQ= 11978
+UVVJ 11979
+IG5vaXNl 11980
+YXV4 11981
+IFVQ 11982
+b3Nh 11983
+IHZvYw== 11984
+ICkp 11985
+b2NvbQ== 11986
+X09GRg== 11987
+IERi 11988
+TG9jaw== 11989
+LmVjbGlwc2U= 11990
+LGQ= 11991
+IERyYXc= 11992
+ICIo 11993
+IHZpc2l0ZWQ= 11994
+IOKI 11995
+IHN1Y2NlZWQ= 11996
+IGltcG9zc2libGU= 11997
+YWlyZQ== 11998
+IFR1cm4= 11999
+IGRpc2g= 12000
+Rkc= 12001
+IHNlbnNvcg== 12002
+QU5O 12003
+YWJh 12004
+IHN1cmc= 12005
+XSk7DQo= 12006
+IGZw 12007
+X2Fu 12008
+LUo= 12009
+LUc= 12010
+IEpvYg== 12011
+Q29udmVydA== 12012
+IEtFWQ== 12013
+IGF1dGhvcnM= 12014
+X3NlcnZlcg== 12015
+XHI= 12016
+IC0qLQ== 12017
+ZmxleA== 12018
+IHNvYw== 12019
+UmV0 12020
+IHNhbHQ= 12021
+IOKApgoK 12022
+IENsZWFy 12023
+KHBhZ2U= 12024
+LWRhbmdlcg== 12025
+IHJvb21z 12026
+Y29udg== 12027
+I3s= 12028
+Lm9w 12029
+IEFyZWE= 12030
+X1ND 12031
+aGVu 12032
+IGJlZ2lucw== 12033
+LXk= 12034
+IGV4Y2l0ZWQ= 12035
+IGlnbm9yZWQ= 12036
+IGJvbnVz 12037
+c3R1ZGVudA== 12038
+IE1lbWJlcg== 12039
+IHJlbGF0aXZlbHk= 12040
+IExvdw== 12041
+IFByb2R1 12042
+YXRld2F5 12043
+cG9zdXJl 12044
+IHRoaWNr 12045
+YW5pZWw= 12046
+KHZpZXc= 12047
+IENydXNo 12048
+RXh0ZW5zaW9u 12049
+SWw= 12050
+ZWVk 12051
+TE9D 12052
+Lmlt 12053
+Lkl0ZW1z 12054
+IGNvbmZsaWN0 12055
+LnByZXZlbnQ= 12056
+IG9uQ3JlYXRl 12057
+dXY= 12058
+aXNlcg== 12059
+IHdhdmU= 12060
+TWFy 12061
+IENvbW11bml0eQ== 12062
+aWNoZQ== 12063
+IE5vdGhpbmc= 12064
+W20= 12065
+IExlZQ== 12066
+cmllbmRz 12067
+w6hyZQ== 12068
+ISEh 12069
+YW56 12070
+LnJlc3VsdA== 12071
+IFNL 12072
+X1BBUkFN 12073
+IGRlbW9jcg== 12074
+QmFja0NvbG9y 12075
+LmV4aXN0cw== 12076
+Ikl0 12077
+KG9wdGlvbnM= 12078
+cmF6eQ== 12079
+YXNlcg== 12080
+XERhdGFiYXNl 12081
+YWxlbmRhcg== 12082
+X2Fzcw== 12083
+O30K 12084
+dmVydGV4 12085
+aW5lY3JhZnQ= 12086
+V2FybmluZw== 12087
+YXJnbw== 12088
+IGFjdG9y 12089
+IEluc3RlYWQ= 12090
+IFVzaW5n 12091
+U2VsZg== 12092
+QGludGVyZmFjZQ== 12093
+IHNwZWFraW5n 12094
+IFBhcmlz 12095
+IExJQ0VOU0U= 12096
+Lm5vZGU= 12097
+IEZvb2Q= 12098
+RUlG 12099
+IEJp 12100
+LlN0YXJ0 12101
+IElC 12102
+IHVuaXZlcnNpdHk= 12103
+IEhlYWRlcg== 12104
+LnByb2R1Y3Q= 12105
+Q29weQ== 12106
+ZXRj 12107
+cmljYWw= 12108
+ID4+Pg== 12109
+Ym9va3M= 12110
+IGFsZ29yaXRobQ== 12111
+ICdfXw== 12112
+KGphdmF4 12113
+IG51bWVyb3Vz 12114
+U2hhcmU= 12115
+SGF2ZQ== 12116
+IHJlY3J1 12117
+IHByb3Zl 12118
+LnN1YnN0cmluZw== 12119
+aGVhbHRo 12120
+0LXQuw== 12121
+IGRlY2ltYWw= 12122
+IGNvbW1pc3Npb24= 12123
+c2NyaXB0aW9u 12124
+eEM= 12125
+IHN1bW1hcnk= 12126
+YXR0ZWQ= 12127
+IGNsb3Nlcg== 12128
+ZmluaXNoZWQ= 12129
+KCkpewo= 12130
+IFdvb2Q= 12131
+X2ZpZWxkcw== 12132
+a3U= 12133
+X2l0ZW1z 12134
+RmxhZw== 12135
+IGNvbmZpZGVuY2U= 12136
+IEZlZGVyYWw= 12137
+ZHV4 12138
+IGNvbXBhdA== 12139
+IHZlcnRpY2Fs 12140
+0Lk= 12141
+w6hz 12142
+OyI+Cg== 12143
+X21hbmFnZXI= 12144
+KCkpKQo= 12145
+SURF 12146
+OiIs 12147
+X18K 12148
+IFdheQ== 12149
+0Yg= 12150
+VGVtcA== 12151
+IFNUUg== 12152
+cml0dGVu 12153
+U3luYw== 12154
+IEFW 12155
+IENFTw== 12156
+IEd1aWQ= 12157
+IGVudmlyb25tZW50YWw= 12158
+IGNvcnJlc3BvbmRpbmc= 12159
+CWNvbnNvbGU= 12160
+IGp1c3RpY2U= 12161
+IEpT 12162
+IGxpdmVk 12163
+Z2Fy 12164
+IEdyYXBo 12165
+IFN0YXQ= 12166
+IGlQaG9uZQ== 12167
+LmFs 12168
+IEhE 12169
+IG9jY3Vy 12170
+IHRocmVzaG9sZA== 12171
+IG9uY2xpY2s= 12172
+UkVH 12173
+LkdyYXBoaWNzVW5pdA== 12174
+TWV0YQ== 12175
+xb4= 12176
+IGN1bQ== 12177
+LmdudQ== 12178
+w6s= 12179
+IG9idGFpbmVk 12180
+IGNvbXBsYWludA== 12181
+IGVhdGluZw== 12182
+IHRhcg== 12183
+X3Rhc2s= 12184
+IG9wdHM= 12185
+KHRv 12186
+UGFzcw== 12187
+IHBsYXN0aWM= 12188
+dGlsaXR5 12189
+IFdpbg== 12190
+LnByZXZlbnREZWZhdWx0 12191
+cGlsZQ== 12192
+IEdhcg== 12193
+IHF1YW50aXR5 12194
+X2xhc3Q= 12195
+IGdyZWF0ZXN0 12196
+RGFv 12197
+X0RJUw== 12198
+IFVzZWQ= 12199
+IEhQ 12200
+cml0aW5n 12201
+U0lPTg== 12202
+Ymx1ZQ== 12203
+ZG9tYWlu 12204
+IHNjb3Jlcw== 12205
+Tm9ybWFs 12206
+X2FkbWlu 12207
+IEFTU0VSVA== 12208
+VGhlbg== 12209
+Kioq 12210
+ZGlzdA== 12211
+bG9u 12212
+IGhhdGU= 12213
+c2hhbA== 12214
+SW1hZ2VWaWV3 12215
+ZGF0YWJhc2U= 12216
+IHBhbmQ= 12217
+IGxvZ2lj 12218
+PWZhbHNl 12219
+Ymc= 12220
+IENvbmZpZ3VyYXRpb24= 12221
+IG51cg== 12222
+T0c= 12223
+IG1hcnJpZWQ= 12224
+Ois= 12225
+IGRyb3BwZWQ= 12226
+IHJlZ2lzdHJhdGlvbg== 12227
+0L7QvA== 12228
+dWx0aXBsZQ== 12229
+aXplcnM= 12230
+c2hhcGU= 12231
+LmNvcHk= 12232
+IHdlYXJpbmc= 12233
+IENhdGg= 12234
+IGRlZGljYXRlZA== 12235
+IC4uLgo= 12236
+IGFkdm9j 12237
+IEZhbWlseQ== 12238
+IHN0YXRlbWVudHM= 12239
+ZW1hdGlj 12240
+YW1waW9uc2hpcA== 12241
+IG1vdGl2 12242
+IEhhdmU= 12243
+IGJsb3c= 12244
+Sm9i 12245
+Y2VydA== 12246
+X3ZlY3Rvcg== 12247
+aW5zdGFsbA== 12248
+IENPUFk= 12249
+ZW1iZWQ= 12250
+RElS 12251
+IFNwcmluZw== 12252
+IGV4aGli 12253
+Y2Ru 12254
+IENvbW1lbnQ= 12255
+IE9wdGlvbmFs 12256
+LnBsYXllcg== 12257
+IERhcms= 12258
+KHBvcw== 12259
+IFNob3VsZA== 12260
+IGNlbnRyZQ== 12261
+IEd1YXJk 12262
+w7N3 12263
+IHRyb3VibGU= 12264
+RU5FUg== 12265
+KHVuc2lnbmVk 12266
+X3NlcnZpY2U= 12267
+IG5z 12268
+dWxpbmc= 12269
+IE1leGljbw== 12270
+IE5Z 12271
+bXlzcWw= 12272
+IGxpYw== 12273
+5Zw= 12274
+TXI= 12275
+LWZs 12276
+IEN1c3RvbWVy 12277
+aWRp 12278
+ID8+Cgo= 12279
+cmlibGU= 12280
+INC/0YA= 12281
+IHNpemVz 12282
+X1NUUklORw== 12283
+dmFsaWRhdGlvbg== 12284
+IEpvbg== 12285
+KEh0dHA= 12286
+YWRkQ2xhc3M= 12287
+Tm9kZXM= 12288
+IGZyYWdtZW50 12289
+IHNwb2tl 12290
+IHdhc3Rl 12291
+Sm9pbg== 12292
+IGlsbHVzdHI= 12293
+ZWxp 12294
+Y2llbnQ= 12295
+IGFpZA== 12296
+IHByb3NlYw== 12297
+Jyl7Cg== 12298
+IHBhc3Npbmc= 12299
+IGZhY2Vz 12300
+U2hhcGU= 12301
+X1o= 12302
+aXRp 12303
+IGFsbGU= 12304
+IHJvYm90 12305
+ICAgICAgIAo= 12306
+IFNwZQ== 12307
+IHJlY2VpdmluZw== 12308
+IERldGFpbHM= 12309
+ICIp 12310
+bWc= 12311
+X1JFRg== 12312
+IGNvbXBhcmlzb24= 12313
+Kiw= 12314
+IEZvdW5k 12315
+X3Nlc3Npb24= 12316
+KFU= 12317
+L0Y= 12318
+IHh4eA== 12319
+TmV0d29yaw== 12320
+ZGVycw== 12321
+IGNhcHR1cmU= 12322
+IGNvcnJl 12323
+IEx0ZA== 12324
+IEFkdg== 12325
+W0A= 12326
+IGNsaXA= 12327
+TWlsbA== 12328
+IFByb2ZpbGU= 12329
+IGVuZGlm 12330
+IG9ibGln 12331
+ZGVzY3JpYmU= 12332
+LmVsZW1lbnQ= 12333
+cml0ZXJpb24= 12334
+TEQ= 12335
+ZXJlZA== 12336
+IGZhdm91cg== 12337
+c2NvcmU= 12338
+IEZpbHRlcg== 12339
+YXR0cmlidXRlcw== 12340
+IGNoZWNrcw== 12341
+SW5mbGF0ZXI= 12342
+IFBsdXM= 12343
+IHNjaWVudGlmaWM= 12344
+IHByaXZhY3k= 12345
+SGVhZA== 12346
+IGZlYXQ= 12347
+IGRlZ3JlZXM= 12348
+IFBhbGU= 12349
+OyI+ 12350
+IGZpbG1z 12351
+IEF1ZGlv 12352
+IFRhZw== 12353
+IEVuZXJneQ== 12354
+aXRhcg== 12355
+cGFyYXRvcg== 12356
+IGZlbGxvdw== 12357
+IGV2dA== 12358
+IFRyaQ== 12359
+IERBTQ== 12360
+Y2xvdWQ= 12361
+IFBhc3N3b3Jk 12362
+IERlbW9jcmF0cw== 12363
+IEFjYWQ= 12364
+JGxhbmc= 12365
+IHJlYg== 12366
+KCkpCgo= 12367
+0L3Riw== 12368
+IEJ1cg== 12369
+cmVhZGNy 12370
+IGhleA== 12371
+Q29uc29sZQ== 12372
+Y3Rs 12373
+b3VzZWw= 12374
+IFdpbGxpYW0= 12375
+IGF6 12376
+X1BPUlQ= 12377
+IHByYWN0aWNlcw== 12378
+IGFueXdoZXJl 12379
+IFBvc2l0aW9u 12380
+IC0+Cg== 12381
+aWFtcw== 12382
+LnVzZXJuYW1l 12383
+cGxhY2Vob2xkZXI= 12384
+IG9kZXI= 12385
+IFNlY3JldGFyeQ== 12386
+IGlU 12387
+bW9uZA== 12388
+ZXZlbnRz 12389
+P+KAnQ== 12390
+LlN1Yg== 12391
+IGF0dGFjaGVk 12392
+IG7Do28= 12393
+IGVzdGF0ZQ== 12394
+LmFjdGlvbg== 12395
+IGZpZ3VyZXM= 12396
+IH0pOw0K 12397
+IHN1YnNjcmk= 12398
+LnRhZw== 12399
+bmFt 12400
+LnBsb3Q= 12401
+bm9vbg== 12402
+bGlhbWVudA== 12403
+Q2hhcmFjdGVy 12404
+LnRhYg== 12405
+IHdpbnRlcg== 12406
+IFZhcmlhYmxl 12407
+IHRyZWVz 12408
+IHByb3Vk 12409
+KFY= 12410
+X2xvYWQ= 12411
+IGhpZXI= 12412
+IEVjb24= 12413
+IGZk 12414
+IHZpY3RpbXM= 12415
+UmVzdA== 12416
+aWFuYQ== 12417
+IGZha2U= 12418
+LlByaW50bG4= 12419
+IHN0cmxlbg== 12420
+IHNhZA== 12421
+IGJsZQ== 12422
+UHJvdA== 12423
+IGJ1dHRvbnM= 12424
+IHRlbGV2aXNpb24= 12425
+IGxvZ28= 12426
+ZXh0ZW5zaW9u 12427
+CWo= 12428
+c3RlaW4= 12429
+YWNpb25lcw== 12430
+ICIiIgoK 12431
+IHNpbXA= 12432
+IHJlY29yZGVk 12433
+IGJyaW5ncw== 12434
+IHByaW5jaXBhbA== 12435
+IGZlZXM= 12436
+KHNvdXJjZQ== 12437
+a2Rpcg== 12438
+IHV0aWxz 12439
+IGNvcnJlY3RseQ== 12440
+Zmls 12441
+IHdlbA== 12442
+UGFpcg== 12443
+LWJ1dHRvbg== 12444
+c2NhbGU= 12445
+dmVyaWZ5 12446
+W2M= 12447
+IC0tLQ== 12448
+IGVzY2FwZQ== 12449
+aWtlcw== 12450
+TG93ZXJDYXNl 12451
+aWNpYW4= 12452
+IGNoYXB0ZXI= 12453
+IFRZUEU= 12454
+IHNoYWRvdw== 12455
+IGF3ZXNvbWU= 12456
+V0U= 12457
+ZWxpZg== 12458
+IGxhbWJkYQ== 12459
+IGRpc3RpbmN0 12460
+IGJhcmU= 12461
+LW9mZg== 12462
+IGNvbG91cg== 12463
+LmFwcGVuZENoaWxk 12464
+b2xlYw== 12465
+YWdh 12466
+LmZpbGw= 12467
+CXN1cGVy 12468
+IGFkag== 12469
+KHBvc2l0aW9u 12470
+LmdldEl0ZW0= 12471
+U2hvcnQ= 12472
+IHRvdGFsbHk= 12473
+VkQ= 12474
+IFRyZQ== 12475
+X2Vw 12476
+dmVtZW50cw== 12477
+IFNvbHV0aW9u 12478
+IGZ1bmRhbWVudA== 12479
+Rm9sbG93 12480
+IGZhY2lsaXR5 12481
+IGhhcHBlbmluZw== 12482
+T0Y= 12483
+LnRleHRCb3g= 12484
+U3Bhbg== 12485
+IMKr 12486
+aWRlbg== 12487
+IGV4Y2VlZA== 12488
+KHBhcmVudA== 12489
+IGNw 12490
+57s= 12491
+IGhhc24= 12492
+IHByaQ== 12493
+IGNvbnNlcXU= 12494
+bmVu 12495
+IElOVE8= 12496
+SWdub3Jl 12497
+IEZ1dHVyZQ== 12498
+IGNhcmJvbg== 12499
+IFN0ZWVs 12500
+Zm10 12501
+b2tpZQ== 12502
+IHNwbA== 12503
+KHRpdGxl 12504
+LWluZm8= 12505
+IGRlYWxz 12506
+IGZpeHR1cmU= 12507
+ZWE= 12508
+RGl2 12509
+IHRlc3RlZA== 12510
+X3JldHVybg== 12511
+KQoKCgo= 12512
+dXBwb3J0ZWQ= 12513
+IENvb2s= 12514
+IHBheWluZw== 12515
+IElsbA== 12516
+IGFycmVzdGVk 12517
+IFByaW1l 12518
+X2NhbGxiYWNr 12519
+PiwK 12520
+ZHJpdmVy 12521
+T25jZQ== 12522
+YWJi 12523
+X2J5dGVz 12524
+IFNldHM= 12525
+KE9iamVjdA== 12526
+IGNj 12527
+IHNoZWxs 12528
+YWxv 12529
+KTsvLw== 12530
+KGxvZw== 12531
+Y3RvcnM= 12532
+KTwv 12533
+IG5laWdoYm9yaG9vZA== 12534
+YWlsYWJpbGl0eQ== 12535
+dm9s 12536
+IHlvdXRo 12537
+IHRlY2huaXF1ZXM= 12538
+IFNjaGVtYQ== 12539
+dWg= 12540
+bWVudGU= 12541
+IHJlcG9zaXRvcnk= 12542
+aW1t 12543
+IGNvb2tpZQ== 12544
+SlM= 12545
+b3ZpZXM= 12546
+Ons= 12547
+Q29tcGxldGU= 12548
+U2luY2U= 12549
+IGxhdWdo 12550
+X0JP 12551
+ZW5hYmxl 12552
+IERvZXM= 12553
+IFdhbGs= 12554
+d2hhdA== 12555
+a2Vz 12556
+IG11bHRpcA== 12557
+aW1lbnRz 12558
+ZXVy 12559
+IHZpY3Rvcnk= 12560
+R2VuZXJhdG9y 12561
+IE1vcw== 12562
+cm92ZXJz 12563
+IGNvbXB1dGU= 12564
+IHByb3ZpZGVycw== 12565
+IE1lZGlj 12566
+TFA= 12567
+X0NPTkZJRw== 12568
+IHZldGVy 12569
+c3RlcnM= 12570
+X3dpbmRvdw== 12571
+dW1lcmlj 12572
+CQkJCQkK 12573
+LlJlc3BvbnNl 12574
+IHJlcGxhY2Vk 12575
+LnJvb3Q= 12576
+LWZyZWU= 12577
+LWNvbnRhaW5lcg== 12578
+IG1hdGNoaW5n 12579
+IEVkaXRvcg== 12580
+PSR7 12581
+IFNhZg== 12582
+IHNpbmQ= 12583
+KGJ1ZmZlcg== 12584
+5Yc= 12585
+LmVkdQ== 12586
+KV07Cg== 12587
+IE5GTA== 12588
+YXlh 12589
+IGRvZ3M= 12590
+IGRlc2lyZQ== 12591
+IE1pZGRsZQ== 12592
+Q2FydA== 12593
+VGhlbWU= 12594
+IG1vYg== 12595
+IGRpc3BsYXllZA== 12596
+aWdpdA== 12597
+IGFkdWx0cw== 12598
+IiIi 12599
+IGRlbGl2ZXJlZA== 12600
+dmlzaWJsZQ== 12601
+Ijp7Cg== 12602
+PDw8 12603
+IEdP 12604
+c2Nyb2xs 12605
+eEU= 12606
+IGFzc2lnbmVk 12607
+IEJvb2w= 12608
+IHdw 12609
+IGNvbWJhdA== 12610
+IEhhdw== 12611
+Li0= 12612
+IHN1cHBvcnRpbmc= 12613
+LkNvbnRlbnQ= 12614
+aXJjcmFmdA== 12615
+IHNwaW4= 12616
+IENS 12617
+Lm15 12618
+4KU= 12619
+dHBs 12620
+IHNwYWNlcw== 12621
+Pyw= 12622
+IFN5cmlh 12623
+IHBhdHRlcm5z 12624
+LWJveA== 12625
+IGZyYW1ld29yaw== 12626
+LyU= 12627
+KGxvbmc= 12628
+IHRlYWNoaW5n 12629
+QVJOSU5H 12630
+X2tleXM= 12631
+IHRhYmxlcw== 12632
+VU5D 12633
+aW5hdGlvbnM= 12634
+LXdlaWdodA== 12635
+cmFkaW8= 12636
+IFBhYw== 12637
+LnNlcnZlcg== 12638
+LkNoYXJGaWVsZA== 12639
+cmluZw== 12640
+IHF1b3Rl 12641
+YW5uYQ== 12642
+IHdlcmRlbg== 12643
+IGNyZWFt 12644
+IG1hY2hpbmVz 12645
+LWs= 12646
+IHN0aW0= 12647
+IFN0b2Nr 12648
+cmljaw== 12649
+IGltcG9ydGFuY2U= 12650
+cng= 12651
+w7Vlcw== 12652
+2Yg= 12653
+IHN0cm9rZQ== 12654
+YWdyYQ== 12655
+IHRhc3Rl 12656
+IERFQlVH 12657
+VGhhbmtz 12658
+IFJlcXVpcmVk 12659
+b3Zh 12660
+TWVkaWE= 12661
+IHNpxJk= 12662
+KGJhc2U= 12663
+cG9zdHM= 12664
+IGZpbGVOYW1l 12665
+Q2hlY2tlZA== 12666
+IGludGVycnVwdA== 12667
+ICgpCg== 12668
+cHl0aG9u 12669
+cGFpcg== 12670
+IGNpcmNsZQ== 12671
+IGluaXRp 12672
+X3N0cmVhbQ== 12673
+IGNvbXByZWg= 12674
+bGVhcm4= 12675
+UHVibGlj 12676
+IGh1bWFucw== 12677
+IGJyaW5naW5n 12678
+b2dyYXBoaWM= 12679
+X2xheWVy 12680
+LWxpa2U= 12681
+dXBwb3J0SW5pdGlhbGl6ZQ== 12682
+aWRlYmFy 12683
+IHZvdGVz 12684
+IGRlc2lyZWQ= 12685
+TWFzaw== 12686
+IHJlbGF0aW9u 12687
+Lkluc3RhbmNl 12688
+SGVscA== 12689
+IGluc3Bpcg== 12690
+IE1vbm8= 12691
+Vmlld01vZGVs 12692
+b21ldGltZXM= 12693
+IGJhY2tncm91bmRDb2xvcg== 12694
+IHJvdGF0aW9u 12695
+IG1hcmk= 12696
+L3Rlc3Q= 12697
+SU5TRVJU 12698
+U3Rhcg== 12699
+cGh5 12700
+SWRz 12701
+X0dFVA== 12702
+IGluY3JlYXNlcw== 12703
+X2Nsb3Nl 12704
+X0ZPUk0= 12705
+IFvigKZdCgo= 12706
+YXph 12707
+VEVYVA== 12708
+IMOk 12709
+IFZhbg== 12710
+IGxpZ2h0cw== 12711
+IEd1aWRl 12712
+IGRhdGVz 12713
+LkNvbW1hbmQ= 12714
+YW1hbg== 12715
+IHBhdGhz 12716
+LmVkaXQ= 12717
+CWFkZA== 12718
+ZHg= 12719
+IHJlYWN0aW9u 12720
+IEJlYWNo 12721
+LmdldE1lc3NhZ2U= 12722
+RW52aXJvbm1lbnQ= 12723
+aW50ZXJlc3Q= 12724
+IG1pbmlzdGVy 12725
+IHJlYWRlcnM= 12726
+CUY= 12727
+IGRvbWVzdGlj 12728
+IGZpbGVk 12729
+Q2l0eQ== 12730
+IG1hcHBpbmc= 12731
+IERFUw== 12732
+IHJlcGFpcg== 12733
+dGljcw== 12734
+aXh0dXJl 12735
+IG5vbWJyZQ== 12736
+LklTdXBwb3J0SW5pdGlhbGl6ZQ== 12737
+em8= 12738
+LklzTnVsbE9y 12739
+IENhcm9saW5h 12740
+IERlcg== 12741
+IEVWRU5U 12742
+IGdlc3Q= 12743
+IGhpc3Q= 12744
+cmVzb3VyY2Vz 12745
+IG9ycGhhbg== 12746
+LkFyZQ== 12747
+IEludmVzdA== 12748
+UkVGRVJSRUQ= 12749
+LkxvZ2dlcg== 12750
+IFJvbWFu 12751
+IGN1bHR1cmFs 12752
+ZmVhdHVyZQ== 12753
+cHRz 12754
+YnQ= 12755
+IGRvdA== 12756
+IGRpYW0= 12757
+dXNwZW5k 12758
+X2FjY2Vzcw== 12759
+KCl7DQo= 12760
+IHN1cnByaXNl 12761
+YWJpbA== 12762
+IHZpcnQ= 12763
+IGJvbWI= 12764
+YXJvbg== 12765
+X0lT 12766
+IHZhc3Q= 12767
+UmVhbA== 12768
+ZXBlbmQ= 12769
+aWN0ZWQ= 12770
+IHBpY2tlZA== 12771
+IEZM 12772
+IFJlcHVibGljYW5z 12773
+Lnplcm9z 12774
+UHJlc3NlZA== 12775
+c3Vw 12776
+LkNvcmU= 12777
+TWljcm9zb2Z0 12778
+c2VydmljZXM= 12779
+YWdpYw== 12780
+aXZlbmVzcw== 12781
+IHBkZg== 12782
+IHJvbGVz 12783
+cmFz 12784
+IGluZHVzdHJpYWw= 12785
+IGZhY2lsaXRpZXM= 12786
+6KE= 12787
+IG5p 12788
+IGJh 12789
+IGNscw== 12790
+CUI= 12791
+Q3VzdG9tZXI= 12792
+IGltYWdpbmU= 12793
+IGV4cG9ydHM= 12794
+T3V0cHV0U3RyZWFt 12795
+IG1hZA== 12796
+KGRl 12797
+KXsKCg== 12798
+IGZybw== 12799
+aHVz 12800
+IGNvbW1pdHRlZQ== 12801
+7J20 12802
+LHg= 12803
+IGRpdmlzaW9u 12804
+KGNsaWVudA== 12805
+KGphdmE= 12806
+b3B0aW9uYWw= 12807
+LkVxdWFs 12808
+IFBoeXM= 12809
+aW5ndQ== 12810
+IHN5bmM= 12811
+IE5h 12812
+fX08Lw== 12813
+T0xVTQ== 12814
+aXTDqQ== 12815
+IGlkZW50aWZpZXI= 12816
+b3dlZA== 12817
+IGV4dGVudA== 12818
+IGh1cg== 12819
+VkE= 12820
+Y2xhcg== 12821
+IGVkZ2Vz 12822
+Q3JpdGVyaWE= 12823
+IGluZGVlZA== 12824
+aW5oZXJpdA== 12825
+IE5pZ2h0 12826
+IHJlcG9ydGluZw== 12827
+IGVuY291bnRlcg== 12828
+IGtpbmRz 12829
+X3ByZWQ= 12830
+IGNvbnNpZGVyaW5n 12831
+Lig= 12832
+IHByb3RlaW4= 12833
+VHlw 12834
+Z3JpY3VsdA== 12835
+IEJhbGw= 12836
+QENvbXBvbmVudA== 12837
+IEVzcw== 12838
+IFJ1Yg== 12839
+dWxw 12840
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA== 12841
+aXR1ZA== 12842
+LmF0dHI= 12843
+aWVudGU= 12844
+IHNwZWxs 12845
+IEpvZQ== 12846
+RU5URVI= 12847
+X2hvc3Q= 12848
+aXRhbg== 12849
+IG1hdHRlcnM= 12850
+IGVtZXJnZW5jeQ== 12851
+dWF0ZWQ= 12852
+IENoYXQ= 12853
+PXsn 12854
+Y29udHJp 12855
+YXJrZXI= 12856
+5oiQ 12857
+aXBlcg== 12858
+IHNjaGVtZQ== 12859
+KHN0ZGVycg== 12860
+ICoo 12861
+Y2VpdmVy 12862
+LmNvbHVtbg== 12863
+IG1hcmtlZA== 12864
+X0FUVFI= 12865
+IGJvZGllcw== 12866
+IElNUExJRUQ= 12867
+R2Fw 12868
+IFBPU1Q= 12869
+IGNvcnBvcmF0ZQ== 12870
+IGRpbWVuc2lvbg== 12871
+IGNvbnRyYXN0 12872
+ZXJ2aWV3 12873
+IEVSUk9S 12874
+IGNhcGFibGU= 12875
+IGFkdmVydGlzaW5n 12876
+dXJjaGFzZQ== 12877
+IFBB 12878
+IEZyYW5jaXNjbw== 12879
+IGZhY2luZw== 12880
+44CM 12881
+Z2l0 12882
+IGJlZXI= 12883
+IHNreQ== 12884
+ZG93bmxvYWQ= 12885
+IEN1cg== 12886
+bWM= 12887
+YW5ueQ== 12888
+LmZsb29y 12889
+IGNyaXRlcmlh 12890
+IHBhcnNlSW50 12891
+YCwK 12892
+IGFzcGVjdA== 12893
+IGJ1bmRsZQ== 12894
+Q291bGQ= 12895
+IHRhbms= 12896
+LWlk 12897
+IGh1cnQ= 12898
+IGJyb2FkY2FzdA== 12899
+T0tFTg== 12900
+b3dudA== 12901
+bnVsbGFibGU= 12902
+Q2Fw 12903
+IGFsY29ob2w= 12904
+IENvbGw= 12905
+IEhlbHBlcg== 12906
+IEFm 12907
+Lm1ldGhvZA== 12908
+IHBsYW5uZWQ= 12909
+cGxlcg== 12910
+IFNpdGU= 12911
+IHJlc2M= 12912
+b21lbnQ= 12913
+IEphdmFTY3JpcHQ= 12914
+U0VSVkVS 12915
+IHJocw== 12916
+ZXJlcw== 12917
+KCIs 12918
+aWZp 12919
+LmZpZWxkcw== 12920
+IHBhcmtpbmc= 12921
+IGlzbGFuZA== 12922
+IHNpc3Rlcg== 12923
+Xwo= 12924
+Q29uc3RyYWludHM= 12925
+IEF1c3Q= 12926
+ZGlt 12927
+X3BvaW50cw== 12928
+IGdhcA== 12929
+X2FjdGl2ZQ== 12930
+IHZvb3I= 12931
+IFBP 12932
+QmFn 12933
+LXNjYWxl 12934
+bGFtYmRh 12935
+LkRpc3Bvc2U= 12936
+cnVsZQ== 12937
+IG93bmVk 12938
+IE1lZGljYWw= 12939
+ZW50cmllcw== 12940
+IHNvbGFy 12941
+IHJlc3VsdGluZw== 12942
+IGVzdGltYXRlZA== 12943
+IGltcHJvdmVk 12944
+RHVyYXRpb24= 12945
+ZW1wbG95ZWU= 12946
+JC4= 12947
+QWN0aW9ucw== 12948
+TGlrZQ== 12949
+LCg= 12950
+KFJlcXVlc3Q= 12951
+JXM= 12952
+Lk9wZW4= 12953
+KSIK 12954
+IHBpeGVs 12955
+IGFkYXB0ZXI= 12956
+IHJldmVudWU= 12957
+b2dyYW0= 12958
+IExB 12959
+IE1hY2hpbmU= 12960
+INin 12961
+IGZsZQ== 12962
+IGJpa2U= 12963
+SW5zZXRz 12964
+IGRpc3A= 12965
+IGNvbnNpc3RlbnQ= 12966
+YcOnw6Nv 12967
+Z2VuZGVy 12968
+IFRob3Nl 12969
+cGVyaWVuY2U= 12970
+LkJhY2tDb2xvcg== 12971
+LnBsYXk= 12972
+IHJ1c2g= 12973
+IGF4aW9z 12974
+IG5lY2s= 12975
+X21lbQ== 12976
+LlBSRUZFUlJFRA== 12977
+X2ZpcnN0 12978
+Q0I= 12979
+IFdpZGdldA== 12980
+IHNlcQ== 12981
+aGFy 12982
+IGhpdHM= 12983
+IOKCrA== 12984
+IGNvbnRhaW5lZA== 12985
+cmllbnQ= 12986
+d2F0ZXI= 12987
+TE9BRA== 12988
+IFZpcmdpbmlh 12989
+IEFybQ== 12990
+IC4v 12991
+wrs= 12992
+X3Jvb3Q= 12993
+IGFzc2lzdGFuY2U= 12994
+W10s 12995
+c3luYw== 12996
+IHZlZ2V0 12997
+ZXNjYXBl 12998
+aWNlcg== 12999
+Ym9vc3Q= 13000
+IEZsb2F0 13001
+LVc= 13002
+Ki8NCg== 13003
+Kj4= 13004
+ICQoIi4= 13005
+LnBvcw== 13006
+IGJveXM= 13007
+IHdlZGRpbmc= 13008
+IGFnZW50cw== 13009
+PSJf 13010
+IEFybXk= 13011
+IGhpbnQ= 13012
+dmlzaW9u 13013
+IHRlY2g= 13014
+IENvbm5lY3Q= 13015
+IGxlZ2VuZA== 13016
+IEJldA== 13017
+LkJhc2U= 13018
+U3ViamVjdA== 13019
+IGxpdA== 13020
+UmVtb3Zl 13021
+ICI6 13022
+IEZpbmFs 13023
+cGVhcmFuY2U= 13024
+IGlUdW5lcw== 13025
+IHBhcnRpY2lwYW50cw== 13026
+IFB5dGhvbg== 13027
+IGJ1c3k= 13028
+aWVs 13029
+dmVydGljZXM= 13030
+IHRlbXBsYXRlVXJs 13031
+IENsb3Nl 13032
+SW1n 13033
+IENvcnBvcmF0aW9u 13034
+dGltZXN0YW1w 13035
+IGV4dGVuZA== 13036
+IHdlYnNpdGVz 13037
+IHBvc3NpYmlsaXR5 13038
+0L7Rgg== 13039
+IGvDtg== 13040
+IG1lYXQ= 13041
+IHJlcHJlc2VudGF0aW9u 13042
+IAkJ 13043
+X1NUQVJU 13044
+LmFwcGx5 13045
+IFZhbGxleQ== 13046
+IFN1Y2Nlc3M= 13047
+SGk= 13048
+IG5vYg== 13049
+IElFbnVtZXJhYmxl 13050
+X3NlbGVjdA== 13051
+Z2Vv 13052
+LiIpCg== 13053
+IHR1cm5pbmc= 13054
+IGZhYnJpYw== 13055
+KCIiKTsK 13056
+IHBlcnNwZWN0aXZl 13057
+6Zc= 13058
+IFNu 13059
+VGhhbms= 13060
+O2o= 13061
+LlBhcmFtZXRlcnM= 13062
+CSAgICAgICAgICAg 13063
+IGZhY3Rz 13064
+IHVudA== 13065
+Lmluc3RhbmNl 13066
+IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIw== 13067
+LWVuZA== 13068
+IEpPSU4= 13069
+IEhlbg== 13070
+IHVyaQ== 13071
+5ZCN 13072
+INC90LA= 13073
+IEluZm8= 13074
+IGNvbmR1Y3RlZA== 13075
+IMOl 13076
+T1VSQ0U= 13077
+IHdpbmU= 13078
+Sm9obg== 13079
+LkVycm9yZg== 13080
+IEFnZQ== 13081
+b3VuZGVk 13082
+IHJlYWxpemU= 13083
+IF07 13084
+IHN1YnNlcXU= 13085
+LG0= 13086
+KFVzZXI= 13087
+aWFubw== 13088
+IGFjY29tcGw= 13089
+aXNw 13090
+LnN0ZA== 13091
+6Yc= 13092
+IEJlZA== 13093
+LnNldEF0dHJpYnV0ZQ== 13094
+QlI= 13095
+a2VlcA== 13096
+IEFMTA== 13097
+IGlzb2w= 13098
+YW1tYQ== 13099
+UGFja2FnZQ== 13100
+IG9jY2FzaW9u 13101
+LXN1Y2Nlc3M= 13102
+0LXQtA== 13103
+IExJTUlURUQ= 13104
+c3RyaXA= 13105
+KCkKCgo= 13106
+aXN0cmlidXRpb24= 13107
+Q29sb3Jz 13108
+ICs6Kw== 13109
+RGlkTG9hZA== 13110
+YWxlcg== 13111
+IHRpZA== 13112
+IExFRA== 13113
+IExpbmtlZA== 13114
+IENhcnQ= 13115
+KCkpDQo= 13116
+X1JFQUQ= 13117
+IGtpbGxpbmc= 13118
+IFBIUA== 13119
+ZmVjdGlvbg== 13120
+IGluc3RhbmNlcw== 13121
+Y3Y= 13122
+Ii8+ 13123
+IHNm 13124
+IHRheGVz 13125
+X2xvY2F0aW9u 13126
+IEJpdGNvaW4= 13127
+dWFibGU= 13128
+cmFuaw== 13129
+aWdub3Jl 13130
+dHJhY2s= 13131
+0LrQsA== 13132
+IHNob3VsZG4= 13133
+IE9Q 13134
+PT57Cg== 13135
+IGtt 13136
+IGhlbHBlcg== 13137
+X2hlYWQ= 13138
+IFdoZXRoZXI= 13139
+b2Nv 13140
+X2Js 13141
+IHN0YXRpc3RpY3M= 13142
+IGJlYXV0eQ== 13143
+IHRvZw== 13144
+dGlw 13145
+64uk 13146
+IGNzdg== 13147
+KHNxbA== 13148
+c3RkbGli 13149
+d2Vhaw== 13150
+IGxpa2Vz 13151
+xI0= 13152
+IHJlcGVhdA== 13153
+IGFwYXJ0bWVudA== 13154
+IGVtcGg= 13155
+X2VkaXQ= 13156
+IHZpdA== 13157
+CXR5cGU= 13158
+RXZlbg== 13159
+dXRlbg== 13160
+IGNpcmN1bXN0YW5jZXM= 13161
+Ymlhbg== 13162
+IHN1Z2Fy 13163
+V2luZG93cw== 13164
+7J4= 13165
+IG9ic2VydmVk 13166
+L2RhdGE= 13167
+IGNhbGVuZGFy 13168
+IHN0cmlrZQ== 13169
+IFJFUw== 13170
+X3Nj 13171
+Zm9ueQ== 13172
+b3JlbQ== 13173
+KHo= 13174
+cG93ZXI= 13175
+ZXRlY3Q= 13176
+IFNhdA== 13177
+LmRlc2NyaXB0aW9u 13178
+IGdhbmc= 13179
+IFNwb3J0cw== 13180
+b25ncw== 13181
+IEJ1bmRsZQ== 13182
+LnN1bQ== 13183
+b25jZQ== 13184
+IGFjY3VzZWQ= 13185
+IGV4cGxvcmU= 13186
+IGFwcHJveGltYXRlbHk= 13187
+IGxvc2luZw== 13188
+dGhlc2lz 13189
+IEZ1bmQ= 13190
+IGRpYWdu 13191
+QXV0b3dpcmVk 13192
+cHJvcGVydGllcw== 13193
+IF8u 13194
+IGNudA== 13195
+Y2VkdXJl 13196
+IHl5 13197
+IGdyYW50 13198
+c29jaw== 13199
+LmlubmVySFRNTA== 13200
+IF0pOwo= 13201
+IENPTkZJRw== 13202
+PSck 13203
+XV07Cg== 13204
+VU5E 13205
+IGdsb2I= 13206
+IGRpcmU= 13207
+dWZmbGU= 13208
+X01FTQ== 13209
+IGF1dGhlbnRpYw== 13210
+Pigi 13211
+IGRlY2FkZQ== 13212
+IEltcG9ydA== 13213
+IG9yaWdpbmFsbHk= 13214
+IGpRdWVyeQ== 13215
+IGluZGljYXRl 13216
+IG91cnNlbHZlcw== 13217
+U3c= 13218
+LmxibA== 13219
+ZW5lcmF0ZQ== 13220
+IGJhc2ljYWxseQ== 13221
+IEhvbQ== 13222
+ICsjKw== 13223
+IEJyaXRhaW4= 13224
+IEthcg== 13225
+dG9FcXVhbA== 13226
+LnN0b3A= 13227
+IG1vZGFs 13228
+aXNp 13229
+IHN1Z2dlc3Rz 13230
+IGR0eXBl 13231
+IHR1cg== 13232
+YmY= 13233
+IGNvbm5lY3Rpb25z 13234
+IEJlZm9yZQ== 13235
+aXN0ZWQ= 13236
+bW91c2U= 13237
+IHB1bGxlZA== 13238
+LmJ1aWxk 13239
+IGxlZ2lzbGF0aW9u 13240
+IGZvcnRo 13241
+cGFk 13242
+ZWdv 13243
+Lk5vdw== 13244
+IGV4Y2l0aW5n 13245
+fQoKCgo= 13246
+IGNvbXBy 13247
+IHNoYXJlcw== 13248
+IHJpZw== 13249
+Z3JlZW4= 13250
+X3ZlYw== 13251
+IGVudW1lcmF0ZQ== 13252
+QXV0bw== 13253
+aWNhdG9y 13254
+IFJheQ== 13255
+YXNzZQ== 13256
+IGhvbGlkYXk= 13257
+IG51bGxhYmxl 13258
+Z3Vu 13259
+X2RldGFpbHM= 13260
+IHdyYXBwZXI= 13261
+c2Vx 13262
+IFlvdW5n 13263
+anVhbmE= 13264
+ICJfXw== 13265
+bGljZW5zZQ== 13266
+c2VydmU= 13267
+Xig= 13268
+aWRlcnM= 13269
+LlJlbW92ZQ== 13270
+cm9wZG93bg== 13271
+J1M= 13272
+cGlu 13273
+KHRva2Vu 13274
+LkRlZmF1bHQ= 13275
+IHJlYXNvbmFibGU= 13276
+YW1waW9u 13277
+IFNvY2lldHk= 13278
+IGJlaQ== 13279
+ZXJ2ZXM= 13280
+cmFk 13281
+IEZveA== 13282
+X2ltYWdlcw== 13283
+IHdoZWVs 13284
+Jylb 13285
+IGNmZw== 13286
+KEJ5 13287
+Q29uc3RydWN0b3I= 13288
+IHZhcnk= 13289
+LnN3aWZ0 13290
+IHByb3h5 13291
+CUg= 13292
+IEFub3RoZXI= 13293
+IFBlbg== 13294
+IGNoZWNraW5n 13295
+IGplc3Q= 13296
+bWFuYWdlcg== 13297
+T3JpZ2lu 13298
+dWdz 13299
+b2ly 13300
+PjwhLS0= 13301
+IGV4cHJlc3NlZA== 13302
+IG1vZGVy 13303
+IGFnZW5jaWVz 13304
+IGlo 13305
+LWhpZGRlbg== 13306
+aW91c2x5 13307
+IFJvZA== 13308
+IHNvbGU= 13309
+TWVk 13310
+LkFueQ== 13311
+IHBj 13312
+YmFs 13313
+RXhhbXBsZQ== 13314
+IFNhbGU= 13315
+IHN0cmlw 13316
+IENvbXA= 13317
+IHByZXNpZGVudGlhbA== 13318
+TW9zdA== 13319
+cHV0YXRpb24= 13320
+KHJlZg== 13321
+IEZvdXI= 13322
+X2ZpbGVuYW1l 13323
+IGVuZm9yY2VtZW50 13324
+2K8= 13325
+IEdlb3Jn 13326
+d2VpZ2h0cw== 13327
+L2w= 13328
+IGFnZ3Jlc3M= 13329
+IGRyYXdpbmc= 13330
+YW5keQ== 13331
+PEk= 13332
+LWo= 13333
+YWth 13334
+aHJlZg== 13335
+IHRlYWNoZXJz 13336
+X1E= 13337
+KGl0 13338
+IE1C 13339
+IHRlbXBvcmFyeQ== 13340
+aXJlYmFzZQ== 13341
+c3RyYQ== 13342
+5pe2 13343
+6LQ= 13344
+KGxhYmVs 13345
+b3Vw 13346
+IHRvcGljcw== 13347
+IHBvcnRpb24= 13348
+aWRvcw== 13349
+IEpld2lzaA== 13350
+IHJlY292ZXJ5 13351
+IHN0YW5kcw== 13352
+I1s= 13353
+IGFmdGVybm9vbg== 13354
+IEFydGljbGU= 13355
+X2F0dA== 13356
+IGV4cGxhbg== 13357
+IFBhaw== 13358
+LnNldE9uQ2xpY2tMaXN0ZW5lcg== 13359
+LmNoaWxkcmVu 13360
+IGlr 13361
+Kyg= 13362
+bGFn 13363
+IGRpc2s= 13364
+IGNvbnRyb3ZlcnM= 13365
+Ij4m 13366
+YXNw 13367
+IHdpZQ== 13368
+IEF1c3RyYWxpYW4= 13369
+IFlvdVR1YmU= 13370
+QXR0cg== 13371
+Y29udGFpbnM= 13372
+ZHVjZQ== 13373
+IE1hdHQ= 13374
+YXRlcm4= 13375
+IHZvbHVudGU= 13376
+IG5ld3Nw 13377
+VlA= 13378
+b2x0aXA= 13379
+IGRlbGVnYXRl 13380
+X21ldGE= 13381
+IGFjY3VyYXRl 13382
+IEV4YW1wbGU= 13383
+JSw= 13384
+IERhaWx5 13385
+IGNhYmlu 13386
+IFNX 13387
+IGxpbWl0cw== 13388
+a2lw 13389
+IGFybXk= 13390
+IGVuZGluZw== 13391
+IGJvc3M= 13392
+IERpYWxvZw== 13393
+QWxzbw== 13394
+PSIjIg== 13395
+b3JkYW4= 13396
+cm93c2U= 13397
+LW1pbg== 13398
+ICIm 13399
+X2xvYw== 13400
+VVg= 13401
+IGRldmVsb3BlcnM= 13402
+IGFjY3VyYWN5 13403
+IG1haW50ZW5hbmNl 13404
+IGhlYXY= 13405
+IGZpbHRlcnM= 13406
+LlRvb2xTdHJpcA== 13407
+IG5hcnI= 13408
+IEVtcA== 13409
+T1JERVI= 13410
+IE1vYmlsZQ== 13411
+LlNlcmlhbA== 13412
+Lm91dHB1dA== 13413
+LmNvbA== 13414
+TWF0ZXJpYWw= 13415
+dW1h 13416
+IGNvbnN1bWVycw== 13417
+c2hpZnQ= 13418
+IHB1ZWQ= 13419
+IG1pbmk= 13420
+Y29sbGVjdGlvbg== 13421
+IGthbg== 13422
+LmNlbnRlcg== 13423
+SGlzdG9yeQ== 13424
+IGJlbmNo 13425
+KCkpOw== 13426
+aXRvcmllcw== 13427
+IGNyb3dk 13428
+X2NhbGw= 13429
+IHBvd2Vycw== 13430
+LUU= 13431
+IGRpc21pc3M= 13432
+IHRhbGtz 13433
+IENoYW5uZWw= 13434
+Zm9yd2FyZA== 13435
+X2NvbnRyb2w= 13436
+L3NyYw== 13437
+aWVzdA== 13438
+KioqKioqKioqKioqKioqKioqKioqKioq 13439
+IGJldGE= 13440
+KGNvbG9y 13441
+X09CSkVDVA== 13442
+IEFwaQ== 13443
+IGVmZmVjdGl2ZWx5 13444
+Q2FtZXJh 13445
+c2Q= 13446
+dXNzeQ== 13447
+RGljdA== 13448
+IEVmZmVjdA== 13449
+aWJpbGl0aWVz 13450
+IHJldHVybmluZw== 13451
+IEZhcg== 13452
+ICcnKQ== 13453
+IG1vZHVsZXM= 13454
+aWxhdGlvbg== 13455
+ICgl 13456
+VFJHTA== 13457
+IHN0b3Jt 13458
+b25uYQ== 13459
+IEVYUA== 13460
+IHNwb25z 13461
+IGRpc3Bs 13462
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg 13463
+ZmFsbA== 13464
+5Yw= 13465
+aWduS2V5 13466
+X1VT 13467
+ZXRyaWNz 13468
+IGhhbmRsZXM= 13469
+VEw= 13470
+X2Ftb3VudA== 13471
+b3dh 13472
+YnJhbmQ= 13473
+IFRvb2w= 13474
+IHVzdWFs 13475
+Llo= 13476
+Y3JlbWVudA== 13477
+YWRpdW0= 13478
+c3RvY2s= 13479
+IHNlcnZpbmc= 13480
+IEJvbg== 13481
+IGxpbmVhcg== 13482
+IFRhcmdldA== 13483
+IFJhZGlv 13484
+SEw= 13485
+U2hhZGVy 13486
+b21hdGlj 13487
+YWd1ZXM= 13488
+aW5pdHk= 13489
+ZGlmZg== 13490
+X2l0ZXJhdG9y 13491
+cXVvdA== 13492
+ICwK 13493
+Y2FsbGJhY2s= 13494
+IHN5bXB0b21z 13495
+W18= 13496
+IEJ1bA== 13497
+IEZlYg== 13498
+dW5kbw== 13499
+X2FjY291bnQ= 13500
+IHR5cGVkZWY= 13501
+0LjRgQ== 13502
+dHJhcw== 13503
+VXNlcklk 13504
+IFBlbm4= 13505
+IFN1cHJlbWU= 13506
+fT4= 13507
+dXNlcklk 13508
+IEtpbQ== 13509
+IGdh 13510
+IGFydGlzdHM= 13511
+5bg= 13512
+IEFic3RyYWN0 13513
+b2tlbW9u 13514
+IGhhbQ== 13515
+b3ZhbA== 13516
+IGNoYQ== 13517
+YXRlbg== 13518
+5YY= 13519
+Rml4ZWQ= 13520
+IHZ1bG5lcg== 13521
+IFBhcmFtZXRlcnM= 13522
+cXVhbnRpdHk= 13523
+LkNsZWFy 13524
+U2VydmxldFJlcXVlc3Q= 13525
+IHlh 13526
+IHNvdWw= 13527
+dHJhbnNhY3Rpb24= 13528
+IHNvbG8= 13529
+IHBhaXJz 13530
+5pQ= 13531
+IEdyZQ== 13532
+X3dvcmQ= 13533
+IEND 13534
+IGdp 13535
+emll 13536
+IHNjaGVkdWxlZA== 13537
+cm90YXRpb24= 13538
+Z3lwdA== 13539
+dWxvdXM= 13540
+Ojpf 13541
+IEVsbA== 13542
+PCE= 13543
+CQkgIA== 13544
+bHA= 13545
+YWhh 13546
+Q29weXJpZ2h0 13547
+IGRyYW0= 13548
+IGRpYWdyYW0= 13549
+IE1lbQ== 13550
+IGdhcmRlbg== 13551
+Q29tcA== 13552
+IGF0dGVtcHRz 13553
+dWZmaXg= 13554
+Pigp 13555
+IHBoaWxvc29waA== 13556
+X3JlbA== 13557
+5bw= 13558
+IHN2 13559
+LnNlY29uZA== 13560
+YW50bw== 13561
+Lkpzb24= 13562
+IFRlbGU= 13563
+X2xvY2Fs 13564
+X3NlbmQ= 13565
+IGFzcGVjdHM= 13566
+7Jc= 13567
+SUJMRQ== 13568
+IHJhaWw= 13569
+IHdpZGVseQ== 13570
+YXNoZWQ= 13571
+aWFy 13572
+aW5m 13573
+dXBwZXI= 13574
+ZGphbmdv 13575
+X3Jlc3VsdHM= 13576
+aXNzaW5n 13577
+IGVxdWl2YWxlbnQ= 13578
+T1VORA== 13579
+IHR5 13580
+IHBvdGVudGlhbGx5 13581
+QWR2ZXJ0aXNlbWVudA== 13582
+IFJlY29yZA== 13583
+cmVzZW50YXRpb24= 13584
+X3dpZGdldA== 13585
+b3VuZGluZw== 13586
+IHJlbGlnaW9u 13587
+IGNvbnNj 13588
+IExpbQ== 13589
+LmFt 13590
+SHRtbA== 13591
+ICc6 13592
+UEFUSA== 13593
+X3NwZWM= 13594
+b3J0ZWQ= 13595
+aWRhZGVz 13596
+X3NoYXBl 13597
+IGtlZXBz 13598
+LlNhdmU= 13599
+IExvYw== 13600
+b3Jp 13601
+IFRFU1Q= 13602
+dW5pY2lw 13603
+IHJlZ2lvbnM= 13604
+IGJlbGlldmVz 13605
+L2Vu 13606
+cG9zaXRl 13607
+eyc= 13608
+cHJlcGFyZQ== 13609
+X2NvbnN0 13610
+c2FtcGxl 13611
+IFdpbGxpYW1z 13612
+IHN0cnQ= 13613
+X0dldA== 13614
+IEFuZHJldw== 13615
+LmFjdGl2ZQ== 13616
+IGxheWVycw== 13617
+VmlzdWFsU3R5bGU= 13618
+YXp5 13619
+IEtu 13620
+IGFjaWQ= 13621
+IEFzaWE= 13622
+IGV4Y2Vzcw== 13623
+CW15 13624
+IGtleWJvYXJk 13625
+ZW5zdXM= 13626
+IGNyZXc= 13627
+IG1pc3NlZA== 13628
+bWFzdGVy 13629
+IFdpbGQ= 13630
+IG5ld2x5 13631
+IHdpbm5lcg== 13632
+IHN0dWI= 13633
+aWNvZGU= 13634
+Lm1vdmU= 13635
+RG9tYWlu 13636
+IFNhcg== 13637
+IGZvcmVzdA== 13638
+TEVE 13639
+Y2xhaW1lcg== 13640
+LmV4aXQ= 13641
+IFdpbmRvdw== 13642
+IHJlc2lzdGFuY2U= 13643
+IENIRUNL 13644
+KCIt 13645
+IFJ5YW4= 13646
+IHBpcGU= 13647
+IGNvYXN0 13648
+REVG 13649
+Ly8h 13650
+X29mZg== 13651
+ZXhpdA== 13652
+IHVsdGltYXRlbHk= 13653
+aW1pdGl2ZQ== 13654
+IEtlZXA= 13655
+IGhpc3RvcmljYWw= 13656
+IGFueXdheQ== 13657
+IEphY2tzb24= 13658
+b2NrZXI= 13659
+RVJO 13660
+IFVJTlQ= 13661
+eW50YXg= 13662
+RVJZ 13663
+aXNtcw== 13664
+IGNu 13665
+IG9jY3Vycw== 13666
+IDs7 13667
+VGV4dFZpZXc= 13668
+QUU= 13669
+L2ltZw== 13670
+IHllc3RlcmRheQ== 13671
+LWRlZmF1bHQ= 13672
+IHRpbnk= 13673
+IHByb2M= 13674
+IGFsaXZl 13675
+IFJFRw== 13676
+LnRo 13677
+ZWFyaW5n 13678
+LmdldExvZ2dlcg== 13679
+PGxpbms= 13680
+X2xvZ2lu 13681
+Rm9sZGVy 13682
+YWJj 13683
+bHlwaGljb24= 13684
+0L3Qvg== 13685
+IG5vdGljZWQ= 13686
+b2RpZ28= 13687
+IGVkaXRpb24= 13688
+aW1hdG9y 13689
+LkVuYWJsZWQ= 13690
+LnBhcnNlSW50 13691
+IHlhcmRz 13692
+CQkJCQkJCQkJCQkJ 13693
+IHZlcmJvc2U= 13694
+0LvRjw== 13695
+X0JZ 13696
+LmxvZ2lu 13697
+Lio7Cg== 13698
+IE1pZA== 13699
+w6llcw== 13700
+IGdsbw== 13701
+IGJ1aWxkaW5ncw== 13702
+IHpl 13703
+IEl0ZXI= 13704
+IHR1YmU= 13705
+IFBvdA== 13706
+XE0= 13707
+PHRo 13708
+YnJpZGdl 13709
+IFNjcmlwdA== 13710
+IE1vZHVsZQ== 13711
+IHZhY2M= 13712
+IGluc3RhbGxhdGlvbg== 13713
+dnk= 13714
+VmlzdWFsU3R5bGVCYWNrQ29sb3I= 13715
+IFNN 13716
+LnRvdGFs 13717
+YmF0 13718
+IGZpbmRz 13719
+IGF0bW9z 13720
+U3Vidmlldw== 13721
+aXphcmQ= 13722
+IHJlcGxhY2VtZW50 13723
+bGljYXRlZA== 13724
+YXBpcw== 13725
+IGxvZ2dlZA== 13726
+IExlZnQ= 13727
+R3Vp 13728
+X1R5cGU= 13729
+dG0= 13730
+UGFk 13731
+IGhvdXNlaG9sZA== 13732
+IHJlbGU= 13733
+IHByb3Bvc2Fs 13734
+X0NMQVNT 13735
+Ojo6Og== 13736
+IGluZnJhc3RydWN0dXJl 13737
+SW5qZWN0 13738
+L2h0bWw= 13739
+IGFkcw== 13740
+aXp6YQ== 13741
+IG1n 13742
+Y3RyaW5l 13743
+JQo= 13744
+PGh0bWw= 13745
+LWltYWdl 13746
+IGF0dG9ybmV5 13747
+PG0= 13748
+KCcs 13749
+IGNhbm4= 13750
+IHByaW50bG4= 13751
+b29zZQ== 13752
+IHllbGxvdw== 13753
+LmV4cA== 13754
+cGF5bWVudA== 13755
+IHRhYmxlVmlldw== 13756
+YXdheQ== 13757
+IG9wcG9zaXRpb24= 13758
+IEFnYWlu 13759
+IEhhbmRsZQ== 13760
+IGV4Y2x1c2l2ZQ== 13761
+aW5hcg== 13762
+w6ly 13763
+0L7QsQ== 13764
+IENPREU= 13765
+ZW1wb3Jhcnk= 13766
+IHJlYWN0 13767
+cGlwZQ== 13768
+Y3o= 13769
+LmFjdGl2aXR5 13770
+IGxhcmdlbHk= 13771
+IGRpc3M= 13772
+YXh5 13773
+ZXNpcw== 13774
+IFJlbg== 13775
+IGNvcm4= 13776
+LlVzZVZpc3VhbFN0eWxlQmFja0NvbG9y 13777
+ZGF5cw== 13778
+IGZydWl0 13779
+SW5zZXJ0 13780
+X2VuYw== 13781
+RXN0 13782
+X2RlYw== 13783
+IEx1Yw== 13784
+IMO8YmVy 13785
+cGFyYW1ldGVycw== 13786
+UEVSVA== 13787
+ZXhwcmVzcw== 13788
+X3Byb2ZpbGU= 13789
+VW5rbm93bg== 13790
+IHJldm9sdXRpb24= 13791
+LmFkZHJlc3M= 13792
+X3JlcXVpcmU= 13793
+IHVuaWZvcm0= 13794
+IFBhY2s= 13795
+bGFy 13796
+IFVJVGFibGVWaWV3 13797
+IGRlcGVuZHM= 13798
+VmFsaWRhdGlvbg== 13799
+Y29uZmlybQ== 13800
+T3duZXI= 13801
+IHRyaWI= 13802
+aGV0 13803
+IElkZQ== 13804
+YW5zYXM= 13805
+TGFuZ3VhZ2U= 13806
+dWV0 13807
+IFBv 13808
+IFN0ZXZl 13809
+IGNvbnRlc3Q= 13810
+X0RFRkFVTFQ= 13811
+IGFwcGFyZW50bHk= 13812
+UkVFTg== 13813
+IGZyZXF1ZW50bHk= 13814
+IHRyYWRpdGlvbg== 13815
+b2NvbGF0ZQ== 13816
+U0k= 13817
+IEFyZ3VtZW50 13818
+Rm9jdXM= 13819
+ZXJ0ZQ== 13820
+IExheW91dA== 13821
+IGR4 13822
+IGdlbmVyYXRvcg== 13823
+IFdhaXQ= 13824
+UG9saWN5 13825
+bGlnaHRz 13826
+LkV4ZWN1dGU= 13827
+UHk= 13828
+IGJlZHJvb20= 13829
+ZWRh 13830
+cmFpZA== 13831
+CXNpemU= 13832
+IGFuY2llbnQ= 13833
+IHB1bXA= 13834
+IGR3 13835
+ICghKA== 13836
+IHNwZWNpZnk= 13837
+KHN0YXR1cw== 13838
+IEZCSQ== 13839
+LmV4Y2VwdGlvbg== 13840
+IHJlbWFyaw== 13841
+bHltcA== 13842
+YW50ZWU= 13843
+VXBsb2Fk 13844
+ZXJuZXQ= 13845
+6aE= 13846
+aW5lbnQ= 13847
+IFJlbmRlcg== 13848
+ZG0= 13849
+IE1lbW9yeQ== 13850
+cmljaA== 13851
+IFRvb2xz 13852
+IGtuZQ== 13853
+IHBlcm0= 13854
+YmFk 13855
+IGRpbm5lcg== 13856
+LnJlc2V0 13857
+IGpMYWJlbA== 13858
+RmVhdHVyZQ== 13859
+LlNlcnZpY2U= 13860
+ICh7Cg== 13861
+IHJlZmVycmVk 13862
+LmNsYXNzTGlzdA== 13863
+IGluaXRXaXRo 13864
+IFRleHRWaWV3 13865
+IG5laXRoZXI= 13866
+IGNvdW50eQ== 13867
+ICJ7 13868
+56c= 13869
+IHRhY2s= 13870
+Y2xhc3NOYW1l 13871
+IFVTRVI= 13872
+IHJlbmV3 13873
+YGA= 13874
+Z2V0TmFtZQ== 13875
+IGJyb3du 13876
+RXJyb3Jz 13877
+ZXJ0bw== 13878
+IHN1c3RhaW4= 13879
+U08= 13880
+bGV0ZXM= 13881
+IEludmFsaWQ= 13882
+IGVuZW1pZXM= 13883
+dW5nZQ== 13884
+IGV4aXN0ZW5jZQ== 13885
+ZXJyYQ== 13886
+CiAgCg== 13887
+dXRvcmlhbA== 13888
+I2E= 13889
+cGF5 13890
+Y2hhcmdl 13891
+IElyZQ== 13892
+YXRlc3Q= 13893
+IGV4cGxvcw== 13894
+IGZpcmVk 13895
+TkVS 13896
+IFR5 13897
+aWNpb24= 13898
+VXJp 13899
+IG9idmlvdXNseQ== 13900
+IENvbHVt 13901
+ICcr 13902
+IERldmljZQ== 13903
+LXJlbGF0ZWQ= 13904
+X0FSRw== 13905
+IHZvcg== 13906
+IExlc3Nlcg== 13907
+X09Q 13908
+U2VyaWFsaXplcg== 13909
+IHVwZ3JhZGU= 13910
+TGlnaHQ= 13911
+IGNvZGVz 13912
+Kys7DQo= 13913
+IHdyaXRlcw== 13914
+Zm9vZA== 13915
+IMOpdA== 13916
+QHNlY3Rpb24= 13917
+IHRyYWNrcw== 13918
+IHNlcmlvdXNseQ== 13919
+Y2h0 13920
+KHNpemVvZg== 13921
+IGltbWVkaWF0ZQ== 13922
+IHNjaWVudGlzdHM= 13923
+IHsk 13924
+X25l 13925
+LkFuY2hvclN0eWxlcw== 13926
+IGFjY29tbW9k 13927
+IEhhcnJ5 13928
+IHNpZ2h0 13929
+IFBhbGVzdA== 13930
+ZXJzaXN0ZW50 13931
+INGD 13932
+LWlucHV0 13933
+IGNvb3JkaW5hdGVz 13934
+wrc= 13935
+V2VsY29tZQ== 13936
+LmNvbmY= 13937
+IGdyZXc= 13938
+IGJvbGQ= 13939
+IENQVQ== 13940
+KG15 13941
+IHBlcmZlY3RseQ== 13942
+IG1vbWVudHM= 13943
+IE1vdmll 13944
+LWRhdGE= 13945
+eXN0YWw= 13946
+X1dJRFRI 13947
+IFNjcmVlbg== 13948
+5p0= 13949
+IGRpc2Fw 13950
+IHJlZHVjdGlvbg== 13951
+LkdldENvbXBvbmVudA== 13952
+X01PRFVMRQ== 13953
+IGdlbmVyaWM= 13954
+IGR5 13955
+YWxsZXI= 13956
+IGN1cmw= 13957
+IEJvZHk= 13958
+IGJhbmtz 13959
+LHQ= 13960
+YXZn 13961
+IGV2aWw= 13962
+IG1hbnVmYWN0dXJlcg== 13963
+IHJlY2VpdmVy 13964
+Q29sdW1ucw== 13965
+IGluZ3JlZGllbnRz 13966
+CW91dA== 13967
+cXVlcw== 13968
+LkxvYWQ= 13969
+IHNsb3dseQ== 13970
+IFRvd24= 13971
+IENlbGw= 13972
+X25vcm1hbA== 13973
+X3ByZWZpeA== 13974
+IEFsZXJ0 13975
+KCJ7 13976
+w6Ry 13977
+4oCcVGhl 13978
+IE1E 13979
+IGNvdXJzZXM= 13980
+YXRoYW4= 13981
+6Zk= 13982
+b2Nj 13983
+IFNFUg== 13984
+ZXNpZ24= 13985
+QWRkcg== 13986
+PVsn 13987
+KCIuLw== 13988
+XX0= 13989
+LmZvbnQ= 13990
+IEluc3RhZ3JhbQ== 13991
+IEJvcmRlcg== 13992
+b2Rh 13993
+IGhhbGw= 13994
+IHJ1bQ== 13995
+X2JpdA== 13996
+IHNhdmluZw== 13997
+X2Rvd24= 13998
+UmFuZG9t 13999
+X3JlZ2lzdGVy 14000
+KENvbnRleHQ= 14001
+IG9wcG9zaXRl 14002
+Um9vbQ== 14003
+WUVT 14004
+0LDQvdC4 14005
+IGVuam95ZWQ= 14006
+X3J1bg== 14007
+Q2xlYXI= 14008
+4oCY 14009
+IEZvcmQ= 14010
+b25pYw== 14011
+b3N0ZW4= 14012
+Il0p 14013
+X2F1dGg= 14014
+Ly8NCg== 14015
+IHN1ZmZpY2llbnQ= 14016
+TEVT 14017
+IHBoZW4= 14018
+IG9o 14019
+X2Nzdg== 14020
+IHJvdXRpbmU= 14021
+LkFyZUVxdWFs 14022
+YXlsb3I= 14023
+IGJhc2tldA== 14024
+X0NPTU0= 14025
+cnlwdGVk 14026
+U2lt 14027
+IFNob3A= 14028
+IHN0dWRpbw== 14029
+YXRvcw== 14030
+KFc= 14031
+W3N0cmluZw== 14032
+w6R0 14033
+b2dh 14034
+IHNocg== 14035
+IHNpY2s= 14036
+QW5vdGhlcg== 14037
+IGRvb3Jz 14038
+X05F 14039
+IFRIUkVF 14040
+Lm9yZGVy 14041
+cmF6aWw= 14042
+IG1hcHM= 14043
+X1RSVUU= 14044
+dHJhbnNsYXRl 14045
+IG5lYXJieQ== 14046
+IG5hY2g= 14047
+TE9BVA== 14048
+YmF0Y2g= 14049
+IGx1eA== 14050
+YXNoZXM= 14051
+YW5nZXJz 14052
+4oCm4oCm 14053
+X0VWRU5U 14054
+X1VQ 14055
+IGFjdHM= 14056
+aW52 14057
+X01FVEhPRA== 14058
+Y2Npb24= 14059
+IHJldGFpbg== 14060
+dXRjaA== 14061
+INCx 14062
+IGtub3dpbmc= 14063
+IHJlcHJlc2VudGluZw== 14064
+Tk9U 14065
+cG5n 14066
+Q29udHJhY3Q= 14067
+IHRyaWNr 14068
+IEVkaXRpb24= 14069
+dXBsaWNhdGU= 14070
+IGNvbnRyb2xsZWQ= 14071
+Y2Zn 14072
+amF2YXNjcmlwdA== 14073
+IG1pbGs= 14074
+V2hpdGU= 14075
+U2VxdWVuY2U= 14076
+YXdh 14077
+IGRpc2N1c3NlZA== 14078
+IEJ1c2g= 14079
+IFlFUw== 14080
+LmZhY3Rvcnk= 14081
+dGFncw== 14082
+IHRhY3Q= 14083
+IHNpZA== 14084
+JCQ= 14085
+IEVudW0= 14086
+IGZyYW1lcw== 14087
+fSk7 14088
+IHJlZ3Vs 14089
+J107DQo= 14090
+UmVnaW9u 14091
+ZmZm 14092
+IGNybw== 14093
+KGNvbQ== 14094
+PSIr 14095
+U3R1ZGVudA== 14096
+IGRpc2FwcG9pbnQ= 14097
+UkVTVUxU 14098
+Q291bnRlcg== 14099
+IGJ1dHRlcg== 14100
+IEhh 14101
+IERpZ2l0YWw= 14102
+IGJpZA== 14103
+Ij57ew== 14104
+aW5nZXJz 14105
+IENvdW50cnk= 14106
+X3RwbA== 14107
+Il0pCg== 14108
+L2s= 14109
+ZGF0aW5n 14110
+OiM= 14111
+IERBVEE= 14112
+eW5jaHJvbg== 14113
+X2JvZHk= 14114
+b2xseXdvb2Q= 14115
+IHZhbG9y 14116
+aXBpZW50 14117
+b2Z0 14118
+VUJM 14119
+ZG9jcw== 14120
+IHN5bmNocm9u 14121
+IGZvcm1lZA== 14122
+cnVwdGlvbg== 14123
+IGxpc3Rh 14124
+UmVxdWVzdE1hcHBpbmc= 14125
+IHZpbGxhZ2U= 14126
+IGtub2Nr 14127
+b2Nz 14128
+Ins= 14129
+X2ZsYWdz 14130
+IHRyYW5zYWN0aW9ucw== 14131
+IGhhYml0 14132
+IEpl 14133
+ZWRlbg== 14134
+IGFpcmNyYWZ0 14135
+aXJr 14136
+IEFC 14137
+IGZhaXJseQ== 14138
+LmludGVy 14139
+LkFjdA== 14140
+IGluc3RydW1lbnQ= 14141
+cmVtb3ZlQ2xhc3M= 14142
+LmNvbW1hbmQ= 14143
+0Yk= 14144
+CW1lbQ== 14145
+KG1pbg== 14146
+IG90 14147
+IGNvbGxl 14148
+PXM= 14149
+dGltZW91dA== 14150
+IGlkcw== 14151
+IE1hdGNo 14152
+aWpu 14153
+emVybw== 14154
+IG5ldHdvcmtz 14155
+Lmdvdg== 14156
+IGludGVs 14157
+IHNlY3Rpb25z 14158
+b3V0aW5l 14159
+KGNtZA== 14160
+KGRpcg== 14161
+IExJQUJJTElUWQ== 14162
+IEJsb2c= 14163
+IGJyaWRnZQ== 14164
+IENW 14165
+Y29udmVydA== 14166
+ICIpCg== 14167
+IEJlcm4= 14168
+X1BP 14169
+ZXZhbA== 14170
+KHNldA== 14171
+dG9vbA== 14172
+IHBheW1lbnRz 14173
+QmVoYXZpb3Vy 14174
+IGNvbmNyZXRl 14175
+IGVsaWc= 14176
+IGFjY2VsZXI= 14177
+IGhvbGU= 14178
+X28= 14179
+VEVHRVI= 14180
+IGdyYXBoaWNz 14181
+T3du 14182
+Rm9ybWF0dGVy 14183
+b25kZXI= 14184
+IHBhY2thZ2Vz 14185
+L2E= 14186
+IEtub3c= 14187
+T3JEZWZhdWx0 14188
+IGR1dHk= 14189
+V2FpdA== 14190
+0L3QsA== 14191
+X3JlY29yZA== 14192
+W3Q= 14193
+TWVzaA== 14194
+IG9uZ29pbmc= 14195
+LmJlYW5z 14196
+IHRhbg== 14197
+IGludGVycHJldA== 14198
+YXN0ZXJz 14199
+UVVBTA== 14200
+IGxlZ3M= 14201
+XFJlcXVlc3Q= 14202
+LWZpbGU= 14203
+X211dGV4 14204
+IFNhaW50 14205
+Ly8j 14206
+IHByb2hpYg== 14207
+KGluZm8= 14208
+Oj0= 14209
+bGludXg= 14210
+IGJsbw== 14211
+b3RpYw== 14212
+CWZpbmFs 14213
+X2V4cA== 14214
+IFN0b3A= 14215
+YXBpbmc= 14216
+KHNhdmVk 14217
+X3B1c2g= 14218
+IGVhc2U= 14219
+X0ZS 14220
+cG9uc2l2ZQ== 14221
+c3RyY21w 14222
+OgoKCgo= 14223
+5Lu2 14224
+b2xp 14225
+IGV4dHJlbWU= 14226
+IHByb2Zlc3Nvcg== 14227
+SW1hZ2Vz 14228
+LklPRXhjZXB0aW9u 14229
+IGFkZHJlc3Nlcw== 14230
+cGxlbWVudGVk 14231
+IGluY29ycG9y 14232
+IHVzZUVmZmVjdA== 14233
+X09G 14234
+IERh 14235
+bm9tYnJl 14236
+SVJTVA== 14237
+IGRpc2NyaW0= 14238
+IGNvbXBlbnM= 14239
+Z3JlZ2F0ZQ== 14240
+YW5jZWxs 14241
+YWNoZXM= 14242
+IENyaXRlcmlh 14243
+JHJlc3VsdA== 14244
+RGVzdHJveQ== 14245
+IHNlY29uZGFyeQ== 14246
+V2F0Y2g= 14247
+IFNlbQ== 14248
+IE1jQw== 14249
+IGFjYWRlbWlj 14250
+VXBwZXI= 14251
+Ojp+ 14252
+dXRyYWw= 14253
+IERvZw== 14254
+YWRlZA== 14255
+VmFsaWRhdG9y 14256
+IGRlcml2ZWQ= 14257
+IHNldFRpbWVvdXQ= 14258
+IEtlbg== 14259
+IHR5cGljYWw= 14260
+IEJvYg== 14261
+IGJvdW5kcw== 14262
+IFNlYXNvbg== 14263
+IGNyYXp5 14264
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg 14265
+LXJvdXRlcg== 14266
+aXR0ZXN0 14267
+IE1pcg== 14268
+IGVtb3Rpb25hbA== 14269
+LHY= 14270
+Y24= 14271
+L3N0 14272
+5b0= 14273
+b25vbQ== 14274
+IGRlY2xhcmVk 14275
+Pi4= 14276
+YWlsaW5n 14277
+IC8qPDw8 14278
+IG5vcm1hbGx5 14279
+KE1l 14280
+ZXZpbg== 14281
+bGlrZWx5 14282
+IHBvaW50ZWQ= 14283
+IFN0YWNr 14284
+IHdhbGxz 14285
+LlZlY3Rvcg== 14286
+bWVhbg== 14287
+XV0K 14288
+IGxpc3RlbmluZw== 14289
+YWR2 14290
+IHN3YXA= 14291
+SUZU 14292
+2Ko= 14293
+LmFyZ3Y= 14294
+dWxz 14295
+PG9wdGlvbg== 14296
+bm90YXRpb25z 14297
+IGVtYWlscw== 14298
+IFVrcg== 14299
+YXN0YQ== 14300
+IFRodXM= 14301
+IFN0b25l 14302
+IGFwcGVhbA== 14303
+LuKAmQ== 14304
+IHJlZ3VsYXRpb25z 14305
+UHJlZmVyZW5jZXM= 14306
+IFBob25l 14307
+dWxm 14308
+IERS 14309
+IHRlY2hub2xvZ2llcw== 14310
+IHBhcmFncmFwaA== 14311
+IG5lY2Vzc2FyaWx5 14312
+LmVhY2g= 14313
+PGZsb2F0 14314
+cmVzYQ== 14315
+IHVuZGVyc3Q= 14316
+IGZpbmdlcg== 14317
+cHJlc3NlZA== 14318
+LWJ5 14319
+aWZmZXI= 14320
+d2F0Y2g= 14321
+IEJh 14322
+QUlN 14323
+IHdlaWdodHM= 14324
+IFJvbg== 14325
+Jyl9fQ== 14326
+W3NlbGY= 14327
+LS0tLS0tLS0tLQo= 14328
+cGVyaW1lbnQ= 14329
+IHRvU3RyaW5n 14330
+eGlj 14331
+IENhbWVyYQ== 14332
+IQoKCgo= 14333
+YXVyYW50 14334
+UHJlZml4 14335
+IGluc3RpdHV0aW9ucw== 14336
+OmludA== 14337
+IGV4cG9zdXJl 14338
+cGF0dGVybg== 14339
+IExpbnV4 14340
+Lm51bWJlcg== 14341
+cmVkaWVudA== 14342
+QXJndW1lbnRFeGNlcHRpb24= 14343
+IENoaWVm 14344
+In0s 14345
+IGVsZWN0cm9uaWM= 14346
+cm9uZw== 14347
+ZXJk 14348
+c3BOZXQ= 14349
+cmFpdA== 14350
+Lycs 14351
+IE9oaW8= 14352
+Q29udHJvbGxlcnM= 14353
+IGNvbnRpbnVpbmc= 14354
+IFRlbXBsYXRl 14355
+IEV0aA== 14356
+c3o= 14357
+L2Vudg== 14358
+RW52 14359
+JS4= 14360
+YXJ0ZXJz 14361
+KSgo 14362
+IFRBQkxF 14363
+IMOu 14364
+cGVyYXR1cmU= 14365
+cHJvZ3Jlc3M= 14366
+UHJlcw== 14367
+6rA= 14368
+aW1wbGVtZW50YXRpb24= 14369
+IGJpZW4= 14370
+IHN0cmVldHM= 14371
+X01TRw== 14372
+TmV3cw== 14373
+IyMj 14374
+Oi8= 14375
+IGN1dHRpbmc= 14376
+eEI= 14377
+cmVzc2Vk 14378
+X0VOQUJMRQ== 14379
+bGFi 14380
+IGNhdXNpbmc= 14381
+XSkpOwo= 14382
+YnJh 14383
+eEZGRkY= 14384
+aWxseQ== 14385
+cGxldGlvbg== 14386
+d2lsbA== 14387
+X2Jhcg== 14388
+IHN0cnVjdHVyZXM= 14389
+IEltcA== 14390
+24w= 14391
+IDw+ 14392
+IC0tLS0tLS0tLS0tLS0tLS0= 14393
+X0JVRkZFUg== 14394
+LmRpcg== 14395
+IHBsYWlu 14396
+IHBlZXI= 14397
+Z2c= 14398
+b2ludHM= 14399
+IHNvbWV3aGF0 14400
+IHdldA== 14401
+IGVtcGxveW1lbnQ= 14402
+IHRpY2tldHM= 14403
+aXJtcw== 14404
+IHR1cGxl 14405
+c2lz 14406
+JHNxbA== 14407
+cmln 14408
+IGNvbnZlcnNpb24= 14409
+IGdlcw== 14410
+IGNvbmZpZ3VyZQ== 14411
+ZWdy 14412
+IENh 14413
+IF9fKCc= 14414
+b3VzdG9u 14415
+LnRva2Vu 14416
+QmxhY2s= 14417
+IG1hZ2F6aW5l 14418
+QVc= 14419
+LklO 14420
+b3Npbmc= 14421
+IGJyb2tl 14422
+IENydQ== 14423
+REVMRVRF 14424
+IGRlc3Ryb3llZA== 14425
+KE1hdGg= 14426
+IGFwcHJvdmFs 14427
+LWRvbQ== 14428
+IElJSQ== 14429
+dGFibGVWaWV3 14430
+IGRlc2lnbnM= 14431
+IGNydXNoaW5n 14432
+IGNvbnNlbnQ= 14433
+ZGlybmFtZQ== 14434
+b21w 14435
+IGNyeXB0 14436
+Pyg= 14437
+b3JvdWdo 14438
+Lm8= 14439
+CWxpc3Q= 14440
+YW1zdW5n 14441
+LiIiIgo= 14442
+ZXJyaW5n 14443
+R29vZ2xl 14444
+X3BhaXI= 14445
+X0lOSVQ= 14446
+cmVtYXJrcw== 14447
+IGdlYXI= 14448
+RmlsbA== 14449
+bGlmZQ== 14450
+fSIpCg== 14451
+IHN1aXRhYmxl 14452
+IHN1cnByaXNlZA== 14453
+X1JFUVVFU1Q= 14454
+IG1hbmlmZXN0 14455
+YXR0ZW4= 14456
+IGZydXN0cg== 14457
+b3ZlbWVudA== 14458
+LmNsaWNr 14459
+IGlp 14460
+IGV4cGFuc2lvbg== 14461
+aWdz 14462
+UGFyc2U= 14463
+LlJlZ3VsYXI= 14464
+Um9i 14465
+X2xheW91dA== 14466
+7KA= 14467
+IHRyYW5zbGF0aW9u 14468
+IEJlYXV0 14469
+QmVzdA== 14470
+X0NPTE9S 14471
+PGxhYmVs 14472
+IGxpcXVpZA== 14473
+SVRT 14474
+IHByb2Q= 14475
+IG9wZXJhdGU= 14476
+VUlLaXQ= 14477
+IG5hdHVy 14478
+YXJndW1lbnQ= 14479
+X2RldGFpbA== 14480
+IENlbnRyZQ== 14481
+ICItLQ== 14482
+IH19Ig== 14483
+bG9jYWxl 14484
+LnR2 14485
+X3NlcQ== 14486
+IHVwY29taW5n 14487
+Q2hhcnQ= 14488
+IERpdmlzaW9u 14489
+IGNsaW5pY2Fs 14490
+Q29tcGFueQ== 14491
+U2VwYXI= 14492
+bGFz 14493
+IEh1bg== 14494
+OnM= 14495
+IGhlYWRpbmc= 14496
+0L7Qsw== 14497
+ICIiKTsK 14498
+W2lk 14499
+Ymlh 14500
+IHN0cmV0Y2g= 14501
+aWNpZGU= 14502
+IHJlcHJvZHU= 14503
+LnByb2plY3Q= 14504
+bGVnZW5k 14505
+ZW5kZXJz 14506
+IHJlc3BvbnNlcw== 14507
+IG9udA== 14508
+cml0aWNhbA== 14509
+IHJlZnVnZQ== 14510
+IExp 14511
+IDoKCg== 14512
+IFRocmVl 14513
+LmNvbnRyb2xsZXI= 14514
+X0lOREVY 14515
+X0ZPUg== 14516
+XE1vZGVscw== 14517
+amF4 14518
+CWV4aXQ= 14519
+IOKW 14520
+IGNvdmVycw== 14521
+CXk= 14522
+LS4= 14523
+SU5ET1c= 14524
+IGZhaWxz 14525
+aW5jbHVkZXM= 14526
+IGZhdWx0 14527
+IGx5 14528
+w7Fv 14529
+LnNsaWNl 14530
+SUxFRA== 14531
+IFB1cg== 14532
+IEFzaWFu 14533
+X2JhdGNo 14534
+Lk1heA== 14535
+dmw= 14536
+IENPUFlSSUdIVA== 14537
+IGdpYW50 14538
+IE1hbnVhbA== 14539
+IENvcHk= 14540
+Q2xhc3NOYW1l 14541
+SGVhbHRo 14542
+Q3Vyc29y 14543
+SUJPdXRsZXQ= 14544
+IHR3ZQ== 14545
+5rM= 14546
+X2xhYmVscw== 14547
+IGNvbGxlY3RlZA== 14548
+IGZ1cm5pdHVyZQ== 14549
+IGRlYWxpbmc= 14550
+Q29udHJvbHM= 14551
+IEhvdGVs 14552
+Y2tz 14553
+IGNob3Nl 14554
+4pSA 14555
+b2Rk 14556
+U1I= 14557
+2Yo= 14558
+7IQ= 14559
+IGFjY29yZA== 14560
+IE1vdmU= 14561
+IE1vZGU= 14562
+IE1vY2s= 14563
+IHRocmVhZHM= 14564
+KysrKw== 14565
+IE9wdGlvbnM= 14566
+UmVmcmVzaA== 14567
+IERpZA== 14568
+J10tPg== 14569
+dWNj 14570
+X2NoYW5uZWw= 14571
+LmFicw== 14572
+IHt9LAo= 14573
+IFdhbA== 14574
+ZXJpb3I= 14575
+IG1haW5seQ== 14576
+IERyaXZlcg== 14577
+Tm90Rm91bmRFeGNlcHRpb24= 14578
+IGNvdW50cw== 14579
+ZWFt 14580
+ICY9 14581
+UXVlc3Rpb24= 14582
+IEFsaQ== 14583
+IGFueW1vcmU= 14584
+ZGV0YWls 14585
+dGFpbA== 14586
+IG1pbGU= 14587
+IEZhaXI= 14588
+IHNvcnJ5 14589
+IHN1cnJvdW5kaW5n 14590
+IGFkbQ== 14591
+RGV2 14592
+IG1hcmlqdWFuYQ== 14593
+IFNvdW5k 14594
+IEFzaA== 14595
+RkQ= 14596
+VGVhbQ== 14597
+LnBvcnQ= 14598
+IFtdCgo= 14599
+dWJibGU= 14600
+IGFzYw== 14601
+IGludGVudGlvbg== 14602
+QWNj 14603
+Y2hp 14604
+dXN0ZXJz 14605
+IGluc3BpcmVk 14606
+c2Vn 14607
+Q0xV 14608
+IG1hbmlw 14609
+TWV0YWRhdGE= 14610
+Q29ubmVjdA== 14611
+IEJlaA== 14612
+IGZpbmRpbmdz 14613
+IGFzc2VtYmx5 14614
+d29ybGQ= 14615
+IHJlbWFpbmVk 14616
+IHVpZA== 14617
+KC4= 14618
+IG14 14619
+TG9vcA== 14620
+CgoKCgo= 14621
+IGZhbnRhc3RpYw== 14622
+d2hv 14623
+YWtp 14624
+IEJhc2lj 14625
+IFlldA== 14626
+IFVzZXJz 14627
+aWtpcA== 14628
+IGhlYWRz 14629
+IE1pY2hpZ2Fu 14630
+X2l0 14631
+IFRvcm9udG8= 14632
+IHJlY29yZGluZw== 14633
+IHN1Ym1pdHRlZA== 14634
+X3ZhcmlhYmxl 14635
+bWVkaWF0ZQ== 14636
+LmdyYXBoaWNz 14637
+IHN0b29k 14638
+IHJlYXI= 14639
+dmVsb2NpdHk= 14640
+X01FU1NBR0U= 14641
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg 14642
+cm9sZXM= 14643
+IFRvdXI= 14644
+X3llYXI= 14645
+ZW5kbWVudA== 14646
+YW1wcw== 14647
+IElyZWxhbmQ= 14648
+bWFs 14649
+IHlvdW5nZXI= 14650
+IHN0cnVnZ2xl 14651
+IGNhYmxl 14652
+IFNETA== 14653
+KCct 14654
+YW5lcw== 14655
+IE5lZWQ= 14656
+LlJvdw== 14657
+UG9s 14658
+IFBI 14659
+X3NjcmlwdA== 14660
+YWdlbQ== 14661
+IEJhcw== 14662
+X3NwYWNl 14663
+LmxvYw== 14664
+Omk= 14665
+YWRy 14666
+IGVuZ2luZWVyaW5n 14667
+aXRlbg== 14668
+KSY= 14669
+IHVr 14670
+IExpdHRsZQ== 14671
+X0NPVU5U 14672
+eEE= 14673
+QXJyYXlMaXN0 14674
+5o0= 14675
+ICIiKQo= 14676
+QW5jaG9y 14677
+IGhhbmc= 14678
+dHdpdHRlcg== 14679
+IGNvbXBldGl0aXZl 14680
+LnNyYw== 14681
+44GX 14682
+IHRyYW5zbGF0ZQ== 14683
+IENyZWF0ZXM= 14684
+b29rcw== 14685
+IFJvbGw= 14686
+JycnCg== 14687
+L3No 14688
+c29tZQ== 14689
+RW5jb2Rpbmc= 14690
+LnJlc29sdmU= 14691
+IGRlc2lnbmVy 14692
+IFN0b3JhZ2U= 14693
+IHph 14694
+IE5ldmVy 14695
+IHNvbWV3aGVyZQ== 14696
+IGJveGVz 14697
+LnNvdXJjZQ== 14698
+IHB5Z2FtZQ== 14699
+IGdyb3du 14700
+LnR3 14701
+KCkpLAo= 14702
+JyxbJw== 14703
+IG9wcG9uZW50 14704
+KHNyYw== 14705
+LmxheWVy 14706
+QVBQ 14707
+IEFjdGl2 14708
+IGd1ZXN0cw== 14709
+IFZBTFVFUw== 14710
+fTsKCgo= 14711
+Lm5hdGl2ZQ== 14712
+IGFtb3VudHM= 14713
+LlJF 14714
+IGNsb25l 14715
+IHdlcmVu 14716
+ICI8PA== 14717
+X2Fj 14718
+IGJyZWFraW5n 14719
+IHJlbGlhYmxl 14720
+LlBPU1Q= 14721
+IFNreQ== 14722
+ICcm 14723
+IHNhdmVkSW5zdGFuY2VTdGF0ZQ== 14724
+YXN0aW5n 14725
+aWxsaW9u 14726
+Y29tbWVudHM= 14727
+dWx0eQ== 14728
+Lm1lbnU= 14729
+L2NvbmZpZw== 14730
+IAoKCg== 14731
+VE9ETw== 14732
+IHB1cmNoYXNlZA== 14733
+X2Nvcg== 14734
+CWF1dG8= 14735
+Q29tcGF0QWN0aXZpdHk= 14736
+Y29tcGxldGU= 14737
+X2dyYXBo 14738
+aXNvZGVz 14739
+IHNpdHVhdGlvbnM= 14740
+IEhvcg== 14741
+UmVjZWl2ZQ== 14742
+4oCcV2U= 14743
+IGVudGl0aWVz 14744
+LmFzc2VydEVxdWFscw== 14745
+0L7Qug== 14746
+IFNhbnM= 14747
+dmluY2U= 14748
+cm9tcHQ= 14749
+PQo= 14750
+IC8u 14751
+LlNlbGVjdA== 14752
+eWx2 14753
+IGJhdHQ= 14754
+QXVkaW8= 14755
+IGluY3JlYXNpbmdseQ== 14756
+LkJ1bmRsZQ== 14757
+IGV4cGxhaW5z 14758
+dGhlYXN0 14759
+Lm9mZnNldA== 14760
+IGhhbA== 14761
+IHRlY2huaXF1ZQ== 14762
+X2xpbWl0 14763
+IGRyYXdu 14764
+QVlFUg== 14765
+IGZlYXR1cmVk 14766
+eXl5eQ== 14767
+YXRpbg== 14768
+cGhlbg== 14769
+YWNoZWw= 14770
+IVw= 14771
+bG93ZXI= 14772
+IEdS 14773
+IHBhZw== 14774
+IFBhcnNl 14775
+IHRvdQ== 14776
+5LiA 14777
+RGlzdGFuY2U= 14778
+SW5kZXhQYXRo 14779
+IGhlbGw= 14780
+c2lt 14781
+VVRUT04= 14782
+VXNhZ2U= 14783
+ZWxlbml1bQ== 14784
+IEZhbGw= 14785
+ICIuJA== 14786
+IE11 14787
+IGNydWM= 14788
+IHNvbnQ= 14789
+UkVGSVg= 14790
+IGludGVyaW9y 14791
+IE9seW1w 14792
+LkF1dG9TY2FsZQ== 14793
+cGFyYQ== 14794
+QXhpc0FsaWdubWVudA== 14795
+IHJpdmVy 14796
+RHRv 14797
+IHdpdGhkcmF3 14798
+UmVhY3Q= 14799
+LWNsYXNz 14800
+YmVmb3Jl 14801
+X2FsbG9j 14802
+Q29udGVudHM= 14803
+IFdhcw== 14804
+SUNU 14805
+IGZvcm11bGE= 14806
+IGluZGljYXRlcw== 14807
+ICAgIAoK 14808
+X3N0b3Jl 14809
+aXR0aW5n 14810
+IEl0YWxpYW4= 14811
+X1NldA== 14812
+X3JlcG9ydA== 14813
+IHBpZA== 14814
+X1ZFUg== 14815
+IHdpbnM= 14816
+IENsb3Vk 14817
+Iil7Cg== 14818
+Y2hlc3Rlcg== 14819
+IGRlbmllZA== 14820
+IHdpcmQ= 14821
+IFN0ZXA= 14822
+IGludmVzdG9ycw== 14823
+Ym9sZA== 14824
+X2Rpc3BsYXk= 14825
+b3V2ZXI= 14826
+b3Jlcg== 14827
+UmVzZXQ= 14828
+IHN1cmdlcnk= 14829
+IHN0cmF0ZWdpZXM= 14830
+L21hdGVyaWFs 14831
+X3VuaXQ= 14832
+IGNvdW5jaWw= 14833
+LlBlcg== 14834
+IOKAng== 14835
+IHJlZm9ybQ== 14836
+RnJhbWV3b3Jr 14837
+IGxpc3Rpbmc= 14838
+X2J0bg== 14839
+IGJpcw== 14840
+JWQ= 14841
+ZWdhcw== 14842
+IHN1ZGRlbmx5 14843
+X1NFUg== 14844
+IGFv 14845
+X2RpcmVjdG9yeQ== 14846
+ZmFz 14847
+IHByZW1pdW0= 14848
+IHRyYWNraW5n 14849
+IEJM 14850
+IG1hdHVyZQ== 14851
+IGJhdGhyb29t 14852
+ICcvJw== 14853
+IMSR 14854
+UGVyZm9ybWVk 14855
+IHNvbGRpZXJz 14856
+YXJuaW5ncw== 14857
+IHdhbGtlZA== 14858
+LWNvbg== 14859
+Ym90dG9t 14860
+IHN1cnByaXNpbmc= 14861
+IGdlbmU= 14862
+VXN1YXJpbw== 14863
+LkRFRkFVTFQ= 14864
+IE1JVA== 14865
+Q09ERQ== 14866
+IEVneXB0 14867
+cGlja2Vy 14868
+eXNxbA== 14869
+QVRVUkU= 14870
+ZGV0YWlscw== 14871
+IENvbmZlcmVuY2U= 14872
+SW5mb3JtYXRpb24= 14873
+IE1haWw= 14874
+LWRvd24= 14875
+cmFyaWVz 14876
+YnJv 14877
+IHN1YmplY3Rz 14878
+ICcq 14879
+6K+3 14880
+b3JpZW50 14881
+OkA= 14882
+dmVyYm9zZQ== 14883
+RUY= 14884
+IHRvbGVy 14885
+ZW5nZXJz 14886
+IGVuZHBvaW50 14887
+IHN0cmFuZ2U= 14888
+IGNvbG9u 14889
+IHByZWZlcnJlZA== 14890
+ZGVw 14891
+IEVW 14892
+QVJSQVk= 14893
+IHdoZQ== 14894
+IHB1cA== 14895
+X25vZGVz 14896
+IHRhbGtlZA== 14897
+IGluc3RpdHV0aW9u 14898
+ZGJj 14899
+IGV4cG9zZWQ= 14900
+dGVlbg== 14901
+IEZyb250 14902
+VFQ= 14903
+X05PTkU= 14904
+XC9cLw== 14905
+cHJvZ3JhbQ== 14906
+IGVuY291cmFnZQ== 14907
+LmA= 14908
+c2hpcmU= 14909
+IElzbGFt 14910
+ZWVu 14911
+Tkk= 14912
+JyI= 14913
+LldpZHRo 14914
+IGxpa2Vk 14915
+IHsuLi4= 14916
+IFN5c3RlbXM= 14917
+IHZvdHJl 14918
+IG1hbnVmYWN0dXJpbmc= 14919
+Q29udmVydGVy 14920
+IEluZg== 14921
+7Jo= 14922
+RFRP 14923
+IGluY2hlcw== 14924
+IOCk 14925
+w7k= 14926
+IENoYXJsZXM= 14927
+QlU= 14928
+IikpOwoK 14929
+IExhYm9y 14930
+dW5u 14931
+IGVzdGlt 14932
+bW9iaWxl 14933
+IExlYXJu 14934
+X0NBTEw= 14935
+4oQ= 14936
+IGluZGljZXM= 14937
+IHR1Yg== 14938
+aWtpcGVkaWE= 14939
+Q29zdA== 14940
+cm93YWJsZQ== 14941
+66E= 14942
+Z2FnZQ== 14943
+IGZ1bmN0aW9uYWxpdHk= 14944
+dXp6bGU= 14945
+ZW1vcw== 14946
+LmxpYg== 14947
+IGRhc3M= 14948
+0LXQug== 14949
+ZW5uYQ== 14950
+IHNob3Rz 14951
+IHJlc3RvcmU= 14952
+L0Q= 14953
+Rm9yS2V5 14954
+XSxb 14955
+YWxpYXM= 14956
+bGludA== 14957
+LnN0cmVhbQ== 14958
+5qA= 14959
+X0ZPUk1BVA== 14960
+IHNpbHZlcg== 14961
+LnJlcG9zaXRvcnk= 14962
+IGxlZ2lzbA== 14963
+LkJvcmRlcg== 14964
+X2ZlYXR1cmVz 14965
+UGVybWlzc2lvbg== 14966
+IGhvdXNlcw== 14967
+IFdhcnM= 14968
+X0NPTVA= 14969
+IGluanVyaWVz 14970
+IGNvbnN0YW50bHk= 14971
+Zmx1dHRlcg== 14972
+RU5V 14973
+IENvbmY= 14974
+IHJlY29nbml6ZWQ= 14975
+IHByYWN0aWNhbA== 14976
+IGRlY2VudA== 14977
+Qko= 14978
+XSk7 14979
+YXN0eQ== 14980
+IEFjdGl2aXR5 14981
+LW1vZGU= 14982
+IHNsaWRl 14983
+LklzTnVsbE9yRW1wdHk= 14984
+IFlPVQ== 14985
+UG93ZXI= 14986
+aW5kaWNlcw== 14987
+IHF1YWxpZmllZA== 14988
+IHRocm93bg== 14989
+aGVsbG8= 14990
+IE5pY2s= 14991
+bGFo 14992
+YXNzZW1ibHk= 14993
+IFNtYWxs 14994
+b2xkaW5n 14995
+U2hvdWxk 14996
+IFNpbHZlcg== 14997
+KHNhdmVkSW5zdGFuY2VTdGF0ZQ== 14998
+IHRvZ2dsZQ== 14999
+Lk5vdA== 15000
+Q3RybA== 15001
+Om5pbA== 15002
+IENvbnRpbnVl 15003
+IEJvb3Q= 15004
+5ok= 15005
+IE11cg== 15006
+ZG9u 15007
+IEZB 15008
+U25hcHNob3Q= 15009
+IGFzc29jaWF0aW9u 15010
+Zm94 15011
+LGE= 15012
+YXppb25l 15013
+XSkNCg== 15014
+Q1RZUEU= 15015
+IGZhZGU= 15016
+IERhcg== 15017
+Lm5hdmlnYXRpb24= 15018
+IGx1Y2s= 15019
+U0NSSQ== 15020
+IERlYWQ= 15021
+IHRlcm1pbmFs 15022
+X0xFTkdUSA== 15023
+IGVmZmljaWVuY3k= 15024
+IHVudw== 15025
+IG5hcnJvdw== 15026
+aW1lbnRv 15027
+KENvbG9y 15028
+IFNlYQ== 15029
+X2FyZWE= 15030
+LEE= 15031
+X29wdA== 15032
+IEhpbGxhcnk= 15033
+LnRhc2s= 15034
+IEphYw== 15035
+YXN0ZWQ= 15036
+IEFkYW0= 15037
+IElsbGVnYWw= 15038
+IHNlYXJjaGluZw== 15039
+SW5zdGFuY2VPZg== 15040
+SmF2YQ== 15041
+IEZvcm1hdA== 15042
+IHJlYWxpemVk 15043
+IENoaWxkcmVu 15044
+IGtpbA== 15045
+KGZyYW1l 15046
+4oCdLgoK 15047
+IHNjZW5hcmlv 15048
+Il0pOwo= 15049
+IGluY3JlZGlibGU= 15050
+bGl4 15051
+SU9FeGNlcHRpb24= 15052
+IFF1ZXN0 15053
+aWx0eQ== 15054
+IHVubG9jaw== 15055
+4oKs 15056
+IHJlZmVyZW5jZXM= 15057
+IFZlcnQ= 15058
+QmluZGluZw== 15059
+ZWdhdGl2ZQ== 15060
+IHdyYXA= 15061
+LmRhdGFiYXNl 15062
+KGNvbnRlbnQ= 15063
+QnVm 15064
+IFRyYWQ= 15065
+IEF1ZA== 15066
+dHJhY2U= 15067
+Lm1vY2s= 15068
+IHRoZXJhcHk= 15069
+CUw= 15070
+LlRvSW50 15071
+IEtpbmdkb20= 15072
+QnVz 15073
+aGF1c3Q= 15074
+IiIiCgo= 15075
+KGVuZA== 15076
+LmRyYXdhYmxl 15077
+W107Cg== 15078
+IEhvc3BpdGFs 15079
+IHBoYXJt 15080
+LS0tLS0= 15081
+IEFH 15082
+w6lk 15083
+PiIpOwo= 15084
+IHdhbGxldA== 15085
+YXRhYmxl 15086
+KSQ= 15087
+IG1vbnRobHk= 15088
+IGRpYWdub3N0aWM= 15089
+U3ltYm9s 15090
+IGl0ZXJhdG9y 15091
+dW5maW5pc2hlZA== 15092
+IGltbWlncmF0aW9u 15093
+c3I= 15094
+Uk9X 15095
+KGdhbWU= 15096
+IGNsb3RoZXM= 15097
+IFVudA== 15098
+IGFjdGl2YXRpb24= 15099
+X0Nvbg== 15100
+Lmhhc2g= 15101
+IGluaXRpYWxseQ== 15102
+Lkhhc2g= 15103
+IGN1dHM= 15104
+Zm91bmQ= 15105
+IFN0b3J5 15106
+0YbQuA== 15107
+YWNhbw== 15108
+X1RZUA== 15109
+cHJvdG8= 15110
+ZXN0cg== 15111
+LXBhZ2U= 15112
+YWhy 15113
+IGluY29ycmVjdA== 15114
+IEpvc2VwaA== 15115
+VGV4dEJveENvbHVtbg== 15116
+X3N0eWxl 15117
+IERhbmllbA== 15118
+c2hlZXQ= 15119
+IGxpdg== 15120
+bGluZWQ= 15121
+IHJh 15122
+UnVudGltZQ== 15123
+X2VtcHR5 15124
+c2x1Zw== 15125
+X3N0cnVjdA== 15126
+64o= 15127
+bXU= 15128
+IHBlcm1pdHRlZA== 15129
+IHJlZ2lvbmFs 15130
+IHNvYnJl 15131
+IFN1Y2g= 15132
+IFtf 15133
+IHJvb2Y= 15134
+LkFsaWdubWVudA== 15135
+dGltZXM= 15136
+Lm1zZw== 15137
+IGNoZXN0 15138
+IFRhYg== 15139
+IGVzdGE= 15140
+w6Ru 15141
+IHN1YnNjcmlwdGlvbg== 15142
+KGNvbW1hbmQ= 15143
+c3BlY2lhbA== 15144
+IG1lYWw= 15145
+Iik6Cg== 15146
+X2N0eA== 15147
+IGNsb3NlbHk= 15148
+ZXRyeQ== 15149
+LWJl 15150
+YWRlbA== 15151
+IFJhbQ== 15152
+aWdlc3Q= 15153
+IFNwYW5pc2g= 15154
+IGNvbW1pdG1lbnQ= 15155
+IHdha2U= 15156
+Kj4o 15157
+UEhQ 15158
+X3s= 15159
+Y2tlcg== 15160
+PExpc3Q= 15161
+X251bGw= 15162
+IFJlc2VydmVk 15163
+IGluaGVy 15164
+LkNvbHVtbnM= 15165
+LkFzcE5ldA== 15166
+X0lOVkFMSUQ= 15167
+IFBhcmFtZXRlcg== 15168
+IGV4cHI= 15169
+fXs= 15170
+Q2VsbFN0eWxl 15171
+IHZhbHVhYmxl 15172
+IGZ1bm55 15173
+SW52 15174
+IHN0YWJsZQ== 15175
+KnQ= 15176
+IHBpbGw= 15177
+cGxpZXJz 15178
+IENTUw== 15179
+IENvbmRpdGlvbg== 15180
+IFNwZWVk 15181
+dWJsaXNoZXI= 15182
+IG9mZmVuc2l2ZQ== 15183
+Y2VzdA== 15184
+aWNhcw== 15185
+IHNwYXJr 15186
+IFByb3Rl 15187
+c2V0dXA= 15188
+SUZZ 15189
+IFRheA== 15190
+V2hv 15191
+RmFtaWx5 15192
+LWZvcg== 15193
+LnVr 15194
+IGZhc2M= 15195
+c3Zn 15196
+IikpLg== 15197
+IGJpcnRoZGF5 15198
+4paI 15199
+dmVo 15200
+ZWxsZWQ= 15201
+IGltcG9ydHM= 15202
+IElzbGFtaWM= 15203
+VEE= 15204
+IFN0YW4= 15205
+d2VhdGhlcg== 15206
+IHN1c3BlY3Q= 15207
+ZWF0dXJl 15208
+ZW5uZXM= 15209
+V00= 15210
+Lm1pbmVjcmFmdA== 15211
+YXZpZA== 15212
+6L0= 15213
+LnNlY3VyaXR5 15214
+aW5vcw== 15215
+R29vZA== 15216
+IG1hcmNo 15217
+IHBvc3Nlc3M= 15218
+dXN1YXJpbw== 15219
+Q29ucw== 15220
+YW1iZXI= 15221
+Y2hlZHVsZXI= 15222
+IGhvcnNl 15223
+570= 15224
+KGJvZHk= 15225
+IFRyYW5zZm9ybQ== 15226
+X2RlY29kZQ== 15227
+LnN2Zw== 15228
+IGZvbw== 15229
+IGRlbGxh 15230
+ZXh0ZW5kcw== 15231
+YW1lcg== 15232
+IHByb2Nlc3NlZA== 15233
+IEhhcnI= 15234
+IEFJ 15235
+IGtv 15236
+Q0hBUg== 15237
+KCU= 15238
+IHRhcA== 15239
+KHsn 15240
+Y3JvbGw= 15241
+RE9N 15242
+IHRlYQ== 15243
+IHJlaW4= 15244
+IHdvcmxkd2lkZQ== 15245
+X2Zu 15246
+c2hh 15247
+IGJpcg== 15248
+w6fDtWVz 15249
+PSIjIj4= 15250
+IHJlcHJlc2VudGVk 15251
+aWxsZXI= 15252
+KGV4cGVjdGVk 15253
+IGRhbmNl 15254
+IHZpc2l0b3Jz 15255
+LmNvbmNhdA== 15256
+LWJpdA== 15257
+VVJSRQ== 15258
+IFJvZw== 15259
+dnA= 15260
+aXBo 15261
+IExMQw== 15262
+aXRsZWQ= 15263
+aWFtaQ== 15264
+Q29sbA== 15265
+X3JlYWw= 15266
+X3Nob3c= 15267
+X2ZvbGRlcg== 15268
+IGRhcg== 15269
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg 15270
+IGxhdHRlcg== 15271
+YXJjaHk= 15272
+IGJvdw== 15273
+IG91dGNvbWU= 15274
+IFBvc3RlZA== 15275
+IHJpc2tz 15276
+IFRoZXJlZm9yZQ== 15277
+IG93bmVyc2hpcA== 15278
+IHBhcmFsbGVs 15279
+IHBlbmRpbmc= 15280
+Z2VvbWV0cnk= 15281
+IHJlY29nbml6ZQ== 15282
+U1RFTQ== 15283
+IENQ 15284
+IGltbWlncg== 15285
+SVRMRQ== 15286
+ICAgIAkJ 15287
+Y29ubmVjdGVk 15288
+IHNtaWxl 15289
+KGRvY3VtZW50 15290
+XENvbXBvbmVudA== 15291
+dmVydGljYWw= 15292
+IGNvbnN1bXB0aW9u 15293
+IHNob2Vz 15294
+LmltcGw= 15295
+dW5rcw== 15296
+LiI7Cg== 15297
+IGZvb2Rz 15298
+Xyk7Cg== 15299
+LmFzc2VydFRydWU= 15300
+IHBpcGVsaW5l 15301
+IGNvbGxlY3Rpb25z 15302
+IGVhcm5lZA== 15303
+IENlcnQ= 15304
+IHBhcnRuZXJzaGlw 15305
+KGFjdGlvbg== 15306
+IGNk 15307
+IFZlcnk= 15308
+T3B0aW9uYWw= 15309
+IHNjcmVlbnM= 15310
+IHRpdGxlcw== 15311
+ZW5lcmF0b3I= 15312
+IGFiYW5kb24= 15313
+a2luZA== 15314
+SUxURVI= 15315
+IGNsb3Npbmc= 15316
+bGljYQ== 15317
+X2ludGVy 15318
+IGNhbXB1cw== 15319
+c2V0dGluZw== 15320
+U3ByaXRl 15321
+44Gv 15322
+X3JlcGx5 15323
+VG9MaXN0 15324
+OlwvXC8= 15325
+ZWRl 15326
+IGZvbGtz 15327
+IGJvYXQ= 15328
+KGFyZ3Y= 15329
+IHBlcm1hbmVudA== 15330
+IGNhcnJ5aW5n 15331
+IGNvbnNlcnZhdGl2ZQ== 15332
+aW1wb3J0YW50 15333
+LmltZw== 15334
+IEltbQ== 15335
+IGRpbWVuc2lvbnM= 15336
+YWxhbmQ= 15337
+c2luZ2xl 15338
+RXhpdA== 15339
+LS0tLS0tLS0tLQ== 15340
+YXJpYW50 15341
+dGVybmFs 15342
+U2Vjb25kcw== 15343
+IEl0YWx5 15344
+b3RsaW4= 15345
+LlJlc3VtZQ== 15346
+PSci 15347
+KT09 15348
+Y2VwdG9y 15349
+IHNjYQ== 15350
+L21haW4= 15351
+U2VjdXJpdHk= 15352
+X2RhdA== 15353
+IGxldHM= 15354
+IGFxdQ== 15355
+IHdoZW5ldmVy 15356
+YmVycnk= 15357
+IGFjdGluZw== 15358
+YW50aQ== 15359
+cGQ= 15360
+Jmd0 15361
+5q0= 15362
+Wm9uZQ== 15363
+VG9kYXk= 15364
+IS4= 15365
+VG9Qcm9wcw== 15366
+YWJpcw== 15367
+aXRhYmxl 15368
+IGdhbA== 15369
+XXs= 15370
+aXpvbmE= 15371
+IGluY29udHJp 15372
+TkVU 15373
+Ly8vCg== 15374
+W2lu 15375
+X3NhdmU= 15376
+IGV4ZW0= 15377
+IEtlbm4= 15378
+IGV2b2x1dGlvbg== 15379
+dmFycw== 15380
+X3N0YXRz 15381
+LW9ubHk= 15382
+IENvbG9yYWRv 15383
+IHdhdGNoZWQ= 15384
+Ym91cg== 15385
+IHNldmVyZQ== 15386
+IHByb2Zlc3Npb25hbHM= 15387
+cG9ydGlvbg== 15388
+IGd1YXJhbnRl 15389
+0LM= 15390
+IHB1c2hlZA== 15391
+IEdp 15392
+770= 15393
+IHR1bQ== 15394
+IEF6 15395
+IEVkZ2VJbnNldHM= 15396
+IikpOw0K 15397
+aXNzZQ== 15398
+LmFj 15399
+U2V0dGluZw== 15400
+IGFwcHJlY2lhdGU= 15401
+IFZhbHVlRXJyb3I= 15402
+IHN1cnZl 15403
+IFJvbGU= 15404
+LkludGVy 15405
+cGxvdGxpYg== 15406
+amV0 15407
+ZGFt 15408
+IHBsYXRmb3Jtcw== 15409
+dGVsZQ== 15410
+VVRP 15411
+IEludGVybmFs 15412
+Kzo= 15413
+fTsNCg== 15414
+R2VuZXJhbA== 15415
+XEVudGl0eQ== 15416
+IGxhd3llcg== 15417
+cXVpdg== 15418
+IFBvc3Rz 15419
+aXNv 15420
+IGFjY3Vt 15421
+b2Jl 15422
+IG1hcmtz 15423
+IF07Cgo= 15424
+CXRleHQ= 15425
+LnN1Y2Nlc3M= 15426
+Y3Vycg== 15427
+YXNh 15428
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA= 15429
+IHRoaW4= 15430
+X292ZXI= 15431
+YXJlc3Q= 15432
+IE9z 15433
+KGFkZHJlc3M= 15434
+IHZlbG9jaXR5 15435
+IFtdOwoK 15436
+PSIuLi8uLi8= 15437
+IFByaXY= 15438
+Ym93 15439
+IGd1YXJhbnRlZQ== 15440
+JQoK 15441
+IGV2YWx1YXRl 15442
+LkxFTkdUSA== 15443
+IGludmVudG9yeQ== 15444
+cWE= 15445
+X2RlYnVn 15446
+Lk9uQ2xpY2tMaXN0ZW5lcg== 15447
+IGxpZXM= 15448
+IGFzc2Vzc21lbnQ= 15449
+ZGF0ZXRpbWU= 15450
+LmJhY2tncm91bmRDb2xvcg== 15451
+ICovDQoNCg== 15452
+cmFm 15453
+dW53cmFw 15454
+IEZvb3Q= 15455
+IG5vdGlmeQ== 15456
+IGxvd2VzdA== 15457
+RE9DVFlQRQ== 15458
+IGxhbmd1YWdlcw== 15459
+ZXh0cmE= 15460
+LWJhY2s= 15461
+IGVpbmVu 15462
+dGVtcGxhdGVz 15463
+X3Bhc3M= 15464
+IE11c3Q= 15465
+IGVzdMOh 15466
+X2NvcmU= 15467
+IFNjb3Q= 15468
+QUk= 15469
+IGJpYXM= 15470
+YXRpb25zaGlw 15471
+Q29uc3RhbnQ= 15472
+IHByb2dyYW1taW5n 15473
+SW5z 15474
+dXNwZW5kTGF5b3V0 15475
+IFBST1ZJRA== 15476
+YW50ZXM= 15477
+IHNoaXJ0 15478
+aW5hdGVk 15479
+Lk9L 15480
+W2E= 15481
+IHRoaW5rcw== 15482
+PwoKCgo= 15483
+IHJlZ2FyZGxlc3M= 15484
+IE1hZ2lj 15485
+dWxhdGluZw== 15486
+CWNsYXNz 15487
+YWRkR3JvdXA= 15488
+UkVBVEU= 15489
+IFNV 15490
+IHNpbXBs 15491
+Y29weXJpZ2h0 15492
+IGJ1bmNo 15493
+IHVuaXZlcnNl 15494
+IEVycg== 15495
+IHByZXNlbnRhdGlvbg== 15496
+Y2F0ZWdvcmllcw== 15497
+IGF0dGFjaA== 15498
+LnNpZ24= 15499
+X0FD 15500
+IGRpc2NpcGw= 15501
+IHJlZ3VsYXJseQ== 15502
+IHByaW1hcmlseQ== 15503
+aW5rcw== 15504
+W1s= 15505
+LnJhbmQ= 15506
+LnNob3VsZA== 15507
+b3dudG93bg== 15508
+PSIn 15509
+IHNhbnM= 15510
+IHN1cHBvcnRlcnM= 15511
+c2VxdWVuY2U= 15512
+R08= 15513
+Li4KCg== 15514
+IFNwcg== 15515
+IGNhcmVmdWxseQ== 15516
+VUlDb2xvcg== 15517
+ZGVzdHJveQ== 15518
+IHRvZG9z 15519
+IE9SREVS 15520
+b3R0ZWQ= 15521
+IGRvbnQ= 15522
+YXVkaQ== 15523
+X3BsYXllcg== 15524
+Z3Jl 15525
+IE9pbA== 15526
+PGJvZHk= 15527
+X3N0YWNr 15528
+LlBhZGRpbmc= 15529
+IFByb2R1Y3Rz 15530
+IHByaXZpbGU= 15531
+IGluanVyZWQ= 15532
+IEZ1cnRoZXI= 15533
+IGFsaWFz 15534
+LlJlc3VtZUxheW91dA== 15535
+X0xFTg== 15536
+IHNlcw== 15537
+J107Cgo= 15538
+Y3JlZW5z 15539
+IGRpcmVjdGVk 15540
+LlN1c3BlbmRMYXlvdXQ= 15541
+b2RnZQ== 15542
+LkF0 15543
+bWFya3M= 15544
+IFVuaXZlcnM= 15545
+ZXJ0cw== 15546
+IEVzYw== 15547
+IG5hdmJhcg== 15548
+IHV0aWxpdHk= 15549
+YWdub3N0aWNz 15550
+IGluamVjdA== 15551
+IEROQQ== 15552
+ICIsIg== 15553
+YW1hcg== 15554
+IGV1 15555
+IHJlc3RhdXJhbnRz 15556
+X3B1dA== 15557
+dXRlcnM= 15558
+VG9vbFN0cmlw 15559
+dHc= 15560
+aXN0cm8= 15561
+IHpvb20= 15562
+IGxlZ2l0 15563
+cGVjaWZpYw== 15564
+IENvbWU= 15565
+IGxvY2FsU3RvcmFnZQ== 15566
+IGFic29y 15567
+LlBhbmVs 15568
+IERlc2lnbmVy 15569
+IG93 15570
+SUNBTA== 15571
+X3VyaQ== 15572
+KGZpZWxk 15573
+IHN1cGVydg== 15574
+RXhpc3Rz 15575
+IHJlc3BlY3RpdmVseQ== 15576
+IFN0YW5k 15577
+Q29uZg== 15578
+dXNzaWFu 15579
+IGFyYw== 15580
+IG5k 15581
+dWNrcw== 15582
+IHJlc3Ry 15583
+IHNlYXNvbnM= 15584
+IENoYXB0ZXI= 15585
+IFN3aXRjaA== 15586
+cGlj 15587
+IGhp 15588
+bG9hZGVk 15589
+IGZsdWlk 15590
+LWJ0bg== 15591
+IHJ1bnRpbWU= 15592
+Lml0 15593
+Qk4= 15594
+T3BhY2l0eQ== 15595
+YXNhbnQ= 15596
+cnlwdGlvbg== 15597
+LW5hdGl2ZQ== 15598
+IHRhdWdodA== 15599
+5a8= 15600
+YWdtZW50 15601
+IG11bA== 15602
+UmVnaXN0cnk= 15603
+X2dyaWQ= 15604
+IEJyb29r 15605
+OlNldA== 15606
+IG1vbmdvb3Nl 15607
+QU1FUw== 15608
+aW5uZXJIVE1M 15609
+IHNvY2k= 15610
+IEludGVs 15611
+Z2V0SWQ= 15612
+Q21k 15613
+IGFjY2Vzc2libGU= 15614
+cmFtZXM= 15615
+bGV0b24= 15616
+IF9fKA== 15617
+CWRlbGV0ZQ== 15618
+IFNxdWFyZQ== 15619
+IgoKCg== 15620
+IGJ1Y2tldA== 15621
+YXZvcml0ZQ== 15622
+IEJyZWFr 15623
+Kytd 15624
+IGJydXNo 15625
+IHRlbnNvcg== 15626
+L2h0dHA= 15627
+VGlsZQ== 15628
+IGZ1bmN0aW9uYWw= 15629
+ICIq 15630
+d2hlbA== 15631
+IHRlbnQ= 15632
+IENoYXJhY3Rlcg== 15633
+IHNlZXM= 15634
+LlNU 15635
+Qmln 15636
+IGV4dGVybg== 15637
+VXJscw== 15638
+KSkpKSw= 15639
+IEpy 15640
+LkJ1aWxkZXI= 15641
+Ljs= 15642
+bmw= 15643
+X0luaXQ= 15644
+IEhFUg== 15645
+xbxl 15646
+bXlzcWxp 15647
+X2ljb24= 15648
+dmFu 15649
+IGZlZWxpbmdz 15650
+IGxlYW4= 15651
+IGhvcGluZw== 15652
+VFY= 15653
+PSI8Pz0= 15654
+IGN1cnZl 15655
+X3N0ZA== 15656
+X0xJTkU= 15657
+ZHN0 15658
+IG1vcmFs 15659
+ZW1lcw== 15660
+b2d5 15661
+IHVyYmFu 15662
+IGFzaWRl 15663
+IGVkaXRpbmc= 15664
+QURE 15665
+U2Vjb25k 15666
+VHJhY2s= 15667
+IHZvdGluZw== 15668
+IGhvbm9y 15669
+Lics 15670
+ZWxsZW4= 15671
+Q2hhdA== 15672
+IGltcHJvdmVtZW50 15673
+J10KCg== 15674
+oIE= 15675
+IHBhcnNlZA== 15676
+ICAgICAgICAgCg== 15677
+IGxhenk= 15678
+IGZhbGxpbmc= 15679
+U2VyaWFsaXpl 15680
+IFBh 15681
+X2dy 15682
+IGZvcmV2ZXI= 15683
+LndoaXRl 15684
+LlF1ZXJ5 15685
+QmVk 15686
+IER1 15687
+IHJlc3VtZQ== 15688
+IHBhcGVycw== 15689
+IEluaXQ= 15690
+IHN1ZmZlcmluZw== 15691
+4oCL 15692
+IGRlY2xhcmF0aW9ucw== 15693
+KCkt 15694
+IGV4ZWN1dGVk 15695
+IEhvbA== 15696
+LmJsb2Nr 15697
+44Oz 15698
+U0s= 15699
+IHN0dWNr 15700
+IExvY2s= 15701
+aW5jaXBhbA== 15702
+TnVsbGFibGU= 15703
+IHNlc3Npb25z 15704
+dW5p 15705
+IGNvdXA= 15706
+YXBwcm8= 15707
+Z2hhbg== 15708
+X3Bvb2w= 15709
+CWlk 15710
+IHNsb3Rz 15711
+IG1lZGljaW5l 15712
+IGdsYWQ= 15713
+IE1vbm9CZWhhdmlvdXI= 15714
+YXRyZQ== 15715
+ICQoJw== 15716
+bWVyaWNhbg== 15717
+YWdn 15718
+IGthbm4= 15719
+X2Nvbm5lY3Q= 15720
+IGJyYW5kcw== 15721
+IHNrZQ== 15722
+IGRpZ2l0 15723
+PG4= 15724
+IGJhY2t1cA== 15725
+IHBlcnNvbmFsbHk= 15726
+LlByb3BlcnR5 15727
+LmNvbW1pdA== 15728
+IGNyeQ== 15729
+X2NvdW50ZXI= 15730
+IG1hbGxvYw== 15731
+IGdyYW4= 15732
+IERyb3A= 15733
+cGxhdGZvcm0= 15734
+cmVkZW50aWFscw== 15735
+aW5raW5n 15736
+IFVJTA== 15737
+dWJz 15738
+IG1s 15739
+bGVzc2x5 15740
+R2VuZXJhdGVk 15741
+ZXJlb3R5cGU= 15742
+IGJhdA== 15743
+TGF5b3V0UGFuZWw= 15744
+TE9U 15745
+Iik7DQoNCg== 15746
+IG11c2NsZQ== 15747
+IGNlcnRpZmljYXRl 15748
+QU5ETEU= 15749
+IGhhcmRlcg== 15750
+IHBpeGVscw== 15751
+KSIsCg== 15752
+LkhlYWRlcg== 15753
+IGRldmVsb3Blcg== 15754
+IExhcw== 15755
+ZWdhbg== 15756
+Ljw= 15757
+IGV4cGxvZGU= 15758
+IHBhcnRpY2lwYXRl 15759
+UGF0dGVybg== 15760
+KHRhYmxl 15761
+IFRFWFQ= 15762
+Y29uc3RhbnRz 15763
+eEQ= 15764
+dGhldw== 15765
+fSwKCg== 15766
+44Gu 15767
+X2Rlcw== 15768
+IHN1YnN0cg== 15769
+IFNtYXJ0 15770
+IHNjYWxh 15771
+Z2VudA== 15772
+LWJhcg== 15773
+ZXNzaW9uYWw= 15774
+dW1icw== 15775
+LmV4ZWM= 15776
+J1w= 15777
+VEs= 15778
+dW5pc3Q= 15779
+cHJvb2Y= 15780
+Y2lhbA== 15781
+cHJvYw== 15782
+PXsi 15783
+LmhyZWY= 15784
+PSQo 15785
+IGx1bmNo 15786
+aXNjYWw= 15787
+IEVudHJ5 15788
+IG91dGRvb3I= 15789
+c2VtYmxl 15790
+IGVzc2VudGlhbGx5 15791
+L0c= 15792
+W10p 15793
+JSI= 15794
+c3Rlbg== 15795
+VVNFRA== 15796
+IGR1c3Q= 15797
+5bA= 15798
+CQoK 15799
+IHJldGlyZQ== 15800
+IGZpYg== 15801
+QWx0aG91Z2g= 15802
+IGxvdmVz 15803
+IHJlYWRz 15804
+eWNsZXM= 15805
+IEhlbA== 15806
+X3VpbnQ= 15807
+ICcuJA== 15808
+X2luaXRpYWw= 15809
+TmFtZWQ= 15810
+IGZ1bmRhbWVudGFs 15811
+QURJTkc= 15812
+IHRvdw== 15813
+IEFERA== 15814
+IEFjYWRlbXk= 15815
+OlN0cmluZw== 15816
+IGNvbXByZWhlbnNpdmU= 15817
+LnNjYWw= 15818
+IE1ldGE= 15819
+TWVzc2FnZXM= 15820
+LmFubm90YXRpb25z 15821
+XFJlc3BvbnNl 15822
+IGFja25vd2xlZA== 15823
+IEFSRQ== 15824
+XT09 15825
+IGNsZWFuaW5n 15826
+6L4= 15827
+RW50aXRpZXM= 15828
+IFNhbGVz 15829
+IFdpcw== 15830
+LmV4dGVuZA== 15831
+YWxsZW5nZQ== 15832
+IGdhbWluZw== 15833
+JHF1ZXJ5 15834
+SUNFUw== 15835
+RVRDSA== 15836
+SG9yaXpvbnRhbA== 15837
+cXVlbnRpYWw= 15838
+QkFDSw== 15839
+ZGV2ZWxvcA== 15840
+aXNvcg== 15841
+KGNvZGU= 15842
+LUs= 15843
+X1BJTg== 15844
+cmVxdWVuY3k= 15845
+IFF1ZXN0aW9u 15846
+X2NvbnRhaW5lcg== 15847
+X21vZHVsZXM= 15848
+IEplcnNleQ== 15849
+X2RpZmY= 15850
+LmVs 15851
+ICooKA== 15852
+Y250 15853
+IFNh 15854
+Q1BQ 15855
+aW5pdGU= 15856
+IHVudXM= 15857
+LXdoaXRl 15858
+ZXRhcnk= 15859
+IGludm9sdmluZw== 15860
+ID8+DQo= 15861
+YmVzdA== 15862
+YWxsYXM= 15863
+ZW50ZWQ= 15864
+ICAgICAgICAgICAgICAgICAgICAgICAgCg== 15865
+X2Nvbm5lY3Rpb24= 15866
+IHJlcG8= 15867
+ZW5hYmxlZA== 15868
+0LDQug== 15869
+IHNoYQ== 15870
+IG1lbWJlcnNoaXA= 15871
+U3RhdHVzQ29kZQ== 15872
+aW5hdGluZw== 15873
+X3Nt 15874
+X2N1c3RvbQ== 15875
+X3dlaWdodA== 15876
+IGNzcw== 15877
+U3RhdA== 15878
+X2Vudg== 15879
+bGlua3M= 15880
+VFJM 15881
+IEhpdA== 15882
+LHI= 15883
+dXBpZA== 15884
+IG9wZW5z 15885
+IGdlbnQ= 15886
+X3Zpcw== 15887
+IGpveQ== 15888
+PHc= 15889
+X2Nvc3Q= 15890
+IFB5T2JqZWN0 15891
+cmVuY2U= 15892
+IEdlb3JnaWE= 15893
+IEJyb2Fk 15894
+bW1h 15895
+4oI= 15896
+cGY= 15897
+ICJcIg== 15898
+ICgm 15899
+b21v 15900
+IGxpdGVyYWxseQ== 15901
+iJg= 15902
+bWV0cmlj 15903
+IGJhcnM= 15904
+emVk 15905
+KHdpbmRvdw== 15906
+IElzcmFlbGk= 15907
+IGZvcm1hbA== 15908
+aWRlbnRpZmllcg== 15909
+LmRhbw== 15910
+IERlYXRo 15911
+JTsK 15912
+IGRlY2xhcmU= 15913
+YXJtcw== 15914
+UkVBTQ== 15915
+UEVSVFk= 15916
+IGNvbnNlcXVlbmNlcw== 15917
+dG9vbHM= 15918
+UGVvcGxl 15919
+IFdoaWNo 15920
+PigpOw0K 15921
+LmRlY29kZQ== 15922
+X0FDVA== 15923
+QnV0dG9ucw== 15924
+LmZsb2F0 15925
+LkZpcnN0 15926
+66U= 15927
+IFBvbGl0 15928
+IFhDVA== 15929
+VGFncw== 15930
+IENHRmxvYXQ= 15931
+PXN0cg== 15932
+IGxlYWY= 15933
+LWNoZWNr 15934
+IElzcw== 15935
+LnN5c3RlbQ== 15936
+bG9nb3V0 15937
+YWNodA== 15938
+QW5nbGU= 15939
+c2lu 15940
+Y2hhcnQ= 15941
+SU5URVI= 15942
+IE5VTQ== 15943
+QmFzaWM= 15944
+LlByb3BlcnRpZXM= 15945
+5Lit 15946
+X2NoYW5nZQ== 15947
+IEJyYXppbA== 15948
+QWJzdHJhY3Q= 15949
+IDorOg== 15950
+X3VzZQ== 15951
+0LDQuw== 15952
+IEx5 15953
+SUJVVA== 15954
+IG91dGVy 15955
+IC0tPg0K 15956
+IHJlbGllZg== 15957
+bGFw 15958
+cXVlcg== 15959
+X3BhcmVudA== 15960
+aGVhcA== 15961
+TE9TRQ== 15962
+IGNvbWJpbmU= 15963
+IFJvc2U= 15964
+b3dlcnM= 15965
+IHByb2NlZHVyZXM= 15966
+IFNvcnQ= 15967
+YW5pbQ== 15968
+dmFyaWFudA== 15969
+ZWhpY2xl 15970
+IHNpZ25pbmc= 15971
+UHJpbWFyeQ== 15972
+Y3VycmVuY3k= 15973
+IHNleGU= 15974
+b2Vu 15975
+dGhldGE= 15976
+ZW1hbg== 15977
+IGltcHJlc3NpdmU= 15978
+KCdf 15979
+CVU= 15980
+IFRleHRTdHlsZQ== 15981
+X2NudA== 15982
+IHNsaWNl 15983
+KCc6 15984
+IHVuZGVyc3Rvb2Q= 15985
+SGlz 15986
+IGluZm9ybWVk 15987
+IG5pY2s= 15988
+KFRBRw== 15989
+aGQ= 15990
+IGVsZWN0aW9ucw== 15991
+ZXN0dXJl 15992
+IFNhbnRh 15993
+IENvYXN0 15994
+LnBkZg== 15995
+aW5jaXBsZQ== 15996
+LmNsb25l 15997
+Ym9ybg== 15998
+dXRh 15999
+IGxpY2Vuc2Vk 16000
+Q3I= 16001
+IGJyZWFk 16002
+IEhvdXN0b24= 16003
+IG5vZA== 16004
+IGhvcGVz 16005
+IENHUmVjdA== 16006
+IGd1aWx0eQ== 16007
+LmdpZg== 16008
+IHJvc2U= 16009
+LkNvbW1vbg== 16010
+VGlw 16011
+QU5L 16012
+IEZD 16013
+RHVyaW5n 16014
+IFN5bWZvbnk= 16015
+IGRlZmVuc2l2ZQ== 16016
+a20= 16017
+KT4= 16018
+YXJjaGl2ZQ== 16019
+IFVSSQ== 16020
+eWNsaW5n 16021
+LW8= 16022
+IFdlYnNpdGU= 16023
+QU1Q 16024
+aXNobWVudA== 16025
+IGRvY3RvcnM= 16026
+RGlyZWN0 16027
+QVJJ 16028
+IFJlZGlyZWN0 16029
+aWVyZW4= 16030
+X2Rpc3Q= 16031
+eW8= 16032
+IFByb2dyZXNz 16033
+IHp1bQ== 16034
+IG1lbW9y 16035
+IEVE 16036
+IGp1cg== 16037
+5o2u 16038
+X1RBQkxF 16039
+IHV1aWQ= 16040
+RXhwcg== 16041
+LmhlYWQ= 16042
+KCcl 16043
+cG9pbnRlcg== 16044
+IGVzdGltYXRl 16045
+IEdyZWc= 16046
+IGxvYWRlcg== 16047
+IGlPUw== 16048
+IG1lbnM= 16049
+W3k= 16050
+IHJlZnVzZWQ= 16051
+IHByZWNpc2lvbg== 16052
+aXNjaA== 16053
+IEFDVElPTg== 16054
+Q2xvdWQ= 16055
+c1dpdGg= 16056
+KHJldA== 16057
+X0FERFI= 16058
+X2NvbmY= 16059
+KGRm 16060
+IGxvY2tlZA== 16061
+IHJpc2luZw== 16062
+44O744O7 16063
+IE1z 16064
+IHNjZW5lcw== 16065
+X0VYVA== 16066
+X3Jhdw== 16067
+X3RoZQ== 16068
+cGVvcGxl 16069
+IHJlY29u 16070
+IEZ1bg== 16071
+IGJsZXNz 16072
+IFVwZGF0ZWQ= 16073
+w7xu 16074
+ICAgICAgICAgICAgDQo= 16075
+cGVjdGlvbg== 16076
+UmVsZWFzZQ== 16077
+LmxvZ2dlcg== 16078
+IFNZ 16079
+IGNvdW5zZWw= 16080
+dXJk 16081
+X3RydWU= 16082
+IGV2ZXJ5Ym9keQ== 16083
+aXZvdA== 16084
+IGhlbmNl 16085
+IE5BUw== 16086
+IG9wcG9zZWQ= 16087
+dW5rbm93bg== 16088
+IERFU0M= 16089
+IENoYWly 16090
+ZmFpbGVk 16091
+IElOQ0xVRElORw== 16092
+IHdyaXRlcnM= 16093
+e30K 16094
+w610 16095
+X2NvcHk= 16096
+fTo= 16097
+IEJhdA== 16098
+IGNvbnZlcnRlZA== 16099
+ZWRpbmc= 16100
+cGxhY2VtZW50 16101
+IEhvc3Q= 16102
+U291bmQ= 16103
+0LjQvA== 16104
+IHNvdWdodA== 16105
+bWlk 16106
+IHNhbGFyeQ== 16107
+b2dn 16108
+4oSi 16109
+YnVs 16110
+IHdpcg== 16111
+dmFsaWRhdG9y 16112
+X1NUQVQ= 16113
+LnN0b3Jl 16114
+IEJhdHRsZQ== 16115
+xLFu 16116
+IC0tPgoK 16117
+VHJ1bXA= 16118
+ZG90 16119
+IENPTlQ= 16120
+LmZldGNo 16121
+IGNvbnRpbnU= 16122
+d2Fz 16123
+IGZyYXVk 16124
+X3RtcA== 16125
+bWl0dGVy 16126
+LnBpY3R1cmVCb3g= 16127
+R0E= 16128
+IHRvdXJuYW1lbnQ= 16129
+LklucHV0 16130
+W3I= 16131
+ZXhpb24= 16132
+Y2VudGFnZQ== 16133
+IEtvcmVhbg== 16134
+dW5kZWY= 16135
+IEF2YWlsYWJsZQ== 16136
+cmVzaGFwZQ== 16137
+IGtpdA== 16138
+IFN0cnVjdA== 16139
+IFNVQg== 16140
+QW5zd2Vy 16141
+X2xpYg== 16142
+LnR3aXR0ZXI= 16143
+IG9yZQ== 16144
+IERyYWdvbg== 16145
+LkV4dA== 16146
+LGs= 16147
+IGV4cGxhbmF0aW9u 16148
+cmVmcw== 16149
+IERyaXZl 16150
+IFRyYWluaW5n 16151
+Lkhhcw== 16152
+aW50YWdl 16153
+Ymln 16154
+b2xvZ2lzdA== 16155
+ZW5uaXM= 16156
+2Yc= 16157
+IGNoaWNrZW4= 16158
+ICAgICAgICAgIAo= 16159
+55s= 16160
+44Gn 16161
+IHBlYWs= 16162
+IGRyaW5raW5n 16163
+IGVuY29kZQ== 16164
+IE5FVw== 16165
+bWFsbG9j 16166
+CWZwcmludGY= 16167
+ID09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09 16168
+aW5jbHVkaW5n 16169
+IHByaW5jaXBsZXM= 16170
+IE1haA== 16171
+c3RvcmFnZQ== 16172
+LWtleQ== 16173
+IGtleXdvcmQ= 16174
+JTs= 16175
+IHRyYWluZWQ= 16176
+LmNvbnRyaWI= 16177
+IGt2 16178
+X18nOgo= 16179
+IEJveQ== 16180
+cGFyYW1ldGVy 16181
+IHN1aXRl 16182
+IHRob3VzYW5k 16183
+IGNvb3JkaW5hdGU= 16184
+LWdlbmVyYXRlZA== 16185
+7ZWY 16186
+Z2VuZXJhdGVk 16187
+IGFkbWl0dGVk 16188
+IHB1c3N5 16189
+I3c= 16190
+IHN3aW0= 16191
+dW5pb24= 16192
+TmE= 16193
+IFJveWFs 16194
+LmNoYW5uZWw= 16195
+VXBkYXRlZA== 16196
+X1JPT1Q= 16197
+IHZpdGFs 16198
+cmFjdGlvbg== 16199
+IENydXNoZXI= 16200
+IHByZWNlZA== 16201
+IGhvcml6b250YWw= 16202
+Qmx1ZXByaW50 16203
+IGF0dHJz 16204
+IHNtb2tl 16205
+0JI= 16206
+LkVxdWFscw== 16207
+RkI= 16208
+IFJlc291cmNlcw== 16209
+cm9sbGluZw== 16210
+IHBhc3Nlcw== 16211
+IE51bQ== 16212
+cm90YXRl 16213
+ZXR5cGU= 16214
+XCIs 16215
+IHNlbnNpdGl2ZQ== 16216
+IHRhbGw= 16217
+P+KAnQoK 16218
+UHJveHk= 16219
+aXk= 16220
+X3NlY3Rpb24= 16221
+4oCU4oCU4oCU4oCU 16222
+YnJpZA== 16223
+IGNpcmN1aXQ= 16224
+YXRhbg== 16225
+RU5D 16226
+IGRyaXZlbg== 16227
+IHZvdGVk 16228
+IGVkdWNhdGlvbmFs 16229
+IGludGVyYWN0aW9u 16230
+YWJldGVz 16231
+IHRvbmU= 16232
+IEluaXRpYWxpemVDb21wb25lbnQ= 16233
+IG1lcmVseQ== 16234
+IOye 16235
+Y29va2ll 16236
+X2Rpdg== 16237
+IFVJTGFiZWw= 16238
+dmVseQ== 16239
+fSk7DQo= 16240
+X0VOVA== 16241
+IysjKw== 16242
+YXJ0aWNsZXM= 16243
+IFNvdXRoZXJu 16244
+IHN0cm9uZ2Vy 16245
+IEdpdmVu 16246
+IEVyaWM= 16247
+IElS 16248
+YWJzdHJhY3Q= 16249
+VW5kZXI= 16250
+bmFibGU= 16251
+IGluY3JlbWVudA== 16252
+b3Zlbg== 16253
+IGNvaW4= 16254
+X3RpbWVy 16255
+IHN1ZmZlcmVk 16256
+IEZSRUU= 16257
+J10uIg== 16258
+IFF1ZWVu 16259
+c3RhdHM= 16260
+IG1lZXRpbmdz 16261
+IGVudGVyaW5n 16262
+IGFsb25nc2lkZQ== 16263
+KHNlc3Npb24= 16264
+aXRhbHM= 16265
+IGZvdW5kYXRpb24= 16266
+IENyZWRpdA== 16267
+LmRpdg== 16268
+X0FMTA== 16269
+cGNpb24= 16270
+X3N0YXQ= 16271
+aWNraW5n 16272
+RGVmYXVsdHM= 16273
+X3NyYw== 16274
+IG91dHB1dHM= 16275
+L0I= 16276
+IGVudGh1cw== 16277
+LWJs 16278
+LkZvcmVDb2xvcg== 16279
+CXRlbXA= 16280
+RmFjZQ== 16281
+IGludGVyYWN0 16282
+IHdlaXJk 16283
+TW91bnQ= 16284
+cmVsbA== 16285
+dWRlbnRz 16286
+IHJlcXVpcmVtZW50 16287
+IFN1cw== 16288
+SUVS 16289
+IGVsZWN0ZWQ= 16290
+cmVmZXJlbmNl 16291
+IE1F 16292
+IHNlcnZlcnM= 16293
+LndhaXQ= 16294
+IHNuYXBzaG90 16295
+aWx0b24= 16296
+IHRyaWVz 16297
+IHRpcG8= 16298
+LlRpbWU= 16299
+Pnc= 16300
+IG1vdW50YWlu 16301
+IHBvdW5kcw== 16302
+IFsuLi4= 16303
+ZXhpc3Rz 16304
+IG5nT24= 16305
+X01BUA== 16306
+IGZseWluZw== 16307
+eGlldHk= 16308
+CXZhbHVl 16309
+X0RC 16310
+dW5v 16311
+IHNlYXRz 16312
+VFVSTg== 16313
+LmF1dGhvcg== 16314
+ISk= 16315
+b3JjZQ== 16316
+IGluZGljYXRlZA== 16317
+LnNpbg== 16318
+IGFzc2lnbm1lbnQ= 16319
+aW1pZW50bw== 16320
+IEZyYW1l 16321
+X2dlbg== 16322
+aW5lcnk= 16323
+Xyk= 16324
+bWVzc2FnZXM= 16325
+LnNldHRpbmdz 16326
+IE1lYW4= 16327
+IE11c2V1bQ== 16328
+aXJx 16329
+YXR0YWNo 16330
+IFBhbGVzdGlu 16331
+X1FV 16332
+X3RhZ3M= 16333
+IGNhc3VhbA== 16334
+ZW1lbg== 16335
+QVNTV09SRA== 16336
+JHM= 16337
+IENpcmM= 16338
+0L7QuQ== 16339
+ZXRyaWM= 16340
+L1A= 16341
+IGVwb2No 16342
+PGhlYWQ= 16343
+X0NNRA== 16344
+IGdpdA== 16345
+IHBlbmFsdHk= 16346
+b3JwaA== 16347
+X3VzZXJz 16348
+b3Vyc2Vz 16349
+LkRhdGVUaW1l 16350
+YXRlcm5pb24= 16351
+X3Byb2plY3Q= 16352
+IHN1cGVyaW9y 16353
+IERhbQ== 16354
+IFNlYXR0bGU= 16355
+WFk= 16356
+PlRoZQ== 16357
+IEFr 16358
+IGdyYXNz 16359
+LyoNCg== 16360
+KGRpcw== 16361
+IGd1bnM= 16362
+IHRi 16363
+IEtldmlu 16364
+LmFyZ3M= 16365
+IEFo 16366
+b3BlZA== 16367
+KEo= 16368
+Y29sdW1ucw== 16369
+YXJndW1lbnRz 16370
+IFdpdGhFdmVudHM= 16371
+X2Z1bGw= 16372
+IERlZmVuc2U= 16373
+U2ltcGxl 16374
+IGRlYXRocw== 16375
+IGV4dGVuc2l2ZQ== 16376
+IFN0aWxs 16377
+IEV4cHJlc3Npb24= 16378
+IEFnZW5jeQ== 16379
+IHBlcmZvcm1pbmc= 16380
+Rlg= 16381
+IHVzdWFyaW8= 16382
+VUFM 16383
+U2lkZQ== 16384
+b2Rvcw== 16385
+YXB0b3A= 16386
+IGNyZWRlbnRpYWxz 16387
+X2NhcA== 16388
+YXRpZW50 16389
+IERpc25leQ== 16390
+IGFp 16391
+IGNoaXA= 16392
+IHZvbHQ= 16393
+Lm1ha2VUZXh0 16394
+JSUlJSUlJSUlJSUlJSUlJQ== 16395
+IGJlbGllZg== 16396
+X0xPQw== 16397
+IENpdmls 16398
+TmF2aWdhdGlvbg== 16399
+IHJldmVhbA== 16400
+IHZpb2xlbnQ= 16401
+IEZpbA== 16402
+IGNhdGFsb2c= 16403
+ZW1lZA== 16404
+c2Nhbg== 16405
+LmNvbnRyb2w= 16406
+IGNvbnN0aXR1dGlvbg== 16407
+Q291bnRyeQ== 16408
+U2VwYXJhdG9y 16409
+X0FQUA== 16410
+dG9waWM= 16411
+dWV0b290aA== 16412
+TUlO 16413
+IGRlc2NyaXB0b3I= 16414
+eXQ= 16415
+RVRIRVI= 16416
+IGRpc3RyaWJ1dGU= 16417
+J30K 16418
+LnRyaW0= 16419
+LkxpbmU= 16420
+IGxibA== 16421
+YXNzZXJ0RXF1YWxz 16422
+IERldA== 16423
+b21ib2s= 16424
+KHdpZHRo 16425
+IHRvcnQ= 16426
+IEVYUFJFU1M= 16427
+YWNv 16428
+VXNpbmc= 16429
+IEJyYW5k 16430
+d2FsbA== 16431
+RU1FTlQ= 16432
+IENvbW11bmlj 16433
+PHVpbnQ= 16434
+IEdVSQ== 16435
+RUdJTg== 16436
+IFJhbmdl 16437
+L2k= 16438
+IFRheWxvcg== 16439
+Y29zdA== 16440
+IHJlc3BvbmRlZA== 16441
+IFRoZW1l 16442
+bmNl 16443
+SVNI 16444
+IGZlYXR1cmluZw== 16445
+UmV0dXJucw== 16446
+IEty 16447
+IC4K 16448
+IG5hbQ== 16449
+X2Ni 16450
+VGVzdGluZw== 16451
+IHt9LA== 16452
+eWFs 16453
+LmZpZWxk 16454
+IC89 16455
+X1NIT1JU 16456
+bWF0ZXM= 16457
+VGVzdENhc2U= 16458
+YWlubGVzcw== 16459
+IGV2YWx1YXRpb24= 16460
+X0lURU0= 16461
+IFBhY2lmaWM= 16462
+CWs= 16463
+IGNhbnQ= 16464
+IFJvcw== 16465
+KXM= 16466
+IGZldA== 16467
+U1RSSU5H 16468
+IERpc3Bvc2U= 16469
+Z2Fs 16470
+IEpvaW4= 16471
+IFBvcm4= 16472
+IENhdGhvbGlj 16473
+QVJHRVQ= 16474
+Y3B1 16475
+56CB 16476
+LnNjcm9sbA== 16477
+SVNJTkc= 16478
+aWZlc3R5bGU= 16479
+YW5jZW1lbnQ= 16480
+IG1lcmM= 16481
+IEJyb3dzZXI= 16482
+ZXRlcm1pbg== 16483
+IG92ZXJmbG93 16484
+QXZhaWxhYmxl 16485
+IGJvdHRsZQ== 16486
+OlVJ 16487
+aWZpY2lhbA== 16488
+IGNvb3Jk 16489
+Y2xhcmF0aW9u 16490
+IGNvbmo= 16491
+R0xPQkFM 16492
+b2t1 16493
+IGt3YXJncw== 16494
+Y29uZGl0aW9ucw== 16495
+dWx1bQ== 16496
+IGdlbnU= 16497
+IEhlcm8= 16498
+5Y4= 16499
+IHVuZXhwZWN0ZWQ= 16500
+IERBTUFHRVM= 16501
+IGth 16502
+IENvdWxk 16503
+VVBQT1JU 16504
+IFBob3Rvcw== 16505
+IGNvbmZpZGVudA== 16506
+IGRldGVjdGVk 16507
+ZGVn 16508
+cmdi 16509
+IHN0cm9uZ2x5 16510
+IH07DQo= 16511
+ICk6 16512
+IGxlY3Q= 16513
+dXJzaXZl 16514
+Uk9M 16515
+IFdlaWdodA== 16516
+IGVudGVydGFpbm1lbnQ= 16517
+ICkpOwo= 16518
+IGdvbm5h 16519
+IGJi 16520
+LmRv 16521
+R1M= 16522
+IG1pc3Rha2U= 16523
+REw= 16524
+IFBST1ZJREVE 16525
+ZWFybmluZw== 16526
+TGltaXQ= 16527
+aXNzaW9ucw== 16528
+W3Y= 16529
+5LiN 16530
+aXJ0eQ== 16531
+RGVs 16532
+IHVuZGVybHlpbmc= 16533
+cHJlbmU= 16534
+IGphdw== 16535
+IERJ 16536
+cGVlcg== 16537
+IG9iamVjdGl2ZQ== 16538
+IGRlcG9zaXQ= 16539
+IGtvbg== 16540
+IGVzcA== 16541
+LnNldFZpc2liaWxpdHk= 16542
+L2xvZ2lu 16543
+PHR5cGVuYW1l 16544
+IGZyYW5jaA== 16545
+L2U= 16546
+UGFyYWxsZWw= 16547
+IHNjb3JlZA== 16548
+IEhvbg== 16549
+IFZpbGw= 16550
+aWdh 16551
+IGFudGljaXA= 16552
+X2Fzc2VydA== 16553
+IE9wdA== 16554
+IGRlc2NyaWJlcw== 16555
+d2Fu 16556
+bW91bnQ= 16557
+IG1vbml0b3Jpbmc= 16558
+IHRvdXQ= 16559
+64qU 16560
+fSx7 16561
+Li4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4= 16562
+PWludA== 16563
+IGN1c3Q= 16564
+LS0tLS0t 16565
+IGF0bW9zcGhlcmU= 16566
+UEFS 16567
+b3J0ZQ== 16568
+SVNJQkxF 16569
+IElyb24= 16570
+IE5vdGlmaWNhdGlvbg== 16571
+LmxvZ2dpbmc= 16572
+IEJPT0w= 16573
+LXBvaW50 16574
+IGFmcmFpZA== 16575
+ZW50YQ== 16576
+IHRvbW9ycm93 16577
+QGltcGxlbWVudGF0aW9u 16578
+IGVuZ2FnZQ== 16579
+IEFudGg= 16580
+IEZsb29y 16581
+IFVs 16582
+VG9vbHM= 16583
+IGJhYg== 16584
+IGNhcmVmdWw= 16585
+44GE 16586
+IGNydWNpYWw= 16587
+IGNhbGN1bGF0ZWQ= 16588
+IFNB 16589
+IHd5 16590
+RFg= 16591
+X1RBRw== 16592
+aW5kZWQ= 16593
+IGpldA== 16594
+IEVuZ2luZWVyaW5n 16595
+Lk1BWA== 16596
+ZW56 16597
+dmQ= 16598
+IHB1YmxpY2F0aW9u 16599
+ICMjIw== 16600
+IGZhY2Vk 16601
+cmFoYW0= 16602
+IENhcHQ= 16603
+QXNzZXQ= 16604
+IENvbnN0YW50cw== 16605
+IGxvYW5z 16606
+X0lQ 16607
+IEZpc2g= 16608
+UmVkdWM= 16609
+X21hdA== 16610
+RGF0ZUZvcm1hdA== 16611
+X21l 16612
+W11bXQ== 16613
+IGludGVncml0eQ== 16614
+IENvdXJzZQ== 16615
+bG9iYWxz 16616
+IGZhY2lsaXQ= 16617
+IGVtYnI= 16618
+IE5n 16619
+LlN5c3RlbQ== 16620
+IG1hbnVmYWN0dXJlcnM= 16621
+IHByb3Zlbg== 16622
+Lm9uQ3JlYXRl 16623
+IGFsYXJt 16624
+IMKn 16625
+IGNvbW1vbmx5 16626
+aWNvcw== 16627
+5paw 16628
+IFN0YXRpb24= 16629
+fSku 16630
+IEZpbG0= 16631
+d2k= 16632
+54k= 16633
+IGVuZ2FnZWQ= 16634
+U3RhdHM= 16635
+IGdvdmVybm1lbnRz 16636
+IGFmZm9yZGFibGU= 16637
+X3Byb3BlcnR5 16638
+IGFnZXM= 16639
+KCctLQ== 16640
+IGbDtnI= 16641
+IFByb2Zlc3Nvcg== 16642
+IGh5ZHJv 16643
+UHVzaA== 16644
+IG9yZ2FuaXplZA== 16645
+QWNjZXB0 16646
+w6lt 16647
+X2NlbGw= 16648
+IG5i 16649
+cGI= 16650
+QXJ0aWNsZQ== 16651
+IHJlbW92YWw= 16652
+IGF1dGhlbnRpY2F0aW9u 16653
+IEZS 16654
+bGlkZQ== 16655
+IHBsZWFzdXJl 16656
+YXBvbA== 16657
+IHBhcnRpdGlvbg== 16658
+IFNpZGU= 16659
+IGNyaW1lcw== 16660
+IGRlbW8= 16661
+aG9sZGVycw== 16662
+IFBha2lzdGFu 16663
+SW5zdHJ1Y3Rpb24= 16664
+IGV4cGVjdGF0aW9ucw== 16665
+LnNjZW5l 16666
+ICcp 16667
+aGVz 16668
+aW5vaXM= 16669
+X1Bybw== 16670
+IG1vbGVj 16671
+YW5kYWw= 16672
+X3Nob3J0 16673
+IGRlZmF1bHRz 16674
+IG5hdGlvbnM= 16675
+aW5lbg== 16676
+IHJ0 16677
+T0NL 16678
+UGFja2V0 16679
+U0I= 16680
+IFNIQUxM 16681
+X2NvbnRlbnRz 16682
+aXNlY29uZHM= 16683
+dmVydHk= 16684
+w6F0 16685
+R3VpZA== 16686
+bm9t 16687
+IGNvbmNsdXNpb24= 16688
+LlVwZGF0ZQ== 16689
+IGxvdmVseQ== 16690
+IGVtaXQ= 16691
+YmVj 16692
+CQkJCSA= 16693
+IGludGVsbGVjdA== 16694
+IGJyZXc= 16695
+ZWN5Y2xl 16696
+RmlyZQ== 16697
+IGFkbWl0 16698
+IGFyYml0 16699
+IGFycmFuZw== 16700
+IE1JTg== 16701
+TWFpbA== 16702
+IE5hdGl2ZQ== 16703
+Q3Vy 16704
+IGNvbnZlbnQ= 16705
+LlJ1bnRpbWU= 16706
+In0K 16707
+LlJ1bg== 16708
+IHByaW50ZWQ= 16709
+IGNvbnZlbmllbnQ= 16710
+LmFy 16711
+bW9jaw== 16712
+IEFkbWluaXN0cmF0aW9u 16713
+44G+ 16714
+IGVsZWN0cm9u 16715
+ZmxhdGU= 16716
+IGxvbWJvaw== 16717
+IGphdmFmeA== 16718
+bmg= 16719
+IHN1cHBsaWVz 16720
+IHZpc2l0aW5n 16721
+YWhs 16722
+IHBvd2Rlcg== 16723
+IHVsdGltYXRl 16724
+IG9yaWVudGF0aW9u 16725
+dXRhcw== 16726
+X3NjYWxl 16727
+Q29uZmlybQ== 16728
+cGhvbmVz 16729
+IE9wZXJhdGlvbg== 16730
+L1Q= 16731
+X0lOVEVS 16732
+IGFpcnBvcnQ= 16733
+IG1ldHJpY3M= 16734
+IHBoZW5vbWVu 16735
+YXVkaW8= 16736
+IG1haQ== 16737
+KEs= 16738
+aHU= 16739
+YWxsaW5n 16740
+cm9kdWN0aW9u 16741
+IFRyYW5zcG9ydA== 16742
+IE5PVEU= 16743
+5paH 16744
+IGZld2Vy 16745
+X1RJTQ== 16746
+7Kc= 16747
+0LrQuA== 16748
+QWdl 16749
+RklO 16750
+IOyd 16751
+IEF0dHJpYnV0ZQ== 16752
+Z3JvdXBz 16753
+ZXJr 16754
+YXR0bw== 16755
+LmRlZmluZQ== 16756
+LkFzcE5ldENvcmU= 16757
+YXRlZ29yaWE= 16758
+IFNpcg== 16759
+KGZvcm0= 16760
+PFVzZXI= 16761
+LnJvdW5k 16762
+X2RheQ== 16763
+LkFsbA== 16764
+U2VydmxldFJlc3BvbnNl 16765
+Lk5v 16766
+bGFyZ2U= 16767
+SUdI 16768
+cXVlbnQ= 16769
+IHZpcnVz 16770
+IHJldHJv 16771
+IGltcGVy 16772
+Qml0bWFw 16773
+IHZpY2U= 16774
+IG9mZmVuc2U= 16775
+aXN0ZQ== 16776
+IEFVVEg= 16777
+IOqw 16778
+VG9vbFN0cmlwTWVudUl0ZW0= 16779
+R3U= 16780
+IHJhcGU= 16781
+IERhdmlz 16782
+IG92ZXJ3aGVs 16783
+OmZsdXR0ZXI= 16784
+LXRhYmxl 16785
+IENvbnN0cnVjdG9y 16786
+UHJpdmF0ZQ== 16787
+ZXZlbg== 16788
+Y2hy 16789
+IGFwcGxpZXM= 16790
+X2F0dHJpYnV0ZQ== 16791
+IGNvbnRyaWJ1dGU= 16792
+RVZFUg== 16793
+TGluZXM= 16794
+IEFmZ2hhbg== 16795
+VmlzaXRvcg== 16796
+IFNM 16797
+c2Vhc29u 16798
+Q1U= 16799
+IGludHJvZHVjdGlvbg== 16800
+IG1hdHBsb3RsaWI= 16801
+xZE= 16802
+IG5ld3NwYXBlcg== 16803
+4oCUYW5k 16804
+PHRhZw== 16805
+IGluaQ== 16806
+IGRpdmVyc2U= 16807
+SWdub3JlQ2FzZQ== 16808
+IFVy 16809
+QWdlbnQ= 16810
+IGJ1bGw= 16811
+LmVtaXQ= 16812
+KEV4Y2VwdGlvbg== 16813
+YXJMYXlvdXQ= 16814
+IGluY3JlZGlibHk= 16815
+IFRydXN0 16816
+PXso 16817
+LW5hdg== 16818
+IGVxdWFscw== 16819
+IGxhZHk= 16820
+IFBvZA== 16821
+ZGlzYw== 16822
+YWxhbQ== 16823
+IElW 16824
+4pk= 16825
+aXZpZHVhbA== 16826
+cGhp 16827
+YWRkZWQ= 16828
+IGRpZmZpY3VsdHk= 16829
+IGNvbXBhY3Q= 16830
+IEFjdGlvblJlc3VsdA== 16831
+Y2Vycw== 16832
+X2NsYXNzZXM= 16833
+Tm9uTnVsbA== 16834
+IHF1aXQ= 16835
+IHBvdQ== 16836
+U3dpdGNo 16837
+aXJz 16838
+LXRlc3Q= 16839
+IEtpbmQ= 16840
+IENhbGVuZGFy 16841
+IHN0cmVhbWluZw== 16842
+fScs 16843
+U1c= 16844
+IHN0ZWFk 16845
+b2Nh 16846
+IHByb3ZpbmNl 16847
+IGNvbHNwYW4= 16848
+IHBlcnNvbm5lbA== 16849
+IEVtcGxveWVl 16850
+IHByb2R1Y2Vy 16851
+IGV2ZXJ5d2hlcmU= 16852
+b2Ri 16853
+0J8= 16854
+YnNvbHV0ZQ== 16855
+YWN0aXZhdGU= 16856
+IGdyaW5kaW5n 16857
+IEJ1aWxkaW5n 16858
+IFNhbmRlcnM= 16859
+KHNj 16860
+IE9mZnNldA== 16861
+Ly8vLy8vLy8vLy8v 16862
+fTsNCg0K 16863
+KHsi 16864
+IHNjYW5m 16865
+IFlZ 16866
+CWRlZmVy 16867
+IGpldw== 16868
+IHJlc3RyaWN0aW9ucw== 16869
+Lm1w 16870
+W2w= 16871
+5LiL 16872
+bGFiZWxz 16873
+cmVkaWNhdGU= 16874
+YXdlc29tZQ== 16875
+IHdhdmVz 16876
+IGNvbmZyb250 16877
+IG1lYXN1cmVk 16878
+IGRhdGFz 16879
+X2V4aXQ= 16880
+b3R0b24= 16881
+IHNob3VsZGVy 16882
+YXNrYQ== 16883
+KyM= 16884
+ICAgICAgICAKICAgICAgICAK 16885
+IHRyb29wcw== 16886
+IFVuZA== 16887
+X2NhcmQ= 16888
+d2ljaA== 16889
+IG5vdXM= 16890
+ICIvIg== 16891
+c2I= 16892
+IGNvbW11bmljYXRpb25z 16893
+RXhwb3J0 16894
+IGRlY29kZQ== 16895
+dGhz 16896
+aW50ZXJwcmV0 16897
+QnlOYW1l 16898
+IFNwaXJpdA== 16899
+ZWRnZXM= 16900
+T0xF 16901
+IEVN 16902
+dGl0 16903
+IFRocm91Z2g= 16904
+IGJpbw== 16905
+IFBhY2thZ2U= 16906
+b3JuZQ== 16907
+IH0u 16908
+YDsK 16909
+IG9rYXk= 16910
+IFplYWxhbmQ= 16911
+aWRlbnRpdHk= 16912
+KG5leHQ= 16913
+IEJhbmc= 16914
+TGlicmFyeQ== 16915
+IGhlYXZpbHk= 16916
+aWxvbg== 16917
+IGRpcGw= 16918
+IHJvdGF0ZQ== 16919
+cHV0cw== 16920
+KScsCg== 16921
+IERhdGFUYWJsZQ== 16922
+IG1heW9y 16923
+LnRvTG93ZXJDYXNl 16924
+IHNvbWVob3c= 16925
+IE5vcnRoZXJu 16926
+YWxj 16927
+IGNhcGFiaWxpdGllcw== 16928
+IHZpYnI= 16929
+Kwo= 16930
+IFN1 16931
+IFJlc2V0 16932
+X21lYW4= 16933
+IGNpZw== 16934
+LmNsb3Vk 16935
+IEJhbmQ= 16936
+IEZhY3Rvcnk= 16937
+IEFyaXpvbmE= 16938
+X2lv 16939
+b3BoZXI= 16940
+IGNvbnNjaW91cw== 16941
+IMO2 16942
+XENvbnRyb2xsZXJz 16943
+X3NwZWVk 16944
+IEZhYw== 16945
+X0NvbQ== 16946
+IEJpYmxl 16947
+d2Vu 16948
+RURJVA== 16949
+IHVubg== 16950
+IFN0YWZm 16951
+IElubg== 16952
+IG1lY2hhbmlzbQ== 16953
+IE1lbWJlcnM= 16954
+IG1pZ3JhdGlvbkJ1aWxkZXI= 16955
+J10uJw== 16956
+LmdldEludA== 16957
+PHZvaWQ= 16958
+CWZyZWU= 16959
+b2lkcw== 16960
+XFN1cHBvcnQ= 16961
+IGF1dG9tYXRpYw== 16962
+IGNoYW5jZXM= 16963
+0LY= 16964
+IGNvbXBsaWNhdGVk 16965
+W3Jvdw== 16966
+YWhvbw== 16967
+IH0KCgoK 16968
+TW9kZWxz 16969
+V2lu 16970
+IHRhcGU= 16971
+aXJ1cw== 16972
+aXpvbg== 16973
+b25vbXk= 16974
+KCJf 16975
+Oi4= 16976
+LnN0ZXJlb3R5cGU= 16977
+KGVudg== 16978
+X3JlY3Q= 16979
+KHdpdGg= 16980
+IGFzc2VydFRoYXQ= 16981
+IGNvbnN0cmFpbnRz 16982
+cHV0eQ== 16983
+RW1wbG95ZWU= 16984
+VEQ= 16985
+IGd1aXRhcg== 16986
+IEpld3M= 16987
+LnByb2Nlc3M= 16988
+IGZpY3Rpb24= 16989
+IFNoYXJlZA== 16990
+4pSA4pSA 16991
+IHByb3BhZw== 16992
+Lk5ldA== 16993
+IGFjaGlldmVk 16994
+CVE= 16995
+IG51cnM= 16996
+U2hhcmVk 16997
+X0ZBSUxVUkU= 16998
+IGJlaGF2aW91cg== 16999
+IGNvbHM= 17000
+aXNtbw== 17001
+IGZlbWlu 17002
+IGNoYWxsZW5naW5n 17003
+IHBvc3Rpbmc= 17004
+ZW5jaWw= 17005
+IGNhcHR1cmVk 17006
+IERvdQ== 17007
+KHdvcmQ= 17008
+IFR1cmtleQ== 17009
+cGFuaWVz 17010
+IHJlcHV0YXRpb24= 17011
+T1JNQUw= 17012
+IGVsaWdpYmxl 17013
+cHJvdG9jb2w= 17014
+aWRhcw== 17015
+KGZyb20= 17016
+IGZpbmFuY2U= 17017
+LXBlcg== 17018
+IGdvdHRlbg== 17019
+SEE= 17020
+ZHVyYXRpb24= 17021
+IFBhcmVudA== 17022
+IGludmVudA== 17023
+IHJlc3RhcnQ= 17024
+0L7Qu9GM 17025
+cml0aW9u 17026
+KHJz 17027
+PGJvb2w= 17028
+aWVydA== 17029
+IG1vZGlmaWNhdGlvbg== 17030
+IFRY 17031
+cmVhZGNydW1i 17032
+YmFuaw== 17033
+JC8= 17034
+IE1pbGxlcg== 17035
+XSksCg== 17036
+LkNoZWNrZWQ= 17037
+IHNhY3I= 17038
+c2VjdXJpdHk= 17039
+IHBvc2U= 17040
+IEJyYWQ= 17041
+IGZpdG5lc3M= 17042
+IGFubm91bmNlbWVudA== 17043
+YXRpb25Ub2tlbg== 17044
+IHNlcnZlcw== 17045
+bmVlZA== 17046
+IGdlb21ldHJ5 17047
+QVJT 17048
+5oA= 17049
+YW5kaWRhdGU= 17050
+IHNwcml0ZQ== 17051
+X3NwbGl0 17052
+V2Vlaw== 17053
+YWRpZXM= 17054
+PigK 17055
+Pz4i 17056
+IC8vLwo= 17057
+IGVpbmVy 17058
+IHdlZWtseQ== 17059
+CWxvZ2dlcg== 17060
+X3BvcA== 17061
+X21hbg== 17062
+IG1pZ3JhdGlvbnM= 17063
+IGFza3M= 17064
+IGJz 17065
+IGZhbGxz 17066
+LldoZXJl 17067
+LWhlaWdodA== 17068
+X2ZlYXR1cmU= 17069
+Lk1pbg== 17070
+IGh5cGVy 17071
+IHZvbGF0aWxl 17072
+IHR3ZW50eQ== 17073
+VHlwb2dyYXBoeQ== 17074
+VW5hYmxl 17075
+RGV0 17076
+LGY= 17077
+LW1vZA== 17078
+IHNldHRsZW1lbnQ= 17079
+IGNvbnRyYWN0cw== 17080
+bm9tZQ== 17081
+QmFk 17082
+IEJyaWFu 17083
+KHVzZXJuYW1l 17084
+ISEhIQ== 17085
+IGhhY2s= 17086
+LkZpZWxk 17087
+SFI= 17088
+IEpvcmRhbg== 17089
+aXph 17090
+IMKg 17091
+IFNoZXI= 17092
+LmhlYWRlcg== 17093
+KG90aGVy 17094
+IER1Yg== 17095
+KG9w 17096
+IFJvdW5k 17097
+IHZpZQ== 17098
+IGFwcGw= 17099
+CUo= 17100
+IEluc2VydA== 17101
+IExQ 17102
+cmVnb24= 17103
+IE1QSQ== 17104
+IGFuY2hvcg== 17105
+YWNh 17106
+w7hy 17107
+IGFkZQ== 17108
+YW5jaG9y 17109
+cXVlZQ== 17110
+IFRyZWVOb2Rl 17111
+IHRhcmdldGVk 17112
+IGxhaWQ= 17113
+QUJFTA== 17114
+dmV0 17115
+IE9yaWdpbg== 17116
+QW50 17117
+LicpOwo= 17118
+ZXhwZWN0 17119
+ZWRSZWFkZXI= 17120
+IE1ham9y 17121
+IGluY2g= 17122
+Q29tcGFy 17123
+IHByZXZpZXc= 17124
+IGlsbG5lc3M= 17125
+IENPTlRSQUNU 17126
+IEluZGVwZW5k 17127
+dXVpZA== 17128
+IG5vbWU= 17129
+IHRj 17130
+IEF2ZW51ZQ== 17131
+aXNhbg== 17132
+IHBocmFzZQ== 17133
+X21vdmU= 17134
+Iilb 17135
+IHByb3Zpc2lvbg== 17136
+IGNvbmNlbnRy 17137
+X0lS 17138
+IFV0 17139
+KCkr 17140
+IG5hcw== 17141
+ISw= 17142
+IFJvYmlu 17143
+aWF0aW9ucw== 17144
+YXRpdHVkZQ== 17145
+IHB4 17146
+IFdpdGhvdXQ= 17147
+L2Jhc2g= 17148
+ZWt0 17149
+cmVlbWVudA== 17150
+T2JzZXJ2ZXI= 17151
+IFJlZ2lvbg== 17152
+VUJMSUM= 17153
+IHsvLw== 17154
+S04= 17155
+5bc= 17156
+R2FtZU9iamVjdA== 17157
+5b4= 17158
+ZW5jb2Rpbmc= 17159
+ICoqKg== 17160
+cHJvamVjdHM= 17161
+IHRr 17162
+IGNoZWVzZQ== 17163
+RU1QTA== 17164
+YXJv 17165
+INin2YQ= 17166
+IGNvbnNpc3Rz 17167
+cmVmcmVzaA== 17168
+dXJlYXU= 17169
+IFNjYW5uZXI= 17170
+IHNvaWw= 17171
+IGZsYXZvcg== 17172
+RGF0YVNvdXJjZQ== 17173
+RXhlY3V0ZQ== 17174
+0LXQvdC40LU= 17175
+IHNoaXQ= 17176
+5YiG 17177
+PGFueQ== 17178
+IHJldHJpZXZl 17179
+IGJlbG9uZ3M= 17180
+LnN0cmlw 17181
+YWJzb2x1dGU= 17182
+IGV4cGFuZGVk 17183
+Ym95 17184
+KTot 17185
+IHJlc2N1ZQ== 17186
+LkpMYWJlbA== 17187
+IHJlbHk= 17188
+IGFsaWdubWVudA== 17189
+LWZhbWlseQ== 17190
+IHJlbmQ= 17191
+T0xVTU4= 17192
+IGJvcnJvdw== 17193
+IHF1b3Rlcw== 17194
+IExldw== 17195
+IHNob3dlcg== 17196
+IERFTEVURQ== 17197
+X2xvb3A= 17198
+ISIKCg== 17199
+CXJl 17200
+IGF0dGVtcHRlZA== 17201
+YXZlcmFnZQ== 17202
+IFBhaW50 17203
+cXVpc2l0aW9u 17204
+b2xlbg== 17205
+IGxpdGVyYXR1cmU= 17206
+IFJlZmVyZW5jZQ== 17207
+X1RFWFRVUkU= 17208
+IFNlZw== 17209
+IEluZHVzdA== 17210
+Y3R5cGU= 17211
+RFVDVA== 17212
+X0hPU1Q= 17213
+IFRyYWRl 17214
+IHBsdWdpbnM= 17215
+IGJyZWFzdA== 17216
+dWxzZQ== 17217
+IGNyZWF0dXJl 17218
+44GZ 17219
+IFdp 17220
+IHN1cHBsaWVk 17221
+Y29sbA== 17222
+ISgi 17223
+IGZ1Y2tpbmc= 17224
+IENocm9tZQ== 17225
+IFVyaQ== 17226
+IE5hdGlvbg== 17227
+IHZlcnRpY2Vz 17228
+VEhF 17229
+IE9yaWdpbmFs 17230
+b25kZQ== 17231
+IHNoYXJw 17232
+IGNvb2tpbmc= 17233
+IHsvKg== 17234
+IFBzeWNo 17235
+IEhvbGx5d29vZA== 17236
+PSRf 17237
+LkRvY2s= 17238
+IGdlcg== 17239
+IGJvbmU= 17240
+X2Nvbm4= 17241
+X3NlYw== 17242
+eXNpY3M= 17243
+ID0i 17244
+U2Fs 17245
+c2Y= 17246
+IGRlZXBseQ== 17247
+YW5nbGVz 17248
+VGVybQ== 17249
+YmVsbA== 17250
+IFF1aWNr 17251
+ZW5lcmF0aW9u 17252
+YWRpb0J1dHRvbg== 17253
+5YWl 17254
+fQ0KDQoNCg== 17255
+IGNhcHRpb24= 17256
+bGM= 17257
+IEVM 17258
+LFs= 17259
+ICAgICAgDQo= 17260
+cmV0dA== 17261
+KG1ldGhvZA== 17262
+IEZsYXNo 17263
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA== 17264
+V0lTRQ== 17265
+LnNjYWxl 17266
+IHJvdWdobHk= 17267
+X2NoaWxk 17268
+bWVtb3J5 17269
+YXlpbmc= 17270
+IGluaXRpYWxpemVk 17271
+aW5hdG9y 17272
+0LDRgA== 17273
+IHNjYWxhcg== 17274
+IEhv 17275
+YWlyZXM= 17276
+KGNvbHVtbg== 17277
+LmRlc3Ryb3k= 17278
+UEFDSw== 17279
+IGhlbQ== 17280
+YW5nZWw= 17281
+X1NVQg== 17282
+LnF1 17283
+INc= 17284
+REVGQVVMVA== 17285
+cG9zaXRvcmllcw== 17286
+IExlbmd0aA== 17287
+IEZhc3Q= 17288
+IHNpZ25hbHM= 17289
+IC8vJA== 17290
+cmllcnM= 17291
+IGR1bW15 17292
+QU5Z 17293
+IHBlcnNvbmFsaXR5 17294
+IGFncmljdWx0 17295
+UGxhdGZvcm0= 17296
+RVJP 17297
+IFRyYQ== 17298
+IGVub3Jt 17299
+CVc= 17300
+QWN0aW9uUmVzdWx0 17301
+IGF2ZXI= 17302
+W3N0cg== 17303
+ICctLQ== 17304
+LlNwcmludGY= 17305
+IGRlYnV0 17306
+INGH 17307
+aGV4 17308
+X3V0aWxz 17309
+IHBi 17310
+VUlUYWJsZVZpZXc= 17311
+IHp1cg== 17312
+LmVuY29kZQ== 17313
+IHZhZw== 17314
+LmVycm9ycw== 17315
+0L7QvQ== 17316
+IG1y 17317
+IEF3YXJk 17318
+IGNwdQ== 17319
+IHByZXNzZWQ= 17320
+J2VzdA== 17321
+IEZlc3RpdmFs 17322
+J1Q= 17323
+IGFr 17324
+cmVzb2x2ZQ== 17325
+Lm1l 17326
+IG5pYw== 17327
+IGdlbnJl 17328
+IGF0dHJpYg== 17329
+IE1vb24= 17330
+IGFycml2ZQ== 17331
+IERhdGluZw== 17332
+IHRt 17333
+LkNvbmZpZ3VyYXRpb24= 17334
+LnJlZA== 17335
+IGdsbQ== 17336
+IHN0YXRpb25z 17337
+c3dpdGNo 17338
+IHRpZWQ= 17339
+5Lq6 17340
+IC8+PC8= 17341
+UXVhbnRpdHk= 17342
+cXVpcnk= 17343
+X3RhYg== 17344
+IGFsZw== 17345
+VG9hc3Q= 17346
+cmVzaXpl 17347
+cXVlc3Rpb25z 17348
+c2NoZW1h 17349
+TGl0ZXJhbA== 17350
+KGVudGl0eQ== 17351
+TkVDVElPTg== 17352
+Y2hhbmdlZA== 17353
+X0ZJRUxE 17354
+X0hFSUdIVA== 17355
+IG9yZ2FuaWM= 17356
+UFJF 17357
+IENhdA== 17358
+LkRyYXc= 17359
+RXM= 17360
+IGxvdWQ= 17361
+ICAgICAgICAJ 17362
+IEthdA== 17363
+IGhlYXA= 17364
+4oCcSXQ= 17365
+ZXRy 17366
+IHVubGlrZWx5 17367
+ZXJhbHM= 17368
+L2F1dGg= 17369
+dG9kbw== 17370
+UGxhY2U= 17371
+UG9zdGVk 17372
+Q29tbWVudHM= 17373
+IFRlY2g= 17374
+IEZpbmFsbHk= 17375
+ZWdyYXRpb24= 17376
+IG1pbmltYWw= 17377
+IEZpbGVz 17378
+IHRhbWI= 17379
+66Gc 17380
+IFJlbGVhc2U= 17381
+LnJlc2l6ZQ== 17382
+IM8= 17383
+Y29sbGVjdA== 17384
+PXA= 17385
+IExJQUJMRQ== 17386
+IHByb2R1Y2luZw== 17387
+LXdyYXBwZXI= 17388
+IHNpbmdsZXM= 17389
+IE5CQQ== 17390
+b3Jy 17391
+ZXJlbg== 17392
+LmFkZEFjdGlvbg== 17393
+IHRoZXNpcw== 17394
+ZG4= 17395
+UFRZ 17396
+LmRlcw== 17397
+IGJhY3Rlcg== 17398
+IEV4cHJlc3M= 17399
+ICopCg== 17400
+5ZE= 17401
+L2FkbWlu 17402
+c2Vjb25kcw== 17403
+5Yqf 17404
+dXNzaW9u 17405
+YWJldGg= 17406
+IENvbXB1dGVy 17407
+IHJ1bGluZw== 17408
+KCIuLi8= 17409
+LkdFVA== 17410
+IE1lZGFs 17411
+aXRpb25hbGx5 17412
+Y29tbWl0 17413
+Zm9jdXM= 17414
+X0xFVkVM 17415
+aW5kYQ== 17416
+RmFjdA== 17417
+PW5w 17418
+PSIiPgo= 17419
+IHN1YnNlcXVlbnQ= 17420
+cG9zYWJsZQ== 17421
+LWZsdWlk 17422
+IHRob3JvdWdo 17423
+IHB1YmxpY2x5 17424
+YXB0ZXJz 17425
+IFdpbHNvbg== 17426
+X1BSRQ== 17427
+eWFyZA== 17428
+5Lw= 17429
+CWlu 17430
+IHJldmVycw== 17431
+IGJ1bGxldA== 17432
+Y3JpYmVk 17433
+bmVzb3Rh 17434
+ICgkXw== 17435
+YW5ub24= 17436
+Y3Vyc29y 17437
+IGNsb3RoaW5n 17438
+IE11bHRp 17439
+Oics 17440
+IHZlc3M= 17441
+b3JkaW5hdG9y 17442
+IGVpbmVt 17443
+Q2Fubm90 17444
+IGFybWVk 17445
+CVY= 17446
+5LiK 17447
+LkZsYXQ= 17448
+IFNlcA== 17449
+IFN1YmplY3Q= 17450
+X2ZvbnQ= 17451
+IGNoYXJhY3RlcmlzdGljcw== 17452
+RG9uZQ== 17453
+ZWxu 17454
+IyMjIyMjIyMjIyMj 17455
+UE9T 17456
+IGRlbnNpdHk= 17457
+IFBsYXRmb3Jt 17458
+LWl0ZW1z 17459
+IG92ZXJz 17460
+IHB1c2hpbmc= 17461
+56Q= 17462
+LkNvbm5lY3Rpb24= 17463
+X3Rlcm0= 17464
+IGluaXRpYWxpemF0aW9u 17465
+X19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX18= 17466
+56w= 17467
+LmRvY3VtZW50 17468
+bGVzaA== 17469
+CWRvY3VtZW50 17470
+IFBpbg== 17471
+w6dh 17472
+IGRlZmluaXRpb25z 17473
+LlBhdGg= 17474
+X1dSSVRF 17475
+IAkK 17476
+Pz4KCg== 17477
+IHRlcnJpYmxl 17478
+YmVhbg== 17479
+aWNrZXRz 17480
+IFNW 17481
+QnV5 17482
+KHRhc2s= 17483
+IHJlZ2ltZQ== 17484
+Z29vZ2xl 17485
+IGNyYWNr 17486
+LnZpc2l0 17487
+TlVN 17488
+ZW5lcmd5 17489
+IHN0cnVjaw== 17490
+X3NhbXBsZQ== 17491
+LnBheWxvYWQ= 17492
+IHJldmlz 17493
+IFNjZW5l 17494
+IHBn 17495
+IGJyZWFrZmFzdA== 17496
+VVJSRU5U 17497
+LmNoYXJBdA== 17498
+X2V4Y2VwdGlvbg== 17499
+IEFudG9u 17500
+IGd1aWRlbGluZXM= 17501
+IGV4aGF1c3Q= 17502
+IEZpbmFuY2lhbA== 17503
+IGluZGVudA== 17504
+IGRlc2t0b3A= 17505
+SGlkZGVu 17506
+RmFpbHVyZQ== 17507
+IHByaW5jaXBsZQ== 17508
+IGl2 17509
+IHNla3M= 17510
+bmV0d29yaw== 17511
+IG51bWJlck9m 17512
+IEFsYmVydA== 17513
+CWxvbmc= 17514
+LC4= 17515
+IHplcm9z 17516
+ZmFkZQ== 17517
+IFR5cA== 17518
+IFRlcm0= 17519
+IEFydHM= 17520
+LkFwcGxpY2F0aW9u 17521
+IGJlaGFsZg== 17522
+5oi3 17523
+IG1lcmU= 17524
+KGAkew== 17525
+IGF3YXJlbmVzcw== 17526
+ZWxwZXJz 17527
+ZmxpeA== 17528
+IHdlaWdo 17529
+IGVzdGltYXRlcw== 17530
+LmNoaWxk 17531
+L08= 17532
+IEJpdG1hcA== 17533
+LmJvdHRvbQ== 17534
+ICoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq 17535
+RXhwZWN0 17536
+ZW50bw== 17537
+IEZvcnVt 17538
+dmVyYWw= 17539
+IGphaWw= 17540
+IGFiaWxpdGllcw== 17541
+IEhPTEQ= 17542
+IENpdA== 17543
+IGR5bmFt 17544
+IGdyYXk= 17545
+CQkJCQkJCQkJCQkJCQ== 17546
+Lm5leHRJbnQ= 17547
+YW50bHk= 17548
+IEFSSVNJTkc= 17549
+KHByaXZhdGU= 17550
+IHJlamVjdGVk 17551
+IE5pYw== 17552
+IGxlYXRoZXI= 17553
+PXsK 17554
+YWx5dGljcw== 17555
+dGhldGlj 17556
+LlRvcA== 17557
+LlBhZ2U= 17558
+PXtg 17559
+IDsNCg== 17560
+ZGVwdGg= 17561
+bWFubg== 17562
+V0Q= 17563
+IFNvbQ== 17564
+LlJpZ2h0 17565
+ICl9Cg== 17566
+IHRyYWl0 17567
+w5c= 17568
+aWFj 17569
+IHJ2 17570
+U2FtcGxl 17571
+LlhtbA== 17572
+b3BwZWQ= 17573
+INGE 17574
+bGlzdHM= 17575
+IHRlYXI= 17576
+aXZlcnNhcnk= 17577
+LmNvbGxlY3Rpb24= 17578
+IENvbnN0aXR1dGlvbg== 17579
+IEh0dHBSZXNwb25zZQ== 17580
+IGJyaWxs 17581
+IFByb20= 17582
+aG92ZXI= 17583
+IE1pYW1p 17584
+IGFyZ3Vl 17585
+X2Zsb2F0 17586
+IOOC 17587
+IG5hdA== 17588
+IFRhbA== 17589
+IGludGVncmF0aW9u 17590
+KGN1cg== 17591
+IHJlbW92aW5n 17592
+IGNvZWZm 17593
+IFRob3VnaA== 17594
+IGZvcmVjYXN0 17595
+IFZlZ2Fz 17596
+U2l0ZQ== 17597
+IHRyYWI= 17598
+IEhlbnJ5 17599
+LWk= 17600
+IGludm9sdmVz 17601
+QlQ= 17602
+IHNsbw== 17603
+SW52b2tl 17604
+IGx1Y2t5 17605
+cmF0 17606
+ID8K 17607
+IGhhbmRsZWQ= 17608
+KGZk 17609
+Y29udGVudHM= 17610
+IE9GRg== 17611
+UkY= 17612
+IHN0eQ== 17613
+IE1vdG9y 17614
+dGVyeQ== 17615
+dGF4 17616
+TUFQ 17617
+IE1ycw== 17618
+IHBob25lcw== 17619
+IFVJVmlldw== 17620
+IikpKTsK 17621
+KGRldg== 17622
+IElyaXNo 17623
+IHdz 17624
+REk= 17625
+X09GRlNFVA== 17626
+IEV2ZW50cw== 17627
+IHN0YWdlcw== 17628
+IH0vLw== 17629
+IGhhYmVu 17630
+U1RBTkNF 17631
+IFNpbg== 17632
+IE1vbmV5 17633
+KHRvcA== 17634
+IGFwcG9pbnRtZW50 17635
+VkVSU0lPTg== 17636
+bWV0YWRhdGE= 17637
+X2NvbW1lbnQ= 17638
+IGNvbGxlYWd1ZXM= 17639
+bWFwcw== 17640
+4pg= 17641
+CgkK 17642
+KGFs 17643
+X3JlcQ== 17644
+IGZ1dA== 17645
+IGFyY2hpdGVjdHVyZQ== 17646
+IFdIRVRIRVI= 17647
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA== 17648
+X3NjcmVlbg== 17649
+IHN0eWxlVXJscw== 17650
+IG1vbnN0ZXI= 17651
+LnVw 17652
+cGhpYQ== 17653
+IHByb2Nlc3Nvcg== 17654
+IFRlcnI= 17655
+PScs 17656
+IE1hbnVmYWN0 17657
+IE5U 17658
+a2Vs 17659
+aWJlcm4= 17660
+CWZpbGU= 17661
+QWxp 17662
+cmllbnRhdGlvbg== 17663
+IC8vIQ== 17664
+YXBvcmU= 17665
+YW5lb3Vz 17666
+IENyZWF0 17667
+Zm9sZGVy 17668
+IGhheQ== 17669
+U3VwcHJlc3M= 17670
+KGxlZnQ= 17671
+IGV1cm8= 17672
+IGRpc2NsYWltZXI= 17673
+dXN0cnk= 17674
+c2hpcHM= 17675
+X2Zk 17676
+IEZh 17677
+X2luc2VydA== 17678
+IHJvbA== 17679
+aWZ0aW5n 17680
+IENvbW1lbnRz 17681
+X2Jy 17682
+IGxvc3Nlcw== 17683
+IEFkZGVk 17684
+Y2hhcmc= 17685
+INC/0L4= 17686
+X3N5c3RlbQ== 17687
+IFNvbWV0aW1lcw== 17688
+IFNwYWlu 17689
+KGdyb3Vw 17690
+aWFsaXM= 17691
+IGRvbGxhcg== 17692
+IEFyZ3M= 17693
+cXVpcmVz 17694
+IFRlbg== 17695
+LnNjc3M= 17696
+IHN1cnZpdmU= 17697
+dXNhZ2U= 17698
+IGp1bg== 17699
+aW1pdGVy 17700
+77yBCgo= 17701
+IGZpZnRo 17702
+dG9nZ2xl 17703
+IGRlY2xpbmU= 17704
+KCQi 17705
+KExvbmc= 17706
+aW5nZQ== 17707
+IHBpbG90 17708
+LWxpZ2h0 17709
+LXJhZGl1cw== 17710
+IHBvZGNhc3Q= 17711
+IG5hdHVyYWxseQ== 17712
+UGFnZXM= 17713
+5Li6 17714
+IERlc3BpdGU= 17715
+IGxpZ2h0aW5n 17716
+IGNyYXRl 17717
+IEJpbmFyeQ== 17718
+IHJlZHVjaW5n 17719
+IGVsZWc= 17720
+IE1vdXNl 17721
+IFRlc3RCZWQ= 17722
+IGJlZm9yZUVhY2g= 17723
+X0FSUkFZ 17724
+UmVkaXJlY3Q= 17725
+IGZsb29k 17726
+IHNoaXBz 17727
+IGVsZWN0cmljaXR5 17728
+KSoo 17729
+6rg= 17730
+IFZpZXQ= 17731
+aGVybw== 17732
+IGRpYQ== 17733
+IEtlbnQ= 17734
+aGVhcnQ= 17735
+IHRocmVhdHM= 17736
+X2FjYw== 17737
+IHN5bWJvbHM= 17738
+aXNjaGVu 17739
+X2luc3Q= 17740
+Q3JpdGVyaW9u 17741
+IFRJTQ== 17742
+LkhlaWdodA== 17743
+IOKAmQ== 17744
+KCk7CgoK 17745
+UHJvZHVjdHM= 17746
+X1NQ 17747
+IEN5 17748
+IGRlcGVuZGVudA== 17749
+ZXN0ZQ== 17750
+IGRhdG9z 17751
+ZGl0 17752
+0LDQsg== 17753
+SUdOQUw= 17754
+IGxlc3Nvbg== 17755
+Ij4n 17756
+IENvdmVy 17757
+IEhvcGU= 17758
+IFRpbWVy 17759
+IGRhZA== 17760
+dmlkZXJz 17761
+IFBob3Q= 17762
+Lz8= 17763
+cm9weQ== 17764
+b21pbmc= 17765
+YXNpb24= 17766
+IFwo 17767
+IEVU 17768
+IFJlYWRpbmc= 17769
+IGVwaXNvZGVz 17770
+bG0= 17771
+ZWNoYQ== 17772
+IG5ldXJv 17773
+IGhhcm1vbg== 17774
+IGxpYmVyYWw= 17775
+LWluZA== 17776
+REFUQQ== 17777
+IGV2ZXJ5ZGF5 17778
+IGRpdmlkZWQ= 17779
+IEFjdGl2ZVJlY29yZA== 17780
+ZmlndXJl 17781
+VUE= 17782
+5Lk= 17783
+cmllbmRseQ== 17784
+dGVjaA== 17785
+LmdhbWVPYmplY3Q= 17786
+0LjRgtGM 17787
+IG1vb24= 17788
+ZnRpbWU= 17789
+IG5vY2g= 17790
+IFRPUlQ= 17791
+IFZN 17792
+LmluaXRpYWw= 17793
+KGNoaWxk 17794
+IG11c2ljYWw= 17795
+IG9j 17796
+YmFz 17797
+IEhheQ== 17798
+X2xvbmc= 17799
+IG1lbXNldA== 17800
+aWxleQ== 17801
+YWRlbHBoaWE= 17802
+U1Y= 17803
+cm9hdA== 17804
+X3R4 17805
+IGxvbg== 17806
+IG5nT25Jbml0 17807
+YnA= 17808
+IEdvbGRlbg== 17809
+QUNIRQ== 17810
+IHdvcnJpZWQ= 17811
+YXpp 17812
+RWFy 17813
+VGFrZQ== 17814
+KGZw 17815
+YnVyZ2g= 17816
+X0RhdGE= 17817
+Z3Jlcw== 17818
+IE9udA== 17819
+cHVz 17820
+IHRyYW5zcGFyZW50 17821
+IHBvY2tldA== 17822
+IHJhbQ== 17823
+aWdyYXRpb25z 17824
+Lg0KDQo= 17825
+IFso 17826
+IGFkb3B0ZWQ= 17827
+IHJlcG9ydGVkbHk= 17828
+IERyZWFt 17829
+IH0pKTsK 17830
+bG9zaW5n 17831
+IHRlZXRo 17832
+IEJvb2tz 17833
+Iiwm 17834
+ZW5ueQ== 17835
+TEVNRU5U 17836
+IGdlbA== 17837
+IFBsYW50 17838
+IeKAnQ== 17839
+Lmhvc3Q= 17840
+IFJlcGx5 17841
+cmVuZ3Ro 17842
+IHJlY29nbml0aW9u 17843
+IH19Pgo= 17844
+TEE= 17845
+IG1pcnJvcg== 17846
+IGFzc2lzdGFudA== 17847
+KGRldmljZQ== 17848
+IHNwaXJpdHVhbA== 17849
+YnVpbGRlcg== 17850
+wqc= 17851
+IG91dHI= 17852
+IHR0 17853
+IFBFUg== 17854
+IHJhZGljYWw= 17855
+TWV0aG9kcw== 17856
+IHBhY2U= 17857
+dWR5 17858
+IGd1dA== 17859
+IEdyZWVr 17860
+IG5vbmF0b21pYw== 17861
+IFBhcGVy 17862
+X0dQSU8= 17863
+IG9ic3Q= 17864
+LkFk 17865
+dmlyb25tZW50cw== 17866
+IFNvdg== 17867
+KGNvbg== 17868
+IFRyYW5zYWN0aW9u 17869
+LmFzc2lnbg== 17870
+CWNhdGNo 17871
+ZWx0ZXI= 17872
+IGJpdGNvaW4= 17873
+X0dS 17874
+IDw/PQ== 17875
+X2xhbmc= 17876
+7J2E 17877
+QnJvd3Nlcg== 17878
+IGNvbnNpZGVyYXRpb24= 17879
+IEV4ZWN1dGl2ZQ== 17880
+6Ze0 17881
+O1w= 17882
+IEpTT05PYmplY3Q= 17883
+IEJlbGw= 17884
+IHNwb2tlc21hbg== 17885
+fn5+fn5+fn4= 17886
+b2NrZXk= 17887
+IEdybw== 17888
+IEF3 17889
+Q29uc3RyYWludA== 17890
+IFByYWN0 17891
+IEV2ZXI= 17892
+cHJpbQ== 17893
+OnsK 17894
+X2lt 17895
+UE4= 17896
+TWlsbGlz 17897
+VU1FTlQ= 17898
+IGJhZ3M= 17899
+w6Vy 17900
+QU5ORUw= 17901
+IGlj 17902
+IHRyYW5zcG9ydGF0aW9u 17903
+IFNhdWRp 17904
+aGFuZGxlcg== 17905
+RHJhZw== 17906
+IGhk 17907
+Y29sbGFwc2U= 17908
+X1BI 17909
+IHVi 17910
+QVJN 17911
+IEFQUA== 17912
+IHRvbmlnaHQ= 17913
+IGRpbmluZw== 17914
+UmVjb2du 17915
+IGJj 17916
+aWd0 17917
+KG51bWJlcg== 17918
+Qm9vdA== 17919
+IGVsc2V3aGVyZQ== 17920
+IGFycm93 17921
+YXJnYQ== 17922
+IGRlbGljaW91cw== 17923
+IFNO 17924
+V1I= 17925
+VmFsaWRhdGU= 17926
+IFF1YWxpdHk= 17927
+KGVtYWls 17928
+IGludGVycHJl 17929
+aWdhdGlvbg== 17930
+IGNob2NvbGF0ZQ== 17931
+X2VkZ2U= 17932
+IHN0b3Bz 17933
+OmZ1bmN0aW9u 17934
+KXw= 17935
+IHRoYWk= 17936
+IExvYWRpbmc= 17937
+U3Rvcnk= 17938
+VHJpZ2dlcg== 17939
+YnJhbmNo 17940
+IHRk 17941
+ZW50aWNhdGVk 17942
+IGFkdmVudHVyZQ== 17943
+IGJsb2NrY2hhaW4= 17944
+RXZlbnRIYW5kbGVy 17945
+IHNxcnQ= 17946
+LlBy 17947
+TG5n 17948
+QmVjYXVzZQ== 17949
+IHZpdg== 17950
+IG9jZWFu 17951
+eWx2YW5pYQ== 17952
+0LDRgQ== 17953
+IFV0aWxz 17954
+IGRlc3Blcg== 17955
+IGRlZmVy 17956
+CXJlcXVpcmU= 17957
+aGw= 17958
+UmVxdWlyZQ== 17959
+XVw= 17960
+IGRpcmVjdGlvbnM= 17961
+X3Jlc291cmNl 17962
+IHN1YnNjcmliZQ== 17963
+IMO6 17964
+IEhlYXJ0 17965
+ZXN0cw== 17966
+LXN1Yg== 17967
+IFJo 17968
+Zm9yRWFjaA== 17969
+IGRlbGlnaHQ= 17970
+IHRlcnJpdG9yeQ== 17971
+LmNvbmN1cnJlbnQ= 17972
+ICgr 17973
+anBn 17974
+IHByZXBhcmF0aW9u 17975
+IHJvdW5kZWQ= 17976
+Q29tbQ== 17977
+LkxlZnQ= 17978
+IG9waW5pb25z 17979
+IE5hdmlnYXRpb24= 17980
+KGZpcnN0 17981
+Iiwk 17982
+IGhpcmU= 17983
+IGRldGVjdGlvbg== 17984
+LmdldEVsZW1lbnRz 17985
+IGVwcw== 17986
+IHNrbGVhcm4= 17987
+IGN6 17988
+IC8+DQo= 17989
+bWV0aWM= 17990
+IHRyYW5zZm9ybWF0aW9u 17991
+5Y+3 17992
+IHJnYg== 17993
+aXN0cmlidXRpb25z 17994
+IGltcGxpY2l0 17995
+L2lu 17996
+ZGVzdGluYXRpb24= 17997
+0LDRgtGM 17998
+WmVybw== 17999
+IHVuc2V0 18000
+LndoZXJl 18001
+Lmdv 18002
+IGZvcm1hdGlvbg== 18003
+IGRlY2xhcmF0aW9u 18004
+KCkNCg0K 18005
+IEV4cGw= 18006
+CQkJICA= 18007
+L3Bybw== 18008
+LkpTT04= 18009
+IGRlc2s= 18010
+LnN1YnN0cg== 18011
+Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0t 18012
+bHlu 18013
+cHNvbg== 18014
+ZGlzYWJsZQ== 18015
+IEZ1bmM= 18016
+CUFzc2VydA== 18017
+IE1BUks= 18018
+IGRlZmVhdA== 18019
+IGJsaW5k 18020
+IGNvbnN0YW50cw== 18021
+LmhlYWRlcnM= 18022
+VUlMRA== 18023
+IGV4cGVuc2Vz 18024
+UGl4ZWw= 18025
+IGhy 18026
+IGZlbA== 18027
+IEVhc3Rlcm4= 18028
+X2RlbA== 18029
+IEN1Yg== 18030
+IHNx 18031
+CWNvdW50 18032
+IERpcmVjdG9yeQ== 18033
+IGV4Y2x1cw== 18034
+IGhpc3Rvcmlj 18035
+IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQ== 18036
+IGNvbXBvc2l0aW9u 18037
+IGRhdGFHcmlkVmlldw== 18038
+IEJ1cm4= 18039
+IEJD 18040
+TWFzdGVy 18041
+IHNwYXdu 18042
+IGJlYXJpbmc= 18043
+LlNldEFjdGl2ZQ== 18044
+aWxv 18045
+IGdhbGxlcnk= 18046
+IGZvdW5kZWQ= 18047
+IGF2YWlsYWJpbGl0eQ== 18048
+LnNxcnQ= 18049
+IHBlcw== 18050
+IERPTQ== 18051
+bWF0ZQ== 18052
+T2N0 18053
+IG1hdGNoZWQ= 18054
+aXRpdml0eQ== 18055
+IGFueGlldHk= 18056
+LnByaWNl 18057
+IEluc3RhbnQ= 18058
+7Io= 18059
+IHR1dA== 18060
+SUNvbGxlY3Rpb24= 18061
+LnNoYXJlZA== 18062
+X3NxbA== 18063
+dGJs 18064
+bGlicmFyeQ== 18065
+X2Rlc3Ryb3k= 18066
+ZXJtYWw= 18067
+IE5vdGVz 18068
+IEVpbg== 18069
+IHNvdXRoZXJu 18070
+IE9USEVSV0lTRQ== 18071
+IG1hY3Jv 18072
+Lmxvd2Vy 18073
+Y2xz 18074
+Q29udGVudFZpZXc= 18075
+Lmxpbms= 18076
+Y29uc3RhbnQ= 18077
+IEJlcw== 18078
+IHNvbWVib2R5 18079
+bmI= 18080
+Ij57 18081
+KGxvY2Fs 18082
+Li4uLi4= 18083
+IE51bGw= 18084
+bXg= 18085
+IMOn 18086
+IHBhdXNl 18087
+LS0tLS0tLS0tLS0= 18088
+X01P 18089
+IENN 18090
+IGZvcktleQ== 18091
+IERWRA== 18092
+IGNsb3Nlc3Q= 18093
+X0RFVklDRQ== 18094
+IFN0ZXBoZW4= 18095
+IEJCQw== 18096
+IFRyYXZlbA== 18097
+UGFpbnQ= 18098
+IFJlc3VsdHM= 18099
+IFJ1bGU= 18100
+IHRw 18101
+IHJhdGluZ3M= 18102
+Y2lu 18103
+Y3N2 18104
+Pi8= 18105
+IEdPUA== 18106
+bGFk 18107
+INGA 18108
+IGluZGV4UGF0aA== 18109
+bWF0cml4 18110
+PWY= 18111
+YXJzZWQ= 18112
+IH0pOw== 18113
+IENvcw== 18114
+IFNjb3Jl 18115
+IHRhaw== 18116
+IEVTUA== 18117
+IElOQw== 18118
+X05VTEw= 18119
+LWZsZXg= 18120
+Il1b 18121
+aW50bw== 18122
+ZWxhbmQ= 18123
+QXV0aG9yaXphdGlvbg== 18124
+X0ZBTFNF 18125
+IGdhdGU= 18126
+IHZpZA== 18127
+aXN0ZW50 18128
+VElNRQ== 18129
+IHJld3JpdGU= 18130
+IHRpZQ== 18131
+IGFyY2hpdmU= 18132
+LmV2ZW50cw== 18133
+LmdldFBhcmFtZXRlcg== 18134
+IFBlcm1pc3Npb24= 18135
+IHByb2dyYW1tZQ== 18136
+IOk= 18137
+anVk 18138
+IGNhbWVyYXM= 18139
+KHN5cw== 18140
+IFN5cmlhbg== 18141
+IGltcHJvdmVtZW50cw== 18142
+IGhpcA== 18143
+IHN1aWNpZGU= 18144
+IHNjaG9sYXI= 18145
+IGNvbXBhdGlibGU= 18146
+cmVtb3Rl 18147
+LmRvd24= 18148
+RlVOQ1RJT04= 18149
+IG1hbmFnaW5n 18150
+IFVJS2l0 18151
+LnJhdw== 18152
+Pj4+Pg== 18153
+IGRlbWFuZHM= 18154
+ZWxsaXRl 18155
+IGRlbnQ= 18156
+IE1pY3Jv 18157
+5Y+W 18158
+J11bJA== 18159
+IElF 18160
+aW1lbnNpb24= 18161
+IHRyZW0= 18162
+IGdhaW5lZA== 18163
+LndpdGg= 18164
+Lm9r 18165
+aG91 18166
+IGJvbQ== 18167
+YW1wYWlnbg== 18168
+IGpvaW5pbmc= 18169
+ZmlzaA== 18170
+IGFkZFN1YnZpZXc= 18171
+IG5vcnRoZXJu 18172
+LmNvcg== 18173
+b3JldA== 18174
+RGll 18175
+aW5pc2g= 18176
+X2NvbXA= 18177
+IGF0dGVuZGVk 18178
+IGNvbGxhcHNl 18179
+IFNT 18180
+YWNlbnQ= 18181
+X0VRVUFM 18182
+IERlZXA= 18183
+UkdC 18184
+CXRlc3Q= 18185
+b2x2ZXM= 18186
+dXNldA== 18187
+VW5pdHlFbmdpbmU= 18188
+d3JpdGVy 18189
+UmVzb2x2ZXI= 18190
+LCU= 18191
+aWZmZXJlbmNl 18192
+X3JlbW92ZQ== 18193
+b25kYQ== 18194
+IGZlbW1l 18195
+ZGVjb2Rl 18196
+QnJhbmNo 18197
+IGZsdXNo 18198
+IGlubm92YXRpdmU= 18199
+VGVzdHM= 18200
+IFsnLi8= 18201
+IGNvdmVyaW5n 18202
+LmFkbWlu 18203
+dWx0aXBhcnQ= 18204
+KGxhbWJkYQ== 18205
+77u/bmFtZXNwYWNl 18206
+IFNwb3J0 18207
+ICEo 18208
+YWNsZXM= 18209
+IGRlcHJlc3Npb24= 18210
+IEtvbmc= 18211
+IHBlcnQ= 18212
+IENvbm4= 18213
+IE90aGVyd2lzZQ== 18214
+L2hvbWU= 18215
+c3VwcG9ydGVk 18216
+IHBpbms= 18217
+IGludml0ZWQ= 18218
+w7Fvcw== 18219
+X2VuYWJsZWQ= 18220
+IC0K 18221
+Rlc= 18222
+ZW5lcnM= 18223
+IE1Z 18224
+IHN1Z2dlc3Rpb25z 18225
+Q2FudmFz 18226
+IGZlcg== 18227
+IE1hcmtldGluZw== 18228
+QFRlc3Q= 18229
+dW50dQ== 18230
+IFZlbg== 18231
+IENvdQ== 18232
+aXZhbHM= 18233
+RG9uYWxk 18234
+bGltaXRlZA== 18235
+CQkJCQkJCg== 18236
+IGFuYWx5c3Q= 18237
+KGVudHJ5 18238
+IHJlcHJlc2VudGF0aXZl 18239
+X2F0dHJpYnV0ZXM= 18240
+IGZ1cg== 18241
+LmhpZGU= 18242
+cmVzcA== 18243
+YWRvcmVz 18244
+cmlkZXM= 18245
+IEpvc2g= 18246
+cm9ib3Q= 18247
+IE5BVA== 18248
+IHNlc3Nv 18249
+IGludGVncmF0ZWQ= 18250
+OnRydWU= 18251
+cGFydHM= 18252
+IHN0dXBpZA== 18253
+OmV2ZW50 18254
+QGVuZHNlY3Rpb24= 18255
+IHB1 18256
+LlRhYmxl 18257
+IFlpaQ== 18258
+YDsKCg== 18259
+IGNsYW5n 18260
+PSIiPg== 18261
+ZW5nYW4= 18262
+X3BhcmFtZXRlcnM= 18263
+LmludGVybmFs 18264
+IE1vZGVybg== 18265
+IG1ldHJpYw== 18266
+IHNlbWk= 18267
+PXt7Cg== 18268
+LmFtYXpvbg== 18269
+IEJC 18270
+YWludHk= 18271
+dmlld3BvcnQ= 18272
+IHN0YXJ0QWN0aXZpdHk= 18273
+ZGlzcGF0Y2g= 18274
+KioqKio= 18275
+IGZsYXY= 18276
+aWZmZXJlbnQ= 18277
+W3RoaXM= 18278
+IHN0YWtl 18279
+IGFyZ3VlZA== 18280
+dmlvdXNseQ== 18281
+Lndvcms= 18282
+IE9haw== 18283
+T2xk 18284
+KGFzeW5j 18285
+bm90ZXM= 18286
+IGZsaXA= 18287
+IGRpc2Fn 18288
+IFRF 18289
+CWVycm9y 18290
+PCc= 18291
+IMK7Cgo= 18292
+IGZpbHRlcmVk 18293
+IE1hY2g= 18294
+IGh1bmc= 18295
+X2R1bXA= 18296
+X3NhbXBsZXM= 18297
+LWRpc21pc3M= 18298
+IHJheQ== 18299
+SW1wbGVtZW50ZWQ= 18300
+REs= 18301
+IGplZA== 18302
+IGJyZWFrcw== 18303
+IGZpdHM= 18304
+Lmdy 18305
+IFplcm8= 18306
+b3Jv 18307
+IGVxdWFsbHk= 18308
+ICdb 18309
+IGNvbmNlcm5pbmc= 18310
+PG1ldGE= 18311
+cGxheWVycw== 18312
+X1BPUw== 18313
+X3NpbQ== 18314
+SmFu 18315
+IHlvdXJz 18316
+CU4= 18317
+IHNwaXI= 18318
+IGNoYW1waW9u 18319
+IEFuYWx5c2lz 18320
+YXBh 18321
+IE5TTG9n 18322
+X2xpbmVz 18323
+w7Fh 18324
+CQkgICAgICAg 18325
+LlNj 18326
+UmVw 18327
+ZXRyb2l0 18328
+dXJhYmxl 18329
+TUlU 18330
+Y29tcGF0 18331
+b3duZWQ= 18332
+X2luZGljZXM= 18333
+XSwNCg== 18334
+IGRpc2NvdmVyeQ== 18335
+IERpZWdv 18336
+b2Jp 18337
+LkluZGV4 18338
+IHRyZW5kcw== 18339
+UExBWQ== 18340
+Lm5v 18341
+IGxlbnM= 18342
+X2NmZw== 18343
+IGFubm8= 18344
+YWdhbg== 18345
+IHBlcmlvZHM= 18346
+dGVybXM= 18347
+eXo= 18348
+IGF0dGFja2Vk 18349
+aWJyYXRpb24= 18350
+UEVDSUFM 18351
+X2dyYWQ= 18352
+IGFjY29yZGFuY2U= 18353
+LlJlYWRMaW5l 18354
+LmRldmljZQ== 18355
+cml4 18356
+LmNvbnRhaW5lcg== 18357
+bWF5 18358
+ZXJjaXNl 18359
+IEx1 18360
+IHJn 18361
+INGB0YI= 18362
+CQkKCQkK 18363
+KHVu 18364
+VEVSTkFM 18365
+IGxlc3NvbnM= 18366
+IGFsbGVnYXRpb25z 18367
+IHRyYW5zbWlzc2lvbg== 18368
+LlJlZg== 18369
+TW9iaWxl 18370
+IFRvdXJuYW1lbnQ= 18371
+IE51dA== 18372
+IEdh 18373
+IENhcGl0YWw= 18374
+ZGVmaW5pdGlvbg== 18375
+LWV4cA== 18376
+Y2xlYW4= 18377
+IGZhbnRhc3k= 18378
+IGVuaGFuY2U= 18379
+ZW50ZW5jZQ== 18380
+J106Cg== 18381
+YWNrZXRz 18382
+IGNlbGVicmF0ZQ== 18383
+QCIs 18384
+U2VyaWFsaXplRmllbGQ= 18385
+IGFycmF5cw== 18386
+dGI= 18387
+CXN0 18388
+W2Fzc2VtYmx5 18389
+KHJlZw== 18390
+LmNhdGVnb3J5 18391
+IGltcHJvdmluZw== 18392
+IHNhbG9wZQ== 18393
+Qnl0ZUFycmF5 18394
+T3JpZ2luYWw= 18395
+IFt7Cg== 18396
+5Zue 18397
+IENsaW4= 18398
+b2VuaXg= 18399
+IFNhbXN1bmc= 18400
+IG1haW50YWluZWQ= 18401
+IGFnZW5kYQ== 18402
+ZmFpbA== 18403
+IHByZXNlbnRz 18404
+IHRpbWluZw== 18405
+Lm1hcms= 18406
+Jz48 18407
+IHByb21vdA== 18408
+IGluY2w= 18409
+X29ubHk= 18410
+66W8 18411
+IEF0dG9ybmV5 18412
+LWRhdGU= 18413
+IGxhbmRzY2FwZQ== 18414
+IGZ1 18415
+U1k= 18416
+LnByb3A= 18417
+IEFycg== 18418
+cGFn 18419
+UGFyYWxsZWxHcm91cA== 18420
+JzoNCg== 18421
+IGxvZ3M= 18422
+YXVuY2g= 18423
+dW5jaQ== 18424
+bmFtYQ== 18425
+VGFibGVDZWxs 18426
+aXNzdWVz 18427
+Lns= 18428
+ZWN1cml0eQ== 18429
+X2V4ZWM= 18430
+b2xkcw== 18431
+IGhvc3Rz 18432
+IHByb3Rv 18433
+X2ltcG9ydA== 18434
+X3NvcnQ= 18435
+IEJvdw== 18436
+IE5vcm1hbA== 18437
+IEZhcm0= 18438
+LmNyZWF0ZVBhcmFsbGVsR3JvdXA= 18439
+Um90YXRpb24= 18440
+LmVycg== 18441
+IHBsZWFzZWQ= 18442
+aXRhZ2U= 18443
+Lldo 18444
+CQkgICAg 18445
+TVI= 18446
+IE1PUkU= 18447
+IE5hdHVyYWw= 18448
+X3RyYW5zZm9ybQ== 18449
+QkFTRQ== 18450
+ZW5lcmFs 18451
+dXRkb3du 18452
+LmNvbW1vbnM= 18453
+V1Q= 18454
+IGFhbg== 18455
+LlJlc3VsdA== 18456
+ZG9n 18457
+IGNsaWNraW5n 18458
+KSwKCg== 18459
+I2xpbmU= 18460
+T3BlcmF0b3I= 18461
+IGNpdg== 18462
+IG1lcmc= 18463
+b2J1Zg== 18464
+bmd0aGVu 18465
+IFt7 18466
+IGNhbmNlbGw= 18467
+dHJpZ2dlcg== 18468
+Ljo= 18469
+V09SSw== 18470
+ZGVjbGFyZQ== 18471
+IGRlY3JlYXNl 18472
+xZtjaQ== 18473
+bG9vbQ== 18474
+Lk5vbmU= 18475
+IE1J 18476
+IEphc29u 18477
+IGhlYWx0aGNhcmU= 18478
+aWFtb25k 18479
+c3lsdmFuaWE= 18480
+Kng= 18481
+IFJh 18482
+W2I= 18483
+IHByaW50aW5n 18484
+cGhhYmV0 18485
+IExhYm91cg== 18486
+b3BwZXI= 18487
+IHppam4= 18488
+LXRhcmdldA== 18489
+X0ZVTkNUSU9O 18490
+IG9jdA== 18491
+0LXQvdC40Y8= 18492
+5Zyo 18493
+IHdlc3Rlcm4= 18494
+IGNvbXB1dGVycw== 18495
+IFJFVA== 18496
+SGFzaE1hcA== 18497
+W1N0cmluZw== 18498
+Z2V0VmFsdWU= 18499
+X0RBVEU= 18500
+Lk5leHQ= 18501
+IEZpZg== 18502
+w6ls 18503
+aWNrZWQ= 18504
+5o4= 18505
+LU1N 18506
+IHsKCgo= 18507
+IGNvbnRhY3Rz 18508
+IGRpZ2l0cw== 18509
+UHJvZHU= 18510
+IHVudXN1YWw= 18511
+IHJhcGlkbHk= 18512
+dHVyZXM= 18513
+IGFuZ3J5 18514
+Y2FuY2Vs 18515
+eHh4eA== 18516
+X3BhcnNlcg== 18517
+aWRpdHk= 18518
+X1BSRUZJWA== 18519
+IG1laHI= 18520
+IHJhcmVseQ== 18521
+ZXRoZQ== 18522
+b3Blcw== 18523
+ICUu 18524
+d29ya3M= 18525
+IHRoZXRh 18526
+IGNvbnRyaWJ1dGlvbg== 18527
+IFRvbnk= 18528
+IHNxdWFk 18529
+0LDQuQ== 18530
+IMOubg== 18531
+dGhlcmU= 18532
+b3V0ZWQ= 18533
+CXE= 18534
+mYI= 18535
+Z29vZA== 18536
+TEk= 18537
+6aG1 18538
+IExpdmluZw== 18539
+aXphYmV0aA== 18540
+IGt0 18541
+IERhbGxhcw== 18542
+XV0sCg== 18543
+IC8+Cgo= 18544
+IHJhaXNpbmc= 18545
+L3JvdXRlcg== 18546
+X2dhbWU= 18547
+IENVUg== 18548
+emVucw== 18549
+LmVz 18550
+IGZvbnRXZWlnaHQ= 18551
+KGZ1bmM= 18552
+bm90aWZpY2F0aW9u 18553
+ICcuLi8uLi8uLi8= 18554
+IGJsYW1l 18555
+44CCCgoKCg== 18556
+YW5jbw== 18557
+SWRlbnRpdHk= 18558
+Zm9sbG93 18559
+IGFydHM= 18560
+eHM= 18561
+IG9mZmljaWFsbHk= 18562
+IFN0dWRpbw== 18563
+IHJlY29tbWVuZGF0aW9ucw== 18564
+IGxvY2FsZQ== 18565
+IGFtYXRldXI= 18566
+IEVuYWJsZQ== 18567
+IGNhcHM= 18568
+LkVuZA== 18569
+LWFkZA== 18570
+X2dzaGFyZWQ= 18571
+IENU 18572
+Rm9yY2U= 18573
+CiAgICAgICAgICAgIAo= 18574
+IG9yYW5nZQ== 18575
+IGxw 18576
+IGFuc3dlcmVk 18577
+LkdyaWQ= 18578
+IGR1YWw= 18579
+IHN0cmF0ZWdpYw== 18580
+IG5vYm9keQ== 18581
+IGZhdGFs 18582
+X2VzdA== 18583
+KGVs 18584
+IOyg 18585
+IEJ1ZGQ= 18586
+QUlU 18587
+X2ZhY3Rvcg== 18588
+LW9uZQ== 18589
+IEhBVkU= 18590
+Ig0KDQo= 18591
+UHJvZg== 18592
+IMOkcg== 18593
+c3RyaW5ncw== 18594
+IGRpcnR5 18595
+IEZhY2U= 18596
+IEJlZ2lu 18597
+IEJ1cw== 18598
+IHdpcw== 18599
+5a2X 18600
+IHNwZWFrZXI= 18601
+IGNhcnJpZXI= 18602
+IE9t 18603
+IGhhZG4= 18604
+QWxsb3c= 18605
+OjpfXw== 18606
+IHZlcmI= 18607
+IENvbXBsZXRl 18608
+IEVhc3k= 18609
+IGJpbGxz 18610
+ICAKCg== 18611
+VmVydGljYWw= 18612
+IHByb24= 18613
+IERlZmluZQ== 18614
+IGxvb2t1cA== 18615
+dmFyaWFibGVz 18616
+IHBhbmRhcw== 18617
+dW1lcw== 18618
+IGlubm9j 18619
+IHNldFVw 18620
+IENoYW1waW9uc2hpcA== 18621
+YXJ0aXN0 18622
+IENUeXBl 18623
+Rm91bmRhdGlvbg== 18624
+4LmI 18625
+IFNldHVw 18626
+IHJlY2lwZXM= 18627
+IFVJQ29sb3I= 18628
+IEZpZ2h0 18629
+IGF1dGhvcml6ZWQ= 18630
+X2NsaWNr 18631
+X3N1Y2Nlc3M= 18632
+YW5nYW4= 18633
+IE1vdW50YWlu 18634
+IERvY3Rvcg== 18635
+IGVnZw== 18636
+IE1lZGljaW5l 18637
+Y2xlcw== 18638
+YC4K 18639
+W2ludA== 18640
+ZGFzaGJvYXJk 18641
+IEFwcHJv 18642
+LWRy 18643
+IHByb2R1Y2Vz 18644
+IHJlbnRhbA== 18645
+IHJlbG9hZA== 18646
+IGFycml2YWw= 18647
+c3BvdA== 18648
+IHVuZGVydA== 18649
+IGVxdWlwcGVk 18650
+IHByb3ZlZA== 18651
+IGNlbnRlcnM= 18652
+IGRlZmluZXM= 18653
+YWxzbw== 18654
+IG9wYWNpdHk= 18655
+IFVuZm9ydHVuYXRlbHk= 18656
+IElsbGlub2lz 18657
+INC90LU= 18658
+IFRlbXBsZQ== 18659
+IFRyYWls 18660
+IEtlbGx5 18661
+IG1lYXN1cmVtZW50 18662
+IHNlcGFyYXRlZA== 18663
+LWNpcmNsZQ== 18664
+SGV5 18665
+IFJFQUQ= 18666
+aWdpdHM= 18667
+IGli 18668
+IE1PRA== 18669
+YXR0ZXJ5 18670
+0LDQtw== 18671
+IHZlbmQ= 18672
+0LXQvdGC 18673
+IEh0dHBDbGllbnQ= 18674
+c2FmZQ== 18675
+X0FTUw== 18676
+aWNpdA== 18677
+IENvbnN0cnVjdA== 18678
+IENsbw== 18679
+IFNpeA== 18680
+X1RPS0VO 18681
+KGJsb2Nr 18682
+IHdhcm5lZA== 18683
+Lyoh 18684
+ITwv 18685
+YWNhZGVz 18686
+IG1hcmc= 18687
+ZXJhc2U= 18688
+IGRpc3BsYXlz 18689
+aXN0cmF0b3I= 18690
+Z2V0cw== 18691
+IGd0aw== 18692
+X0dFTkVS 18693
+bmVk 18694
+XyU= 18695
+IGZhdm91cml0ZQ== 18696
+IEJydQ== 18697
+IMOh 18698
+c2Vjb25kYXJ5 18699
+IG1hc3Q= 18700
+IHNvcGg= 18701
+IFNhZmV0eQ== 18702
+aGFyZA== 18703
+cmFpc2U= 18704
+IEV4Y2hhbmdl 18705
+IGNvbnRlbXBvcmFyeQ== 18706
+IGRyZWFtcw== 18707
+IHRlbA== 18708
+IG5laWdoYm9ycw== 18709
+IEhvbHk= 18710
+Lm1lYW4= 18711
+ZW1pdA== 18712
+IE1lc3M= 18713
+Q2FzdA== 18714
+TkVDVA== 18715
+cGx1Z2lucw== 18716
+IHJi 18717
+d3I= 18718
+IGh1Yg== 18719
+IFN0dWRpZXM= 18720
+IHBvc3Nlc3Npb24= 18721
+JCgnLg== 18722
+ZW5zaXRpdmU= 18723
+IGFkZENyaXRlcmlvbg== 18724
+X18u 18725
+IGV4cGVydGlzZQ== 18726
+QXJjaA== 18727
+IGN1Yg== 18728
+ZXJ2ZXJz 18729
+IHBhcnRpY2xlcw== 18730
+dWFy 18731
+IGJvdW5kYXJ5 18732
+KScs 18733
+YWpv 18734
+IHByZWY= 18735
+OmA= 18736
+IGhhcmFzcw== 18737
+aXU= 18738
+IHJlYWNoaW5n 18739
+IG1lZw== 18740
+IHpv 18741
+KElE 18742
+X3JlcXVpcmVk 18743
+IHPDqQ== 18744
+IFF1ZXVl 18745
+QU8= 18746
+IGdlbQ== 18747
+cHRvbg== 18748
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg 18749
+aWpr 18750
+KHsNCg== 18751
+IGNvbGxpc2lvbg== 18752
+IFVrcmFpbmU= 18753
+IC0qLQo= 18754
+TlNJbnRlZ2Vy 18755
+X0JMT0NL 18756
+IFRleHR1cmU= 18757
+IGRlY2xpbmVk 18758
+bmFu 18759
+X3dhaXQ= 18760
+IHBvbGl0aWNpYW5z 18761
+IGNvaW5z 18762
+IGRlcml2 18763
+aGVscGVy 18764
+IFBlcmhhcHM= 18765
+LnJlY3Q= 18766
+IFBvbHk= 18767
+YWJsaW5n 18768
+fS8+Cg== 18769
+IGlubm92YXRpb24= 18770
+XyI= 18771
+ICk7DQoNCg== 18772
+IHNwb3Rz 18773
+IGNob29zaW5n 18774
+LmNz 18775
+IGZsZXhpYmxl 18776
+VUludA== 18777
+IHNjcmF0Y2g= 18778
+LWFs 18779
+IGZlc3RpdmFs 18780
+IG91dHN0YW5kaW5n 18781
+PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09 18782
+TWVhbg== 18783
+IE9yZWdvbg== 18784
+c3ltYm9s 18785
+LmFjY291bnQ= 18786
+ZG5leQ== 18787
+Jycn 18788
+ISIs 18789
+IHBhcnRpY2xl 18790
+w4M= 18791
+W01BWA== 18792
+SVZFUg== 18793
+RVJFTkNF 18794
+TlNNdXRhYmxl 18795
+IENvbHVtYmlh 18796
+XwoK 18797
+LmZy 18798
+IGNvZ24= 18799
+VlI= 18800
+IE1ldGhvZHM= 18801
+IE1hZGU= 18802
+IEJS 18803
+IEVsc2U= 18804
+IGVnZ3M= 18805
+IHN3aW5n 18806
+IEludg== 18807
+IGRpc2Vhc2Vz 18808
+IGZpcm1z 18809
+IGxlbW1h 18810
+fWApOwo= 18811
+bGluZ3M= 18812
+IGd5bQ== 18813
+dW1pbnVt 18814
+LlRyaW0= 18815
+TWVt 18816
+IGNyaXRpY2lzbQ== 18817
+aWJlcm5hdGU= 18818
+X1RY 18819
+aW9uaQ== 18820
+IGd1aWRhbmNl 18821
+IHJlcGVhdGVkbHk= 18822
+IHN1cHBsaWVy 18823
+IHBhaW50aW5n 18824
+LkZyYWdtZW50 18825
+ZWRFeGNlcHRpb24= 18826
+IHdpcmluZw== 18827
+IGNvdXJ0cw== 18828
+V0VC 18829
+5pyJ 18830
+XC4= 18831
+aWxsYW5jZQ== 18832
+IGJyb3dz 18833
+IFBhdHRlcm4= 18834
+UExJQ0FUSU9O 18835
+IFN1bW1lcg== 18836
+Q2hhaW4= 18837
+IGN1dGU= 18838
+bWVyY2lhbA== 18839
+IGRpbA== 18840
+IEZyYW5rbGlu 18841
+CWdsb2JhbA== 18842
+SU5DTFVESU5H 18843
+aGlzdG9yeQ== 18844
+IGxzdA== 18845
+UXQ= 18846
+U0RM 18847
+YWxpYQ== 18848
+aWVyZQ== 18849
+KC4uLg== 18850
+CWNpbg== 18851
+aWZmcw== 18852
+dmVsb3Bl 18853
+IFJvb3Q= 18854
+Y2x1c3Rlcg== 18855
+VXNlck5hbWU= 18856
+aWduZQ== 18857
+PFM= 18858
+IGZlc3Q= 18859
+IGluZGljYXRpbmc= 18860
+a2VlcGVy 18861
+IGNhZGE= 18862
+w6ln 18863
+Y29uc2lu 18864
+IEdC 18865
+IGxi 18866
+ZW1vbnk= 18867
+LWljb25z 18868
+X2RvYw== 18869
+QWN0b3I= 18870
+ZWxlbQ== 18871
+LkRlbGV0ZQ== 18872
+IGluZmVjdGlvbg== 18873
+IFByaXZhY3k= 18874
+IGdyZWF0bHk= 18875
+IFBvcw== 18876
+IFRyZWF0 18877
+Rmxvdw== 18878
+IGF0dHJhY3RpdmU= 18879
+IE1hcmM= 18880
+c3Vkbw== 18881
+dGVzeQ== 18882
+LWFu 18883
+YWJhbWE= 18884
+IFdvdWxk 18885
+IHN1Y2s= 18886
+aW5kZXhQYXRo 18887
+IEV0 18888
+VGltZXM= 18889
+IGNsdWJz 18890
+X2Fzc29j 18891
+IGFjcXVpcmVk 18892
+KCI6 18893
+IGludGVuc2U= 18894
+Lm1hcHM= 18895
+RXhwZWN0ZWQ= 18896
+VG9nZ2xl 18897
+IGF5 18898
+IGxpZmVzdHlsZQ== 18899
+LWNhbGxlZA== 18900
+IFNub3c= 18901
+Vm9sdW1l 18902
+IGNhbm5hYmlz 18903
+IERpcmVjdGlvbg== 18904
+IExpbWl0ZWQ= 18905
+LXNwZWNpZmlj 18906
+IGRvd250b3du 18907
+L2ljb25z 18908
+IHJldmVu 18909
+TGVn 18910
+PW51bGw= 18911
+S2V5Ym9hcmQ= 18912
+JykpLg== 18913
+ICIiOw0K 18914
+IGF0dGl0dWRl 18915
+Lm5hdmlnYXRl 18916
+LWVycm9y 18917
+QU1QTEU= 18918
+IEpheQ== 18919
+dnI= 18920
+Y293 18921
+LmNvbXBpbGU= 18922
+IG1lbW9yaWVz 18923
+X21hcms= 18924
+IE1pbm5lc290YQ== 18925
+IGtvc3Rlbg== 18926
+IHByb2JhYmlsaXR5 18927
+d2FybmluZw== 18928
+IGdlbmV0aWM= 18929
+Rml4dHVyZQ== 18930
+IEhhc2hTZXQ= 18931
+Tm9tYnJl 18932
+X21vbnRo 18933
+xrA= 18934
+LXN0YXJ0 18935
+eHlnZW4= 18936
+CWZ0 18937
+aWFnbm9zdGljcw== 18938
+IE1hdHRoZXc= 18939
+IGNvbmNlcHRz 18940
+IGNvbnN0cg== 18941
+LlN0YXRl 18942
+0LjQvQ== 18943
+Tm92 18944
+zrE= 18945
+IFBhbmVs 18946
+5Liq 18947
+Y29tcGFyZQ== 18948
+PigpCg== 18949
+IGFwcGx5aW5n 18950
+IHByb21pc2Vk 18951
+IG94 18952
+bmNpYQ== 18953
+IFZhbGlkYXRpb24= 18954
+b3J0cw== 18955
+X2N1cg== 18956
+ZWxlY3Q= 18957
+ZXll 18958
+KERhdGE= 18959
+IHJlcG9ydGVy 18960
+IEJ1ZmY= 18961
+IHNy 18962
+ICI7 18963
+aWNreQ== 18964
+IHRlbXBvcg== 18965
+U04= 18966
+IHJlc2lkZW50 18967
+cGlyZXM= 18968
+eXNpY2Fs 18969
+IGVuZG9yc2U= 18970
+IFNvbmc= 18971
+aXNFbXB0eQ== 18972
+bGVldA== 18973
+X3V0aWw= 18974
+IGRpc3Rpbmd1 18975
+IFRhbGs= 18976
+IE1vdA== 18977
+KGRlZmF1bHQ= 18978
+LkFyZw== 18979
+Z29yaXRobXM= 18980
+X3dvcmRz 18981
+aW1tZXI= 18982
+X3Jlc2V0 18983
+ZmFtaWx5 18984
+V1c= 18985
+IHNhdmluZ3M= 18986
+IOKAnQ== 18987
+X2VuYWJsZQ== 18988
+c2lkZWJhcg== 18989
+UnVubmluZw== 18990
+IGFsaQ== 18991
+IHRlc3RpbQ== 18992
+IHdhcm5pbmdz 18993
+IENoZW0= 18994
+IEV4aXQ= 18995
+IGZvdW5kZXI= 18996
+cGVjdG9y 18997
+IHJt 18998
+X2RhdGFzZXQ= 18999
+IERhcw== 19000
+IGhhbg== 19001
+R2V0dHk= 19002
+w6Fs 19003
+IG55 19004
+IHBvdmVydHk= 19005
+IHJlc3VsdGVk 19006
+LmJ5 19007
+IFZpc2l0 19008
+IG9idGFpbmluZw== 19009
+LycuJA== 19010
+ICAgICAgICAgICAK 19011
+c2hhbGw= 19012
+X0xFRlQ= 19013
+VUlJbWFnZQ== 19014
+X05hbWU= 19015
+aGF2ZQ== 19016
+IE5vYg== 19017
+bHI= 19018
+LWZvb3Rlcg== 19019
+IG5ha2Vk 19020
+IEdhcmRlbg== 19021
+XEZhY2FkZXM= 19022
+IGdyYWR1YXRl 19023
+IGZyYW5jaGlzZQ== 19024
+cGxhbmU= 19025
+IGNvbnRyaWJ1dGlvbnM= 19026
+IHN0cmluZ1dpdGg= 19027
+IGNyeXB0bw== 19028
+IG1vdmVtZW50cw== 19029
+YXRoZXJz 19030
+IGxpZmV0aW1l 19031
+IGNvbW11bmljYXRl 19032
+amFy 19033
+IEZyYWdtZW50 19034
+X0lG 19035
+IE5hdnk= 19036
+IEZpZ3VyZQ== 19037
+IHNpbXVsYXRpb24= 19038
+X3N0b3A= 19039
+IHJlcG9ydGVycw== 19040
+IHZlcnN1cw== 19041
+YWph 19042
+IM6x 19043
+IGdvdmVybm9y 19044
+TGlzdEl0ZW0= 19045
+IHNlYWxlZA== 19046
+LkJhY2tncm91bmQ= 19047
+ZWRp 19048
+YXNoaW5n 19049
+IGxpcA== 19050
+IElo 19051
+bWVyZ2U= 19052
+IG5lYw== 19053
+ZWxvY2l0eQ== 19054
+QVRFRw== 19055
+IHNlZWRz 19056
+IGZsb2F0aW5n 19057
+X0ZB 19058
+d2Fsaw== 19059
+CXVzZXI= 19060
+X2RlcHRo 19061
+IHdhZ2U= 19062
+QGFwcA== 19063
+Tmls 19064
+KFsi 19065
+KHZlY3Rvcg== 19066
+IHNlY3JldGFyeQ== 19067
+IGpQYW5lbA== 19068
+dmV6 19069
+wqDCoMKgwqA= 19070
+ZGlyZWN0aW9u 19071
+IEVQ 19072
+IGh1bnQ= 19073
+SnNvblByb3BlcnR5 19074
+IFBPUlQ= 19075
+XSIs 19076
+0LDQvw== 19077
+IEZvcmVpZ24= 19078
+cGFuaWM= 19079
+IHRyaWFscw== 19080
+IEFsZQ== 19081
+IHJ1cmFs 19082
+LXZhbHVl 19083
+YXV0aG9yaXplZA== 19084
+IFNjb3RsYW5k 19085
+LmRyb3A= 19086
+IE1U 19087
+57E= 19088
+cm93dGg= 19089
+RmlsZVBhdGg= 19090
+IHJlY2FsbA== 19091
+aWZsZQ== 19092
+IGNlbA== 19093
+IFNFTEVDVA== 19094
+a24= 19095
+X2Nhc2U= 19096
+IGNyb3A= 19097
+c3VyZQ== 19098
+cG90 19099
+SUNT 19100
+IHN0ZW0= 19101
+IGluZHVzdHJpZXM= 19102
+UHV0 19103
+IGFiZXI= 19104
+cm9hZGNhc3Q= 19105
+SWNvbnM= 19106
+KSIpCg== 19107
+5oiQ5Yqf 19108
+Z3Vp 19109
+IGFzc3VtZWQ= 19110
+IHJ4 19111
+RUE= 19112
+6Kc= 19113
+RUxM 19114
+IGRvc2U= 19115
+IGluZQ== 19116
+IGRlZXBlcg== 19117
+bGlkZXI= 19118
+IG9yZGluYXJ5 19119
+IGdvbGY= 19120
+X0lNQUdF 19121
+IE5BTUU= 19122
+KG1vZHVsZQ== 19123
+IGF0b20= 19124
+IGJlbHQ= 19125
+IG9mZmljZXM= 19126
+YmV0YQ== 19127
+IHBoaWxvc29waHk= 19128
+KEpTT04= 19129
+LWZpZWxk 19130
+IGludHJvZHVjZQ== 19131
+IGNvbnZlbmllbmNl 19132
+b3B0aW0= 19133
+PiIK 19134
+YXRoeQ== 19135
+IGVtcGxveWVy 19136
+cXVhdGU= 19137
+IGVkaXRlZA== 19138
+QXJndW1lbnRz 19139
+IE5hdGlvbnM= 19140
+X18p 19141
+IG5vc2U= 19142
+IFNhbXBsZQ== 19143
+JykKCgo= 19144
+IGNha2U= 19145
+LmdldEF0dHJpYnV0ZQ== 19146
+SEQ= 19147
+TW9kaWZpZWQ= 19148
+IHByZWRpY3RlZA== 19149
+xYQ= 19150
+YW5pZQ== 19151
+U29ycnk= 19152
+KGRvYw== 19153
+d2luZA== 19154
+aWV2ZQ== 19155
+IHByb3Zpc2lvbnM= 19156
+QVRFUg== 19157
+T1RF 19158
+TVk= 19159
+LkF1dG93aXJlZA== 19160
+IEJhdGg= 19161
+LkJvb2xlYW4= 19162
+IGJhY2tlbmQ= 19163
+Lk1vdXNl 19164
+YXRlcmFs 19165
+cGFwZXI= 19166
+Q29uc3Q= 19167
+IFZS 19168
+X2VudGl0eQ== 19169
+X0NUUkw= 19170
+IFByb3RlY3Rpb24= 19171
+IEdN 19172
+IFN0dWR5 19173
+IHNvdXA= 19174
+b3RpbWU= 19175
+J3VzZQ== 19176
+XSI= 19177
+L3VzZXJz 19178
+YXVn 19179
+IEhvbmc= 19180
+X25vcm0= 19181
+44Go 19182
+IHNlY3Jl 19183
+KEJ1aWxk 19184
+IENvbnRyYWN0 19185
+b2xhcw== 19186
+IHNhdWNl 19187
+IGFnZ3Jlc3NpdmU= 19188
+IHJhY2lhbA== 19189
+Y2hhcmFjdGVy 19190
+QEA= 19191
+IGNvbXBpbGU= 19192
+IFZvaWQ= 19193
+X3JlbQ== 19194
+X21lbW9yeQ== 19195
+a2s= 19196
+IG1pYw== 19197
+U2FtZQ== 19198
+VXRpbGl0eQ== 19199
+IEh0bWw= 19200
+IFhtbA== 19201
+UmVhZHk= 19202
+IGdhbGw= 19203
+IGFsbGVnZWRseQ== 19204
+CQkJCSAgIA== 19205
+IE1ldGFs 19206
+IFBlcnNvbmFs 19207
+IGJvcmRlclJhZGl1cw== 19208
+cnhqcw== 19209
+b2JqZWN0cw== 19210
+IHdhbnRpbmc= 19211
+IGJvd2w= 19212
+dmVuZG9y 19213
+b2Zmc2V0b2Y= 19214
+IFJz 19215
+IFJhdGluZw== 19216
+IHJhbGx5 19217
+X05PREU= 19218
+IE1peA== 19219
+IGFkdmVydGlz 19220
+IG5hcnJhdGl2ZQ== 19221
+c2Fs 19222
+IG1j 19223
+U0Vycm9y 19224
+IGZpbmdlcnM= 19225
+IGFjY29tcGFueQ== 19226
+IHRpcmVk 19227
+IHN0cmlkZQ== 19228
+IGd1aQ== 19229
+ZWxpc3Q= 19230
+TG9jYWxl 19231
+IHJlbGVhc2Vz 19232
+aWtpbmc= 19233
+IGFuZ2Vy 19234
+KSkpCgo= 19235
+YWxsZXN0 19236
+U3VtbWFyeQ== 19237
+KE8= 19238
+KGZvcg== 19239
+IGJhc2tldGJhbGw= 19240
+IHJvYWRz 19241
+IEluc3RhbGw= 19242
+IEZhYg== 19243
+aXRtYXA= 19244
+ICkpCg== 19245
+IGludGVyc2VjdGlvbg== 19246
+aWdoYm9y 19247
+IEJyeQ== 19248
+IEhFUkU= 19249
+U29mdHdhcmU= 19250
+ZWxmYXJl 19251
+YWNz 19252
+IHRyYWlsZXI= 19253
+LmdldENsYXNz 19254
+Y2hhcnM= 19255
+IHJlZ3VsYXRpb24= 19256
+IHJlZmVycw== 19257
+IGRlc3RydWN0aW9u 19258
+IGNvbnRpbnVvdXM= 19259
+IEF1c3Rpbg== 19260
+6aI= 19261
+YWthbg== 19262
+LndpbmRvdw== 19263
+IFRlbXBsYXRlcw== 19264
+IGFic2VuY2U= 19265
+Om4= 19266
+IGRpc29yZGVy 19267
+Zmxhc2g= 19268
+IGRlbGV0 19269
+Ym9hcmRz 19270
+ICAJ 19271
+Uk9Q 19272
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA== 19273
+IGFjcXU= 19274
+IGxhd3N1aXQ= 19275
+IFJldmlld3M= 19276
+IGdhcmFnZQ== 19277
+dGltZXI= 19278
+IGVq 19279
+IFJlY3RhbmdsZQ== 19280
+IGZsb3dlcnM= 19281
+aWxzdA== 19282
+IEluc3RhbmNl 19283
+U3VwZXI= 19284
+ZGV0 19285
+ZGlzcG9zaW5n 19286
+IEVT 19287
+IElD 19288
+dmVyZQ== 19289
+U2s= 19290
+X2NoYW5uZWxz 19291
+cHV0ZWQ= 19292
+L251bGw= 19293
+bm5lbg== 19294
+IEdhbGxlcnk= 19295
+X2dsb2JhbA== 19296
+QXV0aGVudGljYXRpb24= 19297
+IFJhbms= 19298
+IGJsb2NrZWQ= 19299
+IGNhbG0= 19300
+bWFya2V0 19301
+CXZhbA== 19302
+IGF1Zw== 19303
+cGVyaW9k 19304
+IENvbnN0YW50 19305
+ID8+Ij4K 19306
+IGxvYmJ5 19307
+cGFs 19308
+IHNpbms= 19309
+aWFo 19310
+0KE= 19311
+dXJuYW1l 19312
+IGNvbnZlcg== 19313
+IGludmVzdGlnYXRl 19314
+Q2hyaXN0 19315
+SHVi 19316
+IElORA== 19317
+IFBlZA== 19318
+dXJhcw== 19319
+CXVybA== 19320
+IFRybw== 19321
+IHByZWZlcmVuY2Vz 19322
+IGd1YXJhbnRlZWQ= 19323
+YAoK 19324
+IHBvcnRpb25z 19325
+IGV2YWx1 19326
+Jz48Lw== 19327
+KCl7Cgo= 19328
+ZW5jb2RlZA== 19329
+emlsbGE= 19330
+LkNsYXNz 19331
+ICpf 19332
+Xyc= 19333
+IHZpZXdlZA== 19334
+IFBoaWxhZGVscGhpYQ== 19335
+LnJvd3M= 19336
+QWRkZWQ= 19337
+IFRvdWNo 19338
+LmRlbGVnYXRl 19339
+cXVlZXpl 19340
+c2xpZGU= 19341
+IFNlbmlvcg== 19342
+KHRhZw== 19343
+IGludGVydmlld3M= 19344
+IHN1YQ== 19345
+YXRhcw== 19346
+QAoK 19347
+ZGlzdGFuY2U= 19348
+IHNlaW4= 19349
+bGF0ZXN0 19350
+IFByaW5jZQ== 19351
+IGx1eHVyeQ== 19352
+IHJlZnI= 19353
+IEtpdGNoZW4= 19354
+0YQ= 19355
+KGF0 19356
+RmluYWw= 19357
+w7xjaw== 19358
+X3plcm8= 19359
+IEFCQw== 19360
+IE1hbmNoZXN0ZXI= 19361
+IGNvdw== 19362
+Q09M 19363
+X05VTUJFUg== 19364
+Y2hhbmdlcw== 19365
+Z2VuZXJhdGU= 19366
+LlByaW50Zg== 19367
+c2hhcmU= 19368
+U3RvY2s= 19369
+IFBU 19370
+QW5pbQ== 19371
+YW5nYQ== 19372
+IGln 19373
+dXBsb2Fkcw== 19374
+IHBhY2tlZA== 19375
+IH1dOwo= 19376
+KHNlbmRlcg== 19377
+IFdpcmU= 19378
+aXNvbnM= 19379
+IHBsYXlvZmY= 19380
+XEU= 19381
+L1I= 19382
+IGhlYWRlZA== 19383
+QWxwaGE= 19384
+KG9yZGVy 19385
+IG9wcG9uZW50cw== 19386
+YWNrc29u 19387
+X21lbWJlcg== 19388
+VHVybg== 19389
+IFNvdmlldA== 19390
+7JeQ 19391
+YXVnZQ== 19392
+IGluY29taW5n 19393
+IGphaw== 19394
+LWdhbWU= 19395
+IE1hbGU= 19396
+IE1vbnRo 19397
+U3RhZ2U= 19398
+LmV4ZQ== 19399
+T3duUHJvcGVydHk= 19400
+LnNldEl0ZW0= 19401
+IGRj 19402
+5L2c 19403
+IGJydXQ= 19404
+IGF0dGVtcHRpbmc= 19405
+Lmxlbg== 19406
+IGp1ZGdtZW50 19407
+IHNhYg== 19408
+IGNhZA== 19409
+IEl0ZW1z 19410
+Y29tZm9ydA== 19411
+ZWxpemU= 19412
+L2xvZw== 19413
+IGVudHJlcHJlbmU= 19414
+IGNvbXBpbGVy 19415
+X3ZhbGlkYXRpb24= 19416
+cmV2aWV3 19417
+IHRleHRCb3g= 19418
+IGZyYWN0aW9u 19419
+IEJhbA== 19420
+PjsKCg== 19421
+LkF1dG9TY2FsZU1vZGU= 19422
+IGNhdHM= 19423
+IHJlZ2lzdHJ5 19424
+dWx1cw== 19425
+Rkk= 19426
+cGF5bG9hZA== 19427
+LXNlYXJjaA== 19428
+IHN0YXlpbmc= 19429
+YWNpb3Vz 19430
+RGVjb3JhdGlvbg== 19431
+UmV2aWV3 19432
+SW5m 19433
+S2VlcA== 19434
+aXRpcw== 19435
+LFN0cmluZw== 19436
+Q29vcmQ= 19437
+IHBlcm8= 19438
+U2V4 19439
+IEF0bGFudGE= 19440
+dWVzdGE= 19441
+QXJnYg== 19442
+Pio= 19443
+fV8= 19444
+Rm9vdGVy 19445
+IGVtcGxveWVk 19446
+X2JvdW5k 19447
+dmlkZQ== 19448
+LmZ1bmM= 19449
+JHNjb3Bl 19450
+IHNwbw== 19451
+IEFuYWw= 19452
+b3VuY2Vk 19453
+YXJvdW5k 19454
+IHJlc3RyaWN0aW9u 19455
+IHNob3Bz 19456
+5YA= 19457
+IExhdGlu 19458
+LWNvbA== 19459
+IGJhcmVseQ== 19460
+IEV1cm8= 19461
+RXI= 19462
+IGZhaXJl 19463
+X2Rpc3RhbmNl 19464
+X3VubG9jaw== 19465
+UXVvdGU= 19466
+SVZBVEU= 19467
+IOWI 19468
+IGFpbWVk 19469
+IFJldHJpZQ== 19470
+Lml0ZXI= 19471
+IHdyYXBwZWQ= 19472
+IGFncmVlbWVudHM= 19473
+c3RydW1lbnQ= 19474
+KHByb2R1Y3Q= 19475
+IHN0dWRpZWQ= 19476
+LnNldFZhbHVl 19477
+IHll 19478
+IENhY2hl 19479
+TUJPTA== 19480
+IHF1YXJ0ZXJiYWNr 19481
+IHN5bnRheA== 19482
+LmdldEVsZW1lbnRzQnk= 19483
+LnZlcnNpb24= 19484
+d2Vic2l0ZQ== 19485
+UnVubmVy 19486
+X3NpbmdsZQ== 19487
+YXRpdg== 19488
+IEFsdGVybg== 19489
+IEJlYXV0aWZ1bA== 19490
+cmlnaHRhcnJvdw== 19491
+IGRpdmVyc2l0eQ== 19492
+cGxhc2g= 19493
+KGNv 19494
+LkZpbGw= 19495
+IHR5cGluZw== 19496
+IGNsYXI= 19497
+SGl0 19498
+T08= 19499
+YWNjbw== 19500
+d29ydGg= 19501
+IHNjcmlwdHM= 19502
+IE11c2xpbXM= 19503
+IExM 19504
+ZXJ2aW5n 19505
+KGJvb2xlYW4= 19506
+IGJhc2ViYWxs 19507
+IENBTg== 19508
+TUFJTA== 19509
+ZGVwZW5k 19510
+IHJlc3BlY3RpdmU= 19511
+IGNvbnN0ZXhwcg== 19512
+Lio7Cgo= 19513
+J10pKQo= 19514
+IHlhcmQ= 19515
+IGlkZW50aWNhbA== 19516
+aWZlY3ljbGU= 19517
+VVNI 19518
+dXBpdGVy 19519
+LnZhbGlkYXRl 19520
+Y2xp 19521
+SVNURVI= 19522
+SW5kaWNhdG9y 19523
+RmFpbA== 19524
+IGRlbW9jcmFjeQ== 19525
+LnZhcg== 19526
+IHNhdGlzZmllZA== 19527
+LS0tLS0tLS0tLS0tLQ== 19528
+ZW5jZXI= 19529
+aG9y 19530
+IHJvdW5kcw== 19531
+REFP 19532
+b2E= 19533
+IGZsYXNr 19534
+PWM= 19535
+W10K 19536
+L2Rpc3Q= 19537
+IHBhcnRl 19538
+IGNvbmZpcm1hdGlvbg== 19539
+ZXJvbg== 19540
+YXdhcmU= 19541
+PD8+ 19542
+IGRlcGVuZGVuY2llcw== 19543
+IFZpZGVvcw== 19544
+LXJvdw== 19545
+ICoqLwo= 19546
+IG5vdQ== 19547
+IGhvdmVy 19548
+5p4= 19549
+IG5pbg== 19550
+IFVTRA== 19551
+TWFj 19552
+X0xvYWQ= 19553
+IG91dGNvbWVz 19554
+X3NvY2tldA== 19555
+IHF1ZXJpZXM= 19556
+d20= 19557
+IGhpdHRpbmc= 19558
+aW51eA== 19559
+TWljaA== 19560
+dWRnZQ== 19561
+QVRBQg== 19562
+IHZ1bG5lcmFibGU= 19563
+5L4= 19564
+IHBvcnRmb2xpbw== 19565
+OllFUw== 19566
+CW1hcA== 19567
+Qm91bmQ= 19568
+IGl0ZXJhdGlvbg== 19569
+aW5jZXNz 19570
+IGFjdG9ycw== 19571
+IFF1YWw= 19572
+X2NsZWFu 19573
+44CR44CQ 19574
+TVNH 19575
+R3JlZW4= 19576
+IE9mZmljZXI= 19577
+IHNtb2tpbmc= 19578
+Pics 19579
+IEZsbw== 19580
+Kys7 19581
+b2x5Z29u 19582
+IGJ1bGs= 19583
+IGRyYW1h 19584
+IGV4Y2VwdGlvbnM= 19585
+b3NlZA== 19586
+ICsNCg== 19587
+IGxlZ2FjeQ== 19588
+Q1Y= 19589
+IGNvbnRyaWJ1dGVk 19590
+IFRlcm1z 19591
+IGJ0 19592
+IHVudHVr 19593
+IGFsaWVu 19594
+PT09Cg== 19595
+CVZlY3Rvcg== 19596
+IGxz 19597
+T25saW5l 19598
+LmZhY2Vib29r 19599
+bnVtZXJpYw== 19600
+b2NrZXRz 19601
+QXV0 19602
+YnVyeQ== 19603
+LXJlZHV4 19604
+IFJlZGlzdHJpYnV0aW9ucw== 19605
+R0xPQkFMUw== 19606
+dXJyZW5jaWVz 19607
+IHRvbnM= 19608
+4oCZLA== 19609
+IMOq 19610
+KGNvbA== 19611
+IFN5bWJvbA== 19612
+IHN0YXllZA== 19613
+IE1M 19614
+IG11bmljaXA= 19615
+IHNleG8= 19616
+U2Vu 19617
+bnI= 19618
+IGdhaW5z 19619
+IHNob3J0bHk= 19620
+Lk1lbnU= 19621
+w70= 19622
+S05PV04= 19623
+IG9wZXJhdG9ycw== 19624
+LVY= 19625
+IFBhdHJpY2s= 19626
+L2FkZA== 19627
+X0NP 19628
+aXJhdGlvbg== 19629
+KHBvc3Q= 19630
+UG9zdHM= 19631
+L18= 19632
+IHBsdWc= 19633
+IGludGVsbGVjdHVhbA== 19634
+IG1ldGFi 19635
+IHByZWduYW5jeQ== 19636
+IFByZW1pZXI= 19637
+bm0= 19638
+IHByZWRpY3Rpb24= 19639
+IE1pbmlzdHJ5 19640
+VGhyZWU= 19641
+dmFsdWF0ZQ== 19642
+IE1pbmk= 19643
+YnU= 19644
+0L7Qtw== 19645
+PHVs 19646
+IGRk 19647
+b2x2aW5n 19648
+IEN1dA== 19649
+IHNjaGVt 19650
+LnRyYWlu 19651
+aXRhdGU= 19652
+IHJpY2U= 19653
+IGJpcmRz 19654
+44Gr 19655
+bWlkZGxl 19656
+c3RydWN0aW9ucw== 19657
+IG5lcnY= 19658
+YXF1ZQ== 19659
+IGZsdQ== 19660
+IHN1cnZpdmFs 19661
+IEdhbGF4eQ== 19662
+IEZhbnQ= 19663
+Lk9yZGVy 19664
+QXR0cmli 19665
+aXJ0cw== 19666
+w6lj 19667
+TW92aWU= 19668
+IGNvbmNl 19669
+cXVhcnRlcnM= 19670
+IG1vb2Q= 19671
+LkFkZFJhbmdl 19672
+IHJlc29sdmVk 19673
+44OI 19674
+IGJ1cm5pbmc= 19675
+CQkJCQ0K 19676
+IFdF 19677
+IGhvc3Rpbmc= 19678
+TEFC 19679
+IG1hbmFnZXJz 19680
+IHN0cmVuZ3RoZW4= 19681
+PGNvbnN0 19682
+IEZpcmViYXNl 19683
+b25lZA== 19684
+IEplYW4= 19685
+Jzwv 19686
+IDo9Cg== 19687
+YWxnb3JpdGht 19688
+IEFyYw== 19689
+IGZyb3plbg== 19690
+X2V2ZW50cw== 19691
+IG92ZXJzZQ== 19692
+Z29vZHM= 19693
+IGZhaXQ= 19694
+IHZpYWdyYQ== 19695
+b3Nlcw== 19696
+IGNvbXBpbGVk 19697
+IEF0aA== 19698
+IHN1YnN0YW5jZQ== 19699
+YW5pbWF0ZWQ= 19700
+UEY= 19701
+cHJldmlvdXM= 19702
+IHJvb3Rz 19703
+KGZpbHRlcg== 19704
+b2x1bWVz 19705
+IGludHJv 19706
+KGV2dA== 19707
+IEJhZw== 19708
+IERlZmluaXRpb24= 19709
+IEZlYXR1cmVz 19710
+QW5ub3RhdGlvbg== 19711
+IGF2Zw== 19712
+KHN1bQ== 19713
+UVVJUkU= 19714
+IHJlbmRlcmVy 19715
+IEZpeA== 19716
+LmRhdGV0aW1l 19717
+PWRldmljZQ== 19718
+U3Bl 19719
+Z2V0SW5zdGFuY2U= 19720
+IGV4dGVuc2lvbnM= 19721
+X25ldA== 19722
+IFBhcmxpYW1lbnQ= 19723
+IGNvbWlj 19724
+IFBpY2s= 19725
+YXJtYQ== 19726
+CW1vZGVs 19727
+IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0t 19728
+IG1lbmc= 19729
+bWFudWFs 19730
+YWRhcHRlcg== 19731
+fS0= 19732
+ZWRiYWNr 19733
+IGVsZWN0cmljYWw= 19734
+IENvdW50ZXI= 19735
+QXBwbGljYXRpb25Db250ZXh0 19736
+X2J5dGU= 19737
+KGJ5dGU= 19738
+IEF1dG9t 19739
+IHRlcnJvcmlzdA== 19740
+55A= 19741
+dGhyb3VnaA== 19742
+IGZpc2NhbA== 19743
+b25pbmc= 19744
+IHNwZWN0cnVt 19745
+IGJpdG1hcA== 19746
+IHNsZQ== 19747
+cHJvZA== 19748
+IGFnZWQ= 19749
+IGJlbmU= 19750
+IFNwaQ== 19751
+IGJyaWxsaWFudA== 19752
+IHN0YWJpbGl0eQ== 19753
+IGRpYWJldGVz 19754
+IGNvbmZpZ3VyZWQ= 19755
+Ym9uZQ== 19756
+b3VzZXM= 19757
+Lmdvb2dsZWFwaXM= 19758
+RkFDRQ== 19759
+IGluc3BpcmF0aW9u 19760
+IERldHJvaXQ= 19761
+ZW5jaA== 19762
+0YDRgw== 19763
+dmVoaWNsZQ== 19764
+U3RhdGlvbg== 19765
+IGhvbGVz 19766
+IGR1cmNo 19767
+Lk1lZGlh 19768
+IENOTg== 19769
+aW5uaW5n 19770
+IFBlbm5zeWx2YW5pYQ== 19771
+IGVtb3Rpb24= 19772
+U2VjcmV0 19773
+w6FyaW8= 19774
+IFJhdGU= 19775
+RGVwdGg= 19776
+IG1vZGVz 19777
+KGlkeA== 19778
+IGhlcw== 19779
+IGdyZXk= 19780
+U3RhbmRhcmQ= 19781
+UXVlc3Q= 19782
+YnV5 19783
+c3Vy 19784
+IFRyYWNr 19785
+b21t 19786
+Lmds 19787
+IChc 19788
+dHdv 19789
+X0lP 19790
+b3NleA== 19791
+X3JvbGU= 19792
+56S6 19793
+cm91dGVz 19794
+U2hvcA== 19795
+IEFTQw== 19796
+IG1lbWNweQ== 19797
+ZGlyZWN0 19798
+ICoKCg== 19799
+IEJN 19800
+IFBvcg== 19801
+X2hpc3Rvcnk= 19802
+IFJlc3BvbnNlRW50aXR5 19803
+LnNldEZvbnQ= 19804
+IGVuZ2FnZW1lbnQ= 19805
+LGg= 19806
+IFdvcmRQcmVzcw== 19807
+ZmVjaGE= 19808
+IGVudHJhbmNl 19809
+RGVzcGl0ZQ== 19810
+SURFTlQ= 19811
+IHNhbml0 19812
+IEdlbmVyYXRl 19813
+KCIiLA== 19814
+X3ZpZGVv 19815
+U3RyYXRlZ3k= 19816
+X29r 19817
+IHRpZXM= 19818
+IGxvZ2ljYWw= 19819
+IEJyb24= 19820
+KEZpbGU= 19821
+IE1vaA== 19822
+LlNwbGl0 19823
+LlRyeQ== 19824
+IEhpbmQ= 19825
+IHNjb3Jpbmc= 19826
+IGFwcHJvYWNoZXM= 19827
+IGZsb3Vy 19828
+VlJU 19829
+VVNUT00= 19830
+c2NyaXB0cw== 19831
+IEVwaXNvZGU= 19832
+IEFtYg== 19833
+X09S 19834
+IGZyYXVlbg== 19835
+IHVubGlrZQ== 19836
+IHJpZGluZw== 19837
+IHBpdA== 19838
+IHRyYW5zZg== 19839
+YXJ0ZQ== 19840
+4LmJ 19841
+cmFwZQ== 19842
+cmV0dmFs 19843
+X2FmdGVy 19844
+Ijw8 19845
+IEJlcmxpbg== 19846
+IHRpc3N1ZQ== 19847
+LkludGVudA== 19848
+INC00LvRjw== 19849
+IHN0dW5uaW5n 19850
+IEhhbA== 19851
+LkludGVnZXI= 19852
+IHdoZXJlYXM= 19853
+IGRlbGVn 19854
+IHVzZXJOYW1l 19855
+IGZvcm1hdHM= 19856
+IGNvbXBlbnNhdGlvbg== 19857
+IEh1bQ== 19858
+YXJyaW5n 19859
+IHVuc2FmZQ== 19860
+UGlu 19861
+Y2x1Yg== 19862
+a2V5d29yZA== 19863
+X3RoZW1l 19864
+IGNhbGxlcg== 19865
+IGdob3N0 19866
+IGVudGl0bGVk 19867
+IE1hcw== 19868
+IGRlbW9uc3RyYXRl 19869
+IEhvd2FyZA== 19870
+RHJvcA== 19871
+I3VuZGVm 19872
+IGludm9rZQ== 19873
+IEJyaWRnZQ== 19874
+ZW5kZW4= 19875
+aWJsaW5n 19876
+U2xvdA== 19877
+QVRBQkFTRQ== 19878
+IHRlbXBlcmF0dXJlcw== 19879
+c2VyaWVz 19880
+IFJlbWVtYmVy 19881
+Q2FsZW5kYXI= 19882
+QkY= 19883
+PT8= 19884
+IEFG 19885
+KGh0dHA= 19886
+bWFrZXJz 19887
+ZmluaXR5 19888
+cHJlY2F0ZWQ= 19889
+V0g= 19890
+b2xpZGF5cw== 19891
+LXVu 19892
+aWFsZQ== 19893
+XFVzZXI= 19894
+cmVhc29u 19895
+JywKCg== 19896
+T1dFUg== 19897
+IHByZWRpY3Rpb25z 19898
+cHJvYg== 19899
+Lm5u 19900
+ICc7Cg== 19901
+LkZyb21Bcmdi 19902
+X0xPTkc= 19903
+IHRyb3Vi 19904
+IHVuaXR0ZXN0 19905
+ZWxpaG9vZA== 19906
+CWlz 19907
+IGNvbnNlYw== 19908
+TEVBU0U= 19909
+IGNsaWNrZWQ= 19910
+IHRlbXBsYXRlcw== 19911
+Qlk= 19912
+cGVybQ== 19913
+bWF0Y2hlcw== 19914
+bGF3 19915
+KHRm 19916
+X3JhdGlv 19917
+aXRlbXB0eQ== 19918
+IGNyZWF0b3I= 19919
+Qml0cw== 19920
+RW5jb2Rlcg== 19921
+Ki4= 19922
+IFVJVA== 19923
+IE1hc2s= 19924
+Y3VybA== 19925
+LWdv 19926
+IE9jYw== 19927
+Y29ycmVjdA== 19928
+IEdlcg== 19929
+KGxheW91dA== 19930
+dW5jdA== 19931
+LmRpc3BhdGNo 19932
+O2FtcA== 19933
+LmlzUmVxdWlyZWQ= 19934
+CWRv 19935
+bWly 19936
+IHB0aHJlYWQ= 19937
+LWF1dG8= 19938
+IEljZQ== 19939
+IHZpb2xhdGlvbg== 19940
+IGNvbmNsdWRlZA== 19941
+IHZhcnM= 19942
+Y2FudmFz 19943
+IFRlbXA= 19944
+IFBoaWxpcHA= 19945
+iOuLpA== 19946
+Y3JlYXNl 19947
+IGZpc2hpbmc= 19948
+YWJiaXQ= 19949
+IGNvbmNlbnRyYXRpb24= 19950
+aXJ0aGRheQ== 19951
+IGdyb3Nz 19952
+IGtp 19953
+IEhhbmRsZXI= 19954
+IGltbWlncmFudHM= 19955
+6IA= 19956
+VW5k 19957
+cG4= 19958
+cmFj 19959
+IENvbnN1bHQ= 19960
+Zm9sZA== 19961
+IHN0cnVnZ2xpbmc= 19962
+aGVhdA== 19963
+R2VuZXJpYw== 19964
+IHJpZGlj 19965
+IENPVklE 19966
+b21pdGVtcHR5 19967
+X09QVElPTg== 19968
+6rCA 19969
+IGNyZWF0dXJlcw== 19970
+X1BBR0U= 19971
+ZWk= 19972
+KGhvc3Q= 19973
+X0hQUA== 19974
+IFhYWA== 19975
+IGF3aw== 19976
+YXNjYWRl 19977
+IHByZWc= 19978
+cHJvdmlkZXI= 19979
+UGFs 19980
+ZWdlbg== 19981
+Y2xvbmU= 19982
+LlJlZ2lzdGVy 19983
+IGF0dGFjaG1lbnQ= 19984
+YmVpdA== 19985
+dGhlbGVzcw== 19986
+KERhdGU= 19987
+IEZvcmVzdA== 19988
+Q0dSZWN0 19989
+IGNoaWxkaG9vZA== 19990
+YW1pbmU= 19991
+YXhlcw== 19992
+J109 19993
+TmF2aWdhdG9y 19994
+IHJlcGxpZWQ= 19995
+X2ludg== 19996
+LFQ= 19997
+IEZlYXR1cmU= 19998
+ey0= 19999
+TEFORw== 20000
+IGNvbnZleQ== 20001
+55So5oi3 20002
+IFNlcmlm 20003
+IEF1cw== 20004
+bGljaGU= 20005
+IHVudXNlZA== 20006
+IG1vbnQ= 20007
+bm9kZXM= 20008
+IHNldQ== 20009
+LmNsYXNzTmFtZQ== 20010
+bm9ybQ== 20011
+X1NFUlZFUg== 20012
+IHdpbmc= 20013
+aW54 20014
+UmF3 20015
+IEphbQ== 20016
+IGluc2lnaHQ= 20017
+IE5H 20018
+IEludGVyZmFjZQ== 20019
+IHN0bXQ= 20020
+IG5hbg== 20021
+Y3VsYXRvcg== 20022
+LWFwcA== 20023
+KEJ1bmRsZQ== 20024
+TWVzc2FnZUJveA== 20025
+4K4= 20026
+IG1lZXRz 20027
+dWJ5 20028
+T3B0aW9uUGFuZQ== 20029
+aXRhcmlhbg== 20030
+IGNvbGxhYm9yYXRpb24= 20031
+bW92aWU= 20032
+IGFybW9y 20033
+X2JpdHM= 20034
+IEhhdmluZw== 20035
+IG51ZGU= 20036
+IFNldHRpbmc= 20037
+IHN1Y2M= 20038
+RGVsYXk= 20039
+LmNvbXBvbmVudHM= 20040
+YWNodXNldA== 20041
+IEFsZXhhbmRlcg== 20042
+wqk= 20043
+IG1ldGVycw== 20044
+IHByZXBhcmluZw== 20045
+IGluY2VudA== 20046
+5ZM= 20047
+IGvDtm5uZW4= 20048
+IENvbnNlcnY= 20049
+IG51bWVybw== 20050
+YWNodXNldHRz 20051
+LWludA== 20052
+IGVtcGhhcw== 20053
+bGF5b3V0cw== 20054
+RXhjZWw= 20055
+SUJBY3Rpb24= 20056
+IHJlc2lkZW50aWFs 20057
+ZWxpbmc= 20058
+IE5D 20059
+IEFsbGVu 20060
+IGNldHRl 20061
+IG1pbmRz 20062
+LnJlcXVpcmVk 20063
+2LM= 20064
+IEdpcmxz 20065
+IH07 20066
+IHN0cmluZ1dpdGhGb3JtYXQ= 20067
+IGFkZHJlc3NlZA== 20068
+dGhleQ== 20069
+IEJsb29k 20070
+cG9zZXI= 20071
+IGphbQ== 20072
+yJk= 20073
+5pWw5o2u 20074
+IHN0ZG91dA== 20075
+IFVURg== 20076
+Q2xhc3Nlcw== 20077
+PiI7DQo= 20078
+IFNhdg== 20079
+LkJvbGQ= 20080
+IGVuYWJsZXM= 20081
+CXRtcA== 20082
+IG1hbnVhbGx5 20083
+IFNxdQ== 20084
+dXNlcmlk 20085
+LmZ1bmN0aW9u 20086
+LmNhY2hl 20087
+TE9QVA== 20088
+LlNlcnZpY2Vz 20089
+ZGRpdA== 20090
+dGlt 20091
+PGltZw== 20092
+IFRoaW5ncw== 20093
+IEV2ZXJ5dGhpbmc= 20094
+IGFwdA== 20095
+ZW1hbmQ= 20096
+IHJvbGxpbmc= 20097
+66Y= 20098
+LmxldmVs 20099
+IHN0b20= 20100
+IFdpbnRlcg== 20101
+IHZpZXdpbmc= 20102
+KHZhbHVlcw== 20103
+b2NvbXBsZXRl 20104
+dmlh 20105
+dXBv 20106
+IGFib3J0aW9u 20107
+acOocmU= 20108
+77yR 20109
+X0JVVFRPTg== 20110
+X2RvbWFpbg== 20111
+IGJyYQ== 20112
+IEFzdA== 20113
+aW5hcw== 20114
+IHN0YXRpc3Q= 20115
+Y29k 20116
+TFI= 20117
+IGRyaXZlcw== 20118
+IGZvbGxvd2Vycw== 20119
+IGFsbGllcw== 20120
+CWN1cnJlbnQ= 20121
+ZWNlc3Nhcnk= 20122
+IGRhbWFnZWQ= 20123
+X3B0 20124
+YW5kbGVz 20125
+b3VudHJpZXM= 20126
+IHNpbXVsdA== 20127
+ZXU= 20128
+IGNvbnRyb3ZlcnNpYWw= 20129
+X0dST1VQ 20130
+IHJpYg== 20131
+LkluZm8= 20132
+Om1t 20133
+Lm5vcm1hbA== 20134
+X0FERFJFU1M= 20135
+IO2V 20136
+YWRkbGU= 20137
+IER1cg== 20138
+LkVsZW1lbnQ= 20139
+V2FybmluZ3M= 20140
+IGNyZWRpdHM= 20141
+IGluaGli 20142
+IGVtaXNzaW9ucw== 20143
+IGhheg== 20144
+LnlvdXR1YmU= 20145
+dWdnZWQ= 20146
+IGJvdGhlcg== 20147
+IEthbnNhcw== 20148
+IEZpeGVk 20149
+IFRlc3Rz 20150
+IEZJWA== 20151
+VW5pZm9ybQ== 20152
+IGtvbnQ= 20153
+Pj4+ 20154
+c3RhdGlvbg== 20155
+bG9yZQ== 20156
+YXR5cGU= 20157
+aXNob3A= 20158
+LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKio= 20159
+Q29tYm9Cb3g= 20160
+IHZhY2F0aW9u 20161
+IGluaXRpYXRpdmU= 20162
+IGRlZmF1bHRWYWx1ZQ== 20163
+Y29uY2F0 20164
+IEto 20165
+IFdlbGNvbWU= 20166
+aXplZE5hbWU= 20167
+TWlncmF0aW9u 20168
+IGdyYWRpZW50 20169
+SG90 20170
+IGhhcmRseQ== 20171
+ZWxv 20172
+IFN0dWRlbnRz 20173
+IGxvb3Nl 20174
+YXR6 20175
+LlNlbmQ= 20176
+Jy8= 20177
+IHVuaXZlcnNhbA== 20178
+IGVudGVycHJpc2U= 20179
+IHJlZ2V4 20180
+IHZpc2l0b3I= 20181
+IEZseQ== 20182
+U2Vx 20183
+4LiZ 20184
+IFZpc3VhbA== 20185
+IGxpYnJhcmllcw== 20186
+YXRvZXM= 20187
+UGF5bWVudA== 20188
+IHBlbnQ= 20189
+IGdhdGhlcmVk 20190
+VlJUWA== 20191
+IERN 20192
+U3BsaXQ= 20193
+IGxldHRpbmc= 20194
+0J0= 20195
+X2Vycm9ycw== 20196
+ZXBvY2g= 20197
+UEFSQU0= 20198
+Y3U= 20199
+0YHRgtCy 20200
+b2x1dGlvbnM= 20201
+RWRpdGluZw== 20202
+Zm9udHM= 20203
+IGFsbG9jYXRlZA== 20204
+IEJhc2Vk 20205
+KFk= 20206
+IEp1ZGdl 20207
+IGJyb3RoZXJz 20208
+RklMRVM= 20209
+w6dv 20210
+d2I= 20211
+X1BJ 20212
+J14= 20213
+IHN3b3Jk 20214
+LnNlcnZpY2Vz 20215
+IG5s 20216
+VGlt 20217
+aWdn 20218
+IE1vb3Jl 20219
+IGNyeXB0b2M= 20220
+5Ye6 20221
+X3Bvc3Rz 20222
+b3RhdGU= 20223
+Pyc= 20224
+Li4uLgoK 20225
+IGts 20226
+PSIk 20227
+IGRlY29yYXRpb24= 20228
+4bqh 20229
+IERJUkVDVA== 20230
+R1VJ 20231
+KT0+ewo= 20232
+IG5ld3NsZXR0ZXI= 20233
+IHByZWNpcw== 20234
+KHBvaW50 20235
+IEVxdWlwbWVudA== 20236
+dXR5 20237
+IERhdmU= 20238
+IHBhcnRpY2lwYXRpb24= 20239
+dWFyaW9z 20240
+eGl0 20241
+LkFz 20242
+RVRFUg== 20243
+b3JvdXM= 20244
+IHNoaWVsZA== 20245
+W10+ 20246
+aWxpdGFyeQ== 20247
+Lm9yaWdpbg== 20248
+IHByb21vdGlvbg== 20249
+VW50 20250
+IGN0 20251
+VFJB 20252
+Vmlld0hvbGRlcg== 20253
+IHNpZ21h 20254
+ZGVsdGE= 20255
+YXJlaG91c2U= 20256
+Y29udHJhY3Q= 20257
+KFZlY3Rvcg== 20258
+IGNvbXBldGU= 20259
+L2Zvcm0= 20260
+L2NvbXBvbmVudHM= 20261
+IG5y 20262
+IEluZG9uZXM= 20263
+INC+0YI= 20264
+IFZvbHVtZQ== 20265
+LmZpbGVz 20266
+KHJlc3A= 20267
+L21vZGVscw== 20268
+IHN1cmY= 20269
+c3RhbmRhcmQ= 20270
+L28= 20271
+IFhDVEFzc2VydA== 20272
+VklDRVM= 20273
+LkNvZGU= 20274
+U0VE 20275
+IGFjdGl2YXRl 20276
+RGVsdGE= 20277
+IGxpbWl0YXRpb24= 20278
+cmlq 20279
+IHByZWduYW50 20280
+Ol4o 20281
+IHNvdXI= 20282
+cGll 20283
+IGV4cGVuc2U= 20284
+aWNhdGlvbg== 20285
+IExhcmdl 20286
+IMKx 20287
+IEJvd2w= 20288
+KG1vZGVscw== 20289
+L04= 20290
+UGE= 20291
+LnJlbG9hZA== 20292
+IHdvbmRlcmluZw== 20293
+RXhlY3V0aW9u 20294
+CSAgICAgIA== 20295
+IEdyYXBoaWNz 20296
+IENvbnRpbg== 20297
+X2pvYg== 20298
+IGdldE5hbWU= 20299
+IE1hZ24= 20300
+IERXT1JE 20301
+bWFk 20302
+IG5o 20303
+ZmVhdHVyZXM= 20304
+fSIpOwo= 20305
+aGVldHM= 20306
+KHRyYWlu 20307
+em4= 20308
+IHJlY3J1aXQ= 20309
+LmNvbm5lY3Rpb24= 20310
+IGJhcnJlbA== 20311
+IHN0ZWFt 20312
+X3NldHRpbmc= 20313
+IGFuZ3VsYXI= 20314
+YW5lb3VzbHk= 20315
+IGJpbA== 20316
+IE5vcm0= 20317
+KCEk 20318
+aWJ0 20319
+JSg= 20320
+IHBvc2l0 20321
+IEZhdGhlcg== 20322
+aW50ZW5kbw== 20323
+TGl2ZQ== 20324
+IHBvcnRz 20325
+IG1lag== 20326
+IGxhbmRpbmc= 20327
+cG9uZGVy 20328
+IGNvZA== 20329
+X0hFQURFUg== 20330
+Lk1hcmdpbg== 20331
+IGJhbGxz 20332
+IGRpc2N1c3Npb25z 20333
+IGJsZW5k 20334
+SGV4 20335
+IGZhcm1lcnM= 20336
+IG1haW50YWluaW5n 20337
+ICAgDQo= 20338
+c3lu 20339
+W1Q= 20340
+cnVz 20341
+dWZmZXJz 20342
+IGNvbnRyaWJ1dG9ycw== 20343
+X3N5cw== 20344
+LkRlYnVn 20345
+IGNvbnN0cnVjdGVk 20346
+b21lcw== 20347
+P2lk 20348
+c2xpZGVy 20349
+IHN1cHBsaWVycw== 20350
+c2NyaWJlcg== 20351
+cGVz 20352
+0J4= 20353
+IjoNCg== 20354
+XENvbnRyb2xsZXI= 20355
+KSkKCgo= 20356
+IGx1YQ== 20357
+TXVsdGk= 20358
+RU5T 20359
+U3Jj 20360
+IHBldGl0aW9u 20361
+IHNsYXZl 20362
+bG9va2luZw== 20363
+VkVSVA== 20364
+CXZlY3Rvcg== 20365
+U3BlY2lhbA== 20366
+aGg= 20367
+YW5uZQ== 20368
+IE5pZ2Vy 20369
+L3ZpZXdz 20370
+emluZw== 20371
+ZW5kYW50 20372
+PEM= 20373
+c3BlZWQ= 20374
+IHt9OwoK 20375
+QmVnaW5Jbml0 20376
+IGZvcGVu 20377
+QFJlcXVlc3RNYXBwaW5n 20378
+RW5kSW5pdA== 20379
+IHB1bmNo 20380
+U2VuZGVy 20381
+6ZQ= 20382
+Z2V0TWVzc2FnZQ== 20383
+L3R5cGVz 20384
+LlBJ 20385
+KCcnKTsK 20386
+b2N1c2Vk 20387
+KGFsbA== 20388
+IGRyb3Bkb3du 20389
+KS5fXw== 20390
+IFZpbg== 20391
+LkZvcmVpZ25LZXk= 20392
+Y2FuZg== 20393
+b3VyZWQ= 20394
+IE9yZ2FuaXphdGlvbg== 20395
+INCw 20396
+IEN1bHR1cmU= 20397
+KGNscw== 20398
+LF8= 20399
+cmdiYQ== 20400
+7J2Y 20401
+LmRhdGFHcmlkVmlldw== 20402
+IGRvemVu 20403
+IEdlcw== 20404
+X3NoYXJlZA== 20405
+bmljaw== 20406
+IGhvc3A= 20407
+b21ldGVy 20408
+IGNsYWltaW5n 20409
+aWJsZXM= 20410
+cmlr 20411
+5piv 20412
+ZW5hcmlv 20413
+IGRlbmdhbg== 20414
+b2Ji 20415
+bW9udA== 20416
+X3Jhbms= 20417
+KCcvJyw= 20418
+IGFwb2xvZw== 20419
+UHM= 20420
+X3Bvd2Vy 20421
+IEdyZWU= 20422
+IGZ1bGZpbGw= 20423
+IGZpcmViYXNl 20424
+IGZhcmU= 20425
+IEhpbQ== 20426
+IGJlYW4= 20427
+4oCmLg== 20428
+IFNQSQ== 20429
+X1JY 20430
+IHBlcmNlcHRpb24= 20431
+cmVsYXRpdmU= 20432
+Y29tcGlsZQ== 20433
+dXVt 20434
+dXRvcw== 20435
+YXVj 20436
+IEFzaw== 20437
+IGluZGljYXRvcg== 20438
+L3Ro 20439
+LnNldFN0cmluZw== 20440
+IFdpc2NvbnNpbg== 20441
+LkRvbWFpbg== 20442
+IGFydGlmaWNpYWw= 20443
+RGV2ZWxvcA== 20444
+IFNhcmFo 20445
+IGx5aW5n 20446
+KHNlYXJjaA== 20447
+IEVtcGlyZQ== 20448
+dXJyaW5n 20449
+5pe26Ze0 20450
+PSIkew== 20451
+IGdldElk 20452
+IFBheW1lbnQ= 20453
+dHJhbnNpdGlvbg== 20454
+IF0u 20455
+aXhpbg== 20456
+VlQ= 20457
+LXNlbGVjdA== 20458
+IGRlbW9uc3RyYXRlZA== 20459
+IGxhc3ROYW1l 20460
+ZW1wbG95bWVudA== 20461
+LmdldFByb3BlcnR5 20462
+IGZvdWdodA== 20463
+ZmlsZU5hbWU= 20464
+IFBlcnM= 20465
+LWNhcmQ= 20466
+YXN0cg== 20467
+YXR0cnM= 20468
+IHByb21pbmVudA== 20469
+RGVzaWdu 20470
+YW5jb3V2ZXI= 20471
+44GX44E= 20472
+YXJkbw== 20473
+c2VjcmV0 20474
+IHJhZw== 20475
+IHBvaXNvbg== 20476
+LW1hbg== 20477
+LG9taXRlbXB0eQ== 20478
+CXVu 20479
+aXR6ZXI= 20480
+IENhc2lubw== 20481
+IFJvc3M= 20482
+LWZvb3Q= 20483
+KHJlc3VsdHM= 20484
+UGxhbg== 20485
+IGxhc2Vy 20486
+6riw 20487
+X0RS 20488
+RmFjZWJvb2s= 20489
+IGJvYXJkcw== 20490
+c3Rh 20491
+XV0s 20492
+IHRpbGVz 20493
+U0laRQ== 20494
+ID1+ 20495
+IHByZW1pZXI= 20496
+b2NhYg== 20497
+IGVuY29kZWQ= 20498
+IHJlc2VydmU= 20499
+IEFmZ2hhbmlzdGFu 20500
+IExpc3ROb2Rl 20501
+dXJscw== 20502
+IHN1Ym1pc3Npb24= 20503
+IG5ldQ== 20504
+ICMrIw== 20505
+X1BPU1Q= 20506
+IG1vaXN0 20507
+ZWxsaQ== 20508
+ZWxsaWdlbnQ= 20509
+LmFsZXJ0 20510
+w7Nk 20511
+YnJl 20512
+IENvbGxlY3Q= 20513
+IGdyYXBoaWM= 20514
+IGxvbmdpdHVkZQ== 20515
+IFByb3ZpZA== 20516
+IENhbGN1bGF0ZQ== 20517
+eGZmZmY= 20518
+Y3JpdGVyaWE= 20519
+IHdhdGVycw== 20520
+cm9jaw== 20521
+bG9xdWVudA== 20522
+IFRyaWI= 20523
+IGJ1cnN0 20524
+IHN1ZmZpeA== 20525
+LkV4dGVuc2lvbnM= 20526
+aXNoZXM= 20527
+aXZlbA== 20528
+IExJS0U= 20529
+IEdldHR5 20530
+LkFjdGlvbkV2ZW50 20531
+LnNsZg== 20532
+IEhBTA== 20533
+dXBhbA== 20534
+RUFS 20535
+dWRp 20536
+X3RpbWVvdXQ= 20537
+VUY= 20538
+IFNpbmdhcG9yZQ== 20539
+IEFkdmVudA== 20540
+X2ludGVydmFs 20541
+Y2hhZnQ= 20542
+IEVtZXI= 20543
+IHRlbGVwaG9uZQ== 20544
+IFR1cms= 20545
+X2ludGVyZmFjZQ== 20546
+IE93bg== 20547
+IGVuY291cmFnZWQ= 20548
+PE9iamVjdA== 20549
+X1RleHQ= 20550
+IE9udGFyaW8= 20551
+IEFwcGx5 20552
+LmZpcmViYXNl 20553
+IGFudGli 20554
+UHJpb3JpdHk= 20555
+ZW5leg== 20556
+RGF5cw== 20557
+Y2lk 20558
+dXJyZW5jZQ== 20559
+Oy8= 20560
+aW5uZWQ= 20561
+0YHRjw== 20562
+IHZleg== 20563
+Znc= 20564
+Ly8k 20565
+YXR0YWNr 20566
+IHN0YXJ0dXA= 20567
+YWluZXJz 20568
+LmZyYWdtZW50 20569
+b3BhY2l0eQ== 20570
+KGNvbm4= 20571
+aGVpbQ== 20572
+Lm5ldHdvcms= 20573
+KHN0cmVhbQ== 20574
+IE5PTg== 20575
+dG9s 20576
+IFhib3g= 20577
+IERT 20578
+IGNhY2hlZA== 20579
+IHByb3N0aXR1dGFz 20580
+IEJhbHQ= 20581
+KCdb 20582
+IG5vZXhjZXB0 20583
+Iic= 20584
+IHNk 20585
+LnZhbGlk 20586
+X2Fn 20587
+IHJhY2Vz 20588
+IHJvZA== 20589
+aXR1ZGVz 20590
+PD4o 20591
+LlByb2R1Y3Q= 20592
+Rm9ybXM= 20593
+TkVX 20594
+UGF5 20595
+CWJvb2xlYW4= 20596
+X2NvbnRhY3Q= 20597
+IEVsZWN0cmlj 20598
+c2tpcA== 20599
+IHd1cg== 20600
+IGNocm9uaWM= 20601
+X2RyaXZlcg== 20602
+IFNhYg== 20603
+IFVsdA== 20604
+IFJhZA== 20605
+U1RBVFVT 20606
+IExld2lz 20607
+T0I= 20608
+IGdpZnRz 20609
+LlJlYw== 20610
+VFJVRQ== 20611
+IGludGVuc2l0eQ== 20612
+TWFya2Vy 20613
+LmNvbXBhcmU= 20614
+ZmZpYw== 20615
+Q29va2ll 20616
+IEJhYnk= 20617
+IEJpZ0RlY2ltYWw= 20618
+aWxldA== 20619
+IEhPTERFUlM= 20620
+IExhZHk= 20621
+IGx1bmc= 20622
+IEFsYWJhbWE= 20623
+IGRlc3M= 20624
+YCk7Cg== 20625
+IEJ1aWxkZXI= 20626
+X3JlZ2lvbg== 20627
+IG5ldXRyYWw= 20628
+Qm90aA== 20629
+IGhw 20630
+IGhvcm4= 20631
+IHNlZ21lbnRz 20632
+IEVD 20633
+Ij0+Ig== 20634
+KHJlYw== 20635
+IFBp 20636
+R00= 20637
+IGxhcHRvcA== 20638
+U2NhbGFy 20639
+aXNk 20640
+LWRpYWxvZw== 20641
+IEFuZGVyc29u 20642
+IG1pc3Rha2Vz 20643
+IEhhbg== 20644
+amVz 20645
+ZXN0aW5hdGlvbg== 20646
+IHByb21pc2Vz 20647
+Ymlk 20648
+IFNjaWVudA== 20649
+R0lO 20650
+IFBlcmZvcm1hbmNl 20651
+YmFnZQ== 20652
+LnVzZXJz 20653
+bGVhZGluZw== 20654
+IG9yYWw= 20655
+R3JhcGhpY3M= 20656
+X1BUUg== 20657
+aGFuZw== 20658
+IGluZXY= 20659
+cHJvY2Vzc2luZw== 20660
+RmFjdG9y 20661
+IE5B 20662
+JHN0cmluZw== 20663
+IGdyb3VuZHM= 20664
+LlNhdmVDaGFuZ2Vz 20665
+Y2xvY2s= 20666
+Y3JpcGNpb24= 20667
+IE5ld3Rvbg== 20668
+Z2M= 20669
+LmluY2x1ZGVz 20670
+IGJsYXN0 20671
+ICctJw== 20672
+IHB1ZWRl 20673
+LlNlc3Npb24= 20674
+IGdyZXA= 20675
+X2ZpbmFs 20676
+IEdheQ== 20677
+IEdpdmU= 20678
+aXJp 20679
+LXN0YXI= 20680
+IFVJSW1hZ2U= 20681
+X2Vwb2No 20682
+dWJi 20683
+ZW50aA== 20684
+IGVsaXRl 20685
+IGNhbXBhaWducw== 20686
+IFBvcm5v 20687
+X2Fzc2lnbg== 20688
+UHJvdG9jb2w= 20689
+IEJlaW5n 20690
+IEFpcnBvcnQ= 20691
+IGNvbnZlbnRpb25hbA== 20692
+IFdhdA== 20693
+IENJ 20694
+RVRB 20695
+IEFudGhvbnk= 20696
+IHRhYmxldA== 20697
+KGZvcm1hdA== 20698
+IGNvbnNpc3RlbnRseQ== 20699
+IElvd2E= 20700
+IGF2YXRhcg== 20701
+LmN1cnNvcg== 20702
+IVs= 20703
+IGhhbmdpbmc= 20704
+SGVy 20705
+U3VjaA== 20706
+JzsKCgo= 20707
+b3JnZW91cw== 20708
+KCk9PQ== 20709
+IHZpZXdNb2RlbA== 20710
+IOOD 20711
+IGVscw== 20712
+IEFnZW50 20713
+RmV0Y2g= 20714
+YXBvcg== 20715
+IGN4 20716
+cHJlYWQ= 20717
+IFBpZXI= 20718
+b2VmZg== 20719
+U24= 20720
+IFZpcnR1YWw= 20721
+QXBy 20722
+LldoaXRl 20723
+X01PRA== 20724
+IFBvaW50cw== 20725
+5aSx 20726
+IGdlbmVz 20727
+IHZlbmRvcg== 20728
+IG1haW5zdHJlYW0= 20729
+PHNyYw== 20730
+IEVsaXphYmV0aA== 20731
+RGVjb2Rlcg== 20732
+LXN0YXRl 20733
+IEdsYXNz 20734
+bmN5 20735
+YWRpYW5z 20736
+X21vbg== 20737
+IFJlbW90ZQ== 20738
+IHdpcmVsZXNz 20739
+IE1p 20740
+5Yk= 20741
+6KGo 20742
+c3RhZ2U= 20743
+IFRpbGU= 20744
+bGxpYg== 20745
+VmFyaWFudA== 20746
+PT0K 20747
+IGdvbGRlbg== 20748
+KFFTdHJpbmc= 20749
+LnB1dEV4dHJh 20750
+IERvbQ== 20751
+IEFuaW1hdGlvbg== 20752
+IGludGVyYWN0aXZl 20753
+aWZhY3Q= 20754
+6Zmk 20755
+TEVU 20756
+IGZyZXF1ZW50 20757
+IDw+Cg== 20758
+RmlsZW5hbWU= 20759
+IHNuZQ== 20760
+IEZvb3RiYWxs 20761
+IHJpdmFs 20762
+IGRpc2FzdGVy 20763
+aW9uaWM= 20764
+IERhbWFnZQ== 20765
+LlJlc291cmNl 20766
+LWVu 20767
+IFR5cGVz 20768
+Z2V0U3RyaW5n 20769
+KGJvYXJk 20770
+IGJvbA== 20771
+cGxhaW4= 20772
+enlt 20773
+4Liy 20774
+IHNjYW5uZXI= 20775
+aWxkZXI= 20776
+X21zZ3M= 20777
+5o8= 20778
+KGludGVudA== 20779
+IGRlc3RydWN0 20780
+IGJ1c3Q= 20781
+IEVtcGxveQ== 20782
+b25p 20783
+IFVJVmlld0NvbnRyb2xsZXI= 20784
+IG9kZHM= 20785
+ZWFyZXI= 20786
+R2VvbWV0cnk= 20787
+IHlpaQ== 20788
+X0VYUE9SVA== 20789
+IEF0dGFjaw== 20790
+IG5pZXQ= 20791
+IGltcHJlc3Npb24= 20792
+IEdpbA== 20793
+X3Byb2I= 20794
+IENG 20795
+IEV4cGVyaWVuY2U= 20796
+L3BsdWdpbnM= 20797
+Lk1ldGhvZA== 20798
+IGJlbGllZnM= 20799
+TmF0aXZl 20800
+X2J1aWxk 20801
+IHZpZw== 20802
+IHJhbmtz 20803
+Y292ZXJlZA== 20804
+c3VjaA== 20805
+R3VhcmQ= 20806
+LnBhY2s= 20807
+YWRkZXI= 20808
+aXZpYQ== 20809
+bG5n 20810
+INCy0Ys= 20811
+VGltZXN0YW1w 20812
+X25vdw== 20813
+IHBva2Vy 20814
+IHVuYw== 20815
+IHNoYXBlcw== 20816
+LXR5cGVz 20817
+X3BlcmlvZA== 20818
+cGs= 20819
+IHZldGVyYW4= 20820
+IHNvbm8= 20821
+IGFwcG9pbnRlZA== 20822
+b3ZlcmZsb3c= 20823
+LmRyaXZlcg== 20824
+X2NhdA== 20825
+dXR0 20826
+cGxhbnQ= 20827
+aW1i 20828
+IEFjY2VwdA== 20829
+IGNvbmNlcnQ= 20830
+CW5vZGU= 20831
+CXo= 20832
+Pz4NCg== 20833
+IGJhbm5lZA== 20834
+CSAgICAgICAgICAgICAgIA== 20835
+IHRveGlj 20836
+IGRpc2FwcGU= 20837
+yJs= 20838
+IGdyYWNl 20839
+YXRlZnVs 20840
+UmVwbHk= 20841
+IENydXo= 20842
+IHNjcmFw 20843
+IGtleXdvcmRz 20844
+c2ltcA== 20845
+IG1vcnRnYWdl 20846
+IGN5YmVy 20847
+IEV4ZWN1dGU= 20848
+IGxhdGl0dWRl 20849
+aWZ1 20850
+LkNPTQ== 20851
+ZGJv 20852
+IHNvcnRz 20853
+IEdhcw== 20854
+b21pYWw= 20855
+LkxvY2Fs 20856
+Q2VsbHM= 20857
+LlJlcGxhY2U= 20858
+U3RyaW5ncw== 20859
+LmZpdA== 20860
+IFRoaXJk 20861
+JSIsCg== 20862
+IHt9Ii4= 20863
+IFNvbnk= 20864
+IFs6 20865
+IGZhbGxlbg== 20866
+LicpCg== 20867
+aW5o 20868
+IE1D 20869
+IHJlZGlz 20870
+Q29kZXM= 20871
+IHByb2ZpbGVz 20872
+aG9vaw== 20873
+UmVkdWNlcg== 20874
+X0ZVTkM= 20875
+IG5hdmlnYXRl 20876
+c3RybGVu 20877
+IGhvcm0= 20878
+4Z4= 20879
+IFNS 20880
+LmJvb3Q= 20881
+IGRpZ2VzdA== 20882
+CWhlYWRlcg== 20883
+LmZpbmRPbmU= 20884
+5oE= 20885
+RGJUeXBl 20886
+bmlh 20887
+X21lcmdl 20888
+IGRvbm5l 20889
+L0dldHR5 20890
+X0NIQVI= 20891
+IGJhbmRz 20892
+LlVSTA== 20893
+YXJ0aWFs 20894
+IGZyZXE= 20895
+IHNpc3Q= 20896
+Tmc= 20897
+IHJlbmRlcmluZw== 20898
+XENvcmU= 20899
+V2lkZ2V0cw== 20900
+IFZB 20901
+IGFjdGl2aXN0cw== 20902
+U3Rl 20903
+PV8= 20904
+YWxsYQ== 20905
+U3RhbXA= 20906
+IGxvYWRz 20907
+IHh4 20908
+IExlYXJuaW5n 20909
+Lk12Yw== 20910
+dWly 20911
+KCIk 20912
+IGNvbm5lY3Rpbmc= 20913
+UmVhZE9ubHk= 20914
+dXJ1 20915
+IEVhZw== 20916
+QklU 20917
+X0RFTA== 20918
+5ac= 20919
+YXJyYXNz 20920
+ZXh0ZXJuYWw= 20921
+IFlPVVI= 20922
+IEJyZXc= 20923
+IEZpdmU= 20924
+IHJlc2l6ZQ== 20925
+aWdpZA== 20926
+ZXJhdGlvbg== 20927
+INGN 20928
+5Yqg 20929
+IENhdGNo 20930
+2YE= 20931
+IExlb24= 20932
+YW1pbA== 20933
+LkJvZHk= 20934
+Q2xpcA== 20935
+L2xpc3Q= 20936
+LmJy 20937
+RWRpdFRleHQ= 20938
+CWRi 20939
+LkdhbWU= 20940
+KEJ1aWxkQ29udGV4dA== 20941
+YmFja2VuZA== 20942
+LlJlZA== 20943
+ZmFjZWJvb2s= 20944
+LnVybHM= 20945
+bXI= 20946
+cm9sbGVk 20947
+LS0tLS0tLQ== 20948
+IGludGVydmVudGlvbg== 20949
+IHJldGlyZW1lbnQ= 20950
+IEtpdA== 20951
+IFBSRQ== 20952
+VXBwZXJDYXNl 20953
+IFNvY2tldA== 20954
+IDot 20955
+IHN0dWR5aW5n 20956
+IE1ldHJv 20957
+YXJkZWQ= 20958
+IGNvbnZlcnNhdGlvbnM= 20959
+Q2FsbGVk 20960
+IGV4YW1pbmU= 20961
+ZXJ0aWZpY2F0ZQ== 20962
+Lmd6 20963
+LXJlc3BvbnNpdmU= 20964
+IHJlZnVuZA== 20965
+X25ldHdvcms= 20966
+YWxsb3dlZA== 20967
+ZW1wdA== 20968
+IG1lYWxz 20969
+Q2F0ZWdvcmllcw== 20970
+IHRyYXZlbGluZw== 20971
+IGtn 20972
+IHNoYW1l 20973
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA= 20974
+IGV4cGxpY2l0bHk= 20975
+IG1hdGhlbWF0aWM= 20976
+IFN1aXRl 20977
+IFJHQg== 20978
+KioqKioqLw== 20979
+IG1peHR1cmU= 20980
+bGVhcm5pbmc= 20981
+LnRlbXBsYXRl 20982
+YXR0cw== 20983
+d3g= 20984
+CWN0eA== 20985
+LnByb3BlcnRpZXM= 20986
+IGRyaW5rcw== 20987
+IEVpdGhlcg== 20988
+c2V0VGV4dA== 20989
+LmdldERhdGE= 20990
+LnppcA== 20991
+IHJldmVhbHM= 20992
+PHRhYmxl 20993
+Lkhhc2hNYXA= 20994
+IEh1cg== 20995
+KSIpOwo= 20996
+LmZyYW1ld29yaw== 20997
+IFNUQVJU 20998
+ZmVlZGJhY2s= 20999
+IHNhZmVseQ== 21000
+Lmljb24= 21001
+Y29uZmlndXJl 21002
+LmxvY2s= 21003
+LmxheWVycw== 21004
+Lz4uCg== 21005
+IHJhbmtlZA== 21006
+X2ltcGw= 21007
+IEhhbmRsZXM= 21008
+IGhvc3RlZA== 21009
+IHVwZGF0aW5n 21010
+YWxidW0= 21011
+6Z0= 21012
+IHNoYWRlcg== 21013
+RWRpdG9ycw== 21014
+LXJvdW5k 21015
+W117 21016
+IHNlcA== 21017
+IEhp 21018
+VEVN 21019
+bG9va3Vw 21020
+Lm1hbg== 21021
+X0lOUFVU 21022
+IHRocmVhdGVuZWQ= 21023
+X0lNUE9SVA== 21024
+IGRyb3Bz 21025
+cnVpdA== 21026
+c2lk 21027
+Ym90aA== 21028
+IEV4Y2Vs 21029
+IGplcg== 21030
+b3JkaW5hcnk= 21031
+0LXQuQ== 21032
+VklFVw== 21033
+cmVwbHk= 21034
+ICk6Cg== 21035
+Y29sb3Jz 21036
+dmVyaWZpZWQ= 21037
+X1Ry 21038
+X3BhcnNl 21039
+IGNvbmdyZXNz 21040
+UHJvbWlzZQ== 21041
+aW50cw== 21042
+IE1vdGhlcg== 21043
+LkFwaQ== 21044
+IER1cmF0aW9u 21045
+IGZpcnN0TmFtZQ== 21046
+aW5oZXJpdGRvYw== 21047
+IE1hcnM= 21048
+IGFwcg== 21049
+T0RZ 21050
+IHZpc2l0cw== 21051
+IGhlYWxpbmc= 21052
+bGV0dGVycw== 21053
+KSkpOw0K 21054
+ZnV0dXJl 21055
+LkZyYW1ld29yaw== 21056
+IGtpc3M= 21057
+IGludm9sdmU= 21058
+IHNpbGVudA== 21059
+YWRvd3M= 21060
+IGFueWJvZHk= 21061
+c2No 21062
+IHNvbGVseQ== 21063
+LWltZw== 21064
+IHByb3ByaQ== 21065
+IGluc3RydWN0 21066
+IGxpY2Vuc2Vz 21067
+IG1ldGg= 21068
+IGNvbmRlbQ== 21069
+IERvbWFpbg== 21070
+IEhhcnJpcw== 21071
+IHPDpQ== 21072
+Q0VQVA== 21073
+QmF0Y2g= 21074
+QGV4dGVuZHM= 21075
+IENPTlRSSUJVVA== 21076
+LkRhdGFGcmFtZQ== 21077
+X3BhY2tldA== 21078
+cmVjaXNpb24= 21079
+IGZvY3VzaW5n 21080
+Lmh0 21081
+X18iOgo= 21082
+OkdldA== 21083
+IEtD 21084
+IHBhc3NhZ2U= 21085
+U2VnbWVudA== 21086
+X2NlbnRlcg== 21087
+LXpB 21088
+X0JM 21089
+IGNvbnZpbg== 21090
+IGNsYXNzaWZpZWQ= 21091
+IE5TTXV0YWJsZQ== 21092
+X2Fw 21093
+dGlsZQ== 21094
+UmVjdGFuZ2xl 21095
+KG51bXM= 21096
+dmVucw== 21097
+IFVJQnV0dG9u 21098
+IEZlZGVy 21099
+YW1v 21100
+IG91dGxpbmU= 21101
+IFBhcnNlcg== 21102
+IOKJ 21103
+IFdvcmtz 21104
+LlNjaGVtYQ== 21105
+IGVuZ2luZXM= 21106
+X2NvbW1vbg== 21107
+X29sZA== 21108
+IHNldENvbnRlbnRWaWV3 21109
+IC8vLzw= 21110
+IEJU 21111
+Zm0= 21112
+IGRpdmVycw== 21113
+X3dlaWdodHM= 21114
+ZW1hcms= 21115
+IEFDVA== 21116
+IHByb3BvcnRpb24= 21117
+b3ZlcmxheQ== 21118
+LmRpcm5hbWU= 21119
+IEdpdA== 21120
+X1JFRkVSRU5DRQ== 21121
+PD4= 21122
+bGI= 21123
+X3J1bGU= 21124
+6LSl 21125
+IFB1dGlu 21126
+IHNsZWVwaW5n 21127
+KCk6DQo= 21128
+IHByZXNlcnZl 21129
+IHBhcmxpYW1lbnQ= 21130
+IExvb2tpbmc= 21131
+IHBpY2tpbmc= 21132
+IERpc3BhdGNo 21133
+IHNsaXA= 21134
+65M= 21135
+IEx5bg== 21136
+X3NpZ25hbA== 21137
+Y29uZmlndXJhdGlvbg== 21138
+IFBpdHQ= 21139
+YWRlbg== 21140
+cHJvY2VkdXJl 21141
+IGVudGh1c2k= 21142
+ZmlnaHQ= 21143
+IENvbnNpZGVy 21144
+IHRvcm4= 21145
+Q29ubmVjdGVk 21146
+LmNvcw== 21147
+X2dyb3Vwcw== 21148
+IFRoaW5r 21149
+IGRlbGliZXI= 21150
+IHJlc2lk 21151
+d29ya2luZw== 21152
+LmNvbHVtbnM= 21153
+IENhbGxlZA== 21154
+IGVzbGludA== 21155
+PiIs 21156
+X0RPV04= 21157
+aGlzdA== 21158
+IEFkdmFuY2Vk 21159
+IHJld2FyZHM= 21160
+YWN0b3Jz 21161
+IHNpbGVuY2U= 21162
+IG15dGg= 21163
+IG5ldXI= 21164
+IGF1Y3Rpb24= 21165
+LkdldFN0cmluZw== 21166
+ZWtz 21167
+KHByb2plY3Q= 21168
+CW1zZw== 21169
+CW91dHB1dA== 21170
+IGNvbXBsYWludHM= 21171
+LFM= 21172
+IHRibA== 21173
+ICwKCg== 21174
+cmlvcnM= 21175
+YWhyZW4= 21176
+IGxhd3llcnM= 21177
+cmVkdXg= 21178
+X3N5bWJvbA== 21179
+b2ZmZWU= 21180
+X1JFU1VMVA== 21181
+KE5hbWU= 21182
+VVRD 21183
+LmN1cnJlbnRUaW1l 21184
+IG9yZ2FuaXM= 21185
+LmFyZw== 21186
+IG1pbmlt 21187
+d2ljaw== 21188
+IHJlY2VpdmVz 21189
+QmFsYW5jZQ== 21190
+IHNwZWFrcw== 21191
+IERheXM= 21192
+IEJlbG93 21193
+dGlwbw== 21194
+UHJlc2VudA== 21195
+IHJlc2Vydg== 21196
+aHA= 21197
+IHJpdA== 21198
+X1JJR0hU 21199
+LS0p 21200
+IGNoYWlybWFu 21201
+RElT 21202
+IEJPT1NU 21203
+IGV4cGVyaW1lbnRz 21204
+X18pOwo= 21205
+IHN0YW1w 21206
+IGZlcnQ= 21207
+IGZvbmQ= 21208
+VGVy 21209
+ZWx2ZQ== 21210
+dXJlbg== 21211
+K2k= 21212
+ZW5kZW5jeQ== 21213
+IHZpcnR1YWxseQ== 21214
+Li4uIg== 21215
+772e 21216
+LWNlbnQ= 21217
+X3VuaXF1ZQ== 21218
+IHByaWNpbmc= 21219
+bWlj 21220
+UkVTSA== 21221
+IDo6Og== 21222
+IGFubm90YXRpb24= 21223
+IENpcmNsZQ== 21224
+b25nb2Ri 21225
+aXRhcw== 21226
+ICUo 21227
+KGNvbXBvbmVudA== 21228
+INC+0LE= 21229
+KHBvcnQ= 21230
+LWhvdXI= 21231
+Lm9iag== 21232
+TEJM 21233
+IGp1cnk= 21234
+R0JU 21235
+IHNweQ== 21236
+IFByb2Zlc3Npb25hbA== 21237
+ICIiOwoK 21238
+IHN0cmlraW5n 21239
+IGRpc2NyaW1pbmF0aW9u 21240
+IHBheXM= 21241
+bGljdA== 21242
+ZW50ZXM= 21243
+IHRocm93aW5n 21244
+IFBsdWdpbg== 21245
+KGRlZg== 21246
+IFJ1bnRpbWVFeGNlcHRpb24= 21247
+IE1pZ3JhdGlvbg== 21248
+IGRpYw== 21249
+YmFn 21250
+b25pYQ== 21251
+IGNvcnJ1cHRpb24= 21252
+KE1hcA== 21253
+IHByeg== 21254
+LmR0bw== 21255
+IGFjcXVpcmU= 21256
+U3RhdGVUb1Byb3Bz 21257
+IGxvdmluZw== 21258
+0L7Qtg== 21259
+X3BhdHRlcm4= 21260
+IGVtb3Rpb25z 21261
+IHB1Ymxpc2hlcg== 21262
+X2Jl 21263
+IGNvdXBsZXM= 21264
+b2o= 21265
+IENoYXJ0 21266
+IHRyb3A= 21267
+LnRvb2w= 21268
+IGVzdGFibGlzaG1lbnQ= 21269
+IGRvbA== 21270
+IHRvd2Vy 21271
+IGxhbmU= 21272
+IFN5ZG5leQ== 21273
+IGZpbGxpbmc= 21274
+Y2xhaW1lZA== 21275
+IGRpYWxvZ3Vl 21276
+IGNvbnZlbnRpb24= 21277
+Ym9va2luZw== 21278
+cGFyZW5jeQ== 21279
+5rE= 21280
+IEdlbmVyaWM= 21281
+XFNjaGVtYQ== 21282
+IHJhbmdlcw== 21283
+L2No 21284
+IHBhbmVscw== 21285
+IHJ1bGVk 21286
+55Sf 21287
+LnRz 21288
+X3NldHM= 21289
+IGNsZWFudXA= 21290
+UHJldmlvdXM= 21291
+IEFuaW1hbA== 21292
+KCQo 21293
+IEF2ZQ== 21294
+b2xsYXI= 21295
+X2V2YWw= 21296
+CU5hbWU= 21297
+KHRyZWU= 21298
+ICJd 21299
+IGR1dGllcw== 21300
+PScv 21301
+Q2xpY2tlZA== 21302
+IGRpZmZlcmVudGx5 21303
+IENsYXJr 21304
+IGRpdA== 21305
+b2xvZ2lzdHM= 21306
+IHN5bmQ= 21307
+IHNlbmRz 21308
+LWtub3du 21309
+a2I= 21310
+IE1vZGFs 21311
+aXRhdGl2ZQ== 21312
+IHJhY2luZw== 21313
+IGhpZ2hsaWdodHM= 21314
+IFNpbW9u 21315
+IENhcHRhaW4= 21316
+5L+h 21317
+IENC 21318
+Y29udGlu 21319
+YXJhbg== 21320
+IHBoeXNpY3M= 21321
+cmV0dHk= 21322
+ZXRhbA== 21323
+Lm1k 21324
+YXhpb3M= 21325
+IHNwZWFrZXJz 21326
+IHByZXA= 21327
+IGF3YXJkZWQ= 21328
+7KeA 21329
+IENvcm4= 21330
+IE5hdHVyZQ== 21331
+VURJTw== 21332
+IHByb2o= 21333
+LXByZQ== 21334
+W3U= 21335
+RmVhdHVyZXM= 21336
+IGlzRXF1YWw= 21337
+QmluYXJ5 21338
+c2ln 21339
+IGNvbmZ1c2lvbg== 21340
+IEhhdA== 21341
+IGt0w7M= 21342
+LmNvbmZpZ3VyZQ== 21343
+TU9O 21344
+L2VkaXQ= 21345
+X0FkZA== 21346
+LHRydWU= 21347
+IGNsaQ== 21348
+RXJyb3JNZXNzYWdl 21349
+LWxvYWRlcg== 21350
+RGltZW5zaW9ucw== 21351
+dWx0aXBseQ== 21352
+IHshIQ== 21353
+IFNxbENvbW1hbmQ= 21354
+IHNwb2tlbg== 21355
+IHBpY3M= 21356
+IHRveQ== 21357
+KEtleQ== 21358
+IExvb3A= 21359
+2Kg= 21360
+RUFUVVJF 21361
+aW5jdGlvbg== 21362
+X3NldHVw 21363
+d3JhcHBlcg== 21364
+IHRvbmc= 21365
+Y3VsYXI= 21366
+T3B0 21367
+LlBs 21368
+PSIs 21369
+KGxlbmd0aA== 21370
+dW1u 21371
+IGNocm9t 21372
+IHNldmVudA== 21373
+IElsbGVnYWxBcmd1bWVudEV4Y2VwdGlvbg== 21374
+CXN0YXJ0 21375
+IGJlZ3Vu 21376
+Q0VQVElPTg== 21377
+ZGF0YXNldA== 21378
+IEZhaWxlZA== 21379
+Y29scw== 21380
+IGtuZWU= 21381
+aW1vcmU= 21382
+LnNwbGljZQ== 21383
+c2hlbGw= 21384
+aWdnZXJz 21385
+IHRoZW1lcw== 21386
+IERK 21387
+IEFzc2lzdGFudA== 21388
+LSQ= 21389
+TWF5YmU= 21390
+IG9yZGVyaW5n 21391
+IEludGVsbGlnZW5jZQ== 21392
+IE1hc3NhY2h1c2V0dHM= 21393
+IGZhaWxpbmc= 21394
+ZWxzb24= 21395
+R3JlYXQ= 21396
+PWk= 21397
+LnJlc3Q= 21398
+IGludml0ZQ== 21399
+LWRpc2FibGU= 21400
+Lkdyb3VwQm94 21401
+4oCZZXN0 21402
+IHRhY2tsZQ== 21403
+Z3Y= 21404
+ZXR0ZXI= 21405
+ICksDQo= 21406
+X3J1bGVz 21407
+Lndhcm4= 21408
+ZnVuY3Rpb25z 21409
+IENocmlzdGlhbnM= 21410
+IGJhY2tlZA== 21411
+IHNsaWRlcg== 21412
+IGVuam95aW5n 21413
+bmVzdA== 21414
+IGhpag== 21415
+X21z 21416
+Ly8q 21417
+QW5ub3RhdGlvbnM= 21418
+IFZhcmlhYmxlcw== 21419
+PFY= 21420
+KHNlcnZlcg== 21421
+IE9yYWNsZQ== 21422
+ZWxlbWVudHM= 21423
+IG9yZ2FuaXNhdGlvbg== 21424
+X3BvaW50ZXI= 21425
+IEhlYWRlcnM= 21426
+W2Q= 21427
+IGRlYWRsaW5l 21428
+aXNzYQ== 21429
+IGtuaWZl 21430
+IE5BU0E= 21431
+IEhlaWdodA== 21432
+IEFzeW5j 21433
+IHZlbnVl 21434
+LmRvbQ== 21435
+Ym91cm5l 21436
+IEhhd2Fp 21437
+IG1lbW8= 21438
+aWN0aW9ucw== 21439
+IHN1cnZlaWxsYW5jZQ== 21440
+b21p 21441
+L2Fzc2V0cw== 21442
+IGVkdQ== 21443
+xJs= 21444
+IHJvc3Rlcg== 21445
+IGhpcmVk 21446
+IFRvaw== 21447
+IHBsYWNlbWVudA== 21448
+dXJhdGlvbnM= 21449
+IHNldFN0YXRl 21450
+IE1hZ2F6aW5l 21451
+IGhvcnJvcg== 21452
+VHJ5 21453
+IGxhZw== 21454
+IEV2ZXJ5b25l 21455
+dGh1cg== 21456
+KSk7DQoNCg== 21457
+LnJldHVybg== 21458
+IHN5bXA= 21459
+4paI4paI 21460
+IG5pZ2h0cw== 21461
+d29ya2Vy 21462
+IGFsZQ== 21463
+ZW5uZXNzZWU= 21464
+LnN0ZXA= 21465
+IHN5bmNocm9uaXplZA== 21466
+b3VyaQ== 21467
+RG9lcw== 21468
+LmNoYW5nZQ== 21469
+Zm9u 21470
+LnNldEJhY2tncm91bmQ= 21471
+aXJjdWxhcg== 21472
+Ky0= 21473
+IENJQQ== 21474
+IEphbmU= 21475
+IFNpbWlsYXI= 21476
+LUk= 21477
+bGV2ZWxhbmQ= 21478
+IHByb3NwZWN0 21479
+X2ZvdW5k 21480
+CWNvbG9y 21481
+LkRpYWdub3N0aWNz 21482
+IGFubm91bmNl 21483
+IGFzc3VtZXM= 21484
+L3Ry 21485
+IGJk 21486
+IENhcmJvbg== 21487
+IGFuYWx5cw== 21488
+LmRlc3Q= 21489
+bmlr 21490
+IExpZQ== 21491
+LWluZGV4 21492
+RHJhd2FibGU= 21493
+IFRBRw== 21494
+IHRyaWFuZ2xl 21495
+X0ZMT0FU 21496
+CQkgICAgIA== 21497
+LmJsYWNr 21498
+dnVl 21499
+Y3VyYWN5 21500
+IGFmZmVjdHM= 21501
+IHN1cmVseQ== 21502
+U2xpZGVy 21503
+dWtp 21504
+Y2VyeQ== 21505
+IHVudGVy 21506
+LnByb2ZpbGU= 21507
+b3Jkb24= 21508
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA= 21509
+bGVhdmU= 21510
+IHNtYXJ0cGhvbmU= 21511
+Z2ll 21512
+IGNvbnNwaXI= 21513
+IHR1dG9yaWFs 21514
+57G7 21515
+IGNhYg== 21516
+IFN1bW1hcnk= 21517
+KgoK 21518
+w6Ro 21519
+IlRoaXM= 21520
+IHNsaWRlcw== 21521
+Ijwv 21522
+LmRldg== 21523
+Jzw= 21524
+IFJpbmc= 21525
+xYJh 21526
+IGtvdGxpbg== 21527
+LmR1bXBz 21528
+IGJhc3M= 21529
+7Is= 21530
+UE9JTlQ= 21531
+IHV0dGVy 21532
+IMOpcw== 21533
+LmZ1bGw= 21534
+T0xM 21535
+IGNlcmVtb255 21536
+c2xvdA== 21537
+IGFpbXM= 21538
+dG9vbHRpcA== 21539
+LnNjb3Jl 21540
+LWRk 21541
+IHByb3g= 21542
+UmVjb2duaXplcg== 21543
+ZHluYW1pYw== 21544
+w6RuZA== 21545
+L3N0ZA== 21546
+RFU= 21547
+IE5vdEltcGxlbWVudGVk 21548
+KCItLQ== 21549
+UkFX 21550
+IGV0aG5pYw== 21551
+YW5ubw== 21552
+IGNoYW1waW9uc2hpcA== 21553
+LHNlbGY= 21554
+IGFjY2VwdGFibGU= 21555
+IFNwcml0ZQ== 21556
+W3R5cGU= 21557
+w7xo 21558
+IFZL 21559
+KGpQYW5lbA== 21560
+aXRy 21561
+66A= 21562
+YXVyYQ== 21563
+IGZhY3VsdHk= 21564
+YXZlcnM= 21565
+IFJlY29yZHM= 21566
+LlNlY3VyaXR5 21567
+IGNvbnN0cmFpbnQ= 21568
+LkJs 21569
+VWludA== 21570
+YmFsYW5jZQ== 21571
+IGNvbW1l 21572
+IE5paw== 21573
+U3VwcHJlc3NXYXJuaW5ncw== 21574
+IE9jZWFu 21575
+X0lk 21576
+RGF0YVNldA== 21577
+IGluc2VydGVk 21578
+IjsNCg0K 21579
+4oCz 21580
+aXBwZXQ= 21581
+IGFubml2ZXJzYXJ5 21582
+IHJldGlyZWQ= 21583
+b3JjaA== 21584
+IHBlcnBldA== 21585
+XEZvcm0= 21586
+IGludm9sdmVtZW50 21587
+X3VzZXJuYW1l 21588
+YWxlbQ== 21589
+X1NFUlZJQ0U= 21590
+IEluZGlhbmE= 21591
+IGNpZ2FyZXQ= 21592
+YXJ0eg== 21593
+IFJD 21594
+IG1lYXN1cmVtZW50cw== 21595
+572u 21596
+IGFmZmlsaWF0ZQ== 21597
+YWNpb25hbA== 21598
+LXNlY3Rpb24= 21599
+X2NvbnRyb2xsZXI= 21600
+dmFyZA== 21601
+X2Vs 21602
+IFRveQ== 21603
+PFA= 21604
+TWFjaGluZQ== 21605
+w7ptZXI= 21606
+IFllYWg= 21607
+IllvdQ== 21608
+IG1vbA== 21609
+LkNs 21610
+Y29udHJvbGxlcnM= 21611
+IHN1c3BlbmRlZA== 21612
+Kys7Cgo= 21613
+QVRU 21614
+IHByb2plY3Rpb24= 21615
+UGFkZGluZw== 21616
+Lm1hdGg= 21617
+ZmFjdG9yeQ== 21618
+IGdhbW1h 21619
+KCk+ 21620
+Y3ljbGU= 21621
+IEJ1bGw= 21622
+cGF0aHM= 21623
+IHVucA== 21624
+IHZpZXdEaWRMb2Fk 21625
+X01vZGVs 21626
+IGFzc2VydFRydWU= 21627
+IHJhdGVk 21628
+RGVjbA== 21629
+dmVydGVk 21630
+IERhdA== 21631
+YnJldw== 21632
+IHBvaW50aW5n 21633
+TXM= 21634
+IFBvaW50ZXI= 21635
+KSc= 21636
+X25vbg== 21637
+IFNFQw== 21638
+IHllYWg= 21639
+Z2VuY3k= 21640
+aW5pdGlhbGl6ZQ== 21641
+Zmx5 21642
+W3Bvcw== 21643
+LGc= 21644
+VGVsZQ== 21645
+IGpva2U= 21646
+IGNsYXVzZQ== 21647
+LmZpbmRCeUlk 21648
+ZW5lcw== 21649
+KGluc3RhbmNl 21650
+wqM= 21651
+IHNsaWM= 21652
+X2hvbWU= 21653
+ICovfQo= 21654
+X3BhZ2Vz 21655
+KHNlcnZpY2U= 21656
+UlA= 21657
+IEFtb25n 21658
+LmdldEN1cnJlbnQ= 21659
+44K5 21660
+IHNsZWU= 21661
+PTw/ 21662
+X3Byb3A= 21663
+Zmx1c2g= 21664
+IE1N 21665
+QmVs 21666
+Tm90ZXM= 21667
+ICovCgoK 21668
+IHJo 21669
+VGFibGVz 21670
+IEp1 21671
+IFwNCg== 21672
+bGljaGVu 21673
+IEluc3VyYW5jZQ== 21674
+XQoKCg== 21675
+IGNvb3Blcg== 21676
+4oCUdGhl 21677
+Lm1hdA== 21678
+IGZvaQ== 21679
+KGF1dG8= 21680
+TWFyZ2lu 21681
+IHJlc2lkZW5jZQ== 21682
+IEhpc3Rvcg== 21683
+IH49 21684
+RGk= 21685
+ICcpCg== 21686
+IGV4Y2x1ZGU= 21687
+LkRyb3A= 21688
+JyI7Cg== 21689
+IGNvYw== 21690
+X3VwbG9hZA== 21691
+SGlkZQ== 21692
+IFVua25vd24= 21693
+IG5vcm1hbGl6ZQ== 21694
+X3JldA== 21695
+LicKCg== 21696
+Lm5vZGVz 21697
+LkRhdGFTb3VyY2U= 21698
+YmxlbXM= 21699
+IGdlbnRsZQ== 21700
+OiQ= 21701
+JykpOwoK 21702
+LlJlc291cmNlcw== 21703
+4og= 21704
+IFRhaQ== 21705
+VkVE 21706
+IEd1bg== 21707
+bGVhbnM= 21708
+IERvYw== 21709
+LlZvaWQ= 21710
+IEFtZW5kbWVudA== 21711
+ZXNzZWQ= 21712
+IHJlY2lwaWVudA== 21713
+Lk5vZGU= 21714
+b3Zv 21715
+IGFsaWduSXRlbXM= 21716
+IFVuaXR5 21717
+IFJvbWU= 21718
+YnVybg== 21719
+IHZvbHRhZ2U= 21720
+IFNIQQ== 21721
+IEdPT0Q= 21722
+aGVscGVycw== 21723
+LyoqKi8= 21724
+IGVsaW1pbmF0ZQ== 21725
+d2Fw 21726
+X2FuZ2xl 21727
+IHJlZnVnZWVz 21728
+CWFzc2VydEVxdWFscw== 21729
+IHByb2Jl 21730
+KCcuLi8uLi8= 21731
+eW91cg== 21732
+IG1lcmNo 21733
+VUJMRQ== 21734
+CXJlc3BvbnNl 21735
+X0RFRg== 21736
+IGVudmlyb25tZW50cw== 21737
+b3VzaW5n 21738
+IHJlc3RyaWN0ZWQ= 21739
+IENPTlRSSUJVVE9SUw== 21740
+IGNvbXBhbmlvbg== 21741
+4bqj 21742
+cG93 21743
+dXJ0bGU= 21744
+Ymll 21745
+LlBlcmZvcm0= 21746
+PW4= 21747
+cmVkaXM= 21748
+IGRpdmlkZQ== 21749
+IGNvbGxlY3RpdmU= 21750
+RGlmZg== 21751
+RHluYW1pYw== 21752
+aXNTZWxlY3RlZA== 21753
+YXN0eXBl 21754
+IExvdA== 21755
+IFN0YXRlbWVudA== 21756
+aWNpcGFudA== 21757
+YWto 21758
+IHNlcmlhbGl6ZXI= 21759
+X0NGRw== 21760
+YXZhbA== 21761
+IHZpZXdlcnM= 21762
+IEZP 21763
+T2Nj 21764
+IHJvYnVzdA== 21765
+IE1pdA== 21766
+X0FORA== 21767
+VHJhbnNpdGlvbg== 21768
+dW5hdGU= 21769
+IHByaWRl 21770
+IGRyYW1hdGlj 21771
+IFBhZ2Vz 21772
+X3R1cGxl 21773
+IGNvcGllZA== 21774
+bW4= 21775
+IG91Z2h0 21776
+IGVxdWFsaXR5 21777
+X2hhcw== 21778
+X1dS 21779
+ZW1p 21780
+IHN1cmdl 21781
+aWxsbw== 21782
+KCl9 21783
+IHBlcmY= 21784
+dWxr 21785
+IGludmVzdG1lbnRz 21786
+IGdlbmVyYXRpb25z 21787
+IHJlc29ydA== 21788
+IHRydXN0ZWQ= 21789
+X2ZyZXE= 21790
+IGZvcm1h 21791
+QVRJT05T 21792
+IEh1 21793
+IEdyYWQ= 21794
+X2NwdQ== 21795
+ICIsCg== 21796
+cmVzc2U= 21797
+KCoq 21798
+IGhlcmVieQ== 21799
+IGxha2U= 21800
+X1NUQUNL 21801
+IEJ1cmVhdQ== 21802
+IHN1c3RhaW5hYmxl 21803
+IFBF 21804
+IGRlaQ== 21805
+IEFuc3dlcg== 21806
+UGx1cw== 21807
+L3dlYg== 21808
+IHN0ZXI= 21809
+IG1vdW50ZWQ= 21810
+X2NsZWFy 21811
+Zm9ubw== 21812
+aWFuY2Vz 21813
+X2ZpbmQ= 21814
+IGNvbmZ1c2Vk 21815
+X2Jpbg== 21816
+REVDTA== 21817
+IGluc3RhbnRseQ== 21818
+VUlU 21819
+X0RP 21820
+U2V0dXA= 21821
+a2Vl 21822
+X3ByaW50Zg== 21823
+X3N0bXQ= 21824
+IFN0ZWFt 21825
+cHJvZg== 21826
+bHY= 21827
+IHNvbHZpbmc= 21828
+bGF0b3I= 21829
+b3R5cGVz 21830
+QW5kcm9pZA== 21831
+X2VzY2FwZQ== 21832
+TGVhdmU= 21833
+LmdldFRpbWU= 21834
+aWZz 21835
+IGNvdg== 21836
+IENsYXNzaWM= 21837
+LWRhcms= 21838
+RGlzcGF0Y2hlcg== 21839
+LWdyYXk= 21840
+IFBhbGVzdGluaWFu 21841
+LmRlZXA= 21842
+IEluamVjdA== 21843
+IHJlZmxlY3Rpb24= 21844
+IGh5cG8= 21845
+Y29uc3RydWN0b3I= 21846
+LmFwcGxpY2F0aW9u 21847
+eXN0ZXI= 21848
+4pU= 21849
+c2Nob29s 21850
+IENvdw== 21851
+IGZvb3RhZ2U= 21852
+LWlucw== 21853
+IC8qKjw= 21854
+YXRvbQ== 21855
+IHByb2ZpdHM= 21856
+IGJvb2tpbmc= 21857
+X3RocmVzaG9sZA== 21858
+IExpdmVy 21859
+IGNpdGl6ZW4= 21860
+Yng= 21861
+IFN0b3Jt 21862
+IENvcnA= 21863
+IHdpZGVy 21864
+Iikpewo= 21865
+X0FDVElPTg== 21866
+aW9ycw== 21867
+YWlzZXM= 21868
+Om5vbmU= 21869
+IGNpdGVk 21870
+ImZtdA== 21871
+QXVn 21872
+Y29tYg== 21873
+IHdoaXRlcw== 21874
+IHNlc3M= 21875
+Xl4= 21876
+aWdodGg= 21877
+IHRhbmc= 21878
+X0NBUA== 21879
+IGludGVyYWN0aW9ucw== 21880
+IGdhcmQ= 21881
+IHByaXpl 21882
+YWZrYQ== 21883
+VHJp 21884
+XEVsb3F1ZW50 21885
+IER5bmFtaWM= 21886
+55CG 21887
+Z3A= 21888
+IHJlYWxt 21889
+IE5p 21890
+IEVkd2FyZA== 21891
+IGlkZW50aWZpY2F0aW9u 21892
+IHBoeXNpY2FsbHk= 21893
+5pys 21894
+IHBpY2tz 21895
+LWZyaWVuZGx5 21896
+PGk= 21897
+aWZpY2U= 21898
+X0FQ 21899
+TG9nZ2Vk 21900
+fSIu 21901
+L3V0aWxz 21902
+IC4uLi4= 21903
+RU5USUFM 21904
+KEFjdGlvbg== 21905
+J10pOwoK 21906
+IHByb3Rlc3Rz 21907
+b2xpbmU= 21908
+X1JFVFVSTg== 21909
+IHBvcHVsYXRpb25z 21910
+IFJhaW4= 21911
+ZHVw 21912
+b3JpYWw= 21913
+IEF1dGhvcml0eQ== 21914
+X2V4cHI= 21915
+LnVz 21916
+IGNvcnJ1cHQ= 21917
+CWltcG9ydA== 21918
+PGNoYXI= 21919
+IExFRlQ= 21920
+IGNhYmluZXQ= 21921
+IG5laWdoYm91cg== 21922
+IFNxbFBhcmFtZXRlcg== 21923
+YXR0ZXJlZA== 21924
+ZW1pYQ== 21925
+IHJldmlld2Vk 21926
+IEhlbGxv 21927
+YmxvY2tz 21928
+KHByb2Nlc3M= 21929
+IG9ic2VydmF0aW9u 21930
+cmF0aW5n 21931
+Lmdsb2JhbA== 21932
+IHByZWZlcmVuY2U= 21933
+LnByZXBhcmU= 21934
+IGRvemVucw== 21935
+V29ya2Vy 21936
+IGNhbGN1bGF0aW9u 21937
+IFRvd2Vy 21938
+YWlyeQ== 21939
+IElTTw== 21940
+IGh1bWFuaXR5 21941
+LmFzSW5zdGFuY2VPZg== 21942
+IGR5cw== 21943
+IHBpZXI= 21944
+aWd1ZQ== 21945
+IGFzc29jaWF0ZQ== 21946
+IGludGlt 21947
+bm90aWZ5 21948
+KHt9LA== 21949
+IFJlcHJlc2VudA== 21950
+cGhldA== 21951
+c2V1ZG8= 21952
+64uI64uk 21953
+LlBvc2l0aW9u 21954
+IGNsb3N1cmU= 21955
+KGNsYXNz 21956
+CXRpbWU= 21957
+IE9yYW5nZQ== 21958
+X29wcw== 21959
+IHBvcHVw 21960
+IEltcHJv 21961
+X3NlY3JldA== 21962
+IEV1 21963
+LnNldExheW91dA== 21964
+dWxseQ== 21965
+IHNjcmV3 21966
+IFNpemVk 21967
+IENPTVA= 21968
+IG5vdGlmaWNhdGlvbnM= 21969
+VHJhbnNmZXI= 21970
+RW1pdHRlcg== 21971
+KG9sZA== 21972
+bGV0aWM= 21973
+IC0KCg== 21974
+IHBhbmlj 21975
+IExDRA== 21976
+cnVsZXM= 21977
+IGFmZmFpcnM= 21978
+IEZpbGw= 21979
+X0lSUQ== 21980
+YXR0YWNobWVudA== 21981
+IHZvbQ== 21982
+PGJ1dHRvbg== 21983
+IHRleHRz 21984
+IGFjdGl2YXRlZA== 21985
+LmFjY2Vzcw== 21986
+KHJlYWRlcg== 21987
+VGVt 21988
+IGNvcm9u 21989
+cm9waA== 21990
+RE1JTg== 21991
+IGVtZXJnZWQ= 21992
+IGluZmxhdGVy 21993
+IEluZGVwZW5kZW50 21994
+b3Jpb3Vz 21995
+IERlbGhp 21996
+IGdseXBoaWNvbg== 21997
+IENhcmw= 21998
+U2k= 21999
+IGV4cGVyaW1lbnRhbA== 22000
+LmJhcg== 22001
+SUFO 22002
+IHNxbGl0ZQ== 22003
+Y2Npw7Nu 22004
+X0JBQ0s= 22005
+LG5hbWU= 22006
+aG9ydA== 22007
+IHRlbnM= 22008
+6rM= 22009
+dXNpdmU= 22010
+IGdlbnVpbmU= 22011
+IGJ1Y2s= 22012
+L2Rpdg== 22013
+LnJvb20= 22014
+X05FVw== 22015
+ZXN0YWRv 22016
+IEFyaw== 22017
+b2NvbHM= 22018
+LmdlbmVyYXRl 22019
+dG91Y2g= 22020
+Zml4ZWQ= 22021
+ICco 22022
+IHJlZmVycmluZw== 22023
+IG92ZXJ3aGVsbWluZw== 22024
+KGxldA== 22025
+IGZ1ZQ== 22026
+X0VOVg== 22027
+d29tYW4= 22028
+RmlndXJl 22029
+YW5pbWF0ZQ== 22030
+IE1vcnQ= 22031
+IGxvbmdlc3Q= 22032
+Y29sbg== 22033
+VE0= 22034
+Ol8= 22035
+cmllbA== 22036
+LE4= 22037
+IFJBTQ== 22038
+IGp1c3RpZnlDb250ZW50 22039
+IGFjdGl2ZWx5 22040
+L3B1YmxpYw== 22041
+IOuw 22042
+R2l2ZW4= 22043
+T1RBTA== 22044
+5aSx6LSl 22045
+U2VxdWVudGlhbA== 22046
+IHN1cHBsZW1lbnQ= 22047
+LmFi 22048
+IGNhdGVnb3I= 22049
+fX0sCg== 22050
+YWhhbg== 22051
+J3Vu 22052
+b3NpdHk= 22053
+IGFjY29tcGxpc2g= 22054
+VXRpbGl0aWVz 22055
+LnZpZXdz 22056
+LmNu 22057
+Y2VpbA== 22058
+IENCRA== 22059
+IFJG 22060
+UEVH 22061
+IEdpZnQ= 22062
+QVlT 22063
+IFdJTg== 22064
+cGFuaWVk 22065
+IMWf 22066
+IG9ic2VydmVy 22067
+IHNtZWxs 22068
+IHs6 22069
+TGlua2Vk 22070
+PlsK 22071
+b2xlcg== 22072
+IGxpYmVydA== 22073
+IGAK 22074
+IHdlbm4= 22075
+bGF0ZWQ= 22076
+IGltbXVuZQ== 22077
+KE5vZGU= 22078
+IFByb2JsZW0= 22079
+IEFicw== 22080
+bG9ncw== 22081
+IC4uLw== 22082
+IEFEQw== 22083
+IH19Ij4K 22084
+PicpOwo= 22085
+PWI= 22086
+IFdpbmQ= 22087
+bGFob21h 22088
+IGFsbG9jYXRl 22089
+b3JpYW4= 22090
+IHByZXNjcmlwdGlvbg== 22091
+LXF1YWxpdHk= 22092
+IE1heW9y 22093
+aW5lbHk= 22094
+ZW5kZm9yZWFjaA== 22095
+IENvbXBsZXg= 22096
+a29t 22097
+VFk= 22098
+XV0u 22099
+LlN0eWxl 22100
+X21hbnk= 22101
+JywnJA== 22102
+IGJhcnJpZXI= 22103
+IEZldGNo 22104
+IE1hcnZlbA== 22105
+IHJlc2lzdA== 22106
+0L7Qs9C+ 22107
+YmlkZGVu 22108
+IFJ1bm5hYmxl 22109
+OmZhbHNl 22110
+IGJ1aWxkcw== 22111
+IFN0YWdl 22112
+IGR1Yg== 22113
+ZW1wbw== 22114
+LnNpdGU= 22115
+OwoKCgo= 22116
+IERlbnZlcg== 22117
+IHJldmVs 22118
+IHRyaWdnZXJlZA== 22119
+IGRpY2U= 22120
+X2ZhaWw= 22121
+IGdj 22122
+CVg= 22123
+IFRocm93YWJsZQ== 22124
+LnJvdXRlcg== 22125
+IFJldm9sdXRpb24= 22126
+0YDQsA== 22127
+X05PTg== 22128
+n6U= 22129
+IGVsZGVy 22130
+IGFicm9hZA== 22131
+INC1 22132
+IEFkdWx0 22133
+Ymxy 22134
+Z2x5cGhpY29u 22135
+IHByb21vdGluZw== 22136
+IGl6 22137
+IFNvbGlk 22138
+X2xvYWRlcg== 22139
+ZWFybHk= 22140
+LmVuYWJsZWQ= 22141
+LWVkaXQ= 22142
+IFVM 22143
+X3BsYXk= 22144
+IEludGVycnVwdA== 22145
+IGFkdmFudGFnZXM= 22146
+dWNsZQ== 22147
+IG1lY2hhbmljYWw= 22148
+LnRhYmxlTGF5b3V0UGFuZWw= 22149
+IFdvcmtpbmc= 22150
+IGFub255bW91cw== 22151
+UmF0aW5n 22152
+aWdpb3Vz 22153
+X3Bob25l 22154
+LmFkZEFjdGlvbkxpc3RlbmVy 22155
+IGZyYW4= 22156
+dW5kZW4= 22157
+ICopJg== 22158
+X2Jvb2w= 22159
+dWxhdGl2ZQ== 22160
+IGNvbmU= 22161
+IE11bHQ= 22162
+IG3Dtg== 22163
+IEZvcndhcmQ= 22164
+XSk6Cg== 22165
+IGNvbnZpbmNlZA== 22166
+YWN0ZWQ= 22167
+44GT 22168
+IENvbmZpZ3VyZQ== 22169
+IGNlaWxpbmc= 22170
+RGVy 22171
+IHBhc3NlbmdlcnM= 22172
+R3JvdXBz 22173
+IHNvY2Nlcg== 22174
+L1c= 22175
+YXZpb3Jz 22176
+c3dpdGg= 22177
+IFpvbmU= 22178
+Lk9wdGlvbnM= 22179
+IE1vbQ== 22180
+aWVkZXI= 22181
+QXJyYXlz 22182
+IHRyZWF0bWVudHM= 22183
+IHByb3RlY3Rpbmc= 22184
+ZmFj 22185
+IHBpY2tsZQ== 22186
+QnV0dG9uSXRlbQ== 22187
+IGJsb2NraW5n 22188
+c3RyYXI= 22189
+w7I= 22190
+IEV4cG9ydA== 22191
+IHRocmV3 22192
+b3R0YQ== 22193
+IEJBU0U= 22194
+Lndz 22195
+LkxFQURJTkc= 22196
+b3JkZXJCeQ== 22197
+X2RlbGF5 22198
+IFB1 22199
+LmRsbA== 22200
+IENob29zZQ== 22201
+UG9saWNl 22202
+IEJFR0lO 22203
+Ym94ZXM= 22204
+IGRpYW1vbmQ= 22205
+LGw= 22206
+IAkJCQ== 22207
+IGN1cmlvdXM= 22208
+dHY= 22209
+IGVyb3Rpc2NoZQ== 22210
+YWNrYWdlcw== 22211
+CVNldA== 22212
+VGljaw== 22213
+LmJvcmRlcg== 22214
+c3RhdGljbWV0aG9k 22215
+IGNoZXI= 22216
+aW52b2ljZQ== 22217
+IGNydQ== 22218
+IGRlZmVjdA== 22219
+X21ldGFkYXRh 22220
+cmVsYXRpb24= 22221
+aWthbg== 22222
+W04= 22223
+KFF0 22224
+KEJhc2U= 22225
+5oGv 22226
+YmVhdA== 22227
+IEVtcHR5 22228
+CW8= 22229
+X3NoaWZ0 22230
+IHJlZ3JldA== 22231
+VGhvc2U= 22232
+Q2VudA== 22233
+IFBvcnR1Zw== 22234
+IElzbGFuZHM= 22235
+IFRJTUU= 22236
+TWFuYWdlbWVudA== 22237
+LXNw 22238
+w6ptZQ== 22239
+IG5vdGlvbg== 22240
+dW5pZnU= 22241
+UEs= 22242
+6KGM 22243
+IENVUkxPUFQ= 22244
+XCJc 22245
+VVY= 22246
+57o= 22247
+ZHJh 22248
+Y291 22249
+PWA= 22250
+IERlc3Ryb3k= 22251
+cnA= 22252
+LmNhbmNlbA== 22253
+R0c= 22254
+cnVudGltZQ== 22255
+IFZ1ZQ== 22256
+IHByb2dyZXNzaXZl 22257
+L3NlcnZpY2Vz 22258
+IHJ1bm5lcg== 22259
+X0ZSQU1F 22260
+LlRvb2xTdHJpcE1lbnVJdGVt 22261
+ICcsJw== 22262
+ZGVsYXk= 22263
+PXV0Zg== 22264
+IHNjcmVlbmluZw== 22265
+IHB1bGxpbmc= 22266
+b21hcw== 22267
+IGFudGg= 22268
+LW5ldw== 22269
+L2xvY2Fs 22270
+IGlQYWQ= 22271
+IHR3aXR0ZXI= 22272
+IGR5aW5n 22273
+IGhlYXZlbg== 22274
+IFVJbnQ= 22275
+IFNlbmF0b3I= 22276
+IHByZXN1bQ== 22277
+IFdhbGtlcg== 22278
+IG92ZXJjb21l 22279
+ZXRlY3Rpb24= 22280
+IGVtYmFycmFzcw== 22281
+Q2hpbmE= 22282
+SW5jbHVkZQ== 22283
+Uk9MTA== 22284
+IGRhdGFUeXBl 22285
+RGF2aWQ= 22286
+4Lij 22287
+bG9w 22288
+LW1vbnRo 22289
+IHNjYXI= 22290
+IFNhZmU= 22291
+ICoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKio= 22292
+IGFjY2Vzc29yaWVz 22293
+IHJhbXA= 22294
+X1VTRQ== 22295
+IGNvbnRyYWQ= 22296
+KSldCg== 22297
+IHByZXN0 22298
+IEhS 22299
+IFJhcA== 22300
+IHVzaXpl 22301
+IGNhcGFiaWxpdHk= 22302
+IGNvcnQ= 22303
+LW5leHQ= 22304
+IGJ1cmRlbg== 22305
+X3JlYWRlcg== 22306
+IEBA 22307
+cmVndWxhcg== 22308
+IEth 22309
+TUFO 22310
+IGFzdHI= 22311
+ICcnKQo= 22312
+IGZlZA== 22313
+IHBhcnNpbmc= 22314
+IFllYXJz 22315
+IGJyb2tlcg== 22316
+Ijp7Ig== 22317
+IGFrdA== 22318
+SW52ZW50b3J5 22319
+YWJlbGVk 22320
+IGFyZ3BhcnNl 22321
+KioqKioqKgo= 22322
+dmVyc2F0aW9u 22323
+IGNvcmQ= 22324
+IFRp 22325
+IGhvcGVmdWxseQ== 22326
+IGFo 22327
+dmVyYg== 22328
+IHN0b2xlbg== 22329
+LkVudHJ5 22330
+IGV4cGVjdGluZw== 22331
+T3JpZW50YXRpb24= 22332
+IHBvd2VyZWQ= 22333
+IHBlcnNpc3Q= 22334
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA= 22335
+J10pOw== 22336
+JykpLAo= 22337
+IENhc2g= 22338
+CWl0ZW0= 22339
+Z3JhZGVz 22340
+cm9wb2w= 22341
+YmFzaWM= 22342
+ICIpOw0K 22343
+IGF3YXJkcw== 22344
+KHJhbmdl 22345
+LWFsbA== 22346
+IElCT3V0bGV0 22347
+IEluZGVlZA== 22348
+LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQ== 22349
+IHN0b21hY2g= 22350
+IGZsb3dlcg== 22351
+IHNldw== 22352
+X3RpbWVz 22353
+YXZpcw== 22354
+UVN0cmluZw== 22355
+IFJvdXRlcw== 22356
+X3Byb3Q= 22357
+IGNvbWVkeQ== 22358
+IGxvZ291dA== 22359
+IHdvb2Rlbg== 22360
+IHBvc3Rlcg== 22361
+cGllY2U= 22362
+LkpvaW4= 22363
+IFBvaw== 22364
+Y2Vsb25h 22365
+bXV0ZXg= 22366
+Ow0KDQoNCg== 22367
+IHN0cmlrZXM= 22368
+TG9hZGVk 22369
+KWFyZw== 22370
+ZXNh 22371
+VW5pdGVk 22372
+RXA= 22373
+UEVMTA== 22374
+IEF0bGFudGlj 22375
+dWxsZXQ= 22376
+YXBwbGU= 22377
+IHNldHRsZWQ= 22378
+YWNvbg== 22379
+IHByaW50ZXI= 22380
+IEdD 22381
+5a6a 22382
+IHJlbmRlcmVk 22383
+LOKAmQ== 22384
+aGVpdA== 22385
+c29jaWFs 22386
+Lmdl 22387
+IFJpY2s= 22388
+IFV0YWg= 22389
+Z290 22390
+b25pY2Fs 22391
+IFNjcm9sbA== 22392
+IFNjaWVuY2Vz 22393
+IGp1Zw== 22394
+IGFtcGw= 22395
+ZW50aQ== 22396
+TEVGVA== 22397
+IHRhYnM= 22398
+IGVub3Jtb3Vz 22399
+LmdldEtleQ== 22400
+bG9jYXRl 22401
+LkVY 22402
+LnN0b3JhZ2U= 22403
+Lldl 22404
+IHRvYXN0 22405
+IEFkZGl0aW9uYWxseQ== 22406
+IE5PVw== 22407
+X1VQREFURQ== 22408
+IHRyYW5zZmVycmVk 22409
+dGhh 22410
+LkRpc3BsYXk= 22411
+X3Vp 22412
+SURFTw== 22413
+IG1lYW5pbmdmdWw= 22414
+IE1vc2Nvdw== 22415
+LHRoaXM= 22416
+IFZpY3Rvcmlh 22417
+5pS5 22418
+INCf 22419
+LnN0YWNr 22420
+IEJhcm4= 22421
+cGFyZWRTdGF0ZW1lbnQ= 22422
+OnN0cmluZw== 22423
+IGJpag== 22424
+IFNUQVRF 22425
+IGVtcGxveWVycw== 22426
+CWlucHV0 22427
+KHw= 22428
+IGxleA== 22429
+aW52b2tl 22430
+CW51bQ== 22431
+Kyss 22432
+YXRpYWw= 22433
+b3JzZXM= 22434
+IGZvcms= 22435
+X3R4dA== 22436
+IEFudG9uaW8= 22437
+ICg8 22438
+YXZlcnNl 22439
+IGRldmFzdA== 22440
+44CA 22441
+LkRlYw== 22442
+IEdhcmQ= 22443
+L3Vp 22444
+LiU= 22445
+dHJp 22446
+IHJvbGxlZA== 22447
+VmFsdWVQYWly 22448
+aXR0ZW4= 22449
+IFRoZXI= 22450
+IHZyb3U= 22451
+IEZsb3c= 22452
+IEZpbmFuY2U= 22453
+IENvbWI= 22454
+SEM= 22455
+LnNldFZpc2libGU= 22456
+aXNs 22457
+IHBr 22458
+IHVwc2V0 22459
+KHJhdw== 22460
+IFZpY2U= 22461
+ZWF0dXJlcw== 22462
+IExhbmc= 22463
+TG9va2luZw== 22464
+IEFTVA== 22465
+IHRyaXBz 22466
+IEp1c3Rpbg== 22467
+YnJvd3Nlcg== 22468
+PSInLiQ= 22469
+LnZlcnRpY2Vz 22470
+LWNv 22471
+fS97 22472
+ID8s 22473
+IERvbWlu 22474
+IEJlbGc= 22475
+Ijw= 22476
+IHN1cHBvc2U= 22477
+YWRkeQ== 22478
+IHdhbGtz 22479
+RVJSVQ== 22480
+X2ZpbHRlcnM= 22481
+UHJlZmVycmVk 22482
+c2NlbmU= 22483
+0LXRgQ== 22484
+IEFmZmFpcnM= 22485
+ICIjew== 22486
+IG9uU3VibWl0 22487
+IHN0b2Nrcw== 22488
+L3ZpZXc= 22489
+Z3JlZQ== 22490
+LWdldA== 22491
+aGl0 22492
+Sm8= 22493
+LmdldEM= 22494
+SW5pdGlhbGl6ZWQ= 22495
+0YLQuA== 22496
+Y3V0cw== 22497
+KFR5cGU= 22498
+IEFncmVlbWVudA== 22499
+IFZpZXRuYW0= 22500
+IC8qIQ== 22501
+IHBpenph 22502
+LXZpZXc= 22503
+X2Vt 22504
+IGxocw== 22505
+IG11eQ== 22506
+IElkZW50 22507
+IEZyaWVuZHM= 22508
+IGFidW5k 22509
+X0FE 22510
+LnRpbWVzdGFtcA== 22511
+LSc= 22512
+IGR1cGxpY2F0ZQ== 22513
+IGh1bnRpbmc= 22514
+IHJlZ3VsYXRvcnk= 22515
+aWFv 22516
+YW1vdXM= 22517
+IEVudGVydGFpbm1lbnQ= 22518
+W0E= 22519
+aWF0cmlj 22520
+X0NMSUVOVA== 22521
+IEtpZHM= 22522
+L3BrZw== 22523
+QnJlYWs= 22524
+KSkpOwoK 22525
+IFNoYXBl 22526
+IHJlbGF0aW5n 22527
+SW50ZXJydXB0 22528
+YWJsZU9wYWNpdHk= 22529
+ZW1icmU= 22530
+IG15c3Rlcnk= 22531
+IGpvdXJuYWxpc3Rz 22532
+cml0YWJsZQ== 22533
+Lkxpbms= 22534
+IHN0b3BwaW5n 22535
+Q1JFVA== 22536
+LkRC 22537
+IHBvcHVsYXJpdHk= 22538
+IGdldw== 22539
+IGltcHI= 22540
+c2V0VmFsdWU= 22541
+RkxBRw== 22542
+CW1heA== 22543
+IGJha2U= 22544
+d3k= 22545
+IEVjb25vbWlj 22546
+IGVuY29udHI= 22547
+IGZuYW1l 22548
+L2Rl 22549
+UmFuaw== 22550
+IGJ1Z3M= 22551
+LnNt 22552
+IG1lZGlhbg== 22553
+RE9XTg== 22554
+IFN1cmU= 22555
+QXRJbmRleA== 22556
+IERpY2s= 22557
+IChfXw== 22558
+LmRlbHRh 22559
+RnI= 22560
+IHN1Z2dlc3Rpbmc= 22561
+IFJlY3ljbGVyVmlldw== 22562
+LGU= 22563
+U1RBUlQ= 22564
+LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKio= 22565
+eGZvcmQ= 22566
+IHJlY2VpcHQ= 22567
+Q0xBSU0= 22568
+cmVhZG9ubHk= 22569
+IGVuZ2FnaW5n 22570
+Q2E= 22571
+YXNtYQ== 22572
+IGVuc3VyaW5n 22573
+RW5nbGlzaA== 22574
+IFZhbmNvdXZlcg== 22575
+aHl0aA== 22576
+IHB1cmNoYXNpbmc= 22577
+IFBJ 22578
+LndvcmQ= 22579
+KHNw 22580
+LmhvbWU= 22581
+OmRlZg== 22582
+IGdpZw== 22583
+IFZl 22584
+Zm9ydW0= 22585
+IE1pdGNo 22586
+QmF5 22587
+X0ZM 22588
+IHNvbGw= 22589
+X2NvbHVtbnM= 22590
+IG1pbm9yaXR5 22591
+YmlyZA== 22592
+IGhhbmRlZA== 22593
+U1NM 22594
+U1RBVA== 22595
+IG5lcnZvdXM= 22596
+g70= 22597
+IGZpbGVQYXRo 22598
+Q1JFQVRF 22599
+QXc= 22600
+IHBlbnM= 22601
+c2VlZA== 22602
+IENvbXB1dGU= 22603
+b2xr 22604
+IEFzc2V0 22605
+cmVhY2g= 22606
+JyksDQo= 22607
+bmF2aWdhdGlvbg== 22608
+TEY= 22609
+L3V0aWw= 22610
+IFB1Yg== 22611
+IOKU 22612
+Y2lvbg== 22613
+IyMK 22614
+SUlJ 22615
+VGFnTmFtZQ== 22616
+IGFtaWQ= 22617
+cGVybWlzc2lvbg== 22618
+aWZpYWJsZQ== 22619
+eEZGRkZGRkZG 22620
+0L3QuA== 22621
+LkJ1ZmZlcg== 22622
+X2lycQ== 22623
+ZGFyaw== 22624
+IHJldHZhbA== 22625
+LmZpcmU= 22626
+cHJvZHVjdGlvbg== 22627
+Lmxpc3Rlbg== 22628
+IFdlYXRoZXI= 22629
+IGJ1eWVycw== 22630
+Lm5l 22631
+ZXJw 22632
+IFBlbnQ= 22633
+IHdlbGZhcmU= 22634
+IHBhZ2VTaXpl 22635
+IFN0YWRpdW0= 22636
+ZXJ0YQ== 22637
+IGxldg== 22638
+YW1wYQ== 22639
+UGFnZXI= 22640
+IGNoYXJnaW5n 22641
+IE5ldGZsaXg= 22642
+fG51bGw= 22643
+X3JhbmRvbQ== 22644
+LnhwYXRo 22645
+IHN0ZXJl 22646
+IElTSVM= 22647
+cG9uc2Vz 22648
+KGxvYw== 22649
+ZXlvbmQ= 22650
+IE9mZmljaWFs 22651
+IE1hcnlsYW5k 22652
+RGF0YVR5cGU= 22653
+X3Bhcg== 22654
+e30s 22655
+IEVuam95 22656
+X1NISUZU 22657
+IEF3YXJkcw== 22658
+X0VOVFJZ 22659
+IHNlZW1pbmdseQ== 22660
+ZW50aWNhdGU= 22661
+IGhlYXJ0cw== 22662
+XzsKCg== 22663
+IEhJVg== 22664
+IGluZGl2aWQ= 22665
+IEZsYWc= 22666
+X2N0cmw= 22667
+IENhbGxiYWNr 22668
+LHo= 22669
+IEdQVQ== 22670
+CW9iag== 22671
+IFBob2VuaXg= 22672
+IEJVUw== 22673
+IHJ1YmJlcg== 22674
+X0FVVEg= 22675
+IFNvbHV0aW9ucw== 22676
+KGxvY2F0aW9u 22677
+VmFyaWFibGVz 22678
+LnNldEVuYWJsZWQ= 22679
+X2hpZ2g= 22680
+V08= 22681
+R2VzdHVyZQ== 22682
+IHJldHJ5 22683
+IG9iamVjdEZvcktleQ== 22684
+YWxsb3dlZW4= 22685
+IG1vcw== 22686
+IENlbGU= 22687
+IGlra2U= 22688
+KGNlbGw= 22689
+IE1PREU= 22690
+cmVuYQ== 22691
+IGRlc2NyaWJpbmc= 22692
+IHBoaQ== 22693
+IHJk 22694
+IGRlc2VydmU= 22695
+IHdoZWVscw== 22696
+5biC 22697
+IGNyaXRpY3M= 22698
+TmFtZXNwYWNl 22699
+IEZyYQ== 22700
+IAoKCgo= 22701
+IGFsbGE= 22702
+IHJlcXVpcmluZw== 22703
+5pyf 22704
+dXRhdGlvbg== 22705
+IGRlbGF5ZWQ= 22706
+IGFkbWluaXN0cmF0aXZl 22707
+IGJheQ== 22708
+LmhpZGRlbg== 22709
+VGV4 22710
+IGJvdW5kYXJpZXM= 22711
+IF0pOwoK 22712
+IEZvbGxvd2luZw== 22713
+fi8= 22714
+Rmk= 22715
+X2NvbnY= 22716
+X1RJVExF 22717
+IGRlc2Rl 22718
+SUNvbGxlY3Rpb25WaWV3 22719
+QWxpYXM= 22720
+IGJpdGU= 22721
+cGF0aWVudA== 22722
+X0NPTU1BTkQ= 22723
+Q29tcGxldGVk 22724
+CWVsaWY= 22725
+KDw= 22726
+QnVzaW5lc3M= 22727
+IFBvb2w= 22728
+IHB1cnN1ZQ== 22729
+IEJhbg== 22730
+X3N0ZXBz 22731
+X0RFQ0w= 22732
+dW1ibGU= 22733
+IGNvbWJv 22734
+IExheWVy 22735
+Lnhy 22736
+IGR1cA== 22737
+LS0tLS0tLS0t 22738
+IG1vZGlmaWVy 22739
+cm9i 22740
+cmV6 22741
+IGF0aGxldGVz 22742
+VXNlZA== 22743
+d2Vhcg== 22744
+IGxlZ2l0aW1hdGU= 22745
+ICIKCg== 22746
+IGh2 22747
+U3Rk 22748
+IEhvbGQ= 22749
+IHN1cnZpdg== 22750
+IEFsbGlhbmNl 22751
+IEVhcmx5 22752
+QmVoYXZpb3I= 22753
+KGZvbnQ= 22754
+L2xpYnM= 22755
+IHJlY3RhbmdsZQ== 22756
+IHNpbmdlcg== 22757
+IGFtcA== 22758
+RXF1YWxUbw== 22759
+ICIuIg== 22760
+IGdpcmxmcmllbmQ= 22761
+5bE= 22762
+bGluZWFy 22763
+b2JzZXJ2 22764
+IHBpw7k= 22765
+IGNvbXBsZW1lbnQ= 22766
+V2l0aFZhbHVl 22767
+KHBhc3N3b3Jk 22768
+dGFrZQ== 22769
+Qmxhbms= 22770
+IENvbXBhcg== 22771
+JyIs 22772
+X3BvbGljeQ== 22773
+bW9uZ29vc2U= 22774
+X0ZBSUxFRA== 22775
+LnJlcG9ydA== 22776
+UmF0aW8= 22777
+LlBlcmZvcm1MYXlvdXQ= 22778
+dXNhYmxl 22779
+bWVycw== 22780
+X3JlbmRlcg== 22781
+UEVFRA== 22782
+IGxlc2I= 22783
+CUU= 22784
+X3Rvb2w= 22785
+IGxhZGllcw== 22786
+0L7RgQ== 22787
+KSkpKQo= 22788
+Ozs7Ow== 22789
+LmRvdA== 22790
+IG5lc3Q= 22791
+cGVhaw== 22792
+dWtraXQ= 22793
+ZWNh 22794
+X1NX 22795
+ICYo 22796
+IE9rbGFob21h 22797
+IGJhbmtpbmc= 22798
+IE5pbnRlbmRv 22799
+IHJlcHJvZHVjZQ== 22800
+X2VsZW1lbnRz 22801
+X21hYw== 22802
+cHJveHk= 22803
+IHJlbWFya2FibGU= 22804
+fS8kew== 22805
+IG91dHM= 22806
+Lmhhc05leHQ= 22807
+TU9ERQ== 22808
+IGFuaW1l 22809
+LmNvbm4= 22810
+VW5pcXVl 22811
+RG9t 22812
+IGltcG9ydGFudGx5 22813
+aXR0eQ== 22814
+IGp1aWNl 22815
+VHc= 22816
+IFBhcnRuZXJz 22817
+IGF0dGFja2luZw== 22818
+IHBvcnRhYmxl 22819
+YW1pZW50bw== 22820
+LlBpY3R1cmVCb3g= 22821
+Lmdlbg== 22822
+IG9wdGltYWw= 22823
+IHJlY3Jl 22824
+IGpvdXJuYWxpc3Q= 22825
+IEV4dHJhY3Q= 22826
+IE1vcmVvdmVy 22827
+IG1hcmdpblRvcA== 22828
+LkFw 22829
+IGZpcmluZw== 22830
+TmFO 22831
+CXRlbXBsYXRl 22832
+0LDQtA== 22833
+LkVu 22834
+IGRlZmVuY2U= 22835
+IFRlbA== 22836
+aWxlbg== 22837
+amFu 22838
+PWRhdGE= 22839
+IFVybA== 22840
+IFJldXRlcnM= 22841
+KHRvdGFs 22842
+IEZpZnRo 22843
+IGVzc2F5cw== 22844
+IGludGVycHJldGF0aW9u 22845
+IGNoYXJpdHk= 22846
+IFJ1bGVz 22847
+IHN1YnNlY3Rpb24= 22848
+c3R5bGVk 22849
+YXplcg== 22850
+bGFncw== 22851
+TElTVA== 22852
+IHVwbG9hZGVk 22853
+IHRyYXNo 22854
+IHJlZ2lzdHI= 22855
+IHNlbGxlcg== 22856
+Pic7DQo= 22857
+IHN0YXJ0VGltZQ== 22858
+55k= 22859
+c3k= 22860
+KEh0dHBTZXJ2bGV0UmVxdWVzdA== 22861
+IHRyYXA= 22862
+R0M= 22863
+IGVtYmVkZGVk 22864
+IHN1cnJvdW5kZWQ= 22865
+aW1pdHM= 22866
+VFg= 22867
+eWxpbmRlcg== 22868
+IEZhbA== 22869
+IHNlbnRlbmNlcw== 22870
+IEph 22871
+SUZJQ0FUSU9O 22872
+d2VhcG9u 22873
+b3ZhdGlvbg== 22874
+IGNvYXQ= 22875
+IGludGVycG9s 22876
+IGxpcHM= 22877
+IEt5 22878
+IHZlY3RvcnM= 22879
+X2Ft 22880
+IGludGFrZQ== 22881
+Lndvcmxk 22882
+IGluYm94 22883
+IE1BQw== 22884
+X2Fi 22885
+KG5hbWVvZg== 22886
+IGVudGVydA== 22887
+IGdhdGhlcmluZw== 22888
+IFNJTQ== 22889
+Kysu 22890
+bnlh 22891
+J319 22892
+IFVQREFURQ== 22893
+IHBhYw== 22894
+KGh0bWw= 22895
+IFNhbnQ= 22896
+aWF0aW5n 22897
+IElkZWFz 22898
+IHNwcmF5 22899
+IEhhcnQ= 22900
+IHZlcmlmaWNhdGlvbg== 22901
+YWRlc2g= 22902
+L21vZHVsZXM= 22903
+IE1pbmQ= 22904
+IFNpemVkQm94 22905
+IHNoZWx0ZXI= 22906
+IGhlcm9lcw== 22907
+YXR0eQ== 22908
+IGNlcnRpZmllZA== 22909
+c2o= 22910
+IMOqdHJl 22911
+xYJv 22912
+IHB1Ymxpc2hpbmc= 22913
+IE1hbGF5cw== 22914
+LmdldFVzZXI= 22915
+IFByb3ZpZGVy 22916
+IExpbmtlZExpc3Q= 22917
+IEJvcg== 22918
+Uk9VTkQ= 22919
+ZGlk 22920
+dGFpbg== 22921
+cGlyZQ== 22922
+IEplbm4= 22923
+dGVs 22924
+YW5kZQ== 22925
+X2Zyb250 22926
+IE1jRw== 22927
+VGVzdE1ldGhvZA== 22928
+4Lit 22929
+IG9jY2FzaW9uYWxseQ== 22930
+IFdhbGVz 22931
+IGV4ZXJjaXNlcw== 22932
+INCS 22933
+LXBsdXM= 22934
+IHZhbGlkYXRvcg== 22935
+IHByYXllcg== 22936
+TEFURUQ= 22937
+X2F1dGhvcg== 22938
+IGxhYm91cg== 22939
+KysK 22940
+LWVxdWl2 22941
+IEdQTA== 22942
+IGZhY2Vib29r 22943
+c2ltcGxl 22944
+Z2x5 22945
+UHJvY2Vzc29y 22946
+aXB5 22947
+ICo+ 22948
+IGNsZWFyZWQ= 22949
+IFB1c2g= 22950
+IHBlbmlz 22951
+U3RydWN0dXJl 22952
+bGlq 22953
+IE1vcmdhbg== 22954
+IGhhbmRmdWw= 22955
+Ii4K 22956
+fFw= 22957
+ICoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq 22958
+IEFxdQ== 22959
+X0lD 22960
+LmxvYWRz 22961
+IG1ldGVy 22962
+IE1hcmluZQ== 22963
+Ojp7 22964
+IFRT 22965
+IEFycmF5cw== 22966
+LlRpdGxl 22967
+R1JBTQ== 22968
+dGVybWlu 22969
+IGNvaW5j 22970
+RWxzZQ== 22971
+X3N0YXRlcw== 22972
+LXJ1bg== 22973
+bWVtYmVycw== 22974
+YXN0cm8= 22975
+IG9uUHJlc3M= 22976
+IGJlaW5ncw== 22977
+IGFiYW5kb25lZA== 22978
+IHRheHA= 22979
+b3duZXJz 22980
+Lm1vZGU= 22981
+IGRpYWdub3Npcw== 22982
+IF8K 22983
+IEtuaWdodA== 22984
+CUE= 22985
+IG9ic2VydmU= 22986
+KSwn 22987
+ISIpCg== 22988
+IFBhcmE= 22989
+IHZhcmlhdGlvbg== 22990
+KEZhbHNl 22991
+IEFudGk= 22992
+IGdyaQ== 22993
+IGhvbWVsZXNz 22994
+P3Y= 22995
+IGJleg== 22996
+LlNlcnZlcg== 22997
+cmVsZWFzZQ== 22998
+IFBhdHJp 22999
+IGNoYXJz 23000
+IHJhbmtpbmc= 23001
+YWN0aXZhdGlvbg== 23002
+IHdpZGVz 23003
+cXI= 23004
+LlNxbA== 23005
+YWN1bGFy 23006
+IEJvdA== 23007
+X3N5bmM= 23008
+IGhhcHBpbmVzcw== 23009
+IHZvbHVudGVlcnM= 23010
+IHNpdHM= 23011
+Lzw= 23012
+W2U= 23013
+KGZpbGVOYW1l 23014
+IGNhcGFj 23015
+IE1hcmlh 23016
+ZmF0aGVy 23017
+IGdyYW0= 23018
+Kmk= 23019
+IGNhc28= 23020
+X2RyYXc= 23021
+IFJhdw== 23022
+IEl0ZXJhdG9y 23023
+IFBhZGRpbmc= 23024
+UEQ= 23025
+Qk9Y 23026
+IFNQRUNJQUw= 23027
+IGZlY2hh 23028
+IHZpZGU= 23029
+IExlYWRlcg== 23030
+5Lul 23031
+JCgiLg== 23032
+IGRpYW1ldGVy 23033
+IG1pbGQ= 23034
+IHJvY2tz 23035
+YXBwaW5ncw== 23036
+ZGlyZWN0b3J5 23037
+LmZsdXNo 23038
+IEplc3M= 23039
+VU5JVA== 23040
+IFBlYXI= 23041
+IG1hbmRhdG9yeQ== 23042
+U3Vy 23043
+cXQ= 23044
+IHN0cmVhbXM= 23045
+IGNvb3BlcmF0aW9u 23046
+IFNhYw== 23047
+IGNoZWFwZXI= 23048
+CWNo 23049
+YW5pbWF0aW9u 23050
+ZmFyZQ== 23051
+KGhlaWdodA== 23052
+KFRydWU= 23053
+Tlk= 23054
+IHdyZXN0 23055
+IHBvbGxz 23056
+IGVuY291bnRlcmVk 23057
+IE1hcmtldGFibGU= 23058
+X1BBU1NXT1JE 23059
+X1NFTEVDVA== 23060
+IEFyYWJpYQ== 23061
+X2Nsb2Nr 23062
+IHZveQ== 23063
+INC40Lc= 23064
+IHN0aXI= 23065
+aXNpYmxl 23066
+LWVmZmVjdA== 23067
+LmNyZWF0ZWQ= 23068
+IHRveXM= 23069
+IFRyYWRhYmxl 23070
+IHJ1c3Q= 23071
+IHN0cmNweQ== 23072
+X3RpbWVzdGFtcA== 23073
+IHRhbGVudGVk 23074
+LG51bGw= 23075
+IEpvYnM= 23076
+IFBvcnRsYW5k 23077
+IHdlYWtuZXNz 23078
+VGhyb3c= 23079
+IEFuZ2Vs 23080
+5L+u 23081
+IHVuY2VydA== 23082
+77yJCg== 23083
+IOydtA== 23084
+V2hpY2g= 23085
+IFstXTo= 23086
+U29tZXRoaW5n 23087
+IGNvbnZpY3RlZA== 23088
+a2xl 23089
+ZWRpdW0= 23090
+IGJyYW5jaGVz 23091
+IGJhc2Vz 23092
+564= 23093
+IGNvbXBsZXhpdHk= 23094
+IEZpZw== 23095
+LnJlc2hhcGU= 23096
+JGRi 23097
+X0NPTlNU 23098
+IFRlcw== 23099
+LnJ1bnRpbWU= 23100
+IGRlbnk= 23101
+IEJTRA== 23102
+IGty 23103
+aGF0dA== 23104
+IFN0YXRpYw== 23105
+IHVuaXZlcnNpdGllcw== 23106
+UmVwbGFjZQ== 23107
+IGRyb3Zl 23108
+IGFkb2xlcw== 23109
+X3BsdWdpbg== 23110
+IExHQlQ= 23111
+IHRleA== 23112
+ZHVjdGlvbg== 23113
+RURJ 23114
+IFRlZA== 23115
+X1VSSQ== 23116
+IHJlY2VwdGlvbg== 23117
+YXJ0ZW4= 23118
+LlNpbmdsZQ== 23119
+cmljZQ== 23120
+c2Npb3Vz 23121
+X2Jn 23122
+IHdhZ2Vz 23123
+IFNlcnZsZXQ= 23124
+VUlMYXlvdXQ= 23125
+IGZvcm1hdHRlZA== 23126
+Lk1vZA== 23127
+PGNsYXNz 23128
+aXNlbg== 23129
+IHJlcHJlc2VudGF0aXZlcw== 23130
+Il09 23131
+IHBvcnRhbA== 23132
+IEh1bnRlcg== 23133
+IGhpcmluZw== 23134
+X18pCg== 23135
+cmljdWx1bQ== 23136
+dW8= 23137
+bGllc3Q= 23138
+IHRlYXJz 23139
+TGF0 23140
+IGxpdGVyYWw= 23141
+Lkluc2VydA== 23142
+IGN1cnM= 23143
+IENvbXB1dA== 23144
+IHRlcnJvcmlzbQ== 23145
+IHN3ZWVw 23146
+IFtdDQo= 23147
+IHBhc3Nlbmdlcg== 23148
+IGVhc3Rlcm4= 23149
+IHR3ZWV0cw== 23150
+IG9wZXJhdGVk 23151
+d25k 23152
+IFN5bg== 23153
+LnRvb2xz 23154
+IFdN 23155
+dWxhdGVz 23156
+IGJhY3Rlcmlh 23157
+KGJ5dGVz 23158
+LnNldERhdGE= 23159
+IHZpc2liaWxpdHk= 23160
+Ly89PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09 23161
+ZWxt 23162
+IGdlbmVyYXRpbmc= 23163
+IG12 23164
+IGto 23165
+amVu 23166
+L3NlYXJjaA== 23167
+IGFjY291bnRpbmc= 23168
+c2VnbWVudA== 23169
+YWN0aWM= 23170
+Lmlw 23171
+IGRlcGxveW1lbnQ= 23172
+IGZvb3Rlcg== 23173
+PicsCg== 23174
+IGV4cGFuZGluZw== 23175
+IEhhbWlsdG9u 23176
+IENvbnRyaWI= 23177
+LlRhYmxlcw== 23178
+QWN0aXY= 23179
+SEg= 23180
+b2NvbW1lcmNl 23181
+Xzs= 23182
+IGFtb25nc3Q= 23183
+b3dpbmc= 23184
+IENvbGQ= 23185
+QVBI 23186
+IHBzeWNob2xvZ2ljYWw= 23187
+X3RlbnNvcg== 23188
+IHBhY2thZ2luZw== 23189
+IFN3ZWRlbg== 23190
+IHBhcmU= 23191
+IGFnZ3JlZ2F0ZQ== 23192
+IG1vZGVyYXRl 23193
+X2hhbmQ= 23194
+IGRlc2lnbmF0ZWQ= 23195
+IGRydW0= 23196
+IGdldFVzZXI= 23197
+IENyZWVr 23198
+X3Njb3Bl 23199
+IFRyYW5zZmVy 23200
+IE1hcmc= 23201
+IGZpZ2h0ZXJz 23202
+V25k 23203
+IFNlbA== 23204
+IExhdW5jaA== 23205
+IGVtZXJnaW5n 23206
+aWZyYW1l 23207
+IEFkZGl0aW9uYWw= 23208
+IGZlYXJz 23209
+IHNhdGVsbGl0ZQ== 23210
+Xzo= 23211
+IGRpc3Bvc2luZw== 23212
+R2V0VmFsdWU= 23213
+SHR0cFBvc3Q= 23214
+QVRJVkU= 23215
+dWxhcnk= 23216
+Vmlld3M= 23217
+IGF0dGVuZGluZw== 23218
+IFRlbm5lc3NlZQ== 23219
+IE1pc3Npb24= 23220
+IG1lZGljYXRpb24= 23221
+IFd5 23222
+IEFubmE= 23223
+2Lk= 23224
+IFZlcnRleA== 23225
+LnR5cGVz 23226
+T3JnYW4= 23227
+LkRhdGFHcmlkVmlld1RleHRCb3hDb2x1bW4= 23228
+IFJT 23229
+IHRlbXBv 23230
+KEFwcA== 23231
+VmVyc2lvblVJRA== 23232
+LnBvaW50 23233
+IER1dGNo 23234
+SG91cnM= 23235
+TFU= 23236
+IHF1b3RlZA== 23237
+LmJ1aWxkZXI= 23238
+IFBlcmZlY3Q= 23239
+IEFsd2F5cw== 23240
+X3R3bw== 23241
+IGV4Y2x1c2l2ZWx5 23242
+IENyYQ== 23243
+aWZpY2Fy 23244
+IEFXUw== 23245
+aW5naGFt 23246
+Y29tcGxleA== 23247
+a2VybmVs 23248
+IGdyYXZpdHk= 23249
+IHdp 23250
+IG92ZXJ2aWV3 23251
+IFdhbnQ= 23252
+IFdQ 23253
+KHNo 23254
+LnJvdGF0aW9u 23255
+U3RhdGVz 23256
+IFRlZW4= 23257
+X2NvbXBvbmVudHM= 23258
+7IiY 23259
+UmVjZWl2ZWQ= 23260
+IGx5cmljcw== 23261
+cml0ZXM= 23262
+CQkJCQkg 23263
+LUFtZXJpY2Fu 23264
+W251bQ== 23265
+L3B5dGhvbg== 23266
+IFVBUlQ= 23267
+IGFwcGxl 23268
+IEpvbmF0aGFu 23269
+IG1vbWVudHVt 23270
+4Lix 23271
+grk= 23272
+IG1pY2g= 23273
+YW5kcmE= 23274
+IGJpb2xvZ2ljYWw= 23275
+IE1lbnM= 23276
+ICUl 23277
+ZWxzZWE= 23278
+IE1leGljYW4= 23279
+LnJhbmRpbnQ= 23280
+IHRhbGU= 23281
+IFZhbGlkYXRl 23282
+IGRlZmVhdGVk 23283
+Lmh0bQ== 23284
+IGNvcHBlcg== 23285
+PS8= 23286
+Y29zeXN0ZW0= 23287
+IHJpcA== 23288
+ZGVjaW1hbA== 23289
+LlZJU0lCTEU= 23290
+IFRh 23291
+CQkJCQkJCQkJCQkJCQk= 23292
+IGRvd25sb2FkZWQ= 23293
+ZW52aXJvbm1lbnQ= 23294
+IG5vbWluZQ== 23295
+YnVpbGRpbmc= 23296
+IFNwb3Q= 23297
+aXBoZXJhbA== 23298
+IGFsdG8= 23299
+cXVldA== 23300
+IEZU 23301
+L2dldA== 23302
+L21hc3Rlcg== 23303
+V0lO 23304
+5YWD 23305
+V2VzdA== 23306
+YXJnYw== 23307
+IHByb2R1Y2Vycw== 23308
+IE11Y2g= 23309
+X3N0b3JhZ2U= 23310
+Y3JlZGl0 23311
+Q09OVA== 23312
+IHZldA== 23313
+IHZvaWNlcw== 23314
+KCcnLA== 23315
+IGluc3RydW1lbnRz 23316
+IE1TRw== 23317
+ZXNzZQ== 23318
+cmVwb3NpdG9yeQ== 23319
+b21pY3M= 23320
+IGRlYWxlcg== 23321
+U3RpbGw= 23322
+IGJhbm5lcg== 23323
+YXNjaWk= 23324
+IHJlbWFya3M= 23325
+W2pz 23326
+IHNob3J0ZXI= 23327
+Z3VscA== 23328
+IG15c3Rlcg== 23329
+IGt1bg== 23330
+IEJpcmQ= 23331
+IHRpZW5l 23332
+bnV0 23333
+IFVt 23334
+IHdpc2U= 23335
+WWVhaA== 23336
+SU5FU1M= 23337
+X2JlZ2lu 23338
+LWhlYWRpbmc= 23339
+Q291cnNl 23340
+IA0KDQo= 23341
+b21iaWU= 23342
+Z3JhZGVk 23343
+IEdQUw== 23344
+IMW8ZQ== 23345
+Rml0 23346
+Y2FwdGlvbg== 23347
+w7Zu 23348
+L2ltYWdl 23349
+bGlh 23350
+KG1vZA== 23351
+IGxlYWs= 23352
+ZW56YQ== 23353
+L0g= 23354
+IEhhcHB5 23355
+RGlzdA== 23356
+bng= 23357
+IEdvdmVybm9y 23358
+KGxhc3Q= 23359
+dGVhY2hlcg== 23360
+IFNlbnQ= 23361
+c3VwcG9ydA== 23362
+amVjdG9yeQ== 23363
+INmF 23364
+UmVnaXN0cmF0aW9u 23365
+IEdyYXk= 23366
+LGZhbHNl 23367
+IGFkanVzdGVk 23368
+KHNldHRpbmdz 23369
+PFI= 23370
+IE1hZ2U= 23371
+IHBsYWludA== 23372
+XykK 23373
+CWl0 23374
+b21ldHJpYw== 23375
+LmJvb3RzdHJhcA== 23376
+IGNhcnJpZXM= 23377
+SXA= 23378
+ICEk 23379
+IHN3aW1taW5n 23380
+IE1hcmlv 23381
+IFF1ZXN0aW9ucw== 23382
+UEFDRQ== 23383
+5pa5 23384
+ZW9y 23385
+fX0i 23386
+IG92ZW4= 23387
+IEtvbg== 23388
+IHdpc2RvbQ== 23389
+IGFjcXVpc2l0aW9u 23390
+ZXNzbWVudA== 23391
+YWdpbmU= 23392
+IGV4cHJlc3Npb25z 23393
+U2VxdWVudGlhbEdyb3Vw 23394
+RnJvbnQ= 23395
+dWxwdA== 23396
+YXdr 23397
+J10pCgo= 23398
+X0FS 23399
+IGFuYWxvZw== 23400
+dWxpbg== 23401
+X1BSSU5U 23402
+IExH 23403
+IGJsb2I= 23404
+IEZ1cnRoZXJtb3Jl 23405
+X2NvbXBvbmVudA== 23406
+IENvbGU= 23407
+TEFO 23408
+U0NSSVBUSU9O 23409
+IGxhcA== 23410
+aWNlbnNpbmc= 23411
+X1RJTUVPVVQ= 23412
+IEZybw== 23413
+IGxpYWJpbGl0eQ== 23414
+IGNvbXBvc2Vk 23415
+LmNyZWF0ZVNlcXVlbnRpYWxHcm91cA== 23416
+X3BlcnNvbg== 23417
+IGJlYW0= 23418
+CSAgICAgICAg 23419
+IE5vdEZvdW5k 23420
+LicK 23421
+w61z 23422
+LlRleHRWaWV3 23423
+UERG 23424
+IGthcg== 23425
+X18oJw== 23426
+ICI6Ig== 23427
+X21lc3NhZ2Vz 23428
+IGhhcnZlc3Q= 23429
+Lmhpc3Rvcnk= 23430
+PicK 23431
+LWZvbGQ= 23432
+5oo= 23433
+IEJldHRlcg== 23434
+ICJcPA== 23435
+c3BhY2luZw== 23436
+IGZ1cm5pc2hlZA== 23437
+b3Nlcg== 23438
+XX0K 23439
+ICQi 23440
+cHVsbA== 23441
+LlBvc3Q= 23442
+KGlw 23443
+l48= 23444
+LmZyb250 23445
+bnRl 23446
+IEZN 23447
+Z3VpZA== 23448
+IG5lZ290aWF0aW9ucw== 23449
+YWdvbmFs 23450
+IHRyZW1lbmQ= 23451
+dW5nZW9u 23452
+QWR2 23453
+Y2Fyb3VzZWw= 23454
+w59l 23455
+X0RFU0M= 23456
+IGhhbW1lcg== 23457
+4bqt 23458
+ICAgICAgICAKCg== 23459
+LWNvcmU= 23460
+LXNlcnZpY2U= 23461
+IGNvcm5lcnM= 23462
+IFNG 23463
+cHJlZA== 23464
+PkE= 23465
+IEpMYWJlbA== 23466
+IHJvbWFudGlj 23467
+IHRlc3RpbW9ueQ== 23468
+b3Nj 23469
+IEdlbmVyYXRpb24= 23470
+YXN1cmVz 23471
+X2ludGVybmFs 23472
+IHByaW50cw== 23473
+IF0pCg== 23474
+IENsZXZlbGFuZA== 23475
+cmVwbw== 23476
+RGlzYw== 23477
+ICI+Cg== 23478
+77+977+977+977+9 23479
+IG5lYXJlc3Q= 23480
+X3Ri 23481
+KHJlcXVpcmU= 23482
+RU9G 23483
+LWNoaWxk 23484
+IGJ1ZGQ= 23485
+Llh0cmFFZGl0b3Jz 23486
+YWx0aWVz 23487
+XCI6XCI= 23488
+V29yZHM= 23489
+IGxvY2FsbHk= 23490
+IHB1cmNoYXNlcw== 23491
+RHJhd2Vy 23492
+ZXh0cmFjdA== 23493
+IGV4ZWN1dA== 23494
+fScu 23495
+dXNlcmRhdGE= 23496
+IGZvY3VzZXM= 23497
+LW1pbnV0ZQ== 23498
+IFB1Ymxpc2g= 23499
+b2dv 23500
+IG1vdW50YWlucw== 23501
+Qm90 23502
+fT57 23503
+IHRlbnNpb24= 23504
+cm9k 23505
+bWVzaA== 23506
+IHRyYW5zZm9ybWVk 23507
+LFI= 23508
+KCl9Cg== 23509
+Lmxvbmc= 23510
+IGdvcmdlb3Vz 23511
+IFNjaGVkdWxl 23512
+IG9sZGVzdA== 23513
+IHN1YnByb2Nlc3M= 23514
+KElO 23515
+eWVjdA== 23516
+IENvb3Blcg== 23517
+YXJuZXNz 23518
+IE1vbml0b3I= 23519
+LnBhcnQ= 23520
+IE5CQw== 23521
+IGNvdHRvbg== 23522
+IGhvbA== 23523
+IHJnYmE= 23524
+IEJpbw== 23525
+Q29udGludWU= 23526
+UG9k 23527
+IHBhcnRpY2lwYXRpbmc= 23528
+Y2x1c2lvbnM= 23529
+KEJ5VmFs 23530
+w6w= 23531
+IEhPVw== 23532
+X3NldG9wdA== 23533
+IGFjY29tcGFueWluZw== 23534
+YXRvbg== 23535
+IC9c 23536
+IEF1dGhlbnRpY2F0aW9u 23537
+acOpbg== 23538
+IEJhcmFjaw== 23539
+Lyou 23540
+IGVhZ2Vy 23541
+IENhbmNlbA== 23542
+PGxlbW1h 23543
+ZXBo 23544
+CXdpbmRvdw== 23545
+IGluY2lkZW50cw== 23546
+KSwo 23547
+LkRlcw== 23548
+aWJl 23549
+IEZ1bmN0aW9ucw== 23550
+IGhvc3BpdGFscw== 23551
+IG94eWdlbg== 23552
+cm9vdFNjb3Bl 23553
+IGRyZXc= 23554
+CXJlcXVlc3Q= 23555
+bm90aWNl 23556
+YWt1 23557
+YW1lbnRz 23558
+ZmFy 23559
+IHByZWNpc2U= 23560
+X3dyYXBwZXI= 23561
+IGxpc3RlbmVycw== 23562
+QVo= 23563
+LmJvdW5kcw== 23564
+IEF2ZXJhZ2U= 23565
+ZmllbGRzZXQ= 23566
+X2F4aXM= 23567
+IGV4YW1pbmF0aW9u 23568
+Jy4K 23569
+bW9ucw== 23570
+Kyspew0K 23571
+IEZvcm1z 23572
+7ZWc 23573
+Q3BwTWV0aG9k 23574
+X3RyYWNl 23575
+IGVuZ2luZWVy 23576
+IEZsYXQ= 23577
+IHJldmlzaW9u 23578
+IGhlYXRpbmc= 23579
+L3Byb2ZpbGU= 23580
+LnJ1 23581
+cHJpb3JpdHk= 23582
+IGluZmVy 23583
+X1NUUkVBTQ== 23584
+ICopKA== 23585
+PiQ= 23586
+T0xFQU4= 23587
+T0tJRQ== 23588
+SUJJTElUWQ== 23589
+VUFHRQ== 23590
+IFN1cnZleQ== 23591
+IHJlc2lnbg== 23592
+d2luZw== 23593
+IHNlY3JldHM= 23594
+IGNoaXBz 23595
+SlNPTk9iamVjdA== 23596
+RGVza3RvcA== 23597
+X1NZTUJPTA== 23598
+KHJlc291cmNl 23599
+IDwvPgo= 23600
+IG5ld2VzdA== 23601
+dWxp 23602
+IGRlc2VydA== 23603
+IGRpcA== 23604
+IFBvdw== 23605
+IGVxdWF0aW9u 23606
+IHBvc3NpYmlsaXRpZXM= 23607
+IEZlZA== 23608
+b3NwaA== 23609
+IFsl 23610
+IGJ1YmJsZQ== 23611
+ZXRoZXJsYW5kcw== 23612
+IGNlbWVudA== 23613
+LmF1dG8= 23614
+X0FO 23615
+4oCZLg== 23616
+c2VsZWN0aW9u 23617
+IEJvbmQ= 23618
+RGVu 23619
+LU8= 23620
+LmdldFR5cGU= 23621
+LldpbmRvdw== 23622
+cHJlcw== 23623
+IHN3aW5nZXI= 23624
+In0pCg== 23625
+IHBpcA== 23626
+IG1pY2U= 23627
+IGNvbXBvdW5k 23628
+LXBsdWdpbg== 23629
+aWtv 23630
+IGNlbnR1cmllcw== 23631
+aWN1bGFy 23632
+LWlubGluZQ== 23633
+CWtleQ== 23634
+Plw8 23635
+RU5TSU9O 23636
+IFsNCg== 23637
+IHByZWNpc2VseQ== 23638
+IMOpdMOp 23639
+IFBhc3Q= 23640
+IENhbWJyaWRnZQ== 23641
+LWZ1bGw= 23642
+IGFuYWx5emU= 23643
+IFN0ZXZlbg== 23644
+IG5lbQ== 23645
+ZHVl 23646
+b3Jlbg== 23647
+IG11c2NsZXM= 23648
+aWppbmc= 23649
+Ly0= 23650
+IEtlbm5lZHk= 23651
+Uk0= 23652
+b3NzaWJsZQ== 23653
+IGFjdHJlc3M= 23654
+IGRvbG9y 23655
+5b2V 23656
+TmVlZA== 23657
+LnRvZ2dsZQ== 23658
+IFJhY2U= 23659
+d2Vycw== 23660
+Lm1hdGVyaWFs 23661
+IER1ZQ== 23662
+IFBlbA== 23663
+I3ByaW50 23664
+IGluZGVwZW5kZW5jZQ== 23665
+ZXh1cw== 23666
+U2hhZG93 23667
+IGVuY29kZXI= 23668
+KGxldmVs 23669
+IFN3aWZ0 23670
+LmRvYw== 23671
+X3NlbGVjdGlvbg== 23672
+IHNlcmlhbFZlcnNpb25VSUQ= 23673
+TGFiZWxz 23674
+IHBlcmZvcm1hbmNlcw== 23675
+LlRhZw== 23676
+IE5ITA== 23677
+aXplbg== 23678
+L1VJS2l0 23679
+X0NPTlRST0w= 23680
+IGVhcm5pbmdz 23681
+IEFsdA== 23682
+X0hBTkRMRQ== 23683
+Q3R4 23684
+IHBlcnN1 23685
+IHRyYW4= 23686
+56g= 23687
+X0NIQU5ORUw= 23688
+IHNhdGlzZmFjdGlvbg== 23689
+IEdQ 23690
+aW94 23691
+bWl0dA== 23692
+bGFuZG8= 23693
+IHBpZw== 23694
+aW5hbHM= 23695
+w6puY2lh 23696
+U3VyZmFjZQ== 23697
+IFVVSUQ= 23698
+IGJlbmVmaWNpYWw= 23699
+IHNlcXVlbmNlcw== 23700
+CW1lbXNldA== 23701
+IG1hZ2ljYWw= 23702
+wqs= 23703
+IHdvcm4= 23704
+QVND 23705
+cG9wdXA= 23706
+Q09NUA== 23707
+X2JlZm9yZQ== 23708
+ZW5lc3M= 23709
+VWk= 23710
+TGVz 23711
+LnJlcXVpcmU= 23712
+LlNlcmlhbGl6YWJsZQ== 23713
+YWRkR2Fw 23714
+IGF1dGhvcml6YXRpb24= 23715
+LnB5cGxvdA== 23716
+dXJyYXk= 23717
+bGF0aXR1ZGU= 23718
+ZnJhbWVz 23719
+YWpz 23720
+IGNvbXBhc3M= 23721
+IG9ic2VydmF0aW9ucw== 23722
+X3N1cA== 23723
+LmVudmlyb24= 23724
+IHRyaXBsZQ== 23725
+IFJ1Ynk= 23726
+IGRyYWlu 23727
+X0ZJTFRFUg== 23728
+U2Fu 23729
+VU1Q 23730
+TnVsbEV4Y2VwdGlvbg== 23731
+IEdhYg== 23732
+b3dl 23733
+IFR1cmtpc2g= 23734
+X3NlcXVlbmNl 23735
+IEdyYW50 23736
+dWVsYQ== 23737
+IHdv 23738
+IGN1YmU= 23739
+aXE= 23740
+IGRpc29yZGVycw== 23741
+IGV4dHJhb3JkaW5hcnk= 23742
+IGN0cmw= 23743
+IFNlcQ== 23744
+ZW50cg== 23745
+IHNhbmN0aW9ucw== 23746
+dXRzY2g= 23747
+UmVwb3J0cw== 23748
+IGluaGVyaXQ= 23749
+UGVyaW9k 23750
+IHBob3RvZ3JhcGh5 23751
+IEZyYW1ld29yaw== 23752
+IHNwZWNpYWxpc3Q= 23753
+ID8KCg== 23754
+X3NlbGVjdGVk 23755
+LlBsYXllcg== 23756
+IGFsbG9jYXRpb24= 23757
+KGFjY291bnQ= 23758
+IHN0cnVjdHVyYWw= 23759
+dmFibGU= 23760
+LW9mZnNldA== 23761
+LkFwcENvbXBhdEFjdGl2aXR5 23762
+0LDQvA== 23763
+LkFkZFdpdGhWYWx1ZQ== 23764
+IGljb25z 23765
+IHNodXRkb3du 23766
+X2xvdw== 23767
+IENvbXBhcmU= 23768
+IENl 23769
+PWhlYWQ= 23770
+bGFt 23771
+LnByZWRpY3Q= 23772
+X0RFQw== 23773
+IFNsZWVw 23774
+IEdyYXRpcw== 23775
+IHN1Z2dlc3Rpb24= 23776
+IERFTA== 23777
+Y2FmZg== 23778
+YXZpcnVz 23779
+Tm90aGluZw== 23780
+nos= 23781
+IHdpZGVzcHJlYWQ= 23782
+IG1lY2hhbmlzbXM= 23783
+IHRleHRBbGlnbg== 23784
+b2NjdXA= 23785
+IFJhaWw= 23786
+Ok5T 23787
+IGZpYmVy 23788
+IG1r 23789
+IHZpbnRhZ2U= 23790
+LWxvbmc= 23791
+LnJlZHVjZQ== 23792
+LkVudGl0aWVz 23793
+KHJlY29yZA== 23794
+IHBsZWFzYW50 23795
+RlJJTkc= 23796
+LkNlbGxz 23797
+T1RU 23798
+CWVsc2VpZg== 23799
+X2NvbmZpcm0= 23800
+IFZpZXdHcm91cA== 23801
+c3lt 23802
+IHByYXk= 23803
+IHN1c3BlY3RlZA== 23804
+Q29udGFpbnM= 23805
+IGJvcmRlcnM= 23806
+IGNvbXBvbmVudERpZA== 23807
+QVNTRVJU 23808
+IGluZmluaXRl 23809
+LW9yZGVy 23810
+IGhlbGxv 23811
+IEdyYWRl 23812
+LmN1cnJlbnRUaW1lTWlsbGlz 23813
+YXBvbGlz 23814
+emg= 23815
+CU9iamVjdA== 23816
+Olxc 23817
+SE8= 23818
+dmFsdWF0aW9u 23819
+IHZvY2Fi 23820
+IGNvdXBvbg== 23821
+YXRhYmFzZXM= 23822
+LkdldFR5cGU= 23823
+TGVhcm4= 23824
+XT0i 23825
+IEdhcnk= 23826
+b3RpdmU= 23827
+IGFzaA== 23828
+IGJpYg== 23829
+WFhYWA== 23830
+IGJhbGFuY2Vk 23831
+VkFMVUU= 23832
+IE5hdA== 23833
+X0Fk 23834
+PEU= 23835
+5Yy6 23836
+IE1ldGhvZEluZm8= 23837
+TElC 23838
+IGNvbnNpZGVyYWJsZQ== 23839
+IEluZHVzdHJ5 23840
+dGVzdHM= 23841
+LnNldFRpdGxl 23842
+IEJsdWV0b290aA== 23843
+IG1hcHBlZA== 23844
+IEJydWNl 23845
+IE1haW5XaW5kb3c= 23846
+CXN0YXR1cw== 23847
+IHJheg== 23848
+IE1hbmQ= 23849
+IGNsYXNzaWZpY2F0aW9u 23850
+UGVybWlzc2lvbnM= 23851
+IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0= 23852
+IGNvbnRhaW5lcnM= 23853
+OnNldA== 23854
+X3htbA== 23855
+IHdoaWxzdA== 23856
+VGhyb3VnaA== 23857
+IHZhbGlnbg== 23858
+IHdvcmxkcw== 23859
+Q09SRA== 23860
+RURJQQ== 23861
+0YDQvtCy 23862
+IHNwYXJl 23863
+IEhhZA== 23864
+IERFRg== 23865
+KHB0cg== 23866
+IHdhcm1pbmc= 23867
+4KS+ 23868
+IGNvbnNlbnN1cw== 23869
+YWduZQ== 23870
+Q1RM 23871
+IOyV 23872
+Lk1haW4= 23873
+d2ViRWxlbWVudA== 23874
+IHBpc3Q= 23875
+Rmxhc2g= 23876
+QXBwZW5k 23877
+LnR3aW1n 23878
+VGFw 23879
+IHZlZ2V0YWJsZXM= 23880
+YWxn 23881
+LnNhbXBsZQ== 23882
+IGNvYWNoaW5n 23883
+KGluZA== 23884
+Q2VsbFZhbHVl 23885
+Q2hlY2tCb3g= 23886
+IEhlbGw= 23887
+Uk9PVA== 23888
+IHN0YWRpdW0= 23889
+IGludmVzdGlnYXRpbmc= 23890
+KSU= 23891
+c3RlZA== 23892
+IFdyaXRpbmc= 23893
+IOqy 23894
+IHVubw== 23895
+IHt7LS0= 23896
+IGNvb3Jkcw== 23897
+IHVuc2Vy 23898
+b3JnYW5pemF0aW9u 23899
+IENyaW1l 23900
+IERlbW9jcmF0 23901
+IHZpbg== 23902
+L2ZpbGU= 23903
+LWFwaQ== 23904
+IEF5 23905
+IGZ1bmRlZA== 23906
+IEJyZXhpdA== 23907
+IEdo 23908
+ZW50aW5h 23909
+Y2FzZXM= 23910
+IGRhc2g= 23911
+ICEhfQo= 23912
+SEk= 23913
+T2ZmaWNl 23914
+IGNhcHRhaW4= 23915
+IHdvcnNoaXA= 23916
+XEM= 23917
+IGdsb2Jl 23918
+X2JvYXJk 23919
+IGJhYmllcw== 23920
+IGNvbnNlY3V0aXZl 23921
+IGVuaGFuY2Vk 23922
+ZXJldW0= 23923
+IEFkdmlz 23924
+IGdyYWlu 23925
+IGNyYXc= 23926
+YW5jZWxsYXRpb25Ub2tlbg== 23927
+LmFscGhh 23928
+X1dJVEg= 23929
+IE90dA== 23930
+IENvb2w= 23931
+LmJhdGNo 23932
+IHZlcmlmaWVk 23933
+KGNhbGxiYWNr 23934
+IHJlZ2FyZHM= 23935
+IEludFB0cg== 23936
+b3VjaGVy 23937
+IGtpbg== 23938
+IHRvdWNoZWQ= 23939
+aXTDoA== 23940
+YXRob24= 23941
+IGFkamFjZW50 23942
+IGFjY29tcGFuaWVk 23943
+TEVBUg== 23944
+IGltcGxpZXM= 23945
+IGhpbGw= 23946
+IEJhbHRpbW9yZQ== 23947
+PSIt 23948
+RmluYWxseQ== 23949
+U2Ft 23950
+aWNvcHQ= 23951
+IHNvZA== 23952
+IG1hag== 23953
+IFNoaXBwaW5n 23954
+IGdldEFsbA== 23955
+IGNvYWNoZXM= 23956
+IGRvbmF0aW9ucw== 23957
+aWxvdA== 23958
+IFRhcg== 23959
+Y2Vycg== 23960
+IGJhZGdl 23961
+IG1hcmtlcnM= 23962
+IFJhbmQ= 23963
+YWlzZWQ= 23964
+aXNzYW5jZQ== 23965
+IGV4cGxvcmluZw== 23966
+dWNlZA== 23967
+IEluZG9uZXNpYQ== 23968
+IGJlbmVhdGg= 23969
+IG1hZ25ldGlj 23970
+IG11c2V1bQ== 23971
+bWF0Y2hDb25kaXRpb24= 23972
+IGRpc3J1cHQ= 23973
+IHJlbWluZA== 23974
+IFRN 23975
+IC8+PA== 23976
+IGZvb2w= 23977
+IGVzaw== 23978
+Lk51bGw= 23979
+IERpZXM= 23980
+X09VVFBVVA== 23981
+X1RZUEVE 23982
+IHBhaW50ZWQ= 23983
+IHNvcGhpc3RpYw== 23984
+IEJlYXI= 23985
+Km4= 23986
+X1BBQ0s= 23987
+IGRlbGl2ZXJpbmc= 23988
+IENPVU5U 23989
+5Y2V 23990
+IGplZw== 23991
+LWNhcg== 23992
+Zm5hbWU= 23993
+IHJhbmdpbmc= 23994
+IE5lZw== 23995
+LyoqKioqKi8= 23996
+IENIQVI= 23997
+IHVsdHJh 23998
+R3JhZA== 23999
+PXQ= 24000
+IGp1ZGdlcw== 24001
+IERpc2U= 24002
+YW5uZXJz 24003
+IHNjYWw= 24004
+X2NhbA== 24005
+IENPTk5FQ1RJT04= 24006
+X2VtYmVk 24007
+KGZu 24008
+IENyYWZ0 24009
+IFBhcw== 24010
+IiktPg== 24011
+LmNvbnZlcnQ= 24012
+LnJlc291cmNl 24013
+IFNUQVRVUw== 24014
+w7RuZw== 24015
+IFRpdA== 24016
+IGNsYXNzcm9vbQ== 24017
+IEFyY2hpdGVjdA== 24018
+IEtpbmdz 24019
+IHN0ZWFkeQ== 24020
+LyohCg== 24021
+IEdlbmU= 24022
+KSI7Cg== 24023
+aWNpYQ== 24024
+c3Rhbg== 24025
+IENvbnN0cnVjdGlvbg== 24026
+dW1wZXI= 24027
+d2M= 24028
+IENCUw== 24029
+aW5naW5n 24030
+LXBhcnR5 24031
+KGRyaXZlcg== 24032
+TUFSSw== 24033
+IG5lc3RlZA== 24034
+ZXdhcmQ= 24035
+IGRlcGVuZGVuY3k= 24036
+IG1hbGVz 24037
+IE9ORQ== 24038
+IFByb2R1Y3Rpb24= 24039
+XVsk 24040
+44O844M= 24041
+X0xPQUQ= 24042
+IEJvbA== 24043
+ZWxyeQ== 24044
+oOmZpA== 24045
+IFJlcXVpcmU= 24046
+IHBsYWNpbmc= 24047
+eHh4 24048
+Q0FMRQ== 24049
+IHRodW1i 24050
+Q2hvb3Nl 24051
+IHByb3RvdHlwZQ== 24052
+Vk9JRA== 24053
+IGxlc2JpYW4= 24054
+IHRyYWl0cw== 24055
+U2hhcnA= 24056
+IGNvbnN1bWU= 24057
+VHJ1dGg= 24058
+IGFjdGlvblBlcmZvcm1lZA== 24059
+IEVudmlyb25tZW50YWw= 24060
+IERlYW4= 24061
+IGVzdGFkbw== 24062
+c2FtZQ== 24063
+IG51bWVyaWM= 24064
+IHRyYW5zaXQ= 24065
+LkVtYWls 24066
+LXNpZGU= 24067
+X1JVTg== 24068
+IFZpbGxhZ2U= 24069
+X09QRU4= 24070
+6KY= 24071
+LnJlbQ== 24072
+LXdhcm5pbmc= 24073
+YW55YQ== 24074
+UHJvcGVydHlDaGFuZ2Vk 24075
+ICghXw== 24076
+KGNoZWNr 24077
+aWxpYQ== 24078
+IFNvZnQ= 24079
+c3RlcHM= 24080
+IE1hZHJpZA== 24081
+TWVtb3J5V2FybmluZw== 24082
+IGhhbmRsZXJz 24083
+IGV4cGVyaWVuY2luZw== 24084
+IGluc3BlY3Q= 24085
+YnV0dG9ucw== 24086
+UmVjZWl2ZU1lbW9yeVdhcm5pbmc= 24087
+Y2hlbXk= 24088
+TGlua3M= 24089
+IHVybGxpYg== 24090
+LlN5c3RlbUNvbG9ycw== 24091
+IEVpZ2Vu 24092
+IHB1bmlzaG1lbnQ= 24093
+OlVJQ29udHJvbA== 24094
+YmFyYQ== 24095
+LXNldA== 24096
+IH0NCg0KDQo= 24097
+IHRvbGVyYW5jZQ== 24098
+IGludGVyZmFjZXM= 24099
+LnJlZGlyZWN0 24100
+aWdoYm9ycw== 24101
+Y3NyZg== 24102
+X2JhY2tncm91bmQ= 24103
+LlV0aWxz 24104
+X0hU 24105
+IEludGVyZXN0 24106
+aW1vcw== 24107
+IGdyYW50cw== 24108
+IGV4YW1pbmVk 24109
+0JQ= 24110
+IGNm 24111
+Zm9yZ2U= 24112
+YmFja3M= 24113
+IE9iamVjdHM= 24114
+X3NlbnQ= 24115
+LmVudHJ5 24116
+IFRIRU4= 24117
+ZWxsaWRv 24118
+Y2lh 24119
+LHJlcw== 24120
+L3N0ZGM= 24121
+Lm5k 24122
+KEludA== 24123
+IEF1dGhvcnM= 24124
+IEFwcENvbXBhdEFjdGl2aXR5 24125
+J3s= 24126
+IG1lZGk= 24127
+TXVzaWM= 24128
+aWdt 24129
+Y2VpcHQ= 24130
+IGF1c3M= 24131
+IHRhcmdldGluZw== 24132
+IEtleXM= 24133
+aG4= 24134
+Ol0K 24135
+IG1pbmVyYWw= 24136
+w64= 24137
+LmNh 24138
+b21lZA== 24139
+IHNoZWV0cw== 24140
+IGNhbWI= 24141
+IGRlYWRseQ== 24142
+LmluamVjdA== 24143
+KHVuaXQ= 24144
+IFNlbGVjdGlvbg== 24145
+Lmdtcw== 24146
+KGNvbm5lY3Rpb24= 24147
+ICQoIg== 24148
+w6ltb24= 24149
+IEN1cnJlbnRseQ== 24150
+cHRl 24151
+X3BhdGhz 24152
+bGVhZg== 24153
+IGltcGxpY2F0aW9ucw== 24154
+cG9zYWw= 24155
+5L2N 24156
+Wy8= 24157
+YW5jaWE= 24158
+6Zs= 24159
+bXVs 24160
+Y2ll 24161
+IGdlaWxl 24162
+aW1hbHM= 24163
+VUlWaWV3 24164
+IHN1cnJl 24165
+c2VyaWFsaXpl 24166
+SVNP 24167
+IGFyYml0cmFyeQ== 24168
+IHNvY2thZGRy 24169
+LmZu 24170
+IE1lcmM= 24171
+IGNhc3Rpbmc= 24172
+S2V5RG93bg== 24173
+IG5ld1ZhbHVl 24174
+b3BlbnM= 24175
+VG9kbw== 24176
+IGZsZXhpYmlsaXR5 24177
+CQkJCSAg 24178
+VmVsb2NpdHk= 24179
+w7pu 24180
+cm93aW5n 24181
+IGNvbXB1dGVk 24182
+YCkK 24183
+c3RhdGVtZW50 24184
+IHJp 24185
+X2NhcnQ= 24186
+TG93 24187
+dHJhbnNmZXI= 24188
+Lm5hdg== 24189
+IGdyYXZl 24190
+IERvb3I= 24191
+CWFsZXJ0 24192
+LnN1YnNjcmliZQ== 24193
+LXByb2ZpbGU= 24194
+CWJhc2U= 24195
+IOKIkg== 24196
+X18KCg== 24197
+IGVuZ2luZWVycw== 24198
+IGV4cGxvc2lvbg== 24199
+IGRhcmk= 24200
+CUxvZw== 24201
+b25hbA== 24202
+IGlzb2xhdGVk 24203
+e2k= 24204
+IE1zZw== 24205
+RnV0dXJl 24206
+IHJhY2lzdA== 24207
+LXdyYXA= 24208
+IFZlcnM= 24209
+Ym9yZw== 24210
+SVNJT04= 24211
+INGA0LDQ 24212
+IFlhbg== 24213
+aW5pdFdpdGg= 24214
+IG5vbWlu 24215
+KGVtcHR5 24216
+w61u 24217
+44Kk 24218
+CXdpZHRo 24219
+IGNoYW1iZXI= 24220
+L2FqYXg= 24221
+RU1Q 24222
+IG5lY2Vz 24223
+aXZvcw== 24224
+bG9naWM= 24225
+Kikm 24226
+Y3JpcHRz 24227
+Um93QXQ= 24228
+aWJsaW5ncw== 24229
+IGVhcnM= 24230
+IGNvbXB1dGluZw== 24231
+IG1ha2Vy 24232
+IE5laXRoZXI= 24233
+YnJlYWRjcnVtYg== 24234
+IHNlcmlhbGl6ZQ== 24235
+IFdpdGhpbg== 24236
+IGRlbGw= 24237
+X1RSQUNF 24238
+PWE= 24239
+IHdpc2hlcw== 24240
+LWluY2g= 24241
+IERvcg== 24242
+IGlubm9jZW50 24243
+IERvbA== 24244
+IGludGVucw== 24245
+Zm9yY2Vk 24246
+IEJJVA== 24247
+IHBob3RvZ3JhcGhz 24248
+IGNhc2E= 24249
+IExlbg== 24250
+XEZyYW1ld29yaw== 24251
+LlNpbXBsZQ== 24252
+IGRlYXI= 24253
+KS8o 24254
+aXBwaQ== 24255
+IG93bnM= 24256
+UGxheWVycw== 24257
+IHByb3Bvc2Fscw== 24258
+LnBp 24259
+dXNhbGVt 24260
+RGFtYWdl 24261
+IGNhbG9yaWVz 24262
+IENyZWF0aXZl 24263
+IFsk 24264
+IC8vDQo= 24265
+QW5kVmlldw== 24266
+w6htZQ== 24267
+LmN1c3RvbQ== 24268
+X2ZhY3Rvcnk= 24269
+Y29tbWFuZHM= 24270
+X2xvb2s= 24271
+IHN0cmNtcA== 24272
+WU4= 24273
+YWlyZWQ= 24274
+IGF1ZGl0 24275
+0L7RgdGC 24276
+IFJldmVyc2U= 24277
+cm9wcmlhdGU= 24278
+ZXRpY3M= 24279
+PHZlY3Rvcg== 24280
+LnNlbGVuaXVt 24281
+Lm9y 24282
+IHByZWRpY2F0ZQ== 24283
+IGZpbmlzaGluZw== 24284
+IGtsZQ== 24285
+IFJlcG9z 24286
+IEtoYW4= 24287
+IE1ha2luZw== 24288
+IEZT 24289
+IHB1dGU= 24290
+CXN0YXRl 24291
+X1NVUFBPUlQ= 24292
+Jy0= 24293
+b3JpZW50YXRpb24= 24294
+IGV4aXN0ZWQ= 24295
+YXR1cmE= 24296
+IGV4cGVjdHM= 24297
+IFNoYWRvdw== 24298
+IG9yZ2FuaXo= 24299
+5Z6L 24300
+IHN1c3BlbnNpb24= 24301
+IHVpdA== 24302
+IHNpbXVsdGFuZW91c2x5 24303
+IEFmZmVybw== 24304
+OiIpOwo= 24305
+IHJvY2tldA== 24306
+Y2Fz 24307
+ZXRlcm1pbmU= 24308
+YWNldXQ= 24309
+eGw= 24310
+IEFNRA== 24311
+KGdyYXBo 24312
+YXNzb2Np 24313
+X0NS 24314
+LmFyYW5nZQ== 24315
+KGpMYWJlbA== 24316
+IGJlZWY= 24317
+UXVpY2s= 24318
+LmNhcmQ= 24319
+XSk6 24320
+LWdy 24321
+LkdPTkU= 24322
+X0NMT1NF 24323
+IE5ldg== 24324
+w61hcw== 24325
+IHN0ZXBwZWQ= 24326
+IEZyZWVkb20= 24327
+IFdS 24328
+TlNBcnJheQ== 24329
+X3J4 24330
+X2RpYWxvZw== 24331
+IGhvdGVscw== 24332
+IChcPA== 24333
+IERpYW1vbmQ= 24334
+IGFzc3VtcHRpb24= 24335
+dW1p 24336
+KGl0ZW1z 24337
+DQ0NCg== 24338
+5rOV 24339
+IG5lbA== 24340
+Qm9va3M= 24341
+5Y6/ 24342
+dXNi 24343
+IEZJTg== 24344
+5qw= 24345
+IGNvcnBvcmF0aW9ucw== 24346
+VVNB 24347
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA== 24348
+LnByb3BlcnR5 24349
+ZXdpc2U= 24350
+X3Bsb3Q= 24351
+Ij4nOwo= 24352
+IHBlcHBlcg== 24353
+IHNoZWQ= 24354
+IE1lZGl1bQ== 24355
+IENvb2tpZQ== 24356
+IG92ZXJzZWFz 24357
+ZWRvcg== 24358
+YXN1cmVtZW50 24359
+5a2Y 24360
+ICcuJw== 24361
+IHBocA== 24362
+IFBST0M= 24363
+IGV4Y2VwdGlvbmFs 24364
+KHRo 24365
+IEpldA== 24366
+IG9jY3VwaWVk 24367
+LnNldEltYWdl 24368
+IFJlbGF0ZWQ= 24369
+dWNrZXI= 24370
+TWVtYmVycw== 24371
+UFJJTlQ= 24372
+IEdsbw== 24373
+X1ZJRVc= 24374
+fSIsCg== 24375
+IGFkb3B0aW9u 24376
+W10pCg== 24377
+IE1pc3NvdXJp 24378
+IExpbmNvbG4= 24379
+ZXJhbGQ= 24380
+UG9wdXA= 24381
+IGZhdGU= 24382
+LWJvb3RzdHJhcA== 24383
+ZmVjdGlvbnM= 24384
+IFBvbGw= 24385
+X0FSR1M= 24386
+aW5hbmNl 24387
+LWhvbWU= 24388
+Liks 24389
+X2RvbmU= 24390
+OgoKCg== 24391
+IGRpc2N1c3Npbmc= 24392
+IFNRTEV4Y2VwdGlvbg== 24393
+IGVsZWN0cm8= 24394
+CXJlcQ== 24395
+IHp3 24396
+IGx1aQ== 24397
+IG92ZXJuaWdodA== 24398
+JHVzZXI= 24399
+IFdBWQ== 24400
+IGFsbGVyZw== 24401
+IGRpc2FwcG9pbnRlZA== 24402
+IHJhZGlhdGlvbg== 24403
+IGltcHJlc3NlZA== 24404
+aWZpY2F0ZXM= 24405
+IHRvYg== 24406
+Q0xBU1M= 24407
+IGN1ZGE= 24408
+X2RldA== 24409
+LXBvc3Q= 24410
+dWx1 24411
+VHJhbnNsYXRpb24= 24412
+LWhhbmQ= 24413
+LnllYXI= 24414
+IE1vbmdv 24415
+IHVuY2xlYXI= 24416
+LmVuZ2luZQ== 24417
+V0VCUEFDSw== 24418
+cmljZXM= 24419
+X0FDQ0VTUw== 24420
+IGhvbGlkYXlz 24421
+cGVyY2VudA== 24422
+LklkZW50aXR5 24423
+IEdvdg== 24424
+IHBhc3Npb25hdGU= 24425
+ISEu 24426
+IEdyZWVjZQ== 24427
+cGx1c3BsdXM= 24428
+JykpOw== 24429
+R1A= 24430
+IGV4Y2l0 24431
+LnRhYlBhZ2U= 24432
+X2NvbmQ= 24433
+IHNwb25zb3I= 24434
+TU9EVUxF 24435
+X3Byb2M= 24436
+ICQK 24437
+IHJhdGlvbmFs 24438
+LlRvb2w= 24439
+IGlocg== 24440
+Y2Nh 24441
+5ZOB 24442
+IEVzdGF0ZQ== 24443
+SUJVVEU= 24444
+QWN0aW9uUGVyZm9ybWVk 24445
+IFNvbGFy 24446
+poI= 24447
+IGVxdWl0eQ== 24448
+dGlk 24449
+IHJlY2lw 24450
+LnNpbXBsZQ== 24451
+bWs= 24452
+IEx1a2U= 24453
+IEd1YXJkaWFu 24454
+IGVuY3J5cHRlZA== 24455
+IGRvbWluYW50 24456
+LnBsYWNl 24457
+IE5W 24458
+IHRvbmd1ZQ== 24459
+KEdldA== 24460
+IHN0YWlubGVzcw== 24461
+LlBsYXk= 24462
+IGVi 24463
+YWNp 24464
+LmJ1ZmZlcg== 24465
+cmVhZGNydW1icw== 24466
+IHZhY2NpbmU= 24467
+cHJvbQ== 24468
+IHVzZXJJbmZv 24469
+IHNsdWc= 24470
+U2VyaWFsaXplZE5hbWU= 24471
+LXdpZGU= 24472
+IHJlYWN0aW9ucw== 24473
+IFlhbmc= 24474
+IEFkZHM= 24475
+KHVzZXJJZA== 24476
+IHBsYXRlcw== 24477
+IE1FTQ== 24478
+IGJhaWw= 24479
+SW5zaWRl 24480
+ZXRlZA== 24481
+IGVsc2lm 24482
+IHNha2U= 24483
+IGN5Y2xlcw== 24484
+IOyX 24485
+CUk= 24486
+LWNvbGxhcHNl 24487
+IEdNVA== 24488
+RGVjbGFyYXRpb24= 24489
+IGdyb3M= 24490
+IHJlYWNoZXM= 24491
+IGN1c3RvZHk= 24492
+VW50aWw= 24493
+dHU= 24494
+IENoZW4= 24495
+IG54 24496
+KGFkZHI= 24497
+IE9mZmVy 24498
+IGNvbGxlZw== 24499
+YXNzYWRvcg== 24500
+IG1hcHBlcg== 24501
+IFNJR05BTA== 24502
+IEJsb29t 24503
+IEhvbGw= 24504
+IEltcGVy 24505
+LWRlcw== 24506
+X3NpdGU= 24507
+UHJvYw== 24508
+RXF1 24509
+IGF0b21pYw== 24510
+IFdvbWFu 24511
+c2VudA== 24512
+c2Nhcg== 24513
+IGludGVsbGlnZW50 24514
+IEdldHRpbmc= 24515
+IFJlZ2lzdHJhdGlvbg== 24516
+IFBoaWxs 24517
+IGtpbGxlcg== 24518
+dW5pY29kZQ== 24519
+CgkJCg== 24520
+IEphY29i 24521
+IENvbnN0 24522
+IGxvY2F0ZQ== 24523
+IGNhdXM= 24524
+IFNjaG9sYXI= 24525
+IGNvbnN0aXR1dGlvbmFs 24526
+IGluZmxhdGlvbg== 24527
+IEdvdA== 24528
+PWFycmF5 24529
+ZW5kdW0= 24530
+IHRyYW5zbGF0ZWQ= 24531
+IGRpdm9yY2U= 24532
+RW50cmllcw== 24533
+IHNvcg== 24534
+IFF1b3Rl 24535
+aXJsaW5lcw== 24536
+VUs= 24537
+IGV4Y2Vs 24538
+KG9wdA== 24539
+IEFEVg== 24540
+LDos 24541
+IGNvbnRhY3RlZA== 24542
+IERB 24543
+IHJpbmdz 24544
+IEluZHVzdHJpYWw= 24545
+LmdldENvbnRleHQ= 24546
+IGZvcmdvdHRlbg== 24547
+IFRhbg== 24548
+IHBhbnRz 24549
+IG92 24550
+IGRlY29kZXI= 24551
+IFBhcnRpYWw= 24552
+IHZj 24553
+IGJhdHRsZXM= 24554
+QXJpYWw= 24555
+RlJJTkdFTUVOVA== 24556
+aXJhdGVz 24557
+LHc= 24558
+YWludGVuYW5jZQ== 24559
+IE9k 24560
+IFRlY2hub2xvZ2llcw== 24561
+5YmN 24562
+IENhcnRlcg== 24563
+LmZpbmRBbGw= 24564
+Tm9tZQ== 24565
+QmVu 24566
+IFVzYWdl 24567
+IFBpY3R1cmU= 24568
+IGJhZGx5 24569
+X3BhbmVs 24570
+IHBhdGVudA== 24571
+IFByb3RvY29s 24572
+bG90dGU= 24573
+CXBsYXllcg== 24574
+amVjdGlvbnM= 24575
+IGRvdQ== 24576
+X3JlbGVhc2U= 24577
+dXJuaXR1cmU= 24578
+X3RheA== 24579
+IEZpZWxkcw== 24580
+LmRhdGFzZXQ= 24581
+X21hc3Rlcg== 24582
+Q0xVREU= 24583
+IFBoYXJt 24584
+YnN0 24585
+IG9wZXJhdGlvbmFs 24586
+LmNlbGw= 24587
+IGlkZW50aWZ5aW5n 24588
+IGp3dA== 24589
+dHVwbGU= 24590
+IFRD 24591
+IENybw== 24592
+aXhtYXA= 24593
+LWNvbXBvbmVudHM= 24594
+Z2VuZXJhbA== 24595
+IG96 24596
+X0Rl 24597
+X2RvdWJsZQ== 24598
+IFRvbw== 24599
+LlZpZXdHcm91cA== 24600
+Z2F0ZQ== 24601
+ZGluZ3M= 24602
+cGhvdG9z 24603
+IGdyYW5kZQ== 24604
+b2xsZWN0 24605
+X2xpbg== 24606
+IGF3ZnVs 24607
+ZmlsdGVycw== 24608
+IGFsdGVybmF0ZQ== 24609
+ZXNw 24610
+IGNvbXByZXNz 24611
+ZW8= 24612
+IFNjYWxl 24613
+IGluZGlyZWN0 24614
+IGludm9pY2U= 24615
+CgoKCgoKCgoKCgoKCgoKCg== 24616
+U3RhcnRpbmc= 24617
+IFBsYXllcnM= 24618
+aWVsZQ== 24619
+LnRoZW4= 24620
+T3Jk 24621
+IFR1cGxl 24622
+IGJvdXQ= 24623
+IFN0YXRpc3RpY3M= 24624
+UHJldmlldw== 24625
+IHB1enpsZQ== 24626
+IFdpZHRo 24627
+U1RBVEU= 24628
+IG92ZXJsYXk= 24629
+CW9u 24630
+IGluZnI= 24631
+IHNtYWxsZXN0 24632
+bG9ja2Vk 24633
+0YLQvg== 24634
+c3Ns 24635
+IGRlZW1lZA== 24636
+IHNjbw== 24637
+cmVjaw== 24638
+IGpCdXR0b24= 24639
+IG1pc3Npb25z 24640
+56ew 24641
+LlNlbGVjdGVkSW5kZXg= 24642
+VEFCTEU= 24643
+U2VwdA== 24644
+IGFja25vd2xlZGdl 24645
+IHN0cnRvdGltZQ== 24646
+IFRlbGw= 24647
+IERhaw== 24648
+IGFsdW1pbnVt 24649
+IGZlbmNl 24650
+IFN0YXJz 24651
+Q09ORklH 24652
+IHJldHJvZml0 24653
+IGVtcGhhc2lz 24654
+L2hlYWRlcg== 24655
+IFNvbWV0aGluZw== 24656
+aW5pc2hlZA== 24657
+PSciLiQ= 24658
+IFZhbGlkYXRvcnM= 24659
+IHBvbGFy 24660
+c2VjdGlvbnM= 24661
+LmFzcHg= 24662
+IGFzcGly 24663
+Lk1vY2s= 24664
+Q29kZUdlbg== 24665
+IHBldXQ= 24666
+IGFjY2VwdGluZw== 24667
+IGJhY2tpbmc= 24668
+UGljdHVyZQ== 24669
+L2Fw 24670
+0LXQsw== 24671
+X1NFQw== 24672
+LXVzZQ== 24673
+YW5ub3RhdGlvbg== 24674
+IGNvZ25pdGl2ZQ== 24675
+IGdyaXA= 24676
+aG91cg== 24677
+IExlZ2Fs 24678
+IGVwaWM= 24679
+LnRvb2xTdHJpcA== 24680
+Lm5vdGlmeQ== 24681
+Lkxhc3Q= 24682
+T1JJWg== 24683
+TWlkZGxld2FyZQ== 24684
+Y3JpcHRpb25z 24685
+bGFzaA== 24686
+X0ZPVU5E 24687
+IExpdmVycG9vbA== 24688
+IHt9Iiw= 24689
+SW5zdGFsbA== 24690
+IG5pdA== 24691
+IGZpZ3VyZWQ= 24692
+W2xlbg== 24693
+Lldpbg== 24694
+LnBsYXRmb3Jt 24695
+IGdhbWJsaW5n 24696
+KGR0 24697
+YXZlcnk= 24698
+CWluY2x1ZGU= 24699
+V2hldGhlcg== 24700
+Um91dGluZw== 24701
+IHRoZXJhcA== 24702
+UmVtb3Rl 24703
+IExvc3M= 24704
+eWxs 24705
+IGFwcHJvYWNoZWQ= 24706
+IFZlaGljbGU= 24707
+IEFscGhh 24708
+IHZvY8Oq 24709
+YW5zd2Vycw== 24710
+TlNEaWN0aW9uYXJ5 24711
+Y29uc2lkZXI= 24712
+dW51c2Vk 24713
+IEZhbg== 24714
+b3JhYmxl 24715
+ZnJl 24716
+IERJU0NMQUlN 24717
+IEFjdG9y 24718
+Ll0= 24719
+dG9IYXZl 24720
+LnVzZXJJZA== 24721
+IHNwZWVkcw== 24722
+ZXdheQ== 24723
+IHJlY3Vycw== 24724
+INCz 24725
+X3ByaXY= 24726
+IeKAnQoK 24727
+Q2hvaWNl 24728
+IHNldHRsZQ== 24729
+IHBsYW5lcw== 24730
+J30s 24731
+VG9t 24732
+SVRFUg== 24733
+ISIK 24734
+5bs= 24735
+YWNoZWxvcg== 24736
+IHNlcGFyYXRpb24= 24737
+IGRhbA== 24738
+YWRq 24739
+IHJlZ2lzdGVycw== 24740
+cml6 24741
+IE5vdGljZQ== 24742
+IGx1 24743
+IGNvdXJhZ2U= 24744
+IGF4ZXM= 24745
+Y2VsbGVudA== 24746
+LmFzeW5j 24747
+IGNvbXBhdGliaWxpdHk= 24748
+56s= 24749
+ICEKCg== 24750
+CXRpdGxl 24751
+WUxF 24752
+CW1lc3NhZ2U= 24753
+VVVJRA== 24754
+T0xERVI= 24755
+IEhI 24756
+IFN0eWxlU2hlZXQ= 24757
+IGFjY2Vzc2Vk 24758
+LnZhbGlkYXRpb24= 24759
+dGFza3M= 24760
+IHBvbGx1dGlvbg== 24761
+LmNhbnZhcw== 24762
+IGluZ3JlZGllbnQ= 24763
+IENhYmlu 24764
+QWg= 24765
+b2xkb3du 24766
+IE5PSQ== 24767
+IMOX 24768
+W2Y= 24769
+ZWR1Yw== 24770
+eWFsdHk= 24771
+KG5vdA== 24772
+X1N0YXRl 24773
+YW1lbg== 24774
+IGRhbw== 24775
+dWRhZA== 24776
+ZWxsZXJz 24777
+fSY= 24778
+bGljaXR5 24779
+X1dJTkRPVw== 24780
+IHRhdHRv 24781
+dmFsb3I= 24782
+LlJhbmdl 24783
+IHJlZmVyZW5jZWQ= 24784
+IFJlc2VydmU= 24785
+TW9uZXk= 24786
+U0NSSVBU 24787
+L3Byb2R1Y3Q= 24788
+Y2hvaWNlcw== 24789
+IHRpbg== 24790
+44KT 24791
+IHNlcGFyYXRvcg== 24792
+IHBrZw== 24793
+YW1tZWQ= 24794
+IE1BVA== 24795
+ISEKCg== 24796
+IHJhaWQ= 24797
+IG1vdGl2YXRpb24= 24798
+IFhQ 24799
+IEJhY2tncm91bmQ= 24800
+IFF1YXRlcm5pb24= 24801
+LmRlZmluZVByb3BlcnR5 24802
+aWtlcg== 24803
+CXBhcmVudA== 24804
+IE9yaWdpbmFsbHk= 24805
+YW50YWdl 24806
+IEhhbnM= 24807
+IHRpbWVsaW5l 24808
+LmN1cg== 24809
+b3BpYw== 24810
+IFNlcXU= 24811
+bXVzdA== 24812
+IENvYWw= 24813
+IGZvcm1hdHRlcg== 24814
+X1JHQg== 24815
+IF8oIg== 24816
+J30pLAo= 24817
+ID09PT09PT09PT09PT09PT09 24818
+IEZVTkNUSU9O 24819
+IGxuZw== 24820
+aWNhdGVz 24821
+bGl2ZQ== 24822
+X2VuZ2luZQ== 24823
+IHRvd25z 24824
+JykpCgo= 24825
+IFBL 24826
+KGFwaQ== 24827
+CXNjYW5m 24828
+cGFja2V0 24829
+LnBob25l 24830
+4YA= 24831
+IEFuZHk= 24832
+X05BTUVT 24833
+UExZ 24834
+IG1pbnM= 24835
+aW1p 24836
+IGJyaWNr 24837
+IGJsYWRl 24838
+LnN0ZG91dA== 24839
+fWA7Cg== 24840
+U2hpZnQ= 24841
+CXNi 24842
+IENoZWNrcw== 24843
+IHBoZW5vbWVub24= 24844
+QXZhdGFy 24845
+IG1pbmlzdHJ5 24846
+cm9zZQ== 24847
+CUZpbGU= 24848
+IHRpdGxlZA== 24849
+KExPRw== 24850
+IGdhbg== 24851
+ZGVzaWdu 24852
+KCksDQo= 24853
+IGJvbmVz 24854
+c3Rt 24855
+xZvEhw== 24856
+IElucHV0U3RyZWFt 24857
+IHZvbHVudA== 24858
+IFNlcmlhbGl6YWJsZQ== 24859
+IGZpZ2h0ZXI= 24860
+IERyYWc= 24861
+VHdpdHRlcg== 24862
+IHN1YnNpZA== 24863
+57w= 24864
+IGZvcnVtcw== 24865
+LmxvYWRpbmc= 24866
+bG9nZ2Vk 24867
+X3RoaXM= 24868
+IHRlcnJhaW4= 24869
+IGlycmU= 24870
+IEluZw== 24871
+IENO 24872
+X29iamVjdHM= 24873
+LnVpZA== 24874
+IGNvbnNjaW91c25lc3M= 24875
+VElOR1M= 24876
+IEdhbGw= 24877
+IHBvcnRyYXk= 24878
+IERldmVsb3Blcg== 24879
+IHBhcnRpY2lwYW50 24880
+ICI7DQo= 24881
+L21vZGVs 24882
+IE9wZXJhdGlvbnM= 24883
+Xlw= 24884
+IExhdGVy 24885
+IHJhaXNlcw== 24886
+LW5vbmU= 24887
+Lm1ldGE= 24888
+PScuJA== 24889
+RmluaXNoZWQ= 24890
+IHJlcGxhY2luZw== 24891
+IHNhbXBsaW5n 24892
+IEplbg== 24893
+IlRoZXJl 24894
+UkVBTA== 24895
+QUxF 24896
+7Iqk 24897
+T3JkZXJz 24898
+X3BhcmFtZXRlcg== 24899
+IE9seW1waWM= 24900
+IHRyw6hz 24901
+IGFyZW5h 24902
+aW9s 24903
+Oz8+ 24904
+IGltcGFjdHM= 24905
+IFdT 24906
+OmdldA== 24907
+IGZsaWdodHM= 24908
+IFJ1c3NlbGw= 24909
+Y2FtZXJh 24910
+Rm4= 24911
+c2lnbWE= 24912
+IGZvcmNpbmc= 24913
+IGxvY2Fscw== 24914
+IGRlcGFydHVyZQ== 24915
+IGNlbGVicmF0aW9u 24916
+IFNheQ== 24917
+77yS 24918
+IEhpbGxz 24919
+Lmhhc093blByb3BlcnR5 24920
+IHR5cGluZ3M= 24921
+LkFQSQ== 24922
+IGRvbmF0aW9u 24923
+T3BlcmF0aW9uRXhjZXB0aW9u 24924
+LkFjdGl2aXR5 24925
+Y3BsdXNwbHVz 24926
+IENoYXJsaWU= 24927
+IGltcG9ydGVk 24928
+IGRhbm4= 24929
+IG9jY2FzaW9ucw== 24930
+IGltcGxlbWVudGluZw== 24931
+IHB1cnBsZQ== 24932
+LmRpYWxvZw== 24933
+U1FMRXhjZXB0aW9u 24934
+ZXJubw== 24935
+IHdhcnM= 24936
+IHBhc3Rl 24937
+IGRlY3JlYXNlZA== 24938
+IGhhcnNo 24939
+IGVsYWJvcg== 24940
+aW5wdXRz 24941
+IFZpZXdz 24942
+IGVycm9yTWVzc2FnZQ== 24943
+X211bA== 24944
+CXdyaXRl 24945
+IENvcA== 24946
+IEFubnVhbA== 24947
+KGJ1dHRvbg== 24948
+IHZpZGE= 24949
+YmFycw== 24950
+IEhhcnZhcmQ= 24951
+CWV4cGVjdA== 24952
+IGluZGV4ZXM= 24953
+IGRvY3VtZW50YXJ5 24954
+IGZsZXNo 24955
+T1JMRA== 24956
+IERlbHRh 24957
+TUFORA== 24958
+QnJ1c2g= 24959
+LWNvbHVtbg== 24960
+IGRldmVsb3BtZW50cw== 24961
+bWV0aG9kVmlzaXRvcg== 24962
+c2xpY2U= 24963
+IFBETw== 24964
+IGludmVzdGluZw== 24965
+aXJhYmxl 24966
+IHhtbG5z 24967
+77yb 24968
+YXJ0YQ== 24969
+IHRoZW9yaWVz 24970
+X2NpdHk= 24971
+ICRfXw== 24972
+Q3JlYXRpbmc= 24973
+KHBy 24974
+RHJvcGRvd24= 24975
+aXNtYXRjaA== 24976
+IE5FVA== 24977
+J10pKXsK 24978
+IFZhbHVlcw== 24979
+IFNFTw== 24980
+IFNUQVQ= 24981
+IGVjb3N5c3RlbQ== 24982
+IHRlbXB0 24983
+IFxc 24984
+IC8vewo= 24985
+IENocmlzdG9waGVy 24986
+IEtlbnR1Y2t5 24987
+IEh0dHBTZXJ2bGV0UmVzcG9uc2U= 24988
+IGh5YnJpZA== 24989
+eW9u 24990
+IGZlZWRpbmc= 24991
+IEV4dHJh 24992
+Tm9ybQ== 24993
+SVRDSA== 24994
+IFNlYW4= 24995
+IFVwbG9hZA== 24996
+bXVu 24997
+cHVy 24998
+IHBlcnNpc3RlbnQ= 24999
+IElEQw== 25000
+IFBlcmZvcm0= 25001
+Lm1lcmdl 25002
+X3Jvb20= 25003
+TWVhbndoaWxl 25004
+IT0n 25005
+IFdlbA== 25006
+QXJnc0NvbnN0cnVjdG9y 25007
+LkRhdGFiYXNl 25008
+IGNvdW50aW5n 25009
+KCkq 25010
+lOWbng== 25011
+IFRPUA== 25012
+bWlsbA== 25013
+IERU 25014
+SUdORUQ= 25015
+IEtC 25016
+IGNvbXBseQ== 25017
+U291dGg= 25018
+X2NvbGxlY3Rpb24= 25019
+Q2hhcHRlcg== 25020
+IGV4cGxhaW5pbmc= 25021
+X0FN 25022
+X3Rz 25023
+Y2FyZHM= 25024
+IHF1ZWw= 25025
+IHBvbGU= 25026
+IHRvdWNoZG93bg== 25027
+IE90aGVycw== 25028
+IHBlZXJz 25029
+IFR5cGVFcnJvcg== 25030
+IHNpeHRo 25031
+IGNoZWVy 25032
+IGRpc3B1dGU= 25033
+dXNj 25034
+KV0s 25035
+dGh1bWI= 25036
+IGhpZGluZw== 25037
+IFNJRw== 25038
+bGlrZXM= 25039
+IFBBR0U= 25040
+LlJlZmxlY3Rpb24= 25041
+IGhlYWRxdWFydGVycw== 25042
+VElORw== 25043
+IEdob3N0 25044
+TUxF 25045
+JAo= 25046
+IGNvbnRyYXJ5 25047
+ZXh0ZW5k 25048
+J10pLg== 25049
+RkZFQ1Q= 25050
+IFBpbnRlcmVzdA== 25051
+w7ptZXJv 25052
+cmljYW5l 25053
+CXNlc3Npb24= 25054
+IGNyeXN0YWw= 25055
+LUNvbnRyb2w= 25056
+b3Zlcm5tZW50 25057
+b2dyYWY= 25058
+LWFjdGlvbg== 25059
+dm9sdW1l 25060
+ZnRlbg== 25061
+IHVuY29u 25062
+IGFuaW1hdGU= 25063
+IGxlYXNl 25064
+c2Ny 25065
+IHJlZnVzZQ== 25066
+44CL 25067
+ZnRw 25068
+aW5mb3JtYXRpb24= 25069
+IGV2YWx1YXRlZA== 25070
+IGluamVjdGlvbg== 25071
+IGphY2s= 25072
+IHdvcmtzaG9w 25073
+5rOo 25074
+UFRI 25075
+IFRz 25076
+b2ZmZXI= 25077
+CW9z 25078
+IGtpbmdkb20= 25079
+TWlzc2luZw== 25080
+IGxhd21ha2Vycw== 25081
+ZXh0RmllbGQ= 25082
+IHNpbmdpbmc= 25083
+YWJp 25084
+L2NsaWVudA== 25085
+Lm1lZGlh 25086
+QVRFR09SWQ== 25087
+U2lnbmF0dXJl 25088
+JScsCg== 25089
+IEZ1Y2s= 25090
+XVs6 25091
+IHNlbnNvcnM= 25092
+L2NvbQ== 25093
+IFByaW1hcnk= 25094
+LlNRTA== 25095
+X3Byb2dyYW0= 25096
+IHBpbGxz 25097
+IGludGVncmFs 25098
+IGZsZWV0 25099
+IGRyb3BwaW5n 25100
+LnNs 25101
+QmVlbg== 25102
+IHBldHM= 25103
+IGFkdmlzZWQ= 25104
+IGRyYWdvbg== 25105
+X0VESVQ= 25106
+KGlt 25107
+RkVS 25108
+IERydWc= 25109
+KHJhbmRvbQ== 25110
+IGNvbXByZXNzaW9u 25111
+b3VzdA== 25112
+WyU= 25113
+IGJ1eWVy 25114
+aG9w 25115
+Um9sZXM= 25116
+bWFuYWdl 25117
+IHBhaW5mdWw= 25118
+IEJyYW5jaA== 25119
+LW1vZGFs 25120
+ZW5hbnQ= 25121
+IE1lc2g= 25122
+L2ZvbnQ= 25123
+IEdyYWhhbQ== 25124
+IOKY 25125
+IG5j 25126
+IEZyYW5jaXM= 25127
+IHNwZWNpZmljYXRpb24= 25128
+IGRhbWFnZXM= 25129
+LWNvbmZpZw== 25130
+IHRoZW9yZXQ= 25131
+c2VjdXJl 25132
+X211bHRp 25133
+YWNldXRpY2Fs 25134
+IGRlbWFuZGluZw== 25135
+ZW5uZQ== 25136
+SVNUUw== 25137
+KCkpKTsKCg== 25138
+UmVhc29u 25139
+UmVjZW50 25140
+cGhhc2U= 25141
+IHBzeQ== 25142
+X01BTg== 25143
+IHZvbHVudGVlcg== 25144
+5b8= 25145
+aXN0cmlidXRlZA== 25146
+bGlv 25147
+IHByb2R1Y3Rpdml0eQ== 25148
+X2NvbW0= 25149
+U3ByaW5n 25150
+bmlz 25151
+LndlaWdodA== 25152
+IENhbmNlcg== 25153
+QWxsb2M= 25154
+IFR3ZWV0 25155
+IHNlcGFyYXRlbHk= 25156
+CWNoZWNr 25157
+X3Byb3BlcnRpZXM= 25158
+LlVuaXQ= 25159
+X0NMSw== 25160
+IGd0 25161
+ICgpOwoK 25162
+IGhhbmR5 25163
+IFRob21wc29u 25164
+IHVubmVjZXNzYXJ5 25165
+IFJlYWRlcg== 25166
+R04= 25167
+PXJlcXVlc3Q= 25168
+IFV0aWxpdHk= 25169
+LlJlcG9zaXRvcnk= 25170
+IEF4 25171
+aHlkcg== 25172
+aWV1 25173
+IHRoeQ== 25174
+IGx0 25175
+X21haWw= 25176
+5L+u5pS5 25177
+YWlsYW5k 25178
+IFBoaWxpcA== 25179
+IGJpdHRlcg== 25180
+IGJldHRpbmc= 25181
+IHRpbWVk 25182
+b2Nrcw== 25183
+J2E= 25184
+IGFsZ29yaXRobXM= 25185
+IHJlaW50ZXJwcmV0 25186
+IHRvc3M= 25187
+cm9nZW4= 25188
+IGhvcGVk 25189
+KHNlbGVjdGVk 25190
+IHZlbnR1cmU= 25191
+VEVY 25192
+IExlYXZl 25193
+LlN1YnN0cmluZw== 25194
+IGdyYXRlZnVs 25195
+dWth 25196
+IENvbnN1bWVy 25197
+IGFnZ3JlZw== 25198
+Q2lyY2xl 25199
+4LiB 25200
+X2Jsb2Nrcw== 25201
+IGxlZ2FsbHk= 25202
+ICJ8 25203
+44OD 25204
+LmJvYXJk 25205
+LkFi 25206
+RnVuY3Rpb25z 25207
+cmVjaXBl 25208
+6Ic= 25209
+IE94Zm9yZA== 25210
+IHdob2xlcw== 25211
+LkJ1aWxk 25212
+X2NoYW5nZWQ= 25213
+aGFp 25214
+IGRlcGFydG1lbnRz 25215
+SW1w 25216
+IGNvYWxpdGlvbg== 25217
+SU5GUklOR0VNRU5U 25218
+IGVtcG93ZXI= 25219
+aXRjaGVz 25220
+Tm9ydGg= 25221
+IGluZmxhbW0= 25222
+T05TRQ== 25223
+IG1pc3NpbGU= 25224
+IFJhag== 25225
+IElzc3Vl 25226
+IGF0b2k= 25227
+Y2FsZWQ= 25228
+LkNvbnRyb2xsZXJz 25229
+IFdvbGY= 25230
+IGNydXNoZXJz 25231
+4buH 25232
+LkF1dGg= 25233
+LmFkZEF0dHJpYnV0ZQ== 25234
+aGlz 25235
+IGJvb3Rz 25236
+LmNsZWFu 25237
+Y2FtcA== 25238
+IHRlbmFudA== 25239
+IHR1bmU= 25240
+IHt9Jy4= 25241
+IHdvcmtvdXQ= 25242
+UmVwbw== 25243
+IHBhcnRpYWxseQ== 25244
+TUlTU0lPTg== 25245
+amFtaW4= 25246
+IFNC 25247
+IGRldGVybWluYXRpb24= 25248
+ICcnKTsK 25249
+IEJlbmc= 25250
+IHZvcw== 25251
+IGluaGFi 25252
+L2xhbmc= 25253
+c2J1cmdo 25254
+RXhlY3V0b3I= 25255
+aG9uZQ== 25256
+IENoYWxsZW5nZQ== 25257
+X2xpbmtz 25258
+LkxldmVs 25259
+IHVuZGVyZ3JvdW5k 25260
+LWNvZGU= 25261
+IG9wdGltaXphdGlvbg== 25262
+bG9nZ2luZw== 25263
+X2Rlc3Q= 25264
+IHNuYWtl 25265
+IGNoZW1pY2Fscw== 25266
+X0lNUE9SVEVE 25267
+YWRvb3A= 25268
+IFRIQVQ= 25269
+bWFuYWdlZA== 25270
+IHJlZHVjZXM= 25271
+IFJFQUw= 25272
+IEd1eQ== 25273
+X0dFTkVSSUM= 25274
+LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq 25275
+LmFtb3VudA== 25276
+IGRlcmU= 25277
+Z2V0VGltZQ== 25278
+IHBhbnQ= 25279
+YW5vbnltb3Vz 25280
+IGhhcm1vbnk= 25281
+IEFsYW4= 25282
+IHNjZW5hcmlvcw== 25283
+IGRpcnQ= 25284
+aHRhZ3M= 25285
+TWM= 25286
+U2hlbGw= 25287
+cmlu 25288
+ew0KDQo= 25289
+LnBvdw== 25290
+CWNsaWVudA== 25291
+IGNvbnNwaXJhY3k= 25292
+IGFkbWlzc2lvbg== 25293
+IFJlZ2lvbmFs 25294
+IFZpZXdDb250cm9sbGVy 25295
+IFBoaWxpcHBpbmVz 25296
+IGRlcG9z 25297
+IHBhcA== 25298
+IFBhZA== 25299
+UGF1bA== 25300
+LkNvbWJvQm94 25301
+IHR1dG9y 25302
+IFJlY2lwZQ== 25303
+d3JpdGluZw== 25304
+IGNvbnRyaWJ1dG9y 25305
+T1RI 25306
+U21hbGw= 25307
+Vkk= 25308
+IGhhY2Vy 25309
+ZXF1 25310
+IEV4YW1wbGVz 25311
+aHVtYW4= 25312
+Lm1lc3NhZ2Vz 25313
+CXR5cA== 25314
+ICgNCg== 25315
+IFNTTA== 25316
+TEVO 25317
+IFJvbW5leQ== 25318
+KGdyaWQ= 25319
+CW1pbg== 25320
+ID4KCg== 25321
+IGZydWl0cw== 25322
+IHZvdGVy 25323
+SW5saW5l 25324
+cGFuZQ== 25325
+IENvbGxlY3Rpb25z 25326
+Y2hhcnNldA== 25327
+IHNwYW0= 25328
+emI= 25329
+aXRlbWFw 25330
+IHN1Y2NlZWRlZA== 25331
+X0NPTA== 25332
+IGVsYXBzZWQ= 25333
+aW1ldGVy 25334
+IHJlY292ZXJlZA== 25335
+VGVuc29y 25336
+aGF0dGFu 25337
+LnNldHVw 25338
+aXN0bw== 25339
+KGhlYWQ= 25340
+IFNJWkU= 25341
+IHRhY3RpY3M= 25342
+IGRpc3R1cg== 25343
+IHByZXZhbA== 25344
+aWNpb3M= 25345
+KFZhbHVl 25346
+X2NvbHM= 25347
+IEZhdA== 25348
+IHNlYWw= 25349
+IHNvbnM= 25350
+IGVuc3VyZXM= 25351
+IHByZXNzaW5n 25352
+PSY= 25353
+aWdlbm91cw== 25354
+IGhhcmFzc21lbnQ= 25355
+X0pTT04= 25356
+IGlnbm9y 25357
+eW5vbWlhbA== 25358
+b21lcg== 25359
+X3N0YXRpYw== 25360
+IHNpZ25pZmljYW5jZQ== 25361
+IGNpcmNsZXM= 25362
+X1N5c3RlbQ== 25363
+IGRpc2NpcGxpbmU= 25364
+IGRyZXNzZWQ= 25365
+IHNwaGVyZQ== 25366
+IGNsaW1i 25367
+X2FjdGlvbnM= 25368
+IEJhYg== 25369
+ICc9Jyw= 25370
+X3NjaGVtYQ== 25371
+InVzZQ== 25372
+IHVuZGVycw== 25373
+IGN1cHM= 25374
+LnNjcmVlbg== 25375
+L25ldw== 25376
+IGFwcGVhcmluZw== 25377
+VE9Q 25378
+dmlzZWQ= 25379
+Y2xhbmc= 25380
+IGludmVzdGlnYXRvcnM= 25381
+IG15c3RlcmlvdXM= 25382
+IHByb21pc2luZw== 25383
+IHF1YWxpZnk= 25384
+IGNhdmU= 25385
+IGVxdWlw 25386
+PXg= 25387
+R1Q= 25388
+KGxpbms= 25389
+LnZlbG9jaXR5 25390
+LmVyYXNl 25391
+b3Rlcg== 25392
+KysrKysrKys= 25393
+cHJvZml0 25394
+IHpvbmVz 25395
+X3VpZA== 25396
+LXNlcg== 25397
+IG9iamVjdGl2ZXM= 25398
+IG1pbGY= 25399
+d2Via2l0 25400
+KG1hdGNo 25401
+bmVo 25402
+IEFzc29jaWF0ZWQ= 25403
+IFRvZG8= 25404
+PWQ= 25405
+Q2Ft 25406
+IHZvY2Fs 25407
+IHN1ZG8= 25408
+KEVY 25409
+IHRyb3U= 25410
+QUJD 25411
+LmJlYW4= 25412
+IEdyb3VuZA== 25413
+IFJFU1Q= 25414
+d2VldHM= 25415
+SW5n 25416
+aW1vbg== 25417
+X2J1cw== 25418
+IENPTE9S 25419
+dW50bw== 25420
+IGZvc3M= 25421
+IExpbmtz 25422
+w6RuZw== 25423
+L2Zvcm1z 25424
+cHJpc2Vz 25425
+IGFjaGlldmVtZW50 25426
+Q0FMTA== 25427
+0LXQu9GM 25428
+IFZlcmlmeQ== 25429
+X1NPVVJDRQ== 25430
+YXB0Y2hh 25431
+SURE 25432
+X3JlZmVyZW5jZQ== 25433
+R29sZA== 25434
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgIAo= 25435
+UmVjZWl2ZXI= 25436
+IGFq 25437
+X2RpcmVjdGlvbg== 25438
+fV0= 25439
+IENvbXBldA== 25440
+IGJhbmc= 25441
+IENhc3M= 25442
+LXVybA== 25443
+dGVjaG4= 25444
+IEplcnVzYWxlbQ== 25445
+bG9uZ2l0dWRl 25446
+Jyk7DQoNCg== 25447
+IHdpbm5lcnM= 25448
+VGFza3M= 25449
+IERNQQ== 25450
+IHRvb2x0aXA= 25451
+jrc= 25452
+IEJyYQ== 25453
+X2R1cmF0aW9u 25454
+Y3VyeQ== 25455
+cGFyZW50cw== 25456
+LS0tLTwv 25457
+IHBhc3Nwb3J0 25458
+V0M= 25459
+INC7 25460
+Y2Vzc2lvbg== 25461
+IFllbGxvdw== 25462
+IGVuY3J5cHRpb24= 25463
+JwoKCg== 25464
+IGxpc3Rpbmdz 25465
+IENvbW11bmljYXRpb25z 25466
+Ll8K 25467
+ICIiIg0K 25468
+IGZi 25469
+IHN0cmljdGx5 25470
+IExpdGVy 25471
+IEVudGVycHJpc2U= 25472
+X2JvdHRvbQ== 25473
+QUtF 25474
+a2V0 25475
+IHRhbQ== 25476
+QmV0d2Vlbg== 25477
+X1RPUA== 25478
+RGlzYWJsZQ== 25479
+IGZpbGluZw== 25480
+IENocm9u 25481
+U0VRVQ== 25482
+ICZfX18= 25483
+IGZhbA== 25484
+IFNMT1Q= 25485
+RW1iZWQ= 25486
+dXRoZXI= 25487
+IFJlc3RhdXJhbnQ= 25488
+IHJlYWxpc3RpYw== 25489
+IScpOwo= 25490
+IERFQUw= 25491
+IFBlcmlvZA== 25492
+LmdldFg= 25493
+IHNlaHI= 25494
+Il0nKS4= 25495
+ZXNzYQ== 25496
+CW1lbWNweQ== 25497
+IGFja25vd2xlZGdlZA== 25498
+c2VuYWw= 25499
+IFVuaXZlcnNhbA== 25500
+ICcnOwoK 25501
+L3dpa2k= 25502
+aWVubmU= 25503
+IE5TQXJyYXk= 25504
+IGFjY2VwdGFuY2U= 25505
+IGxpdmVy 25506
+IHRvb3Ro 25507
+IGFjY3Vz 25508
+CUxPRw== 25509
+dmFsdQ== 25510
+5YC8 25511
+IHNlY3RvcnM= 25512
+cGVyaW1lbnRhbA== 25513
+L2NsYXNz 25514
+X2dv 25515
+TWljaGFlbA== 25516
+b2xhdGlsZQ== 25517
+IFBST0Y= 25518
+IGNvbXByb20= 25519
+c3BlY2lhbGNoYXJz 25520
+IOKc 25521
+IGlzRXF1YWxUb1N0cmluZw== 25522
+IEh1bmc= 25523
+LmFzTGlzdA== 25524
+L2dv 25525
+Pj4o 25526
+IEtpcg== 25527
+IGludHJvcw== 25528
+IHNrZXRjaA== 25529
+IHNraWxsZWQ= 25530
+IGltbWVy 25531
+IGFkZXF1YXRl 25532
+X3JlcA== 25533
+KGhlYWRlcg== 25534
+X2xpa2U= 25535
+IHBlcmNlaXZlZA== 25536
+c3No 25537
+IGFzc3VtaW5n 25538
+IGZm 25539
+X3V1aWQ= 25540
+dWxhcw== 25541
+IGRlbW9jcmF0aWM= 25542
+LmVudGl0aWVz 25543
+U2VyaWVz 25544
+YXBob3Jl 25545
+IG5ld2Vy 25546
+fSg= 25547
+U0VD 25548
+YWlybw== 25549
+IGNvbW1vZA== 25550
+IHByaXZpbGVnZQ== 25551
+IGRldXg= 25552
+IEhvcA== 25553
+Licv 25554
+Y3RpYw== 25555
+Lic7Cg== 25556
+PD89 25557
+IFVU 25558
+ZXRpZXM= 25559
+X0NPTlRFTlQ= 25560
+LnJlbGVhc2U= 25561
+LmRpc21pc3M= 25562
+IGZj 25563
+b3VuZ2U= 25564
+cHdk 25565
+X3ByZXY= 25566
+TWdy 25567
+IEJ1ZmZlcmVkUmVhZGVy 25568
+d3JpdHRlbg== 25569
+IEVi 25570
+ICkKCgo= 25571
+dWl0bw== 25572
+IGNvbnRyb3ZlcnN5 25573
+IGRpc3Bvc2Vk 25574
+IGZvdG8= 25575
+TGlzdFZpZXc= 25576
+L2NyZWF0ZQ== 25577
+IENPTA== 25578
+Y29tbXVuaWM= 25579
+IGZyZWVseQ== 25580
+dW5hbA== 25581
+b3ZpZA== 25582
+CXRy 25583
+cGFnaW5hdGlvbg== 25584
+IENvbW1vbnM= 25585
+RWxlbQ== 25586
+IFJFTQ== 25587
+IGNvcnJlbGF0aW9u 25588
+KCkrIg== 25589
+IEhpZGU= 25590
+YW5kaW5n 25591
+KHZlYw== 25592
+aXRvcw== 25593
+IEN1bHQ= 25594
+IG51dHJpdGlvbg== 25595
+dmFscw== 25596
+IGRldGVybWluaW5n 25597
+bG9yZA== 25598
+IHNjYW5kYWw= 25599
+IHNoYWxsb3c= 25600
+b2Rhc2g= 25601
+X3NlcmlhbA== 25602
+IFNsbw== 25603
+IGRpc3Bvbg== 25604
+UGxvdA== 25605
+aWNrbGU= 25606
+IGVsbA== 25607
+IHVuZW1wbG95bWVudA== 25608
+Rk0= 25609
+cm9ucw== 25610
+bMSx 25611
+TW8= 25612
+RXhpc3Q= 25613
+SURT 25614
+Q2hv 25615
+IEtleWJvYXJk 25616
+LnBhcnNlcg== 25617
+LkdldE9iamVjdA== 25618
+IHNwZWxscw== 25619
+IGdlc2No 25620
+IG1hZ25pdHVkZQ== 25621
+X1NM 25622
+aXNkaWN0aW9u 25623
+ICcpOwo= 25624
+aWxpYW5z 25625
+IHNoYXI= 25626
+IFByb2I= 25627
+dWlsdGlu 25628
+IHR1bm5lbA== 25629
+PkM= 25630
+IFdhcnJlbg== 25631
+IG9wdGltaXplcg== 25632
+IFNFUlZJQ0VT 25633
+X29wZXI= 25634
+Z2V0QXR0cmlidXRl 25635
+IE1jSw== 25636
+X3NlbGY= 25637
+LnJz 25638
+IikKCgo= 25639
+R2V0Q29tcG9uZW50 25640
+ZXJjZQ== 25641
+IHRvdXM= 25642
+dW5pdHM= 25643
+J10pOw0K 25644
+Wm9vbQ== 25645
+L0U= 25646
+IG9ic2M= 25647
+IGZhc3Rlc3Q= 25648
+b25saW5l 25649
+IHBlYWNlZnVs 25650
+ZmZlbg== 25651
+IGNhcmdv 25652
+CXBy 25653
+IHNlZWtz 25654
+enU= 25655
+VHJpbQ== 25656
+IHdhcmQ= 25657
+IHZlcmQ= 25658
+IGJsb2dz 25659
+LmV4Y2VwdGlvbnM= 25660
+IFByZW1pdW0= 25661
+IE5ldGhlcmxhbmRz 25662
+U2FmZQ== 25663
+RmluaXNo 25664
+IEFsYnVt 25665
+X0FDQw== 25666
+PXRoaXM= 25667
+dmlydHVhbA== 25668
+XT4= 25669
+X0xBQkVM 25670
+IE5pY2g= 25671
+X3dpbg== 25672
+IEFhcm9u 25673
+V1A= 25674
+OyQ= 25675
+YWltcw== 25676
+IEltYWdlVmlldw== 25677
+IGVuZGxlc3M= 25678
+RVJB 25679
+X0RJU0FCTEU= 25680
+IGNhbmNlbGxlZA== 25681
+LXVz 25682
+IGluc3BlY3Rpb24= 25683
+ZW1pbg== 25684
+IEdyZXk= 25685
+LW9wZW4= 25686
+IGl0ZXJhdGlvbnM= 25687
+Lm93bmVy 25688
+IGtlcmFz 25689
+LlBhc3N3b3Jk 25690
+IFJ5 25691
+IElOUw== 25692
+QWly 25693
+IFNldmVyYWw= 25694
+LlRhYlN0b3A= 25695
+SU5HTEU= 25696
+IEhhaXI= 25697
+IENhbnZhcw== 25698
+QUFBQQ== 25699
+IGZsYXc= 25700
+Y2VkZXM= 25701
+LlJlcG9ydA== 25702
+7Yo= 25703
+IFRpcHM= 25704
+Y3JpcHRvcnM= 25705
+LnRyYW5zYWN0aW9u 25706
+LlNwcmluZw== 25707
+IHZpZXdlcg== 25708
+IGluc2lnaHRz 25709
+6L6T 25710
+b3JkaW9u 25711
+VUlOVA== 25712
+c2Vlaw== 25713
+IEF1Zg== 25714
+7J6Q 25715
+IHN0cmFpbg== 25716
+VG9vbHRpcA== 25717
+IGR6 25718
+aWduYWw= 25719
+YWR0 25720
+IHVj 25721
+ZmluaXRl 25722
+IG5t 25723
+LmNtZA== 25724
+IE15U3Fs 25725
+W2RhdGE= 25726
+LmphY2tzb24= 25727
+LnRyZWU= 25728
+UmVxdWVzdFBhcmFt 25729
+X2FnZW50 25730
+IildDQo= 25731
+IGFzc2Fzcw== 25732
+KENvbnN0YW50cw== 25733
+OnNz 25734
+IE1BTg== 25735
+Ky0rLQ== 25736
+IEJvdHRvbQ== 25737
+cHJpbnRz 25738
+IFNhbWU= 25739
+QEF1dG93aXJlZA== 25740
+c3dhcA== 25741
+aWNpw7Nu 25742
+IHByb3Rlc3RlcnM= 25743
+IGhvbmV5 25744
+IFZldGVy 25745
+KENhbGVuZGFy 25746
+LWFk 25747
+IEJyb29rbHlu 25748
+TGlmZQ== 25749
+X1ZBUg== 25750
+emVjaA== 25751
+IENBTEw= 25752
+X0NBU1Q= 25753
+IEVsZWN0aW9u 25754
+IHRoaWNrbmVzcw== 25755
+VmVyeQ== 25756
+X0lOVEVHRVI= 25757
+LWRldg== 25758
+KSkpKQ== 25759
+YXBhdA== 25760
+b29vbw== 25761
+ZGVtbw== 25762
+IHBhcnNlRmxvYXQ= 25763
+IFJhdGhlcg== 25764
+U1RJVA== 25765
+bWFrZXI= 25766
+W2N1cnJlbnQ= 25767
+Y2hyb25v 25768
+IGNocmlzdA== 25769
+44Gq 25770
+IERldGFpbA== 25771
+xrDhuw== 25772
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg 25773
+IHN1bA== 25774
+aWRlbmN5 25775
+UXVl 25776
+IGVsZWdhbnQ= 25777
+YXBvbnM= 25778
+IGRpc2hlcw== 25779
+IGludGVnZXJz 25780
+KHJlYWQ= 25781
+ZmluZFZpZXdCeUlk 25782
+IEFtb3VudA== 25783
+IFNraXA= 25784
+IGhhYml0cw== 25785
+Kiko 25786
+IG1vbnN0ZXJz 25787
+TUFD 25788
+OmVuZA== 25789
+IGZyYW5r 25790
+QXNzZW1ibHk= 25791
+IGRmcw== 25792
+IG5ldXQ= 25793
+X1RZUEVT 25794
+ZXF1YWw= 25795
+bG95ZA== 25796
+KHVyaQ== 25797
+IGNoaQ== 25798
+IGRlZmVuZGFudA== 25799
+IGNvbmZsaWN0cw== 25800
+IHZpbA== 25801
+LWpz 25802
+IFBlYWNl 25803
+IG11dGFibGU= 25804
+KXNlbmRlcg== 25805
+IEZvY3Vz 25806
+5bu6 25807
+IGFwcHJlY2lhdGVk 25808
+c2xlZXA= 25809
+IFJFRA== 25810
+Q3VsdHVyZQ== 25811
+IGRlc2lnbmVycw== 25812
+X2dlbmVyYXRvcg== 25813
+Y29kZXM= 25814
+L2V4 25815
+LkdldFZhbHVl 25816
+dW1ibGVk 25817
+LnNjYWxhanM= 25818
+cGVyb3I= 25819
+IHZldGVyYW5z 25820
+IH0pDQo= 25821
+IHVuZm9ydHVuYXRlbHk= 25822
+X0NSRUFURQ== 25823
+TWFzcw== 25824
+IENMQUlN 25825
+IE1lZXQ= 25826
+X3N1cHBvcnQ= 25827
+QmFuaw== 25828
+KCkuCg== 25829
+RGFyaw== 25830
+X0xPVw== 25831
+IE1pbmluZw== 25832
+IE93bmVy 25833
+aWVyYQ== 25834
+Q2xpZW50ZQ== 25835
+IGVuY291cmFnaW5n 25836
+PlM= 25837
+IGJveWZyaWVuZA== 25838
+IEhhbGY= 25839
+IEFDQw== 25840
+QWZm 25841
+X2Fy 25842
+LWxpZmU= 25843
+Y3g= 25844
+LkpCdXR0b24= 25845
+aXphZG8= 25846
+Lnplcm8= 25847
+Lm9wZW5xYQ== 25848
+b3Rvbg== 25849
+LnRleHRDb250ZW50 25850
+IHRvbGw= 25851
+YXRpZQ== 25852
+IGJhbGxvdA== 25853
+LW51bWJlcg== 25854
+LkV4Y2VwdGlvbg== 25855
+CXBhcmFtcw== 25856
+Y2lyY2xl 25857
+LW1hcA== 25858
+IG5hcA== 25859
+IFJvYm90 25860
+IEljaA== 25861
+cmVnaXN0cmF0aW9u 25862
+QW1hem9u 25863
+cm9sbG1lbnQ= 25864
+KGV4cA== 25865
+IHRhbmtz 25866
+IEdvcmRvbg== 25867
+IG1hY2hpbmVyeQ== 25868
+IGJhc2VsaW5l 25869
+5os= 25870
+2Kk= 25871
+IENvbnZlbnRpb24= 25872
+CWNvbmZpZw== 25873
+b29raWVz 25874
+bXVsdA== 25875
+UmVjb3Jkcw== 25876
+IEVTVA== 25877
+IGdhcmJhZ2U= 25878
+IGNvbmZvcm0= 25879
+aWRhbA== 25880
+IGJhcmc= 25881
+IHN1cnZpdmVk 25882
+IGludmVzdGlnYXRpb25z 25883
+LmNvbnRhaW5zS2V5 25884
+LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0K 25885
+b3J0aW9u 25886
+IGhvcnI= 25887
+X2h0dHA= 25888
+IG1hbnQ= 25889
+XTsNCg0K 25890
+YmluYXJ5 25891
+ZW1wbA== 25892
+IGlucXVpcnk= 25893
+IE1lYW53aGlsZQ== 25894
+IGNvbGxlY3Rpbmc= 25895
+LkVudGl0eUZyYW1ld29yaw== 25896
+IiwKCg== 25897
+IFBpYw== 25898
+QEluamVjdA== 25899
+aWNrbmVzcw== 25900
+IEJpbmRpbmc= 25901
+IGNvbnRyb2xsaW5n 25902
+cmV2ZXJzZQ== 25903
+IGNoYWlycw== 25904
+c2VtYmxlZA== 25905
+KGFkZA== 25906
+RGlzYWJsZWQ= 25907
+YW5hcw== 25908
+LnRyYW5zbGF0ZQ== 25909
+LS0tLS0tLS0tLS0K 25910
+IHJlZmxlY3RlZA== 25911
+Il0KCg== 25912
+RXh0ZXJuYWw= 25913
+QXJyb3c= 25914
+U2luZ2xldG9u 25915
+JXg= 25916
+IMU= 25917
+IGFuY2VzdA== 25918
+IE9ybGVhbnM= 25919
+CWNtZA== 25920
+IHByb2hpYml0ZWQ= 25921
+aXRobWV0aWM= 25922
+KGNoYW5uZWw= 25923
+X2Nzcw== 25924
+Rm9yd2FyZA== 25925
+LnNvY2tldA== 25926
+IGx1Yw== 25927
+4oY= 25928
+IEZpcmVmb3g= 25929
+IE1vdmllcw== 25930
+KV8= 25931
+LmVuZHM= 25932
+KHNoYXBl 25933
+IGRlYWx0 25934
+IHNhdmVz 25935
+IGdsb3J5 25936
+IG1lam9y 25937
+IGJyZWF0aGluZw== 25938
+IGVsbGVy 25939
+Z2V0RGF0YQ== 25940
+IGFuZ2xlcw== 25941
+IHRvb2xiYXI= 25942
+IHNwYWNpbmc= 25943
+SVBT 25944
+IGZsb29ycw== 25945
+X0FDVElWRQ== 25946
+IHNodWZmbGU= 25947
+L3NoYXJlZA== 25948
+IEVsZQ== 25949
+ZWRpc2g= 25950
+IHdlYmNhbQ== 25951
+LmV4cGVjdA== 25952
+aWxvYw== 25953
+IEluY2x1ZGVz 25954
+IHR3ZWV0ZWQ= 25955
+IDop 25956
+IEVzc2F5 25957
+Rml4 25958
+LWJldHdlZW4= 25959
+X3dlYg== 25960
+LmNvbnY= 25961
+IHJhY2lzbQ== 25962
+IHJlZmxlY3Rz 25963
+dW1t 25964
+0LjRgtC1 25965
+X2Zvb3Rlcg== 25966
+L2RvY3M= 25967
+IFBvdXI= 25968
+TmdNb2R1bGU= 25969
+LmluaXRpYWxpemU= 25970
+cGF0dGVybnM= 25971
+X0lu 25972
+IEFiYg== 25973
+Kg0K 25974
+IHNlbnRpbWVudA== 25975
+YnVmZg== 25976
+X2NvdW50cw== 25977
+IHJldXNl 25978
+Y2h1bms= 25979
+IGltcG9zZWQ= 25980
+UHJpbWFyeUtleQ== 25981
+Rm9yZWdyb3VuZA== 25982
+IGNvbnN1bWVk 25983
+PyE= 25984
+IGRpY2s= 25985
+IGNocm9u 25986
+IEZlcm4= 25987
+IHJlc3BvbnNpdmU= 25988
+IGluc2VjdA== 25989
+aWN1bHR5 25990
+IHJ3 25991
+IGFsaWtl 25992
+IHN1YnNldA== 25993
+IENvb2tpZXM= 25994
+IFBhaXI= 25995
+IHRpZXI= 25996
+SUZP 25997
+YXZvdXI= 25998
+IFFV 25999
+LHNpemVvZg== 26000
+IG1lcmdlZA== 26001
+bXY= 26002
+aXRvbA== 26003
+eWxvbg== 26004
+IGp1bXBlZA== 26005
+LnJvbGU= 26006
+ZW5zYWpl 26007
+UnVsZXM= 26008
+IGJyb3dzZQ== 26009
+QW5pbWF0b3I= 26010
+IHlvZ2E= 26011
+IHZhcmlhbnRz 26012
+IGNvdXJ0ZXN5 26013
+dXJhbg== 26014
+cGJz 26015
+ZWxzZWlm 26016
+QWx0 26017
+IExhbmU= 26018
+Q0xL 26019
+SU1BUlk= 26020
+X1BST1BFUlRZ 26021
+77yQ 26022
+IGNoYW4= 26023
+IGdyYWR1YWxseQ== 26024
+IHNoYWtl 26025
+IGJsb25kZQ== 26026
+Li4uIik7Cg== 26027
+LXNleA== 26028
+IGdhbWVwbGF5 26029
+YWNpZXM= 26030
+LnJlZnJlc2g= 26031
+VVNC 26032
+IFBsb3Q= 26033
+V2Fz 26034
+aXNzaXBwaQ== 26035
+IFRlbnNvcg== 26036
+IGNyeXB0b2N1cnJlbmN5 26037
+IGRpZmZpY3VsdGllcw== 26038
+RGVsZXRlZA== 26039
+V2l0aG91dA== 26040
+X2FwcGVuZA== 26041
+X3Zlcg== 26042
+IikpDQo= 26043
+IGhvbmVzdGx5 26044
+IHBpdm90 26045
+IHRlbXBz 26046
+X3Bz 26047
+IFVubGlrZQ== 26048
+Wzot 26049
+VlM= 26050
+X2luZg== 26051
+IGp1bmlvcg== 26052
+IGFuaW1hdGlvbnM= 26053
+IGZpbGVwYXRo 26054
+Pzwv 26055
+W1w= 26056
+IG9wZXJhdGVz 26057
+X3JlZA== 26058
+IEJvb3RzdHJhcA== 26059
+bGVhZA== 26060
+ZWZmZWN0 26061
+wr0= 26062
+IFN0ZXI= 26063
+IEJ1Y2s= 26064
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg 26065
+IGRlcHV0eQ== 26066
+VGhhbg== 26067
+4bq/ 26068
+T05FTlQ= 26069
+IEhlYXQ= 26070
+ZXRoZWxlc3M= 26071
+XSl7Cg== 26072
+IGtvc3Rlbmxvcw== 26073
+KCk7Ly8= 26074
+IGRlcGxveWVk 26075
+Pnt7JA== 26076
+IHVuaWNvZGU= 26077
+cGxhY2Vz 26078
+IENvZmZlZQ== 26079
+LlNF 26080
+IFBBUg== 26081
+KHR4dA== 26082
+Z2VicmE= 26083
+IGZpcmVz 26084
+TWFpbldpbmRvdw== 26085
+bWVkaXVt 26086
+ICjigJw= 26087
+IGxn 26088
+IGNtcA== 26089
+L2Jhc2U= 26090
+X2xheWVycw== 26091
+X2VudHJpZXM= 26092
+IGFkbWluaXN0ZXI= 26093
+IFNVQ0g= 26094
+QlA= 26095
+IFNjb3R0aXNo 26096
+CQ0KCQ0K 26097
+Z3VhcmQ= 26098
+IFN0cm9uZw== 26099
+SW5zbg== 26100
+IENBUA== 26101
+YXN1cnk= 26102
+IFNFRQ== 26103
+Q2xvY2s= 26104
+ZXJpZQ== 26105
+XG1vZGVscw== 26106
+ICQk 26107
+IENhYg== 26108
+IHd1cmRl 26109
+IHNvbGRpZXI= 26110
+IGNsaXBz 26111
+IGFycmFuZ2VtZW50 26112
+IFdvbmRlcg== 26113
+IEhvcm4= 26114
+IHNjYXJlZA== 26115
+IGN1cmU= 26116
+bWtkaXI= 26117
+IGFsaWduZWQ= 26118
+IFBpbms= 26119
+IGxhbmRlZA== 26120
+RGltZW5zaW9u 26121
+U2Nyb2xsUGFuZQ== 26122
+LmNoYXQ= 26123
+LldpdGg= 26124
+IFRyYWlu 26125
+XS4K 26126
+IHRoaXJ0eQ== 26127
+IGR1cmFibGU= 26128
+IGxk 26129
+IGxhdGVpbml0 26130
+IGNoYXJ0cw== 26131
+IGluc3VsdA== 26132
+LkZhdGFs 26133
+X2N0 26134
+IG1hc2tz 26135
+Q0xVREVE 26136
+UHJlc2lkZW50 26137
+IGNvbG91cnM= 26138
+Z21lbnRz 26139
+LmF0dHJpYnV0ZXM= 26140
+IEZsZXg= 26141
+IENsb2Nr 26142
+w61jdWw= 26143
+aW1lbg== 26144
+Sk8= 26145
+IFJlZ2V4 26146
+X0xJTks= 26147
+IGNvdWNo 26148
+IElOUFVU 26149
+IGJlYXRpbmc= 26150
+YnVzaW5lc3M= 26151
+cHJlY2Vk 26152
+LnVuaXQ= 26153
+IEZlbA== 26154
+TmV2ZXI= 26155
+b3NwZWw= 26156
+LnN0YXJ0c3dpdGg= 26157
+IEVQQQ== 26158
+Lm9ubHk= 26159
+IHByZXZlbnRpbmc= 26160
+eWVy 26161
+Q29sdW1uTmFtZQ== 26162
+IGVsZXZhdGlvbg== 26163
+Zmx1 26164
+aWN5Y2xl 26165
+IG9mZmxpbmU= 26166
+VG9vbGJhcg== 26167
+IGNvbXBldGluZw== 26168
+KV0u 26169
+IG1vZw== 26170
+IGlzVmFsaWQ= 26171
+QXNr 26172
+X2F2 26173
+X2xhdA== 26174
+QU5D 26175
+IEpvaA== 26176
+a2Vycw== 26177
+IGd1YXJkcw== 26178
+IGNoYWlucw== 26179
+IFNpbXBsZURhdGVGb3JtYXQ= 26180
+LnN0YXRpYw== 26181
+IHZlc3NlbA== 26182
+IG11ZA== 26183
+IHN0YWJpbA== 26184
+IHN0cmV0 26185
+Z20= 26186
+YW1hdGlvbg== 26187
+55w= 26188
+LXdpdGg= 26189
+IHJvcw== 26190
+X1BB 26191
+IHJlc3VsdGFkbw== 26192
+IGNvbmZpZGVudGlhbA== 26193
+IFRva3lv 26194
+CXVzaW5n 26195
+IE1hdGhm 26196
+b21iaW5l 26197
+IEVTUE4= 26198
+IGRlYWxlcnM= 26199
+IGRpc21pc3NlZA== 26200
+VFJZ 26201
+IHRlZW5z 26202
+cmVjb3Jkcw== 26203
+IHdpbmdz 26204
+Z2FsbGVyeQ== 26205
+YWNjb3VudHM= 26206
+X0xJQg== 26207
+IGphY2tldA== 26208
+IE5TT2JqZWN0 26209
+IHN0b25lcw== 26210
+IERlbGl2ZXJ5 26211
+IERpZXQ= 26212
+L3dhdGNo 26213
+IHRvaWxldA== 26214
+IEd1ZXN0 26215
+LmRheQ== 26216
+IGludHZhbA== 26217
+VmlzaXQ= 26218
+IGludmVzdGlnYXRlZA== 26219
+IHBlbnRydQ== 26220
+IFRoZWF0cmU= 26221
+YW5kaWRhdGVz 26222
+TGFuZw== 26223
+IFNlcnY= 26224
+IGNvbnRyb2xsZXJz 26225
+IHNldFRpdGxl 26226
+TlA= 26227
+YW15 26228
+ZmxhdA== 26229
+KHVp 26230
+X2RvY3VtZW50 26231
+6IO9 26232
+IENvaW4= 26233
+IEFkYW1z 26234
+cHRpYw== 26235
+IHByb2R1Y3RpdmU= 26236
+IGFjY29tcGxpc2hlZA== 26237
+DQoNCg0KDQo= 26238
+IGRlZmVycmVk 26239
+aWVudGVz 26240
+IHNpbmM= 26241
+b2xhcnM= 26242
+UmlnaHRhcnJvdw== 26243
+IHZhcmlhdGlvbnM= 26244
+KG9mZnNldA== 26245
+LkxheW91dEluZmxhdGVy 26246
+IHN1c3BlbmQ= 26247
+IHByZXZlbnRpb24= 26248
+X3ByaXZhdGU= 26249
+X2pz 26250
+4piF 26251
+IHdpZWRlcg== 26252
+YXR1bQ== 26253
+kow= 26254
+IGFwcGVhcmFuY2Vz 26255
+LkRvY3VtZW50 26256
+IHZhbGlkYXRlcw== 26257
+Y2FsZW5kYXI= 26258
+fSI7Cg== 26259
+LmRlbW8= 26260
+Y29udXQ= 26261
+IGNvcnJlY3Rpb24= 26262
+IERlYWw= 26263
+IGJhdHRlcmllcw== 26264
+LmR1cmF0aW9u 26265
+LFw= 26266
+X21hcmtlcg== 26267
+bXVsdGk= 26268
+IGhhbHQ= 26269
+IGNtcw== 26270
+IHNoYXBlZA== 26271
+QnJv 26272
+cmVkdWNl 26273
+ICMjIyM= 26274
+Q1RPUg== 26275
+IEJlbmVm 26276
+IGljb25pYw== 26277
+IHBpYW5v 26278
+IGVmZmVjdGl2ZW5lc3M= 26279
+fC4K 26280
+IGFqYXg= 26281
+IHZvbHVtZXM= 26282
+4Lih 26283
+IGNsanM= 26284
+ICAgICAgICAgICAgICAK 26285
+YXRocw== 26286
+cmFpdHM= 26287
+5aSn 26288
+0ZY= 26289
+X211bHQ= 26290
+IGZhc2NpbmF0aW5n 26291
+QXZlcmFnZQ== 26292
+IHByw6k= 26293
+IENoYWlybWFu 26294
+LmZpbmRFbGVtZW50 26295
+X3Bpbg== 26296
+IGNvbXBhcmluZw== 26297
+IGRhcmtuZXNz 26298
+LUZp 26299
+LXNlcnZlcg== 26300
+IHNlbGVjdGluZw== 26301
+c3RlcmRhbQ== 26302
+IFBhcnRz 26303
+Rk9STUFUSU9O 26304
+IG5vdGluZw== 26305
+IHBpbGU= 26306
+b2dz 26307
+IHBhbGV0dGU= 26308
+X2Rv 26309
+aXRpemU= 26310
+KCko 26311
+IGRlZmluaW5n 26312
+IHJlbWFpbmRlcg== 26313
+VW5pdHM= 26314
+X1RBU0s= 26315
+SHR0cENsaWVudA== 26316
+U29jaWFs 26317
+IGZ1bmRyYQ== 26318
+TlI= 26319
+Y2hlc3Q= 26320
+Q3VycmVuY3k= 26321
+LmFkYXB0ZXI= 26322
+IGRvcA== 26323
+dW50aW5n 26324
+QU5HVUFHRQ== 26325
+Ikhl 26326
+CWluZGV4 26327
+X3BhY2thZ2U= 26328
+Lkljb24= 26329
+IHJlcGV0 26330
+bWFzcw== 26331
+PSIuJA== 26332
+IFN1ZA== 26333
+IGxpZA== 26334
+cHJvdmluY2U= 26335
+7Jw= 26336
+R1BJTw== 26337
+0Jo= 26338
+IE15U1FM 26339
+IGRvY3M= 26340
+IEdB 26341
+IGlwc3Vt 26342
+S2VybmVs 26343
+IGFjY2VwdHM= 26344
+IGZpdHRpbmc= 26345
+IGN1YW5kbw== 26346
+IGR1cGxpYw== 26347
+IEJyb3RoZXI= 26348
+IEtsZQ== 26349
+bnVtcw== 26350
+IG1vcnBo 26351
+ICMjIyMjIyMj 26352
+IENHUG9pbnQ= 26353
+PHVuc2lnbmVk 26354
+5L6L 26355
+IER1a2U= 26356
+LnNldEJvdW5kcw== 26357
+cXM= 26358
+b3JpYw== 26359
+amVy 26360
+IHJlZ2FyZGVk 26361
+SHR0cFJlcXVlc3Q= 26362
+IGJvbmRz 26363
+IHRob3JvdWdobHk= 26364
+ZW5jZW50 26365
+IGhpZ2hsaWdodGVk 26366
+IGFjcmVz 26367
+IHdvcmtwbGFjZQ== 26368
+IEx1eA== 26369
+IHF1b3Q= 26370
+LmluZmxhdGU= 26371
+IGRvY3VtZW50ZWQ= 26372
+IGFkZGljdGlvbg== 26373
+IG11dGF0aW9u 26374
+LmNpdHk= 26375
+IGJvdHRsZXM= 26376
+IFJlcG9zaXRvcnk= 26377
+b25u 26378
+ZXJybm8= 26379
+QVJJQUJMRQ== 26380
+5bqm 26381
+X0JFR0lO 26382
+Z2xhcw== 26383
+J30pCg== 26384
+IE1hc3NhZ2U= 26385
+IFdoaXQ= 26386
+cmVnZXg= 26387
+V0E= 26388
+IG91dGxldA== 26389
+LWhlYWQ= 26390
+IGV4cGlyZWQ= 26391
+IFRoYWk= 26392
+L2luY2x1ZGU= 26393
+Z3JhZGllbnQ= 26394
+c2NhbmY= 26395
+IHNlYW0= 26396
+d2Fs 26397
+CWJ1Zg== 26398
+QmVhcmVy 26399
+IHByZWNpb3Vz 26400
+aWZhY3Rz 26401
+Y29vcmQ= 26402
+IGV4cGxvcmF0aW9u 26403
+LmdldFk= 26404
+KGhhbmRsZQ== 26405
+VG9waWM= 26406
+IFZlbnQ= 26407
+cmhz 26408
+LS0tLS0tCg== 26409
+IEJyaWdodA== 26410
+IGd1aWxk 26411
+bW90aGVy 26412
+c3Rvcm0= 26413
+IG11bmljaXBhbA== 26414
+IGluaw== 26415
+LlRZUEU= 26416
+d2w= 26417
+Li4uPC8= 26418
+X0RFVg== 26419
+PSIuLw== 26420
+X2Jvb2s= 26421
+dGh5 26422
+aXR6ZXJsYW5k 26423
+b3BsZXM= 26424
+dHJhY3Rpb24= 26425
+IENhbWVyb24= 26426
+IEFuZHJl 26427
+LnJlc3VsdHM= 26428
+IGNocm9tZQ== 26429
+IHNlY3VyZWQ= 26430
+IHN1cmZhY2Vz 26431
+KTw= 26432
+IHRvYmFjY28= 26433
+CXNwcmludGY= 26434
+IGVzY2Fs 26435
+IHN0ZGVycg== 26436
+IE1lbGJvdXJuZQ== 26437
+IGRpc3RyaWN0cw== 26438
+IG1hdHQ= 26439
+b2hlbg== 26440
+IGRhdGFHcmlkVmlld0NlbGxTdHlsZQ== 26441
+KE1vZGVs 26442
+IHNlbnNpdGl2aXR5 26443
+S0E= 26444
+dHJhbnNwb3J0 26445
+LmdldERhdGU= 26446
+IHN1YnRsZQ== 26447
+VUdJTg== 26448
+Lm1vdXNl 26449
+IGFsdGVybmF0aXZlcw== 26450
+IGVsbGU= 26451
+Y29yYXRpb24= 26452
+cmVhdGlvbg== 26453
+5ps= 26454
+X05PUk1BTA== 26455
+RGlzcGxheU5hbWU= 26456
+IGZhbmN5 26457
+SVNFRA== 26458
+TU9E 26459
+LlJlYWRPbmx5 26460
+IFVi 26461
+IEN1 26462
+aWNvbA== 26463
+IE5lbHNvbg== 26464
+IENPUg== 26465
+YW56YQ== 26466
+IFNwYXJr 26467
+ICJcXA== 26468
+LS0KCg== 26469
+d29vY29tbWVyY2U= 26470
+IHJlbWVtYmVyZWQ= 26471
+dmVyaXR5 26472
+IEV4dGVuc2lvbg== 26473
+IFBE 26474
+IHNlYXJjaGVz 26475
+LnNv 26476
+IEZvb3Rlcg== 26477
+ID0n 26478
+IFdBUk5JTkc= 26479
+LWxv 26480
+CXRhYmxl 26481
+IGRyYXdlcg== 26482
+cGljdHVyZQ== 26483
+IEZhbnRhc3k= 26484
+c3Rvcnk= 26485
+IG3Dqm1l 26486
+IwoK 26487
+X3NsaWNl 26488
+b2x0YWdl 26489
+SGFy 26490
+L3k= 26491
+IEVS 26492
+ZGll 26493
+IFBPUw== 26494
+LmFjdGlvbnM= 26495
+KE1haW4= 26496
+ZXdhcnQ= 26497
+YXBldXQ= 26498
+IFNURQ== 26499
+aWRkaW5n 26500
+LnJlYWRMaW5l 26501
+IHNlYXJjaGVk 26502
+V2Vk 26503
+LmZpZ3VyZQ== 26504
+dWdodGVycw== 26505
+KCkuX18= 26506
+IG9yYml0 26507
+c2hpcHBpbmc= 26508
+IGZyaWVuZHNoaXA= 26509
+IFNoaWZ0 26510
+LW9y 26511
+cXVv 26512
+V0hFUkU= 26513
+IEVzcA== 26514
+LmZvcndhcmQ= 26515
+b2ZmaWNl 26516
+IGnDpw== 26517
+IENoZWxzZWE= 26518
+SXRlbVNlbGVjdGVk 26519
+YWNoZXJz 26520
+ZGVsZXRlZA== 26521
+cm91cw== 26522
+ICItIg== 26523
+IEdyYW4= 26524
+IPCfmA== 26525
+LXBvd2Vy 26526
+ZXR0YQ== 26527
+IHJlbWluZGVy 26528
+ZW5zb3Jz 26529
+IEFsbG93 26530
+xJlk 26531
+X3RlYW0= 26532
+IGNyb3du 26533
+dGlja2V0 26534
+IGNvbGxlY3Rpb25WaWV3 26535
+bGFjZQ== 26536
+IGZpeGVz 26537
+IEh1Yg== 26538
+Y2F0YWxvZw== 26539
+IElkZW50aXR5 26540
+IGV4Y2Vzc2l2ZQ== 26541
+IE5hdmlnYXRvcg== 26542
+X0JS 26543
+LXBsYXk= 26544
+IENhbXBhaWdu 26545
+ICAgICAgICAgICAgICAgCg== 26546
+YXNpdmU= 26547
+IHdj 26548
+IEJlaWppbmc= 26549
+L3d3dw== 26550
+IG1ha2V1cA== 26551
+IGRpc3RhbmNlcw== 26552
+IHNhdGlzZnk= 26553
+Q09ORA== 26554
+IHdvdW5k 26555
+KCld 26556
+IHZpb2xhdGlvbnM= 26557
+IHN0YXlz 26558
+LyM= 26559
+aWxpbmU= 26560
+XEV4Y2VwdGlvbg== 26561
+IE1vdGlvbg== 26562
+IGhlYWw= 26563
+X3BsYW4= 26564
+cmFzZXM= 26565
+KG1haW4= 26566
+QXBwbGU= 26567
+IGNvbXBsZXRpbmc= 26568
+IGRldGVybWluZXM= 26569
+U2Nhbg== 26570
+IHN0ZWFs 26571
+IFNvYw== 26572
+QW5hbHlzaXM= 26573
+IGZhdm9yaXRlcw== 26574
+IGNhbXBv 26575
+b25lcg== 26576
+IEZsaWdodA== 26577
+Li4uCgoKCg== 26578
+KSkpKSk7Cg== 26579
+LWNvdW50 26580
+IHB3 26581
+QXNTdHJpbmc= 26582
+IHNleHVhbGx5 26583
+Rmlyc3ROYW1l 26584
+IEVzY29ydA== 26585
+Y2FsYw== 26586
+IFdpa2lwZWRpYQ== 26587
+IGRvY2tlcg== 26588
+IFN3ZWV0 26589
+J2lk 26590
+SW50bw== 26591
+IEh1bnQ= 26592
+LmVxdWFsVG8= 26593
+IGxhYm9yYXRvcnk= 26594
+IEJVU0lORVNT 26595
+RmlsZURpYWxvZw== 26596
+VHJlZU5vZGU= 26597
+LkVuYw== 26598
+IE1heGltdW0= 26599
+IG1vdGhlcnM= 26600
+5rU= 26601
+IGZyYWN0 26602
+LnN0YXJ0c1dpdGg= 26603
+IGhhcmRjb3Jl 26604
+Lm9i 26605
+5aeL 26606
+ID48Lw== 26607
+X3Jv 26608
+KCgq 26609
+Pz8/Pw== 26610
+X3ZlcnRleA== 26611
+a2VpdA== 26612
+IEhhbGxvd2Vlbg== 26613
+VEk= 26614
+IFZh 26615
+X2Nhcg== 26616
+PSJ7eyQ= 26617
+IHJhbmRvbWx5 26618
+0LDQvdC40LU= 26619
+IHNob2NrZWQ= 26620
+IFBva8OpbW9u 26621
+c2lnbmFs 26622
+IFNESw== 26623
+bWlkZGxld2FyZQ== 26624
+IHRyZWF0aW5n 26625
+IGJ1cm5lZA== 26626
+RGVwYXJ0bWVudA== 26627
+IFNwZWN0 26628
+IGNsaWVudGU= 26629
+IFJlZGRpdA== 26630
+X2F2Zw== 26631
+IGluc3RhbGxpbmc= 26632
+X2FscGhh 26633
+LGRhdGE= 26634
+IHNldElk 26635
+IExpc3RWaWV3 26636
+KHByb3BlcnR5 26637
+IGNyb3NzaW5n 26638
+IE9iag== 26639
+IFdhcmQ= 26640
+IFJlZGlyZWN0VG8= 26641
+IFByZXNlbnQ= 26642
+IGRyYXdz 26643
+Y2hlZHVsZWQ= 26644
+IGxlZ2lzbGF0aXZl 26645
+IHR3aXN0 26646
+IFN0cmE= 26647
+IEFGUA== 26648
+IENoYXA= 26649
+LXBy 26650
+OkNHUmVjdA== 26651
+IGNlcw== 26652
+Um91dGVz 26653
+bm9m 26654
+IHZpc2E= 26655
+IFRDUA== 26656
+IEVWRU4= 26657
+aXZpYWw= 26658
+IExldHRlcg== 26659
+UkFZ 26660
+IGltcGxvZGU= 26661
+LmVx 26662
+PScr 26663
+IG1vdGl2YXRlZA== 26664
+LnZpc2libGU= 26665
+LnNob3J0 26666
+Pm1hbnVhbA== 26667
+IFRlY2huaWNhbA== 26668
+IGNvcnBvcmF0aW9u 26669
+IEhX 26670
+YW5rYQ== 26671
+VEFJTA== 26672
+aXN0YXM= 26673
+IHBlcmZvcm1z 26674
+IEJlaGF2aW9y 26675
+LkZvcg== 26676
+X09SREVS 26677
+IEtpY2s= 26678
+IGNhbGxiYWNrcw== 26679
+X2Ry 26680
+dWVnbw== 26681
+aHVi 26682
+dWZmaWNpZW50 26683
+c2t5 26684
+IGJw 26685
+aHRhYmxl 26686
+IE9OTFk= 26687
+IEFVVEhPUlM= 26688
+LkFyZ3VtZW50 26689
+In07Cg== 26690
+IFRodW5kZXI= 26691
+IEtvbQ== 26692
+LlNob3VsZA== 26693
+QVVUSA== 26694
+YWh1 26695
+X3BheW1lbnQ= 26696
+IHN0YXJ0ZXI= 26697
+7ISc 26698
+7Jqp 26699
+QmxvZw== 26700
+LnBhdGNo 26701
+IGdvdmVybmVk 26702
+YXNzeQ== 26703
+LWZvdW5k 26704
+IHRoZWF0ZXI= 26705
+IEZvbnRXZWlnaHQ= 26706
+IEJhdG1hbg== 26707
+Iklm 26708
+LlJhbmRvbQ== 26709
+X2RlbHRh 26710
+IENF 26711
+QXV0aGVudGljYXRlZA== 26712
+IGRyb25l 26713
+IGNvdXM= 26714
+cmFkaXVz 26715
+TWVy 26716
+KE5vbmU= 26717
+IE5K 26718
+X2hlYWRlcnM= 26719
+IGFtZXI= 26720
+cHl0ZXN0 26721
+IEFjdGlvbnM= 26722
+CQkJICAgIA== 26723
+IGV0dA== 26724
+IGhvbHk= 26725
+IHVuY29tZm9ydA== 26726
+IE5pbg== 26727
+IERlY2ltYWw= 26728
+IE1lc3NhZ2Vz 26729
+LnNlbmRlcg== 26730
+XV0pCg== 26731
+IGVtYnJhY2U= 26732
+VGhvdWdo 26733
+L3Nw 26734
+IGN1bHR1cmVz 26735
+IGhpZ2h3YXk= 26736
+dGFy 26737
+LmZhaWw= 26738
+X2hpZGRlbg== 26739
+IGNvbXBvbmVudERpZE1vdW50 26740
+IFdyaWdodA== 26741
+IGphZw== 26742
+X2ls 26743
+Li4vLi4vLi4v 26744
+aWd1 26745
+Rm9vZA== 26746
+IGFjZQ== 26747
+IGHDsW9z 26748
+VVNE 26749
+IG11dHVhbA== 26750
+TG9naWM= 26751
+IHRlbXBsZQ== 26752
+IGJyaWVmbHk= 26753
+IFRyaXA= 26754
+Y2xhc3NtZXRob2Q= 26755
+ZGVmYXVsdHM= 26756
+IGNodW5rcw== 26757
+LCwsLA== 26758
+IFJlYXNvbg== 26759
+JGlk 26760
+LXVwcw== 26761
+IGRhbW4= 26762
+IHRydWNrcw== 26763
+IHVubGltaXRlZA== 26764
+IHNjdWxwdA== 26765
+IENhcmRz 26766
+IGF1dG9y 26767
+IFRlc3Rpbmc= 26768
+IGRpZXNl 26769
+c2hvcHM= 26770
+57Q= 26771
+KHBheWxvYWQ= 26772
+IFBBVEg= 26773
+IE1lbW9yaWFs 26774
+IHJpZGljdWxvdXM= 26775
+ZWdyZWU= 26776
+LXdpbm5pbmc= 26777
+IHJlaGFi 26778
+IHNvcGhpc3RpY2F0ZWQ= 26779
+d3BkYg== 26780
+CXBhdGg= 26781
+ISI7Cg== 26782
+X1NZUw== 26783
+LnNwZWVk 26784
+IHNvYXA= 26785
+c3VmZml4 26786
+V3JhcA== 26787
+IGVuaGFuY2VtZW50 26788
+w4k= 26789
+w7pi 26790
+IHBsYXlsaXN0 26791
+IG1peGluZw== 26792
+YW50aWRhZA== 26793
+PSIiOwo= 26794
+IFJldmlzaW9u 26795
+IEJlYXQ= 26796
+LmluYw== 26797
+LXdheQ== 26798
+ZW5jaWFz 26799
+dWxlcnM= 26800
+Q2F0 26801
+aWRlbA== 26802
+IFNoaXA= 26803
+LnNldENvbG9y 26804
+IHRocmVhdGVuaW5n 26805
+Lm1vZHVsZXM= 26806
+IGFmdGVyd2FyZHM= 26807
+IERhc2hib2FyZA== 26808
+CiAK 26809
+U2lnbmFs 26810
+IHByaW1lcg== 26811
+b3JuZXlz 26812
+aWNpYXJ5 26813
+IGxpZ25l 26814
+X3ByZWRpY3Q= 26815
+IGFlc3Q= 26816
+X2h0dHBz 26817
+Pjo= 26818
+IExleA== 26819
+IHJlbmNvbnRyZXM= 26820
+ZWdyYWw= 26821
+c2NhbGE= 26822
+X2ZhbWlseQ== 26823
+w59lbg== 26824
+X3N5bQ== 26825
+IHVuY2VydGFpbnR5 26826
+IFZBTFVF 26827
+IH07DQoNCg== 26828
+IGJyb2FkZXI= 26829
+IGhvcnNlcw== 26830
+44Gd 26831
+IEthbA== 26832
+b2Jh 26833
+X0lORVQ= 26834
+IEtpbGw= 26835
+anF1ZXJ5 26836
+YW1pbmF0aW9u 26837
+W0Ai 26838
+IG11ag== 26839
+IyMjCg== 26840
+Rmlyc3RPckRlZmF1bHQ= 26841
+dGhlblJldHVybg== 26842
+Q2hl 26843
+L2Zvb3Rlcg== 26844
+IHBhcmtz 26845
+YXNqZQ== 26846
+IEd1bGY= 26847
+IG1vZGVzdA== 26848
+LkluaXQ= 26849
+77yfCgo= 26850
+IHByb3NwZWN0cw== 26851
+IHN2Zw== 26852
+IOWP 26853
+LkRpYWxvZw== 26854
+X05FVA== 26855
+ICgoJA== 26856
+IGVr 26857
+IFdhcm5pbmc= 26858
+IE1L 26859
+PExN 26860
+ICcNCg== 26861
+aWVt 26862
+aGV0aWM= 26863
+IGl4 26864
+dGhpbms= 26865
+LXNoYWRvdw== 26866
+IEVsZA== 26867
+IE5ldmFkYQ== 26868
+IExlYWY= 26869
+IEdST1VQ 26870
+IHByb21v 26871
+ZW50aW5l 26872
+CU1hcA== 26873
+IE1vZGVscw== 26874
+IEtyaXN0 26875
+X2tlcm5lbA== 26876
+LW1hZGU= 26877
+IGNlcnI= 26878
+QXNzZXRz 26879
+ZWxsYXI= 26880
+IGludm9rZWQ= 26881
+LnZ1ZQ== 26882
+IGN1bHRpdg== 26883
+Q2xvc2Vk 26884
+IGdlbmVyYXRlcw== 26885
+ZmZmZmZm 26886
+dGhlc2l6ZQ== 26887
+c3FydA== 26888
+IENhc3RsZQ== 26889
+LmNhcg== 26890
+IGtlZW4= 26891
+dW5kYQ== 26892
+IENyb3c= 26893
+IFNpbmdo 26894
+eXRob24= 26895
+IGJlYW5z 26896
+bGFyZw== 26897
+5paH5Lu2 26898
+QXdlc29tZQ== 26899
+dW5jYXRl 26900
+UGF0aHM= 26901
+b2pp 26902
+KGN1cnI= 26903
+Q09ORFM= 26904
+IG1pbQ== 26905
+IHNob3VsZGVycw== 26906
+SGFyZA== 26907
+YXN0ZXM= 26908
+0LDQtdGC 26909
+IGNvbnZpbmNl 26910
+ZGVjZXNz 26911
+bWFkZQ== 26912
+IENNRA== 26913
+Lklt 26914
+IGNoYW9z 26915
+ZW5zaXZlbHk= 26916
+IGNvb2xpbmc= 26917
+IGJ1cmllZA== 26918
+KCdA 26919
+X1Nl 26920
+CQkJCQkJCQkJCQkJCQkJCQ== 26921
+LmNvbXBhbnk= 26922
+LnN1Ym1pdA== 26923
+cGhhbnQ= 26924
+IGJvb3RzdHJhcA== 26925
+X2hlbHA= 26926
+4Kc= 26927
+LmR1bXA= 26928
+IGRpZmVy 26929
+X21hcHBpbmc= 26930
+IGNpcmN1bGFy 26931
+IGVzY29ydHM= 26932
+IGJlcmU= 26933
+IGdyYWR1 26934
+IExlZ2VuZA== 26935
+aW1lZGlh 26936
+IEJhcmNlbG9uYQ== 26937
+IGJlZHM= 26938
+5Yiw 26939
+44CK 26940
+X3ZvbHVtZQ== 26941
+IHRyZW1lbmRvdXM= 26942
+IHNjYWxpbmc= 26943
+IHBpbnM= 26944
+ZW5hcw== 26945
+dHlwZXBhcmFt 26946
+RGFzaGJvYXJk 26947
+cmVuZGVyZXI= 26948
+IHNwaQ== 26949
+ICYk 26950
+IFNraW4= 26951
+YWxtYXJ0 26952
+IGhvY2tleQ== 26953
+ICciLiQ= 26954
+IGVycm5v 26955
+IGJldw== 26956
+Rm9sbG93aW5n 26957
+Lk1vZHVsZQ== 26958
+ZXJhYmxl 26959
+IE1pbGl0YXJ5 26960
+IFJpbw== 26961
+X2F2YWlsYWJsZQ== 26962
+IFN1cmZhY2U= 26963
+IHN0YWI= 26964
+SUZJRVI= 26965
+IExJU1Q= 26966
+IGRhc2hib2FyZA== 26967
+IGNsdXN0ZXJz 26968
+LnBsdWdpbg== 26969
+IGpvdQ== 26970
+IERlY29y 26971
+Rm91cg== 26972
+IGRlbGxl 26973
+KioqKioqLwo= 26974
+aWF6 26975
+aW5kZQ== 26976
+Y2hpbmc= 26977
+IGdldEl0ZW0= 26978
+LkFkZHJlc3M= 26979
+bWVudGVk 26980
+QW1lcmlj 26981
+UGxhaW4= 26982
+IHVzYg== 26983
+IFByYWN0aWNl 26984
+X21lbnQ= 26985
+LmJsdWU= 26986
+SGludA== 26987
+0YDQsNCy 26988
+IGNvbm5lY3Rvcg== 26989
+IGluaGVyaXRlZA== 26990
+0LjQsg== 26991
+IGludGVydmFscw== 26992
+IGNlcmU= 26993
+IHVk 26994
+IGluY29u 26995
+LkV4aXN0cw== 26996
+IE1pYw== 26997
+Rks= 26998
+KGNhcmQ= 26999
+LlNldHRpbmdz 27000
+IGV4aGliaXRpb24= 27001
+IG9uUHJlc3NlZA== 27002
+IHJlc3RvcmVk 27003
+ZW5ndQ== 27004
+LmRlZg== 27005
+IHJlY3Y= 27006
+LiIpOw0K 27007
+ZW5jb2Rlcg== 27008
+YXRoZXJpbmU= 27009
+KGRlc3Q= 27010
+YXplZA== 27011
+I2VuZHJlZ2lvbg== 27012
+c2VtYmw= 27013
+LE0= 27014
+b2J5 27015
+INC/0LXRgA== 27016
+LkNhbGw= 27017
+IGF0dGVuZGFuY2U= 27018
+LWJvcmRlcg== 27019
+IGFkZHJlc3Npbmc= 27020
+w6pu 27021
+IExldg== 27022
+IGJhc2g= 27023
+YmVuY2g= 27024
+Q3JlZGVudGlhbHM= 27025
+U3BhY2luZw== 27026
+KG9m 27027
+X1JFU0VU 27028
+aWd1b3Vz 27029
+IGNydWVs 27030
+IGNyb3NzZWQ= 27031
+IGxldXI= 27032
+IEdvbGY= 27033
+b3JyZWN0 27034
+IHBhY2tldHM= 27035
+IERhdGFTZXQ= 27036
+IHBhcnRseQ== 27037
+U0VRVUVOVElBTA== 27038
+IGluZGljYXRpb24= 27039
+IFNhbHQ= 27040
+YWNpYQ== 27041
+ICopOwo= 27042
+CWluZm8= 27043
+IFZpZXdCYWc= 27044
+b256 27045
+IGVkaXRvcmlhbA== 27046
+IEFyZW5h 27047
+IHNpcg== 27048
+X1N0YXRpYw== 27049
+KHNvY2tldA== 27050
+c3U= 27051
+Y2hvb3Nl 27052
+Lm1vbnRo 27053
+Lk15 27054
+w6lyaQ== 27055
+O2ZvbnQ= 27056
+ZG9lcw== 27057
+IGNvbnZlcnRlcg== 27058
+IHNhbHY= 27059
+IGxy 27060
+IGluZmx1ZW5jZWQ= 27061
+KGZlYXR1cmU= 27062
+IFF1ZWVucw== 27063
+bGV0dA== 27064
+X01PTg== 27065
+JmFtcA== 27066
+VG91Y2hhYmxlT3BhY2l0eQ== 27067
+T0ZG 27068
+IG1ldGFib2w= 27069
+KGl0ZXI= 27070
+IHZpdGFtaW4= 27071
+IElORElSRUNU 27072
+YXV0b20= 27073
+X3B1YmxpYw== 27074
+IGFkanVzdG1lbnQ= 27075
+IHNwZWNpYWxpemVk 27076
+d2luZG93cw== 27077
+LmFkZEFsbA== 27078
+IGFjY29yZGluZ2x5 27079
+IEpPcHRpb25QYW5l 27080
+IGNlbGxzcGFjaW5n 27081
+IHF1YWQ= 27082
+IGNyZWVw 27083
+IG91dGxldHM= 27084
+fWApCg== 27085
+IHByaWVzdA== 27086
+X1RIUkVBRA== 27087
+IE1hcng= 27088
+IEJ5VmFs 27089
+IGN1YWw= 27090
+6Z2i 27091
+IHRlbXBvcmFyaWx5 27092
+QW5u 27093
+a2VsZXRvbg== 27094
+5aU= 27095
+IExPQw== 27096
+YXVlcg== 27097
+ZGVyaXZl 27098
+IGJlaGF2aW9ycw== 27099
+YXNlbmFtZQ== 27100
+IENlbnR1cnk= 27101
+IGhvcnJpYmxl 27102
+TUVTUw== 27103
+X0xpc3Q= 27104
+d2Vp 27105
+UGF0 27106
+IENob2ljZQ== 27107
+X0ZST00= 27108
+CWxpbmU= 27109
+Lmludm9rZQ== 27110
+LkJvdHRvbQ== 27111
+IG5vd2hlcmU= 27112
+LiIKCgoK 27113
+X2V4cG9ydA== 27114
+IHN0cnVnZ2xlZA== 27115
+LkFwcGVhcmFuY2U= 27116
+IEpCdXR0b24= 27117
+IEplcmVteQ== 27118
+KFtb 27119
+IGtpY2tlZA== 27120
+bWFyc2hhbA== 27121
+c3RhZmY= 27122
+ZXNpdHk= 27123
+IHF1aXo= 27124
+X2VmZmVjdA== 27125
+IH0pKTsKCg== 27126
+bWVs 27127
+YmFubmVy 27128
+IFBJTg== 27129
+IGludmVudGlvbg== 27130
+IGNvbnNvbGlk 27131
+IG9wcw== 27132
+IEJldHdlZW4= 27133
+amFjaw== 27134
+ZXJuYXRpb25hbA== 27135
+IHNhY3JpZmljZQ== 27136
+YWdhdGlvbg== 27137
+IEpveQ== 27138
+IGFtZW5kbWVudA== 27139
+IFNvbGQ= 27140
+IHByaXNvbmVycw== 27141
+0LDQvdC90Ys= 27142
+RG9jdW1lbnRz 27143
+KV0pCg== 27144
+dXN0ZWQ= 27145
+IExpbmVhckxheW91dA== 27146
+b3Nv 27147
+X0VN 27148
+LnNlbGY= 27149
+Lk1pZGRsZQ== 27150
+KS8v 27151
+IFwn 27152
+IGZ1Y2tlZA== 27153
+IE11cnJheQ== 27154
+IHByb2ZvdW5k 27155
+X0VMRU1FTlQ= 27156
+dWx0YQ== 27157
+aWxlcnM= 27158
+cG9ydGZvbGlv 27159
+SnVuZQ== 27160
+dGNw 27161
+bW9kaWZpZWQ= 27162
+IFRyYWNl 27163
+IEtlbA== 27164
+YWx5emVy 27165
+KT0+ 27166
+IFJlcGFpcg== 27167
+X0JF 27168
+QnJhbmQ= 27169
+dWFydA== 27170
+cHJldmlldw== 27171
+IGluaXRpYXRpdmVz 27172
+cnVubmluZw== 27173
+YmFuZw== 27174
+CXVwZGF0ZQ== 27175
+IENvYWNo 27176
+UmljaA== 27177
+IHlvdXR1YmU= 27178
+IHJpdHVhbA== 27179
+YXBwYQ== 27180
+IFJvYmluc29u 27181
+cHJlY2lzaW9u 27182
+Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLw== 27183
+PVtdCg== 27184
+IGNlbGVicmF0ZWQ= 27185
+T1RP 27186
+IGluY2x1c2lvbg== 27187
+SlA= 27188
+JzsNCg0K 27189
+IG5vdGFibGU= 27190
+KF8u 27191
+TWFuYWdlZA== 27192
+IGd1aWRlcw== 27193
+Jm5ic3A= 27194
+YXRlZFJvdXRl 27195
+IEFkanVzdA== 27196
+IGNvbG9yZWQ= 27197
+X3Njb3Jlcw== 27198
+IFRlc2xh 27199
+X3Byb2dyZXNz 27200
+Lmluc3Q= 27201
+Wydf 27202
+LmZsYWdz 27203
+IGZjbG9zZQ== 27204
+X09QRVI= 27205
+xbx5 27206
+X25vdGU= 27207
+IHRyYW5zZ2VuZGVy 27208
+5ZU= 27209
+UklQVA== 27210
+IGFic2VudA== 27211
+IGFtZXQ= 27212
+IG9wZXJhbmQ= 27213
+66k= 27214
+IGhvb2Q= 27215
+dG9Mb3dlckNhc2U= 27216
+YXZv 27217
+IENpcmN1aXQ= 27218
+IExpbmQ= 27219
+LS19fQo= 27220
+PW0= 27221
+IHN1cHByZXNz 27222
+IE1BUA== 27223
+aWFuZw== 27224
+LWFkbWlu 27225
+IHNpZGViYXI= 27226
+IEJ1 27227
+IEhleA== 27228
+LEY= 27229
+IFNpZ25hbA== 27230
+IHRyYW5zcGFyZW5jeQ== 27231
+IEZlZGVyYXRpb24= 27232
+L1Y= 27233
+UmVx 27234
+IHB1bHNl 27235
+IHRlbmRz 27236
+TnVtYmVycw== 27237
+JSc= 27238
+IGRlcG9ydA== 27239
+ZGF0YXM= 27240
+X1VJTlQ= 27241
+X3RyYQ== 27242
+b2tv 27243
+ICI/ 27244
+Y29tcGV0 27245
+c29sZXRl 27246
+dW5kcnk= 27247
+IG92ZXJsYXA= 27248
+fWAsCg== 27249
+Lmx5 27250
+X3N1bW1hcnk= 27251
+IExvc3Q= 27252
+LkNlbnRlcg== 27253
+IGRpc2FiaWxpdHk= 27254
+LlNlcmlhbGl6YXRpb24= 27255
+IGdlb20= 27256
+ID86 27257
+IFdv 27258
+IHNoaXBwZWQ= 27259
+guaVsA== 27260
+IHVnbHk= 27261
+IGV4Y2l0ZW1lbnQ= 27262
+IGV4dGVyaW9y 27263
+IGNoZWNrb3V0 27264
+IGt1cg== 27265
+LEQ= 27266
+IEFsYXNrYQ== 27267
+IHN5bnRoZXRpYw== 27268
+IEJ1ZGdldA== 27269
+IFN1YnNjcmliZQ== 27270
+ICYK 27271
+yJlp 27272
+IFl1 27273
+CXF1ZXJ5 27274
+fS4K 27275
+IHRyYWdlZA== 27276
+YXNzZW4= 27277
+IGFjY29tbW9kYXRpb24= 27278
+IHBoeXNpY2lhbg== 27279
+IHJlbmFtZWQ= 27280
+IHRpZGFr 27281
+esSF 27282
+IG1pbnVz 27283
+bnljaA== 27284
+X0VYQ0VQVElPTg== 27285
+dGhyZWFkcw== 27286
+IHRpcmU= 27287
+X2NyZWF0ZWQ= 27288
+ZW5zdXJl 27289
+IHdvcnRoeQ== 27290
+IGV4Y3VzZQ== 27291
+IGNsb3Ro 27292
+LnBhcmVudE5vZGU= 27293
+L3BsYXRmb3Jt 27294
+IFVGQw== 27295
+IEd0aw== 27296
+dW5ueQ== 27297
+IGdpYnQ= 27298
+a2VsZXk= 27299
+aHVt 27300
+KHR4 27301
+CWRldg== 27302
+IG91dGZpdA== 27303
+ZG9vcnM= 27304
+IGZvbg== 27305
+aWN1dA== 27306
+dm9sYXRpbGU= 27307
+IGhvbW9zZXg= 27308
+TWF4aW11bQ== 27309
+IGV4cGVuZA== 27310
+IH0pOwoKCg== 27311
+RXE= 27312
+b25kZXJz 27313
+ZGVwYXJ0bWVudA== 27314
+IFBoeXNpY3M= 27315
+In0pOwo= 27316
+IHBhcmFk 27317
+LlN0cg== 27318
+IHNlbGU= 27319
+SUZJRUQ= 27320
+IGRlbGl2ZXJz 27321
+aXZhbg== 27322
+IHJlc3BvbnNpYmlsaXRpZXM= 27323
+IGFkdm9jYXRlcw== 27324
+6LU= 27325
+IFJJRA== 27326
+LnBhcmFtZXRlcnM= 27327
+TWV0cmljcw== 27328
+cm9uaWNz 27329
+IFVJVGFibGVWaWV3Q2VsbA== 27330
+QWJzb2x1dGU= 27331
+aXBzZQ== 27332
+eWx1bQ== 27333
+TUxFbGVtZW50 27334
+X1ZBTElE 27335
+PHRpdGxl 27336
+RGxn 27337
+cGFjZXM= 27338
+IHN5bmRyb21l 27339
+YmVhbnM= 27340
+X2RhdGFiYXNl 27341
+b3ppbGxh 27342
+IE1lZw== 27343
+REJH 27344
+IGx1Yg== 27345
+QmFnQ29uc3RyYWludHM= 27346
+YWJhZA== 27347
+IHByb2plY3RlZA== 27348
+X0JZVEU= 27349
+LlNpemVG 27350
+c3RyZWV0 27351
+CgoKCgoKCgoKCg== 27352
+IExPU1M= 27353
+IGRpcmVjdG9ycw== 27354
+L25ld3M= 27355
+IG51cnNpbmc= 27356
+IERvbmU= 27357
+LkhUVFA= 27358
+ZGlzY291bnQ= 27359
+IFJvdA== 27360
+VG9NYW55 27361
+IGVuYWJsaW5n 27362
+IGF1c3Np 27363
+b3N0YQ== 27364
+ICAgICAgICAgICAgICAgIA0K 27365
+6L29 27366
+IGhlbGljb3B0 27367
+IEluc2lkZQ== 27368
+5L+h5oGv 27369
+aXNwZXI= 27370
+IEFsbGFo 27371
+QVJDSEFS 27372
+IHJvbGxz 27373
+Q29tcGFyZQ== 27374
+WFA= 27375
+SW5kZXhPZg== 27376
+U1VN 27377
+IGFzc3VyZWQ= 27378
+IFBoeXNpY2Fs 27379
+RW5kcG9pbnQ= 27380
+Lkdsb2JhbA== 27381
+LmRldGFpbA== 27382
+IHRoZWZ0 27383
+Lmp1cGl0ZXI= 27384
+IGh1bW9y 27385
+LlJlbmRlcg== 27386
+QWxleA== 27387
+LmNhcA== 27388
+IGJ1ZmZlcnM= 27389
+IGRpc3Bvc2U= 27390
+dGlvbg== 27391
+LnByZXNlbnQ= 27392
+emVs 27393
+LFA= 27394
+IGRlc3BlcmF0ZQ== 27395
+LmdldENvbHVtbg== 27396
+IHR3aW4= 27397
+7JY= 27398
+LmNhbg== 27399
+IGZsZWU= 27400
+IElyYW5pYW4= 27401
+IHN0aWNreQ== 27402
+IFVUQw== 27403
+TFQ= 27404
+Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8v 27405
+IGxpY2Vuc2luZw== 27406
+X1BPSU5U 27407
+IE1hcHM= 27408
+IGxvbA== 27409
+PW1vZGVscw== 27410
+LXRhYg== 27411
+IE5hc2g= 27412
+X2xvZ2dlcg== 27413
+dG9yY2g= 27414
+IENPTlNFUVVFTlRJQUw= 27415
+Tm90RW1wdHk= 27416
+L3JlYWN0 27417
+IHBm 27418
+IGFzc2VydGlvbg== 27419
+IHN1YnNlcXVlbnRseQ== 27420
+X2Nhbg== 27421
+IHBhbmRlbWlj 27422
+b2d1ZQ== 27423
+IisK 27424
+X2VudA== 27425
+X1BhcmFt 27426
+LgoKCgoKCgoK 27427
+UmVzZWFyY2g= 27428
+Q2FwdHVyZQ== 27429
+IGJlbG92ZWQ= 27430
+ZGVt 27431
+IGV4dHJhY3RlZA== 27432
+IGZpZ2h0cw== 27433
+RVJD 27434
+KGF1dGg= 27435
+cG9zaXRpb25z 27436
+IHJldmVyc2Vk 27437
+KHN0YWNr 27438
+IF8p 27439
+dXRvZmY= 27440
+X2Zsb3c= 27441
+54K5 27442
+KEdhbWU= 27443
+IGV4Y2x1ZGVk 27444
+IENTVg== 27445
+Y2c= 27446
+IFRpdGFu 27447
+cGF1c2U= 27448
+IGNlcmNh 27449
+IGR1bXBzdGVy 27450
+TGVzcw== 27451
+IGtvdGxpbng= 27452
+YXN0ZXJ4bWw= 27453
+IHBvaW50ZXJz 27454
+IGZsb3dz 27455
+IFR1bg== 27456
+IE1haW5BY3Rpdml0eQ== 27457
+IGRpc2NyZXQ= 27458
+IGNvbWJpbmF0aW9ucw== 27459
+dmlzaXQ= 27460
+X2JpbmQ= 27461
+b290aW5n 27462
+ZGF0ZXI= 27463
+X2xvb2t1cA== 27464
+Lm5pbw== 27465
+IHN3ZWF0 27466
+IFJk 27467
+IHNjaWVudGlzdA== 27468
+IFBpeGVs 27469
+QE5nTW9kdWxl 27470
+UGxheWluZw== 27471
+IHVuZm9sZA== 27472
+VHJhbnNsYXRl 27473
+IExhd3JlbmNl 27474
+IEZJWE1F 27475
+QmlsbA== 27476
+IFJJR0hU 27477
+IHdoZXJldmVy 27478
+IG9vaw== 27479
+dmlkZW5jZQ== 27480
+IF1dOw== 27481
+IFNraWxs 27482
+dW5pc3Rk 27483
+IPCfmYI= 27484
+IGZlbWFsZXM= 27485
+LS0pCg== 27486
+jrflj5Y= 27487
+IEZyZWQ= 27488
+T3ZlcmFsbA== 27489
+2YI= 27490
+IGVzc2VuY2U= 27491
+IHRoZXJlYnk= 27492
+IHdvdW5kZWQ= 27493
+IERPV04= 27494
+bGVzc29u 27495
+dGV4dHVyZQ== 27496
+Um91bmQ= 27497
+IGF1dG9tYXRlZA== 27498
+INCh 27499
+IFVwZGF0ZXM= 27500
+IHNoYWRl 27501
+cHVibGlzaA== 27502
+IEdlYXI= 27503
+PWxhbWJkYQ== 27504
+IGxldmVy 27505
+KSsi 27506
+aGlsbA== 27507
+IHJhZGFy 27508
+cnlpbmc= 27509
+ICIpLg== 27510
+ZmlsbGVk 27511
+IGxpbmV1cA== 27512
+IGRs 27513
+IHdvcmtzcGFjZQ== 27514
+Vm8= 27515
+X2R0 27516
+67I= 27517
+X0l0ZW0= 27518
+TlNVUkw= 27519
+LnZlcmlmeQ== 27520
+IEhhd2FpaQ== 27521
+R29k 27522
+TWFyY2g= 27523
+IFvigKZd 27524
+IHBlbG8= 27525
+dXJpb3Vz 27526
+IFBpdHRzYnVyZ2g= 27527
+Lkl0 27528
+Q2xlYW4= 27529
+Plw8Xg== 27530
+IGlvcw== 27531
+c291bmQ= 27532
+Il07 27533
+IGZyZWVk 27534
+cm90dGxl 27535
+IExvd2Vy 27536
+W2NvdW50 27537
+5Z0= 27538
+IHBhbGU= 27539
+IFdheW5l 27540
+ZWFydGg= 27541
+X2NhdGVnb3JpZXM= 27542
+VUNL 27543
+Lm1ldGFkYXRh 27544
+IHN1bW1vbg== 27545
+SE9NRQ== 27546
+0L7Qu9GM0Lc= 27547
+IG1hbnVmYWN0dXJlZA== 27548
+IGRvY2s= 27549
+IGNvbXBldGl0b3Jz 27550
+X01PREVM 27551
+b2tpYQ== 27552
+IEhleQ== 27553
+zr8= 27554
+IGJhY2t3YXJk 27555
+IFBPU1M= 27556
+cm9wYQ== 27557
+IGNyaQ== 27558
+X09CSg== 27559
+VHJhbnNwb3J0 27560
+LWhpZ2g= 27561
+IGVyb3Rpaw== 27562
+X3Nsb3Q= 27563
+IGFydGlj 27564
+X2ZyYW1ld29yaw== 27565
+LXNlcmlm 27566
+IFNxbERiVHlwZQ== 27567
+Jyko 27568
+KyIv 27569
+IHdvcmU= 27570
+U2ls 27571
+IHN0b3Jpbmc= 27572
+IFBoYXNl 27573
+dWFudA== 27574
+IGJ1bXA= 27575
+aW5obw== 27576
+IGRpZ24= 27577
+IGJhY2tz 27578
+cXE= 27579
+KGhhc2g= 27580
+IGdlbw== 27581
+IHRlbmRlcg== 27582
+TG9nbw== 27583
+ISkK 27584
+IE1Y 27585
+IEFydGh1cg== 27586
+ZXNzb2E= 27587
+X0No 27588
+IGJlZHJvb21z 27589
+PSIjIj48 27590
+IHRocm9hdA== 27591
+aW5zaWM= 27592
+LmludGVnZXI= 27593
+IHByaW1pdGl2ZQ== 27594
+VHJ1dGh5 27595
+IGZhY2lsaXRhdGU= 27596
+IGNyZWF0aXZpdHk= 27597
+IEROUw== 27598
+IGdyYQ== 27599
+dWV6 27600
+IGNvdW50bGVzcw== 27601
+IFBvbGFuZA== 27602
+J00= 27603
+IERpc3Q= 27604
+IHZlc3Q= 27605
+IGNlcnRpZmljYXRpb24= 27606
+4buR 27607
+aGVsZA== 27608
+ZXh0ZW5zaW9ucw== 27609
+KHN0YXRpYw== 27610
+IGdyYWRlcw== 27611
+IFViZXI= 27612
+44Gf 27613
+IFtdKQo= 27614
+ZGF0b3M= 27615
+IGdldERhdGE= 27616
+IENoYXJn 27617
+IEJT 27618
+Lm1pY3Jvc29mdA== 27619
+LnZpZGVv 27620
+LmRpcmVjdGlvbg== 27621
+LT57Jw== 27622
+bHVh 27623
+YXBlc3Q= 27624
+IGJvaWxlcg== 27625
+ZXJlaw== 27626
+IGRlY2lkZXM= 27627
+Lmphcg== 27628
+SVND 27629
+IFdvcmRz 27630
+KENPTg== 27631
+RU1QTEFURQ== 27632
+cmVlemU= 27633
+c2hvdHM= 27634
+YXBwcw== 27635
+dW50ZWQ= 27636
+LnNldE5hbWU= 27637
+Ojo8 27638
+LWJvbGQ= 27639
+6rI= 27640
+5a+G 27641
+TG9uZ3JpZ2h0YXJyb3c= 27642
+IHVuZmFpcg== 27643
+IGVhcm5pbmc= 27644
+IHNoZWxm 27645
+VVJFTUVOVA== 27646
+IGlkbGU= 27647
+X01FTlU= 27648
+LkN1c3RvbQ== 27649
+QUdFUg== 27650
+LSI= 27651
+X3N3aXRjaA== 27652
+YmVjYXVzZQ== 27653
+KXZpZXc= 27654
+bWFyZQ== 27655
+X2NvbmRpdGlvbg== 27656
+IFN0YXJ0aW5n 27657
+TXZj 27658
+KHByZQ== 27659
+ZHVtcA== 27660
+X0xPQ0s= 27661
+YXRldGltZQ== 27662
+LmNhbGxiYWNr 27663
+IENlcg== 27664
+b3BvbA== 27665
+aWJyYXJ5 27666
+IHJlc2VydmF0aW9u 27667
+CQkJCQkJCQo= 27668
+bGVjdG9y 27669
+Z3JhZHVhdGU= 27670
+IGdlbmVyb3Vz 27671
+IGlvbg== 27672
+cmljYW8= 27673
+bXE= 27674
+X2NvbXBsZXRl 27675
+KGN1cnNvcg== 27676
+IEZvcm1Db250cm9s 27677
+OmNlbnRlcg== 27678
+IHN1YnN0aXR1dGU= 27679
+IFBsYW5uaW5n 27680
+IHBlbnNpb24= 27681
+IHJlY29tbWVuZGF0aW9u 27682
+IFRhZ3M= 27683
+IGdlZg== 27684
+IGFsYnVtcw== 27685
+IHdhc2hpbmc= 27686
+cm9j 27687
+IHRyYWlucw== 27688
+YXRpbmdz 27689
+IGV4cG9uZW50 27690
+YWNrYmFy 27691
+LWxu 27692
+w6Fn 27693
+LkRhdGFBbm5vdGF0aW9ucw== 27694
+IEVJRg== 27695
+IE1hbGF5c2lh 27696
+CVBPUlQ= 27697
+b251cw== 27698
+IGNsZXZlcg== 27699
+IHBldQ== 27700
+PgoKCgo= 27701
+IEFyZ3VtZW50cw== 27702
+IGRlYnVnZ2luZw== 27703
+KHJpZ2h0 27704
+J0Q= 27705
+Y29tcHV0ZQ== 27706
+IGZpbmVzdA== 27707
+T1JBR0U= 27708
+IHNwZWN0YWN1bGFy 27709
+cGhyYXNl 27710
+IGluZGlh 27711
+IGxlZ2VuZGFyeQ== 27712
+YmlydGg= 27713
+IGNvbXBvc2l0ZQ== 27714
+IGdyb3dz 27715
+IFRE 27716
+IGVwaWQ= 27717
+IGxhdW5jaGluZw== 27718
+XV1b 27719
+TWludXRlcw== 27720
+IENoYQ== 27721
+IGNsZWFuZWQ= 27722
+IHdpdG5lc3Nlcw== 27723
+dWthbg== 27724
+CVR5cGU= 27725
+IGhhYmU= 27726
+cGFyYWdyYXBo 27727
+IEpQYW5lbA== 27728
+IEhhbm4= 27729
+IHZhcmllZA== 27730
+IFBva2Vtb24= 27731
+IE1VU1Q= 27732
+5Yqo 27733
+LnZpc2liaWxpdHk= 27734
+b3B1cA== 27735
+Xls= 27736
+LmV4cGFuZA== 27737
+ICInLA== 27738
+LmZhc3RlcnhtbA== 27739
+X2F1dG8= 27740
+IFNoZWV0 27741
+bWFya2Vy 27742
+UGFyY2Vs 27743
+ZXdz 27744
+IFN0cmF0ZWd5 27745
+LW1ha2luZw== 27746
+IHVudmU= 27747
+IHRyYWlsaW5n 27748
+IGNsaWNrcw== 27749
+IEdldENvbXBvbmVudA== 27750
+CWNvbnRlbnQ= 27751
+SUdFTkNF 27752
+RVJORUw= 27753
+TlNNdXRhYmxlQXJyYXk= 27754
+IGJyZWF0 27755
+IGhhcm1mdWw= 27756
+tog= 27757
+IGJlc2lkZXM= 27758
+IGJvcmluZw== 27759
+IGJydXRhbA== 27760
+dmFuZw== 27761
+KHBhcnNl 27762
+cXVpY2s= 27763
+IHB5dGVzdA== 27764
+IHN3aXRjaGluZw== 27765
+KCldCg== 27766
+IOyE 27767
+TEVS 27768
+CWZvbnQ= 27769
+IG5ldHQ= 27770
+KV0KCg== 27771
+KC9c 27772
+5p6c 27773
+dG9BcnJheQ== 27774
+IGJyZWVk 27775
+IENBUg== 27776
+IFdlYXBvbg== 27777
+QWJz 27778
+dG90 27779
+IHNldE5hbWU= 27780
+YXB0aXZl 27781
+IDos 27782
+IGVzY2FwZWQ= 27783
+b3JkZW4= 27784
+IFByaQ== 27785
+dGh1bWJuYWls 27786
+IGRlc2NyaXB0aW9ucw== 27787
+L3N0eWxlcw== 27788
+IFBDSQ== 27789
+IGFscGhhYmV0 27790
+YXN0aWNzZWFyY2g= 27791
+Tk9URQ== 27792
+IGNpYWxpcw== 27793
+IEdyaWZm 27794
+IHBvcnF1ZQ== 27795
+IHByb3RlaW5z 27796
+cGxheXM= 27797
+IHN0YXRpbmc= 27798
+IGltYWdpbmF0aW9u 27799
+IGZhY2lhbA== 27800
+IE1lY2hhbg== 27801
+IGFycmFuZ2Vk 27802
+X3VzZWQ= 27803
+IGFycmFuZ2VtZW50cw== 27804
+IFBpcGU= 27805
+aG9zdG5hbWU= 27806
+IHByb3ZpbmM= 27807
+VGl0 27808
+LkZsYXRTdHlsZQ== 27809
+IFNwbGl0 27810
+IExvYWRlcg== 27811
+LmNj 27812
+IGNsaW5pYw== 27813
+LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQ== 27814
+IGJha2luZw== 27815
+IEVOVA== 27816
+bmVhdGg= 27817
+44CBCgo= 27818
+QU5F 27819
+LkVudGl0eUZyYW1ld29ya0NvcmU= 27820
+YXBwZXJz 27821
+Lmlj 27822
+IE5nTW9kdWxl 27823
+IEZPUk0= 27824
+ICc7 27825
+LXByb2ZpdA== 27826
+aHc= 27827
+ZW5lbXk= 27828
+IEV5ZQ== 27829
+IGNhdXRpb24= 27830
+dG93bg== 27831
+IHVyZ2Vk 27832
+IEppbW15 27833
+eW5jaHJvbm91cw== 27834
+LXNpemVk 27835
+bWFraW5n 27836
+LHs= 27837
+XScs 27838
+X09iamVjdA== 27839
+YWhvbWE= 27840
+IGFjdGl2aXN0 27841
+SU5WQUw= 27842
+IENvbW1lcmNpYWw= 27843
+IE9ybGFuZG8= 27844
+KHRhYg== 27845
+INio 27846
+QWxnb3JpdGht 27847
+IGhlcml0YWdl 27848
+R2V0TWFwcGluZw== 27849
+IGZhaWx1cmVz 27850
+cmlvcw== 27851
+YXRpdmE= 27852
+IHRldA== 27853
+IGNhcnBldA== 27854
+KFo= 27855
+dGhyZWU= 27856
+IGRpc2Nsb3N1cmU= 27857
+LkVSUk9S 27858
+X2NhbGxlZA== 27859
+IGRpYWw= 27860
+IG9jY2FzaW9uYWw= 27861
+LkVycg== 27862
+IGZ1bmNpb24= 27863
+Y2FmZm9sZA== 27864
+IHJlbGVhc2luZw== 27865
+77yJCgo= 27866
+X1ZhbHVl 27867
+IFZhcmk= 27868
+eWVsbG93 27869
+IHN0cnVnZ2xlcw== 27870
+LmNhbA== 27871
+IERha290YQ== 27872
+CWNsb3Nl 27873
+IHNhbmR3aWNo 27874
+IGFuYWx5dGljcw== 27875
+ICoqKQ== 27876
+JiM= 27877
+IEpvcw== 27878
+IHBhc3NpdmU= 27879
+QVRUUg== 27880
+VGhyb3dhYmxl 27881
+IE11bg== 27882
+IFVpbnQ= 27883
+KGRpc3Bvc2luZw== 27884
+YXJhaw== 27885
+IExlYWRlcnM= 27886
+IGFmZmVjdGluZw== 27887
+IGl0ZW1WaWV3 27888
+IGVjb25vbWljcw== 27889
+ZnY= 27890
+4LmA 27891
+LnJi 27892
+IE92ZXJhbGw= 27893
+IHdlYWx0aHk= 27894
+IGV2b2x2ZWQ= 27895
+bmRh 27896
+IEh1cw== 27897
+cmVzdHJpY3Q= 27898
+dW1lbg== 27899
+IEFncmljdWx0 27900
+IQoKCg== 27901
+IGV4cGlyZXM= 27902
+IHNwb2tlc3BlcnNvbg== 27903
+aW50ZXJ2YWw= 27904
+IMOi 27905
+IHF1ZWVu 27906
+KG5pbA== 27907
+aW5nbw== 27908
+SGVhcA== 27909
+2Y4= 27910
+IGNvbXBsYWlu 27911
+U3lt 27912
+IENsb25l 27913
+IFJ1 27914
+IFdJTEw= 27915
+IENyeXN0YWw= 27916
+L2NvbnRlbnQ= 27917
+aW5nZW4= 27918
+b2ludG1lbnQ= 27919
+TGFzdE5hbWU= 27920
+YXZpY29u 27921
+IElCTQ== 27922
+IERpbWVuc2lvbg== 27923
+YW5o 27924
+aWNpcGFudHM= 27925
+IEFubmU= 27926
+LnByb2dyZXNz 27927
+IGFsZ28= 27928
+b2JpbA== 27929
+IFZvaWNl 27930
+IEZF 27931
+IGdsaQ== 27932
+IHZlZA== 27933
+IHByZXZlbnRz 27934
+XENvbHVtbg== 27935
+IGZvbGs= 27936
+ZXR0aQ== 27937
+IG1u 27938
+IENMQVNT 27939
+IGRpc3BsYXlpbmc= 27940
+IEts 27941
+IEZlcnI= 27942
+ZHV0bw== 27943
+Lmli 27944
+IGRhZG9z 27945
+J25hbWU= 27946
+LXNwYWNl 27947
+IGl0YWxpYW4= 27948
+IGludmVyc2U= 27949
+IGRlbnNl 27950
+dXRlcg== 27951
+IElFbnVtZXJhdG9y 27952
+LXNpZ24= 27953
+IG5hdGlvbndpZGU= 27954
+IHBlcnNvbmE= 27955
+IHNvbHZlZA== 27956
+IGRyYW1hdGljYWxseQ== 27957
+TG9nb3V0 27958
+IGdyYXY= 27959
+IGFuYWx5c2Vz 27960
+b2xsbw== 27961
+IGxhbXA= 27962
+LnRlYW0= 27963
+IEVyb3Q= 27964
+PVsi 27965
+IGRhbmNpbmc= 27966
+ID8+Lw== 27967
+IGNhdGVy 27968
+ZmZl 27969
+IFNoYQ== 27970
+IEJvcw== 27971
+IFJFUVVJUkU= 27972
+IE1vbnN0ZXI= 27973
+IFJC 27974
+IElERQ== 27975
+IHN1aXRz 27976
+IGZvcm1EYXRh 27977
+KHRoZXRh 27978
+IHNwYXRpYWw= 27979
+PU5VTEw= 27980
+IFNxbENvbm5lY3Rpb24= 27981
+IOA= 27982
+IFZlbmV6 27983
+IE1vcm5pbmc= 27984
+IHB1YmxpY2F0aW9ucw== 27985
+IE5PTklORlJJTkdFTUVOVA== 27986
+Zmlyc3ROYW1l 27987
+dWRz 27988
+V291bGQ= 27989
+X0hFQUQ= 27990
+IGludmVzdGVk 27991
+c3RhYmxl 27992
+ZnJlZA== 27993
+IGNvbW1hbmRlcg== 27994
+U0VT 27995
+4oCUYQ== 27996
+YW5jaGU= 27997
+IE1vdmVtZW50 27998
+67M= 27999
+U3VpdGU= 28000
+IGp1cmlzZGljdGlvbg== 28001
+66as 28002
+IEJldGg= 28003
+alF1ZXJ5 28004
+IElzYQ== 28005
+IGRlbnRhbA== 28006
+LCo= 28007
+IExpbWl0 28008
+aWxpYXRpb24= 28009
+PSJ7 28010
+YmFzdA== 28011
+IHR1cmI= 28012
+aXN5 28013
+T09L 28014
+IGFkdm9jYXRl 28015
+aW1hZw== 28016
+TEVDVElPTg== 28017
+0LvRjA== 28018
+KGNhdGVnb3J5 28019
+LmRlYw== 28020
+IHVuaXF1 28021
+X3Nu 28022
+IGF0dHJhY3RlZA== 28023
+IMOJ 28024
+IFJ1bm5pbmc= 28025
+X2VkZ2Vz 28026
+IERpc2FibGU= 28027
+X0FT 28028
+5Zu+ 28029
+IG5ldHdvcmtpbmc= 28030
+X2JyYW5jaA== 28031
+SGF2aW5n 28032
+dG9CZVRydXRoeQ== 28033
+R0k= 28034
+IGNhbXBz 28035
+c2Vw 28036
+LXBhcnQ= 28037
+ICkKCgoKCgoKCg== 28038
+dXN0cmFsaWE= 28039
+IFJlcG9ydHM= 28040
+cml0bw== 28041
+IHdhaXN0 28042
+X3BsdXM= 28043
+IFdX 28044
+LXBlcnNvbg== 28045
+QXByaWw= 28046
+IHNhcg== 28047
+LnRhcg== 28048
+IGFncmljdWx0dXJhbA== 28049
+dGlj 28050
+IHRjcA== 28051
+IHNldFZhbHVl 28052
+YWdlbnRv 28053
+IEFwcGU= 28054
+cGlsZXI= 28055
+Q0FERQ== 28056
+IGFuY2hl 28057
+YXRjaGVy 28058
+IGNvbWljcw== 28059
+IGxicw== 28060
+X3NlZ21lbnQ= 28061
+J109JA== 28062
+aXR0ZXJz 28063
+aWNoZXI= 28064
+R0lORQ== 28065
+IHV0aWxpemU= 28066
+IEN1cnNvcg== 28067
+X2V4cHJlc3Npb24= 28068
+IGRhZw== 28069
+PGxvbmc= 28070
+IHJoeXRo 28071
+5o+Q 28072
+IGNvbnN1bHRhdGlvbg== 28073
+WWV0 28074
+IikpCgo= 28075
+X01BQw== 28076
+Y291bGQ= 28077
+ICdcXA== 28078
+IFZv 28079
+CWh0dHA= 28080
+IGdz 28081
+cGhlcg== 28082
+LWdyaWQ= 28083
+SmFtZXM= 28084
+SnVs 28085
+IHNjaG9u 28086
+IHRlbnNvcmZsb3c= 28087
+IExPR0dFUg== 28088
+YW1hcw== 28089
+IHNjaXB5 28090
+IGNvbnZpY3Rpb24= 28091
+LmFn 28092
+IGFkbWluaXN0cmF0b3I= 28093
+KSl7DQo= 28094
+IG51bg== 28095
+Imdyb3Vw 28096
+UG9y 28097
+IG51cnNl 28098
+ZXhwcmVzc2lvbg== 28099
+YWt5 28100
+IEhlYXZ5 28101
+Lm9wdA== 28102
+LmdldEFsbA== 28103
+IG92ZXJs 28104
+LyIs 28105
+X2NvdW50cnk= 28106
+544= 28107
+IEdFTkVS 28108
+X3JvdXRl 28109
+IERhbA== 28110
+wrQ= 28111
+b2xvYWQ= 28112
+IHVuY29tZm9ydGFibGU= 28113
+KG1lbnU= 28114
+IGhvc3RuYW1l 28115
+JyIpOwo= 28116
+IGNhbGN1bGF0aW9ucw== 28117
+LWNsaWNr 28118
+IHByb3RlY3RpdmU= 28119
+44Kv 28120
+X0Zvcm0= 28121
+dW5ncw== 28122
+QWN0dWFs 28123
+bWY= 28124
+IFByb2Nlc3Npbmc= 28125
+IEludmVudG9yeQ== 28126
+KG1hdHJpeA== 28127
+YXBwcm9wcmlhdGU= 28128
+d2Vn 28129
+aWph 28130
+IGNocg== 28131
+IHJpZmxl 28132
+LXdzag== 28133
+a2Fy 28134
+IGluZGVwZW5kZW50bHk= 28135
+SU9T 28136
+IGNvbnNpc3RlbmN5 28137
+dm4= 28138
+L3N5c3RlbQ== 28139
+IENoYW5nZXM= 28140
+IGV4cG9zZQ== 28141
+aWNpZW50cw== 28142
+IHJlbGF0ZQ== 28143
+CW5leHQ= 28144
+6Kg= 28145
+dWRlcw== 28146
+IGdsYXNzZXM= 28147
+RlhNTA== 28148
+Li4uLi4u 28149
+IFBkZg== 28150
+IGFwcHJvdmU= 28151
+IHtc 28152
+IGV4aXN0ZQ== 28153
+KSko 28154
+QVJFTlQ= 28155
+0L7Qvw== 28156
+IExhdGVzdA== 28157
+IE5pZ2VyaWE= 28158
+LkludGVyZmFjZXM= 28159
+IHJlbW92ZXM= 28160
+RW5lbXk= 28161
+IGVuZm9yY2U= 28162
+dmVydHM= 28163
+CXBvcw== 28164
+X3RleHR1cmU= 28165
+V0FSRA== 28166
+IElOQ0lERU5U 28167
+KGNvbnRhaW5lcg== 28168
+IGRlZmVuZGluZw== 28169
+IFJY 28170
+IEhvb2s= 28171
+YnJpcw== 28172
+IEZsYXNr 28173
+R3JheQ== 28174
+LikK 28175
+dmlzaWJpbGl0eQ== 28176
+IFJlZGlyZWN0VG9BY3Rpb24= 28177
+ZXJyYWw= 28178
+X2VsZW0= 28179
+IHJlc29u 28180
+ZnJvbnRlbmQ= 28181
+X3ZhcmlhYmxlcw== 28182
+YXRlcmlh 28183
+ICsi 28184
+YXZlbGVk 28185
+UklY 28186
+IGRlZmljaXQ= 28187
+X0NoZWNr 28188
+WVlZWQ== 28189
+VG9PbmU= 28190
+c3B5 28191
+IHVuaXRlZA== 28192
+ZW5kZW50 28193
+IHBvZGU= 28194
+44GM 28195
+Q0FU 28196
+KGZtdA== 28197
+IEJvbnVz 28198
+IHJlY2s= 28199
+wro= 28200
+TW9kdWxlcw== 28201
+IHZhY3V1bQ== 28202
+UmFkaW8= 28203
+IERBTUFHRQ== 28204
+UGVu 28205
+IFBhcmtlcg== 28206
+OzsK 28207
+IFJlYWxseQ== 28208
+X25lZw== 28209
+cGVuZGluZw== 28210
+IG5vbWluZWU= 28211
+IENhdGVnb3JpZXM= 28212
+IFVsdHJh 28213
+V2VhcG9u 28214
+IGRlZmVuZGVy 28215
+SXNz 28216
+IEdlbmRlcg== 28217
+IERyZXNz 28218
+IGltcHJpc29u 28219
+IGJhbmtydXB0 28220
+aW1lbnNpb25hbA== 28221
+UEhB 28222
+IFN0cmF0ZWc= 28223
+IFBST0ZJVFM= 28224
+IHBhdHJp 28225
+Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8= 28226
+ZGVsZWdhdGU= 28227
+IGZvclN0YXRl 28228
+IGRldm90ZWQ= 28229
+X21ha2U= 28230
+IHRlcnJvcmlzdHM= 28231
+IFNuYXA= 28232
+X25hdg== 28233
+IEFB 28234
+IElhbg== 28235
+CWFwcA== 28236
+UGxhY2VtZW50 28237
+X2hkcg== 28238
+PEs= 28239
+IHNhbmc= 28240
+c3Ryb2tl 28241
+LVE= 28242
+Pjw/PQ== 28243
+LW1vZGVs 28244
+YXZhbmE= 28245
+IFdhbmc= 28246
+ICAgICAgICAgICAgIAo= 28247
+CWluaXQ= 28248
+IGVudHJlcHJlbmV1cg== 28249
+YXRpdm8= 28250
+TG92ZQ== 28251
+LW92ZXI= 28252
+V2F0ZXI= 28253
+IG1vZHM= 28254
+Z2VuY2U= 28255
+VGVjaG4= 28256
+Png= 28257
+LlRhc2s= 28258
+bW9uZXk= 28259
+aWJhYmE= 28260
+J30pOwo= 28261
+IFNwZWNpZmlj 28262
+IExpbmVhcg== 28263
+X09QVA== 28264
+SGFzaENvZGU= 28265
+KFBsYXllcg== 28266
+LkNvbnRhaW5zS2V5 28267
+IGNvbGxhcHNlZA== 28268
+dHJhbnNwYXJlbnQ= 28269
+X1JBTkdF 28270
+Vmlld2Vy 28271
+KGNmZw== 28272
+IHNvcnRpbmc= 28273
+IGluZmVjdGVk 28274
+IE5hY2g= 28275
+IGFjY29tbW9kYXRl 28276
+LmVsZW1lbnRz 28277
+X1BBUlQ= 28278
+IFNleHk= 28279
+PWdldA== 28280
+KHllYXI= 28281
+IHhocg== 28282
+Ol0= 28283
+b3dza2k= 28284
+IHN1bW1hcg== 28285
+IMK/ 28286
+IGludGU= 28287
+IHdvcmtmbG93 28288
+IFRhaXdhbg== 28289
+dmVyc2lvbnM= 28290
+5Y+R 28291
+IHN1cnByaXNpbmdseQ== 28292
+IG9wdGljYWw= 28293
+IHByb2Nlcw== 28294
+IGRpc2FncmVl 28295
+IG51ZXZv 28296
+IENBTQ== 28297
+c29ydGVk 28298
+bGVhc2Vz 28299
+aXN0bGU= 28300
+SWRlbnQ= 28301
+CWV2ZW50 28302
+amVjdGVk 28303
+Q2h1bms= 28304
+VmFycw== 28305
+LnByb3ZpZGVy 28306
+IHByb2NlZWRpbmdz 28307
+IGluY2x1c2l2ZQ== 28308
+IGFydHdvcms= 28309
+ZW5kYW50cw== 28310
+77yaCg== 28311
+c2Vlbg== 28312
+IGxpZw== 28313
+IG1ha2Vycw== 28314
+X2Z1bg== 28315
+IGxlbmd0aHM= 28316
+UGF0aFZhcmlhYmxl 28317
+W2l0ZW0= 28318
+4Li1 28319
+RGVhZA== 28320
+RkZGRkZG 28321
+IFVyYmFu 28322
+dXBsZXM= 28323
+aWNoZW4= 28324
+KG51bGxwdHI= 28325
+LnNwZWM= 28326
+LFN5c3RlbQ== 28327
+VVJBVElPTg== 28328
+KGpvYg== 28329
+5byP 28330
+IHRyYWNrZXI= 28331
+xZk= 28332
+IE1S 28333
+IFNRTGl0ZQ== 28334
+IGR0bw== 28335
+IDs7Cg== 28336
+IG1pbnQ= 28337
+IEludHJvZHVjdGlvbg== 28338
+Y2Fv 28339
+IHF1ZXN0aW9uZWQ= 28340
+IGZpdHRlZA== 28341
+cmV2aXNpb24= 28342
+c3E= 28343
+IG1pZw== 28344
+X3VuaXRz 28345
+X2FzeW5j 28346
+IGZsaWNr 28347
+fSk7CgoK 28348
+IG5vdHJl 28349
+fWAs 28350
+RmlsdGVycw== 28351
+IG11bmRv 28352
+X2RheXM= 28353
+IGZybQ== 28354
+dXRj 28355
+IHZhbHM= 28356
+ZXdpZHRo 28357
+IEdlbmVyYXRvcg== 28358
+IEFydGlzdA== 28359
+IElEcw== 28360
+IEFydGljbGVz 28361
+cmVhdGVy 28362
+IENvbXBvbmVudEZpeHR1cmU= 28363
+Lj0= 28364
+IHJvdQ== 28365
+LW5v 28366
+LmJ1a2tpdA== 28367
+ZWdn 28368
+IERpZmY= 28369
+YXRpY3M= 28370
+0YPRhw== 28371
+4oCUCgo= 28372
+IENoYXJsb3R0ZQ== 28373
+Ynll 28374
+IH0pOw0KDQo= 28375
+IFZpaw== 28376
+IEJyb3c= 28377
+IGx2 28378
+IEdpYg== 28379
+LXdpbmc= 28380
+R0xJR0VOQ0U= 28381
+KEls 28382
+IEVuZ2luZWVy 28383
+LldhaXQ= 28384
+IFBpY3R1cmVz 28385
+IHJoZXQ= 28386
+IHRoZXJtYWw= 28387
+IHByYWlzZQ== 28388
+PD4oKTsKCg== 28389
+IFNwaWRlcg== 28390
+UGF1c2U= 28391
+IEJha2Vy 28392
+IHNsb3dlcg== 28393
+IH1dCg== 28394
+X2VucXVldWU= 28395
+IGRpc2FwcGVhcmVk 28396
+IFRpY2tldA== 28397
+SU5VWA== 28398
+X0xPQ0FM 28399
+0LDRgdGB 28400
+QEluamVjdGFibGU= 28401
+Y29tbXVuaXR5 28402
+R2VzdHVyZVJlY29nbml6ZXI= 28403
+5Zu9 28404
+IHNjYWxlcw== 28405
+IC0o 28406
+Lycr 28407
+IFNpdA== 28408
+IGV4ZWN1dGl2ZXM= 28409
+YXJkaW5n 28410
+IGFkdmVycw== 28411
+IGJhY2t3YXJkcw== 28412
+CWNvbnRleHQ= 28413
+IEhhbXA= 28414
+IFBG 28415
+IERlY2s= 28416
+IENyYWln 28417
+QW1lcmljYW4= 28418
+IGJlbGw= 28419
+IHByb2w= 28420
+dWZlbg== 28421
+IHJuZw== 28422
+YXJzaGFs 28423
+IFNpbXBseQ== 28424
+Zmlyc3RuYW1l 28425
+c2hvcmU= 28426
+SnVseQ== 28427
+IG1vcnRhbGl0eQ== 28428
+IOKGkgoK 28429
+SGVscGVycw== 28430
+IGJlbmNobWFyaw== 28431
+ZW1hZGU= 28432
+IG9yZ2FuaXNhdGlvbnM= 28433
+Lmdzb24= 28434
+IFRleHRGaWVsZA== 28435
+IGNpdmlsaWFucw== 28436
+LkFycmF5cw== 28437
+IE1pc3Npc3NpcHBp 28438
+IGludGVybWVkaWF0ZQ== 28439
+Z2V0VXNlcg== 28440
+X2NsdXN0ZXI= 28441
+UmVsYXRpdmU= 28442
+Zm9yZWlnbg== 28443
+LnF1ZXJ5U2VsZWN0b3JBbGw= 28444
+Rm9yZWlnbktleQ== 28445
+IHJlYXNvbmFibHk= 28446
+LS0tLS0tLS0tCg== 28447
+Q2FyZHM= 28448
+IEthbQ== 28449
+IFRob3I= 28450
+IHJvbGxlcg== 28451
+LWVsZW1lbnQ= 28452
+IEN1cnJlbmN5 28453
+ZGRpZQ== 28454
+QUxMWQ== 28455
+IFJB 28456
+IHBlcm1ldA== 28457
+YWFhYQ== 28458
+IGhvbWV3b3Jr 28459
+IFZpdA== 28460
+IG1vbGQ= 28461
+IEZlcg== 28462
+W3N0YXJ0 28463
+IHN0YXRpc3RpY2Fs 28464
+IHNjYXJ5 28465
+X0hPTUU= 28466
+LkJlZ2lu 28467
+Q29uc3RydWN0 28468
+b2dlbmlj 28469
+IERFQUxJTkdT 28470
+IHRhbWJpw6lu 28471
+aXhvbg== 28472
+LmluZA== 28473
+YWNyZQ== 28474
+IHRyYW5zZm9ybXM= 28475
+IE5hcA== 28476
+LkJsb2Nr 28477
+dXNzaWE= 28478
+cGlyYXRpb24= 28479
+dWxlbnQ= 28480
+IGNlaWw= 28481
+Q2xhdXNl 28482
+bmFpcmU= 28483
+VEVT 28484
+IG5lYXQ= 28485
+U1RE 28486
+IFJlZ0V4cA== 28487
+cGVyZm9ybQ== 28488
+Oik= 28489
+IHVuaW9ucw== 28490
+IHN1YmxpYw== 28491
+IHdpbmRz 28492
+bG9hdGluZw== 28493
+Z2xpY2g= 28494
+IHBhZ2luYXRpb24= 28495
+U2tpbGw= 28496
+QXBwbHk= 28497
+IE9wZXJhdG9y 28498
+aXN0b2dyYW0= 28499
+IHF1YWxpdGllcw== 28500
+Q3Jvc3M= 28501
+IGRlY29t 28502
+XSwi 28503
+IEp1YW4= 28504
+Lm1vZGFs 28505
+LkNoaWxk 28506
+IFJvZ2Vy 28507
+U1RJVFVURQ== 28508
+OkNHUmVjdE1ha2U= 28509
+YWxldHRl 28510
+IHN0YQ== 28511
+YXNpZGU= 28512
+IGJsdXI= 28513
+IFdh 28514
+aWZldGltZQ== 28515
+cmVlZA== 28516
+Y29udHJvbHM= 28517
+IGJpbnM= 28518
+INC/0L7Quw== 28519
+Ki8sCg== 28520
+VUlT 28521
+IFJvdQ== 28522
+IERlbW8= 28523
+LWF3ZXNvbWU= 28524
+IENoYWlu 28525
+IGhhc3Rh 28526
+IEJhcnQ= 28527
+LktFWQ== 28528
+IHZlbmRvcnM= 28529
+bm9mb2xsb3c= 28530
+IERlc3Q= 28531
+X2J1aWxkZXI= 28532
+IGFyZ3Vlcw== 28533
+X2Fuc3dlcg== 28534
+Z290bw== 28535
+IFJFU1VMVA== 28536
+IE1PTg== 28537
+IHBvZGVy 28538
+b29ucw== 28539
+X0NBU0U= 28540
+IHJlcGxpYw== 28541
+IGZpbmFuY2luZw== 28542
+IERBVEU= 28543
+Y2Vybg== 28544
+X3RyYWNr 28545
+dGllcw== 28546
+L2xvZ28= 28547
+IE5FR0xJR0VOQ0U= 28548
+Z2V0VHlwZQ== 28549
+PlQ= 28550
+YmV0 28551
+Z2lybA== 28552
+IElOQ0lERU5UQUw= 28553
+LXNpdGU= 28554
+LnRyaWdnZXI= 28555
+IExpc2E= 28556
+X2lucHV0cw== 28557
+IHJlbGF0aXZlcw== 28558
+TG9nZ2VkSW4= 28559
+Q29uZmlndXJl 28560
+SUs= 28561
+LmFjY2VwdA== 28562
+UmVzdW1l 28563
+IERyYWZ0 28564
+ICo+KA== 28565
+IFdB 28566
+ZWRpYW4= 28567
+ZXJuZXNz 28568
+IExheW91dEluZmxhdGVy 28569
+Ki8NCg0K 28570
+b3RoeQ== 28571
+IG9ibGlnYXRpb24= 28572
+U3Vic2NyaWJl 28573
+IHRodW1ibmFpbA== 28574
+ZXhpc3Q= 28575
+IGluc2lzdGVk 28576
+IFVJQ29sbGVjdGlvblZpZXc= 28577
+IEFuZ3VsYXI= 28578
+IHRhYmxldHM= 28579
+IEltcGFjdA== 28580
+44CNCgo= 28581
+YWhv 28582
+IGNoYXJhY3RlcmlzdGlj 28583
+Z2Q= 28584
+ID09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT0= 28585
+b3VydA== 28586
+YC4= 28587
+QXBwcm8= 28588
+Q29vcmRpbmF0ZQ== 28589
+UmVtZW1iZXI= 28590
+IG1hcmluZQ== 28591
+XT09Jw== 28592
+IEFkbWluaXN0cmF0b3I= 28593
+LmdldERlZmF1bHQ= 28594
+IGZvcmdvdA== 28595
+IFN0cnVjdHVyZQ== 28596
+VnVl 28597
+YXJzaW5n 28598
+bW9tZW50 28599
+a3c= 28600
+X2N1cnNvcg== 28601
+QXR0YWNr 28602
+IGF0aGxldGlj 28603
+IGRpYWdub3NlZA== 28604
+IGVuZGU= 28605
+5Yig6Zmk 28606
+SG91c2U= 28607
+IFBBUkFN 28608
+IHdpa2k= 28609
+IE9wcA== 28610
+IGNvbnNlcnZhdGlvbg== 28611
+IHNuZA== 28612
+X3RlbQ== 28613
+c3Vic3Ry 28614
+IENhcGU= 28615
+LnNpbQ== 28616
+VVRJT04= 28617
+YW5hbg== 28618
+4oCZdW4= 28619
+IGd5 28620
+LXdvcms= 28621
+IGNvbXBlbGxpbmc= 28622
+PScj 28623
+CXN1Yg== 28624
+IGRpcmVjdG9yaWVz 28625
+7Yq4 28626
+IHRvdWNoZXM= 28627
+b3V0aW5lcw== 28628
+LkNvbGxlY3Rpb24= 28629
+c2NoZWR1bGU= 28630
+LmxhdA== 28631
+IERvY3RyaW5l 28632
+Q0FB 28633
+IFJlZmVy 28634
+IHNoaWZ0cw== 28635
+IGxpa2VsaWhvb2Q= 28636
+cHJldGVy 28637
+IEZlbWFsZQ== 28638
+IGludGVyY2VwdA== 28639
+IGxvdQ== 28640
+55m7 28641
+IHJ1Zw== 28642
+IENyb3du 28643
+ICoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKio= 28644
+LXByb2R1Y3Q= 28645
+IHByb21wdGVk 28646
+dW5nbGU= 28647
+ZG9ja2Vy 28648
+IFR1 28649
+IFVuaXF1ZQ== 28650
+X0Vycm9y 28651
+dWxvcw== 28652
+IOKE 28653
+IChg 28654
+R2V0dGluZw== 28655
+X3NjYWw= 28656
+IEVuaA== 28657
+w7x0 28658
+IHN1c3RhaW5lZA== 28659
+IHBhdGNoZXM= 28660
+IHByb3NwZXI= 28661
+IEdhemE= 28662
+X2xpZ2h0 28663
+IGluY29ucw== 28664
+LS0tLS0tLS0K 28665
+CQkgICAgICA= 28666
+U0Y= 28667
+Q04= 28668
+OiI7Cg== 28669
+IENvbGxpbnM= 28670
+KCop 28671
+IGNvbXBpbGF0aW9u 28672
+J10NCg== 28673
+IGNvbnNlcXVlbmNl 28674
+LC4uLg== 28675
+IGRt 28676
+IEJMT0NL 28677
+Q2x1c3Rlcg== 28678
+IHNraQ== 28679
+KGFyZ2M= 28680
+VHVwbGU= 28681
+IGpvaW5z 28682
+IFNoZXJpZmY= 28683
+V2Fy 28684
+aW5kaQ== 28685
+IGNvbW1lbnRlZA== 28686
+SE9TVA== 28687
+IGludml0YXRpb24= 28688
+YXBhbmVzZQ== 28689
+IHBlcm1pdHM= 28690
+cHJlY2VkZW50ZWQ= 28691
+X3pvbmU= 28692
+IEFteQ== 28693
+X1JE 28694
+TWluaW11bQ== 28695
+IGludm9jYXRpb24= 28696
+LmVuYWJsZQ== 28697
+aWNodGVu 28698
+LW93bmVk 28699
+Imlk 28700
+X1BPSU5URVI= 28701
+RmFj 28702
+IHNwZWNpZmljYXRpb25z 28703
+IG5vbWluYXRpb24= 28704
+IGdw 28705
+PCg= 28706
+IHJvYm90cw== 28707
+IEplcnJ5 28708
+IGhvbGRlcnM= 28709
+IHdhbmQ= 28710
+Y21z 28711
+IH0pKQo= 28712
+LlRvYXN0 28713
+IElMaXN0 28714
+QmFzZWQ= 28715
+em9vbQ== 28716
+L3N0eWxl 28717
+IEJlY2s= 28718
+TWVu 28719
+IGNvbnRyaWJ1dGluZw== 28720
+IHVuZG8= 28721
+IE9I 28722
+IGFkZE9iamVjdA== 28723
+IGVpZ2Vu 28724
+c2lnbnVw 28725
+6ZSZ 28726
+IGRpc3RhbnQ= 28727
+UEFSQVRPUg== 28728
+IE1hcmk= 28729
+IG3DoQ== 28730
+RW1w 28731
+w7Nz 28732
+IOyImA== 28733
+ZXZ0 28734
+K2o= 28735
+cGFyaw== 28736
+IFN0YXk= 28737
+IER1bg== 28738
+IHNveQ== 28739
+PiU= 28740
+YXppbmVz 28741
+IHRpZW1wbw== 28742
+KG1l 28743
+cHJlc2VudA== 28744
+LlRoaXM= 28745
+IGVkaXRvcnM= 28746
+RklFTEQ= 28747
+Lldvcms= 28748
+IFVuaXZlcnNl 28749
+IGRydW5r 28750
+LnRpbWVy 28751
+IGFsdGVyZWQ= 28752
+IE5hcg== 28753
+66Cl 28754
+LkFjdGl2ZQ== 28755
+aWRvcg== 28756
+560= 28757
+LmRlbHRhVGltZQ== 28758
+IGF3a3dhcmQ= 28759
+JnF1b3Q= 28760
+IFNhZmFyaQ== 28761
+IHRyaWNrcw== 28762
+TUVOVFM= 28763
+ZGl2aXNpb24= 28764
+IHZhcnlpbmc= 28765
+IEhpZ2h3YXk= 28766
+IHBob3RvZ3JhcGhlcg== 28767
+IFN0ZXdhcnQ= 28768
+IGxhc3Rpbmc= 28769
+LlByZQ== 28770
+LmFtYXpvbmF3cw== 28771
+IEx1Y2s= 28772
+LkRlc2NyaXB0aW9u 28773
+IE5heg== 28774
+bmVn 28775
+IGPDsw== 28776
+PDwiXA== 28777
+IFN1cnY= 28778
+IFVuYw== 28779
+UmVjaXBl 28780
+LkJvcmRlclN0eWxl 28781
+IG1vZGlmaWNhdGlvbnM= 28782
+LWF0 28783
+QVRGT1JN 28784
+aGRy 28785
+YWtv 28786
+IHN1YmxpY2Vuc2U= 28787
+IEp1bXA= 28788
+IGJlaW0= 28789
+IE1hbmhhdHRhbg== 28790
+LmJvb2w= 28791
+X2h3 28792
+0YLRjA== 28793
+Qmlu 28794
+IGdhdGV3YXk= 28795
+IiI6 28796
+IFVJUw== 28797
+OiIr 28798
+LWRlZg== 28799
+IFJlZ3VsYXI= 28800
+L3Rlc3Rpbmc= 28801
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA= 28802
+c3RyaW5nc3RyZWFt 28803
+IGRpc3Bhcg== 28804
+IG1vYmls 28805
+LXJlYWQ= 28806
+IEFkYXB0ZXI= 28807
+IENoYW1waW9ucw== 28808
+IHNjaGVkdWxlcg== 28809
+IGtpbGxz 28810
+IE11bHRpcGxl 28811
+aXJyb3I= 28812
+IGdvZHM= 28813
+QURP 28814
+YWt0ZQ== 28815
+IFVzdWFyaW8= 28816
+LmNpcmN1bGFy 28817
+IHJlY2VwdA== 28818
+IEV4cHI= 28819
+IGVsZGVybHk= 28820
+IG5pY2VseQ== 28821
+IGJlc3Rl 28822
+V2FudA== 28823
+IGNsYXNzaWNhbA== 28824
+LnNwcml0ZQ== 28825
+b2JqYw== 28826
+IE1hc29u 28827
+IHNpc3RlbWE= 28828
+LkJsYWNr 28829
+ZXNv 28830
+IFplaXQ= 28831
+IGRpdmlk 28832
+IGVudGVycw== 28833
+X3N1YmplY3Q= 28834
+IFBsYW5ldA== 28835
+Lndhcm5pbmc= 28836
+IEdyYW0= 28837
+X3Rva2Vucw== 28838
+IGhvdXNlaG9sZHM= 28839
+X2N1c3RvbWVy 28840
+dXNlck5hbWU= 28841
+Y3Jvc3M= 28842
+IHBpb25l 28843
+IGFzc2lzdHM= 28844
+X1NN 28845
+aWJv 28846
+IGxveWFs 28847
+IHVzZWxlc3M= 28848
+I2VsaWY= 28849
+IFVsdGltYXRl 28850
+Q29tZQ== 28851
+Z2Vs 28852
+IGRpY2g= 28853
+eHl6 28854
+aWtlbA== 28855
+b2JyYQ== 28856
+X3NjYW4= 28857
+IEludGVyaW9y 28858
+IE5pY2U= 28859
+IHBsYWM= 28860
+CXRhcmdldA== 28861
+IHZpcmFs 28862
+YXNzbw== 28863
+KCkv 28864
+dW5kZQ== 28865
+IEFkb2Jl 28866
+T3M= 28867
+dmlzaXRlZA== 28868
+IE9X 28869
+IEZlZWQ= 28870
+IFNlcXVlbmNl 28871
+IG1hbmFnZXM= 28872
+aW5zb24= 28873
+IExvdWlzaWFuYQ== 28874
+e30p 28875
+IEhhYg== 28876
+IExE 28877
+IGJpcA== 28878
+cHJpdGVz 28879
+KGVsZW0= 28880
+LmhpYmVybmF0ZQ== 28881
+w6lsw6k= 28882
+IG9obmU= 28883
+X3RyYW5zYWN0aW9u 28884
+IGFubnVuY2k= 28885
+UHVibGlzaGVk 28886
+IEhvbmRh 28887
+IFRhbQ== 28888
+IFBhY2tldA== 28889
+X3NlbGVjdG9y 28890
+IGNoYWxsZW5nZWQ= 28891
+UHJvY2Vzc2luZw== 28892
+LWhvdmVy 28893
+IHRyYWluZXI= 28894
+X2NhbmNlbA== 28895
+IE5TRGljdGlvbmFyeQ== 28896
+YWJyaWM= 28897
+IE1MUw== 28898
+X3NlbnNvcg== 28899
+IHNocmluaw== 28900
+IEZY 28901
+dGhyZXNob2xk 28902
+CUhY 28903
+LW1hcms= 28904
+YC5g 28905
+U2NoZW1l 28906
+KGZ1bGw= 28907
+X3dyaXRlcg== 28908
+IFN5cw== 28909
+IGZsZWQ= 28910
+IENpbg== 28911
+LXdpZGdldA== 28912
+IFByZXZpb3Vz 28913
+R2VuZGVy 28914
+X3F1ZXN0aW9u 28915
+RmVlZA== 28916
+IHNjcnV0 28917
+KHByZWZpeA== 28918
+44CC44CC 28919
+IGluZmVjdGlvbnM= 28920
+UGFydHM= 28921
+IGhpZXJhcmNoeQ== 28922
+X0RFTEVURQ== 28923
+IFBhdGllbnQ= 28924
+X3BheQ== 28925
+IHByb21vdGVk 28926
+IOyL 28927
+IGNpdmlsaWFu 28928
+IGFncmljdWx0dXJl 28929
+IFBpZWNl 28930
+IHN0YW5jZQ== 28931
+dXRzY2hl 28932
+QXNzaWdu 28933
+LkFDVElPTg== 28934
+Rmln 28935
+X3JhZGl1cw== 28936
+IFN5bmM= 28937
+ZHVjZXI= 28938
+ZmFpbHVyZQ== 28939
+ZW5zZWQ= 28940
+cHRpbWU= 28941
+Qk0= 28942
+X2RhdGV0aW1l 28943
+cXVpdm8= 28944
+UVVFVUU= 28945
+6ICF 28946
+QXBwZWFy 28947
+IHN1bW1pdA== 28948
+OnZvaWQ= 28949
+IHZpbmU= 28950
+6K6k 28951
+b25uZQ== 28952
+X1RSQU5T 28953
+LmdyZWVu 28954
+X2Nj 28955
+IGh1bmdyeQ== 28956
+ICI+ 28957
+KCkpOw0KDQo= 28958
+RXh0cmFjdA== 28959
+aXplbnM= 28960
+IHNvbHZlcg== 28961
+Tm90aWZ5 28962
+IGVuZ2xpc2g= 28963
+IFNob3BwaW5n 28964
+aW50ZXJmYWNlcw== 28965
+UkVR 28966
+IGlsbGVn 28967
+IFVJSW1hZ2VWaWV3 28968
+IGRpc2Nvbm5lY3Q= 28969
+IFVudGls 28970
+IENvbnNlcnZhdGl2ZQ== 28971
+QENvbHVtbg== 28972
+IHNoaWZ0ZWQ= 28973
+IDoNCg== 28974
+IGZpY2g= 28975
+IGRsYQ== 28976
+IHNob2U= 28977
+IiksDQo= 28978
+dWxhcml0eQ== 28979
+X1JFU1A= 28980
+V2VhdGhlcg== 28981
+VUlBcHBsaWNhdGlvbg== 28982
+Lml0ZXJhdG9y 28983
+IGFnaW5n 28984
+LlBhcmVudA== 28985
+b3dpZQ== 28986
+KGVxdWFs 28987
+IENvbnY= 28988
+L2RlZmF1bHQ= 28989
+IG1lYXN1cmluZw== 28990
+LnByZXY= 28991
+LklzVmFsaWQ= 28992
+LkZhdA== 28993
+IHPEgw== 28994
+a2V5d29yZHM= 28995
+d2l0aG91dA== 28996
+IHNvdmVyZQ== 28997
+IGV4Y2hhbmdlcw== 28998
+IG1lbHQ= 28999
+IGlzbGFuZHM= 29000
+IEludGVncg== 29001
+IGp1bXBpbmc= 29002
+IGdsZQ== 29003
+IGpvdXJuYWxpc20= 29004
+IGRhdGVk 29005
+TG9jYWxpemVk 29006
+IFJlZnJlc2g= 29007
+UGFydGljbGU= 29008
+IGFh 29009
+IFNUUklDVA== 29010
+IGJvZA== 29011
+LlByb2Nlc3M= 29012
+X0FVVE8= 29013
+IFB1Ymxpc2hlZA== 29014
+ZXZlcnk= 29015
+IHRlY2hub2xvZ2ljYWw= 29016
+bHN4 29017
+IGlycml0 29018
+QWRkaXRpb25hbA== 29019
+IGRlbGltaXRlcg== 29020
+X2xhbmd1YWdl 29021
+LWFyZWE= 29022
+Ym95cw== 29023
+IFR1YmU= 29024
+IHdhdA== 29025
+IG1lY2hhbmljcw== 29026
+X293bmVy 29027
+U3BlbGw= 29028
+IFN0b3JpZXM= 29029
+LkFwcGVuZExpbmU= 29030
+VGFibGVWaWV3 29031
+aGVt 29032
+c3RpY2s= 29033
+b2xsb3dlcg== 29034
+SUZG 29035
+IFVW 29036
+b2xsaXNpb24= 29037
+U1VC 29038
+IGNvbXBhcmFibGU= 29039
+IGRvbmRl 29040
+c2FsZXM= 29041
+bGx2bQ== 29042
+IH1dLAo= 29043
+T1RUT00= 29044
+IFB1cnBvc2U= 29045
+TGFi 29046
+IGludGVydmlld2Vk 29047
+b2lz 29048
+YXNpbA== 29049
+LnNldElk 29050
+IEluc3RydWN0aW9u 29051
+LS0+ 29052
+IE1vZGlmaWVk 29053
+YXRpb25hbGx5 29054
+IE1lZXRpbmc= 29055
+6K+v 29056
+I3JlZ2lvbg== 29057
+IHJvdXRpbmc= 29058
+LmZvY3Vz 29059
+IFlvdXRo 29060
+PEQ= 29061
+IE5hZw== 29062
+Y29udGFjdHM= 29063
+IGZvcm1pbmc= 29064
+IG1pZQ== 29065
+JyxbJy4uLw== 29066
+IEJQ 29067
+IGFwcGV0 29068
+IFRlYWNoZXI= 29069
+IFRQ 29070
+IGFubnVhbGx5 29071
+b3V0ZWRFdmVudEFyZ3M= 29072
+IFNwZWFrZXI= 29073
+IHJlbmFtZQ== 29074
+Q0ZH 29075
+KCIvLw== 29076
+5o6l 29077
+L3BhZ2Vz 29078
+IHByw6lz 29079
+IFNwZWxs 29080
+LkFsbG93 29081
+IElOVEVSUlU= 29082
+ICgj 29083
+4oCZCgo= 29084
+X0dlbmVyaWM= 29085
+Lmltc2hvdw== 29086
+X3RpbQ== 29087
+LWZhY2U= 29088
+KCYo 29089
+YXRpbnVt 29090
+IHJldm9sdXRpb25hcnk= 29091
+IEhvdXJz 29092
+cmFpbg== 29093
+IGFueXRpbWU= 29094
+IGFiYg== 29095
+LmpzcA== 29096
+U2Nyb2xsVmlldw== 29097
+IFRydXRo 29098
+IGFudGljaXBhdGVk 29099
+IGFjY2VudA== 29100
+LmNoZWNrZWQ= 29101
+IHNwZWNpZmllcw== 29102
+IGNhZg== 29103
+IGNlbGxwYWRkaW5n 29104
+IGNvb2tlZA== 29105
+IEh1Z2g= 29106
+cGVlaw== 29107
+X1JBVEU= 29108
+IGRvcm0= 29109
+Lw0K 29110
+SVZJVFk= 29111
+LkNvbnRyb2xsZXI= 29112
+KHBhcnQ= 29113
+LmNvbnN0cmFpbnQ= 29114
+IGludmFzaW9u 29115
+TU9WRQ== 29116
+IGdsdWM= 29117
+bGVuYW1l 29118
+IGFtZW4= 29119
+ZW5nbGlzaA== 29120
+IFN3aXR6ZXJsYW5k 29121
+IjsKCgo= 29122
+cGVzdA== 29123
+LmNvbGxlY3Q= 29124
+Tmli 29125
+IERpY3Q= 29126
+IEVtYg== 29127
+KHN1YmplY3Q= 29128
+IG91dHJhZ2U= 29129
+IGRlY2lkaW5n 29130
+IHNlbnRlbmNlZA== 29131
+RmVjaGE= 29132
+IkE= 29133
+IHF1ZXI= 29134
+IGZvbnRGYW1pbHk= 29135
+IHF1YWRy 29136
+LVk= 29137
+X0NBQ0hF 29138
+IGFuYWx5emVk 29139
+IGdhaW5pbmc= 29140
+IEFnYWluc3Q= 29141
+IFNvdWw= 29142
+dGF1 29143
+IGxpZ2h0d2VpZ2h0 29144
+IFRG 29145
+IEVmZmVjdHM= 29146
+LlR5cGVz 29147
+LmFkZENsYXNz 29148
+IHZlZ2Fu 29149
+6YE= 29150
+Lici 29151
+IEV4cGxvcmVy 29152
+LmRldGVjdA== 29153
+LnNoaWZ0 29154
+IG9ibGlnYXRpb25z 29155
+bGFzdE5hbWU= 29156
+IGFzc29jaWF0aW9ucw== 29157
+IFRpbWVTcGFu 29158
+dW50ZXI= 29159
+IEZyZXNo 29160
+Q29tcGF0aWJsZQ== 29161
+UHVi 29162
+aWRnZXM= 29163
+Lm9wdGlvbg== 29164
+dmFyaQ== 29165
+Lmhhc2hDb2Rl 29166
+IGdlYg== 29167
+LnNlY3Rpb24= 29168
+LW5vdA== 29169
+IFN1Ym1pdA== 29170
+VE4= 29171
+cmVnaXN0cnk= 29172
+X21lZGlh 29173
+IG5hag== 29174
+ZmZ0 29175
+IG1hdGU= 29176
+LXRoaXJk 29177
+IHBvY2tldHM= 29178
+ZXN0YQ== 29179
+IGJlbnQ= 29180
+IE5vcmQ= 29181
+IHJldGFpbGVycw== 29182
+IE1vcnJpcw== 29183
+LiIiIgoK 29184
+V3Jvbmc= 29185
+IMWb 29186
+UmF5 29187
+LmVj 29188
+IEJpbmQ= 29189
+X0hBTkQ= 29190
+KG5vbg== 29191
+aXNWYWxpZA== 29192
+IHNpbWlsYXJseQ== 29193
+X0xJTUlU 29194
+IGR5bmFtaWNz 29195
+IGRpc3RpbmN0aW9u 29196
+44GG 29197
+PE4= 29198
+IG9ydGg= 29199
+IFRveW90YQ== 29200
+IEthdGU= 29201
+IExT 29202
+b3JpZQ== 29203
+IFNwcmluZ3M= 29204
+IGZyZWFr 29205
+bGFzdG5hbWU= 29206
+X01VTFQ= 29207
+LXN0ZXA= 29208
+Iig= 29209
+QUREUg== 29210
+IGVudGVydGFpbmluZw== 29211
+X0NPTkY= 29212
+IGRlY29kZWQ= 29213
+IHN0cmVhaw== 29214
+IHdhaXRlZA== 29215
+IG5vdGlmaWVk 29216
+cm9kdWNlZA== 29217
+dmlzdWFs 29218
+LkxheW91dFBhcmFtcw== 29219
+5rA= 29220
+ZXNpYW4= 29221
+Zml0cw== 29222
+c3ByaW5n 29223
+IEJlcm5pZQ== 29224
+VXNlckRlZmF1bHRz 29225
+IHBlZGVzdA== 29226
+QXBwZWFyYW5jZQ== 29227
+IFdpa2k= 29228
+IE5PVElDRQ== 29229
+IHNzaA== 29230
+IGR1cmFudGU= 29231
+IFppcA== 29232
+xLFy 29233
+IE5BVE8= 29234
+IHR3ZWx2ZQ== 29235
+IHJveWFs 29236
+77g= 29237
+IG1lcmNoYW50 29238
+IEZ1cm5pdHVyZQ== 29239
+J10pLAo= 29240
+LFg= 29241
+IGZvbGRlcnM= 29242
+IEdhdGU= 29243
+CWZ1bmM= 29244
+cGljaw== 29245
+X3VzdWFyaW8= 29246
+IFZlcm0= 29247
+bWVudGlvbg== 29248
+dXJwb3Nl 29249
+IGFsZXJ0cw== 29250
+eGlvdXM= 29251
+X3NpZw== 29252
+IEZ1 29253
+ICg6 29254
+IGR1bWI= 29255
+5YWz 29256
+IGFjY3VyYXRlbHk= 29257
+6YeN 29258
+UkI= 29259
+LXNjcmVlbg== 29260
+IFZFUg== 29261
+am91cg== 29262
+IHJvbWFuY2U= 29263
+dWNjZWVk 29264
+LmNob2ljZQ== 29265
+IGFkaXA= 29266
+X2RpbXM= 29267
+U2VyaWFsaXphYmxl 29268
+44KL 29269
+LmpvYg== 29270
+IHByb2c= 29271
+dWNoYXI= 29272
+IGdlbnRseQ== 29273
+IFJTUw== 29274
+aWN0dXJlZA== 29275
+X0VOQUJMRUQ= 29276
+CWxhYmVs 29277
+YXdrcw== 29278
+IEVuc3VyZQ== 29279
+cmVtZW1iZXI= 29280
+7KCV 29281
+IHRyYW5zbWl0 29282
+e3sk 29283
+LlRyYW5zYWN0aW9u 29284
+dXJzZQ== 29285
+X3JlbGF0aXZl 29286
+IHNpemVk 29287
+IFhY 29288
+IFByaW5jZXNz 29289
+IExhcnJ5 29290
+IHByw7M= 29291
+INGB0YLRgA== 29292
+IHNpc3RlcnM= 29293
+ZXN0cnVjdA== 29294
+IGNoZWNrcG9pbnQ= 29295
+Omxlbmd0aA== 29296
+IENhcmxvcw== 29297
+L2ljb24= 29298
+X1RBUkdFVA== 29299
+VG9rZW5z 29300
+IHBhdGllbmNl 29301
+IFNlbGVjdGVk 29302
+cXR5 29303
+LnNob3dNZXNzYWdl 29304
+IHdpbGRsaWZl 29305
+IFByb3Bz 29306
+Ym0= 29307
+LWFycm93 29308
+IHBhcmNlbA== 29309
+ZmlyZWJhc2U= 29310
+IEJlbmphbWlu 29311
+Y2Vzc28= 29312
+LnRpbQ== 29313
+IEdhcmM= 29314
+LmFueQ== 29315
+IEhPV0VWRVI= 29316
+IEtv 29317
+IGdyYWJiZWQ= 29318
+X2ZyYW1lcw== 29319
+IG9iamVjdEF0SW5kZXg= 29320
+IEFEVklTRUQ= 29321
+IHN1YnVy 29322
+CUdM 29323
+IH0pfQo= 29324
+LWxlbmd0aA== 29325
+7Iuc 29326
+IFBvdHRlcg== 29327
+X2J1ZmY= 29328
+Lmd1aQ== 29329
+IEVuY29kaW5n 29330
+RWxlY3Q= 29331
+LW1lc3NhZ2U= 29332
+IO+/vQ== 29333
+IMiZaQ== 29334
+IEFyZ3VtZW50TnVsbEV4Y2VwdGlvbg== 29335
+0LDRhtC4 29336
+IG1pbmltaXpl 29337
+IHJlc3BvbmRpbmc= 29338
+JF9bJw== 29339
+IEluZGl2aWR1YWw= 29340
+w6Fj 29341
+IElOVEVS 29342
+IG1hc3R1cmI= 29343
+IEJpbg== 29344
+KCck 29345
+65Oc 29346
+IG9wZW5seQ== 29347
+ID48 29348
+IHVudG8= 29349
+b2xvZ2ljYWxseQ== 29350
+IE11bA== 29351
+VklESUE= 29352
+IHNsaW0= 29353
+IENvbW1pc3Npb25lcg== 29354
+KG9u 29355
+IHVuZGVybmVhdGg= 29356
+L2Ri 29357
+dm90ZQ== 29358
+KE1lc3NhZ2U= 29359
+IFBvcGU= 29360
+RGVmaW5lZA== 29361
+IHN3aWZ0 29362
+dXJm 29363
+IGFkYXB0ZWQ= 29364
+U0VM 29365
+IHJldmVudWVz 29366
+IGRpdmluZQ== 29367
+PXk= 29368
+R3JhZGllbnQ= 29369
+X2FjdA== 29370
+IC8qITw= 29371
+IHBvbHlnb24= 29372
+IEZEQQ== 29373
+IENhcnI= 29374
+YXRhYmxlcw== 29375
+KHN0ZG91dA== 29376
+IHJlZnJpZ2Vy 29377
+IGNvb3JkaW4= 29378
+YXZvcml0ZXM= 29379
+0YjQuA== 29380
+IGNvbXBhc3Npb24= 29381
+IFBPU1NJQklMSVRZ 29382
+LXNlY29uZGFyeQ== 29383
+dXJhY3k= 29384
+IGNvbXByb21pc2U= 29385
+X0FW 29386
+X29z 29387
+IGJlc2lkZQ== 29388
+g50= 29389
+IGxu 29390
+LnBsdWdpbnM= 29391
+Q2FwYWNpdHk= 29392
+YWxhaA== 29393
+LmJpbg== 29394
+IENSQw== 29395
+X2JhbGFuY2U= 29396
+IGZsZXhEaXJlY3Rpb24= 29397
+IGFtYml0 29398
+IG5pY2tuYW1l 29399
+IEZvcmNlcw== 29400
+Q0xF 29401
+IFNoZWxs 29402
+IHNhaWw= 29403
+IFdyaXRlcg== 29404
+IEFsaWNl 29405
+ZHc= 29406
+IEluZGlhbnM= 29407
+IE1hcnNoYWxs 29408
+X1NSQw== 29409
+IG5vcm1hbGl6ZWQ= 29410
+IEphZw== 29411
+44KS 29412
+emVpdA== 29413
+cnBj 29414
+w61j 29415
+LmlubGluZQ== 29416
+IHRyYXZlcnM= 29417
+X251bWVyaWM= 29418
+IHV0aWxpdGllcw== 29419
+IGV2YWM= 29420
+SU5QVVQ= 29421
+CXJlZ2lzdGVy 29422
+TVg= 29423
+IENhbXBiZWxs 29424
+IGRhdGFzZXRz 29425
+IGRlbWFuZGVk 29426
+IGluaXRpYWxTdGF0ZQ== 29427
+Z2Fu 29428
+IGVp 29429
+VW5leHBlY3RlZA== 29430
+LXdlYg== 29431
+dHJhaXQ= 29432
+LFk= 29433
+IFRvZGQ= 29434
+IHNrZWxldG9u 29435
+IG9wdGltaXpl 29436
+56ys 29437
+IFVwb24= 29438
+IFN0T2JqZWN0 29439
+IGFwbGlj 29440
+Lic8Lw== 29441
+QUND 29442
+YWxvdXM= 29443
+IGhhc2hDb2Rl 29444
+IEJpYg== 29445
+SU5BTA== 29446
+IGludmlzaWJsZQ== 29447
+IGhldGVy 29448
+IHNhZmVy 29449
+fS8v 29450
+LnRoZW1l 29451
+Lm5hdmlnYXRpb25Db250cm9sbGVy 29452
+X21lc2g= 29453
+c2tpbGw= 29454
+IFZpb2w= 29455
+wrI= 29456
+IEVPRg== 29457
+IEtp 29458
+eW1tZXRyaWM= 29459
+IG1heGxlbmd0aA== 29460
+xaM= 29461
+ZnJpZW5kcw== 29462
+IEV2YW5z 29463
+IGxlbW9u 29464
+ICgu 29465
+U2xpZGU= 29466
+IFRoYWlsYW5k 29467
+IENhbm4= 29468
+IGFtZW5k 29469
+IGNpcg== 29470
+IHNpbGx5 29471
+ZXNpbWFs 29472
+X3BpYw== 29473
+cHJvY2Vzc29y 29474
+SmF2YVNjcmlwdA== 29475
+IGV2aWRlbnQ= 29476
+X2Rp 29477
+PlA= 29478
+dnJvbg== 29479
+LlVO 29480
+IHBhaW50ZXI= 29481
+aXphcnJl 29482
+IGxhdg== 29483
+IHBvbQ== 29484
+cHJlZw== 29485
+PWZ1bmN0aW9u 29486
+KHNlcmlhbA== 29487
+aWZpY2E= 29488
+dW1pbmc= 29489
+5Zyw 29490
+44GC 29491
+LW9w 29492
+VUNI 29493
+IEhlbmQ= 29494
+LnByb3BUeXBlcw== 29495
+IHlv 29496
+IHJvdXRpbmVz 29497
+IGNhcmluZw== 29498
+U2Vt 29499
+IHJlc2VydmVz 29500
+IHByaW9yaXRpZXM= 29501
+cmVkaXRz 29502
+SVNUUg== 29503
+Q29udGVudFR5cGU= 29504
+IFNjaHc= 29505
+L21lZGlh 29506
+IGVzdHI= 29507
+IGNsaW1iaW5n 29508
+LXdlZWs= 29509
+Y2hlcmNoZQ== 29510
+c2Vuc29y 29511
+VG9BcnJheQ== 29512
+IE1vbnRyZWFs 29513
+IGNsb3Vkcw== 29514
+IEluamVjdGFibGU= 29515
+IFJpY2U= 29516
+IHByb3BhZ2FuZGE= 29517
+X3Byb3ZpZGVy 29518
+IGluZG9vcg== 29519
+IGluYXVn 29520
+IGRpcGxvbQ== 29521
+IG1lc3NhZ2luZw== 29522
+X211dA== 29523
+5aaC 29524
+IGt3 29525
+T05T 29526
+YXJpYW5z 29527
+UlBD 29528
+KV0NCg== 29529
+LXJheQ== 29530
+IFNvcg== 29531
+bWFsbA== 29532
+IG1hcmtldHBsYWNl 29533
+IHZ0aw== 29534
+TWE= 29535
+b2dhbg== 29536
+aWdp 29537
+IHNwb25zb3JlZA== 29538
+IERhbmk= 29539
+LlNFVkVS 29540
+PicuJA== 29541
+bXVsdGlwYXJ0 29542
+IFdvbA== 29543
+IHRhYmxlTmFtZQ== 29544
+IFVzZXJuYW1l 29545
+QmFja2dyb3VuZENvbG9y 29546
+IGZyaWdodA== 29547
+X0VNQUlM 29548
+U2VwdGVtYmVy 29549
+X3ZhbHM= 29550
+b3BpYQ== 29551
+IHNwb3R0ZWQ= 29552
+LUNo 29553
+IGRhdGFTb3VyY2U= 29554
+LyIK 29555
+0LXQutGC 29556
+IFJlcXVlc3RNZXRob2Q= 29557
+IFJlcGxhY2U= 29558
+LWRv 29559
+YWhu 29560
+IFBoRA== 29561
+XS4KCg== 29562
+Tk9O 29563
+Z2VtZW50 29564
+IFRocg== 29565
+IHF1aWV0bHk= 29566
+IHRvcnR1cmU= 29567
+IHRlYXM= 29568
+IENZ 29569
+IGF0cg== 29570
+ZGV2ZWxvcG1lbnQ= 29571
+LWRldGFpbA== 29572
+IGxpZ2h0ZXI= 29573
+IGFyZ3Vpbmc= 29574
+IGRlc2VydmVz 29575
+IGN1cnJpY3VsdW0= 29576
+X0NPTlRFWFQ= 29577
+xYJ5 29578
+SElURQ== 29579
+CUlE 29580
+L3VwbG9hZHM= 29581
+IHRpdHM= 29582
+cmVv 29583
+X2Ryb3A= 29584
+LlVURg== 29585
+IHBpY2t1cA== 29586
+IGdyb2Nlcnk= 29587
+IFB1cmU= 29588
+IGVhc2llc3Q= 29589
+UGhpbA== 29590
+LmZlYXR1cmU= 29591
+KCIq 29592
+IGludmVzdG9y 29593
+dG9r 29594
+IGphcg== 29595
+TG9z 29596
+4oCU4oCU4oCU4oCU4oCU4oCU4oCU4oCU 29597
+LnF1ZXVl 29598
+LXNwZWVk 29599
+TWFs 29600
+dW1ibHI= 29601
+IENPTlNU 29602
+IEhSRVNVTFQ= 29603
+IERhbmNl 29604
+KGZpbGVQYXRo 29605
+IGF0dHJpYnV0ZWQ= 29606
+4KWN 29607
+IEJ1bmQ= 29608
+Y29pbnM= 29609
+IHPDo28= 29610
+IHBpcg== 29611
+cGVyc29uYWw= 29612
+IHByZWxpbQ== 29613
+IHByb3Bvc2U= 29614
+IFRM 29615
+XV0p 29616
+IFN1YnNjcmlwdGlvbg== 29617
+IEtyZQ== 29618
+LGxlbg== 29619
+LkZpcnN0T3JEZWZhdWx0 29620
+KS0t 29621
+X3Byb2R1Y3Rz 29622
+LkdldEJ5dGVz 29623
+U2hpcA== 29624
+IGVuY3J5cHQ= 29625
+IFNH 29626
+IE15c3Q= 29627
+aGly 29628
+IGl0ZXJhdGU= 29629
+IGludGVuZA== 29630
+Lm1vY2tpdG8= 29631
+IGNoYXB0ZXJz 29632
+KGFuZ2xl 29633
+IFZsYWQ= 29634
+6K6+ 29635
+Jy4KCg== 29636
+UmVzcG9uc2VCb2R5 29637
+IEFiZA== 29638
+ZGVhbA== 29639
+IGJhcnJpZXJz 29640
+LW91dGxpbmU= 29641
+YmlsbA== 29642
+IEZhbGxz 29643
+X3NlY29uZA== 29644
+LmluY2x1ZGU= 29645
+LmNlaWw= 29646
+IG9jY3VwYXRpb24= 29647
+cGhvbnk= 29648
+Lm1vdmVUbw== 29649
+IEplbm5pZmVy 29650
+QVNURVI= 29651
+OyI+PA== 29652
+IEVuYWJsZWQ= 29653
+IHRlcm1pbmF0ZQ== 29654
+IElv 29655
+bGF0aW9ucw== 29656
+IFRIRU9SWQ== 29657
+IGVhcmxpZXN0 29658
+IHJhY2s= 29659
+IFNjYXI= 29660
+c2hha2U= 29661
+Y2hpcA== 29662
+IHV2 29663
+IGFsbGlhbmNl 29664
+0L/QuNGB 29665
+IEdPT0RT 29666
+emlvbmU= 29667
+IFZJ 29668
+IHst 29669
+IGZpbHRlcmluZw== 29670
+IG1pc2Nvbg== 29671
+LkRvY2tTdHlsZQ== 29672
+IGJ1c2g= 29673
+IGp1bms= 29674
+5ow= 29675
+IFFVRQ== 29676
+IGhvb2tz 29677
+IGZpcm13YXJl 29678
+IG1pZGRsZXdhcmU= 29679
+ZGlj 29680
+IE9ha2xhbmQ= 29681
+IGFycml2ZXM= 29682
+UGF5bG9hZA== 29683
+cGl4ZWw= 29684
+XXw= 29685
+IHN0YXJ0RGF0ZQ== 29686
+LlBSTw== 29687
+X2F1ZGlv 29688
+IG1pZGZpZWxk 29689
+aWdpZGJvZHk= 29690
+IFN3aXNz 29691
+IENsaXA= 29692
+IER1bXA= 29693
+IFRleHRCb3g= 29694
+IGdlaA== 29695
+eWllbGQ= 29696
+b2Rz 29697
+IHJlZmVyZW5kdW0= 29698
+QmFja2VuZA== 29699
+IENyZWFt 29700
+IGRvbWluYXRlZA== 29701
+IEFyY2hpdmU= 29702
+IHJpZGVycw== 29703
+LnByZXBhcmVTdGF0ZW1lbnQ= 29704
+IHF1YW5kbw== 29705
+IGNoZWY= 29706
+d2lraQ== 29707
+aW5lbA== 29708
+YW1wbGluZw== 29709
+KCJcXA== 29710
+IHNhZw== 29711
+X3Byb3h5 29712
+44GV 29713
+cGRv 29714
+LmdldEVsZW1lbnRzQnlUYWdOYW1l 29715
+IGRlbW9uc3RyYXRpb24= 29716
+IE5QQw== 29717
+IGFyY2hpdm8= 29718
+ZW5kYW5jZQ== 29719
+IGVmZmljaWVudGx5 29720
+KGFjdHVhbA== 29721
+LnRhYmxlVmlldw== 29722
+IG11c2g= 29723
+IGJlYXJz 29724
+X3RocmVhZHM= 29725
+amFz 29726
+YWh1bg== 29727
+IG5ldXJhbA== 29728
+IGRlc2lnbmluZw== 29729
+IEdEUA== 29730
+IGxpZnRlZA== 29731
+55uu 29732
+IEpvaW50 29733
+IEluY2x1ZGU= 29734
+IEdpYW50cw== 29735
+IHdpdGhkcmF3YWw= 29736
+IFJlbnQ= 29737
+bmF0aXZl 29738
+IFNlZWs= 29739
+Z3Jlc3Npb24= 29740
+X0NQVQ== 29741
+XFM= 29742
+IFNoaWVsZA== 29743
+IHNvbGlj 29744
+IGJvb20= 29745
+eWVjdG8= 29746
+IG1hbnVmYWN0dXJl 29747
+IOKAiw== 29748
+IGJib3g= 29749
+IGVhcnRocXU= 29750
+b2xsZWN0b3Jz 29751
+OkAiJQ== 29752
+IGxvb3Bz 29753
+SmU= 29754
+YWxraW5n 29755
+IFdoYXRz 29756
+IEJveXM= 29757
+LmJvb2s= 29758
+QVJHRQ== 29759
+X3BpeGVs 29760
+IHN1c3BlY3Rz 29761
+zrk= 29762
+dXNw 29763
+IEJNVw== 29764
+aWVjZXM= 29765
+KHBlcnNvbg== 29766
+5byA 29767
+6bs= 29768
+IFBvZGNhc3Q= 29769
+IGJvdQ== 29770
+KEl0ZW0= 29771
+w7s= 29772
+KElucHV0 29773
+SHR0cEdldA== 29774
+IGJ1cmc= 29775
+KV4= 29776
+Qk9BUkQ= 29777
+Ki8s 29778
+IGd1bHA= 29779
+IEJlbm4= 29780
+IGRlY2tz 29781
+LnN0YXR1c0NvZGU= 29782
+IGFjdXRl 29783
+IGh1Zw== 29784
+dWd1 29785
+IHBsZWQ= 29786
+LCIl 29787
+aGFwZQ== 29788
+INC30LDQvw== 29789
+IE1haW5l 29790
+LnJlYWw= 29791
+IGRhbGFt 29792
+IE1pbm9y 29793
+LkZsb2F0 29794
+ZGlzcA== 29795
+IHRs 29796
+IGVuY291bnQ= 29797
+PT4k 29798
+IGZn 29799
+dGVlcw== 29800
+IFJlY29tbQ== 29801
+w6Rs 29802
+IGNoZW1pc3RyeQ== 29803
+QmxvY2tz 29804
+T0lE 29805
+IGZvcmV4 29806
+IEFwcGVuZA== 29807
+IHsq 29808
+IFN1cHBseQ== 29809
+Q0dGbG9hdA== 29810
+KGJs 29811
+IGF0ZQ== 29812
+YWRvcmE= 29813
+IGd1c3Q= 29814
+QXNzb2Np 29815
+Pi4K 29816
+RkVUQ0g= 29817
+LnNlcmlhbA== 29818
+d2lkZ2V0cw== 29819
+YXJkbGVzcw== 29820
+aWVmcw== 29821
+X0ZVTEw= 29822
+ZXJuZXRlcw== 29823
+IFByZWQ= 29824
+2K0= 29825
+5LqL 29826
+dWJlcm5ldGVz 29827
+IExhdXJh 29828
+IGxhYmVsZWQ= 29829
+SGlnaGxpZ2h0 29830
+IGFubm95aW5n 29831
+L3VwZGF0ZQ== 29832
+KGRlc2NyaXB0aW9u 29833
+IGludGltaWQ= 29834
+JGM= 29835
+IikpKQo= 29836
+LkFQ 29837
+IFtdKg== 29838
+IEVYSVQ= 29839
+Lkhvc3Q= 29840
+IE9QRU4= 29841
+LnNlbmRNZXNzYWdl 29842
+X2NhbWVyYQ== 29843
+X3RpbGU= 29844
+IHRoZXJt 29845
+b25vbW91cw== 29846
+IGRpc2Fkdg== 29847
+IG5hYXI= 29848
+aW5kZXhPZg== 29849
+IFBQ 29850
+LnByb3RvY29s 29851
+QUZF 29852
+IHRleHR1cmVz 29853
+IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMj 29854
+dW1iYWk= 29855
+LnN0YXRz 29856
+IEdF 29857
+IGll 29858
+IFNURA== 29859
+IE1hbm4= 29860
+LnJlZmxlY3Q= 29861
+S0I= 29862
+IGRpdmU= 29863
+Lndhdg== 29864
+LyotLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0t 29865
+L3NldHRpbmdz 29866
+LmxpZmVjeWNsZQ== 29867
+IGRhdWdodGVycw== 29868
+b3J1cw== 29869
+dWJlcg== 29870
+TklORw== 29871
+c3RyaQ== 29872
+IFRpcA== 29873
+IHpu 29874
+IHN3aXRjaGVk 29875
+aW5ldA== 29876
+dWZmeQ== 29877
+IFRyYW5zcG9ydGF0aW9u 29878
+KGNvbmY= 29879
+ZnJpY2E= 29880
+IFhM 29881
+IExlYWQ= 29882
+X3BlcmNlbnQ= 29883
+PE1hcA== 29884
+IHRocnVzdA== 29885
+b3Ji 29886
+aWtr 29887
+IHRyYXVtYQ== 29888
+QWNjZXNzb3I= 29889
+IEZpdA== 29890
+IFN0cmluZ0J1ZmZlcg== 29891
+ZXhwbA== 29892
+KHNjcmVlbg== 29893
+IGF1ZGllbmNlcw== 29894
+IE9QVElPTg== 29895
+X3JvdW5k 29896
+W25vZGU= 29897
+YmVo 29898
+LT5fXw== 29899
+cGVybWlzc2lvbnM= 29900
+IERldGVybWluZQ== 29901
+Lk1hbg== 29902
+IGFkdmFuY2Vz 29903
+LklucHV0U3RyZWFt 29904
+IHN0cm9uZ2VzdA== 29905
+IGVCYXk= 29906
+ICMt 29907
+IGRpcm5hbWU= 29908
+IFNNUw== 29909
+IG1lZGljYXRpb25z 29910
+IGFtZW5kZWQ= 29911
+IGNodXJjaGVz 29912
+IEltcGVyaWFs 29913
+JHJvdw== 29914
+IE1hZGlzb24= 29915
+IEluc3A= 29916
+IGFmZmFpcg== 29917
+IHBzeWNob2xvZ3k= 29918
+dmg= 29919
+IHNldmVyaXR5 29920
+4oCQ 29921
+IHN0cmlwcw== 29922
+QUg= 29923
+dmVydGlzaW5n 29924
+IGNvbnNl 29925
+SU1BR0U= 29926
+IFN0YXRz 29927
+CXNj 29928
+LkN1cnNvcg== 29929
+IGZyZWV6ZQ== 29930
+c3Nvbg== 29931
+KHhtbA== 29932
+IFN1c2Fu 29933
+LnRpbGU= 29934
+ZWRlZA== 29935
+ICAgIAkJCQ== 29936
+dWVsbGU= 29937
+IE1pdGNoZWxs 29938
+YmFzZWQ= 29939
+T3BlcmFuZA== 29940
+veaVsA== 29941
+IEZG 29942
+CXN0cmNweQ== 29943
+b3VuY2Vz 29944
+aWxkbw== 29945
+LmV4ZWN1dGVRdWVyeQ== 29946
+IGFwcHJvYWNoaW5n 29947
+IFNldmVu 29948
+IG51dHM= 29949
+IHJpYw== 29950
+YXNzaWdubWVudA== 29951
+IGNhbGN1bGF0b3I= 29952
+IE11cnBoeQ== 29953
+IEJvdQ== 29954
+7YQ= 29955
+IGJ1dHQ= 29956
+IHRpY2tz 29957
+UHJvamVjdHM= 29958
+aWxpYg== 29959
+LnRleHRDb2xvcg== 29960
+bW92 29961
+X2xvZ28= 29962
+KHRlbXBsYXRl 29963
+IElOSVQ= 29964
+IGltYWdlVmlldw== 29965
+c2NyaXB0aW9ucw== 29966
+T1JJVFk= 29967
+Q29uc3VtZXI= 29968
+IHVucHJlY2VkZW50ZWQ= 29969
+IHRvdXJpc3Q= 29970
+IGJyb24= 29971
+IGNvbnRyYWN0b3I= 29972
+IGxpY2VuY2U= 29973
+IE5hbQ== 29974
+5q8= 29975
+KHRyYW5zZm9ybQ== 29976
+X0FUVA== 29977
+UHJlZg== 29978
+IEdhbQ== 29979
+IHZlc3NlbHM= 29980
+IGhhdg== 29981
+TGF0ZXI= 29982
+LlRvTG93ZXI= 29983
+IHVybHM= 29984
+IGJyZWFrZG93bg== 29985
+IHBlbmFsdGllcw== 29986
+IGZvc3Rlcg== 29987
+IFVF 29988
+IGNsdWU= 29989
+Y29tZWQ= 29990
+5ZCN56ew 29991
+LW1haW4= 29992
+IHB0cw== 29993
+IGNvdW50ZWQ= 29994
+aWN0cw== 29995
+L3Bvc3Q= 29996
+IGdldGF0dHI= 29997
+IHBpbmc= 29998
+QU5DRUw= 29999
+IHBlYw== 30000
+0YXQvtC0 30001
+YW50b20= 30002
+IEJsdWVwcmludA== 30003
+IEV2ZW50RW1pdHRlcg== 30004
+IGzDpA== 30005
+5rI= 30006
+IHN0cmF3 30007
+KGNvbXA= 30008
+J3VuZQ== 30009
+Pk4= 30010
+LWNsaWVudA== 30011
+ZXNNb2R1bGU= 30012
+LWJhc2U= 30013
+IHJldHJlYXQ= 30014
+X3NpbXBsZQ== 30015
+CQkJCQkJIA== 30016
+ZmVl 30017
+JykNCg0K 30018
+Q29udHJvbEl0ZW0= 30019
+IHN1YnNjcmliZXJz 30020
+cGxlYXNl 30021
+IEVmZg== 30022
+IHBvdW5k 30023
+IEJ5dGVz 30024
+IFRlYQ== 30025
+X2FjdGl2aXR5 30026
+IG1heGlt 30027
+IG9wY29kZQ== 30028
+QlNE 30029
+LmNvbnN0YW50 30030
+O30= 30031
+b21icmVz 30032
+IGNhcmVlcnM= 30033
+KS4KCgoK 30034
+IHNwcmVhZGluZw== 30035
+LWV4cGFuZGVk 30036
+IE9yZA== 30037
+YW1hcmlu 30038
+IG1vYmlsaXR5 30039
+VW5mb3J0dW5hdGVseQ== 30040
+YWtr 30041
+Tkw= 30042
+X3JlZGlyZWN0 30043
+IFBH 30044
+IFNlbnNvcg== 30045
+Ym9s 30046
+dGFw 30047
+X01FTU9SWQ== 30048
+IFVJQWxlcnQ= 30049
+cGxpdHVkZQ== 30050
+V2Vic2l0ZQ== 30051
+IExvZ28= 30052
+bG92ZQ== 30053
+W2luZA== 30054
+IGFsdG9nZXRoZXI= 30055
+IHdvbmRlcmVk 30056
+IGVzcGVy 30057
+IExpYmVyYWw= 30058
+IG9zcw== 30059
+IGVsaXQ= 30060
+IHN0aWZm 30061
+b2RveA== 30062
+X21lbnRpb25z 30063
+IERvdWdsYXM= 30064
+X3BpZA== 30065
+IENL 30066
+IGluaXRXaXRoRnJhbWU= 30067
+LmJsb2c= 30068
+cGtn 30069
+YW5naGFp 30070
+UVVJUkVE 30071
+dXU= 30072
+IG1rZGly 30073
+QVRBTA== 30074
+IHVuaA== 30075
+aW5jZXM= 30076
+c3Ro 30077
+IGh5cG90aGVzaXM= 30078
+IGNhdGE= 30079
+IFRC 30080
+IENsYXI= 30081
+IHByZWRlY2Vzcw== 30082
+IHNpdHVhdGVk 30083
+LXdvcmxk 30084
+KSkv 30085
+IGhlYWRsaW5lcw== 30086
+LnN0YXQ= 30087
+IG91dGJyZWFr 30088
+c3BhdGg= 30089
+X0ZMQUdT 30090
+IFNlcnZsZXRFeGNlcHRpb24= 30091
+U3Vu 30092
+RlJPTQ== 30093
+IERpcg== 30094
+44O744O744O7 30095
+X2Nvb3Jk 30096
+IE9wdGlt 30097
+TW9uaXRvcg== 30098
+LmJpdA== 30099
+WFhY 30100
+IHRvZGFz 30101
+ZmVsZA== 30102
+0YDQuA== 30103
+aW1pcg== 30104
+IHBvbGl0aWNhbGx5 30105
+IG1vbGVjdWxhcg== 30106
+IHRyYWRlZA== 30107
+IHt7JA== 30108
+IFN3ZWRpc2g= 30109
+ICdALw== 30110
+X1JFQUw= 30111
+IHdhcmVob3VzZQ== 30112
+dG9kYXk= 30113
+LEw= 30114
+b3Jw 30115
+PHNlY3Rpb24= 30116
+LWJy 30117
+eW1l 30118
+IFVzZXJTZXJ2aWNl 30119
+IGxpYmVydHk= 30120
+IG1vbWVudG8= 30121
+KEltYWdl 30122
+PHNpemU= 30123
+U2No 30124
+IGpvZw== 30125
+aW9sb2d5 30126
+YXJlbnRseQ== 30127
+IHF1YW50dW0= 30128
+IEFidQ== 30129
+IHJpbQ== 30130
+IG1hbmE= 30131
+Rm9udFNpemU= 30132
+QnVpbGRpbmc= 30133
+c3RhaXJz 30134
+QUlMQUJMRQ== 30135
+ICYn 30136
+IHNlY3Q= 30137
+IHNpZ2g= 30138
+KGJhdGNo 30139
+LklDb250YWluZXI= 30140
+cG9sbA== 30141
+IENvcnBz 30142
+zrU= 30143
+YXJ1 30144
+IEtheQ== 30145
+LnJhbmdl 30146
+X2NsaWNrZWQ= 30147
+IFJvYmVydHM= 30148
+Lk5ldHdvcms= 30149
+ZmluaXNo 30150
+LU1hbg== 30151
+IGNvbGxlZ2Vz 30152
+IEZpbmU= 30153
+IikpLAo= 30154
+ZmlsbQ== 30155
+IHJlbWluZGVk 30156
+IGdlc3R1cmU= 30157
+b3V0aWw= 30158
+IHRocmVhZGluZw== 30159
+IG9iamV0 30160
+IHRvdXJz 30161
+YWN0aXZhdGVk 30162
+Lm1rZGly 30163
+PXVzZXI= 30164
+IHJlZGU= 30165
+ZsO8 30166
+X1NZU1RFTQ== 30167
+cHY= 30168
+IGNvbmdy 30169
+IG1hc3Nhc2pl 30170
+IHByYWN0aXRpb24= 30171
+VW5pdmVyc2l0eQ== 30172
+IHRhYmluZGV4 30173
+0Jg= 30174
+U2V0cw== 30175
+IGNvdW50aWVz 30176
+Z3Vlc3Q= 30177
+ZmFu 30178
+IHdvcmRlbg== 30179
+LmRp 30180
+0L3QsNGH 30181
+wr8= 30182
+aWdEZWNpbWFs 30183
+IHNob3Jl 30184
+IGfDtg== 30185
+IHJlcGFpcnM= 30186
+IGhlbHBlcnM= 30187
+IGNlbnRlcmVk 30188
+T0xMT1c= 30189
+IG1hcFN0YXRlVG9Qcm9wcw== 30190
+IGNlbnRz 30191
+PEE= 30192
+IGV4cGVjdGF0aW9u 30193
+T2N0b2Jlcg== 30194
+IGJnY29sb3I= 30195
+Y2FsZXM= 30196
+LkNPTg== 30197
+IFZlbA== 30198
+IGNyeWluZw== 30199
+LXNlYXNvbg== 30200
+IGZ1bmN0aW9uaW5n 30201
+X0xPQ0FUSU9O 30202
+w7xzcw== 30203
+YmVyeQ== 30204
+UGFyYQ== 30205
+b21pbmF0b3I= 30206
+LWxl 30207
+IGV0aGljYWw= 30208
+aGFzaHRhZ3M= 30209
+ZW1wbG8= 30210
+IG7Dum1lcm8= 30211
+KGFjdGl2aXR5 30212
+LlN0b3A= 30213
+LnN0cmZ0aW1l 30214
+SUxE 30215
+IHRvZQ== 30216
+CU5vZGU= 30217
+IikNCg0K 30218
+IFB1ZXJ0bw== 30219
+IGV4ZWN1dGluZw== 30220
+IEdVSUQ= 30221
+IG9wcG9zaW5n 30222
+YWxwaA== 30223
+IGV4aGliaXQ= 30224
+X2ZsYXNo 30225
+IG1laWxsZQ== 30226
+IGpzb25PYmplY3Q= 30227
+SGVybw== 30228
+YWludGVk 30229
+X0RPTQ== 30230
+IHdpbA== 30231
+IHNsb3Bl 30232
+IG3DpQ== 30233
+IElyYXFp 30234
+IG9yZ2FuaXpl 30235
+CWpRdWVyeQ== 30236
+SFVE 30237
+c2hpbmU= 30238
+Lndl 30239
+IFNraWxscw== 30240
+cG9uc29y 30241
+IGNvbmNsdXNpb25z 30242
+IHJlZm9ybXM= 30243
+IHJlbHVjdA== 30244
+bmFtZWQ= 30245
+IE9saXZlcg== 30246
+IC8vfQo= 30247
+LWxvb2tpbmc= 30248
+IGZvZw== 30249
+IEhP 30250
+IEZyaWVk 30251
+IGluZXZpdGFibGU= 30252
+IERhdGFHcmlkVmlldw== 30253
+SG91cg== 30254
+aWxsZXM= 30255
+bG9naWNhbA== 30256
+IGNvbm5lY3Rpdml0eQ== 30257
+LnR3aWc= 30258
+IEt5bGU= 30259
+KGRzdA== 30260
+LVNo 30261
+IFN0dWRpb3M= 30262
+KExldmVs 30263
+LmpldA== 30264
+X1BST1RP 30265
+LWRlY29yYXRpb24= 30266
+T1RIRVI= 30267
+IHJlYWRpbHk= 30268
+LlBhcmFtZXRlcg== 30269
+IG11bHRpcGx5 30270
+IExJQg== 30271
+YXJtZWQ= 30272
+IHNvb25lcg== 30273
+5oQ= 30274
+X0VT 30275
+IGZvc3NpbA== 30276
+IEFuYw== 30277
+4oCcVGhpcw== 30278
+bG9kYXNo 30279
+UHl0aG9u 30280
+IGhpc3RvZ3JhbQ== 30281
+d2VzdGVybg== 30282
+IGluZmFudA== 30283
+IGNvb3JkaW5hdG9y 30284
+IG5pYg== 30285
+Om0= 30286
+IHJlc3BlY3RlZA== 30287
+IGRlZmluaXQ= 30288
+JlQ= 30289
+X3BhZA== 30290
+IFRyaWdnZXI= 30291
+dGhhbA== 30292
+IGltYWdlTmFtZWQ= 30293
+IGJlYXRlbg== 30294
+CXJj 30295
+IFBhbGFjZQ== 30296
+IGhhemFyZA== 30297
+IGlzb2xhdGlvbg== 30298
+X3Jj 30299
+Y29udHJl 30300
+T1VUUFVU 30301
+IHJlaWdu 30302
+IFBsYXRl 30303
+QVRFUw== 30304
+IGZsdXg= 30305
+IHBhY2tz 30306
+LmdldFNlbGVjdGVk 30307
+IHBhcnRpY2lwYXRlZA== 30308
+IG5lZWRsZQ== 30309
+LWRlcHRo 30310
+Ojo6Ojo6 30311
+LWxhdw== 30312
+aW5zcGFjZQ== 30313
+b25pdG9y 30314
+PW5v 30315
+IEF0b21pYw== 30316
+IEJyYWlu 30317
+RWRpdGFibGU= 30318
+LXNj 30319
+cmVkZW50aWFs 30320
+IFBlcnJ5 30321
+a2ll 30322
+IC0tLS0tLS0tLS0K 30323
+LnN0cm9rZQ== 30324
+KEludGVudA== 30325
+IHVuaXR5 30326
+dW1sYWg= 30327
+RnVydGhlcg== 30328
+IHByemU= 30329
+IHPDuA== 30330
+44KK 30331
+IFBST0NVUkVNRU5U 30332
+IEhvdXNpbmc= 30333
+IGF0dG9ybmV5cw== 30334
+IGNvbXBvc2U= 30335
+YXR0ZXJpbmc= 30336
+IldoYXQ= 30337
+ZHJhdWw= 30338
+IHN0cmFpZ2h0Zm9yd2FyZA== 30339
+SW5zdGFudA== 30340
+LkpUZXh0RmllbGQ= 30341
+IHRyYWRlcw== 30342
+0LvQsA== 30343
+IHsh 30344
+IGxhdGVseQ== 30345
+SU1H 30346
+IEFsZA== 30347
+IElOTkVS 30348
+IGNhcnRvb24= 30349
+LlNvdXJjZQ== 30350
+RkFMU0U= 30351
+IGRvdWdo 30352
+ZmVu 30353
+KHJlY3Q= 30354
+RGF0YVRhYmxl 30355
+Tmljaw== 30356
+IEJ1dHRlcg== 30357
+cmVhZHM= 30358
+X2NvbW1lbnRz 30359
+RU5W 30360
+IENvbm5lY3RpY3V0 30361
+LUZJUlNU 30362
+CQkJICAgICA= 30363
+YWNoaQ== 30364
+Lk1zZw== 30365
+cmVjdGlvbg== 30366
+IHJlbGF4ZWQ= 30367
+IHNoYWZ0 30368
+IGVm 30369
+IEFkZGluZw== 30370
+IGJyZWFjaA== 30371
+IO+8mg== 30372
+cmFtYQ== 30373
+IGNvbmR1Y3Rpbmc= 30374
+ICg7 30375
+KGds 30376
+IENBVVNFRA== 30377
+YXNoaQ== 30378
+IEZMQUc= 30379
+IENvbW1lcmNl 30380
+IElOVEVHRVI= 30381
+aG91cnM= 30382
+IFNjaG9vbHM= 30383
+IG51Y2xl 30384
+QWdhaW4= 30385
+cHJvag== 30386
+IHNldmVudGg= 30387
+RU1QTEFSWQ== 30388
+KG1vY2s= 30389
+J10sDQo= 30390
+X1NQRUVE 30391
+PmZhbHNl 30392
+IHNwYQ== 30393
+IE5lYXI= 30394
+7JU= 30395
+IGludHJpZw== 30396
+X21lbWJlcnM= 30397
+d2F2ZQ== 30398
+IGFuYWx5c3Rz 30399
+X09T 30400
+ZWRpbg== 30401
+IEZyaQ== 30402
+IHJldHJpZXZlZA== 30403
+UmVndWxhcg== 30404
+X29icw== 30405
+RVhQT1JU 30406
+Jyl9fSI= 30407
+ImNsYXNz 30408
+X18oKA== 30409
+YnVja2V0 30410
+IHN0cm8= 30411
+IFBhdGNo 30412
+eXN0aWNr 30413
+ZnVsbmVzcw== 30414
+YXBvcw== 30415
+RGE= 30416
+CQkJCQkgICA= 30417
+IGVucmljaA== 30418
+dW5vcmRlcmVk 30419
+aG9sZQ== 30420
+Q29uZw== 30421
+PFByb2R1Y3Q= 30422
+IEN1cnQ= 30423
+KHRoZQ== 30424
+X2xvd2Vy 30425
+IGF2b2lkaW5n 30426
+IGJ1eno= 30427
+IHZpYWJsZQ== 30428
+dWJh 30429
+LWlz 30430
+YXJlbA== 30431
+IGFjdGVk 30432
+LWRldGFpbHM= 30433
+4LiH 30434
+IFRoZW9yeQ== 30435
+IFB1bg== 30436
+IEFub255bW91cw== 30437
+Li4uIgo= 30438
+w6hyZXM= 30439
+5Y+v 30440
+IFZpc2lvbg== 30441
+X3NlbQ== 30442
+YXNoYQ== 30443
+IGNlbGVicml0eQ== 30444
+IGVuZERhdGU= 30445
+IHBvcHVsYXRl 30446
+IGN1aXM= 30447
+cXVhbnQ= 30448
+Zmxvb3I= 30449
+IGdsb2JhbGx5 30450
+IGNydWlzZQ== 30451
+IFN0YW5sZXk= 30452
+IGJpa2Vz 30453
+LmdldENvbm5lY3Rpb24= 30454
+IHBvb3JseQ== 30455
+X290aGVy 30456
+YW1waW5n 30457
+LiIpOwoK 30458
+b2Rp 30459
+X0FETUlO 30460
+LmNvbG9ycw== 30461
+IEdhbWluZw== 30462
+Pic7Cgo= 30463
+U1RSVUNU 30464
+UVI= 30465
+SURz 30466
+KGFyZ3VtZW50cw== 30467
+X2F1eA== 30468
+KEV2ZW50 30469
+X1BSSVZBVEU= 30470
+IFRyZWs= 30471
+IGRvd25sb2Fkcw== 30472
+bXV0YWJsZQ== 30473
+X1NUUlVDVA== 30474
+KHd4 30475
+IGRvbWFpbnM= 30476
+anNweA== 30477
+IFZpYWdyYQ== 30478
+Q29tbWFuZHM= 30479
+SnM= 30480
+LmNmZw== 30481
+Q29udGVudFBhbmU= 30482
+IEVkaXRUZXh0 30483
+4KWN4KQ= 30484
+QXR0YWNo 30485
+IEFSTQ== 30486
+cG9zaXRpdmU= 30487
+IEdlbmVyYXRlZA== 30488
+IHNlaXplZA== 30489
+PTo= 30490
+IGVsZWN0cm9uaWNz 30491
+IEFwcENvbXBvbmVudA== 30492
+LycsCg== 30493
+LmVxdWFsc0lnbm9yZUNhc2U= 30494
+RG9jdHJpbmU= 30495
+ZGlzaw== 30496
+IFBvbGl0aWNhbA== 30497
+Q0hP 30498
+PEY= 30499
+CWhlaWdodA== 30500
+IEJ1Zw== 30501
+Lmxl 30502
+aWto 30503
+IG1pbGxpc2Vjb25kcw== 30504
+IGNvbnN0aXR1 30505
+bWFn 30506
+Lm5s 30507
+LXJhbmdl 30508
+YW5nZ2Fs 30509
+Jyxb 30510
+cm9wb2xpdGFu 30511
+IMOc 30512
+IFVD 30513
+LmRlc2M= 30514
+LUxBU1Q= 30515
+ZnN0cmVhbQ== 30516
+aWJpbA== 30517
+IGZpZXI= 30518
+VkVSWQ== 30519
+IOuz 30520
+SVJU 30521
+X1VJ 30522
+KGFicw== 30523
+IGtuZWVz 30524
+IHJvb2tpZQ== 30525
+IFZhYw== 30526
+YXJlbmE= 30527
+Y29tbWVuZA== 30528
+LVw= 30529
+IFNVQlNUSVRVVEU= 30530
+U29mdA== 30531
+IHBhcnRpcg== 30532
+d2VhbHRo 30533
+6KaB 30534
+KGRhdGFzZXQ= 30535
+IENsaW1hdGU= 30536
+LXNob3c= 30537
+IHJlbGlhYmlsaXR5 30538
+X2NodW5r 30539
+5Luj 30540
+X3N0b2Nr 30541
+IEVYRU1QTEFSWQ== 30542
+77iP 30543
+IHbDrQ== 30544
+IHNtaWxlZA== 30545
+IGRyaWxs 30546
+LkZ1bmN0aW9u 30547
+IFNJ 30548
+IHJlZ3Jlc3Npb24= 30549
+LVg= 30550
+IEphcg== 30551
+cHJlZg== 30552
+CXN1Y2Nlc3M= 30553
+IEhpdGxlcg== 30554
+IGluc3RpbmN0 30555
+IGZlbW1lcw== 30556
+IGxvdmVy 30557
+PAo= 30558
+IG11bHRpcGxpZXI= 30559
+cmls 30560
+UmVzaXpl 30561
+IEF1dGhvcml6YXRpb24= 30562
+IEthbg== 30563
+RGlzcGF0Y2hUb1Byb3Bz 30564
+IGNyb3Bz 30565
+dG9rZW5z 30566
+ZWNu 30567
+ZW50aWFsbHk= 30568
+IElOVEVSUlVQVElPTg== 30569
+ZmFrZQ== 30570
+VW5kZWZpbmVk 30571
+IEFL 30572
+IFRlc3RDYXNl 30573
+IHJhYg== 30574
+IHRvcnJlbnQ= 30575
+IE90 30576
+QmFycw== 30577
+IGxlY3R1cmU= 30578
+IGVuam8= 30579
+IHJlc3BvbmRz 30580
+IGluZGV4ZWQ= 30581
+T2ZXb3Jr 30582
+X2NoYWlu 30583
+KSktPg== 30584
+IEJlYXV0eQ== 30585
+IGA8 30586
+IHRvdWNoaW5n 30587
+IHwtLQ== 30588
+CWZsYWc= 30589
+bm9ybWFsaXpl 30590
+IHRyYXBwZWQ= 30591
+IGVzdGFibGlzaGluZw== 30592
+L2J1aWxk 30593
+QUo= 30594
+Znk= 30595
+LXJlYWN0 30596
+YXZu 30597
+UklQVElPTg== 30598
+IGt1dA== 30599
+IEZhc2hpb24= 30600
+IEluZm9ybQ== 30601
+Y3VyaXRpZXM= 30602
+PGJ5dGU= 30603
+IFVrcmFpbg== 30604
+IHN1Zw== 30605
+IGNvbnNpc3Rpbmc= 30606
+b29kbGU= 30607
+LmN0eA== 30608
+LlRvTGlzdA== 30609
+IGNvbW1lbnRhcnk= 30610
+IHRyYW5zZmVycw== 30611
+IG5vc3Q= 30612
+aWhhZA== 30613
+IFVwcGVy 30614
+IGNvbmZ1c2luZw== 30615
+bWlzc2luZw== 30616
+LWNs 30617
+IGJvdW5kaW5n 30618
+IGNvbmdyZXNzaW9uYWw= 30619
+IHJldmVhbGluZw== 30620
+ZGg= 30621
+cnVw 30622
+IHRyZXM= 30623
+cmVwZWF0 30624
+LAoKCgo= 30625
+X3RhYw== 30626
+IGV4cGVk 30627
+R2lybA== 30628
+aG9yaXpvbnRhbA== 30629
+ICIuLi8uLi8uLi8= 30630
+KG9wdGlvbg== 30631
+IHdlaXRlcg== 30632
+CXNxbA== 30633
+ID0+ewo= 30634
+IGdhcmxpYw== 30635
+IHJlcHI= 30636
+IHJlcGxpZXM= 30637
+KHByb3A= 30638
+IHNwaXJpdHM= 30639
+IGluc3BpcmU= 30640
+IGJhc2VtZW50 30641
+LnJlamVjdA== 30642
+IGhpbnRz 30643
+IHBvbGxpbmc= 30644
+CSAK 30645
+X3JhdGluZw== 30646
+IGNhdGg= 30647
+YXZpZXI= 30648
+IGNvbXByZXNzZWQ= 30649
+IFZT 30650
+XSc= 30651
+IGp1ZGljaWFs 30652
+IFRyZW5k 30653
+dHJhaW5pbmc= 30654
+RVNUQU1Q 30655
+b2duaXRpb24= 30656
+xIE= 30657
+U0VOVA== 30658
+dmVudGlvbnM= 30659
+IGNvbnN1bHRhbnQ= 30660
+dW1waA== 30661
+IHVzZXJTZXJ2aWNl 30662
+LE5VTEw= 30663
+a2g= 30664
+RGVhcg== 30665
+X0JBRA== 30666
+aXRhdGlvbnM= 30667
+IG1ldGFwaA== 30668
+J8Op 30669
+YW5kaXNl 30670
+LWZvbnQ= 30671
+LmNoYXJ0 30672
+IHNn 30673
+X0NvbnRyb2xsZXI= 30674
+LmpwZWc= 30675
+IFVMT05H 30676
+CWdhbWU= 30677
+KHNz 30678
+IE1hag== 30679
+CWdv 30680
+IFNhZA== 30681
+IEJlcmc= 30682
+IE1pbmU= 30683
+UGFjaw== 30684
+IHJlc2lzdGFudA== 30685
+IFJPTQ== 30686
+IHBlZw== 30687
+IFN0YW5mb3Jk 30688
+IFlhaG9v 30689
+IHNjYWxlZA== 30690
+IGxhbg== 30691
+PVtd 30692
+Ii8+PC8= 30693
+IHBsb3Rz 30694
+LioK 30695
+IHRyYXZlbGVk 30696
+IE9zY2Fy 30697
+Vkw= 30698
+IGxpbmtpbmc= 30699
+IHRpcmVz 30700
+ICcqJw== 30701
+IEJ1ZmZlcmVk 30702
+ZXJp 30703
+ICoqKio= 30704
+IG92ZXJsb29r 30705
+Lk5vbg== 30706
+IHLDqXM= 30707
+IGVneQ== 30708
+5bCP 30709
+IGF0dGFja2Vy 30710
+CQkJCQkJCQkJCQkJCQkJ 30711
+LnN5bmM= 30712
+QVNDQURF 30713
+R3JvdW5k 30714
+IGRlY2F5 30715
+IFRvbg== 30716
+IGpld2Vscnk= 30717
+IGJ5cGFzcw== 30718
+IG1lbWJy 30719
+Uk5B 30720
+PFN5c3RlbQ== 30721
+IE1lZGljYXJl 30722
+KG5ldA== 30723
+b3Np 30724
+SEI= 30725
+REVD 30726
+e0VJRg== 30727
+X2ZpbGw= 30728
+IHRyYXZlbGxpbmc= 30729
+b2JzZXJ2ZXI= 30730
+IGNvbnN1bHRpbmc= 30731
+UkVBVA== 30732
+UGhhc2U= 30733
+KGlp 30734
+IFNVTQ== 30735
+Pg0NCg== 30736
+IHN1ZA== 30737
+CWJhY2tncm91bmQ= 30738
+IHNjaG9sYXJz 30739
+LW11dGVk 30740
+YXLDoQ== 30741
+ID09PT09 30742
+IF9fX18= 30743
+Q3JlYXQ= 30744
+ZW5ldmVy 30745
+L3dw 30746
+IFZQTg== 30747
+RXJyb3JDb2Rl 30748
+KV0sCg== 30749
+KGJ1aWxkZXI= 30750
+IEVuZW15 30751
+U2Vuc29y 30752
+dXNh 30753
+IHRyaWdnZXJz 30754
+IHBsYXlvZmZz 30755
+X1JFUQ== 30756
+ICh+ 30757
+IEJhcnJ5 30758
+IHBlcm1hbmVudGx5 30759
+IFJVTg== 30760
+IGJ1cmU= 30761
+LkZhdGFsZg== 30762
+IGNoaWNr 30763
+CXBhbmlj 30764
+cHNp 30765
+b2th 30766
+6YCJ 30767
+Pls= 30768
+IHVuZGVyc3RhbmRz 30769
+IEp1bmlvcg== 30770
+IElORk8= 30771
+PW15c3FsaQ== 30772
+dXN0YWlu 30773
+LXNvdXJjZQ== 30774
+c2Vydg== 30775
+IENSRUFURQ== 30776
+LmF1 30777
+IHNlbGxz 30778
+ICAKICAK 30779
+RXVyb3Bl 30780
+enc= 30781
+cHJlaA== 30782
+IE5TQQ== 30783
+IHh5 30784
+4Li0 30785
+IEJleW9uZA== 30786
+SW5zdGVhZA== 30787
+Tm9uUXVlcnk= 30788
+IGFyaXNl 30789
+IGF2b2lkZWQ= 30790
+LmVtcGxhY2U= 30791
+X21vZGVscw== 30792
+fSksCg== 30793
+IGhpZA== 30794
+ICZf 30795
+LnBvaW50cw== 30796
+LmdldFdpZHRo 30797
+LkV4ZWM= 30798
+IC8vLy8= 30799
+IFNlc3Npb25z 30800
+Li4uXA== 30801
+IENvbG9tYg== 30802
+IGFjY2VsZXJhdGlvbg== 30803
+cmVzdG9yZQ== 30804
+IGlsZQ== 30805
+b2JpYw== 30806
+PE5vZGU= 30807
+IERY 30808
+IEJlc2lkZXM= 30809
+LmFnZQ== 30810
+IENvbnRhaW5z 30811
+TmF0aW9uYWw= 30812
+IEltcGxlbWVudGF0aW9u 30813
+IGVmZmlj 30814
+IFJN 30815
+SHk= 30816
+IFdlZGRpbmc= 30817
+b2tpZXM= 30818
+IHJlY3Vyc2l2ZQ== 30819
+IHByb3NlY3V0b3Jz 30820
+LlNlbGVjdGlvbg== 30821
+IEZvcm11bGE= 30822
+QmVlbkNhbGxlZA== 30823
+W2lp 30824
+IEZyYW4= 30825
+IHRyYWdlZHk= 30826
+X0ZFQVRVUkU= 30827
+mag= 30828
+Y29tcGFzcw== 30829
+IEJo 30830
+PwoKCg== 30831
+LndyaXRlcg== 30832
+IEhvdXI= 30833
+RGJDb250ZXh0 30834
+aW92 30835
+YW1vbg== 30836
+cmVwcg== 30837
+6YM= 30838
+CWZp 30839
+J11d 30840
+IERyeQ== 30841
+LnJv 30842
+IE9ic2Vydg== 30843
+5qCH 30844
+Rm9ybWVy 30845
+IEJhbGFuY2U= 30846
+CWpzb24= 30847
+IHByenk= 30848
+SVNT 30849
+KHNvY2s= 30850
+IExJTkU= 30851
+IGRlY2U= 30852
+IGFsbHk= 30853
+IHRlbmRlbmN5 30854
+RnVu 30855
+IHNjaGVtZXM= 30856
+IGludGVydmVu 30857
+5piO 30858
+IGFkdmVyc2U= 30859
+cXVvdGVsZXY= 30860
+IHNhY3JpZmlj 30861
+X3NpZGU= 30862
+IG11dGV4 30863
+QUdJQw== 30864
+IG9jY3VycmluZw== 30865
+IENvbW11bmljYXRpb24= 30866
+dW1hcg== 30867
+57yW 30868
+IFRyZWF0bWVudA== 30869
+LnBlcnNvbg== 30870
+IExD 30871
+IGVjaA== 30872
+KCgi 30873
+IERpc2Vhc2U= 30874
+w6Rk 30875
+IEFa 30876
+LkFjY291bnQ= 30877
+IGNvbnRpbnVvdXNseQ== 30878
+RU5ESU5H 30879
+IFJFVFVSTg== 30880
+LXN0cmluZw== 30881
+LmZpbGVuYW1l 30882
+c3ludGhlc2l6ZQ== 30883
+UmVzcG9uZGVy 30884
+KG9wdHM= 30885
+cmVncw== 30886
+IG51ZXN0 30887
+UGVlcg== 30888
+Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0= 30889
+IGdhdWdl 30890
+IEtpbg== 30891
+LnNjaGVtYQ== 30892
+IGFycmFuZ2U= 30893
+IEJsYWtl 30894
+X1R5cGVJbmZv 30895
+Q292ZXI= 30896
+IEhhbXBzaGlyZQ== 30897
+UGFwZXI= 30898
+LWlubmVy 30899
+dXRpbGl0eQ== 30900
+IGNyb3Nzb3JpZ2lu 30901
+Rk9S 30902
+IGlnbm9yaW5n 30903
+IERE 30904
+YXZhbg== 30905
+IHRyYWRpdGlvbnM= 30906
+IGdldFN0cmluZw== 30907
+IGV0aGljcw== 30908
+IE1hdGVyaWFscw== 30909
+REVTQw== 30910
+IGVuenlt 30911
+aW9sZXQ= 30912
+IENoaXA= 30913
+IE1jRG9uYWxk 30914
+IG5lcnZl 30915
+54Q= 30916
+Iild 30917
+5rGC 30918
+IFN1Z2Fy 30919
+X1NJTQ== 30920
+anBlZw== 30921
+IGRpc2NyZXRpb24= 30922
+IFRO 30923
+Ym92ZQ== 30924
+IE1pbmltdW0= 30925
+IEZvcm1Hcm91cA== 30926
+IHdvcmtmb3JjZQ== 30927
+IEV4ZWN1dGlvbg== 30928
+ZXJyZXI= 30929
+CSAgICAJ 30930
+IHByZXNjcmliZWQ= 30931
+LlRleHRBbGlnbg== 30932
+T1BFTg== 30933
+IFBC 30934
+aW1pdHk= 30935
+IEV4dGVybmFs 30936
+wrBD 30937
+IEFwcGxpY2F0aW9uQ29udHJvbGxlcg== 30938
+IGJhcnI= 30939
+aW1wbGljaXQ= 30940
+X2RvdA== 30941
+IENvbG9u 30942
+Q09MT1I= 30943
+LlByb2plY3Q= 30944
+Kjwv 30945
+LXhs 30946
+IG9zYw== 30947
+KHBhdHRlcm4= 30948
+Jyl9Cg== 30949
+c3VjY2Vzc2Z1bA== 30950
+YWxvZw== 30951
+U3R1ZGVudHM= 30952
+XXN0cmluZw== 30953
+YW50b24= 30954
+YXR0aQ== 30955
+Y2hlbWljYWw= 30956
+LmluZg== 30957
+KGRy 30958
+OlVJQ29udHJvbFN0YXRl 30959
+dG9JbnQ= 30960
+XTwv 30961
+0LDQtdC8 30962
+IMW+ 30963
+LkFjdGlvbkxpc3RlbmVy 30964
+LlNFVkVSRQ== 30965
+IFNhbHY= 30966
+X1RSQU4= 30967
+L2ludGVybmFs 30968
+IHdlbGNvbWVk 30969
+LmNvbW1lbnQ= 30970
+bXV0YXRpb24= 30971
+IEZBUQ== 30972
+Lm9uZQ== 30973
+IExBQg== 30974
+In19 30975
+IFJvbA== 30976
+aWV2ZWQ= 30977
+IGFkdmVudHVyZXM= 30978
+IGZ1bmVyYWw= 30979
+IHNwb3VzZQ== 30980
+KG9wZW4= 30981
+IFJlYWR5 30982
+IHRvdXJpc20= 30983
+YWRpbg== 30984
+X2ZhY2U= 30985
+4oKB 30986
+IG1pZ3JhbnRz 30987
+IFB1cmNoYXNl 30988
+Y29yZA== 30989
+IE9VVFBVVA== 30990
+KSkNCg0K 30991
+U2VndWU= 30992
+dGFicw== 30993
+IGRvdHM= 30994
+IG5haWw= 30995
+Ym9ybmU= 30996
+IGRlc2lyZXM= 30997
+IHByZXZlbnRlZA== 30998
+J109PQ== 30999
+IHRpbWVseQ== 31000
+SUNB 31001
+U2Nhbm5lcg== 31002
+IEx1Y2Fz 31003
+IGdpdGh1Yg== 31004
+J11bXQ== 31005
+ZGlh 31006
+Y29ub21pYw== 31007
+IGRpZXNlcg== 31008
+dW5kZXJz 31009
+LkhhbmRsZXI= 31010
+PyIs 31011
+LmRhdGFi 31012
+IGFkdmlzZQ== 31013
+LmFuaW1hdGlvbg== 31014
+IG92ZXJoZWFk 31015
+IG9ic3RhY2xlcw== 31016
+X2pvaW4= 31017
+IG3DqQ== 31018
+RmxhdA== 31019
+LmRpc3Bvc2U= 31020
+IEV4cGVjdGVk 31021
+IGZsZXc= 31022
+IGVtYm9k 31023
+X3NsdWc= 31024
+IG5hbWVseQ== 31025
+IHdpdG5lc3NlZA== 31026
+c29saWQ= 31027
+LmxlZ2VuZA== 31028
+UXVhbA== 31029
+X3N1cmZhY2U= 31030
+44Op 31031
+QW1lcmljYQ== 31032
+IGFmZmlsaWF0ZXM= 31033
+IFByb3M= 31034
+X2V4dGVuc2lvbg== 31035
+YmluZGluZw== 31036
+U1RBTEw= 31037
+LnJlYWR5 31038
+IGNvcHlpbmc= 31039
+IEhlbmNl 31040
+IGRpc2NvcmQ= 31041
+X3NoaXA= 31042
+UHJvcGVydHlOYW1l 31043
+CQkgICAgICAgICAgIA== 31044
+IGFjaGlldmluZw== 31045
+IEJlYw== 31046
+Wmlw 31047
+U29tZXRpbWVz 31048
+44GL 31049
+IGNvbnRyYQ== 31050
+IHB1bmlzaA== 31051
+IGluc3VsaW4= 31052
+IGRpc2FwcGVhcg== 31053
+X2VudW0= 31054
+LmF1dA== 31055
+IGhhc2F0dHI= 31056
+YWZmZWN0ZWQ= 31057
+c2hl 31058
+JHRhYmxl 31059
+a3Np 31060
+IGxhY2tpbmc= 31061
+IGRpc2NvdW50cw== 31062
+U3RtdA== 31063
+IEFyZ2VudGluYQ== 31064
+IHVucGFjaw== 31065
+IFJvdXRlZEV2ZW50QXJncw== 31066
+ICc/ 31067
+aW50ZXJvcA== 31068
+IHNvZmE= 31069
+IGR5bg== 31070
+IEdyYWNl 31071
+IGludGVncmF0ZQ== 31072
+2YM= 31073
+IGRlbGF5cw== 31074
+IEltcGxlbWVudA== 31075
+UHJvb2Y= 31076
+IGFwcGxpY2FudHM= 31077
+IExlYXRoZXI= 31078
+7Ja0 31079
+IGVuam95YWJsZQ== 31080
+U3Bpbm5lcg== 31081
+L3o= 31082
+IGZvYW0= 31083
+IExhYm9yYXRvcnk= 31084
+IHJlc2VhcmNoZXI= 31085
+IENocmlzdGlhbml0eQ== 31086
+IGN1c3RvbWl6ZQ== 31087
+IGNpcGhlcg== 31088
+IGRvZA== 31089
+IHPDsw== 31090
+QEVudGl0eQ== 31091
+T05MWQ== 31092
+aW52ZW50b3J5 31093
+IGNvbmNsdWRl 31094
+IGN1ZW50YQ== 31095
+IENvaGVu 31096
+LWluY29tZQ== 31097
+bWJI 31098
+bWVudGF0aW9u 31099
+IHZlcnc= 31100
+dWRw 31101
+QU1M 31102
+LmNvbWJvQm94 31103
+Zmg= 31104
+am9icw== 31105
+RmlsZVN5bmM= 31106
+IEJhcmJhcmE= 31107
+IFNjYW4= 31108
+Y3JlZW5zaG90 31109
+IE9ydGg= 31110
+LnZpZXdEaWRMb2Fk 31111
+IEFSUkFZ 31112
+LEA= 31113
+L2ludA== 31114
+R2VuZXJhdGU= 31115
+IGRlbW9uc3RyYXRlcw== 31116
+IFplbmQ= 31117
+5YiX 31118
+CXZvbGF0aWxl 31119
+PXI= 31120
+IGZt 31121
+CWJ1ZmZlcg== 31122
+ZW5hdGU= 31123
+LkNvbWJpbmU= 31124
+IG1pc2M= 31125
+Y2hlbWFz 31126
+IHB1cmVseQ== 31127
+IGdsVmVydGV4 31128
+LlJlc3Q= 31129
+IHJlY2FsbGVk 31130
+IGZyZWVs 31131
+IHNxdWU= 31132
+VHJhY2tlcg== 31133
+IFBocA== 31134
+IERpc3RhbmNl 31135
+IGJlYXN0 31136
+Q29tcGxleA== 31137
+IGNvbnNpZGVycw== 31138
+572R 31139
+dHJpYnV0aW9u 31140
+IGNvbXBsaW1lbnQ= 31141
+X2xpbmVubw== 31142
+IE11dGFibGU= 31143
+IHVuZGVm 31144
+IEdlbQ== 31145
+IGNvbXBvdW5kcw== 31146
+LnV1aWQ= 31147
+IGFub255bQ== 31148
+IHN0YWlycw== 31149
+IERiU2V0 31150
+d29ydA== 31151
+IFNlbnM= 31152
+LkJlZm9yZQ== 31153
+IGVuZGZvcmVhY2g= 31154
+IFRvZ2V0aGVy 31155
+YXRpbGl0eQ== 31156
+IG1vaXN0dXJl 31157
+LSR7 31158
+KFRlc3Q= 31159
+VEI= 31160
+bXVzaWM= 31161
+IGluc2lzdA== 31162
+IGhlYWRsaW5l 31163
+LkFuZA== 31164
+UEFUQ0g= 31165
+IFByZXBhcmU= 31166
+IHN3aXRjaGVz 31167
+KnA= 31168
+IFll 31169
+X2Ficw== 31170
+LmhhbmRsZXI= 31171
+IGFzc2lnbm1lbnRz 31172
+UHJlZmVyZW5jZQ== 31173
+RU5USVRZ 31174
+IHBpcGVz 31175
+IEFsZXJ0RGlhbG9n 31176
+b2dyYXBoaWNhbA== 31177
+IHBhdGlv 31178
+IHdlYnBhY2s= 31179
+YnBz 31180
+TmF2TGluaw== 31181
+Lk51bWJlcg== 31182
+IEFybW9y 31183
+IFBldGVycw== 31184
+IERlc2M= 31185
+ZHVpbm8= 31186
+IEljb25z 31187
+LmdldEhlaWdodA== 31188
+IHRleHRWaWV3 31189
+CU5VTEw= 31190
+YWxsb2NhdGU= 31191
+fSR7 31192
+IFByaXpl 31193
+LW51bQ== 31194
+Lk1vdmU= 31195
+6L6T5YWl 31196
+LmNhbWVyYQ== 31197
+UHJvYmxlbQ== 31198
+CXR5cGVkZWY= 31199
+KHN0b3Jl 31200
+IERJU0NMQUlNRUQ= 31201
+IHN1YnN0YW50aWFsbHk= 31202
+RkZG 31203
+IGVwc2lsb24= 31204
+IGluZXF1YWxpdHk= 31205
+X2NoaWxkcmVu 31206
+5LiH 31207
+cmVsdQ== 31208
+UGllY2U= 31209
+YW50cnk= 31210
+YmFiZWw= 31211
+dmV0aWNh 31212
+IHN1cnZleXM= 31213
+IGRldGVjdG9y 31214
+CWFyZ3M= 31215
+LlNlbGVjdGVkVmFsdWU= 31216
+IGludGVyZmVyZW5jZQ== 31217
+Li4uKQo= 31218
+LlNUUklORw== 31219
+IFR5bGVy 31220
+IENhdGFsb2c= 31221
+VmVydGljZXM= 31222
+IFByb2plY3Rz 31223
+IExlYmFu 31224
+LiIpCgo= 31225
+Lmtlcm5lbA== 31226
+IHJpZGVz 31227
+IE11dA== 31228
+YW50aA== 31229
+0L7RgNC8 31230
+ZW5uaWFs 31231
+LnRhc2tz 31232
+LnNldFByb3BlcnR5 31233
+YXRlZ29yaQ== 31234
+5pyA 31235
+L2Nvbg== 31236
+YnJhY2U= 31237
+IE5TRXJyb3I= 31238
+J10pKTsK 31239
+bGlzdGVk 31240
+IFByZXZpZXc= 31241
+QWN0aXZhdGU= 31242
+IGN5Y2w= 31243
+LWFjdGl2ZQ== 31244
+aGFk 31245
+VG9v 31246
+IHJlZ2lzdA== 31247
+bGljYWw= 31248
+IHBvZXRyeQ== 31249
+SW1wb3J0cw== 31250
+77yB77yB 31251
+Ojw= 31252
+IGNoYXJt 31253
+IENvdW4= 31254
+b2xsaWRlcg== 31255
+IGh3 31256
+fWAK 31257
+PWFyZ3M= 31258
+IE5ldXJv 31259
+aXRpY2Fs 31260
+aWVuZW4= 31261
+IERvdA== 31262
+X09OTFk= 31263
+RE4= 31264
+IFBsYXlTdGF0aW9u 31265
+IHN0ZWVw 31266
+IHByYWN0aWNhbGx5 31267
+IGFwcGxpY2FudA== 31268
+IGFyb20= 31269
+YW5pYw== 31270
+CWRpc3BsYXk= 31271
+IHRlcm1pbmF0ZWQ= 31272
+IGNsYXJpdHk= 31273
+IE1lbnVJdGVt 31274
+IEt1cg== 31275
+aWpl 31276
+X3dlZWs= 31277
+KGRpY3Q= 31278
+X3JlY29yZHM= 31279
+IENvc3Rh 31280
+IGtldA== 31281
+RXh0ZW5zaW9ucw== 31282
+IG5ldWtlbg== 31283
+aW5zaQ== 31284
+X2luYw== 31285
+IOaW 31286
+IGVpbmY= 31287
+IFJpc2s= 31288
+IGVsZXZhdGVk 31289
+cGVycw== 31290
+VURB 31291
+IEtO 31292
+IGxpbmVk 31293
+IE1vcm0= 31294
+KTsKCgoK 31295
+Pn0K 31296
+cGxhaW50 31297
+Z2V0VGV4dA== 31298
+IGluZGl2aWR1YWxseQ== 31299
+IGNoZWNrYm94 31300
+VVk= 31301
+IExhbWI= 31302
+IGR5c2Z1bmN0aW9u 31303
+IExhcg== 31304
+4LA= 31305
+IENyZWF0aW5n 31306
+Jyk7CgoK 31307
+IlRoZXk= 31308
+bG9jYXRpb25z 31309
+X0NPUkU= 31310
+SW50ZXJhY3Rpb24= 31311
+dW1ibmFpbHM= 31312
+IFBhcnRuZXI= 31313
+YnJpdA== 31314
+IGxlc3Nlcg== 31315
+IFNsb3Q= 31316
+c2V0QXR0cmlidXRl 31317
+IFdhdmU= 31318
+LnBv 31319
+L3N0b3Jl 31320
+IGJyb3dzaW5n 31321
+X3Bk 31322
+c3VtZQ== 31323
+c2Vk 31324
+Q3VydmU= 31325
+IHBsYXNtYQ== 31326
+IHN1c3BpY2lvdXM= 31327
+7J24 31328
+IEJhaA== 31329
+IEV4cGxpY2l0 31330
+X0ND 31331
+LkNsaWVudFNpemU= 31332
+XFZpZXc= 31333
+IHN1YnN0aXQ= 31334
+bG9vbg== 31335
+IEdBTUU= 31336
+IEJyaWQ= 31337
+m+W7ug== 31338
+X1VzZXI= 31339
+IHNxdWFyZXM= 31340
+Zm9uZQ== 31341
+IHNhY3JlZA== 31342
+dWdocw== 31343
+XWludGVyZmFjZQ== 31344
+IFRocm93 31345
+IEtpcms= 31346
+IGVtcGlyZQ== 31347
+IGFzc2Vzc2Vk 31348
+VGF4 31349
+IEhlYXZlbg== 31350
+LWJ1ZmZlcg== 31351
+X1NUQVRJQw== 31352
+w6luw6k= 31353
+LWJvcmRlcmVk 31354
+IHB1bmN0 31355
+KG1vZGU= 31356
+IGtlaW5l 31357
+U2VudA== 31358
+IENhbGN1bA== 31359
+IEV2ZQ== 31360
+IHN0eWxpc2g= 31361
+IG9pbHM= 31362
+LlRlc3RDYXNl 31363
+IHRyYWRlbWFyaw== 31364
+IGxpdGVyYXJ5 31365
+IGNvbmNlbnRyYXRpb25z 31366
+IFJlbGF0aW9ucw== 31367
+KENsYXNz 31368
+IHN0ZGlu 31369
+IHbDpg== 31370
+YmFja3Vw 31371
+LlZFUlNJT04= 31372
+LkF1dG9TY2FsZURpbWVuc2lvbnM= 31373
+c3RhcnRlcg== 31374
+VHJhbnNhY3Rpb25hbA== 31375
+LXBhbmVs 31376
+U3R1ZGlv 31377
+a2M= 31378
+IENoYW1iZXI= 31379
+IFNwaWVs 31380
+IHJobw== 31381
+2KfZhA== 31382
+ISc= 31383
+LkF0dHJpYnV0ZXM= 31384
+IG11cmRlcmVk 31385
+YXBldXRpYw== 31386
+IGludGltYXRl 31387
+IHRleHRGaWVsZA== 31388
+IEJ1ZmZhbG8= 31389
+ZHVtbXk= 31390
+IiU= 31391
+IExpYmVydHk= 31392
+b2Jhcg== 31393
+IFRhbms= 31394
+IFBvcHVsYXI= 31395
+ZXJ2aXNvcg== 31396
+IEluaXRp 31397
+IE1hbGw= 31398
+IFByaW9y 31399
+Q0FQ 31400
+IENsYXk= 31401
+IENlcnRpZmljYXRl 31402
+LkxvY2s= 31403
+LXN0cmlw 31404
+LWRyaXZlbg== 31405
+L2FsbA== 31406
+IE1lc3NhZ2VCb3hCdXR0b25z 31407
+X1NFQ1JFVA== 31408
+X3Bi 31409
+IHJhdHM= 31410
+4KS+4KQ= 31411
+IG50 31412
+LlJvdXRlcg== 31413
+X3RvcGlj 31414
+IHRlbm5pcw== 31415
+IFBVQkxJQw== 31416
+IEFjdGl2YXRlZFJvdXRl 31417
+ICcsCg== 31418
+IGNvc3R1bWU= 31419
+IGpva2Vz 31420
+LkhhbmRsZQ== 31421
+CWJ5dGU= 31422
+IGZsYXZvcnM= 31423
+KGNj 31424
+IHBlcnNvbmFz 31425
+CWltYWdl 31426
+IE5hemk= 31427
+IGdyYW1tYXI= 31428
+IMO6bHQ= 31429
+IHZhbHZl 31430
+IHZpYw== 31431
+IFJhY2hlbA== 31432
+X2ludmFsaWQ= 31433
+UHJlZnM= 31434
+c3RkaW50 31435
+KHJvdXRl 31436
+IGh0bWxzcGVjaWFsY2hhcnM= 31437
+IHBlb3BsZXM= 31438
+cGxpbmU= 31439
+IG52 31440
+IFF1YW50 31441
+b3BwZXJz 31442
+IGN1cnJlbnRVc2Vy 31443
+IENhdGFs 31444
+IHJlY29uYw== 31445
+IGNvbmp1bmN0aW9u 31446
+bHg= 31447
+YW1idXJn 31448
+IGluZmx1ZW50aWFs 31449
+ZGFuZ2Vy 31450
+aW5kZXJz 31451
+ICVAIiw= 31452
+LmNvbmZpZ3VyYXRpb24= 31453
+b3NvbWU= 31454
+LmlkZW50aXR5 31455
+IHBpY2tlcg== 31456
+bm9zdA== 31457
+IERJWQ== 31458
+QXVndXN0 31459
+YWJsbw== 31460
+TGVhZg== 31461
+IFJlY28= 31462
+Y2tv 31463
+RE9D 31464
+IEhlcm0= 31465
+OmFueQ== 31466
+IEludGVydmlldw== 31467
+IFRleA== 31468
+eGZl 31469
+KHdvcms= 31470
+IGxlYXA= 31471
+SGVhZGluZw== 31472
+IHF1YXJ0ZXJz 31473
+XEJ1bmRsZQ== 31474
+cmVi 31475
+UGVyaGFwcw== 31476
+IEdtYkg= 31477
+QmlydGg= 31478
+CXN1bQ== 31479
+IFdhdHNvbg== 31480
+Lm5pbA== 31481
+56E= 31482
+e30KCg== 31483
+aWNhaWQ= 31484
+R2V0dGVy 31485
+Im5hbWU= 31486
+ICINCg== 31487
+X25vbmU= 31488
+em0= 31489
+YWN1dGU= 31490
+dWVzdG8= 31491
+IHNvdXM= 31492
+IHJlYnVpbGQ= 31493
+IG5ld3NwYXBlcnM= 31494
+IEhheg== 31495
+IGtpdHM= 31496
+aWZv 31497
+Qmx1cg== 31498
+IHN1aXRlZA== 31499
+LUlu 31500
+4K8= 31501
+IEtlaXRo 31502
+IE5vcndheQ== 31503
+SU5JVA== 31504
+aXJlY2Npb24= 31505
+aWV0aWVz 31506
+X3VzYWdl 31507
+IERvdWc= 31508
+cmlzZQ== 31509
+IHRyaWxsaW9u 31510
+aW1pdGVk 31511
+IFJFTA== 31512
+YWxpYw== 31513
+IGNyaXRpY2l6ZWQ= 31514
+dGhlb3JlbQ== 31515
+IGNlYXNl 31516
+IHNpZGV3 31517
+IFRlcnJ5 31518
+IHN1YnNpZGk= 31519
+IGZpcm1seQ== 31520
+IGF3cw== 31521
+IGhvdHQ= 31522
+IGRyZXNzaW5n 31523
+YmFkZ2U= 31524
+IEFwcGxpY2F0aW9ucw== 31525
+6L+U5Zue 31526
+IGxhdWdoZWQ= 31527
+IGhvYmJ5 31528
+IG11c2ljaWFucw== 31529
+ICou 31530
+LnBsYWNlaG9sZGVy 31531
+IGNvdW50ZXJz 31532
+IENhcGl0b2w= 31533
+U0RL 31534
+IGhlbG1ldA== 31535
+YW5kYm94 31536
+cXVpdA== 31537
+IGNyaW1pbmFscw== 31538
+IHRlZW5hZ2Vy 31539
+KHVwZGF0ZQ== 31540
+R2w= 31541
+LnNlbGVjdGlvbg== 31542
+IGRpc2NoYXJnZQ== 31543
+IHByZXNlbnRpbmc= 31544
+dWZhY3R1cmVy 31545
+X1VOS05PV04= 31546
+IHN0cmVzc2Vk 31547
+5Zmo 31548
+UHJvdG8= 31549
+X2NvcnJlY3Q= 31550
+aGF1cw== 31551
+IHJlbm92 31552
+IGZpcmVhcm1z 31553
+IHRlY2huaWNhbGx5 31554
+LWJyb3dzZXI= 31555
+IGNhbmR5 31556
+U3Ryb2tl 31557
+IGV4ZWN1dG9y 31558
+IG9jY3VycmVuY2U= 31559
+IElQdg== 31560
+X0lOVEVSRkFDRQ== 31561
+IFJldHJpZXZl 31562
+LmJhZA== 31563
+RXhjaGFuZ2U= 31564
+TmF2YmFy 31565
+IEtpZA== 31566
+KGdldEFwcGxpY2F0aW9uQ29udGV4dA== 31567
+X1NUT1A= 31568
+IEJvc3M= 31569
+TGlzdGVuZXJz 31570
+IHNob290ZXI= 31571
+IEFsYg== 31572
+w6RjaA== 31573
+IHBpeA== 31574
+LmtleUNvZGU= 31575
+YWxvbmU= 31576
+IGFic3VyZA== 31577
+IEN1bQ== 31578
+IE5ld3RvbnNvZnQ= 31579
+aWt0 31580
+IGxhdWdoaW5n 31581
+IGNhcGl0YWxpc20= 31582
+cmVlTm9kZQ== 31583
+VHg= 31584
+X1FVRVJZ 31585
+LlNsZWVw 31586
+KGxvZ2lu 31587
+V2ViRWxlbWVudA== 31588
+IGNlbGVicmF0aW5n 31589
+IGRlcHJlY2F0ZWQ= 31590
+IG1hYXI= 31591
+IGFydGlzdGlj 31592
+X0FTU09D 31593
+IEJvcmRlclJhZGl1cw== 31594
+CXdw 31595
+IHN1cnZpdm9ycw== 31596
+SW5uZXI= 31597
+LXJlZA== 31598
+IHByb3NlY3V0aW9u 31599
+X3Bw 31600
+KCI8Lw== 31601
+IF49 31602
+IGxhbQ== 31603
+IFRyYWRpbmc= 31604
+ZmxhcmU= 31605
+RGV0ZWN0b3I= 31606
+TUY= 31607
+IEVtZXJnZW5jeQ== 31608
+IEVhZ2xlcw== 31609
+cXVhZA== 31610
+IEluY3Jl 31611
+cGxpYW5jZQ== 31612
+XE1pZ3JhdGlvbg== 31613
+IHVwZ3JhZGVz 31614
+Q1BV 31615
+YWdnaQ== 31616
+ZnByaW50Zg== 31617
+aWdpb24= 31618
+IGJlYXV0aWZ1bGx5 31619
+IGRyaWVk 31620
+X0hJR0g= 31621
+IGdwaW8= 31622
+TVND 31623
+IERlcHV0eQ== 31624
+IERlY2w= 31625
+IHRyZWFzdXJl 31626
+c2dpdmluZw== 31627
+X3NpZGViYXI= 31628
+IGFwYXJ0bWVudHM= 31629
+IFdy 31630
+IGJvYXRz 31631
+IGJvcg== 31632
+Lmxhbmd1YWdl 31633
+IFVp 31634
+bGl0 31635
+ZnJt 31636
+YW5jaWVz 31637
+IG1hc3Nlcw== 31638
+IEFzc2lnbg== 31639
+IFBPTA== 31640
+IG1hcERpc3BhdGNoVG9Qcm9wcw== 31641
+IGJyYWNrZXQ= 31642
+IFBhcA== 31643
+IENp 31644
+IEludG8= 31645
+IHRlYW1tYXRlcw== 31646
+IGZvcmFsbA== 31647
+dWx1aQ== 31648
+IENhcm4= 31649
+X0lOUw== 31650
+YXppb25p 31651
+Y2Vw 31652
+IHRvdXJpc3Rz 31653
+LWJsdWU= 31654
+IExlZA== 31655
+IHBlbmV0 31656
+IEZv 31657
+IGltYWdpbmc= 31658
+cHJh 31659
+IHNsYXZlcw== 31660
+b2xlcmFuY2U= 31661
+IGluY29ycG9yYXRlZA== 31662
+Jiw= 31663
+dWFibHk= 31664
+IEthcA== 31665
+WG1sRWxlbWVudA== 31666
+IE11ZWxsZXI= 31667
+Q2hhbmdlTGlzdGVuZXI= 31668
+IEhvbGlkYXk= 31669
+CSAgICAgICAgIA== 31670
+RmxleA== 31671
+CVVzZXI= 31672
+Il0pKQ== 31673
+X3N1Ym1pdA== 31674
+LmJvbGQ= 31675
+IGxvY2tz 31676
+IEN1YmE= 31677
+dWRzb24= 31678
+SG9vaw== 31679
+IFdhcm5lcg== 31680
+X3N0YXI= 31681
+Ij0+JA== 31682
+IGNvbW1h 31683
+dW5jaGVja2Vk 31684
+Z3JhcGhpY3M= 31685
+cm9ycw== 31686
+R1JPVU5E 31687
+KHB1YmxpYw== 31688
+IGN1c3RvbWl6ZWQ= 31689
+IEFya2Fuc2Fz 31690
+IFJldw== 31691
+IGV4cGlyYXRpb24= 31692
+15U= 31693
+IEN1bA== 31694
+IG5vbnM= 31695
+LkZpbHRlcg== 31696
+IHNlbmF0b3I= 31697
+X2RlZmluaXRpb24= 31698
+YXNoaW5ndG9u 31699
+eW1waA== 31700
+L0o= 31701
+IGZ1c2U= 31702
+cmFtaWQ= 31703
+IFN1cHBsaWVy 31704
+IGF1dG9jb21wbGV0ZQ== 31705
+IH0pLA== 31706
+LiIKCgo= 31707
+X2Z1bmN0aW9ucw== 31708
+CXRv 31709
+LmV2YWw= 31710
+IFRPYmplY3Q= 31711
+UmVmZXJlbmNlcw== 31712
+IGhlYXRlZA== 31713
+SEFM 31714
+ICkpfQo= 31715
+fSQ= 31716
+IEJhcnI= 31717
+X1VOSVQ= 31718
+KyQ= 31719
+IGdldFZhbHVl 31720
+aXBlZA== 31721
+Y2hpZWQ= 31722
+KHZt 31723
+Y3Vl 31724
+X2ludGVnZXI= 31725
+X2NvdXJzZQ== 31726
+dGhpcmQ= 31727
+IHJldmlzZWQ= 31728
+KiovCg== 31729
+X0RJUkVDVA== 31730
+T3V0T2Y= 31731
+KCIo 31732
+IEZlZWw= 31733
+IHJlYXNz 31734
+IHN1YnRpdGxl 31735
+cGVyaQ== 31736
+bmY= 31737
+IGVuam95cw== 31738
+IHRyZWF0cw== 31739
+KXRoaXM= 31740
+LXRhYnM= 31741
+YW5jZXJz 31742
+IGNvbnRpbmVudA== 31743
+IGNhcmRpbw== 31744
+U2Vy 31745
+LnF1ZXN0aW9u 31746
+IHBocmFzZXM= 31747
+VmFsaWRhdG9ycw== 31748
+IHBvcHVs 31749
+IGzDrQ== 31750
+c29uZw== 31751
+X0lOVEVSTkFM 31752
+IGFkdmlzZXI= 31753
+IHB1eno= 31754
+IGFtYml0aW91cw== 31755
+IFRvYg== 31756
+IERQ 31757
+IHByZXNpZGVuY3k= 31758
+IHN1cnJlbmRlcg== 31759
+IHdhdGNoZXM= 31760
+X2JpbmFyeQ== 31761
+IFNvb24= 31762
+IGNhbmFkYQ== 31763
+KCIiKQo= 31764
+XT0n 31765
+IEJyYW5kb24= 31766
+ZXBzaWxvbg== 31767
+cnc= 31768
+LmFkZENoaWxk 31769
+LkNvcHk= 31770
+UHJpbmNpcGFs 31771
+UGhvdG9z 31772
+IG1hcmdpbmFs 31773
+IGJhc2ljcw== 31774
+ZWluZw== 31775
+TXVzdA== 31776
+X1N0cmluZw== 31777
+IG9sZQ== 31778
+TWFnZW50bw== 31779
+LmN1c3RvbWVy 31780
+KHByZXY= 31781
+4Lil 31782
+IGxveWFsdHk= 31783
+Q29n 31784
+IHByb3RvY29scw== 31785
+IENvbXBhbmllcw== 31786
+IHRoZW9yZXRpY2Fs 31787
+IGFjY2Vzc2luZw== 31788
+IFplbg== 31789
+Lm9uZXM= 31790
+YXR0aWNl 31791
+X3dvcmxk 31792
+emVz 31793
+IHRhdHRvbw== 31794
+IG1lbm9z 31795
+IGludGVyc2VjdA== 31796
+Il07Cgo= 31797
+YmVsaWU= 31798
+IGluYWN0aXZl 31799
+LnJlYWRsaW5l 31800
+LWxhYmVsbGVk 31801
+LmRvbmU= 31802
+bGlja3I= 31803
+IFdPUks= 31804
+IGRlcml2YXRpdmU= 31805
+IGRhdGFiYXNlcw== 31806
+4oKC 31807
+IHN4 31808
+LmlzQXJyYXk= 31809
+IHlz 31810
+IHBhZGE= 31811
+IEJ1bGxldA== 31812
+KGAv 31813
+aXNBY3RpdmU= 31814
+IENHU2l6ZQ== 31815
+KGVxdWFsVG8= 31816
+IENvbHVtYnVz 31817
+IG1hcnJ5 31818
+REVW 31819
+X2xpbWl0cw== 31820
+cm9uZXM= 31821
+SUFT 31822
+IHRhdQ== 31823
+bWlubw== 31824
+X1dyaXRl 31825
+IFdpbmU= 31826
+IFtbJw== 31827
+IFB1bGw= 31828
+cml0ZXJz 31829
+cmllbnRz 31830
+IHNoaWZ0aW5n 31831
+dXBw 31832
+X1RJTUVS 31833
+IENvbmRpdGlvbnM= 31834
+4bql 31835
+IE9yZGVycw== 31836
+IFN0cmVuZ3Ro 31837
+5omA 31838
+IHZhbGlkaXR5 31839
+IGZvdA== 31840
+ZXR1cg== 31841
+IGJvbHQ= 31842
+5YaF 31843
+IEFsb25n 31844
+b3NoaQ== 31845
+IGFzc3VtcHRpb25z 31846
+IG1hZ2F6aW5lcw== 31847
+X1NQSQ== 31848
+IHB1bnQ= 31849
+X1BST0RVQ1Q= 31850
+IHJlbGF5 31851
+IEphdmFzY3JpcHQ= 31852
+LnRl 31853
+LWVz 31854
+IHdpZGdldHM= 31855
+KGZz 31856
+PEl0ZW0= 31857
+X2V4dHJh 31858
+IHJlY3J1aXRpbmc= 31859
+RXQ= 31860
+IG5lY2Vzc2l0eQ== 31861
+cHc= 31862
+IG5vdmVscw== 31863
+dXNzZWxz 31864
+Q3JlYXRvcg== 31865
+IE1WUA== 31866
+IE9D 31867
+dGhvb2Q= 31868
+Y2xpZW50cw== 31869
+KSkq 31870
+IGNoYXJhY3Rlcml6ZWQ= 31871
+X1NFTkQ= 31872
+dXRp 31873
+VHk= 31874
+LmZyb21Kc29u 31875
+QFNlcnZpY2U= 31876
+44KC 31877
+Q2hyaXM= 31878
+X0lz 31879
+IEpvaG5ueQ== 31880
+IGNsZWFuZXI= 31881
+IEluaXRpYWxpemVz 31882
+VU5L 31883
+KGF4aXM= 31884
+0LXQtw== 31885
+aWV2YWw= 31886
+IFdhcnJpb3Jz 31887
+fSko 31888
+RE1J 31889
+4pmA 31890
+IFRyZWFzdXJ5 31891
+IGZlYXM= 31892
+IHNsYQ== 31893
+X0VOVU0= 31894
+bGhz 31895
+IEluc3RpdA== 31896
+aXBwZXJz 31897
+TGluZWFy 31898
+UmVhZGluZw== 31899
+cXVpcmllcw== 31900
+LWNlbGw= 31901
+Y2hyb21l 31902
+LlNlYXJjaA== 31903
+SU5B 31904
+57G75Z6L 31905
+IAogCg== 31906
+IFNhbXVlbA== 31907
+IG1pbGxz 31908
+IGRvbmF0ZQ== 31909
+IEdlbw== 31910
+KHJvd3M= 31911
+IHNoZWVw 31912
+IMOpbA== 31913
+5L2T 31914
+IGJlbQ== 31915
+X1VOVVNFRA== 31916
+IFJDQw== 31917
+IGludHJvZHVjaW5n 31918
+YXR0YQ== 31919
+IFByaW9yaXR5 31920
+IEZC 31921
+IFNlcmdl 31922
+PiI7 31923
+YXRjaGluZw== 31924
+IEtub3dsZWRnZQ== 31925
+CVRoZQ== 31926
+O21hcmdpbg== 31927
+bGVzc25lc3M= 31928
+b3BhcmQ= 31929
+dW1hdGlj 31930
+KCkpKTsNCg== 31931
+IGZhbHM= 31932
+KGNhY2hl 31933
+VHlwZUlk 31934
+6YCa 31935
+X2Nob2ljZQ== 31936
+IEdvdGg= 31937
+IFNpdGVz 31938
+TUc= 31939
+X2JvcmRlcg== 31940
+SW5kaWNlcw== 31941
+Q29tcGFyZXI= 31942
+IFJlZGlzdHJpYnV0aW9u 31943
+IGNsb3NldA== 31944
+IHZlcnNhdGlsZQ== 31945
+SW5wdXRz 31946
+KioqKioqKioqKioqKioqKioqKio= 31947
+IG9iZXNpdHk= 31948
+cXVpeg== 31949
+Z3Jh 31950
+KGdsb2JhbA== 31951
+5Yqh 31952
+IGNvbGxlY3Rvcg== 31953
+IGtvcg== 31954
+b3ZhYmxl 31955
+QURD 31956
+IEV2ZW50SGFuZGxlcg== 31957
+Lm5j 31958
+IHBsYXliYWNr 31959
+aWVudG9z 31960
+X3Blcm0= 31961
+X1dBUk5JTkc= 31962
+IE9seW1waWNz 31963
+Lm5vcm0= 31964
+IEJyb2FkY2FzdA== 31965
+X3NtYWxs 31966
+ZHJpdmU= 31967
+Lmlsb2M= 31968
+IHR5cGVk 31969
+TUVN 31970
+X2NvbnM= 31971
+RE1FVEhPRA== 31972
+IGx1bg== 31973
+LmRpc3RhbmNl 31974
+KHBhcg== 31975
+cG9vbg== 31976
+IGJhc3Q= 31977
+YWN0aXZpdGllcw== 31978
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA== 31979
+Og0KDQo= 31980
+U0VS 31981
+KSYm 31982
+X2xzdA== 31983
+IFBvbGlzaA== 31984
+IGtub2NrZWQ= 31985
+IGZydXN0cmF0aW9u 31986
+YXVrZWU= 31987
+IHBob3NwaA== 31988
+aXF1aWQ= 31989
+X2NvZWZm 31990
+5q2k 31991
+TGF0ZXN0 31992
+IER1c3Q= 31993
+VGlwbw== 31994
+IG1haW50YWlucw== 31995
+IG1hcnNo 31996
+aW5jaW5u 31997
+bGJs 31998
+Q2FyZQ== 31999
+IG5laWdoYm9yaG9vZHM= 32000
+X2dwaW8= 32001
+IEFyc2VuYWw= 32002
+RGVt 32003
+IFdoZQ== 32004
+X2hvb2s= 32005
+IGxkYw== 32006
+IEhhcnBlcg== 32007
+IEJlcmtlbGV5 32008
+IGdyYWR1YXRlZA== 32009
+UGVyY2VudA== 32010
+IGFycml2aW5n 32011
+IEFkdmVudHVyZQ== 32012
+KHNjb3Bl 32013
+KCcq 32014
+cXVhcnRlcg== 32015
+IE1hcmll 32016
+U3BlYWtpbmc= 32017
+X2NvZGVnZW4= 32018
+IGltbXVu 32019
+Y2FzdGVy 32020
+44KM 32021
+5ZWG 32022
+IERpbWVuc2lvbnM= 32023
+LnJlY29yZA== 32024
+IHRleHRv 32025
+IE1pY2hlbGxl 32026
+UGVuZGluZw== 32027
+KGJ5 32028
+X1BBUg== 32029
+dWNodA== 32030
+YmVl 32031
+LlRocmVhZA== 32032
+YW1waXJl 32033
+a25vdw== 32034
+IENsaW5pY2Fs 32035
+IG1hcmdpbkJvdHRvbQ== 32036
+IGRpc3Rpbmd1aXNo 32037
+LkZ1bGw= 32038
+LnVuZGVmaW5lZA== 32039
+IFNlcXVlbGl6ZQ== 32040
+IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIw== 32041
+IGVkdWNhdGVk 32042
+X09WRVI= 32043
+5bqP 32044
+IMKgIMKg 32045
+X2VhY2g= 32046
+IHVyZ2U= 32047
+ZGVwYXJ0 32048
+IGRvbm9ycw== 32049
+IEF1 32050
+IGJpbGxpb25z 32051
+IGJlbG9uZ2luZw== 32052
+X2FnZQ== 32053
+X0ludA== 32054
+IHN1YnN0YW5jZXM= 32055
+bWFjaGluZQ== 32056
+ISEhCgo= 32057
+IGpzb25pZnk= 32058
+aWJiZWFu 32059
+IENhZA== 32060
+IGVuZFRpbWU= 32061
+IGN5Y2xpbmc= 32062
+IFVJVGV4dEZpZWxk 32063
+IGxldmVyYWdl 32064
+IHZhbmlsbGE= 32065
+ZWF0 32066
+TGF1bmNo 32067
+KHB0 32068
+c3RhdGVz 32069
+IENvbnRyb2xz 32070
+IFJlc3BvbnM= 32071
+IEpha2U= 32072
+IGFzbGVlcA== 32073
+Zm9ydHVuYXRl 32074
+Lm5leHRMaW5l 32075
+U2l6ZU1vZGU= 32076
+7J28 32077
+VGVzdGluZ01vZHVsZQ== 32078
+R2VybWFu 32079
+IEludmVzdGln 32080
+LnJldmVyc2U= 32081
+IEJBQ0s= 32082
+KERhdGVUaW1l 32083
+IG5vbnByb2ZpdA== 32084
+IEV4cGVjdA== 32085
+IHRhbnRv 32086
+J10pLA== 32087
+CXRoZQ== 32088
+TXVsdGlwbGU= 32089
+KGdldEFjdGl2aXR5 32090
+X1dBSVQ= 32091
+IGrDoQ== 32092
+ZGVjb3I= 32093
+bGV2YW5jZQ== 32094
+IEdpdEh1Yg== 32095
+bWluYXRpb24= 32096
+X3F1YW50aXR5 32097
+LlNjYW5uZXI= 32098
+IExpb24= 32099
+6ZSZ6K+v 32100
+IGRyZQ== 32101
+IHRhbnRyYQ== 32102
+IGNvbnRlbnRUeXBl 32103
+IGZpZA== 32104
+X2FsdA== 32105
+TlNJbmRleFBhdGg= 32106
+LXBs 32107
+5YyW 32108
+IGFudGliaW90 32109
+dGFibGVz 32110
+YWNpYWw= 32111
+IFJlZ2lzdHJ5 32112
+IG9saXZl 32113
+aWdlcnM= 32114
+IHN1YnNjcmliZXI= 32115
+X3ByZXM= 32116
+IFN5bnRheA== 32117
+IGxvdmVycw== 32118
+LkJ5dGU= 32119
+b2xkZXJz 32120
+X2ZvcndhcmQ= 32121
+YWx3YXlz 32122
+Q2FwdGlvbg== 32123
+UHJpdg== 32124
+IFRhbXBh 32125
+aXNhdGV1cg== 32126
+LWxhYmVsbGVkYnk= 32127
+IFRvU3RyaW5n 32128
+IOyCrA== 32129
+IGluaXRpYXRlZA== 32130
+V0Y= 32131
+IGluc3RpdHV0aW9uYWw= 32132
+aW5qZWN0 32133
+IFNjcg== 32134
+IGRvY3RyaW5l 32135
+IHNwYWNpb3Vz 32136
+aXN1cmU= 32137
+IEFuYQ== 32138
+InRpbWU= 32139
+ZXNzYWdpbmc= 32140
+IGNpZA== 32141
+IE5hbg== 32142
+IGluY29tcGxldGU= 32143
+VEFH 32144
+LWJ1aWxk 32145
+RGVjZW1iZXI= 32146
+IHJlc2lkdWFs 32147
+KFBETw== 32148
+IExpc3Rlbg== 32149
+IGdseXBo 32150
+IGdhcHM= 32151
+bmVh 32152
+LlJlY3Q= 32153
+IHNhdQ== 32154
+IFBob3RvZ3JhcGg= 32155
+IGV4ZWN1dGFibGU= 32156
+IEV4cGVydA== 32157
+Q29yb3V0aW5l 32158
+X3NpemVz 32159
+IE5M 32160
+LmlzVmFsaWQ= 32161
+KTt9Cg== 32162
+LXJlZw== 32163
+IGNpdGluZw== 32164
+Y3dk 32165
+IE90dGF3YQ== 32166
+IEJhdHQ= 32167
+IHJlbmV3YWJsZQ== 32168
+IHByZWxpbWluYXJ5 32169
+IGFzeWx1bQ== 32170
+IHdyaXN0 32171
+IHV0aWxpeg== 32172
+IGRldGVudGlvbg== 32173
+RmFzdA== 32174
+IGFuZ2U= 32175
+aW5jaW5uYXRp 32176
+IHN0ZWVyaW5n 32177
+IE5hTg== 32178
+aW9zaXR5 32179
+L3BhZ2U= 32180
+IOi/ 32181
+c3Rlcm9s 32182
+IGRpc2c= 32183
+KERC 32184
+IERFU0NSSVBUSU9O 32185
+IF8k 32186
+IG9ic3RhY2xl 32187
+IGJpemFycmU= 32188
+IGV4dHJhY3Rpb24= 32189
+X2V4cGVjdGVk 32190
+IGxvc2Vz 32191
+IENlbGVicg== 32192
+IGh0bWxGb3I= 32193
+IGV4cGxvaXQ= 32194
+0L7Qu9GM0LfQvtCy 32195
+WFla 32196
+IG1hZ25ldA== 32197
+YW1wZWQ= 32198
+IGF0b21z 32199
+U291cmNlcw== 32200
+cGVjdGl2ZXM= 32201
+0YHQu9C4 32202
+ID0NCg== 32203
+IGRhcmU= 32204
+IFdhbHRlcg== 32205
+IGJyaWdodG5lc3M= 32206
+IGFubm90YXRpb25z 32207
+648= 32208
+aXNrZQ== 32209
+U2NoZWR1bGU= 32210
+LmltYWdlcw== 32211
+cm9zc28= 32212
+ICIuLg== 32213
+Z2FtbWE= 32214
+IGluc3RydWN0b3I= 32215
+IG92ZXJ3cml0ZQ== 32216
+LWFt 32217
+IGRldmFzdGF0aW5n 32218
+IFNhaW50cw== 32219
+IGhz 32220
+IGJvbnVzZXM= 32221
+JG91dHB1dA== 32222
+aWpk 32223
+KEFjdGlvbkV2ZW50 32224
+bW9uaXRvcg== 32225
+IG1hdHRyZXNz 32226
+SmFudWFyeQ== 32227
+Lmpw 32228
+IGNhcmFjdGVy 32229
+IGltcG9zZQ== 32230
+X3Jlc3Q= 32231
+IFNpZ25hdHVyZQ== 32232
+IGNvcm9uYXZpcnVz 32233
+44GK 32234
+X2NvbXBhcmU= 32235
+TWVhc3VyZQ== 32236
+aXRhdGVk 32237
+ZWxpams= 32238
+aWdvcw== 32239
+ZXNhcg== 32240
+IHJ1c2hlZA== 32241
+bWV0cnk= 32242
+X1NFUEFSQVRPUg== 32243
+X1dF 32244
+X0FUVFJJQlVURQ== 32245
+IHlhbWw= 32246
+IHNwZWNz 32247
+IFJhaA== 32248
+cGhlcmlj 32249
+IEludmVzdG1lbnQ= 32250
+w6RsbA== 32251
+IGFwcGVhbGluZw== 32252
+IHZpZXdwb3J0 32253
+56k= 32254
+IG1hcmdpbkxlZnQ= 32255
+IHN1YnRyYWN0 32256
+IEVESVQ= 32257
+CUFycmF5TGlzdA== 32258
+Z3JhZGluZw== 32259
+IEZhaWx1cmU= 32260
+YXNwZXI= 32261
+RUVL 32262
+KG5vdw== 32263
+PG9iamVjdA== 32264
+IEFsaWdubWVudA== 32265
+cGxlYWRv 32266
+cXR0 32267
+KEVSUk9S 32268
+IElOVkFMSUQ= 32269
+IHVzZXJpZA== 32270
+cmFpc2Vz 32271
+SURJ 32272
+IHZhcmlhbmNl 32273
+IE5pbA== 32274
+L2RlbGV0ZQ== 32275
+X01BSU4= 32276
+LlRva2Vu 32277
+LkNhdGVnb3J5 32278
+PikK 32279
+Q29sbGlzaW9u 32280
+IEdyZWF0ZXI= 32281
+IFJhY2luZw== 32282
+YWxhbg== 32283
+IG1vbmV0YXJ5 32284
+LG5ldw== 32285
+IFNvcnJ5 32286
+LkVuYWJsZQ== 32287
+IEluc3RhbnRpYXRl 32288
+b2xsZW4= 32289
+66m0 32290
+IENhbGxpbmc= 32291
+X2hvdXI= 32292
+QURB 32293
+IHNoeQ== 32294
+KSoq 32295
+ID09Pg== 32296
+IGVzcGVjaWFs 32297
+IGludGVycHJldGVk 32298
+IT0i 32299
+IHBoYXJtYWN5 32300
+LnNpbmdsZQ== 32301
+IENpYWxpcw== 32302
+IHBhcmFz 32303
+LnRvVXBwZXJDYXNl 32304
+IERlbW9u 32305
+UHJpbWU= 32306
+IHJhbmtpbmdz 32307
+QWRkaW5n 32308
+X0hBU0g= 32309
+IEV4YW0= 32310
+2qk= 32311
+IFZpY3Rvcg== 32312
+T2theQ== 32313
+Il07DQo= 32314
+IGZvcnR1bmU= 32315
+IEZFVENI 32316
+ZXhwYW5k 32317
+LkludGVyb3A= 32318
+IGJhcm4= 32319
+5raI 32320
+dWV2bw== 32321
+IHNwZWN1bGF0aW9u 32322
+4pSA4pSA4pSA4pSA 32323
+IE51 32324
+IEJsdWVz 32325
+KGZuYW1l 32326
+IGluaGFiaXQ= 32327
+IFwiJQ== 32328
+Q0VT 32329
+dWxhcmlv 32330
+X2Ny 32331
+IHZhbGlkYXRlZA== 32332
+IG1pZG5pZ2h0 32333
+YW5raW5n 32334
+IGluY29ycG9yYXRl 32335
+IHB1cnN1aXQ= 32336
+RVhQ 32337
+cHJpbWU= 32338
+UGlk 32339
+LVVT 32340
+IE51cnM= 32341
+IFdoZWVs 32342
+6Zg= 32343
+IGlucA== 32344
+IHN1cHBvcnRpdmU= 32345
+Lm1lbWJlcg== 32346
+IFNob3Q= 32347
+LkNoZWNrQm94 32348
+IGFmZmlybQ== 32349
+VG9y 32350
+RnVsbFllYXI= 32351
+IGNvbnNpZGVyYWJseQ== 32352
+Y3JlZGVudGlhbHM= 32353
+X29wdHM= 32354
+Um9sbA== 32355
+KHJvdW5k 32356
+IGNvbWVudA== 32357
+X1VBUlQ= 32358
+IGV4dGVuZGluZw== 32359
+Ukc= 32360
+cmVzdWx0YWRv 32361
+aXR1 32362
+LmdldFNlc3Npb24= 32363
+IGF0dHJhY3Rpb24= 32364
+JkQ= 32365
+JGh0bWw= 32366
+IEplc3NpY2E= 32367
+IEFzc29jaWF0ZQ== 32368
+YcOx 32369
+X2Vk 32370
+IExhZw== 32371
+IG9yaWdpbnM= 32372
+KCkpLT4= 32373
+YWRkRXZlbnRMaXN0ZW5lcg== 32374
+SUFMT0c= 32375
+5ZCm 32376
+LkNvbXBhcmU= 32377
+QWxidW0= 32378
+IEt1 32379
+PFE= 32380
+YXJnZXN0 32381
+IHByb2xvbmc= 32382
+IGNvbmZpZ3VyYXRpb25z 32383
+IGFjY2lkZW50YWxseQ== 32384
+X3Bob3Rv 32385
+ICcnOw0K 32386
+IHZlcnNl 32387
+Qm9i 32388
+IGZhcm1pbmc= 32389
+ZGVsaXZlcnk= 32390
+IE1hY2s= 32391
+IHVzZVNlbGVjdG9y 32392
+LmJvb3RzdHJhcGNkbg== 32393
+a2VlcGluZw== 32394
+ZW55 32395
+LnVwbG9hZA== 32396
+IE1FVEhPRA== 32397
+Y3JlYXRvcg== 32398
+PF8= 32399
+IEVhc3Rlcg== 32400
+Li0t 32401
+VUlCdXR0b24= 32402
+44KJ 32403
+b21ldGVycw== 32404
+IHNoaW5l 32405
+IGhvZ3k= 32406
+XHM= 32407
+IGhhcm5lc3M= 32408
+LkNlbGw= 32409
+IGxpZnRpbmc= 32410
+IGNvbWJpbmVz 32411
+IE9jY3Vw 32412
+ZXhjbHVkZQ== 32413
+cGF0aWFs 32414
+IHJlc3Bpcg== 32415
+X2ZpdA== 32416
+IGZpZnR5 32417
+IE1vbA== 32418
+IHR1bmVk 32419
+LWRpbWVuc2lvbmFs 32420
+IHFz 32421
+IHRvcHM= 32422
+PiI7Cgo= 32423
+cXVpc2l0ZQ== 32424
+Y2hhbm5lbHM= 32425
+L3Jlcw== 32426
+IEFuYWx5dGljcw== 32427
+LmFwcGNvbXBhdA== 32428
+L3Rv 32429
+IG9uRXJyb3I= 32430
+KGF0dHI= 32431
+SVJN 32432
+IHJhZ2F6 32433
+LWFz 32434
+LlNlY29uZA== 32435
+b3JpZW50ZWQ= 32436
+IGRvbm4= 32437
+IGxpZ2h0bmluZw== 32438
+Zmlk 32439
+IFBsZQ== 32440
+44G+44GZ 32441
+dHJv 32442
+LlRydWU= 32443
+T2JzZXJ2YWJsZQ== 32444
+15k= 32445
+dW1iaW5n 32446
+IHByb3NwZWN0aXZl 32447
+LWZpbHRlcg== 32448
+IHB1cnN1YW50 32449
+KHBvaW50cw== 32450
+LkJpbmQ= 32451
+IHBhbG0= 32452
+Y2xlYXJmaXg= 32453
+w7Zz 32454
+IEdvbno= 32455
+IHdlYWtlbg== 32456
+RHJpdmU= 32457
+ZW5pZG8= 32458
+bGxk 32459
+b2JveA== 32460
+YW5lYW4= 32461
+R290 32462
+5L+d 32463
+UmVnZXg= 32464
+5oM= 32465
+IHNhbGFk 32466
+YXNzaXM= 32467
+Im5ldA== 32468
+aW5oZXJpdERvYw== 32469
+IFJW 32470
+cXVpZXI= 32471
+IGNsYXp6 32472
+xLHFnw== 32473
+b3N0ZXJvbmU= 32474
+IGFpcmxpbmU= 32475
+Lmxpc3RkaXI= 32476
+IGRvd25sb2FkaW5n 32477
+IFBhbG0= 32478
+d2F1a2Vl 32479
+Jmx0 32480
+LkJM 32481
+X0lOTElORQ== 32482
+b2Zmcw== 32483
+PDwo 32484
+X25ld3M= 32485
+IGNoYXNl 32486
+Lz48 32487
+IGV1cm9z 32488
+IEVneXB0aWFu 32489
+IFN0YWlubGVzcw== 32490
+X0JPT0w= 32491
+IEd1aWxk 32492
+IER5bmFt 32493
+W2luZGV4UGF0aA== 32494
+IO8= 32495
+IG1lbW9yYWJsZQ== 32496
+IENoYW1waW9u 32497
+UmVzb3VyY2VNYW5hZ2Vy 32498
+LkxvZ2lu 32499
+IEZvcm1lcg== 32500
+eXBlZA== 32501
+IGxsZWc= 32502
+OyIs 32503
+RFdPUkQ= 32504
+IHRheGk= 32505
+IGJvbWJz 32506
+cmFo 32507
+LnRhZ3M= 32508
+X3Rlc3Rz 32509
+c3RvbmVz 32510
+4oCdKQ== 32511
+W2c= 32512
+cnR5cGU= 32513
+IHZ1 32514
+IGhvc3RpbGU= 32515
+Q2hhcnM= 32516
+IFBhdHJpb3Rz 32517
+L3N0YXR1cw== 32518
+PEI= 32519
+IEluY29tZQ== 32520
+IERhZA== 32521
+IHBhdHJvbA== 32522
+X0NIQU5HRQ== 32523
+IHVwZ3JhZGVk 32524
+IGNoaW5h 32525
+c2V0cQ== 32526
+U3RhcnRlZA== 32527
+LlVuZGVm 32528
+IGNoZWNrc3Vt 32529
+IGZydXN0cmF0ZWQ= 32530
+e28= 32531
+IGVuZg== 32532
+IHdvb2Rz 32533
+IEFueW9uZQ== 32534
+RW5jb2Rl 32535
+IFF0V2lkZ2V0cw== 32536
+YXJlYXM= 32537
+IHNoZWVy 32538
+c2tp 32539
+ZW5kcG9pbnQ= 32540
+X1Rlc3Q= 32541
+U291cA== 32542
+fn5+fn5+fn5+fn5+fn5+fg== 32543
+KGZpbGVz 32544
+CQkJCQkNCg== 32545
+LnNwYXJr 32546
+IHZhbHVlZA== 32547
+ICUK 32548
+LmNvbnRyb2xz 32549
+IFhDVEFzc2VydEVxdWFs 32550
+IGZhbWU= 32551
+IFJpYw== 32552
+RE9U 32553
+IEFsYmVydGE= 32554
+5L2/ 32555
+b3NhbA== 32556
+LldlYkNvbnRyb2xz 32557
+IC0tLS0tLS0tLS0tLQ== 32558
+IE1pcw== 32559
+IFNZUw== 32560
+Tm9ubnVsbA== 32561
+PWl0ZW0= 32562
+IGV4cGlyZQ== 32563
+RGVjb2Rl 32564
+X29wZXJhdGlvbg== 32565
+IFZhbGlkYXRvcg== 32566
+LkNFTlRFUg== 32567
+dWZmcw== 32568
+Km0= 32569
+IGF2YW50 32570
+5qyh 32571
+4oCcWW91 32572
+LnBlcm1pc3Npb24= 32573
+Li4uKQ== 32574
+IExpYw== 32575
+X2Nvb3Jkcw== 32576
+Lm5vbWJyZQ== 32577
+Y2xv 32578
+LkludGVybmFs 32579
+IENobw== 32580
+X3N3 32581
+CUls 32582
+Y2xr 32583
+IGNhc3RsZQ== 32584
+KGxheWVy 32585
+cGl0 32586
+IGd1aWRlZA== 32587
+IOKWiA== 32588
+IHN1cGVyYg== 32589
+IHN1cHBsZW1lbnRz 32590
+X2NlbnQ= 32591
+IHBlZWs= 32592
+SU5BUlk= 32593
+LkNvbnRlbnRBbGlnbm1lbnQ= 32594
+ZmFsbHM= 32595
+IikpOw== 32596
+V2FsbA== 32597
+KS4NCg== 32598
+IERhbm55 32599
+aXJtaW5naGFt 32600
+SUFMSVo= 32601
+KGNyZWF0ZQ== 32602
+Iklu 32603
+U2VydmljZVByb3ZpZGVy 32604
+IHByaWNlZA== 32605
+bWFjcm8= 32606
+YW1hYw== 32607
+LmJveA== 32608
+LS0tLQo= 32609
+44Or 32610
+IFN1aXQ= 32611
+dXJzdA== 32612
+YnJ1 32613
+b3VybmFscw== 32614
+bnVtZXJv 32615
+X18oKQo= 32616
+RGFz 32617
+IE1pdHQ= 32618
+dWRlcg== 32619
+P1w= 32620
+ZnU= 32621
+W0I= 32622
+IDopCgo= 32623
+KGludGVy 32624
+YnJhaW5z 32625
+IGF0dGl0dWRlcw== 32626
+VmVyaWZ5 32627
+IHNpZ25hdHVyZXM= 32628
+YWNrQmFy 32629
+IGdk 32630
+SmFjaw== 32631
+LmNhdA== 32632
+IHp6 32633
+d2FyZg== 32634
+RlRFUg== 32635
+Iik7CgoK 32636
+QWxpdmU= 32637
+SUNMRQ== 32638
+IFdoYXRldmVy 32639
+IG91dGxpbmVk 32640
+c3ByaXRl 32641
+0LXQsg== 32642
+X0FC 32643
+X0RFUFRI 32644
+IGNydXNoZWQ= 32645
+YWFh 32646
+KGV2 32647
+5py6 32648
+QW50aQ== 32649
+SUNP 32650
+aXNFcXVhbFRv 32651
+LnN1bg== 32652
+aWN1bG8= 32653
+c2FsZQ== 32654
+X2hleA== 32655
+IFZr 32656
+YXB0b3I= 32657
+VW5pb24= 32658
+IERpc2NvdW50 32659
+bGlzdGE= 32660
+LlVuZGVmT3I= 32661
+IGF1dG9tYXRpb24= 32662
+Tm9y 32663
+5a+5 32664
+5Y+C5pWw 32665
+IHJlZmxleA== 32666
+IExhdXJl 32667
+LnNob3dNZXNzYWdlRGlhbG9n 32668
+LnRlbXA= 32669
+IGFrYW4= 32670
+IF9fX19fXw== 32671
+LklzVHJ1ZQ== 32672
+QVJFRA== 32673
+YWdsZQ== 32674
+RW5lcmd5 32675
+IHF1YW50aXRpZXM= 32676
+4oCZw6k= 32677
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA== 32678
+IGNpdGl6ZW5zaGlw 32679
+bW91dGg= 32680
+IGluYXBwcm9wcmlhdGU= 32681
+IE91dGRvb3I= 32682
+V2hpdGVTcGFjZQ== 32683
+QW5vbnltb3Vz 32684
+bG9hZHM= 32685
+d2ViRWxlbWVudFByb3BlcnRpZXM= 32686
+VGVu 32687
+IGFjY2lkZW50cw== 32688
+IGFkdmVydGlzZW1lbnQ= 32689
+IFllbWVu 32690
+KGNhbGw= 32691
+IHNsYXZlcnk= 32692
+0YHQvw== 32693
+IExhbQ== 32694
+X0JJVFM= 32695
+b21lZ2E= 32696
+IE9sZQ== 32697
+IGtpZG4= 32698
+X0Fu 32699
+IFJhaWQ= 32700
+Q3JlYXRpb24= 32701
+c2F2ZWQ= 32702
+IHByb3BvcnQ= 32703
+V0FSTklORw== 32704
+XFA= 32705
+IHB3ZA== 32706
+RGF0YVJlYWRlcg== 32707
+aXNjaGVy 32708
+YWRlb24= 32709
+IFByZWRpY3Q= 32710
+IHJlYXNvbmluZw== 32711
+IGRlc3Ryb3lpbmc= 32712
+SGVs 32713
+KmQ= 32714
+IExlZ2lzbA== 32715
+X1By 32716
+CQkJICAgICAgIA== 32717
+IHN5bXBhdGg= 32718
+IGNoZXNz 32719
+IG1hbQ== 32720
+OmhvdmVy 32721
+IGNvbnZlcnRz 32722
+IHBlbGE= 32723
+IHByb2dyZXNzaW9u 32724
+ICJfIg== 32725
+IEdpbGw= 32726
+CXNob3c= 32727
+IHN1cHBvc2VkbHk= 32728
+YWNjdXJhY3k= 32729
+ZWxpbg== 32730
+IHVuZm9sZGluZw== 32731
+IEh5cGVy 32732
+IHdhbm5h 32733
+IHVwcw== 32734
+KCM= 32735
+IENyaW1pbmFs 32736
+KFBvaW50 32737
+YXRMbmc= 32738
+YWN0bHk= 32739
+IGNvbnRyYWN0b3Jz 32740
+J119 32741
+ZHJhdWxpYw== 32742
+w7NkaWdv 32743
+IFRU 32744
+IFdpZGU= 32745
+IEFSRw== 32746
+X2lj 32747
+RkxBR1M= 32748
+U2Nob29s 32749
+IGNsZWFyaW5n 32750
+LWJlaW5n 32751
+PXtb 32752
+LGNvbnN0 32753
+bWFuZW50 32754
+T3ZlcmxheQ== 32755
+KCci 32756
+6YeP 32757
+IFRpbWVzdGFtcA== 32758
+IG1haWxpbmc= 32759
+IENha2U= 32760
+LlRoYXQ= 32761
+IG1lZGl0YXRpb24= 32762
+cXA= 32763
+IGVtcHJlc2E= 32764
+IExpb25z 32765
+IHdlbGQ= 32766
+IExpbmtlZElu 32767
+IGN1c2g= 32768
+IGdlbm9tZQ== 32769
+LkluZGV4T2Y= 32770
+YWdhaW4= 32771
+IGZhbGxiYWNr 32772
+IGNhbXBpbmc= 32773
+cmVkZA== 32774
+LXN0cmlwZWQ= 32775
+IGR2 32776
+RmVicnVhcnk= 32777
+IFByb3h5 32778
+dXNr 32779
+IGRpZXNlbA== 32780
+V1JJVEU= 32781
+UkVBSw== 32782
+TG9yZW0= 32783
+Lkludm9rZQ== 32784
+LWRpdg== 32785
+SW50ZXJjZXB0b3I= 32786
+IERI 32787
+aWFsZXM= 32788
+IHZpbGxhZ2Vz 32789
+2LQ= 32790
+IEVOVg== 32791
+U3lz 32792
+LlhS 32793
+IHBvZW0= 32794
+w4I= 32795
+Y2FkZQ== 32796
+cGxvdHM= 32797
+IHso 32798
+LmdpdA== 32799
+L3N2Zw== 32800
+bmNtcA== 32801
+IMSN 32802
+YWluZXM= 32803
+5Ye95pWw 32804
+ICgpCgo= 32805
+b3BzaXM= 32806
+IFJlbGF0aW9uc2hpcA== 32807
+X2F1dA== 32808
+IEJvbWI= 32809
+CWNvbQ== 32810
+KnNpemVvZg== 32811
+b2ZmaWNpYWw= 32812
+X3BheWxvYWQ= 32813
+CQkJCQkgIA== 32814
+Lm1hbmFnZXI= 32815
+IEFyb3VuZA== 32816
+CXNlbmQ= 32817
+IEV4ZXJjaXNl 32818
+IEJpbGx5 32819
+aXZp 32820
+IG5lZWRpbmc= 32821
+X3VybHM= 32822
+X3Rhc2tz 32823
+IEhlbQ== 32824
+IHRlYXJEb3du 32825
+ZW5jcnlwdA== 32826
+LnRpZQ== 32827
+IGFzbQ== 32828
+SUNI 32829
+IENHUmVjdE1ha2U= 32830
+7ISx 32831
+dWxvbmc= 32832
+IGl0cg== 32833
+IEdTVA== 32834
+IG9mZmVyaW5ncw== 32835
+cm9iZQ== 32836
+RUVF 32837
+b3BlcmF0b3Jz 32838
+X1BST1A= 32839
+aW5kZW50 32840
+QURF 32841
+b3Jm 32842
+65A= 32843
+IGJsZXNzZWQ= 32844
+dmFzY3VsYXI= 32845
+IGNvbm9j 32846
+SGFwcHk= 32847
+QnJpZGdl 32848
+aWxpdGF0aW9u 32849
+am9pbnQ= 32850
+IEFkbWluaXN0cg== 32851
+LXRyYW5zZm9ybQ== 32852
+IG1lYW50aW1l 32853
+L0s= 32854
+IEJlZHJvb20= 32855
+IHJpZ2lk 32856
+IGJyb3dzZXJz 32857
+RU1QVFk= 32858
+LlNlcmlhbGl6ZQ== 32859
+X0VE 32860
+IHN0aXRjaA== 32861
+IGphbg== 32862
+ZWxsdA== 32863
+IGJyYWNl 32864
+IHRyYWlscw== 32865
+cHVibGlzaGVk 32866
+5a+G56CB 32867
+fScpCg== 32868
+IGFjaWRz 32869
+ICEhIQ== 32870
+X2RpcmVjdA== 32871
+PigpKTsK 32872
+YWrEhQ== 32873
+X09DQw== 32874
+IHBsYW5ldHM= 32875
+5p+l 32876
+IER1Ymxpbg== 32877
+IHNlcmll 32878
+LnByaW50Zg== 32879
+ZGVlcA== 32880
+YCk= 32881
+IFwk 32882
+IM68 32883
+X1ZJREVP 32884
+ZW5kb3Jz 32885
+IENyeXB0bw== 32886
+RmFy 32887
+LlRyYW5zcGFyZW50 32888
+LlRS 32889
+aWFzbQ== 32890
+X3RyYWluaW5n 32891
+IHRlYWNoZXM= 32892
+IEJlbHQ= 32893
+IGxpbWl0aW5n 32894
+IEthdGg= 32895
+IEluZGV4UGF0aA== 32896
+IGFjaGlldmVtZW50cw== 32897
+IHNlcsOh 32898
+aW50ZXJvcFJlcXVpcmU= 32899
+IGRpc3Nl 32900
+Lklm 32901
+YXJtaW5n 32902
+dWxzaW9u 32903
+UG8= 32904
+X0RFVEFJTA== 32905
+UHJvdG90eXBl 32906
+IENBTA== 32907
+IGFncmVlcw== 32908
+LnZv 32909
+LkV4ZWN1dGVOb25RdWVyeQ== 32910
+IFRvcGlj 32911
+ICd7fQ== 32912
+QXJt 32913
+IGVjYw== 32914
+TWFn 32915
+IHNlcmlhbGl6ZWQ= 32916
+CWNvbm4= 32917
+Y2FjaGVk 32918
+PXRm 32919
+IEJ5dGVBcnJheQ== 32920
+cHJvdG9idWY= 32921
+dmFyY2hhcg== 32922
+CUFTU0VSVA== 32923
+IGxpc3Rl 32924
+X3RyaWdnZXI= 32925
+t7g= 32926
+RmVlbA== 32927
+VGFob21h 32928
+IExpaw== 32929
+IHN0cnVjdHVyZWQ= 32930
+ZXJndXM= 32931
+LkluaXRpYWw= 32932
+X2dl 32933
+Y2xqcw== 32934
+LmNvbnRhY3Q= 32935
+IGFuZGVyZQ== 32936
+JHN0bXQ= 32937
+X0NVUlJFTlQ= 32938
+IERpc2NvdmVy 32939
+JHJlcw== 32940
+Zm9ybWF0dGVy 32941
+SGE= 32942
+dmFuZ3N0 32943
+IGVtZXJnZQ== 32944
+44CC4oCd 32945
+IENhYmluZXQ= 32946
+LXNxdWFyZQ== 32947
+6YOo 32948
+IHJhZ2U= 32949
+IEFK 32950
+IFZU 32951
+c2hhZG93 32952
+IEZhaXRo 32953
+ZW5hbWVz 32954
+cHJldHR5 32955
+aGFzaWw= 32956
+cGFydHk= 32957
+IHZhcmNoYXI= 32958
+IGZvdG9z 32959
+IGFsdW0= 32960
+IEJlbGdpdW0= 32961
+LnlsYWJlbA== 32962
+IGRlag== 32963
+X251bWJlcnM= 32964
+IGh1 32965
+LnNldEFkYXB0ZXI= 32966
+IFVzdWFsbHk= 32967
+KHNhbXBsZQ== 32968
+LlNoYXJlZA== 32969
+IGJvb2tlZA== 32970
+ID4+PQ== 32971
+IG1pbmVyYWxz 32972
+Ij48Pz0= 32973
+IGFkanVzdG1lbnRz 32974
+IERM 32975
+IHZpYnJhbnQ= 32976
+IERlcGVuZGVuY3k= 32977
+IHphcA== 32978
+L1g= 32979
+IGZvbnRz 32980
+dHJpcA== 32981
+0LjRhw== 32982
+IHR1YmVz 32983
+Y2xhbWF0aW9u 32984
+IOun 32985
+IHByb3RhZ29u 32986
+b3Vwb24= 32987
+IEJydXNo 32988
+KHByZWQ= 32989
+b3VybmV5 32990
+J10pLT4= 32991
+cHJvZw== 32992
+Ym9v 32993
+X21k 32994
+X3BhY2s= 32995
+KGV4cHJlc3M= 32996
+dXR6 32997
+XEF1dGg= 32998
+LGlk 32999
+IENoaWxl 33000
+YWN0aWNl 33001
+IHJlY3J1aXRtZW50 33002
+IHBvc2Vz 33003
+IHZ1bG5lcmFiaWxpdHk= 33004
+aW5zdGFuYw== 33005
+b3J1bQ== 33006
+ZGVzcw== 33007
+IHhs 33008
+JSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSU= 33009
+KGZpZw== 33010
+IGRlbGV0aW5n 33011
+LmRlbA== 33012
+KScpCg== 33013
+IFdlZWtseQ== 33014
+Pz8/ 33015
+KHN0cmNtcA== 33016
+c21pdGg= 33017
+IHB1cnN1aW5n 33018
+LXNv 33019
+IEFwcHM= 33020
+LycK 33021
+IGRlY2lz 33022
+Rk9SRQ== 33023
+RXZlcnlvbmU= 33024
+IGxhbmVz 33025
+VmlydHVhbA== 33026
+LmF0dGFjaA== 33027
+KExvZw== 33028
+IE1lZGljYWlk 33029
+KFBhdGg= 33030
+IFR1cm5lcg== 33031
+L2FwcGxpY2F0aW9u 33032
+IHBvcnRyYWl0 33033
+IG9wcG9zZQ== 33034
+Y2hlY2tvdXQ= 33035
+IGZpbmlzaGVz 33036
+X01F 33037
+QmFycmllcg== 33038
+U29uZw== 33039
+VkFS 33040
+RWFybGllcg== 33041
+cmVsbGE= 33042
+IGhhc3Q= 33043
+YXphcg== 33044
+IHB1bGxz 33045
+bmd4 33046
+IGluc3BpcmluZw== 33047
+0YPRjg== 33048
+LWRpcmVjdGlvbg== 33049
+IGV4cGxvc2l2ZQ== 33050
+IGNyZWF0ZWRBdA== 33051
+c3Rv 33052
+IHdoZWF0 33053
+IEJ1aWx0 33054
+J2Fp 33055
+IHRyYWNrZWQ= 33056
+aGFtbWFk 33057
+Um93QXRJbmRleFBhdGg= 33058
+X2hlYXA= 33059
+RHVl 33060
+IGNvbm5lY3Rz 33061
+LnB1Ymxpc2g= 33062
+ZW11 33063
+IGJ1bGxldHM= 33064
+QkFS 33065
+b2xhdGU= 33066
+IGludGVybmFsbHk= 33067
+IGNhdGNoaW5n 33068
+LXBhc3N3b3Jk 33069
+b3VjaGVk 33070
+5oCn 33071
+ZW91cw== 33072
+IHhyYW5nZQ== 33073
+UXVhbGl0eQ== 33074
+dnY= 33075
+TWFuYWdl 33076
+KCgk 33077
+YWNlbWVudHM= 33078
+IEJyb3RoZXJz 33079
+IEhFQUQ= 33080
+IFVuc3VwcG9ydGVk 33081
+c2Fu 33082
+ZXNp 33083
+KioqCg== 33084
+IGFkYXB0YXRpb24= 33085
+IFdvcmtlcg== 33086
+J10v 33087
+LnNhdmVmaWc= 33088
+KHRyYW5z 33089
+2Kw= 33090
+bmVl 33091
+Q29ycmVjdA== 33092
+Li4uIikK 33093
+IHN1Ym1pdHRpbmc= 33094
+LXBhdGg= 33095
+CWxhc3Q= 33096
+aXNzYW4= 33097
+LnhsYWJlbA== 33098
+IFNlcGFy 33099
+L25v 33100
+X2Jlc3Q= 33101
+IE1pbGxz 33102
+X3NvY2s= 33103
+KGZsYWc= 33104
+IGRlc3RpbmF0aW9ucw== 33105
+ZW1wdGlvbg== 33106
+IEZBSUw= 33107
+5ZKM 33108
+IHJw 33109
+ZmFjdA== 33110
+CWxlbg== 33111
+REFZ 33112
+IHNlaXo= 33113
+X2RzdA== 33114
+bGlw 33115
+LkxpbmVhcg== 33116
+IEJhc2tldA== 33117
+JHQ= 33118
+JGk= 33119
+LWJyYW5k 33120
+IE5laWw= 33121
+IEVx 33122
+IHRob3U= 33123
+b2dlbmU= 33124
+IHNjaG9sYXJzaGlw 33125
+5pu0 33126
+IHN3bw== 33127
+YWdpbmF0b3I= 33128
+ZW5p 33129
+KGJvb2s= 33130
+IGJsaW5r 33131
+dGh1cw== 33132
+IGNhbmNlbGxhdGlvblRva2Vu 33133
+IFBhbGVzdGluaWFucw== 33134
+IHByb2ZpdGFibGU= 33135
+IGJhY2twYWNr 33136
+ZW5zb24= 33137
+PExvbmc= 33138
+IHBvb2xz 33139
+IHN0aWNrcw== 33140
+IHNwb2tlc3dvbWFu 33141
+QmVpbmc= 33142
+IEhlcml0YWdl 33143
+IE5pa2U= 33144
+U0hB 33145
+IE5vdEltcGxlbWVudGVkRXhjZXB0aW9u 33146
+JGNvcmU= 33147
+IFJpY28= 33148
+L2xhdGVzdA== 33149
+IEN6ZWNo 33150
+bmVyUmFkaXVz 33151
+KGxpbmVz 33152
+IHNlbWVzdGVy 33153
+IHdvdW5kcw== 33154
+UHJvY2VkdXJl 33155
+Lm1haWw= 33156
+KCkpOgo= 33157
+IGNvcnJpZA== 33158
+dGVyZWQ= 33159
+IE5DQUE= 33160
+IGdhbGF4eQ== 33161
+X2tpbmQ= 33162
+aWxr 33163
+IHRyYXM= 33164
+X1BPTA== 33165
+IEhldA== 33166
+IHJlZnVnZWU= 33167
+IHRlZW5hZ2U= 33168
+LmJpbmRpbmc= 33169
+cG9zdGFs 33170
+IGnDp2lu 33171
+IERhdGFUeXBl 33172
+6ZY= 33173
+eWNsZXJ2aWV3 33174
+LHZhbHVl 33175
+X2lkZW50aWZpZXI= 33176
+PGI= 33177
+IG91dGZpbGU= 33178
+DQogICAgDQo= 33179
+IGNyw6k= 33180
+IHJlc3BvbmRlbnRz 33181
+IEJlYXN0 33182
+Y2VsZWQ= 33183
+IGludGVyZg== 33184
+LXRoZW1l 33185
+Z2lm 33186
+IFJhbmdlcnM= 33187
+SVRBTA== 33188
+IGF1dGhlbnRpY2F0ZQ== 33189
+Q29tcGxldGlvbg== 33190
+dXJzb3Jz 33191
+IGNpbmVtYQ== 33192
+IGRpc2NvdXI= 33193
+IEphdw== 33194
+T0NLRVQ= 33195
+IHByYXllcnM= 33196
+IEx1aXM= 33197
+ZnJhZw== 33198
+PVsK 33199
+IGJyYXZl 33200
+X3Bvc2U= 33201
+Q2VydGlmaWNhdGU= 33202
+LWZl 33203
+aWZlcmF5 33204
+IEZsYWdz 33205
+Q29udGFpbmVyR2Fw 33206
+IENyaXQ= 33207
+UmVzdWx0U2V0 33208
+CWN1cg== 33209
+IGNvcnJlc3BvbmRz 33210
+U3RhZmY= 33211
+Lkh0dHBTZXJ2bGV0UmVxdWVzdA== 33212
+IG5ldXJvbnM= 33213
+IE1haW5BeGlzQWxpZ25tZW50 33214
+ZWRhcg== 33215
+IGdhZA== 33216
+X3BhcnRz 33217
+IM6y 33218
+IGZ4 33219
+L2ZpbGVz 33220
+IEJyb3M= 33221
+aGlwcw== 33222
+IGdsdWNvc2U= 33223
+IGZhcm1z 33224
+IG1lbnRhbGx5 33225
+cmVzdGF1cmFudA== 33226
+VGFibGVOYW1l 33227
+IE1lcmNlZGVz 33228
+LlZpc3VhbA== 33229
+IGFuY2g= 33230
+aW5hbGc= 33231
+X3J1bnRpbWU= 33232
+IHByb3ByaWV0YXJ5 33233
+IGludGVudGlvbnM= 33234
+aXpp 33235
+U2xpY2U= 33236
+OyI+PC8= 33237
+X1dPUkQ= 33238
+XE1pZ3JhdGlvbnM= 33239
+IEVOQUJMRQ== 33240
+X1BBUkFNRVRFUg== 33241
+IEJpc2hvcA== 33242
+LnN1YmplY3Q= 33243
+aWxsYXM= 33244
+Lm1hdHJpeA== 33245
+dXJyZW5jZXM= 33246
+Knk= 33247
+IGNvc3RseQ== 33248
+IENodWNr 33249
+IGNsb3Nlcw== 33250
+IE1pZ2h0 33251
+LXN0b3Jl 33252
+IG1hbGw= 33253
+aWV0ZW4= 33254
+LkFicw== 33255
+IGNvdXBsZWQ= 33256
+LmJhc2lj 33257
+IDo6Ojo6Ojo6 33258
+TWFrZXI= 33259
+Y2Fubm90 33260
+IGFjaA== 33261
+IEVsaQ== 33262
+4oiS 33263
+b3JuYQ== 33264
+IGNwcw== 33265
+IHRoZXJlb2Y= 33266
+IEB7 33267
+IE5TTXV0YWJsZUFycmF5 33268
+zr0= 33269
+cHJvZHVjdGl2ZQ== 33270
+U3F1YXJl 33271
+dGVtcHRz 33272
+IGVsaW1pbmF0ZWQ= 33273
+PE0= 33274
+IGNvbnNlcnZhdGl2ZXM= 33275
+IFN1cmc= 33276
+LnBhcg== 33277
+IEJ1Y2g= 33278
+KmI= 33279
+Rm9ydA== 33280
+Q29sb3Vy 33281
+IENoaQ== 33282
+ZWRpYw== 33283
+PnRydWU= 33284
+IE5ZQw== 33285
+IGJvcmVk 33286
+IERldGVjdA== 33287
+IGFwcGFy 33288
+IGplYW5z 33289
+IFRhaw== 33290
+SU9E 33291
+IEhvcnNl 33292
+KEZJTEU= 33293
+KD8= 33294
+cmlxdWU= 33295
+b3B0aW1pemVy 33296
+bmF0 33297
+bG95cw== 33298
+CVRva2Vu 33299
+b3VidGVk 33300
+dWVzcw== 33301
+b2NvYQ== 33302
+RGF0YU1lbWJlcg== 33303
+X1BPV0VS 33304
+Y2xhc3NMaXN0 33305
+UHVzaEJ1dHRvbg== 33306
+IFdpRmk= 33307
+LlN0cmVhbQ== 33308
+Lmd1aWxk 33309
+IG5vZw== 33310
+IFBvcnR1Z2Fs 33311
+IFVudGVy 33312
+UHJpbWl0aXZl 33313
+Ym9zcw== 33314
+IERldXRzY2g= 33315
+IGVyb3RpYw== 33316
+IHN0cmNvbnY= 33317
+LlRyeVBhcnNl 33318
+IGdyYW1z 33319
+LlN1Y2Nlc3M= 33320
+X3Br 33321
+IEhhcnZleQ== 33322
+LW1pbmRlZA== 33323
+LmNvdW50cnk= 33324
+W10i 33325
+IGFuZ2Vs 33326
+IGJlYXRz 33327
+IFZvcg== 33328
+aWxpbw== 33329
+Lm1hc3Rlcg== 33330
+c29tZXRoaW5n 33331
+IFBBQ0s= 33332
+KGlm 33333
+UmVxdWVzdEJvZHk= 33334
+IGFudGVz 33335
+L3dpZGdldA== 33336
+IG1vZG8= 33337
+IEFX 33338
+ZmluZGVy 33339
+IG9wdGltaXplZA== 33340
+IG1pc3NpbGVz 33341
+TkI= 33342
+CWludGVybmFs 33343
+dGV4 33344
+IFNyaQ== 33345
+IGRhbWFnaW5n 33346
+IE1haXM= 33347
+LUFsbG93 33348
+IFpo 33349
+LWFsdA== 33350
+ICkpOwoK 33351
+6Ik= 33352
+IGluZmx1ZW5jZXM= 33353
+IGNhdGFs 33354
+X1JFR0lTVEVS 33355
+IEFQSXM= 33356
+LWNlbnR1cnk= 33357
+IGJpb2xvZ3k= 33358
+IEFjdHVhbA== 33359
+IGhlZWxz 33360
+VFJBQ0U= 33361
+X0RJRw== 33362
+RGF0YXNldA== 33363
+IE1hdHRlcg== 33364
+IGNsYXNzaWZpZXI= 33365
+Lndpa2lwZWRpYQ== 33366
+IFJvZ2Vycw== 33367
+IGRvbmF0ZWQ= 33368
+cmF3bGVy 33369
+ZW5lbg== 33370
+IGNhc2lub3M= 33371
+b3J0YWw= 33372
+IHByaXZl 33373
+c3Bl 33374
+ZHVjZXJz 33375
+LmVw 33376
+IGdyYXNw 33377
+YWNqaQ== 33378
+IGRhaXJ5 33379
+IGJ1c2Vz 33380
+LmNvbW0= 33381
+Lmlucw== 33382
+IElSUw== 33383
+IEJlZXI= 33384
+YWRj 33385
+b2FyZA== 33386
+X01FVA== 33387
+ICcrJw== 33388
+cmFucw== 33389
+IGtpbmRh 33390
+IOKUgg== 33391
+IE1hdXI= 33392
+0LDQsw== 33393
+IGJhbmR3aWR0aA== 33394
+aWJ1cw== 33395
+IERpZmZlcmVudA== 33396
+KG1hdA== 33397
+IFJlc3VtZQ== 33398
+X1VOUw== 33399
+ZXN0YWJsaXNo 33400
+IGZvbmN0aW9u 33401
+U3Vic2NyaXB0aW9u 33402
+X2NvbXBhbnk= 33403
+IGxpZ2h0bHk= 33404
+LmNvbmZpcm0= 33405
+LnlhbWw= 33406
+IEJvb3N0 33407
+Q29tbWVyY2U= 33408
+LXRlbXBsYXRl 33409
+X0RFTEFZ 33410
+IEhJ 33411
+IG5hdmln 33412
+KFNlbmRlcg== 33413
+IEhT 33414
+XyIr 33415
+IFJFUVVFU1Q= 33416
+IHdpZmk= 33417
+PSIiCg== 33418
+XSktPg== 33419
+IHJvcGU= 33420
+IHZpb2xhdGVk 33421
+IGdsYW5jZQ== 33422
+IEt1cmQ= 33423
+IOiu 33424
+ZGVjaw== 33425
+IElTQk4= 33426
+IGluZmVjdA== 33427
+IEZvbw== 33428
+IGdldHRlcg== 33429
+IHRlbmVy 33430
+YXBwZQ== 33431
+Lmho 33432
+X2hvdA== 33433
+PEFN 33434
+cG9seQ== 33435
+ISIsCg== 33436
+IGNvbnZlcnRpbmc= 33437
+IFdXRQ== 33438
+Uk9T 33439
+KCd7 33440
+Q29tbWl0 33441
+KUw= 33442
+IE9yZQ== 33443
+IHNwYXJzZQ== 33444
+IGRpc3Bvc2Fs 33445
+IGNhbmNlbGVk 33446
+5ZCO 33447
+IGFlcg== 33448
+IHZpbnls 33449
+4buD 33450
+cmVjb2du 33451
+YXJraW5n 33452
+IHRyaWNreQ== 33453
+KnM= 33454
+IHByb2NlZWRz 33455
+IGlzbw== 33456
+IGNvY29udXQ= 33457
+IGNyYWZ0ZWQ= 33458
+SUVMRFM= 33459
+IHF1ZXN0bw== 33460
+IGNvbW11bg== 33461
+X0NPTk5FQ1Q= 33462
+IHRyYWZmaWNraW5n 33463
+RGVlcA== 33464
+YcOnw7Vlcw== 33465
+Y29kaWdv 33466
+dmVhdQ== 33467
+IGJldHJheQ== 33468
+aW50YQ== 33469
+VEVE 33470
+w6Zy 33471
+bWFydA== 33472
+X0JVUw== 33473
+L3Nj 33474
+aWFsbHk= 33475
+IGNpZ2FyZXR0ZXM= 33476
+6K+B 33477
+KG5u 33478
+IG1vZGVsaW5n 33479
+L3Byb2R1Y3Rz 33480
+d2Fybg== 33481
+IG1ldHJv 33482
+IEl2 33483
+Jik= 33484
+IENhYmxl 33485
+zrs= 33486
+Q29tcGFyaXNvbg== 33487
+Z2FyeQ== 33488
+IEJB 33489
+UEFSVA== 33490
+IHB2 33491
+X3VwZGF0ZWQ= 33492
+Q3JlZGl0 33493
+b3J0aHk= 33494
+b2JzZXJ2YWJsZQ== 33495
+IHRoZWF0cmU= 33496
+QkxF 33497
+O30KCg== 33498
+bGF1bmNo 33499
+X3N0cmluZ3M= 33500
+dWdv 33501
+IFJQRw== 33502
+LWF1dGg= 33503
+0KA= 33504
+aG9sbQ== 33505
+IFBhbmQ= 33506
+VWlk 33507
+IGltcGx5 33508
+7Jy8 33509
+J109Jw== 33510
+L1VzZXI= 33511
+IHN0cmNhdA== 33512
+0L3Ri9C5 33513
+RGF0YUFkYXB0ZXI= 33514
+IGxhbmRzYw== 33515
+IGRpcGxvbWF0aWM= 33516
+77yT 33517
+KioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKg== 33518
+IENoaWNrZW4= 33519
+IGJjcnlwdA== 33520
+LkluZg== 33521
+W2NvbA== 33522
+IFF1YW50aXR5 33523
+LXBvc2l0aW9u 33524
+IGRpZXRhcnk= 33525
+IGZpbG1t 33526
+SXNyYWVs 33527
+UHJldg== 33528
+IE1pbGxpb24= 33529
+IHJlbWVk 33530
+IGJpbGxpbmc= 33531
+IG91dGRvb3Jz 33532
+LnRt 33533
+IG5hZA== 33534
+Rm9yZw== 33535
+Wlo= 33536
+IHNzbA== 33537
+XSwn 33538
+S1Q= 33539
+ZnJlcQ== 33540
+PWRvY3VtZW50 33541
+Ymx1cg== 33542
+rLg= 33543
+IEplZmZlcnNvbg== 33544
+Q3M= 33545
+KHNhdmU= 33546
+IHN0cmFw 33547
+SW5kaWE= 33548
+IGlkZW9sb2d5 33549
+Qk9TRQ== 33550
+IEZQ 33551
+KGFucw== 33552
+IGZldmVy 33553
+IFlhbQ== 33554
+S2luZw== 33555
+4LI= 33556
+QVRJTkc= 33557
+Ym9oeWRy 33558
+cm9sbGJhY2s= 33559
+IG5ld05vZGU= 33560
+IE5WSURJQQ== 33561
+IGhvbm91cg== 33562
+IENvbmZpcm0= 33563
+eGJk 33564
+IHN1Y2Nlc3Nvcg== 33565
+L3U= 33566
+bGl2 33567
+b3VybmFtZW50cw== 33568
+QXR0YWNobWVudA== 33569
+IGdydXA= 33570
+IHRyaWJl 33571
+IGNhcmVz 33572
+ZWZ0 33573
+X3NhbWU= 33574
+J2xhYmVs 33575
+IOOAkA== 33576
+TW90b3I= 33577
+IGluZXhw 33578
+ICIoIg== 33579
+X1BPU0lUSU9O 33580
+IHZhbGxleQ== 33581
+IFJlc3VsdFNldA== 33582
+IHByZXNlcnZlZA== 33583
+IG11dGF0aW9ucw== 33584
+IHF1ZXN0aW9uaW5n 33585
+bXVuaXRpb24= 33586
+cGFyc2VJbnQ= 33587
+IFNy 33588
+IE1ldGFkYXRh 33589
+4oCd77yM 33590
+dGltZXN0YW1wcw== 33591
+IHRyYW5zaXRpb25z 33592
+7Zk= 33593
+0Yo= 33594
+aW9t 33595
+LkRv 33596
+IHBpbmU= 33597
+IGZ1bmc= 33598
+IHRyYW5zbWl0dGVk 33599
+Y3RpbWU= 33600
+IEZhbQ== 33601
+UmV2aXNpb24= 33602
+QmFz 33603
+VVBFUg== 33604
+RGVzdGluYXRpb24= 33605
+dG9IYXZlQmVlbkNhbGxlZA== 33606
+IHVuZm9ydHVuYXRl 33607
+SU5FUw== 33608
+X3Byb2Y= 33609
+QW1vbmc= 33610
+IEN5YmVy 33611
+IEJhdHRlcnk= 33612
+Z2VucmU= 33613
+IFZpZXdNb2RlbA== 33614
+LT0= 33615
+IHV0aWxpemVk 33616
+cGFpbnQ= 33617
+LkludGVnZXJGaWVsZA== 33618
+ZXJuaXR5 33619
+Y29tcGlsZXI= 33620
+4oCLCgo= 33621
+IE1hc3RlcnM= 33622
+LlRvQXJyYXk= 33623
+IHN0cnRvbA== 33624
+IFVrcmFpbmlhbg== 33625
+fSkpOwo= 33626
+IHNoZW1hbGU= 33627
+IlRoYXQ= 33628
+Zm9yYWxs 33629
+L2Rvd25sb2Fk 33630
+IHJoZXRvcmlj 33631
+LmxhdGl0dWRl 33632
+IFdIRU4= 33633
+IHNob2NraW5n 33634
+SUZJQw== 33635
+Lk5vcm1hbA== 33636
+X0ZPTERFUg== 33637
+IGRyaWZ0 33638
+IG1vdW50aW5n 33639
+LWJvb2s= 33640
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAK 33641
+IFdpcmVsZXNz 33642
+PiIuJA== 33643
+IHJlbGllcw== 33644
+KENvbnNvbGU= 33645
+SW50ZXJuYXRpb25hbA== 33646
+LT57JA== 33647
+TWlk 33648
+IGRpc3NlcnQ= 33649
+ZGRz 33650
+IGRlcG9zaXRz 33651
+CWRyaXZlcg== 33652
+I2dh 33653
+cHJpc2luZw== 33654
+cHJpbnRsbg== 33655
+IHByZXNlbnRlcg== 33656
+IG1pbmVz 33657
+Q1NT 33658
+IER1YWw= 33659
+KCEo 33660
+IGthbQ== 33661
+IGlzTG9hZGluZw== 33662
+IFByb3RlY3Q= 33663
+LnVwcGVy 33664
+YXJpdW0= 33665
+XToKCgo= 33666
+WWlp 33667
+LXNoaXJ0 33668
+IElNQUdF 33669
+X2NvbG9ycw== 33670
+IHVyZ2VudA== 33671
+LkNvbnRhaW5lcg== 33672
+ISgK 33673
+U2F0dXJkYXk= 33674
+IHNvY2lldGllcw== 33675
+IFRoYW4= 33676
+IENvZA== 33677
+PUA= 33678
+IGF0dGFjaG1lbnRz 33679
+Lm1vYmlsZQ== 33680
+IHNwaXRl 33681
+IGJvdW5jZQ== 33682
+cmF3bA== 33683
+aW5zdGFuY2V0eXBl 33684
+IFRydWNr 33685
+IG1hbmlwdWxhdGlvbg== 33686
+KENvbmZpZw== 33687
+LWluc3Q= 33688
+IHN0b3I= 33689
+aXR1dGlvbg== 33690
+UHJlZmVycmVkR2Fw 33691
+IG1haW5BeGlzQWxpZ25tZW50 33692
+IGxpc3RlbmVk 33693
+JycnCgo= 33694
+b3R0YWdl 33695
+LXByb2plY3Q= 33696
+LkFQUExJQ0FUSU9O 33697
+CXJvb3Q= 33698
+IHdoaXQ= 33699
+IGJpbGRlcg== 33700
+IGtlcg== 33701
+IGFwcGxpYW5jZXM= 33702
+cm93YXZl 33703
+7J2A 33704
+ZW1hdGljcw== 33705
+IE9yZw== 33706
+b3Bpbmc= 33707
+X1NFQVJDSA== 33708
+IGNoYW0= 33709
+YWRkQ29udGFpbmVyR2Fw 33710
+ICgpLg== 33711
+IEFycm93 33712
+SWxsZWdhbA== 33713
+Q3VycmVudGx5 33714
+IHVzYQ== 33715
+IHBhc3N3b3Jkcw== 33716
+IHJlbm93bg== 33717
+YXZlcm4= 33718
+IEV2aWw= 33719
+IGNvbmNhdA== 33720
+IGR1bw== 33721
+IHZhbGU= 33722
+IEJlYW4= 33723
+IGluZGljYXRvcnM= 33724
+Y21hdGg= 33725
+IFB1bXA= 33726
+Tm92ZW1iZXI= 33727
+aWZpY2FudA== 33728
+X0RPTUFJTg== 33729
+cmVnYXI= 33730
+IFBvcnRhbA== 33731
+IiQ= 33732
+IGZvcm1lcmx5 33733
+Il06Cg== 33734
+IFZpc2liaWxpdHk= 33735
+LmdldEVsZW1lbnRzQnlDbGFzc05hbWU= 33736
+X1JFRA== 33737
+IGNoYW1waW9ucw== 33738
+4LQ= 33739
+VmFsb3I= 33740
+X2Vz 33741
+KmE= 33742
+LXJlcGVhdA== 33743
+QmFuZA== 33744
+LnN0YWdl 33745
+IGJ1cmVhdWM= 33746
+Q250 33747
+ZXRlbg== 33748
+LWZ1bmN0aW9u 33749
+IG11aXRv 33750
+UElE 33751
+X2VkaXRvcg== 33752
+IGNyYXNoZWQ= 33753
+ZGVhZA== 33754
+a2F0 33755
+YWdo 33756
+IEVYVA== 33757
+YXNzZXI= 33758
+LXNtYWxs 33759
+IHJlYWxpeg== 33760
+KEVudGl0eQ== 33761
+w7pz 33762
+IEFjdHVhbGx5 33763
+IEVsaXRl 33764
+IGhlbG0= 33765
+KG5vbmF0b21pYw== 33766
+YXNoZXI= 33767
+Q29tbXVuaXR5 33768
+YWxsZW5n 33769
+aXJ5 33770
+IEdyb3d0aA== 33771
+IHN1ZQ== 33772
+IGZyZXF1ZW5jaWVz 33773
+X2Rlc2NyaXB0b3I= 33774
+LkF0dHJpYnV0ZQ== 33775
+IHJlY2lwaWVudHM= 33776
+X05T 33777
+LyIr 33778
+aWJhbg== 33779
+IGF0aGxldGU= 33780
+IElnbg== 33781
+X0RNQQ== 33782
+KGRz 33783
+IFJlcXVpcmVtZW50cw== 33784
+QURJ 33785
+ZXJleg== 33786
+XEFkbWlu 33787
+YnJhc2th 33788
+IFJ1c3Q= 33789
+UmVsYXRpb24= 33790
+Q09E 33791
+IFZFUlNJT04= 33792
+ZW1tYQ== 33793
+KSl7 33794
+LkR1cmF0aW9u 33795
+IENhbWI= 33796
+LWxvZ28= 33797
+IHJlYWRhYmxl 33798
+IGNyZWF0b3Jz 33799
+KCldOwo= 33800
+VXBEb3du 33801
+LWhhbGY= 33802
+LmdldE1vbnRo 33803
+KHNm 33804
+UGlj 33805
+IGh1bmdlcg== 33806
+LnR4 33807
+IGV4Y2VlZGVk 33808
+X3NlZWQ= 33809
+KF4= 33810
+X3Nr 33811
+LnBlcmZvcm0= 33812
+ID46Og== 33813
+IG1vbmdv 33814
+PWZsb2F0 33815
+YmluZFBhcmFt 33816
+U21hcnQ= 33817
+aWZh 33818
+IHNlY3VyaXRpZXM= 33819
+IHByZWp1ZA== 33820
+ICwi 33821
+IGNvcnBz 33822
+IHZyYQ== 33823
+YW1hY2FyZQ== 33824
+aXRlcnI= 33825
+KE1lZGlh 33826
+dWNoZQ== 33827
+IGNvYg== 33828
+IGxpYmVy 33829
+Lmdlb21ldHJ5 33830
+TG9jYXRvcg== 33831
+IHNsaWRpbmc= 33832
+IHN1cmdpY2Fs 33833
+X0NVUg== 33834
+IGNvbnNlY3Q= 33835
+Wyo= 33836
+IFJlc29ydA== 33837
+U3R1Yg== 33838
+X0RPVUJMRQ== 33839
+IFNvcGg= 33840
+IGVsZWN0b3JhbA== 33841
+X2Rpc2FibGU= 33842
+INGB0L4= 33843
+IExpZ2h0bmluZw== 33844
+IG1lbnRpb25z 33845
+b2N5 33846
+IGxlYWtlZA== 33847
+IHJlbGF4aW5n 33848
+UHJlc2VudGVy 33849
+dnNw 33850
+IGd1aWx0 33851
+PS09LQ== 33852
+LnJlcGx5 33853
+IE1pcnJvcg== 33854
+Q2FtcA== 33855
+ICsjKyMrIys= 33856
+ICsjKyMrIysjKyMr 33857
+LkF1dGhvcg== 33858
+IGRpcmVjdGl2ZQ== 33859
+LWhvb2s= 33860
+7YSw 33861
+fQoKCgoK 33862
+QHB5dGVzdA== 33863
+X3JhbmQ= 33864
+bWlz 33865
+IGNvbG9yZnVs 33866
+dWpl 33867
+bGFzc2Vz 33868
+IENsYXNzZXM= 33869
+LmhhdmU= 33870
+JSks 33871
+6aKY 33872
+IGRpc3R1cmJpbmc= 33873
+c3Vic3RyaW5n 33874
+IEtvaA== 33875
+SW52ZXN0 33876
+cHVyY2hhc2U= 33877
+IHJlY3ljbGluZw== 33878
+IEFSVA== 33879
+aWVyYXJjaHk= 33880
+IGZwcw== 33881
+LmNoZWNrQm94 33882
+7ZW0 33883
+X21hdGVyaWFs 33884
+ZHVjYXRpb24= 33885
+IGZ3 33886
+dWRpdA== 33887
+IHJldmlld2luZw== 33888
+IFNpZA== 33889
+U3ludGF4 33890
+IFdyaXR0ZW4= 33891
+YXJnYXI= 33892
+VU1F 33893
+L3E= 33894
+Q2xhc3NpZmllcg== 33895
+T2ZmaWNpYWw= 33896
+IGpheno= 33897
+IG9tZWdh 33898
+UGh5c2ljcw== 33899
+IGx1Z2Fy 33900
+X2FjY2Vzc29y 33901
+LmNvbW1hbmRz 33902
+QWJpbGl0eQ== 33903
+IEJhdGNo 33904
+UkFN 33905
+IGVuY291bnRlcnM= 33906
+LlF1 33907
+QllURQ== 33908
+IERpc3RyaWJ1dGlvbg== 33909
+IHVzbw== 33910
+IFJlY292ZXJ5 33911
+YXBwcm92ZWQ= 33912
+IGRlbmlhbA== 33913
+L3NoYXJl 33914
+TGlua2VkTGlzdA== 33915
+KQ0KDQoNCg== 33916
+dWRkeQ== 33917
+IGZpbmVz 33918
+IHJ5 33919
+VW5pY29kZQ== 33920
+CXJlbmRlcg== 33921
+IHByZW1pc2Vz 33922
+IHBvbg== 33923
+YWxpYXNlcw== 33924
+L0ZvdW5kYXRpb24= 33925
+Y3VkYQ== 33926
+IENvY2s= 33927
+LDop 33928
+KGZvbGRlcg== 33929
+IG3DqWQ= 33930
+ZHJhZw== 33931
+IHRhbGVudHM= 33932
+ICAgCgo= 33933
+0LXRgdGC0LI= 33934
+bW9i 33935
+LnltbA== 33936
+IGFzdGVy 33937
+IGRpc2NyZQ== 33938
+Z29hbA== 33939
+IEdUWA== 33940
+IFNVQ0NFU1M= 33941
+IExPTkc= 33942
+KGZpbmQ= 33943
+IHNpbmd1bGFy 33944
+X3N6 33945
+IEV0aGVyZXVt 33946
+Li4K 33947
+IGlycmVz 33948
+Jykpewo= 33949
+IG1pbmlzdGVycw== 33950
+U3RlcHM= 33951
+aXZlcnNhbA== 33952
+IE5ldmVydGhlbGVzcw== 33953
+LWxlZA== 33954
+ICglKQ== 33955
+56Gu 33956
+IHRpbWV6b25l 33957
+IHN0cmFuZ2Vy 33958
+KHJlbmRlcg== 33959
+IHNodXRpbA== 33960
+IG1waA== 33961
+IHRyaW8= 33962
+cHB5 33963
+IHByZWRvbWlu 33964
+IGVuZG9ycw== 33965
+IFJ1c3NpYW5z 33966
+CXJvdw== 33967
+IHdpemFyZA== 33968
+LnNlcmlhbGl6ZQ== 33969
+IGNvbXBsYWluZWQ= 33970
+IHNpZG8= 33971
+IGRlbGlnaHRlZA== 33972
+LW1l 33973
+IFJhdg== 33974
+SHVtYW4= 33975
+YWRheXM= 33976
+cmVjdg== 33977
+V29ya2luZw== 33978
+SnVtcA== 33979
+IMOlcg== 33980
+IEF1dG9tYXRpYw== 33981
+X0Jhc2U= 33982
+5qC8 33983
+YXVyYW50cw== 33984
+wq8= 33985
+5rg= 33986
+KENUeXBl 33987
+SUZJ 33988
+KGFtb3VudA== 33989
+IGJlbGlldmluZw== 33990
+PW15c3Fs 33991
+IGZpcg== 33992
+IHJlc3RvcmF0aW9u 33993
+ZXJlY28= 33994
+0KI= 33995
+Xycr 33996
+IGVib29r 33997
+IGRlYnJpcw== 33998
+KGlucHV0cw== 33999
+QVlPVVQ= 34000
+IHNjcmVhbWluZw== 34001
+YXZpYQ== 34002
+bGFuZGVy 34003
+IGRpc3RyZXNz 34004
+IGFzc2VtYmxlZA== 34005
+IEF2b2lk 34006
+KHRocmVhZA== 34007
+IFJQQw== 34008
+X0VYSVQ= 34009
+KHF1ZXVl 34010
+0LjRgdGC 34011
+RGxs 34012
+IHNrdWxs 34013
+X3B1Yg== 34014
+Y2hleg== 34015
+bWluYXRl 34016
+ZW5zZW4= 34017
+IGluc2FuZQ== 34018
+Ym91bmRz 34019
+IFJvc2Vu 34020
+IGNvbmRpdGlvbmluZw== 34021
+cHJvY2Vzc2Vk 34022
+dmlkZW9z 34023
+Zm91cg== 34024
+LkNvbnY= 34025
+fDsK 34026
+UGVyc29uYWw= 34027
+Y2VycHQ= 34028
+OlVJQ29udHJvbFN0YXRlTm9ybWFs 34029
+IGRvc2Vz 34030
+IEthcmw= 34031
+IEZyZXF1 34032
+LkJBU0U= 34033
+IFZvdGU= 34034
+IGNvbmN1cnJlbnQ= 34035
+IE1lc3NhZ2VCb3hJY29u 34036
+IMOW 34037
+IER1YmFp 34038
+IFJldGFpbA== 34039
+Om51bWJlcg== 34040
+IE9ic2VydmVy 34041
+IEJpZ0ludGVnZXI= 34042
+X29yaWdpbg== 34043
+X1dPUks= 34044
+RnJhbWVz 34045
+IG5vdGFibHk= 34046
+LuKAnA== 34047
+IHRyb3BpY2Fs 34048
+IG5pY2hl 34049
+YW1pbmE= 34050
+LnN5cw== 34051
+KHRva2Vucw== 34052
+bW9kaWZ5 34053
+b3NpdA== 34054
+c3Ryb20= 34055
+IENvbWljcw== 34056
+T1BUSU9O 34057
+VGlja2V0 34058
+IGZhY3Rvcmllcw== 34059
+IGRpc3B1dA== 34060
+X0ZpbGU= 34061
+IEZpbm4= 34062
+ZWVl 34063
+IERpc2NvcmQ= 34064
+X21vbmV5 34065
+LnRwbA== 34066
+X3NhZmU= 34067
+TEI= 34068
+IGdsdXQ= 34069
+Sks= 34070
+LmZsb3c= 34071
+LWNvbnQ= 34072
+Z29z 34073
+IGhvcml6b24= 34074
+IFJ1c2g= 34075
+Ojoq 34076
+UGlwZQ== 34077
+dWxsYQ== 34078
+Ym9yb3VnaA== 34079
+aGVpbWVy 34080
+KG1vdmU= 34081
+KFRleHQ= 34082
+fSk7DQoNCg== 34083
+d2VsY29tZQ== 34084
+IENvbXBvbmVudHM= 34085
+IGdvdmVybmFuY2U= 34086
+Y2xvc2Vk 34087
+CW1hcmdpbg== 34088
+IGxhdW5kcnk= 34089
+IFRlcm1pbmFs 34090
+aXphcmRz 34091
+LuKAlA== 34092
+LnJlbW90ZQ== 34093
+LnJhZGl1cw== 34094
+IFF1ZWJlYw== 34095
+IGRo 34096
+VGVjaA== 34097
+IE1pc3Q= 34098
+c2VsbGVy 34099
+X2xpdGVyYWw= 34100
+IGdlbml1cw== 34101
+IGJyYWlucw== 34102
+Z2Vt 34103
+IE1lYXN1cmU= 34104
+IGNhdGFzdA== 34105
+cmFuY2U= 34106
+LlRleHRGaWVsZA== 34107
+IGNvbnN1bWluZw== 34108
+ICdcJyc= 34109
+b3VidGVkbHk= 34110
+IENlcnRhaW4= 34111
+RXY= 34112
+ZXJ0aQ== 34113
+YmVpbmc= 34114
+RXhwZXJpZW5jZQ== 34115
+IC8vWw== 34116
+IEFyYWJpYw== 34117
+IENyaXN0 34118
+IEF6dXJl 34119
+IGhvcmE= 34120
+bGFkZXNo 34121
+XEJsdWVwcmludA== 34122
+ZGFy 34123
+LnJlbA== 34124
+IHN1cHJlbQ== 34125
+IFJlYWdhbg== 34126
+IEF0dHJpYnV0ZXM= 34127
+LXNpZGViYXI= 34128
+IHVzZVN0eWxlcw== 34129
+IEFpcmxpbmVz 34130
+IGhpbGxz 34131
+L3hodG1s 34132
+dmluYw== 34133
+X21vY2s= 34134
+CiAgICAgICAgICAgICAgICAK 34135
+IFBpbGw= 34136
+LkxheW91dFN0eWxl 34137
+IENvbW1hbmRlcg== 34138
+XTw= 34139
+c2lnbmF0dXJl 34140
+IHt9DQo= 34141
+IGhhdHJlZA== 34142
+IOuL 34143
+b2xlc3Rlcm9s 34144
+ICoqKioqKioq 34145
+YW5jZWxsb3I= 34146
+Y3JvcA== 34147
+VElN 34148
+CQkKCg== 34149
+eXNxbGk= 34150
+dWl0aXZl 34151
+CXVuc2V0 34152
+X3NlbA== 34153
+IG1lbnVz 34154
+dGljaw== 34155
+IGNvbnN0aXR1dGU= 34156
+IEVsZW1lbnRz 34157
+IFJlZGlz 34158
+YWdnaW8= 34159
+X2Zw 34160
+X2RlcGVuZA== 34161
+ZW1hcw== 34162
+Q0FTVA== 34163
+b3Jhbmdl 34164
+am9u 34165
+IEVtaWx5 34166
+IHBvdGF0b2Vz 34167
+IHJlY2VwdG9y 34168
+IEVsZWN0cm9uaWM= 34169
+IExpZ2h0cw== 34170
+IGNvbWJpbmluZw== 34171
+IFNvbWVvbmU= 34172
+ICMjIyMjIyMjLg== 34173
+IFRPRA== 34174
+L3Nob3c= 34175
+WGQ= 34176
+LiIn 34177
+YWZ4 34178
+IHRyYWdpYw== 34179
+U3R5bGVk 34180
+IE1hcmNv 34181
+R2FsbGVyeQ== 34182
+ZGFsZQ== 34183
+LuKAnQoKCgo= 34184
+w6lyaWU= 34185
+L3NlcnZpY2U= 34186
+5LqG 34187
+IGFtYmllbnQ= 34188
+X1NFVFRJTkdT 34189
+LkFkYXB0ZXI= 34190
+bGVuZQ== 34191
+IHRyYXZlbHM= 34192
+Tm90aWNl 34193
+IGNsZWFucw== 34194
+IEZlbQ== 34195
+Y2hhaXI= 34196
+0YPQvQ== 34197
+L215 34198
+X2JhZA== 34199
+IEVjb25vbWljcw== 34200
+SVNB 34201
+X0NOVA== 34202
+KE1lbnU= 34203
+5LqO 34204
+IFJpZGdl 34205
+IGxlbmd0aHk= 34206
+RG90 34207
+IGp1bXBz 34208
+IGhleQ== 34209
+JHBkZg== 34210
+IHdvcm0= 34211
+IHN1dA== 34212
+IHNoZXI= 34213
+aWFtbw== 34214
+IENhbGM= 34215
+dHJpZXZl 34216
+IGNvcHM= 34217
+IENocm9t 34218
+IHJlZ3VsYXRlZA== 34219
+cmVhdG1lbnQ= 34220
+IEhpZ2hlcg== 34221
+b2tz 34222
+IGRlemU= 34223
+TE9DQVRJT04= 34224
+b25nc1Rv 34225
+IGZpbml0ZQ== 34226
+IHZhcmllcw== 34227
+IHBvc2l0aW9uZWQ= 34228
+J2ls 34229
+6YeR 34230
+IGhpa2U= 34231
+KGRvbmU= 34232
+cGxheWxpc3Q= 34233
+IGFkYQ== 34234
+IGNvYXN0YWw= 34235
+IE5hbmN5 34236
+LkRhdGVUaW1lRmllbGQ= 34237
+Q3BwQ29kZUdlbg== 34238
+IFNpbWlsYXJseQ== 34239
+cmV1cg== 34240
+IENvbnRy 34241
+IEhpZGRlbg== 34242
+IEJldGE= 34243
+YXRjaGVk 34244
+X2luc3RhbGw= 34245
+Lk91dHB1dA== 34246
+TG9va3Vw 34247
+IFJpY2htb25k 34248
+cXVhcmVk 34249
+IG1hbmdh 34250
+LWNvbnRyb2xz 34251
+IEJlcm5hcmQ= 34252
+TGFyZ2U= 34253
+IHNsaWNlcw== 34254
+IG9mZmVuY2U= 34255
+IE1lZ2E= 34256
+IGVzdGFy 34257
+IGpvaW50cw== 34258
+IHN1bW0= 34259
+X3BsYXRmb3Jt 34260
+QnVmZg== 34261
+LmFkZFN1YnZpZXc= 34262
+IHJldGFpbmVk 34263
+TGV0dGVy 34264
+LmRpbQ== 34265
+IGVzc2VyZQ== 34266
+IFNjYWZmb2xk 34267
+RVhQRUNU 34268
+CVJF 34269
+LmxvbmdpdHVkZQ== 34270
+w7xuZA== 34271
+IHN0YXR1ZQ== 34272
+LmFkZFdpZGdldA== 34273
+IENhcmliYmVhbg== 34274
+YWRkUHJlZmVycmVkR2Fw 34275
+aWxkZQ== 34276
+VUlMYWJlbA== 34277
+IE9wcG9ydA== 34278
+IGltcGVyaWFs 34279
+dXJzaW9u 34280
+IG1hbmRhdGU= 34281
+IHByb21vdGlvbmFs 34282
+IHZr 34283
+aWHFgg== 34284
+IHB5bA== 34285
+IENyZWF0aW9u 34286
+0L7Qt9C0 34287
+IHNpbXBsZXI= 34288
+LndoYXQ= 34289
+IFJlY2VudA== 34290
+U3Rvcm0= 34291
+LnF1YW50aXR5 34292
+IExvdg== 34293
+Ii0= 34294
+dWJibGVz 34295
+X25vdGlmaWNhdGlvbg== 34296
+KHdvcmxk 34297
+dXJnZXI= 34298
+Kigt 34299
+OiIK 34300
+aG0= 34301
+YW5zaGlw 34302
+IEFsbW9zdA== 34303
+IG1vdG9yY3ljbGU= 34304
+X2ZlZQ== 34305
+IGFic29yYg== 34306
+IFZpbmNlbnQ= 34307
+IHNvdW5kZWQ= 34308
+w61zdA== 34309
+IHBoYXJtYWNldXRpY2Fs 34310
+aHRhZw== 34311
+IEtpbmRsZQ== 34312
+aXRhbGl6ZQ== 34313
+IEVtcGVyb3I= 34314
+b3VzdGlj 34315
+IHNwZWNpYWxpc3Rz 34316
+5YWs 34317
+Qm9yZGVyU3R5bGU= 34318
+L1w= 34319
+UkVMQVRFRA== 34320
+KCcsJyw= 34321
+KGV4cHI= 34322
+IGh0 34323
+5Y2I 34324
+X0NyZWF0ZQ== 34325
+IHNwZWNpYWxseQ== 34326
+IFtdOw0K 34327
+IGhlZWw= 34328
+IHNlcHQ= 34329
+X2FyY2g= 34330
+KGluaXRpYWw= 34331
+JS4KCg== 34332
+XCIsXCI= 34333
+IGRpc2N1c3Nlcw== 34334
+IHVwdA== 34335
+IFsm 34336
+IG1hbnVz 34337
+LmhhbmQ= 34338
+IE1BSU4= 34339
+IERlbm1hcms= 34340
+IF0sDQo= 34341
+IGNyeXN0 34342
+IG5hY2s= 34343
+Q29vcmRz 34344
+X2lubmVy 34345
+IG1pZHN0 34346
+IGF3YWtl 34347
+INCe 34348
+LWJyZWFr 34349
+w612ZWw= 34350
+X1BBU1M= 34351
+IFBhcmFtcw== 34352
+IGRldHI= 34353
+IHNwaWRlcg== 34354
+IENvbmNlcHQ= 34355
+IHByZW5k 34356
+Q0hFRA== 34357
+LkV4aXQ= 34358
+IHBvcHVsYXRlZA== 34359
+IHZpcnR1ZQ== 34360
+X1NFU1NJT04= 34361
+IG5vdXZlbA== 34362
+b2F1dGg= 34363
+INC00LDQvdC90Ys= 34364
+cmluaw== 34365
+LkhlYWRlclRleHQ= 34366
+YXR1cmF0ZWQ= 34367
+IGVyc3Q= 34368
+IOWF 34369
+4KWH 34370
+X3Zpc2libGU= 34371
+ZXllcg== 34372
+IGxpYWJsZQ== 34373
+IGRlYmU= 34374
+IGJ3 34375
+ey0j 34376
+X1dJTg== 34377
+ZGZz 34378
+SG92ZXI= 34379
+IFBVVA== 34380
+LWFuZ2xl 34381
+IG5vYmxl 34382
+IHRyYWNlcw== 34383
+ZW5jdg== 34384
+IHVzZXJEYXRh 34385
+X2lucw== 34386
+IFN1eg== 34387
+IG5ld3NsZXR0ZXJz 34388
+IE1vZGk= 34389
+IGVudHJlcHJlbmV1cnM= 34390
+IHRyaWJ1dGU= 34391
+IHJ1bW9ycw== 34392
+IHJy 34393
+IFF1YXJ0ZXI= 34394
+6rOg 34395
+IGZlZWRz 34396
+w7Nn 34397
+IGVudmVsb3Bl 34398
+IGxlYXI= 34399
+IGvDuA== 34400
+ZGV2ZWxvcGVy 34401
+U2ltaWxhcg== 34402
+OiIpCg== 34403
+c3Vic2NyaXB0aW9u 34404
+TW9kaWZpZXI= 34405
+aXRhbGlj 34406
+IG5hc3R5 34407
+IHRlcm1pbmF0aW9u 34408
+IGNoYXJtaW5n 34409
+IOKf 34410
+dG9ucw== 34411
+LnRyYWNl 34412
+aG90cw== 34413
+IFVS 34414
+TW9udA== 34415
+IGp1c3RpZmllZA== 34416
+IEdhbmc= 34417
+aW5lYQ== 34418
+IGJvZw== 34419
+KGFw 34420
+XyQ= 34421
+IGNvbnRhbWlu 34422
+LkRvdA== 34423
+CURlYnVn 34424
+KGV4cG9ydHM= 34425
+IHBhaXJlZA== 34426
+IEFzc2lnbm1lbnQ= 34427
+IGF1dG9tb2JpbGU= 34428
+k40= 34429
+IHBoYXNlcw== 34430
+dnc= 34431
+QFN1cHByZXNzV2FybmluZ3M= 34432
+PVw= 34433
+cmFudA== 34434
+LWVk 34435
+CWF3YWl0 34436
+IGNlcnRpZmljYXRlcw== 34437
+Jz4i 34438
+IGludGFjdA== 34439
+Q1RSTA== 34440
+TWlrZQ== 34441
+Z3JlZ2F0aW9u 34442
+QVRURVJO 34443
+IHJlcHVibGlj 34444
+X3VwcGVy 34445
+aWxpYXJ5 34446
+IGNvbXB1dGF0aW9u 34447
+aGlyZQ== 34448
+IFNoaW4= 34449
+X0FOWQ== 34450
+IE1hbnVmYWN0dXJlcg== 34451
+IENhcm0= 34452
+IGJlYXJpbmdz 34453
+X2NvbWI= 34454
+Y2Fk 34455
+dXJpc3RpYw== 34456
+IHdob2xlc2FsZQ== 34457
+IGRvbm9y 34458
+LmludGVyZmFjZXM= 34459
+cHJlc3Nv 34460
+IEJydW4= 34461
+LWNsb3Nl 34462
+cHJvdmU= 34463
+X1NL 34464
+CWZyYW1l 34465
+ZXRyb3M= 34466
+IFBhaW4= 34467
+X0VYUA== 34468
+IExU 34469
+X2Zz 34470
+LmRhdGFz 34471
+CXNz 34472
+dm9pcg== 34473
+IEF4aXM= 34474
+TWFqb3I= 34475
+PSI8 34476
+W2g= 34477
+IHByb2Zlc3M= 34478
+aWdyYXRl 34479
+KHNjb3Jl 34480
+S2V5d29yZA== 34481
+Im9z 34482
+ICAgIAkK 34483
+YW5hbHlzaXM= 34484
+IHJlcGxheQ== 34485
+LnBhc3M= 34486
+XGQ= 34487
+dGxz 34488
+IHNhbmN0 34489
+LmxpZ2h0 34490
+X21vYmlsZQ== 34491
+0YHRgtGM 34492
+CXRvdGFs 34493
+dWl0eQ== 34494
+IHBhdXNlZA== 34495
+TkFT 34496
+IGVuY29yZQ== 34497
+bG9l 34498
+IC0qLQoK 34499
+LmhpZ2g= 34500
+YW1wbGVy 34501
+IFNlY3VyZQ== 34502
+IGZyYWdtZW50cw== 34503
+X3ZlbA== 34504
+aWxsYXJ5 34505
+IFN0ZWlu 34506
+IERhd24= 34507
+IG1heGltaXpl 34508
+4Lii 34509
+IC9e 34510
+IGNvbnRpbnVhbGx5 34511
+IHNoYWRvd3M= 34512
+CSAgICAgICAgICAgICAgICAgICA= 34513
+IElBY3Rpb25SZXN1bHQ= 34514
+IGluZm9ybWFjacOzbg== 34515
+Q0hFQ0s= 34516
+LlNlbGVjdGVkSXRlbQ== 34517
+YnVuZGxl 34518
+b2xsZXk= 34519
+PEludA== 34520
+QUlORVI= 34521
+IFdpbmc= 34522
+dGl0bGVz 34523
+b3VudGFpbg== 34524
+Q1k= 34525
+IExvY2FsZQ== 34526
+Zm9ybWVy 34527
+PGNvbnRleHQ= 34528
+UmFkaW9CdXR0b24= 34529
+X3NjaGVkdWxl 34530
+IGZhYnVsb3Vz 34531
+Um9iZXJ0 34532
+X1BST0ZJTEU= 34533
+IGdhdGVz 34534
+SU1Q 34535
+IFBlbnRhZ29u 34536
+Z29sZA== 34537
+YmFjaA== 34538
+ZW1wbG95ZWVz 34539
+Um90YXRl 34540
+IGNoYW1w 34541
+IHNlbGJzdA== 34542
+QWx0ZXJu 34543
+IGNvbnZlcnRWaWV3 34544
+Lyw= 34545
+IH4o 34546
+U3RyZWV0 34547
+X3BsYWNl 34548
+IHBlcnNvbmFsaXplZA== 34549
+UHVibGlzaGVy 34550
+IFNPQ0s= 34551
+X05BTUVTUEFDRQ== 34552
+IFN0YW5kYXJkcw== 34553
+c29ldmVy 34554
+X0NFTlRFUg== 34555
+SW50ZXJlc3Q= 34556
+w7R0 34557
+dGVtcGVyYXR1cmU= 34558
+Vmlld3BvcnQ= 34559
+Z2V0UmVzb3VyY2U= 34560
+IGVhdGVu 34561
+IHNlbXByZQ== 34562
+IGFibm9ybWFs 34563
+IGN5bGluZGVy 34564
+IHRyb3VibGVz 34565
+bm9k 34566
+0YvQsg== 34567
+Z2FtZXM= 34568
+X2ds 34569
+UGxhbmU= 34570
+Z3JleQ== 34571
+X3RibA== 34572
+LkNvbXBvbmVudFBsYWNlbWVudA== 34573
+IENoYXNl 34574
+TG9nZ2luZw== 34575
+bWFueQ== 34576
+7IY= 34577
+IGZsYW1l 34578
+PSI8Pz0k 34579
+IEdyb3Vwcw== 34580
+LVU= 34581
+0YDQsNC9 34582
+CgoKCgoKCg== 34583
+IHZhdWx0 34584
+b21vbg== 34585
+cHJvYmxlbQ== 34586
+IHRyYWRlcnM= 34587
+IHBlcmlwaGVyYWw= 34588
+IGhvbWVwYWdl 34589
+KGRlcw== 34590
+IFN1Y2Nlc3NmdWxseQ== 34591
+IHJlYm9vdA== 34592
+IGNlbGx1bGFy 34593
+aWlp 34594
+IFBsYW5z 34595
+bGlzdGluZw== 34596
+CWRpcw== 34597
+IFJlZmxlY3Q= 34598
+CWV4Y2VwdA== 34599
+Iiko 34600
+IHRhbWLDqW0= 34601
+VmVoaWNsZQ== 34602
+YWNjaQ== 34603
+bHVzaA== 34604
+T3JkZXJCeQ== 34605
+IGltYWdpbmVk 34606
+Y29kZWM= 34607
+IGRhdGVUaW1l 34608
+TWljcm8= 34609
+IHJlbWluZHM= 34610
+IGZydXN0cmF0aW5n 34611
+IFZpc3Rh 34612
+VHJhaW4= 34613
+INCy0YE= 34614
+IG1vbGVjdWxlcw== 34615
+YXZpbg== 34616
+IGRvdWJsZWQ= 34617
+IGJyYWtl 34618
+IGNhbGNpdW0= 34619
+RnJpZGF5 34620
+IElkZW50aWZpZXI= 34621
+5Z8= 34622
+0YvQuQ== 34623
+IEphaA== 34624
+UmVu 34625
+IHNjYW0= 34626
+IERlbm5pcw== 34627
+LnNldEludA== 34628
+4p8= 34629
+IGFwcGVhbHM= 34630
+IEF1cg== 34631
+IHNwbGFzaA== 34632
+ZXF1YWxzSWdub3JlQ2FzZQ== 34633
+d2h5 34634
+IHNhcA== 34635
+U3VwcG9ydGVk 34636
+IHNlcmE= 34637
+IDoi 34638
+IFZlcm1vbnQ= 34639
+IHJldW4= 34640
+IE5vdmE= 34641
+ICAgICAgICAgICAgCiAgICAgICAgICAgIAo= 34642
+UmF0ZWQ= 34643
+IGxheWluZw== 34644
+IEthcmVu 34645
+LkRlc2VyaWFsaXpl 34646
+IGNvZGVj 34647
+IHRheHBheWVycw== 34648
+OyIpOwo= 34649
+IGNydWRl 34650
+IG1vbGU= 34651
+IHVzZUNvbnRleHQ= 34652
+CXJlc3A= 34653
+IHBrdA== 34654
+IENhbm5vdA== 34655
+UGlwZWxpbmU= 34656
+5YaG 34657
+dGljYWw= 34658
+QWN0aW9uQmFy 34659
+YWVkYQ== 34660
+IENyaXRpY2Fs 34661
+IE5hZA== 34662
+IGJsZWVkaW5n 34663
+IGxsdm0= 34664
+L2N1c3RvbQ== 34665
+IFNpbXBzb24= 34666
+U3k= 34667
+aXRhYmx5 34668
+IFN1bW1pdA== 34669
+KCkpKS4= 34670
+RUxMT1c= 34671
+JCcs 34672
+TWV0 34673
+SW52b2ljZQ== 34674
+b2xpc3Q= 34675
+IHNwaW5l 34676
+YXV0aWZ1bA== 34677
+cGFpZA== 34678
+IGxvY2tlcg== 34679
+X2FybQ== 34680
+XCI+PA== 34681
+IHRyYWplY3Rvcnk= 34682
+X3Jpbmc= 34683
+IGh5ZHJvZ2Vu 34684
+dHJvbg== 34685
+IHN0YXR1dGU= 34686
+IGNvbmRpdGlvbmFs 34687
+IHRyYXk= 34688
+LXNjaG9vbA== 34689
+KHdpZGdldA== 34690
+JGNvbmZpZw== 34691
+IHJlcXVlc3Rpbmc= 34692
+LnVpbnQ= 34693
+ZXRvbg== 34694
+YnJpdGllcw== 34695
+T2ZUeXBl 34696
+QURNSU4= 34697
+cHJlZGljdA== 34698
+IGdlZ2Vu 34699
+IEhhcHA= 34700
+T0NVTUVOVA== 34701
+IEFwYXJ0 34702
+IC0tLS0t 34703
+cm9l 34704
+dWlkZQ== 34705
+anVzdGlmeQ== 34706
+IFNxdWFk 34707
+IHByb2Zlcw== 34708
+LmJvdA== 34709
+X2N1cnJlbmN5 34710
+aW5uZW4= 34711
+IE11bWJhaQ== 34712
+IE51bWJlcnM= 34713
+YXZhbmF1Z2g= 34714
+YWduaXR1ZGU= 34715
+4oCcVGhlcmU= 34716
+PWh0dHA= 34717
+54mH 34718
+IHZi 34719
+Kyc8Lw== 34720
+IG9yZ2FuaXppbmc= 34721
+YW5pdW0= 34722
+SW5TZWN0aW9u 34723
+LmFuZA== 34724
+IGV0ZXJuYWw= 34725
+IHNvdWxz 34726
+X09ORQ== 34727
+X25z 34728
+X2Jhc2lj 34729
+IHJldFZhbA== 34730
+LXNoYXBlZA== 34731
+aWZkZWY= 34732
+IE1vemlsbGE= 34733
+IGVpZw== 34734
+Y29tcGxldGVk 34735
+Tm90aWZpY2F0aW9ucw== 34736
+VEVDVA== 34737
+cmllbg== 34738
+Y29vcmRpbmF0ZXM= 34739
+IHByZXRlbmQ= 34740
+cG9uc29yZWQ= 34741
+LnN0ZGVycg== 34742
+IGdhbWVycw== 34743
+IGRlZmVuZGVk 34744
+VG9vbFRpcA== 34745
+dWl0YXI= 34746
+IGZyYW5jYQ== 34747
+IFdvb2Rz 34748
+IGlocmU= 34749
+IHBzZXVkbw== 34750
+IGNyb3dkcw== 34751
+IFNZU1RFTQ== 34752
+bGVj 34753
+LmtlcmFz 34754
+IGNpcmN1bGF0aW9u 34755
+ZWVy 34756
+LmNi 34757
+dXp6eQ== 34758
+7Zg= 34759
+LnJlYWRlcg== 34760
+IHNlcXVlbA== 34761
+U2V2ZXJhbA== 34762
+LnBvcnRhbA== 34763
+LS0tLS0K 34764
+aXN0cmFy 34765
+77u/Ly8= 34766
+UGk= 34767
+IFwiIg== 34768
+IGN1c3RvbXM= 34769
+IGRpc3BsYXlOYW1l 34770
+IG5vdGljZXM= 34771
+IGNhcmI= 34772
+Ll8KCg== 34773
+IHByb2R1Y3Rv 34774
+INGB0Ls= 34775
+IG51bWVyaWNhbA== 34776
+IHVuaW50 34777
+IGNvZGlnbw== 34778
+T3JkaW5hbA== 34779
+U3RyaW5nVXRpbHM= 34780
+IGTDqWM= 34781
+IExhbg== 34782
+IHNob3djYXNl 34783
+IGFyaXRobWV0aWM= 34784
+LXNjcm9sbA== 34785
+X1RFTVBMQVRF 34786
+IFJvdXRlck1vZHVsZQ== 34787
+IFNoYWRlcg== 34788
+INCd 34789
+cG9saWN5 34790
+UGVyZm9ybWFuY2U= 34791
+CWJvcmRlcg== 34792
+KGZpbGVwYXRo 34793
+56m6 34794
+X2VuZXJneQ== 34795
+X0NT 34796
+VGhlaXI= 34797
+LnNwYWNpbmc= 34798
+KGRw 34799
+IExBTkdVQUdF 34800
+IGhpc3RvcmljYWxseQ== 34801
+Ij57eyQ= 34802
+IGlub2Rl 34803
+c2ls 34804
+IGhhY2U= 34805
+IHNldmVyZWx5 34806
+IE92ZXJ2aWV3 34807
+IHNwcmF3 34808
+IGJlYWNoZXM= 34809
+OmxlZnQ= 34810
+t7s= 34811
+KCR7 34812
+IEZJUlNU 34813
+IFNwYQ== 34814
+LWFzcw== 34815
+IGJhaXNl 34816
+IE5PREU= 34817
+IFBpenph 34818
+UGV0 34819
+KHNlcQ== 34820
+XCI+Cg== 34821
+Q3BwTWV0aG9kUG9pbnRlcg== 34822
+IHZw 34823
+IGlh 34824
+X3NlY29uZHM= 34825
+ZW1ldA== 34826
+L2Jsb2I= 34827
+X1RIUkVTSA== 34828
+Li4uDQo= 34829
+RGVzdA== 34830
+IE5I 34831
+LmRhdGFTb3VyY2U= 34832
+aXTDqXM= 34833
+IEphaw== 34834
+c2VsbA== 34835
+IHdvcmtzaG9wcw== 34836
+PHU= 34837
+IHJpdmFscw== 34838
+IEVYSVNUUw== 34839
+aG9t 34840
+LXRva2Vu 34841
+Y29tcGF0aWJsZQ== 34842
+LkpQYW5lbA== 34843
+IHBoeXNpY2lhbnM= 34844
+YXJ0aW4= 34845
+IGRlc2lyYWJsZQ== 34846
+IGRpc3RpbmN0aXZl 34847
+LkRlcA== 34848
+Z2lk 34849
+aWxpYXRl 34850
+LG1heA== 34851
+IHByZW1pZXJl 34852
+IHFEZWJ1Zw== 34853
+IGFkdm9jYWN5 34854
+IHdoaXNwZXI= 34855
+UHQ= 34856
+IHVuY2hhbmdlZA== 34857
+X3F0eQ== 34858
+6K+35rGC 34859
+U2Vhc29u 34860
+YXZlbGVuZ3Ro 34861
+IFB1bA== 34862
+IGTDrWE= 34863
+J11dXSwK 34864
+YWxpcw== 34865
+KCIm 34866
+Ym9ybw== 34867
+IGJt 34868
+IFJhZGk= 34869
+d3Jvbmc= 34870
+IEdvaW5n 34871
+aW1lVHlwZQ== 34872
+aWpp 34873
+LWZlZWRiYWNr 34874
+IE5hbWVz 34875
+IEJhcHQ= 34876
+IHByb2JhYmxl 34877
+IEV0aGVy 34878
+IFBvbGl0aWNz 34879
+X3Byb3RvY29s 34880
+bGluaW5n 34881
+U2F0 34882
+IGNvcnJlbA== 34883
+LlByaW1hcnk= 34884
+KG51bGxhYmxl 34885
+UklPUklUWQ== 34886
+IGNvbG9yaW5n 34887
+IHV0aWxpemluZw== 34888
+ZGFz 34889
+IGV4cG9ydGVk 34890
+IGNhcnJpZXJz 34891
+Q29udg== 34892
+LmVkaXRvcg== 34893
+acOz 34894
+KGhhbmRsZXM= 34895
+IGFwcHJlY2lhdGlvbg== 34896
+LmltcG9ydA== 34897
+IEF1c3RyaWE= 34898
+IFN0cmlw 34899
+aWxpZ2h0 34900
+IGFwcHJvcHJpYXRlbHk= 34901
+IFByZXN0 34902
+IFdpcg== 34903
+IFVJQXBwbGljYXRpb24= 34904
+YWxjaGVteQ== 34905
+IE1vYg== 34906
+IERldGVybWlu 34907
+ZXJndXNvbg== 34908
+cmVnaXN0ZXJlZA== 34909
+X2NvbnZlcnQ= 34910
+IFZsYWRpbWly 34911
+LlNob3dEaWFsb2c= 34912
+cmVmbGVjdA== 34913
+IHNob29r 34914
+IGFzc3VyZQ== 34915
+IE9mdGVu 34916
+IGNpdmlsaXphdGlvbg== 34917
+IHZvY2FidWxhcnk= 34918
+Zm9yZWdyb3VuZA== 34919
+IFNjb3Bl 34920
+IHVud2FudGVk 34921
+YWN0aW5n 34922
+IChbXQ== 34923
+IG1hcmtpbmc= 34924
+Lm9yaWdpbmFs 34925
+IE1PVkU= 34926
+IHNwb3J0aW5n 34927
+Y2VwdGlvbnM= 34928
+TlNOdW1iZXI= 34929
+U2l6ZXM= 34930
+IHByb3ZpbmNpYWw= 34931
+X1RyYW5z 34932
+IHByb2JsZW1hdGlj 34933
+ZGlnaXQ= 34934
+IEVtbWE= 34935
+bG9ja3M= 34936
+IENyZXc= 34937
+aWJh 34938
+Jyk6 34939
+aXNoYQ== 34940
+IG1hbW0= 34941
+IG9jY3VyZWQ= 34942
+d2Nz 34943
+KHJ1bGU= 34944
+IG1lcmNoYW5kaXNl 34945
+ZXNwZWNpYWxseQ== 34946
+IFR3aW4= 34947
+IG5hbWluZw== 34948
+IHNsb2c= 34949
+IGltcHJvdmVz 34950
+IGFkaGVy 34951
+OnRleHQ= 34952
+LmhhZG9vcA== 34953
+X0hUVFA= 34954
+LnRvTGlzdA== 34955
+LmRpc2FibGVk 34956
+IGxlbnNlcw== 34957
+LmluaQ== 34958
+IFJhcmU= 34959
+IFVidW50dQ== 34960
+IHNjcmFt 34961
+b2xhdGlvbg== 34962
+dGl0dWxv 34963
+RXZlcnl0aGluZw== 34964
+IG5vZGRlZA== 34965
+aWNodGln 34966
+X2NvbnN0YW50 34967
+emM= 34968
+bGlmdA== 34969
+IE5vdGlmeQ== 34970
+b25kbw== 34971
+IElORg== 34972
+KCIr 34973
+IEtheg== 34974
+IGRyZWFk 34975
+Lm1hcHBlcg== 34976
+bGV1cg== 34977
+IENvbWV5 34978
+IE5C 34979
+aWNlcnM= 34980
+LlB1c2g= 34981
+IEhhY2s= 34982
+IEJyYXppbGlhbg== 34983
+X3Byb2Q= 34984
+IC8vCgo= 34985
+IGJpY3ljbGU= 34986
+IHVuYXZhaWxhYmxl 34987
+IGFkb2xlc2NlbnQ= 34988
+Ymxr 34989
+IG1pdGln 34990
+X2JsdWU= 34991
+7Jg= 34992
+ZmFkZUlu 34993
+IFV0aWxpdGllcw== 34994
+IE1O 34995
+O2s= 34996
+PHN0eWxl 34997
+LXN0YXR1cw== 34998
+aW5kbw== 34999
+IGlubmluZ3M= 35000
+IGdq 35001
+IHx8PQ== 35002
+LmV1 35003
+Ok51bWJlcg== 35004
+IGN1aXNpbmU= 35005
+IFVSTHM= 35006
+aWVr 35007
+IHdpcmVz 35008
+CXBz 35009
+aWVn 35010
+Lm1r 35011
+c29hcA== 35012
+IHNvbWV0aW1l 35013
+IHN0YXA= 35014
+X3Nlcmllcw== 35015
+LlRhcmdldA== 35016
+5ro= 35017
+LmRlc3RpbmF0aW9u 35018
+T1VOVEVS 35019
+UmFpc2Vz 35020
+JkE= 35021
+IHNtYXJ0cGhvbmVz 35022
+TklFbnY= 35023
+LnNkaw== 35024
+IGhlbGljb3B0ZXI= 35025
+IGltcGU= 35026
+IEJpcnRo 35027
+QVU= 35028
+YnJlYWRjcnVtYnM= 35029
+Y29vcmRz 35030
+IGV4cGxvcmVk 35031
+IGxvZA== 35032
+IElw 35033
+Z2FibGU= 35034
+aWFuZQ== 35035
+IGFydGlmYWN0cw== 35036
+Qm94TGF5b3V0 35037
+2KfYsQ== 35038
+bGlzdGVuZXI= 35039
+LmNhcnQ= 35040
+IEh1ZmY= 35041
+IEhpbmR1 35042
+IERhdGFUeXBlcw== 35043
+IERydXBhbA== 35044
+SUdOT1JF 35045
+IG9mZnNldHM= 35046
+IFJUQw== 35047
+LWxvZ2lu 35048
+5q4= 35049
+IFFPYmplY3Q= 35050
+IHByb3NlY3V0b3I= 35051
+Um9jaw== 35052
+X2NoYXQ= 35053
+V2F5 35054
+7LI= 35055
+IG5lZ2xpZw== 35056
+IGR1ZGU= 35057
+Ozw= 35058
+IGRlbGVnYXRlcw== 35059
+X2ZhaWxlZA== 35060
+L2Rldg== 35061
+L3dvcms= 35062
+KE5ldw== 35063
+ZXRhYmxl 35064
+KCki 35065
+KEljb25z 35066
+IHBvcms= 35067
+IE1vZGVsQW5kVmlldw== 35068
+IFZJUA== 35069
+IEtvcg== 35070
+bWl4 35071
+IG94aWQ= 35072
+IFNDUkVFTg== 35073
+IEZvdXJ0aA== 35074
+LyIsCg== 35075
+IHRlZQ== 35076
+IFN0ZXZlbnM= 35077
+dGlja3M= 35078
+IHBsZWRnZQ== 35079
+aWJib24= 35080
+IExvYW4= 35081
+IG5lbw== 35082
+bnVtcHk= 35083
+IFNoYXJlZFByZWZlcmVuY2Vz 35084
+LW9yaWVudGVk 35085
+IExvZ2dlckZhY3Rvcnk= 35086
+IEdyYXBoUUw= 35087
+emVuaWE= 35088
+Il8= 35089
+V29tZW4= 35090
+LmNhc3Q= 35091
+IGRlbGliZXJhdGVseQ== 35092
+K2I= 35093
+IEFybg== 35094
+Zm9udFNpemU= 35095
+IG1hemU= 35096
+IGJsYW1lZA== 35097
+Lm1hcw== 35098
+fSkNCg== 35099
+ZWxlcmlr 35100
+IHNjYW5uaW5n 35101
+IFdvcmtzaG9w 35102
+IGZpbmRlbg== 35103
+IGNhdXQ= 35104
+VUlGb250 35105
+KHJldHVybg== 35106
+YWxpbg== 35107
+Y2FzdGxl 35108
+Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8v 35109
+IGluY2VudGl2ZQ== 35110
+b3BhdGg= 35111
+YmxvYg== 35112
+IGNpZ2FyZXR0ZQ== 35113
+IGZlcnRpbA== 35114
+Ki8KCgo= 35115
+IFNoYXI= 35116
+CiAgICAgIAo= 35117
+IHVuY2VydGFpbg== 35118
+IFN0b24= 35119
+T3BlcmF0aW9ucw== 35120
+IFNwZW5jZXI= 35121
+IGRlZmlu 35122
+IFNvbG8= 35123
+b25lc3Q= 35124
+t7vliqA= 35125
+IHVvbW8= 35126
+R2l2ZQ== 35127
+IGRlbnRybw== 35128
+O3BhZGRpbmc= 35129
+ZW50YWk= 35130
+IENhcnM= 35131
+IGVudGh1c2lhc20= 35132
+IE9wZXJhdGluZw== 35133
+U2tpcA== 35134
+cGFyYXRpb24= 35135
+IHByb3RlY3Rz 35136
+IHJldmVy 35137
+ZGc= 35138
+IENpbmNpbm5hdGk= 35139
+IGNvbnNlY3RldHVy 35140
+IG11c3M= 35141
+ZW1wbG95ZWQ= 35142
+YXVzZXM= 35143
+aW5rbGU= 35144
+LlZhbHVlcw== 35145
+o7w= 35146
+bG92 35147
+X1dBUk4= 35148
+IGJvb2ttYXJr 35149
+IEFwb2xsbw== 35150
+LmF4aXM= 35151
+IG3DqXQ= 35152
+IG9wZW5lcg== 35153
+IHR1bW9y 35154
+ZGFu 35155
+IGVsZW1lbnRhcnk= 35156
+IHNraXBwZWQ= 35157
+IEtlcg== 35158
+YXNpYQ== 35159
+X3Jlc3A= 35160
+IGRlbW9s 35161
+IENhbmFkaWFucw== 35162
+IHRhc3Rlcw== 35163
+VUludGVnZXI= 35164
+ICckew== 35165
+LmF3cw== 35166
+Uk9JRA== 35167
+cmlhbnM= 35168
+TVE= 35169
+b3JkYWJsZQ== 35170
+IGNvdXNpbg== 35171
+UHJvcGFnYXRpb24= 35172
+KFNlc3Npb24= 35173
+cGhhbHQ= 35174
+VUxE 35175
+IFNjYWxhcg== 35176
+IGJsb29keQ== 35177
+IOCm 35178
+Lm1hc2s= 35179
+LHE= 35180
+IFVuaXRz 35181
+IGNlbnRyZXM= 35182
+IFByaW0= 35183
+Ll0KCg== 35184
+IFNoYXc= 35185
+UHJvbQ== 35186
+IFRob3VnaHQ= 35187
+Q2hlY2tlcg== 35188
+X291dHB1dHM= 35189
+KGNoYW4= 35190
+RUlOVkFM 35191
+IGJvYg== 35192
+X2NtcA== 35193
+UGVk 35194
+IG1hdHJpY2Vz 35195
+IHZyb3V3ZW4= 35196
+IGdlbnVpbmVseQ== 35197
+aGlnaGxpZ2h0 35198
+KGRpc3BsYXk= 35199
+KSE9 35200
+IGRlbGljYXRl 35201
+IEx1dGhlcg== 35202
+IE1pbGVz 35203
+IHVzZXJJRA== 35204
+JT0= 35205
+YXRldXJz 35206
+X0JVRg== 35207
+LS0tLS0tLQo= 35208
+aW1pdGl2ZXM= 35209
+IHNoZWx2ZXM= 35210
+c2xvdw== 35211
+X2luZm9ybWF0aW9u 35212
+TEVH 35213
+V3I= 35214
+LmZvcm1z 35215
+Y2VsYW5k 35216
+L3Vu 35217
+OiY= 35218
+LuKAmQoK 35219
+PSIl 35220
+IHByb3N0 35221
+IGZvbnRzaXpl 35222
+dWNpw7Nu 35223
+Z2V0aWM= 35224
+YW10 35225
+PSIu 35226
+RGVjb3I= 35227
+QnJpdA== 35228
+ICIiKS4= 35229
+IGZvdW5kaW5n 35230
+LkZpbGVOYW1l 35231
+IFRpZXI= 35232
+IGRpc2Nsb3Nl 35233
+w6Ft 35234
+LnN5bg== 35235
+LlZpZXdIb2xkZXI= 35236
+bGljYW50 35237
+X3N0YWdl 35238
+TW9uZGF5 35239
+IGRlc2VyaWFsaXpl 35240
+dGFsaw== 35241
+IHRyYWRpdGlvbmFsbHk= 35242
+5oCB 35243
+2K4= 35244
+TEVY 35245
+IGVo 35246
+CVJPTQ== 35247
+IHt9KQo= 35248
+UXVlc3Rpb25z 35249
+bmNweQ== 35250
+IGZpeGluZw== 35251
+0LrRgw== 35252
+X0tleQ== 35253
+Ong= 35254
+IFNUUklORw== 35255
+INGE0LDQuQ== 35256
+CWxlZnQ= 35257
+IEJlbmNo 35258
+ZWxsaWo= 35259
+VVJSRUQ= 35260
+IERpYWdyYW0= 35261
+fWNhdGNo 35262
+L3RpbWU= 35263
+IE1pc3Npbmc= 35264
+ZGJuYW1l 35265
+IHNvcmU= 35266
+IFdhbHQ= 35267
+dWdnaW5n 35268
+cmVwcmVzZW50 35269
+IEdT 35270
+bmV5cw== 35271
+CXBhZ2U= 35272
+IHZvbGNhbg== 35273
+KGJ0bg== 35274
+IGV4Y2VlZHM= 35275
+IGVyZw== 35276
+IHBpbG90cw== 35277
+IFNlZA== 35278
+ZXJzaW9ucw== 35279
+IHBhdHJvbg== 35280
+UlY= 35281
+L3RvcA== 35282
+LmFzc2V0 35283
+X2Nyb3Nz 35284
+LkVkaXRvcg== 35285
+LnRi 35286
+IHdlbGNvbWluZw== 35287
+U0NSRUVO 35288
+KWZpbmRWaWV3QnlJZA== 35289
+Q29kZXI= 35290
+PElBY3Rpb25SZXN1bHQ= 35291
+X1FVRVVF 35292
+4YM= 35293
+IGhlaWdodHM= 35294
+UmVxdWVzdHM= 35295
+IHN5bWJvbGlj 35296
+DQ0KDQ0K 35297
+IGNvdXBvbnM= 35298
+LWZpdmU= 35299
+IERlc2t0b3A= 35300
+IG1pc21hdGNo 35301
+ICdfJw== 35302
+X0RJVg== 35303
+QVNPTg== 35304
+LnRyYW5zcG9zZQ== 35305
+KG1hc2s= 35306
+IENlbHQ= 35307
+LkhhbmQ= 35308
+YXR1 35309
+asSZ 35310
+IHt9KTsK 35311
+TWlzcw== 35312
+IHByaW1h 35313
+bXVuZA== 35314
+b2x2 35315
+IFByZXR0eQ== 35316
+IHJlYmVs 35317
+IEZE 35318
+YXN0aWNhbGx5 35319
+T0xU 35320
+LWF4aXM= 35321
+dXhl 35322
+IGVpbmZhY2g= 35323
+IENoZW1pY2Fs 35324
+X3NlZw== 35325
+bGVldGNvZGU= 35326
+bG9wZQ== 35327
+X29yaWc= 35328
+ICAJCQ== 35329
+KERvdWJsZQ== 35330
+IFBheVBhbA== 35331
+LkJhY2tncm91bmRJbWFnZQ== 35332
+IGhvbWVtYWRl 35333
+Liku 35334
+KHBhcnNlcg== 35335
+YXRybw== 35336
+YWNjb3JkaW9u 35337
+RGVmaW5l 35338
+IOyeiA== 35339
+IEFVVE8= 35340
+LnN1bW1hcnk= 35341
+c2NhbGFy 35342
+IEhvb2Q= 35343
+cXVpbg== 35344
+X2Rlcg== 35345
+IEdlc2No 35346
+LmNvbXB1dGU= 35347
+RmVlZGJhY2s= 35348
+IHBoYXJtYWM= 35349
+IMWfaQ== 35350
+IGdsb3Nz 35351
+IEZJTFRFUg== 35352
+SU5TVEFOQ0U= 35353
+IGthbA== 35354
+LlBM 35355
+X0ZSRUU= 35356
+R3JhZGU= 35357
+IOKZ 35358
+Lm1ldHJpY3M= 35359
+IGNhZ2U= 35360
+Llh0cmFHcmlk 35361
+X2Rz 35362
+emln 35363
+aW50ZXJvcFJlcXVpcmVEZWZhdWx0 35364
+LnJlbW92ZUNsYXNz 35365
+PT09PT09PT09PT09PQ== 35366
+IG1hc3RlcnM= 35367
+U3RhdGVFeGNlcHRpb24= 35368
+aWxsZXJ5 35369
+IEJyYWR5 35370
+IGxpbmluZw== 35371
+X2Nz 35372
+aW5zdWxh 35373
+IH06 35374
+W3Bvc2l0aW9u 35375
+IFJ4 35376
+IEJZVEU= 35377
+IFN0cmlrZQ== 35378
+INCa 35379
+IENsdXN0ZXI= 35380
+LmRvd25sb2Fk 35381
+QWxsb3dlZA== 35382
+IGFtZW5pdGllcw== 35383
+IG9uVGFw 35384
+ZnVsV2lkZ2V0 35385
+IHN0cmVuZ3Rocw== 35386
+dHdlZXQ= 35387
+IGFzY2VuZGluZw== 35388
+IGRpc2Nsb3NlZA== 35389
+Z3Jhdg== 35390
+ZGlzdHJpY3Q= 35391
+KTw8 35392
+KSwi 35393
+KGRlZnVu 35394
+X3w= 35395
+IGdhemU= 35396
+0LDRjw== 35397
+IGZvcnR5 35398
+PT09PT09PT09PT0= 35399
+U2NpZW5jZQ== 35400
+c2VtYmxlcg== 35401
+CWJvZHk= 35402
+X3RyYW5zZmVy 35403
+IGxvbmd0aW1l 35404
+IGNvbXBsaWNhdGlvbnM= 35405
+IGJvb3Ro 35406
+VkVSUg== 35407
+IHlpZWxkcw== 35408
+IG5hdmlnYXRvcg== 35409
+OjpfKCc= 35410
+RUNUT1I= 35411
+X0NvbmZpZw== 35412
+IGxhc3RlZA== 35413
+dXNhbA== 35414
+55m75b2V 35415
+IGdsb3Zlcw== 35416
+IGJlbGx5 35417
+U2FsZXM= 35418
+KE1ldGhvZA== 35419
+KG1lbWJlcg== 35420
+IFJlZWQ= 35421
+cGFzc2Vk 35422
+U2lnbklu 35423
+LG51bQ== 35424
+VUxPTkc= 35425
+IExFRw== 35426
+bmVscw== 35427
+IG1lbnRvcg== 35428
+KHJj 35429
+IE9idmlvdXNseQ== 35430
+Lmlm 35431
+IEZyZWRlcg== 35432
+SEVBRA== 35433
+QGF1dGhvcg== 35434
+Q29uZGl0aW9ucw== 35435
+IGdhcmRlbnM= 35436
+IFJpcA== 35437
+KHVzZXJz 35438
+IE9rYXk= 35439
+IHdyZXN0bGluZw== 35440
+aW1lc3RvbmU= 35441
+IENlcnRpZmllZA== 35442
+IHZlcmRpY3Q= 35443
+YWlkYQ== 35444
+LmlubmVyVGV4dA== 35445
+aWNhc3Q= 35446
+CWF0 35447
+IHByZXN1bWFibHk= 35448
+IEZVTg== 35449
+YWplcw== 35450
+0Jc= 35451
+PiIsCg== 35452
+X1Bpbg== 35453
+dWVzZQ== 35454
+IG92ZXJyaWRlcw== 35455
+X3JlYWR5 35456
+QWR2YW5jZWQ= 35457
+IG9waQ== 35458
+LWNhcnQ= 35459
+KCIvIiw= 35460
+IERlYg== 35461
+Q1JZ 35462
+IFZlcnRpY2Fs 35463
+IE9WRVI= 35464
+IENvcnBvcmF0ZQ== 35465
+ICIiOw== 35466
+IHN0ZXBwaW5n 35467
+ZWo= 35468
+IGFjY3VzYXRpb25z 35469
+IG9yYXo= 35470
+X3RhaWw= 35471
+IGluZHVjZWQ= 35472
+IGVsYXN0aWM= 35473
+IGJsb3du 35474
+LC8v 35475
+IGJhY2tncm91bmRz 35476
+4oCZdW5l 35477
+LXNkaw== 35478
+IHNldEludGVydmFs 35479
+IGluY2VudGl2ZXM= 35480
+IHZlZ2V0YWJsZQ== 35481
+X09u 35482
+ZXhwYW5kZWQ= 35483
+cGl4 35484
+X3NoYWRlcg== 35485
+IFNQRFg= 35486
+QGV4YW1wbGU= 35487
+IFdyYXBwZXI= 35488
+Llplcm8= 35489
+UG9zaXRpdmU= 35490
+IHNwaW5uZXI= 35491
+IGludmVudGVk 35492
+IEdhdGVz 35493
+0L7RgtC+0YA= 35494
+IGNvbXBhcmlzb25z 35495
+6Lc= 35496
+LnByaW1hcnk= 35497
+ZGF0YVByb3ZpZGVy 35498
+YWRkaXRpb25hbA== 35499
+CW9wdGlvbnM= 35500
+c25hcHNob3Q= 35501
+LnNldEhvcml6b250YWw= 35502
+ICJ7fQ== 35503
+IEZpc2hlcg== 35504
+aGFsdGVu 35505
+PFR5cGU= 35506
+IG1heExlbmd0aA== 35507
+IE10 35508
+IOqwgA== 35509
+LmpldGJyYWlucw== 35510
+IGlkZW50aWZpZXM= 35511
+IGZsb3dpbmc= 35512
+IERpc2N1c3Npb24= 35513
+YXRzYnk= 35514
+IHNjaHc= 35515
+dWdodHk= 35516
+IHJpdmVycw== 35517
+LnVuaXF1ZQ== 35518
+X1BIWQ== 35519
+ZWRyYWw= 35520
+KGxs 35521
+IGNzcmY= 35522
+cHBlcnM= 35523
+w7xs 35524
+IEVzcGVjaWFsbHk= 35525
+cG9ydGVk 35526
+IEhhcnJpc29u 35527
+KioqKioqKi8K 35528
+VGV4dENvbG9y 35529
+7Iq1 35530
+d2lyZQ== 35531
+IHN0YXR1c0NvZGU= 35532
+IEZpbmlzaA== 35533
+Y2VuY2U= 35534
+IE1jQ2Fpbg== 35535
+IFdvcg== 35536
+KGF3YWl0 35537
+ICktPg== 35538
+IFJlZ2lzdGVyZWQ= 35539
+SU5FRA== 35540
+a2Fs 35541
+cGFyaXNvbg== 35542
+IG9iamV0bw== 35543
+Vmk= 35544
+bWFuZGE= 35545
+IHJlbmV3ZWQ= 35546
+IFNvZg== 35547
+ZXNzZWw= 35548
+Lm5kYXJyYXk= 35549
+IGNyYXA= 35550
+566h 35551
+LmFic3BhdGg= 35552
+KHVw 35553
+IGNsZWFyYW5jZQ== 35554
+IFRX 35555
+X0NPUFk= 35556
+ICAgICAgICAgICAgCQ== 35557
+IGZvcmVzdHM= 35558
+IGFyZ3VhYmx5 35559
+IEFTUw== 35560
+aGV5 35561
+YW1lbA== 35562
+X2ZvcmU= 35563
+IFNvdXRoZWFzdA== 35564
+IGFidXNlZA== 35565
+IHByYWN0aWNpbmc= 35566
+YWtlZGlycw== 35567
+5Li7 35568
+X3Jlc291cmNlcw== 35569
+IHBvbmQ= 35570
+LkZpeGVk 35571
+TGFzdEVycm9y 35572
+IFBzeWNob2xvZ3k= 35573
+ICIvLw== 35574
+ITo= 35575
+UmV1c2FibGU= 35576
+IG1lbnNhamU= 35577
+IHJvc3B5 35578
+IGJvdXI= 35579
+IHZhcmlldGllcw== 35580
+IGVtcGF0aA== 35581
+KCh7 35582
+X29yZw== 35583
+IE1lcw== 35584
+IE1hZ2VudG8= 35585
+SVNUT1JZ 35586
+VW5sZXNz 35587
+IGhq 35588
+IER1dHk= 35589
+SnVu 35590
+LHNpemU= 35591
+IHBhaW50aW5ncw== 35592
+IGRpc3BlbnM= 35593
+ZGFydA== 35594
+IGJlaGF2aW9yYWw= 35595
+IHJwYw== 35596
+Y2FsY3VsYXRl 35597
+ZnJ1aXQ= 35598
+X21t 35599
+CXB0aHJlYWQ= 35600
+TWF4TGVuZ3Ro 35601
+IGN1cnJlbmNpZXM= 35602
+X2NhcGFjaXR5 35603
+IE96 35604
+IGZpcmVhcm0= 35605
+IGNvZWZmaWNpZW50 35606
+IGJhbmtydXB0Y3k= 35607
+d2FydA== 35608
+IGZhdGlndWU= 35609
+QVZB 35610
+IGVzcGE= 35611
+X3Bj 35612
+IFF1b3Rlcw== 35613
+X0xJR0hU 35614
+IFRpY2tldHM= 35615
+IHJlbGF0ZXM= 35616
+IHB1Ymxpc2hlcnM= 35617
+IHVubG9ja2Vk 35618
+IC8vLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQ== 35619
+IEludGVycnVwdGVkRXhjZXB0aW9u 35620
+IG91dGxvb2s= 35621
+cm4= 35622
+IHJlYmVscw== 35623
+V3JpdHRlbg== 35624
+IGFzaWFu 35625
+b3R0bw== 35626
+IAkJCQk= 35627
+X2dwdQ== 35628
+VHh0 35629
+LkltYWdlVmlldw== 35630
+IHN1aXM= 35631
+X3RhYmxlcw== 35632
+LlJlY3ljbGVyVmlldw== 35633
+IHdoYXRzb2V2ZXI= 35634
+6IE= 35635
+XSsrOwo= 35636
+YXNzZXJ0VHJ1ZQ== 35637
+X3ZlcmlmeQ== 35638
+IFJpdmVycw== 35639
+IF1b 35640
+SmV0 35641
+aWRpYW4= 35642
+U2libGluZw== 35643
+IGdlbnJlcw== 35644
+LkFjY2Vzcw== 35645
+T1BT 35646
+IHRyaXZpYWw= 35647
+4Liq 35648
+YWxlbg== 35649
+0LLQtdC0 35650
+IFN3b3Jk 35651
+IHNjcnV0aW55 35652
+KGNi 35653
+IGNvbW1lcmNl 35654
+IGd1YXJhbnRlZXM= 35655
+X2Fkdg== 35656
+IExFVA== 35657
+cmVjaW8= 35658
+IGhpbGFy 35659
+IGJhY2t5YXJk 35660
+44CP 35661
+IGlsbHVzdHJhdGVk 35662
+L3ZlbmRvcg== 35663
+LlV0aWw= 35664
+IHdvdw== 35665
+TE9Z 35666
+IE1hcnNoYWw= 35667
+Ij4nLiQ= 35668
+IEJhaw== 35669
+IG1vZGlmaWVycw== 35670
+ZGljdGlvbmFyeQ== 35671
+IFN0cmU= 35672
+bXVsdGlwbGU= 35673
+IikpLA== 35674
+IENvcnQ= 35675
+J10iKS4= 35676
+KGFkbWlu 35677
+IENyZWF0b3I= 35678
+SW50ZXJuZXQ= 35679
+KG1z 35680
+bG9neQ== 35681
+REVDTEFSRQ== 35682
+IE1hcmN1cw== 35683
+PDw8PA== 35684
+44Gg 35685
+X215 35686
+KGluc3Q= 35687
+IHNjaWVuY2Vz 35688
+TkRFUg== 35689
+LmVudGVy 35690
+IGl0dQ== 35691
+IGJlaGF2ZQ== 35692
+UGFu 35693
+b21iaWVz 35694
+PSc8 35695
+JykpOw0K 35696
+IE1FTlU= 35697
+IFdvcmtlcnM= 35698
+Lk5vRXJyb3I= 35699
+IGJpbmRpbmdz 35700
+IGRpc2FiaWxpdGllcw== 35701
+e1w= 35702
+IE11bmljaXA= 35703
+IGNvcmVz 35704
+dXJwbGU= 35705
+IE5va2lh 35706
+dXNpb25z 35707
+IEZpdG5lc3M= 35708
+LmhhbmRsZUNoYW5nZQ== 35709
+IGphdmFzY3JpcHQ= 35710
+7JqU 35711
+KGRlYw== 35712
+IHBhY2tpbmc= 35713
+LWRlcGVuZA== 35714
+IHRyYW5zY3JpcHQ= 35715
+emVyb3M= 35716
+X2FsZXJ0 35717
+PyIsCg== 35718
+bGlicw== 35719
+sdC+0YI= 35720
+IHwKCg== 35721
+dHJhaW5lZA== 35722
+IEdlbnQ= 35723
+IFJhYg== 35724
+eHA= 35725
+X2NvbmZpZ3VyYXRpb24= 35726
+5aSp 35727
+X2FjY2VwdA== 35728
+LnJlY3ljbGVydmlldw== 35729
+OnVybA== 35730
+IE11aGFtbWFk 35731
+IHByaXZpbGVnZXM= 35732
+X2Jhbms= 35733
+dWt1 35734
+d2FsbGV0 35735
+IFJPT1Q= 35736
+IGVuY3VlbnQ= 35737
+P2ZhbWlseQ== 35738
+CXBvc2l0aW9u 35739
+IGNn 35740
+IHByZWNpcA== 35741
+bWV0aG9kcw== 35742
+X2Zhc3Q= 35743
+aW5jcmVtZW50 35744
+IFRpZ2Vy 35745
+X09DQ1VSUkVE 35746
+cXVpcA== 35747
+IEhBUw== 35748
+X2RvbQ== 35749
+IHdyZWNr 35750
+Ymo= 35751
+IGRlcm4= 35752
+IG9yZ2Fucw== 35753
+LmVudHJpZXM= 35754
+IF8oJw== 35755
+cmFtZW50bw== 35756
+IEphbWll 35757
+IHB1bms= 35758
+SVBQ 35759
+IHByb2dyYW1h 35760
+IGF0dGFpbg== 35761
+IHByb3Zlcw== 35762
+L3NpZ24= 35763
+IGFuc3dlcmluZw== 35764
+IGxhZGRlcg== 35765
+KioqKioqKioqKioqKioqKioqKioqKioqKioqKg== 35766
+IFdhbG1hcnQ= 35767
+IENPTlRFTlQ= 35768
+ZHVjdG9y 35769
+IHZlcmJhbA== 35770
+IFBJRA== 35771
+Y3J5cHRv 35772
+X0NBTExCQUNL 35773
+ID09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PQ== 35774
+IHBvdGVudA== 35775
+IHNob3J0cw== 35776
+LlVyaQ== 35777
+LnVuaWZvcm0= 35778
+O2JvcmRlcg== 35779
+IFdlcg== 35780
+IGhlcmVpbg== 35781
+bGxh 35782
+IElocg== 35783
+UGl4bWFw 35784
+bGl0ZXJhbA== 35785
+ISkKCg== 35786
+Z2VuZXJpYw== 35787
+cnVzdA== 35788
+X3NjcmlwdHM= 35789
+b3N0bw== 35790
+aXR1cw== 35791
+IENvYWxpdGlvbg== 35792
+IHJlbW90 35793
+ZGVwbG95 35794
+IEVhZ2xl 35795
+44CB44CM 35796
+IGltcG9ydGFudGU= 35797
+CW9iamVjdA== 35798
+IHNlYXNvbmFs 35799
+bmVq 35800
+YWlkdQ== 35801
+QmluZFZpZXc= 35802
+IFNpZXJyYQ== 35803
+LWJn 35804
+IG1ha2VTdHlsZXM= 35805
+W29mZnNldA== 35806
+R2FtZXM= 35807
+IGhvcm1vbmU= 35808
+QVJJTw== 35809
+aGVhZHM= 35810
+KHNlbGVjdA== 35811
+IFN0YXJ0ZWQ= 35812
+QHBhcmFt 35813
+X2RlY2w= 35814
+X2Jsb2c= 35815
+IGHDsW8= 35816
+XEFwaQ== 35817
+IE1pbHdhdWtlZQ== 35818
+UHJvdmlk 35819
+QW5pbWF0ZWQ= 35820
+IGNvb2xlcg== 35821
+IFNlZWQ= 35822
+LkVkaXQ= 35823
+z4Q= 35824
+IFRha2luZw== 35825
+IGJvcmRlckNvbG9y 35826
+LWZvdW5kZXI= 35827
+LkxvZ2dlckZhY3Rvcnk= 35828
+ICIiCgo= 35829
+QUxU 35830
+IExhdGU= 35831
+RURJQVRF 35832
+ICk7CgoK 35833
+YWZh 35834
+IGNhbmNlbGxhdGlvbg== 35835
+QXRvbQ== 35836
+IEJpcm1pbmdoYW0= 35837
+ZW1wcmVzYQ== 35838
+SEVNQQ== 35839
+YXNjYWw= 35840
+IHVwc2lkZQ== 35841
+LlZlcnNpb24= 35842
+IEZvbGRlcg== 35843
+IEVpZ2h0 35844
+IFZpbnRhZ2U= 35845
+IEFwcERlbGVnYXRl 35846
+IFByZXZlbnRpb24= 35847
+LnNlcGFyYXRvcg== 35848
+U1RN 35849
+KHJvb20= 35850
+Z2VuZXJhdG9y 35851
+IGNhdHRsZQ== 35852
+CVo= 35853
+IFBhcnRpY2xl 35854
+J307Cg== 35855
+IG5laWdoYm91cnM= 35856
+IFN0YXRlbGVzcw== 35857
+IGFsdGl0dWRl 35858
+IHNhaW50 35859
+0L7QsdCw0LI= 35860
+IGNvbnZpbmM= 35861
+IENvbnRlbnRz 35862
+IGpldW5l 35863
+KHRz 35864
+U2VyaWFsaXphdGlvbg== 35865
+KGNvbGxlY3Rpb24= 35866
+IEpheno= 35867
+IERvZA== 35868
+IFJvY2g= 35869
+YWNpbw== 35870
+Y29tbWVuZGVk 35871
+REVGSU5F 35872
+Lm9ubG9hZA== 35873
+IHNwZWNpYWx0eQ== 35874
+UExBQ0U= 35875
+X01PVkU= 35876
+IGFjY291bnRhYmxl 35877
+UmV1dGVycw== 35878
+IGZpY2tlbg== 35879
+IGRlcHI= 35880
+V293 35881
+Vm9pZA== 35882
+LnNwYWNl 35883
+4LiX 35884
+IHRx 35885
+IFBldHM= 35886
+PCQ= 35887
+KEN1cnJlbnQ= 35888
+YmVycmllcw== 35889
+cGxhbmF0aW9u 35890
+IGxpc3RPZg== 35891
+IFRodQ== 35892
+IFBSSU5U 35893
+IG1pc21v 35894
+IGRvaQ== 35895
+Y2hr 35896
+IFVuaWNvZGU= 35897
+KHJvbGU= 35898
+IHZpcmdpbg== 35899
+PFBvaW50 35900
+X1JFU1BPTlNF 35901
+LWhvdXNl 35902
+IFZlbmV6dWVsYQ== 35903
+RU1BSUw= 35904
+IHDDumI= 35905
+X2V4aXN0 35906
+QmFsbA== 35907
+LkNM 35908
+cmVmZXJlbmNlcw== 35909
+IEJlYXV0aWZ1bFNvdXA= 35910
+CUV4cGVjdA== 35911
+VEhJUw== 35912
+0YPQtA== 35913
+YmFuZQ== 35914
+IHRlbXBvcmFs 35915
+RVJJQw== 35916
+ZXRhcw== 35917
+IHJlZnJlc2hpbmc= 35918
+IHNlY3VsYXI= 35919
+QHN5bnRoZXNpemU= 35920
+YWNjdXI= 35921
+IG5lbGxh 35922
+IFNPTA== 35923
+LnBpcGU= 35924
+Q2hhbm5lbHM= 35925
+6Ieq 35926
+IGluc2VydGlvbg== 35927
+4buL 35928
+ZWxpYQ== 35929
+IGFkanVzdGFibGU= 35930
+Q2FuYWRh 35931
+IElURU0= 35932
+IGN1cnZlcw== 35933
+IENoZWFw 35934
+bGV0aW5n 35935
+IG9wdGltaXN0aWM= 35936
+YWxsbw== 35937
+IHBvbGl0aWNpYW4= 35938
+X2Rvd25sb2Fk 35939
+PWVkZ2U= 35940
+T1JUSA== 35941
+IG1vZGVsbw== 35942
+YXJ0bw== 35943
+LnJvdGF0ZQ== 35944
+IHNlbGVuaXVt 35945
+5oiR 35946
+X2FsaWFz 35947
+IHJlbm93bmVk 35948
+Licu 35949
+IGN6eQ== 35950
+IGFsbGVz 35951
+LkNvbXBpbGVy 35952
+IEJhc3M= 35953
+Q29ubmVjdG9y 35954
+LlJvbGU= 35955
+TElOSw== 35956
+IGNyaXRlcmlvbg== 35957
+bGVtZXRyeQ== 35958
+U3VjY2Vzc2Z1bGx5 35959
+L3BuZw== 35960
+IGV5ZWI= 35961
+YXNwYmVycnk= 35962
+KGdy 35963
+IGRhbmdlcnM= 35964
+IGNvcnJlY3RlZA== 35965
+IGdsb3c= 35966
+IGVsYWJvcmF0ZQ== 35967
+IEJlYXJz 35968
+YXdhaQ== 35969
+PSInKw== 35970
+IHByb21vdGlvbnM= 35971
+IG1hdGhlbWF0aWNhbA== 35972
+ICJg 35973
+X0dlbmVyaWNDbGFzcw== 35974
+IENoZWY= 35975
+LlNvcnQ= 35976
+dGFibGVOYW1l 35977
+UklD 35978
+IHZvbHVudGFyeQ== 35979
+IEJsYWRl 35980
+LWVsZWN0 35981
+IENvbWJhdA== 35982
+IEFiaWxpdHk= 35983
+IGFiZG9t 35984
+IGR1Y2s= 35985
+VG1w 35986
+5YWo 35987
+IGVyYXNl 35988
+LlBo 35989
+IERlZmF1bHRz 35990
+cGFydG1lbnQ= 35991
+X1VTQg== 35992
+w6p0ZQ== 35993
+Oyc= 35994
+IHBhZHM= 35995
+IE9iYW1hY2FyZQ== 35996
+LlRvdGFs 35997
+IGRpdmVydA== 35998
+IGNyaWNrZXQ= 35999
+IHJlY3JlYXRpb25hbA== 36000
+KHJlZA== 36001
+IENsZQ== 36002
+UlU= 36003
+IG1pc3Rha2Vu 36004
+IE1vbnRhbmE= 36005
+IHN0cml2ZQ== 36006
+X3NsaWRlcg== 36007
+IFBsYXN0aWM= 36008
+IGRlY29yYXRlZA== 36009
+IFZQ 36010
+bGljbw== 36011
+CWZhbHNl 36012
+IHByZWZz 36013
+KFwi 36014
+X2ZhbHNl 36015
+aWVuZG8= 36016
+IEAk 36017
+QnVja2V0 36018
+YWN0aWNhbA== 36019
+IFpoYW5n 36020
+LmNvbHM= 36021
+LkJpbmRpbmc= 36022
+IHdheA== 36023
+X1NUT1JBR0U= 36024
+IGxhd24= 36025
+IHJm 36026
+LlNjZW5l 36027
+IENhbGN1bGF0b3I= 36028
+LmRlc2lnbg== 36029
+IHJlc2ls 36030
+0LvQtdC8 36031
+RW1wbG95 36032
+IFByaWNlcw== 36033
+IFBXTQ== 36034
+YWdp 36035
+LmV2YWx1YXRl 36036
+CXBhcmFt 36037
+IGJyYXNz 36038
+YmJlbg== 36039
+IGluZmxhbW1hdGlvbg== 36040
+dWxsaXZhbg== 36041
+IGFubm90 36042
+IHBI 36043
+aWFtZXRlcg== 36044
+IEJUQw== 36045
+KGJveA== 36046
+U3Rvcnlib2FyZA== 36047
+IGNsYXk= 36048
+LmFzc2VydFJhaXNlcw== 36049
+fHN0cmluZw== 36050
+LkFwcGx5 36051
+IG1hdGNoZXI= 36052
+dW5kZWQ= 36053
+IHNhdGlzZnlpbmc= 36054
+IOyglQ== 36055
+UmVuZGVyaW5n 36056
+X2FwcHJv 36057
+aW5kcm9tZQ== 36058
+QU5FTA== 36059
+X2ZpeA== 36060
+YnJ1c2g= 36061
+Lk1hdGNo 36062
+IHNtaWxpbmc= 36063
+b25hdXQ= 36064
+U3VuZGF5 36065
+IGRlbGV0aW9u 36066
+IGVuY291cmFnZXM= 36067
+UHVsbA== 36068
+IHJldmVuZ2U= 36069
+IHF1YXJyeQ== 36070
+dHJhZGU= 36071
+IGNhYmxlcw== 36072
+KGRlbHRh 36073
+aXRlc3BhY2U= 36074
+IGZo 36075
+LmJ1bmlmdQ== 36076
+IHZpZWw= 36077
+X0lOQ0xVREVE 36078
+IFRhaWw= 36079
+YWRhcg== 36080
+b2Zz 36081
+IG1ldGFscw== 36082
+Z29t 36083
+X21ldGhvZHM= 36084
+IG5q 36085
+LlN0ZA== 36086
+KHdpbg== 36087
+JCgn 36088
+IHR1cnRsZQ== 36089
+dXJvbg== 36090
+IGVucm9sbGVk 36091
+IEh6 36092
+IEJveERlY29yYXRpb24= 36093
+IHBvbnQ= 36094
+cmVsYXRpb25zaGlw 36095
+Qmk= 36096
+s7s= 36097
+IG1hc2N1bA== 36098
+IHNoYWRlcw== 36099
+IHZy 36100
+IExvZ2lj 36101
+IGFpbg== 36102
+IERJU1Q= 36103
+IGNvbGxhcg== 36104
+InByb2ZpbGU= 36105
+R2VuZXJhdGVkVmFsdWU= 36106
+IFBvc3NpYmxl 36107
+IGVpbmVz 36108
+g4E= 36109
+LnRpbWVvdXQ= 36110
+IEVj 36111
+IGplcnNleQ== 36112
+LkRvdWJsZQ== 36113
+IHF1YWxpZnlpbmc= 36114
+dm9y 36115
+Q1JFRU4= 36116
+X0FwcA== 36117
+X3JlY3Y= 36118
+IGFsaWVucw== 36119
+SXRz 36120
+RXNj 36121
+aWF0b3I= 36122
+IEVjbGlwc2U= 36123
+IGdo 36124
+VmljdA== 36125
+CWh0bWw= 36126
+dG9v 36127
+LmNvbnN0 36128
+IGFudGVyaW9y 36129
+IFd1 36130
+KGtleXM= 36131
+IHVsdHI= 36132
+X3BvbHk= 36133
+IFRhcA== 36134
+IEJ1ZA== 36135
+QVdT 36136
+IGNyYXNoZXM= 36137
+X3RvdA== 36138
+Q29udGlu 36139
+LWhhbmRlZA== 36140
+YWx0aG91Z2g= 36141
+4Lia 36142
+aWZpY2VudA== 36143
+IGRldmU= 36144
+dXRvcnk= 36145
+IFdvcnRo 36146
+X01T 36147
+IGZsb29yaW5n 36148
+IHNlbGxlcnM= 36149
+IFRoYW5rc2dpdmluZw== 36150
+IHBuZw== 36151
+IHZhbG9yZXM= 36152
+IHNsZWV2ZQ== 36153
+IGZpbGxl 36154
+0JA= 36155
+IGFwcG9pbnRtZW50cw== 36156
+IHZpbQ== 36157
+VXNlckluZm8= 36158
+Qk9PU1Q= 36159
+IHBvc2Vk 36160
+aW5pdGlhbGl6ZWQ= 36161
+LnByb2R1Y3Rz 36162
+IExlYWRlcnNoaXA= 36163
+bWFudWVs 36164
+JyU= 36165
+ZW1hcmtz 36166
+UGVyY2VudGFnZQ== 36167
+KGRpc3Q= 36168
+LmF2YXRhcg== 36169
+KGhPYmplY3Q= 36170
+5LuK 36171
+X2lmZg== 36172
+aWNvbmU= 36173
+Oyk= 36174
+X25pbA== 36175
+IGFib2w= 36176
+0LXRgdGC 36177
+IHZlbnVlcw== 36178
+LkNvbnZlcnQ= 36179
+IScpCg== 36180
+LkJpdG1hcA== 36181
+c2tpbg== 36182
+X0NPTFVNTg== 36183
+UmV2 36184
+R1JFU1M= 36185
+Z293 36186
+IHdpc2hlZA== 36187
+dHJhY3Rz 36188
+LmFzc2VydEZhbHNl 36189
+IHNjcmVlbnNob3Q= 36190
+IGZvaXM= 36191
+Q29tYg== 36192
+TGluZVdpZHRo 36193
+IEdyYWI= 36194
+IGludGVuc2l2ZQ== 36195
+CXNo 36196
+Kyk= 36197
+LmZpcnN0TmFtZQ== 36198
+X1BST0NFU1M= 36199
+IHRpbHQ= 36200
+aXRvcmVk 36201
+LkxPRw== 36202
+IGJhaw== 36203
+IGludGVudGlvbmFsbHk= 36204
+LnBsYXllcnM= 36205
+KGNhbnZhcw== 36206
+KSkpDQo= 36207
+LlByb3ZpZGVy 36208
+X1BVQkxJQw== 36209
+VGFsaw== 36210
+IExpdg== 36211
+Y2hlZHVsZXJz 36212
+IGxj 36213
+YWRpYw== 36214
+ZmVhdHVyZWQ= 36215
+LnJlc291cmNlcw== 36216
+RnVsbE5hbWU= 36217
+IG1lYW53aGlsZQ== 36218
+QnVmZmVycw== 36219
+IHJlc29sdmVy 36220
+IFNBUA== 36221
+X1RF 36222
+R05V 36223
+IEZvcm1zTW9kdWxl 36224
+X3do 36225
+IFN3ZQ== 36226
+LndpZGdldHM= 36227
+IGNhYmluZXRz 36228
+IHN1c2NlcHQ= 36229
+IEJvdHQ= 36230
+YWN0aXZleA== 36231
+YXZhcg== 36232
+YW50aWNz 36233
+ICI9Ig== 36234
+X2t3YXJncw== 36235
+IGdhbWVPYmplY3Q= 36236
+IEFuZ2xl 36237
+Lkl0ZXI= 36238
+bWFyc2g= 36239
+IEJpcnRoZGF5 36240
+IENNUw== 36241
+cmVxdWVzdHM= 36242
+IFBlYXJs 36243
+X0VPTA== 36244
+IGxpbnV4 36245
+KG9yZw== 36246
+X01vdXNl 36247
+LmNvbnN0cnVjdG9y 36248
+IHpk 36249
+IGtpY2tz 36250
+YXJ0aXNhbg== 36251
+IGVheA== 36252
+S24= 36253
+cG9uZ2U= 36254
+IEZpbmxhbmQ= 36255
+IG1ldHJlcw== 36256
+IEFzc2Vzc21lbnQ= 36257
+cGFydG5lcg== 36258
+L3ByZQ== 36259
+IScsCg== 36260
+W0ludA== 36261
+IG9zbG8= 36262
+ZGF0ZXBpY2tlcg== 36263
+L1N0cmluZw== 36264
+b3BsYXk= 36265
+IEhlYnJldw== 36266
+LGRvdWJsZQ== 36267
+IHRyYWJhbA== 36268
+KyJc 36269
+CUVJRg== 36270
+L3RleHQ= 36271
+X0ZJUlNU 36272
+IFBldGU= 36273
+IGVnbw== 36274
+IGV4dHJhcw== 36275
+UERP 36276
+IHJlZ3VsYXRl 36277
+IFFXaWRnZXQ= 36278
+c3Rz 36279
+IFNob3dz 36280
+IE5IUw== 36281
+LmNvdXJzZQ== 36282
+cHRocmVhZA== 36283
+IEZ1ZWw= 36284
+LnRpbWVz 36285
+IMKw 36286
+IHN0cmlkZXM= 36287
+KCQoJyM= 36288
+KHdvcmRz 36289
+IHJoeXRobQ== 36290
+IHNwb250 36291
+IHNlbnNhdGlvbg== 36292
+IHNwaWtl 36293
+Q2xvc2luZw== 36294
+6aG16Z2i 36295
+TnVtZXJpYw== 36296
+IGJyZWF0aGU= 36297
+IGZpbmFsZQ== 36298
+X0ZBQ1Q= 36299
+aW5pb24= 36300
+IGNoaWxs 36301
+IGZvcm1hbGx5 36302
+QU5HRUQ= 36303
+ICc6Jw== 36304
+INC/0YDQuA== 36305
+YXE= 36306
+IEZhYnJpYw== 36307
+KGxhdA== 36308
+IFByaW5jaXBhbA== 36309
+IGVycm8= 36310
+b2NhbGU= 36311
+Tm9t 36312
+IGZvc3Q= 36313
+X0NVU1RPTQ== 36314
+LmludGVsbGlq 36315
+ZXJ0b29scw== 36316
+IGNsYXNzZQ== 36317
+YWRpZW50cw== 36318
+IGZ1bmRyYWlzaW5n 36319
+RU5F 36320
+X09QVElPTlM= 36321
+X29i 36322
+Ly99Cg== 36323
+IHByb3RlY3Rpb25z 36324
+LnNlZWQ= 36325
+TlY= 36326
+dGVybWluYWw= 36327
+Ozs7 36328
+UHJlZGljYXRl 36329
+IOy2 36330
+IGJvbWJpbmc= 36331
+R0Y= 36332
+IGNoZXc= 36333
+KSkpLg== 36334
+cXVhbGlmaWVk 36335
+XT17 36336
+bGlzdGVu 36337
+Q0VOVA== 36338
+ZGlnZXN0 36339
+RWFzdA== 36340
+IGRpdmVy 36341
+IGVuZHBvaW50cw== 36342
+IGVl 36343
+IGNvbGxlYWd1ZQ== 36344
+IGRpc3NlcnRhdGlvbg== 36345
+X2NvbW1pdA== 36346
+X0RBVA== 36347
+LnJj 36348
+IGJyZWFzdHM= 36349
+IFJ1Zw== 36350
+IFBpbA== 36351
+Q29udHJhY3Rz 36352
+IEJyeWFu 36353
+V2ViVmlldw== 36354
+IGNvbmNlbnRyYXRl 36355
+IElubmVy 36356
+ICd8 36357
+c3Rkb3V0 36358
+X1N1Yg== 36359
+Pi0tPgo= 36360
+Vm9s 36361
+IFNTRA== 36362
+KSkpLA== 36363
+Lk9wdGlvbmFs 36364
+IG51cnNlcw== 36365
+IG9yYg== 36366
+X3Bl 36367
+KTsNCg0KDQo= 36368
+cGxhY2Vk 36369
+ZXNzZXI= 36370
+IHRoZXJhcGV1dGlj 36371
+IHdoaXRlc3BhY2U= 36372
+IGFzdG9u 36373
+U3VjY2Vzc2Z1bA== 36374
+IHByYWlzZWQ= 36375
+IFdlcw== 36376
+IGVpZ2h0aA== 36377
+aXJhbA== 36378
+IHZyb3V3 36379
+IGZhY3Rpb24= 36380
+X2JpYXM= 36381
+IHdpdGNo 36382
+IG5wYw== 36383
+KHNi 36384
+IFJvZHJpZw== 36385
+X2JpZw== 36386
+RGVwZW5kZW5jeQ== 36387
+IEFicmFoYW0= 36388
+YXJkaQ== 36389
+Q0FS 36390
+bm9z 36391
+IGFidW5kYW5jZQ== 36392
+IG51dHJpZW50cw== 36393
+aW5zdGVpbg== 36394
+LlZlcnQ= 36395
+IElTUw== 36396
+PFU= 36397
+IHN1bXM= 36398
+X2hpc3Q= 36399
+IGZhcm1lcg== 36400
+IEFicg== 36401
+U2hvdA== 36402
+IEJhZFJlcXVlc3Q= 36403
+IGhhc3M= 36404
+IFJhaWxz 36405
+IGFmZmlsaWF0ZWQ= 36406
+5p2l 36407
+IGVyZg== 36408
+SU5G 36409
+IFZpZXdIb2xkZXI= 36410
+bWluaQ== 36411
+IFJvdGg= 36412
+IGZhaXRoZnVs 36413
+IFBoaWxsaXBz 36414
+QU5ET00= 36415
+XS5b 36416
+X1BBWQ== 36417
+IEFyY3RpYw== 36418
+ZmFrZXI= 36419
+RGlnaXQ= 36420
+TWFsZQ== 36421
+c3RkZXJy 36422
+c2V5cw== 36423
+IMWh 36424
+X3JlbW90ZQ== 36425
+bGlxdWU= 36426
+IGluZGVm 36427
+IEluZHVzdHJpZXM= 36428
+aXRyYQ== 36429
+X3BhaXJz 36430
+PGlvc3RyZWFt 36431
+IHNhbGFyaWVz 36432
+aWtlbg== 36433
+LkZyYW1l 36434
+UExJQw== 36435
+X1NQRUM= 36436
+IE1lZGl0ZXJy 36437
+IHN5c3RlbWF0aWM= 36438
+IGludGVycm9n 36439
+SWNvbkJ1dHRvbg== 36440
+c2Vh 36441
+aW50cm8= 36442
+IElzc3Vlcw== 36443
+ZW5jcnlwdGVk 36444
+IGludGVybmF0aW9uYWxseQ== 36445
+IHNucHJpbnRm 36446
+IHBhc3Rh 36447
+IEJyYWRsZXk= 36448
+X1N0YXR1cw== 36449
+QUxL 36450
+X1BBRA== 36451
+LmxhdW5jaA== 36452
+PHNlbGVjdA== 36453
+IGhhcmRlc3Q= 36454
+IHBoeQ== 36455
+ICgoKg== 36456
+LXNsaWRl 36457
+IE5vYm9keQ== 36458
+U3U= 36459
+IGFzw60= 36460
+Y2xvc2VzdA== 36461
+X2luaXRpYWxpemVy 36462
+IHN1cHBvcnRlcg== 36463
+LWdlbg== 36464
+IHRhbGVz 36465
+IGNvcnA= 36466
+X2Z1 36467
+c2F0 36468
+bmVpZ2hib3I= 36469
+Lk1pZ3JhdGlvbnM= 36470
+IGFsZ3Vu 36471
+IHNpbm9u 36472
+LlNwZWM= 36473
+PywK 36474
+LkdM 36475
+bWFsZQ== 36476
+IG1vbml0b3Jz 36477
+eWxhbg== 36478
+LUxpY2Vuc2U= 36479
+Lm1hdGNoZXM= 36480
+IEFCUw== 36481
+IE1hc3Q= 36482
+IFdhbGxldA== 36483
+KCQoIiM= 36484
+RGlydHk= 36485
+IGNvcGU= 36486
+IGludGVycG9sYXRpb24= 36487
+b3VzZWQ= 36488
+IEpldHM= 36489
+LkZMQUc= 36490
+LkNhbmNlbA== 36491
+LkV2ZW50cw== 36492
+bmV2ZXI= 36493
+IE1Ieg== 36494
+PkQ= 36495
+IHNlcnZsZXQ= 36496
+YmFzdGlhbg== 36497
+ID4m 36498
+U0lE 36499
+X2Nsaw== 36500
+IGRpdmlzaW9ucw== 36501
+fScsCg== 36502
+IGRpbGRv 36503
+IHBhcmFkZQ== 36504
+bWFqb3I= 36505
+IGFib2FyZA== 36506
+Oysr 36507
+IGZ1c2lvbg== 36508
+In0seyI= 36509
+IERpYWxvZ1Jlc3VsdA== 36510
+CWFycg== 36511
+LWVt 36512
+X25y 36513
+KGhhbmRsZXI= 36514
+Lk5FVA== 36515
+Llh0cmFSZXBvcnRz 36516
+IFNoYWg= 36517
+IEJyaWVm 36518
+LSw= 36519
+IHByZWNpbw== 36520
+CQkJICAgICAg 36521
+IHRhbnQ= 36522
+IEdyYW5kZQ== 36523
+L3htbA== 36524
+X0lDT04= 36525
+IFJldHJv 36526
+dW5xdWU= 36527
+IG5hZw== 36528
+dG9GaXhlZA== 36529
+WEw= 36530
+IGRlY2xhcmluZw== 36531
+IENvbmNyZXRl 36532
+IEFtYXppbmc= 36533
+CXByaW50aw== 36534
+IGRlYmF0ZXM= 36535
+REFURUQ= 36536
+IGFlc3RoZXRpYw== 36537
+ZW1ldGVyeQ== 36538
+Um91dGluZ01vZHVsZQ== 36539
+IE5hc2h2aWxsZQ== 36540
+V0FZUw== 36541
+IHdvbGY= 36542
+IG9ic2VydmVycw== 36543
+T1RB 36544
+YW5zb24= 36545
+IGVh 36546
+IGdyZWVuaG91c2U= 36547
+k43kvZw= 36548
+IHN0YWly 36549
+IGltbWlncmFudA== 36550
+X2FwcGx5 36551
+cGVhcmU= 36552
+IEJsb29tYmVyZw== 36553
+X1BMQVlFUg== 36554
+UmVzcA== 36555
+5q2j 36556
+Q2hvb3Nlcg== 36557
+IElDb2xsZWN0aW9u 36558
+UGV0ZXI= 36559
+RXJybw== 36560
+LmRldGVjdENoYW5nZXM= 36561
+TWFwcw== 36562
+IHNxdWVlemU= 36563
+IEhvbWVz 36564
+d2VnaWFu 36565
+IGZvcm1hdHRpbmc= 36566
+IG5lZ290aWF0ZQ== 36567
+dWxk 36568
+IE5lcA== 36569
+IFFC 36570
+IGVjb25vbWllcw== 36571
+ICovLA== 36572
+IHJlZHVuZA== 36573
+IEFiZXI= 36574
+LklzTnVsbE9yV2hpdGVTcGFjZQ== 36575
+eWNsZWQ= 36576
+ICAgICAgICAgICAgICAgICAgCg== 36577
+X1No 36578
+IHNrZXB0 36579
+IHJlY3JlYXRlZA== 36580
+IGdldFR5cGU= 36581
+IG1hcmdpbnM= 36582
+IGNvbG9uaWFs 36583
+Y2hhcnRz 36584
+Ly9A 36585
+IHByb2Nlc3NvcnM= 36586
+6K+0 36587
+YmF0aXM= 36588
+5oSP 36589
+YXRvcmlv 36590
+bWVudGlvbmVk 36591
+UGF0aWVudA== 36592
+IHByZXk= 36593
+Q2hlY2tib3g= 36594
+X3hwYXRo 36595
+LnNraXA= 36596
+IE1vcm1vbg== 36597
+IE1lbW9yeVN0cmVhbQ== 36598
+Q1JFTUVOVA== 36599
+IGt1 36600
+bWVsZA== 36601
+XERhdGE= 36602
+IEtlcm5lbA== 36603
+aWx0cg== 36604
+6YCB 36605
+KHByb2ZpbGU= 36606
+Q2FyYm9u 36607
+Uk9MRQ== 36608
+KHBs 36609
+XSoo 36610
+Lm1lbW9yeQ== 36611
+IG1lZGFs 36612
+IGFkdmlzb3I= 36613
+aXTDpHQ= 36614
+IGhkcg== 36615
+aWVydW5n 36616
+IFByb3ZpZGVz 36617
+KGFscGhh 36618
+IHRlZW5hZ2Vycw== 36619
+LXBhcnNlcg== 36620
+LkxhdExuZw== 36621
+XSgpCg== 36622
+IGZlbG9ueQ== 36623
+CQkJCgkJCQo= 36624
+Qk9PSw== 36625
+IHNsYXNo 36626
+IGNsZWFyZml4 36627
+IFByb3BoZXQ= 36628
+5a65 36629
+cmlnaHRuZXNz 36630
+LWZp 36631
+LmtpbmQ= 36632
+ZXJ0b24= 36633
+Smlt 36634
+IG1hbmlwdWxhdGU= 36635
+IHdvcmtzaGVldA== 36636
+b2xpbg== 36637
+c3RhcnM= 36638
+IGFydGlmYWN0 36639
+X0VNUFRZ 36640
+CW1haW4= 36641
+LS0tLS0tLS0tLS0tLTwv 36642
+L3N0YXRpYw== 36643
+SVRJRVM= 36644
+IENvdW5zZWw= 36645
+IFdD 36646
+IEJMQUNL 36647
+LXN5c3RlbQ== 36648
+IFRyaXBsZQ== 36649
+LmJ0 36650
+c29mdHdhcmU= 36651
+XScpLg== 36652
+SW5qZWN0aW9u 36653
+X25vdGlmeQ== 36654
+IGZpZnRlZW4= 36655
+IGFtYmFzc2Fkb3I= 36656
+YnJlYWtpbmc= 36657
+VVJJQ29tcG9uZW50 36658
+IFByb3Rlc3Q= 36659
+LlJlc2V0 36660
+IE1Qcw== 36661
+dnJv 36662
+LmdldFN0YXR1cw== 36663
+X21vcmU= 36664
+Y3Vw 36665
+IEtlbnlh 36666
+5bey 36667
+IGFtbXVuaXRpb24= 36668
+15XX 36669
+IERhc2g= 36670
+IHVuZGVyZ28= 36671
+IGJ1ZGR5 36672
+0YLQvtGA 36673
+ZXRpY2FsbHk= 36674
+X091dA== 36675
+IEJyb2Fkd2F5 36676
+qow= 36677
+IEZpdHo= 36678
+IHN0cmlwcGVk 36679
+LWNhY2hl 36680
+IHVtYg== 36681
+IGFub20= 36682
+IHNpYmxpbmdz 36683
+b2N1bWVudGVk 36684
+SW50ZXJydXB0ZWRFeGNlcHRpb24= 36685
+IHBlbmc= 36686
+bHN0 36687
+X0FMSUdO 36688
+LWNhcA== 36689
+UkQ= 36690
+Y2VsbHM= 36691
+IE1vdG9ycw== 36692
+IHRyYW5zbGF0aW9ucw== 36693
+dXN0ZXJpbmc= 36694
+6Zo= 36695
+IGxlYWtz 36696
+ZmlsZVBhdGg= 36697
+IG91dGdvaW5n 36698
+X2VuZHBvaW50 36699
+X0dM 36700
+LmxpZmVyYXk= 36701
+cmljaHQ= 36702
+IE9wZW5HTA== 36703
+LmpwYQ== 36704
+IGFmZmVjdGlvbg== 36705
+Zmx1eA== 36706
+IGdseQ== 36707
+IGJ1ZA== 36708
+Pic7 36709
+IGV4cHJlc3Npbmc= 36710
+IElR 36711
+IEZhY3Q= 36712
+LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioK 36713
+X21hc3M= 36714
+KSk6 36715
+IGNvbmRvbQ== 36716
+IGNyZWF0ZVN0YXRl 36717
+b21ldG93bg== 36718
+IGlycg== 36719
+ID4o 36720
+PkI= 36721
+aXRlcmF0aW9u 36722
+44Oq 36723
+IHNoaXJ0cw== 36724
+b3VudHk= 36725
+LT4k 36726
+X1NJR04= 36727
+IERhbGU= 36728
+IGpq 36729
+RWFzeQ== 36730
+RnJl 36731
+IE55 36732
+IGNobG9y 36733
+bWF0Y2hlZA== 36734
+IEdlcm0= 36735
+LVVB 36736
+IE5hdGhhbg== 36737
+ZWR1Y2F0aW9u 36738
+LXlhcmQ= 36739
+LWNoZQ== 36740
+aG91c2Vz 36741
+cml0aW9uYWw= 36742
+IHByb3hpbWl0eQ== 36743
+IGRpZXNlbQ== 36744
+4bqtcA== 36745
+IGRyb3VnaHQ= 36746
+LmF1ZGlv 36747
+IExlbw== 36748
+IGZhdm9yYWJsZQ== 36749
+aW5jaA== 36750
+IERhdw== 36751
+cmlibHk= 36752
+X3N0dWRlbnQ= 36753
+aWRhYmxl 36754
+T1ZF 36755
+IGxhY2tz 36756
+b3VuY2luZw== 36757
+LmJ1c2luZXNz 36758
+IHJlb3Blbg== 36759
+bWF5YmU= 36760
+X0dMT0JBTA== 36761
+IGRyZXNzZXM= 36762
+IEVkd2FyZHM= 36763
+ZW5zaWJsZQ== 36764
+IEhhcmR3YXJl 36765
+IEV4Y2VsbGVudA== 36766
+IFRpbWVVbml0 36767
+Q1RJT05T 36768
+IHNjaGVkdWxlcw== 36769
+IHNlZ3Vl 36770
+T3BlbnM= 36771
+YW1tZW4= 36772
+LUlkZW50aWZpZXI= 36773
+IHN0YXJpbmc= 36774
+IGhhcHBpbHk= 36775
+IEhvYg== 36776
+J18= 36777
+ICIpOw== 36778
+YW1lbnRvcw== 36779
+ZXRjaGVk 36780
+IC8+fQo= 36781
+LlVzZXJz 36782
+IGludGVycnVwdGVk 36783
+Q29udGFjdHM= 36784
+IHJlZ2lzdHJv 36785
+aW5idXJnaA== 36786
+Q0hB 36787
+X2ltcA== 36788
+cGhpcw== 36789
+c2F5 36790
+IHJldGFpbGVy 36791
+Lk5PREU= 36792
+L21hcHM= 36793
+X0xBU1Q= 36794
+IENoYXJnZQ== 36795
+X2d1YXJk 36796
+Q29sbGlkZXI= 36797
+IFN0YXRlbGVzc1dpZGdldA== 36798
+IjpbIg== 36799
+KCIuLi8uLi8= 36800
+aW94aWRl 36801
+IFN1bmQ= 36802
+ICcnOw== 36803
+dW5zZXQ= 36804
+YWRkV2lkZ2V0 36805
+0LvRjg== 36806
+ZWxsZXM= 36807
+YWxrZXI= 36808
+QXJj 36809
+IGRlZHVjdA== 36810
+R1VJTGF5b3V0 36811
+IFZpbGxh 36812
+IGZvcmJpZGRlbg== 36813
+X3doZXJl 36814
+IFwv 36815
+IFRpYg== 36816
+X0FY 36817
+XQ0KDQo= 36818
+IEJpcg== 36819
+IGJlbmQ= 36820
+IE1BS0U= 36821
+IE1FVA== 36822
+IGZ1dHVyZXM= 36823
+IHdlaWdodGVk 36824
+IiIiDQo= 36825
+IGF1dGhvcml6ZQ== 36826
+KHByb2dyYW0= 36827
+fSx7Ig== 36828
+IGNvZWZmaWNpZW50cw== 36829
+w6pz 36830
+UGVyUGFnZQ== 36831
+IEJhdGhyb29t 36832
+IFB1Ymxpc2hpbmc= 36833
+R1BM 36834
+IHN1Ym1pc3Npb25z 36835
+IE5VTUJFUg== 36836
+asSF 36837
+IGFkZGl0aW9uYWxseQ== 36838
+ZW1wcmU= 36839
+IFNoZWw= 36840
+b3R5cA== 36841
+U29sdXRpb24= 36842
+IHRodW5kZXI= 36843
+X2Vj 36844
+IAogICAgCg== 36845
+IEZlbGxvdw== 36846
+IGtheQ== 36847
+IG5ld1N0YXRl 36848
+T05UQUw= 36849
+SW1wbGVtZW50YXRpb24= 36850
+Lkxvb2s= 36851
+IGVudHM= 36852
+IGxvcnM= 36853
+IEJJRw== 36854
+ZmFi 36855
+IGF2ZXJhZ2Vk 36856
+IEZlZWRiYWNr 36857
+IFdlbGxz 36858
+IG1hcnRpYWw= 36859
+IGluZHVs 36860
+IENvbW11bmlzdA== 36861
+IEZvcmV4 36862
+IEFncmljdWx0dXJl 36863
+Ils= 36864
+IHF1YXI= 36865
+IEtvbnQ= 36866
+CXZpZXc= 36867
+LkJ5dGVz 36868
+ZGVza3RvcA== 36869
+IE1ha2Vz 36870
+YWtlc3BlYXJl 36871
+Lk51bGxhYmxl 36872
+IHNwb3RsaWdodA== 36873
+VkI= 36874
+b3d5 36875
+KHRvcmNo 36876
+dHJpZGdl 36877
+X2JvdW5kcw== 36878
+IGFwb2xvZ2l6ZQ== 36879
+LmFkZEl0ZW0= 36880
+YW50ZA== 36881
+Kik7Cg== 36882
+LHU= 36883
+KGdlbg== 36884
+57uT 36885
+cmVhdG9y 36886
+IENvcmQ= 36887
+b3VwcGVy 36888
+Lm1ldHJv 36889
+IGV3 36890
+IFdPUkQ= 36891
+LkFmdGVy 36892
+IGRldGFpbmVk 36893
+IEhhbW1lcg== 36894
+ZXhpc3Rpbmc= 36895
+IG9zdA== 36896
+IG1vbnVtZW50 36897
+LWN1c3RvbQ== 36898
+VXNlcklE 36899
+IE5vbQ== 36900
+IHJlamVjdGlvbg== 36901
+KGRpbQ== 36902
+IHNpbmdsZXRvbg== 36903
+CWRpZQ== 36904
+YXJpYW5jZQ== 36905
+cmVwb3J0cw== 36906
+XSE9 36907
+ZWxkYQ== 36908
+IHByZXZhbGVuY2U= 36909
+X3JlZ3M= 36910
+LiIu 36911
+IGZlbWluaXN0 36912
+Q29kZWM= 36913
+ICoqCg== 36914
+KGxhYmVscw== 36915
+X01BUks= 36916
+RkFJTEVE 36917
+IGFkbWluaXN0ZXJlZA== 36918
+V04= 36919
+ICAgICAgICAJCQ== 36920
+IG5vdW4= 36921
+d2ln 36922
+IGdvdHRh 36923
+IHJpZg== 36924
+LWlt 36925
+IFBhdWxv 36926
+IENvbW1hbmRUeXBl 36927
+XSkpCgo= 36928
+LXplcm8= 36929
+VHJhaW5pbmc= 36930
+IGxvcmQ= 36931
+X2FydA== 36932
+cmVkZGl0 36933
+Q2VydA== 36934
+IHBlc28= 36935
+Um90 36936
+IGVuZGFuZ2Vy 36937
+LmRy 36938
+dXNlckluZm8= 36939
+dW50cw== 36940
+bnY= 36941
+IFRyYWlsZXI= 36942
+LWZpcnN0 36943
+KG1ha2U= 36944
+IGJlbmVmaWNp 36945
+LWJsYWNr 36946
+acOf 36947
+IHVuZG91YnRlZGx5 36948
+IG1leA== 36949
+IEFuY2llbnQ= 36950
+KGFz 36951
+IGRlc2NlbnQ= 36952
+UGljaw== 36953
+IHJlcGxpY2E= 36954
+JG9iag== 36955
+w6Rocg== 36956
+IGFycm93cw== 36957
+ZnR5 36958
+IExpYnlh 36959
+dWdh 36960
+Y2hhcmdlZA== 36961
+VHVy 36962
+IGhvbWlj 36963
+aXNzZW4= 36964
+IEZha2U= 36965
+IGJlZXJz 36966
+IHNjYXR0ZXJlZA== 36967
+KFRpbWU= 36968
+VVRJTA== 36969
+IGJ1cmVhdWNy 36970
+L3BsYWlu 36971
+IHN0aWNraW5n 36972
+RkFJTA== 36973
+IENvdmlk 36974
+VGhpcmQ= 36975
+X3ByZXNlbnQ= 36976
+IFBpZXJyZQ== 36977
+IOuq 36978
+IFsuLi5dCgo= 36979
+UHJvYg== 36980
+IFRyYWZmaWM= 36981
+aWNhbw== 36982
+ZG9jdG9y 36983
+ICksCgo= 36984
+VGFicw== 36985
+YWx1 36986
+77ya4oCc 36987
+IGluaGVyZW50 36988
+X05v 36989
+cml0aXM= 36990
+IFByb29m 36991
+LmJhc2VuYW1l 36992
+5Lya 36993
+IGNoaW0= 36994
+IFByb3RlY3RlZA== 36995
+Y3JpdA== 36996
+IHByb25l 36997
+INC60L7QvQ== 36998
+IEhlcm9lcw== 36999
+IGFueGlvdXM= 37000
+IGFub3M= 37001
+IHdlZWtlbmRz 37002
+IHNleHQ= 37003
+IHJlZHVjZXI= 37004
+PVVURg== 37005
+aGFsZg== 37006
+IFNhdw== 37007
+Lm1t 37008
+IG51ZXZh 37009
+LmN1cnJlbnRUYXJnZXQ= 37010
+Lmx1YQ== 37011
+X0VYVEVOU0lPTg== 37012
+CXJlZw== 37013
+IEN0cmw= 37014
+X2FsaWdu 37015
+YWNjZXB0YWJsZQ== 37016
+IHJ1c2hpbmc= 37017
+ZnJhYw== 37018
+IGJvYXN0cw== 37019
+Rml2ZQ== 37020
+wrE= 37021
+IFRlbXBlcmF0dXJl 37022
+Pik6 37023
+IGNoYXJ0ZXI= 37024
+UkVBVEVE 37025
+IHN1YmplY3RlZA== 37026
+IG9wYw== 37027
+aGVhbHRoeQ== 37028
+5L2/55So 37029
+IFNjaWVudGlmaWM= 37030
+IGZyYXU= 37031
+cmlhZ2Vz 37032
+4LiU 37033
+LmludmVudG9yeQ== 37034
+YXRpb25hbGU= 37035
+TWFk 37036
+bWludXRlcw== 37037
+Pj4oKTsK 37038
+IEVudg== 37039
+IHJlY29yZGluZ3M= 37040
+IHN1c3BpY2lvbg== 37041
+c3FsaXRl 37042
+CXJlYWQ= 37043
+44Gm 37044
+IHdvcnJpZXM= 37045
+LnB1dFN0cmluZw== 37046
+IFNoYW5naGFp 37047
+KHVpZA== 37048
+cmVy 37049
+IHbDrWRl 37050
+Iik6 37051
+IG1ldGhvZG9sb2d5 37052
+INC60L7RgtC+0YA= 37053
+Y2Nj 37054
+YXZhZA== 37055
+IGluZHVjdGlvbg== 37056
+CVRocmVhZA== 37057
+LHN0cmluZw== 37058
+4bqhaQ== 37059
+bmVobWVu 37060
+dWl0aW9u 37061
+ICpfXw== 37062
+LmVtZg== 37063
+IOyc 37064
+L3RoZW1lcw== 37065
+IE5pbmU= 37066
+Lk9uZQ== 37067
+IEVtYmVk 37068
+IGZheg== 37069
+dWF0aW9ucw== 37070
+IHByaXZhdGVseQ== 37071
+IGxpbmc= 37072
+W0Y= 37073
+dXNoaQ== 37074
+IGxhdW5jaGVz 37075
+KEtFWQ== 37076
+R01U 37077
+IGFpbWluZw== 37078
+cGF0aWJsZQ== 37079
+IEJpZGVu 37080
+aXc= 37081
+IERlZ3JlZQ== 37082
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA= 37083
+ICQoJzw= 37084
+w6FyaW9z 37085
+dG9VcHBlckNhc2U= 37086
+7KCc 37087
+IEVVUg== 37088
+IG92ZXJzaWdodA== 37089
+IHRhYmxlc3A= 37090
+VXBkYXRlcw== 37091
+Lm1ha2VkaXJz 37092
+IGh1bWlkaXR5 37093
+L3RlbXBsYXRl 37094
+QWx3YXlz 37095
+KElT 37096
+X2NlcnQ= 37097
+RGln 37098
+IHVuZGVyd2F5 37099
+b3J0b24= 37100
+IEh1cnJpY2FuZQ== 37101
+IHNwZW5kcw== 37102
+IFNlZ21lbnQ= 37103
+IGZsaWVz 37104
+IFRvZ2dsZQ== 37105
+IEx5bmNo 37106
+IHNlbnNlcw== 37107
+IEtvcw== 37108
+c2V0RW5hYmxlZA== 37109
+aXN0aWNhbGx5 37110
+IHRlc3Rlcg== 37111
+IGFkbWluaXN0cmF0b3Jz 37112
+IHRhZ2dlZA== 37113
+0JM= 37114
+IHNob3J0Y3V0 37115
+IFJlc29sdXRpb24= 37116
+IHN1cGVydmlzaW9u 37117
+IEFzaGxleQ== 37118
+VHJhY2tpbmc= 37119
+dWxhdG9yeQ== 37120
+YW5kZWw= 37121
+aXN0ZW4= 37122
+IHVucmU= 37123
+KGRpZmY= 37124
+QU5UUw== 37125
+IHJpZGVy 37126
+IHPEhQ== 37127
+LlNlcmllcw== 37128
+X29yZGVycw== 37129
+T1JJWk9OVEFM 37130
+IHJldGVudGlvbg== 37131
+44CCPC8= 37132
+LlRlc3Rz 37133
+U3lu 37134
+LnBhcnNlRG91Ymxl 37135
+a29kZQ== 37136
+emVudA== 37137
+R2VuZXJhdGlvbg== 37138
+IGFkbWl0cw== 37139
+IExlYWs= 37140
+IGFrYQ== 37141
+Uk9XUw== 37142
+IEFuZ2VsYQ== 37143
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg 37144
+IG5vb24= 37145
+IHN0YXJr 37146
+IGRyYWdnZWQ= 37147
+44O844I= 37148
+IHJlY3ljbGVyVmlldw== 37149
+IFNpbGljb24= 37150
+X3N1ZmZpeA== 37151
+Sm9u 37152
+Y29jaw== 37153
+IFByb2JhYmx5 37154
+SW50cm9kdWN0aW9u 37155
+IFRlcnJvcg== 37156
+KFRoaXM= 37157
+IEJhc2ViYWxs 37158
+IGplbnRlcg== 37159
+Y2hlc3RyYQ== 37160
+Lm5hbg== 37161
+PWc= 37162
+IGNsYXJpZnk= 37163
+eWlp 37164
+cm9vdHM= 37165
+IG5vdGVib29r 37166
+IEV4Y2VwdA== 37167
+IHJpc2Vz 37168
+IEJydXNzZWxz 37169
+YXRvcmllcw== 37170
+LlVTRVI= 37171
+cm9zc292ZXI= 37172
+L3VwbG9hZA== 37173
+IEV2ZW50dWFsbHk= 37174
+Q29uc2lkZXI= 37175
+IEJvdW5k 37176
+LmlkZW50aWZpZXI= 37177
+KHVuaXR0ZXN0 37178
+IGluZmVyaW9y 37179
+IGNyYw== 37180
+IGF1dGlzbQ== 37181
+VUlBbGVydA== 37182
+IEthdmFuYXVnaA== 37183
+aW5lbWVudA== 37184
+cXVldWVSZXVzYWJsZQ== 37185
+U2tpbg== 37186
+LmJhY2tlbmQ= 37187
+LmdldFN0YXRl 37188
+dW5kaW5n 37189
+IHN1YmNsYXNz 37190
+IHJlZmluZWQ= 37191
+IGFubm95 37192
+IHJuZA== 37193
+RGlyZWN0b3I= 37194
+IOuC 37195
+YmVjY2E= 37196
+bW9uZ29kYg== 37197
+IENvbW1vbndlYWx0aA== 37198
+QXo= 37199
+IFRoaW5n 37200
+IHJlY29t 37201
+dW5pbmc= 37202
+CWNvbg== 37203
+CSAgICAK 37204
+ZW1pY3M= 37205
+ZWNk 37206
+IGhvcm55 37207
+QVRSSVg= 37208
+IG1pc2xlYWRpbmc= 37209
+IEJldw== 37210
+L25vZGU= 37211
+Y3N0ZGlv 37212
+4Lin 37213
+IGFkZGl0aW9ucw== 37214
+cmly 37215
+X3JlcXVlc3Rz 37216
+IHJlY2hlcmNoZQ== 37217
+c3R1ZGVudHM= 37218
+X3Bvc2l0aW9ucw== 37219
+ZXJ0ZXh0 37220
+IEV2b2x1dGlvbg== 37221
+YW5kZXo= 37222
+IGRpc3R1cmI= 37223
+a2V5dXA= 37224
+IEJ1dGxlcg== 37225
+LnJlYWRsaW5lcw== 37226
+X3N0ZGlv 37227
+IGJlZQ== 37228
+IEFyY2hpdmVz 37229
+IG5ldmVydGhlbGVzcw== 37230
+VVJJVFk= 37231
+IGRyb25lcw== 37232
+dXJpdGllcw== 37233
+IOKYhQ== 37234
+Ij4NCg0K 37235
+IGRpYWdvbmFs 37236
+IENhbmNlbGxhdGlvblRva2Vu 37237
+X0ludGVybmFs 37238
+IHJ1aW4= 37239
+LlF0 37240
+b2NyYXRpYw== 37241
+VGVs 37242
+IEFuc3dlcnM= 37243
+bWF0aWM= 37244
+IHhw 37245
+YXRlbQ== 37246
+X2pvYnM= 37247
+X2FueQ== 37248
+IHNlbmlvcnM= 37249
+IGxhbmRtYXJr 37250
+IFFMaXN0 37251
+IG1hbmV1 37252
+b3RpZnk= 37253
+LyI7Cg== 37254
+L3NlcnZlcg== 37255
+IFBoaWxvc29waA== 37256
+dXRlbmFudA== 37257
+KGlv 37258
+aHo= 37259
+IGF1dGhlbnRpY2F0ZWQ= 37260
+ZHY= 37261
+LUNvbXBhdGlibGU= 37262
+T3JpZ2luYWxseQ== 37263
+LGZ1bmN0aW9u 37264
+44CCDQo= 37265
+IFJlcHJlc2VudGF0aXZl 37266
+YXNpbHk= 37267
+aXJjdWl0 37268
+LmR0 37269
+KG1hdGg= 37270
+Lk1hcnNoYWw= 37271
+Wyw= 37272
+IENpdGllcw== 37273
+X3R1cm4= 37274
+fCkK 37275
+IGNhbnRpZGFk 37276
+YWx0ZXI= 37277
+CXVp 37278
+IE5lYnJhc2th 37279
+IHNraXJ0 37280
+LmJn 37281
+U2hhcmVkUHJlZmVyZW5jZXM= 37282
+KHN0eWxl 37283
+IGdyaWVm 37284
+Z2V3 37285
+IHNhZmVn 37286
+b2xhbmc= 37287
+X2xpc3Rz 37288
+7Js= 37289
+IGdyYW5pdGU= 37290
+IGhvdHRlc3Q= 37291
+LmpkYmM= 37292
+LkN1c3RvbWVy 37293
+IOKJpA== 37294
+IHdhYXI= 37295
+X3NjZW5l 37296
+Kycv 37297
+IEpUZXh0RmllbGQ= 37298
+IHNlYXRpbmc= 37299
+IHdlYXJz 37300
+IGAv 37301
+Q2FzZXM= 37302
+IFlvdXR1YmU= 37303
+xLFt 37304
+IGJhbGNvbg== 37305
+LEc= 37306
+TWV0YURhdGE= 37307
+LXByaWNl 37308
+U0NS 37309
+VW5pdHk= 37310
+IHRydW5r 37311
+PXtgJHs= 37312
+IGVhcnRocXVha2U= 37313
+UGFydGlhbA== 37314
+IHN1YnN0 37315
+IGVsaW1pbg== 37316
+PSInLg== 37317
+Ly8qW0A= 37318
+IHN1cGVydmlzb3I= 37319
+dnJvbGV0 37320
+X2FydGljbGU= 37321
+IHBhbmU= 37322
+Ymlv 37323
+IG1vdG9ycw== 37324
+Tk0= 37325
+RnJhbms= 37326
+IG9uaW9u 37327
+LXdvcmQ= 37328
+SXRlbUNsaWNrTGlzdGVuZXI= 37329
+IGJyaXQ= 37330
+ZW5kZW5jaWVz 37331
+Q29tcHV0ZXI= 37332
+X3J1bm5pbmc= 37333
+KGRheQ== 37334
+LWhl 37335
+KG5hbWVk 37336
+IFNhY2g= 37337
+0L7Rhw== 37338
+Y2FtcGFpZ24= 37339
+LkFic3RyYWN0 37340
+KHdyYXBwZXI= 37341
+LnBheQ== 37342
+IHV3 37343
+R2Vv 37344
+cmFpbHM= 37345
+L3NlbGVjdA== 37346
+aWNodGU= 37347
+c29ucw== 37348
+RVZFTlQ= 37349
+IGFsaW1lbnQ= 37350
+UHJvdmlkZXJz 37351
+QXdhaXQ= 37352
+X0lOVEVSVkFM 37353
+Lm9mZg== 37354
+IGdsdXRlbg== 37355
+X2Nsb3Vk 37356
+IHdlbg== 37357
+LmV4dHJhY3Q= 37358
+CWJ1dHRvbg== 37359
+L01N 37360
+UGFydHk= 37361
+IGRlbW9ncmFwaGlj 37362
+X2Vycm5v 37363
+IGhpa2luZw== 37364
+KCcnKQo= 37365
+IixAIg== 37366
+IHdpdA== 37367
+csOh 37368
+b2xvZ2ll 37369
+IFN0eWxlcw== 37370
+IEJyb3dzZXJNb2R1bGU= 37371
+LlJlcXVlc3RNYXBwaW5n 37372
+aWNhbnM= 37373
+UEFHRQ== 37374
+Y3JlYXRpb24= 37375
+IEZlcmd1c29u 37376
+dWRlZA== 37377
+bnVtYmVycw== 37378
+IEdUSw== 37379
+IHByZXNlbnRhdGlvbnM= 37380
+IEJvYmJ5 37381
+X3NwYW4= 37382
+ZXN0eWxl 37383
+IGlsbGVnYWxseQ== 37384
+YWJlbGE= 37385
+IGJhdHRsZWZpZWxk 37386
+Y2FwYWNpdHk= 37387
+dGVycm9y 37388
+XSIpOwo= 37389
+IHdhcnJpb3I= 37390
+bGVhZGVy 37391
+IERCRw== 37392
+IFJldmVudWU= 37393
+IHZpZ2ls 37394
+IGNvdW50ZXJwYXJ0cw== 37395
+KEVycm9y 37396
+QUNURVI= 37397
+IGhlZWZ0 37398
+IHNlbGVjdGlvbnM= 37399
+emV1Zw== 37400
+dG9t 37401
+LXR3bw== 37402
+LjsK 37403
+X3N0YXRlbWVudA== 37404
+IEFpZA== 37405
+IFZ1bA== 37406
+X3JnYg== 37407
+IHByaXplcw== 37408
+IGVkaXRhYmxl 37409
+CWZvcm0= 37410
+xLFuxLE= 37411
+LmRlY29y 37412
+RGVtbw== 37413
+bGljZXM= 37414
+IGVuY3R5cGU= 37415
+cmF0dWxhdGlvbnM= 37416
+IFJPUw== 37417
+X2NoYXJz 37418
+IEphaHI= 37419
+cGFydGlhbA== 37420
+0YPRgg== 37421
+IFJlY2VpdmU= 37422
+IExhbmRz 37423
+QVBURVI= 37424
+IGNob3BwZWQ= 37425
+Li4i 37426
+IEFuYWx5 37427
+IFVJRA== 37428
+IFJhZGVvbg== 37429
+IEJlZQ== 37430
+IHVubQ== 37431
+Pk0= 37432
+LmZpbmRhbGw= 37433
+VG9rZW5pemVy 37434
+IFdIQVQ= 37435
+IHNq 37436
+RHJhd2luZw== 37437
+RXNz 37438
+T05E 37439
+irY= 37440
+KHBhY2tldA== 37441
+4oCUYnV0 37442
+SW52b2NhdGlvbg== 37443
+IE51Y2xlYXI= 37444
+PzsK 37445
+IGdyYW5kZXM= 37446
+IENyeXB0 37447
+cmVtYXJr 37448
+ICcuLi8uLi8uLi8uLi8= 37449
+IGluYWJpbGl0eQ== 37450
+bWFnaWM= 37451
+Y2F0cw== 37452
+IHNpbXVsYXRl 37453
+OiR7 37454
+aW5mbGF0ZQ== 37455
+IGVuZXI= 37456
+Ok5P 37457
+aXBsZXM= 37458
+IG1lcml0 37459
+IFJhdGVk 37460
+IGdsdWU= 37461
+L2Jsb2c= 37462
+IGdyZW4= 37463
+IHRocmlsbGVk 37464
+LkNI 37465
+dW5jYW4= 37466
+IFBSSU1BUlk= 37467
+IHBlcnNlYw== 37468
+IGZlYXJlZA== 37469
+Lk1JTg== 37470
+IFRoZWF0ZXI= 37471
+6ZI= 37472
+YXRlZ29yaWU= 37473
+5q61 37474
+IGFwcGV0aXRl 37475
+c3F1YXJl 37476
+IEFsZXhhbmQ= 37477
+LlVzZXJJZA== 37478
+X2d0 37479
+X2VudGVy 37480
+IGdyYWR1YXRlcw== 37481
+RnJhZ21lbnRNYW5hZ2Vy 37482
+QXV0aG9yaXpl 37483
+LU5MUw== 37484
+KE15 37485
+IHRyaXVtcGg= 37486
+dXN0aW5n 37487
+X1BBUkFNUw== 37488
+Q2hhcmFjdGVycw== 37489
+KDosOiw= 37490
+X0JVSUxE 37491
+TUh6 37492
+IHdhc2hlZA== 37493
+IHVuY2xl 37494
+U3RldmU= 37495
+YXJkb3du 37496
+PHN0ZGlv 37497
+X3Rlcm1z 37498
+IE1BUg== 37499
+IGhvc2U= 37500
+dWN1cw== 37501
+IENsYWlt 37502
+IFJhbXM= 37503
+IG1vZGVsQnVpbGRlcg== 37504
+IG7DqQ== 37505
+dXNlcklE 37506
+PWpzb24= 37507
+LlJlc3BvbnNlV3JpdGVy 37508
+mOiupA== 37509
+IGdydXBv 37510
+LWl0 37511
+IEtP 37512
+LU1haWw= 37513
+IGNvbmZlcmVuY2Vz 37514
+SUZB 37515
+IEFzc2Fk 37516
+IHByb25vdW5jZWQ= 37517
+IGFuY2VzdG9ycw== 37518
+IFRSQUNF 37519
+IEdlRm9yY2U= 37520
+IHByaXZhdA== 37521
+cGVsbA== 37522
+ZW1vamk= 37523
+INmI 37524
+R2VucmU= 37525
+IGNvbmNlbnRyYXRlZA== 37526
+amFuZw== 37527
+TU9URQ== 37528
+IFpvb20= 37529
+dG9vbGJhcg== 37530
+IHV0dGVybHk= 37531
+IGVuY29tcGFzcw== 37532
+IFNvY2Nlcg== 37533
+IGV1cm9wZQ== 37534
+LWFpcg== 37535
+LmFuaW0= 37536
+X0NUTA== 37537
+aGVyZW50 37538
+cmV4 37539
+aW50ZXJhY3RpdmU= 37540
+44Gn44GZ 37541
+IEthcw== 37542
+IGRlc3BlcmF0ZWx5 37543
+KGFy 37544
+IGJpaw== 37545
+IHRyYXZlcnNl 37546
+ZXVycw== 37547
+UmVjeWNsZXJWaWV3 37548
+IE1hcmdhcmV0 37549
+IGhvcGVmdWw= 37550
+IE1pZw== 37551
+X01FTUJFUg== 37552
+cmVjZWl2ZXI= 37553
+TWF0Y2hlcg== 37554
+ZGVwZW5kZW50 37555
+IGV4Y2VsbGVuY2U= 37556
+0LDQtg== 37557
+TE9T 37558
+QXNwZWN0 37559
+IGFkYWxhaA== 37560
+IEVjb25vbXk= 37561
+dWxvdXNseQ== 37562
+IGV2YWx1YXRpbmc= 37563
+IGRldmlhdGlvbg== 37564
+ZXh0ZXI= 37565
+L2RhdA== 37566
+Q29scw== 37567
+IFBva2Vy 37568
+Ym9hcmRpbmc= 37569
+LkNoaWxkcmVu 37570
+QU5HTEU= 37571
+w68= 37572
+IFlvZ2E= 37573
+IGhhdGVk 37574
+QWRhbQ== 37575
+IEZDQw== 37576
+SU1BTA== 37577
+IGZhaW50 37578
+X0RJU1BMQVk= 37579
+IGV2b2x2ZQ== 37580
+IGZyaWRnZQ== 37581
+IHLDqWc= 37582
+IGVtb3Rpb25hbGx5 37583
+4oCcSWY= 37584
+YXdlaQ== 37585
+ZXJlc2E= 37586
+Jywi 37587
+QkVHSU4= 37588
+IFZBUkNIQVI= 37589
+IHhp 37590
+ZmFjdG9y 37591
+dHo= 37592
+X3BoYXNl 37593
+U0VR 37594
+KHJhbmQ= 37595
+IG1hdGhlbWF0aWNz 37596
+IGNvbnRleHRz 37597
+LWFj 37598
+IEZJRw== 37599
+IENhcHRpb24= 37600
+IFdhaXRGb3I= 37601
+LXdlc3Q= 37602
+IGZpcmVmaWdodA== 37603
+X0xFRA== 37604
+ZWN0aW9ucw== 37605
+CXRocm93cw== 37606
+IFRha2Vz 37607
+b2JyZQ== 37608
+IEF2YXRhcg== 37609
+IElubm92YXRpb24= 37610
+IGNhbGlicmF0aW9u 37611
+OnRoaXM= 37612
+X2VuY29kaW5n 37613
+IGNhbGN1bGF0aW5n 37614
+ICMjIyMjIyMjIyMjIyMjIyM= 37615
+IFByb2dyYW1z 37616
+IEhJR0g= 37617
+LmNvbmZpZ3VyZVRlc3RpbmdNb2R1bGU= 37618
+UG9seWdvbg== 37619
+X0RCRw== 37620
+Il0sDQo= 37621
+0LDQsQ== 37622
+IHNpbWlsYXJpdHk= 37623
+IHByemV6 37624
+IEZpcm0= 37625
+IG1pc3VuZGVy 37626
+IE1vdmluZw== 37627
+IE1PVg== 37628
+IHJlYWN0b3I= 37629
+UmVxdWVzdGVk 37630
+ZXhwZWN0cw== 37631
+IGVyZWN0 37632
+bGljaHQ= 37633
+b3VsZGVy 37634
+SURHRVQ= 37635
+IGRldmls 37636
+IHByb2dyYW1tZXM= 37637
+IENvbW1vbk1vZHVsZQ== 37638
+ICInIg== 37639
+KEF1dGg= 37640
+44CC77yM 37641
+IFN0YXRlZnVsV2lkZ2V0 37642
+6K6h 37643
+L29wZW4= 37644
+aW5hbGx5 37645
+LlJvdW5k 37646
+IFdpc2g= 37647
+IGh1bWFuaXRhcmlhbg== 37648
+QWNjZXNzVG9rZW4= 37649
+IFNPQw== 37650
+IHBva2Vtb24= 37651
+IHZhcG9y 37652
+X2FkZGVk 37653
+CUdldA== 37654
+c3BlbGw= 37655
+IEluaXRpYXRpdmU= 37656
+IEhFTA== 37657
+YWlycm8= 37658
+YmxlZA== 37659
+INCx0Ys= 37660
+IHNlbnNpYmxl 37661
+IEx1YQ== 37662
+fCgK 37663
+IGZpeHR1cmVz 37664
+IG9yZ2FzbQ== 37665
+Q3V0 37666
+dWt0 37667
+Z3Vl 37668
+IGNyZWRpYmlsaXR5 37669
+OmltYWdl 37670
+IENQUA== 37671
+LnNu 37672
+KGRlc2M= 37673
+IFJlaWQ= 37674
+LWRlZ3JlZQ== 37675
+X3NvdW5k 37676
+Q2xvbmU= 37677
+4buZ 37678
+YWtzaQ== 37679
+PiR7 37680
+X2NvbmZpcm1hdGlvbg== 37681
+IHRyb3BoeQ== 37682
+V29ya3M= 37683
+IEVsZWN0cm9uaWNz 37684
+IE1lZGl0ZXJyYW5lYW4= 37685
+X21ldHJpY3M= 37686
+IGFubm91bmNpbmc= 37687
+IERBWQ== 37688
+X3Byb3Rv 37689
+IHBlYXI= 37690
+YmFzZVVybA== 37691
+CQkJCQkJCQkK 37692
+IGNvb3JkaW5hdGlvbg== 37693
+Ok4= 37694
+LmFuaW1hdGU= 37695
+IENvdHRvbg== 37696
+X2hpdA== 37697
+4pw= 37698
+IGpldHp0 37699
+aWZ0ZXI= 37700
+KGZpZWxkcw== 37701
+b3dubG9hZA== 37702
+aWZpY2FjaW9u 37703
+LmN1ZGE= 37704
+IExpdQ== 37705
+PmVxdWFscw== 37706
+IEFjZQ== 37707
+0YDQsNC8 37708
+IFN1cGVybWFu 37709
+IEdhcmNpYQ== 37710
+IGFycmVzdHM= 37711
+YWdhcg== 37712
+IHt9KQ== 37713
+IG1hY3Jvcw== 37714
+cm91cGU= 37715
+w6p0cmU= 37716
+IHR3aXN0ZWQ= 37717
+c3RydW1lbnRz 37718
+Xygi 37719
+X3ZlcnRpY2Vz 37720
+IFRyYW5zaXRpb24= 37721
+0LjQug== 37722
+W21heA== 37723
+bWluZA== 37724
+IGFjY2Vzc1Rva2Vu 37725
+IHVubGU= 37726
+bXVz 37727
+Y29w 37728
+IEZhY3Rvcg== 37729
+IGNvbmNlZA== 37730
+IHJldHI= 37731
+LmxpbmFsZw== 37732
+LXNsaWRlcg== 37733
+b2Js 37734
+X1N0YXRpY0ZpZWxkcw== 37735
+IHpvbWJpZQ== 37736
+c2VsbGluZw== 37737
+IGNoYXA= 37738
+IHNoYWtpbmc= 37739
+IFRyYW5zbGF0ZQ== 37740
+IEFtc3RlcmRhbQ== 37741
+IEVUSA== 37742
+X0VYVEVSTg== 37743
+a2Q= 37744
+X2Rpc2M= 37745
+IHByZWNlZGluZw== 37746
+IHByaXg= 37747
+T2JqZWN0TmFtZQ== 37748
+X21vZGlmaWVk 37749
+YXJkd2FyZQ== 37750
+ID8+Ij4= 37751
+IERX 37752
+YCR7 37753
+ID8+Ij48Pw== 37754
+dXllbg== 37755
+IGRvbm5h 37756
+IHhzaQ== 37757
+ICQiew== 37758
+IERyYXdpbmc= 37759
+LG5pbA== 37760
+IG9uZGVy 37761
+Qkc= 37762
+T2JzZXJ2 37763
+IGNvbnNpZGVyYXRpb25z 37764
+Ym9hdA== 37765
+IEJhbmtz 37766
+IGluZGljdA== 37767
+LEk= 37768
+IEJsdQ== 37769
+KHZlcnNpb24= 37770
+Y2xpZW50ZQ== 37771
+b2xhbg== 37772
+TEVTUw== 37773
+YXNzZXJ0U2FtZQ== 37774
+X3ZvaWQ= 37775
+IFdBUw== 37776
+CWVudW0= 37777
+IG1peGVy 37778
+RVc= 37779
+YWZmZQ== 37780
+IGJsb3dqb2I= 37781
+dGV4dEZpZWxk 37782
+IGltbWVuc2U= 37783
+X3JlcG8= 37784
+IGdsb2JhbHM= 37785
+YW50YWdlcw== 37786
+LnRvZGF5 37787
+VGh1cnNkYXk= 37788
+IEJyaWc= 37789
+e30pCg== 37790
+IEltYWdpbmU= 37791
+KEdQSU8= 37792
+IGVzdG8= 37793
+IFByb3ZpbmNl 37794
+IE1lbnRhbA== 37795
+X2NlbGxz 37796
+IEp1bGlhbg== 37797
+LlNjcmVlbg== 37798
+IGNhbmRsZQ== 37799
+IG1vbmRl 37800
+IHZlcmc= 37801
+aXRlcmFscw== 37802
+LWxheW91dA== 37803
+R3Vlc3Q= 37804
+IHZpbmQ= 37805
+IEVjaG8= 37806
+Jyl9 37807
+IG1hbm4= 37808
+X0JPT0xFQU4= 37809
+aGFw 37810
+IG5pZ2h0bWFyZQ== 37811
+VUdI 37812
+IG5vbmV0aGVsZXNz 37813
+IGF0aGU= 37814
+IEhvbGxhbmQ= 37815
+IEJvcm4= 37816
+XE9STQ== 37817
+YW51dA== 37818
+X2xldmVscw== 37819
+IHBldGl0ZQ== 37820
+LWFydA== 37821
+X1NIT1c= 37822
+bnVtYmVyT2Y= 37823
+X3RodW1ibmFpbA== 37824
+YW1pbnM= 37825
+IERlZmluZXM= 37826
+ICI9 37827
+LlN0YXR1c0NvZGU= 37828
+IGRpZ25pdHk= 37829
+IEJpa2U= 37830
+Lk5ld0xpbmU= 37831
+IEdsYXM= 37832
+KGxvZ2dlcg== 37833
+IGNhdGNoZXM= 37834
+dm90ZXM= 37835
+IGV4YW1pbmluZw== 37836
+L3JlZ2lzdGVy 37837
+IHNwZWNpZnlpbmc= 37838
+X2ZpeGVk 37839
+IGRyYXdpbmdz 37840
+VGhyZXNob2xk 37841
+QXg= 37842
+IEFyY2hpdGVjdHVyZQ== 37843
+KHBpZA== 37844
+V2lyZQ== 37845
+KGNvbnQ= 37846
+bGFuZQ== 37847
+TGlzdHM= 37848
+IHNwcmludA== 37849
+IGdyYW5kZmF0aGVy 37850
+X0FH 37851
+IHNjaGVkdWxpbmc= 37852
+Q0xVUw== 37853
+YXR1cml0eQ== 37854
+IGxvY2tpbmc= 37855
+W3NpemU= 37856
+X3N0eWxlcw== 37857
+IHdi 37858
+LS0+Cgo= 37859
+IHNwaW5uaW5n 37860
+X3BlbmRpbmc= 37861
+TWF0Y2hlcnM= 37862
+LktleXM= 37863
+IFBW 37864
+ZW51cw== 37865
+YW50aXM= 37866
+IGRpc2NhcmQ= 37867
+IGhhdWw= 37868
+IGVtcGly 37869
+IHBhdGh3YXk= 37870
+IG9haw== 37871
+0LzQtdC9 37872
+LWluZHVjZWQ= 37873
+IGltcGFpcg== 37874
+IENhbGdhcnk= 37875
+LmlzSGlkZGVu 37876
+ZHo= 37877
+X2luY2x1ZGU= 37878
+IGdt 37879
+ICcoJw== 37880
+UFk= 37881
+dWdnZXN0aW9ucw== 37882
+IGNvbW1vZGl0eQ== 37883
+Y3Jv 37884
+L3N1Yg== 37885
+IGdldEluc3RhbmNl 37886
+IExlZ2FjeQ== 37887
+IEtpbA== 37888
+QmFs 37889
+KHNob3J0 37890
+SW5mb3Jt 37891
+K3g= 37892
+KnI= 37893
+IEhvcGVmdWxseQ== 37894
+b3JhdGU= 37895
+IG1hY2hlbg== 37896
+IHRyZWF0eQ== 37897
+IE9yaQ== 37898
+LnB1YmxpYw== 37899
+LWhvcml6b250YWw= 37900
+IHRhY3RpYw== 37901
+IGJvcmQ= 37902
+d2FyZXM= 37903
+IGFtbW8= 37904
+IExpc3Rz 37905
+IGVxdWF0aW9ucw== 37906
+L2hlcg== 37907
+IE5TVw== 37908
+Qm91bmRpbmc= 37909
+X0NvbGxlY3Rpb25z 37910
+IGF2YWls 37911
+LkRyb3BEb3du 37912
+6LA= 37913
+IGho 37914
+IGzDoA== 37915
+LnBi 37916
+IG1lbW9yaWFs 37917
+IEFUVFI= 37918
+IGV4aGF1c3RlZA== 37919
+IHRzcA== 37920
+CXJlZGlyZWN0 37921
+IGxpa2V3aXNl 37922
+U1RFUg== 37923
+TGphdmE= 37924
+IGNvbmRlbW5lZA== 37925
+b2NhdXN0 37926
+KHN0cmljdA== 37927
+IGV4ZW1wdA== 37928
+IHNtcw== 37929
+IGV4YWdnZXI= 37930
+U1lT 37931
+IGxvdW5nZQ== 37932
+Ol4= 37933
+IHRvZGQ= 37934
+ZGVi 37935
+YXRvcmlhbA== 37936
+IFBvcnRlcg== 37937
+IHR1aXRpb24= 37938
+IGV4ZW1wbA== 37939
+IHBhcmVu 37940
+LmxpbmVUbw== 37941
+IGtpZG5leQ== 37942
+IMOnYQ== 37943
+IGN1aQ== 37944
+77yM6K+3 37945
+WEM= 37946
+IG1vxbw= 37947
+IG5vbWluYXRlZA== 37948
+bHVuZw== 37949
+SW1HdWk= 37950
+IEJ1eno= 37951
+IHN0ZXJlbw== 37952
+cG9ydGFs 37953
+cmVzYXM= 37954
+IGtsYXNz 37955
+IGRyYWZ0ZWQ= 37956
+IHByb2plY3RpbGU= 37957
+L2dwbA== 37958
+KHBhcmFtZXRlcnM= 37959
+KikK 37960
+IGFzc2lzdGVk 37961
+IE5TSW50ZWdlcg== 37962
+c2l0ZW1hcA== 37963
+Om50aA== 37964
+LlZpZXdz 37965
+LkFyZ3VtZW50UGFyc2Vy 37966
+IG1lZXI= 37967
+emllcg== 37968
+IERpZw== 37969
+PD89JA== 37970
+X3Blcm1pc3Npb24= 37971
+CUFkZA== 37972
+b2xvZ2lh 37973
+IHNjaQ== 37974
+IGZpbmFuY2lhbGx5 37975
+IHNjcm9sbGluZw== 37976
+LmRpc3Q= 37977
+X0hBUw== 37978
+dWJ1bnR1 37979
+LnBhZ2Vz 37980
+SW5jcmU= 37981
+YnVyc2U= 37982
+IEFtYXRldXI= 37983
+5rqQ 37984
+QmxvYg== 37985
+IGNob2xlc3Rlcm9s 37986
+REVT 37987
+bWluaW11bQ== 37988
+IHJlZnVzaW5n 37989
+dW5uZWQ= 37990
+0Jw= 37991
+IFJE 37992
+LlNlcnZsZXQ= 37993
+ICovOwo= 37994
+dWRkZW4= 37995
+IHZpZXdCb3g= 37996
+IG1ldGFib2xpc20= 37997
+IHN0ZWFsaW5n 37998
+IEJldmVy 37999
+YWduZXRpYw== 38000
+VkVSUklERQ== 38001
+X0FVRElP 38002
+0YDRiw== 38003
+IGFyY2hpdmVz 38004
+LmxpbmVhcg== 38005
+PXs8 38006
+dW5jYXRlZA== 38007
+QWNjZXNzRXhjZXB0aW9u 38008
+IHBpY3R1cmVCb3g= 38009
+CXNlbGVjdA== 38010
+TGF0aXR1ZGU= 38011
+dmlzb3I= 38012
+cmVpYg== 38013
+IHBhaw== 38014
+SG9wZQ== 38015
+IEl0ZXJhYmxl 38016
+LnJlc3BvbnNlVGV4dA== 38017
+IFF1YWQ= 38018
+IEJyb29rcw== 38019
+IFRvdA== 38020
+T1BU 38021
+ZWxvbmc= 38022
+IGNvY2FpbmU= 38023
+IGFubw== 38024
+RGFu 38025
+IHBzaQ== 38026
+0LDQu9GM 38027
+LmdldENoaWxk 38028
+IFJFRg== 38029
+LWFi 38030
+IFRyaWFuZ2xl 38031
+PFRleHQ= 38032
+IENvbG9tYmlh 38033
+aW5reQ== 38034
+6Imy 38035
+KX0+Cg== 38036
+IHBsYWc= 38037
+cGluZQ== 38038
+IGJsYW5rZXQ= 38039
+IDo8Lw== 38040
+IFRyYW5zbGF0aW9u 38041
+bm92 38042
+IHBlcmZlY3Rpb24= 38043
+IENvbmZlZGVy 38044
+LnN0dWI= 38045
+LkludGVyb3BTZXJ2aWNlcw== 38046
+LlN0b3Jl 38047
+IGVucm9sbG1lbnQ= 38048
+IGRlZXI= 38049
+TW92ZW1lbnQ= 38050
+LWZyb20= 38051
+aGM= 38052
+IGV2YW5nZWw= 38053
+IElsbHVzdHI= 38054
+IHRydW1w 38055
+X1N0YXJ0 38056
+cGxhbmVz 38057
+IEJpbA== 38058
+SW5mb3M= 38059
+LXRyYW5z 38060
+IHJhbmNo 38061
+IExpbmRh 38062
+X21hcg== 38063
+UkVU 38064
+L25ldA== 38065
+TGF3 38066
+TkY= 38067
+IFByZXZlbnQ= 38068
+IGNyaWVk 38069
+IGVkdWNhdGU= 38070
+YXN0aWNz 38071
+eWk= 38072
+LkxpbmVhckxheW91dA== 38073
+TUVUSE9E 38074
+IEVn 38075
+bWFwcGVy 38076
+5pmC 38077
+LmFzYXJyYXk= 38078
+z4E= 38079
+acOnw6Nv 38080
+UmV1c2U= 38081
+X3Jldg== 38082
+IFBST0RVQ1Q= 38083
+X0NvZGU= 38084
+ICAgICANCg== 38085
+IFNFUlZJQ0U= 38086
+X2NvdmVy 38087
+LiwK 38088
+LkV4ZWN1dGVSZWFkZXI= 38089
+IERpbmluZw== 38090
+LmFyY2g= 38091
+IG90cm8= 38092
+IERpc2NvdmVyeQ== 38093
+IEtleUVycm9y 38094
+IEJlbmVmaXRz 38095
+X1NIQQ== 38096
+LlVubWFyc2hhbA== 38097
+SEVBREVS 38098
+TXV0ZXg= 38099
+QU1B 38100
+IGluaXRpYXRl 38101
+U3RheQ== 38102
+TGl0dGxl 38103
+ICgpLA== 38104
+IGRlY2VudHJhbA== 38105
+UmVzb2x1dGlvbg== 38106
+LmhlYWx0aA== 38107
+CWZjbG9zZQ== 38108
+5Lqk 38109
+IHN0YWtlaG9sZGVycw== 38110
+IGFyY2hhZQ== 38111
+RGlnaXRhbA== 38112
+bGVzY29wZQ== 38113
+X3Blbg== 38114
+IEl0ZW1TdGFjaw== 38115
+IENhbm9u 38116
+IEtlbmQ= 38117
+IMO4 38118
+X2FqYXg= 38119
+aW5ncmVkaWVudHM= 38120
+RGVsaXZlcnk= 38121
+U2VjdGlvbnM= 38122
+IGRpc2FwcG9pbnRpbmc= 38123
+IEdyZW4= 38124
+LHJl 38125
+IGRlY3J5cHQ= 38126
+b2xvZ2lj 38127
+X2ZtdA== 38128
+IFNsaWRlcg== 38129
+bmFo 38130
+V2FzaGluZ3Rvbg== 38131
+enVuZw== 38132
+INGG 38133
+eWN6 38134
+aWV2ZXM= 38135
+LkRFQlVH 38136
+IFRJ 38137
+IGhhY2tpbmc= 38138
+IGNlbnRy 38139
+Zmxvd3M= 38140
+IGRpZFJlY2VpdmVNZW1vcnlXYXJuaW5n 38141
+IGFjY291bnRhYmlsaXR5 38142
+Q09VTlQ= 38143
+0LvQtdC80LXQvdGC 38144
+Ymxv 38145
+L2lk 38146
+IFNsb3c= 38147
+aXp6YXJk 38148
+LnJlbW92ZUV2ZW50TGlzdGVuZXI= 38149
+IOyehQ== 38150
+L0k= 38151
+aXNtYQ== 38152
+IEh1ZHNvbg== 38153
+fX0s 38154
+dW1lZA== 38155
+IHJlYWxpc2U= 38156
+dW5zYWZl 38157
+IHp1cw== 38158
+IHNob3J0YWdl 38159
+b2xpYQ== 38160
+X3ByaW9yaXR5 38161
+IGZsb29kaW5n 38162
+b3BlcmF0aW9ucw== 38163
+UG9seQ== 38164
+YWJhbg== 38165
+W2N1cg== 38166
+IGVza29ydGU= 38167
+X0RFU0NSSVBUSU9O 38168
+X25hdA== 38169
+IG1hbGljaW91cw== 38170
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA= 38171
+IFBhcmtz 38172
+IHRheHBheWVy 38173
+IEZvc3Rlcg== 38174
+IHNleHVhbGl0eQ== 38175
+57O7 38176
+67A= 38177
+XA0K 38178
+LnNlZWs= 38179
+0LDQvdC40Y8= 38180
+L2FydGljbGU= 38181
+6L+H 38182
+IFVocg== 38183
+IGdyYW5kbW90aGVy 38184
+IEJsZQ== 38185
+ZnVydA== 38186
+YW1iYWg= 38187
+bm90aWZpY2F0aW9ucw== 38188
+ZGVwcmVjYXRlZA== 38189
+IHVpbnRwdHI= 38190
+b2tp 38191
+KEFycmF5 38192
+IGF1dG9ub21vdXM= 38193
+IG9icg== 38194
+wq/Crw== 38195
+IGJhc2VuYW1l 38196
+IHVudmVpbGVk 38197
+c29s 38198
+IE5vdEltcGxlbWVudGVkRXJyb3I= 38199
+IGRlcHJlc3M= 38200
+XycuJA== 38201
+IFVOSVQ= 38202
+JScs 38203
+LXRhZw== 38204
+Z3JlcA== 38205
+IE1haW50ZW5hbmNl 38206
+IHdhcmZhcmU= 38207
+X1JFU09VUkNF 38208
+KHNwZWM= 38209
+KGN2 38210
+IG5hZGE= 38211
+55S1 38212
+IGNyb3dkZWQ= 38213
+QmVsb3c= 38214
+IFphY2g= 38215
+RXN0YWRv 38216
+X3ByaW1l 38217
+IHRyYWJham8= 38218
+IGluZm9ybWF0aXZl 38219
+U2NvdHQ= 38220
+IHNlcmlhbGl6ZXJz 38221
+IE5hcw== 38222
+VGh1bms= 38223
+IG1lcmN5 38224
+LC4uLgoK 38225
+IGFkZGljdA== 38226
+LmNvbnN0YW50cw== 38227
+IGRhdGFmcmFtZQ== 38228
+X3JlYXNvbg== 38229
+Z29tZXJ5 38230
+7Iq164uI64uk 38231
+IG5lZ2xlY3Q= 38232
+IExpbmVz 38233
+IG1lbWI= 38234
+X0VYRUM= 38235
+YXNzYWdl 38236
+IFlhcmQ= 38237
+e30nLg== 38238
+IGxvdHRlcnk= 38239
+dGVpbg== 38240
+X2NhbGM= 38241
+aWt1 38242
+X1JFQ09SRA== 38243
+V2Fybg== 38244
+IGhlYWx0aGllcg== 38245
+dXJlbWVudA== 38246
+IHlhcm4= 38247
+IENvcm5lcg== 38248
+KHppcA== 38249
+KGluaXQ= 38250
+IExpdA== 38251
+SFc= 38252
+c3Vic2V0 38253
+IE1G 38254
+RVRFUlM= 38255
+X3JvdA== 38256
+IGVyZQ== 38257
+IE92ZXJyaWRl 38258
+V2FsbGV0 38259
+X3Jld2FyZA== 38260
+IHNhZ2U= 38261
+c2V0VmlzaWJsZQ== 38262
+IEpzb25SZXNwb25zZQ== 38263
+SUNZ 38264
+6K+i 38265
+VmFyQ2hhcg== 38266
+YWF0 38267
+LWdyZWVu 38268
+IGlycQ== 38269
+YW5pdHk= 38270
+IHdob2V2ZXI= 38271
+X3NoYXJl 38272
+IGZvdXQ= 38273
+cm9sbHM= 38274
+IHdpbGxpbmduZXNz 38275
+LmNvbXBvbmVudEluc3RhbmNl 38276
+IGhvbm9yZWQ= 38277
+dXJ2ZXk= 38278
+QmVy 38279
+IHJ1bm5lcnM= 38280
+IGxpZXU= 38281
+b3Jwb3I= 38282
+X3N0cnVjdHVyZQ== 38283
+QmFyQnV0dG9uSXRlbQ== 38284
+YWR4 38285
+IEJlbm5ldHQ= 38286
+IGRpbGln 38287
+IGZsdWN0 38288
+SURERU4= 38289
+X1NlbGVjdGVk 38290
+KGRpdg== 38291
+IHF1aWNrZXI= 38292
+YWxvbmc= 38293
+Z3JhcGhxbA== 38294
+aW5leg== 38295
+IGNpdGU= 38296
+IEluc3RydWN0aW9ucw== 38297
+IGluc2VydGluZw== 38298
+LmNsb3VkZmxhcmU= 38299
+Y291cG9u 38300
+ZWRMaXN0 38301
+IFN0b3Jlcw== 38302
+X21hbGxvYw== 38303
+56ym 38304
+IEF3ZXNvbWU= 38305
+IGxhbWI= 38306
+UkVTVA== 38307
+IGludGVzdA== 38308
+IE5hdmJhcg== 38309
+LmZlYXR1cmVz 38310
+SW5jcmVtZW50 38311
+IFBvbQ== 38312
+IGluc3VmZmljaWVudA== 38313
+X0xPR0lO 38314
+UExFTUVOVA== 38315
+IE9BdXRo 38316
+LklORk8= 38317
+IGV4b3RpYw== 38318
+IENBU0U= 38319
+CSAgCg== 38320
+IEdhbmQ= 38321
+dGhlc2Vz 38322
+IG5vdm8= 38323
+IERlbGw= 38324
+4oCm4oCm4oCm4oCm 38325
+X3NvZnQ= 38326
+IGFncmVlaW5n 38327
+Y2VudHM= 38328
+bG9hbg== 38329
+JyIsCg== 38330
+IFJhbg== 38331
+REVM 38332
+IG9yZ2FuaXNlZA== 38333
+K24= 38334
+IEhlYWx0aGNhcmU= 38335
+IGRldGVyaW9y 38336
+IGltcGxlbWVudGF0aW9ucw== 38337
+IGNhcm4= 38338
+ICwn 38339
+IExPQUQ= 38340
+IHBsYW50ZWQ= 38341
+5pyq 38342
+Rm9ybUNvbnRyb2w= 38343
+X21hdGNoZXM= 38344
+IHBlcmlvZGlj 38345
+X1Rv 38346
+IEpvZWw= 38347
+IGFua2xl 38348
+IG1pbGl0YW50cw== 38349
+IFdpdGNo 38350
+dW5pZm9ybQ== 38351
+dWVudGE= 38352
+T2ZXZWVr 38353
+IHBlcnBldHI= 38354
+IGludGVydmVudGlvbnM= 38355
+KHdyaXRlcg== 38356
+YW50aW5l 38357
+UHJvZ3Jlc3NCYXI= 38358
+IGxlYWd1ZXM= 38359
+Y29tcHJlc3M= 38360
+aXppb25l 38361
+IEVB 38362
+Il09Ig== 38363
+IFN0ZXBoYW4= 38364
+bWludXM= 38365
+c3N0cmVhbQ== 38366
+X2xlZA== 38367
+ID09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT0= 38368
+IldoZW4= 38369
+QWxyZWFkeQ== 38370
+IGNvbnRlbXBs 38371
+IGF0YXU= 38372
+IENvbmdyZXNzaW9uYWw= 38373
+IHJhcHBvcnQ= 38374
+IEJvdXI= 38375
+aXNoaQ== 38376
+IHR5bQ== 38377
+IEFybWVu 38378
+INGA0LDQtw== 38379
+LWZvcm1hdA== 38380
+X1JlYWQ= 38381
+KGNvbHVtbnM= 38382
+IG5ldWU= 38383
+X2JveGVz 38384
+IFNhbmR5 38385
+XywK 38386
+IFdpemFyZA== 38387
+IG9yZGVu 38388
+IGZpbGVzeXN0ZW0= 38389
+ZmxpZ2h0 38390
+IHdzeg== 38391
+YW5jZWxlZA== 38392
+IGRhd24= 38393
+IEdzb24= 38394
+X3dhcm5pbmc= 38395
+IEljZWxhbmQ= 38396
+IHNsdXQ= 38397
+IHNldElz 38398
+X2lkZW50 38399
+IG9mZnNob3Jl 38400
+IFNrZXRjaA== 38401
+OyU= 38402
+IHRyaWJlcw== 38403
+X1NQQUNF 38404
+IG90cm9z 38405
+Q29tcGlsZXI= 38406
+CUVuZA== 38407
+IF0pLAo= 38408
+R3Jhdml0eQ== 38409
+IHRlbnNpb25z 38410
+IHNtb290aGx5 38411
+S25vdw== 38412
+b290aGluZw== 38413
+IFN0YXJ0dXA= 38414
+IEh5cA== 38415
+IGFtYXpvbg== 38416
+IFJlY2VpdmVk 38417
+emVuaWU= 38418
+654= 38419
+IENob2NvbGF0ZQ== 38420
+IMSw 38421
+Ik5v 38422
+IEFMUw== 38423
+IFByb2dyYW1taW5n 38424
+IERvZ3M= 38425
+IGdvb2RuZXNz 38426
+KGVycm5v 38427
+L2Vz 38428
+IHJlbW90ZWx5 38429
+IEhvb2tz 38430
+VXVpZA== 38431
+IG92ZXJseQ== 38432
+IOWQ 38433
+IGdwdQ== 38434
+IHN0aW11bHVz 38435
+KHN0ZXA= 38436
+LllvdQ== 38437
+IGJpb20= 38438
+SU5D 38439
+LmJpdHM= 38440
+KG1Db250ZXh0 38441
+IGFtZXJpY2Fu 38442
+IHRlcnJpdG9yaWVz 38443
+IE5E 38444
+XSIK 38445
+IE1hcHBpbmc= 38446
+IHByb2NlZWRpbmc= 38447
+LmF4 38448
+IHN1YnN0cmluZw== 38449
+QlVUVE9O 38450
+IEln 38451
+LXBhbmU= 38452
+IEFucw== 38453
+IGdyYWR1YXRpb24= 38454
+IHBlcnNwZWN0aXZlcw== 38455
+TWl4aW4= 38456
+X21pbnVz 38457
+CQkJCSAgICA= 38458
+IikpKQ== 38459
+bm9ybWFsaXplZA== 38460
+Lmxhc3ROYW1l 38461
+IGNsYW4= 38462
+QXNpYQ== 38463
+KE1vdXNl 38464
+cGFnaW5hdGU= 38465
+IGdpZg== 38466
+ZWxpZw== 38467
+IHBvc3RlcnM= 38468
+bmluZ3M= 38469
+IM+E 38470
+IGFwb3N0 38471
+IElocmU= 38472
+RGxsSW1wb3J0 38473
+IEVxdWFs 38474
+IGRpc3Rpbmd1aXNoZWQ= 38475
+bmVhcG9saXM= 38476
+IGJhY2tkcm9w 38477
+IEFsdGVybmF0aXZlbHk= 38478
+L21vZA== 38479
+IGxlbmQ= 38480
+IFNIT1c= 38481
+X2NvZGVz 38482
+IGF0w6k= 38483
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg 38484
+LWNhc2U= 38485
+Y2h0ZQ== 38486
+IGRvbmM= 38487
+OmFkZA== 38488
+TmVnYXRpdmU= 38489
+ZmF2b3JpdGU= 38490
+IGF0dHJhY3Rpb25z 38491
+aW50Q29sb3I= 38492
+IFBpcg== 38493
+Q29ubmVsbA== 38494
+TWFuaWZlc3Q= 38495
+dGVhbXM= 38496
+IH07CgoK 38497
+IHBsdXJhbA== 38498
+IG92ZXJ0aW1l 38499
+IEV1cm9wYQ== 38500
+IEJhbmdsYWRlc2g= 38501
+KGFu 38502
+IGxpbmd1 38503
+aXRpbWU= 38504
+aW5zdG9u 38505
+LnNoYWRvdw== 38506
+56iL 38507
+IFVTUw== 38508
+U2VydmVyRXJyb3I= 38509
+SVZFUlM= 38510
+IEppbg== 38511
+IGh1bWJsZQ== 38512
+YXV0b2xvYWQ= 38513
+YXJleg== 38514
+4oCy 38515
+IEFzdHI= 38516
+aWNvbG9u 38517
+LlZpZXdNb2RlbHM= 38518
+b2Jv 38519
+IHN3aXBl 38520
+IHJlY2Vzc2lvbg== 38521
+6ZU= 38522
+IOyY 38523
+bmVyZw== 38524
+aW5ncmVkaWVudA== 38525
+bWFpbHRv 38526
+IEZhbWU= 38527
+UHJpbnRpbmc= 38528
+UGl4ZWxz 38529
+IEJhc2g= 38530
+cG9zdGE= 38531
+X0pP 38532
+IGluZmFtb3Vz 38533
+IExhbmM= 38534
+KGxvY2FsU3RvcmFnZQ== 38535
+LmJsaXQ= 38536
+IHlvdW5nZXN0 38537
+IGZpZWxkTmFtZQ== 38538
+IGNvbnRpbmc= 38539
+IHdvb2w= 38540
+IEltR3Vp 38541
+IE5TVA== 38542
+LnByZWZpeA== 38543
+VG9JbnQ= 38544
+IFNveA== 38545
+IGhhYml0YXQ= 38546
+KCJ8 38547
+PSciKw== 38548
+SU5HVE9O 38549
+X3dyYXA= 38550
+dWNrZXRz 38551
+IFdSSVRF 38552
+IG1lZGljaW5lcw== 38553
+IG1lbWJyYW5l 38554
+IEpUZXh0 38555
+IHJlcHJvZHVjdGlvbg== 38556
+X3JlY2VpdmU= 38557
+VGFibGVSb3c= 38558
+cXVldWVSZXVzYWJsZUNlbGw= 38559
+aG9va3M= 38560
+IHJlbHlpbmc= 38561
+IGRyaWxsaW5n 38562
+X0ls 38563
+KGV4Y2VwdGlvbg== 38564
+IGR1cmFiaWxpdHk= 38565
+IGhlc2l0YXRl 38566
+IGNvbXBhcnQ= 38567
+SUxJTkc= 38568
+IEVsZGVy 38569
+IGNhZmZl 38570
+IGRldmVsb3Bz 38571
+aXNoZXI= 38572
+IHBseQ== 38573
+IHRvbA== 38574
+X1BMQVk= 38575
+IGZyaWN0aW9u 38576
+KGFsd2F5cw== 38577
+IGluZGlnZW5vdXM= 38578
+IE9wZXJh 38579
+IENhbXB1cw== 38580
+YW5jZW1lbnRz 38581
+IGxpdHRlcg== 38582
+LmxpbWl0 38583
+KFRva2Vu 38584
+ZW5pcw== 38585
+IGhpZ2hsaWdodGluZw== 38586
+IEF1Yg== 38587
+IHZhbGlkYXRvcnM= 38588
+LWhvc3Q= 38589
+d2hlZWw= 38590
+PHs= 38591
+KSkr 38592
+IE5ld3NsZXR0ZXI= 38593
+X2F2ZXJhZ2U= 38594
+IHNvZGl1bQ== 38595
+IEhpbA== 38596
+IE1pbGU= 38597
+IEF1dGhTZXJ2aWNl 38598
+U3RhdGlzdGljcw== 38599
+IE51dHJpdGlvbg== 38600
+IHNwb25zb3Jz 38601
+b3ZlbmFudA== 38602
+PT09PT09PT09PT09PT0= 38603
+LkFic29sdXRl 38604
+IGbDpQ== 38605
+SGFuZGxpbmc= 38606
+IC0tLS0tLS0K 38607
+KGRpcmVjdG9yeQ== 38608
+IikuCg== 38609
+YW5vbA== 38610
+LmJyb3dzZXI= 38611
+IEdyaW5kaW5n 38612
+IGNr 38613
+RnJlcXVlbmN5 38614
+KClbJw== 38615
+QWRqdXN0 38616
+Y3Jldw== 38617
+YWZldHk= 38618
+IGdu 38619
+IHdpdmVz 38620
+b29v 38621
+IHByb3N0aXR1 38622
+IG/DuQ== 38623
+aWZ0eQ== 38624
+IGxpdGlnYXRpb24= 38625
+IEV6 38626
+SmVmZg== 38627
+LnBr 38628
+IFNob2Vz 38629
+Y29ybg== 38630
+eXl2c3A= 38631
+IGFkYXA= 38632
+PXU= 38633
+Q09ORg== 38634
+QU5EQVJE 38635
+IGVsZXZhdG9y 38636
+YmlsbGluZw== 38637
+IGNhbmQ= 38638
+IGNhcnA= 38639
+W2ZpZWxk 38640
+LWxpYg== 38641
+c2VxdWVudGx5 38642
+Pi0= 38643
+IGxjZA== 38644
+LS0tLS0tLS0tLS0tLS0t 38645
+KCIi 38646
+IHRhY3RpY2Fs 38647
+IFJvbmFsZA== 38648
+ZXh0cg== 38649
+IEZlc3Q= 38650
+IGZ1ZXI= 38651
+LW5hdmlnYXRpb24= 38652
+IGti 38653
+Z2hvc3Q= 38654
+IGhhbmRsZUNoYW5nZQ== 38655
+X2Nscw== 38656
+KCkhPQ== 38657
+Q29tcGFyYXRvcg== 38658
+LnZt 38659
+IENveA== 38660
+X3Jldmlldw== 38661
+L0A= 38662
+X2Nvb2tpZQ== 38663
+IHJlY29nbmlzZWQ= 38664
+bGRhcA== 38665
+VGhyZWFkcw== 38666
+IFNleHVhbA== 38667
+IEJlYXJpbmc= 38668
+KFNRTA== 38669
+IHhy 38670
+IHRoaWdo 38671
+VVJMQ29ubmVjdGlvbg== 38672
+IFNVVg== 38673
+IG1Db250ZXh0 38674
+IGluY2lkZW5jZQ== 38675
+IEVzdGU= 38676
+LnN1cA== 38677
+X3Rl 38678
+KEVYSVQ= 38679
+Q01E 38680
+LyI+ 38681
+QWxtb3N0 38682
+IFVuZQ== 38683
+IGFuZGVyZW4= 38684
+IFNpbmdsZXRvbg== 38685
+IGJvcmU= 38686
+VGhpbms= 38687
+IG5hcmM= 38688
+XWluaXRXaXRo 38689
+X3Nob3A= 38690
+KHN0cmF0ZWd5 38691
+IScs 38692
+aGVyaXRz 38693
+IERlc2s= 38694
+X21hY2hpbmU= 38695
+Lm5ldHR5 38696
+xLFuZGE= 38697
+PTw= 38698
+IFFS 38699
+IFNpZGViYXI= 38700
+LnNwbGl0Q29udGFpbmVy 38701
+IG9uU3VjY2Vzcw== 38702
+IG1vbmtleQ== 38703
+RW5qb3k= 38704
+KG5vZGVz 38705
+cGVjdHJ1bQ== 38706
+ICgqKA== 38707
+CVVJTlQ= 38708
+LGhlaWdodA== 38709
+IE5ldHdvcmtz 38710
+LnRhaWw= 38711
+LmxpbnNwYWNl 38712
+ICIuLi4= 38713
+TGlzdGVu 38714
+xqE= 38715
+LkNoYW5uZWw= 38716
+LWRlZmluZWQ= 38717
+UmVwZWF0 38718
+YWRqdXN0 38719
+RVJN 38720
+X2FwcGxpY2F0aW9u 38721
+LmFzc2VydE5vdE51bGw= 38722
+LXN0cmVhbQ== 38723
+IHJhYmJpdA== 38724
+IHBvc2l0aW9uaW5n 38725
+IHdva2U= 38726
+IGZpbmc= 38727
+IG11bHRpcGxheWVy 38728
+IHJlZ2lzdGVyaW5n 38729
+dW50aWw= 38730
+w6Vu 38731
+KDo6 38732
+dXNzaW9ucw== 38733
+IHBvdGF0bw== 38734
+IEVxdWFscw== 38735
+LlN1cA== 38736
+L2FwYWNoZQ== 38737
+ICg9 38738
+LiIp 38739
+LnB0cg== 38740
+IFNwZWVjaA== 38741
+LmNsaXA= 38742
+IEdhYnJpZWw= 38743
+IG11c2ljaWFu 38744
+L2lzc3Vlcw== 38745
+LnNob3A= 38746
+IEhpZXI= 38747
+X1JFVA== 38748
+X2J1Y2tldA== 38749
+44Oh 38750
+YXZz 38751
+IHJveg== 38752
+Zmxvd2Vy 38753
+V3JpdGVCYXJyaWVy 38754
+IE1pbGFu 38755
+IGxlZ2lzbGF0dXJl 38756
+IERvbGw= 38757
+IHByb3Zpbmc= 38758
+LmNvbmNhdGVuYXRl 38759
+4pWQ 38760
+IGdjaGFy 38761
+Y2RuanM= 38762
+Ymxlcw== 38763
+IExpc3Rpbmc= 38764
+0LvQvg== 38765
+LnhyTGFiZWw= 38766
+IFNhaw== 38767
+anVzdGljZQ== 38768
+IFZhbGVudGluZQ== 38769
+dW5sZXNz 38770
+IHBpZ2Vy 38771
+KHJ1bg== 38772
+IHRlc3RpZmllZA== 38773
+QU5B 38774
+IFJlbW92ZXM= 38775
+KSkpKTsK 38776
+cmVjYXRlZA== 38777
+IFJ1bnRpbWVNZXRob2Q= 38778
+IGNvbnF1 38779
+44Ki 38780
+IHRpc3N1ZXM= 38781
+YWlsZXI= 38782
+w6l0w6k= 38783
+LVN0YXI= 38784
+IGZsYW1lcw== 38785
+LnNldEljb24= 38786
+IHN1cGVybg== 38787
+IHZhZ2luYQ== 38788
+LXZhcmlhYmxl 38789
+IHdlbGxuZXNz 38790
+Q1VS 38791
+IGJlbGxl 38792
+LmdldFJlcXVlc3Q= 38793
+IHBvY28= 38794
+YmVuaA== 38795
+YWdlbnM= 38796
+IHNwaWxs 38797
+IEp1cg== 38798
+IGRpc3BhdGNoZXI= 38799
+0L3QvtCz0L4= 38800
+ZW1vbmlj 38801
+KGRpcm5hbWU= 38802
+INCU 38803
+IHBhc3Nl 38804
+IGdhbno= 38805
+cmljaW5n 38806
+RVU= 38807
+IG11amVyZXM= 38808
+ZXNzZW4= 38809
+LmF0dHJpYnV0ZQ== 38810
+amo= 38811
+CQkgCg== 38812
+W14= 38813
+IHN0cnRvbG93ZXI= 38814
+bGV4ZXI= 38815
+ZWN0YXI= 38816
+aG90ZWw= 38817
+LnNxdWFyZQ== 38818
+IHJhbGw= 38819
+IGxvd2VyZWQ= 38820
+aGFuZGxlZA== 38821
+TWFya2V0 38822
+IFVzZXM= 38823
+aXZhcw== 38824
+LkJ1c2luZXNz 38825
+44GX44Gm 38826
+RElW 38827
+IHdhc3RlZA== 38828
+IGF2b2ly 38829
+w6pt 38830
+X0FDQ09VTlQ= 38831
+LmV0 38832
+CVNETA== 38833
+a2Fw 38834
+IGZveA== 38835
+dXBwZXQ= 38836
+e30sCg== 38837
+Iiwn 38838
+RmF2b3JpdGU= 38839
+UEVORA== 38840
+IEFFUw== 38841
+fSks 38842
+IGRlZHVjdGlvbg== 38843
+IHBvbMOtdA== 38844
+IGNvbXBvbmVudFdpbGw= 38845
+IFRlbGVyaWs= 38846
+X1NFTEY= 38847
+IG11c2U= 38848
+Q3JhZnQ= 38849
+IGRlbnM= 38850
+4KS/ 38851
+KHRw 38852
+IHRhc3R5 38853
+IGJhbGFuY2Vz 38854
+IGRlZGljYXRpb24= 38855
+IFdhbGxhY2U= 38856
+IHVubGF3 38857
+XCI+XA== 38858
+IG11bQ== 38859
+LXVwZGF0ZQ== 38860
+ZW1lbnRl 38861
+IHNvZGE= 38862
+UmVwdWJsaWM= 38863
+YXNtaW5l 38864
+w6lyaWM= 38865
+KFN0YXR1cw== 38866
+IEpzb25Db252ZXJ0 38867
+IERpc2s= 38868
+LlJlZGlyZWN0 38869
+IGZpbG1pbmc= 38870
+L21vbA== 38871
+Um8= 38872
+IHZpbGxl 38873
+IHRyYWJhag== 38874
+IHN5bnRoZXNpcw== 38875
+cmVnYQ== 38876
+IHJs 38877
+U2NoZWR1bGVy 38878
+SVNIRUQ= 38879
+Y3VycmVudFVzZXI= 38880
+KGVycm9ycw== 38881
+J2g= 38882
+X2JvdA== 38883
+eGltbw== 38884
+IFVTQVJU 38885
+X3N1cGVy 38886
+X0RFQ1JFRg== 38887
+0L3QvtC5 38888
+X1JPVw== 38889
+IHByb21vdGVz 38890
+IFRB 38891
+IGhvcmFz 38892
+IFJlcHJlc2VudHM= 38893
+IG5hbWVvZg== 38894
+IEV4Yw== 38895
+IEdhcmFnZQ== 38896
+IHNlaW5l 38897
+LCM= 38898
+IGhlcmI= 38899
+L3Jlc291cmNlcw== 38900
+IHBsZWFkZWQ= 38901
+LnJhZGlvQnV0dG9u 38902
+IOaY 38903
+T3Bz 38904
+IE5lc3Q= 38905
+Y3N0cmluZw== 38906
+IERlZmVuY2U= 38907
+IHJlZmVyZQ== 38908
+X2xlYWY= 38909
+IHJldmVsYXRpb24= 38910
+66c= 38911
+LmV4ZWN1dGVVcGRhdGU= 38912
+X1dPUkxE 38913
+IGV4cGFucw== 38914
+KCJcIg== 38915
+amFi 38916
+IGRvdWJ0cw== 38917
+IEdlb21ldHJ5 38918
+IGludHJvZHVjZXM= 38919
+IHNlbmF0b3Jz 38920
+IGNhbmFs 38921
+LmhlbHBlcg== 38922
+IEJpb2xvZ3k= 38923
+X1NFTlM= 38924
+LnByZXZpb3Vz 38925
+LXRvdWNo 38926
+YWJpdA== 38927
+IGltcGFjdGVk 38928
+IGJyYWNrZXRz 38929
+LmRpcmVjdA== 38930
+YWNjdW0= 38931
+IHRlc3Rvc3Rlcm9uZQ== 38932
+CWFjdGlvbg== 38933
+IENoYW5jZQ== 38934
+IHBlYWtz 38935
+Q3BwQ29kZUdlbldyaXRlQmFycmllcg== 38936
+IHVuYmVsaWU= 38937
+X3ByZXNz 38938
+LlJlbA== 38939
+YW5nbGVk 38940
+L3RlbXBsYXRlcw== 38941
+LS0+DQo= 38942
+bGltZQ== 38943
+IHN1ZmZpY2llbnRseQ== 38944
+X250 38945
+RXhwYW5k 38946
+LmlzZmlsZQ== 38947
+IGlzRW1wdHk= 38948
+IHF0 38949
+IG11bGhlcg== 38950
+YWNvYg== 38951
+R2Vvcmdl 38952
+5bi4 38953
+IGFzc2lt 38954
+YXNv 38955
+IGNvbXByaXNlZA== 38956
+T1Y= 38957
+KENPTkZJRw== 38958
+CXdyaXRlcg== 38959
+IGRlc3A= 38960
+IHRlbnVyZQ== 38961
+KGNy 38962
+LnBvb2w= 38963
+IEJyZW5k 38964
+IGNlbnNvcg== 38965
+KHRpbWVvdXQ= 38966
+IHBsZWE= 38967
+LldyYXA= 38968
+IHRpZ2h0bHk= 38969
+IFdlcmU= 38970
+IElnbm9yZQ== 38971
+YWJlaQ== 38972
+IGJyaWRnZXM= 38973
+IGNvbmRlbW4= 38974
+IHNpbXBsaWNpdHk= 38975
+IHJvdXRpbmVseQ== 38976
+IGJsYWNrcw== 38977
+amI= 38978
+IFBpdA== 38979
+VXRm 38980
+IC8K 38981
+cmVsb2Fk 38982
+IHNldE9iamVjdA== 38983
+L2dsb2JhbA== 38984
+IGZhdHR5 38985
+IHNvY2tz 38986
+Q291bGRu 38987
+IGVyb3Rpc2s= 38988
+5p2h 38989
+IFByZXNzdXJl 38990
+IE1heg== 38991
+bnBvcw== 38992
+dG9sb3dlcg== 38993
+IEVR 38994
+dXRldXI= 38995
+IE1vbWVudA== 38996
+IGV0YQ== 38997
+e3stLQ== 38998
+IGdyYXBocw== 38999
+IEd1YXI= 39000
+cmluZQ== 39001
+KC0t 39002
+IEh0dHBTdGF0dXM= 39003
+KHN0dWRlbnQ= 39004
+Km5w 39005
+IHJhaWx3YXk= 39006
+IGFzeW5jaHJvbm91cw== 39007
+X3Zt 39008
+J10sJw== 39009
+LHRleHQ= 39010
+bWVyY2hhbnQ= 39011
+KEd1aWQ= 39012
+IEdyYQ== 39013
+aXhlcg== 39014
+ZmV0Y2hBbGw= 39015
+LmFkZExpc3RlbmVy 39016
+ZmxpcA== 39017
+KiQ= 39018
+PigpLA== 39019
+IHN1bmxpZ2h0 39020
+YXNzaWduZWQ= 39021
+IGFiYw== 39022
+IENPTFVNTg== 39023
+IPCfmYIKCg== 39024
+KS4uLg== 39025
+IGVuc2VtYmxl 39026
+IG5ld2xpbmU= 39027
+X1NJTkdMRQ== 39028
+aWVkYWQ= 39029
+IGRhcmtlcg== 39030
+b3JtYXA= 39031
+IGxpb24= 39032
+cGxpdHM= 39033
+IGlsbHVzdHJhdGlvbg== 39034
+IElFRUU= 39035
+IHZpc3Rh 39036
+b3VzYW5kcw== 39037
+KioqKioqKg== 39038
+IFRvbW15 39039
+IGh1ZQ== 39040
+U2Vs 39041
+IGF1cmE= 39042
+IFRoZXJhcHk= 39043
+IGFuaW1hdG9y 39044
+LmNvbnN0cmFpbnRz 39045
+IHZhZ3Vl 39046
+KCIiKQ== 39047
+IHZpbGxhaW4= 39048
+IGJsZXNzaW5n 39049
+IHN0cmluZ0J1aWxkZXI= 39050
+IE1pc2M= 39051
+IERJUg== 39052
+ZmF4 39053
+LW5vZGU= 39054
+IFdhbGtpbmc= 39055
+IEFV 39056
+c2Vzcw== 39057
+IGdyaWxs 39058
+VkVSVElTRQ== 39059
+IEZvb2Rz 39060
+IHRvdXJuYW1lbnRz 39061
+w5M= 39062
+IE1hcnNo 39063
+IHdvbmRlcnM= 39064
+TG9uZ2l0dWRl 39065
+LkNvbW1hbmRUZXh0 39066
+PWlucHV0 39067
+X2VuY29kZXI= 39068
+cGFnZVNpemU= 39069
+IGdldFN0YXRl 39070
+Pj4K 39071
+LmdyZXk= 39072
+cG9k 39073
+IHJlYWRpbmdz 39074
+IHJlY29uc2lkZXI= 39075
+U3RhcnR1cA== 39076
+IGV4Y2Vy 39077
+LmJhbGFuY2U= 39078
+X2N5Y2xl 39079
+X1RpbWU= 39080
+TE9DQUw= 39081
+IEVGSQ== 39082
+IFJleW4= 39083
+LnNldEZvcmVncm91bmQ= 39084
+Ynlu 39085
+IGRpc2Nvbm5lY3RlZA== 39086
+QUNUSVZF 39087
+IGVtYmVkZGluZw== 39088
+aWNrZXJz 39089
+IHN1cnJvdW5kaW5ncw== 39090
+KmM= 39091
+IGdhcmFudA== 39092
+IGJm 39093
+IHdpcGU= 39094
+IOS4iw== 39095
+X1RSQQ== 39096
+YWRveA== 39097
+55U= 39098
+IHN1Y2tz 39099
+IFNvbmdz 39100
+IEFzc29jaWF0ZXM= 39101
+IEJhbGQ= 39102
+IEJyZXR0 39103
+dmVuaWxl 39104
+IHZ0 39105
+IGluYWRl 39106
+IHJlc2lnbmVk 39107
+IEdsZW5u 39108
+LnBhdHRlcm4= 39109
+LkRhdGFCaW5k 39110
+0YPQvA== 39111
+TGF5b3V0SW5mbGF0ZXI= 39112
+Y2hldA== 39113
+IFRlc3RhbWVudA== 39114
+Lm1z 39115
+IHBhdg== 39116
+IFJlYWN0RE9N 39117
+dXJkeQ== 39118
+QURBVEE= 39119
+TXU= 39120
+L2FjdGlvbnM= 39121
+IEpz 39122
+X2V4dHJhY3Q= 39123
+IEJyaW5n 39124
+Omlk 39125
+c3RydA== 39126
+aXZhdGlvbg== 39127
+IG91dHJpZ2h0 39128
+YXp1 39129
+bG95bWVudA== 39130
+0LjRjw== 39131
+YWxkbw== 39132
+IFB1Ymxpc2hlcg== 39133
+RWR1Y2F0aW9u 39134
+UGFsZXR0ZQ== 39135
+X2Rydg== 39136
+ICgkKA== 39137
+IEFuZGE= 39138
+IHJlbWVkeQ== 39139
+IGluY29uc2lzdGVudA== 39140
+dGVjdGlvbg== 39141
+IHJlZ3VsYXRvcnM= 39142
+IHNob3J0ZXN0 39143
+KHBhaXI= 39144
+IEluc3RhbGxhdGlvbg== 39145
+IGRlZmVuZGFudHM= 39146
+ICgpOw== 39147
+LWxhcmdl 39148
+TWVs 39149
+IHRocmVhdGVu 39150
+0L3Rjw== 39151
+IGZldGlzaA== 39152
+b3RpbmU= 39153
+X2RpYw== 39154
+IDwk 39155
+IHN0YWdnZXI= 39156
+c3Bp 39157
+JHJlc3BvbnNl 39158
+U2Vydg== 39159
+LWJvcm4= 39160
+am9z 39161
+CWltZw== 39162
+CVdIRVJF 39163
+X2x0 39164
+5b2T 39165
+LmNvc3Q= 39166
+IFR1ZQ== 39167
+LmxhYmVscw== 39168
+IExW 39169
+d2Nzc3RvcmU= 39170
+IEplc3Nl 39171
+4Lir 39172
+VHJhZGU= 39173
+IHByZWRlY2Vzc29y 39174
+64I= 39175
+ZmluYWxseQ== 39176
+X2dlbmVyYWw= 39177
+b2dnbGVy 39178
+X1JFR0lPTg== 39179
+bmVtZW50 39180
+IGJsb2dnZXI= 39181
+IEhhcmJvcg== 39182
+IERhdGFzZXQ= 39183
+W3c= 39184
+IGF0dGVuZGVlcw== 39185
+Lmljbw== 39186
+bWF4aW11bQ== 39187
+LlVubG9jaw== 39188
+X1NZTkM= 39189
+w6FnaW5h 39190
+IGRvd25z 39191
+IFdpaQ== 39192
+XSkv 39193
+IGtpY2tpbmc= 39194
+dW5pY2F0aW9u 39195
+IERBQw== 39196
+IElEUw== 39197
+IFJlbnRhbA== 39198
+IGN1cnJlbnRUaW1l 39199
+IHZhY2NpbmVz 39200
+IERldmls 39201
+IG5vcnM= 39202
+X21vdXNl 39203
+dXJyZWN0aW9u 39204
+KG5v 39205
+ID4NCg== 39206
+IGFnZ3Jlc3Npb24= 39207
+IGJyZWVkaW5n 39208
+LnN5bWJvbA== 39209
+aW1hbg== 39210
+QWJzb2x1dGVQYXRo 39211
+IFdITw== 39212
+X2ZsdXNo 39213
+LXJvb3Q= 39214
+YXJuYQ== 39215
+Jk0= 39216
+IGZhdGhlcnM= 39217
+IFJvY2tldA== 39218
+aXZlYXU= 39219
+IHdhbmRlcg== 39220
+IGNvbXBvcw== 39221
+IFdhcnJpb3I= 39222
+IFNlYXQ= 39223
+IENsaW5pYw== 39224
+X2ludm9pY2U= 39225
+KGRpc3BhdGNo 39226
+UHJvZHVjdG8= 39227
+YXR1cmluZw== 39228
+b3NzaWVy 39229
+IE1BWQ== 39230
+IGRhZ2dlcg== 39231
+IHNhbml0aXplZA== 39232
+IFJGQw== 39233
+IHByb3Bo 39234
+IHVyaW5l 39235
+IGdyaW5k 39236
+IEV4cGFuZGVk 39237
+ZGVzY3JpcGNpb24= 39238
+LWZ3 39239
+IEtlcnJ5 39240
+PW5hbWU= 39241
+IGNoaw== 39242
+IG5hdGlvbmFsbHk= 39243
+IHRoZWU= 39244
+SW5j 39245
+ID8+Pg== 39246
+LlJhZGlvQnV0dG9u 39247
+Lkh0dHBTZXJ2bGV0UmVzcG9uc2U= 39248
+L1k= 39249
+CWZpZWxk 39250
+IGhvbW1l 39251
+eXBlcg== 39252
+UGh5c2ljYWw= 39253
+PXY= 39254
+IGRyaXY= 39255
+IEVycm9ycw== 39256
+IGPEgw== 39257
+RGVhdGg= 39258
+IFdJTkRPVw== 39259
+IHBvZXQ= 39260
+IFNoYXJw 39261
+IEltbXV0YWJsZQ== 39262
+CWNyZWF0ZQ== 39263
+IGdlaHQ= 39264
+IFJlZm9ybQ== 39265
+YWlzZXI= 39266
+IEluaXRpYWxpemF0aW9u 39267
+IGltbXVuaXR5 39268
+LmNvbXBvc2U= 39269
+IGxhdGVuY3k= 39270
+IExlYmFub24= 39271
+IFBhcmFk 39272
+IGZ1ZWxz 39273
+IEV4aGli 39274
+Y29o 39275
+JSI+Cg== 39276
+IENMSQ== 39277
+KWluaXRXaXRo 39278
+LVph 39279
+X0NMRUFS 39280
+cmVnbg== 39281
+IGZpbmFuY2Vz 39282
+LnN0YW5kYXJk 39283
+X0NBVEVHT1JZ 39284
+LmxpYnJhcnk= 39285
+IHRyYXZlbGVycw== 39286
+X3dw 39287
+IEV2YWx1YXRpb24= 39288
+c3RhcnRpbmc= 39289
+ICkpLAo= 39290
+ZXBpc29kZQ== 39291
+IFZhcmlhbnQ= 39292
+IGRhZW1vbg== 39293
+IEp1bGlh 39294
+IE5S 39295
+IGRvdWJsZXM= 39296
+PHY= 39297
+L3J1bnRpbWU= 39298
+IGludGVycHJldGVy 39299
+IElOREVY 39300
+IEhvbG1lcw== 39301
+X0RJTQ== 39302
+IHBhZGRsZQ== 39303
+X2V4YW1wbGU= 39304
+IGZvcmVncm91bmQ= 39305
+LnJvdXRlcw== 39306
+IHNvd2ll 39307
+U1VDQ0VTUw== 39308
+IENEQw== 39309
+IEJE 39310
+Xy0= 39311
+YXN1cmVk 39312
+V3JpdGluZw== 39313
+IGN1cnJlbnRQYWdl 39314
+KGFuc3dlcg== 39315
+IEFTQ0lJ 39316
+4Kg= 39317
+IHNvY2lhbGx5 39318
+eXl5 39319
+IFNwZWNpYWxpc3Q= 39320
+KGN1c3RvbWVy 39321
+aXN0YW5p 39322
+a2VzdA== 39323
+IE1haw== 39324
+IHRobw== 39325
+LnB0 39326
+KGNvbW1lbnQ= 39327
+IENvbnZlcnRlcg== 39328
+Z2Ft 39329
+Ymlucw== 39330
+LnRlbGU= 39331
+IFZldGVyYW5z 39332
+X0FMTE9D 39333
+0L7Qu9GM0LfQvtCy0LDRgg== 39334
+aW5uYW1vbg== 39335
+O3dpZHRo 39336
+b2hs 39337
+IGZhbnRhcw== 39338
+IHN1bmc= 39339
+CUs= 39340
+KEpzb24= 39341
+IG5laWdoYm91cmhvb2Q= 39342
+IHZvdw== 39343
+IHNpbnM= 39344
+b25hY2Np 39345
+IGVwb2Nocw== 39346
+aW1hZ2Vu 39347
+LkNoYW5nZQ== 39348
+Lm15YmF0aXM= 39349
+U2Vlaw== 39350
+V0VS 39351
+566h55CG 39352
+IGludGVyZXNz 39353
+X0V2ZW50 39354
+ZWRlcmxhbmQ= 39355
+IHRlcnJpdG9y 39356
+IGNpdWRhZA== 39357
+dWNrZWQ= 39358
+IHNuYWNr 39359
+IHRyYW5zcG9ydGVk 39360
+IE1hbmlmZXN0 39361
+IERBVA== 39362
+X3RoZXRh 39363
+IHdvbnQ= 39364
+LgoKCgoKCgoKCgo= 39365
+irbmgIE= 39366
+IEVwaWM= 39367
+RGVjaw== 39368
+bHRyYQ== 39369
+X1pFUk8= 39370
+IFtdOw== 39371
+L3NjcmlwdHM= 39372
+IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0t 39373
+5oOF 39374
+IHdlZWQ= 39375
+TkJD 39376
+IHJhcGVk 39377
+IEdhdGV3YXk= 39378
+W00= 39379
+IFRpbWVvdXQ= 39380
+ZW5jaG1hcms= 39381
+LlZpZXdNb2RlbA== 39382
+IHBvcm5vcw== 39383
+IFlh 39384
+dGhyaXRpcw== 39385
+IEZseW5u 39386
+IG1lZ2E= 39387
+YWNpbg== 39388
+IHRyaWJhbA== 39389
+LmFwcGxl 39390
+IEJsbw== 39391
+w6Ju 39392
+aWJp 39393
+cm92 39394
+IExpdmVz 39395
+Xi4= 39396
+Z2V0UmVxdWVzdA== 39397
+IEVzdGFibGlzaA== 39398
+Y29udGFpbmVycw== 39399
+IHN0YXJyaW5n 39400
+IGNlbGVicml0aWVz 39401
+IFJlbGF0aXZl 39402
+IEhlaWdodHM= 39403
+IHRxZG0= 39404
+IE5vcnRod2VzdA== 39405
+aXZpYw== 39406
+CWNs 39407
+IGF1dG9tb3RpdmU= 39408
+ZW50cmlj 39409
+IGZvcnR1bmF0ZQ== 39410
+IGZpcmVwbGFjZQ== 39411
+c2V1ZA== 39412
+bmlja25hbWU= 39413
+O3M= 39414
+X0NBTA== 39415
+aGFsdA== 39416
+KG5z 39417
+X2RlbGV0ZWQ= 39418
+RGV2ZWxvcG1lbnQ= 39419
+bW92aWVz 39420
+IGlkZW50aXRpZXM= 39421
+IHByb21wdGx5 39422
+2KfZhg== 39423
+IGFudGU= 39424
+ICInLCc= 39425
+5Y+j 39426
+aW1wc2U= 39427
+IHlhcA== 39428
+VHlwZU5hbWU= 39429
+IGJpdGNo 39430
+IGFzc29jaWF0ZXM= 39431
+SEVNRQ== 39432
+LWVtcHR5 39433
+INiq 39434
+b2x2ZXJz 39435
+IHBpc3RvbA== 39436
+U2NvcGVk 39437
+YWduZXI= 39438
+J109PSc= 39439
+IElNUA== 39440
+ZXhj 39441
+IG9taXR0ZWQ= 39442
+IG1pbmRzZXQ= 39443
+IFtdKA== 39444
+IG9ybg== 39445
+X0NBTQ== 39446
+QXZn 39447
+TG9jYWxpemVkU3RyaW5n 39448
+IE5hdHVy 39449
+IGNvbXBvc2Vy 39450
+IFBsYXlpbmc= 39451
+IG92ZXJk 39452
+X3V0Zg== 39453
+LnNr 39454
+IEZvbA== 39455
+JHBhZ2U= 39456
+LE9iamVjdA== 39457
+IGJlZXM= 39458
+YWxhcnk= 39459
+YnVsbGV0 39460
+X2xpYnJhcnk= 39461
+T2ZmZXI= 39462
+bG9jYXRlZA== 39463
+IChfLA== 39464
+4oCcSGU= 39465
+IE93bmVycw== 39466
+KSkuCg== 39467
+IGJyaQ== 39468
+LkFkbWlu 39469
+a3Rpb24= 39470
+0LvRjtGH 39471
+IGVyb3RpY2k= 39472
+Q2FuY2VsbGVk 39473
+IGFncg== 39474
+cmV2aWV3cw== 39475
+X2RtYQ== 39476
+UklDVA== 39477
+IGdmeA== 39478
+bXBp 39479
+cHBv 39480
+IC8vQA== 39481
+IHVwcGVyY2FzZQ== 39482
+IGNvbW1pdHRpbmc= 39483
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA== 39484
+VXNlckRhdGE= 39485
+IHZhaQ== 39486
+CXNvcnQ= 39487
+IGNvbmdyYXQ= 39488
+IGRpb3hpZGU= 39489
+0LTQsA== 39490
+LmFyZWE= 39491
+IEpvc2h1YQ== 39492
+IEtvY2g= 39493
+X2JyZWFr 39494
+YXp1cmU= 39495
+aXN0aWNhbA== 39496
+X0FMUEhB 39497
+X3ZpZXdz 39498
+IGVsaW1pbmF0aW5n 39499
+T01C 39500
+ZW51bWVy 39501
+IEh5ZHJv 39502
+KCoo 39503
+RVJUSUNBTA== 39504
+IGluZXZpdGFibHk= 39505
+IHN0b2xl 39506
+LWVhc3Q= 39507
+aWVyb24= 39508
+IGxpbmdlcg== 39509
+L2RvYw== 39510
+xbo= 39511
+IEFscmVhZHk= 39512
+YXNpbw== 39513
+IC0tCg== 39514
+IGFiYnJldg== 39515
+IEF0b20= 39516
+aGlt 39517
+IElOU0VSVA== 39518
+c3Vu 39519
+4pmq 39520
+Q09OTkVDVA== 39521
+ZXJhdG9y 39522
+IE1hbm5pbmc= 39523
+IDoo 39524
+Z2Fz 39525
+PT4n 39526
+IHF1ZXJ5c2V0 39527
+O30NCg== 39528
+IFBvcHVsYXRpb24= 39529
+dXRlZFN0cmluZw== 39530
+cmVzaWRlbnQ= 39531
+X0ZPTlQ= 39532
+IFJlc3BvbmQ= 39533
+IG9ic2N1cmU= 39534
+IG9ic2VydmFibGU= 39535
+IENvbnRyaWJ1dG9ycw== 39536
+a29u 39537
+IE11c2s= 39538
+ZXhhbw== 39539
+IFR1Yg== 39540
+Qm9vdEFwcGxpY2F0aW9u 39541
+U09S 39542
+Lkhvcml6b250YWw= 39543
+LmZpbmRCeQ== 39544
+LnBvd2Vy 39545
+IHBvc2l0aXZlbHk= 39546
+dmVuaWVuY2U= 39547
+IEpvbmc= 39548
+IHdoaXN0bGU= 39549
+INC30L3QsNGH 39550
+IGxlbmRpbmc= 39551
+IGRlc3RydWN0aXZl 39552
+IG9uRGVsZXRl 39553
+YXV0aG9yaXphdGlvbg== 39554
+KCk7Pz4= 39555
+X29yaWdpbmFs 39556
+c2NpZW5jZQ== 39557
+YXRyYQ== 39558
+Pyw/LA== 39559
+IEFzYw== 39560
+IGNvbnZpbmNpbmc= 39561
+JGE= 39562
+b3JnZW4= 39563
+X0RhdGU= 39564
+IFByb3ZpZGU= 39565
+IGxvbmVseQ== 39566
+KScK 39567
+ZXhjaGFuZ2U= 39568
+Oz8+Cg== 39569
+LmZhc3Q= 39570
+U2FtcGxlcw== 39571
+TG9uZG9u 39572
+J10pDQo= 39573
+IElvbmlj 39574
+IHBlc3Nv 39575
+IEtuaWdodHM= 39576
+IFJhZg== 39577
+X2F0dHJz 39578
+IHJlcGVhbA== 39579
+Pk1haW4= 39580
+IE9yZGVyZWQ= 39581
+X05ldw== 39582
+PSIiPjwv 39583
+dXJscGF0dGVybnM= 39584
+QVRJT05BTA== 39585
+cGVlY2g= 39586
+IElkYWhv 39587
+IHByaW5jZXNz 39588
+IEN1c3RvbWVycw== 39589
+YXdheXM= 39590
+YWRi 39591
+IEJyeWFudA== 39592
+bm9uY2U= 39593
+IGFkdWw= 39594
+IGBgKA== 39595
+IGFmdGVybWF0aA== 39596
+PWRpY3Q= 39597
+dGV4dEJveA== 39598
+IHNwZXJt 39599
+IGNvdWdo 39600
+SG9y 39601
+4oCZUw== 39602
+LkNvbXBvbmVudFJlc291cmNlTWFuYWdlcg== 39603
+IHJlZ3VsYXRvcg== 39604
+IHBhcnRuZXJzaGlwcw== 39605
+L3Byb2plY3Rz 39606
+dHJ5cw== 39607
+IExhc2Vy 39608
+4p+p 39609
+IEZ1bms= 39610
+IHVuY29uc2Npb3Vz 39611
+IGNydXN0 39612
+IFRlYW1z 39613
+IEJhbm5lcg== 39614
+IEhvbmV5 39615
+bGVtcw== 39616
+IG1heFdpZHRo 39617
+UG9pbnRlckV4Y2VwdGlvbg== 39618
+ZmFkZU91dA== 39619
+LVN0 39620
+IHN0cmFuZ2Vycw== 39621
+X0dP 39622
+V3JpdGFibGU= 39623
+X0luZm8= 39624
+Lk5vbk51bGw= 39625
+YW5ub3RhdGlvbnM= 39626
+IEdE 39627
+IGVuZG9yc2Vk 39628
+CVRva2VuTmFtZQ== 39629
+IERlcGVuZGluZw== 39630
+WU5BTQ== 39631
+IE1ldGVvcg== 39632
+IEluY3JlYXNl 39633
+Lk1hbnk= 39634
+PT0o 39635
+LlVVSUQ= 39636
+X0tFUk5FTA== 39637
+IHZpZMOp 39638
+IHBx 39639
+IFF0R3Vp 39640
+IFZhcmlvdXM= 39641
+IGpvaG4= 39642
+X3BhdGNo 39643
+IHRvdXRlcw== 39644
+IEZhaWw= 39645
+IHN1cnZpdmluZw== 39646
+KCIkew== 39647
+ICAgICAgIA0K 39648
+IGltYWdlVXJs 39649
+LndvcmRwcmVzcw== 39650
+c291cmNlcw== 39651
+CWdsVmVydGV4 39652
+4oCZYQ== 39653
+IGVzY29s 39654
+UkFSWQ== 39655
+IFNuYWtl 39656
+IHF1aW50 39657
+IGxhc3Rz 39658
+IEhhcm1vbg== 39659
+IGNvaWw= 39660
+IGV4cGxvaXRhdGlvbg== 39661
+bGVlbg== 39662
+Jz4iOwo= 39663
+IFNFUlZFUg== 39664
+IEhFQURFUg== 39665
+X3ZlbG9jaXR5 39666
+IEludm9rZQ== 39667
+LnRpbWVzdGFtcHM= 39668
+IHN1bGY= 39669
+SVFVRQ== 39670
+IGluaGFiaXRhbnRz 39671
+cGhpbnM= 39672
+YXp6bw== 39673
+IG1vbm8= 39674
+TGVnZW5k 39675
+IG5vbmNl 39676
+SUZF 39677
+OyI7Cg== 39678
+LWNyZWF0ZQ== 39679
+IiIsCg== 39680
+cGVybWl0 39681
+IEltbWlncmF0aW9u 39682
+IHBhdGhuYW1l 39683
+ZmZlY3RpdmU= 39684
+4pmA4pmA 39685
+IGV4YW1z 39686
+LWV2ZW50 39687
+IFRpbGw= 39688
+W21pZA== 39689
+RklY 39690
+O2NvbG9y 39691
+KE9yZGVy 39692
+X3RyYWl0cw== 39693
+IG9yZGVyQnk= 39694
+IHN1bnQ= 39695
+IE5pY2hvbGFz 39696
+2LI= 39697
+IHN1bm55 39698
+aW5lcnM= 39699
+IGFjY2Vzc2liaWxpdHk= 39700
+IEhC 39701
+LmNvbXA= 39702
+CW9w 39703
+IG1pbm9yaXRpZXM= 39704
+ZXRoZXVz 39705
+IGNvbGxhYm9yYXRpdmU= 39706
+cHJpdA== 39707
+SElS 39708
+IHdyYXBz 39709
+CWRyYXc= 39710
+Z29k 39711
+IElY 39712
+LmFwcHM= 39713
+IE5N 39714
+IGlycmVsZXZhbnQ= 39715
+IFRpZ2Vycw== 39716
+IGRpYWc= 39717
+R1Y= 39718
+IEFjY2Vzc29yaWVz 39719
+a29udA== 39720
+IHNpbXBsaWZ5 39721
+IEZhdm9yaXRl 39722
+X3Rvb2xz 39723
+KFtdKTsK 39724
+IHRvd2Vycw== 39725
+QmVz 39726
+IGh1bnRlcg== 39727
+IHNhbG9u 39728
+KGJ1ZmY= 39729
+CWRlYnVn 39730
+IG1hbHdhcmU= 39731
+TW92aW5n 39732
+LW9wdGlvbnM= 39733
+KSsn 39734
+IExPVkU= 39735
+X1NPQ0tFVA== 39736
+X2Zpbg== 39737
+IERlbGF3YXJl 39738
+IHNoZXJpZmY= 39739
+LWludmFsaWQ= 39740
+IEZVTEw= 39741
+INC/0L7QtA== 39742
+ZWxhcw== 39743
+InN0cmluZ3M= 39744
+IFJlcHJlc2VudGF0aXZlcw== 39745
+c3VyZmFjZQ== 39746
+cmVzb2x2ZWQ= 39747
+aHRkb2Nz 39748
+KSk6DQo= 39749
+IHByZXNzdXJlcw== 39750
+IG5vcm1z 39751
+IHBsYQ== 39752
+IHN1cm5hbWU= 39753
+IHBvc3RhbA== 39754
+IERlcGFydA== 39755
+IHNsYXVnaHRlcg== 39756
+b3JpZGE= 39757
+IGhlYmJlbg== 39758
+IGRlc2Fy 39759
+Y29tcGFjdA== 39760
+X0xBTkc= 39761
+5ZCI 39762
+b3BvbHk= 39763
+X3JhZA== 39764
+IFNURE1FVEhPRA== 39765
+TGF6eQ== 39766
+ICAgCQ== 39767
+Li4uLA== 39768
+KHdlYg== 39769
+IFBvbnQ= 39770
+IGV0d2Fz 39771
+IHVwd2FyZA== 39772
+X2hhdA== 39773
+IF0sCgo= 39774
+IGJhc2VVcmw= 39775
+IHdvcnJ5aW5n 39776
+LWFkZG9u 39777
+KGdldENsYXNz 39778
+U1BJ 39779
+IGNhcHR1cmluZw== 39780
+KX0sCg== 39781
+RWZmZWN0cw== 39782
+IGNvbXBldGVudA== 39783
+IGZvdWw= 39784
+IHN1YnNjcmliaW5n 39785
+IE9CSkVDVA== 39786
+SVhFTA== 39787
+YnVja3M= 39788
+KGVkZ2U= 39789
+KHBhc3M= 39790
+IFBldGVyc29u 39791
+IGJvb2Jz 39792
+IERlbGF5 39793
+X3NxdWFyZQ== 39794
+ZWxpbQ== 39795
+b3RlcnM= 39796
+X1BD 39797
+JUU= 39798
+b25jbGljaw== 39799
+IFNWRw== 39800
+IHRvcHBlZA== 39801
+IGZpc3Q= 39802
+c21hcnQ= 39803
+IFJhbHBo 39804
+KG93bmVy 39805
+am91cnM= 39806
+IGJyb256ZQ== 39807
+IEFyZ3VtZW50RXhjZXB0aW9u 39808
+KG9yaWdpbmFs 39809
+X1NDQUxF 39810
+X2Nw 39811
+IHJlY29tbWVuZHM= 39812
+LnNldFN0eWxl 39813
+U3VyZQ== 39814
+TEFORA== 39815
+IHJlcGVhdGluZw== 39816
+TWF0dA== 39817
+LlZpc2liaWxpdHk= 39818
+IGVudGVycHJpc2Vz 39819
+LlNldHVw 39820
+KHNjZW5l 39821
+IFJlYWN0aXZl 39822
+dXJnZQ== 39823
+Ync= 39824
+LlB1dA== 39825
+cGVyc2lzdA== 39826
+LmNvb2tpZQ== 39827
+IEF1ZGk= 39828
+YHM= 39829
+c3VwcGxpZXI= 39830
+KEZvcm0= 39831
+wqE= 39832
+X3Nv 39833
+jIA= 39834
+IExlZ2lvbg== 39835
+dHRl 39836
+TmQ= 39837
+TG9zcw== 39838
+KGF0dHJz 39839
+LnNjYXR0ZXI= 39840
+IGdyb29t 39841
+IGdsaW1wc2U= 39842
+IG5haWxz 39843
+IGN1bXVsYXRpdmU= 39844
+IGZhemVy 39845
+X3NlcnZpY2Vz 39846
+Lk51bQ== 39847
+aWJpbGl0 39848
+X3Jlc29sdXRpb24= 39849
+IFR4 39850
+dW1pbml1bQ== 39851
+b3Bh 39852
+LnNjaGVkdWxl 39853
+c210cA== 39854
+4LiV 39855
+dXJyeQ== 39856
+w7xr 39857
+Z29vZw== 39858
+X3NpZ25hdHVyZQ== 39859
+LmludG8= 39860
+IFN0ZXBz 39861
+IGhvbWVvd25lcnM= 39862
+IE5TVVJM 39863
+IFBBQw== 39864
+ICAgICAgICAgICAgCgo= 39865
+PicpCg== 39866
+ZW5o 39867
+IGluY2Fw 39868
+JE1FU1M= 39869
+IG1vaW5z 39870
+IEZp 39871
+IG9mZnNlYXNvbg== 39872
+cHJlc3Npb25z 39873
+Pi48Lw== 39874
+IE1hcmtlcg== 39875
+IG9uQ2xvc2U= 39876
+TEVWRUw= 39877
+IGludGVyZmVyZQ== 39878
+IENvbGlu 39879
+IFJlc2lzdGFuY2U= 39880
+RGlzY291bnQ= 39881
+IFdlYkVsZW1lbnQ= 39882
+IGJhdGhyb29tcw== 39883
+bGVnYWN5 39884
+IENhcHR1cmU= 39885
+IGFyaXNpbmc= 39886
+ICIpOwoK 39887
+0YjQuNCx 39888
+IEluZmluaXR5 39889
+QWR2ZXJ0aXNlbWVudHM= 39890
+IENvbWluZw== 39891
+IFBST0pFQ1Q= 39892
+X1BST1RPQ09M 39893
+IHVzZURpc3BhdGNo 39894
+LmNoYW5uZWxz 39895
+IENpdGl6ZW5z 39896
+ZW50cmU= 39897
+X21w 39898
+LkNvbnN0YW50cw== 39899
+IFNlcmlhbGl6ZQ== 39900
+X0lOQw== 39901
+KGx1YQ== 39902
+IGNsYXNo 39903
+X3dpdGhvdXQ= 39904
+LmtleVNldA== 39905
+IHJlY2VpdmVycw== 39906
+5pa55rOV 39907
+KG1lbQ== 39908
+IEhvcml6b250YWw= 39909
+IGNvY2t0YWls 39910
+IGNob29zZXM= 39911
+LklubmVy 39912
+IHJlbGllZA== 39913
+b3VudGVy 39914
+ICJe 39915
+IHRlbmFudHM= 39916
+ImA= 39917
+X1BN 39918
+ZXJzZWQ= 39919
+IH19Ij48Lw== 39920
+IHByb3ZpbmNlcw== 39921
+X1JBVw== 39922
+XEFwcA== 39923
+IHByb3N0aXR1ZXI= 39924
+X2dhaW4= 39925
+LnRlbmNlbnQ= 39926
+ZmZlY3Rz 39927
+KHBr 39928
+c2t1 39929
+IHVzYWJsZQ== 39930
+RVJWRUQ= 39931
+IGFudGVubmE= 39932
+aGVh 39933
+cGxpc3Q= 39934
+X1BMVUdJTg== 39935
+0YHQuw== 39936
+Lmxvb2t1cA== 39937
+4buB 39938
+IGVubGFyZw== 39939
+IHBpc3M= 39940
+SGFt 39941
+aW1hcA== 39942
+IGludmFsaWRhdGU= 39943
+IHNpbGs= 39944
+PSIjIj4K 39945
+IEdyYXNz 39946
+IEdvYWw= 39947
+X3BkZg== 39948
+SGFuZGxlcnM= 39949
+IHN0YWNrcw== 39950
+LmdldEZ1bGxZZWFy 39951
+PVtdOwo= 39952
+6L2m 39953
+LFY= 39954
+KHNwbGl0 39955
+0YPQvdC6 39956
+IGJha2VjYQ== 39957
+IH4vLg== 39958
+cGV6 39959
+dGFpbHM= 39960
+IEdsZW4= 39961
+IHNldEltYWdl 39962
+IENvbWlj 39963
+QkxPQ0s= 39964
+CVRoaXM= 39965
+b2FkZXI= 39966
+IGNhcGl0YWxpc3Q= 39967
+X1NURVA= 39968
+KEJvb2xlYW4= 39969
+IENvcnJlY3Q= 39970
+cmluYQ== 39971
+IGNvbmNhdGVu 39972
+5a6e 39973
+KCk6Cgo= 39974
+IHVuYW5pbQ== 39975
+bGxp 39976
+YWxhcnM= 39977
+LW5l 39978
+IGRpdm9y 39979
+IEtpY2tzdGFydGVy 39980
+XS5f 39981
+PG51bWJlcg== 39982
+L21lbnU= 39983
+R1JBUEg= 39984
+dmlzaXRvcg== 39985
+IGltcHJvcGVy 39986
+X05FWFQ= 39987
+IGJpc2E= 39988
+YmFja2dyb3VuZENvbG9y 39989
+L2lucHV0 39990
+IG1vaQ== 39991
+R29hbA== 39992
+bGlxdQ== 39993
+IG1pc2NvbmR1Y3Q= 39994
+IGNvbXByaXNlcw== 39995
+YXducw== 39996
+IFBpZQ== 39997
+cmFpcw== 39998
+cm9sZXVt 39999
+IGN1cnNl 40000
+eXU= 40001
+X3BvbGw= 40002
+LmN1cnJlbnRVc2Vy 40003
+RVNI 40004
+XSlb 40005
+IHN0b3J5dA== 40006
+KT87Cg== 40007
+Kj0= 40008
+IEJ1cmc= 40009
+L2xheW91dA== 40010
+X2JhY2tlbmQ= 40011
+Oz8+PC8= 40012
+IFdoYXRzQXBw 40013
+IE1vdW50YWlucw== 40014
+dmlzaW9ucw== 40015
+Zmx1ZW5jZQ== 40016
+LmNyZWF0ZUNvbXBvbmVudA== 40017
+IFBzeQ== 40018
+Zm9yZ2V0 40019
+c3J2 40020
+X0NPTVBPTkVOVA== 40021
+IE5leHVz 40022
+ICl7 40023
+ZW5kaQ== 40024
+SU1VTQ== 40025
+IEdG 40026
+57uE 40027
+4oCUdGhhdA== 40028
+Yms= 40029
+TW96aWxsYQ== 40030
+IGRlZmVuZGVycw== 40031
+LXNldHRpbmdz 40032
+aW1taW5n 40033
+IE9QVA== 40034
+IENX 40035
+IHRoYXRz 40036
+IE9wZW5pbmc= 40037
+UmVsZWFzZWQ= 40038
+bnBt 40039
+IGhycw== 40040
+IGdyb3VwZWQ= 40041
+LyIuJA== 40042
+IEhpc3RvcmljYWw= 40043
+KCQiew== 40044
+b3ZpYw== 40045
+KHNpZ24= 40046
+IFBob3RvZ3JhcGh5 40047
+IHNpZ251cA== 40048
+X0FSQ0g= 40049
+LnRlc3RuZw== 40050
+L2FuZ3VsYXI= 40051
+UmVzdENvbnRyb2xsZXI= 40052
+c2hpdA== 40053
+dWxsZQ== 40054
+LnBhdXNl 40055
+KFtdLA== 40056
+KHF1ZXN0aW9u 40057
+aWxvZ3k= 40058
+IEV1Zw== 40059
+LWxvY2Fs 40060
+IGt2aW4= 40061
+IHJlc2VydmF0aW9ucw== 40062
+b2JpYQ== 40063
+IHN1YnNpZGlhcnk= 40064
+IGFjY3VtdWxhdGVk 40065
+IFFWYXJpYW50 40066
+IEJKUA== 40067
+IE5vcm1hbg== 40068
+IEludGVncmF0aW9u 40069
+LlZhcmlhYmxl 40070
+KFJlc291cmNl 40071
+KioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKg== 40072
+RXhwb3Nl 40073
+ICd9 40074
+LkNPTE9S 40075
+INGH0LjRgQ== 40076
+QWpheA== 40077
+IHRocnU= 40078
+TW92aWVz 40079
+IHByb3Bvc2l0aW9u 40080
+L3RoZW1l 40081
+TW9kZWxQcm9wZXJ0eQ== 40082
+IEF3cw== 40083
+IEFuZHJlYQ== 40084
+IE1lcmdl 40085
+LmZpbmlzaA== 40086
+KHJlcXVpcmVk 40087
+IFByZWw= 40088
+ZWxlZA== 40089
+5pON5L2c 40090
+LlRSQQ== 40091
+TUFT 40092
+IHJlYWxpc2Vk 40093
+cm9pZHM= 40094
+CWZu 40095
+cmg= 40096
+LiI8Lw== 40097
+dmlkaWE= 40098
+IGRlcHVpcw== 40099
+IEJW 40100
+TG4= 40101
+IGx1c3Q= 40102
+QXNj 40103
+CQkJCQkJCSA= 40104
+aXNsZQ== 40105
+LWNhcmU= 40106
+X0lOVg== 40107
+IERyZXc= 40108
+IHdoYXRz 40109
+IENhcGFjaXR5 40110
+UGFybQ== 40111
+X21vbml0b3I= 40112
+LnN0dWRlbnQ= 40113
+IFJOQQ== 40114
+LmVuZHN3aXRo 40115
+Ymlo 40116
+IE1MQg== 40117
+L3Byb2plY3Q= 40118
+IHJlc3Rpbmc= 40119
+c2VwYXJhdG9y 40120
+eWQ= 40121
+ZXJ0aWE= 40122
+IG1vbml0b3JlZA== 40123
+Ij4qPC8= 40124
+LkZD 40125
+IE5FV1M= 40126
+IENhbGxz 40127
+IGFkZXF1 40128
+Q2hlY2tpbmc= 40129
+ZXN0aW1hdGU= 40130
+IHJlY2FsbHM= 40131
+X2ZyZXF1ZW5jeQ== 40132
+IHVzZVJlZg== 40133
+IEdyb3Zl 40134
+IFhpYQ== 40135
+IMOt 40136
+ZXNzZW5nZXI= 40137
+LWNvc3Q= 40138
+LmZj 40139
+IEt1bWFy 40140
+LkZvY3Vz 40141
+ZWxsYW5lb3Vz 40142
+LkFsZXJ0 40143
+ZWF4 40144
+IG9yY2g= 40145
+LnBt 40146
+IGxhbmRsb3Jk 40147
+KHBvcA== 40148
+X2FjdHVhbA== 40149
+IExC 40150
+R3JhbmQ= 40151
+LnJlbmRlcmVy 40152
+IGxvYg== 40153
+Y3VzdG9tZXJz 40154
+IGNhcHR1cmVz 40155
+V0lORE9X 40156
+IGRvY2g= 40157
+IGFwb2xvZ3k= 40158
+IEphbWE= 40159
+QFs= 40160
+LnRha2U= 40161
+bm9vcA== 40162
+IGx1bQ== 40163
+IGRpZmZlcmVudGlhbA== 40164
+IGVmZmljYWN5 40165
+CUlO 40166
+X0JPWA== 40167
+X3Nk 40168
+X3J0 40169
+Y29kZXI= 40170
+b3VuY2VtZW50 40171
+aGFzQ2xhc3M= 40172
+IHJpc2t5 40173
+IEVzdGFkbw== 40174
+LURE 40175
+IENhcnNvbg== 40176
+U3VmZml4 40177
+IHRvZGE= 40178
+IFRyYWNrZXI= 40179
+IERlbGVnYXRl 40180
+YCxg 40181
+IFBhcmtpbmc= 40182
+IG5lcg== 40183
+YXpv 40184
+IEZpbGVJbnB1dFN0cmVhbQ== 40185
+IHJlY291bnQ= 40186
+cWk= 40187
+Y2tlbg== 40188
+IHNvY2lhbGlzdA== 40189
+IEludm9pY2U= 40190
+INC/0YDQvg== 40191
+JSIs 40192
+ZW5uZW4= 40193
+IHZpdm8= 40194
+IG9yZ2FuaXphdGlvbmFs 40195
+IHVuY29tbW9u 40196
+dXRhcg== 40197
+IGh1bGw= 40198
+VHVlc2RheQ== 40199
+IGFzc2Vzc21lbnRz 40200
+KGFwcGxpY2F0aW9u 40201
+IHByZW1pc2U= 40202
+U3RhcnRUaW1l 40203
+IGRr 40204
+IGludGVyZmVy 40205
+IFF1ZWVuc2xhbmQ= 40206
+IGNyZWRlbnRpYWw= 40207
+IGxlaXN1cmU= 40208
+WVo= 40209
+IENtZA== 40210
+QlVT 40211
+dXNhbg== 40212
+CXZlYw== 40213
+aW9sb2dpY2Fs 40214
+IExvdHM= 40215
+IGVubGlnaHQ= 40216
+IGZyZXNobWFu 40217
+IENPTU1BTkQ= 40218
+IEFjdGlvbkxpc3RlbmVy 40219
+dXRt 40220
+YXJpdXM= 40221
+VHdpZw== 40222
+IHN3ZXB0 40223
+LXRvb2w= 40224
+xJA= 40225
+Y2hhcHRlcg== 40226
+LWdyYWRl 40227
+IGN1cmlvc2l0eQ== 40228
+IHN1c3RhaW5hYmlsaXR5 40229
+IE1pbmVjcmFmdA== 40230
+d2VuZA== 40231
+SWZFeGlzdHM= 40232
+IEN1bHR1cmFs 40233
+IFNhY3JhbWVudG8= 40234
+TGF5ZXJz 40235
+U3Vic2NyaWJlcg== 40236
+LkdyYXBo 40237
+IGxt 40238
+ZXN0eQ== 40239
+YWR2ZXJ0 40240
+JHA= 40241
+IEhvY2tleQ== 40242
+IERFVA== 40243
+c2V0VGl0bGU= 40244
+eWFuZw== 40245
+IGJhYmU= 40246
+ZWxzaXVz 40247
+VHJhdmVs 40248
+IG1lc21v 40249
+KG1hcFN0YXRlVG9Qcm9wcw== 40250
+X1NFTA== 40251
+LXBvcA== 40252
+IGVtaXNzaW9u 40253
+4oCZLgoK 40254
+LnN3aXRjaA== 40255
+b3Rpb25z 40256
+LnBob3Rv 40257
+TFY= 40258
+YW1vZGVs 40259
+IHdvcmR0 40260
+SUdHRVI= 40261
+IFRPREFZ 40262
+T0xT 40263
+X0lERU5U 40264
+IGNvbW1lbnRpbmc= 40265
+RGF0b3M= 40266
+IGhpbGFyaW91cw== 40267
+KGFueQ== 40268
+IGRhbXA= 40269
+LWNvbnRyb2xsZWQ= 40270
+ICI8Pw== 40271
+X2JsYWNr 40272
+TmV0QmFy 40273
+LnNldFNlbGVjdGVk 40274
+Q3Nz 40275
+IHF1YXJ0 40276
+IG93bmluZw== 40277
+IEZJRUxE 40278
+LnJlbHU= 40279
+IGxpcw== 40280
+7Jqw 40281
+LlJFTEFURUQ= 40282
+IGxvaw== 40283
+IEZsaXA= 40284
+IHByZXN0aWdpb3Vz 40285
+IGRn 40286
+IElucHV0U3RyZWFtUmVhZGVy 40287
+IHVzdQ== 40288
+IGdpcg== 40289
+IGFuYQ== 40290
+X3B5 40291
+dW5uZWw= 40292
+CXN5c3RlbQ== 40293
+IGNvYXRpbmc= 40294
+IEdlbnJl 40295
+ZXJybw== 40296
+IENMSUVOVA== 40297
+IHN0cmV0Y2hlZA== 40298
+Lkhhc1ZhbHVl 40299
+Ozs7Ozs7Ozs= 40300
+54mI 40301
+IGZpbmFscw== 40302
+LmdldENoaWxkcmVu 40303
+IC0tfX0K 40304
+IENvd2JveXM= 40305
+IEVkaW5idXJnaA== 40306
+IFBsYXph 40307
+YWJlbg== 40308
+QXJ0aXN0 40309
+VVJB 40310
+IEh1Z2hlcw== 40311
+b2JiaWVz 40312
+X25vaXNl 40313
+Lk9iamVjdHM= 40314
+RXhwcmVzc2lvbnM= 40315
+IGFudGhyb3A= 40316
+JykpDQo= 40317
+KS4i 40318
+Y3JpcHRpdmU= 40319
+IHNhbG1vbg== 40320
+IHdhc3Q= 40321
+cmhv 40322
+LnRpY2s= 40323
+IGV4cGxvcmVz 40324
+IEFsZ29yaXRobQ== 40325
+Q2hhckFycmF5 40326
+4LiE 40327
+X1BBQ0tFVA== 40328
+SkU= 40329
+Il1dOwo= 40330
+Lm5vdGU= 40331
+QmFja2luZw== 40332
+IEhvbGRlcg== 40333
+cmVpY2g= 40334
+IFppb24= 40335
+L2dy 40336
+ICAgICAgICAgICAgICAgICAgIAo= 40337
+TW90aW9u 40338
+IFRyaWJ1bmU= 40339
+IGNyaXRpY2FsbHk= 40340
+IENSTQ== 40341
+IGJsb3dpbmc= 40342
+IGNvbW1pc3Npb25lcg== 40343
+Sm9l 40344
+IFRlbGV2aXNpb24= 40345
+CXByZQ== 40346
+IFRSQU4= 40347
+IFZpa2luZ3M= 40348
+IEJFVA== 40349
+d291bGQ= 40350
+LkNhcHRpb24= 40351
+IGJhY29u 40352
+aG1h 40353
+bWVyZ2Vk 40354
+IHN1YnNjcmlwdGlvbnM= 40355
+b2NjdXBpZWQ= 40356
+TGl2ZURhdGE= 40357
+IGFsbG93YW5jZQ== 40358
+cmlnZXNpbWFs 40359
+ZGRk 40360
+LmxvZ291dA== 40361
+IFRhbmc= 40362
+IHdhcm10aA== 40363
+TW9kZWxJbmRleA== 40364
+IFByYQ== 40365
+IHNjZW50 40366
+IGhhY2tlcnM= 40367
+IGlsbHVzdHJhdGU= 40368
+SWNo 40369
+IGRpYXM= 40370
+Q0FTRQ== 40371
+IFNjaQ== 40372
+JHVybA== 40373
+IE1PRFVMRQ== 40374
+dXNob3J0 40375
+bGllcnM= 40376
+IERldmljZXM= 40377
+bWluc3Rlcg== 40378
+dW5hbWU= 40379
+IHVucg== 40380
+RXhhbXBsZXM= 40381
+IHJpc2Vu 40382
+LmFp 40383
+Y2hyb20= 40384
+X3dvcmtlcg== 40385
+IGFsaWFzZXM= 40386
+TW91c2VFdmVudA== 40387
+IHNldHRlcg== 40388
+IFB1cnBsZQ== 40389
+Sm9pbkNvbHVtbg== 40390
+PWU= 40391
+VEhPT0s= 40392
+IFRvdw== 40393
+IENydXNoaW5n 40394
+IEplZGk= 40395
+IEdyaWZmaW4= 40396
+IGtvcw== 40397
+X0ZT 40398
+aW5nZXM= 40399
+c29sZXM= 40400
+KG5hbWVz 40401
+IEJpZA== 40402
+LXBvd2VyZWQ= 40403
+TXVsdA== 40404
+YW1pbGlhcg== 40405
+LmNsZWFuZWQ= 40406
+IFppbW1lcg== 40407
+CWNsZWFy 40408
+IHVuc3VwcG9ydGVk 40409
+Q2FsbGFibGU= 40410
+IHJlcHM= 40411
+YWx0ZXJu 40412
+X1JFUE9SVA== 40413
+LmdldENvbHVtbkluZGV4 40414
+X1NUT1JF 40415
+IHN1Y2h0 40416
+c3VidGl0bGU= 40417
+IHBlcmQ= 40418
+q5g= 40419
+Lk5PVA== 40420
+fT48Lw== 40421
+OmQ= 40422
+bWRp 40423
+YmluZFZhbHVl 40424
+IERlY2lzaW9u 40425
+UmV0dXJuVmFsdWU= 40426
+LGluZGV4 40427
+eGZj 40428
+IHNlcnVt 40429
+Z2V0RmllbGQ= 40430
+Q29ubmVjdGlvblN0cmluZw== 40431
+LW9iamVjdA== 40432
+LnJlY3Y= 40433
+IHVuZGVyZ3JhZHVhdGU= 40434
+LkluZnJhc3RydWN0dXJl 40435
+IEthYg== 40436
+IGFkdmlzb3J5 40437
+LXRyZWU= 40438
+IG11ZQ== 40439
+aW5mb3Jt 40440
+LmVtYmVk 40441
+IGVycm9yQ29kZQ== 40442
+bWljcm8= 40443
+IHNwYXJrZWQ= 40444
+IGltYWdlcnk= 40445
+Y29uYw== 40446
+X21pc3Npbmc= 40447
+IHN1cnBsdXM= 40448
+S1M= 40449
+CVJUSE9PSw== 40450
+VGVsbA== 40451
+cml1bQ== 40452
+IFJhZGl1cw== 40453
+cmlrYQ== 40454
+bG9zaW9u 40455
+IEhlcm4= 40456
+R2FtbWE= 40457
+IEZlZQ== 40458
+IE5hbWVk 40459
+IENhbnlvbg== 40460
+IEpTT05BcnJheQ== 40461
+IHp3ZWk= 40462
+IFNTSA== 40463
+IHNlcnZhbnQ= 40464
+Y29hbA== 40465
+IGRlbnlpbmc= 40466
+IHNwbGl0cw== 40467
+SW5jb3JyZWN0 40468
+IHRveA== 40469
+IEFuYWx5c3Q= 40470
+IGFjY3JlZA== 40471
+dWJsZQ== 40472
+IHd0 40473
+IFRyaWFs 40474
+LmV4dGVuc2lvbg== 40475
+IENhcmVlcg== 40476
+IHNlY3VyaW5n 40477
+IExpbA== 40478
+IHByb2plY3Rpb25z 40479
+IHllYXN0 40480
+TWFkZQ== 40481
+IGZvdW5kYXRpb25z 40482
+YWNpZmlj 40483
+LnZvbHVtZQ== 40484
+IG1pcnJvcnM= 40485
+IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyM= 40486
+IHZpb2xhdGU= 40487
+YXJzZXJz 40488
+IHNvY2lv 40489
+IHRraW50ZXI= 40490
+IExJTks= 40491
+LmdldFNpemU= 40492
+IFdob2xl 40493
+KXZpZXdEaWRMb2Fk 40494
+CWRvbmU= 40495
+dWRlYXU= 40496
+XCI+PC8= 40497
+QW5kcmV3 40498
+ZXJi 40499
+IGbDtg== 40500
+LmNsdXN0ZXI= 40501
+IGRpc2NvdXJzZQ== 40502
+X0RFRklO 40503
+IHB1ZWRlbg== 40504
+IExPVw== 40505
+LmF2 40506
+IHByZWNh 40507
+IHF1bw== 40508
+IHZlbG9j 40509
+LCcn 40510
+IHh5eg== 40511
+CXBhZGRpbmc= 40512
+IHRvbWF0b2Vz 40513
+IEJlbnQ= 40514
+X2N1cnI= 40515
+TlNEYXRl 40516
+IGdldEN1cnJlbnQ= 40517
+IFtg 40518
+V2VkbmVzZGF5 40519
+LkJhcg== 40520
+IFZvdXM= 40521
+aW56 40522
+IFF1aW5u 40523
+ZXhjZWw= 40524
+ZG9z 40525
+IG91dGRhdGVk 40526
+T1VUSA== 40527
+IE1ha2Vy 40528
+ZXBlbmRlbmN5 40529
+IGR1bGw= 40530
+IFdpbm4= 40531
+b2dl 40532
+Y2xhdmU= 40533
+IG5vdmE= 40534
+IGF2YWw= 40535
+Q2FwdA== 40536
+IFNwb3RpZnk= 40537
+IGp1bA== 40538
+KXRhYmxlVmlldw== 40539
+IGZpbGVuYW1lcw== 40540
+IGVza29ydA== 40541
+5ZGo 40542
+IHNrZXc= 40543
+dGVyaW9y 40544
+IGZpbmFuYw== 40545
+IHRhYmxh 40546
+IFVJQg== 40547
+ICgpOg== 40548
+IERvY2tlcg== 40549
+cGVyY2VudGFnZQ== 40550
+TWVldA== 40551
+aWNoaQ== 40552
+IGludGVyaW0= 40553
+ICc9Jw== 40554
+LkpTT05PYmplY3Q= 40555
+KGZpZA== 40556
+IGRvd250 40557
+IHRyYW5zaWVudA== 40558
+IFN0ZXBo 40559
+IGlnbm9yYW5jZQ== 40560
+IENvZGVz 40561
+PScnLA== 40562
+IElDRQ== 40563
+IHRyYW5xdQ== 40564
+IEV4dGVuZGVk 40565
+IG11bmQ= 40566
+IEhPTUU= 40567
+IGtpbG9tZXRlcnM= 40568
+IGltYWdlbg== 40569
+b3V4 40570
+KHN6 40571
+WW91bmc= 40572
+dWZmZWQ= 40573
+IFdha2U= 40574
+IGFpZGU= 40575
+UFJPQw== 40576
+IFJhdA== 40577
+IExpdGg= 40578
+YmFydA== 40579
+IEFycmFuZ2U= 40580
+cHJvbXB0 40581
+0KM= 40582
+KGN0 40583
+IEludGVydmFs 40584
+ZGVwdA== 40585
+RGFuaWVs 40586
+IGZpbGxz 40587
+LnRlbnNvcg== 40588
+KHRyaW0= 40589
+IGplYWxvdXM= 40590
+RmVi 40591
+XENvbW1vbg== 40592
+IGFtZW5kbWVudHM= 40593
+X29wZXJhdG9y 40594
+X2N1c3RvbWl6ZQ== 40595
+IF1d 40596
+IGJu 40597
+IGRpc2FwcG9pbnRtZW50 40598
+IG1pbGxlbm4= 40599
+LndoZW4= 40600
+IG9iZXk= 40601
+IG9mZmVuZGVycw== 40602
+V2lsZA== 40603
+IGNlbGxGb3I= 40604
+IGFwcGFyYXR1cw== 40605
+LmFmdGVy 40606
+IEVQUw== 40607
+IGFkb3JhYmxl 40608
+b3BlcmFuZA== 40609
+KGxpc3RlbmVy 40610
+dmVhbA== 40611
+ICko 40612
+IGNhcmRpb3Zhc2N1bGFy 40613
+dXBsaWNhdGVz 40614
+cmlzdG9s 40615
+IHJlZnVzZXM= 40616
+KFFXaWRnZXQ= 40617
+IGVsZW1lbnRv 40618
+TnVtYmVyT2Y= 40619
+LmRlbGF5 40620
+Lmdyb3Vwcw== 40621
+Ij4nKw== 40622
+5Z2A 40623
+YWNlbmN5 40624
+KFVSTA== 40625
+X2hhbGY= 40626
+PWw= 40627
+IGxpc3RWaWV3 40628
+KHNlY3Rpb24= 40629
+LnRvQXJyYXk= 40630
+Ky8= 40631
+IFJvZHJpZ3Vleg== 40632
+aXN0cmVhbQ== 40633
+IGVsaWdpYmlsaXR5 40634
+Ojot 40635
+Lm5ld0luc3RhbmNl 40636
+UEI= 40637
+IEFzc2V0cw== 40638
+IENvbXBvc2l0ZQ== 40639
+IExhYnM= 40640
+IEhhbWFz 40641
+KyspOwo= 40642
+IGJsaw== 40643
+IE5lbw== 40644
+THVj 40645
+QGxvZ2lu 40646
+IHVuYXdhcmU= 40647
+Lm1ldA== 40648
+X1JFTEVBU0U= 40649
+KFNU 40650
+QU1JTA== 40651
+cmlrZQ== 40652
+ICgpewo= 40653
+KHNwcmludGY= 40654
+IEFjY291bnRz 40655
+IFZJRVc= 40656
+IEFq 40657
+44Kw 40658
+IHdoaXNr 40659
+IGlkaQ== 40660
+IHJvZGU= 40661
+IGlobg== 40662
+IEVsZW1lbnRhcnk= 40663
+UXR5 40664
+IGludHJpZ3Vpbmc= 40665
+IOWk 40666
+Sm9icw== 40667
+CW9mZnNldA== 40668
+IEFobWVk 40669
+IFRhbGliYW4= 40670
+IOiOt+WPlg== 40671
+IGluamVjdGVk 40672
+LkF1dGhlbnRpY2F0aW9u 40673
+X2xpbmVhcg== 40674
+LkRlY2ltYWw= 40675
+IGFwcGxlcw== 40676
+IHNoYXJlaG9sZGVycw== 40677
+IGJha2Vk 40678
+LmRpZmY= 40679
+IEVkZGll 40680
+b2tlcnM= 40681
+IGNvbmZyb250ZWQ= 40682
+dm9pY2Vz 40683
+IHR1cw== 40684
+IFNwaW4= 40685
+Tk9ERQ== 40686
+X1Vu 40687
+Q1RY 40688
+L2dvb2dsZQ== 40689
+VGVtcGVyYXR1cmU= 40690
+ICcnKS4= 40691
+IG1hZ25pZmljZW50 40692
+IHN0YXJ0SW5kZXg= 40693
+c2VtYmxlcw== 40694
+QW55b25l 40695
+ems= 40696
+ZWhlbg== 40697
+IERhbWU= 40698
+LnN0cmljdA== 40699
+IHJlcGxhY2Vz 40700
+IGxpbmViYWNr 40701
+IHB1c2hlcw== 40702
+IGNoZWVr 40703
+IFNoaQ== 40704
+X0JZVEVT 40705
+UkVB 40706
+4bqjbg== 40707
+X0NPTk5FQ1RJT04= 40708
+R2F0ZXdheQ== 40709
+IFRyYXZpcw== 40710
+IEFY 40711
+IEJhc2ljYWxseQ== 40712
+IFVwZ3JhZGU= 40713
+4Ko= 40714
+dGhlbWVz 40715
+ZXJtbw== 40716
+a29y 40717
+RmVtYWxl 40718
+X2F0dGFjaA== 40719
+IOyCrOyaqQ== 40720
+IHBveg== 40721
+PT09PT09PT09PT09PT0K 40722
+KHN5bWJvbA== 40723
+IFNlY3Rvcg== 40724
+X18pCgo= 40725
+X3BhZGRpbmc= 40726
+77yaIg== 40727
+IGZhYnM= 40728
+IHJhbmdlZA== 40729
+c2V0TmFtZQ== 40730
+IHBlcnJvcg== 40731
+4pc= 40732
+IEZpbGVSZWFkZXI= 40733
+IGZ1bGZpbGxlZA== 40734
+X0N1cnJlbnQ= 40735
+IGRvbWluYXRl 40736
+IHNtdWdn 40737
+UG9zdE1hcHBpbmc= 40738
+X2ZvcmNl 40739
+IGJsb2M= 40740
+IEdpYW50 40741
+KHZpZGVv 40742
+IENV 40743
+U3lzdGVtU2VydmljZQ== 40744
+IGVsZg== 40745
+IGtvbnRha3Q= 40746
+66o= 40747
+a2Vlcw== 40748
+Z3Rr 40749
+IHBhcmFtSW50 40750
+IG1hcmt1cA== 40751
+dWFsZXM= 40752
+IGFjY291bnRlZA== 40753
+IGdhbmdiYW5n 40754
+UllQVA== 40755
+IFdyb25n 40756
+IGNyZWRpdGVk 40757
+IE1FU1NBR0U= 40758
+IGZsYXdz 40759
+IGJidw== 40760
+IG1ldGFib2xpYw== 40761
+IE9FTQ== 40762
+L2V2ZW50 40763
+KENvbGxlY3RvcnM= 40764
+bW9udG9u 40765
+YXBwZWFy 40766
+IG9wdGVk 40767
+IGNoZWF0 40768
+IGRhdg== 40769
+IFByb2NlZWQ= 40770
+IOq4 40771
+YW5rZWQ= 40772
+0LjQtw== 40773
+YW5zaw== 40774
+IEhhbmc= 40775
+IENsZXI= 40776
+IGRpc2d1 40777
+IGNtYXA= 40778
+LmNsanM= 40779
+IGF1bWVudA== 40780
+bGV6 40781
+IEpvaW5lZA== 40782
+X3JlY2VpdmVk 40783
+IGFlcmlhbA== 40784
+b3RlbA== 40785
+IGdyZWV0 40786
+InM= 40787
+IEdlbmVzaXM= 40788
+IENhbGlm 40789
+cGFuaW9u 40790
+IHRhaWxvcmVk 40791
+bWFwcGluZw== 40792
+YW5kRXhwZWN0 40793
+LnRyYWNr 40794
+YXRvbXk= 40795
+IE93 40796
+dWxsYWg= 40797
+Llllcw== 40798
+IFNpbXBsZU5hbWU= 40799
+ZGJo 40800
+J2Vu 40801
+IG5vbnNlbnNl 40802
+IHBoaWxvc29waGljYWw= 40803
+KGdldENvbnRleHQ= 40804
+IGlzc28= 40805
+IEFDRQ== 40806
+c3RhcnREYXRl 40807
+IGLEmWQ= 40808
+IEFVVEhPUg== 40809
+IEdsb2Jl 40810
+IGluc2VjdHM= 40811
+X0Fs 40812
+dXNoaW5n 40813
+6K6w 40814
+L0hvbWU= 40815
+IExvY2FsRGF0ZQ== 40816
+bmVlZGVk 40817
+aGVzaXZl 40818
+IGlsbHVzaW9u 40819
+5LqM 40820
+IHRyYXQ= 40821
+eG8= 40822
+L2RldGFpbA== 40823
+X01BVENI 40824
+IGJyb2FkYmFuZA== 40825
+IHdhbA== 40826
+IElsbGVnYWxTdGF0ZUV4Y2VwdGlvbg== 40827
+SVJFQ1RJT04= 40828
+IG5vcnRoZWFzdA== 40829
+ZXNpdW0= 40830
+IENsaWVudGU= 40831
+dWxhbmNl 40832
+bnR5 40833
+IHRlY24= 40834
+RGV2aWNlcw== 40835
+IGdyYWlucw== 40836
+IE9n 40837
+IFNFTA== 40838
+dWRpYW50 40839
+ICsrOwo= 40840
+IGV4cGxhbmF0aW9ucw== 40841
+b2Njbw== 40842
+IGRpZXRz 40843
+IGNvaG9ydA== 40844
+KGNvbnRyb2xsZXI= 40845
+Lkl0ZXJhdG9y 40846
+LXJpY2g= 40847
+cm9jZXNz 40848
+R0Q= 40849
+IGNhcmJvaHlkcg== 40850
+IGZyaWVk 40851
+IEVtcGxveW1lbnQ= 40852
+7J6l 40853
+IExlb25hcmQ= 40854
+XyR7 40855
+cXVhcmVz 40856
+IGNvbXBhbmlvbnM= 40857
+IHBhcmlz 40858
+IHN0aW11bGF0aW9u 40859
+IFpvbw== 40860
+IHJlbGV2YW5jZQ== 40861
+IENvbG91cg== 40862
+IHNwZWFy 40863
+b3Rpb25hbA== 40864
+IExpdGU= 40865
+IEtvc3Rlbg== 40866
+IMOz 40867
+X2F0dGFjaG1lbnQ= 40868
+b3JwaGlj 40869
+IGRhbWl0 40870
+IGRsZw== 40871
+IHRocml2ZQ== 40872
+Q0hBTkdF 40873
+IEFwcGFyZW50bHk= 40874
+IGF0dWFs 40875
+IHJvb3RlZA== 40876
+KGltYWdlcw== 40877
+YXdp 40878
+YXJpYXQ= 40879
+IGNoZXJyeQ== 40880
+U1RBVElD 40881
+bW50 40882
+IFVzZXJJZA== 40883
+aWxsZXQ= 40884
+IEhpc3Bhbmlj 40885
+IG5haw== 40886
+IGNlbnRybw== 40887
+IGRpbXM= 40888
+X2luaXRpYWxpemU= 40889
+xLFr 40890
+IENlbnRlcnM= 40891
+UkVO 40892
+IGV2b2x1dGlvbmFyeQ== 40893
+IFRvcGljcw== 40894
+X2RhbWFnZQ== 40895
+ZW1lcg== 40896
+IHJ1bmQ= 40897
+IHB1bmlzaGVk 40898
+IGN1Ymlj 40899
+ZmFpcg== 40900
+W107Cgo= 40901
+IGluc3RhbnRpYXRl 40902
+IG92ZXJzZWU= 40903
+LWRlbGV0ZQ== 40904
+dW50ZWVy 40905
+c3RhcnRUaW1l 40906
+IFBpcGVsaW5l 40907
+X0dBTUU= 40908
+IENpcg== 40909
+CU51bGw= 40910
+LkZvcm1hdHRpbmc= 40911
+dWN1bWJlcg== 40912
+IFJpZGU= 40913
+IHpvbw== 40914
+IGNoZWNrZXI= 40915
+5ZCM 40916
+PUM= 40917
+IGdyaXQ= 40918
+Iik7Ly8= 40919
+X3h5 40920
+IERlY2xhcmF0aW9u 40921
+IGNhbGxhYmxl 40922
+Rm9v 40923
+IExpc3RJdGVt 40924
+IGluYWNjdXI= 40925
+bWxpbg== 40926
+CURhdGE= 40927
+IGV2b2x2aW5n 40928
+YXdhbg== 40929
+IGNhZmU= 40930
+Zm9saw== 40931
+X0lEWA== 40932
+IEFueXRoaW5n 40933
+IFBhbGVzdGluZQ== 40934
+IEdyaWRWaWV3 40935
+IGNvbG9ueQ== 40936
+IEdlcm1hbnM= 40937
+KCs= 40938
+LnBpZA== 40939
+LmpzeA== 40940
+IFN1cGVyaW9y 40941
+Q2hyaXN0aWFu 40942
+IExlY3Q= 40943
+CUdhbWU= 40944
+IGluc3RydW1lbnRhbA== 40945
+QW5pbWF0aW9ucw== 40946
+0LTQsNC7 40947
+IE1vc2Vz 40948
+CQkNCgkJDQo= 40949
+enM= 40950
+a3Rl 40951
+5Lia 40952
+X0RJU1Q= 40953
+Yml0bWFw 40954
+ZEI= 40955
+IHBlcnNpc3RlbmNl 40956
+0YDQvtGB 40957
+JGw= 40958
+QnJvbg== 40959
+IHt8 40960
+X2NoYXJ0 40961
+IENvbnN1bQ== 40962
+IGhlbXA= 40963
+ICIpKQo= 40964
+IGF0dGFja2Vycw== 40965
+IGtub3dsZWRnZWFibGU= 40966
+IGNldA== 40967
+IHZpcnVzZXM= 40968
+J0k= 40969
+IHBpdGNoZXI= 40970
+IHN3ZWVwaW5n 40971
+PWxpc3Q= 40972
+YXB0b3Bz 40973
+LmRlcHRo 40974
+IGluc3RydWN0ZWQ= 40975
+IFJ1cw== 40976
+YmVuaGF2bg== 40977
+INC40L0= 40978
+U3BvcnRz 40979
+IG9uc2V0 40980
+5p2D 40981
+LlJFRA== 40982
+X3Np 40983
+IFBTVA== 40984
+Lm9uQ2hhbmdl 40985
+PnRhZw== 40986
+IFJvaA== 40987
+X2NoYXJhY3Rlcg== 40988
+IExhd3M= 40989
+IEJhY2hlbG9y 40990
+X3N3YXA= 40991
+LnJlYWN0aXZleA== 40992
+IHJld2FyZGluZw== 40993
+TWVkaXVt 40994
+LVs= 40995
+IFJlY2VudGx5 40996
+Sm9pbnQ= 40997
+cGFydGl0aW9u 40998
+IE1pbnV0ZXM= 40999
+IGluZG8= 41000
+IGFic29yYmVk 41001
+IEdO 41002
+X0lORA== 41003
+IHNhYmVy 41004
+U3Bhd24= 41005
+b3V0cHV0cw== 41006
+IEplZmZyZXk= 41007
+IG1lZGlldmFs 41008
+aGVk 41009
+R3VpZGU= 41010
+IHBzeWNobw== 41011
+IGdsYW0= 41012
+RWxpbQ== 41013
+w6RkY2hlbg== 41014
+X3BsYWlu 41015
+IFNhdQ== 41016
+LWZvdXI= 41017
+IGFuYWx5emluZw== 41018
+UVVFUlk= 41019
+IHRvbWF0bw== 41020
+X2J1dHRvbnM= 41021
+VkVO 41022
+LnNldFN0YXR1cw== 41023
+LlVybA== 41024
+KwoK 41025
+IGNvbXBsYWluaW5n 41026
+ZGVncmVl 41027
+Y29uZmlybWVk 41028
+IHN1YnQ= 41029
+cGFyc2Vk 41030
+IHRvcnF1ZQ== 41031
+IHRyb3VibGVk 41032
+IFRBUkdFVA== 41033
+IHRyYWRlbWFya3M= 41034
+IENvb3JkaW5hdGU= 41035
+IFZpdg== 41036
+IC8vfQoK 41037
+IGFwcsOocw== 41038
+LmdldFBvc2l0aW9u 41039
+KEtleUNvZGU= 41040
+IFNpbHZh 41041
+IG1ldGVvcg== 41042
+IGVuZG9yc2VtZW50 41043
+T3ZlcnZpZXc= 41044
+IFBvc3M= 41045
+LkluamVjdA== 41046
+IGV2ZW5seQ== 41047
+IHZpc3VhbGl6YXRpb24= 41048
+IHdjaGFy 41049
+IEhETUk= 41050
+IGZ1bmN0 41051
+aWNrbmFtZQ== 41052
+JywnJywn 41053
+IGZvcndhcmRz 41054
+TWFuYWdlZE9iamVjdA== 41055
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA= 41056
+CXNlcnZlcg== 41057
+IE91dGxvb2s= 41058
+IENocm9uaWNsZQ== 41059
+IGR1YmJlZA== 41060
+IGRvaw== 41061
+IFdlYXI= 41062
+LkFM 41063
+cGFyZW4= 41064
+LkludGVyZmFjZQ== 41065
+SW50ZXJmYWNlcw== 41066
+LmNvZA== 41067
+IGRpYg== 41068
+Lkdsb2JhbGl6YXRpb24= 41069
+IEFjYWRlbWlj 41070
+IGFzc21z 41071
+QXV0b20= 41072
+IGx3 41073
+IE5X 41074
+ICYmDQo= 41075
+IHByb2JsZW1h 41076
+IE1hbnVmYWN0dXJpbmc= 41077
+bGltaXRz 41078
+LW1vYmlsZQ== 41079
+IGZpbG1l 41080
+L21hcA== 41081
+IGRvaXQ= 41082
+IEluaw== 41083
+IHN1ZWQ= 41084
+LmFycg== 41085
+IHVuZGVybWlu 41086
+IFByb2M= 41087
+Y3JvbGxWaWV3 41088
+X18k 41089
+IHNpZGV3YWxr 41090
+KHRoYXQ= 41091
+4Li3 41092
+W3E= 41093
+Z3JhbW1hcg== 41094
+IHTDqw== 41095
+cXVpdG8= 41096
+IHNwaXJhbA== 41097
+ZXh0ZW5kZWQ= 41098
+IGZvY2Fs 41099
+IGRpZ2dpbmc= 41100
+cGFz 41101
+IFRhbGw= 41102
+LnByb3h5 41103
+aXR1cmVz 41104
+VFJBQ1Q= 41105
+IFJlYWxt 41106
+IGZlZGVy 41107
+IG9yaWVudGVk 41108
+IEFsdGVybmF0aXZl 41109
+IG93ZQ== 41110
+IHNvdXJjZWQ= 41111
+aW5rZXI= 41112
+LmRldA== 41113
+U2Vw 41114
+IFF1aQ== 41115
+IFBhbG1lcg== 41116
+KF8s 41117
+c2FtcGxlcw== 41118
+b3llcg== 41119
+dWxsYW4= 41120
+cXVleg== 41121
+RWRnZXM= 41122
+IHNob3V0 41123
+IEFjaGll 41124
+IGhhYXI= 41125
+X0NvbnN0cnVjdA== 41126
+IHByZW1hdHVyZQ== 41127
+IHJldmVydA== 41128
+JykuCg== 41129
+IHNjaG4= 41130
+ZmlsdGVyZWQ= 41131
+bnVsbHB0cg== 41132
+U2F2ZWQ= 41133
+aXRlY3R1cmU= 41134
+Q0xB 41135
+IHZs 41136
+c3RlbGw= 41137
+CU1l 41138
+IExpcA== 41139
+bmF0aW9uYWw= 41140
+IHdob2xseQ== 41141
+IHNwcmluZ3M= 41142
+LlRpbWVy 41143
+CXNyYw== 41144
+ZWxzZW4= 41145
+5YW2 41146
+IGNvbW11bmljYXRpbmc= 41147
+IFF1aXo= 41148
+IHRlbmc= 41149
+IGdleg== 41150
+IE91dHNpZGU= 41151
+LlNpZ24= 41152
+KGNz 41153
+IGRpc3B1dGVz 41154
+IFdlaXNz 41155
+YW5uZXM= 41156
+Pk5v 41157
+IEJhY2g= 41158
+LnJlbW92ZUFsbA== 41159
+cmVmZXI= 41160
+L2Rhc2hib2FyZA== 41161
+IEFqYXg= 41162
+SW5kZXhDaGFuZ2Vk 41163
+IFdlYWs= 41164
+JyIK 41165
+IHNpZ2h0cw== 41166
+YWNjZXNzVG9rZW4= 41167
+IEpvaQ== 41168
+KGRvbWFpbg== 41169
+CWN2 41170
+IGNvbnRpbnVhdGlvbg== 41171
+IHBsdW0= 41172
+YWRpcg== 41173
+LnNldE1lc3NhZ2U= 41174
+IO+8jA== 41175
+IHN3YWxsb3c= 41176
+IExhbXA= 41177
+IHF3 41178
+IHV1 41179
+Q29pbg== 41180
+dWJpYw== 41181
+IERlYWxz 41182
+cmFjZQ== 41183
+IGRpY3RhdG9y 41184
+IG1lbWU= 41185
+dHVybmVk 41186
+IEp1bGll 41187
+LmdyaWRDb2x1bW4= 41188
+IHB1cHB5 41189
+IHBhbQ== 41190
+ICl7DQo= 41191
+IGludml0aW5n 41192
+IGZyZW5jaA== 41193
+dmlt 41194
+IHdyYXBwaW5n 41195
+ICMtfQo= 41196
+KFst 41197
+RWFybHk= 41198
+IHNoaW55 41199
+LmZhY2Vz 41200
+IHJlYmVsbA== 41201
+YWJjZGVm 41202
+w6RsdA== 41203
+IGVzdGltYXRpb24= 41204
+cGh5cw== 41205
+bG9zdXJlcw== 41206
+X1JFTA== 41207
+IGV4Y2x1c2lvbg== 41208
+IFNreXBl 41209
+d2Vpc2U= 41210
+LXN0b3A= 41211
+bm90aGluZw== 41212
+IEVnZw== 41213
+aXNvcnM= 41214
+UmljaGFyZA== 41215
+IGNvdW5zZWxpbmc= 41216
+IGNvbW1lbQ== 41217
+IFFNZXNzYWdlQm94 41218
+IFN5bmQ= 41219
+IEZyb3N0 41220
+IENvbXBldGl0aW9u 41221
+IEF3YWtl 41222
+IHRlZA== 41223
+aWNpb25lcw== 41224
+IERldkNvbXBvbmVudHM= 41225
+VkVSVElTRU1FTlQ= 41226
+b3R0aQ== 41227
+LnJ1bm5lcg== 41228
+IHVuaXF1ZWx5 41229
+LmZsYWc= 41230
+CXJz 41231
+X2dlbmVyaWM= 41232
+IGBgYAo= 41233
+QUNISU5F 41234
+IG1laW4= 41235
+KEFwcGxpY2F0aW9u 41236
+KGJy 41237
+IHJhdGlvcw== 41238
+Oiw= 41239
+IFhDVGVzdA== 41240
+dXN0YWluYWJsZQ== 41241
+LXd3dw== 41242
+aXRsZXM= 41243
+X1RFTVA= 41244
+IHN5c3Q= 41245
+dW1lcmljVXBEb3du 41246
+CWFzc2VydFRydWU= 41247
+IHdm 41248
+LnBlZWs= 41249
+IEJ1bGc= 41250
+IHRlcnJpZnlpbmc= 41251
+Lk1PREU= 41252
+IEdX 41253
+w6Fy 41254
+IGZpYw== 41255
+IGNvbW1pdG1lbnRz 41256
+LXRlY2g= 41257
+IExpcXVpZA== 41258
+b3Bleg== 41259
+emhlaW1lcg== 41260
+YcOxYQ== 41261
+LW1lZGlh 41262
+KGFuaW1hdGVk 41263
+X2dvYWw= 41264
+IGd1bQ== 41265
+eXN0b25l 41266
+LlNFVA== 41267
+IFdlbmQ= 41268
+c2V0Q2VsbFZhbHVl 41269
+IG1zZ3M= 41270
+Y2FzaA== 41271
+QUxMT0M= 41272
+L2F3cw== 41273
+IG1pY3Jvd2F2ZQ== 41274
+LlBvaW50ZXI= 41275
+CUNvbnNvbGU= 41276
+X3NvcnRlZA== 41277
+IEZpbGlw 41278
+UHJvZA== 41279
+IC8vITw= 41280
+aW5ncm91cA== 41281
+IGtz 41282
+X1RSSQ== 41283
+IHRlYXNwb29u 41284
+IEFUVA== 41285
+IHJlY292ZXJpbmc= 41286
+IEdMT0JBTA== 41287
+LlBhcg== 41288
+IC8+Owo= 41289
+IG1hcmJsZQ== 41290
+dWxhdG9ycw== 41291
+IEN5Y2xl 41292
+IGhlcmJz 41293
+X21ldHJpYw== 41294
+KSE= 41295
+X0NMT0NL 41296
+X0J1dHRvbg== 41297
+SGFycnk= 41298
+6L+b 41299
+IHN0cmFpbnM= 41300
+IEFwcEJhcg== 41301
+IENoYW4= 41302
+L3ZpZGVv 41303
+IGJhbQ== 41304
+LlByb2dyZXNz 41305
+JGY= 41306
+bGVtZW4= 41307
+IGlycmVndWxhcg== 41308
+IER1bmNhbg== 41309
+IE1pbnQ= 41310
+LXZpZGVv 41311
+4Ka+ 41312
+w7N3bg== 41313
+IEVNUFRZ 41314
+IHN0YWNrZWQ= 41315
+IEhB 41316
+X2N1dA== 41317
+IHdoZXJlaW4= 41318
+IFdheXM= 41319
+KGNvdW50ZXI= 41320
+6K+V 41321
+Rm9ybUdyb3Vw 41322
+IGJsZXc= 41323
+Y291cnNlcw== 41324
+IHByb2R1Y3Rvcw== 41325
+cnlz 41326
+IFJlc3Ry 41327
+IHN0eWxpbmc= 41328
+PnM= 41329
+IHBpdg== 41330
+IGl0ZXJ0b29scw== 41331
+Z2V0UmVwb3NpdG9yeQ== 41332
+IElr 41333
+X2RldmljZXM= 41334
+bGF5dWk= 41335
+IGhhbGZ3YXk= 41336
+IGZyYW7Dpw== 41337
+IHR1bmluZw== 41338
+T0E= 41339
+X05vZGU= 41340
+YXJkZQ== 41341
+IGZpZXJjZQ== 41342
+bGljdGVk 41343
+Iw0K 41344
+IGJyZWFrdGhyb3VnaA== 41345
+IEVyaWs= 41346
+IGJyaWRl 41347
+IC4i 41348
+Y3VsdXM= 41349
+aW5zaWRl 41350
+IEluZGlhbmFwb2xpcw== 41351
+IEVF 41352
+IHlvZw== 41353
+dXJyZXQ= 41354
+LmZz 41355
+LmdyYWQ= 41356
+X2NhcmRz 41357
+X2FjY3VyYWN5 41358
+X2VwaQ== 41359
+cXVlZGE= 41360
+L29yZw== 41361
+6aqM 41362
+IGNvbXB0ZQ== 41363
+KSlb 41364
+T3V0c2lkZQ== 41365
+R3JlYXRlcg== 41366
+IFJlbmRlcmVy 41367
+LmFjdG9y 41368
+QWNjb3VudHM= 41369
+SWRsZQ== 41370
+X2hvdXJz 41371
+ZXJuZXI= 41372
+Sm9pbmVk 41373
+IG1lbmo= 41374
+cmVxdWlyZXM= 41375
+IE9QRVI= 41376
+LnJlbW92ZUNoaWxk 41377
+CXNw 41378
+IGVzc2U= 41379
+cmlmdA== 41380
+eEZF 41381
+IFNoYWtlc3BlYXJl 41382
+X19fX19fX19fX19f 41383
+IGJ1ZGdldHM= 41384
+TW9kZWxTdGF0ZQ== 41385
+ZmlsbGFibGU= 41386
+LWNvbXBvbmVudA== 41387
+b2Nvcw== 41388
+IEJVVFRPTg== 41389
+L2lv 41390
+LG91dA== 41391
+c21z 41392
+VGhvbWFz 41393
+IEFybWVk 41394
+cmVzdW1l 41395
+IHJvdGF0aW5n 41396
+IFZhdWx0 41397
+IHNldXM= 41398
+Ligq 41399
+IGFtaW5v 41400
+IFtdKTsKCg== 41401
+IHByb3ZvYw== 41402
+bm94 41403
+LkdldEVudW1lcmF0b3I= 41404
+PT09PT09PQo= 41405
+5paZ 41406
+X3Njcm9sbA== 41407
+IGZpbG1lZA== 41408
+IFNvY2k= 41409
+Z2Fw 41410
+Z3Jv 41411
+Vm90ZQ== 41412
+IkJ1dA== 41413
+X1JD 41414
+QW5pbWFs 41415
+woA= 41416
+aWJpbGU= 41417
+IGF3YWtlbg== 41418
+b3Jlc3Q= 41419
+aW5qYQ== 41420
+IEl2YW4= 41421
+KENvbW1hbmQ= 41422
+ICoqKioq 41423
+zrc= 41424
+IGt2aW5kZXI= 41425
+L2hlbHBlcnM= 41426
+X2Nhc2Vz 41427
+dGc= 41428
+7IS4 41429
+UmVnaXN0ZXJlZA== 41430
+CXBhc3M= 41431
+X2RpZ2l0cw== 41432
+IGNvbnRvdXI= 41433
+IGluZmFudHM= 41434
+IGp1c3RpZmljYXRpb24= 41435
+IEZvcnR1bmF0ZWx5 41436
+Q29udHI= 41437
+IG9uQ3JlYXRlVmlldw== 41438
+X1NBTVBMRQ== 41439
+IGFsbG93TnVsbA== 41440
+IG51ZA== 41441
+IGZldGNoZWQ= 41442
+X2VxdQ== 41443
+IFVuYWJsZQ== 41444
+PVwiIg== 41445
+PnsK 41446
+IGNvbW1pdHRlZXM= 41447
+aXN0ZW1h 41448
+KyIu 41449
+w61hbg== 41450
+bWFudA== 41451
+IHNvdXRoZWFzdA== 41452
+77yMCg== 41453
+ZGlhbG9ncw== 41454
+UFJPSkVDVA== 41455
+Y2hhcmdlcg== 41456
+LXBvcnQ= 41457
+KHV1aWQ= 41458
+LmV4cG9ydA== 41459
+U2l4 41460
+IFJQ 41461
+UHJlbQ== 41462
+IGNvbnNjaWVuY2U= 41463
+IG1hcmdpblJpZ2h0 41464
+X2Rpc3RyaWJ1dGlvbg== 41465
+eWFtbA== 41466
+cmVzaXppbmc= 41467
+RG9jaw== 41468
+IExvY2F0aW9ucw== 41469
+R1k= 41470
+U2VlZA== 41471
+QlVGRkVS 41472
+b3NzaXA= 41473
+dWxsZW4= 41474
+VGhpbmdz 41475
+LXNlbGY= 41476
+LnBvbGw= 41477
+UExBWUVS 41478
+IOWu 41479
+R1JPVVA= 41480
+IEF3YXk= 41481
+IGdvc3BlbA== 41482
+eGZk 41483
+TWFyeQ== 41484
+IFBvcnRhYmxl 41485
+VFVSRQ== 41486
+IHV0aWxpcw== 41487
+IHNlaXQ= 41488
+IHN0cmFuZA== 41489
+IHRyYW5zYw== 41490
+IChe 41491
+IEFsZnJlZA== 41492
+Lm1lbQ== 41493
+LmNpcmNsZQ== 41494
+IH4v 41495
+Zm9yY2luZw== 41496
+IHJpb3Q= 41497
+cHJveA== 41498
+VEhPTg== 41499
+aXphY2nDs24= 41500
+IE5J 41501
+cm9zdA== 41502
+IGRpc3Bybw== 41503
+X2luc3RhbmNlcw== 41504
+77yM4oCc 41505
+b2dyYXBoZXI= 41506
+ZW5kYXM= 41507
+IElzYWFj 41508
+IFBpbmU= 41509
+L2Rpcw== 41510
+IGNvbG9yV2l0aA== 41511
+aXRlcmF0ZQ== 41512
+X3N0cmlkZQ== 41513
+IHB1bnRv 41514
+LkV2ZW50QXJncw== 41515
+KGNlbnRlcg== 41516
+IG5laWdoYm9yaW5n 41517
+IFByaXNvbg== 41518
+IE1lc3Nlbmdlcg== 41519
+IGVwaWRlbWlj 41520
+ZGFv 41521
+X2NvbXBsZXg= 41522
+IGdyYXZlbA== 41523
+X0RJUA== 41524
+w6ltZW50 41525
+IEFyaQ== 41526
+X2JpdG1hcA== 41527
+LnF1aXQ= 41528
+KHZhbGlk 41529
+IHBlbmQ= 41530
+IHJlc3BpcmF0b3J5 41531
+IHJlYm91bmQ= 41532
+RGVmYXVsdFZhbHVl 41533
+44Ot 41534
+IGNvbW1pdHM= 41535
+LnRlc3Rz 41536
+X2Zy 41537
+aXRldA== 41538
+LnNm 41539
+IHNwYWNlY3JhZnQ= 41540
+Y3JpdGljYWw= 41541
+IGRlcHJlc3NlZA== 41542
+IEFueU9iamVjdA== 41543
+IHVuYg== 41544
+IGRpc2Nlcm4= 41545
+KG15c3Fs 41546
+TGF0aW4= 41547
+IEJvZw== 41548
+IFdpbGRsaWZl 41549
+VG9GaWxl 41550
+aW94aWQ= 41551
+QFJlc3RDb250cm9sbGVy 41552
+ICIkKA== 41553
+IDw8Ig== 41554
+IGRlZmVjdHM= 41555
+IGRhdHVt 41556
+aGlu 41557
+IHJlYWxpemFy 41558
+YW55YWh1 41559
+IFNpZw== 41560
+QERhdGE= 41561
+YWRhcHRpdmU= 41562
+IENhdGhlcmluZQ== 41563
+LmNy 41564
+IENPT0tJRQ== 41565
+IHBpY3R1cmVk 41566
+IEZpZ2h0ZXI= 41567
+UXVlcnlhYmxl 41568
+IEFueXdheQ== 41569
+IEdMRlc= 41570
+X25hbWVzcGFjZQ== 41571
+X2Z0 41572
+IF0p 41573
+T3JnYW5pemF0aW9u 41574
+IGNvbnN0aXR1dGVz 41575
+IHF1YW5k 41576
+KGNodW5r 41577
+Ii8+DQo= 41578
+IExha2Vz 41579
+bWFpbndpbmRvdw== 41580
+Q2FydGh5 41581
+c3Bpbg== 41582
+KGNzdg== 41583
+OnJlZA== 41584
+LWNvbW1lcmNl 41585
+4Li5 41586
+IGRpc2NvdmVyaW5n 41587
+IGVjbw== 41588
+X2ZhYw== 41589
+aW5jZXRvbg== 41590
+IEdyZWVucw== 41591
+and0 41592
+2LU= 41593
+IEJyb25jb3M= 41594
+IEdvb2Rz 41595
+KEdUSw== 41596
+IHJldHVyblZhbHVl 41597
+IHNpZW1wcmU= 41598
+IG5ldXRy 41599
+d2VudA== 41600
+IE5hdGFs 41601
+IGVudGh1c2lhc3RpYw== 41602
+4buN 41603
+Rk4= 41604
+L2RhdGFiYXNl 41605
+Q2F0YWxvZw== 41606
+IGJydW4= 41607
+IEthc2g= 41608
+X1Bs 41609
+aXNjcmlt 41610
+LHdpZHRo 41611
+IGlubWF0ZXM= 41612
+QXNzaWdubWVudA== 41613
+IEhhdmVu 41614
+IHBsYXlncm91bmQ= 41615
+ZXhhbQ== 41616
+QENvbnRyb2xsZXI= 41617
+dWxpYXI= 41618
+LmdldFBhcmVudA== 41619
+ICI7Cgo= 41620
+OnNpemU= 41621
+aXNzb3Jz 41622
+IGZpcw== 41623
+IGFsYw== 41624
+ZW5zYXRpb24= 41625
+IE5peG9u 41626
+IG1pZ2h0eQ== 41627
+LXN0cg== 41628
+X3NwZWNpYWw= 41629
+X0FEQw== 41630
+IFR3aWc= 41631
+dW1ibGluZw== 41632
+LWFkZHJlc3M= 41633
+IGhlcm9pbg== 41634
+WVRF 41635
+ICAgICAgICAgICAgICAgICAK 41636
+RnJpZW5k 41637
+IGF2ZQ== 41638
+IFBORw== 41639
+IEt1cmRpc2g= 41640
+RGF0YVNldENoYW5nZWQ= 41641
+IGJsYWRlcw== 41642
+YnJhbA== 41643
+U3RlYW0= 41644
+IHNpZ3U= 41645
+SVJUVUFM 41646
+YWNvcw== 41647
+VURQ 41648
+KGRhdGFiYXNl 41649
+aGVj 41650
+IFN0cmluZ3M= 41651
+X3NjYWxhcg== 41652
+CWRlc2M= 41653
+IFRMUw== 41654
+OyIK 41655
+IENvcmJ5bg== 41656
+U2ltcGxlTmFtZQ== 41657
+dWVsbA== 41658
+IEVudHJl 41659
+ZWxsaXRlcw== 41660
+LXBsYWNl 41661
+IGZyYW5rbHk= 41662
+IEVyZg== 41663
+Q0VM 41664
+IHBhw61z 41665
+IGhlZGdl 41666
+IGxhdGVudA== 41667
+IElSUQ== 41668
+IEhlcmFsZA== 41669
+IFByZWM= 41670
+67O0 41671
+LlRFWFQ= 41672
+U2FsYXJ5 41673
+IGF1dHVtbg== 41674
+IHRyYXZhaWw= 41675
+LlN1bQ== 41676
+IGNhcmVk 41677
+TW9y 41678
+IGludHVpdGl2ZQ== 41679
+IGpvdXJuYWxz 41680
+X0lU 41681
+IFRyb3U= 41682
+5Lyg 41683
+SGFzQ29sdW1uTmFtZQ== 41684
+Q29tcG9zaXRl 41685
+IHNwaWNl 41686
+X2Rpc2s= 41687
+X0NPREVT 41688
+IEludHJvZHVjZWQ= 41689
+aW9uYQ== 41690
+IG51ZXN0cmE= 41691
+b2N0 41692
+ICAgIAogICAgCiAgICAK 41693
+KHBhcmFtZXRlcg== 41694
+IHN0dWRpb3M= 41695
+IHByb2plY3RJZA== 41696
+IGJkc20= 41697
+LlNxbENsaWVudA== 41698
+aW1pemVy 41699
+IENBUkQ= 41700
+K3Q= 41701
+YWFu 41702
+LnNvbA== 41703
+X0FkanVzdA== 41704
+IHJpZ2h0ZW91cw== 41705
+IExvZ2dpbmc= 41706
+LmZpbHRlcnM= 41707
+X1RBQg== 41708
+CXN5cw== 41709
+cm9waGlj 41710
+b3RoZXJhcHk= 41711
+IEJyb3dzZQ== 41712
+a2V5Ym9hcmQ= 41713
+Uk9O 41714
+K1w= 41715
+cm9wcGVk 41716
+IGV4dGVuc2l2ZWx5 41717
+Zms= 41718
+IGxpbWU= 41719
+eWVhcnM= 41720
+RXhj 41721
+IHNwaA== 41722
+IGNoZWF0aW5n 41723
+YW5kcm8= 41724
+w61v 41725
+IHByaW5jZQ== 41726
+b2lyZQ== 41727
+IERlc3RpbmF0aW9u 41728
+IENvbnZlcnRz 41729
+IHVwc3RyZWFt 41730
+b2xlZA== 41731
+IHNlcnZhbnRz 41732
+IHNlbWFudGlj 41733
+IGNydW5jaA== 41734
+IGV2ZW50dWFs 41735
+cnVubmVy 41736
+L2Vycm9y 41737
+U3Bpbg== 41738
+IHNlY3JldGx5 41739
+IGFzc2VtYmxl 41740
+LlBlcnNvbg== 41741
+ZW5kZXJyb3I= 41742
+Xzw= 41743
+IHBlbmRhbnQ= 41744
+U2xlZXA= 41745
+IENoZW1pc3RyeQ== 41746
+IGJvc3Nlcw== 41747
+bGs= 41748
+KSkpLAo= 41749
+QmxvY2tseQ== 41750
+REVWSUNF 41751
+IHJlZmxlY3Rpbmc= 41752
+IGFtcGxl 41753
+TWlsbGlzZWNvbmRz 41754
+IFByZXNpZGVudGlhbA== 41755
+IHVzdWFyaW9z 41756
+IE5a 41757
+IFNhbGFyeQ== 41758
+IEFtYW5kYQ== 41759
+X25w 41760
+anVyeQ== 41761
+IGvDtm4= 41762
+IHRoZXJhcGlzdA== 41763
+IGhvbW9zZXh1YWw= 41764
+IERyYWtl 41765
+LXdpbmRvdw== 41766
+IExvY2F0ZWQ= 41767
+LkRyaXZlcg== 41768
+IFZJREVP 41769
+IG1lcmNoYW50cw== 41770
+IENoZXN0 41771
+LWxvY2s= 41772
+L3BocA== 41773
+IG1pbGFubw== 41774
+X1NUWUxF 41775
+YXJnZXI= 41776
+aWRlYQ== 41777
+R1VJRA== 41778
+YWR2YW5jZWQ= 41779
+bWVhbA== 41780
+T3B0aW9uc0l0ZW1TZWxlY3RlZA== 41781
+PScl 41782
+IENoYW0= 41783
+OmRhdGE= 41784
+KHN0YXQ= 41785
+V2lsbEFwcGVhcg== 41786
+IGluZm9ybWFs 41787
+YWpp 41788
+IHJlcHJvZHVjdGl2ZQ== 41789
+IENBUw== 41790
+44Gj 41791
+RlVOQw== 41792
+IFJ1dGg= 41793
+KSso 41794
+Q09OU1Q= 41795
+IEZhbnM= 41796
+IGdyb3VwSWQ= 41797
+eGZmZmZmZmZm 41798
+IHNhbXBsZXI= 41799
+IH19Ij4= 41800
+LnRoZQ== 41801
+IGhvbGxvdw== 41802
+V0FZ 41803
+IEZhY3VsdHk= 41804
+QXR0cmlidXRlZFN0cmluZw== 41805
+IExvb2tz 41806
+IFJleA== 41807
+ams= 41808
+IE1JTA== 41809
+IGJhcmQ= 41810
+Lkxvbmc= 41811
+IGxpdmVzdA== 41812
+IHNrYWw= 41813
+aWNpc20= 41814
+TUFJTg== 41815
+IG11Y2hv 41816
+Qk9EWQ== 41817
+IGVzZQ== 41818
+CXVzZQ== 41819
+Rm9vdA== 41820
+LlNRTEV4Y2VwdGlvbg== 41821
+IGluaGVyaXRhbmNl 41822
+cmVjZWl2ZWQ= 41823
+IHB1dGFz 41824
+ZWRpcw== 41825
+YWxzYQ== 41826
+IEVycm9yTWVzc2FnZQ== 41827
+Qm9va2luZw== 41828
+IHRyYWN0 41829
+YWN6 41830
+IENhbnQ= 41831
+X3JlZ2V4 41832
+IGlkZW9sb2dpY2Fs 41833
+IGppaGFk 41834
+aG9z 41835
+L3N5cw== 41836
+Y29sbQ== 41837
+KHBvb2w= 41838
+IGVzdMOhbg== 41839
+IFBlbmRpbmc= 41840
+ZW3DoXM= 41841
+IGt0w7NyeQ== 41842
+KSk7CgoK 41843
+dHJhbnNhY3Rpb25z 41844
+IHdpZWxk 41845
+aXRlcmU= 41846
+ZXJ0dXJl 41847
+X3Nz 41848
+IHN0cmV0Y2hpbmc= 41849
+IHByaXNvbmVy 41850
+LlJlYWRBbGw= 41851
+IGJlc2No 41852
+LS07DQo= 41853
+IGNyaXNw 41854
+X1NDQU4= 41855
+IGFl 41856
+U3RyaWN0 41857
+IE1pbm5lYXBvbGlz 41858
+IEJvZWluZw== 41859
+YXJpcw== 41860
+cmVr 41861
+X3BpcGU= 41862
+IHByaWVzdHM= 41863
+KEVJRg== 41864
+ZWhpY2xlcw== 41865
+IEludGVyYWN0aXZl 41866
+YmV0d2Vlbg== 41867
+CU51bGxDaGVjaw== 41868
+IEJsYWly 41869
+IEx0 41870
+X2lubGluZQ== 41871
+ZXRoeWw= 41872
+wrw= 41873
+X3BhY2thZ2Vz 41874
+IGJhcnJlbHM= 41875
+X2hl 41876
+IHJlZ2V4cA== 41877
+X3B0cw== 41878
+X0hhbmRsZXI= 41879
+aW5ndWxhcg== 41880
+IE5pc3Nhbg== 41881
+IFJhbmNo 41882
+IHBlcmNo 41883
+VW5zdXBwb3J0ZWQ= 41884
+U21pdGg= 41885
+IExlZ2VuZHM= 41886
+TWk= 41887
+IGdm 41888
+c3RlZGVy 41889
+IGFjcXVpcmluZw== 41890
+IHNpbXVsYXRvcg== 41891
+KCksIg== 41892
+cmVjZWl2ZQ== 41893
+IGlucGxhY2U= 41894
+QUNUSU9O 41895
+IFdlYkRyaXZlcg== 41896
+ZmlsZXN5c3RlbQ== 41897
+PE9yZGVy 41898
+bG9wZW4= 41899
+IEhFSUdIVA== 41900
+LnNldEJvcmRlcg== 41901
+jbA= 41902
+X19bIg== 41903
+IGNsYW1w 41904
+U2Vnb2U= 41905
+YmFuZHM= 41906
+dG9MaXN0 41907
+YW1iYQ== 41908
+PicrCg== 41909
+IGNyZWRpYmxl 41910
+YW1hdA== 41911
+cGxheWluZw== 41912
+LnNldEltYWdlUmVzb3VyY2U= 41913
+cXVlbA== 41914
+IHBvZHI= 41915
+Z2VvbQ== 41916
+RWs= 41917
+IFFhdGFy 41918
+IGdlbGQ= 41919
+PycsCg== 41920
+IGN5bA== 41921
+KGF4 41922
+IFdJ 41923
+dXJhbGx5 41924
+IEJyYXNpbA== 41925
+IHNlbnph 41926
+YWxleQ== 41927
+b25lbg== 41928
+IGJhaA== 41929
+IG1vbGVjdWxl 41930
+UmFk 41931
+6L+w 41932
+QU5DSA== 41933
+LWJhY2tncm91bmQ= 41934
+LWFnZW50 41935
+IHByb2xpZmVy 41936
+OmJvb2xlYW4= 41937
+IHRpZGU= 41938
+ZXJpYWxpemVy 41939
+XzsNCg== 41940
+RmVl 41941
+Kiop 41942
+ZXJneQ== 41943
+IEhvbm9y 41944
+LkxvZ2dpbmc= 41945
+aXJpcw== 41946
+IHVuZGVybWluZQ== 41947
+IER5 41948
+IHR5cg== 41949
+IGRlcXVl 41950
+IGRhbWVy 41951
+KFtdKQo= 41952
+LmxheW91dENvbnRyb2xJdGVt 41953
+cGVhdGVk 41954
+Q0FO 41955
+cmFnbWVudHM= 41956
+TGFuZA== 41957
+KV0pOwo= 41958
+IFNhaA== 41959
+IERFQ0w= 41960
+V2l0aGlu 41961
+IE5hbWVzcGFjZQ== 41962
+YW5vdGhlcg== 41963
+c2VtYmxpbmc= 41964
+LmRlc2NyaWJl 41965
+Q29uc3Vt 41966
+IEZlYXI= 41967
+Z2l2ZW4= 41968
+T3Jhbmdl 41969
+PGJvb2xlYW4= 41970
+IHN0ZWFkaWx5 41971
+cGFSZXBvc2l0b3J5 41972
+IHJlc3VsdFNldA== 41973
+X0VOVEVS 41974
+X3JlcGVhdA== 41975
+IHRvbmVz 41976
+IFBST1A= 41977
+bmFs 41978
+cGFydGljbGU= 41979
+IHNpZ25hbGluZw== 41980
+IGFjY2Vzc29yeQ== 41981
+CQkJCQkJICA= 41982
+IHZpZWxl 41983
+IE5vYWg= 41984
+LWFn 41985
+IG11cmRlcnM= 41986
+IGFpcmVk 41987
+IFBMQVk= 41988
+IFN1bGxpdmFu 41989
+X0NvcmU= 41990
+IHVsb25n 41991
+IGJsb2dnaW5n 41992
+PlRoaXM= 41993
+IGRhdGFJbmRleA== 41994
+IHByaW50YWJsZQ== 41995
+IEV5ZXM= 41996
+X3RhcmdldHM= 41997
+KFB5 41998
+Lm92ZXI= 41999
+IGJydQ== 42000
+YW1wdG9u 42001
+IHBsYWludGlmZg== 42002
+PEtleQ== 42003
+YnVsbA== 42004
+IOKfqA== 42005
+SXNzdWU= 42006
+LmNvcm5lclJhZGl1cw== 42007
+Q3JpdGljYWw= 42008
+X3BoaQ== 42009
+LmFuZ2xl 42010
+IGR5bmFtaWNhbGx5 42011
+ISIpOw0K 42012
+Pik7Cg== 42013
+aW52ZXN0 42014
+LioKCg== 42015
+IHTDqWzDqQ== 42016
+IHN1cGVyZg== 42017
+IGNhc2NhZGU= 42018
+RFRE 42019
+IHZpdmlk 42020
+IHN1YnNpZGllcw== 42021
+IEhhc3M= 42022
+IGNvbGxhcHM= 42023
+IGNlcmFtaWM= 42024
+e30iLg== 42025
+IExlYWthZ2U= 42026
+LXRyYXNo 42027
+Y29sbGFwc2Vk 42028
+LXNvY2lhbA== 42029
+IENoYWQ= 42030
+IGluY2xpbmVk 42031
+IHN0bw== 42032
+IHN0b3J5Ym9hcmQ= 42033
+LnBheW1lbnQ= 42034
+c3RhY2tvdmVyZmxvdw== 42035
+IFJhaWRlcnM= 42036
+ICMn 42037
+b2xpY2llcw== 42038
+7Jy866Gc 42039
+ZW1hcA== 42040
+IGtq 42041
+IHF1b3Rh 42042
+IEdhcmRlbnM= 42043
+67KI 42044
+IEFuZ2Vscw== 42045
+IG9mdA== 42046
+IGxvd2VyY2FzZQ== 42047
+IGlQYXJhbQ== 42048
+IGNoZWFwZXN0 42049
+dW50YQ== 42050
+X3BrdA== 42051
+aWNhdG9ycw== 42052
+IGxldXJz 42053
+IGRlY3JlYXNlcw== 42054
+CWRlZmluZQ== 42055
+UFJFQw== 42056
+YW1tZXJz 42057
+IFByZXBhcmVkU3RhdGVtZW50 42058
+KGRpcmVjdGlvbg== 42059
+IGNyZXdz 42060
+YXJrZWQ= 42061
+IE1lbXBoaXM= 42062
+IFNlbGw= 42063
+R1RL 42064
+IG1haWQ= 42065
+OmRpc2FibGU= 42066
+6ZuG 42067
+IFBm 42068
+IGFsYmVpdA== 42069
+b3Blbmg= 42070
+Pz4iPgo= 42071
+LmdldFNvdXJjZQ== 42072
+KHNjYWxl 42073
+RHU= 42074
+IFBJTA== 42075
+X3JlZnJlc2g= 42076
+IGJldHM= 42077
+KGNhcg== 42078
+IFZvbg== 42079
+fC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tCg== 42080
+IEdyYXQ= 42081
+TXVjaA== 42082
+KERpYWxvZw== 42083
+LnN0b3BQcm9wYWdhdGlvbg== 42084
+IHRlaw== 42085
+IGV4aXRz 42086
+J10sJA== 42087
+IHBob25lTnVtYmVy 42088
+dWNz 42089
+ZWNpbWFs 42090
+LS0tLS0tLS0tLS0tLS0= 42091
+aW5w 42092
+LnBvam8= 42093
+IGNvcnB1cw== 42094
+IHByYWN0aXRpb25lcnM= 42095
+LnBpYw== 42096
+InRlc3Rpbmc= 42097
+IHN0cmluZ0J5 42098
+Lk5vdE51bGw= 42099
+IHJhbmc= 42100
+LkR5bmFtaWM= 42101
+X1JlbmRlcg== 42102
+0LDRgtCw 42103
+V2FpdGluZw== 42104
+IFdpaw== 42105
+IG92ZXJ3aGVsbWVk 42106
+JSI+ 42107
+IEFF 42108
+fX0+Cg== 42109
+dXc= 42110
+X3R5cA== 42111
+IGJ1Y2tldHM= 42112
+IGdyZWV0aW5n 42113
+IGxhdWdodGVy 42114
+IGFudGFnb24= 42115
+dWdnZXN0aW9u 42116
+LWVtYWls 42117
+CXRvcA== 42118
+IGVyb3M= 42119
+X3RyaQ== 42120
+IGlzc3Vpbmc= 42121
+IGjDoQ== 42122
+IGlzb2xhdGU= 42123
+T3ZlcmZsb3c= 42124
+LEU= 42125
+IG51dHJpdGlvbmFs 42126
+IEFiYm90dA== 42127
+IG5m 42128
+LnRvdWNo 42129
+LmZldGNoYWxs 42130
+X3ppcA== 42131
+Iil9Cg== 42132
+IGFtYXQ= 42133
+IENpc2Nv 42134
+IG7DpQ== 42135
+UExFWA== 42136
+IHNlaQ== 42137
+Zm90bw== 42138
+LnRvSnNvbg== 42139
+5aSa 42140
+IEtsZWlu 42141
+IGxpYmM= 42142
+IG1pbmVycw== 42143
+5aI= 42144
+LXByaW50 42145
+IFByaWRl 42146
+VG9kb3M= 42147
+IG1hc2tlZA== 42148
+IHNldERhdGE= 42149
+IHRlbGVmb24= 42150
+IHVuaGFwcHk= 42151
+IFRhYmxlcw== 42152
+Z2Vi 42153
+KGRlYnVn 42154
+X2FsbG93ZWQ= 42155
+LWFjY2Vzcw== 42156
+IGxvZ2lzdGljcw== 42157
+IGdlbXM= 42158
+IE1hdHVyZQ== 42159
+IHJzcA== 42160
+IEFsbGU= 42161
+LmdldEJ5dGVz 42162
+XHdlYg== 42163
+eW5jaHJvbml6ZWQ= 42164
+UGFyYWdyYXBo 42165
+IHRocm90dGxl 42166
+LnNxbGl0ZQ== 42167
+Y29uc3VsdGE= 42168
+IFNlYWg= 42169
+Q2U= 42170
+IHN1Ym1hcg== 42171
+RVJF 42172
+Vm91cw== 42173
+IHJlZGRpdA== 42174
+IHNxbGFsY2hlbXk= 42175
+LW1pbGU= 42176
+b2NpZGU= 42177
+UG91cg== 42178
+fX0iPgo= 42179
+c3RlYWQ= 42180
+IEAo 42181
+IFtdKQ== 42182
+IEFkcw== 42183
+IG92ZXJsb2Fk 42184
+cmlkZGVu 42185
+IERlc2VydA== 42186
+IFdyYXA= 42187
+IFBvcnR1Z3Vlc2U= 42188
+ZXR6 42189
+CWZpcnN0 42190
+IG1pbGVzdG9uZQ== 42191
+5peg 42192
+0YPRiQ== 42193
+KHN1Y2Nlc3M= 42194
+PFZlY3Rvcg== 42195
+Y29vbA== 42196
+IFtdKTsK 42197
+ZXJ2YWxz 42198
+IGludmVydA== 42199
+Imlv 42200
+Y3Vyc28= 42201
+ZnJhZ21lbnQ= 42202
+IGZlYXNpYmxl 42203
+LnNldFBvc2l0aW9u 42204
+IGVsbQ== 42205
+IGltYWdpbg== 42206
+QFNwcmluZw== 42207
+IGJhdHM= 42208
+cHXDqXM= 42209
+Z2FsZW1lbnQ= 42210
+bnNpYw== 42211
+Z2llbmU= 42212
+ZWxsYXRpb24= 42213
+IEJhaWxleQ== 42214
+U2hhcg== 42215
+IFR1bA== 42216
+IEhL 42217
+IGZyZWV6aW5n 42218
+Z2xt 42219
+Y2VhbnM= 42220
+LWN1dA== 42221
+X2NpcmNsZQ== 42222
+5ZGY 42223
+bmVnYXRpdmU= 42224
+IGluZGlhbg== 42225
+c2FsdA== 42226
+IHRpbmc= 42227
+CW1vZA== 42228
+IHNpbnQ= 42229
+YWtpbg== 42230
+dW1s 42231
+IFRleHRJbnB1dA== 42232
+IHBvcHBlZA== 42233
+VE1Q 42234
+IHBhcmtlZA== 42235
+15nX 42236
+IEZ1c2lvbg== 42237
+IGhlYXRlcg== 42238
+RVRG 42239
+cm96ZW4= 42240
+aGFsbA== 42241
+IE1paw== 42242
+bGV2YXJk 42243
+LWhlYXJ0 42244
+CW9yZGVy 42245
+TWFraW5n 42246
+IHBsZWRnZWQ= 42247
+IGRpcnM= 42248
+JHBvc3Q= 42249
+IEhlcnI= 42250
+c3RhbnRpYXRl 42251
+LCIK 42252
+LmdldENvbG9y 42253
+IFNBVA== 42254
+IHRpbWVkZWx0YQ== 42255
+IE1haQ== 42256
+CW1ldGhvZA== 42257
+IGlkaW90 42258
+IFRyYXY= 42259
+aWRlbnRpZmllZA== 42260
+IERpdmluZQ== 42261
+LmdldFBhdGg= 42262
+RGFzaA== 42263
+IGluZmlsdHI= 42264
+IGhhbmRsZVN1Ym1pdA== 42265
+YnJvb2s= 42266
+LmdlbmVyaWM= 42267
+LnNob3J0Y3V0cw== 42268
+Li4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLg== 42269
+IGRhdGluZ3M= 42270
+IE1W 42271
+77u/Iw== 42272
+fSIKCg== 42273
+IGltcHJpc29ubWVudA== 42274
+YXNvbmlj 42275
+cm91ZA== 42276
+dWNpb24= 42277
+5oql 42278
+IGRpYWxlY3Q= 42279
+IG9uTW91c2U= 42280
+Y29uc3RleHBy 42281
+LmxhYmVsQ29udHJvbA== 42282
+IHdlYWtlcg== 42283
+IG1hbmtpbmQ= 42284
+IFJFQ0U= 42285
+IGRpeg== 42286
+IGFwcEJhcg== 42287
+IHF1w6k= 42288
+ZnJh 42289
+X2RlZmF1bHRz 42290
+IGFsaXF1 42291
+X2F0b20= 42292
+OmluZGV4UGF0aA== 42293
+IG1pc3Nlcw== 42294
+IHZpc3VhbGx5 42295
+IEhhbmRz 42296
+U1RSVQ== 42297
+aWF0ZXM= 42298
+X2Fzc2V0 42299
+RmluZGVy 42300
+bWlkdA== 42301
+IHNuYWNrcw== 42302
+KF9fKCc= 42303
+LnVyaQ== 42304
+IEluc3RydW1lbnQ= 42305
+dmVuaXI= 42306
+KCRfXw== 42307
+LkRvdE5ldEJhcg== 42308
+IGNvbmZpZ3M= 42309
+IGd1ZXNzZWQ= 42310
+4KS/4KQ= 42311
+IGluaXRpYWxpemVy 42312
+ID8iLA== 42313
+IFZlcml6b24= 42314
+bWFuaWZlc3Q= 42315
+Z2ViZW4= 42316
+LmRldGFpbHM= 42317
+R2F0ZQ== 42318
+cG9uc2libGU= 42319
+IEVsaW0= 42320
+LHN0cg== 42321
+IHdyaXRpbmdz 42322
+IERlcmVr 42323
+IENvb3JkaW5hdG9y 42324
+IHBpbGxvdw== 42325
+IG5vdGljZWFibGU= 42326
+UnM= 42327
+IGR1cGxpY2F0ZXM= 42328
+ZXJuZWxz 42329
+a0o= 42330
+Lnp6 42331
+b2xsYW5k 42332
+IFNFQ1RJT04= 42333
+X2ZuYW1l 42334
+dWZmbGVk 42335
+J10uJzwv 42336
+X0NN 42337
+IHly 42338
+cGxhdA== 42339
+b2JvZHk= 42340
+bmRl 42341
+KEVsZW1lbnQ= 42342
+IEF0bGFz 42343
+IO+8iA== 42344
+IG5pdmVs 42345
+IGluc2lzdHM= 42346
+W1A= 42347
+IGVudGh1c2lhc3Rz 42348
+IOyeheugpQ== 42349
+IGJldmVyYWdl 42350
+e30iLA== 42351
+OnJpZ2h0 42352
+IG5vdXZlYXU= 42353
+IENvbXBsZQ== 42354
+IFBhZw== 42355
+b3ducw== 42356
+IHJlbWVtYmVycw== 42357
+IFByYWRlc2g= 42358
+IGNoYWxr 42359
+IExhdXJlbg== 42360
+XFNlcnZpY2U= 42361
+X0dFTg== 42362
+PiIpCg== 42363
+IERvbGxhcg== 42364
+IGVtb2pp 42365
+Q2Fyb3VzZWw= 42366
+LXBsYXllcg== 42367
+IGFkanVzdGluZw== 42368
+IGp1Z2E= 42369
+YWxsZW5nZXM= 42370
+Z2VuZQ== 42371
+KGJvZHlQYXJzZXI= 42372
+bG9wZWRpYQ== 42373
+IEJlaGluZA== 42374
+IHNsZWV2ZXM= 42375
+IGRyYWdnaW5n 42376
+IENoZXZyb2xldA== 42377
+IGJpeg== 42378
+aXZpdGllcw== 42379
+IEZyZXF1ZW5jeQ== 42380
+LGNoYXI= 42381
+LldISVRF 42382
+X3ByZXZpZXc= 42383
+KSc7Cg== 42384
+X2F4 42385
+SU9OUw== 42386
+LmNwdQ== 42387
+LmlucHV0cw== 42388
+VUJF 42389
+X2ZlZWQ= 42390
+IFN1cHBsZW1lbnQ= 42391
+ISku 42392
+ZXN1cw== 42393
+IFVEUA== 42394
+IG1pY3JvcGhvbmU= 42395
+IGNvbmZpcm1z 42396
+LmlzTm90RW1wdHk= 42397
+IjoiIiwK 42398
+X1NDUkVFTg== 42399
+CWV4cGVjdGVk 42400
+Ky0rLSstKy0= 42401
+IEhhaXQ= 42402
+ZmFzdGNhbGw= 42403
+IGRlcGljdA== 42404
+dmI= 42405
+X3BpY3R1cmU= 42406
+CWRlc2NyaXB0aW9u 42407
+IFdpZmU= 42408
+dWNp 42409
+IHZpY2lvdXM= 42410
+5LuW 42411
+dWViYQ== 42412
+IHNldFVzZXI= 42413
+44Gh 42414
+IGRpdmluZw== 42415
+IG9wZXJh 42416
+dXNlcmNvbnRlbnQ= 42417
+YXJhaA== 42418
+KX0s 42419
+eXVu 42420
+dmVsdA== 42421
+IHVuY292ZXJlZA== 42422
+IGhpcHM= 42423
+IG9zY2lsbA== 42424
+IGFzc2VydGluZw== 42425
+IFhp 42426
+LnJlc3RvcmU= 42427
+a2Vh 42428
+IHNwZWxsaW5n 42429
+IGRlcml2ZQ== 42430
+YWJ3ZQ== 42431
+IERvdw== 42432
+LnNldFR5cGU= 42433
+X3Zz 42434
+IGNvenk= 42435
+LmNhdGVnb3JpZXM= 42436
+T3Jn 42437
+X21ncg== 42438
+IGR1bmdlb24= 42439
+Y29sbGVjdGlvblZpZXc= 42440
+IEJsYW5r 42441
+YWNpYXM= 42442
+w6TDpA== 42443
+X2NsZWFudXA= 42444
+X0FDVElWSVRZ 42445
+IHRyaWFuZ2xlcw== 42446
+Lk1lbnVJdGVt 42447
+IGlwaG9uZQ== 42448
+IFdvbg== 42449
+XV0KCg== 42450
+IENvbXBhcmlzb24= 42451
+LkRvYw== 42452
+IGNhbm9uaWNhbA== 42453
+IFN1ZGFu 42454
+Jyl7 42455
+VXBJbnNpZGU= 42456
+YnVpbHRpbg== 42457
+RU5DWQ== 42458
+eGJl 42459
+IGNodWNr 42460
+IGNvbnRyYWRpY3Q= 42461
+IG51ZXN0cm8= 42462
+IGFyY2hpdGVjdHVyYWw= 42463
+IEZpYg== 42464
+IGNvbXBhcmVz 42465
+Kms= 42466
+Q2Zn 42467
+54Sh 42468
+bnRlbg== 42469
+TWF0Y2hlcw== 42470
+IERPV05MT0FE 42471
+X0hBTkRMRVI= 42472
+bWFuYWdlbWVudA== 42473
+W1M= 42474
+RU5H 42475
+woDC 42476
+ZmFuZw== 42477
+IHNsaXBwZWQ= 42478
+IExhbmth 42479
+ZXNjYXBpbmc= 42480
+IHRhY2tsZXM= 42481
+IFBlZHJv 42482
+LlByb3A= 42483
+Licn 42484
+LkdlbmVyYXRlZA== 42485
+Lk5ld0d1aWQ= 42486
+YXRyaWdlc2ltYWw= 42487
+aWxsb24= 42488
+IHN0YXRpc3RpYw== 42489
+c3BlY2llcw== 42490
+aG9sZGluZw== 42491
+RHJ1cGFs 42492
+IGZ1bmRhbWVudGFsbHk= 42493
+IGJvbmRhZ2U= 42494
+IHJlc29sdXRpb25z 42495
+SW5saW5lRGF0YQ== 42496
+XFR5cGU= 42497
+ZXN0aW9u 42498
+LndyYXA= 42499
+IHdhcnJpb3Jz 42500
+IExPQ0FM 42501
+QXJjaGl2ZQ== 42502
+IGVtYnJhY2Vk 42503
+4bun 42504
+LlZlcg== 42505
+IEFmZm9yZGFibGU= 42506
+b2xlc2FsZQ== 42507
+IEFwcGxpZWQ= 42508
+IENvbnZlcnNpb24= 42509
+bWVnYQ== 42510
+X2NhbQ== 42511
+IGNlcmVtb24= 42512
+YXVydXM= 42513
+IFZvbGs= 42514
+Lm9wZW5z 42515
+L2Fib3V0 42516
+IFN0ZA== 42517
+am91cm5hbA== 42518
+KCkpew0K 42519
+LCJc 42520
+KEFycmF5cw== 42521
+IERlbnNl 42522
+YXNlw7Fh 42523
+w6RubmVy 42524
+L3N0YXQ= 42525
+dXNlckRhdGE= 42526
+IGdlcm1hbg== 42527
+IHR6 42528
+d29ydGh5 42529
+Rm9ybWF0RXhjZXB0aW9u 42530
+cGhlcmQ= 42531
+IHNtaWxlcw== 42532
+IFdoZW5ldmVy 42533
+KGFkYXB0ZXI= 42534
+LmJhZGxvZ2lj 42535
+IGJyaWVmaW5n 42536
+LkdyaWRDb2x1bW4= 42537
+LWNoYXI= 42538
+ZGltZW5zaW9u 42539
+IENvcHBlcg== 42540
+IG5pbnRo 42541
+ICd7ew== 42542
+IHJhdg== 42543
+X1RhYmxl 42544
+IGRlcml2YXRpdmVz 42545
+IFJhaXNl 42546
+IEZ1dA== 42547
+YXJtb3I= 42548
+LXBhZGRpbmc= 42549
+IHJlbWlu 42550
+CXN0eWxl 42551
+IE1lbWJlcnNoaXA= 42552
+IHNwcmVhZHM= 42553
+IGdhbGxlcmllcw== 42554
+IENsYXJrZQ== 42555
+IGNvbmNlcHRpb24= 42556
+bWludXRl 42557
+IGFidXNpdmU= 42558
+X2Fkag== 42559
+IHRlcnJpZmlj 42560
+IG92ZXJ0 42561
+b3VyY2luZw== 42562
+IGVudHJhZGE= 42563
+bGV2ZWxz 42564
+IGNyaXRpcXVl 42565
+IHJlc3BlY3Rz 42566
+IE1NQQ== 42567
+aWVuZQ== 42568
+IGVuY2Fwcw== 42569
+IFJheW1vbmQ= 42570
+RGl2aWRlcg== 42571
+aXZhYmxl 42572
+YmF6 42573
+IEBfOwo= 42574
+IENsYWlyZQ== 42575
+IHVyZ2luZw== 42576
+Q0VF 42577
+IHRyYW5zZm9ybWVy 42578
+ZGlzY29yZA== 42579
+IEpvdXJuZXk= 42580
+dG9z 42581
+IGNvbXBldGl0aW9ucw== 42582
+IE9CSg== 42583
+IEJpcw== 42584
+IHJlbGF4YXRpb24= 42585
+aWR5 42586
+X0lOU1RBTkNF 42587
+IFByZWY= 42588
+ZGFkb3M= 42589
+aWNpZW5jaWVz 42590
+IE1lZGlhUXVlcnk= 42591
+IEN1YmU= 42592
+IFN0cmFuZ2U= 42593
+Z3B1 42594
+KGRheXM= 42595
+X0luaXRTdHJ1Y3Q= 42596
+IGZpbmdlcnByaW50 42597
+ZW1hdA== 42598
+IEdlY2tv 42599
+IHJhaWxz 42600
+IEx1bQ== 42601
+c3RyYWN0aW9u 42602
+aWd1bmc= 42603
+KG1vdmll 42604
+X2RpY3Rpb25hcnk= 42605
+X2ludGVycnVwdA== 42606
+IFFD 42607
+aWtlZA== 42608
+YXBwZW5kQ2hpbGQ= 42609
+cmVjaXBpZW50 42610
+csOp 42611
+VmU= 42612
+IHRvd2Vs 42613
+Lmxhc3RJbmRleE9m 42614
+IHBsYWNlYm8= 42615
+IFdpZQ== 42616
+LmVzcA== 42617
+KERlYnVn 42618
+b3BlcmF0aXZl 42619
+IGRlY2Vhc2Vk 42620
+Jmlk 42621
+CW11dGV4 42622
+ZWxpYw== 42623
+IGJhcHQ= 42624
+CQ0KDQo= 42625
+IGZhcnRoZXI= 42626
+SGFsZg== 42627
+LmRpc2FibGU= 42628
+Lm1lbnVTdHJpcA== 42629
+bGVjY2lvbg== 42630
+IHJlc3VsdENvZGU= 42631
+IGNhbnM= 42632
+LWVsZWN0aW9u 42633
+ZmVtYWxl 42634
+X0ZJWA== 42635
+YXVzaWJsZQ== 42636
+IFBPV0VS 42637
+IHJlY29uc3RydWN0aW9u 42638
+IHNjYW5z 42639
+Llh0cmFCYXJz 42640
+4oCYcw== 42641
+UmVtb3ZlZA== 42642
+IHBhcmFncmFwaHM= 42643
+X21hcmdpbg== 42644
+IGx5bXBo 42645
+IGJvcw== 42646
+bGluZ3Rvbg== 42647
+IEJhcHRpc3Q= 42648
+IGFkdmVydGlzZW1lbnRz 42649
+IE1hbmFnZQ== 42650
+L3l5eXk= 42651
+SU9VUw== 42652
+RU5DRVM= 42653
+IEZpY3Rpb24= 42654
+CW1lbnU= 42655
+IEZpbGVPdXRwdXRTdHJlYW0= 42656
+b3Zhbg== 42657
+IEZlbmc= 42658
+IHNraXBwaW5n 42659
+Z2V0Q2xhc3M= 42660
+YW5uaQ== 42661
+IHJlYm91bmRz 42662
+IHB1YmxpY2l0eQ== 42663
+IGluZ3Jlcw== 42664
+dXNlbWVudA== 42665
+IHRob3VnaHRmdWw= 42666
+LkNoYXJ0 42667
+IGhhdHRl 42668
+cGFzc3BvcnQ= 42669
+IGhvb2tlZA== 42670
+IExlbnM= 42671
+IGZsYWdzaGlw 42672
+IHN0aXA= 42673
+IEdFTg== 42674
+IGNsdWVz 42675
+aXB2 42676
+IFJpc2U= 42677
+IEdldw== 42678
+dGFibGVuYW1l 42679
+IGZvcmVtb3N0 42680
+X3ZhbGlkYXRl 42681
+X2FuYWx5c2lz 42682
+b2xsYQ== 42683
+IHF1YWxpZmljYXRpb25z 42684
+IGRpc3RyaWJ1dGlvbnM= 42685
+IEZsb3dlcg== 42686
+IHRlbnNl 42687
+IHRoYW5rZnVs 42688
+IGNsdXRjaA== 42689
+IHVuaWZpZWQ= 42690
+cm9hZHM= 42691
+IHNpdGk= 42692
+IHN0YWxs 42693
+X1BSSU9SSVRZ 42694
+Y3N0ZGxpYg== 42695
+X1VTRVJOQU1F 42696
+LmJ5dGVz 42697
+P3BhZ2U= 42698
+ZXJtYWxpbms= 42699
+IFZlZ2V0 42700
+L3ZuZA== 42701
+LWF1dGhvcg== 42702
+Lk5PTkU= 42703
+IENvbmN1cnJlbnQ= 42704
+IENyeQ== 42705
+IHN0YXJ0ZXJz 42706
+IEludGVyYWN0aW9u 42707
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg 42708
+IExFVkVM 42709
+RWxs 42710
+IGNvbWJvQm94 42711
+IFRoZXJlc2E= 42712
+dGVr 42713
+X0hhbmRsZQ== 42714
+IGFieQ== 42715
+LmdkeA== 42716
+LGVuZA== 42717
+KExvY2Fs 42718
+T2w= 42719
+a25pZmU= 42720
+YXJpYWw= 42721
+IEhvZmY= 42722
+IHByb3N0aXR1ZXJhZGU= 42723
+RG9jdG9y 42724
+SW5zdGFuY2Vz 42725
+LlNldFZhbHVl 42726
+CWZyb20= 42727
+IGx1eHVyaW91cw== 42728
+SW5kZW50 42729
+QWxsb2NhdG9y 42730
+X0RSQVc= 42731
+KCIsIiw= 42732
+IEZyYW5jZXM= 42733
+IGdyb3VwQm94 42734
+KHNjaGVtYQ== 42735
+UHJpbnRm 42736
+T1JJRVM= 42737
+LWdyYWRpZW50 42738
+IHJlcHV0 42739
+YXJpbg== 42740
+X0RPTkU= 42741
+aW5jcmU= 42742
+aWdudHk= 42743
+IGV4ZXJ0 42744
+IC0u 42745
+L0FwcA== 42746
+LXRocm91Z2g= 42747
+IGRlY2xpbmluZw== 42748
+IGRlc3NlcnQ= 42749
+IGluY3VtYg== 42750
+IGRlc2lnbmF0aW9u 42751
+LlBPUlQ= 42752
+LHN0cm9uZw== 42753
+IHNhbmRib3g= 42754
+IHdpbmVz 42755
+IFBhdg== 42756
+JHN0cg== 42757
+YXNrZWxs 42758
+IGjDtg== 42759
+IFBZ 42760
+R2V0SW5zdGFuY2U= 42761
+VGV4dElucHV0 42762
+Z2FtZU9iamVjdA== 42763
+L2V2ZW50cw== 42764
+Y3JlYXRlZEF0 42765
+IGxvY2FsVmFy 42766
+IFdISVRF 42767
+cGVyZWQ= 42768
+aWxlZ2U= 42769
+ZWZmaWNpZW50 42770
+LGNvbG9y 42771
+Y2F0ZQ== 42772
+IENhZmU= 42773
+IHNpbWlsYXJpdGllcw== 42774
+IHB1bXBz 42775
+IEh1bmdhcnk= 42776
+LlVzZXJuYW1l 42777
+IHNrYXRl 42778
+IHRvdWNoZG93bnM= 42779
+IGFjY2VsZXJhdGU= 42780
+IEhlbGVu 42781
+T01FTQ== 42782
+IEt1bg== 42783
+X3ZvbA== 42784
+IGZpbmRBbGw= 42785
+IE1lbnNjaGVu 42786
+YWhlYWQ= 42787
+KTsi 42788
+a29tbWVu 42789
+IHBvc3Nlc3NlZA== 42790
+LmFyZ21heA== 42791
+LnRyYW5zaXRpb24= 42792
+QVJQ 42793
+T0xVTUU= 42794
+KHNjcmlwdA== 42795
+INCY 42796
+IEZpbmRpbmc= 42797
+b25jZXM= 42798
+SW8= 42799
+Qm9sZA== 42800
+IHJlbmV3YWw= 42801
+X0RJQUxPRw== 42802
+IGRpc3JlZw== 42803
+SU5URVJO 42804
+IHRvdXRl 42805
+IGVsZWN0cg== 42806
+IEdyb3Nz 42807
+CXRydWU= 42808
+LkZpZWxkcw== 42809
+IFdJRFRI 42810
+IERlbnQ= 42811
+IMOB 42812
+TlNOb3RpZmljYXRpb24= 42813
+IGFvcw== 42814
+IG1lbGVl 42815
+LlZhbGlkYXRpb24= 42816
+IERFQw== 42817
+LWRlcGVuZGVudA== 42818
+IHN1aWM= 42819
+VHJhaXRz 42820
+JG1lc3NhZ2U= 42821
+IERlYXI= 42822
+CUZJTEU= 42823
+bGFuZ3VhZ2Vz 42824
+LlByb3Q= 42825
+LmFkZHI= 42826
+LWdlbmVyYXRpb24= 42827
+SUNPTg== 42828
+IHRyYW5zcGxhbnQ= 42829
+LWRlc2NyaXB0aW9u 42830
+IGNoYXNpbmc= 42831
+IGNoZWVz 42832
+IH0qLwo= 42833
+VHJhZA== 42834
+cXVlcmllcw== 42835
+L3dpZGdldHM= 42836
+c3VicGFja2FnZQ== 42837
+IGVzcGVj 42838
+IGNyYWNrZWQ= 42839
+IGNvbXBldGl0b3I= 42840
+UHVyY2hhc2U= 42841
+LXRlYW0= 42842
+b2xlY3VsYXI= 42843
+b3JUaHVuaw== 42844
+JlA= 42845
+IHJlbGVudA== 42846
+LyN7 42847
+IHByb2R1Y3RJZA== 42848
+IOi+ 42849
+IExhdg== 42850
+IEFsdGVy 42851
+Lk1vZGU= 42852
+QURJTw== 42853
+Z3Jw 42854
+5re75Yqg 42855
+UXVpdA== 42856
+IGRlcHRocw== 42857
+LWNhdGVnb3J5 42858
+IERBVEFCQVNF 42859
+U1BFTEw= 42860
+IEZhbGNvbg== 42861
+IFFTdHJpbmdMaXN0 42862
+ICcnLg== 42863
+IEluc3RpdHV0aW9u 42864
+ZGFtYWdl 42865
+YXpvcg== 42866
+YmVsb25nc1Rv 42867
+dmVyYWdlcw== 42868
+IE5PTkU= 42869
+aXBwZXRz 42870
+LFwK 42871
+IGZvb3RwcmludA== 42872
+X2FyY2hpdmU= 42873
+bmFr 42874
+LmdldEZpZWxk 42875
+IFJlZmxlY3Rpb24= 42876
+ICdd 42877
+IEhCTw== 42878
+X2Rpc2NvdW50 42879
+IGluY2VzdA== 42880
+IERvZGdl 42881
+IFdhZGU= 42882
+Lk5P 42883
+ImVuY29kaW5n 42884
+IEJsb2NrY2hhaW4= 42885
+IGxhd3N1aXRz 42886
+IE1haW50 42887
+Y2h0ZW4= 42888
+IMOpdGFpdA== 42889
+IGt0w7NyZQ== 42890
+X2N0bA== 42891
+KHRpbWVy 42892
+QmF0dGxl 42893
+aXpv 42894
+YXllZA== 42895
+SU9S 42896
+IEdsYXNnb3c= 42897
+IHN5bnRo 42898
+X2xvZ3M= 42899
+LnBvc2U= 42900
+X0FkanVzdG9yVGh1bms= 42901
+KCgm 42902
+IHVuc3VyZQ== 42903
+eXN0YXRl 42904
+7ZWY64qU 42905
+T1VMRA== 42906
+Lm5n 42907
+IGRlZmF1bHRkaWN0 42908
+d29ya3NwYWNl 42909
+IHNlbGVjdGl2ZQ== 42910
+UGlja2VyQ29udHJvbGxlcg== 42911
+WU5BTUlD 42912
+Lm1ldGhvZHM= 42913
+IHBhdGh3YXlz 42914
+IEZldw== 42915
+S0c= 42916
+Q1JZUFQ= 42917
+Zm9sbG93aW5n 42918
+IERMQw== 42919
+IFNhcmE= 42920
+IHByZXNldA== 42921
+ZXN0cnVjdG9y 42922
+IEt1cnQ= 42923
+IGFpcnBsYW5l 42924
+IG9tcA== 42925
+IFBhcmVudHM= 42926
+IE1hcnRpbmV6 42927
+LmNvbXBsZXRl 42928
+IGJyb2FkbHk= 42929
+IHNjYXJl 42930
+IE3DqQ== 42931
+IGVsaW1pbmF0aW9u 42932
+IHBvdXJlZA== 42933
+L3N3 42934
+IGNvbXVu 42935
+IG1hc2M= 42936
+IE9yZ2FuaWM= 42937
+IFN0cmluZ1V0aWxz 42938
+aWxhdGVyYWw= 42939
+IHJlbHVjdGFudA== 42940
+LWFnZQ== 42941
+IG56 42942
+LiJc 42943
+IHBhc3Rvcg== 42944
+YWxleg== 42945
+IGVmZWN0 42946
+cHJvdg== 42947
+L2luaXQ= 42948
+IHBlbm4= 42949
+dW5kcw== 42950
+IHNzaXpl 42951
+IFByb2o= 42952
+YmFzZW5hbWU= 42953
+IHNoZWxscw== 42954
+IE5lY2s= 42955
+IEVuZm9yY2VtZW50 42956
+dmlkZWQ= 42957
+c3Rvd24= 42958
+U3BoZXJl 42959
+JHI= 42960
+dXNzZW4= 42961
+YWZpbA== 42962
+IFRlbGVncmFt 42963
+IGFuYWx5dGljYWw= 42964
+0L3Ri9C1 42965
+dXN1YWxseQ== 42966
+eG4= 42967
+IGhpc3Rvcmlhbg== 42968
+IEdyZWdvcnk= 42969
+b2xwaA== 42970
+IFVuYQ== 42971
+IGNvbnRyaWJ1dGVz 42972
+JS0= 42973
+YW50aWFnbw== 42974
+0YDQtdC0 42975
+LnJlZ2lvbg== 42976
+IGFicnVwdA== 42977
+IFVuc3VwcG9ydGVkT3BlcmF0aW9uRXhjZXB0aW9u 42978
+IFRBU0s= 42979
+X2ZpbmlzaA== 42980
+IG5vdG9yaW91cw== 42981
+IFZz 42982
+IE1R 42983
+IHN1bnNldA== 42984
+IHVuYWNjZXB0YWJsZQ== 42985
+YXJjZXI= 42986
+IGlsbHVtaW4= 42987
+IE9yYg== 42988
+IGJo 42989
+RXN0ZQ== 42990
+X2Rpc3BhdGNo 42991
+IHJpcHBlZA== 42992
+IHRvdWpvdXJz 42993
+IFBhcmNlbA== 42994
+X2xs 42995
+LnVzZXJOYW1l 42996
+LmNsYXNzZXM= 42997
+U09VUkNF 42998
+KE51bWJlcg== 42999
+0LXQu9GP 43000
+IGhlYWRwaG9uZXM= 43001
+KHNpZGU= 43002
+Y29uc3RpdHV0aW9u 43003
+YW5uYWg= 43004
+DQogICAgICAgIA0K 43005
+IGNsaWZm 43006
+LXJlZg== 43007
+IG1vc3RyYXI= 43008
+IFBvd2VsbA== 43009
+K3k= 43010
+IEJH 43011
+X2ZyYWdtZW50 43012
+LlBvcnQ= 43013
+IHJlYWxpemluZw== 43014
+cGFyYW1yZWY= 43015
+IGhvbWV0b3du 43016
+QFRhYmxl 43017
+KyI8Lw== 43018
+b21pZA== 43019
+IGR1Zw== 43020
+CWJ0bg== 43021
+IHN1YmplY3RpdmU= 43022
+L2Jyb3dzZXI= 43023
+IHVzaG9ydA== 43024
+IE1vbnRnb21lcnk= 43025
+LXJhdGU= 43026
+CXB1dHM= 43027
+bGV0aWNz 43028
+b3Jucw== 43029
+4oCcV2hhdA== 43030
+ZWVwZXI= 43031
+LkludmFyaWFudA== 43032
+IGNvbmNlYWxlZA== 43033
+X251bXB5 43034
+PT09PT09PT09 43035
+KHBz 43036
+TG9jYXRpb25z 43037
+LmFzdHlwZQ== 43038
+IENIQU5HRQ== 43039
+Lk9yZGVyQnk= 43040
+O2hlaWdodA== 43041
+IGdlbnRl 43042
+IGdydW50 43043
+IFBsYW5l 43044
+IHNhZGx5 43045
+IExvZ2Fu 43046
+X3VzZWM= 43047
+LmRndg== 43048
+IHNpbmNlcg== 43049
+IHBu 43050
+CWd0aw== 43051
+IGluc3RhbGxlcg== 43052
+IGRpc3BsYWNlbWVudA== 43053
+IGJ1cm5z 43054
+0YPRgQ== 43055
+aXZlcmVk 43056
+Ol0pCg== 43057
+c2VhdA== 43058
+YW5pbmc= 43059
+fSkKCgo= 43060
+X3JvbGVz 43061
+YXRpY2Fu 43062
+IGdlbmVyYXRvcnM= 43063
+IGh1cnRz 43064
+IHNuaXBwZXQ= 43065
+IGdzb24= 43066
+IHNlZ3JlZw== 43067
+IGRpc3RyaWJ1dG9y 43068
+IGFkdmFuY2luZw== 43069
+cG9zdGdyZXM= 43070
+IHVzcg== 43071
+IExpcw== 43072
+LmFzc2VydElz 43073
+X2Nk 43074
+IGh5ZHJhdWxpYw== 43075
+LmNvdW50ZXI= 43076
+IEluZGVwZW5kZW5jZQ== 43077
+IGRpZmbDqQ== 43078
+VW5saWtl 43079
+IHRvbWI= 43080
+dmlr 43081
+cG9zdGVk 43082
+d2Y= 43083
+IGRlc2NlbmRpbmc= 43084
+ZHlu 43085
+YW1lbnRhbA== 43086
+IEZydWl0 43087
+IFlv 43088
+LmRvdWJsZQ== 43089
+IElB 43090
+aWV2 43091
+aWJyYXRl 43092
+IFJlbGlnaW9u 43093
+TWFueVRvT25l 43094
+LVRh 43095
+IGJhbmFuYQ== 43096
+IEF2ZW5nZXJz 43097
+IEhvbG9jYXVzdA== 43098
+IGdldEM= 43099
+IGNvbmRv 43100
+IEdvdGhpYw== 43101
+IHByb3NwZXJpdHk= 43102
+VFJBTlM= 43103
+IGRvZXNudA== 43104
+IENoYW9z 43105
+SVRU 43106
+IENVUlJFTlQ= 43107
+XGhlbHBlcnM= 43108
+X1NBVkU= 43109
+YXZpdA== 43110
+Y29tcHV0ZXI= 43111
+X3NoZWV0 43112
+IEJyZXdpbmc= 43113
+IHJvYmJlcnk= 43114
+IOqyvQ== 43115
+INC60L7QvA== 43116
+IG7DpA== 43117
+LnJlZ2V4 43118
+IGRpc3J1cHRpb24= 43119
+IFNpbXVsYXRpb24= 43120
+YXBpZA== 43121
+IHN1cHJlbWU= 43122
+zrw= 43123
+IGNvbW1pc3Npb25lZA== 43124
+IGFic29ycHRpb24= 43125
+IE5ld2Nhc3RsZQ== 43126
+CWNvbnN0cnVjdG9y 43127
+VGVybXM= 43128
+IHJpdg== 43129
+IHJlbGlnaW9ucw== 43130
+V2l0aFRhZw== 43131
+Lkh0bWw= 43132
+bGlua2Vk 43133
+Q29tcG91bmQ= 43134
+IE1hbnM= 43135
+IGxha2Vz 43136
+aXp6bGU= 43137
+LnNldFNpemU= 43138
+YWJlcg== 43139
+IE5lZWRz 43140
+cGFja2FnZXM= 43141
+LlRhYlBhZ2U= 43142
+IHJlZnM= 43143
+IGlvdXRpbA== 43144
+IERvaW5n 43145
+ICJcKA== 43146
+IHBoZW5vbWVuYQ== 43147
+LkdldEludA== 43148
+QUxUSA== 43149
+IHBhcmxpYW1lbnRhcnk= 43150
+IHJlZnVzYWw= 43151
+IGluZXhwZW5zaXZl 43152
+IH0KCgoKCg== 43153
+IHNvbGlkYXJpdHk= 43154
+CXB1c2g= 43155
+aGF1bA== 43156
+IEJlcmU= 43157
+U2l6ZXI= 43158
+SW5kaXZpZHVhbA== 43159
+IGFuY2U= 43160
+IGRpbGU= 43161
+IFBlYWs= 43162
+KGhy 43163
+RWRpdGluZ0NvbnRyb2xsZXI= 43164
+SE4= 43165
+X1BFUklPRA== 43166
+RVRT 43167
+QmFubmVy 43168
+ZXJyb3JNZXNzYWdl 43169
+LkNBU0NBREU= 43170
+LWlnbm9yZQ== 43171
+IFNJR04= 43172
+IE9C 43173
+X2Rk 43174
+KERFRkFVTFQ= 43175
+IHNvbw== 43176
+IFZpY3Rvcmlhbg== 43177
+IGN1cnQ= 43178
+IGRpc2NyZXRl 43179
+cnlsaWM= 43180
+aW1iYWJ3ZQ== 43181
+LnRvRml4ZWQ= 43182
+bMOk 43183
+LnN0ZGlu 43184
+IHF0eQ== 43185
+Uk9MTEVS 43186
+bWVkaWF0ZWx5 43187
+IHBsdW1iaW5n 43188
+IFByb3BlcnR5Q2hhbmdlZA== 43189
+YXJyYW50eQ== 43190
+IEJyZWFrZmFzdA== 43191
+LnNldEhlYWRlcg== 43192
+LnB5dGhvbg== 43193
+Y29tbWVyY2U= 43194
+b3BlbmN2 43195
+Pi0tfX0K 43196
+RnJlbmNo 43197
+RW50aXR5TWFuYWdlcg== 43198
+IFBsYWlu 43199
+Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8= 43200
+wrM= 43201
+KFJF 43202
+Y2FwdA== 43203
+IG9yZ2FuaXNtcw== 43204
+IGpldHM= 43205
+b2xvY2F0aW9u 43206
+IEFwcFJvdXRpbmdNb2R1bGU= 43207
+IGdsb3Jpb3Vz 43208
+5pyN 43209
+IGRpc2NhcmRlZA== 43210
+CQkJCSAgICAg 43211
+IEFybm9sZA== 43212
+bHVn 43213
+IHBhcmw= 43214
+IGhvcm1vbmVz 43215
+IG1haA== 43216
+IFNvbmlj 43217
+IG9yZ2FuaXplcnM= 43218
+X1BMQVRGT1JN 43219
+Lmludg== 43220
+IGNob3Jk 43221
+dmVudGlvbmFs 43222
+CW9m 43223
+RXBpc29kZQ== 43224
+LkVudW0= 43225
+dW5rdA== 43226
+IERo 43227
+IEphcmVk 43228
+IE5haw== 43229
+IGludGVuZHM= 43230
+RW5kaWFu 43231
+IGF1c3RyYWxpYQ== 43232
+X2N2 43233
+KHJlc29sdmU= 43234
+IGNsaW5pY3M= 43235
+bGlrZWQ= 43236
+QVNISU5HVE9O 43237
+aW5oYQ== 43238
+Jyo= 43239
+IE5Q 43240
+X2JlaA== 43241
+IGhm 43242
+IHfDvHI= 43243
+Y2F0ZWdvcmlh 43244
+JGZvcm0= 43245
+IHN1YndheQ== 43246
+IGlzQWN0aXZl 43247
+cG9wdWxhcg== 43248
+Q291cg== 43249
+IGNvb2xkb3du 43250
+IGFpbnNp 43251
+IEdMdWludA== 43252
+ZXJlYWw= 43253
+IGFycmF5T2Y= 43254
+IGhhdGNo 43255
+PT09PT09PT09PQ== 43256
+cmVzc2Vz 43257
+X1BQ 43258
+Ll4= 43259
+X2RlY2F5 43260
+IEJsZXNz 43261
+bWV0cmljcw== 43262
+IENPUFlJTkc= 43263
+IER1bXBzdGVy 43264
+IEpvc8Op 43265
+IERlc2lnbnM= 43266
+PFZvaWQ= 43267
+57q/ 43268
+ID8+PA== 43269
+ICJ9Cg== 43270
+dGltZXpvbmU= 43271
+IGVlcg== 43272
+bWF4Y2Ru 43273
+IEVTQw== 43274
+aWdhcmV0 43275
+X2Nvbm5lY3RlZA== 43276
+X3JldmVyc2U= 43277
+IHF1ZXN0aW9uYWJsZQ== 43278
+IFVTQw== 43279
+IHR1dHRp 43280
+IGRyb3BvdXQ= 43281
+IEFjdGl2aXRpZXM= 43282
+IFdpbmRz 43283
+JykpKTsK 43284
+IGNvbmdlc3Q= 43285
+xJ/EsQ== 43286
+IHByb2xvbmdlZA== 43287
+6L+Z 43288
+IENyb3NzQXhpc0FsaWdubWVudA== 43289
+TEVFUA== 43290
+IFZBTElE 43291
+IEdheg== 43292
+IGRlcGVuZGVuY2U= 43293
+IFByaXg= 43294
+LkNvbXBpbGVyU2VydmljZXM= 43295
+anVtcA== 43296
+IHN0cmF0 43297
+Y2lyYw== 43298
+IENVU1RPTQ== 43299
+eGFh 43300
+IGJtcA== 43301
+IGJ1cmVhdQ== 43302
+IHdhcmVu 43303
+Tlg= 43304
+KFdpbmRvdw== 43305
+IENocmlzdGll 43306
+X0ZF 43307
+IHRu 43308
+IE9tZWdh 43309
+Y29tbXVuaWNhdGlvbnM= 43310
+SG9tZVBhZ2U= 43311
+Y29tcGxldGlvbg== 43312
+IHN1cHBseWluZw== 43313
+WVBFUw== 43314
+w6F2ZWw= 43315
+5Yi2 43316
+KGNsaWNr 43317
+XENvbnRyYWN0cw== 43318
+L3F1ZXN0aW9ucw== 43319
+IGV6 43320
+QU1T 43321
+Lm1lc2g= 43322
+ICc8Pw== 43323
+asOg 43324
+SW5p 43325
+LiM= 43326
+IENhcmRpbmFscw== 43327
+cGNpw7Nu 43328
+Q3ViZQ== 43329
+IFBhdGllbnRz 43330
+X3ByZWY= 43331
+QWN0aW9uQnV0dG9u 43332
+KGJ1aWxk 43333
+IFZpc2E= 43334
+b3ZlbA== 43335
+KEFycmF5TGlzdA== 43336
+SWdu 43337
+IHJlaGFiaWxpdGF0aW9u 43338
+IHBhbGFjZQ== 43339
+IHNwZWVjaGVz 43340
+fScK 43341
+SHR0cFJlc3BvbnNl 43342
+CWNvZGU= 43343
+RHVtbXk= 43344
+IGFjYWRlbXk= 43345
+Lm1vdmll 43346
+IGluY29ycmVjdGx5 43347
+IGN5Yw== 43348
+KFVuaXR5RW5naW5l 43349
+CWNhbGxiYWNr 43350
+IFNhdGFu 43351
+IEZVTkM= 43352
+IGNoYW50 43353
+IEhlYWx0aHk= 43354
+OicsCg== 43355
+U2hpcHBpbmc= 43356
+X21j 43357
+IER5bGFu 43358
+IFByb2R1Y2Vy 43359
+IHJlc3B1ZXN0YQ== 43360
+IHBvbGlzaGVk 43361
+QnJvYWRjYXN0 43362
+IGJhbGFuY2luZw== 43363
+IFNsaWRl 43364
+IENhcHM= 43365
+c3RpbGw= 43366
+IGhhcHBpZXI= 43367
+IEdvc3BlbA== 43368
+dHJhbg== 43369
+LnBhdGhuYW1l 43370
+QWN0aXZlU2hlZXQ= 43371
+IENoYW5n 43372
+PlwK 43373
+Um9ib3Q= 43374
+SnNvbk9iamVjdA== 43375
+IERG 43376
+IFByb2Nlc3Nvcg== 43377
+X3Nob3VsZA== 43378
+LnByb3RvYnVm 43379
+LXVzZXJz 43380
+IGVtYnJ5 43381
+Rk9OVA== 43382
+IHN0YXJ0dXBz 43383
+IERhdGFTb3VyY2U= 43384
+KSM= 43385
+dXJvcw== 43386
+X0NvbG9y 43387
+IHN0YW5kYWxvbmU= 43388
+fVs= 43389
+amQ= 43390
+IGZvcmdpdmU= 43391
+IG5neA== 43392
+IEdlbmVyYWxseQ== 43393
+IGNvbmZpZ3VyYWJsZQ== 43394
+L29yZGVy 43395
+IHZhcw== 43396
+JykiOwo= 43397
+IFJS 43398
+IFRyb3k= 43399
+IGNvbXByb21pc2Vk 43400
+IFN3YW4= 43401
+aW50ZW5kZW50 43402
+Q2VudHJhbA== 43403
+X2tlZXBlcg== 43404
+IGFycXVpdm8= 43405
+IFJlYWRPbmx5 43406
+X2N1cnZl 43407
+a3Y= 43408
+ZW50aW4= 43409
+6LE= 43410
+IEV5 43411
+LmltcmVhZA== 43412
+IFBhbQ== 43413
+aWZmZQ== 43414
+YXRpdml0eQ== 43415
+eGJj 43416
+IGdyaW0= 43417
+LWZpbGxlZA== 43418
+bmFtZXNl 43419
+J106 43420
+IGF1cg== 43421
+IEdpYnNvbg== 43422
+Lk1vdXNlRXZlbnQ= 43423
+IGxhZG8= 43424
+YXZhZG9j 43425
+IGZhbWls 43426
+IE1vZGVy 43427
+ZnBz 43428
+44CA44CA 43429
+LWV4YW1wbGU= 43430
+IEFsemhlaW1lcg== 43431
+IFV0Zg== 43432
+X2FyZ3VtZW50cw== 43433
+Q29uY2x1c2lvbg== 43434
+dGV4dENvbnRlbnQ= 43435
+cmVtYWluaW5n 43436
+IGludGVycnVwdHM= 43437
+IEJhY2t1cA== 43438
+IE1vbmc= 43439
+IHJlY2VwdG9ycw== 43440
+aGlzdG9y 43441
+LmNvcm91dGluZXM= 43442
+IHNob3V0ZWQ= 43443
+QWxhcm0= 43444
+IGNvbWJ1c3Q= 43445
+IGdyb3Rl 43446
+dWx0dXJhbA== 43447
+KGlkcw== 43448
+LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0= 43449
+aXBsaW5hcnk= 43450
+T3B0cw== 43451
+IFlhbGU= 43452
+bG9jYWxTdG9yYWdl 43453
+IGVxdWl2YWw= 43454
+IEZsZWV0 43455
+XGI= 43456
+KnBp 43457
+IFFMYWJlbA== 43458
+5qE= 43459
+IHZ4 43460
+IEFDTA== 43461
+IHN1Y2Vzc28= 43462
+IHBlcmM= 43463
+IE5vdHJl 43464
+IGFuYXJjaA== 43465
+UmluZw== 43466
+c3Bi 43467
+IHN0cnBvcw== 43468
+c3RvcmVz 43469
+IE1hcGxl 43470
+KE1haW5BY3Rpdml0eQ== 43471
+KCIiKSk= 43472
+IHZpZXdIb2xkZXI= 43473
+UXVhZA== 43474
+IGlndWFs 43475
+b3JzY2hl 43476
+Lm1hcmdpbg== 43477
+IGluZGll 43478
+IGZyYW5j 43479
+IEZvcm1CdWlsZGVy 43480
+IFBhcnRpY2lw 43481
+LmZsYXNo 43482
+IHN0b3Jtcw== 43483
+VWx0 43484
+IGZlbg== 43485
+W25ldw== 43486
+RXZlcg== 43487
+PSIK 43488
+IGxvY2FsaXplZA== 43489
+X2ZvbGxvdw== 43490
+IG5hdmU= 43491
+IGRvbWluYW5jZQ== 43492
+KHRpbGU= 43493
+Sm91cm5hbA== 43494
+IFZD 43495
+IHBlbmV0cmF0aW9u 43496
+77yV 43497
+IGNvbXBhcnRtZW50 43498
+IGJpZHM= 43499
+Rm9ybWF0dGVk 43500
+KioqKioqLwoK 43501
+KGNpdHk= 43502
+4oCUaXQ= 43503
+W0M= 43504
+IHVzZUNhbGxiYWNr 43505
+YXVi 43506
+KT8u 43507
+IFZBUg== 43508
+IFNlYmFzdGlhbg== 43509
+IE1vc3M= 43510
+IGFidW5kYW50 43511
+R3JlZw== 43512
+0YLQsA== 43513
+X2Np 43514
+IGJpYmxp 43515
+Q1JN 43516
+IEF0dGVtcHQ= 43517
+aXNtZQ== 43518
+ZGFzaA== 43519
+44CO 43520
+X211 43521
+LkZvcm1hdHRpbmdFbmFibGVk 43522
+SW5kZWVk 43523
+LWRpcmVjdA== 43524
+IHN1Y2tpbmc= 43525
+IHBuZQ== 43526
+b2NhYnVsYXJ5 43527
+IFBhY2tlcnM= 43528
+Lk5hdmlnYXRpb24= 43529
+IHBpZWQ= 43530
+Y3JpYmluZw== 43531
+IFN0dWFydA== 43532
+LlRvRG91Ymxl 43533
+IFNlY29uZGFyeQ== 43534
+U2F2aW5n 43535
+IER1dA== 43536
+IE1hZGQ= 43537
+TWFnaWM= 43538
+LEg= 43539
+LmRvY3VtZW50RWxlbWVudA== 43540
+IEJTVA== 43541
+IGRpZmZlcnM= 43542
+IG1vcmVvdmVy 43543
+X25k 43544
+U0VBUkNI 43545
+0L/RgNCw0LI= 43546
+5rQ= 43547
+dG9NYXRjaA== 43548
+IGRlY3JlYXNpbmc= 43549
+LW1lbWJlcg== 43550
+YW1wdXM= 43551
+KGJvb3N0 43552
+RGFpbHk= 43553
+RGF0YUdyaWRWaWV3 43554
+IEh0dHBDb250ZXh0 43555
+IGhpcHA= 43556
+X3dvcmtlcnM= 43557
+LWxhbmd1YWdl 43558
+6ZM= 43559
+IGNvbnNpc3RlZA== 43560
+YXRoaW5n 43561
+IE1lcmN1cnk= 43562
+JGNvbnRlbnQ= 43563
+IHByYWN0aWNlZA== 43564
+IE1vZHVsZXM= 43565
+X0RBWQ== 43566
+IHdlYWtuZXNzZXM= 43567
+IExvZGdl 43568
+IG5hcg== 43569
+IE1hdGU= 43570
+IGpw 43571
+IEh0dHBIZWFkZXJz 43572
+IHNtbw== 43573
+IFRPS0VO 43574
+XSko 43575
+IGFxdWk= 43576
+c3dhZ2Vu 43577
+IHNydg== 43578
+CWFucw== 43579
+QXJvdW5k 43580
+IE1hbnVlbA== 43581
+IGZpY3Rpb25hbA== 43582
+IElNRw== 43583
+IC4n 43584
+IEJlcnJ5 43585
+IHdhbGxwYXBlcg== 43586
+c2V4dWFs 43587
+aWVybw== 43588
+IOeahA== 43589
+7IaM 43590
+QmFja2luZ0ZpZWxk 43591
+IEFkcmlhbg== 43592
+QkFTRVBBVEg= 43593
+IHJlcGVhdHM= 43594
+IGJsdWVz 43595
+IHVucHJlZGljdA== 43596
+X2NvbGw= 43597
+c3RhY2xl 43598
+IFR1bWJscg== 43599
+IEVsZg== 43600
+IGFzc3VyYW5jZQ== 43601
+IGNlbnN1cw== 43602
+IElNUE9SVA== 43603
+RU5ERVI= 43604
+YW5vcw== 43605
+ID0o 43606
+IEVsbGlz 43607
+IgoKCgo= 43608
+Lndpbg== 43609
+IEFib3Zl 43610
+YWxvbg== 43611
+X3RpY2s= 43612
+IHJlcHJlc2VudGF0aW9ucw== 43613
+IOaV 43614
+d2lk 43615
+IEFybXM= 43616
+TGlzdGE= 43617
+X2ZhaWx1cmU= 43618
+X2Nt 43619
+LkZsYXRBcHBlYXJhbmNl 43620
+IHRocm9uZQ== 43621
+UGF0Y2g= 43622
+IFZveQ== 43623
+ZW5nbA== 43624
+IG5lZ290aWF0aW5n 43625
+PmA= 43626
+IHNob290cw== 43627
+IEZQUw== 43628
+LlllYXI= 43629
+IEtpc3M= 43630
+ZW5jacOzbg== 43631
+cmVldGluZw== 43632
+RnJvbUZpbGU= 43633
+IHJlc2lnbmF0aW9u 43634
+2Lc= 43635
+IHR3aW5z 43636
+xrDhu6M= 43637
+IGdlYnJ1 43638
+LmdldENvbnRlbnQ= 43639
+LlRyZWU= 43640
+IEVtcGxveWVlcw== 43641
+IEZJRkE= 43642
+IGNlcnRhaW50eQ== 43643
+KENs 43644
+IHRvdGFscw== 43645
+ZWRpdGFibGU= 43646
+4KWA 43647
+LlJlcG9ydGluZw== 43648
+TWFz 43649
+cXVpZXQ= 43650
+LnJ1bGVz 43651
+IFZP 43652
+Y29uZXhpb24= 43653
+LEs= 43654
+IGFsbG9jYXRvcg== 43655
+IFBvd2Rlcg== 43656
+XFJlcG9zaXRvcnk= 43657
+QmVhdA== 43658
+X3RpcG8= 43659
+IFsnJyw= 43660
+X0lOVFI= 43661
+IDw8PA== 43662
+PGhy 43663
+Iik9PQ== 43664
+dWdnYWdl 43665
+IENyYXc= 43666
+IMOpZ2FsZW1lbnQ= 43667
+IGdpbmdlcg== 43668
+IHByaW1lcmE= 43669
+IHByb2R1dG8= 43670
+bHRr 43671
+LlVzZXJOYW1l 43672
+IHN0cmVycm9y 43673
+bWl0aA== 43674
+X25i 43675
+IGRpc2NvbWZvcnQ= 43676
+J107Pz48Lw== 43677
+UVQ= 43678
+IGVydXB0 43679
+IERhbmlzaA== 43680
+XEFjdGl2ZQ== 43681
+X2FkYXB0ZXI= 43682
+IGJ1YmJsZXM= 43683
+cm9sbG8= 43684
+b3Jnb3Q= 43685
+0L3Ri9GF 43686
+VkVDVE9S 43687
+b2NvZGU= 43688
+IEJ1bGxz 43689
+IGJvaWw= 43690
+PiIpOw0K 43691
+ZHJvcElmRXhpc3Rz 43692
+IEJlZw== 43693
+X0hBTA== 43694
+IGNyb3NzQXhpc0FsaWdubWVudA== 43695
+IEV2aWRlbmNl 43696
+IHBlY3VsaWFy 43697
+IGluc3RpdHV0ZQ== 43698
+dmVpcw== 43699
+IGZmdA== 43700
+w4E= 43701
+IHpvZWt0 43702
+YW5hbHk= 43703
+IEhvbWVsYW5k 43704
+IHBlbmV0cg== 43705
+dWRkZW5seQ== 43706
+CWVsZW1lbnQ= 43707
+IEJyZW4= 43708
+IFRydWRlYXU= 43709
+IEN1YmFu 43710
+amFt 43711
+dXNsaW0= 43712
+X2V2 43713
+IHN0ZW1z 43714
+fSU= 43715
+neWniw== 43716
+IGJyYW5kaW5n 43717
+IGNvcnJlc3BvbmRlbmNl 43718
+LmpxdWVyeQ== 43719
+ouWNlQ== 43720
+IFJlYWRz 43721
+KEh0dHBTdGF0dXNDb2Rl 43722
+YXNzaW4= 43723
+KHNsb3Q= 43724
+IEdyYWR1YXRl 43725
+Ly8vPA== 43726
+IGluZm9ybWF0aW9ucw== 43727
+RU5BQkxF 43728
+IHB1aXM= 43729
+IGZpbmRlcg== 43730
+IEJyaXM= 43731
+IG5ldHRzdGVkZXI= 43732
+X21pZA== 43733
+IG9ncw== 43734
+IFN0ZXJsaW5n 43735
+IGFycm9n 43736
+c3RyZnRpbWU= 43737
+fAoK 43738
+IHZveA== 43739
+IFJlZ2FyZGxlc3M= 43740
+IGVzbw== 43741
+IENvbWZvcnQ= 43742
+LkJvb2xlYW5GaWVsZA== 43743
+IHVo 43744
+QUNZ 43745
+IHNxdWVleg== 43746
+IFZpYw== 43747
+Y29udHJv 43748
+Lmxv 43749
+IGlyZQ== 43750
+IENvbWVkeQ== 43751
+67Y= 43752
+IG9yaWdpbmF0ZWQ= 43753
+IHNoaXBtZW50 43754
+fG1heA== 43755
+X2d1aWQ= 43756
+bGV2YXRpb24= 43757
+0L3QsNGP 43758
+KHVuZGVmaW5lZA== 43759
+IEREUg== 43760
+IHNob290aW5ncw== 43761
+IExhdGlubw== 43762
+RU5ET1I= 43763
+IGF2ZXJhZ2luZw== 43764
+IGdyZWV0ZWQ= 43765
+IHRoZWF0ZXJz 43766
+0L7QtQ== 43767
+IGRC 43768
+IGdzdA== 43769
+IGRlZmluaXRl 43770
+LlN0b3JhZ2U= 43771
+Lmhlcg== 43772
+IGFmb3Jl 43773
+IFJlYWxpdHk= 43774
+IEdvZHM= 43775
+dmVyc2Vk 43776
+IGhhbmRzb21l 43777
+IGV4Y2x1ZGluZw== 43778
+KGFk 43779
+UXVvdGVz 43780
+IFNjaGVtZQ== 43781
+P3E= 43782
+IFRhbWls 43783
+VGlja3M= 43784
+IHBlc3Q= 43785
+J24= 43786
+IHBvcm5vZ3JhcGh5 43787
+X21vZGFs 43788
+IC0tLS0tLS0tLS0= 43789
+IGRpc3Bvc2FibGU= 43790
+RlJFRQ== 43791
+IHNoYXJr 43792
+Q0hF 43793
+IGRlcGljdGVk 43794
+IGRlbW9uc3RyYXRpb25z 43795
+IEtpbGxlZA== 43796
+IFJVTEU= 43797
+IG9ic2Vzc2Vk 43798
+IHNpbXBsaWZpZWQ= 43799
+UG9zdGFs 43800
+IGNvbmNlcHR1YWw= 43801
+IHBzdA== 43802
+TGFz 43803
+X1BST0pFQ1Q= 43804
+dWNjZWVkZWQ= 43805
+b2x1 43806
+xJ9p 43807
+IHBlcnNvbmFsaXRpZXM= 43808
+IHJlc2hhcGU= 43809
+IGVuY2xvc2Vk 43810
+CXB0cg== 43811
+IHR1dG9yaWFscw== 43812
+IGV4cGxvZGVk 43813
+X0RJUkVDVE9SWQ== 43814
+5YaF5a65 43815
+IGNhbm9u 43816
+IHJlY29nbmlzZQ== 43817
+UEFE 43818
+IEFwcHJveA== 43819
+IFJlc3RvcmU= 43820
+IEltcG9ydGFudA== 43821
+IGhlYXZpZXI= 43822
+LlNlcXVlbnRpYWw= 43823
+RWFydGg= 43824
+IE1pbGs= 43825
+LnNldFJlcXVlc3Q= 43826
+LnRlbQ== 43827
+IHJlY29uc3RydWN0 43828
+IHNrZXB0aWNhbA== 43829
+X1ByaXZhdGU= 43830
+QlVG 43831
+cXVh 43832
+OmE= 43833
+IHNlaw== 43834
+IGR3ZWxs 43835
+b3NzYQ== 43836
+IHJld2FyZGVk 43837
+0LjQuQ== 43838
+KHRvcGlj 43839
+X3BhcnRpdGlvbg== 43840
+IF9fX19fX19fX19fX19fX19fXw== 43841
+S2V5d29yZHM= 43842
+IEZyYW5jbw== 43843
+TGl0ZQ== 43844
+IG5ha2Vu 43845
+INC30LA= 43846
+T0JKRUNU 43847
+IGNyYWZ0cw== 43848
+IFN3YXA= 43849
+LlhuYQ== 43850
+LkNvbm5lY3Q= 43851
+IGJhbGNvbnk= 43852
+KHJlYWw= 43853
+IEJhcm5lcw== 43854
+Ymly 43855
+IFR3ZW50eQ== 43856
+YXlhbg== 43857
+YXRhcnM= 43858
+IFByb3BlbA== 43859
+IElobmVu 43860
+VXBncmFkZQ== 43861
+IGN1cmI= 43862
+LXNlY29uZA== 43863
+IG5lcGg= 43864
+LnByZXM= 43865
+7J6F 43866
+LnNlcQ== 43867
+IHBhZGRlZA== 43868
+Ij8= 43869
+amw= 43870
+44Os 43871
+Jyk8Lw== 43872
+IGNpdmlj 43873
+Z29ucw== 43874
+PmE= 43875
+Q29vcmRpbmF0ZXM= 43876
+IGVuYWN0ZWQ= 43877
+RU5UUw== 43878
+IGxhYw== 43879
+LmZpbmFs 43880
+IFBocFN0b3Jt 43881
+Y2FsbGVk 43882
+IGlucXVpcmllcw== 43883
+Lm1pZGRsZXdhcmU= 43884
+IERvd250b3du 43885
+Lyc7Cg== 43886
+IGtpbG9tZXQ= 43887
+YWNjZWw= 43888
+IHF1aWVu 43889
+d3N0cmluZw== 43890
+c2V0RGF0YQ== 43891
+IG1hbmVyYQ== 43892
+IG1vZHVsYXI= 43893
+cmltcA== 43894
+IHRhcmlmZnM= 43895
+4oCZaWw= 43896
+X1RIUk9X 43897
+L2NvbG9y 43898
+IEhUTUxFbGVtZW50 43899
+IGNhcnJv 43900
+IHByZXJl 43901
+IHBsb3R0aW5n 43902
+IFBvc2l0aXZl 43903
+IE1hY2hpbmVz 43904
+T1RFUw== 43905
+4bub 43906
+cGxlYXNhbnQ= 43907
+IGFsdGU= 43908
+IGFpbmRh 43909
+dGhlc2U= 43910
+IGNvcnM= 43911
+aXBheQ== 43912
+IEFkdmlzb3J5 43913
+IFJ1Ymlv 43914
+anE= 43915
+IGxpbWVzdG9uZQ== 43916
+IGRldGFjaGVk 43917
+6K6+572u 43918
+dGVuYW50 43919
+IERlcHRo 43920
+YWxvcmU= 43921
+INGB0YLRgNC+0Lo= 43922
+IEZPUkU= 43923
+IExheQ== 43924
+cHJlc2VudGF0aW9u 43925
+KScpOwo= 43926
+LnN1YnBsb3Rz 43927
+z4M= 43928
+Tk9X 43929
+R2Fy 43930
+aGFuZGxlcw== 43931
+YWJyYQ== 43932
+cHV0aWVz 43933
+IEVsZWN0cmljYWw= 43934
+TWlkZGxl 43935
+cm9waWM= 43936
+IEpE 43937
+IER5bg== 43938
+IEJyaXN0b2w= 43939
+IE1jQ2FydGh5 43940
+IHN0cmlrZXI= 43941
+IGVudW1lcmFibGU= 43942
+IEV2YW4= 43943
+LmRlZmF1bHRz 43944
+cXVlbmNlcw== 43945
+KXx8 43946
+CXRva2Vu 43947
+4peP 43948
+LWRyb3Bkb3du 43949
+U1RPUkU= 43950
+IEdyYXBoaWM= 43951
+KHBw 43952
+RXhwbA== 43953
+IHVwd2FyZHM= 43954
+IERpc3RyaWJ1dGVk 43955
+IFdFQg== 43956
+SmVy 43957
+aXNOYU4= 43958
+55Sf5oiQ 43959
+PlI= 43960
+w7xzc2Vu 43961
+ZWZz 43962
+IHVuY292ZXI= 43963
+IGx1ZA== 43964
+LmNhbGN1bGF0ZQ== 43965
+IGludHB0cg== 43966
+IG1pZGZpZWxkZXI= 43967
+LkhlYWRlcnM= 43968
+IG1m 43969
+ZXJlZg== 43970
+Lk1ldHJv 43971
+IFNwZWFraW5n 43972
+OmI= 43973
+IGNyeXB0b2N1cnJlbmNpZXM= 43974
+IGRlbW9ucw== 43975
+CUVYUEVDVA== 43976
+IHdpY2tlZA== 43977
+eW91dHViZQ== 43978
+OkludA== 43979
+IEhpbmRp 43980
+IENBVA== 43981
+INi5 43982
+cmFy 43983
+b21vcmU= 43984
+L3Blcg== 43985
+L2xpY2Vuc2U= 43986
+IHJlaW0= 43987
+IGF3YWl0aW5n 43988
+IGxldGhhbA== 43989
+IEVG 43990
+cm91bmRlZA== 43991
+IFBsYXRpbnVt 43992
+INCy0YHQtQ== 43993
+LmNvb3Jkcw== 43994
+LkRldmljZQ== 43995
+L2l0ZW0= 43996
+IFdlbm4= 43997
+Y29tcGlsZUNvbXBvbmVudHM= 43998
+IEtpbmRlcg== 43999
+LnJlbW92ZUl0ZW0= 44000
+IGFuZGE= 44001
+Ym5i 44002
+IHByYQ== 44003
+KHRyYW5zYWN0aW9u 44004
+IGVtYmFycmFzc2luZw== 44005
+CUJPT0w= 44006
+LmNvbnRlbnRWaWV3 44007
+IGV2ZW50ZGF0YQ== 44008
+YXRvcmU= 44009
+IHByb3ZpZGVkSW4= 44010
+aXJtYQ== 44011
+IHpvbmE= 44012
+X0hX 44013
+5pk= 44014
+IHN0b3Zl 44015
+IGNvdW50ZXJwYXJ0 44016
+X1Byb2R1Y3Q= 44017
+X01BTkFHRVI= 44018
+IGluZnJpbmc= 44019
+IEVSQQ== 44020
+X3BhcnR5 44021
+0ZE= 44022
+IGluaWNp 44023
+X1JlcXVlc3Q= 44024
+IG1pcmFjbGU= 44025
+IGNhbmNlbEJ1dHRvbg== 44026
+U3B5 44027
+YXTDsw== 44028
+IHBvbGlzaA== 44029
+IE5pY29sZQ== 44030
+LmRpc3BsYXlOYW1l 44031
+XFJlcXVlc3Rz 44032
+IHVzZUhpc3Rvcnk= 44033
+Um91dGVyTW9kdWxl 44034
+IHN0YXJlZA== 44035
+SURFUg== 44036
+0YPQvdC60YbQuA== 44037
+IG5vdGE= 44038
+JGFycg== 44039
+cGVjaWZpZWQ= 44040
+IHRvcHA= 44041
+X0RSSVZFUg== 44042
+L25n 44043
+5aA= 44044
+X3Rt 44045
+JXRpbWVvdXQ= 44046
+PHM= 44047
+ICgqKQ== 44048
+IEh0dHBSZXF1ZXN0 44049
+X1RSQUNL 44050
+KG5vdGU= 44051
+IEV4cGxvcmU= 44052
+X3NlcnY= 44053
+IOe7 44054
+QmluZGVy 44055
+KyIs 44056
+LmF0dA== 44057
+IEV0aGk= 44058
+IGPDs2RpZ28= 44059
+PSdc 44060
+LmxpbmVz 44061
+KE9m 44062
+5bCG 44063
+bWlzc2libGU= 44064
+IHbDqQ== 44065
+IGFjb3VzdGlj 44066
+IGNyYWZ0aW5n 44067
+bml0 44068
+LmJh 44069
+IEx1Y3k= 44070
+IGlQb2Q= 44071
+IHB1cGlscw== 44072
+LW1heA== 44073
+X3dy 44074
+KGNw 44075
+IFJFUE9SVA== 44076
+IGRucw== 44077
+IFJlZmVyZW5jZXM= 44078
+IHVuZGVydGFrZW4= 44079
+IGvDuGJlbmhhdm4= 44080
+IGNoYWk= 44081
+IENyb2F0 44082
+X0xvZw== 44083
+cm93bmVk 44084
+X21lZA== 44085
+CWRhdGU= 44086
+I19f 44087
+IGNvc3R1bWVz 44088
+IFJlcXVpcmVz 44089
+YWZmbGU= 44090
+54q25oCB 44091
+LVNlbWl0 44092
+ZWxhaWRl 44093
+0LXRgtC+0LQ= 44094
+IHBlc3RpYw== 44095
+IGRyYQ== 44096
+RE9DVU1FTlQ= 44097
+IC4uLg0K 44098
+fWB9Cg== 44099
+IEF1Y3Rpb24= 44100
+IERvY2s= 44101
+eHh4eHh4eHg= 44102
+KGdldFN0cmluZw== 44103
+hY0= 44104
+IGJvcmRlcldpZHRo 44105
+IE1hY2hpbmVyeQ== 44106
+IHByZWRpY3RhYmxl 44107
+LlNI 44108
+IGFtcGxpdHVkZQ== 44109
+LmZvclJvb3Q= 44110
+SU5hdmlnYXRpb24= 44111
+VGFibGVNb2RlbA== 44112
+YXR0cmli 44113
+IG1hbmV1dmVy 44114
+IGV4Y2F2 44115
+QkVSUw== 44116
+IGRhcGF0 44117
+IGluc3RhbGxhdGlvbnM= 44118
+LkFzeW5j 44119
+IHJheXM= 44120
+PeKAnQ== 44121
+Ow0NCg== 44122
+LmNyeXB0bw== 44123
+X2RiZw== 44124
+IEVudW1lcmFibGU= 44125
+T2ZTaXpl 44126
+X2Vwb2Nocw== 44127
+bXc= 44128
+TUVOVQ== 44129
+b3V0bGluZQ== 44130
+IFBhcGVycw== 44131
+PT09PT09PT09PT09Cg== 44132
+IHVuaWZvcm1z 44133
+IEdpZw== 44134
+LXBhY2thZ2U= 44135
+IEplbmtpbnM= 44136
+IEhvbWVQYWdl 44137
+LmlzU2VsZWN0ZWQ= 44138
+IG1lY2hhbmlj 44139
+TUs= 44140
+IFNvdW5kcw== 44141
+Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQo= 44142
+IHJlc2VhcmNoaW5n 44143
+IGluZm9z 44144
+b2dyYXBoaWNz 44145
+ZXJzZXQ= 44146
+KFsnLw== 44147
+IFRpbWJlcg== 44148
+LmFnZW50 44149
+LnRvSlNPTg== 44150
+X2NvbW1hbmRz 44151
+cGFyaW5n 44152
+X2FkanVzdA== 44153
+Lm5vbWU= 44154
+KGdsbQ== 44155
+U3RhdHVzQmFy 44156
+ZmlsZXBhdGg= 44157
+P+KAmQ== 44158
+IGRldGVjdGl2ZQ== 44159
+IHVuc2VyZXI= 44160
+IFRpYmV0 44161
+RU5ERUQ= 44162
+KHNlZWQ= 44163
+IHNuZWFr 44164
+IGFtb3I= 44165
+PSIvLw== 44166
+IFBhbnRoZXJz 44167
+YWxsYXg= 44168
+IExJVkU= 44169
+CURXT1JE 44170
+XT0t 44171
+IHRvcm5hZG8= 44172
+L21pbg== 44173
+IGx1bmdz 44174
+LWN1cnJlbnQ= 44175
+IEJvb2tpbmc= 44176
+5YiX6KGo 44177
+IGVuam95bWVudA== 44178
+4KSw 44179
+SkE= 44180
+dHlwZWQ= 44181
+LkJ0bg== 44182
+ZmF0 44183
+dWdhbA== 44184
+IFNoYXJlcw== 44185
+IGRpc2dy 44186
+IEJBUg== 44187
+IEZPWA== 44188
+T3Bjb2Rl 44189
+IFN6 44190
+a2V5ZG93bg== 44191
+aWN0aW9uYXJpZXM= 44192
+IGRldGFpbGluZw== 44193
+fSkpCg== 44194
+IHBvaw== 44195
+IGRlbW9uc3RyYXRpbmc= 44196
+IG5vdGF0aW9u 44197
+bGF5ZXJz 44198
+QGlm 44199
+IE5QUg== 44200
+LnN0cmljdEVxdWFs 44201
+IFJlY2lwZXM= 44202
+LlRlbnNvcg== 44203
+IGxpcXVvcg== 44204
+IGRlYnRz 44205
+LmVuZHNXaXRo 44206
+V2hlZWw= 44207
+LlBvcw== 44208
+Q1NW 44209
+JGFyaXR5 44210
+IHVuc3RhYmxl 44211
+KGxvc3M= 44212
+RU5TT1I= 44213
+IGVsZXZlbg== 44214
+IExvcGV6 44215
+IEhvcGtpbnM= 44216
+Y29ub20= 44217
+IFNldGg= 44218
+IHBvZW1z 44219
+UXVhbnQ= 44220
+IGdzbA== 44221
+IHN5cnVw 44222
+IHNpYmxpbmc= 44223
+IGNhc3M= 44224
+LXZvdXM= 44225
+w7Z0 44226
+X1BBVFRFUk4= 44227
+X1NFQ1RJT04= 44228
+ZXN0aW1hdGVk 44229
+dXBncmFkZQ== 44230
+Lm1vbmdvZGI= 44231
+IEJvYXQ= 44232
+X0NUWA== 44233
+IGZldGNoaW5n 44234
+dXN0aW4= 44235
+cGllbA== 44236
+TWFyZw== 44237
+UmVmbGVjdGlvbg== 44238
+IGR1Y3Q= 44239
+IE11bmljaXBhbA== 44240
+IGJ4 44241
+LkdldEN1cnJlbnQ= 44242
+bWxpbms= 44243
+IEFjY291bnRpbmc= 44244
+IEdlbmV2YQ== 44245
+X1Bvcw== 44246
+IHBhc3Nlcg== 44247
+IGhlYXJpbmdz 44248
+Y29tcGFu 44249
+IGZyYWdpbGU= 44250
+SW5pdGlhbGl6ZXI= 44251
+d2Fsa2Vy 44252
+Lk1hdGVyaWFs 44253
+IEh1bnRpbmc= 44254
+dHJ5c2lkZQ== 44255
+IGthdA== 44256
+IGNsZXJr 44257
+4Z8= 44258
+ZG9pbmc= 44259
+CWdyb3Vw 44260
+IHNhbmN0aW9u 44261
+Lmxi 44262
+IExhenk= 44263
+IENvbnN0cmFpbnQ= 44264
+UGFnaW5hdGlvbg== 44265
+IHBvdXZleg== 44266
+IEluZGljYXRlcw== 44267
+TUVS 44268
+IGNvdXJz 44269
+IHllYXJseQ== 44270
+IGdyb3NzZQ== 44271
+YWJicmV2 44272
+IERPTg== 44273
+IHByb2NlZWRlZA== 44274
+ZW50bGljaA== 44275
+IHByb3BlcnR5TmFtZQ== 44276
+IFRlYWNoaW5n 44277
+c3RhZHQ= 44278
+IGN1dG9mZg== 44279
+b3JuZXJz 44280
+IGFmcmljYQ== 44281
+IHJlbmRlcnM= 44282
+IFlhbmtlZXM= 44283
+IFRvb2xiYXI= 44284
+c3BhY2Vz 44285
+LmZpbGxTdHlsZQ== 44286
+IHNlZ3VuZG8= 44287
+X3N0cmxlbg== 44288
+LkZpcmViYXNl 44289
+5aSE 44290
+IG1lbnRpb25pbmc= 44291
+XCg= 44292
+IFZhbHZl 44293
+U2V0dGVy 44294
+IHNwYW5z 44295
+IEFsY29ob2w= 44296
+IExldHRlcnM= 44297
+XHhl 44298
+IFRL 44299
+X0JMRQ== 44300
+LmdldFJlc3VsdA== 44301
+PFBsYXllcg== 44302
+IFBhdHQ= 44303
+IGVhc2luZw== 44304
+IHR1cmtleQ== 44305
+IEZlbg== 44306
+Jyki 44307
+IGNvbmZpbmVk 44308
+IGluY2x1cw== 44309
+U3VwZXJ2aWV3 44310
+KHdpdGhJZGVudGlmaWVy 44311
+ZW5jaWFs 44312
+IHN0dWZmZWQ= 44313
+VGhldGE= 44314
+IGVjb25vbWlzdHM= 44315
+fSkpOwoK 44316
+Y29va2llcw== 44317
+IFJvb3Nl 44318
+IENoZWVzZQ== 44319
+IGZpY2hpZXI= 44320
+IGVuZm9yY2Vk 44321
+QUJC 44322
+bm/Fm2Np 44323
+X0FMTE9X 44324
+IHJlY3J1aXRlZA== 44325
+IGV4cGVuZGl0dXJl 44326
+LW5pZ2h0 44327
+IGFzc2VydE5vdE51bGw= 44328
+X2V4ZWN1dGU= 44329
+INiv 44330
+SU5ERVg= 44331
+X0ZNVA== 44332
+IHJlc2N1ZWQ= 44333
+IE1vbnRobHk= 44334
+IENvbnNlcnZhdGlvbg== 44335
+IEdlYg== 44336
+T2JhbWE= 44337
+RXBvY2g= 44338
+aWNpZXM= 44339
+IE9ydA== 44340
+IHNvaXQ= 44341
+KGljb24= 44342
+RnJpZW5kcw== 44343
+bW9s 44344
+IGdyb3VuZGVk 44345
+IENhdXNl 44346
+YWRlbmE= 44347
+V0VFTg== 44348
+IEx1bg== 44349
+SVRJVkU= 44350
+Lmxvb3A= 44351
+X3VudGls 44352
+IGNvcnI= 44353
+LmVkZ2Vz 44354
+IGh5cG90aA== 44355
+Y2hlZHVsaW5n 44356
+dHJhbnNsYXRvcg== 44357
+INCc 44358
+Um9t 44359
+44CRCgo= 44360
+IFhhbWFyaW4= 44361
+IHZpb2xhdGluZw== 44362
+LmFuY2hvcg== 44363
+LS0tCgo= 44364
+IHRyYWRlcg== 44365
+QURWRVJUSVNFTUVOVA== 44366
+IHVuc2VyZQ== 44367
+IERBTw== 44368
+IGJsb25k 44369
+IFBBVA== 44370
+Lmdsb2I= 44371
+IOi+kw== 44372
+IHNwbGl0dGluZw== 44373
+IHVuc3Vic2NyaWJl 44374
+IGF0bW9zcGhlcmlj 44375
+IFRyaW0= 44376
+IGNpdGF0aW9u 44377
+IGluZmVyZW5jZQ== 44378
+IEZ0 44379
+IERhcndpbg== 44380
+ZmluZE9uZQ== 44381
+IEdlbA== 44382
+KENvbnZlcnQ= 44383
+IGFjY2Vzc29y 44384
+O3RleHQ= 44385
+KHNvcnRlZA== 44386
+IGp1ZGdlZA== 44387
+KTtc 44388
+OnA= 44389
+IG1laW5l 44390
+IFNsaW0= 44391
+LkNvbW1hbmRz 44392
+IHBlcmNlaXZl 44393
+Y29ob2xpYw== 44394
+PERhdGE= 44395
+LmVudHJ5U2V0 44396
+IGFzc2VydEZhbHNl 44397
+IFBhdHJvbA== 44398
+ZW5zZW0= 44399
+xYLEhQ== 44400
+qKE= 44401
+V0lEVEg= 44402
+IFJlc2N1ZQ== 44403
+IFVJRg== 44404
+X1RIUkVTSE9MRA== 44405
+IE1pY2hlbA== 44406
+QVRFUklBTA== 44407
+b3BlbnNvdXJjZQ== 44408
+IERpYW5h 44409
+IGludml0ZXM= 44410
+X0JPRFk= 44411
+IHJlc2Vydm9pcg== 44412
+IHJvaQ== 44413
+Y3VzdA== 44414
+KHRj 44415
+77yBIik7Cg== 44416
+IGZlc3RpdmFscw== 44417
+IHBlcmZvcm1lcnM= 44418
+IGNsaW1iZWQ= 44419
+IGp1bmdsZQ== 44420
+U3RyaW5nTGVuZ3Ro 44421
+IHVubGF3ZnVs 44422
+aWVycmU= 44423
+dmVydGlzZW1lbnQ= 44424
+IHN0YWtlcw== 44425
+IGhhdHM= 44426
+TW9kaWZ5 44427
+IExFVFRFUg== 44428
+LkhpZGU= 44429
+IHN0YXR1dG9yeQ== 44430
+X3doaXRl 44431
+IFBlcmw= 44432
+dXRlbmJlcmc= 44433
+ZW1wbGU= 44434
+Lldvcmxk 44435
+IG92ZXJsb29rZWQ= 44436
+IGNvbmNsdWRlcw== 44437
+Lyo9PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09 44438
+LXdpc2U= 44439
+CXN0cmVhbQ== 44440
+cG9wdWxhdGlvbg== 44441
+IGV2ZW50bw== 44442
+IGlsbHVzdHJhdGlvbnM= 44443
+ZnRz 44444
+IGF1dG9m 44445
+IFByb2NlZHVyZQ== 44446
+IGRlc2VydmVk 44447
+LXRpbWVz 44448
+IGdvbA== 44449
+TlNFcnJvcg== 44450
+Y3Jlc3Q= 44451
+IFBha2lzdGFuaQ== 44452
+YW55Y2g= 44453
+Z2V0Q3VycmVudA== 44454
+IGxhcg== 44455
+bnRs 44456
+IFJlYmVjY2E= 44457
+IG1hdGVyaWE= 44458
+IGZpbmRCeQ== 44459
+L2Fk 44460
+Q2FsbGJhY2tz 44461
+IEFscw== 44462
+IEthdGll 44463
+IE9ic2VydmFibGVDb2xsZWN0aW9u 44464
+IERvY3VtZW50YXRpb24= 44465
+VHlwZWQ= 44466
+IEN1bHR1cmVJbmZv 44467
+IFRpbW90aHk= 44468
+IGxhdGVyYWw= 44469
+InR5cGU= 44470
+IHVuYXV0aG9yaXplZA== 44471
+IHRlYWNoaW5ncw== 44472
+IGRlYnVnZ2Vy 44473
+W3ZhbHVl 44474
+IGFsb3Jz 44475
+IHV6 44476
+IHNjYXR0ZXI= 44477
+IGRvd253YXJk 44478
+IG1pZ2xp 44479
+c3RhdHVzQ29kZQ== 44480
+ICgpKQ== 44481
+IE1X 44482
+INC80L7Qtg== 44483
+Uk9TUw== 44484
+LmJ1Zg== 44485
+IGZhaXJ5 44486
+IEluZnJhc3RydWN0dXJl 44487
+PT4i 44488
+dGxlbWVudA== 44489
+JCgi 44490
+RnJvbVN0cmluZw== 44491
+IEJpbGQ= 44492
+IGNvbnZlbnRpb25z 44493
+X25hdGl2ZQ== 44494
+IEluc3BlY3Rvcg== 44495
+IFBpc3Q= 44496
+dWJhcg== 44497
+IHJlZ3M= 44498
+IFBpbG90 44499
+VGh1cw== 44500
+Picr 44501
+IGNlbGE= 44502
+Lm5ld3M= 44503
+KFByb2R1Y3Q= 44504
+TGl2aW5n 44505
+UnVzc2lh 44506
+IGZhY2V0 44507
+ZXRpY2Fs 44508
+IFsnJA== 44509
+L1s= 44510
+IERpcmU= 44511
+IGdhc2Vz 44512
+IElORk9STUFUSU9O 44513
+IEVhdA== 44514
+IEZvcnVtcw== 44515
+IENoYXJhY3RlcnM= 44516
+X21ldA== 44517
+IOyLnA== 44518
+IGtpbmdz 44519
+YWNoaWU= 44520
+IExhbWJkYQ== 44521
+IHRpbWVycw== 44522
+IExpZ2h0aW5n 44523
+IENhc2V5 44524
+YWRkaXI= 44525
+YW5kZXg= 44526
+LmFuc3dlcg== 44527
+IEhpcA== 44528
+IFByaW5jaXA= 44529
+U3RhcnREYXRl 44530
+IOOAjA== 44531
+dHJlcw== 44532
+ICYj 44533
+Lk1heFZhbHVl 44534
+IFByb2JsZW1z 44535
+IGxhdGV4 44536
+T2ZDbGFzcw== 44537
+IEx5bm4= 44538
+Ly8n 44539
+IHZveWFnZQ== 44540
+IHNodXR0bGU= 44541
+IFJvbGxlcg== 44542
+IFJ1bnRpbWVFcnJvcg== 44543
+dXlh 44544
+RGlj 44545
+CWJ1aWxkZXI= 44546
+IGJ1bGx5aW5n 44547
+IHNpbXBsZXN0 44548
+LmNhbGxlZA== 44549
+IExS 44550
+IG1vcmFsaXR5 44551
+IHN0dXJkeQ== 44552
+dHJhY2tpbmc= 44553
+LnN3YWdnZXI= 44554
+X0JJTkQ= 44555
+SVRPUg== 44556
+LXVybGVuY29kZWQ= 44557
+INGF 44558
+IFRyaW5pdHk= 44559
+IHRyYXBz 44560
+IHwt 44561
+IHNldFRleHQ= 44562
+IGJhcmdhaW4= 44563
+IGJyYWtlcw== 44564
+LmdldENvZGU= 44565
+IG1pZ3JhdGU= 44566
+IHJpYmJvbg== 44567
+KXJldHVybg== 44568
+IGNoYXJnZXI= 44569
+YWNvbQ== 44570
+QURJVVM= 44571
+IEFtYmFzc2Fkb3I= 44572
+LWFmdGVy 44573
+IGFubmk= 44574
+CXNwaW4= 44575
+Q29uY2VwdA== 44576
+IEhlbmRlcnNvbg== 44577
+IEhPU1Q= 44578
+LnJhbms= 44579
+IE5vcnRoZWFzdA== 44580
+IGJlcmxpbg== 44581
+IHJlcXVpcw== 44582
+LmZlZWQ= 44583
+IHNvdXJjZU1hcHBpbmc= 44584
+IFJlbmNvbnRyZQ== 44585
+LmFqYXg= 44586
+bmVzdGpz 44587
+IHRyZWs= 44588
+IE5hY2lvbmFs 44589
+ICZb 44590
+IHBheWFibGU= 44591
+b3J0ZXg= 44592
+IGRlcHQ= 44593
+ZmllbGROYW1l 44594
+IGNvbXBsZXRlcw== 44595
+IFJWQQ== 44596
+IG9uaW9ucw== 44597
+YWxpZ25tZW50 44598
+Rm9ybWF0cw== 44599
+ICd7JA== 44600
+SGFzaFNldA== 44601
+IEJvZA== 44602
+LkludmFyaWFudEN1bHR1cmU= 44603
+IHNldHRsZW1lbnRz 44604
+IGh5ZHI= 44605
+LnVwZGF0ZWQ= 44606
+dmVudGg= 44607
+KHNlY29uZHM= 44608
+PSIvIg== 44609
+IHdlYnBhZ2U= 44610
+KAoK 44611
+IHRpcg== 44612
+IHRvZXM= 44613
+IEJyaWNr 44614
+IGFtYml0aW9u 44615
+UG90 44616
+PW1heA== 44617
+RVRJTUU= 44618
+IGRlcG90 44619
+Y2FsbHM= 44620
+IE5vcndlZ2lhbg== 44621
+YDo= 44622
+IGJ1cmdlcg== 44623
+IHByb2Zlc3NvcnM= 44624
+IEFsbG9jYXRl 44625
+LXRoaXJkcw== 44626
+LWNoYXJ0 44627
+IGZvcmQ= 44628
+Kk4= 44629
+LmtvdGxpbg== 44630
+IHBhcGVyd29yaw== 44631
+IERFVklDRQ== 44632
+JUAiLA== 44633
+cmVzcGVjdA== 44634
+KG1w 44635
+6auY 44636
+LWlm 44637
+IGN1c2hpb24= 44638
+b2JvdA== 44639
+IHBhcmM= 44640
+U1BBQ0U= 44641
+IE5ldGFueWFodQ== 44642
+IHNlbGZpc2g= 44643
+ZmVhdA== 44644
+IGNsaWVudGVz 44645
+LXRvb2xz 44646
+IHBvcmNo 44647
+IGpx 44648
+LnZlcmJvc2U= 44649
+IGxpYmVyYWxz 44650
+XSkKCgo= 44651
+cGllcw== 44652
+Tm90Qmxhbms= 44653
+KHRlcm0= 44654
+yJtp 44655
+X1BhcmFtcw== 44656
+Lm5vcm1hbGl6ZQ== 44657
+QnVsbGV0 44658
+QVNJQw== 44659
+KGhleA== 44660
+X2NsaWVudGU= 44661
+Kyw= 44662
+X0RJ 44663
+IGZvcnRoY29taW5n 44664
+fSIpXQo= 44665
+c2Vv 44666
+VW0= 44667
+Pk5hbWU= 44668
+IGNvbWZvcnRhYmx5 44669
+aXJlY3Rpb25hbA== 44670
+V0lUSA== 44671
+L3By 44672
+IFBvb3I= 44673
+IFZpdGFtaW4= 44674
+dmlj 44675
+R0g= 44676
+IHByaW9yaXQ= 44677
+IE5O 44678
+IENsb3NlZA== 44679
+pO0= 44680
+IGlzT3Blbg== 44681
+XENvbnNvbGU= 44682
+QW5kRmVlbA== 44683
+LlNVQ0NFU1M= 44684
+X09QRVJBVElPTg== 44685
+cG9sYXRpb24= 44686
+IFRhcw== 44687
+cHN6 44688
+Picu 44689
+Q1VSUkVOVA== 44690
+VmVuZG9y 44691
+aG9zdHM= 44692
+IEVyZA== 44693
+PnRhZ2dlcg== 44694
+IHNvdXJjZU1hcHBpbmdVUkw= 44695
+IG1hcmF0aG9u 44696
+X2Nsb3NlZA== 44697
+IGV4ZW1wdGlvbg== 44698
+IHJlY29nbml6ZXM= 44699
+aWRlc2hvdw== 44700
+JyQ= 44701
+KCcvJyk7Cg== 44702
+bWl0cw== 44703
+d2Fyeg== 44704
+IENoZXJyeQ== 44705
+taw= 44706
+bm9y 44707
+cG9ydGU= 44708
+IHds 44709
+X2JhY2t1cA== 44710
+LmdldEJvb2xlYW4= 44711
+LmdldFJlc291cmNl 44712
+IGRlZmluaXRpdmU= 44713
+LkVkaXRUZXh0 44714
+IHPDrQ== 44715
+LkNPTlQ= 44716
+IFBMQVlFUg== 44717
+LmNhcmRz 44718
+IFNob3Jl 44719
+KCcvJykK 44720
+Y2x1aXI= 44721
+V2ViRHJpdmVy 44722
+KG1vbnRo 44723
+LXJlbGVhc2U= 44724
+IGluc3BlY3Rvcg== 44725
+5aM= 44726
+IE5G 44727
+X2NsaXA= 44728
+5a2Q 44729
+IGludGVyYWN0aW5n 44730
+LnRtcA== 44731
+ICcnJwoK 44732
+IGRlZQ== 44733
+IGZyb3N0 44734
+Il0pKQo= 44735
+IFBsYWNlcw== 44736
+VGhyb3dz 44737
+Zm9yaw== 44738
+L2RheQ== 44739
+aVBob25l 44740
+IE1JQw== 44741
+IGZvbGRpbmc= 44742
+IGNyb3Jl 44743
+IENoaWVmcw== 44744
+cGhlcmljYWw= 44745
+KHByaWNl 44746
+LldyaXRlU3RyaW5n 44747
+IGV4aXRpbmc= 44748
+XScsCg== 44749
+aWdodGluZw== 44750
+SW5ncmVkaWVudA== 44751
+KHZlcnRleA== 44752
+IHNjcm9sbFZpZXc= 44753
+aGY= 44754
+Om5ldw== 44755
+U0VO 44756
+c2VjdG9y 44757
+IHNwaW5z 44758
+IFNjaGVkdWxlcg== 44759
+b3RlY2hu 44760
+c2VtaWNvbG9u 44761
+Rm9udE9mU2l6ZQ== 44762
+IFNwZWNpZmljYWxseQ== 44763
+ZmxhbW0= 44764
+Lk9iamVjdElk 44765
+IGNvbnRh 44766
+X3Blcm1pc3Npb25z 44767
+CUZST00= 44768
+SUNPREU= 44769
+L2tn 44770
+IEhvdGVscw== 44771
+LW1lZA== 44772
+IERpbg== 44773
+IG5hdnk= 44774
+Z2V0UGFyYW0= 44775
+IG1lbmQ= 44776
+IHBvcnRyYXllZA== 44777
+IE1ldHJvcG9saXRhbg== 44778
+UGFpbnRlcg== 44779
+IHJlZmVycmFs 44780
+X2dvb2Q= 44781
+IG1hcnZlbA== 44782
+b3NhaWM= 44783
+Pigm 44784
+LnVy 44785
+IGVzdG9z 44786
+V2lsbGlhbQ== 44787
+IHRpbWJlcg== 44788
+IHF1ZWxxdWVz 44789
+IERvY3VtZW50cw== 44790
+LlhhbWw= 44791
+IGJhdGNoZXM= 44792
+6YGT 44793
+IFJlbGVhc2Vk 44794
+VGFpbA== 44795
+Q09PS0lF 44796
+aGVpZA== 44797
+X3N0YXRpb24= 44798
+IFZpYQ== 44799
+U2FsZQ== 44800
+IFJlcGVhdA== 44801
+IHByb21pbg== 44802
+IFpv 44803
+LWZvcndhcmQ= 44804
+IElvbg== 44805
+aXRhcnk= 44806
+IGp1cw== 44807
+LXJlcXVlc3Q= 44808
+IHByb3VkbHk= 44809
+IFN0cmVhbWluZw== 44810
+KE1vdXNlRXZlbnQ= 44811
+IFNwcmludA== 44812
+X3JvdGF0aW9u 44813
+UmVwb3NpdG9yaWVz 44814
+IHRhcnQ= 44815
+INGB0LI= 44816
+IG1hcHBpbmdz 44817
+6Ko= 44818
+Q3U= 44819
+Q3ljbGU= 44820
+IGJ1bg== 44821
+CWx1YQ== 44822
+44OJ 44823
+ICgoIQ== 44824
+IGNvbGxlY3RpdmVseQ== 44825
+IENvbmQ= 44826
+IHdzenlzdA== 44827
+KGxpYg== 44828
+b3BlbmhhZ2Vu 44829
+X3NraXA= 44830
+LkNvbHVtbkhlYWRlcg== 44831
+6YI= 44832
+cGVyaWVuY2Vk 44833
+j+i/sA== 44834
+X3Byb3Bz 44835
+IGNvbnRyYWNl 44836
+IG1hdGNodXA= 44837
+YWJldGlj 44838
+Lm1lbWJlcnM= 44839
+UkVDVA== 44840
+KGRhdA== 44841
+IHNvZw== 44842
+cmVub20= 44843
+X01ldGhvZA== 44844
+Q3VzdG9tZXJz 44845
+ZnVsbG5hbWU= 44846
+Wk4= 44847
+cmV0cnk= 44848
+IGthcA== 44849
+IE5ldQ== 44850
+6Io= 44851
+YWRkQ2hpbGQ= 44852
+d2lsbFJldHVybg== 44853
+X3Blcm1hbGluaw== 44854
+IGVuZXJnZXRpYw== 44855
+IFdldA== 44856
+IE1vcnI= 44857
+IGdjZA== 44858
+Y291bnRz 44859
+LHR5cGU= 44860
+ZGln 44861
+KExvZ2lu 44862
+IGNyYWNrcw== 44863
+IGJhY3RlcmlhbA== 44864
+IE1lYXQ= 44865
+IEFybXN0cm9uZw== 44866
+IEJyb256ZQ== 44867
+IGFwcHJveGltYXRl 44868
+X2RpcnM= 44869
+bGlnYQ== 44870
+xYJhZA== 44871
+IGtpbmRuZXNz 44872
+IGNvbnRyZQ== 44873
+IEVWRVJZ 44874
+TUVU 44875
+IGFubm91bmNlbWVudHM= 44876
+Z3Bpbw== 44877
+IFdhaXRGb3JTZWNvbmRz 44878
+IFBob3Rvc2hvcA== 44879
+IGRpc2NvbnRpbg== 44880
+L2Rk 44881
+IHRvcG9sb2d5 44882
+YW5pY2Fs 44883
+LmludGVyZmFjZQ== 44884
+YXVjb3Vw 44885
+Lkhhc2hTZXQ= 44886
+QVJJQU5U 44887
+KHJvdXRlcw== 44888
+IFRlaA== 44889
+IGh5cGU= 44890
+XSIpLg== 44891
+IHNsYW0= 44892
+IGJyb3Ro 44893
+LWludGVy 44894
+IFJpZA== 44895
+LW1hbmFnZXI= 44896
+Q2FuY2VsYXI= 44897
+IFBhZ2luYXRpb24= 44898
+IHNvdW5kdHJhY2s= 44899
+IHBvc3Rlcmlvcg== 44900
+IHNjcnVi 44901
+Y3JlYXRpbmc= 44902
+LSo= 44903
+aXJ0ZWVu 44904
+LmR5 44905
+LnN5bW1ldHJpYw== 44906
+ICIiLg== 44907
+PT09PT09PT09PT09PT09 44908
+IGNoYXNzaXM= 44909
+IG51bWJlck9mUm93cw== 44910
+RGV2ZWxvcGVy 44911
+X2JpbnM= 44912
+IE9VUg== 44913
+cmllYg== 44914
+UHJvcw== 44915
+IHdpxJk= 44916
+ImQ= 44917
+IGFzeW5jaW8= 44918
+emVpZ2Vu 44919
+X3NwaQ== 44920
+LkFMTA== 44921
+IHNjcmV3cw== 44922
+Q2hpbmVzZQ== 44923
+IGFwaUtleQ== 44924
+IHVuc3VjY2Vzc2Z1bA== 44925
+IFNlYWhhd2tz 44926
+T1JH 44927
+56ug 44928
+IHByb2Zlc3Npb25hbGx5 44929
+IENvdXBvbg== 44930
+5a2X5q61 44931
+Q29udmVudGlvbg== 44932
+IHBvbHlt 44933
+5omL 44934
+IHNhbHZhdGlvbg== 44935
+IGVuZ2luZWVyZWQ= 44936
+IFdyZXN0 44937
+IEdDQw== 44938
+IHdhcm1lcg== 44939
+TGF5b3V0Q29uc3RyYWludA== 44940
+IGFnZ3Jhdg== 44941
+U2NyaXB0cw== 44942
+dmVudHVyZQ== 44943
+IHJlZnJpZ2VyYXRvcg== 44944
+IGlubm92YXRpb25z 44945
+IFJ1bm5lcg== 44946
+TklD 44947
+IFJvbGxpbmc= 44948
+Q29udHJvbEV2ZW50cw== 44949
+IGxvb3M= 44950
+cGFj 44951
+CXBhbmVs 44952
+ZWZl 44953
+IEJ1ZGRoYQ== 44954
+LS0tLS0tLS0tLS0tLS0K 44955
+5bqT 44956
+KGZvcktleQ== 44957
+IGx1bWlu 44958
+ICg/ 44959
+IEFJRFM= 44960
+LHVzZXI= 44961
+aW1pZW50b3M= 44962
+Y29udGVudFR5cGU= 44963
+YW50bHI= 44964
+6aY= 44965
+IFdlbHQ= 44966
+UHJvZHVjdGlvbg== 44967
+bWlnaHQ= 44968
+IFZJSQ== 44969
+Iiwo 44970
+IG9ic2VydmluZw== 44971
+IGRlbGliZXJhdGU= 44972
+KGNvbnRyb2w= 44973
+IHdpdGhk 44974
+IHNlbWFuYQ== 44975
+U1RBQ0s= 44976
+dWNoZW4= 44977
+TmljZQ== 44978
+IERldXRzY2hsYW5k 44979
+IFNwZWNpZmllcw== 44980
+ZG1h 44981
+aXppbw== 44982
+IEZhY3Rz 44983
+X3BvcHVw 44984
+IERpcmVjdG9ycw== 44985
+ezo= 44986
+W1I= 44987
+INGN0LvQtdC80LXQvdGC 44988
+IHBsYXQ= 44989
+IGRpcmVjdGluZw== 44990
+5LiJ 44991
+IEdpbGJlcnQ= 44992
+4oCmLgoK 44993
+LnFtbA== 44994
+IHRoZXJlYWZ0ZXI= 44995
+IGRpc3Bvc2l0aW9u 44996
+ZHJhZnQ= 44997
+IHN1cmdlb24= 44998
+IEluc2lkZXI= 44999
+QmxlbmQ= 45000
+IFRyZXY= 45001
+dHJpbnNpYw== 45002
+VG9waWNz 45003
+cmlldmU= 45004
+X0ZJTEVOQU1F 45005
+IGF1dHJlcw== 45006
+Sm9zZQ== 45007
+UHJvZHVjZXI= 45008
+ZXJ1cw== 45009
+IHBldGl0 45010
+IE5FWFQ= 45011
+IEZpbHRlcnM= 45012
+IHJlcGxpY2F0ZQ== 45013
+Il0pLg== 45014
+IGxlbmRlcnM= 45015
+XSIsCg== 45016
+O2NoYXJzZXQ= 45017
+Q3BwT2JqZWN0 45018
+IGZsb3JhbA== 45019
+IFRpcG8= 45020
+IGNpcmN1aXRz 45021
+ZWFzeQ== 45022
+KCYk 45023
+aXR0YQ== 45024
+ZXJ5bA== 45025
+X0NPTU1PTg== 45026
+J319Pgo= 45027
+LWJhY2tlZA== 45028
+KHZhcmlhYmxl 45029
+KEluZGV4 45030
+IHZvaXI= 45031
+X2xvY2F0aW9ucw== 45032
+Kyspew== 45033
+IExvdWlzdmlsbGU= 45034
+IGdyYXRpdHVkZQ== 45035
+Lk1vY2tpdG8= 45036
+IFBvd2Vycw== 45037
+aWV1cnM= 45038
+IGdlb2dyYXBoaWM= 45039
+cmFsZQ== 45040
+IGNyYQ== 45041
+IFNwdXJz 45042
+aXBoZXJ0ZXh0 45043
+QUNJT04= 45044
+LWNvbW1vbg== 45045
+IHZpY3Rvcmllcw== 45046
+IEZpbmFscw== 45047
+LnNodWZmbGU= 45048
+LW1pbGxpb24= 45049
+X1BST0M= 45050
+YXNzdW1l 45051
+IGlscw== 45052
+REJD 45053
+Qm9vdFRlc3Q= 45054
+IGxhdm9y 45055
+LnRlc3Rpbmc= 45056
+LmFzdA== 45057
+Il0v 45058
+bW9pZA== 45059
+IHF1YWxpZmljYXRpb24= 45060
+Z2VzY2g= 45061
+CXB1dA== 45062
+IGFpcnBvcnRz 45063
+Skk= 45064
+VGVhY2hlcg== 45065
+X3VuaWZvcm0= 45066
+IG5hbWE= 45067
+IEJhc3Q= 45068
+ZXJ0eXBl 45069
+Y2FwdHVyZQ== 45070
+Z2V0QWxs 45071
+IFJleW5vbGRz 45072
+b29sZWQ= 45073
+LmNvbW1lbnRz 45074
+IGNoaW4= 45075
+KS4q 45076
+INC40LvQuA== 45077
+dGds 45078
+dWRvcw== 45079
+IGTDrWFz 45080
+Y2hhaQ== 45081
+LnByb2dyYW0= 45082
+IHBzeg== 45083
+CWljb24= 45084
+cGhpbA== 45085
+ZW50cmFs 45086
+X1dSQVA= 45087
+b3Zp 45088
+IG5vc3RhbGc= 45089
+SW5maW5pdHk= 45090
+CXlpZWxk 45091
+IHZpdGFtaW5z 45092
+UXVhdGVybmlvbg== 45093
+U2luaw== 45094
+X2dvb2Rz 45095
+IC4uLi4uLi4u 45096
+IFdpbmdz 45097
+dXJpZGFk 45098
+LXN0b3J5 45099
+Il0pCgo= 45100
+aWRlbGl0eQ== 45101
+VHlwZURlZg== 45102
+R3Rr 45103
+IO2M 45104
+X01haW4= 45105
+IGNoZXo= 45106
+IFJhdmVu 45107
+IHBheXJvbGw= 45108
+IGZyZWVsYW5jZQ== 45109
+TExV 45110
+IE1lbmQ= 45111
+ZWRheQ== 45112
+QXBpTW9kZWxQcm9wZXJ0eQ== 45113
+LkZvcm1Cb3JkZXJTdHlsZQ== 45114
+IGVjb25vbWlzdA== 45115
+c3RhbmJ1bA== 45116
+IGZyZWlnaHQ= 45117
+LUFnZW50 45118
+KG1ldGE= 45119
+IHN5bW1ldHJ5 45120
+ICcuLg== 45121
+LkNhbGVuZGFy 45122
+LWF1dA== 45123
+Z2Y= 45124
+cGVudA== 45125
+eWNsb3BlZGlh 45126
+IHdpc2hpbmc= 45127
+CgoKCgoKCgoKCgoK 45128
+IGdlbnRsZW1hbg== 45129
+IOqz 45130
+PSM= 45131
+IGxlY3R1cmVz 45132
+4oCcSW4= 45133
+ICFf 45134
+IGhi 45135
+IFZlbmRvcg== 45136
+UmVjZW50bHk= 45137
+X25vdGVz 45138
+5o+Q56S6 45139
+Ik15 45140
+SGVhZGVyc0hlaWdodA== 45141
+X1NP 45142
+IHVud2lsbGluZw== 45143
+IHN1cGVyaGVybw== 45144
+Z2lv 45145
+cHN5 45146
+IFBlZXI= 45147
+amF2YXg= 45148
+JmFwb3M= 45149
+IENyaXNpcw== 45150
+b3JkaW5hbA== 45151
+TWVtY3B5 45152
+KysrKysrKysrKysrKysrKw== 45153
+LXZhbA== 45154
+IHdvcmtib29r 45155
+LWFw 45156
+PWs= 45157
+IG1ldGFsbGlj 45158
+X3BlZXI= 45159
+QnlQcmltYXJ5S2V5 45160
+X1NE 45161
+dWF0b3I= 45162
+X1NIQURFUg== 45163
+KU1hdGg= 45164
+LlRyYW5zZm9ybQ== 45165
+IGNvd3M= 45166
+UGhp 45167
+IENsZW0= 45168
+KF8oIg== 45169
+IEx1ZA== 45170
+LWRlbGF5 45171
+IFNlY3VyaXRpZXM= 45172
+IE9ydGhvZG94 45173
+U3ltZm9ueQ== 45174
+KHJlcG9ydA== 45175
+IGVudGVydGFpbg== 45176
+RVBT 45177
+aXpvcGg= 45178
+ZXh1YWw= 45179
+SVJE 45180
+5LuO 45181
+IGxpdGg= 45182
+IHNhbml0aXpl 45183
+IGZlbWluaW5l 45184
+SVNCTg== 45185
+LmF1dGhlbnRpY2F0aW9u 45186
+X3BpcGVsaW5l 45187
+L2NvbnN0YW50cw== 45188
+IENPTkY= 45189
+IGx1Y3I= 45190
+cmljaWE= 45191
+LnR0Zg== 45192
+LnNldENvbnRlbnQ= 45193
+IHN0YW4= 45194
+b3JlYW4= 45195
+IExsb3lk 45196
+LnJhd1ZhbHVl 45197
+IGdvcg== 45198
+IEJyb3ducw== 45199
+UmVncmVzc2lvbg== 45200
+IGxvd2VyaW5n 45201
+bmFpc3NhbmNl 45202
+IGJsb3dz 45203
+IGFtYXplZA== 45204
+IHVucmVsYXRlZA== 45205
+UmV2aWV3cw== 45206
+IHJ1Ynk= 45207
+IE1vZGlmaWVy 45208
+IGdpYW50cw== 45209
+LnRocmVhZA== 45210
+IGNvbnRhaW5tZW50 45211
+IFN0YXJ0Q29yb3V0aW5l 45212
+dW1hdA== 45213
+b3JlbGVhc2U= 45214
+IFJhbmR5 45215
+QGVuZGlm 45216
+RGlnZXN0 45217
+IHN1YnVyYmFu 45218
+PSIpOwo= 45219
+IGFubm9uY2U= 45220
+LnZhcmlhYmxl 45221
+XEZvdW5kYXRpb24= 45222
+IGFjcmU= 45223
+VmFu 45224
+IHR1cGxlcw== 45225
+ZG5z 45226
+IFN0YW5kaW5n 45227
+X2xhcmdl 45228
+IGJveGluZw== 45229
+U3VwcG9ydEFjdGlvbkJhcg== 45230
+IEZvcnR1bmU= 45231
+IFJ1bQ== 45232
+X211bHRpcGxl 45233
+YXJjaGljYWw= 45234
+IGZ3cml0ZQ== 45235
+X3F1b3Rl 45236
+IGZvb2xpc2g= 45237
+IGNvbXByaXNpbmc= 45238
+INC+0L8= 45239
+LXNlbGVjdGVk 45240
+dmY= 45241
+bWFpZA== 45242
+TmFtYQ== 45243
+KGRhdGV0aW1l 45244
+IGluZGlyZWN0bHk= 45245
+Z2FydA== 45246
+Zml4dHVyZXM= 45247
+Y2hvcw== 45248
+IEhhbG8= 45249
+IHJlY3VycmluZw== 45250
+LW5ld3M= 45251
+dmls 45252
+IE51cnNpbmc= 45253
+LXByb2R1 45254
+IEhR 45255
+XEh0dHBGb3VuZGF0aW9u 45256
+ZW5jaQ== 45257
+YXVlbg== 45258
+IHZ5 45259
+b2NyYWN5 45260
+IGRlbGVnYXRpb24= 45261
+IGFzcGhhbHQ= 45262
+IHNldFNlbGVjdGVk 45263
+a29r 45264
+L3Jlc3Q= 45265
+bWV0aWNz 45266
+IE5TRGF0ZQ== 45267
+IHRyYXZlbGxlZA== 45268
+IHJlY2li 45269
+IG1pbWU= 45270
+Q0xJRU5U 45271
+IEdV 45272
+IEhBTkRMRQ== 45273
+L1E= 45274
+W3o= 45275
+IGJvdGhlcmVk 45276
+IEJCUQ== 45277
+w6dhcw== 45278
+X2V4YW1wbGVz 45279
+X0ZJTg== 45280
+IHdoaXRlQ29sb3I= 45281
+IGFzdHJvbm9t 45282
+LWRpcg== 45283
+IHNvdmVyZWlnbg== 45284
+IGJyZWV6ZQ== 45285
+IGlubmluZw== 45286
+IEVkbW9udG9u 45287
+Z2xp 45288
+LmJsb2dzcG90 45289
+anN4 45290
+IHZlcnNh 45291
+IE1vaGFtbWVk 45292
+LkpvYg== 45293
+LXRvZ2dsZXI= 45294
+INC/0L7Qu9GM0LfQvtCy0LDRgg== 45295
+YXJkb24= 45296
+IG5ld2Jvcm4= 45297
+IG5hdmFs 45298
+bm90ZXE= 45299
+IHR1bWJscg== 45300
+IGhlbnRhaQ== 45301
+IFR5cGljYWxseQ== 45302
+IGxvb3Q= 45303
+LlNwcml0ZQ== 45304
+RmxpZ2h0 45305
+IHdhdmVsZW5ndGg= 45306
+LXNr 45307
+IEVsbGU= 45308
+X2V4cG9ydHM= 45309
+INGP 45310
+IElI 45311
+aXpvcGhyZW4= 45312
+IO2B 45313
+X3ByaW1hcnk= 45314
+IG1vaXM= 45315
+IEJO 45316
+IHN5c3RlbWlj 45317
+IGRpZmVyZW50ZXM= 45318
+SU5DVA== 45319
+ICcnCgo= 45320
+JHE= 45321
+V2lkZ2V0SXRlbQ== 45322
+Y2xpZGU= 45323
+JGZpbGU= 45324
+TGVtbWE= 45325
+L3RhYmxl 45326
+YWdyaWQ= 45327
+IE1vbmdvREI= 45328
+aW50ZQ== 45329
+IGFwcHJlbnQ= 45330
+wq1pbmc= 45331
+LkRi 45332
+IMOC 45333
+aGFtbWVy 45334
+PScnOwo= 45335
+IGJyb2tlcnM= 45336
+aXRsZW1lbnQ= 45337
+c2VtYmxpZXM= 45338
+RWxl 45339
+e3g= 45340
+IGxhc3RuYW1l 45341
+PC0= 45342
+IGZsYXR0ZW4= 45343
+X2JhbmQ= 45344
+LlJvb3Q= 45345
+LnJlYWRGaWxlU3luYw== 45346
+PT09PT09 45347
+LnJ4 45348
+Pw0K 45349
+IG1ldGFwaG9y 45350
+VGk= 45351
+Y29udGU= 45352
+IGRlYml0 45353
+IGNvbnRlbXB0 45354
+Q3BwVHlwZQ== 45355
+5pSv 45356
+Rm9ybUZpZWxk 45357
+cmF0aW8= 45358
+b3NvcGhlcg== 45359
+IGltcGxhbnQ= 45360
+UFVSRQ== 45361
+IGFsdGE= 45362
+X21hbmFnZW1lbnQ= 45363
+IHJlZmluZQ== 45364
+IENoZWNrQm94 45365
+IENoYXJs 45366
+LXZlcnNpb24= 45367
+Y29uZGl0aW9uYWw= 45368
+dmVudWVz 45369
+IHJpZmxlcw== 45370
+IG9mZnNwcmluZw== 45371
+IG1pbGxpbmc= 45372
+IHNoYXJwbHk= 45373
+IHVuZGVyd2F0ZXI= 45374
+KG9yaWdpbg== 45375
+X0NvbnRyb2w= 45376
+IC4k 45377
+UGx1Z2lucw== 45378
+IGRyeWluZw== 45379
+IGlsbHVzdHJhdGVz 45380
+LXU= 45381
+IHZlZ2V0YXJpYW4= 45382
+bnBj 45383
+SGVhcnQ= 45384
+OycsCg== 45385
+Y29tbWE= 45386
+dGVlbnRo 45387
+YXNhbg== 45388
+L3NwZWM= 45389
+X21vdmVz 45390
+LW1hcmdpbg== 45391
+IGluZ2Vu 45392
+wqDCoMKg 45393
+IHByb2pldA== 45394
+IG90cmE= 45395
+IGJyYXM= 45396
+LnV0Yw== 45397
+IHNsZXB0 45398
+PXN1Yg== 45399
+YWJpbGl0 45400
+cG9zdGVy 45401
+IHNkaw== 45402
+b3VuY2lsbA== 45403
+IHdk 45404
+UHJlcGFyZWRTdGF0ZW1lbnQ= 45405
+IERydW0= 45406
+KGF0dHJpYnV0ZQ== 45407
+IEV0aGVybmV0 45408
+CURC 45409
+Q2FsaWZvcm5pYQ== 45410
+Y3ViZQ== 45411
+W0k= 45412
+LkNyZWF0ZWQ= 45413
+IEhN 45414
+IHRyYWNpbmc= 45415
+Rm9ybXNNb2R1bGU= 45416
+LXlvdQ== 45417
+LmN1cnJlbmN5 45418
+ZmVlZGluZw== 45419
+IHRib2R5 45420
+TGk= 45421
+YWNjaW9u 45422
+bmFz 45423
+IHRyb3V2ZXI= 45424
+Tk9ORQ== 45425
+In0sDQo= 45426
+IGZ0cA== 45427
+V2l0aElkZW50aWZpZXI= 45428
+cG9sYXRl 45429
+RmlsZUluZm8= 45430
+IHB1cnN1ZWQ= 45431
+ICAgIA0KICAgIA0K 45432
+REVTQ1JJUFRJT04= 45433
+fSovCg== 45434
+RnJvbU5pYg== 45435
+IGRlY29yYXRpdmU= 45436
+X1NTTA== 45437
+KGNoYXQ= 45438
+VExT 45439
+IHN1cnByaXNlcw== 45440
+YWxjdWxhdGU= 45441
+IFNwbGFzaA== 45442
+KENvbmZpZ3VyYXRpb24= 45443
+IFNFTQ== 45444
+aW1zb24= 45445
+L2xpYnJhcnk= 45446
+PERvdWJsZQ== 45447
+LnJvYm90 45448
+wqDCoMKgwqDCoMKgwqDCoA== 45449
+IENQRg== 45450
+IFVuZGVyc3RhbmRpbmc= 45451
+IGNvc21ldGlj 45452
+IFh0 45453
+dGlwcw== 45454
+K2s= 45455
+KCIn 45456
+IFBEVA== 45457
+V0FS 45458
+LmdldE9iamVjdA== 45459
+IFRyYWRpdGlvbmFs 45460
+LnNsdWc= 45461
+IERpcGw= 45462
+PSIiLA== 45463
+IEZpbG1z 45464
+IEFuaW0= 45465
+LmhlbHA= 45466
+IGVtYmFzc3k= 45467
+IEJvb3Rz 45468
+IGJ1bms= 45469
+LXJpc2s= 45470
+IHBjaQ== 45471
+IC9cLg== 45472
+IElQVA== 45473
+IGNyYXNoaW5n 45474
+IGlwdg== 45475
+X2tl 45476
+IFJFU1A= 45477
+LkxvZ0Vycm9y 45478
+IGluYWRlcXVhdGU= 45479
+SW9u 45480
+IEbDvHI= 45481
+cmljdWxh 45482
+IHNob3VsZEJl 45483
+YWxyZWFkeQ== 45484
+J10uIjwv 45485
+IFN0dWZm 45486
+RGlnaXRl 45487
+IHRyYW5zbGF0b3I= 45488
+X3Nwcml0ZQ== 45489
+bGV0YWw= 45490
+IG1haW9y 45491
+IFNleGU= 45492
+dGhhbmtz 45493
+IENvbXBsZXRlZA== 45494
+IGdhc29saW5l 45495
+LmF0dHJz 45496
+YmFnYWk= 45497
+IE9yaWc= 45498
+Ol0s 45499
+LmxvY2FsZQ== 45500
+IFJvbWE= 45501
+w61m 45502
+IGZhdm9yZWQ= 45503
+IHZhaW4= 45504
+IHNwb29u 45505
+IEphaHJlbg== 45506
+IG5pbmc= 45507
+V1dX 45508
+LGZsb2F0 45509
+X0RBVEFCQVNF 45510
+Qm9vdHN0cmFw 45511
+IENCQw== 45512
+IENodW5r 45513
+X2ludG8= 45514
+IEtvbA== 45515
+IGRlZmVuc2Vz 45516
+b3JlZFByb2NlZHVyZQ== 45517
+YmFsbHM= 45518
+VGV4dENoYW5nZWQ= 45519
+IHNoYXBpbmc= 45520
+IH19Pg== 45521
+R0VE 45522
+ZmFx 45523
+IG9wdGlvbmFsbHk= 45524
+X0Rpcw== 45525
+IFN1Y2Nlc3NmdWw= 45526
+IENlbnN1cw== 45527
+IGluY2FyY2Vy 45528
+X0NBUkQ= 45529
+IGF2aWF0aW9u 45530
+IEd5bQ== 45531
+QXV0aG9yaXR5 45532
+LkJlYW4= 45533
+c2hhZGVy 45534
+Tm90RXhpc3Q= 45535
+X1RleHRDaGFuZ2Vk 45536
+IFNUT1A= 45537
+KHRlYW0= 45538
+Ikg= 45539
+d2c= 45540
+IGdyaW5kZXI= 45541
+IHN0cmlwZQ== 45542
+IHByZXNlcnZhdGlvbg== 45543
+Q2xhaW0= 45544
+YXZlcnNhbA== 45545
+d2FyZWhvdXNl 45546
+dGFyZ2V0cw== 45547
+VHJ1c3Q= 45548
+IGFsbGV2 45549
+LHd3dw== 45550
+b3Vzc2U= 45551
+X2NoYW4= 45552
+X1NpemU= 45553
+c3lzdGVtcw== 45554
+IG9iamVjdGlvbg== 45555
+IEthbmU= 45556
+IGNvcnJvcw== 45557
+IERTTA== 45558
+IHVh 45559
+IE1I 45560
+IFN0cmF0ZWdpYw== 45561
+X3RjcA== 45562
+IOqwkg== 45563
+IGJvcnJvd2Vk 45564
+IEFjaA== 45565
+CWNvbW1hbmQ= 45566
+IGdwcw== 45567
+bGVzdG9u 45568
+aWNoZXZlcg== 45569
+IFVB 45570
+IGFzc2F1bHRlZA== 45571
+IHNwZWNpYWxpemVz 45572
+CXNlYXJjaA== 45573
+SG90ZWw= 45574
+ICAgICAgICAgICAgICAgICAgICANCg== 45575
+IFBpdGNo 45576
+INmB 45577
+UkVBRFk= 45578
+IHBhcmVudGFs 45579
+IGfDqW7DqQ== 45580
+IGRvbm7DqWVz 45581
+IGRldGFpbg== 45582
+VEFSR0VU 45583
+IHByb3RhZ29uaXN0 45584
+IGNsZWFySW50ZXJ2YWw= 45585
+IEljb25CdXR0b24= 45586
+IEdldEFsbA== 45587
+VHlwZUluZm8= 45588
+RUg= 45589
+4oCcVGhleQ== 45590
+IHtb 45591
+IGdhZw== 45592
+INqp 45593
+IERyb3Bkb3du 45594
+LmZyZWU= 45595
+Z29uZQ== 45596
+aW1lbnM= 45597
+IGluc3RhbA== 45598
+CWN1cmw= 45599
+X0NBTg== 45600
+IEJvbmU= 45601
+77yU 45602
+b255bXM= 45603
+LWdvdmVybm1lbnQ= 45604
+LmJpbmRpbmdOYXZpZ2F0b3I= 45605
+IERhbnM= 45606
+IE1jTA== 45607
+KGVu 45608
+Pihf 45609
+0JLRiw== 45610
+Lio7DQo= 45611
+PWo= 45612
+LWNvcg== 45613
+U29u 45614
+LlRvb2xTdHJpcEl0ZW0= 45615
+LWFyb3VuZA== 45616
+X1hNTA== 45617
+ZW5kRGF0ZQ== 45618
+IHNsYWNr 45619
+IHJvdGF0ZWQ= 45620
+IG5vcWE= 45621
+IGNvdHRhZ2U= 45622
+IGVuY29udHJhcg== 45623
+X3NraWxs 45624
+aG91ZXR0ZQ== 45625
+IQ0K 45626
+LndlYXRoZXI= 45627
+IGVtcGhhc2l6ZWQ= 45628
+5a62 45629
+INGB0L/QuNGB 45630
+IENvbXBpbGVy 45631
+KGFuZHJvaWQ= 45632
+IOKAug== 45633
+LnR1cm4= 45634
+IHN1cHByZXNzaW9u 45635
+X2NhbGxz 45636
+ICpA 45637
+KHN0cmxlbg== 45638
+LmhleA== 45639
+IEJpbGxz 45640
+IFJTQQ== 45641
+z4I= 45642
+IEVzY2FwZQ== 45643
+ZW1lbnRpYQ== 45644
+IGZyb250ZW5k 45645
+IHBpbnQ= 45646
+X2V4Yw== 45647
+enpv 45648
+W10sCg== 45649
+ICInLCci 45650
+LkVudmlyb25tZW50 45651
+IGFmb3JlbWVudGlvbmVk 45652
+IGVuZHVyZQ== 45653
+cHJvdG90eXBl 45654
+dGhlcmFweQ== 45655
+c3Np 45656
+RGVn 45657
+X3BsdWdpbnM= 45658
+LnVzZXJJbmZv 45659
+UHJpbnRlcg== 45660
+IFBST0dSQU0= 45661
+IHJ1aW5z 45662
+IGVtcGlyaWNhbA== 45663
+IGNyYXds 45664
+IEJvaWxlcg== 45665
+LWNvbW1lbnQ= 45666
+LnN1YnBsb3Q= 45667
+X2V0 45668
+ICcuJyw= 45669
+bWlub3I= 45670
+IEN1c3RvbXM= 45671
+IHlhdw== 45672
+dW5kZXJsaW5l 45673
+IENvbW8= 45674
+KCgn 45675
+KG1lYW4= 45676
+IGNoYXF1ZQ== 45677
+IEJsb2Nrcw== 45678
+LnJhZA== 45679
+aWxpYnJpdW0= 45680
+IHdlYmRyaXZlcg== 45681
+IG1lbGhvcg== 45682
+ZGFuYQ== 45683
+IEFidXNl 45684
+IFNvdXRod2VzdA== 45685
+IFBhcmVu 45686
+UEVSVElFUw== 45687
+CUlM 45688
+IHNjcmVhbQ== 45689
+dnU= 45690
+IGluY29tZXM= 45691
+IG5pbQ== 45692
+IGxhY2U= 45693
+IGNvbXBlbnNhdGU= 45694
+UmV2ZXJzZQ== 45695
+RGF0 45696
+X2F0dGFjaw== 45697
+IG5vdXI= 45698
+YWNoZW4= 45699
+Y2Vr 45700
+PEZ1bmM= 45701
+d2ll 45702
+Y29tcHJlc3NlZA== 45703
+LW1hdGNo 45704
+KCIiKV0K 45705
+aW1pemVk 45706
+Lm9yaWVudGF0aW9u 45707
+LmNvbXBhcmVUbw== 45708
+IG1hc3NhZ2dp 45709
+IOychA== 45710
+IGVsYm93 45711
+IGFudGlveGlk 45712
+dW5kcmVkcw== 45713
+L3Rvb2xz 45714
+IFJPVw== 45715
+YW5tYXI= 45716
+IFdvdw== 45717
+X3RpY2tldA== 45718
+UHJvZ3JhbW1pbmc= 45719
+IHRoZW9y 45720
+LXJldmlldw== 45721
+KCkpKSk7Cg== 45722
+IFJpY2hhcmRzb24= 45723
+IFBvY2tldA== 45724
+XVtd 45725
+YW1wcA== 45726
+X2hlYWx0aA== 45727
+IFBPUA== 45728
+IE5hdmFs 45729
+R3Vlc3M= 45730
+IGFuY2VzdG9y 45731
+LkdldEFsbA== 45732
+LmxvY2FsU2NhbGU= 45733
+IE1hcHBlcg== 45734
+IGFjY3VtdWxhdGlvbg== 45735
+IHNpbXVsYXRlZA== 45736
+IERyaXZlcnM= 45737
+IGTDqXM= 45738
+Y3VycmluZw== 45739
+IGVsZXBoYW50 45740
+IGFkdmVydGlzZWQ= 45741
+IG1haWxib3g= 45742
+U0hJRlQ= 45743
+IE1vbmljYQ== 45744
+IGFuYw== 45745
+IHdhcmRyb2Jl 45746
+SW5ncmVkaWVudHM= 45747
+IHx8DQo= 45748
+aXBweQ== 45749
+IGFudGliaW90aWNz 45750
+YXZpbmdz 45751
+KGN4 45752
+IEZlcnJhcmk= 45753
+IEFuaW1hdG9y 45754
+LmR0eXBl 45755
+cmVtb3ZlZA== 45756
+b3JkZXJieQ== 45757
+IGNyZXM= 45758
+b2PDqg== 45759
+IHB5bQ== 45760
+IENpcmN1bGFy 45761
+QGluZGV4 45762
+IFdhcm0= 45763
+U2F5 45764
+IEFzc2lzdGFuY2U= 45765
+IGN1cnRhaW4= 45766
+IE1vbnRl 45767
+SUxFUg== 45768
+IENWRQ== 45769
+IER1Y2s= 45770
+IEFsbG93cw== 45771
+X2ZpcmU= 45772
+IERlcmJ5 45773
+IHJlcG9z 45774
+IGh0dHBDbGllbnQ= 45775
+IHBzeWNoaWF0 45776
+IG5vd2FkYXlz 45777
+IGNhdXRpb3Vz 45778
+IENvbXB1dGluZw== 45779
+IGNvbXBsZXRpb25IYW5kbGVy 45780
+IFdlbHNo 45781
+IEJFU1Q= 45782
+IHN0cmVzc2Z1bA== 45783
+X1BF 45784
+5pel5pyf 45785
+IERhdGFGcmFtZQ== 45786
+CUludGVnZXI= 45787
+X1ByaW50 45788
+TW92ZXM= 45789
+IHRyYW5zZm9ybWluZw== 45790
+LkJhdGNo 45791
+eWFob28= 45792
+UG9zaXRpb25z 45793
+emVq 45794
+IG5vb2Q= 45795
+aW9yZXM= 45796
+Xyo= 45797
+IGNsaw== 45798
+IEZsb3lk 45799
+IGhhcA== 45800
+Zm9udHNpemU= 45801
+IG5heg== 45802
+Lm5vdGlmaWNhdGlvbg== 45803
+IERlcHJlc3Npb24= 45804
+IGFjbmU= 45805
+KioqCgo= 45806
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCg== 45807
+LmNvbnRlbnRz 45808
+eW50aA== 45809
+IFN0cmFpZ2h0 45810
+Jyl9fSI+PC8= 45811
+IGJ1bGI= 45812
+Ulg= 45813
+Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0K 45814
+IGNvbXVuaWM= 45815
+IFJO 45816
+LW1lZGl1bQ== 45817
+TEVBTg== 45818
+PWxlbg== 45819
+UGhvbmVOdW1iZXI= 45820
+ZXJ2YXRpb25z 45821
+QWNjdXJhY3k= 45822
+IEFubm90YXRpb24= 45823
+X2tleXdvcmQ= 45824
+X2hpbnQ= 45825
+IEF0aGVucw== 45826
+IGFzc2lzdGluZw== 45827
+IEhD 45828
+LkluaXRpYWxpemU= 45829
+JykpKQo= 45830
+dXBh 45831
+IHN1aXY= 45832
+IElQQw== 45833
+PFRFbnRpdHk= 45834
+IGJyYW5kZWQ= 45835
+b29tbGE= 45836
+bGFyxLE= 45837
+IFhNTEh0dHBSZXF1ZXN0 45838
+IGTDqWrDoA== 45839
+IHRyYW5zY3JpcHRpb24= 45840
+IHByZXZhbGVudA== 45841
+LnBsYW4= 45842
+IHN0YXJl 45843
+IHdvcmtvdXRz 45844
+IEVkdWNhdGlvbmFs 45845
+IG1lc3N5 45846
+IE1PVA== 45847
+LkNvbW1hbmRUeXBl 45848
+UWVk 45849
+KGdjYQ== 45850
+IExpbmVhckxheW91dE1hbmFnZXI= 45851
+IEJsb3c= 45852
+IEFsdW1pbnVt 45853
+IHN3aW5nZXJjbHVi 45854
+IFRyYW5zaXQ= 45855
+IGV4cG9z 45856
+dmly 45857
+KHNlY29uZA== 45858
+IGJlbG9uZ2Vk 45859
+U3RvbmU= 45860
+6ZW/ 45861
+IFN1bA== 45862
+IGdpZA== 45863
+IGFsbG95 45864
+ZXJ2YQ== 45865
+aXNlY29uZA== 45866
+X1JFTkRFUg== 45867
+IGFuZ2Vscw== 45868
+IFBoaWxvc29waHk= 45869
+b3B1cw== 45870
+IG1vbw== 45871
+ZW5ndWlu 45872
+X1ZBUklBQkxF 45873
+X0RFU1Q= 45874
+KGF1eA== 45875
+IGhvZQ== 45876
+IGRvYg== 45877
+YXR0YWNobWVudHM= 45878
+IGNvcnJpZG9y 45879
+IGRpdmlkZW5k 45880
+nbw= 45881
+IFRocm91Z2hvdXQ= 45882
+Lm9wdGlt 45883
+JG5ldw== 45884
+IGJlcmc= 45885
+IHNwcmVhZHNoZWV0 45886
+LlRyeUdldFZhbHVl 45887
+IHBheW91dA== 45888
+IE9uRGVzdHJveQ== 45889
+YXV0aGVudGljYXRpb24= 45890
+IE1pZ3VlbA== 45891
+cnRj 45892
+IENocmlzdGluZQ== 45893
+IEFJUg== 45894
+IGp1cmlz 45895
+IGRlc3BhaXI= 45896
+IHBhdGVudHM= 45897
+LWhhcw== 45898
+JV4= 45899
+5LuY 45900
+X3N0cmR1cA== 45901
+IFJlYXI= 45902
+ZXR0ZXM= 45903
+KHByb3BlcnRpZXM= 45904
+IHdyaXRhYmxl 45905
+LmlzTnVsbA== 45906
+b2xpY3M= 45907
+X2Jsb2I= 45908
+IGN1YWxxdWllcg== 45909
+YWZp 45910
+b3d5Y2g= 45911
+6I635Y+W 45912
+w4c= 45913
+IENhcmRpbmFs 45914
+IHRlbWE= 45915
+IkFuZA== 45916
+UGFnZVNpemU= 45917
+56eS 45918
+LlNpbXBsZURhdGVGb3JtYXQ= 45919
+IFdpbm5lcg== 45920
+IGNvcnJlbw== 45921
+X3dl 45922
+LmFkZE9iamVjdA== 45923
+KGNvdXJzZQ== 45924
+IGhvZw== 45925
+b3Bybw== 45926
+IHByb2JhdGlvbg== 45927
+dW5hYmxl 45928
+KGFjdGl2ZQ== 45929
+5Zu+54mH 45930
+IHBlcnRhaW5pbmc= 45931
+IGVtcGhhc2l6ZQ== 45932
+IFByaW50ZXI= 45933
+PS4= 45934
+IHVwZ3JhZGluZw== 45935
+L2NvbnRhY3Q= 45936
+PVtb 45937
+LXNhbg== 45938
+CXZhbHVlcw== 45939
+IGRvc2FnZQ== 45940
+U29saWQ= 45941
+IFJvb3NldmVsdA== 45942
+5ZWG5ZOB 45943
+IHJlY3JlYXRpb24= 45944
+IFRlcm1pbg== 45945
+LkJhZA== 45946
+IEJvbHQ= 45947
+U2t5 45948
+X0ltYWdl 45949
+IHNxdWly 45950
+IENvYg== 45951
+T1JO 45952
+IGF1Yw== 45953
+LkxFRlQ= 45954
+J0I= 45955
+LXJlc2lzdGFudA== 45956
+PiIr 45957
+IHRva2VuaXplcg== 45958
+IHNvdmVyZWlnbnR5 45959
+IFBlbmNl 45960
+KCkiKTsK 45961
+IHBlc3NvYXM= 45962
+Lkdl 45963
+IEluY2x1ZGVk 45964
+IHBhZ2luYQ== 45965
+IGV4cG9zaW5n 45966
+0LXRiA== 45967
+X1NDUklQVA== 45968
+LyQnLA== 45969
+VGh1bWJuYWls 45970
+15Q= 45971
+d2ViRWxlbWVudFg= 45972
+d2ViRWxlbWVudFhwYXRocw== 45973
+cHJlc3N1cmU= 45974
+IEN1cnJ5 45975
+X0NQ 45976
+T0xVVElPTg== 45977
+SUxFUw== 45978
+cHJvdGVjdA== 45979
+b29sYQ== 45980
+V29ya3NwYWNl 45981
+e307Cg== 45982
+IFVOUw== 45983
+IHN5bXBhdGh5 45984
+cm9rZXI= 45985
+IHJlbW9kZWw= 45986
+CWNlbGw= 45987
+IGF0b3A= 45988
+LkZ1bGxOYW1l 45989
+IGZhdXQ= 45990
+IEVhc2lseQ== 45991
+X2R5bmFtaWM= 45992
+IGZyYW1lZA== 45993
+IG1vdGl2ZQ== 45994
+6Lev 45995
+c2Ft 45996
+IG1hcmNh 45997
+IFRleHRFZGl0aW5nQ29udHJvbGxlcg== 45998
+IGRlc3RydWN0b3I= 45999
+Y3JlYW0= 46000
+IHJ1ZGU= 46001
+IEJvbGQ= 46002
+IEluZGlnZW5vdXM= 46003
+IGdlbnM= 46004
+IHJlbGFjaW9u 46005
+KHN5c3RlbQ== 46006
+IFVJRm9udA== 46007
+X2NoYXJnZQ== 46008
+VVNURVI= 46009
+RVY= 46010
+Lk5hbWVzcGFjZQ== 46011
+IG1lcmdlcg== 46012
+IGNhbGxvYw== 46013
+Z2FuZw== 46014
+QmFkUmVxdWVzdA== 46015
+IHNwZXI= 46016
+LWRlc2lnbg== 46017
+IOKH 46018
+Q2hhbg== 46019
+IG9yZ2FuaXNt 46020
+LCk= 46021
+PWlk 46022
+X3BsYW5l 46023
+IENhc2Vz 46024
+ZWxmYXN0 46025
+IExlZ2lzbGF0dXJl 46026
+IEZha2Vy 46027
+IGludm9raW5n 46028
+LXV0aWxz 46029
+KCkuJw== 46030
+LmZhY2U= 46031
+IGd1YXJkaWFu 46032
+bXlNb2RhbA== 46033
+IGNsaXBib2FyZA== 46034
+IEFUTQ== 46035
+IHBlYXM= 46036
+IFN5bHY= 46037
+LmNhbGM= 46038
+IENvbnRhY3Rz 46039
+aW50VmFsdWU= 46040
+IG1vZGlmeWluZw== 46041
+IEJhcmI= 46042
+Lmxvc3M= 46043
+X3BlcmNlbnRhZ2U= 46044
+QXNrZWQ= 46045
+KGxzdA== 46046
+YXRlZ29yaWNhbA== 46047
+LWZpbGVz 46048
+IFJvbWFuaWE= 46049
+LkFj 46050
+IGhhaQ== 46051
+IEZseWluZw== 46052
+IMW8 46053
+anA= 46054
+IFRyYWluZXI= 46055
+LmFyYw== 46056
+X2RlZw== 46057
+IHRyYWNlYmFjaw== 46058
+T3JGYWls 46059
+RkxPVw== 46060
+Lm9sZA== 46061
+b3lh 46062
+Z210 46063
+aXNlbXB0eQ== 46064
+IHZhY2NpbmF0aW9u 46065
+IG9ic29sZXRl 46066
+cmVjb2duaXplZA== 46067
+IHJ1aW5lZA== 46068
+IFJlaW4= 46069
+IFRyYWNraW5n 46070
+eGZi 46071
+2KfbjA== 46072
+IHbDpnJl 46073
+IGJyeXN0ZXI= 46074
+IElUUw== 46075
+IGRlc3Rpbnk= 46076
+IHN3ZWFy 46077
+IHJlZGVz 46078
+IGNsZg== 46079
+IGZsaXBwZWQ= 46080
+CWhlYWQ= 46081
+Qmx1ZXRvb3Ro 46082
+IE92ZXJyaWRlcw== 46083
+OkJvb2xlYW4= 46084
+Xz0= 46085
+X2xy 46086
+c3Bhd24= 46087
+OmluZGV4 46088
+VkFMVUVT 46089
+aXNrZXk= 46090
+PyIpOwo= 46091
+LnN5bnRoZXRpYw== 46092
+IENoZWNraW5n 46093
+c3RydWN0dXJlcw== 46094
+aXBpbmc= 46095
+IHZvY2Fscw== 46096
+LVVw 46097
+IE1hbnVmYWN0dXJlcnM= 46098
+IE1hcnJpYWdl 46099
+5Luj56CB 46100
+IGdhcm5lcg== 46101
+X0NsaWVudA== 46102
+cGFyYWxsZWw= 46103
+UklFTkQ= 46104
+IHZpbmVnYXI= 46105
+c2VndWU= 46106
+SkI= 46107
+IGNvbnRhY3Rpbmc= 46108
+IENhcnJvbGw= 46109
+IG91dHJlYWNo 46110
+dGVuc29y 46111
+X3ZhcmlhbnQ= 46112
+IHRoZWF0 46113
+bGljYWJsZQ== 46114
+e3w= 46115
+dGlueQ== 46116
+X2xldHRlcg== 46117
+IHBlbmNpbA== 46118
+SGVhZGVyc0hlaWdodFNpemVNb2Rl 46119
+aWx0cm8= 46120
+LmF1dG9jb25maWd1cmU= 46121
+LmRyYWc= 46122
+LnVzZVN0YXRl 46123
+IEJNSQ== 46124
+aGludA== 46125
+Q29tcGlsZQ== 46126
+Klw= 46127
+ZW5hcnk= 46128
+IGx2bA== 46129
+LkNhY2hl 46130
+Kz0i 46131
+X3R2 46132
+cnVpdG1lbnQ= 46133
+IGZyZWFk 46134
+QXJ0aWNsZXM= 46135
+ZmlsYQ== 46136
+IHBhY2thZ2Vk 46137
+4piG 46138
+QVRIRVI= 46139
+IFBsYW5uZWQ= 46140
+c2NoZW1l 46141
+IGRpYXJ5 46142
+IG9mZmVuc2Vz 46143
+Lzw/ 46144
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA== 46145
+UHJvZ3Jlc3NIVUQ= 46146
+IEdvcg== 46147
+LmdldFRpdGxl 46148
+IG1vY2tlZA== 46149
+IFRvcnk= 46150
+ICIpIjsK 46151
+I2c= 46152
+IGxpZWQ= 46153
+IHN2Yw== 46154
+X2d1aQ== 46155
+RU5UUlk= 46156
+IHNlcnZpY2lv 46157
+bW91c2VvdmVy 46158
+U0FDVElPTg== 46159
+44Kz 46160
+IHJlaWZl 46161
+bGVjdHJpYw== 46162
+X2NyZWF0aW9u 46163
+UmVhbGl0eQ== 46164
+KCcr 46165
+cHJvZHVjdElk 46166
+U3VwcGxpZXI= 46167
+LUxl 46168
+LnJlcG8= 46169
+dWNraW5n 46170
+X1N0cg== 46171
+IFJlbGF5 46172
+0LjQuA== 46173
+IHBlcnY= 46174
+Q2hpY2Fnbw== 46175
+IG1haXNvbg== 46176
+IHN0aWNrZXI= 46177
+X3ByZXNzZWQ= 46178
+U3dhcA== 46179
+IElH 46180
+IHN1c2NlcHRpYmxl 46181
+b2NhZG8= 46182
+IGdpbg== 46183
+ZXhl 46184
+aWdoYm9yaG9vZA== 46185
+KWA= 46186
+IGRpYWdyYW1z 46187
+IGluZmxhbW1hdG9yeQ== 46188
+IHTDqQ== 46189
+IFBvcHVw 46190
+IGFwcHJlaA== 46191
+IFBvcnRmb2xpbw== 46192
+IHdvcnM= 46193
+LmVudW1z 46194
+0LXQs9C+ 46195
+L0J1dHRvbg== 46196
+IFBoYW50b20= 46197
+ICM6 46198
+IGRpaw== 46199
+cGFnZXI= 46200
+ZnRhcg== 46201
+IG9yZ2FuaXplcg== 46202
+KGNoaWxkcmVu 46203
+IE11bmljaA== 46204
+IHN0cmFuZw== 46205
+IFJX 46206
+44K/ 46207
+TWFo 46208
+cHRpZGU= 46209
+IGxlYXJucw== 46210
+IHJlZHVjdGlvbnM= 46211
+IFJlcGxhY2VtZW50 46212
+T1RT 46213
+YWxjb24= 46214
+KHBhcnRz 46215
+YmFzaA== 46216
+IENpdGl6ZW4= 46217
+jbDsnbQ= 46218
+IEh0dHBTZXJ2bGV0 46219
+X1NDSEVNQQ== 46220
+bWVhbnM= 46221
+IGhvcnJpZmlj 46222
+VkVSSUZZ 46223
+IERDSEVDSw== 46224
+ICgv 46225
+LmJlZm9yZQ== 46226
+LnRleHR1cmU= 46227
+Z2V0TW9jaw== 46228
+IFNlbnNl 46229
+SW5zcGVjdG9y 46230
+VGV4dE5vZGU= 46231
+KEFM 46232
+LmdldE5vZGU= 46233
+IGJveWM= 46234
+IEJyaXNiYW5l 46235
+IGJhdHRsaW5n 46236
+CXR4 46237
+IGxvYmJ5aW5n 46238
+YnVpbHQ= 46239
+IFNFRUs= 46240
+IHJhbmRvbWl6ZWQ= 46241
+Z25p 46242
+X2NsdXN0ZXJz 46243
+X2lkZW50aXR5 46244
+IGNhcmRpYWM= 46245
+IG5ld1VzZXI= 46246
+LlZpZGVv 46247
+ZHVpdA== 46248
+XWluaXQ= 46249
+QXRs 46250
+KXZhbHVl 46251
+VGV4dFV0aWxz 46252
+INC10YHQu9C4 46253
+Q29tcHV0ZQ== 46254
+PSgn 46255
+CQkgICAgICAgICAgICAgICA= 46256
+IGFydGVy 46257
+IFRXTw== 46258
+JykpLA== 46259
+IERJVg== 46260
+IHByaXZpbGVnZWQ= 46261
+IFBhcnRuZXJzaGlw 46262
+IEhlYXRoZXI= 46263
+YmF5 46264
+YXRpc2ZpZWQ= 46265
+aW5zdGFncmFt 46266
+X1NlbmQ= 46267
+IEFTRg== 46268
+JG5hbWU= 46269
+IGJvbw== 46270
+IGTDqWY= 46271
+X0ZpZWxk 46272
+IEVkdQ== 46273
+Y2FuZGlkYXRl 46274
+cnVieQ== 46275
+IGFjY3VtdWxhdGU= 46276
+KEludFB0cg== 46277
+IGJ1c2luZXNzbWFu 46278
+IGVjb25vbWljYWxseQ== 46279
+IFJpbmdz 46280
+IElucHV0cw== 46281
+uYQ= 46282
+YWNpZQ== 46283
+IEFsYXJt 46284
+IExvZ291dA== 46285
+LnNlcXVlbmNl 46286
+IFZpZW5uYQ== 46287
+b3By 46288
+IGRydW1z 46289
+PWNvbmZpZw== 46290
+cXVp 46291
+IGRhdG8= 46292
+IHBvbHltZXI= 46293
+IENoYW5nZWQ= 46294
+V2ViUmVxdWVzdA== 46295
+IEFkdmFuY2U= 46296
+IHVuZGVyZ29pbmc= 46297
+LkNvbnNvbGU= 46298
+IGN1cnJlbnROb2Rl 46299
+IFdvb2w= 46300
+IHDDoWdpbmE= 46301
+UkVHSVNURVI= 46302
+IHNhZ2E= 46303
+IFlPUks= 46304
+YW1hbmhv 46305
+5a6M 46306
+IEJ1bmRlcw== 46307
+IERpYWxvZ0ludGVyZmFjZQ== 46308
+Z2VvaXM= 46309
+dW5jaWF0aW9u 46310
+PyQ= 46311
+LkFzc2VydGlvbnM= 46312
+IHNlYXRlZA== 46313
+IFNweQ== 46314
+UG9zZQ== 46315
+IkM= 46316
+IGFob3Jh 46317
+INGE0LDQudC7 46318
+IOuzgA== 46319
+IHdhcnA= 46320
+UHJvamVjdGlvbg== 46321
+IFNpbmdsZXM= 46322
+IEFkdmVydGlzaW5n 46323
+TGludXg= 46324
+dXN0eQ== 46325
+IHBlbmFs 46326
+VVNJQw== 46327
+b2RpYQ== 46328
+Lm5ldGJlYW5z 46329
+IFVn 46330
+IEJyZW50 46331
+LWxvZw== 46332
+L2NhdGVnb3J5 46333
+IEN1c3RvbWl6ZQ== 46334
+aXJlbg== 46335
+77yaPC8= 46336
+aW5hcnM= 46337
+ICgrKw== 46338
+R29pbmc= 46339
+RVhFQw== 46340
+KG1lc2g= 46341
+IHBlcmltZXRlcg== 46342
+Q2xz 46343
+Y2VpdmluZw== 46344
+bWVuc2FqZQ== 46345
+KCkpKXsK 46346
+IHByb3N0YXRl 46347
+X2J1eQ== 46348
+IFJvb2Y= 46349
+LlJldHVybg== 46350
+IG1hcnJpYWdlcw== 46351
+X3RodW1i 46352
+574= 46353
+4K+N 46354
+VGV4dHVyZXM= 46355
+KFRFWFQ= 46356
+c2hvcnRjdXQ= 46357
+VHJhbnNmb3JtZXI= 46358
+QVRJQw== 46359
+IFNub3dkZW4= 46360
+c2NyaWJlcnM= 46361
+bWFya2Vk 46362
+IOKGkQ== 46363
+aG9yYQ== 46364
+T1BFUg== 46365
+IEZZ 46366
+IEF1dGhlbnRpYw== 46367
+IGF1ZGk= 46368
+cmFtZXI= 46369
+IExpdGVyYXR1cmU= 46370
+IGl0ZW1JZA== 46371
+LkF0dA== 46372
+KGNudA== 46373
+IEtT 46374
+LWxpbnV4 46375
+IFBhcnRpY2lwYW50 46376
+IENydWlzZQ== 46377
+aXR1bG8= 46378
+dXN0cmlhbA== 46379
+IGNsYXNl 46380
+ID0k 46381
+X2RhdGVz 46382
+Y3VycmVudFBhZ2U= 46383
+aXhh 46384
+ZXhhY3Q= 46385
+IHRzbA== 46386
+LlNv 46387
+L2RvY3VtZW50 46388
+aGFydA== 46389
+X0lETEU= 46390
+e30u 46391
+eWV0 46392
+SXJvbg== 46393
+IFRocm9uZXM= 46394
+c25k 46395
+XHhh 46396
+IGJldmVyYWdlcw== 46397
+X3RyYW5zcG9ydA== 46398
+IGZvaWw= 46399
+IHRhc3Rpbmc= 46400
+IGdvZWQ= 46401
+TWVtbw== 46402
+IG5pdHJvZ2Vu 46403
+Lk1lbWJlcg== 46404
+LmZsYXQ= 46405
+IGlsbHVt 46406
+bWluZW50 46407
+Lnpvb20= 46408
+IFB0cg== 46409
+b2Npbw== 46410
+IENvbnN1bHRpbmc= 46411
+IENvbmU= 46412
+CWl0ZW1z 46413
+IExN 46414
+IG9hdXRo 46415
+IFByb2dyYW1tZQ== 46416
+b2Nob25k 46417
+KHNlbGVjdG9y 46418
+IHdhdGVycHJvb2Y= 46419
+IE1lcmtlbA== 46420
+IHN1ZmZlcnM= 46421
+IG5wbQ== 46422
+6LGh 46423
+IExhbmRpbmc= 46424
+IExBTg== 46425
+CQkJCQkJDQo= 46426
+L2lz 46427
+IHPDqXJpZQ== 46428
+IEdVSUxheW91dA== 46429
+Z2l2ZQ== 46430
+X0NZ 46431
+QnJvd3Nl 46432
+Lm11bHRpcGx5 46433
+PSIkKA== 46434
+dXNv 46435
+LXBhcmVudA== 46436
+Lk1hdGg= 46437
+Lm51bWJlck9m 46438
+IHRpZW5lbg== 46439
+IHJlc2VudA== 46440
+IHBpdGNoaW5n 46441
+Il0pLAo= 46442
+LlV0aWxpdGllcw== 46443
+IG11bHRpcGxpY2F0aW9u 46444
+OnR5cGU= 46445
+IHBwcmludA== 46446
+aWFuaQ== 46447
+5YiZ 46448
+IGxhdW5jaGVy 46449
+IHJ1Z2J5 46450
+546w 46451
+CgkJCQo= 46452
+aGlk 46453
+QW5nbGVz 46454
+IGdvb2RieWU= 46455
+IGlucHV0U3RyZWFt 46456
+LndhdGNo 46457
+R29vZHM= 46458
+IFNheXM= 46459
+PkY= 46460
+IFN0aWNr 46461
+IGNlcmM= 46462
+IFNsZWU= 46463
+CQkgICAgICAgIA== 46464
+PEltYWdl 46465
+IOiuvg== 46466
+LWVkaXRvcg== 46467
+cGllY2Vz 46468
+IERyYW1h 46469
+IC8vLy8vLy8vLy8vLy8vLy8vLw== 46470
+IFRhc2tz 46471
+QVJD 46472
+Z2F0ZXdheQ== 46473
+LmdldGN3ZA== 46474
+Lk1ldGFkYXRh 46475
+IGd1ZXNzaW5n 46476
+5Zyw5Z2A 46477
+IHNtYXJ0ZXI= 46478
+IEdldEVudW1lcmF0b3I= 46479
+IGVmdGVy 46480
+L29wZXJhdG9ycw== 46481
+IEdMZmxvYXQ= 46482
+IGbDuHI= 46483
+IG9wYXF1ZQ== 46484
+5L+d5a2Y 46485
+U3ByZWFk 46486
+U1lTVEVN 46487
+IGludmVyc2lvbg== 46488
+IEJhc2tldGJhbGw= 46489
+IHNpbXVsYXRpb25z 46490
+IGRlbmllcw== 46491
+IGF2ZXo= 46492
+X2xpc3RlbmVy 46493
+IGVuaGFuY2luZw== 46494
+IE15dGg= 46495
+IExha2Vycw== 46496
+X01E 46497
+TmRFeA== 46498
+REFUQUJBU0U= 46499
+IHThuw== 46500
+YXJ0aA== 46501
+W2xlZnQ= 46502
+IGNvbnRlc3Rz 46503
+c3RpbGU= 46504
+KEtFUk4= 46505
+X2Zj 46506
+X3Bt 46507
+IHByZXNpZGVudHM= 46508
+IGhvc3BpdGFsaXR5 46509
+IGZhZGVJbg== 46510
+Uk9QRVJUWQ== 46511
+X21hcHM= 46512
+IERlZmluaXRpb25z 46513
+IGFzc2Vzc2luZw== 46514
+IHVzYXI= 46515
+IHF1YW50aXRhdGl2ZQ== 46516
+bW96 46517
+QmVhdXRpZnVs 46518
+Wygo 46519
+Ym9ucw== 46520
+ZnJlcXVlbmN5 46521
+Q29udGFpbg== 46522
+IHB1enpsZXM= 46523
+IENhc3Rybw== 46524
+IHZpbGxh 46525
+IGtpbmRseQ== 46526
+Rm9udEF3ZXNvbWU= 46527
+ZXJuYQ== 46528
+ZXBvY2hz 46529
+X2RhdGFz 46530
+CWlw 46531
+LnBhZGRpbmc= 46532
+IENvbnRlc3Q= 46533
+IGVkaXRpb25z 46534
+IGRpc3Byb3BvcnRpb24= 46535
+IElDTw== 46536
+IGNvbWViYWNr 46537
+PXZhbHVl 46538
+cmlhZA== 46539
+LXNvcnQ= 46540
+U3VibWl0dGVk 46541
+KG5ldHdvcms= 46542
+IENlbA== 46543
+IGluc3RhbGxtZW50 46544
+bGFzaGVz 46545
+Lkxpc3RWaWV3 46546
+IFZhdGljYW4= 46547
+KE1lZGlhVHlwZQ== 46548
+SVZFRA== 46549
+cmVhY2hhYmxl 46550
+Oklz 46551
+IENJVFk= 46552
+5Lqs 46553
+IEhlbHBmdWw= 46554
+IGJhxZ8= 46555
+JQ0K 46556
+IHBzeWNoaWF0cmlj 46557
+IHJlY3ljbGVk 46558
+Rk9STUFU 46559
+IEdyb3c= 46560
+YmluZQ== 46561
+R2l0 46562
+LnNz 46563
+IFdlYXBvbnM= 46564
+IFN0eQ== 46565
+X2Fycm93 46566
+KnNlbGY= 46567
+aXJlbWVudA== 46568
+IGRlZ2xp 46569
+QXBwRGVsZWdhdGU= 46570
+X2Jhbm5lcg== 46571
+IGNvb3JkaW5hdGVk 46572
+IFdlYmNhbQ== 46573
+IGNlbGVicmF0aW9ucw== 46574
+LmFjdA== 46575
+KioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq 46576
+KHNob3c= 46577
+IHdlZWtkYXk= 46578
+IGNvbmNlcnRz 46579
+0L7Qu9C9 46580
+Y2xpbg== 46581
+IGNyb24= 46582
+IE5pbQ== 46583
+LnNldFZlcnRpY2Fs 46584
+IEVsbGVu 46585
+2LPYqg== 46586
+IFNBTQ== 46587
+RWZm 46588
+Z3o= 46589
+c3RlYW0= 46590
+IGFudGlxdWU= 46591
+cGh5c2ljYWw= 46592
+IEZvcm1EYXRh 46593
+LnNldHRlcg== 46594
+IFBPSU5U 46595
+Qm9u 46596
+IGZsYXZvdXI= 46597
+ZXJ2ZW50aW9u 46598
+X0VOVElUWQ== 46599
+CSAgICAgICAgICAgIA== 46600
+IGludHJpbnNpYw== 46601
+IOaO 46602
+YXBwZW5kVG8= 46603
+YXJhbWVs 46604
+KV0p 46605
+IFJlY29tbWVuZA== 46606
+KW0= 46607
+T3V0T2ZSYW5nZQ== 46608
+IGtuaWdodA== 46609
+IHNhdGVsbGl0ZXM= 46610
+IFRpdGFucw== 46611
+IHdlaWdoZWQ= 46612
+IERhbmE= 46613
+ZWFzZQ== 46614
+IHNpcA== 46615
+U0lN 46616
+IERldmVsb3BlcnM= 46617
+bWFsaW5r 46618
+L2NoZWNr 46619
+X1BMTA== 46620
+bnVuZw== 46621
+IGRyeWVy 46622
+PUE= 46623
+LmR3 46624
+X1NRTA== 46625
+IHN1YnBsb3Q= 46626
+RFJPUA== 46627
+IHByb3RvdHlwZXM= 46628
+IGhvdXJseQ== 46629
+ZGlzcGxheU5hbWU= 46630
+IGFzaQ== 46631
+IFZpb2xlbmNl 46632
+IGFzdHJvbmF1dA== 46633
+IGRhdGF0eXBl 46634
+IGluZm9ybWF0aW9uYWw= 46635
+IGludmVzdGlnYXRpdmU= 46636
+ZXRlcm1pbmVk 46637
+cmVuYWw= 46638
+Oyc+ 46639
+CWNvbA== 46640
+Vkc= 46641
+X2Jvb2xlYW4= 46642
+cmVjZW50 46643
+ICopCgo= 46644
+IFJhaW5ib3c= 46645
+b21tZW4= 46646
+IGx1cg== 46647
+IG9wcHJlc3Npb24= 46648
+KCIsIik7Cg== 46649
+IEZhY2lsaXR5 46650
+REVGSU5FRA== 46651
+IG5lb24= 46652
+IG9mZmVuZGVy 46653
+QUZQ 46654
+IENsZWFuaW5n 46655
+W10pOg== 46656
+IHVuZG9jdW1lbnRlZA== 46657
+LlJlcG9zaXRvcmllcw== 46658
+IEd1aXRhcg== 46659
+0LDRgdGB0LjQsg== 46660
+U2tpbGxz 46661
+IHRlc3RpbW9u 46662
+cnlwdG9ncmFwaHk= 46663
+IEFtYmVy 46664
+IFN0YWxpbg== 46665
+IGxvbmU= 46666
+IGFwZW5hcw== 46667
+IGRpZXNlcw== 46668
+IEFyZHVpbm8= 46669
+6L2s 46670
+PT0t 46671
+X0FjdA== 46672
+IGNvZGVk 46673
+4pag 46674
+YW1idXJnZXI= 46675
+LWxpbmtz 46676
+IGFybW91cg== 46677
+LkhpZ2g= 46678
+Z2V0Q29udGVudA== 46679
+c3RhZw== 46680
+IGhlY2s= 46681
+IOyXhg== 46682
+IE1jQ29ubmVsbA== 46683
+IENvbmNlcnQ= 46684
+IEFsbG9j 46685
+w6RyZQ== 46686
+LnJlcGxhY2VBbGw= 46687
+IHBhcnRpdGlvbnM= 46688
+cm90dA== 46689
+IEZsZQ== 46690
+X1RSRUU= 46691
+cmVhc29uYWJsZQ== 46692
+IFJlcG9ydGluZw== 46693
+IGJpbGxpb25haXJl 46694
+c2NvcmVz 46695
+bWlucw== 46696
+LWV5ZQ== 46697
+TU9SRQ== 46698
+YWJvcnQ= 46699
+IFNXVA== 46700
+IGludmVydGVk 46701
+IFRlYWNoZXJz 46702
+O24= 46703
+IGFzdHJv 46704
+0L3QvtCy 46705
+0LDQvdC40YY= 46706
+cHJvZHVjdG8= 46707
+Y291bnRyaWVz 46708
+IE93ZW4= 46709
+IGNvbnRhbWluYXRpb24= 46710
+IHZpYmU= 46711
+IEVsbGk= 46712
+LnNjcmlwdA== 46713
+IE9saXZl 46714
+RE1B 46715
+dmllcg== 46716
+OnNlbWljb2xvbg== 46717
+LW1vZHVsZQ== 46718
+Z3Jlc3NpdmU= 46719
+YWd1 46720
+X3BsYXllcnM= 46721
+IHJlc3VsdGFkb3M= 46722
+c3RhcnRlZA== 46723
+c2Nyb2xsVG9w 46724
+PT09PT0= 46725
+IHdlaWdoaW5n 46726
+IFtbWw== 46727
+emFobA== 46728
+KE5T 46729
+IEFzc2VydGlvbg== 46730
+bGVhZ3Vl 46731
+LnNldFRleHRDb2xvcg== 46732
+CU1lc3NhZ2U= 46733
+IG1vbXM= 46734
+X0FG 46735
+Lndo 46736
+QUxT 46737
+IGF1dHJl 46738
+XQoKCgo= 46739
+Lm9wYWNpdHk= 46740
+IEJ1ZGRoaXN0 46741
+IGRlYWY= 46742
+IE9yZ2FuaXNhdGlvbg== 46743
+KEdsb2JhbA== 46744
+ZW5zY2g= 46745
+IGhlYWRhY2hl 46746
+IEFsaWVu 46747
+X2lub2Rl 46748
+IFN0YXJr 46749
+IOaJ 46750
+LWxuZA== 46751
+b3JlZg== 46752
+X2ZlYXQ= 46753
+IHBlZGVzdHJpYW4= 46754
+IG5vbWluYWw= 46755
+IGJhbGxvb24= 46756
+IHNwcml0ZXM= 46757
+UHJvdG90eXBlT2Y= 46758
+IEFwb3N0 46759
+IEZFQVRVUkU= 46760
+T0g= 46761
+IHJlY2Vzcw== 46762
+IERvbm5h 46763
+Y29uc3VtZXI= 46764
+JEdMT0JBTFM= 46765
+IEdJRg== 46766
+LWZyYW1l 46767
+SW5pY2lv 46768
+IHBhc3NhZ2Vz 46769
+RGF0ZVN0cmluZw== 46770
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA== 46771
+LmJ5dGU= 46772
+QnVn 46773
+aW5pdGlhbGl6ZXI= 46774
+cGt0 46775
+b2RpdW0= 46776
+IERFUg== 46777
+Lm9wcw== 46778
+bGVyaQ== 46779
+IGdpZnRlZA== 46780
+IGRldGFjaA== 46781
+dGVycmFpbg== 46782
+ZWx0ZXJz 46783
+44GP 46784
+LmxvYWRlcg== 46785
+IE5HTw== 46786
+c3RybmNtcA== 46787
+S2g= 46788
+KGZvbnRTaXpl 46789
+cm9ja2V0 46790
+IHByZWNlZGVudA== 46791
+IEF1cm9yYQ== 46792
+IEV4cGVyaW1lbnQ= 46793
+aXNwaGVyZQ== 46794
+RW5jb2RlZA== 46795
+IOKAkwoK 46796
+IHB5cmFtaWQ= 46797
+IEFubml2ZXJzYXJ5 46798
+b2ZpbA== 46799
+658= 46800
+KHBsdWdpbg== 46801
+Q29lZmY= 46802
+IGNvb3BlcmF0ZQ== 46803
+IHByZWRvbWluYW50bHk= 46804
+SVNN 46805
+UGhyYXNl 46806
+X0RFRklORQ== 46807
+RmxpcA== 46808
+QU1JTFk= 46809
+IE1hcmtldHM= 46810
+IFN0cmVhbVJlYWRlcg== 46811
+IENvbWJpbmU= 46812
+IG1hbnVzY3JpcHQ= 46813
+enph 46814
+LHRw 46815
+V2hhdGV2ZXI= 46816
+SVRJQ0FM 46817
+aWdoYm91cg== 46818
+RGF0YVByb3ZpZGVy 46819
+LlRleHR1cmU= 46820
+cHJpdmFjeQ== 46821
+LlNESw== 46822
+IHJlY2hhcmdl 46823
+IGNwcA== 46824
+IENGRw== 46825
+KGhvbGRlcg== 46826
+KHB5 46827
+bW90 46828
+IHNhdm9pcg== 46829
+IFJvc2E= 46830
+IFBDcw== 46831
+IO2Z 46832
+Lmhlcm9rdQ== 46833
+IGZyZW4= 46834
+IFJpbGV5 46835
+YWdhdGU= 46836
+IHNvbmQ= 46837
+Lnhsc3g= 46838
+IGhhY2tlZA== 46839
+c3RhZA== 46840
+R2k= 46841
+IHNhbml0eQ== 46842
+IFNxbERhdGFBZGFwdGVy 46843
+Li4uIiw= 46844
+IFB1c3N5 46845
+ICoqKioqKioqKioqKioqKio= 46846
+IGhhc3NsZQ== 46847
+X1BBUkVOVA== 46848
+IFVBRQ== 46849
+IGJlZ2lubmVycw== 46850
+KENsaWVudA== 46851
+IHN0YXRpc3RpY2FsbHk= 46852
+LmhvdXI= 46853
+ZWRlbHRh 46854
+IHRyYWN0aW9u 46855
+dWVsdmU= 46856
+YXJhdA== 46857
+IHNhdW5h 46858
+SU5WQUxJRA== 46859
+IGluZGljdG1lbnQ= 46860
+QUxMRQ== 46861
+IGRpc3NlbnQ= 46862
+IFR5cG9ncmFwaHk= 46863
+IGludGVudGlvbmFs 46864
+c2l0 46865
+IEFuaW1hbHM= 46866
+IGNvdW50cnlzaWRl 46867
+IHVhcnQ= 46868
+fVwi 46869
+IHNlYW1sZXNz 46870
+vuekug== 46871
+IGF1dG9z 46872
+ICInIjsK 46873
+Rmx1c2g= 46874
+QU5OT1Q= 46875
+IGFsZ2VicmE= 46876
+YXNzb2M= 46877
+IFdhdGVycw== 46878
+IHByZXBhcmF0aW9ucw== 46879
+cm9ueW0= 46880
+Wyxd 46881
+U2Fucw== 46882
+IGFybWllcw== 46883
+aXBlZw== 46884
+IGNyZWFteQ== 46885
+LmFydA== 46886
+ZXRyZQ== 46887
+IEFuaW1hdGVk 46888
+IHVucGxlYXNhbnQ= 46889
+ZW1lYW4= 46890
+Z3JlYXQ= 46891
+acSF 46892
+IEVhcmxpZXI= 46893
+IGNoaWM= 46894
+IHByZXNlcnZpbmc= 46895
+KGV4ZWM= 46896
+IEludmVzdGlnYXRpb24= 46897
+CUdQSU8= 46898
+IHJpZ29yb3Vz 46899
+aWpv 46900
+PW51bQ== 46901
+IHRvb2xTdHJpcA== 46902
+KXNldA== 46903
+KyIm 46904
+IEFjY2VsZXI= 46905
+IGRldmVsb3BtZW50YWw= 46906
+aXNwb3NhYmxl 46907
+IGZsYXdlZA== 46908
+cmVuZQ== 46909
+VXBkYXRpbmc= 46910
+IHdhdGNoZG9n 46911
+IGRlbm9taW5hdG9y 46912
+IHN1YnVyYnM= 46913
+IC4uLik= 46914
+IGNvbnZpY3Rpb25z 46915
+Y2xvc3VyZQ== 46916
+LklQ 46917
+IHRyYW5zbGF0ZXM= 46918
+LnN3dA== 46919
+LlRyYWNl 46920
+IG1ldHRyZQ== 46921
+LmlzRW5hYmxlZA== 46922
+IEVmZmVjdGl2ZQ== 46923
+LnRvSW50 46924
+IGVuY2hhbnQ= 46925
+IHN0dW5uZWQ= 46926
+IHBvaQ== 46927
+L2NvZGU= 46928
+YWRt 46929
+LmRhdGFiaW5kaW5n 46930
+IExvcmVt 46931
+X19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fXw== 46932
+IGxlZGdlcg== 46933
+IGNhcmE= 46934
+IEdpcg== 46935
+IHdhaXRz 46936
+VW5v 46937
+IGN3ZA== 46938
+6L6R 46939
+IFRSZXN1bHQ= 46940
+IHJlam8= 46941
+IGVtaXR0ZWQ= 46942
+IFdlc3RtaW5zdGVy 46943
+5LiA5Liq 46944
+bmVr 46945
+X1Rpcw== 46946
+IGVuYWN0 46947
+CXdpdGg= 46948
+b3JnaWE= 46949
+IGp1ZQ== 46950
+UGVyZm9ybQ== 46951
+U1BBVEg= 46952
+LnRvcGlj 46953
+IERhdGVu 46954
+4bqn 46955
+IHNpdGlv 46956
+X01N 46957
+IlNv 46958
+YmlhbA== 46959
+IHNjb3BlZA== 46960
+UmVxdWlyZXM= 46961
+IFRPVEFM 46962
+IENoYW5jZWxsb3I= 46963
+KGNvbnRlbnRz 46964
+IHN0ZWFsdGg= 46965
+ZGV2aWNlcw== 46966
+LXBhc3M= 46967
+aWxpaA== 46968
+IE1hbGNvbG0= 46969
+IERlcG90 46970
+IGNvbmZpZ3Vy 46971
+YXVzc2lhbg== 46972
+X2NvbnN0cmFpbnQ= 46973
+0LLQtdGC 46974
+R1JB 46975
+IFJhdGVz 46976
+LmRhdGFHcmlkVmlld1RleHRCb3hDb2x1bW4= 46977
+IE5vYmVs 46978
+aXRpY3M= 46979
+IGlnbm9yYW50 46980
+IFJlcG9ydGVy 46981
+IEVib2xh 46982
+IFNob2Nr 46983
+X3JlbGF0aW9u 46984
+IE5pbmph 46985
+KWM= 46986
+IHRpY2tlcg== 46987
+LmlzQ2hlY2tlZA== 46988
+IFN1cHBsaWVycw== 46989
+IFJhcGlk 46990
+TGV2ZWxz 46991
+4oKs4oSi 46992
+CXF1ZXVl 46993
+IGNob3A= 46994
+IFVuaXg= 46995
+cmVqZWN0 46996
+LWNhbGVuZGFy 46997
+KHNvcnQ= 46998
+w6huZQ== 46999
+ZXJjaWNpbw== 47000
+IGhlY3Q= 47001
+Q0FMTFRZUEU= 47002
+cm91cG9u 47003
+IHJlbnRhbHM= 47004
+YXV0aG9ycw== 47005
+e25hbWU= 47006
+IEZJRk8= 47007
+IGxhc3Nlbg== 47008
+IE5vdXM= 47009
+IHNuYXBwZWQ= 47010
+IGZlcnRpbGl0eQ== 47011
+ImxvZw== 47012
+Y2xpY2tlZA== 47013
+IHBsYW50aW5n 47014
+IGdi 47015
+L291dHB1dA== 47016
+UEVBVA== 47017
+IGNhdGVnb3JpYQ== 47018
+IGJhY2g= 47019
+UHJvZmVzc29y 47020
+aW50aA== 47021
+Il0NCg== 47022
+UmVjb3JkZXI= 47023
+c2VyZGU= 47024
+IFRyYW5zbWlzc2lvbg== 47025
+dHJhZA== 47026
+IHR1cmJv 47027
+X1ZFUlRFWA== 47028
+XEV2ZW50 47029
+aWx2ZXI= 47030
+IGJvZGlseQ== 47031
+IFNvdXJjZXM= 47032
+IGtpbGxpbmdz 47033
+LnhyVGFibGVDZWxs 47034
+IGZvbGRlZA== 47035
+L2xlZ2Fs 47036
+dW5lcg== 47037
+IFJpZmxl 47038
+IE1JREk= 47039
+X1NlbGVjdGVkSW5kZXhDaGFuZ2Vk 47040
+LlNpemVUeXBl 47041
+IFdlYlNvY2tldA== 47042
+IHNlbGVjY2lvbg== 47043
+U2FuZA== 47044
+b3Ryb3M= 47045
+IGVudmlzaW9u 47046
+L2V0Yw== 47047
+IE1lbGlzc2E= 47048
+U3BvdA== 47049
+0L3QvtC1 47050
+X0FSTQ== 47051
+QXR0ZW1wdA== 47052
+IEJJ 47053
+44GU 47054
+IERV 47055
+IGJhY2tsYXNo 47056
+c3RyaWRl 47057
+L2NsYXNzZXM= 47058
+IHRleHRDb2xvcg== 47059
+X3N0YWZm 47060
+b2JsaW4= 47061
+YWdlbnRh 47062
+LmNvbGxlY3Rpb25z 47063
+aWxsYWdl 47064
+Jw0KDQo= 47065
+ZmxhdHRlbg== 47066
+X3NhbGVz 47067
+X01BU1RFUg== 47068
+VFc= 47069
+X2Rh 47070
+UGl0Y2g= 47071
+cGhpZXM= 47072
+IHpvbWJpZXM= 47073
+IFZFUlk= 47074
+IFBoYXJtYWN5 47075
+IHByb2dyZXNzQmFy 47076
+IGhhc2h0YWc= 47077
+U2lkZWJhcg== 47078
+QHN0b3A= 47079
+KHBj 47080
+0L7Qu9C2 47081
+TUFLRQ== 47082
+IENvcm9u 47083
+IGt2aW5uZXI= 47084
+IE1haWQ= 47085
+Ym9i 47086
+LnRpdGxlTGFiZWw= 47087
+IHN1Y2Nlc3Nlcw== 47088
+IERlbW9jcmFjeQ== 47089
+IFN1cmdlcnk= 47090
+IGNvdWdhcg== 47091
+IGN1cnNv 47092
+IGxvcm8= 47093
+aXN0ZW5jeQ== 47094
+U2VuaW9y 47095
+w6Zr 47096
+IEFBQQ== 47097
+IEJPT0s= 47098
+0LrQvg== 47099
+V1NUUg== 47100
+ICovLAo= 47101
+b3lhbA== 47102
+LnZlY3Rvcg== 47103
+IFNQRUM= 47104
+U1NG 47105
+IGNvbXB1bHM= 47106
+IEFwcGVhbHM= 47107
+IFdpbnN0b24= 47108
+IE1vY2tpdG8= 47109
+Y29udHJpYg== 47110
+LmF2YWlsYWJsZQ== 47111
+ZW50aXR5TWFuYWdlcg== 47112
+YXJpYXM= 47113
+X3NhbGU= 47114
+X3Jz 47115
+IGRlY29kaW5n 47116
+IGxvY2F0b3I= 47117
+b2xpdGg= 47118
+IGtvbA== 47119
+IGFzY2lp 47120
+IFJ1dA== 47121
+L2ludGVyZmFjZQ== 47122
+CQkJCQkJICAg 47123
+IE51bWVy 47124
+LmZsaXA= 47125
+LWRlbA== 47126
+IGJvbHN0ZXI= 47127
+b25vbWlj 47128
+IHpt 47129
+TEc= 47130
+RmluZEJ5 47131
+IGFkYXB0aXZl 47132
+bG9v 47133
+IHZ1ZQ== 47134
+KHJldmVyc2U= 47135
+X2NhbnZhcw== 47136
+LnJvbGVz 47137
+aWZpY2Fkbw== 47138
+dmVuaWVudA== 47139
+IkFz 47140
+IEVudHI= 47141
+YWxpZ25lZA== 47142
+IGJlcmVpdHM= 47143
+Ly8vCgo= 47144
+Lmd3dA== 47145
+LmVtcGxveWVl 47146
+X2NsaQ== 47147
+IGFudGljaXBhdGU= 47148
+6ZmQ 47149
+IHBpaw== 47150
+IG11c2hyb29tcw== 47151
+KHR0 47152
+IG9tYQ== 47153
+IFNhbmNoZXo= 47154
+X2dvb2dsZQ== 47155
+LlZhbGlk 47156
+IEZpbGVOYW1l 47157
+aXZhdGl2ZQ== 47158
+a2Vk 47159
+LXdhcg== 47160
+IG1hdHVyaXR5 47161
+0LjQtA== 47162
+IG1pbmVy 47163
+UmVkdWNlcnM= 47164
+IExhdExuZw== 47165
+X1NURA== 47166
+RGlnaXRz 47167
+Q2FsYw== 47168
+LXVwbG9hZA== 47169
+IGhhbmRpYw== 47170
+4Li14LmI 47171
+ZWdyYXRlZA== 47172
+IFNUTQ== 47173
+Q2xpZW50cw== 47174
+IFR1cmJv 47175
+U1lOQw== 47176
+IHBob3RvZ3JhcGhlcnM= 47177
+Lk91dA== 47178
+LmNoYXJhY3Rlcg== 47179
+QlVJTEQ= 47180
+LnVubG9jaw== 47181
+IGFyaXNlcw== 47182
+IENvbW1hbmRz 47183
+KCIiKTsNCg== 47184
+X0ZPUkU= 47185
+Oycs 47186
+KyIn 47187
+LkltYWdlcw== 47188
+Iil7 47189
+IE1leWVy 47190
+IG5lZ2F0aXZlbHk= 47191
+IERMTA== 47192
+IGV4ZQ== 47193
+IGRlZmljaWVuY3k= 47194
+IHdpbGRseQ== 47195
+LXN3aXRjaA== 47196
+Y29uc3RydWN0aW9u 47197
+IGV4Y2VwdGlvbmFsbHk= 47198
+IExpeg== 47199
+L2phdmE= 47200
+IHRoZWlycw== 47201
+IENvbnRlbXBvcmFyeQ== 47202
+bGlz 47203
+LmZpbGxSZWN0 47204
+IE5GQw== 47205
+IHJlaGU= 47206
+KG51bWJlcnM= 47207
+IHJhc3Rlcg== 47208
+IGZpZ3VyaW5n 47209
+IHNob3dj 47210
+IEppbGw= 47211
+IGFyY2FkZQ== 47212
+IENvbnN0cnVjdHM= 47213
+bWRs 47214
+KCd8 47215
+IGlkZW50aWZpZXJz 47216
+IHN0ZWxsYXI= 47217
+KENvbm5lY3Rpb24= 47218
+ICJ7ew== 47219
+eW9y 47220
+KG15c3FsaQ== 47221
+IGRvdmU= 47222
+T2ZCaXJ0aA== 47223
+LmRpc2Nvbm5lY3Q= 47224
+X2hp 47225
+IHp3aXNjaGVu 47226
+IEdydW5k 47227
+aXJvcw== 47228
+X0FycmF5 47229
+Lm9uY2xpY2s= 47230
+YW5zb20= 47231
+QW5zd2Vycw== 47232
+CXJlbW92ZQ== 47233
+RmE= 47234
+IGh1cnJ5 47235
+LWluZg== 47236
+IGdldENsYXNz 47237
+IFJlZ3VsYXRpb24= 47238
+IEZMQUdT 47239
+bWlzYw== 47240
+S2Vu 47241
+X2hlYWRpbmc= 47242
+R0h6 47243
+LWVudHJ5 47244
+IGJpb2dyYXBoeQ== 47245
+U2ln 47246
+LW1m 47247
+V2F0Y2hlcg== 47248
+4oCcQQ== 47249
+fXB4 47250
+IHNwaWN5 47251
+X3Nx 47252
+TG9zdA== 47253
+KHRyYWNr 47254
+0LDQu9C4 47255
+RGVzY2VuZGluZw== 47256
+PGJpdHM= 47257
+cXVpbmU= 47258
+IEFkdm9j 47259
+X1NO 47260
+IEhhbm5haA== 47261
+UE9Q 47262
+IGVtaXR0ZXI= 47263
+IGN5bg== 47264
+IENBRA== 47265
+Pyku 47266
+L3NldA== 47267
+IFNpc3Rlcg== 47268
+IEVuZHBvaW50 47269
+IG1lbm9y 47270
+IGludGVycA== 47271
+cms= 47272
+aWRsZQ== 47273
+IG91dGZpdHM= 47274
+LnZlcnRleA== 47275
+IGNsaWM= 47276
+QVJFTg== 47277
+IHBvc3R1cmU= 47278
+IE9wcG9ydHVuaXR5 47279
+dng= 47280
+IEZvcmJlcw== 47281
+LkRpcmVjdGlvbg== 47282
+IHJlc2lkZQ== 47283
+IHJlbWVtYmVyaW5n 47284
+bmVzdHk= 47285
+QXV0b3Jlc2l6aW5n 47286
+cHJvdmlkZXJz 47287
+IEFI 47288
+IGh1cnRpbmc= 47289
+IExpbHk= 47290
+ZXZhbHVhdGU= 47291
+bGlqaw== 47292
+cGFwZXJz 47293
+IFNtYXNo 47294
+IExBU1Q= 47295
+IHdlbGxz 47296
+d2FzaGVy 47297
+X1JPTEU= 47298
+IERhbmdlcg== 47299
+Kigo 47300
+X3JlcG9zaXRvcnk= 47301
+IFJlc29sdmU= 47302
+IFJvb21z 47303
+X1JH 47304
+IFFU 47305
+b29w 47306
+IEhlYXA= 47307
+IHNsb3dpbmc= 47308
+IGdyYXR1aXRl 47309
+X2NhdGFsb2c= 47310
+IHBvbHlub21pYWw= 47311
+THk= 47312
+cGNz 47313
+Rm94 47314
+IEN5cg== 47315
+IGRpbWlu 47316
+L21vbnRo 47317
+U2FsdA== 47318
+IGhpbmQ= 47319
+LlBFUg== 47320
+Rm9ydW0= 47321
+Y2Vu 47322
+X3BvbA== 47323
+7Zi4 47324
+IGluc2Vy 47325
+KH4= 47326
+QHRlc3Q= 47327
+IEdvbGRtYW4= 47328
+IHVwbG9hZGluZw== 47329
+RmM= 47330
+IGtvbW1lcg== 47331
+IG1pdHQ= 47332
+X2xvZ2dlZA== 47333
+IGJ1Y2tz 47334
+LWxheWVy 47335
+KX07Cg== 47336
+IE9N 47337
+IHZlZw== 47338
+Y29sb3Vy 47339
+INC+0LHRig== 47340
+U3RkU3RyaW5n 47341
+X3F1ZQ== 47342
+IFRpYW4= 47343
+IHNwZWNpYWxpemU= 47344
+0LjQvw== 47345
+INC60Ls= 47346
+dHJpYWw= 47347
+LWVkZ2U= 47348
+IG1hcnM= 47349
+T0dMRQ== 47350
+IGVtcGF0aHk= 47351
+IEJvbQ== 47352
+IGNvbGxpc2lvbnM= 47353
+IGNhcnRl 47354
+IFRlaWw= 47355
+IE1QTA== 47356
+IHBvcm7DtA== 47357
+IGFpcmxpbmVz 47358
+QXdz 47359
+TnM= 47360
+IFNwYXdu 47361
+KHVzZQ== 47362
+6buY6K6k 47363
+IHlhY2M= 47364
+c3Rvcg== 47365
+IGNvbmZlc3M= 47366
+IHBlcXVl 47367
+cmFnZQ== 47368
+PyIK 47369
+L2RhdGF0YWJsZXM= 47370
+IFNob3dlcg== 47371
+X18v 47372
+IGNyeXN0YWxz 47373
+IGJ1c2Nhcg== 47374
+IEhhdXM= 47375
+aXphw6fDo28= 47376
+X2VudGl0aWVz 47377
+lYw= 47378
+mow= 47379
+eGNj 47380
+dmlydA== 47381
+LWNoZXZyb24= 47382
+KFJlc3VsdA== 47383
+Y2FrZQ== 47384
+Q09NRQ== 47385
+IHByb2hpYml0 47386
+IENoZXNz 47387
+IGJlYXVjb3Vw 47388
+INGH0YLQvg== 47389
+UlVO 47390
+IElL 47391
+w7PFgg== 47392
+X1VwZGF0ZQ== 47393
+IHNsZWVr 47394
+IFNwZWNpZnk= 47395
+X2NyZWRlbnRpYWxz 47396
+xZ90 47397
+IFVzZXJOYW1l 47398
+CVZhbHVl 47399
+IGFycmF5TGlzdA== 47400
+IGV4Y2hhbmdlZA== 47401
+aXBzaXM= 47402
+LnJlbGF0ZWQ= 47403
+IFNlaXRl 47404
+X0JBUg== 47405
+IExlbQ== 47406
+IFdBVENI 47407
+IENsaWVudHM= 47408
+IC4q 47409
+IEVhcmw= 47410
+LXJlcG9ydA== 47411
+IGZvcmVpZ25lcnM= 47412
+IHN0cmVuZ3RoZW5pbmc= 47413
+CURlc2NyaXB0aW9u 47414
+KGdv 47415
+LnRvb2xiYXI= 47416
+IGNhbGN1bGF0ZXM= 47417
+CXNvdXJjZQ== 47418
+IGN6YXM= 47419
+IHJlY2w= 47420
+YWJv 47421
+IGxvY2FsaG9zdA== 47422
+IF57Cg== 47423
+LlBvcA== 47424
+IERlc2lnbmVk 47425
+XEFic3RyYWN0 47426
+SG9sZA== 47427
+IEd1aWRlbGluZXM= 47428
+aXBsaW5l 47429
+IGNhY2hpbmc= 47430
+LlJlYWRlcg== 47431
+X2V4dGVybmFs 47432
+LnN0cnB0aW1l 47433
+IFdlZWtlbmQ= 47434
+LU1hcg== 47435
+IEJlaQ== 47436
+IHsqfQ== 47437
+IFJ1ZA== 47438
+IGV4cGxvcg== 47439
+IEJvdWxldmFyZA== 47440
+Q2FzaA== 47441
+IHByZXBhcmVz 47442
+IHNlcmlhbGl6YXRpb24= 47443
+ZXdhdGVy 47444
+IGFkYw== 47445
+OgoKCgoKCg== 47446
+UmVmZXI= 47447
+IHNjYW5uZWQ= 47448
+fX0KCg== 47449
+IEZ1bA== 47450
+IHRvdXJpbmc= 47451
+44OD44Kv 47452
+Pigo 47453
+c3VydmV5 47454
+IO2Y 47455
+Li4uJykK 47456
+IERpdmlkZXI= 47457
+b3Ns 47458
+X0NBTkNFTA== 47459
+X3ByZXBhcmU= 47460
+c3Rpbg== 47461
+IEhlYXRo 47462
+LlByaW1hcnlLZXk= 47463
+IOKGkA== 47464
+IExvY2FsRGF0ZVRpbWU= 47465
+IGNvb3BlcmF0aXZl 47466
+TGVhcm5pbmc= 47467
+LmVucXVldWU= 47468
+IGdvb2c= 47469
+IFJlZ3Jlc3Npb24= 47470
+aW1hdGVz 47471
+IHZveWV1cg== 47472
+IERyaW5r 47473
+cGx1Zw== 47474
+IGxlbmRlcg== 47475
+bWFuYQ== 47476
+IHBlcnNvbm5lcw== 47477
+eXBzZQ== 47478
+IHVubGluaw== 47479
+IFJhdmVucw== 47480
+IGh1cmQ= 47481
+IHBlcmlvZGljYWxseQ== 47482
+QVJHUw== 47483
+IEdI 47484
+Y2hhcmFjdGVycw== 47485
+Li4uIgoK 47486
+LWVzdGFibGlzaA== 47487
+IGRu 47488
+KGNvbmRpdGlvbg== 47489
+IEdyYXZpdHk= 47490
+IGVzdGFz 47491
+X2ZvY3Vz 47492
+Q3JlYXR1cmU= 47493
+KHNpdGU= 47494
+IGNhcnI= 47495
+IFJM 47496
+IFJJ 47497
+IE1vdG8= 47498
+QVNG 47499
+IEx1Y2tpbHk= 47500
+CVJvdXRl 47501
+IGVudHJvcHk= 47502
+KCIsIg== 47503
+Q29sbGVjdA== 47504
+KGNvbnRhY3Q= 47505
+IEZsb3JlbmNl 47506
+IHByZW1pdW1z 47507
+IGxpZmVjeWNsZQ== 47508
+IGJhbnM= 47509
+eGVm 47510
+V2ViS2l0 47511
+IEZsb2F0aW5n 47512
+IGNvc2E= 47513
+U3BlY2lmaWM= 47514
+IExvYW5z 47515
+YnJlYWQ= 47516
+IGRlc2NyaXB0b3Jz 47517
+IHs6Lg== 47518
+VEhSRUFE 47519
+IFRyZW50 47520
+IHNjb3A= 47521
+UUE= 47522
+IEFudGFy 47523
+cGVs 47524
+X2RpZmZlcmVuY2U= 47525
+X2NoYW5nZXM= 47526
+KC4uLik= 47527
+IFJvdGF0aW9u 47528
+IExHUEw= 47529
+IEpVU1Q= 47530
+KFRhc2s= 47531
+X3N1YnNldA== 47532
+IFRSQU5T 47533
+5Yqb 47534
+IFNjb3V0 47535
+LXBvcHVw 47536
+IHNtb2tlZA== 47537
+X0NsYXNz 47538
+IHR1cm5vdmVy 47539
+YnJha2s= 47540
+IFJvY2t5 47541
+dGFz 47542
+LlJlZ3VsYXJFeHByZXNzaW9ucw== 47543
+IEVsbGlvdHQ= 47544
+IFNwaW5uZXI= 47545
+RFVDVElPTg== 47546
+IGxpYnJl 47547
+IG1vbHRv 47548
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg 47549
+IEZUUA== 47550
+bXBlZw== 47551
+KGZlYXR1cmVz 47552
+IGJhbGQ= 47553
+IFZpZA== 47554
+IHNob3V0aW5n 47555
+TGludA== 47556
+IHNvY2tldHM= 47557
+IHByb3c= 47558
+IG5vdXZlbGxl 47559
+aXNjYXJk 47560
+IFNwb25zb3I= 47561
+IGNvbnN1bHRh 47562
+KSkpOw== 47563
+SW5kaWFu 47564
+IFJhc3BiZXJyeQ== 47565
+IHRlYW1tYXRl 47566
+IEpXVA== 47567
+IEdoYW5h 47568
+IGNha2Vz 47569
+cHJpbWVy 47570
+Zm9ybWE= 47571
+ZXJnYXJ0ZW4= 47572
+X01hbmFnZXI= 47573
+IHByZXNlYXNvbg== 47574
+R0FNRQ== 47575
+fCI= 47576
+IEJyb2Nr 47577
+IG9jY3VweQ== 47578
+IGRlY29yYXRpb25z 47579
+w6FuZA== 47580
+IGNvdA== 47581
+IHBhcmFu 47582
+RGlzaw== 47583
+cmVtYWlu 47584
+Pj8= 47585
+U3Ryb25n 47586
+IGZyYW5jZQ== 47587
+IEVyYQ== 47588
+LWNy 47589
+LkJ1ZmZlcmVkUmVhZGVy 47590
+IFBhcmFkaXNl 47591
+IFZBVA== 47592
+IEFuZGVycw== 47593
+IGxpbWI= 47594
+YW1wb28= 47595
+IGltcGVyYXRpdmU= 47596
+VVRJTElUWQ== 47597
+IFJlY29nbml0aW9u 47598
+IHJhZ2F6emU= 47599
+IHBvcHM= 47600
+eXByZXNz 47601
+IGVtYmFyZ28= 47602
+Ly97Cg== 47603
+IHN5bGw= 47604
+UFRS 47605
+5a2Y5Zyo 47606
+IGRpZG50 47607
+TWFpbGVy 47608
+IGFjYWRlbWljcw== 47609
+IEZyYXVlbg== 47610
+bmVpZGVy 47611
+LXJlbA== 47612
+IHJhaW5ib3c= 47613
+KElu 47614
+IHNsaWNlZA== 47615
+PT09PT09PT09PT09PQo= 47616
+KHNlbmQ= 47617
+TlNNdXRhYmxlRGljdGlvbmFyeQ== 47618
+dm9z 47619
+KHBhY2thZ2U= 47620
+IG9yZGluYW5jZQ== 47621
+dmlld2Vy 47622
+IFNhbnRvcw== 47623
+LXNlbGxpbmc= 47624
+IGdvdg== 47625
+ZXR0bGU= 47626
+IGZvdW5kZXJz 47627
+IHdha2luZw== 47628
+c2xhc2hlcw== 47629
+LXBvdW5k 47630
+cmVjaHQ= 47631
+2KfYqg== 47632
+Lm9uQ2xpY2s= 47633
+IG5vcmQ= 47634
+c3TDpG5k 47635
+X3doZW4= 47636
+VVRFUlM= 47637
+aWNj 47638
+IGNhcHN1bGU= 47639
+IFdpZA== 47640
+TWFyYw== 47641
+4Li4 47642
+cm9yZWQ= 47643
+VUdF 47644
+TE9VRA== 47645
+IEF1ZGl0 47646
+aXBpZW50cw== 47647
+b3BpYW4= 47648
+IFN1ZQ== 47649
+IHd1cmRlbg== 47650
+LkhlbHBlcnM= 47651
+IGZhY3Rpb25z 47652
+W25w 47653
+LXRoYW4= 47654
+IHJlY28= 47655
+IGthcw== 47656
+IGNtZHM= 47657
+L25ldHdvcms= 47658
+eGJm 47659
+Z2V0Q29sb3I= 47660
+IGJpYXNlZA== 47661
+IExhaw== 47662
+RGF0YXM= 47663
+dmVudHM= 47664
+IOuy 47665
+X1BT 47666
+LlZhbGlkYXRl 47667
+SW52b2tlcg== 47668
+IG5ldWVu 47669
+IGp1dmVuaWxl 47670
+VklTSU9O 47671
+IGRldm90ZQ== 47672
+IGxpbmhh 47673
+IGRpc2NvdW50ZWQ= 47674
+XENvbmZpZw== 47675
+IHdvcnRod2hpbGU= 47676
+IHNraW5ueQ== 47677
+IENvdXJzZXM= 47678
+bGV5cw== 47679
+IE1vcnRnYWdl 47680
+S2V2aW4= 47681
+IGFubm91bmNlcw== 47682
+XSkq 47683
+cmVzZXJ2YXRpb24= 47684
+IOaVsA== 47685
+IHByZWp1ZGljZQ== 47686
+IFN0cmluZ0NvbXBhcmlzb24= 47687
+IGJlYXJk 47688
+LXdpbg== 47689
+IFPDo28= 47690
+CW1z 47691
+amFs 47692
+IEVhcm4= 47693
+X3BvcnRz 47694
+IE5vbWJyZQ== 47695
+X0NPUg== 47696
+IEJVSUxE 47697
+LnNvdW5k 47698
+WWVsbG93 47699
+IGxpbmViYWNrZXI= 47700
+IGNoYXJpdGFibGU= 47701
+anVn 47702
+X05PTk5VTEw= 47703
+IERlbnRhbA== 47704
+Ij4kew== 47705
+CW1hdGNo 47706
+UnVzc2lhbg== 47707
+IHZlcnNjaA== 47708
+IHBpbm5lZA== 47709
+IGFkb3B0aW5n 47710
+T3B0aW9uc01lbnU= 47711
+UGFn 47712
+IHBhaXJpbmc= 47713
+IHRyZWFk 47714
+ZXJjaXNlcw== 47715
+IFNwcmVhZA== 47716
+KWk= 47717
+IEJBRA== 47718
+X3Rm 47719
+VUlJbWFnZVZpZXc= 47720
+cG9wdWxhdGU= 47721
+YmFi 47722
+IM+D 47723
+Wysr 47724
+IG9waW9pZA== 47725
+ICMjCg== 47726
+ZHR5cGU= 47727
+IFN0YXJ0cw== 47728
+KCcvJyk= 47729
+IHBlcnNvbmFscw== 47730
+LW1hcmtldA== 47731
+IHJlZHVuZGFudA== 47732
+IEVzc2VudGlhbA== 47733
+IHNjcmFweQ== 47734
+INC40Lw= 47735
+YWNs 47736
+IGNyZWFy 47737
+IEJlbmQ= 47738
+IHJlbGlldmU= 47739
+LXJvb20= 47740
+d2lmZQ== 47741
+IHbDoA== 47742
+IFFQb2ludA== 47743
+IHF1YXNp 47744
+IG1ldGhvZE5hbWU= 47745
+XHhj 47746
+IFBlcnU= 47747
+L1RoZQ== 47748
+Lm9ybQ== 47749
+IHZpeg== 47750
+L3BkZg== 47751
+TG9jYXRlZA== 47752
+IGNvbmZyb250YXRpb24= 47753
+IENoYW1waW9uc2hpcHM= 47754
+IGh5cGVydA== 47755
+IGRq 47756
+IFVzZXJJbmZv 47757
+IOWIm+W7ug== 47758
+XHhi 47759
+KHNpbQ== 47760
+ID09Cg== 47761
+IHN0YWdpbmc= 47762
+IGRyYXN0aWNhbGx5 47763
+5a2m 47764
+bG9yZHM= 47765
+Lmxlc3M= 47766
+0LLQtdC00LjRgtC1 47767
+IEJ1Y2tldA== 47768
+IE1hbQ== 47769
+LnRlcm0= 47770
+X3Bp 47771
+Y3p5 47772
+LnB1Yg== 47773
+cHJlY2lv 47774
+IFZpcnQ= 47775
+IHJvbWFu 47776
+aXRhdA== 47777
+TGV4 47778
+X2luZm9z 47779
+xLA= 47780
+Lm90aGVy 47781
+VkVMTw== 47782
+IHBvbmRlcg== 47783
+IGhhbm5v 47784
+KFBhZ2U= 47785
+ZG9p 47786
+IHBvbGl0ZQ== 47787
+IHByb2dyYW1tZXI= 47788
+RGllcw== 47789
+JGQ= 47790
+IHJlcGxpY2F0aW9u 47791
+YWRkQ29sdW1u 47792
+ZnJpY2Fu 47793
+IGxlbmc= 47794
+YmVlcg== 47795
+b2l0 47796
+IHdhc3Rpbmc= 47797
+eWxpbQ== 47798
+bWVhc3VyZQ== 47799
+TmVn 47800
+IHBhcnRpZQ== 47801
+LmNvbnNvbGU= 47802
+IEd1aW5lYQ== 47803
+VEVM 47804
+X2ZhY3Q= 47805
+LmNodW5r 47806
+IGxlbnQ= 47807
+IGFsbGVy 47808
+IOCklQ== 47809
+X2lkbGU= 47810
+IGFkbWlzc2lvbnM= 47811
+SlNPTkFycmF5 47812
+IHZpYnJhdGlvbg== 47813
+LmhlbHBlcnM= 47814
+5aSW 47815
+IGhlbg== 47816
+am9obg== 47817
+IOyDnQ== 47818
+IGp1ZGdlbWVudA== 47819
+IGdlZW4= 47820
+dGVycmE= 47821
+Xns= 47822
+IEl6 47823
+IGPDog== 47824
+aW5zdGFuY2Vz 47825
+IHRocmVhdGVucw== 47826
+IG3DvHNzZW4= 47827
+S2luZE9mQ2xhc3M= 47828
+IHN0b3J5dGVsbGluZw== 47829
+X2RlbW8= 47830
+cmlhcw== 47831
+UHJpdmFjeQ== 47832
+aGlmdA== 47833
+IFlp 47834
+ZXNvcg== 47835
+7ZWg 47836
+ZW5zaXRpdml0eQ== 47837
+LldyaXRlcg== 47838
+4LiC 47839
+RGlzdHJpY3Q= 47840
+LmdldEpTT05PYmplY3Q= 47841
+SW1wcm8= 47842
+KGdldFJlc291cmNlcw== 47843
+IFNQRUxM 47844
+cm9kdWNl 47845
+IHNsb3dlZA== 47846
+IGxpbmV3aWR0aA== 47847
+IGhvbmVzdHk= 47848
+IENvb3Jk 47849
+IEZvcms= 47850
+IERpc3BhdGNoUXVldWU= 47851
+IENsaWZm 47852
+IFdpcmluZw== 47853
+X1RJTUVTVEFNUA== 47854
+b2xsYWg= 47855
+YXZvaWQ= 47856
+KytdOwo= 47857
+c2VtYW50aWM= 47858
+LWNzcw== 47859
+IHZldG8= 47860
+IE1lcnI= 47861
+IGxlZ2lzbGF0b3Jz 47862
+Q0VFREVE 47863
+IHF1ZXN0aW9ubmFpcmU= 47864
+IFBpbGxz 47865
+Q2FsY3VsYXRl 47866
+KGNvcmU= 47867
+J2U= 47868
+IGRpc2xpa2U= 47869
+IFByZWZlcmVuY2Vz 47870
+X0VYVEVSTkFM 47871
+6LCD 47872
+IGRvZGdl 47873
+5pyN5Yqh 47874
+Lm5hbWVz 47875
+LmRyYXdJbWFnZQ== 47876
+X3Byb20= 47877
+dWNrbGFuZA== 47878
+IDwkPg== 47879
+xLF6 47880
+L3NpdGU= 47881
+6aG5 47882
+cm9waGU= 47883
+IGNvbXBlbGxlZA== 47884
+IGxhcHRvcHM= 47885
+IHVuaQ== 47886
+Q0xPU0U= 47887
+IGNhc3VhbHRpZXM= 47888
+IFVuaWZvcm0= 47889
+VGVybWluYWw= 47890
+LiIsIg== 47891
+REFU 47892
+KFRyZWVOb2Rl 47893
+IEdhbmRoaQ== 47894
+KHN0bXQ= 47895
+QVhC 47896
+Kk0= 47897
+IHVtYnJlbGxh 47898
+YW5pbWFs 47899
+IGdycGM= 47900
+IHdoZXJlYnk= 47901
+IGZsb2F0cw== 47902
+CWFyZw== 47903
+IGRiZw== 47904
+IGV4Y2VlZGluZw== 47905
+RXZlbnRUeXBl 47906
+LlNhdmVDaGFuZ2VzQXN5bmM= 47907
+IHt7ew== 47908
+IG93ZWQ= 47909
+YWhyZW5oZWl0 47910
+IOyn 47911
+IGVxdWlwbw== 47912
+dXJhaQ== 47913
+IGlkb2w= 47914
+XSIpCg== 47915
+X21ham9y 47916
+IGVudGlyZXR5 47917
+aW5nZXJwcmludA== 47918
+w6dvcw== 47919
+L2FjY291bnQ= 47920
+CXJpZ2h0 47921
+dXJzb3M= 47922
+IEVEVA== 47923
+X0lOU0VSVA== 47924
+IHNoaW5pbmc= 47925
+IDw6 47926
+RWRnZUluc2V0cw== 47927
+IGNvbG9uaWVz 47928
+LklN 47929
+CSAJ 47930
+Uk9BRA== 47931
+Q0NDQw== 47932
+cGxhY2luZw== 47933
+IGdldEFjdGl2aXR5 47934
+ZW1hY3M= 47935
+JyUo 47936
+LmNsaWNrZWQ= 47937
+IFRoZW0= 47938
+aXNpYQ== 47939
+QnVzY2Fy 47940
+LnJlbmFtZQ== 47941
+IG9hdGg= 47942
+IGFmdGVyd2FyZA== 47943
+IFVGTw== 47944
+QVBT 47945
+IEphY2tzb252aWxsZQ== 47946
+LnNvbWU= 47947
+Q29uZmlybWVk 47948
+LnNjYW4= 47949
+aWdJbnRlZ2Vy 47950
+RGVjb3JhdG9y 47951
+c2hpZWxk 47952
+cmVzc2l2ZQ== 47953
+LmRpZA== 47954
+6K+36L6T5YWl 47955
+IHNodXR0ZXI= 47956
+RGFt 47957
+IHBhcmVudGluZw== 47958
+ZXllZA== 47959
+JGl0ZW0= 47960
+LWRldmVsb3A= 47961
+IGV4dHJhY3Rz 47962
+IGRlY2VudHJhbGl6ZWQ= 47963
+IEVsc2E= 47964
+X3NwaW4= 47965
+XSkr 47966
+LWluaXRpYWw= 47967
+IG11bHRpdHVkZQ== 47968
+IHNlbnNvcnk= 47969
+IE1PREVM 47970
+IHNhZmVndWFyZA== 47971
+7Lk= 47972
+IGh1bnRlcnM= 47973
+IFRpbnk= 47974
+SU5P 47975
+ZGVjb3JhdGU= 47976
+IE5vU3VjaA== 47977
+SG8= 47978
+KFJlc3BvbnNl 47979
+IHJ1bGVy 47980
+CXNob3J0 47981
+IGNhc3Rlcg== 47982
+IGNsaWVudElk 47983
+IHBkYg== 47984
+64+E 47985
+aXRpYw== 47986
+IEdhbWVTdGF0ZQ== 47987
+IG5ld0l0ZW0= 47988
+KQoKCgoKCg== 47989
+b3Vpcw== 47990
+bm9j 47991
+LkJMQUNL 47992
+X1ZFQ1RPUg== 47993
+LS0tLS0tLS0tLTwv 47994
+IGV4YW1pbmVz 47995
+CWJsb2Nr 47996
+IGFkZG9u 47997
+IHN1cnZleWVk 47998
+IExpc3RlbmVy 47999
+IGZyb250aWVy 48000
+IGxhY2tlZA== 48001
+SlVTVA== 48002
+INGN0YI= 48003
+IHRpbnQ= 48004
+IE15c3Rlcnk= 48005
+ZGF0ZVRpbWU= 48006
+IFR1dG9yaWFs 48007
+IGZ1bGxOYW1l 48008
+IERyYWdvbnM= 48009
+X0ZJTEVT 48010
+IFByaW50V3JpdGVy 48011
+IGJlZXQ= 48012
+IExhZGllcw== 48013
+X3RpcA== 48014
+IEphaHJl 48015
+b3JhbWE= 48016
+IGluc3VsYXRpb24= 48017
+KEVudmlyb25tZW50 48018
+X2FzdA== 48019
+YmVyZ2Vy 48020
+bGVuYQ== 48021
+b2dlbmVvdXM= 48022
+X01PTlRI 48023
+LXByZXNlbnQ= 48024
+IGZyYW1ld29ya3M= 48025
+UVE= 48026
+UEhQRXhjZWw= 48027
+IGNvdW50ZG93bg== 48028
+IEZX 48029
+KGNsdXN0ZXI= 48030
+OmM= 48031
+IG9raHR0cA== 48032
+b2JzZXJ2ZQ== 48033
+W3BsYXllcg== 48034
+Lmhl 48035
+IFBhbmFtYQ== 48036
+QXVzdHJhbGlh 48037
+IG91bmNlcw== 48038
+IGFnZ3Jlc3NpdmVseQ== 48039
+IHdhcm5z 48040
+IGN1c3RvbWl6YXRpb24= 48041
+X1F1ZXJ5 48042
+d2lz 48043
+IGludmFs 48044
+QUZG 48045
+KGNhbWVyYQ== 48046
+V2ly 48047
+IG5lZ290aWF0aW9u 48048
+CU8= 48049
+IHJlc3BlY3RmdWw= 48050
+IGRpYW1vbmRz 48051
+J2F2 48052
+YXBwcm94 48053
+L2Ry 48054
+IGdyYWJz 48055
+IGFjY29tcGFuaWVz 48056
+Y29uc3RyYWludA== 48057
+IHJleg== 48058
+KHJlZ2lvbg== 48059
+IGJhaXQ= 48060
+dGVybWluYXRl 48061
+IEJlbGdpYW4= 48062
+YXNzaXVt 48063
+IF0NCg== 48064
+U3lzdGVtcw== 48065
+b3VzZWRvd24= 48066
+LmJ1cw== 48067
+U2V0VmFsdWU= 48068
+IFByZXA= 48069
+IGNvbnZlbmllbnRseQ== 48070
+Lm1pZA== 48071
+Y2FzZWNtcA== 48072
+TnVtZXJv 48073
+ZGFpbHk= 48074
+IENvZGluZw== 48075
+KGRlc3RpbmF0aW9u 48076
+IyQ= 48077
+dWrEhQ== 48078
+IGVtZXJnZW5jZQ== 48079
+X3BhcmE= 48080
+X0lOQ0xVREU= 48081
+Izo= 48082
+IHJlY29nbml6aW5n 48083
+IGZ1Zw== 48084
+In19LAo= 48085
+IGJ1aWxkZXJz 48086
+IFRlcnJpdG9yeQ== 48087
+IGluaGVyZW50bHk= 48088
+IGRlcml2aW5n 48089
+LmV0aA== 48090
+IERpbm5lcg== 48091
+LnNldE9iamVjdE5hbWU= 48092
+IGNlbGVicmF0ZXM= 48093
+IHF1ZXVlcw== 48094
+IE1hcmtz 48095
+QUxURVI= 48096
+IERhcnQ= 48097
+cG9rZQ== 48098
+X0NIQU5HRUQ= 48099
+IHBhYXI= 48100
+bGllcw== 48101
+LnZvbGxleQ== 48102
+IE1lYW5pbmc= 48103
+IE9GRlNFVA== 48104
+ZW5zaW5n 48105
+IGZyw6Vu 48106
+LmxvY2FsU3RvcmFnZQ== 48107
+IOup 48108
+KHt9KTsK 48109
+ZGVjb2Rlcg== 48110
+IHJvdWxldHRl 48111
+IGRpc21hbnQ= 48112
+SXI= 48113
+IGluc3VyZw== 48114
+ICcnOgo= 48115
+LuKAnQo= 48116
+IGJydW5ldHRl 48117
+LmFzc2V0cw== 48118
+X05FVFdPUks= 48119
+4LiK 48120
+bnlt 48121
+X1NvdXJjZQ== 48122
+XFRlc3Rz 48123
+RXNjYXBl 48124
+Y3J5cHQ= 48125
+LlhNTA== 48126
+IHNvdW5kaW5n 48127
+b3Bjb2Rl 48128
+IGNsYXNzaWZ5 48129
+IGVtYmFycmFzc2Vk 48130
+IExPR0lO 48131
+IHJlc2lkdWU= 48132
+IE5FRUQ= 48133
+LmRlZXBFcXVhbA== 48134
+cGVyYw== 48135
+LWNhbA== 48136
+UmVkaXM= 48137
+VHJh 48138
+KF8p 48139
+YXNrZXRz 48140
+Z3JhZGF0aW9u 48141
+IGVuenltZQ== 48142
+IFN0ZXBoYW5pZQ== 48143
+LkludmFsaWQ= 48144
+J10/Pjwv 48145
+IGRpc3BsYWNlZA== 48146
+IGVsZW1lbnRvcw== 48147
+KGR1cmF0aW9u 48148
+cm93Q291bnQ= 48149
+IEZTdGFy 48150
+bGV0YQ== 48151
+L3BvcHBlcg== 48152
+IHN0YXRv 48153
+IHBlcmZvcm1lcg== 48154
+IGRpc2NpcGxpbmVz 48155
+IEZ1bGx5 48156
+aWN1bGFybHk= 48157
+IGVyc3Rlbg== 48158
+IFBvbHlnb24= 48159
+IGRpc2NpcGxlcw== 48160
+LmlzZGly 48161
+IHRlc3RpZnk= 48162
+X1NS 48163
+cHJpc2luZ2x5 48164
+IEdMaW50 48165
+IHdpcGVk 48166
+IGNhcnZlZA== 48167
+IERpc2g= 48168
+Lmhlcm9rdWFwcA== 48169
+c3RpdGlhbA== 48170
+IE1BVENI 48171
+Y2xhaXI= 48172
+IERheXRvbg== 48173
+LycpCg== 48174
+SURETEU= 48175
+IGluZnJh 48176
+IGxpdmVseQ== 48177
+IGRlcHM= 48178
+IFsuLi5d 48179
+CQkJCQkJCQkJCQkJCQkJCQk= 48180
+IExvbg== 48181
+RXh0cmFz 48182
+VHJhbnNpZW50 48183
+0LLQtdGA 48184
+L21vZHVsZQ== 48185
+IGVuZHVyYW5jZQ== 48186
+X3RleA== 48187
+ICJ+Lw== 48188
+X3lsYWJlbA== 48189
+IG9iZWQ= 48190
+L2dhbWU= 48191
+b3BzeQ== 48192
+IGZpcnN0bmFtZQ== 48193
+LmZvcmNl 48194
+IG1hcnQ= 48195
+XENsaWVudA== 48196
+IGxlZ2l0aW0= 48197
+LmZsYXR0ZW4= 48198
+Iics 48199
+b3NleHVhbA== 48200
+IGpvdXJz 48201
+TUg= 48202
+ZXhwaXJlcw== 48203
+IHN0eWw= 48204
+LmludGVydmFs 48205
+S25vd24= 48206
+IGZvbGxvd2Vy 48207
+IGRhbGxh 48208
+cGlyeQ== 48209
+X3NzbA== 48210
+aXNobGlzdA== 48211
+IFJleQ== 48212
+IHN1cGVybWFya2V0 48213
+T2J2aW91c2x5 48214
+LWVudGVy 48215
+IHByb2JhYmlsaXRpZXM= 48216
+IEhW 48217
+IENpbmVtYQ== 48218
+IGN0eXBlcw== 48219
+IEJDTQ== 48220
+X1RBQw== 48221
+O2E= 48222
+LmJ1dHRvbnM= 48223
+IHJldHJpZXZpbmc= 48224
+aWxhcml0eQ== 48225
+IHVuZGVydGFraW5n 48226
+CXN0YWNr 48227
+IGtlbA== 48228
+IFhlbg== 48229
+KHBoaQ== 48230
+IHRvdWdoZXI= 48231
+IFNlbGxlcg== 48232
+Y2Fwcw== 48233
+IEVtYmVy 48234
+IENoaW4= 48235
+IGxhdWdocw== 48236
+Q29udmVyc2lvbg== 48237
+Lmxpc3RlbmVy 48238
+JkI= 48239
+IHBhcmFkaWdt 48240
+IGp1bmN0aW9u 48241
+JC8sCg== 48242
+W28= 48243
+IENvbnNlcnZhdGl2ZXM= 48244
+z4A= 48245
+bGF0ZXM= 48246
+X0V4Y2VwdGlvbg== 48247
+IG1laWxsZXVy 48248
+IHN0cmFwcw== 48249
+cXVpc2l0ZXM= 48250
+CXNu 48251
+IG1hc3NhY3Jl 48252
+b3R0ZXM= 48253
+X2dyZWVu 48254
+VGl0bGVz 48255
+Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQ== 48256
+IFJlZ3VsYXRpb25z 48257
+YXJs 48258
+X3Nob3J0Y29kZQ== 48259
+IERyYXdlcg== 48260
+IHBhcm9sZQ== 48261
+IHdpbGRlcm5lc3M= 48262
+aXNzb24= 48263
+IEFGVEVS 48264
+Q3JlZGVudGlhbA== 48265
+QmxvY2tpbmc= 48266
+IEhUQw== 48267
+U2lu 48268
+KGF1dGhvcg== 48269
+IGNvcnRleA== 48270
+Jyl7DQo= 48271
+77yJ77yM 48272
+IGR1bXBlZA== 48273
+IFNodXQ= 48274
+IEtleUV2ZW50 48275
+CVBsYXllcg== 48276
+LmdldFBsYXllcg== 48277
+IGlnbm9yZXM= 48278
+dG9nZ2xlQ2xhc3M= 48279
+IEV4Y2x1c2l2ZQ== 48280
+PigpOw== 48281
+LmdldFA= 48282
+YW55ZQ== 48283
+IG5ldXJvbg== 48284
+aWZvbGQ= 48285
+IEtub3du 48286
+Qml0Y29pbg== 48287
+QW55d2F5 48288
+YXlldHRl 48289
+ICdbJw== 48290
+w6BuaA== 48291
+bWdy 48292
+IGNvcnJlbGF0ZWQ= 48293
+IG5hdXNl 48294
+IG1lbnRhbGl0eQ== 48295
+aGFzTWFueQ== 48296
+IEZH 48297
+YW1waWU= 48298
+SVRV 48299
+RnM= 48300
+LlNw 48301
+X2JldHdlZW4= 48302
+RGVwZW5kZW5jaWVz 48303
+b3Vn 48304
+UGxhY2Vob2xkZXI= 48305
+PXRleHQ= 48306
+IE1hbmFnaW5n 48307
+b2NhbHlwc2U= 48308
+5YyX 48309
+X21hZw== 48310
+Zmxk 48311
+4pE= 48312
+Q0FN 48313
+IEhlbHBlcnM= 48314
+IGRvc3Q= 48315
+L291dA== 48316
+IGFzc2Fzc2luYXRpb24= 48317
+LmdldEltYWdl 48318
+IEtlbm55 48319
+LicpCgo= 48320
+KXsvLw== 48321
+IFJhbmdlcg== 48322
+IGdlaw== 48323
+IHNpbmNlcmU= 48324
+PFZhbHVl 48325
+IERPVA== 48326
+IFZpY3Rvcnk= 48327
+IGxlZ2VuZHM= 48328
+IHByaXNvbnM= 48329
+KGV4cHJlc3Npb24= 48330
+IFJhYmJpdA== 48331
+X3NlbnRlbmNl 48332
+IGJpdGVz 48333
+IG9uRmFpbHVyZQ== 48334
+IOKIiA== 48335
+S2lt 48336
+LmdlbmRlcg== 48337
+IM67 48338
+IFsu 48339
+Il0pOw== 48340
+bGFuZGluZw== 48341
+LWRpZ2l0 48342
+VEVNUA== 48343
+CWVudHJ5 48344
+IHN0cnRvaw== 48345
+IGRlc2NlbmRhbnRz 48346
+dW1ubw== 48347
+IGxlYW5pbmc= 48348
+IHNwZWNpZmljcw== 48349
+cW4= 48350
+IFNwYXJ0 48351
+IHBvcnI= 48352
+RURJQVRFSw== 48353
+IHNlcGVy 48354
+J2F1dA== 48355
+IFNURVA= 48356
+IEJvcmRlckxheW91dA== 48357
+IHJldHJvcw== 48358
+IFNhbHZhZG9y 48359
+IEVOR0lORQ== 48360
+eGRj 48361
+VHdlZXQ= 48362
+dms= 48363
+IOyy 48364
+XTw8 48365
+aGV0aWNz 48366
+Y29kaW5n 48367
+UmVhY2g= 48368
+LnJlcQ== 48369
+Z3VpZGU= 48370
+LnNjb3Bl 48371
+c2hpcnQ= 48372
+cm9nYXRl 48373
+U0VUVElORw== 48374
+IFByb3RlaW4= 48375
+IGVpbmc= 48376
+LkVNUFRZ 48377
+LmRm 48378
+IGNsZWFyZXI= 48379
+IGNyb3Nzb3Zlcg== 48380
+IFRveXM= 48381
+IGNvYXRlZA== 48382
+Lk1vbnRo 48383
+IEF0dGFjaA== 48384
+L3J1bg== 48385
+LnRhYnM= 48386
+IG9nc8Ol 48387
+QnJvd24= 48388
+LkRBVEU= 48389
+IGZvcw== 48390
+5a2X56ym 48391
+V29vZA== 48392
+LXRocmVl 48393
+aGVyaXRlZA== 48394
+IHJvcA== 48395
+KGFj 48396
+IGVtYm9kaW1lbnQ= 48397
+IEtlbm5ldGg= 48398
+IGNhbm5vbg== 48399
+IGJpZGRpbmc= 48400
+PElFbnVtZXJhYmxl 48401
+CXNldFRpbWVvdXQ= 48402
+X2RpZ2l0 48403
+IGVsaW1pbmFy 48404
+KG5l 48405
+YnVkZ2V0 48406
+Q1NJ 48407
+IOyVhA== 48408
+IEFTUA== 48409
+R3JvdXBJZA== 48410
+X0NPVU5URVI= 48411
+Y29uc3VsdA== 48412
+IGlmcmFtZQ== 48413
+bGVnZW4= 48414
+X0RFQ0xBUkU= 48415
+U2hhcnBlcg== 48416
+IEZyaWVuZGx5 48417
+dWxldA== 48418
+LWNvbW1hbmQ= 48419
+INCg 48420
+Y3ljbGVz 48421
+IFdhc3Rl 48422
+IHRhcHBlZA== 48423
+CUJ1ZmZlcg== 48424
+4oCUaW4= 48425
+IAogIAo= 48426
+IElkZWFs 48427
+IENhbmR5 48428
+X1N5bnRheA== 48429
+w6p0 48430
+7J2M 48431
+YWJvdmU= 48432
+IE5hemlz 48433
+IGZzdA== 48434
+c2Vpbg== 48435
+IGt1bm5lbg== 48436
+d2lr 48437
+IFNhdmluZw== 48438
+LmV4dGVuc2lvbnM= 48439
+IERlc2VyaWFsaXpl 48440
+b3VyZw== 48441
+LmF0dHJpYg== 48442
+77yaCgo= 48443
+IFdpbnM= 48444
+LmVxbA== 48445
+Unlhbg== 48446
+X2Fjaw== 48447
+T1VSQ0VT 48448
+IG9ucw== 48449
+Z3Jlc2U= 48450
+YWZpYQ== 48451
+TW9kZXJu 48452
+IGFkaGVyZQ== 48453
+IGJpb3M= 48454
+KGFjYw== 48455
+a2Jk 48456
+VGhyb3du 48457
+qeuLiOuLpA== 48458
+CUh0dHA= 48459
+CXhtbA== 48460
+RW5kRGF0ZQ== 48461
+KHBhcnNlZA== 48462
+LmdldGVudg== 48463
+cmVnaXN0cg== 48464
+bmVsbA== 48465
+aW9uYXJpbw== 48466
+LmlubmVyV2lkdGg= 48467
+cnRs 48468
+UFY= 48469
+X3BpZWNl 48470
+IERlcG9zaXQ= 48471
+eWVycw== 48472
+IE5TTnVtYmVy 48473
+IGdpbnQ= 48474
+ZW5zZW1ibGU= 48475
+IG5ld2NvbQ== 48476
+IFZpZXRuYW1lc2U= 48477
+X2hw 48478
+IGFjY3VzaW5n 48479
+IHF1aXM= 48480
+IGludmVzdGlnYXRvcg== 48481
+ZXNzZW50aWFs 48482
+IENY 48483
+LmZvck5hbWU= 48484
+ZGVmcw== 48485
+IGFuYWx5c2U= 48486
+X2FuaW1hdGlvbg== 48487
+IHRoYQ== 48488
+dGFib29sYQ== 48489
+IFRIQw== 48490
+w61jdWxv 48491
+IGdsb3dpbmc= 48492
+IGhvbm9ycw== 48493
+YnN0cmFjdA== 48494
+a3A= 48495
+SVRFUw== 48496
+ICMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyM= 48497
+I2dldA== 48498
+L0Rlc2t0b3A= 48499
+CWdsbQ== 48500
+IHppbmM= 48501
+w6F0aWNh 48502
+IDw8Cg== 48503
+Vk1M 48504
+IFVubGltaXRlZA== 48505
+dnJl 48506
+LWJlZA== 48507
+X25vbmNl 48508
+IEdJ 48509
+dHJhdmVs 48510
+IGlzS2luZE9mQ2xhc3M= 48511
+IGFub255bWl0eQ== 48512
+RmlyZXN0b3Jl 48513
+IGVtYWlsZWQ= 48514
+X0ZMQVNI 48515
+IGbDpXI= 48516
+4piF4piF 48517
+IDpd 48518
+SHVt 48519
+LnJlc2VydmU= 48520
+w7xt 48521
+IGtvc3Rlbmxvc2U= 48522
+IFNDUA== 48523
+dXRhbg== 48524
+IEdvcmU= 48525
+IGNoYXRz 48526
+Lz4NCg== 48527
+LmdldFJlc291cmNlcw== 48528
+IGx1bXA= 48529
+X2NvbnN0cw== 48530
+KGV4dA== 48531
+CWRpcg== 48532
+4p0= 48533
+IHBhZGRpbmdUb3A= 48534
+IG9ic2Vzc2lvbg== 48535
+IGJhbm5pbmc= 48536
+IEFwcE1vZHVsZQ== 48537
+IHBhcnRpc2Fu 48538
+IGNhdGFsb2d1ZQ== 48539
+IG1pbm9ycw== 48540
+IHBpdGNoZXM= 48541
+d2VlcA== 48542
+IHVuZGVydGFrZQ== 48543
+IHRoZW1lZA== 48544
+YXVkaXQ= 48545
+LnNjcm9sbFRvcA== 48546
+IHJlcg== 48547
+IHN5bXB0b20= 48548
+IG9wZW5pbmdz 48549
+LmJsb2Nrcw== 48550
+b3Blbmlk 48551
+IGFzc2g= 48552
+LXNhdmU= 48553
+IFBpZw== 48554
+IHJlZ2Fpbg== 48555
+IGluaWNpYWw= 48556
+L2Zhdmljb24= 48557
+CWV4cA== 48558
+IHNwaWNlcw== 48559
+aXNrYQ== 48560
+Y2xhaW1z 48561
+bWFr 48562
+ZGVmaW5pdGlvbnM= 48563
+IGNvcnJlc3BvbmRlbnQ= 48564
+IENhbm5hYmlz 48565
+X18sCg== 48566
+IEx1Y2t5 48567
+IEdhdXNzaWFu 48568
+IE5lYXJseQ== 48569
+Q0FE 48570
+J11dCg== 48571
+IGFkZXF1YXRlbHk= 48572
+IFRJVExF 48573
+Y29uc3RpdHV0aW9uYWw= 48574
+LW1t 48575
+X292ZXJyaWRl 48576
+IGJsYXM= 48577
+LnJlYWR5U3RhdGU= 48578
+IHJlbWluaXM= 48579
+IHJlaW5mb3JjZWQ= 48580
+IENvbGxhYm9y 48581
+IGRlY29yYXRpbmc= 48582
+IGJhY2hlbG9y 48583
+RVJSVVBU 48584
+IHVwcmlnaHQ= 48585
+aXBhdGlvbg== 48586
+IE5vYmxl 48587
+IHZhbHVlRm9yS2V5 48588
+IHNldExvYWRpbmc= 48589
+Lklnbm9yZQ== 48590
+5YE= 48591
+R2xvYmFscw== 48592
+IE1lbnQ= 48593
+QVNTRVM= 48594
+IGxpbWJz 48595
+IEhVRA== 48596
+aW5jaQ== 48597
+Lml2 48598
+IFFNb2RlbEluZGV4 48599
+RnVzZQ== 48600
+IHBlZGFs 48601
+X0ZSRVE= 48602
+KHZlcmJvc2U= 48603
+IGxvbmdpdHVk 48604
+IENoYXJ0ZXI= 48605
+6re4 48606
+IGJ1bmRsZXM= 48607
+Lmlnbm9yZQ== 48608
+dW1ibw== 48609
+RU1B 48610
+Li4uLi4uLg== 48611
+c3g= 48612
+LkNhcmQ= 48613
+IGhldXRl 48614
+IHN0ZWVy 48615
+anVtbGFo 48616
+IHtf 48617
+X0NoZWNrZWQ= 48618
+IGZheA== 48619
+IEd1c3Q= 48620
+aXRjaGVucw== 48621
+ICkpCgo= 48622
+IHJlbWFya2FibHk= 48623
+L1hNTA== 48624
+LXJlbW92ZQ== 48625
+X2J0 48626
+IGluY3Vi 48627
+LnBhY2thZ2U= 48628
+LmN1cnJlbnRUaHJlYWQ= 48629
+IEhpZ2hsYW5kZXI= 48630
+LnNpZGU= 48631
+c3BsYXNo 48632
+IGljaQ== 48633
+PUQ= 48634
+IHB1Y2s= 48635
+IGJhbGxvdHM= 48636
+IGh1Z2VseQ== 48637
+Y29lZmY= 48638
+IHBEYXRh 48639
+LkNPTFVNTg== 48640
+IEhlYWxpbmc= 48641
+IG9yZGlu 48642
+ISks 48643
+ICcnLA0K 48644
+KG1k 48645
+IFNhc2s= 48646
+PHN0cm9uZw== 48647
+IHN1cnZpdm9y 48648
+LnNlcmllcw== 48649
+IGNhZmZlaW5l 48650
+IGAo 48651
+LlRSQUlMSU5H 48652
+X0lucHV0 48653
+KCJe 48654
+emQ= 48655
+Jik7Cg== 48656
+IFBpbmc= 48657
+IHZvdWNoZXI= 48658
+LnJhdGluZw== 48659
+LXNoaXJ0cw== 48660
+IFJldHJpZXZlcw== 48661
+LmFsaWJhYmE= 48662
+T3JhY2xl 48663
+X01PVg== 48664
+T2xkRGF0YQ== 48665
+IC8qDQo= 48666
+IGdib29sZWFu 48667
+ID0+DQo= 48668
+IHLDoQ== 48669
+IGJsdW50 48670
+IEltYWdlSWNvbg== 48671
+aWZpaw== 48672
+UlRD 48673
+IGZpYmVycw== 48674
+IHRvaWxl 48675
+LnNlbnQ= 48676
+IFB5UXQ= 48677
+JGFwcA== 48678
+IG1lZGlv 48679
+IGdyYW50aW5n 48680
+IHRzbGludA== 48681
+IE3Dtg== 48682
+KGZpZ3NpemU= 48683
+IGh1cnJpY2FuZQ== 48684
+IGxpZmVz 48685
+IMOE 48686
+cm9jZXNzaW5n 48687
+X3N0YW5kYXJk 48688
+LW9wdGlvbg== 48689
+JykpKQ== 48690
+IHZhY2FudA== 48691
+5bel 48692
+IEhvbGxvdw== 48693
+aGFuZGxlQ2hhbmdl 48694
+IGRpdmlkZXI= 48695
+IEVuZ2luZWVycw== 48696
+IHN2ZW5z 48697
+IGNvbXBsaWFudA== 48698
+dGFuZ2dhbA== 48699
+IENyZWRpdHM= 48700
+IEVtaXJhdGVz 48701
+UnVsZUNvbnRleHQ= 48702
+IHJlYWxpemF0aW9u 48703
+IGRpc3RyYWN0ZWQ= 48704
+XSs9 48705
+IGF1Z21lbnQ= 48706
+IER3 48707
+b3Rw 48708
+b3JyZW50 48709
+RWRpdGFy 48710
+LnN0b2Nr 48711
+U3R1ZHk= 48712
+cGVjdGlvbnM= 48713
+IEdhbWVNYW5hZ2Vy 48714
+PWN1dA== 48715
+IGZsb2Nr 48716
+IFJvbWFucw== 48717
+dGhlbQ== 48718
+LWhvcA== 48719
+IHNjcmVlbnNob3Rz 48720
+IC8qIQo= 48721
+IGNvbnZlcnNpb25z 48722
+IG5vcm1hbGl6YXRpb24= 48723
+KGNvbmZpZ3VyYXRpb24= 48724
+IGFlcm9z 48725
+X3NlY3VyaXR5 48726
+IScK 48727
+Qm9udXM= 48728
+IERSSVZFUg== 48729
+CURhdGU= 48730
+dGll 48731
+IFd5b21pbmc= 48732
+U3RhbmQ= 48733
+aXRyZQ== 48734
+IHNob3BwZXJz 48735
+IGRpc2FkdmFudGFnZQ== 48736
+IGxpa2luZw== 48737
+56yR 48738
+IHVuZGVyc3RhbmRhYmxl 48739
+U0VF 48740
+IGhveQ== 48741
+IG5pbmV0ZQ== 48742
+IGNvbmZlcg== 48743
+IG5vd3JhcA== 48744
+IFZlcm4= 48745
+LA0KDQo= 48746
+aW1lc3RlcA== 48747
+TGF5b3V0TWFuYWdlcg== 48748
+4Lc= 48749
+CXdhaXQ= 48750
+UExFVEVE 48751
+SmFwYW4= 48752
+IGluZHVjZQ== 48753
+IOWv 48754
+0L7Qt9Cy 48755
+X0VORFBPSU5U 48756
+Lmhvcml6b250YWw= 48757
+IGFjY2VsZXJhdGVk 48758
+cmltb24= 48759
+SVZFUw== 48760
+VHJhbnNhY3Rpb25z 48761
+TGVhbg== 48762
+IFNPVVI= 48763
+d2hldGhlcg== 48764
+eWc= 48765
+IG9pZA== 48766
+IEVudGl0eU1hbmFnZXI= 48767
+T1VOVFJZ 48768
+IGZpbGE= 48769
+T0xVTU5T 48770
+SU5VRQ== 48771
+IEFuY2hvcg== 48772
+VFJBTg== 48773
+d29v 48774
+YmxvY2txdW90ZQ== 48775
+IE51cnNl 48776
+IENhcnA= 48777
+IHJlZGVlbQ== 48778
+LnRyeQ== 48779
+IEpQ 48780
+IHRpbWVzdGFtcHM= 48781
+ID8+Ij48 48782
+IFJFTU9WRQ== 48783
+IFN0YXJidWNrcw== 48784
+UmVhbGx5 48785
+IGZsb29kZWQ= 48786
+LkNhbGxiYWNr 48787
+RHJvcERvd24= 48788
+aXBybw== 48789
+IHRlbmRlZA== 48790
+bHRl 48791
+IHByb3BvcnRpb25z 48792
+LXRl 48793
+IFJlbmE= 48794
+bGljYXRl 48795
+Zm9yY2Vz 48796
+LmV4dHJh 48797
+LmF1dGhlbnRpY2F0ZQ== 48798
+0LLQvtC0 48799
+obA= 48800
+IGZvckNvbnRyb2xFdmVudHM= 48801
+IHNlbmhh 48802
+IGtlaW4= 48803
+IG1pbmlzdA== 48804
+IFByZWZlcmVuY2U= 48805
+IFRlbGVncmFwaA== 48806
+0YPQvw== 48807
+c3RycG9z 48808
+IGlsbG5lc3Nlcw== 48809
+IHBpZ3M= 48810
+IGdldEludGVudA== 48811
+U29s 48812
+IMKh 48813
+KGNwdQ== 48814
+W3Byb3A= 48815
+c2NyZWVucw== 48816
+Jyk7Pz4= 48817
+IEFjdHM= 48818
+IHN0cmR1cA== 48819
+IGF2ZXJhZ2Vz 48820
+YW5hbA== 48821
+IENhc3VhbA== 48822
+R3JvdXBCb3g= 48823
+IEhhbmRib29r 48824
+L2NvbW1lbnRz 48825
+IG51bWJlcmVk 48826
+IGJyb2FkY2FzdGluZw== 48827
+55uR 48828
+Lm5hdGl2ZUVsZW1lbnQ= 48829
+Lm11 48830
+IHVwZGF0ZWRBdA== 48831
+IERvZXNu 48832
+LkFD 48833
+LmNvbGw= 48834
+IHJlY29yZGVy 48835
+X3NoYQ== 48836
+Qmc= 48837
+Ymls 48838
+IGJvbHRz 48839
+IOes 48840
+IGltcG9zaW5n 48841
+IEluZm9ybWF0aW9uZW4= 48842
+X2ZsYXNoZGF0YQ== 48843
+ZWNvbm9taWM= 48844
+UmVtYXJr 48845
+dWNhcw== 48846
+IE9mZmljZXJz 48847
+IFRFUg== 48848
+V2Fsaw== 48849
+IG1lcmNhZG8= 48850
+X2dlbmVyYXRl 48851
+SFk= 48852
+Q2FsbGluZw== 48853
+c25hcA== 48854
+c2NyaXB0SWQ= 48855
+Lm9wZXJhdGlvbg== 48856
+IEZsYW1l 48857
+bGluZXNz 48858
+IHJlbnRlZA== 48859
+X3RvZ2dsZQ== 48860
+LWNoYW5naW5n 48861
+IFRZ 48862
+J3V0aWw= 48863
+RUVQ 48864
+IGdyYXBocWw= 48865
+IFVuaQ== 48866
+IGltcHVsc2U= 48867
+LkJhc2lj 48868
+IGVuZXJnaWVz 48869
+TUFSWQ== 48870
+IE1hcmNlbA== 48871
+IG1vcnRhbA== 48872
+IGZyZXM= 48873
+bWVucw== 48874
+bW90aW9u 48875
+IHNhbXBsZWQ= 48876
+4oCcVGhhdA== 48877
+aWRheQ== 48878
+cXVpcG1lbnQ= 48879
+Z2V0SW50 48880
+IEFic29sdXRl 48881
+LCci 48882
+dW5lZA== 48883
+LnNoYXJl 48884
+IH0pKA== 48885
+bW1t 48886
+IFJpc2luZw== 48887
+5Lu7 48888
+IHVuZW1wbG95ZWQ= 48889
+eGZh 48890
+LmZvbGxvdw== 48891
+CQkJCSAgICAgIA== 48892
+c2x0 48893
+LlBob25l 48894
+IGtuaXZlcw== 48895
+IGV2ZQ== 48896
+b25DbGljaw== 48897
+XSkpDQo= 48898
+IFdpdG5lc3M= 48899
+CU5T 48900
+IEVPUw== 48901
+IFN0ZWZhbg== 48902
+IFByaWVzdA== 48903
+4oCUd2hpY2g= 48904
+R2V0U3RyaW5n 48905
+LkJ5 48906
+IHVwc3RhaXJz 48907
+IGRldHJpbWVudA== 48908
+YnJva2Vu 48909
+ZW1icm8= 48910
+IG5pY290aW5l 48911
+aWxpb24= 48912
+IGFzdG9uaXNoaW5n 48913
+X2FmZg== 48914
+IExlc3Nvbg== 48915
+IGFjY2lkZW50YWw= 48916
+b2Rvcg== 48917
+IGRlY2ly 48918
+IG5ld05hbWU= 48919
+Ky4= 48920
+55u4 48921
+aWdzbGlzdA== 48922
+IEdpdGh1Yg== 48923
+IHN1Y2Nlc3NpdmU= 48924
+cmFjaWFs 48925
+IGVudmlyb24= 48926
+6aqM6K+B 48927
+IHJlZGlyZWN0ZWQ= 48928
+VE9UQUw= 48929
+IGdyYWJiaW5n 48930
+IExhbmNl 48931
+IGZvcmZl 48932
+X0NC 48933
+5b6u 48934
+RWxhcHNlZA== 48935
+X3dheQ== 48936
+KERpYWxvZ0ludGVyZmFjZQ== 48937
+X21lYXN1cmU= 48938
+eGJi 48939
+RG9n 48940
+RGVwYXJ0 48941
+LXNyYw== 48942
+cmVzb2x2ZXI= 48943
+d2l0aHN0YW5kaW5n 48944
+X3NoZWxs 48945
+IExhc3ROYW1l 48946
+IEF2aWF0aW9u 48947
+IGJlZ2lubmVy 48948
+KCIlLg== 48949
+KHRvb2w= 48950
+INC90L7Qsg== 48951
+OmluaXQ= 48952
+KEFQSQ== 48953
+IE1vcnJpc29u 48954
+dnRDb2xvcg== 48955
+IHN0YXBsZQ== 48956
+L0lORk8= 48957
+IHN1cGVybmF0dXJhbA== 48958
+IHN0ZWFr 48959
+dGltZWxpbmU= 48960
+enpsZQ== 48961
+ImAKCg== 48962
+U2Vjb25kYXJ5 48963
+IE5lcGFs 48964
+LlN0cmluZ1V0aWxz 48965
+IGFkYW0= 48966
+ICguLi4= 48967
+IHN1YnN0aXR1dGlvbg== 48968
+IGJvYXJkaW5n 48969
+IEtleXdvcmQ= 48970
+IEFzc2F1bHQ= 48971
+ZGJjVGVtcGxhdGU= 48972
+IG9yZGVySWQ= 48973
+KGVuZ2luZQ== 48974
+LmFzc2VydFRoYXQ= 48975
+IFZlbnVz 48976
+IGhvbWljaWRl 48977
+IEF2YWw= 48978
+IGd1dHRlcg== 48979
+IFN1cHBvcnRlZA== 48980
+L3BhcnQ= 48981
+IGFjY2xhaW1lZA== 48982
+SGlzdG9y 48983
+IG1lc2Vz 48984
+w7xiZXI= 48985
+IFJlbmV3 48986
+IGdyYXM= 48987
+IEVr 48988
+IGluZmlsZQ== 48989
+aW5keQ== 48990
+Lm11c2lj 48991
+LlNjcm9sbA== 48992
+IEFnZXM= 48993
+IE5hcnV0bw== 48994
+IEdhdGhlcg== 48995
+IGNvbmZpcm1pbmc= 48996
+PSgi 48997
+IHBpdGNoZWQ= 48998
+b2xleQ== 48999
+RnJhbmNl 49000
+Kyci 49001
+JHRvdGFs 49002
+IG9uZGU= 49003
+IGRpdGNo 49004
+X3NpZ21h 49005
+IGNvbnRpbnVpdHk= 49006
+cmV3YXJk 49007
+LWxvYWQ= 49008
+IHByb2Nlc28= 49009
+TG9ja2Vk 49010
+c3Rhdw== 49011
+IHNwaW5hbA== 49012
+bGF6eQ== 49013
+IT09 49014
+amVzdA== 49015
+IGR1bg== 49016
+IFJvZGdlcnM= 49017
+CWdyaWQ= 49018
+IGxvZ29z 49019
+IEJlbmdhbA== 49020
+LnN1cGVy 49021
+UHJvdmlkZXM= 49022
+IG51dHJpZW50 49023
+LlRpbWVzdGFtcA== 49024
+SVpBVElPTg== 49025
+5YaM 49026
+IGZhdHM= 49027
+IFh4eA== 49028
+Y3RpY2E= 49029
+VGFyZ2V0cw== 49030
+IGNvbnRvdXJz 49031
+IHJlb3JkZXJlZA== 49032
+OkFycmF5 49033
+IHRvbGVyYXRl 49034
+Vmly 49035
+IHRlcnJpYmx5 49036
+IGJyaWNrcw== 49037
+KCZf 49038
+aGI= 49039
+UG9ydGFs 49040
+IEJyZWFk 49041
+LndoaWNo 49042
+wq10 49043
+YXNJbnN0YW5jZU9m 49044
+IGpvYmplY3Q= 49045
+CWxlbmd0aA== 49046
+X01U 49047
+OyI+DQo= 49048
+X0VYSVNU 49049
+IG1hdGVybmFs 49050
+UkVM 49051
+IOqyveyasA== 49052
+aGVl 49053
+IGxheW91dHM= 49054
+IExhcA== 49055
+YWlzeQ== 49056
+IHN0dW1ibGVk 49057
+IFVJRw== 49058
+IFNjbw== 49059
+IGltcGFpcmVk 49060
+UkVTU0VE 49061
+IGFidXNlcw== 49062
+VkY= 49063
+QVJC 49064
+Lk5BTUU= 49065
+cmNo 49066
+cHJpbWly 49067
+X2NvbXBsZXRlZA== 49068
+IHBlbm55 49069
+Q2hyb21l 49070
+KGJlZ2lu 49071
+ZXJuZW4= 49072
+LWNoZWNrYm94 49073
+UGxhaW5PbGREYXRh 49074
+IExQQw== 49075
+cmFkZQ== 49076
+c3Bpcg== 49077
+IGNvbmNlaXZlZA== 49078
+VGlwcw== 49079
+IElvVA== 49080
+IEdhbg== 49081
+6IGU 49082
+IGJpYXNlcw== 49083
+IGNvbnN1bHRhbnRz 49084
+cGxlZA== 49085
+X2h0 49086
+YXNzb2NpYXRlZA== 49087
+XSwKCg== 49088
+IGRlbGlnaHRmdWw= 49089
+INGC0LXQug== 49090
+SGVsdmV0aWNh 49091
+KGxvYWQ= 49092
+LWV4cGFuZA== 49093
+X1dJREdFVA== 49094
+dG9h 49095
+IEFrdA== 49096
+IG9tbg== 49097
+IGNsYXVzZXM= 49098
+SW50ZWw= 49099
+Ki99Cg== 49100
+X3JlZ2lzdHJhdGlvbg== 49101
+IG9sZFZhbHVl 49102
+IHJlc3RvcmluZw== 49103
+IHVucmVhbA== 49104
+T1ZFUg== 49105
+CQoJCgkK 49106
+QVRT 49107
+X3Byb2Jl 49108
+IGRpdmlzb3I= 49109
+LnVwZGF0ZUR5bmFtaWM= 49110
+5bmz 49111
+UHJvZHVjZXM= 49112
+c3RhbXA= 49113
+Lmpib3Nz 49114
+CXRhc2s= 49115
+ISg6 49116
+IHBzeWNoaWM= 49117
+QGNsYXNz 49118
+TWFydGlu 49119
+IFBhc3NlZA== 49120
+Y2xhcmF0aW9ucw== 49121
+aGVs 49122
+0LDRhw== 49123
+CWNvcHk= 49124
+LWJpbg== 49125
+emFu 49126
+aWdyYW0= 49127
+4Ka+4KY= 49128
+KHNpZw== 49129
+IENhdmFs 49130
+XyMj 49131
+ICU9 49132
+b3V0bGluZWQ= 49133
+IEFjaWQ= 49134
+IHVucHJlZGljdGFibGU= 49135
+LWRhc2hib2FyZA== 49136
+SGV4U3RyaW5n 49137
+K2M= 49138
+LlB1YmxpYw== 49139
+4bqp 49140
+IGNvbnZleW9y 49141
+IEVC 49142
+IHNlbGVjdHM= 49143
+IGtub2NraW5n 49144
+IENlYw== 49145
+SUJVVEVT 49146
+b3dhxIc= 49147
+Z2F0c2J5 49148
+KnY= 49149
+ZW50cm9weQ== 49150
+IGRpc3BhdGNoZWQ= 49151
+IGNhbWVs 49152
+IFNhdHVybg== 49153
+IG92ZXJ3ZWlnaHQ= 49154
+KHBob25l 49155
+cGFyYWJsZQ== 49156
+JUI= 49157
+X3ZlY3RvcnM= 49158
+IGJyZXdpbmc= 49159
+IFRr 49160
+IERvd25sb2Fkcw== 49161
+IFNhdmVk 49162
+LlByaWNl 49163
+IGN1cnZlZA== 49164
+IFBhcmVudGhvb2Q= 49165
+6LY= 49166
+LnBubA== 49167
+cGxldGVseQ== 49168
+LkRheQ== 49169
+IGFkdmVydGlzZXJz 49170
+IGVqZWM= 49171
+IHByemVk 49172
+668= 49173
+ISc7Cg== 49174
+IEt1c2g= 49175
+IFRBQg== 49176
+IHF1ZXN0cw== 49177
+IGNvaW5jaWRlbmNl 49178
+dW1taWVz 49179
+IEthc2htaXI= 49180
+IEV0aGljcw== 49181
+X2dyb3d0aA== 49182
+IGFrdGl2 49183
+IGdyb3VwaW5n 49184
+5aKe 49185
+X3RydXRo 49186
+5ZCs 49187
+dG9kb3M= 49188
+aXNldA== 49189
+VGV4Q29vcmQ= 49190
+w6R0dA== 49191
+IFp1cg== 49192
+cm95cw== 49193
+X01BR0lD 49194
+IGJyZXdlcnk= 49195
+KFN0YXRl 49196
+IFNNQUxM 49197
+IFBsYW50cw== 49198
+aXRiYXJ0 49199
+ZWFjaGVy 49200
+IEFkZWxhaWRl 49201
+THU= 49202
+IGZpY2s= 49203
+dW5kbGVz 49204
+X2xvYWRlZA== 49205
+0LjQtQ== 49206
+UG9sbA== 49207
+cml0aWM= 49208
+RUxZ 49209
+ICsn 49210
+IFByb2Zlc3Npb24= 49211
+IHN0YW1wcw== 49212
+IFNldw== 49213
+c2Nyb2xsVmlldw== 49214
+IGNvbW11bmlzdA== 49215
+L3Byb2JsZW1z 49216
+fQ0KDQoNCg0K 49217
+LG8= 49218
+IHVkcA== 49219
+IG9iZXNl 49220
+YXBwcm92ZQ== 49221
+YW5jZWxsYXRpb24= 49222
+X0dhbWU= 49223
+IEhhc2h0YWJsZQ== 49224
+YWRhcHRpdmVTdHlsZXM= 49225
+IHBvc3Nlc3Nlcw== 49226
+Lm1hdGNoZXI= 49227
+ZnVuY3Rpb25hbA== 49228
+TXJz 49229
+CXNhdmU= 49230
+IERiVHlwZQ== 49231
+IGtlbg== 49232
+Z2V0Q29udGV4dA== 49233
+IG1hbnM= 49234
+KHJlbA== 49235
+IEJyb3RoZXJob29k 49236
+KWAK 49237
+6Kej 49238
+LkluZm9ybWF0aW9u 49239
+T3V0T2ZSYW5nZUV4Y2VwdGlvbg== 49240
+IFNlaw== 49241
+Q2Fz 49242
+IGJsb2dnZXJz 49243
+RWl0aGVy 49244
+KCIiIg== 49245
+IHBpbmNo 49246
+IGNvYXJzZQ== 49247
+KXA= 49248
+IFB1bHNl 49249
+IGxlYXJudA== 49250
+IGRlbnRpc3Q= 49251
+IG9uY2hhbmdl 49252
+IGRpcmVjdGl2ZXM= 49253
+KGFjdGlvbnM= 49254
+bnlkZXI= 49255
+IFNoaXI= 49256
+VHJhaXQ= 49257
+X2RlcA== 49258
+IFBFVA== 49259
+IFJFUA== 49260
+LkFwcFNldHRpbmdz 49261
+Y3VhZG9y 49262
+aWRlbmF2 49263
+IGVudmk= 49264
+IHNsYW1tZWQ= 49265
+IFNob290 49266
+IGRhdGVGb3JtYXQ= 49267
+LmpvZGE= 49268
+dmV5cw== 49269
+ICkuCgo= 49270
+IGNhcmVn 49271
+IFBhcmFsbGVs 49272
+X3RyYW5zbGF0aW9u 49273
+LmZ1bmN0aW9ucw== 49274
+Lm9icw== 49275
+UnVudGltZUV4Y2VwdGlvbg== 49276
+W109 49277
+b3ZlcnZpZXc= 49278
+IFNjaGw= 49279
+IG5vaXN5 49280
+IE9uUHJvcGVydHlDaGFuZ2Vk 49281
+U2VuZGluZw== 49282
+IHVuZmFtaWxpYXI= 49283
+VXBvbg== 49284
+IFByaW50cw== 49285
+LnR5cA== 49286
+IGZsZWVpbmc= 49287
+CW1vdmU= 49288
+KFVu 49289
+IHFy 49290
+15w= 49291
+X2JldGE= 49292
+IHNraWVz 49293
+CW1l 49294
+V05E 49295
+IHN0aWNrZXJz 49296
+Ymxhcw== 49297
+IGluc2VydHM= 49298
+IHZlcnNlcw== 49299
+IERldw== 49300
+IHRhbmdpYmxl 49301
+IGhlY2hv 49302
+UE9M 49303
+IHRlYXJkb3du 49304
+b21uaWE= 49305
+SUJF 49306
+LmNvdmVy 49307
+X3N0cmF0ZWd5 49308
+Xi0= 49309
+c2V0UG9zaXRpb24= 49310
+dWFsZQ== 49311
+U2lnbmVk 49312
+IGlmYWNl 49313
+YXNlbGluZQ== 49314
+LnNldFRpbWU= 49315
+IE1pbmVyYWw= 49316
+IEZpZ2h0aW5n 49317
+c2tpbnM= 49318
+IGRpc2NyaW1pbg== 49319
+IGRhbnNr 49320
+IFByaW5jZXRvbg== 49321
+YWNpc3Q= 49322
+ICgpKTsK 49323
+dHJhY2tz 49324
+aW1vbmlhbA== 49325
+YWRlY2ltYWw= 49326
+RVBST00= 49327
+dWdnbGU= 49328
+Lk5vdGlmaWNhdGlvbg== 49329
+JG1haWw= 49330
+Y2FudGlkYWQ= 49331
+IEp1bmc= 49332
+IHNlZWtlcnM= 49333
+IHBsYXVzaWJsZQ== 49334
+dGllcg== 49335
+0LXQtg== 49336
+IHJhcHBlcg== 49337
+IE1hbmE= 49338
+IEh0dHBTdGF0dXNDb2Rl 49339
+IGJ1cm50 49340
+bG9zZXM= 49341
+IEZvdG8= 49342
+IEpzb25PYmplY3Q= 49343
+SW5zdGFncmFt 49344
+IHN5c2NhbGw= 49345
+IHJlYWxpdGllcw== 49346
+IE1BVExBQg== 49347
+Ol57Cg== 49348
+VEVSTQ== 49349
+IENiZA== 49350
+IFBhcmFncmFwaA== 49351
+IHRyYXbDqXM= 49352
+IGNvbnN0cnVjdGluZw== 49353
+IHN3YWw= 49354
+IHBpZ2U= 49355
+TExMTA== 49356
+LWV4aXN0aW5n 49357
+R2V0cw== 49358
+IG1lbHRlZA== 49359
+IG1pdGlnYXRl 49360
+SGVu 49361
+IGht 49362
+aW1hcw== 49363
+IEFv 49364
+IFBlcmV6 49365
+IERBTA== 49366
+IOuLpA== 49367
+IGRpdmlz 49368
+U3Rvcnlib2FyZFNlZ3Vl 49369
+IE1vZGlmeQ== 49370
+IMOcYmVy 49371
+X09WRVJSSURF 49372
+LnBlbQ== 49373
+dW50b3M= 49374
+IGVzcGHDsQ== 49375
+IHs/ 49376
+IFBBWQ== 49377
+X2lwdg== 49378
+IEZ1cnk= 49379
+X18uX18= 49380
+ZWxvdw== 49381
+LWNlbnRlcmVk 49382
+Y2hlY2tz 49383
+X1JlZw== 49384
+LUphdmFkb2M= 49385
+CWxvYWQ= 49386
+IExpa2V3aXNl 49387
+2KfZhQ== 49388
+VU5F 49389
+LnNlbQ== 49390
+eGNi 49391
+IENhdmU= 49392
+X3NsZWVw 49393
+IHNpbGVudGx5 49394
+IEV4dHJlbWU= 49395
+LlRvVXBwZXI= 49396
+CUNIRUNL 49397
+IGN1ZQ== 49398
+IFFCeXRlQXJyYXk= 49399
+IGNvcnJ1cHRlZA== 49400
+IETDqQ== 49401
+IGltcGVk 49402
+R2V0TmFtZQ== 49403
+IGluYWNjdXJhdGU= 49404
+IHNvYmVy 49405
+0LXQtQ== 49406
+IGJhcmNvZGU= 49407
+LS0pewo= 49408
+aW5raQ== 49409
+IMOpcA== 49410
+IGRyaQ== 49411
+IEFMVA== 49412
+Pj4+Pj4+Pj4= 49413
+b250YQ== 49414
+W0w= 49415
+IGludGVyZXM= 49416
+dmVydGluZw== 49417
+IGRpYWdub3N0aWNz 49418
+cGRldg== 49419
+6Kk= 49420
+IEludGVncmF0ZWQ= 49421
+KS4n 49422
+X2dj 49423
+JHRleHQ= 49424
+LmdhbWVz 49425
+IFRlcnJh 49426
+J1Jl 49427
+LnRyYW5zZmVy 49428
+X0ZJRk8= 49429
+Z2V0TW9kZWw= 49430
+IGJsYW5k 49431
+IENvbGVtYW4= 49432
+IHByaW1lcw== 49433
+IOaI 49434
+IGNyb3NzZXM= 49435
+bms= 49436
+R0lORw== 49437
+ICde 49438
+IEJsb2I= 49439
+IGludGVyY291cnNl 49440
+IEJsdmQ= 49441
+IHdlaWdocw== 49442
+X3JlZ3VsYXI= 49443
+IFBlcnRo 49444
+IHNlcGFyYXRpbmc= 49445
+IGJpbGxlZA== 49446
+LnRhYkNvbnRyb2w= 49447
+IHB1cHBldA== 49448
+IHV0aWxpemF0aW9u 49449
+IOKWoA== 49450
+IHN1Y2Nlcw== 49451
+IGxhbXBz 49452
+X3Byb2o= 49453
+RXJpYw== 49454
+IHJlbm92YXRpb24= 49455
+IEZhbWlsaWVz 49456
+IEJpdHM= 49457
+cGFydGlhbHM= 49458
+LU1lbg== 49459
+c29sdXRpb24= 49460
+IGR3YXJm 49461
+LklOVEVHRVI= 49462
+IExPQ0s= 49463
+LmN0 49464
+IGV4Y2VycHQ= 49465
+IFBpeA== 49466
+IEZpcnN0TmFtZQ== 49467
+QU5URUQ= 49468
+IEFkbWly 49469
+LWhlbHA= 49470
+UHJpb3I= 49471
+IEFsaWdu 49472
+LklOU1RBTkNF 49473
+TGluZUVkaXQ= 49474
+KCcvOg== 49475
+IGluZXQ= 49476
+b2R1cw== 49477
+LnBrbA== 49478
+IEtZ 49479
+dXBlcnQ= 49480
+IG5lcnZlcw== 49481
+X2dyYWRpZW50 49482
+fScsJw== 49483
+X3VucmVm 49484
+IHNhdHVyYXRlZA== 49485
+IENvbm5lY3RlZA== 49486
+IEZO 49487
+RVhJVA== 49488
+IHRlbGVwb3J0 49489
+IGF2YWl0 49490
+UGFnZVJvdXRl 49491
+IGRpdm9yY2Vk 49492
+KGxhbmc= 49493
+ZnN0 49494
+IFR5cg== 49495
+IG1lc3Nlbmdlcg== 49496
+aWZzdHJlYW0= 49497
+WFM= 49498
+IEJhbmtpbmc= 49499
+IGluZmVjdGlvdXM= 49500
+IE1vbnM= 49501
+X0xPT1A= 49502
+IHp1csO8Y2s= 49503
+IG9idGVuZXI= 49504
+L3JlcG9z 49505
+VmVs 49506
+YWNybw== 49507
+IHVzZXJSZXBvc2l0b3J5 49508
+c3R5bGVUeXBl 49509
+IFNSQw== 49510
+Vk1MSU5VWA== 49511
+cmVjdXJzaXZl 49512
+L2Jhcg== 49513
+X2NoaXA= 49514
+b21pbmF0ZWQ= 49515
+IE5pdA== 49516
+4oCUdG8= 49517
+IEJ1ZGRo 49518
+0L7QvNC10YA= 49519
+IE1BRw== 49520
+IENIRQ== 49521
+X2Rlbg== 49522
+LnJhaXNlcw== 49523
+X2RlZ3JlZQ== 49524
+IHB1bXBraW4= 49525
+X3RlbXBsYXRlcw== 49526
+X01FRElB 49527
+IFRpbWVsaW5l 49528
+IGJvdHM= 49529
+T2JqZWN0VHlwZQ== 49530
+IGJ1eXM= 49531
+LnBvc3Rz 49532
+Q0FM 49533
+d2FpdGluZw== 49534
+IERhbmllbHM= 49535
+IGRhYmVp 49536
+IFNpZ21h 49537
+aWxvcg== 49538
+aWdlbA== 49539
+LFc= 49540
+QURT 49541
+KHBhbmVs 49542
+7LK0 49543
+aXRhdGluZw== 49544
+LnBhbGV0dGU= 49545
+IG1vc3F1aXRv 49546
+IHRlZ28= 49547
+KHBhcnNlSW50 49548
+IGRlc3B1w6lz 49549
+cHJvbWlzZQ== 49550
+IHdpag== 49551
+dHlwZXNjcmlwdA== 49552
+IFR2 49553
+X0lERU5USUZJRVI= 49554
+KS4KCgo= 49555
+X2ZsYXQ= 49556
+aXRzdQ== 49557
+VVNS 49558
+ZXhwZXJpZW5jZQ== 49559
+LWZpdA== 49560
+cGhpbng= 49561
+X3RocmVzaA== 49562
+IGlkZWFsbHk= 49563
+IEZyZWVtYW4= 49564
+LERC 49565
+X3J3 49566
+562J 49567
+VWI= 49568
+X3N0YXRpc3RpY3M= 49569
+PSIiPjw= 49570
+IGNob3Jl 49571
+IHlvcms= 49572
+aW5zdGFsbGVk 49573
+QWRkaXRpb25hbGx5 49574
+IHBzdG10 49575
+eWxrbw== 49576
+OjoK 49577
+Rm9yZXN0 49578
+IGhlYWRzZXQ= 49579
+IGdhbGxvbg== 49580
+0YDQtdC8 49581
+IHdpdGhkcmF3bg== 49582
+IENhbmRpZGF0ZQ== 49583
+IG1lbHRpbmc= 49584
+IGZyZWV6ZXI= 49585
+IGhs 49586
+X0hFTFA= 49587
+bWltZQ== 49588
+KC8q 49589
+IHRoaXJzdA== 49590
+JHJldHVybg== 49591
+bWVtYmVyb2Y= 49592
+0LXQsQ== 49593
+IEh0dHBTZXJ2bGV0UmVxdWVzdA== 49594
+KG9i 49595
+X1Jlc3VsdA== 49596
+IGFzc2VydGVk 49597
+IGZ1bGZpbGxpbmc= 49598
+IHN0cmV0Y2hlcw== 49599
+cGFyYXRlZA== 49600
+LWZ1bmRlZA== 49601
+IOWb 49602
+aW5nbGVz 49603
+X2Nh 49604
+LmNvbmRpdGlvbg== 49605
+IERpc3BsYXlz 49606
+IG9yYW5n 49607
+IENSRQ== 49608
+IGdsQmluZA== 49609
+IFNlbGVjdG9y 49610
+L3R5cGU= 49611
+IEFsZXhh 49612
+Y2hlZHVsZXM= 49613
+IFBlbmluc3VsYQ== 49614
+IHBhcml0eQ== 49615
+CWRlc3Q= 49616
+IERvb3Jz 49617
+DQoJDQo= 49618
+X2RpbWVuc2lvbg== 49619
+IGFsb2Fk 49620
+LlN0b3JlZFByb2NlZHVyZQ== 49621
+KHBhcmVu 49622
+IEJ1cmtl 49623
+JyldCg== 49624
+LWVuZ2luZQ== 49625
+IHF1aXI= 49626
+IEh5YnJpZA== 49627
+IERvZQ== 49628
+IG91dGxpbmVz 49629
+IFRyZW5kcw== 49630
+X05W 49631
+cGVyaW1lbnRz 49632
+IEhpbg== 49633
+Pycs 49634
+CVRleHQ= 49635
+RlVM 49636
+IHNtZWxscw== 49637
+IHNsaWNr 49638
+IG1pc2VyYWJsZQ== 49639
+IEFycmF5QWRhcHRlcg== 49640
+IHBhcmFtU3RyaW5n 49641
+SG9t 49642
+X2xpdGVyYWxz 49643
+dXN1YXJpb3M= 49644
+IHByb21wdGluZw== 49645
+X2xhenk= 49646
+IEFjdGl2YXRpb24= 49647
+X29j 49648
+V2Vhaw== 49649
+IGFuZWNk 49650
+IFVDTEE= 49651
+PXJl 49652
+aXNzZW1lbnQ= 49653
+IEVzY29ydHM= 49654
+RXhjZWxsZW50 49655
+IFBhdXNl 49656
+IHJlcG9zaXRvcmllcw== 49657
+VE9S 49658
+YXJpYXRl 49659
+X2lzbw== 49660
+dXBkYXRlcw== 49661
+aGFsYg== 49662
+dWRpYW50ZQ== 49663
+66Gd 49664
+IG5haXZl 49665
+IFBlZw== 49666
+IExvdW5nZQ== 49667
+QVJHSU4= 49668
+KGJpbg== 49669
+T25DbGlja0xpc3RlbmVy 49670
+IEZBSUxFRA== 49671
+IGxpdGU= 49672
+IGR6aWU= 49673
+IExpdGVyYWw= 49674
+aXZvcg== 49675
+ZmNudGw= 49676
+IGVhdHM= 49677
+IHFlZA== 49678
+VW5sb2Nr 49679
+cmlkaW5n 49680
+dW5kYWk= 49681
+PU0= 49682
+QVRURVI= 49683
+Q29uZmlndXJlQXdhaXQ= 49684
+aWNpYXM= 49685
+dXN0b21lZA== 49686
+IHN1Y2Nlc3Npb24= 49687
+ZW5kVGltZQ== 49688
+IEp1cGl0ZXI= 49689
+IGp1ZGdpbmc= 49690
+ZHJhdGlvbg== 49691
+X2RvY3M= 49692
+Lm1v 49693
+IGVkdWNhdG9ycw== 49694
+IFZpbmU= 49695
+Q29uZA== 49696
+W291dA== 49697
+cWI= 49698
+XFZhbGlkYXRvcg== 49699
+IG1lYW5pbmdz 49700
+IHByZXNlbnRseQ== 49701
+IGRpdmlkaW5n 49702
+b3R0ZW5oYW0= 49703
+YXNjdWxhcg== 49704
+IHRyYWlsZXJz 49705
+IENMT1NF 49706
+0LDQvNC4 49707
+4oCZYWk= 49708
+IEdhaW4= 49709
+d29y 49710
+IHBsYW5uZXI= 49711
+IGRpc3RyaWJ1dGluZw== 49712
+dmF0 49713
+bW9udGhz 49714
+eGxhYmVs 49715
+SEY= 49716
+VmlvbA== 49717
+LkJBU0VMSU5F 49718
+0LXRgtGB0Y8= 49719
+IFJvdGF0ZQ== 49720
+IHR4bg== 49721
+OmJvbGQ= 49722
+IGJsb3Nz 49723
+Rm9yZ2VyeQ== 49724
+KGVtYmVk 49725
+IGpha28= 49726
+c3ByaW50Zg== 49727
+dGhlaXI= 49728
+IGV4aGliaXRz 49729
+LXN0YXRpYw== 49730
+aGVjeQ== 49731
+Z2V0QWN0aXZlU2hlZXQ= 49732
+LmNsaWVudHM= 49733
+44GN 49734
+X2hpZGU= 49735
+W3dvcmQ= 49736
+Q2I= 49737
+YWRkSXRlbQ== 49738
+YXhl 49739
+X3JhZGlv 49740
+YWxpb24= 49741
+bW9kaWZpZXI= 49742
+IHNhdHVyYXRpb24= 49743
+IGRlbm9t 49744
+X3BpeGVscw== 49745
+bWVzcw== 49746
+KGZs 49747
+YXRpZg== 49748
+IHNlY3M= 49749
+IHByb3N0aXR1dGlvbg== 49750
+IGdyYW5kY2hpbGRyZW4= 49751
+IHBhcmFkaXNl 49752
+IEZlbGQ= 49753
+X0JJTkFSWQ== 49754
+aXRvdXM= 49755
+4LmE 49756
+IGZsYXNoaW5n 49757
+LXNpZGVk 49758
+IGNvbnRyYWRpY3Rpb24= 49759
+LyoKCg== 49760
+eWxhYmVs 49761
+IFRldA== 49762
+IGFkbWlyZQ== 49763
+cmVzbw== 49764
+IGxldHo= 49765
+IFNFQVJDSA== 49766
+c2xvdHM= 49767
+IFJld2FyZHM= 49768
+IEhvZw== 49769
+IE5TRGF0YQ== 49770
+c3Rhc2g= 49771
+RmFsbA== 49772
+IEFtZXI= 49773
+TGluZWFyTGF5b3V0 49774
+L3Bob3Rvcw== 49775
+IGZlYXRoZXI= 49776
+IHwNCg== 49777
+RG93bmxvYWRz 49778
+LlN0YXJ0c1dpdGg= 49779
+IC8vIw== 49780
+aW5lVHJhbnNmb3Jt 49781
+IGFmZmlk 49782
+VnRibA== 49783
+IFJvZ3Vl 49784
+c2NyaWJlZA== 49785
+IGZhdWM= 49786
+IE1vbnJvZQ== 49787
+IGRlY2xhcmVz 49788
+bW9kZXJu 49789
+cmVvbg== 49790
+YXliZQ== 49791
+UEFTUw== 49792
+ZmVycw== 49793
+X01VTFRJ 49794
+IE1hdGhlbWF0aWNz 49795
+IHN1ZGFo 49796
+X0FUVEFDSA== 49797
+IG51bWJlcldpdGg= 49798
+IFNvbG9tb24= 49799
+amlu 49800
+b2dyYWZpYQ== 49801
+w7Zs 49802
+X2Rlc2lnbg== 49803
+Y3VsYXRlZA== 49804
+IEx1bmE= 49805
+aWVzeg== 49806
+ID0+Jw== 49807
+IHJldmVsYXRpb25z 49808
+QWxvbmc= 49809
+KGVk 49810
+IEZpbGVuYW1l 49811
+IHlsYWJlbA== 49812
+U2VjdXJl 49813
+IGJ1c2Nh 49814
+YWdub3Npcw== 49815
+X1JFQ0U= 49816
+IG92ZXJsYXBwaW5n 49817
+RXh0ZW50 49818
+IGFudGljaXBhdGlvbg== 49819
+Q2hlY2tz 49820
+IEFMU08= 49821
+b3Jj 49822
+aWxpbmd1YWw= 49823
+aXRhdGlvbmFs 49824
+IGFkdmFuY2VtZW50 49825
+b3Vybw== 49826
+IFByZWRpY2F0ZQ== 49827
+5b6X 49828
+ZXJpYQ== 49829
+IFBpZXJjZQ== 49830
+b3Jpbw== 49831
+IG1lcml0cw== 49832
+IHBlYW51dA== 49833
+LlBhY2thZ2U= 49834
+IENvbmR1Y3Q= 49835
+X1NFTlNPUg== 49836
+IGJvaWxpbmc= 49837
+IGludHJh 49838
+IElHTg== 49839
+IEZ1cg== 49840
+LlJlZnJlc2g= 49841
+IFJlYWNo 49842
+X2RlY29kZXI= 49843
+LkV4cA== 49844
+INGC0LDQug== 49845
+cGlsbA== 49846
+LFE= 49847
+IEdyaWxs 49848
+IHBvcHBpbmc= 49849
+LkFn 49850
+IHByb3llY3Rv 49851
+IG1pbGVhZ2U= 49852
+IGVjb2xvZ2ljYWw= 49853
+XV0pOwo= 49854
+IMKt 49855
+c3VicGxvdA== 49856
+YWNhZA== 49857
+IFRyeWluZw== 49858
+cmVjaXBlcw== 49859
+JGNyaXRlcmlh 49860
+IFBlcnNpYW4= 49861
+LWJvdW5k 49862
+TUFTSw== 49863
+IEdlc3R1cmU= 49864
+IGtr 49865
+IFBWQw== 49866
+IHByb2hpYml0aW9u 49867
+IGNvbWFuZG8= 49868
+IExPT0s= 49869
+U2hvcHBpbmc= 49870
+IGRpc3RvcnRpb24= 49871
+PEJvb2xlYW4= 49872
+LkdldExlbmd0aA== 49873
+dW1wdA== 49874
+XFByb2R1Y3Q= 49875
+ZWxsZXJ5 49876
+IGZpcmV3YWxs 49877
+Zm9ybWF0dGVk 49878
+LnJlZGlz 49879
+IGVzYQ== 49880
+IFJob2Rl 49881
+U29t 49882
+Lm5vbg== 49883
+ICcpLg== 49884
+IGdldFZpZXc= 49885
+4bqhbg== 49886
+cHJ1cw== 49887
+TWF0dGhldw== 49888
+IHNpYQ== 49889
+IEZvcnM= 49890
+R1BV 49891
+aWVudHJhcw== 49892
+X0lOU1Q= 49893
+IG9sYXJhaw== 49894
+IGltcG9ydGluZw== 49895
+VENQ 49896
+LyIpOwo= 49897
+ZWl0aGVy 49898
+IGZyZXNobHk= 49899
+Y2FzY2FkZQ== 49900
+KGNoYXJhY3Rlcg== 49901
+IEplZXA= 49902
+b3RpY3M= 49903
+X1VUSUw= 49904
+Llh0cmFQcmludGluZw== 49905
+LmZpcnN0Q2hpbGQ= 49906
+IEV4Y2VsbA== 49907
+IGR2ZA== 49908
+IHRhbGxlcg== 49909
+IHJhcw== 49910
+eXBhc3M= 49911
+IGFzc2lnbnM= 49912
+IGdyaWV2 49913
+LW1vcmU= 49914
+SkQ= 49915
+IEJ1cm5z 49916
+Jz4NCg== 49917
+LkRlcGVuZGVuY3k= 49918
+LlF1ZXJ5U3RyaW5n 49919
+Lk93bmVy 49920
+IGV4cGlyeQ== 49921
+VGh1 49922
+KFZlYw== 49923
+IGhhemFyZG91cw== 49924
+IHJwbQ== 49925
+QVBPTg== 49926
+IGFkZFRhcmdldA== 49927
+c3ZpbGxl 49928
+cE5ldA== 49929
+IEltZw== 49930
+IFRJTUVS 49931
+LkFuaW1hdGlvbg== 49932
+IGJlaw== 49933
+IGFzc29ydA== 49934
+IGxlYmlo 49935
+IGJvZHlQYXJzZXI= 49936
+IHZpYnJhdGluZw== 49937
+SURM 49938
+IGJ1dHRlcmtuaWZl 49939
+aW50ZXJz 49940
+IHBlcnN1YWRl 49941
+IExHQlRR 49942
+6Is= 49943
+LnNvZnQ= 49944
+IGJlYW1z 49945
+X3N1cg== 49946
+LkRlZg== 49947
+IGxhYnM= 49948
+CXBsdA== 49949
+IHNraW5z 49950
+IHRyYW5zZmVycmluZw== 49951
+IGltYWdpbmFyeQ== 49952
+X0VuZA== 49953
+O2JhY2tncm91bmQ= 49954
+IGxhcHM= 49955
+X0NPTU1FTlQ= 49956
+KFNETA== 49957
+b25kcw== 49958
+LlJlY29yZA== 49959
+IEltcGxlbWVudHM= 49960
+X3RpY2tz 49961
+KCkpKQoK 49962
+IGFyb3Nl 49963
+XT8= 49964
+IE1w 49965
+IElDb21tYW5k 49966
+IHNjdWxwdHVyZQ== 49967
+IGNvbnRyYWN0ZWQ= 49968
+PEhUTUw= 49969
+IGNhbGVuZA== 49970
+YXR5 49971
+L1N1Yg== 49972
+IGt2aW5u 49973
+X0lHTk9SRQ== 49974
+IFNoYW5l 49975
+TUxT 49976
+IHN0aW11bGF0ZQ== 49977
+UGFydGl0aW9u 49978
+IG11bg== 49979
+w7Nt 49980
+ZXJhbGE= 49981
+LWFjY291bnQ= 49982
+LkJpbmFyeQ== 49983
+Y8Op 49984
+IHNlaXpl 49985
+Y29ubmVjdGlvbnM= 49986
+IAogICAgICAgIAo= 49987
+IERpYWdub3N0aWM= 49988
+VklTSUJMRQ== 49989
+IFJ1bnM= 49990
+IGltcHJlc3Npb25z 49991
+c3VpdGU= 49992
+b2JsZQ== 49993
+fi0= 49994
+YWt1a2Fu 49995
+PFBlcnNvbg== 49996
+IE5vcw== 49997
+IEd1aQ== 49998
+LndhaXRGb3I= 49999
+UkVTRVQ= 50000
+IHBvc3Rwb24= 50001
+RGlzY292ZXI= 50002
+YXJyaXNvbg== 50003
+c2hhdw== 50004
+Ymxvb2Q= 50005
+QUpPUg== 50006
+5pu05paw 50007
+IE11c2U= 50008
+5pS2 50009
+IHJldGFpbmluZw== 50010
+b3R0ZQ== 50011
+IG1vc3F1ZQ== 50012
+IFNuZQ== 50013
+IHN0YW5kYXJkaXplZA== 50014
+IG1haW5sYW5k 50015
+X3RocmVl 50016
+dW5nZW9ucw== 50017
+Z2V0RG9jdHJpbmU= 50018
+IHdoYWxl 50019
+IGFnZw== 50020
+IFBvcnNjaGU= 50021
+bm93bGVk 50022
+bGF0ZW50 50023
+IFJlbGF0aW9u 50024
+IC8vJw== 50025
+IHNodXR0aW5n 50026
+IFJlbWl4 50027
+X2Nvdg== 50028
+IHNhaWxpbmc= 50029
+IHZvd2Vk 50030
+IHBvdHM= 50031
+b3V0dQ== 50032
+IGhhaXJ5 50033
+Y2FzdHM= 50034
+UmVsb2Fk 50035
+IHJlY29ubmVjdA== 50036
+dGVyYQ== 50037
+LmNoaWxkTm9kZXM= 50038
+IFJhY2s= 50039
+IGN1cnJlbnRJbmRleA== 50040
+IGFsbGVu 50041
+IOeUqOaItw== 50042
+IEN1YnM= 50043
+W1g= 50044
+X1NFUQ== 50045
+X1JFTU9WRQ== 50046
+LmdldEFjdGlvbg== 50047
+KC9e 50048
+ZXJyYXI= 50049
+IGV0aGVy 50050
+Y3VydmU= 50051
+IHNsYXA= 50052
+IHVvbQ== 50053
+T3RoZXJz 50054
+IGVuZ3I= 50055
+RGlzcG9zaXRpb24= 50056
+IHN0YWdlZA== 50057
+RXll 50058
+IEF1eA== 50059
+YXV0aGVudGljYXRl 50060
+ICQ/ 50061
+IEFuZHJlYXM= 50062
+IHNldHc= 50063
+LkFydA== 50064
+IGZvcmVjYXN0cw== 50065
+IGF1bnQ= 50066
+LW1pZGRsZQ== 50067
+IG1pc2Q= 50068
+ZGVzaw== 50069
+IGVzY29ydGU= 50070
+IENhc2E= 50071
+cm9waWNhbA== 50072
+IGV4ZW1wbGU= 50073
+cGxhbmV0 50074
+KFVJTlQ= 50075
+IHdoaXA= 50076
+IFBDQg== 50077
+Y2xpZGVhbg== 50078
+PSJc 50079
+IG94aWRl 50080
+IHN1Y2NlZWRz 50081
+ZGVyaXZlZA== 50082
+IEVjb25vbQ== 50083
+X2Nvb3JkaW5hdGVz 50084
+aXJhcw== 50085
+RHJhZnQ= 50086
+IHZpc3VhbGl6ZQ== 50087
+QnJpYW4= 50088
+X0FTU1VNRQ== 50089
+IE9iamVjdElk 50090
+IHRyYWluZXJz 50091
+X0ZPUkNF 50092
+IGNvbnNvbGVz 50093
+LXByb2Nlc3M= 50094
+bGljaGVy 50095
+IFNpbW1vbnM= 50096
+VGFraW5n 50097
+IENsYWltcw== 50098
+IGRpZmbDqXJlbnQ= 50099
+QWN0aXZpdHlSZXN1bHQ= 50100
+IHNucw== 50101
+6YCJ5os= 50102
+IENydXM= 50103
+IGxsYW0= 50104
+cmFi 50105
+IEpvYW4= 50106
+QUFB 50107
+CWZpbHRlcg== 50108
+aXNob3Bz 50109
+Z2V0dGluZw== 50110
+4LU= 50111
+IHF1YW50bw== 50112
+UGFzdA== 50113
+b3ZpY2g= 50114
+IGluanVzdGljZQ== 50115
+IEZMT0FU 50116
+IGFscmlnaHQ= 50117
+XERC 50118
+KEdhbWVPYmplY3Q= 50119
+dWlzaA== 50120
+KGJvdA== 50121
+IGdhbGxvbnM= 50122
+IFLDqQ== 50123
+IFNhaWQ= 50124
+IFNURE1FVEhPRENBTExUWVBF 50125
+YWlzaW5n 50126
+X3Byb2Nlc3Nvcg== 50127
+ZWxsaWRvcw== 50128
+dGVyZGFt 50129
+IEJlYW0= 50130
+VGV4dEFyZWE= 50131
+IHJldG9ybm8= 50132
+Lk1ha2U= 50133
+ICQoIjw= 50134
+IGxvY2tkb3du 50135
+IHJlbWVkaWVz 50136
+IHZlZWw= 50137
+eGVl 50138
+ZG9jdHlwZQ== 50139
+Rmls 50140
+IEV4cGFuZA== 50141
+IGVtcGxveXM= 50142
+IHNlc3Npb25TdG9yYWdl 50143
+UGhw 50144
+UHVibGlzaA== 50145
+IHJldGFs 50146
+ZmFicw== 50147
+eW5hbWljcw== 50148
+IHRvc3NlZA== 50149
+IG51bWJlck9mUm93c0luU2VjdGlvbg== 50150
+eHBhdGg= 50151
+XG1vZHVsZXM= 50152
+IGRpc2FzdHI= 50153
+IE1VTFQ= 50154
+Lk1lc2g= 50155
+LXN0YWdl 50156
+IHNkZg== 50157
+aXR1bmc= 50158
+dWdlcw== 50159
+ID8+Ij48Lw== 50160
+X2luZGV4ZXM= 50161
+IHZhbHVhdGlvbg== 50162
+IGxpZmVsb25n 50163
+IGV4cGVkaXRpb24= 50164
+KFlpaQ== 50165
+IHBhaW5z 50166
+IFBSSQ== 50167
+IE1peGVk 50168
+IENoYW5naW5n 50169
+R2VybWFueQ== 50170
+Y29tbXVuaWNhdGlvbg== 50171
+Lm9yZ2Fu 50172
+IE1hcmF0aG9u 50173
+Z2V0UGF0aA== 50174
+IEFjY3VyYWN5 50175
+IEdsb2JhbHM= 50176
+Jyl9fTwv 50177
+IE9XTkVS 50178
+4oCm4oCd 50179
+IHN0YWJiZWQ= 50180
+IHNjaGl6b3BocmVu 50181
+IEZu 50182
+IENPUkU= 50183
+IERhdGFSb3c= 50184
+IExURA== 50185
+IG15dGhz 50186
+IGZhbW91c2x5 50187
+fCwK 50188
+IFNlb3Vs 50189
+U2ly 50190
+IEJlcms= 50191
+UmVnRXhw 50192
+LmdldFJvdw== 50193
+IERlY29kZQ== 50194
+Uk4= 50195
+IG1hbmc= 50196
+IGVtcGxveWluZw== 50197
+X25vbWJyZQ== 50198
+PFRhc2s= 50199
+IEd1eXM= 50200
+IEFydGlrZWw= 50201
+QmVycnk= 50202
+enVyZQ== 50203
+IHZhbGV1cg== 50204
+aGl0cw== 50205
+IGx1Y3JhdGl2ZQ== 50206
+IGluZm9ybWF0 50207
+Q2xpbnRvbg== 50208
+IHRlcw== 50209
+IENlcnRpZmljYXRpb24= 50210
+X3dz 50211
+IG9mZmVuY2Vz 50212
+ZWJyYQ== 50213
+IEF4aW9z 50214
+cmVzdGFydA== 50215
+TE4= 50216
+LkVuY29kZQ== 50217
+bWl1bQ== 50218
+IEZlYXR1cmVk 50219
+0YjQuNCx0LrQsA== 50220
+IERlcHQ= 50221
+OyYj 50222
+IE15ZXJz 50223
+CXRyYW5zZm9ybQ== 50224
+VGV4YXM= 50225
+16g= 50226
+IFlvcmtzaGlyZQ== 50227
+bG5hbWU= 50228
+QnJl 50229
+44GT44Gu 50230
+IHNjZW5lcnk= 50231
+IGbDvGg= 50232
+CQkJCSAgICAgICA= 50233
+IERvb20= 50234
+IEFETUlO 50235
+KGVz 50236
+INC80LDRgdGB0LjQsg== 50237
+X2FzY2lp 50238
+L0RhdGE= 50239
+bGVzaG9vdGluZw== 50240
+QmFu 50241
+IG1lbW9pcg== 50242
+INmG 50243
+IEF1c3M= 50244
+KXBhcmVu 50245
+IGd1aWRpbmc= 50246
+IGJheg== 50247
+w7h5 50248
+QURN 50249
+IGRtYQ== 50250
+LlF1ZXVl 50251
+IFN1cHBsaWVz 50252
+IE1jRA== 50253
+IEFnZW50cw== 50254
+X2Ji 50255
+c2xhc2g= 50256
+IGhhc2hlcw== 50257
+IGNyYW5r 50258
+IFJhZw== 50259
+IGF1dG9ub215 50260
+w610dWxv 50261
+IHJlY3Vyc2lvbg== 50262
+IENyYXp5 50263
+X3RyYWNrZXI= 50264
+IE1i 50265
+X3BoeQ== 50266
+Zm9vYmFy 50267
+CXNwZWVk 50268
+IGNhbXBvcw== 50269
+IG1vdWxk 50270
+IGNoYXJpdGllcw== 50271
+SEVJR0hU 50272
+IGVhdXRv 50273
+X3NvbHV0aW9u 50274
+IERH 50275
+bWFydmlu 50276
+WWVzdGVyZGF5 50277
+IEJlY29tZQ== 50278
+PGxs 50279
+b3Jpcw== 50280
+W25leHQ= 50281
+IGluY3VtYmVudA== 50282
+IER1cA== 50283
+CW92ZXJyaWRl 50284
+5a6J 50285
+CWNmZw== 50286
+IHPDtg== 50287
+IGRlc2U= 50288
+LWRp 50289
+IG9udHZhbmdzdA== 50290
+IGRlY2lzaXZl 50291
+5Lu3 50292
+X2tlZXA= 50293
+KERhdGFiYXNl 50294
+Xy8= 50295
+IENMTA== 50296
+LW1ldGhvZA== 50297
+CVBvaW50 50298
+IEJ5dGVCdWZmZXI= 50299
+IHRyYWNlZA== 50300
+YWRkVG8= 50301
+7IS47JqU 50302
+YW55YWs= 50303
+IGVtcHJlc2Fz 50304
+KHJlcG9zaXRvcnk= 50305
+LmNyZWF0ZVN0YXRlbWVudA== 50306
+IGVsYQ== 50307
+Rm9yZ2VyeVRva2Vu 50308
+IGlzZW1wdHk= 50309
+YXNpbg== 50310
+IExvb2t1cA== 50311
+0LXQvdCw 50312
+IHZpb2xhdGVz 50313
+IFNtYXJ0eQ== 50314
+IHphaw== 50315
+KCQu 50316
+U0hPVw== 50317
+INCi 50318
+YXJ1cw== 50319
+KFRFU1Q= 50320
+cGFja2Vk 50321
+IGhpc3Rvcmlh 50322
+IGNhbmNlcnM= 50323
+IEtyZW1saW4= 50324
+UmVkdWNl 50325
+L2hvdw== 50326
+IMSQ 50327
+VElUTEU= 50328
+LmxvY2FsUG9zaXRpb24= 50329
+bGlhYmxl 50330
+IOesrA== 50331
+IGZyYW5jYWlz 50332
+CWhhc2g= 50333
+IGluaWNpbw== 50334
+IENyYXNo 50335
+IHsu 50336
+IGNsb2Nrcw== 50337
+ZHVjdG9yeQ== 50338
+IFB2 50339
+6528 50340
+IGRvaXM= 50341
+XC0= 50342
+IGphYXI= 50343
+IE1heWE= 50344
+bW96aWxsYQ== 50345
+CXJlc291cmNl 50346
+ISEK 50347
+YXlzY2FsZQ== 50348
+ICctJyw= 50349
+5Y+W5raI 50350
+IHN0YWxl 50351
+Q29ybmVy 50352
+w6hsZQ== 50353
+aXRpdmVz 50354
+emFz 50355
+aWNvcm4= 50356
+LkV4cHJlc3Npb24= 50357
+w7N0 50358
+QXBwbGljYXRpb25z 50359
+UmVzdHI= 50360
+X0luZGV4 50361
+jbDsnbTthLA= 50362
+IEpGcmFtZQ== 50363
+c2l4 50364
+X0lNRw== 50365
+6JeP 50366
+IE51bWVyaWM= 50367
+IHdpcms= 50368
+X1NVTQ== 50369
+PERhdGVUaW1l 50370
+IHB5bGludA== 50371
+IGxhbWVudA== 50372
+IFBvc2U= 50373
+X2VudHJvcHk= 50374
+IGVuY291cmFnZW1lbnQ= 50375
+IGxhaW4= 50376
+5Yib5bu6 50377
+LWZy 50378
+IGNvcnJlY3Rpb25z 50379
+cGhhcw== 50380
+dXVy 50381
+YXRlZ29yaWFz 50382
+IGNhdGFseXN0 50383
+LmFsdA== 50384
+IEZlcm5hbmRv 50385
+LkRhdGFHcmlkVmlld0NlbGxTdHlsZQ== 50386
+IGhlcmJhbA== 50387
+IFJH 50388
+U1RFUA== 50389
+SUZu 50390
+IFRvbmc= 50391
+xb5l 50392
+IElOQ0xVREU= 50393
+IGhj 50394
+dHJhY2tlcg== 50395
+CVN0cmluZ0J1aWxkZXI= 50396
+IERlc3Rpbnk= 50397
+IHNvcGhvbW9yZQ== 50398
+IERlZA== 50399
+IFBBUkE= 50400
+aXpvbnRhbGx5 50401
+LWNoYW5nZQ== 50402
+ZW5kaWQ= 50403
+6YCJ5oup 50404
+aWprZQ== 50405
+IEF0aGxldGlj 50406
+YmFp 50407
+Z2V0UG9zaXRpb24= 50408
+Lm5hbWVzcGFjZQ== 50409
+6K6i5Y2V 50410
+UkFDVA== 50411
+IHJlbGlldmVk 50412
+IHBvdXJpbmc= 50413
+IGl5 50414
+cm92ZQ== 50415
+IGFkb2xlc2NlbnRz 50416
+IGF3ZQ== 50417
+cmVhcw== 50418
+QW50aUZvcmdlcnlUb2tlbg== 50419
+cm93bmluZw== 50420
+IFVuY2xl 50421
+LkNvbm4= 50422
+IE1lZGlhVHlwZQ== 50423
+Lm9yYWNsZQ== 50424
+SU5URVJOQUw= 50425
+LGFuZA== 50426
+IGZhdXg= 50427
+aXBtYXA= 50428
+JG1vZGVs 50429
+IEdlb2Zm 50430
+X0FYSVM= 50431
+KCgpKQo= 50432
+IG5lZ2xlY3RlZA== 50433
+IHF1YXJ0ZXJseQ== 50434
+IGRpZXNlbg== 50435
+IGRyYWdvbnM= 50436
+TmlnaHQ= 50437
+L1dlYg== 50438
+PFZlYw== 50439
+CSAgICAgICAgICAgICAgICAgICAgICAg 50440
+IE9icw== 50441
+YmRk 50442
+IGhlaXI= 50443
+LWFuZ3VsYXI= 50444
+TWVudVN0cmlw 50445
+ICciPic= 50446
+a2luc29u 50447
+INC60L7Quw== 50448
+b2duaXRpdmU= 50449
+X2xp 50450
+IGltbWluZW50 50451
+IGFmZmluaXR5 50452
+LnNpZ25hbA== 50453
+IG5vdGNo 50454
+IFN0ZWVsZXJz 50455
+bWF4bGVuZ3Ro 50456
+S0s= 50457
+IEV1Z2VuZQ== 50458
+X1BXTQ== 50459
+cm9p 50460
+IOKXjw== 50461
+IEhhbWJ1cmc= 50462
+Lk11c3Q= 50463
+IGF4ZQ== 50464
+ZW5lZg== 50465
+IGFtYml0aW9ucw== 50466
+IFNwZWNpZXM= 50467
+IFN0cmVzcw== 50468
+IGF3aGlsZQ== 50469
+INCx0YPQtA== 50470
+IHdpdGhzdGFuZA== 50471
+IERlY29kZXI= 50472
+X2ludmVudG9yeQ== 50473
+IHsNDQo= 50474
+IHRndA== 50475
+IHJhaWxyb2Fk 50476
+V0FTSElOR1RPTg== 50477
+IG5lZ290aWF0ZWQ= 50478
+TlNU 50479
+LXBob25l 50480
+LFU= 50481
+IGV4ZXJjaXNpbmc= 50482
+4bul 50483
+X1BJWEVM 50484
+YXZvcnM= 50485
+aXRlcmF0ZWQ= 50486
+IHZhbXBpcmU= 50487
+YWRhbA== 50488
+SW5ncmVzZQ== 50489
+IHVuZw== 50490
+amVjdGl2ZQ== 50491
+LmNlbGxz 50492
+IG5hbm8= 50493
+IG1hcmtkb3du 50494
+X1JVTEU= 50495
+KGV2ZW50cw== 50496
+IGx1Z2dhZ2U= 50497
+TUVTU0FHRQ== 50498
+aWdrZWl0 50499
+JGNvdW50 50500
+QXR0cmlidXRlTmFtZQ== 50501
+SUdJTkFM 50502
+X0VudA== 50503
+IEJG 50504
+IENPTU1FTlQ= 50505
+X2luaQ== 50506
+IEV1cm9wZWFucw== 50507
+IEJlbGxl 50508
+5ZG9 50509
+KVsn 50510
+5bqU 50511
+IFVzZWZ1bA== 50512
+LnJlZmVyZW5jZQ== 50513
+KCkiLA== 50514
+X2dyYWRl 50515
+IEthdw== 50516
+IHNlbnRlbmNpbmc= 50517
+IHNvY2lhbGlzbQ== 50518
+bW9uc3Rlcg== 50519
+X0xBWUVS 50520
+IGRlZXBlc3Q= 50521
+d2s= 50522
+IE5vaXNl 50523
+IyMjCgo= 50524
+IHByw6lj 50525
+b3RsZQ== 50526
+0YLQtQ== 50527
+YXVm 50528
+aWJhbA== 50529
+IGNvbnF1ZXI= 50530
+PkVtYWls 50531
+IGFtYnVsYW5jZQ== 50532
+T0FE 50533
+ICgiJQ== 50534
+IEZJ 50535
+LmZpeHR1cmU= 50536
+IHRlcnNl 50537
+ICAgIAkJCQk= 50538
+IHNhbmN0dWFyeQ== 50539
+dWdp 50540
+IENvbXBhcmF0b3I= 50541
+RGVmaW5pdGlvbnM= 50542
+IGFzdGhtYQ== 50543
+IGxhY3Q= 50544
+IGhhcmR3b29k 50545
+LmNsb2Nr 50546
+IGF0dHJhY3Rpbmc= 50547
+IE1vdXI= 50548
+KGRpc3RhbmNl 50549
+aWNpdHM= 50550
+IGJvbm5l 50551
+IEFDQ0VTUw== 50552
+LkRlc2VyaWFsaXplT2JqZWN0 50553
+IFR5cGVk 50554
+IGpldQ== 50555
+IGFwcElk 50556
+IENsYXJh 50557
+IEhG 50558
+IFJlaWNo 50559
+aXBwbGVz 50560
+Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQ== 50561
+X2RlbGl2ZXJ5 50562
+ZXJpYWxpemF0aW9u 50563
+IHBsYWludGlmZnM= 50564
+U2NpZW50 50565
+c2hvcHBpbmc= 50566
+IER1bW15 50567
+IFdhbGQ= 50568
+R3JvdXBOYW1l 50569
+IGluc2NyaXB0aW9u 50570
+ZWxvZw== 50571
+Ojo6Ojo6Ojo= 50572
+X2xk 50573
+QmFja1ByZXNzZWQ= 50574
+LlJhdw== 50575
+IE9uVHJpZ2dlcg== 50576
+IG11c2V1bXM= 50577
+IEJlZW4= 50578
+IEFkdmVudHVyZXM= 50579
+IHNsYXRl 50580
+IGxldHQ= 50581
+IHN1bmQ= 50582
+IEdpbg== 50583
+IE1lY2hhbmljYWw= 50584
+LnNoaXA= 50585
+QXBwQ29tcG9uZW50 50586
+IGRlc3RpbmVk 50587
+IGR3ZWxsaW5n 50588
+UHJvZmlsZXI= 50589
+UHJlcGFyZQ== 50590
+emVpY2g= 50591
+IHNpbGljb24= 50592
+KGhhcw== 50593
+ICMl 50594
+VklERU8= 50595
+IGNvbGxhYm9yYXRl 50596
+TGlu 50597
+IHNjb3Blcw== 50598
+KGNsYXNzTmFtZQ== 50599
+KHNk 50600
+YW5kaW4= 50601
+LmhhbQ== 50602
+U2VydmljZUltcGw= 50603
+LWRlc2NyaWJlZA== 50604
+IGlyb255 50605
+c3RpYWw= 50606
+IEh1YXdlaQ== 50607
+KHJlcG8= 50608
+IHVuZXhwZWN0ZWRseQ== 50609
+IEthaQ== 50610
+Lmluc3RhbGw= 50611
+XHhm 50612
+IGV4aGliaXRlZA== 50613
+X1RDUA== 50614
+IE94 50615
+X0NITw== 50616
+IHByb3N0aXR1ZXJ0ZQ== 50617
+IHbDpA== 50618
+IHNpdG8= 50619
+IGNvbnN0aXR1ZW50cw== 50620
+IENvbnRpbnVlZA== 50621
+IFNBVkU= 50622
+cnNz 50623
+L21lc3NhZ2U= 50624
+dWJlcw== 50625
+IG1pc2RlbWVhbg== 50626
+IHRheGF0aW9u 50627
+IHN0b3J5bGluZQ== 50628
+aGFpcg== 50629
+IEZpbmRz 50630
+U0lH 50631
+dmVyaWZpY2F0aW9u 50632
+fj0= 50633
+Lmhw 50634
+SXRlcmFibGU= 50635
+0YvQtQ== 50636
+YXRvcmk= 50637
+IGN0cg== 50638
+Ung= 50639
+Xyk7Cgo= 50640
+ZGFn 50641
+LnBpbg== 50642
+IHBzZXVk 50643
+IGludm8= 50644
+0YHRgtGA 50645
+X3BpeA== 50646
+5Li656m6 50647
+IHN3b3Ju 50648
+4oCUb3I= 50649
+X3JlZ2lzdHJ5 50650
+IGRpc2FzdGVycw== 50651
+IFJPSQ== 50652
+IOKAlQ== 50653
+YWt0dQ== 50654
+Zm9yZXN0 50655
+YmVpdGVu 50656
+4oCUSQ== 50657
+dWV2YQ== 50658
+ZWd0 50659
+IHNwaWtlcw== 50660
+VVJFUw== 50661
+IFJlY29tbWVuZGVk 50662
+IGV4cGxvaXRlZA== 50663
+IEZyZWRlcmljaw== 50664
+X0NPTVBMRVRF 50665
+IERydWdz 50666
+ISEhISEhISE= 50667
+IFJpdg== 50668
+U1RPUA== 50669
+Uk9PTQ== 50670
+IFBBU1NXT1JE 50671
+Q29va2llcw== 50672
+LkVs 50673
+4but 50674
+IEJlcnQ= 50675
+IGhhc2hlZA== 50676
+aWNlc3Rlcg== 50677
+IGRlY29yYXRvcg== 50678
+IHF1ZXJ5U3RyaW5n 50679
+OjsK 50680
+ICJbIg== 50681
+b3RvcGU= 50682
+LUFtZXJpYw== 50683
+IE1hdHRoZXdz 50684
+VVJBTA== 50685
+4oCcLA== 50686
+U3VtbWVy 50687
+Zm9z 50688
+X0NPTlRBSU5FUg== 50689
+X0FDSw== 50690
+IGZpbHRy 50691
+X2Rpc3A= 50692
+X1Jl 50693
+IGZhY2lsZQ== 50694
+0LDRiA== 50695
+IOyVig== 50696
+IGViZW4= 50697
+IHNwcmluaw== 50698
+IFF1aW50 50699
+PlY= 50700
+IGhpc3RvcmlhbnM= 50701
+b3VybWV0 50702
+IE1vbml0b3Jpbmc= 50703
+bGVkZ2Vy 50704
+Y290dA== 50705
+IHdhcmU= 50706
+R0dMRQ== 50707
+Y2Fycw== 50708
+IE1FRElBVEVL 50709
+IHZvbHVwdA== 50710
+X1ZpZXc= 50711
+SEVM 50712
+KGNvcHk= 50713
+KHN0YXRz 50714
+IGNocm9tb3NvbWU= 50715
+IEN1cnRpcw== 50716
+LWNvbmY= 50717
+KGFzc2V0 50718
+IGh2b3I= 50719
+RmlsZVN5c3RlbQ== 50720
+PD4oKTsNCg== 50721
+b2NvZGVy 50722
+IENhbm5vbg== 50723
+KXg= 50724
+IFNtb290aA== 50725
+IFNBUw== 50726
+X2Nl 50727
+CXByZXY= 50728
+X21vdmll 50729
+RWM= 50730
+X3dhbGw= 50731
+PEJ1dHRvbg== 50732
+IEZBU1Q= 50733
+IG9uVmlldw== 50734
+dWxhbg== 50735
+IFNVUFBPUlQ= 50736
+IGdlc2NoaWNodGVu 50737
+IFNvbnM= 50738
+SW1t 50739
+JElGbg== 50740
+IGZhaXJuZXNz 50741
+IGRwaQ== 50742
+YXRzdQ== 50743
+Sm9zaA== 50744
+RXF1YWxpdHk= 50745
+IH0oKQo= 50746
+X2xlc3M= 50747
+IFJhdGlv 50748
+IENhdHM= 50749
+IFN0ZXJu 50750
+TW9uc3Rlcg== 50751
+IG1lcmN1cnk= 50752
+w7xocg== 50753
+IHBsdXNpZXVycw== 50754
+LmRlc2VyaWFsaXpl 50755
+c2NvcHk= 50756
+LkZhbHNl 50757
+KWFuaW1hdGVk 50758
+IEV4cGVydHM= 50759
+ICIiKXsK 50760
+LldoZW4= 50761
+c2VlYWxzbw== 50762
+LnVucGFjaw== 50763
+TEVN 50764
+LnNlbGVjdEFsbA== 50765
+IHBlcmNlcHRpb25z 50766
+dWRpbmc= 50767
+aXJsaW5n 50768
+IFByaW50aW5n 50769
+Z3JhbXM= 50770
+IEZpbGVTdHJlYW0= 50771
+ZXJ2aWxsZQ== 50772
+aWxvZw== 50773
+aWNtcA== 50774
+X0NvdW50 50775
+IGxpdmVzdG9jaw== 50776
+LWNh 50777
+ZG9jdW1lbnRz 50778
+IHBvbGVz 50779
+CXdhbnQ= 50780
+IGZsdW9yZXM= 50781
+IHN0YW5kcG9pbnQ= 50782
+IEh1Z2U= 50783
+IHJhZGlhbnM= 50784
+IFVJQmFy 50785
+RURJVU0= 50786
+IEhpc3Rvcmlj 50787
+X2hvbGRlcg== 50788
+IE1hcmluZXM= 50789
+IHTDpA== 50790
+LkxpZ2h0 50791
+cXVpcmVy 50792
+YXNvbnJ5 50793
+ZGl2aWRlcg== 50794
+IEZsdXR0ZXI= 50795
+X2Zi 50796
+cmVzdHJpY3RlZA== 50797
+IEV2ZXJ5Ym9keQ== 50798
+TsOjbw== 50799
+IGtub3Q= 50800
+IFR3aXRjaA== 50801
+IGhhbGx3YXk= 50802
+KENvbGxpZGVy 50803
+SW5wdXRFbGVtZW50 50804
+PykK 50805
+L29mZg== 50806
+Lyk= 50807
+cGxheWVk 50808
+W09G 50809
+IGJhdHRpbmc= 50810
+X2Rs 50811
+IGNvbWVkaWFu 50812
+IMOpdg== 50813
+IERFTQ== 50814
+IEVkZW4= 50815
+OndoaXRl 50816
+Jycs 50817
+Q29uc3RydWN0aW9u 50818
+YWNlcmI= 50819
+IHRhc2tlZA== 50820
+Lm1hbmFnZQ== 50821
+UmVsYXRpb25zaGlw 50822
+IHBob24= 50823
+bno= 50824
+X0JHUg== 50825
+VmFsaWRhdGVBbnRpRm9yZ2VyeVRva2Vu 50826
+X2Fpcg== 50827
+4oCcV2hlbg== 50828
+IGdsZnc= 50829
+IENvbnZlcnNhdGlvbg== 50830
+X1RPVEFM 50831
+LFo= 50832
+IGdyYXo= 50833
+IGl0ZXJhYmxl 50834
+IFBBU1M= 50835
+IGFkdmVydGlzZQ== 50836
+IG3DtmdsaWNo 50837
+L3RyYWlu 50838
+IFZvbGtzd2FnZW4= 50839
+IGNyZWVweQ== 50840
+ICIpDQo= 50841
+UVVFTkNF 50842
+IGFsdGFy 50843
+IGVkaXRz 50844
+Y29tcGlsZWQ= 50845
+YXduaW5n 50846
+IER1bmdlb24= 50847
+IG9zZw== 50848
+TmF2aWdhdGlvbkJhcg== 50849
+IHRyZW5kaW5n 50850
+IEVjbw== 50851
+b2dnbGVz 50852
+Y2RvdA== 50853
+fC0= 50854
+U2ll 50855
+ZWNyZXQ= 50856
+IE5lZ2F0aXZl 50857
+IExpbmc= 50858
+IERJTQ== 50859
+IENXRQ== 50860
+IENhcnJpZXI= 50861
+IGNhcnRyaWRnZQ== 50862
+X3VzYg== 50863
+PW9z 50864
+IEphY2tpZQ== 50865
+IG90cmFz 50866
+IGNvbW1vZGl0aWVz 50867
+IFByZXNlbnRhdGlvbg== 50868
+KSYmKA== 50869
+IE1hcnRoYQ== 50870
+IENhdGhvbGljcw== 50871
+IE1vbmQ= 50872
+0L7QsdGL 50873
+X2Fic29sdXRl 50874
+IGFzaGFtZWQ= 50875
+cG9uc29ycw== 50876
+dGFs 50877
+IHNhZG5lc3M= 50878
+IHB1w7I= 50879
+RmFkZQ== 50880
+LXByZXZpZXc= 50881
+IFJlcXVlc3Rz 50882
+IENhbHZpbg== 50883
+aG9ybg== 50884
+UmV1c2VJZGVudGlmaWVy 50885
+KHByb3ZpZGVy 50886
+L2FwcHM= 50887
+aW1lbw== 50888
+CUNsYXNz 50889
+U2Ftc3VuZw== 50890
+IFdPUkxE 50891
+IGNpbm5hbW9u 50892
+ZG90ZW52 50893
+IElVc2Vy 50894
+IERFVg== 50895
+X0NoYXI= 50896
+LmliYXRpcw== 50897
+ZXRp 50898
+L21l 50899
+c3N0 50900
+LnN5bQ== 50901
+IFJ1Z2J5 50902
+LW1hc3Rlcg== 50903
+YWphcg== 50904
+IFlFQVI= 50905
+IG9kcA== 50906
+IFJvbGVz 50907
+IGJpcGFydGlzYW4= 50908
+YWlsbGU= 50909
+IGJsb2NrZXI= 50910
+IGdyZWVucw== 50911
+LlNFQ09ORFM= 50912
+IGJlbGlldmVycw== 50913
+IExpa2Vz 50914
+RkxPQVQ= 50915
+IG1haw== 50916
+IGdjYw== 50917
+4pWQ4pWQ 50918
+KCJ+Lw== 50919
+U0NSSVBUT1I= 50920
+IHRvbm5lcw== 50921
+IFNhbmc= 50922
+IHRyYW5zcG9zZQ== 50923
+ZW5uYWk= 50924
+UHJlZA== 50925
+IHNvbGx0ZQ== 50926
+LmdpdGh1YnVzZXJjb250ZW50 50927
+KHByaW50 50928
+IEhvbGU= 50929
+55yL 50930
+YWRnZXQ= 50931
+IHByb21wdHM= 50932
+IGdlbmV0aWNhbGx5 50933
+IEhvZA== 50934
+IHZlcnRpY2FsbHk= 50935
+X2NvbnRyb2xz 50936
+0YHRgtCw0L0= 50937
+Iil7DQo= 50938
+JHRpdGxl 50939
+IH0pLAoK 50940
+IHN0YXRld2lkZQ== 50941
+IENvcnJlc3BvbmQ= 50942
+IEF0dHI= 50943
+aXRhbnQ= 50944
+RWxlbWVudFR5cGU= 50945
+IG91dHdhcmQ= 50946
+IGZhbWlsaWE= 50947
+KGFydGljbGU= 50948
+IGJsYXQ= 50949
+wqAK 50950
+IGdsR2V0 50951
+IFJlY2VpdmVy 50952
+ICUt 50953
+YWRhbQ== 50954
+V2lubmVy 50955
+IHRhaWxvcg== 50956
+X3B3ZA== 50957
+ZXJ0ZW4= 50958
+U3Rhbg== 50959
+CWFsbA== 50960
+YWxpdmU= 50961
+c3RydG90aW1l 50962
+77+9cw== 50963
+c2Vzc2lvbnM= 50964
+JGNvbm4= 50965
+YXNzaXN0 50966
+IGNoYXR0aW5n 50967
+IE1hbnQ= 50968
+ICVA 50969
+ICIiKTsKCg== 50970
+IGRndg== 50971
+IO2VqA== 50972
+LnJlcGVhdA== 50973
+X01lc3NhZ2U= 50974
+IGFkdmlzZXJz 50975
+L3BhdGg= 50976
+IGtlcw== 50977
+KX08Lw== 50978
+TWlzYw== 50979
+IGJzb24= 50980
+IHRyaW1tZWQ= 50981
+IEFjaw== 50982
+VmVydGV4QXR0cmli 50983
+57Si 50984
+dWF0ZXM= 50985
+Lm15c3Fs 50986
+IGRlc3Rpbg== 50987
+IHByb2Js 50988
+KENvbnN0YW50 50989
+YXNzZXM= 50990
+LWltYWdlcw== 50991
+X0FSRUE= 50992
+X18qLw== 50993
+W10o 50994
+IHNpZ25Jbg== 50995
+xJE= 50996
+eHI= 50997
+YWhpcg== 50998
+LmZpcmVzdG9yZQ== 50999
+IHNlcXVlbnRpYWw= 51000
+IElkZWE= 51001
+LWJhc2lj 51002
+X3BhZw== 51003
+IGluc3RhZ3JhbQ== 51004
+b3Ryb24= 51005
+X2FsaWdubWVudA== 51006
+XFxcXA== 51007
+LkZhY3Rvcnk= 51008
+LnJ1bGU= 51009
+LmNoZGly 51010
+IGxpYnJv 51011
+KGdhbWVPYmplY3Q= 51012
+LlRvb2xTdHJpcEJ1dHRvbg== 51013
+IGRpc2NvdmVycw== 51014
+LkFyZ3M= 51015
+ZG9i 51016
+IHZu 51017
+4oaS 51018
+IGTDvA== 51019
+IFhN 51020
+IGFsdW1uaQ== 51021
+IGhvbmU= 51022
+IHNlY3VyZWx5 51023
+X2Ryb3Bkb3du 51024
+RGlzY2xhaW1lcg== 51025
+IGR6aQ== 51026
+KHRpbWVzdGFtcA== 51027
+Jyld 51028
+IGN1bHRpdmF0aW9u 51029
+Li4uCgoK 51030
+IFRyZWF0eQ== 51031
+IERpc3M= 51032
+IGNvbmZsaWN0aW5n 51033
+LmdldFNlbGVjdGlvbg== 51034
+IHBsYXlhYmxl 51035
+IFNpbGs= 51036
+IEVxdWFsaXR5 51037
+IG1veQ== 51038
+IGZsYXR0 51039
+IG1vdGl2ZXM= 51040
+UGVyZmVjdA== 51041
+LmV4aXN0 51042
+IHR3ZWFr 51043
+IG9taXQ= 51044
+IFR3aWxpZ2h0 51045
+IGtpc3Npbmc= 51046
+IGNocmlzdGlhbg== 51047
+KFNF 51048
+X2RlZmluZQ== 51049
+IFBlbmc= 51050
+U29ydGVk 51051
+J2lu 51052
+TG9ncw== 51053
+4buHbg== 51054
+IG55bG9u 51055
+RHVtcA== 51056
+SW1hZ2luZQ== 51057
+cmVuYW1l 51058
+IGJlZm9yZWhhbmQ= 51059
+cHlnYW1l 51060
+IGJweQ== 51061
+IERq 51062
+IHRpdHVsbw== 51063
+IG5sdGs= 51064
+IFNjaG1pZHQ= 51065
+IENhdg== 51066
+KG9uZQ== 51067
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA= 51068
+LmdldE1vZGVs 51069
+IFB0 51070
+YXRvaQ== 51071
+LmxvY2Fscw== 51072
+YnVyc2VtZW50 51073
+UHJvdmluY2U= 51074
+IEFwcHJvdmVk 51075
+KCk8PA== 51076
+w7NyaWE= 51077
+dXNjaA== 51078
+IEplbm55 51079
+YXJyYW50cw== 51080
+IExpYmVydA== 51081
+TG9yZA== 51082
+IFJlbW92ZWQ= 51083
+X2NvZGVj 51084
+LmJ1bmRsZQ== 51085
+IEdvbnphbGV6 51086
+b3BlcnM= 51087
+neWni+WMlg== 51088
+ZXR0aW5n 51089
+IGdvZGRlc3M= 51090
+cmlwZQ== 51091
+IG11c2N1bGFy 51092
+CQkJCQkJCQkg 51093
+IEh1Z28= 51094
+IG1lam9yZXM= 51095
+bG9pZA== 51096
+cml0ZWxu 51097
+Z2lz 51098
+YWRkb24= 51099
+ICgoKCg= 51100
+YXBwb2ludG1lbnQ= 51101
+cmVzZXJ2ZWQ= 51102
+CWZyaWVuZA== 51103
+X2F2YXRhcg== 51104
+Qk9PTEU= 51105
+YWhp 51106
+LUVORA== 51107
+IGlmZg== 51108
+w7Ni 51109
+IEJydW5v 51110
+cm93c2FibGU= 51111
+IFBvaXNvbg== 51112
+KGZsYWdz 51113
+dXJ0bGVz 51114
+IEFuaW1l 51115
+IG1pZ3JhbnQ= 51116
+CXN0cmNhdA== 51117
+KHJlcGx5 51118
+IFJlZnVnZQ== 51119
+IEJX 51120
+ZWZ1bA== 51121
+JHZhbHVl 51122
+ZmVk 51123
+ICAgICAgICAgICAgICAgICAgICAgICAK 51124
+6LWE 51125
+KGNt 51126
+IHZ1bG5lcmFiaWxpdGllcw== 51127
+IFsoJw== 51128
+IHVuYmVsaWV2YWJsZQ== 51129
+c3RyaWN0aW9u 51130
+ZW50aWV0aA== 51131
+IHByYXlpbmc= 51132
+Q2xhaW1z 51133
+IGthdWZlbg== 51134
+bsOp 51135
+IHBvaXNvbmluZw== 51136
+Y29sbGVjdGlvbnM= 51137
+IGluaXRTdGF0ZQ== 51138
+IFNldmVyaXR5 51139
+IGNvbnRlbnRpb24= 51140
+IAoJCg== 51141
+LmNvbnRyb2xsZXJz 51142
+c3RydWN0dXJlZA== 51143
+aWN0aW0= 51144
+IE9iZXI= 51145
+IC8qI19f 51146
+X09U 51147
+IEFtZXJpY2Fz 51148
+IEFkYQ== 51149
+UHJvZHV0bw== 51150
+Lm11bHRp 51151
+IGdyYXBl 51152
+YmVn 51153
+5p+l6K+i 51154
+IHF1YXJ0eg== 51155
+IFJvbWFuY2U= 51156
+IE1pZHdlc3Q= 51157
+IGhvdXNlZA== 51158
+IGZ1cm5pc2g= 51159
+aWNvbnQ= 51160
+LnVuc2hpZnQ= 51161
+b3RyZQ== 51162
+IMO6bg== 51163
+aXBwbGU= 51164
+IHN1YnVyYg== 51165
+dWFsaQ== 51166
+Vm9pY2U= 51167
+LklzQW55 51168
+LGNvbHVtbg== 51169
+IFByb3NlYw== 51170
+SURB 51171
+CXBvc3Q= 51172
+cHRvbXM= 51173
+dsOp 51174
+IEluZ3JlZGllbnRz 51175
+w7ZmZg== 51176
+Lm9wZXJhdG9y 51177
+IDw8PQ== 51178
+bGFzdGlj 51179
+IHJlc2VtYmxl 51180
+VW5hdXRob3JpemVk 51181
+IHR1dHRv 51182
+X1NXSVRDSA== 51183
+X1JFQURZ 51184
+fT0= 51185
+bm93bGVkZ2U= 51186
+IGFwcGVuZGVk 51187
+dW5nYW4= 51188
+4oCZZW4= 51189
+IExvcmVu 51190
+cHVibGlzaGVy 51191
+IE1H 51192
+fSwi 51193
+IFdhbHNo 51194
+VGVtcGxhdGVz 51195
+X3NvY2lhbA== 51196
+IHBhcmlzaA== 51197
+IFNwbA== 51198
+bWluYXRlZA== 51199
+KEZBTFNF 51200
+IGZvcmVmcm9udA== 51201
+bW9kaXR5 51202
+IGJpbGF0ZXJhbA== 51203
+IGNvbXBldGl0 51204
+IGNhbmRsZXM= 51205
+LmRw 51206
+IGNvbGxlY3Rz 51207
+dGVsZWZvbm8= 51208
+IGF0dGVudA== 51209
+IExlbW9u 51210
+aXphZGE= 51211
+IHRoZXJhcGllcw== 51212
+IHBhcmFkb3g= 51213
+IHRhcw== 51214
+LXN1Ym1pdA== 51215
+ZWtlcg== 51216
+SU5hdmlnYXRpb25Db250cm9sbGVy 51217
+IG1ldGF2YXI= 51218
+IHNld2luZw== 51219
+IFppbWJhYndl 51220
+IGxhd2Z1bA== 51221
+IGxvcmU= 51222
+IExvYWRz 51223
+INGB0L7Qt9C0 51224
+LnByb21pc2U= 51225
+IEZhY2Vz 51226
+LlBsYXRmb3Jt 51227
+LmdldExvY2F0aW9u 51228
+IHRyb3VibGluZw== 51229
+IHbDrWRlbw== 51230
+IEZlYXR1cmluZw== 51231
+5Lqn 51232
+cWVk 51233
+IG9uQmluZA== 51234
+IHRvZGRsZXI= 51235
+Q2xv 51236
+RGl2aXNpb24= 51237
+LWdhbGxlcnk= 51238
+IEdlbGQ= 51239
+c3BlY2lmaWM= 51240
+RmllbGROYW1l 51241
+X2V4Y2Vs 51242
+XGh0ZG9jcw== 51243
+IERW 51244
+ICY6 51245
+IHR3aWc= 51246
+IENvbmNlcm4= 51247
+IHNob3RndW4= 51248
+IG5pY2tlbA== 51249
+IEx1eHVyeQ== 51250
+X0tFWVM= 51251
+Lm5weQ== 51252
+xa8= 51253
+IGZvcmVoZWFk 51254
+zrI= 51255
+IGVuZGFuZ2VyZWQ= 51256
+L3RoZQ== 51257
+cGlwZWxpbmU= 51258
+xbE= 51259
+bmVv 51260
+RXhwbG9yZQ== 51261
+U3BlY1dhcm4= 51262
+IGludGVyY2hhbmdl 51263
+KHBp 51264
+YmlydGhkYXk= 51265
+RGF0YVJvdw== 51266
+IFNQUg== 51267
+IG9zdGU= 51268
+ICJ+ 51269
+YXRpc2ZhY3Rpb24= 51270
+Tkg= 51271
+b3Jkbw== 51272
+LWZvY3VzZWQ= 51273
+J0E= 51274
+lok= 51275
+LmJlc3Q= 51276
+IFNwZWNpZmljYXRpb24= 51277
+Lz4uCgo= 51278
+b2dlbmVzaXM= 51279
+IE9QVElPTlM= 51280
+dXB0b29scw== 51281
+IG1pbGl0YW50 51282
+IGV4aXRlZA== 51283
+aWdhcg== 51284
+IENPTU0= 51285
+IERpc3Bvc2FibGU= 51286
+YXljYXN0 51287
+IHJvd3NwYW4= 51288
+IHN5bnRoZXM= 51289
+IHNvbmRlcm4= 51290
+IDwhLS08 51291
+IEVuZGU= 51292
+LnZhcmlhYmxlcw== 51293
+IGNvbnNlcXVlbnRseQ== 51294
+c2Rr 51295
+U3VwcGx5 51296
+cmVzcG9uc2l2ZQ== 51297
+T3BlbmluZw== 51298
+cGhvdA== 51299
+IH1c 51300
+IGJ1bGxzaGl0 51301
+IGJlYWNvbg== 51302
+X3NhdA== 51303
+IHNuYXBz 51304
+IEdIeg== 51305
+TE9ORw== 51306
+PHBhaXI= 51307
+IFsKCg== 51308
+IFZlcmc= 51309
+IEVpbmU= 51310
+L3Bvc3Rz 51311
+IGFyYWI= 51312
+IHN1bWE= 51313
+44Oz44OI 51314
+IHNjYXJj 51315
+IG9sZWg= 51316
+ID8/Pw== 51317
+IE9mZmVycw== 51318
+eGVk 51319
+IGZ1bGxXaWR0aA== 51320
+LWFjdGlvbnM= 51321
+T3V0ZXI= 51322
+IEV4cG8= 51323
+w6lyZXI= 51324
+Lkhl 51325
+REg= 51326
+IGhpbA== 51327
+IE1pbGxlbm4= 51328
+0LXQvdGM 51329
+SWNl 51330
+X2dyYXk= 51331
+INC/0L7Qu9GD0Yc= 51332
+IFB1bms= 51333
+IHRpbWV2YWw= 51334
+IGlzYQ== 51335
+IENIdG1s 51336
+LkRhdGFQcm9wZXJ0eU5hbWU= 51337
+IGRpeQ== 51338
+dG91cg== 51339
+IGpUZXh0RmllbGQ= 51340
+IGplbGx5 51341
+IGFra2E= 51342
+LWVyYQ== 51343
+RGVwcmVjYXRlZA== 51344
+X0lNUEw= 51345
+IE1vbnRocw== 51346
+X0lURVI= 51347
+IGFydGU= 51348
+IEhlYWRpbmc= 51349
+IEJvaA== 51350
+IHByYWc= 51351
+IGRvd25zdHJlYW0= 51352
+IEJPQVJE 51353
+X2tleXdvcmRz 51354
+IE1ldHJvRnJhbWV3b3Jr 51355
+KS0o 51356
+PEV2ZW50 51357
+4bqldA== 51358
+IFByZWNpc2lvbg== 51359
+IE1SSQ== 51360
+aGVyZW5jZQ== 51361
+aXhv 51362
+KSkpewo= 51363
+KCk/Pg== 51364
+IHNhYXQ= 51365
+IFdhcmVob3VzZQ== 51366
+X2F0b21pYw== 51367
+IHZvaWNlZA== 51368
+SXRlbUNsaWNr 51369
+ICAgICAgCQ== 51370
+LlJlc3VsdFNldA== 51371
+L3BsdWdpbg== 51372
+IGhhbGxz 51373
+PWZvcm0= 51374
+IFdhZ25lcg== 51375
+ZW1haWxz 51376
+JSUK 51377
+VU5LTk9XTg== 51378
+IFJpbQ== 51379
+dWludHB0cg== 51380
+IExpYmVyYWxz 51381
+IHRlcnJpdG9yaWFs 51382
+IE11cmRlcg== 51383
+IExhZGVu 51384
+IHByZXNpZGVudGU= 51385
+KGNhcA== 51386
+IH0sewo= 51387
+YXZvdXJpdGU= 51388
+ZmluZEFsbA== 51389
+IGFwcGxhdWQ= 51390
+IOuplA== 51391
+L3Bob3Rv 51392
+X3N5bg== 51393
+LndhbGs= 51394
+IHN1bnNoaW5l 51395
+IHN0dWJib3Ju 51396
+IGRvd25zaWRl 51397
+IExURQ== 51398
+LWJ1aWxkaW5n 51399
+UXVlcnlCdWlsZGVy 51400
+X2Rpc2FibGVk 51401
+VGVycg== 51402
+YWtyYQ== 51403
+UmVmcmVzaGluZw== 51404
+X3Byb2Jz 51405
+IGZvbGw= 51406
+PmI= 51407
+IGNvbGxhdGVyYWw= 51408
+JGVycm9y 51409
+IGFjb21wYW4= 51410
+X2l2 51411
+K2Q= 51412
+YWp1 51413
+IOKd 51414
+c3VybmFtZQ== 51415
+LmFydGljbGU= 51416
+IGJpY3k= 51417
+IjoKCg== 51418
+Pjw/PSQ= 51419
+0LrQu9GO0Yc= 51420
+ZWNvbWU= 51421
+RmluZGluZw== 51422
+KHBk 51423
+IHJlY3Rhbmd1bGFy 51424
+ZXN0bw== 51425
+aWhpbA== 51426
+PScnKQo= 51427
+IG1hbnNpb24= 51428
+X2ZpbHRlcmVk 51429
+YW5lZA== 51430
+UFJPRFVDVA== 51431
+TE9HWQ== 51432
+X2ly 51433
+LlJlbW90ZQ== 51434
+IGV4ZWN1dGVz 51435
+b3RlY2hub2xvZ3k= 51436
+IFBST0NFU1M= 51437
+IHJvd0luZGV4 51438
+Z2V0WA== 51439
+TXV0 51440
+aW5za3k= 51441
+KHN0cmluZ3M= 51442
+IE1veg== 51443
+Rmxvb3I= 51444
+LlN0cnVjdA== 51445
+X3ByZWRpY3Rpb24= 51446
+IGNhcnJpYWdl 51447
+IGNvbGxlY3RvcnM= 51448
+IFdoZWVscw== 51449
+IGJ1bmRsZWQ= 51450
+YXhlZA== 51451
+a29s 51452
+X2Nyb3A= 51453
+IGJsb29t 51454
+QmVzaWRlcw== 51455
+IG92ZXJyaWRkZW4= 51456
+IHN1Ym5ldA== 51457
+aWVuaWE= 51458
+Kj46Og== 51459
+IFByaW1pdGl2ZQ== 51460
+IOag 51461
+LkNoYXJhY3Rlcg== 51462
+6KGo56S6 51463
+IEFESEQ= 51464
+Uk9Z 51465
+SmFwYW5lc2U= 51466
+T1VT 51467
+OlVJQ29udHJvbEV2ZW50 51468
+IFBBTA== 51469
+aXphY2lvbg== 51470
+IGNoZXJjaGU= 51471
+b3J0aW5n 51472
+IG9yZ2Fz 51473
+LlV0Yw== 51474
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA= 51475
+XERvbWFpbg== 51476
+T1JB 51477
+IHRlcnJhY2U= 51478
+IHByaXM= 51479
+CQkJCQkJCQkJCg== 51480
+IHJhaWRz 51481
+X2luY3JlbWVudA== 51482
+IHVuanVzdA== 51483
+JG9wdGlvbnM= 51484
+b25DaGFuZ2U= 51485
+Qmxvb2Q= 51486
+RmlsbQ== 51487
+IGhhbmRpbmc= 51488
+IG11Zw== 51489
+U09MRQ== 51490
+44OV 51491
+aWNvbmR1Y3Rvcg== 51492
+IElzbGFtaXN0 51493
+ICIiKTsNCg== 51494
+LW92ZXJsYXk= 51495
+LGNvbA== 51496
+6Zw= 51497
+YXJyaW5ncw== 51498
+X2NvbnRyYWN0 51499
+CWxs 51500
+cGlw 51501
+X2VtYmVkZGluZw== 51502
+IHBlcm1pdGU= 51503
+IG1vZGVt 51504
+IHRyaWdnZXJpbmc= 51505
+KGh3bmQ= 51506
+LiIpXQo= 51507
+IHNhbnQ= 51508
+IGV4dGluY3Rpb24= 51509
+IGNsYXNoZXM= 51510
+LkF1ZGlv 51511
+IHN1bw== 51512
+Lm11bHQ= 51513
+IHNlYXNvbmVk 51514
+LlZhckNoYXI= 51515
+cG93ZXJlZA== 51516
+ImNvbnRleHQ= 51517
+IG1lbmM= 51518
+KEdyYXBoaWNz 51519
+JHdoZXJl 51520
+IHJlY3VwZXI= 51521
+YWNrbGU= 51522
+IG5ld0RhdGE= 51523
+IEJyZWFraW5n 51524
+ZXJnZWQ= 51525
+IENQUFVOSVQ= 51526
+IE11bGw= 51527
+IGtvbW10 51528
+IExlZWRz 51529
+JywnPQ== 51530
+Lm5leHRUb2tlbg== 51531
+IFJpZw== 51532
+UkVUVVJO 51533
+CXRpbWVy 51534
+fV97 51535
+IE1hcmluYQ== 51536
+IHNsb2dhbg== 51537
+SVpFRA== 51538
+T3BlbkdM 51539
+X1BhZ2U= 51540
+YXRpdmFz 51541
+IGhhemFyZHM= 51542
+J3ZhbHVl 51543
+IGNvcnBzZQ== 51544
+IEZsb3dlcnM= 51545
+X29ubGluZQ== 51546
+ZGFs 51547
+IENvbGxpc2lvbg== 51548
+w6BuZw== 51549
+IGZlcnJ5 51550
+IHBva2U= 51551
+IFRvdXJpc20= 51552
+aW5lcmFyeQ== 51553
+L1NldA== 51554
+LkVtcGxveWVl 51555
+PkA= 51556
+LHZhbA== 51557
+IE1pbGY= 51558
+YXZleg== 51559
+UmV0cnk= 51560
+LiIv 51561
+IHJvdW5kaW5n 51562
+LXBsYWNlbWVudA== 51563
+IGNlcnY= 51564
+TWV4 51565
+IE1zZ0JveA== 51566
+X3Npbms= 51567
+bWFuaWE= 51568
+X2NyZWRpdA== 51569
+R3VhcmRhcg== 51570
+IHZhbml0eQ== 51571
+IGltbXV0YWJsZQ== 51572
+IGNvbnRhbWluYXRlZA== 51573
+0LrQsNC3 51574
+5Liy 51575
+YWNoYQ== 51576
+IGhhdGg= 51577
+IGVudW1lcmF0aW9u 51578
+LmdldEJ5 51579
+4bq/dA== 51580
+IERhbw== 51581
+b2JpZXJubw== 51582
+IEd1dA== 51583
+X1BJUEU= 51584
+LmFkdg== 51585
+IEd1dGVuYmVyZw== 51586
+YWRo 51587
+66y4 51588
+ZnVzYw== 51589
+LlZL 51590
+cHRh 51591
+IEVNUA== 51592
+LkZpcnN0TmFtZQ== 51593
+IHJlYWxpemVz 51594
+LmNn 51595
+IHVuaXRl 51596
+UExJVA== 51597
+IEFiZHVs 51598
+IE1FRA== 51599
+UkFJTlQ= 51600
+IHF1ZXN0YQ== 51601
+c3RkaW4= 51602
+IGNhbG9yaWU= 51603
+CWdsQmluZA== 51604
+IGFybWE= 51605
+eWxsYW5k 51606
+T01Q 51607
+LXE= 51608
+IEtoYWw= 51609
+c2FsYXJ5 51610
+CUFORA== 51611
+c2dp 51612
+X3RoYW4= 51613
+LWJ1aWx0 51614
+ICsvLQ== 51615
+IG5hcmdz 51616
+X2xhdW5jaA== 51617
+IFNR 51618
+em9u 51619
+IEJlbmVk 51620
+X3VuaW9u 51621
+PigpOw0KDQo= 51622
+IFNpbXM= 51623
+IERhdGVz 51624
+CUNvbm5lY3Rpb24= 51625
+IFBlcmM= 51626
+Z3JhbnQ= 51627
+YW1waWw= 51628
+IGFnZ3JlZ2F0aW9u 51629
+ZXNlbGVjdA== 51630
+X1NVUA== 51631
+KHsKCg== 51632
+Lm9t 51633
+IHdt 51634
+LmNvbnRyYWN0 51635
+LU9yaWdpbg== 51636
+IGdlbWU= 51637
+ZnJlZXpl 51638
+TlVNQkVS 51639
+LmN1cnI= 51640
+IEdsYWQ= 51641
+c2xh 51642
+IFJlYg== 51643
+0LXRgdGC0LLQvg== 51644
+YXJib24= 51645
+L2NvbnRyb2xsZXJz 51646
+U2xvdHM= 51647
+LmRlZXBjb3B5 51648
+RlVMTA== 51649
+dWlyZQ== 51650
+QHN0dWRlbnQ= 51651
+4LmJ4Lit 51652
+VHJhbnNsYXRvcg== 51653
+IHByZWZlcmFibHk= 51654
+Y2hlbWlzdHJ5 51655
+IEphY29icw== 51656
+bmFy 51657
+ICgiXA== 51658
+bmVhcg== 51659
+aWZpcXVl 51660
+CWNvbHVtbg== 51661
+IG1pbnV0b3M= 51662
+aWdlcw== 51663
+IGVzdGFibGU= 51664
+LWRpc2M= 51665
+KENoYXI= 51666
+a292 51667
+ZXhhbXBsZXM= 51668
+X18oIg== 51669
+INC60LDQug== 51670
+IEJvcmlz 51671
+KGR4 51672
+c3By 51673
+IG92ZXJoYXVs 51674
+YXRvb24= 51675
+IEhhcmxleQ== 51676
+aWNhbWVudGU= 51677
+4paI4paI4paI4paI 51678
+ZXZpdHk= 51679
+dXNoZXI= 51680
+LlZpc3VhbFN0dWRpbw== 51681
+V2F2ZQ== 51682
+IE5vcm1hbGx5 51683
+c3Rvb2Q= 51684
+b3JuaW5ncw== 51685
+IGhhbmRtYWRl 51686
+KGxvZ2dpbmc= 51687
+IGNhcmNpbg== 51688
+YWNqYQ== 51689
+IHN1cGVycw== 51690
+IHNpZWdl 51691
+CUlm 51692
+IElMb2dnZXI= 51693
+VUFSVA== 51694
+QW5pbWF0aW9uRnJhbWU= 51695
+IHRhcGVz 51696
+IGFpZHM= 51697
+IENvbG9uZWw= 51698
+dmVlZG9y 51699
+IG1kbA== 51700
+cGhvbg== 51701
+RGlzbWlzcw== 51702
+QXZhaWxhYmlsaXR5 51703
+VW5pZm9ybUxvY2F0aW9u 51704
+IGlkZWFscw== 51705
+cXVldHRl 51706
+a2VpdGVu 51707
+IEVNQUlM 51708
+IE5lYg== 51709
+IHN1bW1vbmVk 51710
+IGdvdmVybm1lbnRhbA== 51711
+IEhvcnJvcg== 51712
+Y2hhbmdpbmc= 51713
+IEFjdGl2YXRl 51714
+SWxs 51715
+PHRib2R5 51716
+Y3JlYXRpdmU= 51717
+IEJMRQ== 51718
+IG1hZG5lc3M= 51719
+T3JOaWw= 51720
+IGhpbg== 51721
+xZM= 51722
+LkdldEtleQ== 51723
+X2NvbnNvbGU= 51724
+Ik91cg== 51725
+IGd1aW50 51726
+IGFtaQ== 51727
+IHJlZmxlY3RpdmU= 51728
+IGNyYWNraW5n 51729
+IFJp 51730
+UkFM 51731
+dXJzZWQ= 51732
+cHVyZQ== 51733
+IHJlcGFpcmVk 51734
+IHRpZ2Vy 51735
+IE5pY29sYXM= 51736
+VnM= 51737
+bnRo 51738
+LmV4cHJlc3Npb24= 51739
+IHNlYXM= 51740
+X0FDQ0VQVA== 51741
+IGZvcmM= 51742
+IEZyYXU= 51743
+IHRocmVzaA== 51744
+IM+A 51745
+KEJBU0U= 51746
+X09wZW4= 51747
+V3VudXNlZA== 51748
+IERvbWVzdGlj 51749
+KHByaXY= 51750
+Z3Vlc3M= 51751
+Ly8hCg== 51752
+Z2V0SXRlbQ== 51753
+KCkpCgoK 51754
+bXV0YXRpb25z 51755
+IHN0cw== 51756
+IGRlbWVudGlh 51757
+c3Bva2Vu 51758
+JHBhcmFtcw== 51759
+IHBhdHJvbnM= 51760
+IHJ1bndheQ== 51761
+IEJVWQ== 51762
+Lldhcm5pbmc= 51763
+IG5ldXRyYWxpdHk= 51764
+emhvdQ== 51765
+0YDQsNGJ 51766
+YWt0ZXI= 51767
+IENvbnN0cnVjdG9ycw== 51768
+w5NO 51769
+IFByb2dyZXNzaXZl 51770
+IEJ1cmdlcg== 51771
+IGluY3VycmVk 51772
+IGltcGxpY2l0bHk= 51773
+X2Vudmlyb25tZW50 51774
+IGV4YWNlcmI= 51775
+IGVuZHVyaW5n 51776
+c2lj 51777
+IFBhcnRpY2lwYW50cw== 51778
+X0Jsb2Nr 51779
+IGVucm9sbA== 51780
+X2VtcGxveWVl 51781
+IFBlcHBlcg== 51782
+bGF1Z2h0ZXI= 51783
+44OW 51784
+J107Pz4= 51785
+PScu 51786
+KHJlbmFtZQ== 51787
+IHNoZWx0ZXJz 51788
+IEFNQQ== 51789
+X2dhcA== 51790
+IFJFVVRFUlM= 51791
+eGFtcHA= 51792
+T01JQw== 51793
+IHBlZGlkbw== 51794
+IGTDqXZlbG9w 51795
+X18oLyoh 51796
+X29k 51797
+d2VyZQ== 51798
+X051bWJlcg== 51799
+X211bHRpcGxpZXI= 51800
+S0VFUA== 51801
+IHNob3dlcnM= 51802
+IG1hZ2U= 51803
+IHNpbm8= 51804
+Y3Jvdw== 51805
+LmlkeA== 51806
+X25vdGljZQ== 51807
+dWVpbA== 51808
+IG15cmlhZA== 51809
+IEF2YWlsYWJpbGl0eQ== 51810
+Y2VudHJhbA== 51811
+IEFCT1VU 51812
+IGluY29ycG9yYXRpbmc= 51813
+IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tCg== 51814
+X3dpZGdldHM= 51815
+IHN5c3RlbUZvbnRPZlNpemU= 51816
+w7ZydA== 51817
+L2pwZWc= 51818
+IFNNVFA= 51819
+KGJyb3dzZXI= 51820
+Z3Vucw== 51821
+c2V0dw== 51822
+X0FWQUlMQUJMRQ== 51823
+IGluY29ycG9yYXRlcw== 51824
+L2FuZHJvaWQ= 51825
+eXg= 51826
+5biD 51827
+X2xhYg== 51828
+IGxlYWtpbmc= 51829
+IEhpbnQ= 51830
+w7xuY2hlbg== 51831
+LlNjYWxl 51832
+IGZpcmV3b3Jrcw== 51833
+IGxQYXJhbQ== 51834
+YnNk 51835
+YXhvbg== 51836
+KHByZWRpY3Q= 51837
+Q29uZ3JhdHVsYXRpb25z 51838
+IFNwZWN0cnVt 51839
+SVJD 51840
+IEFkbWluaXN0cmF0aXZl 51841
+IGltcHJpc29uZWQ= 51842
+UlNwZWM= 51843
+IHJldGFpbnM= 51844
+IHNldHRsaW5n 51845
+IGNpdGF0aW9ucw== 51846
+IFdvcmxkcw== 51847
+c3RyY29udg== 51848
+b3VzYW5k 51849
+IEJlZ2lubmluZw== 51850
+IEFuZHJld3M= 51851
+IFNoYXJvbg== 51852
+RXhlY3V0aW5n 51853
+Z3JvdXBJZA== 51854
+YWRkRmllbGQ= 51855
+IGV4cGFuZHM= 51856
+IGtpbG9tZXRyZXM= 51857
+bGlua3k= 51858
+IGdycA== 51859
+SU5BVElPTg== 51860
+QnJpdGlzaA== 51861
+IGNvbXBvcnQ= 51862
+LkRhdGFHcmlkVmlld0NvbHVtbg== 51863
+IFByb2R1Y3Rpb25z 51864
+aWxkZW4= 51865
+IHVuaXg= 51866
+X2dhbGxlcnk= 51867
+X1BST1ZJRA== 51868
+b3JkZXJpbmc= 51869
+X2Fubg== 51870
+Ymg= 51871
+LkRlc2lnbg== 51872
+IHRyZWZmZW4= 51873
+IHVuZGVybGluZQ== 51874
+X251bXM= 51875
+7ZWc64uk 51876
+KXY= 51877
+dXNpemU= 51878
+IGRpc2FwcGVhcmFuY2U= 51879
+VG9Cb3VuZHM= 51880
+IHBjbA== 51881
+IFdpbm5pcGVn 51882
+IFNoZXJtYW4= 51883
+X2xhbWJkYQ== 51884
+bmFudA== 51885
+IHJvb3RWaWV3 51886
+LkZsYWdz 51887
+IGNlbnNvcnNoaXA= 51888
+c2VudGVuY2U= 51889
+LnJlYWRJbnQ= 51890
+X2Fzc2lnbm1lbnQ= 51891
+IHZlcnNjaGllZA== 51892
+IEZyYWN0aW9u 51893
+IG5hdGlvbmFsaXN0 51894
+IGp1ZWdv 51895
+IERlYWxlcg== 51896
+IHByZWRpY3Rpbmc= 51897
+YXVwdA== 51898
+aGVsbQ== 51899
+X1BSSUNF 51900
+X0RT 51901
+KCIjew== 51902
+bGlmdGluZw== 51903
+IHBvc2luZw== 51904
+IE5TTXV0YWJsZURpY3Rpb25hcnk= 51905
+IHNtYXNo 51906
+IGFraW4= 51907
+IGNhbXB1c2Vz 51908
+IE91dGxpbmU= 51909
+IEVsYXN0aWM= 51910
+X0NoZWNrZWRDaGFuZ2Vk 51911
+KElFbnVtZXJhYmxl 51912
+c3F1ZWV6ZQ== 51913
+cHR1bmU= 51914
+X0ZST05U 51915
+bWg= 51916
+IOyDneyEsQ== 51917
+UnVuV2l0aA== 51918
+IHR1cm5vdXQ= 51919
+c2libGluZ3M= 51920
+KWU= 51921
+X0FSR1VNRU5U 51922
+IEdyaWRCYWdDb25zdHJhaW50cw== 51923
+X1BPT0w= 51924
+LlJJR0hU 51925
+aWdnaW5z 51926
+dGVsZXBob25l 51927
+XEV4dGVuc2lvbg== 51928
+IEFyaXN0 51929
+aXR1cg== 51930
+IGZyaWVz 51931
+X2R1cA== 51932
+RXhwYW5kZWQ= 51933
+LXJv 51934
+IFdvcmxkd2lkZQ== 51935
+IENvcms= 51936
+w7Ns 51937
+TGlt 51938
+IGRlbm4= 51939
+UHJldHR5 51940
+IGZ5 51941
+VHJpYW5nbGU= 51942
+RmVhdHVyZWQ= 51943
+KENvbW1vbg== 51944
+X2VmZg== 51945
+ICIiDQo= 51946
+4bubaQ== 51947
+X0xJTkVBUg== 51948
+IFJpY2E= 51949
+IGNhZsOp 51950
+IGFwcGVsbA== 51951
+IG5pdmVhdQ== 51952
+ICYs 51953
+IGZhYnJpY3M= 51954
+X1BsYXllcg== 51955
+IGh5Z2llbmU= 51956
+IGRpc2FzdHJvdXM= 51957
+IHNoYXJlZEluc3RhbmNl 51958
+X3BpdGNo 51959
+cno= 51960
+ZW5tZW50 51961
+TmVhcg== 51962
+X1NUQVRT 51963
+IHN0YWlu 51964
+IEROQw== 51965
+IGlzc3U= 51966
+Xks= 51967
+CXRyZWU= 51968
+X2Jsaw== 51969
+c2V6 51970
+bGFpbg== 51971
+YW11 51972
+X293bmVk 51973
+VVNBUlQ= 51974
+Lmhhc0NsYXNz 51975
+SVNPTg== 51976
+IGZvZQ== 51977
+dXNoZWQ= 51978
+X1VOU0lHTkVE 51979
+IGluZGV4aW5n 51980
+IEZpcmViYXNlQXV0aA== 51981
+IGxpdGVyYWN5 51982
+IFNVUg== 51983
+IENvbHRz 51984
+YmVjdWU= 51985
+IEludHJv 51986
+IGNoYW90aWM= 51987
+IGFuaQ== 51988
+IEFubmll 51989
+xrDhu50= 51990
+LmR4 51991
+ZGlzY29ubmVjdA== 51992
+IGFyY2hpdmVk 51993
+W0xpc3Q= 51994
+PU4= 51995
+LnByZXNlbnRhdGlvbg== 51996
+UmVzdGF1cmFudA== 51997
+IHJvY2tldHM= 51998
+PWh0dHBz 51999
+L29w 52000
+IHB1cnNl 52001
+IEtyaXM= 52002
+IGNvcmFs 52003
+c2V0UGFyYW1ldGVy 52004
+IGlycmln 52005
+UXVlZW4= 52006
+TlNEYXRh 52007
+IHZhc3RseQ== 52008
+LkZpbGVz 52009
+IGZlbWluaXNt 52010
+KFN0cmVhbQ== 52011
+IGF0cmli 52012
+IGxpcXVpZGl0eQ== 52013
+PEZpbGU= 52014
+dHJhZw== 52015
+W2NvbnRhaW5z 52016
+IGhpbmRp 52017
+CWNw 52018
+aG9tZXBhZ2U= 52019
+IHN1cnBhc3M= 52020
+IGRheWxpZ2h0 52021
+YXV0aG9yaXpl 52022
+IENvbnNlcXVlbnRseQ== 52023
+QXN5bmNSZXN1bHQ= 52024
+IERpYXJ5 52025
+LlBhdHRlcm4= 52026
+LiovCg== 52027
+ZW5zY2hhZnQ= 52028
+IEp1ZGljaWFyeQ== 52029
+QWR1bHQ= 52030
+KCY6 52031
+IGplb3BhcmQ= 52032
+IEJsaXp6YXJk 52033
+IGdn 52034
+IjsvLw== 52035
+WEhS 52036
+IHBhc3N3ZA== 52037
+Pn0= 52038
+JyksJw== 52039
+IGNvbXBhcmF0b3I= 52040
+LmNoYWlu 52041
+IGluc3VyZWQ= 52042
+X0VER0U= 52043
+IHR5bGtv 52044
+X01BSk9S 52045
+d2F2 52046
+XEZpbGU= 52047
+RW50cg== 52048
+J2FwcA== 52049
+IGZvcmdpdmVuZXNz 52050
+CWRzdA== 52051
+Ijot 52052
+Lm1vbg== 52053
+ICgKCg== 52054
+IGNhcGl0YQ== 52055
+IGluaXRDb21wb25lbnRz 52056
+IHN3b3Jkcw== 52057
+IE91dHB1dFN0cmVhbQ== 52058
+IGhlYXJz 52059
+IFNQQUNF 52060
+LWluc3BpcmVk 52061
+X2Jvb3Q= 52062
+Lm5vbmU= 52063
+LmdldElucHV0U3RyZWFt 52064
+IGRldmlzZQ== 52065
+IHBlZGlhdHJpYw== 52066
+YW5zaQ== 52067
+X3BhcnRpYWw= 52068
+IHNoYXJk 52069
+IGZ1cmlvdXM= 52070
+IGRyYXdhYmxl 52071
+JSku 52072
+KGVt 52073
+IEJha2U= 52074
+CXBlcnJvcg== 52075
+IFJlbGlnaW91cw== 52076
+LSIr 52077
+CQkJICAgICAgICAgICA= 52078
+IFNlY3JldHM= 52079
+KG5vcm1hbA== 52080
+QUNFUw== 52081
+IFN0b2NraG9sbQ== 52082
+LW5vcm1hbA== 52083
+IGFjY3VzdG9tZWQ= 52084
+IGJvdXRpcXVl 52085
+IFN3aW5n 52086
+IGZpbQ== 52087
+IFBV 52088
+LlNvY2tldA== 52089
+ICciJw== 52090
+YW5q 52091
+TWFudWFs 52092
+IG11amVy 52093
+IHBoeXNpb2xvZ2ljYWw= 52094
+Y29udGFpbg== 52095
+TWVyZ2U= 52096
+IHN1YXM= 52097
+ICd7Ig== 52098
+bmVnbw== 52099
+IHN1YnNjcmliZWQ= 52100
+dG9hc3Q= 52101
+X1ZFUkJPU0U= 52102
+IGtuaXQ= 52103
+IEFydGlzdHM= 52104
+IGhlYXJ0YmVhdA== 52105
+IGZpcmVmaWdodGVycw== 52106
+c3Nh 52107
+W3s= 52108
+IHVuZGVyc2NvcmU= 52109
+IGhpc3Rvcmllcw== 52110
+aWdtb2lk 52111
+RmllbGRWYWx1ZQ== 52112
+VG9BZGQ= 52113
+LkNv 52114
+IEhhcm9sZA== 52115
+QXZvaWQ= 52116
+aWdoYm91cnM= 52117
+b3JkZQ== 52118
+IHRydXRocw== 52119
+L2Fs 52120
+IHdpcmVk 52121
+IEl0YWxpYQ== 52122
+IHNlcnZpY2lvcw== 52123
+IEFVRElP 52124
+ICciKw== 52125
+IHB1bXBpbmc= 52126
+IENsZW1lbnQ= 52127
+w4NP 52128
+5Y6f 52129
+Pm4= 52130
+IHN0clNxbA== 52131
+amRiYw== 52132
+4oE= 52133
+CVNFVA== 52134
+IEJVRkZFUg== 52135
+Oi8vIg== 52136
+IGNpcmN1bXN0YW5jZQ== 52137
+VUlUYWJsZVZpZXdDZWxs 52138
+LnZlcnRpY2Fs 52139
+IEpvaG5z 52140
+dG9saXN0 52141
+IGRyaXZld2F5 52142
+IGxlYXJuZXJz 52143
+dG9iZXI= 52144
+d2lubmVy 52145
+LXlvdXI= 52146
+LnN0YXRlcw== 52147
+SE0= 52148
+IGdyYWRpZW50cw== 52149
+IHNlaXp1cmU= 52150
+IG1hdGVy 52151
+IGRldGFs 52152
+IFJlZHVjZQ== 52153
+KG1vdXNl 52154
+IFJlU2hhcnBlcg== 52155
+LXJvdXRpbmc= 52156
+INi0 52157
+IGpvaW50bHk= 52158
+IEZhbWls 52159
+PE1lc3NhZ2U= 52160
+ZXhwaXJl 52161
+X3RyYWRl 52162
+4oCmLi4= 52163
+IEZVTkNUSU9OUw== 52164
+IHhlbg== 52165
+IHt9Ow== 52166
+RmFi 52167
+IGZlYXN0 52168
+KERi 52169
+Rmlyc3RSZXNwb25kZXI= 52170
+xLFsxLE= 52171
+IG1heFZhbHVl 52172
+IC06 52173
+YXB0aWM= 52174
+Lkdzb24= 52175
+IFJvdmVy 52176
+X2Nu 52177
+bG91ZA== 52178
+IGNoYW1iZXJz 52179
+INC30LDQtA== 52180
+LmZvcmVhY2g= 52181
+LmdldEVtYWls 52182
+55+l 52183
+Lk5vZGVz 52184
+IFZX 52185
+IFdhaXRpbmc= 52186
+KFF0Q29yZQ== 52187
+IHPDs2xv 52188
+cnE= 52189
+YW5ndWFyZA== 52190
+IHJlc2VtYmxlcw== 52191
+Oltb 52192
+IGdlZA== 52193
+X0VQ 52194
+KEFjdGl2aXR5 52195
+IElzbg== 52196
+IENydXNoZXJz 52197
+X1JVTlRJTUU= 52198
+CW9wZW4= 52199
+IEhpZ2hsaWdodHM= 52200
+w6lyYXRpb24= 52201
+IHllbGxpbmc= 52202
+IExJR0hU 52203
+UGhvdA== 52204
+dmVuZ2U= 52205
+IFN1c3A= 52206
+IENocg== 52207
+LkRpc3RhbmNl 52208
+YXJzaW1w 52209
+bGljYXM= 52210
+Lk1vbg== 52211
+IHN1Y2tlZA== 52212
+cHJpbnRlZA== 52213
+bXV0ZQ== 52214
+IHNldEVycm9y 52215
+Lk9wdGlvbg== 52216
+IGltcGFpcm1lbnQ= 52217
+bm9pc2U= 52218
+IHBhcnRuZXJlZA== 52219
+w40= 52220
+ZGVucw== 52221
+aWN6 52222
+IHdhaXRGb3I= 52223
+IG92ZXJsb29raW5n 52224
+IEZPUk1BVA== 52225
+IFRTdHJpbmc= 52226
+IHJlbnRpbmc= 52227
+CWNvbXBvbmVudA== 52228
+LkZyZWU= 52229
+IExhdW5jaGVy 52230
+PWRhdGU= 52231
+IFBvZHM= 52232
+QUdNRU5U 52233
+Q29kaWdv 52234
+Qml0RmllbGRz 52235
+IHViaXF1 52236
+LWNhcm91c2Vs 52237
+IFNpbXVsYXRvcg== 52238
+aW5vZGU= 52239
+J10pewo= 52240
+IEJhZ2hk 52241
+IG5vcnRod2VzdA== 52242
+aHRha2luZw== 52243
+PCY= 52244
+IHRyYW0= 52245
+IGZvcndhcmRlZA== 52246
+IGVycm9yTXNn 52247
+X0FTU0lHTg== 52248
+IEVudGl0aWVz 52249
+LlBhcnQ= 52250
+cmVhdHVyZQ== 52251
+KFVyaQ== 52252
+IERyaXZpbmc= 52253
+IGludmFzaXZl 52254
+aWdyYXRpb25CdWlsZGVy 52255
+b3NhdXJz 52256
+CXBvcnQ= 52257
+IGJyYW4= 52258
+aXR0aW5ncw== 52259
+RG9vcg== 52260
+IHsl 52261
+KGxpbWl0 52262
+IHNxdWFyZWQ= 52263
+IERJU1BMQVk= 52264
+LkFjY2VwdA== 52265
+LmJhc2VVcmw= 52266
+LkVudGVy 52267
+IC4uLikK 52268
+IG93bA== 52269
+IHNsYXRlZA== 52270
+LmZlY2hh 52271
+X1NFRw== 52272
+PXsk 52273
+IE9OTElORQ== 52274
+T05Z 52275
+INC00LDQvdC90YvRhQ== 52276
+b250ZQ== 52277
+X0NMSUNL 52278
+U2E= 52279
+SW1wb3J0YW50 52280
+IGNhcm91c2Vs 52281
+IGFwcGVhbGVk 52282
+IE5pZQ== 52283
+L2Jvb2s= 52284
+W10+KA== 52285
+IHhtYXg= 52286
+IGxhbmdl 52287
+LlN1cHByZXNz 52288
+IFRoaW5raW5n 52289
+QWRkcmVzc2Vz 52290
+IFNhbGx5 52291
+LVRW 52292
+IENoYXJsZXN0b24= 52293
+KSIKCg== 52294
+IHRhbGx5 52295
+IHVsbA== 52296
+IGxvY2FsZXM= 52297
+ZXdhbg== 52298
+IGluY3JlbWVudGFs 52299
+65Cc 52300
+IGNhcmV0 52301
+anVyZQ== 52302
+IGRvcg== 52303
+IGxvY2FsaXphdGlvbg== 52304
+IHNlYWZvb2Q= 52305
+IFJ1YmJlcg== 52306
+LlRoZXJl 52307
+IEZpc2hpbmc= 52308
+WVlZ 52309
+bWFnZQ== 52310
+IEZsZXhpYmxl 52311
+IEdFTkVSQUw= 52312
+ZWth 52313
+IHRocml2aW5n 52314
+IHNpcw== 52315
+IGJvdXJnZW9pcw== 52316
+RmFrZQ== 52317
+LFwi 52318
+INC+0LQ= 52319
+Q09S 52320
+LWVmZmVjdGl2ZQ== 52321
+IHNrdQ== 52322
+ZWRseQ== 52323
+IyMKCg== 52324
+IEhvbGx5 52325
+IEZMQVNI 52326
+L1RS 52327
+Lm5z 52328
+cHJvYmU= 52329
+Z2lmdA== 52330
+b3dpdHo= 52331
+LW5hdmJhcg== 52332
+IHNhY2s= 52333
+57qn 52334
+IFRocmVhdA== 52335
+WkE= 52336
+WE0= 52337
+JyksCgo= 52338
+IExMVk0= 52339
+YXN6 52340
+RWRpdGVk 52341
+V2l0aFN0cmluZw== 52342
+U2lsdmVy 52343
+eW5h 52344
+X3JlbmRlcmVy 52345
+CURFQlVH 52346
+KG9wZXJhdGlvbg== 52347
+IFNsb3Rz 52348
+IEF1YnVybg== 52349
+eGVj 52350
+IGhvbW9zZXh1YWxpdHk= 52351
+LlJlc3RDb250cm9sbGVy 52352
+ZXJzaXZl 52353
+IHByb2ZpbA== 52354
+IE15YW5tYXI= 52355
+cm9zc2U= 52356
+X0lSUW4= 52357
+IHNlbmRNZXNzYWdl 52358
+IHRlY2huaWNpYW5z 52359
+IG1hbmU= 52360
+Y29tbW9ucw== 52361
+IHNocmVkZA== 52362
+Qm9vc3Q= 52363
+IHN5bXBhdGhldGlj 52364
+LWVmZg== 52365
+IENlcnRhaW5seQ== 52366
+IHfDpGg= 52367
+IFJvY2hlc3Rlcg== 52368
+dWNjaQ== 52369
+dXJt 52370
+ZW1wb3I= 52371
+ICIiOgo= 52372
+LXNwYWNpbmc= 52373
+IHNpeHR5 52374
+IOKckw== 52375
+X3JlcG9ydGluZw== 52376
+V2ls 52377
+b3lv 52378
+IGRpZFNlbGVjdA== 52379
+LmdldExvbmc= 52380
+LnNldEVycm9y 52381
+X25j 52382
+IERvbmc= 52383
+CWFzeW5j 52384
+IEhpZ2hseQ== 52385
+XToNCg== 52386
+TGVha3M= 52387
+LC4uLgo= 52388
+dmFsdWF0b3I= 52389
+ZGljdGlvbnM= 52390
+b3hlbA== 52391
+IGdlc3R1cmVz 52392
+PSI/ 52393
+YmFncw== 52394
+IFJlbGllZg== 52395
+c3Vic2V0ZXE= 52396
+KG5hbWVzcGFjZQ== 52397
+fXw= 52398
+IG1pY3JvYmk= 52399
+IHB1cml0eQ== 52400
+Y2hpbw== 52401
+fT8= 52402
+X01VVA== 52403
+X2FjdGl2YXRpb24= 52404
+IFBpcmF0ZXM= 52405
+ICUj 52406
+aWZpY2FjacOzbg== 52407
+5Ys= 52408
+IE5SQQ== 52409
+w6dvbg== 52410
+fSkoKTsK 52411
+IENoZXN0ZXI= 52412
+4oCT4oCT 52413
+Z2V0Q29ubmVjdGlvbg== 52414
+LmFyZ3VtZW50cw== 52415
+RmV0Y2hpbmc= 52416
+IEZyeQ== 52417
+IERpdA== 52418
+IHppY2g= 52419
+cGFzdA== 52420
+LWxpYnJhcnk= 52421
+IEhheWVz 52422
+IGJvdW50eQ== 52423
+IFNwcmluZ2ZpZWxk 52424
+UE9S 52425
+IEFQUg== 52426
+IEVtYmFzc3k= 52427
+UVVFU1RJT04= 52428
+IFNvbGRpZXI= 52429
+ZXJ0YXM= 52430
+IE5PUk1BTA== 52431
+IGR1cw== 52432
+Ym9sdA== 52433
+IGRvcnQ= 52434
+IExpZnQ= 52435
+IGdldFJhbmRvbQ== 52436
+LlJ1bldpdGg= 52437
+LCksCg== 52438
+IHZhcmFyZ2lu 52439
+IGhhbmRsZUNsaWNr 52440
+XEh0bWw= 52441
+IGhvbW1lcw== 52442
+Y2lkYWRl 52443
+KGVw 52444
+SmE= 52445
+L2RpYWxvZw== 52446
+LnJhdGU= 52447
+IFdlaQ== 52448
+ZnVsbHNjcmVlbg== 52449
+IE5Vbml0 52450
+Lm1lYXN1cmU= 52451
+VmFscw== 52452
+IFNpZ25lZA== 52453
+IHJ1cw== 52454
+IHJhZnQ= 52455
+IEJsb25kZQ== 52456
+IG5ldHM= 52457
+IE1ldHJpYw== 52458
+aWNoVGV4dEJveA== 52459
+IHVyZQ== 52460
+IGludGVycmFjaWFs 52461
+ICd9Cg== 52462
+KHN0b3JhZ2U= 52463
+SW50ZWdyYXRpb24= 52464
+IGJhbmNv 52465
+QVNZ 52466
+IGppbnQ= 52467
+IGRlZ3JhZGF0aW9u 52468
+IEhBTkQ= 52469
+dWVyZG8= 52470
+PScn 52471
+IHN0cm9rZXM= 52472
+cmV3cml0ZQ== 52473
+KFNldA== 52474
+IE1hdERpYWxvZw== 52475
+IGRvc3NpZXI= 52476
+CWFuZA== 52477
+QURESU5H 52478
+IG11dHVhbGx5 52479
+IHByZWNlZGVk 52480
+fX07Cg== 52481
+IHN1YnR5cGU= 52482
+IHJlc29sdmluZw== 52483
+IGdlb21ldHJpYw== 52484
+W2NvbHVtbg== 52485
+IENUUkw= 52486
+IEhM 52487
+IGRhaA== 52488
+ICg7Ow== 52489
+UmFpbHM= 52490
+w5w= 52491
+IEdlbmVyYXRlcw== 52492
+LUxlbmd0aA== 52493
+cGVkbw== 52494
+b2dlbm91cw== 52495
+IFJvYmVydHNvbg== 52496
+LkJvb2w= 52497
+b2RlcnM= 52498
+X0FHRU5U 52499
+cGFzc3dk 52500
+IE5vZGVz 52501
+LmJp 52502
+IFdC 52503
+IHByb3BoZXQ= 52504
+c2xhdmU= 52505
+IOW8 52506
+IHdlaWw= 52507
+JTwv 52508
+IGNhcmJz 52509
+5rC0 52510
+IGV4cHJlc3NseQ== 52511
+XHhk 52512
+LWV5ZWQ= 52513
+IENyZWF0dXJl 52514
+Y29udGFpbmVk 52515
+KFNJRw== 52516
+IEVuaGFuY2VtZW50 52517
+IENvcnM= 52518
+R2Fs 52519
+X1NJR05BTA== 52520
+cmVpbnRlcnByZXQ= 52521
+IFFQdXNoQnV0dG9u 52522
+X05vbmU= 52523
+IGdlbm9jaWRl 52524
+IFNlYWw= 52525
+5LiK5Lyg 52526
+KHBlcg== 52527
+0LvRjNGC 52528
+IMOgcw== 52529
+LlRlbXBsYXRl 52530
+ICkNCg0K 52531
+LnNpbmdsZXRvbg== 52532
+CXNsZWVw 52533
+IHNwYXduZWQ= 52534
+IHBvc3Nlc3Npb25z 52535
+Z2V0Q29uZmln 52536
+IHRhaQ== 52537
+bHVkZQ== 52538
+IE1ldGVy 52539
+IGJpYmxpY2Fs 52540
+bWFyc2hhbGxlcg== 52541
+LlRvb2xraXQ= 52542
+IExlc2JpYW4= 52543
+LnNtYXJ0 52544
+IGJveWNvdHQ= 52545
+IGZyeQ== 52546
+LWRlc2M= 52547
+X1NlcnZpY2U= 52548
+IG1hY2h0 52549
+IENhaXJv 52550
+w6Bp 52551
+X3ByZXZpb3Vz 52552
+LnRyYW5zcG9ydA== 52553
+TWVkaWNhbA== 52554
+Q0dQb2ludA== 52555
+UVVBUkU= 52556
+IGJyaWdodGVy 52557
+IGNoZWNrQm94 52558
+IEZPVU5E 52559
+LmJyYW5jaA== 52560
+IGJsYWg= 52561
+IFByZWx1ZGU= 52562
+T2ZmbGluZQ== 52563
+TGlzdGluZw== 52564
+LyoqLyou 52565
+IEpS 52566
+cGhhbnRz 52567
+Z2V0WQ== 52568
+LkZpbmRDb250cm9s 52569
+Ii4uLg== 52570
+0LrQtQ== 52571
+SFJFU1VMVA== 52572
+IGNoZWNrbGlzdA== 52573
+KGFzdA== 52574
+IGJvcnJvd2luZw== 52575
+4oCmYW5k 52576
+INCX 52577
+IHByb2N1cmVtZW50 52578
+LXRhc2s= 52579
+X2hhbA== 52580
+UGxheWxpc3Q= 52581
+LnN0YXI= 52582
+X1NVUFBPUlRFRA== 52583
+QVNN 52584
+JUE= 52585
+cmVzdHJpYWw= 52586
+INC40YHQvw== 52587
+IHBhZ2Vy 52588
+IERpYWJldGVz 52589
+IE1haGFy 52590
+dGFu 52591
+QWN0dWFsbHk= 52592
+Pi8v 52593
+IFhW 52594
+4KeN 52595
+IHNlamE= 52596
+LnZpc3VhbA== 52597
+a2tlcg== 52598
+XTsKCgo= 52599
+IHR5cGVOYW1l 52600
+LkJ1dA== 52601
+Q2xpZW50UmVjdA== 52602
+aWNhbHM= 52603
+IERqYW5nbw== 52604
+IFJhcGU= 52605
+IHBheWRheQ== 52606
+KHJlc291cmNlcw== 52607
+LmJpeg== 52608
+dG9p 52609
+KFJ1bnRpbWU= 52610
+IER5bmFtaWNz 52611
+IEludmFsaWRPcGVyYXRpb25FeGNlcHRpb24= 52612
+KHR5cGVz 52613
+IFRhYnM= 52614
+Lk1pZGRsZUxlZnQ= 52615
+eGFi 52616
+IF8o 52617
+IERyZWFtcw== 52618
+X0dyb3Vw 52619
+KGNvcg== 52620
+TGVhZGVy 52621
+IGdyYWR1YWw= 52622
+KEJpZ0RlY2ltYWw= 52623
+IHRleHRhcmVh 52624
+bGV0aW9u 52625
+IEZpbmlzaGVk 52626
+IFBvbGU= 52627
+IHRhcHBpbmc= 52628
+Jig= 52629
+IGZsaXJ0 52630
+IHRlcnJpZmllZA== 52631
+IHBhZHk= 52632
+ZXJlZw== 52633
+ZWxkb20= 52634
+IHN0YXRpb25hcnk= 52635
+IHBvbnk= 52636
+IFJFR0lTVEVS 52637
+X2FjY2Vs 52638
+IEhlcno= 52639
+IG1hdHJpeg== 52640
+IENhZg== 52641
+eGFj 52642
+YXNjdXM= 52643
+IGVubGFyZ2U= 52644
+QUNIRUQ= 52645
+eXl2YWw= 52646
+IHNpYw== 52647
+IENhbmFs 52648
+OnY= 52649
+PT8s 52650
+IEltcHJvdmVtZW50 52651
+P30iLA== 52652
+TlNPYmplY3Q= 52653
+IGVzY2FwaW5n 52654
+IE51bGxhYmxl 52655
+IGjDpA== 52656
+d2FudA== 52657
+RWxpbWluYXI= 52658
+IENMTG9jYXRpb24= 52659
+IHJldXNlSWRlbnRpZmllcg== 52660
+QnVmZmVyU2l6ZQ== 52661
+w59lcg== 52662
+IEFza2Vk 52663
+J11dLAo= 52664
+IHNoaWVsZHM= 52665
+Z3JhbmQ= 52666
+IFRvd25zaGlw 52667
+IFB1Yk1lZA== 52668
+ZWN0bA== 52669
+Zml2ZQ== 52670
+IFJlYWN0aXZlRm9ybXNNb2R1bGU= 52671
+IEdMZW51bQ== 52672
+RGFy 52673
+aWZhY2U= 52674
+LWluZGVudA== 52675
+Rm9ybXVsYQ== 52676
+LnNuYXBzaG90 52677
+Q09NUEFSRQ== 52678
+IGJlbHRz 52679
+CWNhY2hl 52680
+bGRhdGE= 52681
+IGVkYWQ= 52682
+IEJPWA== 52683
+KGNhcnQ= 52684
+X0xBWU9VVA== 52685
+IGZmbHVzaA== 52686
+IExPUw== 52687
+IFNvcnRlZA== 52688
+LnNsaWRl 52689
+IHRpamQ= 52690
+IFRleGFucw== 52691
+IFB1cmNo 52692
+IExldmVscw== 52693
+IHNlbWFudGljcw== 52694
+IFRlaHJhbg== 52695
+Ym1w 52696
+LnVybGVuY29kZWQ= 52697
+X3hsYWJlbA== 52698
+KGd1bHA= 52699
+IEJ1dHRvbnM= 52700
+IEJyb2tlcg== 52701
+55uR5ZCs 52702
+JGVtYWls 52703
+2ZA= 52704
+IGNsYXNzaWNz 52705
+Y29tcG9zZQ== 52706
+KGJz 52707
+IHVuaGVhbHRoeQ== 52708
+RXhlcmNpc2U= 52709
+Y3JldHM= 52710
+IFBhcnM= 52711
+IERldGVybWluZXM= 52712
+YWZvcnQ= 52713
+KG9icw== 52714
+IG5hc3Q= 52715
+IGlocmVu 52716
+IHJveWFsdHk= 52717
+c2VyaWFsaXplcg== 52718
+aWV1eA== 52719
+ICAgICAgICAgICAgICAgICAgICAgIAo= 52720
+ZXhlY3V0aW9u 52721
+IHZpZXdDb250cm9sbGVy 52722
+IHJlcHJv 52723
+LnBl 52724
+IGNhcGl0YWxpemU= 52725
+5Ye7 52726
+IHR1bm5lbHM= 52727
+LkRBVEE= 52728
+cGlyaXQ= 52729
+Q29sbGVjdGlvbnM= 52730
+KX19 52731
+IE9E 52732
+IGZ1enp5 52733
+SW1tZWRpYXRl 52734
+bGo= 52735
+Oz8+Ig== 52736
+W3Zhcg== 52737
+IHZvbGF0aWxpdHk= 52738
+cmVnbG8= 52739
+IHByb2xpZmVyYXRpb24= 52740
+IG9yYWNsZQ== 52741
+IEN2 52742
+IG51bmNh 52743
+UFJJTlRG 52744
+IGJyZWFrcG9pbnQ= 52745
+LkVO 52746
+IGJlc3Rlbg== 52747
+IHJlYmVsbGlvbg== 52748
+UGF1c2Vk 52749
+IGZsb3du 52750
+IHZpY2luaXR5 52751
+d3JpZ2h0 52752
+LGNw 52753
+aXNjaW5n 52754
+b3VjaGVycw== 52755
+QXNo 52756
+eWFy 52757
+IEVq 52758
+cmVwcmVzZW50ZWQ= 52759
+b2RpYw== 52760
+LmNyb3Nz 52761
+IGNyZWF0aW9ucw== 52762
+IFBhYmxv 52763
+ZmVzdA== 52764
+IEhpbHRvbg== 52765
+UmVwb3J0ZXI= 52766
+IERpbA== 52767
+aWxlbmFtZXM= 52768
+IGV4cGVuZGl0dXJlcw== 52769
+X0VESVRPUg== 52770
+IEFyaWFs 52771
+IHBsdW5n 52772
+IHVubmFtZWQ= 52773
+T3JFbHNl 52774
+IHJlY3JlYXRl 52775
+IEhlYXJ0cw== 52776
+PmFsZXJ0 52777
+LmdldFBhc3N3b3Jk 52778
+IE11c3Rhbmc= 52779
+Vks= 52780
+IGFjY29tcGxpc2htZW50cw== 52781
+QXBwZW5kaW5n 52782
+IENheQ== 52783
+IFVzZXJNb2RlbA== 52784
+IHN1YnN5c3RlbQ== 52785
+TGVnYWw= 52786
+eW5jaHJvbml6ZQ== 52787
+X1BFUk1JU1NJT04= 52788
+IEFwYXJ0bWVudA== 52789
+bGlnZQ== 52790
+IGFmZmlsaWF0aW9u 52791
+KERFQlVH 52792
+VHM= 52793
+IENvbG9yaW5n 52794
+IFdvaG4= 52795
+bmljZQ== 52796
+KGxpc3Rh 52797
+4LE= 52798
+cGxveW1lbnQ= 52799
+44G+44Gf 52800
+5aW9 52801
+c3Vic3Q= 52802
+J11dWyc= 52803
+YWJvbA== 52804
+PSdf 52805
+4KeN4KY= 52806
+b3JwaGlzbQ== 52807
+LmxpdGVyYWw= 52808
+IFBsdWc= 52809
+IG13 52810
+b21hbA== 52811
+ICInIiw= 52812
+dXNp 52813
+IHNpZ2hlZA== 52814
+aWN1bHR1cmFs 52815
+Lios 52816
+IFByb3N0aXQ= 52817
+KGNvbnNvbGU= 52818
+SVBMRQ== 52819
+IFRyYXA= 52820
+WFI= 52821
+IEVkaXRvckdVSUxheW91dA== 52822
+X3ZvY2Fi 52823
+IGluY29tcGF0aWJsZQ== 52824
+IHVuY29uc3RpdHV0aW9uYWw= 52825
+LWxh 52826
+IGVyb3RpcXVl 52827
+IGRlcHV0aWVz 52828
+cXVpc2l0aW9ucw== 52829
+bmV3VmFsdWU= 52830
+YWRpYQ== 52831
+IGh3bmQ= 52832
+Z2luZ3M= 52833
+IFZhcw== 52834
+IEluY3JlbWVudA== 52835
+IEZsaW50 52836
+YW1iaWE= 52837
+X1BvaW50 52838
+LWRpc3BsYXk= 52839
+IEZ1bm55 52840
+LnRvYXN0 52841
+LmRhcms= 52842
+QmluZGluZ3M= 52843
+IGRlc2NyaXB0aXZl 52844
+YXJlbmQ= 52845
+LlJldA== 52846
+IHJlY3Vyc2l2ZWx5 52847
+IE1r 52848
+IFRJTEU= 52849
+LmNyZWF0ZVRleHROb2Rl 52850
+IFJBVw== 52851
+IGluZmx1eA== 52852
+54mp 52853
+VG9r 52854
+LWJvYXJk 52855
+UmVjb3JkaW5n 52856
+U3RyZW5ndGg= 52857
+IHJhaW5mYWxs 52858
+KGRk 52859
+LmZ4bWw= 52860
+bmV0cw== 52861
+LkltYWdpbmc= 52862
+IEJJT1M= 52863
+XSsi 52864
+T0U= 52865
+IHJlc2lkZW5jeQ== 52866
+WkU= 52867
+V0I= 52868
+LnNwYW4= 52869
+X2RlZmluZWQ= 52870
+Qk9U 52871
+Pm51bGw= 52872
+Zm9ybURhdGE= 52873
+Q3BwTWV0aG9kSW5pdGlhbGl6ZWQ= 52874
+X1VTRVJT 52875
+IE5vdmVs 52876
+aW5za2k= 52877
+PntA 52878
+ZXR0bw== 52879
+bmF0dXJhbA== 52880
+IFN0cmljdA== 52881
+Onc= 52882
+LnNhZmU= 52883
+IHRvd2Vscw== 52884
+4bqtdA== 52885
+LmdzdWI= 52886
+66M= 52887
+aW5xdQ== 52888
+IGFpZGVz 52889
+IGluY29t 52890
+Z2V0dGVy 52891
+IHdhc2hlcg== 52892
+YWN0b3JpZXM= 52893
+IGdldHRlcnM= 52894
+bWl0ZQ== 52895
+X3NvdXJjZXM= 52896
+IGhhcm1sZXNz 52897
+IHVub3M= 52898
+cHJlaGVuc2l2ZQ== 52899
+IG5vZG8= 52900
+IGdlb2dyYXBoaWNhbA== 52901
+IFNlbGVjdExpc3Q= 52902
+LlNjcmlwdA== 52903
+LkVudW1z 52904
+IEVOVEVS 52905
+d2FsZA== 52906
+IEJhcm9u 52907
+IHBhcnRpY3Vs 52908
+LmN1cnJlbnRQYWdl 52909
+QFRyYW5zYWN0aW9uYWw= 52910
+W2xpbmU= 52911
+CWRlcw== 52912
+SmFzb24= 52913
+LmdldENvdW50 52914
+IFBlbm55 52915
+IFBheWxvYWQ= 52916
+c2hhcnA= 52917
+W3JpZ2h0 52918
+dmVudGE= 52919
+IGFwbA== 52920
+IHByb2R1aXRz 52921
+IG90dA== 52922
+VHJhY2tz 52923
+LkFuZHJvaWQ= 52924
+IHNpbGljb25l 52925
+IEVMU0U= 52926
+YW5pbWF0aW9ucw== 52927
+dWx0dXJlSW5mbw== 52928
+IGJsdWVwcmludA== 52929
+b2ZzdHJlYW0= 52930
+IFtdW10= 52931
+IFNlcnZl 52932
+IHRyaWc= 52933
+CXNlcnZpY2U= 52934
+IFN0cmF0 52935
+IFNhdmFnZQ== 52936
+IG9ianM= 52937
+IE5vdGlmaWNhdGlvbnM= 52938
+LHBvcw== 52939
+VGhpbmc= 52940
+IFJCSQ== 52941
+b3BhdGh5 52942
+IG5hdWdodHk= 52943
+bGJz 52944
+ZXByb20= 52945
+PiIu 52946
+IHBpb25lZXI= 52947
+IGphcGFuZXNl 52948
+QXVk 52949
+IGFsbGV5 52950
+IFBldHNj 52951
+J10/Pg== 52952
+IEtpbGxlcg== 52953
+LmdldEFic29sdXRlUGF0aA== 52954
+X2NhcHM= 52955
+xas= 52956
+IHN1YnN0cmF0ZQ== 52957
+LmFzc2VydElu 52958
+7JWE 52959
+IHRoeXJvaWQ= 52960
+IERlbHV4ZQ== 52961
+IGZhY3RvcmlhbA== 52962
+IHByZXNzZXM= 52963
+IEFjY29t 52964
+PW9wZW4= 52965
+LmdldFM= 52966
+IGV4cGxvcmVy 52967
+IHJlc2lkZXM= 52968
+QXNzb2NpYXRlZA== 52969
+IHRyYW5zZm9ybWF0aW9ucw== 52970
+VHU= 52971
+IFJpY2hhcmRz 52972
+X2JpcnRo 52973
+PSN7 52974
+LXNwZQ== 52975
+KG5k 52976
+IHZpc3VhbHM= 52977
+X3N0YW1w 52978
+IHRlcm1pbmFscw== 52979
+cm91dGluZQ== 52980
+KioqLwo= 52981
+IEphYg== 52982
+S0w= 52983
+Q29udHJpYg== 52984
+IHNvdXRod2VzdA== 52985
+IFBlcA== 52986
+CWVudGl0eQ== 52987
+IGxpbmVy 52988
+LlN0YXR1c09L 52989
+IFNjaHVs 52990
+KENM 52991
+IG1pam4= 52992
+YXN0b3M= 52993
+X2RpZ2VzdA== 52994
+IHBlcnNpc3RlZA== 52995
+LWNvbnRhY3Q= 52996
+IG9kb3I= 52997
+IGRpc2NvdmVyaWVz 52998
+X0ZJRUxEUw== 52999
+Rmx5 53000
+IHJ6 53001
+IExpc3Rh 53002
+UmVzZXJ2ZWQ= 53003
+dGF4b25vbXk= 53004
+KXNlY3Rpb24= 53005
+LyIpCg== 53006
+L3JlcXVlc3Q= 53007
+IHNvbWVkYXk= 53008
+Y2l0aWVz 53009
+L2ZpcmU= 53010
+IG9iamVjdGlvbnM= 53011
+CURFQ0xBUkU= 53012
+Lm5hdmlnYXRpb25JdGVt 53013
+LnNldGRlZmF1bHQ= 53014
+cmV0dXJuVmFsdWU= 53015
+VUNDRUVERUQ= 53016
+IG9ibGlnZWQ= 53017
+IFFhZWRh 53018
+IGh5c3Rlcg== 53019
+ZXN0aGVz 53020
+ZGlzdGluY3Q= 53021
+w6B5 53022
+IENvbWJv 53023
+CXNm 53024
+IOKK 53025
+IGRpc2NyZXBhbg== 53026
+IGluc2lnbg== 53027
+IFJFU1VMVFM= 53028
+IFZhbGlkYXRpb25FcnJvcg== 53029
+IEh0dHBSZXNwb25zZVJlZGlyZWN0 53030
+CVFTdHJpbmc= 53031
+IGF1dG9mb2N1cw== 53032
+RHVy 53033
+IFJFTEVBU0U= 53034
+LWRvbGxhcg== 53035
+LkNvbW1pdA== 53036
+IGtow7RuZw== 53037
+IGxhdW5kZXI= 53038
+Lj0i 53039
+IOaWhw== 53040
+IGJ5ZQ== 53041
+LkdldEtleURvd24= 53042
+IGdpbw== 53043
+X3NpZA== 53044
+IGdxbA== 53045
+LmNt 53046
+X1NMT1Q= 53047
+LkdldEluc3RhbmNl 53048
+cmV1c2U= 53049
+LnNodXRkb3du 53050
+IGplcnNleXM= 53051
+X01Q 53052
+cGF0aWJpbGl0eQ== 53053
+IOiuvue9rg== 53054
+IHJlcGxhY2VtZW50cw== 53055
+IHByZWNlZGVuY2U= 53056
+IGJ1ZmZlcmVk 53057
+LmJz 53058
+X0dSRUVO 53059
+YnJhaW4= 53060
+w6FjaA== 53061
+YXZhaWxhYmlsaXR5 53062
+IEVURg== 53063
+IGZyZXQ= 53064
+aXN0aW5l 53065
+IGxpZnRz 53066
+RXhpc3Rpbmc= 53067
+IHN0ZXJlb3R5cGVz 53068
+IGVtcHQ= 53069
+bW9uZ28= 53070
+LnRyYWluaW5n 53071
+YWxpc3Q= 53072
+LklzRW5hYmxlZA== 53073
+ICIh 53074
+PD8K 53075
+dWlkbw== 53076
+IGludFZhbHVl 53077
+LmVsYXN0aWNzZWFyY2g= 53078
+TE9HSU4= 53079
+IHJlbGlhbmNl 53080
+IHZpZXdUeXBl 53081
+IGRpbWluaXNoZWQ= 53082
+U2FyYWg= 53083
+IEFwcHJvYWNo 53084
+X1dFQg== 53085
+IGRybQ== 53086
+IGNvbHVtbmlzdA== 53087
+TWFya3Vw 53088
+IGFxdcOt 53089
+IERpYW5l 53090
+IGN3 53091
+IFRpY2s= 53092
+Lm9ic2VydmU= 53093
+SVJPTg== 53094
+SW5CYWNrZ3JvdW5k 53095
+IGVib255 53096
+IENvdXJ0ZXN5 53097
+Om51bGw= 53098
+KioqKioqKi8KCg== 53099
+L3Jlc291cmNl 53100
+SXRlcmF0aW9u 53101
+ZGVmYXVsdFZhbHVl 53102
+YXR0ZW50aW9u 53103
+INGA0LDQsdC+0YI= 53104
+IHdhaXZlcg== 53105
+IHByb2R1aXQ= 53106
+IEdyYWRpZW50 53107
+IHBlcmNlbnRhZ2Vz 53108
+IFNBTA== 53109
+IE1k 53110
+KHNuYXBzaG90 53111
+CWlv 53112
+aWtlcnM= 53113
+V2VicGFjaw== 53114
+IHNldFBhc3N3b3Jk 53115
+IGRlZmVhdGluZw== 53116
+IEplZw== 53117
+ZWxhcHNlZA== 53118
+aG9sZHM= 53119
+X3NoYWRvdw== 53120
+IG9mZmVuZGVk 53121
+IFBhbnQ= 53122
+IENhbGxhYmxl 53123
+X0lORk9STUFUSU9O 53124
+ZmZlZQ== 53125
+KGVtcGxveWVl 53126
+IFlBTUw= 53127
+cG9zc2libHk= 53128
+IG1heGltYWw= 53129
+ZWxsdWxhcg== 53130
+IFNueWRlcg== 53131
+ZGVzY3JpcHRvcg== 53132
+IFBMRUFTRQ== 53133
+RGxnSXRlbQ== 53134
+IGFydGlsbGVyeQ== 53135
+YH0K 53136
+cG9zaXVt 53137
+IGxlZXI= 53138
+JWM= 53139
+IGRpc3Bvcw== 53140
+Lm11bA== 53141
+IGdlb2dyYXBoeQ== 53142
+IGdyYXBoaWNhbA== 53143
+IGRyYW5r 53144
+IG1vdGlvbnM= 53145
+IHJ1dGg= 53146
+KioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKio= 53147
+IHByb2R1Y3Rpb25z 53148
+IGNyZWF0ZVRpbWU= 53149
+IFNjcmlwdHVyZQ== 53150
+YmJi 53151
+dWNocw== 53152
+5LiN6IO9 53153
+LkJpZ0RlY2ltYWw= 53154
+c2l6ZXM= 53155
+X3NvbHZlcg== 53156
+X0Zyb20= 53157
+X2pvaW50 53158
+IHBhdGhsaWI= 53159
+IGdlYXJz 53160
+INGE0L7RgNC8 53161
+IGNvbmNlYWw= 53162
+IGRpZmZlcmVudGlhdGU= 53163
+PEdhbWVPYmplY3Q= 53164
+IGplZGVu 53165
+IGFsbw== 53166
+Z2xvYmFscw== 53167
+ZXJ2YXRpdmU= 53168
+IHBhZGQ= 53169
+IFBseQ== 53170
+X3R5 53171
+IHByZXNlbnRl 53172
+IHByb3ByaWV0 53173
+X2xz 53174
+IFB1bmNo 53175
+IENyYXdmb3Jk 53176
+YmVsb3c= 53177
+Q3BwR2VuZXJpYw== 53178
+IENPTlRST0w= 53179
+IG9jZWFucw== 53180
+IFJPVVQ= 53181
+IHJhbmRpbnQ= 53182
+CWFkZHI= 53183
+IEhvbmVzdA== 53184
+IGVudmVsb3A= 53185
+IHRyYXVtYXRpYw== 53186
+IExBVA== 53187
+IHRn 53188
+7Iqk7Yq4 53189
+RXh0ZW5kZWQ= 53190
+IHVuY2hlY2tlZA== 53191
+IG9ic3RydWN0 53192
+X3RpbWV6b25l 53193
+UGVyc2lzdGVudA== 53194
+IGxsZXY= 53195
+LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKgo= 53196
+IEZsYQ== 53197
+LnBoeXNpY3M= 53198
+IGZvcmdlZA== 53199
+IExhdXI= 53200
+IG1vbm9wb2x5 53201
+IGNocmlzdG1hcw== 53202
+Z292 53203
+IFNtb2tl 53204
+W2Rm 53205
+IGJpc2hvcA== 53206
+bG9jYWxPYmplY3Q= 53207
+b3JyaA== 53208
+b250dmFuZ3N0 53209
+ZHJ5 53210
+IGVyZm9s 53211
+LWNl 53212
+IE9yZGVyZWREaWN0 53213
+IGh4 53214
+IFJFU0VU 53215
+U3Vj 53216
+IHJlY2tsZXNz 53217
+YWxhbWF0 53218
+QmlnSW50ZWdlcg== 53219
+IGJ1bGJz 53220
+IG11dGU= 53221
+5pS+ 53222
+LlVsdHJh 53223
+TG9u 53224
+IGNsZWFyVGltZW91dA== 53225
+PFJpZ2lkYm9keQ== 53226
+c3dpcGVy 53227
+IENvbWVz 53228
+XGRi 53229
+CW1w 53230
+IHJlc3Rz 53231
+TW92ZWQ= 53232
+IExvcmU= 53233
+LkRpbWVuc2lvbg== 53234
+IE1hbml0 53235
+Lmh4eA== 53236
+PT09PT09PQ== 53237
+cGl0Y2g= 53238
+ZmZpZWxk 53239
+c2tpbGxz 53240
+X2FsYnVt 53241
+dHJhbnNsYXRlZA== 53242
+IFhJ 53243
+IHZlaW4= 53244
+IERhdmlkc29u 53245
+IEF1Y2tsYW5k 53246
+eXNzZXk= 53247
+IGF1dGhlbnRpY2l0eQ== 53248
+IEFzc2lzdA== 53249
+IGNvbXByaXNl 53250
+Q3JlYXRlVGltZQ== 53251
+IHRyZW5jaA== 53252
+LndlZWs= 53253
+LS07 53254
+IFVJQWxlcnRDb250cm9sbGVy 53255
+X3JlbGF0ZWQ= 53256
+Q01T 53257
+cmVtZWx5 53258
+IGxleGVy 53259
+aXJtd2FyZQ== 53260
+RWxlbWVudHNCeQ== 53261
+LXVwcGVy 53262
+IHN0YWdu 53263
+LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQ== 53264
+X3NuYXBzaG90 53265
+L1hNTFNjaGVtYQ== 53266
+X09yZGVy 53267
+IGFubmV4 53268
+X0VOQ09E 53269
+IEFsdG8= 53270
+YXJpb3Vz 53271
+REo= 53272
+IGFib3J0aW9ucw== 53273
+Q29tYmF0 53274
+IExpY2VuY2U= 53275
+dWdnZXN0ZWQ= 53276
+W0s= 53277
+LCkpCg== 53278
+KCcvLw== 53279
+LkNhbg== 53280
+c2Vjcw== 53281
+cXVvdGVz 53282
+X3RyeQ== 53283
+IFNhZ2U= 53284
+IE1vdg== 53285
+J29u 53286
+cmVnaXN0 53287
+IFdyaXRlcw== 53288
+IERpZ2VzdA== 53289
+CWNvbnRhaW5lcg== 53290
+LXByb2dyZXNz 53291
+IGdvYXQ= 53292
+X3NjaGVtZQ== 53293
+LkdldENoaWxk 53294
+IGFzeW0= 53295
+Lm15YmF0aXNwbHVz 53296
+YXRpY2E= 53297
+cGdzcWw= 53298
+X2Fzc2V0cw== 53299
+Pks= 53300
+IGFmaW4= 53301
+TlNT 53302
+IE5BVg== 53303
+KCcuJyw= 53304
+IGAi 53305
+IGF1ZGl0b3I= 53306
+X01PVVNF 53307
+IHdhbGxldHM= 53308
+IG1vdQ== 53309
+cnVucw== 53310
+ZXRlcmFuZ2Fu 53311
+IFJlc2VydmF0aW9u 53312
+IGV4cGVyaWVuY2lh 53313
+CXByb2Nlc3M= 53314
+LWltcG9ydA== 53315
+X1JldHVybg== 53316
+IE1hY3Jv 53317
+IFBlbmlz 53318
+cGl4ZWxz 53319
+IHNldEVtYWls 53320
+KE1pZ3JhdGlvbkJ1aWxkZXI= 53321
+KHhz 53322
+IEVzdG9u 53323
+IEJ1YmJsZQ== 53324
+QUxMT1c= 53325
+CWhhbmRsZXI= 53326
+JHJldA== 53327
+IGNvbXBsaW1lbnRhcnk= 53328
+LWNpdHk= 53329
+IGVsbG9z 53330
+IFNPVVJDRQ== 53331
+IEFkdmlzb3I= 53332
+b2xvZ8OtYQ== 53333
+IGZhZGVk 53334
+LnBj 53335
+X1JHQkE= 53336
+QUZY 53337
+IHJlcGF5 53338
+IEZhbGNvbnM= 53339
+X2lzc3Vl 53340
+b21pZG91 53341
+LmJhb21pZG91 53342
+IGluZnJpbmdlbWVudA== 53343
+dXJuaW5n 53344
+L3N0b3JhZ2U= 53345
+X3F1YW50 53346
+IFF0Q29yZQ== 53347
+IG1lbGw= 53348
+X2RlbnNpdHk= 53349
+IEtub3g= 53350
+IFN1cnZpdmFs 53351
+LmdldFVzZXJuYW1l 53352
+IGNvbW1lcmNpYWxseQ== 53353
+Z3Jhc3M= 53354
+IG1laXM= 53355
+5Lq/ 53356
+IFBlcm1pc3Npb25z 53357
+X1FVT1RFUw== 53358
+aXBob25l 53359
+IExPVA== 53360
+IHRocmlsbGVy 53361
+IENoYXBlbA== 53362
+IFJpcw== 53363
+Pmk= 53364
+LUlE 53365
+IHJpZ2h0bHk= 53366
+Q3J5cHQ= 53367
+IElzdGFuYnVs 53368
+cmVkcw== 53369
+X3Jlc2l6ZQ== 53370
+UG9wdWxhdGlvbg== 53371
+KGZldGNo 53372
+IEhPVA== 53373
+OmZpcnN0 53374
+IGdhZGdldHM= 53375
+UHlPYmplY3Q= 53376
+IG1lcmdpbmc= 53377
+ZHVjZWQ= 53378
+bGVnYXRlcw== 53379
+dWJlY3Rs 53380
+JS8= 53381
+YWxsZWU= 53382
+IHp1c2FtbWVu 53383
+LlByb3BUeXBlcw== 53384
+YXN0bw== 53385
+Oio= 53386
+cmVjZQ== 53387
+UmVzcG9uc2VUeXBl 53388
+L2dyb3Vw 53389
+IGJhcmJhcg== 53390
+IENhcm9saW5l 53391
+b3VyY2Vk 53392
+57uP 53393
+IGx1YnJpYw== 53394
+aW5zcGVjdGlvbg== 53395
+YW1tYWQ= 53396
+CUltYWdl 53397
+IGllcnI= 53398
+IGN1cnRhaW5z 53399
+X0FSQg== 53400
+IE9yYWw= 53401
+IGFsbGllZA== 53402
+IFN0YXR1c0NvZGU= 53403
+IENsZWFybHk= 53404
+UHJlZmVycmVkU2l6ZQ== 53405
+cXVpbmE= 53406
+IHNwb3M= 53407
+IG9wdGltaXNt 53408
+IGNvbXByYXI= 53409
+IGx1Zw== 53410
+IEJvb20= 53411
+Y29uZmlybWF0aW9u 53412
+X0RVUkFUSU9O 53413
+X2Jyb3dzZXI= 53414
+IHJlcGV0aXRpb24= 53415
+IGtlZXBlcg== 53416
+IGFkZFRv 53417
+KGpz 53418
+LlN0YXQ= 53419
+LkNvbmQ= 53420
+IEhlcm5hbmRleg== 53421
+cGFxdWU= 53422
+IHZvbHVudGFyaWx5 53423
+IGplcms= 53424
+IExleQ== 53425
+IGRvY3VtZW50bw== 53426
+X2RlYWQ= 53427
+IFRFQ0g= 53428
+IGluY2VwdGlvbg== 53429
+KCJ7fQ== 53430
+IG9uTG9hZA== 53431
+eGRk 53432
+IElTUA== 53433
+c3BlY2lmaWVk 53434
+IOusuA== 53435
+UFJPQ0VTUw== 53436
+KGFsZXJ0 53437
+Lk1N 53438
+IGNyZWF0ZVN0b3Jl 53439
+KHVuaXF1ZQ== 53440
+LmdldEJsb2Nr 53441
+656Y 53442
+dW5vcw== 53443
+IHRyb3BoaWVz 53444
+X2hvdmVy 53445
+IERhZGR5 53446
+Lk1l 53447
+IENPVVI= 53448
+T0JK 53449
+YXRlbWFsYQ== 53450
+IFBzaQ== 53451
+IG5vcm1hbHM= 53452
+YWNpZXI= 53453
+IE1CQQ== 53454
+IHBhd24= 53455
+z4U= 53456
+IHNwb250YW5lb3Vz 53457
+IGF1eGlsaWFyeQ== 53458
+IGluYXVndXJhbA== 53459
+IGZhc3Rpbmc= 53460
+IEZpbGVTeXN0ZW0= 53461
+IHplbg== 53462
+X0JMVUU= 53463
+IHN1YnRyZWU= 53464
+IHByZXByb2Nlc3M= 53465
+LXRyYWNr 53466
+Q2hhcmxlcw== 53467
+IGRlcG9zaXRlZA== 53468
+IHF1ZXJ5UGFyYW1z 53469
+0L7Qu9GM0LrQvg== 53470
+aWVtYnJl 53471
+IHByYXc= 53472
+eEZD 53473
+IHBhbmM= 53474
+X25vbQ== 53475
+aGVyb2Vz 53476
+Lmphdg== 53477
+OjokXw== 53478
+INin2YTZhQ== 53479
+U0dsb2JhbA== 53480
+5o+P6L+w 53481
+PXRlbXA= 53482
+ZXN0aQ== 53483
+IGNvbnN0cnVjdGl2ZQ== 53484
+IFNoaW0= 53485
+IERpcmVjdGlvbnM= 53486
+IEJpbmc= 53487
+ZGlydHk= 53488
+LXJ1bm5pbmc= 53489
+X2ZpbGVwYXRo 53490
+b3JkZXJJZA== 53491
+Z2FyZA== 53492
+X29yaWVudA== 53493
+IHNjb3V0 53494
+IHBzeWNob2xvZ2lzdA== 53495
+7LY= 53496
+IOWt 53497
+ZGVxdWU= 53498
+IEhlcm1pb25l 53499
+IFBvd2VyUG9pbnQ= 53500
+IGVsbGE= 53501
+IFVJQmFyQnV0dG9uSXRlbQ== 53502
+U3Vidmlld3M= 53503
+QFJlcG9zaXRvcnk= 53504
+IiIiCgoK 53505
+IHJldG91cg== 53506
+IGNpcmNh 53507
+R3JhcGhpYw== 53508
+IEdyYXR1aXQ= 53509
+ZGR5 53510
+IHRlY2huaWNpYW4= 53511
+IENsZWFudXA= 53512
+IHBlcnNvbm5l 53513
+IHJlc2lu 53514
+Lk11bHQ= 53515
+JG0= 53516
+IE9yY2hlc3RyYQ== 53517
+IHdoZWVsY2hhaXI= 53518
+LlND 53519
+CUdhbWVPYmplY3Q= 53520
+IG1vxbxl 53521
+T3BlbmVk 53522
+IGNoaWNrZW5z 53523
+b3Rhcw== 53524
+X3RlbXBlcmF0dXJl 53525
+IGRldGVjdGluZw== 53526
+IGFjcXVhaW50 53527
+IDw/PSQ= 53528
+Pl0= 53529
+IG1lbnN0cg== 53530
+IGR5ZQ== 53531
+Um9ib3Rv 53532
+LnVuaXRz 53533
+IFZpbnls 53534
+Y3VyYQ== 53535
+cnlwdG9u 53536
+ZWRk 53537
+PXRlc3Q= 53538
+IHRyb3Y= 53539
+Q29uZmlybWF0aW9u 53540
+IHRoZW9sb2d5 53541
+IEhvbGRpbmdz 53542
+dWF0aW5n 53543
+UHJlZGljdA== 53544
+W3VzZXI= 53545
+IDon 53546
+IFNlc3Nv 53547
+cGFyZW50SWQ= 53548
+Q29kZUF0 53549
+YWJibw== 53550
+IFRyZXZvcg== 53551
+IFF1aXQ= 53552
+X3NoaXBwaW5n 53553
+X1JB 53554
+IGtsZWluZQ== 53555
+56Y= 53556
+X0xhYmVs 53557
+IE9tYXI= 53558
+IEdSRUVO 53559
+LykK 53560
+cm9r 53561
+IHJvYXN0ZWQ= 53562
+X1JU 53563
+IOKAjg== 53564
+QFJ1bldpdGg= 53565
+Pk5O 53566
+IHRhbmQ= 53567
+Kycu 53568
+Y3J1ZA== 53569
+LmtleWJvYXJk 53570
+YXN0ZXJ5 53571
+QkFE 53572
+IENvbHVtbnM= 53573
+LkNvbXBhbnk= 53574
+IHNlbWluYXI= 53575
+IGdldENvbnRlbnRQYW5l 53576
+IGNhdGFzdHJvcGhpYw== 53577
+IGVtYnJvaWQ= 53578
+aWF0aXZl 53579
+IGNydWVsdHk= 53580
+Ymlz 53581
+IGluc2U= 53582
+IEJyb2tlbg== 53583
+CWZz 53584
+IG1WaWV3 53585
+0LDRhtC40Lg= 53586
+LWZhY2Vib29r 53587
+IGNhY2hlcw== 53588
+44CC44CCCgo= 53589
+IE9STQ== 53590
+IERpc3RyaWI= 53591
+IFNjZW5lTWFuYWdlcg== 53592
+X3RyYW5zaXRpb24= 53593
+b21leg== 53594
+IFNIRQ== 53595
+IHdvcmtsb2Fk 53596
+U3VwcG9ydGVkRXhjZXB0aW9u 53597
+IHJpZXM= 53598
+IOWc 53599
+KGNhdA== 53600
+SGFzTWF4TGVuZ3Ro 53601
+QXBwcw== 53602
+LlRBQkxF 53603
+IEtleVZhbHVlUGFpcg== 53604
+ZWRpZG8= 53605
+LlJlbmRlcmluZw== 53606
+IGVsZWN0cm9t 53607
+IGFyYml0cmF0aW9u 53608
+IHZhcmlhYmlsaXR5 53609
+YXBvbGxv 53610
+IHV0bW9zdA== 53611
+b3BlbnNzbA== 53612
+IGjDpQ== 53613
+KCcm 53614
+LlN0YW5kYXJk 53615
+IGRpc3RyYWN0aW9u 53616
+aWZheA== 53617
+IOuVjA== 53618
+dGhvc2U= 53619
+aXNwZW5z 53620
+dmFr 53621
+IFNVUA== 53622
+IElzUGxhaW5PbGREYXRh 53623
+LGtleQ== 53624
+ZnJhZ2lzdGljcw== 53625
+IEpveWNl 53626
+IEZpYmVy 53627
+LlNlcnZsZXRFeGNlcHRpb24= 53628
+X0FsbA== 53629
+IGJhY2tlcnM= 53630
+IEF0dHJpYnV0ZUVycm9y 53631
+ewoKCg== 53632
+QHlhaG9v 53633
+LWRpcmVjdG9yeQ== 53634
+IHVuaW5zdGFsbA== 53635
+IGZsdW9y 53636
+bGlxdWlk 53637
+IGzDoQ== 53638
+IGZyaWdodGVuaW5n 53639
+YWRhbg== 53640
+IEFVVA== 53641
+IHRhdHRvb3M= 53642
+IHByb3BhZ2F0aW9u 53643
+LnRyYW5zbGF0aW9u 53644
+0J/RgA== 53645
+X3NjaGVkdWxlcg== 53646
+44CC4oCc 53647
+IGNhaXJv 53648
+IEh0dHBDbGllbnRNb2R1bGU= 53649
+IE5EUA== 53650
+IEhpdHM= 53651
+IFRyYW5zZm9ybWF0aW9u 53652
+IENhZXNhcg== 53653
+c3RpbQ== 53654
+IEJ1cnRvbg== 53655
+d3lu 53656
+IGNvbW1hbmRlZA== 53657
+IENsb3RoaW5n 53658
+IFJ1bnRpbWVPYmplY3Q= 53659
+cmVhbGx5 53660
+Y2xh 53661
+LnNh 53662
+IFNoYW5ub24= 53663
+IGNvbW1pc3Npb25z 53664
+IEphbmV0 53665
+IGRpc2d1c3Rpbmc= 53666
+IG9wdGltdW0= 53667
+X3NvbA== 53668
+dXJvbnM= 53669
+IFNIQVJF 53670
+QXR0cnM= 53671
+IFNjaGU= 53672
+IEJpZ051bWJlcg== 53673
+IGNpZ2Fy 53674
+KGRlcHRo 53675
+IGZyYWM= 53676
+IEN1cnZl 53677
+TEFTVA== 53678
+IFNDUklQVA== 53679
+6rO8 53680
+TWFsbG9j 53681
+Lmdyb3VwYnk= 53682
+IExlc2xpZQ== 53683
+IHdoaWNoZXZlcg== 53684
+U21hcnR5 53685
+L3dl 53686
+IEFtcA== 53687
+LGlu 53688
+bG9wcw== 53689
+ZGVwZW5kZW5jeQ== 53690
+Y2VkdXJlcw== 53691
+IGB7 53692
+eGljbw== 53693
+Q29sbGVjdG9y 53694
+IGhhYw== 53695
+IERhcmtuZXNz 53696
+ZmZmZmZmZmY= 53697
+Jz0+Ig== 53698
+IHBsZWFzaW5n 53699
+Y29ubmVjdG9y 53700
+em9z 53701
+UENJ 53702
+dmFj 53703
+IEluY29ycG9y 53704
+IG5lZA== 53705
+X0ZBQ1RPUg== 53706
+LmZi 53707
+IG91bmNl 53708
+X3NhdmVk 53709
+INix 53710
+IGRlZWRz 53711
+IERvbHBoaW5z 53712
+IGJ1ZW4= 53713
+RVND 53714
+LHRpbWU= 53715
+X0FVVA== 53716
+ZWNz 53717
+IFNlbmF0b3Jz 53718
+Lm91dGVy 53719
+IFNlbGxpbmc= 53720
+IHJpbg== 53721
+PmAK 53722
+Lm9ic2VydmFibGU= 53723
+IGNvc3Rpbmc= 53724
+REc= 53725
+IHdpbmRpbmc= 53726
+IHNrYQ== 53727
+IGNpcmN1bGF0aW5n 53728
+IGZvcm1pZGFibGU= 53729
+YW1wbw== 53730
+IFJhaXNlZA== 53731
+IHZlZ2V0YXRpb24= 53732
+VUZGSVg= 53733
+S2lsbA== 53734
+cHRpdmU= 53735
+KHJ2 53736
+IENvdW50cmllcw== 53737
+IE5ha2Vk 53738
+IEpB 53739
+KSkiCg== 53740
+dWRhcw== 53741
+IGJhcms= 53742
+CWxldmVs 53743
+IGZvZXM= 53744
+PkFkZA== 53745
+WW91VHViZQ== 53746
+O3Q= 53747
+TkNZ 53748
+Q2x1Yg== 53749
+RWlu 53750
+LS0NCg== 53751
+IGNvbnN0cmFpbmVk 53752
+RVR3aXR0ZXI= 53753
+WUc= 53754
+RGVzY3JpcGNpb24= 53755
+VU5DSA== 53756
+IGVucXVldWU= 53757
+IGRpc2tz 53758
+IFdlbnQ= 53759
+IG11aXQ= 53760
+CWxvY2F0aW9u 53761
+IHJldmlzaW9ucw== 53762
+IEFDSw== 53763
+LWZpeGVk 53764
+dHJhc291bmQ= 53765
+XFRlc3Q= 53766
+U3RhcnRQb3NpdGlvbg== 53767
+LWh0bWw= 53768
+IHByb2JsZW1hcw== 53769
+X0lOVEVSUlVQVA== 53770
+IFNUT1JF 53771
+5qih 53772
+aWxpYXRlZA== 53773
+IFJQTQ== 53774
+W3RlbXA= 53775
+YWNodGVu 53776
+IGNpYw== 53777
+IEF1dG9tYXRpb24= 53778
+IGhpZ2hz 53779
+Lyg/ 53780
+OicpCg== 53781
+c3Bhcms= 53782
+cmVscw== 53783
+CW1vdg== 53784
+VVRFUw== 53785
+LkF1dGhvcml6YXRpb24= 53786
+IFNjaG5laWRlcg== 53787
+IGNoZWVrcw== 53788
+YWRkcmVzc2Vz 53789
+YXJkaW4= 53790
+IHJlbW92YWJsZQ== 53791
+LkJhZFJlcXVlc3Q= 53792
+aWNpb25hcg== 53793
+IERpZXNlbA== 53794
+dGhhbg== 53795
+L34= 53796
+IGRhenU= 53797
+UmVnaXN0cm8= 53798
+ZmZp 53799
+X0RMTA== 53800
+IG5pZXU= 53801
+IG1vaXN0dXI= 53802
+LWV2ZW50cw== 53803
+IHRocmlsbA== 53804
+LmdldEVudGl0eQ== 53805
+IHRvZ2c= 53806
+IHdhdg== 53807
+KWRpZA== 53808
+YXRr 53809
+KHN1YnN0cg== 53810
+IEluamVjdGlvbg== 53811
+X21i 53812
+LkRpdg== 53813
+IGVuZGVhdm9y 53814
+ICjCow== 53815
+IGNsdXR0ZXI= 53816
+IHVyZ2VuY3k= 53817
+IGluc3RydWN0b3Jz 53818
+LScs 53819
+LXN0YW5kYXJk 53820
+Y2Vt 53821
+CWhhbmRsZQ== 53822
+LmZ0 53823
+U3RlcGhlbg== 53824
+Um9u 53825
+44GZ44KL 53826
+c2Np 53827
+IEF0bW9z 53828
+IGNhdGVyaW5n 53829
+IGZpYXQ= 53830
+LlBlcmNlbnQ= 53831
+IENvbmdv 53832
+eGRm 53833
+Lm1vemlsbGE= 53834
+IHNlaGVu 53835
+LnNob3dUb2FzdA== 53836
+T09U 53837
+LXJlc3VsdA== 53838
+zIE= 53839
+IGdob3N0cw== 53840
+IEJ1ZW4= 53841
+IFJpZGVy 53842
+IERvY3RvcnM= 53843
+IHVyYW5pdW0= 53844
+IGxvdWRseQ== 53845
+IHBvaXNlZA== 53846
+IGZhdm9ycw== 53847
+KEFQ 53848
+TEVZ 53849
+IHNpY2tuZXNz 53850
+IGNoYXR0ZQ== 53851
+IGludGVncmF0aW5n 53852
+IFl1cA== 53853
+Q2xvc3VyZQ== 53854
+IFRhbGVz 53855
+IGxpbmVh 53856
+IGV5ZWw= 53857
+LkNyeXB0b2dyYXBoeQ== 53858
+dW5leHBlY3RlZA== 53859
+YWxlbWVudA== 53860
+Y2l0 53861
+ZXRBZGRyZXNz 53862
+TGVhZA== 53863
+eGNk 53864
+X25lZ2F0aXZl 53865
+X2NvcnI= 53866
+aWdyYXBo 53867
+LWNoYW5uZWw= 53868
+IGRpc2Nv 53869
+U2VlZGVy 53870
+YmVhbQ== 53871
+X2Rw 53872
+Q0ND 53873
+IFByb3ZpZGVk 53874
+IGpzb25EYXRh 53875
+X1dI 53876
+RklORQ== 53877
+Qlg= 53878
+LkRhdGFBY2Nlc3M= 53879
+IHRlbXB0ZWQ= 53880
+IGZpbmVk 53881
+aXNDaGVja2Vk 53882
+IGZyYXVkdWxlbnQ= 53883
+RnJp 53884
+IGRvbWlj 53885
+UXVpeg== 53886
+IFVuZGVyZ3JvdW5k 53887
+YWJyYXM= 53888
+IElEaXNwb3NhYmxl 53889
+IFBlcnNvbmE= 53890
+IHJvZ3Vl 53891
+IEJleQ== 53892
+Z2V0Q2xpZW50 53893
+ZWtlbg== 53894
+ICcnJw0K 53895
+V2lraQ== 53896
+KEh0dHBTdGF0dXM= 53897
+U3RyZXRjaA== 53898
+IEdlc3Q= 53899
+IO2VmA== 53900
+IGVudGl0bGVtZW50 53901
+IGRvZW4= 53902
+YmxvZ3M= 53903
+IHZpdHJv 53904
+Ik9o 53905
+IFN1bW1vbg== 53906
+IEJhY2tib25l 53907
+IGfDvA== 53908
+Z2V0Q29sdW1u 53909
+IFdJTkFQSQ== 53910
+CXZh 53911
+X1JFUVVJUkVE 53912
+LnRocm93 53913
+IHNldEN1cnJlbnQ= 53914
+ZHVjdGVk 53915
+KEZ1bmN0aW9u 53916
+ZWxzaW5raQ== 53917
+X1Blcg== 53918
+ZmxpZXM= 53919
+IGluY29tcGV0 53920
+IGp1xbw= 53921
+KCkl 53922
+IC0tLQo= 53923
+dW1hcw== 53924
+IE9sZGVy 53925
+IGRpc3B1dGVk 53926
+X1JFUVVJUkU= 53927
+Lm1hdG11bA== 53928
+dW5rZW4= 53929
+5LmL 53930
+44GL44KJ 53931
+IHR0bA== 53932
+dW5kZXJzY29yZQ== 53933
+IFBhdHJpY2lh 53934
+IHRhcGVy 53935
+IHNlaW5lcg== 53936
+IHNheWE= 53937
+5Y+w 53938
+aWVyaQ== 53939
+LnNlY3JldA== 53940
+IHhvcg== 53941
+IG1pdG9jaG9uZA== 53942
+IGNhcmRib2FyZA== 53943
+fWB9 53944
+LUJFR0lO 53945
+IGRhdmlk 53946
+b3Vsb3M= 53947
+IFBldGVyc2J1cmc= 53948
+ICIiLA0K 53949
+c2hlbGY= 53950
+LXdhdGVy 53951
+LWJ5dGU= 53952
+INC+0LHRitC10LrRgg== 53953
+IHN0aXJyaW5n 53954
+7Je0 53955
+IGNvbXB0 53956
+IFBvdGVudGlhbA== 53957
+UkFGVA== 53958
+IGVhcHBseQ== 53959
+IHN3aW5naW5n 53960
+IGZlYw== 53961
+QVJB 53962
+IHdhbmRlcmluZw== 53963
+IHByZWZlcnM= 53964
+SmVzdXM= 53965
+IHBpcmF0ZQ== 53966
+IElzaXM= 53967
+Lk1pbmltdW0= 53968
+IFZhbGU= 53969
+X0JU 53970
+cmVuY2hlZA== 53971
+Y29ycw== 53972
+KGl0ZW1WaWV3 53973
+IGfDpQ== 53974
+LkNvbnRhY3Q= 53975
+Vmlld0NoaWxk 53976
+aW5kc2F5 53977
+Y29uZmlncw== 53978
+RHVwbGljYXRl 53979
+4oCmSQ== 53980
+enlzdA== 53981
+KHRvZG8= 53982
+LlJlbW92ZUF0 53983
+X0RJRkY= 53984
+IEJvdHRsZQ== 53985
+IHZvbHRh 53986
+dHJhZmZpYw== 53987
+TGVl 53988
+IOyk 53989
+IHR1bmVz 53990
+IEVjdWFkb3I= 53991
+IFl1bg== 53992
+IHVuZGVyd2VudA== 53993
+aWNvbQ== 53994
+ICcnKXsK 53995
+LXBvbA== 53996
+ZmxhbW1hdG9yeQ== 53997
+TXV0YXRpb24= 53998
+IHJlY2Fw 53999
+X3ZlcnQ= 54000
+T1RJT04= 54001
+Q0RBVEE= 54002
+aWNpbmU= 54003
+X2JvdW5kYXJ5 54004
+U2NhbGFycw== 54005
+IFVsdGltYXRlbHk= 54006
+RVE= 54007
+bWV0YWw= 54008
+a3Nlcw== 54009
+bXBs 54010
+IGNvbnRlbg== 54011
+U29sZA== 54012
+RVNTQUdFUw== 54013
+IGJpbmRlcg== 54014
+IGxpbmVu 54015
+IE15QXBw 54016
+LW1ldGE= 54017
+CXJhaXNl 54018
+b3VsdHJ5 54019
+CW1vZHVsZQ== 54020
+5pi+56S6 54021
+bsOt 54022
+IHlycw== 54023
+IHBoeXNpYw== 54024
+LXBsYXRmb3Jt 54025
+IHN3aW5nZXJz 54026
+KGhlYWRlcnM= 54027
+Licp 54028
+IEJV 54029
+IEluY29udHJp 54030
+U2NlbmFyaW8= 54031
+QW1i 54032
+IHByZW1pw6hyZQ== 54033
+L2FydGljbGVz 54034
+IE1ham9yaXR5 54035
+Q0xVU0lWRQ== 54036
+b25vcg== 54037
+IGhhYsOtYQ== 54038
+5bee 54039
+IG1pZGk= 54040
+IExhYw== 54041
+LmZpbmRJbmRleA== 54042
+IFBhaW50aW5n 54043
+LmJvcmRlckNvbG9y 54044
+Kmo= 54045
+IGNvbmdlc3Rpb24= 54046
+X0RJQ1Q= 54047
+b2xsZQ== 54048
+YXJuYXRpb24= 54049
+KHRleHR1cmU= 54050
+IHVm 54051
+IEVpbnN0ZWlu 54052
+KFRocmVhZA== 54053
+IGluZG9vcnM= 54054
+c2NyYXRjaA== 54055
+IG1ha2Vu 54056
+LlNUQVJU 54057
+IEp1ZHk= 54058
+Zm9ydW1z 54059
+CgoKCgoKCgoK 54060
+QklMRQ== 54061
+IHZvdQ== 54062
+TVlTUUw= 54063
+IGdlcm5l 54064
+IEltcG9ydEVycm9y 54065
+IFN1cnJl 54066
+PG5hdg== 54067
+IERpZXNl 54068
+ZXdhcmU= 54069
+IOuqqA== 54070
+aW1wbGVtZW50ZWQ= 54071
+U0lHTg== 54072
+ICd7QA== 54073
+cnpl 54074
+Lm1pbmVjcmFmdGZvcmdl 54075
+LmlubmVySGVpZ2h0 54076
+YmVjaw== 54077
+IGN1cnJ5 54078
+IGZvcm11bGFz 54079
+YWdvZw== 54080
+ZW5kZXQ= 54081
+IFBhaWQ= 54082
+IFJvYmVydG8= 54083
+IHVucGFpZA== 54084
+PWhlYWRlcnM= 54085
+LlBvd2Vy 54086
+IGJyZWQ= 54087
+b3JFbHNl 54088
+b3hpZGU= 54089
+IGZpbmFsaXpl 54090
+c2V0Q29sb3I= 54091
+IFN0YWR0 54092
+KCdcXA== 54093
+aXNtaWM= 54094
+IGhlbGU= 54095
+LlByb3RvY29s 54096
+Lkhvc3Rpbmc= 54097
+X01lbnU= 54098
+X2NvbmRpdGlvbnM= 54099
+IHB1cmdl 54100
+LnhhbWw= 54101
+YmFyZQ== 54102
+RlJBTUU= 54103
+IGN1YmVz 54104
+IEpvaGFubmVz 54105
+b2NyYXRz 54106
+LkRpcmVjdG9yeQ== 54107
+KWE= 54108
+Pyk6 54109
+X0xJQlJBUlk= 54110
+IGdldFRva2Vu 54111
+IGVjaG9lZA== 54112
+PWg= 54113
+X3NvYw== 54114
+IEV2YWx1YXRl 54115
+IOq4sA== 54116
+IERlbGV0ZWQ= 54117
+RXU= 54118
+IGNsb25lZA== 54119
+c3RhdGlzdGljcw== 54120
+LkNhbnZhcw== 54121
+IGhhY2tlcg== 54122
+IGdhbmdz 54123
+LnJlc3VtZQ== 54124
+cGVhY2U= 54125
+0JLQstC10LTQuNGC0LU= 54126
+IFByb2NlZWRpbmdz 54127
+56U= 54128
+IGphcGFu 54129
+ID8+Pgo= 54130
+ICR7KHs= 54131
+LnJlY3RhbmdsZQ== 54132
+Z3c= 54133
+IE9yaWVudGF0aW9u 54134
+JW0= 54135
+LiIpKTsK 54136
+IExpZXV0ZW5hbnQ= 54137
+LnRydWU= 54138
+IGVsdA== 54139
+IERJUkVDVE9SWQ== 54140
+zq8= 54141
+LmRheXM= 54142
+dXR0Z2FydA== 54143
+IHVuZGVyd2Vhcg== 54144
+LCkK 54145
+Q0lE 54146
+aW1lbGluZQ== 54147
+IEJsZW5k 54148
+cGhhc2lz 54149
+IHBlcnNl 54150
+IGdsaXR0ZXI= 54151
+IHVuaXE= 54152
+IENvbWJvQm94 54153
+IHNlc3Npb25JZA== 54154
+dXN0ZXJpdHk= 54155
+SURHRQ== 54156
+0L7QsdGJ 54157
+0KQ= 54158
+cmVuZGVycw== 54159
+X3Bvc2l0aXZl 54160
+X3Nsb3Rz 54161
+YnJvYWRjYXN0 54162
+IE1vbGQ= 54163
+L0NvcmU= 54164
+IEJhbm5vbg== 54165
+VG9vbEJhcg== 54166
+YWJlbGxl 54167
+X2F3 54168
+b2xlY3VsZQ== 54169
+IGRlbGV0ZXM= 54170
+IMOhcmVh 54171
+IHByb3BvcnRpb25hbA== 54172
+TVc= 54173
+IHdhcnk= 54174
+IGludGVybWVkaQ== 54175
+ICoqKioqKioqKioqKioqKioqKioqKioqKg== 54176
+LlNUQVRVUw== 54177
+X3R3 54178
+IGFyb21h 54179
+IGFjdGl2aXNt 54180
+LklzTm90TnVsbA== 54181
+dWF0 54182
+IHBvc3REYXRh 54183
+IHBlbQ== 54184
+X2N0b3I= 54185
+IFJhcGlkcw== 54186
+LW9mZnNldG9m 54187
+IGluZWZmZWN0aXZl 54188
+IG9uRGVzdHJveQ== 54189
+IE1ldHJpY3M= 54190
+IHBhZGRpbmdMZWZ0 54191
+LWVuYWJsZWQ= 54192
+IEdvYWxz 54193
+eW5jaHJvbm91c2x5 54194
+IHllcg== 54195
+SXRlbUF0 54196
+IE1ZU1FM 54197
+Y2Vzbw== 54198
+LktpbmQ= 54199
+dGVj 54200
+KGJ1bmRsZQ== 54201
+IHJlZmVyZWU= 54202
+LiI7DQo= 54203
+IGNvbmV4 54204
+IGJpa2luaQ== 54205
+X0FQUExJQ0FUSU9O 54206
+IHN3ZWxsaW5n 54207
+IGJlYWRz 54208
+IGJhcmdhaW5pbmc= 54209
+LS0tLS0tLS0tLS0KCg== 54210
+IGtpdGE= 54211
+KmZ0 54212
+TWluaQ== 54213
+IFRvbmlnaHQ= 54214
+IG1hbmlwdWxhdGVk 54215
+TWlycm9y 54216
+IFBvc3RhbA== 54217
+IG1hcmU= 54218
+RFc= 54219
+IGNvbXBpbGluZw== 54220
+IGZvcmVuc2lj 54221
+LmdldFZpZXc= 54222
+ZXBpbmc= 54223
+Q29z 54224
+IGFjY3JlZGl0ZWQ= 54225
+IG9iamV0aXZv 54226
+Y2FyZXQ= 54227
+UGFpcnM= 54228
+KT4+ 54229
+IHNlw7E= 54230
+IHF1b3RhdGlvbg== 54231
+IEJyYW5kcw== 54232
+dWJp 54233
+eXB5 54234
+IElubGluZQ== 54235
+aW1ldGVycw== 54236
+V2ludmFsaWQ= 54237
+CWxpbms= 54238
+IEJlbGZhc3Q= 54239
+IE1lYXN1cmVtZW50 54240
+X05PVElGSUNBVElPTg== 54241
+IHJveQ== 54242
+IENHQ29udGV4dA== 54243
+IHdlZGRpbmdz 54244
+VVJOUw== 54245
+IHBvZGNhc3Rz 54246
+IFNlcmc= 54247
+IOuNsOydtO2EsA== 54248
+IGVhcm5lc3Q= 54249
+Y292ZXJhZ2U= 54250
+aXRlRGF0YWJhc2U= 54251
+RW1wbG95ZWVz 54252
+IERlbWFuZA== 54253
+IGNvbnRlbmlkbw== 54254
+IFFWZWN0b3I= 54255
+IiwiXA== 54256
+IEdlcmFsZA== 54257
+KClg 54258
+IGdyaWRCYWdDb25zdHJhaW50cw== 54259
+UkVTT1VSQ0U= 54260
+IFNhZw== 54261
+YWJpbGlkYWQ= 54262
+IGNvZXJj 54263
+b3VuY2VtZW50cw== 54264
+IElzbGU= 54265
+LmVkZ2U= 54266
+IGV4dGVy 54267
+KV1b 54268
+IFBsYXlsaXN0 54269
+IEJsaW5k 54270
+IFZpdGFs 54271
+IGxhdHRpY2U= 54272
+cmF0ZWQ= 54273
+ZGVwZW5kZW5jaWVz 54274
+IGBgYA== 54275
+IEthbmc= 54276
+bWFjaA== 54277
+LmZhZGU= 54278
+IEd1ZXNz 54279
+Kls= 54280
+TmF0dXJhbA== 54281
+Lk9r 54282
+IFJlbmFpc3NhbmNl 54283
+IHRodWlz 54284
+IGxpa2Vu 54285
+Kmg= 54286
+XCcs 54287
+LWNsb2Nr 54288
+IE9iamVjdGl2ZQ== 54289
+ZmluZE9yRmFpbA== 54290
+IERpcnR5 54291
+IHNjYW5k 54292
+IFZBUklBQkxF 54293
+IGNvbXBhcmF0aXZl 54294
+eXBhZA== 54295
+KFNvdXJjZQ== 54296
+ZWNv 54297
+IGp1c3F1 54298
+CWFwaQ== 54299
+QnVpbHQ= 54300
+ICMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMj 54301
+IGxhYmVsaW5n 54302
+IGhlYWRhY2hlcw== 54303
+IG11ZmY= 54304
+IE9yY2g= 54305
+IGhhdGVz 54306
+LWJyZWFraW5n 54307
+L2J1dHRvbg== 54308
+IEJ1eWluZw== 54309
+TWV0cmlj 54310
+IHVuc3BlY2lmaWVk 54311
+L2hlYWQ= 54312
+IHN0aW5n 54313
+IHJlaW5mb3JjZQ== 54314
+IENvbVZpc2libGU= 54315
+Ymxpbms= 54316
+IEFobWFk 54317
+ZGJn 54318
+X2xibA== 54319
+IGh0dA== 54320
+7JuQ 54321
+cm9wb2xpcw== 54322
+ICgoX18= 54323
+IHBlcm1l 54324
+IGFwcGFyZWw= 54325
+U1RSRUFN 54326
+Y2h0cw== 54327
+IHNlaW5z 54328
+ZmlsbFR5cGU= 54329
+7KO8 54330
+Uk9XU0VS 54331
+dW1waW5n 54332
+IE5pZ2VyaWFu 54333
+4oCUaXM= 54334
+X2xvZ2lj 54335
+Lk9yZGluYWw= 54336
+bG9zdA== 54337
+L3Vzcg== 54338
+QWY= 54339
+IEl0ZXJhdGU= 54340
+aWJz 54341
+YWFs 54342
+IHN5bW1ldHJpYw== 54343
+LGlucHV0 54344
+IFBMTA== 54345
+dXppb25l 54346
+Y2FwdGNoYQ== 54347
+IFRhbGU= 54348
+RXhwaXJlZA== 54349
+IE9iamVjdE1hcHBlcg== 54350
+Y2lkbw== 54351
+LmdldE5leHQ= 54352
+IG1lbmphZGk= 54353
+OnNlbGVjdGVk 54354
+IHJpZW4= 54355
+X3NlbmRlcg== 54356
+UHdk 54357
+IEZsaWNrcg== 54358
+LkphdmE= 54359
+X3ZvdGU= 54360
+X01vZGU= 54361
+LiR7 54362
+IGZ1Y2tz 54363
+IEFsaWJhYmE= 54364
+IGluc2lkZXI= 54365
+YWNpbWllbnRv 54366
+IGZyYW7Dp2Fpcw== 54367
+SlNPTkV4Y2VwdGlvbg== 54368
+IEp3dA== 54369
+TWl0 54370
+bGVpY2g= 54371
+IHByYWN0aXRpb25lcg== 54372
+L3NvdXJjZQ== 54373
+IG9nbmk= 54374
+IHBoaWxvc29waGVy 54375
+U25hY2tCYXI= 54376
+c3RlbGx1bmc= 54377
+KGJpdG1hcA== 54378
+IGFzdGVyb2lk 54379
+IG1hcGxl 54380
+dWNoYQ== 54381
+aXRlbUlk 54382
+IHN0ZWh0 54383
+T3JkZXJlZA== 54384
+ZW5idXJn 54385
+L3Rva2Vu 54386
+6YWN 54387
+IFdlYmI= 54388
+b3dhbmll 54389
+IFdBSVQ= 54390
+IEhEUg== 54391
+IEV2YQ== 54392
+QVRUTEU= 54393
+KG1hc3Rlcg== 54394
+IGVycw== 54395
+YWxvYWQ= 54396
+IHNtdHA= 54397
+dW5pcQ== 54398
+IGd1aXQ= 54399
+IFJhZmFlbA== 54400
+Imlu 54401
+KFVJ 54402
+KExheW91dEluZmxhdGVy 54403
+b3Jhbg== 54404
+IHNlcnZp 54405
+bmV6 54406
+IFRvcnJlcw== 54407
+Lk1pZGRsZUNlbnRlcg== 54408
+IG1vbGw= 54409
+IFRleHRBbGlnbg== 54410
+X3VwbG9hZGVk 54411
+IE1laHI= 54412
+IGhvbW8= 54413
+LWxpbmtlZA== 54414
+dW5uZXI= 54415
+X2xlbmd0aHM= 54416
+IGRpZmZ1c2U= 54417
+IEF1dG9tb3RpdmU= 54418
+WWVhcnM= 54419
+IGxpZW4= 54420
+W2NvdW50ZXI= 54421
+a2xhc3M= 54422
+0YHRgtC4 54423
+LkVuZ2luZQ== 54424
+IG1lbnk= 54425
+dWx0eg== 54426
+IGluZmFudHJ5 54427
+Vmlh 54428
+c2VjdHM= 54429
+LmRhc2hib2FyZA== 54430
+IHNwb25zb3JzaGlw 54431
+Lk1vZGlmaWVk 54432
+Oy0= 54433
+IFZlbG9jaXR5 54434
+dHJhY3RlZA== 54435
+KG1ldGFkYXRh 54436
+IHBsYWd1ZQ== 54437
+TlNVc2VyRGVmYXVsdHM= 54438
+YXBwcm92YWw= 54439
+cHJvYmFibHk= 54440
+LXNpeA== 54441
+X1ZJUw== 54442
+OicnLAo= 54443
+LmVuYw== 54444
+Lk1lc3NhZ2Vz 54445
+X1BST0dSRVNT 54446
+IG5lY2tsYWNl 54447
+IFRlbXBvcmFyeQ== 54448
+X21hcmt1cA== 54449
+IEZ1bmN0aW9uYWw= 54450
+IEpp 54451
+IHRlc3RDYXNl 54452
+ICgpOw0K 54453
+X0NlbGw= 54454
+IFJlc2lkZW50aWFs 54455
+IFJhaWx3YXk= 54456
+KCgmX19f 54457
+IGRlZmF1bHRzdGF0ZQ== 54458
+IGVpbm1hbA== 54459
+LmZhYw== 54460
+KmY= 54461
+IHBpY25pYw== 54462
+KGV2YWw= 54463
+IGZ1cm5hY2U= 54464
+YXNzb2NpYXRpb24= 54465
+eyEh 54466
+IENvbXBpbGU= 54467
+eGVi 54468
+RXZhbA== 54469
+gOyepQ== 54470
+KGNhbA== 54471
+IG1hcmtldGVycw== 54472
+X2hlbHBlcnM= 54473
+bG9jYWxjdHg= 54474
+IHlvZ3VydA== 54475
+IHZpdGE= 54476
+LGxlbmd0aA== 54477
+IElucHV0RGVjb3JhdGlvbg== 54478
+IGludGVydmVuZQ== 54479
+IGNvbXB1dGF0aW9uYWw= 54480
+RGVuaWVk 54481
+L2Vudmlyb25tZW50 54482
+aWlk 54483
+LkJveA== 54484
+LVRpbWU= 54485
+IGV4Y3VzZXM= 54486
+dHJhbnNwb3Nl 54487
+IG91dHJhZ2VvdXM= 54488
+KFNlcnZlcg== 54489
+ZGltcw== 54490
+Il0pOw0K 54491
+kJw= 54492
+IEVpc2Vu 54493
+KE9w 54494
+IGhhc2hsaWI= 54495
+KGxp 54496
+fiw= 54497
+xLFuZA== 54498
+IFNwaGVyZQ== 54499
+IEJlbGxh 54500
+LXRyYW5zaXRpb24= 54501
+LnJlYWRTdHJpbmc= 54502
+aGVhcmQ= 54503
+IFp1Y2tlcg== 54504
+IHdhbm4= 54505
+IGphaWxlZA== 54506
+IFRhbGVudA== 54507
+b3Bob2JpYQ== 54508
+wrY= 54509
+IG9wZXJhbmRz 54510
+U29tZW9uZQ== 54511
+IExpYnJhcmllcw== 54512
+cHJpbWFyeUtleQ== 54513
+16o= 54514
+VXI= 54515
+IG1hdGVz 54516
+INGI 54517
+LWR1dHk= 54518
+cG91cg== 54519
+PEVudGl0eQ== 54520
+PllvdQ== 54521
+Q3JlYXRvcnM= 54522
+V2l0aE5hbWU= 54523
+J2ludA== 54524
+IFJhdGlvbmFs 54525
+PUI= 54526
+LkF1dG9GaWVsZA== 54527
+IEZvdW5kZXI= 54528
+IE1lZ2Fu 54529
+LmltYWdlVmlldw== 54530
+Ym93cw== 54531
+IHdpdGhSb3V0ZXI= 54532
+IGxpYmVyYXRpb24= 54533
+IGZvcmFt 54534
+IGNpdGFz 54535
+b2NoZW4= 54536
+LnN3YXA= 54537
+IC4uCg== 54538
+LmN2dENvbG9y 54539
+IEF3YXJl 54540
+IHF1ZWVy 54541
+5aSE55CG 54542
+IEluZmluaXRl 54543
+L3N0cmluZw== 54544
+IGJsZW5kZWQ= 54545
+LUNvbA== 54546
+IHd5cw== 54547
+IHNpY2hlcg== 54548
+Lkxhc3ROYW1l 54549
+X3dhdGVy 54550
+X1JlbQ== 54551
+IGFydGhyaXRpcw== 54552
+LkFQUA== 54553
+IEV4cGFuc2lvbg== 54554
+eGRi 54555
+ZXN0cm8= 54556
+ZmF2aWNvbg== 54557
+VmVyaWZpZWQ= 54558
+IGRlbGl2ZXJpZXM= 54559
+YXJrZXQ= 54560
+IGdldEltYWdl 54561
+IEpQRUc= 54562
+IFRSSQ== 54563
+IEVsZXY= 54564
+ZnVzaW9u 54565
+IGpwZWc= 54566
+Y29sbGlzaW9u 54567
+IGRlc2NlbmQ= 54568
+LmZvcmU= 54569
+IExvZ3M= 54570
+IHBvbGljaW5n 54571
+dW50YXM= 54572
+Lmhvc3RuYW1l 54573
+YWNjZXB0ZWQ= 54574
+4KWL 54575
+IFdlbmR5 54576
+LnJlYWRGaWxl 54577
+IFNhbnRpYWdv 54578
+IEdvbA== 54579
+cmliYm9u 54580
+c3RyYXRpb24= 54581
+IHB1ZGQ= 54582
+IC8vXw== 54583
+aXNMb2FkaW5n 54584
+X1NFUklBTA== 54585
+IGluc3RhbnRpYXRlZA== 54586
+IHBvZHM= 54587
+IHdhcnJhbnRz 54588
+IGFkbWl0dGluZw== 54589
+CWNvbm5lY3Rpb24= 54590
+X2J1ZmZlcnM= 54591
+IEluY2g= 54592
+IFpFUk8= 54593
+d2VydA== 54594
+IENsYW4= 54595
+CWls 54596
+KHNoYWRlcg== 54597
+IHBpbGdy 54598
+IOWK 54599
+RHN0 54600
+X2JhcmFuZw== 54601
+Oicj 54602
+QnV0dG9uVGV4dA== 54603
+dGVyZQ== 54604
+X2FtdA== 54605
+IEZvcmV2ZXI= 54606
+LkxpbmtlZExpc3Q= 54607
+dWFyZHM= 54608
+dXJvdXM= 54609
+IFNlbmRlcg== 54610
+dmFyaWFudHM= 54611
+X21hZ2lj 54612
+IGFjY29tbW9kYXRpb25z 54613
+YXBHZXN0dXJlUmVjb2duaXplcg== 54614
+UHJvbXB0 54615
+ID8+DQoNCg== 54616
+IHJlcHJvZHVjZWQ= 54617
+X3ByZWNpc2lvbg== 54618
+IHJ1dA== 54619
+bW9uZHM= 54620
+O3g= 54621
+IH0sDQoNCg== 54622
+55S7 54623
+IFZpdGE= 54624
+IHByb3Bvc2Vz 54625
+IFBhcnRpdGlvbg== 54626
+SElORw== 54627
+ICN7QA== 54628
+IGVzc2E= 54629
+KGJhcg== 54630
+IFplbGRh 54631
+LmNhdGNo 54632
+X2V4Y2VwdA== 54633
+IG92ZXJ3aGVsbWluZ2x5 54634
+CVRFU1Q= 54635
+X0NPTlRBQ1Q= 54636
+X187 54637
+IFNlbWk= 54638
+IHRyYWJhbGhv 54639
+cmFkb3Vybw== 54640
+X3NxdWFyZWQ= 54641
+4LY= 54642
+JUQ= 54643
+IHByYXQ= 54644
+aXRleg== 54645
+KGVsZW1lbnRz 54646
+UGxhbnQ= 54647
+YWd1YQ== 54648
+IGlocmVy 54649
+LkNvbA== 54650
+IE1jTg== 54651
+IENvcmV5 54652
+T05FWQ== 54653
+Q2VsZQ== 54654
+cmVtZW50 54655
+IG1hbHQ= 54656
+IEx1aw== 54657
+57uf 54658
+UE1FTlQ= 54659
+IGFuYWx5emVy 54660
+IEhhbms= 54661
+X3VuaWNvZGU= 54662
+IGJ1cmlhbA== 54663
+IENlbHRpYw== 54664
+RUZG 54665
+TG90 54666
+d29u 54667
+IE51ZGU= 54668
+IE5hdGU= 54669
+IFNpbmdlcg== 54670
+IFNJVEU= 54671
+KGJpdA== 54672
+Yml6 54673
+IGRldG9u 54674
+UkVBRE1F 54675
+OkFkZA== 54676
+IEhvbGRpbmc= 54677
+e3JldHVybg== 54678
+bmNpYXM= 54679
+Pg0KDQoNCg== 54680
+cnVwdGlvbnM= 54681
+LnJlYWN0 54682
+dXJzYWw= 54683
+4Lib 54684
+IERPTkU= 54685
+aXZhdGVk 54686
+Lm5vdGVz 54687
+IHN0cmlwZXM= 54688
+cmlwcA== 54689
+aXJhbg== 54690
+IHNsYWI= 54691
+IEJ1cm5pbmc= 54692
+KGVudA== 54693
+LnNlYw== 54694
+R1U= 54695
+X2dvbGQ= 54696
+XSkpLg== 54697
+ZWxpbmVzcw== 54698
+0L7QsdGA0LDQ 54699
+IOKIgA== 54700
+IGNvc21pYw== 54701
+J10pOgo= 54702
+Y2Npb25lcw== 54703
+Y2lzaW9u 54704
+Y29tcGFyaXNvbg== 54705
+IEV2YW5nZWw= 54706
+IFNoaXJ0 54707
+bGFnZW4= 54708
+IGnFnw== 54709
+IGZpbGxlcg== 54710
+LnByb2Q= 54711
+IAkJCQkJ 54712
+INGE0YPQvdC60YbQuA== 54713
+IFplcm9Db25zdHJ1Y3Rvcg== 54714
+QXRB 54715
+XSkNCg0K 54716
+IGNvbnN0cnVjdG9ycw== 54717
+X1NIQVJFRA== 54718
+CWRldmljZQ== 54719
+IEFkdmljZQ== 54720
+OkAiJUA= 54721
+Pn0n 54722
+LklzRW1wdHk= 54723
+IGludHM= 54724
+bW9zdGF0 54725
+IFNpZ251cA== 54726
+Z2Vhcg== 54727
+KHBhdGhz 54728
+LHsi 54729
+L0RvY3VtZW50cw== 54730
+PENhdGVnb3J5 54731
+VUVTVA== 54732
+IGdldERlc2NyaXB0aW9u 54733
+ICJ7XCI= 54734
+IEpvZXk= 54735
+b2Rlbg== 54736
+X2d1ZXNz 54737
+RVVS 54738
+IGhlcnI= 54739
+IHNlZGFu 54740
+IHJlYWN0ZWQ= 54741
+X2Nsb25l 54742
+IFJldmVs 54743
+IGZvcmI= 54744
+UmVtYWluaW5n 54745
+XFNlcnZpY2Vz 54746
+IGF2aXM= 54747
+YmF0aW0= 54748
+emVwdA== 54749
+IERCTnVsbA== 54750
+Q29ubmVjdGlvbnM= 54751
+IGRpc3BvbmlibGU= 54752
+cGhpbg== 54753
+IHN0dQ== 54754
+IHNjaG9sYXJzaGlwcw== 54755
+LXNoYXJpbmc= 54756
+Zm9ybWluZw== 54757
+IEJyaQ== 54758
+VmFySW5zbg== 54759
+L3Nlc3Npb24= 54760
+IGFtYmlndW91cw== 54761
+IGFwcmVzZW50 54762
+X3Jk 54763
+c2l0ZXM= 54764
+L2FjdGlvbg== 54765
+dHJhY3Rvcg== 54766
+IGRpbGVtbWE= 54767
+IFNY 54768
+XS0tPgo= 54769
+IEphY2tldA== 54770
+UkFUSU9O 54771
+LmdldFNlbGVjdGVkSXRlbQ== 54772
+LWluaXQ= 54773
+IFJlZ2lzdGVycw== 54774
+X3NlcA== 54775
+IFRvb2xraXQ= 54776
+LmRpY3Q= 54777
+IHhsYWJlbA== 54778
+XFRhYmxl 54779
+dG9j 54780
+X2NvbWJv 54781
+IENvbXBhY3Q= 54782
+IHJ1Z2dlZA== 54783
+4KWH4KQ= 54784
+LW1hbmFnZW1lbnQ= 54785
+Jyl9fSI+Cg== 54786
+IFN0YW1w 54787
+xLFs 54788
+cm94 54789
+IGxhbmRzY2FwZXM= 54790
+X05PVEU= 54791
+bW9uYXJ5 54792
+Y2Fi 54793
+IG1vZXQ= 54794
+eGFm 54795
+cmNvZGU= 54796
+LWNsaQ== 54797
+X2dhdGU= 54798
+W2V2ZW50 54799
+U1BPUlQ= 54800
+Z2lh 54801
+IFNVUEVS 54802
+L0xvZ2lu 54803
+X3NodXRkb3du 54804
+aW50ZXJydXB0 54805
+IHByZXRlbmRpbmc= 54806
+IGZyaW5nZQ== 54807
+IFJlZHM= 54808
+IENVREE= 54809
+IFVOSVg= 54810
+dml0 54811
+IGJyaWc= 54812
+ZHJ2 54813
+IENvbm5lY3Rvcg== 54814
+VGhlcmVmb3Jl 54815
+IGxpYQ== 54816
+RGV0ZWN0aW9u 54817
+X2FjdG9y 54818
+IHRlbXBmaWxl 54819
+IGVjY2VudHJpYw== 54820
+LXJvbGU= 54821
+IHBhZHg= 54822
+ZGVudA== 54823
+V2VzdGVybg== 54824
+IOq3uA== 54825
+IEFwcGxpY2F0aW9uUmVjb3Jk 54826
+IGNhbXBhaWduaW5n 54827
+X3J1bm5lcg== 54828
+IENpdmlj 54829
+YWxlaWdo 54830
+IGRpcmVrdA== 54831
+LnN1bA== 54832
+ICAJCQk= 54833
+YW50ZW4= 54834
+IGlzc3Vlcg== 54835
+IGFzc2VydGlvbnM= 54836
+KG9yaWc= 54837
+QVRJTw== 54838
+IGxlYW5lZA== 54839
+w6Rz 54840
+LkRUTw== 54841
+ZXhwbG9kZQ== 54842
+Lk9ic2VydmFibGU= 54843
+IHN0YWdnZXJpbmc= 54844
+IGtpZG5hcHBlZA== 54845
+IHByb2dyYW1tZXJz 54846
+IElubm92 54847
+LnBhcmFtZXRlcg== 54848
+IGRvbWluYXRpb24= 54849
+IHNrZXB0aWM= 54850
+IOaYrw== 54851
+IGF2b2lkcw== 54852
+LlZlcmlmeQ== 54853
+dWJieQ== 54854
+IEFTTg== 54855
+IGZvcm1hdG8= 54856
+IEJlYXRsZXM= 54857
+X2JyYW5k 54858
+IGluc2V0 54859
+eW91dHU= 54860
+IHRvYw== 54861
+LWZpbmFs 54862
+U2hvd2luZw== 54863
+IERvdWI= 54864
+IE1lc2E= 54865
+QWRq 54866
+X21lZGl1bQ== 54867
+Q3JlYXRlcw== 54868
+KGVuZHBvaW50 54869
+CVVQ 54870
+YmJpZQ== 54871
+IHN0YWxr 54872
+LmRhdGFiaW5k 54873
+LlNjYW4= 54874
+YWdlbnRz 54875
+JCw= 54876
+aW5kaXZpZHVhbA== 54877
+Kykv 54878
+CXZt 54879
+KG5vdGlmaWNhdGlvbg== 54880
+IGluZXg= 54881
+IENsYXNzaWZpY2F0aW9u 54882
+cmVubw== 54883
+IG9saWc= 54884
+LXJhdGVk 54885
+IGZvcm11bGF0aW9u 54886
+Jyx7 54887
+IGFjZXB0 54888
+X3VucGFjaw== 54889
+X0NB 54890
+LlBvdw== 54891
+CWlt 54892
+IGFsdW1pbml1bQ== 54893
+QU5P 54894
+IHhu 54895
+IGPDs21v 54896
+IEluZ3JlZGllbnQ= 54897
+IHNlaXp1cmVz 54898
+5YWx 54899
+aWZpY2Fkb3I= 54900
+IHNpZ3VpZW50ZQ== 54901
+IEluZnJhZ2lzdGljcw== 54902
+IGR1cGxpY2F0ZWQ= 54903
+IERlZQ== 54904
+IG7DuA== 54905
+IEFDQ0VQVA== 54906
+KGNyYXRl 54907
+0LjRgtC10LvRjA== 54908
+LWxlc3M= 54909
+IGluZmluaXR5 54910
+QW5hbHl6ZXI= 54911
+LURheQ== 54912
+cml0dA== 54913
+KGNpbg== 54914
+IEd5 54915
+IG11bHRpcGxpZWQ= 54916
+dWNoaQ== 54917
+IEJhbGR3aW4= 54918
+L2lw 54919
+IHNob3J0Y3V0cw== 54920
+LkFERA== 54921
+IHZpZ29y 54922
+X2luc3RydWN0aW9u 54923
+KDs= 54924
+X2V0YQ== 54925
+6L+e 54926
+dXRvcmlhbHM= 54927
+IGJvb3N0aW5n 54928
+YnY= 54929
+IGFja25vd2xlZGdlcw== 54930
+TGlzdGVuaW5n 54931
+RkFR 54932
+O2I= 54933
+KCgt 54934
+IGFyY2hpdGVjdHM= 54935
+IHp3ZQ== 54936
+IHB1bHM= 54937
+IGdldENvdW50 54938
+dmVyYnM= 54939
+44Cc 54940
+KENvbGxlY3Rpb24= 54941
+a3Jl 54942
+IGp1cmlzZGljdGlvbnM= 54943
+X2JyaWRnZQ== 54944
+IENyYWNr 54945
+IERpZmZpY3VsdHk= 54946
+S08= 54947
+UmVzZXJ2YXRpb24= 54948
+X3JlcXVpcmVz 54949
+VG91cg== 54950
+44GX44Gf 54951
+LnNldEN1cnJlbnQ= 54952
+IGt5 54953
+IEFsYmFueQ== 54954
+IOin 54955
+bGxlcg== 54956
+YWduYQ== 54957
+d29ya2Vycw== 54958
+LmJsYW5r 54959
+IFByYXllcg== 54960
+TUlD 54961
+IHJlc2lsaWVuY2U= 54962
+VGVY 54963
+IExhbmd1YWdlcw== 54964
+c3R1ZHk= 54965
+CWN1cnI= 54966
+IGVuenltZXM= 54967
+U2x1Zw== 54968
+IO2MjA== 54969
+c3RyYWw= 54970
+IHR1bW9ycw== 54971
+IHNlZ3VuZGE= 54972
+PSd7 54973
+aW5zdHJ1Y3Rpb24= 54974
+IExpc3A= 54975
+L2luZm8= 54976
+ICJ7JA== 54977
+LDopLA== 54978
+IGd2 54979
+KEVycm9yTWVzc2FnZQ== 54980
+ICc9 54981
+fS0kew== 54982
+LkRvY3VtZW50cw== 54983
+IldlbGw= 54984
+IHJlbWluaXNjZW50 54985
+IGdheg== 54986
+aXJvcHI= 54987
+ZWhy 54988
+IHN1cHByZXNzZWQ= 54989
+ZXJzaA== 54990
+LnNjcm9sbFRv 54991
+IGNhZGVuYQ== 54992
+IGdhbWVTdGF0ZQ== 54993
+w61t 54994
+KGNvbnY= 54995
+IFRvbW9ycm93 54996
+IENDVA== 54997
+TW9uZ28= 54998
+dWxn 54999
+LkNhbWVyYQ== 55000
+LmhhbmRsZXJz 55001
+bXBo 55002
+IHN0aw== 55003
+IGdlbmV0aWNz 55004
+QUNJTkc= 55005
+VHJpdmlh 55006
+IEJhbQ== 55007
+KG1hcmtlcg== 55008
+LlN0cmV0Y2g= 55009
+IFN1bm5p 55010
+IEJldHR5 55011
+LnRvbGlzdA== 55012
+dW5saWtlbHk= 55013
+LlJlY3RhbmdsZQ== 55014
+b2Jzb2xldGU= 55015
+SUxPTg== 55016
+aW5uZXJUZXh0 55017
+ZW1ib3VyZw== 55018
+YU4= 55019
+IFZlaGljbGVz 55020
+dW5sb2Nr 55021
+OnV0Zg== 55022
+bm9i 55023
+IFNlZWluZw== 55024
+IE5FVkVS 55025
+IHRscw== 55026
+IGZpbGxlcw== 55027
+IGJlbmVmaXRlZA== 55028
+IENsaW50 55029
+Ki8pLA== 55030
+LmZvbGQ= 55031
+IHBvc2libGU= 55032
+QURFRA== 55033
+dGhvdXNl 55034
+LkRBTA== 55035
+IE9kZA== 55036
+cm9rZXM= 55037
+IFN1bm55 55038
+IFBhcnRpYWxFcQ== 55039
+X0J1ZmZlcg== 55040
+IExldmk= 55041
+bG9uZ3JpZ2h0YXJyb3c= 55042
+ZWxkb24= 55043
+Z2FnZXM= 55044
+X3dhcm4= 55045
+LkNyZWF0ZVRhYmxl 55046
+IERpcA== 55047
+X3F1ZXN0aW9ucw== 55048
+LmxvZ2lj 55049
+ICMi 55050
+PXsoKT0+ 55051
+IHRlcA== 55052
+IGp1aWN5 55053
+7IKs 55054
+ZW5rbw== 55055
+aWFsZWN0 55056
+2Yk= 55057
+IG9uYm9hcmQ= 55058
+IOaP 55059
+CXJ0 55060
+X1VURg== 55061
+IFFBY3Rpb24= 55062
+4oCe 55063
+KENvbXBvbmVudA== 55064
+KGF1ZGlv 55065
+LmhpdA== 55066
+Z3Rl 55067
+IHByb2dyYW1tZWQ= 55068
+c3RhdGVQYXJhbXM= 55069
+IHBvbHllc3Rlcg== 55070
+ZmlyZXM= 55071
+Ynlzcw== 55072
+XT0o 55073
+X3F1YWxpdHk= 55074
+T2ZEYXk= 55075
+IEZhaXJ5 55076
+IHllbGxlZA== 55077
+b3Bs 55078
+KHVzZXJOYW1l 55079
+IERpZmZlcmVuY2U= 55080
+IGV2YWx1YXRpb25z 55081
+aWZmYW55 55082
+IGN5Y2xpc3Rz 55083
+IGNpZGFkZQ== 55084
+IHRleHRib29r 55085
+IHByb2ZpbGluZw== 55086
+X18pLA== 55087
+ZGVh 55088
+LmFjdGl2YXRl 55089
+IGluZGljYXRpb25z 55090
+0JU= 55091
+VG91Y2hVcEluc2lkZQ== 55092
+IGludmFsdWFibGU= 55093
+IE1BU0s= 55094
+IGNvbnRlbmQ= 55095
+RnJlcQ== 55096
+IHJlY3J1aXRz 55097
+KGludGVydmFs 55098
+IFVzZXJQcm9maWxl 55099
+ICcuLy4uLw== 55100
+ZWR1 55101
+X0NhbGxiYWNr 55102
+IGFuYWxvZ3k= 55103
+IFRyb3BoeQ== 55104
+YXBwaGlyZQ== 55105
+VmlkZW9z 55106
+IENoZXI= 55107
+IEhhdg== 55108
+4oCmIg== 55109
+LnZhbGlkYXRvcg== 55110
+Z2Z4 55111
+IFVPYmplY3Q= 55112
+Y2xhc3NuYW1lcw== 55113
+dHJpYW5nbGU= 55114
+IEVuY29kZXI= 55115
+LnNweQ== 55116
+IHByZWRhdG9ycw== 55117
+PXN0YXR1cw== 55118
+LXNhZmU= 55119
+OiIsCg== 55120
+IEluY2x1ZGluZw== 55121
+IHt9Ow0K 55122
+KmNvcw== 55123
+IGVuZHVyZWQ= 55124
+LnN1bGFrZQ== 55125
+IG51cnNlcnk= 55126
+IGZyYWdyYW5jZQ== 55127
+IHJlYnVpbGRpbmc= 55128
+IG50aA== 55129
+IEZyYXNlcg== 55130
+LnNldERhdGU= 55131
+IFZpbmNl 55132
+X1JFU1Q= 55133
+IHZlbnRpbGF0aW9u 55134
+5rW3 55135
+Y3JpYmVz 55136
+LmFzbQ== 55137
+bHBWdGJs 55138
+IEFiZQ== 55139
+dWlzaW5l 55140
+LGFycmF5 55141
+CWNsYXNzTmFtZQ== 55142
+ZXJyYWxz 55143
+ICcKCg== 55144
+Q2hlY2tvdXQ= 55145
+IHNvbGljaXQ= 55146
+QXV4 55147
+X2NhcHR1cmU= 55148
+IHJpYnM= 55149
+cmFnb24= 55150
+dmlvbA== 55151
+dG9waWNz 55152
+RnVuY3Rpb25GbGFncw== 55153
+IE1hcnR5 55154
+YmlrZQ== 55155
+IFR1Y2tlcg== 55156
+KGtlcm5lbA== 55157
+IE9wcw== 55158
+Q2xvc2VPcGVyYXRpb24= 55159
+L2RlbW8= 55160
+aWxkYQ== 55161
+IGzDrW5lYQ== 55162
+QVBQSU5H 55163
+IHN1aXRlcw== 55164
+LnZpc2l0VmFySW5zbg== 55165
+dXJ1cw== 55166
+IE1pbnV0ZQ== 55167
+KG1hbmFnZXI= 55168
+IGJ1dHRlcmZseQ== 55169
+IGFwYXJl 55170
+IHdvbHZlcw== 55171
+SldU 55172
+IFNhbG9u 55173
+CWRlbGF5 55174
+LWVzbGludA== 55175
+aXNhdGlvbnM= 55176
+LnJwYw== 55177
+KXwo 55178
+IFNuYXBjaGF0 55179
+L21t 55180
+TU4= 55181
+Y2VyaWVz 55182
+LnRleHRBbGlnbm1lbnQ= 55183
+IEZyYW5rZnVydA== 55184
+IGFkbw== 55185
+KG5ld1ZhbHVl 55186
+KGFjY2Vzcw== 55187
+KEV4cHJlc3Npb24= 55188
+IFNpZ25Jbg== 55189
+IEhhaXRp 55190
+X3Rw 55191
+LnNldFBhcmFtZXRlcg== 55192
+TWludXRl 55193
+IG1hbnVhbHM= 55194
+cmljYW5lcw== 55195
+IFBUUg== 55196
+IE91dGVy 55197
+IGdldGxpbmU= 55198
+b2NhdGlvbnM= 55199
+X0NE 55200
+IEx5b24= 55201
+L2d1aQ== 55202
+X2xpdmU= 55203
+aWRhbg== 55204
+Lmdlb20= 55205
+IGJvcmRlckJvdHRvbQ== 55206
+aW11dGg= 55207
+X2NoZWNrcG9pbnQ= 55208
+IG1ldQ== 55209
+IElydmluZw== 55210
+IHBldXZlbnQ= 55211
+KE1BWA== 55212
+IEFSQ0g= 55213
+IHBvdg== 55214
+LnNvdXJjZWZvcmdl 55215
+IGphbWFpcw== 55216
+IGFyaw== 55217
+IEJhZ2hkYWQ= 55218
+IENMRUFS 55219
+TWVudUJhcg== 55220
+IHRyb2lz 55221
+Q0hFRFVMRQ== 55222
+ICMNCg== 55223
+KENhbGw= 55224
+JG9yZGVy 55225
+KE1hdGVyaWFs 55226
+IGVuY29udHJhZG8= 55227
+JGxpc3Q= 55228
+IE1FVEhPRFM= 55229
+LmJlZ2luVHJhbnNhY3Rpb24= 55230
+X01BRw== 55231
+U3R5bGVTaGVldA== 55232
+IG1ham9ycw== 55233
+IGluZGVmaW5pdGVseQ== 55234
+Y2xlYW51cA== 55235
+IGhvbWVsYW5k 55236
+KGR0bw== 55237
+RGF0ZXM= 55238
+UHJlc2VudGF0aW9u 55239
+IERL 55240
+PXtgLw== 55241
+CUtleQ== 55242
+KEJsb2Nr 55243
+X2NoZWNrYm94 55244
+bmVlZHM= 55245
+IG9uQ29tcGxldGU= 55246
+cmljbw== 55247
+IGdsZWljaA== 55248
+IHht 55249
+T09E 55250
+QmV0dGVy 55251
+IFNRTElURQ== 55252
+LkJvb2s= 55253
+eGFk 55254
+IEdvbmU= 55255
+CWRw 55256
+IGRldm90aW9u 55257
+IHN0bQ== 55258
+IG9ic2Vzcw== 55259
+IEJhY2tlbmQ= 55260
+UXVlcmllcw== 55261
+SWs= 55262
+Ly8qKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq 55263
+IGRpdmlkZW5kcw== 55264
+LnBhcmVudEVsZW1lbnQ= 55265
+fSIpCgo= 55266
+IE1hdGVyaWFsUGFnZVJvdXRl 55267
+Om51bQ== 55268
+IGV4cGxpYw== 55269
+IE9M 55270
+bGVhc3Q= 55271
+T29wcw== 55272
+aW1lbnRvcw== 55273
+IGluc3VyZXJz 55274
+IGhlcm9pYw== 55275
+CWZpZWxkcw== 55276
+LmltZ3Vy 55277
+LmJ0bkNhbmNlbA== 55278
+IERldGVjdGl2ZQ== 55279
+KHNt 55280
+IE11dGFibGVMaXZlRGF0YQ== 55281
+LmxhYg== 55282
+KChb 55283
+IGhhaXJzdA== 55284
+IFRyYW5zYWN0aW9ucw== 55285
+5byA5aeL 55286
+IHN0ZENsYXNz 55287
+dWVudG8= 55288
+R0lT 55289
+X2NvZA== 55290
+SW5zdHJ1Y3Rpb25z 55291
+Q2FsbHM= 55292
+UG9pbnRlclR5cGU= 55293
+IFJ3 55294
+IGFzc29ydG1lbnQ= 55295
+IERJRw== 55296
+K3I= 55297
+X0NFUlQ= 55298
+IGluc3RhYmlsaXR5 55299
+IHZpYg== 55300
+b25hcw== 55301
+IHJva3U= 55302
+YXBlbGxpZG8= 55303
+IGFuZ2w= 55304
+cHJlbmV1cg== 55305
+IGZsdWlkcw== 55306
+aXNlYXNl 55307
+IGRlZWQ= 55308
+cXVpc3Q= 55309
+X0NPTlNUQU5U 55310
+IGVxdWlsaWJyaXVt 55311
+X2RlbGVnYXRl 55312
+IFF1YW50dW0= 55313
+cmVp 55314
+Q2FwYWJpbGl0aWVz 55315
+cmVjdGFuZ2xl 55316
+Pz48 55317
+YWxpZW4= 55318
+IEp1Zw== 55319
+RE5B 55320
+VGlja2V0cw== 55321
+T2NjdXJz 55322
+IEhhd2s= 55323
+LnNldEhvcml6b250YWxHcm91cA== 55324
+XENvbGxlY3Rpb24= 55325
+ZmZpdGk= 55326
+IHJlYXJy 55327
+LnNldFZlcnRpY2FsR3JvdXA= 55328
+IGNhdml0eQ== 55329
+IGFkdWx0ZQ== 55330
+RmFjYWRl 55331
+LXdo 55332
+IExPTA== 55333
+2LA= 55334
+IGdyYW5kcGFyZW50cw== 55335
+U3dpZnQ= 55336
+CXd4 55337
+5omA5pyJ 55338
+aWZlbg== 55339
+ZmZzZXQ= 55340
+QmV5b25k 55341
+Ly99Cgo= 55342
+IHdhZ2Vy 55343
+IGJ1cnk= 55344
+IGNvbW1lbmNl 55345
+cmVnaXN0cm8= 55346
+c2NpZW50 55347
+IFBlcmNlbnQ= 55348
+INC00L7Qu9C2 55349
+KGlkZW50aWZpZXI= 55350
+LnNldE1vZGVs 55351
+IHNlbGRvbQ== 55352
+bnRvbg== 55353
+IGFwcGxpYW5jZQ== 55354
+YW11cw== 55355
+cnlzbGVy 55356
+IHBhbnRpZXM= 55357
+ZW5ndWlucw== 55358
+IG1pbWlj 55359
+IG9uQ2hhbmdlZA== 55360
+IGFsY29ob2xpYw== 55361
+LnJlbG9hZERhdGE= 55362
+Q2hhcmdl 55363
+IEZheA== 55364
+IGpTY3JvbGxQYW5l 55365
+RW1wcmVzYQ== 55366
+IHNoYXR0ZXJlZA== 55367
+eGJh 55368
+Rm9udHM= 55369
+P3M= 55370
+IHBvc3RzZWFzb24= 55371
+cmV0YWlu 55372
+X3JhdGVz 55373
+IHJlcXVlc3RDb2Rl 55374
+LnRvZG8= 55375
+wrRz 55376
+Q0hL 55377
+IEtlZXBpbmc= 55378
+ZW5nZWFuY2U= 55379
+IHZzY29kZQ== 55380
+SVBQSU5H 55381
+RGVmYXVsdENsb3NlT3BlcmF0aW9u 55382
+X3JhaXNl 55383
+IE9jdWx1cw== 55384
+b2dyYW1z 55385
+cmFq 55386
+cGNp 55387
+IGNvcnJvc2lvbg== 55388
+LmhhbmRsZVN1Ym1pdA== 55389
+QWNjZXNzaWJsZQ== 55390
+IFBpYW5v 55391
+bGl0dGxl 55392
+QUNM 55393
+xIdl 55394
+LnVud3JhcA== 55395
+IENvbnZlcnM= 55396
+IExlYmVu 55397
+aW9uZWVy 55398
+IE1lcmNoYW50 55399
+IEpvcmdl 55400
+IGVtYnJhY2luZw== 55401
+IHZlbnRh 55402
+w6FzdA== 55403
+IHZpZW5l 55404
+PFFTdHJpbmc= 55405
+IGV4cGxvc2lvbnM= 55406
+IGRpc3R1cmJlZA== 55407
+LiI8 55408
+bWVtbw== 55409
+IEFib3JpZ2luYWw= 55410
+IGNvbXBsZXRv 55411
+VGV4UGFyYW1ldGVy 55412
+IHVvbWluaQ== 55413
+KGFnZW50 55414
+0YPRgA== 55415
+IFdob2xlc2FsZQ== 55416
+L2Ft 55417
+IEJvb2ttYXJr 55418
+ZHJhZ29u 55419
+IGdsb3Zl 55420
+ICIiKSk7Cg== 55421
+aXZhcmlhdGU= 55422
+bm93cmFw 55423
+SW5DaGlsZHJlbg== 55424
+LkJy 55425
+IGNvbmV4aW9u 55426
+IGJhY2tib25l 55427
+IGVjbGlwc2U= 55428
+IHBlcnNlY3V0aW9u 55429
+JzoKCg== 55430
+L2xpbms= 55431
+IFBlcm8= 55432
+YW5kYXM= 55433
+IFRlaw== 55434
+LiIpOw== 55435
+LWFuYWx5c2lz 55436
+IGVyYWQ= 55437
+TWFyc2hhbA== 55438
+IGFuY2hvcnM= 55439
+b2dlcg== 55440
+IGNvbnZlcmdlbmNl 55441
+c3RpY2t5 55442
+IG5hdmVn 55443
+aW50ZXJu 55444
+X0RFU0NSSVBUT1I= 55445
+IENvbnN1bHRhbnQ= 55446
+ICAgICAgICAgICAgICAgICAgICAgCg== 55447
+IEF1Y2g= 55448
+IGVycmU= 55449
+xZtsaQ== 55450
+IEhvcml6b24= 55451
+Y29sYQ== 55452
+SW5zdGFsbGF0aW9u 55453
+aG90bWFpbA== 55454
+Q05O 55455
+LkNvbGxlY3RvcnM= 55456
+Y2hz 55457
+KHRyYWNl 55458
+IEVuY3J5cHQ= 55459
+IC0tLS0tLQ== 55460
+IEJhc2VDb250cm9sbGVy 55461
+IGFndWE= 55462
+IHJlYWN0aXZl 55463
+aWRs 55464
+IGNsYXNzTmFtZXM= 55465
+CVNlc3Npb24= 55466
+IERvZGdlcnM= 55467
+SGFk 55468
+X2x2 55469
+SXNWYWxpZA== 55470
+IEhFTFA= 55471
+dXR0bw== 55472
+IFZlcmlmaWNhdGlvbg== 55473
+IGdldGVudg== 55474
+X3Bh 55475
+LmJtcA== 55476
+OmY= 55477
+IExvdWlzZQ== 55478
+KCc7 55479
+L3NvY2tldA== 55480
+R3JhbnRlZA== 55481
+LmNhbGVuZGFy 55482
+KElQ 55483
+IFBY 55484
+LlJvb20= 55485
+IHByb2dyYW1t 55486
+ZW5zaQ== 55487
+IHRhYmxlc3Bvb25z 55488
+IGxldmU= 55489
+IG1vc3Ry 55490
+LnRpcG8= 55491
+L2Fu 55492
+KGRp 55493
+IGJpb2Q= 55494
+IGRiQ29udGV4dA== 55495
+IEpTWA== 55496
+CXJlc3VsdHM= 55497
+LkVORA== 55498
+aHRl 55499
+bGlmeQ== 55500
+UHJlY2lzaW9u 55501
+6IqC 55502
+QVJTRVI= 55503
+KWRpZFJlY2VpdmVNZW1vcnlXYXJuaW5n 55504
+YXR0ZW1wdA== 55505
+SVNQ 55506
+JmE= 55507
+X1BPUA== 55508
+IFRhYw== 55509
+IHByZXBhcmVkU3RhdGVtZW50 55510
+INC30LDQv9C40YE= 55511
+IG93aW5n 55512
+LHN0YXJ0 55513
+IHJldmlld2Vy 55514
+IHJzdA== 55515
+IHByb3BUeXBlcw== 55516
+IHJvY2t5 55517
+X2xvY2FsZQ== 55518
+IFN0cmF0ZWdpZXM= 55519
+IFdlYmVy 55520
+LkNhc2NhZGU= 55521
+X2VxdWFsVG8= 55522
+IGNvc2Fz 55523
+IERlbGV0ZXM= 55524
+IE1heGlt 55525
+IHNocmltcA== 55526
+cmV0cmlldmU= 55527
+LkluY2x1ZGU= 55528
+SUdJTg== 55529
+IE9F 55530
+XSk7DQoNCg== 55531
+LmVudW1lcg== 55532
+IGNvZWY= 55533
+X051bGw= 55534
+UmE= 55535
+dHlhcmQ= 55536
+IFNoYXdu 55537
+a2VlcGVycw== 55538
+IHFx 55539
+X3Ni 55540
+b21lbnM= 55541
+IEV4ZWN1dGVz 55542
+IyI= 55543
+VFRZ 55544
+IFZhbHVlVHlwZQ== 55545
+KTsqLwo= 55546
+IEFic29sdXRlbHk= 55547
+IFRvdHRlbmhhbQ== 55548
+L2FydA== 55549
+IGJsZXNzaW5ncw== 55550
+IHN3aWZ0bHk= 55551
+YnVzdGVy 55552
+IGF2aWQ= 55553
+Q09NTQ== 55554
+LHRlbXA= 55555
+IH0/Pgo= 55556
+LWdyb3dpbmc= 55557
+IGRlZXBjb3B5 55558
+QWNr 55559
+ZWdnaWVz 55560
+IF9fKCI= 55561
+IG5vaXI= 55562
+dGVycm9yaXNt 55563
+IGFudGhlbQ== 55564
+YWdlbmN5 55565
+X1BBQ0tBR0U= 55566
+IENsb3N1cmU= 55567
+LnJlZ2lzdHJ5 55568
+IG1hbW1hbHM= 55569
+PEw= 55570
+VUlDb2xsZWN0aW9uVmlldw== 55571
+IExFRHM= 55572
+IHZvbGxleQ== 55573
+KEJ1ZmZlcg== 55574
+X05BVElWRQ== 55575
+bGliYw== 55576
+aW1wbG9kZQ== 55577
+U2Nyb2xsQmFy 55578
+IE1hcmlvbg== 55579
+LkNvbnRyYWN0cw== 55580
+X0F0 55581
+IFdlaW5zdGVpbg== 55582
+Y29tcGFyZVRv 55583
+IEhvc2U= 55584
+ZW5pdHk= 55585
+LmNyZWF0ZVF1ZXJ5 55586
+X3JvdXRlcg== 55587
+IHN0aW11bGk= 55588
+ICsrKQ== 55589
+IENoYW1w 55590
+IEJheWVybg== 55591
+YXNzYQ== 55592
+LnZh 55593
+IGRpc3RyaWJ1dG9ycw== 55594
+IGZpbGVwcml2YXRl 55595
+IGRlcGFydGVk 55596
+Y2NjYw== 55597
+QGNsaWNr 55598
+IEx1bmNo 55599
+Pkw= 55600
+IGJsdWV0b290aA== 55601
+LkRlZXA= 55602
+LXN0YW5kaW5n 55603
+w6FjaWw= 55604
+IHJvb2Z0 55605
+IFBhdGhz 55606
+X2l0ZXJhdGlvbnM= 55607
+SW52YWxpZEFyZ3VtZW50RXhjZXB0aW9u 55608
+LnNwaQ== 55609
+IFVJQWxlcnRBY3Rpb24= 55610
+dXll 55611
+c2lnbmlu 55612
+LnByaW9yaXR5 55613
+IEVzc2F5cw== 55614
+PSd7JA== 55615
+IOi/lOWbng== 55616
+X3NpZ25lZA== 55617
+LnBlcnNpc3Q= 55618
+IHJlZGVzaWdu 55619
+VG9Mb3dlcg== 55620
+IE5ld21hbg== 55621
+PXN0YXJ0 55622
+IElzcmFlbGlz 55623
+YXNpc3dh 55624
+U3BlZWNo 55625
+IG51bWVyb3M= 55626
+aGFuZGxlcnM= 55627
+IFdvbmc= 55628
+INC80LXRgtC+0LQ= 55629
+V2VpZ2h0cw== 55630
+IEd1amFy 55631
+dGVpbA== 55632
+IE5vbmV0aGVsZXNz 55633
+X0VGRkVDVA== 55634
+IHZlY3Q= 55635
+IE9zYw== 55636
+IGNvYXRz 55637
+IFdoZWF0 55638
+IGdlZWs= 55639
+IFBST1BFUlRZ 55640
+d29ybQ== 55641
+X2NvbnN0YW50cw== 55642
+IEJvdWxkZXI= 55643
+IFBhcm0= 55644
+Y29sZQ== 55645
+IGRlZmF1bHRDZW50ZXI= 55646
+IFJvdWdl 55647
+OkE= 55648
+eGNm 55649
+IFZlbmljZQ== 55650
+bWVkaWFu 55651
+IHJlZGVtcHRpb24= 55652
+RnJlc2g= 55653
+IGNvc20= 55654
+IGZpZ3Vy 55655
+IHJlZnVyYg== 55656
+Q09QRQ== 55657
+LmNk 55658
+IGNob3Jkcw== 55659
+IFNndA== 55660
+xY0= 55661
+VlBO 55662
+IFNFTkQ= 55663
+YWluZW4= 55664
+X2FjY291bnRz 55665
+IHRlbnRo 55666
+IGRpc3NvbHZlZA== 55667
+PEFwcA== 55668
+IENvdmVyYWdl 55669
+dXNlU3RhdGU= 55670
+w6lybw== 55671
+Li48 55672
+IOyjvA== 55673
+IGRyZWFtaW5n 55674
+IEZvcmVjYXN0 55675
+LkN1cnNvcnM= 55676
+IHZpc2Fz 55677
+L3NjcmlwdA== 55678
+X3N0YXJ0ZWQ= 55679
+IGdhc3Ry 55680
+KFBSTw== 55681
+XTsvLw== 55682
+LlRpbGU= 55683
+KnNpbg== 55684
+KEFkYXB0ZXI= 55685
+IFNhbmRyYQ== 55686
+X1NJRw== 55687
+YXJkYXNo 55688
+IE92YWw= 55689
+IGRlc2NyaXBjaW9u 55690
+KHNs 55691
+IERlc2NyaXB0b3I= 55692
+IGAk 55693
+L2ZyZWU= 55694
+IEtleXdvcmRz 55695
+IHR1ZG8= 55696
+aW9uYWxl 55697
+KGZvdW5k 55698
+Lnh5eg== 55699
+IEdlbmVyYXRpb25UeXBl 55700
+X0RJU0FCTEVE 55701
+KGFyZWE= 55702
+IGVsaXRlcw== 55703
+IGhvbWJyZQ== 55704
+KG1lc3NhZ2Vz 55705
+IFJhYw== 55706
+IGV4dGluZ3U= 55707
+IEVzdGE= 55708
+b3Bv 55709
+LnZlbA== 55710
+bW91c2VvdXQ= 55711
+IGNvbnZvbHV0aW9u 55712
+IEhhbmRsaW5n 55713
+IGNlaWxpbmdz 55714
+VGVr 55715
+IEFyZWFz 55716
+LndyaXRlcm93 55717
+PFZpZXc= 55718
+IENvcm5lbGw= 55719
+X0JJTg== 55720
+LmludmFsaWQ= 55721
+JycnDQo= 55722
+aWXFvA== 55723
+X1Bvc2l0aW9u 55724
+IGtpZGRpbmc= 55725
+UENPREU= 55726
+IHdhdGNoZXI= 55727
+bG94 55728
+IOKX 55729
+RGF2ZQ== 55730
+X2FsbG93 55731
+IGJpc2V4dWFs 55732
+IHVub3JkZXJlZA== 55733
+IFNjaHdl 55734
+X3NlZ21lbnRz 55735
+IHRlYXJpbmc= 55736
+SU5MSU5F 55737
+IHVuZGVz 55738
+Lmdvb2Rz 55739
+LmNhbQ== 55740
+IExX 55741
+CXdoZXJl 55742
+Q2FsY3VsYXRvcg== 55743
+LXRocmVhdA== 55744
+LWFsZXJ0 55745
+IFN1enVraQ== 55746
+IElQQQ== 55747
+IEF0dGFjaG1lbnQ= 55748
+QUNDRVNT 55749
+KGR0eXBl 55750
+T3Bw 55751
+X3N5bWJvbHM= 55752
+IGRhbnNrZQ== 55753
+bGFnZQ== 55754
+b3JnZXQ= 55755
+cmVzb2x1dGlvbg== 55756
+0LXRhw== 55757
+IFFDb2xvcg== 55758
+IEJhcnJldHQ= 55759
+0LDRhtC40Y8= 55760
+PVwn 55761
+IE5hdkNvbnRyb2xsZXI= 55762
+L3JlZg== 55763
+KGNvdW50cnk= 55764
+X0hEUg== 55765
+IHRlcnNlYnV0 55766
+cGV0aXRpb24= 55767
+IHN1Zg== 55768
+Y3JlZGl0cw== 55769
+4LmM 55770
+eG0= 55771
+IERhdmllcw== 55772
+LnJlZGRpdA== 55773
+IHdvdmVu 55774
+IE9ibA== 55775
+IEtN 55776
+IENvbnNpZGVyaW5n 55777
+ZW5zb3JlZA== 55778
+LnBlcmlvZA== 55779
+IGRkbA== 55780
+JHdw 55781
+IGV4dHJlbWlzdA== 55782
+O1wK 55783
+IGtpbQ== 55784
+YWxlcnM= 55785
+IHNwYW5uaW5n 55786
+IGNvaGVyZW50 55787
+IGNvbnNlZ3U= 55788
+LnRleHRMYWJlbA== 55789
+LmdlbmVyYWw= 55790
+X2Rhc2hib2FyZA== 55791
+0LvQtdC90LjQtQ== 55792
+a2ljaw== 55793
+X1BJRA== 55794
+IEV4dGVuc2lvbnM= 55795
+cmVnZXhw 55796
+IENsYXVzZQ== 55797
+X21vdg== 55798
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA== 55799
+IFJld2FyZA== 55800
+IExFR08= 55801
+QWs= 55802
+PS09LT0tPS0= 55803
+CXBhcnNlcg== 55804
+IG9uemU= 55805
+6YCA 55806
+4oCd44CC 55807
+X2JhbGw= 55808
+KHJocw== 55809
+IGNob3J1cw== 55810
+PGNvdW50 55811
+YXN1cmFibGU= 55812
+IHdpcmtsaWNo 55813
+IEVyaW4= 55814
+IE1TTkJD 55815
+IGV0dGVy 55816
+IENyb24= 55817
+X0ZMT1c= 55818
+ICwNCg== 55819
+IGNhbGlkYWQ= 55820
+IEZpbGVXcml0ZXI= 55821
+CXN0bXQ= 55822
+KEJ5dGU= 55823
+X3BhdA== 55824
+IHRlbGVzY29wZQ== 55825
+IGdyZWVk 55826
+IFRvcnQ= 55827
+KHdyaXRl 55828
+XGFwcGxpY2F0aW9u 55829
+CVJUTFI= 55830
+IENvbmZpZ3VyYXRpb25NYW5hZ2Vy 55831
+VW5peA== 55832
+RW5kVGltZQ== 55833
+SW5jbHVkZXM= 55834
+IEhhcnZlc3Q= 55835
+ZW5iZXJn 55836
+IEF1c3RyYWxpYW5z 55837
+IOuT 55838
+IHJu 55839
+IHJlcHV0YWJsZQ== 55840
+IGJsZW5kaW5n 55841
+VUxBVElPTg== 55842
+IEJyZW5kYW4= 55843
+ZGFk 55844
+IG3DuA== 55845
+IFdvbw== 55846
+X2Rj 55847
+VW5l 55848
+IHJ1ZQ== 55849
+d2l0aGlu 55850
+YW5nZXA= 55851
+IHBvdWNo 55852
+XCIiLA== 55853
+IFNpYw== 55854
+4oCdKSw= 55855
+YWx5emU= 55856
+IEdlZg== 55857
+Y292ZXJz 55858
+IGRibw== 55859
+cmVwbGFjZUFsbA== 55860
+CUxvZ2dlcg== 55861
+VHJ5aW5n 55862
+W3N0YXRl 55863
+LXBpZWNl 55864
+6ZaT 55865
+YmVoYXZpb3I= 55866
+YWxsb3dz 55867
+bHJ0 55868
+X3B5dGhvbg== 55869
+ZXJ0dXJh 55870
+LWNvdW50cnk= 55871
+IFRH 55872
+LlVJTWFuYWdlcg== 55873
+YmVucw== 55874
+YWxleA== 55875
+IEJyZWl0YmFydA== 55876
+YmFj 55877
+IHByZWRpY3Rz 55878
+IGdhYg== 55879
+IGNhcmRpbmFs 55880
+LlRpbWVVbml0 55881
+IFZpc2l0b3I= 55882
+IE1pbmc= 55883
+IGxpdnJl 55884
+IHBhcmVudElk 55885
+cG9ydHVu 55886
+IGRpbWVuc2lvbmFs 55887
+IFZlc3Q= 55888
+ZW5pYw== 55889
+4LM= 55890
+INmH 55891
+IEJMVUU= 55892
+IGl0ZW1Db3VudA== 55893
+IGZlYXRoZXJz 55894
+CXBzdG10 55895
+IFBvbGFy 55896
+ey8v 55897
+dW5kaQ== 55898
+0YPQtg== 55899
+emFy 55900
+RXJyb3JSZXNwb25zZQ== 55901
+7IOB 55902
+UmVwcmVzZW50YXRpb24= 55903
+Kl8= 55904
+K10= 55905
+cHJlcGVuZA== 55906
+ICc+ 55907
+IGxlZ2l0aW1hY3k= 55908
+IG9v 55909
+U2xpbmt5 55910
+IG5hdGlvbmFscw== 55911
+LndvcmRz 55912
+O3A= 55913
+dHJhcA== 55914
+b21hbmlw 55915
+IGN1ZXM= 55916
+IGdyYWR1YXRpbmc= 55917
+IHNlbWFwaG9yZQ== 55918
+Il0pOwoK 55919
+YWNleQ== 55920
+UkVFVA== 55921
+R3JhYg== 55922
+IEZlbGl4 55923
+KElk 55924
+X25laWdoYm9ycw== 55925
+IG1lYW5pbmdsZXNz 55926
+KGRlbA== 55927
+IGplZGVy 55928
+IENvbnRlbnRWYWx1ZXM= 55929
+LmFic29sdXRl 55930
+L2Ns 55931
+IHhi 55932
+ZGF0dW0= 55933
+IHRvcnR1cmVk 55934
+IHJ1YmJpbmc= 55935
+U2NvcmVz 55936
+IPCfmIk= 55937
+IGF2b25z 55938
+IGFtc3RlcmRhbQ== 55939
+RU9T 55940
+SGFs 55941
+IHRydXN0d29ydGh5 55942
+Iz0= 55943
+LkVYVFJB 55944
+IG1hbm8= 55945
+aXNpY2luZw== 55946
+LXN1cHBvcnQ= 55947
+CWN1cnNvcg== 55948
+IFNwbw== 55949
+YWltYXNzYWdl 55950
+TWlzc2lvbg== 55951
+W117Ig== 55952
+IHByaW50ZXJz 55953
+R1JFRU4= 55954
+IHRlZw== 55955
+IGFiZG9taW5hbA== 55956
+IQoKCgoKCg== 55957
+LlNob3J0 55958
+0LDQt9Cy 55959
+IEdpZnRz 55960
+fSIp 55961
+KGJpbmRpbmc= 55962
+eGNl 55963
+4oCR 55964
+aW5mb3M= 55965
+Rm9ybURhdGE= 55966
+IGRhcnQ= 55967
+IGVsZW1z 55968
+KGludg== 55969
+WUw= 55970
+dGlu 55971
+R0VORVI= 55972
+4buv 55973
+IFRha2Vu 55974
+dWNrbGU= 55975
+OmU= 55976
+IHNwZWN0cmFs 55977
+LmJhaWR1 55978
+LycpOwo= 55979
+IGdyZWVkeQ== 55980
+ZXNpb24= 55981
+LCwsLCwsLCw= 55982
+IC8+LAo= 55983
+SW50ZXJuYWxTZXJ2ZXJFcnJvcg== 55984
+TlNOb3RpZmljYXRpb25DZW50ZXI= 55985
+IEFp 55986
+IHNwaXQ= 55987
+IGF1Z21lbnRlZA== 55988
+IHN0YW5kYXJkVXNlckRlZmF1bHRz 55989
+RklOSVRZ 55990
+UmFjZQ== 55991
+OkM= 55992
+IFJFQ09SRA== 55993
+IEhpZ2hsaWdodA== 55994
+ICdg 55995
+IGRlZmljaXRz 55996
+IG5laQ== 55997
+IHJlc2VhcmNoZWQ= 55998
+VGE= 55999
+IGNvcHA= 56000
+LkdldEhhc2hDb2Rl 56001
+KToNCg0K 56002
+T25DbGljaw== 56003
+IFdlbGxpbmd0b24= 56004
+IHJldml2YWw= 56005
+5q+U 56006
+6Zeu 56007
+IE5TUw== 56008
+IGZvcm4= 56009
+IGludMOp 56010
+IEt1d2FpdA== 56011
+X2ZsaXA= 56012
+X2Jv 56013
+X1w= 56014
+IG9jY3VycmVuY2Vz 56015
+IFNjaWVudGlzdHM= 56016
+U1JD 56017
+b2dlbnM= 56018
+aWdyYW50 56019
+UkVNT1RF 56020
+IFNJRA== 56021
+Lm9wdHM= 56022
+dXZl 56023
+KCldKQo= 56024
+IGxpYmVydGFyaWFu 56025
+IEdsaWRl 56026
+bGVzZW4= 56027
+IGZvcm1l 56028
+b3dhbmlh 56029
+IGFubm95ZWQ= 56030
+RGVmcw== 56031
+IEV4ZWN1dG9y 56032
+IGNhc3Rz 56033
+LnNldENoZWNrZWQ= 56034
+IFNoYXJpbmc= 56035
+LlNlcmlhbGl6ZU9iamVjdA== 56036
+IHNlbGVjdG9ycw== 56037
+X09USEVS 56038
+66+4 56039
+KHN1cGVy 56040
+KE9T 56041
+X1ZFUklGWQ== 56042
+aWR1bnQ= 56043
+PGhlYWRlcg== 56044
+IC8+JzsK 56045
+IHZpZMOpbw== 56046
+IE5lZ3Jv 56047
+IExvcmRz 56048
+IFRvdXJz 56049
+IHNvZnRseQ== 56050
+LnJlY2VpdmU= 56051
+IEVSQw== 56052
+IGRhdGFTZXQ= 56053
+QmFkZ2U= 56054
+CUV2ZW50 56055
+IHBlcmw= 56056
+IHt9XA== 56057
+KHNlbnRlbmNl 56058
+T3JVcGRhdGU= 56059
+IGRpbWluaXNo 56060
+UElO 56061
+KGRyYXc= 56062
+LlRvRGF0ZVRpbWU= 56063
+LkVxdWFsVG8= 56064
+KHBpbg== 56065
+LXBlbmNpbA== 56066
+bHVlbnQ= 56067
+IENhbGxlcg== 56068
+IHBsYXlmdWw= 56069
+LScr 56070
+eGNh 56071
+c3dpY2s= 56072
+KXt9Cg== 56073
+fTokew== 56074
+IE1ldGg= 56075
+LmdldENlbGw= 56076
+LmJyZWFr 56077
+IHltYXg= 56078
+PSc8Pw== 56079
+LWpzb24= 56080
+IHByaW1laXJv 56081
+IGluZGljZQ== 56082
+44Kj 56083
+IFVOSVRZ 56084
+KGFi 56085
+0YbQuNC4 56086
+X0hBVkU= 56087
+LXllYXJz 56088
+IEVyZG9nYW4= 56089
+LXN0YWNr 56090
+IGRpc2NoYXJnZWQ= 56091
+IGJyZWF0aHRha2luZw== 56092
+IGdyYXNzcm9vdHM= 56093
+IEFzaWRl 56094
+aGVsbA== 56095
+IHNuYWtlcw== 56096
+L2xvZ291dA== 56097
+IG1pbldpZHRo 56098
+IEhlYXI= 56099
+IFN0b25lcw== 56100
+IFdpc2RvbQ== 56101
+IEV2ZW5pbmc= 56102
+X2JsYW5r 56103
+IFByb21vdGlvbg== 56104
+IE1NTQ== 56105
+IEJhcnM= 56106
+44K3 56107
+bmo= 56108
+X1RJ 56109
+IFNvY2lhbGlzdA== 56110
+IEVH 56111
+LW9wdA== 56112
+PVwiJA== 56113
+KGRpYWxvZw== 56114
+IGJlaG9sZA== 56115
+IGludHJpY2F0ZQ== 56116
+IGVyZWN0aWxl 56117
+RXh0cmFjdG9y 56118
+IHNjbA== 56119
+IGNsYXM= 56120
+KGhpc3Rvcnk= 56121
+aWRlbnRhbGx5 56122
+IHBuZXVt 56123
+UmFuZA== 56124
+IExhcHRvcA== 56125
+Y2FsbGVy 56126
+IEZsb29k 56127
+b3BlbmVk 56128
+dWRkZXI= 56129
+IEdldHRlcg== 56130
+X3dhbGs= 56131
+KHdlaWdodA== 56132
+IEFsZXhhbmRyaWE= 56133
+IHRhYmxlYXU= 56134
+VmFyaQ== 56135
+IC0tLS0tLS0t 56136
+6Iez 56137
+ZXdvcnRoeQ== 56138
+U3BlY2lmaWNhdGlvbg== 56139
+IHRocmVzaG9sZHM= 56140
+KCIiKTsKCg== 56141
+X2ZvdXI= 56142
+IFNhZGx5 56143
+IChfKQ== 56144
+aXNtYXRpYw== 56145
+IEphaWw= 56146
+dG9IYXZlQmVlbkNhbGxlZFdpdGg= 56147
+Lm1hcg== 56148
+IHByZXZpZXdz 56149
+IHNjYWZm 56150
+aW5kaWNhdG9y 56151
+IGNvZGVjcw== 56152
+IGF1dG9j 56153
+KHJ0 56154
+LmdldEhvdXJz 56155
+IFJI 56156
+IFN1cmdl 56157
+aXZhbWVudGU= 56158
+IGNvbnRlbmRlcg== 56159
+Q3BwR2VuZXJpY0NsYXNz 56160
+IDs7Xg== 56161
+OjoqOwo= 56162
+LXJlY29yZA== 56163
+IG1hbWE= 56164
+IGltZ3M= 56165
+LmlzTG9hZGluZw== 56166
+IG5lZWRsZXM= 56167
+IGVuY3VlbnRyYQ== 56168
+b2RhdGE= 56169
+IEJ1ZmZlcmVkSW1hZ2U= 56170
+CWphdmE= 56171
+IFRvbWI= 56172
+VU5JVFk= 56173
+IGxpbmdlcmll 56174
+IEphbWFpY2E= 56175
+YnVncw== 56176
+KioKCg== 56177
+IE1hbw== 56178
+LmJlZ2luUGF0aA== 56179
+IHByb3N0aXR1dA== 56180
+IFBoaWxpcHBpbmU= 56181
+X3Nm 56182
+X3Bvdw== 56183
+IFNjaG8= 56184
+eGRl 56185
+J8OpdA== 56186
+4oCZYXV0 56187
+YWlzb24= 56188
+IEZpbGVJbmZv 56189
+dHVybnN0aWxl 56190
+ZHJlYW0= 56191
+IGlWYXI= 56192
+c3ludGF4 56193
+aWxsaXNlY29uZHM= 56194
+cHJvZmlsZXM= 56195
+X1JFR0VY 56196
+INC00L4= 56197
+IENvbW11bg== 56198
+QmV0 56199
+aXB6aWc= 56200
+IE1lbW8= 56201
+Lmlkcw== 56202
+IHBob3RvZ3JhcGhlZA== 56203
+IGFwcHJveGltYXRpb24= 56204
+OnZhcmlhYmxlcw== 56205
+IG1vZGlmaWNhcg== 56206
+X1NNQUxM 56207
+IEhlbXA= 56208
+IGRpc3Jlc3BlY3Q= 56209
+IGNvbnRlc3RlZA== 56210
+IGlubm9jZW5jZQ== 56211
+aWxsaXM= 56212
+U3ltYm9scw== 56213
+IGluc3BpcmF0aW9uYWw= 56214
+IGRpc2NpcGxpbmFyeQ== 56215
+IFBlcm1hbmVudA== 56216
+IGRlc2Ny 56217
+IFVOREVS 56218
+0YHRiw== 56219
+cHJlc3Nvcg== 56220
+SU1FUg== 56221
+IG1vdW50cw== 56222
+IG1vcmFsbHk= 56223
+X1NFQ09ORA== 56224
+LmZpbGVOYW1l 56225
+44OX 56226
+IGNvbnN0cnVjdHM= 56227
+IFNVTg== 56228
+RVNQ 56229
+RmluYW5jaWFs 56230
+IE51cg== 56231
+w7RsZQ== 56232
+cmljdWxhcg== 56233
+IFVzZXJNYW5hZ2Vy 56234
+aWJpbGlkYWQ= 56235
+IG9uUmVzcG9uc2U= 56236
+IGZpbG1tYWtlcg== 56237
+IGFsb3Q= 56238
+X1RIUkVBRFM= 56239
+IGVudmlyb25tZW50YWxseQ== 56240
+Li4uLi4uLi4uLi4uLi4uLi4uLi4uLi4u 56241
+IHJhc2g= 56242
+IEx5cmljcw== 56243
+IGlwYWlycw== 56244
+QmFja3Vw 56245
+U2lnbnVw 56246
+IEB7Cg== 56247
+SlVuaXQ= 56248
+d29ya2Zsb3c= 56249
+IENvbXBsZXRpb24= 56250
+IGludHVpdGlvbg== 56251
+8J0= 56252
+IG1pYQ== 56253
+IFNuYWNrYmFy 56254
+IFRpbg== 56255
+CWluc3RhbmNl 56256
+IE11c2ljYWw= 56257
+IHdlbGNvbWVz 56258
+IHJlZHJhdw== 56259
+X2NvbG91cg== 56260
+X1JFQUxUWVBF 56261
+X3NpbmNl 56262
+IEJ5dGVBcnJheU91dHB1dFN0cmVhbQ== 56263
+LWRlbWFuZA== 56264
+YXJldGg= 56265
+LnBhZA== 56266
+c2Vr 56267
+JywuLi4K 56268
+LWZpcmU= 56269
+Lnw= 56270
+IG51bWI= 56271
+IERPVUJMRQ== 56272
+QU1BR0U= 56273
+Y2htb2Q= 56274
+LWls 56275
+IGFsYXJtaW5n 56276
+Q29w 56277
+5aSH 56278
+aW52aXRl 56279
+X0lURU1T 56280
+IGxldWs= 56281
+IHJlZWw= 56282
+IGZ1bGZpbGxtZW50 56283
+UmVzdG9yZQ== 56284
+X3Jy 56285
+KGNsYXNzZXM= 56286
+IHBhZ2luZw== 56287
+eW1heA== 56288
+cmFwcGVk 56289
+7ZmU 56290
+fWB9Pgo= 56291
+IEhpcm8= 56292
+KFRSVUU= 56293
+YXN1cmVy 56294
+IGN1ZXI= 56295
+VWJlcg== 56296
+Lk9wZXJhdGlvbg== 56297
+IG9sYW4= 56298
+IHRocmlsbGluZw== 56299
+PFJlc3BvbnNl 56300
+IEZlbWlu 56301
+IHRyYXZlcnNhbA== 56302
+IHBvYw== 56303
+IHNldFN0YXR1cw== 56304
+ZGVjbGFy 56305
+c3RkYWZ4 56306
+IGFkZGljdGl2ZQ== 56307
+IEJ0bg== 56308
+IGV4cGxvc2l2ZXM= 56309
+IENvb2tpbmc= 56310
+IFBsYWludA== 56311
+IGFjY3VtdWxhdG9y 56312
+IEFwcG9pbnRtZW50 56313
+LHBhc3N3b3Jk 56314
+IEZBUg== 56315
+bHVldA== 56316
+RnVydGhlcm1vcmU= 56317
+ZGVjbHNwZWM= 56318
+X1N0YXRpY3M= 56319
+LkRpY3Rpb25hcnk= 56320
+Ij4nLg== 56321
+CXZhbGlk 56322
+IiIs 56323
+SW5zdHJ1bWVudA== 56324
+Pko= 56325
+IG5vc3Ry 56326
+IFJpZnQ= 56327
+X1BvcnQ= 56328
+IHZlY2Vz 56329
+W1sn 56330
+IHJhbGxpZXM= 56331
+LXNlcmllcw== 56332
+IHZ2 56333
+LnVj 56334
+IHJ0bg== 56335
+U3RhdGVDaGFuZ2Vk 56336
+KGlucw== 56337
+IENsYQ== 56338
+LS0tLS0tLS0tLS0tCg== 56339
+Y3Vz 56340
+IFJlbG9hZA== 56341
+Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0= 56342
+LnNlY29uZHM= 56343
+X2Rlc3RpbmF0aW9u 56344
+IHNjcmV3ZWQ= 56345
+PmM= 56346
+VGhpY2tuZXNz 56347
+RGVzaWduZXI= 56348
+IGdyaWRz 56349
+bsSF 56350
+KGNvb2tpZQ== 56351
+VHJpcA== 56352
+LU1vYmlsZQ== 56353
+IHZvbGw= 56354
+IGdlbml0YWw= 56355
+IGNvbmZpc2M= 56356
+IENvbmZlZGVyYXRl 56357
+IHdlYlZpZXc= 56358
+IG1pc2U= 56359
+IGNsZXI= 56360
+KHNlbGVjdGlvbg== 56361
+JGRhdGU= 56362
+IHNoYXJwZW4= 56363
+cmFnZW4= 56364
+QW5kVXBkYXRl 56365
+IHJlbWl4 56366
+IGh0b25z 56367
+Ulc= 56368
+TVBJ 56369
+IHJldHJpZXZhbA== 56370
+IHJpY2hlc3Q= 56371
+LkRlY29kZQ== 56372
+OmluaXRDb21wb25lbnRz 56373
+IFRWYWx1ZQ== 56374
+U2FpbnQ= 56375
+QGluY2x1ZGU= 56376
+IFBFUlNPTg== 56377
+LnNlcA== 56378
+IExEQVA= 56379
+Z2Jh 56380
+IGdyb8OfZQ== 56381
+IHJlbGlhYmx5 56382
+IERGUw== 56383
+LmdldEl0ZW1JZA== 56384
+IHByw6lzZW50 56385
+LmdldFRva2Vu 56386
+IGNoaW5lc2U= 56387
+IE1lYWw= 56388
+WU9V 56389
+Ij48Pz0k 56390
+KGNob2ljZQ== 56391
+IHBoZW5vbWVuYWw= 56392
+IFN0ZWVsZQ== 56393
+wqI= 56394
+IFBhY2thZ2VNYW5hZ2Vy 56395
+IFN5bmRyb21l 56396
+RGlyZWN0b3JpZXM= 56397
+aXZhcg== 56398
+LnVuc3Vic2NyaWJl 56399
+bGllw58= 56400
+bW9ubw== 56401
+X2Nvbm5lY3Rpb25z 56402
+X3ByZXNlbmNl 56403
+eW55 56404
+S25pZmU= 56405
+IGdyb292ZQ== 56406
+IHNjb29w 56407
+VEVNUEw= 56408
+YXNha2k= 56409
+LmhhbWNyZXN0 56410
+IGhhcmJvcg== 56411
+Y292 56412
+Kno= 56413
+IFh1 56414
+IHByb3Bvc2luZw== 56415
+IEZSQU1F 56416
+Q2hpcA== 56417
+IEVlbg== 56418
+IOyghA== 56419
+IHNtYXNoZWQ= 56420
+VW5zaWduZWQ= 56421
+KC4u 56422
+X2ZpbmlzaGVk 56423
+IGdldFN0YXR1cw== 56424
+IGZpYnJl 56425
+QXhlcw== 56426
+ICcvJyw= 56427
+eWFyZHM= 56428
+TURC 56429
+LWJz 56430
+aW50ZW50 56431
+IGJvb3N0ZXI= 56432
+LmRzdA== 56433
+LkRpYWxvZ1Jlc3VsdA== 56434
+IE1ldHM= 56435
+IGJlYXN0cw== 56436
+aW5jcmVtZW50cw== 56437
+LmthZmth 56438
+VUlBbGVydEFjdGlvbg== 56439
+LWV2ZXI= 56440
+X2JhbA== 56441
+IGhlbHQ= 56442
+IGZyZW9wZW4= 56443
+IFJlY3J1aXRtZW50 56444
+bGljdHM= 56445
+Zm9yZ2V0dGFibGU= 56446
+RGlzcGxheWVk 56447
+X1ZFTkRPUg== 56448
+Q29sbGVnZQ== 56449
+QVNDSUk= 56450
+IFNpbms= 56451
+IE1hY2Vk 56452
+IGN0b3I= 56453
+IGVzdMOjbw== 56454
+IFdpbmRzb3I= 56455
+X2NoZWNrZWQ= 56456
+X2RldGVjdA== 56457
+YXR0ZW5k 56458
+IHhtaW4= 56459
+IGluZGlzcGVucw== 56460
+L3BlcnNvbg== 56461
+X0RFVEFJTFM= 56462
+UkVESVQ= 56463
+SGF5 56464
+YWJvbGlj 56465
+IGZ1bmN0b29scw== 56466
+aWFpcw== 56467
+RlRQ 56468
+X1JlY3Q= 56469
+IEluZHk= 56470
+LXB1YmxpYw== 56471
+b2hhbg== 56472
+X21hbmFnZQ== 56473
+Q29tcHV0ZWQ= 56474
+7JeQ7ISc 56475
+IFNsaWNl 56476
+IGdheXM= 56477
+IGFsZXg= 56478
+YWl0cw== 56479
+IHJlY2VpcHRz 56480
+U1BFQw== 56481
+IEJFRk9SRQ== 56482
+IFByZWZpeA== 56483
+X3Zpc2l0 56484
+IHNwdW4= 56485
+TEVURUQ= 56486
+IGRvdw== 56487
+IGxlZ2FsaXphdGlvbg== 56488
+YWJiYWdl 56489
+IGNsYXc= 56490
+IFRjbA== 56491
+eGltYQ== 56492
+IGNvdmVydA== 56493
+Tmk= 56494
+IHRoYW5rZWQ= 56495
+IGFsbGVyZ2lj 56496
+bG92ZXI= 56497
+IEJyZWFzdA== 56498
+LmlzQWN0aXZl 56499
+IGdlYmVu 56500
+VkVSU0U= 56501
+Wk9ORQ== 56502
+CVJlc3VsdA== 56503
+JykuJw== 56504
+IGdlZQ== 56505
+IFNlcmlvdXNseQ== 56506
+cHVycGxl 56507
+IEVzcGHDsWE= 56508
+aWZpZQ== 56509
+LXBhY2s= 56510
+UGFydGljbGVz 56511
+ICcvLi4v 56512
+IG11bHRpbWVkaWE= 56513
+YXV0b2NvbXBsZXRl 56514
+IFRIUkVBRA== 56515
+IHJlZmVyZW5jaW5n 56516
+cmVldGluZ3M= 56517
+IHF1b3Rpbmc= 56518
+IGFzc2lzdGFudHM= 56519
+amVuaXM= 56520
+aGFwcHk= 56521
+IGxheXM= 56522
+bGliZnQ= 56523
+eGRh 56524
+IGZvdQ== 56525
+cGlhcg== 56526
+UmVjb21tZW5kZWQ= 56527
+IEJpcmRz 56528
+IFdhcnJhbnR5 56529
+w7xybGljaA== 56530
+LklOVklTSUJMRQ== 56531
+X2FuY2hvcg== 56532
+4oCdOg== 56533
+RmFudA== 56534
+X2RlZnM= 56535
+IGRyZWFtZWQ= 56536
+IF9fX19fX18s 56537
+cGxh 56538
+w6RmdA== 56539
+b2RrYQ== 56540
+xLFz 56541
+IGRhZGR5 56542
+c2NoZW1hcw== 56543
+PXplcm9z 56544
+IHJhdHQ= 56545
+CQkgICAgCQ== 56546
+aWVq 56547
+IGRyaWxscw== 56548
+LTw/ 56549
+QUJB 56550
+Lmxpbmtz 56551
+IERlcGVuZGVuY3lQcm9wZXJ0eQ== 56552
+Lmxvdw== 56553
+aGVlZA== 56554
+X0JMQUNL 56555
+L0FkbWlu 56556
+IGFtaWdvcw== 56557
+aW5nZWQ= 56558
+IE1pY2tleQ== 56559
+LkdldEF4aXM= 56560
+IE5lZWRlZA== 56561
+IEVuY29kZQ== 56562
+w6lyaWV1cg== 56563
+IE1hbmlsYQ== 56564
+IENvbGxlZw== 56565
+YWRhc3Rybw== 56566
+IGNoaWNhcw== 56567
+5L2g 56568
+IG9uZXNlbGY= 56569
+eGVh 56570
+ZHVr 56571
+IGd3 56572
+dXJnaWNhbA== 56573
+IENlbnRybw== 56574
+IGFlcw== 56575
+ZmVlbA== 56576
+IHRyb3Q= 56577
+IGVsZWN0cm9ucw== 56578
+IHJpdHVhbHM= 56579
+IEJpbGRlcg== 56580
+IGRlY29yYXRl 56581
+IFRva2VuVHlwZQ== 56582
+IGx1cmU= 56583
+QXBpQ2xpZW50 56584
+Z3JwYw== 56585
+IE9yYw== 56586
+Q29udGV4dE1lbnU= 56587
+UFJFRklY 56588
+LXRoZW1lZA== 56589
+X2ZpZm8= 56590
+LklucHV0U3RyZWFtUmVhZGVy 56591
+X3NwZWNpZmlj 56592
+IERTUA== 56593
+PXN1YnByb2Nlc3M= 56594
+L3NoZQ== 56595
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAo= 56596
+IGRhdW50aW5n 56597
+IGNsZWFycw== 56598
+IE1vdmVz 56599
+IG15c3Rlcmllcw== 56600
+LWJlc3Q= 56601
+IFZ1 56602
+b2xpYg== 56603
+IElzaA== 56604
+IGNhcmFjdA== 56605
+KExhYmVs 56606
+IERlYmlhbg== 56607
+IEV4cGVyaW1lbnRhbA== 56608
+IGNhdg== 56609
+LlRvRGVjaW1hbA== 56610
+IFJob2Rlcw== 56611
+IEhhd2tz 56612
+IGZvdW50YWlu 56613
+X1BFTkRJTkc= 56614
+X1NV 56615
+IHd4U3RyaW5n 56616
+IFBldw== 56617
+LmNsaQ== 56618
+0YTQvtGA0Lw= 56619
+LndlYmtpdA== 56620
+X0NO 56621
+IDs7PQ== 56622
+CW5hbWVzcGFjZQ== 56623
+IHdQYXJhbQ== 56624
+IHB1cHBpZXM= 56625
+IHRlcm1pbm9sb2d5 56626
+IGFkZGljdGVk 56627
+IGZvcmdl 56628
+IEdhcmRuZXI= 56629
+IHBlc3NvYQ== 56630
+CVJlc3VsdFNldA== 56631
+IGF0dGVudQ== 56632
+YW5nZW1lbnQ= 56633
+X2luZHM= 56634
+Q2hp 56635
+YXJpdGg= 56636
+RW5jb2RpbmdFeGNlcHRpb24= 56637
+bW91c2Vkb3du 56638
+IEJFVFdFRU4= 56639
+d2VpZ2g= 56640
+IkZvcg== 56641
+LmRk 56642
+aXRlbA== 56643
+WU8= 56644
+IERpY2U= 56645
+dW5peA== 56646
+IE9idA== 56647
+IENlZGFy 56648
+IHNwZWNpbWVucw== 56649
+cG9ybg== 56650
+IHVub2ZmaWNpYWw= 56651
+6buR 56652
+c29tZXRpbWVz 56653
+IEJ1bGxk 56654
+dHJ1c3Q= 56655
+Z2V0UmVzdWx0 56656
+IHNtb2tlcnM= 56657
+IHNhbmR3aWNoZXM= 56658
+IGV4aA== 56659
+IEZhZGU= 56660
+X0RD 56661
+IG1hc3R1cmJhdGlvbg== 56662
+Zm9ydGF3ZXNvbWU= 56663
+VEhJTkc= 56664
+X2FuZHJvaWQ= 56665
+IGRlZGlj 56666
+LXNlbnNpdGl2ZQ== 56667
+IG5hY2t0 56668
+TElCSU5U 56669
+IGFnb24= 56670
+IERJU0FCTEU= 56671
+b25lc2lh 56672
+Ymllcw== 56673
+IFpJUA== 56674
+IGhhdW50ZWQ= 56675
+IGN1aWQ= 56676
+L2NhcnQ= 56677
+a29z 56678
+CVJUTFU= 56679
+IGhpbmRlcg== 56680
+IGFkaXBpc2ljaW5n 56681
+SUVOQ0U= 56682
+LmJhbms= 56683
+IEN5cHJ1cw== 56684
+bWl4ZWQ= 56685
+LmN5 56686
+LXNpbmdsZQ== 56687
+PGxlbg== 56688
+Q29taW5n 56689
+IGZhdWx0cw== 56690
+IGZvcmVzZWU= 56691
+Z2V0bGluZQ== 56692
+ImE= 56693
+IGJyYWc= 56694
+IGRpc2Nz 56695
+IHJpcGU= 56696
+IG7DpnI= 56697
+IEdH 56698
+U0hPVA== 56699
+ZGVyYWJhZA== 56700
+KGVkaXQ= 56701
+VG9MZWZ0 56702
+W10pOwo= 56703
+IGRvR2V0 56704
+dmF0dXJl 56705
+TmVlZGVk 56706
+IENoZW5n 56707
+Y2Np 56708
+RUZJ 56709
+IGZldWQ= 56710
+IGx1bmFy 56711
+LlNoYXBl 56712
+Tm9ib2R5 56713
+X1RSSUdHRVI= 56714
+Q3k= 56715
+Z3JvdW5kQ29sb3I= 56716
+IFJlbW92YWw= 56717
+KGJvdHRvbQ== 56718
+JG1zZw== 56719
+U0NJSQ== 56720
+cml0eg== 56721
+IGZyZW50ZQ== 56722
+IGNvbXBvc3Q= 56723
+YW5zd2VyZWQ= 56724
+IFJvZHI= 56725
+X0hUTUw= 56726
+IHNpbGhvdWV0dGU= 56727
+IFFVRVNU 56728
+IENhdGhlZHJhbA== 56729
+LkNvbW1lbnQ= 56730
+IE1u 56731
+LW5ldHdvcms= 56732
+LmdldEZpbGU= 56733
+LmdlbmVyYXRvcg== 56734
+IENoZWNrb3V0 56735
+X3pvb20= 56736
+IGVuY29kZVVSSUNvbXBvbmVudA== 56737
+X1RD 56738
+c29t 56739
+IFNlcmll 56740
+IGJhc2VVUkw= 56741
+CXJ1bg== 56742
+IGh1aA== 56743
+LnNlbGVjdGVkSW5kZXg= 56744
+IFNUQVI= 56745
+fi1+LQ== 56746
+YWJjZGVmZ2g= 56747
+Lm1hcHBpbmc= 56748
+PWRhdGV0aW1l 56749
+Q29vbA== 56750
+bmlt 56751
+IERpcmVjdGl2ZQ== 56752
+RmVkZXJhbA== 56753
+IG1lbnVJdGVt 56754
+INCQ 56755
+QW5uYQ== 56756
+IFJlY3JlYXRpb24= 56757
+cnlhbg== 56758
+LWFnZWQ= 56759
+emVyYmFp 56760
+4oCm4oCdCgo= 56761
+Y2FtcG8= 56762
+IG1pbmlhdHVyZQ== 56763
+ZGV0YWNo 56764
+bWVhbmluZw== 56765
+X2VtcA== 56766
+UGVhaw== 56767
+IGJjbQ== 56768
+IEh1bmdhcmlhbg== 56769
+IENhc2NhZGU= 56770
+IHNhY2tz 56771
+IHRydW5jYXRl 56772
+IOKWiOKWiA== 56773
+IHdoYWxlcw== 56774
+IHNvcnRhYmxl 56775
+IGFzc2VydHM= 56776
+IHNlYWxz 56777
+b2N5dGVz 56778
+XSkpKQo= 56779
+YWxhcm0= 56780
+cmVzc2luZw== 56781
+KHNpZ25hbA== 56782
+IGVtcGVyb3I= 56783
+CU9O 56784
+Y29tbWl0dGVl 56785
+IHRyaWxvZ3k= 56786
+LlRyYW5zYWN0aW9uYWw= 56787
+R3Jvdw== 56788
+X3VhcnQ= 56789
+IHN3aW5ncw== 56790
+IHNwZWN0YWNsZQ== 56791
+4oCZYXY= 56792
+IFNlbnRpbmVs 56793
+INmE 56794
+IFRvdQ== 56795
+IHdpZG93 56796
+Z2VyYWxk 56797
+LHVpbnQ= 56798
+IHVudXN1YWxseQ== 56799
+PENhcmQ= 56800
+IFJlc3RhcnQ= 56801
+bW9y 56802
+44GC44KK 56803
+aXhlZFJlYWxpdHk= 56804
+IGhhbmRndW4= 56805
+4pSA4pSA4pSA4pSA4pSA4pSA4pSA4pSA 56806
+IGxpdGhpdW0= 56807
+UmVzb2x2ZQ== 56808
+Z2V0Qnl0ZXM= 56809
+L2Z1bmN0aW9ucw== 56810
+IHRhY2tsaW5n 56811
+T3V0bGluZWQ= 56812
+IH08Lw== 56813
+IFNleG8= 56814
+IEFuaw== 56815
+IHJhdGlvbmFsZQ== 56816
+cmVtb3ZlQXR0cg== 56817
+IG11bmljaXBhbGl0eQ== 56818
+IGFzc2F1bHRz 56819
+Q0hPT0w= 56820
+IFJlZQ== 56821
+IGJhdWQ= 56822
+pqw= 56823
+IGVuaGFuY2Vz 56824
+INC/0YDQtdC0 56825
+IGNvbmNlc3M= 56826
+Lmluc3RhZ3JhbQ== 56827
+LmdldFJlc3BvbnNl 56828
+c2VnbWVudHM= 56829
+IHdlbGxiZWluZw== 56830
+fTsKCgoK 56831
+aHVuZw== 56832
+44OG 56833
+IHJlbm92YXRlZA== 56834
+LmV4cGVjdGVk 56835
+IHJhZGlhbA== 56836
+IGNvbW11bmFs 56837
+dXNlck1hbmFnZXI= 56838
+K2E= 56839
+IGZ1bmRhbWVudGFscw== 56840
+LlRI 56841
+6II= 56842
+IHJhbnQ= 56843
+IFN0cmF3 56844
+IE9sZURi 56845
+YXppbw== 56846
+IGhhbWJ1cmc= 56847
+IHBhaW50cw== 56848
+IHRodW1icw== 56849
+IE51bGxQb2ludGVyRXhjZXB0aW9u 56850
+IGdyb3VwZQ== 56851
+IEhvbWVDb21wb25lbnQ= 56852
+IGJhbGxv 56853
+IElOSVRJQUw= 56854
+X2FyZQ== 56855
+IFBlcw== 56856
+dXJzZXM= 56857
+IGJhcmR6bw== 56858
+LmdldExlbmd0aA== 56859
+YW1vdG8= 56860
+Lm5vdGlmeURhdGFTZXRDaGFuZ2Vk 56861
+aWVuZXM= 56862
+ZW56aWU= 56863
+X2VtYg== 56864
+dW1uaQ== 56865
+c21vb3Ro 56866
+IERybw== 56867
+cGFzdGU= 56868
+IE5hcnI= 56869
+LS0tLQoK 56870
+z4k= 56871
+IEF1dG9y 56872
+IG91dHJvcw== 56873
+IExBQkVM 56874
+LnBh 56875
+LlN0dWRlbnQ= 56876
+KFhtbA== 56877
+IGV0aG5pY2l0eQ== 56878
+IEl2eQ== 56879
+44KI 56880
+X2Zha2U= 56881
+Pyg6 56882
+dXBsb2FkZWQ= 56883
+Z2V0TWFuYWdlcg== 56884
+LVFhZWRh 56885
+b2RpYWM= 56886
+Q29ubm9y 56887
+aWhhbg== 56888
+TUFU 56889
+KG1pZA== 56890
+IEFsYmFu 56891
+IHNvaXI= 56892
+Q29tYm8= 56893
+IFB1YmxpY2F0aW9u 56894
+b3BvdWxvcw== 56895
+cGlz 56896
+IHRlbXBsZXM= 56897
+b25neWFuZw== 56898
+X2NsaWVudHM= 56899
+IHJvZHM= 56900
+IHhj 56901
+aWprZW4= 56902
+IHJlYXA= 56903
+IOS4i+WNiA== 56904
+CWNvbm5lY3Q= 56905
+Rm9jdXNlZA== 56906
+LGNvdW50 56907
+aWV0ZXQ= 56908
+IGhhY2lh 56909
+X2FsbG9jYXRvcg== 56910
+IHRveGljaXR5 56911
+KHNlcXVlbmNl 56912
+IG51ZXN0cm9z 56913
+IFByaW5jaXBsZXM= 56914
+IGxsZQ== 56915
+YWxhcmlh 56916
+LndyaXRlU3RyaW5n 56917
+IEFGTA== 56918
+aWZuZGVm 56919
+IERvcw== 56920
+xZtjaWU= 56921
+IEFnZ3JlZ2F0ZQ== 56922
+IHNhY3JpZmljZXM= 56923
+X29mZnNldHM= 56924
+bGRi 56925
+IGxhdGNo 56926
+IGZ1bGxzY3JlZW4= 56927
+bWlzc2l2ZQ== 56928
+T1BUSU9OUw== 56929
+IFRlbGVwaG9uZQ== 56930
+IGFyc2VuYWw= 56931
+amVqZXI= 56932
+IEhvc3A= 56933
+IGZhdm91cml0ZXM= 56934
+cml2ZQ== 56935
+LmluY3JlbWVudA== 56936
+IGJ2 56937
+IEZhbnRhc3RpYw== 56938
+LnNheQ== 56939
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA= 56940
+IG1lZGljaW5hbA== 56941
+IERST1A= 56942
+IHBpdHk= 56943
+bWV0aXM= 56944
+IHdvbGxlbg== 56945
+IGJlZg== 56946
+X0Js 56947
+ID4+Cgo= 56948
+Ym93ZXI= 56949
+IHN3YXBwZWQ= 56950
+L2luc3RhbGw= 56951
+IHNpbmtz 56952
+ZXRyaXpl 56953
+IGRlY2xpbmVz 56954
+CW15c3Fs 56955
+IENTdHJpbmc= 56956
+IE1vdGlvbkV2ZW50 56957
+Lkxhbmd1YWdl 56958
+Um9hZA== 56959
+0YLQtdGA 56960
+YXNjaW1lbnRv 56961
+JykpLT4= 56962
+LmFib3V0 56963
+KGVkaXRvcg== 56964
+IFJhdGluZ3M= 56965
+aW5jb21l 56966
+xaFl 56967
+LmRlcXVldWVSZXVzYWJsZUNlbGw= 56968
+IEF1c3RyaWFu 56969
+IHN1bGxh 56970
+IFRyaWJ1bmFs 56971
+IERpZG4= 56972
+0L7QstCw0YA= 56973
+IGluc3BlY3Rpb25z 56974
+Qm9zcw== 56975
+IGNvY2t0YWlscw== 56976
+IGFwb2xvZ2l6ZWQ= 56977
+X3N1YnBsb3Q= 56978
+b3BhbA== 56979
+Kz0o 56980
+IHJlc29uYW5jZQ== 56981
+aWJ1 56982
+IOumrA== 56983
+cm9tYQ== 56984
+cmVzZXJ2ZQ== 56985
+cGxz 56986
+IFRhaA== 56987
+YXhpZXM= 56988
+T1BMRQ== 56989
+IERhcnJlbg== 56990
+IFpvbWJpZQ== 56991
+X01hcA== 56992
+IF0pCgo= 56993
+IFFp 56994
+IFNhaWw= 56995
+IHJlc3RyaWN0aXZl 56996
+IGVyb3Npb24= 56997
+LXBhcg== 56998
+V0hJVEU= 56999
+IG9sZHU= 57000
+IGFwZXJ0dXJl 57001
+IGJpdGNvaW5z 57002
+dGV4dG8= 57003
+IENvbWNhc3Q= 57004
+IHRpbWVsZXNz 57005
+ZW5raW5z 57006
+IGZlZWRlcg== 57007
+L3RtcA== 57008
+cmVzZGVu 57009
+Kydf 57010
+LkRlc3Ryb3k= 57011
+IMOnb2s= 57012
+IERPQ1VNRU5U 57013
+LmxuZw== 57014
+LnRhZ05hbWU= 57015
+IGt1bGxhbg== 57016
+ZWdyYXRl 57017
+ICgqLg== 57018
+57yW6L6R 57019
+IGhhbmRzaGFrZQ== 57020
+c29j 57021
+X2dlb21ldHJ5 57022
+IERhbWFzY3Vz 57023
+TWlub3I= 57024
+IEthZmth 57025
+7Jes 57026
+RmxvcmlkYQ== 57027
+X2NvbXB1dGU= 57028
+LmV4cHI= 57029
+IHBhcmFsbGU= 57030
+IERpYXo= 57031
+Y2ly 57032
+W3RhcmdldA== 57033
+IGpva2luZw== 57034
+IGdsb3I= 57035
+KHNldHE= 57036
+X2hhbmRsZXJz 57037
+SGFuZw== 57038
+IGZlcnI= 57039
+cmltaW5hbA== 57040
+CSAgICAJCQ== 57041
+ZW50aWVz 57042
+ZGVmaW5lcw== 57043
+LXRheA== 57044
+anNvbnA= 57045
+IFVQUw== 57046
+bWV0cm8= 57047
+X187Cg== 57048
+IFVnYW5kYQ== 57049
+XSkpOgo= 57050
+X3Rk 57051
+eGFl 57052
+bHc= 57053
+Lk9T 57054
+IExvZ2dlZA== 57055
+YWNpZA== 57056
+IE1heW8= 57057
+YXNwZWN0 57058
+IHZhZ2luYWw= 57059
+IGluaXRpYWxpemluZw== 57060
+IHN0ZXJvaWRz 57061
+ZmljdGlvbg== 57062
+R1JF 57063
+Z2VuZA== 57064
+IGxpYWJpbGl0aWVz 57065
+IExldHM= 57066
+TWVjaA== 57067
+KG5j 57068
+KGNoYW5nZQ== 57069
+IGNvbm5lY3RvcnM= 57070
+Oms= 57071
+IHRhc3Q= 57072
+ISIpOwoK 57073
+dGhpbmdz 57074
+cm9waHk= 57075
+bHVldG9vdGg= 57076
+IFNpZ25VcA== 57077
+LmN0cmw= 57078
+IHRoZXJlaW4= 57079
+b3JkYQ== 57080
+LmVzY2FwZQ== 57081
+aWdhdG9y 57082
+IHBldHJvbA== 57083
+IHNwZWNpbWVu 57084
+IGRlYnV0ZWQ= 57085
+LVBybw== 57086
+IGNyaXNlcw== 57087
+LmFkZFZpZXc= 57088
+64+Z 57089
+LWRvb3I= 57090
+IG1vbmV0 57091
+IG1pbGxpcw== 57092
+IHZpZXI= 57093
+SW50ZXJuYWxFbnVtZXJhdG9y 57094
+IGFkbWlucw== 57095
+IExhaXI= 57096
+emlu 57097
+Z2V0UXVlcnk= 57098
+dW1ibGVz 57099
+TElNSVQ= 57100
+IFZpZw== 57101
+X3Nvbmc= 57102
+PENoYXJhY3Rlcg== 57103
+Ojou 57104
+X2hvbQ== 57105
+X2Jw 57106
+IFN1cGVydmlzb3I= 57107
+c3VibWlzc2lvbg== 57108
+YWJpbGU= 57109
+IG5vaQ== 57110
+T3JDcmVhdGU= 57111
+IHBlZWw= 57112
+IG9uU3RhcnQ= 57113
+IHNlbnRpbWVudHM= 57114
+dmVoaWNsZXM= 57115
+IGNsYXNzcm9vbXM= 57116
+IHN6ZXI= 57117
+IGJlbmRpbmc= 57118
+IGxvbmdldml0eQ== 57119
+IGFjbA== 57120
+IEFsZXBwbw== 57121
+IFVN 57122
+IFJpY2h0 57123
+IG11bHRpcHJvY2Vzc2luZw== 57124
+RE9NQUlO 57125
+IiwiKw== 57126
+X1lFQVI= 57127
+IHNjcmFwZQ== 57128
+IHNvbGl0YXJ5 57129
+ICJdIjsK 57130
+L2Vycm9ycw== 57131
+7J6s 57132
+nOugpQ== 57133
+YmV0dGVy 57134
+CW51bWJlcg== 57135
+IExG 57136
+IEFjcm9zcw== 57137
+UHViTWVk 57138
+XCIi 57139
+IEV4Y2VsbGVuY2U= 57140
+IHVzYW5kbw== 57141
+IFVJUA== 57142
+QWN0aXZpdHlJbmRpY2F0b3I= 57143
+X1ZPSUQ= 57144
+IGJyZWVkcw== 57145
+772l 57146
+dWVzdGFz 57147
+IFRyZWFzdXJl 57148
+dXN0cmFsaWFu 57149
+KGZhY2U= 57150
+IFRlbm5pcw== 57151
+CUludA== 57152
+IEhhbnNlbg== 57153
+57U= 57154
+Okk= 57155
+IOKclA== 57156
+R1JBWQ== 57157
+T1VTRQ== 57158
+IGhlcGF0 57159
+oO0= 57160
+QUlS 57161
+w7PFvA== 57162
+IHF1ZXVlZA== 57163
+dmluY2lh 57164
+IENocm9taXVt 57165
+IGNvbXBldGVuY2U= 57166
+dW5nYWw= 57167
+aWxsaQ== 57168
+IGdldEJ5 57169
+IEZpbmRlcg== 57170
+IGluY2FwYWJsZQ== 57171
+IHNhZGQ= 57172
+IGNpdGVz 57173
+IENodXJjaGlsbA== 57174
+U2Rr 57175
+TW9yZW92ZXI= 57176
+QXNwTmV0 57177
+KEZsb2F0 57178
+JHBhc3N3b3Jk 57179
+IENvbm5vcg== 57180
+LXNlc3Npb24= 57181
+X2Rt 57182
+Kikp 57183
+IGRldXRzY2g= 57184
+IE5Y 57185
+IHBlcmtz 57186
+X1NPUlQ= 57187
+X1RPT0w= 57188
+X1ZJU0lCTEU= 57189
+LmFzcA== 57190
+5oiW 57191
+IEJyZWF0aA== 57192
+RGV0ZWN0 57193
+IER1ZWw= 57194
+LmNtYg== 57195
+W2l0 57196
+LlNldEJvb2w= 57197
+IG5hcmNpc3M= 57198
+IGFiaWRl 57199
+IGVqZW1wbG8= 57200
+IOKElQ== 57201
+IG1vcm5pbmdz 57202
+IGNvbXB1dGVz 57203
+LnNzbA== 57204
+anQ= 57205
+IG11Y2hvcw== 57206
+X1NT 57207
+W2VuZA== 57208
+IGJhc2lu 57209
+IGFsZ3Vub3M= 57210
+IENyb2F0aWE= 57211
+bGluZXdpZHRo 57212
+KHRhZ3M= 57213
+KGhpZGRlbg== 57214
+w61jaW8= 57215
+IGFwYXI= 57216
+INC2 57217
+5LiO 57218
+LmZvb2Q= 57219
+IFJ1cmFs 57220
+IGJyZWFkdGg= 57221
+5b2x 57222
+KHNlc3M= 57223
+KyIp 57224
+IFBhc3Rl 57225
+IHNlcnZpZG9y 57226
+IEJpdFNldA== 57227
+IFRyYW4= 57228
+bGF1cw== 57229
+dmV0dGU= 57230
+ZXllcw== 57231
+IENMSUNL 57232
+IFZJSUk= 57233
+IFR1cm5z 57234
+IExlQnJvbg== 57235
+IE11ag== 57236
+IERlZw== 57237
+IEFkdWx0cw== 57238
+X3N1aXRl 57239
+cHJvY2Vzc2FibGU= 57240
+IFBIWQ== 57241
+Z2hlc3Q= 57242
+LkZhaWw= 57243
+IFNsYWNr 57244
+Y2Vq 57245
+XENhcmJvbg== 57246
+IHN1cGVyc3Rhcg== 57247
+IGhvbGRpbmdz 57248
+KGZvcm1z 57249
+ICcjJw== 57250
+TXVsdGlw 57251
+KCJbJQ== 57252
+LXNvbGlk 57253
+L3VybA== 57254
+LXRpZXI= 57255
+W2xlbmd0aA== 57256
+IFN0cmVhbVdyaXRlcg== 57257
+IE1hcmtldHBsYWNl 57258
+Z2V0dGV4dA== 57259
+X1RJQ0s= 57260
+IEZvcmdl 57261
+IGJsYWNramFjaw== 57262
+IERPRVM= 57263
+IE1hdHRlcnM= 57264
+d2F2ZXM= 57265
+IHdoaXNwZXJlZA== 57266
+IGx1c2g= 57267
+7Jik 57268
+ZGlnaXRhbA== 57269
+IHdyaW5r 57270
+IEhvZ2Fu 57271
+IHJ1c3RpYw== 57272
+LkFwcGx5UmVzb3VyY2Vz 57273
+IEhhcmR5 57274
+b3NvbWVz 57275
+QVVU 57276
+LlNUQVRF 57277
+IG5hcnJhdGl2ZXM= 57278
+CXN0b3Jl 57279
+Ymli 57280
+CVNjYW5uZXI= 57281
+IENvZHk= 57282
+XFJlcG9zaXRvcmllcw== 57283
+IHJldW5pb24= 57284
+YW5kdW0= 57285
+4oCZaA== 57286
+IHNuaWZm 57287
+TlNCdW5kbGU= 57288
+IGNvbXByZWhlbmQ= 57289
+X1VTQUdF 57290
+X29jYw== 57291
+VVJSRU5DWQ== 57292
+Sk5J 57293
+IHNwZWNpYWxpemluZw== 57294
+IHZpc2lvbnM= 57295
+IGRvbG9yZQ== 57296
+IHbDoQ== 57297
+IENoZXZ5 57298
+IFN0eWxlZA== 57299
+aW1wYWN0 57300
+YWxsZW4= 57301
+IGthcnQ= 57302
+IFRhYmxldA== 57303
+c3R1ZmY= 57304
+cmVlc29tZQ== 57305
+0LDRgtC+0YA= 57306
+Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0K 57307
+X0FkbWlu 57308
+IGNlbGxwaG9uZQ== 57309
+IGF1dG9wbGF5 57310
+IGNhbWJpbw== 57311
+IG1hcml0aW1l 57312
+X0JPT1Q= 57313
+LXF1YXJ0ZXI= 57314
+IGxhdGluYQ== 57315
+IEFKQVg= 57316
+ZXF1aXY= 57317
+IEZyb250aWVy 57318
+IFhZ 57319
+fV0K 57320
+IFJvdWdo 57321
+LnByb3Rv 57322
+IGNvcnJlY3RuZXNz 57323
+IGZhY2ls 57324
+IFJlYWNoZWQ= 57325
+44Gd44Gu 57326
+VklT 57327
+LnBz 57328
+IHN0cm5jcHk= 57329
+IGRpZmZ1c2lvbg== 57330
+LnN0YXJ0QWN0aXZpdHk= 57331
+77+977+977+9 57332
+IGFjY29tcA== 57333
+QU1FU1BBQ0U= 57334
+aW1vbmlhbHM= 57335
+IEJsYXN0 57336
+YWJ5cmlu 57337
+IGRvbWU= 57338
+IGV4dHJhdg== 57339
+IHllbg== 57340
+IGN1bGluYXJ5 57341
+UFJJ 57342
+IENvbW11bml0aWVz 57343
+bmlk 57344
+X29wZXJhdGlvbnM= 57345
+Lmhz 57346
+IE1pbHRvbg== 57347
+IG5vaXNlcw== 57348
+QXV0b3Jlc2l6aW5nTWFzaw== 57349
+KGNpZA== 57350
+fQoKCgoKCg== 57351
+XX0sCg== 57352
+IERldGVjdGlvbg== 57353
+dGFibGE= 57354
+IGxpYmVydGllcw== 57355
+X0RZTkFNSUM= 57356
+d2dldA== 57357
+IFTDvHI= 57358
+IFBhc2NhbA== 57359
+VHJhbnNwYXJlbnQ= 57360
+RGVsYXllZA== 57361
+XSgp 57362
+IEhlcmJlcnQ= 57363
+PEFjdGlvblJlc3VsdA== 57364
+Y2hhbGxlbmdl 57365
+IG11c2hyb29t 57366
+Lmluc2VydEJlZm9yZQ== 57367
+IFJpbg== 57368
+IGh1bW91cg== 57369
+IGbDuA== 57370
+YXBpS2V5 57371
+YWxsb2NhdGVk 57372
+IGNvbmZlc3Npb24= 57373
+LiIsDQo= 57374
+CWFzc2VydFRoYXQ= 57375
+IFNPUlQ= 57376
+IExPUkQ= 57377
+IGV4cG9ydGVy 57378
+LnNldExldmVs 57379
+cG9rZW1vbg== 57380
+YXNodHJh 57381
+IGbDqQ== 57382
+dXJhdG9y 57383
+KE1TRw== 57384
+IHR1cA== 57385
+IEh1bGw= 57386
+IHlpZWxkZWQ= 57387
+LlN1YmplY3Q= 57388
+XFJvdXRl 57389
+IT8= 57390
+INGD0LTQsNC7 57391
+XFNlY3VyaXR5 57392
+LWFy 57393
+IGFsbGVnYXRpb24= 57394
+KFNldHRpbmdz 57395
+w6RuZGVy 57396
+IGVsbGlwc2U= 57397
+IFJldHJvZml0 57398
+IHJlZ3VsYXRpbmc= 57399
+IE1vbGx5 57400
+IExvaw== 57401
+X0N1c3RvbQ== 57402
+IFByb21v 57403
+aXNpbg== 57404
+IHJlc3VtZWQ= 57405
+IG1ldHJvcG9saXRhbg== 57406
+LmVycm9yTWVzc2FnZQ== 57407
+Oi0tLS0tLS0tLS0tLS08Lw== 57408
+Lm1s 57409
+c2NvcGlj 57410
+LnJlZnM= 57411
+YXB0b3Jz 57412
+IEluc3RydW1lbnRz 57413
+IHByb3BhZ2F0ZQ== 57414
+fS0+ 57415
+IHBhc2Fkbw== 57416
+dGhhbms= 57417
+X0RlbGV0ZQ== 57418
+IEJyaWdodG9u 57419
+LHVuc2lnbmVk 57420
+5L2c6ICF 57421
+IGFzcGlyYXRpb25z 57422
+LWhvdw== 57423
+Um9zZQ== 57424
+PSgo 57425
+X25lZWRlZA== 57426
+X3BsdXJhbA== 57427
+PEFwcGxpY2F0aW9u 57428
+IFdFRUs= 57429
+IFVubG9jaw== 57430
+IFRFTVA= 57431
+U291 57432
+IHNjaGl6b3BocmVuaWE= 57433
+IHRyb2xs 57434
+IGNvbXBsZW1lbnRhcnk= 57435
+IE5FVFdPUks= 57436
+IGJsaXI= 57437
+IHByb2dyZXNzRGlhbG9n 57438
+IiUo 57439
+IEF0dHJpYnV0ZVNldA== 57440
+CXRz 57441
+Lml0ZXJpdGVtcw== 57442
+6K+d 57443
+IGVzY3JpdA== 57444
+dm91cw== 57445
+X3BsYWNlcw== 57446
+SEs= 57447
+IHNlZ3Vpcg== 57448
+X2Z3 57449
+IFJvdW5kZWQ= 57450
+IGRpc3Bvc2l0 57451
+6KeG 57452
+cGFybQ== 57453
+d293 57454
+U1RSVUNUSU9O 57455
+LmFsbG93 57456
+IENoYXJTZXF1ZW5jZQ== 57457
+CWV4dGVybg== 57458
+IHByb3NlY3V0ZWQ= 57459
+IG1vcnRhcg== 57460
+IEp1ZGE= 57461
+LW1zZw== 57462
+IGVzdHVk 57463
+LmdldERlc2NyaXB0aW9u 57464
+IHNvdw== 57465
+YW1icmU= 57466
+IHJvbWE= 57467
+RW5o 57468
+Ym9udXM= 57469
+IHNxdWF0 57470
+IGRpc3RyYQ== 57471
+ZWRJbWFnZQ== 57472
+IHBlcHBlcnM= 57473
+LXBlcmZvcm1hbmNl 57474
+LAoKCg== 57475
+LGZpbGU= 57476
+IE1JTUU= 57477
+X2NvbmNhdA== 57478
+QUJT 57479
+LWZhc2hpb24= 57480
+IHVuZGVyY292ZXI= 57481
+T25lVG9NYW55 57482
+IHJlY2xhaW0= 57483
+Q09QWQ== 57484
+IGJpbmRz 57485
+IFRhcGU= 57486
+IGdvc3NpcA== 57487
+IEVxdWl0eQ== 57488
+L0NhcmQ= 57489
+LmFjdGl2 57490
+J2Ft 57491
+IGRyYWluYWdl 57492
+PFNjYWxhcnM= 57493
+IG9uQmluZFZpZXdIb2xkZXI= 57494
+KCk/Lg== 57495
+IHNvcnJvdw== 57496
+IEli 57497
+dXB5 57498
+X1VVSUQ= 57499
+IENoYXJt 57500
+IEVsZWN0aW9ucw== 57501
+Lm9uRGVzdHJveQ== 57502
+IEludGVyZXN0aW5nbHk= 57503
+b3VuZGluZ0JveA== 57504
+X2RldGVjdGlvbg== 57505
+LWhlbGQ= 57506
+X3Vua25vd24= 57507
+IHJlZnJhaW4= 57508
+IG3DqXRvZG8= 57509
+IGVCb29r 57510
+RU5PTUVN 57511
+IGRhbmc= 57512
+UHJvZmVzc2lvbmFs 57513
+IGRpY3Rpb25hcmllcw== 57514
+L215c3Fs 57515
+IFNUVUQ= 57516
+IG1hc3Nl 57517
+c2NhcGU= 57518
+IGRyZWk= 57519
+Om5hbWU= 57520
+LmxvZ28= 57521
+U2lnblVw 57522
+IHRhaHVu 57523
+KHRoZW1l 57524
+IEZlbW1l 57525
+IGJvbWJlcg== 57526
+IEphZGU= 57527
+IFRheQ== 57528
+IHN1Ym1hcmluZQ== 57529
+X2NsYXVzZQ== 57530
+enljaA== 57531
+IHNpbXVsdGFuZW91cw== 57532
+IGNhc29z 57533
+LmJvb2xlYW4= 57534
+KGxocw== 57535
+IGNvbnRpbmVudGFs 57536
+LXNhbGU= 57537
+CWVudg== 57538
+IEN1dGU= 57539
+IEZhY3RvcnlHaXJs 57540
+YWJ1cw== 57541
+L3ZhbHVl 57542
+IGphZHg= 57543
+IHN0ZXJu 57544
+Pj4KCg== 57545
+IHN1cmZhY2Vk 57546
+IOyggOyepQ== 57547
+cGxhdHo= 57548
+CWVtYWls 57549
+Y2VwdG9ycw== 57550
+Ij4o 57551
+IGVwaWxl 57552
+6K+7 57553
+IERlYnQ= 57554
+5ZGK 57555
+Tk9Q 57556
+Imh0dHBz 57557
+Omo= 57558
+Rm9ybUl0ZW0= 57559
+X0xJQ0VOU0U= 57560
+LmdldERvdWJsZQ== 57561
+IEFnZW5kYQ== 57562
+CWZpbmFsbHk= 57563
+KGZpbHRlcnM= 57564
+KGF2 57565
+576O 57566
+QVBFUg== 57567
+IGxhdmE= 57568
+0LXRgNC2 57569
+KSkpKQoK 57570
+IGZhdWx0eQ== 57571
+X25t 57572
+IHRyYXZh 57573
+KEJpdG1hcA== 57574
+IHNwZWVkaW5n 57575
+PicpLg== 57576
+IHNjcmVlbmVk 57577
+X3JvbGw= 57578
+IE1hY0Jvb2s= 57579
+IEFVRA== 57580
+IGRpYWdub3Nl 57581
+LkdlbmVyYXRl 57582
+IF5e 57583
+IHN0cnM= 57584
+W1Rlc3Q= 57585
+IHJhbnNvbQ== 57586
+IERIQ1A= 57587
+ZWxkZW4= 57588
+IGludGVycHJldGF0aW9ucw== 57589
+KCldLg== 57590
+ZmxhdE1hcA== 57591
+IGxpbmVIZWlnaHQ= 57592
+X21vdW50 57593
+IFdpemFyZHM= 57594
+IHNsdXRz 57595
+ZWhsZXI= 57596
+b2RhbA== 57597
+IG1pbGl0aWE= 57598
+5bI= 57599
+ZWFybmVk 57600
+IG1pc2VyeQ== 57601
+aW50dmFs 57602
+ZnVuZA== 57603
+IGhpZGVz 57604
+IGRpYXJy 57605
+IFdlc2xleQ== 57606
+IHhtbQ== 57607
+IHF1ZW0= 57608
+IEFyYWJz 57609
+aWZ0aA== 57610
+YXRlZ29yaXplZA== 57611
+RGlzcG9zYWJsZQ== 57612
+UHVyZQ== 57613
+X05PVElGWQ== 57614
+c25pcHBldA== 57615
+IEdhcnJldHQ= 57616
+LnJ1bm5pbmc= 57617
+LndlaWdodHM= 57618
+ICgtLQ== 57619
+IGludmFyaWFudA== 57620
+5LqL5Lu2 57621
+IEFsbG93ZWQ= 57622
+ZGlycw== 57623
+IHBhc3Npb25z 57624
+IGxhZA== 57625
+IEZsdXNo 57626
+bWVudXM= 57627
+OmJsb2Nr 57628
+IGNvbXByYQ== 57629
+LmNob21w 57630
+YWxsb2NhdG9y 57631
+IGN1cmF0ZWQ= 57632
+IEtub3dpbmc= 57633
+IFBhdHRlcnNvbg== 57634
+IHRlbGFo 57635
+J2V4 57636
+IGRvb21lZA== 57637
+IHBoaWxhbnRo 57638
+b3R0eQ== 57639
+LnN0eWxlcw== 57640
+T3duZWQ= 57641
+IGFsbGVyZ2llcw== 57642
+PXBhcmFtcw== 57643
+b2Nlc2U= 57644
+aXRlbGlzdA== 57645
+IFNlbmRpbmc= 57646
+YmVm 57647
+b3JyYXI= 57648
+IE7Do28= 57649
+IEZhcmdv 57650
+IEx1Yg== 57651
+IENvbWJpbmVk 57652
+X2dpdmVu 57653
+CQkJCQkgICAg 57654
+IHJlY29uY2lsaWF0aW9u 57655
+UGF0dGVybnM= 57656
+YXphcmQ= 57657
+IGJpb21hc3M= 57658
+IEhvdXNlcw== 57659
+cmVzcHVlc3Rh 57660
+Y2Nv 57661
+L3RvcGljcw== 57662
+IFl1aw== 57663
+IHdlYWtlbmVk 57664
+X2NhbGVuZGFy 57665
+IG11bGhlcmVz 57666
+IE1hcmw= 57667
+IHNpbmU= 57668
+IFRpbA== 57669
+IFNvdWxz 57670
+IERldXRzY2hl 57671
+IEZPTExPVw== 57672
+IHBpcGVsaW5lcw== 57673
+IEJldmVybHk= 57674
+X0RJUFNFVFRJTkc= 57675
+IiM= 57676
+IFByb3Rv 57677
+LmJpZw== 57678
+IFNhdmluZ3M= 57679
+IFRhbno= 57680
+anVu 57681
+IEdhbW1h 57682
+IFNhZGQ= 57683
+IGFkdmlzb3Jz 57684
+IHJvYXN0 57685
+IHVudGVycw== 57686
+dWRpZXM= 57687
+X2xvbg== 57688
+LXBvaW50ZXI= 57689
+IEVsZW1lbnRSZWY= 57690
+XEJ1aWxkZXI= 57691
+ZXhhbXBsZUlucHV0 57692
+LndlYmRyaXZlcg== 57693
+ZGF0YVR5cGU= 57694
+IFF1aXRl 57695
+IENlbHRpY3M= 57696
+dWls 57697
+LWRlZmVuc2U= 57698
+YmlzaA== 57699
+IFVJV2luZG93 57700
+IFN1ZGRlbmx5 57701
+LmhvdA== 57702
+LnJlYXNvbg== 57703
+IGfDtnI= 57704
+QU1E 57705
+Lk11bHRp 57706
+YXV0aGVudGljYXRlZA== 57707
+cmVnaW9ucw== 57708
+Oyg= 57709
+0LDRgNCw0Lw= 57710
+IEtpcmJ5 57711
+JHJvdXRl 57712
+UFJFQ0FURUQ= 57713
+IER1cmhhbQ== 57714
+b3dv 57715
+IFBlcmZvcm1z 57716
+IGRpc3JlZ2FyZA== 57717
+bnN0 57718
+IFBvbHM= 57719
+IGdldFA= 57720
+Il06 57721
+LWNvbG9yZWQ= 57722
+KEtleXM= 57723
+IEFsbGVn 57724
+X21vZGlmeQ== 57725
+X2xvYWRpbmc= 57726
+c3RyYWluZWQ= 57727
+IGF0cm9j 57728
+X3Bocg== 57729
+PFNwcml0ZQ== 57730
+IHNhdGlzZmFjdG9yeQ== 57731
+bWFuc2hpcA== 57732
+LnBpcGVsaW5l 57733
+VG9ueQ== 57734
+IHRoaWVm 57735
+cG9sYXRvcg== 57736
+KGxvY2s= 57737
+YnVyc3Q= 57738
+IE9wdGltaXphdGlvbg== 57739
+IHN1cmZpbmc= 57740
+Illlcw== 57741
+IGRlc2NlbmRlZA== 57742
+5pI= 57743
+X0NsZWFy 57744
+IGNyaWVz 57745
+IEZyb3plbg== 57746
+RElSRUNU 57747
+LUNvbg== 57748
+IExlaWNlc3Rlcg== 57749
+5aWz 57750
+T09N 57751
+PWRi 57752
+IGdldE1lc3NhZ2U= 57753
+PFN0dWRlbnQ= 57754
+X2JhdGNoZXM= 57755
+Lk1hc2s= 57756
+X2V0aA== 57757
+XCk= 57758
+IHNvbWE= 57759
+Q2F0Y2g= 57760
+W2No 57761
+T3duZXJz 57762
+aW5kbGU= 57763
+OmF1dG8= 57764
+LnZlcnQ= 57765
+aXZy 57766
+LnNldExvY2F0aW9u 57767
+IGZsdWVudA== 57768
+X0VORElBTg== 57769
+IENhcmxv 57770
+Y2VwdHM= 57771
+YWRkQWN0aW9u 57772
+Lm9hdXRo 57773
+PFVuaXR5RW5naW5l 57774
+cmVlbWVudHM= 57775
+LlNraXA= 57776
+PykKCg== 57777
+LmRlZmF1bHRQcm9wcw== 57778
+IGNhYmU= 57779
+IFNoZW4= 57780
+ZXJvc2lz 57781
+IFByb2ZpdA== 57782
+IHBvaXM= 57783
+X0NSRUFURUQ= 57784
+IHJlbW92ZUZyb20= 57785
+KHdz 57786
+P2FjdGlvbg== 57787
+KEZpZWxk 57788
+IGVycm9uZQ== 57789
+Lm1pbmltdW0= 57790
+IFJldHJpZXZlZA== 57791
+IGRhZG8= 57792
+IFBSSVZBVEU= 57793
+LXNwZWM= 57794
+IGd6aXA= 57795
+cGRhdGE= 57796
+IHBvc1k= 57797
+KGxvdw== 57798
+IHF1YWxxdWVy 57799
+L2Nsb3Vk 57800
+6rKM 57801
+KGNvbW1vbg== 57802
+IEFyYmVpdA== 57803
+b3JnYW5pc2F0aW9u 57804
+IHRpZHk= 57805
+IFJvbGFuZA== 57806
+KHBo 57807
+LnpvbmU= 57808
+IGdlbnRsZW1lbg== 57809
+xrDhu6Nj 57810
+5bGx 57811
+IGVuY2xvc3VyZQ== 57812
+IE1hbmFmb3J0 57813
+CUNvbG9y 57814
+U3RlbmNpbA== 57815
+Tmlj 57816
+IHRoZW9yZW0= 57817
+IFZH 57818
+IGNvbG91cmVk 57819
+VkJveExheW91dA== 57820
+dWxzaXZl 57821
+RHJhZ29u 57822
+Y2Zm 57823
+ZXRlc3Q= 57824
+ZW5zYQ== 57825
+b2ZkYXk= 57826
+LkF6dXJl 57827
+OlVJQ29udHJvbEV2ZW50VG91Y2hVcEluc2lkZQ== 57828
+X3VwZGF0ZXM= 57829
+IHRyZW5keQ== 57830
+dWdhcw== 57831
+d2Vha1NlbGY= 57832
+IHJpZGdl 57833
+aWJyaQ== 57834
+IOy2lA== 57835
+KENH 57836
+IE1vbmtleQ== 57837
+LndyaXRlSW50 57838
+LnRpbWVkZWx0YQ== 57839
+Vmlld0NvbnRyb2xsZXJBbmltYXRlZA== 57840
+IFByb3ZpZGVuY2U= 57841
+44GI 57842
+IGJsZW5kcw== 57843
+L1N1YnRocmVzaG9sZA== 57844
+IEFwcGw= 57845
+IGF0YW4= 57846
+IHJlbG9hZERhdGE= 57847
+dW1ib3Ryb24= 57848
+c3TDvHQ= 57849
+T0F1dGg= 57850
+IEdpdmluZw== 57851
+IOyEpA== 57852
+IEZpbm5pc2g= 57853
+Y2hlY2tpbmc= 57854
+LkVtYmVk 57855
+c2VxdWVsaXpl 57856
+IGluaXRpYWxpemVz 57857
+IE9zbG8= 57858
+2LY= 57859
+Z2V0RXh0ZW5zaW9u 57860
+X0FMVA== 57861
+KGJsYW5r 57862
+IGZhdGFsRXJyb3I= 57863
+IGRlbWlzZQ== 57864
+KioqKioK 57865
+IFhT 57866
+KEFG 57867
+IEVucw== 57868
+YW50aGE= 57869
+IFBPUg== 57870
+IG5pY2g= 57871
+Lk5hbWVk 57872
+IGdpZ2FudGlj 57873
+IE9ic2VydmF0b3J5 57874
+LlJlc29sdmU= 57875
+IFBheW1lbnRz 57876
+Z3VpbGQ= 57877
+IGN1cnJlbnRTdGF0ZQ== 57878
+PT09PT09PT09PT09PT09Cg== 57879
+IFNleQ== 57880
+cERhdGE= 57881
+IGRlYWRsaW5lcw== 57882
+IGNlbnRyYWxpemVk 57883
+IFNjaG9sYXJzaGlw 57884
+X3N1cHBvcnRlZA== 57885
+LmNocm9tZQ== 57886
+KCldKTsK 57887
+IGN5YW4= 57888
+IENhZ2U= 57889
+QXV0aG9ycw== 57890
+Xw0K 57891
+L29z 57892
+a2lt 57893
+ZGVl 57894
+LnRleA== 57895
+IHlvdXJzZWx2ZXM= 57896
+IG1ncg== 57897
+IGFsaw== 57898
+LWluc3RhbGw= 57899
+IGRyYWZ0aW5n 57900
+IHJ1bW9y 57901
+IHN0YXR1ZXM= 57902
+UG9vbGluZw== 57903
+b2xpbmE= 57904
+QUFBQUFBQUE= 57905
+LyotLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0t 57906
+IGV4dHJlbWlzdHM= 57907
+Q2FsY3Vs 57908
+aWdodGhvdXNl 57909
+SW5zZXQ= 57910
+KElOUFVU 57911
+IHN5bmNocm9uaXphdGlvbg== 57912
+aXZpcnVz 57913
+LmF4ZXM= 57914
+IEdhcA== 57915
+LUFu 57916
+X1RlbXBsYXRl 57917
+IGdhbWVy 57918
+IENyaWNrZXQ= 57919
+IGxpbnQ= 57920
+IGF1dGhvcml0YXJpYW4= 57921
+TlNVSW50ZWdlcg== 57922
+IHJlZG8= 57923
+IGFkaXBpc2Npbmc= 57924
+X0ZFVENI 57925
+Y2hlaWQ= 57926
+IEZhbmc= 57927
+LmluZGljZXM= 57928
+dG9uZQ== 57929
+0LTQtdC7 57930
+IHt7LS08 57931
+YnJhaGlt 57932
+IHNhbGE= 57933
+Z2V0Q29kZQ== 57934
+IGNvbW11bmljYXRlZA== 57935
+c3RhcnRzV2l0aA== 57936
+ZXJ0eg== 57937
+UmVhZGFibGU= 57938
+SXRlbUlk 57939
+b3JlZmVycmVy 57940
+Y3JlZGlibGU= 57941
+w6FyaWE= 57942
+IGNvbWJpbmVSZWR1Y2Vycw== 57943
+KiovCgo= 57944
+IGJsaXNz 57945
+IGFkb3Ju 57946
+ZGVwZW5kcw== 57947
+IFJPT00= 57948
+IGZyYW1pbmc= 57949
+ID8nLA== 57950
+YXV0eQ== 57951
+X3BvdA== 57952
+X3RhYnM= 57953
+RXhhY3Q= 57954
+LCIs 57955
+ICd9JzsK 57956
+IGFyYml0cg== 57957
+YWhyYWlu 57958
+LmdldFN0cmluZ0V4dHJh 57959
+ICRc 57960
+IG91dHB1dFN0cmVhbQ== 57961
+IGNvbW1lbmM= 57962
+YW51cw== 57963
+Y2h5 57964
+PEVtcGxveWVl 57965
+IGhleGF0cmlnZXNpbWFs 57966
+IG5hY2lvbmFs 57967
+KHNlcmlhbGl6ZXJz 57968
+X3B1dGNoYXI= 57969
+X1NBRkU= 57970
+ZW50aWFsQWN0aW9u 57971
+SXRlbVNlbGVjdGVkTGlzdGVuZXI= 57972
+LkRpc3BhdGNo 57973
+Q29uZmxpY3Q= 57974
+X2Fib3V0 57975
+b3NhdXI= 57976
+Qm91bmRhcnk= 57977
+IGNsZWFyQ29sb3I= 57978
+KExvY2F0aW9u 57979
+IE1PTlRI 57980
+IFRhc3Rl 57981
+LUdlbmVyYWw= 57982
+IFdBUg== 57983
+IGVyaGFsdGVu 57984
+LXNhdmluZw== 57985
+IGNvdXBsaW5n 57986
+LXRyaWdnZXI= 57987
+bW90b3I= 57988
+IHl5eXk= 57989
+IFBhdGVudA== 57990
+cHRv 57991
+IG1pc2RlbWVhbm9y 57992
+dmFzaW9u 57993
+IEFkbWlyYWw= 57994
+4LmJ4Liy 57995
+X1BXUg== 57996
+IGRldmFzdGF0ZWQ= 57997
+Zm9saW9z 57998
+SVRVREU= 57999
+dXJyZWN0 58000
+IHJvYm90aWM= 58001
+IFNhbmN0 58002
+IEhhd2FpaWFu 58003
+LlJvdXRl 58004
+LWNvbmRpdGlvbg== 58005
+IHJr 58006
+LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioK 58007
+Y3JlYXRlRWxlbWVudA== 58008
+IEtvcA== 58009
+aWduYW50 58010
+LnJvbGxiYWNr 58011
+IHNhbHVk 58012
+Xycs 58013
+IEFOU0k= 58014
+RXhjZXB0 58015
+IERyYXdhYmxl 58016
+LlV0Y05vdw== 58017
+Ijpbewo= 58018
+IGtvbGU= 58019
+THVh 58020
+IEJlbGlldmU= 58021
+Q29tcHV0 58022
+IGhhbGx1Yw== 58023
+IFNpZ25z 58024
+cnN0 58025
+Lmh1 58026
+IEtOT1c= 58027
+V2k= 58028
+IEJyYXNz 58029
+IFJhcw== 58030
+QGhvdG1haWw= 58031
+IHNlZGltZW50 58032
+IGFwaw== 58033
+IOyDgQ== 58034
+X3JlZ2lvbnM= 58035
+IHBvZGl1bQ== 58036
+PEJvb2s= 58037
+0LbQtQ== 58038
+IHNpeHRlZW4= 58039
+IEFsaWFz 58040
+IGluZnJhcmVk 58041
+IFZhbmRlcg== 58042
+IExlYWRpbmc= 58043
+dWNpbmc= 58044
+LDosOg== 58045
+X2hvcg== 58046
+d2F0 58047
+IGTDqWNvdQ== 58048
+X1dpZGdldA== 58049
+U291bmRz 58050
+X25hdmlnYXRpb24= 58051
+IHNjaG5lbGw= 58052
+KGdlbmVyYXRvcg== 58053
+dWNlbmU= 58054
+IHJlbWFrZQ== 58055
+SVB2 58056
+IHLDqWFs 58057
+X0lOQ1JFTUVOVA== 58058
+IGh5cG90aGV0aWNhbA== 58059
+X2FuZw== 58060
+IG9mcw== 58061
+ICEK 58062
+LmNvbXBsZXRlZA== 58063
+R2V0VHlwZQ== 58064
+IGtvbW1lbg== 58065
+w6FsaWRv 58066
+YWRkT24= 58067
+IHrFgg== 58068
+VUxB 58069
+X2luZGljYXRvcg== 58070
+J10KCgo= 58071
+YXBhY2hl 58072
+X1NlbGVjdA== 58073
+IEdyZWVuZQ== 58074
+V2hhdHM= 58075
+X2FuaW0= 58076
+IHJlcGV0aXRpdmU= 58077
+bXVjaA== 58078
+IFRocmVzaG9sZA== 58079
+IGxm 58080
+KENhdGVnb3J5 58081
+Y29uZQ== 58082
+TWl4 58083
+X01FVEFEQVRB 58084
+YXlzaWE= 58085
+TmVpZ2hib3Jz 58086
+CQoJCQo= 58087
+SVBIRVI= 58088
+IEZyYWc= 58089
+IENlbGxz 58090
+IG5hbWVzcGFjZXM= 58091
+KGJhY2s= 58092
+IFJlc3RhdXJhbnRz 58093
+c3Zj 58094
+INC70Lg= 58095
+b3RlY2g= 58096
+LXNs 58097
+pb8= 58098
+IFdU 58099
+IFJlZHVjdGlvbg== 58100
+IGRvdHRlZA== 58101
+CWZvdW5k 58102
+IFRFQU0= 58103
+Qm9ybg== 58104
+IE11c2g= 58105
+IENvbXBhcmFibGU= 58106
+IGhpdGNo 58107
+QVRP 58108
+IG1heEhlaWdodA== 58109
+YmVnaW5UcmFuc2FjdGlvbg== 58110
+w612 58111
+X2Ju 58112
+IGhlcmQ= 58113
+IHJldmVyc2Fs 58114
+IEhvbmQ= 58115
+ZGVsaW1pdGVy 58116
+IGNvbmZ1c2U= 58117
+IGhvcHM= 58118
+IGNlbnRyb2lk 58119
+IGNvdXJ0cm9vbQ== 58120
+LmRlY29yYXRvcnM= 58121
+IG1waQ== 58122
+IEltcHJvdmVk 58123
+SU5ORVI= 58124
+IEJhbmdhbG9yZQ== 58125
+IFRhbWI= 58126
+IGJvYXN0 58127
+KCkpKQ0K 58128
+IGlsbGljaXQ= 58129
+IE1vcm9jY28= 58130
+Z3JlZ2F0b3I= 58131
+X3Jlc3VtZQ== 58132
+IGNyYWNrZG93bg== 58133
+IHBvcnRyYWl0cw== 58134
+L2hpZ2g= 58135
+KFwn 58136
+IGF5dWQ= 58137
+X2ZlZWRiYWNr 58138
+IGNhdGU= 58139
+L2F2YXRhcg== 58140
+IGhlYg== 58141
+UG9pbnRDbG91ZA== 58142
+IOWSjA== 58143
+IDwhWw== 58144
+IGdldFJlc291cmNlcw== 58145
+fTp7 58146
+T3BlcmF0aW5n 58147
+IEZvZw== 58148
+CXRhYg== 58149
+IFJlc2VhcmNoZXJz 58150
+IGZhYnJpY2F0aW9u 58151
+LmRhdGFzZXRz 58152
+IENhbXBv 58153
+IEthdWY= 58154
+IGRsbA== 58155
+bGlndA== 58156
+XSkpOwoK 58157
+c3RlbGxlbg== 58158
+QUNLRVQ= 58159
+bHZs 58160
+IEdsb3J5 58161
+LmRhdGVUaW1l 58162
+IGNvbW11dGU= 58163
+IG9uQ3JlYXRlVmlld0hvbGRlcg== 58164
+IFhFbGVtZW50 58165
+IFRva2Vucw== 58166
+PHRoZWFk 58167
+X3BpY2s= 58168
+7KQ= 58169
+dm9u 58170
+ZGVwYXJ0dXJl 58171
+KHJlbmRlcmVy 58172
+cGhvbmVOdW1iZXI= 58173
+KFBlcnNvbg== 58174
+Z2VuZXM= 58175
+IExhcnM= 58176
+ICl7Cgo= 58177
+IEpzb25SZXN1bHQ= 58178
+IG1ldG9kbw== 58179
+Vk9LRQ== 58180
+LmdldFVzZXJJZA== 58181
+QWNjZWxlcg== 58182
+CXJlcXVpcmVk 58183
+IGNoYW1waW9uc2hpcHM= 58184
+QnVpbGRDb250ZXh0 58185
+L3Rhc2s= 58186
+L3JlbGVhc2Vz 58187
+Q2F0ZWdvcmlh 58188
+X292ZXJsYXk= 58189
+IHNjYXJjZQ== 58190
+X2xpbQ== 58191
+bmdy 58192
+YWhsZW4= 58193
+IEFydGlmaWNpYWw= 58194
+c3ByZWFk 58195
+IGJvd2xpbmc= 58196
+LmFuYWx5c2lz 58197
+U01UUA== 58198
+CXBhc3N3b3Jk 58199
+IGJhdGhz 58200
+XSkpewo= 58201
+Y3VycmVudGx5 58202
+YWNpZW50ZQ== 58203
+X3NlcGFyYXRvcg== 58204
+IGRlYmVy 58205
+IERpc2FibGVk 58206
+acOocmVz 58207
+IOKV 58208
+X3Byb2Nlc3Npbmc= 58209
+IHByb3Rlc3Rpbmc= 58210
+IFJPVA== 58211
+Z3JhYg== 58212
+INC30LDQug== 58213
+IHByb2FjdGl2ZQ== 58214
+d29yZHByZXNz 58215
+IFNldmVy 58216
+aW5kZW4= 58217
+IHdpa2lwZWRpYQ== 58218
+KXsNCg0K 58219
+X3dpbmRvd3M= 58220
+aXNsYXRpb24= 58221
+IHVucmVzdA== 58222
+IGRpc21pc3NhbA== 58223
+Lk5VTQ== 58224
+X0ZBU1Q= 58225
+aXNzdWVk 58226
+IEZBQ0U= 58227
+X3VuZGVy 58228
+IHBsdWdnZWQ= 58229
+IOWw 58230
+IGLEmWR6aWU= 58231
+IElDQw== 58232
+IGNvbWJ1c3Rpb24= 58233
+IGtpc3NlZA== 58234
+IHN0YXJyZWQ= 58235
+IFdhdHRz 58236
+IHNwaWVsZW4= 58237
+LXB1cnBvc2U= 58238
+IEV2YWw= 58239
+YXJnZXM= 58240
+LHJlc3VsdA== 58241
+dGVjaG5vbG9neQ== 58242
+IG5hdGlvbmFsaXR5 58243
+aWN1cw== 58244
+IE51Zw== 58245
+INGC0L4= 58246
+CQkJCQkJCSAg 58247
+Y29sbw== 58248
+IGdhc3Rybw== 58249
+YW50ZWVk 58250
+T0xJRA== 58251
+LmJpYXM= 58252
+X3RlbGU= 58253
+Lmluc3BlY3Q= 58254
+IHZlaWw= 58255
+LmZvb3Rlcg== 58256
+IG5lZ2xpZ2VuY2U= 58257
+IGp1ZGdtZW50cw== 58258
+Um9vbXM= 58259
+eW5u 58260
+CWNvdW50ZXI= 58261
+b2NjdXBhdGlvbg== 58262
+IOeUnw== 58263
+dW5hcw== 58264
+ICheKSg= 58265
+TGFtYmRh 58266
+ZmVs 58267
+LlBhcmFtcw== 58268
+INC00L7QsdCw0LI= 58269
+c2V0TGF5b3V0 58270
+IGRlcG9ydGF0aW9u 58271
+IGxvY2FsT2JqZWN0 58272
+IFBoYXJtYWNldXRpY2Fs 58273
+Y2VwdGl2ZQ== 58274
+IE5vbWU= 58275
+RXF1aXBtZW50 58276
+RmFu 58277
+VW5pdmVyc2Fs 58278
+CXNvY2tldA== 58279
+IGdyaW4= 58280
+IGV4cG9zZXM= 58281
+IGhhYmVy 58282
+IHNpbmNlcmVseQ== 58283
+IGNhbXM= 58284
+IG3DvA== 58285
+ZW5pYQ== 58286
+RW1lcg== 58287
+Q3J5cHRv 58288
+U2xvdw== 58289
+KHhocg== 58290
+IT0o 58291
+LXNlcnZpY2Vz 58292
+IFBX 58293
+IHByZW5kcmU= 58294
+IG3DpGRjaGVu 58295
+ZW1vbnM= 58296
+0L7Qt9Cy0YDQsNGJ 58297
+Lk1hbmFnZXI= 58298
+7Jk= 58299
+IGdyYWY= 58300
+LXJh 58301
+bWV0cmljYWw= 58302
+L2Zs 58303
+IGNlbWV0ZXJ5 58304
+Z2Vucw== 58305
+IHDFmQ== 58306
+IE15U3FsQ29tbWFuZA== 58307
+LVRv 58308
+IHbDpQ== 58309
+IGFpcnN0 58310
+b21lbnR1bQ== 58311
+IHNlcnZv 58312
+bWlsbGlvbg== 58313
+IE1pcmFuZGE= 58314
+IlNoZQ== 58315
+IGFkdm9jYXRpbmc= 58316
+LWNhcHRpb24= 58317
+IEF0dHJpYnV0aW9u 58318
+IHdlbGNoZQ== 58319
+X3ZlbmRvcg== 58320
+CVN0YXR1cw== 58321
+YXJyaXM= 58322
+IHByaW50aw== 58323
+IiwiIw== 58324
+IHJlbGF0aXY= 58325
+aWZmZXJlbmNlcw== 58326
+aXp6ZXM= 58327
+IGRlY2ltYWxz 58328
+IFByb3Y= 58329
+Lm1heGltdW0= 58330
+QXJu 58331
+IGhlbGljb3B0ZXJz 58332
+X0JPVFRPTQ== 58333
+Y2h1cmU= 58334
+b2Rpbmdz 58335
+Jyg= 58336
+IikpKTsNCg== 58337
+KGJlYW4= 58338
+LmZk 58339
+RnVuZA== 58340
+IGhhbmdz 58341
+YXBwaWQ= 58342
+L2tlcm5lbA== 58343
+LnBvaQ== 58344
+Lk1pblZhbHVl 58345
+LXZhbGlkYXRpb24= 58346
+THVrZQ== 58347
+Y2Rm 58348
+IEZ1bmVyYWw= 58349
+IFNhbXBsZXM= 58350
+CWRl 58351
+IHRvYXN0cg== 58352
+IHRheGFibGU= 58353
+IGNsdXN0ZXJpbmc= 58354
+ICdcJw== 58355
+IHJlc3RyYWludA== 58356
+ZWNlZA== 58357
+Y2hhaW5z 58358
+44CC77yI 58359
+X0dSQVBI 58360
+IGZ1ZWxlZA== 58361
+6ZyA 58362
+SHA= 58363
+5aSN 58364
+VGlsZXM= 58365
+IGF1bnF1ZQ== 58366
+SkM= 58367
+IGhvc3RhZ2U= 58368
+IEVzaw== 58369
+IG1hdg== 58370
+IGdlc3Rpb24= 58371
+IGJhbm5lcnM= 58372
+fXsk 58373
+LmludFZhbHVl 58374
+LiciCgo= 58375
+X01BVFJJWA== 58376
+IGNlYXNlZA== 58377
+IEdPRA== 58378
+X0NBTUVSQQ== 58379
+LkFsbG93VXNlcg== 58380
+dHJhY2tlZA== 58381
+Q29vaw== 58382
+YmFpcnJv 58383
+KGNvbXBhbnk= 58384
+IHZpZXdwb2ludA== 58385
+LmdldFdyaXRlcg== 58386
+IE5ldHM= 58387
+d2l2ZXM= 58388
+ICgpKQo= 58389
+ZXhhbXBsZU1vZGFs 58390
+CWNoaWxk 58391
+IG15dGhvbG9neQ== 58392
+IC8vIg== 58393
+X2F4ZXM= 58394
+aWJvbGQ= 58395
+LkRhcms= 58396
+IE1heHdlbGw= 58397
+IGdwb2ludGVy 58398
+b2xpY2l0dWQ= 58399
+QmF0 58400
+dWxuZXI= 58401
+YmFsYW5jZWQ= 58402
+bWFpbGVy 58403
+IGNvbnRlbXBvcg== 58404
+5omL5py6 58405
+KCJfXw== 58406
+ICIpIg== 58407
+cmVhcg== 58408
+IEh1YW5n 58409
+XScpCg== 58410
+16k= 58411
+RlRB 58412
+IENhbGxpbmdDb252ZW50aW9u 58413
+IE91dHB1dHM= 58414
+UGs= 58415
+LlJlZmVyZW5jZQ== 58416
+bGVjdHVhbA== 58417
+ICk6Cgo= 58418
+IGJyYWNlbGV0 58419
+dWdlcg== 58420
+CUVycm9y 58421
+U3dlZXQ= 58422
+KCIvIik7Cg== 58423
+aHg= 58424
+IHVucmVhc29uYWJsZQ== 58425
+SW50ZXJwcmV0ZXI= 58426
+IGxvZnQ= 58427
+X3Byb2R1Y3Rv 58428
+IHNvY2lldGFs 58429
+LlBhcnNlcg== 58430
+IEFkYXB0 58431
+LmZvbw== 58432
+KHdoZXJl 58433
+LkZlYXR1cmU= 58434
+IFlhbWFoYQ== 58435
+Z2xhc3M= 58436
+Rm9yZ2U= 58437
+IHByb2hpYml0cw== 58438
+IGNhcGFjaXRpZXM= 58439
+IO2VqOyImA== 58440
+IHBlcm11dGF0aW9u 58441
+IGlobQ== 58442
+Rmxk 58443
+ZWxpYWw= 58444
+PT09PT09PT09PT0K 58445
+QENvbmZpZ3VyYXRpb24= 58446
+IGdlYXJlZA== 58447
+aW9zbw== 58448
+aWVzdGE= 58449
+dHJhbnNsYXRpb25z 58450
+SW5wdXRDaGFuZ2U= 58451
+UG9wdWxhcg== 58452
+IFBMVVM= 58453
+IHZm 58454
+X0ZyZWU= 58455
+YmJveA== 58456
+IGNhdXNhbA== 58457
+UElMRQ== 58458
+IHNjaMO2 58459
+IGlyb25pYw== 58460
+TWly 58461
+LkA= 58462
+5Y2X 58463
+IOiH 58464
+UmV3 58465
+dWxlbmNl 58466
+Zmxlbg== 58467
+IGNhbkFjdGl2YXRl 58468
+LXJlc3BvbnNl 58469
+IGFjY2VudHM= 58470
+aWdub3JlZA== 58471
+wrBG 58472
+LkRlcGVuZGVuY3lJbmplY3Rpb24= 58473
+CXBvaW50 58474
+IGNvbnRpbmdlbnQ= 58475
+IHNxdWFzaA== 58476
+IHBhcm1z 58477
+IENlbWV0ZXJ5 58478
+IGRlbHRhVGltZQ== 58479
+IERPUw== 58480
+IHZhbmlzaGVk 58481
+0LDRgNCw0LzQtdGC 58482
+IERQUw== 58483
+dGZvb3Q= 58484
+IFp1cw== 58485
+X0lOU1RBTEw= 58486
+R0FO 58487
+IGFyYg== 58488
+IG11bmljaXBhbGl0aWVz 58489
+SW50b0NvbnN0cmFpbnRz 58490
+QXV0b3Jlc2l6aW5nTWFza0ludG9Db25zdHJhaW50cw== 58491
+LGltYWdl 58492
+X2lnbm9yZQ== 58493
+IGRhbmdlcm91c2x5 58494
+cXVpc2E= 58495
+cGx1Y2s= 58496
+IGhhcnVz 58497
+dXBwZQ== 58498
+SHR0cEV4Y2VwdGlvbg== 58499
+QnJhY2tldA== 58500
+LicnCgo= 58501
+IFRvbA== 58502
+IFZpZXdlcg== 58503
+emJvbGxhaA== 58504
+LkNvZGVBbmFseXNpcw== 58505
+w6xuaA== 58506
+IGNvcnJlY3RhbWVudGU= 58507
+LmRh 58508
+IEFsZ2Vy 58509
+15A= 58510
+YmF1bQ== 58511
+IFBhbnRoZXI= 58512
+cGFydGljaXBhbnQ= 58513
+5b+F 58514
+LXN1cA== 58515
+IGVtdWxhdG9y 58516
+IGZhZGluZw== 58517
+IFdvbHZlcg== 58518
+Y3JlYXRlcw== 58519
+IGJvb2tpbmdz 58520
+LlF1ZXN0aW9u 58521
+p+ihjA== 58522
+IHN0cmVzc2Vz 58523
+IHJld3JpdHRlbg== 58524
+LlBJUEU= 58525
+ZWRlcw== 58526
+IGNiZA== 58527
+IjoiLw== 58528
+IGVuaGFuY2VtZW50cw== 58529
+X3N5 58530
+QklO 58531
+IFNsaXA= 58532
+SW5zcGVjdA== 58533
+IFdlZw== 58534
+IGNvbmdyZWdhdGlvbg== 58535
+IF86 58536
+X3Jt 58537
+RnJhbWVidWZmZXI= 58538
+ICcmIw== 58539
+IEZhbGxvdXQ= 58540
+SXNSZXF1aXJlZA== 58541
+IFBlYXJzb24= 58542
+IEZBQ1Q= 58543
+IHJlbGll 58544
+CWJveA== 58545
+IFNoZXBoZXJk 58546
+IFdpa2lMZWFrcw== 58547
+IENvbGxlY3Rvcg== 58548
+IHJlc2l6ZWQ= 58549
+bWV0aG9kTmFtZQ== 58550
+IGV2ZW50VHlwZQ== 58551
+IEF0aGVu 58552
+RGVzY3JpcHRvcnM= 58553
+IGJlcnM= 58554
+LW9wZXI= 58555
+IEluaXRpYWxseQ== 58556
+5aE= 58557
+X0JUTg== 58558
+ICAgICAgICAgDQo= 58559
+w6Fi 58560
+X2NhbXBhaWdu 58561
+X3dhdGNo 58562
+Rm9yZA== 58563
+LWRhdGVwaWNrZXI= 58564
+IHZpc2M= 58565
+IHNhdHU= 58566
+X3Ntcw== 58567
+IGNvbnRhZG9y 58568
+LXN2Zw== 58569
+IERPSQ== 58570
+JGFyZ3M= 58571
+IGtub2I= 58572
+LkJPTEQ= 58573
+IGRlYmF0ZWQ= 58574
+aW1ncw== 58575
+c29ja29wdA== 58576
+dHJ1dGg= 58577
+IEZlZXM= 58578
+IGhXbmQ= 58579
+X2Zvb2Q= 58580
+IGFicmFz 58581
+IG5vdGlvbnM= 58582
+IFRvZA== 58583
+OmNyZWF0ZQ== 58584
+IENvbmZsaWN0 58585
+VXN1YXJpb3M= 58586
+T1RPUw== 58587
+IG1zbQ== 58588
+S0hUTUw= 58589
+KFso 58590
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA= 58591
+IH1d 58592
+d2l6YXJk 58593
+IG1pZW50cmFz 58594
+IGRhdGFMaXN0 58595
+IGVtZXJnZXM= 58596
+xINuZw== 58597
+LlJlYWRJbnQ= 58598
+UEdB 58599
+SUxMSVNF 58600
+SUVudW1lcmF0b3I= 58601
+KHR1cGxl 58602
+Q2hyaXN0bWFz 58603
+TG9va0FuZEZlZWw= 58604
+b2dlbmVyYXRlZA== 58605
+ICMKCg== 58606
+Y29udHJvbGxlZA== 58607
+IGV4cXVpc2l0ZQ== 58608
+IGFjZXN0 58609
+UmVhZFdyaXRl 58610
+R2Fpbg== 58611
+44CN44CM 58612
+IGNvcHlyaWdodGVk 58613
+IGRvb20= 58614
+LlRhYmxlTGF5b3V0UGFuZWw= 58615
+IERvcnQ= 58616
+IGNoaWxp 58617
+IHdlcms= 58618
+IEVWRU5UUw== 58619
+IEJlYWNvbg== 58620
+IHNoaXBtZW50cw== 58621
+IHNlYmFnYWk= 58622
+dXBvbg== 58623
+dXRvbQ== 58624
+LmNvbnZlcnRlcg== 58625
+LkRyb3BUYWJsZQ== 58626
+PXt9Cg== 58627
+Zmlj 58628
+fgoK 58629
+IGxlc2JpYW5z 58630
+X25h 58631
+Rm9yZWlnbg== 58632
+CXRoZW4= 58633
+L21z 58634
+IG9yaQ== 58635
+Z2V0UHJvcGVydHk= 58636
+CXNucHJpbnRm 58637
+aGVzaW9u 58638
+44Gk 58639
+In0sIg== 58640
+IGFjcnlsaWM= 58641
+UGVycw== 58642
+QEVuYWJsZQ== 58643
+SXNs 58644
+KENhcmQ= 58645
+LlN0YWNr 58646
+TGljZW5zZWQ= 58647
+X0dVSUQ= 58648
+OnRpdGxl 58649
+IGh1c3Q= 58650
+IHByaW5jaXBhbFRhYmxl 58651
+YW5pdGl6ZQ== 58652
+L2VtYmVk 58653
+IGVuc3VyZWQ= 58654
+IEVHTA== 58655
+2YjYsQ== 58656
+IOWIhg== 58657
+LywK 58658
+IGZ1bmRyYWlzZXI= 58659
+S2V5TmFtZQ== 58660
+IG1hcmNoZWQ= 58661
+X1ZBTFVFUw== 58662
+IFNjZW5hcmlv 58663
+IG1ldGlj 58664
+X2Fzc29jaQ== 58665
+IFBhc3Rvcg== 58666
+CQkJCQkJCQkJCQkJCQkJCQkJ 58667
+ZXJhdGU= 58668
+IGludml0YXRpb25z 58669
+cXVvaXNl 58670
+IGJsYW1pbmc= 58671
+IGRhcmluZw== 58672
+VU1NWQ== 58673
+IHJpY2hlcg== 58674
+ZW1ha2Vy 58675
+IElkZW50aWZpY2F0aW9u 58676
+IOyduA== 58677
+IEJpbmRpbmdGbGFncw== 58678
+Y2hhcw== 58679
+IHJlc2lsaWVudA== 58680
+X3Bn 58681
+IHJlbGVn 58682
+IElSQQ== 58683
+U1RF 58684
+IHRyYWN0b3I= 58685
+LWxvYWRpbmc= 58686
+IFByZXZpb3VzbHk= 58687
+IFZhY2M= 58688
+L2Jl 58689
+IG7DpXI= 58690
+IHVybGVuY29kZQ== 58691
+IE5vcmZvbGs= 58692
+LlJlbGVhc2U= 58693
+IE5ldXRyYWw= 58694
+5Lit5Zu9 58695
+IEFybGluZ3Rvbg== 58696
+IGFsbGVnZXM= 58697
+IFdyaXRlcnM= 58698
+VGVzdGVy 58699
+IFJhbGx5 58700
+IGPDoQ== 58701
+CVByaW50 58702
+IOKHkg== 58703
+IFVzZXJDb250cm9sbGVy 58704
+IFNlZWtpbmc= 58705
+LlZBTA== 58706
+TGlzdE5vZGU= 58707
+X2Zm 58708
+IFBoaWxsaXA= 58709
+RkFDVA== 58710
+IGNhcmFtZWw= 58711
+IE11bHRpcA== 58712
+IENvbXBhcmVk 58713
+IFNlcmJpYQ== 58714
+n7M= 58715
+IHJldml2ZQ== 58716
+IEthbnll 58717
+IHZlcmdl 58718
+IEJ1bGdhcmlh 58719
+Z2V0Qm9keQ== 58720
+IHw+ 58721
+Y2VwaA== 58722
+LkRhdGVUaW1lUGlja2Vy 58723
+LiI7Cgo= 58724
+IFRpZQ== 58725
+LGl0ZW0= 58726
+IG1lbm4= 58727
+R2Fz 58728
+b2NoYQ== 58729
+X3ZpcnR1YWw= 58730
+IG1hc3RlcnBpZWNl 58731
+X3NlcXVlbmNlcw== 58732
+TFRF 58733
+IFN1Ym1pc3Npb24= 58734
+Q2FsbGVy 58735
+JFw= 58736
+U3BvcnQ= 58737
+YWd1cw== 58738
+Q29uc3RyYWludE1ha2Vy 58739
+IGNvbG9j 58740
+IHdpZw== 58741
+INCj 58742
+CUFycmF5 58743
+TG9va3M= 58744
+IEdUQQ== 58745
+LnN0ZXBz 58746
+YXRjaGV3YW4= 58747
+X3Jhbmdlcw== 58748
+ZXh0QWxpZ25tZW50 58749
+IEJyZW5uYW4= 58750
+IGFic3RyYWN0aW9u 58751
+dWxlckFuZ2xlcw== 58752
+Lm1pc2M= 58753
+IGFudGlib2RpZXM= 58754
+IGV4cG9uZW50aWFs 58755
+IENIQU5ORUw= 58756
+ZXhwZW5zZQ== 58757
+J3k= 58758
+IGRldGVjdGl2ZXM= 58759
+IHB1cnBvcnRlZA== 58760
+WVNURU0= 58761
+IHJhZGlvYWN0aXZl 58762
+IExhdGluYQ== 58763
+LkVuY29kaW5n 58764
+LlRBRw== 58765
+eGlu 58766
+RGVncmVl 58767
+dXJhY2lvbg== 58768
+cHJpY2Vz 58769
+IFJlZmVyZW50aWFsQWN0aW9u 58770
+IHJhcml0eQ== 58771
+IHBpbGVz 58772
+Z2VuZGU= 58773
+X3Byb2plY3Rz 58774
+X2dsb2JhbHM= 58775
+LnN0YXJ0VGltZQ== 58776
+IOq1rA== 58777
+U0VDVElPTg== 58778
+X3B1Ymxpc2g= 58779
+RmF1bHQ= 58780
+RERM 58781
+X3ByaW9y 58782
+TW9t 58783
+IHRoaWNrZXI= 58784
+IHNlcXVlbGl6ZQ== 58785
+IGVzc2VudGlhbHM= 58786
+c3RyYXM= 58787
+aW50cg== 58788
+PigoKQ== 58789
+Lm1hbmFnZW1lbnQ= 58790
+ZWls 58791
+6Zet 58792
+QXdhcmU= 58793
+LkNpdHk= 58794
+IEFyYml0 58795
+X0RN 58796
+X2tleWJvYXJk 58797
+TE9iamVjdA== 58798
+LXdlYnBhY2s= 58799
+IE5ld3BvcnQ= 58800
+IHByaW5jaXBhbENvbHVtbg== 58801
+bGVnYW50 58802
+IHBhbGxldA== 58803
+IGZyYWN0dXJl 58804
+IGdtYWls 58805
+Lk1ldGE= 58806
+QWJvdmU= 58807
+LktleUV2ZW50 58808
+aml0 58809
+X21hY3Jv 58810
+X1BVU0g= 58811
+4bup 58812
+L2NvbnRyb2xsZXI= 58813
+5Yqg6L29 58814
+IHN1cGVyZmljaWFs 58815
+ZXh0ZXJpdHk= 58816
+IG1lbnNhZ2Vt 58817
+V2luZA== 58818
+aXN0b24= 58819
+Lm9wZW5hcGk= 58820
+0LjRgNC+0LI= 58821
+IFNlcmlhbGl6ZXI= 58822
+dWN0aXZl 58823
+IHphcg== 58824
+UGxhY2Vz 58825
+LlN0YXRpYw== 58826
+QmE= 58827
+IGluYWR2ZXJ0 58828
+IEluZG9uZXNpYW4= 58829
+X0lQVg== 58830
+KGhvcml6b250YWw= 58831
+IGdldFRpdGxl 58832
+aWRlcHJlc3M= 58833
+IENvbnNvbGVDb2xvcg== 58834
+aXBlcnM= 58835
+JG91dA== 58836
+IGZlc3RpdmU= 58837
+IGV2ZW5pbmdz 58838
+LkdldERhdGE= 58839
+dWl0a2E= 58840
+IE1hbnVhbHM= 58841
+dXNzZWQ= 58842
+X01heA== 58843
+LkNoYXQ= 58844
+IEFpcmNyYWZ0 58845
+PWNvbQ== 58846
+Rk9VTkQ= 58847
+YXBybw== 58848
+IHRyZWFzdXJlcw== 58849
+X2FsaXZl 58850
+IGdhZGdldA== 58851
+ZWtpbmc= 58852
+QnV0dG9uRG93bg== 58853
+QnJvd3NhYmxl 58854
+LlBFUk1JU1NJT04= 58855
+UEFTU1dPUkQ= 58856
+IEhBU0g= 58857
+ZsOp 58858
+XFRlc3RDYXNl 58859
+TE9TUw== 58860
+b3RoZXJz 58861
+LEo= 58862
+IGFzc2hvbGU= 58863
+d2Vyaw== 58864
+IG3Dow== 58865
+Lmll 58866
+ZXZpbA== 58867
+a29udGFrdGU= 58868
+Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8K 58869
+PXN5cw== 58870
+CWxvY2s= 58871
+LS07Cgo= 58872
+X0ZVTg== 58873
+RmlsbENvbG9y 58874
+w7Nh 58875
+cHJlbmQ= 58876
+IGNvbXByZXNzb3I= 58877
+TW90aGVy 58878
+IEFyY2hlcg== 58879
+LmdvdG8= 58880
+IHfDvHJkZQ== 58881
+IGJhbWJvbw== 58882
+77yO 58883
+IFRyZWVz 58884
+IGJ1bXBlcg== 58885
+IHNhdXNhZ2U= 58886
+IEVsYXN0aWNzZWFyY2g= 58887
+IGhvcml6b250YWxseQ== 58888
+IEd1bA== 58889
+SW1tdXRhYmxl 58890
+IGxvc2Vy 58891
+IGFib3J0ZWQ= 58892
+LWRlbW8= 58893
+IEhhdGNo 58894
+IHVuZGU= 58895
+IHByb2Nlc3Nv 58896
+LWNhbGw= 58897
+SW5jb21l 58898
+5YM= 58899
+X3JldHVybnM= 58900
+J10uIic= 58901
+KHN3 58902
+Q0JT 58903
+YW1pbGllcw== 58904
+IFlvdXJzZWxm 58905
+IEhvbHQ= 58906
+Lk1PTg== 58907
+4KeH 58908
+0YjQtQ== 58909
+YW5vbg== 58910
+IEZvbnRBd2Vzb21l 58911
+cHJvZHVjZXI= 58912
+anI= 58913
+IG1hdQ== 58914
+CWludGVy 58915
+IGRpc2hvbmVzdA== 58916
+IG1hZ25h 58917
+IENvbGxlY3RpdmU= 58918
+IHZyYWltZW50 58919
+IGNob2l4 58920
+c3RheQ== 58921
+IHdlbGRpbmc= 58922
+cmlzaW5n 58923
+LG1pbg== 58924
+IEZhdGU= 58925
+Z2xvYg== 58926
+UkdCQQ== 58927
+IGRldHRl 58928
+VmVu 58929
+IGVtYmFycmFzc21lbnQ= 58930
+LkRFTEVURQ== 58931
+Z3JlZ2Fy 58932
+LXJlbmRlcg== 58933
+KGJ1Y2tldA== 58934
+Ij4KCgo= 58935
+LndhaXRLZXk= 58936
+QnVzeQ== 58937
+IGRpZmZlcmVudGlhdGlvbg== 58938
+IENTVA== 58939
+LkNvbnN0YW50 58940
+IGxpbmVOdW1iZXI= 58941
+KG1hdGNoZXM= 58942
+IHdlYnNvY2tldA== 58943
+IGJhcnJlZA== 58944
+IHB1ZWRlcw== 58945
+TW9ubw== 58946
+Q09SRQ== 58947
+SUlE 58948
+ICAgIA0KDQo= 58949
+IHDDumJsaWNv 58950
+bGVhbmluZw== 58951
+IGNsZWFuc2luZw== 58952
+IGNyaXM= 58953
+IERldmlscw== 58954
+X1NFVFRJTkc= 58955
+dW50YXJ5 58956
+Lik7Cg== 58957
+CiAgIAo= 58958
+W2N1cnI= 58959
+dHN5 58960
+IEFsZXhpcw== 58961
+cml0ZWw= 58962
+IHBldHJvbGV1bQ== 58963
+LnByZXByb2Nlc3Npbmc= 58964
+bWF0dGVy 58965
+Rm9yUmVzdWx0 58966
+LWxpY2Vuc2U= 58967
+IHRyYXZlbGxlcnM= 58968
+IERpc3BhdGNoZXI= 58969
+ZW5uaWZlcg== 58970
+IGRpZ2VzdGl2ZQ== 58971
+UEVE 58972
+aGliaXRpb24= 58973
+TUFTQ29uc3RyYWludE1ha2Vy 58974
+IFdhdHQ= 58975
+QmVuZWY= 58976
+LnNldFZpZXc= 58977
+ZHRv 58978
+VEVF 58979
+IFBlbG9zaQ== 58980
+X0VYVFJB 58981
+IG1lZGFscw== 58982
+eGhy 58983
+Zm9yZWNhc3Q= 58984
+IG5hcmdpbg== 58985
+b3Vucw== 58986
+LWZpbGw= 58987
+X0NVUlNPUg== 58988
+IHN1cGVydmlzZWQ= 58989
+IHR1cmY= 58990
+IEVkZ2Fy 58991
+UE9TSVRJT04= 58992
+IGNhdGVnb3J5SWQ= 58993
+4ok= 58994
+X0VS 58995
+4bunYQ== 58996
+U2hvd24= 58997
+Lmxs 58998
+X1BPTElDWQ== 58999
+KCksJw== 59000
+IFByZXY= 59001
+IFN0cmluZ0ZpZWxk 59002
+CUdsb2JhbA== 59003
+YXNzZWQ= 59004
+VGhyb3VnaG91dA== 59005
+b3N0cmluZ3N0cmVhbQ== 59006
+LmF3dGV4dHJh 59007
+IHNsb3Blcw== 59008
+IFNlcXVlbnRpYWw= 59009
+IGdpb3Ju 59010
+IHplbGY= 59011
+IHZlcnNhdGlsaXR5 59012
+bGVuZWNr 59013
+LmNnaQ== 59014
+IGRvdWJsaW5n 59015
+IEJhbmdrb2s= 59016
+IGJ1dXJ0 59017
+IHVzdcOhcmlv 59018
+c3R1ZGlv 59019
+IGpldW5lcw== 59020
+IG11dGVk 59021
+IGlwcw== 59022
+X2ZyYWN0aW9u 59023
+JiYo 59024
+IHN0dW50 59025
+Jyk7Pz48Lw== 59026
+IExpZ2E= 59027
+IHF1YWxpdMOp 59028
+QXNzaWduYWJsZQ== 59029
+IHdvcmthcm91bmQ= 59030
+IHNwdXI= 59031
+IHNsZXc= 59032
+X0dF 59033
+IEFncmljdWx0dXJhbA== 59034
+IHJlbGVudGxlc3M= 59035
+KFF1ZXJ5 59036
+IFNlY3Rpb25z 59037
+IHJldmlld2Vycw== 59038
+UmFpbg== 59039
+ZGxn 59040
+YXNzZXJ0RmFsc2U= 59041
+IG5vbWluZWVz 59042
+X18pLg== 59043
+LmR5bmFtaWM= 59044
+IFBCUw== 59045
+Q2hhbmdpbmc= 59046
+IHNsaWdodGVzdA== 59047
+IE1hbmc= 59048
+fT4NCg== 59049
+IGV2YXBvcg== 59050
+YmFibGU= 59051
+IFBSSUNF 59052
+IOaz 59053
+bHVjZW50 59054
+IHZhbXA= 59055
+IFRlY2huaWNpYW4= 59056
+IHVuaXF1ZW5lc3M= 59057
+TWVz 59058
+dXJiYW4= 59059
+LnBhcmFtZXRyaXpl 59060
+IFJlcGxheQ== 59061
+U2Vzc2lvbnM= 59062
+ZW1icg== 59063
+LUFtZXJpY2Fucw== 59064
+X1BST1hZ 59065
+IHBpYW4= 59066
+IHRyaWU= 59067
+IERlc3RydWN0b3I= 59068
+R2FtZVN0YXRl 59069
+IElNRg== 59070
+Y2hpbg== 59071
+IHBvcnRl 59072
+IFN3YWw= 59073
+5Z+O 59074
+U3Vic3RyaW5n 59075
+aW1pbmc= 59076
+L0xpYnJhcnk= 59077
+IGZyaWdodGVuZWQ= 59078
+d3JpdGVz 59079
+IHJlY3Vyc29z 59080
+YXJSZXN1bHQ= 59081
+X0lOSVRJQUxJWg== 59082
+IEJhZGdl 59083
+X2NyYw== 59084
+RWlnaHQ= 59085
+IERJU1RJTkNU 59086
+IHRocm8= 59087
+QFhtbA== 59088
+IExlZ2VuZGFyeQ== 59089
+LXR3aXR0ZXI= 59090
+X2Vhc3k= 59091
+ICsrKw== 59092
+KERBVEE= 59093
+LkxvY2FsZQ== 59094
+IGvDpA== 59095
+IG51cnQ= 59096
+IGNydWlz 59097
+X2lvcw== 59098
+IHNlbnNpbmc= 59099
+X0xpbmU= 59100
+CiAgICAgICAgICAgICAgICAgICAgCg== 59101
+cG9uZw== 59102
+b2xlb24= 59103
+IHdpbGRjYXJk 59104
+55So5oi35ZCN 59105
+IGJlZ2dpbmc= 59106
+Um9k 59107
+IMOO 59108
+X0NFTEw= 59109
+UmVzZWFyY2hlcnM= 59110
+LnNlbGVjdG9y 59111
+X2luZw== 59112
+IGFzcGlyaW5n 59113
+IGltbW9ydGFs 59114
+IHltaW4= 59115
+X3JvYm90 59116
+IHBsdXI= 59117
+QlRD 59118
+IERJRA== 59119
+IHBpZXJjaW5n 59120
+KnU= 59121
+X0RFRklORUQ= 59122
+IFRoaQ== 59123
+aXRhaXJl 59124
+KG1lZGlh 59125
+LW9ucw== 59126
+IGNoZWZz 59127
+ICIqLg== 59128
+L0FQ 59129
+IHJhem9y 59130
+IHNlYXJjaERhdGE= 59131
+ID0m 59132
+IOOAgg== 59133
+IG1vdXJu 59134
+dGluZ2hhbQ== 59135
+IG9saQ== 59136
+IFZlcm5vbg== 59137
+X1JT 59138
+nuaApw== 59139
+IGbDoWNpbA== 59140
+YW5nZW4= 59141
+Y2VsYWlu 59142
+IGFpbA== 59143
+bGVzdA== 59144
+IFFDT01QQVJF 59145
+Z2Fpbg== 59146
+IM61 59147
+IEtvYg== 59148
+IEZhdWx0 59149
+X2NvbmZpZ3M= 59150
+57uT5p6c 59151
+Lis= 59152
+Y2FsYXI= 59153
+KGNvbG9ycw== 59154
+TXVs 59155
+X0FSVA== 59156
+IGV4cGVyaW1lbnRpbmc= 59157
+ZXJtZW4= 59158
+IEFuZ2xv 59159
+LkZpeGVkU2luZ2xl 59160
+U2Vh 59161
+IGN0eHQ= 59162
+LnNsaWRlcg== 59163
+Q29sbGFwc2U= 59164
+R3JleQ== 59165
+IGZsZA== 59166
+LXByb29m 59167
+LmNhcGFjaXR5 59168
+Z2V0UGFyZW50 59169
+IENvbXBsaWFuY2U= 59170
+IGJ1cmds 59171
+LXJlYw== 59172
+IG92ZXJ3cml0dGVu 59173
+TVU= 59174
+IHJvdXRlcnM= 59175
+CU1vZGVs 59176
+IGZhbnRhc2llcw== 59177
+YXZpYW4= 59178
+X3ByZWM= 59179
+IFNjYW5kaW4= 59180
+IC8vPA== 59181
+L29jdA== 59182
+IGNlcmVtb25pZXM= 59183
+TW9udGhz 59184
+dW5keQ== 59185
+IHF1ZWQ= 59186
+IE5vdQ== 59187
+IFZpYnI= 59188
+LnJnYg== 59189
+IGNpdHJ1cw== 59190
+IGJyYWNlcw== 59191
+LXVwcGVyY2FzZQ== 59192
+Z2V0VGFibGU= 59193
+IGRvcG8= 59194
+IEtlcnI= 59195
+X0NISUxE 59196
+LWNsb3Vk 59197
+CU1hdHJpeA== 59198
+IGdhcmRlbmluZw== 59199
+U2luZw== 59200
+YWxtb3N0 59201
+UmVxdWlyZW1lbnRz 59202
+dWd1YXk= 59203
+KFByb3BlcnR5 59204
+c3Vic2NyaWJlcg== 59205
+RkFTVA== 59206
+cmVhY3Rpb24= 59207
+KGxw 59208
+KX0pCg== 59209
+YCku 59210
+LndhbGxldA== 59211
+X2V4Y2hhbmdl 59212
+Lk1heGltdW0= 59213
+IFZlcmI= 59214
+4pSB 59215
+KCk8 59216
+77ybCg== 59217
+Uk9U 59218
+Q0FSRA== 59219
+dWJpdA== 59220
+e0A= 59221
+X2tlbA== 59222
+IFRvb2x0aXA= 59223
+TXlTUUw= 59224
+TWFpbkFjdGl2aXR5 59225
+YXJm 59226
+IG1hbGlnbg== 59227
+IHNlaW5lbg== 59228
+YXBpc3Q= 59229
+IDwl 59230
+TWV0aG9kSW1wbA== 59231
+TWls 59232
+IE1pY2s= 59233
+LmRlcGVuZA== 59234
+PElE 59235
+IHByZWRpY3RpdmU= 59236
+IEFQUExJQ0FUSU9O 59237
+bGVm 59238
+ZGltZW5zaW9ucw== 59239
+IGNvbm9jZXI= 59240
+L2NvbmY= 59241
+IFRyYWN5 59242
+Rm90bw== 59243
+X3JlbWFpbmluZw== 59244
+PWZpbGU= 59245
+IHBhZ2VJbmRleA== 59246
+IFBhcmlzaA== 59247
+IHRleGFz 59248
+IE1BR0lD 59249
+IEhldw== 59250
+ZGlmZmVyZW5jZQ== 59251
+IGFsdHVyYQ== 59252
+Y3Vt 59253
+CWRhdGFUeXBl 59254
+IGNhcmFjdGVyZXM= 59255
+YXZpb3Vycw== 59256
+IFZPSUQ= 59257
+6L+R 59258
+UFVCTElD 59259
+Qmlv 59260
+IHN0cmluZ0J5QXBwZW5kaW5n 59261
+UGFyc2VFeGNlcHRpb24= 59262
+IFN1ZmY= 59263
+IE5vcnRvbg== 59264
+L2RldGFpbHM= 59265
+Lm51bGw= 59266
+Pj4m 59267
+CW9r 59268
+LWxvdw== 59269
+LnVzdWFyaW8= 59270
+bmVzdGVk 59271
+WEI= 59272
+T1VSUw== 59273
+LkJvcmRlckNvbG9y 59274
+IGJyb3c= 59275
+INCV 59276
+Y29ycg== 59277
+IFJlZHNraW5z 59278
+LmdldFRhZw== 59279
+LmdldFRyYW5zYWN0aW9u 59280
+IHN0aWdtYQ== 59281
+aGFyZHQ= 59282
+IFBsYXllclByZWZz 59283
+YWxzeQ== 59284
+dWNzb24= 59285
+TGFuZ3VhZ2Vz 59286
+IE9saXZpYQ== 59287
+IHRhYw== 59288
+IGJsaQ== 59289
+IGNhdmFs 59290
+IGNvbnNvbGlkYXRlZA== 59291
+IHBlcmls 59292
+IGRlbGU= 59293
+IGZvcm11bGF0ZWQ= 59294
+IGhpZ2h3YXlz 59295
+LnNwYXdu 59296
+PT0k 59297
+IE5pZXQ= 59298
+IHZlZ2dpZXM= 59299
+eXBv 59300
+LXJ1bGU= 59301
+IFZpZQ== 59302
+L2VwbA== 59303
+IGVuZmFudHM= 59304
+c3RyaW5nTGl0ZXJhbA== 59305
+IHRvdWdoZXN0 59306
+YnV5ZXI= 59307
+IGNvdmFyaWFuY2U= 59308
+IGlsaQ== 59309
+IFNvcGhpZQ== 59310
+IEJBQg== 59311
+ICIpLA== 59312
+IFVr 59313
+Y3VycmVudEluZGV4 59314
+X3VzZXJkYXRh 59315
+LmNvZGVj 59316
+IFB1bmphYg== 59317
+IFNOUA== 59318
+bG9s 59319
+YWR2YW5jZQ== 59320
+IGNvbWZ5 59321
+SnNvbklnbm9yZQ== 59322
+IGZhc2hpb25hYmxl 59323
+IElDT04= 59324
+IG9yYQ== 59325
+IFByaWNpbmc= 59326
+PG51bQ== 59327
+IElSQw== 59328
+RVJW 59329
+IE1laW4= 59330
+IElEaWN0aW9uYXJ5 59331
+QURPVw== 59332
+aXNOZXc= 59333
+IERldm9u 59334
+YXRs 59335
+KHJlcXVlc3RDb2Rl 59336
+CVByZXBhcmVkU3RhdGVtZW50 59337
+SU1QT1JU 59338
+IG1hcml0YWw= 59339
+X1NFTEVDVEVE 59340
+Z2V0UmVzcG9uc2U= 59341
+YXJEb3du 59342
+QlY= 59343
+aWJOYW1l 59344
+IFBBVENI 59345
+w6TDpG4= 59346
+IGRhYXI= 59347
+IEZpbGVNb2Rl 59348
+IG1hcnR5 59349
+LlNwcmluZ0FwcGxpY2F0aW9u 59350
+Y2VuZQ== 59351
+YW1wb2xpbmU= 59352
+Z2V0U2l6ZQ== 59353
+UmVzdGFydA== 59354
+5pWI 59355
+LnByb2plY3Rz 59356
+IEV0aGlvcGlh 59357
+IHN0YXR1c2Vz 59358
+VElPTg== 59359
+KGJn 59360
+IFh1bml0 59361
+VGVtcG9yYXJ5 59362
+IEVuZ2FnZW1lbnQ= 59363
+IHhm 59364
+IHByb3hpZXM= 59365
+IGdlbmVzaXM= 59366
+UGFnZXJBZGFwdGVy 59367
+IFNsYXZl 59368
+IHN1bmdsYXNzZXM= 59369
+IENobG9l 59370
+IGtvamk= 59371
+YWRlbQ== 59372
+CUpTT05PYmplY3Q= 59373
+zrM= 59374
+IGhvcnM= 59375
+Knc= 59376
+w7Ny 59377
+ZXNjaA== 59378
+IGNyaXRpY2lzZWQ= 59379
+emlhbA== 59380
+IFNhbGVt 59381
+LlZlcnRpY2Fs 59382
+IFJhc2g= 59383
+PkU= 59384
+dGVyaW5n 59385
+L3NjcmVlbnM= 59386
+IGhlaWdodGVuZWQ= 59387
+0LDRgNGC 59388
+QXV0aG9yaXRpZXM= 59389
+X2Jib3g= 59390
+w7xuc3Q= 59391
+LmZvbnRTaXpl 59392
+IEJPT0xFQU4= 59393
+ZGl2aWRl 59394
+IFNsb3Zlbg== 59395
+dWNlcg== 59396
+2ZI= 59397
+c3R1Yg== 59398
+IG5hdmlnYXRpbmc= 59399
+OmFuaW1hdGVk 59400
+X05PVw== 59401
+X3ZlY3Q= 59402
+fXsK 59403
+QCg= 59404
+IHRlbGVjb20= 59405
+IGNvbnRyYWN0aW5n 59406
+IEFzc2FuZ2U= 59407
+IGV4dHJhY3Rpbmc= 59408
+IGdyw7Y= 59409
+Y29icmE= 59410
+LkRJUw== 59411
+IGNyYWI= 59412
+IHR3aXRjaA== 59413
+IHZlcnRz 59414
+IHJlamVjdHM= 59415
+CWZvcm1hdA== 59416
+IHJlZ2VuZXJhdGlvbg== 59417
+LlN5cw== 59418
+c29sdmU= 59419
+CWRpYWxvZw== 59420
+c2hp 59421
+bWV0ZXI= 59422
+KGJlc3Q= 59423
+dmFsaWRhdG9ycw== 59424
+IG9ud2FyZHM= 59425
+IGd1cnU= 59426
+IG1vZGVyYXRvcg== 59427
+b3dpZWQ= 59428
+ZXhwZXJpbWVudA== 59429
+cnVi 59430
+IG1xdHQ= 59431
+IENhdWNhcw== 59432
+IG5hdGlvbmFsaXNt 59433
+IG1hbmdl 59434
+CUltR3Vp 59435
+L0VkaXQ= 59436
+IGluaA== 59437
+IGludGVsbGln 59438
+ZXJva2Vl 59439
+CWV4cG9ydA== 59440
+IGRpc2NyaW1pbmF0ZQ== 59441
+c3VidHJhY3Q= 59442
+IE1vb2RsZQ== 59443
+ZW5zZXI= 59444
+IEd1aWRlcw== 59445
+UkFQ 59446
+LWhvdA== 59447
+X2dycA== 59448
+LnBpY3R1cmU= 59449
+WEE= 59450
+IGluaXRWaWV3 59451
+X0NvbW0= 59452
+IG92ZXJkb3Nl 59453
+ICsKCg== 59454
+IFNpbGVudA== 59455
+c2hvd3M= 59456
+IGludGVycG9sYXRl 59457
+Rm9ybWF0aW9u 59458
+IGJpc2M= 59459
+bWFya2V0cw== 59460
+KFND 59461
+WmU= 59462
+IE5ldHdvcmtpbmc= 59463
+IGFkcmVuYWw= 59464
+IEd1bnM= 59465
+ZXRlb3I= 59466
+RGVjbGFyZWQ= 59467
+b3JnZXRvd24= 59468
+IGthcmVuYQ== 59469
+L3Bhc3N3b3Jk 59470
+X2FkZHJlc3Nlcw== 59471
+SVRFUkFM 59472
+QnV6eg== 59473
+IENvbndheQ== 59474
+KGNhc2U= 59475
+UFdE 59476
+aGVpcm8= 59477
+KGFjdA== 59478
+KioNCg== 59479
+KCkpOwoKCg== 59480
+IGFudg== 59481
+IC4uCgo= 59482
+KE1lbnVJdGVt 59483
+KG1haWw= 59484
+X3NlY3Rpb25z 59485
+CW5ldA== 59486
+IHBsdXQ= 59487
+IHdyZW5jaA== 59488
+L29iamVjdA== 59489
+IElzdA== 59490
+IFZJUw== 59491
+L3B1Yg== 59492
+YWx0ZW4= 59493
+IGd1aXRhcnM= 59494
+IGFudGliaW90aWM= 59495
+77yW 59496
+wrk= 59497
+ICIrIg== 59498
+Zm9ybXVsYQ== 59499
+IGJhYmVz 59500
+IFByb21wdA== 59501
+IGVuaW0= 59502
+L3BsYXllcg== 59503
+CXJlZg== 59504
+IGJ5xIc= 59505
+IGNvbnN1bWVz 59506
+IEhhc3Q= 59507
+IFRhbw== 59508
+ICcpKQo= 59509
+IGNsYW0= 59510
+IHRoaWdocw== 59511
+IG1vdGlm 59512
+QXBpT3BlcmF0aW9u 59513
+IFdM 59514
+Z2V0Qw== 59515
+CWZsYWdz 59516
+b2ludG1lbnRz 59517
+IGVjb25vbWljYWw= 59518
+bmVlZGxl 59519
+eGxz 59520
+cHJhY3RpY2U= 59521
+dXR6ZXI= 59522
+dGltZW9mZGF5 59523
+LW91dHB1dA== 59524
+IGZpbmRCeUlk 59525
+IEJ1ZGR5 59526
+0J7Rgg== 59527
+U2V2ZW4= 59528
+IEJhcms= 59529
+IGVudm95 59530
+X2FsZ29yaXRobQ== 59531
+5Yip 59532
+IGJhbGxpc3RpYw== 59533
+56e7 59534
+cmFkZXM= 59535
+CWRvYw== 59536
+cm9kdWNpbmc= 59537
+IEVhdGluZw== 59538
+VW5tb3VudA== 59539
+L2RhdGFUYWJsZXM= 59540
+X2JvbnVz 59541
+IGxpdHQ= 59542
+cHBz 59543
+KWxvY2FsT2JqZWN0 59544
+cGVyZg== 59545
+IEhlbHZldGljYQ== 59546
+c2h1dGRvd24= 59547
+L21s 59548
+LnRva2Vucw== 59549
+IEhhcmRjb3Jl 59550
+LHJvdw== 59551
+L2Jn 59552
+U2NhbGVy 59553
+4oCUYXM= 59554
+X2xvZ2l0cw== 59555
+4oCZaW50 59556
+CUFwcA== 59557
+SW1wbGljaXQ= 59558
+LkZwcmludGY= 59559
+RVRP 59560
+IHRlcnJh 59561
+IHBvc3Nlc3Npbmc= 59562
+LnJzdHJpcA== 59563
+LCks 59564
+PXllcw== 59565
+IFN0cmlwZQ== 59566
+Pz0= 59567
+bmV1dHJhbA== 59568
+Lmdvb2Q= 59569
+IGtlbm5lbg== 59570
+IFN1bmc= 59571
+ZmF1bHQ= 59572
+eXN0YXRlY2hhbmdl 59573
+Q2FuYWRpYW4= 59574
+JywnIi4k 59575
+IE1pdHM= 59576
+w6ZuZA== 59577
+IFNUUlVDVA== 59578
+IFVSTFdpdGhTdHJpbmc= 59579
+IENvbXBhc3M= 59580
+IC0tCgo= 59581
+IE5TTGF5b3V0Q29uc3RyYWludA== 59582
+fG1pbg== 59583
+LWFkanVzdA== 59584
+IHJlYnVpbHQ= 59585
+TElHSFQ= 59586
+L3Nl 59587
+LW1vdW50 59588
+dnBu 59589
+dmFsaWRhdGVk 59590
+KFFPYmplY3Q= 59591
+IGlnbml0aW9u 59592
+IENoYXJnZXJz 59593
+UllQVE8= 59594
+XWluaXRXaXRoRnJhbWU= 59595
+IEZsdWlk 59596
+IGNhZHJl 59597
+IG5vbWluYXRpb25z 59598
+TmVpbGw= 59599
+IEhvdQ== 59600
+IGN1cnJlbnRz 59601
+X2dlbmU= 59602
+KGlucA== 59603
+UGFyaXM= 59604
+esSZ 59605
+YWdncmVnYXRl 59606
+IGFzc29j 59607
+d2VldGVk 59608
+ZXJyYXQ= 59609
+4oCTCgo= 59610
+ICcvJywK 59611
+Zml4dHVyZQ== 59612
+IEhpZ2hlc3Q= 59613
+YW1iaWVudA== 59614
+IGNobW9k 59615
+IGNvbnRl 59616
+IHNlbnN1YWw= 59617
+IGdhcm1lbnQ= 59618
+emVycw== 59619
+IFBvd2VyZWQ= 59620
+ZG9tYWlucw== 59621
+UmV3YXJk 59622
+aW9tYW5pcA== 59623
+IGNvY2twaXQ= 59624
+b3V0ZmlsZQ== 59625
+IGJ1aWx0aW4= 59626
+IGluc2lzdGluZw== 59627
+LnZhcnM= 59628
+emlwY29kZQ== 59629
+IO+/ve+/ve+/ve+/vQ== 59630
+ZmFpbHM= 59631
+IGNvbnNvbGlkYXRpb24= 59632
+X29pZA== 59633
+UGxhbmV0 59634
+ID0iLA== 59635
+CWVs 59636
+VUlMVA== 59637
+w6R0eg== 59638
+YWZhcmk= 59639
+IE1jQ2w= 59640
+VGltZWxpbmU= 59641
+RXN0YQ== 59642
+IGZyYW0= 59643
+WUU= 59644
+IGNlcmVicmFs 59645
+T2ZNb250aA== 59646
+IFByZWdu 59647
+INC60LvQsNGB0YE= 59648
+ICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgCg== 59649
+IEZyZXM= 59650
+QXBwcm92ZWQ= 59651
+LlNwZWNpYWw= 59652
+IFByb3Rlc3RhbnQ= 59653
+IGFsbGVyZ3k= 59654
+X3BjbQ== 59655
+CUNvcHlyaWdodA== 59656
+IHN1cGVyQ2xhc3M= 59657
+InN0cmNvbnY= 59658
+IE1vaGFtZWQ= 59659
+ICcvLw== 59660
+Rm9yZUNvbG9y 59661
+QXJ0aHVy 59662
+IEp1bmdsZQ== 59663
+IHZlaW5z 59664
+U2Fk 59665
+IGJhY2t1cHM= 59666
+IE9waW5pb24= 59667
+w7t0 59668
+IGludGVybWl0dA== 59669
+b2R5bg== 59670
+IENocmlzdGluYQ== 59671
+IGFuZHJl 59672
+IGV2YWN1YXRpb24= 59673
+cGFsZXR0ZQ== 59674
+aG9yc2U= 59675
+IFJlc2lkZW50 59676
+IEhhc3Nhbg== 59677
+Lk5pbA== 59678
+IGFpc2xl 59679
+IEdyb3dpbmc= 59680
+IGJsb2dpbmZv 59681
+L3NxbA== 59682
+X2lvY3Rs 59683
+U2NhbGluZw== 59684
+IE1vbmFk 59685
+X2NwcA== 59686
+IEh1dGNo 59687
+IEFwcGxlV2ViS2l0 59688
+RXhwZW5zZQ== 59689
+X0pPQg== 59690
+IHBvaW50bGVzcw== 59691
+RnJvbUJvZHk= 59692
+YW50YWw= 59693
+IGRlcGljdGluZw== 59694
+IENFTEw= 59695
+IHJlZmlu 59696
+IENOQw== 59697
+7LmY 59698
+X2RpbWVuc2lvbnM= 59699
+IFNBTg== 59700
+IGFmdA== 59701
+IGZvb3RzdGVwcw== 59702
+Y2NvbGk= 59703
+X1BIT05F 59704
+L21hdGg= 59705
+LWtpbmQ= 59706
+IE1lYW5z 59707
+aWNoYWVs 59708
+Lmd1bmE= 59709
+IGluYXVndXJhdGlvbg== 59710
+LWRyaXZpbmc= 59711
+KGRlbGV0ZQ== 59712
+IHRvdGFsQ291bnQ= 59713
+X01D 59714
+LkV4dGVuc2lvbg== 59715
+Q29tbWVyY2lhbA== 59716
+IHpJbmRleA== 59717
+PEN1c3RvbWVy 59718
+Imc= 59719
+LXNoYXJl 59720
+IHBhY3Q= 59721
+YWdhcmE= 59722
+IFNJTA== 59723
+X21vZGVz 59724
+IE1vbGVjdWxhcg== 59725
+IHN5c3RlbWF0aWNhbGx5 59726
+PEc= 59727
+X3Njcg== 59728
+IE9ybw== 59729
+YXNlcnM= 59730
+IGJpYw== 59731
+IGRlc3Ryb3lz 59732
+UElQRQ== 59733
+LlN0YXJ0UG9zaXRpb24= 59734
+IGPhu6dh 59735
+aXJleg== 59736
+LkJ1bmlmdQ== 59737
+X0Z1bmN0aW9u 59738
+IHPDvA== 59739
+X2Z1dHVyZQ== 59740
+IFdlYWx0aA== 59741
+IE5hdHVyYWxseQ== 59742
+5oC7 59743
+X3llcw== 59744
+IGFicnVwdGx5 59745
+U3RyaW5nRW5jb2Rpbmc= 59746
+IENHUG9pbnRNYWtl 59747
+IHpo 59748
+IGltcGVyc29u 59749
+IHBpdm90YWw= 59750
+IFNvbWFsaWE= 59751
+IHNlZ21lbnRhdGlvbg== 59752
+X0FOQUw= 59753
+IExvZ2luQ29tcG9uZW50 59754
+Q29uc3VsdA== 59755
+IHRydW5jYXRlZA== 59756
+XSI7Cg== 59757
+LmdldENvbmZpZw== 59758
+IGludGVybnNoaXA= 59759
+QmFieQ== 59760
+6rCc 59761
+IHN0cmVuZ3RoZW5lZA== 59762
+X01J 59763
+YmFza2V0 59764
+IG5pY2h0cw== 59765
+IFRWcw== 59766
+IFNoYW4= 59767
+44K1 59768
+cmFjdXNl 59769
+LlJlTFU= 59770
+L2ludGVyZmFjZXM= 59771
+IGdldEl0ZW1Db3VudA== 59772
+IHJldGlyaW5n 59773
+IHNwZWNpYWxz 59774
+IGVudGl0eU1hbmFnZXI= 59775
+YmVsaWVm 59776
+IHNvbGRlcg== 59777
+ZGF1Z2h0ZXI= 59778
+aWprbA== 59779
+IHV0aWxpemVz 59780
+LmZpeGVk 59781
+U1U= 59782
+IGRyYXN0aWM= 59783
+IGhhY2tz 59784
+Z3J1bmQ= 59785
+IE1V 59786
+IFN0YXJ0ZXI= 59787
+LkNvbXBvbmVudHM= 59788
+X21vdG9y 59789
+R29sZGVu 59790
+IGxvZGdl 59791
+ICkpOw== 59792
+IENvcmludGg= 59793
+0LjRh9C10YHRgtCy0L4= 59794
+w7NuaWNv 59795
+Z3JlU1FM 59796
+IEZsdWVudA== 59797
+IG1hcmM= 59798
+LkxvYWRTY2VuZQ== 59799
+Lkdyb3Vwcw== 59800
+IGVyaA== 59801
+IEF1dHVtbg== 59802
+U3RvcHBlZA== 59803
+IGl0YWxpYW5v 59804
+IG1pbmlvbnM= 59805
+IEFzc2VydGlvbnM= 59806
+IG11eA== 59807
+QnU= 59808
+IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQ== 59809
+CXVw 59810
+cmVhZHlzdGF0ZWNoYW5nZQ== 59811
+X01ldGE= 59812
+IGN1cnJlbnREYXRl 59813
+IENoYXBtYW4= 59814
+VW5kbw== 59815
+U2Vhbg== 59816
+YXBy 59817
+IHBhcm0= 59818
+X2ljb25z 59819
+IFN0YQ== 59820
+w6F6 59821
+IHN1YmRpdmlzaW9u 59822
+IGFsdGVyaW5n 59823
+UE5H 59824
+cG9uZW50aWFs 59825
+IHBvc3RncmVz 59826
+IEJEUw== 59827
+LWV4aXN0ZW50 59828
+IEJyYWRmb3Jk 59829
+IE9NWA== 59830
+X1dISVRF 59831
+X1BST0dSQU0= 59832
+cWM= 59833
+IHR5cGluZ3NTbGlua3k= 59834
+IFBpY3M= 59835
+X01FVEE= 59836
+SVRURVI= 59837
+X3N1YnNjcmlwdGlvbg== 59838
+SVJPTk1FTlQ= 59839
+IEh5dW5kYWk= 59840
+KCk7CgoKCg== 59841
+INiz 59842
+IGphYw== 59843
+IGVsaW1pbmF0ZXM= 59844
+KX0pOwo= 59845
+IGNvbXByZW5k 59846
+CWluc2VydA== 59847
+X2ZhY2Vz 59848
+Ij4k 59849
+IGViYXk= 59850
+IGNhcHRpdmU= 59851
+cGxpYW50 59852
+IENhbGN1bGF0ZXM= 59853
+b2x0YQ== 59854
+ZXN0aW5n 59855
+X3JldmlzaW9u 59856
+IG3DunM= 59857
+K20= 59858
+IiwiIiwi 59859
+V0hBVA== 59860
+IGNvbXBhc3Npb25hdGU= 59861
+aGFyZ2E= 59862
+W3JhbmRvbQ== 59863
+IG1vZHVsbw== 59864
+KHNu 59865
+IG9jY3VwYXRpb25z 59866
+Ly8vLwo= 59867
+CWJvYXJk 59868
+IEJhbGs= 59869
+d2nEhQ== 59870
+IFdpZmk= 59871
+LlByb2ZpbGU= 59872
+Om1hag== 59873
+CW1hdA== 59874
+TE9DS1M= 59875
+KGpCdXR0b24= 59876
+ICgnJA== 59877
+TXVy 59878
+5oyJ 59879
+YmJsZQ== 59880
+IGZyb2c= 59881
+LWhpZGU= 59882
+IGJyb2FkY2FzdGVy 59883
+4Lie 59884
+aGFsZWQ= 59885
+IGFtdXNpbmc= 59886
+X3ByZWRpY3Rpb25z 59887
+X2ludHI= 59888
+IGVhZ2xl 59889
+0LDRgtC10LvRjA== 59890
+IGdldExpc3Q= 59891
+cHNpbG9u 59892
+IGNoYXJhY3Rlcml6YXRpb24= 59893
+QVJEUw== 59894
+IHJlbG9jYXRpb24= 59895
+IHJ1bGVycw== 59896
+UEFZ 59897
+IERlZmluaXRlbHk= 59898
+X0FjdGlvbg== 59899
+IGNsb3N1cmVz 59900
+IGZhY3R1YWw= 59901
+b2R5bmFtaWM= 59902
+IHByZWNhdXRpb25z 59903
+bmllag== 59904
+IFBhcnRpZXM= 59905
+IFN1YmFydQ== 59906
+IGNvdXNpbnM= 59907
+YXJiZWl0 59908
+Lm1vbmV5 59909
+Z3VudGE= 59910
+KGFuZA== 59911
+Z2V0aXRlbQ== 59912
+LlN0eWxlUHJpb3JpdHk= 59913
+IHNsaWQ= 59914
+c2luZ2xldG9u 59915
+IGdhcm4= 59916
+IFBBUw== 59917
+IGRheno= 59918
+YcW8 59919
+IGJvZ3Vz 59920
+IE1vZw== 59921
+IHJpdmFscnk= 59922
+aXNvbA== 59923
+IGxhbmRtYXJrcw== 59924
+w7Fhcw== 59925
+QmVybg== 59926
+IFNhY2hz 59927
+ICIpCgo= 59928
+IGhvc3RpbGl0eQ== 59929
+X21leA== 59930
+bWVyZQ== 59931
+TW90 59932
+cGljdHVyZUJveA== 59933
+RGVmZW5zZQ== 59934
+IGFmZmlkYXZpdA== 59935
+b3RoZXJ3aXNl 59936
+LmRpcmVjdG9yeQ== 59937
+X1VuaXR5RW5naW5l 59938
+LWJsb2c= 59939
+LnNraW4= 59940
+cGhlbQ== 59941
+QXBlbGxpZG8= 59942
+ZXJjaGFudA== 59943
+W2NsYXNz 59944
+IHdhcnQ= 59945
+LiJb 59946
+YWxldXI= 59947
+L2JhY2s= 59948
+ICAgIAkgICA= 59949
+IHByZWNpcGl0YXRpb24= 59950
+IG9ic3RydWN0aW9u 59951
+IHBPYmo= 59952
+IHJ1cHQ= 59953
+VUNLRVQ= 59954
+YXll 59955
+5o6S 59956
+Z3g= 59957
+IGVjbA== 59958
+IHNlY3JlY3k= 59959
+L0hlYWRlcg== 59960
+IExlc2I= 59961
+IGxlaQ== 59962
+IEJ1bGxldGlu 59963
+IGdpdmVhd2F5 59964
+LkhvbWU= 59965
+X1JPT00= 59966
+Ilc= 59967
+IGNvd29yaw== 59968
+X3Jh 59969
+IEN5Y2xpbmc= 59970
+IFBhdw== 59971
+IHB1cGls 59972
+L2FyY2g= 59973
+IEZpbGVVdGlscw== 59974
+6aaW 59975
+cnNw 59976
+IGZyZWVkb21z 59977
+IExlYXI= 59978
+fWApLg== 59979
+IGJvd2xz 59980
+L2Jsb2Nr 59981
+X2xvZ2dpbmc= 59982
+IG1ldGhhbmU= 59983
+IGhvcm5z 59984
+IHdvbmRlcmZ1bGx5 59985
+IGFsdGVyYXRpb25z 59986
+IGV4aWxl 59987
+bHNlbg== 59988
+X3BhdXNl 59989
+X0xBTkdVQUdF 59990
+IFVTREE= 59991
+X215c3Fs 59992
+X0FNT1VOVA== 59993
+IExJRkU= 59994
+IHlvdW5nc3RlcnM= 59995
+IHJpb3Rz 59996
+W0U= 59997
+IHVuZm9yZ2V0dGFibGU= 59998
+LH0sCg== 59999
+RGlzcG9zZWQ= 60000
+IEFzc2Fzc2lu 60001
+VU5H 60002
+IE5ld3Nw 60003
+VXNlclNlcnZpY2U= 60004
+OmFsb2Fk 60005
+Kycs 60006
+IHNldHRsZXJz 60007
+IHNjcmVhbXM= 60008
+IGluY29udmVuaWVuY2U= 60009
+LlJvdGF0ZQ== 60010
+IGphcnM= 60011
+IFB1enpsZQ== 60012
+IG1lc3Q= 60013
+YXJzaQ== 60014
+IFNoYXJtYQ== 60015
+fCg= 60016
+LmRz 60017
+IFNhY3JlZA== 60018
+X2V2dA== 60019
+IGV4cHJlc3Nlcw== 60020
+IGhvY2g= 60021
+IER1Y2g= 60022
+LmNhbGxz 60023
+dGhy 60024
+IFNoZWZmaWVsZA== 60025
+LkFsZXJ0RGlhbG9n 60026
+IHJhZGljYWxseQ== 60027
+IHRyb3Vz 60028
+IHByZXZhaWxpbmc= 60029
+IFdXSUk= 60030
+4oCZbg== 60031
+ZW5zZWx5 60032
+IFllc3RlcmRheQ== 60033
+IFNpcml1cw== 60034
+IGtpbGxlcnM= 60035
+IEZGVA== 60036
+IG92YWw= 60037
+Jyk6DQo= 60038
+IOygleuztA== 60039
+b3VyYWdl 60040
+IENoZWNrYm94 60041
+V29ya2Jvb2s= 60042
+LmRlZmVy 60043
+X2Zsb29y 60044
+IGNvdW5jaWxs 60045
+IG5vcnNrZQ== 60046
+bW9pbA== 60047
+b3JlYQ== 60048
+IG1hcmtldGVk 60049
+X1NVUg== 60050
+eEFB 60051
+IHN0YWluZWQ= 60052
+ZXV0 60053
+IE1lbmc= 60054
+IGllZWU= 60055
+LmV4dGVybg== 60056
+ZWdpZQ== 60057
+IHJhcHA= 60058
+IFB5b25neWFuZw== 60059
+J2NsYXNz 60060
+TW9i 60061
+IGluaXRpYWxWYWx1ZQ== 60062
+X3dhdmU= 60063
+IGphYg== 60064
+IG1hc2N1bGluZQ== 60065
+IGFtcGxpZmllcg== 60066
+IHR0eQ== 60067
+UGF0aENvbXBvbmVudA== 60068
+X3h0 60069
+IEdGUA== 60070
+L3NlYw== 60071
+CWRpc3BhdGNo 60072
+bWFya2Rvd24= 60073
+IFNjaG4= 60074
+Ym9sZQ== 60075
+wrfCtw== 60076
+bW91c2Vtb3Zl 60077
+IGVyck1zZw== 60078
+IGFzaWdu 60079
+X21vbm8= 60080
+VG9TZWxlY3Rvcg== 60081
+IFp1 60082
+KFJlY3Q= 60083
+IEVycm9yQ29kZQ== 60084
+bGF0aW4= 60085
+YW5naWJsZQ== 60086
+dnRr 60087
+Q0dTaXpl 60088
+UG9rZW1vbg== 60089
+IGNsYXNzbWF0ZXM= 60090
+IGF0dHJhY3Rz 60091
+IFRhdHRv 60092
+dWx0YW4= 60093
+b2zDs2c= 60094
+IGhhbHRlZA== 60095
+4KSo 60096
+IEthcnQ= 60097
+IHVl 60098
+X0luaXRTdHJ1Y3R1cmU= 60099
+VGVzdENsYXNz 60100
+IEFpcmJuYg== 60101
+XyIs 60102
+IGNoYXJjb2Fs 60103
+IGlwYw== 60104
+IFN0cmV0Y2g= 60105
+LmdsaWRl 60106
+bGF0ZXNBdXRvcmVzaXppbmdNYXNrSW50b0NvbnN0cmFpbnRz 60107
+IHBvdGlvbg== 60108
+SVRUTEU= 60109
+IGNvdW50ZXJ0 60110
+X2hk 60111
+cHJlcGFyZWQ= 60112
+QWRz 60113
+IFZhbXBpcmU= 60114
+cm9ib3Rz 60115
+LkNyZWF0ZUluZGV4 60116
+U3RhdHVzTGFiZWw= 60117
+IHR1Y2tlZA== 60118
+YWbDvHI= 60119
+VXQ= 60120
+IHN3ZWF0ZXI= 60121
+X0ZO 60122
+ICAgICAgICAgICAgICAgIAk= 60123
+YXRha2E= 60124
+IGV5ZWJyb3dz 60125
+YWNvZXM= 60126
+dWRlbg== 60127
+LkxpbmVhckxheW91dE1hbmFnZXI= 60128
+IHN3YXk= 60129
+IG11bHRpbg== 60130
+KCkpKSkK 60131
+IE5TVUludGVnZXI= 60132
+IE15QmFzZQ== 60133
+UGFydG5lcg== 60134
+dXRzY2hlbg== 60135
+IENhdGVy 60136
+LnNldEJhY2tncm91bmRDb2xvcg== 60137
+IGFjY29tcGxpc2htZW50 60138
+X3Byb2JsZW0= 60139
+LmR0ZA== 60140
+IHBhZ2VOdW1iZXI= 60141
+IGphY2tldHM= 60142
+IGNyb3BwZWQ= 60143
+dWVscw== 60144
+IEhlcA== 60145
+IGNhcHBlZA== 60146
+Kk1hdGg= 60147
+X2NhbGxiYWNrcw== 60148
+IHB1YmI= 60149
+IEJydW5zd2ljaw== 60150
+LnJlc3BvbmQ= 60151
+WyJf 60152
+IGJlZGRpbmc= 60153
+aHl0aG0= 60154
+T1g= 60155
+KHNwZWVk 60156
+IHBlc3RpY2lkZXM= 60157
+IC0tLS0tLS0= 60158
+LkJsdWU= 60159
+IG5vb2RsZXM= 60160
+IEdvZXM= 60161
+IHNhdmVy 60162
+b3h5 60163
+X2NvbXBsZXRpb24= 60164
+IFN3aW5nZXI= 60165
+IGdldERhdGU= 60166
+IG1pbmRlZA== 60167
+aW50ZWdyYXRpb24= 60168
+IExvdHVz 60169
+KHN0b3A= 60170
+KCcsJyk7Cg== 60171
+IGZsb29kcw== 60172
+IFdvcmtmbG93 60173
+IGVydXB0ZWQ= 60174
+TWFjcm8= 60175
+IFNhdWNl 60176
+IGV2ZW50TmFtZQ== 60177
+XElucHV0 60178
+QnJlYWtpbmc= 60179
+CXdoZW4= 60180
+X3B3 60181
+SU5ERVI= 60182
+IFdlbGxuZXNz 60183
+IHZveGVs 60184
+IE1lbGw= 60185
+IE1FRElB 60186
+U0VOUw== 60187
+IEZ1bmRz 60188
+IE1pbGQ= 60189
+PEFycmF5 60190
+LXRoaXM= 60191
+dW1wZWQ= 60192
+L2Z3 60193
+IERiQ29udGV4dA== 60194
+V0k= 60195
+Z2lybHM= 60196
+SE9X 60197
+Jyk7Pz4K 60198
+IHRlbXB0aW5n 60199
+IHRlc3RhbWVudA== 60200
+IGJpYmxl 60201
+IGNvbnN1bHRlZA== 60202
+IEluZGV4RXJyb3I= 60203
+6KiY 60204
+IGtleXBhZA== 60205
+aXp6bw== 60206
+KG9r 60207
+IHdoYXRzYXBw 60208
+IFJlbW90ZUV4Y2VwdGlvbg== 60209
+IHRlYW1lZA== 60210
+4oCU4oCU4oCU4oCU4oCU4oCU4oCU4oCU4oCU4oCU4oCU4oCU4oCU4oCU4oCU4oCU 60211
+wrss 60212
+IGdldFRpbWU= 60213
+ZGlhZw== 60214
+aXNzeQ== 60215
+IGhlZA== 60216
+IGtub3Rz 60217
+am9t 60218
+IGZ1bm5lbA== 60219
+LW1haWxz 60220
+IGV4cG9ydGluZw== 60221
+IFZM 60222
+IEthcm4= 60223
+IEJ1ZGRoaXNt 60224
+IEFsbGFu 60225
+X1JBRElVUw== 60226
+IHdvcmRpbmc= 60227
+IEZvcmdldA== 60228
+IENvcm9uYQ== 60229
+aXBoeQ== 60230
+IGxpbWJ1cmc= 60231
+dWdneQ== 60232
+IFVzZXJSZXBvc2l0b3J5 60233
+aW1pbg== 60234
+KGVsZQ== 60235
+IGxhYmVsbGVk 60236
+56S+ 60237
+IEhlcm1hbg== 60238
+LnFx 60239
+ICIpKTsK 60240
+aWViZXI= 60241
+LlRyYW5zbGF0ZQ== 60242
+cnlu 60243
+IGRlc2Vudg== 60244
+dW1k 60245
+U2ltcGx5 60246
+CW1vZGU= 60247
+UnBj 60248
+IFZhbGVuY2lh 60249
+IHN0YWZmZXJz 60250
+IHNlbHY= 60251
+IFNwaWtl 60252
+IGRlbGlj 60253
+IGVydQ== 60254
+X0RU 60255
+SnVkZ2U= 60256
+4buV 60257
+IEJhc2lu 60258
+Lm11dGFibGU= 60259
+InVybA== 60260
+IHRhcmlmZg== 60261
+IFNsZWV2ZQ== 60262
+IGZsYXJl 60263
+LmRyb3BvdXQ= 60264
+IGJyaWRlcw== 60265
+KSksDQo= 60266
+X2NvbnN0cmFpbnRz 60267
+ZGVzdHJ1Y3Q= 60268
+T3V0bGluZQ== 60269
+IGRpc2FwcGVhcnM= 60270
+X2xvY2tlZA== 60271
+IE5TTG9jYWxpemVkU3RyaW5n 60272
+Y2tl 60273
+CW51bGw= 60274
+YWRyZXNzZQ== 60275
+IHRvcHBpbmc= 60276
+IEpva2Vy 60277
+YmlzaG9w 60278
+0L3QvtGB0YLRjA== 60279
+YW5kZXJpbmc= 60280
+X2FtcA== 60281
+PXRpbWU= 60282
+X1NwYWNl 60283
+X1BVTEw= 60284
+Jz0= 60285
+IGFudGlxdQ== 60286
+IGNhY2g= 60287
+X19fCgo= 60288
+T05FUw== 60289
+0L7Rjw== 60290
+IHVucmVhZA== 60291
+LnBvbGljeQ== 60292
+b29vb29vb28= 60293
+65+s 60294
+IHVzdGVk 60295
+IFJlY2U= 60296
+IGFsbGVt 60297
+44O844K5 60298
+IFRob3VnaHRz 60299
+dmVpbGxhbmNl 60300
+aXN0cmF0ZQ== 60301
+X2xhbmU= 60302
+IGZhbWVk 60303
+LkdldE5hbWU= 60304
+IHNtb290aGVy 60305
+IFF1YWxpZmllZA== 60306
+YXplcnM= 60307
+X2dlbw== 60308
+RmF4 60309
+IE1pbmRz 60310
+IFJhaXNlcw== 60311
+IHRyYW5zY3JpcHRz 60312
+Q29udmVyc2F0aW9u 60313
+IHJlbWFya2Vk 60314
+64KY 60315
+ZGxpbmc= 60316
+IGRlcGxveWluZw== 60317
+IHNoYXJlZEFwcGxpY2F0aW9u 60318
+IGtw 60319
+Rm9udEF3ZXNvbWVJY29u 60320
+X2R1bW15 60321
+cmVpYmVu 60322
+IEphbmVpcm8= 60323
+RGlyZWN0aW9ucw== 60324
+LmdldEJlYW4= 60325
+c2Fzcw== 60326
+IGNvbW1hbmRlcnM= 60327
+dmF0aW9u 60328
+ZXJyb3JDb2Rl 60329
+IEFsbG95 60330
+LmxvY2FsaXplZA== 60331
+0JE= 60332
+IGRpc2h3YXNoZXI= 60333
+IFNvdXA= 60334
+TnU= 60335
+X0RlZmF1bHQ= 60336
+IHVuZXZlbg== 60337
+IC8+IjsK 60338
+LUJhc2Vk 60339
+IHNlYW1sZXNzbHk= 60340
+LW51bGw= 60341
+IFhD 60342
+IHN0ZXc= 60343
+KGRlbGF5 60344
+QVRPUlM= 60345
+IFdoZWVsZXI= 60346
+Ijw/ 60347
+IENoYW5kbGVy 60348
+IHJldGFsaWF0aW9u 60349
+IGJ1ZGRpZXM= 60350
+LXNpemluZw== 60351
+IEVpbnM= 60352
+IC4uLiw= 60353
+cXVldGU= 60354
+IERPQw== 60355
+IGZhbHNlbHk= 60356
+IGZsYXRz 60357
+TklDQUxM 60358
+IGxpYnI= 60359
+QmVOdWxs 60360
+aW11bGF0aW9u 60361
+CVF1ZXJ5 60362
+X3V0 60363
+IHBsYXF1ZQ== 60364
+YmlsZA== 60365
+IHNjcmVhbWVk 60366
+Lm12Yw== 60367
+LldpZGdldA== 60368
+IGRpZmZlcmluZw== 60369
+L3N1cHBvcnQ= 60370
+X1ZPTFVNRQ== 60371
+Lm5vZGVUeXBl 60372
+CVdyaXRl 60373
+IHLDs3du 60374
+Ym9va21hcms= 60375
+X0NPTk4= 60376
+IENyZWVk 60377
+IGluaGliaXRpb24= 60378
+IFJlaGFi 60379
+dXZyZQ== 60380
+IGR1bXBz 60381
+b3dlag== 60382
+X3BsYWNlaG9sZGVy 60383
+IEhXTkQ= 60384
+IGRlcm1hdA== 60385
+LmRldGFjaA== 60386
+IGZpbmFsaXplZA== 60387
+Z2VyaWVz 60388
+aWRhaw== 60389
+X3Byb2c= 60390
+IHVwZGF0ZVVzZXI= 60391
+bHlz 60392
+Lkdvb2dsZQ== 60393
+IGx1ZWdv 60394
+IGFudHM= 60395
+5qCH6aKY 60396
+IERSTQ== 60397
+0LvQtdC9 60398
+LWRi 60399
+ZXJyaWNr 60400
+X2xu 60401
+Li5c 60402
+aWtpdA== 60403
+IERpZW4= 60404
+IHBhcmFtZXRyb3M= 60405
+a2V5cHJlc3M= 60406
+IEtlcmFsYQ== 60407
+IGRyYWluZWQ= 60408
+ZsO8Zw== 60409
+IGNhcGl0 60410
+X2F1Zw== 60411
+dGFudA== 60412
+TmF2QmFy 60413
+IHJvbGxiYWNr 60414
+IGxleQ== 60415
+4LiI 60416
+IEJTUA== 60417
+IFByZWRpY3Rvcg== 60418
+IHdhZ29u 60419
+ICJ8Ig== 60420
+U2VydmU= 60421
+LkRvbmU= 60422
+IER1cmNo 60423
+UHJvdmlkZQ== 60424
+CXNjb3Jl 60425
+X09E 60426
+LndlYXBvbg== 60427
+IHVuaXZlcnNhbGx5 60428
+IGluanVuY3Rpb24= 60429
+X1NDUk9MTA== 60430
+Lk1hdHJpeA== 60431
+IE1vbmdvQ2xpZW50 60432
+YnVmZmVycw== 60433
+IGJhZGdlcw== 60434
+IHNoYXJrcw== 60435
+IFNoYXJr 60436
+TU9ERUw= 60437
+LlJFQUQ= 60438
+CXRhZw== 60439
+IHN0cnRvdXBwZXI= 60440
+RVJHWQ== 60441
+Ymlhcw== 60442
+IGFjY291bnRJZA== 60443
+IEVtbWFudWVs 60444
+IHJlc29ydHM= 60445
+IHN2bg== 60446
+d2FybmluZ3M= 60447
+X0lF 60448
+TEFT 60449
+IG51bGxh 60450
+CWFz 60451
+IGRlbWVhbg== 60452
+4oCcQXM= 60453
+QXV0aG9yaXplZA== 60454
+IHRlbmRlbmNpZXM= 60455
+LXNldHRpbmc= 60456
+IHByZWxvYWQ= 60457
+IGNubg== 60458
+4oCcTm8= 60459
+JSkKCg== 60460
+PVQ= 60461
+dXN0bw== 60462
+IEZJUkU= 60463
+cmVzZWFyY2g= 60464
+INCT 60465
+IExlc3NvbnM= 60466
+LkFwcGVuZEZvcm1hdA== 60467
+IGluaXRpYXRpb24= 60468
+IENvdXM= 60469
+YXJlcg== 60470
+cHJvamVjdGlvbg== 60471
+IFNoZWV0cw== 60472
+IEZvbGQ= 60473
+UmVkZGl0 60474
+RGVsZXRpbmc= 60475
+IHphbQ== 60476
+IE5ldXJhbA== 60477
+IEZlY2hh 60478
+IMKu 60479
+IHRhc3RlZA== 60480
+IEVuZW1pZXM= 60481
+IEpvaG5zdG9u 60482
+IGRhbmNlcnM= 60483
+IGRpc2FibGluZw== 60484
+IHBldHR5 60485
+IFdlbGQ= 60486
+Ly0t 60487
+KHNwcml0ZQ== 60488
+SUdP 60489
+YXJnb3V0 60490
+IHF1YXJ0ZXJiYWNrcw== 60491
+ZGlzcGF0Y2hlcg== 60492
+IFN1c3RhaW5hYmxl 60493
+ZW5hcmlvcw== 60494
+IFNraQ== 60495
+IGZhY3Rv 60496
+aWxsaW4= 60497
+X2V4dGVuc2lvbnM= 60498
+ybU= 60499
+Pkg= 60500
+ZWFzdA== 60501
+LmFpcg== 60502
+4oCcQnV0 60503
+T2JqZWN0Q29udGV4dA== 60504
+c3VjY2Vzc2Z1bGx5 60505
+X2xhbmQ= 60506
+IGZvbGRz 60507
+X0NPT1JE 60508
+IHN1YnBv 60509
+LmdldEFkZHJlc3M= 60510
+aW5zdHI= 60511
+TWF0ZXJpYWxz 60512
+0YPRgdGC 60513
+ZGVwb3NpdA== 60514
+LWxhc3Q= 60515
+X0dSQVk= 60516
+PWZpbmQ= 60517
+IG11dGFudA== 60518
+IGxlc2JpZW5uZQ== 60519
+bGV0Y2hlcg== 60520
+Uk9VR0g= 60521
+dXJla2E= 60522
+LmNhcHR1cmU= 60523
+IGVubg== 60524
+IChbWw== 60525
+IEZsdQ== 60526
+IHRhc2tJZA== 60527
+IEh1c3NlaW4= 60528
+LmZvbGRlcg== 60529
+IGF1c3Rlcml0eQ== 60530
+SVNUUkFUSU9O 60531
+X0ltcGw= 60532
+5rOo5oSP 60533
+IGRlY3JlZQ== 60534
+LWNoYXQ= 60535
+IGltcGxpY2F0aW9u 60536
+IGd1ZXNzZXM= 60537
+dWxrYW4= 60538
+QW5hbHl0aWNz 60539
+LnBsdXM= 60540
+Q09NTUFORA== 60541
+0LXQu9C4 60542
+wrsKCg== 60543
+X1NJVEU= 60544
+IGVxdWFsVG8= 60545
+U3VwcG9ydEZyYWdtZW50TWFuYWdlcg== 60546
+IFJlY29yZGluZw== 60547
+5a6M5oiQ 60548
+IGJhZ2dhZ2U= 60549
+IHBpdGNoZXJz 60550
+IEVo 60551
+b3F1ZQ== 60552
+CWNudA== 60553
+ID0+JA== 60554
+L2Zvbw== 60555
+SVJB 60556
+IFNhdGVsbGl0ZQ== 60557
+Ym9yYWg= 60558
+IH19Igo= 60559
+IEVuZHM= 60560
+IFNwcmF5 60561
+LHBhcmFt 60562
+LkNocm9tZQ== 60563
+KnE= 60564
+dGhvdWdodA== 60565
+aWJyYXRlZA== 60566
+IHRoaWV2ZXM= 60567
+IGJlbmVmaWNpYXJpZXM= 60568
+RW50ZXJlZA== 60569
+b3R0ZXN2aWxsZQ== 60570
+IHZldGVyaW4= 60571
+QnlJRA== 60572
+cXVpcGU= 60573
+dW1wdGlvbg== 60574
+LXVuaXQ= 60575
+RXhlY3V0aW9uQ29udGV4dA== 60576
+QHM= 60577
+IEdpb3Y= 60578
+LlRvb2xUaXA= 60579
+X2ZyaWVuZA== 60580
+KGF0dHJpYnV0ZXM= 60581
+IGR1bXBpbmc= 60582
+IEpD 60583
+X0RPQ1VNRU5U 60584
+IEFybW91cg== 60585
+KGluc2VydA== 60586
+Lkhvcml6b250YWxBbGlnbm1lbnQ= 60587
+IFFlZA== 60588
+44GE44G+44GZ 60589
+L2dpdA== 60590
+IFlZWVk= 60591
+IENhcmRpZmY= 60592
+IGFwYQ== 60593
+b3JnYW5pYw== 60594
+IFdoZXJlYXM= 60595
+IOad 60596
+IE1pYQ== 60597
+IGRlbW9saXRpb24= 60598
+IHNjYXJz 60599
+IHBhaQ== 60600
+IHJldHJpZXM= 60601
+IHJx 60602
+IERlbmlz 60603
+KFV0aWxz 60604
+IGFsbGV2aWF0ZQ== 60605
+IFBJQw== 60606
+aWR1ZQ== 60607
+IGFja25vd2xlZGdpbmc= 60608
+IC8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8= 60609
+56Gu5a6a 60610
+xKs= 60611
+XEpzb24= 60612
+LmJpbmFyeQ== 60613
+IHh0eXBl 60614
+c2lnbmFscw== 60615
+IEFwcGVhcmFuY2U= 60616
+JnI= 60617
+fXM= 60618
+Q2k= 60619
+IElsbHVt 60620
+cG9yYXRl 60621
+aG9n 60622
+IGluZGV4T2Y= 60623
+XENvbW1hbmQ= 60624
+X3BhcmFsbGVs 60625
+IFNoZXJsb2Nr 60626
+7YM= 60627
+ICIiKQ0K 60628
+Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8v 60629
+IGNyaXRpY2l6ZQ== 60630
+IFNvYXA= 60631
+IE1hdGNoZXI= 60632
+IGdyaWxsZWQ= 60633
+KlQ= 60634
+IGFkb3Jl 60635
+dWxsaW5n 60636
+IGplZG9jaA== 60637
+X3JlZnM= 60638
+bGVhbnVw 60639
+IEpBWEI= 60640
+IHJvc2Vz 60641
+IExpYW0= 60642
+c2l6ZWk= 60643
+IGdldGNoYXI= 60644
+IHRhcmRl 60645
+LXRvb2x0aXA= 60646
+IHF1YWxpZmllcg== 60647
+IEludGVybWVkaWF0ZQ== 60648
+X1dpbmRvdw== 60649
+IE1hbHRh 60650
+RGlzY29ubmVjdA== 60651
+ZXdoZXJl 60652
+Q2FtcG8= 60653
+IGlycmF0aW9uYWw= 60654
+bGVkbw== 60655
+IERO 60656
+QVJHVg== 60657
+IG91dHJv 60658
+IHRoaXJ0ZWVu 60659
+Sm9zZXBo 60660
+TUFS 60661
+L2ds 60662
+SmVzcw== 60663
+IFBzeWNoaWF0 60664
+IHBhZGRpbmdCb3R0b20= 60665
+LWxvb3A= 60666
+L2ZvbnRz 60667
+X3NlZW4= 60668
+VGVhbXM= 60669
+UmVhY3RET00= 60670
+KG1hbg== 60671
+KHhwYXRo 60672
+LmdldFNpbXBsZU5hbWU= 60673
+Pigq 60674
+IFB2dA== 60675
+IGVsZGVycw== 60676
+IHBpZXM= 60677
+LnVzZXJBZ2VudA== 60678
+LXJlZ2lvbg== 60679
+IEdyZWVrcw== 60680
+KGZyYWdtZW50 60681
+c3R1 60682
+IGNvdW5jaWxz 60683
+IHN0YW1pbmE= 60684
+IEdvZGRlc3M= 60685
+6KW/ 60686
+IHBoaWxvc29waGVycw== 60687
+IHBlcnNvbmU= 60688
+IExvc2U= 60689
+IENMUg== 60690
+IERvY3M= 60691
+IHNvYWs= 60692
+IEhPTERFUg== 60693
+IGJlbGxz 60694
+aGFzaENvZGU= 60695
+UkFURQ== 60696
+X1dFSUdIVA== 60697
+aW5vdXM= 60698
+ZW5kcmE= 60699
+b3Bob2JpYw== 60700
+IHByb3Nl 60701
+IGZpbmVseQ== 60702
+L29hdXRo 60703
+KHNwYWNl 60704
+YWRnZQ== 60705
+IE1hbWE= 60706
+IHN0cmluZ0J1ZmZlcg== 60707
+IHN0aW50 60708
+IG1pc21h 60709
+IHZpbGxhaW5z 60710
+IENyaW1lYQ== 60711
+IGRpcGxvbWE= 60712
+INC/0L7RgdC7 60713
+IEJlYQ== 60714
+KGpvaW4= 60715
+IO2VtA== 60716
+Q0hBVA== 60717
+cGVyaW5n 60718
+IENyb3M= 60719
+IG1vbmtleXM= 60720
+IHByZWRz 60721
+eWxh 60722
+LCws 60723
+IHZpYnJhdG9y 60724
+IE5V 60725
+5YWI 60726
+ZmFudA== 60727
+emV0 60728
+IGJpZXRldA== 60729
+dW5mdA== 60730
+c3dvcnRo 60731
+LkZsb3c= 60732
+IHBzeWNoZWQ= 60733
+IENvbnRpbmVudGFs 60734
+PnQ= 60735
+IHF1aWx0 60736
+LlVQ 60737
+IGV4cGFuc2l2ZQ== 60738
+RGlzcG9zZQ== 60739
+KGxhbmd1YWdl 60740
+Q2Fwcw== 60741
+X1pPTkU= 60742
+IHJlY3ljbGU= 60743
+IE1hbmFnZWQ= 60744
+Y3VycmVudENvbG9y 60745
+LmJyb2FkY2FzdA== 60746
+c2lnbklu 60747
+LnByb20= 60748
+bGx1 60749
+dWVibG8= 60750
+IHB1bmNoZXM= 60751
+IGF1dG9tYXQ= 60752
+IGFzc2lnbmluZw== 60753
+IGNyZWF0ZVVzZXI= 60754
+IEFsbGllZA== 60755
+IGNvbmR1Y3Rvcg== 60756
+gqg= 60757
+IHNhZGRsZQ== 60758
+IGRuaQ== 60759
+b21lZGljYWw= 60760
+LVdlc3Q= 60761
+UG9zaXRpdmVCdXR0b24= 60762
+IGl0YWxpYw== 60763
+P1s= 60764
+KHRyaWdnZXI= 60765
+IGVsZXBoYW50cw== 60766
+IjoiIiwi 60767
+IGNhbGliZXI= 60768
+cmFmdGVk 60769
+ZGlnaXRz 60770
+IG1hcnNoYWw= 60771
+bWlsbGlzZWNvbmRz 60772
+bWFya2Vycw== 60773
+bW9t 60774
+L3BsYWNl 60775
+IGhvbGlzdGlj 60776
+OnQ= 60777
+Iyw= 60778
+IGJvdG8= 60779
+IG5hdXNlYQ== 60780
+IFNob290aW5n 60781
+aXRlY2g= 60782
+IHRleHRTdGF0dXM= 60783
+PENsYXNz 60784
+IERlc2NyaWJl 60785
+IGJ1ZmZldA== 60786
+Z2ls 60787
+IGxvZ2l0cw== 60788
+c3RkY2FsbA== 60789
+bW9kcw== 60790
+IFNrdWxs 60791
+IEJhcmU= 60792
+aG9wZQ== 60793
+IEludHI= 60794
+RmFpcg== 60795
+CXB0 60796
+IGFjb21wYW5o 60797
+IGZraw== 60798
+X3JwYw== 60799
+SW5zdGFsbGVk 60800
+X2Fucw== 60801
+LmdldE1pbnV0ZXM= 60802
+4oCmIgoK 60803
+LXRocmVhZA== 60804
+IHByZXNjaG9vbA== 60805
+QUlMUw== 60806
+IGRpZmZpYw== 60807
+KGNvbnZlcnQ= 60808
+IE5hdGg= 60809
+IERPSg== 60810
+IHJlZ2ltZXM= 60811
+IGVudGh1c2lhc3Q= 60812
+IHdhcnJhbnRpZXM= 60813
+IGZhc2NpbmF0ZWQ= 60814
+X2JpbmRpbmc= 60815
+X05vdA== 60816
+b2Z0ZW4= 60817
+X1JX 60818
+L21haWw= 60819
+IHRpdGxlTGFiZWw= 60820
+IHZpbGxhZ2Vycw== 60821
+IEppYW5n 60822
+IHN3YWdnZXI= 60823
+LlJvd0luZGV4 60824
+X2ltZ3M= 60825
+cmFweQ== 60826
+VkVSQUdF 60827
+LlVw 60828
+IG5vb3A= 60829
+Y2lv 60830
+CVNU 60831
+IGRlY3JlbWVudA== 60832
+IG1hZ25lc2l1bQ== 60833
+X3JvdGF0ZQ== 60834
+U2l0 60835
+IG5pZXV3ZQ== 60836
+IHRlcm1lZA== 60837
+7ZWp64uI64uk 60838
+IHVyZw== 60839
+X3RvdWNo 60840
+IHN3YXJt 60841
+IGNsYXZl 60842
+dGhlc3Q= 60843
+IExhZg== 60844
+SFg= 60845
+IEh1bGs= 60846
+IHBsYWludGV4dA== 60847
+IFNvZmE= 60848
+Z2V0U2Vzc2lvbg== 60849
+TGVk 60850
+IGVjb3N5c3RlbXM= 60851
+aGVp 60852
+IEtpbGxz 60853
+IGh1c2JhbmRz 60854
+0YXRgNCw0L0= 60855
+KGRvbQ== 60856
+X3RpbGVz 60857
+TmliTmFtZQ== 60858
+IGRvbmF0aW5n 60859
+LmFjYw== 60860
+IGxpZmVzcGFu 60861
+LmJu 60862
+X1JHQ1RY 60863
+5qU= 60864
+YW5zZW4= 60865
+IG1vZGVsbGluZw== 60866
+TGF5b3V0UGFyYW1z 60867
+IG9uQ2hhbmdlVGV4dA== 60868
+cnNh 60869
+LWxvY2F0aW9u 60870
+LlBl 60871
+KGJ1cw== 60872
+KHNvbmc= 60873
+IHByb2R1aw== 60874
+IFNIT1VMRA== 60875
+IENK 60876
+IHNvcw== 60877
+IEhvbWVDb250cm9sbGVy 60878
+LmxvYWRlZA== 60879
+KERvY3VtZW50 60880
+LnNvY2lhbA== 60881
+dGlsZXM= 60882
+IGxhbWU= 60883
+PWRm 60884
+LnBhcnNlTG9uZw== 60885
+IHByYWM= 60886
+IGRldG94 60887
+IFZF 60888
+IHB1bnRvcw== 60889
+IGRvY3Ry 60890
+IGFuY29y 60891
+Q0FQRQ== 60892
+IGNtYg== 60893
+54S2 60894
+Kiki 60895
+Oi8vLw== 60896
+VmFsdWVUeXBl 60897
+IG1vcnRnYWdlcw== 60898
+O3E= 60899
+IFJvY2tldHM= 60900
+c3BvcnQ= 60901
+VUdD 60902
+Y3Rz 60903
+44KB 60904
+aWV1cg== 60905
+IEFwcGVhbA== 60906
+KG5i 60907
+Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8= 60908
+SU1BVElPTg== 60909
+IENyZXM= 60910
+IE1hbmlw 60911
+Q2F1c2U= 60912
+YXR5cGVz 60913
+bWFudWZhY3R1cmVy 60914
+Iy0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0= 60915
+IHNwb3I= 60916
+ZXNvbg== 60917
+IHB1bmNoZWQ= 60918
+IGJvb2ttYXJrcw== 60919
+IEJ1bGs= 60920
+Q29tcGxldGVMaXN0ZW5lcg== 60921
+IFRhbGtpbmc= 60922
+IEVybmVzdA== 60923
+IHJ1YmJpc2g= 60924
+a2lsbHM= 60925
+IERFRklO 60926
+IG5laWdoYm91cmluZw== 60927
+YXJsbw== 60928
+IFBDQQ== 60929
+CW1hdHJpeA== 60930
+bG9r 60931
+IGF0bGFz 60932
+IEd1cg== 60933
+IHd5bg== 60934
+LW5lZ2F0aXZl 60935
+IHR1bA== 60936
+IHJlbGlj 60937
+IFZvbHRhZ2U= 60938
+IFByZWlz 60939
+IEpOSUNBTEw= 60940
+IFBNSUQ= 60941
+YWtldA== 60942
+CWF0dHI= 60943
+IGV0aXF1 60944
+IE1K 60945
+IEdtYWls 60946
+Y2xy 60947
+X2V4ZWN1dGlvbg== 60948
+6ZSu 60949
+cG9zaXRvcg== 60950
+LmFm 60951
+TnI= 60952
+R2VvcmdpYQ== 60953
+VG9wb2xvZ3k= 60954
+IHBlcmNow6k= 60955
+IG11c2xpbQ== 60956
+IGVwaWRlbWk= 60957
+IHNhYm90 60958
+YWN0dXM= 60959
+IOuMgA== 60960
+IElPRXJyb3I= 60961
+LmVzdA== 60962
+cHJlZnM= 60963
+IEtyaXNo 60964
+LlJlYWRLZXk= 60965
+TkFTQQ== 60966
+dcOnw6Nv 60967
+X0Ri 60968
+dW1lcmF0b3I= 60969
+V2lkZQ== 60970
+KHN0YXRlbWVudA== 60971
+LmVuZHBvaW50 60972
+Li4uLi4uLi4u 60973
+IFsq 60974
+c3RyZWFtcw== 60975
+bXRpbWU= 60976
+UHg= 60977
+YXRy 60978
+IHRwbA== 60979
+Um9tYW4= 60980
+IHNjZW5pYw== 60981
+Lm56 60982
+IFNlY29uZHM= 60983
+c3VibWVudQ== 60984
+IOyLpO0= 60985
+X2J1bmRsZQ== 60986
+IGRlxJ8= 60987
+IFNpc3RlcnM= 60988
+cHJlZmVyZW5jZXM= 60989
+IHBvcnRh 60990
+QWR2aXNvcg== 60991
+bWF4TGVuZ3Ro 60992
+IEdSRUFU 60993
+X18oCg== 60994
+b2xlc3Q= 60995
+IExhYmVscw== 60996
+IGVuZmVy 60997
+ICAgICAgCgo= 60998
+IFRoZWZ0 60999
+X0ZJTEw= 61000
+IFdpc2U= 61001
+KWFwcGxpY2F0aW9u 61002
+dW5hbWk= 61003
+PigpKQo= 61004
+QUREUkVTUw== 61005
+QlNU 61006
+ZXR6dA== 61007
+IFFncw== 61008
+U2Vuc2U= 61009
+RXhjZXB0aW9uSGFuZGxlcg== 61010
+IENodQ== 61011
+LmdldE93blByb3BlcnR5 61012
+IGV4ZXJjaXNlZA== 61013
+aW90aWM= 61014
+IFJlbGVhc2Vz 61015
+IHBpbnRlcmVzdA== 61016
+b2xpZQ== 61017
+aXNvZnQ= 61018
+IHNlcXVlbmNpbmc= 61019
+IHBhZHJl 61020
+XSkpOw0K 61021
+KHJhZGl1cw== 61022
+Lm1lZA== 61023
+YWludGllcw== 61024
+Lk9iamVjdE1vZGVs 61025
+IGVtcGxl 61026
+IHNlZ3Vybw== 61027
+U3RhcnM= 61028
+IHF1YWxpdGF0aXZl 61029
+bGVtbg== 61030
+4bux 61031
+PiIpLg== 61032
+IGd4 61033
+LWNlcnQ= 61034
+IEFTVE0= 61035
+IGZ1bGxuYW1l 61036
+IHRlbGVtZXRyeQ== 61037
+IENhbWJvZGlh 61038
+X3Vs 61039
+IENsYXJl 61040
+Q1VTVE9N 61041
+UUM= 61042
+IFVucw== 61043
+IEhUVFBT 61044
+IFBhcmtpbnNvbg== 61045
+YW5jeWJveA== 61046
+JywnLg== 61047
+VHVl 61048
+LmdldExhc3Q= 61049
+IGFiaQ== 61050
+xIVk 61051
+QXN0 61052
+IEVkaXRpbmc= 61053
+LlVuaXR5 61054
+am1w 61055
+IG1hdHM= 61056
+IHNoYXJlZFByZWZlcmVuY2Vz 61057
+Q2FwdGFpbg== 61058
+LnBhZ2VTaXpl 61059
+IHJ0bA== 61060
+IGFubWVsZA== 61061
+UnVudGltZU9iamVjdA== 61062
+IGRlbWFuZGU= 61063
+KCI7 61064
+c2VpdGU= 61065
+LWhlYWRlZA== 61066
+IEtyYQ== 61067
+IEZPTlQ= 61068
+YFw= 61069
+Q2xhc3NOb3RGb3VuZEV4Y2VwdGlvbg== 61070
+LmF2Zw== 61071
+YXRpY2Fs 61072
+QWo= 61073
+IHBlcm1pdHRpbmc= 61074
+UHJvag== 61075
+RVJSUQ== 61076
+IGNyZWFtcGll 61077
+IEJ1eWVy 61078
+LW1vZHVsZXM= 61079
+IFN1bmRheXM= 61080
+fGAK 61081
+IGRheXRpbWU= 61082
+ICso 61083
+IGdsaXRjaA== 61084
+IE9wZXJhbmQ= 61085
+IHRveGlucw== 61086
+aW55YQ== 61087
+RE5T 61088
+IFNhcw== 61089
+Q2FrZQ== 61090
+IE5hdGlvbmFscw== 61091
+LmFkZFRv 61092
+IHNpbmtpbmc= 61093
+IGNvbXByZWhlbnNpb24= 61094
+IHNjb3I= 61095
+YWdlbWVudHM= 61096
+IHRhcmQ= 61097
+IG1hcmNoaW5n 61098
+IE1UVg== 61099
+IHNhbmU= 61100
+Q3JlYXRlSW5mbw== 61101
+4bqv 61102
+IGVuZEluZGV4 61103
+CWxheW91dA== 61104
+IOWQjQ== 61105
+U0lURQ== 61106
+IFRIRVJF 61107
+IFt7Jw== 61108
+b3BhdGhpYw== 61109
+IHRyYW5zbWl0dGVy 61110
+L2JvZHk= 61111
+IHB1bmQ= 61112
+IENsb3Npbmc= 61113
+IHNldGF0dHI= 61114
+IGJvdW5kZWQ= 61115
+QXRsYXM= 61116
+c3VtaW5n 61117
+KHRpbWVz 61118
+cGFyZXI= 61119
+eW5vbQ== 61120
+ZmVpdA== 61121
+IGZyZW0= 61122
+LWxlZw== 61123
+IEJyYXM= 61124
+PiM= 61125
+IOy2nOugpQ== 61126
+IElOU1RBTkNF 61127
+IENvdWNo 61128
+X2hvc3Rz 61129
+bGlrZWxpaG9vZA== 61130
+Lk1hcmtlcg== 61131
+IE1hc2tz 61132
+IGNlcmVhbA== 61133
+dXRpbGl0aWVz 61134
+IGVsZW1lbnRhbA== 61135
+IGRpc3RvcnRlZA== 61136
+aW5hY3RpdmU= 61137
+Y3J5 61138
+V0w= 61139
+VVBQT1JURUQ= 61140
+LlRocm93cw== 61141
+L3NjaGVtYQ== 61142
+c2VyaWU= 61143
+LiInLA== 61144
+IEJlbmVkaWN0 61145
+LXBpY2tlcg== 61146
+aWdncw== 61147
+IFBpcmF0ZQ== 61148
+5ZGo5pyf 61149
+IFRoZW1h 61150
+IFNvdXRoYW1wdG9u 61151
+IGFycmF5V2l0aA== 61152
+IFBhdWxh 61153
+IHByZWRpY3Rvcg== 61154
+LUFzcw== 61155
+LnVzZXJpZA== 61156
+IHBlcmk= 61157
+IGV4YWdnZXJhdGVk 61158
+dXJhdGU= 61159
+YXJzZWlsbGU= 61160
+IENvbmNlbnQ= 61161
+IFBpaw== 61162
+IEBfOwoK 61163
+IGZvcm1hdGlvbnM= 61164
+IGRlbm9taW4= 61165
+Ii8+Lgo= 61166
+ZW5kZWRvcg== 61167
+IHBhbmNyZQ== 61168
+IGFtdA== 61169
+IG9uUmVzdW1l 61170
+b25EZWxldGU= 61171
+IEJDSA== 61172
+KSgi 61173
+bW92ZW1lbnQ= 61174
+IHBvdGFzc2l1bQ== 61175
+PCEtLVs= 61176
+IG1lbWVz 61177
+X1NFVFVQ 61178
+X2dhbW1h 61179
+IGNvbG9yV2l0aFJlZA== 61180
+IGdyYXZlcw== 61181
+IHN0YXR1dGVz 61182
+IGFxdWFyaXVt 61183
+IExhbWFy 61184
+IHhBeGlz 61185
+V2VicGFja1BsdWdpbg== 61186
+X2ZvbGQ= 61187
+Lmdlbw== 61188
+IEZlZXQ= 61189
+LXNwZWFraW5n 61190
+6aKd 61191
+X2Nvcw== 61192
+IEF2ZWM= 61193
+YW5zdA== 61194
+IEVFUFJPTQ== 61195
+IGRlYWxlcnNoaXA= 61196
+IFVudGVybmVobWVu 61197
+LEludGVnZXI= 61198
+IMOqdGVz 61199
+LmB8YAo= 61200
+dmluZQ== 61201
+IEtuaWZl 61202
+X3ZlcnRpY2Fs 61203
+LkRvd25sb2Fk 61204
+IG92ZXJzaXplZA== 61205
+bGlk 61206
+IHBpbGxhcg== 61207
+Y2F1Z2h0 61208
+IGZsYWdnZWQ= 61209
+KHJvdXRlcg== 61210
+KFJFRw== 61211
+IGJhcmJlY3Vl 61212
+YnJvd3Nl 61213
+IEZpdHpnZXJhbGQ= 61214
+INC/0YDQvtCy 61215
+aXJpZQ== 61216
+IGVyc3Rl 61217
+ZWxpYg== 61218
+X1BSRVNT 61219
+IGhlYWxlZA== 61220
+IGhhdXQ= 61221
+PnhwYXRo 61222
+IFdlbg== 61223
+Z3J1bnQ= 61224
+LktleXdvcmQ= 61225
+LWhhc3BvcHVw 61226
+bnc= 61227
+U1o= 61228
+Z2FiZQ== 61229
+SW50ZXJhY3Rpb25FbmFibGVk 61230
+cHJlY2g= 61231
+IHByaW1v 61232
+c3RyaXBl 61233
+YWx0ZWQ= 61234
+X0JPUkRFUg== 61235
+ZmluZEJ5 61236
+X2Fubm90YXRpb24= 61237
+V2ViU29ja2V0 61238
+QnVy 61239
+IGRpcGxvbWFjeQ== 61240
+KHRk 61241
+IFNpbXBs 61242
+ZGV0ZWN0 61243
+cGVyZm9ybWFuY2U= 61244
+IGNhcmJvaHlkcmF0ZXM= 61245
+L2lvdXRpbA== 61246
+LS0tLS0tKw== 61247
+X3Ny 61248
+bWVldGluZw== 61249
+IHwtLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQo= 61250
+X1Zhcg== 61251
+IHJvdmVy 61252
+IGNhc2k= 61253
+IE1hdGNoZXM= 61254
+cXJ5 61255
+X0JPT0s= 61256
+IHByZXN1bWVk 61257
+IE3DqXQ= 61258
+L2l0ZW1z 61259
+IENyZWRlbnRpYWxz 61260
+XSkuCg== 61261
+IEthcmRhc2g= 61262
+QWRtaW5pc3Ry 61263
+IFNsb3Zhaw== 61264
+KCcsJykK 61265
+IGNvbnF1ZXN0 61266
+UGVyc2lzdA== 61267
+IERyYWlu 61268
+Ymlq 61269
+IGRvdg== 61270
+IHPDuGdlcg== 61271
+V29uZGVy 61272
+QVNFVA== 61273
+W21pbg== 61274
+Z3VuYQ== 61275
+Z3Jvd24= 61276
+IH0pCgoK 61277
+QVVE 61278
+IGJlbGlldmVy 61279
+aXNlcnM= 61280
+KHNlbnQ= 61281
+SmFja3Nvbg== 61282
+IHBhaXM= 61283
+IGN1ZGFNZW1jcHk= 61284
+IGZsYXNoZXM= 61285
+YmVyZQ== 61286
+IG11bHRpZg== 61287
+IENhcmdv 61288
+RWxlbWVudHNCeVRhZ05hbWU= 61289
+KGVwb2No 61290
+IEt1bmRlbg== 61291
+UmVjb2duaXRpb24= 61292
+IFNldFZhbHVl 61293
+IFN1bnNoaW5l 61294
+QUNQ 61295
+OnN0cg== 61296
+IGFtYmlndQ== 61297
+IO2VnA== 61298
+LWxpbmVhcg== 61299
+IFdPVw== 61300
+KGN1c3RvbQ== 61301
+IGlzRW5hYmxlZA== 61302
+QkFU 61303
+X2RpYWc= 61304
+X0dVSQ== 61305
+SGVhdA== 61306
+IGFzc2VtYmxpZXM= 61307
+IENldHRl 61308
+L2NhcmQ= 61309
+IERlY2xhcmU= 61310
+IHVwaGVsZA== 61311
+IENsYXVk 61312
+LWZsb3c= 61313
+IGhvb2t1cA== 61314
+SVJR 61315
+RmF0aGVy 61316
+RGVsZXRlcw== 61317
+KSk7Ly8= 61318
+IFBUU0Q= 61319
+KTsNDQo= 61320
+ZWdhbA== 61321
+LmFycm93 61322
+IE1QVQ== 61323
+w7Nq 61324
+IG1vdGl2YXRl 61325
+IEthdGhlcmluZQ== 61326
+LmZyYW1lcw== 61327
+IHRoaQ== 61328
+PFJlc3VsdA== 61329
+LmdyYXk= 61330
+IEt1c2huZXI= 61331
+IENlbWVudA== 61332
+IEJ1cmw= 61333
+SW50ZXJ2aWV3 61334
+PSciLg== 61335
+UE9XRVI= 61336
+IENEcw== 61337
+IFsmXSg= 61338
+IGNoYW5nZXI= 61339
+Pj4sCg== 61340
+LXdl 61341
+IENMSw== 61342
+IEFkcmk= 61343
+IGNpbA== 61344
+PVg= 61345
+IHNlbmRv 61346
+IENlbHNpdXM= 61347
+YmxvY2tlZA== 61348
+T3V0T2ZCb3VuZHM= 61349
+LiE= 61350
+b3Byb2plY3Q= 61351
+YW5kZXM= 61352
+ZWRpdGluZw== 61353
+IHB1bXBlZA== 61354
+KCk7fQo= 61355
+4Ka/ 61356
+X0VWRU5UUw== 61357
+IEZyaWVkbWFu 61358
+ID4v 61359
+ICoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKio= 61360
+IHRlbXB0YXRpb24= 61361
+IElwc3Vt 61362
+IENlcw== 61363
+IG5vdGljaW5n 61364
+X2VsZQ== 61365
+QWNjZW50 61366
+IE52aWRpYQ== 61367
+IGFtdXNlbWVudA== 61368
+IGludHJvZHVjdG9yeQ== 61369
+CXJldHZhbA== 61370
+IGxpbA== 61371
+aXJpbQ== 61372
+ZW5xdWV1ZQ== 61373
+LWhpc3Rvcnk= 61374
+IGNvdW5zZWxvcg== 61375
+VFJBTlNGRVI= 61376
+X1ZlY3Rvcg== 61377
+Y2F0ZWdvcnlJZA== 61378
+cGVyeQ== 61379
+RklMVEVS 61380
+KHJlbW90ZQ== 61381
+IHNlcGFyYXQ= 61382
+IEVtYmVkZGVk 61383
+IEJhY29u 61384
+dGVycmFmb3Jt 61385
+IHJlc3BlY3RhYmxl 61386
+aWNoYQ== 61387
+YWlj 61388
+Kydc 61389
+IHN0cmF5 61390
+0LXQvdC40Lk= 61391
+IEF1ZGl0b3I= 61392
+ZW50aWNhdG9y 61393
+IGNsb2Fr 61394
+IFVOS05PV04= 61395
+IEFtZW4= 61396
+dm94 61397
+YXN0cmVldA== 61398
+Li4uXQ== 61399
+IGAl 61400
+LXByb3BlcnR5 61401
+IFF1YWxjb21t 61402
+ZWRpdGVk 61403
+IGRpc2NyZWV0 61404
+LU11c2xpbQ== 61405
+LnJlY2lwZQ== 61406
+IHZhbmRhbA== 61407
+IHXFvHk= 61408
+c2VuaGE= 61409
+LGlz 61410
+IFBvbXBl 61411
+IEtuaWNrcw== 61412
+KCknLA== 61413
+KHRi 61414
+IEhJRA== 61415
+IHBldw== 61416
+IGNhcnJvdHM= 61417
+IHBvbGljeW0= 61418
+Lmxp 61419
+IHR3ZW50aWV0aA== 61420
+X3Byb21wdA== 61421
+c2NlbmFyaW8= 61422
+LkpGcmFtZQ== 61423
+IE1RVFQ= 61424
+IEluZGl2aWR1YWxz 61425
+dG9NYXRjaFNuYXBzaG90 61426
+w61zdGljYXM= 61427
+IkQ= 61428
+IGZvZA== 61429
+IHJpY2h0 61430
+IFphcg== 61431
+IHJlc3VycmVjdGlvbg== 61432
+IG1pbGl0YXI= 61433
+IE1hbmFnZXJz 61434
+X0dSSUQ= 61435
+bm9ubnVsbA== 61436
+QkVSVA== 61437
+T3V0cHV0cw== 61438
+ICAgIAoKCg== 61439
+IHByZWRlY2Vzc29ycw== 61440
+IGlzU2VsZWN0ZWQ= 61441
+IGN5YmVyc2VjdXJpdHk= 61442
+5YaZ 61443
+Lm1j 61444
+UXVp 61445
+IGFsbGVnaW5n 61446
+IHRpYw== 61447
+TWFudWZhY3R1cmVy 61448
+IEVuaGFuY2Vk 61449
+IEJpeg== 61450
+IHJlYWRPbmx5 61451
+w7Ru 61452
+IGx1bWJlcg== 61453
+YWVk 61454
+IHJhaW5z 61455
+cHJvdmlkZQ== 61456
+TGF0ZQ== 61457
+IHBlZGVzdHJpYW5z 61458
+amF2 61459
+QWN0aXZhdGlvbg== 61460
+J0JyaWVu 61461
+IHZhY2FuY3k= 61462
+Ly8t 61463
+IGJsYWRkZXI= 61464
+IGFnaWxl 61465
+IHN0ZWFscw== 61466
+IHJlZ2lzdHJhcg== 61467
+IGVsZWN0b3JhdGU= 61468
+R292ZXJubWVudA== 61469
+J109Ig== 61470
+YWxidW1z 61471
+ZWxlY3Rpb24= 61472
+YWJs 61473
+IE9yaWVudA== 61474
+IHBpcmF0ZXM= 61475
+IGxvb3Bo 61476
+CXJlYWRlcg== 61477
+IMO6bHRpbW8= 61478
+IFBldHJv 61479
+INGB0YLRgNCw0L3QuNGG 61480
+IHNhbXA= 61481
+aW52ZXJzZQ== 61482
+LmdyYWRsZQ== 61483
+IERvbnQ= 61484
+eG9u 61485
+IGNyZWFk 61486
+ZXJ0aWxpdHk= 61487
+cmdjdHg= 61488
+IHBvbMOtdGljYQ== 61489
+VmFsdWVDaGFuZ2Vk 61490
+QXBpUmVzcG9uc2U= 61491
+Y29tYm8= 61492
+IFVY 61493
+IGRhaGE= 61494
+J2Fu 61495
+LW15 61496
+4oCcTXk= 61497
+cGVl 61498
+bGF0bG9uZw== 61499
+XEJhc2U= 61500
+Lndpaw== 61501
+IFBPVA== 61502
+IHB1bmN0dWF0aW9u 61503
+cXVz 61504
+aW55aW4= 61505
+PW1pbg== 61506
+IG51Y2xldXM= 61507
+IGNvbmNlc3Npb25z 61508
+LmF2ZXJhZ2U= 61509
+dXNlcmluZm8= 61510
+IHRhYmxlc3Bvb24= 61511
+IE5laWdoYm9yaG9vZA== 61512
+KFRocm93YWJsZQ== 61513
+PnY= 61514
+b3Z5 61515
+WFhYWFhYWFg= 61516
+aXN0aQ== 61517
+IGJhcnQ= 61518
+77u/Cg== 61519
+RW5jcnlwdA== 61520
+PWVuZA== 61521
+IGluY3Vy 61522
+IHBlcnRpbmVudA== 61523
+X01JTk9S 61524
+KSI+Cg== 61525
+Y2hpZWY= 61526
+IHZk 61527
+KGAK 61528
+dXJneQ== 61529
+YWJ5cmludGg= 61530
+IFNoYXBlcw== 61531
+IHZhZ3k= 61532
+LmRkcw== 61533
+bWVtY21w 61534
+CUl0 61535
+c2VtZXN0ZXI= 61536
+IEVtaXQ= 61537
+IGluc2Fu 61538
+IGJydXNoZWQ= 61539
+X0ZBVEFM 61540
+ImVycm9ycw== 61541
+IGRpc3J1cHRpdmU= 61542
+JW4= 61543
+IGNvbXBvc2l0aW9ucw== 61544
+IGJhY2hlY2E= 61545
+IGRpc2FncmVlbWVudA== 61546
+UHJvdGVjdA== 61547
+TElLRQ== 61548
+LkZpbGVOb3RGb3VuZEV4Y2VwdGlvbg== 61549
+IHdlaXRlcmU= 61550
+IE1vbmFjbw== 61551
+Xzw/ 61552
+IG1vZGVsZWQ= 61553
+c3RlZWw= 61554
+ZWVudGg= 61555
+IFtdKS4= 61556
+KHJlZ2V4 61557
+ZW5pZQ== 61558
+LkZsdXNo 61559
+LnBvcHVw 61560
+IE92ZXJz 61561
+LkRlYnVnZ2Vy 61562
+PmA7Cg== 61563
+bml0ZQ== 61564
+LnF1b3Rl 61565
+IGNvZw== 61566
+IHdha2Vz 61567
+IFdyZXN0bGluZw== 61568
+SW50cm8= 61569
+IHNlcmRl 61570
+IHJldXNhYmxl 61571
+IENvbXBvdW5k 61572
+SW1wbE9wdGlvbnM= 61573
+CUl0ZW0= 61574
+IG51bU9m 61575
+IENIUg== 61576
+IEJvbHRvbg== 61577
+UExVUw== 61578
+Ym91bmRpbmc= 61579
+KCsr 61580
+ICIsIjsK 61581
+IEd1ZXN0cw== 61582
+IGRlcHJpdmVk 61583
+IG1lbG9keQ== 61584
+WklQ 61585
+Pj4oKQ== 61586
+IGNvbmNlZGVk 61587
+X2RpZQ== 61588
+IGpveXN0aWNr 61589
+IGFuYXRvbXk= 61590
+IFRvb2xTdHJpcA== 61591
+IEVub3VnaA== 61592
+Iio= 61593
+aW50b3No 61594
+aGFiaQ== 61595
+IFN5cmFjdXNl 61596
+IEluY3JlYXNlZA== 61597
+TXVz 61598
+LnBhdGllbnQ= 61599
+IGluY3JlbWVudHM= 61600
+IFBJWA== 61601
+IGJvb3R5 61602
+LnByaXZhdGU= 61603
+ZXJ0b2lyZQ== 61604
+IGN1dHRlcg== 61605
+IGJla2Fu 61606
+IGRyYXdlcnM= 61607
+X0FMSUFT 61608
+QW5pbWF0aW5n 61609
+X2Fuc3dlcnM= 61610
+LmF0dGFjaw== 61611
+d3JpdGVycw== 61612
+IGdhYW4= 61613
+aWtvbg== 61614
+CWNvbnRyb2xsZXI= 61615
+IGZhY2FkZQ== 61616
+k+WQjQ== 61617
+LHN0YXR1cw== 61618
+LmZl 61619
+IHBvc3Rwb25lZA== 61620
+IEZvbnRz 61621
+IEJlbmNobWFyaw== 61622
+aWRlbnRhbA== 61623
+IGNoaWxsaW5n 61624
+IEtpZXY= 61625
+IGJydXNoZXM= 61626
+LXdoZWVs 61627
+IEhpcmU= 61628
+KHByb2M= 61629
+IGNoZW1vdGhlcmFweQ== 61630
+INCx0YvRgtGM 61631
+IE5vbGFu 61632
+KGllcnI= 61633
+IEp1ZGU= 61634
+LUF1Zw== 61635
+dW1ub3M= 61636
+Y29udmVyc2F0aW9u 61637
+IEJlaGF2aW9yU3ViamVjdA== 61638
+YmF1Z2g= 61639
+IGd1aXRhcmlzdA== 61640
+Lm9mZmVy 61641
+IGFjY3VzZQ== 61642
+cGFyZA== 61643
+cmVmZg== 61644
+LlJlYWN0 61645
+IHVjaGFy 61646
+IG9mZnNldG9m 61647
+JHN0YXR1cw== 61648
+L2VtYWls 61649
+LmNvbm5lY3RlZA== 61650
+Lys= 61651
+QHFx 61652
+YXJhdmVs 61653
+IGZ2 61654
+LlBlcnNpc3RlbnQ= 61655
+ZW5zdGVpbg== 61656
+Li4uXQoK 61657
+LmdyaWRWaWV3 61658
+IEpPQg== 61659
+LScuJA== 61660
+LmxheW91dENvbnRyb2w= 61661
+IGNhcmc= 61662
+IEtvdA== 61663
+X2VxdWFscw== 61664
+IHdpdGhkcmV3 61665
+QVRFU1Q= 61666
+LWJ1dHRvbnM= 61667
+CVVQUk9QRVJUWQ== 61668
+IFVJR3JhcGhpY3M= 61669
+IFB1YmxpY2F0aW9ucw== 61670
+IElOVEVSTg== 61671
+IGV0aGFub2w= 61672
+w6RuZ2Vy 61673
+U0VORA== 61674
+CXNsb3Q= 61675
+0LvQtdC90LjRjw== 61676
+IHBhc28= 61677
+X2V4dGVuZGVk 61678
+b3J0aGFuZA== 61679
+KHNoZWV0 61680
+IHByb2NlZHVyYWw= 61681
+IGtpZG5hcHBpbmc= 61682
+Ly8tLS0tLS0tLS0tLS0tLS0t 61683
+W21zZw== 61684
+T2NjdXJyZWQ= 61685
+QWxpY2U= 61686
+IENBU1Q= 61687
+IGthdGE= 61688
+5rOo5YaM 61689
+Y2hlYXA= 61690
+aWNpdHk= 61691
+IHJlYWRpbmVzcw== 61692
+KioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKio= 61693
+IFNZTg== 61694
+IE1hZ2dpZQ== 61695
+cmljYQ== 61696
+IHlp 61697
+IFR3ZQ== 61698
+aWdub24= 61699
+YW5kZW4= 61700
+IGpxdWVyeQ== 61701
+IHN0YXJ0WQ== 61702
+IGF2ZW51ZQ== 61703
+QW50aA== 61704
+X2NhcHRpb24= 61705
+IFJvd3M= 61706
+wq/Cr8Kvwq8= 61707
+c2VxdWVuY2Vz 61708
+0LjRhA== 61709
+KCIvIikK 61710
+Y3JhdGU= 61711
+IFNhZ2E= 61712
+SnVk 61713
+IGZhY2V0cw== 61714
+X3NjYWxlZA== 61715
+UnVieQ== 61716
+IFBR 61717
+IGNydXM= 61718
+SXJhbg== 61719
+LnNxdWVlemU= 61720
+CWZk 61721
+IHBlcmNl 61722
+IGRhdGFw 61723
+Xl5eXg== 61724
+X1NDT1BF 61725
+IFNhbG1vbg== 61726
+IHRhaWxsZQ== 61727
+IFZhbG9y 61728
+QUdFTUVOVA== 61729
+UnA= 61730
+IEd1YXJkaWFucw== 61731
+IHJlYWRGaWxl 61732
+IG5lZ3Jv 61733
+IG9icmE= 61734
+LlBhcmNlbA== 61735
+Q0FDSEU= 61736
+cmV0Y2hlZA== 61737
+Y3Jt 61738
+cXJzdA== 61739
+b3VmbA== 61740
+7ZqM 61741
+Lm5vbQ== 61742
+c3NpZA== 61743
+IHNhZmVzdA== 61744
+LkVycm9ycw== 61745
+X3BuZw== 61746
+Q29udmVydGVyRmFjdG9yeQ== 61747
+PFNlbGY= 61748
+IHNlcGFyYXRlcw== 61749
+X2pCdXR0b24= 61750
+IG1pc3VzZQ== 61751
+ZXhjZXB0aW9ucw== 61752
+IFt7Ig== 61753
+IFBBRA== 61754
+562+ 61755
+a0h6 61756
+PWVu 61757
+IGjDoG5n 61758
+SFo= 61759
+IFhhdmllcg== 61760
+e2lk 61761
+IHN0YWlyY2FzZQ== 61762
+dGV4dGZpZWxk 61763
+L2RvY2tlcg== 61764
+KHRhYmxlTmFtZQ== 61765
+IHRlbGVjb21tdW5pY2F0aW9ucw== 61766
+b25zbw== 61767
+b2Ns 61768
+UGFyZW50cw== 61769
+L3BhcnNlcg== 61770
+LWRyb3A= 61771
+KHN0eWxlcw== 61772
+X21vZGlmaWVy 61773
+UmVxdWVzdElk 61774
+LmJyYW5k 61775
+IENvaW5z 61776
+IGt1bnQ= 61777
+Lkdy 61778
+IEhJU1RPUlk= 61779
+KGRyb3A= 61780
+QnJhZA== 61781
+IHNla3Np 61782
+X3Nkaw== 61783
+IGluc3BlY3RlZA== 61784
+cHJlZGljYXRl 61785
+LmZp 61786
+R09S 61787
+IGNvY29h 61788
+IElRdWVyeWFibGU= 61789
+LS0tPC8= 61790
+IGRlcm5pZXI= 61791
+IFVzZXJEZWZhdWx0cw== 61792
+X1RT 61793
+IGVvcw== 61794
+IGJsZW5kZXI= 61795
+IGxvdWRlcg== 61796
+U3BhbmlzaA== 61797
+bGluZXI= 61798
+XHdpZGdldHM= 61799
+IHNjaGVtYXM= 61800
+X0NBUFRVUkU= 61801
+Lm1pY3Jv 61802
+44Kt 61803
+IPCfkQ== 61804
+IGFuZGVy 61805
+YWx0dW5n 61806
+ID09Jw== 61807
+IGVuZm9yY2luZw== 61808
+IEV4aXN0 61809
+dXZ3 61810
+aXJ0c2NoYWZ0 61811
+IEdyZWF0ZXN0 61812
+IE1vc3Vs 61813
+X3Bv 61814
+IHNpbW1lcg== 61815
+IHByb2dyZXNzZWQ= 61816
+IHJvdGFyeQ== 61817
+IG50bw== 61818
+Tm9pc2U= 61819
+IGNoYXNlZA== 61820
+IGluc3RpbmN0cw== 61821
+UHVibGljS2V5 61822
+IHNuYXBzaG90cw== 61823
+IFN1cGVydg== 61824
+Lm1hYw== 61825
+IEJpYmxp 61826
+Li4uKQoK 61827
+CW9sZA== 61828
+S0VO 61829
+IENsaW0= 61830
+IFByb2dyZXNzRGlhbG9n 61831
+bGljYW50cw== 61832
+X3NsaWRl 61833
+K2g= 61834
+IGVtcG93ZXJlZA== 61835
+SW5qZWN0b3I= 61836
+IGluZmx1ZW56YQ== 61837
+IHBsYW5ldGFyeQ== 61838
+V2lsbGlhbXM= 61839
+IG1vbmQ= 61840
+ZW5hbg== 61841
+LnJhbmRvbVVVSUQ= 61842
+KFBvc2l0aW9u 61843
+IGhvbWJyZXM= 61844
+IGluc2VjdXJl 61845
+IHZlcmJz 61846
+X3JlY3RhbmdsZQ== 61847
+SU5TVEFMTA== 61848
+IFBhcnNlRXhjZXB0aW9u 61849
+X1RB 61850
+JGZpZWxk 61851
+LkltYWdlSWNvbg== 61852
+IEd1amFyYXQ= 61853
+LWxpdmVk 61854
+X3NvbWU= 61855
+IGNsaXBwaW5n 61856
+LmdldENvbXBvbmVudA== 61857
+LmNsb3Nlc3Q= 61858
+LmxpdmU= 61859
+IGluY2lk 61860
+DQoJCQ0K 61861
+IHByb2R1dG9z 61862
+X211c2lj 61863
+U3FsQ29ubmVjdGlvbg== 61864
+IFByZWRpY3Rpb24= 61865
+IFhU 61866
+LW5vdGVz 61867
+IEpld2Vscnk= 61868
+cmVtZW4= 61869
+KHJlYXNvbg== 61870
+U25hcA== 61871
+QWZmaW5lVHJhbnNmb3Jt 61872
+YW5nZWxvZw== 61873
+IGRpY3RhdGU= 61874
+IHpvc3Rh 61875
+QmFyQ29udHJvbGxlcg== 61876
+L3Nob3A= 61877
+ZWlk 61878
+LXN3 61879
+Q291cnNlcw== 61880
+Zm9udFdlaWdodA== 61881
+IEhvZmZtYW4= 61882
+X051bQ== 61883
+S1I= 61884
+IFdpbGxpZQ== 61885
+YXJrYW4= 61886
+LXNjYWw= 61887
+IGF1ZGl0aW9u 61888
+LmRpc2M= 61889
+IHR3aXN0cw== 61890
+IGRlcGljdHM= 61891
+IGJhbnlhaw== 61892
+IEtpdHM= 61893
+IEhlemJvbGxhaA== 61894
+bm9ydGg= 61895
+IEdSRQ== 61896
+w7Zn 61897
+cXVvaQ== 61898
+LXRocmVhdGVuaW5n 61899
+IHdvcm1z 61900
+IFBO 61901
+IHNleGRhdGU= 61902
+IG1vbnVtZW50cw== 61903
+TU1D 61904
+Ym90cw== 61905
+IFNETEs= 61906
+ZGVhdGg= 61907
+IHBpdHM= 61908
+X2Nob2ljZXM= 61909
+KHNvbHV0aW9u 61910
+IHByb2NsYWltZWQ= 61911
+IFFpbmc= 61912
+IHNzY2FuZg== 61913
+c3RyYXRlZ3k= 61914
+ZGVhdXg= 61915
+IEZpc2NoZXI= 61916
+X0lW 61917
+IGlud2FyZA== 61918
+RGF0ZVBpY2tlcg== 61919
+IHNld2Vy 61920
+IGV1cm9w 61921
+IGhvbWVsZXNzbmVzcw== 61922
+LlNwcmluZ0Jvb3RBcHBsaWNhdGlvbg== 61923
+IFNwYWNlWA== 61924
+IGluZm9ybWluZw== 61925
+ICch 61926
+IHBsYXN0ZXI= 61927
+SW5pdGlhbGl6YXRpb24= 61928
+LmJldGE= 61929
+IFBlcnNvbnM= 61930
+dWdnbGluZw== 61931
+IHNoYW1wb28= 61932
+IEplaA== 61933
+IHNlcnI= 61934
+IG1heFNpemU= 61935
+IHN0aXRjaGVz 61936
+W3BhdGg= 61937
+LnJldA== 61938
+IFByZXQ= 61939
+TmVpbA== 61940
+Q29udmVydGVk 61941
+IE1hemRh 61942
+UE9TSVQ= 61943
+VG9vbGtpdA== 61944
+IFJFQURNRQ== 61945
+Q3VzdG9tQXR0cmlidXRlcw== 61946
+YXJjaGl2bw== 61947
+LlBhaW50 61948
+Z2V0T2JqZWN0 61949
+SVE= 61950
+LldlYkRyaXZlcg== 61951
+IGFudGlib2R5 61952
+IExpbWE= 61953
+aW5jb3JyZWN0 61954
+RnJhY3Rpb24= 61955
+IERlYWRsaW5l 61956
+c2VuZE1lc3NhZ2U= 61957
+Lk9mZnNldA== 61958
+ZWRpbw== 61959
+INeQ 61960
+IHNtb290aGluZw== 61961
+LmJv 61962
+IENFTlQ= 61963
+ZWxhc3RpYw== 61964
+LmNoYXJDb2RlQXQ= 61965
+UmVmcmVzaExheW91dA== 61966
+QUdFRA== 61967
+KTtcCg== 61968
+IFtdKQoK 61969
+IHRhcHM= 61970
+RFY= 61971
+4oCV 61972
+IENveQ== 61973
+IG91dHdlaWdo 61974
+J2dj 61975
+XEV4Y2VwdGlvbnM= 61976
+IEdyYW1tYXI= 61977
+IEd1YXRlbWFsYQ== 61978
+IEd1cnU= 61979
+IHRlag== 61980
+IGZyaWVuZHNoaXBz 61981
+IGNvcGluZw== 61982
+KHVwZGF0ZWQ= 61983
+X2R4 61984
+QW5hbA== 61985
+LU1heQ== 61986
+IG1hdGNobWFraW5n 61987
+IGp1bnRv 61988
+UEFDS0FHRQ== 61989
+IHJlbnRz 61990
+IOiHqg== 61991
+Y2FrZXM= 61992
+44CCJywK 61993
+cmVuZGluZw== 61994
+X0ZyYW1ld29yaw== 61995
+LSk= 61996
+KHVwbG9hZA== 61997
+IG9wb3J0dW4= 61998
+IGNhdXNh 61999
+IHByb2xpZmlj 62000
+Um93Q291bnQ= 62001
+IG5hY2t0ZQ== 62002
+IFNveQ== 62003
+U2h1dGRvd24= 62004
+6Ig= 62005
+X0VYUEk= 62006
+IEhhcmJvdXI= 62007
+IHRvcmU= 62008
+XE1lc3NhZ2U= 62009
+L1U= 62010
+T01CUkU= 62011
+LnNlZ21lbnQ= 62012
+IGNvbWVk 62013
+cm9tYW4= 62014
+IHNlZ8O6bg== 62015
+U2lnbWE= 62016
+IHNraWluZw== 62017
+IFRlcnJhaW4= 62018
+IGJlbmNobWFya3M= 62019
+IEF0dGVudGlvbg== 62020
+IH0qLwoK 62021
+IGdlaWw= 62022
+IGNhcnRvb25z 62023
+IGF0dHJpYnV0aW9u 62024
+IHJvdG9y 62025
+ZW5oYQ== 62026
+IM6z 62027
+IHRyYWo= 62028
+IGPDtG5n 62029
+IHNoYWtlcw== 62030
+IENsZW1zb24= 62031
+IGJydXRhbGl0eQ== 62032
+IDsNCg0K 62033
+IGVpZ2h0ZWVu 62034
+IEF3YXJlbmVzcw== 62035
+KHJlc3Q= 62036
+IHZpb2xpbg== 62037
+X1JPVVRF 62038
+LkZpZWxkTmFtZQ== 62039
+IEFkZQ== 62040
+aXppYQ== 62041
+IEhlbG0= 62042
+IHR5aW5n 62043
+IFByb2dyZXNzQmFy 62044
+YXV0b3I= 62045
+IGxvbmRvbg== 62046
+Jnc= 62047
+Z29v 62048
+SVNUUlk= 62049
+L0NyZWF0ZQ== 62050
+IFVTSU5H 62051
+IEdY 62052
+IEVGRkVDVA== 62053
+RmNu 62054
+IEVuY3J5cHRpb24= 62055
+Q0VE 62056
+ZmluZQ== 62057
+LWFycmF5 62058
+IHB1c2hWaWV3Q29udHJvbGxlcg== 62059
+QCQ= 62060
+VXBsb2FkZWQ= 62061
+LXdyaXRl 62062
+LmdldFBhZ2U= 62063
+X2VzdGFkbw== 62064
+QU5UTFI= 62065
+IFZpZXdEYXRh 62066
+ICR7KA== 62067
+IGFsbW9uZA== 62068
+IExvZ2ljYWw= 62069
+IHNob290ZXJz 62070
+IOygnA== 62071
+IHB1ZmY= 62072
+IHVuY29tbWVudA== 62073
+IGN1c3RvbWl6YWJsZQ== 62074
+xINy 62075
+RGlyZWN0aXZl 62076
+CWlkeA== 62077
+Q2hhbGxlbmdl 62078
+IHN1bW1hcml6ZQ== 62079
+IEF2Zw== 62080
+LlVzZXJJRA== 62081
+LmRpc3BhdGNoRXZlbnQ= 62082
+IGNvb2tlcg== 62083
+IGNvbm5lY3Rpb25TdHJpbmc= 62084
+IHNocmlua2luZw== 62085
+amFk 62086
+IFRoZW1lcw== 62087
+YW5kYXRvcnk= 62088
+IGR1YmlvdXM= 62089
+IGNlcA== 62090
+c3Bpbm5lcg== 62091
+IHN1YnJlZGRpdA== 62092
+IGlpaQ== 62093
+L2NhY2hl 62094
+ZGVmZXI= 62095
+IHN1YnN0aXR1dGVk 62096
+IGd1bm1hbg== 62097
+Y2xpbmc= 62098
+IOyw 62099
+KGN0cmw= 62100
+T3JkZXJJZA== 62101
+X2VuZw== 62102
+IGZpbG1tYWtlcnM= 62103
+IGZvcndhcmRpbmc= 62104
+IHN0cmFuZGVk 62105
+IExlYW4= 62106
+IOunjA== 62107
+KFVuaXQ= 62108
+IGRpZFNldA== 62109
+bGFrZQ== 62110
+Z3JvdW5kcw== 62111
+5Zug 62112
+IHVucmVnaXN0ZXI= 62113
+IG1pbmhh 62114
+IFZlZ2Fu 62115
+CWlWYXI= 62116
+LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQo= 62117
+b3R0bGU= 62118
+SVBD 62119
+IHByYWdtYQ== 62120
+IElJRA== 62121
+X01pbg== 62122
+JTsiPgo= 62123
+X3JhbQ== 62124
+ZHJpdmVycw== 62125
+IENoaWNr 62126
+IGNscg== 62127
+X0JVRkY= 62128
+INCy0YvQsQ== 62129
+TWVyYw== 62130
+anV2ZW4= 62131
+IHNoaW0= 62132
+0YvRhQ== 62133
+IHRoZW9yZXRpY2FsbHk= 62134
+L2ZvcnVt 62135
+IHNwaWRlcnM= 62136
+IGdvb3Nl 62137
+IFBob3Rvbg== 62138
+IHByb2ZpY2llbmN5 62139
+IENsZXJr 62140
+X2ZpZw== 62141
+Q29uY2Vybg== 62142
+KGNvc3Q= 62143
+IHJlZGQ= 62144
+LmVudmlyb25tZW50 62145
+Q3JvcA== 62146
+IOKJpQ== 62147
+eWVjdG9z 62148
+LkJhdGNoTm9ybQ== 62149
+LWNvbXA= 62150
+JGltYWdl 62151
+IE5pa29u 62152
+IGRtZw== 62153
+Wzo6LQ== 62154
+UExM 62155
+dW5jaW9z 62156
+Zm9jdXNlZA== 62157
+IHR1bw== 62158
+IGh2b3JkYW4= 62159
+IGF0dGFpbmVk 62160
+IHByb3RlY3Rvcg== 62161
+IEthbnQ= 62162
+IHNob3Jlcw== 62163
+IEV0aGFu 62164
+X3NjaG9vbA== 62165
+IG5lYXRseQ== 62166
+LlNoYXBlcw== 62167
+IE5lbQ== 62168
+aGNw 62169
+LicvJy4k 62170
+IE3DqXhpY28= 62171
+c3RydWN0dXJpbmc= 62172
+IGxha2g= 62173
+IGFkcmVzc2U= 62174
+JywnIw== 62175
+IEhhc2tlbGw= 62176
+X0VOR0lORQ== 62177
+IHJlcGVudA== 62178
+IGN1Y2s= 62179
+LkZJRUxE 62180
+IFNrZQ== 62181
+QEBAQA== 62182
+SGl0cw== 62183
+IGltcGxhbnRz 62184
+IENvbnN0aXR1dGlvbmFs 62185
+IFBIUFVuaXQ= 62186
+IHRvaWxldHM= 62187
+LmFsYnVt 62188
+5LiL6L29 62189
+CXNldFN0YXRl 62190
+KCItLS0tLS0tLS0tLS0tLS0t 62191
+LkFtb3VudA== 62192
+ZWN0dXJl 62193
+IFRob3VzYW5kcw== 62194
+TmVpdGhlcg== 62195
+IHByZXNldHM= 62196
+IEFzc3VtZQ== 62197
+KGZhY3Rvcnk= 62198
+IGxpY2s= 62199
+IGdvYWxrZWVwZXI= 62200
+PFN0YXRl 62201
+LXNlY3VyaXR5 62202
+X2ll 62203
+ZXNrdG9w 62204
+IEx2 62205
+IFN5bXBob255 62206
+LnNhbXBsZXM= 62207
+IGh5cGVydGVuc2lvbg== 62208
+xYJ1 62209
+Lmp1c3Q= 62210
+TWVuc2FqZQ== 62211
+IT0t 62212
+PFRLZXk= 62213
+IHNweWluZw== 62214
+LGRhdGU= 62215
+b3JnYW5pemVk 62216
+ICAgICAgICAgIA0K 62217
+KGN1ZGE= 62218
+X01ldGFkYXRh 62219
+dWJpc2hp 62220
+LUJlbno= 62221
+X0Fzcw== 62222
+IEVsc2VJZg== 62223
+IGxlc2lvbnM= 62224
+IFByZXN0b24= 62225
+VGVjaG5pY2Fs 62226
+IHBsYXRpbnVt 62227
+L3Bp 62228
+SW5kZXhlcw== 62229
+IHBhcmFwaA== 62230
+IG92ZXJ0aHJvdw== 62231
+aXBhdGVk 62232
+b250b2xvZ3k= 62233
+IGRlbW9ncmFwaGljcw== 62234
+IGNhbmU= 62235
+IHByb2ZpdGFiaWxpdHk= 62236
+IGVzdGFibGlzaG1lbnRz 62237
+XSY= 62238
+OmFic29sdXRl 62239
+ZW50cmFkYQ== 62240
+VHA= 62241
+IHNoYXJlaG9sZGVy 62242
+Lidf 62243
+5aaC5p6c 62244
+bnBq 62245
+dnJpcg== 62246
+IEVYRUM= 62247
+IFBvbGljaWVz 62248
+IGZlbGxvd3NoaXA= 62249
+IENHUmVjdEdldA== 62250
+X3JlY2lwZQ== 62251
+X1JFQw== 62252
+dW51 62253
+IHJvYmJlZA== 62254
+IHR1cm1vaWw= 62255
+KTo6 62256
+LnN0YXJ0RGF0ZQ== 62257
+IGV2YWN1YXRlZA== 62258
+LWVxdQ== 62259
+IGZvdXJ0ZWVu 62260
+QFNwcmluZ0Jvb3RBcHBsaWNhdGlvbg== 62261
+IOaVsOaNrg== 62262
+bmFudHM= 62263
+dGhyZW4= 62264
+U29ueQ== 62265
+REZT 62266
+LWNpZ2FyZXQ= 62267
+IGFnZ3JhdmF0ZWQ= 62268
+IG5lZGVybGFuZA== 62269
+IEZ1ag== 62270
+dWNlcw== 62271
+L3VzZQ== 62272
+dW1tZXI= 62273
+KFNURA== 62274
+6rCE 62275
+Kj4m 62276
+LnBlcmNlbnQ= 62277
+aWFudHM= 62278
+IEN0 62279
+VkFT 62280
+X1RIRU1F 62281
+IHNuaXBlcg== 62282
+X0VM 62283
+LXdvcmtlcnM= 62284
+U25vdw== 62285
+IEF1cmE= 62286
+aWVnbw== 62287
+IEdsb2I= 62288
+TmFtZWRRdWVyeQ== 62289
+X0JH 62290
+IExpdmVEYXRh 62291
+IFNlbmRNZXNzYWdl 62292
+IHJlc3BvbmRzVG9TZWxlY3Rvcg== 62293
+ZW5jZXJz 62294
+aW5zdHJ1Y3Rpb25z 62295
+KEl0 62296
+5ZG95ZGo5pyf 62297
+IEdvbWV6 62298
+Y2hhcmdlcw== 62299
+LkdlbmVyYXRlZFZhbHVl 62300
+IE1hY3Jvbg== 62301
+KFBPUlQ= 62302
+IFByb2Nlc3Nlcw== 62303
+Lm9uUmVzdW1l 62304
+IGZpZQ== 62305
+QnVpbGRlcnM= 62306
+KWdldA== 62307
+X3dhbGxldA== 62308
+IGNhbmM= 62309
+IE1vYmlsaXR5 62310
+IGFsYXJtcw== 62311
+cm9zaXM= 62312
+YW1hw7Fv 62313
+IHBpcw== 62314
+IOODuw== 62315
+U2hh 62316
+IGNvbmZlc3NlZA== 62317
+KElORk8= 62318
+KCcsJw== 62319
+X1NlcnZlcg== 62320
+IGJsYXN0ZWQ= 62321
+IEZhcm1lcnM= 62322
+cnV6 62323
+Y2tlZGl0b3I= 62324
+X0lNUExFTUVOVA== 62325
+IG1vdHRv 62326
+IENBUkU= 62327
+IHlkaw== 62328
+Qm9uZQ== 62329
+IGFkZW3DoXM= 62330
+KyIvIis= 62331
+UHJvcFR5cGVz 62332
+X1Na 62333
+LnBhaW50 62334
+LnBpeGVs 62335
+IE1lc3NhZ2VUeXBl 62336
+IHR3ZWFrcw== 62337
+YC4KCg== 62338
+VmVyaWZpY2F0aW9u 62339
+bmVjaw== 62340
+YmVycmE= 62341
+IG1pbmRmdWw= 62342
+U3Vydg== 62343
+IDotCg== 62344
+IGFueXdheXM= 62345
+IEFkbWlzc2lvbg== 62346
+YWNjZXNzaWJsZQ== 62347
+RmxhdEJ1dHRvbg== 62348
+ICInIik7Cg== 62349
+IGhhaGE= 62350
+VG9Qb2ludA== 62351
+IGJ1cmdlcnM= 62352
+Z2V0U3RhdGU= 62353
+XEhlbHBlcg== 62354
+IEZVTkNU 62355
+IEVMRU1FTlQ= 62356
+IENFUlQ= 62357
+IEFDQ09VTlQ= 62358
+Y2hhcmdpbmc= 62359
+X2NhbmRpZGF0ZQ== 62360
+X3JlY2VudA== 62361
+IEluc3RydWN0b3I= 62362
+IGRydW5rZW4= 62363
+WVNRTA== 62364
+b3JhdGl2ZQ== 62365
+IjoiIg== 62366
+IHRhZ05hbWU= 62367
+X05FRw== 62368
+IHFw 62369
+IFVuZGVmaW5lZA== 62370
+IGdyZWFzZQ== 62371
+CSAgCQ== 62372
+IGVhZ2VybHk= 62373
+VGV4UGFyYW1ldGVyaQ== 62374
+ZGlzdHJpYnV0ZWQ= 62375
+QWRtaW5pc3RyYXRvcg== 62376
+RGlzdHJpYnV0aW9u 62377
+IERlY29tcA== 62378
+IFRyYW5zZm9ybWVy 62379
+LmJ0blNhdmU= 62380
+IEdvcw== 62381
+KEVudW0= 62382
+Y2Fpcm8= 62383
+LWNp 62384
+L3JlcG9ydA== 62385
+IFBvc3Rlcg== 62386
+X2RlcGVuZGVuY3k= 62387
+IGV4cGxvaXRz 62388
+c2V0Rmxhc2g= 62389
+IHh0 62390
+IGpld2VsbGVyeQ== 62391
+IGRhaQ== 62392
+X1JBTQ== 62393
+IGJlcnJpZXM= 62394
+IGdyYW5ueQ== 62395
+RmF0YWw= 62396
+w6lhbA== 62397
+LW1vc3Q= 62398
+LlZpc3VhbEJhc2lj 62399
+IFBlbmQ= 62400
+YmVp 62401
+amFr 62402
+OyovCg== 62403
+Qm95 62404
+PlNlbGVjdA== 62405
+aW5kcmljYWw= 62406
+VGVjaG5vbG9neQ== 62407
+IEFsbGlzb24= 62408
+ZGF0YXR5cGU= 62409
+J2Nsb2Nr 62410
+IGtvc3Q= 62411
+IGJham8= 62412
+LkNvdW50cnk= 62413
+WmVuZA== 62414
+LndyYXBwZXI= 62415
+4L0= 62416
+IEZpbGlwaW5v 62417
+b2NyZQ== 62418
+U1NI 62419
+IFNBTVBMRQ== 62420
+X2luaXRpYWxpemVk 62421
+KTs/Pgo= 62422
+IHBvcm5vc3Q= 62423
+ZXNhbg== 62424
+IEN1dHRpbmc= 62425
+IG1peGVz 62426
+X2FnYWlu 62427
+IGZvcm11bGFyaW8= 62428
+W1Y= 62429
+IHRlbGVmb25v 62430
+L3Vz 62431
+IGxvYWREYXRh 62432
+LnJlZmVyZW5jZXM= 62433
+IG1hcFZpZXc= 62434
+KyJf 62435
+IFNRTGl0ZURhdGFiYXNl 62436
+aXRvbg== 62437
+Q29sdW1uVHlwZQ== 62438
+IEV2ZXJ0b24= 62439
+LlJlc3VsdHM= 62440
+L25vdA== 62441
+IGdldEZpbGU= 62442
+aGVyaXRhbmNl 62443
+IGdldEhlaWdodA== 62444
+JHVzZXJuYW1l 62445
+d2l0aGRyYXc= 62446
+Xyk7DQo= 62447
+LnV0 62448
+IFFBcHBsaWNhdGlvbg== 62449
+dXJuYWw= 62450
+LWRvd25sb2Fk 62451
+YnVyZ2Vy 62452
+cHJlY2k= 62453
+IFRoYW5rZnVsbHk= 62454
+LkVWRU5U 62455
+IGdyZWF0bmVzcw== 62456
+IGxvb3NlbHk= 62457
+IG1hc2g= 62458
+IGdlaGVu 62459
+X2FudA== 62460
+IGltcGVuZGluZw== 62461
+LmlzUHJlc2VudA== 62462
+IHN0YWlucw== 62463
+SU1T 62464
+LmJhY2tlbmRz 62465
+IGlycmlnYXRpb24= 62466
+IFRhdA== 62467
+L3Rlc3Rz 62468
+IEtpbmdzdG9u 62469
+LnRyYW5zbGF0ZXNBdXRvcmVzaXppbmdNYXNrSW50b0NvbnN0cmFpbnRz 62470
+IHZvbWl0aW5n 62471
+LXJlcXVpcmVk 62472
+IGJsYXpl 62473
+IFN0YWZmb3Jk 62474
+UklE 62475
+L2Z3bGluaw== 62476
+IGthbGU= 62477
+c29sZA== 62478
+KHByb2dyZXNz 62479
+KGNoYXJ0 62480
+IGN5c3Q= 62481
+IGRpbGlnZW5jZQ== 62482
+L21w 62483
+IGNsZXJneQ== 62484
+IEJyb3dzZXJSb3V0ZXI= 62485
+IEFQSw== 62486
+IENPTlRBQ1Q= 62487
+QmFySXRlbQ== 62488
+LURpc3Bvc2l0aW9u 62489
+IE1vdG9yb2xh 62490
+X3NhbA== 62491
+IFdvb2Rlbg== 62492
+IFRIRVk= 62493
+IGNvbW1lbnRhdG9ycw== 62494
+IGNvbW1lcmNpYWxz 62495
+PW1vZGVs 62496
+LiIpLAo= 62497
+IFBsdWdpbnM= 62498
+ZGFpbg== 62499
+aGVhZGVk 62500
+IENvb3JkaW5hdGVz 62501
+SmFuZQ== 62502
+IFByZWZlcnJlZA== 62503
+IHBvZGVtb3M= 62504
+LmlzQmxhbms= 62505
+IFN0YXA= 62506
+IHdzcA== 62507
+IENPTEw= 62508
+X2JpZA== 62509
+IHByb2Jlcw== 62510
+dWFuaWE= 62511
+KHN5bQ== 62512
+IGN1ZXJwbw== 62513
+IG1hbmlwdWxhdGluZw== 62514
+IGFtYXppbmdseQ== 62515
+LkRBWQ== 62516
+dW1wdGVjaA== 62517
+YWNvYmlhbg== 62518
+VGVybWluYXRl 62519
+IHN0YXRpb25lZA== 62520
+U2V0QnJhbmNo 62521
+U2NyZWVuc2hvdA== 62522
+ZXN0aGVzaWE= 62523
+IHdhbGtlcg== 62524
+I2Zyb20= 62525
+Y29vcmRpbmF0ZQ== 62526
+X2ludGVyZXN0 62527
+IGhlbHBsZXNz 62528
+CXB1Yg== 62529
+bmdh 62530
+X0V4 62531
+IG53 62532
+IHRleHR1YWw= 62533
+IHBsdWdz 62534
+IG1pbmlvbg== 62535
+bWFyZXM= 62536
+PD4K 62537
+QUNB 62538
+Q29tcGFueU5hbWU= 62539
+KGVj 62540
+IExhbmRzY2FwZQ== 62541
+X1BST1ZJREVS 62542
+Y3c= 62543
+lIQ= 62544
+QWNjb3VudElk 62545
+JDo= 62546
+IFBlcnNvbmFsbHk= 62547
+cHJvcGVydHlOYW1l 62548
+IEt1Yg== 62549
+J2k= 62550
+IEdpdWw= 62551
+IHByaW9yaXRpemU= 62552
+Rk9STUFOQ0U= 62553
+IFBhcmFkZQ== 62554
+KVwK 62555
+c3RkYm9vbA== 62556
+IGFsZXJ0RGlhbG9n 62557
+IExlaA== 62558
+LmNhdGFsb2c= 62559
+IHdlYmluYXI= 62560
+IGltcG9ydGVy 62561
+cHJvamVjdElk 62562
+VFlQTw== 62563
+X18NCg== 62564
+R1c= 62565
+c3VtbWVy 62566
+IHNpbmlzdGVy 62567
+LmZhaWxlZA== 62568
+IGJlc29pbg== 62569
+aXNtYW4= 62570
+REVTVA== 62571
+IG5o4bqtcA== 62572
+IG1vxbxuYQ== 62573
+X2luc3Ry 62574
+IHBhdmVk 62575
+IHByZWZpeGVz 62576
+IHJhbXBhbnQ= 62577
+IHlBeGlz 62578
+IOazqA== 62579
+X21pZGRsZQ== 62580
+IHNjaG9sYXJseQ== 62581
+IHByb3N0aXR1dGVz 62582
+IG1vcmFsZQ== 62583
+LnBlcm1pc3Npb25z 62584
+LmdldExpc3Q= 62585
+IHJlamVjdGluZw== 62586
+IGxvb3Bpbmc= 62587
+IFNwZWNpZmljYXRpb25z 62588
+IGltbWVuc2VseQ== 62589
+IE1lZGlhbg== 62590
+KGNoYWlu 62591
+IGNsaWNo 62592
+L2ZsdXR0ZXI= 62593
+YWNm 62594
+LnVybG9wZW4= 62595
+dXR0ZXJzdG9jaw== 62596
+IHNwZWN0cmE= 62597
+IGFkbWly 62598
+L21heA== 62599
+LkVtaXQ= 62600
+KHdlaWdodHM= 62601
+acSZ 62602
+SW5zdGFsbGluZw== 62603
+SnU= 62604
+IEZlbGw= 62605
+IEZSRQ== 62606
+LmRlbg== 62607
+IEJpZ0ludA== 62608
+Ij5A 62609
+ICopOwoK 62610
+IEJpb2xvZ2ljYWw= 62611
+IHBhdGVudGVk 62612
+LnBhZ2luYXRpb24= 62613
+LnJvbGw= 62614
+IER1bA== 62615
+IGRlc2Fycm9sbG8= 62616
+UmVnYXJkbGVzcw== 62617
+mOydtA== 62618
+IHJvYmU= 62619
+0J3QtQ== 62620
+IEJveWQ= 62621
+LyoqKioqKioqKioqKioqKioqKioqKioqKg== 62622
+cmVjZWlwdA== 62623
+IEFzc2lnbmVk 62624
+YXR0ZW5kYW5jZQ== 62625
+LWNob2ljZQ== 62626
+ZXRzeQ== 62627
+X2Vsc2U= 62628
+LG5leHQ= 62629
+X2V4aXN0aW5n 62630
+ICcnKSwK 62631
+IGxpYmVydGlu 62632
+dHJhaXRz 62633
+YXR0ZQ== 62634
+Q29tcGFyYWJsZQ== 62635
+IENvdg== 62636
+IEFkb2xlcw== 62637
+LHRoZQ== 62638
+IExvYWRlZA== 62639
+fHI= 62640
+PWluZGV4 62641
+IEdhc3Q= 62642
+IGluamVjdG9y 62643
+CXN0b3A= 62644
+LWdvb2dsZQ== 62645
+IGZldGFs 62646
+IGFsbG8= 62647
+eWxlZnQ= 62648
+Z2V0UGFyYW1ldGVy 62649
+4oCd4oCU 62650
+X3NlY3Rvcg== 62651
+LlV0aWxpdHk= 62652
+b3Njb3Bl 62653
+LmVhc2U= 62654
+IE1hZ25ldGlj 62655
+QXJyYXlPZg== 62656
+IGZlYXJmdWw= 62657
+IEluZmVy 62658
+IEZ1aw== 62659
+Sm9obnNvbg== 62660
+JGFycmF5 62661
+IHNhaXM= 62662
+X2NvbnRy 62663
+RGVzY3Jp 62664
+IERldGFpbGVk 62665
+X2xlYXZl 62666
+X1JPVA== 62667
+IG7DpGNo 62668
+IGthbWk= 62669
+RENBTEw= 62670
+OmVx 62671
+IG1vbms= 62672
+X29ianM= 62673
+KFNlcnZpY2U= 62674
+ZmluYW5jZQ== 62675
+IHBvZGVt 62676
+X3Jlc3RvcmU= 62677
+IGRlY29yYXRvcnM= 62678
+IGFkdmlzaW5n 62679
+INC/0LDRgA== 62680
+LnBlcm0= 62681
+IEhhaQ== 62682
+IGZr 62683
+dW50ZWVycw== 62684
+IFJUV0Y= 62685
+X2l4 62686
+QUNT 62687
+IGJyZWFrb3V0 62688
+ZGlyZWNjaW9u 62689
+IFN1bnNldA== 62690
+X2Z4 62691
+b2xrYXRh 62692
+LXJhZGlv 62693
+SGV0 62694
+LnV0aWxpdGllcw== 62695
+X2Jhc2lz 62696
+KGtpbmQ= 62697
+IENvbmM= 62698
+VGh1bWI= 62699
+IE1pY2hl 62700
+ZGVsaXZy 62701
+IGd1dGU= 62702
+IEZpbGVQYXRo 62703
+IFRyaWJl 62704
+XCIp 62705
+X2N1ZGE= 62706
+RGlmZmVyZW5jZQ== 62707
+IE1vbnN0ZXJz 62708
+IHNldFR5cGU= 62709
+LkNvbnRlbnRUeXBl 62710
+IGR1bQ== 62711
+RW52ZWxvcGU= 62712
+YWd0 62713
+IHVubG9hZA== 62714
+X2NoZWNrZXI= 62715
+IHJlc3Rv 62716
+X3Blb3BsZQ== 62717
+UHJpY2Vz 62718
+UHJvZmlsZXM= 62719
+KClc 62720
+RlVO 62721
+ICIjIg== 62722
+IFBhdHRlcm5z 62723
+IFNQRA== 62724
+X1JPV1M= 62725
+T3JpZw== 62726
+YmxhZGU= 62727
+IGzDqQ== 62728
+JWk= 62729
+Kysr 62730
+TGlmZWN5Y2xl 62731
+LS0tLS0tLS0tLS0tLS0tCg== 62732
+VGFy 62733
+VGhhbk9y 62734
+JnE= 62735
+IGNyaXRpY2lzbXM= 62736
+LXBo 62737
+RWxlbWVudEV4Y2VwdGlvbg== 62738
+X2d1ZXN0 62739
+IOu2 62740
+X0Fz 62741
+IENhcnJ5 62742
+X0JJRw== 62743
+YWtldXA= 62744
+X3JldHJ5 62745
+IG7DqWNlc3M= 62746
+IE1JU1M= 62747
+aXN1 62748
+IFNwaXJpdHVhbA== 62749
+XyRf 62750
+IHJlZmxlY3Rpb25z 62751
+PHQ= 62752
+IGZ1bsOnw6Nv 62753
+IG1vbmFyY2g= 62754
+IFBhdGVs 62755
+X3ZvbHRhZ2U= 62756
+IHJhaW55 62757
+Y291cnQ= 62758
+IHVsdHJhc291bmQ= 62759
+aU9T 62760
+X0FMV0FZUw== 62761
+V28= 62762
+X0JMRU5E 62763
+b2tzZW4= 62764
+IHRyYXZlbGVy 62765
+IGRhdGFUYWJsZQ== 62766
+c2V0Q3VycmVudA== 62767
+V29ya2Zsb3c= 62768
+LnllbGxvdw== 62769
+XSkt 62770
+QUJTUEFUSA== 62771
+X2l0ZXJhdGlvbg== 62772
+0LTRgA== 62773
+IHViaWM= 62774
+IG1lYXRz 62775
+L2Vt 62776
+IERpc29yZGVy 62777
+IGVudmlhcg== 62778
+U0VP 62779
+IGhlYXZlbnM= 62780
+X3N0dWI= 62781
+IGFkcmVzcw== 62782
+IFRyaWU= 62783
+IExpbmRzYXk= 62784
+bGVp 62785
+IHBsYXRh 62786
+LnNldHRpbmc= 62787
+IGVsZWs= 62788
+ICgkew== 62789
+QXV0b21hdGlj 62790
+IGRvd25zdGFpcnM= 62791
+UElY 62792
+aWNpb25hbA== 62793
+YWJhbA== 62794
+LXN0b3JhZ2U= 62795
+aWNoaWVy 62796
+IEFscGhhYmV0 62797
+LGxhYmVs 62798
+QAo= 62799
+IGludGVzdGluYWw= 62800
+IHZhcmE= 62801
+Lm1h 62802
+IHByb2du 62803
+IG5lcGhldw== 62804
+VGltaW5n 62805
+Y2xhc3NuYW1l 62806
+IGxvY29t 62807
+IFNhbWFudGhh 62808
+IEFjY29yZGluZ2x5 62809
+IFhDVGVzdENhc2U= 62810
+IFBsYWlucw== 62811
+IExlbmlu 62812
+bm9w 62813
+IFR5c29u 62814
+IHJlbmFs 62815
+b2luZQ== 62816
+KFRlc3RDYXNl 62817
+IExvbWI= 62818
+QmFuZw== 62819
+IHZvbHVt 62820
+X2dlbmRlcg== 62821
+IGx1dA== 62822
+IO+8 62823
+Q29uZmlndXJlcg== 62824
+IHN0cm9rZVdpZHRo 62825
+Lkh0dHBTZXJ2bGV0 62826
+fHg= 62827
+LkpTY3JvbGxQYW5l 62828
+IGNvbnNvcnQ= 62829
+LmJ1bXB0ZWNo 62830
+dHJpZGdlcw== 62831
+IGJlbmVmaWNpYXJ5 62832
+PXJlcXVpcmU= 62833
+cmVuYw== 62834
+IE9V 62835
+ZW50YXJpbw== 62836
+IHVyZ2Vz 62837
+4oCUbm90 62838
+Q2FtcGFpZ24= 62839
+ZHJl 62840
+IFJpdmVyc2lkZQ== 62841
+CXRi 62842
+IG91dHB1dEZpbGU= 62843
+IGFic3Q= 62844
+IHN0cnVjdHM= 62845
+IHJ2YWw= 62846
+XCI+Ig== 62847
+IGFjcXVpc2l0aW9ucw== 62848
+QkxBQ0s= 62849
+IHRydW5j 62850
+IGFubm90YXRlZA== 62851
+c2V0VXA= 62852
+VE9LRU4= 62853
+IENvY2E= 62854
+RGlzYXBwZWFy 62855
+OnZhbHVl 62856
+IGFpZGVk 62857
+dHRs 62858
+bHV4 62859
+IGFjdWVyZG8= 62860
+IEZpbmdlcg== 62861
+Lkdlb21ldHJ5 62862
+XScpOwo= 62863
+Lmdm 62864
+VFhU 62865
+IFNjb3RpYQ== 62866
+YXZyYQ== 62867
+IHZpcA== 62868
+IHdob3BwaW5n 62869
+LWdpcmw= 62870
+IGN1cnNlZA== 62871
+XVst 62872
+IGNpcmN1bGF0ZWQ= 62873
+dW5jdHVyZQ== 62874
+b3JtYW4= 62875
+IG1BZGFwdGVy 62876
+IOKAlAoK 62877
+RmlsZU1hbmFnZXI= 62878
+KGlQYXJhbQ== 62879
+SW1hZ2VCdXR0b24= 62880
+REFR 62881
+QXJtb3I= 62882
+IHNwYXQ= 62883
+LmpzZGVsaXZy 62884
+IG1pc29n 62885
+LmVjb3Jl 62886
+J119Cg== 62887
+aW1wb3J0cw== 62888
+IGRpbm9zYXVy 62889
+LUZyZWU= 62890
+IGFubm9u 62891
+IHRyaWJ1bmFs 62892
+WWE= 62893
+Lmd1aWQ= 62894
+bW9zdGx5 62895
+PT09PQo= 62896
+IGltYWdlbQ== 62897
+U3VpdA== 62898
+a2Fz 62899
+IENoYW5uZWxz 62900
+QnVkZ2V0 62901
+IERpdmlkZQ== 62902
+amVt 62903
+IEdyaQ== 62904
+IGluZGljYXRpdmU= 62905
+XEZhY3Rvcnk= 62906
+LnJlcG9zaXRvcmllcw== 62907
+IEFNUA== 62908
+LnNucA== 62909
+IGHDpw== 62910
+Ims= 62911
+IMK1 62912
+ZGVjb2RlZA== 62913
+X2FyYw== 62914
+LUNsYXVzZQ== 62915
+IEFkag== 62916
+IG5ld0FycmF5 62917
+KEdFVA== 62918
+IGxhdGlu 62919
+IHd6 62920
+OnVpbnQ= 62921
+5Yir 62922
+Ii4u 62923
+Q29ubmVjdGluZw== 62924
+ZW5ub24= 62925
+5bm2 62926
+IFNlcw== 62927
+IGJlbG9uZ2luZ3M= 62928
+Kycm 62929
+CXNldHRpbmdz 62930
+SU5W 62931
+IHDDqQ== 62932
+IGFkdWx0aG9vZA== 62933
+YW1ibGU= 62934
+X21hc2tz 62935
+LXJlc29sdXRpb24= 62936
+cmF0cw== 62937
+IO2BtA== 62938
+IHZvZw== 62939
+IFNobw== 62940
+IENvdmVuYW50 62941
+IHJlbWluZGluZw== 62942
+b3JuYWRv 62943
+aWFk 62944
+5byC 62945
+Q3JlYXRpdmU= 62946
+IFNUWUxF 62947
+IGFub21hbHk= 62948
+XEFwcGxpY2F0aW9u 62949
+IG1hbmlmZXN0YXRpb24= 62950
+IE5hbm8= 62951
+TWFwVmlldw== 62952
+aWRlYWw= 62953
+YWNoaW5lcnk= 62954
+IFZhdWdo 62955
+cHJpbnRlcg== 62956
+VmVyZGFuYQ== 62957
+L2NvbXBvbmVudA== 62958
+IGFkZENoaWxk 62959
+IGxlYXJuZXI= 62960
+IGRlY3J5cHRlZA== 62961
+IHRpZ2h0ZXI= 62962
+5p2f 62963
+IGplag== 62964
+IC4KCgoK 62965
+IExvYmJ5 62966
+bGVw 62967
+w6Rubg== 62968
+bGVpZ2g= 62969
+L3JvdXRlcw== 62970
+IGNhbm9weQ== 62971
+IEZpc2NhbA== 62972
+Ojsi 62973
+IGJ1cmRlbnM= 62974
+L2Z1bGw= 62975
+IENTUg== 62976
+LlNoYXJlZFByZWZlcmVuY2Vz 62977
+L3RyZWU= 62978
+IGRyb2l0 62979
+SW1wbGVtZW50 62980
+R2V0Q3VycmVudA== 62981
+KHB1c2g= 62982
+JHg= 62983
+0Y/Qtw== 62984
+QUNJVFk= 62985
+PT09PT09PT09PQo= 62986
+amM= 62987
+X2hyZWY= 62988
+LmdldFJvb3Q= 62989
+IEtE 62990
+KGxz 62991
+W2NudA== 62992
+IGRhbGw= 62993
+KGJw 62994
+IEVX 62995
+S2V5RXZlbnQ= 62996
+bG9iZQ== 62997
+IGh0bWxlbnRpdGllcw== 62998
+IGZhbHRh 62999
+IHZhbHZlcw== 63000
+IHNpemluZw== 63001
+UG9ybg== 63002
+IHNob3dFcnJvcg== 63003
+IEZyaWQ= 63004
+IMOH 63005
+LnJhbmRu 63006
+IHRhbnRy 63007
+IHNheA== 63008
+dXJvdmlzaW9u 63009
+dGhlb24= 63010
+X1JDQw== 63011
+eEZE 63012
+SW5pdFN0cnVjdA== 63013
+IGNhbm5lZA== 63014
+IHF1YW50aWRhZGU= 63015
+LldBUk5JTkc= 63016
+IEJyaXR0 63017
+LXJlZ2lzdGVy 63018
+YWN0aXZlbHk= 63019
+IE5hdGFsaWU= 63020
+44G/ 63021
+IENPTk5FQ1Q= 63022
+emVr 63023
+IG1pbGxvbmVz 63024
+XWludA== 63025
+ICcsJyw= 63026
+IHByaW4= 63027
+IjpbLQ== 63028
+IC8vLg== 63029
+IGludGltaWRhdGluZw== 63030
+cmF6aW9uZQ== 63031
+LmlibQ== 63032
+IEpha2FydGE= 63033
+0LzQtdGA 63034
+IGxvYWRDaGlsZHJlbg== 63035
+X1VQTE9BRA== 63036
+IFdlZWtz 63037
+IGdldFRleHQ= 63038
+IPCfkg== 63039
+IF1dCg== 63040
+IENvc3Rz 63041
+xJlw 63042
+cGF5bWVudHM= 63043
+Lk1vdmll 63044
+bGg= 63045
+tIg= 63046
+X2NlcnRpZmljYXRl 63047
+PXE= 63048
+bGlicmFyaWVz 63049
+IEFlcg== 63050
+YXVzcw== 63051
+CWZhaWw= 63052
+T1VORFM= 63053
+c2VuZEtleXM= 63054
+IHNjYW1z 63055
+d2FydHM= 63056
+SGlzdA== 63057
+IEVzc2V4 63058
+IGZ1cnk= 63059
+IHRpdHJl 63060
+IENvcGVuaGFnZW4= 63061
+IHByZWRlZmluZWQ= 63062
+c2Nw 63063
+c2VycmF0 63064
+LmVuc3VyZQ== 63065
+aWxlZQ== 63066
+TWVyaXQ= 63067
+X1VOTE9DSw== 63068
+IENvcnJlY3Rpb24= 63069
+Tm9ybWFsaXphdGlvbg== 63070
+IOS/ruaUuQ== 63071
+IHN0b29s 63072
+IOWIoOmZpA== 63073
+U2hvcnRjdXQ= 63074
+Y2hvc2Vu 63075
+IGJ1bGx5 63076
+IGZ1bmNpw7Nu 63077
+44O844Or 63078
+IOeUn+WRveWRqOacnw== 63079
+LmFsaWFz 63080
+PlRvdGFs 63081
+IFNURU0= 63082
+cGVuZw== 63083
+Y2FsZXI= 63084
+cGVyZmVjdA== 63085
+IGJvbmRpbmc= 63086
+UGhvbmVz 63087
+IHB1bHA= 63088
+67aA 63089
+SUVXUw== 63090
+IERlZXI= 63091
+X0xDRA== 63092
+IENvbmNvcmQ= 63093
+V2l6YXJk 63094
+IG9mcmVj 63095
+IEVtZXJhbGQ= 63096
+dGVuZXNz 63097
+bmF2aWdhdG9y 63098
+VGhlb3J5 63099
+IGd1YXJkYXI= 63100
+IGZ1bGZpbA== 63101
+IFVuYXV0aG9yaXplZA== 63102
+IEJvdXQ= 63103
+CWhvc3Q= 63104
+IFJpYg== 63105
+KGZ0 63106
+RG9jcw== 63107
+LmdldEJvZHk= 63108
+5b+D 63109
+IFJpdmVyYQ== 63110
+IHdhdmluZw== 63111
+IHBlcmZpbA== 63112
+Qm91bmRpbmdDbGllbnRSZWN0 63113
+LmZh 63114
+cGFnZWQ= 63115
+IEFmZmlsaWF0ZQ== 63116
+IHByb2xldA== 63117
+fS0+ew== 63118
+KHNjb3Jlcw== 63119
+IHZpdGFl 63120
+e05hbWU= 63121
+c2NoZWR1bGVy 63122
+X1NBTg== 63123
+IE5lYw== 63124
+IEJlZWY= 63125
+X3Rj 63126
+TElO 63127
+IEV2ZW50VHlwZQ== 63128
+IEJ1ZmZlcmVkV3JpdGVy 63129
+IHNvZnRlcg== 63130
+IFZvdGluZw== 63131
+IEdlc3R1cmVEZXRlY3Rvcg== 63132
+IHVuc2Vlbg== 63133
+IFNDTw== 63134
+IGVsbw== 63135
+Y29tYmluZQ== 63136
+X21ha2VDb25zdHJhaW50cw== 63137
+IHVuZGVyZ29uZQ== 63138
+IE9mZmljaWFscw== 63139
+LG9wdA== 63140
+IGxheWVyZWQ= 63141
+ScOTTg== 63142
+IGJhbmtlcnM= 63143
+IHNlZ3JlZ2F0aW9u 63144
+IHJ1c3NpYW4= 63145
+IHZlbnRhbmE= 63146
+Z2V0S2V5 63147
+U2FudGE= 63148
+LlRvb2xTdHJpcFNlcGFyYXRvcg== 63149
+IEFlcm9z 63150
+LnB1dEludA== 63151
+IGluZm9ybXM= 63152
+X2JpbGw= 63153
+66aE 63154
+LnNldE1heA== 63155
+IH0+Cg== 63156
+IElQUw== 63157
+IEFsaWM= 63158
+In0KCg== 63159
+IHVzaGVy 63160
+IE5ndXllbg== 63161
+IGFic29sdXQ= 63162
+IGd1YXJkZWQ= 63163
+IFJlYmVs 63164
+IFp3 63165
+IEFubnVuY2k= 63166
+IHByw6E= 63167
+YWJjZGVmZ2hpamts 63168
+IFZlcmlmaWVk 63169
+W2l4 63170
+IHRpZXJz 63171
+w6J0 63172
+LiIpDQo= 63173
+aWp1 63174
+bGl2aW5n 63175
+R1BT 63176
+LlRlc3RUb29scw== 63177
+U2l6ZVBvbGljeQ== 63178
+IG1hc3NhZ2Vz 63179
+YXNzZXJ0SW5zdGFuY2VPZg== 63180
+IHBvc3PDrXZlbA== 63181
+IGJ1c2M= 63182
+IEp1ZGFpc20= 63183
+IGluZGlzcGVuc2FibGU= 63184
+IE1vc3RseQ== 63185
+SVRB 63186
+IGdldENvbnRlbnQ= 63187
+QnJvd3NlclJvdXRlcg== 63188
+LWNvdW50ZXI= 63189
+IG9idGVu 63190
+IC8+KTsK 63191
+0LjQuw== 63192
+aGVhZGxpbmU= 63193
+KGhvbWU= 63194
+YWxpY2U= 63195
+bGRyZQ== 63196
+X01vZHVsZQ== 63197
+Q29tcGFuaWVz 63198
+TlBD 63199
+IHRvcnNv 63200
+LmNvbnM= 63201
+CWFkZHJlc3M= 63202
+X3B1cmNoYXNl 63203
+IEJhcmQ= 63204
+Z3N0 63205
+LWFuaW1hdGlvbg== 63206
+X3BhaWQ= 63207
+LnNwZWNpYWw= 63208
+IGRlbGlt 63209
+IHRha2VvdmVy 63210
+KGhhbmQ= 63211
+ZW51aW5l 63212
+LWdyZXk= 63213
+IEFCSQ== 63214
+U2Vzc2lvbkZhY3Rvcnk= 63215
+aW5zdGFsbGVy 63216
+X0RJU1RBTkNF 63217
+IEZhdm9yaXRlcw== 63218
+oIA= 63219
+Jz57 63220
+IExhdXJlbnQ= 63221
+0YfQtdGC 63222
+IHN0cmlwc2xhc2hlcw== 63223
+IGVzdGFiYQ== 63224
+JnQ= 63225
+LnBhbg== 63226
+IFBBUlRZ 63227
+IEJhbGk= 63228
+Y3Np 63229
+KG1lbW9yeQ== 63230
+IFRvZG9z 63231
+IFNPQVA= 63232
+YWduZXQ= 63233
+CWJlZm9yZQ== 63234
+T3B0aW9uc1Jlc29sdmVy 63235
+aWJlbg== 63236
+INmF2YY= 63237
+IGFkZGl0aXZl 63238
+IE1lbGVl 63239
+IE1hbml0b2Jh 63240
+IFBlcmNlbnRhZ2U= 63241
+PSgt 63242
+LmtpbGw= 63243
+IGx4 63244
+YW5jYQ== 63245
+IGZvdG9ncmFm 63246
+IGJsYW5j 63247
+IFJlc2lkZW50cw== 63248
+cGluaw== 63249
+SEJveExheW91dA== 63250
+LnVuaW9u 63251
+IEhZ 63252
+IGNvbnRlbnRWaWV3 63253
+LWZhdA== 63254
+CWhhcw== 63255
+66OM 63256
+IHdoaXBwZWQ= 63257
+dmVuZG9ycw== 63258
+dWJyZQ== 63259
+SVRIRVI= 63260
+LmZ1bmN0aW9uYWw= 63261
+INCy0LXRgA== 63262
+Q2FuY2VsZWQ= 63263
+LWNu 63264
+SW5PdXQ= 63265
+LlJvd1N0eWxlcw== 63266
+IHRyYXRh 63267
+IEluZG9vcg== 63268
+LWZhc2hpb25lZA== 63269
+IEJvb3Ro 63270
+LkxhYmVsQ29udHJvbA== 63271
+IHBvcGU= 63272
+IENhcm5lZ2ll 63273
+bmVyZ2ll 63274
+IEJY 63275
+44CCIiwK 63276
+IFdlYnN0ZXI= 63277
+CWRpdg== 63278
+TmFycg== 63279
+IGNvbmp1Zw== 63280
+a2lk 63281
+IG1vZGVyYXRpb24= 63282
+IGFteQ== 63283
+IFNvbHZl 63284
+VklD 63285
+IEVa 63286
+aWxsYWM= 63287
+IENpcGhlcg== 63288
+IEFjY2VwdGVk 63289
+TEFCRUw= 63290
+IHdyYXRo 63291
+IG1pblZhbHVl 63292
+IGthxbw= 63293
+IERhdWdodGVy 63294
+KS5e 63295
+KGRj 63296
+IHJlc29sdmVz 63297
+c2Nzcw== 63298
+YWJvdXRz 63299
+dWx0aXBhcnRGaWxl 63300
+IGZlYXRz 63301
+IGxhdW5kZXJpbmc= 63302
+IGNvbXBhw7E= 63303
+IHNlZ3VyaWRhZA== 63304
+IGhvYmJpZXM= 63305
+LWZhY2luZw== 63306
+InZhbHVl 63307
+Z2V0SW1hZ2U= 63308
+U3FsU2VydmVy 63309
+IHdpdGhTdHlsZXM= 63310
+PkRhdGU= 63311
+IEV4cGVk 63312
+JGpzb24= 63313
+6ZO+ 63314
+IEFDVElPTlM= 63315
+U2Vuc2l0aXZl 63316
+Ymxhc3Q= 63317
+IMO2ZmY= 63318
+ZnRl 63319
+Q1RTVFI= 63320
+IExvZ0xldmVs 63321
+Y29udHJhY3Rz 63322
+LmRqYW5n 63323
+Ij4NDQo= 63324
+RVRZUEU= 63325
+IG9iamM= 63326
+X1NPVU5E 63327
+X3NwYWNpbmc= 63328
+X2NsYXNzaWZpZXI= 63329
+IHJvYw== 63330
+Q2xhc3NpYw== 63331
+IOuztA== 63332
+X2ludmVyc2U= 63333
+LWFjcmU= 63334
+IEZJTA== 63335
+IERWRHM= 63336
+IHN3YWxsb3dlZA== 63337
+dmlsbGE= 63338
+IFJlcGxpZXM= 63339
+RmlyZWJhc2U= 63340
+IHBoeXNpcXVl 63341
+CXRoYXQ= 63342
+IFJlc2l6ZQ== 63343
+Pj4+Pj4+Pg== 63344
+TmVhcmx5 63345
+LmFydGlzdA== 63346
+LXs= 63347
+Pz4NCg0K 63348
+Lmxy 63349
+Lmly 63350
+KFsk 63351
+aWFubmU= 63352
+CW9i 63353
+LCcl 63354
+IGtuZXg= 63355
+IGNvcnJv 63356
+IE93ZW5z 63357
+PW5pbA== 63358
+bGF5cw== 63359
+YXBn 63360
+w5Y= 63361
+RU5P 63362
+SGVucnk= 63363
+SnVzdGlu 63364
+ZWxlY3RyaWM= 63365
+IE5vcmRpYw== 63366
+5oyH 63367
+IGV4Y2x1ZGVz 63368
+RXVyb3BlYW4= 63369
+IHRlbnRz 63370
+KFN0cmluZ1V0aWxz 63371
+KHBlZXI= 63372
+eXN0b3Jl 63373
+UG9ja2V0 63374
+ZnVlbA== 63375
+ZXR1cw== 63376
+IE1hcmlu 63377
+0YDRg9C6 63378
+6K+E 63379
+IFBlbnM= 63380
+IGluZWZmaWNpZW50 63381
+IGV0ZXJuaXR5 63382
+Licm 63383
+IFBhY2thZ2Vz 63384
+IEFwcENvbmZpZw== 63385
+IG11bHRpZA== 63386
+Y3Vsbw== 63387
+IGJvcnJvd2Vycw== 63388
+IERlYmJpZQ== 63389
+IGZyb250cw== 63390
+Sko= 63391
+ICIuLi8uLi8uLi8uLi8= 63392
+ICIrCg== 63393
+PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT0= 63394
+IEdhdmlu 63395
+IG1pc2g= 63396
+4pWR 63397
+X0FUVEFDSw== 63398
+SW5kZXBlbmQ= 63399
+4K+N4K4= 63400
+w6Fm 63401
+Z2Fycw== 63402
+IFBhcnRpY2lwYXRpb24= 63403
+VmVyYm9zZQ== 63404
+U3By 63405
+U3Zn 63406
+KFZhbHVlRXJyb3I= 63407
+IHJlY29uY2lsZQ== 63408
+CURCRw== 63409
+bWVldA== 63410
+IExvZ2luUGFnZQ== 63411
+LXVudXNlZA== 63412
+IGpvbmc= 63413
+IGFuY29yYQ== 63414
+INij 63415
+Plo= 63416
+PXc= 63417
+IFJlbm8= 63418
+dmll 63419
+b3Rpb25FdmVudA== 63420
+IExpc3RUaWxl 63421
+X1J1bnRpbWU= 63422
+IHVwaG9sZA== 63423
+IE9idGFpbg== 63424
+cHJvdmlkZWQ= 63425
+IERhdGVQaWNrZXI= 63426
+IENHSQ== 63427
+IEJsYWNrQmVycnk= 63428
+YWNobw== 63429
+IElzYWlhaA== 63430
+5pW0 63431
+IEFiZHVsbGFo 63432
+IHVwcA== 63433
+IHVybHBhdHRlcm5z 63434
+CXNpemVvZg== 63435
+IHBpc3NlZA== 63436
+IHByZWZlcnJlZFN0eWxl 63437
+QVBQRVI= 63438
+IFZC 63439
+IFRlcmVzYQ== 63440
+b2duaXRv 63441
+RU1Z 63442
+IGVsZWdhbmNl 63443
+IENsYXl0b24= 63444
+YXRpdm9z 63445
+IEFuYWxvZw== 63446
+IGdhdXNzaWFu 63447
+IEhpYmVybmF0ZQ== 63448
+W11b 63449
+IHN3ZWV0bmVzcw== 63450
+IE5pZWxzZW4= 63451
+IER1dGVydGU= 63452
+KHNlbA== 63453
+LCs= 63454
+IGV4dHJhb3JkaW4= 63455
+Zmxha2U= 63456
+W0RvdWJsZQ== 63457
+Ly8vDQo= 63458
+IG11Y2hhcw== 63459
+IEJyb2FkY2FzdGluZw== 63460
+QXNzb2NpYXRpb24= 63461
+ZXhlcmNpc2U= 63462
+LlJlbGF0aXZl 63463
+IHViaXF1aXRvdXM= 63464
+U0JBVENI 63465
+xLFuYQ== 63466
+LWZvb2Q= 63467
+IGNyeXN0YWxs 63468
+0YPQsQ== 63469
+ICd+ 63470
+INCR 63471
+IGR1bms= 63472
+IHpp 63473
+IE11Zw== 63474
+IGRlY2VwdGlvbg== 63475
+IEVtYWNz 63476
+CiAgICAKICAgIAo= 63477
+IMSRxrDhu6Nj 63478
+IFdvbHZlcw== 63479
+YW1lbnRp 63480
+ICcpWw== 63481
+Zm9ybWF0cw== 63482
+UmVjdg== 63483
+RGV0YWlsZWQ= 63484
+KEhXTkQ= 63485
+X3RyaWFs 63486
+YWdyYW50 63487
+T20= 63488
+Y29uc2Npb3Vz 63489
+IG9zcA== 63490
+cXXDqQ== 63491
+IGdvbg== 63492
+IG1lcmVrYQ== 63493
+YXJlbmRyYQ== 63494
+TWluZQ== 63495
+LmxpbmtlZGlu 63496
+IGZpZm8= 63497
+Lm1vbml0b3I= 63498
+IHJ1bmU= 63499
+bW5vcA== 63500
+IHNwZWN1bGF0ZQ== 63501
+ZWds 63502
+IHZhc2N1bGFy 63503
+LnRlY2g= 63504
+IG1hZ21h 63505
+IGxlc3Q= 63506
+dW1hbm4= 63507
+IERyaXZlck1hbmFnZXI= 63508
+IG9ydA== 63509
+IGxpbmdlcmluZw== 63510
+IG9zdHJlYW0= 63511
+IHNwYXJrbGluZw== 63512
+LmNvbm5lY3Rvcg== 63513
+IHRhaWxz 63514
+IGtlcm5lbHM= 63515
+VVNFUk5BTUU= 63516
+CWNj 63517
+IG9uU2VsZWN0 63518
+L01QTA== 63519
+dGFwZQ== 63520
+LmRqYW5nb3Byb2plY3Q= 63521
+R2VuZQ== 63522
+4oCZaW4= 63523
+L2ZpbHRlcg== 63524
+LWVudmVsb3Bl 63525
+IGFwcGxhdXNl 63526
+IHJlZ2lzdHJvcw== 63527
+IENvcnk= 63528
+b2ZmbGluZQ== 63529
+LXNob3Q= 63530
+bGVzYw== 63531
+b3RlbnQ= 63532
+IG51bWVyYXRvcg== 63533
+LmVmZmVjdA== 63534
+cGxhY2VtZW50cw== 63535
+IEFGQw== 63536
+LlNlcXVlbmNl 63537
+IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0K 63538
+eW50aGlh 63539
+IEdyaWZmaXRo 63540
+ZWxtYW4= 63541
+c2V0RGVzY3JpcHRpb24= 63542
+IE5pZ2h0cw== 63543
+Lm9yZGVycw== 63544
+IGAsCg== 63545
+IFNhbGFk 63546
+amlhbmc= 63547
+IHJlY3Vy 63548
+IFNUQVRJQw== 63549
+LXNwb25zb3JlZA== 63550
+eWxlbmU= 63551
+LGVtYWls 63552
+X18pKQ== 63553
+KSIpLg== 63554
+Q0VMTA== 63555
+YW1tZW50 63556
+TEFZ 63557
+LHN0ZA== 63558
+LnByZWY= 63559
+LkNvcg== 63560
+cmVkbw== 63561
+IEZ1Y2tlZA== 63562
+IHJ1c3M= 63563
+IGVzdGFibGlzaGVz 63564
+bnZhcmNoYXI= 63565
+LkdldEZpbGVOYW1l 63566
+IHBlbWI= 63567
+IFNhdWQ= 63568
+X3BhY2tldHM= 63569
+Lmludm9pY2U= 63570
+LmdldFRvdGFs 63571
+SG9tZUNvbnRyb2xsZXI= 63572
+IHTDtg== 63573
+YWdoZXI= 63574
+LmVudA== 63575
+LkFic29sdXRlQ29uc3RyYWludHM= 63576
+IGdlbnVz 63577
+IEJhYnlsb24= 63578
+IC4uLy4uLw== 63579
+IE1pZG5pZ2h0 63580
+IHdn 63581
+IGRhbmNlcg== 63582
+LWltbQ== 63583
+ZGlyZQ== 63584
+aGF6aQ== 63585
+Y2VydGlmaWNhdGU= 63586
+IG1EYXRh 63587
+IGN1cmVk 63588
+c3Zu 63589
+IkI= 63590
+aWJyZQ== 63591
+IGRyYWZ0cw== 63592
+Q2FwaXRhbA== 63593
+IGNvbmNpc2U= 63594
+IFBlYWNo 63595
+IHxc 63596
+IHBwbQ== 63597
+X2NvbnRhaW5z 63598
+QXV0b3I= 63599
+QXV0b1NpemU= 63600
+X2xi 63601
+IHNvbGVtbg== 63602
+IGZpbmdlcnQ= 63603
+IEluZGljYXRvcg== 63604
+IFN2 63605
+UGFyaw== 63606
+JHR5cGU= 63607
+X01JU1M= 63608
+YW5udWFs 63609
+UGFpZA== 63610
+bWFzdGVycw== 63611
+IFdE 63612
+IHZ1ZWw= 63613
+IGVqYWM= 63614
+CWdsdXQ= 63615
+IHVuZmluaXNoZWQ= 63616
+ZXN0ZWVt 63617
+Z3JvdXBCb3g= 63618
+UmVtb3Zpbmc= 63619
+IGVpbmlnZQ== 63620
+IFNjcmlwdHM= 63621
+Z2V0dG8= 63622
+LkhhbmRsZUZ1bmM= 63623
+Il0pLA== 63624
+IGRpc2FkdmFudGFnZXM= 63625
+LWZyb250 63626
+PnA= 63627
+c2V0T25DbGlja0xpc3RlbmVy 63628
+IGxhbmRsb3Jkcw== 63629
+IE3DvA== 63630
+IHByZXByb2Nlc3Npbmc= 63631
+KX0+ 63632
+LWNvbnRleHQ= 63633
+LGJvb2w= 63634
+UVVJVA== 63635
+ICIpIik7Cg== 63636
+IFdlYnNpdGVz 63637
+IENoYXJsb3R0ZXN2aWxsZQ== 63638
+TGF0Y2g= 63639
+LmRpcmVjdGl2ZQ== 63640
+IEh1ZmZpbmd0b24= 63641
+X2RpcnR5 63642
+ZXhwaXJhdGlvbg== 63643
+IFRQTQ== 63644
+IGVkeA== 63645
+IFdlYkRyaXZlcldhaXQ= 63646
+IGFkbWlyZWQ= 63647
+IGxpc3RlbnM= 63648
+IFZpbA== 63649
+ZGlmZmVyZW50 63650
+IGxpdmVsaWhvb2Q= 63651
+IFdhcmNyYWZ0 63652
+IHBvc2ljaW9u 63653
+IGltcGVhY2htZW50 63654
+SmF5 63655
+IHBvc2l0aXZlcw== 63656
+IGp1bmdl 63657
+IFNNQg== 63658
+L2luY2x1ZGVz 63659
+KCcuLi8uLi8uLi8= 63660
+QXJndW1lbnROdWxsRXhjZXB0aW9u 63661
+ZGVzY3JpY2Fv 63662
+QUJDREU= 63663
+LUFB 63664
+IGludmFkZWQ= 63665
+IGFtZXJpY2E= 63666
+dWVkZQ== 63667
+IFBoYXNlcg== 63668
+IHNjb3Jlcg== 63669
+IGRpc2NvdXJhZ2Vk 63670
+dGhpbg== 63671
+IGFiZG9tZW4= 63672
+IElQUA== 63673
+IEhhbXB0b24= 63674
+L0RlbGV0ZQ== 63675
+W3NyYw== 63676
+Q1N0cmluZw== 63677
+IE51bg== 63678
+IGVwaXRo 63679
+4oC7 63680
+LnRhYmxlcw== 63681
+IEhlaW4= 63682
+IHdoaXJs 63683
+IGNsYXJpZmljYXRpb24= 63684
+IHdlZGdl 63685
+IGjDpHI= 63686
+IFRpbmE= 63687
+IHRod2FydA== 63688
+IENvc3R1bWU= 63689
+aW9uYWdl 63690
+Q29k 63691
+X2FjbA== 63692
+IHJlc2g= 63693
+IE1lcmN5 63694
+IERpeG9u 63695
+IGRlc2Fycm9sbA== 63696
+VmlyZ2lu 63697
+KiopJg== 63698
+IExlbm92bw== 63699
+IGVyYXNlZA== 63700
+ZW50aW9ucw== 63701
+IHNsaXBwaW5n 63702
+5Zub 63703
+IGNyYXZpbmc= 63704
+cGxhbnRz 63705
+IGdldHRleHQ= 63706
+IG1hc3NpdmVseQ== 63707
+IFJlbmFtZQ== 63708
+Lmhlcm8= 63709
+44K7 63710
+IHRvbWFy 63711
+IENPU1Q= 63712
+IFByYWN0aWNlcw== 63713
+Lk1lZGlhVHlwZQ== 63714
+IEZ1bmRpbmc= 63715
+RmluZQ== 63716
+aWdlcmlh 63717
+VW5j 63718
+IHN3YXBwaW5n 63719
+PicuCg== 63720
+aW50ZXJw 63721
+YXJ0aWZhY3Q= 63722
+IEJhZ3M= 63723
+LnZpZXdNb2RlbA== 63724
+cXVvdGVk 63725
+CUxvbmc= 63726
+X1NDT1JF 63727
+IHNhdnZ5 63728
+bmVsbGU= 63729
+a2zDpA== 63730
+Q291bnRz 63731
+2q8= 63732
+RmllbGRUeXBl 63733
+b2thYmxl 63734
+IFJUTA== 63735
+I2luZGV4 63736
+ICV7 63737
+IGFyaXN0 63738
+LkdldE1hcHBpbmc= 63739
+KEFkYXB0ZXJWaWV3 63740
+PSIiKQo= 63741
+IGRpc2lu 63742
+IFRvdWNoYWJsZU9wYWNpdHk= 63743
+IE1PWg== 63744
+IER1bm4= 63745
+Q2FwYWJpbGl0eQ== 63746
+YWtoc3Rhbg== 63747
+VUlWaWV3Q29udHJvbGxlcg== 63748
+KHNvY2tmZA== 63749
+IEphY3F1ZXM= 63750
+PXRr 63751
+YXJQYXJhbXM= 63752
+Y29uZGE= 63753
+IGFkdm9jYXRlZA== 63754
+IHBlbmV0cmF0ZQ== 63755
+SkVDVElPTg== 63756
+IOuwmA== 63757
+IEZJTkQ= 63758
+IGVhcm5z 63759
+YXBwZW4= 63760
+6rE= 63761
+IHRocm91Z2hwdXQ= 63762
+IHBlbnNpb25z 63763
+IGZ1c3M= 63764
+SFRUUFJlcXVlc3Q= 63765
+bnV0cw== 63766
+b2NodA== 63767
+LWVzdGFibGlzaGVk 63768
+IEFMSUdO 63769
+IGpzcGI= 63770
+RGlzcA== 63771
+X2VtYmVkZGluZ3M= 63772
+IHJlcHQ= 63773
+IFlvcmtlcg== 63774
+w7JuZw== 63775
+IGpvdXJuZXlz 63776
+IEFwcHJvdmFs 63777
+CVNFTEVDVA== 63778
+KEdyYXBo 63779
+0LzQuA== 63780
+IGRvbGxz 63781
+IHNleGlzdA== 63782
+IHBhbnM= 63783
+IG1wbA== 63784
+IG9wZXJhdGl2ZQ== 63785
+IFRvcnJlbnQ= 63786
+WU0= 63787
+IFBhc3Npb24= 63788
+5pat 63789
+LmNvbXBpbGVy 63790
+CUNTdHJpbmc= 63791
+PWNvbG9y 63792
+b3JpYW5DYWxlbmRhcg== 63793
+IEtub2Nr 63794
+IGhhaWxlZA== 63795
+L3N0YXRl 63796
+IHNldHVwdG9vbHM= 63797
+IE1hcmU= 63798
+IHN5bmNocm9uaXpl 63799
+IFN3aXBl 63800
+IGdhbWJsZQ== 63801
+LCcnXV1dLAo= 63802
+IGRlZmVjdGl2ZQ== 63803
+X09CSkM= 63804
+IGRlbmlt 63805
+IHRhZA== 63806
+IEtpbWJlcg== 63807
+IG5ldXJvbG9naWNhbA== 63808
+w6puY2lhcw== 63809
+CWNi 63810
+LnNldFBhc3N3b3Jk 63811
+IFBsZWFzYW50 63812
+IFBoaQ== 63813
+LXRhZ3M= 63814
+IGNvbnRhZw== 63815
+IENvcmFs 63816
+IGRpc3RyYWN0 63817
+aXRpemVy 63818
+IHN1bnJpc2U= 63819
+c2V0SWQ= 63820
+IENoZW5uYWk= 63821
+IE9ncmU= 63822
+X0hJU1RPUlk= 63823
+UFJFU1NJT04= 63824
+X1NVRkZJWA== 63825
+ZHVwbGljYXRl 63826
+LmF1dGhTZXJ2aWNl 63827
+IHNwYWNlZA== 63828
+IEJlbmdhbHM= 63829
+U29sdmVy 63830
+IGJ1cmVhdWNyYWN5 63831
+X2hpdHM= 63832
+INGC0LjQvw== 63833
+IGPDqQ== 63834
+IGRpc2dyYWNl 63835
+6KeS 63836
+aXNPcGVu 63837
+Q2hlbQ== 63838
+X2xpY2Vuc2U= 63839
+X2hvc3RuYW1l 63840
+X0JSRUFL 63841
+IGZpZXJ5 63842
+OkQ= 63843
+L2xpbnV4 63844
+VGl0dWxv 63845
+UmFkaWFucw== 63846
+aXpvbnM= 63847
+UmFt 63848
+b2RpYW4= 63849
+aWFuZ2xl 63850
+IG5pbmph 63851
+RXZlcnlib2R5 63852
+KCI+ 63853
+IHRha8W8ZQ== 63854
+IGdyb3VuZGJyZWFraW5n 63855
+IGRpcmln 63856
+SFRNTEVsZW1lbnQ= 63857
+IFVuY29tbWVudA== 63858
+Y2hlaW4= 63859
+IOeUn+WRveWRqOacn+WHveaVsA== 63860
+JSIK 63861
+IHRpcG9z 63862
+Q2hhckNvZGU= 63863
+IFByb2R1Y3Rv 63864
+ZmFpdA== 63865
+J2w= 63866
+LXRodW1ibmFpbA== 63867
+dXN1 63868
+X2Zvcm11bGE= 63869
+LlRPUA== 63870
+LmJ1eQ== 63871
+IG1pZXV4 63872
+Q2VudHVyeQ== 63873
+cGVp 63874
+IHRic3A= 63875
+LVBhY2lmaWM= 63876
+b2dp 63877
+IGZhdHRv 63878
+IGZhbnRhc3Q= 63879
+IFNBTEU= 63880
+LmFkcw== 63881
+IHBpbGxhcnM= 63882
+X3RyaXA= 63883
+IHR1YQ== 63884
+IGFwZWxsaWRv 63885
+LnNldENlbGxWYWx1ZQ== 63886
+ICgoXw== 63887
+IE5pbmE= 63888
+PGM= 63889
+aW5pdW0= 63890
+ZGZ1bmRpbmc= 63891
+LXdvcmtpbmc= 63892
+IEVzdGFkb3M= 63893
+IE1hbGk= 63894
+PGY= 63895
+dXJhbmNlcw== 63896
+cGFnaW5h 63897
+X1BL 63898
+IHVuYXJtZWQ= 63899
+b2dnbGVk 63900
+Q2FuZGlkYXRl 63901
+UmF0aGVy 63902
+IGZyYW5jaGlzZXM= 63903
+IGNvdmVuYW50 63904
+wqo= 63905
+aXBwaW5lcw== 63906
+R3Vu 63907
+LWZlaXJh 63908
+IGxpbmVhZ2U= 63909
+X0dSQU5URUQ= 63910
+Z2VucmVz 63911
+LkVsYXBzZWQ= 63912
+IGxhcmdv 63913
+0Js= 63914
+LXJlYWR5 63915
+X3Byb2Nlc3NlZA== 63916
+bGFuZ3M= 63917
+w7ptZXJvcw== 63918
+ZnE= 63919
+L25wbQ== 63920
+X3Nydg== 63921
+IGF0dGVuZGFudA== 63922
+aXZpZA== 63923
+ZXZpY2U= 63924
+QUJJ 63925
+KGJpbmFyeQ== 63926
+X1ZBTElEQVRF 63927
+IGFkZEl0ZW0= 63928
+X2NvZWY= 63929
+YWxlYg== 63930
+b2dyYXBoaWNhbGx5 63931
+Qm9yZGVyQ29sb3I= 63932
+IGFzc2F5 63933
+IGNhdGNoRXJyb3I= 63934
+IENocnlzbGVy 63935
+b2do 63936
+IGtleVZhbHVl 63937
+ZGVjaXNpb24= 63938
+LW9mZnM= 63939
+IGxpZWd0 63940
+KERhdGFUeXBl 63941
+IGlyaXM= 63942
+IGV1cA== 63943
+cmlnZXI= 63944
+b25pY2E= 63945
+IHJvcGVz 63946
+IG5hcnJvd2x5 63947
+IFF1YWRy 63948
+IGVwdWI= 63949
+ZXN0aW5hbA== 63950
+LXR1cm4= 63951
+IGxhbmdz 63952
+55uR5ZCs6aG16Z2i 63953
+IHF1ZWxsbw== 63954
+LGFyZ3M= 63955
+aWdhdGU= 63956
+IFNlZW1z 63957
+IGZvcnRl 63958
+Q0xJ 63959
+X0xPQURJTkc= 63960
+LlJ1bGU= 63961
+IHlvdXRocw== 63962
+KHh4 63963
+IEFzc3VtaW5n 63964
+YWdoZXR0aQ== 63965
+KQoKCgoK 63966
+IG9uT3B0aW9uc0l0ZW1TZWxlY3RlZA== 63967
+T2NjdXA= 63968
+IGRldHJpbWVudGFs 63969
+IGlubmF0ZQ== 63970
+IEJhcnJlbA== 63971
+dWVuY2lh 63972
+IG9uQmx1cg== 63973
+IGxpYnM= 63974
+W2xhc3Q= 63975
+IGNwZg== 63976
+LlRpbWVvdXQ= 63977
+ZXN0YXRpb24= 63978
+IHdpZWw= 63979
+IHV0aWxpemFy 63980
+IGRpc2d1aXNl 63981
+IER1bQ== 63982
+T0NJ 63983
+T05HTw== 63984
+ICg/LA== 63985
+IFBhdGlv 63986
+VmVydGV4QXJyYXk= 63987
+LmF1dGhvcml6YXRpb24= 63988
+cm96 63989
+IEhvcw== 63990
+LlNwYWNl 63991
+IFZpcnVz 63992
+KGtleXdvcmQ= 63993
+VE9DT0w= 63994
+X0NPTlRST0xMRVI= 63995
+IEJsb2NrZWQ= 63996
+IENob3A= 63997
+d2nEmQ== 63998
+XFJvdXRpbmc= 63999
+L3BhY2thZ2U= 64000
+IHBlcnN1YWRlZA== 64001
+YmVpdHM= 64002
+TENE 64003
+IG11Yw== 64004
+X0ZPUldBUkQ= 64005
+IG91dGxhdw== 64006
+IHphdw== 64007
+X3ZlaGljbGU= 64008
+IEplbnNlbg== 64009
+LkdyZWVu 64010
+IC8vLy8v 64011
+SVJDTEU= 64012
+LWJ1c2luZXNz 64013
+LkhpZGRlbg== 64014
+IGtvbm50ZQ== 64015
+cHE= 64016
+IHBhcmVjZQ== 64017
+IGxhbmRzY2FwaW5n 64018
+IERlY29yYXRpb24= 64019
+IEdSQQ== 64020
+X3Byb2ZpbGVz 64021
+IEZsZW0= 64022
+Q0xJQ0s= 64023
+IEZBSUxVUkU= 64024
+IGlvbnM= 64025
+X1RpbWVy 64026
+LkRvZXM= 64027
+IGJvdW5jaW5n 64028
+dXBweQ== 64029
+dWxpcw== 64030
+L2Fn 64031
+IEdhcm4= 64032
+IGh1ZA== 64033
+IHJlc3BvbmRlcg== 64034
+IHN0cmNocg== 64035
+IGNob2tl 64036
+IHN0YXNo 64037
+X2NoZWNrc3Vt 64038
+IHN0YW1wZWQ= 64039
+QEdldE1hcHBpbmc= 64040
+LkJ5dGVBcnJheQ== 64041
+IER5cw== 64042
+YXRlcm5pdHk= 64043
+KHJi 64044
+IGVkaXRUZXh0 64045
+IGVyZWN0aW9u 64046
+IGNlc3M= 64047
+X2V2ZXJ5 64048
+X2dhdGV3YXk= 64049
+ICciLg== 64050
+IHN0YWZmaW5n 64051
+IGludm9pY2Vz 64052
+aW5pY2lv 64053
+fV0sCg== 64054
+LHZhcg== 64055
+eWNpbg== 64056
+IERpb24= 64057
+ICUlCg== 64058
+Jywo 64059
+LXNwYW4= 64060
+IHRow6BuaA== 64061
+IGJvcm5l 64062
+IEthdGhsZWVu 64063
+6L+e5o6l 64064
+X2N1YmU= 64065
+IGluZm9ybWHDp8O1ZXM= 64066
+bmdlcg== 64067
+L0ZpbGU= 64068
+IGRhcmE= 64069
+IG1M 64070
+KioqKioqCg== 64071
+IG1hcmtpbmdz 64072
+YmJl 64073
+IHJlY3VycmVudA== 64074
+IFJhbmtpbmc= 64075
+X2ludGVncmFs 64076
+XT4K 64077
+IHVuYW5pbW91c2x5 64078
+IGRpcGxvbWF0cw== 64079
+IElPUw== 64080
+OyI+PD8= 64081
+IE1hdHRl 64082
+IFJhbGVpZ2g= 64083
+IEltcHJvdmU= 64084
+ZXhpc3RlbnQ= 64085
+IGZha2Vy 64086
+IEhpZ2hsYW5k 64087
+c3RlbQ== 64088
+LW1z 64089
+TGlzdE9m 64090
+Lkxpc3RlbmVy 64091
+KHdhaXQ= 64092
+X1JTVA== 64093
+VW5h 64094
+IG9jY3VwYXRpb25hbA== 64095
+LW1lbW9yeQ== 64096
+IFN1cmY= 64097
+IGJydXRl 64098
+X0VsZW1lbnQ= 64099
+ZGRkZA== 64100
+IERlY3Jl 64101
+LnBzaQ== 64102
+LWRldmVs 64103
+IE9uVHJpZ2dlckVudGVy 64104
+VG9EZWxldGU= 64105
+IGhlcmFsZA== 64106
+IHNvY2lhbGVz 64107
+IGJvb3N0ZWQ= 64108
+Lkl0b2E= 64109
+KiI= 64110
+IGFudGlkZXByZXNz 64111
+IE1hdmVy 64112
+X18pKQo= 64113
+KER1cmF0aW9u 64114
+ZXN0YXRl 64115
+YnJhdGU= 64116
+Q2xh 64117
+IOS4ig== 64118
+65CY 64119
+cmnDqHJl 64120
+YnJlYWtlcg== 64121
+X2xlZw== 64122
+fWVsc2VpZg== 64123
+X2Z1bmNz 64124
+dcOt 64125
+LnBhZ2VZ 64126
+Y3JlYXR1cmU= 64127
+IGNhbm5hYmlu 64128
+IEFzdHJv 64129
+bG9jYWxz 64130
+IExBUw== 64131
+X2NvbnZlcnNpb24= 64132
+IENSVUQ= 64133
+LnNraWxs 64134
+IHN0cmF0ZWdpc3Q= 64135
+LnBvbA== 64136
+KHNlZ21lbnQ= 64137
+IHBlZQ== 64138
+fSIpOwoK 64139
+LnByZXZpZXc= 64140
+SmFt 64141
+IGhlZnR5 64142
+aXZhdGluZw== 64143
+R3JpZENvbHVtbg== 64144
+IGN1ZGQ= 64145
+IGluamVjdGlvbnM= 64146
+IE5JTA== 64147
+LW9sZHM= 64148
+ZmxhdGlvbg== 64149
+IExlYWZz 64150
+IHNwaGVyaWNhbA== 64151
+IGZhbGxvdXQ= 64152
+YW1pbmVy 64153
+IDo6PQ== 64154
+LnBvaW50ZXI= 64155
+LU1hcnQ= 64156
+IG1hdHRl 64157
+IGNvcXVpbmU= 64158
+IGRpc2NvbnRpbnVlZA== 64159
+IFJFR0lPTg== 64160
+LlJpZ2h0VG9MZWZ0 64161
+IHNxdWVlemVk 64162
+X1BPSU5UUw== 64163
+YmVzdG9z 64164
+LWxhc3Rpbmc= 64165
+KHV0aWxz 64166
+PEJhc2U= 64167
+IHBhcmRvbg== 64168
+U3RyaWRl 64169
+Y2Ry 64170
+IG5hcnJhdG9y 64171
+dm9sdXRpb24= 64172
+IHVzZXJJbnB1dA== 64173
+X2NvbnRhY3Rz 64174
+KGVuZW15 64175
+IENoYW1iZXJz 64176
+emllbA== 64177
+IGJsb2NrU2l6ZQ== 64178
+QW5pbWF0aW9uc01vZHVsZQ== 64179
+IGltbWVyc2l2ZQ== 64180
+IG91dGluZw== 64181
+dWVzdG9z 64182
+VHdlZW4= 64183
+IGtlcA== 64184
+IHLDqXN1bHQ= 64185
+IEJvbGx5d29vZA== 64186
+RExM 64187
+IFN1cmVseQ== 64188
+LlJvd1N0eWxl 64189
+KHRt 64190
+X2dlbmVyYXRpb24= 64191
+IFN0aXI= 64192
+IGRhdGFTbmFwc2hvdA== 64193
+Y2h1cmNo 64194
+IGNvbmZpZGVudGlhbGl0eQ== 64195
+X3N1c3BlbmQ= 64196
+dmlw 64197
+IEthdGh5 64198
+44Km 64199
+IHZpb2xlbnRseQ== 64200
+cGV0cw== 64201
+IG1lc3NlZA== 64202
+IHRleHRib29rcw== 64203
+ICAgICAgICAJCQk= 64204
+5raI5oGv 64205
+IExhcmF2ZWw= 64206
+IEFyY2FkZQ== 64207
+IGVudGg= 64208
+IGJlbmlnbg== 64209
+X0RST1A= 64210
+LWVuYWJsZQ== 64211
+4oCdKS4= 64212
+dXZ3eHl6 64213
+X2xpc3Rpbmc= 64214
+IE5JQw== 64215
+44GV44GE 64216
+KCIuIiw= 64217
+LXJvdW5kZWQ= 64218
+LXBhY2Vk 64219
+cGF0cmljaw== 64220
+U2VsZQ== 64221
+LmdldEZpcnN0 64222
+LkVYSVQ= 64223
+ZXRlcm1pbmF0ZQ== 64224
+R3JhbQ== 64225
+Ly8qKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq 64226
+LmV4dGVybmFs 64227
+IHdyb25nZG9pbmc= 64228
+IEVsbQ== 64229
+IHNhbms= 64230
+VGVlbg== 64231
+IFRob21zb24= 64232
+cHJpb3I= 64233
+amV0YQ== 64234
+IEFEUw== 64235
+IFBlcnNpc3RlbmNl 64236
+IEZvbGs= 64237
+e1wi 64238
+Ym9uZA== 64239
+X1NQRUNJQUw= 64240
+X0xBVA== 64241
+b25la3Np 64242
+IG1vdGhlcmJvYXJk 64243
+IHNoZWFy 64244
+RnVsbFNjcmVlbg== 64245
+Kks= 64246
+KEJsdWVwcmludA== 64247
+TWV0aG9kSW5mbw== 64248
+QmVjb21l 64249
+IGhhaWw= 64250
+IERvYg== 64251
+IGdlbmVyb3NpdHk= 64252
+ID8iOwo= 64253
+IHdoaXNrZXk= 64254
+IHRoaW5uZXI= 64255
+IENw 64256
+IGludGVyc2VjdGlvbnM= 64257
+Q3JpdA== 64258
+cmFpc2Fs 64259
+cmVmZmVu 64260
+V2hlbmV2ZXI= 64261
+IGNvbW1lbmNlZA== 64262
+VHJhbnNmb3JtYXRpb24= 64263
+L3dyaXRl 64264
+PSIiIg== 64265
+KGxk 64266
+IG5vcnNr 64267
+QU1FTlQ= 64268
+LnNoYXJlZEluc3RhbmNl 64269
+X2hvdXNl 64270
+IGdsRW5hYmxl 64271
+6L2v 64272
+IG5hbw== 64273
+IGRlcG9zaXRpb24= 64274
+IGRpbm9zYXVycw== 64275
+IHRpbWVTdGFtcA== 64276
+X18pOwoK 64277
+LlJpYmJvbg== 64278
+IExpbmRzZXk= 64279
+OnVzZXI= 64280
+IMOA 64281
+X2Zvcm1z 64282
+bWluYXRpbmc= 64283
+IE9saXY= 64284
+IGTDqWJ1dA== 64285
+YmFyY29kZQ== 64286
+c2ltaWxhcg== 64287
+IHBsYXRlYXU= 64288
+IGluZGVt 64289
+UmVhbG0= 64290
+IGZlcnRpbGl6ZXI= 64291
+IGNhcGU= 64292
+IGNoYW1wYWduZQ== 64293
+IHNlbGZpZQ== 64294
+IHBsYWlubHk= 64295
+IGNhdGFzdHJvcGhl 64296
+IGJldHJheWVk 64297
+dmVyc2libGU= 64298
+VXBkYXRlVGltZQ== 64299
+Lk91dHB1dFN0cmVhbQ== 64300
+Ymlhc2Vk 64301
+Ym91bmNl 64302
+IFNwb3J0aW5n 64303
+Q29vcmRpbmF0b3I= 64304
+ZGV2ZWxvcGVycw== 64305
+IHRyYWNlcg== 64306
+IG11c3RhcmQ= 64307
+U1E= 64308
+X3Rlcm1pbmFs 64309
+IGNvb2xlZA== 64310
+IGF2b2lkYW5jZQ== 64311
+TG9naWNhbA== 64312
+IHllbGw= 64313
+X3JvdXRlcw== 64314
+IGFydGVyeQ== 64315
+IEJlYXJpbmdz 64316
+Lm12cA== 64317
+LkdVSQ== 64318
+VUlTY3JlZW4= 64319
+eW1t 64320
+aXTDpA== 64321
+KClbIg== 64322
+IEF6ZXJiYWk= 64323
+IGNvbmRpdGlvbmVy 64324
+IHdhZw== 64325
+IHNjYWxw 64326
+dmluY2lhbA== 64327
+b3dsZXI= 64328
+LicpOwoK 64329
+QkxVRQ== 64330
+IMKnwqc= 64331
+Qm9zdG9u 64332
+IExpbmtlZEhhc2hNYXA= 64333
+RG9jdW1lbnRhdGlvbg== 64334
+LkxlcnA= 64335
+IGRlbm5l 64336
+IGhlc2l0YXRpb24= 64337
+IENlbGVicml0eQ== 64338
+IEh5ZGU= 64339
+IGNvbW1hbmRpbmc= 64340
+YWNlbGx1bGFy 64341
+IHBhdmVtZW50 64342
+IEhhbW1vbmQ= 64343
+YXNzaWM= 64344
+UExVR0lO 64345
+IHJldm9rZWQ= 64346
+RG9jdW1lbnRv 64347
+LnBob3Rvcw== 64348
+IFdpbGxvdw== 64349
+IFZpa2luZw== 64350
+IHVwZnJvbnQ= 64351
+IExpZmV0aW1l 64352
+ICVb 64353
+RHJlYW0= 64354
+5aS0 64355
+IGFjY2VsZXJhdG9y 64356
+UGVyc29uYQ== 64357
+X3RvcGljcw== 64358
+77yJ44CB 64359
+IChfLg== 64360
+IHPDqWN1cg== 64361
+IEt3 64362
+X2Nhc2g= 64363
+IHNvb3RoaW5n 64364
+IExvdmVseQ== 64365
+IEhlcnM= 64366
+ZWxvbg== 64367
+TElDRU5TRQ== 64368
+X2NhY2hlZA== 64369
+LnNoYQ== 64370
+UkZD 64371
+LkZpbGVJbnB1dFN0cmVhbQ== 64372
+LUFs 64373
+IHVzZXJMaXN0 64374
+IG7DpHI= 64375
+SGlsbGFyeQ== 64376
+IHBhZ28= 64377
+LlBsdWdpbg== 64378
+IENvdmU= 64379
+X3lhbWw= 64380
+X3JzcA== 64381
+J3Bvc3Q= 64382
+LWR1cmF0aW9u 64383
+IHNlbnRpZG8= 64384
+IG1pbkhlaWdodA== 64385
+IHR1cnJldA== 64386
+LWVuZXJneQ== 64387
+IOeJ 64388
+0YDRg9Cz 64389
+b3RlY2E= 64390
+X3F1YWw= 64391
+U2VsZWN0aXZl 64392
+IEJFTE9X 64393
+CWFkbWlu 64394
+IH19LAo= 64395
+J3VzZXI= 64396
+U1ZH 64397
+IGN1bG8= 64398
+KFdvcmxk 64399
+LWJpbmRpbmc= 64400
+bmJy 64401
+IFNlbmRz 64402
+IHN1cHJlbWFjeQ== 64403
+IHNrYXRpbmc= 64404
+IGNyZWVr 64405
+IGFjY3VzYXRpb24= 64406
+YXBnb2xseQ== 64407
+LklERU5USVRZ 64408
+IG1hbmRhdGVk 64409
+IGdvd24= 64410
+IHdpZHRocw== 64411
+IExTVQ== 64412
+L3ZlcnNpb24= 64413
+IFJlYWRlcnM= 64414
+IFJvbmFsZG8= 64415
+IGJhZmY= 64416
+IGA7Cg== 64417
+R0xJU0g= 64418
+KGRvdA== 64419
+IE9wZXJhdG9ycw== 64420
+LlNjZW5lTWFuYWdlbWVudA== 64421
+bWVyYw== 64422
+X3JlcG9ydHM= 64423
+LWNlbnRyaWM= 64424
+IENlaWxpbmc= 64425
+PXsh 64426
+bW9ueQ== 64427
+IEFERFJFU1M= 64428
+5a+56LGh 64429
+TWF0Y2hpbmc= 64430
+IHVuaw== 64431
+IGtleUNvZGU= 64432
+ICcvJyk= 64433
+KWRhdGE= 64434
+IFZvbHVudGVlcg== 64435
+IGxheg== 64436
+IEd1YW5n 64437
+IENhbmRpZGF0ZXM= 64438
+RW5zdXJl 64439
+aWFnZQ== 64440
+c3VjYw== 64441
+Q2VydGFpbg== 64442
+IGxlZnRvdmVy 64443
+aW5pbg== 64444
+LWVsZW1lbnRz 64445
+cGlrZQ== 64446
+IHNsaWRlc2hvdw== 64447
+LnRvb2xTdHJpcFNlcGFyYXRvcg== 64448
+LnBoYXNl 64449
+IGVudGVydGFpbmVk 64450
+IENhcnJpZQ== 64451
+IE1vaGFtbWFk 64452
+LmxvZ2dlZA== 64453
+IHNjcm9sbFRvcA== 64454
+IEFiYmV5 64455
+aW1vbnk= 64456
+KHJlc3VsdFNldA== 64457
+IGFkaGVzaXZl 64458
+X0RBTUFHRQ== 64459
+IGlvY3Rs 64460
+YnJvd24= 64461
+SU5TVA== 64462
+LkNsb25l 64463
+IGxvb21pbmc= 64464
+RGVzZXJpYWxpemU= 64465
+IGx1eg== 64466
+cXJzdHV2d3h5eg== 64467
+LmlkZW50 64468
+SGVhdnk= 64469
+IGRpbw== 64470
+5piv5ZCm 64471
+IEZ1cm4= 64472
+6YKu 64473
+emltbWVy 64474
+44O844OJ 64475
+c3BlYWtlcg== 64476
+IEdlZA== 64477
+IHVuaWRlbnRpZmllZA== 64478
+SW50ZXJmYWNlT3JpZW50YXRpb24= 64479
+IFN1cnZpdm9y 64480
+ZGVlbg== 64481
+IEJvcmc= 64482
+dG9Eb3VibGU= 64483
+X2J3 64484
+IHB1Ymxpc2hlcw== 64485
+X0FMRVJU 64486
+YW5ncw== 64487
+aWVyZXM= 64488
+IGhlaQ== 64489
+IElDb25maWd1cmF0aW9u 64490
+IGNvbnN0aXR1dGVk 64491
+V0FUQ0g= 64492
+cHJpdmF0aW9u 64493
+IEdyYW5pdGU= 64494
+LlRleHRBbGlnbm1lbnQ= 64495
+X2t3 64496
+OyIsCg== 64497
+Y290 64498
+IE5ld2Fyaw== 64499
+cm9hY2g= 64500
+KW9iag== 64501
+Q29tcGlsYXRpb24= 64502
+Q2F0ZWdvcnlJZA== 64503
+LnNldFVzZXI= 64504
+aXZ5 64505
+IEltYWdpbmc= 64506
+aWdodGVk 64507
+IHdnZXQ= 64508
+IG1vdXRocw== 64509
+Lmxpbg== 64510
+IFJhZGlvQnV0dG9u 64511
+LkNtZA== 64512
+c3Nl 64513
+IG1lc2hlcw== 64514
+IFNvbGU= 64515
+LnJlY29yZHM= 64516
+IGFudGlz 64517
+KG1vbg== 64518
+INGH0LjRgdC70L4= 64519
+gq0= 64520
+IOyeiOuKlA== 64521
+QWxsQXJnc0NvbnN0cnVjdG9y 64522
+IHN1cnJlYWw= 64523
+IE1hcnJpZWQ= 64524
+IHhwYXRo 64525
+XGY= 64526
+QnJpbmc= 64527
+IHlhaG9v 64528
+IEV0c3k= 64529
+X2RhaWx5 64530
+IHRocm93YWJsZQ== 64531
+IFBsYXNtYQ== 64532
+L1B1YmxpYw== 64533
+aW1pemVCb3g= 64534
+IHZlcw== 64535
+IHRyb20= 64536
+X3Jocw== 64537
+LWFscGhh 64538
+IEFyYm9y 64539
+KSkt 64540
+RmlzaA== 64541
+ZmVlZHM= 64542
+IGNhbGY= 64543
+IFNlcmdlYW50 64544
+KGVudW0= 64545
+IFJhbXNleQ== 64546
+IElkZW50aWZ5 64547
+LmluaXRTdGF0ZQ== 64548
+IGZsdWN0dWF0aW9ucw== 64549
+X0FUVFJJQlVURVM= 64550
+IHB3bQ== 64551
+RVNB 64552
+Y3Bm 64553
+U2ltdWxhdGlvbg== 64554
+IHlvdXRoZnVs 64555
+IEluZmFudHJ5 64556
+IGdsYW5jZWQ= 64557
+IFByb3Blcg== 64558
+5LmJ 64559
+IEtyYWZ0 64560
+Q2l0 64561
+b29wcw== 64562
+PXVybA== 64563
+cG9zdGluZw== 64564
+ZGVjbGFyaW5n 64565
+IHBOb2Rl 64566
+SmF2YXNjcmlwdA== 64567
+CQkJCQoJCQkJCg== 64568
+LmNvb3JkaW5hdGVz 64569
+cmlldA== 64570
+IFNx 64571
+X0NBVA== 64572
+IFBhcGE= 64573
+YW5kaQ== 64574
+Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8v 64575
+TWVldGluZw== 64576
+IOyekA== 64577
+SW1hZ2Vu 64578
+w6lyaWVuY2U= 64579
+QWdncmVnYXRl 64580
+LnBvbHk= 64581
+IHdhdmVk 64582
+IGludmVycw== 64583
+c2VhcmNoTW9kZWw= 64584
+IHRyb2xscw== 64585
+W2xldmVs 64586
+IExvd2U= 64587
+dWxsbw== 64588
+KHBsYWNl 64589
+IE5BU0NBUg== 64590
+IG9yYml0YWw= 64591
+LnN0b3J5 64592
+IGF1dGhvcml0YXRpdmU= 64593
+LnRleHRWaWV3 64594
+IGFscGg= 64595
+X3JlZHVjZQ== 64596
+IEZyYW1lcw== 64597
+IEJyb20= 64598
+cmVkaQ== 64599
+KE1ldGhvZEltcGxPcHRpb25z 64600
+bWFjZW4= 64601
+VG90 64602
+IG1pZGQ= 64603
+2Y8= 64604
+IEJhc2VNb2RlbA== 64605
+IFZlZ2E= 64606
+ID8+Igo= 64607
+IFJpZ2lkYm9keQ== 64608
+LnNldENvbnRlbnRUeXBl 64609
+YWFT 64610
+QmFzZWxpbmU= 64611
+IGJsYW5rZXRz 64612
+c2Fw 64613
+IGNhc3VhbGx5 64614
+VW5pdmVycw== 64615
+IFRyYXk= 64616
+IEFpcmVz 64617
+IG1heFk= 64618
+X1BST1BFUlRJRVM= 64619
+IGhlbG1ldHM= 64620
+wqY= 64621
+X2Rlc2Ny 64622
+c2hpbnQ= 64623
+X0NQUA== 64624
+dW1v 64625
+YWRheQ== 64626
+KHBsb3Q= 64627
+ZW56eW1l 64628
+IEV4Y2VwdGlvbnM= 64629
+X3Zpc3VhbA== 64630
+Ol0KCg== 64631
+KHRhcmdldEVudGl0eQ== 64632
+cGhlcmVz 64633
+dW5hbg== 64634
+IHNlbG9u 64635
+d2ls 64636
+IFJlbmRlcmluZw== 64637
+S0M= 64638
+IGNvbnN0aXR1ZW5jeQ== 64639
+U0NSSUJF 64640
+ZXN5 64641
+IEZlbGxvd3NoaXA= 64642
+5Y+4 64643
+IGZ1dHVybw== 64644
+IGFybW9yZWQ= 64645
+bGlzdGU= 64646
+b3Jhcw== 64647
+bXVsdGlwbHk= 64648
+Z2VtZQ== 64649
+Y29lZg== 64650
+0L7QsdGA0LDQtg== 64651
+IERlbGl2ZXI= 64652
+ZW5nbw== 64653
+LnVzZXJTZXJ2aWNl 64654
+T05VUw== 64655
+Lm9ucmVhZHlzdGF0ZWNoYW5nZQ== 64656
+ICIvIiw= 64657
+YW1iaW8= 64658
+X1Byb2plY3Q= 64659
+Jyk/Pg== 64660
+IGZsaXBwaW5n 64661
+d29tZW4= 64662
+LkNyb3Nz 64663
+IGhvbGxhbmQ= 64664
+IGNpbmVtYXRpYw== 64665
+IHdoaXN0bGVibA== 64666
+IGxpbmd1aXN0aWM= 64667
+LkdldHRlcg== 64668
+IG3DpG5uZXI= 64669
+IExlZ28= 64670
+IFNjaHVtZXI= 64671
+YXNzZXNzbWVudA== 64672
+X2Noaw== 64673
+IHJlY29tbWVuZGluZw== 64674
+LnNjYWxh 64675
+IEd1YXJhbnRlZQ== 64676
+IEBf 64677
+LkFVVEg= 64678
+IHlQb3M= 64679
+bGF0ZXg= 64680
+IEFsYmVydG8= 64681
+5q2l 64682
+dGhvcmE= 64683
+4Li34LmI 64684
+VVJMRXhjZXB0aW9u 64685
+R2hvc3Q= 64686
+LlRvb2xiYXI= 64687
+IGVuZGlhbg== 64688
+6Zeo 64689
+c3RyYWN0aW9ucw== 64690
+RmlsZU5vdEZvdW5kRXhjZXB0aW9u 64691
+IHN0aW11bGF0aW5n 64692
+YnNlcnZpY2U= 64693
+YXTDs3Jpbw== 64694
+aXRpb3Vz 64695
+IGF1dGhTZXJ2aWNl 64696
+X1RSQU5TRkVS 64697
+IHJlZGlyZWN0VG8= 64698
+IG1lbnNlbg== 64699
+IFNQTA== 64700
+IMK7LA== 64701
+IGFjZXQ= 64702
+X0JhY2s= 64703
+4KSV 64704
+YWFj 64705
+IFJpb3Q= 64706
+X0ZC 64707
+IFph 64708
+UGxhdGU= 64709
+IGxhYmVsVGV4dA== 64710
+INCy0YDQtdC8 64711
+aHRvbg== 64712
+IE1jQQ== 64713
+IEFwcGVuZGl4 64714
+IEtvaw== 64715
+IGludGVydmlld2luZw== 64716
+X3NwZWxs 64717
+IFN1YmplY3Rz 64718
+IGJ1cm5lcg== 64719
+5a+8 64720
+aWxsaWFu 64721
+IGJ1bXBz 64722
+UGFzc2Vk 64723
+IENvbnRyaWJ1dG9y 64724
+WW8= 64725
+Ymxh 64726
+IHNvdXQ= 64727
+LmV4Yw== 64728
+Tm90aWZpZXI= 64729
+c2hpdg== 64730
+LlVuaXRUZXN0aW5n 64731
+dWVsbGVz 64732
+X1NMRUVQ 64733
+CW9wdHM= 64734
+IHByZXNjcmlwdGlvbnM= 64735
+IHJldmlzZQ== 64736
+RURJVE9S 64737
+IGFubsOpZXM= 64738
+X3BrZw== 64739
+IFRyYWNrcw== 64740
+4LmI4Liy 64741
+PWZvcm1z 64742
+LlJVTg== 64743
+IGFzZWc= 64744
+IHDDoQ== 64745
+IGplcw== 64746
+R3Jl 64747
+YWNy 64748
+T2ZmaWNpYWxz 64749
+dWtlcw== 64750
+Y29tcGFuaWVz 64751
+XFF1ZXJ5 64752
+IFByaW50YWJsZQ== 64753
+5a6i 64754
+X1ZP 64755
+IGRlaXg= 64756
+IGRldmljZUlk 64757
+IGRpc3R1cmJhbmNl 64758
+bmlzdA== 64759
+Lmlzbw== 64760
+cGFyYWxsZQ== 64761
+LWRlc2NyaWJlZGJ5 64762
+IExpZg== 64763
+IGJyZWFzdGZlZWRpbmc= 64764
+IGZlbWluaXN0cw== 64765
+bGVncm91bmQ= 64766
+IGRhbWU= 64767
+IGNvbXB1bHNvcnk= 64768
+TUVSQ0hBTlRBQklMSVRZ 64769
+LXJlc3VsdHM= 64770
+Zm9ybWVkVVJMRXhjZXB0aW9u 64771
+OlsK 64772
+LWludGVyZXN0 64773
+IHPDpA== 64774
+IG5vc3RhbGdpYQ== 64775
+IGNsYXJpZmllZA== 64776
+IFBIT1RP 64777
+IHJldmlzaXQ= 64778
+IGNhcHN1bGVz 64779
+IHNoaW5lcw== 64780
+IGNyYWZ0c20= 64781
+c3ViamVjdHM= 64782
+ICAgICAgICAgICANCg== 64783
+5LiN6IO95Li656m6 64784
+IFNjaHdhcnR6 64785
+cmV1 64786
+IG1hZHJpZA== 64787
+LnBlbmRpbmc= 64788
+IExJTg== 64789
+IHVuc3Q= 64790
+CW12 64791
+IHZpdmFzdHJlZXQ= 64792
+IHNwb2ls 64793
+w7hq 64794
+64u5 64795
+IGJ1ZW5h 64796
+IGRpZ2l0YWxXcml0ZQ== 64797
+c3Vicw== 64798
+IFVOSVZFUlM= 64799
+IFN1aWNpZGU= 64800
+PEd1aWQ= 64801
+LmVsZW0= 64802
+X2NvbnN0cnVjdA== 64803
+IGFtaWRzdA== 64804
+IOuP 64805
+LWVzdGVlbQ== 64806
+IEludGVncml0eQ== 64807
+LmZtbA== 64808
+T3V0T2ZCb3VuZHNFeGNlcHRpb24= 64809
+LVNlbWl0aXNt 64810
+QmV0YQ== 64811
+LWdvaW5n 64812
+U2VnbWVudHM= 64813
+IE1hZQ== 64814
+IFBlcnNvbmFsaXR5 64815
+dXJiYXRpb24= 64816
+5Y+z 64817
+IHNlcnZpY2luZw== 64818
+IGJpcG9sYXI= 64819
+X1NUQUdF 64820
+LkpQRw== 64821
+Jyl9fSI+ 64822
+aXNobHk= 64823
+SVZFUlk= 64824
+IEluc3BpcmVk 64825
+LnNlcnY= 64826
+KGRhdGFz 64827
+IGRpdmlkZXM= 64828
+PFJlYWw= 64829
+dmVydHVyZQ== 64830
+IG1vdGl2YXRpb25z 64831
+dmVydGU= 64832
+RU5DSA== 64833
+ZmRz 64834
+IHJldm9sdA== 64835
+d2VidG9rZW4= 64836
+aW5zdGVhZA== 64837
+CW9wdA== 64838
+IE1hcmlqdWFuYQ== 64839
+X2FkYw== 64840
+YmFv 64841
+W1NlcmlhbGl6ZUZpZWxk 64842
+IGdyYWZmaXRp 64843
+LWFvcw== 64844
+ZW1pYWg= 64845
+IGbDrXM= 64846
+IGV0aGlj 64847
+J2FsbA== 64848
+OmtleQ== 64849
+65Ok 64850
+IHJlc3RyaWN0aW5n 64851
+IFhIVE1M 64852
+ZXJlbw== 64853
+dW5kb3M= 64854
+CWVuZGlm 64855
+WzosOiw= 64856
+IHN0ZWhlbg== 64857
+YWtoaXI= 64858
+IGp1aWNlcw== 64859
+ZGF0YVNvdXJjZQ== 64860
+X21r 64861
+LmRlbGV0ZWQ= 64862
+Q29uZ3Jlc3M= 64863
+aW1tZWw= 64864
+RWxlY3RyaWM= 64865
+YW9z 64866
+IE92ZXJsYXk= 64867
+IEFDTFU= 64868
+cm5k 64869
+ZXNzZXM= 64870
+IEx1eGVtYm91cmc= 64871
+cGFyc2VGbG9hdA== 64872
+IGd1dHM= 64873
+Y2xhc3NpZmllZA== 64874
+IGRlZlN0eWxl 64875
+IFRjcA== 64876
+cGVhdGluZw== 64877
+Q2hhcnRz 64878
+X3Vy 64879
+X2xhdGVzdA== 64880
+KSEK 64881
+Y2F0aW9u 64882
+LkdldGVudg== 64883
+KGxvb3A= 64884
+IHVubA== 64885
+X2R0eXBl 64886
+emXFhA== 64887
+KEpOSUVudg== 64888
+LmZldGNob25l 64889
+IHNpZ21vaWQ= 64890
+IE9MRA== 64891
+IE1pbmlzdA== 64892
+7YE= 64893
+IEvDtg== 64894
+IGZyYWN0aW9ucw== 64895
+IHNpeg== 64896
+PT09PT0K 64897
+LlByaW50V3JpdGVy 64898
+X0FkZHJlc3M= 64899
+IEF1ZGllbmNl 64900
+Q29tbw== 64901
+IEJydWlucw== 64902
+LmFjdGl2aXRpZXM= 64903
+IGFuY2VzdHJ5 64904
+0YPQu9GM0YI= 64905
+CVJldHVybg== 64906
+cHVu 64907
+IGdyYXBlcw== 64908
+SUxvZw== 64909
+IGRpam8= 64910
+IFBlcmtpbnM= 64911
+IFZNd2FyZQ== 64912
+X2F1dGhlbnRpY2F0ZWQ= 64913
+w650cmU= 64914
+b3ZlcndyaXRl 64915
+IEhk 64916
+IGdhbGF4aWVz 64917
+YWNodQ== 64918
+SHJlZg== 64919
+W0Q= 64920
+IHBhcmNl 64921
+TGF0TG5n 64922
+X3BhdHRlcm5z 64923
+IFNIT1JU 64924
+IHJ1bW91cnM= 64925
+Y291bnR5 64926
+IEdSSUQ= 64927
+IFsv 64928
+IFNreXJpbQ== 64929
+RGF0YUdyaWRWaWV3VGV4dEJveENvbHVtbg== 64930
+IGNlbg== 64931
+IGN1Y3VtYmVy 64932
+LklOVA== 64933
+X0NPTkZJUk0= 64934
+IGN0bA== 64935
+cGVybA== 64936
+aWxsb3M= 64937
+IEFDQQ== 64938
+IEdlb3JnZXRvd24= 64939
+X2NhbGxhYmxl 64940
+IENyYWZ0cw== 64941
+L2Nv 64942
+IGluYm91bmQ= 64943
+IFRlY2huaXF1ZXM= 64944
+c2V0Q2hlY2tlZA== 64945
+IHBuYW1l 64946
+Y29tcHV0 64947
+U3RlZWw= 64948
+IGhhbmRoZWxk 64949
+IEFsYW0= 64950
+YWJzdHJhY3RtZXRob2Q= 64951
+6aKR 64952
+SU5Z 64953
+YmF0dGxl 64954
+X0VWVA== 64955
+IGNldXg= 64956
+IGF0b2Y= 64957
+IEFieXNz 64958
+X3ZhbGlkYXRvcg== 64959
+IGhhaXJz 64960
+VmVydGV4QXR0cmliQXJyYXk= 64961
+IGNvbW1vbnM= 64962
+LWJpbmQ= 64963
+TXVp 64964
+IGNvc21ldGljcw== 64965
+IG1pcmFj 64966
+Lm1hcmtlcg== 64967
+U0NBTEU= 64968
+LldvcmQ= 64969
+LXVs 64970
+IERpdmVyc2l0eQ== 64971
+IEREUw== 64972
+LmN3ZA== 64973
+X3h5eg== 64974
+IENvbXB1dGVz 64975
+KGNsaWNrZWQ= 64976
+VEVNUExBVEU= 64977
+IHpvbmluZw== 64978
+IGZpbnM= 64979
+IFBK 64980
+ZXh0Vmlldw== 64981
+Q2hhcmFjdGVyaXN0aWM= 64982
+aWdhdG9ycw== 64983
+IHByb2NsYWlt 64984
+IHByaXN0aW5l 64985
+IGRhdGFzdG9yZQ== 64986
+IGRpc2NvdXJhZ2U= 64987
+X25zZWM= 64988
+IG5pbmV0ZWVudGg= 64989
+IGNlbHVp 64990
+Sm9uYXRoYW4= 64991
+IGFtcGg= 64992
+IENyb3NzaW5n 64993
+IEh1bWFucw== 64994
+IEJvb2tlcg== 64995
+w6JjZQ== 64996
+Z2V0UG9zdA== 64997
+IE1vbnRlcg== 64998
+IEZsYXZvcg== 64999
+TWVkaWFUeXBl 65000
+IuKAlA== 65001
+IEFyY2hhZQ== 65002
+QHJldHVybg== 65003
+LWF3YXJl 65004
+b3J1 65005
+LVRoZQ== 65006
+YW1wbGVk 65007
+S0Y= 65008
+LlRlbXA= 65009
+IERyZQ== 65010
+KHtf 65011
+cG9seWdvbg== 65012
+IMOm 65013
+IERlZmVuZGVy 65014
+77yY 65015
+Xyks 65016
+LlVuc3VwcG9ydGVk 65017
+X14o 65018
+KElEQw== 65019
+JHY= 65020
+IHdvcnRobGVzcw== 65021
+IFNFRw== 65022
+aWxpa2k= 65023
+Tm9BcmdzQ29uc3RydWN0b3I= 65024
+IE1lcmNo 65025
+IG5vcA== 65026
+IGZvcmdldHRpbmc= 65027
+IGRvcGFtaW5l 65028
+anVhbA== 65029
+ZW9u 65030
+IFJlYXNvbnM= 65031
+c29ydEJ5 65032
+KCctJyw= 65033
+LXN5bmM= 65034
+ZWNlZG9y 65035
+S1A= 65036
+KGNvb3Jk 65037
+KENoYXQ= 65038
+XCQ= 65039
+ZXN0cmluZw== 65040
+Y2Vm 65041
+LmhhbmRsZUVycm9y 65042
+24zYrw== 65043
+0YHQug== 65044
+IGhhbmRj 65045
+ZWxpamtl 65046
+IFNwaXI= 65047
+IEJ1Y2tz 65048
+IFFSZWN0 65049
+U2V0Rm9udA== 65050
+LmV4ZWNTUUw= 65051
+OjoKCg== 65052
+IHN1aWNpZGFs 65053
+c2VlaW5n 65054
+IGNpZGVy 65055
+UHJvZ3Jlc3NEaWFsb2c= 65056
+IG1vbGRpbmc= 65057
+CXRyYWNl 65058
+IGVtcGhhc2l6ZXM= 65059
+IG11bHRpcGxlcw== 65060
+X1BU 65061
+X091dHB1dA== 65062
+Y2FwaXRhbA== 65063
+TmVlZHM= 65064
+X0RJUkVDVElPTg== 65065
+LmlzVmlzaWJsZQ== 65066
+IHJlc3Rl 65067
+IG92YXI= 65068
+KHNoYXJlZA== 65069
+LWNvbXBvc2U= 65070
+LmJhY2t3YXJk 65071
+CXJlY3Q= 65072
+QW1hemluZw== 65073
+LmRpZFJlY2VpdmVNZW1vcnlXYXJuaW5n 65074
+U0VSVklDRQ== 65075
+IEluanVyeQ== 65076
+QnJhaW4= 65077
+IGF1c2dl 65078
+KHBl 65079
+Ly8qKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKio= 65080
+b3JwdGlvbg== 65081
+X01BSUw= 65082
+b2hh 65083
+IHNubw== 65084
+IGJvaWxlZA== 65085
+aWxkZW5hZmls 65086
+IFdlbGZhcmU= 65087
+IFF1YXJ0eg== 65088
+IGNhcHRjaGE= 65089
+IFdFU1Q= 65090
+IE1hemU= 65091
+IGdyYXBoZW5l 65092
+IHBlcms= 65093
+IG1pc3RyZXNz 65094
+LkZvcm1TdGFydFBvc2l0aW9u 65095
+IGV4cGVyaW1lbnRhdGlvbg== 65096
+KikoKA== 65097
+IGJyb2FkY2FzdHM= 65098
+IHJlbW92ZUFsbA== 65099
+CUdVSQ== 65100
+5YOP 65101
+YWJjZGVmZ2hpamtsbW5vcA== 65102
+IHVuaW5z 65103
+QVNQ 65104
+K3c= 65105
+bXVy 65106
+IGRpbmU= 65107
+IGFyb3U= 65108
+IGVzY2FwZXM= 65109
+IFRvYmFjY28= 65110
+Lm5hbWVk 65111
+IFBhdHJlb24= 65112
+X0ZBQ0U= 65113
+X3NwaW5uZXI= 65114
+bW92aW5n 65115
+X3ZvdGVz 65116
+T2hpbw== 65117
+LmVuY29kaW5n 65118
+RGVncmVlcw== 65119
+IlRv 65120
+IHByZXN0aWdl 65121
+b3NwaGVyZQ== 65122
+IExhbmNhc3Rlcg== 65123
+77yX 65124
+IG9uQ2FuY2Vs 65125
+IEhJUw== 65126
+0J7RiNC40LHQutCw 65127
+IG9yY2hlc3Ry 65128
+IHJlZnJlc2hlZA== 65129
+RGF0aW5n 65130
+KG11 65131
+IEplZA== 65132
+IEVkaXRvcmlhbA== 65133
+U2V0QnJhbmNoQWRkcmVzcw== 65134
+Q3BwVHlwZURlZmluaXRpb24= 65135
+IEJyb254 65136
+IGdhdGhlcmluZ3M= 65137
+ICcnDQo= 65138
+cG9zdERhdGE= 65139
+IEZyYW0= 65140
+Q2xpcGJvYXJk 65141
+IFhQYXRo 65142
+cmF5cw== 65143
+IGJha2VyeQ== 65144
+IHJvd0NvdW50 65145
+IGxvd3M= 65146
+YW5kV2hlcmU= 65147
+X3ZlcnNpb25z 65148
+IEd1bm4= 65149
+IHdlZXI= 65150
+IGNvbnRleHR1YWw= 65151
+IEtleUNvZGU= 65152
+IFNhc2thdGNoZXdhbg== 65153
+IFBoaWxseQ== 65154
+IE1vdXRo 65155
+IGRvUG9zdA== 65156
+IHBlcmNlbnRpbGU= 65157
+IGJ1ZmZlclNpemU= 65158
+KGZyZXE= 65159
+JHNtYXJ0eQ== 65160
+aWVydGU= 65161
+aXNzYW50 65162
+X2Zwcw== 65163
+IGludGltYWN5 65164
+X2Jvb2tpbmc= 65165
+IGRlY29tcG9zaXRpb24= 65166
+dW5pY2lwaW8= 65167
+IE5TSW5kZXhQYXRo 65168
+IEtS 65169
+IHR1cmJpbmU= 65170
+LXByb20= 65171
+X0NBUlQ= 65172
+KGNvb3Jkcw== 65173
+ZWNvbQ== 65174
+IGNvd2FyZA== 65175
+IHdheXBvaW50 65176
+LUNvbGE= 65177
+IHByb2ZvdW5kbHk= 65178
+IEVSUA== 65179
+Ym91bmRhcnk= 65180
+IHBvb3Jlcg== 65181
+L2V4YW1wbGU= 65182
+IHJlbmNvbnRy 65183
+IG5pY2Vy 65184
+54E= 65185
+LWNoYWlu 65186
+IEVudGl0eVN0YXRl 65187
+IGdyYWRpbmc= 65188
+QUxJR04= 65189
+IFBpY2tz 65190
+LmFr 65191
+LXZlY3Rvcg== 65192
+IEVudHJpZXM= 65193
+IFNlcmdpbw== 65194
+ICoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq 65195
+T0RC 65196
+IOW9 65197
+IGNvcm9uYXJ5 65198
+IHNoYXZlZA== 65199
+IGFxdWU= 65200
+ZW1wbG95ZXI= 65201
+IHBhcmNo 65202
+IG1lYXN1cmFibGU= 65203
+IGJvaXM= 65204
+am9pbmluZw== 65205
+IHZvbGNhbm8= 65206
+Ok0= 65207
+LnRocmVzaG9sZA== 65208
+IERveWxl 65209
+dmVyYm9zaXR5 65210
+IOKWug== 65211
+IHNwb3VzZXM= 65212
+IHJlc3VtZXM= 65213
+TmF0 65214
+ek0= 65215
+X0VuYWJsZQ== 65216
+IFVTRUQ= 65217
+IENhcmV5 65218
+CWZw 65219
+UGF0cmljaw== 65220
+IE9zdw== 65221
+UG9zc2libGU= 65222
+LmxlYWRpbmc= 65223
+YWhydW5n 65224
+4pmqCgo= 65225
+CQkJCQkJCQkJIA== 65226
+44CC44CM 65227
+LmFkZEVkZ2U= 65228
+IGVjeA== 65229
+J0xCTA== 65230
+IFRDTA== 65231
+IGJpcnRocw== 65232
+IHRoZWF0cmljYWw= 65233
+IHBpag== 65234
+Z3JlYXRlcg== 65235
+IEZTdHJpbmc= 65236
+QkVE 65237
+7ZmY 65238
+LkNhc3Q= 65239
+Q1g= 65240
+L01haW4= 65241
+cGVhdGVy 65242
+IHBlcnN1YXNpdmU= 65243
+Y29udG8= 65244
+eGxzeA== 65245
+X0FCUw== 65246
+IEJ1bg== 65247
+bWFuYWdlZFR5cGU= 65248
+0LPQvg== 65249
+IFNjYWxh 65250
+cmFkb3I= 65251
+IHJlY29nbml6YWJsZQ== 65252
+dHJ1 65253
+IHRq 65254
+XE1hcHBpbmc= 65255
+X0JPQVJE 65256
+IHRvSnNvbg== 65257
+IGJvd2Vs 65258
+KWQ= 65259
+J30p 65260
+KGhXbmQ= 65261
+aHJz 65262
+Y2FudA== 65263
+X18oKQoK 65264
+IGludGVycm9nYXRpb24= 65265
+bGljYXRpdmU= 65266
+CQkJCgo= 65267
+IFR3aW5z 65268
+IEFP 65269
+QmlyZA== 65270
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg 65271
+cGVyaGFwcw== 65272
+b2ZpbGU= 65273
+IHBlbmM= 65274
+IHRyZWVOb2Rl 65275
+IHRvcGljYWw= 65276
+LXByaXZhdGU= 65277
+54m5 65278
+IERpc2N1c3M= 65279
+IGRlc24= 65280
+UnVh 65281
+LlZFUlRJQ0FM 65282
+44CN44Go 65283
+SUZPUk0= 65284
+IGNvdXJ0eWFyZA== 65285
+INGB0LXRgA== 65286
+ICMjIwo= 65287
+IGVtcG93ZXJpbmc= 65288
+IEZhY2lsaXRpZXM= 65289
+XCIsXA== 65290
+vZQ= 65291
+Ok9iamVjdA== 65292
+IFZvdGVz 65293
+aXNlbA== 65294
+IGV1Y2g= 65295
+b3JzdA== 65296
+KENsb25l 65297
+LmNvb2tpZXM= 65298
+JHRtcA== 65299
+KGluZGljZXM= 65300
+ZXJnZW5jeQ== 65301
+IHBsYWd1ZWQ= 65302
+IERpYQ== 65303
+eWNsaWM= 65304
+fSkp 65305
+6rK9 65306
+IGR1ZWw= 65307
+IGhldGVyb3NleHVhbA== 65308
+LmFkZENvbXBvbmVudA== 65309
+U0VDUkVU 65310
+bGVybw== 65311
+Y29uc3RyYWludHM= 65312
+IGdldENvbm5lY3Rpb24= 65313
+IExlYmVucw== 65314
+IFBvbg== 65315
+IENocm9uaWNsZXM= 65316
+ICAgICAgICAgICAgICAgICAgICAgICAgDQo= 65317
+IE1vdXJpbmhv 65318
+IG9jY3VwYW5jeQ== 65319
+X3NsYXZl 65320
+T1JJWkVE 65321
+CVk= 65322
+LmhpZ2hsaWdodA== 65323
+X3NlbnNpdGl2ZQ== 65324
+IHNwZWN0cm8= 65325
+LmVuY3J5cHQ= 65326
+IHNwb2lsZXJz 65327
+LlNpemVNb2Rl 65328
+IHByb2Zlc3Npb25hbGlzbQ== 65329
+Pklu 65330
+RXhwaXJlcw== 65331
+QXU= 65332
+IEhWQUM= 65333
+cmVsYXRpb25z 65334
+IEFUSw== 65335
+X0dFTkVSQUw= 65336
+IFNpZ2h0 65337
+IGtpdGNoZW5z 65338
+OlJlZ2lzdGVy 65339
+IGVkbQ== 65340
+IHRvbGVyYXRlZA== 65341
+IFNFU1NJT04= 65342
+aWVyeg== 65343
+IElOU1Q= 65344
+LnBhdGhz 65345
+IHBlcnBldHJhdG9ycw== 65346
+ZWJw 65347
+cGVjdGluZw== 65348
+ZWR1Y2F0ZWQ= 65349
+IFBpb25lZXI= 65350
+X1JFVg== 65351
+IGJ1c3R5 65352
+c3RhdHVzZXM= 65353
+UmVzcG9uZA== 65354
+c2h1ZmZsZQ== 65355
+IFRpbmRlcg== 65356
+RXhhY3RseQ== 65357
+aWxsaXNlY29uZA== 65358
+INC30L3QsNGH0LXQvdC40LU= 65359
+KEFjY291bnQ= 65360
+LiY= 65361
+aXpy 65362
+YXNzdW1pbmc= 65363
+CU9wdGlvbmFs 65364
+U2VuaGE= 65365
+IGVucm9s 65366
+dHVy 65367
+IGFycm9nYW50 65368
+IEpPYmplY3Q= 65369
+b2xpdGhpYw== 65370
+bWFwcGVk 65371
+IHRpcHBlZA== 65372
+LlVQREFURQ== 65373
+w6htZXM= 65374
+R05VQw== 65375
+V1g= 65376
+IG1vbmtz 65377
+LmJvcmRlcldpZHRo 65378
+IFNodXRkb3du 65379
+IEhhcm1vbnk= 65380
+Y2xhc3NpZmljYXRpb24= 65381
+IGRlcXVldWVSZXVzYWJsZUNlbGw= 65382
+IF07DQo= 65383
+Lkdlbg== 65384
+IGxhdm9ybw== 65385
+IExlb25hcmRv 65386
+ICYp 65387
+IGRlcG9pcw== 65388
+IFZvbHQ= 65389
+RXRo 65390
+IExlb25l 65391
+IE5lZGVybGFuZA== 65392
+IEVYVFJB 65393
+UmVzb2x2ZWQ= 65394
+IHBlbmluc3VsYQ== 65395
+X1ZN 65396
+R2Vy 65397
+2KfYrw== 65398
+LnByb21wdA== 65399
+LmFsaWdu 65400
+aW5nZ2E= 65401
+ZmlsbXM= 65402
+SEFORExF 65403
+IGNhcnRz 65404
+KFNvbWU= 65405
+PEF1ZGlv 65406
+IGVubGFyZ2VtZW50 65407
+IGdyb2Nlcmllcw== 65408
+LWhvbGRlcg== 65409
+IGlycml0YXRpb24= 65410
+Q29tbXVuaWNhdGlvbg== 65411
+IHByaW1hcmllcw== 65412
+aHR1Yg== 65413
+X2luaWNpbw== 65414
+IGNvb3JkaW5hdGluZw== 65415
+KHF1 65416
+IGZhaXM= 65417
+IHZpc3Rv 65418
+Z3VpZGVk 65419
+IHZsYW4= 65420
+IGVzcHJlc3Nv 65421
+w6h0ZQ== 65422
+c2VoZW4= 65423
+X3Blbmc= 65424
+IHJvb2Zpbmc= 65425
+IEFsaXZl 65426
+QXhpc1NpemU= 65427
+IHN0dW4= 65428
+IHJlc3RlZA== 65429
+dWxsZXRz 65430
+IE1hbGF5c2lhbg== 65431
+LFVuaXR5RW5naW5l 65432
+IGVudnk= 65433
+J107DQoNCg== 65434
+IE9zdA== 65435
+X2p1bXA= 65436
+IGNvbnRyYXNlw7Fh 65437
+Ing= 65438
+CVBhZ2U= 65439
+KVsi 65440
+IFNJUA== 65441
+IEdlb2dyYXBoaWM= 65442
+IGNhdWN1cw== 65443
+X1RFUg== 65444
+4oCdOw== 65445
+UG9zdEV4ZWN1dGU= 65446
+aW1zaG93 65447
+IENPTVBBTlk= 65448
+IE5lYWw= 65449
+IEhlYXJpbmc= 65450
+KGFjdG9y 65451
+Qmlk 65452
+LlBS 65453
+LlByb2R1Y3Rz 65454
+IEVtbQ== 65455
+IOab 65456
+IHB1bHNlcw== 65457
+X0VW 65458
+L2V4cA== 65459
+X21vdGlvbg== 65460
+IGdiYw== 65461
+IG5hdmlnYXRpb25Db250cm9sbGVy 65462
+IENvdXJ0cw== 65463
+IEljb25EYXRh 65464
+d3U= 65465
+X3Jm 65466
+IFJhZ2U= 65467
+LWZsYXQ= 65468
+IEhpbXNlbGY= 65469
+X2NodW5rcw== 65470
+IG92ZXJzaA== 65471
+IGNpZg== 65472
+KElz 65473
+cGVha2Vy 65474
+IENQVXM= 65475
+aXJlY3Rvcg== 65476
+LHRpdGxl 65477
+LnNldERlc2NyaXB0aW9u 65478
+IGVhcnRocXVha2Vz 65479
+IHdu 65480
+Z2x5cGg= 65481
+dWx1bWk= 65482
+IHNwZWVkeQ== 65483
+IGVzcGFjaW8= 65484
+IGVtdWxhdGU= 65485
+IFwiJA== 65486
+X0lORg== 65487
+Y2FsbG9j 65488
+LXF1ZXJ5 65489
+KHZhbHM= 65490
+IHNlYWI= 65491
+IGhhdm9j 65492
+IEludGVyc3RhdGU= 65493
+IHRyaWFuZ3VsYXI= 65494
+YmluZGluZ3M= 65495
+CQkJCQkgICAgIA== 65496
+IAkg 65497
+YmNyeXB0 65498
+IGNyZWRpdG9ycw== 65499
+IHNlbWlm 65500
+bGxl 65501
+aWVuemE= 65502
+IEtlbGxlcg== 65503
+IG1vbnN0cg== 65504
+IE1hcmNvcw== 65505
+KHJlaW50ZXJwcmV0 65506
+IGhpdmU= 65507
+U2Ny 65508
+X2hyZXN1bHQ= 65509
+IOyhsA== 65510
+IFNxbERhdGFSZWFkZXI= 65511
+YW5ub3VuY2U= 65512
+X3ByZWZlcmVuY2Vz 65513
+IHRydXN0cw== 65514
+RXJvdA== 65515
+LXdvcmtlcg== 65516
+IHR3ZWVu 65517
+IFN0cmVldHM= 65518
+gq3soJw= 65519
+IEZyYW56 65520
+IOKApi4= 65521
+VUlUZXh0RmllbGQ= 65522
+LmdldEl0ZW1z 65523
+IHRvbHVh 65524
+4oCcT3Vy 65525
+IHPhu5E= 65526
+IHZpcnR1ZXM= 65527
+IHBvdWx0cnk= 65528
+PXJvdw== 65529
+Y29kZWQ= 65530
+Tm9TdWNo 65531
+IGtvZA== 65532
+bHNp 65533
+IGtldG8= 65534
+IGdyb3VwTmFtZQ== 65535
+YXNu 65536
+IHVuY29tcA== 65537
+IHRleHRpbGU= 65538
+dG9vbFN0cmlw 65539
+LlBvcGVu 65540
+IHByb3N0aXR1dGU= 65541
+IHByb21vdGVy 65542
+Ijt9Cg== 65543
+IGNvbGxpZGVy 65544
+QnJva2Vy 65545
+ZGF0YXNldHM= 65546
+CU5TU3RyaW5n 65547
+YW5nbGVy 65548
+UklFUw== 65549
+YXRvbXM= 65550
+IHJlbmRleg== 65551
+YXBv 65552
+IOuE 65553
+Lmdj 65554
+IFNPTUU= 65555
+IGZnZXRz 65556
+R0xF 65557
+IHphbA== 65558
+IE9wcG9zaXRpb24= 65559
+aGFuZGxlU3VibWl0 65560
+X21hdGg= 65561
+IHNwcmU= 65562
+IHNob3J0ZW5lZA== 65563
+IGNhdmVz 65564
+U01T 65565
+LWNvbnNjaW91cw== 65566
+IFNhdmVz 65567
+LkJhY2tncm91bmRJbWFnZUxheW91dA== 65568
+IGVsZWN0cm9tYWduZXRpYw== 65569
+KGl0ZXJhdG9y 65570
+IHVuYmU= 65571
+amVjdG9yaWVz 65572
+IG1lZGlhbnRl 65573
+IMOubnQ= 65574
+Iiwt 65575
+IEFTTQ== 65576
+6K6w5b2V 65577
+IGNvbmZpbmVtZW50 65578
+4oCmCgoK 65579
+RXhjZXB0aW9ucw== 65580
+LW1ham9y 65581
+IFZhbmlsbGE= 65582
+IExPQ0FUSU9O 65583
+IGVsdXNpdmU= 65584
+VUFSSU8= 65585
+IElOTElORQ== 65586
+IHByb2R1Y3ROYW1l 65587
+X3F1ZXJpZXM= 65588
+Li4uIjsK 65589
+IFhpYW8= 65590
+V2luZG93VGl0bGU= 65591
+bGV0dGVz 65592
+IHBlcnBldHVhbA== 65593
+U2V2ZXJpdHk= 65594
+IEFjaGlldmVtZW50 65595
+w6JuY2lh 65596
+IHJlbWluZGVycw== 65597
+c29ydGFibGU= 65598
+IGFmZm9yZGVk 65599
+IGluZmx1ZW5jaW5n 65600
+IFR1bm5lbA== 65601
+LmxlYXJuaW5n 65602
+IFF1w6k= 65603
+cGhldGFtaW5l 65604
+LkJBRA== 65605
+Lm1ldGFtb2RlbA== 65606
+LWRldmljZQ== 65607
+IEtvbnRha3Q= 65608
+4pSB4pSB 65609
+LXN1bW1hcnk= 65610
+KCc8Pw== 65611
+KTw9 65612
+IHdpc2VseQ== 65613
+X290 65614
+Om1vZGVs 65615
+IFVX 65616
+IE9wZW5TU0w= 65617
+IEpwYVJlcG9zaXRvcnk= 65618
+Q29uZXhpb24= 65619
+VE9U 65620
+LmNyZWF0ZWRBdA== 65621
+KHRyYWluaW5n 65622
+IGJpc2hvcHM= 65623
+IHZlbnR1cmVz 65624
+LkVucXVldWU= 65625
+IFRoZXJtYWw= 65626
+IEJyZXdlcnk= 65627
+b3Rlbg== 65628
+IEZhdGFs 65629
+X3N1cHBseQ== 65630
+IGNvbmRpdGlvbmVk 65631
+IHN1cGVyaW9yaXR5 65632
+IElicmFoaW0= 65633
+IGNvcnBv 65634
+dW91c2x5 65635
+IFByYWN0aWNhbA== 65636
+Ly9b 65637
+IEFmcmljYW5z 65638
+IEJhaHJhaW4= 65639
+IHN0ZXJpbA== 65640
+IENsYXNzTm90Rm91bmRFeGNlcHRpb24= 65641
+LlJlZ2lvbg== 65642
+IHRyYW5zaXRpb25hbA== 65643
+IGludGVycHJldGluZw== 65644
+LlNvdW5k 65645
+IGZyb250YWw= 65646
+IGhhcnZlc3Rpbmc= 65647
+fn5+fn5+fn5+fn5+fn5+fn5+fn5+fn5+fn5+fn5+fn4= 65648
+YXRhaXJl 65649
+Lkh0dHBTdGF0dXM= 65650
+S00= 65651
+IEVyb3Rpc2NoZQ== 65652
+IGVyb3Rpc2tl 65653
+RmlnaHQ= 65654
+UGFja2FnZU5hbWU= 65655
+IENBQ0hF 65656
+d2luZ0NvbnN0YW50cw== 65657
+IFppbW1lcm1hbg== 65658
+L2Nhcg== 65659
+IFF1cmFu 65660
+TWV0YWw= 65661
+IHVzZXJNYW5hZ2Vy 65662
+IG1hc3Rlcnk= 65663
+KFVVSUQ= 65664
+IHZpZXdXaWxsQXBwZWFy 65665
+IHN1bW1lZA== 65666
+KC0o 65667
+ICAgICAgIAoK 65668
+VGFrZW4= 65669
+IGNsb2Nrd2lzZQ== 65670
+IENhZsOp 65671
+KGxldHRlcg== 65672
+IENyb3NzUmVm 65673
+IEFzdG9u 65674
+IEFzc2VtYmx5VmVyc2lvbg== 65675
+6Z2e 65676
+bnRz 65677
+ICQoJ1s= 65678
+X1JBVElP 65679
+aWNpZW50ZQ== 65680
+IHJpY2h0aWc= 65681
+IHBlZGln 65682
+KGl4 65683
+0YHRi9C7 65684
+QXNzaWduYWJsZUZyb20= 65685
+Ym91bmRlZA== 65686
+IGFsa2Fs 65687
+X3ByaWNlcw== 65688
+IGfFgg== 65689
+YW5jaGlzZQ== 65690
+X3JlY2VpdmVy 65691
+SUdBVElPTg== 65692
+X3B1bGw= 65693
+IFN0YXRpc3RpY2Fs 65694
+X3Rvb2xiYXI= 65695
+YW1pZGU= 65696
+IEFzeW5jVGFzaw== 65697
+cmV0YQ== 65698
+IOyi 65699
+IFJFQUxMWQ== 65700
+IGJ1cnN0cw== 65701
+IElucXVpcnk= 65702
+IGJpZ290 65703
+c2FuaXRpemU= 65704
+IEhvbWVy 65705
+UXXDqQ== 65706
+IFJvdXRpbmc= 65707
+LmNvbGxlY3Rpb25WaWV3 65708
+IEJpbGxpb24= 65709
+U1RSVUNUT1I= 65710
+LmVqYg== 65711
+IGVuY2g= 65712
+LnNldFRpbWVvdXQ= 65713
+UnVi 65714
+LXJvYWQ= 65715
+Lm91dHB1dHM= 65716
+Y29udGVzdA== 65717
+IHNwaGVyZXM= 65718
+IHJlc3VycmVjdA== 65719
+Ii4i 65720
+IElyaXM= 65721
+IOya 65722
+IFhL 65723
+IFJhcml0eQ== 65724
+IElTZXJ2aWNl 65725
+YXRoYQ== 65726
+IOWH 65727
+IHByZXZhaWw= 65728
+CXBw 65729
+Lkxv 65730
+Z2V0V2lkdGg= 65731
+IHd3 65732
+IHdpY2h0aWc= 65733
+QEdldHRlcg== 65734
+IEpheXM= 65735
+IHNwZWN1bGF0aXZl 65736
+KGF0dA== 65737
+IHRlZGlvdXM= 65738
+IHNjcmF0Y2hlcw== 65739
+IHBlbMOtY3Vs 65740
+IGJvcm91Z2g= 65741
+IG3Dsw== 65742
+UmVwcmVzZW50 65743
+YXRvcml1bQ== 65744
+KENhbWVyYQ== 65745
+IGNvbHVtbk5hbWU= 65746
+IHJlaXRlcmF0ZWQ= 65747
+IENhc3Rpbmc= 65748
+LmdldEhlYWRlcg== 65749
+IOKAnFs= 65750
+IEp1aWNl 65751
+Y2h1 65752
+LkhUTUw= 65753
+IEFudHdvcnQ= 65754
+R0x1aW50 65755
+CUl0ZXJhdG9y 65756
+IEFOQUw= 65757
+IHVucG9wdWxhcg== 65758
+KExvY2FsZQ== 65759
+IG1pdGlnYXRpb24= 65760
+IGFkcmVz 65761
+4bq3 65762
+fSx7Cg== 65763
+IFNjaHdhcg== 65764
+X1BBSVI= 65765
+PigpLAo= 65766
+b3V2 65767
+IEFsZg== 65768
+eEVG 65769
+55yB 65770
+IGVzY3Jp 65771
+TE9VUg== 65772
+U0VMRg== 65773
+IFRtYXg= 65774
+VHJl 65775
+bG90cw== 65776
+ICguLi4p 65777
+XSsk 65778
+IGFtZXJpYw== 65779
+L3JlZmVyZW5jZQ== 65780
+IE9keXNzZXk= 65781
+IE1pbmVz 65782
+IGFnb3Jh 65783
+IHByb3BoZWN5 65784
+IE9wcG9ydHVuaXRpZXM= 65785
+cHJvZmVzc2lvbmFs 65786
+KHByb3h5 65787
+cGhhbnVtZXJpYw== 65788
+IEVkaXRlZA== 65789
+b2xvZ25h 65790
+LmlzT3Blbg== 65791
+KHZlcnRpY2Vz 65792
+IFJpY2t5 65793
+X292ZXJsYXA= 65794
+Pjs= 65795
+LkRPTQ== 65796
+e31f 65797
+IENPTVBVVA== 65798
+cmVkaXJlY3RUbw== 65799
+IHNoYWtlbg== 65800
+IHJhdGlvbg== 65801
+IG5lbGw= 65802
+X2Jj 65803
+IE5lcg== 65804
+YW5kUmV0dXJu 65805
+IGVyZWN0ZWQ= 65806
+Q2hpZWY= 65807
+IGRpbmVybw== 65808
+IGphc21pbmU= 65809
+LS0tLS0tLS0tLS0tLQo= 65810
+ZmFybQ== 65811
+IEhhdGU= 65812
+VEFTSw== 65813
+QU5ORVI= 65814
+J11dXQo= 65815
+IE5pZ2Vs 65816
+aGliaXQ= 65817
+IFFUZXh0 65818
+Lkxlbg== 65819
+IHRlxbw= 65820
+c2xpZGVz 65821
+ZmVsdA== 65822
+IFJFVg== 65823
+X2hvbGQ= 65824
+IENvdXBsZQ== 65825
+ZXNjYXBlZA== 65826
+LWV4cG9ydA== 65827
+Pkk= 65828
+ZXdpc2g= 65829
+KEFwaQ== 65830
+ICghWw== 65831
+Tm91cw== 65832
+T1RPUg== 65833
+IHNlYWxpbmc= 65834
+V2ll 65835
+IGthbm5zdA== 65836
+K3htbA== 65837
+IG14QXJyYXk= 65838
+IGFkbWlyYXRpb24= 65839
+Lm5i 65840
+IGpld2Vs 65841
+LlRlYW0= 65842
+IHByb3NlY3V0ZQ== 65843
+LnhtbGJlYW5z 65844
+Y2h3 65845
+KGJhY2tncm91bmQ= 65846
+IEF2aXY= 65847
+CWZpbGw= 65848
+IGRpc3Bhcml0eQ== 65849
+4Lo= 65850
+X0FQUEVORA== 65851
+IFB2UA== 65852
+44OQ 65853
+IFZpdmU= 65854
+IGdyYW5kc29u 65855
+LmFkZEVsZW1lbnQ= 65856
+QXRvbWlj 65857
+IHByaW1hcnlLZXk= 65858
+IGNvbnRpbmVudHM= 65859
+IEZ1Y2tpbmc= 65860
+JScK 65861
+QG1haWw= 65862
+IGN1bHR1cmFsbHk= 65863
+YW5nYW5lc2U= 65864
+7KCE 65865
+Zm9sbG93ZXJz 65866
+IHVybg== 65867
+IHJhY2tz 65868
+IFNBRkU= 65869
+Ly8NCg0K 65870
+KCIvew== 65871
+X0lOSVRJQUw= 65872
+X1Jlc3BvbnNl 65873
+RXZlbnREYXRh 65874
+Jz4k 65875
+c3RhcnRz 65876
+4Kk= 65877
+IHRoYWltYXNzYWdl 65878
+IHNwZWNpYWxpemF0aW9u 65879
+IOyEpOyglQ== 65880
+ZWRv 65881
+IGNvbXBlbnNhdGVk 65882
+X2NoYXJzZXQ= 65883
+fS57 65884
+L2VudGl0aWVz 65885
+X2Zr 65886
+LS0tLS0tCgo= 65887
+YXNjYXI= 65888
+IGNlbGxGb3JSb3dBdEluZGV4UGF0aA== 65889
+IFByb3Bvc2Fs 65890
+IE90dG8= 65891
+IF9fX19f 65892
+ICIqIg== 65893
+IHRvb2xraXQ= 65894
+IGV4cGVjdGFuY3k= 65895
+RG93bkxpc3Q= 65896
+LWRh 65897
+IHByb3ZvY2F0aXZl 65898
+IG1laW8= 65899
+ID09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PQ== 65900
+KCgpPT57Cg== 65901
+JGxpbms= 65902
+aW5jYXJl 65903
+IGljeQ== 65904
+IEhpc3Q= 65905
+QWNjZXB0ZWQ= 65906
+IGNsb25lcw== 65907
+IFFB 65908
+IGNvbmZvcnQ= 65909
+IHByb3ByaW8= 65910
+IFZvZw== 65911
+KG1hcms= 65912
+X1NlYXJjaA== 65913
+IGVuZHdoaWxl 65914
+ICQj 65915
+44GX44GL 65916
+X0xU 65917
+SW5zdGFuY2VJZA== 65918
+YmFyZA== 65919
+cm5l 65920
+cmVnb3I= 65921
+IG5vcmdl 65922
+XDo= 65923
+0YDRg9C3 65924
+LmJ0bkFkZA== 65925
+IHBpbGxvd3M= 65926
+IFBhcmFtZXRlckRpcmVjdGlvbg== 65927
+SGFuZGxlcw== 65928
+IGRlYWxpbmdz 65929
+IGNvbnZleA== 65930
+IENoYXJpdHk= 65931
+Lk51bWVyaWNVcERvd24= 65932
+IFNrZWxldG9u 65933
+IFp1Y2tlcmJlcmc= 65934
+ZXNlbg== 65935
+IEZBQQ== 65936
+X3N0ZQ== 65937
+IGh1bWlk 65938
+am0= 65939
+Y2hn 65940
+LmdldExvY2Fs 65941
+IHRhbmRlbQ== 65942
+aXN0bGVz 65943
+X210 65944
+LmFjY291bnRz 65945
+IEluc3BlY3Rpb24= 65946
+IEZyYXVk 65947
+IGvDvA== 65948
+IHN5bmNocm9ub3Vz 65949
+IFJpY2FyZG8= 65950
+IEh1ZQ== 65951
+IENvbm5lY3Rpb25z 65952
+SU1FTlQ= 65953
+b2NoYXN0aWM= 65954
+XGRhdGE= 65955
+IEVudGVycHJpc2Vz 65956
+LXNpbXBsZQ== 65957
+IGltYWdlRGF0YQ== 65958
+IFVtYg== 65959
+LXNjcmlwdA== 65960
+L2dlbmVyYWw= 65961
+QVBU 65962
+IFR1dA== 65963
+aW1pemF0aW9u 65964
+IGlkYWRl 65965
+IEtlbQ== 65966
+ZWxzaWY= 65967
+LkFMSUdO 65968
+IFRvcmllcw== 65969
+IEJhc2ls 65970
+b2dvbmFs 65971
+aGFjaw== 65972
+TnVsbE9yRW1wdHk= 65973
+IiksCgo= 65974
+44OD44OI 65975
+ICclJw== 65976
+X1JG 65977
+ZWdvdA== 65978
+LmFzcGVjdA== 65979
+KFByb2plY3Q= 65980
+TEVOR1RI 65981
+cGxlbWVudGFyeQ== 65982
+X3ByZWRz 65983
+IEhvbGRz 65984
+Y2Fycmllcg== 65985
+CWxheWVy 65986
+QXR0YWNoZWQ= 65987
+LXByZXNpZGVudA== 65988
+aW5kaA== 65989
+J10uJyI= 65990
+LkFDQ0VTUw== 65991
+IENFTlRFUg== 65992
+UXVhbGlmaWVk 65993
+IG9zdHI= 65994
+LlN5bWJvbA== 65995
+dGFodW4= 65996
+IExBTkc= 65997
+X2J1c2luZXNz 65998
+CVN0YXJ0 65999
+ZXJyZQ== 66000
+IGFzaGVz 66001
+IEFkdmVydGlzZW1lbnQ= 66002
+Lkhvdw== 66003
+IC8vLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0t 66004
+IG9ibGl2 66005
+IGJsZWVk 66006
+IHN2bw== 66007
+Lm5vZGVOYW1l 66008
+IGl0ZW1OYW1l 66009
+IEJBTks= 66010
+w61jdWxvcw== 66011
+IEVtbXk= 66012
+IERvbWluaWNhbg== 66013
+JylbJw== 66014
+IHJlYWxsb2M= 66015
+dWxzZXM= 66016
+6L6T5Ye6 66017
+IE9mZmVyaW5n 66018
+64ql 66019
+LXByb2dyYW0= 66020
+INGB0L7QvtCx0Yk= 66021
+TU9W 66022
+IG5vZGVJZA== 66023
+0LXQvw== 66024
+Zmx1aWQ= 66025
+IHRlYXNl 66026
+w7hyZQ== 66027
+IGNvbXJhZGVz 66028
+IHVucmVsaWFibGU= 66029
+IHBvc3RJZA== 66030
+Z2V0SUQ= 66031
+b2dyYXBocw== 66032
+VGFuaw== 66033
+IFFWRVJJRlk= 66034
+IGZsb2F0ZWQ= 66035
+X1RISVM= 66036
+Y2ltaWVudG8= 66037
+IE5pY2Fy 66038
+c2hy 66039
+Qm91bmRpbmdCb3g= 66040
+IGlub3JkZXI= 66041
+IEdsb3Nz 66042
+V2l0aFRpdGxl 66043
+dW5jaW8= 66044
+IHBlcnNpc3Rz 66045
+IGRpcmVjdHM= 66046
+YWNjacOzbg== 66047
+U2FtcGxlcg== 66048
+IGJsYWNrbGlzdA== 66049
+IGFEZWNvZGVy 66050
+IGludm9rZXM= 66051
+X3NraW4= 66052
+Pklm 66053
+dHJ1bmNhdGU= 66054
+LlNpbg== 66055
+c29vbg== 66056
+IGRpc2Zy 66057
+CVZlYw== 66058
+IyNf 66059
+LnNjaG9vbA== 66060
+IGJsaW5kcw== 66061
+IGFjYWI= 66062
+IHBhdGhldGlj 66063
+IHZvbGNhbmlj 66064
+IHJkZg== 66065
+IGN1bHRpdmF0ZWQ= 66066
+IFVJTmF2aWdhdGlvbkNvbnRyb2xsZXI= 66067
+IGlwdA== 66068
+IGdsYW5k 66069
+IGV2aWRlbnRseQ== 66070
+UGh5cw== 66071
+IHN3YW1w 66072
+IGltYWdlTmFtZQ== 66073
+LkxheWVy 66074
+dWZl 66075
+LFsn 66076
+IENyaW1zb24= 66077
+6YCg 66078
+PGZvb3Rlcg== 66079
+IGJpa2luZw== 66080
+INC00LDQvdC90YvQtQ== 66081
+bW92ZXM= 66082
+Y3Jj 66083
+aWxsYXRpb24= 66084
+IGxhdXJl 66085
+0YDQsNCx0L7Rgg== 66086
+0YPQug== 66087
+IENhaW4= 66088
+IHB5cw== 66089
+IGNvbGxpZGU= 66090
+IHxffA== 66091
+KHNwYW4= 66092
+IGdpbmc= 66093
+IG9iZWRpZW5jZQ== 66094
+b3V0ZXJz 66095
+U29vbg== 66096
+IFdoaXRuZXk= 66097
+IEltcG9ydHM= 66098
+OlVJVGFibGVWaWV3 66099
+KiY= 66100
+IGJr 66101
+V2l0aEVycm9y 66102
+LWV4dA== 66103
+X1JET05MWQ== 66104
+X3RyYWNraW5n 66105
+bm9vcGVuZXI= 66106
+w7xucw== 66107
+IEd0a1dpZGdldA== 66108
+c2ti 66109
+U0FWRQ== 66110
+T2Jz 66111
+KCcuJylb 66112
+IGF1dGhvcmVk 66113
+LS8= 66114
+TG91aXM= 66115
+LmdldE91dHB1dFN0cmVhbQ== 66116
+IGdlbmVyYWxpemVk 66117
+7Yw= 66118
+IGFydGlzYW4= 66119
+KGNwcw== 66120
+IERtaXQ= 66121
+0LvQuNGG 66122
+LkltYWdlTGF5b3V0 66123
+IHN1Y2hlbg== 66124
+XX0s 66125
+LmNvbGxpZGVy 66126
+VGFiUGFnZQ== 66127
+XT1b 66128
+aHlkcm8= 66129
+X3N0cmlw 66130
+IGxpY2tpbmc= 66131
+IGJvb3N0cw== 66132
+IHNrZXB0aWNpc20= 66133
+IGpvZ28= 66134
+IGNvbXBldGVk 66135
+IOuCtA== 66136
+Tm9kZVR5cGU= 66137
+WEY= 66138
+IHBvc3NpYmlsaXQ= 66139
+LWNvcHk= 66140
+IHRyaXR1cg== 66141
+IEF0dGFja3M= 66142
+IG7Dqw== 66143
+SURBRA== 66144
+b2dyYXBoaWVz 66145
+VGltZVN0YW1w 66146
+b3R5cGluZw== 66147
+LUFwcg== 66148
+INC/0L7Qu9GM0LfQvtCy0LDRgtC10LvRjw== 66149
+ICI7Ig== 66150
+IEhhbGU= 66151
+L2FwaXM= 66152
+IDpdCg== 66153
+X2hkbA== 66154
+IERpYWw= 66155
+CUNvbmZpZw== 66156
+X0ZSQUdNRU5U 66157
+X0VkaXQ= 66158
+LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq 66159
+IGNhbmRpZGFjeQ== 66160
+IENvbXByZXNzaW9u 66161
+X2xvc3Nlcw== 66162
+Kj4oJg== 66163
+SW50ZWdyYWw= 66164
+IHBhcm9keQ== 66165
+IGluaXRpYWxpc2U= 66166
+ZmlsbHM= 66167
+IGFsdHJp 66168
+X0VMRU1FTlRT 66169
+YWRhc3RyYXI= 66170
+Y29ycmVv 66171
+IHdhdHQ= 66172
+X0RSVg== 66173
+IEZvcmdvdA== 66174
+IGdldENvbnRleHQ= 66175
+IHNob3J0YWdlcw== 66176
+IE9DVA== 66177
+d2VldGFsZXJ0 66178
+IE9wZW5z 66179
+Kmw= 66180
+IEtpdHR5 66181
+4oCZw6l0 66182
+IFBpY2Fzc28= 66183
+LnRvQnl0ZUFycmF5 66184
+0L7Qu9GD0Yc= 66185
+IERFTg== 66186
+5aeT5ZCN 66187
+V2ludGVy 66188
+YW50YW4= 66189
+X19b 66190
+UHJpbQ== 66191
+IHJvb2Z0b3A= 66192
+IEJpbGxib2FyZA== 66193
+dGVzdENhc2U= 66194
+cHJvZHV0bw== 66195
+LXRodW1i 66196
+IHJlc2V0cw== 66197
+Z2Vibg== 66198
+PkVycm9y 66199
+LmRlcGFydG1lbnQ= 66200
+IGVhcnJpbmdz 66201
+IENhcm91c2Vs 66202
+KGV4YW1wbGU= 66203
+CWVt 66204
+XENvbnRhaW5lcg== 66205
+IEVsdmlz 66206
+IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0= 66207
+RW5nbGFuZA== 66208
+Y3JlZGl0ZWQ= 66209
+X2NvbnN0cnVjdG9y 66210
+IGxvcg== 66211
+IERhd3Nvbg== 66212
+QnVybg== 66213
+IEJyaWdhZGU= 66214
+IE11dGV4 66215
+IFRyYW5zaXRpb25hbA== 66216
+IE1vdXNlRXZlbnQ= 66217
+Z3Jvdw== 66218
+Lm1pbnV0ZQ== 66219
+IEdNTw== 66220
+PVtdLA== 66221
+IHN1c2hp 66222
+IGFlc3RoZXRpY3M= 66223
+T0NVUw== 66224
+IFNFTEY= 66225
+IEFzc2VydGlvbkVycm9y 66226
+IE1DVQ== 66227
+IGhpbnRUZXh0 66228
+IHNlYXc= 66229
+bmdsZQ== 66230
+IGV4cGVsbGVk 66231
+UFJPUEVSVFk= 66232
+KS48Lw== 66233
+LW9wZXJhdGlvbg== 66234
+IEltbXVu 66235
+IGxpY2Vucw== 66236
+aWJpYQ== 66237
+IGJpZXRlbg== 66238
+IGdyaXBz 66239
+Q0hBTk5FTA== 66240
+X0VSUk9SUw== 66241
+X3JlY3Vyc2l2ZQ== 66242
+VWx0aW1hdGVseQ== 66243
+IE1hamVzdHk= 66244
+IGRlYWN0aXZhdGU= 66245
+IEVYQU1QTEU= 66246
+dWNpb25lcw== 66247
+IGN1cnJlbnRWYWx1ZQ== 66248
+IGV2YWx1YXRlcw== 66249
+L0dyYXBoaWNz 66250
+InRleHQ= 66251
+X3BhbGV0dGU= 66252
+IFRNUA== 66253
+IEJlZHM= 66254
+LkNvcw== 66255
+4Lix4LiZ 66256
+PXRvcmNo 66257
+IFBBQ0tBR0U= 66258
+aWxsYXJk 66259
+LmNw 66260
+leyduA== 66261
+LWFwcHJvdmVk 66262
+IE5vcnRod2VzdGVybg== 66263
+PHRleHRhcmVh 66264
+IENvbXBhdGlibGU= 66265
+X1JEV1I= 66266
+LlF1YW50aXR5 66267
+QElk 66268
+X29yaWVudGF0aW9u 66269
+Z2V0VXJs 66270
+IHRyYW5zbGF0aW5n 66271
+IFdlYXZlcg== 66272
+IGpzb25BcnJheQ== 66273
+IGVtYmxlbQ== 66274
+LklzTnVsbA== 66275
+IENoYXJ0cw== 66276
+W119 66277
+Z2Fl 66278
+X25lc3RlZA== 66279
+dGVtcHM= 66280
+cGF0aG5hbWU= 66281
+Q1c= 66282
+LXdyaXR0ZW4= 66283
+IFBBUks= 66284
+KGNvbmQ= 66285
+X2FsYXJt 66286
+IGdlcmU= 66287
+IEdpeg== 66288
+IE5nYg== 66289
+IC5f 66290
+YXBwaW5lc3M= 66291
+IERlcGxveW1lbnQ= 66292
+aVBhZA== 66293
+Il1d 66294
+IHN0cnN0cg== 66295
+IHRvbnVtYmVy 66296
+KGRs 66297
+CXdvcmQ= 66298
+W3Rv 66299
+X0ZJWEVE 66300
+RXhwaXJhdGlvbg== 66301
+OnJldHVybg== 66302
+T250 66303
+PlBsZWFzZQ== 66304
+Z2V0VGl0bGU= 66305
+LnNwbGl0ZXh0 66306
+Y29tYmluZWQ= 66307
+T2Q= 66308
+IG5vdmVsdHk= 66309
+IlM= 66310
+IHN2bQ== 66311
+Q292ZXJhZ2U= 66312
+IEh1dA== 66313
+IHJlc2lzdGVk 66314
+IGVsbG8= 66315
+IG3DtmNodGU= 66316
+S2F5 66317
+Lmxpa2U= 66318
+Y2Npb25l 66319
+IHJlc2VtYmw= 66320
+RGVhdGhz 66321
+IGVwaXQ= 66322
+KHJnYg== 66323
+LkNsYXNzZXM= 66324
+INC00L7RgdGC 66325
+Y2FwdHVyZXM= 66326
+XStc 66327
+YW1pZW50 66328
+IFBhc28= 66329
+LlNlbmRNZXNzYWdl 66330
+IFJlbmF1bHQ= 66331
+IE5hcmVuZHJh 66332
+dG91dA== 66333
+IGhhZGRl 66334
+IFR3ZWVu 66335
+w6VkZQ== 66336
+IG91dGZpZWxk 66337
+Lz48Lw== 66338
+QFw= 66339
+IER1cmFudA== 66340
+IGFicmU= 66341
+X3N0b3J5 66342
+IHBlcmZ1bWU= 66343
+Q3BwVHlwZURlZmluaXRpb25TaXplcw== 66344
+INC/0LDRgNCw0LzQtdGC 66345
+Y2hlbWVz 66346
+IFNhZGRhbQ== 66347
+cHJlbm9t 66348
+dXNwZW5kZWQ= 66349
+IEJlbmVmaXQ= 66350
+IHNjZXB0 66351
+X01vdmU= 66352
+IE5hag== 66353
+LU9u 66354
+cnVk 66355
+SW1hZ2VQYXRo 66356
+wq4s 66357
+IGFuYWx5c2Vk 66358
+IE9H 66359
+ZWxsZWljaHQ= 66360
+YmlyZHM= 66361
+ZWt0ZQ== 66362
+IEFsaXNvbg== 66363
+IGF0aGVpc3Q= 66364
+eyU= 66365
+YWJo 66366
+LXBob3Rv 66367
+aW5zdHJ1bWVudA== 66368
+IGhpbnRlZA== 66369
+IE9mZmxpbmU= 66370
+KSIpOwoK 66371
+X1BSRUY= 66372
+IHN0eWxpc3Q= 66373
+IEt1YmVybmV0ZXM= 66374
+IGZlcnY= 66375
+CgoKCgoKCgoKCgoKCgo= 66376
+KCI9Ig== 66377
+LmdldE0= 66378
+IG5vdGV3b3J0aHk= 66379
+IHNjb3V0aW5n 66380
+X3RyYW5zbGF0ZQ== 66381
+IGJlZ2lubmluZ3M= 66382
+IEx1bw== 66383
+IHFs 66384
+X2FsaWduZWQ= 66385
+IGVydw== 66386
+dWFycw== 66387
+X1BhdGg= 66388
+LicuJA== 66389
+IGhvYw== 66390
+IGRlcnA= 66391
+bG9p 66392
+IE1jS2lu 66393
+6K+05piO 66394
+Lz0= 66395
+TGlua0lk 66396
+c3RkZGVm 66397
+cmVkdWNlcnM= 66398
+aXNhbnM= 66399
+Lmhpc3Q= 66400
+Jy8+Cg== 66401
+IFRveGlj 66402
+IGRpc2FwcGVhcmluZw== 66403
+IGNpcw== 66404
+KGRv 66405
+IG1haW5TY3JlZW4= 66406
+X0JBTks= 66407
+IGRlbW9uc3RyYXRvcnM= 66408
+IFBhbGV0dGU= 66409
+dWVseQ== 66410
+UmFyZQ== 66411
+IHJlc2lkaW5n 66412
+IGFtYmllbnRl 66413
+IG1pc20= 66414
+LXF1ZXN0aW9u 66415
+IG9wcHJlc3NlZA== 66416
+IGxldHJh 66417
+PGR5bmFtaWM= 66418
+IEZvdG9z 66419
+LXBvbGljeQ== 66420
+aXN0ZW0= 66421
+LmV4Y2hhbmdl 66422
+c3RyZQ== 66423
+JC8s 66424
+7ZWY6riw 66425
+JAoK 66426
+IFJlbmU= 66427
+IHRvdXRlZA== 66428
+LUNvcmU= 66429
+IENyYW4= 66430
+IFRyYWRlcg== 66431
+IGRldw== 66432
+IGZsYXA= 66433
+CWZpbGVuYW1l 66434
+IGlubWF0ZQ== 66435
+KE1vY2s= 66436
+IFNvYg== 66437
+aXNibg== 66438
+IG5vZQ== 66439
+IEZvcmJpZGRlbg== 66440
+IGVsZXM= 66441
+IGRpbmc= 66442
+X3Nh 66443
+KSovCg== 66444
+YXJpZQ== 66445
+IFN1cHBvcnRz 66446
+IG1vZHVsYXRpb24= 66447
+IGVuc2w= 66448
+IFNoYWRvd3M= 66449
+cHJpbmNpcGFs 66450
+YW5nZW50 66451
+LUphbg== 66452
+IFBhbnRz 66453
+LHRy 66454
+IGZpdHRl 66455
+IGdhcm1lbnRz 66456
+TWFyZ2lucw== 66457
+TFRS 66458
+IE1peQ== 66459
+dmVudHVz 66460
+IE3DtmdsaWNo 66461
+W2F0dHI= 66462
+L3Jlc3BvbmQ= 66463
+IHR0aw== 66464
+IG9sZHXEnw== 66465
+IENvbnNl 66466
+UHJlbWl1bQ== 66467
+IGZyYW5jYWlzZQ== 66468
+X2hvcml6b250YWw= 66469
+X2li 66470
+IEZhcmU= 66471
+IGhhcnZlc3RlZA== 66472
+ZW5kaXI= 66473
+KGhpdA== 66474
+PiovCg== 66475
+IElSZXBvc2l0b3J5 66476
+eWxpZQ== 66477
+IGRldGVjdHM= 66478
+Om5v 66479
+4pi0 66480
+IGRpc2XDsQ== 66481
+IHVuc2VyZW4= 66482
+IG1vY2tpbmc= 66483
+c291dGg= 66484
+cmF0ZXM= 66485
+IGh5cG9j 66486
+IFNob3J0bHk= 66487
+IEJsYWNrcw== 66488
+0YLQuNGA0L7Qsg== 66489
+IEFTQVA= 66490
+cmViYmU= 66491
+aWVj 66492
+LkFkZERheXM= 66493
+IGVwaXM= 66494
+LWluZmxhbW1hdG9yeQ== 66495
+LW5ldA== 66496
+IHBhbGw= 66497
+65Q= 66498
+IGlzc3VhbmNl 66499
+IGNvbnRlbnRpb3Vz 66500
+LkFyZWFz 66501
+0LjQu9GM 66502
+IGNvbnRpZ3VvdXM= 66503
+W2FjdGlvbg== 66504
+IGV4cHJlcw== 66505
+ISIpCgo= 66506
+VUxP 66507
+IHdyZQ== 66508
+IHN1YmRpdg== 66509
+IHR1cm5hcm91bmQ= 66510
+IGFjY2Vs 66511
+IFVuaXY= 66512
+IFVuaXZlcnNpZGFk 66513
+c2V0dA== 66514
+ZGVzY3I= 66515
+LkdlbmVyYXRpb24= 66516
+IHBhdHJpb3Q= 66517
+IGZhcw== 66518
+KioqKgo= 66519
+UVA= 66520
+IOWN 66521
+b3BwZWw= 66522
+IGp1ZWdvcw== 66523
+LmRyYXdTdHJpbmc= 66524
+LWNvbmZpcm0= 66525
+CSAgICAgICAgICAgICA= 66526
+PFByb3Bz 66527
+IGZhbWlsbGU= 66528
+IEhlbG1ldA== 66529
+ZXJ0aWFyeQ== 66530
+YXRoaQ== 66531
+IGN1bHRpdmF0ZQ== 66532
+IGR1cGxpY2F0aW9u 66533
+IHNweU9u 66534
+Ki8pCg== 66535
+IEh1bmdlcg== 66536
+T3J0aA== 66537
+IHBpbnBvaW50 66538
+IEhhZw== 66539
+IHRpbWV0YWJsZQ== 66540
+bWFyZ2luVG9w 66541
+IHJlY2lwcm8= 66542
+ZmVsbA== 66543
+IFBlcnNpc3RlbnQ= 66544
+44Gp 66545
+cGx1cmFs 66546
+cXVldWVk 66547
+IGdyYWNpYXM= 66548
+w6F0aWNv 66549
+IGhhcmRzaGlw 66550
+IEFwYXJ0bWVudHM= 66551
+IEp1bms= 66552
+IFJldmU= 66553
+X01zaw== 66554
+IHN1cHJh 66555
+IEFUUA== 66556
+IHNldFNob3c= 66557
+5a2X56ym5Liy 66558
+IE5vdHRpbmdoYW0= 66559
+U3RldmVu 66560
+IE11bmQ= 66561
+cmFuZ2Vz 66562
+IHVwbG9hZHM= 66563
+IGJmcw== 66564
+cHo= 66565
+dWx0aW1hdGU= 66566
+IEVmZmljaWVuY3k= 66567
+QU1J 66568
+5b6E 66569
+X1JFUEVBVA== 66570
+IGFjYWRlbWlh 66571
+LnRvb2xTdHJpcEJ1dHRvbg== 66572
+VG9FbmQ= 66573
+cnZpbmU= 66574
+IFRoeQ== 66575
+IEVsZWN0b3JhbA== 66576
+IFJFUVVJUkVE 66577
+IHBsdW5nZQ== 66578
+IFJldm9sdXRpb25hcnk= 66579
+IFRlbnQ= 66580
+IGdyZW5hZGU= 66581
+IjpbeyI= 66582
+IG1vdXI= 66583
+UG93 66584
+IGV2YW5nZWxpY2Fs 66585
+VEVDVEVE 66586
+IG92ZXJ0dXJu 66587
+CUlucHV0 66588
+cmVjb21tZW5k 66589
+JUM= 66590
+IHNsYWc= 66591
+IEJoYXI= 66592
+X2VuY3J5cHQ= 66593
+IFdhcmZhcmU= 66594
+KGFnZQ== 66595
+QVRFR09SSUVT 66596
+bWlsZQ== 66597
+IGhlYXZlbmx5 66598
+YW1tZXI= 66599
+KCkpWw== 66600
+YWRlcmE= 66601
+aGc= 66602
+IExBVw== 66603
+IHBhY2thZ2VOYW1l 66604
+X3R5cGVEZWZpbml0aW9u 66605
+KGJl 66606
+REJOdWxs 66607
+X3Rhcg== 66608
+IGhldXJpc3RpYw== 66609
+IFdhbnRlZA== 66610
+IFN0dWI= 66611
+IGtpdHQ= 66612
+UkVD 66613
+IHBhc2Fy 66614
+Lm5ld0J1aWxkZXI= 66615
+CWdyYXBo 66616
+aW9zYQ== 66617
+LmNvbHVtbkhlYWRlcg== 66618
+IHNldE9wZW4= 66619
+IFRoaXJ0eQ== 66620
+ICIlLg== 66621
+QWxiZXJ0 66622
+IHNhbWE= 66623
+IHJvY2tpbmc= 66624
+Q29tcGxl 66625
+TVY= 66626
+fCgpCg== 66627
+X3JlYWRz 66628
+KHZhcmFyZ2lu 66629
+b3Vsb3VzZQ== 66630
+IFNJTUQ= 66631
+IGNhcmJvaHlkcmF0ZQ== 66632
+d2hvbGU= 66633
+LE5vbmU= 66634
+i+ivlQ== 66635
+IENoYW5k 66636
+Y3phcw== 66637
+X3F1ZXJ5c2V0 66638
+IGV4aXN0ZW50aWFs 66639
+IGVkaWJsZQ== 66640
+IGFnaWxpdHk= 66641
+IFdpbGxpcw== 66642
+IGh5bQ== 66643
+IEJyaWxs 66644
+0LjRhQ== 66645
+IE5vdEZvdW5kRXhjZXB0aW9u 66646
+ICgoKQ== 66647
+QVBTSE9U 66648
+IHN1YnN0YW50aXZl 66649
+X3R5cGVEZWZpbml0aW9uU2l6ZQ== 66650
+IHZhY2FuY2llcw== 66651
+RU5HSU5F 66652
+IGFuZGVycw== 66653
+IHN5bWI= 66654
+IGV0cmVl 66655
+KS5f 66656
+IHRyYW5zcG9ydGluZw== 66657
+aW1wcw== 66658
+L2NvcA== 66659
+YWN0YWJsZQ== 66660
+X2ZsdXg= 66661
+IG5ld0luc3RhbmNl 66662
+YXRvaXJl 66663
+IGNvbHVtbkluZGV4 66664
+IEdpbw== 66665
+IHN1YnRpdGxlcw== 66666
+LldpbkZvcm1z 66667
+0LvRj9C10Lw= 66668
+IGFsZXJ0ZWQ= 66669
+IHN0cmlwcGluZw== 66670
+d2VuZHVuZw== 66671
+IE1ldGhvZEludm9jYXRpb24= 66672
+RXJyb3JIYW5kbGVy 66673
+U2Nyb2xsYmFy 66674
+UG9ydGZvbGlv 66675
+Y29uc3Vt 66676
+IENPTU1PTg== 66677
+TGY= 66678
+X2Jhc2Vk 66679
+b2NhbHk= 66680
+IGVmZmV0 66681
+dnZt 66682
+cmlwc2k= 66683
+IGZsb3VyaXNo 66684
+Y2h0ZXI= 66685
+PT09PT09PT09Cg== 66686
+IHJlcXVlcg== 66687
+LnF1ZXN0aW9ucw== 66688
+KCI/ 66689
+IHBvc1g= 66690
+IFBDUg== 66691
+IE9yZ2FuaXphdGlvbnM= 66692
+cHLDvA== 66693
+RXhhbQ== 66694
+IEluY29ycG9yYXRlZA== 66695
+X3BocmFzZQ== 66696
+IHByYXllZA== 66697
+IGhvbWVvd25lcg== 66698
+IFRhag== 66699
+eng= 66700
+IElkZWFsbHk= 66701
+X01BQ0hJTkU= 66702
+IFJlbW92aW5n 66703
+Q29lZmZpY2llbnQ= 66704
+IGVkdWNhdGluZw== 66705
+ID8+Jg== 66706
+IHBvdXJz 66707
+aXJhbQ== 66708
+X3BlYWs= 66709
+IG5lc3Rpbmc= 66710
+YWJ5dGU= 66711
+bmF0dXJl 66712
+IGFmcw== 66713
+IFJvbw== 66714
+Y2FyZ28= 66715
+b2JqZXQ= 66716
+IGZyZWVpbmc= 66717
+cXVha2U= 66718
+RGVuc2l0eQ== 66719
+IGRlc2NyaWNhbw== 66720
+LyoqKioqKioq 66721
+IGRhc2hlZA== 66722
+IGdyb8Of 66723
+b29reQ== 66724
+IFBFT1BMRQ== 66725
+X1Bvc3Q= 66726
+IGNlcnZpY2Fs 66727
+IEFkanVzdGFibGU= 66728
+ZW5zdWFs 66729
+IFJldmlzZWQ= 66730
+KHJlZmVyZW5jZQ== 66731
+CUJhc2U= 66732
+ZXNzaW0= 66733
+TWFpbnQ= 66734
+IGdldFNpemU= 66735
+IFNhbmR3aWNo 66736
+cmFkaWVudA== 66737
+c2luaw== 66738
+Oi8vJw== 66739
+X3R0 66740
+RlBT 66741
+IEFybWVuaWFu 66742
+cHJldlN0YXRl 66743
+X0xJTkVT 66744
+IHRpZ2h0ZW4= 66745
+PFs= 66746
+XTw8Ig== 66747
+IFRyYWZm 66748
+IGxpcXVpZHM= 66749
+IGFyY3M= 66750
+X0NvbW1hbmQ= 66751
+QHByb3RvY29s 66752
+LWlzaA== 66753
+IHJ1YmJlZA== 66754
+QkJD 66755
+L2ZpcmViYXNl 66756
+QXBwQmFy 66757
+PFg= 66758
+IFNJTkdMRQ== 66759
+LlN0YXR1c0ludGVybmFsU2VydmVyRXJyb3I= 66760
+IHZlcnRl 66761
+L3F1ZXJ5 66762
+IGdldENvbmZpZw== 66763
+IERpcmVjdFg= 66764
+cGh5c2ljcw== 66765
+eWNvcA== 66766
+IGJyZWFrZXI= 66767
+LXZvbHVtZQ== 66768
+ZGF0YVRhYmxl 66769
+4oCZZQ== 66770
+cmlvdHQ= 66771
+IEV0ZXJuYWw= 66772
+Z2V0SGVpZ2h0 66773
+IG9uSXRlbUNsaWNr 66774
+IHF1YXRlcm5pb24= 66775
+IGtpbmt5 66776
+ZGVzZXJpYWxpemU= 66777
+KFNwcmluZw== 66778
+IHBlYWNlZnVsbHk= 66779
+X0RldmljZQ== 66780
+KE1hdHJpeA== 66781
+acOocmVtZW50 66782
+KHR5cA== 66783
+LnZhYWRpbg== 66784
+LmdldE1ldGhvZA== 66785
+IOKAnQoK 66786
+IHRocmVhZGVk 66787
+IEZhbW91cw== 66788
+IEdhbWI= 66789
+IOyngA== 66790
+INCk 66791
+IGZha3Q= 66792
+IGVjaHQ= 66793
+X3Vi 66794
+LkpwYVJlcG9zaXRvcnk= 66795
+IHVuZ2U= 66796
+LWVuZGluZw== 66797
+IENBTUVSQQ== 66798
+Y3JlZGVudGlhbA== 66799
+IFBhc3Nwb3J0 66800
+CVJUREJH 66801
+IGV4dHJhZA== 66802
+LW9yaWdpbg== 66803
+IHNhY3JpZmljZWQ= 66804
+IFNjaHVsdHo= 66805
+IFR1cnRsZQ== 66806
+LmNlbnRlclg= 66807
+IHNob3djYXNpbmc= 66808
+IGJ6dw== 66809
+eXJv 66810
+aXNOdWxs 66811
+LmlzRGlyZWN0b3J5 66812
+bWFpbnQ= 66813
+X2Jp 66814
+IFNwcmluZ2Vy 66815
+fSgpCgo= 66816
+aXNzdWVy 66817
+LWFybQ== 66818
+ZXNr 66819
+bGluaGE= 66820
+IGtvcnQ= 66821
+YWphcw== 66822
+YWxpbms= 66823
+KEJ1dHRvbg== 66824
+IFJlc3RvcmF0aW9u 66825
+IGluY3I= 66826
+IFpob3U= 66827
+CSAgICAgICAgCQ== 66828
+IERpc2NsYWltZXI= 66829
+IGt2aW5ub3I= 66830
+IERhcmU= 66831
+IDwtPg== 66832
+6K+m 66833
+CQkJCQkJCQkJCQo= 66834
+LkNsYW1w 66835
+CXNjb3Bl 66836
+IE11bQ== 66837
+PDw8PDw8PA== 66838
+L3t7 66839
+X2FydGlzdA== 66840
+IFJlYWN0aW9u 66841
+IE5pY2tlbA== 66842
+X1JlbW92ZQ== 66843
+KCgoKA== 66844
+64yA 66845
+IGR5bmFzdHk= 66846
+IFRocm93cw== 66847
+IENvdWw= 66848
+X3JuZw== 66849
+IERvaw== 66850
+Lmxpc3RWaWV3 66851
+IFR1Y3Nvbg== 66852
+KHRvaw== 66853
+IFBoaWxpcHBl 66854
+VG9TaG93 66855
+IGRpZXRh 66856
+IFVsdHI= 66857
+LlRpY2s= 66858
+IEdldFR5cGU= 66859
+aWV0ZQ== 66860
+IExlYWg= 66861
+SGFyZHdhcmU= 66862
+IENvbXByZWhlbnNpdmU= 66863
+Q09NTU9O 66864
+IGluZHVzdHJp 66865
+aXJpY2Fs 66866
+LWJlZHJvb20= 66867
+IGd5cm8= 66868
+INC60L7RgA== 66869
+IC0vCg== 66870
+Y291cg== 66871
+IEJydXNoZXM= 66872
+TXVsdGlwbGllcg== 66873
+IHVzZXJkYXRh 66874
+IFJlY29nbg== 66875
+IG9ibGlnYXRlZA== 66876
+IExldmlu 66877
+YW5jZXN0b3I= 66878
+IG1lbmluZw== 66879
+IFVk 66880
+LGpzb24= 66881
+KGFzc2lnbg== 66882
+IG5kYXJyYXk= 66883
+X2Nvcm5lcg== 66884
+QEFsbEFyZ3NDb25zdHJ1Y3Rvcg== 66885
+6aqM6K+B56CB 66886
+YWRvcnM= 66887
+IHJlc3BvbmRlbnQ= 66888
+R09SSVRI 66889
+IHRlbmdv 66890
+IHNldE1lc3NhZ2U= 66891
+IElQTw== 66892
+YXJyYXlz 66893
+IEFHQUlO 66894
+J1s= 66895
+ICItLy8= 66896
+w6Rt 66897
+44CCXA== 66898
+Lm9uY2U= 66899
+Y3VycmVudFRpbWU= 66900
+R292 66901
+IGdldG9wdA== 66902
+bWx4 66903
+IFRvbmU= 66904
+J11dOwo= 66905
+IHByZWRhdG9y 66906
+V3k= 66907
+L2VudGl0eQ== 66908
+IG1hbnRyYQ== 66909
+KT49 66910
+b2dyYWQ= 66911
+IG1lbGFu 66912
+IHNvcnRCeQ== 66913
+IERFRklORQ== 66914
+UHJvdGVjdGVk 66915
+Y2RlY2w= 66916
+Jz4iLiQ= 66917
+PGN2 66918
+Y3JpcmU= 66919
+LVRydW1w 66920
+IHVjZmlyc3Q= 66921
+Y2Fzc2VydA== 66922
+IGFja25vd2xlZGdlbWVudA== 66923
+IElOVg== 66924
+IFVOVQ== 66925
+LnNxdWFyZXVw 66926
+IFNheA== 66927
+cmV0dGU= 66928
+KCkKCgoK 66929
+IERhdGFCYXNl 66930
+IFBhdHJpb3Q= 66931
+X1Jvdw== 66932
+IEV4aGliaXRpb24= 66933
+IGRldGFpbmVlcw== 66934
+IFN0cmluZ0lP 66935
+X0RFTg== 66936
+TW9kaWZpZXJz 66937
+YXNhcg== 66938
+aXJ0aW5n 66939
+IHRyYW5xdWls 66940
+KGVuYw== 66941
+IOOCsw== 66942
+bmNvZGVy 66943
+X3VudXNlZA== 66944
+IEJpYW4= 66945
+VmVyYg== 66946
+X2V4Y2VycHQ= 66947
+L2V4cG9ydA== 66948
+IFNleHQ= 66949
+RHM= 66950
+QU1QTA== 66951
+T2ZTdHJpbmc= 66952
+X3RyYWNrcw== 66953
+d2o= 66954
+b3Rvbmlu 66955
+IElURQ== 66956
+SVZFTg== 66957
+LW9yaWdpbmFs 66958
+IEZJTkFM 66959
+X18pCgoK 66960
+IGVuc2U= 66961
+IFV0dA== 66962
+Oioq 66963
+IFN1cnJleQ== 66964
+IEthaXNlcg== 66965
+YWRtaW5pc3RyYXRvcg== 66966
+LWxhcmdlc3Q= 66967
+IGxldHp0ZW4= 66968
+IGNoYWluZWQ= 66969
+J0g= 66970
+IGRvY3VtZW50aW5n 66971
+IExlY3R1cmU= 66972
+Ukg= 66973
+b2xsYXBzZWQ= 66974
+c2tpcnRz 66975
+ZWxkZXI= 66976
+IFNpeHRo 66977
+IGFsbGVnaWFuY2U= 66978
+SVNPU3RyaW5n 66979
+VXNhZ2VJZA== 66980
+LmhhcmR3YXJl 66981
+IHBhcmk= 66982
+IHfDpGhyZW5k 66983
+IHJkcg== 66984
+IGhqZW0= 66985
+TE9PUg== 66986
+IExQQVJBTQ== 66987
+INC80L7QttC10YI= 66988
+IGhvbWFnZQ== 66989
+b3V0c2lkZQ== 66990
+IENoYXJTZXQ= 66991
+PEdhbWU= 66992
+77yZ 66993
+X01VVEVY 66994
+KSkvKA== 66995
+X3Jlb3JkZXJlZA== 66996
+dGV4dElucHV0 66997
+QU5DRUQ= 66998
+IFRlZQ== 66999
+IGNvcm5lcmJhY2s= 67000
+UXVlcnlTdHJpbmc= 67001
+IGxvbmdpdHVkaW5hbA== 67002
+IEhvbGlkYXlz 67003
+QUJDREVGRw== 67004
+LktleVByZXNz 67005
+LnVs 67006
+eWRybw== 67007
+IFRhdGU= 67008
+CXJvdXRlcg== 67009
+c3BvdHM= 67010
+IHBhdWw= 67011
+LXByZXY= 67012
+IGtub3dpbmdseQ== 67013
+IEt1cmRz 67014
+IEV1cm9w 67015
+LmNlcnQ= 67016
+QklH 67017
+KGNvZWZm 67018
+IENsYXVz 67019
+L2V4YW1wbGVz 67020
+IEZhcm1z 67021
+IC8vKA== 67022
+U1BBTg== 67023
+IGNpcmN1cw== 67024
+IE1JUw== 67025
+IFRyYWl0cw== 67026
+LWNsZWFy 67027
+IHJlZ2ltZW4= 67028
+IGJhY2tncm91bmRJbWFnZQ== 67029
+dXNhaGE= 67030
+X01ldGFkYXRhVXNhZ2VJZA== 67031
+IHJoZQ== 67032
+Q2xpbg== 67033
+IERvbWluaWM= 67034
+Lm5leHREb3VibGU= 67035
+KGRldGFpbA== 67036
+VGhyZWFkUG9vbA== 67037
+IENhcnBlbnRlcg== 67038
+c29ydGluZw== 67039
+IGdvdmVybm9ycw== 67040
+IHNpbmdlcnM= 67041
+dW5saW5r 67042
+IHJpbmdpbmc= 67043
+IHNjaGVtYXRpYw== 67044
+IGVycm1zZw== 67045
+IGJlYg== 67046
+LiIr 67047
+IEluY3JlYXNlcw== 67048
+IkFsbA== 67049
+IGFjb250ZQ== 67050
+emlh 67051
+LlRleHRDaGFuZ2Vk 67052
+IFRvRG8= 67053
+LDopOwo= 67054
+bmFnZQ== 67055
+Y2hs 67056
+b3dlbA== 67057
+IGdlcmFkZQ== 67058
+X2ZmdA== 67059
+IGVzdGFtb3M= 67060
+U1RBUg== 67061
+IGRpc2d1c3Q= 67062
+Z3Jhbg== 67063
+cG9ydHVuaXR5 67064
+IGF1dG9iaQ== 67065
+e317Cg== 67066
+IENvdXBvbnM= 67067
+X0dBSU4= 67068
+IFRDSEFS 67069
+L3Bhc3M= 67070
+55Sx 67071
+IGZvb3R3ZWFy 67072
+KGJvdW5kcw== 67073
+YXB1cw== 67074
+Y2l0ZQ== 67075
+Qk9PVA== 67076
+IENvZGVj 67077
+bG9ndWU= 67078
+LXByb3BlcnRpZXM= 67079
+YXV0b21hdGlvbg== 67080
+IFNob2U= 67081
+c3BlY3Q= 67082
+KG1t 67083
+IEtldA== 67084
+W3BhcmFt 67085
+IGJhc2ls 67086
+IEFuZ3VsYXJGaXJl 67087
+IGFkdmVudHVyb3Vz 67088
+X1VDbGFzcw== 67089
+IGluZHVsZ2U= 67090
+CWN1ZGE= 67091
+IGluc3VsdGluZw== 67092
+LkV4cHJlc3Npb25z 67093
+IG9uQ3JlYXRlT3B0aW9uc01lbnU= 67094
+VUVM 67095
+IGJpdGluZw== 67096
+KCFf 67097
+IEVuY3ljbG9wZWRpYQ== 67098
+IGJlcnQ= 67099
+IFZlcmE= 67100
+IEJpYmxpY2Fs 67101
+aW5zaWNz 67102
+X1NJTVBMRQ== 67103
+IHNhbGlkYQ== 67104
+cmVxdWVzdGVk 67105
+IENvbXBvc2l0aW9u 67106
+LkF0b2k= 67107
+KEtleUV2ZW50 67108
+ZXJlYQ== 67109
+IGRlcG9ydGVk 67110
+IFF1cg== 67111
+IG5pcHBsZXM= 67112
+aXNBcnJheQ== 67113
+INGD0LrQsNC3 67114
+IGJyaW5r 67115
+bWV0cm9z 67116
+RW51bWVyYXRpb24= 67117
+IEJ1aWxkcw== 67118
+ZXJ0b3M= 67119
+IHNhaW50cw== 67120
+LmRlcGxveQ== 67121
+ZXRoZXJldW0= 67122
+IGtpbmRlcmdhcnRlbg== 67123
+dmFuaXplZA== 67124
+IGNvbWJpbg== 67125
+IHBvdXZvaXI= 67126
+S2lu 67127
+YXLEsQ== 67128
+IC4uLi4u 67129
+77y+ 67130
+Lkdv 67131
+IHF1aXJreQ== 67132
+xLFuZGFu 67133
+IGFjdGlvblR5cGVz 67134
+IFFVRVJZ 67135
+VGF5bG9y 67136
+IFJL 67137
+dGF0 67138
+LnBhY2tldA== 67139
+IElNUE9SVEFOVA== 67140
+IGN1c2hpb25z 67141
+YnVsaw== 67142
+ZHVjdGl2ZQ== 67143
+YmVuZWY= 67144
+b2NyaXN5 67145
+IGZ1ZXJvbg== 67146
+IGN1cnNlcw== 67147
+IGZpbGluZ3M= 67148
+ZWxpZXI= 67149
+KD86 67150
+X2RyaXZl 67151
+IGNvbnRhY3Rv 67152
+IFBhcmt3YXk= 67153
+dmlkZXM= 67154
+Z25l 67155
+YXZhZ2U= 67156
+XFwu 67157
+ZnVsbE5hbWU= 67158
+ZGxs 67159
+IHNob2Nrcw== 67160
+ICMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIw== 67161
+X3B4 67162
+QFdlYg== 67163
+LlBlcnNpc3RlbmNl 67164
+IHN1bms= 67165
+LnRvb2x0aXA= 67166
+YXV0aWNhbA== 67167
+TmV3c2xldHRlcg== 67168
+IHdhaXRlcg== 67169
+IGlucXVpcmU= 67170
+0LDQtdGC0YHRjw== 67171
+KCdfXw== 67172
+dG9n 67173
+SUVOVEFUSU9O 67174
+IGNvbXBhbnlJZA== 67175
+IEJhc2ljcw== 67176
+CUpMYWJlbA== 67177
+IG1hY09T 67178
+IE1hdHM= 67179
+X3RlbA== 67180
+LXByZWZpeA== 67181
+IG11dGF0ZQ== 67182
+fScp 67183
+Y2hlbmc= 67184
+IE1pbGl0 67185
+IiY= 67186
+ZmluZGluZw== 67187
+IERhdGFMb2FkZXI= 67188
+LkdQSU8= 67189
+IExldnk= 67190
+IHNuZWFrZXJz 67191
+IGNyw6lk 67192
+YXduZXI= 67193
+eGlh 67194
+L3NpbXBsZQ== 67195
+Q0hS 67196
+IGZsb3RhdGlvbg== 67197
+LnNlbnNvcg== 67198
+QnJhemls 67199
+IFNlYXNvbnM= 67200
+IFNwZWFr 67201
+LWJhbGw= 67202
+IE11dGF0aW9u 67203
+dWtrYW4= 67204
+IE9tYWhh 67205
+4oCZb24= 67206
+IEN1b21v 67207
+IEp1ZGljaWFs 67208
+IGNoZWNrcG9pbnRz 67209
+IEZyZW0= 67210
+CUlk 67211
+ZWdyaXR5 67212
+X2Fm 67213
+QE5vQXJnc0NvbnN0cnVjdG9y 67214
+IHRhYmVsYQ== 67215
+WyM= 67216
+bm90YQ== 67217
+IEZhY3RvcnM= 67218
+KGdyb3Vwcw== 67219
+aXN3YQ== 67220
+SVZP 67221
+IHNjcmk= 67222
+YWNldA== 67223
+IE1laA== 67224
+KGNsYXp6 67225
+IFs8 67226
+cGVyaWFs 67227
+IHN1cnBhc3NlZA== 67228
+IGpva2Vk 67229
+IHJ1ZA== 67230
+IGltYmFsYW5jZQ== 67231
+IEZyYWdl 67232
+c3Nw 67233
+IGluZGljdGVk 67234
+Lm1hcmtldA== 67235
+O20= 67236
+IHJlcGFpcmluZw== 67237
+LW5vdGU= 67238
+RGVidWdnZXI= 67239
+KFdlYg== 67240
+IHNpbmdz 67241
+IExveQ== 67242
+IERFU0lHTg== 67243
+LkNvbXA= 67244
+LWNvbnRyb2xsZXI= 67245
+IGF2b2NhZG8= 67246
+IEJvd2ll 67247
+Y29udGFkb3I= 67248
+dWxpbmdz 67249
+dWNob3M= 67250
+c3BlY2lmaWVy 67251
+IFZvbHZv 67252
+IGRlbW9z 67253
+IFByb2R1dG8= 67254
+Lk5vdEZvdW5k 67255
+IG5pw7Fvcw== 67256
+IEJvbHM= 67257
+X291dGVy 67258
+U2hlcg== 67259
+QVVUTw== 67260
+IGpvdg== 67261
+IEZyZWRkaWU= 67262
+b3JpYXM= 67263
+IGFmZWN0 67264
+IGZhY2lsaXRhdGluZw== 67265
+IGRvbWluYXRpbmc= 67266
+UGFyY2VsYWJsZQ== 67267
+JywnLQ== 67268
+bW9vbg== 67269
+IG1ldGFzdA== 67270
+IHNjYXJm 67271
+IFRoZXJt 67272
+Q2FsbEJhY2s= 67273
+0YHRgtCw0LI= 67274
+LkltcG9ydA== 67275
+IGJldHJheWFs 67276
+aWN1bG9z 67277
+IHdlacOf 67278
+5YyF 67279
+X14= 67280
+d2lmaQ== 67281
+IFNFTlNPUg== 67282
+X0JVU1k= 67283
+JGI= 67284
+X0ZJTkQ= 67285
+IHBsYXN0aWNz 67286
+IENPTlZFUlQ= 67287
+CWNhbGw= 67288
+IFByYWd1ZQ== 67289
+IGdhcm5lcmVk 67290
+X2xlYXJuaW5n 67291
+c2hvb3Q= 67292
+J10pKQ0K 67293
+IEdpbmdlcg== 67294
+PXBk 67295
+LHRlc3Q= 67296
+UHJvZml0 67297
+IGVzdGltYXRvcg== 67298
+IGJyZWU= 67299
+IC8vPC8= 67300
+X2hhdmU= 67301
+IEtvZA== 67302
+X0lNTQ== 67303
+aXp6YXM= 67304
+bWlnaHR5 67305
+154= 67306
+IE9uQ2xpY2tMaXN0ZW5lcg== 67307
+44OH 67308
+IFNjaWVudGlzdA== 67309
+RmlsdGVyZWQ= 67310
+YXZs 67311
+aGF5 67312
+X2dlbmVyYXRlZA== 67313
+XScK 67314
+IEF1dGhvcml0aWVz 67315
+OnBhcmFt 67316
+IHN0YXR0 67317
+LW1hdGVyaWFs 67318
+IGxpZGVy 67319
+IENyb3A= 67320
+IEJ1bmlmdQ== 67321
+IG5leHRQcm9wcw== 67322
+b3J6 67323
+X29yZA== 67324
+PHg= 67325
+X0lPQ1RM 67326
+IE11c2NsZQ== 67327
+CWV4ZWM= 67328
+RU5BTUU= 67329
+X2xldHRlcnM= 67330
+IyMjIyM= 67331
+IENz 67332
+J109PSI= 67333
+ICInKQ== 67334
+Q2xlYW51cA== 67335
+LnN0cnVjdHVyZQ== 67336
+zro= 67337
+6YCa6L+H 67338
+J107Pz4i 67339
+IExhdGl0dWRl 67340
+YmJpbmc= 67341
+IGJhbmFuYXM= 67342
+cmVjdGlvbnM= 67343
+IFJhbmRhbGw= 67344
+TllTRQ== 67345
+IGFwcmVuZA== 67346
+LlJlc3BvbnNlRW50aXR5 67347
+IHRlc3REYXRh 67348
+XGU= 67349
+IFdL 67350
+LkFkZENvbXBvbmVudA== 67351
+X3J1bnM= 67352
+w6dvaXM= 67353
+LW1pbmk= 67354
+Zm9sZGVycw== 67355
+IGxvc2Vycw== 67356
+IFRvd2Vycw== 67357
+LUVuY29kaW5n 67358
+OnI= 67359
+Y2hvb3Nlcg== 67360
+IGZsYXR0ZW5lZA== 67361
+0YHRgtCw0L3QvtCy 67362
+CVB5 67363
+5Lic 67364
+IGRhbW5lZA== 67365
+RGVwdA== 67366
+d2Vk 67367
+IHBpc2M= 67368
+Z2llcw== 67369
+X2dhbWVz 67370
+Lm1hc3M= 67371
+KEVxdWFs 67372
+IG5hdGl2ZXM= 67373
+LnRodW1ibmFpbA== 67374
+bHRy 67375
+IGVxbA== 67376
+X2luY29tZQ== 67377
+CWhlYWRlcnM= 67378
+LWhhaXJlZA== 67379
+IG1lZGlvY3Jl 67380
+IFdpdGhkcmF3 67381
+IGJpdHRl 67382
+2b4= 67383
+PWlu 67384
+b2NrZWQ= 67385
+RnVsbHk= 67386
+IFRFTVBMQVRF 67387
+w7pkZQ== 67388
+T2Rk 67389
+aWxsZXo= 67390
+VGVsZXBob25l 67391
+IAoJCQo= 67392
+KCInIg== 67393
+X3NjaGVk 67394
+ZXJuZQ== 67395
+wr4= 67396
+LnBpY2s= 67397
+IE1TSQ== 67398
+CWZm 67399
+RGlzY292ZXJ5 67400
+IENPRA== 67401
+IExhY2s= 67402
+IHNlbnNhdGlvbmFs 67403
+bW90aA== 67404
+IExlZ2lzbGF0aXZl 67405
+0Y0= 67406
+IHZpYWJpbGl0eQ== 67407
+IGdldEVtYWls 67408
+IHVuYW5pbW91cw== 67409
+IHBlbGxldA== 67410
+ICIoKQ== 67411
+Y29hdA== 67412
+YWdvb24= 67413
+IEFMV0FZUw== 67414
+XHVD 67415
+X3N0ZG91dA== 67416
+QW5keQ== 67417
+IG5ld0xpc3Q= 67418
+IE1haGFyYXNodHJh 67419
+LF9f 67420
+PXVzZXJuYW1l 67421
+IHNjcmlwdGluZw== 67422
+IFRtaW4= 67423
+PEFjdGlvbg== 67424
+PXt9LA== 67425
+c3ltYm9scw== 67426
+IGZlbmNpbmc= 67427
+IHbDrWRlb3M= 67428
+IE1hdXJpY2U= 67429
+Y29ybGli 67430
+IGtlbQ== 67431
+In0pLAo= 67432
+IENsYXNzaWNhbA== 67433
+Y29sbGVnZQ== 67434
+IEhvbWVwYWdl 67435
+IH19Cgo= 67436
+X01zcA== 67437
+IENvbXBsYWludA== 67438
+IHNhbmR5 67439
+QXNpYW4= 67440
+X3NlcmlhbGl6ZXI= 67441
+IExhaA== 67442
+IGJ1ZHM= 67443
+b2xvZ25l 67444
+IHJlc3BvbnNlRGF0YQ== 67445
+b3BoaWxl 67446
+a2F0ZWdvcmk= 67447
+RW5kZWQ= 67448
+bGVjdGlj 67449
+IGNsYXdz 67450
+Li4uJyk7Cg== 67451
+IHBsYW5uZXJz 67452
+IFphaw== 67453
+IEdsb3Zlcw== 67454
+Iil9 67455
+IGZhc2hpb25lZA== 67456
+YnJvbg== 67457
+IG5ld2NvbWVycw== 67458
+dmFuYQ== 67459
+IHBpZXJ3cw== 67460
+UmVjZWlwdA== 67461
+LWVudg== 67462
+IHJ1dGE= 67463
+IEZhcm1lcg== 67464
+b2RvcmU= 67465
+bXVp 67466
+IHJvbWFudA== 67467
+IGluZmxpY3Q= 67468
+IHNlbWluYXJz 67469
+PWN2 67470
+KHN0b2Nr 67471
+IGV4dHJhY3Rvcg== 67472
+IFRpZmZhbnk= 67473
+X3V2 67474
+LmNvbnRhY3Rz 67475
+JyksKCc= 67476
+IHNvbHZlcw== 67477
+LkNvbm5lY3Rpb25TdHJpbmc= 67478
+L2RlYnVn 67479
+IEF2ZXJ5 67480
+44Oj 67481
+IG1heFg= 67482
+U3Bhcms= 67483
+PHRoaXM= 67484
+IGhpa2Vz 67485
+S2V5VmFsdWVQYWly 67486
+IFF1aWV0 67487
+c3RhYg== 67488
+IEtvbW1lbnQ= 67489
+bHljZXI= 67490
+IE1TTQ== 67491
+IExhbnRlcm4= 67492
+IGNvbmp1bnRv 67493
+aHNp 67494
+TVVMVA== 67495
+V2l0aER1cmF0aW9u 67496
+YXR0YWNoZWQ= 67497
+IEFzdGVy 67498
+CXBvaW50cw== 67499
+IFNpYmVy 67500
+IE1ldGhvZGlzdA== 67501
+L3NpdGVz 67502
+IGZvcnR1bmVz 67503
+UGFydGljaXBhbnQ= 67504
+IGN1c3RvbWVySWQ= 67505
+KWluaXQ= 67506
+X3NlcnZlcnM= 67507
+IHdlYXZl 67508
+IFRSQUlO 67509
+IGhhcmFzc2Vk 67510
+7J6R 67511
+YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXo= 67512
+X2Zhcg== 67513
+QWxjaGVteQ== 67514
+LmxpbmVXaWR0aA== 67515
+IHRoZXJhcGlzdHM= 67516
+IExvYg== 67517
+ZXF1aXBtZW50 67518
+IHJlY2h0 67519
+Lm1pcG1hcA== 67520
+Lm5pY2tuYW1l 67521
+IHVudG91Y2hlZA== 67522
+QUdPTg== 67523
+IFNhdWw= 67524
+IHdvcmtzaGVldHM= 67525
+IFZldGVyYW4= 67526
+b3VkZW4= 67527
+YWNsYXNz 67528
+X2FzbQ== 67529
+IHRlbXBs 67530
+IEV4cGVuc2U= 67531
+ZWlnaHQ= 67532
+I1NCQVRDSA== 67533
+em9uZXM= 67534
+LnBhcnRz 67535
+YXRyaWNl 67536
+bGF3cw== 67537
+dG9CZURlZmluZWQ= 67538
+RWZmZWN0aXZl 67539
+IFBpZWNlcw== 67540
+YXJ0aQ== 67541
+IGluaGliaXRvcnM= 67542
+CXBhcmFtZXRlcnM= 67543
+IHRlbGVncmFt 67544
+Ym91cmc= 67545
+X25vdGlmaWNhdGlvbnM= 67546
+IHBvc2l0aW9uYWw= 67547
+LWRlYWxz 67548
+IC8qLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQ== 67549
+IHNoYWRlcnM= 67550
+XT0k 67551
+IGRlY28= 67552
+ZXR5cGVz 67553
+Y2xhcmU= 67554
+IEdTTQ== 67555
+LnV0aWxpdHk= 67556
+VG9TdHI= 67557
+YWZlbg== 67558
+IFht 67559
+X3BhcnRpY2xlcw== 67560
+IGZsdWZmeQ== 67561
+TWFya2V0aW5n 67562
+IHN0YW5kaW5ncw== 67563
+PwoKCgoKCg== 67564
+VU1BTg== 67565
+X1BBWU1FTlQ= 67566
+CVRpbWU= 67567
+cmF3bg== 67568
+b3Jybw== 67569
+IGVlcnN0ZQ== 67570
+IHBhZ2VOdW0= 67571
+IENPUA== 67572
+IHBsYWdpYXI= 67573
+VXBsb2FkZXI= 67574
+JHNlbGY= 67575
+bGF0ZXI= 67576
+ZXJpYWxpemVk 67577
+IGFsaWduU2VsZg== 67578
+IOKZpQ== 67579
+LmFycmF5Y29weQ== 67580
+IG5vc290cm9z 67581
+CWdwaW8= 67582
+IHBsb3R0ZWQ= 67583
+aXRlcmF0aW9ucw== 67584
+IFJlbGF4 67585
+Y2lwaGVy 67586
+R2lmdA== 67587
+IEJldHQ= 67588
+IFhS 67589
+IHN0cmlwZWQ= 67590
+KGVudmlyb25tZW50 67591
+ZWdlcnM= 67592
+X1JFU0VSVkVE 67593
+IGvDtm5udGU= 67594
+IGluZmVycmVk 67595
+UGRm 67596
+c29ycnk= 67597
+cGFyYXRl 67598
+LkNvbmNhdA== 67599
+IGxpcGlk 67600
+LkJP 67601
+IG9ybQ== 67602
+IENvbnNvcnQ= 67603
+IG92ZXJzZWVpbmc= 67604
+IGFtYmVy 67605
+IHBsZXRob3Jh 67606
+CUFjdGlvbg== 67607
+cXVlcnF1ZQ== 67608
+IGh1aXM= 67609
+ID1b 67610
+IHByb2dyZXNzZXM= 67611
+anVkdWw= 67612
+IGNvbnZlcnRpYmxl 67613
+LmVtYmVkZGluZw== 67614
+IHs/Pgo= 67615
+IHJlZHV4 67616
+W2xhYmVs 67617
+OiIpOw0K 67618
+Lm9ubGluZQ== 67619
+cXVhcnRlcmVk 67620
+IHNjaG9vbGluZw== 67621
+ICJcIiI= 67622
+W2xpc3Q= 67623
+QWxhbg== 67624
+J30KCg== 67625
+eXBzdW0= 67626
+IHN0cml2aW5n 67627
+IFJlc3BvbnNpYmxl 67628
+IO2MjOydvA== 67629
+LkludFB0cg== 67630
+cmlrZXM= 67631
+ZW52aWxsZQ== 67632
+LnNldExheW91dE1hbmFnZXI= 67633
+IFBhc3Nlbmdlcg== 67634
+IGRpc29i 67635
+IGZlcm1lbnQ= 67636
+LlBpeGVs 67637
+Pign 67638
+IGNvbnRlbmRlcnM= 67639
+LWJldGE= 67640
+IGFmZmlybWF0aXZl 67641
+0L3QvtGB0YLQuA== 67642
+aWHDp8Ojbw== 67643
+UmVjb21tZW5k 67644
+aW1pdGVycw== 67645
+X3lsaW0= 67646
+IHN1YnNpZHk= 67647
+IGVyYg== 67648
+RmlsZVNpemU= 67649
+KHNy 67650
+IHBvb3Jlc3Q= 67651
+IHZvaQ== 67652
+U2lk 67653
+IHNsaXBz 67654
+X21pbnV0ZXM= 67655
+IHVn 67656
+xqFu 67657
+IG5hdMO8cmxpY2g= 67658
+44Oe 67659
+YmVhcg== 67660
+fV8kew== 67661
+IGZpc3Nl 67662
+IGRpc2NyaW1pbmF0b3J5 67663
+CQkgIAo= 67664
+IENvaWw= 67665
+X2lmYWNl 67666
+LnZlcg== 67667
+IG1pbmVk 67668
+IGFzc2Fzc2lu 67669
+IHVuc2V0dA== 67670
+LnJlcXVlc3Rz 67671
+LlVT 67672
+aW1hZ2VVcmw= 67673
+IHN0cmF0ZWdpY2FsbHk= 67674
+LWJhbmQ= 67675
+IHRyb3VzZXJz 67676
+WEQ= 67677
+ey8= 67678
+bGVjdGlvbnM= 67679
+YCgp 67680
+IlA= 67681
+IHNrZXRjaGVz 67682
+Y2xpZW50SWQ= 67683
+IFNyYw== 67684
+b3BlbmluZw== 67685
+UHV0aW4= 67686
+IFBvZXRyeQ== 67687
+IFBST00= 67688
+SUxMSVNFQ09ORFM= 67689
+IGJvb21pbmc= 67690
+U2ltaWxhcmx5 67691
+Omxhc3Q= 67692
+Lndvcmtlcg== 67693
+LmdldElE 67694
+LlNQ 67695
+c2VydmVycw== 67696
+b2N1bGFy 67697
+IHNwaW5hY2g= 67698
+SVNL 67699
+w7A= 67700
+J10pWw== 67701
+IGNoaWVmcw== 67702
+IGdyb8OfZW4= 67703
+cmlldmluZw== 67704
+LmFzaw== 67705
+LXN1cg== 67706
+VlY= 67707
+Lz4iOwo= 67708
+KHJlbW92ZQ== 67709
+IEtM 67710
+IEhhbGV5 67711
+QFJlc3BvbnNlQm9keQ== 67712
+LSY= 67713
+U3dhZ2dlcg== 67714
+IHpuYWo= 67715
+Lm9uRXJyb3I= 67716
+cmVnbw== 67717
+ZWxpeA== 67718
+IEFWQUlMQUJMRQ== 67719
+IHNlcGVydGk= 67720
+aWFw 67721
+X21pc3M= 67722
+IHN1cmdlcmllcw== 67723
+IGltcGFydGlhbA== 67724
+IENvdA== 67725
+YWt0aW9u 67726
+IHdoaXRlbGlzdA== 67727
+INCw0LI= 67728
+X21peA== 67729
+IEJlZHJvb21z 67730
+IHByaW1laXJh 67731
+IHNpZ25pZmljYQ== 67732
+L2J5 67733
+IHN0YXJ0bGluZw== 67734
+IFNQRQ== 67735
+dWNjacOzbg== 67736
+TnVtZXI= 67737
+SUJN 67738
+LmZyYWdtZW50cw== 67739
+UmVudA== 67740
+IHLDs3duaWXFvA== 67741
+LkFVVE8= 67742
+LkZvckVhY2g= 67743
+IFpodQ== 67744
+IEN1bm5pbmc= 67745
+IFdhcm4= 67746
+IEJI 67747
+X0RPV05MT0FE 67748
+QnlLZXk= 67749
+KeKAlA== 67750
+IGNvbW1hbmRl 67751
+X0FOUw== 67752
+Q2hyb24= 67753
+RklU 67754
+X2F0b21z 67755
+X1NLSVA= 67756
+IHZhcA== 67757
+KEJveA== 67758
+IGxkYXA= 67759
+dW5wcm9jZXNzYWJsZQ== 67760
+SVRJT05T 67761
+w6lyw6k= 67762
+LG1zZw== 67763
+IG91dHNldA== 67764
+IGRyaWxsZWQ= 67765
+IGTDqXZlbG9wcA== 67766
+IENvYXQ= 67767
+IEJlbmdoYXpp 67768
+SG9va3M= 67769
+IE1pc3NpbGU= 67770
+X1Jlc2V0 67771
+Pi88 67772
+ICItIgo= 67773
+KCk9PnsK 67774
+IEhvY2g= 67775
+LmF3YWl0 67776
+QWRyZXNzZQ== 67777
+IGRpZ2l0YWxseQ== 67778
+IlRoZXNl 67779
+b3BsZXZlbA== 67780
+IGFzeW5jaHJvbm91c2x5 67781
+IER1Y2tz 67782
+UkVTUA== 67783
+SVJP 67784
+LmZpeA== 67785
+IFJhZGFy 67786
+dmVydGlzZQ== 67787
+w61zZXM= 67788
+SXRlcmF0aW9ucw== 67789
+bW91c2V1cA== 67790
+bWludA== 67791
+RklSU1Q= 67792
+IHBheXBhbA== 67793
+X3VwZ3JhZGU= 67794
+V3JhcHBlZA== 67795
+Ow0NDQo= 67796
+K3M= 67797
+IGNhdGNoZXI= 67798
+Lk9w 67799
+X05PVElDRQ== 67800
+cGFyYWxsZWxlZA== 67801
+Q1ZF 67802
+Zm9yZ290 67803
+IHBhbm9y 67804
+IG9mZnJl 67805
+IGVub3JtZQ== 67806
+KCkNCg0KDQo= 67807
+YWRpYXRvcg== 67808
+YWRkQWxs 67809
+W3RleHQ= 67810
+KHV0aWw= 67811
+LlByb21pc2U= 67812
+YW5pc20= 67813
+X29mZmVy 67814
+RU5ESUY= 67815
+ZG90cw== 67816
+IEtybw== 67817
+IHNwZWxsZWQ= 67818
+IGFwcE5hbWU= 67819
+QWN0aXZpdGllcw== 67820
+IFNwaWNl 67821
+ZWF0ZWQ= 67822
+IHNrYg== 67823
+IGvDtno= 67824
+IHRvcmNodmlzaW9u 67825
+Q2l2aWw= 67826
+IGhvcw== 67827
+X0hlbHBlcg== 67828
+acSH 67829
+X3Vuc2lnbmVk 67830
+6K66 67831
+4oCcQW5k 67832
+CWtmcmVl 67833
+LnJhaXNl 67834
+IGNhbGxl 67835
+IExhbnM= 67836
+IGFudGln 67837
+XCI+IjsK 67838
+YnJhbmNoZXM= 67839
+bG9ncmFkb3Vybw== 67840
+IHN0YWxsZWQ= 67841
+YWx5emVk 67842
+RGVyaXZlZA== 67843
+Om5vdA== 67844
+IGdpYmk= 67845
+IFR1cm5idWxs 67846
+LnVzZXJEYXRh 67847
+KFRhYmxl 67848
+IERlcml2ZWQ= 67849
+CWNvbmY= 67850
+IGFsZ2Fl 67851
+IGthZmth 67852
+IG5ha25l 67853
+IEhlYXRpbmc= 67854
+IFRpcmU= 67855
+YWR1bHQ= 67856
+IERhdGVGb3JtYXQ= 67857
+b3Bj 67858
+ZW5zYWdlbQ== 67859
+LlRvb2xz 67860
+Lk1peGVkUmVhbGl0eQ== 67861
+cmFp 67862
+IFdvbmRlcmZ1bA== 67863
+KV0pCgo= 67864
+aWFyZA== 67865
+VGhlbWVQcm92aWRlcg== 67866
+IGV2ZW50RGF0YQ== 67867
+I2Fk 67868
+LmdldFVybA== 67869
+IHRvb2xib3g= 67870
+IG92ZXJyaWRpbmc= 67871
+Q09OVEVOVA== 67872
+LXByb2R1Y3Rz 67873
+d2lsZA== 67874
+X2V4cGFuZA== 67875
+aW5haXJl 67876
+QnJ1 67877
+b2xscw== 67878
+INGN0YLQvg== 67879
+Y3Rlc3Q= 67880
+IHB1bmNoaW5n 67881
+RFJW 67882
+X3NwYWNlcw== 67883
+IFN1cGVyaW50ZW5kZW50 67884
+IGxheXVp 67885
+KGZlZWQ= 67886
+dG9k 67887
+IHZo 67888
+IGluc3VsdHM= 67889
+IFN1Yw== 67890
+aWtz 67891
+VG9ycmVudA== 67892
+Lmty 67893
+X2FjdGl2YXRl 67894
+k5g= 67895
+amVl 67896
+aW1lcnM= 67897
+cnVpdHM= 67898
+IHByZWNpbmN0 67899
+LlJlcXVpcmVk 67900
+IHNhdGlzZmllcw== 67901
+IGNoZWVyaW5n 67902
+IGFycml2 67903
+CXJlYw== 67904
+IENvYmI= 67905
+IGNvbmN1c3Npb24= 67906
+dWpldA== 67907
+Tm90Rm91bmRFcnJvcg== 67908
+SmVhbg== 67909
+IHBob3Rvbg== 67910
+Pl8= 67911
+IEJhcmNs 67912
+YW1k 67913
+ICV9Cg== 67914
+PVwiIw== 67915
+SW50ZXJu 67916
+IENvbW1pdHRlZXM= 67917
+LmJlbA== 67918
+bnVtbWVy 67919
+IGxldml0cmE= 67920
+X3ZlcmJvc2U= 67921
+KGNvZGVj 67922
+IFN0aXRjaA== 67923
+PSIiOw0K 67924
+IHJlZ3JldHM= 67925
+IG11bHRpbmF0aW9uYWw= 67926
+IHJlc3RydWN0dXJpbmc= 67927
+IE1FTg== 67928
+eW5jaHJvbml6YXRpb24= 67929
+IG1lZGlhdG9y 67930
+a2ly 67931
+UHJpbmNl 67932
+IGluaGliaXQ= 67933
+IGdvc3Q= 67934
+IE1NQw== 67935
+IHNpZGVk 67936
+X2Rhcms= 67937
+KGJsb2I= 67938
+PkxvcmVt 67939
+PiIpOwoK 67940
+c2Nhbm5lcg== 67941
+OmlubGluZQ== 67942
+LmNhcm91c2Vs 67943
+b3RpZGU= 67944
+IFdXVw== 67945
+IGRydW1tZXI= 67946
+LmZhbWlseQ== 67947
+IG9yZGluYWw= 67948
+5b2T5YmN 67949
+IGRpcGxvbWF0 67950
+IHN1cHBsZW1lbnRhbA== 67951
+IGRhZsO8cg== 67952
+IEZBVA== 67953
+IFlvbmc= 67954
+aGFwdXM= 67955
+IEp1bmN0aW9u 67956
+emw= 67957
+LlVzZUZvbnQ= 67958
+IGhhc2hNYXA= 67959
+LVJl 67960
+ICIqKg== 67961
+LnNldEJhY2tncm91bmRSZXNvdXJjZQ== 67962
+IGltcGVyZmVjdA== 67963
+LkZpbmRFbGVtZW50 67964
+IExMUA== 67965
+IG11cmRlcmVy 67966
+IHRleHRl 67967
+aXPDqQ== 67968
+YWN0aWNz 67969
+VG95 67970
+R3JhbnQ= 67971
+X2Rpc2Nvbm5lY3Q= 67972
+IGJyYXNpbGU= 67973
+IGVtZXJnZW5jaWVz 67974
+X2x2bA== 67975
+IEAiXA== 67976
+fSovCgo= 67977
+X1NPQw== 67978
+Tk9STUFM 67979
+L2dhbGxlcnk= 67980
+YXNpY3M= 67981
+RXZlbnR1YWxseQ== 67982
+IGdyYXA= 67983
+IGNyaXN0 67984
+IHByb2plY3Rvcg== 67985
+IGdlb21ldA== 67986
+IGRldGVjdG9ycw== 67987
+IGNyaXRpY2l6aW5n 67988
+IGNoaWNrcw== 67989
+IEhpag== 67990
+L2ZyYW1l 67991
+LW1vbmV5 67992
+ImRlc2NyaXB0aW9u 67993
+IHRleHRpbmc= 67994
+IHNleGlzbQ== 67995
+IE1WQw== 67996
+LWdlbmVyYWw= 67997
+IG92ZXJ0dXJuZWQ= 67998
+IG1vdmVy 67999
+IFBocmFzZQ== 68000
+IFVOVVNFRA== 68001
+IEVudHJlcHJlbmV1cg== 68002
+VEVHUg== 68003
+ZWxsaXBzZQ== 68004
+TWFya2Rvd24= 68005
+X18oKg== 68006
+IEthcmRhc2hpYW4= 68007
+cHBlbGlu 68008
+IEdvdHQ= 68009
+IGR5c3Q= 68010
+IFJlZHV4 68011
+SG9sYQ== 68012
+PyEKCg== 68013
+IFJlYWx0eQ== 68014
+U3VydmV5 68015
+IE1jR3JlZ29y 68016
+X2hhbmRsZXM= 68017
+IGludHJpZ3VlZA== 68018
+IGdldFVybA== 68019
+IGRldmlzZWQ= 68020
+IFBheXBhbA== 68021
+IHRoaW5rZXJz 68022
+IFN0YXR1c0Jhcg== 68023
+IEVsaWc= 68024
+IGNvbXBsZXhlcw== 68025
+INC60L7QtA== 68026
+c3RvY2tz 68027
+LWluaXRpYWxpemVk 68028
+IHNjYW5kYWxz 68029
+IGNvbWZvcnRpbmc= 68030
+IFJvY2tz 68031
+IGxpb25z 68032
+bG9jYXRvcg== 68033
+IV0= 68034
+IFBvbnk= 68035
+RGF0dW0= 68036
+IEZldA== 68037
+IG9mZnNldFk= 68038
+IFJFVFVSTlM= 68039
+IGJyZWFjaGVz 68040
+VGltZUludGVydmFs 68041
+IHZpZWxlbg== 68042
+VmVyc2U= 68043
+IGthZA== 68044
+IGdhYXQ= 68045
+KCItIiw= 68046
+IG1vdXNlWQ== 68047
+KFBvc3Q= 68048
+IFVo 68049
+ZWxpZ2libGU= 68050
+YWx0YQ== 68051
+IHV0aWxpc2U= 68052
+ZmFjdHM= 68053
+SElQ 68054
+IG9yY2hlc3RyYQ== 68055
+IFNwYWNlcw== 68056
+aXNwaWVs 68057
+IG11bHRpcGFydA== 68058
+LW9wYWNpdHk= 68059
+U2VhcmNoaW5n 68060
+IFBsYXRv 68061
+VmlzaW9u 68062
+IGx1bA== 68063
+IEFwcHJlbnQ= 68064
+57uc 68065
+W3JhbmQ= 68066
+LWRpc2FibGVk 68067
+IEZsZXRjaGVy 68068
+IHRyYW5zcG9ydHM= 68069
+JmU= 68070
+dHBhcmFt 68071
+cG9sZQ== 68072
+IEJ1ZW5vcw== 68073
+w7pibGljYQ== 68074
+aW50ZXJhY3Rpb24= 68075
+IGhvYg== 68076
+IGluZmxpY3RlZA== 68077
+bGl0ZQ== 68078
+IFBBUkFNRVRFUlM= 68079
+IFN0YW0= 68080
+KG14 68081
+IEF1dG9NYXBwZXI= 68082
+aWxpYW4= 68083
+IHF1aXR0aW5n 68084
+PXt9 68085
+IEpvbmFz 68086
+IGxvY2FsaXR5 68087
+IFNpbGVuY2U= 68088
+X2ZsdXR0ZXI= 68089
+IG5icg== 68090
+bGl0ZXI= 68091
+IE5vcm1hbGl6ZQ== 68092
+IGFjdW0= 68093
+QnJhaW5z 68094
+ZXF1aXA= 68095
+XT09Ig== 68096
+IGRlc3Rpbm8= 68097
+IERpb3M= 68098
+Lk11bHRpbGluZQ== 68099
+YWdyZWU= 68100
+KQoKCgoKCgoK 68101
+IHN0ZWxsZW4= 68102
+IGN1cmx5 68103
+Lk9mZmljZQ== 68104
+LWFib3V0 68105
+ICcuLy4uLy4uLw== 68106
+IFVUSUw= 68107
+IFJw 68108
+4oC6 68109
+IG1hcGE= 68110
+LkRP 68111
+YWdhbA== 68112
+LndpbmRvd3M= 68113
+IGFkdmVyc2VseQ== 68114
+Llh0cmFMYXlvdXQ= 68115
+bWVkaWNhbA== 68116
+IHVuc3Vy 68117
+dGhlcm1hbA== 68118
+Lk1vZGVsQWRtaW4= 68119
+LmFjdHVhbA== 68120
+c2V0Q29udGVudA== 68121
+IHBvc3RmaXg= 68122
+UFc= 68123
+IENoYWlycw== 68124
+IGdyYW1t 68125
+IGNvbXBsaWM= 68126
+RElTUExBWQ== 68127
+IE1vb3Nl 68128
+aGFhcg== 68129
+QUxFUw== 68130
+IGxkYQ== 68131
+LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqCg== 68132
+ICcvJwo= 68133
+QVNO 68134
+IEJhcmJlcg== 68135
+IG1haW5z 68136
+IG1haW5XaW5kb3c= 68137
+0LDQt9Cy0LDQvdC40LU= 68138
+IGVtYW4= 68139
+X2NvbGxlY3Q= 68140
+IHJlbXBs 68141
+LnRheA== 68142
+YmFo 68143
+IFBzeWNoaWF0cnk= 68144
+RGVzY3JpcHRpb25z 68145
+IGV4ZWN1dGlvbnM= 68146
+CUxPR0dFUg== 68147
+JkU= 68148
+OmJn 68149
+IGtk 68150
+LmRhbWFnZQ== 68151
+IG5pc2k= 68152
+5qy+ 68153
+IENhbWVs 68154
+aW5pZGFk 68155
+IExpZmVzdHlsZQ== 68156
+IFRISVJE 68157
+IOCkuA== 68158
+IHBvbHlnb25z 68159
+IGF0dGlyZQ== 68160
+YWxlbnQ= 68161
+X1VTQVJU 68162
+IG1hbGFyaWE= 68163
+bG9icw== 68164
+IF19Cg== 68165
+KHJlZ2lzdGVy 68166
+LXBz 68167
+X29wdGltaXplcg== 68168
+KEFMT0FE 68169
+IHZhcGU= 68170
+LnNvY2s= 68171
+kOiXjw== 68172
+JHByb2R1Y3Q= 68173
+KEVSUg== 68174
+Y2twdA== 68175
+YnVxdWVycXVl 68176
+IH19Ij57ew== 68177
+IEhpdmU= 68178
+IE1hc2g= 68179
+IEVwaWQ= 68180
+IEx1bmQ= 68181
+X3RyYW5zYWN0aW9ucw== 68182
+IHN1YmNsYXNzZXM= 68183
+RWFzZQ== 68184
+X0Nsb3Nl 68185
+X2NoZWNrb3V0 68186
+IicsCg== 68187
+U2VjdG9y 68188
+b2lzZQ== 68189
+LXRlbXA= 68190
+KSIp 68191
+aHlwZXI= 68192
+ZXJjdWw= 68193
+c3RhY2twYXRo 68194
+X05S 68195
+SUxMRQ== 68196
+IHJlbGFjacOzbg== 68197
+IE1hdHRo 68198
+X0NPREVD 68199
+IGhhbmRsZUVycm9y 68200
+X09uZQ== 68201
+YWxib3Jn 68202
+CQkgICAgICAgICA= 68203
+IFVwbG9hZGVk 68204
+Tm0= 68205
+Ly89 68206
+KlM= 68207
+X0VYUEVDVA== 68208
+IGZyYWN0aW9uYWw= 68209
+Q291 68210
+IHNjYWxhYmxl 68211
+IENJRA== 68212
+PFBvc3Q= 68213
+CXRocmVhZA== 68214
+aGFyZHdhcmU= 68215
+LmNoYW5nZWQ= 68216
+LkVsZW1lbnRBdA== 68217
+IGFydGljdWxhdGU= 68218
+ZWRvcmVz 68219
+RXN0YWJsaXNo 68220
+PXtbCg== 68221
+ISo= 68222
+IFNK 68223
+TWV0ZXI= 68224
+LnJlcA== 68225
+IFZPTA== 68226
+IE91 68227
+bMOp 68228
+IHBuZXVtb25pYQ== 68229
+X3BpY2tlcg== 68230
+ZXhwbG8= 68231
+IOyekQ== 68232
+IFN3aW0= 68233
+ZHJlc3M= 68234
+c3Rvcmllcw== 68235
+L25hdg== 68236
+VmE= 68237
+INit 68238
+L3NlbGY= 68239
+IHZldGVyaW5hcnk= 68240
+KERlbnNl 68241
+CWJvb3N0 68242
+IElzTm90 68243
+IHRydXN0aW5n 68244
+IExlYmFuZXNl 68245
+JHJlcXVlc3Q= 68246
+eGZmZmZmZg== 68247
+X3JlbW92ZWQ= 68248
+IHVwZGF0ZXI= 68249
+2KfY 68250
+RE9XTkxPQUQ= 68251
+IEltbWVkaWF0ZWx5 68252
+IHJvYW1pbmc= 68253
+IEhvcm55 68254
+LmNvZGlnbw== 68255
+IEZpZ3VyZXM= 68256
+IHBhbnRyeQ== 68257
+KHNhbXBsZXM= 68258
+IEJFTA== 68259
+IHNldENvbnRlbnQ= 68260
+dW1vcg== 68261
+5pSv5LuY 68262
+X01JTlVT 68263
+IHVubGVhc2hlZA== 68264
+IHByb2ZpY2llbnQ= 68265
+CVVJ 68266
+LkV4Y2VwdGlvbnM= 68267
+IHNyYW5k 68268
+UHJlc3N1cmU= 68269
+LmFzc2VydE5vdA== 68270
+KHNlcmlhbGl6ZXI= 68271
+CXR4dA== 68272
+UG9ydHM= 68273
+IG5lY2VzYXJpbw== 68274
+IHJldml2ZWQ= 68275
+IG1pbGVzdG9uZXM= 68276
+Y2Fubw== 68277
+RXNjb3J0 68278
+IGVudGVuZA== 68279
+QVBF 68280
+aXBj 68281
+LmF0b21pYw== 68282
+IFBlbWI= 68283
+IHJlYWNoYWJsZQ== 68284
+IGthbnM= 68285
+d2hhdGV2ZXI= 68286
+TGlzdEJveA== 68287
+IENseQ== 68288
+cGljdHVyZWQ= 68289
+IEVsZWN0cm8= 68290
+YWJpYw== 68291
+IGZ1bms= 68292
+IGRpYXJyaGVh 68293
+IOeZ 68294
+IFNvbHZlcg== 68295
+IEJhYw== 68296
+IHNrZWxldGFs 68297
+IO+C 68298
+IEZpbGVOb3RGb3VuZEV4Y2VwdGlvbg== 68299
+ICIpWw== 68300
+IFRyYWl0 68301
+dWRva3U= 68302
+LS0tLS0tLS0tLQoK 68303
+QW5nZWw= 68304
+YWdy 68305
+IHNpbXBsZXM= 68306
+IGJhbmM= 68307
+IEFsZXJ0cw== 68308
+IENvbmZpcm1hdGlvbg== 68309
+IEFseQ== 68310
+Y2FsbGJhY2tz 68311
+IGZ1bmt0aW9u 68312
+IGdyYWZ0 68313
+WVBE 68314
+L0FGUA== 68315
+V0s= 68316
+a3Vy 68317
+Q0tFVA== 68318
+IFNsYXRl 68319
+IFN0ZWY= 68320
+CVJ1bnRpbWU= 68321
+IEVTTA== 68322
+IHByZWFjaGluZw== 68323
+QnJvYWQ= 68324
+IHNldERlc2NyaXB0aW9u 68325
+YXplbA== 68326
+PQoK 68327
+IGphY2twb3Q= 68328
+IC8vIQo= 68329
+dmlhcg== 68330
+IGVpZA== 68331
+IGF0aXY= 68332
+IHJlZmxleGl2aXR5 68333
+Lkxpc3Rlbg== 68334
+IGx5cmlj 68335
+IHZlcms= 68336
+IGNvbGx1c2lvbg== 68337
+YXphYXI= 68338
+IHdpbms= 68339
+IE11ZA== 68340
+L29wZXJhdG9y 68341
+IGV4dGVybmFsbHk= 68342
+IGJhcnU= 68343
+IGJhc2tldHM= 68344
+dGlja2Vy 68345
+KHBob3Rv 68346
+X2V2ZW4= 68347
+IHNwb25nZQ== 68348
+IGhlaWdodEZvcg== 68349
+Z2V0Q2hpbGQ= 68350
+X2Zvcm1hdHM= 68351
+LkV4ZWN1dGlvbg== 68352
+X1Byb3BlcnR5 68353
+cmVwb3M= 68354
+dGhlaWQ= 68355
+X1BIWVM= 68356
+IGV2aWRlbmNlZA== 68357
+LmhlYWRpbmc= 68358
+QW5ndWxhcg== 68359
+IFZlbnVl 68360
+IEhPVVNF 68361
+IEVzdG9uaWE= 68362
+0LzQsA== 68363
+cmdhbml6YXRpb24= 68364
+L2RldmljZQ== 68365
+SVJS 68366
+X3RoZW4= 68367
+YXJlbQ== 68368
+IGFnZ2k= 68369
+RU1PTg== 68370
+INGB0Lo= 68371
+IEVwaA== 68372
+IE1TUA== 68373
+IGxvZ2ZpbGU= 68374
+LWxlYWRpbmc= 68375
+YXRoYW0= 68376
+IHVubWF0Y2hlZA== 68377
+IFNpdHVhdGlvbg== 68378
+KCl7fQo= 68379
+CWNoYW5nZQ== 68380
+IENoYXB0ZXJz 68381
+LlJFU1VMVA== 68382
+IG9l 68383
+RVRZ 68384
+X3ZpZA== 68385
+Li4uJyw= 68386
+IGFsdGVybmF0aXZlbHk= 68387
+X1dT 68388
+IFBsZW50eQ== 68389
+IENyYXRl 68390
+YXNpb25hbGx5 68391
+IExhd24= 68392
+IElNTQ== 68393
+IFZhbml0eQ== 68394
+IFZvb3I= 68395
+5ZCv 68396
+IG1pag== 68397
+c3RlcnJlaWNo 68398
+IFJERg== 68399
+IENyaXRlcmlvbg== 68400
+Lkludg== 68401
+LlN0ZXA= 68402
+X0ZyYW1l 68403
+IEVOVU0= 68404
+774= 68405
+SG9wZWZ1bGx5 68406
+TmF2Q29udHJvbGxlcg== 68407
+IOy2lOqwgA== 68408
+IFZhZGVy 68409
+IHJ1dGhsZXNz 68410
+JGtleQ== 68411
+Y2t0 68412
+aW5lbQ== 68413
+aWxlbnQ= 68414
+IHJlc3BlY3Rpbmc= 68415
+bGNk 68416
+KGJ0 68417
+IEVsbGlvdA== 68418
+IFVuaWRvcw== 68419
+KENoYW5uZWw= 68420
+IGVpdXM= 68421
+IGFzdHJvbmF1dHM= 68422
+IEhvc3Rpbmc= 68423
+IGNhc3Rl 68424
+IGhhcm1lZA== 68425
+b3VwbGVz 68426
+PFJvbGU= 68427
+LkRlc2M= 68428
+LWNvdXJzZQ== 68429
+IENhcnRvb24= 68430
+aWxlZ2Vk 68431
+IG15c3RpY2Fs 68432
+IOex 68433
+KGZpZWxkTmFtZQ== 68434
+V0lUSE9VVA== 68435
+LHN1bQ== 68436
+J2FjYw== 68437
+CXJvd3M= 68438
+IGdldFBhc3N3b3Jk 68439
+IGNvY2tz 68440
+cGl2b3Q= 68441
+bmFtZW9m 68442
+IGZlYXNpYmlsaXR5 68443
+IGNvbW1lbmNlbWVudA== 68444
+IERvbWU= 68445
+LkpTT05FeGNlcHRpb24= 68446
+IEh5ZGVyYWJhZA== 68447
+IExpc3RlZA== 68448
+IENvbXB1dGVycw== 68449
+W3ZhbA== 68450
+IGlzb3Q= 68451
+CXdpbg== 68452
+IG5laA== 68453
+KElOVA== 68454
+UmVwdWJsaWNhbg== 68455
+INC/0YDQvtCy0LXRgA== 68456
+RmF0 68457
+IGVxdWl2 68458
+IERhdHVt 68459
+YXN0aQ== 68460
+IHNvaWxz 68461
+dXB1bmN0dXJl 68462
+cHJlc3NpdmU= 68463
+XykpOwo= 68464
+Lldhcm4= 68465
+IGhhcmI= 68466
+Lm9uT3B0aW9uc0l0ZW1TZWxlY3RlZA== 68467
+IGNsb3du 68468
+IE9XTg== 68469
+IGV4YW1pbmF0aW9ucw== 68470
+IEV4aXN0aW5n 68471
+am91cmQ= 68472
+IGNvbmNlc3Npb24= 68473
+IEZpcmViYXNlRGF0YWJhc2U= 68474
+IHVwdGFrZQ== 68475
+IGVubGlzdGVk 68476
+IENhcmI= 68477
+IGZ1cw== 68478
+IGFidXNpbmc= 68479
+LnByb2R1Y3Rpb24= 68480
+eW5jaA== 68481
+aWx5bg== 68482
+cmVmdW5k 68483
+LWhhdmU= 68484
+KGFyZ3VtZW50 68485
+IGZzY2FuZg== 68486
+Y29uY2VwdA== 68487
+X0xBTkU= 68488
+IGVuZ2FnZXM= 68489
+IEV4YWN0bHk= 68490
+YWx0dXJh 68491
+KEFkZHJlc3M= 68492
+IHN5bm9ueW1vdXM= 68493
+VG93bg== 68494
+IFBheW5l 68495
+cm9pdA== 68496
+cGVyaWVuY2Vz 68497
+cGFydGljbGVz 68498
+X2Jk 68499
+IEdyaW5kZXI= 68500
+TWFuYWdlZE9iamVjdENvbnRleHQ= 68501
+KGJi 68502
+W3RtcA== 68503
+LWNvbnM= 68504
+YW9rZQ== 68505
+IHN0ZXdhcmQ= 68506
+IFZpZXdDaGlsZA== 68507
+LmRyYXdMaW5l 68508
+IFdBUk4= 68509
+IHB1ZXM= 68510
+bW9kYXRpb24= 68511
+IHpz 68512
+QWdyZWdhcg== 68513
+ICIuIiw= 68514
+LmNlbnRlclk= 68515
+IGZsYXdsZXNz 68516
+IGRldXRzY2hl 68517
+IExpcXU= 68518
+aXRlaXQ= 68519
+X2ludHJv 68520
+LXVzZWQ= 68521
+LHRhcmdldA== 68522
+IEhERA== 68523
+ICUr 68524
+b3JlbnQ= 68525
+L09iamVjdA== 68526
+IGRpc3J1cHRlZA== 68527
+w6J0ZQ== 68528
+IGFjY2Vzbw== 68529
+IExvd2VzdA== 68530
+IFdpbGxpYW1zb24= 68531
+X2NyZWF0b3I= 68532
+U2VsbA== 68533
+IEJVRw== 68534
+X3JlcHI= 68535
+6ICM 68536
+IGFyY2hhZW9sb2dpY2Fs 68537
+b21lcnM= 68538
+IEVsb24= 68539
+IFNjcm9sbFZpZXc= 68540
+IGxpbmVzdHlsZQ== 68541
+aXNSZXF1aXJlZA== 68542
+aXNrbw== 68543
+X3Ji 68544
+ZsO8aA== 68545
+ICAgCQk= 68546
+KGRlZmluZQ== 68547
+IFNDTQ== 68548
+IERJRkY= 68549
+X2Jz 68550
+cGVuZGljdWxhcg== 68551
+cGFjZWQ= 68552
+IEpvdXJuYWxpc20= 68553
+LkpTT05BcnJheQ== 68554
+IERhdGFBY2Nlc3M= 68555
+TWFyaWE= 68556
+IELDvA== 68557
+SEVMTA== 68558
+IE1BVFJJWA== 68559
+T0xUSVA= 68560
+YXBzaWJsZQ== 68561
+XToKCg== 68562
+bmFpcmVz 68563
+X2hpc3RvZ3JhbQ== 68564
+IGZsYWly 68565
+aGF2aW5n 68566
+IFVzZXJJRA== 68567
+IFJlbGF0aW9uc2hpcHM= 68568
+UmVwbGFjZW1lbnQ= 68569
+IHJzYQ== 68570
+IGVucmljaGVk 68571
+IHJlaGVhcnM= 68572
+IHfDpHJl 68573
+IGxvYWRlcnM= 68574
+IEVsZW5h 68575
+IFdhdGNoaW5n 68576
+CWpvYg== 68577
+TkVXUw== 68578
+L3NldHRpbmdzZGlhbG9n 68579
+aXZlYw== 68580
+X0VRVUFMUw== 68581
+VGVtcGxhdGVOYW1l 68582
+IEJPRFk= 68583
+LmFkYXB0ZXJz 68584
+d29mZg== 68585
+Y29tYm9Cb3g= 68586
+Lk5ld1JlYWRlcg== 68587
+fHJlcXVpcmVk 68588
+X3Byb2JhYmlsaXR5 68589
+ICg6Og== 68590
+IGNyYXo= 68591
+IFVG 68592
+VGVzdElk 68593
+IGVzcGVjaWZpYw== 68594
+aWJlbA== 68595
+cGF3bg== 68596
+640= 68597
+IE1hcnI= 68598
+IHN0YXJ0WA== 68599
+X3NpdGVz 68600
+Lz4KCg== 68601
+IGltcGxpY2F0ZWQ= 68602
+KGlubmVy 68603
+IGVmZm9ydGxlc3NseQ== 68604
+wq10aW9u 68605
+YXdhcmQ= 68606
+IGhvdmVyaW5n 68607
+cHJp 68608
+JHRlbXBsYXRl 68609
+dWFuZw== 68610
+IGF1dG9tYXRl 68611
+ICoqLwoK 68612
+aWJsaQ== 68613
+IG51dHJpdA== 68614
+KS4o 68615
+ZWVlZQ== 68616
+QXBpQ29udHJvbGxlcg== 68617
+L293bA== 68618
+IFdvbWVucw== 68619
+LWRvdWJsZQ== 68620
+IE9yZGVyaW5n 68621
+c3Bt 68622
+TW9kZXI= 68623
+Lk5hdGl2ZQ== 68624
+IEJlcmdlcg== 68625
+ZXNkYQ== 68626
+ZXJkaW5ncw== 68627
+X2VjaG8= 68628
+IHN1bW1hcml6ZWQ= 68629
+IGVsZXZhdGU= 68630
+X3F1YWQ= 68631
+IHdvbw== 68632
+dWxhbnQ= 68633
+UHJvcGVydHlWYWx1ZQ== 68634
+IHBsaXN0 68635
+IEdSQVBI 68636
+IFNUREVSUg== 68637
+KScpLg== 68638
+QXNzZXJ0aW9u 68639
+bGlua3BsYWlu 68640
+IGFjY2VsZXJhdGluZw== 68641
+IHNuaXBwZXRz 68642
+IFNhbG1hbg== 68643
+YWJjZA== 68644
+LmVjaG8= 68645
+X2lkeHM= 68646
+IHBjbQ== 68647
+b2NhbHlwdGlj 68648
+X2Nvb3JkaW5hdGU= 68649
+KHByZXZpb3Vz 68650
+LXNob3J0 68651
+LnN1YnRyYWN0 68652
+KEJpdA== 68653
+P3Q= 68654
+IE5vdGVib29r 68655
+IEthdHJpbmE= 68656
+aWZmZXJlbnRpYWw= 68657
+c2lsZW50 68658
+dGVybWluYXRlZA== 68659
+IHRhbmdlbnQ= 68660
+OlQ= 68661
+IGNvc8Os 68662
+IHBhcmFub2lk 68663
+IGRlcHJpdmF0aW9u 68664
+L3t7JA== 68665
+IGhlbWlzcGhlcmU= 68666
+IHJlaW5zdA== 68667
+ZWN6 68668
+dGVycg== 68669
+IFBMQVRGT1JN 68670
+IHRyb3VibGVzaG9vdGluZw== 68671
+IHZhbGlkYXRpbmc= 68672
+IE9yaW9u 68673
+YXN1cmluZw== 68674
+0LjQvdCw 68675
+IGh1YnM= 68676
+YXJlbmNl 68677
+IENoYWxsZW5nZXM= 68678
+IHplYWw= 68679
+U3Bv 68680
+IFNjcmVlbnM= 68681
+IG11bmRhbmU= 68682
+IER1bms= 68683
+ICMjIyMj 68684
+IFJFRkVS 68685
+b25ldA== 68686
+LmNhc2U= 68687
+LXBvc2l0aXZl 68688
+SU5URUdFUg== 68689
+Lm1ldHJvTGFiZWw= 68690
+U0FO 68691
+IHByb2Zlc3Npb25z 68692
+IHR5cmVz 68693
+UGFsaW5kcm9tZQ== 68694
+IFNFQ09ORA== 68695
+LkdSRUVO 68696
+IFNuYXBzaG90 68697
+VUxL 68698
+X2NpZA== 68699
+JEk= 68700
+IGN1bnQ= 68701
+ZXN0cnVjdGlvbg== 68702
+UHN5Y2g= 68703
+IEh0dHBSZXNwb25zZU1lc3NhZ2U= 68704
+ZW1iYWxp 68705
+X3Jldmlld3M= 68706
+U2VsZWN0YWJsZQ== 68707
+X1BSRVNFTlQ= 68708
+IEpzb25SZXF1ZXN0 68709
+IFRoZXRh 68710
+X2ludGVycA== 68711
+UmFzdGVy 68712
+I2Vycm9y 68713
+LG9iag== 68714
+IHR3ZWV0aW5n 68715
+X0dQVQ== 68716
+X3RvZGF5 68717
+X3NlY3M= 68718
+bmVlcw== 68719
+LmdldFN5c3RlbVNlcnZpY2U= 68720
+IHZub2Rl 68721
+IFJlZ3VsYXRvcnk= 68722
+IEZhaHJlbmhlaXQ= 68723
+IHNjYWxlcg== 68724
+X21hcmtldA== 68725
+LmFsbG9jYXRl 68726
+dGlja2V0cw== 68727
+YXRhaw== 68728
+IFBpa2U= 68729
+IExvcg== 68730
+ZGl0b3I= 68731
+IGxvY2F0aW9uTWFuYWdlcg== 68732
+IGluaXREYXRh 68733
+IFdhcmU= 68734
+IEluY2lkZW50 68735
+IGNvbW1lbnRhdG9y 68736
+dWVudGVz 68737
+IEluZmxhdGU= 68738
+IOWG 68739
+IGFjdGl2aWRhZA== 68740
+IEJq 68741
+RU5VTQ== 68742
+IHJldXNlZA== 68743
+INC80LXQvQ== 68744
+IHNlc2nDs24= 68745
+LicpKTsK 68746
+44GT44KT 68747
+L2dl 68748
+YWdhaW5zdA== 68749
+LGxpbmU= 68750
+KFVubWFuYWdlZFR5cGU= 68751
+KT0i 68752
+IHl0 68753
+dWRpYW50ZXM= 68754
+cm9sbGFibGU= 68755
+5aGr 68756
+X0NPTExFQ1RJT04= 68757
+b2xpcw== 68758
+dW1iZXJsYW5k 68759
+KCIiIgo= 68760
+IHppcHBlcg== 68761
+DAo= 68762
+L3NpZ251cA== 68763
+IHN0cmFuZHM= 68764
+cmF4 68765
+LmNvbnN1bWVy 68766
+IHVuY2VydGFpbnRpZXM= 68767
+RGVidWdFbmFibGVk 68768
+IGRlZmVhdHM= 68769
+IGRydg== 68770
+IHJlYWxpc20= 68771
+YWdyYW1z 68772
+WEU= 68773
+IEhhemFyZA== 68774
+LW5lZWRlZA== 68775
+KHRhYmxlVmlldw== 68776
+LkVsZW1lbnRz 68777
+IFNBUg== 68778
+CWVsZW0= 68779
+KHBrZw== 68780
+U2ltb24= 68781
+VGludENvbG9y 68782
+IFBoZW4= 68783
+X0VNUA== 68784
+2Iw= 68785
+Pz4KCgo= 68786
+X2F0dHJpYg== 68787
+IGJveFNoYWRvdw== 68788
+IENHQWZmaW5lVHJhbnNmb3Jt 68789
+IENhbmJlcnJh 68790
+IHN0YXJ0UG9z 68791
+IFJhaw== 68792
+CWNlcnI= 68793
+IFRhbnphbmlh 68794
+dW9uZw== 68795
+Y2Fm 68796
+LmJhc2ljQ29uZmln 68797
+b2lucw== 68798
+Q29udGFpbmVk 68799
+PXNldA== 68800
+X2dpdA== 68801
+CXBhY2tldA== 68802
+IGNvZg== 68803
+KFRS 68804
+5qC85byP 68805
+KHt9KQo= 68806
+IGRpcmVjY2lvbg== 68807
+IHBsYXlsaXN0cw== 68808
+IGFmZmluZQ== 68809
+LnNldFNlbGVjdGlvbg== 68810
+IGFtbW9u 68811
+IGNvbnF1ZXJlZA== 68812
+IFJhbW9z 68813
+IFBTUA== 68814
+PXN1bQ== 68815
+IGNvcnJlbGF0aW9ucw== 68816
+IHJvYWRtYXA= 68817
+IGV4dGluY3Q= 68818
+IGFkdmlzYWJsZQ== 68819
+IGJvbWJlcnM= 68820
+IFVJUmVzcG9uZGVy 68821
+X0JQ 68822
+INCx0YPQtNC10YI= 68823
+IFByZW1pZXJl 68824
+IFJV 68825
+dHJhc2g= 68826
+KGNsanM= 68827
+Z251 68828
+LlBhZ2Vz 68829
+IGluc3BlY3RvcnM= 68830
+TWV4aWNv 68831
+IFZlcmU= 68832
+UHJlYw== 68833
+IFNjYWw= 68834
+aXNwZXJz 68835
+UnVubmFibGU= 68836
+Lm9yaWc= 68837
+IHNhaWxvcnM= 68838
+UGFyc2luZw== 68839
+IFZpc2l0b3Jz 68840
+JnR5cGU= 68841
+cG9wb3Zlcg== 68842
+PCgpLA== 68843
+IG93ZXM= 68844
+IHJlYWN0cw== 68845
+IERlZmluZWQ= 68846
+IHJlYWxtZW50ZQ== 68847
+IGRpY3RhdG9yc2hpcA== 68848
+YWRtaW5pc3Ry 68849
+aWRlbmQ= 68850
+PUw= 68851
+c3RyY2FzZWNtcA== 68852
+XSU= 68853
+0L7Qs9GA0LDQvA== 68854
+ZWR1bGE= 68855
+LWRlc2lnbmVk 68856
+Q09WRVI= 68857
+X0NoYW5uZWw= 68858
+IHByb2pldG8= 68859
+eW1vb24= 68860
+Q0hLRVJSUQ== 68861
+6YeK 68862
+IHZlcmlmeWluZw== 68863
+L2tleQ== 68864
+LmZyb21DaGFyQ29kZQ== 68865
+LkJpdA== 68866
+X2J1ZGdldA== 68867
+ICUi 68868
+dmV5b3I= 68869
+IHl1bQ== 68870
+IGV4dHJlbWVz 68871
+X0NSRQ== 68872
+Z2V0U3RhdHVz 68873
+c3Vic2VjdGlvbg== 68874
+IHNvYWtlZA== 68875
+IGdlbmF1 68876
+X0NIQVJBQ1RFUg== 68877
+5oyB 68878
+LW9ubGluZQ== 68879
+LnRvQ2hhckFycmF5 68880
+Y2VyZXI= 68881
+Il0sIg== 68882
+IHN0cm9sbA== 68883
+IFl1YW4= 68884
+IFdhbmRlcg== 68885
+IHNpc3RlbQ== 68886
+X3Vj 68887
+KG5vbWJyZQ== 68888
+Y2hhbnRtZW50 68889
+KGNsb3Nl 68890
+bWV0aA== 68891
+LXNlY3JldA== 68892
+cHNldWRv 68893
+Q291bnR5 68894
+Q09OVFJPTA== 68895
+IHNvbHZlbnQ= 68896
+IHNvYXJpbmc= 68897
+IHNwaWVz 68898
+TmF2SXRlbQ== 68899
+IHJlc2VtYmxhbmNl 68900
+KGJpdHM= 68901
+IGNlbGx1bA== 68902
+IGFzc29jaWF0aXZl 68903
+Lmltd3JpdGU= 68904
+LmNvb3JkaW5hdGU= 68905
+XSwk 68906
+KHNr 68907
+Ki8p 68908
+IG1vY2tz 68909
+IGp1bmc= 68910
+X0RPQw== 68911
+LXJ1bnRpbWU= 68912
+IEdpdmVz 68913
+dW5q 68914
+KHNlZw== 68915
+KFtc 68916
+IG5haA== 68917
+X2V4cGVjdA== 68918
+Um93SW5kZXg= 68919
+KGZvcmNl 68920
+IEdldFZhbHVl 68921
+IHN1bW1hcmllcw== 68922
+X1NIQVJF 68923
+LXRyYWluZWQ= 68924
+IEJsYW5j 68925
+IGZpdHRpbmdz 68926
+IHdhdGVyZnJvbnQ= 68927
+Lk5vdGU= 68928
+IFdhbmQ= 68929
+b3ZlcmU= 68930
+cHJlZGljdGlvbg== 68931
+IGNzcg== 68932
+LnRvcEFuY2hvcg== 68933
+IFN0cm9rZQ== 68934
+X0ZpbHRlcg== 68935
+YXRoZQ== 68936
+ICJcXCI= 68937
+IEFGRg== 68938
+PSIvIj4= 68939
+LlJlcXVlc3RNZXRob2Q= 68940
+kJzntKI= 68941
+IHdpdG5lc3Npbmc= 68942
+QXBwYXJlbnRseQ== 68943
+IG1kaQ== 68944
+c3RpY2tz 68945
+IEFsdg== 68946
+w6TDnw== 68947
+X2NvbnRpbg== 68948
+IGJvaWxlcnM= 68949
+IE1hcnhpc3Q= 68950
+SU9D 68951
+bmVybw== 68952
+aW5uYWNsZQ== 68953
+TGl0 68954
+Y2Vj 68955
+S2V5UHJlc3M= 68956
+R2V0RGF0YQ== 68957
+IGlzbnQ= 68958
+0YDQvtCy0LXRgA== 68959
+IHFyeQ== 68960
+Um9vdEVsZW1lbnQ= 68961
+IE5TQ29kZXI= 68962
+LmdldE51bQ== 68963
+IHRocmVlc29tZQ== 68964
+VXNlcw== 68965
+LiJf 68966
+IENvbnRpbnVvdXM= 68967
+IHBvcHVsaXN0 68968
+IFBzeWNob2xvZ2ljYWw= 68969
+X2N5Y2xlcw== 68970
+IGlmZGVm 68971
+aXBoZXJhbHM= 68972
+CSAgICAgICAgICA= 68973
+IGFkdmlzZXM= 68974
+IENvbXBhbmlvbg== 68975
+dHJpZ2h0 68976
+IGdyb3dlcnM= 68977
+IFNPQ0tFVA== 68978
+eW1jZQ== 68979
+UlNT 68980
+bWVtYmVyT2Y= 68981
+VG91Y2hhYmxl 68982
+X2FycmF5cw== 68983
+IGp1bXBlcg== 68984
+IGhlcnBlcw== 68985
+IFRpdHM= 68986
+IFRlbGVmb24= 68987
+X1BBTkVM 68988
+dWdlbg== 68989
+5YyX5Lqs 68990
+LlNpdGU= 68991
+X3VucmVnaXN0ZXI= 68992
+X2Nocg== 68993
+LnRm 68994
+LWh1bWFu 68995
+IGFzb2Np 68996
+IHF1ZWVucw== 68997
+QW50aG9ueQ== 68998
+IHN0cmluZ2VudA== 68999
+IG1vbGVzdA== 69000
+c2V0SWNvbg== 69001
+SEVFTA== 69002
+SEVMUA== 69003
+RERT 69004
+LmNtcw== 69005
+SVNUUklCVVQ= 69006
+Y2llcw== 69007
+LmZvckNoaWxk 69008
+LmNoaw== 69009
+IE90dG9tYW4= 69010
+IFRQUA== 69011
+IG1pbw== 69012
+IEJ1Zg== 69013
+Ym9h 69014
+VmVyc2lvbnM= 69015
+KGxvY2FsZQ== 69016
+IFJhaWxyb2Fk 69017
+YmNj 69018
+LyoqPA== 69019
+LXBhaWQ= 69020
+IGNlbGVyeQ== 69021
+YXRpc2NoZQ== 69022
+Z2V0T3B0aW9u 69023
+b3Jpb3VzbHk= 69024
+IGFkYXB0ZXJz 69025
+U3RvcmVz 69026
+L3NhdmU= 69027
+IEJhc2lz 69028
+0Y7Rgg== 69029
+IExhZA== 69030
+X3JlbGF0aW9uc2hpcA== 69031
+IENsdWJz 69032
+IOCo 69033
+OiI8PA== 69034
+X01JU0M= 69035
+VmlzdWFsaXphdGlvbg== 69036
+IG1pcnJvcmVk 69037
+ZXNwZXI= 69038
+U3RyTG4= 69039
+IHJlc3BvbnNlT2JqZWN0 69040
+5ZCR 69041
+LmVuY29kZXI= 69042
+LS0tLS0tLS0tCgo= 69043
+IGdyaWRWaWV3 69044
+X2luZGVudA== 69045
+YW50d29ydA== 69046
+IGFycml2YWxz 69047
+IFNldHRsZW1lbnQ= 69048
+Vmlld0luaXQ= 69049
+LXZhbHVlcw== 69050
+IHdhdGVyZmFsbA== 69051
+IGluY2FyY2VyYXRpb24= 69052
+IFRlZW5z 69053
+CXNpZ24= 69054
+aW1tdW5l 69055
+LnNlY29uZGFyeQ== 69056
+IHZpZGVvZXI= 69057
+IOi+k+WFpQ== 69058
+IGludGltaWRhdGlvbg== 69059
+ZW5kYWxl 69060
+IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMj 69061
+IGluc2lnaHRmdWw= 69062
+IHNhbmRz 69063
+IHBob3RvZ3JhcGhpYw== 69064
+UGFnaW5hdG9y 69065
+IGRpc2NpcGxpbmVk 69066
+X1RMUw== 69067
+XSkpLA== 69068
+cmxlbg== 69069
+PGNlbnRlcg== 69070
+X1BDTQ== 69071
+S2VsbHk= 69072
+LWJpbGxpb24= 69073
+LmN4 69074
+IGpldXg= 69075
+IGZpbGVMaXN0 69076
+IFFEaWFsb2c= 69077
+dHJhY3RpdmU= 69078
+RHQ= 69079
+IGVzdHJvZ2Vu 69080
+IHN0YXJjaA== 69081
+X2VtaXQ= 69082
+INC30LDQv9GA0L7RgQ== 69083
+IFF1YXJ0 69084
+IGluYWR2ZXJ0ZW50bHk= 69085
+IHRyb25n 69086
+c2hpcG1lbnQ= 69087
+IE5PUg== 69088
+IFNjcmVlbmluZw== 69089
+IERpc2Nvbm5lY3Q= 69090
+bWVubw== 69091
+IFdvcnN0 69092
+IE5y 69093
+e2s= 69094
+c3Bs 69095
+X2N0cg== 69096
+LnNvcnRlZA== 69097
+LXBsYWNlaG9sZGVy 69098
+KCk7Ig== 69099
+aHVyc3Q= 69100
+LWhpdA== 69101
+LnNvbHZl 69102
+566X 69103
+IHVuZGVhZA== 69104
+IHdoaW1z 69105
+IGdldERlZmF1bHQ= 69106
+IE5pa2tp 69107
+YXNzZW1ibGU= 69108
+IHJlbG9jYXRlZA== 69109
+LXJldA== 69110
+SXRhbGlhbg== 69111
+OlN5c3RlbQ== 69112
+LnNjaGVkdWxlcg== 69113
+4oCcU28= 69114
+Rm9yYmlkZGVu 69115
+QVZPUg== 69116
+emlhxYI= 69117
+LkFkYW0= 69118
+CWNhbnZhcw== 69119
+IHBhcnRuZXJpbmc= 69120
+IGd5bW4= 69121
+IG1hbmlj 69122
+RGlmZmVyZW50 69123
+IMOlcmh1cw== 69124
+IGZlcnRpbGU= 69125
+Y2xm 69126
+LQ0K 69127
+LnJldmlldw== 69128
+b2RhYmxl 69129
+IEJvdW5kcw== 69130
+b2Jhbw== 69131
+IFBhcGVyYmFjaw== 69132
+IG1vZGlmaWM= 69133
+Y2hlY2twb2ludA== 69134
+IEFwcEJ1bmRsZQ== 69135
+IHN0YWJpbGl6ZQ== 69136
+IEF1ZGlvQ2xpcA== 69137
+bW9udGhseQ== 69138
+LmJlaA== 69139
+IGZsb3I= 69140
+IGJvbmRlZA== 69141
+IFdvcmtvdXQ= 69142
+Y29taW5ncw== 69143
+IHJhYmJpdHM= 69144
+IEJBTA== 69145
+Q0NS 69146
+X3Z1ZQ== 69147
+IExldml0cmE= 69148
+IGxpYmVydGluZQ== 69149
+IGNoYWxsZW5nZXI= 69150
+IFZhY2F0aW9u 69151
+VG9G 69152
+fSQv 69153
+X0RyYXc= 69154
+IGZlbmNlcw== 69155
+IGRhdGFzb3VyY2U= 69156
+IHBhcGVs 69157
+c2xpY2s= 69158
+X21lcw== 69159
+IFVJU3Rvcnlib2FyZFNlZ3Vl 69160
+KFRhZw== 69161
+IOWvuQ== 69162
+ICctJyk= 69163
+X0NMQVNTRVM= 69164
+KFJlbmRlcg== 69165
+CWZ3cml0ZQ== 69166
+VUVE 69167
+QUVT 69168
+KGpzb25QYXRo 69169
+IHNsb3dz 69170
+PkRlc2NyaXB0aW9u 69171
+IGVucmljaG1lbnQ= 69172
+IGl0ZW1wcm9w 69173
+IFBvdmVydHk= 69174
+IGFic29yYmluZw== 69175
+IFBzeWNobw== 69176
+5rGf 69177
+LC4KCg== 69178
+SW52ZXJzZQ== 69179
+IGFkanVk 69180
+aWdpZEJvZHk= 69181
+emlvbmk= 69182
+ICInLiQ= 69183
+5LiN5a2Y5Zyo 69184
+VGhhaQ== 69185
+IHNsYWlu 69186
+IGJydXRhbGx5 69187
+IFBlcnNwZWN0aXZl 69188
+IFJldGlyZW1lbnQ= 69189
+JHJz 69190
+IHNlcnZpY2VOYW1l 69191
+IOyI 69192
+LXByb2Nlc3Npbmc= 69193
+YnJhbmRz 69194
+OmVycm9y 69195
+KHByb3BlcnR5TmFtZQ== 69196
+IEJvZWg= 69197
+L2Nt 69198
+L3JlYWQ= 69199
+QU1C 69200
+IHJvdGF0aW9ucw== 69201
+LndvcmtzcGFjZQ== 69202
+Onk= 69203
+IHVwaG9s 69204
+dW5reQ== 69205
+IEJyYWNl 69206
+L21ldGE= 69207
+IEJyYXZl 69208
+YWNqZQ== 69209
+KFVJbnQ= 69210
+IHZpZWlsbGU= 69211
+cmFkaQ== 69212
+X2R5bg== 69213
+Tlc= 69214
+bG9zZXI= 69215
+ZXJ1c2Zvcm0= 69216
+IEJhcnRvbg== 69217
+IGZhcmVz 69218
+IE11aw== 69219
+4buHdQ== 69220
+IEF1ZGlvU291cmNl 69221
+KChf 69222
+LkJpZw== 69223
+Lm9yZ2FuaXphdGlvbg== 69224
+IFRyaWNr 69225
+IGJsdXNo 69226
+KFRZUEU= 69227
+IFJlbGF0aXZlTGF5b3V0 69228
+bGVjdHJvbg== 69229
+XX0i 69230
+IFphcA== 69231
+IFR3ZWx2ZQ== 69232
+Okw= 69233
+IHN0aWZmbmVzcw== 69234
+X0hFTA== 69235
+IHNwZXA= 69236
+KGNvZGVy 69237
+IHRhbWFuaG8= 69238
+IGFudGlveGlkYW50 69239
+IGhvc3BpdGFsaXplZA== 69240
+R1BD 69241
+IHNjcnV0aW4= 69242
+4buBbg== 69243
+IFNa 69244
+IEp1bGl1cw== 69245
+IFNhYmI= 69246
+ZWxvcg== 69247
+KG1j 69248
+6YeM 69249
+IFBpbnM= 69250
+IG1vZGVyYXRlbHk= 69251
+IEvDvA== 69252
+b3JnYW5pemF0aW9ucw== 69253
+IFNDT1JF 69254
+IHNjb3Vy 69255
+IGNob3I= 69256
+IFVJRWRnZUluc2V0cw== 69257
+IHNrdWxsZQ== 69258
+X29wZXJhbmQ= 69259
+LmdzdGF0aWM= 69260
+L25naW54 69261
+IGdldFdpZHRo 69262
+QmF0dGVyeQ== 69263
+IFNldHRlcg== 69264
+bUE= 69265
+KFJlc291cmNlcw== 69266
+X3BsYXlsaXN0 69267
+IG1hbmdv 69268
+IE9SRA== 69269
+YW5raW5k 69270
+ZXdheXM= 69271
+Pyks 69272
+IEdMVVQ= 69273
+IGp1c3Rl 69274
+IHBheWVy 69275
+KGNhbQ== 69276
+IFRlYWNo 69277
+IEZsdXg= 69278
+IG91dHNwb2tlbg== 69279
+IFN0cmluZ1V0aWw= 69280
+IFpoYW8= 69281
+LkhlbHBlcg== 69282
+IGVzdGlsbw== 69283
+IEFudGhyb3A= 69284
+IEd1YXJkcw== 69285
+Vm9jw6o= 69286
+Olsn 69287
+CXByb2R1Y3Q= 69288
+dXBkYXRlZEF0 69289
+IGluc3BpcmVz 69290
+cXc= 69291
+QkxFTQ== 69292
+YWtpc3Rhbg== 69293
+IGN6xJk= 69294
+LWhlYXJ0ZWQ= 69295
+IENvbXBlbnNhdGlvbg== 69296
+0LjQsw== 69297
+IGNvbWE= 69298
+IEZpYXQ= 69299
+IHhtbGh0dHA= 69300
+IHJlZmVycmFscw== 69301
+IHNwZWN0YXRvcnM= 69302
+IFRvcw== 69303
+aXNvcw== 69304
+SU1QTEVNRU5U 69305
+IGVudHJlcHJlbmV1cmlhbA== 69306
+IFNjb3V0cw== 69307
+IEFsb25l 69308
+YnJva2Vy 69309
+UHJvZHVjdElk 69310
+IEtvYmU= 69311
+IGNoYXVk 69312
+L2ZlYXR1cmVz 69313
+IHJvb21tYXRl 69314
+IFByb2plY3Rpb24= 69315
+YXZvdXJpdGVz 69316
+X0pPSU4= 69317
+IEFWQw== 69318
+X3BoeXM= 69319
+S2V5UHJlc3NlZA== 69320
+LDw= 69321
+IHVucmVhY2hhYmxl 69322
+IENpdGF0aW9u 69323
+W2NoYW5uZWw= 69324
+c3RhcnRzd2l0aA== 69325
+IEphZ3VhcnM= 69326
+LklzRmFsc2U= 69327
+bWVtYmVyc2hpcA== 69328
+QXR0ZW50aW9u 69329
+IHJlbW9kZWxpbmc= 69330
+IENpbmR5 69331
+IGNsaW5pY2FsbHk= 69332
+IG1pbGxlbm5pYWxz 69333
+IM60 69334
+IHJmbA== 69335
+ZW5ldA== 69336
+IG9icmln 69337
+IHZvbHVudGVlcmluZw== 69338
+Q3JlZGl0cw== 69339
+CWFy 69340
+IHJlc2lzdGluZw== 69341
+IFByb2R1a3Q= 69342
+PT09Ig== 69343
+IGNvbmVjdA== 69344
+IHJpag== 69345
+INeU 69346
+IHB1YmxpY0tleQ== 69347
+IG95 69348
+IEJ1dHQ= 69349
+X21pc2M= 69350
+IEJlc3Rl 69351
+IFBMQw== 69352
+IOafpQ== 69353
+IEJveEZpdA== 69354
+IiIu 69355
+VGVzdEZpeHR1cmU= 69356
+IGNoYXR0ZXI= 69357
+IGRvb3J3YXk= 69358
+eXNpemU= 69359
+INGH0YI= 69360
+SUNUVVJF 69361
+PScuLi8= 69362
+c2hvd24= 69363
+X3dlYXRoZXI= 69364
+IExvZ01hbmFnZXI= 69365
+XX0iCg== 69366
+IGNvbG91cmZ1bA== 69367
+IHJ1bW9yZWQ= 69368
+IGzDpQ== 69369
+IHByb2Jz 69370
+CWJ1aWxk 69371
+IOWmgg== 69372
+LnJldg== 69373
+IGludGVyY2VwdGVk 69374
+R2F5 69375
+TGlzdENvbXBvbmVudA== 69376
+IHBpw6g= 69377
+IkF0 69378
+IGFnYXI= 69379
+IEd1bmQ= 69380
+X0FFUw== 69381
+7IM= 69382
+jpjsnbQ= 69383
+IGF1dGhvcmlzZWQ= 69384
+IENoYWxs 69385
+X2xvZ291dA== 69386
+Y3Jvbg== 69387
+YXRlZ2llcw== 69388
+cGVyc2lzdGVudA== 69389
+IEFuZEFsc28= 69390
+dXN6 69391
+X3Jlc3RhcnQ= 69392
+IGRlY2lk 69393
+emY= 69394
+IHBhZ2luYXRvcg== 69395
+b2xsZXI= 69396
+IEhH 69397
+T3BhcXVl 69398
+c2VhdQ== 69399
+IE9NSVQ= 69400
+IFRoaWNrbmVzcw== 69401
+IEFpcndheXM= 69402
+X2RlbQ== 69403
+eXRpYw== 69404
+IHByb3Rlc3RlZA== 69405
+IHVwcmlzaW5n 69406
+IHN1aW5n 69407
+IFNoZWxieQ== 69408
+LmVuZXJneQ== 69409
+IGFsbGVsZQ== 69410
+LWJpZw== 69411
+U3RyaW5nQnVpbGRlcg== 69412
+IHNpZGVsaW5lcw== 69413
+IFRV 69414
+X2Fp 69415
+LkhPUklaT05UQUw= 69416
+IHJhZ2luZw== 69417
+LnRvTG9jYWxl 69418
+Lm11c3Q= 69419
+eEZGRg== 69420
+Lm5paA== 69421
+ICd7fSc= 69422
+2YjYrw== 69423
+IHB1bG1vbmFyeQ== 69424
+IOWPkQ== 69425
+IG7Dum1lcm9z 69426
+IE5hcG9sZW9u 69427
+X01ldGhvZEluZm8= 69428
+bGFzdGluZw== 69429
+IGV4cG9zdXJlcw== 69430
+IGVtYmFyaw== 69431
+X3VkcA== 69432
+S2lkcw== 69433
+X0NPTk5FQ1RFRA== 69434
+IHdlZWRz 69435
+UE9PTA== 69436
+IGtyaWo= 69437
+IG51aXM= 69438
+Sk5JRVhQT1JU 69439
+YWFhYWFhYWE= 69440
+IO2P 69441
+5Lu9 69442
+IHJlcGxlbg== 69443
+IFRyaWFscw== 69444
+d2FzaA== 69445
+cnV0 69446
+LWJlZm9yZQ== 69447
+X0FUVEFDSE1FTlQ= 69448
+VU5U 69449
+XFZhbGlkYXRpb24= 69450
+VG9u 69451
+IGhlYWRpbmdz 69452
+UHJvYmFibHk= 69453
+IGZhYnJpY2F0ZWQ= 69454
+U29ja2V0QWRkcmVzcw== 69455
+IGxldHRyZQ== 69456
+KSI+ 69457
+IHZhY2NpbmF0ZWQ= 69458
+Omh0dHA= 69459
+IGNvbmRvbA== 69460
+c2hlZA== 69461
+IFNwaWVsZQ== 69462
+44OU 69463
+RGVwbG95 69464
+LkNvbnRyYWN0 69465
+LWJv 69466
+Iy8= 69467
+IGludGVyY2VwdGlvbg== 69468
+IGlzYm4= 69469
+IG1hbm5lcnM= 69470
+L2Fj 69471
+CUNoZWNr 69472
+X2Zn 69473
+IGVuZFBvaW50 69474
+X3dlYXBvbg== 69475
+IHVuaW50ZW50aW9u 69476
+IHF1aXRz 69477
+X01JQw== 69478
+YXBpcm8= 69479
+IGJhbGxvb25z 69480
+IGdyYWRz 69481
+bWFycmllZA== 69482
+IDwqPg== 69483
+IGRpc3RvcnQ= 69484
+X01FU1NBR0VT 69485
+IFBTQQ== 69486
+X1BE 69487
+YWxzZXg= 69488
+IERpYWxvZ3Vl 69489
+IHJlZ2lzdHJhdGlvbnM= 69490
+IE9yaWdpbnM= 69491
+IGZsYW5r 69492
+PzsKCg== 69493
+OwoKCgoK 69494
+XS0k 69495
+IERlc3M= 69496
+LlN0YXR1c0JhZFJlcXVlc3Q= 69497
+IGluaGFiaXRlZA== 69498
+IGdpbHQ= 69499
+IFNURENBTEw= 69500
+LnRoZXRh 69501
+JCQkJA== 69502
+aWNsYXNz 69503
+QXBhcnQ= 69504
+Lmxpc3RCb3g= 69505
+IEJlbGFydXM= 69506
+IGRlbmVu 69507
+IFN1c3NleA== 69508
+CWRlbA== 69509
+X0VD 69510
+bmVhcmVzdA== 69511
+XE9yZGVy 69512
+UGFja2FnZXM= 69513
+Zm9ybWVybHk= 69514
+Ke+8jA== 69515
+6LSj 69516
+U2V4eQ== 69517
+IGhvcnJvcnM= 69518
+Uk9BRENBU1Q= 69519
+QXBwcm94 69520
+RGVzaw== 69521
+QU1FRA== 69522
+Lk5vcm1hbGl6ZQ== 69523
+X3B1Ymxpc2hlZA== 69524
+IERlYm9yYWg= 69525
+56eR 69526
+IHBvdW5kaW5n 69527
+IEVzcGVy 69528
+IERhbmNpbmc= 69529
+IExPT1A= 69530
+IFJveWFscw== 69531
+IGluc3VyZQ== 69532
+IEludmVzdG9ycw== 69533
+IHRoZW9sb2dpY2Fs 69534
+QXBwb2ludG1lbnQ= 69535
+IGNhdGVnb3JpY2Fs 69536
+IGNyYW4= 69537
+VmFsaWRpdHk= 69538
+IHJlc3BvbmRlcnM= 69539
+ICgpDQo= 69540
+ZXBhZA== 69541
+QklUUw== 69542
+IExhbWJlcnQ= 69543
+c3VtbQ== 69544
+YWNpZGFk 69545
+IGxvZ2dlZElu 69546
+PVc= 69547
+LkxvY2FsaXphdGlvbg== 69548
+cmlkbw== 69549
+JyIpCg== 69550
+IFdlYlZpZXc= 69551
+bG90aA== 69552
+IHRlYXNlcg== 69553
+IENhbmQ= 69554
+IGVwaWxlcHN5 69555
+SW5jcmVhc2U= 69556
+aXZpdHlNYW5hZ2Vy 69557
+ZW50cmFudA== 69558
+VGVsZWZvbm8= 69559
+LmN1cnJlbnRTdGF0ZQ== 69560
+IE5vZWw= 69561
+ICAgICAgICAgICAgCQk= 69562
+IGV4aGF1c3Rpb24= 69563
+ZWxpYW4= 69564
+IGNvdmV0ZWQ= 69565
+LXByb2R1Y3Rpb24= 69566
+KHN0ZGlu 69567
+IHByZWZlcmFibGU= 69568
+IG9mZmVuZGluZw== 69569
+KGNvbW1pdA== 69570
+CWFs 69571
+IHJlbG9jYXRl 69572
+IGFub21hbA== 69573
+IERpc2Vhc2Vz 69574
+IEZvcmc= 69575
+IFdJRkk= 69576
+IEtpbGxpbmc= 69577
+cXY= 69578
+IGZtYXA= 69579
+IGxsZXZhcg== 69580
+dGl0cmU= 69581
+LmVtcA== 69582
+LCRf 69583
+YXZy 69584
+Q2FuQmU= 69585
+X21h 69586
+IEhhd2tpbnM= 69587
+X1JPVVQ= 69588
+IGxvYWRJbWFnZQ== 69589
+IFdhaA== 69590
+IERlbXM= 69591
+IGluZGVudGF0aW9u 69592
+cHJlY2F0aW9u 69593
+IOaWh+S7tg== 69594
+IEJ1ZGFwZXN0 69595
+IHV0Yw== 69596
+KGhvdXJz 69597
+IHRyYW5ueQ== 69598
+QW5z 69599
+ennEhw== 69600
+LnZlaGljbGU= 69601
+Q29pbnM= 69602
+IEJyYXVu 69603
+CVJlc3BvbnNl 69604
+IHZyaWo= 69605
+IHN0cmFuZ2VseQ== 69606
+IEZhc2M= 69607
+XFNlc3Npb24= 69608
+TW91c2VMaXN0ZW5lcg== 69609
+IFJvbGxz 69610
+4bqnbg== 69611
+LmdycGM= 69612
+SW50ZWdlckZpZWxk 69613
+CWFmeA== 69614
+RG9ja0NvbnRyb2w= 69615
+JVw= 69616
+JTsi 69617
+IGdpZ2c= 69618
+IGJvcnJvd2Vy 69619
+IGRpc3BvbmlibGVz 69620
+X1JFQ1Q= 69621
+IFRoaW4= 69622
+IHBlYXJs 69623
+eEZC 69624
+IHJpcHBsZQ== 69625
+IGtIeg== 69626
+LmFjcXVpcmU= 69627
+Ymlvcw== 69628
+dGFibGVGdXR1cmU= 69629
+L2FudGxy 69630
+b3JhY2xl 69631
+IEFSRUE= 69632
+IGludGVuc2VseQ== 69633
+IHByb3RvYnVm 69634
+IExFTkc= 69635
+IEhlYWRxdWFydGVycw== 69636
+YXRoZWQ= 69637
+TWluZA== 69638
+aW5peg== 69639
+CVBhdGg= 69640
+WE1MTG9hZGVy 69641
+IGFsbG9jYXRpb25z 69642
+LnNsb3Q= 69643
+UHJvY0FkZHJlc3M= 69644
+IHJvbGVJZA== 69645
+Oyc7Cg== 69646
+IEJSRUFL 69647
+IFBlcmZvcm1pbmc= 69648
+Lk9yZGluYWxJZ25vcmVDYXNl 69649
+LWds 69650
+Omg= 69651
+IGRvd25sb2FkYWJsZQ== 69652
+IFN1YnNjcmliZXI= 69653
+YW5zZQ== 69654
+IGNoYXJhY3Rlcml6ZQ== 69655
+IHNocnVnZ2Vk 69656
+IHNjcA== 69657
+IGd1c3Rh 69658
+IG1ldGFsbA== 69659
+IGxhYm9yYXRvcmllcw== 69660
+IFhpbg== 69661
+IE1vdG9yY3ljbGU= 69662
+IGVnZXQ= 69663
+IGZpbmFuY2Vk 69664
+IE1PRElGWQ== 69665
+KlI= 69666
+QWk= 69667
+IGV4dHJlbWlzbQ== 69668
+IEhhbGlmYXg= 69669
+IHZhbW9z 69670
+JG51bQ== 69671
+IGltcGFydA== 69672
+YnJpY2s= 69673
+IOexuw== 69674
+IGZ1ZXJh 69675
+IFJPTEU= 69676
+LkNvbmN1cnJlbnQ= 69677
+X09QRVJBVE9S 69678
+IGN5bmljYWw= 69679
+IFJlZ2luYQ== 69680
+Z2V0RXJyb3I= 69681
+2KM= 69682
+YnN1Yg== 69683
+SmFwZ29sbHk= 69684
+IGluaGliaXRvcg== 69685
+SnVzdGljZQ== 69686
+44U= 69687
+TmV2ZXJ0aGVsZXNz 69688
+LXNlbQ== 69689
+Lm9nZw== 69690
+cmVxdWVudA== 69691
+IG5vc3Nv 69692
+SGFpcg== 69693
+LkxpYnJhcnk= 69694
+bWRpcg== 69695
+IGhhcmk= 69696
+IFRhcmE= 69697
+IFBvcnRv 69698
+bmV0aW5ldA== 69699
+IGFsbGlhbmNlcw== 69700
+ZWxsc2NoYWZ0 69701
+X1N1cmZhY2U= 69702
+CVZpZXc= 69703
+YXR1cmRheXM= 69704
+IHBvcGNvcm4= 69705
+X1BBUlNF 69706
+IFJpcHBsZQ== 69707
+IHBoYW50b20= 69708
+IG1vbmRv 69709
+LmNyZWF0ZUNsYXNz 69710
+IEtvcmVhbnM= 69711
+IGZhc2U= 69712
+IFdvY2hlbg== 69713
+IEVxdWlw 69714
+LWVpZ2h0 69715
+IFN0YXRlbWVudHM= 69716
+IGFkYXB0aW5n 69717
+UHJlY2lv 69718
+IEN1cmU= 69719
+IGNhbWJpYXI= 69720
+5rCR 69721
+IGhleGFkZWNpbWFs 69722
+c3BpcmFjeQ== 69723
+YmlsdA== 69724
+IFl1Zw== 69725
+IC0tLT4= 69726
+IFBQQw== 69727
+aXN6 69728
+YWtlRnJvbU5pYg== 69729
+IERpc3A= 69730
+IEF0aGxldGljcw== 69731
+IG5pZ2h0Y2x1Yg== 69732
+R09PRA== 69733
+LnNldEdlb21ldHJ5 69734
+K1s= 69735
+L3NlbmQ= 69736
+IGJpbmFyaWVz 69737
+IHLDoXA= 69738
+OnJlcQ== 69739
+LWNvbnN1bWluZw== 69740
+ZXJ0aW1l 69741
+VVBEQVRFRA== 69742
+X251bGxhYmxl 69743
+VklO 69744
+dWxpYQ== 69745
+Y3lhbg== 69746
+IG1pc3VuZGVyc3RhbmRpbmc= 69747
+b3JpY2Fs 69748
+ZGVncmVlcw== 69749
+TGVhZGluZw== 69750
+LkFS 69751
+aWNrZXN0 69752
+TnVldm8= 69753
+dWZvcmlh 69754
+IGdvb2RpZXM= 69755
+IGZvcmVz 69756
+KCk8PCI= 69757
+YWRlbWlj 69758
+QWN0aW9uQ3JlYXRvcnM= 69759
+c2VydmVybmFtZQ== 69760
+KG50 69761
+ZGJDb250ZXh0 69762
+IGFpcmJvcm5l 69763
+IGV4aGliaXRpb25z 69764
+Y2VsZQ== 69765
+IHRlbGE= 69766
+PE1vdmll 69767
+KCd7fQ== 69768
+RXhwbGFuYXRpb24= 69769
+IGhPYmplY3Q= 69770
+IGJlYXJlcg== 69771
+ZW5zaWJseQ== 69772
+bmlw 69773
+IEplcm9tZQ== 69774
+IENa 69775
+IGRhdGVGb3JtYXR0ZXI= 69776
+w6ljaWFs 69777
+U2V0TmFtZQ== 69778
+b3VjZQ== 69779
+IHJlZ3Jlc3M= 69780
+JkM= 69781
+KCkiPg== 69782
+LnNldFByZWZlcnJlZFNpemU= 69783
+IE1JRA== 69784
+IEFsZXNz 69785
+IGhvcnNlcG93ZXI= 69786
+IGF0bQ== 69787
+IFBhY2thZ2luZw== 69788
+IGNpcGhlcnRleHQ= 69789
+UmVxdWVzdE1ldGhvZA== 69790
+IGJlaWRlbg== 69791
+6KM= 69792
+IFBPVw== 69793
+LldyaXRlSGVhZGVy 69794
+ZGlyZWN0b3I= 69795
+LWJ1dA== 69796
+44Gg44GV44GE 69797
+aW5jZXI= 69798
+X2Ru 69799
+ISEhISE= 69800
+IG1hbnVmYWN0dXJlcw== 69801
+LlRleHRVdGlscw== 69802
+IGNvbnNjaW91c2x5 69803
+IGJvdW5jZWQ= 69804
+Y3VsdHVyZQ== 69805
+IFNwYXI= 69806
+IFBpcGVy 69807
+LnByZXNz 69808
+LW93bmVy 69809
+IGV2YWx1YXRvcg== 69810
+IFNUUkVBTQ== 69811
+LlBpY3R1cmVCb3hTaXplTW9kZQ== 69812
+IHN1Z2Fycw== 69813
+U2NyZWVuV2lkdGg= 69814
+IG5leHRTdGF0ZQ== 69815
+IGl2b3J5 69816
+IGJydW5jaA== 69817
+ZGVuc2l0eQ== 69818
+X09X 69819
+IENvcm9uYXZpcnVz 69820
+IENGUg== 69821
+YmFr 69822
+XENhdGVnb3J5 69823
+5pWw57uE 69824
+IGludm9rZXZpcnR1YWw= 69825
+fSgpCg== 69826
+IHN1amV0 69827
+LW1hcmtlcg== 69828
+aXNkaWdpdA== 69829
+IE1vYmls 69830
+IEpzb25SZXF1ZXN0QmVoYXZpb3I= 69831
+X1JFTU9URQ== 69832
+LmV4aXN0c1N5bmM= 69833
+IHJpY2hlcw== 69834
+LnByZXNlbnRlcg== 69835
+IGdsQ29sb3I= 69836
+IGhhbnlh 69837
+IGZvcnRyZXNz 69838
+IGZsYXNoZWQ= 69839
+dml6 69840
+cmVxdWVudGx5 69841
+YnVhdA== 69842
+JGNvbg== 69843
+Pnw= 69844
+LkZ1bmM= 69845
+IGh1bW9yb3Vz 69846
+dWVt 69847
+LlpFUk8= 69848
+IFNUTA== 69849
+IEJ1aw== 69850
+L3NhbXBsZQ== 69851
+IEdyb3M= 69852
+UmVjaXBlcw== 69853
+IGluZmxhdGVk 69854
+IHN3dW5n 69855
+OkY= 69856
+RmFjaW5n 69857
+LlRoZW1l 69858
+0L3QuNC6 69859
+IHNwbGVuZGlk 69860
+IHJlcXVlc3RJZA== 69861
+LkNlbnRlclNjcmVlbg== 69862
+L2F1dG9sb2Fk 69863
+ZW1iZWRkZWQ= 69864
+X2RlcGFydA== 69865
+IFBvcnRz 69866
+4LmD 69867
+0LDQudC0 69868
+ZGlzY3Vzc2lvbg== 69869
+X2NvbnN1bQ== 69870
+IHNjb3V0cw== 69871
+IGNvbGFib3I= 69872
+LlN0YWdl 69873
+Lm5hbm8= 69874
+ZWxkb3Jm 69875
+IGdlbWFjaHQ= 69876
+ICAgICAgICAgICAgICAgICAgICAgICAgICAK 69877
+IHBvbGljeW1ha2Vycw== 69878
+X1BLVA== 69879
+LFRo 69880
+b2t5 69881
+X1VJRA== 69882
+UGluZw== 69883
+IG9yY2hlc3Q= 69884
+IG9wdGljcw== 69885
+dWhhbg== 69886
+IFhPUg== 69887
+IGVzcGHDsW9s 69888
+IEFkaWRhcw== 69889
+cm5n 69890
+bWFucw== 69891
+LnZzdGFjaw== 69892
+IGdldGF3YXk= 69893
+IGhpZXJhcmNoaWNhbA== 69894
+YW5vaWE= 69895
+IEJpdG1hcEZhY3Rvcnk= 69896
+cmVhbG0= 69897
+CWFw 69898
+X2FwcHM= 69899
+LWRpdmlkZXI= 69900
+LmRyYXdlcg== 69901
+IEhBUkQ= 69902
+J107Pz4K 69903
+LXBhY2tlZA== 69904
+5rK7 69905
+X1NUUlVDVFVSRQ== 69906
+W1k= 69907
+aVBhcmFt 69908
+KGVx 69909
+IGVuY29tcGFzc2Vz 69910
+IFwKCg== 69911
+LT5b 69912
+JnV0bQ== 69913
+Z3JvdXBvbg== 69914
+c3RyYXRl 69915
+RFk= 69916
+b21vcnBoaWM= 69917
+Jzpb 69918
+IGdyYXZpdGF0aW9uYWw= 69919
+IE1pY2hh 69920
+IFRlbmNlbnQ= 69921
+IGNvYWNoZWQ= 69922
+7Lac 69923
+0YPQvNC10L3Rgg== 69924
+L21vYmlsZQ== 69925
+TW91c2VEb3du 69926
+YnVk 69927
+IFlhcw== 69928
+IFByb3ZpZGVycw== 69929
+Tlo= 69930
+CXJlcG9ydA== 69931
+ZXJybXNn 69932
+IGltYWdlUGF0aA== 69933
+YWN0ZXJpYWw= 69934
+IE1hbmdh 69935
+d2lja2x1bmc= 69936
+KHVzdWFyaW8= 69937
+IikpOw0KDQo= 69938
+LyoqKg== 69939
+IG9yZ2FuaXNl 69940
+SW5kZXhlZA== 69941
+X1FVQUw= 69942
+KFB5T2JqZWN0 69943
+IHN1cnJlbmRlcmVk 69944
+UE9DSA== 69945
+IE5PVEVT 69946
+XFwi 69947
+LWpvYg== 69948
+IHNldmVudHk= 69949
+IyMjIwo= 69950
+IE1hbm9y 69951
+IGRvd25yaWdodA== 69952
+IHRpbWVmcmFtZQ== 69953
+aW5zdXJhbmNl 69954
+Y2hlY2tlcg== 69955
+IFNFQ1JFVA== 69956
+IGVjaG9lcw== 69957
+IENhcm1lbg== 69958
+LnNldEhvcml6b250YWxBbGlnbm1lbnQ= 69959
+IGlzQ2hlY2tlZA== 69960
+IFRPUg== 69961
+X25u 69962
+KCco 69963
+RmV0Y2hSZXF1ZXN0 69964
+IFByaW50ZWQ= 69965
+Rmx1aWQ= 69966
+IFNUQUNL 69967
+R0VT 69968
+YWlnbmVk 69969
+aWdvcg== 69970
+LlVua25vd24= 69971
+Q0JD 69972
+IENhcmxzb24= 69973
+LlVSSQ== 69974
+IHBsaWdodA== 69975
+L3N0YXJ0 69976
+IFBlcnNvbm5lbA== 69977
+IFBSRUZJWA== 69978
+LCoq 69979
+IGxpbWl0ZQ== 69980
+X2hlYXQ= 69981
+Je+8jA== 69982
+IERvbm5l 69983
+Z2V0Tm9kZQ== 69984
+IFNjaWVudG9sb2d5 69985
+IGNvbWV0 69986
+IHdlbmln 69987
+QXNpZGU= 69988
+IE1QRUc= 69989
+Jz8= 69990
+dmFyaWFibHk= 69991
+LmVuZERhdGU= 69992
+IHVuY29udA== 69993
+IFNjb3Jlcw== 69994
+IExvZ2luRm9ybQ== 69995
+LmdlbmVyYXRlZA== 69996
+LGNo 69997
+LW1hcg== 69998
+IE5lZA== 69999
+IGV2ZW50SWQ= 70000
+K3A= 70001
+IFNJTg== 70002
+L3Jlc2V0 70003
+LlJFQUNU 70004
+IE1lc3Np 70005
+X1JBTks= 70006
+LndyaXRlRmlsZQ== 70007
+IGNyaXBw 70008
+ZXN0aGV0aWM= 70009
+RVJTSVNU 70010
+IHJlaW1idXJzZW1lbnQ= 70011
+Q3VycmVudFZhbHVl 70012
+IHVuaW4= 70013
+RG93bkxhdGNo 70014
+IHBhZGRpbmdSaWdodA== 70015
+IHN0b2NrZWQ= 70016
+Lycu 70017
+IHJlcGF5bWVudA== 70018
+dHJhaw== 70019
+L2JhY2tlbmQ= 70020
+INC40LfQvNC10L0= 70021
+Q1NS 70022
+IHByZXZlbnRpdmU= 70023
+IHBhbnRhbGxh 70024
+X3RyaW0= 70025
+UGVkaWRv 70026
+aG9zcGl0YWw= 70027
+IG1hbmFnZWFibGU= 70028
+cm91dGVQYXJhbXM= 70029
+dGV4dHVyZXM= 70030
+Li4uLi4uCgo= 70031
+IHPDqWxlY3Rpb24= 70032
+TmFtZVZhbHVlUGFpcg== 70033
+IHBvbGx1dA== 70034
+TW9kZXM= 70035
+IExhdWQ= 70036
+amF5 70037
+IFVycw== 70038
+IHNpZ25lcg== 70039
+IEpK 70040
+IENoZXJva2Vl 70041
+X0VYSVNUUw== 70042
+IGR3YXI= 70043
+ICgkKCcj 70044
+IHJlZWY= 70045
+Pnsk 70046
+IEJheWxvcg== 70047
+IE1vZGVsU3RhdGU= 70048
+LV8= 70049
+IFN0cnVjdHVyZXM= 70050
+IHNvdXZlbnQ= 70051
+U3BlY2lmeQ== 70052
+KHBpcGU= 70053
+IGZyYWNraW5n 70054
+IEdQQQ== 70055
+IGJlbGU= 70056
+CQkJCQkJCSAgIA== 70057
+IE1pbm9yaXR5 70058
+IHR1ZA== 70059
+IG9wZW5uZXNz 70060
+IElsbHVzdHJhdGVk 70061
+IG94aWRhdGlvbg== 70062
+IE5L 70063
+CVVwZGF0ZQ== 70064
+IEVNUw== 70065
+IFRlZGR5 70066
+IGdlbmVyYWxz 70067
+CU1hdA== 70068
+IHJhZGlvcw== 70069
+IEFudGlxdWU= 70070
+Y29ub215 70071
+IFNxdWFkcm9u 70072
+KScsJw== 70073
+5aOw 70074
+IHlvdXJl 70075
+IE1haW5QYWdl 70076
+IGJlaGF2aW91cnM= 70077
+ZW5naHQ= 70078
+KEAiJUAiLA== 70079
+IHRlc3RjYXNl 70080
+IENvbXBpbGF0aW9u 70081
+IGZsYXZvdXJz 70082
+IEV4dGVuZA== 70083
+aWxsYXRvcg== 70084
+IGNvaA== 70085
+IHNwbGluZQ== 70086
+IEtH 70087
+LXBheQ== 70088
+IGNvbW11bmlzbQ== 70089
+IEJ1c2luZXNzZXM= 70090
+b2NraW5n 70091
+Lk1heExlbmd0aA== 70092
+YXNzYW5kcmE= 70093
+cXVpcmluZw== 70094
+YWRkZW4= 70095
+IEplYg== 70096
+X2ZhdWx0 70097
+W2ZpbGU= 70098
+IHByb21pbmVuY2U= 70099
+ZGlzY2lwbGluYXJ5 70100
+4oCUdGhleQ== 70101
+X2V4dGVudA== 70102
+IFZJQw== 70103
+IGVudGFpbHM= 70104
+LnBhcnRuZXI= 70105
+IGhpcHBvYw== 70106
+TGVhZ3Vl 70107
+55S3 70108
+d2lwZQ== 70109
+LXNwaW5uZXI= 70110
+IHNhbHV0ZQ== 70111
+IFN1cmdpY2Fs 70112
+KG91dHB1dHM= 70113
+d29ya2Vk 70114
+W3N0cmxlbg== 70115
+YXBwb2ludGVk 70116
+IEhlZw== 70117
+IEFDUEk= 70118
+KFte 70119
+dWFsYQ== 70120
+X3RvbA== 70121
+IFJpdA== 70122
+LlBheW1lbnQ= 70123
+a293c2tp 70124
+IHdhbG1hcnQ= 70125
+cmVxdWlyZW1lbnRz 70126
+IEZJTlNFUQ== 70127
+X0JBQ0tHUk9VTkQ= 70128
+IE9zYm9ybmU= 70129
+KGVycm9yTWVzc2FnZQ== 70130
+UmVwb3J0aW5n 70131
+IGF1Y3Rpb25z 70132
+IGNvbWJvcw== 70133
+IE5vdGljZWQ= 70134
+X29jdA== 70135
+IHByaW1lcm8= 70136
+dGFpcmU= 70137
+X2hy 70138
+INC80L7QtA== 70139
+IGNvbnRyYWRpY3Rvcnk= 70140
+PSJA 70141
+YWNoaW5lcw== 70142
+KG9wdGFyZw== 70143
+IFBlbmd1aW4= 70144
+IEFiYmFz 70145
+IHN1YmxpbWU= 70146
+IHBhZ2VhYmxl 70147
+IERlZmVuc2l2ZQ== 70148
+IGRpc3RpbmN0bHk= 70149
+IEF1dG9tYXRpY2FsbHk= 70150
+VW5kZXJzdGFuZGluZw== 70151
+RXF1YWxpdHlDb21wYXJlcg== 70152
+Z290YQ== 70153
+ICI6Og== 70154
+IHB1bHZlcg== 70155
+IEJhdHRsZXM= 70156
+IHVucGFyYWxsZWxlZA== 70157
+VENIQQ== 70158
+IGNvbnN0cnVlZA== 70159
+LWFmZg== 70160
+IHByZWN1cnNvcg== 70161
+LWxmcw== 70162
+IG1hZHVyYXM= 70163
+IERhaXN5 70164
+IEFyYmVpdHM= 70165
+Lk1hbmFnZW1lbnQ= 70166
+CUlu 70167
+IHJvYmVz 70168
+IHNww6lj 70169
+4oCcKA== 70170
+IG1hdGVybml0eQ== 70171
+ZXh0ZW50 70172
+IFNwYWNlcg== 70173
+RGlkQXBwZWFy 70174
+CXVz 70175
+LmdldFJlcXVlc3REaXNwYXRjaGVy 70176
+KGNvbHM= 70177
+IHBsdW1tZXQ= 70178
+7IU= 70179
+IHsKCgoK 70180
+w6lyaWNh 70181
+IFNpemVz 70182
+LmVudW0= 70183
+LkhpZ2hsaWdodA== 70184
+ICEhfTwv 70185
+QVRURVJZ 70186
+IFNvcm9z 70187
+R0xmbG9hdA== 70188
+44KE 70189
+IEplbm5pbmdz 70190
+Pz8KCg== 70191
+IFJvbWVv 70192
+ID8+CgoK 70193
+V2Vubg== 70194
+IGNsaW1heA== 70195
+IGNyZW0= 70196
+X3RoYXQ= 70197
+W+KApg== 70198
+X2RvbWFpbnM= 70199
+X1JFUExZ 70200
+IGNvbXBsZXRh 70201
+VkVTVA== 70202
+X3BhcnRpY2xl 70203
+IHNvcA== 70204
+IGZhdGFsaXRpZXM= 70205
+aW1wbGlmeQ== 70206
+IFNLRg== 70207
+IGluZnVzaW9u 70208
+IEphdmllcg== 70209
+IGJhbGxldA== 70210
+IGFtaWdv 70211
+LndhbnQ= 70212
+IGNvbGxhZ2Vu 70213
+IExhd3llcg== 70214
+LlN0YXRlbWVudA== 70215
+LnJ0 70216
+YmFhcg== 70217
+RW5kUG9pbnQ= 70218
+IEJlaw== 70219
+U0hJUA== 70220
+IHBhdHJpYXJjaA== 70221
+IEF1bnQ= 70222
+X1RN 70223
+IG3DrW4= 70224
+IG1hc3RlcmVk 70225
+V1hZWg== 70226
+IGVzcG9z 70227
+PWxvZ2dpbmc= 70228
+IHJpZ2h0ZW91c25lc3M= 70229
+dG9ycmVudA== 70230
+IGJzdA== 70231
+X0NIQUlO 70232
+IG91dHNraXJ0cw== 70233
+KHJvdGF0aW9u 70234
+ICcuJyk= 70235
+aWdyYW50cw== 70236
+K2xzaQ== 70237
+IENDVFY= 70238
+X1BIQVNF 70239
+LmF6dXJl 70240
+X1Byb2Nlc3M= 70241
+dmFl 70242
+IFRyb3BpY2Fs 70243
+IEFua2FyYQ== 70244
+aW1hZ2VWaWV3 70245
+X1JVTk5JTkc= 70246
+ICopX18= 70247
+4bq/bg== 70248
+KGNsaQ== 70249
+c2NhdHRlcg== 70250
+IHNjaGU= 70251
+UmVnaXN0cmFy 70252
+IGFpcmluZw== 70253
+IHB5cGxvdA== 70254
+aXNpw7Nu 70255
+L2N1c3RvbWVy 70256
+IHNpbXBsZW1lbnQ= 70257
+IGNsYXNzeQ== 70258
+IERXQw== 70259
+IEJhc2hhcg== 70260
+IERFVkVMTw== 70261
+IFZpY2s= 70262
+YXZhaWw= 70263
+IEjDtg== 70264
+X2V4dGVuZA== 70265
+ZHJGYw== 70266
+LmlzTm90Qmxhbms= 70267
+IHBsYWlz 70268
+fH0K 70269
+IHBvcm5vZmls 70270
+bGFicw== 70271
+IGhhdXM= 70272
+IG9yaWdpbmF0aW5n 70273
+IHN1cnJvdW5kcw== 70274
+IFFVQUw= 70275
+bWVn 70276
+L2xvZ2dlcg== 70277
+W29iag== 70278
+IGlycmVzcG9uc2libGU= 70279
+IFB1YmxpY0tleQ== 70280
+SE9ORQ== 70281
+Oicv 70282
+aWJveA== 70283
+IEZWZWN0b3I= 70284
+fHsK 70285
+YXRhbG9hZGVy 70286
+aGF3a3M= 70287
+SERS 70288
+IGVzY2FsYXRpb24= 70289
+IFBvZHNEdW1teQ== 70290
+ZWxpdGU= 70291
+IHByZXN1cA== 70292
+Q2FjaGVk 70293
+Pkc= 70294
+Lm9wdGltaXplcg== 70295
+IFZpc2libGU= 70296
+tIA= 70297
+IG5lbg== 70298
+IHBjcw== 70299
+IElkbGU= 70300
+W0FueQ== 70301
+IGtleWJvYXJkcw== 70302
+IENPTVBPTkVOVA== 70303
+IHRpdGFuaXVt 70304
+KG11dA== 70305
+IExlZGdlcg== 70306
+IHByb3NwZXJvdXM= 70307
+ZXRyb2ZpdA== 70308
+X0xM 70309
+X3BhdGllbnQ= 70310
+IHBkYXRh 70311
+IGtvbnRha3Rl 70312
+U3dpcGU= 70313
+IGNoZWVyZnVs 70314
+IEhvbmR1cmFz 70315
+Il1bJA== 70316
+IGhlbW9ycmg= 70317
+IjoiKw== 70318
+IGxlYXNpbmc= 70319
+IGluc3RhbGxz 70320
+IFBheA== 70321
+IExvZ2lzdGljcw== 70322
+IGtpbmV0aWM= 70323
+IFBob24= 70324
+X21vdmVtZW50 70325
+CWJ5dGVz 70326
+IGNpbmNv 70327
+IE1hZG5lc3M= 70328
+Iikr 70329
+IEpF 70330
+X2lq 70331
+U2NlbmVNYW5hZ2Vy 70332
+IEJ1c3Q= 70333
+cHRlc3Q= 70334
+YWVh 70335
+IGJlc3Nlcg== 70336
+w61n 70337
+0LTQuNC9 70338
+KHRhc2tz 70339
+KCIoIg== 70340
+c2V0VHlwZQ== 70341
+KG91dGZpbGU= 70342
+CXJlc2V0 70343
+IEFSQw== 70344
+IG3DunNpY2E= 70345
+IFNoZWxm 70346
+IG1pblk= 70347
+cGNo 70348
+IHdlaWJlcg== 70349
+aXNzb3I= 70350
+IHRyb3V2ZQ== 70351
+CUJ1dHRvbg== 70352
+IHJlZ2VuZXJhdGVk 70353
+xaNp 70354
+aW1hY2hpbmVyeQ== 70355
+YmxvY2tpbmc= 70356
+LmRhdGFUYWJsZXM= 70357
+X2ZyYWM= 70358
+IEFkdmFudGFnZQ== 70359
+LnZpc2l0TWV0aG9k 70360
+6YeN5paw 70361
+IGV4dHJhcG9s 70362
+IHRlYXNpbmc= 70363
+IEhpdGNo 70364
+IEdlZWs= 70365
+RVNDTw== 70366
+IHdpY2g= 70367
+CWF4 70368
+X2RlY29y 70369
+IHNjcmVlbldpZHRo 70370
+IFNvcGhpYQ== 70371
+Rm9yZ290 70372
+LnVuaQ== 70373
+IFZlbnR1cmU= 70374
+X2NvbGxpc2lvbg== 70375
+IGxhd21ha2Vy 70376
+KEVkaXQ= 70377
+YmxlcnM= 70378
+IGdldE5leHQ= 70379
+4oCUeW91 70380
+TWVkaWFQbGF5ZXI= 70381
+IEhvcmRl 70382
+IENvbmdyZXNzbWFu 70383
+b2JzZXJ2YXRpb25z 70384
+CXByb3BlcnR5 70385
+IDwtLQ== 70386
+Q3JlYXRlZEF0 70387
+dWJ5dGU= 70388
+IHF1YXJhbnRpbmU= 70389
+IGRpc3RyZXNzZWQ= 70390
+X0FQQg== 70391
+IEdvb2RtYW4= 70392
+44Kr 70393
+IHJlY29tZW5k 70394
+X1BSSU5URg== 70395
+RE9ORQ== 70396
+QmluZGFibGU= 70397
+cnN0cmlw 70398
+Y2VudGFqZQ== 70399
+IFVuZXhwZWN0ZWQ= 70400
+IFNDSE9PTA== 70401
+IFByb2Zlc3Npb25hbHM= 70402
+IEdQVXM= 70403
+TGVzc29u 70404
+RXhjbHVzaXZl 70405
+IGF0cmF2 70406
+IERhbms= 70407
+IExhd3llcnM= 70408
+IFdhbHRvbg== 70409
+Pltd 70410
+IGFsb3Vk 70411
+PSIuLi8uLi8uLi8= 70412
+IGRlYmF0aW5n 70413
+IEFWRw== 70414
+X1ZPTA== 70415
+L2NnaQ== 70416
+LmRlZw== 70417
+Omc= 70418
+LkluZm9m 70419
+TWVhc3VyZVNwZWM= 70420
+LnNvbmc= 70421
+bXRyZWU= 70422
+dWxscw== 70423
+Sm9yZGFu 70424
+IENvdmVycw== 70425
+IGF0dHJpYnV0YWJsZQ== 70426
+IGplZGlz 70427
+aWF0cmljcw== 70428
+IHJvdHRlcmRhbQ== 70429
+IG1lbGQ= 70430
+IENvbnRlbnRUeXBl 70431
+IG1hbnRsZQ== 70432
+IGFsaWNl 70433
+X2R1cGxpY2F0ZQ== 70434
+L0ludGVybmFs 70435
+IGZpbGVzaXpl 70436
+CWZpcmU= 70437
+cmVzZQ== 70438
+b25kZXJl 70439
+IGZhbWlsaWFyaXR5 70440
+IENyZXN0 70441
+IGthcm1h 70442
+IHRvcmlubw== 70443
+IG1lc2E= 70444
+L3RlbXA= 70445
+IGNoaXI= 70446
+IE92ZXJmbG93 70447
+IHRlbmVtb3M= 70448
+dW5paw== 70449
+TkVYVA== 70450
+QWxsZQ== 70451
+IG54dA== 70452
+TWFydA== 70453
+IGF0bA== 70454
+IHBlcmlvZG8= 70455
+X3lvdQ== 70456
+IH0pKS4= 70457
+aW50ZXN0aW5hbA== 70458
+LkFkYXB0ZXJWaWV3 70459
+IGhlc2l0YW50 70460
+IGNvbXBhcmF0aXZlbHk= 70461
+LlVJbnQ= 70462
+KHZpZXdNb2RlbA== 70463
+IHNhbmdhdA== 70464
+IFJlc3BvbnNpdmU= 70465
+IFphY2s= 70466
+4oU= 70467
+SkFWQQ== 70468
+IEZ1bGxlcg== 70469
+IOKdpA== 70470
+LkNvbnN1bWVy 70471
+IGFuaw== 70472
+IHJlYWN0b3Jz 70473
+ZnVjaw== 70474
+X3JhdA== 70475
+IHNlc3Npb25GYWN0b3J5 70476
+X2JhY2t3YXJk 70477
+IHNjcmFtYmxlZA== 70478
+CXRo 70479
+IGluc2Vuc2l0aXZl 70480
+IGNoYW1wcw== 70481
+IG5naW54 70482
+IGNvbmhlYw== 70483
+IEphc3Blcg== 70484
+LmZt 70485
+U3RyaWN0RXF1YWw= 70486
+YWNoc2Vu 70487
+LU5vdg== 70488
+bGFzc2Vu 70489
+LmludGVncmF0aW9u 70490
+KGxibA== 70491
+Q29tcG9zZQ== 70492
+IEZvbg== 70493
+w5o= 70494
+R3JhdGlz 70495
+IExpbWU= 70496
+IEFkYXB0ZXJWaWV3 70497
+IHBvaXNvbmVk 70498
+YW5jaG9ycw== 70499
+6K6+6K6h 70500
+J10/PiI= 70501
+IHByb2N1cg== 70502
+SXRhbHk= 70503
+Lk1PTlRI 70504
+IExVQQ== 70505
+IExpdGh1YW5pYQ== 70506
+IEhlYWRz 70507
+X0NIVU5L 70508
+IFBVU0g= 70509
+QXNwZWN0UmF0aW8= 70510
+IHdlZw== 70511
+IHZpZHM= 70512
+IFdlaW4= 70513
+CUlOVA== 70514
+c2Vzc2lvbklk 70515
+SW5kdXN0cnk= 70516
+IGRlbm91bmNlZA== 70517
+SktMTQ== 70518
+IFZhbmVzc2E= 70519
+LklkZW50aWZpZXI= 70520
+cHJvcHJp 70521
+INC40LM= 70522
+IHTDqWNu 70523
+IG1vc2FpYw== 70524
+U3RyZWFtUmVhZGVy 70525
+LVRo 70526
+Zm9ydGg= 70527
+IGFkaGVyZW5jZQ== 70528
+YmF0ZQ== 70529
+IGtuaWdodHM= 70530
+c291bmRz 70531
+IHNhbGxl 70532
+T01FVA== 70533
+44K544OI 70534
+LXRt 70535
+IFJoZQ== 70536
+LkZpbGVPdXRwdXRTdHJlYW0= 70537
+5YiG57G7 70538
+IEVORw== 70539
+aG9saWRheQ== 70540
+IENvbmdyYXR1bGF0aW9ucw== 70541
+KSgK 70542
+IGFnZ3JlZ2F0ZXM= 70543
+SE9PSw== 70544
+ZXdpcmU= 70545
+U2VuYXRvcg== 70546
+IGVtYmVkZGluZ3M= 70547
+ZXB5 70548
+KENPTQ== 70549
+IHJvYmJlcg== 70550
+w6R0ZXI= 70551
+d2FuZw== 70552
+X3RlYWNoZXI= 70553
+IHJlc2VudG1lbnQ= 70554
+IGxldHR1Y2U= 70555
+ZXJyZXVy 70556
+KGlj 70557
+IFRhY3RpY2Fs 70558
+IENvbnRyYWN0cw== 70559
+IG3Dpm5k 70560
+IHNpdGlvcw== 70561
+IGJhc3RhbnRl 70562
+IG51ZXZvcw== 70563
+CU5kckZj 70564
+IHByaXZhdGVLZXk= 70565
+dWNjaA== 70566
+TU1kZA== 70567
+IOi+k+WHug== 70568
+dW1iYQ== 70569
+QGZvcmVhY2g= 70570
+OiIpOwoK 70571
+IHNsaXBwZXJ5 70572
+IEtleXN0b25l 70573
+IHBpb25lZXJpbmc= 70574
+X3RyaWFuZ2xl 70575
+KCIK 70576
+CQkJCQkJCQkgIA== 70577
+IEludGVydmVudGlvbg== 70578
+U0NJ 70579
+IGNKU09O 70580
+IHRlcm1pbmF0aW5n 70581
+67mE 70582
+IGJhYnlz 70583
+U3Vic2V0 70584
+IOuh 70585
+IHNldWxlbWVudA== 70586
+IG11ZXN0cmE= 70587
+RW50cmU= 70588
+5Lul5LiK 70589
+bmdv 70590
+ImJ5dGVz 70591
+UVJTVA== 70592
+IHlwb3M= 70593
+cGVyc29uYQ== 70594
+IERlcGxveQ== 70595
+Y2Vl 70596
+IOCu 70597
+LmdvYWw= 70598
+IGhhYml0YXRz 70599
+IGlzQWRtaW4= 70600
+IGV4cGxvaXRpbmc= 70601
+IHZlbnRpbA== 70602
+IEJhbGxz 70603
+2KfYqA== 70604
+IG1pbmRmdWxuZXNz 70605
+KGt3YXJncw== 70606
+IHJlc2VtYmxpbmc= 70607
+IGNob2ly 70608
+IG9uQmFja1ByZXNzZWQ= 70609
+IFNFQ1VSSVRZ 70610
+L2d0ZXN0 70611
+IGp1c3RpY2Vz 70612
+IGludGVnZXJWYWx1ZQ== 70613
+YmxhaA== 70614
+IEFpbQ== 70615
+X2ZpbmFsaXpl 70616
+a2Vo 70617
+IENvbXBsZXhpdHk= 70618
+IGF1Z3VzdA== 70619
+Z2V0RWxlbWVudHNCeVRhZ05hbWU= 70620
+IHByZWFjaA== 70621
+IHByb251bmNpYXRpb24= 70622
+IFRyYXNo 70623
+LXBlcmNlbnQ= 70624
+X1BSSVY= 70625
+IEh1bnRz 70626
+IEN1cnNl 70627
+dWVsbGVu 70628
+IGhlYXZ5d2VpZ2h0 70629
+WGk= 70630
+CXNlbGVjdGVk 70631
+IE1jQ295 70632
+5byC5bi4 70633
+fD0K 70634
+IEJhdHRsZWZpZWxk 70635
+SXRlbUltYWdl 70636
+IGRlZHVjdGlvbnM= 70637
+IEVsZW1lbnRhbA== 70638
+KCkpOy8v 70639
+IEJ1cms= 70640
+fSkNCg0K 70641
+c3dpZnQ= 70642
+L2Z1bmN0aW9u 70643
+VXN1YWxseQ== 70644
+X1N0 70645
+X2ZlYXRz 70646
+IElzVmFsaWQ= 70647
+IHphZA== 70648
+SW1hZ2VDb250ZXh0 70649
+IGNsYXNzbmFtZQ== 70650
+IGRvbm5lcg== 70651
+IC0tPgoKCg== 70652
+IG1vdG9yY3ljbGVz 70653
+KycvJys= 70654
+IHNldEJhY2tncm91bmQ= 70655
+XENNUw== 70656
+LkFsbEFyZ3NDb25zdHJ1Y3Rvcg== 70657
+IExleGluZ3Rvbg== 70658
+LmV4YW1wbGVz 70659
+IFB1cnM= 70660
+UHVzaE1hdHJpeA== 70661
+ID09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09 70662
+LmFkZFRhcmdldA== 70663
+cG9yYQ== 70664
+RnVsbHNjcmVlbg== 70665
+IGdvb2Y= 70666
+aGxlbg== 70667
+w6RnZQ== 70668
+IENVUkw= 70669
+IEludGVyZXN0aW5n 70670
+IHJldHJpZXZlcw== 70671
+X09iag== 70672
+aW5uZXNz 70673
+LS0tLS0KCg== 70674
+LnRzdg== 70675
+KElN 70676
+IEJyYXZlcw== 70677
+X0lTUg== 70678
+b3N0aQ== 70679
+4buT 70680
+IEV4dGVyaW9y 70681
+IENvdXJ0bmV5 70682
+IHJlc2lkdWVz 70683
+VGllcg== 70684
+Lio7DQoNCg== 70685
+OmJsYWNr 70686
+d2ViVmlldw== 70687
+InBhdGg= 70688
+IG1hc2E= 70689
+XSE9Jw== 70690
+IE1hdGNoaW5n 70691
+ZHVy 70692
+SnZt 70693
+PWNvbnRleHQ= 70694
+X1JJTkc= 70695
+IHByb3BvbmVudHM= 70696
+IFFTdHJpbmdMaXRlcmFs 70697
+IGluZmxhdGU= 70698
+PEZsb2F0 70699
+IERvbm92YW4= 70700
+KElP 70701
+SE9SVA== 70702
+IGRpc2FncmVlZA== 70703
+aXNreQ== 70704
+YXNraW5n 70705
+X1ZFQw== 70706
+SEFTSA== 70707
+IG1hdGhz 70708
+IExhc3RseQ== 70709
+IGRlcHJlc3Npbmc= 70710
+LmVzdGFkbw== 70711
+IGhhbG8= 70712
+X2JsZQ== 70713
+IEdhYnJp 70714
+PFRSZXN1bHQ= 70715
+IHRyb29w 70716
+IGVudW1z 70717
+IFNFUklBTA== 70718
+bnVtZXJ1c2Zvcm0= 70719
+IENoaWM= 70720
+LWV4ZWM= 70721
+IGJhY2tsb2c= 70722
+IEJyYXZv 70723
+UG9wTWF0cml4 70724
+IEJydXQ= 70725
+IGJsb3F1ZQ== 70726
+IGp1bml0 70727
+IFdoaWxzdA== 70728
+0YbQuNGP 70729
+ZmV3 70730
+rIE= 70731
+IFZhcmlldHk= 70732
+IFBvbGl0aWNv 70733
+ZXhlbXBsZQ== 70734
+VXNlckNvbnRyb2xsZXI= 70735
+IGhhcmRlbmVk 70736
+YWtlbnM= 70737
+IFNlZWRlcg== 70738
+b3dhcmRz 70739
+Y2hlY2tzdW0= 70740
+IFNhaQ== 70741
+VkVSVEVY 70742
+UmVzcG9uc2Vz 70743
+cGxvZGU= 70744
+LWhhcmQ= 70745
+U3BlY2llcw== 70746
+UmVuZGVyVGFyZ2V0 70747
+X0NIQVQ= 70748
+IHNob3djYXNlcw== 70749
+aXRpbWF0ZQ== 70750
+X0ZPUkVBQ0g= 70751
+X0NPTkZJR1VSQVRJT04= 70752
+ZWJh 70753
+IEVzc2VudGlhbGx5 70754
+KHBvbHk= 70755
+LWxlYXJuaW5n 70756
+IGfDpXI= 70757
+X3N1Y2M= 70758
+KE1hdA== 70759
+IGNvaWxz 70760
+YnJhcw== 70761
+IGFtYQ== 70762
+X21hdGNoaW5n 70763
+aW5kdXN0cnk= 70764
+IE5vcnJpcw== 70765
+IEV4cG9zdXJl 70766
+IHBlcnZhc2l2ZQ== 70767
+IGRleg== 70768
+5peP 70769
+IGVsZWN0cm9uaWNhbGx5 70770
+RERS 70771
+IFN0aW0= 70772
+INGE0LDQudC70LA= 70773
+IG1hZHJl 70774
+bmVtb25pYw== 70775
+a2ljaA== 70776
+IEZyYWdlbg== 70777
+IFJ1bmU= 70778
+IG9uVG91Y2g= 70779
+CXNjYWxl 70780
+IFBoYXJtYWM= 70781
+IE1hbmRhdG9yeQ== 70782
+IFN0bw== 70783
+IEJyYW0= 70784
+X0xlZnQ= 70785
+X1NUQVI= 70786
+KX19Ig== 70787
+c2Npb3VzbHk= 70788
+0LXQt9GD0LvRjNGC 70789
+56uZ 70790
+Z3Jhdml0eQ== 70791
+K0M= 70792
+fTw= 70793
+QU5HRVM= 70794
+IGNvbnRyYWN0aW9u 70795
+IFdhbGxwYXBlcg== 70796
+LkZhY2U= 70797
+IHByw7N4aW1v 70798
+LmZpZw== 70799
+bGFuZ2xl 70800
+INC/0LXRgNC10Lw= 70801
+X0NSRUFU 70802
+QmFzaWNhbGx5 70803
+IGF3YWl0cw== 70804
+IENIQVJBQ1RFUg== 70805
+IHZwbg== 70806
+SG9u 70807
+IGV2aXRhcg== 70808
+IFVuZG8= 70809
+UVM= 70810
+IEVkbXVuZA== 70811
+IG1pcmFjbGVz 70812
+IFRpbWluZw== 70813
+IFZlbmV6dWVs 70814
+LlNxcnQ= 70815
+b2lkYWw= 70816
+IGVycnM= 70817
+LS0tLS0tLS0KCg== 70818
+IERFQ0xBUkU= 70819
+IHZpZ29yb3Vz 70820
+YXJnb24= 70821
+IGFnZ3JlZ2F0ZWQ= 70822
+IFNoYXJrcw== 70823
+IEN5cnVz 70824
+IHJlcHLDqXM= 70825
+bWF0Y2hlcg== 70826
+IGd1aUFjdGl2ZQ== 70827
+PyIpCg== 70828
+IEpOSQ== 70829
+LmNoYXJzZXQ= 70830
+J3w= 70831
+IGdvYXRz 70832
+aW5kcmU= 70833
+LmdldERheQ== 70834
+IHBhcnNlcw== 70835
+IElocmVu 70836
+X18uJy8= 70837
+aWxlZ2Vz 70838
+bmF2aWdhdGU= 70839
+IEJ1ZmZ5 70840
+UEhQVW5pdA== 70841
+IG1hc3Nh 70842
+YWx0YXI= 70843
+JyldLAo= 70844
+IG92ZXJzZWVz 70845
+IHt9DQoNCg== 70846
+IFdMQU4= 70847
+Y2xpcGJvYXJk 70848
+X0luc3RhbmNl 70849
+IGdsYWRseQ== 70850
+KHNlcmllcw== 70851
+IHZhZA== 70852
+IGdldFBhZ2U= 70853
+W29m 70854
+LkludGVydmFs 70855
+aW51cw== 70856
+Y2hhckF0 70857
+b2xlbQ== 70858
+YWludGluZw== 70859
+LkFG 70860
+X21pbm9y 70861
+X0lM 70862
+O3k= 70863
+IFRlbGVjb20= 70864
+IFBvbmQ= 70865
+IG1tYXA= 70866
+L14= 70867
+IFlhaw== 70868
+IFJhYmJp 70869
+ZW5vcw== 70870
+CUNvbnRleHQ= 70871
+LnZlYw== 70872
+KEF0dHJpYnV0ZQ== 70873
+IGNhdGVnb3JpemVk 70874
+IGRpYWJldGlj 70875
+KHJhbms= 70876
+IHBhw61zZXM= 70877
+IEAiIjsK 70878
+IGppa2E= 70879
+YXJzaXR5 70880
+IC8o 70881
+LkhlbHA= 70882
+LWJhbm5lcg== 70883
+IEJ5cm9u 70884
+IHVucmVhbGlzdGlj 70885
+IHxf 70886
+IFN0b3B3YXRjaA== 70887
+IGV4ZW1wdGlvbnM= 70888
+L2NhcmRz 70889
+IHRvc3RyaW5n 70890
+bmdpbmU= 70891
+IHNwcmF3bGluZw== 70892
+IGx0ZA== 70893
+IFVuZGVyc3RhbmQ= 70894
+INGC0LXQutGB0YI= 70895
+ZXdpdG5lc3M= 70896
+IGNhbGxCYWNr 70897
+LVllYXI= 70898
+RnVlbA== 70899
+PSo= 70900
+IGludmVudG9y 70901
+IGJlc3RzZWxsaW5n 70902
+IGhhcmRuZXNz 70903
+IFR1cw== 70904
+IGtleW5vdGU= 70905
+IGJlYXU= 70906
+X2Fib3J0 70907
+IHByb3Bvcg== 70908
+IGNvbWVyYw== 70909
+X1JFRkVS 70910
+UGFz 70911
+aGF2ZW4= 70912
+LWZpeA== 70913
+Q2Fub25pY2Fs 70914
+IGxvb2tvdXQ= 70915
+RXhwbG9yZXI= 70916
+IGNlcmNv 70917
+KHNlbnNvcg== 70918
+IEpzb25TZXJpYWxpemVy 70919
+IHZva3Nlbg== 70920
+IGJyaWdodGVzdA== 70921
+IHN0YWJiaW5n 70922
+LkJl 70923
+LmFkZFByb3BlcnR5 70924
+IEh1bXBo 70925
+IGlzQXV0aGVudGljYXRlZA== 70926
+5rKh 70927
+IHBvcmVz 70928
+IGplZ28= 70929
+IFNob3dpbmc= 70930
+ID8+Ij4NCg== 70931
+X0NPU1Q= 70932
+aWxpbmVhcg== 70933
+IFdvcmtzcGFjZQ== 70934
+IHNwZWw= 70935
+YWdvZ3Vl 70936
+IE1pbGxlbm5pdW0= 70937
+IFBvcHVsYXRl 70938
+IG5pZA== 70939
+LnBhcnNlQ29sb3I= 70940
+U29sYXI= 70941
+IEdhZA== 70942
+IOykkQ== 70943
+IEthbXA= 70944
+CXJt 70945
+IGJlbno= 70946
+IEhvbmVzdGx5 70947
+IGVsZWN0cm9kZQ== 70948
+IFByYWlyaWU= 70949
+IFBST0ZJTEU= 70950
+IE9yaWVudGFs 70951
+IE9MRUQ= 70952
+L2NvcHlsZWZ0 70953
+YXdhaWk= 70954
+KHByb2R1Y3Rz 70955
+KVw8 70956
+LWNyZWF0ZWQ= 70957
+Lk1hbnlUb01hbnk= 70958
+Ikhvdw== 70959
+INCy0YvQvw== 70960
+IG1pdG9jaG9uZHJpYWw= 70961
+X3Rlc3Rpbmc= 70962
+KGNyZWF0ZWQ= 70963
+IGdldEZpZWxk 70964
+X0VWQUw= 70965
+XS4i 70966
+IEZTTQ== 70967
+IFJpdGE= 70968
+IOWPguaVsA== 70969
+IGPDtHQ= 70970
+IEluc2lnaHQ= 70971
+CW15c3FsaQ== 70972
+X3RpbWluZw== 70973
+SURP 70974
+KSkpKSkK 70975
+Q09WRVJZ 70976
+LmltYWc= 70977
+Q0RG 70978
+bHVzdA== 70979
+aWNrdA== 70980
+X0ZQ 70981
+LicsJw== 70982
+Z2Nj 70983
+IGt1cno= 70984
+X3B3bQ== 70985
+IG9kcG93aWVk 70986
+IEJhcnJpZXI= 70987
+LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKgo= 70988
+cGFr 70989
+LUlzcmFlbA== 70990
+IFJ1dGdlcnM= 70991
+IHNlbGVjdGVkSXRlbQ== 70992
+IFJhbWlyZXo= 70993
+RmFybQ== 70994
+IGNhbGVuZGFycw== 70995
+Z3ppcA== 70996
+IGJsb2NrYnVzdGVy 70997
+IFBseW1vdXRo 70998
+55yM 70999
+cmVzcG9uc2Vz 71000
+LkRpYWxvZ0ludGVyZmFjZQ== 71001
+LWdyYW5k 71002
+IGdldFNvdXJjZQ== 71003
+IGRlanRpbmdz 71004
+IHRpZXRlbg== 71005
+IGNvbmRlbW5hdGlvbg== 71006
+IGNvbnRpbnVhcg== 71007
+Lk1vY2tNdmM= 71008
+L2VuZ2xpc2g= 71009
+IE1lZGlhUGxheWVy 71010
+Y29tcHV0ZWQ= 71011
+IENsaXBwZXJz 71012
+KGRlbGVnYXRl 71013
+LlNsZg== 71014
+IOuhnA== 71015
+IFRpZGU= 71016
+IGlocmVt 71017
+IFdhbg== 71018
+0YPRjtGJ 71019
+fT48 71020
+RGlzY3Vzc2lvbg== 71021
+IHdhdHRz 71022
+LW1pbnVz 71023
+IEp1bGlldA== 71024
+6ZuF 71025
+IGNvbmNsdWRpbmc= 71026
+YW5kc2NhcGU= 71027
+IMO6bHRpbWE= 71028
+IERFUlA= 71029
+IHNpZ25VcA== 71030
+IFNlY29uZGx5 71031
+V0FJVA== 71032
+bGRz 71033
+LmNhbGxiYWNrcw== 71034
+KGhvdXI= 71035
+aW1hdG9ycw== 71036
+dm9sZW50 71037
+QUFG 71038
+ZWRyaXZlcg== 71039
+IE1hdGhlbWF0aWM= 71040
+PFR1cGxl 71041
+IC8+Jw== 71042
+e2o= 71043
+X0FCT1JU 71044
+RXRoZXI= 71045
+IGVkdWNhdG9y 71046
+IHByZWNhdXRpb24= 71047
+IGZpbmdlcnRpcHM= 71048
+Z2V0VmFy 71049
+Y2FtYXRhbg== 71050
+LWRlYnVn 71051
+IFJBRg== 71052
+W2FyZw== 71053
+IHJhY2Vk 71054
+IHRzdW5hbWk= 71055
+LmZsaW5r 71056
+IGdseWM= 71057
+dWtv 71058
+IE11bHRpcGx5 71059
+IHJlZGlzdHJpYnV0aW9u 71060
+QUdP 71061
+IFJvdXRpbmU= 71062
+IG9wcg== 71063
+KGxvd2Vy 71064
+IEZ1bmt0aW9u 71065
+LmRr 71066
+IGVndA== 71067
+X0JBU0lD 71068
+c3lzY2FsbA== 71069
+IExTRA== 71070
+IER1cGxpY2F0ZQ== 71071
+X3NlbGw= 71072
+IGVycm9ySGFuZGxlcg== 71073
+X2lwcw== 71074
+IGVydg== 71075
+YW5uaWU= 71076
+KHJlc291cmNlTmFtZQ== 71077
+IGJvdHRsZWQ= 71078
+IGNyYXdsaW5n 71079
+ZWdtZW50 71080
+LnNldFRhZw== 71081
+IHJzcw== 71082
+IFF1YXJyeQ== 71083
+X2V4YWN0 71084
+Lmp3dA== 71085
+IEJvYXJkcw== 71086
+b3Bp 71087
+IG5hc2Fs 71088
+IFhZWg== 71089
+LnVk 71090
+Tm9ydGhlcm4= 71091
+IGFjdGl2YXRpbmc= 71092
+ZWR4 71093
+b3ZhaA== 71094
+IGluZHg= 71095
+QWxlcnREaWFsb2c= 71096
+IHRpZW5lcw== 71097
+YW5ueWE= 71098
+X3Bhbg== 71099
+KGRlY2ltYWw= 71100
+LkRpY3Q= 71101
+IHN1YnNpZGlhcmllcw== 71102
+UHJvZHVjdE5hbWU= 71103
+RmV3 71104
+ZGF0bw== 71105
+b2RpZWQ= 71106
+LXVuZGVy 71107
+IOqygw== 71108
+54mI5pys 71109
+YXRpc20= 71110
+W01hdGg= 71111
+Lic8 71112
+KGluZmlsZQ== 71113
+IGRlbm90ZXM= 71114
+JGNsYXNz 71115
+X1NFQ1VSSVRZ 71116
+IHNld2FnZQ== 71117
+bWVsb24= 71118
+KENoYXJhY3Rlcg== 71119
+L2dpdGh1Yg== 71120
+IGdsYXJpbmc= 71121
+Lkd1aWQ= 71122
+X3NwYXJzZQ== 71123
+IE1hcmdpbg== 71124
+X2Rucw== 71125
+IG1laW5lcg== 71126
+IGxlZnRpc3Q= 71127
+CWxvYw== 71128
+YWJ5dGVz 71129
+IGVxdWlwbWVudHM= 71130
+ZXhwbw== 71131
+IFNvbWVyc2V0 71132
+RUs= 71133
+5o2i 71134
+IGxlY3R1cmVy 71135
+IG1lbWlsaWtp 71136
+5qC4 71137
+57Sg 71138
+cHJvbg== 71139
+OnBvaW50ZXI= 71140
+Ym9ycm93 71141
+IFByb3RlY3RpdmU= 71142
+X2Nm 71143
+INCV0YHQu9C4 71144
+YnBw 71145
+JzsKCgoK 71146
+YXR1cmFsbHk= 71147
+X05BVg== 71148
+IHBlcHRpZGU= 71149
+PmQ= 71150
+IGlmc3RyZWFt 71151
+X0ZBQ1RPUlk= 71152
+Jyk7Ly8= 71153
+am9pbmVk 71154
+bW9uZw== 71155
+IHRpbWVzcGVj 71156
+IGRlc3RhYmls 71157
+IGF1dG9w 71158
+LWxpbWl0 71159
+cHVibGljYXRpb24= 71160
+IERlbm4= 71161
+Lk1lbW9yeQ== 71162
+KHNrYg== 71163
+IEFuYWhlaW0= 71164
+X1JFVFVSTlRSQU5TRkVS 71165
+b3VldXI= 71166
+KF8oJw== 71167
+bGVndA== 71168
+aXN0aW5ndQ== 71169
+CXByaXY= 71170
+IHJlZGlyZWN0cw== 71171
+TXQ= 71172
+IGFsbGVlbg== 71173
+IFBvaW50Rg== 71174
+IG9taW4= 71175
+IGNpdHQ= 71176
+IFRhZ2U= 71177
+IFdhbGxz 71178
+4buJ 71179
+IG9jY3VweWluZw== 71180
+eEJG 71181
+cmFuZ2xl 71182
+IHJlbGF0aW9uYWw= 71183
+LW9yZw== 71184
+IGpwZw== 71185
+LWRlcml2ZWQ= 71186
+IG1hbGZ1bmN0aW9u 71187
+IEJlbnNvbg== 71188
+KHNjcm9sbA== 71189
+IFhE 71190
+SG9seQ== 71191
+KGNvbW1hbmRz 71192
+IHRpcHBpbmc= 71193
+IHByaW1pdGl2ZXM= 71194
+IHNleGxl 71195
+Q2FsbENoZWNr 71196
+IE1BU1RFUg== 71197
+X1RFQU0= 71198
+LnNldFJlcXVlc3RIZWFkZXI= 71199
+X3NwZWNz 71200
+IHNlcmdl 71201
+Lk1hc3Rlcg== 71202
+IGltcw== 71203
+LlNwcmluZ0Jvb3RUZXN0 71204
+cGF5cGFs 71205
+IFdBTlQ= 71206
+Lkluc3Q= 71207
+IENhcnBldA== 71208
+IHdyb25nbHk= 71209
+KCQoJy4= 71210
+IGJpbGQ= 71211
+LlJvbGw= 71212
+IFVyYg== 71213
+LWNhbg== 71214
+44GP44Gg44GV44GE 71215
+b2xpYmVyYWw= 71216
+PCEtLTw= 71217
+4oCUZm9y 71218
+IG5lZ2F0ZQ== 71219
+KG5vcm0= 71220
+YWVj 71221
+X3NhbGFyeQ== 71222
+cGxhaW50ZXh0 71223
+b2Rlc2s= 71224
+IEJvc2No 71225
+U2NpZW50aXN0cw== 71226
+aW5kZXhlcw== 71227
+IG1weg== 71228
+IGdyb3VuZHdhdGVy 71229
+fX0pOwo= 71230
+0LDQu9C40Lc= 71231
+IGVybw== 71232
+IHByZXNjcmliZQ== 71233
+IEV4dHI= 71234
+PEFycmF5TGlzdA== 71235
+IGF0cm9jaXRpZXM= 71236
+QXJlYXM= 71237
+IFRJbnQ= 71238
+KHBsYXllcnM= 71239
+IGRhdGFi 71240
+IHd5bQ== 71241
+44Gb 71242
+IGR1YXM= 71243
+X3Bvc3NpYmxl 71244
+IGluc3RydWN0aW9uYWw= 71245
+aXRpb25lcg== 71246
+L2F1ZGlv 71247
+ICAgICAgICAgICAgICAgIAoK 71248
+c3RvcmVk 71249
+T01QSQ== 71250
+IGFwcHJlbnRpY2Vz 71251
+VGVuYW50 71252
+IENvdXQ= 71253
+IGNvbnRyYWNlcHRpb24= 71254
+TG9hbg== 71255
+X3Zpc2liaWxpdHk= 71256
+J3x8 71257
+LlBhcnNlRXhjZXB0aW9u 71258
+IGNvaW5jaWRl 71259
+LmdldFdpbmRvdw== 71260
+IE1hcnRpYWw= 71261
+X3Rscw== 71262
+L2Jvb2tz 71263
+IG91dHJhZ2Vk 71264
+ICh+KA== 71265
+c3Ryc3Ry 71266
+IEJveGVz 71267
+6YO9 71268
+44Ol 71269
+Uk9J 71270
+RnVuY3Rpb25hbA== 71271
+IFByb2Q= 71272
+PFRlc3Q= 71273
+IHZpZGVvdA== 71274
+IGFtb3Jl 71275
+YWJicg== 71276
+IE1vbnVtZW50 71277
+IHJlaW5mb3JjZW1lbnQ= 71278
+IENvY29udXQ= 71279
+LnNlbmRTdGF0dXM= 71280
+Lmtl 71281
+IExlYXA= 71282
+X2FydGljbGVz 71283
+UGll 71284
+IElydmluZQ== 71285
+QUJDREVGR0hJ 71286
+IEV4cGxhbmF0aW9u 71287
+Z3JvdXBCeQ== 71288
+IG92ZXJoZQ== 71289
+IGFuw6Fs 71290
+IGNsYXNzaWZpZXJz 71291
+IE1peGVy 71292
+L2NvbG9ycw== 71293
+IFVzZXJEYXRh 71294
+X0FSUk9X 71295
+X3ZsYW4= 71296
+LkNyZWF0ZURpcmVjdG9yeQ== 71297
+IEhhaw== 71298
+IEJvbmVz 71299
+IEFwaVJlc3BvbnNl 71300
+IE1vb2R5 71301
+REFD 71302
+Z2V0Yw== 71303
+6LaF 71304
+LkZpcmU= 71305
+6aM= 71306
+IGhpdHRlcg== 71307
+ZnJlc2g= 71308
+4LmB 71309
+IENoaWxkaG9vZA== 71310
+eG9y 71311
+LWh0dHA= 71312
+IE1PUg== 71313
+LnNlbmRLZXlz 71314
+X3NoYXBlcw== 71315
+IFVwcw== 71316
+IEFycmVzdA== 71317
+YXp6aQ== 71318
+X29wY29kZQ== 71319
+Lk5vbWJyZQ== 71320
+IHByw7Nw 71321
+IHp4 71322
+IHRyZW1lbmRvdXNseQ== 71323
+U3BhY2Vz 71324
+ZWNj 71325
+IHZlbHZldA== 71326
+IG1lbW9yaWE= 71327
+IExBUA== 71328
+LkRyYXdMaW5l 71329
+IHRhcmdldFR5cGU= 71330
+cmVzdHJpY3Rpb24= 71331
+IERSVg== 71332
+W3RvcA== 71333
+IeKAmQ== 71334
+L2NoYXQ= 71335
+IHNvbmlj 71336
+VG9yb250bw== 71337
+b3dp 71338
+LmRvY3M= 71339
+IEluaXRpYWxpc2U= 71340
+IDwh 71341
+LnRibA== 71342
+LlByZXBhcmVkU3RhdGVtZW50 71343
+L2RvbQ== 71344
+LnJvdA== 71345
+X1BST00= 71346
+S2VlcGluZw== 71347
+IGhhcmdh 71348
+IGpvcm4= 71349
+IGlkZW50aWZpYWJsZQ== 71350
+W2lw 71351
+UGluaw== 71352
+X0hlYWRlcg== 71353
+w5E= 71354
+YWRsZQ== 71355
+572R57uc 71356
+c2VxdWVudA== 71357
+QWN0aXZhdGVk 71358
+dG1wbA== 71359
+IFBhbGw= 71360
+IGZhdGFsbHk= 71361
+fX0pCg== 71362
+UG9wb3Zlcg== 71363
+IE1jTGFyZW4= 71364
+Q2hhbmdlZEV2ZW50QXJncw== 71365
+IEZvcm1hdGlvbg== 71366
+TmFt 71367
+bmV3c2xldHRlcg== 71368
+LmZyb21TdHJpbmc= 71369
+X2ltbQ== 71370
+QVBQRUQ= 71371
+LG5vZGU= 71372
+KGRldA== 71373
+IHBhcmFsbGVscw== 71374
+IGxhc2Vycw== 71375
+IGNob2NvbA== 71376
+L3BvcnQ= 71377
+YWZmZW4= 71378
+KGRldGFpbHM= 71379
+IHJlcGxpY2F0ZWQ= 71380
+QXNTdHJlYW0= 71381
+YXJtYWM= 71382
+XV09 71383
+YWxhY2g= 71384
+X3Nlc3Npb25z 71385
+QWxnb3JpdGhtRXhjZXB0aW9u 71386
+IHZlcmJvc2l0eQ== 71387
+LkNvbHVtblN0eWxlcw== 71388
+KFVTRVI= 71389
+IHNsZWVwcw== 71390
+IGFxdWF0aWM= 71391
+X2J1bGs= 71392
+PScuLw== 71393
+b3VybsOpZQ== 71394
+IE1TRA== 71395
+IEJsb2M= 71396
+IEdsZQ== 71397
+IHJlcHJlc3Npb24= 71398
+IGVudG9uY2Vz 71399
+CQkgICAgICAgICAgICAgICAgICAg 71400
+WU5D 71401
+LkFsbG93R2V0 71402
+IHR1cnRsZXM= 71403
+ICd+Lw== 71404
+ZXNzb24= 71405
+IERJRQ== 71406
+IEFxdWE= 71407
+IFNFUQ== 71408
+Ozs7Ozs7Ozs7Ozs7Ozs7Ow== 71409
+LnB1dHM= 71410
+IE1BSw== 71411
+KEN1c3RvbWVy 71412
+IGRlc3NlcnRz 71413
+IGVtYmVsbA== 71414
+IHRheGVk 71415
+5bqX 71416
+IHNjaGw= 71417
+cmVzY28= 71418
+IEZyb2c= 71419
+IFBlbmRpbmdJbnRlbnQ= 71420
+X0xvY2Fs 71421
+L3NlY3VyaXR5 71422
+IFJveA== 71423
+IHNwb2lsZWQ= 71424
+X1dJTkRPV1M= 71425
+SmVubmlmZXI= 71426
+IGRhdGk= 71427
+VW5sb2Fk 71428
+LmdyaWR4 71429
+KHN0YWdl 71430
+4buX 71431
+U3FsQ29tbWFuZA== 71432
+Lm14 71433
+IGJsaXR6 71434
+IEZvcnRyZXNz 71435
+IEJyb3dzZXJBbmltYXRpb25zTW9kdWxl 71436
+d2luZQ== 71437
+TlNF 71438
+LXJhbmtpbmc= 71439
+eXJl 71440
+IGxpbmthZ2U= 71441
+w6Fr 71442
+kZw= 71443
+YXRzYXBw 71444
+IEN5Y2w= 71445
+IGVjb2xvZ3k= 71446
+IGJsYXRhbnQ= 71447
+IFBlcmY= 71448
+IFhpYW9taQ== 71449
+IERvcnRtdW5k 71450
+cmVzdWx0U2V0 71451
+IGdpw6A= 71452
+IGZhdWNldA== 71453
+IERhbHRvbg== 71454
+IGZyZWVz 71455
+QlVGRg== 71456
+LnBhcmFsbGVs 71457
+IEFzdHJvcw== 71458
+IFZFQ1RPUg== 71459
+IHN0YW5kb3V0 71460
+w7Ntbw== 71461
+IGZyYW1lYm9yZGVy 71462
+X1BBUkFNRVRFUlM= 71463
+IEZhbGs= 71464
+IERpZ2l0 71465
+IGVsZWN0csOzbmljbw== 71466
+IHZlcnI= 71467
+VUlBbGVydFZpZXc= 71468
+KFNxbA== 71469
+LUlORg== 71470
+IikpKTs= 71471
+JycK 71472
+KEVGRkVDVA== 71473
+IFp1bQ== 71474
+X0RQ 71475
+KV07DQo= 71476
+IGFudGVubg== 71477
+IGFiYnJldmlhdGlvbg== 71478
+IHNlaXNtaWM= 71479
+X1RSQU5TTA== 71480
+tZw= 71481
+Lk1pbGxpc2Vjb25k 71482
+LGxhdA== 71483
+IEFuY2g= 71484
+X01vZA== 71485
+QWxyaWdodA== 71486
+ZGRh 71487
+IMKl 71488
+VU5ETEU= 71489
+INC30LDQsw== 71490
+IHN1bGZ1cg== 71491
+IFNpdGg= 71492
+IE5pbWJ1cw== 71493
+IEV4YW1pbmF0aW9u 71494
+X3dpZmk= 71495
+fWApOwoK 71496
+IHNlbnNhdGlvbnM= 71497
+YWZz 71498
+X0NMUg== 71499
+IGluZmluaXRlbHk= 71500
+IHN5c3TDqG1l 71501
+X2ZvbnRz 71502
+SW1wYWN0 71503
+UG93ZXJlZA== 71504
+IDw9Pg== 71505
+X25lZWQ= 71506
+REVDUkVG 71507
+IC8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8v 71508
+IFJlcG8= 71509
+Z2V0U2VydmljZQ== 71510
+JG4= 71511
+X3BjdA== 71512
+RXJyZXVy 71513
+IE5HT3M= 71514
+ICoKCgo= 71515
+LmF0YW4= 71516
+X1RNUA== 71517
+IGNvbGxhcHNpbmc= 71518
+IHNobw== 71519
+X1BDSQ== 71520
+Lm9wZXI= 71521
+KGFkag== 71522
+IGdpb3Y= 71523
+Piku 71524
+IGluY29udHJv 71525
+YXJkYQ== 71526
+IGFwZXg= 71527
+IG1lZGlkYQ== 71528
+IFNoZWlraA== 71529
+IEFybWVuaWE= 71530
+YXNzb2NpYXRl 71531
+LXdvdw== 71532
+IFR1cm5pbmc= 71533
+IEZyZXVk 71534
+IEZvb2w= 71535
+IExEUw== 71536
+LS0tLS0tLQoK 71537
+b2xzb24= 71538
+LkZJTEU= 71539
+X2RldGVjdG9y 71540
+RG9taW4= 71541
+IGRlcGxveW1lbnRz 71542
+IGZhcmV3ZWxs 71543
+KGJpbmQ= 71544
+IG5vdmljZQ== 71545
+dGRvd24= 71546
+IGdldEVsZW1lbnQ= 71547
+IHZlbGl0 71548
+YXN0aGFu 71549
+CWNoYW5uZWw= 71550
+X0ZSQU1FQlVGRkVS 71551
+LnRyYWlsaW5n 71552
+LnNldEVkaXRhYmxl 71553
+Oyw= 71554
+IElERg== 71555
+X1BC 71556
+Z2V0TGFzdA== 71557
+IENvYXN0YWw= 71558
+IEhhbmR5 71559
+bGluZ2Vy 71560
+44Gn44KC 71561
+UGVyc2lzdGVuY2U= 71562
+LmdldFNlcnZpY2U= 71563
+INC+0Lo= 71564
+IG5vdHdpdGhzdGFuZGluZw== 71565
+KFBS 71566
+VU1C 71567
+J10pKXsNCg== 71568
+ZW1icmFuY2U= 71569
+ZXhjZXJwdA== 71570
+YXF1 71571
+X2Jsb2M= 71572
+IFByb3Zpc2lvbg== 71573
+IE1jRG9u 71574
+IEdvbGRiZXJn 71575
+IGNvbXBvbmVudFdpbGxVbm1vdW50 71576
+IGJhc2VQYXRo 71577
+LWZpcmVk 71578
+IGZvbGxhbmRv 71579
+IFRpbGVz 71580
+QGVuZGZvcmVhY2g= 71581
+RU5DSUw= 71582
+IEJveGluZw== 71583
+aXF1ZXI= 71584
+QWNoaWU= 71585
+RW51bXM= 71586
+QmFzZVVybA== 71587
+KHNjYW4= 71588
+IFBhc3NpdmU= 71589
+YWJlbGxh 71590
+L3Nu 71591
+Lm51bWVyaWNVcERvd24= 71592
+IHZlcm4= 71593
+bG9jYWxpemVk 71594
+IE1peg== 71595
+IHJlc3VsdExpc3Q= 71596
+L3Z1ZQ== 71597
+RVJWSUNF 71598
+Lm9k 71599
+IGxpZ24= 71600
+IFN0cmluZ1Rva2VuaXplcg== 71601
+IHRyYWc= 71602
+QWNjb3JkaW9u 71603
+IG5vcmVmZXJyZXI= 71604
+bXNjb3JsaWI= 71605
+w6F0aXM= 71606
+Ynl0ZXI= 71607
+IHNob3dkb3du 71608
+IHNlbWFpbmU= 71609
+IC0tPg0KDQo= 71610
+IE1haG0= 71611
+fSI7Cgo= 71612
+IGRx 71613
+IFB1Ymxpc2hlcnM= 71614
+IEFtcGw= 71615
+IERhbmllbGxl 71616
+IHRlcm4= 71617
+6LW3 71618
+bm/Fm8SH 71619
+ZWlu 71620
+IEFzeW5jU3RvcmFnZQ== 71621
+dW5nZXI= 71622
+cm91dw== 71623
+IHNjaXNzb3Jz 71624
+L2Fzc2VydA== 71625
+LmJ1Y2tldA== 71626
+L2FyY2hpdmU= 71627
+X01hbg== 71628
+IGludG9sZXI= 71629
+ICgpPT4= 71630
+INCS0Ys= 71631
+IHNhaQ== 71632
+Lnh5 71633
+LiINCg== 71634
+IHVyaW5hcnk= 71635
+ZXN1Yg== 71636
+SVNUSUNT 71637
+IM66 71638
+IGNvbXBsaW1lbnRz 71639
+IHR5cGluZ3NKYXBnb2xseQ== 71640
+aWhhcg== 71641
+RXhwYW5zaW9u 71642
+IFNlcnZpbmc= 71643
+X3N0dWRlbnRz 71644
+IFhCT09MRQ== 71645
+KGls 71646
+IOyymA== 71647
+IGrDsw== 71648
+KHRvbA== 71649
+KEpT 71650
+CUNH 71651
+IERSQVc= 71652
+dHdpZw== 71653
+IG9hdA== 71654
+X3Ntb290aA== 71655
+IENTTA== 71656
+IG9zb2I= 71657
+IGVuc3Vpbmc= 71658
+IGJhbmtlcg== 71659
+IEJhY2twYWNr 71660
+X3Bpbmc= 71661
+IHdpc2hsaXN0 71662
+PWF4 71663
+CSAgIAo= 71664
+RGlzbmV5 71665
+c3RlYWR5 71666
+Ij4l 71667
+IHByb3BoZXRz 71668
+IFpY 71669
+IG1pbmltYWxpc3Q= 71670
+LlBMQUlO 71671
+U2VhdHRsZQ== 71672
+Lm9yZGluYWw= 71673
+IFBJUEU= 71674
+IHJldG9ybmE= 71675
+IGp1Z2Fkb3I= 71676
+IEJyZXQ= 71677
+IOKUnA== 71678
+IHBsdXNo 71679
+VUxBVE9S 71680
+U29ydGluZw== 71681
+LmdyaWR5 71682
+ZWN0b215 71683
+X2FjdGl2 71684
+cmFjaw== 71685
+SW50ZXJhY3RpdmU= 71686
+IEFudGFyY3RpY2E= 71687
+IHZlbmdlYW5jZQ== 71688
+ZW5zbw== 71689
+X2tub3du 71690
+dXBwbGllcg== 71691
+Lk1vZHVsZXM= 71692
+IENvbm5lY3Rpb25TdGF0ZQ== 71693
+6ZqQ6JeP 71694
+QEZpbmRCeQ== 71695
+IHBsYWNlcg== 71696
+XG1vZGVs 71697
+PCgpPg== 71698
+LmlzU3VjY2Vzc2Z1bA== 71699
+LWdvb2Q= 71700
+Yno= 71701
+IERyYWNv 71702
+QXNzaXN0YW50 71703
+LWV4dHJh 71704
+0LDQsdC70LjRhg== 71705
+IGh5cG9jcmlzeQ== 71706
+IHRzdA== 71707
+IEFncg== 71708
+JHR4dA== 71709
+IGxvZ2lzdGlj 71710
+bGljZW5zZWQ= 71711
+IEhvZg== 71712
+IHRhdA== 71713
+KGl2 71714
+IGludG94aWM= 71715
+cG9zdElk 71716
+X3N0cmlrZQ== 71717
+IGh1bWlsaWF0aW9u 71718
+cGNvZGVz 71719
+InN5bmM= 71720
+KHJlY2lwZQ== 71721
+K04= 71722
+cmVudGU= 71723
+CUNsaWVudA== 71724
+eWNvcGc= 71725
+IFp1cmljaA== 71726
+IFByb2ZpbGVz 71727
+Q291bnRyaWVz 71728
+IHBpY3Q= 71729
+IHJvbGxvdXQ= 71730
+cmVxdWVuY2llcw== 71731
+IHBhdGNoZWQ= 71732
+IGNhcnRyaWRnZXM= 71733
+IHNoYWRpbmc= 71734
+SmFy 71735
+IHNhbHZhZ2U= 71736
+IFRheGVz 71737
+IHN0YW5kYnk= 71738
+YXBvcmFu 71739
+RWlnZW4= 71740
+LmFuZ3VsYXI= 71741
+IE5lc3RlZA== 71742
+5Lqr 71743
+IGlzVmlzaWJsZQ== 71744
+IER3aWdodA== 71745
+X0JSQU5DSA== 71746
+LkRlbGF5 71747
+IGtlbmQ= 71748
+IGZhY2lsaXRhdGVk 71749
+LmZsYXRNYXA= 71750
+IHNhbnRh 71751
+CVNlbmQ= 71752
+L21lc3NhZ2Vz 71753
+IG9mVHlwZQ== 71754
+CXN3YXA= 71755
+I3BsdA== 71756
+IFR1cmtz 71757
+TkVT 71758
+IHByb2dyZXNzaXZlbHk= 71759
+IFJlc2lkZW5jZQ== 71760
+IFRSRUU= 71761
+IG5vZW4= 71762
+ZGlv 71763
+IG5lbGxl 71764
+IHNvZ2Fy 71765
+aXR0aQ== 71766
+d2Vla2x5 71767
+IGFtYmlndWl0eQ== 71768
+X1NldHRpbmdz 71769
+V2FyZQ== 71770
+Lm5lbw== 71771
+X0RTVA== 71772
+IOaWuQ== 71773
+cHJlcA== 71774
+bG9iYnk= 71775
+QGVtYWls 71776
+L21vdmll 71777
+IGZ1bmtj 71778
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgCg== 71779
+wq1z 71780
+IGd1YXJkaWFucw== 71781
+LXBvcw== 71782
+IGNvbmZpZ3VyaW5n 71783
+IENQUw== 71784
+IERldXM= 71785
+IHZpZMOpb3M= 71786
+X2VtcHJlc2E= 71787
+IHNsYXBwZWQ= 71788
+PE1vZGVs 71789
+IHVuZGVyc2NvcmVz 71790
+VWg= 71791
+LmFjY2Vzc1Rva2Vu 71792
+U0VUUw== 71793
+IFNwYXJzZQ== 71794
+IENhbGQ= 71795
+OnBhdGg= 71796
+IFNlcnZlcnM= 71797
+PWJhdGNo 71798
+IGtuaXR0aW5n 71799
+IHhh 71800
+IHNlYXJjaEJhcg== 71801
+IHNuYWc= 71802
+IGluZnVzZWQ= 71803
+LmJhbQ== 71804
+bGV2ZXI= 71805
+IHRheG9ub215 71806
+w44= 71807
+IGF0dGFjaGluZw== 71808
+IGhlcm4= 71809
+X05PUA== 71810
+Q2xpY2thYmxl 71811
+KFBhcnNl 71812
+IER5bmFtbw== 71813
+LWJ1aWxkZXI= 71814
+IGRlcmVn 71815
+IHNjYXR0ZXJpbmc= 71816
+6L+b6KGM 71817
+YW56aQ== 71818
+IFNoZXBhcmQ= 71819
+Ij4nLAo= 71820
+X1hERUNSRUY= 71821
+IEJ1enpGZWVk 71822
+X01BUkdJTg== 71823
+UExPWQ== 71824
+LnNtYWxs 71825
+IG1pbWVUeXBl 71826
+IGhvbG9n 71827
+CWNhbWVyYQ== 71828
+bGlhcw== 71829
+IHN1c3BlbnNl 71830
+b2R5bmFt 71831
+YmF1 71832
+IGdyYXZleWFyZA== 71833
+X25hbWVk 71834
+IjoiJw== 71835
+ICoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKg== 71836
+IGdhbWVPdmVy 71837
+IExFTkdUSA== 71838
+CXNjcmVlbg== 71839
+IGRvSW5CYWNrZ3JvdW5k 71840
+X2RlcGVuZGVuY2llcw== 71841
+IHJ0Yw== 71842
+L3Vw 71843
+X1JPTQ== 71844
+SGFsbA== 71845
+IGRlZmljaWVuY2llcw== 71846
+KHRl 71847
+JyM= 71848
+X2VxdWl2 71849
+IHByZW9yZGVy 71850
+IEF4ZQ== 71851
+0L7QvNGD 71852
+LnNlbmRGaWxl 71853
+IGZpbHQ= 71854
+IExpbWl0cw== 71855
+IENhdmFsaWVycw== 71856
+LmRpc2NvdW50 71857
+4oaQ 71858
+IFdpdA== 71859
+UVJTVFVW 71860
+IGlq 71861
+IHRlZ2Vu 71862
+IDoiLA== 71863
+ZGlmZmljdWx0eQ== 71864
+cHVua3Q= 71865
+IEVtYWlscw== 71866
+Y2hsb3I= 71867
+KGZ1bg== 71868
+LlVpbnQ= 71869
+IFN0YWxs 71870
+X3ZlcmlmaWVk 71871
+dUQ= 71872
+RmlsZVR5cGU= 71873
+IHBsZWFzdXJlcw== 71874
+IGp1ZGljaWFyeQ== 71875
+IHNoYW0= 71876
+aXB1cg== 71877
+X1BMVVM= 71878
+b2ZmZXJz 71879
+KGZvbw== 71880
+X0dU 71881
+CWNvcmU= 71882
+RU5USU9O 71883
+IExpYmVyYXRpb24= 71884
+Q29tbWFuZExpbmU= 71885
+X2RlcGFydG1lbnQ= 71886
+LkFy 71887
+X25laWdoYm9y 71888
+IFN1Ym1pdHRlZA== 71889
+IDwhLS1b 71890
+IGxvY2F0aW5n 71891
+Lk1hcHBlcg== 71892
+X3N0cmVuZ3Ro 71893
+Wy4uLiw= 71894
+IEphbA== 71895
+L2xvYWQ= 71896
+IGJ1ZmZz 71897
+IG1vdG9yaXN0cw== 71898
+CWNz 71899
+YXNjZW5kaW5n 71900
+IFdoYXRzYXBw 71901
+IE5hc3M= 71902
+X0NPTFVNTlM= 71903
+TGVvbg== 71904
+cHBl 71905
+ZWx0YXM= 71906
+IHRqZWplcg== 71907
+X0tFWVdPUkQ= 71908
+cXVhbGlmaWNhdGlvbg== 71909
+aHJh 71910
+IHJpZGljdWxvdXNseQ== 71911
+JGluZm8= 71912
+RkVBVFVSRQ== 71913
+ZG9lc24= 71914
+IEtX 71915
+IEVudW1lcmFibGVTdHJlYW0= 71916
+X01BVA== 71917
+IFN0cmVhbUxhenk= 71918
+IHNjcmF0Y2hpbmc= 71919
+LnRpY2tldA== 71920
+IHNob3J0Y29taW5ncw== 71921
+ZWxsaXBzaXM= 71922
+PWN1cnJlbnQ= 71923
+IGNyZXN0 71924
+IHdob3Jl 71925
+IFBldHJvbGV1bQ== 71926
+Y29udGV4dHM= 71927
+IOat 71928
+LXB5dGhvbg== 71929
+KGpzb25PYmplY3Q= 71930
+IFByaXNt 71931
+IHlhY2h0 71932
+t6g= 71933
+Zmxhc2hkYXRh 71934
+IGxlaWNodA== 71935
+IE1vcnRvbg== 71936
+IHN0ZXJsaW5n 71937
+X2l0cg== 71938
+X3Vk 71939
+RmFjZXM= 71940
+IGhpcmVz 71941
+ZmZh 71942
+Jyx7Cg== 71943
+LWNhbWVyYQ== 71944
+X1JFQVNPTg== 71945
+IEhlbGVuYQ== 71946
+cnVn 71947
+aWdodGx5 71948
+IHBlcm11dGF0aW9ucw== 71949
+IFRvcmFo 71950
+IOaYr+WQpg== 71951
+CXJlY29yZA== 71952
+w4A= 71953
+LmdtYWls 71954
+Rm9ydHVuYXRlbHk= 71955
+KE1vZA== 71956
+T2NjdXJyZW5jZXM= 71957
+IGRlcHJlY2k= 71958
+IHZhZ3VlbHk= 71959
+L1o= 71960
+Vk4= 71961
+LnRw 71962
+X2dlbmVy 71963
+IHs6P30iLA== 71964
+d2FobA== 71965
+SUtF 71966
+IExlZ2lzbGF0aW9u 71967
+IGhpbnRlcg== 71968
+IGFkZWw= 71969
+KGhpZ2g= 71970
+5o+Q5Lqk 71971
+L2RvbWFpbg== 71972
+LnRpbGVz 71973
+IFRpYmV0YW4= 71974
+IFN0ZXJlbw== 71975
+IGZpbGVTaXpl 71976
+Z3J1cG8= 71977
+aWFl 71978
+U0NQ 71979
+IHZvdWNoZXJz 71980
+IFBhbmRvcmE= 71981
+IGRpc21heQ== 71982
+IGzDqWc= 71983
+IEJlaGF2aW9yYWw= 71984
+Y3Jhbg== 71985
+TmVzdGVk 71986
+YWNjb20= 71987
+IE5haA== 71988
+IEJhbHRpYw== 71989
+IERFU1Q= 71990
+IGtpc3Nlcw== 71991
+Vmlu 71992
+IHByb3Zva2U= 71993
+X0NvbnRleHQ= 71994
+IHdlZWtkYXlz 71995
+dXJnZW5jZQ== 71996
+TGlr 71997
+IHBsYXph 71998
+IGJsZXY= 71999
+IHJlYWZm 72000
+X1RpdGxl 72001
+KEd0aw== 72002
+IGNlbGxl 72003
+Iz09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT0= 72004
+IEpvb21sYQ== 72005
+Ij4vLw== 72006
+TW9udGhseQ== 72007
+LnRvRG91Ymxl 72008
+KGVudHJpZXM= 72009
+IE5SRg== 72010
+KGdjZg== 72011
+IE1pZGRsZXdhcmU= 72012
+fS17 72013
+X0hJREU= 72014
+IGxvd2Vycw== 72015
+KFNlbGY= 72016
+5Y+R6YCB 72017
+IGlzTG9nZ2VkSW4= 72018
+IGJpb2RpdmVyc2l0eQ== 72019
+IG11c2NoaQ== 72020
+KGNhbmRpZGF0ZQ== 72021
+IEFuc2k= 72022
+CXNt 72023
+L2lt 72024
+Kycp 72025
+Y2Rj 72026
+IGFsZ3VuYQ== 72027
+IHNhY3JpZmljaW5n 72028
+L3ZlbmRvcnM= 72029
+L0FQSQ== 72030
+QWR2ZXJ0aXNpbmc= 72031
+IEdFTkVSQVRFRA== 72032
+IERpc29yZGVycw== 72033
+IFNlcmlhbGl6YXRpb24= 72034
+IHNhdmFnZQ== 72035
+IOm7 72036
+IEluc2lnaHRz 72037
+IHJldm9rZQ== 72038
+IGp1cm9ycw== 72039
+c3VpdA== 72040
+IENhbXBpbmc= 72041
+X3Byb2ZpdA== 72042
+YnVjaA== 72043
+LkFjdGlvbnM= 72044
+IElERUE= 72045
+b2x1bHU= 72046
+TGlrZXM= 72047
+67KI7Zi4 72048
+LkJMTA== 72049
+dsOk 72050
+IGNhcmRp 72051
+IGRpc3Byb3BvcnRpb25hdGVseQ== 72052
+IGluc2FuaXR5 72053
+LmVvZg== 72054
+IFBsYXR6 72055
+LmZpcnN0bmFtZQ== 72056
+IFNsYXNo 72057
+X0NG 72058
+amFuZHJv 72059
+IEdhdWdl 72060
+IFN1bmRlcg== 72061
+IEJ1bm55 72062
+X3Vt 72063
+6IGU57O7 72064
+IGlQaG9uZXM= 72065
+IEJJTw== 72066
+IGtobw== 72067
+eEZB 72068
+IEZyaWVuZHNoaXA= 72069
+IGNhbG1seQ== 72070
+X3Rocg== 72071
+X0FuaW0= 72072
+IHJhaXNvbg== 72073
+L3Jvb3Q= 72074
+LmdldEJ5SWQ= 72075
+IFNhdmFubmFo 72076
+IEludGVycHJldA== 72077
+a2lsbGVy 72078
+CXdn 72079
+XSld 72080
+0YPQtdGC 72081
+S2V5VmFsdWU= 72082
+W0c= 72083
+c3RyZXRjaA== 72084
+LXBsYXlpbmc= 72085
+JTsNCg== 72086
+IHBsYW5r 72087
+IHBlYWNo 72088
+IERlcnJpY2s= 72089
+0LTRgNC10YE= 72090
+IFNoYW0= 72091
+QVBQTElDQVRJT04= 72092
+LnByb2dyZXNzQmFy 72093
+IHRyYW5zaXRpb25pbmc= 72094
+X2RyYWc= 72095
+LlJlcXVlc3RCb2R5 72096
+Lk1vYmlsZQ== 72097
+Sm9uZXM= 72098
+LlBob3Rv 72099
+IGF4bGU= 72100
+enVn 72101
+L29wdGlvbnM= 72102
+XV0pCgo= 72103
+CW5v 72104
+W2hyZWY= 72105
+IGFncmVnYXI= 72106
+IFNlcnZpY2VFeGNlcHRpb24= 72107
+bmluZ2Vu 72108
+RGlmZmljdWx0eQ== 72109
+Qk9PTEVBTg== 72110
+QWRkcw== 72111
+LWhhbmRsZXI= 72112
+IEdhdA== 72113
+IEVib255 72114
+4bqtbg== 72115
+YnJpZ2h0 72116
+IGNvcnBzZXM= 72117
+LkNoZWNrZWRDaGFuZ2Vk 72118
+IG1hdGluZw== 72119
+IEhhcnRmb3Jk 72120
+IHpvdQ== 72121
+IGR1ZGVz 72122
+X2FsZw== 72123
+IEp1bGk= 72124
+b2N1cA== 72125
+INC/0YDQsNCy 72126
+IEthdHk= 72127
+X0ludGVybmFsQXJyYXk= 72128
+LkNvbHVtbkhlYWRlcnNIZWlnaHRTaXplTW9kZQ== 72129
+TWV0aG9kTWFuYWdlcg== 72130
+IFJlZGU= 72131
+IGxpc3RJdGVt 72132
+LkJvdW5kcw== 72133
+IGF2ZW51ZXM= 72134
+IENvZ25pdGl2ZQ== 72135
+RXh0ZW5k 72136
+dGVjaG5pY2Fs 72137
+4oCa 72138
+c25ha2U= 72139
+RnJvbUNsYXNz 72140
+aWxlc3M= 72141
+ID17 72142
+dXJldHRl 72143
+L3RocmVhZA== 72144
+RklFTERT 72145
+SVZJTkc= 72146
+IFBPU0lY 72147
+X2Fr 72148
+IC4uLy4uLy4uLw== 72149
+TXA= 72150
+IGFub255bW91c2x5 72151
+VGFyZ2V0RXhjZXB0aW9u 72152
+YWZmZXI= 72153
+YW55dGhpbmc= 72154
+Imlz 72155
+Z3Jlc28= 72156
+IExhcmE= 72157
+aXphZG9z 72158
+IG1pbmc= 72159
+LnRh 72160
+X3Rocm93 72161
+Umg= 72162
+IHNvbGlkaXR5 72163
+bmFobWU= 72164
+aWNoYWdl 72165
+IG1vdW5k 72166
+b2xpbw== 72167
+YXJ5YQ== 72168
+QVNVUkU= 72169
+IHdvaGw= 72170
+IGZ1cm5pc2hpbmdz 72171
+LnNlY3Rpb25z 72172
+IGFwb2xvZ2llcw== 72173
+YXBpa2V5 72174
+IFNjcmV3 72175
+IFdhcnNhdw== 72176
+L2dyYXBo 72177
+IFNBVEE= 72178
+eXNlcw== 72179
+L2J1dHRvbnM= 72180
+0LXQvdC+ 72181
+VUdIVA== 72182
+IHBvcm5zdGFy 72183
+UGljdHVyZUJveA== 72184
+X1RleHR1cmU= 72185
+IGHDsQ== 72186
+IG5lcmQ= 72187
+LWNvbm5lY3RlZA== 72188
+IG91dHNpZGVycw== 72189
+IG9wZXJhdGl2ZXM= 72190
+YWJibGU= 72191
+L21hbg== 72192
+IHBsZWFk 72193
+XERi 72194
+IENvdmVyZWQ= 72195
+PVM= 72196
+IEZsYW1lcw== 72197
+77+l 72198
+X3RpdGxlcw== 72199
+IHJldHJhY3Q= 72200
+IGNvbGxhYm9yYXRpbmc= 72201
+IGJlaGFuZA== 72202
+LkRhdGFHcmlkVmlld0NvbHVtbkhlYWRlcnNIZWlnaHRTaXplTW9kZQ== 72203
+IGxhYm9yZQ== 72204
+IHRvdGFsUHJpY2U= 72205
+IHNwb2lsZXI= 72206
+IGRpcHBlZA== 72207
+Iikpew0K 72208
+X1NC 72209
+IExlaQ== 72210
+IGluY2x1c28= 72211
+dmVsbA== 72212
+CXBs 72213
+SW5hY3RpdmU= 72214
+IFVTU1I= 72215
+b25kZW4= 72216
+IHJvdXRlZA== 72217
+LnN0cnVjdA== 72218
+4Ks= 72219
+IE1hbGlr 72220
+IEhFWA== 72221
+IEN1c3Q= 72222
+X1BFUkNFTlQ= 72223
+X2VwaXNvZGU= 72224
+5ouJ 72225
+VkVSUw== 72226
+IGNydWlzaW5n 72227
+Qm9va21hcms= 72228
+4oCmCgoKCg== 72229
+Y2hlY2tCb3g= 72230
+b3VmbGFnZQ== 72231
+IG5vbnplcm8= 72232
+IGFwcm94 72233
+IFB1cmR1ZQ== 72234
+Y29vbg== 72235
+bGVncw== 72236
+IExvdHRlcnk= 72237
+U2xm 72238
+SEFW 72239
+Pms= 72240
+PkFu 72241
+IHNsZW5kZXI= 72242
+c2NoZWQ= 72243
+VGVsZWdyYW0= 72244
+Umljaw== 72245
+X1N0cnVjdA== 72246
+X0JD 72247
+IGN1c3RvbWFyeQ== 72248
+IERhbW9u 72249
+dXJjaGFzZWQ= 72250
+IGtvYg== 72251
+IHRpb24= 72252
+KHByb21wdA== 72253
+IGltYg== 72254
+eEND 72255
+CVdlYkVsZW1lbnQ= 72256
+IGhlbW9z 72257
+4Kaw 72258
+IENOQkM= 72259
+IEFMTE9X 72260
+57Gz 72261
+IEVOQw== 72262
+LnNjYWxhdGVzdA== 72263
+IFRCRA== 72264
+Z2V0UmVmZXJlbmNl 72265
+IEltcG9ydGVk 72266
+4Liw 72267
+IGl3 72268
+b2xvbg== 72269
+bWls 72270
+Oi8vJHs= 72271
+Lk1hbmlmZXN0 72272
+IGxo 72273
+IGl0ZW1MaXN0 72274
+X2Fkcw== 72275
+SW5zcGVjdGFibGU= 72276
+IFRvbGVkbw== 72277
+IERpc2FzdGVy 72278
+VXBkYXRlZEF0 72279
+KScpLA== 72280
+IFBBTg== 72281
+RmlsZUNob29zZXI= 72282
+IHl1YW4= 72283
+aXRt 72284
+INC10LPQvg== 72285
+IElibg== 72286
+SGF0 72287
+X3Vsb25n 72288
+YXBs 72289
+IFVydWd1YXk= 72290
+w6lueQ== 72291
+IENyYWlnc2xpc3Q= 72292
+ZG9jaA== 72293
+IGJpbGU= 72294
+IHByb2R1a3Q= 72295
+IGVsZWN0cm9seQ== 72296
+LkNvdXJzZQ== 72297
+IG1x 72298
+dW5jdHVhdGlvbg== 72299
+LyoqKioqKioqKioqKioqKio= 72300
+dWp1 72301
+TU1NTQ== 72302
+X0xFRw== 72303
+IG5ldXRyb24= 72304
+IHBsdXJhbGl0eQ== 72305
+ICsrJA== 72306
+Zm91bmRhdGlvbg== 72307
+LkNvbHVtblN0eWxl 72308
+IEhvb3Zlcg== 72309
+LkFDVA== 72310
+IEJyYXo= 72311
+bGVzc29ucw== 72312
+ZsO8aHI= 72313
+4KSC 72314
+IENsYXNzaWNz 72315
+cmFpZw== 72316
+IG1o 72317
+IGtldHRsZQ== 72318
+U3RyaWtl 72319
+ZXJkYWxl 72320
+RU5UQQ== 72321
+IFRhYmxlQ29sdW1u 72322
+IFNoYWtl 72323
+IFdG 72324
+IExpY2Vuc2luZw== 72325
+dWHDp8Ojbw== 72326
+IHNlY2FyYQ== 72327
+IG5ld1ZhbA== 72328
+U2VsZWNjaW9u 72329
+UHJlZmFi 72330
+ZmlnaHRlcg== 72331
+TGF1bmNoaW5n 72332
+JyI7DQo= 72333
+Lmxvbg== 72334
+LnV0Y25vdw== 72335
+IEh1bmRyZWRz 72336
+ZXN0ZWFk 72337
+IE92ZXJ3YXRjaA== 72338
+X0FGVEVS 72339
+IHJlbW5hbnRz 72340
+KS5c 72341
+IGxvYmJ5aXN0cw== 72342
+IHVuaW50ZW5kZWQ= 72343
+IOuQ 72344
+eXN6 72345
+IGxpYnJvcw== 72346
+LXBhZ2Vz 72347
+SU5URVJGQUNF 72348
+IGRldGVybWluaXN0aWM= 72349
+IFVOSVFVRQ== 72350
+IGV0dMOk 72351
+U2luZ2xlTm9kZQ== 72352
+CQkJCQkJCQ0K 72353
+LXN0YXQ= 72354
+IGhhc2hpbmc= 72355
+L2FjY2Vzcw== 72356
+dGVsbA== 72357
+CXVzZXJuYW1l 72358
+IERhdG9z 72359
+Qml0Q29udmVydGVy 72360
+Omhvc3Q= 72361
+IGFsdGVybmF0aW5n 72362
+IOKAi+KAiw== 72363
+IHdhdmVmb3Jt 72364
+PEVsZW1lbnQ= 72365
+IENhbnRvbg== 72366
+IGRlc3RhYw== 72367
+dGVudA== 72368
+LmdldE1heA== 72369
+IHN0ZW5jaWw= 72370
+IEFjcXVpc2l0aW9u 72371
+LkdlbmVyYXRpb25UeXBl 72372
+IE1FUg== 72373
+X2NvbWJpbmU= 72374
+IFtdLg== 72375
+X0JJVE1BUA== 72376
+bGRy 72377
+IGNhbnY= 72378
+IEpWTQ== 72379
+cGFycw== 72380
+IGRvd25oaWxs 72381
+RGV0YWlsc1NlcnZpY2U= 72382
+KE5BTUU= 72383
+IHJlanV2ZW4= 72384
+X3dpdGhpbg== 72385
+QWNjZXNzb3J5 72386
+IFPDqQ== 72387
+L2luYw== 72388
+IildCgo= 72389
+UHVibGljYXRpb24= 72390
+X3JvaQ== 72391
+IG1vYnM= 72392
+Lk5vQXJnc0NvbnN0cnVjdG9y 72393
+IGV2ZW50b3M= 72394
+LnZlbmRvcg== 72395
+X1NFTEVDVE9S 72396
+w6lmb25v 72397
+PSJb 72398
+IGxhYXQ= 72399
+IGJsdXJyZWQ= 72400
+IEJvcmRlclNpZGU= 72401
+eEZGRkZGRg== 72402
+X3dyaXR0ZW4= 72403
+IGplbnRl 72404
+L3Rpbnk= 72405
+Lndw 72406
+LnN0eWxlYWJsZQ== 72407
+IENoYXJnZXI= 72408
+IGJhdGhpbmc= 72409
+IFBhbmRh 72410
+w6lsaQ== 72411
+IHBhY2llbnRl 72412
+IGdpb2NoaQ== 72413
+IFZpZXdTdGF0ZQ== 72414
+Y2dp 72415
+LmxvZ2ljYWw= 72416
+RG9uYWxkVHJ1bXA= 72417
+LGNvcHk= 72418
+ZW1t 72419
+X0xpbms= 72420
+IGluc2lnbmlmaWNhbnQ= 72421
+ZmZtcGVn 72422
+L3BheQ== 72423
+X3F1aXQ= 72424
+SU9EZXZpY2U= 72425
+IEV4aXN0cw== 72426
+IGNvb2tz 72427
+anVuY3Rpb24= 72428
+IFRYVA== 72429
+KGVndA== 72430
+YW5pdQ== 72431
+X3BhcnRuZXI= 72432
+IGZhY3VsdA== 72433
+IFVuaWZpZWQ= 72434
+L3NiaW4= 72435
+IE5laA== 72436
+IEthemFraHN0YW4= 72437
+cG9zdGNvZGU= 72438
+IHZlZ2Fz 72439
+IHNlaW5lbQ== 72440
+fV0s 72441
+dGV0 72442
+LXBheW1lbnQ= 72443
+IENvbW1lbnRhcnk= 72444
+IGd1aWRlbGluZQ== 72445
+KTsk 72446
+IENvbnNvcnRpdW0= 72447
+57O757uf 72448
+dmlzbw== 72449
+IEJpbGxpbmc= 72450
+aWNpYXI= 72451
+IFR5cGVJbmZv 72452
+CXRyYW5z 72453
+PFRleHR1cmU= 72454
+YXRob20= 72455
+bGF1Z2hz 72456
+IGludGVyY2VwdGlvbnM= 72457
+KEVWRU5U 72458
+Rm9yZWNhc3Q= 72459
+VHJhcA== 72460
+dHJ4 72461
+IFdoaXRlcw== 72462
+c3VibWl0dGVk 72463
+YWxnbw== 72464
+IHRyYW5zcG9ydGVy 72465
+b3VuZGFyeQ== 72466
+IEluaGVyaXRz 72467
+IENvbmV4aW9u 72468
+LmNsaWVudFg= 72469
+CXByb2plY3Q= 72470
+aGVhcnRiZWF0 72471
+LW90aGVy 72472
+ICc7DQo= 72473
+w6ty 72474
+b3JwaW9u 72475
+KGNvcnM= 72476
+IEVMRUNU 72477
+IFBlcmU= 72478
+IHVzZU1lbW8= 72479
+ZXdyaXRlcg== 72480
+IHNxdWlydA== 72481
+L2V4dGVuc2lvbnM= 72482
+L2Fz 72483
+LkNMSUVOVA== 72484
+IGdvdXJtZXQ= 72485
+IGF1dG9Db21wbGV0ZQ== 72486
+UkVW 72487
+IGJyYWtpbmc= 72488
+X1NFTEVDVElPTg== 72489
+44Oh44Oz44OI 72490
+X2xpZmU= 72491
+X2dyb3VuZA== 72492
+X3Rlcg== 72493
+c25z 72494
+IFNQT1JU 72495
+kuGe 72496
+5rs= 72497
+VW5pcXVlSWQ= 72498
+IGRyaXA= 72499
+X0JST1dTRVI= 72500
+LW1ldGVy 72501
+ZW5kZXo= 72502
+IGV4aGF1c3RpdmU= 72503
+KFNL 72504
+IEJ1cmxpbmd0b24= 72505
+d29vcmQ= 72506
+KHBvdw== 72507
+IHNlYXJjaFRleHQ= 72508
+hYw= 72509
+aGVlbHM= 72510
+c3RlbGxlcg== 72511
+LnNpZw== 72512
+WU9VUg== 72513
+LmFsaQ== 72514
+IERhdGFDb2x1bW4= 72515
+IHByb2plY3ROYW1l 72516
+X2ZlY2hh 72517
+IHJlZnVuZHM= 72518
+IHRvcG8= 72519
+IENISUxE 72520
+IE1hcmJsZQ== 72521
+IGZvckNlbGw= 72522
+IHBlc3NpbQ== 72523
+IGNyaXNweQ== 72524
+aWZlc3R5bGVz 72525
+IG92ZXJkdWU= 72526
+b2xhcml0eQ== 72527
+IGFtYXTDuHI= 72528
+TWQ= 72529
+UFJFU1M= 72530
+IGluc3VyZXI= 72531
+b2NyYXQ= 72532
+IGZhY2lsaXRhdGVz 72533
+Lw0KDQo= 72534
+IGh1cmRsZXM= 72535
+X0hJ 72536
+TGV0dGVycw== 72537
+bWluZWNyYWZ0 72538
+YXh0ZXI= 72539
+eWs= 72540
+IGVjb27Ds20= 72541
+INC90LDRhw== 72542
+IFNXSVRDSA== 72543
+Q29uc3VsdGE= 72544
+IE5vcmE= 72545
+Q0tFUg== 72546
+X0NU 72547
+LmFwcHNwb3Q= 72548
+IC8vLS0= 72549
+CUJPT1NU 72550
+X2NvdXJzZXM= 72551
+IHdpbGxpbmdseQ== 72552
+66eM 72553
+ZmZk 72554
+ZmlsZXI= 72555
+IE1lYXN1cmVz 72556
+IGxlYXNlcw== 72557
+IERvcm90aHk= 72558
+Ol0u 72559
+c3Vic2NyaXB0aW9ucw== 72560
+IGNob2lz 72561
+IGFsYW4= 72562
+IGFicmly 72563
+LlBvcHVw 72564
+RXN0aW1hdGVk 72565
+IFBMQU4= 72566
+4LWN 72567
+IEVMRg== 72568
+IGRpc3RhbmNpbmc= 72569
+CWFuc3dlcg== 72570
+IHJ1Z3M= 72571
+S2k= 72572
+4Z+S4Z4= 72573
+R3VpbGQ= 72574
+ZXh0cmFz 72575
+Y3Bz 72576
+TW9ja3M= 72577
+IHRla3N0 72578
+Kmc= 72579
+LnJlcXVlc3RGb2N1cw== 72580
+IGFsdGVyYXRpb24= 72581
+IENhdGVnb3JpYQ== 72582
+aW1tZXJz 72583
+IERyb3Bib3g= 72584
+IEFkZHI= 72585
+5byV 72586
+ZGVwcw== 72587
+Lk1lc3NhZ2VCb3g= 72588
+ISwK 72589
+LmdldEI= 72590
+IG1pZ3JhdGVk 72591
+IEhvYmJ5 72592
+IE1n 72593
+LlZlcnRleA== 72594
+IGZvcmdpdmVu 72595
+IERlVg== 72596
+IHdlcmQ= 72597
+IEFyYWJpYW4= 72598
+IFNtb2tpbmc= 72599
+IHN0cmF3YmVycnk= 72600
+IENNUA== 72601
+ZGJs 72602
+IERIUw== 72603
+LWVycm9ycw== 72604
+LnBhZw== 72605
+IFJORw== 72606
+IHNoYXZl 72607
+IHR3ZWU= 72608
+IGFzc2VydE51bGw= 72609
+IERlbnNpdHk= 72610
+ZG9qbw== 72611
+YWlubWVudA== 72612
+IHBq 72613
+LllFQVI= 72614
+ICopKTsK 72615
+aWJyYXJpZXM= 72616
+SmV0cw== 72617
+RXhlY3V0aXZl 72618
+X2RlbnNl 72619
+LmdldENvbnRlbnRQYW5l 72620
+Y2hhbmRsZQ== 72621
+YWluYQ== 72622
+LXJlZmVyZW5jZQ== 72623
+IGxpYXI= 72624
+IEhFQUxUSA== 72625
+W3Rlc3Q= 72626
+LmlzbmFu 72627
+Q2hhcmxpZQ== 72628
+IHB1cHBlcg== 72629
+IGtpcg== 72630
+OmhpZGRlbg== 72631
+aXNWaXNpYmxl 72632
+IGtvbXQ= 72633
+IGFjcXVhaW50ZWQ= 72634
+IERydWlk 72635
+KENz 72636
+Lmxhc3RuYW1l 72637
+RFNB 72638
+IGRpc3NvbHZl 72639
+57yW5Y+3 72640
+VmFyaW91cw== 72641
+IERleA== 72642
+X2FuZ2xlcw== 72643
+L2FwaW1hY2hpbmVyeQ== 72644
+IGV4cGxvZGluZw== 72645
+KENoYXJTZXF1ZW5jZQ== 72646
+IEhpc3Bhbg== 72647
+KyspewoK 72648
+Lk1vZGVsU2VyaWFsaXplcg== 72649
+UVJTVFVWV1hZWg== 72650
+54K55Ye7 72651
+PXNldHRpbmdz 72652
+4KWB 72653
+UENT 72654
+IElOVEVSTkFM 72655
+IEhVR0U= 72656
+IG1pY3Jvc2NvcGU= 72657
+aXNBZG1pbg== 72658
+XHY= 72659
+LnJlcXVpcmVOb25OdWxs 72660
+0L7Qu9C+0LI= 72661
+aWNlcmNh 72662
+X1NFTlQ= 72663
+IGRlcGljdGlvbg== 72664
+IFVzZXJDb250cm9s 72665
+IE1lbW9y 72666
+IEFsbG9jYXRpb24= 72667
+IEJlZGZvcmQ= 72668
+IOabtA== 72669
+IHRvcm1lbnQ= 72670
+YXplZXJh 72671
+LlRvZGF5 72672
+IFJlZ2FyZGluZw== 72673
+X0VOQw== 72674
+X1JBTkRPTQ== 72675
+TG9nTGV2ZWw= 72676
+PVI= 72677
+IEdyZWVubGFuZA== 72678
+IHN0cmFpbmVk 72679
+IG1hZ25ldHM= 72680
+IGFsZXJ0Q29udHJvbGxlcg== 72681
+IENocm9uaWM= 72682
+X3JlZ2lzdGVyZWQ= 72683
+IGxpag== 72684
+IEVudHJ5UG9pbnQ= 72685
+IFJlZ2ltZW50 72686
+dWNpZA== 72687
+IENvdWxkbg== 72688
+IEFjdGluZw== 72689
+X3JheQ== 72690
+IG5hYg== 72691
+LXNlcGFyYXRlZA== 72692
+IHBubA== 72693
+Q29hY2g= 72694
+QVRZUEU= 72695
+IHN1cHBsZW1lbnRhdGlvbg== 72696
+YWNlcnM= 72697
+ZmxlZXQ= 72698
+SW5wdXRCb3JkZXI= 72699
+IFN0cnVjdHVyYWw= 72700
+IGRlaW5l 72701
+IGJyZXdlcmllcw== 72702
+YW5vaQ== 72703
+IHRyYW5zbGF0b3Jz 72704
+IGVpZ2VuZW4= 72705
+IGRhbmNlcw== 72706
+dGFt 72707
+IENvb3BlcmF0aW9u 72708
+X3JlcXVlc3RlZA== 72709
+IE1hZ2ljYWw= 72710
+CUxFRlQ= 72711
+ICIiKSwK 72712
+Ky0rLSstKy0rLSstKy0rLQ== 72713
+IE5vaXI= 72714
+IEVzdGltYXRl 72715
+IFRocmVhZFBvb2w= 72716
+IEhlY2s= 72717
+ICcqLg== 72718
+VHVya2V5 72719
+IHN1Y2NlZWRpbmc= 72720
+ZHJ1Zw== 72721
+dmlv 72722
+IHBvbmVy 72723
+IEphZA== 72724
+aXp6bHk= 72725
+ZXZlcnl0aGluZw== 72726
+IHt9KS4= 72727
+IEluc3RpdHV0ZXM= 72728
+IG51b3Zv 72729
+IGluaXRXaXRoVGl0bGU= 72730
+IGx1YUw= 72731
+b3duaWs= 72732
+IHRob3I= 72733
+IGtsYXI= 72734
+IG5vdG9yaW91c2x5 72735
+IGRvbmc= 72736
+ZW1lbnM= 72737
+X3Byb2plY3Rpb24= 72738
+X0dSRQ== 72739
+LmV5ZQ== 72740
+IHdhdGVyaW5n 72741
+IFRpaw== 72742
+b1M= 72743
+IFN0cmFuZ2Vy 72744
+ICANCg0K 72745
+cGFnaW5n 72746
+X2ludGVyc2VjdA== 72747
+IENvbG9uaWFs 72748
+TGlzYQ== 72749
+LnVubGluaw== 72750
+IG1pcA== 72751
+YW51dHM= 72752
+YW1hem9u 72753
+IElERU5U 72754
+c3Rhc3k= 72755
+Snd0 72756
+LS0tLS0tKy0tLS0tLSs= 72757
+IEVWUA== 72758
+Q29udGVudExvYWRlZA== 72759
+CUJJVA== 72760
+LnBhcmVudHM= 72761
+IGFsbG9jYXRpbmc= 72762
+IEdPTEQ= 72763
+fWA7Cgo= 72764
+QUxBUg== 72765
+IHByZWNpc2E= 72766
+RGlzdGluY3Q= 72767
+c2Vp 72768
+IHN1YnBvZW5h 72769
+IHBvbXA= 72770
+IFBvbG8= 72771
+Y29l 72772
+dmo= 72773
+LndvcmtmbG93 72774
+ZXN0cmU= 72775
+IGNvbm5leGlvbg== 72776
+aW1ldHlwZQ== 72777
+LlJvd0NvdW50 72778
+IERoYWJp 72779
+IGVtaXRz 72780
+LkJvcmRlclNpemU= 72781
+KHBvbGljeQ== 72782
+LG1lc3NhZ2U= 72783
+T25Jbml0 72784
+KShf 72785
+IGZpbmVy 72786
+W251bWJlcg== 72787
+IHNjcmlwdHVyZQ== 72788
+UmVmbGVjdA== 72789
+LXRvb2xiYXI= 72790
+KFBBVEg= 72791
+IEVOVFJZ 72792
+KC4uLikK 72793
+LWRvbWFpbg== 72794
+KHN0cmlw 72795
+KSgq 72796
+IGNvbnZleWVk 72797
+IGF0dGVudGl2ZQ== 72798
+w6hnZQ== 72799
+X0xE 72800
+IEdyYW50cw== 72801
+LWhpZ2hsaWdodA== 72802
+IGJyZXRocmVu 72803
+2YjZhA== 72804
+IGRlcXVldWVSZXVzYWJsZUNlbGxXaXRoSWRlbnRpZmllcg== 72805
+YXB1bHQ= 72806
+LmJvdHRvbUFuY2hvcg== 72807
+IG9wY2lvbg== 72808
+IG91dEZpbGU= 72809
+cmVhdGluZw== 72810
+ZGlu 72811
+X3NhbXBsZXI= 72812
+CWdsRW5hYmxl 72813
+cHR5cGU= 72814
+X0NPTkRJVElPTg== 72815
+LWVmZmljaWVudA== 72816
+Jm8= 72817
+IGpj 72818
+0Kc= 72819
+L0Zvcm0= 72820
+KWZyYW1l 72821
+IGJpbmdl 72822
+X2Nsb3N1cmU= 72823
+SU1B 72824
+KG5leHRQcm9wcw== 72825
+CWNk 72826
+IGdldE1lbnU= 72827
+IGdldFN1cHBvcnRBY3Rpb25CYXI= 72828
+IG1hbmlmb2xk 72829
+WlI= 72830
+Y2hhbmdlcg== 72831
+YXNzaW5n 72832
+ZGlzaA== 72833
+IE1vdQ== 72834
+Lm5ldGZsaXg= 72835
+IHBvc3Rjb2Rl 72836
+IHdvbWI= 72837
+IEFycw== 72838
+4oCmKQ== 72839
+IGxpbmVXaWR0aA== 72840
+RGVhbA== 72841
+YXJhcw== 72842
+IEdyYW50ZWQ= 72843
+IGhvYXg= 72844
+IGRpcmVjdGlvbmFs 72845
+LktleUNoYXI= 72846
+ID09Ig== 72847
+IFZlcmRl 72848
+X0tQ 72849
+IHN1cnJvZ2F0ZQ== 72850
+IERVSQ== 72851
+dXB5dGVy 72852
+IHBlbnNl 72853
+IFJBTkQ= 72854
+KGV4Yw== 72855
+IG1pc3VuZGVyc3Rvb2Q= 72856
+IENVVA== 72857
+IOS4rQ== 72858
+CXRp 72859
+X2luc2lkZQ== 72860
+IGJpY3ljbGVz 72861
+IGRlYW4= 72862
+ZGlyZWN0aXZl 72863
+LnBlZXI= 72864
+aWNpbmE= 72865
+X2l0ZXJz 72866
+IGltcGx5aW5n 72867
+Lm9idGFpbg== 72868
+IHBzeWNoaWF0cmlzdA== 72869
+dXNlclNlcnZpY2U= 72870
+ZWxpdmVyeQ== 72871
+CXBhcnQ= 72872
+IGh1cnJpZWQ= 72873
+IGJ1bQ== 72874
+IGhlcGF0aXRpcw== 72875
+amlk 72876
+J10+Owo= 72877
+IHVuY29udmVudGlvbmFs 72878
+IGZhc2Npc3Q= 72879
+IFBleQ== 72880
+6K+t 72881
+Jyl9PC8= 72882
+LkNsdXN0ZXI= 72883
+IEJpdENvbnZlcnRlcg== 72884
+ZWRhdGE= 72885
+zr/PhQ== 72886
+4pSC 72887
+QXBwQnVuZGxl 72888
+Lmh0dHBDbGllbnQ= 72889
+IGFwbw== 72890
+QUlOUw== 72891
+IFZG 72892
+X2dpZA== 72893
+IG9kZQ== 72894
+RVJSWQ== 72895
+IFJlY2VpcHQ= 72896
+IENhbmRsZQ== 72897
+IG1pc3Npb25hcnk= 72898
+IENyYW5l 72899
+IFNUQVRFUw== 72900
+Ym91dA== 72901
+YXlhcmFu 72902
+Li4uIiwK 72903
+IGl0aW5lcmFyeQ== 72904
+KGxhdGl0dWRl 72905
+IENPTlM= 72906
+L3NpZGViYXI= 72907
+U3BpZGVy 72908
+R1JJRA== 72909
+LmRlYnVnTGluZQ== 72910
+IGAn 72911
+LXllbGxvdw== 72912
+IHJlZmluZW1lbnQ= 72913
+IE1ha2V1cA== 72914
+IERhbm4= 72915
+KCk7DQoNCg0K 72916
+IG92ZXJjb21pbmc= 72917
+IEJhdHRlcg== 72918
+L3BhY2thZ2Vz 72919
+INCy0LjQtA== 72920
+IGFyeQ== 72921
+4oCdPw== 72922
+cmVsbGFz 72923
+IGdydXBvcw== 72924
+IFR5cGljYWw= 72925
+IE1vbnNhbnRv 72926
+SW50ZXJzZWN0aW9u 72927
+IHR5cmU= 72928
+PT09PT09Cg== 72929
+zq4= 72930
+OzsKCg== 72931
+IHRyaXZpYQ== 72932
+X3Rha2Vu 72933
+IHNtdWdnbGluZw== 72934
+IG5hcnJvd2Vk 72935
+4bqpbQ== 72936
+IHBhbGFicmE= 72937
+Y2Vh 72938
+cGFydGljdWxhcmx5 72939
+QWNjZXNzVHlwZQ== 72940
+IGNvbGU= 72941
+VG9GaXQ= 72942
+IHZlcmU= 72943
+IENPUw== 72944
+L3ZpZGVvcw== 72945
+ICgkKCIj 72946
+IGNyYW5l 72947
+Lmhhc01vcmU= 72948
+JHBhdGg= 72949
+aXZpc20= 72950
+IHN1cGVydmlzb3Jz 72951
+IEZsb3Jlcw== 72952
+cHJvZ3JhbXM= 72953
+LlppcA== 72954
+IGltcGFjdGluZw== 72955
+IG1vdG8= 72956
+IFRK 72957
+cGVnYXdhaQ== 72958
+X0tJTkQ= 72959
+X2ludGVyZmFjZXM= 72960
+LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKio= 72961
+IExlYXZpbmc= 72962
+VGV4dFN0eWxl 72963
+YmVpdGVy 72964
+IFdpbm5pbmc= 72965
+LXBhcmFt 72966
+R2FyeQ== 72967
+IFN1bnM= 72968
+YWzEscWf 72969
+ZHVjaw== 72970
+IHRocmVhZElkeA== 72971
+IHBvZXRz 72972
+IHBsZWFkaW5n 72973
+IENvcmludGhpYW5z 72974
+ZmNj 72975
+YXdhaXRlcg== 72976
+Ki0= 72977
+IHBlcnNldmVy 72978
+IGFjdGl2aWRhZGVz 72979
+X291dGxpbmU= 72980
+LXBsYW4= 72981
+LnNjcm9sbFZpZXc= 72982
+cXVhdA== 72983
+IHNhbXN1bmc= 72984
+IGxldmVsaW5n 72985
+IHNwbGl0dGVy 72986
+X2dlb20= 72987
+IHByb21pbmVudGx5 72988
+IFNlZWRz 72989
+5Zyf 72990
+dWFpcw== 72991
+ZWZ1bGx5 72992
+SUVudW1lcmFibGU= 72993
+YWRkcw== 72994
+dmVyc2F0aW9ucw== 72995
+IGRpc2FibGVz 72996
+QU5EUk9JRA== 72997
+IFdlaXRlcg== 72998
+X0Zvcm1hdA== 72999
+X3NwbGl0cw== 73000
+IEFjdGl2ZVN1cHBvcnQ= 73001
+KGNzcw== 73002
+X21pY3Jv 73003
+c3RyaWtl 73004
+IENhdXNlcw== 73005
+IHZpc2libHk= 73006
+Q2FuY2VsYWJsZQ== 73007
+IFlvc2g= 73008
+IGRyYWluaW5n 73009
+IGNvbGk= 73010
+YXNsZXk= 73011
+IFJlc3BvbnNpYmlsaXRpZXM= 73012
+IFN1dHRvbg== 73013
+KnRoaXM= 73014
+U2hhcmVz 73015
+LWdyYXBo 73016
+IGVubGFyZ2Vk 73017
+Um91dGluZQ== 73018
+IGZyYW1lYnVmZmVy 73019
+IGFpcmZsb3c= 73020
+IHRyeA== 73021
+IExlaWdo 73022
+IEtlbnM= 73023
+KGhlYXA= 73024
+IHNwaWxsZWQ= 73025
+U0NBTEw= 73026
+IFZlbHZldA== 73027
+YWN0dWFsbHk= 73028
+X0VOQ09ESU5H 73029
+IFdvcm0= 73030
+KSl9Cg== 73031
+IERhbmdlcm91cw== 73032
+IHN1cGVyaW50ZW5kZW50 73033
+Lmxvb2s= 73034
+IHNoZWw= 73035
+L2Zz 73036
+U2FmZXR5 73037
+5a6L 73038
+LkRFRklORQ== 73039
+X2ZhY3RvcnM= 73040
+IHBhcnRpZG8= 73041
+IG9wdGltaXppbmc= 73042
+RG91YmxlQ2xpY2s= 73043
+LWNvbW1lcmNpYWw= 73044
+IGxvZ2ljYWxseQ== 73045
+Y3ljaA== 73046
+dXJ2ZQ== 73047
+wrU= 73048
+QUlMWQ== 73049
+IHJlYWN0aW5n 73050
+X0VYUFI= 73051
+a8O2 73052
+LmxvY2FsaXplZERlc2NyaXB0aW9u 73053
+IGFzdG91bmRpbmc= 73054
+IHBhc3RyeQ== 73055
+IGdsb3NzeQ== 73056
+IGJlaGF2ZXM= 73057
+L2Vj 73058
+IGNsaXBwZWQ= 73059
+IHByb3dlc3M= 73060
+IFVC 73061
+LyotLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0= 73062
+CWFscGhh 73063
+IGV4dHJhdmFn 73064
+IGZpbm5z 73065
+KFNvY2tldA== 73066
+IFVuc2FmZQ== 73067
+IHF1aWVyZQ== 73068
+X2VuY29kZWQ= 73069
+b2x1bWJpYQ== 73070
+IHphYg== 73071
+c3RyaWN0ZWQ= 73072
+IG1uaWU= 73073
+IE1PUw== 73074
+IGF0aGxldGljcw== 73075
+IEtlbmRhbGw= 73076
+IOyYpA== 73077
+QVZBSUxBQkxF 73078
+aW5veA== 73079
+X09QQ09ERQ== 73080
+IEl0ZW1UeXBl 73081
+IGNlbnRyaWY= 73082
+IGludGVyc3RhdGU= 73083
+X2Jvb2tz 73084
+LmRlbGl2ZXJ5 73085
+IExpc3Rl 73086
+b3JzaQ== 73087
+X3NlY3VyZQ== 73088
+Z3Jvd3Ro 73089
+IHZlbnRl 73090
+IHBzeWNob2xvZ2lzdHM= 73091
+IENDUw== 73092
+dWRlbmNl 73093
+IGNyYXdsZXI= 73094
+L21hbnVhbA== 73095
+IHRleHRTdHlsZQ== 73096
+IHBhbGluZHJvbWU= 73097
+IGNvbmR1Y3Rz 73098
+dGFibA== 73099
+V2l0aFVSTA== 73100
+L3JpZ2h0 73101
+IERyYQ== 73102
+Lk1haWw= 73103
+KHNlYw== 73104
+b2Z0d2FyZQ== 73105
+IHNldWw= 73106
+IHdyaW5rbGVz 73107
+X0ZX 73108
+QXk= 73109
+IEVybnN0 73110
+dW5iaW5k 73111
+IGNvbW1lbmQ= 73112
+X2hvb2tz 73113
+IE1vbmV0YXJ5 73114
+IFFR 73115
+dW5pdE9mV29yaw== 73116
+IEVudGl0eVR5cGU= 73117
+IGhvcm1vbmFs 73118
+LkZBSUw= 73119
+QFNsZg== 73120
+L2NoYW5uZWw= 73121
+c29ubw== 73122
+RGFucw== 73123
+X1JlZ2lzdGVy 73124
+SGFu 73125
+T1JC 73126
+SktMTU5PUA== 73127
+dmVudGVk 73128
+IGxvbmdzdGFuZGluZw== 73129
+IGJnQ29sb3I= 73130
+IDsp 73131
+IFJvYmJpZQ== 73132
+KCIuIg== 73133
+IGFqdXN0 73134
+LmhhbmRsZUNsaWNr 73135
+cmF0aW5ncw== 73136
+cHRlcg== 73137
+IGVyb3RpY28= 73138
+IEplbGx5 73139
+KioqKioqDQo= 73140
+LkRvZXNOb3RFeGlzdA== 73141
+CWJl 73142
+JHRlbXA= 73143
+Ij4mIw== 73144
+55u0 73145
+CVB1YmxpYw== 73146
+neyytA== 73147
+IEJ1aWxkaW5ncw== 73148
+LWFsb25l 73149
+LCdc 73150
+IHN3YXBz 73151
+IHBlcnBsZXg= 73152
+X3Byb2Nlc3NvcnM= 73153
+INC00LI= 73154
+IE5ZUEQ= 73155
+UENS 73156
+5q+P 73157
+IGhvamU= 73158
+RWRpdE1vZGU= 73159
+IHZ1bGdhcg== 73160
+IHZlcmRl 73161
+ICgpPT57Cg== 73162
+L2Zyb250ZW5k 73163
+IHRlbGVmb25l 73164
+IGxhbnRlcm4= 73165
+LnBhZ2VY 73166
+IER1ZA== 73167
+bGltaXRhdGlvbnM= 73168
+IG5vdGlmaWVy 73169
+IE1lc3NhZ2luZw== 73170
+IWltcG9ydGFudA== 73171
+IHN1cmdlb25z 73172
+KT0o 73173
+Rml4ZWRTaXpl 73174
+Llpvb20= 73175
+aW5hbg== 73176
+IGNyZWRz 73177
+IEJVRg== 73178
+LlN0YWNrVHJhY2U= 73179
+IHdhcnJhbnRlZA== 73180
+IHNvdXJjaW5n 73181
+IGNvbm5h 73182
+X0ZSRQ== 73183
+IHdvbGw= 73184
+IHJlZmluaW5n 73185
+X0FMTE9XRUQ= 73186
+X212 73187
+IFdvcmNl 73188
+IFNpbmNsYWly 73189
+Q2hlY2tzdW0= 73190
+IHVubG9ja3M= 73191
+IE1hcmtkb3du 73192
+IGZpc2hlcm1lbg== 73193
+RHVi 73194
+IEJvbm5pZQ== 73195
+ICAgICAgICAJCg== 73196
+IHZlcno= 73197
+Piw8Lw== 73198
+PjwhWw== 73199
+Wyc8ew== 73200
+amVj 73201
+IEVyZw== 73202
+cmF0aGVy 73203
+IHBhbGFicmFz 73204
+IFBBQ0tFVA== 73205
+bWlzZQ== 73206
+ZGFx 73207
+IE9rdG9iZXI= 73208
+KEdMRlc= 73209
+IEhlbnJp 73210
+IEZvdA== 73211
+IER1bw== 73212
+IE5FUw== 73213
+IHNhbHNh 73214
+IHVuYmlhc2Vk 73215
+QFNwcmluZ0Jvb3RUZXN0 73216
+IG9mZnM= 73217
+5YWs5Y+4 73218
+IGFtb3VudGVk 73219
+RnVsbFBhdGg= 73220
+IHF1YXQ= 73221
+IG1haWRlbg== 73222
+IFN1YnNldA== 73223
+IEFwcGxpY2F0aW9uRGJDb250ZXh0 73224
+bWlycm9y 73225
+bmV4 73226
+LnN0cmVldA== 73227
+c2V0UXVlcnk= 73228
+JHJlc3VsdHM= 73229
+YWRlcm8= 73230
+Z3Jlc3Nvcg== 73231
+X2J1Zw== 73232
+aXNzZXI= 73233
+IFNlYXJz 73234
+IGZpbGxDb2xvcg== 73235
+Lm1hc2tz 73236
+IERpYWJsbw== 73237
+X0FORFJPSUQ= 73238
+0J7QsQ== 73239
+IGZyZWFraW5n 73240
+IHJpbnNl 73241
+KHBrdA== 73242
+IGJvb2tsZXQ= 73243
+IHNhbmN0aW9uZWQ= 73244
+IHN0cmVhbWVk 73245
+dGFicGFuZWw= 73246
+IFJldHVybmluZw== 73247
+UGxhaW5UZXh0 73248
+TE9ZRUU= 73249
+YWxlc2Nl 73250
+0L7QutCw 73251
+IEZpeHR1cmU= 73252
+YXNzYWRvcnM= 73253
+IGRpc2JlbGllZg== 73254
+IEx1c3Q= 73255
+IHJhZGljYWxz 73256
+LkZlYXR1cmVz 73257
+X2luY2hlcw== 73258
+KHByaW1hcnk= 73259
+IEpNZW51SXRlbQ== 73260
+X3Rha2U= 73261
+IENva2U= 73262
+VW5pdE9mV29yaw== 73263
+IFdDSEFS 73264
+IGNvbnNjaWVudA== 73265
+b25lbnVtYmVy 73266
+UElORw== 73267
+YWJham8= 73268
+XSgi 73269
+LnNhbGVz 73270
+X2hlcmU= 73271
+IG9mZnNldFg= 73272
+dGFnTmFtZQ== 73273
+INmK 73274
+X1JpZ2h0 73275
+aWxpZw== 73276
+dGhlVmFsdWU= 73277
+b2NhcmQ= 73278
+IGNvbnN1bHRhbmN5 73279
+IGJsaWo= 73280
+Z29ybQ== 73281
+TmF2aWdhdGU= 73282
+xLFj 73283
+SWxsZWdhbEFyZ3VtZW50RXhjZXB0aW9u 73284
+X3Zl 73285
+LkNPTlRFTlQ= 73286
+dXJvcGVhbg== 73287
+LnJhZGlv 73288
+IGVudmlzaW9uZWQ= 73289
+IFNPTQ== 73290
+LnNk 73291
+QU5USVRZ 73292
+IENBTExCQUNL 73293
+IGhn 73294
+ZGVjcnlwdA== 73295
+566x 73296
+XFF1ZXVl 73297
+IE1JTEY= 73298
+IHJlY3Vyc2U= 73299
+IERhbnRl 73300
+LmdhbW1h 73301
+b3Jrcw== 73302
+KCIiKSkK 73303
+IEdyaW0= 73304
+Lm9wZW5n 73305
+IE1pY2hlbGU= 73306
+QW5hbHk= 73307
+IFBydQ== 73308
+X3JlZGlyZWN0ZWQ= 73309
+X3BhbA== 73310
+ZmFsbGJhY2s= 73311
+IOWtlw== 73312
+IGRpbm5lcnM= 73313
+R2VuZXJhdGluZw== 73314
+JCIs 73315
+aGlzdG9yaWM= 73316
+Z2V0U2ltcGxlTmFtZQ== 73317
+IE1pbGxpb25z 73318
+LWdsb2JhbA== 73319
+cm91dGluZw== 73320
+IGNvbnNvbGlkYXRl 73321
+IHJlY29pbA== 73322
+T2JqZWN0T2ZUeXBl 73323
+IGRlc3BlcmF0aW9u 73324
+QW55d2hlcmU= 73325
+IGdldE1vZGVs 73326
+X2tpbGw= 73327
+b2Jvb2s= 73328
+L2Rpc3BsYXk= 73329
+Ii8+Cgo= 73330
+IG1heW8= 73331
+INGB0L/QuNGB0L7Qug== 73332
+IGdvYWxpZQ== 73333
+eERG 73334
+IFByZXBhcmF0aW9u 73335
+IGRlcGVuZGFibGU= 73336
+LklOVkFMSUQ= 73337
+Li4uJw== 73338
+bmF0YWw= 73339
+bW9kdWxlTmFtZQ== 73340
+Y2FyYm9u 73341
+UEFM 73342
+IG1lZQ== 73343
+IGNhc2luZw== 73344
+6aG555uu 73345
+bmljYXM= 73346
+IEhhbW0= 73347
+IEJhYmU= 73348
+b3dhbmU= 73349
+IHN5bm9ueW0= 73350
+IFFpbg== 73351
+aW9j 73352
+ZW1vdGlvbg== 73353
+IGZlcm1lbnRhdGlvbg== 73354
+IGN1bXBs 73355
+IEVsZWN0cmljaXR5 73356
+KFJPT1Q= 73357
+dGVzdGVy 73358
+IEh1c2JhbmQ= 73359
+IEJhdQ== 73360
+X01BQ1JP 73361
+YWtlbmluZw== 73362
+ICAgICAgICAKICAgICAgICAKICAgICAgICAK 73363
+LmZpbg== 73364
+IENvbmZpZGVudGlhbA== 73365
+aWV6 73366
+TUJFUg== 73367
+IHNwZXJtYQ== 73368
+IEhQVg== 73369
+dHhu 73370
+Q09OVEFDVA== 73371
+LlRocm93 73372
+IG11cmFs 73373
+IFR3aXN0 73374
+KCZfX18= 73375
+IGpk 73376
+IGVtcG93ZXJtZW50 73377
+IGRpc3RpbnQ= 73378
+IGJvbWJpbmdz 73379
+T3V0Y29tZQ== 73380
+IHNob3J0ZW4= 73381
+5b6M 73382
+QUNDT1VOVA== 73383
+X2NvdmVyYWdl 73384
+ZW5jbw== 73385
+X3JlZmVy 73386
+c2V0TWVzc2FnZQ== 73387
+IHJlcGVyYw== 73388
+cHRpZGVz 73389
+IGRlaXR5 73390
+dWNoc2lh 73391
+KGh0 73392
+LnN1YnNjcmlwdGlvbg== 73393
+IHJlZGlzdHJpYnV0ZWQ= 73394
+IER5bmFzdHk= 73395
+X3Zj 73396
+LWZyYW1ld29yaw== 73397
+cnlmYWxs 73398
+IGdhdGluZw== 73399
+IExvcmVuem8= 73400
+b29kb28= 73401
+IGRpZ2VzdGlvbg== 73402
+IGZvb3Rpbmc= 73403
+CUhhc2hNYXA= 73404
+cmVhbERvbmFsZFRydW1w 73405
+IGFwYWNoZQ== 73406
+KHZhbG9y 73407
+IHBvaXNvbm91cw== 73408
+LlBlcm1pc3Npb24= 73409
+IHBhcmFtb3VudA== 73410
+d2VpdA== 73411
+bGxhbmQ= 73412
+IGh5cG90aGVzZXM= 73413
+IFByeQ== 73414
+IGhvbWVt 73415
+KERldmljZQ== 73416
+aW5kaWNl 73417
+ZXZh 73418
+cHJlc2VuY2U= 73419
+IEJlbnRsZXk= 73420
+IEVuZGluZw== 73421
+IGRvbWVzdA== 73422
+CXRw 73423
+CWVycm9ycw== 73424
+Y29ybmVy 73425
+bGRh 73426
+CgkJCQkK 73427
+X1BFUlNPTg== 73428
+IFNlcmdleQ== 73429
+IFBhcnNlcw== 73430
+LWZpY3Rpb24= 73431
+LkJhY2tncm91bmRDb2xvcg== 73432
+IHNvbW1lcw== 73433
+IGNvb2xlc3Q= 73434
+IHJ1YmJsZQ== 73435
+LmpvYnM= 73436
+IGRyb3duaW5n 73437
+YWRvcmFz 73438
+IHdpbmdlcg== 73439
+IEluY3JlYXNpbmc= 73440
+2YrYqQ== 73441
+QkJCQg== 73442
+KFJvbGU= 73443
+IG9kZGx5 73444
+RGV2RXhwcmVzcw== 73445
+LXV0aWw= 73446
+IFNoZW1hbGU= 73447
+cHJpbWl0aXZl 73448
+IGFmZmlybWVk 73449
+LnJldHVyblZhbHVl 73450
+LWxpdmU= 73451
+IEFjdGlvbkNvbnRyb2xsZXI= 73452
+w6ts 73453
+ZXJjdWxvc2lz 73454
+IHByYWt0 73455
+IGdlb3BvbA== 73456
+cGljcw== 73457
+Q0RD 73458
+LkZs 73459
+LnNpZA== 73460
+cmllYmVu 73461
+KHZhcnM= 73462
+K3NlbGY= 73463
+IGludGVyaW9ycw== 73464
+IEF1Z3VzdGluZQ== 73465
+IjpAIg== 73466
+IFN0ZWFsdGg= 73467
+IGdldENvbG9y 73468
+IEdlbnRsZQ== 73469
+fiI6Ig== 73470
+IHdoaW0= 73471
+KCc8Lw== 73472
+IFNTRQ== 73473
+IFZpb2xldA== 73474
+X2NyZWQ= 73475
+IGF0YQ== 73476
+IEF6ZXJiYWlqYW4= 73477
+ID8/Pz8/ 73478
+LmV2ZXJ5 73479
+KGNvbm5lY3Q= 73480
+IERyb25l 73481
+IHRvbGVyYW50 73482
+c3VidG90YWw= 73483
+X3NodWZmbGU= 73484
+dXN0YWluYWJpbGl0eQ== 73485
+cHJlZmVycmVk 73486
+IFNFWA== 73487
+IGNvbmdyZXNzbWFu 73488
+IG5hbW9ybw== 73489
+IGhvbm9yYWJsZQ== 73490
+IGFmdGVyRWFjaA== 73491
+IMW8eWM= 73492
+SEFN 73493
+LnRvbQ== 73494
+IGVsb25n 73495
+IFNlcmlvdXM= 73496
+LVNlbWl0aWM= 73497
+0KHRgg== 73498
+IGZsYW0= 73499
+dGVuZXI= 73500
+LlRFU1Q= 73501
+IFRSQUNL 73502
+IFBoaWxpcHM= 73503
+IEFyZW4= 73504
+IEhpY2tz 73505
+b2luZWQ= 73506
+IEZhaA== 73507
+aXNzZXVy 73508
+IGNpcmN1bWNpc2lvbg== 73509
+KHR3ZWV0 73510
+IHBvaWw= 73511
+IFNlZW4= 73512
+X01BUFBJTkc= 73513
+IGludmFyaWFibHk= 73514
+IEZ1c2U= 73515
+ICc/Jw== 73516
+PXBhc3N3b3Jk 73517
+IOuCmA== 73518
+IElIdHRw 73519
+c3R5cGU= 73520
+Zml0bmVzcw== 73521
+LlRhZ3M= 73522
+IOqwnA== 73523
+KERXT1JE 73524
+IHF1YQ== 73525
+IE1hcnZpbg== 73526
+Ik0= 73527
+LmlzQXV0aGVudGljYXRlZA== 73528
+Lmd1YXJk 73529
+KT8KCg== 73530
+CQkJCQkJCQkJCQkJCQkJCQkJCQ== 73531
+IFNoaXBz 73532
+IHNlbnNpdA== 73533
+fTsNCg0KDQo= 73534
+YWhhaGE= 73535
+IGxpZXV0ZW5hbnQ= 73536
+IEphZ3Vhcg== 73537
+IC8vLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0= 73538
+VUNF 73539
+SW5zcA== 73540
+YWludGVy 73541
+X3BvbHlnb24= 73542
+LkRvd24= 73543
+IHRleHR1cmVk 73544
+LnNldEFjdGlvbg== 73545
+b2dy 73546
+IHNjaWVudGlmaWNhbGx5 73547
+IHNocmluZQ== 73548
+IGNsb3VkeQ== 73549
+LkhvdXI= 73550
+UG9zdEJhY2s= 73551
+QVpZ 73552
+X2NhbmRpZGF0ZXM= 73553
+KFNlYXJjaA== 73554
+IGNvbW1pc3Npb25lcnM= 73555
+IEJpZW4= 73556
+IGRvY3RvcmFs 73557
+IEZlZWxpbmc= 73558
+X1ZFUlRJQ0FM 73559
+IEJk 73560
+bmdpbng= 73561
+IOWcqA== 73562
+X2FyZ3Y= 73563
+UlNB 73564
+IGVsZGVzdA== 73565
+LWhlYXZ5 73566
+Q09OTg== 73567
+IEh0dHBOb3RGb3VuZA== 73568
+LWNvbHVtbnM= 73569
+IE5QQ3M= 73570
+IGNhZmVz 73571
+IGfDqQ== 73572
+IHN0YWxscw== 73573
+IGZvcmtz 73574
+IHBvYmw= 73575
+U3RyZWFtcw== 73576
+IGJhc3RhcmQ= 73577
+IFJhcHRvcnM= 73578
+IEdyYW1teQ== 73579
+IEdlaA== 73580
+X1RpY2s= 73581
+KHByZWc= 73582
+IGxpcHN0aWNr 73583
+X3J1 73584
+PEg= 73585
+IMSRaQ== 73586
+LkNhcg== 73587
+IHNwYXJlZA== 73588
+bW9uaWM= 73589
+aW5jdGlvbnM= 73590
+QWZyaWNh 73591
+KGRpY3Rpb25hcnk= 73592
+ICoqKSY= 73593
+YGBg 73594
+X3ByZXNzdXJl 73595
+bWll 73596
+IFJvbWFuaWFu 73597
+L21hcms= 73598
+IG1haW50ZW5hbnQ= 73599
+IHRyZW4= 73600
+IFBvc3RncmVTUUw= 73601
+UkVMRUFTRQ== 73602
+SlBFRw== 73603
+IGRlZGljYXRl 73604
+TWFrZVJhbmdl 73605
+IHJvYm90aWNz 73606
+YWt0aXY= 73607
+JSUl 73608
+YWFy 73609
+dmlld01vZGVs 73610
+KG1hYw== 73611
+dWNoZXI= 73612
+IGRlYmVu 73613
+TG9jYWxpemF0aW9u 73614
+0L7Qt9Cy0YDQsNGJ0LDQtdGC 73615
+LnNldFRvb2xUaXA= 73616
+LmZhc3Rqc29u 73617
+IHBlcmVubmlhbA== 73618
+LWNoaWVm 73619
+a2lzaA== 73620
+IGF0dGlj 73621
+U3VidGl0bGU= 73622
+IFNsYW0= 73623
+IExpdGVyYXJ5 73624
+ZXJuZXM= 73625
+INGC0L7Qu9GM0LrQvg== 73626
+IHN0YXJ0QWN0aXZpdHlGb3JSZXN1bHQ= 73627
+LkVycm9yTWVzc2FnZQ== 73628
+YmluYXRpb25z 73629
+Ikw= 73630
+IGZvcmJpZA== 73631
+IGxvZGdlZA== 73632
+Lkxpc3RCb3g= 73633
+IFBTRA== 73634
+IGN1bHR1cmE= 73635
+VU5DVA== 73636
+Ik9uZQ== 73637
+IEd1aWxs 73638
+IEJhdHRhbGlvbg== 73639
+IGNhcmVnaXZlcnM= 73640
+IEtsbw== 73641
+QmVoaW5k 73642
+IHNlYXJjaGFibGU= 73643
+X0JPVU5E 73644
+Uk9D 73645
+IHN0ZXJlb3R5cGU= 73646
+IHByZXBlbmQ= 73647
+aW50ZXJzZWN0aW9u 73648
+QmFza2V0 73649
+KGxv 73650
+IGZpbGVJbmZv 73651
+IFVJU2Nyb2xsVmlldw== 73652
+ZWNlc3NhcmlseQ== 73653
+IENoZXM= 73654
+LWluc3RhbmNl 73655
+IGFwcGFydA== 73656
+IEFtYXI= 73657
+IHJvd0RhdGE= 73658
+IGF5dWRh 73659
+IGNhcmF2YW4= 73660
+X3BpY2tsZQ== 73661
+IGNoYWluaW5n 73662
+KV07Cgo= 73663
+IGJveGVk 73664
+YWVwZXI= 73665
+IEVWRVI= 73666
+eW50aGVzaXM= 73667
+LWZhc3Q= 73668
+IOuwsA== 73669
+5Y+v5Lul 73670
+IHZvbHVudGVlcmVk 73671
+IGV4aWc= 73672
+U0lERQ== 73673
+IFBob25lTnVtYmVy 73674
+dWxhaXJl 73675
+IEthZA== 73676
+IGRhcm4= 73677
+IHlhaw== 73678
+IEJsaW5r 73679
+LnNwaW5uZXI= 73680
+IG9yZGVhbA== 73681
+X2VuZW15 73682
+IGdldFM= 73683
+IEJvbw== 73684
+TGluZU51bWJlcg== 73685
+X0xPT0s= 73686
+RUxDT01F 73687
+IHNlYW1z 73688
+IHNhZ2Vu 73689
+aXNjbG9zZWQ= 73690
+KHJheQ== 73691
+W2dyb3Vw 73692
+UFRT 73693
+Lk5hdmlnYXRl 73694
+IE93bA== 73695
+IGRidXM= 73696
+IGltcGF0aWVudA== 73697
+IEd1cHRh 73698
+KG9iamVjdHM= 73699
+IGFwcmls 73700
+LXF1 73701
+IG91dHJhcw== 73702
+IFRIRU0= 73703
+IEVNQw== 73704
+RW1wbGVhZG8= 73705
+IGdydWI= 73706
+SUFN 73707
+IHZlbm9t 73708
+IHRyYW5zY2VuZA== 73709
+IHZpY3RvcmlvdXM= 73710
+IE1heWVy 73711
+INGC0L7QstCw0YA= 73712
+IEtlbGxleQ== 73713
+SW5wdXRHcm91cA== 73714
+IHJlZmlsbA== 73715
+V2l0aFR5cGU= 73716
+IGNoYXVmZg== 73717
+b2xkZW0= 73718
+X3RpZA== 73719
+IGZsdXNoZWQ= 73720
+XHN5c3RlbQ== 73721
+LnJhbmRyYW5nZQ== 73722
+IFBPU0lUSU9O 73723
+IFRlbmFudA== 73724
+Y29udmVyc2lvbg== 73725
+Y2FsbGluZw== 73726
+KCkpKSwK 73727
+0L7QvdCw 73728
+IHNpZGV3YXlz 73729
+IGxheA== 73730
+CXJlcA== 73731
+YWVwZXJuaWNr 73732
+IG5lZ2Vy 73733
+IEZseWVycw== 73734
+ICJALw== 73735
+dXBha2Fu 73736
+X2VsYXBzZWQ= 73737
+dHViZQ== 73738
+UG9zWA== 73739
+LnNleA== 73740
+IGzDpHNzdA== 73741
+IEdyYXZl 73742
+5Y+C 73743
+KGVtcA== 73744
+KHN0cnRvbG93ZXI= 73745
+Y29udmVydGVy 73746
+IFNwb25zb3JlZA== 73747
+KHdvcmtlcg== 73748
+IG1hdHJpbW9u 73749
+Q29tbWlzc2lvbg== 73750
+KGh3 73751
+X1NJR05BVFVSRQ== 73752
+bWVr 73753
+IGFsZ3VuYXM= 73754
+X0VU 73755
+aXN0cmluZw== 73756
+THY= 73757
+U2xpZGVz 73758
+IHdlYWtTZWxm 73759
+IHdr 73760
+IFppZw== 73761
+IHB1YnM= 73762
+IEJSQQ== 73763
+IGZsdW9yZXNjZW50 73764
+Y2Fycnk= 73765
+LmVyYg== 73766
+IEluaQ== 73767
+LkRyYXdTdHJpbmc= 73768
+IFNFUA== 73769
+dXR0ZXJz 73770
+2ZE= 73771
+Um95YWw= 73772
+IGNhYmJhZ2U= 73773
+IFN1aw== 73774
+XT49 73775
+IEVkaXNvbg== 73776
+IHNwZWN1bGF0ZWQ= 73777
+LmRvd25jYXNl 73778
+IHRwaA== 73779
+IMOD 73780
+IGd1bnNob3Q= 73781
+cnBt 73782
+IGZsdXR0ZXI= 73783
+IGFueA== 73784
+YXplcw== 73785
+UU9iamVjdA== 73786
+IEZhdm9y 73787
+IG1vZHVsZU5hbWU= 73788
+JnM= 73789
+bGVo 73790
+LldlaWdodA== 73791
+IFdBTA== 73792
+X1ZBUlM= 73793
+IFdhc3Nlcg== 73794
+IG91dGJvdW5k 73795
+IGVyZm9sZ3Jl 73796
+LnZhbG9y 73797
+KGxpZ2h0 73798
+IE1hZ251cw== 73799
+IHpvZWs= 73800
+eWg= 73801
+IHN0eWxlc2hlZXQ= 73802
+Pm0= 73803
+V2hpdGVzcGFjZQ== 73804
+IFsnLw== 73805
+CVJlcXVlc3Q= 73806
+X2luY3JlYXNl 73807
+LWRpc3RhbmNl 73808
+aWNvbG9y 73809
+aGNp 73810
+IEtJTkc= 73811
+UFg= 73812
+b2ls 73813
+ZW1pbmc= 73814
+bmFtZW50cw== 73815
+RGVmaW5lcw== 73816
+IFstLQ== 73817
+IHZhcmlvcw== 73818
+IFBSRVNT 73819
+LGF4aXM= 73820
+IENvbGxpZGVy 73821
+KX0KCg== 73822
+IGZvcmNpYmx5 73823
+IHN0YWF0 73824
+X1NUQU5EQVJE 73825
+IG9jY3VsdA== 73826
+IGJhcHRpc20= 73827
+IEN1bm5pbmdoYW0= 73828
+X2J1aWx0aW4= 73829
+Q1BG 73830
+W21heG4= 73831
+IFJIUw== 73832
+IE9uZXM= 73833
+KF86 73834
+IGluc2VjdXJpdHk= 73835
+LnJlZ2lzdHJhdGlvbg== 73836
+aW1wbGlmaWVk 73837
+IFN5bXBvc2l1bQ== 73838
+aHJlYWQ= 73839
+IHF1ZWxsZQ== 73840
+IGZyZW56eQ== 73841
+Q2FsaWJyaQ== 73842
+IFNQRUVE 73843
+b3Vp 73844
+KCldLAo= 73845
+YWNjb3JkaW5n 73846
+IG1jYw== 73847
+IGFzaWF0 73848
+IGFkamFjZW5jeQ== 73849
+IEFibGU= 73850
+IHNhbGRv 73851
+bm9zdGk= 73852
+IGRpbWU= 73853
+ZXRyYXRpb24= 73854
+IE1vZGlmaWNhdGlvbg== 73855
+IEhlcmI= 73856
+IHBsYWF0cw== 73857
+IGludGVycGVyc29uYWw= 73858
+IO2ZleyduA== 73859
+YXJtZQ== 73860
+IGNvbWVyY2lhbA== 73861
+IEJhdGVz 73862
+KGNhcmRz 73863
+LmdldENsaWVudA== 73864
+Lk5PUk1BTA== 73865
+CVRlc3Q= 73866
+ICAgICAgICANCiAgICAgICAgDQo= 73867
+IFJhem9y 73868
+d2Vpcw== 73869
+SVRIVUI= 73870
+IEVOVElUWQ== 73871
+YWdpdA== 73872
+IG1pbmVjcmFmdA== 73873
+cHJvcG9zYWw= 73874
+IHNhbHR5 73875
+YW5kcg== 73876
+IENvbmNsdXNpb24= 73877
+IHBydWRlbnQ= 73878
+IFtA 73879
+IFB1cHBldA== 73880
+aWdvbg== 73881
+IEdvdGhhbQ== 73882
+IGNoZWVycw== 73883
+IFNoYXk= 73884
+IGpp 73885
+IEdESw== 73886
+ZXhwZXJ0 73887
+IGZ1bmt5 73888
+IFphbQ== 73889
+W05VTQ== 73890
+RGVxdWU= 73891
+X1RXTw== 73892
+XHZpZXdz 73893
+IHByb2pla3Q= 73894
+IGRyb3duZWQ= 73895
+a2lkcw== 73896
+LnNoZWV0 73897
+IG5vbmQ= 73898
+IGNvdXJ0ZQ== 73899
+IC4uLgoKCgo= 73900
+IHBpY3R1cmVzcXVl 73901
+IHR1YmluZw== 73902
+KCkuIg== 73903
+amV0cw== 73904
+X1B1YmxpYw== 73905
+IEZhcnI= 73906
+IEFyZA== 73907
+T1VSU0U= 73908
+IGthZGFy 73909
+IFByb2dyYW1t 73910
+LmtleXdvcmQ= 73911
+CSAgICAgICAgICAgICAgICA= 73912
+aWVkYWRlcw== 73913
+YXRvbG9neQ== 73914
+IER1bmQ= 73915
+PWNvdW50 73916
+IHNsb3dkb3du 73917
+LSIs 73918
+LkZvcmVncm91bmRDb2xvcg== 73919
+UnVucw== 73920
+LlR5cGVPZg== 73921
+JGN1cnJlbnQ= 73922
+IHVwc2NhbGU= 73923
+CXVuaW9u 73924
+KGNoaXA= 73925
+dW1pZGl0eQ== 73926
+PVtdDQo= 73927
+IGhhcnQ= 73928
+ICRfWw== 73929
+eW5lYw== 73930
+LlVzdWFyaW8= 73931
+IG9jdGF2ZQ== 73932
+IHBvcnRyYXlhbA== 73933
+INC90L7QvNC10YA= 73934
+IE9jY3VweQ== 73935
+X25hbg== 73936
+IFNtYXJ0cGhvbmU= 73937
+aGluZA== 73938
+IHdpbmRzaGllbGQ= 73939
+IGxvbmVsaW5lc3M= 73940
+L2NoYXJ0 73941
+IGFjdGl2YXRlcw== 73942
+LnJpYmJvbg== 73943
+IGxhZ2k= 73944
+IHBhcmFjaA== 73945
+SHlwZXI= 73946
+c2NhbGVk 73947
+VGVz 73948
+IEJlZXQ= 73949
+IGRpc3NlY3Q= 73950
+IENpYw== 73951
+IH0sCgoK 73952
+PigpCgo= 73953
+LnN0dWR5 73954
+IGNvbnRyYXN0aW5n 73955
+WkVSTw== 73956
+IHR1bmE= 73957
+IENob3c= 73958
+X3Zh 73959
+ZmF2b3I= 73960
+W0luZGV4 73961
+IFBvd2VyU2hlbGw= 73962
+KHByb3Rv 73963
+JykpOgo= 73964
+X2Zvcm1hdHRlcg== 73965
+Q2hyaXN0b3BoZXI= 73966
+T3JOdWxs 73967
+Q0lTSU9O 73968
+X2NvbnN1bWVy 73969
+UGFzdGU= 73970
+KG5vbWU= 73971
+ZW50b24= 73972
+IHVucmF2ZWw= 73973
+X2Rvbg== 73974
+IHBhcmVudGhlc2Vz 73975
+IE5VSVQ= 73976
+L10= 73977
+IOKIpw== 73978
+c3RhY2xlcw== 73979
+L2NvbW1lbnQ= 73980
+dXR0aW5n 73981
+IHNsb3BweQ== 73982
+KFt7 73983
+LnNhdg== 73984
+dG9Kc29u 73985
+IOu5hA== 73986
+IFByYXR0 73987
+Lm1vZGlmeQ== 73988
+LklzQ2hlY2tlZA== 73989
+IHZlbmV6 73990
+IFNFVFRJTkdT 73991
+amF3 73992
+IGZpcmVzdG9yZQ== 73993
+IGNvbnNvcnRpdW0= 73994
+IGthYg== 73995
+IFN1cHBvcnRpbmc= 73996
+IFRoZXNpcw== 73997
+IG5vbmxpbmVhcg== 73998
+IHRleHRib3g= 73999
+LiIiIg== 74000
+IEVuZXJn 74001
+LkpPcHRpb25QYW5l 74002
+IGludGVycnVwdGlvbg== 74003
+w6h0cmVz 74004
+IHNoYWxl 74005
+IFBsYXllZA== 74006
+IHNvY2lhbGU= 74007
+WUdPTg== 74008
+X0JBVENI 74009
+IHRyaW1lc3Q= 74010
+IFByb2NlZHVyZXM= 74011
+IGF0dGVuZHM= 74012
+IiR7 74013
+ZXZhbHVhdGlvbg== 74014
+LlByb2dyZXNzQmFy 74015
+IEFsZXhhbmRyYQ== 74016
+Y2jDqQ== 74017
+X1NFUVVFTkNF 74018
+IGNyb2NoZXQ= 74019
+Um9z 74020
+IGlobmVu 74021
+ICIqKio= 74022
+IGFyb3Vz 74023
+IG1vZHVsdXM= 74024
+X0xJTlVY 74025
+U3RhY2tTaXpl 74026
+aWF0aW9uRXhjZXB0aW9u 74027
+Lk11dGFibGU= 74028
+IClb 74029
+IHBpaQ== 74030
+Zmlmbw== 74031
+X1BJQ0s= 74032
+UHVycG9zZQ== 74033
+KFN0dWRlbnQ= 74034
+IE5pY28= 74035
+ZXN6 74036
+L3Nt 74037
+IFBQUA== 74038
+W2lucHV0 74039
+5Y+Y 74040
+IGJsYXN0cw== 74041
+IE11dHVhbA== 74042
+cm9sbGV5 74043
+IHV0aWxpc2Vy 74044
+OlRoZQ== 74045
+5Z+6 74046
+LmRlY29kZXI= 74047
+IG9iamV0b3M= 74048
+IGF3YWtlbmluZw== 74049
+IEVubGlnaHQ= 74050
+CWFsaWdu 74051
+X3Jld3JpdGU= 74052
+L2N1cnJlbnQ= 74053
+IGRhcmF1Zg== 74054
+Q2FudGlkYWQ= 74055
+LG5w 74056
+IHZlbG9jaXRpZXM= 74057
+Q0xS 74058
+IG1pc2luZm9ybWF0aW9u 74059
+IHN0cmVhbWxpbmVk 74060
+IGdyb29taW5n 74061
+IGF6aQ== 74062
+b2xn 74063
+IGNvbnN0aXR1ZW50 74064
+IHdlZQ== 74065
+0YXQvtC00LjQvA== 74066
+IEFsb25zbw== 74067
+aWV0Zg== 74068
+Y3Rlcg== 74069
+IHRoZXJtb3N0YXQ= 74070
+KEND 74071
+IHN0YWNraW5n 74072
+X2NvbnZlcnRlcg== 74073
+IERpc25leWxhbmQ= 74074
+CWZpbGVz 74075
+SUNJ 74076
+X1RPUElD 74077
+CUVsZW1lbnQ= 74078
+YXJnYXM= 74079
+IFxA 74080
+YW5jb2Nr 74081
+IEJhc2VFbnRpdHk= 74082
+KCItLS0= 74083
+cmJyYWtr 74084
+IG5lZ2F0aXZlcw== 74085
+IHZ3 74086
+PWZvcGVu 74087
+Y2hlbWlzdA== 74088
+QXJjaGl2bw== 74089
+IGAu 74090
+IEZPVVI= 74091
+KGFp 74092
+VGFibGVXaWRnZXRJdGVt 74093
+PD8+Pg== 74094
+LnByZWQ= 74095
+VHJhaWw= 74096
+LWZhY3Rvcg== 74097
+IEltYWdlQnV0dG9u 74098
+cGVyaWE= 74099
+IENlbGVicmF0aW9u 74100
+LlJlc3BvbnNlQm9keQ== 74101
+dXJjaGFzZXM= 74102
+IGdldEtleQ== 74103
+IENyYWI= 74104
+IHFp 74105
+IFdpY2s= 74106
+IGNoYXN0 74107
+IC4uLi4uLg== 74108
+IGNvbWVueg== 74109
+IHNoYXJkcw== 74110
+IGTDqWNvcg== 74111
+IGhhbHZlcw== 74112
+UVVFTkNZ 74113
+IHBvd2VyaG91c2U= 74114
+TElORw== 74115
+Q2xhc3NMb2FkZXI= 74116
+Y2VudHJl 74117
+LXNlbmQ= 74118
+bWFo 74119
+IHNocmVkZGVk 74120
+IFRJRkY= 74121
+aW5rYQ== 74122
+LgoKCgoK 74123
+IGRlc2lnbmF0ZQ== 74124
+IE5pZ2h0bWFyZQ== 74125
+IEdlbmV0aWM= 74126
+X2NoYW5jZQ== 74127
+KGFuaW1hdGlvbg== 74128
+cXVpbGE= 74129
+X3NwZWNpZXM= 74130
+TkVZ 74131
+b3lzdGljaw== 74132
+cmVsbG8= 74133
+zqw= 74134
+IGRpdmlzaXZl 74135
+IFJFQw== 74136
+IHN0dW1ibGU= 74137
+KGZha2U= 74138
+IExhY2U= 74139
+YW50YWdlZA== 74140
+YWtlc3Q= 74141
+cHJvbW90aW9u 74142
+IEZvd2xlcg== 74143
+PWNlbnRlcg== 74144
+IENpdWRhZA== 74145
+UmFkaQ== 74146
+IFNsZWVwaW5n 74147
+dXRyb24= 74148
+IHF1b2k= 74149
+IFJBRA== 74150
+IGV4cG9uZW50aWFsbHk= 74151
+IEJyZWVk 74152
+IG1vbm9wb2w= 74153
+aGlnaGVzdA== 74154
+eG1sbnM= 74155
+SW50UHRy 74156
+IHR1dHRl 74157
+IFJlZnJpZ2Vy 74158
+IOmhtemdog== 74159
+IHpvbmRlcg== 74160
+bGJyYWtr 74161
+O2VsZW1lbnQ= 74162
+IEhlZA== 74163
+UmVsYXRpb25z 74164
+64U= 74165
+Q29ycmVv 74166
+5aC0 74167
+IE1pZ2h0eQ== 74168
+QU5HTw== 74169
+X2NvbXBpbGU= 74170
+LmdldENtcA== 74171
+IGludmFkZQ== 74172
+LnNwcmluZ2Jvb3Q= 74173
+IFR1bmU= 74174
+X3NuYXA= 74175
+X0ZFRUQ= 74176
+IGRlY2lwaGVy 74177
+PXNpemU= 74178
+X2ZyZQ== 74179
+IFRpbGxlcnNvbg== 74180
+0LjQutCw 74181
+dGlnaHQ= 74182
+IGN1bHByaXQ= 74183
+UlRM 74184
+IFBhcmU= 74185
+KHB1Yg== 74186
+ZWdvdg== 74187
+IHBvbnRv 74188
+IGNvbnN1bA== 74189
+SlNJbXBvcnQ= 74190
+IHZlcndlbmRldA== 74191
+IEJvb3N0ZXI= 74192
+5b6F 74193
+IGNhcnJvdA== 74194
+dmVyaWdl 74195
+KExQ 74196
+IHd4VA== 74197
+IGltcHJvcGVybHk= 74198
+Iik6DQo= 74199
+IHN1Y2U= 74200
+L21vZGFs 74201
+IElDVA== 74202
+LikuCgo= 74203
+X21hcmtz 74204
+IENhY2hlZA== 74205
+IEN1cnJpY3VsdW0= 74206
+QnM= 74207
+CUpPcHRpb25QYW5l 74208
+m4Q= 74209
+IGNvZ25pdGlvbg== 74210
+IE5lZ290 74211
+PXJlc3VsdA== 74212
+X0ZvbnQ= 74213
+YXJpbmU= 74214
+IGNvbnNwaWM= 74215
+IENhbGN1bGF0aW9u 74216
+IENFT3M= 74217
+LXRyYW5zcGFyZW50 74218
+IEJlcmVpY2g= 74219
+56iL5bqP 74220
+Lmh5 74221
+LkFsaWdu 74222
+IGhvcGVsZXNz 74223
+IGNvbG9tYg== 74224
+dXJiZWQ= 74225
+IFNBWA== 74226
+IGVpbno= 74227
+KHpvbmU= 74228
+IG11enpsZQ== 74229
+IHRyZXNwYXNz 74230
+IEFicmFtcw== 74231
+IGNvbXDDqXQ= 74232
+IFNhbmN0dWFyeQ== 74233
+IE5TVGV4dEFsaWdubWVudA== 74234
+IHN0YXY= 74235
+IHByYWdtYXRpYw== 74236
+c3RyZW5ndGg= 74237
+V2l0aE9wdGlvbnM= 74238
+LmJhbmQ= 74239
+YXBoYWVs 74240
+QXVzdHJhbGlhbg== 74241
+IE9TRXJyb3I= 74242
+TWFuY2hlc3Rlcg== 74243
+SWRl 74244
+XFJlc291cmNl 74245
+0L7QtNC10YDQtg== 74246
+IHppZQ== 74247
+SGFybmVzcw== 74248
+LlR3ZWVu 74249
+Y2Ftcw== 74250
+4pyU 74251
+LXNjYWxhYmxl 74252
+LW9r 74253
+IGpsb25n 74254
+IE9sc29u 74255
+IE9ha3M= 74256
+LnNsaW0= 74257
+IHPFgg== 74258
+IG5ld09iag== 74259
+LkludmVudG9yeQ== 74260
+IGtlbm4= 74261
+IG5pZ2h0bWFyZXM= 74262
+aXJjbGVz 74263
+Lm50 74264
+Z3Jlbg== 74265
+IFRFTg== 74266
+IFNjb3Rz 74267
+IERpc2FiaWxpdHk= 74268
+X21hbmlmZXN0 74269
+LnNpZGViYXI= 74270
+IHNodWZmbGVk 74271
+IGh1bWlsaXR5 74272
+LnRhcA== 74273
+IEdyYWlu 74274
+bm90aWNlZA== 74275
+77yJ44CC 74276
+X2hwcA== 74277
+IGRpbGF0aW9u 74278
+IGhhbmRpY2Fw 74279
+Z2V0RGF0ZQ== 74280
+IGR6aWHFgg== 74281
+JykuJzwv 74282
+cmVjb3Zlcg== 74283
+eXNp 74284
+KGdyYXk= 74285
+YWhrYW4= 74286
+IGludGVyZmVyaW5n 74287
+X1RPVUNI 74288
+X3JlZHVjdGlvbg== 74289
+QWx0ZXI= 74290
+IGN1Yw== 74291
+RXhwZXJ0 74292
+IEx1bXA= 74293
+Wzpd 74294
+IHJlbG9j 74295
+IGNvbmR1Yw== 74296
+Q2hhcnNldHM= 74297
+Lmxpc3RlbmVycw== 74298
+LWludmVyc2U= 74299
+IHN1bW1vbnM= 74300
+IMO6bmljbw== 74301
+IE9W 74302
+IFNpY2hlcg== 74303
+IEpGYWN0b3J5 74304
+LmdldEJvdW5kaW5nQ2xpZW50UmVjdA== 74305
+amg= 74306
+IHNrZWxldG9ucw== 74307
+IEFzaWFucw== 74308
+IEFNQw== 74309
+aXNlbGVjdA== 74310
+LmNsaWVudEhlaWdodA== 74311
+KGZy 74312
+SGFzRm9yZWlnbktleQ== 74313
+LnJlbGF0aXZl 74314
+INiu 74315
+IG11bHRpY3VsdHVyYWw= 74316
+X0NPTEw= 74317
+IG1pY3JvYmlhbA== 74318
+IGltcG9ydGFudGVz 74319
+U3BhaW4= 74320
+IGN5bGluZGVycw== 74321
+aWVuaWU= 74322
+X09XTkVS 74323
+KERJUw== 74324
+IGZhbmRvbQ== 74325
+KG54 74326
+IGFwbGljYWNpw7Nu 74327
+b2NhdG9y 74328
+ZXNzaWFu 74329
+IENsYXVkZQ== 74330
+IGludG9sZXJhbmNl 74331
+xYJlbQ== 74332
+IFNlbWFudGlj 74333
+Lk1pZGRsZVJpZ2h0 74334
+QVJFU1Q= 74335
+IHNpZXZl 74336
+xLHEn8Sx 74337
+aWNhYmxl 74338
+ZXJnaWM= 74339
+IGJhdHRsZWQ= 74340
+b3JiaXQ= 74341
+KXx8KA== 74342
+dWVsZQ== 74343
+IGZhc2NpbmF0aW9u 74344
+IGTDpQ== 74345
+IFRpZ2h0 74346
+X0lOQ1JFRg== 74347
+LklzU3VjY2Vzcw== 74348
+LE8= 74349
+IHN0w7hy 74350
+IHByZXNzdXJlZA== 74351
+LlRSVUU= 74352
+IFRob3VzYW5k 74353
+IGdlbWVpbnM= 74354
+IHpi 74355
+IHNwaXJpdHVhbGl0eQ== 74356
+IFpldXM= 74357
+IFBvd2VyZnVs 74358
+YmF0dGVyeQ== 74359
+aXN0ZXM= 74360
+IO2D 74361
+LnNoaXJv 74362
+IEhpcHA= 74363
+ZGVjbHR5cGU= 74364
+LmpmYWNl 74365
+LnRlbXBlcmF0dXJl 74366
+IG1hcnF1ZQ== 74367
+X2JhZw== 74368
+QXR1YWw= 74369
+cHJpY2luZw== 74370
+Q2xlYXJseQ== 74371
+X0Fic3RyYWN0 74372
+w6lr 74373
+YWhydW5nZW4= 74374
+SW5zdHI= 74375
+CQoKCg== 74376
+IGNoZXdpbmc= 74377
+IENvYWNoaW5n 74378
+JExBTkc= 74379
+bWFsbG93 74380
+IHNlcmlvdXNuZXNz 74381
+X2N1dG9mZg== 74382
+IFF1YXJ0ZXJseQ== 74383
+fScpCgo= 74384
+IikpKTsKCg== 74385
+6KeE 74386
+LlBvc2l0aXZl 74387
+LXBv 74388
+eGl0bw== 74389
+LlJhZA== 74390
+IGJyaXNr 74391
+IExpZmVjeWNsZQ== 74392
+5pWw5o2u5bqT 74393
+ZmF0YWw= 74394
+IHhwb3M= 74395
+LkRldGFpbA== 74396
+ZW5hbA== 74397
+TUFUQ0g= 74398
+IGhlZWQ= 74399
+IGFmcmljYW4= 74400
+RGFkb3M= 74401
+YmVyYXBh 74402
+IGhlbGY= 74403
+JywnJyw= 74404
+IGVudHJlcHJlbmV1cnNoaXA= 74405
+IGNlcnRz 74406
+ZWNl 74407
+PnI= 74408
+X2ZpeHR1cmU= 74409
+IHBvb2xpbmc= 74410
+IG1vZ2VsaWpr 74411
+IHNldERhdGU= 74412
+5pS/ 74413
+LWNvbXBsZXRl 74414
+X1JBRElP 74415
+IGt1bA== 74416
+IGdvYg== 74417
+X1NMQVZF 74418
+IGZ1cnJ5 74419
+IE5VSVRLQQ== 74420
+SUxJVElFUw== 74421
+IG5vY2hl 74422
+IGN1ZmY= 74423
+IGNvbnRlc3RhbnRz 74424
+IFdW 74425
+IHBhc3Nwb3J0cw== 74426
+IMWC 74427
+IE5haWw= 74428
+X2RlY2ltYWw= 74429
+YXN0bGU= 74430
+IFNvbGRpZXJz 74431
+UmVjaXBpZW50 74432
+IGNvdXJzZXdvcms= 74433
+IGltZQ== 74434
+IFNlYXRz 74435
+X0RM 74436
+IGNvbnN1bHRhdGlvbnM= 74437
+X0FEVg== 74438
+IElrZWE= 74439
+IG9maWNpYWw= 74440
+IHJlZ2ltZW50 74441
+IEJhdGhz 74442
+LXBpbg== 74443
+X0JVQ0tFVA== 74444
+QUJDREVGR0hJSktMTU5PUA== 74445
+Il0pKTsK 74446
+PE1lc2g= 74447
+Iix7 74448
+IGRlcml2ZXM= 74449
+4oCcRm9y 74450
+IFl1Z29zbA== 74451
+aXNFbmFibGVk 74452
+IHNvbGx0ZW4= 74453
+IHBldGl0aW9ucw== 74454
+b3ZlcmFsbA== 74455
+IGdldFRvdGFs 74456
+X0hJTlQ= 74457
+TWludXM= 74458
+IGFub21hbGllcw== 74459
+IFBpY2t1cA== 74460
+PT09Jw== 74461
+bGVpdHVuZw== 74462
+IERlaw== 74463
+WVNJUw== 74464
+LnNlc3Npb25z 74465
+IGNhcmM= 74466
+X0l0ZW1z 74467
+IGludGVybWl0dGVudA== 74468
+Lkpzb25Qcm9wZXJ0eQ== 74469
+IG1NYXA= 74470
+IEthaw== 74471
+YWluY29udHJp 74472
+X3NlZWs= 74473
+IHVuYW1l 74474
+X3B1dHN0cg== 74475
+RmQ= 74476
+TGltaXRlZA== 74477
+c25vdw== 74478
+IFBhdmlsaW9u 74479
+IEV4YWN0 74480
+IHBvc3Rpbmdz 74481
+CWRpc3Q= 74482
+PHN0ZGxpYg== 74483
+TGlnaHRz 74484
+IGZpbHRybw== 74485
+V29ya2Vycw== 74486
+IHN5c2xvZw== 74487
+R2lybHM= 74488
+IEd1bQ== 74489
+X3llYXJz 74490
+J319Cg== 74491
+IGjDpHQ= 74492
+Z2F5 74493
+KHByb2I= 74494
+ZWxsYXM= 74495
+IHdpbHQ= 74496
+Lm9wdGltaXpl 74497
+X0RVTVA= 74498
+KFhNTA== 74499
+IERYR0k= 74500
+IG3DqXRo 74501
+SVRJWkU= 74502
+ZWxlY3Ryb24= 74503
+LmN6 74504
+IHN1YnNldHM= 74505
+IHJlc3Bvc3Rh 74506
+IGJlYWQ= 74507
+wrsu 74508
+IE9TQw== 74509
+JnBhZ2U= 74510
+Z3Bz 74511
+YW5pYW4= 74512
+UHVycGxl 74513
+IGFjcm9ueW0= 74514
+Uk9XTg== 74515
+QXVkaXQ= 74516
+IGNvdXJpZXI= 74517
+YWxpZQ== 74518
+IFdhc3M= 74519
+IGF1ZGl0cw== 74520
+IFBPVg== 74521
+IEZhY2lhbA== 74522
+X3N0cmNtcA== 74523
+ICsl 74524
+ICAgICAKCg== 74525
+YCk7Cgo= 74526
+RUhJQ0xF 74527
+WyJA 74528
+LW5hdGlvbmFs 74529
+6ZuF6buR 74530
+6L2v6ZuF6buR 74531
+X2NvZGlnbw== 74532
+IHVucXVlc3Rpb24= 74533
+aWxtaW5ndG9u 74534
+cmVxdWVzdENvZGU= 74535
+IElX 74536
+LnN0cmF0ZWd5 74537
+IFNZTUJPTA== 74538
+IGdyw7bDnw== 74539
+X2JlaGF2aW9y 74540
+IHJlZnJlc2hUb2tlbg== 74541
+IG1vbmc= 74542
+aW1lbnRhcnk= 74543
+IFNob3Bz 74544
+KCc/ 74545
+X2hpZ2hsaWdodA== 74546
+X2xleA== 74547
+IGlsbHVtaW5hdGVk 74548
+IHBhbHA= 74549
+LWluc2VydA== 74550
+IHN0cml2ZXM= 74551
+IGZvcnRz 74552
+IGVtYm9kaW1lbnRz 74553
+bXBqZXM= 74554
+X1RPTw== 74555
+IGRyYWdnYWJsZQ== 74556
+IGltbWVyc2lvbg== 74557
+cGlucw== 74558
+IFJlZ2lzdHI= 74559
+IEZyZWVCU0Q= 74560
+X3hsaW0= 74561
+IFR1bHNh 74562
+U25hY2tiYXI= 74563
+L2RhdGU= 74564
+IGRhdm9u 74565
+IGF1dG9yZWxlYXNl 74566
+IHZhY2F0aW9ucw== 74567
+CQkgCQ== 74568
+aWNlcHM= 74569
+IFJhbXA= 74570
+IEN5bnRoaWE= 74571
+X3BvcHVsYXRpb24= 74572
+JCQk 74573
+IFRBUg== 74574
+ZW5nYQ== 74575
+IHB1cw== 74576
+IOW5 74577
+IHRpbWVzdGVw 74578
+TGlmZXRpbWU= 74579
+IGZpbG1lcg== 74580
+WVNU 74581
+IEdhemV0dGU= 74582
+IG91dHNpZGVy 74583
+IEVYUE9SVA== 74584
+R09SSVRITQ== 74585
+LmZsZXg= 74586
+IFJvb3Rz 74587
+KHBpeGVs 74588
+emN6ZQ== 74589
+YWlyaWU= 74590
+IG92ZXJsb2FkZWQ= 74591
+U1RSQUNU 74592
+IENvdXJpZXI= 74593
+44GW 74594
+Y29udGluZW50 74595
+RnJlZA== 74596
+IHNlbXA= 74597
+IFN0ZWxsYQ== 74598
+IGRvdWJ0ZnVs 74599
+YWRtaW5z 74600
+IG9wdGluZw== 74601
+TE9UUw== 74602
+IG1hbmlmZXN0bw== 74603
+LWZvbGRlcg== 74604
+X2Ryb3BvdXQ= 74605
+dXR1cmVz 74606
+w612ZWlz 74607
+YWNoaWV2ZW1lbnQ= 74608
+IGNveQ== 74609
+ZmFpdGg= 74610
+X0hBTEY= 74611
+aXJlY3RlZA== 74612
+IGNvbnRhdG8= 74613
+U2VtYXBob3Jl 74614
+UHNp 74615
+IHZpdGFsaXR5 74616
+IEZsYXRCdXR0b24= 74617
+SXRlbVR5cGU= 74618
+IGltcGVjYw== 74619
+IGJ1b3k= 74620
+dWlu 74621
+IHNreXJvY2tldA== 74622
+IFNsYXllcg== 74623
+IFJDTVA= 74624
+IFNldmVudGg= 74625
+X0ludGVyZmFjZQ== 74626
+IGZpZXJj 74627
+c3RhdGlvbnM= 74628
+IEdyYWY= 74629
+bGljZWQ= 74630
+IGVudW1lcmF0b3I= 74631
+Q29udGFpbmVycw== 74632
+IG9p 74633
+w4fDg08= 74634
+LXRvbg== 74635
+UkVQ 74636
+KGZsb3c= 74637
+LmNvb3Jk 74638
+R2Fi 74639
+IE1vcnBo 74640
+IFpvZQ== 74641
+IGhhcmJvdXI= 74642
+Lm1lc3NhZ2luZw== 74643
+X29wdGlvbmFs 74644
+IEJhc2VBY3Rpdml0eQ== 74645
+cmVzZW50ZXI= 74646
+IG5ieXRlcw== 74647
+IGNvdXJhZ2VvdXM= 74648
+PSE= 74649
+J0l0 74650
+IGZvcnM= 74651
+IGNvcnJpZG9ycw== 74652
+IEJFRU4= 74653
+IGZ1c2Vk 74654
+PWltYWdl 74655
+LkdyaWRWaWV3 74656
+IHNlbWVu 74657
+aWdyb3Vw 74658
+dXB0aW1l 74659
+IFhC 74660
+5o6S5bqP 74661
+IGludGVncmF0ZXM= 74662
+X09D 74663
+IGJhaWxvdXQ= 74664
+IHRlc3Rl 74665
+IG9jdXA= 74666
+YXVsZWQ= 74667
+X29kZA== 74668
+cGdh 74669
+IEFTVVM= 74670
+IFRTUg== 74671
+IG9jY3VwYW50cw== 74672
+U2V0VGl0bGU= 74673
+U2NoZWR1bGVycw== 74674
+IGJla29tbWVu 74675
+QnJpZ2h0 74676
+IE1haW5Gb3Jt 74677
+Xygn 74678
+RnJvbUFycmF5 74679
+IGluZGljYQ== 74680
+SEFORA== 74681
+T3JkZW4= 74682
+IFRlbXBlcg== 74683
+LnN0YXR1c1RleHQ= 74684
+cG9saXRpY2Fs 74685
+IFBlcmN5 74686
+44CCCgoKCgoK 74687
+LnNldFg= 74688
+Z2V0TGlzdA== 74689
+aG9sZXM= 74690
+UGl4 74691
+IG91dHNvdXJjaW5n 74692
+IG1lc3NhZ2VJZA== 74693
+IGdldFNlc3Npb24= 74694
+IFZJUg== 74695
+T2ZGaWxl 74696
+IFNwYXRpYWw= 74697
+LkZsb2F0RmllbGQ= 74698
+KShfXw== 74699
+IFN3aW1taW5n 74700
+QUNMRQ== 74701
+IHNlbnRpcg== 74702
+IHBsdW5nZWQ= 74703
+IGF1am91cmQ= 74704
+Z3VuYWthbg== 74705
+KHZvbHVtZQ== 74706
+IGNyYXRlcg== 74707
+Lnhscw== 74708
+woDCmQ== 74709
+UmVuZGVyV2luZG93 74710
+LnVzZXJtb2RlbA== 74711
+IGZ1bmN0b3I= 74712
+RG9tYWlucw== 74713
+aW50ZXJwcmU= 74714
+IGFibm9ybWFsaXRpZXM= 74715
+YXJnaW5n 74716
+RGVtb2NyYXRz 74717
+IHBhbG1z 74718
+4qCA 74719
+w7hk 74720
+KkE= 74721
+RnJvbURhdGU= 74722
+fFs= 74723
+IEFsdGVybmF0ZQ== 74724
+IHB1ZG8= 74725
+IGNvbmRlbnNlZA== 74726
+KHBsYW4= 74727
+ZGVsaXZlcg== 74728
+IGJ1bGxldGlu 74729
+J11dLA== 74730
+IGNyw6llcg== 74731
+LWlw 74732
+V3M= 74733
+IiIiLAo= 74734
+IGlrZWE= 74735
+IHZpc2l0ZQ== 74736
+IG11bHRpcw== 74737
+UmVzdWx0YWRv 74738
+IFBob3RvZ3JhcGhlcg== 74739
+Li4uJywK 74740
+IG1pZ2xpb3Jp 74741
+IFRocmVhZHM= 74742
+Z2V0U3R5bGU= 74743
+ZXJhw6fDo28= 74744
+PFRTb3VyY2U= 74745
+IEdpbmc= 74746
+J10iLA== 74747
+IHNpZ25hbGVk 74748
+U3VwcHJlc3NMaW50 74749
+IGR3b3Jk 74750
+IEh1bnRpbmd0b24= 74751
+IEFBUA== 74752
+QU5HTEVT 74753
+LmNyZWRlbnRpYWxz 74754
+c3dhZ2dlcg== 74755
+LWNvbnNvbGU= 74756
+Ii0t 74757
+LlRleHRJbnB1dA== 74758
+IE5PUlRI 74759
+IG5pZ2h0bHk= 74760
+LkZPTlQ= 74761
+IHF1b3RpZW50 74762
+5Lmf 74763
+IHNjaMO2bg== 74764
+IFBsYW5uZXI= 74765
+IHJlYWRsaW5l 74766
+IGNvbmZyb250aW5n 74767
+YH0= 74768
+SXRlbUNvdW50 74769
+CWFjdGl2ZQ== 74770
+IHLDqXBvbmQ= 74771
+ZWxtZXQ= 74772
+IGdpbW0= 74773
+LG5vbmF0b21pYw== 74774
+IEFDVElWRQ== 74775
+aGV1cmU= 74776
+L1ByaXZhdGU= 74777
+IG1lYw== 74778
+LlNlY3JldA== 74779
+IENJUw== 74780
+xYJ1Zw== 74781
+KHBlcmlvZA== 74782
+IGxsZWdhcg== 74783
+dXJpYQ== 74784
+RGVzY3JpYmU= 74785
+IHBhcmVqYQ== 74786
+IFZlZA== 74787
+LWVmZmVjdHM= 74788
+IFBhcnNpbmc= 74789
+LXJlc291cmNl 74790
+IGFiYQ== 74791
+ICosCg== 74792
+IGFuYXRvbQ== 74793
+ICgqKSg= 74794
+LXJlYWw= 74795
+IFZlbnR1cmVz 74796
+IFNoaWVsZHM= 74797
+IFVuaXZlcnNpdGllcw== 74798
+UFJFU0VOVA== 74799
+IFFMYXRpbg== 74800
+xaU= 74801
+IFdpbGV5 74802
+QWFyb24= 74803
+IHJhY2lhbGx5 74804
+IE5hZHU= 74805
+IGh0dHBSZXNwb25zZQ== 74806
+w610aWNh 74807
+IOuwqQ== 74808
+IGdyw6F0aXM= 74809
+5LuL 74810
+b21hcA== 74811
+IGFub24= 74812
+CXBvcA== 74813
+YXZhdGFycw== 74814
+IHN1YnBhcmFncmFwaA== 74815
+ZHpp 74816
+UHJvamVjdGlsZQ== 74817
+RFRW 74818
+bGlzdGVuaW5n 74819
+X3JlZ2VuZXJhdGlvbg== 74820
+IFNoZWx0ZXI= 74821
+PFZlcnRleA== 74822
+L21k 74823
+KGxl 74824
+IHZhaw== 74825
+c2VsZWN0ZWRJbmRleA== 74826
+X10= 74827
+IFN5bnRoZXRpYw== 74828
+YXBwSWQ= 74829
+IEZpcmVk 74830
+IHBhbXBo 74831
+X2xhdGVuY3k= 74832
+aW5maWxl 74833
+KGNyaXRlcmlh 74834
+c2VyaWFsaXphdGlvbg== 74835
+UkNU 74836
+CWV2 74837
+IFNDSA== 74838
+IE9wdGljYWw= 74839
+IHN0aXJyZWQ= 74840
+IFBvdGlvbg== 74841
+ZXRoaWNhbA== 74842
+Ojp7Cg== 74843
+IFBlbmd1aW5z 74844
+UEhZ 74845
+RGVjaXNpb24= 74846
+a2FydA== 74847
+IGV4cG9ydGVycw== 74848
+IFBvbHllc3Rlcg== 74849
+Y29udHJlcw== 74850
+IExhd3Nvbg== 74851
+IEVtcGxveWVy 74852
+IHNhc3M= 74853
+IGRvd250aW1l 74854
+IGJyb2tlcmFnZQ== 74855
+IFJvdGFyeQ== 74856
+IFdhaGw= 74857
+V0FSTg== 74858
+IHNldEFjdGl2ZQ== 74859
+dGVtcGw= 74860
+Q2hlZXJz 74861
+LXNoZWxs 74862
+Rml0bmVzcw== 74863
+IHF1aWw= 74864
+IGNsZWFuZXJz 74865
+IOeb 74866
+IE1pbGFubw== 74867
+LWFzc29jaWF0ZWQ= 74868
+fX19LAo= 74869
+UEZO 74870
+IG9uUGFnZQ== 74871
+X3N0cmVhbXM= 74872
+IHNjdWxwdHVyZXM= 74873
+IG5haWxlZA== 74874
+PXNj 74875
+6aaW6aG1 74876
+0LjQvNCy 74877
+Y29ubmV4aW9u 74878
+Sk9C 74879
+IEthcm1h 74880
+IFN3aWZ0VUk= 74881
+IERleg== 74882
+L1VJ 74883
+IOyZ 74884
+Z2V0Q2xpZW50T3JpZ2luYWw= 74885
+IHB1bmlzaGluZw== 74886
+IG9kZW5zZQ== 74887
+LHJpZ2h0 74888
+ZW5lcmF0aXZl 74889
+IFByb2JsZQ== 74890
+IEFwcFN0YXRl 74891
+IGRpc2Nsb3N1cmVz 74892
+IENhbnRlcg== 74893
+Y29tcG9zZXI= 74894
+dXBhdGVu 74895
+IHN1Y2Nlc3NvcnM= 74896
+Ij4nCg== 74897
+IHByZXNlcnZlcw== 74898
+Lm9wZW5k 74899
+X05vcm1hbA== 74900
+L2hy 74901
+UmFuZ2Vz 74902
+LGxvbmc= 74903
+CQkJCSAgICAgICAgICAg 74904
+cHJvZHVjdG9z 74905
+IGZseWVy 74906
+IEdydXBv 74907
+Tmlja25hbWU= 74908
+SGllcg== 74909
+IERFQQ== 74910
+U3ByaXRlcw== 74911
+CW1hc2s= 74912
+X3Jlc2VydmVk 74913
+LXNob3A= 74914
+Lm5vdGlmaWNhdGlvbnM= 74915
+IGRpdmlzaWJsZQ== 74916
+aW9zaw== 74917
+a2VyamE= 74918
+aW5ndA== 74919
+IEZpZnR5 74920
+IGFjY291bnRhbnQ= 74921
+IEV4cGxvcmF0aW9u 74922
+X2Jyb2FkY2FzdA== 74923
+IGV4dHJhb3JkaW5hcmlseQ== 74924
+IGtvdA== 74925
+IGNpcmN1bWZlcmVuY2U= 74926
+cm91Y2g= 74927
+W0Jvb2xlYW4= 74928
+Y3Jhd2xlcg== 74929
+L3JlbW92ZQ== 74930
+YXJlbGxh 74931
+IHNleGVz 74932
+SGludHM= 74933
+IGdhbWI= 74934
+IGRhcmVk 74935
+dGVzdGVk 74936
+X0tFRVA= 74937
+IGZpbHRyYXRpb24= 74938
+aWNrZXk= 74939
+IEluZmx1ZW5jZQ== 74940
+IHNwZWNpZmljaXR5 74941
+X0lEUw== 74942
+IFJvZG5leQ== 74943
+X0lSUUhhbmRsZXI= 74944
+T25FcnJvcg== 74945
+IHByZXZTdGF0ZQ== 74946
+aWVnZWw= 74947
+IExFU1M= 74948
+IGF3YWtlRnJvbU5pYg== 74949
+IExV 74950
+dW1hYmx5 74951
+b3J0YWxpdHk= 74952
+IG1hbmRhdGVz 74953
+CXZlcnNpb24= 74954
+IHBhcmVudE5vZGU= 74955
+IHBlc3Rz 74956
+IGNhc2M= 74957
+Y2VwdGFy 74958
+IFdvb2R5 74959
+ZXJlZQ== 74960
+X3Bm 74961
+LlBPUw== 74962
+aXN0cmE= 74963
+bGV3 74964
+WWFuZw== 74965
+IHN5c3RlbWQ= 74966
+IHJvYW0= 74967
+LkdyYXk= 74968
+IGNvbmR1 74969
+4oCUaW5jbHVkaW5n 74970
+VmlvbGF0aW9u 74971
+TWFob24= 74972
+IE1VU0lD 74973
+IFNpcmk= 74974
+IEVudGVyZWQ= 74975
+IGNlcnRhaW5z 74976
+ZWxhaA== 74977
+CU1haW4= 74978
+LkRhdGVGaWVsZA== 74979
+LkhlYWx0aA== 74980
+IEthc2ljaA== 74981
+IGNhbmluZQ== 74982
+PXJvb3Q= 74983
+dWRkbGU= 74984
+XGNvbW1vbg== 74985
+IFN1bHRhbg== 74986
+ZmluYW5jaWFs 74987
+IFFTcWw= 74988
+IGFzY2VudA== 74989
+IHBydWViYQ== 74990
+emllaHVuZw== 74991
+LmdldEVycm9y 74992
+IEdsb3JpYQ== 74993
+RWNobw== 74994
+X0NIT0lDRVM= 74995
+X2Vwcw== 74996
+L3Byb3ZpZGVy 74997
+UEhPTkU= 74998
+5YWz6Zet 74999
+IGNvbXByb21pc2luZw== 75000
+X0FQUFJP 75001
+UHJvY2Vzc0V2ZW50 75002
+IGJ5dGVBcnJheQ== 75003
+IENydWM= 75004
+wqg= 75005
+IGljaW5n 75006
+IFBDTQ== 75007
+dmVjdA== 75008
+QW15 75009
+IFZhY3V1bQ== 75010
+aW5jaWRlbnQ= 75011
+IHVzZXJu 75012
+emJlaw== 75013
+XSspLw== 75014
+IH19Ij48 75015
+IEdldERhdGE= 75016
+Y250bA== 75017
+IHNhZ3Q= 75018
+X1BSSU1BUlk= 75019
+IGxlcg== 75020
+IEZVQ0s= 75021
+IFN0YXJy 75022
+SUg= 75023
+w7ZycGVy 75024
+eW1z 75025
+XSldCg== 75026
+L3Rvb2w= 75027
+Y29tYmluYXRpb24= 75028
+IHRhbXA= 75029
+IEJlaXQ= 75030
+IE5JR0hU 75031
+IGFubsOpZQ== 75032
+KGFt 75033
+XFRyYWl0cw== 75034
+Olwi 75035
+IGNhcmdh 75036
+LmlkZQ== 75037
+IGRpa2tl 75038
+Q29tcGV0 75039
+IHNjb290ZXI= 75040
+IHhQb3M= 75041
+KGludGVycA== 75042
+IGhhc2ls 75043
+Y2xpZA== 75044
+IGhldXJlcw== 75045
+Z2xvbWVy 75046
+c2hhcmVz 75047
+77yMCgo= 75048
+cG9uZGU= 75049
+4bqjaQ== 75050
+X2R1cGxpY2F0ZXM= 75051
+c29uZ3M= 75052
+fV07Cg== 75053
+IFNuaXBlcg== 75054
+IFRodXI= 75055
+cm9wcA== 75056
+IGdydWVz 75057
+IG9yZXM= 75058
+dXNoaW1h 75059
+IHVzYWJpbGl0eQ== 75060
+6ZKf 75061
+L21lbWJlcg== 75062
+b2xkZW1vcnQ= 75063
+SXNBY3RpdmU= 75064
+R2V0RW51bWVyYXRvcg== 75065
+bXV4 75066
+V0lORE9XUw== 75067
+TmVnYXRpdmVCdXR0b24= 75068
+4Liz 75069
+LW1ha2Vycw== 75070
+44Kk44Oz 75071
+IEJlcm0= 75072
+QnlFeGFtcGxl 75073
+IFLDvGNr 75074
+U2hvd3M= 75075
+Z2hp 75076
+IElocmVy 75077
+IENydWQ= 75078
+Y2hlZg== 75079
+X2F1Yw== 75080
+IGFww7Nz 75081
+YW5rYW4= 75082
+IEtERQ== 75083
+SUxMUw== 75084
+IGFuZ2xhaXM= 75085
+LXJlZnJlc2g= 75086
+CXJhbmdl 75087
+eG1t 75088
+KGVkZ2Vz 75089
+IGFwcGVs 75090
+Ijt9 75091
+IGVkaQ== 75092
+IHN3b2xsZW4= 75093
+IGJ1dGNoZXI= 75094
+aWNpZGVz 75095
+aG91bmQ= 75096
+IF4o 75097
+IEV2YWx1 75098
+IGtleWJvYXJkVHlwZQ== 75099
+U1NJRA== 75100
+cm9iYXQ= 75101
+IG5paw== 75102
+IHN0cmF3YmVycmllcw== 75103
+XCJd 75104
+bm9zaXM= 75105
+TUVE 75106
+54g= 75107
+5LqU 75108
+aW1heA== 75109
+XEFubm90YXRpb24= 75110
+IG51cnU= 75111
+IE1pbmltYWw= 75112
+IHdvcmRwcmVzcw== 75113
+IGNvbGRlcg== 75114
+CXBhcnNl 75115
+L3N0cmV0Y2g= 75116
+5omn6KGM 75117
+cm9tb3NvbWU= 75118
+RElN 75119
+IHRlbnRhdGl2ZQ== 75120
+Ok5TVVRG 75121
+LGltZw== 75122
+IE1BVEVSSUFM 75123
+IEpldEJyYWlucw== 75124
+TGVnZW5kYXJ5 75125
+CXN0cm5jcHk= 75126
+IGRlZnM= 75127
+TnVtYmVyRm9ybWF0RXhjZXB0aW9u 75128
+IGJ5dGVjb2Rl 75129
+IHdpc3Nlbg== 75130
+X01PUkU= 75131
+oO2DnQ== 75132
+IENvZmY= 75133
+LkNvbmRpdGlvbg== 75134
+IGTDqXBhcnQ= 75135
+ZHNu 75136
+IHBhcmFtZXRybw== 75137
+XEw= 75138
+Lm5hbm9UaW1l 75139
+Qk9UVE9N 75140
+LldoYXQ= 75141
+64Q= 75142
+IERpeA== 75143
+X0RB 75144
+KENvbnRhaW5lcg== 75145
+YXlhcg== 75146
+RmxleGlibGU= 75147
+LlJheWNhc3Q= 75148
+IEVkd2lu 75149
+W3VybA== 75150
+wpI= 75151
+LnN0cm9rZVN0eWxl 75152
+IFBvbHlub21pYWw= 75153
+aWxpdGF0aW5n 75154
+IFFWQm94TGF5b3V0 75155
+KHJlcA== 75156
+LnZu 75157
+LWFzc2V0cw== 75158
+Q0hBU0U= 75159
+IEVzc2VudGlhbHM= 75160
+anlsbGFuZA== 75161
+IGF4cw== 75162
+IFRyZW0= 75163
+Lm1haW5sb29w 75164
+IFdJTkRPV1M= 75165
+LlJFUVVFU1Q= 75166
+IHJlaW50 75167
+IExpYnJl 75168
+Y2hlb24= 75169
+IGd1ZXJy 75170
+CU5kckZjU2hvcnQ= 75171
+LnNvZnRtYXg= 75172
+IEFzdXM= 75173
+LXNjb3Jl 75174
+IEpPSE4= 75175
+PlN0YXR1cw== 75176
+PkVkaXQ= 75177
+IENhbWU= 75178
+IEFzaGU= 75179
+X3VzaW5n 75180
+IExvbmU= 75181
+IGxlc2Vu 75182
+IHJldmVyc2luZw== 75183
+bmdyeA== 75184
+LnNpZ25hdHVyZQ== 75185
+LUFzc2Fk 75186
+L25hdGl2ZQ== 75187
+X3JhdGluZ3M= 75188
+IG55YQ== 75189
+IGFkaWRhcw== 75190
+KG9wdGlvbmFs 75191
+Il0o 75192
+IHJlY3VycmVuY2U= 75193
+IEJNUA== 75194
+z4w= 75195
+X2dw 75196
+Ij5c 75197
+X3dyb25n 75198
+eXBz 75199
+LlByb3h5 75200
+X1VEUA== 75201
+UXRDb3Jl 75202
+TGlua2VkSW4= 75203
+IGNhdmVybg== 75204
+IHNww6ljaWFs 75205
+X3dpcmU= 75206
+IG5hbm9w 75207
+LmJhbGw= 75208
+IHJlZHVjZXJz 75209
+IG1haWxlZA== 75210
+ZG9uZw== 75211
+IG9wcG9zZXM= 75212
+IEhhbnNvbg== 75213
+IFNhdHVyZGF5cw== 75214
+YWNvbW1lbnQ= 75215
+X01ldGFEYXRh 75216
+IEdhbGFjdGlj 75217
+KCIvIik= 75218
+IENsZWFuZXI= 75219
+X1RFUk0= 75220
+IGNsYXJv 75221
+Lk9VVA== 75222
+5a6h 75223
+IHNsaWs= 75224
+IGplZG5haw== 75225
+SGFuZGxlckNvbnRleHQ= 75226
+IGlycmFkaQ== 75227
+ICAgICAgICAgICAgICAgICAgICAgICAgIAo= 75228
+LnRpZ2h0 75229
+QnJlYWRjcnVtYg== 75230
+ZnJleQ== 75231
+IOqwneyytA== 75232
+bGJyYWNl 75233
+TEVHQUw= 75234
+LWd1bg== 75235
+IEJsb2dz 75236
+IFNoaXJsZXk= 75237
+IFB1bmU= 75238
+dXJzaW9ucw== 75239
+IHN1YnRyYWN0aW9u 75240
+ICoqKgo= 75241
+YXJtYWN5 75242
+IHNhbXQ= 75243
+PSIpLg== 75244
+IHBlcm1pc3NpYmxl 75245
+KHJk 75246
+IFdBVEVS 75247
+IHByb2Zlc2lvbmFs 75248
+IGhhbmRib29r 75249
+IG1vdXJuaW5n 75250
+YXJlZmE= 75251
+IGFzbg== 75252
+aXNleA== 75253
+IGNvbnRlbnU= 75254
+IFVOQw== 75255
+LmdldFByaWNl 75256
+IFB1bXBraW4= 75257
+LwoKCg== 75258
+IGNvc2luZQ== 75259
+IG5pZWQ= 75260
+IEJyYWtl 75261
+RGF0YVVSTA== 75262
+IERhdGFHcmlkVmlld0NlbGxTdHlsZQ== 75263
+IFJldHVybmVk 75264
+ZXdvb2Q= 75265
+aXF1w6k= 75266
+IGJsZWFr 75267
+IHdlYmhvb2s= 75268
+LlRoZXk= 75269
+YXJi 75270
+TEFOR0FETQ== 75271
+X29yZGVyZWQ= 75272
+IHByYW5r 75273
+Lk5ld1JlcXVlc3Q= 75274
+IGxpdGVyYWxz 75275
+J30+Cg== 75276
+c2VyaWFsaXplZA== 75277
+a3Rvcg== 75278
+KHJ4 75279
+IGdldFk= 75280
+CVN0cmluZ0J1ZmZlcg== 75281
+KHNsaWNl 75282
+cmJyYWNl 75283
+ZW1lbnRv 75284
+IGxhbmM= 75285
+RGVwbG95bWVudA== 75286
+IGNvbmNlbnRyYXRpbmc= 75287
+U2tldGNo 75288
+IGJyaWdodGx5 75289
+QmVnaW5uaW5n 75290
+IERhaA== 75291
+VGs= 75292
+SW5zZW5zaXRpdmU= 75293
+IHNhYmU= 75294
+KE1vZHVsZQ== 75295
+IGNlZGFy 75296
+X2NvbnRpbnVl 75297
+IHdpdGhPYmplY3Q= 75298
+IGNvbHVtbmE= 75299
+IENhbGRlcg== 75300
+INC/0L7QvA== 75301
+X3NvZnRj 75302
+c2hhbGVk 75303
+ZXJ0YXRpb24= 75304
+CSAgICAgICAgICAgICAgICAgICAgICAgICAgIA== 75305
+OkAiIg== 75306
+IGZhw6dvbg== 75307
+dXN0dW0= 75308
+c3Rr 75309
+X0NSQw== 75310
+b2R6aQ== 75311
+IGFzY2VuZA== 75312
+Zmdhbmc= 75313
+IHByZWZhYg== 75314
+IGZpbmRldA== 75315
+Oicr 75316
+5Y2V5L2N 75317
+dW1ibGVkb3Jl 75318
+LmludmFsaWRhdGU= 75319
+IHRvaQ== 75320
+YW5nZXBpY2tlcg== 75321
+X0FJ 75322
+aGls 75323
+U2VhdA== 75324
+IHBpc3Rvbg== 75325
+Zmli 75326
+X2JsdWVwcmludA== 75327
+44K4 75328
+X1JlY29yZA== 75329
+cmV0cw== 75330
+RnJhbg== 75331
+IENhaXQ= 75332
+IHBlbGlj 75333
+IGRuYQ== 75334
+IHVwZGF0ZVRpbWU= 75335
+IC9eWw== 75336
+IHJhbGxpZWQ= 75337
+IEhpbWFs 75338
+U1NJ 75339
+X3BsYW5lcw== 75340
+IE91dHN0YW5kaW5n 75341
+QXBwbGljYXRpb25CdWlsZGVy 75342
+c3R1ZA== 75343
+X2xvY2F0b3I= 75344
+IGFib2xpdGlvbg== 75345
+ICgkKQ== 75346
+amVybmU= 75347
+IEFBQw== 75348
+L3dpbmRvd3M= 75349
+LUNhbA== 75350
+X1NFQ09ORFM= 75351
+ICcnfQo= 75352
+w6FueQ== 75353
+IHl1bW15 75354
+5omL5py65Y+3 75355
+IFZHQQ== 75356
+aWxhdGU= 75357
+IFN1cnZlaWxsYW5jZQ== 75358
+CUd0aw== 75359
+8J+Y 75360
+IHNoaW1tZXI= 75361
+YWx0ZXJuYXRl 75362
+Rm9yU2VndWU= 75363
+dWVzdHJh 75364
+LWNvdmVy 75365
+YXNs 75366
+IEluc2V0cw== 75367
+bGlqYWg= 75368
+OlM= 75369
+CWNhdGVnb3J5 75370
+IGZq 75371
+w61saWE= 75372
+IE1BRA== 75373
+QGpz 75374
+5p8= 75375
+IHBvb2xlZA== 75376
+IHRyZWF0aWVz 75377
+IEJpaw== 75378
+IEhhemVs 75379
+QWxsb2NhdGU= 75380
+IGFpcnBsYW5lcw== 75381
+IHNlcm1vbg== 75382
+IFBvc2l0aW9ucw== 75383
+IE1BSUw= 75384
+U3RvcHBpbmc= 75385
+YXZvcmVk 75386
+KFRlbXA= 75387
+IGNoZWF0cw== 75388
+LnVzZXJJRA== 75389
+IHB1dGE= 75390
+LXl5eXk= 75391
+VWlUaHJlYWQ= 75392
+IG9mc3RyZWFt 75393
+XFNlZWRlcg== 75394
+IENvdHRhZ2U= 75395
+IF4K 75396
+IEFMVEVS 75397
+IHF1YW50aWZ5 75398
+cmVpYnVuZw== 75399
+IG5lY2Vzc2l0aWVz 75400
+LkxvY2FsRGF0ZQ== 75401
+IOaXpQ== 75402
+cGljdHVyZXM= 75403
+IGNydWQ= 75404
+5pyo 75405
+IGRvd250dXJu 75406
+YWN0b3Jpbmc= 75407
+IERlcm0= 75408
+IGVzdHJ1Y3Q= 75409
+IE11c2lr 75410
+IG1seA== 75411
+Lm1ham9y 75412
+Lkh0dHBTZXNzaW9u 75413
+Pzw= 75414
+eWVhaA== 75415
+IG1vam8= 75416
+IFVuaXR5RWRpdG9y 75417
+IHJha2U= 75418
+X3R3ZWV0 75419
+IHJhZGlvQnV0dG9u 75420
+IERvbWluaW9u 75421
+YXNTdHJpbmc= 75422
+b3p5 75423
+IHZvZGth 75424
+b2dsb2I= 75425
+IEFsdW1uaQ== 75426
+YmFsYW5jZXM= 75427
+X21hbnVhbA== 75428
+LmxvYWR0eHQ= 75429
+X2ZyaWVuZHM= 75430
+IFhtbERvY3VtZW50 75431
+W2ZpcnN0 75432
+S2V5Q29kZQ== 75433
+IHBvZXRpYw== 75434
+bWluYQ== 75435
+IG9wY2lvbmVz 75436
+5omT 75437
+X3N1cHBsaWVy 75438
+LkZyb21SZXN1bHQ= 75439
+X2Rpc3RyaWN0 75440
+IEdhbGE= 75441
+LnF0 75442
+IGNvbnRyYWN0dWFs 75443
+YWNvbnM= 75444
+LWFuY2hvcg== 75445
+IHl1cA== 75446
+IHVuYW5zd2VyZWQ= 75447
+IG1heGxlbg== 75448
+RXJyTXNn 75449
+LXNu 75450
+IGh5cG5vdA== 75451
+X1dN 75452
+KCldWw== 75453
+IGRlc2VydmluZw== 75454
+b3dtZW50 75455
+KFJhbmRvbQ== 75456
+IHZldG9y 75457
+IElTVA== 75458
+0LDQvdC0 75459
+LWxhbmc= 75460
+IHNpaw== 75461
+Y3JlYXNpbmc= 75462
+IHBvcnRhbHM= 75463
+IEJ1bGxkb2dz 75464
+cHJvbW8= 75465
+IHByb3Zva2Vk 75466
+XX07Cg== 75467
+IEliaWQ= 75468
+ZXJnbGFzcw== 75469
+X1dJRkk= 75470
+YXBwcm9wcmk= 75471
+IHJlZGVzaWduZWQ= 75472
+IC8vLS0tLS0tLS0tLS0tLS0tLQ== 75473
+emlr 75474
+JG8= 75475
+dWx0b24= 75476
+IFJlbGF0aXZlcw== 75477
+IG1ldHJvcw== 75478
+IG1lbnRvcmluZw== 75479
+YXTEgw== 75480
+dXNobWFu 75481
+IGluaGVyaXRz 75482
+IFJ0 75483
+L3ByZWZlcmVuY2Vz 75484
+aW1lZA== 75485
+Sk9JTg== 75486
+KGludGVyZmFjZQ== 75487
+IGFkZXB0 75488
+IE9mZmVuc2l2ZQ== 75489
+IEFHUkU= 75490
+b25pYW4= 75491
+LnBhcnNlcnM= 75492
+IHBhc3NwaHJhc2U= 75493
+IHVuc2VyaWFsaXpl 75494
+VmlzaXRlZA== 75495
+IGdldFByb3BlcnR5 75496
+IG5vYw== 75497
+ZWRhZA== 75498
+ICMtfQoK 75499
+dmlkYQ== 75500
+c29sdmVy 75501
+IE1vcmFsZXM= 75502
+IGt2aW5uZQ== 75503
+IEFjY2lkZW50 75504
+IHZldXQ= 75505
+IG1pc2d1aWRlZA== 75506
+IFJldmVsYXRpb24= 75507
+IHJhcGlkZQ== 75508
+cHVuaw== 75509
+Iy0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0= 75510
+T2JqZWN0SWQ= 75511
+YWJpbmV0 75512
+ZXh0cmFjb21tZW50 75513
+IGJ1bm55 75514
+IERlZmVycmVk 75515
+dXR0YQ== 75516
+dWFl 75517
+YnVzdGVycw== 75518
+IFNvaWw= 75519
+R1NU 75520
+LkN1cnJlbnRSb3c= 75521
+44GR 75522
+IGdyYXR1aXRz 75523
+IGNydWlzZXI= 75524
+15E= 75525
+IFRlbm4= 75526
+anNj 75527
+IO2VhA== 75528
+ZGlzcG9zZWQ= 75529
+QUJPVVQ= 75530
+fQ0NCg== 75531
+ZXhwaXJlZA== 75532
+IFhtbE5vZGU= 75533
+IFRhdHRvbw== 75534
+Vm90ZXM= 75535
+Rm9sZA== 75536
+RWxpemFiZXRo 75537
+X0ZJTEVOTw== 75538
+IGNvbmNv 75539
+IEdkaw== 75540
+b3BpZXM= 75541
+fX19 75542
+UVVPVEU= 75543
+LUlJ 75544
+c3BhbQ== 75545
+LWxp 75546
+IGNhcnRh 75547
+LmxheW91dHM= 75548
+IGJlc3Bva2U= 75549
+IGFtYXRldXJz 75550
+IGNvdWxldXI= 75551
+aXRhbWlu 75552
+IGlycmVzcGVjdGl2ZQ== 75553
+IGJsYWNrQ29sb3I= 75554
+LnlhaG9v 75555
+IHdlYXJ5 75556
+IHN3ZWV0cw== 75557
+PyI7Cg== 75558
+PVwiJQ== 75559
+X3dvcmtzcGFjZQ== 75560
+IERpYW1ldGVy 75561
+IGFtZA== 75562
+IE5ldWU= 75563
+IGRiTmFtZQ== 75564
+SmVyZW15 75565
+bG9nZmlsZQ== 75566
+YXRyaWI= 75567
+IEh0dHBTZXNzaW9u 75568
+CUNyZWF0ZQ== 75569
+aWRkeQ== 75570
+LlBBUkFN 75571
+IGZpYW4= 75572
+IHN6Y3o= 75573
+IHFyZWFs 75574
+X0VTQ0FQRQ== 75575
+dXNhaGFhbg== 75576
+LmRpZ2VzdA== 75577
+IGdldFBhcmVudA== 75578
+LkRyb3BEb3duTGlzdA== 75579
+IHRow6k= 75580
+IG1vbnN0cm91cw== 75581
+IGJlcmhhc2ls 75582
+IiIiDQoNCg== 75583
+U3VwcG9ydGVkQ29udGVudA== 75584
+IEdhdGhlcmluZw== 75585
+aW5jeQ== 75586
+LktleUNvZGU= 75587
+IGZldHVz 75588
+LmNlbnQ= 75589
+IGJlc29uZGVycw== 75590
+bmlsYWk= 75591
+TFRSQg== 75592
+IGhpbmdl 75593
+UFJPUA== 75594
+LmZvdW5kYXRpb24= 75595
+bnVtZXI= 75596
+LXJhbmtlZA== 75597
+6I0= 75598
+IHBhaW5mdWxseQ== 75599
+ICg7Oyk= 75600
+Zm9ybWU= 75601
+TGFkeQ== 75602
+L2FwcGxl 75603
+IENvbnN0aXQ= 75604
+IHN0b2NraW5ncw== 75605
+5rS7 75606
+IG1lbnRvcnM= 75607
+PkNyZWF0ZQ== 75608
+IEludGVybmFsRW51bWVyYXRvcg== 75609
+IHRlbGV2aXNlZA== 75610
+VG9rZW5UeXBl 75611
+IGJyaWI= 75612
+Y3JlYXRlVmlldw== 75613
+L0RURA== 75614
+R2l0SHVi 75615
+KGJpZw== 75616
+IG3DoXhpbW8= 75617
+5b6u6L2v6ZuF6buR 75618
+LmNm 75619
+IMKgIMKgIMKgIMKg 75620
+PHR5cGVvZg== 75621
+IHByb2dyZXNzaW5n 75622
+LnNldFdpZHRo 75623
+KHR2 75624
+IHVuZmFpcmx5 75625
+IEFuaXRh 75626
+YXJ5YXdhbg== 75627
+RGFs 75628
+VVJZ 75629
+b2dlbmVpdHk= 75630
+ZWZh 75631
+LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq 75632
+IGRlamE= 75633
+T1NF 75634
+cmFpbA== 75635
+cm9vZg== 75636
+X3F1b3Rlcw== 75637
+PGo= 75638
+44Ko 75639
+KHNldHRpbmc= 75640
+bGV2ZWxuYW1l 75641
+X2hhbmRsaW5n 75642
+w6lyYQ== 75643
+JGo= 75644
+IGRhcmxpbmc= 75645
+LlBhdGhWYXJpYWJsZQ== 75646
+W3NvdXJjZQ== 75647
+TWV0aG9kTmFtZQ== 75648
+IE91dGxldA== 75649
+5pKt 75650
+IENvY29h 75651
+VWJ1bnR1 75652
+IG1vb2ll 75653
+IGZsb3JpZGE= 75654
+IHJldGhpbms= 75655
+IGdldFg= 75656
+Z2V0RWxlbWVudA== 75657
+IHJhZGl4 75658
+IEdhbWVy 75659
+ZGVhbGxvYw== 75660
+bGVmdEpvaW4= 75661
+X1NZTg== 75662
+R3JpZExheW91dA== 75663
+Imdv 75664
+KGVhY2g= 75665
+CXNjZW5l 75666
+IFB5RXJy 75667
+SG93YXJk 75668
+LlNpZ25hbA== 75669
+IFRFTQ== 75670
+IOen 75671
+VkVOVE9SWQ== 75672
+IHNpbXVs 75673
+IDw8LQ== 75674
+IHR1cmJpbmVz 75675
+IHN1cnRvdXQ= 75676
+YWx0bw== 75677
+IHVuYXJ5 75678
+YA0K 75679
+IFNjcmk= 75680
+IE1vbms= 75681
+IHVuZm9sZGVk 75682
+Q29tcG9zaXRpb24= 75683
+UFBFUg== 75684
+IHNpZGluZw== 75685
+Jyx7Jw== 75686
+IHRyZWZm 75687
+X1VOSUNPREU= 75688
+IGRlcmVjaG8= 75689
+IHBvbGFyaXR5 75690
+IG9yYw== 75691
+PERvY3VtZW50 75692
+KHRvZGF5 75693
+LikKCgoK 75694
+IHNlZW1pbmc= 75695
+XFY= 75696
+PklE 75697
+IGZpYm9uYWNjaQ== 75698
+KG1hdGVyaWFs 75699
+RkxBU0g= 75700
+ZGlyZWN0b3JpZXM= 75701
+ZXN0ZXJz 75702
+VEVDVElPTg== 75703
+d3JhcHBlZA== 75704
+LXNlbGVjdGlvbg== 75705
+LXJlbGF0aXZl 75706
+KGNocg== 75707
+IHBvcnRmb2xpb3M= 75708
+IHNob3dEaWFsb2c= 75709
+aW5nbGV0b24= 75710
+IFRJQ0s= 75711
+IEludmVzdG9y 75712
+IGJyYXY= 75713
+IFNWTg== 75714
+IGhhdGVmdWw= 75715
+cmlwcw== 75716
+ZXhwaXJ5 75717
+X2NvaW4= 75718
+PgoKCgoK 75719
+IG1hcmdpbmFsaXplZA== 75720
+IGV4Y2VlZGluZ2x5 75721
+bmF2YmFyU3VwcG9ydGVkQ29udGVudA== 75722
+KGV4dGVuc2lvbg== 75723
+IGFkdmFudGFnZW91cw== 75724
+Lk1pY3Jvc29mdA== 75725
+IGVuc3VpdGU= 75726
+LXZpb2w= 75727
+X2R1ZQ== 75728
+S0g= 75729
+IFJvbWFudGlj 75730
+aW5hbmQ= 75731
+ZWNp 75732
+cmVwb3J0ZWQ= 75733
+IENvcnB1cw== 75734
+IHNwYW5raW5n 75735
+IENyb3NieQ== 75736
+LkZvdW5kYXRpb24= 75737
+XF8= 75738
+IGFubm9uY2Vz 75739
+QXR0YWNobWVudHM= 75740
+4Liy4Lij 75741
+IFdheA== 75742
+77yB77yBCgo= 75743
+IHNhaWxlZA== 75744
+LkV1bGVy 75745
+CXNjcm9sbA== 75746
+IHBlYXNhbnRz 75747
+IEJ1aWxkZXJz 75748
+LkdlbmVyYWw= 75749
+QVJFQQ== 75750
+IG1lc3Npbmc= 75751
+dmVybg== 75752
+IGRpYXBlcg== 75753
+IG9jY3VwaWVz 75754
+CWxvZ2lu 75755
+LkxPQw== 75756
+aWdhbnM= 75757
+77yB4oCd 75758
+X2Zvb3Q= 75759
+X3RhdQ== 75760
+LXBhY2thZ2Vz 75761
+cmVjdXI= 75762
+QWx0ZXJuYXRpdmU= 75763
+77yB44CN 75764
+YXJvbw== 75765
+IHRydXN0ZWU= 75766
+LDpd 75767
+5pa55byP 75768
+Pz4+ 75769
+Lk1pbnV0ZQ== 75770
+IGFsY2Fu 75771
+IENvbmNlcHRz 75772
+Y2hpbGROb2Rlcw== 75773
+Q291cnQ= 75774
+IGNlbGxhcg== 75775
+bGVr 75776
+YWtpcw== 75777
+QnViYmxl 75778
+IG9iamVjdGVk 75779
+IO+7vw== 75780
+Ol06Cg== 75781
+LnBhcnNlRmxvYXQ= 75782
+IHNwYXJrcw== 75783
+LWZpbmQ= 75784
+dmFyaWF0aW9u 75785
+SGFjaw== 75786
+RmFucw== 75787
+X3BhcnNlZA== 75788
+RW50aXR5VHlwZQ== 75789
+YXVjZQ== 75790
+X3RyZWVz 75791
+IEVnZ3M= 75792
+VUlCYXJCdXR0b25JdGVt 75793
+X3RheG9ub215 75794
+IFNIT1A= 75795
+VHdlbnR5 75796
+X2NoZWNrcw== 75797
+IExY 75798
+dXRzY2hlaW4= 75799
+KHBsYXRmb3Jt 75800
+IGF1dG9wc3k= 75801
+UmVxdWlyZW1lbnQ= 75802
+IFJFQ1Q= 75803
+dG9Db250YWlu 75804
+JywnJQ== 75805
+L2VkaXRvcg== 75806
+IHFi 75807
+IEVFRw== 75808
+aHRh 75809
+X1RJTEU= 75810
+LXN1bQ== 75811
+IEFsYnVxdWVycXVl 75812
+IHNob3J0Y29kZQ== 75813
+IHNpbnVz 75814
+IGRlc2tz 75815
+IHBvb3A= 75816
+Lm9wZW5zb3VyY2U= 75817
+IENvbGxhcHNl 75818
+LmRlcg== 75819
+IGhhd2s= 75820
+IFZhbmd1YXJk 75821
+IE1hcnJpb3R0 75822
+X1RhcmdldA== 75823
+IEJhbmFuYQ== 75824
+X2F0dGVudGlvbg== 75825
+IEFyaWVs 75826
+X3Rlbg== 75827
+IGJha2Vy 75828
+4oCUaGU= 75829
+xIXFvA== 75830
+dmVsb3BtZW50 75831
+RWxm 75832
+X2djaGFuZGxl 75833
+UmVwdWJsaWNhbnM= 75834
+IGl0ZW1CdWlsZGVy 75835
+V29u 75836
+X2FjY3Vt 75837
+IG5ld1Bhc3N3b3Jk 75838
+IGRldm9pZA== 75839
+IE1hcmt1cw== 75840
+ZGFlbW9u 75841
+Lkh0dHBDb250ZXh0 75842
+S3Jpc3Q= 75843
+IGFhbGJvcmc= 75844
+X3RyaWFscw== 75845
+KGFzc2VydA== 75846
+44Gj44Gm 75847
+YmVsdA== 75848
+IG1pbGRseQ== 75849
+ZXJ2b2ly 75850
+IGRlc2NlbmRhbnQ= 75851
+IEdpb3Zhbm5p 75852
+IGRlY2x0eXBl 75853
+LVNoaXJ0 75854
+IGFwcm8= 75855
+QXBwbGllZA== 75856
+LmdldFBhcmFt 75857
+aG9m 75858
+dXJhcg== 75859
+IE9CUw== 75860
+X3Nlcg== 75861
+KHNlY3JldA== 75862
+W2xheWVy 75863
+IHVzZWZ1bG5lc3M= 75864
+IEtvdQ== 75865
+X3N1Ym1pc3Npb24= 75866
+X0hPUklaT05UQUw= 75867
+LHRtcA== 75868
+Ly4K 75869
+IGxlc3Nlbg== 75870
+X3dj 75871
+X0ZJTkFM 75872
+0L3QvtC/ 75873
+LnRvZG9z 75874
+LlhQYXRo 75875
+IElEYXRh 75876
+IGRvb3JzdGVw 75877
+IGNvbXBvc2luZw== 75878
+IGh1dA== 75879
+IFZMQU4= 75880
+IG91dGY= 75881
+6K+l 75882
+KGJldGE= 75883
+KioqLwoK 75884
+IEluZG8= 75885
+IGtsYQ== 75886
+X2NvbmZpZ3VyZQ== 75887
+Lk1hcms= 75888
+b3NlY29uZHM= 75889
+KFZlcnRleA== 75890
+b3JnYW5pc21z 75891
+IGZmbQ== 75892
+IGRlbW9saXNoZWQ= 75893
+ICItLS0= 75894
+bGVzaQ== 75895
+IFNpZG5leQ== 75896
+LmdldEluZGV4 75897
+Lk1vbmFk 75898
+U2VsZWN0ZWRJdGVt 75899
+IE5hdlBhcmFtcw== 75900
+YXpvbGU= 75901
+QUJDREVGR0hJSktMTU5PUFFSU1RVVldYWVo= 75902
+X3NlbnRlbmNlcw== 75903
+IGluY2xpbmF0aW9u 75904
+IEZhdGhlcnM= 75905
+YWNjb3VudElk 75906
+aGFyaQ== 75907
+KT4K 75908
+L3Jhdw== 75909
+ICcnKTsKCg== 75910
+K2w= 75911
+KGNk 75912
+IHVuemlw 75913
+IGdsYW1vcm91cw== 75914
+IyIs 75915
+IG5hdw== 75916
+IG1pbmli 75917
+IEJyYW4= 75918
+TmFjaA== 75919
+X3R3ZWV0cw== 75920
+IENDUA== 75921
+JSI+PA== 75922
+IFN0ZXBoZW5z 75923
+bWFzxLE= 75924
+J2Vz 75925
+IHJlcGFy 75926
+X2RvY3VtZW50cw== 75927
+LmNsb3NlZA== 75928
+LXJpbmc= 75929
+L2NhdGVnb3JpZXM= 75930
+IERlZXBDb3B5 75931
+U1VQ 75932
+Lm5ld2F4aXM= 75933
+IGdkeQ== 75934
+aG9l 75935
+IFJlZWY= 75936
+IHBvbGl0aWM= 75937
+IFJlcXVpcmVtZW50 75938
+IHNoZWRz 75939
+c2VhbGVk 75940
+IHBhdGhvbG9neQ== 75941
+Ii8+PA== 75942
+bW9kbw== 75943
+IHN0ZW1taW5n 75944
+IHRhYm9v 75945
+IFNhdmlvcg== 75946
+IH0NCg0KDQoNCg== 75947
+LmN2 75948
+IGpvdWV1cg== 75949
+IENvcm53YWxs 75950
+IFJlY2VwdGlvbg== 75951
+IGlsbHVtaW5hdGlvbg== 75952
+IGdkYg== 75953
+VkVD 75954
+b2R1 75955
+Q29udGVudEFsaWdubWVudA== 75956
+c3RhbnRpYWw= 75957
+YmFzZWxpbmU= 75958
+X2J1c3k= 75959
+LwoKCgo= 75960
+IHBsYXllcklk 75961
+5qM= 75962
+X3BldA== 75963
+IE1pcmFjbGU= 75964
+dXJlbnQ= 75965
+IE1lcmxpbg== 75966
+dWJlbg== 75967
+IHNldENvbG9y 75968
+IGRhcmtlc3Q= 75969
+c3Rlcnk= 75970
+IGNhcmlj 75971
+IHJldGFyZA== 75972
+IEhvdXNlaG9sZA== 75973
+IGphbA== 75974
+IHlw 75975
+IiwiIik7Cg== 75976
+IEFjZXI= 75977
+W1c= 75978
+b2xraWVu 75979
+YXlv 75980
+UHJpdmF0ZUtleQ== 75981
+IFNUQVRT 75982
+INC90YPQtg== 75983
+OicuJA== 75984
+IHRoYW5rZnVsbHk= 75985
+IGRpc3RydXN0 75986
+Z2V0RGVmYXVsdA== 75987
+L2ZhY2Vib29r 75988
+IENvbnJhZA== 75989
+IHV0aWxpemFuZG8= 75990
+IEthZw== 75991
+L25hbWU= 75992
+IGJhbWI= 75993
+LkZyb21TZWNvbmRz 75994
+IG11dGls 75995
+IExhZ29z 75996
+IEJsZXNzZWQ= 75997
+aWxsZWdhbA== 75998
+aWVp 75999
+X1RQ 76000
+IG1hdGxhYg== 76001
+IGN5Y2xpYw== 76002
+IHdpdGhoZWxk 76003
+IGhvcnJpYmx5 76004
+LWhvdXJz 76005
+LUhlYWRlcnM= 76006
+IG92ZXJsYXBz 76007
+IGN1YXRybw== 76008
+IGVxdWl0YWJsZQ== 76009
+IGNvbG9ybWFw 76010
+IHNoaW4= 76011
+IFN1aXRlcw== 76012
+X2x1YQ== 76013
+KHZv 76014
+X1JFU1VMVFM= 76015
+IFZpa3Rvcg== 76016
+RG93bmxvYWRpbmc= 76017
+bm9jaA== 76018
+TW9vbg== 76019
+IGRlY2lkZWRseQ== 76020
+44GU44GW 76021
+X1JQQw== 76022
+SW50ZXJwb2xhdG9y 76023
+IHZhbnM= 76024
+e1Q= 76025
+X3NwYXdu 76026
+IEV4eG9u 76027
+X0NhbGw= 76028
+IENsYXNzcm9vbQ== 76029
+IHNlcm90b25pbg== 76030
+IERpcGxvbWE= 76031
+YmVkdGxz 76032
+IFByb3RvdHlwZQ== 76033
+LmV4ZWN1dGlvbg== 76034
+IGRhdGluZ3NpZGU= 76035
+IEdva3U= 76036
+X3Jvb21z 76037
+4oCZYW0= 76038
+Z3JhZg== 76039
+YWNlb3Vz 76040
+IGFjY29tbW9kYXRpbmc= 76041
+fSwn 76042
+LmRpbWVuc2lvbg== 76043
+ZXJyb3JNc2c= 76044
+CW1lc2g= 76045
+RmlsbGVk 76046
+LnByZWZlcmVuY2U= 76047
+IHNtYXJ0eQ== 76048
+X2NvdXBvbg== 76049
+IMO2dmVy 76050
+IGNvbmNlaXZl 76051
+b2Rvbg== 76052
+ZGljZQ== 76053
+VG9EYXRl 76054
+YWRhbWVudGU= 76055
+LW1hc2s= 76056
+IGVzY2FsYXRpbmc= 76057
+4oCmKQoK 76058
+SW5SYW5nZQ== 76059
+X0Vt 76060
+IHV0aWxpemE= 76061
+IGxldnk= 76062
+PCFb 76063
+IEplbm5lcg== 76064
+IFJFU09VUkNF 76065
+X1NUQVJURUQ= 76066
+IHZvbGxleWJhbGw= 76067
+IG1nYQ== 76068
+IFJvc3Np 76069
+Q2hhbmNl 76070
+IEVuZGVk 76071
+LnVudGls 76072
+IGtub2Nrb3V0 76073
+X2V4ZQ== 76074
+IFByZXNjcmlwdGlvbg== 76075
+IENPVU5UWQ== 76076
+Lmhy 76077
+aWVyc2hpcA== 76078
+RVJWRQ== 76079
+6ak= 76080
+44Gn44Gv 76081
+IHBlcsOt 76082
+IGltZ1VybA== 76083
+ZWN4 76084
+IFd5bg== 76085
+CVJldHVybnM= 76086
+X2V5ZQ== 76087
+IEFnaW5n 76088
+cXVldWVz 76089
+IOWIneWni+WMlg== 76090
+LlNlcmlhbGl6ZWROYW1l 76091
+LmhvdXJz 76092
+IGlzZQ== 76093
+LkFjdG9y 76094
+5p2h5Lu2 76095
+YXBwbA== 76096
+VGFu 76097
+L2NhdGFsb2c= 76098
+L1Jlc291cmNlcw== 76099
+ZWxhbg== 76100
+KCd7ew== 76101
+IGluc24= 76102
+IG5vZGVOYW1l 76103
+IGNvb2tib29r 76104
+JywnPScsJw== 76105
+Uk9NRQ== 76106
+LnRlbXBsYXRlcw== 76107
+ZWN1cmU= 76108
+LWtleXM= 76109
+IGdsVW5pZm9ybQ== 76110
+IGdlw6c= 76111
+IFJlY292ZXI= 76112
+SURY 76113
+IEtyaXN0ZW4= 76114
+IHBvbnRvcw== 76115
+YD0nJA== 76116
+YXJnZW50 76117
+IGFycmFuZ2luZw== 76118
+6KiY5LqL 76119
+IGVybGU= 76120
+ZW5lZG9y 76121
+KCkpKTs= 76122
+w6Zra2U= 76123
+IEdpbGxlcw== 76124
+In0+Cg== 76125
+Lm1vdmllcw== 76126
+LXNlbGVjdG9y 76127
+LmxlYXJu 76128
+IHBvdGVuY3k= 76129
+IGZpbm8= 76130
+CWJn 76131
+IGxlaGV0 76132
+IGzDtg== 76133
+IGVybQ== 76134
+IGFzYmVzdG9z 76135
+IGRlc3Rl 76136
+IGJsb2NrYWRl 76137
+IFJPVU5E 76138
+IGxuYW1l 76139
+IFNlcGFyYXRl 76140
+w6RuZ2U= 76141
+IGZ1eno= 76142
+CVVO 76143
+X25vbWU= 76144
+X2xpbmtlZA== 76145
+IFNoYXJlUG9pbnQ= 76146
+aGF1c2Vu 76147
+IGxvYWY= 76148
+LWVjb25vbWlj 76149
+IGRpZEZpbmlzaA== 76150
+eWVu 76151
+IGJsYXN0aW5n 76152
+IFdlaXJk 76153
+SUNMRVM= 76154
+IEdGWA== 76155
+IHN1ZmZpY2U= 76156
+ZWJpbg== 76157
+IGFwcHJvdmluZw== 76158
+IFJleWVz 76159
+IFJUQUw= 76160
+aWdsaQ== 76161
+X3Rvaw== 76162
+b3Jkb3Zh 76163
+Q2FybA== 76164
+IFBsYXlz 76165
+bG9zc2Vu 76166
+cGFpcmVk 76167
+QUdNQQ== 76168
+d2nEhXo= 76169
+bGlua2VkaW4= 76170
+IGVnYWw= 76171
+KHByZWRpY2F0ZQ== 76172
+IFJFU1BPTlNF 76173
+IG1pblg= 76174
+IGNoYW5jZWxsb3I= 76175
+IFJFQ0VJVkVS 76176
+IGFzY2VydGFpbg== 76177
+IHplcg== 76178
+IFdvcmtzaGVldHM= 76179
+Tks= 76180
+IHZvd2Vs 76181
+dmFudA== 76182
+VVBT 76183
+4oCcLg== 76184
+IEhheWRlbg== 76185
+IFNwYXJ0YW4= 76186
+cmlnaHRz 76187
+LmdldElu 76188
+IGlubGFuZA== 76189
+IE5pbGU= 76190
+IFRyYW5zbGF0b3I= 76191
+IHJlY3RhbmdsZXM= 76192
+QnV0dG9uVHlwZQ== 76193
+IFNvbGlj 76194
+IHJhZ2F6emE= 76195
+L3RhZw== 76196
+IGlycmVzaXN0 76197
+I0VuZA== 76198
+KioqKioqKg0K 76199
+IHJlc3RyYWluZWQ= 76200
+IGNoaXJvcHI= 76201
+L1No 76202
+LWZsaWdodA== 76203
+Y29udmVydGVk 76204
+IHNraXJ0cw== 76205
+KGNoYXJz 76206
+JHZpZXc= 76207
+IGlucHV0RmlsZQ== 76208
+Z21haWw= 76209
+X0RJQUc= 76210
+IG51bWVs 76211
+IEdpbmE= 76212
+ZWxsdW5nZW4= 76213
+IHRheGE= 76214
+IGRyaXBwaW5n 76215
+PSIiLz4K 76216
+IGJvcmRlcmVk 76217
+IHRvdWdobmVzcw== 76218
+bGVuZXNz 76219
+IEJpZWJlcg== 76220
+X1dBS0U= 76221
+KGV0 76222
+IHNhbnTDqQ== 76223
+IFRFWA== 76224
+X0RJU0NPTk5FQ1Q= 76225
+IHBpZW4= 76226
+IEZvbnRTdHlsZQ== 76227
+X1VM 76228
+LXRvdGFs 76229
+d29sZg== 76230
+IE1hcml0aW1l 76231
+IE9QVElPTkFM 76232
+LXJlc3Q= 76233
+IG1lbWJ1YXQ= 76234
+IEJTT04= 76235
+X3NpbWlsYXJpdHk= 76236
+Lm92ZXJsYXk= 76237
+IHBhbGF0ZQ== 76238
+IEJyaWRnZXM= 76239
+QW5kUGFzc3dvcmQ= 76240
+IENoYXZleg== 76241
+aGV0dG8= 76242
+Lm9mZnNldEhlaWdodA== 76243
+IHVuZGVzaXJhYmxl 76244
+IGFwbGlr 76245
+IC8+XA== 76246
+LHRv 76247
+IHJlbW92ZXI= 76248
+IE1vZGVsaW5n 76249
+IHB1cmNoYXNlcg== 76250
+IENob29zaW5n 76251
+b3BsZWZ0 76252
+IG11dGFibGVMaXN0T2Y= 76253
+IFNpc3RlbWE= 76254
+IElQTA== 76255
+aWNrZXJWaWV3 76256
+SGFzQ29sdW1uVHlwZQ== 76257
+IHNvYmll 76258
+dWJlcm4= 76259
+IGFsdW5v 76260
+IGltYWdpbmF0aXZl 76261
+IEludGVyZXN0ZWQ= 76262
+KCl9PC8= 76263
+IGRpdmVyc2lvbg== 76264
+X3Rvb2x0aXA= 76265
+LlNhbXBsZQ== 76266
+IEZ1dHVyZXM= 76267
+Y29udGVuaWRv 76268
+IEVJTlZBTA== 76269
+KGVuY29kZWQ= 76270
+IFNoYXVu 76271
+CXBheWxvYWQ= 76272
+ZGVr 76273
+PllvdXI= 76274
+SXNv 76275
+VHJhdmVyc2Fs 76276
+aWNpZQ== 76277
+LmNyb3A= 76278
+IEpC 76279
+SU5HRVI= 76280
+IGV4ZW1wbGFyeQ== 76281
+X3JlbHU= 76282
+YW5uaXM= 76283
+0LXQt9GD0LvRjNGC0LDRgg== 76284
+Y2x1YnM= 76285
+4oaR 76286
+IHNjcmFtYmxl 76287
+IFVuYmxvY2s= 76288
+IGRvcnM= 76289
+IHNoYWNr 76290
+IG1pbmltaXppbmc= 76291
+IFBhc3Npbmc= 76292
+YWRkRWxlbWVudA== 76293
+4bud 76294
+IHJvb2Zz 76295
+IGpjbGFzcw== 76296
+Y29yZG92YQ== 76297
+UG9zWQ== 76298
+KENhbnZhcw== 76299
+KGZpbg== 76300
+LWxvc3M= 76301
+LmJ0bkNsb3Nl 76302
+ZG9jdW1lbnRhdGlvbg== 76303
+IFJK 76304
+YW1vbmc= 76305
+TW9z 76306
+bGluZ2Vu 76307
+IEFndQ== 76308
+b2x5bm9taWFs 76309
+XTw9 76310
+IGRpZmZpY2lsZQ== 76311
+IFdpbm5lcnM= 76312
+5bGV 76313
+U3RyYQ== 76314
+IGNvbmdyZWc= 76315
+IEVuYWJsZXM= 76316
+IFN5bXB0b21z 76317
+X3Nn 76318
+IFJpZGluZw== 76319
+X2hlYWRz 76320
+IENvc21ldGlj 76321
+w650 76322
+LlNpbmdsZXRvbg== 76323
+IE5pY2FyYWd1YQ== 76324
+IAoKCgoK 76325
+IG3DrQ== 76326
+J30sDQo= 76327
+IEJvc25pYQ== 76328
+Plg= 76329
+Ly8qWw== 76330
+IHBpbGVk 76331
+Y2FzdGluZw== 76332
+IGdyw6JjZQ== 76333
+IEhlbHNpbmtp 76334
+R3Jv 76335
+I2Fm 76336
+7Iud 76337
+IHNvdWhh 76338
+IEluZGll 76339
+X25lYXI= 76340
+IGltbW9iaWw= 76341
+LkV4Y2Vs 76342
+IHJhZGlhbnQ= 76343
+X01C 76344
+IEtldG8= 76345
+dmVudGFyaW8= 76346
+X2FnZW50cw== 76347
+VGFibGVWaWV3Q2VsbA== 76348
+IFRoZW9kb3Jl 76349
+PT09PT09PT0K 76350
+LGxpc3Q= 76351
+KHNp 76352
+aWNpcGF0aW9u 76353
+QVJUSA== 76354
+c2V0RGlzcGxheQ== 76355
+LkZ1dHVyZQ== 76356
+IFNUQU5EQVJE 76357
+IE9JRA== 76358
+IGZyb3duZWQ= 76359
+IE1hcmlseW4= 76360
+b2xhcmU= 76361
+UHU= 76362
+IHPDqWN1cml0w6k= 76363
+UmVkdXg= 76364
+U0NP 76365
+CQkJCQkgICAgICA= 76366
+cml2 76367
+cGVydA== 76368
+IHNvZnRtYXg= 76369
+IHNlbmF0ZQ== 76370
+PWVtYWls 76371
+IGVzdGltYXRpbmc= 76372
+CXRk 76373
+RnVjaw== 76374
+IFdhdGVybG9v 76375
+IG1leGljbw== 76376
+TmV3dG9u 76377
+U2Fi 76378
+LOKApgoK 76379
+IGNlbGVzdGlhbA== 76380
+IFFOYW1l 76381
+IGdldEFwcA== 76382
+Tmll 76383
+X3BjaQ== 76384
+IFFQb2ludEY= 76385
+X2xpc3Rh 76386
+Lk5WYXJDaGFy 76387
+IENvYw== 76388
+S2Fy 76389
+IGJ1c3RlZA== 76390
+aXphdGlvbmFs 76391
+b3VyZA== 76392
+X2Nvbm5lY3Rvcg== 76393
+IFNla3M= 76394
+0L3Rg9GO 76395
+0II= 76396
+L0xpc3Q= 76397
+L2lj 76398
+XEZyYW1ld29ya0J1bmRsZQ== 76399
+dXh0 76400
+IGhlYWRwaG9uZQ== 76401
+RVhURVJO 76402
+LXJlc2V0 76403
+IEdlaWxl 76404
+IHRyaWFuZw== 76405
+IEFOTg== 76406
+IHTDrQ== 76407
+IFNQQQ== 76408
+IE1hY2Vkb25pYQ== 76409
+IGNyaWFy 76410
+IGNsaW1icw== 76411
+IFNPTg== 76412
+IENyaXRpY3M= 76413
+IGTDsw== 76414
+X1NQTElU 76415
+IEJvdW5kYXJ5 76416
+X0luc2VydA== 76417
+Q29sZA== 76418
+LmNyZWF0ZUNlbGw= 76419
+X3NhaWRh 76420
+LkJMVUU= 76421
+QmlnRGVjaW1hbA== 76422
+KEJ5dGVz 76423
+CVN0YXRl 76424
+LS0tQA== 76425
+Vmlld1NldA== 76426
+YWthaA== 76427
+X1JlcG9ydA== 76428
+LWNyb3Nz 76429
+LmdldEN1cnJlbnRVc2Vy 76430
+dWx0dXI= 76431
+KEZs 76432
+IEltYWc= 76433
+Q1Rlc3Q= 76434
+7IOd 76435
+IHN0YWc= 76436
+IG96b25l 76437
+IGvDqQ== 76438
+cmVwYWly 76439
+KSIpOw0K 76440
+IHZvd3M= 76441
+LkFsdGVy 76442
+IEFsZ2VicmE= 76443
+IEFoZWFk 76444
+Z2V0dA== 76445
+LklubmVyVGV4dA== 76446
+IFpoZW5n 76447
+LnJlYWxwYXRo 76448
+IGRpc3RyYWN0aW9ucw== 76449
+LGV2ZW50 76450
+IElOQ0xVREVE 76451
+Lk1hdGNoZXI= 76452
+LnNwb3RpZnk= 76453
+IGNvbnNpZA== 76454
+Lk1hcHBpbmc= 76455
+IEZvYW0= 76456
+IE5BTkQ= 76457
+IGRldmFudA== 76458
+XSIpXQo= 76459
+TGF1cmE= 76460
+IHNhY2tlZA== 76461
+X3hvcg== 76462
+IHJlYWxtcw== 76463
+IFJvYm90aWNz 76464
+LlNlZWs= 76465
+LiQk 76466
+IFJpYmJvbg== 76467
+CUhSRVNVTFQ= 76468
+IENyZXNjZW50 76469
+RUZS 76470
+IE1lZGl0YXRpb24= 76471
+LmdldFo= 76472
+INC60L7QvNC/ 76473
+anNvbndlYnRva2Vu 76474
+Oj8= 76475
+ZmFm 76476
+VklPVVM= 76477
+YWxsYWg= 76478
+IHBpcGluZw== 76479
+IG1vZGVybmU= 76480
+cG9zdGFsY29kZQ== 76481
+IGxldmVyYWdpbmc= 76482
+IENISVA= 76483
+cGNt 76484
+bWFp 76485
+IGlQ 76486
+QUtFUg== 76487
+ZGF0YUdyaWRWaWV3 76488
+X2RlcHM= 76489
+LWRyaXZlcg== 76490
+TGll 76491
+ZGlzY2FyZA== 76492
+eW50YXhFeGNlcHRpb24= 76493
+IGVjdA== 76494
+IEV4aGliaXQ= 76495
+ICgqKg== 76496
+IOuU 76497
+Q2hhbmdlRXZlbnQ= 76498
+IHN1cGVybWFya2V0cw== 76499
+IHNobQ== 76500
+cHJvZml0cw== 76501
+cGlsbGFy 76502
+cmFpc29u 76503
+V2F0 76504
+IHBoYXJtYWNpZXM= 76505
+IG5ydw== 76506
+Ly89PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT0= 76507
+CXdvcmxk 76508
+U3RyZWFtaW5n 76509
+RGlhbW9uZA== 76510
+IEVudW1lcmF0b3I= 76511
+IGVucXVpcnk= 76512
+LmxhbWJkYQ== 76513
+YmVr 76514
+Uk9UTw== 76515
+IFBkZlA= 76516
+IGhpc3Rv 76517
+IGdldENoaWxk 76518
+L3N0cmV0Y2hy 76519
+IEFNQVo= 76520
+IEFyZ3VtZW50T3V0T2ZSYW5nZUV4Y2VwdGlvbg== 76521
+InVzZXI= 76522
+IHNhbml0YXRpb24= 76523
+IENsb3RoZXM= 76524
+Lm51bXB5 76525
+ZmVj 76526
+ICMjIyMjIyMjIyMjIw== 76527
+0LXQudGB0YLQsg== 76528
+X2xw 76529
+IGF6dXJl 76530
+WFBhdGg= 76531
+VmVudA== 76532
+TGFib3I= 76533
+IG1pc3Rha2VubHk= 76534
+IGNvbmR1aXQ= 76535
+IEZhaXJmYXg= 76536
+Z2V0U3RhdHVzQ29kZQ== 76537
+IE1veQ== 76538
+TGlzdEFkYXB0ZXI= 76539
+ICg/KQ== 76540
+R2VuZXJhbGx5 76541
+LmlzQ29ubmVjdGVk 76542
+dmlkbw== 76543
+TW91c2VCdXR0b24= 76544
+R2VuZXJhdGlvblN0cmF0ZWd5 76545
+X2Rlcml2 76546
+IGxla2tlcg== 76547
+TWVhc3VyZW1lbnQ= 76548
+X0NPT0tJRQ== 76549
+ICoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq 76550
+IGNvbXBldGl0aXZlbmVzcw== 76551
+IGdhbWxl 76552
+IHJldHJvc3BlY3Q= 76553
+IEVkdWFyZG8= 76554
+IERhdGFTZXJ2aWNl 76555
+IGVzY29ydGVk 76556
+IFF0eQ== 76557
+SG9saWRheQ== 76558
+CXJhdw== 76559
+bGV1cnM= 76560
+QmlydGhkYXk= 76561
+IGhlYXRz 76562
+LmludmVyc2U= 76563
+IF8NCg== 76564
+aWxsdW0= 76565
+b2thYmxlQ2FsbA== 76566
+X21s 76567
+TGlrZWQ= 76568
+ZW51bWVyYXRl 76569
+RmluaXRl 76570
+LXByb3A= 76571
+QXJlYVZpZXc= 76572
+IG1lZGlhdGlvbg== 76573
+IGNoYW50aW5n 76574
+X05U 76575
+X3VuYw== 76576
+c21vdXRo 76577
+IHBpZ21lbnQ= 76578
+UGFzc3dvcmRFbmNvZGVy 76579
+IHbDqXI= 76580
+IHdhc3Rld2F0ZXI= 76581
+LVBhY2s= 76582
+IGpvdmVu 76583
+YWVz 76584
+S1k= 76585
+UGludGVyZXN0 76586
+IG11c2ljYQ== 76587
+bGFjZXM= 76588
+IFdpY2g= 76589
+KHJvdA== 76590
+KGly 76591
+IOyCreygnA== 76592
+44Gd44KM 76593
+X1RIRQ== 76594
+Z2V0RmlsZQ== 76595
+W3Byb3BlcnR5 76596
+IGVuZGluZ3M= 76597
+aXp6YXJl 76598
+PXRyYWlu 76599
+LWxvdmluZw== 76600
+IG5vdXZl 76601
+IGNvbW1hcw== 76602
+IGNhbWJp 76603
+IFp1c2FtbWVu 76604
+CUV4dA== 76605
+KG9ic2VydmVy 76606
+Zm9ybWlr 76607
+IHF1aW5kaQ== 76608
+IEl2b3J5 76609
+IEJvbGl2aWE= 76610
+YXNhZA== 76611
+X2xlZ2VuZA== 76612
+Q2l0aWVz 76613
+X0ZJUkU= 76614
+YXNkZg== 76615
+LkRlcHRo 76616
+VmFsdWVHZW5lcmF0aW9uU3RyYXRlZ3k= 76617
+dXBk 76618
+LkdldFJlc3BvbnNl 76619
+IHVyZ2VudGx5 76620
+SW52YXJpYW50 76621
+R2V0WA== 76622
+IHN0YXR1cmU= 76623
+IGltYWdpbmluZw== 76624
+YXRlYXU= 76625
+TU9WRUQ= 76626
+KFRyYW5zYWN0aW9u 76627
+X3Bvcg== 76628
+UmVmUHRy 76629
+Lmdsb2JhbERhdGE= 76630
+Z3JhdmU= 76631
+aW1lc3RlcHM= 76632
+Zm91bmRsYW5k 76633
+U2FsaXI= 76634
+YXJ0aXN0cw== 76635
+IGNyZWF0ZUFjdGlvbg== 76636
+IFNhbnRv 76637
+INC90LXRgg== 76638
+CQkJICAgICAgICAgICAgICAg 76639
+LXNvbmc= 76640
+IG51aXNhbmNl 76641
+IGltcG92ZXI= 76642
+XykNCg== 76643
+IGNyb3dkZnVuZGluZw== 76644
+IHRpbXA= 76645
+UGljdHVyZXM= 76646
+IGxvZGdpbmc= 76647
+6ZKu 76648
+YXRhc2V0cw== 76649
+44Ot44Kw 76650
+cGVyc29ucw== 76651
+Y29uZHVjdA== 76652
+IGV2YWRl 76653
+IGhhdW50aW5n 76654
+ICEhfQ== 76655
+IExBUkdF 76656
+IGtpdHRlbg== 76657
+IHVwaGlsbA== 76658
+KG1pbnV0ZXM= 76659
+IEVtYW51ZWw= 76660
+J0M= 76661
+IFNreXdhbGtlcg== 76662
+cHVycG9zZQ== 76663
+X21hcHBlcg== 76664
+IGFkYXB0YXRpb25z 76665
+LmZpbGxUZXh0 76666
+cnVr 76667
+IHJlcGVydG9pcmU= 76668
+KHByaW9yaXR5 76669
+KG1hcHBlZA== 76670
+Um9iaW4= 76671
+IGVycm9uZW91cw== 76672
+IGluaGFs 76673
+Qk9WRQ== 76674
+KCIsIikK 76675
+dWVsbGVtZW50 76676
+IGZpbmdlcnByaW50cw== 76677
+IFBZVEhPTg== 76678
+LWRlbQ== 76679
+bGVhbm9y 76680
+esSFZA== 76681
+IlBlb3BsZQ== 76682
+YXNpZXI= 76683
+IHBhdHJpb3RpYw== 76684
+LmZyZWV6ZQ== 76685
+SUo= 76686
+IEJhbmNv 76687
+IGlzU3VjY2Vzcw== 76688
+KHZlaGljbGU= 76689
+KExheW91dA== 76690
+IGNhcnZpbmc= 76691
+X2NpcGhlcg== 76692
+IHZlemVz 76693
+KCdfJyw= 76694
+IEZpcnN0bHk= 76695
+IGZ1bGxlc3Q= 76696
+IExpc3RlbmluZw== 76697
+X3NpZ25hbHM= 76698
+ZXdvbGY= 76699
+IFNDUg== 76700
+IE1lcnJ5 76701
+L3Rlc3RpZnk= 76702
+X1NBTklUSVpF 76703
+aW9jdGw= 76704
+SUVFRQ== 76705
+PU1hdGg= 76706
+IGVucXU= 76707
+CWF1eA== 76708
+4pml 76709
+IGRpc3BlcnNlZA== 76710
+aGFyZQ== 76711
+YmVybg== 76712
+IEFtZW5k 76713
+IGluc2lkZXJz 76714
+IEFsdmFyZXo= 76715
+IFp1Zw== 76716
+L2NhbGVuZGFy 76717
+IGhldXJl 76718
+LXBhcGVy 76719
+IHNvZm9ydA== 76720
+IHNtaXRo 76721
+IHBvYg== 76722
+KHJhdGU= 76723
+IHNvY2nDqXTDqQ== 76724
+IHdvZXM= 76725
+IGJydXNoaW5n 76726
+cWQ= 76727
+b2xvZ3Vl 76728
+c29ja2V0cw== 76729
+X1lFUw== 76730
+LmFkZENvbHVtbg== 76731
+IGV2YXNpb24= 76732
+U09GVFdBUkU= 76733
+YWJveA== 76734
+LnlsaW0= 76735
+IGVuZ3VsZg== 76736
+Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLwo= 76737
+IG5nT25EZXN0cm95 76738
+IG5vc3Nh 76739
+LmxzdA== 76740
+KCl9Pgo= 76741
+Lmt3YXJncw== 76742
+IGNvbnRleHRv 76743
+IFBVQg== 76744
+RnU= 76745
+IGJpZ290cnk= 76746
+IGJyaWQ= 76747
+IHN0ZXJvaWQ= 76748
+IHZpZ29yb3VzbHk= 76749
+IGJ1cnN0aW5n 76750
+IHZlbmU= 76751
+IHNhbGFkcw== 76752
+IFZBUklBQkxFUw== 76753
+IE9uYw== 76754
+IGZpcmVFdmVudA== 76755
+c2FuZGJveA== 76756
+IHRvdWNoc2NyZWVu 76757
+c2Fucw== 76758
+L0luc3RydWN0aW9u 76759
+IGVvZg== 76760
+bGVjdHVyZQ== 76761
+Py0= 76762
+LmxvY2FsaXphdGlvbg== 76763
+VkVT 76764
+X3ZvaWNl 76765
+aXR1cmE= 76766
+LnJlcG9ydGluZw== 76767
+IF0pOw== 76768
+Tm92YQ== 76769
+X0NPTVBBVA== 76770
+IG91dGJyZWFrcw== 76771
+LmNsaWVudFdpZHRo 76772
+aWZsb3dlcg== 76773
+X0dSQQ== 76774
+SW5pdGlhbGl6aW5n 76775
+X3BlcmY= 76776
+KCl9LA== 76777
+PVA= 76778
+X0lNRVRIT0Q= 76779
+IHRpZ2h0ZW5pbmc= 76780
+IHRhYkJhcg== 76781
+IEJL 76782
+CURvdWJsZQ== 76783
+L2hhc2g= 76784
+IG1leg== 76785
+VG9VcHBlcg== 76786
+VEc= 76787
+KGluZGVudA== 76788
+IHNpbGljYQ== 76789
+IC8vLy8vLw== 76790
+w7Zr 76791
+IGVsdmVz 76792
+ZW1wbGF0ZXM= 76793
+LkNvbXBhcmVUbw== 76794
+IGd1bmZpcmU= 76795
+YW5pbWFscw== 76796
+IGtlcGFkYQ== 76797
+IENQUg== 76798
+X0xTQg== 76799
+CXZlcnRleA== 76800
+INC/0LXRgNCy 76801
+LCE= 76802
+IGR1bHk= 76803
+X1BBVENI 76804
+RU5B 76805
+CUND 76806
+Y29tcG9zaXRpb24= 76807
+X3N2 76808
+TGJs 76809
+amVq 76810
+0YHRgtGA0L7QuQ== 76811
+LkVkaXRWYWx1ZQ== 76812
+5YW3 76813
+YW50YXM= 76814
+IGJyZWFkY3J1bWI= 76815
+IFRlc3Rlcg== 76816
+IE1lYXN1cmVtZW50cw== 76817
+L0lucHV0 76818
+IFJheg== 76819
+X1BPTEw= 76820
+SW5kZXBlbmRlbnQ= 76821
+Lmx1Y2VuZQ== 76822
+IE1lY2hhbmljcw== 76823
+Y29sb24= 76824
+LnN1cmZhY2U= 76825
+IHVuYXM= 76826
+cmFkbw== 76827
+UExJQ0FURQ== 76828
+Q1JU 76829
+LnNldERlZmF1bHQ= 76830
+JUg= 76831
+IHJlc3BvbnNhYmxl 76832
+IHBlcnBlbmRpY3VsYXI= 76833
+IFJlc3Bpcg== 76834
+IFR1bmlzaWE= 76835
+XEFycmF5 76836
+6Lev5b6E 76837
+IHBhdw== 76838
+IGRlYm91bmNl 76839
+KE1QSQ== 76840
+INiv2LE= 76841
+IGVsaw== 76842
+IFJlbGF5Q29tbWFuZA== 76843
+L2xpZ2h0 76844
+LnNlcmlhbGl6YXRpb24= 76845
+QlNJVEU= 76846
+KSgoKCg= 76847
+IEJpb3M= 76848
+X3N2Zw== 76849
+KHN1cmZhY2U= 76850
+RHVwbGljYXRlcw== 76851
+ICg+ 76852
+X0FTVA== 76853
+Lm5pY2s= 76854
+IldoeQ== 76855
+IEludGVsbGVjdHVhbA== 76856
+YWJicmV2aWF0aW9u 76857
+ZWFyYWJsZQ== 76858
+IGNvbnNlZ3Vpcg== 76859
+KEJl 76860
+X1BvZHM= 76861
+PEFuaW1hdG9y 76862
+X1VOREVGSU5FRA== 76863
+QVJSWQ== 76864
+IC8vfg== 76865
+cGVyYXRvcg== 76866
+LndyaXRlRmlsZVN5bmM= 76867
+QWxz 76868
+bGRlcg== 76869
+IG1pZWpz 76870
+IGZ1bmNz 76871
+aW5jaWJsZQ== 76872
+IGR1c3R5 76873
+IERyaWxs 76874
+IGNvbnRpbnVhbA== 76875
+IEVsZWN0cm9u 76876
+LmVuZW15 76877
+KHBi 76878
+IHJldW5pdGVk 76879
+U21va2U= 76880
+LWZhY2Vk 76881
+SW50ZW5zaXR5 76882
+IFRyZWVNYXA= 76883
+IEFyZ3VtZW50RXJyb3I= 76884
+LndyaXRlSGVhZA== 76885
+IFRSRQ== 76886
+U3BsaXRPcHRpb25z 76887
+LyoqKioqKi8K 76888
+IFw8Xg== 76889
+IEludmVzdG1lbnRz 76890
+U1VNRVI= 76891
+IGRhYw== 76892
+QU5J 76893
+Llllc05v 76894
+KG9mU2l6ZQ== 76895
+eXRo 76896
+ZWxvYWQ= 76897
+IGltcHJlcw== 76898
+IGJsb2Jz 76899
+LnJldHJpZXZl 76900
+IHR5cmFubnk= 76901
+IGNhbmNlbEJ1dHRvblRpdGxl 76902
+IGhhY2k= 76903
+IENhc2lub3M= 76904
+IGRoZQ== 76905
+UmV0YWls 76906
+IFBvcm5odWI= 76907
+IENyaW1lcw== 76908
+T2ls 76909
+KElTZXJ2aWNl 76910
+UmVzaXphYmxl 76911
+CVNv 76912
+T2Z0ZW4= 76913
+IGNvbW1vbnBsYWNl 76914
+X0dD 76915
+YWxkaQ== 76916
+YXRobG9u 76917
+KFZpZXdHcm91cA== 76918
+KEVtcGxveWVl 76919
+IHNhZmVndWFyZHM= 76920
+6YCA5Ye6 76921
+X0FVUkE= 76922
+IHVubm90aWNlZA== 76923
+IFRob3Ju 76924
+bW9kZWxl 76925
+IGFjb3Jkbw== 76926
+IFdlbmdlcg== 76927
+aW11cw== 76928
+ZW5zYnVyZw== 76929
+b21iYQ== 76930
+Y2nDs24= 76931
+Imh0dHA= 76932
+X01hdHJpeA== 76933
+fHx8fA== 76934
+b3JuZWNlZG9y 76935
+CUJ1ZmZlcmVkUmVhZGVy 76936
+cmVnaXN0ZXJz 76937
+cmVsZWFzZWQ= 76938
+IGFkZE9ic2VydmVy 76939
+IFZhbGVudA== 76940
+KEN1bHR1cmVJbmZv 76941
+IG1hbm5lbg== 76942
+IGJ1cmdsYXJ5 76943
+X21pbnV0ZQ== 76944
+IGludGVyY2VwdG9y 76945
+b2NyYXRlcw== 76946
+YXR0cm8= 76947
+IFlF 76948
+ZXNzbGVy 76949
+bGlzdGVuZXJz 76950
+L3Byb20= 76951
+IOek 76952
+dG91Y2hlcw== 76953
+RXNw 76954
+IEFib3J0 76955
+IGZmaQ== 76956
+IGNsdW1z 76957
+TklM 76958
+X1ZJUlRVQUw= 76959
+IGxvaW4= 76960
+eW5vbWlhbHM= 76961
+INec 76962
+IGd6 76963
+IE5lb24= 76964
+SVNJUw== 76965
+YW1lcmF0ZQ== 76966
+X2F2YWls 76967
+IG1heGk= 76968
+IGlzQXJyYXk= 76969
+Q29sdW1uSW5mbw== 76970
+aXppbg== 76971
+IHBlcnNv 76972
+IG91ZA== 76973
+aWFsaXplZA== 76974
+eW1p 76975
+IGNvbmZpZGVudGx5 76976
+PSIvIj4K 76977
+LmRhdGFzb3VyY2U= 76978
+IHBheWNoZWNr 76979
+IEJhdg== 76980
+L0JyYW5jaA== 76981
+IFRlYXI= 76982
+IG1lcnVwYWthbg== 76983
+IEJyYWg= 76984
+INC60L7QvdGC 76985
+74I= 76986
+LHBhdGg= 76987
+IGRhenpsaW5n 76988
+IFVDSEFS 76989
+IHByb3Zpc2lvbmFs 76990
+0L/Qvw== 76991
+IGxlZ2FsaXplZA== 76992
+X2FsZ28= 76993
+X1JTQQ== 76994
+YWx0ZXJuYXRpdmU= 76995
+IERFVEFJTFM= 76996
+VG9Ebw== 76997
+cmVmbGVjdGlvbg== 76998
+X1dFRUs= 76999
+IENMRUFO 77000
+IHNsb2dhbnM= 77001
+IOuTsQ== 77002
+IFZldGVyaW5hcnk= 77003
+aWRm 77004
+LmRhdGVUaW1lUGlja2Vy 77005
+aWNvbnRyb2w= 77006
+KHBsYXk= 77007
+IHVsbGFt 77008
+ICcpDQo= 77009
+IGNoZXF1ZQ== 77010
+5a6L5L2T 77011
+IHVuc2VyZW0= 77012
+IEFyY2hpdGVjdHM= 77013
+YW1lbnRhbHM= 77014
+IHZtYXg= 77015
+IGplbWFuZA== 77016
+Q0VFRA== 77017
+IE9saXZpZXI= 77018
+c2V2ZXJpdHk= 77019
+Uks= 77020
+RGlzY29ubmVjdGVk 77021
+IHdlYXBvbnJ5 77022
+dWnDp8Ojbw== 77023
+IGJpbmdv 77024
+ZG9udA== 77025
+X0NIQU5ORUxT 77026
+IERhZw== 77027
+IGTDpHI= 77028
+w6lyaXF1ZQ== 77029
+Z3JhZGFibGU= 77030
+IENPTVBMRVRF 77031
+IHNwYW5pc2g= 77032
+IGluc3RydW1lbnRhdGlvbg== 77033
+dmFzaXZl 77034
+RFJBVw== 77035
+IGZwdXRz 77036
+IFNwZW5k 77037
+IFJlc3BlY3Q= 77038
+Q291cnRlc3k= 77039
+IHNjaG8= 77040
+IHBvc3RhZ2U= 77041
+IE1lYWRvd3M= 77042
+IHR1dG9yaW5n 77043
+ZXJ2bw== 77044
+QWJzb2x1dGVseQ== 77045
+w6FuZGV6 77046
+vZTrk5w= 77047
+IFNIUg== 77048
+cGhvb24= 77049
+IERlcG9z 77050
+PScnCg== 77051
+IHBoeXNpb2xvZ3k= 77052
+KnRpbWU= 77053
+IFRvdWdo 77054
+ZG9jaw== 77055
+L2hl 77056
+KEhhdmU= 77057
+IE1vaW5lcw== 77058
+U1RZUEU= 77059
+IEJyaWRl 77060
+IHN0cm9u 77061
+IHdvcmxkdmlldw== 77062
+IGdyYXR1aXRv 77063
+IGFlcm9zcGFjZQ== 77064
+IElocmVt 77065
+IHFj 77066
+IG1hbmlmZXN0YXRpb25z 77067
+c2xhdWdodA== 77068
+PEFjY291bnQ= 77069
+IEluZm9z 77070
+YW1iaWw= 77071
+X0ZpbmFs 77072
+IGFkbWluaXN0cmF0aW9ucw== 77073
+IGNvbGxhYm9yYXRlZA== 77074
+LmpkZXNrdG9w 77075
+b2x1Y2nDs24= 77076
+YXNjdGltZQ== 77077
+X2FsbG9jYXRl 77078
+YXJyaXZhbA== 77079
+Sk9S 77080
+IHNoYWR5 77081
+IHBpbmVhcHBsZQ== 77082
+44KP 77083
+IHNhdGlu 77084
+YnJlcm8= 77085
+IExpZXM= 77086
+IHRlbnNvcnM= 77087
+IEludGVsbGlnZW50 77088
+LlNlbGVjdGVkSW5kZXhDaGFuZ2Vk 77089
+IHJhZGlhdG9y 77090
+YXNzaXN0YW50 77091
+JGZpZWxkcw== 77092
+CXN0ZXA= 77093
+IE1pdGdsaQ== 77094
+IEV2ZXJldHQ= 77095
+IFNjaGVkdWxlZA== 77096
+SG9yYQ== 77097
+Il0tPg== 77098
+IG1vdHM= 77099
+IERTVA== 77100
+Zm9udE5hbWU= 77101
+IFdhcndpY2s= 77102
+X1Rhc2s= 77103
+KkM= 77104
+44On 77105
+b2JlbA== 77106
+X0RFVA== 77107
+IHNvY2lvbG9neQ== 77108
+IEthdHo= 77109
+aWNpb25z 77110
+b3RsYW5k 77111
+YWRvbw== 77112
+X3BhcnM= 77113
+IHJpcHBpbmc= 77114
+aWNobw== 77115
+IG51dHJpdGlvdXM= 77116
+CWRhbWFnZQ== 77117
+S3k= 77118
+IGFuY2hvcmVk 77119
+IGFydGlmaWNpYWxseQ== 77120
+IEp1dmVudHVz 77121
+L3Blcmw= 77122
+IGV4cHJlc3NpdmU= 77123
+eEVF 77124
+IEVudW1lcmF0aW9u 77125
+Lk1FU1NBR0U= 77126
+KGRlZw== 77127
+5b+X 77128
+IyMjIyMj 77129
+ICIiKSw= 77130
+a2zDpHI= 77131
+XE1haWw= 77132
+RGVzaWduZWQ= 77133
+IHN0YWZmZXI= 77134
+IHNhbHRz 77135
+KioqKioNCg== 77136
+IOKB 77137
+IHNldFRpdGxlQ29sb3I= 77138
+RFZE 77139
+LldyaXRlQWxs 77140
+ZWxsYW50 77141
+IGNvZXJjaW9u 77142
+IFNvcnRpbmc= 77143
+6KiA 77144
+IHN0YXJ2YXRpb24= 77145
+Ly97ew== 77146
+LmhlYXA= 77147
+IE1lZGlldmFs 77148
+ICotLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0t 77149
+77yR77yQ 77150
+IHdhcmRz 77151
+IEhlcmM= 77152
+IEhvZ3dhcnRz 77153
+LWNvbW1lbnRz 77154
+IExhdWRlcmRhbGU= 77155
+5rw= 77156
+IHJpZnQ= 77157
+IHplaXQ= 77158
+IHByb29mcw== 77159
+LnZpZXdwb3J0 77160
+JHN0YXJ0 77161
+IEJvdWdodA== 77162
+LnJpY2hUZXh0Qm94 77163
+IGNsaW5n 77164
+ICcqKg== 77165
+T3duZXJzaGlw 77166
+IEJvZWhuZXI= 77167
+KGR5bmFtaWM= 77168
+IG1lZGljYWxseQ== 77169
+IFdURg== 77170
+IE1haW5NZW51 77171
+6LSt 77172
+IGRpZmVyZW50ZQ== 77173
+L3Jlc3VsdHM= 77174
+ZW50aGFs 77175
+IFdpZGdldHM= 77176
+cnVzaA== 77177
+IFJNUw== 77178
+IFZvbGxleQ== 77179
+IHJlbW92ZUZyb21TdXBlcnZpZXc= 77180
+IExhZmF5ZXR0ZQ== 77181
+IEZldGNoVHlwZQ== 77182
+YWNhcw== 77183
+IHBhdGhvZ2Vucw== 77184
+IE1NTw== 77185
+LkN1cnJlbmN5 77186
+b2Npb3Vz 77187
+IHNwcml0ZUJhdGNo 77188
+ZG9sbA== 77189
+IHZhbXBpcmVz 77190
+bGF1bmNoZXI= 77191
+IHBlYWtlZA== 77192
+IGRlYnVuaw== 77193
+IEFTRA== 77194
+IHVuZXF1YWw= 77195
+IHNxdWFkcw== 77196
+fS4kew== 77197
+bWFuaQ== 77198
+IkU= 77199
+IEZhaHI= 77200
+IElTSQ== 77201
+IHVuYXZvaWQ= 77202
+b3Bob25l 77203
+WzpdCg== 77204
+IERpcmVjdGVk 77205
+IGJ1c2hlcw== 77206
+LmZhaWx1cmU= 77207
+IGltbWVyc2Vk 77208
+ZXhv 77209
+SGlzdG9ncmFt 77210
+IEthbm4= 77211
+IHBpcmFjeQ== 77212
+IENydW5jaA== 77213
+IGzDpg== 77214
+Ly8i 77215
+IG1vbm90 77216
+IFNhdW5kZXJz 77217
+IFNldmVudA== 77218
+KEFic3RyYWN0 77219
+IHNtb2tlcg== 77220
+cm9uZQ== 77221
+LmNsaWVudFk= 77222
+ICItIiw= 77223
+IEZvdW50YWlu 77224
+IGlubmU= 77225
+7IOJ 77226
+Q3Ry 77227
+JGlucHV0 77228
+UFJPRklMRQ== 77229
+IERvbmF0aW9u 77230
+V2l0aEVtYWls 77231
+IGZyYWN0dXJlcw== 77232
+S2VlcGVy 77233
+IG1laXNqZXM= 77234
+IGFyY2hpdGVjdHVyZXM= 77235
+IEx1bmc= 77236
+J2ltYWdl 77237
+aGFybWE= 77238
+IGFiYW5kb25pbmc= 77239
+QUxMRUQ= 77240
+c3VidHlwZQ== 77241
+cmVpcmE= 77242
+IG1vc3M= 77243
+IFBhcnNvbnM= 77244
+YWtlZG93bg== 77245
+PW9iag== 77246
+IHN1Y2Vzcw== 77247
+IHdlYXJhYmxl 77248
+44Kn 77249
+IGFkdWx0aQ== 77250
+LnVt 77251
+IHZpYnJhdGlvbnM= 77252
+IHN3ZWxs 77253
+IERpc2Nsb3N1cmU= 77254
+IFJERA== 77255
+cGFpcnM= 77256
+YW5nZ2Fu 77257
+IG1haW5CdW5kbGU= 77258
+IERJTg== 77259
+IHJvY2tlZA== 77260
+c2hvdWxkQmU= 77261
+Lmdi 77262
+IElNRA== 77263
+IFdO 77264
+LGFyZw== 77265
+4oCm4oCm4oCm4oCm4oCm4oCm4oCm4oCm 77266
+W109JA== 77267
+LlNN 77268
+IGFsZ3Vucw== 77269
+YWRkb25z 77270
+X0NvbW1vbg== 77271
+X1JFRlJFU0g= 77272
+INmB2Yo= 77273
+IFRZUE8= 77274
+IEVjb2xvZ3k= 77275
+IGdsdQ== 77276
+LkRhdGFUeXBl 77277
+IFByb2Jl 77278
+THV4 77279
+b3dlZ28= 77280
+IHJlaw== 77281
+IFBsYWludGlmZg== 77282
+YWNoYWJsZQ== 77283
+Lm5hbWE= 77284
+Km91dA== 77285
+fX17ew== 77286
+IENBUElUQUw= 77287
+5L2G 77288
+SW1wb3J0ZXI= 77289
+LmNyZWF0ZVNlcnZlcg== 77290
+X3Jlc29sdmU= 77291
+X0VQUw== 77292
+c3RlbGxhcg== 77293
+X1Byb2ZpbGU= 77294
+CXN3 77295
+LW1vbg== 77296
+dWRldg== 77297
+XFBsdWdpbg== 77298
+X01JWA== 77299
+IERpc2NyaW0= 77300
+LmZyb21MVFJC 77301
+IFN0cmFuZA== 77302
+QW55dGhpbmc= 77303
+cG93ZXJz 77304
+XV0NCg== 77305
+LlRJTQ== 77306
+IGFkZHNsYXNoZXM= 77307
+IGVzaQ== 77308
+QEJlZm9yZQ== 77309
+IHNhaw== 77310
+ICcvJzsK 77311
+Y29j 77312
+xZ/EsQ== 77313
+ICkpOw0K 77314
+X2Fib3Zl 77315
+IEVDQw== 77316
+L2NwdQ== 77317
+IGNhZGU= 77318
+LlN0ZGVycg== 77319
+IHBlbGxldHM= 77320
+IFBhbGlu 77321
+IGfDqW4= 77322
+X2phdmE= 77323
+IHNhbGFo 77324
+IGJlcmdlbg== 77325
+X1NXQVA= 77326
+IGdpYg== 77327
+acOjbw== 77328
+X2Rpc3RhbmNlcw== 77329
+IENpbmRlcg== 77330
+IGFuYXJjaGlzdA== 77331
+aW1hdA== 77332
+CW1vY2s= 77333
+44GX44G+44GZ 77334
+T21lZ2E= 77335
+IGJhaHdh 77336
+X1BhcnNl 77337
+LnBhcGVy 77338
+CUludGVudA== 77339
+cmVucw== 77340
+L2dyaWQ= 77341
+IGZpbHRoeQ== 77342
+LmV2 77343
+IyMjIyMK 77344
+IHNhcmU= 77345
+IHNvYWtpbmc= 77346
+IFJlZ2lvbnM= 77347
+X1VTRUQ= 77348
+IFNpaw== 77349
+aWZpa2FzaQ== 77350
+CUVkaXRvcg== 77351
+THVjaw== 77352
+IOyXsA== 77353
+xINt 77354
+LiI7 77355
+IFppZWw= 77356
+IGdyYXlzY2FsZQ== 77357
+KEZ1bmM= 77358
+44OB 77359
+LkRlbnNl 77360
+LWxlYW5pbmc= 77361
+IGdyYWNlZnVs 77362
+R3JhcGhOb2Rl 77363
+X0NPTU1JVA== 77364
+IENWUw== 77365
+IHBsYWlucw== 77366
+IHJlag== 77367
+cGNpb25lcw== 77368
+IHVuZGVybWluaW5n 77369
+X2NhdHM= 77370
+ZmVi 77371
+Q29sbGVjdGlvblZpZXc= 77372
+U0VNQg== 77373
+IHRodQ== 77374
+dGV4dGJveA== 77375
+KEFuZHJvaWQ= 77376
+IHJpZ29y 77377
+IFlpZWxk 77378
+LmlzUGxheWluZw== 77379
+OnZpZXc= 77380
+cmVtYWluZGVy 77381
+IFBpcA== 77382
+KWluZGV4 77383
+IEJlY2tlcg== 77384
+dG9Mb2NhbGU= 77385
+YXV0b3JlbGVhc2U= 77386
+IFJvbWVybw== 77387
+LkhhbmRsZWQ= 77388
+IENhYmluZXRz 77389
+KVY= 77390
+IHJ0ZQ== 77391
+IEh1bHU= 77392
+aWNpZWw= 77393
+L2FuaW1hdGlvbnM= 77394
+IHByZXN1bWU= 77395
+LnRyYW5zcGFyZW50 77396
+IHN1Ym1lbnU= 77397
+cW0= 77398
+aWVydGVu 77399
+IHRleHRTaXpl 77400
+IHN0YXJ2aW5n 77401
+L2pvYg== 77402
+QXBhY2hl 77403
+IHlpZWxkaW5n 77404
+LWFydGljbGU= 77405
+Jz0+JF8= 77406
+IOih 77407
+PFNwcml0ZVJlbmRlcmVy 77408
+IFNoaWE= 77409
+KToo 77410
+IHB1Ymxp 77411
+emllag== 77412
+IHRlbGVzYw== 77413
+IHRlaWw= 77414
+TGVnYWN5 77415
+IFBsYWNlbWVudA== 77416
+KCkpew== 77417
+IHRyb3VibGVzb21l 77418
+5pif 77419
+IHBlcnPDtm4= 77420
+X0FzcE5ldA== 77421
+PX0= 77422
+KHVzZXJJRA== 77423
+U3Vz 77424
+44K6 77425
+LWF2ZXJhZ2U= 77426
+IFFJbWFnZQ== 77427
+LlN0cmljdA== 77428
+dGVib3Jn 77429
+LWZ1bmN0aW9ucw== 77430
+UkVHSU9O 77431
+Pk5ldw== 77432
+X2Nob29zZQ== 77433
+KGNp 77434
+IHVubGVhc2g= 77435
+IFJJR0hUUw== 77436
+IFNwZWFy 77437
+CW1ha2U= 77438
+IHR5cw== 77439
+YW5lbGE= 77440
+IFdY 77441
+X01BS0U= 77442
+L3NldHVw 77443
+IG9uU2F2ZQ== 77444
+IGNsaW5pY2lhbnM= 77445
+CWJhY2s= 77446
+LkxpbmtlZA== 77447
+IGNvbnNlcnZl 77448
+IGJpdHRlbg== 77449
+X3ZhcmlhbmNl 77450
+IGxpcmU= 77451
+IGluZXJ0aWE= 77452
+dWZmbGVz 77453
+X01QSQ== 77454
+aWRkbGVz 77455
+W2Fycg== 77456
+LnZvY2Fi 77457
+IHNoaXR0eQ== 77458
+IG5lc3Rl 77459
+c3NpemU= 77460
+IEtU 77461
+Ymxlcg== 77462
+X2xpbnV4 77463
+IG1vbmdvZGI= 77464
+IElURU1T 77465
+S29u 77466
+IEJ1cnN0 77467
+X3Bob3Rvcw== 77468
+Q29sb3JhZG8= 77469
+IGFja25vd2xlZGdtZW50 77470
+IG9pbHk= 77471
+IG5mcw== 77472
+IFppb25pc3Q= 77473
+IGFkZGljdHM= 77474
+IGFkZFVzZXI= 77475
+IE1pc2g= 77476
+IGtX 77477
+IFdhbnRz 77478
+KHJlY29yZHM= 77479
+b2N1cnJlbmN5 77480
+SlNHbG9iYWw= 77481
+LmVsYXBzZWQ= 77482
+IE5i 77483
+IHBwdA== 77484
+XERlcGVuZGVuY3k= 77485
+Um9s 77486
+IMOnYWzEscWf 77487
+IGV4cGFuc2lvbnM= 77488
+YnViYmxl 77489
+IG1pZHRlcm0= 77490
+ICcjew== 77491
+Y3R4dA== 77492
+SVN5bnRheEV4Y2VwdGlvbg== 77493
+IFZhbGxl 77494
+IENhZGlsbGFj 77495
+ICIifSwK 77496
+IHNlbXVh 77497
+cmljaFRleHQ= 77498
+c29mdG1heA== 77499
+b2JqUEhQRXhjZWw= 77500
+LmhzdGFjaw== 77501
+X2NyaXRpY2Fs 77502
+KDw/ 77503
+ZGo= 77504
+IGNvbnNvbg== 77505
+IHJvb21JZA== 77506
+RE9NQ29udGVudExvYWRlZA== 77507
+cGFybXM= 77508
+IHplaWd0 77509
+VFBM 77510
+LW5vdGNo 77511
+IG9wcHJlc3NpdmU= 77512
+Q29kaW5n 77513
+IExlYXZlcw== 77514
+KERpc3BsYXk= 77515
+LnNpZ25Jbg== 77516
+Ly8tLQ== 77517
+IE9wcg== 77518
+Y3Rh 77519
+IG1ldGF2 77520
+U2VyaWFsaXplZA== 77521
+IHVuYWZmZWN0ZWQ= 77522
+IEFUTA== 77523
+IEtQ 77524
+QXRsYW50aWM= 77525
+LHVybA== 77526
+LHN0YXRl 77527
+IGJpc3Q= 77528
+ZW5lZw== 77529
+IHNpbXBsaXN0aWM= 77530
+IGJpZGRlcg== 77531
+IHBlcmNlcHQ= 77532
+IGNlbGli 77533
+IFRIUk9X 77534
+KC9b 77535
+VGNw 77536
+IGZ1cnRoZXJtb3Jl 77537
+LkFjYw== 77538
+b3BwYWJsZQ== 77539
+5Lik 77540
+IFRhcnQ= 77541
+IEJlbno= 77542
+IGVtYm9kaWVk 77543
+KENvbnN0 77544
+ICst 77545
+UGFydGljaXBhbnRz 77546
+IGh0dHBSZXF1ZXN0 77547
+YWNjZW50 77548
+IFPDvA== 77549
+IGhvcnJpZnlpbmc= 77550
+IC8+LA== 77551
+IGVuYWN0bWVudA== 77552
+IFVOSU9O 77553
+L2xvZ3M= 77554
+IHNjcmVlbkhlaWdodA== 77555
+IGV0d2E= 77556
+5L6L5aaC 77557
+IGHDum4= 77558
+5bem 77559
+X3RpbWVsaW5l 77560
+ICIiKSkK 77561
+JzonJw== 77562
+Qlc= 77563
+IHJlbm92YXRpb25z 77564
+IDwK 77565
+UGFsZQ== 77566
+Pjo8Lw== 77567
+U2tlbGV0b24= 77568
+IGdldFVzZXJz 77569
+X2RhdGFmcmFtZQ== 77570
+YWJy 77571
+bWF0ZXJpYWxz 77572
+JmVhY3V0ZQ== 77573
+LkRpc3BsYXlOYW1l 77574
+IGh2aXM= 77575
+X2xhbmd1YWdlcw== 77576
+LnN5 77577
+dG93ZXI= 77578
+SUZJQ0FUSU9OUw== 77579
+IGJhcnJpYw== 77580
+IFBsdXRv 77581
+YDs= 77582
+44OL 77583
+Y2VudGU= 77584
+I2Fi 77585
+IGxleGljYWw= 77586
+IEJSTw== 77587
+IHJ1bGluZ3M= 77588
+SEVZ 77589
+LmlPUw== 77590
+cmV0dXJuZWQ= 77591
+LmJvb2tz 77592
+IEh1YmI= 77593
+ZW9m 77594
+Pj46Og== 77595
+IOyG 77596
+IGdvVG8= 77597
+6ICD 77598
+44Go44GG 77599
+PEZvcm0= 77600
+Y29waWVz 77601
+LnF1YW50 77602
+IFBvdGF0bw== 77603
+IENvdXNpbnM= 77604
+IHPDuw== 77605
+R292ZXJu 77606
+IGdhbGVy 77607
+IEZJUg== 77608
+X1dpZHRo 77609
+IFNoZWxkb24= 77610
+LkRldg== 77611
+IFJlc3BvbnNpYmlsaXR5 77612
+c29uaWFu 77613
+IHN1cGVyY2xhc3M= 77614
+Yml0c2V0 77615
+ZWRkYXI= 77616
+IExhYm9yYXRvcmllcw== 77617
+IGNvaW5lZA== 77618
+IFRlY2huaXF1ZQ== 77619
+KENvcmU= 77620
+IHNwcmF5ZWQ= 77621
+IHBvbmc= 77622
+KE5ldHdvcms= 77623
+IHJvYXI= 77624
+IEVBU1Q= 77625
+c3RyYWlu 77626
+IG1lbnN0cnVhbA== 77627
+b21iYXQ= 77628
+IGNhbG1pbmc= 77629
+CURpbQ== 77630
+X21vdmllcw== 77631
+IFJBSUQ= 77632
+LWRpc21pc3NpYmxl 77633
+IGZyZXVuZA== 77634
+LWNoYW4= 77635
+IHJlc2lzdG9y 77636
+X0NvcHk= 77637
+b2NyaW5l 77638
+IGVzcGlvbmFnZQ== 77639
+Z2Fkbw== 77640
+TkRBUg== 77641
+IHBvcmNlbGFpbg== 77642
+dGhhbG0= 77643
+IGBb 77644
+IGdyYWRv 77645
+0LjRgA== 77646
+RE9VQkxF 77647
+IGFjY2Vzc2Vz 77648
+LkZsb29y 77649
+IOKGlA== 77650
+IHRva2VuaXpl 77651
+YW5hbHl0aWNz 77652
+LkNyZWF0ZUluc3RhbmNl 77653
+IHN1Y2hl 77654
+CWVudA== 77655
+aWduZXI= 77656
+INC/0LXRgNC10LQ= 77657
+IGNvbmRpY2lvbmVz 77658
+LmxpYnM= 77659
+Iic7 77660
+UERPRXhjZXB0aW9u 77661
+IG9uRGF0YQ== 77662
+IEF1dGlzbQ== 77663
+LWhlbHBlcg== 77664
+IHJld2luZA== 77665
+IGNvZmZpbg== 77666
+44O844K4 77667
+IHRyYW5zbWl0dGluZw== 77668
+LnNldEFsaWdubWVudA== 77669
+IGRlYWxsb2M= 77670
+IGFuY2VzdHJhbA== 77671
+b2dpZQ== 77672
+LkNPTVA= 77673
+OmZyYW1l 77674
+bW1v 77675
+Jzoi 77676
+IFJlZ2VudHM= 77677
+IGNoZWF0ZWQ= 77678
+Lmdn 77679
+IHBhY2Vk 77680
+IGVzdGFk 77681
+b2NlbmU= 77682
+bHNh 77683
+KGZj 77684
+L2dyb3Vwcw== 77685
+L21pc2M= 77686
+IFNodXR0bGU= 77687
+VVBJ 77688
+w6Fv 77689
+LWN5Y2xl 77690
+CXByb3Bz 77691
+IHJvdHRlbg== 77692
+UmVqZWN0ZWQ= 77693
+I2Fj 77694
+LnVh 77695
+IEFtbmVzdHk= 77696
+IHBlbm5lZA== 77697
+SU5DUkVNRU5U 77698
+PGRpbQ== 77699
+LnNldFVw 77700
+IFR3ZWV0cw== 77701
+IE1hZHVybw== 77702
+INmC 77703
+IENBY3RpdmU= 77704
+CUJZVEU= 77705
+KHNlcGFyYXRvcg== 77706
+LlJlc2l6ZQ== 77707
+dWZmbWFu 77708
+c3VwcG9ydHM= 77709
+IHVyYg== 77710
+IEZvdW5kZWQ= 77711
+X2hhcmQ= 77712
+IGVjbGVjdGlj 77713
+LkZpbHRlcnM= 77714
+IFJvdW5kZWRSZWN0YW5nbGU= 77715
+X3NhbXBsaW5n 77716
+IEpldHp0 77717
+YW1lcmljYW4= 77718
+Lmludm9rZUxhdGVy 77719
+IEJ1dHRlcmZseQ== 77720
+KGNvbm5lY3Rpb25TdHJpbmc= 77721
+IE5hb21p 77722
+IEphaW1l 77723
+cnRz 77724
+IG1hZ2ljYWxseQ== 77725
+Lm1hY2hpbmU= 77726
+IEFwcGFsYWNo 77727
+Iisi 77728
+dmFsZQ== 77729
+LW1vdW50ZWQ= 77730
+IGFjaGU= 77731
+TUo= 77732
+IFVJSW1hZ2VQaWNrZXJDb250cm9sbGVy 77733
+LUp1bg== 77734
+TWFuYQ== 77735
+a3JhaW5l 77736
+RENG 77737
+L1Byb2R1Y3Q= 77738
+IFJFU0VSVkVE 77739
+IEZIQQ== 77740
+OkAiJUAiLA== 77741
+IFByb2pla3Q= 77742
+IE5pcg== 77743
+IENhcm5pdmFs 77744
+ICom 77745
+IFFT 77746
+V0hP 77747
+IHdlbHQ= 77748
+IG1hcnJ5aW5n 77749
+QWxleGFuZGVy 77750
+IFJldmlld2Vk 77751
+YWN0ZXJpYQ== 77752
+IHdhbg== 77753
+KHJvYm90 77754
+IFdpbmRvd01hbmFnZXI= 77755
+IG1vbnVtZW50YWw= 77756
+IERvbWluZw== 77757
+L3dlYXRoZXI= 77758
+X3NlY29uZGFyeQ== 77759
+T3BlcmF0b3Jz 77760
+X1NJREU= 77761
+S2F0 77762
+LXpvbmU= 77763
+IHNpZ25pZmllcw== 77764
+IEh0dHBNZXRob2Q= 77765
+L2NvbnRleHQ= 77766
+Ig0KDQoNCg== 77767
+IFJvZHJpZ28= 77768
+IGJ1Yg== 77769
+L211c2lj 77770
+IHNlcm9udA== 77771
+IG1STkE= 77772
+X2VtYWlscw== 77773
+ICc+Jw== 77774
+IEdlbWU= 77775
+INGA0LDRgQ== 77776
+IH5+ 77777
+IGR1Y2tz 77778
+IEZyZXVuZA== 77779
+RXhwZXJpbWVudA== 77780
+IHJlb3BlbmVk 77781
+IFwiew== 77782
+IGVsbGlwdA== 77783
+IGNvbmNhdGVuYXRl 77784
+IHBvbG8= 77785
+VGltZVpvbmU= 77786
+ICAKICAgIAo= 77787
+IGNhcHRpb25z 77788
+cmlja3M= 77789
+LmZyZXE= 77790
+Lm1lbW8= 77791
+IHNtYg== 77792
+RHJ1Zw== 77793
+XVsv 77794
+X0JBQ0tFTkQ= 77795
+IEVsbGE= 77796
+IFBvcnRpb25z 77797
+IGZldGNoRGF0YQ== 77798
+IGNvcm91dGluZQ== 77799
+IGVzdGF2YQ== 77800
+IEdlbml1cw== 77801
+OmB+ 77802
+IFN3YW5zZWE= 77803
+KHBheW1lbnQ= 77804
+Vm90cmU= 77805
+IFBydWl0dA== 77806
+Lm9mZnNldFdpZHRo 77807
+YXJ5bA== 77808
+IHVuaWZvcm1seQ== 77809
+IFdhcnA= 77810
+IFNFQQ== 77811
+IGRlZHVjdGlibGU= 77812
+IGJ1bGxpZWQ= 77813
+IEJlc2No 77814
+IFByb3NwZWN0 77815
+T1NQ 77816
+IlllYWg= 77817
+IEFuZ3J5 77818
+LlZhbA== 77819
+IGdpZ3M= 77820
+IGJ1bGt5 77821
+ZXRlcmlh 77822
+LmdldFN0YXJ0 77823
+IE1FVEg= 77824
+IGNvaGVyZW5jZQ== 77825
+IG1lZGlhdGVk 77826
+0LXQs9C40YHRgg== 77827
+Li4uLgo= 77828
+IHN0cm9rZUxpbmU= 77829
+bWo= 77830
+IFVuc3VyZQ== 77831
+YXRocm9vbQ== 77832
+KEJpbmFyeQ== 77833
+X0tleVByZXNz 77834
+5p6E 77835
+aW5oZXJpdHM= 77836
+IHJlcHJlaA== 77837
+CVNjaGVtYQ== 77838
+IHVucmVzdHJpY3RlZA== 77839
+LmRlZmluaXRpb24= 77840
+XT8u 77841
+IGl0aA== 77842
+5aCx 77843
+IHNsaW1l 77844
+bXNncw== 77845
+X0pT 77846
+CVZlcnNpb24= 77847
+X1NFQ1VSRQ== 77848
+IGNvc3Rv 77849
+LlJlc3Ry 77850
+Y3Ny 77851
+X1RPT0xUSVA= 77852
+cGNs 77853
+IOKGkw== 77854
+U2VsZlBlcm1pc3Npb24= 77855
+LnJhdmVs 77856
+IG1lbWJyZXM= 77857
+QXNzZW1ibGVy 77858
+cm9taXVt 77859
+c3VyZg== 77860
+IFVQREFURUQ= 77861
+KGJyYW5jaA== 77862
+KGluY2x1ZGU= 77863
+IElkb2w= 77864
+XE9iamVjdA== 77865
+IGNsb25pbmc= 77866
+IGlzTmFO 77867
+IGFueg== 77868
+xrDhu51uZw== 77869
+IG9uYw== 77870
+X0NMVVNURVI= 77871
+IHt9KSwK 77872
+aW1pbmFyeQ== 77873
+CWNvbnRlbnRQYW5l 77874
+dHJhaWw= 77875
+IG5pbmV0eQ== 77876
+IE5pYWdhcmE= 77877
+IEFuZHI= 77878
+w6lzeg== 77879
+IGRpZmlj 77880
+dXRyYQ== 77881
+J319Pg== 77882
+44Kk44OI 77883
+c3Bhcg== 77884
+ICJcIiw= 77885
+IG15ZmlsZQ== 77886
+ZmZj 77887
+IG5vdGljZWFibHk= 77888
+ZXlh 77889
+IFB1dHRpbmc= 77890
+SlY= 77891
+LmRpbWVuc2lvbnM= 77892
+ZXJjYQ== 77893
+Z2VuZXNpcw== 77894
+ZWZmZWN0aXZl 77895
+IHBlcmRlcg== 77896
+Lk9S 77897
+X0NPTVBBUkU= 77898
+Omxlbg== 77899
+L3JlZA== 77900
+IEFyaXN0b3RsZQ== 77901
+IHF1ZXJpZWQ= 77902
+IGZvcmVzZWVhYmxl 77903
+IFVJQ29udHJvbA== 77904
+cmVtaW5kZXI= 77905
+IGNlbmE= 77906
+IGhpYw== 77907
+ICIiOw0KDQo= 77908
+L2Jhc2lj 77909
+IGFmZm9yZGFiaWxpdHk= 77910
+LGVycg== 77911
+INGB0LjQvNCy 77912
+IElTUg== 77913
+bGljZW5zZXM= 77914
+Vk9JQ0U= 77915
+Lkxhbmc= 77916
+LnJlbGF0aW9uc2hpcA== 77917
+IGxlbmRz 77918
+IG51dHplbg== 77919
+IGVzcGVjw61m 77920
+aWVuZGE= 77921
+PFBhaXI= 77922
+VHY= 77923
+X1JFVFJZ 77924
+IGhvbm9yaW5n 77925
+X2RlY2xhcmF0aW9u 77926
+KE5P 77927
+IEhpY2s= 77928
+IG1pbmxlbmd0aA== 77929
+IEdlc2NoaWNodGU= 77930
+YXBlc2g= 77931
+QVRPTQ== 77932
+JykiKTsK 77933
+ZW50ZXJwcmlzZQ== 77934
+Pn08Lw== 77935
+IHBvbGl0aXF1ZQ== 77936
+ZWRpdGlvbg== 77937
+X0RlYnVn 77938
+QW5uZQ== 77939
+LlNjb3Bl 77940
+Y3Rw 77941
+Y2Fub25pY2Fs 77942
+Pj47Cg== 77943
+TWVudXM= 77944
+IGZpZXJjZWx5 77945
+Lk9uY2U= 77946
+IEJvcnJvdw== 77947
+IHNvc3Q= 77948
+IHNlcnZpbmdz 77949
+LWZsYWc= 77950
+IHZlc3RlZA== 77951
+IGZyb24= 77952
+7ZWo 77953
+IGZhbWluZQ== 77954
+Il0pKXsK 77955
+ZXJlw6dv 77956
+IGtpamtlbg== 77957
+IEZsb29yaW5n 77958
+55CD 77959
+b2JzZXJ2YXRpb24= 77960
+IHVzZXJEYW8= 77961
+PSIiPg0K 77962
+Q09WSUQ= 77963
+YmFieQ== 77964
+IHRyb3VnaA== 77965
+IFNlYW0= 77966
+IEZpZ2h0ZXJz 77967
+b21pdA== 77968
+IENoYXJnZXM= 77969
+UnVzcw== 77970
+IHF1ZWxxdWU= 77971
+R2V0UG9zaXRpb24= 77972
+IE1pbmlzdGVycw== 77973
+X3JlY2VpcHQ= 77974
+IHJvb3ROb2Rl 77975
+bXVsdGlw 77976
+JHNlYXJjaA== 77977
+IikpKSkK 77978
+dGFrZXM= 77979
+ICghIQ== 77980
+IEJBVA== 77981
+Y2hhbmc= 77982
+xJM= 77983
+Lm9j 77984
+IHNraWxsZXQ= 77985
+IFNLVQ== 77986
+IEdhbGxhZ2hlcg== 77987
+IGNyZXNj 77988
+d2Vla2RheQ== 77989
+ZXJ2aXNlZA== 77990
+Q2FyZENvbnRlbnQ= 77991
+LmFjY2Vs 77992
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAK 77993
+VGFp 77994
+IENvbXBhdGliaWxpdHk= 77995
+eENG 77996
+X3Jld2FyZHM= 77997
+cmRm 77998
+QVBQTEU= 77999
+LWZlZA== 78000
+IGRlcGVuZGVk 78001
+LWdlbmVyYXRvcg== 78002
+KFByb2Nlc3M= 78003
+0LzQvtC2 78004
+IGRpc2NyZXBhbmN5 78005
+IHBob3NwaGF0ZQ== 78006
+TmV0d29ya2luZw== 78007
+6K6+6K6h5Zmo 78008
+KHJv 78009
+IGNvbmN1cnJlbmN5 78010
+CWF1dGg= 78011
+UGx1Zw== 78012
+QVRBTE9H 78013
+c3Viag== 78014
+L3RlYW0= 78015
+KGF2Zw== 78016
+b2tpbg== 78017
+IHBsZWRnZXM= 78018
+IGNvbGxhYm9yYXRvcnM= 78019
+IGVtYmFya2Vk 78020
+IERvY2g= 78021
+IERhaXJ5 78022
+Y29tcGV0aXRpb24= 78023
+IE11dGFibGVMaXN0 78024
+LXNldmVu 78025
+IGNvbmN1cnJlbnRseQ== 78026
+IFZpag== 78027
+IHJlc2V0dGluZw== 78028
+ZHBp 78029
+IHNsaXQ= 78030
+IFBPSU5URVI= 78031
+IENBUlQ= 78032
+LmRleA== 78033
+Y3Vsb3M= 78034
+X3BlcnNvbmFs 78035
+IGFuYWx5dGlj 78036
+I2NyZWF0ZQ== 78037
+X21lbWNweQ== 78038
+KExpc3ROb2Rl 78039
+X1RhZw== 78040
+IElycg== 78041
+Ij4nOw0K 78042
+U2hvcnRseQ== 78043
+LnRpcA== 78044
+XFs= 78045
+IFJlcHJlc2VudGF0aW9u 78046
+X0xJVEVSQUw= 78047
+LmNibw== 78048
+IEthcm5hdGFrYQ== 78049
+IENvbXBldGl0aXZl 78050
+IFJ1ZQ== 78051
+IHJ1bm9mZg== 78052
+IFNwZWxscw== 78053
+ZmNsb3Nl 78054
+Y2lz 78055
+RnJh 78056
+IHJlbW9yc2U= 78057
+IENvbG9nbmU= 78058
+IHJhbmdlcg== 78059
+IE1vcmc= 78060
+ZmlnaHRlcnM= 78061
+LlJlcXVlc3RQYXJhbQ== 78062
+Q29ycw== 78063
+IGRlbm90ZQ== 78064
+IGNob3Nlcw== 78065
+w6JuZA== 78066
+LnJlY3ljbGU= 78067
+IExvZ2lzdGlj 78068
+IERFQUQ= 78069
+LWxvYWRlZA== 78070
+IENsZWFycw== 78071
+IGtlbGw= 78072
+cmFwaGlj 78073
+IE1hbmU= 78074
+RU1CRVI= 78075
+IG1hc2tpbmc= 78076
+CWVkaXRvcg== 78077
+SGFsbG8= 78078
+Omxpc3Q= 78079
+IGV0aG4= 78080
+LXNlYXQ= 78081
+ICopWw== 78082
+IEdseQ== 78083
+IEFDUw== 78084
+CXN0YXQ= 78085
+L0NvbW1vbg== 78086
+IGRpc2d1aXNlZA== 78087
+RmluYW5jZQ== 78088
+IEVsZXBoYW50 78089
+dGVtcG9yYXJ5 78090
+IENhcmx5 78091
+IGNvY29z 78092
+IEp1ZGl0aA== 78093
+IHdyYXBwZXJz 78094
+IEx1bmFy 78095
+IHLDqWN1cA== 78096
+LXNldHVw 78097
+IHNpemFibGU= 78098
+ICAJIA== 78099
+Y2xhc3NpZmllcg== 78100
+IGZpZ3NpemU= 78101
+IG1hc3R1cg== 78102
+IOabtOaWsA== 78103
+IFJ3YW5kYQ== 78104
+KXQ= 78105
+IEN1cHM= 78106
+QXp1cmU= 78107
+KCl9LAo= 78108
+U1BBUkVOVA== 78109
+KGRpYw== 78110
+IFRleHRGb3JtRmllbGQ= 78111
+IGRlZm9ybQ== 78112
+IGRpcmVjY2nDs24= 78113
+IHlheg== 78114
+IGdsdWVk 78115
+IGF0cmF2w6lz 78116
+Y29mZmVl 78117
+IFVwZGF0aW5n 78118
+IENvbGxlZ2Vz 78119
+w6RsbHQ= 78120
+YW5kZWxpZXI= 78121
+IHNhbGly 78122
+IFNDQUxF 78123
+cWU= 78124
+6rO1 78125
+KHJlY2VpdmVy 78126
+bWRi 78127
+Im1hdGg= 78128
+aXNuYW4= 78129
+dGVsZWZvbmU= 78130
+UkVQT1JU 78131
+LmFkZE1vdXNlTGlzdGVuZXI= 78132
+ZHVlZA== 78133
+e31d 78134
+KCkpOg== 78135
+IHdvcmtpbmdz 78136
+fSk7CgoKCg== 78137
+IGNvbXBvbmVudFdpbGxNb3VudA== 78138
+U2VydmVycw== 78139
+X0NMT1NFRA== 78140
+SVpFUg== 78141
+IGJvb2I= 78142
+IENPTkNBVA== 78143
+IEhhcHBpbmVzcw== 78144
+IGNvbW11bmU= 78145
+eEFC 78146
+b3duZXJzaGlw 78147
+X05FQVI= 78148
+X0hBUkQ= 78149
+IFlB 78150
+bGlvbg== 78151
+IHNwaWVs 78152
+IHRhZ2dpbmc= 78153
+IGltbW9yYWw= 78154
+LWdyb3VuZA== 78155
+IHRodW5r 78156
+IGxvY3Vz 78157
+IExhdHZpYQ== 78158
+aXppb25p 78159
+Y2xhcnNpbXA= 78160
+IHBhdGllbnRseQ== 78161
+XEhhcw== 78162
+IHN1Ym9yZGluYXRl 78163
+IFdISUNI 78164
+ZW50aW9uUG9saWN5 78165
+IGRlcGxldGVk 78166
+RlNJWkU= 78167
+IFss 78168
+IEJpb2dyYXBoeQ== 78169
+IFNhbmRz 78170
+U0hBUkU= 78171
+Q2hhcnNldA== 78172
+LndyaXQ= 78173
+X1NVUw== 78174
+IE1vcmVubw== 78175
+IGJyb2Njb2xp 78176
+IFZY 78177
+YW1pY3M= 78178
+LkdldFVzZXI= 78179
+IENvbW1vZA== 78180
+LnNjaGVtZQ== 78181
+KHZz 78182
+IGFuYWxvZ291cw== 78183
+UHN5 78184
+PWxpbmU= 78185
+LnB1Ymxpc2hlcg== 78186
+IG9ud2FyZA== 78187
+0LXQutGB 78188
+IERlYWxlcnM= 78189
+IHRvQXJyYXk= 78190
+IENob2ljZXM= 78191
+0JTQvtCx0LDQsg== 78192
+IGRlZmF1bHRNZXNzYWdl 78193
+IGFncmVn 78194
+IENvbmNhdA== 78195
+SFY= 78196
+IENpcmN1bGFyUHJvZ3Jlc3M= 78197
+X3N2Yw== 78198
+VEFC 78199
+X2ZpbA== 78200
+Lk1hcFBhdGg= 78201
+emJ1cmc= 78202
+IGdldFByb2R1Y3Q= 78203
+IFZFUklGWQ== 78204
+Lk1vbmdv 78205
+IHB1bmRpdHM= 78206
+cHVsc2U= 78207
+bGljdGluZw== 78208
+Z2lhdGFu 78209
+IC4uLiI= 78210
+IGZpeg== 78211
+IGFudGlt 78212
+IENoYXR0 78213
+X1RZUEVERUY= 78214
+R3V5 78215
+CXRlc3Rz 78216
+IFNsb3Zlbmlh 78217
+IENvbW1hbmRMaW5l 78218
+IGJlbmVmaWNpYXRpb24= 78219
+IGJpbmRBY3Rpb25DcmVhdG9ycw== 78220
+TlRBWA== 78221
+LUNz 78222
+IGNoYXJpc21hdGlj 78223
+LmFsbG9j 78224
+X25m 78225
+IGFzc2F1bHRpbmc= 78226
+INGC0LDQsdC70LjRhg== 78227
+IGPDoWM= 78228
+IFNjcm9sbHM= 78229
+SEFT 78230
+eXl5eU1NZGQ= 78231
+IEdhbGU= 78232
+IFByb3plbnQ= 78233
+IFRob3JudG9u 78234
+ZGVhbGVy 78235
+IGV2aWN0aW9u 78236
+IGFuYWxl 78237
+4oCO 78238
+PSIo 78239
+IGVhZw== 78240
+KCcnKTsKCg== 78241
+IGNvbnRlbXBsYXRpbmc= 78242
+aHlw 78243
+YmVsdW0= 78244
+IEZpdHM= 78245
+IEV4YW1pbmVy 78246
+IEJ1Y2M= 78247
+IG1lbWJyYW5lcw== 78248
+IGJyaWxsaWFudGx5 78249
+IENlcmFtaWM= 78250
+w6h2ZQ== 78251
+IFBvdW5k 78252
+IHRyZWFzdXJ5 78253
+LicpOw0K 78254
+CXRj 78255
+ZWNha2U= 78256
+Q3VycmVudFVzZXI= 78257
+LmhhYmJv 78258
+IHRyZWFzb24= 78259
+IEZUQw== 78260
+TVVY 78261
+IG51bWJlcmluZw== 78262
+UklB 78263
+LS0pDQo= 78264
+IGJlaWdl 78265
+IEFydGVt 78266
+YmFzZXM= 78267
+X0JBTkQ= 78268
+IFBhdmVs 78269
+0YHRgtGA0YPQug== 78270
+dGhlZA== 78271
+X25icg== 78272
+INCx0LDQtw== 78273
+c2xpZGVVcA== 78274
+IFRheGk= 78275
+IGFxdWVs 78276
+IE1pc2NlbGxhbmVvdXM= 78277
+ZWx1 78278
+IGluc3VsYXRlZA== 78279
+IGFzc2V6 78280
+LkNvbmZpZ3VyZQ== 78281
+IHF1ZWxsYQ== 78282
+IHBhcmFzaXRlcw== 78283
+QXdheQ== 78284
+ZHVjaWJsZQ== 78285
+KCc9Jw== 78286
+IHZlcm8= 78287
+IFdhdGtpbnM= 78288
+IFNlcGFyYXRvcg== 78289
+YXBzZXM= 78290
+ZW52aXJvbm1lbnRz 78291
+IGFwcHJhaXNhbA== 78292
+cGF1c2Vk 78293
+X2RlYXRo 78294
+IHNpdHVhY2nDs24= 78295
+IGZyYXRlcm5pdHk= 78296
+IGluc2lzdGVuY2U= 78297
+X2NyeXB0bw== 78298
+QXR0cmliUG9pbnRlcg== 78299
+Il1dLAo= 78300
+IG94aWRhdGl2ZQ== 78301
+IG5ldXJvbmFs 78302
+IFFHcmFwaGljcw== 78303
+Ij4nLA== 78304
+IFNtaWxl 78305
+T2JqZWN0aXZl 78306
+IFNha3VyYQ== 78307
+Wk8= 78308
+YW1pZW50b3M= 78309
+LkxvY2FsRGF0ZVRpbWU= 78310
+L3VuaXQ= 78311
+LWZyZXF1ZW5jeQ== 78312
+LUNT 78313
+In07Cgo= 78314
+IHJlbGV2 78315
+QWxsb2NhdGlvbg== 78316
+JU0= 78317
+IER1c3Rpbg== 78318
+IHN3aXBlcg== 78319
+IE5hcmM= 78320
+dGF0dXM= 78321
+IGxvbmdpbmc= 78322
+IHRodWlzb250dmFuZ3N0 78323
+IGNvbW1vZG8= 78324
+IEFEQQ== 78325
+aW11 78326
+X2ZvcnVt 78327
+YW5naQ== 78328
+CUFwcGxpY2F0aW9u 78329
+W2Zyb20= 78330
+IEJldGhlc2Rh 78331
+b3Ryb3BpYw== 78332
+IE1VQ0g= 78333
+IHByZWRpYw== 78334
+ZmlsbWU= 78335
+KGdyYW1tYXI= 78336
+KEFQUA== 78337
+IEN1cmw= 78338
+IHNob3J0aGFuZA== 78339
+YWZmaWxpYXRl 78340
+XSoq 78341
+X250aA== 78342
+aWFiaWxpdHk= 78343
+Ym9tYg== 78344
+WVQ= 78345
+KCItLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQ== 78346
+IEJpY3ljbGU= 78347
+aW1hdGluZw== 78348
+Lm5paQ== 78349
+IEthcmE= 78350
+YXNrYW4= 78351
+cmVhY3RzdHJhcA== 78352
+IHdsYW4= 78353
+b2dyYXBoZXJz 78354
+CSANCg== 78355
+cGFnaW5hdG9y 78356
+aWhhbm5h 78357
+IG1hdGNodXBz 78358
+X1BBRERJTkc= 78359
+X3JlZ2lzdGVycw== 78360
+eXRl 78361
+IHByaWNleQ== 78362
+IGZvb3Ro 78363
+IEh1Y2s= 78364
+UEFSVE1FTlQ= 78365
+IHByb2hpYml0aW5n 78366
+LmlzRGVidWdFbmFibGVk 78367
+4KS4 78368
+bGVpbg== 78369
+PXJlcw== 78370
+LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKg== 78371
+ZGRs 78372
+bXBy 78373
+IOqwmQ== 78374
+IFdBTEw= 78375
+IHJldm9sdmVz 78376
+IFBFUkY= 78377
+KTt9 78378
+IFRvYnk= 78379
+Ly4uLw== 78380
+IGthbw== 78381
+IGZvcmVjYXN0aW5n 78382
+X0NvbnRlbnQ= 78383
+IH0pKSwK 78384
+cG9ybm8= 78385
+bGVhZGVycw== 78386
+LWhvb2tz 78387
+aXN0cmlidXRvcg== 78388
+L3N0b3J5 78389
+CWxpbmVz 78390
+LXJlcGx5 78391
+IGFkcmVuYWxpbmU= 78392
+Rmxvd0xheW91dA== 78393
+LnJvdXRpbmc= 78394
+CXRpbWVvdXQ= 78395
+IHJhaWRlZA== 78396
+CURE 78397
+IGRpc2RhaW4= 78398
+Y29uc2lzdGVudA== 78399
+Z2Vpc3Q= 78400
+KCI6Lw== 78401
+KHN0YXRlcw== 78402
+IEhJVA== 78403
+LVJheQ== 78404
+LWhlYWx0aA== 78405
+IC8vLQ== 78406
+dGVtZW50 78407
+Lm5hdmlnYXRlVG8= 78408
+IGJlbmNoZXM= 78409
+ZXdpbmc= 78410
+ZW56aGVu 78411
+LXNwbGl0 78412
+UmVqZWN0 78413
+IHB5bGFi 78414
+IGZsYXNobGlnaHQ= 78415
+IGluaXRpYXRpbmc= 78416
+IE9FQ0Q= 78417
+IGVudHJlZ2E= 78418
+TmF0dXJl 78419
+Lm9yYW5nZQ== 78420
+IMO6bHRpbW9z 78421
+IGVjcw== 78422
+LmhvdmVy 78423
+IGRlbHV4ZQ== 78424
+Um9nZXI= 78425
+IFRpYw== 78426
+IixfXw== 78427
+IHBsYWNlaG9sZGVycw== 78428
+IHNwYXduaW5n 78429
+IG51cnR1cmU= 78430
+IGV4Y2hhbmdpbmc= 78431
+Q3JlYXRlRGF0ZQ== 78432
+IGxhbWlu 78433
+IFNlbWljb25kdWN0b3I= 78434
+ICovCgoKCg== 78435
+IGbDuHJzdGU= 78436
+IGluaXRpYWxz 78437
+IHByb3ZlcmI= 78438
+IEFjdHJlc3M= 78439
+Q29uY2F0 78440
+IE5pY29sYQ== 78441
+LXNob3BwaW5n 78442
+aXZpdMOg 78443
+aXRpYW4= 78444
+IFdlcnQ= 78445
+LkFkZFNjb3BlZA== 78446
+IHNhbGVzbWFu 78447
+Ym9z 78448
+IEZlcnJ5 78449
+Q0VOVEVS 78450
+bW9kZWxv 78451
+IFJvZQ== 78452
+IElzbGFuZGVycw== 78453
+dXBlcnRpbm8= 78454
+RGVjbGFyZQ== 78455
+IHZvd2Vscw== 78456
+IGJveGVy 78457
+KHRvb2xiYXI= 78458
+IGhhbGZ0aW1l 78459
+bmlu 78460
+IEJyb29rZQ== 78461
+IFZlcw== 78462
+0LvQsNGC 78463
+IG1vdGl2bw== 78464
+cHJvdGVpbg== 78465
+a3Vz 78466
+YnVzeQ== 78467
+IHN0cmluZ1ZhbHVl 78468
+CU15 78469
+TnV0 78470
+dXp6aQ== 78471
+IHNleg== 78472
+IG9sZHM= 78473
+IG1ldGh5bA== 78474
+IGLDvA== 78475
+aGliYQ== 78476
+IEluc3BpcmF0aW9u 78477
+IGF3YWl0ZWQ= 78478
+QnJ1Y2U= 78479
+QkFMTA== 78480
+IFRSWQ== 78481
+LWxpdGU= 78482
+IHVuZGVyZXN0aW1hdGU= 78483
+CXJ2 78484
+Lm1vdg== 78485
+IGhpc3TDsw== 78486
+IEVyaWU= 78487
+Y25hbWU= 78488
+L2Nvbm5lY3Q= 78489
+Y29uZmVyZW5jZQ== 78490
+X3RyYWl0 78491
+IGt2aW5kZQ== 78492
+IEludm9jYXRpb24= 78493
+IERhdGVUaW1lT2Zmc2V0 78494
+d2VjaGF0 78495
+Q0VP 78496
+IExpYnlhbg== 78497
+LmNhcGl0YWxpemU= 78498
+IGdyYWNlZnVsbHk= 78499
+IHJlZWxz 78500
+aW5jcmVhc2U= 78501
+Lm1heGNkbg== 78502
+ZmF2b3JpdGVz 78503
+SVRFRA== 78504
+PFNjYWxhcg== 78505
+LkZldGNo 78506
+IHN1c3BpY2lvbnM= 78507
+W01BWE4= 78508
+X1RSQU5TQUNUSU9O 78509
+IGN5bGluZHJpY2Fs 78510
+Lm5leHRFbGVtZW50 78511
+IG1vcnBob2xvZ3k= 78512
+IENlZA== 78513
+IGNuYW1l 78514
+KHJhd1ZhbHVl 78515
+V2Fsa2luZw== 78516
+TG9hZHM= 78517
+X0FMSUdOTUVOVA== 78518
+X1JPVU5E 78519
+IFJPQ0s= 78520
+Y2x1c3RlcnM= 78521
+Img= 78522
+dWV1cg== 78523
+cGxhbnM= 78524
+IGF0aGVpc3Rz 78525
+IHZhdA== 78526
+PSJfXw== 78527
+YXdhaA== 78528
+ZXJ2YXRpdmVz 78529
+IGZpbmRPbmU= 78530
+IG5vdGVib29rcw== 78531
+IFRUTA== 78532
+LkdldEFzeW5j 78533
+IG3DvG5jaGVu 78534
+bUFo 78535
+YnJ0Yw== 78536
+X1BZ 78537
+QnVpbGRlckludGVyZmFjZQ== 78538
+CWdiYw== 78539
+IGJsYW5rcw== 78540
+IGTDqW0= 78541
+UmVjdXJzaXZl 78542
+Lk1hbnlUb01hbnlGaWVsZA== 78543
+X1BBUlNFUg== 78544
+IGVuZGVhdm9ycw== 78545
+IGRyaWI= 78546
+X3BocA== 78547
+IGF1dG9tb2JpbGVz 78548
+bG9pdA== 78549
+IE9ydGl6 78550
+IFVE 78551
+KGRBdEE= 78552
+IE1pdHN1YmlzaGk= 78553
+QXR0cmlidXRlVmFsdWU= 78554
+IHBvYXRl 78555
+55u45YWz 78556
+IGNhdmFscnk= 78557
+Lk1hdGNoZXJz 78558
+IGluZ3Jlc3M= 78559
+IEplaG92YWg= 78560
+CXNlcQ== 78561
+X3N0cmVldA== 78562
+IFNvZmlh 78563
+IHNjcm9sbHM= 78564
+dmluY2Vz 78565
+ZWxlY3Ryb25pY3M= 78566
+XHBhcmFt 78567
+IHplbmQ= 78568
+IHNraW0= 78569
+LnBpeA== 78570
+ZW5r 78571
+X2FyZWFz 78572
+IEJvaXNl 78573
+LXZhbGlkYXRvcg== 78574
+IHVuZWFydGg= 78575
+b2ZpbG0= 78576
+IEJDRQ== 78577
+b3Zza3k= 78578
+IExldmVy 78579
+IHBvbGljZW1hbg== 78580
+IG1pZXM= 78581
+IFBvcnRyYWl0 78582
+IHBvdGlvbnM= 78583
+X21vdA== 78584
+bWFzc2FnZQ== 78585
+0LXQvdGL 78586
+IGN1ZA== 78587
+IG1hbnVzY3JpcHRz 78588
+Y29udGludW91cw== 78589
+LnRj 78590
+w7x6 78591
+IEZyZWV6ZQ== 78592
+Xzoq 78593
+Lmht 78594
+IENTUkY= 78595
+IE3DpGRjaGVu 78596
+LXBlZXI= 78597
+IHB1dFN0ckxu 78598
+IGltc2hvdw== 78599
+IEB7JA== 78600
+IEJhdWVy 78601
+KHRvbHVh 78602
+IHdyb3VnaHQ= 78603
+IEdpYW4= 78604
+IMO2bg== 78605
+ZnVuZw== 78606
+QnV0dG9uVGl0bGVz 78607
+fSkiLA== 78608
+IE11cmRvY2g= 78609
+S1c= 78610
+IFJlcG9ydGVk 78611
+c2ll 78612
+IG1laWxsZXVycw== 78613
+IEthZXBlcm5pY2s= 78614
+IGRzcA== 78615
+IEV2ZXJ5ZGF5 78616
+cmVuZHM= 78617
+IENvbmNl 78618
+IGluY29udHI= 78619
+LnJlbW92ZUF0dHJpYnV0ZQ== 78620
+44G+44GX44Gf 78621
+IHJldw== 78622
+IFByZXNlbmNl 78623
+L2dpbg== 78624
+LkNsYWltcw== 78625
+CXNs 78626
+RHJhZ2dpbmc= 78627
+IHNwcmVl 78628
+IGFjdHVhbGl6YXI= 78629
+IG5vc3M= 78630
+IGxpZmVzdHlsZXM= 78631
+O2M= 78632
+VURHRQ== 78633
+SW5NaWxsaXM= 78634
+IGl0aw== 78635
+YWJieQ== 78636
+KHBh 78637
+aXNzZW50 78638
+IFByZXNpZGVudHM= 78639
+IEhleGF0cmlnZXNpbWFs 78640
+ZWNpZGVk 78641
+KHRleA== 78642
+IGNyb3duZWQ= 78643
+UGhpbGlw 78644
+IFNhcms= 78645
+IEFkZGl0aW9u 78646
+IENvbGJlcnQ= 78647
+IEdMRVM= 78648
+IFFMaW5lRWRpdA== 78649
+IGRyYWlucw== 78650
+IHNvcnRPcmRlcg== 78651
+ZXNjb3J0 78652
+VGVk 78653
+IG1hbmlmZXN0ZWQ= 78654
+LnZhcmlhbnQ= 78655
+IFJFRkVSRU5DRVM= 78656
+KGdj 78657
+L3sk 78658
+b2N5dGU= 78659
+IG9ybmFtZW50 78660
+IGJvb2tzdG9yZQ== 78661
+SG9s 78662
+IFZhbGw= 78663
+Lycp 78664
+YWNhaw== 78665
+IE5hdkJhcg== 78666
+IG55ZQ== 78667
+X0RlYw== 78668
+b2x2aW1lbnRv 78669
+TVJJ 78670
+IGhvb3A= 78671
+ICAgCiAgICAK 78672
+IFBvc3Rpbmc= 78673
+IG91dGxpbmluZw== 78674
+YWdhc2Nhcg== 78675
+LmJyZWFrcG9pbnRz 78676
+Y2F0aWQ= 78677
+X3RyaWdnZXJlZA== 78678
+IHJ1bm5hYmxl 78679
+L3RydW5r 78680
+LWNoYWly 78681
+IGJhaXNlcg== 78682
+ZmFjaWxpdHk= 78683
+IHBvbGxlbg== 78684
+6Z+z 78685
+IFtbIg== 78686
+IENHU2l6ZU1ha2U= 78687
+IGFzc2FpbA== 78688
+IEF0aGVuYQ== 78689
+IEFkZGljdGlvbg== 78690
+aWxhbmQ= 78691
+O2Jy 78692
+LktleWJvYXJk 78693
+X2Zt 78694
+QWNl 78695
+IFJFUQ== 78696
+IE5ld2VzdA== 78697
+Oy4= 78698
+IE1BREU= 78699
+c2V0VGltZW91dA== 78700
+U2VydmxldENvbnRleHQ= 78701
+CQkJCQkgICAgICAg 78702
+IEx1cA== 78703
+LXJldmlld2Vk 78704
+IEFuYWx5emVy 78705
+Lk5hTg== 78706
+dXR1cmE= 78707
+R2VvbQ== 78708
+eW1lcw== 78709
+X3Npbg== 78710
+IHRydXN0ZWVz 78711
+Ly89PT0= 78712
+IGFkbWl0dGVkbHk= 78713
+IGFrbw== 78714
+IFVFRkE= 78715
+X2hlcm8= 78716
+R2l0aHVi 78717
+X2VzdGltYXRl 78718
+IGNvcnJvYm9y 78719
+ZW50aWZ1bA== 78720
+IFN0ZWVyaW5n 78721
+IE1pdGFy 78722
+IFBpcGVz 78723
+IGvDpQ== 78724
+X3NlYXNvbg== 78725
+IEJDSFA= 78726
+L3NvZnR3YXJl 78727
+bmV0dGU= 78728
+KiIs 78729
+dW5kcmE= 78730
+IGdldFJlcXVlc3Q= 78731
+LkJ1ZmZlcmVk 78732
+ZmVybg== 78733
+TWFyaW8= 78734
+IGRpc3BlcnM= 78735
+X2NhdGVnb3JpYQ== 78736
+IGVuZGxlc3NseQ== 78737
+Z3VhcmRz 78738
+CWF0b21pYw== 78739
+c2NvcGVk 78740
+IHVuZG9uZQ== 78741
+U0hPUA== 78742
+IFRvcmNo 78743
+IEhhc3Rpbmdz 78744
+IEZJTEVT 78745
+X1NhdmU= 78746
+V2l0aE1hbnk= 78747
+V2lz 78748
+IGludGVuc2lmaWVk 78749
+LmFyZ3VtZW50 78750
+IEFwaVNlcnZpY2U= 78751
+IEpTSW1wb3J0 78752
+ZWtp 78753
+SW5zdXJhbmNl 78754
+c3R5 78755
+LmRzbA== 78756
+IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQo= 78757
+bHRyZQ== 78758
+U0VH 78759
+RFJBTQ== 78760
+LWJsb2NraW5n 78761
+0L3QtQ== 78762
+cGlyaW5n 78763
+IFBSRVM= 78764
+IEZhY2g= 78765
+IHNhcmM= 78766
+IFNNRQ== 78767
+IEVsZW0= 78768
+IENhbGlmb3Ju 78769
+VW5zYWZl 78770
+IENvbXBvc2Vy 78771
+KGRlcA== 78772
+IEF0dGVuZA== 78773
+ICopKCg= 78774
+IHRlYXNlZA== 78775
+IEFUSQ== 78776
+KHBt 78777
+ICIoXDw= 78778
+J10r 78779
+IHNlY3Rhcmlhbg== 78780
+IFBoYXJtYQ== 78781
+RUk= 78782
+CVRva2VuTmFtZUlkZW50aWZpZXI= 78783
+w6d1 78784
+IGF1Z21lbnRhdGlvbg== 78785
+IHNhamE= 78786
+IGNvbG9yZQ== 78787
+ZGVhZGxpbmU= 78788
+LklURU0= 78789
+IFJpeQ== 78790
+bWFhbA== 78791
+CWNsaWNr 78792
+UGVybWFuZW50 78793
+SG91c3Rvbg== 78794
+UmVzcG9uc2l2ZQ== 78795
+IEVyZ2Vibg== 78796
+ICIlIg== 78797
+LnRvT2JqZWN0 78798
+CXBpZA== 78799
+LlN1Ykl0ZW1z 78800
+IFsr 78801
+IGZ1bmd1cw== 78802
+IGJyb2NodXJl 78803
+IEFwcHJveGltYXRlbHk= 78804
+IG1paw== 78805
+dmVsb3Blcg== 78806
+IHBhZ2FtZW50bw== 78807
+5Yqo55Sf5oiQ 78808
+IGN5dA== 78809
+IFRlbXBs 78810
+ZW5pYWJsZQ== 78811
+IENvbmFu 78812
+IHNldGJhY2s= 78813
+b2JsaW5z 78814
+IE5UTg== 78815
+b3NzYWw= 78816
+VkVSQk9TRQ== 78817
+LmJpbw== 78818
+IMWe 78819
+4buf 78820
+IEdyaXA= 78821
+PCo= 78822
+VFJJRVM= 78823
+LmNob29zZQ== 78824
+UGhvZW5peA== 78825
+IHByb3ZpbmNpYQ== 78826
+TUZMT0FU 78827
+Q2Fycw== 78828
+IHJldHJvc3BlY3RpdmU= 78829
+IGFnb255 78830
+IGxsZW4= 78831
+IGJ1bXBlZA== 78832
+eWxhdGlvbg== 78833
+IHdhcnRv 78834
+IHRvZGRsZXJz 78835
+bGF2 78836
+KHBhdGllbnQ= 78837
+ICgpLT4= 78838
+Y2xj 78839
+IG9uQWN0aXZpdHlSZXN1bHQ= 78840
+IGVtdWxhdGlvbg== 78841
+IGJ1bGxk 78842
+X0FVVEhPUg== 78843
+Pk8= 78844
+L3F1 78845
+IMK2 78846
+CWhy 78847
+c3RkQ2xhc3M= 78848
+IHNwYWNlcg== 78849
+VHJhbnNsYXRlZg== 78850
+LmFkag== 78851
+Oml0ZW0= 78852
+IGV4aGF1c3Rpbmc= 78853
+cGx4 78854
+IHJldml0YWw= 78855
+xZtuaWU= 78856
+IGNhbGlmb3JuaWE= 78857
+c2V0U3RhdGU= 78858
+L3RhYg== 78859
+aW5kc2lnaHQ= 78860
+X0xldmVs 78861
+aW1pbGFy 78862
+Lm5hdmlnYXRvcg== 78863
+IHRlbXBlcmFtZW50 78864
+IGRpZsOtYw== 78865
+IGluZXhwZXJpZW5jZWQ= 78866
+IGltcHJpbnQ= 78867
+IFJlc2lzdA== 78868
+X0ZPTExPVw== 78869
+IFJldHJ5 78870
+IGVuZ2FnZW1lbnRz 78871
+Q2FuQmVDb252ZXJ0ZWQ= 78872
+IHNpbmdsZWQ= 78873
+Lmljb25z 78874
+IGNvbmRvbXM= 78875
+IEZlYXRoZXI= 78876
+bGVybmVu 78877
+KWI= 78878
+IE5wZ3NxbA== 78879
+IENvbnNvbGlk 78880
+cGVrdA== 78881
+56uv 78882
+c3RyaW5nVmFsdWU= 78883
+R2Ft 78884
+IFNpbmFp 78885
+IE9iamVjdFR5cGU= 78886
+X2lucA== 78887
+IHBhcnRp 78888
+IFdhdGVycHJvb2Y= 78889
+IGNvbGxpZGVk 78890
+IGFpcnM= 78891
+L3dvcmxk 78892
+L1NlYXJjaA== 78893
+X3N5bnRheA== 78894
+xZ9p 78895
+X2Fubm90YXRpb25z 78896
+IFRhY28= 78897
+TEFU 78898
+IE9wY29kZQ== 78899
+44CC4oCdCgo= 78900
+IGxlYXNo 78901
+IEFsaWNpYQ== 78902
+77yM6buY6K6k 78903
+IFRTQQ== 78904
+IGhvdHRlcg== 78905
+X0hhbmRsZVR5cGVEZWY= 78906
+Z2luYXM= 78907
+IGluZGlmZmVyZW50 78908
+Q3VzdG9tTGFiZWw= 78909
+kZA= 78910
+b2R5bmFtaWNz 78911
+T25VaVRocmVhZA== 78912
+IENhcmE= 78913
+LmRldmljZXM= 78914
+IEZvcmVpZ25LZXk= 78915
+PicpOw0K 78916
+LmJ1dA== 78917
+LnRpZg== 78918
+IOaWsA== 78919
+IE9rSHR0cENsaWVudA== 78920
+KFRleHR1cmU= 78921
+LlNPQ0s= 78922
+KGluc3Ry 78923
+bWlzdA== 78924
+VW5uYW1lZA== 78925
+U3I= 78926
+Km51bQ== 78927
+KE5VTQ== 78928
+KioqKioKCg== 78929
+L2hlbHA= 78930
+YmVlbGQ= 78931
+LmFkanVzdA== 78932
+X1Bhcm1z 78933
+X0FOR0xF 78934
+VFJFRQ== 78935
+IGVzdHVkaW8= 78936
+d29ya3NoZWV0 78937
+Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tCg== 78938
+QWR2aWNl 78939
+w7bDn2U= 78940
+bkVudGVy 78941
+YcSH 78942
+IGFnZWluZw== 78943
+IEt1cmRpc3Rhbg== 78944
+X1JUQw== 78945
+YmFua3M= 78946
+LlVS 78947
+IGluY2FybmF0aW9u 78948
+IGdsYW1vdXI= 78949
+IOOCuQ== 78950
+IGltcGVyaWFsaXNt 78951
+7J6F64uI64uk 78952
+IHNpZGVsaW5l 78953
+LkFycmF5QWRhcHRlcg== 78954
+IyMjIyMjCg== 78955
+IFN5cmlhbnM= 78956
+IEF0dGVuZGFuY2U= 78957
+LWVzcXVl 78958
+IGdyZW5hZGVz 78959
+X3Fvcw== 78960
+T1ND 78961
+X2Rvb3I= 78962
+LkNhcA== 78963
+REFM 78964
+IGFtYnVzaA== 78965
+CWVz 78966
+VG9Kc29u 78967
+TWFudWZhY3Q= 78968
+RW1lcmdlbmN5 78969
+IFFGaWxl 78970
+IOWV 78971
+CUxQ 78972
+5pCc57Si 78973
+IEdhcmxhbmQ= 78974
+LmNvbm5lY3Rpb25z 78975
+LlJlYWRGaWxl 78976
+IEh3eQ== 78977
+4oCUZXZlbg== 78978
+eERF 78979
+IG5vdXZlbGxlcw== 78980
+IEh1c3M= 78981
+RGVwb3NpdA== 78982
+X2ZvcmVpZ24= 78983
+YWJhag== 78984
+IFBveg== 78985
+ZGJ1cw== 78986
+IGlvZA== 78987
+w5cKCg== 78988
+IENoZWVycw== 78989
+SmVzc2ljYQ== 78990
+IHNhaXNvbg== 78991
+IFB0eQ== 78992
+Ij48IS0t 78993
+aW5vYQ== 78994
+ZXhjbHVkaW5n 78995
+IGJpdHRlcm5lc3M= 78996
+dWVsaW5n 78997
+UHJvdGVjdGlvbg== 78998
+IEJlcmdlbg== 78999
+CQkJIAo= 79000
+QkVM 79001
+IFRvYmlhcw== 79002
+IHVwZA== 79003
+67KE 79004
+IGZvbGlhZ2U= 79005
+X1BVUg== 79006
+IEFkdm9jYXRl 79007
+IG9uUmVxdWVzdA== 79008
+LnBhcnRpdGlvbg== 79009
+IERldmVsb3BlZA== 79010
+IGNyaWI= 79011
+0YHQutC4 79012
+dm91Y2hlcg== 79013
+IEludGVyc2VjdGlvbg== 79014
+IG5pZWNl 79015
+IGxr 79016
+IENhdWN1cw== 79017
+KFsNCg== 79018
+IERldGVjdG9y 79019
+L2xn 79020
+IEhlZGdl 79021
+IHNsdWdn 79022
+YW5nc3Ryb20= 79023
+IENvbnRyb2xsZXJCYXNl 79024
+CXl5 79025
+LnBw 79026
+IEtsaW5n 79027
+IExUUw== 79028
+4oaT 79029
+YXJyYQ== 79030
+Z2V0SlNPTg== 79031
+X3dlYnNpdGU= 79032
+IGlkaW90cw== 79033
+IE1lZ2hhbg== 79034
+QnV0dG9uTW9kdWxl 79035
+ICU+ 79036
+IHByb2plY3RpbGVz 79037
+c3dvcmQ= 79038
+ICAgIAkJCQkJ 79039
+IGFzc2Vz 79040
+IFN1Y2hl 79041
+IGtlZA== 79042
+csOhZg== 79043
+IHNhcsOg 79044
+TEVuY29kZXI= 79045
+UkFORA== 79046
+IFNvbWVob3c= 79047
+IFNhbGE= 79048
+IG11bHRpbQ== 79049
+IG51bVJvd3M= 79050
+IFJvY2tpZXM= 79051
+IHhk 79052
+IGRpc3Byb3BvcnRpb25hdGU= 79053
+CVJUTEk= 79054
+CVVSTA== 79055
+YWdsaQ== 79056
+IFN1YkxPYmplY3Q= 79057
+IEdyYXZlcw== 79058
+X3JlZ3VsYXJpemVy 79059
+X2NoYXJhY3RlcnM= 79060
+LmFuYWx5dGljcw== 79061
+Lm1vZHM= 79062
+IGltcHJvdmlz 79063
+IEJsb2NrUG9z 79064
+X2luc3RhbGxlZA== 79065
+X0NPTlRJTlVF 79066
+L2Rvd24= 79067
+U09D 79068
+LmFwaVVybA== 79069
+LlVzZXJTZXJ2aWNl 79070
+VHJlZXM= 79071
+5oqV 79072
+X292ZXJmbG93 79073
+YXVzYWw= 79074
+Ym94ZWQ= 79075
+Jgo= 79076
+IEphY3F1 79077
+X3Vzcg== 79078
+SU5UUg== 79079
+IHNpZ25hZ2U= 79080
+IGNvY2g= 79081
+Tm9ybWFsaXplZA== 79082
+CgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgo= 79083
+IHN1c3RhaW5pbmc= 79084
+IFNjcmFw 79085
+cHJhYWs= 79086
+LWF2YXRhcg== 79087
+LndlYnNpdGU= 79088
+KGd1aQ== 79089
+PXJlc3BvbnNl 79090
+KG9wZXJhdG9y 79091
+IGVmZm9ydGxlc3M= 79092
+IEFjdGlvbkJhcg== 79093
+RkZF 79094
+56uL 79095
+CVJlZ2lzdGVy 79096
+QVJTRQ== 79097
+KW4= 79098
+IE1PU1Q= 79099
+X1NQUg== 79100
+X0NISVA= 79101
+YXNk 79102
+IHRvcExlZnQ= 79103
+IFR4dA== 79104
+0LDQttC0 79105
+LlZvbHVtZQ== 79106
+IGlubGV0 79107
+IGZyYWN0dXJlZA== 79108
+IExvbmdpdHVkZQ== 79109
+IERyYW0= 79110
+LkNvbm5lY3Rpb25TdHJpbmdz 79111
+YWJlZQ== 79112
+cGVyYXRl 79113
+am5p 79114
+YHQ= 79115
+ZmluZ2Vy 79116
+IEplc3NpZQ== 79117
+LGxs 79118
+IFJ1ZHk= 79119
+IGdlbmVyb3VzbHk= 79120
+X0NPTlZFUlQ= 79121
+IGVpdXNtb2Q= 79122
+IERhaQ== 79123
+aW1hZ2lu 79124
+IEdPYmplY3Q= 79125
+IMSRw6M= 79126
+aWRpb3Vz 79127
+cmlkZ2Vk 79128
+IHNvcHI= 79129
+0LvQsNC0 79130
+IHN0aXRjaGluZw== 79131
+IGtyYg== 79132
+CiAgICAgICAgCiAgICAgICAgCg== 79133
+IGxhdmlzaA== 79134
+IENpdg== 79135
+U3RhcnRFbGVtZW50 79136
+IExvbA== 79137
+CXV0aWw= 79138
+J11dLg== 79139
+IE1hbGF5 79140
+IC4NCg== 79141
+548= 79142
+X0ludm9rZQ== 79143
+aXZpc3Q= 79144
+RGVwZW5kaW5n 79145
+KSI7DQo= 79146
+IHRvZnU= 79147
+IE1DUA== 79148
+IHN0b2NraW5n 79149
+IGNhdGhlZHJhbA== 79150
+IHF1YWRyYXRpYw== 79151
+YWxlemE= 79152
+Lm1vdmVUb0ZpcnN0 79153
+Q29sb3JCcnVzaA== 79154
+IEVyZWN0 79155
+IFJDUw== 79156
+OmJlZm9yZQ== 79157
+PW5vZGU= 79158
+IHByb2Jsw6htZQ== 79159
+X3Jobw== 79160
+IHN2ZW5zaw== 79161
+Um95 79162
+YmFzZVBhdGg= 79163
+IGtvbmQ= 79164
+INC10YHRgtGM 79165
+Z2V0U2luZ2xldG9u 79166
+IERTTQ== 79167
+SWFu 79168
+IGh1bnRlZA== 79169
+IFRlcnJhY2U= 79170
+IGNoaWxkY2FyZQ== 79171
+IGNvZWZmcw== 79172
+IGdyYWRlZA== 79173
+IEx1Y2lh 79174
+IGpzb25PYmo= 79175
+YWJsZU9iamVjdA== 79176
+VmF1bHQ= 79177
+w61zdGljYQ== 79178
+X3BhZ28= 79179
+X1BG 79180
+YW5kcmU= 79181
+IEFuYXRvbXk= 79182
+LkpDb21ib0JveA== 79183
+b3VyZQ== 79184
+IGdlbm90eXBl 79185
+YmVuY2htYXJr 79186
+IGJhaWs= 79187
+IFF1w6liZWM= 79188
+KCkpDQoNCg== 79189
+IGt1bm5l 79190
+IFBvc3NpYmx5 79191
+IEJlaXNwaWVs 79192
+IGNvbmRvbGVuY2Vz 79193
+PXF1ZXJ5 79194
+IHbDtQ== 79195
+IG51ZXZhcw== 79196
+IEFwb2NhbHlwc2U= 79197
+dmVjdGlvbg== 79198
+CXNwcml0ZQ== 79199
+bGV2YXRvcg== 79200
+LiJdCg== 79201
+Z2V0TmV4dA== 79202
+KFJlZ2lzdGVy 79203
+IHVuc3Vi 79204
+dHJlZXZpZXc= 79205
+Tm9kZUlk 79206
+IOyK 79207
+JikK 79208
+Zmx0 79209
+IGhvdHNwb3Q= 79210
+IGdhc3Ryb2ludGVzdGluYWw= 79211
+ZmlnY2FwdGlvbg== 79212
+b3dlcmVk 79213
+IENzcw== 79214
+X3Jvcw== 79215
+X3NjYWxpbmc= 79216
+IGVkaXRhcg== 79217
+J11dKTsK 79218
+Lm5lZw== 79219
+IGZ1dHVyaXN0aWM= 79220
+IHN0YXRh 79221
+dWN0b3I= 79222
+VUxBVEU= 79223
+IHfFgg== 79224
+LWNoYXJhY3Rlcg== 79225
+ICAKCgo= 79226
+IEJlYXU= 79227
+IHBlcm1hbGluaw== 79228
+Qnl0ZUJ1ZmZlcg== 79229
+IGRpY3RhdGVz 79230
+IE1MQQ== 79231
+X0xvZ2lu 79232
+Q29uZGl0aW9uYWw= 79233
+U1lN 79234
+QXJyYW5nZQ== 79235
+IFN0b2Nrcw== 79236
+IG1lYXNsZXM= 79237
+4KSk 79238
+RW5jcnlwdGlvbg== 79239
+IEVudGlyZQ== 79240
+IG1pbk9jY3Vycw== 79241
+IGh1Z3M= 79242
+L3dpbmRvdw== 79243
+CXByb3A= 79244
+PSQoKA== 79245
+IFVDUw== 79246
+IEZpcg== 79247
+LkNsb2Nr 79248
+LWRlc2t0b3A= 79249
+IG1hbGZvcm1lZA== 79250
+IEFiZXJkZWVu 79251
+IMOF 79252
+IFJvYWRz 79253
+IEJlaGF2aW91cg== 79254
+KCkn 79255
+5bGe5oCn 79256
+LkNvbXBhcmF0b3I= 79257
+X21v 79258
+X0lPUw== 79259
+IE9yaW9sZXM= 79260
+Lkxvb2t1cA== 79261
+IGZzZWVr 79262
+X0lC 79263
+L3N0YXI= 79264
+Kzwv 79265
+X0Rlc3Ryb3k= 79266
+LXRyYQ== 79267
+KCcuJyk= 79268
+IEZvckNhbkJlQ29udmVydGVk 79269
+IEZvckNhbkJlQ29udmVydGVkVG9G 79270
+IEZvckNhbkJlQ29udmVydGVkVG9Gb3JlYWNo 79271
+IEFhZA== 79272
+IGFpcnN0cmlrZXM= 79273
+aXNPaw== 79274
+IGZlZGVyYXRpb24= 79275
+IExhYnJhZG9y 79276
+X2xhdW5jaGVy 79277
+YWxvZ3k= 79278
+Pj4oKTsKCg== 79279
+IEp1Yg== 79280
+dXRy 79281
+aXN0aW5ndWlzaGVk 79282
+YWJhbnQ= 79283
+UmVnaW9ucw== 79284
+L2hlbHBlcg== 79285
+X2xpc3Rlbg== 79286
+CVRvYXN0 79287
+IEZpbGVNYW5hZ2Vy 79288
+aXRvcmlz 79289
+IGVsZWN0cm9kZXM= 79290
+R1JBREU= 79291
+IGJlZ2dlZA== 79292
+IFBsYXRlcw== 79293
+YWZvbmU= 79294
+ISEhCg== 79295
+IGVieA== 79296
+IGRlZmF1bHRQcm9wcw== 79297
+IGNvbXBhcmVUbw== 79298
+IFNDQw== 79299
+LmV4dGVudA== 79300
+YXV0b3M= 79301
+IOyW 79302
+IFRvbGtpZW4= 79303
+OjoqOwoK 79304
+Kics 79305
+LmRvY3VtZW50cw== 79306
+c2luZw== 79307
+PUJpdENvbnZlcnRlcg== 79308
+IEtyaXNobmE= 79309
+IHBsYWlzaXI= 79310
+IGJ1Z2d5 79311
+IHJlZ3VsYXRlcw== 79312
+IGZyaWRheQ== 79313
+IGNvbXBsZXRlbmVzcw== 79314
+IGF1ZGlibGU= 79315
+IFJlY29nbml0aW9uRXhjZXB0aW9u 79316
+IHNoZWRkaW5n 79317
+W10pewo= 79318
+KGJhbGw= 79319
+IENoYXRDb2xvcg== 79320
+KENvZGU= 79321
+KCksCgo= 79322
+IHRlcnRpYXJ5 79323
+IFNJREU= 79324
+KEpTT05PYmplY3Q= 79325
+pOaWrQ== 79326
+UmVtYXJrcw== 79327
+IGxpc3RCb3g= 79328
+LmltYWdlVXJs 79329
+IGRlbGF5aW5n 79330
+IHNvY2lvZWNvbm9taWM= 79331
+Lmxw 79332
+PE15 79333
+Lm9uU3RhcnQ= 79334
+IFNjb3I= 79335
+Ynl0ZXJpYW4= 79336
+LXJvY2s= 79337
+X21ldGVy 79338
+IHJlcG1hdA== 79339
+IHByZWd1bnRh 79340
+IE1FVEE= 79341
+KGd0 79342
+IEZSSUVORA== 79343
+IHNvcnRl 79344
+IGhlcA== 79345
+b25vbWllcw== 79346
+IGF1dG9tw6F0 79347
+IEZvcm1hdHM= 79348
+c3RhdGVQcm92aWRlcg== 79349
+LWZsb29y 79350
+X01VWA== 79351
+KENvbnRlbnQ= 79352
+IElOU1RBTEw= 79353
+IFRpdGFuaXVt 79354
+cnVj 79355
+LkRhdGFzZXQ= 79356
+YXNjbw== 79357
+Lk1BVENI 79358
+IGZlc3Rpdml0aWVz 79359
+TVNO 79360
+Lm90 79361
+IEdldExhc3RFcnJvcg== 79362
+aWVucw== 79363
+IF9fX19fX19fX19fX19fX19fXwoK 79364
+X0dG 79365
+X3BsYXRl 79366
+IEZvcm1hbA== 79367
+LWxldHRlcg== 79368
+S2F0ZQ== 79369
+YXBpYQ== 79370
+ICoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKi8K 79371
+L2dlbmVyYXRlZA== 79372
+IERpbmc= 79373
+IEZyaWVkcmljaA== 79374
+ICcpJw== 79375
+VUJMSVNI 79376
+IEFiaWxpdGllcw== 79377
+IHVubG9ja2luZw== 79378
+Lnl5 79379
+IEludGVycg== 79380
+bm90aHJvdw== 79381
+aXBvcA== 79382
+IENPUlBPUg== 79383
+W2FycmF5 79384
+PFdlYkVsZW1lbnQ= 79385
+X1NJRA== 79386
+LnF1YWw= 79387
+RGlhZ25vc3RpYw== 79388
+OiIiLAo= 79389
+KG1vbWVudA== 79390
+anVyZWQ= 79391
+IHRlcnJlc3RyaWFs 79392
+ZXJ1bGU= 79393
+ICYpOwo= 79394
+IGJ1cmVhdWNyYXRpYw== 79395
+b3BwaW5z 79396
+IGphcG9u 79397
+bGVvbg== 79398
+X3JlbmFtZQ== 79399
+X0RFU1RST1k= 79400
+LkVuZHNXaXRo 79401
+IGVydXB0aW9u 79402
+KioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKi8K 79403
+UEVU 79404
+X3JlbG9hZA== 79405
+IHN1cHBsZW1lbnRhcnk= 79406
+IHppZW4= 79407
+Q0xMb2NhdGlvbg== 79408
+IGtsZWlu 79409
+X2Vm 79410
+Ont9 79411
+IGNvbWVudGFyaW9z 79412
+KHZhbGlkYXRpb24= 79413
+Lnh0ZXh0 79414
+X0lNQUdFUw== 79415
+LnNldElucHV0 79416
+IERlY29tcGlsZWQ= 79417
+X1RCTA== 79418
+Y29tcGxleFR5cGU= 79419
+X2ZlYXR1cmVk 79420
+ID8+PD8= 79421
+LnZvdGU= 79422
+IEZyaWRheXM= 79423
+LmNvbnN1bWU= 79424
+Lk1FRElB 79425
+IHN5bmVyZw== 79426
+jpjsnbTsp4A= 79427
+X0hFQURFUlM= 79428
+eEFD 79429
+X252 79430
+zq0= 79431
+IFNpbW9uZQ== 79432
+Q2VycmFy 79433
+YWRkb2Nr 79434
+LnNlcmlhbGl6ZXI= 79435
+IENsYXNzaWZpZWQ= 79436
+Lkl0ZW1zU291cmNl 79437
+IHByZWNvbmRpdGlvbg== 79438
+44Gd44GX44Gm 79439
+RElTVA== 79440
+SW1hZ2VVcmw= 79441
+L3JhbmRvbQ== 79442
+IGVyw7N0 79443
+W3Jvb3Q= 79444
+QUxMRVJZ 79445
+Y2o= 79446
+eEFE 79447
+IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIwo= 79448
+IGl0YWxpYW5p 79449
+fCM= 79450
+IHJlZ2VuZXJhdGU= 79451
+IHN0cnI= 79452
+KHx8 79453
+IEVtZXJzb24= 79454
+IFBJRQ== 79455
+Y2xpZmZl 79456
+CWFu 79457
+PlBhc3N3b3Jk 79458
+dG9EYXRl 79459
+Q2lwaGVy 79460
+IGNvbnZveQ== 79461
+IFhDVEFzc2VydFRydWU= 79462
+L19f 79463
+LWZvY3Vz 79464
+IFJoaW5v 79465
+IGdvbw== 79466
+IGJvdG9u 79467
+Lk5vU3VjaA== 79468
+IFJlZHVjZWQ= 79469
+TUlTUw== 79470
+IFdpbmNoZXN0ZXI= 79471
+dXJsZW5jb2Rl 79472
+IG11ZGR5 79473
+aXlh 79474
+IE1icHM= 79475
+IHN0YWw= 79476
+b2RhZm9uZQ== 79477
+5Lus 79478
+IHBo4bqpbQ== 79479
+ICIvIjsK 79480
+IEFtbW8= 79481
+TmV3UHJvcA== 79482
+ID0KCg== 79483
+INCf0YA= 79484
+IHBheg== 79485
+IGxpYmVybw== 79486
+CVJlc291cmNl 79487
+bmVpZ2hib3Jz 79488
+LHJlc3BvbnNl 79489
+X2F0dGVtcHRz 79490
+IG5r 79491
+IG1pbGl0aWFz 79492
+X1BBWUxPQUQ= 79493
+LkJ5dGVTdHJpbmc= 79494
+INGB0L7QtNC10YDQtg== 79495
+YXJ0b24= 79496
+PkhlbGxv 79497
+bGlnaHRseQ== 79498
+b3dlbGw= 79499
+IGd1YXJkaW5n 79500
+IFRPSw== 79501
+IHdoZXJlYWJvdXRz 79502
+X2R3 79503
+IFJvdWxldHRl 79504
+IGd5cg== 79505
+IEZlZG9yYQ== 79506
+LkJ1dHRvbnM= 79507
+IGV4Y2xhaW1lZA== 79508
+IFNvbW1lcg== 79509
+QXV0aEd1YXJk 79510
+LXJhdGluZw== 79511
+TWV0aG9kQmVhdA== 79512
+LnBvc2l0aW9ucw== 79513
+TWVkaWFu 79514
+LuKApgoK 79515
+IGdsYWM= 79516
+IHVuZGVybWluZWQ= 79517
+JSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJQ== 79518
+X3RoaXJk 79519
+LmtlZXA= 79520
+IGhheWE= 79521
+IHRvSlNPTg== 79522
+IExhdXJpZQ== 79523
+IAkgICA= 79524
+IEFjY3Vt 79525
+IHBydW5l 79526
+dXJ2ZWQ= 79527
+IE5TRg== 79528
+IEdyYXBl 79529
+RkxJQ1Q= 79530
+6LI= 79531
+IHByZWRpcw== 79532
+X3B0cnM= 79533
+IG11bHRpY2FzdA== 79534
+KEdyb3Vw 79535
+IGhlacOf 79536
+IGZlZGVyYWxseQ== 79537
+X1BBVVNF 79538
+IG1hbGF5c2lh 79539
+IFJlY2FsbA== 79540
+IHJvZHo= 79541
+IFNlbnRlbmNl 79542
+aW50ZWw= 79543
+X2RydmRhdGE= 79544
+LXNjZW5lcw== 79545
+PHk= 79546
+IGZvb2xlZA== 79547
+IExvdWQ= 79548
+IGFudGl2aXJ1cw== 79549
+LnBsaXN0 79550
+IHZlcndlbmRlbg== 79551
+IFdvbGZl 79552
+KWl0ZW0= 79553
+IHR3aXN0aW5n 79554
+IGVzcGFu 79555
+YXRlcm5v 79556
+IEFjY29yZA== 79557
+KCldLA== 79558
+UkVNT1ZF 79559
+ZGVoeQ== 79560
+X1ByZQ== 79561
+IG1pc2Nhcg== 79562
+dmxh 79563
+IHNlbWJs 79564
+IHRldGhlcg== 79565
+IEJpag== 79566
+LycKCg== 79567
+IENvcGllcw== 79568
+LXBhdHRlcm4= 79569
+Lm9uVmlldw== 79570
+LXRha2luZw== 79571
+X3NpbXBz 79572
+44GX44GL44GX 79573
+IERBQ0E= 79574
+b3JuaW5n 79575
+IFBlc3NvYQ== 79576
+b3JueQ== 79577
+X3Bhcw== 79578
+IGVpZ2h0eQ== 79579
+VGFj 79580
+X1NUT0NL 79581
+LmxvY2F0aW9ucw== 79582
+Iil9LAo= 79583
+IHTDoQ== 79584
+LWZpZWxkcw== 79585
+b2thbmU= 79586
+L2t1YmVybmV0ZXM= 79587
+IGNoaWNh 79588
+IGFydMOtY3Vsbw== 79589
+7II= 79590
+Q1JFQVNF 79591
+QVNB 79592
+IExvbmQ= 79593
+IGV4ZW1wbG8= 79594
+QWxsb3dz 79595
+aHRtbHNwZWNpYWxjaGFycw== 79596
+KHZpcw== 79597
+IGpy 79598
+54Gr 79599
+IEVDTQ== 79600
+IGVtYmFy 79601
+X0FEQVBURVI= 79602
+IGRpbHV0ZWQ= 79603
+X29mZmljZQ== 79604
+IHNraW5jYXJl 79605
+QUdJTkc= 79606
+IMO+ 79607
+IFNNQVJU 79608
+L1RhYmxl 79609
+IGJhc2Fs 79610
+Q29uY3VycmVuY3k= 79611
+IFZveA== 79612
+IFVJQ29sbGVjdGlvblZpZXdDZWxs 79613
+IHdvbA== 79614
+IFNPVVRI 79615
+IGZyb21EYXRl 79616
+IGNvcmRz 79617
+RU1T 79618
+LndlaXhpbg== 79619
+J2VsbGU= 79620
+IOWx 79621
+IGdvYWx0 79622
+dWli 79623
+IE5lcHR1bmU= 79624
+KG9yZA== 79625
+xLFuxLFu 79626
+IG1pY3JvYmVz 79627
+V2VhcG9ucw== 79628
+LURlYw== 79629
+IFJvb25leQ== 79630
+IFN3YWdnZXI= 79631
+66qF 79632
+X2xh 79633
+IGdlbmVyYWRv 79634
+IEhpcg== 79635
+Q29taWM= 79636
+IGNhcnZl 79637
+X3Jx 79638
+aWN0ZXI= 79639
+IGNhcnRlbA== 79640
+YW5jaWFz 79641
+IFBhbmFzb25pYw== 79642
+IHJvYWRzaWRl 79643
+IGZyZXNod2F0ZXI= 79644
+IGRiYw== 79645
+X3RleHRz 79646
+X3NrdQ== 79647
+IFN1bW1lcnM= 79648
+IFBpY3R1cmVCb3g= 79649
+Lmdyb3VwQ29udHJvbA== 79650
+VkFSQ0hBUg== 79651
+UmVMVQ== 79652
+IHNhYm90YWdl 79653
+DQogICAgICAgICAgICANCg== 79654
+IHNjcm9sbGJhcg== 79655
+IGJhdHRlcmVk 79656
+Y2lw 79657
+LXBpY3R1cmU= 79658
+CXN0YXRz 79659
+LmNyZWF0b3I= 79660
+X0NMRUFO 79661
+Lk1PRA== 79662
+IGJpZ2ludA== 79663
+IFRlcnJvcmlzbQ== 79664
+X1Nob3c= 79665
+IFNwaWNlcg== 79666
+X0VUSA== 79667
+IMSR4buD 79668
+IHN1bW1lcnM= 79669
+IFVyYW4= 79670
+L21lbW9yeQ== 79671
+UmV2aWV3ZWQ= 79672
+IGR1ZXM= 79673
+c2V0U2NhbGU= 79674
+IFJheXM= 79675
+IENTQw== 79676
+aW5jb21pbmc= 79677
+LWJ1eQ== 79678
+IHByb2N1cmU= 79679
+ZW50YXI= 79680
+IGJ1bGxz 79681
+IAkJCQkJCQ== 79682
+IEZpYm9uYWNjaQ== 79683
+LXNjaGVtYQ== 79684
+bWFrZXM= 79685
+RWY= 79686
+X0Rlc2NyaXB0aW9u 79687
+L2FsZXJ0 79688
+IGpzb25TdHJpbmc= 79689
+dWZmbGluZw== 79690
+IEtFUk5FTA== 79691
+IEhveQ== 79692
+IGdyYW50UmVzdWx0cw== 79693
+b25hbGQ= 79694
+IFByb3ZpbmNpYWw= 79695
+c2VuZGluZw== 79696
+cHRvbQ== 79697
+INCe0LE= 79698
+IGNvbnN0cmFpbg== 79699
+IMWhdG8= 79700
+IFJhaXNlZEJ1dHRvbg== 79701
+VVRET1dO 79702
+IEdMc2l6ZWk= 79703
+IOekug== 79704
+44OR 79705
+IEdvbg== 79706
+UExJRVI= 79707
+J119PC8= 79708
+Y2xhc3NpYw== 79709
+IGVuZ3JhdmVk 79710
+IG1hc2N1bGluaXR5 79711
+TWFyc2g= 79712
+c3NxbA== 79713
+KEdyYXZpdHk= 79714
+IGxvYnN0ZXI= 79715
+67aE 79716
+X0ludGVy 79717
+XGJhc2U= 79718
+JzpbJw== 79719
+IGRldGFsbGU= 79720
+dHdlZXRz 79721
+IGplYWxvdXN5 79722
+YWdlbmRh 79723
+LGl0 79724
+c3dpcmU= 79725
+K0I= 79726
+IHRyb3V0 79727
+X2FsdGVybg== 79728
+OiIj 79729
+IER3YXJm 79730
+IFNoYXBpcm8= 79731
+ZXJvb24= 79732
+IG5vaw== 79733
+X2xvbmdpdHVkZQ== 79734
+IFdlcm5lcg== 79735
+IHZpb2xldA== 79736
+dXJzaXZlbHk= 79737
+LWF3YWl0 79738
+IH0KCgoKCgo= 79739
+IExlbm5vbg== 79740
+IEFudGFyY3RpYw== 79741
+IGLDpWRl 79742
+X3Nsb3Bl 79743
+bWFuZG8= 79744
+b3VuY2Vy 79745
+LWlvbg== 79746
+IERlc3RydWN0aW9u 79747
+aXNzZW5zY2hhZnQ= 79748
+UGl6emE= 79749
+IEdlb2xvZ2ljYWw= 79750
+Qk9VTkQ= 79751
+IGNpbmU= 79752
+RGVtb24= 79753
+LnBlb3BsZQ== 79754
+X1RPR0dMRQ== 79755
+CW5vZGVz 79756
+YnVzY2Fy 79757
+LnByb2Nlc3Nvcg== 79758
+Tmg= 79759
+L3Nkaw== 79760
+IG15Y2tldA== 79761
+YXVjdGlvbg== 79762
+TWVn 79763
+R01FTQ== 79764
+IGlyb25pY2FsbHk= 79765
+5riF 79766
+IGNvbnZlcmdl 79767
+IFVJVGFibGVWaWV3RGF0YVNvdXJjZQ== 79768
+QXJkdWlubw== 79769
+PmU= 79770
+Sm95 79771
+IFNob3VsZGVy 79772
+IER1Yw== 79773
+UFJJTUFSWQ== 79774
+Lioo 79775
+LXByZXM= 79776
+IGRpYWxvZ1JlZg== 79777
+aW1hZ2VOYW1l 79778
+X2ludm9rZQ== 79779
+XFRlbXBsYXRl 79780
+T0k= 79781
+IHZyaWVuZA== 79782
+IEd1ZXJy 79783
+IHByZXJlcXVpc2l0ZQ== 79784
+IFBHQQ== 79785
+IFJlc3A= 79786
+KSIsIg== 79787
+bGxlbg== 79788
+IHNuYXBwaW5n 79789
+X0ZpcnN0 79790
+S0lU 79791
+LnNldEZvY3Vz 79792
+IEN5cHJlc3M= 79793
+Y3JhZnRlZA== 79794
+LzsK 79795
+d2VpZ2h0ZWQ= 79796
+dm95 79797
+X3RG 79798
+X2luc24= 79799
+IEluc3RhbGxpbmc= 79800
+IEdhbGx1cA== 79801
+QURPUg== 79802
+IEFMT0c= 79803
+Q29udGV4dEhvbGRlcg== 79804
+IFRvdXQ= 79805
+IEZvbGV5 79806
+IGNvbnRlbXBsYXRl 79807
+IENvaW5iYXNl 79808
+WMOj 79809
+d2FuZA== 79810
+LkNyZWF0ZUNvbW1hbmQ= 79811
+U29jaw== 79812
+IHVud3JhcA== 79813
+Y2xhc3NwYXRo 79814
+PFJlc291cmNl 79815
+X0VTVA== 79816
+PXJhbmRvbQ== 79817
+IFNoYWRl 79818
+IGRpY2k= 79819
+2K/Zig== 79820
+IGtpdHR5 79821
+0LDRgtC10LM= 79822
+4buNbg== 79823
+LkNvbXBsZXRlZA== 79824
+cGxvcmVy 79825
+IGJhYmVs 79826
+Lk9uSXRlbUNsaWNrTGlzdGVuZXI= 79827
+IE1jTWFob24= 79828
+IHJlc3RUZW1wbGF0ZQ== 79829
+IHRlc3M= 79830
+U2V0VXA= 79831
+L29jdGV0 79832
+IGNhbGFt 79833
+IGhpbmdlcw== 79834
+IGFydGVyaWFs 79835
+IFRydW1hbg== 79836
+IENoZXJ5bA== 79837
+X0REUg== 79838
+IHRtcGw= 79839
+IExlcg== 79840
+W2hhc2g= 79841
+S0VS 79842
+IHByb3BvcmNpb24= 79843
+IGNvYXN0bGluZQ== 79844
+YWNpb3M= 79845
+Ij4tLX19Cg== 79846
+IGRpc2FkdmFudGFnZWQ= 79847
+VG91Y2hMaXN0ZW5lcg== 79848
+IFNlZ2E= 79849
+Y29lcw== 79850
+SWxsZWdhbEFjY2Vzc0V4Y2VwdGlvbg== 79851
+PEJveA== 79852
+IEluY3JlZGlibGU= 79853
+VXBkYXRlcg== 79854
+RkxU 79855
+aW5hbWU= 79856
+IEludGVyZmFjZXM= 79857
+Kylc 79858
+ZW5kaW1lbnRv 79859
+IHBhbmNha2Vz 79860
+IGluY29uc2lzdA== 79861
+LnBldA== 79862
+IGtleW9m 79863
+SW5uZXJUZXh0 79864
+Picp 79865
+RGVhbg== 79866
+IFDDqQ== 79867
+KENvbnRyb2w= 79868
+IHNwYXI= 79869
+bGluaWs= 79870
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA== 79871
+IERhbmU= 79872
+X1BBR0VT 79873
+IHNldEJhY2tncm91bmRDb2xvcg== 79874
+c3ViY2F0ZWdvcnk= 79875
+IFN0cmluZ1NwbGl0T3B0aW9ucw== 79876
+QWxsZW4= 79877
+ISgie30iLA== 79878
+hOyerA== 79879
+IGJhYw== 79880
+X1BST0RVQ1RT 79881
+dXBwZXJjYXNl 79882
+PSQoIiM= 79883
+xJlr 79884
+IFVJVGFwR2VzdHVyZVJlY29nbml6ZXI= 79885
+TUVUQQ== 79886
+IHNjYXJjZWx5 79887
+6aA= 79888
+X21hbmFnZWQ= 79889
+IGNvbnN1bW8= 79890
+TW91c2VNb3Zl 79891
+IFNwZWNz 79892
+IFNlYXJjaGluZw== 79893
+SGVhZGVyVmlldw== 79894
+Oicp 79895
+IG1pY3Jvc29mdA== 79896
+IEtvc292bw== 79897
+ZW1hbm4= 79898
+LmZmdA== 79899
+IEh1YmJhcmQ= 79900
+IGRleA== 79901
+X1RFUk1JTg== 79902
+X0ZD 79903
+IHBoaWxpcHBpbmVz 79904
+XENvbGxlY3Rpb25z 79905
+IHRlaA== 79906
+IHF1YWxpZmllcw== 79907
+IGlucHV0VmFsdWU= 79908
+IEdPVA== 79909
+KHNh 79910
+SUxMRUQ= 79911
+IHNsYW5n 79912
+IGtlaW5lbg== 79913
+IGZlbG9u 79914
+IEVyaWNr 79915
+YWJpbGlkYWRl 79916
+LnNlcg== 79917
+IHJ1bmVz 79918
+IFVucmVhbA== 79919
+KG9y 79920
+IOusuOyekA== 79921
+IGJpZGk= 79922
+IGlyYw== 79923
+CWl0ZXI= 79924
+Im5pbA== 79925
+L3VidW50dQ== 79926
+IG11cmRlcmluZw== 79927
+ID8u 79928
+dW5rZXI= 79929
+UmVjdFRyYW5zZm9ybQ== 79930
+JykpCgoK 79931
+IGFyaXR5 79932
+IEZyZWVs 79933
+Lm1vdW50 79934
+Q09NTUVOVA== 79935
+ICIqIiw= 79936
+ZW5jcnlwdGlvbg== 79937
+W21vZGVs 79938
+In19Pgo= 79939
+LlRvdWNo 79940
+L3RodW1i 79941
+IHByZXo= 79942
+L2NvbXBhbnk= 79943
+IHLDs8W8 79944
+IHNvZnRlbg== 79945
+IHBvc3NpYmlsZQ== 79946
+IEVDQg== 79947
+X0Jvb2w= 79948
+IC0tLS0tCg== 79949
+IGludGVydHc= 79950
+X3N0YQ== 79951
+X0JBTA== 79952
+Lm5hdmlnYXRpb25CYXI= 79953
+IFJHQkE= 79954
+Z3JpbHk= 79955
+c3RvZmY= 79956
+YWNreQ== 79957
+UUI= 79958
+QEFwaQ== 79959
+cGVjaWE= 79960
+IFJwYw== 79961
+IGFtcHM= 79962
+IEZlbmNl 79963
+IGdlbm9taWM= 79964
+KGFsaWFz 79965
+Vmllbg== 79966
+U3BpbkJveA== 79967
+LmdldFNlY29uZHM= 79968
+IGdsb2JhbGl6YXRpb24= 79969
+IGN1cw== 79970
+a3ViZWN0bA== 79971
+IHRocm90dA== 79972
+IGluZXJ0 79973
+IFNjcmF0Y2g= 79974
+w5c8Lw== 79975
+Lmlzc3Vl 79976
+ZXNzYXk= 79977
+LUlzbA== 79978
+IG3DoXI= 79979
+CWJpdA== 79980
+IGFib2xpc2hlZA== 79981
+LmluZmluaXR5 79982
+bGluZW5v 79983
+LmFsZ29yaXRobQ== 79984
+b3JzY2g= 79985
+RW1haWxBZGRyZXNz 79986
+IERBRw== 79987
+YnJpbmdpbmc= 79988
+Lm15YXBwbGljYXRpb24= 79989
+LlN1cHBvcnQ= 79990
+X2xlYWRlcg== 79991
+IERldmlu 79992
+IFtdDQoNCg== 79993
+IHJtcw== 79994
+IGJ1Y2tsZQ== 79995
+aWdsaWE= 79996
+L3Byb2JsZW0= 79997
+IGhhdXRl 79998
+IGluc3RpdHV0ZWQ= 79999
+SVU= 80000
+bGFtYQ== 80001
+RVhQRUNURUQ= 80002
+IEJlY2toYW0= 80003
+IEh5ZHJhdWxpYw== 80004
+U3RhdGljcw== 80005
+X25vcm1hbGl6ZWQ= 80006
+LmAsCg== 80007
+IG1pbWV0eXBl 80008
+IHNoYXZpbmc= 80009
+T3ZlcnJpZGVz 80010
+IE1lcmNlcg== 80011
+dHJmcw== 80012
+LXN0YXRz 80013
+b3NwYWNl 80014
+IGFudGlveGlkYW50cw== 80015
+aW5maW5pdHk= 80016
+Um9ja2V0 80017
+IEV1bGVy 80018
+LXZhbHU= 80019
+IGzDuA== 80020
+LUlO 80021
+SG1t 80022
+LXJldHVybg== 80023
+IFBBTkVM 80024
+IHRlcm1pbmF0b3I= 80025
+IHRla24= 80026
+IHByZWRpY2F0ZXM= 80027
+U3RhbXBlZA== 80028
+IHN2ZQ== 80029
+YW50ZXI= 80030
+IGN5Y2xpc3Q= 80031
+IEVwc3RlaW4= 80032
+IGhpdHRlcnM= 80033
+ZG9ncw== 80034
+LkFkZExpc3RlbmVy 80035
+X2V4Y2VwdGlvbnM= 80036
+IEZPT1Q= 80037
+aWNhcmU= 80038
+W3RhZw== 80039
+LWZldGNo 80040
+VVBMT0FE 80041
+LmRyb3Bkb3du 80042
+IGNlbnRyb2lkcw== 80043
+IGFyYmU= 80044
+IGhpam8= 80045
+IERhdGFiYXNlUmVmZXJlbmNl 80046
+UG9saXRpY2Fs 80047
+IEJBU0lD 80048
+LWZvcmNl 80049
+fCQ= 80050
+IFJFVklFVw== 80051
+LmRlY29yYXRl 80052
+IEFzcGVjdA== 80053
+IGNvbW1lbW9y 80054
+IGNsZWFuc2U= 80055
+IENsYXVkaWE= 80056
+Z2VuZXJhdGlvbg== 80057
+SExU 80058
+dHlwZW9ybQ== 80059
+cHJlZmVy 80060
+b3ZlcmxhcA== 80061
+YmlvbG9neQ== 80062
+U3RyZWFtZXI= 80063
+Y29tbWlzc2lvbg== 80064
+IHRodW1ibmFpbHM= 80065
+LkN1cnJlbnRDdWx0dXJl 80066
+IHVybHBhcnNl 80067
+IGdpb3Jubw== 80068
+IGRldnM= 80069
+X2FzcGVjdA== 80070
+IGNoZXJpc2hlZA== 80071
+IE5hY2hyaWNodA== 80072
+IHJpZ2dlZA== 80073
+L2xvZ2dpbmc= 80074
+aHVudA== 80075
+VHlwZUVycm9y 80076
+PFNlbGVjdA== 80077
+KHByb2c= 80078
+IEdyaWRMYXlvdXQ= 80079
+6JA= 80080
+IEVYUEVS 80081
+CUtFWQ== 80082
+LmRt 80083
+CWNhcmQ= 80084
+IFRhdQ== 80085
+IG5vdGFtbWVudA== 80086
+IGhlcm9pbmU= 80087
+IGJhdGh0dWI= 80088
+YXRyb24= 80089
+IOaU 80090
+77yS77yQ 80091
+Y29ub21pY3M= 80092
+IHJldmVyc2libGU= 80093
+6YeR6aKd 80094
+IGpzeA== 80095
+IFNwZWFrZXJz 80096
+RGVzZXJpYWxpemVy 80097
+LnRvRmxvYXQ= 80098
+INC/0LXRgNC10LzQtdC9 80099
+IFByb3ZpZGluZw== 80100
+6LSm 80101
+W2VsZW1lbnQ= 80102
+Kjo= 80103
+PlJldHVybnM= 80104
+IHRpdHVsYXI= 80105
+IGhlYXJ0YnJlYWtpbmc= 80106
+X05C 80107
+LkFyZ3VtZW50cw== 80108
+IG9wdGlj 80109
+YXR0YWNrcw== 80110
+IFZ1bG5lcg== 80111
+CWtleXM= 80112
+IGNvbnRyb2xl 80113
+LlJHQg== 80114
+IHN1Ymdyb3Vw 80115
+bWFuZGF0b3J5 80116
+IENBQg== 80117
+CWVuZ2luZQ== 80118
+44Gw 80119
+TUVESUE= 80120
+L3RyYW5z 80121
+IGRhbms= 80122
+IHNlcnZpY2Vk 80123
+IGluY2FyY2VyYXRlZA== 80124
+IEZyZWFr 80125
+IHVwdG8= 80126
+ZHJhd2Vy 80127
+WyIr 80128
+IGVudHdpY2s= 80129
+Z0w= 80130
+TW9kZWxFcnJvcg== 80131
+IHJlYWRkaXI= 80132
+aXN0cmlidXRl 80133
+IGdsYXJl 80134
+aXF1ZW1lbnQ= 80135
+Y2hpbmE= 80136
+IEthcGxhbg== 80137
+IFN0YWJpbGl0eQ== 80138
+cG9zaXRlcw== 80139
+IEpBWEJFbGVtZW50 80140
+IHRvdGFsbWVudGU= 80141
+KGNvbW0= 80142
+X3Byb2Nlc3Nlcw== 80143
+VGhvdXNhbmRz 80144
+IElscw== 80145
+ZXJ0YWludHk= 80146
+IFNoYWRlcw== 80147
+YWN0YWw= 80148
+bG9nZ2VkSW4= 80149
+IE5pY2hvbHM= 80150
+IE1pZGxhbmRz 80151
+ZGV2aWw= 80152
+IHN0clNRTA== 80153
+In0p 80154
+IEpvcmQ= 80155
+KGZm 80156
+IEp1bmk= 80157
+5bCx 80158
+YXJ0aXNhbmxpYg== 80159
+IG1vb25z 80160
+IHVucmVzb2x2ZWQ= 80161
+IHdpdGNoZXM= 80162
+IEfDvA== 80163
+IEdvYmxpbg== 80164
+YW5zc29u 80165
+fCU= 80166
+IGJ6 80167
+IGR1cGxleA== 80168
+ICIpKQ== 80169
+Lmxpa2Vz 80170
+KHZlcnRpY2Fs 80171
+IGNvd2JveQ== 80172
+U2VsZWNjaW9uZQ== 80173
+ICcqJyw= 80174
+IFNhcA== 80175
+IFNhYmJhdGg= 80176
+U09SVA== 80177
+4Ka/4KY= 80178
+X2NlbnRlcnM= 80179
+XFBvc3Q= 80180
+KFRyZWU= 80181
+IHBhcnRlcw== 80182
+X3lhdw== 80183
+YXJlbW9z 80184
+c2V2ZW4= 80185
+IGhpYXR1cw== 80186
+X2ludGVuc2l0eQ== 80187
+LW1hbnk= 80188
+IERvbGxhcnM= 80189
+LXVuc3R5bGVk 80190
+IGdyaXBwaW5n 80191
+IG1hcnZlbG91cw== 80192
+IHJlY2VwdGlvbnM= 80193
+IG92ZXJjbG9jaw== 80194
+YmVybWFu 80195
+IGhlYWRxdWFydGVyZWQ= 80196
+eEJC 80197
+Y2xhc3NDYWxsQ2hlY2s= 80198
+IG9ic2VydmVz 80199
+U3VibWl0dGluZw== 80200
+0LjRh9C10YE= 80201
+IEh0dHBTdGF0dXNDb2RlUmVzdWx0 80202
+IGhpZXJvbnRh 80203
+cm9wcGluZw== 80204
+Rk9SQ0U= 80205
+CXV0aWxz 80206
+IHZlbnRz 80207
+YWRkZXJz 80208
+IE1JWA== 80209
+IEVsZWdhbnQ= 80210
+IGFjb3M= 80211
+KG1hY2hpbmU= 80212
+IG1lZGRsaW5n 80213
+IHZpbGU= 80214
+LWNvbXBhdGlibGU= 80215
+IGNyZWFtcw== 80216
+IFRhYmxlUm93 80217
+IFJlaGFiaWxpdGF0aW9u 80218
+QWJi 80219
+KHVzZXJJbmZv 80220
+X2V4cGlyZWQ= 80221
+Lk9iamVjdE1ldGE= 80222
+IGdvZHQ= 80223
+dXN1YWw= 80224
+LmJpbmRpbmdOYXZpZ2F0b3JNb3Zl 80225
+IFJlZ2lzdHJhcg== 80226
+bWlncmF0aW9u 80227
+YXB0dXJlZA== 80228
+LHBhcmFtcw== 80229
+IGNlbnRlclk= 80230
+b3dhbg== 80231
+bG9jYWxlcw== 80232
+SW5wdXRNb2R1bGU= 80233
+IHZpZ2lsYW50 80234
+IG5jb2xz 80235
+IGluZ3I= 80236
+IGPDtHTDqQ== 80237
+dmVydGltZQ== 80238
+IHdpZGVzdA== 80239
+IEhERg== 80240
+IEFsZ2VyaWE= 80241
+IGNoYXR0 80242
+JHNlbGVjdA== 80243
+Il0pDQo= 80244
+IG11bHRlcg== 80245
+IENoZW5leQ== 80246
+ZnVzY2F0ZWQ= 80247
+PSciLiRf 80248
+IERlbmlzZQ== 80249
+IHJpZmY= 80250
+QWJzZW50 80251
+IHRhbWHDsW8= 80252
+IGplc3pjemU= 80253
+LlByb2dyYW0= 80254
+CWJy 80255
+ZXJhaXM= 80256
+IHNhbmRhbHM= 80257
+ICws 80258
+IGRpc3NvbHV0aW9u 80259
+IHVudGVyc2NoaWVk 80260
+UHJvdg== 80261
+LnRyYW5zYWN0aW9ucw== 80262
+IFRyb3VibGU= 80263
+Lm1pZGRsZQ== 80264
+LmdldERlY2xhcmVk 80265
+IHN3ZWF0aW5n 80266
+IEhhbmNvY2s= 80267
+6LS5 80268
+IHBvZw== 80269
+IEtpYQ== 80270
+IG1vZG5l 80271
+IEFjY2Vzc2liaWxpdHk= 80272
+IGxlYWthZ2U= 80273
+IGRlY2VwdGl2ZQ== 80274
+IFdPTQ== 80275
+INC+0YE= 80276
+IGNzYWs= 80277
+YWNvY2s= 80278
+LlN5bnRheA== 80279
+ICxb 80280
+LicpLAo= 80281
+IGZvcmVjbG9zdXJl 80282
+IHVuZmF2b3I= 80283
+IGV4Y2w= 80284
+Q1VEQQ== 80285
+ZGVuc2U= 80286
+PFVuaXQ= 80287
+IHZhcGluZw== 80288
+IG1hamVzdGlj 80289
+aWF0b3Jz 80290
+IGF1dGlzdGlj 80291
+LmdhdGV3YXk= 80292
+VXJsUGFyc2Vy 80293
+SGVsbA== 80294
+IENvc3Rjbw== 80295
+IEhJUA== 80296
+T2JzZXJ2ZXJz 80297
+IFBlb3BsZXM= 80298
+IFNwb3RsaWdodA== 80299
+IFRhdmVybg== 80300
+IFRPVVI= 80301
+cGxpbmdz 80302
+LldSQVA= 80303
+IGFsZA== 80304
+TkFM 80305
+KCIqKio= 80306
+c2V0UHJvcGVydHk= 80307
+X1N0b3A= 80308
+YW5ub3VuY2VtZW50 80309
+IEltbWVkaWF0ZQ== 80310
+IEhTVg== 80311
+X1RFU1RT 80312
+IGNyYXZl 80313
+X1VD 80314
+LmRlY3J5cHQ= 80315
+KFJvbGVz 80316
+IHN1Ymo= 80317
+X0ludGVnZXI= 80318
+Lm5vdE51bGw= 80319
+IEdzdA== 80320
+IEJ5cm5l 80321
+IEFxdWFyaXVt 80322
+IENhbmM= 80323
+X0NIQU4= 80324
+IERUTw== 80325
+Lmhs 80326
+IG1lbmdndW5ha2Fu 80327
+RnJhbmM= 80328
+RGlhbG9nQ29udGVudA== 80329
+Li4uJwo= 80330
+IEt1bnN0 80331
+IEFsbG9jYXRvcg== 80332
+VVNBR0U= 80333
+S25vd2xlZGdl 80334
+CWNwdQ== 80335
+IG1vcmFscw== 80336
+cGF0aWVudHM= 80337
+IGlsaw== 80338
+IGNyaXRlcg== 80339
+IFZldA== 80340
+IE1lc3NpYWg= 80341
+X186 80342
+YXZlbm91cw== 80343
+X3ZpZXdlcg== 80344
+KERpY3Rpb25hcnk= 80345
+IEJvZGllcw== 80346
+aGFzT25l 80347
+0LjQvNC10YA= 80348
+IHppcGNvZGU= 80349
+U3Rlcg== 80350
+IGLDoXM= 80351
+X0Rpc3BsYXk= 80352
+IGZpcm1h 80353
+IFJhaWRlcg== 80354
+IEtI 80355
+V2l0aERhdGE= 80356
+KEFSRw== 80357
+IHByb3Ry 80358
+IG1zZWM= 80359
+IGxhdmVuZGVy 80360
+KFV0aWw= 80361
+INC/0YDQvtCz0YDQsNC8 80362
+X211eA== 80363
+X2xhdGl0dWRl 80364
+UG9ydHJhaXQ= 80365
+IHNpdGNvbQ== 80366
+IGFkaWNpb24= 80367
+KGNvbnN0YW50cw== 80368
+IEFueGlldHk= 80369
+IFJvc2Vz 80370
+IHN0aW11bGF0ZWQ= 80371
+IGNocm9ubw== 80372
+IGZvc3NpbHM= 80373
+IEFpcmJ1cw== 80374
+bGVmdHJpZ2h0 80375
+IE3DqXRvZG8= 80376
+Inc= 80377
+IGtsZWluZW4= 80378
+IGNsaXF1ZQ== 80379
+b21pbmF0aW9u 80380
+IG1vdGVs 80381
+L3ZlY3Rvcg== 80382
+ZGVjbGFyYXRpb24= 80383
+IG5ld1k= 80384
+W0g= 80385
+LnNjYWxhcg== 80386
+b21ibw== 80387
+aHVk 80388
+O3NldA== 80389
+ZnR5cGU= 80390
+KCcnKS4= 80391
+b3JkZXM= 80392
+eW5vcw== 80393
+J10sCgo= 80394
+X0ZMVVNI 80395
+aWRlbnRpZnk= 80396
+L2RldmljZXM= 80397
+IGRpY3RhdGVk 80398
+IGRlamFy 80399
+IEVtaW4= 80400
+IFBlbmRhbnQ= 80401
+IG9uVXBkYXRl 80402
+XSkpKQ== 80403
+IEJhcmtlcg== 80404
+T3Jt 80405
+6K+36YCJ5oup 80406
+X2d1aWRl 80407
+w6FiYWRv 80408
+b3BoZQ== 80409
+ICIuCg== 80410
+IEJyZXdlcnM= 80411
+IGJyaWRhbA== 80412
+IENFUw== 80413
+X0NhdGVnb3J5 80414
+IEJUTg== 80415
+IERhcnRo 80416
+I2Zvcg== 80417
+ZXRobmlj 80418
+YXJjaGl0ZWN0dXJl 80419
+IENvdXBl 80420
+aWRvcmVz 80421
+IGZhc2Npc20= 80422
+IGNvbnRyYWRpY3Rpb25z 80423
+ZWZmZWN0cw== 80424
+SW5pdGlhbFN0YXRl 80425
+IOekuuS+iw== 80426
+bWF0cGxvdGxpYg== 80427
+LmRlc2t0b3A= 80428
+INCt 80429
+IFFQaXhtYXA= 80430
+CWJlZ2lu 80431
+IHduZA== 80432
+IGNvbnRpZW5l 80433
+KGhlbHBlcg== 80434
+Lk5vdGlmeQ== 80435
+KEJvb2s= 80436
+IEd1YXJhbnRlZWQ= 80437
+cGxs 80438
+aW9sYQ== 80439
+IGZ1bmdp 80440
+aXZlbnQ= 80441
+IE9B 80442
+5rKh5pyJ 80443
+IHdpxJljZWo= 80444
+CQoJCgkKCQo= 80445
+77yaIis= 80446
+IFRhbGtz 80447
+LnN0YXJ0ZWQ= 80448
+b2NpdGllcw== 80449
+IGVzcG9ydHM= 80450
+PElucHV0 80451
+IEVYQ0VQVElPTg== 80452
+IGFjdHU= 80453
+LmltcA== 80454
+ICIvIgo= 80455
+T3RoZXJ3aXNl 80456
+IFBlbnNpb24= 80457
+IFdhdmVz 80458
+xrDGoQ== 80459
+aWFyZHM= 80460
+ICo8Lw== 80461
+dXJnZW9u 80462
+IFNDSQ== 80463
+IExhdXJlbA== 80464
+ZXRhZw== 80465
+TmV0ZmxpeA== 80466
+IFJlc3BvbnNlcw== 80467
+IG5lb2xpYmVyYWw= 80468
+aXNDb250YWluZWQ= 80469
+PW15 80470
+IHJlcHJpbnQ= 80471
+b25lc3RseQ== 80472
+IGRlcGFydGluZw== 80473
+UFdN 80474
+ZXdoYXQ= 80475
+PSI8PA== 80476
+Lnlhbmc= 80477
+IFRyYWRpdGlvbg== 80478
+KyI6 80479
+ZGVwZW5kaW5n 80480
+X1VuaXQ= 80481
+IENvZGFibGU= 80482
+IHdoaXNreQ== 80483
+IGNvcnJlbGF0ZQ== 80484
+IGRpcmV0 80485
+TGFzdGx5 80486
+CU91dHB1dA== 80487
+KGlub2Rl 80488
+XExvZw== 80489
+IERlcGVuZGVuY2llcw== 80490
+V2lsbERpc2FwcGVhcg== 80491
+IFBhbmVscw== 80492
+IOKUnOKUgOKUgA== 80493
+IG9zdGVuc2libHk= 80494
+fC0t 80495
+QW5udWFs 80496
+IGF1dG9sb2Fk 80497
+VmFsdWVIYW5kbGluZw== 80498
+LmNvaW4= 80499
+ZWR1Y3Q= 80500
+Wlk= 80501
+IENhbnVja3M= 80502
+IHNtZWFy 80503
+IHJlYWxpZGFk 80504
+IHt7Cg== 80505
+aXZvbA== 80506
+ZXRTb2NrZXRBZGRyZXNz 80507
+IEtlbXA= 80508
+L0ZyYW1ld29yaw== 80509
+IHF1aWNrZXN0 80510
+XyIuJA== 80511
+IHdpdGhob2xkaW5n 80512
+IGludHJpZ3Vl 80513
+IEFERFI= 80514
+RGllc2U= 80515
+V2Vla2x5 80516
+X19fX18= 80517
+IEludmFsaWRBcmd1bWVudEV4Y2VwdGlvbg== 80518
+b2xhdGVk 80519
+UnVuTG9vcA== 80520
+IHBhc3PDqQ== 80521
+LmZpcmViYXNlaW8= 80522
+LmV1bGVyQW5nbGVz 80523
+aXN0ZW5jZQ== 80524
+IGZlYXJpbmc= 80525
+IEVsZW1lbnRUeXBl 80526
+L1Rlc3Q= 80527
+IOafpeivog== 80528
+IGZvbmRv 80529
+IFBhcnI= 80530
+IHplc3Q= 80531
+IFRyYW5zZm9ybWVycw== 80532
+TGluZVN0eWxl 80533
+IGV0aGVybmV0 80534
+YWZmbGVz 80535
+IG5hbWVkdHVwbGU= 80536
+IFNjYWxhcnM= 80537
+TlNVUkxTZXNzaW9u 80538
+LWV4dGVuc2lvbg== 80539
+KE1lc3NhZ2Vz 80540
+IGF0ZW5jacOzbg== 80541
+IEplcnNleXM= 80542
+YmVkUGFuZQ== 80543
+IFN0dW5kZW4= 80544
+IHZvaXR1cmU= 80545
+IOm7mOiupA== 80546
+Lm9wZW5nbA== 80547
+ICJ9 80548
+IFJldmVuZ2U= 80549
+IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0K 80550
+SW5zdGFudGlhdGU= 80551
+IGVucg== 80552
+VmFsaWRhdGlvbkVycm9y 80553
+X0FMUkVBRFk= 80554
+TG90cw== 80555
+b2Nl 80556
+IHNjcmlt 80557
+IGVtYm9keQ== 80558
+0YDQsNGC 80559
+IGNvbmNlZGU= 80560
+YXNzZWw= 80561
+IEJSRQ== 80562
+UExFQVNF 80563
+CWRpZmY= 80564
+57uT5p2f 80565
+LmZw 80566
+YmFt 80567
+TWVhbA== 80568
+IE1hZG9ubmE= 80569
+IHB1bmlzaGFibGU= 80570
+aWZmaWVz 80571
+X3VuaXg= 80572
+7JmA 80573
+IEdhZ2E= 80574
+InN0cnVjdA== 80575
+VG9TZW5k 80576
+IE9DUg== 80577
+IHByYWlzaW5n 80578
+Z2V0U3RvcmU= 80579
+IGV1dGg= 80580
+IGFycmVnbG8= 80581
+IGZlcm0= 80582
+ZmRm 80583
+Q29vbGRvd24= 80584
+IFJlY3ljbGluZw== 80585
+QW5h 80586
+aW5kcg== 80587
+X0hQ 80588
+IEdvdmVybmFuY2U= 80589
+IGJhcnJhZ2U= 80590
+L2Nh 80591
+ICwo 80592
+RsO8cg== 80593
+IElTUHM= 80594
+IG1lbmFjZQ== 80595
+VmlyZ2luaWE= 80596
+IGZhbmM= 80597
+IG5vbWJyZXM= 80598
+Lmluc3RydWN0aW9ucw== 80599
+IGVzY2FsYXRlZA== 80600
+YWdpbmE= 80601
+IExldmluZQ== 80602
+CWZpbmQ= 80603
+X2Vy 80604
+IGRlanRpbmdzYWo= 80605
+c3Zw 80606
+YWdvcw== 80607
+KHNvbA== 80608
+IExpZA== 80609
+UFJJVkFURQ== 80610
+IElNUExFTUVOVA== 80611
+ZWZlbGxlcg== 80612
+KFRhcmdldA== 80613
+4LmJ4Lit4Lih 80614
+aG91c2luZw== 80615
+LnNldEN1cnNvcg== 80616
+IG5laG1lbg== 80617
+LnJlY2VpdmVy 80618
+IFR1dG9y 80619
+IG1hdHRlcmVk 80620
+bWRhdA== 80621
+cmVndWxhdGVk 80622
+IGdldEFkZHJlc3M= 80623
+IE1pbnV0ZW4= 80624
+IElV 80625
+0LvQsNCy 80626
+IHR1cm5vdmVycw== 80627
+IHN1aXRhYmlsaXR5 80628
+CWVzYw== 80629
+Y2FsY3Vs 80630
+X1N0cmVhbQ== 80631
+X2ZpbGVuYW1lcw== 80632
+LXZhcnM= 80633
+Li4uLi4KCg== 80634
+RGlh 80635
+IHN3aW1z 80636
+T3B0aW1pemVy 80637
+PGJvb3N0 80638
+IFBlcm1pdA== 80639
+J10pKXs= 80640
+XE9wdGlvbnNSZXNvbHZlcg== 80641
+5qGI 80642
+IGhlY3RhcmVz 80643
+KHVz 80644
+IERldmVsb3Bpbmc= 80645
+X3hz 80646
+IG5vdmVsaXN0 80647
+IENvbnZlbmllbmNl 80648
+d2Fsa2luZw== 80649
+IGNoYXJtcw== 80650
+IExlYXNl 80651
+CUhBTA== 80652
+KFsm 80653
+IHJlc3RhcnRlZA== 80654
+TWFnZQ== 80655
+SXB2 80656
+INGN0Lo= 80657
+UkxG 80658
+IGFzc2VtYmxpbmc= 80659
+IEVjYw== 80660
+dmluZm9z 80661
+cGVkaWRv 80662
+IHN5bm9wc2lz 80663
+IFN0YW50b24= 80664
+c3RhcnR1cA== 80665
+LmdldHZhbHVl 80666
+IEtpdHQ= 80667
+cHJvcGVy 80668
+IHByZXRyYWluZWQ= 80669
+IFBFTg== 80670
+LlRlcm0= 80671
+IHBlcXU= 80672
+ZXBoaXI= 80673
+IEFsbGllcw== 80674
+IG1vZGVsQW5kVmlldw== 80675
+IGJ1dHRlcmZsaWVz 80676
+IEtpcnN0 80677
+IENoZWNrZXI= 80678
+IGN1bm5pbmc= 80679
+LnNldFk= 80680
+X01hc3Rlcg== 80681
+SW5jcmVhc2luZw== 80682
+IGh1cmRsZQ== 80683
+IGZpc3Rz 80684
+IFNsb3Zha2lh 80685
+IG5vbWJyZXV4 80686
+IDo6Cg== 80687
+dGFza0lk 80688
+IGZvbGx5 80689
+PFRyZWVOb2Rl 80690
+IFZvbGRlbW9ydA== 80691
+IGJsaXN0ZXI= 80692
+xYJl 80693
+LkVudGl0eU1hbmFnZXI= 80694
+LkRPV04= 80695
+IEdyZWdn 80696
+LWNvb3JkaW5hdGU= 80697
+KHZj 80698
+w6FiYg== 80699
+LlRvZ2dsZQ== 80700
+IExpc2Jvbg== 80701
+56I= 80702
+INC/0L7Rgg== 80703
+cGFyZW50Tm9kZQ== 80704
+LnNldFNjYWxl 80705
+X01JU1NJTkc= 80706
+IG91dHJh 80707
+IGt1cA== 80708
+YF0= 80709
+X3ZpYQ== 80710
+ZWRpY3M= 80711
+IEJvcmRlcnM= 80712
+IGlwYWQ= 80713
+IGVkdA== 80714
+IENhcnRlc2lhbg== 80715
+L21hYw== 80716
+IGJhcmxleQ== 80717
+IFNjYXJsZXQ= 80718
+ICAgIAogICAgCiAgICAKICAgIAo= 80719
+cXVlcnlQYXJhbXM= 80720
+IHJoeXRobXM= 80721
+IGdlYXJpbmc= 80722
+Wlg= 80723
+aHlkcmF0aW9u 80724
+U1RT 80725
+IHBsZW50aWZ1bA== 80726
+Y29ycA== 80727
+fUA= 80728
+aW50ZWdy 80729
+L2F0 80730
+LmRlYg== 80731
+IHVuZGVuaWFibGU= 80732
+IG9wZW5zc2w= 80733
+LmRlYWQ= 80734
+IFBpbGxvdw== 80735
+IEJlYW5z 80736
+LmFudA== 80737
+X3Fz 80738
+LWluZm9ybWF0aW9u 80739
+IOuzgOyImA== 80740
+JSIpLAo= 80741
+INC00YDRg9Cz 80742
+IFNwb25nZQ== 80743
+IHNpZnQ= 80744
+dGVzdGltb25pYWw= 80745
+IHVubmF0dXJhbA== 80746
+VUlTY3JvbGxWaWV3 80747
+dmVyZ2VuY2U= 80748
+KHRleHRCb3g= 80749
+LXBhZ2luYXRpb24= 80750
+IERpc3F1cw== 80751
+X3Byb2R1aw== 80752
+YWduYXI= 80753
+S2V5VXA= 80754
+CQkJICAgICAgICA= 80755
+0LXQu9C1 80756
+PHNvdXJjZQ== 80757
+Lmls 80758
+LmF0b20= 80759
+X0NvbXBvbmVudA== 80760
+IHlu 80761
+WydfXw== 80762
+IHdlYWtlc3Q= 80763
+X2RlY3J5cHQ= 80764
+L21zZw== 80765
+Y2Jj 80766
+IHBvbGl0ZWx5 80767
+b21hdA== 80768
+IGVubGlnaHRlbm1lbnQ= 80769
+IGNyZWE= 80770
+IGJydWs= 80771
+X2FscmVhZHk= 80772
+IHNvY2tmZA== 80773
+dW5wYWNr 80774
+b3JnZXM= 80775
+IFVORVNDTw== 80776
+aW5hbGl0eQ== 80777
+IHNlbnRpbmVs 80778
+IGFmZmx1ZW50 80779
+IHRocm93RXJyb3I= 80780
+aWV0cw== 80781
+QU5KSQ== 80782
+IFN1ZmZvbGs= 80783
+YmVybw== 80784
+a2V0w7h5 80785
+RW5kcG9pbnRz 80786
+ZXhlY3V0b3I= 80787
+R2E= 80788
+LkxB 80789
+X3BvcnRmb2xpbw== 80790
+dW5zY2g= 80791
+ZWxhZ2U= 80792
+IGdvYmllcm5v 80793
+IEJpb2w= 80794
+TW9kaWZpY2F0aW9u 80795
+IERlY2ltYWxGb3JtYXQ= 80796
+IFZvY8Oq 80797
+IG1ldGhvZG9sb2dpZXM= 80798
+W10u 80799
+IEdW 80800
+IHJlcGxpY2Fz 80801
+4oCUd2l0aA== 80802
+KTspOwo= 80803
+cG9zaXg= 80804
+U3VjY2Vzc0xpc3RlbmVy 80805
+cGhl 80806
+X25vcm1hbGl6ZQ== 80807
+IExhcmdlcg== 80808
+IHJlcGVyY3Vzc2lvbnM= 80809
+X1ZlcnQ= 80810
+IGhvc3RlbA== 80811
+IGluY29tcGV0ZW50 80812
+aGV2 80813
+X0RFTFRB 80814
+IHB1ZWRv 80815
+aW5zdGFsbGF0aW9u 80816
+X2ZyYWc= 80817
+KHJy 80818
+IE1BVg== 80819
+IExvY2FsaXphdGlvbg== 80820
+KCIiKS4= 80821
+IC0tLS0tLS0tLQ== 80822
+DQoK 80823
+IFB5VHVwbGU= 80824
+IEp1bGlv 80825
+CUdMdWludA== 80826
+bWFya3Vw 80827
+X0ZBTUlMWQ== 80828
+UFJPR1JBTQ== 80829
+IEZpcm13YXJl 80830
+KnNpemU= 80831
+V2lmaQ== 80832
+IHZpc2l0YQ== 80833
+IEVybA== 80834
+RmluZE9iamVjdA== 80835
+LlVOUkVMQVRFRA== 80836
+cGh0aGFsbQ== 80837
+IHBlcnNvbmFsaXpl 80838
+IGNyw6lhdGlvbg== 80839
+ICAgIAkg 80840
+LnByZWNpc2lvbg== 80841
+IHNldHRlcnM= 80842
+IG5ld1NpemU= 80843
+IENhdGFsYW4= 80844
+CW9wdGlvbg== 80845
+IHBpZWw= 80846
+IGNhZ2Vz 80847
+IFN0ZW0= 80848
+ZHJhd2luZw== 80849
+ZXhwbGFpbmVk 80850
+IOaOpw== 80851
+IGRyZWFkZnVs 80852
+ZXJydXB0ZWQ= 80853
+LmdldFZhbHVlQXQ= 80854
+IGVsYXBzZWRUaW1l 80855
+IGluZGVmaW5pdGU= 80856
+IFRIQU5L 80857
+X3N0YXJ0dXA= 80858
+U1VSRQ== 80859
+IGtpZG5leXM= 80860
+IEN1aXNpbmU= 80861
+fGFycmF5 80862
+U2VuZE1lc3NhZ2U= 80863
+ZmF2 80864
+IEFlcm9zcGFjZQ== 80865
+X21lYW5z 80866
+IG5lYg== 80867
+IE9UUA== 80868
+IGNodXJu 80869
+L2Zy 80870
+IFJlaWdu 80871
+X2NsYXNzaWZpY2F0aW9u 80872
+IE1hY0RvbmFsZA== 80873
+Ii4KCgoK 80874
+IGNoaWxseQ== 80875
+IOivt+axgg== 80876
+aWhhdA== 80877
+U1RB 80878
+J2F1dHJlcw== 80879
+IGxhc2M= 80880
+Lm1peA== 80881
+IGJsb3Q= 80882
+IElERA== 80883
+ZGF0YXRhYmxl 80884
+c3BpZWw= 80885
+IMOpeGl0bw== 80886
+YXJ0aWM= 80887
+LkF4aXM= 80888
+LmFkdmFuY2U= 80889
+IG1vdXNlWA== 80890
+J8Og 80891
+IHJlY2lldmVk 80892
+IHBvc2k= 80893
+IGZvdXJu 80894
+IE1hZmlh 80895
+IHBjYQ== 80896
+YmVsb25ncw== 80897
+YWJseXR5cGVk 80898
+QVVUSE9SSVpFRA== 80899
+LnNjYWxhYmx5dHlwZWQ= 80900
+7JyE 80901
+LWRvdA== 80902
+IGVtcGhhc2l6aW5n 80903
+TWVtYmVyc2hpcA== 80904
+KnBvdw== 80905
+LXNwaW4= 80906
+cnV0YQ== 80907
+aGV2aWs= 80908
+X0FTWU5D 80909
+X2NvbXBpbGVy 80910
+LkZsYWc= 80911
+IGVsYm93cw== 80912
+LkNSRUFURQ== 80913
+TWV0cm8= 80914
+LmxvZ3M= 80915
+em1hbg== 80916
+cG9uZQ== 80917
+xJnFvA== 80918
+IGludGVycw== 80919
+IHdlYnM= 80920
+X0hJRERFTg== 80921
+CW5vdw== 80922
+Q29tbXVuaWM= 80923
+JHRwbA== 80924
+c2NvcGVz 80925
+IFppa2E= 80926
+IHN0cmluZ3N0cmVhbQ== 80927
+IFVuY2F0ZWdvcml6ZWQ= 80928
+Rlk= 80929
+L3N3YWdnZXI= 80930
+UGVubg== 80931
+aW1lSW50ZXJ2YWw= 80932
+IGNvbnRlbmRz 80933
+eGllcw== 80934
+IFNhbGVzZm9yY2U= 80935
+IHV0ZW5z 80936
+IHVuZGlz 80937
+Q3J5c3RhbA== 80938
+Lm5kaW0= 80939
+IGZvcm11bA== 80940
+IEZhdg== 80941
+5bm/ 80942
+cmlzaw== 80943
+bmFk 80944
+L3Rvcw== 80945
+IFBFUkZPUk1BTkNF 80946
+IHdyaXRlbG4= 80947
+IGNvbGxv 80948
+YW50aWNhbGx5 80949
+VURFTlQ= 80950
+Umdi 80951
+IG9mZXJl 80952
+IG1lcmdlcw== 80953
+ZmlkZg== 80954
+IGt6 80955
+VmljdG9yaWE= 80956
+IC9eXA== 80957
+IGt1YmU= 80958
+IEFwb3N0bGU= 80959
+IGRlZmVuZHM= 80960
+PD0o 80961
+IE1FTU9SWQ== 80962
+XElk 80963
+IEFjdGl2ZUZvcm0= 80964
+IE9uZVBsdXM= 80965
+SHR0cFNlcnZsZXRSZXF1ZXN0 80966
+IFRlbXBEYXRh 80967
+7KCB 80968
+LkFTQ0lJ 80969
+2YTYpw== 80970
+S0k= 80971
+IGZyYXQ= 80972
+X0NJUEhFUg== 80973
+LlN1cmZhY2U= 80974
+IHBpdGZhbGxz 80975
+LW1lZGlhdGVk 80976
+eXBp 80977
+LWFsaXN0 80978
+eEJD 80979
+dGVhY2hlcnM= 80980
+IEN5Yw== 80981
+IHBzeWNoZWRlbGlj 80982
+IER1bWJsZWRvcmU= 80983
+IikuCgo= 80984
+IFRoYXRjaGVy 80985
+IFByaW5jaXBsZQ== 80986
+VG9nZXRoZXI= 80987
+IGZsb3Jh 80988
+d2Vla3M= 80989
+X2NyaXRlcmlh 80990
+Ym9uZXM= 80991
+LmludGVybmV0 80992
+IGJsb2NrRGlt 80993
+LlNpbmdsZU9yRGVmYXVsdA== 80994
+RGljZQ== 80995
+IEV2ZWw= 80996
+IFRMYWJlbA== 80997
+IElnb3I= 80998
+IENvcHA= 80999
+IGluYXVndXI= 81000
+L3ByaXZhdGU= 81001
+IGFiZXJy 81002
+bmRz 81003
+O2lm 81004
+LXJhbmdpbmc= 81005
+YWNodHM= 81006
+X21hcnNoYWxs 81007
+IF9fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX18= 81008
+LmVuZFRpbWU= 81009
+IE1vZGVsUmVuZGVyZXI= 81010
+KGZvb2Q= 81011
+KCJ+ 81012
+IHN1cHBs 81013
+KCJcKA== 81014
+U3E= 81015
+VHJhbnNsYXRlZA== 81016
+IENvbnRpbnVpbmc= 81017
+IHBvc3Nvbm8= 81018
+RklYTUU= 81019
+IEFuZ2Vib3Q= 81020
+aWV2ZXI= 81021
+IEt5b3Rv 81022
+Y2ls 81023
+TmV3VXJsUGFyc2Vy 81024
+LkRp 81025
+IGh1bWFuZQ== 81026
+RGVtYW5k 81027
+IE1hcnRpYW4= 81028
+d29vZHM= 81029
+IEhlYWw= 81030
+IFl1ZQ== 81031
+IGNvdXJ0aG91c2U= 81032
+IHZvbnQ= 81033
+IGJvbnM= 81034
+aW50ZWdyYWw= 81035
+ICQoJyMn 81036
+ZXRlcm1pbmF0aW9u 81037
+Lm1vZGlmaWVk 81038
+IHByaW5jaXBhbHM= 81039
+IGFsYXJtZWQ= 81040
+LmNyZWF0ZU9iamVjdA== 81041
+Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQo= 81042
+L2NvdW50 81043
+IGVudHJlbmNoZWQ= 81044
+XGE= 81045
+IGludHJ1c2lvbg== 81046
+IE54 81047
+CQkKCQkKCQkK 81048
+Y2hlbWF0aWM= 81049
+IHNsaWRlcnM= 81050
+IHNlbGVjdGFibGU= 81051
+X25s 81052
+aWVzZQ== 81053
+X2VzdGltYXRvcnM= 81054
+IFN2Zw== 81055
+IGRlbGV0ZVVzZXI= 81056
+KG1hcHBpbmc= 81057
+IOyymOumrA== 81058
+IGFudGFnb25pc3Q= 81059
+IGtpbmFzZQ== 81060
+IHdlbGRlZA== 81061
+IExlbmE= 81062
+ZWRpdGg= 81063
+aWFsaQ== 81064
+KHBpYw== 81065
+IGJyZWFjaGVk 81066
+UElD 81067
+IGNvYXN0ZXI= 81068
+RkRB 81069
+IGtyZQ== 81070
+cGVyZmls 81071
+IEdlbXM= 81072
+X2ZlbmNl 81073
+VVJMUmVxdWVzdA== 81074
+4oCZYXBw 81075
+UkVGRVJFTkNF 81076
+LkV4cG9ydA== 81077
+IG1pbmltaXplZA== 81078
+aXBlbA== 81079
+aWRhdGE= 81080
+KWRlYWxsb2M= 81081
+ZXNjYWw= 81082
+X2Z3ZA== 81083
+bWVtY3B5 81084
+IExvcmk= 81085
+X1JlZg== 81086
+IGJhcmE= 81087
+IFNlbGxlcnM= 81088
+IGRldGVyaW9yYXRpb24= 81089
+ZnJhY3Rpb24= 81090
+KV07 81091
+L3BsYXk= 81092
+wqU= 81093
+LXRlc3Rz 81094
+T2Zmc2V0cw== 81095
+T2k= 81096
+IEtsYXVz 81097
+IHF1ZXJ5aW5n 81098
+d2lzaA== 81099
+YXBlbA== 81100
+X3dvcmtpbmc= 81101
+bXlNb2RhbExhYmVs 81102
+IHRvRGF0ZQ== 81103
+cGVybWFsaW5r 81104
+IGZyZWM= 81105
+b2xlY3VsZXM= 81106
+IEdvb3Nl 81107
+LXdpZGdldHM= 81108
+dHVydGxl 81109
+SW1wcm92ZWQ= 81110
+IHJvYWR3YXk= 81111
+a2Vocg== 81112
+IGFzdHJvbm9teQ== 81113
+Q29tYmluZQ== 81114
+IGNpZ2Fycw== 81115
+X0dBVEU= 81116
+L21hbmFnZQ== 81117
+IEdlcmFyZA== 81118
+IFByb3RlY3Rvcg== 81119
+U3Vic3lzdGVt 81120
+L2ZpbmQ= 81121
+L1lZWVk= 81122
+IHRvdGFsaW5n 81123
+0LzQvtGC 81124
+IE9tYW4= 81125
+IGluZmluaXQ= 81126
+LW9mZmljZQ== 81127
+IGluc3RhbnRpYXRpb24= 81128
+LsKn 81129
+Y2V1 81130
+KGF0b20= 81131
+IERyb3BvdXQ= 81132
+7YGs 81133
+IGNvbmRlbW5pbmc= 81134
+X2Jhc2VuYW1l 81135
+XX08Lw== 81136
+RGF0YUNvbnRleHQ= 81137
+IFdhc2hpbmc= 81138
+Lk9O 81139
+IG1vbW15 81140
+KCl9Owo= 81141
+IDspCgo= 81142
+L2V4dA== 81143
+Zm9yZWdyb3VuZENvbG9y 81144
+dW5zdXBwb3J0ZWQ= 81145
+IHNvbGxlbg== 81146
+IGNvbWXDpw== 81147
+RElTQUJMRQ== 81148
+IG9uUGF1c2U= 81149
+INGH0YLQvtCx0Ys= 81150
+IEFpbg== 81151
+R3M= 81152
+CVRhc2s= 81153
+aGF3aw== 81154
+Ik5vdA== 81155
+QUdS 81156
+LmdldFRhYmxl 81157
+IGRpdmVyZ2VuY2U= 81158
+IG5lZ29jaQ== 81159
+UmVwbGFjaW5n 81160
+XX0pCg== 81161
+aWxsdXNpb24= 81162
+IM6U 81163
+X0tFWUJPQVJE 81164
+S3I= 81165
+CW9y 81166
+56Gu6K6k 81167
+CXByaW50bG4= 81168
+IFNlYXJjaGVz 81169
+IEZyZXNubw== 81170
+IHZlcmRhZA== 81171
+XE1pZGRsZXdhcmU= 81172
+IOy1nA== 81173
+fSkoKTs= 81174
+dGV4dEFsaWdu 81175
+aW5rZWw= 81176
+LlR4dA== 81177
+IG9wdGltaXphdGlvbnM= 81178
+eW91bmc= 81179
+IGxlYXNlZA== 81180
+SlQ= 81181
+IElvbmljTW9kdWxl 81182
+ZXR0aW5ncw== 81183
+ZXNlaGVu 81184
+IGZhdm91cmFibGU= 81185
+YW5leQ== 81186
+IG90aGVyQnV0dG9uVGl0bGVz 81187
+IFRoYW1lcw== 81188
+CXVuaXQ= 81189
+Q09MVU1O 81190
+IGxvaQ== 81191
+LHByb3Rv 81192
+X1BSSQ== 81193
+IHdhbmRlcmVk 81194
+IHNhcGk= 81195
+YmFja3dhcmQ= 81196
+YXJhb2g= 81197
+IEZI 81198
+IEFsZw== 81199
+CWFj 81200
+YXJybw== 81201
+5Y6G 81202
+IFNPUw== 81203
+IERyZWFk 81204
+VmVjdG9yWGQ= 81205
+LnJtdHJlZQ== 81206
+X2V4ZWN1dG9y 81207
+IHByZWduYW5jaWVz 81208
+IHByYWN5 81209
+IFd3dw== 81210
+IEFyY2hiaXNob3A= 81211
+IG1laW5lbg== 81212
+RlU= 81213
+LkVudg== 81214
+IGVubGlnaHRlbmVk 81215
+IG9yaWdpbmF0ZQ== 81216
+5Y+K 81217
+IHpsaWI= 81218
+X1NB 81219
+IHdhc3Rlcw== 81220
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg 81221
+cHJhcw== 81222
+IGhvcnJpZmllZA== 81223
+IENhbGR3ZWxs 81224
+dG95 81225
+X3Nob3Q= 81226
+IGxlc2Jp 81227
+IE1hZ25ldA== 81228
+b3hpYw== 81229
+U3VybmFtZQ== 81230
+IHNob3dUb2FzdA== 81231
+CURlc3Ryb3k= 81232
+LmdldEV4dGVybmFs 81233
+SUxJ 81234
+IE5ldmlsbGU= 81235
+dHNreQ== 81236
+IG1lbGFrdWthbg== 81237
+ICImIw== 81238
+IGZsb3dlcmluZw== 81239
+IHZldGVyaW5hcmlhbg== 81240
+IGhhcm1vbmlj 81241
+IENhc3NhbmRyYQ== 81242
+KENyZWF0ZQ== 81243
+cGVyc2U= 81244
+UGVybQ== 81245
+KU5TU3RyaW5n 81246
+IGlzSW4= 81247
+IEZsb2F0aW5nQWN0aW9uQnV0dG9u 81248
+L05ldw== 81249
+IPCd 81250
+Y2FwYWJpbGl0eQ== 81251
+IGN1Y2tvbGQ= 81252
+IEJhaW4= 81253
+KCl7DQoNCg== 81254
+UEVBUg== 81255
+IGphd3M= 81256
+IGdvZGU= 81257
+IGNhc3NldHRl 81258
+LmZyZXF1ZW5jeQ== 81259
+U0NPUkU= 81260
+LmludGVudA== 81261
+Olsi 81262
+IOWmguaenA== 81263
+77yf4oCd 81264
+L0ltYWdl 81265
+IHNpZW5kbw== 81266
+X2FsbG9jYXRpb24= 81267
+OkI= 81268
+L1JlZ2lzdGVy 81269
+X2thdGVnb3Jp 81270
+dW55YQ== 81271
+Lmluc3RhbmNlcw== 81272
+IFVOSVZFUlNJVFk= 81273
+IHBsZWFzYW50bHk= 81274
+IGdsYW5kcw== 81275
+IFlFTExPVw== 81276
+IFRoaWNr 81277
+QW10 81278
+IHByeQ== 81279
+IGx1aw== 81280
+KHByb2JsZW0= 81281
+IHByb2plY3Rpbmc= 81282
+W25vdw== 81283
+IGVzdG95 81284
+KCgpPT4= 81285
+IHdheXBvaW50cw== 81286
+IEJsaWNr 81287
+LlJlcXVpcmU= 81288
+TGFrZQ== 81289
+IElHTk9SRQ== 81290
+IFFIQm94TGF5b3V0 81291
+X3Jlc3BvbnNlcw== 81292
+Lndy 81293
+JmFjdGlvbg== 81294
+LmNoYXJhY3RlcnM= 81295
+SVc= 81296
+cGFnZU51bQ== 81297
+IGRpc3RyYWN0aW5n 81298
+XS0n 81299
+cGVlcw== 81300
+b3VuY3k= 81301
+IHNlZ3U= 81302
+LmdldFNlbGVjdGlvbk1vZGVs 81303
+SW5saW5pbmc= 81304
+J2FmZg== 81305
+IFByZXNlcnZl 81306
+IGFjcXVhaW50YW5jZQ== 81307
+IGFudXM= 81308
+aW5zdGl0dXRpb24= 81309
+IC8vKg== 81310
+IFNpY2s= 81311
+IEtvZGk= 81312
+IEFWUg== 81313
+IGJldHI= 81314
+IEJlcm5zdGVpbg== 81315
+LGN2 81316
+Y2Ni 81317
+Q0FG 81318
+CXNpZ25hbA== 81319
+6KiI 81320
+UmVzdWx0c0NvbnRyb2xsZXI= 81321
+IHNhbG9wZXM= 81322
+IHBoZW5vdHlwZQ== 81323
+dWJhaA== 81324
+X2RhdGFzZXRz 81325
+IGdyYWNpb3Vz 81326
+IENsaXBib2FyZA== 81327
+IGdlbmRlcnM= 81328
+ZG93bmxvYWRz 81329
+RXhwZXJpbWVudGFs 81330
+IGJla2FubnQ= 81331
+IG5pdmU= 81332
+LkVk 81333
+ZGlzbWlzcw== 81334
+XFR3aWc= 81335
+LkF2 81336
+L3Rhc2tz 81337
+LnBpY2tsZQ== 81338
+KkI= 81339
+Y2VzdG9y 81340
+Y2FwaXRhbGl6ZQ== 81341
+LkdldFNlcnZpY2U= 81342
+S2V5SWQ= 81343
+LnBpdGNo 81344
+IENvbnRyb2xsZWQ= 81345
+LnNhdmVk 81346
+IHphag== 81347
+IENhdGh5 81348
+KENhbmNlbGxhdGlvblRva2Vu 81349
+LWFuaW1hdGU= 81350
+XFxc 81351
+IEphc21pbmU= 81352
+LkxJTkU= 81353
+IGJvdGhlcnM= 81354
+IGJ1ZmZhbG8= 81355
+IEZPUkVJR04= 81356
+IHRhY2tsZWQ= 81357
+X0hFQVA= 81358
+IHNlcnZpYw== 81359
+Pj4s 81360
+IEFjdG9ycw== 81361
+LlR4 81362
+ZWJ4 81363
+X3Zpc2l0b3I= 81364
+X21hcnNoYWxlZA== 81365
+LG1hcA== 81366
+IGhlYXRlcnM= 81367
+IHVMb2NhbA== 81368
+IEthcG9vcg== 81369
+IG1pbnV0 81370
+LnJlYWRBcw== 81371
+IC4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4u 81372
+X1ZPTFQ= 81373
+LmJ6 81374
+IGNvcnJlY3Rpbmc= 81375
+U0VQ 81376
+YnJpbmc= 81377
+SHU= 81378
+IEd1cw== 81379
+QUFE 81380
+aWVyYW4= 81381
+ZnJhcmVk 81382
+X3JvbQ== 81383
+IHNjYXJjaXR5 81384
+IGFwb2xvZ2lzZQ== 81385
+IHNvbGlkcw== 81386
+IEZvcm1hdHRlcg== 81387
+ICclJA== 81388
+LXZpcw== 81389
+IiwiIiw= 81390
+VU5ERVI= 81391
+ISEhIQoK 81392
+IEVsZXZlbg== 81393
+KSld 81394
+IHNhdGlyZQ== 81395
+XHVC 81396
+IHNldmVudGVlbg== 81397
+TEFOR1VBR0U= 81398
+IGFkdmVyc2FyeQ== 81399
+IHN0cmZ0aW1l 81400
+IG5leHVz 81401
+dWJpdHM= 81402
+ICclIg== 81403
+IFNLSVA= 81404
+S0hS 81405
+LmJhdA== 81406
+IEplYW5z 81407
+Lj8= 81408
+IGltcG9zdA== 81409
+LnF0eQ== 81410
+Q29tcHJlc3Npb24= 81411
+IHByaW5jaXBhbGVz 81412
+b25pbw== 81413
+IGJhcmNlbG9uYQ== 81414
+IENoaWxp 81415
+X21vc3Q= 81416
+LnVm 81417
+IGNvbnRlbnRWYWx1ZXM= 81418
+IEZpc3Q= 81419
+dWdhZG9y 81420
+VGV4dFdyaXRlcg== 81421
+QkFDS0dST1VORA== 81422
+IGxpdnJv 81423
+IERlc2lyZQ== 81424
+bWVhc3VyZW1lbnQ= 81425
+UHJvYmU= 81426
+IHB1ZGRpbmc= 81427
+LnNob3dFcnJvcg== 81428
+IHVudGVyc3TDvHQ= 81429
+44CB44CB 81430
+IMSHZQ== 81431
+IHB1bml0aXZl 81432
+5q2i 81433
+TGlzdEdyb3Vw 81434
+LkFyZWE= 81435
+IPCfmIkKCg== 81436
+b29yZA== 81437
+IHNjcmFwaW5n 81438
+KHRpY2tldA== 81439
+IFdvY2hl 81440
+IGV4cGVjdGVkUmVzdWx0 81441
+IEtvc3Rlbmxvcw== 81442
+Y29uZmlndXJlZA== 81443
+X3N0cmVycm9y 81444
+LmFkZEhhbmRsZXI= 81445
+bW91c2VsZWF2ZQ== 81446
+IEZlbGlwZQ== 81447
+IENoaW0= 81448
+X0NTUg== 81449
+UENB 81450
+aWZpY2HDp8Ojbw== 81451
+KysKCg== 81452
+eWFz 81453
+IOaWueazlQ== 81454
+IElETQ== 81455
+IGFuaW1hdGVXaXRoRHVyYXRpb24= 81456
+IHNhbWVu 81457
+LnN1YnRpdGxl 81458
+X0tleURvd24= 81459
+IFRyZXk= 81460
+IHRlbXBvcmFkYQ== 81461
+IHNwZA== 81462
+IFJj 81463
+IE1hc3NpdmU= 81464
+IGJvd3M= 81465
+SG9zcGl0YWw= 81466
+IGdyb290 81467
+IHBhdmluZw== 81468
+IGNob3Jlcw== 81469
+IEFsbHk= 81470
+IGNlcnRpZmljYXRpb25z 81471
+IHhib3g= 81472
+c2VsZWN0QWxs 81473
+R2FtZU92ZXI= 81474
+IGNvcm5lcnN0b25l 81475
+UmVjb3ZlcmVk 81476
+IGRlZW0= 81477
+VWx0cmE= 81478
+IGdldExhc3Q= 81479
+IGFsbWE= 81480
+LnRleHRGaWVsZA== 81481
+IHdhaXZlZA== 81482
+Pih7Cg== 81483
+IEVzdHI= 81484
+aXNhYmxl 81485
+IHByb3Rvbg== 81486
+X2ZhY2Vib29r 81487
+X1RSQUlO 81488
+IGNvb3BlcmF0aW5n 81489
+dW5naQ== 81490
+QXJpem9uYQ== 81491
+I2VjaG8= 81492
+LWV4cHJlc3Npb24= 81493
+Lm1pbnV0ZXM= 81494
+IHByZWZpeGVk 81495
+IGZpc2hlcmllcw== 81496
+LmNvcnJlY3Q= 81497
+IG7Dpg== 81498
+KFNwcml0ZQ== 81499
+TW9kcw== 81500
+IFZpZGU= 81501
+IGdldEJ5SWQ= 81502
+IEtleW5lcw== 81503
+IEVneXB0aWFucw== 81504
+X0NPRA== 81505
+Qmllbg== 81506
+cmVvcGVu 81507
+aWdoZXQ= 81508
+UkVERU5USUFM 81509
+IHVud2luZA== 81510
+JA0K 81511
+IHJhY2tldA== 81512
+IGZsb2F0VmFsdWU= 81513
+IFNwZWNpYWx0eQ== 81514
+b2NhdGU= 81515
+bW91bnRlZA== 81516
+QXR0ZW1wdHM= 81517
+T2ZmaWNlcnM= 81518
+SGFzaFRhYmxl 81519
+IGTDqXZlbG9wcGVtZW50 81520
+IGRhcA== 81521
+IG10eA== 81522
+TmFycmF0ZWQ= 81523
+a0I= 81524
+X1NUQQ== 81525
+LUNsYXNz 81526
+IGR1bA== 81527
+IExlYWRz 81528
+IHRyw6pz 81529
+ZnJpZW5kbHk= 81530
+IEZpbHRlcmluZw== 81531
+LXByb3ZpZGVy 81532
+INGD0YHQvw== 81533
+IEtvbGthdGE= 81534
+bWFza2Vk 81535
+SURhdGE= 81536
+IFt8 81537
+wqQ= 81538
+IFJlZXNl 81539
+IEhvbm9sdWx1 81540
+VG9PYmplY3Q= 81541
+IHRocmlmdA== 81542
+YXNzaQ== 81543
+IGNvbmdyYXR1bGF0aW9ucw== 81544
+U0tJ 81545
+ZW50YXJpb3M= 81546
+IEZST05U 81547
+dWZpZw== 81548
+aG9u 81549
+CWdldGxpbmU= 81550
+IGhlYXJ0eQ== 81551
+Y2FsaW5n 81552
+IMOpY29ub20= 81553
+ICoqKi8K 81554
+X0hFUkU= 81555
+YCg= 81556
+TWljaGlnYW4= 81557
+QmVhbnM= 81558
+LXJvdXRl 81559
+IHByaW5j 81560
+IEd1aWRhbmNl 81561
+CWVtaXQ= 81562
+Lk9Q 81563
+dGhpYw== 81564
+ZWxvcGU= 81565
+IElSZXF1ZXN0 81566
+IGhhbmRsZUNsb3Nl 81567
+ZGF0YUFycmF5 81568
+LkV4ZWN1dGVTY2FsYXI= 81569
+RVBISVI= 81570
+IENvbnZlcnNlbHk= 81571
+KEZvbnQ= 81572
+IG1ldHJl 81573
+IFNwaWVsZXI= 81574
+RWxsaXBzZQ== 81575
+IFBWT0lE 81576
+IERhdGFDb250ZXh0 81577
+Y29uc3RydWN0ZWQ= 81578
+QU5ESU5H 81579
+LS0tLS0tLS0tLS0qLwo= 81580
+Qm9uam91cg== 81581
+X1BIUA== 81582
+cHJvZ3Jlc3NiYXI= 81583
+Tm90U3VwcG9ydGVkRXhjZXB0aW9u 81584
+IHZlcmRhZGU= 81585
+L2NoYW5nZQ== 81586
+b3Jzaw== 81587
+IGFyb21hdGlj 81588
+cmVzcG9ucw== 81589
+cmVhbGxvYw== 81590
+YXRpc2No 81591
+LGV2 81592
+IFNpb3V4 81593
+dGVh 81594
+IFBvZQ== 81595
+5LmI 81596
+X2Ntb3M= 81597
+IGFsYg== 81598
+KGxy 81599
+IEFwcGFyZWw= 81600
+IGRlbGxv 81601
+INGC0L7Rhw== 81602
+IHN0cmVhbWxpbmU= 81603
+d2NoYXI= 81604
+QWRvYmU= 81605
+LG1vZHVsZQ== 81606
+IHVuaW5zdXJlZA== 81607
+fSIpDQo= 81608
+KCIvLypbQA== 81609
+LXBoYXNl 81610
+IGZldQ== 81611
+X3RB 81612
+em9law== 81613
+IGZvbGxpYw== 81614
+IHR1Zw== 81615
+IGJlZmluZA== 81616
+IHRhbGxlc3Q= 81617
+KG10 81618
+aWVkeQ== 81619
+X0xlbmd0aA== 81620
+IHN0YXVuY2g= 81621
+IHJlbW92ZU9iamVjdA== 81622
+IGZsYWtlcw== 81623
+Z3Jlc3Fs 81624
+IGlua2w= 81625
+IFNDU0k= 81626
+IEtlZXBlcg== 81627
+O2w= 81628
+IEhpbmR1cw== 81629
+X1BFRA== 81630
+X0NPTkQ= 81631
+IExhdW5kcnk= 81632
+KytdPQ== 81633
+X0FVWA== 81634
+IGJ5xYI= 81635
+IGF1bWVudG8= 81636
+bWFyZ2luTGVmdA== 81637
+ZXF1YWxpdHk= 81638
+IEx1eg== 81639
+IEVjaw== 81640
+X21hcw== 81641
+X2xlbnM= 81642
+IHN0ZXJpbGU= 81643
+Y2xpZW50ZXM= 81644
+J30pCgo= 81645
+IGdvb2R3aWxs 81646
+IEVsbGlzb24= 81647
+U3BhY2VJdGVt 81648
+IHNob3dNZXNzYWdl 81649
+66Gc6re4 81650
+IGNvbnRyYXRv 81651
+UG9zdGluZw== 81652
+LmludGVycG9sYXRl 81653
+KGZpbGw= 81654
+IGJ1bGxwZW4= 81655
+LmdlbmVy 81656
+IGh1ZXM= 81657
+IG1lbW9yYW5kdW0= 81658
+dG9Qcm9taXNl 81659
+IEJ5eg== 81660
+KHB4 81661
+KFByb2dyYW0= 81662
+UkVTU0lPTg== 81663
+YmZk 81664
+IHBsYW50YQ== 81665
+Lm1vdXNlUG9zaXRpb24= 81666
+IFNwYW0= 81667
+6LSn 81668
+dGVsZWdyYW0= 81669
+YWd5 81670
+IGdlZnVuZGVu 81671
+LkRvbQ== 81672
+IGxpbmVtYW4= 81673
+LmJ0bkRlbGV0ZQ== 81674
+IHNlbGVjdGl2ZWx5 81675
+65Og 81676
+SUZT 81677
+IEdldEhhc2hDb2Rl 81678
+IHJldGly 81679
+IHJlcXVpc2l0ZQ== 81680
+QlRUYWc= 81681
+cGxpYg== 81682
+IGZpcmVmb3g= 81683
+LnRyYWRl 81684
+ICMk 81685
+LmNvbXByZXNz 81686
+IGxhZGVu 81687
+IERpcmVjdG9yeUluZm8= 81688
+IE1vZGVz 81689
+IGtvbmU= 81690
+IGRpdnVs 81691
+CWhz 81692
+Y3JvZnQ= 81693
+IFdIWQ== 81694
+eENF 81695
+L0dyaWQ= 81696
+X0FVRA== 81697
+IFNjcmU= 81698
+IGVycm9yVGhyb3du 81699
+U2FkbHk= 81700
+YXRpdGlz 81701
+IG5lZ2xpZ2libGU= 81702
+LlJlZ2lzdGVyVHlwZQ== 81703
+IE1vaXN0 81704
+5rWL6K+V 81705
+IEJNQw== 81706
+bGVhZmxldA== 81707
+eW5l 81708
+cm9rZW4= 81709
+IHZpbmM= 81710
+dHR5 81711
+IGJldXJldHRl 81712
+IEFscGluZQ== 81713
+IE1jTQ== 81714
+U3BvaWxlcg== 81715
+ZGlzdHJpYnV0aW9u 81716
+LXJheXM= 81717
+IOuwlA== 81718
+X3BhcmVudHM= 81719
+IGNyYXRlcw== 81720
+IGNvbW11dGVycw== 81721
+IEFyZ2VudGluZQ== 81722
+77u/LyoK 81723
+L2ZyYW1ld29yaw== 81724
+IGNoYW5uZWxJZA== 81725
+Z3JlZW5z 81726
+LnNldFN0eWxlU2hlZXQ= 81727
+IGluYWNjZXNzaWJsZQ== 81728
+aXRhdGVz 81729
+IHdhcm1lZA== 81730
+RmFicmlj 81731
+Z2V0YXR0cg== 81732
+ZGlzcGxheVRleHQ= 81733
+X01PTklUT1I= 81734
+IHNpZGV3YWxrcw== 81735
+SW50aWFsaXplZA== 81736
+IGtvbWVu 81737
+IGRpc2NyaW1pbmF0b3I= 81738
+IE5hdmlnYXRl 81739
+KERpcmVjdGlvbg== 81740
+IFNwaXQ= 81741
+X2FkZGl0aW9uYWw= 81742
+IGh0b24= 81743
+IGVzcGVyYQ== 81744
+IGRlbHZl 81745
+IGNvbXBhcnRpcg== 81746
+IHByZWVtcHQ= 81747
+cHJvY2Vzc29ycw== 81748
+LWdpdA== 81749
+YmVlbg== 81750
+LlNVQg== 81751
+IFJlZXZlcw== 81752
+L2dlbg== 81753
+O3RvcA== 81754
+CU1QSQ== 81755
+Wlc= 81756
+R0VTVA== 81757
+YWJpbGly 81758
+IHByb2dyZXNzaXZlcw== 81759
+aGFmdA== 81760
+QXVm 81761
+IEFjdGlvblR5cGU= 81762
+bGVv 81763
+IHV0YW4= 81764
+SW5pY2lhbA== 81765
+PlVzZXI= 81766
+IH0pOwoKCgo= 81767
+INio2Yc= 81768
+IENoYWlucw== 81769
+aXNzcGFjZQ== 81770
+L3JlbQ== 81771
+U1FMaXRl 81772
+IGNlYXNlZmlyZQ== 81773
+JGFy 81774
+VFJT 81775
+Oi8vew== 81776
+IFNwaXJpdHM= 81777
+2Lo= 81778
+KFNpemU= 81779
+IG51Zw== 81780
+IE9sc2Vu 81781
+IGNobG9yaWRl 81782
+IERpc3BsYXlOYW1l 81783
+IFBlcnQ= 81784
+IGdldE1heA== 81785
+IEVkaXRvcnM= 81786
+IFBhaXM= 81787
+YXNtdXM= 81788
+VmFj 81789
+IFRhYmxlTmFtZQ== 81790
+IG51YW5jZWQ= 81791
+Rm9yTWVtYmVy 81792
+IHNsZWVweQ== 81793
+YWR2aXNvcg== 81794
+IHN0YWxraW5n 81795
+Lm1lZGlhbg== 81796
+X0F0dA== 81797
+IGdldE5vZGU= 81798
+IEZhbmN5 81799
+5pWw6YeP 81800
+LkF0dHJpYnV0ZVNldA== 81801
+KGluc3RydWN0aW9u 81802
+eEJE 81803
+IGtvcA== 81804
+QWZmZWN0ZWQ= 81805
+L25hdmJhcg== 81806
+IGFpbG1lbnRz 81807
+IFJhbWFkYW4= 81808
+IEFjY2VudA== 81809
+IFBhcmFtb3VudA== 81810
+IEdBTQ== 81811
+5L2N572u 81812
+PSov 81813
+LklOUFVU 81814
+PFByb2plY3Q= 81815
+TGVhc3Q= 81816
+IEdlbm9tZQ== 81817
+QWNjZXNzb3JUeXBl 81818
+bGVmdHJpZ2h0YXJyb3c= 81819
+dmVudGluZw== 81820
+L3BheW1lbnQ= 81821
+X1B0cg== 81822
+IHRhbWU= 81823
+IE1FTUJFUg== 81824
+IEJpdGNvaW5z 81825
+LmVwYW0= 81826
+LlBsZWFzZQ== 81827
+IHNjaHdhcg== 81828
+Q3BwTWV0aG9kSW50aWFsaXplZA== 81829
+IHVuaWNvcm4= 81830
+IGJlZGV1dA== 81831
+X0hT 81832
+IGF1dG9nZW5lcmF0ZWQ= 81833
+IExpbGx5 81834
+IEFzc2Vzcw== 81835
+IEhlaWRp 81836
+LnNvdXJjZXM= 81837
+LnRlbGw= 81838
+YXJnaW5z 81839
+KCInIiw= 81840
+0LvQvtC2 81841
+IEVyb3RpYw== 81842
+IGp1c3Rv 81843
+IGVzYWM= 81844
+Y29tYQ== 81845
+IENvbG9ueQ== 81846
+IHBjdA== 81847
+CWVu 81848
+IGVtcGV6 81849
+IERlbGV0aW5n 81850
+TkVM 81851
+IGVuYW0= 81852
+UHJlc3NFdmVudA== 81853
+IFJlc29sdmVy 81854
+IFJURQ== 81855
+Rng= 81856
+IEluY29ycmVjdA== 81857
+IHlj 81858
+X3JlYWRpbmc= 81859
+O2Jhc2U= 81860
+IGhhc2h0YWdz 81861
+IE1hcmluZXJz 81862
+LlNldEZsb2F0 81863
+IHJlYXNzdXJpbmc= 81864
+aXJzY2g= 81865
+KHVzZXJpZA== 81866
+ID09PT0= 81867
+XSkpKTsK 81868
+a2Y= 81869
+IHRpbGVk 81870
+ZWd1YXJk 81871
+Q2xpZW50ZXM= 81872
+5pmC6ZaT 81873
+ZHNs 81874
+UmlnaHRz 81875
+IFBzYWxt 81876
+ZHVyaW5n 81877
+Q2xlYXJDb2xvcg== 81878
+dXN0YQ== 81879
+PENvbW1lbnQ= 81880
+IG5venpsZQ== 81881
+IFBMQUNF 81882
+L2hpc3Rvcnk= 81883
+aWh1 81884
+aVZhcg== 81885
+IGdlcm0= 81886
+IHRyaW1taW5n 81887
+IEh1bnRlcnM= 81888
+IFJTVlA= 81889
+SW50ZXJlc3RpbmdseQ== 81890
+amlhbg== 81891
+KSl7Cgo= 81892
+LkV4cGVjdA== 81893
+IFRvaWxldA== 81894
+IHdhbGxwYXBlcnM= 81895
+LldlYlNlcnZsZXQ= 81896
+YXJwYQ== 81897
+L21haW53aW5kb3c= 81898
+aHE= 81899
+IHV5 81900
+IGluZGlnbg== 81901
+Q2hlY2tlZENoYW5nZUxpc3RlbmVy 81902
+IGNhbGxlcnM= 81903
+IE1vdXNlRXZlbnRBcmdz 81904
+IEpTY3JvbGxQYW5l 81905
+IHfFgmE= 81906
+cmVwb3NpdG9yaWVz 81907
+IMWbdw== 81908
+IHJlZmVyZW5jaWE= 81909
+IGlvdGE= 81910
+IGNhcmdhcg== 81911
+X29ic2VydmVy 81912
+SENJ 81913
+c2lsdmVy 81914
+IGRldmFzdGF0aW9u 81915
+LXNlbWlib2xk 81916
+IEV4cGxhaW4= 81917
+IEJsb2NrbHk= 81918
+Llhy 81919
+ZXN0dXJlUmVjb2duaXplcg== 81920
+Q2FuY2VsQnV0dG9u 81921
+IExvY2tl 81922
+VHJpYWw= 81923
+X1BMQUNF 81924
+anVhbGFu 81925
+IFJ1Ymlu 81926
+U3RyaXBl 81927
+IG1ldGFEYXRh 81928
+Y29uZmlkZW5jZQ== 81929
+X2JhdHRlcnk= 81930
+IGlzbA== 81931
+IGJvYQ== 81932
+LnRhcmdldHM= 81933
+bGlqa2U= 81934
+IGFkb2xlc2NlbnRl 81935
+YmV3 81936
+LEZhbHNl 81937
+IHlPZmZzZXQ= 81938
+UHJldmlvdXNseQ== 81939
+PXBhdGg= 81940
+X0FB 81941
+iOadgw== 81942
+IGJha2VrYQ== 81943
+IGxlZQ== 81944
+IEJsb2NraW5n 81945
+L3RpdGxl 81946
+IOW8gA== 81947
+IFN0ZXZlbnNvbg== 81948
+KW9iamVjdA== 81949
+aXN0cm9z 81950
+LmdldFNlcnZlcg== 81951
+IHBsYW50YXRpb24= 81952
+X0JveA== 81953
+ICc7Jw== 81954
+dGljYQ== 81955
+KSldOwo= 81956
+IGRpc3Bhcml0aWVz 81957
+xrDhu5s= 81958
+aWNyb2JpYWw= 81959
+IHNwYXM= 81960
+L0RE 81961
+KHBvaW50ZXI= 81962
+IG1pZHBvaW50 81963
+LmdldENsYXNzTmFtZQ== 81964
+IFRvdGFsbHk= 81965
+IGNvbmdlbg== 81966
+IHTDqnRl 81967
+LnhsaW0= 81968
+Q09NUExFVEU= 81969
+KGZp 81970
+b3dhcmQ= 81971
+0LzRjw== 81972
+LmFzYw== 81973
+IHBhZ2luYXRl 81974
+IGx1cmtpbmc= 81975
+LnNpZ251cA== 81976
+U1RZTEU= 81977
+IHdvcnNo 81978
+aHY= 81979
+IGRlZmVuc2l2ZWx5 81980
+IEx1dGhlcmFu 81981
+LmZ1bg== 81982
+INC40L3RhNC+0YDQvA== 81983
+cHNj 81984
+IGFkbW9u 81985
+IEVzdGltYXRlZA== 81986
+IE15U3FsQ29ubmVjdGlvbg== 81987
+LnN0YXR1c1N0cmlw 81988
+IGFudGlnZW4= 81989
+IGhlcnJhbWllbnQ= 81990
+IENvbnN1bWVycw== 81991
+IFlU 81992
+Lm1hc2tzVG9Cb3VuZHM= 81993
+Lnh0aWNrcw== 81994
+OnJlcXVlc3Q= 81995
+IE1vbw== 81996
+LWF1 81997
+IHRvUmV0dXJu 81998
+IFNhcHBoaXJl 81999
+Y294 82000
+ZXhhbXBsZUlucHV0RW1haWw= 82001
+IGNvcmF6 82002
+KHBpZWNl 82003
+IHJlY29uc3RydWN0ZWQ= 82004
+X3NpZ251cA== 82005
+J10pPw== 82006
+QmlsbGluZw== 82007
+IENyb3dsZXk= 82008
+c3Rvcm1z 82009
+Zm9yY2Vy 82010
+IHN1cHJlbWFjaXN0 82011
+X3doZWVs 82012
+CXBj 82013
+LmdldERvY3VtZW50 82014
+LnVuc3F1ZWV6ZQ== 82015
+LmdyYWRl 82016
+ZWxsdW5n 82017
+LnNob3BwaW5n 82018
+Y3VzdG9tZXJJZA== 82019
+IG1lZGlkYXM= 82020
+IE1vbWVudHM= 82021
+ZW51b3Vz 82022
+SUZJQ0FURQ== 82023
+IyMjIyMjIwo= 82024
+5paH56ug 82025
+4buNYw== 82026
+b3Jtc2c= 82027
+YWxvbQ== 82028
+LXRyYWRl 82029
+CWJ0 82030
+L3N0dWRlbnQ= 82031
+YnJpZw== 82032
+YW5uZXNz 82033
+KHJh 82034
+IHJpY2VyY2E= 82035
+U3BlYWtlcg== 82036
+csOz 82037
+Z3Rlc3Q= 82038
+R2x5cGg= 82039
+w7xnZW4= 82040
+QEpzb24= 82041
+KHN1bW1hcnk= 82042
+S29t 82043
+YmV0aA== 82044
+L2VuZ2luZQ== 82045
+Q2xpbWF0ZQ== 82046
+c3VibWl0QnV0dG9u 82047
+ZXZl 82048
+ID09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09Cg== 82049
+cGVkaWE= 82050
+IHVzZXJuYW1lcw== 82051
+IEpN 82052
+IG1zZQ== 82053
+aW5zcGVjdA== 82054
+IFNuYXBkcmFnb24= 82055
+IGRlZmVuc2VtYW4= 82056
+IFVJVGFibGVWaWV3RGVsZWdhdGU= 82057
+aW5kaG92ZW4= 82058
+IEJveWxl 82059
+IEFsdGE= 82060
+YXJkdQ== 82061
+IHdyZXN0bGVy 82062
+IFN0cmFpdA== 82063
+IGVncmVn 82064
+X2Jhc2VsaW5l 82065
+RW52aXJvbm1lbnRhbA== 82066
+IGludml0 82067
+IEJUUw== 82068
+IElTSUw= 82069
+IGNvb3A= 82070
+aG9yZXM= 82071
+I0A= 82072
+IGNvbXBlbA== 82073
+KHNraXA= 82074
+6Ziz 82075
+X0RFUFJFQ0FURUQ= 82076
+aXBoZXJz 82077
+ZG91YmxlVmFsdWU= 82078
+IEFSUg== 82079
+LlNjb3Jl 82080
+IGNocm9tb3NvbWVz 82081
+Y2xhdXNl 82082
+IEx1aWdp 82083
+IHN1bnNjcmVlbg== 82084
+IGN5dG9r 82085
+LnRvSlNPTlN0cmluZw== 82086
+IHByb3ByZQ== 82087
+cG9vbnM= 82088
+bWl0dGVycw== 82089
+IGtpdHRlbnM= 82090
+IGNhdGhvbGlj 82091
+Lmx0 82092
+wqw= 82093
+X3F1aWNr 82094
+IHZyYWk= 82095
+IElSZWFkT25seQ== 82096
+IEhpZ2dpbnM= 82097
+IHNob3ZlZA== 82098
+IGxpYWlzb24= 82099
+X293bg== 82100
+IG1vc3F1aXRvZXM= 82101
+X25n 82102
+LlNldEtleU5hbWU= 82103
+X1JlbmRlcmVy 82104
+X09zYw== 82105
+LnVucmVnaXN0ZXI= 82106
+TWVzc2FnZVR5cGU= 82107
+LWZvdW5kZWQ= 82108
+IHNvdXRoZWFzdGVybg== 82109
+IGhhc2h0YWJsZQ== 82110
+LmluZGVudA== 82111
+IGpveWZ1bA== 82112
+X3NleA== 82113
+c2Fk 82114
+LmRlYmlhbg== 82115
+X2dhcw== 82116
+IHBlcmlzaA== 82117
+IGhldGU= 82118
+X3NpbmdsZXRvbg== 82119
+KGdyYWQ= 82120
+IGt0w7NyYQ== 82121
+IGR3aW5k 82122
+aXR0YWw= 82123
+U2VlaW5n 82124
+IFJvb2tpZQ== 82125
+CUxhYmVs 82126
+c2hhbg== 82127
+PDw8PDw8PDw= 82128
+IHLDqA== 82129
+aWVzZWw= 82130
+YXJyZXJh 82131
+Y2hyaXN0 82132
+IGN1cnZhdHVyZQ== 82133
+IGVwaGVt 82134
+Rm9ybWF0dGluZw== 82135
+LmRpY3Rpb25hcnk= 82136
+LlNldHRlcg== 82137
+IEhpc3RvZ3JhbQ== 82138
+IFN0dXR0Z2FydA== 82139
+IHBhY2luZw== 82140
+dXRhdGlvbnM= 82141
+IE5TSw== 82142
+IFBhbWVsYQ== 82143
+IEJhaWw= 82144
+IHBvbGFyaXphdGlvbg== 82145
+IEfDtg== 82146
+IEVsYWluZQ== 82147
+IGtpY2tvZmY= 82148
+IGNoYXBlbA== 82149
+PXBvc3Q= 82150
+IG1pZHdheQ== 82151
+ZXdpcw== 82152
+X01S 82153
+aWVlZQ== 82154
+LXRlc3Rpbmc= 82155
+bWV6 82156
+Pi0t 82157
+IGRvY3RyaW5lcw== 82158
+IG1pbGlldQ== 82159
+IFJBRElP 82160
+dGFrZW4= 82161
+UmVzcG9ucw== 82162
+IGhhbmRzZXQ= 82163
+IGNvbnRybw== 82164
+IEFwcGxpZXM= 82165
+6Zif 82166
+LkJpbmRpbmdTb3VyY2U= 82167
+INis 82168
+IGh1bWlsaQ== 82169
+IE1lbGFuaWE= 82170
+T3ZlcmxhcA== 82171
+KFBhcmNlbA== 82172
+IHdhcmVob3VzZXM= 82173
+LkdldEJ5SWQ= 82174
+IGZyYW5rZnVydA== 82175
+IFdpdHQ= 82176
+LnByb2o= 82177
+IFNhc2hh 82178
+IFJldmVy 82179
+IGFydGljdWxhdGVk 82180
+YW5jaGVz 82181
+IFNlbWluYXI= 82182
+IERhZ2dlcg== 82183
+IEFnaWxl 82184
+T1dM 82185
+IEJz 82186
+b2tseW4= 82187
+RXRh 82188
+IGFnb3N0bw== 82189
+7ZWY7Jes 82190
+IG9wdGFyZw== 82191
+CW9uQ2hhbmdl 82192
+IFJPQUQ= 82193
+R0JL 82194
+IGVudGZlcg== 82195
+LkF1dG9Db21wbGV0ZQ== 82196
+IGhlbGZlbg== 82197
+Q2hlYXA= 82198
+IGFwcHJlbnRpY2U= 82199
+aW90aWNz 82200
+5oqA 82201
+T2ZZZWFy 82202
+aW5kZXJlZA== 82203
+Lk1TRw== 82204
+IE1hcsOtYQ== 82205
+KGlucGxhY2U= 82206
+IGZpbmRl 82207
+KERF 82208
+LlNlcmlhbGl6ZXI= 82209
+JHRpbWU= 82210
+dW5uYWJsZQ== 82211
+TWFpblRocmVhZA== 82212
+ZGVwbG95bWVudA== 82213
+IG1wZnI= 82214
+cmljaFRleHRQYW5lbA== 82215
+KTsKCgoKCg== 82216
+IGRhbnljaA== 82217
+X0JFRk9SRQ== 82218
+X2FyeQ== 82219
+IEJhdW0= 82220
+IHR1cmJ1bGVudA== 82221
+IE11bHRpbWVkaWE= 82222
+IHBoeXNpY2lzdA== 82223
+5Zy6 82224
+QW5pbWF0ZQ== 82225
+PUY= 82226
+UGFnbw== 82227
+L3R3aXR0ZXI= 82228
+b3R0aWU= 82229
+dWN1cnNhbA== 82230
+X3BhZ2luYXRpb24= 82231
+LmFyY2hpdmU= 82232
+LWRvY3VtZW50 82233
+aW5pbmU= 82234
+U2VsbGVy 82235
+YWRyZXNz 82236
+6ZO+5o6l 82237
+0LDRgtC10LPQvtGA 82238
+X2ZybQ== 82239
+bm9EQg== 82240
+aWdhdGVk 82241
+IE9zYW1h 82242
+cGV0dG8= 82243
+Pnk= 82244
+LVVu 82245
+IGNvcHBpYQ== 82246
+QWxtb3N0RXF1YWw= 82247
+LmxleA== 82248
+IGxldmVsZWQ= 82249
+IFNDSVA= 82250
+X0hPT0s= 82251
+SUxvZ2dlcg== 82252
+bmVhdQ== 82253
+77ye 82254
+24zZhg== 82255
+aWtoYWls 82256
+IHVwbG9hZGVy 82257
+IENhcm9seW4= 82258
+LmFkZFZhbHVl 82259
+dGhpbmtpbmc= 82260
+cHJpbnRTdGF0cw== 82261
+IGNhbWJpb3M= 82262
+cG9p 82263
+IEJFRA== 82264
+IHhibWM= 82265
+Lu+/vQ== 82266
+IHNhcmNhc3Q= 82267
+IE5FQw== 82268
+JGJvZHk= 82269
+QWxsV2luZG93cw== 82270
+IHlvdW5nc3Rlcg== 82271
+IHVuZWFzeQ== 82272
+KEFU 82273
+IG5vc3RhbGdpYw== 82274
+UFJJQ0U= 82275
+IFNlaXRlbg== 82276
+IG1ha2E= 82277
+IGxpbXA= 82278
+IGNvbnRyYXN0cw== 82279
+Q29mZmVl 82280
+CWdlbg== 82281
+IHBlcm1z 82282
+IE5lZWRsZXNz 82283
+b3V2ZQ== 82284
+YXJjaGluZw== 82285
+X3BlbmFsdHk= 82286
+cm93YWQ= 82287
+b25nYW4= 82288
+X2R1cg== 82289
+IGlmbmRlZg== 82290
+aWF1eA== 82291
+IGNhcGFjaWRhZA== 82292
+IE5vcnRl 82293
+IC0qLQ0K 82294
+aWZlcw== 82295
+IE1hbnNpb24= 82296
+I1JlZ2lvbg== 82297
+Q2FuY2VsbGF0aW9u 82298
+IG5lYXJpbmc= 82299
+IGxhbmd1 82300
+ZXJlcXVpc2l0ZXM= 82301
+X2V4cGVyaW1lbnQ= 82302
+b25kaGVpbQ== 82303
+XSwm 82304
+IENvb2xpbmc= 82305
+IHNhZmFyaQ== 82306
+IHBpb25lZXJz 82307
+IGZhcm1ob3VzZQ== 82308
+IGRpc3RhbmNpYQ== 82309
+IGRlc2VydGVk 82310
+IE5hcnJvdw== 82311
+LnNn 82312
+IGVudHJhcg== 82313
+LnJh 82314
+IHJlZnVyYmlzaGVk 82315
+IGludGVyY29ubmVjdGVk 82316
+IHN1cnZpdmVz 82317
+IHF1YWxpZmllcnM= 82318
+X0NIQVJT 82319
+LWFqYXg= 82320
+IFJvcnk= 82321
+IGtvbGVq 82322
+L0dM 82323
+X2xlZ2Fs 82324
+IFRZUEVT 82325
+IFZvaWNlcw== 82326
+IEZlcmQ= 82327
+dWplbXk= 82328
+IHNjb3JlYm9hcmQ= 82329
+IEJPVA== 82330
+eERE 82331
+IEl2YW5rYQ== 82332
+IGhzdg== 82333
+bm9kaXNjYXJk 82334
+IFRIRVNF 82335
+bW9qb20= 82336
+IHRpY2tpbmc= 82337
+cGVx 82338
+IOa3u+WKoA== 82339
+IE5pY29s 82340
+CWFuZ2xl 82341
+X2FsbG9jYXRlZA== 82342
+IHN0cnV0 82343
+eERC 82344
+RXZhbHVhdGU= 82345
+IFZBUklBTlQ= 82346
+IHJlZmVyZW5jZWRDb2x1bW5OYW1l 82347
+bG9o 82348
+IFJlcXVlc3RPcHRpb25z 82349
+IGNvY28= 82350
+IGJsZWFjaA== 82351
+X29yZ2FuaXphdGlvbg== 82352
+IENITw== 82353
+SFRUUFM= 82354
+X2JhcnJpZXI= 82355
+LnZpc2l0TWV0aG9kSW5zbg== 82356
+IHZpdGU= 82357
+IC0k 82358
+W2NlbGw= 82359
+IGNlc3NhdGlvbg== 82360
+CgoKCgoKCgoKCgo= 82361
+INGB0LDQuQ== 82362
+RXZhbHVhdGlvbg== 82363
+IENJTQ== 82364
+cXVhbGl0aWVz 82365
+WG1sQXR0cmlidXRl 82366
+IEVtb2pp 82367
+ICIoJw== 82368
+IFRVUk4= 82369
+eHNk 82370
+IEdJUw== 82371
+IGNyZWF0ZVNlbGVjdG9y 82372
+cmlwcGxl 82373
+IHVubmVjZXNzYXJpbHk= 82374
+IG5ld1Bvcw== 82375
+IHN5bWJvbGlzbQ== 82376
+b2J1dHRvbg== 82377
+IHNhbW8= 82378
+ICgqKCg= 82379
+LnJld2FyZA== 82380
+S0VSTkVM 82381
+KGpTY3JvbGxQYW5l 82382
+IGJ5c3RhbmQ= 82383
+X2ljYWxs 82384
+IGR1bmdlb25z 82385
+IGNvbnN0ZWxsYXRpb24= 82386
+IGVtYnJhY2Vz 82387
+IEluZmFudA== 82388
+QXVzdGlu 82389
+LmFic3RyYWN0 82390
+IGNvbXBhZ24= 82391
+IENvbmRpdGlvbmluZw== 82392
+TWFpcw== 82393
+VmVyaWZpZXI= 82394
+IFB5cmFtaWQ= 82395
+IG1MaXN0ZW5lcg== 82396
+X2J1aWxkaW5n 82397
+LlJlZGlz 82398
+IFRvb3Ro 82399
+TE9HR0VS 82400
+LkFzeW5jVGFzaw== 82401
+X3ByaW5jaXBhbA== 82402
+ZXhhbXBsZU1vZGFsTGFiZWw= 82403
+CUxvY2Fs 82404
+TWFya2Vycw== 82405
+IGRvbHBoaW5z 82406
+LlRleHRFZGl0 82407
+J2Fs 82408
+IG92ZXJzdA== 82409
+LWRyaXZl 82410
+IGluc29tbmlh 82411
+IGFkYg== 82412
+X3F1ZXVlcw== 82413
+RWI= 82414
+IERhbW4= 82415
+aXN0cmluZ3N0cmVhbQ== 82416
+CUR1ZWw= 82417
+aWJibGU= 82418
+IGltcmVhZA== 82419
+LmZpbmlzaGVk 82420
+IG1pc3JlcHJlc2VudGVk 82421
+xYRzdA== 82422
+aW9uYWxlcw== 82423
+Ik5vdw== 82424
+LlNlbGVjdFNpbmdsZU5vZGU= 82425
+IHdlYWtlbmluZw== 82426
+X2luc3RydWN0aW9ucw== 82427
+LW9z 82428
+IHN0YXJ0UG9pbnQ= 82429
+IE1pbWU= 82430
+IEhlbGQ= 82431
+fHwo 82432
+dW1taW5ncw== 82433
+b2tpbm8= 82434
+IHJlZmw= 82435
+cmlkb3I= 82436
+SW50ZWdyYXRlZA== 82437
+RU9iamVjdA== 82438
+cGVhdHM= 82439
+Q2lyY3VsYXI= 82440
+IFNvZGl1bQ== 82441
+IHBvZHLDrWE= 82442
+bWVkaWNpbmU= 82443
+IHBhcmFub2lh 82444
+L2JhY2tncm91bmQ= 82445
+KGJvcmRlcg== 82446
+X3Nsb3c= 82447
+IHByZXNlbnRWaWV3Q29udHJvbGxlcg== 82448
+IGNvbnRpbmdlbmN5 82449
+IFBhc2FkZW5h 82450
+bG9vcHM= 82451
+IE9j 82452
+YXBwbGljYXRpb25z 82453
+IG1wZw== 82454
+IEFR 82455
+LldpbkNvbnRyb2xz 82456
+bGVkb24= 82457
+IFJlcQ== 82458
+IEFjcmVz 82459
+aWJpcg== 82460
+IGdldFdpbmRvdw== 82461
+IFlhaA== 82462
+IG5lZWR5 82463
+4pa6 82464
+IFRPTQ== 82465
+KFsuLi4= 82466
+IGZx 82467
+IENhbWRlbg== 82468
+b3JkaW5hdGVk 82469
+CWNoaWxkcmVu 82470
+dmVnZXQ= 82471
+CWRpcmVjdGlvbg== 82472
+PEZpZWxk 82473
+X2NvcnJlY3Rpb24= 82474
+KEVORA== 82475
+SEVFVA== 82476
+RmFsc3k= 82477
+LmR5bGli 82478
+X1JFUE8= 82479
+IGJyaWxsaWFuY2U= 82480
+b2dyw6Fm 82481
+bG9k 82482
+IHBvd2RlcmVk 82483
+KEFydA== 82484
+IE1JTEw= 82485
+0LXQtNCw0Lo= 82486
+X3NpbXVsYXRpb24= 82487
+IHNtYXNoaW5n 82488
+IHVybFN0cmluZw== 82489
+IGRyZWFkZWQ= 82490
+cmllZw== 82491
+L25z 82492
+IEludGVycHJldGVy 82493
+Om1heA== 82494
+ZGVyaXY= 82495
+IFBldHQ= 82496
+IG1vZMOobGU= 82497
+IGFtcGxpZmllZA== 82498
+IFNpZ25hbHM= 82499
+Lm5hdkN0cmw= 82500
+5ZY= 82501
+IHNlcGFyYXRvcnM= 82502
+IFNISUZU 82503
+IGZpZGVsaXR5 82504
+LnNvbg== 82505
+KGNh 82506
+IFBMVUdJTg== 82507
+IGxpZ2h0ZW4= 82508
+UEJT 82509
+ZmxvYXRpbmc= 82510
+KGxvYWRlcg== 82511
+IHBlZWxlZA== 82512
+aGlj 82513
+IHRhcGVk 82514
+IG5vdmVtYnJl 82515
+IHN0dWZmaW5n 82516
+IEZpcmVhcm1z 82517
+LkRyYXdhYmxl 82518
+IGNvcnRpY2Fs 82519
+IEdVSUNvbnRlbnQ= 82520
+IFZlcm9uaWNh 82521
+X3JzYQ== 82522
+IGNvbW1lbW9yYXRl 82523
+LlNZU1RFTQ== 82524
+IGRhbXM= 82525
+LmlzVHJ1ZQ== 82526
+IFByZWduYW5jeQ== 82527
+7Iug 82528
+IGF1ZGl0b3J5 82529
+KENlbGw= 82530
+IGludmFkaW5n 82531
+IGZvckVhY2g= 82532
+CURyYXc= 82533
+TWFyY3Vz 82534
+UHJvY2Vzc2Vk 82535
+IHNwcmF5aW5n 82536
+IE91dGxpbmVJbnB1dEJvcmRlcg== 82537
+ZXNzZXJhY3Q= 82538
+IOacgA== 82539
+UGc= 82540
+LXF1YXJ0ZXJz 82541
+IHNrbA== 82542
+L3Byb3ZpZGVycw== 82543
+dG9IYXZlQmVlbkNhbGxlZFRpbWVz 82544
+IGNvc21vcw== 82545
+IGZpbmFsaXN0cw== 82546
+IHNsZWVwZXI= 82547
+IE1hdGVyaWFsQXBw 82548
+ZGFj 82549
+IGJ1c2luZXNzbWVu 82550
+xJ9lcg== 82551
+Qmlhcw== 82552
+ZGF0YWw= 82553
+VXBFZGl0 82554
+IFRpcg== 82555
+SVNUSUM= 82556
+IEhlcmE= 82557
+X2ludGVyc2VjdGlvbg== 82558
+IExhbWE= 82559
+CWFwcGVuZA== 82560
+IHBvbGx1dGFudHM= 82561
+IFNpa2g= 82562
+IGNvbGxhYm9yYXRpb25z 82563
+bnV0cml0aW9u 82564
+IGhhbW0= 82565
+IERpbGxvbg== 82566
+X0RPVA== 82567
+IGZpcnN0aGFuZA== 82568
+U09BUA== 82569
+PXo= 82570
+LnByaXY= 82571
+TWlzbWF0Y2g= 82572
+LnNlbmRSZWRpcmVjdA== 82573
+LmxpbmtMYWJlbA== 82574
+IHdyZWFr 82575
+TWFydmVs 82576
+L3Ns 82577
+IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIw== 82578
+IG1vdmFibGU= 82579
+0YPQuQ== 82580
+IERyaW5raW5n 82581
+YWNlYQ== 82582
+IHRyb3ZhcmU= 82583
+LkNTUw== 82584
+IGtlcm4= 82585
+dmZz 82586
+5pWw5a2X 82587
+IHN0ZXNzbw== 82588
+IEZPUkNF 82589
+IGxpZWY= 82590
+IGFjaGlldmVz 82591
+IEVsaWphaA== 82592
+R2V0UHJvcGVydHk= 82593
+LypA 82594
+IEh1bWFuaXR5 82595
+KFRoZQ== 82596
+d2FybQ== 82597
+PiIp 82598
+IGNvbXB1dGF0aW9ucw== 82599
+LnRpbnRDb2xvcg== 82600
+IHVzbGVlcA== 82601
+IEdQTHY= 82602
+bmRhdGE= 82603
+L2NsaQ== 82604
+TW9o 82605
+PiINCg== 82606
+LmJyaWRnZQ== 82607
+IGVuY3ljbG9wZWRpYQ== 82608
+IEJJTg== 82609
+IFN1cHBvc2U= 82610
+INio2Kc= 82611
+cmlldmVk 82612
+cGFnZW4= 82613
+aXJzZQ== 82614
+UGFjaWZpYw== 82615
+LmZ1bGxOYW1l 82616
+IGFsbGVnZQ== 82617
+aWxsdXN0cg== 82618
+IOqysA== 82619
+IGRldGVycmVudA== 82620
+IE5hcGxlcw== 82621
+aW5jbHVkZWQ= 82622
+UmF0ZXM= 82623
+IGhhc05leHQ= 82624
+IEplcmVtaWFo 82625
+IEZlcm5hbmRleg== 82626
+IGdldE9yZGVy 82627
+LlN1YnNjcmliZQ== 82628
+UG9zcw== 82629
+OikK 82630
+IFdvcmtzaGVldA== 82631
+YmxlbmQ= 82632
+IHdpdHR5 82633
+IGNvdW50ZXJmZWl0 82634
+X2R5 82635
+L1J1bnRpbWU= 82636
+IHNvZG9t 82637
+L2Rv 82638
+IDx8 82639
+IFJlY3J1 82640
+5aOw5piO 82641
+IG1vZGVsb3M= 82642
+IGJpdHJhdGU= 82643
+LmNybQ== 82644
+bHVz 82645
+IGZpbGVUeXBl 82646
+5bCR 82647
+IG1hcnJvdw== 82648
+IFZlbmV6dWVsYW4= 82649
+IHNjYXY= 82650
+IFNUT0NL 82651
+IEltcG9zc2libGU= 82652
+bmF2aWdhdGlvbkJhcg== 82653
+IHNpZ2h0aW5ncw== 82654
+IGNlbGxGb3JSb3dBdA== 82655
+IHJlY3Rz 82656
+IGFpcmw= 82657
+IExlc3Rlcg== 82658
+IG5vZHM= 82659
+QHJlZ2lzdGVy 82660
+eENE 82661
+cG5hbWU= 82662
+IHBvdHRlcnk= 82663
+IHp3YXI= 82664
+IFN1bmRlcmxhbmQ= 82665
+4oCmYnV0 82666
+L2NvbnRyb2w= 82667
+IGNhbGN1bHVz 82668
+KGlzb2xhdGU= 82669
+cGxhY2Vob2xkZXJz 82670
+Kilf 82671
+IH19DQo= 82672
+IEtvaGFuYQ== 82673
+Y29kaWxl 82674
+b3Rlcmlj 82675
+IHByZXBhaWQ= 82676
+IGdyYW5kbWE= 82677
+IHN1bHBo 82678
+IEdhaW5lcw== 82679
+XE1vZHVsZQ== 82680
+IGNvdW5zZWxsaW5n 82681
+LWdlbmVyaWM= 82682
+IFR1ZXM= 82683
+LkdyYWRpZW50 82684
+IFRodXJz 82685
+IGVudHJh 82686
+IGFkdmFuY2VtZW50cw== 82687
+U1dFUA== 82688
+X01BUktFUg== 82689
+IGtsdWI= 82690
+IG3DqWc= 82691
+ZmZmZmZmZg== 82692
+Il0pewo= 82693
+L2NvbXBpbGVy 82694
+YWRpZW5z 82695
+U3RyaW5nVmFsdWU= 82696
+IFNjdWxwdA== 82697
+cGFuZWxz 82698
+5b2i 82699
+5Lqn5ZOB 82700
+YXLDrWE= 82701
+IGRlcmFpbA== 82702
+IExvY2g= 82703
+IHBlcHA= 82704
+bXB6 82705
+IOKe 82706
+S1Y= 82707
+IERpZXRhcnk= 82708
+QVJSSUVS 82709
+IHBvbw== 82710
+IFJBTkRPTQ== 82711
+6LM= 82712
+IEhvbWV3b3Jr 82713
+LlZhbGlkYXRpb25FcnJvcg== 82714
+IE1hcnhpc20= 82715
+0YPRgtGM 82716
+IGNvbWVudGFyaW8= 82717
+X0JPVEg= 82718
+IHBybQ== 82719
+Y2FzdEhpdA== 82720
+aXBsaW5h 82721
+IFZvdGVycw== 82722
+LmFzc2lnbm1lbnQ= 82723
+bmV0dA== 82724
+U0FNUExF 82725
+amlz 82726
+InRpdGxl 82727
+LnZhbGlkYXRvcnM= 82728
+ICI/Ig== 82729
+dW5pZGFk 82730
+X2ZpZ3VyZQ== 82731
+IGFjY3J1 82732
+IFJlbWFyaw== 82733
+Rm91bmRlcg== 82734
+LmluaXRpYWxpemVBcHA= 82735
+IFByZXNlbnRz 82736
+IE1VTFRJ 82737
+dmVzdGVy 82738
+LnZpc2l0SW5zbg== 82739
+IGdldFBhdGg= 82740
+X2RpZmZlcmVudA== 82741
+IGxvb3Nlbg== 82742
+IGFycm9nYW5jZQ== 82743
+IGp1bmk= 82744
+IFphaGw= 82745
+IEdDQk8= 82746
+IG1vZGVyYXRvcnM= 82747
+TGluZUNvbG9y 82748
+IE5vZGVUeXBl 82749
+X2JlbG93 82750
+b3JndA== 82751
+IEhhcmxlbQ== 82752
+IE9yd2VsbA== 82753
+X1VOSVg= 82754
+LnJlc3RhcnQ= 82755
+aXRoZQ== 82756
+IGdlbmll 82757
+IGNsYWQ= 82758
+Jzp7Jw== 82759
+IHNob3djYXNlZA== 82760
+IGxhcnZhZQ== 82761
+TWljaGVsbGU= 82762
+IExI 82763
+LmdldExvZw== 82764
+Q29uc3RydWN0ZWQ= 82765
+IGh2YQ== 82766
+X3N1YnM= 82767
+IGRhYg== 82768
+LmRvY3VtZW50YXRpb24= 82769
+IG5pZw== 82770
+IE1hbmRhcmlu 82771
+4oCUYXJl 82772
+LXBpYw== 82773
+X2Nvcm5lcnM= 82774
+LkJvdA== 82775
+XVso 82776
+X18nOg0K 82777
+LkVkaXRvckJ1dHRvbg== 82778
+LXN5bnRheA== 82779
+U2FuZGVycw== 82780
+IFRhbmtz 82781
+ZGVzaXJlZA== 82782
+c3RhbnRpYXRlVmlld0NvbnRyb2xsZXI= 82783
+R2Vhcg== 82784
+IHVzZXJNb2RlbA== 82785
+CWNvbnRyb2w= 82786
+RGF0YUJhc2U= 82787
+IERlYmF0ZQ== 82788
+aW5lc2lz 82789
+IHhl 82790
+Lm1hZ25pdHVkZQ== 82791
+IHlhbg== 82792
+IEFwaUV4Y2VwdGlvbg== 82793
+KHdoaWNo 82794
+YXRoZXJpbmc= 82795
+Q29uc2lkZXJpbmc= 82796
+IEFMUEhB 82797
+568= 82798
+IFJhbmtpbmdz 82799
+LmxpZmU= 82800
+6rCS 82801
+T0ZGU0VU 82802
+LnRlbGVncmFt 82803
+IGZhdmljb24= 82804
+X3NzaA== 82805
+IEVER0U= 82806
+UmVmcw== 82807
+YW5kYW4= 82808
+IGFkb2xlc2NlbmNl 82809
+IFNoYW5r 82810
+IFN3YW1w 82811
+X3BlcmM= 82812
+IGNvbnRyYXJpbw== 82813
+Lm55 82814
+LiIpLA== 82815
+IHVudGVu 82816
+X0VOU1VSRQ== 82817
+L29yZGVycw== 82818
+KGNm 82819
+IHVudHJlYXRlZA== 82820
+YXplbg== 82821
+KElucHV0U3RyZWFt 82822
+IGFwcHJvdmFscw== 82823
+IGdlcm1hbnk= 82824
+IGF2ZXJl 82825
+VHJpcGxl 82826
+LWJhcnM= 82827
+IHNldFBhZ2U= 82828
+SmFj 82829
+IEZpcmVz 82830
+IERBWVM= 82831
+56i/ 82832
+IHNjcmF0Y2hlZA== 82833
+IEJFTg== 82834
+LXdpZmU= 82835
+IGludGVsbGVjdHVhbHM= 82836
+IHBvdWNv 82837
+IHN0YWJpbGl6YXRpb24= 82838
+IHBlbG9z 82839
+IFNUT1JZ 82840
+PGZpZWxkc2V0 82841
+IE1haWRlbg== 82842
+LkNpcmNsZQ== 82843
+IHNtw6U= 82844
+Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLw== 82845
+L2VuZA== 82846
+6Iux 82847
+KG51bXB5 82848
+LnBhbmVsQ29udHJvbA== 82849
+Y2hyaWZ0 82850
+Y29udGluZW50YWw= 82851
+X3BlbA== 82852
+RFNM 82853
+PFwv 82854
+IE9QUw== 82855
+IE5vb24= 82856
+IHVuZGlzY2xvc2Vk 82857
+IFlpbg== 82858
+c3Bv 82859
+CWRlc2NyaWJl 82860
+dG9ncm91cA== 82861
+IGRpYXBlcnM= 82862
+IG1IYW5kbGVy 82863
+CUNsb3Nl 82864
+IHJlbmRpdGlvbg== 82865
+PXsoew== 82866
+RW50ZXJpbmc= 82867
+KERJUg== 82868
+X09MRA== 82869
+IFN0aW5n 82870
+IFBhd24= 82871
+dXNzZXM= 82872
+IGdldENvZGU= 82873
+SXRlbUxpc3Q= 82874
+IGluZGlz 82875
+ID4iLA== 82876
+IGNvbmZs 82877
+IGRvbWluYXRlcw== 82878
+dGhlc2l6ZWQ= 82879
+c3RlcmVk 82880
+IGNhYw== 82881
+IEdlbnVpbmU= 82882
+PFBhdGg= 82883
+IEhvZGc= 82884
+LWZseQ== 82885
+LmNpZA== 82886
+IG9iamVjdElk 82887
+KCMp 82888
+Lm1vdmVUb05leHQ= 82889
+RGlhbG9ndWU= 82890
+PHBjbA== 82891
+dGVhckRvd24= 82892
+Jyl9fQo= 82893
+5ri4 82894
+TGl2ZXI= 82895
+TWF0cml4WGQ= 82896
+IGNyYXBweQ== 82897
+X0RFQUQ= 82898
+LnBhcnRpYWw= 82899
+LkRyb3BEb3duU3R5bGU= 82900
+ZnVy 82901
+LkNvbGxhcHNlZA== 82902
+LXRvd24= 82903
+SUNJQUw= 82904
+RGlyZWNjaW9u 82905
+IHNldFJlc3VsdA== 82906
+L3Jlc3VsdA== 82907
+IFNoZWVw 82908
+eXNjYWxl 82909
+Y29udGk= 82910
+IHJlY29ub2M= 82911
+6b4= 82912
+W2Jsb2Nr 82913
+Y2xheno= 82914
+IGJlbmVmaXRpbmc= 82915
+QUFQ 82916
+LnJlcXVpcmVz 82917
+LkNvb2tpZQ== 82918
+IGNhcHRpdml0eQ== 82919
+LlNlY3Rpb24= 82920
+XSkpOw== 82921
+LWNhcmV0 82922
+KHZh 82923
+IHbDpGw= 82924
+IEhpZ2hsYW5kcw== 82925
+Tm90YQ== 82926
+IEZNTA== 82927
+d2ludGVy 82928
+IGFnZW5kYXM= 82929
+X18sX18= 82930
+ZGVtYW5k 82931
+IHR1dG9ycw== 82932
+X1NZTQ== 82933
+KENI 82934
+IHVuZXF1aXY= 82935
+LnRyYW5zaXRpb25z 82936
+IENhbG9yaWVz 82937
+IEVjb25vbWlzdA== 82938
+LlBpbg== 82939
+IGRlZmxlY3Q= 82940
+RXhwb3NlZA== 82941
+IGdlcA== 82942
+LkxheW91dENvbnRyb2xJdGVt 82943
+IHJhaw== 82944
+ZmliZXI= 82945
+IGFwb3B0 82946
+IEVudW1z 82947
+aXRldXI= 82948
+IG1vZGlmaWVz 82949
+IHJlbHVjdGFuY2U= 82950
+IHNwaWxscw== 82951
+QXNjZW5kaW5n 82952
+IHRlbXBlcmF0dXJh 82953
+LWludGVyZmFjZQ== 82954
+IGNvd29ya2Vycw== 82955
+IDpc 82956
+IFJvdW5kZWRSZWN0YW5nbGVCb3JkZXI= 82957
+PEtleVZhbHVlUGFpcg== 82958
+UGFyc2Vk 82959
+IHdpdGhkcmF3aW5n 82960
+KGhpc3Q= 82961
+IHRoZW9yaXN0cw== 82962
+LW5n 82963
+IGNoaWZm 82964
+66W4 82965
+UEFJUg== 82966
+IEJyZXdlcg== 82967
+S2E= 82968
+IEJvd2xpbmc= 82969
+X3Rs 82970
+J30pLg== 82971
+IHByb2Jpbmc= 82972
+QXJz 82973
+LnJlYWxt 82974
+IGVzdGF0ZXM= 82975
+dmFyeQ== 82976
+IEtlcw== 82977
+ICIsIiw= 82978
+fSwNCg0K 82979
+UGxhbm5pbmc= 82980
+IFJlY29u 82981
+IGNvbmNsdXM= 82982
+dmF1bHQ= 82983
+IGluY2VudGl2 82984
+IGJpbm5lbg== 82985
+IFBoaWxsaWVz 82986
+LkxvYWRlcg== 82987
+IEZhbGxlbg== 82988
+X1R3bw== 82989
+IEJpYXM= 82990
+Um9sZUlk 82991
+IFBhcmNlbGFibGU= 82992
+IERvZGQ= 82993
+ICQoIiMi 82994
+5Lq/5YWD 82995
+LW1lYW4= 82996
+KE91dHB1dA== 82997
+QVRUUklCVVRF 82998
+IHNlY3JldGl2ZQ== 82999
+IFBlcmlwaGVyYWw= 83000
+IEZpbGVk 83001
+IOW3 83002
+X21lZGlhbg== 83003
+LklD 83004
+IEFycmF5QnVmZmVy 83005
+KFRBQkxF 83006
+IF0KCgo= 83007
+IGFudGhvbG9neQ== 83008
+IG9ic2NlbmU= 83009
+b3BhdXNl 83010
+IEVTVg== 83011
+w6F2ZWlz 83012
+b3NlbWl0ZQ== 83013
+R3J1cG8= 83014
+IE1PQ0s= 83015
+IHVuYXZvaWRhYmxl 83016
+IGNvdmlk 83017
+aG93ZXI= 83018
+Lk5ldmVy 83019
+U2V0QWN0aXZl 83020
+e3RleHQ= 83021
+X3Byb2Jh 83022
+XENvbmZpZ3VyYXRpb24= 83023
+IEJyeWNl 83024
+IGNvZXJjZQ== 83025
+IFZhbmRlcmJpbHQ= 83026
+Z2VtZW50cw== 83027
+bGVnZw== 83028
+IHJlYnV0 83029
+IFZJTg== 83030
+5YiG6ZKf 83031
+IG9ic2Vzc2l2ZQ== 83032
+L2NtZA== 83033
+IGtvbW1lbnQ= 83034
+IExhdWdo 83035
+64uI 83036
+IHNlbHZlcw== 83037
+b3JyYQ== 83038
+LnJvb21z 83039
+IGNvbXBsZXhpdGllcw== 83040
+CW9wZXJhdG9y 83041
+QWx0ZXJuYXRl 83042
+IHNvcnRpZQ== 83043
+Z2V0TnVt 83044
+IHJlYWxpemFkbw== 83045
+RG9pbmc= 83046
+X0dyaWQ= 83047
+IHNldFN1cHBvcnRBY3Rpb25CYXI= 83048
+w6RobHQ= 83049
+5ZQ= 83050
+OnsNCg== 83051
+SW50ZXJlc3RlZA== 83052
+IGRpbWluaXNoaW5n 83053
+IExvb3Q= 83054
+QWRhcHRlckZhY3Rvcnk= 83055
+LXJ1bm5lcg== 83056
+c2F2aW5n 83057
+KHNlbQ== 83058
+ZmFk 83059
+RURVUkU= 83060
+X2RvY3VtZW50bw== 83061
+IENhbGVi 83062
+IGd1aXNl 83063
+IE1jR3U= 83064
+KHVuaXRz 83065
+IGJlemllcg== 83066
+IHBhdHQ= 83067
+IHBlbHZpYw== 83068
+IGNvbm9zYw== 83069
+YWN0aXZv 83070
+IE1hbG9uZQ== 83071
+LlRha2U= 83072
+KHNxcnQ= 83073
+c3Rhc2hvcA== 83074
+LWVuZGVk 83075
+IE1pZGk= 83076
+IEJhbmM= 83077
+IFBlcHNp 83078
+X01BWQ== 83079
+IHBsbA== 83080
+L2luZXQ= 83081
+LWVuaA== 83082
+IEl0YWw= 83083
+bW91cg== 83084
+IHJlbHVjdGFudGx5 83085
+LnJjUGFyYW1z 83086
+IHBhbHM= 83087
+LnBrZw== 83088
+IGZvcm1hcw== 83089
+bGllw59saWNo 83090
+LWJvb2tz 83091
+b21hbHk= 83092
+IHJlY29tbWFuZA== 83093
+UExJQ0lU 83094
+acSN 83095
+LmNnQ29sb3I= 83096
+KEJvYXJk 83097
+0LXQvdC40Lg= 83098
+IExFTg== 83099
+Xy1f 83100
+IFVubw== 83101
+IE5PVElGWQ== 83102
+aGFuYQ== 83103
+W3Nsb3Q= 83104
+XGFkbWlu 83105
+SW5JbnNwZWN0b3I= 83106
+KWNvbnN0 83107
+IGZsYXR0ZXJpbmc= 83108
+aWdyYW1z 83109
+Y2Fj 83110
+IGhlYXJ0ZmVsdA== 83111
+SW5kdXN0cmlhbA== 83112
+QWlycG9ydA== 83113
+WEk= 83114
+IHZhbGlkYXI= 83115
+cmVwcmVzZW50YXRpb24= 83116
+IFJlbnRhbHM= 83117
+IG9taXNzaW9u 83118
+IG15dGhpY2Fs 83119
+IEVudHJhbmNl 83120
+IHNlcmdlYW50 83121
+IHdyaXRlVG8= 83122
+IE5vcndpY2g= 83123
+IExpb25lbA== 83124
+LWJhbA== 83125
+IFp3ZQ== 83126
+X3JlbnQ= 83127
+IHJlbWFy 83128
+IEJhaGFtYXM= 83129
+IEJhbGU= 83130
+OiIiLA== 83131
+U3RhdGVNYW5hZ2Vy 83132
+IGLDqW7DqQ== 83133
+ICEqKio= 83134
+IGJsb2NrZXJz 83135
+LnNlbA== 83136
+KExFRA== 83137
+IGZzbQ== 83138
+IHdpcGluZw== 83139
+IHphbWFu 83140
+IFJlaQ== 83141
+YWd1YXk= 83142
+Li4n 83143
+IGxvdW5n 83144
+ZXRjb2Rl 83145
+IGxhbno= 83146
+Y2l0YXRpb24= 83147
+W2A= 83148
+LWVs 83149
+YXNib3VyZw== 83150
+IFNPTEQ= 83151
+IE9yY2hhcmQ= 83152
+Q0hhbmRsZQ== 83153
+IExvZnQ= 83154
+LmRpdmlkZQ== 83155
+LVdpdGg= 83156
+L2Rlc2lnbg== 83157
+LlNlcnZpY2VNb2RlbA== 83158
+TWlz 83159
+IHJhd0RhdGE= 83160
+IGludGVyYWN0cw== 83161
+IEVyb3Rpaw== 83162
+IG9uUG9zdEV4ZWN1dGU= 83163
+6Jk= 83164
+IHZleA== 83165
+IHN0cmluZ2lmeQ== 83166
+eW5lcw== 83167
+X0VtYWls 83168
+X09N 83169
+cXVpdGU= 83170
+X2VmZmVjdHM= 83171
+QURY 83172
+IGFkb3JuZWQ= 83173
+c3Nm 83174
+ZWRpdGFy 83175
+IE1hZGFtZQ== 83176
+IHJlZnV0ZQ== 83177
+IEx1Y2E= 83178
+IFdvbHZlcmluZQ== 83179
+c2V4bw== 83180
+QW5kcmU= 83181
+PFJvdXRl 83182
+IFNjZW5lcw== 83183
+IHJlb3JkZXI= 83184
+X214 83185
+Y3JlYXRlVGltZQ== 83186
+IHN5bnQ= 83187
+LG1vZGVs 83188
+aWNyb3Vz 83189
+IE1PVVNF 83190
+6rk= 83191
+Y29tcHJlc3Npb24= 83192
+IHByaW5jZXM= 83193
+IHNoYW1lZnVs 83194
+IHBhdQ== 83195
+IFRFRA== 83196
+KGNvZWZmcw== 83197
+4K+B 83198
+L3VtZA== 83199
+IGNhbnlvbg== 83200
+L3JlbmRlcg== 83201
+LnVzZWQ= 83202
+IEFncmVl 83203
+IEpld2Vs 83204
+L2NvbW1hbmQ= 83205
+QmFyY29kZQ== 83206
+KGRlYWQ= 83207
+d2Vic29ja2V0 83208
+dW11 83209
+R0xPU1M= 83210
+IGZvcnRu 83211
+IGJvYXN0ZWQ= 83212
+ICJcIj4= 83213
+aXN0dW5n 83214
+LW1hY2hpbmU= 83215
+IGluY2lkZW50YWw= 83216
+IG1N 83217
+LXJlYWRhYmxl 83218
+LmZ4 83219
+IFBPTElU 83220
+IHN5bWxpbms= 83221
+KHVzaW5n 83222
+eEVE 83223
+ICIiIi4= 83224
+LlN0ZG91dA== 83225
+IOiL 83226
+IGFsbWFjZW4= 83227
+CXRyaWdnZXI= 83228
+LXRpcA== 83229
+IENPTU1JVA== 83230
+LmluZ3JlZGllbnRz 83231
+IG1hbmlmZXN0cw== 83232
+IE9TUw== 83233
+IEhhdXQ= 83234
+L2xvYWRpbmc= 83235
+LlR5cGVTdHJpbmc= 83236
+KGNsZWFu 83237
+IExJQw== 83238
+IEJhcmJpZQ== 83239
+T09TRQ== 83240
+LuKApg== 83241
+IEludml0YXRpb24= 83242
+IHJlZGVlbWVk 83243
+KS4nPC8= 83244
+IGltZGI= 83245
+IGJlbGFuZw== 83246
+IHNjcmFwcGVk 83247
+LW5pbA== 83248
+IFByb3Vk 83249
+0LDRgdGC 83250
+LlNJWkU= 83251
+IHNldFZpc2libGU= 83252
+IHJhaW5pbmc= 83253
+IGxlbmdodA== 83254
+IGFuYWs= 83255
+X0NNUA== 83256
+IHBhbm9yYW1pYw== 83257
+IGdpbQ== 83258
+c2FpZA== 83259
+IHByb2dlbg== 83260
+IEdCUA== 83261
+4oCg 83262
+IGludmVzdGlnYXRlcw== 83263
+IHByw6hz 83264
+L25hdmlnYXRpb24= 83265
+Lm1vdGlvbg== 83266
+IExpZ2h0d2VpZ2h0 83267
+CQkgICAgICAgICAgICA= 83268
+IG9udG9sb2d5 83269
+IE5JSA== 83270
+KHNpbXA= 83271
+LnB1bGw= 83272
+IHByb3Bvc2l0aW9ucw== 83273
+QFdlYlNlcnZsZXQ= 83274
+IHJlZGVmaW5l 83275
+IEVORVJHWQ== 83276
+7KC4 83277
+T1JJWkFUSU9O 83278
+IFZlcmbDvGc= 83279
+fX1dLAo= 83280
+IHdlZ2Vu 83281
+4LmH 83282
+Jm9hY3V0ZQ== 83283
+LkJvYXJk 83284
+IGN1bHBh 83285
+IEdlbmV0aWNz 83286
+IH0+ 83287
+IGFkYW1hbnQ= 83288
+44GV44KM 83289
+CWF1ZGlv 83290
+6riA 83291
+IG51bWVyYWw= 83292
+IHJlc3RyYWluaW5n 83293
+LklOVEVSTkFM 83294
+IE1vbXM= 83295
+IElQQWRkcmVzcw== 83296
+aW1lbnRp 83297
+IGFscGhhYmV0aWNhbA== 83298
+IEpGSw== 83299
+IEF0dGVtcHRz 83300
+ZnJhZ2U= 83301
+IGRhcm0= 83302
+IGJhc2VtYW4= 83303
+PWxvZw== 83304
+LGVycm9y 83305
+IERJU0NMQUlNUw== 83306
+CXRleHR1cmU= 83307
+LWNvdmVyZWQ= 83308
+IFBsdW0= 83309
+IOWVhg== 83310
+IHDDqXJp 83311
+KHJldmlldw== 83312
+IEZvcmNlZA== 83313
+Rkg= 83314
+IOy0iA== 83315
+IGV5ZWJyb3c= 83316
+X1JFR1M= 83317
+IGNoZXN0cw== 83318
+IExhcmdlc3Q= 83319
+XV06Cg== 83320
+VVRPUg== 83321
+IGVucXVpcmllcw== 83322
+IGNva2U= 83323
+LWNhdGNoaW5n 83324
+IEdlb2dyYXBoeQ== 83325
+YXRlbA== 83326
+KHByb2Q= 83327
+b3JXaGVyZQ== 83328
+TmluZQ== 83329
+IFBpZWQ= 83330
+IGFkanVzdHM= 83331
+KHByb20= 83332
+X21lbnVz 83333
+X2V4YW0= 83334
+IE5vdGlmaWNhdGlvbkNlbnRlcg== 83335
+CWRz 83336
+TElL 83337
+X3R3aXR0ZXI= 83338
+Q1JD 83339
+IGV1eA== 83340
+IFN0YWJsZQ== 83341
+aXlvcg== 83342
+IGNhcmJvbmF0ZQ== 83343
+LnNhbA== 83344
+TWFwcGVk 83345
+aWV2aW5n 83346
+KXk= 83347
+eW5hbW9kYg== 83348
+LkNvbXBhcmVUYWc= 83349
+IHNldmVyZWQ= 83350
+J2VtYWls 83351
+IGZvcnNr 83352
+bGV4cG9ydA== 83353
+SU1JVEVS 83354
+IEFwZXg= 83355
+IGhtYWM= 83356
+IE9kZHM= 83357
+b3ZlcnJpZGVz 83358
+OiI7DQo= 83359
+IG9waW9pZHM= 83360
+IG1lc21lcg== 83361
+IEdBTA== 83362
+LWxpbmVz 83363
+IGFwcGx5TWlkZGxld2FyZQ== 83364
+IHNlcmlh 83365
+RVNJUw== 83366
+IG5pbGFp 83367
+IG1hbGxz 83368
+IFBhb2xv 83369
+IExlbnQ= 83370
+LmJ1aWxkZXJz 83371
+LyY= 83372
+IENsaXBz 83373
+IEp1cmFzc2lj 83374
+4pWd 83375
+LWNvbmQ= 83376
+44O844OI 83377
+fHd4 83378
+LmhvdXNl 83379
+IGhlcmF1cw== 83380
+IGhr 83381
+IENvY28= 83382
+IlwK 83383
+IGFjY3JlZGl0YXRpb24= 83384
+IFJhY2g= 83385
+ZXJ0ZXN0 83386
+c2hvcnRjb2Rl 83387
+IHZhbGlkYXRpb25z 83388
+VUxTRQ== 83389
+IGV4Y2VycHRz 83390
+U2Vla0Jhcg== 83391
+IGdldExvY2F0aW9u 83392
+IGZlbmNlZA== 83393
+KGdz 83394
+IGx5cw== 83395
+IGhhcm1z 83396
+IEhvbW8= 83397
+4oCcU2hl 83398
+IOKAuw== 83399
+PXNlc3Npb24= 83400
+X0NPTVBJTEU= 83401
+TWVhbnM= 83402
+IHBldGl0aW9uZXI= 83403
+SU1P 83404
+Il09Pg== 83405
+ZGJl 83406
+X2dwcw== 83407
+IG1q 83408
+X2V4cGlyZQ== 83409
+IERBTg== 83410
+IHh2 83411
+IGZ1bmNpb25lcw== 83412
+IHNoYWt5 83413
+U3VnYXI= 83414
+IGdldFJlc3VsdA== 83415
+PFRva2Vu 83416
+aHR0cENsaWVudA== 83417
+Lm9uUGF1c2U= 83418
+c3Rp 83419
+U25ha2U= 83420
+TWFwcGluZ3M= 83421
+IFJlYXBlcg== 83422
+IGZyZWk= 83423
+IENvc21vcw== 83424
+dWVycw== 83425
+IEhhag== 83426
+IEJsYXpl 83427
+b2ppcw== 83428
+Q3JMZg== 83429
+LnByb2M= 83430
+IG90cA== 83431
+IERyYXdz 83432
+CVJFRw== 83433
+KCcnJw== 83434
+IGdlbmVyYQ== 83435
+IEF0dGFjaGVk 83436
+UkVN 83437
+JTsiPg== 83438
+dXJuaXNoZWQ= 83439
+X3Jw 83440
+IHpvYWxz 83441
+IGFzc29ydGVk 83442
+aXRpemVk 83443
+IGNhbWlubw== 83444
+IGFiZHVjdGVk 83445
+LnRvQmU= 83446
+J10pOg== 83447
+IE1vb3I= 83448
+SW5jbHVkaW5n 83449
+IGdyYXppbmc= 83450
+c2V0U3RhdHVz 83451
+YWlyb2Jp 83452
+X0V4ZWN1dGU= 83453
+aWZpYW50 83454
+ZWxkbw== 83455
+YXV0b21hdGlj 83456
+KCQp 83457
+IGxlYXBz 83458
+b25lZERhdGVUaW1l 83459
+KGxheWVycw== 83460
+LXByb2R1Y2Vk 83461
+IFdvcmtib29r 83462
+IGVub3Jtb3VzbHk= 83463
+IGRlcHJlc3NpdmU= 83464
+IGFhYQ== 83465
+RW1iZWRkZWQ= 83466
+QlVN 83467
+IGVsbGVz 83468
+IGJvYXJkZWQ= 83469
+xZtteQ== 83470
+IG1hc2lo 83471
+X2dlbmVz 83472
+CVRleHR1cmU= 83473
+aXN0YXI= 83474
+IEF1Z3VzdGE= 83475
+IEFwcE1ldGhvZEJlYXQ= 83476
+IGtvZGU= 83477
+YWJleg== 83478
+X3BpZWNlcw== 83479
+Q3Vycg== 83480
+IGxpYmVyYWxpc20= 83481
+RGljaw== 83482
+QWxl 83483
+IHF1YWxl 83484
+fSc7Cg== 83485
+LmFuc3dlcnM= 83486
+IEpBTg== 83487
+IFBVUkU= 83488
+IGNhbm9l 83489
+IFNBTUU= 83490
+UXVhbGlmaWVy 83491
+IGRibmFtZQ== 83492
+IElubm9j 83493
+CVRSQUNF 83494
+aXZyZQ== 83495
+IG1lY2g= 83496
+YXNlbA== 83497
+Iixb 83498
+IGFzaWE= 83499
+IENhbnRlcmJ1cnk= 83500
+LkRhdGFCaW5kaW5ncw== 83501
+a2Fo 83502
+KCkpKSk= 83503
+IGR6aWV3 83504
+cmV0ZQ== 83505
+IHNjcmVlbmluZ3M= 83506
+Lk1PVVNF 83507
+IGJ1c2llc3Q= 83508
+CXJlbmRlcmVy 83509
+IHRlc3RpbW9uaWFscw== 83510
+IGFzcGlyZQ== 83511
+Zm9ydHVuZQ== 83512
+IE1TQw== 83513
+IGRhbXBpbmc= 83514
+XCIsCg== 83515
+V2Vs 83516
+V2lr 83517
+IOyXrA== 83518
+KHRpZA== 83519
+IENhbm5lcw== 83520
+b2NvcA== 83521
+PiIrCg== 83522
+ZmFjZXQ= 83523
+IHNsYXNoZWQ= 83524
+IExpYmVyaWE= 83525
+U21vb3Ro 83526
+X2NoZQ== 83527
+TGFib3Vy 83528
+IGVtaW5lbnQ= 83529
+Olg= 83530
+XEJhY2tlbmQ= 83531
+ICsrKQo= 83532
+IHRlYW13b3Jr 83533
+X2FnZw== 83534
+LlNlcnZl 83535
+IFNORA== 83536
+IFBJQ0s= 83537
+IHdpcGVz 83538
+L1R5cG9ncmFwaHk= 83539
+IEFQQQ== 83540
+aWtraQ== 83541
+IGNvZGVy 83542
+Z2FiZW4= 83543
+IHVua25vdw== 83544
+LkRlcGFydG1lbnQ= 83545
+4Lix4Lia 83546
+IHBsYXllck5hbWU= 83547
+KmU= 83548
+PEJsb2Nr 83549
+X3VwZA== 83550
+IEdpYmJz 83551
+bGVhc2luZw== 83552
+IENvbG9tYmlhbg== 83553
+KFBIUA== 83554
+ICoqKiEK 83555
+IOydvA== 83556
+IEN1cnRhaW4= 83557
+L2F5 83558
+2YTZiQ== 83559
+c3BvcnRz 83560
+IGRlc2Vh 83561
+aXLDoQ== 83562
+IHVuY29uZGl0aW9uYWw= 83563
+IHRocm9t 83564
+IENIUklTVA== 83565
+IEhPUg== 83566
+b3Njb3BpYw== 83567
+IHlhxZ8= 83568
+IG5vc3Rybw== 83569
+Li4uIik7DQo= 83570
+IHNsdXI= 83571
+IGhhdHRlbg== 83572
+IHBlc3RpY2lkZQ== 83573
+IGZyZWV3YXk= 83574
+IENvaA== 83575
+IHdhbm5vbmNl 83576
+IG1laWRlbg== 83577
+X3N1YnN0cg== 83578
+X0NTUw== 83579
+IFN5bWJvbHM= 83580
+4Li34Lit 83581
+REVU 83582
+IE1hZGRlbg== 83583
+IHJlcXVlc3Rlcg== 83584
+LnZpcnR1YWw= 83585
+IHd4RGVmYXVsdA== 83586
+IGF1dG9tw6F0aWNhbWVudGU= 83587
+YnJpZHM= 83588
+aVQ= 83589
+LlByaW9yaXR5 83590
+Jyk7PC8= 83591
+YnVuZw== 83592
+RGVhZGxpbmU= 83593
+Q29uY3JldGU= 83594
+IG5leHRQYWdl 83595
+IOuwmw== 83596
+IFN0b2tl 83597
+a29w 83598
+INCx0L7Qu9GM 83599
+IFByb2R1aw== 83600
+LW1ha2Vy 83601
+IFByb2plY3RpbGU= 83602
+YW5jZWxsYWJsZQ== 83603
+IFRIRUlS 83604
+VG9SZW1vdmU= 83605
+RU1V 83606
+Y29tbWVyY2lhbA== 83607
+QVZFRA== 83608
+IHdlYXZpbmc= 83609
+IGJpb21l 83610
+QFNldHRlcg== 83611
+cW1s 83612
+IGJyb2FkZW4= 83613
+INGB0L8= 83614
+SVNS 83615
+IGRlYWN0aXZhdGVk 83616
+IHNlbGVjdGVkSW5kZXg= 83617
+cmlvdXM= 83618
+ZWxwcw== 83619
+LkVzY2FwZQ== 83620
+IHBvbGxlZA== 83621
+cXVpYQ== 83622
+X3JlZmw= 83623
+X21pbWU= 83624
+PEF1ZGlvU291cmNl 83625
+KFRyYW5zZm9ybQ== 83626
+ZXZlbm9kZA== 83627
+CXJhbmRvbQ== 83628
+bG9jcw== 83629
+IGRldXQ= 83630
+cmVwbGFjZW1lbnQ= 83631
+IGV4YW1pbmVy 83632
+SGFzS2V5 83633
+IOumrOyKpO2KuA== 83634
+IENsb3Ro 83635
+IOCkqg== 83636
+IFJlZ2lzdHJv 83637
+IEVzdGhlcg== 83638
+IFNoYXJlZE1vZHVsZQ== 83639
+LmJvcnJvdw== 83640
+IG9zY2lsbGF0b3I= 83641
+IGZvb2xz 83642
+uqs= 83643
+IGJvYXN0aW5n 83644
+X3B1bHNl 83645
+c2hhcmluZw== 83646
+IHBpc3RvbHM= 83647
+X1BMQU4= 83648
+IHNlcHRlbWJlcg== 83649
+IG11c3Rlcg== 83650
+IG1hcmNow6k= 83651
+Q0hFTVk= 83652
+IHN1aQ== 83653
+IGdlYnJ1aWs= 83654
+Lj0n 83655
+ZXJyYXRlZA== 83656
+IExpYQ== 83657
+IGhhdW50 83658
+IEN1c2g= 83659
+cm91dGVQcm92aWRlcg== 83660
+Inw= 83661
+ZW5kcGhw 83662
+Il1dCg== 83663
+IGF2YQ== 83664
+77yBIiw= 83665
+7Ke4 83666
+IGNvbGE= 83667
+X1NQRUxM 83668
+IGFsw6lt 83669
+KExhbmd1YWdl 83670
+KGR1bW15 83671
+IGJ1bmtlcg== 83672
+IEVtcHJlc2E= 83673
+IGNyZWF0ZUNvbnRleHQ= 83674
+Om1pbg== 83675
+IEJPT1Q= 83676
+IE1lcmVkaXRo 83677
+Wmg= 83678
+IERvd25pbmc= 83679
+d2pnbA== 83680
+LmRj 83681
+c2RhbGU= 83682
+IGluY29udmVuaWVudA== 83683
+IHJlYWRtZQ== 83684
+TmF2aWdhdGlvblZpZXc= 83685
+Q09ORElUSU9O 83686
+LmRlcA== 83687
+IHLDqXVzcw== 83688
+IG9wY2nDs24= 83689
+IEFjY291bnRhYmlsaXR5 83690
+Lk1hcg== 83691
+LWd1aWQ= 83692
+RURHRQ== 83693
+RXZlbnRNYW5hZ2Vy 83694
+IGRpc2NpcGxl 83695
+dWNrbGVz 83696
+fX0+ 83697
+aW50ZXJlc3RlZA== 83698
+RmlsdGVyV2hlcmU= 83699
+IHB1c3M= 83700
+LXByb3h5 83701
+X3N0YXR1c2Vz 83702
+IFsj 83703
+dW5mb2xk 83704
+IFJvbm5pZQ== 83705
+JiYh 83706
+IGFjZXNzbw== 83707
+dW9z 83708
+X3lpZWxk 83709
+KGNhbGVuZGFy 83710
+KHNvdW5k 83711
+IGRhdGFBcnJheQ== 83712
+IFlhdGVz 83713
+IHByb2Nlc3Npb24= 83714
+RUZBVUxU 83715
+IEdIQw== 83716
+YW11cmE= 83717
+IHN0cmljdGVy 83718
+LkJPVFRPTQ== 83719
+IGhhYml0dWFs 83720
+eEFG 83721
+QVZJTkc= 83722
+IHNldHVwcw== 83723
+ID17Cg== 83724
+Kioo 83725
+IHNvaw== 83726
+IHJldGluYQ== 83727
+IEZpcmVwbGFjZQ== 83728
+aW52ZXJ0 83729
+IEZvcnJlc3Q= 83730
+PGRhdGE= 83731
+XEFjdGlvbg== 83732
+T1VHSA== 83733
+IGNhcmVsZXNz 83734
+LmdldEFjdGl2ZQ== 83735
+ZXNlcw== 83736
+IHpkasSZ 83737
+KSkqKA== 83738
+U0VN 83739
+IFBhbmlj 83740
+VG91Y2hlcw== 83741
+IHByZWNv 83742
+L2FjY291bnRz 83743
+5L6b 83744
+UG9zdGFsQ29kZXM= 83745
+LXBsdWdpbnM= 83746
+PG1lc3NhZ2U= 83747
+KHBvd2Vy 83748
+IHBlcmN1c3Npb24= 83749
+IGPDqWw= 83750
+5o6o 83751
+IGRhbmNlZA== 83752
+X1NDQU5DT0RF 83753
+IFNpdHRpbmc= 83754
+IExva2k= 83755
+U2hhcmluZw== 83756
+LkRpcg== 83757
+IHNjaHdlcg== 83758
+X0xB 83759
+Lk1lbnVTdHJpcA== 83760
+X3plcm9z 83761
+IGZpeGF0aW9u 83762
+IEFtaXQ= 83763
+IGNvbXBsaWVk 83764
+LnNwYWNlQmV0d2Vlbg== 83765
+IGFycmVzdGluZw== 83766
+IFN1Zw== 83767
+IHBlcmZvcg== 83768
+IGtvbXBsZQ== 83769
+IEVzc2VuY2U= 83770
+IHBsZWlu 83771
+c2ltdWxhdGlvbg== 83772
+IGNyZWF0ZWRCeQ== 83773
+IEV4cGVkaXRpb24= 83774
+77yBCgoKCg== 83775
+dHJhaW5lcg== 83776
+Il09JA== 83777
+IHN1Y3Rpb24= 83778
+bVBpZA== 83779
+bm90aW4= 83780
+IHByZWNpb3M= 83781
+IEFzc3VyYW5jZQ== 83782
+IExhbA== 83783
+LiIm 83784
+IG1pbkxlbmd0aA== 83785
+IE1pbmVyYWxz 83786
+dHJhamVjdG9yeQ== 83787
+U0FGRQ== 83788
+IG51YW5jZXM= 83789
+KGV4dHJh 83790
+X3ZpZGVvcw== 83791
+W109ew== 83792
+IGhvbmV5bW9vbg== 83793
+X3ByZXA= 83794
+CQkJCQkJCQkJCSA= 83795
+IHB1cnBvcw== 83796
+IGFuemVpZ2Vu 83797
+LnN0cnV0cw== 83798
+IHBhZ2Fy 83799
+LkF1dG9TaXplTW9kZQ== 83800
+IHdlbmlnZXI= 83801
+IHBhZ2Fu 83802
+IGFjaWRpYw== 83803
+Z01hcHM= 83804
+IGJld2FyZQ== 83805
+X2lwYw== 83806
+IG1lZHM= 83807
+IGRpc2XDsW8= 83808
+KSkpCgoK 83809
+Q2h1cmNo 83810
+IG51cnR1cmluZw== 83811
+X21waQ== 83812
+IHJlc3VsdGFudA== 83813
+IFBpc3RvbA== 83814
+c1BpZA== 83815
+TXNw 83816
+TW9tZW50 83817
+IFVQTE9BRA== 83818
+TmFubw== 83819
+YmxpY2s= 83820
+IG1lc3VyZQ== 83821
+IExheWVycw== 83822
+X3RyYWo= 83823
+IGJ1dHRvbldpdGhUeXBl 83824
+CWNvbW1vbg== 83825
+IE15Q2xhc3M= 83826
+2KjYsQ== 83827
+eG9vcHM= 83828
+X0hlaWdodA== 83829
+X1dBUk5JTkdT 83830
+U2V0VGV4dA== 83831
+IEhpc3Bhbmljcw== 83832
+TnVsbFBvaW50ZXJFeGNlcHRpb24= 83833
+LmZhY3Rvcg== 83834
+IHZpZWxsZWljaHQ= 83835
+IHNob3V0cw== 83836
+dHJ1c3RlZA== 83837
+IG5ld1Jvdw== 83838
+IEZyYW7Dpw== 83839
+W2pq 83840
+4oCUd2hv 83841
+IFFEaXI= 83842
+X2FkdmFuY2Vk 83843
+KEhhdmVPY2N1cnJlZA== 83844
+IHVucGw= 83845
+L3Jvcw== 83846
+LmVhc3k= 83847
+IEJBTEw= 83848
+550= 83849
+L2xncGw= 83850
+IHN1YmNvbnNjaW91cw== 83851
+ICctJzsK 83852
+ICcpOw== 83853
+INGW 83854
+IHNjYW50 83855
+X3Nlc3M= 83856
+X3BsYXlpbmc= 83857
+X0lTTw== 83858
+IHNldFNpemU= 83859
+X2RlY2s= 83860
+X0xBUkdF 83861
+IE1leQ== 83862
+Q2hpY2tlbg== 83863
+aWZmaW4= 83864
+ZGlzcG9zZQ== 83865
+SEVTVA== 83866
+TGF1Z2g= 83867
+IExDUw== 83868
+IG9uc2l0ZQ== 83869
+LmlzTG9nZ2VkSW4= 83870
+IGlycml0YXRlZA== 83871
+IGJyaWdhZGU= 83872
+IGRlcXVldWU= 83873
+Y2xhc3NOYW1lcw== 83874
+IE3DoXM= 83875
+IEF0YXJp 83876
+KElPRXhjZXB0aW9u 83877
+UmFjaGVs 83878
+LXNhbXBsZQ== 83879
+IGVpZ2VudGxpY2g= 83880
+SUZERUY= 83881
+Lm5laWdoYm9ycw== 83882
+IHNlcGVyYXRl 83883
+IExpc3Rpbmdz 83884
+LmZm 83885
+KGltcG9ydA== 83886
+TW9kZWxBdHRyaWJ1dGU= 83887
+IHNwZW5kZXI= 83888
+IG1vdGlmcw== 83889
+c3N1ZQ== 83890
+IEFwcHJlbnRpY2U= 83891
+LWNhdA== 83892
+clBpZA== 83893
+Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8K 83894
+b2N6 83895
+aW5pb25z 83896
+L2NvbnRhaW5lcg== 83897
+IHBsYWdpYXJpc20= 83898
+V3JpdGFibGVEYXRhYmFzZQ== 83899
+Ly4KCg== 83900
+IEZldmVy 83901
+LVZlcnNpb24= 83902
+YWNpamE= 83903
+IHdlaQ== 83904
+LWluZw== 83905
+IHRlbWFz 83906
+IHN1cmdlZA== 83907
+IGNyaWE= 83908
+IGFyZA== 83909
+Yml0Y29pbg== 83910
+LnRpbWV6b25l 83911
+IG9iamVjdE1hcHBlcg== 83912
+IAogICAgICAgICAgICAK 83913
+IHlsaW0= 83914
+IElDVQ== 83915
+IERlcHJlY2F0ZWQ= 83916
+KSgpOwo= 83917
+QVJHRVI= 83918
+dW5nYWxvdw== 83919
+VGVzdERhdGE= 83920
+KHB0cw== 83921
+RklMRU5BTUU= 83922
+dXBwbHk= 83923
+IHBhY2llbnRlcw== 83924
+LGxlZnQ= 83925
+IFdyaXRlTGluZQ== 83926
+IHBhcmNlbHM= 83927
+X2ZvbGRlcnM= 83928
+IERpcms= 83929
+LmFzc2VydElzSW5zdGFuY2U= 83930
+TWND 83931
+X1ZhcmlhYmxl 83932
+KGFh 83933
+IFBvcms= 83934
+LlB1Ymxpc2g= 83935
+LWdheQ== 83936
+IFBldHJh 83937
+IENvbm5lY3Rpbmc= 83938
+VGFiQ29udHJvbA== 83939
+aXZlcmluZw== 83940
+KFNjcmVlbg== 83941
+IGNoaWxsZWQ= 83942
+IGFpbw== 83943
+VG91Y2hFdmVudA== 83944
+IGFjY2Vzc2lvbg== 83945
+IExvaXM= 83946
+L21vbWVudA== 83947
+IGFudsOkbmQ= 83948
+IHN1aWNpZGVz 83949
+KGhlbHA= 83950
+YW5kZXJz 83951
+IFZJRA== 83952
+QmVp 83953
+ZXZlbnRv 83954
+IEFuZ3Vz 83955
+VmVycw== 83956
+IEJvcmRlYXV4 83957
+LnN0cmVhbWluZw== 83958
+IHJvdWdl 83959
+IGNyYWZ0c21hbnNoaXA= 83960
+b3NzaWw= 83961
+X0ZBTEw= 83962
+QG1lZGlh 83963
+aWxlYWtz 83964
+RGF0YVNlcnZpY2U= 83965
+IFRyaXBBZHZpc29y 83966
+IE1hYXI= 83967
+Q3Vyc28= 83968
+UG9zdGFsQ29kZXNOTA== 83969
+KCk7Kys= 83970
+JFBvc3RhbENvZGVzTkw= 83971
+IG9jb3I= 83972
+IHRhaW50ZWQ= 83973
+IGxlbQ== 83974
+LW91dHM= 83975
+IHh4eHg= 83976
+IGlycml0YXRpbmc= 83977
+b3hpZA== 83978
+b2ludGVk 83979
+IFRvcm8= 83980
+X292 83981
+LmJpcnRo 83982
+KyU= 83983
+IENoYXJhY3RlcmlzdGljcw== 83984
+IEJldHRpbmc= 83985
+IG9mZmVuZA== 83986
+IFBIWVM= 83987
+IElDTVA= 83988
+eERD 83989
+IENk 83990
+LmdldE1hcA== 83991
+YXRjaGV0 83992
+LmN1cnJlbnRJbmRleA== 83993
+RVJBTA== 83994
+IGthcHBh 83995
+aWRlbmNlcw== 83996
+UGFyZW4= 83997
+IFNlcmdlaQ== 83998
+LWZpbg== 83999
+J10sWyc= 84000
+w6FtYXJh 84001
+R3Jvd2luZw== 84002
+R2xhc3M= 84003
+CW1ldGE= 84004
+dmVyYmF0aW0= 84005
+L0dQTA== 84006
+IEthaA== 84007
+KHN2Zw== 84008
+Y2xpc3Q= 84009
+IEJsb3dqb2I= 84010
+b2NjYW4= 84011
+LmFib3J0 84012
+b2RlbGlzdA== 84013
+IGRpZmbDqXJlbnRz 84014
+X09QVFM= 84015
+PXJlcQ== 84016
+IGludG94 84017
+IGRpYWdvbg== 84018
+IFsoIg== 84019
+JlI= 84020
+IG9iamVjdGl2ZWx5 84021
+IGJsaW5raW5n 84022
+IExvdmVz 84023
+cmluZ2U= 84024
+Kik7Cgo= 84025
+IEJvbmRz 84026
+IExvdmVk 84027
+ZWx0cw== 84028
+IGRpc3BhcmF0ZQ== 84029
+IEVucmlxdWU= 84030
+IldpdGg= 84031
+cmVtaXVt 84032
+YWphcmFu 84033
+dHJ5aW5n 84034
+LVJ1c3NpYW4= 84035
+bmV3SW5zdGFuY2U= 84036
+LlRSQU4= 84037
+IG9yYW5nZXM= 84038
+L2xvY2FsZQ== 84039
+IERJU1A= 84040
+CW5z 84041
+IFNodXR0ZXJzdG9jaw== 84042
+IENMT0NL 84043
+KHJhZA== 84044
+IGFzc3VyYW5jZXM= 84045
+IHJhc3A= 84046
+VWJlcmdyYXBo 84047
+RW1pbHk= 84048
+IGludmVudGlvbnM= 84049
+cmlvdA== 84050
+IHRvc3Npbmc= 84051
+IG1ha2VvdmVy 84052
+IHVuaXRPZldvcms= 84053
+YnV0dG9uU2hhcGU= 84054
+5Yid5aeL5YyW 84055
+IHBhcnRlZA== 84056
+4paR 84057
+LnNpZ21vaWQ= 84058
+IHJlZGlyZWN0aW9u 84059
+IGRpc3R1cmJhbmNlcw== 84060
+IGludGltaWRhdGVk 84061
+CUNyZWF0ZWQ= 84062
+YWdldA== 84063
+IGNvcnJlcw== 84064
+IE5FRw== 84065
+aXRvbmU= 84066
+L2Zyb250 84067
+IFZlcnNl 84068
+Z2FtYmFy 84069
+IHByZW1pZXJlZA== 84070
+IElNTw== 84071
+IEdvYmllcm5v 84072
+IGlmcw== 84073
+YXlhaA== 84074
+LkNPTA== 84075
+IGZyZWRlcg== 84076
+IHN1Ym1lcmdlZA== 84077
+IE5lcm8= 84078
+bW9kaWZpYWJsZQ== 84079
+L0Zvb3Rlcg== 84080
+LWNlbnRyYWw= 84081
+IGdvdXZlcg== 84082
+IFRyaWVk 84083
+IGRpenp5 84084
+UXVlcnlQYXJhbQ== 84085
+Ij4nKwo= 84086
+X3ByaW1pdGl2ZQ== 84087
+56iO 84088
+LmdwdQ== 84089
+IHZveg== 84090
+ZW56ZQ== 84091
+IFdpbGRlcm5lc3M= 84092
+IHByb2JhYmls 84093
+L3JlYw== 84094
+IGFjY2Vz 84095
+IFRydXN0ZWVz 84096
+R2I= 84097
+IHBhZGRpbmdIb3Jpem9udGFs 84098
+U2hpZWxk 84099
+IE5hbWVu 84100
+dWRkbGVk 84101
+IFByaW9yaXR5UXVldWU= 84102
+UG9vcg== 84103
+IFNBRg== 84104
+LS1bWw== 84105
+IGNobG9yaW5l 84106
+IHZlcmJhbGx5 84107
+IGFpcmU= 84108
+PjsNCg== 84109
+aWxoYQ== 84110
+W2NvbG9y 84111
+YW5kYWxvbmU= 84112
+LmFkZFJvdw== 84113
+IFNvaw== 84114
+IENvbm9y 84115
+IG1lam9yYXI= 84116
+J2lscw== 84117
+ZGV0YWxsZQ== 84118
+ICIpLAo= 84119
+JUA= 84120
+Lmxhenk= 84121
+Lmp1bXA= 84122
+b3N0ZQ== 84123
+K0Y= 84124
+IGluZnVyaQ== 84125
+IHNvbnJh 84126
+aXRlbWlk 84127
+JGxvZw== 84128
+IG11cmRlcm91cw== 84129
+TEVD 84130
+CW5pbA== 84131
+IE3DpHI= 84132
+KHBn 84133
+aWxlbw== 84134
+QXNjaWk= 84135
+IExvY2toZWVk 84136
+IFRoZW8= 84137
+QmVsbA== 84138
+YWNpb25hbGVz 84139
+LmNyZWF0ZU5ldw== 84140
+IOW+ 84141
+LWZvb3RiYWxs 84142
+IGVjb21tZXJjZQ== 84143
+CVNpbXBsZQ== 84144
+Y2x5 84145
+LklubmVyRXhjZXB0aW9u 84146
+IHBlc29z 84147
+IHRyb3Bl 84148
+IEFSR1M= 84149
+TWlhbWk= 84150
+IFBhbG8= 84151
+IFN1emFubmU= 84152
+X21hcHBpbmdz 84153
+I3tA 84154
+IE9jY3VwYXRpb25hbA== 84155
+X2J1Y2tldHM= 84156
+Z29hbHM= 84157
+X1J1bg== 84158
+LXByZXBlbmQ= 84159
+c3Nz 84160
+bWFyc2hhbGw= 84161
+IGVxdWl2YWxlbmNl 84162
+IFdlbGNo 84163
+KE9wQ29kZXM= 84164
+CWNsb2Nr 84165
+IE1lZGluYQ== 84166
+VEVSUw== 84167
+b3Jhbmc= 84168
+VGhvdWdodA== 84169
+IG9hdHM= 84170
+X1RFWA== 84171
+UklDUw== 84172
+IGluZGlmZmVyZW5jZQ== 84173
+IGFsbG90 84174
+LlVzZVRleHQ= 84175
+IFRyaWNrcw== 84176
+YXdl 84177
+LkZJTEw= 84178
+LXBocA== 84179
+LnZvaWNl 84180
+IFBhdGhmaW5kZXI= 84181
+X1RBR1M= 84182
+IFRyaXQ= 84183
+5oyJ6ZKu 84184
+YmJj 84185
+IGFkZGl0aXZlcw== 84186
+IHNjaGxl 84187
+IEtleWJvYXJkSW50ZXJydXB0 84188
+IHVzZVBhcmFtcw== 84189
+IEJ1Y2hhbmFu 84190
+cmlhbmdsZQ== 84191
+IG11bHRpcGx5aW5n 84192
+IHNlbGJlcg== 84193
+IFllcA== 84194
+Q2hhaXI= 84195
+LXJlcG9ydGVk 84196
+X1NESw== 84197
+LG5v 84198
+IEZhbGxpbmc= 84199
+5rk= 84200
+ICgpLAo= 84201
+cGRi 84202
+IEJvcm91Z2g= 84203
+LnJlbW92ZUZyb20= 84204
+IG92ZXJzaGFkb3c= 84205
+aWdhaWw= 84206
+IHR1bmc= 84207
+IG1tYw== 84208
+W3BhcmVudA== 84209
+RXh0ZXJu 84210
+YXZpb2xldA== 84211
+JykiCg== 84212
+IGNvdW50ZXJ0b3Bz 84213
+IHVidW50dQ== 84214
+5rc= 84215
+IM6T 84216
+IHVucHVibGlzaGVk 84217
+IEluZGllcw== 84218
+VU5FVA== 84219
+IG9mZXJ0YQ== 84220
+IGRhbWVz 84221
+IGFzdGVyb2lkcw== 84222
+IG5vdmVtYmVy 84223
+Y29udHJhc3Q= 84224
+LkFkZE1vZGVsRXJyb3I= 84225
+K1NhbnM= 84226
+IHNjcmFtYmxpbmc= 84227
+dGV4dFZpZXc= 84228
+L2NyeXB0bw== 84229
+VXNlUHJvZ3JhbQ== 84230
+QHVwZGF0ZQ== 84231
+RGVzZGU= 84232
+U0FU 84233
+IGRpc3BsZQ== 84234
+YW5uw6ll 84235
+XERlcGVuZGVuY3lJbmplY3Rpb24= 84236
+IGl0bQ== 84237
+IOe8 84238
+IGV0aG9z 84239
+QVBP 84240
+IEdhcmPDrWE= 84241
+aWRpcw== 84242
+IFN0ZWFr 84243
+cmliYQ== 84244
+X3ZlcmlmaWNhdGlvbg== 84245
+IEZL 84246
+IEVpbnNhdHo= 84247
+IHBlcnNvbmFsaXNlZA== 84248
+LW1vdGlvbg== 84249
+IE1lbGFuaWU= 84250
+w7Zo 84251
+X1ZD 84252
+IGRyaWZ0aW5n 84253
+LmNvbnN0cnVjdA== 84254
+IO2UhA== 84255
+IGJhdGNoaW5n 84256
+Li4vLi4vLi4vLi4v 84257
+RVJQ 84258
+X3V0Yw== 84259
+IG11bHRpdA== 84260
+IG1yYg== 84261
+Y2Nhaw== 84262
+Y2h1bmtz 84263
+IHRyYW5zbHVjZW50 84264
+IHBheW9mZg== 84265
+4oCUYW4= 84266
+IHNpbGw= 84267
+IG9ybmFtZW50cw== 84268
+Z3Vh 84269
+VUJZ 84270
+KHN0ZXBz 84271
+IEJPUkRFUg== 84272
+IFNPVU5E 84273
+YGAK 84274
+ZW5hcmllcw== 84275
+IEJpdHRl 84276
+IGdseXBocw== 84277
+IG92ZXJydW4= 84278
+IGJsb2NrSWR4 84279
+IE1TVA== 84280
+IGdlbm9tZXM= 84281
+dGVuc29yZmxvdw== 84282
+RGlyZWN0b3J5TmFtZQ== 84283
+X2xocw== 84284
+IGZpbnQ= 84285
+YWRkdG9ncm91cA== 84286
+IHN0ZWFkZmFzdA== 84287
+IGNsb3Zlcw== 84288
+IFNvdmlldHM= 84289
+IElTQQ== 84290
+wqNv 84291
+dXJnZXJ5 84292
+c292 84293
+INCy0YvQstC+0LQ= 84294
+IHB1ZA== 84295
+LXdhdGNo 84296
+IEhvc3BpdGFscw== 84297
+fXdoaWxl 84298
+IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMj 84299
+4buj 84300
+IGFrdHVhbA== 84301
+IGtpbG9ncmFtcw== 84302
+IEZBQw== 84303
+b3BoeXM= 84304
+cHJz 84305
+KkA= 84306
+eWI= 84307
+c2VjdXJlZA== 84308
+IGFsZ8O6bg== 84309
+IOCkuQ== 84310
+cGhhbnM= 84311
+QWRkb24= 84312
+IGNlbnRyYWxseQ== 84313
+X1NVSVRF 84314
+SW50ZXJlc3Rpbmc= 84315
+dWx0aW1v 84316
+QWdhaW5zdA== 84317
+IEV6cmE= 84318
+IEhlYg== 84319
+dWlkYQ== 84320
+IHNreXM= 84321
+T0xWRQ== 84322
+QmVuZWZpdHM= 84323
+IHByaXNl 84324
+Lio/KQ== 84325
+LmlzRGVmaW5lZA== 84326
+IHN0YW5kb2Zm 84327
+IHBsYW5v 84328
+LmxhdGVzdA== 84329
+ICgkLg== 84330
+IEdvdWxk 84331
+IGNhdXRpb25lZA== 84332
+J10o 84333
+IG51aXQ= 84334
+IEhDSQ== 84335
+Zm9vdGJhbGw= 84336
+IHdpbGxlbg== 84337
+UHJvY2VlZA== 84338
+IGludGVuZGluZw== 84339
+dGlm 84340
+IHNwb25zb3Jpbmc= 84341
+b2hhbmE= 84342
+RG9z 84343
+TW9ybmluZw== 84344
+ICEiKTsK 84345
+LnNoZWxs 84346
+IFJFTEFURUQ= 84347
+IHBpbXA= 84348
+L2NvdXJzZQ== 84349
+IHJhbWlmaWNhdGlvbnM= 84350
+IHBpeG1hcA== 84351
+IHBvd2VybGVzcw== 84352
+IGRvdWNoZQ== 84353
+Y3JpbWU= 84354
+Y29udHJpYnV0b3Jz 84355
+KHByb3RvY29s 84356
+IGdldFBvc2l0aW9u 84357
+U0VUVElOR1M= 84358
+IHZpZXQ= 84359
+aXNzZXM= 84360
+V2l0aEVtYWlsQW5kUGFzc3dvcmQ= 84361
+UmV0dXJuVHlwZQ== 84362
+QXBwZQ== 84363
+IElLRQ== 84364
+LkNvb2tpZXM= 84365
+Lm1lZGl1bQ== 84366
+LmdldEpTT05BcnJheQ== 84367
+X0Zvcg== 84368
+L3Rpbnlvcw== 84369
+IFRhYmxlQ2VsbA== 84370
+IFJFUExBQ0U= 84371
+Lk5ldHdvcmtpbmc= 84372
+IGJvd2Vk 84373
+CW1k 84374
+PSJ7ISE= 84375
+IGhvbmRh 84376
+IEV1cg== 84377
+IGluZG9uZXNpYQ== 84378
+IGhlbmQ= 84379
+LnZpZXdtb2RlbA== 84380
+CWN0cmw= 84381
+IFRhYmxldHM= 84382
+LW9yYW5nZQ== 84383
+ZXJyYXM= 84384
+X2dyYXBoaWNz 84385
+e3M= 84386
+IFRpdGxlcw== 84387
+IGRpYWdub3Nlcw== 84388
+b3VwbGU= 84389
+X0RvdWJsZQ== 84390
+W3Jlc3VsdA== 84391
+IGppdHRlcg== 84392
+X05VTUVSSUM= 84393
+PmY= 84394
+X01Z 84395
+0LjRgdGC0LXQvA== 84396
+c3RvcmVJZA== 84397
+IHJlbGlucXU= 84398
+ZW9z 84399
+IHdpZGVuaW5n 84400
+IHRhY29z 84401
+LllFUw== 84402
+XSsn 84403
+IEluZGV4ZWQ= 84404
+IHByb2Zlc3Npb25uZWw= 84405
+IFN0cmFw 84406
+QnVmZmVyRGF0YQ== 84407
+ZWVh 84408
+ZXJpbg== 84409
+QU5DRVM= 84410
+X1RYVA== 84411
+IHt9Lg== 84412
+KGNvbnRyYWN0 84413
+eXc= 84414
+IGJsaW5kbmVzcw== 84415
+Q0hBTg== 84416
+CWdsQ29sb3I= 84417
+IGN1cnJlbnRQb3NpdGlvbg== 84418
+IENhdWNhc2lhbg== 84419
+JGltZw== 84420
+I2Fh 84421
+IHNlYW4= 84422
+TWVzcw== 84423
+Kj0qPQ== 84424
+IGNhcGFjaXRvcg== 84425
+YWxmYQ== 84426
+LlJlbW92ZUFsbA== 84427
+IFdQQVJBTQ== 84428
+dWxhZG8= 84429
+bmljb3M= 84430
+IG9yZ3k= 84431
+R1g= 84432
+X0RFVklDRVM= 84433
+b3Vya2U= 84434
+IGtC 84435
+IHNvcGhpc3RpY2F0aW9u 84436
+X2F1ZGl0 84437
+L0lQ 84438
+IEx5ZnQ= 84439
+L1N0 84440
+CWNhbmNlbA== 84441
+IG92YXJpYW4= 84442
+bWFyaW5l 84443
+a8SZ 84444
+IFlN 84445
+IE1pbG8= 84446
+IE1hdFRhYmxl 84447
+IEFiYnk= 84448
+bnpl 84449
+IEx1ZHdpZw== 84450
+X2FybW9y 84451
+IHNjYWZmb2xk 84452
+4buXaQ== 84453
+YXV0aG9yaXR5 84454
+4bqleQ== 84455
+LmdldFByb2R1Y3Q= 84456
+IE9yYml0 84457
+X1BhcmFtZXRlcg== 84458
+LmRhdGVGb3JtYXQ= 84459
+L3RhZ3M= 84460
+LlNwZWVk 84461
+KExpbmU= 84462
+IHBvbGlzaGluZw== 84463
+IGtvbWI= 84464
+IHJ0cmlt 84465
+J2ljb24= 84466
+cmllcmU= 84467
+IFByZWZlcg== 84468
+c3RydG9sb3dlcg== 84469
+UmVncw== 84470
+Q0JE 84471
+LT4K 84472
+IHBhcmFzaXRl 84473
+ZW5kc1dpdGg= 84474
+IENvYnJh 84475
+OnRlc3Q= 84476
+IE51Z2dldHM= 84477
+xaF0 84478
+Q29yZUFwcGxpY2F0aW9u 84479
+L2JpbmQ= 84480
+IE1jSW50 84481
+aXR1bmVz 84482
+Wy0t 84483
+IFN1cnByaXNl 84484
+X0lORw== 84485
+IEZhc3Rlcg== 84486
+0J3QsA== 84487
+OkU= 84488
+IGRpbnQ= 84489
+bmdl 84490
+LiInLCciLiQ= 84491
+IGFkamVjdGl2ZQ== 84492
+LmJj 84493
+Y29uc3VtZQ== 84494
+Qk9S 84495
+KGFuY2hvcg== 84496
+IGVzdGVlbQ== 84497
+IGJyZWFrdXA= 84498
+ZGVjYXk= 84499
+ICQKCg== 84500
+RWR3YXJk 84501
+QVNJ 84502
+IGF0dGFjaGVz 84503
+X0RJU0s= 84504
+IFdpbG1pbmd0b24= 84505
+IEt1bA== 84506
+IFtbXQ== 84507
+IERlcGFydG1lbnRz 84508
+IHJldHVyblR5cGU= 84509
+IFVOSVRFRA== 84510
+b2JqZWN0aXZl 84511
+IGdpcmxmcmllbmRz 84512
+X0dV 84513
+QHN0b3Jl 84514
+LU91dA== 84515
+Lm1vdmVz 84516
+KHN0YXJ0RGF0ZQ== 84517
+CUpCdXR0b24= 84518
+IFBhY2U= 84519
+IEJlYXRz 84520
+IGxpY3o= 84521
+IGV0aGVyZXVt 84522
+IGNoZWVyZWQ= 84523
+IGF1Y3Vu 84524
+UmVnYXJkaW5n 84525
+IG1pZ3JhdGluZw== 84526
+IGZ1dGlsZQ== 84527
+IFRhY29tYQ== 84528
+X0NoYXJhY3Rlcg== 84529
+IHZn 84530
+IENvcGE= 84531
+2Ks= 84532
+IG5hbA== 84533
+IGxhbmRmaWxs 84534
+IHRhbWls 84535
+IHBlcnBldHJhdG9y 84536
+IFBhY2Vycw== 84537
+LmdldE9yZGVy 84538
+fA0K 84539
+R2V0T2JqZWN0 84540
+IGJsYQ== 84541
+IEhhcmFt 84542
+cG9ydGxldA== 84543
+IGxva2Fs 84544
+TWVyY2hhbnQ= 84545
+UGFzc3dvcmRz 84546
+b25lbnQ= 84547
+IGFydGVyaWVz 84548
+IEludGVsbGk= 84549
+XFN5c3RlbQ== 84550
+PWxvY2FsaG9zdA== 84551
+LmF2aQ== 84552
+IFZlbmQ= 84553
+KHRibA== 84554
+Q29ycmVjdGlvbg== 84555
+IHV0ZXJ1cw== 84556
+IHNhbGl2YQ== 84557
+Kys7DQoNCg== 84558
+KCcqJyw= 84559
+IHNuYXRjaA== 84560
+IFNUUkVFVA== 84561
+KVs6 84562
+54Sh44GX44E= 84563
+U2VudGVuY2U= 84564
+KCkuJy8= 84565
+OnJlbGF0aXZl 84566
+leOCkw== 84567
+X3VzZXJpZA== 84568
+b2xpbmc= 84569
+IENsYXNo 84570
+CXNldHVw 84571
+KG1p 84572
+IGppdA== 84573
+IFNjYW5kaW5hdmlhbg== 84574
+IFBob25lcw== 84575
+Iic7Cg== 84576
+IHR1bXVsdA== 84577
+IEludGw= 84578
+IFNpbm4= 84579
+KG5ld3M= 84580
+IGRicw== 84581
+IFJlbWFya3M= 84582
+S2l0Y2hlbg== 84583
+IGFkbWlyYWJsZQ== 84584
+X2Rhc2g= 84585
+IERPTUFJTg== 84586
+YWRkTGlzdGVuZXI= 84587
+Il0uKA== 84588
+CU1ldGhvZA== 84589
+bWFya3Q= 84590
+LGV4cG9ydHM= 84591
+IG91dG51bWJlcg== 84592
+X0FTQw== 84593
+cHJlbWl1bQ== 84594
+KU5VTEw= 84595
+IEJvd21hbg== 84596
+LnNldE9uSXRlbUNsaWNrTGlzdGVuZXI= 84597
+IFJlZ2V4T3B0aW9ucw== 84598
+S2Vs 84599
+L21hdA== 84600
+44GT44KM 84601
+IHdlYXJlcg== 84602
+aW5pcw== 84603
+W2RpbQ== 84604
+IE51dHp1bmc= 84605
+aXNidXJ5 84606
+5Yid 84607
+IHJvb3RSZWR1Y2Vy 84608
+ZXlK 84609
+SW5jbHVkZWQ= 84610
+LUxlYWd1ZQ== 84611
+YW5heA== 84612
+KGluZmxhdGVy 84613
+IEZpZWxkVHlwZQ== 84614
+IHNob3Zl 84615
+IGZ1bGxmaWxl 84616
+RGF0YU1hbmFnZXI= 84617
+LmdldExlZnQ= 84618
+IEZz 84619
+ZHJvcG91dA== 84620
+IOuyiA== 84621
+IG1hbmnDqHJl 84622
+IGZsYW1pbmc= 84623
+IGNvbXBsZXRhbWVudGU= 84624
+4oCw 84625
+fC4= 84626
+RW5lbWllcw== 84627
+b3NjaQ== 84628
+IFNBWQ== 84629
+IG1hcnk= 84630
+KFJ1bnRpbWVPYmplY3Q= 84631
+IH4+ 84632
+IFNpbXBzb25z 84633
+J10uJA== 84634
+X21lbWJlcnNoaXA= 84635
+KSI6 84636
+IGxheW91dE1hbmFnZXI= 84637
+IFJvY2tlZmVsbGVy 84638
+ICd8Jw== 84639
+SVBI 84640
+RE9O 84641
+YWNodGU= 84642
+UGVhY2U= 84643
+aHRhcg== 84644
+QCIK 84645
+IHRyZWFkbWlsbA== 84646
+IHNwdXJyZWQ= 84647
+IEtW 84648
+bWlkZA== 84649
+IGZsb3dlZA== 84650
+w6Nlc3Rl 84651
+R2VuZXNpcw== 84652
+PT0+ 84653
+IFZlbnR1cmE= 84654
+X2VsaW0= 84655
+INC40LzRjw== 84656
+IHNvbmd3cml0ZXI= 84657
+Y3JlYXRlRm9ybQ== 84658
+SUdITA== 84659
+IG1vbGRlZA== 84660
+IHJldmVyZWQ= 84661
+VW5kZXJUZXN0 84662
+aW1ibGVkb24= 84663
+X1Nlc3Npb24= 84664
+IG1hc2NvdA== 84665
+IGFsZg== 84666
+66mU 84667
+PldlbGNvbWU= 84668
+IGtub2Nrcw== 84669
+IEVxdWF0aW9u 84670
+LnRvdWNoZXM= 84671
+X0xhc3Q= 84672
+IHVwYmVhdA== 84673
+YmlnaW50 84674
+IGVudmlz 84675
+L2Jhbm5lcg== 84676
+44GC44KK44GM 84677
+IERvd25z 84678
+X1NG 84679
+IHJ1bkFwcA== 84680
+IHF1ZXN0aQ== 84681
+VHJhZGl0aW9uYWw= 84682
+X3dhaXRpbmc= 84683
+cGlja3Vw 84684
+KCdALw== 84685
+CXNl 84686
+IEtlcm4= 84687
+IERlbGljaW91cw== 84688
+IHNhdHVybg== 84689
+IEpTT05FeGNlcHRpb24= 84690
+44KN 84691
+SlI= 84692
+fSgpKTsK 84693
+IFNvbWFsaQ== 84694
+dWFp 84695
+aW1hZ2Vt 84696
+YW5kRmlsdGVyV2hlcmU= 84697
+w6hsZXM= 84698
+aW5ib3g= 84699
+IHlhcMSx 84700
+IG1laXN0ZW4= 84701
+YF0o 84702
+U1dH 84703
+LGNsYXNz 84704
+4LWN4LQ= 84705
+dGFpZW50 84706
+IEZyYW7Dp29pcw== 84707
+QXV0aFRva2Vu 84708
+IHB1ZXN0bw== 84709
+IGps 84710
+IGdhdGVk 84711
+IERlYXRocw== 84712
+IFNpZGQ= 84713
+IHByZXZhaWxlZA== 84714
+LcOqdHJl 84715
+KGFsYnVt 84716
+IHFpbnQ= 84717
+bWFyY2E= 84718
+IE5BRlRB 84719
+IHRpZ2h0ZW5lZA== 84720
+X0dBUA== 84721
+RU5TSU9OUw== 84722
+IExpYmVydGFyaWFu 84723
+X3N0eWxlc2hlZXQ= 84724
+LlNldEludA== 84725
+X3B1Ymxpc2hlcg== 84726
+cGFnZU51bWJlcg== 84727
+enNjaGU= 84728
+IFNRTEFsY2hlbXk= 84729
+IGhvb2Y= 84730
+Z2V0VG9rZW4= 84731
+IG5lYmVu 84732
+bHVuZA== 84733
+Lm1pdA== 84734
+ZXJycw== 84735
+LnNldE1pbmltdW0= 84736
+LXByaWNlZA== 84737
+KHBv 84738
+ZW5nYWdl 84739
+X0ZU 84740
+Ly8KCgo= 84741
+IHRvbWU= 84742
+ICI+PC8= 84743
+VmVjdG9ycw== 84744
+IFRlc3RVdGlscw== 84745
+ZmlsdHI= 84746
+VXN1 84747
+IGRpY3Rpb25hcnlXaXRo 84748
+IG9icmFz 84749
+IEJEU00= 84750
+LmdldFRhcmdldA== 84751
+IGFsbG93YWJsZQ== 84752
+IEluc2VydHM= 84753
+CU5vbmU= 84754
+IGxpYmVyYXRlZA== 84755
+S2VudA== 84756
+IFdpc2hsaXN0 84757
+IExhZ2Vy 84758
+IGp1aW4= 84759
+IG51ZXM= 84760
+IG1vbmFzdGVyeQ== 84761
+IG1pY3Jvc2Vjb25kcw== 84762
+IEhhbm5h 84763
+0L7RgdGC0Lg= 84764
+d2VhcG9ucw== 84765
+X3Nwb3Q= 84766
+b2RvbQ== 84767
+Lk1vZGVsRm9ybQ== 84768
+IG9yZGVybHk= 84769
+RklOSVRF 84770
+IHJlc2lkZW5jZXM= 84771
+X3RD 84772
+Q0dDb2xvcg== 84773
+IMW+ZQ== 84774
+IHNjcmVlbnBsYXk= 84775
+IHB5bW9uZ28= 84776
+IGTDqXQ= 84777
+IGRlc3Rh 84778
+IE5ldXJvc2NpZW5jZQ== 84779
+bmllc3Q= 84780
+QEdlbmVyYXRlZFZhbHVl 84781
+RUxTRQ== 84782
+PGw= 84783
+IGRpc2pvaW50 84784
+LnB1Ymxpc2hlZA== 84785
+ZWxsYW4= 84786
+IFN0cmluZ1dyaXRlcg== 84787
+LkJyb2FkY2FzdA== 84788
+IEZlaW5zdGVpbg== 84789
+YW1waGV0YW1pbmU= 84790
+S2V5U3BlYw== 84791
+IEdyaW1t 84792
+ZXR0ZWw= 84793
+4Lic 84794
+T3Q= 84795
+aWJyYWx0YXI= 84796
+Y2Vi 84797
+IHRpbWluZ3M= 84798
+aW5lZQ== 84799
+IEFuZHLDqQ== 84800
+RXNzYXk= 84801
+Lmpk 84802
+IEJ1bmRlc2xpZ2E= 84803
+UmV0dXJuZWQ= 84804
+IGFwcGFsbGluZw== 84805
+LkJpZ0ludGVnZXI= 84806
+IFNFTg== 84807
+IEhvbWVtYWRl 84808
+LmNoYXB0ZXI= 84809
+LXZhbGlk 84810
+IEFUVFJJQlVURQ== 84811
+dXN0cmlh 84812
+IGVudMOjbw== 84813
+UmV0dXJuaW5n 84814
+dmVydGlzZXI= 84815
+LlBhY2thZ2VNYW5hZ2Vy 84816
+Q2xhcms= 84817
+IHF1b3Rhcw== 84818
+IHNjYWxlRmFjdG9y 84819
+IGNveg== 84820
+X21pbmk= 84821
+IG11dGF0ZWQ= 84822
+LmFjdGl2YXRpb24= 84823
+Km1hdGg= 84824
+LnZlcnR4 84825
+PGFydGljbGU= 84826
+IGVtYnJvaWRlcnk= 84827
+L2J1c2luZXNz 84828
+Y2tldHQ= 84829
+c2NpZW50aWZpYw== 84830
+IEdpbGVz 84831
+IHJhY2Vy 84832
+X3BlcmZvcm1hbmNl 84833
+IGxhbWluYXRl 84834
+IFBISQ== 84835
+UsOp 84836
+IEF0aGU= 84837
+Y29sZXM= 84838
+IHNhxJ8= 84839
+IElua1dlbGw= 84840
+CXNpZw== 84841
+IHNwYWNlc2hpcA== 84842
+IGluc29s 84843
+IFVDbGFzcw== 84844
+LmxlYWRpbmdBbmNob3I= 84845
+dG90YWxz 84846
+IHNwcmlua2xl 84847
+IE1vZHVsYXI= 84848
+ICdcIg== 84849
+b3Jvbg== 84850
+LlJlYWRBbGxUZXh0 84851
+ICAgIAkNCg== 84852
+L2lvbg== 84853
+REVQVEg= 84854
+X21pbmltdW0= 84855
+XENhY2hl 84856
+IGRpdmVyc2lmaWVk 84857
+aWduZXQ= 84858
+IGRvam8= 84859
+IFVJQWxlcnRWaWV3 84860
+L3R0eQ== 84861
+IFNhc3M= 84862
+IC9cLig= 84863
+IElNQUdFUw== 84864
+IGRhdGluZ3NpZGVy 84865
+IEV4cGxvcw== 84866
+LmdlbnJl 84867
+XEV2ZW50cw== 84868
+IGVudW1lcmF0ZWQ= 84869
+Y3VycmVudFN0YXRl 84870
+aXRydXN0 84871
+Q2FsbGFibGVXcmFwcGVy 84872
+Rm91bmRlZA== 84873
+IHJveWFsdGllcw== 84874
+KFByb3BlcnRpZXM= 84875
+IFVTUFM= 84876
+LS0tLS0tLS0tLS0NCg== 84877
+LlJlYWRUb0VuZA== 84878
+IGNvc3k= 84879
+IGFwZQ== 84880
+X2RlZmluaXRpb25z 84881
+IHBhZ2VObw== 84882
+IGR6aWVjaQ== 84883
+c3RhbmRlbg== 84884
+IGJlc2Fy 84885
+aXRpbg== 84886
+IGNvbnNlcXVhdA== 84887
+IHBydg== 84888
+IHNwbGl0dGVk 84889
+IGVzcG9zYQ== 84890
+PWZpbmRWaWV3QnlJZA== 84891
+V2Fsa2Vy 84892
+IEhlYXJ0aA== 84893
+aWJyYXRvcg== 84894
+b3RvbXk= 84895
+YWdnYWJsZQ== 84896
+IOW9kw== 84897
+77yBJyk7Cg== 84898
+aW9uYXRl 84899
+L3llYXI= 84900
+IHNldEM= 84901
+IE1lZGlhVGVr 84902
+LWJveQ== 84903
+LnRvb2xTdHJpcE1lbnVJdGVt 84904
+Q29uZmlncw== 84905
+YXR0ZW5kZWQ= 84906
+IGVtb2M= 84907
+IEJhaQ== 84908
+b3BvbGl0YW4= 84909
+IGludHJ1c2l2ZQ== 84910
+IHp1Zw== 84911
+IGZmbXBlZw== 84912
+X2Jvb3N0 84913
+IG1vemlsbGE= 84914
+IHNsaWNpbmc= 84915
+V0c= 84916
+cGFnZXNpemU= 84917
+UHJvcGVydHlEZXNjcmlwdG9y 84918
+IEFsZWphbmRybw== 84919
+VVNFUw== 84920
+SG9zdGluZw== 84921
+IHJpc2tpbmc= 84922
+IEludml0ZQ== 84923
+IEphemVlcmE= 84924
+IHJlZ2FpbmVk 84925
+IEhhZ3Vl 84926
+IGd1ZXJyYQ== 84927
+IGVuY2xvc2luZw== 84928
+J10iKQo= 84929
+PFRyYW5zZm9ybQ== 84930
+Lk5PUlRI 84931
+IGNyaW0= 84932
+SU5V 84933
+IGNsZW4= 84934
+IE1vdGhlcnM= 84935
+IE93bmVyc2hpcA== 84936
+RHJpbms= 84937
+IGJlYmVyYXBh 84938
+Lm9uZXJyb3I= 84939
+KSsK 84940
+IHRhYkluZGV4 84941
+IERpbw== 84942
+IEZvcnR5 84943
+KExpbms= 84944
+IHNlZ21lbnRlZA== 84945
+IGphbWVz 84946
+IFRhcmdldHM= 84947
+IFJUUw== 84948
+INC60L3QvtC/ 84949
+IHZhcmlhcw== 84950
+IHTDrXR1bG8= 84951
+IGTDvHI= 84952
+L0dhbWU= 84953
+cmFuc2l0aW9u 84954
+IGRpc3Rpbmd1aXNoaW5n 84955
+dWt0dXI= 84956
+YW5qZQ== 84957
+IE1jQ2FiZQ== 84958
+cGFp 84959
+KHRr 84960
+RGVzdHJ1Y3Rvcg== 84961
+R2FtZU9iamVjdFdpdGhUYWc= 84962
+JGg= 84963
+IGFmcg== 84964
+LnNldEVtYWls 84965
+IHJlcGV0aXRpb25z 84966
+bGFuZGVycw== 84967
+IFNoZWE= 84968
+X2NsYWlt 84969
+IGFjZXNz 84970
+QmVuY2htYXJr 84971
+LkVzdA== 84972
+LlBP 84973
+IE7DpA== 84974
+IGl0Y2hpbmc= 84975
+IGNvbmRvbWluaXVt 84976
+X0ZXRA== 84977
+IHJlYWx0aW1l 84978
+IGNpdmlsaXplZA== 84979
+X3BoeXNpY2Fs 84980
+UmFs 84981
+IHdpbnRlcnM= 84982
+IFlhZA== 84983
+IGZvcmE= 84984
+IGNhbGlicmF0ZWQ= 84985
+UGV0cw== 84986
+IHN0b3JtZWQ= 84987
+IGplbA== 84988
+IFNTUA== 84989
+ZGF0YWdyaWQ= 84990
+IExhdQ== 84991
+dW5hcg== 84992
+dWxmaWxsZWQ= 84993
+RVJJTkc= 84994
+IFRyaW8= 84995
+2LHZiA== 84996
+Rm9yZWdyb3VuZENvbG9y 84997
+PW91dA== 84998
+LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKi8K 84999
+IHZpZW50 85000
+IEFETQ== 85001
+X0Nvbm5lY3Rpb24= 85002
+LWNhbmNlbA== 85003
+KCcuJyk7Cg== 85004
+IHNhaWxz 85005
+IGVxdWl2YWxlbnRz 85006
+TmI= 85007
+IGZseWVycw== 85008
+IEdJUg== 85009
+a2VsaWc= 85010
+LXdhbGw= 85011
+LlJlcXVpcmVz 85012
+IGNvc2U= 85013
+IEFOQw== 85014
+IGphZGU= 85015
+IEFsZWM= 85016
+IGVuZHJlZ2lvbg== 85017
+IEVYVEk= 85018
+ZWRlcmU= 85019
+VGVycmFpbg== 85020
+U3BlY2lmaWNhdGlvbnM= 85021
+IFN3ZWVw 85022
+c2V0SXRlbQ== 85023
+IHNtaXJr 85024
+IHNjcmlwdGVk 85025
+W1N5c3RlbQ== 85026
+56eB 85027
+IHN5bmNlZA== 85028
+IHNxcg== 85029
+Z2V3YXRlcg== 85030
+IGpld2Vscw== 85031
+IGhkYw== 85032
+4KWN4KSw 85033
+z4Y= 85034
+w7xzc2VsZG9yZg== 85035
+bGllbg== 85036
+Qm9yZGVycw== 85037
+IEF0b21pY0ludGVnZXI= 85038
+IHBhcmFseXNpcw== 85039
+Q2xhc3NpZmljYXRpb24= 85040
+IGdsaWRl 85041
+IHVtcA== 85042
+IC8+fQ== 85043
+IHZlbmRpbmc= 85044
+4Li04LiZ 85045
+bm90aWY= 85046
+Jl8= 85047
+IEVtZXJnaW5n 85048
+YXRpY29u 85049
+IHByb3BhZ2F0ZWQ= 85050
+LW9yZGVycw== 85051
+YWdhcw== 85052
+dXJnZW50 85053
+KFRpbWVTcGFu 85054
+QUxDSEVNWQ== 85055
+L2Jvd2Vy 85056
+7IKw 85057
+LmJvb3N0 85058
+LmRlcGVuZGVuY2llcw== 85059
+LlN3aW5nQ29uc3RhbnRz 85060
+dW50bGV0 85061
+LmNoYXJz 85062
+LWNpZ2FyZXR0ZXM= 85063
+IE1vZHM= 85064
+ICAgICAJ 85065
+IGJyYXZlcnk= 85066
+IGNvdW50ZXJlZA== 85067
+cmVsdWRl 85068
+X21vYg== 85069
+QUlORUQ= 85070
+bmdvaW5n 85071
+IHVuZGVyZ3JhZA== 85072
+R2V0TWV0aG9k 85073
+RHVhbA== 85074
+X2pvdXJuYWw= 85075
+LE5v 85076
+IHNpZGVs 85077
+IExhcnNvbg== 85078
+KyIsIis= 85079
+IG5hcnJhdGlvbg== 85080
+IFN1YndheQ== 85081
+IExleGVy 85082
+IE5pbmc= 85083
+aW5kaWM= 85084
+dGhhbmU= 85085
+LlNJRw== 85086
+LWVhcnRo 85087
+IGJlcnJ5 85088
+IFRldWNob3M= 85089
+CUVudGl0eQ== 85090
+ZXJzcGVjdGl2ZQ== 85091
+Tm9z 85092
+IE93bmVk 85093
+QlVS 85094
+IGxpbmVubw== 85095
+IEZpamk= 85096
+R2V0SW50 85097
+U3RyaW5nUmVm 85098
+ICcmJw== 85099
+dWFkYQ== 85100
+LmNhcHRpb24= 85101
+YXBwTmFtZQ== 85102
+KG9mZg== 85103
+IHZlcnN0 85104
+IHR5cG8= 85105
+6ZyA6KaB 85106
+YXRlcmFuZ2VwaWNrZXI= 85107
+IHFlbXU= 85108
+IEdFTw== 85109
+X0Ns 85110
+LklU 85111
+IE51bmVz 85112
+W1o= 85113
+IENvbXBsZXRlbHk= 85114
+LkxpdmU= 85115
+IEphcw== 85116
+IHdlaXQ= 85117
+Y29zaXR5 85118
+IHBvbGljZW1lbg== 85119
+KHRhcmdldHM= 85120
+aXRsZWRCb3JkZXI= 85121
+IOinow== 85122
+LkdsaWRl 85123
+IGRlbW9uaWM= 85124
+SW50ZXJpb3I= 85125
+LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0t 85126
+IERvdGE= 85127
+IG9yYml0cw== 85128
+QU1Z 85129
+IFRyaW5pZGFk 85130
+aWN1bQ== 85131
+Lnph 85132
+IGdldEludA== 85133
+QXRsYW50YQ== 85134
+IGFtbmVzdHk= 85135
+IFJhaHVs 85136
+IF98 85137
+aGlybw== 85138
+IFRBS0U= 85139
+IGp1bWxhaA== 85140
+IEF1dG9tb2JpbGU= 85141
+4buP 85142
+d2hvc2U= 85143
+X1NBTVBM 85144
+UGF0aWVudHM= 85145
+INGC0LXQutGD0Yk= 85146
+LnN1YnNjcmlwdGlvbnM= 85147
+IE1lbnRpb24= 85148
+VG9Xb3JsZA== 85149
+aXBh 85150
+CU1lc3NhZ2VCb3g= 85151
+PEFwcGxpY2F0aW9uVXNlcg== 85152
+INil 85153
+ZmFicmlj 85154
+a2VsZXRhbA== 85155
+QmFyQnV0dG9u 85156
+IGFyY2hldHlwZQ== 85157
+aW5zdGFudA== 85158
+IGludGVybmFjaW9uYWw= 85159
+IFZveWFnZXI= 85160
+KHRvdWNo 85161
+IFZhbGs= 85162
+L01JVA== 85163
+IGNhdWw= 85164
+J0Nvbm5vcg== 85165
+KCIh 85166
+KE9Q 85167
+ZmFjdWx0eQ== 85168
+IEJhdG9u 85169
+IFZvbHVudGVlcnM= 85170
+dGFuaw== 85171
+X0JJTkRJTkc= 85172
+O2xpbmU= 85173
+IFZlcnNpb25z 85174
+WUxFUw== 85175
+IGplZXA= 85176
+KEVuY29kaW5n 85177
+IGdlb2xvZ2ljYWw= 85178
+TmljaA== 85179
+KHBkZg== 85180
+IGFuYWx5emVz 85181
+IGNhcHRpdmF0aW5n 85182
+IGhpem8= 85183
+Lm1kbA== 85184
+IGphcA== 85185
+IGZsaXBz 85186
+CWRm 85187
+IFBpZXQ= 85188
+IG5yb3dz 85189
+IGthbXU= 85190
+INCy0L7Qtw== 85191
+IHBydW5pbmc= 85192
+YWN1bGE= 85193
+IHRyYXZlbGxlcg== 85194
+U2hvb3Q= 85195
+LmVwc2lsb24= 85196
+IEZsZW1pbmc= 85197
+aWJ1cg== 85198
+b3BlcmF0ZQ== 85199
+aWdodGVy 85200
+IGJlZ3M= 85201
+IFdhbG51dA== 85202
+KFBhcnNlcg== 85203
+IHdpdGhkcmF3YWxz 85204
+aXNjb3BhbA== 85205
+IGJpbGxib2FyZA== 85206
+a2Vr 85207
+LW9wZW5pbmc= 85208
+IER1ZGU= 85209
+Y29uaQ== 85210
+eEVC 85211
+IGNhbG9y 85212
+YW1haGE= 85213
+LlRYVA== 85214
+RHJ5 85215
+IG1pc3Npb25hcmllcw== 85216
+X1ZlcnNpb24= 85217
+IG11bHRpbGluZQ== 85218
+4oCUd2U= 85219
+IGNvbXBvbmVudERpZFVwZGF0ZQ== 85220
+RmF2b3JpdGVz 85221
+aWdoYW0= 85222
+IGpvdXJuw6ll 85223
+IGFtdXNlZA== 85224
+IE9tbmk= 85225
+dGd0 85226
+IHdhaA== 85227
+ZXRpbmU= 85228
+IHBoYXNlZA== 85229
+IG9uU3RvcA== 85230
+Y3JlYXRpdmVjb21tb25z 85231
+U29waA== 85232
+IHVuYm9ybg== 85233
+PUU= 85234
+IEZlZEV4 85235
+bm9ybWFsbHk= 85236
+IGx5cg== 85237
+TWF0cml4TW9kZQ== 85238
+IHplaWdlbg== 85239
+QXRo 85240
+IEt1bQ== 85241
+w6RobGVu 85242
+LyI7Cgo= 85243
+IGRhbGxl 85244
+IGxhbmNl 85245
+IFN1aXRhYmxl 85246
+IGNvdW5zZWxvcnM= 85247
+5YWo6YOo 85248
+IGZhc3Rh 85249
+IGJsYXppbmc= 85250
+7KeE 85251
+L3R1dG9yaWFs 85252
+LnRjcA== 85253
+5pmv 85254
+TWFuYWdlckludGVyZmFjZQ== 85255
+IFNhbWFy 85256
+CWdsVW5pZm9ybQ== 85257
+IHByZXJlcXVpc2l0ZXM= 85258
+IGFudGljaXBhdGluZw== 85259
+cmFxdW8= 85260
+a3Nlbg== 85261
+TWFnbml0dWRl 85262
+dXRvbWF0aW9u 85263
+SGllcmFyY2h5 85264
+IGRldmlhdGlvbnM= 85265
+aW1ldA== 85266
+Q0NJ 85267
+PSgK 85268
+IGFudGxy 85269
+CWluaXRpYWw= 85270
+IFJlc29ydHM= 85271
+aG9tZXM= 85272
+CXBvb2w= 85273
+IG1hdMOp 85274
+P29wdGlvbg== 85275
+Om15c3Fs 85276
+KHV0Zg== 85277
+LlRhYkNvbnRyb2w= 85278
+PlRpdGxl 85279
+IEFkb3B0 85280
+LklzTWF0Y2g= 85281
+IGVudHJ1c3RlZA== 85282
+U3VzYW4= 85283
+c3dpbmc= 85284
+aW1hZ2VuZXM= 85285
+IHNlbGVjaW9u 85286
+IGFpZGluZw== 85287
+KFtdKg== 85288
+IHNldEZyYW1l 85289
+c3Bpcml0 85290
+L3Jzcw== 85291
+SXRhbGlj 85292
+IFByb3BlbEV4Y2VwdGlvbg== 85293
+IFRvbGw= 85294
+LkZpbmRHYW1lT2JqZWN0V2l0aFRhZw== 85295
+aW5hbnQ= 85296
+IHNlbGZpZXM= 85297
+XXxb 85298
+IGFwcGxpY2F0aW9uQ29udGV4dA== 85299
+aXhl 85300
+Y2Ri 85301
+ZWJi 85302
+IE92ZXJzZQ== 85303
+IHNxbENvbW1hbmQ= 85304
+SG9zdE5hbWU= 85305
+LWxhdW5jaA== 85306
+Umlzaw== 85307
+O3I= 85308
+LlNwYW4= 85309
+X0NJVFk= 85310
+X01B 85311
+LyIKCg== 85312
+UGF3bg== 85313
+IFllbHA= 85314
+QnVuZGxlT3JOaWw= 85315
+IG1heW9yw61h 85316
+U3RhY2tOYXZpZ2F0b3I= 85317
+ITsK 85318
+IHRodWdz 85319
+IEJhcm5ldHQ= 85320
+44O744O744O7Cgo= 85321
+IOqygA== 85322
+X0NPTlY= 85323
+IGJ1enppbmc= 85324
+a2V0ZXJhbmdhbg== 85325
+TWlsaXRhcnk= 85326
+d2VlZA== 85327
+IGRlbGltaXRlZA== 85328
+6LWE5rqQ 85329
+INCw0Lo= 85330
+X0hFTFBFUg== 85331
+IFJFQURZ 85332
+TG9vcGVy 85333
+KioqKi8K 85334
+IFRydWNrcw== 85335
+5Y67 85336
+X3BvZA== 85337
+T01BVElD 85338
+LWphdmE= 85339
+IHVuaWZ5 85340
+L0FyZWE= 85341
+ICcvJyk7Cg== 85342
+IEdhbWJsaW5n 85343
+LkhpdA== 85344
+IEZhcnJlbGw= 85345
+X2ZpdG5lc3M= 85346
+cmVjb21tZW5kZWQ= 85347
+emVuZA== 85348
+b2RpZQ== 85349
+X2JlYW0= 85350
+IHBsYWdl 85351
+bmRvbg== 85352
+LmFzc2VydGo= 85353
+IGdyYXRl 85354
+TWVhc3VyZWQ= 85355
+LmNlbnRyYWw= 85356
+Z2VzdHVyZQ== 85357
+IEdsb2JhbEtleQ== 85358
+cHl4 85359
+IE5lY2tsYWNl 85360
+5Y2O 85361
+LkFkZENvbHVtbg== 85362
+IFJ1ZGQ= 85363
+IFByZXNieXRlcmlhbg== 85364
+dW5kbGVy 85365
+IyFb 85366
+X2xhaGly 85367
+KCk9PSI= 85368
+QWNjZXNzaWJpbGl0eQ== 85369
+LXRyYWluaW5n 85370
+IFRob3U= 85371
+X1BJWA== 85372
+X1RSWQ== 85373
+PEo= 85374
+xrDGoW5n 85375
+bHVjaw== 85376
+X01BWElNVU0= 85377
+IHRoYXc= 85378
+VW5pZmllZA== 85379
+PkNvbnRhY3Q= 85380
+LVByZXNpZGVudA== 85381
+LXBhcnNl 85382
+IFBpY2tlcg== 85383
+TWFyY28= 85384
+dHJz 85385
+zrQ= 85386
+LiQu 85387
+X01FU0g= 85388
+IHNhZ3Rl 85389
+Kz0n 85390
+0K8= 85391
+KHBhcmNlbA== 85392
+aXZvcnM= 85393
+IGRpdmVydGVk 85394
+QUdBSU4= 85395
+IG5lc3M= 85396
+IHZhbGxleXM= 85397
+IC4uLig= 85398
+IEVRVUk= 85399
+IE91dHM= 85400
+IERlbW9uc3Ry 85401
+RGV0YWxsZQ== 85402
+IOu2gA== 85403
+UG9pbnRYWVo= 85404
+LmVwcw== 85405
+IHN5bm9ueW1z 85406
+ID09KA== 85407
+4oCcWWVz 85408
+J3V0aWxpc2F0ZXVy 85409
+TmFtaW5n 85410
+TEVW 85411
+cHJvdG9jb2xz 85412
+IOyb 85413
+IGdldFVzZXJuYW1l 85414
+LXZhcg== 85415
+X210eA== 85416
+IHNwZWN1bGFy 85417
+IG5vdGFz 85418
+SG9yaXpvbnRhbEFsaWdubWVudA== 85419
+IEJheWVy 85420
+c3Vz 85421
+ICAgIAkJCg== 85422
+IFNoYWNr 85423
+cmVzaGVy 85424
+IGltbWF0dXJl 85425
+YnJhY2h0 85426
+SVNDTw== 85427
+LmNyZWRpdA== 85428
+IHZpbmVz 85429
+X0xQ 85430
+RUVERUQ= 85431
+IFNjYXJib3JvdWdo 85432
+w6FudA== 85433
+KT09Jw== 85434
+CWRlbHRh 85435
+X0NPTE9SUw== 85436
+LkN1c3RvbUJ1dHRvbg== 85437
+IGFmaXJt 85438
+IEppbmc= 85439
+UGFybXM= 85440
+Y2VudGVycw== 85441
+LT5fX18= 85442
+IExETA== 85443
+LWNvbnRyaWI= 85444
+IERyZXNkZW4= 85445
+IFBpeGVscw== 85446
+ICIiIiIsCg== 85447
+TEVUVEU= 85448
+eEJF 85449
+IEh1c3Q= 85450
+IEV4ZWN1dGlvbkNvbnRleHQ= 85451
+IEJ1ZmZldHQ= 85452
+Y2xhbXA= 85453
+LkFydGljbGU= 85454
+IFJhdGg= 85455
+IFBleXRvbg== 85456
+IExPV0VS 85457
+b29rZQ== 85458
+IHRpZGFs 85459
+IHVuaGVhcmQ= 85460
+IFNoYWxs 85461
+IGJvbWJhcmQ= 85462
+YW5vdmE= 85463
+W21hc2s= 85464
+KGNyZWRlbnRpYWxz 85465
+IEV1cm9z 85466
+IGJyYW5jaGluZw== 85467
+IHN0cm9uZ2hvbGQ= 85468
+IGNpdmlsaXphdGlvbnM= 85469
+LWNvbm5lY3Q= 85470
+IExTVE0= 85471
+LW1vdmluZw== 85472
+IHV0ZW4= 85473
+Y3Jhc3Q= 85474
+X0RJU1A= 85475
+IENvbnRyb2xsZXJz 85476
+dXBl 85477
+LnBlbg== 85478
+IGRlc3Nh 85479
+IGRpZsOtY2ls 85480
+dWl0YWJsZQ== 85481
+b2ZpcmU= 85482
+W2NoaWxk 85483
+UkVGRVJFTkNFUw== 85484
+IGRlY2VpdA== 85485
+IFVyZw== 85486
+PEVkZ2U= 85487
+IGRlc2k= 85488
+IEJPVEg= 85489
+ICcpJzsK 85490
+dHlwZU5hbWU= 85491
+Q29tbWFuZEV2ZW50 85492
+d2hlcmVJbg== 85493
+KG9wdGltaXplcg== 85494
+IHLDqWFsaXM= 85495
+IG9taW5vdXM= 85496
+IEJyYWNrZXQ= 85497
+IGRhdGVTdHJpbmc= 85498
+IHNpbmdseQ== 85499
+KEpGcmFtZQ== 85500
+4oCZVA== 85501
+ZXNsaW50 85502
+KGhlcm8= 85503
+IE1hcmE= 85504
+IGNhdGNoeQ== 85505
+LGNhbGxiYWNr 85506
+IGN0eXBl 85507
+cHJlc2V0 85508
+CWdsZnc= 85509
+0LXRiQ== 85510
+aGs= 85511
+IHRpdGFu 85512
+QWNlcHRhcg== 85513
+44Gh44Gv 85514
+X2Fzc2lnbmVk 85515
+X2VyYXNl 85516
+IGluZmFuY3k= 85517
+UmV2aWV3ZXI= 85518
+IFJlY29yZGVy 85519
+IHNjbQ== 85520
+IEJpZ2dlc3Q= 85521
+IEdvYQ== 85522
+CVND 85523
+X0xvY2F0aW9u 85524
+X29yaQ== 85525
+a2ls 85526
+cmVuZGU= 85527
+IG1hcnpv 85528
+U3RyaW5nVXRpbA== 85529
+0YPRidC10YHRgtCy 85530
+IEhvd2U= 85531
+xrDhu51p 85532
+Zm9pcw== 85533
+WE1MRWxlbWVudA== 85534
+IGRlcmVjaG9z 85535
+IGR1bmc= 85536
+IFdhaw== 85537
+IEdhdw== 85538
+fVxc 85539
+ISIpOw== 85540
+IEpvaGFubmVzYnVyZw== 85541
+IHN1Ym1hcmluZXM= 85542
+IGFjY29s 85543
+IGZvc3RlcmluZw== 85544
+LgoKCgoKCgoKCgoKCg== 85545
+Lk9wZXJhdG9y 85546
+IG51b3Zh 85547
+IHRyYWplY3Rvcmllcw== 85548
+LnNjaGVkdWxlcnM= 85549
+IEZvbGxvd2Vycw== 85550
+IEFuZGVyc2Vu 85551
+IFBlZ2d5 85552
+LmZyZQ== 85553
+xLFjxLE= 85554
+IGt2cA== 85555
+Y29i 85556
+LWxlbg== 85557
+IG1haWxz 85558
+IGFjY3I= 85559
+IEpBVkE= 85560
+IGFkbWluaXN0ZXJpbmc= 85561
+RGVmYXVsdENlbGxTdHlsZQ== 85562
+IGNsaWNrYWJsZQ== 85563
+IEphY2tldHM= 85564
+O2Rpc3BsYXk= 85565
+IGJyZWFkY3J1bWJz 85566
+Y2hhbA== 85567
+Oic7Cg== 85568
+IEhvdmVy 85569
+dWNjaGluaQ== 85570
+IHRlYw== 85571
+IHN0b3B3YXRjaA== 85572
+X1JlbGVhc2U= 85573
+TWF5b3I= 85574
+4Z62 85575
+IFlhbmtlZQ== 85576
+Y2huZXI= 85577
+QXJ0aWZhY3Q= 85578
+LmJhbm5lcg== 85579
+IGtm 85580
+X3N0dWR5 85581
+Zm92 85582
+IE1lZXRpbmdz 85583
+w7Zt 85584
+IGluanVyaW5n 85585
+L2RvY3VtZW50YXRpb24= 85586
+QkNN 85587
+c3R5bA== 85588
+CXJi 85589
+IG9yaWdpbmFscw== 85590
+IGZsZXJl 85591
+IFRlcnJhcmlh 85592
+dG9rZW5pemVy 85593
+LWxpdGVy 85594
+Jyk7Ig== 85595
+IHBldGl0cw== 85596
+IEJidw== 85597
+IFRoaWVm 85598
+VUlMVElO 85599
+Uk9VVA== 85600
+IHNudWc= 85601
+Pj4p 85602
+LW5pbmU= 85603
+IH1dOwoK 85604
+IEJlbGxldg== 85605
+IGVsw6k= 85606
+IHl5bg== 85607
+eW5hbW8= 85608
+Z2xlcw== 85609
+IHNwZWQ= 85610
+LkJVVFRPTg== 85611
+IGRpc3BlcnNpb24= 85612
+b3VibGVz 85613
+IG5vdmVsbGVy 85614
+Il0uIg== 85615
+IHByaWVzdGhvb2Q= 85616
+ICIiKQoK 85617
+CWd1aQ== 85618
+LWluYw== 85619
+WG1sTm9kZQ== 85620
+IHN0dWRz 85621
+LklzQWN0aXZl 85622
+IHRyw6Q= 85623
+IG9yZGFpbmVk 85624
+IEJ5dGVBcnJheUlucHV0U3RyZWFt 85625
+IHJlcXVlc3RCb2R5 85626
+IFJUUA== 85627
+UkVTVUxUUw== 85628
+KGNvbGw= 85629
+IHJlbG9hZGluZw== 85630
+Lk5hdmlnYXRvcg== 85631
+X2NvdW50ZXJz 85632
+IGJ1ZGRpbmc= 85633
+IGxpY2Vuc2Vl 85634
+b2xvZ2k= 85635
+IHPhuqNu 85636
+IEtpcw== 85637
+IEZsYXR0ZW4= 85638
+X3ByaQ== 85639
+IGFwcHJvcHJpYXRpb24= 85640
+6K+E6K66 85641
+X1JTUA== 85642
+Y29tYmF0 85643
+X1BH 85644
+IGhpc3RvZ3JhbXM= 85645
+ZHE= 85646
+RW50ZXJwcmlzZQ== 85647
+IE5PQUE= 85648
+IFNwZWVkd2F5 85649
+IGJhZ2k= 85650
+IEJld2VydA== 85651
+RmxvYXRpbmc= 85652
+IEtpbWJlcmx5 85653
+UHJvc2Vj 85654
+SmltbXk= 85655
+IEVsaWFz 85656
+IGFyYml0cmFyaWx5 85657
+IOS9v+eUqA== 85658
+IENvdW50cw== 85659
+dXN0ZQ== 85660
+Rmlyc3RDaGlsZA== 85661
+IENsZWFucw== 85662
+LnB1cmNoYXNl 85663
+IGludGVycG9sYXRlZA== 85664
+IGJ1aWxkdXA= 85665
+X1NURU5DSUw= 85666
+RWd5cHQ= 85667
+IGF1cmU= 85668
+LnRydXRo 85669
+ZmVvZg== 85670
+IEdpbQ== 85671
+b2NhY2hl 85672
+IFV0dGFy 85673
+X0NPTVBMRVRFRA== 85674
+U2Vlbg== 85675
+IE5hcG9saQ== 85676
+KGRt 85677
+IGdyaXR0eQ== 85678
+LmVudGVycHJpc2U= 85679
+Y29uZXhhbw== 85680
+IGdhdGhlcnM= 85681
+IHNldFNlYXJjaA== 85682
+IENsaWZmb3Jk 85683
+IFNuYXBl 85684
+IFNhbHZhdGlvbg== 85685
+TG9naW5Gb3Jt 85686
+Q3JpdGljYWxTZWN0aW9u 85687
+LnVzZXJkZXRhaWxz 85688
+IHJlcGFpbnQ= 85689
+44GC44KK44GM44Go44GG 85690
+SHVudGVy 85691
+WmVu 85692
+VGlueQ== 85693
+bWxhbmQ= 85694
+ZXJ0aWw= 85695
+CWJ1ZmY= 85696
+X09mZnNldA== 85697
+IHNtZWxsZWQ= 85698
+Uml2ZXI= 85699
+LXRvcGlj 85700
+IGFjb21w 85701
+IFJvdXRlU2VydmljZVByb3ZpZGVy 85702
+IDwr 85703
+b21icw== 85704
+IENvb3BlcmF0aXZl 85705
+IHNldWxl 85706
+IGFpbWU= 85707
+c2hvdWxkUmVjZWl2ZQ== 85708
+SG9uZw== 85709
+IG9hc2lz 85710
+IEdlbWluaQ== 85711
+cmFwaWQ= 85712
+RHVw 85713
+KFF0R3Vp 85714
+b2RvbnQ= 85715
+LWdudQ== 85716
+IFNlbGVuaXVt 85717
+Jyk/Pjwv 85718
+IE5vcGU= 85719
+R3JlYXRlclRoYW4= 85720
+Lk9ic2VydmVy 85721
+IEFwcHJvcHJp 85722
+IExvbmVseQ== 85723
+IGhhaXJjdXQ= 85724
+IGFsbGVyZGluZ3M= 85725
+w7NwZXo= 85726
+esWR 85727
+IHNsdW1w 85728
+IEdpbnM= 85729
+IGdpb3JuaQ== 85730
+IHBhcGVyYmFjaw== 85731
+LkZpbGVSZWFkZXI= 85732
+ZGFm 85733
+Y3JlZHM= 85734
+dHlwaW5ncw== 85735
+ZGVoeWRl 85736
+Y29pbA== 85737
+U291dGhlcm4= 85738
+IG1vdXNlQ2xpY2tlZA== 85739
+emVpY2huZXQ= 85740
+dXNlclJlcG9zaXRvcnk= 85741
+RGVzdHJveWVk 85742
+aW50ZXJuZXQ= 85743
+IEVpZA== 85744
+IGxpbmtlcg== 85745
+4oCZQg== 85746
+IHNsYXVnaHRlcmVk 85747
+IFBlcnI= 85748
+CVJ1bnRpbWVPYmplY3Q= 85749
+c2FpZGE= 85750
+IHBhZ2VDb3VudA== 85751
+IFJhbmRvbHBo 85752
+IEpOSUVudg== 85753
+X3N1cGVydXNlcg== 85754
+LWRpcmVjdGVk 85755
+IElEYg== 85756
+IEJlcm5hcmRpbm8= 85757
+IE5pbnRo 85758
+IEFsZ29yaXRobXM= 85759
+YmRi 85760
+QHRlc3RhYmxl 85761
+LmFybQ== 85762
+YmVsbGlvbg== 85763
+KHNpZA== 85764
+IGJyaWVmZWQ= 85765
+4pWX 85766
+6YWN572u 85767
+IFVtYQ== 85768
+IEluZGljZXM= 85769
+IEJ1Y2NhbmU= 85770
+IGF5YW50 85771
+RnJlZWRvbQ== 85772
+IFl1cmk= 85773
+ZXRzaw== 85774
+X1Bo 85775
+IGl0YWxpYQ== 85776
+Y2xvc2luZw== 85777
+IHdyaXN0cw== 85778
+ICp9 85779
+c2VjdXRpdmU= 85780
+RW52aWFy 85781
+cmFpdGg= 85782
+IEhhd3Ro 85783
+15M= 85784
+ICoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKgo= 85785
+cGFnZVRpdGxl 85786
+IGRoY3A= 85787
+IOyLpO2WiQ== 85788
+d2lzaGxpc3Q= 85789
+IGJsYW1lcw== 85790
+IHNpZGw= 85791
+dWRkZWQ= 85792
+IGNvbnRyb3ZlcnNpZXM= 85793
+6I8= 85794
+KHVzZXJEYXRh 85795
+IGxpbnNwYWNl 85796
+IERpZmZlcmVuY2Vz 85797
+X2RlcG9zaXQ= 85798
+REVUQUlM 85799
+LmRlY2s= 85800
+IGNvbnRpbnV1bQ== 85801
+IHNhY3JhbQ== 85802
+b21pdGU= 85803
+IG5mbA== 85804
+Q3Vt 85805
+IHNvZg== 85806
+IGV2aWxz 85807
+IGVudGlkYWQ= 85808
+CXNvY2s= 85809
+IExlbW1h 85810
+LlNoaXA= 85811
+IHppZw== 85812
+VGVsZWZvbmU= 85813
+SURFUw== 85814
+IE51bWVyb3Vz 85815
+Lm1ldHJpYw== 85816
+aW5zbg== 85817
+IGNvcHlyaWdodHM= 85818
+IGNvbXBsaWNhdGlvbg== 85819
+IFVSTFNlc3Npb24= 85820
+IGRpcHBpbmc= 85821
+IGNx 85822
+IEJ1c3R5 85823
+cmVsYXRpb25zaGlwcw== 85824
+IENvcnZldHRl 85825
+U3VtbW9u 85826
+ZXZlbnROYW1l 85827
+SXNzdWVz 85828
+IGlycmVzaXN0aWJsZQ== 85829
+IGdyaXM= 85830
+Q0FTQ0FERQ== 85831
+IHBhdXNlcw== 85832
+IGxlZGdl 85833
+X0dQ 85834
+LkltcA== 85835
+IG9yZGVyYnk= 85836
+IE9yZ2FuaXplcg== 85837
+IEdyZWVud2ljaA== 85838
+T2Fr 85839
+LW1lbWJlcnM= 85840
+IFdlYkdM 85841
+IGdhbW0= 85842
+bW9kdWxlSWQ= 85843
+IGZ1bGxQYXRo 85844
+bG9nZW4= 85845
+KGV2ZW50TmFtZQ== 85846
+KCIuIik7Cg== 85847
+IGtyaXN0 85848
+IGNsaWZmcw== 85849
+IFBlcmNlcHRpb24= 85850
+RVRJTkc= 85851
+IGzhuqFp 85852
+IGludGVydg== 85853
+IG9wcG9ydHVu 85854
+IEp1ZGdlcw== 85855
+IENvbWJpbmF0aW9u 85856
+Y29udGludWVk 85857
+Y29ubw== 85858
+LmRyYXdSZWN0 85859
+LkNvbXBvc2U= 85860
+IHNpZ3VpZW50ZXM= 85861
+IER1ZmZ5 85862
+KGVuY29kaW5n 85863
+IFZ1bGthbg== 85864
+IEdlcnI= 85865
+IHBhcmZhaXQ= 85866
+KHl5 85867
+X1RIQU4= 85868
+IGdldFNlcnZpY2U= 85869
+X09SRA== 85870
+LGVw 85871
+Z3JhcGhpYw== 85872
+IFF1ZXJpZXM= 85873
+IHBhcnRpY3VsYXJz 85874
+IEhhdmFuYQ== 85875
+PW8= 85876
+ZmFucw== 85877
+IHVuaWxhdGVyYWw= 85878
+IFJGSUQ= 85879
+Q29tcGF0aWJpbGl0eQ== 85880
+c3RyYW5k 85881
+IHdha3R1 85882
+IHF1YWxpZGFkZQ== 85883
+UHJvcGVydHlQYXJhbXM= 85884
+cmV0ZW4= 85885
+KGhvc3RuYW1l 85886
+X0NBUg== 85887
+IHdpZGVuZWQ= 85888
+IFhwZXJpYQ== 85889
+cG9sbG8= 85890
+QWJvcnQ= 85891
+ISEpCg== 85892
+IFdhZw== 85893
+LS0r 85894
+INGC0YA= 85895
+IFJlY3Vyc2l2ZQ== 85896
+IGFubmU= 85897
+IEdhbWVwbGF5 85898
+PENsaWVudA== 85899
+LlVzYWdl 85900
+IElTU1VF 85901
+IGpkYmM= 85902
+aXNvcnk= 85903
+X21hY3Jvcw== 85904
+cGlja2xl 85905
+LmdhbWVzZXJ2ZXI= 85906
+IHR2Yg== 85907
+0YLRiw== 85908
+Lk9QRU4= 85909
+IHByZWRldGVybWluZWQ= 85910
+IHNpcmU= 85911
+CQkJDQoJCQkNCg== 85912
+aXNjcmltaW5hdGlvbg== 85913
+IHJlcGVhbGVk 85914
+IGNvbmplY3Q= 85915
+IFByZWNvbmRpdGlvbnM= 85916
+IHRpbHRlZA== 85917
+IGlub2M= 85918
+IGV1cm9wZWFu 85919
+YWJk 85920
+X0RFTEVURUQ= 85921
+IC0s 85922
+4oCTYW5k 85923
+QEZYTUw= 85924
+ICldCg== 85925
+UklORw== 85926
+IGFsaXF1YQ== 85927
+IGdydWVzb21l 85928
+IEluY2hlcw== 85929
+UGxheWVk 85930
+KGNvbmZpcm0= 85931
+IE5WSUM= 85932
+X1RvdGFs 85933
+aXNhcw== 85934
+IE9uaW9u 85935
+IHNlY29uZG8= 85936
+IEdldFVzZXI= 85937
+XFVybA== 85938
+X2Fic3RyYWN0 85939
+IGRldmV6 85940
+IGN1cGJvYXJk 85941
+dGV4dHM= 85942
+IElzbGVz 85943
+X01BVEg= 85944
+U2tpcHBpbmc= 85945
+X2Nvc3Rz 85946
+PW91dHB1dA== 85947
+aWJpbGk= 85948
+IGtudWxs 85949
+X2NvZWZmcw== 85950
+X2F0dGVtcHQ= 85951
+CVJ1bg== 85952
+Z2VuZGVu 85953
+cnVwdGVk 85954
+IHNvYXJlZA== 85955
+X2hz 85956
+IGFkb3B0cw== 85957
+X01PRElGSUVE 85958
+XEZhY3Rvcmllcw== 85959
+IFN3ZWF0 85960
+IGRva3VtZW50 85961
+IFRlbGVzY29wZQ== 85962
+IEZpeGVz 85963
+b3JxdWU= 85964
+LkNoYXJ0aW5n 85965
+X0RBQw== 85966
+IHNlY3JldGlvbg== 85967
+IHJoZXRvcmljYWw= 85968
+UGVyZmls 85969
+IG3DtmNodGVu 85970
+LCcs 85971
+IHZpZXdQYWdlcg== 85972
+QlVZ 85973
+IG9uRm9jdXM= 85974
+b3NhbHM= 85975
+IGJpc2N1aXRz 85976
+IHZib3g= 85977
+IGZvcmNlZnVsbHk= 85978
+TmludGVuZG8= 85979
+IHbDoWw= 85980
+IGNsYW5z 85981
+ZnJvZw== 85982
+IGJvcmRlclRvcA== 85983
+QnJpZWY= 85984
+LkJvcmRlckZhY3Rvcnk= 85985
+LXNlcnZpbmc= 85986
+IHF1b3RhdGlvbnM= 85987
+IEdhcm5lcg== 85988
+IEFsbGV5 85989
+Ij8+Cg== 85990
+KHNjYW5uZXI= 85991
+IGVudGFpbA== 85992
+IC8vPT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PQ== 85993
+KGA8 85994
+LmRlc2NyaXBjaW9u 85995
+X0J5 85996
+IOyalA== 85997
+IHBha2lzdGFu 85998
+ZWxobw== 85999
+RW5naW5lZXJpbmc= 86000
+IGJvb24= 86001
+IExvb3Nl 86002
+aWVyZ2U= 86003
+U2VuYXRl 86004
+IExZ 86005
+cmVzcG9uc2VPYmplY3Q= 86006
+aW9yZQ== 86007
+w6FnZW5lcw== 86008
+IOS4jQ== 86009
+IGFkZEFjdGlvbg== 86010
+IE1BQ0hJTkU= 86011
+YW5na2Fu 86012
+X21p 86013
+X0FSUg== 86014
+TGl0ZXI= 86015
+T0xG 86016
+IHN1cHBlcg== 86017
+IHBhdGhNYXRjaA== 86018
+IE9ycg== 86019
+w61k 86020
+KGZpbHRlcmVk 86021
+IGF1dGhUb2tlbg== 86022
+IOKEnQ== 86023
+LTwv 86024
+KHRlbnNvcg== 86025
+IHJldm9sdmluZw== 86026
+IGluaWNpYXI= 86027
+IFNjaHdhcno= 86028
+ZGVmZ3JvdXA= 86029
+Y29sdW1uTmFtZQ== 86030
+X3RyYWplY3Rvcnk= 86031
+4LmE4Lih 86032
+ZWdhc3Vz 86033
+IOydtOumhA== 86034
+IGVhdGVy 86035
+IHVuZGVyZXN0aW1hdGVk 86036
+IGJ0Yw== 86037
+IOyEoO2DnQ== 86038
+ZW5hZGU= 86039
+IFNFWFA= 86040
+ZW1vdXRo 86041
+T01FVFJZ 86042
+ZW50ZXJlZA== 86043
+LnBob25lTnVtYmVy 86044
+IFZvYw== 86045
+IGV4Y2Vzc2l2ZWx5 86046
+IENBVEVHT1JZ 86047
+X1VQREFURUQ= 86048
+IG1vbmFyY2h5 86049
+YXJjaHM= 86050
+IGNhdmVhdA== 86051
+d2lucw== 86052
+IHBsYXlib29r 86053
+c2hhZGU= 86054
+IHNldFVzZXJuYW1l 86055
+IGFjY3VzZXM= 86056
+IG1vxbxsaQ== 86057
+IGxvcnNxdWU= 86058
+IGFqdWQ= 86059
+aGVhcg== 86060
+IHBzeWNvcGc= 86061
+KEVD 86062
+IG1lbGFuY2g= 86063
+dGhyb2F0 86064
+bmlo 86065
+V09PRA== 86066
+IHZvbHRz 86067
+X05FRUQ= 86068
+X3doaWxl 86069
+IFJpZGVycw== 86070
+16I= 86071
+IC4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4= 86072
+TmV0TWVzc2FnZQ== 86073
+TW9kaWZpY2Fy 86074
+LnNlc3M= 86075
+KCIiKSw= 86076
+6Kmx 86077
+IHByYWlzZXM= 86078
+IGxjbQ== 86079
+IG1ha2VzaGlmdA== 86080
+IE5PVEhJTkc= 86081
+IEFydGlmYWN0 86082
+d2lq 86083
+dHlwaWNhbGx5 86084
+KCde 86085
+PGs= 86086
+xJlraQ== 86087
+INC+0YLQv9GA0LDQsg== 86088
+IOE= 86089
+IGRlZlN0eWxlQXR0cg== 86090
+aW5jZXJlbHk= 86091
+w6lzdA== 86092
+SW5UaGU= 86093
+c3RpbWU= 86094
+IGZyYWdtZW50ZWQ= 86095
+IGZyeWluZw== 86096
+Z3JpbQ== 86097
+ZmllbGRuYW1l 86098
+IGNyb3NzaW5ncw== 86099
+IGFtbw== 86100
+X09wdGlvbnM= 86101
+IGhhaXJlZA== 86102
+L3dhaXQ= 86103
+IHBhcmNobWVudA== 86104
+IGNyZWF0ZUVsZW1lbnQ= 86105
+SHR0cFN0YXR1cw== 86106
+IGVya2zDpA== 86107
+aXp6YXppb25l 86108
+dGh1bWJuYWlscw== 86109
+bG92YWs= 86110
+IGJhbmdpbmc= 86111
+IHVuaW1hZ2lu 86112
+IE92ZW4= 86113
+KEF1ZGlv 86114
+YXBzdWxhdGlvbg== 86115
+IHJhbXBz 86116
+55Wq 86117
+IFdvb2R3YXJk 86118
+6Zeu6aKY 86119
+cm9ncmFt 86120
+0YDRg9C/0L8= 86121
+IFdvcnNoaXA= 86122
+IHN0YWQ= 86123
+IG5lZg== 86124
+IEphdW5l 86125
+YnV6eg== 86126
+YWx1cw== 86127
+T05ET04= 86128
+LXN1 86129
+IG91dHBhdGllbnQ= 86130
+amFj 86131
+RVNQTg== 86132
+w6ZsbGFuZA== 86133
+bXlw 86134
+IHNob3dyb29t 86135
+TW9udHNlcnJhdA== 86136
+LmdldERyYXdhYmxl 86137
+w6l0aWNv 86138
+IHbDoG8= 86139
+SUJD 86140
+RXhwZXJ0cw== 86141
+TWJwcw== 86142
+Ij4j 86143
+IG5vcnRoZWFzdGVybg== 86144
+IE1lag== 86145
+KG1pbGxpc2Vjb25kcw== 86146
+4oCUYWxs 86147
+LXJlYWNoaW5n 86148
+CXJlcGx5 86149
+P3R5cGU= 86150
+IGNydXo= 86151
+ID48Pw== 86152
+LkZpbmRBc3luYw== 86153
+KGNpcmNsZQ== 86154
+IFNoaW5l 86155
+IE1hdmVyaWNrcw== 86156
+IHNhZmV6b25l 86157
+IExhemFy 86158
+IGRpc3RpbmN0aW9ucw== 86159
+LWZlZWQ= 86160
+LnNldENvZGU= 86161
+4KSq 86162
+IHTDqWM= 86163
+IHNlcmFpdA== 86164
+IE1JQ1JP 86165
+IENvbnN1bXB0aW9u 86166
+Xm4= 86167
+LmZyb21GdW5jdGlvbg== 86168
+IFJ1cGVydA== 86169
+IGhhcmFzc2luZw== 86170
+LUNv 86171
+IHRpaw== 86172
+IFN2ZW5z 86173
+LkltYWdlQWxpZ24= 86174
+X3doaXRlc3BhY2U= 86175
+IGtpY2tlcg== 86176
+IGNhZGFzdHI= 86177
+Q2V0dGU= 86178
+X25vdGlmaWVy 86179
+IEZBRw== 86180
+IHByaW1hbA== 86181
+IGhvbW9nZW5lb3Vz 86182
+IGFzdHJvbm9taWNhbA== 86183
+IEJ1cnI= 86184
+LkNvcHlUbw== 86185
+Z3JhcGhz 86186
+aXR0bw== 86187
+T1NI 86188
+IHNob3dBbGVydA== 86189
+YW50cm8= 86190
+ImRlZmF1bHQ= 86191
+ZW1waGFzaXM= 86192
+V2Vp 86193
+b3V0Y29tZQ== 86194
+IGFrdQ== 86195
+IGNhbXBhaWduZWQ= 86196
+KSI7Cgo= 86197
+IHJlY2lwcm9jYWw= 86198
+IFJveWFsZQ== 86199
+ICMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyM= 86200
+LlRJTUU= 86201
+IDwq 86202
+T2Zmc2V0VGFibGU= 86203
+Y29tcG91bmQ= 86204
+d2FpdEZvcg== 86205
+dWVnb3M= 86206
+LnN0cmluZ1ZhbHVl 86207
+X1NDSEVE 86208
+IGZhdHQ= 86209
+wqDCoMKgwqDCoMKgwqA= 86210
+LmRpc2s= 86211
+IHdhcnBlZA== 86212
+IGNyaXRpcXVlcw== 86213
+PycKCg== 86214
+KHNraWxs 86215
+IG1vZGVyYXRlZA== 86216
+X2VsZW1z 86217
+S2V5TGlzdGVuZXI= 86218
+IHNlYXNvbmluZw== 86219
+IHBvdXJxdW9p 86220
+X0ZE 86221
+cHJk 86222
+aHlh 86223
+Ij7Dlzwv 86224
+IG5vdXZlYXV4 86225
+IGdpdmVhd2F5cw== 86226
+5oql6YGT 86227
+TWFpbk1lbnU= 86228
+Oy8q 86229
+IEdyb24= 86230
+cXVpdm9z 86231
+Ow0KDQoNCg0K 86232
+IGluZmx1ZW5jZXJz 86233
+KFRJTQ== 86234
+U2hhcmVkUHRy 86235
+IGRpYWxvZ3M= 86236
+KioqKiovCg== 86237
+LkF0b21pYw== 86238
+IE1vcnNl 86239
+IHBjYg== 86240
+IEFQQw== 86241
+LkltbXV0YWJsZQ== 86242
+IHJlc2l6aW5n 86243
+IEx1bXB1cg== 86244
+IEh1bWFuaXRpZXM= 86245
+X3NvbHZl 86246
+X2h1bWFu 86247
+ZXR5bA== 86248
+IEh1cnQ= 86249
+IEVzdGFibGlzaGVk 86250
+Y2xhcmVk 86251
+IGNvbXBhcnRtZW50cw== 86252
+QmVhbQ== 86253
+X1JN 86254
+LmZhbHNl 86255
+KEdyaWQ= 86256
+IFFTaXpl 86257
+X2ZsZw== 86258
+aXN0aWNh 86259
+PkxvZ2lu 86260
+OlVJQnV0dG9uVHlwZQ== 86261
+IEV4aXRpbmc= 86262
+Y2xhcw== 86263
+IGFyc2Vu 86264
+KG1ldHJpYw== 86265
+cm93c2luZw== 86266
+cXVlcnlTZWxlY3Rvcg== 86267
+X0ZSSUVORA== 86268
+LWlv 86269
+IGNvbmZpc2NhdGVk 86270
+IGRlZmlhbnQ= 86271
+IE1PVE9S 86272
+cmVndW50YQ== 86273
+IE1vcnJvdw== 86274
+IEJlcnM= 86275
+Q3JhaWc= 86276
+IENQQQ== 86277
+IHNleGtvbnRha3Rl 86278
+IHNhbW1lbg== 86279
+L0F1dGg= 86280
+LkxpYg== 86281
+Y3JhcGVy 86282
+aWNlbWFpbA== 86283
+Y3JhdGNo 86284
+IFdpcmVk 86285
+IGFkdmVydGlzZXI= 86286
+IGdldENsaWVudA== 86287
+IHJlc3BvbnNpYmx5 86288
+CVVPYmplY3Q= 86289
+LnNldFJvdGF0aW9u 86290
+LkNvdW50ZXI= 86291
+X0hPVVI= 86292
+VGVzdENhdGVnb3J5 86293
+IGhpbmRzaWdodA== 86294
+XGNvbnRyb2xsZXJz 86295
+d2FsbHM= 86296
+LnNldE1heGltdW0= 86297
+IHB1YmVydHk= 86298
+X3RlYW1z 86299
+X01PREFM 86300
+LkNP 86301
+IGJhZGFzcw== 86302
+KSddLAo= 86303
+w7pzcXVlZGE= 86304
+aXJ1dA== 86305
+Q2hlbHNlYQ== 86306
+LnRyYW5zZm9ybXM= 86307
+IGNhcGl0YWxpc3Rz 86308
+TWFyY2E= 86309
+IEFyeQ== 86310
+LWNvZGVk 86311
+546v 86312
+VVJFRA== 86313
+PFRyYW5zYWN0aW9u 86314
+IFBhcmxpYW1lbnRhcnk= 86315
+KSRf 86316
+IHN1YnRseQ== 86317
+IHNpbGt5 86318
+IERpcnQ= 86319
+IHB1enpsZWQ= 86320
+fScpOwo= 86321
+cXVlc3Rz 86322
+Rm9vdGJhbGw= 86323
+IENvbmZpZGVuY2U= 86324
+dXp1 86325
+YnVsYW4= 86326
+IGh1bW1pbmc= 86327
+bW91c2VlbnRlcg== 86328
+UmV0ZW50aW9u 86329
+IHNkbA== 86330
+b2tlZGV4 86331
+JywnPScsJA== 86332
+IEt1YWxh 86333
+U0FN 86334
+IHRyYW5zZm9ybWF0aXZl 86335
+UEtH 86336
+aWxsdXM= 86337
+IHJvb3Rpbmc= 86338
+IFdpdG5lc3Nlcw== 86339
+IFJhamFzdGhhbg== 86340
+5byg 86341
+LWFkZGVk 86342
+IFRlcnJpdG9yaWVz 86343
+KHNxdWFyZQ== 86344
+cmFiYml0 86345
+X1Jlc291cmNl 86346
+6ZaL 86347
+4LiT 86348
+IHdpbm5pbmdz 86349
+IHNwbGU= 86350
+IGTDqHM= 86351
+IE1EQg== 86352
+w6lydA== 86353
+IE1hdHRpcw== 86354
+YWlsbGVz 86355
+X3dlYWs= 86356
+L2phdg== 86357
+IGNvbGxhcHNlcw== 86358
+ICAgICAgCQk= 86359
+IHN3aXJs 86360
+IE5TU3RyaW5nRnJvbUNsYXNz 86361
+IHZvbHZlcg== 86362
+LlJlY2VpdmU= 86363
+IERleHRlcg== 86364
+IHRhYmxlbmFtZQ== 86365
+cmVhdGl2ZQ== 86366
+LkdldEZpbGVz 86367
+dm9vcg== 86368
+IEhvZQ== 86369
+VkVSTg== 86370
+IE9QQw== 86371
+7YOc 86372
+cmFtaWRz 86373
+54Sh44GX44GV44KT 86374
+U3Bpcml0 86375
+IE5PUA== 86376
+IE1haW50YWlu 86377
+KHNpZ21h 86378
+b3Ry 86379
+TW91c2VDbGlja2Vk 86380
+cXVpZXJkYQ== 86381
+X3dm 86382
+0L7QutCw0Lc= 86383
+YXBwYWJsZQ== 86384
+IEhvbGRlbg== 86385
+IENvdW50ZG93bg== 86386
+LnNpZ21h 86387
+Y2hhbGs= 86388
+YmlsZGVy 86389
+IHZpc2lvbmFyeQ== 86390
+CU9u 86391
+JHVwZGF0ZQ== 86392
+IEdpbmdyaWNo 86393
+cm9vbUlk 86394
+Pk5hbWE= 86395
+IHl5dHlwZQ== 86396
+LkRlY2ltYWxGaWVsZA== 86397
+bWFjcm9z 86398
+LnNldExheW91dFBhcmFtcw== 86399
+IHJubg== 86400
+IElNRGI= 86401
+56eN 86402
+ZW1hbGVz 86403
+IGluY2lkaWR1bnQ= 86404
+UmVzdHJpY3RlZA== 86405
+IHBlZGFscw== 86406
+IEpvZw== 86407
+IEFkYXB0aXZl 86408
+IGZhZGVz 86409
+LkV2ZW50U3lzdGVtcw== 86410
+IFBhaWdl 86411
+IHNlaXM= 86412
+IGFwcHJvcHJpYXRlZA== 86413
+RkZU 86414
+Z29yaXQ= 86415
+IGNvaGVzaXZl 86416
+IE5pY2h0 86417
+X3dvcmtmbG93 86418
+bGl1cw== 86419
+IEZvcnRuaXRl 86420
+X0lX 86421
+QXRQYXRo 86422
+IGludG94aWNhdGVk 86423
+bm9zdGlj 86424
+QmluQ29udGVudA== 86425
+LnJlZHVjZXI= 86426
+KT8K 86427
+J10q 86428
+IE9ic2VydmF0aW9u 86429
+X3ByZWZz 86430
+LnJlc29sdXRpb24= 86431
+LlBheWxvYWQ= 86432
+TWl4ZWQ= 86433
+IFJhaQ== 86434
+KHBkZXY= 86435
+KEAo 86436
+aWNvdA== 86437
+JGlz 86438
+IGNyZWU= 86439
+Pz0uKg== 86440
+LlFMYWJlbA== 86441
+IEdlb3JnaWFu 86442
+eENB 86443
+IGRlZmljaWVudA== 86444
+dGhyb3du 86445
+IHJhcGluZw== 86446
+dXBvcw== 86447
+CWNsaQ== 86448
+Z2V0Vmlldw== 86449
+SGlnaGxpZ2h0ZWQ= 86450
+Q3BwR3VpZA== 86451
+IHJlbGVnYXRlZA== 86452
+IGxlYWRlcmJvYXJk 86453
+UmVjZWl2ZVByb3Bz 86454
+Lmhhcg== 86455
+IGNvbmRp 86456
+SU1JVElWRQ== 86457
+IE1jQ2FydA== 86458
+KXRocm93cw== 86459
+YnVpZQ== 86460
+YnVhaA== 86461
+LmNvZWZm 86462
+IEF1c3NpZQ== 86463
+IFNhYmhh 86464
+KGZhYnM= 86465
+cmVsYW5k 86466
+IEbDtnI= 86467
+YmFyYW5n 86468
+LHRvcA== 86469
+CWVsc2lm 86470
+U3RlcFRocm91Z2g= 86471
+IHNrZXdlZA== 86472
+IFVudXNlZA== 86473
+Jyl9Pgo= 86474
+WWU= 86475
+Y2FsbGVl 86476
+SGliZXJuYXRl 86477
+IEV2ZXJlc3Q= 86478
+aW1wb3J0RGVmYXVsdA== 86479
+IHRhcm4= 86480
+IE5vd2FkYXlz 86481
+WUE= 86482
+IENoYWxsZW5nZXI= 86483
+X2xvZ2ljYWw= 86484
+IGNyZWF0ZURhdGU= 86485
+IEdsb3VjZQ== 86486
+IGN1YW50bw== 86487
+IEhBUg== 86488
+IENoaWxs 86489
+Il4= 86490
+IGN1cnNvcw== 86491
+LkVPRg== 86492
+IG5pamU= 86493
+IGFuZ2VyZWQ= 86494
+b2N1c2luZw== 86495
+PENvbnRhY3Q= 86496
+IEF0bW9zcGhlcmlj 86497
+IFdvbGZnYW5n 86498
+IEJK 86499
+Y2hpbGRz 86500
+IEJ1Z3M= 86501
+X0hFWA== 86502
+KFNQ 86503
+w6Vs 86504
+X2V2YWx1YXRpb24= 86505
+IFJBTkdF 86506
+IFNPUA== 86507
+X3Rva2VuaXpl 86508
+bXNnaWQ= 86509
+IHJleA== 86510
+CXBt 86511
+Q29weWluZw== 86512
+Kkw= 86513
+RGFsbGFz 86514
+LVN0YXRl 86515
+dWxmaWxs 86516
+IGJ5xYJv 86517
+IENvbnRyYWN0b3I= 86518
+RGlkbg== 86519
+QVNURQ== 86520
+IFBJTw== 86521
+LlRlbGU= 86522
+LndhdGVy 86523
+ZGV6 86524
+IGFuZ3JpbHk= 86525
+IHV0aWxpc2F0ZXVy 86526
+IHZvcnRleA== 86527
+Q29ycG9yYXRl 86528
+YXR1cmFz 86529
+IHByaXplZA== 86530
+J3VybA== 86531
+dWdsaWZ5 86532
+IGltcHVsc2Vz 86533
+IGNocm9ub2xvZ2ljYWw= 86534
+cGxlbg== 86535
+X25hbWE= 86536
+L29u 86537
+IE9mZmljZXM= 86538
+IENQSQ== 86539
+IEFmdGVyd2FyZHM= 86540
+44GT44KT44Gr 86541
+X0JMT0NLUw== 86542
+R3JhY2U= 86543
+LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKg== 86544
+IEthYnVs 86545
+IOaIkA== 86546
+IExlaXB6aWc= 86547
+4Kao 86548
+U2hvY2s= 86549
+QXVz 86550
+IG11cm0= 86551
+X3N0YXJ0cw== 86552
+IGLDpA== 86553
+IFp5 86554
+IkY= 86555
+LXJpZ2h0cw== 86556
+IGJlaGF2aW5n 86557
+KCc+ 86558
+IG1vc3F1ZXM= 86559
+KndpZHRo 86560
+Ii8+Ljwv 86561
+LnVuc3BsYXNo 86562
+LmdldEFjdGl2aXR5 86563
+VVU= 86564
+IFNoYWs= 86565
+X3Jn 86566
+X0VxdWFscw== 86567
+J2h0dHBz 86568
+IE94eWdlbg== 86569
+IFBvcnRzbW91dGg= 86570
+4oCUb25l 86571
+IHdhdGNoZXJz 86572
+IENob2k= 86573
+IHNpZGVy 86574
+cGVjdHJhbA== 86575
+bXF0dA== 86576
+LmNyZWF0ZVVzZXI= 86577
+amVjdGl2ZXM= 86578
+dXJtYQ== 86579
+UmVnaXN0cg== 86580
+UGVyc29uYWxseQ== 86581
+PWtleQ== 86582
+IE5FTw== 86583
+IEZBUXM= 86584
+aWJpbGlkYWRl 86585
+Y2tzw6U= 86586
+IENvbGxhYm9yYXRpb24= 86587
+CWxibA== 86588
+LlNFUlZFUg== 86589
+IGFib3VuZA== 86590
+IEJlbmU= 86591
+d2FudGVk 86592
+LWhvbGU= 86593
+IG11dHRlcmVk 86594
+IHBlcA== 86595
+bmVzYw== 86596
+LlVwbG9hZA== 86597
+c2VtaQ== 86598
+eEVD 86599
+Jz4iKw== 86600
+IGVtYnJ5bw== 86601
+IEZpeGVkVXBkYXRl 86602
+Q2FzdGxl 86603
+Lm1vZGVsbw== 86604
+IHBscw== 86605
+IGVudmVsb3Blcw== 86606
+X3JlbWFpbg== 86607
+UXVhcnRlcg== 86608
+YWxlcnRWaWV3 86609
+X2Zvcm1hdHRlZA== 86610
+IGxhc2hlcw== 86611
+emVsZg== 86612
+aG9tbWU= 86613
+LmZsb3dMYXlvdXRQYW5lbA== 86614
+YWlycG9ydA== 86615
+IE1lbW9yaWVz 86616
+IEhFUk8= 86617
+IEFzaHRvbg== 86618
+IGV4aGliaXRpbmc= 86619
+KFNFTEVDVA== 86620
+U3VibWlzc2lvbg== 86621
+U3R1ZmY= 86622
+X3N1bg== 86623
+IHBlcsOtb2Rv 86624
+IGRlc3ByZQ== 86625
+CWVkaXQ= 86626
+IER0eXBl 86627
+Y2Vzc2l2ZQ== 86628
+YWFk 86629
+IGRlc2Nvbg== 86630
+bmVsbHk= 86631
+IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQ== 86632
+IHNjcmlwdHVyZXM= 86633
+IG9uVmlld0NyZWF0ZWQ= 86634
+IEVWRQ== 86635
+IEJhbGxldA== 86636
+O307Cg== 86637
+VURP 86638
+IFByb2JhYmlsaXR5 86639
+cXVpcnJlbA== 86640
+Q29udGFpbmluZw== 86641
+IFBsYXQ= 86642
+6KI= 86643
+L2JpdA== 86644
+IEpRdWVyeQ== 86645
+IHRpZW5lcg== 86646
+L2RyaXZlcnM= 86647
+IFByZXNpZGVuY3k= 86648
+XHVE 86649
+IEl2ZQ== 86650
+aWVuYQ== 86651
+IGh5cGVycw== 86652
+IFNwZW5kaW5n 86653
+PFc= 86654
+IFRIRU1F 86655
+IHVzZXJQcm9maWxl 86656
+IGFubnVt 86657
+cmV0d2VldGVk 86658
+IFwnJw== 86659
+YnVuZGxlcw== 86660
+KCk8Lw== 86661
+IEN5bGluZGVy 86662
+IG91dGxpZXJz 86663
+IGRpc3NlbWluYXRpb24= 86664
+L2FwdA== 86665
+IE5hdGFzaGE= 86666
+IHJlbmRlckl0ZW0= 86667
+IENoaXBz 86668
+IHJvdW5kdXA= 86669
+IGltcHJvdg== 86670
+IGNvbW11bmljYXRvcg== 86671
+IHNreXBl 86672
+TU1N 86673
+cmlqaw== 86674
+LlBsYWNl 86675
+IHBhc2E= 86676
+IFNZTkM= 86677
+ZW5zaXM= 86678
+IEF4ZWw= 86679
+ZW7Dp2E= 86680
+Z2V0U3RyaW5nRXh0cmE= 86681
+YWJpbGl0w6k= 86682
+IGVtYWNz 86683
+LmdyYXZpdHk= 86684
+IGNoZXJpc2g= 86685
+IElTU04= 86686
+CUpzb24= 86687
+dXlv 86688
+IHVwdGltZQ== 86689
+IHJhbmRvbW5lc3M= 86690
+IGxvZnR5 86691
+Qm93 86692
+Q3JlYXI= 86693
+IHRvd2VyaW5n 86694
+Y2F0ZWdvcmll 86695
+L3Bvd2Vy 86696
+L3dlbGNvbWU= 86697
+fFI= 86698
+IGJhcnJpbmc= 86699
+aWRpYQ== 86700
+cXVhbQ== 86701
+w7pkbw== 86702
+ZXhwZXJpbWVudGFs 86703
+IGNsYQ== 86704
+IGN1cmF0b3I= 86705
+cmVhbWJsZQ== 86706
+aW5keA== 86707
+TExM 86708
+IH0pOg== 86709
+IGhpc3RvaXJl 86710
+c2ltdWxhdGU= 86711
+PEFueQ== 86712
+IEdsYW0= 86713
+IEJhcmc= 86714
+VmFsdWVDb2xsZWN0aW9u 86715
+IEluc3RpdHV0bw== 86716
+QXNTdHJpbmdBc3luYw== 86717
+IGFkZWM= 86718
+IGZlbGxvd3M= 86719
+cGlwZXM= 86720
+IFBsYWNlaG9sZGVy 86721
+IEtn 86722
+IEFsYnVtcw== 86723
+ICooKg== 86724
+X0dPT0Q= 86725
+KSIsDQo= 86726
+LlFSZWN0 86727
+w6Jt 86728
+IH0NDQo= 86729
+TWFyc2hhbEFz 86730
+QmFjaGVsb3I= 86731
+IEJhcmNvZGU= 86732
+IFRyYXZlcnNl 86733
+IG9kaW8= 86734
+LnNldFBhcmVudA== 86735
+IHNlbWljb25kdWN0b3I= 86736
+QUxMRUw= 86737
+IGJhbnF1ZXQ= 86738
+IE5ld3NwYXBlcg== 86739
+RE9NTm9kZQ== 86740
+IE5hdWdodHk= 86741
+Rm9ybWF0dGVkTWVzc2FnZQ== 86742
+IGRpc3J1cHRpbmc= 86743
+5piT 86744
+IGxvb2thaGVhZA== 86745
+IGdyYXR1aXRlcw== 86746
+IGNoZWVzeQ== 86747
+IFNQRg== 86748
+blA= 86749
+IGFyc29u 86750
+IGFudGVubmFz 86751
+X01JRERMRQ== 86752
+X01BTExPQw== 86753
+LmdvQmFjaw== 86754
+IFByb3Bvc2l0aW9u 86755
+IE1pY2hhZWxz 86756
+X3Byb29m 86757
+INC90LDQudC0 86758
+w6R0emxpY2g= 86759
+LXJvbGw= 86760
+RURB 86761
+w6Fuw60= 86762
+Z292ZXJubWVudA== 86763
+w7Z0dA== 86764
+IEVzdGFibGlzaG1lbnQ= 86765
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA= 86766
+X0hJVA== 86767
+IEFJTQ== 86768
+YWRvbA== 86769
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCg== 86770
+X1JFRkVSRVI= 86771
+IGZvcm1hdERhdGU= 86772
+dWN0b3Nl 86773
+IGRvd25sb2FkZXI= 86774
+VGV4dEVkaXQ= 86775
+IGRpc2FybQ== 86776
+IEhBUFA= 86777
+0L7QtNCw 86778
+ISkuCgo= 86779
+L3Byb2Nlc3M= 86780
+IGJyYWluc3Rvcm0= 86781
+IE9SSUdJTkFM 86782
+LlRhYmxlTmFtZQ== 86783
+IEtvc3Rlbmxvc2U= 86784
+IGTDqXA= 86785
+IElzYWJlbA== 86786
+IGFzdHJvbm9tZXJz 86787
+UVVJUkVT 86788
+OiIt 86789
+dXBsb2FkZXI= 86790
+Oi8vJQ== 86791
+IGFtaXM= 86792
+RmlsZVZlcnNpb24= 86793
+ICwk 86794
+Y29vaw== 86795
+LFNJR05BTA== 86796
+JywvLw== 86797
+IFN1cHByZXNz 86798
+IExhdGlub3M= 86799
+IHdpdGhob2xk 86800
+IG1uZW1vbmlj 86801
+X0NZQ0xF 86802
+IGhvZA== 86803
+IFdvcnNl 86804
+ZXJkZQ== 86805
+IHR5cGVpZA== 86806
+CWV4cG9ydHM= 86807
+IGFjaHRlcg== 86808
+b3Nhcw== 86809
+IGZvb3Rub3Rl 86810
+aGFuaQ== 86811
+KFBhcmFtZXRlcg== 86812
+CVJlbmRlcg== 86813
+IFlZU1RBQ0s= 86814
+IFhJSQ== 86815
+IHNpZGVu 86816
+IGFyb3VzYWw= 86817
+IE9P 86818
+Qml0dGU= 86819
+IG5lYXJlcg== 86820
+IENpcmN1cw== 86821
+IENPTE9SUw== 86822
+IHdpZWxkaW5n 86823
+LkZpbGVTeXN0ZW0= 86824
+IGdyaWxsZQ== 86825
+IERvdmVy 86826
+CiAgICAgCg== 86827
+KGdlb21ldHJ5 86828
+IHN0YXBsZXM= 86829
+IEFubm91bmNlbWVudA== 86830
+IOuyhA== 86831
+IGZvcnR1bmF0ZWx5 86832
+LlNvbWU= 86833
+IG1hbmdhbmVzZQ== 86834
+IGludGVydmlld2Vy 86835
+WVJP 86836
+IGNyeXB0b2dyYXBoeQ== 86837
+IGNoYW1icmU= 86838
+LnJldHJ5 86839
+IGltaXRhdGlvbg== 86840
+JGZkYXRh 86841
+IGxvdGlvbg== 86842
+KGlkZW50aXR5 86843
+LnBn 86844
+IHByZXN1bXB0aW9u 86845
+X1NVUEVS 86846
+dm9jYWI= 86847
+IFNlbWVzdGVy 86848
+IEFiZWw= 86849
+X2FwcHJvdmVk 86850
+LmNvbXBhdA== 86851
+IHdhcnRpbWU= 86852
+XV07Cgo= 86853
+bHV0 86854
+X0FjY291bnQ= 86855
+Pygn 86856
+Y29vcA== 86857
+L3JlZw== 86858
+LnNldFRv 86859
+aXRlc3Nl 86860
+IEh5ZHJh 86861
+Qmlucw== 86862
+Y2FkZW5h 86863
+Pi8nLA== 86864
+Llwi 86865
+CWFjY291bnQ= 86866
+IERhaGw= 86867
+IGRyb3du 86868
+IGdhdXNz 86869
+IHRyYW5zZm9ybWVycw== 86870
+IE1ldGFsbGlj 86871
+IEhlcmJhbA== 86872
+YWNocw== 86873
+X2J1dA== 86874
+IGl0ZXJhdGl2ZQ== 86875
+IEZyZWVk 86876
+anVy 86877
+fE0= 86878
+O2JyZWFr 86879
+X0ZG 86880
+KGRvd25sb2Fk 86881
+4buDbg== 86882
+LmNoZWNrU2VsZlBlcm1pc3Npb24= 86883
+TkVUV09SSw== 86884
+OmZsZXg= 86885
+IENUTA== 86886
+IEFyYg== 86887
+IFByb2R1Y2U= 86888
+CXN5bmNocm9uaXplZA== 86889
+4oCcT2g= 86890
+LmRhdGF0YWJsZXM= 86891
+IGNvbmVz 86892
+RMOp 86893
+0YbQsA== 86894
+QWxn 86895
+IGZ1bmNpb25h 86896
+IFViaXNvZnQ= 86897
+IGdlb3BvbGl0aWNhbA== 86898
+IHNpZWh0 86899
+IGh5ZHJhdGlvbg== 86900
+c3Rocm91Z2g= 86901
+IER1ZGxleQ== 86902
+YXrEgw== 86903
+IHRheGluZw== 86904
+INC30LDQutCw0Lc= 86905
+X0FTTQ== 86906
+TmV1dHJhbA== 86907
+dHJhZGl0aW9uYWw= 86908
+UGxheWFibGU= 86909
+IHNwYWdoZXR0aQ== 86910
+IGlDbG91ZA== 86911
+IERheXRvbmE= 86912
+IHdlcmRl 86913
+IEFOVA== 86914
+IFByb24= 86915
+IFN0YXRpb25z 86916
+IGF0dGVzdA== 86917
+IGZ1bGxlcg== 86918
+IG5vdmFtZW50ZQ== 86919
+XVxc 86920
+Y2Nl 86921
+KGRlY2s= 86922
+L2F5dXNobWFu 86923
+aWdzYXc= 86924
+IGFkdWx0ZXM= 86925
+IHRlcnJl 86926
+Lk9yZGVycw== 86927
+CXByb3BlcnRpZXM= 86928
+RElH 86929
+IFRJTUVT 86930
+ImluZGljZXM= 86931
+ITw= 86932
+TW9uYWQ= 86933
+IG5vbmV4aXN0ZW50 86934
+IEF0bGFudGlz 86935
+IGdyaWV2YW5jZXM= 86936
+dXJlbmNl 86937
+IElQUFJPVE8= 86938
+4pmA4pmA4pmA4pmA 86939
+IGVtcGxlYWRv 86940
+INmD 86941
+Lk1vdmVOZXh0 86942
+IElzbw== 86943
+YmVhdXRpZnVs 86944
+IHNvbHVibGU= 86945
+IHNsdWdnaXNo 86946
+IGRpZmZz 86947
+X09CUw== 86948
+eG1pbg== 86949
+IHR1bWJsZQ== 86950
+IFVuYXJ5 86951
+IHppcGZpbGU= 86952
+IHN2ZW5za2E= 86953
+ZXJsYW5k 86954
+L2N1cGVydGlubw== 86955
+CXNjcmlwdA== 86956
+aXNjaGVz 86957
+TW9kaWZpZWREYXRl 86958
+IHZleWE= 86959
+IGRldGVybWluYW50 86960
+IEdvcmdlb3Vz 86961
+Z2Jvb2xlYW4= 86962
+IExPRA== 86963
+ZGNj 86964
+c2NlbmVz 86965
+IFRTUk1MUw== 86966
+KFR5cGVFcnJvcg== 86967
+IGNhbW91ZmxhZ2U= 86968
+IGJ1cmdl 86969
+VGhlbQ== 86970
+LkFzc2lnbg== 86971
+IGxhc3RJbmRleA== 86972
+X3NwaGVyZQ== 86973
+X0FCSQ== 86974
+w4Q= 86975
+aWxhZ2U= 86976
+XHhmZg== 86977
+IGtheWFr 86978
+IGZpeno= 86979
+dWl0ZW4= 86980
+LlNob3VsZEJl 86981
+IGh0b25s 86982
+IFBldGl0ZQ== 86983
+IGhlYWxz 86984
+IE9zYWth 86985
+Tko= 86986
+SW5QYXJhbWV0ZXI= 86987
+IEJpcmNo 86988
+IGNvbW1lbnRhaXJl 86989
+IFNpZWdl 86990
+IGtleWNvZGU= 86991
+LWludGVuc2l2ZQ== 86992
+cHJvcFR5cGVz 86993
+RXhwb3J0cw== 86994
+IGJ1dHRvblRleHQ= 86995
+IEdvZHppbGxh 86996
+LkV4Y2hhbmdl 86997
+IHVuZGVyc3RhbmRhYmx5 86998
+IGFjY29yZGlvbg== 86999
+IHLDqWdpb24= 87000
+IG1hcmtlZGx5 87001
+YW5vb2dh 87002
+IGNvbnRyYXQ= 87003
+X2xpZnQ= 87004
+W2RhdGU= 87005
+IHNjb3Ju 87006
+IERhdGFNYW5hZ2Vy 87007
+4oCm4oCmCgo= 87008
+X0NPTVBJTEVS 87009
+IENsYXc= 87010
+b2RhdGU= 87011
+IHVuZGVyYWdl 87012
+IEltcGxlbWVudGVk 87013
+Q2xp 87014
+S2Fs 87015
+UHJvZHVjdG9z 87016
+IGVuZmVybWVk 87017
+w6lpcw== 87018
+IGRpc2NyZWRpdA== 87019
+IFNhbW9h 87020
+IFByZXNlbnRlZA== 87021
+IGNpbmVtYXQ= 87022
+XEFjdGl2ZUZvcm0= 87023
+IGZlcm4= 87024
+IFByaW1lcg== 87025
+5oKo 87026
+Z2VyZQ== 87027
+IGlsbHVzaW9ucw== 87028
+bm90YXRlZA== 87029
+IHBvag== 87030
+IG1vZGVsTmFtZQ== 87031
+IFBNQw== 87032
+IGRlY2Fk 87033
+IGZvcmVzdHJ5 87034
+dm9pZQ== 87035
+Li4uCgoKCgoK 87036
+IH19Owo= 87037
+IHRva2VuSWQ= 87038
+YW1tdQ== 87039
+IFBlcnNvbmVu 87040
+IFZFUkJPU0U= 87041
+IHBhdHJvbHM= 87042
+IGFudGlj 87043
+X2RlZXA= 87044
+ZWdlbmQ= 87045
+IFNldFByb3BlcnR5 87046
+IEdhcmV0aA== 87047
+IE1BUw== 87048
+LnJlc3RhdXJhbnQ= 87049
+IEhlYXZlbmx5 87050
+aWVkbw== 87051
+X2xlYWQ= 87052
+IEZ1amk= 87053
+UU4= 87054
+TWFzc2FnZQ== 87055
+IHBhcmFtTWFw 87056
+IGNpdGE= 87057
+X1NwZWVk 87058
+KGJib3g= 87059
+IEpVTA== 87060
+4oCZYW4= 87061
+IG1lbnRl 87062
+IFNob3djYXNl 87063
+IENTSQ== 87064
+PlR5cGU= 87065
+LlNu 87066
+b3R5cGljYWw= 87067
+IEZhbGxvbg== 87068
+LlVUQw== 87069
+IHByZWRhdG9yeQ== 87070
+IG9yZ2FuaXNpbmc= 87071
+Y29sZA== 87072
+IHBhcnNlcnM= 87073
+dWllbg== 87074
+IGNvbXBpbGVycw== 87075
+IFs9 87076
+IEV1cmFz 87077
+TU9TVA== 87078
+CiAgICAKCg== 87079
+UkFS 87080
+LlNjaGVkdWxl 87081
+Lm9wZXJhdGlvbnM= 87082
+dWZz 87083
+w7FhbmE= 87084
+IHByZW9jdXA= 87085
+LXRyZWF0ZWQ= 87086
+LmdldFdvcmxk 87087
+Lic6 87088
+IEFUSA== 87089
+OnN0YXJ0 87090
+IGF1dG9pbW11bmU= 87091
+IEJsYWNramFjaw== 87092
+X0ZJTklTSA== 87093
+KGZsb29y 87094
+IHdyZWNrYWdl 87095
+VVJU 87096
+LkJyYW5k 87097
+cGFpcw== 87098
+Y2ltYWw= 87099
+Y2nDsw== 87100
+TkZM 87101
+LWVxdWlwcGVk 87102
+LmNvbnRlbnRPZmZzZXQ= 87103
+IG92ZXJjcm93 87104
+IFRa 87105
+IG9kb20= 87106
+IENlbGx1bGFy 87107
+CXdyaXRlbA== 87108
+KGlucHV0U3RyZWFt 87109
+KHByZWY= 87110
+LXN0b2Nr 87111
+IERlbmllZA== 87112
+LXN1cHBvcnRlZA== 87113
+ICcoKA== 87114
+YW5jb2Rl 87115
+LmZpbHRlcmVk 87116
+RGltcw== 87117
+IGpi 87118
+CXByaWNl 87119
+IEBACg== 87120
+bm9jaw== 87121
+Lm9wZW5Db25uZWN0aW9u 87122
+IGFudGljcw== 87123
+cmVzdWx0Q29kZQ== 87124
+UGxheWJhY2s= 87125
+IGNlbHVsYXI= 87126
+IEZPT0Q= 87127
+IFBvZGVzdGE= 87128
+PW1lc3NhZ2U= 87129
+LnBlcmZvcm1hbmNl 87130
+IERtaXRyeQ== 87131
+YWx0aW1vcmU= 87132
+IHBsYXRlZA== 87133
+IHR1YmVyY3Vsb3Npcw== 87134
+X2dlbQ== 87135
+KEVkaXRvcg== 87136
+VHBs 87137
+IGNyaWFu 87138
+IGJ1ZmZlcmluZw== 87139
+6KeG6aKR 87140
+ICcpCgo= 87141
+VnU= 87142
+TWF0aGY= 87143
+IHRpbWVsaW5lcw== 87144
+IFRhdGE= 87145
+L3Bw 87146
+IHBsYXN0 87147
+IFRydWx5 87148
+IFN1YnN0aXR1dGU= 87149
+a2llbQ== 87150
+a2Fhcg== 87151
+IFZpc2g= 87152
+J2h1aQ== 87153
+IE1hZ2ljaw== 87154
+L0xheW91dA== 87155
+dXJhbsOnYQ== 87156
+X3R0bA== 87157
+SGlkZUluSW5zcGVjdG9y 87158
+LmtleXdvcmRz 87159
+TGlzdE1vZGVs 87160
+X1N1Y2Nlc3M= 87161
+aWxpaGFu 87162
+IGJsYWNrbWFpbA== 87163
+IFNlcmJpYW4= 87164
+cXVlbGxl 87165
+IER5c2Z1bmN0aW9u 87166
+IFByZXBhcmVk 87167
+IGpNZW51SXRlbQ== 87168
+IGxvZ2luVXNlcg== 87169
+c2V0YXR0cg== 87170
+LkNS 87171
+X2xjZA== 87172
+IGJ5dGVzUmVhZA== 87173
+IGNkZWNs 87174
+IHRvd25zaGlw 87175
+cGVr 87176
+aWprc3RyYQ== 87177
+IG1heGltaXppbmc= 87178
+LnByb3ZpZGVycw== 87179
+SW52ZXN0aWdhdG9ycw== 87180
+IHNob290b3V0 87181
+IGFpcnNwYWNl 87182
+dG9vbGJveA== 87183
+UVdpZGdldA== 87184
+PXBr 87185
+IHBvcnRlcg== 87186
+IFByZWRhdG9y 87187
+IFN1bnJpc2U= 87188
+IGRldm91cg== 87189
+CVVJbnQ= 87190
+aXR0YW5jZQ== 87191
+U1BB 87192
+X2VuZGlhbg== 87193
+IE5hZ2Fy 87194
+dmVuaWRh 87195
+L29wdA== 87196
+QnlFbWFpbA== 87197
+IFBoeXNpY2lhbg== 87198
+XEQ= 87199
+INC80Ys= 87200
+WUVBUg== 87201
+SUND 87202
+L3BvcnRmb2xpbw== 87203
+LmV4ZWN1dG9y 87204
+dWRlbQ== 87205
+RmFsbGJhY2s= 87206
+dWR1 87207
+U2xpbQ== 87208
+w7Nsbg== 87209
+Xnst 87210
+YW5za2U= 87211
+IGh1c3RsZQ== 87212
+IElyZW5l 87213
+IGFieXNz 87214
+IFJvYmJpbnM= 87215
+IGluZGV4ZXI= 87216
+U2F1ZGk= 87217
+IHdob2xlc29tZQ== 87218
+LXNsb3Q= 87219
+IFRlY24= 87220
+IHBhZ2VUaXRsZQ== 87221
+IGNvbnRlc3RhbnQ= 87222
+aWNvcHRlcg== 87223
+IGNvdXJzZUlk 87224
+Q2hy 87225
+IEFYSVM= 87226
+Zm9yZGVy 87227
+X1RVTg== 87228
+VHJhZmZpYw== 87229
+IHR5cGVhbGlhcw== 87230
+IGRhcmY= 87231
+LXVyaQ== 87232
+dHN4 87233
+LmRlc3Ryb3lBbGxXaW5kb3dz 87234
+IGl0ZXJhdGluZw== 87235
+UmVhY3Rpb24= 87236
+CUFN 87237
+IGN1ZW50 87238
+LWNvb2tpZQ== 87239
+IGZsYXZvcmVk 87240
+c3RvaQ== 87241
+IGZsaXJ0aW5n 87242
+44CL77yM 87243
+4KSu 87244
+X0NSWVBUTw== 87245
+W3Rva2Vu 87246
+IHByb2xldGFyaWF0 87247
+LuKAmeKAnQoK 87248
+CWRj 87249
+LlN0cmluZ1Zhcg== 87250
+IGxlZ2l0aW1hdGVseQ== 87251
+X2RlY29yYXRvcg== 87252
+TG9ja2Vy 87253
+IEplbm5h 87254
+VVJJTkc= 87255
+5YaN 87256
+X1ByaW50Zg== 87257
+QVRPUlk= 87258
+LWRpc3Q= 87259
+ICIuIik7Cg== 87260
+LnF1aXo= 87261
+IGlyZ2VuZA== 87262
+LWxlYWd1ZQ== 87263
+Z2llbg== 87264
+IFByb2R1Y2Vk 87265
+SGVsbWV0 87266
+5Y+v6IO9 87267
+UGxhdGZvcm1z 87268
+IFJlc291cmNlTWFuYWdlcg== 87269
+IEh1bmRyZWQ= 87270
+cm9tZXRlcg== 87271
+ZW5na2Fw 87272
+SG9w 87273
+IHBvc3N1aQ== 87274
+QmVmb3JlRWFjaA== 87275
+IENISw== 87276
+IElNUw== 87277
+VGlja2Vy 87278
+IGdyaW5uZWQ= 87279
+LmdldEFz 87280
+IGltcG9zZXM= 87281
+XSIp 87282
+Rm9yZ2V0 87283
+L2ltcG9ydA== 87284
+IGluamVjdGluZw== 87285
+TG92 87286
+IGFicmls 87287
+X3NsaWNlcw== 87288
+LWNvbW0= 87289
+IFBST0RVQ1RT 87290
+IE9hc2lz 87291
+IMO4bnM= 87292
+IFJlamVjdA== 87293
+IHJlZ3VsYXJpemF0aW9u 87294
+aW1wbGljaXRseQ== 87295
+bmF6 87296
+U3BlY2lmaWVy 87297
+IGltcG92ZXJpc2hlZA== 87298
+5po= 87299
+IG5vbWluYXRl 87300
+IE9WRVJSSURF 87301
+IEJhbmRz 87302
+ZXRoeXN0 87303
+IEppYW4= 87304
+IG5ld2NvbWVy 87305
+IE5hYg== 87306
+IGVicA== 87307
+IFBhZ2Vy 87308
+IEh1bWI= 87309
+L2Nj 87310
+IGV4cMOpcmllbmNl 87311
+dWRnaW5n 87312
+TWI= 87313
+ZGJ1Zg== 87314
+Jy8+ 87315
+IG9ja3PDpQ== 87316
+IGpkYmNUZW1wbGF0ZQ== 87317
+IFNISVBQSU5H 87318
+IGludGVyZGlzY2lwbGluYXJ5 87319
+IENFVA== 87320
+YXV0b3A= 87321
+LXN5bWJvbA== 87322
+YXZlYw== 87323
+IGNvbXBvdW5kZWQ= 87324
+IENodW5n 87325
+X1NNUw== 87326
+LWll 87327
+IFByb3NlY3V0b3I= 87328
+IExlaWE= 87329
+IE1hbmRlbGE= 87330
+U2luZ2xlT3JEZWZhdWx0 87331
+CVJFUVVJUkU= 87332
+YXRvd24= 87333
+dXJyZXRz 87334
+5paH5a2X 87335
+IENPTlRFWFQ= 87336
+RU5TSVRZ 87337
+IGluc3VyZ2VudHM= 87338
+IERpYXM= 87339
+LnN0YXRpb24= 87340
+IEtsYW4= 87341
+X21lYXN1cmVtZW50 87342
+X1FNQVJL 87343
+IHN0b2k= 87344
+TU9PVEg= 87345
+PicpOwoK 87346
+IGluZ2VzdGlvbg== 87347
+IEdsb3c= 87348
+dXRjaGVz 87349
+YmVhcmluZw== 87350
+LnRvYXN0cg== 87351
+IGZyYWdtZW50YXRpb24= 87352
+aXBwbw== 87353
+X1NFR01FTlQ= 87354
+IHN0dW1ibGluZw== 87355
+aW1hcg== 87356
+c3Rpbmlhbg== 87357
+XygpCg== 87358
+IG1vdGl2YXRpb25hbA== 87359
+TGlzdEl0ZW1UZXh0 87360
+IHdvbWVucw== 87361
+T3BlbkhlbHBlcg== 87362
+aWJhbmQ= 87363
+IGJ0blNhdmU= 87364
+IGluY29ycG9yYXRpb24= 87365
+IGRvY3VtZW50YXJpZXM= 87366
+aWNs 87367
+IE5k 87368
+IEFyYQ== 87369
+IHF1YWtl 87370
+IEN1bW1pbmdz 87371
+aHRt 87372
+YXN0ZXJlZA== 87373
+LmR0cA== 87374
+IGNvbmRvcw== 87375
+IEd1bmRhbQ== 87376
+L2Rpc2FibGU= 87377
+aHlkcmF0ZQ== 87378
+IEVwb2No 87379
+IG5hdGlvbmFsaXN0cw== 87380
+IGRldmVy 87381
+LHJlcXVlc3Q= 87382
+LmdldFZlcnNpb24= 87383
+Q0VMRVI= 87384
+IFNhbGFo 87385
+IG1vdGU= 87386
+IE1lbGxvbg== 87387
+c3BvdGlmeQ== 87388
+IG9yaWdlbg== 87389
+IG5hbGU= 87390
+IGFkdmVyc2FyaWVz 87391
+LkpUYWJsZQ== 87392
+Zm9yY2VtZW50cw== 87393
+IFJldHJlYXQ= 87394
+IGFyY2hpdm9z 87395
+IHNsYXNoZXM= 87396
+Lk1vdXNlRG93bg== 87397
+PDo6 87398
+X3Rocm91Z2g= 87399
+QWxhbWF0 87400
+LmJsdXI= 87401
+X2ZpbmRlcg== 87402
+IGFsbHVyZQ== 87403
+UGVyaXBoZXJhbA== 87404
+X3Bhc3NlZA== 87405
+X2NoYWxsZW5nZQ== 87406
+IFBhbGVv 87407
+SU5J 87408
+RGlyZQ== 87409
+c3BoZXJl 87410
+KENPTE9S 87411
+YWNrZXJz 87412
+IEdseXBo 87413
+KGludGVnZXI= 87414
+INC60L4= 87415
+IFJlbGV2YW50 87416
+INm+ 87417
+IGF0YXM= 87418
+X3ByaW0= 87419
+IE1VVA== 87420
+bmluZ2Vy 87421
+YXV0b3JlbGVhc2Vwb29s 87422
+PV9f 87423
+IFNpZ25pbmc= 87424
+7ZWY7KeA 87425
+IHVjeg== 87426
+RWRpdGluZ1N0eWxl 87427
+IEhlYXRlcg== 87428
+IEZhaXJmaWVsZA== 87429
+IEJlYXJk 87430
+LGVu 87431
+dXNhdA== 87432
+KCcuJw== 87433
+L3N0cmVhbQ== 87434
+IGdldFN1cHBvcnRGcmFnbWVudE1hbmFnZXI= 87435
+IG1DdXJyZW50 87436
+X1NUQVRFUw== 87437
+X3dpbmQ= 87438
+Q0hBUFRFUg== 87439
+cHJvYmFiaWxpdHk= 87440
+KGFubm90YXRpb24= 87441
+ICovDQoNCg0K 87442
+LlVuaXF1ZQ== 87443
+LkFkZEZpZWxk 87444
+SGlnaGVy 87445
+LmRpZ2l0YWw= 87446
+LmV4cGVyaW1lbnRhbA== 87447
+YXds 87448
+IHdoZW5jZQ== 87449
+ZXJub3Rl 87450
+U0FNRQ== 87451
+Lmlwdg== 87452
+dG9CZUZhbHN5 87453
+YnJhbmU= 87454
+X2NhdGVnb3JpY2Fs 87455
+QXVyYQ== 87456
+IFR5cGVTY3JpcHQ= 87457
+IHNwb250YW5lb3VzbHk= 87458
+bG9uZ2xlZnRyaWdodGFycm93 87459
+aWthbA== 87460
+X1RPRE8= 87461
+IFd5YXR0 87462
+IGZsdXJyeQ== 87463
+ZGlm 87464
+IHJlY2tvbg== 87465
+IENvcm91dGluZQ== 87466
+CWZmbHVzaA== 87467
+IHdvcmtmbG93cw== 87468
+IEZBTUlMWQ== 87469
+c3ByaXRlcw== 87470
+X1dvcms= 87471
+LkdldFNpemU= 87472
+IENvbnN0cmFpbnRz 87473
+QmlnSW50 87474
+aXRpYQ== 87475
+Z2V0Um93 87476
+IGR1aw== 87477
+IGlzTmV3 87478
+IFByb2R1a3Rl 87479
+eENC 87480
+aXNpZXJ0 87481
+ZnVuY3M= 87482
+IEFkZW3DoXM= 87483
+QmluZGluZ1V0aWw= 87484
+b21waWxlcg== 87485
+LWludg== 87486
+IGNoYW50cw== 87487
+IGVudHNwcmVjaA== 87488
+KHRp 87489
+X0lB 87490
+0L7RgNC00LjQvQ== 87491
+IEZBTEw= 87492
+aW1k 87493
+IGxvY2FsdGltZQ== 87494
+PExpbms= 87495
+0L3QuNC60LA= 87496
+IHByb2ZpbGVy 87497
+IGdldFVzZXJJZA== 87498
+IFBoeXNpY2lhbnM= 87499
+UkFE 87500
+IGhtbQ== 87501
+IE5lc3M= 87502
+IFRlbXBv 87503
+IEpU 87504
+IHJlY29ubmFpc3NhbmNl 87505
+PHRyYW5zbGF0aW9u 87506
+IGVudGljaW5n 87507
+IHF1YWludA== 87508
+IGNvdXBl 87509
+X18nLA== 87510
+TkFTREFR 87511
+INC30L3QsNGH0LXQvdC40Y8= 87512
+UEVSQVRVUkU= 87513
+IFBhaQ== 87514
+IHRldGFz 87515
+Q0FT 87516
+SVJST1I= 87517
+IGtj 87518
+IHRvdGU= 87519
+IGRyYXdiYWNr 87520
+IHBhcnNsZXk= 87521
+CUZ1bmN0aW9u 87522
+aXN0eQ== 87523
+IERVUA== 87524
+X0NJRA== 87525
+X1VU 87526
+IGtzaQ== 87527
+IGrDpA== 87528
+PXZhbA== 87529
+LnRvSGV4U3RyaW5n 87530
+5p2/ 87531
+LmNsaXBz 87532
+IG9mZmVu 87533
+IFRFQ0hOTw== 87534
+IFNoYW1l 87535
+IHN1c2NlcHRpYmlsaXR5 87536
+IHN0dXBpZGl0eQ== 87537
+IFRyb3V0 87538
+IENoYW1wYWduZQ== 87539
+ZXRoeWxlbmU= 87540
+IGJlZ3I= 87541
+X3JlZGlz 87542
+WWVw 87543
+IGhhbnM= 87544
+IERlZmVuZGFudA== 87545
+IGRhc2hlcw== 87546
+IHVzZXJUeXBl 87547
+X2RhdG9z 87548
+IHVuaWM= 87549
+a3JpdA== 87550
+IHJlY2VwdGl2ZQ== 87551
+IEdyZXQ= 87552
+KG1i 87553
+IEluZmx1 87554
+w6tu 87555
+fS8+ 87556
+aW50ZXJlc3Rpbmc= 87557
+VVRVUkU= 87558
+IGltYWdlU2l6ZQ== 87559
+IGdyZA== 87560
+IGFic29s 87561
+L2Zh 87562
+LmdyYWRpZW50 87563
+IHd5c3Q= 87564
+XX0+Cg== 87565
+bGVnYXRpb24= 87566
+Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0KCg== 87567
+IEJsZW5kZXI= 87568
+X18pOw== 87569
+IHVzZXJFbWFpbA== 87570
+IFBoYXI= 87571
+bGVoZW0= 87572
+KSk/ 87573
+KFJldHVybg== 87574
+ZWdyYQ== 87575
+dXRpdm8= 87576
+IGFwcGVuZGl4 87577
+IFJUVkY= 87578
+IFNFQUw= 87579
+IGd5cHN1bQ== 87580
+X0FyZw== 87581
+IGlsbHVtaW5hdGU= 87582
+IFNjaGlmZg== 87583
+cXVpbA== 87584
+LkNvbWJvQm94U3R5bGU= 87585
+J10pKQoK 87586
+IGFsdGVycw== 87587
+IHByYWN0aXNl 87588
+IHVzdA== 87589
+IERpbWl0 87590
+LVJlZ3VsYXI= 87591
+IGNyZWVwaW5n 87592
+IENhbmFkaWVucw== 87593
+IHJldG9ybg== 87594
+LWNvcm5lcg== 87595
+ICJdIg== 87596
+KHJuZw== 87597
+IGNhbmFkaWFu 87598
+IHBvc3Rv 87599
+LmFzc2VydEFsbW9zdEVxdWFs 87600
+IEJlY2t5 87601
+L3Nz 87602
+IGhvc3RhZ2Vz 87603
+IGJpb2xvZ2lzdA== 87604
+IEhvc3BpdGFsaXR5 87605
+IEVsaw== 87606
+IEJhcmFuZw== 87607
+66qp 87608
+YmJiYg== 87609
+LnRlYWNoZXI= 87610
+IHRlcm1pbmF0ZXM= 87611
+IGlzRXJyb3I= 87612
+IEtlbmRyaWNr 87613
+ZW5kYXJz 87614
+IFN1Z2dlc3Rpb25z 87615
+Q2Vs 87616
+IFNlcnZpY2VQcm92aWRlcg== 87617
+IFdpY2hpdGE= 87618
+XSkpLAo= 87619
+IGhlYWRsaWdodHM= 87620
+X3ZlbnRh 87621
+QU5USQ== 87622
+IHByb3BpZWRhZA== 87623
+IGVubGlzdA== 87624
+CW9yZw== 87625
+TWVzc2VuZ2Vy 87626
+LmxhbmQ= 87627
+IicK 87628
+YXNwZXJz 87629
+IHRlcnM= 87630
+ZmlsdA== 87631
+IEZ1bmN0b3I= 87632
+IHNsaW5n 87633
+X0JMSw== 87634
+LUV1cm9wZWFu 87635
+IEFjaGlsbGVz 87636
+XEVudGl0aWVz 87637
+LkRpc3BsYXlNZW1iZXI= 87638
+IHJlZGV2ZWxvcG1lbnQ= 87639
+CWhlbHA= 87640
+IFsnLQ== 87641
+IEp1bGllbg== 87642
+PUludGVnZXI= 87643
+LmlzTnVsbE9yRW1wdHk= 87644
+IFdvVw== 87645
+UGF5bWVudHM= 87646
+KGhkcg== 87647
+IGJhamE= 87648
+IEpDb21ib0JveA== 87649
+RmlyZWZveA== 87650
+IGNvbmdsb21lcg== 87651
+X2N1c3Q= 87652
+JCIpCg== 87653
+IG11dGFudHM= 87654
+TWFnbg== 87655
+IE1QSA== 87656
+e18= 87657
+X3dhcm5pbmdz 87658
+IGdhc3Q= 87659
+THQ= 87660
+IHRyYWluYWJsZQ== 87661
+VHJhZGVtYXJr 87662
+QkFTSA== 87663
+IEVDUw== 87664
+UmV0cmlldmU= 87665
+J08= 87666
+IGluaXRpYWxpc2Vk 87667
+IGNoZW1pbg== 87668
+LlRyYW5zcG9ydA== 87669
+IFlpbmc= 87670
+YXNpb25z 87671
+IG1vYw== 87672
+X0xPR0dFUg== 87673
+R0VOQ1k= 87674
+IEJsb2dnZXI= 87675
+ICIpIgo= 87676
+UEVuZA== 87677
+IGFjY29tcGFnbg== 87678
+LkNPREU= 87679
+IG1MaXN0 87680
+LWVkdWNhdGVk 87681
+LC8= 87682
+IE1lcnJpbGw= 87683
+L3Blb3BsZQ== 87684
+LicnJwo= 87685
+X3RvZG8= 87686
+IGfDvG4= 87687
+X0ZVTExTQ1JFRU4= 87688
+LmNsZWFudXA= 87689
+VW5tYXJzaGFsbGVy 87690
+LlN1cHByZXNzTGludA== 87691
+IG9uc2xhdWdodA== 87692
+IE1hcnNlaWxsZQ== 87693
+ZWRpYXRvcg== 87694
+X0VOVFJJRVM= 87695
+LGRlZmF1bHQ= 87696
+bWVsZHVuZw== 87697
+ZWxmdGg= 87698
+IEdvdmVybm1lbnRz 87699
+IHBsZWFz 87700
+b3R0cw== 87701
+IHBsdW5kZXI= 87702
+cmVhZE9ubHk= 87703
+IGR5c2Z1bmN0aW9uYWw= 87704
+J05laWxs 87705
+IHVubG9hZGVk 87706
+IHNxdWVlemluZw== 87707
+IGRvb2Q= 87708
+LmFkZERhdGE= 87709
+IEFzaQ== 87710
+TUVT 87711
+KHNjaGVkdWxl 87712
+IGFkdmVudHVyZXJz 87713
+ZXhwZWN0RXhjZXB0aW9u 87714
+IH19Pns= 87715
+Q0xT 87716
+IHJlY2hlcg== 87717
+IGRlcm5pw6hyZQ== 87718
+LkRldGFpbHM= 87719
+IHJhbmRvbU51bWJlcg== 87720
+IGlhcg== 87721
+IExhbmdl 87722
+ZXdl 87723
+IEVtaWw= 87724
+IGFkdmVydHM= 87725
+IGRyYW1hcw== 87726
+IEtvbW0= 87727
+ICAJCQkJ 87728
+X1Rlc3RDYXNl 87729
+IENsYXJlbmNl 87730
+0LXQvdGC0LA= 87731
+dG91cHBlcg== 87732
+Lm9uU3VibWl0 87733
+Y2Fh 87734
+X0FMQVJN 87735
+KikKCg== 87736
+IOuzgOqyvQ== 87737
+LlByaXZhdGU= 87738
+IHNreWxpbmU= 87739
+UkFJTg== 87740
+KGN1cmw= 87741
+b3NpdGU= 87742
+SWdub3Jpbmc= 87743
+IHZ6 87744
+IHZlZGVyZQ== 87745
+IE9TWA== 87746
+YmFuYW5h 87747
+IG1ldGFt 87748
+IHRyYW5zbGF0ZVk= 87749
+IE1jR3I= 87750
+4oCZYWNj 87751
+5Lul5LiL 87752
+IHNwaXJpdHVhbGx5 87753
+KGVuYWJsZWQ= 87754
+IHJlc3RvcmVz 87755
+IGJ0bkNhbmNlbA== 87756
+dmFuaXNoZWQ= 87757
+IE51ZXZv 87758
+U2FsdmFy 87759
+Y2FmZmU= 87760
+IG1hc3RlcmluZw== 87761
+aWRkbGVk 87762
+LmlzZGlnaXQ= 87763
+IGdyYXZ5 87764
+YWdlZExpc3Q= 87765
+XFJlc291cmNlcw== 87766
+IGRvd25mYWxs 87767
+LlBhc3M= 87768
+IGFsdGlqZA== 87769
+IHBpenphcw== 87770
+IH0pKQ== 87771
+cGVybXM= 87772
+aWdodG9u 87773
+IHJlcGVsbA== 87774
+ICcnKSw= 87775
+Lm5vcm1hbGl6ZWQ= 87776
+IG1hcmNoZXM= 87777
+CXJlc29sdmU= 87778
+Q2hpbGRTY3JvbGxWaWV3 87779
+IEluc3RpdHV0aW9ucw== 87780
+QXR0ZW5kYW5jZQ== 87781
+bHNl 87782
+ZXJkZW0= 87783
+LmdldElucHV0 87784
+SGFzQmVlbg== 87785
+YXBldXRpY3M= 87786
+ICpc 87787
+IFJpdHVhbA== 87788
+X0xT 87789
+IHNwb3RpZnk= 87790
+IHNww6R0ZXI= 87791
+IFRodW1ibmFpbA== 87792
+KGNlcnQ= 87793
+IGdldFJlc291cmNl 87794
+X3Bsb3Rz 87795
+IHN0YWluaW5n 87796
+YWRqdXN0ZWQ= 87797
+INep 87798
+RGl2RWxlbWVudA== 87799
+IFRUQw== 87800
+IGFwcm92ZQ== 87801
+LnZpZXdlcg== 87802
+fD0= 87803
+Z2V0U291cmNl 87804
+55S16K+d 87805
+X1RC 87806
+X2JpbGxpbmc= 87807
+LUxpZmU= 87808
+IHBzeWNoZQ== 87809
+IHRhYlBhZ2U= 87810
+IEluZmVjdA== 87811
+eGZmZg== 87812
+X2hpZA== 87813
+IGFwb2NhbHlwc2U= 87814
+IE5GUw== 87815
+IElURVI= 87816
+V2luZG93U2l6ZQ== 87817
+aGVpdHM= 87818
+IGluY3JlbWVudGVk 87819
+IEJyYXk= 87820
+ZW5lZ3Jv 87821
+IGFsbW9uZHM= 87822
+WVBSRQ== 87823
+Tm9ybWFsaXpl 87824
+4oCcV2VsbA== 87825
+IEFwaUNvbnRyb2xsZXI= 87826
+W1VuaXQ= 87827
+R2VucmVz 87828
+IE5leA== 87829
+IExORw== 87830
+IGZvcmVnb2luZw== 87831
+IHRlbmRvbg== 87832
+IEhw 87833
+Q291bmNpbA== 87834
+IFNhdWRpcw== 87835
+IERlemU= 87836
+IHNjcmFwZWQ= 87837
+IGJvdHRsZW5lY2s= 87838
+IE9ybg== 87839
+IHVubWFubmVk 87840
+IGludm9raW5nU3RhdGU= 87841
+IEV4b2R1cw== 87842
+X0FUT01JQw== 87843
+U3ViTWVudQ== 87844
+X2NvbXByZXNz 87845
+Iy4= 87846
+RHJ2 87847
+LnB1c2hCdXR0b24= 87848
+IHN1aXRjYXNl 87849
+b3NzZWQ= 87850
+Yml0cmFyeQ== 87851
+U25pcHBldA== 87852
+IEVwaWRlbWk= 87853
+RGlzYWxsb3c= 87854
+X0NISw== 87855
+IHZlcmlmaWVz 87856
+IENhdGFseXN0 87857
+4oCUZnJvbQ== 87858
+IGNvbnRhbWluYW50cw== 87859
+Sm9obm55 87860
+KGZpbA== 87861
+IGRlcmVu 87862
+IG91dGNyeQ== 87863
+IEpvaGFubg== 87864
+PFRhZw== 87865
+X3Nhbg== 87866
+IHN0ZGRldg== 87867
+IHBhcmFseXplZA== 87868
+IExleHVz 87869
+b3NhdGU= 87870
+IENoYXJzZXQ= 87871
+IFJlYWx0 87872
+PT8iLA== 87873
+KERlZmF1bHQ= 87874
+IFRyZWFzdXJlcg== 87875
+RWluZQ== 87876
+IHVudHJ1ZQ== 87877
+IGZpbmFuemk= 87878
+IGJlaGF2aW91cmFs 87879
+IG5pcHBsZQ== 87880
+IFJhZGljYWw= 87881
+IFBheg== 87882
+IE1haXNvbg== 87883
+LWVtcGxveWVk 87884
+IHdlcmVsZA== 87885
+IGpvcw== 87886
+IERpZWQ= 87887
+ZW50cmVwcmlzZQ== 87888
+JHJvd3M= 87889
+IHNwb29m 87890
+IMK7Lg== 87891
+IGtleXBvaW50cw== 87892
+IGN1cGNha2Vz 87893
+IHt9KTsKCg== 87894
+Y2hpbmU= 87895
+4oCL4oCL 87896
+LExPQ0FUSU9O 87897
+IHBseXdvb2Q= 87898
+IG1hZ2c= 87899
+IFJhbw== 87900
+IERQUg== 87901
+IGVib29rcw== 87902
+KXNpemU= 87903
+IHNwZWNpYWxpc2Vk 87904
+I2Fl 87905
+IG1pY2hhZWw= 87906
+IFNURE9VVA== 87907
+IFBlbGw= 87908
+QU1FUkE= 87909
+YW5nZWxv 87910
+IGluZ2lu 87911
+IG1BdXRo 87912
+IGxlZ2FsaXpl 87913
+IEN1YW5kbw== 87914
+IGNlcnRv 87915
+IGxpdHJlcw== 87916
+IEV4dHJhcw== 87917
+U0hPUlQ= 87918
+IHByZW1hdHVyZWx5 87919
+IFNlbWFwaG9yZQ== 87920
+SEVO 87921
+IGFtcGhpYg== 87922
+IGjDqQ== 87923
+RXhpdGluZw== 87924
+ZXVpbGxleg== 87925
+IFRNUHJv 87926
+LnByZWZlcmVuY2Vz 87927
+LmdldEluZm8= 87928
+w6l0aWNh 87929
+IiIiLg== 87930
+Lm5ld0FycmF5TGlzdA== 87931
+IGtyb24= 87932
+IEJMTA== 87933
+Y2xpbmU= 87934
+X2di 87935
+IFRvbWFz 87936
+cHJvYmFudGU= 87937
+SVRJT05BTA== 87938
+4buRaQ== 87939
+IExvZA== 87940
+SXNu 87941
+LHsK 87942
+IGtvbW11bg== 87943
+d2R4 87944
+Z2Vub21l 87945
+6YCj 87946
+dG9IYXZlTGVuZ3Ro 87947
+J0U= 87948
+IHDDumJsaWNh 87949
+IERldGVjdGVk 87950
+IF8KCg== 87951
+0YzRjg== 87952
+K1M= 87953
+Y2xvdGg= 87954
+Um90b3I= 87955
+Lm51bWVybw== 87956
+X3N0YW5k 87957
+R0ND 87958
+6rU= 87959
+X3Zw 87960
+X0ZBUg== 87961
+QWhlYWQ= 87962
+e31c 87963
+KGNvcnJlY3Q= 87964
+ImNyeXB0bw== 87965
+bW9kdWxv 87966
+X1VUSUxT 87967
+LlZhcg== 87968
+LW1lbg== 87969
+IHZlbmlhbQ== 87970
+IE1jQ29ybQ== 87971
+Z2V0TG9jYXRpb24= 87972
+W2NvZGU= 87973
+JWY= 87974
+IGRpZmZlcmVk 87975
+SVBBZGRyZXNz 87976
+IFN0cmF3YmVycnk= 87977
+IFNhaGFyYQ== 87978
+Y3JlYXRlQ2xhc3M= 87979
+IS8= 87980
+IG1lbWJlcnNoaXBz 87981
+IHByb25vdW5jZQ== 87982
+LkNvbnN0cmFpbnQ= 87983
+IEVucm9sbG1lbnQ= 87984
+IHJlbmV3YWJsZXM= 87985
+Lmd0 87986
+aXp6aWU= 87987
+cnp5 87988
+ZXJzZW4= 87989
+PD0k 87990
+REVMQVk= 87991
+IHNpZ25pbg== 87992
+IFBTVQ== 87993
+QXBwTmFtZQ== 87994
+fVwuWw== 87995
+RUdB 87996
+IGNpZW50 87997
+IFN5bm9wc2lz 87998
+IGxldHRlclNwYWNpbmc= 87999
+IGNoaWxkcw== 88000
+IFNjYWxpbmc= 88001
+KXByZXBhcmU= 88002
+IGNvbW11dGVy 88003
+U2xhc2g= 88004
+b3VzZXI= 88005
+IHdhdGVybWFyaw== 88006
+IFVJU2NyZWVu 88007
+b2xpYW4= 88008
+CXZlcnRpY2Vz 88009
+PkFjdGlvbg== 88010
+IGFwaA== 88011
+aGFuZHM= 88012
+IE9DQw== 88013
+SFU= 88014
+IHNlY2x1ZGVk 88015
+IHZpc2NlcmFs 88016
+IHZpZGVvZw== 88017
+IFNhbXVyYWk= 88018
+IFp1aw== 88019
+IFdpZG93 88020
+YWNjaW5l 88021
+IGxpbGxl 88022
+IFJ5ZGVy 88023
+IFByb2dyYW1tZXI= 88024
+RXhwb3J0ZXI= 88025
+IG1vdmltaWVudG8= 88026
+YXBhcw== 88027
+IGxlaWRlcg== 88028
+dWxhcmVz 88029
+aWVtZQ== 88030
+LWRlbnNpdHk= 88031
+ZGVzY2VuZGluZw== 88032
+KElU 88033
+IHNjcmFwZXI= 88034
+IGljZWJlcmc= 88035
+X0NSSVRJQ0FM 88036
+IGF1dGU= 88037
+X1N0eWxl 88038
+IE1BTA== 88039
+IEhlY3Rvcg== 88040
+LUNocmlzdGlhbg== 88041
+IGRpZmZlcmVudGlhdGVk 88042
+IEJpc29u 88043
+ICAgICAgIAk= 88044
+LnBvcHVsYXRpb24= 88045
+Umlv 88046
+LVRy 88047
+PVZhbHVl 88048
+IEx1ZnQ= 88049
+IEdpdWxpYW5p 88050
+55yf 88051
+Q291cG9u 88052
+IGhhY2llbmRv 88053
+44Od 88054
+cG9uY2U= 88055
+X3Jlc2lkdWFs 88056
+IGxp4buHdQ== 88057
+XHVmZg== 88058
+0L7QsdGF0L7QtNC40Lw= 88059
+IHJlc3BlY3Rv 88060
+IERlc2lyZWQ= 88061
+RGF0YVN0cmVhbQ== 88062
+LnNheA== 88063
+IG1vcA== 88064
+IEhhY2tlcg== 88065
+QU5UQQ== 88066
+QW5j 88067
+VmVudGE= 88068
+IFdvcmRwcmVzcw== 88069
+CWVmZmVjdA== 88070
+YWRhcHQ= 88071
+IEludGVydmlld3M= 88072
+IGRyYXdiYWNrcw== 88073
+QUxMRU5H 88074
+IGfDqW7DqXJhbA== 88075
+LWJhZGdl 88076
+UmVzaXN0YW5jZQ== 88077
+IE9TSQ== 88078
+dG91cm5hbWVudA== 88079
+IFJlcHV0YXRpb24= 88080
+IEVpc2VuaG93ZXI= 88081
+RmlsZWQ= 88082
+IGhlYnQ= 88083
+I1w= 88084
+Y3JlYXRlUXVlcnlCdWlsZGVy 88085
+5pyJ5pWI 88086
+dmFuY2Vk 88087
+Lkhhc0tleQ== 88088
+ZGRl 88089
+KHN0YXJ0VGltZQ== 88090
+IEluc3RhbGxlcg== 88091
+IEltcGw= 88092
+Y29hY2g= 88093
+IHByZWFjaGVk 88094
+IGJyZXdlZA== 88095
+SW5zdGFsbGVy 88096
+b2x2YWJsZQ== 88097
+IGFsYXM= 88098
+KHNwZWxs 88099
+IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIw== 88100
+IGRlZmFtYXRpb24= 88101
+KEFyZw== 88102
+IHVzZXJEZXRhaWxz 88103
+IGxpY2Vuc29ycw== 88104
+IEludmVzdGlnYXRpb25z 88105
+IGRpbmVy 88106
+IGZpY3Q= 88107
+U3RpY2s= 88108
+TmVpZ2hib3I= 88109
+dG9UaHJvdw== 88110
+LXNlY3Rvcg== 88111
+IHJpc3VsdA== 88112
+4oCZOg== 88113
+Sk5JRW52 88114
+eXBpY2Fs 88115
+ZGVzaWduYXRpb24= 88116
+KHdw 88117
+IGNvbmZpcm1QYXNzd29yZA== 88118
+LWlvcw== 88119
+ICItIjsK 88120
+CWFzc2VydE5vdE51bGw= 88121
+YWRkRXJyb3I= 88122
+YXZyYXM= 88123
+Vm0= 88124
+KGpRdWVyeQ== 88125
+IFZpY3RpbXM= 88126
+IHJlbGlhbnQ= 88127
+IEJsaXR6 88128
+IG91dGFnZQ== 88129
+IGZsdW9yaWRl 88130
+IFROVA== 88131
+LkRpc2NsYWltZXI= 88132
+IFNOTVA= 88133
+dmFibHk= 88134
+IHBob3RvbnM= 88135
+LlJlYWRBc1N0cmluZ0FzeW5j 88136
+U2NoZWR1bGVk 88137
+IGpld2lzaA== 88138
+IEdlb2ZmcmV5 88139
+IEdyYW5ueQ== 88140
+fgo= 88141
+LW1lc3NhZ2Vz 88142
+KGdvYWw= 88143
+IGFyZ2VudA== 88144
+IFBlc3Q= 88145
+IGNvbmdyYXR1bGF0ZQ== 88146
+aW5vc2F1cg== 88147
+IHdoaXNwZXJz 88148
+IHNpc3RlbWFz 88149
+IEbDqQ== 88150
+L0luZGV4 88151
+Lk1JTExJU0VDT05EUw== 88152
+IGFjaGlldmFibGU= 88153
+IEJyaXR0YW55 88154
+KysrKysrKysrKysrKysrKysrKysrKysrKysrKysrKys= 88155
+IFJldHVyblR5cGU= 88156
+IGluZml4 88157
+LmlzU3VjY2Vzcw== 88158
+LkNhdGVnb3JpZXM= 88159
+IG91dGxpZXI= 88160
+LkFzc2V0 88161
+b3RlYw== 88162
+IHdpemFyZHM= 88163
+IGJvb3Rsb2FkZXI= 88164
+X2Jlcg== 88165
+IHJlaGFiaWxpdA== 88166
+YW50b3I= 88167
+IFZpdm8= 88168
+IEdhcm1pbg== 88169
+b2JqZWN0SWQ= 88170
+QFBhdGg= 88171
+IMO6bmljYQ== 88172
+IFlvcmtlcnM= 88173
+R3VpZElk 88174
+JGVycm9ycw== 88175
+ICs9Cg== 88176
+IGF4aW9t 88177
+IFBTSQ== 88178
+IFN1Y2M= 88179
+IFNwb2thbmU= 88180
+ICciLiRf 88181
+IExO 88182
+Lm5ld0xpbmU= 88183
+IGludGVyc2VjdHM= 88184
+bGljaGtlaXQ= 88185
+IElBTQ== 88186
+LkRyb3BEb3duSXRlbXM= 88187
+IGNvdXJ0ZW91cw== 88188
+IFNtaXRoc29uaWFu 88189
+IEhtbQ== 88190
+UURlYnVn 88191
+c3RyYWlnaHQ= 88192
+X3NvbGQ= 88193
+QnVsaw== 88194
+VHJpU3RhdGU= 88195
+IGFkZEJ1dHRvbg== 88196
+IEhpcmluZw== 88197
+VHJhbnNwb3Nl 88198
+IFVJVGV4dFZpZXc= 88199
+aXN0ZW5jaWE= 88200
+L2NwcA== 88201
+INC/0L7Qu9GP 88202
+IENvb2tib29r 88203
+L0FwcGxpY2F0aW9u 88204
+Z2VuaWM= 88205
+IFdvb0NvbW1lcmNl 88206
+LHZlY3Rvcg== 88207
+IEJpdGU= 88208
+Lmh3 88209
+IGRvY2tpbmc= 88210
+IFRhbnRyYQ== 88211
+IFNWQw== 88212
+IE1hdXJpdA== 88213
+aWFsaWFz 88214
+IEF1cmU= 88215
+IGJvbHM= 88216
+TE9DSVRZ 88217
+IFdlc3Ricm9vaw== 88218
+IEJQTQ== 88219
+IEZleQ== 88220
+IFNvdmVyZQ== 88221
+IHBhbmRh 88222
+IHF1aXp6ZXM= 88223
+IGNyZW8= 88224
+c3BlZWNo 88225
+L2Rpcg== 88226
+INC40YHQv9C+0LvRjNC30L7Qsg== 88227
+IGZvdW5kYXRpb25hbA== 88228
+LWFwcGVuZA== 88229
+blRoZQ== 88230
+IGFwaVVybA== 88231
+LlhQQVRI 88232
+IExpbmd1 88233
+IEV4aGF1c3Q= 88234
+UGFraXN0YW4= 88235
+IG9tYXA= 88236
+IGZvbnRTdHlsZQ== 88237
+0LXRgdGC0Lg= 88238
+IG1hbnNsYXVnaHRlcg== 88239
+X0xvbmc= 88240
+IGNhcnBldHM= 88241
+Q2hlc3M= 88242
+ZWxpZ2h0 88243
+RHJhd2VyVG9nZ2xl 88244
+IFBhdHR5 88245
+X2Nyb3NzZW50cm9weQ== 88246
+IHR3ZWFraW5n 88247
+0YLRgw== 88248
+IENBTEM= 88249
+c2lw 88250
+IEpNUA== 88251
+X19fX19fX19fX19fX19fX18KCg== 88252
+VHJlZVZpZXc= 88253
+LXdhdmU= 88254
+IHBhc3R1cmU= 88255
+ZWxpbWluYXI= 88256
+IGVyeQ== 88257
+IHJlc3RsZXNz 88258
+6rWs 88259
+IG1hcmlhZ2U= 88260
+IEVsbGll 88261
+Xz0n 88262
+IHZtaW4= 88263
+S2ljaw== 88264
+LnRvb2xib3g= 88265
+IE1hcmlubw== 88266
+eXBzeQ== 88267
+c3RkYXJn 88268
+cHRyZGlmZg== 88269
+IFBlYWtz 88270
+X1ZhbA== 88271
+IGluZ2VzdA== 88272
+IGNvbXBz 88273
+RGViZQ== 88274
+IERlY2xhcmF0aW9ucw== 88275
+aXJjb24= 88276
+PWFsbA== 88277
+LkRlYnVnZg== 88278
+UHJlZGljdGlvbg== 88279
+IGRhdQ== 88280
+KE1lbWJlcg== 88281
+IGNoaWVmbHk= 88282
+L2FuaW1hdGU= 88283
+LkF0dGFjaA== 88284
+IGdhc3RyaWM= 88285
+IFVzZXJEZXRhaWxz 88286
+w7ZyZW4= 88287
+a29h 88288
+LWJvb3Q= 88289
+IHNwbGljZQ== 88290
+bGVh 88291
+b3Rp 88292
+W29w 88293
+U3F1YXJlZA== 88294
+IHNjcm9sbFRv 88295
+IE5ld2ZvdW5kbGFuZA== 88296
+CUVSUk9S 88297
+V2Fs 88298
+RU1BTEU= 88299
+R2V0WQ== 88300
+IGNhYmlucw== 88301
+IGFic2w= 88302
+Lm1peGVy 88303
+IGNkcg== 88304
+Y29uY2VydA== 88305
+IFN5bHZpYQ== 88306
+Qks= 88307
+5LuK5bm0 88308
+X0NMQU1Q 88309
+0YHRgtGA0YPQutGC0L7RgA== 88310
+L2dhbWVz 88311
+xZN1cg== 88312
+PGxvY2F0aW9u 88313
+IGNsb3NlQnV0dG9u 88314
+IEhhaXJzdA== 88315
+4bqhbw== 88316
+IGNydW1ibGluZw== 88317
+IHN1bGZhdGU= 88318
+IGFsZ3VpZW4= 88319
+IEpEQkM= 88320
+IEt2 88321
+UElQ 88322
+X3N1cmY= 88323
+IHXFvHl0aw== 88324
+IG1hbm5lZA== 88325
+IE9jY2FzaW9uYWxseQ== 88326
+b2Jqcw== 88327
+TWluaW1hbA== 88328
+LWRlc3M= 88329
+IFdBVg== 88330
+IEVycm9ySGFuZGxlcg== 88331
+IHNldExvY2F0aW9u 88332
+IGlldHM= 88333
+IHN1YnJvdXRpbmU= 88334
+IHRvbmd1ZXM= 88335
+X3F1aXo= 88336
+TWlsbGVy 88337
+IEJhc2VUeXBl 88338
+IFZ1ZXg= 88339
+aXJhdGU= 88340
+U2VyaW91c2x5 88341
+dHlwZWlk 88342
+IGt1dGpl 88343
+IHByZXNjcmliaW5n 88344
+X3N1cnZleQ== 88345
+LkN0 88346
+IGJsaW5kbHk= 88347
+LmdldExhYmVs 88348
+LCIpOwo= 88349
+IHBvdHJ6ZQ== 88350
+IFN3b3Jkcw== 88351
+U29ydGFibGU= 88352
+IEJsYWNrYnVybg== 88353
+IE1hdGE= 88354
+IHBvbmRz 88355
+IHByb3Rlc3RvcnM= 88356
+IEVuc2VtYmxl 88357
+OmZvY3Vz 88358
+IGl0YWxpYW5h 88359
+IGRvcm1hbnQ= 88360
+IE5lbA== 88361
+SU5DTFVERQ== 88362
+KENvbnY= 88363
+IGJ1Zmxlbg== 88364
+IENETg== 88365
+LnhodG1s 88366
+SGRy 88367
+IGNhcmNpbm9tYQ== 88368
+IFdvcmNlc3Rlcg== 88369
+bmRs 88370
+dXNlUmFs 88371
+dXNlUmFsYXRpdmU= 88372
+dXNlUmFsYXRpdmVJbWFnZVBhdGg= 88373
+IHRha2Vhd2F5 88374
+ZWxlbWVudEd1aWRJZA== 88375
+LmxhYmVsWA== 88376
+W0lE 88377
+QUxFUg== 88378
+CXV2 88379
+PigpLT4= 88380
+L2xp 88381
+K2xlbg== 88382
+IHByb3BlbA== 88383
+IGNhYm8= 88384
+XCIiKTsK 88385
+IHZvY2F0aW9uYWw= 88386
+LXBpbGw= 88387
+Lm5sbQ== 88388
+IGVyb3RpY2E= 88389
+b3BvdA== 88390
+bGFuZHNjYXBl 88391
+aW5zaw== 88392
+IHBsYWNlbWVudHM= 88393
+LnNldEF1dG8= 88394
+IGhvbWljaWRlcw== 88395
+X0ZpZWxkT2Zmc2V0VGFibGU= 88396
+Omw= 88397
+IGFubm90YXRl 88398
+LXJpc2U= 88399
+LGFscGhh 88400
+IGludGVydmVuaW5n 88401
+YW1iaQ== 88402
+Lj0nPA== 88403
+IHBhcmxlcg== 88404
+772l772l 88405
+IGNvbXBseWluZw== 88406
+LWhhbmRsZQ== 88407
+IGludGVycnVwdGlvbnM= 88408
+cGxlcnM= 88409
+cm91cHM= 88410
+X0RlZg== 88411
+IHBpY2tlclZpZXc= 88412
+IHBpZXJjZWQ= 88413
+IGVyYWRpY2F0ZQ== 88414
+bW9ieA== 88415
+W3RyYWlu 88416
+RGVmZXJyZWQ= 88417
+IHRvdGFsZWQ= 88418
+Q2hpbGRJbmRleA== 88419
+IFJlY29tbWVuZGF0aW9ucw== 88420
+X1dPUkRT 88421
+IHNpZ25pZnk= 88422
+IEFlcm8= 88423
+X2Jvb3RzdHJhcA== 88424
+X1Vw 88425
+cHJvZHVjdE5hbWU= 88426
+LWFueQ== 88427
+IHBwbA== 88428
+X1BVVA== 88429
+IGx5b24= 88430
+X0lMaXN0 88431
+IMOpY3JpdA== 88432
+KGd1aWQ= 88433
+IGNvbnRhZ2lvdXM= 88434
+X1NlbGVjdGlvbg== 88435
+L2xhbmd1YWdl 88436
+cXVhbg== 88437
+IGFjdXB1bmN0dXJl 88438
+IG9mcmVjZQ== 88439
+CVJURQ== 88440
+Lkd1bmE= 88441
+IHNlbnNlZA== 88442
+IEtyYWs= 88443
+IHVubHVja3k= 88444
+YXZpYw== 88445
+dGl0bGVMYWJlbA== 88446
+IGhheXN0YWNr 88447
+LmJpdG1hcA== 88448
+IENvdW5zZWxpbmc= 88449
+UExBVEZPUk0= 88450
+X1Rvb2w= 88451
+VGFt 88452
+V2VyZQ== 88453
+0YDQsNC3 88454
+X1NQRQ== 88455
+IG9uQW5pbWF0aW9u 88456
+PTw/PSQ= 88457
+IFNsZQ== 88458
+IEd1aW5uZXNz 88459
+IHR3ZWFrZWQ= 88460
+LXByZXNzdXJl 88461
+X21vbnRocw== 88462
+KW8= 88463
+UHJvYmFiaWxpdHk= 88464
+IENhbXBvcw== 88465
+LkNPTkZJRw== 88466
+VmludGFnZQ== 88467
+PndpbmRvdw== 88468
+IEZhY3RvcnlCb3Q= 88469
+cG9zdGdyZXNxbA== 88470
+IHRhYmxldG9w 88471
+IENhdGE= 88472
+aG9j 88473
+X2FzYw== 88474
+4oKs4oCc 88475
+QmFja1N0YWNr 88476
+w6lv 88477
+IFNvdXM= 88478
+c2V0dGVy 88479
+JyldKQo= 88480
+dmVsbGU= 88481
+IEFsdW1pbml1bQ== 88482
+eEJB 88483
+Lm1vbmdv 88484
+IFZhcmlhdGlvbg== 88485
+eXR1dA== 88486
+bmVobWVy 88487
+4buDbQ== 88488
+IGVmZmVjdGVk 88489
+ICoqLw0K 88490
+IHJlY291bnRlZA== 88491
+UHJhY3RpY2U= 88492
+Q0FOQ0VM 88493
+Y3puaWU= 88494
+TGFycnk= 88495
+IHFh 88496
+IEh1ZmZtYW4= 88497
+Z2V0RHJhd2FibGU= 88498
+IGVuZnJlbnQ= 88499
+IG9uQ2FuY2VsbGVk 88500
+IGxlbw== 88501
+IFhTUw== 88502
+IEh1cnJpY2FuZXM= 88503
+IGpvbg== 88504
+IFRlc3RlZA== 88505
+IE1vcmFs 88506
+IGJlZHRpbWU= 88507
+IEpBRFg= 88508
+IGVjaGFuZw== 88509
+IG51ZXN0cmFz 88510
+UENN 88511
+KS4u 88512
+IOyImOyglQ== 88513
+IGJvcmRlcmxpbmU= 88514
+IGFzc2lzdGly 88515
+IEhlbHBz 88516
+IERpdmU= 88517
+X3NuZA== 88518
+d2l0 88519
+X2JsZW5k 88520
+IGlzRmlyc3Q= 88521
+IGhlYXBx 88522
+KCc9 88523
+IGFzc2VtYmxlcg== 88524
+IE15c3RpYw== 88525
+b3JnaA== 88526
+IGhpam9z 88527
+X0tIUg== 88528
+KGRlY29kZWQ= 88529
+IFFVSQ== 88530
+INeR 88531
+IGNvbnRyb2xJZA== 88532
+U3BhY2Vy 88533
+LmFnZ3JlZ2F0ZQ== 88534
+IHNoYWx0 88535
+X3RyYXA= 88536
+IEZhbWlsaWU= 88537
+zrg= 88538
+b3J0YQ== 88539
+LlBvc3RNYXBwaW5n 88540
+7LA= 88541
+ICcuLics 88542
+esOh 88543
+L2FybQ== 88544
+LmdhbGxlcnk= 88545
+IGltcGVjY2FibGU= 88546
+IHdpbmRvd0hlaWdodA== 88547
+c2xhY2s= 88548
+ZmZi 88549
+X3Fw 88550
+bGFkZW4= 88551
+IFRFUk0= 88552
+c2V0TGFiZWw= 88553
+IFNpbmdsZUNoaWxkU2Nyb2xsVmlldw== 88554
+ecO8aw== 88555
+IHB1bHVtaQ== 88556
+LWdhcA== 88557
+dW5pYWNpZA== 88558
+CWhvbGRlcg== 88559
+LmFkZEZpZWxk 88560
+IHRyaXBsZXM= 88561
+IEp1ZGdtZW50 88562
+IENlbmE= 88563
+cGFyc2Vycw== 88564
+LmRyYXdUZXh0 88565
+INC60LDQttC0 88566
+IGFjY3Q= 88567
+aGl2ZQ== 88568
+IG11c2lxdWU= 88569
+IFlheg== 88570
+LXBvc3Rz 88571
+IGZpbHM= 88572
+IC8vew0K 88573
+X3B1dHM= 88574
+IFN0YXR1ZQ== 88575
+ZGlhbW9uZA== 88576
+U3RvcmFnZVN5bmM= 88577
+IHNodXRz 88578
+IGdldHRpbWVvZmRheQ== 88579
+IEFBQkI= 88580
+aWNoZXJu 88581
+Z2V0TG9jYWxl 88582
+aW50cmVl 88583
+IGZydWl0ZnVs 88584
+QmVhcg== 88585
+IHBsdW1iZXI= 88586
+cWlk 88587
+Q0hJUA== 88588
+IG1vdGl2YXRpbmc= 88589
+IGVzY2FsYXRl 88590
+LmJ1bGs= 88591
+IFBsYXlncm91bmQ= 88592
+X21pcnJvcg== 88593
+IFBlZWw= 88594
+IGRhbmU= 88595
+aW52b2ljZXM= 88596
+SGFzQmVlblNldA== 88597
+LXZlcnRpY2Fs 88598
+IEZyYW5jZXNjbw== 88599
+IEFTQQ== 88600
+INC60L7Qu9C40YfQtdGB0YLQstC+ 88601
+w6Bu 88602
+Rm91cnRo 88603
+IENyZWF0ZVRhYmxl 88604
+Y2N0b3I= 88605
+IGZyYW50aWM= 88606
+YWFi 88607
+IEthcmFjaGk= 88608
+X2ltYWc= 88609
+IG5hdHV1cg== 88610
+RWF0 88611
+IHN0dW1w 88612
+IHJvbGxlcnM= 88613
+IHRyYWl0ZW1lbnQ= 88614
+INC/0YDQvtC0 88615
+IHJlYWxpc3RpY2FsbHk= 88616
+IGVQdWI= 88617
+IFphZw== 88618
+ZGFtbg== 88619
+IEFubmV4 88620
+cGVjaWVz 88621
+KGV4aXQ= 88622
+IHNwZWN0YXRvcg== 88623
+IEJ1bGdhcmlhbg== 88624
+IG1lZ2V0 88625
+IG1hdHVyZXM= 88626
+IGRldGVjdGlvbnM= 88627
+IHphaGw= 88628
+ZW5lZml0 88629
+YWtvdg== 88630
+IGFkdWx0b3M= 88631
+bWlkZGxld2FyZXM= 88632
+aXNPYmplY3Q= 88633
+S2Vubg== 88634
+IHVuZXRoaWNhbA== 88635
+c3VibmV0 88636
+R3JhcGhRTA== 88637
+IEdhZWw= 88638
+LkRyb3BvdXQ= 88639
+IGJ1cmVhdWNyYXRz 88640
+IFJlZGVtcHRpb24= 88641
+LkR0bw== 88642
+LkV2YWx1YXRl 88643
+IG9nZ2k= 88644
+IHRyYXRhbWllbnRv 88645
+IHJlY2FsbGluZw== 88646
+aXN0aW5ndWlzaA== 88647
+L3JlbGVhc2U= 88648
+X1dST05MWQ== 88649
+CW1rZGly 88650
+VHlwZUVudW0= 88651
+IERBUks= 88652
+5rWB 88653
+IFZhcG9y 88654
+IGF0b2w= 88655
+CWluc3Q= 88656
+LmApOwo= 88657
+L2Vs 88658
+IHJlY2xhaW1lZA== 88659
+w59lcmRlbQ== 88660
+X2xvc3Q= 88661
+IEFsYQ== 88662
+INC+0YjQuNCx 88663
+IEJhcnRo 88664
+Q29sb24= 88665
+b3Bvcg== 88666
+X3Bhc3N3ZA== 88667
+X2V4Y2x1ZGU= 88668
+QVBB 88669
+Zmxvd2Vycw== 88670
+IEVib29r 88671
+IFNUQQ== 88672
+VU5T 88673
+X0RJU1BBVENI 88674
+QUNJw5NO 88675
+dGVybWluYXRpb24= 88676
+IG5lc3RsZWQ= 88677
+YWRyYXRpYw== 88678
+Um93QW5pbWF0aW9u 88679
+X2tt 88680
+IHJvbmQ= 88681
+XV0+PC8= 88682
+5L2Z 88683
+IGNvc3BsYXk= 88684
+IG1pbGxlbm5pdW0= 88685
+X3NlcmlhbGl6ZQ== 88686
+IHZlcnNjaGllZGVuZW4= 88687
+YW50dA== 88688
+IEFtaWQ= 88689
+Y3JldGlvbg== 88690
+KT8k 88691
+IHRvd2luZw== 88692
+LmZpbA== 88693
+LkZpbGVXcml0ZXI= 88694
+IGFpcw== 88695
+IGVTcG9ydHM= 88696
+cHJ0 88697
+SVBB 88698
+LkZBTFNF 88699
+IHByaWNr 88700
+RW5kaW5n 88701
+IHByw6lzaWRlbnQ= 88702
+X2dseXBo 88703
+IHN1cHBsZW1lbnRlZA== 88704
+IGNvbnRhcg== 88705
+Ii4kXw== 88706
+IEJ1eWVycw== 88707
+dWph 88708
+IFRpbWVab25l 88709
+ZW5uZW50 88710
+SW5Qcm9ncmVzcw== 88711
+IFN1c3RhaW5hYmlsaXR5 88712
+IFByb3NwZXI= 88713
+Q29udG91cnM= 88714
+IHN0YXJ0bGVk 88715
+X2xlYXN0 88716
+IENvdmVudA== 88717
+Y2huaXR0 88718
+IE1pbGt5 88719
+ICItPg== 88720
+ZXRhaw== 88721
+IHR1c3Nlbg== 88722
+LXBheWluZw== 88723
+X2FjY2Vzc2libGU= 88724
+QmF0bWFu 88725
+KGl0cg== 88726
+SUFMSVpFRA== 88727
+IFRleHRBcmVh 88728
+YW5rZQ== 88729
+X0pVTVA= 88730
+IGJlaGF2ZWQ= 88731
+LG9wdGlvbnM= 88732
+eGl2 88733
+LlBMTA== 88734
+cXg= 88735
+Lm9uTmV4dA== 88736
+IHZlcmlmaWVy 88737
+IGR1xbw= 88738
+IEZ1a3VzaGltYQ== 88739
+IENPUlBPUkFUSU9O 88740
+X3RE 88741
+IE1lYWRvdw== 88742
+IHByb3llY3Rvcw== 88743
+ICgnXA== 88744
+IEJhcmNsYXlz 88745
+IGxlZ2FsaXR5 88746
+IGhhbWJ1cmdlcg== 88747
+IGVpbnM= 88748
+SW5kaWFuYQ== 88749
+IFRLZXk= 88750
+Y2xvYWs= 88751
+PGFsZ29yaXRobQ== 88752
+IHByZWFjaGVy 88753
+e2xuZw== 88754
+LmFydGljbGVz 88755
+c2V0SW1hZ2U= 88756
+UmVuYW1l 88757
+IGJsb3Nzb20= 88758
+IEJsb3Nz 88759
+IHV1cg== 88760
+IGRhZHM= 88761
+IFRpdGFuaWM= 88762
+ICAgICAgICANCg0K 88763
+IG9yZGluYW5jZXM= 88764
+IG3DpG5u 88765
+IGVyaw== 88766
+IGRpc3RpbGxlZA== 88767
+IMOkbA== 88768
+IHJ1cHR1cmU= 88769
+IENhbWVyYXM= 88770
+w7luZw== 88771
+IGhhaXJzdHlsZXM= 88772
+IGVtYnJ5b3M= 88773
+4oCdCg== 88774
+Lk5hdg== 88775
+IHN0cm0= 88776
+CXVzYWdl 88777
+LkFJ 88778
+IFRPVUNI 88779
+IElsbGVnYWxBY2Nlc3NFeGNlcHRpb24= 88780
+6rKw 88781
+a29uZWtzaQ== 88782
+ISIp 88783
+IGVzY2Fw 88784
+dWRpb3M= 88785
+c3RhcnR0aW1l 88786
+IG1laW5lbQ== 88787
+IFNwaXJhbA== 88788
+IEVyZWN0aWxl 88789
+aXZhbGVuY2U= 88790
+IGl0ZW1UeXBl 88791
+IGFiYWl4bw== 88792
+VmVydHM= 88793
+dGFraW5n 88794
+cHN0 88795
+IE9zY2Fycw== 88796
+IER4 88797
+ZXR0eQ== 88798
+TUFM 88799
+IE5lZWRsZQ== 88800
+IENPTVBVVEVS 88801
+5Lu75Yqh 88802
+IG5ld1g= 88803
+ICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAK 88804
+cGxldmVs 88805
+QUNFTUVOVA== 88806
+IEpvaGFu 88807
+UG9pbnRG 88808
+IHJlc3Ryb29t 88809
+dmVybw== 88810
+IGVsxZE= 88811
+cHJvZHVr 88812
+IFlFQVJT 88813
+CWFjdHVhbA== 88814
+VVBMRQ== 88815
+Q29udmVydGlibGU= 88816
+IHBvcnJm 88817
+SW5qZWN0ZWQ= 88818
+X2JvdGg= 88819
+L0dhdGU= 88820
+Y2FsY3VsYXRvcg== 88821
+ZW1haWxlcg== 88822
+LlBvZA== 88823
+IFpvdA== 88824
+X3NtYXJ0 88825
+YmFzaXM= 88826
+PENvbG9y 88827
+IGNyYXZpbmdz 88828
+RHJpdmVycw== 88829
+KGNvcw== 88830
+ZGF0YWJsZQ== 88831
+LW1ldGFs 88832
+IFBj 88833
+LmNvcHlPZg== 88834
+IG9yaWVudGF0aW9ucw== 88835
+CWFzdA== 88836
+IFpvbWJpZXM= 88837
+IGJvbWJlZA== 88838
+SG9zdG5hbWU= 88839
+X3JhaXNlcw== 88840
+bWVuc2FnZW0= 88841
+IGNvcnRpc29s 88842
+IEZpb25h 88843
+bGljb3M= 88844
+aGVhdnk= 88845
+IOqwgOyguA== 88846
+b21lbmNs 88847
+IGN1bHR1cmVk 88848
+IGFydGlrZWw= 88849
+xaHDrQ== 88850
+amRr 88851
+IHZhbmRhbGlzbQ== 88852
+IH1dKTsK 88853
+U3RyYWlnaHQ= 88854
+IHJlaGVhcnNhbA== 88855
+RWRpdGlvbg== 88856
+IEluc3Bpcg== 88857
+CXdj 88858
+IGZvcm11bGF0ZQ== 88859
+YW56ZWlnZW4= 88860
+IHBhdGhvbG9naWNhbA== 88861
+IGtlbm5lbmxlcm5lbg== 88862
+Pnsi 88863
+IGRpY2Vk 88864
+IGJyYWNlbGV0cw== 88865
+CQkgICAgCg== 88866
+Kj4q 88867
+L3RhcmdldA== 88868
+LkFnZW50 88869
+Lm1hZ2lj 88870
+IGlkZW9sb2dpZXM= 88871
+VFJBQ0s= 88872
+X2luZGl2aWR1YWw= 88873
+PGRlY2x0eXBl 88874
+IFJFQ0VJVkU= 88875
+L2Jvb3Q= 88876
+OkB7 88877
+UU0= 88878
+IE1hbmRhbA== 88879
+TkFNRVNQQUNF 88880
+IHRlcmNlcg== 88881
+IFJlZ2dpZQ== 88882
+IE5pY2hvbHNvbg== 88883
+IEZ1bHRvbg== 88884
+c3Rha2luZw== 88885
+IHJlc29uYXRl 88886
+bHBhcnI= 88887
+IGNvbnZlcnRlcnM= 88888
+ICgiLw== 88889
+IE1hcmxpbnM= 88890
+SW5mb3JtZQ== 88891
+Jz0+Wyc= 88892
+IHJvYmVydA== 88893
+IEhJTQ== 88894
+d2Vicw== 88895
+LnRyYWlsaW5nQW5jaG9y 88896
+LmFzY2lp 88897
+IE1hc2M= 88898
+IHRlY2hubw== 88899
+ZXR4dA== 88900
+CSAgICAgICAgCg== 88901
+zrHOuQ== 88902
+KFNlcQ== 88903
+ID8+Ojwv 88904
+IFBlYg== 88905
+W3NlbGVjdGVk 88906
+SkVDVEVE 88907
+Q2FzdEV4Y2VwdGlvbg== 88908
+P2Y= 88909
+IGV5ZXdpdG5lc3M= 88910
+IG1lbm8= 88911
+IERhbWllbg== 88912
+X0lFbnVtZXJhdG9y 88913
+IC4uLi4uLi4uLi4uLi4uLi4= 88914
+LlNFTEVDVA== 88915
+IGNyYXk= 88916
+X3BhcGVy 88917
+LlJvbGxiYWNr 88918
+SURFT1M= 88919
+cnBhcnI= 88920
+aW5lYXI= 88921
+X1JlbA== 88922
+IFdpbGRl 88923
+IFdvbmRlcmxhbmQ= 88924
+IFNodWZmbGU= 88925
+IHN0cmlrZW91dHM= 88926
+c2lnbW9pZA== 88927
+ISgiew== 88928
+ZXBhbQ== 88929
+IHJpY2huZXNz 88930
+IGVuZGVhdm91cg== 88931
+bWVudUl0ZW0= 88932
+INCf0L7Qu9GD0Yc= 88933
+IGZydXN0cmF0aW9ucw== 88934
+X3N1YnNjcmliZQ== 88935
+IGJvb3pl 88936
+IExpY2h0 88937
+IHBlYXNhbnQ= 88938
+IHdlaWdodGluZw== 88939
+IOW/ 88940
+QWN0aW9uQ29kZQ== 88941
+LnRyYWNrcw== 88942
+IMOY 88943
+IG1pbGxpb25haXJl 88944
+KHVy 88945
+J10pCgoK 88946
+ICIuJF8= 88947
+X0VERUZBVUxU 88948
+IGN1cmxz 88949
+X0NvbUNhbGxhYmxlV3JhcHBlcg== 88950
+LnNldFZpZXdwb3J0 88951
+IGRlbmQ= 88952
+IGF1dG91cg== 88953
+IEZvdXJpZXI= 88954
+IGJvaWxz 88955
+IEpQRw== 88956
+IGRpZ3M= 88957
+IGNvbXBsYWlucw== 88958
+LWxpbmVk 88959
+IEJsYWRlcw== 88960
+X2RpY3Rz 88961
+IElwcw== 88962
+cmVmZXJlcg== 88963
+IGFueWhvdw== 88964
+YW50YXI= 88965
+LXNoZWV0 88966
+CXBsYXk= 88967
+aWVyY2U= 88968
+Lk1lc3NhZ2luZw== 88969
+6KeB 88970
+CXByb2dyZXNz 88971
+LkRhdGFWaXN1YWxpemF0aW9u 88972
+IFN0b3Bz 88973
+SW50ZXJ2YWxTaW5jZQ== 88974
+QGJyaWVm 88975
+LndpbmQ= 88976
+IGdldElucHV0 88977
+IEtB 88978
+IFJFU1BPTlM= 88979
+IHRhcmc= 88980
+dmlzdWFsaXphdGlvbg== 88981
+IEVzcGHDsQ== 88982
+bmllcg== 88983
+IERvdmU= 88984
+X2lzcg== 88985
+IEFQUExZ 88986
+YmVkbw== 88987
+W117Cg== 88988
+IGV2YWN1YXRl 88989
+IG1pY3Jvc2NvcGlj 88990
+5q2j56Gu 88991
+ZXJvdA== 88992
+LW9wZXJhdGl2ZQ== 88993
+aWt1dA== 88994
+IGRibA== 88995
+IGFqb3V0 88996
+Lml4 88997
+ICAgICAgICAKICAgIAo= 88998
+dGVzdGU= 88999
+bml2ZWw= 89000
+LnNuYXA= 89001
+dXR6dA== 89002
+LmlzQWRtaW4= 89003
+KElD 89004
+IG9iZW4= 89005
+IEVmZmljaWVudA== 89006
+RERldmljZQ== 89007
+IGluZGVtbg== 89008
+IGZyb3pl 89009
+LHJw 89010
+IGRlY2VtYmVy 89011
+57uZ 89012
+IG1lbG9kaWVz 89013
+IEVUQQ== 89014
+44GT44KT44Gr44Gh44Gv 89015
+IHF1YWxjaGU= 89016
+IHNldERlZmF1bHRDbG9zZU9wZXJhdGlvbg== 89017
+T1JJQQ== 89018
+IHphZw== 89019
+IGFsbG93YW5jZXM= 89020
+L3Bo 89021
+LVRva2Vu 89022
+IFBvdQ== 89023
+IG1pbmlzdHJpZXM= 89024
+LkxPR0lO 89025
+IHNlYXJjaFRlcm0= 89026
+IGh1cnJpY2FuZXM= 89027
+IEZsb3Vy 89028
+IFNVUw== 89029
+VGhlbWVz 89030
+cmVlY2U= 89031
+IGVudHJldg== 89032
+RFhWRUNUT1I= 89033
+IEJyZW5kYQ== 89034
+RXJyb3JNc2c= 89035
+OildOwo= 89036
+IGRvbWluYQ== 89037
+IEludmlzaWJsZQ== 89038
+PD4oIg== 89039
+cHV0Yw== 89040
+SEFWRQ== 89041
+RXZhbHVhdG9y 89042
+bWF0Y2hpbmc= 89043
+LW5hbWVz 89044
+IGxhaA== 89045
+X1lVVg== 89046
+5pyN5Yqh5Zmo 89047
+LldSSVRF 89048
+KTpc 89049
+LWRlZmluaXRpb24= 89050
+IGNoaW1uZXk= 89051
+LmNscw== 89052
+a25vd2xlZGdl 89053
+IEFsZXhhbmRyZQ== 89054
+IGNvbGVn 89055
+b8WbY2k= 89056
+LkNobw== 89057
+IHNvZnRlbmVk 89058
+IHJvdGF0ZXM= 89059
+LXN0YXRlcw== 89060
+6rc= 89061
+dmlvbGVudA== 89062
+IDopCg== 89063
+IGFjY2nDs24= 89064
+bmlrYQ== 89065
+IExhdHRlcg== 89066
+X0Zsb2F0 89067
+IGVncmVnaW91cw== 89068
+b2RpYWw= 89069
+U3lub3BzaXM= 89070
+KHhp 89071
+IH0sew== 89072
+Y3h4 89073
+RW1tYQ== 89074
+IENvbmN1cnJlbnRIYXNoTWFw 89075
+X0NhbWVyYQ== 89076
+IHBlYW51dHM= 89077
+44Kz44Oh44Oz44OI 89078
+X2JlZA== 89079
+IGVycm9yQ2FsbGJhY2s= 89080
+IFBhcHVh 89081
+LFRydWU= 89082
+tpo= 89083
+IHN0YWRpdW1z 89084
+IGtub2Jz 89085
+aWZpY2FjaW9uZXM= 89086
+IHB1cnBvc2VseQ== 89087
+IFB1cmVDb21wb25lbnQ= 89088
+INC60LvQuA== 89089
+LlRyYWNr 89090
+c3Nj 89091
+KEpvYg== 89092
+KEh0dHBDb250ZXh0 89093
+IGNob2lzaXI= 89094
+IOy7 89095
+IGF1c3A= 89096
+dXBwZW4= 89097
+QWR2ZW50dXJl 89098
+IEZMQUM= 89099
+IGFwcGVsbGFudA== 89100
+ICgoIg== 89101
+z4c= 89102
+IHRyaWY= 89103
+IGR1cmF0aW9ucw== 89104
+IE5HWA== 89105
+LmJw 89106
+YWN0aW9uRGF0ZQ== 89107
+Lmluc3RhbnQ= 89108
+LVJlcXVlc3RlZA== 89109
+JyYm 89110
+INGH0LXRgA== 89111
+PWJvb2w= 89112
+IGxvcmRz 89113
+bGljaW5n 89114
+IG1hcmlu 89115
+IGJsaW5kZWQ= 89116
+L2xheW91dHM= 89117
+ZmVpdG8= 89118
+aXp6bGluZw== 89119
+RXZ0 89120
+IGJ1bGxpc2g= 89121
+ZXhjbHVzaXZl 89122
+4oCZZXM= 89123
+LmdldE93blByb3BlcnR5RGVzY3JpcHRvcg== 89124
+IGJhcHRpemVk 89125
+INGB0LvRg9GH 89126
+IENlY2ls 89127
+LmVmZmVjdHM= 89128
+IGNyeXB0b2dyYXBoaWM= 89129
+IFZpbGxl 89130
+dWZ0 89131
+IEFudGhlbQ== 89132
+IHNlZWtlcg== 89133
+IG5pY2tuYW1lZA== 89134
+IGNhbXBncm91bmQ= 89135
+IGFjdGlvbkJhcg== 89136
+IEVwaXNvZGVz 89137
+IC0tLS0tLS0tCg== 89138
+QnVpbGRlckZhY3Rvcnk= 89139
+X1VOU1VQUE9SVEVE 89140
+VklMTEU= 89141
+LlJlZ2lzdHJ5 89142
+VG9uaWdodA== 89143
+IG1ha3M= 89144
+IGFkZG9ucw== 89145
+IERlY3J5cHQ= 89146
+LnNraWxscw== 89147
+KGZo 89148
+IGp1Z2c= 89149
+IENvdXBsZXM= 89150
+IEFtaXI= 89151
+ID09PT09PT09PT0= 89152
+IGVuZGVyZWNv 89153
+LlN0cmluZ3M= 89154
+IGhhcm1pbmc= 89155
+IGJ1c3RsaW5n 89156
+KGZpcnN0TmFtZQ== 89157
+LnNwYXJzZQ== 89158
+SVRP 89159
+ICAgICAgICAgICAgICANCg== 89160
+5p2l5rqQ 89161
+b2RlZ2E= 89162
+YW5hZ2Fu 89163
+LkhhbmRsZXJGdW5j 89164
+IHRpbmRlcg== 89165
+ICMo 89166
+IGltYWdpbmFibGU= 89167
+IGF1bg== 89168
+UHJlc2VuY2U= 89169
+UGFja2FnZU1hbmFnZXI= 89170
+IGx1ZGljcm91cw== 89171
+acOobWU= 89172
+IGdldE9iamVjdA== 89173
+Ym94aW5n 89174
+IHNxdWlk 89175
+w6p0ZXM= 89176
+RGFlbW9u 89177
+X2xpa2Vz 89178
+hrU= 89179
+Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0t 89180
+Lnd3dw== 89181
+c3NlbA== 89182
+ZXRlY3Rpb25z 89183
+ZGFl 89184
+L2Rvd25sb2Fkcw== 89185
+IENsYXNzaWZpZXI= 89186
+X1NVQkpFQ1Q= 89187
+emVnbw== 89188
+X0dST1VQUw== 89189
+YWN0aWNlcw== 89190
+X2xpdGU= 89191
+IGRhbm1hcms= 89192
+L2Js 89193
+YXB5cnVz 89194
+VElNRVI= 89195
+IFNjcmlwdHVyZXM= 89196
+0Y/Rgg== 89197
+c3Bh 89198
+Ikc= 89199
+IHBlbmV0cmF0aW5n 89200
+IGNvbmZvcm1pdHk= 89201
+bmV3bGluZQ== 89202
+IGx5bg== 89203
+IE1NUA== 89204
+IElOVEVSRkFDRQ== 89205
+IEFjdGlvblR5cGVz 89206
+LmNyaXRlcmlh 89207
+4buRbmc= 89208
+IHJlc3RpdHV0aW9u 89209
+CUZPUg== 89210
+PHBhdGg= 89211
+PT8iOwo= 89212
+KHBlcmNlbnQ= 89213
+bmRv 89214
+IEFDTQ== 89215
+CWN0 89216
+QGE= 89217
+IHTDug== 89218
+IHNwb3R0aW5n 89219
+w7xybg== 89220
+IEdFUg== 89221
+LndyaXRlVmFsdWU= 89222
+X2Jsb2NrZWQ= 89223
+WW1k 89224
+IGluZWZm 89225
+IFJhZGlhdGlvbg== 89226
+IE9pbGVycw== 89227
+QmVlcg== 89228
+cm90cw== 89229
+IFRyb3Q= 89230
+cm5h 89231
+cG9ydGVy 89232
+ZW5lcnk= 89233
+IHBvcm5vZmlsbQ== 89234
+65SU 89235
+X2Nr 89236
+LkNvbXB1dGU= 89237
+IFtdCgoK 89238
+Z2l1bQ== 89239
+IFRFTEU= 89240
+IEluc3RhbmNlcw== 89241
+Kkk= 89242
+IHdpcmVUeXBl 89243
+b25pdW0= 89244
+ZXNoaXJl 89245
+IHB1dGNoYXI= 89246
+IGF3YWtlbmVk 89247
+LmRlZ3JlZQ== 89248
+aGVpdGVu 89249
+LWF3YWl0ZWQ= 89250
+IG5ldXJvdHJhbnM= 89251
+LXRlc3RpZA== 89252
+CgogICAgCg== 89253
+IOe7kw== 89254
+IGtpbm8= 89255
+X0RBWVM= 89256
+IFZhbGVyaWU= 89257
+bnRpdHk= 89258
+QEJlYW4= 89259
+ZXRDb2Rl 89260
+PFJlbmRlcmVy 89261
+IiIK 89262
+IGJlcm4= 89263
+IHRvdGFsaXRhcmlhbg== 89264
+Y2xpbmlj 89265
+IE3DvG5jaGVu 89266
+bm9pbnNwZWN0aW9u 89267
+aXNjZQ== 89268
+X3R1cGxlcw== 89269
+LlBvaW50cw== 89270
+IHBhc3RvcmFs 89271
+SmFr 89272
+a2VuaW5n 89273
+L2NvbHVtbg== 89274
+LXByb2R1Y2luZw== 89275
+IGFib2xpc2g= 89276
+ZmVhcw== 89277
+cmVzcG9uc2VEYXRh 89278
+cmVkaXJlY3RUb1JvdXRl 89279
+IG9ic2VydmF0aW9uYWw= 89280
+cE5leHQ= 89281
+enRl 89282
+Q2hvaWNlcw== 89283
+CUxDRA== 89284
+JlM= 89285
+IGJpbGxpb25haXJlcw== 89286
+X0VPRg== 89287
+IGNvaG9ydHM= 89288
+YW5rZW4= 89289
+LmNvbWJpbmU= 89290
+KE9wdGlvbmFs 89291
+X0NPTlNPTEU= 89292
+QWN0aXZpdHlJbmRpY2F0b3JWaWV3 89293
+IHBoYXJtYWNpc3Q= 89294
+IERvdWdo 89295
+IE9wZXJhdGlvbmFs 89296
+57I= 89297
+IGphbXM= 89298
+U29sbw== 89299
+CWR1cmF0aW9u 89300
+LnJt 89301
+IFRvbmk= 89302
+LmxlYXZl 89303
+IHB1ZWRh 89304
+IEZheQ== 89305
+RGV0YWNo 89306
+Lk1heGltaXplQm94 89307
+IG1hcnR5cg== 89308
+IGhhemU= 89309
+L25l 89310
+IG1hbW1h 89311
+c2VsZWN0b3JNZXRob2Q= 89312
+IHBpbGdyaW1hZ2U= 89313
+IEFzcGhhbHQ= 89314
+IHZhbGlkbw== 89315
+RW5kRWxlbWVudA== 89316
+IGxhcHNl 89317
+ID09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT0K 89318
+aWxvcw== 89319
+ZXJuYWxz 89320
+Q29ubmVjdGlvbkZhY3Rvcnk= 89321
+IExvdmluZw== 89322
+LkNvbXBpbGU= 89323
+IGNvcms= 89324
+IEJ5ZQ== 89325
+aWJOYW1lT3JOaWw= 89326
+ZXN0YXI= 89327
+XEdlbmVyYXRlZFZhbHVl 89328
+KExM 89329
+IFJhaXNlUHJvcGVydHlDaGFuZ2Vk 89330
+IElyYW5pYW5z 89331
+IGdldFByaWNl 89332
+bWFyaWVz 89333
+anVtYm90cm9u 89334
+IFJlYmVscw== 89335
+RElGRg== 89336
+IE1vag== 89337
+b3J0aWM= 89338
+CWNvbnN0ZXhwcg== 89339
+bnRw 89340
+IG1hZ2ljaWFu 89341
+IHBhdHJpb3Rpc20= 89342
+LmNl 89343
+LlNpbXBsZUJ1dHRvbg== 89344
+IFBSSVY= 89345
+aGlzdG9pcmU= 89346
+aGlnaGVy 89347
+cmVmaXhlcg== 89348
+Q0pL 89349
+IE9zd2FsZA== 89350
+LnNwcml0ZXM= 89351
+Lkls 89352
+IGFyY2FuZQ== 89353
+IENodW4= 89354
+X09m 89355
+IGV2ZXJ5dGltZQ== 89356
+0Y7RiQ== 89357
+IGxldHJhcw== 89358
+aWxhbg== 89359
+YmFydQ== 89360
+LWJvdA== 89361
+IFNpZ25pZmljYW50 89362
+iOyKteuLiOuLpA== 89363
+4oCM 89364
+LWlzc3Vl 89365
+IGluc2FuZWx5 89366
+YXRlZ2lj 89367
+X1ZF 89368
+OkNHUG9pbnQ= 89369
+TWFya3M= 89370
+LnByb2JsZW0= 89371
+J10uJy8= 89372
+IHJlZHVuZGFuY3k= 89373
+IGRlY3J5cHRpb24= 89374
+SHVuZw== 89375
+LXZhbGlkYXRl 89376
+IEFuZ2Vsbw== 89377
+Sk0= 89378
+IHBvcG92ZXI= 89379
+ZGViaXQ= 89380
+Q29tcHV0ZWRTdHlsZQ== 89381
+KV9f 89382
+KHNpbg== 89383
+ICcpLA== 89384
+KGRlZnZhcg== 89385
+w7R0ZQ== 89386
+VGhhbk9yRXF1YWxUbw== 89387
+Lnpo 89388
+KE5vdGU= 89389
+aWJCdW5kbGVPck5pbA== 89390
+IFNvbmlh 89391
+eW1vdXM= 89392
+44CCPA== 89393
+IGZpbG15 89394
+IGVhcnRobHk= 89395
+IExlYXJuZWQ= 89396
+W3NlY3Rpb24= 89397
+Lmpzb3Vw 89398
+c3RydXA= 89399
+IFBhdHJvbg== 89400
+ICkq 89401
+c2V0Rm9udA== 89402
+IGhlZw== 89403
+IGRlbHRhWQ== 89404
+X1NDUg== 89405
+LmN1dA== 89406
+IHZiQ3JMZg== 89407
+Lk9iamVjdE1hcHBlcg== 89408
+IHLDqXBvbnNl 89409
+WXU= 89410
+KCl7fQoK 89411
+LXBhcmFtZXRlcg== 89412
+xLFzxLE= 89413
+aWF6emE= 89414
+SVpFUw== 89415
+X1NVUFBMWQ== 89416
+a2l0cw== 89417
+IHJlaW5z 89418
+KGRvY3M= 89419
+JSE= 89420
+IHN5c3RlbWN0bA== 89421
+IFBzcg== 89422
+IFdlcms= 89423
+UGhpbGFkZWxwaGlh 89424
+QlJFQUs= 89425
+LmFwcGVuZFRv 89426
+KGxvbg== 89427
+QWJy 89428
+L3JlbmRlcmVy 89429
+IEVsZWFub3I= 89430
+Q0VSVA== 89431
+UGFyYW1ldGVyVmFsdWU= 89432
+JGdldA== 89433
+IOCy 89434
+IEpM 89435
+IGlnbml0ZQ== 89436
+IGLhuqFu 89437
+IENhdWw= 89438
+IGhhc3Rl 89439
+IGRvbWluZ28= 89440
+VGVzbGE= 89441
+L2NvbmZpZ3VyYXRpb24= 89442
+KGV4cGVjdA== 89443
+dXNyYQ== 89444
+IHByZWZlY3Q= 89445
+IGZyb2dz 89446
+IGFzc2lnbmFibGU= 89447
+IGludGVydmVuZWQ= 89448
+LmNob2ljZXM= 89449
+VUlTdG9yeWJvYXJkU2VndWU= 89450
+IGLDqQ== 89451
+IEzDtnM= 89452
+YWxwaGFiZXQ= 89453
+IHByZWFtYmxl 89454
+ZGJh 89455
+IGVtaXR0aW5n 89456
+Lm1vcmU= 89457
+IEJhc2Vs 89458
+KGRhdGVUaW1l 89459
+KCl9KTsK 89460
+IG5vZGVMaXN0 89461
+IEZQR0E= 89462
+d2Vs 89463
+IGxvZGFzaA== 89464
+X2F1dGhlbnRpY2F0aW9u 89465
+w7NyaW8= 89466
+KHJ1bnRpbWU= 89467
+X1NDRU5F 89468
+IGN1ZmZz 89469
+IEFkcmVzc2U= 89470
+Ojw/ 89471
+X2NtZHM= 89472
+VMOqbg== 89473
+IGVqZWN0 89474
+CUVSUg== 89475
+PE8= 89476
+IEtyYW1lcg== 89477
+4oCmCg== 89478
+c29tZW9uZQ== 89479
+IENQTA== 89480
+77yN 89481
+bG9ja2luZw== 89482
+LkZvb3Rlcg== 89483
+IGFsbQ== 89484
+IEFkb2xm 89485
+KS4v 89486
+IE1hdHRoaWFz 89487
+ICIsIgo= 89488
+ZW51aXR5 89489
+IExvdmVy 89490
+IGFsaW1lbnRvcw== 89491
+cGxldHM= 89492
+w6R0emU= 89493
+KHJlY3Y= 89494
+dXJhYQ== 89495
+U1RET1VU 89496
+YW50eg== 89497
+LkZsb2F0VGVuc29y 89498
+IFJhZQ== 89499
+cGln 89500
+IHRlcnVn 89501
+IHRoZW9sb2c= 89502
+IHRheGlz 89503
+Y29tcG9zaXRl 89504
+c2hlcg== 89505
+bGVEYg== 89506
+IFJhaG1lbg== 89507
+IDst 89508
+SW5kZW50ZWQ= 89509
+IHRyb2xsaW5n 89510
+RVJJQ0FO 89511
+Z2V0RW1haWw= 89512
+X0VOQ09ERQ== 89513
+Z2V0Q2VsbA== 89514
+IFdyYXRo 89515
+KHN1aXRl 89516
+bm90RW1wdHk= 89517
+LmdldFJpZ2h0 89518
+IGJyZWF0aGFibGU= 89519
+44Gf44Gg 89520
+IHNldFRpbWU= 89521
+J29wdGlvbnM= 89522
+IHBheWxvYWRz 89523
+YXVnYQ== 89524
+ZWRt 89525
+KHdlYXRoZXI= 89526
+CXNlbQ== 89527
+KGZyb250 89528
+IHBheW91dHM= 89529
+LnNldFRleHR1cmU= 89530
+LFtdLA== 89531
+IFBhY2tz 89532
+IGNhenpv 89533
+V2l0aFBhdGg= 89534
+UHJvZw== 89535
+bW1hcw== 89536
+IGtvaw== 89537
+LkNzcw== 89538
+IGRlbGE= 89539
+QXdhcmQ= 89540
+w7xsdA== 89541
+c291cA== 89542
+KFsoJw== 89543
+b2xsaXBvcA== 89544
+LFNMT1Q= 89545
+Y2hpYQ== 89546
+IGJsYW5jbw== 89547
+T0xVVEU= 89548
+LXBsYW5l 89549
+LExpc3Q= 89550
+eGluZw== 89551
+SU1BVEU= 89552
+LW1vcnQ= 89553
+IGdyYXZpZA== 89554
+IEhhbmdpbmc= 89555
+IHNjb2Zm 89556
+Lml0ZW1JZA== 89557
+VEhFTg== 89558
+aW5mZXI= 89559
+IG1pc3BsYWNlZA== 89560
+CU1vbm8= 89561
+d2F5bmU= 89562
+IGVkZ2Vk 89563
+X25pY2s= 89564
+IE1BUlQ= 89565
+CXN0YXRlbWVudA== 89566
+IEV2ZW50QnVz 89567
+PkFib3V0 89568
+IGJ1cmdlb25pbmc= 89569
+IGNpY2xv 89570
+TE9PUA== 89571
+IGRlZnk= 89572
+IGVsZW1lbnRUeXBl 89573
+IGNvbnNlcnZhdGlzbQ== 89574
+V2ViSG9zdA== 89575
+LkRpc2FibGVk 89576
+IGNsYXA= 89577
+IEFsZWtz 89578
+cm9yaW5n 89579
+aXNzaW9uYWw= 89580
+LUJvbGQ= 89581
+SVJUSA== 89582
+Lml0ZW1WaWV3 89583
+cWluZw== 89584
+P2tleQ== 89585
+IFZlbm9t 89586
+IGFudGlk 89587
+IEZvcm1hdHRpbmc= 89588
+UVB1c2hCdXR0b24= 89589
+IEFzc2VtYmx5VGl0bGU= 89590
+X3Jlc2VydmU= 89591
+LkRpcmVjdA== 89592
+QW5pbWU= 89593
+IG1hdGVyaWFsbHk= 89594
+IGFkanVuY3Q= 89595
+LnNldFRvb2xUaXBUZXh0 89596
+bGFzc2lhbg== 89597
+KG5y 89598
+IG5pbmfDum4= 89599
+IG1pc3VuZGVyc3RhbmQ= 89600
+IEFwcGx5aW5n 89601
+X2NvbXBhdA== 89602
+IG1peGlu 89603
+IGplb3BhcmR5 89604
+0YvQstCw0LXQvA== 89605
+IGNvY2luYQ== 89606
+X1dST05H 89607
+QVRBUg== 89608
+S0Q= 89609
+IGNhdGVnb3J5TmFtZQ== 89610
+SHR0cENvbnRleHQ= 89611
+IGJ1YmI= 89612
+IGFua2xlcw== 89613
+b3dlcmluZw== 89614
+RnJhbWV3b3Jrcw== 89615
+IHNlZ3VuZG9z 89616
+LkFzc2VtYmx5 89617
+X0VudGl0eQ== 89618
+SFE= 89619
+IGZvdXJz 89620
+IGZvcmZlaXR1cmU= 89621
+dmxhbg== 89622
+LWRvbWluYXRlZA== 89623
+LWF3YXk= 89624
+SUNJRU5U 89625
+LlJlYWRCeXRl 89626
+YW1heA== 89627
+Lj0iPA== 89628
+X3Nwcml0ZXM= 89629
+IFJlbWFpbmluZw== 89630
+TE9PRA== 89631
+X3JlcXVpcmVtZW50cw== 89632
+J2FydGljbGU= 89633
+IFBvbXBlbw== 89634
+IHTDqXI= 89635
+IERyb3Bz 89636
+SG9tZUFz 89637
+SG9tZUFzVXA= 89638
+w7ph 89639
+Lm5hc2E= 89640
+X2Jpbw== 89641
+IFlvc2hp 89642
+RWxlY3Ryb25pYw== 89643
+IGpvc2U= 89644
+IGludGVsaWc= 89645
+ID8+Pjw/ 89646
+PnshIQ== 89647
+X3Byb3Y= 89648
+PURC 89649
+PCEtLQo= 89650
+LWZsb2F0aW5n 89651
+eXVt 89652
+LkpNZW51SXRlbQ== 89653
+IE5hdGlvbndpZGU= 89654
+SW1wb3NzaWJsZQ== 89655
+6K+m5oOF 89656
+SmVycnk= 89657
+IGRlc2Nhcmdhcg== 89658
+7JW8 89659
+RGVjcnlwdA== 89660
+IHRlbXBlcmVk 89661
+IGVrcw== 89662
+w61jaWE= 89663
+Lmxhcmdl 89664
+IHVuZm9sZHM= 89665
+IGh2ZXI= 89666
+IEFWTA== 89667
+LnR0 89668
+4oKA 89669
+PSUu 89670
+IHRvcHBpbmdz 89671
+IHN0b3V0 89672
+IHNlbWluYWw= 89673
+eGVz 89674
+IE9VVEVS 89675
+YWRybw== 89676
+IHlvaw== 89677
+IERlcmU= 89678
+CWZyZW9wZW4= 89679
+X2xuZw== 89680
+Q2h1bmtz 89681
+LmdldE9yRWxzZQ== 89682
+KGVsbQ== 89683
+ICgpKTsKCg== 89684
+Q2VsZWJy 89685
+X2NhcGFiaWxpdHk= 89686
+IHNvY2llZGFk 89687
+IGludGltaWRhdGU= 89688
+IEJsYXplcnM= 89689
+aWd0aA== 89690
+ZW5kY29kZQ== 89691
+VUlMREVS 89692
+IEhhbm5pdHk= 89693
+IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0K 89694
+INC40YHQv9C+0LvRjNC3 89695
+IFRvb2s= 89696
+IE1vdmVk 89697
+IHByb250bw== 89698
+IE1hcnRpbnM= 89699
+RGF0YUV4Y2hhbmdl 89700
+LlBvb2w= 89701
+ZXVz 89702
+IGpvYklk 89703
+IEF4ZXM= 89704
+IGhhbXN0cmluZw== 89705
+LnJtaQ== 89706
+RGF0YVRhc2s= 89707
+IE1hZ2ljTW9jaw== 89708
+IEdBUw== 89709
+IE5hdw== 89710
+IHNuZWw= 89711
+X3NjZW5hcmlv 89712
+IGVtYWlsQWRkcmVzcw== 89713
+IE11c3M= 89714
+IHBob2VuaXg= 89715
+IGRlbnNpdGllcw== 89716
+IE1hY09T 89717
+cmVtYQ== 89718
+IHRlc3RlcnM= 89719
+KT87Cgo= 89720
+IHB1cHM= 89721
+bGFwcw== 89722
+ZGRi 89723
+L1BlYWs= 89724
+IGJhY2tzdGFnZQ== 89725
+IGJhY2tCdXR0b24= 89726
+KG5hdg== 89727
+eEFF 89728
+c3RyY3B5 89729
+aWNodGV0 89730
+IFJpZg== 89731
+4LiB4Lij 89732
+IGhvbm91cmVk 89733
+IGdyYXBwbGluZw== 89734
+VmVydGV4QnVmZmVy 89735
+LmdldEFjY291bnQ= 89736
+LU5ldw== 89737
+IG9wcHJlc3M= 89738
+IHV0dGVyZWQ= 89739
+IFVTQUdF 89740
+X0xFQVZF 89741
+X2NvbGxlY3Rpb25z 89742
+X1V0aWw= 89743
+KCIiKSk7Cg== 89744
+IHF1aWV0ZXI= 89745
+YCksCg== 89746
+IHR5cGVJZA== 89747
+IHNlcmlm 89748
+c3RhbGs= 89749
+IHByaW1hcnlTdGFnZQ== 89750
+eEVB 89751
+Ok5TTGF5b3V0 89752
+X1JC 89753
+X0FQUFM= 89754
+U0tV 89755
+KnNjYWxl 89756
+IENvdWdhcg== 89757
+CVJFVFVSTg== 89758
+aWZpw6k= 89759
+dGltaW5n 89760
+IGlkb2xz 89761
+656Y7Iqk 89762
+4oCUaWY= 89763
+KGZvcm1hdHRlcg== 89764
+IGFtYWxn 89765
+c2V0V2lkdGg= 89766
+LG1pZA== 89767
+b3JlYWw= 89768
+LlJvbGVz 89769
+IGRldmVs 89770
+IGdldEluZGV4 89771
+IHN0b29scw== 89772
+IHNub3d5 89773
+IGdyYW5kaQ== 89774
+0Y/QtdC8 89775
+aWd1aWVudGU= 89776
+0LrQvtCy 89777
+IEN1dHRlcg== 89778
+cm9zY29wZQ== 89779
+YWlyYQ== 89780
+0YPRgNGB 89781
+IHRhYmVs 89782
+IGRlZmlhbmNl 89783
+LlRvQm9vbGVhbg== 89784
+IHBlcmc= 89785
+LWNvbW11bml0eQ== 89786
+IHB1cnN1aXRz 89787
+KG1ldHJpY3M= 89788
+TXVzbGlt 89789
+IFJpeWFkaA== 89790
+IOKCuQ== 89791
+LldlYkVsZW1lbnQ= 89792
+IEhhcmRlbg== 89793
+IENvcnJ1cHRpb24= 89794
+IEFl 89795
+IFRhbm5lcg== 89796
+IGluZGVi 89797
+IENoYXJnaW5n 89798
+X1BST0Q= 89799
+IOKTmA== 89800
+IGNlbnRlclg= 89801
+dHlwaW5n 89802
+IHV4 89803
+IFRvZQ== 89804
+CWxvb3A= 89805
+Zmxv 89806
+UmVnaW9uYWw= 89807
+X2Fh 89808
+IHZpZXdwb2ludHM= 89809
+PnRoaXM= 89810
+LXJlc291cmNlcw== 89811
+IEltYW0= 89812
+IFNoaXY= 89813
+IGFuZHJh 89814
+UkVRVUlSRUQ= 89815
+IHNlZWRlZA== 89816
+dW1vbnQ= 89817
+IHRvYXN0ZXI= 89818
+IGhvbWVzY2hvb2w= 89819
+24zYsQ== 89820
+X2V4dHJhY3Rvcg== 89821
+bW9kZXM= 89822
+IE11bmRv 89823
+X2ZpcmVzdG9yZQ== 89824
+IHB1bmlzaG1lbnRz 89825
+IGJvcmVkb20= 89826
+anVyaWVz 89827
+LlNhZmU= 89828
+YW1iaXF1ZQ== 89829
+IGFkdmVyc2l0eQ== 89830
+VUxFUg== 89831
+IGFuYWxzZXg= 89832
+bW9ycGg= 89833
+IE9tbg== 89834
+KCkiPgo= 89835
+IEdJVkVO 89836
+U3o= 89837
+IG5vdW5z 89838
+IHF1YW0= 89839
+IFdpa2ltZWRpYQ== 89840
+IGR6aWV3Y3o= 89841
+LmNvbW11bmlj 89842
+Q291cmllcg== 89843
+Qm9uZA== 89844
+LmNvbW11bmljYXRpb24= 89845
+LlByZWZlcmVuY2U= 89846
+c2xpZGVEb3du 89847
+L2djYw== 89848
+IHZpYmVz 89849
+QVBJVmlldw== 89850
+IE92ZXJzaWdodA== 89851
+X3Zr 89852
+IGVtcHJlcw== 89853
+IGFyaXNlbg== 89854
+ICovKQ== 89855
+KCcoJw== 89856
+IGJ0dw== 89857
+IGNvbmV4acOzbg== 89858
+IFV6YmVr 89859
+IOyEnA== 89860
+IGltYWdlVVJM 89861
+44Kq 89862
+c3RvcHBlZA== 89863
+IFdvdWxkbg== 89864
+IENoZXc= 89865
+Z3LDqQ== 89866
+IHRydXRoZnVs 89867
+IFRyYW5zcGFyZW50 89868
+KHNlcnY= 89869
+IE1jS2F5 89870
+PXJlYWQ= 89871
+IFNhbw== 89872
+CUdyaWQ= 89873
+IGluZHVjZXM= 89874
+Lmxpc3RGaWxlcw== 89875
+IGNhcnJlcmE= 89876
+IGljb25OYW1l 89877
+IENhcmx0b24= 89878
+LkV2ZW50VHlwZQ== 89879
+IGRyYXBlZA== 89880
+X1NBTVBMRVM= 89881
+KGVzdA== 89882
+IFJ1aXo= 89883
+IGNhcHRhaW5z 89884
+IG1hZmlh 89885
+IFJhcGhhZWw= 89886
+IEdBUA== 89887
+aW1wYW4= 89888
+Y29taWM= 89889
+IG1hbnRlbg== 89890
+JEw= 89891
+IGFmdGVybWFya2V0 89892
+15c= 89893
+IENm 89894
+CXRpbGU= 89895
+QXBwU3RhdGU= 89896
+IHdob2xlc2FsZXJz 89897
+bG93ZXN0 89898
+RGVtb2NyYXRpYw== 89899
+IHBvd2VyaW5n 89900
+YXBvdA== 89901
+IENvcnRleA== 89902
+KHNpbmdsZQ== 89903
+b3BoeXNpY2Fs 89904
+LnV0Zg== 89905
+77yf44CN 89906
+IHRhcmVh 89907
+RXF1aXA= 89908
+IGtsaWs= 89909
+IHJ1YQ== 89910
+IGFWYWx1ZQ== 89911
+IE1pbmVy 89912
+IFZlZw== 89913
+YW55bA== 89914
+Q293 89915
+QGM= 89916
+X0xPQURFRA== 89917
+IEFITA== 89918
+d2FrZQ== 89919
+LkxvZ0luZm9ybWF0aW9u 89920
+KGNhdGVnb3JpZXM= 89921
+IFFVRVNUSU9O 89922
+LnVtbA== 89923
+IENyZWF0ZU1hcA== 89924
+bWVlcg== 89925
+IHJlbmNvbnRyZXI= 89926
+X3N1 89927
+IGF0bGVhc3Q= 89928
+KFByb3BlcnR5TmFtZQ== 89929
+IFlhbw== 89930
+IEhhdXB0 89931
+QmxvY2tTaXpl 89932
+IFNBQw== 89933
+IExlZ3M= 89934
+Yml0ZQ== 89935
+IGxvZ2FyaXRo 89936
+IElNZXNzYWdl 89937
+QmFja2Ryb3A= 89938
+IGdkaw== 89939
+7Jy866m0 89940
+LmV4Y2x1ZGU= 89941
+QURPUw== 89942
+LXNoaWZ0 89943
+YXRobGV0ZQ== 89944
+X2NvbWJpbmVk 89945
+IHJlYmF0ZQ== 89946
+IHBhcmQ= 89947
+IGltcGVkYW5jZQ== 89948
+cmVhdQ== 89949
+Xw0KDQo= 89950
+IGRhZ2Vu 89951
+a2VsYXM= 89952
+IGluZ3Jlc2Fy 89953
+IEJSQU5E 89954
+Lm1rZGlycw== 89955
+IHJlaWduaW5n 89956
+VGFsa2luZw== 89957
+LyoqCgo= 89958
+X1JFU09VUkNFUw== 89959
+IFBST0dNRU0= 89960
+IGRhdGFTaXpl 89961
+44Og 89962
+ZGVueQ== 89963
+SVJT 89964
+IHRlbGV2aXM= 89965
+PV8oJw== 89966
+ZWdpcw== 89967
+PD8s 89968
+IHVwc2V0dGluZw== 89969
+IHNhdWNlcw== 89970
+IHB1ZXJ0bw== 89971
+IFZvZ3Vl 89972
+aWRpbmU= 89973
+IEdyZWVud29vZA== 89974
+emlvbg== 89975
+L3F0 89976
+5bGA 89977
+Lmxhbmd1YWdlcw== 89978
+IFBsYXlib3k= 89979
+b25uZW1lbnQ= 89980
+IFBvc2l0aW9uZWQ= 89981
+IOS4uw== 89982
+IEZyaXR6 89983
+SW5pdGlhbGx5 89984
+bm9kZVZhbHVl 89985
+X1RSSUFOR0xFUw== 89986
+LWJhY2tlbmQ= 89987
+dG9JU09TdHJpbmc= 89988
+IEdvdmVybm9ycw== 89989
+WUxPTg== 89990
+Lk9SREVS 89991
+RE9J 89992
+IENoZXZyb24= 89993
+IGRlY2tpbmc= 89994
+IFNoYXJpYQ== 89995
+b3RoZXJtYWw= 89996
+RW1wdHlFbnRyaWVz 89997
+KEluaXRpYWxpemVk 89998
+ZG9yZg== 89999
+Lmx1 90000
+KFJvb20= 90001
+LlllbGxvdw== 90002
+IEFicmFt 90003
+X2xt 90004
+INC90LDQvw== 90005
+IFRIQU4= 90006
+fi1+LX4tfi0= 90007
+Lk92ZXJyaWRl 90008
+IFNWTQ== 90009
+IFN1c3BlbnNpb24= 90010
+IGFic29yYnM= 90011
+X3RyYWZmaWM= 90012
+ICI+Ig== 90013
+LmZpdHM= 90014
+IHJlaW5mb3JjaW5n 90015
+IG1veWVu 90016
+ZXJlcg== 90017
+IFJvc2Vuc3RlaW4= 90018
+IFdlc3Rvbg== 90019
+IGNvbmZpbmVz 90020
+T0xB 90021
+b3JyYWluZQ== 90022
+X0dSUA== 90023
+IHN0cmFwcGVk 90024
+IG1pbmdsZQ== 90025
+CVZr 90026
+IG5vc3RyYQ== 90027
+IGFjdHJlc3Nlcw== 90028
+IFNhbW15 90029
+bGlnbmU= 90030
+SUdITElHSFQ= 90031
+IHN0dXA= 90032
+aWN0b3J5 90033
+IGNvbnZpY3Q= 90034
+IHN1cHA= 90035
+cGVvbg== 90036
+dnJpZXI= 90037
+IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyM= 90038
+IHRyb3R6 90039
+IG1lbHRkb3du 90040
+YXJrZXJz 90041
+LlNlbGVjdENvbW1hbmQ= 90042
+IExpYWJpbGl0eQ== 90043
+IEJlY2FtZQ== 90044
+IGx1Y2tpbHk= 90045
+INC/0L7RgA== 90046
+IHJlYXNzdXJl 90047
+IENvbnRyYXN0 90048
+IEF1ZHJleQ== 90049
+IENvbnN1bHRhbnRz 90050
+IFF1ZW50aW4= 90051
+LU93bmVk 90052
+b2NyaW4= 90053
+X1NUUklQ 90054
+IHJldGFsaQ== 90055
+IHJhbGx5aW5n 90056
+IFJlcXVlc3RDb250ZXh0 90057
+IG1hc3NhYw== 90058
+CWdy 90059
+TEVF 90060
+IGNhxYI= 90061
+IEpvYW5uYQ== 90062
+4butYQ== 90063
+aGho 90064
+IHNxbFNlc3Npb24= 90065
+xLFrbA== 90066
+Q29tcG9zZXI= 90067
+IGN1cnJlbnRQbGF5ZXI= 90068
+YWdpbmk= 90069
+IEJhcmJhcg== 90070
+IEhlbGxvV29ybGQ= 90071
+bG9vbWJlcmc= 90072
+LkhlcmU= 90073
+IGRpc2d1c3RlZA== 90074
+CQkJCQkJICAgIA== 90075
+b2t1cw== 90076
+VmV0ZXI= 90077
+IGNob3Bz 90078
+IEZPUldBUkQ= 90079
+IEVpZw== 90080
+IFBhcnRpYWxWaWV3 90081
+IGltcG9zcw== 90082
+IGNvbnNlcXVlbnRpYWw= 90083
+IFsnIw== 90084
+CWxvZ2dpbmc= 90085
+IEVsaXM= 90086
+cHJvY3M= 90087
+LDwv 90088
+X3BpbnM= 90089
+XERvY3RyaW5l 90090
+VXZz 90091
+IEdJVA== 90092
+IHRhaA== 90093
+KHJ1bGVz 90094
+Y3JlYXRlRnJvbQ== 90095
+ICctJykK 90096
+aGFuZGxpbmc= 90097
+ZXh0ZXJuYWxBY3Rpb25Db2Rl 90098
+Uk9EVUNUSU9O 90099
+Rm9yUmVzb3VyY2U= 90100
+c2J1cmc= 90101
+PFRleHRWaWV3 90102
+dGhpbmthYmxl 90103
+YW5nbGluZw== 90104
+ICJ9XA== 90105
+UFJT 90106
+QXBwcm92YWw= 90107
+IGtsaWVudA== 90108
+bm91bg== 90109
+IERpYW1vbmRz 90110
+SEc= 90111
+IFRyaWJhbA== 90112
+LnB4 90113
+IHByb3BOYW1l 90114
+IGhlbHk= 90115
+0LvQuNGH 90116
+IEJvdXRpcXVl 90117
+Iik7fQo= 90118
+L2hvc3Q= 90119
+IHN0YXR1c0Jhcg== 90120
+PkRhdGE= 90121
+IGRpc2NvbnRlbnQ= 90122
+IGZyYWls 90123
+LmVsZW1lbnRBdA== 90124
+IGVtYW5j 90125
+CWZ1bg== 90126
+YXR0bGVz 90127
+IHByb3B1bHNpb24= 90128
+IGludGVyY2hhbmdlYWJsZQ== 90129
+IFRhbWJpw6lu 90130
+IHZlbmVy 90131
+X0xPV0VS 90132
+IHBkbw== 90133
+IGRldGVyZ2VudA== 90134
+IHRhdmVybg== 90135
+VmVudWU= 90136
+Lmphc3Blcg== 90137
+eXR0 90138
+IEppaGFk 90139
+4oCZw6A= 90140
+IG1lZGlhUGxheWVy 90141
+P3A= 90142
+cGNm 90143
+YW5kb25lZA== 90144
+IHJlY2ViZXI= 90145
+T1RQ 90146
+KGlPUw== 90147
+KCckew== 90148
+UHRz 90149
+IG1hbmFnZXJpYWw= 90150
+IFR1ZA== 90151
+IFdFTEw= 90152
+b3pl 90153
+IEFudG9pbmU= 90154
+IFxcCg== 90155
+IFZlY3Q= 90156
+IFdpbWJsZWRvbg== 90157
+aXNtZXQ= 90158
+IGJvdGhlcmluZw== 90159
+aW9zaXM= 90160
+Z2V0TWV0aG9k 90161
+IGlucHV0RGF0YQ== 90162
+IEJpbmRlcg== 90163
+IGRjdA== 90164
+w6Fsbg== 90165
+X0JPTEQ= 90166
+IEp1Z2VuZA== 90167
+IEJlZ2lubmVycw== 90168
+aW9tcw== 90169
+IHJlbGVudGxlc3NseQ== 90170
+IE1vbmRheXM= 90171
+5LyY 90172
+VG9tb3Jyb3c= 90173
+IFNhbXA= 90174
+XFBlcnNpc3RlbmNl 90175
+TUFTVEVS 90176
+KHByZWRpY3Rpb25z 90177
+KG51bWVybw== 90178
+LnR3aXRjaA== 90179
+LlJlc3RyaWN0 90180
+IFpa 90181
+IE1MTQ== 90182
+LlNtYWxs 90183
+XWJ5dGU= 90184
+IFZpZXdQYWdlcg== 90185
+IEFnZW5jaWVz 90186
+IHBhcnRpY2lwYXRlcw== 90187
+IGluaXRXaXRoU3R5bGU= 90188
+JVg= 90189
+IGAs 90190
+Lk9iag== 90191
+ID8iKTsK 90192
+Q2FyZWVy 90193
+IDwlPQ== 90194
+a3Vs 90195
+Q3BwSQ== 90196
+IE11c2hyb29t 90197
+dXJhdA== 90198
+bWlh 90199
+Q2Q= 90200
+YXJkdWlubw== 90201
+IGNvdW50cnlDb2Rl 90202
+X3BsYWNlbWVudA== 90203
+KCI9PT09PT09PT09PT09PT09 90204
+LWJlbA== 90205
+QXNzZXJ0aW9ucw== 90206
+IHByw7N4aW1h 90207
+KCkiKQo= 90208
+X2Vn 90209
+U1NJUA== 90210
+dXpl 90211
+cGxhY2Vy 90212
+YW1iaWd1b3Vz 90213
+X0lOSVRJQUxJWkVS 90214
+IEhhdHM= 90215
+IEdPT0dMRQ== 90216
+IGFnaXRhdGlvbg== 90217
+KG11dGV4 90218
+SElHSA== 90219
+OiIp 90220
+IGludmFkZXJz 90221
+ICl9Cgo= 90222
+Lm1hbnVhbA== 90223
+IFNpZW1lbnM= 90224
+CUpQYW5lbA== 90225
+YmluZHVuZw== 90226
+ZWNlcmE= 90227
+L21ldA== 90228
+IMOpYw== 90229
+KHN0YXRpb24= 90230
+IHBvc2ljacOzbg== 90231
+X2lzc3Vlcw== 90232
+X2FsaWFzZXM= 90233
+X3RvcG9sb2d5 90234
+IEF1dG9kZXNr 90235
+QWNrbm93bGVk 90236
+ISpcCg== 90237
+IEZyZWlnaHQ= 90238
+IEZYTUxMb2FkZXI= 90239
+aWNoZWw= 90240
+KENoYXRDb2xvcg== 90241
+IGRpc3NvY2k= 90242
+IGFuYWxvZ3Vl 90243
+PHVzaXpl 90244
+LWV2 90245
+IHRlbmRy 90246
+PkFsbA== 90247
+IFVTRVJT 90248
+LnJlc3A= 90249
+X2ludGVncmF0aW9u 90250
+RGlzcGxheVN0eWxl 90251
+RkFJTFVSRQ== 90252
+0YfQuNGC 90253
+aWxkZWQ= 90254
+X3NlbWFwaG9yZQ== 90255
+YWNhZGVtaWM= 90256
+IHNjbGVyb3Npcw== 90257
+RmFs 90258
+LHN0 90259
+YD0= 90260
+aWZ0b24= 90261
+IHN1YnN0aXR1dGVz 90262
+IFN1cHBvcnRlcnM= 90263
+YXBwbGljYW50 90264
+KGt2 90265
+IEJlcm11ZGE= 90266
+IGRpc2NyZXBhbmNpZXM= 90267
+LlNvbGlk 90268
+d2VlbmV5 90269
+IGd1bA== 90270
+IGZpbGV0eXBl 90271
+IHJlc3VsdGF0 90272
+U2VuZGVySWQ= 90273
+IGdlem9jaHQ= 90274
+IEJlcmtzaGlyZQ== 90275
+ICgiPA== 90276
+KG1s 90277
+KHNoaWZ0 90278
+X1JFRElSRUNU 90279
+T0xPTg== 90280
+L2Jyb3dzZQ== 90281
+Ok5TTWFrZVJhbmdl 90282
+IHdhaXZl 90283
+IGV4Y2U= 90284
+IGNhdGFsb2dz 90285
+5Lmm 90286
+aWxsaW9ucw== 90287
+LkdldEN1cnJlbnRNZXRob2Q= 90288
+IGJpbGluZ3VhbA== 90289
+IENhc2NhZGVUeXBl 90290
+CVRyYW5zZm9ybQ== 90291
+X0NVU1RPTUVS 90292
+aXNpZnk= 90293
+INCx0Ls= 90294
+IFdob2V2ZXI= 90295
+IEVBUg== 90296
+IFs9Ww== 90297
+INC80L7QttC90L4= 90298
+IGphcmRpbg== 90299
+QHNob3c= 90300
+IGhlaXJz 90301
+IGFiYW5kb25tZW50 90302
+IFRyYW5zY3JpcHQ= 90303
+XV4= 90304
+OlNldFBvaW50 90305
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAo= 90306
+IEZhY3Rpb24= 90307
+KGVudGl0aWVz 90308
+ZmFjdGlvbg== 90309
+bXR4 90310
+X3JlY2FsbA== 90311
+Lk5VTEw= 90312
+Lm9wdGlvbmFs 90313
+KHByZWRpY3Rpb24= 90314
+QUdFTlQ= 90315
+IPCfmIA= 90316
+4oCZeQ== 90317
+4oCZdXRpbA== 90318
+IGFuZ3N0 90319
+LkV4cGVyaW1lbnRhbA== 90320
+aG9vdA== 90321
+YXN5YXJhaw== 90322
+YXV0b3BsYXk= 90323
+IFNwbGFzaFNjcmVlbg== 90324
+IGhlY3RpYw== 90325
+IG1ldGljdWxvdXNseQ== 90326
+IGNvbWVy 90327
+S2VpdGg= 90328
+IGZyYXNl 90329
+X1VOSVFVRQ== 90330
+Lk1hZ2VudGE= 90331
+KE1heA== 90332
+IHNjYWxlWQ== 90333
+IHB1dHQ= 90334
+KElG 90335
+IEFQUExF 90336
+UG9ybm8= 90337
+LmFkZENlbGw= 90338
+IG1vbHQ= 90339
+Y2hpbXA= 90340
+IGxlZ2dpbmdz 90341
+IGZsb3A= 90342
+4oCZaHVp 90343
+UlRPUw== 90344
+L3NwYW4= 90345
+LmJlZA== 90346
+LkxvZ2lj 90347
+IHVudHJhbnNsYXRlZA== 90348
+Q0xFQVI= 90349
+O2xlZnQ= 90350
+IEJGUw== 90351
+LWdyb3Vwcw== 90352
+dG9vaw== 90353
+X2FjY2VwdGVk 90354
+IGNhc2hpZXI= 90355
+ZXZlbnRJZA== 90356
+IGRvd25ncmFkZQ== 90357
+CQkJCQkJCQkJCQkK 90358
+0LDQvdC40Y4= 90359
+w6RuZGU= 90360
+IGNvdW5jaWxsb3I= 90361
+IGRyZWQ= 90362
+ZFQ= 90363
+V1JBUFBFUg== 90364
+Lm9s 90365
+5LiA6aG1 90366
+TUVB 90367
+IGtpbmV0aWNz 90368
+IGptcA== 90369
+X2ZsaWdodA== 90370
+RmVhcg== 90371
+IENoYW5lbA== 90372
+X21pZ3JhdGlvbg== 90373
+aGRs 90374
+ZXJlcXVpc2l0ZQ== 90375
+LnJhcg== 90376
+LU9uZQ== 90377
+IHNoZXBoZXJk 90378
+LmVhc2luZw== 90379
+KGRlc2NyaXB0b3I= 90380
+IHN1YnRvdGFs 90381
+44OT 90382
+Q29tcGlsZWQ= 90383
+IENvbHQ= 90384
+ZGxl 90385
+L21vY2s= 90386
+KXJvdw== 90387
+IHJlc2V0dA== 90388
+dGVybw== 90389
+IGFlcm9iaWM= 90390
+LmludHJv 90391
+IGNoZWNrYm94ZXM= 90392
+IE1jQ2FydG5leQ== 90393
+IENseWRl 90394
+77yM5bm2 90395
+Y29vbGRvd24= 90396
+LWluc3RhZ3JhbQ== 90397
+IE1QRw== 90398
+IExlaXN1cmU= 90399
+IG5hd2V0 90400
+IE5YVA== 90401
+UmVndWxhckV4cHJlc3Npb24= 90402
+IHJhdmU= 90403
+QklMTA== 90404
+IGJhcnRlbmRlcg== 90405
+RW5sYXJnZQ== 90406
+IHZhaXM= 90407
+IDoKCgoK 90408
+LkVuZHBvaW50 90409
+ICIsDQo= 90410
+fX0iPnt7JA== 90411
+dHJlZXM= 90412
+LmVuZw== 90413
+KmxvZw== 90414
+OltdLAo= 90415
+IGJhdHRhbGlvbg== 90416
+U3ViamVjdHM= 90417
+IGV4cG9zaXRpb24= 90418
+IFRvYXN0cg== 90419
+IHRvcExldmVs 90420
+IENFTA== 90421
+IGd1YmVybg== 90422
+dW5zdWJzY3JpYmU= 90423
+Y29uYQ== 90424
+X2FwcHJveA== 90425
+VFo= 90426
+IFRyZWVTZXQ= 90427
+LmNvbW11bml0eQ== 90428
+IG5hcnJvd2Vy 90429
+KEV4cGVjdGVk 90430
+Q2xy 90431
+IGdvcmU= 90432
+IGFjcXVpdHRlZA== 90433
+IEVVUk8= 90434
+G1s= 90435
+IHJlcHVibGljYW4= 90436
+IGF1dG9iaW9ncmFwaHk= 90437
+X2Zkcw== 90438
+Q29sbGFwc2Vk 90439
+IA0KIA0K 90440
+LXBpbGxz 90441
+TUJFRA== 90442
+IGlOZEV4 90443
+IHJlc3BvbnNlVHlwZQ== 90444
+Z2xmdw== 90445
+LXR1cm5lZA== 90446
+5Y+R5biD 90447
+CUJvb2xlYW4= 90448
+Lk9y 90449
+aW5pYQ== 90450
+IGhvdmVyZWQ= 90451
+IHNvcnRlcg== 90452
+IE5o 90453
+IEV4ZXJjaXNlcw== 90454
+bGVtZW50cw== 90455
+aWRvbg== 90456
+VG9l 90457
+IHLDqWbDqQ== 90458
+U1NGV29ya2Jvb2s= 90459
+IG9yZ2FuaXNlcnM= 90460
+IHJlc3VsdE1hcA== 90461
+X0hPUg== 90462
+RG9k 90463
+TG9jYWxTdG9yYWdl 90464
+IGpzb25SZXNwb25zZQ== 90465
+QXV0aFNlcnZpY2U= 90466
+IHNtZQ== 90467
+ZW1icm9z 90468
+IGxvYmJ5aXN0 90469
+b2d1aQ== 90470
+LnNwaW4= 90471
+IENvcnJlY3Rpb25z 90472
+X1JBRA== 90473
+IExTTQ== 90474
+KGN1cnJlbmN5 90475
+IOaA 90476
+IHByZWZldGNo 90477
+LkhlYWQ= 90478
+LXJlYWRlcg== 90479
+IFJveg== 90480
+CW1vdXNl 90481
+IFRMQw== 90482
+IFFUYWJsZVdpZGdldEl0ZW0= 90483
+IFNUT1JBR0U= 90484
+YW5uZWVy 90485
+IOyXkA== 90486
+YWNlbg== 90487
+U1g= 90488
+SW1hZ2VSZWxhdGlvbg== 90489
+IHJlc3VyZ2VuY2U= 90490
+aXp6eQ== 90491
+aWxvZ3Vl 90492
+SVZBTA== 90493
+IHNtYWNr 90494
+cnJoYQ== 90495
+KFBBUkFN 90496
+IUk= 90497
+IE1lY2g= 90498
+IElNYXBwZXI= 90499
+IGdpc3Q= 90500
+IFBPRA== 90501
+dm9yZQ== 90502
+dWxhw6fDo28= 90503
+ICwt 90504
+IGludm9sdW50YXJ5 90505
+UVJT 90506
+PXRpdGxl 90507
+IEJpb20= 90508
+IFNoZWxsZXk= 90509
+IENTUA== 90510
+UGVz 90511
+ZHJvcHM= 90512
+INGD0YHQv9C10Yg= 90513
+ZGl2ZXM= 90514
+IVsK 90515
+IExlYXN0 90516
+IGtha28= 90517
+IE1vZGVsbw== 90518
+IGZ1bmN0aW9uTmFtZQ== 90519
+IGNob2tpbmc= 90520
+IGRlZm9ybWF0aW9u 90521
+JywnJyk7Cg== 90522
+Y2HDp8Ojbw== 90523
+IHNxdWlycmVs 90524
+c2V0QmFja2dyb3VuZA== 90525
+QnJva2Vu 90526
+cG9saXQ= 90527
+Tm9uY2U= 90528
+IGtleWVk 90529
+TWVzaFBybw== 90530
+LnVzZXJJbnRlcmFjdGlvbkVuYWJsZWQ= 90531
+IGZsdXNoaW5n 90532
+IGJwcA== 90533
+IEFuZ2xpYw== 90534
+VHJvdQ== 90535
+IFdhbHRlcnM= 90536
+IHN0dXR0ZXI= 90537
+SGlw 90538
+X3dhcg== 90539
+aXZlbWVudA== 90540
+Q29ybg== 90541
+IHVuZHVl 90542
+YXBhdGthbg== 90543
+IG1pbmRlbg== 90544
+c2lnbmlmaWNhbnQ= 90545
+KHF1YW50aXR5 90546
+JGluc2VydA== 90547
+IEFMRVJU 90548
+LlVuaWNvZGU= 90549
+aWhu 90550
+XTo9 90551
+IHBpbk1vZGU= 90552
+IGZyYWlz 90553
+aW50ZXJwcmV0ZXI= 90554
+J2FjdGlvbg== 90555
+IGJsZWliZW4= 90556
+obQ= 90557
+cm93c2Vycw== 90558
+R0lU 90559
+X0RJUlM= 90560
+Rm9yZXZlcg== 90561
+IFBkZlBDZWxs 90562
+fG0= 90563
+LnNldEhlaWdodA== 90564
+IGZvcmVhcm0= 90565
+IGJhdHRsZWdyb3VuZA== 90566
+INC/0L7RgdC70LXQtA== 90567
+IEhhdGg= 90568
+IEF1dGhvcml6ZWQ= 90569
+IGNvbmZlcnJlZA== 90570
+IEJPVFRPTQ== 90571
+LmdldEZsb2F0 90572
+b2dyYXBoZWQ= 90573
+YXJkeQ== 90574
+IHNlcnZpw6dv 90575
+b3RveGlj 90576
+L2F1dGhlbnRpY2F0aW9u 90577
+IHJlcHLDqXNlbnQ= 90578
+IGNvbXBsZXhpb24= 90579
+CUNvbW1vbg== 90580
+X2Jo 90581
+V2hvbGU= 90582
+SW1hZ2VEYXRh 90583
+IHRpbms= 90584
+ZXF1YWxUbw== 90585
+IFRIUg== 90586
+IGRlbHRhcw== 90587
+IEFHRQ== 90588
+aXphZG9y 90589
+YWRtaW5pc3RyYXRpb24= 90590
+cXVldHM= 90591
+X2ZpbGxlZA== 90592
+IEjDpA== 90593
+YWxsb2Nh 90594
+IEJvb25l 90595
+CWxjZA== 90596
+Rm9sZGVyUGF0aA== 90597
+LlJhaXNl 90598
+XyN7 90599
+ZXJ0aW5v 90600
+IFRocm9uZQ== 90601
+4K6/ 90602
+b3hldGluZQ== 90603
+cHJheQ== 90604
+IGRpbGlnZW50bHk= 90605
+IEFyY2hpZQ== 90606
+Lm11bHRpcGFydA== 90607
+IHNlbw== 90608
+LmdldFByb2plY3Q= 90609
+IHBhag== 90610
+Y2xlcm9zaXM= 90611
+YW1lcm9u 90612
+IHRvdXJlZA== 90613
+IG5pa2U= 90614
+IEJha2VyeQ== 90615
+LHBhcmVudA== 90616
+X1RFTQ== 90617
+U3BhdGlhbA== 90618
+bGFwcGluZw== 90619
+UHJvZHVjZXNSZXNwb25zZVR5cGU= 90620
+KGJhbGFuY2U= 90621
+SHVuZHJlZHM= 90622
+LXRlcm1pbmFs 90623
+IkRv 90624
+Q29udGVudFNpemU= 90625
+IGJiYw== 90626
+IGTDqWNvdXZyaXI= 90627
+dXRpbHVz 90628
+LnVuZG8= 90629
+LG91dHB1dA== 90630
+Z3JvdXBOYW1l 90631
+JG1heA== 90632
+IEFsbGE= 90633
+INC60LDRgNGC 90634
+Lk9ORQ== 90635
+X2RlY2lzaW9u 90636
+RUVFRQ== 90637
+IHhPZmZzZXQ= 90638
+56o= 90639
+IHJ1bmF3YXk= 90640
+IGhhbmRqb2I= 90641
+IGdlbml0YWxz 90642
+KGpUZXh0RmllbGQ= 90643
+LnJhZGlhbnM= 90644
+IFBhZHJlcw== 90645
+ZGVwZW5kZW5jZQ== 90646
+IHN3YWxsb3dpbmc= 90647
+cm90ZWlu 90648
+IGZsZWV0cw== 90649
+IGNhcmF0dGVy 90650
+KGNhbg== 90651
+IEZsb3JhbA== 90652
+X01zZw== 90653
+IGRlY2xhcmFjacOzbg== 90654
+bHNydQ== 90655
+c2Nob29scw== 90656
+IGRlbGVnYXRlZA== 90657
+IFBlbmFs 90658
+IENoZXJu 90659
+U21hcnRQb2ludGVy 90660
+c3Rvcnlib29r 90661
+IE55bG9u 90662
+5oCd 90663
+X0xFU1M= 90664
+L2FkZHJlc3M= 90665
+IENPUlM= 90666
+IOydtOuvuA== 90667
+IG1vZGE= 90668
+bWRw 90669
+IGRlcmJ5 90670
+IFBoYXJtYWNldXRpY2Fscw== 90671
+IGV5ZWQ= 90672
+X2NwdXM= 90673
+6KaL 90674
+fHwK 90675
+Lm1hZw== 90676
+KFFM 90677
+IENpdmlsaXphdGlvbg== 90678
+6Yw= 90679
+X0RlcA== 90680
+IHN3ZWFyaW5n 90681
+IFNob3J0cw== 90682
+dWViYXM= 90683
+IGRlbGluZQ== 90684
+IEFkdmlzb3Jz 90685
+IOyeiOuLpA== 90686
+X0ZJTkU= 90687
+fSk6 90688
+LGFzc2lnbg== 90689
+IFBDSWU= 90690
+e3t7 90691
+U2Np 90692
+IGFtYm9z 90693
+aWxlZW4= 90694
+IHR1bmVy 90695
+IHBhcmFtTmFtZQ== 90696
+LHRvdGFs 90697
+KExvY2FsRGF0ZQ== 90698
+IHNwcA== 90699
+IGVycm9yZXM= 90700
+IEhlbHBpbmc= 90701
+X21lcmdlZA== 90702
+LnRpbWVTY2FsZQ== 90703
+X0VMRU0= 90704
+X1NPTA== 90705
+IGF2ZW50 90706
+PGQ= 90707
+SnVuaW9y 90708
+CWJhcg== 90709
+Lmx2 90710
+IOy5 90711
+PXd4 90712
+IG1pcmFjdWxvdXM= 90713
+IFJhbmRvbUZvcmVzdA== 90714
+IEZyYW5rZW4= 90715
+YGAs 90716
+KEluaXRpYWxpemVkVHlwZUluZm8= 90717
+IHN1cGVyaGVyb2Vz 90718
+IGFuc2libGU= 90719
+X1R5cGVEZWY= 90720
+IFBlcm0= 90721
+T0xFUg== 90722
+R3Jhbg== 90723
+LW5vdGlmaWNhdGlvbg== 90724
+IGtheg== 90725
+IGV4aGlsYXI= 90726
+c2VydGVy 90727
+IHN0b3JlZnJvbnQ= 90728
+X2VuZHM= 90729
+IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMK 90730
+CWdpdA== 90731
+RFNQ 90732
+Q0hBSU4= 90733
+rLQ= 90734
+SW52YWxpZE9wZXJhdGlvbkV4Y2VwdGlvbg== 90735
+IFNseQ== 90736
+77yaPA== 90737
+QnJpdGFpbg== 90738
+L3NsaWRlcg== 90739
+IHptcQ== 90740
+IGJhag== 90741
+YnJlZA== 90742
+LlZBTFVF 90743
+IGdyaWV2aW5n 90744
+IHBvcm7DtHM= 90745
+aWd1YQ== 90746
+SU5DTFVERUQ= 90747
+V2FrZQ== 90748
+Y2Jk 90749
+IE1vbmdvbGlh 90750
+aW52aXNpYmxl 90751
+IGNvcnJlY3RpdmU= 90752
+IGNlbnRlcnBpZWNl 90753
+Q2F1Z2h0 90754
+IGthcmFrdGVy 90755
+YWxtw7Y= 90756
+IGJlbHVt 90757
+IGFkam9pbmluZw== 90758
+Pygi 90759
+IFZpc3VhbGl6YXRpb24= 90760
+a2tl 90761
+aWZpY2Fkb3M= 90762
+c3Bk 90763
+X0NCQw== 90764
+LUxhbmd1YWdl 90765
+IHN0aWw= 90766
+b3JldGljYWw= 90767
+KGNvbXBsZXRpb24= 90768
+IFZlcmbDvGd1bmc= 90769
+X1RyZWU= 90770
+cmlwcGxpbmc= 90771
+LlJlbW92ZUVtcHR5RW50cmllcw== 90772
+IFRBWA== 90773
+CUNvZGU= 90774
+5YuV 90775
+dXJnYQ== 90776
+INGD0LbQtQ== 90777
+IGFpZGVy 90778
+IFByZXNjb3R0 90779
+IGZpbGFtZW50 90780
+IC0tLS0tLS0tLS0tLS0tLS0tLS0t 90781
+dGhlcm9z 90782
+0LXRgNCw 90783
+ZGViaWFu 90784
+w6RobA== 90785
+b2xhaA== 90786
+X1VOSVRT 90787
+QXJr 90788
+TW91bnRlZA== 90789
+LlRyaW1TcGFjZQ== 90790
+LmdldE51bWJlcg== 90791
+X2VvZg== 90792
+Lm5y 90793
+IFNIQVJFUw== 90794
+aWxhdGVy 90795
+IHdpY2h0 90796
+X2NvbXBhcmlzb24= 90797
+ICki 90798
+Y2xpbmljYWw= 90799
+IFRFbnRpdHk= 90800
+dmVuZXM= 90801
+LmdldFByb3BlcnRpZXM= 90802
+IHJlbGF0 90803
+IGFubm95YW5jZQ== 90804
+YmVi 90805
+IGFuZXN0aGVzaWE= 90806
+X2ludGVydmFscw== 90807
+X2Zo 90808
+IHN1ZG9rdQ== 90809
+IGRpc2Vu 90810
+Y29ubmVjdGluZw== 90811
+IG9h 90812
+IOKWkQ== 90813
+WkY= 90814
+IGN1eg== 90815
+U09FVkVS 90816
+IE3DtmdsaWNoa2VpdA== 90817
+Y2hhcnRlZA== 90818
+IGhhc2hlcg== 90819
+IEtlZXBz 90820
+QUVB 90821
+CWxvZ3J1cw== 90822
+CU5hbWVzcGFjZQ== 90823
+b3J0aG8= 90824
+JGFjdGlvbg== 90825
+IFJvYw== 90826
+Jyk7Pz4i 90827
+IFBST1Q= 90828
+QGFwaQ== 90829
+Y2hzZWw= 90830
+L2dpZg== 90831
+KEhhbmRsZQ== 90832
+IGFudW5jaQ== 90833
+L3B5 90834
+aW52YWxpZGF0ZQ== 90835
+IE1FUA== 90836
+dGVtcw== 90837
+O10v 90838
+6IM= 90839
+6L+Q 90840
+IHRhY28= 90841
+QURW 90842
+aHBw 90843
+QnV0dG9uQ2xpY2s= 90844
+IGJyaW5nZW4= 90845
+IFRJTUVPVVQ= 90846
+IGFzdHJvbG9neQ== 90847
+ZGF0ZUZvcm1hdA== 90848
+T0dSQVBI 90849
+RmlsZVN0cmVhbQ== 90850
+5a6h5qC4 90851
+LkNvbW0= 90852
+J2I= 90853
+IEdFVEdMT0JBTA== 90854
+ZWF0aW5n 90855
+YW5kZXN0 90856
+IFNFVFVQ 90857
+IEFkdmFuY2Vz 90858
+LnNjcm9sbEhlaWdodA== 90859
+QVpF 90860
+ZW5kdGltZQ== 90861
+d2VhdGhlcm1hcA== 90862
+IE1hbmdv 90863
+IFJJUA== 90864
+IGl0ZXJhdG9ycw== 90865
+IGNvYXg= 90866
+IOWbvg== 90867
+PG1haW4= 90868
+cm1z 90869
+cGNi 90870
+IHZhY2NpbmF0aW9ucw== 90871
+IGRpc2FncmVlbWVudHM= 90872
+CWV2ZW50cw== 90873
+PExvY2F0aW9u 90874
+Lk1lYXN1cmU= 90875
+IHF1ZWRh 90876
+IHNpZ25hbGxpbmc= 90877
+IGRlZ3JhZGVk 90878
+IEFtZWxpYQ== 90879
+LWNvbmZpZGVuY2U= 90880
+ZGJOYW1l 90881
+X2luYWN0aXZl 90882
+b25hdGlvbg== 90883
+IHBlcmlwaGVyYWxz 90884
+5qC3 90885
+U1VQRVI= 90886
+J1I= 90887
+LndheQ== 90888
+UExBSU4= 90889
+IEVuZ2Vs 90890
+cmVsYXk= 90891
+IGRlYmlkbw== 90892
+IFRyb3Rza3k= 90893
+6Iw= 90894
+INCw0LTRgNC10YE= 90895
+CXVzZXJz 90896
+ZXRjaHVw 90897
+dGVw 90898
+IG5ld1Bvc2l0aW9u 90899
+IHdhaXZlcnM= 90900
+ZWRpY2luZQ== 90901
+IHRhbmdnYWw= 90902
+IGFtbW9uaWE= 90903
+LWRldA== 90904
+L2V4ZWM= 90905
+KHBhZGRpbmc= 90906
+IFNob3BwaW5nQ2FydA== 90907
+IFByaW50Zg== 90908
+SGFuZGxlZA== 90909
+IE5BTUVT 90910
+KGNsb2Nr 90911
+IHt9Og== 90912
+IHNpbXM= 90913
+IFRlYXJz 90914
+IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0= 90915
+X0NBTk5PVA== 90916
+TEVHUk8= 90917
+LlNldFBhcmVudA== 90918
+5YW25Lit 90919
+IGVycmV1cg== 90920
+aXBp 90921
+PEV4cHJlc3Npb24= 90922
+LnRpbWVsaW5l 90923
+ICdfJyw= 90924
+IGNvYXRpbmdz 90925
+IHVzZUZvcm0= 90926
+LnRr 90927
+IEZlYXN0 90928
+LlNL 90929
+w6RzZW50 90930
+Y2h3aXR6 90931
+IGludmVudGl2ZQ== 90932
+IE1laQ== 90933
+IHZlc3RpYg== 90934
+IG7DpGNoc3Rlbg== 90935
+L2JpZw== 90936
+IHJldHJlYXRlZA== 90937
+IHByb3BhbmU= 90938
+dmljdGlt 90939
+QWt0 90940
+IFByZXNlcnZhdGlvbg== 90941
+IFBpcw== 90942
+X1NIQURPVw== 90943
+IHByaWNlbGVzcw== 90944
+csOzZA== 90945
+b2JibGVk 90946
+IHJvbGVOYW1l 90947
+IEdEUFI= 90948
+ICciLA== 90949
+Q2VudHJl 90950
+QXJjaGl0ZWN0dXJl 90951
+Q3BwQ2xhc3M= 90952
+IG1hdHRyZXNzZXM= 90953
+IGJlZXA= 90954
+IERhbWlhbg== 90955
+5p2D6ZmQ 90956
+YmV0dA== 90957
+X2Flcw== 90958
+KGNlbGxz 90959
+IOuwsOyXtA== 90960
+IGJpdG1hc2s= 90961
+Y291bGRu 90962
+LW5vdw== 90963
+IGlubm92YXRl 90964
+IGhhY2Vu 90965
+IEx5b25z 90966
+dGhpY2tuZXNz 90967
+IHdoaXN0bGVibG93ZXI= 90968
+JGZpbHRlcg== 90969
+IGV1bGVy 90970
+IEhhcm0= 90971
+IGxlZHM= 90972
+IEtlbHZpbg== 90973
+LnF1aWNr 90974
+IEzDs3Bleg== 90975
+cmV2ZQ== 90976
+IG5pZ2VyaWE= 90977
+IGp5bGxhbmQ= 90978
+LmVtcHR5TGlzdA== 90979
+IHVuc2V0dGxpbmc= 90980
+dXNiYW5k 90981
+IHRyYWNrZXJz 90982
+PVwiIjsK 90983
+IGNvbnRpbnVh 90984
+IE51bWVybw== 90985
+ZW5kb24= 90986
+IEdlcnJ5 90987
+LlRPRE8= 90988
+UmVwZWF0ZWQ= 90989
+IFNlcmVuYQ== 90990
+0LjQvNCw0LvRjA== 90991
+cHJvZmls 90992
+INCy0YHQtdGF 90993
+QGFkbWlu 90994
+LkxpbmVz 90995
+IHRyYW5zbWlzc2lvbnM= 90996
+IGNq 90997
+YW7Dp2E= 90998
+5Yig6Zmk5oiQ5Yqf 90999
+IGdldE1lbnVJbmZsYXRlcg== 91000
+dWZyZXE= 91001
+IE1hdGhlbWF0aWNhbA== 91002
+TmF2aWdhdG9yTW92ZQ== 91003
+IGZ3ZA== 91004
+dW5pdHRlc3Q= 91005
+IHN5bnRoZXNpemVk 91006
+IGNyZWVk 91007
+KEZyYW1l 91008
+cHN5Y2g= 91009
+dm9k 91010
+dUM= 91011
+4bqndQ== 91012
+IOKAnOKApg== 91013
+IGtyYXQ= 91014
+ZHJhd2FibGU= 91015
+w6ZyZQ== 91016
+PXRvcA== 91017
+KExvZ2dlcg== 91018
+RXJyb3JFeGNlcHRpb24= 91019
+YWlzYWw= 91020
+L3dz 91021
+dWxsZWQ= 91022
+QVJJTkc= 91023
+IG5JbmRleA== 91024
+IGludGVybmFscw== 91025
+IGVmZmljaWVuY2llcw== 91026
+ICNA 91027
+X2JyaWdodG5lc3M= 91028
+X25vcm1hbHM= 91029
+IFN0b3V0 91030
+IHVudmVpbA== 91031
+IFNob3Rz 91032
+LWNvbXBhbnk= 91033
+X2VsdA== 91034
+KGRsbGV4cG9ydA== 91035
+IHByb2R1Y2Npw7Nu 91036
+Q2lzY28= 91037
+Qmxha2U= 91038
+LW1vdXRo 91039
+UGVhcg== 91040
+INC00L7RgdGC0YPQvw== 91041
+IEpBQ0s= 91042
+IO2YuA== 91043
+IHN0b3B3b3Jkcw== 91044
+IFRlc3M= 91045
+IHBvc3Rl 91046
+cmF6aWVy 91047
+6K0= 91048
+TWVzc2FnaW5n 91049
+t+aWsA== 91050
+VGFtYmFo 91051
+IG5hcmNvdGljcw== 91052
+IGNhbXBlcg== 91053
+IHRyaXBvZA== 91054
+IGdsRW5k 91055
+IGdpb2M= 91056
+Y29tYmU= 91057
+VXNlclJvbGU= 91058
+VWw= 91059
+RXF1aXZhbGVudA== 91060
+IGdub21l 91061
+IEZ1w58= 91062
+cGFja2FnZU5hbWU= 91063
+X3Vl 91064
+RGlzY2xvc3VyZQ== 91065
+YW1hdGU= 91066
+X3RlbnNvcnM= 91067
+IEthdGhyeW4= 91068
+X0Jhcg== 91069
+VGhyZWFkSWQ= 91070
+IHZlcmlmaWNh 91071
+LmFzc2VydE51bGw= 91072
+IE9kaW4= 91073
+YsOp 91074
+INGB0L7RgdGC 91075
+IGp0 91076
+LlNlbGVjdGVkSXRlbXM= 91077
+IGFjdGlvbmFibGU= 91078
+IFJlZ2FyZHM= 91079
+aGVr 91080
+Om51bWVs 91081
+LEdM 91082
+IFBIT05F 91083
+CURlZmF1bHQ= 91084
+IGVsYXN0 91085
+IGJlY2s= 91086
+PWNyZWF0ZQ== 91087
+OicK 91088
+YXJodXM= 91089
+bW9kaWZpZXJz 91090
+aW50cHRy 91091
+IHByb3Bpbw== 91092
+77yI56yR 91093
+IHJlcXVlc3RPcHRpb25z 91094
+IGltcGxpYw== 91095
+IGR1cm8= 91096
+IFBDUw== 91097
+RGVsaW1pdGVy 91098
+KGxvZ2l0cw== 91099
+LkVWVA== 91100
+V2l0aENvbnRleHQ= 91101
+IG9sdHJl 91102
+X0VYRUNVVEU= 91103
+b2xpY2l0ZWQ= 91104
+X0VudGVy 91105
+L2Zyb20= 91106
+INGB0LvQvtCy 91107
+IEhvcm0= 91108
+dWliTW9kYWw= 91109
+X0lORklOSVRZ 91110
+77yM44CK 91111
+VUdJTlM= 91112
+T05HTA== 91113
+LGJ1Zg== 91114
+IHBvdXJyYWl0 91115
+cGo= 91116
+KGN1YmU= 91117
+IHVnbA== 91118
+IFNhd3llcg== 91119
+SUZFU1Q= 91120
+QXBpcw== 91121
+IENvcmVEYXRh 91122
+IHNlc2FtZQ== 91123
+LnB0aA== 91124
+LmdldFVzZXJOYW1l 91125
+Y2FzZWQ= 91126
+IHZhbmlzaA== 91127
+X0FwaQ== 91128
+Ly86 91129
+L25vbg== 91130
+LmRvY2tlcg== 91131
+LnNp 91132
+YWxlcnRz 91133
+IGludGVzdGluZQ== 91134
+cGFydGljaXBhbnRz 91135
+LXZpc2libGU= 91136
+ZW1zcA== 91137
+bXVl 91138
+X3B2 91139
+IENyaQ== 91140
+b2dyYQ== 91141
+X2V4cGVyaWVuY2U= 91142
+IElOVEVSVkFM 91143
+X3JlZ3Jlc3Npb24= 91144
+7ZWY7IS47JqU 91145
+ZW5kZXJlY28= 91146
+bGF0YWJsZQ== 91147
+LmxvY2FsdGltZQ== 91148
+IEJJVFM= 91149
+IEZvbGRpbmc= 91150
+CSAJCQ== 91151
+w6lzZQ== 91152
+LWJlYXJpbmc= 91153
+IFhQQVI= 91154
+T1BTSVM= 91155
+J14kJyw= 91156
+aW5jbA== 91157
+IE9wcmFo 91158
+IGJvb3Rocw== 91159
+IFJvaGluZw== 91160
+LkJvcmRlclNpZGU= 91161
+YXRhdHlwZQ== 91162
+Q3JlYXRlZEJ5 91163
+LOKAmeKAnQ== 91164
+ZG9jdHJpbmU= 91165
+IGJyZWF0aGVk 91166
+X2JlZw== 91167
+IGFmZmxpY3RlZA== 91168
+TW91bnRhaW4= 91169
+QmxvYw== 91170
+IHJ1aW5pbmc= 91171
+LkFubm90YXRpb25z 91172
+CWludGVudA== 91173
+IHN0YXRpY2FsbHk= 91174
+X1V0aWxz 91175
+TGF1bmNoZXI= 91176
+Om5vcm1hbA== 91177
+IHVzZXJpbmZv 91178
+LUp1bA== 91179
+S3lsZQ== 91180
+LlJlYWRVSW50 91181
+KHVybHM= 91182
+L2lm 91183
+bWl0dGVs 91184
+YmNt 91185
+QE1vZHVsZQ== 91186
+IENvbnN0YW50aW4= 91187
+IGJq 91188
+ZXJuYXV0 91189
+PHI= 91190
+IE1lbnRvcg== 91191
+IGVncmV0 91192
+X29hdXRo 91193
+LkRhdGFDb250ZXh0 91194
+X0NMSQ== 91195
+KENvbnN0cnVjdG9y 91196
+IHNldFBvc2l0aW9u 91197
+cmVzYXI= 91198
+ZW50aW5n 91199
+4Li54Lil 91200
+VHJhbnNtaXNzaW9u 91201
+IG5vdGlmeURhdGFTZXRDaGFuZ2Vk 91202
+IE1vdXNlQnV0dG9u 91203
+ICoi 91204
+ICAgICAgICAgICAgICAgDQo= 91205
+IEx5ZGlh 91206
+IHN3b3Jl 91207
+IHBsYXRhZm9ybWE= 91208
+CWJ1dHRvbnM= 91209
+IHNwcnVuZw== 91210
+KFRva2VuVHlwZQ== 91211
+Q3g= 91212
+QXF1 91213
+CQkJCQkJCQkJICA= 91214
+CUFERA== 91215
+dWlkcw== 91216
+IOCkrg== 91217
+IOaXtumXtA== 91218
+LkFjdGlvbkJhcg== 91219
+IG9jdXI= 91220
+IGlsbWE= 91221
+LW5ldXRyYWw= 91222
+ICIuIjsK 91223
+CVNpemU= 91224
+UGllY2Vz 91225
+IHN0aWY= 91226
+ICI9Iiw= 91227
+IEVxdWl2YWxlbnQ= 91228
+IGlnZW4= 91229
+ZGZk 91230
+X3RoaWNrbmVzcw== 91231
+X3JlYWRhYmxl 91232
+L2ZhbHNl 91233
+IHRvb2x0aXBz 91234
+b3BsYXN0 91235
+aHVh 91236
+aGFuZGxlUmVxdWVzdA== 91237
+LkxBWlk= 91238
+PFVGdW5jdGlvbg== 91239
+aW1tdXRhYmxl 91240
+aWhpbGF0aW9u 91241
+IG9ydGhvZG94 91242
+LnBvcHVsYXRl 91243
+IHZlcmE= 91244
+IG9iZXI= 91245
+c2FuZA== 91246
+dmln 91247
+Q29uZmVyZW5jZQ== 91248
+KENvbGxpc2lvbg== 91249
+L2F1dG8= 91250
+IFNvbGlkQ29sb3JCcnVzaA== 91251
+Kic= 91252
+LGFkZHJlc3M= 91253
+IHN3ZWV0aGVhcnQ= 91254
+w6F0aWNhcw== 91255
+YW5pbmU= 91256
+X3BheW1lbnRz 91257
+IHVubWlzdA== 91258
+IHRydW1wZXQ= 91259
+QkFM 91260
+IGZpbGVJZA== 91261
+bmllanM= 91262
+QURG 91263
+IG1uaXN0 91264
+IEZlaGxlcg== 91265
+44CRLA== 91266
+Q2hhcmFjdGVyU2V0 91267
+IFZhbmNl 91268
+SW5zZXJ0ZWQ= 91269
+IGRvd253YXJkcw== 91270
+IHJvdGF0aW9uYWw= 91271
+IGVuY291bnRlcmluZw== 91272
+TUJQcm9ncmVzc0hVRA== 91273
+L1N5c3RlbQ== 91274
+L3BvcA== 91275
+IH0pDQoNCg== 91276
+IC4nPC8= 91277
+77yJDQo= 91278
+IGRjYw== 91279
+YXN5YXJha2F0 91280
+IHByaW5jaXBhbGx5 91281
+5a6a5LmJ 91282
+KGNob2ljZXM= 91283
+LnBhZ2luYXRvcg== 91284
+IHVwYnJpbmdpbmc= 91285
+IGRvdGVudg== 91286
+KCkpLw== 91287
+IFRBUw== 91288
+Z2Nk 91289
+X2ludGY= 91290
+Lm11dGV4 91291
+cHJlc3Rhc2hvcA== 91292
+IGLDtnI= 91293
+ZGFw 91294
+X2RlbWFuZA== 91295
+XERlc2t0b3A= 91296
+dG9GbG9hdA== 91297
+IHNlZ3JlZ2F0ZWQ= 91298
+IGNsaW1hdGVz 91299
+Lk9yZGVyQnlEZXNjZW5kaW5n 91300
+KCcsJyk= 91301
+UHVsbFBhcnNlcg== 91302
+QXRvbXM= 91303
+IGJlbsO2dA== 91304
+IGhvbWVy 91305
+YW50dQ== 91306
+SXNFbXB0eQ== 91307
+IEJlZ2lucw== 91308
+PlNob3c= 91309
+IFN1cHBsZW1lbnRz 91310
+b2NjdXM= 91311
+IGRvcGU= 91312
+LmJvb2tpbmc= 91313
+IEFsbWlnaHR5 91314
+W2VkZ2U= 91315
+IEViYXk= 91316
+X3JhY2U= 91317
+RnJvemVu 91318
+X3RyYXZlbA== 91319
+IHBhc3RvcnM= 91320
+X1NVUkZBQ0U= 91321
+X2dlbnJl 91322
+X0hPVA== 91323
+LGRpbQ== 91324
+VGJs 91325
+bXRz 91326
+cHJlZGljdGlvbnM= 91327
+X2N1bQ== 91328
+IGRldGFsbGVz 91329
+LXRyYW5zaXRpb25hbA== 91330
+IHdha2V1cA== 91331
+UGVyc29ucw== 91332
+LmNvbG9yYmFy 91333
+U3RyYW5nZQ== 91334
+2K/Zhw== 91335
+Jlc= 91336
+IEFSUA== 91337
+X1NPRlQ= 91338
+X2RyYWZ0 91339
+SVZB 91340
+IGdyb3A= 91341
+IGxpZWJl 91342
+IGlpZA== 91343
+2KfYsw== 91344
+Y2FuZGlkYXRlcw== 91345
+Z2V0QXM= 91346
+PV8oIg== 91347
+LkdldE9yZGluYWw= 91348
+KSk9PQ== 91349
+YW5ub3RhdGU= 91350
+IEx1bWlh 91351
+SVJNV0FSRQ== 91352
+X09QRU5HTA== 91353
+KGZvcm1EYXRh 91354
+ZW50aW1lcw== 91355
+IHdhdGVyc2hlZA== 91356
+INCx0LXQtw== 91357
+IGZsb3BweQ== 91358
+VG93YXJkcw== 91359
+KGNvbXBhY3Q= 91360
+RERE 91361
+e24= 91362
+IHBva2luZw== 91363
+QG0= 91364
+IHJlY3ljbA== 91365
+c3RydWN0b3Jz 91366
+a2V5Q29kZQ== 91367
+IHZlaGVtZW50 91368
+IGxpdHJl 91369
+IEJJTkQ= 91370
+IEZyYW5jb2lz 91371
+IG51ZGl0eQ== 91372
+IGlzaXpl 91373
+CW9uQ2xpY2s= 91374
+eXN0YWxz 91375
+IGdldFN5c3RlbVNlcnZpY2U= 91376
+V2ViUmVzcG9uc2U= 91377
+ZmlsZXNpemU= 91378
+IENobG9y 91379
+Y29saQ== 91380
+X3NlYXQ= 91381
+LkFkZEluUGFyYW1ldGVy 91382
+KXRlc3Q= 91383
+IHF1ZXM= 91384
+IGNhdXRpb3VzbHk= 91385
+ImRpc3BsYXk= 91386
+LnNodG1s 91387
+IEdVSURBVEE= 91388
+KCIqKg== 91389
+IGdyYW5kZGF1Z2h0ZXI= 91390
+IEFzc2VtYmx5RGVzY3JpcHRpb24= 91391
+Rm9yRWFjaA== 91392
+V2lsc29u 91393
+LGVn 91394
+IGJlbGlldmFibGU= 91395
+IGNyb3Nzd29yZA== 91396
+bG9iYmVy 91397
+IFN0YXBsZXM= 91398
+KHNoaXA= 91399
+IHdhZ2Vk 91400
+IEJvbHNoZXZpaw== 91401
+LkFkZEl0ZW0= 91402
+KEZpbHRlcg== 91403
+X0FCQw== 91404
+IGBc 91405
+0L7RiQ== 91406
+IG1ib3g= 91407
+IE5lcw== 91408
+IEFWQ2FwdHVyZQ== 91409
+IGNvbmhl 91410
+IElOVEVSTkFUSU9OQUw= 91411
+b3Nn 91412
+IF0pLT4= 91413
+U0tUT1A= 91414
+IGtpZGQ= 91415
+IFNTVA== 91416
+IOWFsw== 91417
+IEV0aG5pYw== 91418
+RVJTSEVZ 91419
+IG11bHRpYw== 91420
+X01VTA== 91421
+IEZpbmRPYmplY3RPZlR5cGU= 91422
+IEV4cGVuc2Vz 91423
+Z2V0TW9ja0J1aWxkZXI= 91424
+LWd1aWRl 91425
+J0w= 91426
+IOeZuw== 91427
+IHJhag== 91428
+IEJsYW5jaA== 91429
+IEFkZHJlc3Nlcw== 91430
+Tng= 91431
+IElzbGFtYWJhZA== 91432
+0L7QutGD0LzQtdC90YI= 91433
+IEJlYXZlcg== 91434
+LnN0dWRlbnRz 91435
+IEFzeW5jQ2FsbGJhY2s= 91436
+c2hlZXRz 91437
+ZWNhc3Q= 91438
+IEZ1bmRhbWVudGFs 91439
+IHZlcmRpZW5lbg== 91440
+IGV4YWNlcmJhdGVk 91441
+IE1vZGVyYXRvcg== 91442
+Q0NDQ0ND 91443
+IHRpbWVvdXRz 91444
+IHN1YmRpdmlzaW9ucw== 91445
+IGNvbXByb21pc2Vz 91446
+dXp6ZXI= 91447
+fSwkew== 91448
+X2Jsb2NraW5n 91449
+ZXJtYW5u 91450
+IE1pa2hhaWw= 91451
+IFNlbGJzdA== 91452
+6ZSA 91453
+LnNob3dz 91454
+5LiH5YWD 91455
+IFRm 91456
+IElIdHRwQWN0aW9uUmVzdWx0 91457
+IElFbnRpdHk= 91458
+IGlx 91459
+Rk1M 91460
+b2RlbQ== 91461
+c3Rw 91462
+dWN0aW9ucw== 91463
+LmZhdm9yaXRl 91464
+LkdldERpcmVjdG9yeU5hbWU= 91465
+IGdyYWM= 91466
+IHhtbERvYw== 91467
+X3B1c2hCdXR0b24= 91468
+Y29sbGVjdG9y 91469
+PWV4cGxvZGU= 91470
+IGRlc3RpbmF0aW9uVmlld0NvbnRyb2xsZXI= 91471
+IFNlcmlhbGl6ZWQ= 91472
+Om1lc3NhZ2U= 91473
+IENDQw== 91474
+X3JlY292ZXJ5 91475
+LWtpdA== 91476
+c2hpbWE= 91477
+cm90Y2g= 91478
+IGB9Cg== 91479
+X3N1cHA= 91480
+VGFibGE= 91481
+0YDQtdC00LXQuw== 91482
+R3RrV2lkZ2V0 91483
+IFNJTVBMRQ== 91484
+LnBoaQ== 91485
+IExpYmVydGllcw== 91486
+LS1b 91487
+IHVudmVpbGluZw== 91488
+IGV4dGVudHM= 91489
+YmNk 91490
+IGh2YWQ= 91491
+CWNy 91492
+LnJlYWRkaXI= 91493
+IHJlYWRhYmlsaXR5 91494
+IGRpc21pc3Npbmc= 91495
+Q2FtYg== 91496
+IGNhc3VhbHR5 91497
+IElQVg== 91498
+bWl0ZXM= 91499
+IHB1cmlmaWVk 91500
+Lk9yaWVudGF0aW9u 91501
+IGxq 91502
+aW11bGF0b3I= 91503
+ZnJhbQ== 91504
+L2xvY2F0aW9u 91505
+IGNvbW11bmljYXRlcw== 91506
+OlVJQWxlcnQ= 91507
+L3NvY2lhbA== 91508
+ZWx5bg== 91509
+REVO 91510
+INee 91511
+IGJlZm9yZVNlbmQ= 91512
+IFVudGVycw== 91513
+JykuIg== 91514
+ICcnKTs= 91515
+LndyaXRlT2JqZWN0 91516
+KGdyYW1tYXJBY2Nlc3M= 91517
+IEFwcGxpY2F0aW9uQ29udGV4dA== 91518
+QnlVc2VybmFtZQ== 91519
+IHNraXBz 91520
+IGZpbGhv 91521
+IHZpZXV4 91522
+IG1SZWN5Y2xlclZpZXc= 91523
+IGFyb3VzZWQ= 91524
+Lm93bA== 91525
+IGN1cmxlZA== 91526
+L2NhbGxiYWNr 91527
+KCc6Jylb 91528
+IGludW5k 91529
+IGJyZWFrcG9pbnRz 91530
+LWV2ZW4= 91531
+LnN0ZW0= 91532
+IGRlcm9n 91533
+IG5lcA== 91534
+IENvbXBsZXRhYmxlRnV0dXJl 91535
+LUxpbmU= 91536
+Lyov 91537
+LkhleA== 91538
+IHJ1c3Nl 91539
+IGJpZg== 91540
+IEZvbmQ= 91541
+aWVjdA== 91542
+IGFsbG90dGVk 91543
+ZGV0ZWN0b3I= 91544
+IC8KCg== 91545
+ZW1vZGU= 91546
+dWhl 91547
+dWlzc2U= 91548
+IEZJWEVE 91549
+bWF0aHJt 91550
+IHVuc3Vz 91551
+IEF1dG9z 91552
+IC4uLi4uLi4uLi4= 91553
+LnRyYXZlbA== 91554
+TkFW 91555
+IGxlc2Jpc2s= 91556
+IMO8emVy 91557
+IGNsZXJpYw== 91558
+IGxpbWl0bGVzcw== 91559
+b2x1Y2lvbg== 91560
+IG5lY2tsaW5l 91561
+IGRyaWZ0ZWQ= 91562
+IFJlbGlhYmxl 91563
+IENhcnk= 91564
+IHRlbsOtYQ== 91565
+ID8+Jw== 91566
+L2NvbW1vbnM= 91567
+IEdNQw== 91568
+X05QQw== 91569
+IEJsaXNz 91570
+IEJ1cm1h 91571
+5ZCM5pe2 91572
+KGRlcGVuZA== 91573
+LXN1aXRl 91574
+CXN0YWdl 91575
+RG91Zw== 91576
+aWRlbnRpZmljYXRpb24= 91577
+X3Jlc29sdmVy 91578
+QmVnYW4= 91579
+W3RocmVhZA== 91580
+IDsKCgo= 91581
+TlRTVEFUVVM= 91582
+IGRpc29iZWQ= 91583
+fGg= 91584
+IGFjY3VtdWxhdGluZw== 91585
+ICIsIik7Cg== 91586
+dVBhcmFt 91587
+LmJpbGw= 91588
+cml0Y2g= 91589
+Q3JpbWU= 91590
+0LXRgdGM 91591
+IFJlbWFpbg== 91592
+54Sh5paZ 91593
+X1RIQVQ= 91594
+YCJdCg== 91595
+LnN0YW1w 91596
+IHBhcmFub3JtYWw= 91597
+IE1QQw== 91598
+InVybHM= 91599
+IEVzdGF0ZXM= 91600
+VG9Gcm9udA== 91601
+VGhpcnR5 91602
+QmV0aA== 91603
+J3U= 91604
+IOy9lOuTnA== 91605
+VUZBQ1Q= 91606
+IENyb20= 91607
+IE1pc3Rlcg== 91608
+IEVRVUFM 91609
+ZW5oZWlt 91610
+IC8vew== 91611
+X3dhcw== 91612
+IGJvdXF1ZXQ= 91613
+IE1pZGRsZXRvbg== 91614
+aXp1 91615
+X2hhc2hlcw== 91616
+IGhlbm5l 91617
+IExJTlVY 91618
+CVNlcnZpY2U= 91619
+IFRBTQ== 91620
+IGBf 91621
+IEFUQQ== 91622
+IGRhbmdsaW5n 91623
+cGFpbg== 91624
+X0JPVU5EUw== 91625
+cHJvZ3JhbW1pbmc= 91626
+IGN1cnJlbnRJdGVt 91627
+IGJlc2ll 91628
+ZW1ibGU= 91629
+KGNhbGM= 91630
+LlNraW4= 91631
+IHBlYXJscw== 91632
+IEJ1cmI= 91633
+LW1vbml0b3I= 91634
+L2Nz 91635
+Zmly 91636
+KHZlcg== 91637
+W2FyZ3M= 91638
+w7xja2Vu 91639
+ZXBhcmF0b3I= 91640
+RG91 91641
+LkVudA== 91642
+IEVTQQ== 91643
+KGZt 91644
+dG9uZXM= 91645
+IFphYw== 91646
+a3NhbQ== 91647
+4oCZYWxs 91648
+IE1TUw== 91649
+IkRvbg== 91650
+IHNpbXBsZXg= 91651
+IENvbnNjaW91cw== 91652
+IEFwcGxpY2FudA== 91653
+cGVsbGllcg== 91654
+IHBlZGVzdGFs 91655
+JGh0dHA= 91656
+IEF2YQ== 91657
+LkNH 91658
+IGludMOpcmVzcw== 91659
+IEludGVncmFs 91660
+cmVkZQ== 91661
+PWZvcm1hdA== 91662
+LlBhdGhz 91663
+X1BBUlRJVElPTg== 91664
+IHNlaA== 91665
+IFF1YW5kbw== 91666
+WW91dHViZQ== 91667
+LnB1dFRleHQ= 91668
+7KO87IS47JqU 91669
+LkFXUw== 91670
+IENzdg== 91671
+Q3Vyc29yUG9zaXRpb24= 91672
+LWJlZ2lu 91673
+X2NvdW50cmllcw== 91674
+LXJhbmRvbQ== 91675
+5Y2z 91676
+UGhpbGw= 91677
+IHBhbm9yYW1h 91678
+IHRoZXJlcw== 91679
+5Y+q 91680
+IHNpbGVuY2Vk 91681
+IEN1bWJlcmxhbmQ= 91682
+LlZpc2libGVJbmRleA== 91683
+LnN0YXRpc3RpY3M= 91684
+IHByb3BlbGxlZA== 91685
+QW1lcmljYW5z 91686
+IHZhbGlkYQ== 91687
+IEd1YW0= 91688
+IEZFTUE= 91689
+LnN5bnRheA== 91690
+ZGdl 91691
+IGRlZXBlbg== 91692
+ICAgICAgICAJCQkJ 91693
+IFNwZWNpYWxpc3Rz 91694
+IFNhbnRhbmE= 91695
+IEJlZXRsZQ== 91696
+ICUKCg== 91697
+VXNlclByb2ZpbGU= 91698
+KCIkLg== 91699
+IGVtcGxvaQ== 91700
+IGVtYWlsaW5n 91701
+Z2V0T3JFbHNl 91702
+X1VQUEVS 91703
+LmRyaXZl 91704
+IHJlZGhlYWQ= 91705
+Rk9VTkRBVElPTg== 91706
+IG11bHRpcGxpYw== 91707
+L2VmZmVjdHM= 91708
+IGhhbmR3cml0aW5n 91709
+X3Rh 91710
+IEJheg== 91711
+w7ZmZmVudA== 91712
+cHJpeA== 91713
+IGNoaXBzZXQ= 91714
+IGlwQWRkcmVzcw== 91715
+w61kYQ== 91716
+IFVuZw== 91717
+IFNjaGE= 91718
+LkZMT0FU 91719
+IHF1aWVybw== 91720
+b2Nocm9tZQ== 91721
+IHJlZWZz 91722
+YnNvbg== 91723
+IG3Dug== 91724
+IHRyYXlz 91725
+Qm9tYg== 91726
+IG15TGlzdA== 91727
+eGltaXR5 91728
+IERlbmc= 91729
+VW5p 91730
+LVNlcmllcw== 91731
+b2dhbnk= 91732
+bMSxaw== 91733
+L2NhbA== 91734
+IHJlYWxpemE= 91735
+IEhpYg== 91736
+CQoJCgo= 91737
+IGh1bWlsaWF0aW5n 91738
+WyR7 91739
+IHByZXRlbmRlZA== 91740
+IERhdGVuc2No 91741
+YW5zaWJsZQ== 91742
+CXJlbG9hZA== 91743
+IG1pZ2xpb3I= 91744
+X2JldA== 91745
+IHRvdGFsVGltZQ== 91746
+IEJheHRlcg== 91747
+IGVuYW1lbA== 91748
+L0ltYWdlcw== 91749
+IFNFUw== 91750
+IFNwcmluZ0FwcGxpY2F0aW9u 91751
+KWluaXRXaXRoRnJhbWU= 91752
+CWNhbA== 91753
+RUxFTUVOVA== 91754
+IEd1dGg= 91755
+KEJpZ0ludGVnZXI= 91756
+IE1lZGk= 91757
+Lk1lbWJlcnM= 91758
+IHJlam9pY2U= 91759
+IGRvZg== 91760
+UEVuZFBvaW50 91761
+IGNsaXQ= 91762
+X1JFVVNF 91763
+TWFrZXM= 91764
+IHN6eQ== 91765
+IHNoYWRlZA== 91766
+IGZhdm91cmVk 91767
+aXN0b2w= 91768
+ZGV4 91769
+IGZsZXhHcm93 91770
+hac= 91771
+X3ByaW50ZXI= 91772
+LmZuYW1l 91773
+cGVyYXRpb24= 91774
+IG7Ds3M= 91775
+Z2dlcg== 91776
+6ICB 91777
+INCy0YDQtdC80Y8= 91778
+KGVmZmVjdA== 91779
+QnlVcmw= 91780
+IEFQUw== 91781
+dHV0b3JpYWw= 91782
+ZWpz 91783
+U3FsUGFyYW1ldGVy 91784
+IHNjcmFwcw== 91785
+R3JlZXRpbmdz 91786
+RmVk 91787
+IFJFTkRFUg== 91788
+IGJsb29tcw== 91789
+IGRlYmlsaXRhdGluZw== 91790
+b21ldHJpY3M= 91791
+IHNpbWls 91792
+LWhlcm8= 91793
+IHJlYWxwYXRo 91794
+ZGVwYXJ0bWVudHM= 91795
+QklORA== 91796
+IENhc3NpZHk= 91797
+bGlhbg== 91798
+U0tJUA== 91799
+LWNsZWFu 91800
+IHNpbGRlbmFmaWw= 91801
+X211bHRpcA== 91802
+anNvbkRhdGE= 91803
+QWdlbnRz 91804
+LmZoaXI= 91805
+IHRyaXVt 91806
+IGFzdG9yZQ== 91807
+IG5leA== 91808
+OnVwZGF0ZQ== 91809
+INC00LA= 91810
+4KSy 91811
+OyIpCg== 91812
+LlRleHRJbWFnZVJlbGF0aW9u 91813
+IG1pY3Jvc2NvcHk= 91814
+U1VS 91815
+YW5reQ== 91816
+IFBldGl0 91817
+bWFya2V0aW5n 91818
+IHZlcmlmaWNhcg== 91819
+YW1hZ2Vk 91820
+Y3Ro 91821
+IGluY29uc2lzdGVuY2llcw== 91822
+IG1hasSF 91823
+IGdldEluZm8= 91824
+IHBhc3Npb25hdGVseQ== 91825
+IGljbXA= 91826
+W10+Cg== 91827
+U2luZ2Fwb3Jl 91828
+IE5ld3Rvd24= 91829
+IHJhaWxpbmc= 91830
+IEVubGlnaHRlbm1lbnQ= 91831
+dXRoZXJsYW5k 91832
+bGVpbmU= 91833
+X3JlZ2lzdHJv 91834
+IEVyaWNh 91835
+X3RpY2tldHM= 91836
+L21ldGhvZA== 91837
+aXp6YXRv 91838
+R2F0dA== 91839
+LWZlYXR1cmU= 91840
+IDotKQ== 91841
+IHNlcnBlbnQ= 91842
+IEdyb3VwTGF5b3V0 91843
+TmlrZQ== 91844
+dW5nYQ== 91845
+IE1pbQ== 91846
+IGluY2Vzcw== 91847
+IGRlcGxldGlvbg== 91848
+X2xvdA== 91849
+IGJpcnRoZGF5cw== 91850
+IHJlbnRlcnM= 91851
+IGVxdWlwb3M= 91852
+IExlaHI= 91853
+X1BsYXk= 91854
+IHNwaWVsZQ== 91855
+IExBTkQ= 91856
+IEVuY291bnRlcg== 91857
+aXphbmRv 91858
+IHBlcnU= 91859
+IHNsYW1taW5n 91860
+IHJlaW5zdGFsbA== 91861
+IGFuZ2k= 91862
+SW5UaGVEb2N1bWVudA== 91863
+IHZlcnNjaGlsbA== 91864
+IHZlcnNv 91865
+LnN0YWZm 91866
+KHZw 91867
+KGFjY291bnRz 91868
+Z2V0QXBwbGljYXRpb24= 91869
+IG1hbnRlbmVy 91870
+LlNP 91871
+LkFE 91872
+IE1vcm1vbnM= 91873
+CXJlYWw= 91874
+IGhvdGxpbmU= 91875
+IENhcmRpbw== 91876
+cGFnZUluZGV4 91877
+Ymplcmc= 91878
+Rm8= 91879
+IGNvbnNlaWxz 91880
+IG1pZ3JhaW5l 91881
+IGxhdGlubw== 91882
+IHRvcnBlZG8= 91883
+amFiaQ== 91884
+L3Jz 91885
+dWJiZXI= 91886
+IENsYXNzZQ== 91887
+4Lw= 91888
+KC9eXA== 91889
+X2RlcGxveQ== 91890
+R1JFUw== 91891
+IFdIQVRTT0VWRVI= 91892
+IGFyY3B5 91893
+IG1pZWpzYw== 91894
+QXJteQ== 91895
+IHNjaMO2bmU= 91896
+IGJtaQ== 91897
+IDoiOwo= 91898
+IENydWlzZXI= 91899
+cWg= 91900
+LnByZXBlbmQ= 91901
+IHZpdmU= 91902
+b3JpYXNpcw== 91903
+ICE9Cg== 91904
+dGVnYQ== 91905
+YW1lZGk= 91906
+UHJvamVjdGVk 91907
+LWJyZQ== 91908
+LHJlYWRvbmx5 91909
+IHN1YlRpdGxl 91910
+IG1pc3Ry 91911
+IEluaGFs 91912
+Y292ZXJpbmc= 91913
+IHppag== 91914
+IEFSVElDTEU= 91915
+UlVMRQ== 91916
+IGFsdHJv 91917
+IHNldHRsZXM= 91918
+aWRlbGJlcmc= 91919
+OiIuJA== 91920
+KGZl 91921
+X2Jt 91922
+IHByb3ByaWV0b3I= 91923
+IGtlZXI= 91924
+U2VwYXJhdGVk 91925
+X05FQVJFU1Q= 91926
+KHN0cnBvcw== 91927
+IENvbXB1dGF0aW9uYWw= 91928
+IGVybg== 91929
+SW5WaWV3 91930
+QWNyb3Nz 91931
+IGZydWl0eQ== 91932
+X21hcHBlZA== 91933
+IGdyYXR1aXRlbWVudA== 91934
+IHt9CgoK 91935
+cG90ZW50aWFs 91936
+cGFudHM= 91937
+IHNlbnRpbWVudGFs 91938
+IExpbmtlZGlu 91939
+KHBhdGNo 91940
+IGFkYXB0b3I= 91941
+IFVJU3Rvcnlib2FyZA== 91942
+IHNsYXNoaW5n 91943
+KCIvOg== 91944
+IHRleHREZWNvcmF0aW9u 91945
+LmRpYWc= 91946
+XFJlZGlyZWN0 91947
+IG5ldXJvc2NpZW5jZQ== 91948
+IEFkanVzdG1lbnQ= 91949
+IFNjb3RjaA== 91950
+IENvc2J5 91951
+U0VB 91952
+PXZpZXc= 91953
+IGV2b2x2ZXM= 91954
+IFNhbGlzYnVyeQ== 91955
+44CB4oCc 91956
+ZXZlcnlvbmU= 91957
+KGFyYw== 91958
+IGFwYXJ0aGVpZA== 91959
+IGF6aW11dGg= 91960
+IFNoYW1hbg== 91961
+2KU= 91962
+w7NuaWNh 91963
+OmNsYXNz 91964
+IEluamVjdG9y 91965
+YWhhcw== 91966
+YWJsZXI= 91967
+X2VzdGltYXRvcg== 91968
+X0NVQkU= 91969
+IEtyYW5r 91970
+IHVuZmF2b3JhYmxl 91971
+IHJlcHV0ZWQ= 91972
+IENvbmRpdGlvbmFs 91973
+IG1pbGZz 91974
+IFJlc3RyaWN0aW9ucw== 91975
+KGhyZWY= 91976
+SnVhbg== 91977
+PEVudHJ5 91978
+CXRlbXBsYXRlVXJs 91979
+X3Byb2R1Y3Rpb24= 91980
+VHlwZUlE 91981
+IGJhbGs= 91982
+IG5ld0Fycg== 91983
+IGxpY2VuY2Vz 91984
+LnNvbHV0aW9u 91985
+LnNhbQ== 91986
+IEh2 91987
+IHRyZW1ibGluZw== 91988
+WWF3 91989
+IGZsZWVjZQ== 91990
+IHNob3ZlbA== 91991
+V2Vy 91992
+IHBhdHRlcg== 91993
+PVk= 91994
+IEZybQ== 91995
+U2NyZWVucw== 91996
+JCI= 91997
+IEJsb25k 91998
+INGB0LjRgdGC0LXQvA== 91999
+KG9k 92000
+IG5vY3Q= 92001
+b3VudGVycw== 92002
+dXNlcHBl 92003
+fGludA== 92004
+LnJlbWFpbmluZw== 92005
+IHVsdGltbw== 92006
+IG1hc3R1cmJhdGluZw== 92007
+bW1j 92008
+PUc= 92009
+Il19Cg== 92010
+IGZlYXJsZXNz 92011
+IGFsZ3VtYXM= 92012
+Y3VsdA== 92013
+QWx0ZXJuYXRpdmVseQ== 92014
+5bKB 92015
+T0RFVg== 92016
+IEFkb3B0aW9u 92017
+IHdlYWx0aGllc3Q= 92018
+IG1lbnRyZQ== 92019
+L2dvdG8= 92020
+IGluZm9ybWFudA== 92021
+IFJvdXQ= 92022
+b2Zp 92023
+IGhhbW1lcmVk 92024
+IEVzdG8= 92025
+4oCZQnJpZW4= 92026
+IMWa 92027
+IGRlbWk= 92028
+INGB0LvQtdC0 92029
+IENsaW50b25z 92030
+7IWY 92031
+5aSn5bCP 92032
+RUNI 92033
+IGFuYXJjaGlzdHM= 92034
+IEJldmVyYWdl 92035
+IGdvdQ== 92036
+IGJyaWJlcnk= 92037
+IHBpY2t1cHM= 92038
+IHViZXI= 92039
+IHN5bmVyZ3k= 92040
+ZmNu 92041
+IEhlbnRhaQ== 92042
+IEJhc2VtZW50 92043
+IG1vcmI= 92044
+X2N1 92045
+amFkaQ== 92046
+KHByb2o= 92047
+IEJpbmdv 92048
+X2NhdGU= 92049
+W2VtYWls 92050
+Klg= 92051
+X1NFUA== 92052
+IHByaW5jaXBpbw== 92053
+dXBkYXRpbmc= 92054
+Ly99fQ== 92055
+Li4uKA== 92056
+IERPRQ== 92057
+IHpn 92058
+c2hhcGVz 92059
+PXRtcA== 92060
+Q3J1ZA== 92061
+IHdvcmtwbGFjZXM= 92062
+IHN0YWJpbGl6ZWQ= 92063
+IHRlbnRhbmc= 92064
+LnByb2R1Y3RJZA== 92065
+IFRyaWRlbnQ= 92066
+IG9yY2hlc3RyYXRlZA== 92067
+IEJ1Y2NhbmVlcnM= 92068
+X3RvbGVyYW5jZQ== 92069
+aWdyYXBoeQ== 92070
+w7xsZXI= 92071
+INi1 92072
+QVE= 92073
+IGF0aGxldGljaXNt 92074
+CVNlcnZlcg== 92075
+ZXdlZA== 92076
+RGlkRW50ZXI= 92077
+UmVnaXN0ZXJz 92078
+X2VtbHJ0 92079
+IGZ1bmN0aW9uYWxpdGllcw== 92080
+KGhkYw== 92081
+X21hcmtlcnM= 92082
+T3JlZ29u 92083
+KFN0cg== 92084
+IEdldEJ5SWQ= 92085
+IHp3YXJ0ZQ== 92086
+IE9DSQ== 92087
+IEphbWU= 92088
+X2NyaXQ= 92089
+IHN0b2NraG9sbQ== 92090
+CURpY3Rpb25hcnk= 92091
+X2NhcGFiaWxpdGllcw== 92092
+Q1RS 92093
+IG51bWE= 92094
+X2ZpcnN0bmFtZQ== 92095
+IE5TUmFuZ2U= 92096
+IG1vc3RyYQ== 92097
+IEFycml2YWw= 92098
+KElTZXJ2aWNlQ29sbGVjdGlvbg== 92099
+IHRlYXNwb29ucw== 92100
+IFNldFVw 92101
+CQkNCg0K 92102
+KGd1aWxk 92103
+LiJd 92104
+IG3hu5tp 92105
+YmZm 92106
+REFURVM= 92107
+KCldCgo= 92108
+IGh1bWFub2lk 92109
+dGhybw== 92110
+KGtsYXNz 92111
+IFZhZA== 92112
+ZnNw 92113
+LVNhaA== 92114
+IFVTRVJOQU1F 92115
+IFByb3BlcnR5Q2hhbmdlZEV2ZW50QXJncw== 92116
+IGxlc2lvbg== 92117
+X0RFTklFRA== 92118
+IFRISU5L 92119
+gqQ= 92120
+bWVudGFs 92121
+IHByZWNhcmlvdXM= 92122
+IE5vc2U= 92123
+IGNvbmNs 92124
+IHdpbGRmaXJl 92125
+IFRCcmFuY2g= 92126
+IEJBTQ== 92127
+L2Nzdg== 92128
+IE5BTg== 92129
+IENsZWFyYW5jZQ== 92130
+XEJsb2Nr 92131
+LmFubm90YXRl 92132
+5om+ 92133
+IFdISUxF 92134
+Z2VidW5n 92135
+Pkxpc3Q= 92136
+c2ht 92137
+Um9zcw== 92138
+YWZk 92139
+W3RpZA== 92140
+UGVyUGl4ZWw= 92141
+Kyhc 92142
+IEN5YW4= 92143
+IEtub3Q= 92144
+X3Zsb2c= 92145
+L3Zhcg== 92146
+W19f 92147
+IGhhc2htYXA= 92148
+KCk7DQ0K 92149
+IGFtYXNzZWQ= 92150
+IGRhdGVQaWNrZXI= 92151
+IFNhdG9zaGk= 92152
+X0NBUEFDSVRZ 92153
+IGJ1eg== 92154
+IE1pbmg= 92155
+U2V0Q29sb3I= 92156
+Kz0nPA== 92157
+IEludmVudA== 92158
+b3JjYQ== 92159
+aWdudW0= 92160
+IEFtcGg= 92161
+IHJlZmx1eA== 92162
+CiAgICAgICAgICAgICAgICAgICAgICAgIAo= 92163
+dWhu 92164
+KFRN 92165
+YWxsZXk= 92166
+IGxlZnRvdmVycw== 92167
+ZmRj 92168
+4oCcVGhlc2U= 92169
+IGNyYXdsZWQ= 92170
+KFZvaWQ= 92171
+aWd0ZQ== 92172
+8J+S 92173
+c2V0RGVmYXVsdA== 92174
+IEJlZ2lubmVy 92175
+UG9r 92176
+IEhMUw== 92177
+IGdhbWVJZA== 92178
+IEFtYmllbnQ= 92179
+X1BSRUQ= 92180
+LiJ9LAo= 92181
+w7xocnVuZw== 92182
+LlN5bmM= 92183
+IGludmU= 92184
+IE51cnNlcnk= 92185
+IGdsYXplZA== 92186
+q+yekA== 92187
+X2ZhdGFs 92188
+X2Rpc3BhdGNoZXI= 92189
+W10pDQo= 92190
+IGRldXRzY2hlbg== 92191
+6rGw 92192
+U2hhcGVz 92193
+IGlycmV2ZXJzaWJsZQ== 92194
+X3Blcw== 92195
+X2VzYw== 92196
+IHRoZXJtb21ldGVy 92197
+44OU44O8 92198
+X3NxcnQ= 92199
+Il09PSI= 92200
+IGN1bG1pbmF0aW9u 92201
+V29yZFByZXNz 92202
+IGxldmVu 92203
+VmVydGV4VXZz 92204
+IEhheXdhcmQ= 92205
+IEFzc2V0SW1hZ2U= 92206
+IG1haXpl 92207
+IGNoaWNhZ28= 92208
+IHRhdg== 92209
+ZXhwZW5zZXM= 92210
+0K0= 92211
+K2Y= 92212
+LiInIjsK 92213
+LVNB 92214
+IEtvdGE= 92215
+TWFpbkZyYW1l 92216
+LnNhbGU= 92217
+X0JV 92218
+IHN0cmVu 92219
+X2ZpbHQ= 92220
+L3ByaW50 92221
+KFBhY2tldA== 92222
+INC30LDQsg== 92223
+QWN0cw== 92224
+0LXQu9C10YQ= 92225
+IHJlbWF0Y2g= 92226
+IHJpZGRlbg== 92227
+IH0pKCk7Cg== 92228
+IGVuZG90aA== 92229
+IGNlcnRpZnk= 92230
+IFVJUGlja2VyVmlldw== 92231
+XE5vdGlmaWNhdGlvbnM= 92232
+CVRpdGxl 92233
+IGluZXF1YWxpdGllcw== 92234
+IE1vcmFu 92235
+IERhZW1vbg== 92236
+bGVzaWE= 92237
+IGhvcHBpbmc= 92238
+IGd1c3Rv 92239
+IEZpcmViYXNlRmlyZXN0b3Jl 92240
+IHBvbHlsaW5l 92241
+IHNwaWtlZA== 92242
+JSIpOwo= 92243
+IExBVElO 92244
+TGFiZWxUZXh0 92245
+IHN0cmFwb24= 92246
+X2ZpZA== 92247
+LXNwZWNpYWw= 92248
+YXJnZWQ= 92249
+IFNUSUxM 92250
+UXVhbGlmaWVkTmFtZQ== 92251
+LlJFUw== 92252
+I2M= 92253
+LndyaXRlbG4= 92254
+IEltbXV0YWJsZUxpc3Q= 92255
+IFRodW1i 92256
+IHNpbWQ= 92257
+RGVzY3JpY2Fv 92258
+LlNldFRleHQ= 92259
+IG5vbnByb2ZpdHM= 92260
+V2l0aGRyYXc= 92261
+LWVuY29kZWQ= 92262
+c2Jpbg== 92263
+IGFtb3J0 92264
+CWRk 92265
+cmlm 92266
+IHBhdGVybmFs 92267
+Lk1hcEZyb20= 92268
+X2Fzaw== 92269
+IHJlY291cnNl 92270
+IGJhY2tzdG9yeQ== 92271
+CW1hbmFnZXI= 92272
+X0RHUkFN 92273
+IEJpaGFy 92274
+aW50ZWxsaWdlbmNl 92275
+IHNraW1hZ2U= 92276
+KGVuY29kZXI= 92277
+IHN3aXJsaW5n 92278
+IEFwcGV0 92279
+X3NhbHQ= 92280
+IGF0dGU= 92281
+IFNRVUFSRQ== 92282
+IE5ldHo= 92283
+X3BhaW50 92284
+YXPEsQ== 92285
+aXNjaQ== 92286
+Rmxv 92287
+LWdvYWw= 92288
+LnNldFN0cm9rZQ== 92289
+IEF1c2Nod2l0eg== 92290
+IEFiZGVs 92291
+IGFuZXc= 92292
+IOWung== 92293
+IHRvdGFsUGFnZXM= 92294
+IHJlZmFjdG9y 92295
+IGNyZWF0aXZlbHk= 92296
+ZW1heA== 92297
+b2RveHk= 92298
+X3R4bg== 92299
+LlNvY2tldHM= 92300
+IFJpZGxleQ== 92301
+4buxYw== 92302
+c2FtcA== 92303
+TWluTWF4 92304
+IHdvcnNlbmluZw== 92305
+b3VudGFpbnM= 92306
+YXJ0bmVy 92307
+LXByb2Y= 92308
+c2luZ3VsYXI= 92309
+PWlz 92310
+IEZFQw== 92311
+X0ZN 92312
+IOaIlg== 92313
+IENhdWdodA== 92314
+X1NDTA== 92315
+IGV4cG8= 92316
+aW5mcmE= 92317
+IE1FUw== 92318
+Y2hhcA== 92319
+YWx0ZQ== 92320
+YXJraW4= 92321
+L21M 92322
+IHNlbmREYXRh 92323
+IGZyYW7Dp2Fpc2U= 92324
+IHPDpg== 92325
+X0RFRklOSVRJT04= 92326
+KioqKioqCgo= 92327
+XEN1c3RvbWVy 92328
+IOKWiOKWiOKWiOKWiOKWiA== 92329
+IHBlcnBldHJhdGVk 92330
+IEZ1cmlvdXM= 92331
+IHRlbmdh 92332
+bGVhcmVk 92333
+VUxMRVQ= 92334
+aW5pYw== 92335
+ZWFyY2hCYXI= 92336
+PENhcg== 92337
+IFJlbmV3YWJsZQ== 92338
+IGNvbnRlbXBsYXRlZA== 92339
+L2Zvcm1hdA== 92340
+IGZvcmdpdmluZw== 92341
+LlN1YkVsZW1lbnQ= 92342
+UFVURQ== 92343
+LmNvbnRlbnRTaXpl 92344
+IHJlc3BlY3RmdWxseQ== 92345
+4oCcCgo= 92346
+IHBvaWduYW50 92347
+dXJpbGU= 92348
+fSkiCg== 92349
+c2VxdWVudGlhbA== 92350
+L2Zhc3Q= 92351
+cHJ1bmc= 92352
+IFN0dW5uaW5n 92353
+IEJZVQ== 92354
+IGNvbXBhcmVy 92355
+CXJk 92356
+dW5pY29ybg== 92357
+xrBh 92358
+LkdldEl0ZW0= 92359
+IHNlY3Rpb25hbA== 92360
+anVkZ2U= 92361
+dXh0YXA= 92362
+IHN1bmRheQ== 92363
+IHDDpA== 92364
+TWlubmVzb3Rh 92365
+Ik4= 92366
+IGFwcGxpY2F0aW9uV2lsbA== 92367
+QU5HRVI= 92368
+IHJlYXNvbmVk 92369
+IFpFTkQ= 92370
+emFw 92371
+PWJhY2s= 92372
+b3NwaGF0ZQ== 92373
+6IqC54K5 92374
+IHRpdHRlbg== 92375
+IEFzc29j 92376
+QWN0aXZpdHlDcmVhdGVk 92377
+KVst 92378
+PyIKCgoK 92379
+IGpvdA== 92380
+2Lg= 92381
+IHVuY29tcHJlc3NlZA== 92382
+LklzREJOdWxs 92383
+IHZhc2U= 92384
+IGxvcmVt 92385
+IGVudHJlcHJpc2U= 92386
+IENvbnNlbnQ= 92387
+44Op44Oz 92388
+QnlWZXJzaW9u 92389
+IHF1aWVuZXM= 92390
+CWNvbnQ= 92391
+IEJsYWNraGF3a3M= 92392
+IEJsYXNpbw== 92393
+IHRhbmtlcg== 92394
+IHN0YXJ0dGltZQ== 92395
+IFNlYXM= 92396
+cGlvcw== 92397
+LlNwbGl0Q29udGFpbmVy 92398
+Y29tcGV0aXRpdmU= 92399
+IHBCdWZmZXI= 92400
+IGNvbnNlbnRpbmc= 92401
+LmFkZE9ic2VydmVy 92402
+aXRjaGVk 92403
+IG1pc2NlbGxhbmVvdXM= 92404
+IFRvcHM= 92405
+CWxw 92406
+Y21kcw== 92407
+LmRlcGFydA== 92408
+IGZOYW1l 92409
+CWJlc3Q= 92410
+OlA= 92411
+IHN3YXRo 92412
+IHZva3M= 92413
+YWxsb24= 92414
+IEh0bWxXZWJwYWNrUGx1Z2lu 92415
+LmxvZ2dlZElu 92416
+YnVja2V0cw== 92417
+IGhvbW9waG9iaWM= 92418
+IHN1YmR1ZWQ= 92419
+IG1lc3NhZ2Vib3g= 92420
+V2hhdHNBcHA= 92421
+IGRpc3NpcA== 92422
+IE1BTlVBTA== 92423
+TElLRUxZ 92424
+dGVzdGRhdGE= 92425
+LU9jdA== 92426
+RXhpdGVk 92427
+IFRhc21hbmlh 92428
+bGFj 92429
+IHRow7RuZw== 92430
+U3Rvcmllcw== 92431
+IGJpb2NoZW1pY2Fs 92432
+b3JyZQ== 92433
+IGVjbGlwcw== 92434
+IEFzc2VtYmx5UHJvZHVjdA== 92435
+cnRsZQ== 92436
+IFdpbGhlbG0= 92437
+cGl6emE= 92438
+X0RI 92439
+Y29uag== 92440
+IHB1ZWJsbw== 92441
+IGxpcXVl 92442
+IGN1cGlk 92443
+IEFjdGl2aXR5Q29tcGF0 92444
+LlNt 92445
+Il19 92446
+bWFpbGJveA== 92447
+Lm9wdFN0cmluZw== 92448
+LW9i 92449
+IE1hdWk= 92450
+YXRhaXJlcw== 92451
+IG1lcnJ5 92452
+Um5k 92453
+IGNhcmFjdGVyw61zdGljYXM= 92454
+VHJv 92455
+KGNu 92456
+Lmxk 92457
+LXBvaW50cw== 92458
+LnNi 92459
+IHZlag== 92460
+IGNhcmVnaXZlcg== 92461
+IG5hdQ== 92462
+RElSRUNUT1JZ 92463
+KGFuZw== 92464
+KC4p 92465
+IGV4cGxhbmF0b3J5 92466
+ZWxzZXk= 92467
+IE92ZXJuaWdodA== 92468
+IGxhaXNzZQ== 92469
+IFJBVEU= 92470
+IEdvdw== 92471
+UmVjb2duaXRpb25FeGNlcHRpb24= 92472
+aWNoZXJ0 92473
+IHJldm9sdXRpb25z 92474
+JGNhdGVnb3J5 92475
+IHVuZGVmZWF0ZWQ= 92476
+L2NvbW11bml0eQ== 92477
+LXBhcnRz 92478
+LWFwcGxpY2F0aW9u 92479
+K0E= 92480
+L3N3ZWV0YWxlcnQ= 92481
+IEtt 92482
+aWxhdGVk 92483
+YXRhdA== 92484
+UEFU 92485
+xI1l 92486
+IFRlYw== 92487
+Lm9uQWN0aXZpdHlSZXN1bHQ= 92488
+XFdlYg== 92489
+IEx1Zw== 92490
+b3ZvbHRh 92491
+IGFsdHJ1 92492
+aWd5 92493
+IGLEmWTEhQ== 92494
+IGFjdGl2YXRpb25z 92495
+IGF1ZGl0aW5n 92496
+RVJHRQ== 92497
+IOiLpQ== 92498
+Q2FybG9z 92499
+IGtJbnN0cnVjdGlvbg== 92500
+bWluZXI= 92501
+IH19Lw== 92502
+QW5kSGFzaENvZGU= 92503
+IEJvdXJib24= 92504
+LnByb2Y= 92505
+IGltcHJpbWly 92506
+IEZlcmRpbmFuZA== 92507
+0LzQtdC90YI= 92508
+L3t9Lw== 92509
+IENsYWly 92510
+IE9uQ29sbGlzaW9u 92511
+c2FsZG8= 92512
+cmFpc2Vk 92513
+IEFCT1ZF 92514
+KCk9Pg== 92515
+IGRldXRzY2hsYW5k 92516
+aGliaXRlZA== 92517
+RXh0cmVtZQ== 92518
+L2hvb2tz 92519
+IGRvdXQ= 92520
+IFZPQw== 92521
+ZXRob3Zlbg== 92522
+UE1D 92523
+IHJlc3RhcnRpbmc= 92524
+IFNDTg== 92525
+IEVP 92526
+IERKcw== 92527
+UGFzc3dvcmRGaWVsZA== 92528
+LkFjY2Vzc2libGU= 92529
+CWJ1cw== 92530
+U1RSVUNUSU9OUw== 92531
+IGxhdGVu 92532
+IFNOQVA= 92533
+X0hFUlNIRVk= 92534
+IG9uc3RhZ2U= 92535
+5bCP5pe2 92536
+IHNhaWxvcg== 92537
+IEN1cnNv 92538
+IGltcHJvdmlzZWQ= 92539
+IGdlbmVyYWxpemU= 92540
+IGJ1ZW5v 92541
+IGNlcmVtb25pYWw= 92542
+IENOUw== 92543
+IHBpZ2Vvbg== 92544
+bXNw 92545
+L0FJRFM= 92546
+bGluZUVkaXQ= 92547
+IEZpbmFuY2luZw== 92548
+IGpUYWJsZQ== 92549
+IGJvdHRvbXM= 92550
+IFRleHRJbnB1dFR5cGU= 92551
+IG1laXNqZQ== 92552
+LXNpZ25lZA== 92553
+IEdyZWVudmlsbGU= 92554
+b3BoaWxpYQ== 92555
+SWNvbk1vZHVsZQ== 92556
+IGNsYW5kZXN0 92557
+ZW1haW4= 92558
+U0NBTg== 92559
+X1RJTUVT 92560
+IGxlY2tlbg== 92561
+KGNhbmNlbA== 92562
+IGVjc3Rhc3k= 92563
+Lk1VTFQ= 92564
+IG1vZXRlbg== 92565
+IGFwcHJvcHJpYXRpb25z 92566
+IFFMRA== 92567
+IEd1aWw= 92568
+IHRyYXBwaW5n 92569
+eERB 92570
+IGvDtmxu 92571
+ZW51bXM= 92572
+4oCcVG8= 92573
+cG9ydG8= 92574
+bmluZ2Fy 92575
+IFRPTw== 92576
+LVNU 92577
+IE1hdGhz 92578
+IGt1cnM= 92579
+IFJFUEw= 92580
+X2NvbnRyaWI= 92581
+IFBoeQ== 92582
+cmFuZw== 92583
+Lm1hdmVu 92584
+LWZvbGxvdw== 92585
+IC0tLS0tLS0tLS0t 92586
+xLHEnw== 92587
+X3dpbm5lcg== 92588
+LkNyaXRlcmlh 92589
+KGRhdGFTb3VyY2U= 92590
+IHNldElucHV0 92591
+IFRJTUVTVEFNUA== 92592
+b3BlcmFuZHM= 92593
+Z2V0V2luZG93 92594
+LmZhY2VWZXJ0ZXhVdnM= 92595
+IEludmVzdGluZw== 92596
+Vnk= 92597
+IHBlcnNlY3V0ZWQ= 92598
+4bq/dQ== 92599
+IFBsdW1iaW5n 92600
+T05HT0RC 92601
+RXZpZGVuY2U= 92602
+IFN0cm9t 92603
+cXVvdGE= 92604
+TGl2ZXJwb29s 92605
+CWF0dGFjaw== 92606
+bWluaW1hbA== 92607
+IG9uS2V5RG93bg== 92608
+IG1vZHVsZUlk 92609
+IFZlcmFuc3Q= 92610
+bW9ydA== 92611
+YWNpc3Rz 92612
+IE1BU1M= 92613
+X1VOREVS 92614
+LmdldFJ1bnRpbWU= 92615
+RU5USUNBVElPTg== 92616
+Uk9LRQ== 92617
+IHNjYWxlWA== 92618
+IHNlcnRh 92619
+IEZyZXF1ZW50bHk= 92620
+X1RSQU5TRk9STQ== 92621
+IHR3aWxpZ2h0 92622
+IE1jS2Vuemll 92623
+bGVkZ2Vk 92624
+IEB7QCI= 92625
+X0FDVElW 92626
+IGhvb2tlcnM= 92627
+PWRlZmF1bHQ= 92628
+IHdhbG51dA== 92629
+IHVzZU5ld1VybFBhcnNlcg== 92630
+IENoZWVy 92631
+IHdyb25nZnVs 92632
+bmlv 92633
+YnRj 92634
+LnN0cmlkZQ== 92635
+IHN1Y2Nlc2Z1bGx5 92636
+IFRyb2xs 92637
+aWZpY2lv 92638
+LmNvbmQ= 92639
+IGhlYXBz 92640
+X1BIT1RP 92641
+PEFkZHJlc3M= 92642
+IFN0aWNreQ== 92643
+IG5pZ2h0dGltZQ== 92644
+IGRhbmRv 92645
+IEJJTEw= 92646
+INC+0YLQstC10YI= 92647
+RGV0ZXJtaW4= 92648
+IGZ6 92649
+KHNpZ25hdHVyZQ== 92650
+IHZpbmRlbg== 92651
+LkNPTk5FQ1Q= 92652
+cnVpc2U= 92653
+IHh1 92654
+cHJldmVudA== 92655
+Rk9Y 92656
+VUlBcHBsaWNhdGlvbkRlbGVnYXRl 92657
+U3BsYXNo 92658
+IGVtYnJvaWRlcmVk 92659
+IEhpbGZl 92660
+LnNoYWRlcg== 92661
+IGRvdWJ0ZWQ= 92662
+UmVzcG9uc2VTdGF0dXM= 92663
+IHVuc3RvcHBhYmxl 92664
+dW5sb2Fk 92665
+KyJd 92666
+ImxhYmVs 92667
+IGZyZWVsYW5jZXI= 92668
+RGlyZWN0ZWQ= 92669
+IHZvcmhhbmQ= 92670
+IFNubw== 92671
+ZXhpc3RlbmNl 92672
+b3JkaWFs 92673
+emFn 92674
+LkFnZQ== 92675
+IHNwYXducw== 92676
+IFBTRw== 92677
+c3RpdHV0aW9ucw== 92678
+IHNpZ2h0aW5n 92679
+LXRhbGs= 92680
+INGB0L7RhdGA0LDQvQ== 92681
+ZW5lcmltYQ== 92682
+IEJlbnRvbg== 92683
+X1N0b3Jl 92684
+VHJhbnNwYXJlbnRDb2xvcg== 92685
+IEV4cGxvc2lvbg== 92686
+X0lTUw== 92687
+Q2hlY2twb2ludA== 92688
+IGRlZmxhdGU= 92689
+0JLRi9Cx 92690
+LXRyYW5zZmVy 92691
+IEJhYmllcw== 92692
+IGltYQ== 92693
+LnVzYWdl 92694
+IG5lZ2F0aXZpdHk= 92695
+IEV4dHJlbWVseQ== 92696
+a2o= 92697
+RG93bmxvYWRlcg== 92698
+CWFjdA== 92699
+W2NoYXI= 92700
+Tm9ybWFscw== 92701
+X3JlZmVyZW5jZXM= 92702
+IGRyYWNvbg== 92703
+4bulYw== 92704
+X1RSTlM= 92705
+Y29tcGFueUlk 92706
+IFZlcmQ= 92707
+YW5pbw== 92708
+IE1hdGNoZXJz 92709
+KHJlbGF0aXZl 92710
+IHJlZWxlY3Rpb24= 92711
+LkhF 92712
+VGF1 92713
+INGB0YLRgNC+0LrQuA== 92714
+IE1ldGFscw== 92715
+IENvY2t0YWls 92716
+IGFwcmVuZGVy 92717
+X3ByZWZlcmVuY2U= 92718
+LlNjaGVtZQ== 92719
+IGdsR2V0VW5pZm9ybUxvY2F0aW9u 92720
+VXNpbmdFbmNvZGluZw== 92721
+0YDQsw== 92722
+ICJdIik7Cg== 92723
+TGVhZGVycw== 92724
+J8OqdHJl 92725
+X0RlbGF5 92726
+UHJvY2Vzc2Vz 92727
+aWN1bHR1cmU= 92728
+XCI6e1wi 92729
+4oCUIg== 92730
+RW1vamk= 92731
+LWdyb3c= 92732
+IENDRA== 92733
+Y29tcG9zZWQ= 92734
+TWFpbnRlbmFuY2U= 92735
+IFJ5emVu 92736
+KGFn 92737
+LnByb2I= 92738
+IFNpbmF0cmE= 92739
+IGhvcnJlbmQ= 92740
+IE1vdW50ZWQ= 92741
+X1BFRVI= 92742
+IGN1aw== 92743
+IHPDuGtlcg== 92744
+IFF1YXI= 92745
+X1JFU09MVVRJT04= 92746
+J2VhdQ== 92747
+IGJvdXJib24= 92748
+IGF0SW5kZXg= 92749
+L3BvbA== 92750
+IOq0gA== 92751
+CXB3 92752
+fSl9Cg== 92753
+LmZvcm1EYXRh 92754
+IHVkZW4= 92755
+IHJvYXJpbmc= 92756
+Tm90aWZpY2F0aW9uQ2VudGVy 92757
+IGNsdXN0ZXJlZA== 92758
+IHBhaXJ3aXNl 92759
+bXVsdGlsaW5l 92760
+R2FtZURhdGE= 92761
+Lkxhcmdl 92762
+KSc6 92763
+INGB0LXRgNCy0LXRgA== 92764
+IFVJTWFuYWdlcg== 92765
+U3Zj 92766
+IFBsYXlzdGF0aW9u 92767
+Lk1vcmU= 92768
+LnF1YWxpdHk= 92769
+IGNvbmZpZ0ZpbGU= 92770
+LWNvbnRhaW5pbmc= 92771
+IEdvYXQ= 92772
+ZW5jaW9u 92773
+IGxpa2VuZXNz 92774
+LXVzaW5n 92775
+IHNlYXNpZGU= 92776
+4bqpdQ== 92777
+YW50aWNpcGF0ZWQ= 92778
+Rm9sZGVycw== 92779
+LUxldmVs 92780
+b3BjaW9u 92781
+KXByZXBhcmVGb3JTZWd1ZQ== 92782
+PigpKQ== 92783
+PWFkZA== 92784
+XGdyaWQ= 92785
+IHln 92786
+X0RSSVZF 92787
+IEdldE5hbWU= 92788
+LkRBTw== 92789
+IGhhbm4= 92790
+CWNhdA== 92791
+IHZpZ24= 92792
+IEhlbGxlcg== 92793
+IENSRUFURUQ= 92794
+YmVyb3M= 92795
+YnV0dA== 92796
+IGJlbmRz 92797
+IExlZXI= 92798
+0KY= 92799
+IFNNUA== 92800
+VmVjdA== 92801
+IG9iamVjdFR5cGU= 92802
+OmFzeW5j 92803
+IGNvbXBldGVuY3k= 92804
+IFF0QXdz 92805
+TG91 92806
+L2NhdA== 92807
+UHJvc3RpdA== 92808
+LXZlcw== 92809
+CXR2 92810
+IEVJ 92811
+QW5kV2FpdA== 92812
+IFRPT0w= 92813
+fSo= 92814
+X1Jlcw== 92815
+IGFsaWdubWVudHM= 92816
+7KGw 92817
+IENsYW1w 92818
+LXBhZA== 92819
+IHdyaXRlRmlsZQ== 92820
+IEFwcHJlYw== 92821
+4oCZYXV0cmVz 92822
+dWRhZGVz 92823
+IGx1Z2FyZXM= 92824
+c3BlbmRlcg== 92825
+W2ltYWdl 92826
+RVhJU1Q= 92827
+IGRlY2VpdmU= 92828
+IGh1bnRz 92829
+X1ZPSUNF 92830
+X0RY 92831
+Q0FD 92832
+ICgoJw== 92833
+aXNrcw== 92834
+LGZpbGVuYW1l 92835
+IGxlYW5z 92836
+SW5wdXREaWFsb2c= 92837
+RGF0YUNvbnRyYWN0 92838
+IHNtb290aGVk 92839
+IHJlY3J1aXRlcnM= 92840
+IHRhbmdsZWQ= 92841
+X1RhYg== 92842
+IEZpbGVBY2Nlc3M= 92843
+WUM= 92844
+IHZY 92845
+PGR5bg== 92846
+TGV4ZXI= 92847
+IOKYhg== 92848
+IGdsR2Vu 92849
+VGVtcG9yYWw= 92850
+IEFURg== 92851
+YW5rbw== 92852
+VXNlckNvZGU= 92853
+IEtvdGxpbg== 92854
+Li4KCgoK 92855
+RU5DRUQ= 92856
+LnVudHJhY2tlZA== 92857
+X21y 92858
+IHdhdmVsZW5ndGhz 92859
+IGRpY2hv 92860
+IGltdQ== 92861
+X2NyZQ== 92862
+W0o= 92863
+X0RG 92864
+IGF0dGFpbm1lbnQ= 92865
+IGxpdGVycw== 92866
+W2tleXM= 92867
+IGxpc3Rhcg== 92868
+SHR0cHM= 92869
+IGJyZXdlcnM= 92870
+IGFjb21wYcOx 92871
+IHRvYXN0ZWQ= 92872
+LmZyaWVuZA== 92873
+IHJlbHU= 92874
+IFBzeWNoaWM= 92875
+TWFuaXA= 92876
+ZG5h 92877
+UHJp 92878
+LWZsYXNo 92879
+KGFydGlzdA== 92880
+IEtvdg== 92881
+cHJlc2VydmU= 92882
+X3BlbWI= 92883
+LnNldFByb2dyZXNz 92884
+IGR1c2s= 92885
+IGNhbm5hYmlub2lkcw== 92886
+IEt1bmQ= 92887
+IENvdW50aWVz 92888
+IO2OmOydtOyngA== 92889
+IHJlbmFtaW5n 92890
+IFJ1c3Nv 92891
+TlNTZXQ= 92892
+KEVYUFI= 92893
+5YW25LuW 92894
+RGlhZ3JhbQ== 92895
+LGxhc3Q= 92896
+KHdpdGhEdXJhdGlvbg== 92897
+IGluZGVidGVk 92898
+IERpY2tlbnM= 92899
+IEFscHM= 92900
+IERlZ3JlZXM= 92901
+aWRhcg== 92902
+LWJsb29k 92903
+K29mZnNldA== 92904
+IEh1ZA== 92905
+b3VuZGVy 92906
+dWxuZXJhYmxl 92907
+IHByaW8= 92908
+YmxpbmQ= 92909
+KHBhY2s= 92910
+IG5pZ2h0bGlmZQ== 92911
+IGlsbHVzdHJhdGluZw== 92912
+IG51dHNoZWxs 92913
+IGJyb2FkY2FzdGVycw== 92914
+IGNvbXBhbnlOYW1l 92915
+aXRvcmU= 92916
+LnJpZ2h0QmFyQnV0dG9uSXRlbQ== 92917
+Ym90ZQ== 92918
+IFBJVA== 92919
+LXNjcm9sbGJhcg== 92920
+IHdpbmR5 92921
+IFFNYWluV2luZG93 92922
+aHVl 92923
+LmVwb2No 92924
+IGNhbWVy 92925
+IENMVUI= 92926
+aWZhcg== 92927
+VW5hdmFpbGFibGU= 92928
+LXF1b3Rl 92929
+IEdyYXo= 92930
+IHZhbHU= 92931
+X01BVEVSSUFM 92932
+IHBlbnk= 92933
+IHRyYXR0 92934
+IGxpY2tlZA== 92935
+CWNhbg== 92936
+IFRhaXdhbmVzZQ== 92937
+UGFnZUluZGV4 92938
+LlRpcG8= 92939
+X1JlZA== 92940
+IHZmcw== 92941
+X3RyYW1wb2xpbmU= 92942
+IE1QUw== 92943
+IFBlYW51dA== 92944
+IExvY2tlZA== 92945
+CUFU 92946
+anNwYg== 92947
+X05PREVT 92948
+J1dl 92949
+IENvbnZlbmllbnQ= 92950
+X3N1Y2Nlc3NmdWw= 92951
+K3o= 92952
+WUxlYWY= 92953
+IHBlZGlncmVl 92954
+eHo= 92955
+IHNhbHZhcg== 92956
+X0Rlc2M= 92957
+IG5lc3Rh 92958
+IGhhcmRjb2RlZA== 92959
+LmdvbGQ= 92960
+LkltYWdlRmllbGQ= 92961
+X0JT 92962
+TEs= 92963
+Q2hvY29sYXRl 92964
+LlN0YXJ0dXA= 92965
+IGFuZWNkb3Rlcw== 92966
+Lk1h 92967
+P10= 92968
+L3RvcGlj 92969
+LlNjcm9sbEJhcnM= 92970
+0YHRgtCy0LA= 92971
+IE1PTQ== 92972
+IHFvcw== 92973
+YXJ5YW5h 92974
+w6RjaHN0 92975
+IE1jR2lsbA== 92976
+IEVEVUM= 92977
+KHBvc3Rz 92978
+IEVudHdpY2tsdW5n 92979
+X3NraWxscw== 92980
+LWd1YXJk 92981
+IHRleHRpbGVz 92982
+fHVuaXF1ZQ== 92983
+IEFyaXRobWV0aWM= 92984
+TG9hZElkZW50aXR5 92985
+KTt9Cgo= 92986
+IGFzc3VyZXM= 92987
+V2lsZGNhcmQ= 92988
+IGRlZmF1bHRlZA== 92989
+IE5vdFN1cHBvcnRlZEV4Y2VwdGlvbg== 92990
+IFRvbWF0bw== 92991
+LlN1bW1hcnk= 92992
+ISIu 92993
+dXRoZXJmb3Jk 92994
+IGxvb3Bob2xl 92995
+IGNtYWtl 92996
+LWRhdA== 92997
+IHJhZ2F6em8= 92998
+IGNhcGl0YWxz 92999
+IEltcG9ydGFuY2U= 93000
+IER1bmdlb25z 93001
+X3pvbmVz 93002
+LnNhdA== 93003
+ICAgICAgCiAgICAgIAo= 93004
+Y2F0ZWdvcmlhcw== 93005
+IGRhdGF0YWJsZQ== 93006
+IG5hamxl 93007
+KGdw 93008
+LXJlbg== 93009
+IHBhbmlja2Vk 93010
+IFNreWw= 93011
+IFFVSUNL 93012
+dmFsdWVPZg== 93013
+U3RhdGlzdGlj 93014
+IGRlbWVhbm9y 93015
+bmRlcm4= 93016
+IEFwcGVhcnM= 93017
+UHJhZ21h 93018
+X3Bhc3Q= 93019
+SGFzaHRhYmxl 93020
+IHRoYW5raW5n 93021
+LmNzcmY= 93022
+IHBhdmU= 93023
+IFZpY3RpbQ== 93024
+IFDDpQ== 93025
+Rmlyc3RuYW1l 93026
+Q0FURUdPUlk= 93027
+aWxlc3RvbmU= 93028
+JyktPl9fKCc= 93029
+IGluY2FwYWM= 93030
+U3RyZWFtV3JpdGVy 93031
+IGNvbW11bmlvbg== 93032
+X3N0ZGVycg== 93033
+6Ieq5rK7 93034
+IGh1bWFuaXRpZXM= 93035
+INC70Y4= 93036
+IFBhcmFz 93037
+bG9mZg== 93038
+SGVhZGVyVGV4dA== 93039
+Z3JlZ2F0ZWQ= 93040
+LlhSVGFibGVDZWxs 93041
+IGVudGl0eUlk 93042
+IE1hc3Rlcnk= 93043
+b2xkdA== 93044
+JykpKTsKCg== 93045
+aHVtaWRpdHk= 93046
+Li4uIik7Cgo= 93047
+RGVsdGFUaW1l 93048
+IG1rdGltZQ== 93049
+UGhvdG9u 93050
+IHBlbnNhcg== 93051
+c2NhbGluZw== 93052
+X3llbGxvdw== 93053
+X211bHRpcGx5 93054
+IFZ1bGNhbg== 93055
+IFBlYXJjZQ== 93056
+X2xj 93057
+LWV4Y2x1c2l2ZQ== 93058
+SXNVbmljb2Rl 93059
+IHBhZHI= 93060
+X1BDSUU= 93061
+IGdsaW1wcw== 93062
+IHJhbXBhZ2U= 93063
+IFBhZ2luYXRvcg== 93064
+IGNvbnZleWluZw== 93065
+bm9yZQ== 93066
+X2RldGFjaA== 93067
+J10hPSc= 93068
+IGJvbmE= 93069
+CUNvbg== 93070
+TmF6 93071
+IHNlZ3VpbnQ= 93072
+IG1pZXN6 93073
+IGVzb3M= 93074
+ICcvJykK 93075
+IGZhaXRoZnVsbHk= 93076
+IGJla29t 93077
+0LDQutGB 93078
+d2hlbG1pbmc= 93079
+LnR3bw== 93080
+IFNDRQ== 93081
+LW5h 93082
+ICgpew== 93083
+IERhbWVu 93084
+X3RndA== 93085
+YWRhbGFmaWw= 93086
+IE1NSQ== 93087
+VGhpbg== 93088
+IGRlcHJlY2lhdGlvbg== 93089
+IGFic2VudGVl 93090
+IHNhbGFyaW8= 93091
+IFNvbWVib2R5 93092
+IFNsb2Fu 93093
+IGVyZm9sZ3JlaWNo 93094
+Ok5TTG9jYWxpemVkU3RyaW5n 93095
+IGdlaMO2cnQ= 93096
+IGVtbw== 93097
+IExhZ3VuYQ== 93098
+w6FzYQ== 93099
+aXN0cmF0ZXM= 93100
+UmFpc2U= 93101
+IEFzdHJvcGg= 93102
+ICdcXCc= 93103
+X3BlZA== 93104
+IFRIUk9VR0g= 93105
+IE5pZXR6c2NoZQ== 93106
+ZW5lcmF0aW5n 93107
+b3BsYXllcg== 93108
+IHJvZGVudHM= 93109
+w7xobA== 93110
+R2FtZU1hbmFnZXI= 93111
+IEhlYWRlckNvbXBvbmVudA== 93112
+IG1pbGFu 93113
+cXVlZW4= 93114
+IFBPTEw= 93115
+IEx5bWU= 93116
+IEJyaWdncw== 93117
+ZWNlcg== 93118
+d2Fnb24= 93119
+LkRFU0M= 93120
+IGdsQmVnaW4= 93121
+U3RhdGVtZW50cw== 93122
+ZXRyaQ== 93123
+IG1vY2tlcg== 93124
+IEJsdWVwcmludFJlYWRPbmx5 93125
+L2NvbnRlbnRhc3Npc3Q= 93126
+ZW1hYWt0 93127
+L2xvYWRlcg== 93128
+X2xvd2VyY2FzZQ== 93129
+Y2l2aWw= 93130
+X3ZhbG9y 93131
+X0dsb2JhbA== 93132
+IGFkcg== 93133
+aXRpemVu 93134
+LlNpZGU= 93135
+IEVtYmxlbQ== 93136
+IHRoaXJkcw== 93137
+X1NIQVBF 93138
+UmVncmVzc29y 93139
+UFlUSE9O 93140
+IHBzeWNob3RpYw== 93141
+IGN2cw== 93142
+IEFwcGxpY2F0aW9uVXNlcg== 93143
+IGFsdW5vcw== 93144
+VG9nZ2xlQnV0dG9u 93145
+IG5nYQ== 93146
+IG3Do2U= 93147
+YWR2ZXJ0aXNlbWVudA== 93148
+5YiG5Lqr 93149
+Lm92 93150
+IEFPTA== 93151
+UkVX 93152
+INin2LPYqg== 93153
+IEdpbm55 93154
+IC8vLy8vLy8vLy8= 93155
+U29uZ3M= 93156
+YWNpYw== 93157
+Q01Q 93158
+IHJlY29nbml6ZXI= 93159
+IHDDq3I= 93160
+RElD 93161
+O1wiPg== 93162
+IGNsb3Q= 93163
+OkV2ZW50 93164
+LlRP 93165
+IEN1cnNvcnM= 93166
+XFN0b3JhZ2U= 93167
+IElvbmljUGFnZQ== 93168
+X2pldA== 93169
+KEJpdENvbnZlcnRlcg== 93170
+IGNoaWxkaXNo 93171
+VHJhZGVy 93172
+PEhUTUxJbnB1dEVsZW1lbnQ= 93173
+X0ZSRVFVRU5DWQ== 93174
+PSI7Cg== 93175
+eXN0YWNr 93176
+SnVy 93177
+IOmU 93178
+IHRjYg== 93179
+IHJlY2liaXI= 93180
+LnN6 93181
+IO2BtOuemOyKpA== 93182
+UEVSU09O 93183
+bm92YQ== 93184
+IGNvZXI= 93185
+IE1haG1vdWQ= 93186
+IFdvcmtwbGFjZQ== 93187
+IiIiKSwK 93188
+LlBhZ2VTaXpl 93189
+Z2V0Um9vdA== 93190
+KGJhc2VVcmw= 93191
+W1U= 93192
+IE1DUw== 93193
+IENsYXJrc29u 93194
+LnZvbA== 93195
+ICIifQo= 93196
+IHBldXg= 93197
+IFByb2R1Y3RTZXJ2aWNl 93198
+IG1vbmRheQ== 93199
+IFRlc3REYXRh 93200
+IE1hdWw= 93201
+IHN0cm5jbXA= 93202
+IHNob3BwZXI= 93203
+dGhlb3J5 93204
+IGV0aXF1ZXR0ZQ== 93205
+bGljZW5jZQ== 93206
+c2NhbA== 93207
+LWNsdXN0ZXI= 93208
+IGhpc3TDs3JpYQ== 93209
+IFN1YnRyYWN0 93210
+IGZpYmVyZ2xhc3M= 93211
+X2xhc3RuYW1l 93212
+IFJld3JpdGU= 93213
+L3RvZG8= 93214
+IG92ZXJmbG93aW5n 93215
+IEdhdXNz 93216
+b2theQ== 93217
+IGNsdW1zeQ== 93218
+KHh5 93219
+IGV4ZW1w 93220
+YW5hbHl6ZQ== 93221
+LXRpY2tldA== 93222
+bmluZQ== 93223
+IERlYWRwb29s 93224
+IGNvbHVt 93225
+IEpL 93226
+IFtdLA0K 93227
+IEFzcGVu 93228
+IG1hbGlnbmFudA== 93229
+aMO1ZXM= 93230
+U2NhbGE= 93231
+aW5uZQ== 93232
+IENPTlNUQU5UUw== 93233
+X1ByaWNl 93234
+IyUl 93235
+IGFyc2No 93236
+IE5TQXR0cmlidXRlZFN0cmluZw== 93237
+IEZpbGVUeXBl 93238
+YWxsb2NhdGlvbg== 93239
+X3Npbmd1bGFy 93240
+KFBvaW50ZXI= 93241
+YW5uaWVz 93242
+U3RvcmVk 93243
+ICc7Cgo= 93244
+4oCZZXg= 93245
+ZHJz 93246
+QnJpZ2h0bmVzcw== 93247
+L09S 93248
+VGV4dGJveA== 93249
+IGtuYWNr 93250
+IGplbmlz 93251
+IG9jYXM= 93252
+ZGF0YXA= 93253
+IGdhbWVUaW1l 93254
+IOCw 93255
+bmR4 93256
+IEVWVA== 93257
+QnlUZXh0 93258
+IGF0dHJpYnV0ZU5hbWU= 93259
+IGp1Z2Fy 93260
+X3NlcXM= 93261
+IEZFQVRVUkVT 93262
+OmRhdGU= 93263
+ZmJl 93264
+cmlwcGVy 93265
+56iN 93266
+LkV4cHI= 93267
+VXJiYW4= 93268
+aWRvdA== 93269
+IG9ibGl2aW91cw== 93270
+KERiQ29udGV4dA== 93271
+Q2Fyb2w= 93272
+KCcsJywk 93273
+IEJyaWxsaWFudA== 93274
+a2Fk 93275
+Y2VudHJhdGlvbg== 93276
+IGt1aw== 93277
+IE1BTkFHRU1FTlQ= 93278
+X1dFQVBPTg== 93279
+IGppaGFkaXN0cw== 93280
+IGVudHJlZw== 93281
+IGRvxJ8= 93282
+IGFwcGVuZGluZw== 93283
+IFpp 93284
+X2N0eHQ= 93285
+IHF1YWRyYW50 93286
+ZWxlbWVudFR5cGU= 93287
+PWltZw== 93288
+YnJ1YXI= 93289
+SUNBU1Q= 93290
+IGludGVsbGVjdHVhbGx5 93291
+LkFubm90YXRpb24= 93292
+IGNhbXBhaWduZXJz 93293
+LkRhdGFHcmlkVmlld0F1dG9TaXpl 93294
+IMWfZWs= 93295
+IC9eKA== 93296
+LkRhdGFUYWJsZQ== 93297
+IHdlYmxvZw== 93298
+KGxpYnJhcnk= 93299
+IEZ1cw== 93300
+IE9TVA== 93301
+X1Bhc3N3b3Jk 93302
+IEJ1Y2tsZXk= 93303
+aG9mZg== 93304
+QWxpZ25lZA== 93305
+X1JlYWw= 93306
+RU5USUM= 93307
+L2dyYXBocWw= 93308
+IFdlZWQ= 93309
+IExTQg== 93310
+b2NjYXNpb24= 93311
+YWRkYWZp 93312
+TGV0cw== 93313
+KCJg 93314
+IHdpZGVu 93315
+KHZpc2l0b3I= 93316
+ICJcCg== 93317
+QU5URQ== 93318
+LWNhbXB1cw== 93319
+LUJhcg== 93320
+Y2FtZWw= 93321
+Rm10 93322
+OmRlc2NyaXB0aW9u 93323
+LmFyZQ== 93324
+IEFuYXN0 93325
+IExvbmdlcg== 93326
+c2VyaW91cw== 93327
+IGRhaGVy 93328
+aXp6ZXI= 93329
+TXVsdGlwbGljaXR5 93330
+IEhvbGxhbmRl 93331
+IEFubm90YXRpb25z 93332
+KCk/ 93333
+IHByb3Rlc3Rlcg== 93334
+IFVyZHU= 93335
+IHNwZWNpYWx0aWVz 93336
+X2x5 93337
+Q2Fk 93338
+YW5udA== 93339
+anNw 93340
+IGpvZQ== 93341
+KXI= 93342
+IFBlcnNpc3Q= 93343
+IG9ibA== 93344
+IGRlYWRsb2Nr 93345
+IHNlcmk= 93346
+UmVsYXRpdmVUbw== 93347
+IFl1cw== 93348
+KFByaW50 93349
+YWJpbGlh 93350
+IHVucHJvdGVjdGVk 93351
+IEFTSUM= 93352
+Lk5vbWU= 93353
+IFdlYkNsaWVudA== 93354
+IElUVg== 93355
+w7xybmJlcmc= 93356
+aXRvcmk= 93357
+U2lnbmluZw== 93358
+IFJlYWRvbmx5 93359
+IGVsZHJl 93360
+IENoZWNrZWQ= 93361
+YWxudW0= 93362
+U291cmNlVHlwZQ== 93363
+bGV4aWNhbA== 93364
+IGlsbHVzdHJhdG9y 93365
+IERpcmVjdG9yYXRl 93366
+IFRyb20= 93367
+bXBw 93368
+bG9nZw== 93369
+Lmluc3RydW1lbnQ= 93370
+IHdvb2RlZA== 93371
+IFVzZXJUeXBl 93372
+IFJlbmNvbnRyZXM= 93373
+bW9kZWxOYW1l 93374
+QlRUYWdDb21wb3VuZA== 93375
+PlRv 93376
+IGZyZWV6ZXM= 93377
+IENvbnRl 93378
+IENyZWRlbnRpYWw= 93379
+Y2FsYQ== 93380
+L3dvcmtzcGFjZQ== 93381
+IGxpYmlkbw== 93382
+Y2hsdXNz 93383
+b2xsZXlFcnJvcg== 93384
+IGFjY2lvbmVz 93385
+IEppbnBpbmc= 93386
+YXTDqWc= 93387
+SW50ZXJzdGl0aWFs 93388
+KSkpKSk7DQo= 93389
+eWJyaWQ= 93390
+IFJvbGxlZA== 93391
+TW9kZWxDcmVhdGluZw== 93392
+IFJlZmxleA== 93393
+IEx1Y2lmZXI= 93394
+IGVoZXI= 93395
+IGNhcm5pdmFs 93396
+ISI7DQo= 93397
+X0xPT0tVUA== 93398
+IHN1Y2PDqHM= 93399
+IHJlb3BlbmluZw== 93400
+IGNyZWFkbw== 93401
+IFNteQ== 93402
+IEVudHM= 93403
+LlNpbmNl 93404
+IEZpc2hlcmllcw== 93405
+L2Nvbm5lY3Rpb24= 93406
+IENTQQ== 93407
+INC/0YDQvtCz0YDQsNC80Lw= 93408
+bHNydWhl 93409
+CWFjdG9y 93410
+IFN0cmF1c3M= 93411
+SnNvblZhbHVl 93412
+CWV2YWw= 93413
+bG9ja2Vy 93414
+IFhJVg== 93415
+X2h5cGVy 93416
+IFBvbGx5 93417
+4oCmdGhl 93418
+IEdVUkw= 93419
+0LXRgdGB 93420
+IGRpdmVz 93421
+dWdlb3Q= 93422
+aW5lbWE= 93423
+YmVyc29tZQ== 93424
+Q29tcHJh 93425
+LWN1bHR1cmFs 93426
+IGdyYW5kcw== 93427
+U2Fj 93428
+IEJhcm5leQ== 93429
+X1FVRVNUSU9O 93430
+IG1hbWFu 93431
+IGhhc3RpbHk= 93432
+IGNsdWJob3VzZQ== 93433
+IGdydW5k 93434
+X1dBTEw= 93435
+IHB1cmlmaWNhdGlvbg== 93436
+hOS7tg== 93437
+0LLQsA== 93438
+dmVzdG1lbnQ= 93439
+LkRpc3BsYXlTdHlsZQ== 93440
+X2NvcmVz 93441
+JVM= 93442
+IG9zw7Ni 93443
+IGRpc2I= 93444
+IEZyYW5raWU= 93445
+IGluZGlzY3JpbQ== 93446
+X0JlZ2lu 93447
+KGVy 93448
+O28= 93449
+44Oz44Kw 93450
+bm9kZU5hbWU= 93451
+IHJlZnVuZGVk 93452
+IGRpc21hbA== 93453
+IEh1ZmZQb3N0 93454
+IHVuZGVjaWRlZA== 93455
+d3JpdGVsbg== 93456
+a8Ozdw== 93457
+IEJvc2U= 93458
+CWxpYg== 93459
+b3BsYW4= 93460
+aW50ZXJwcmV0ZWQ= 93461
+IE1PTkVZ 93462
+dXZv 93463
+IG50b2hz 93464
+aXNldW0= 93465
+Pmo= 93466
+IHVuZml0 93467
+IGh1Z2dlZA== 93468
+IEplc3Q= 93469
+bXBz 93470
+IGJyb20= 93471
+J28= 93472
+IGZvdg== 93473
+IFNocmluZQ== 93474
+IEVJVEhFUg== 93475
+eWNhc3RsZQ== 93476
+IHNhdHVy 93477
+cmVxdWVzdERhdGE= 93478
+W2Rpcg== 93479
+T1VDSA== 93480
+X0Rv 93481
+IHlvbA== 93482
+IGluaXRpYWxWYWx1ZXM= 93483
+W3ZlcnRleA== 93484
+c2VydmljZU5hbWU= 93485
+LnNhbGFyeQ== 93486
+IEF1dGhlbnRpY2F0ZQ== 93487
+6L6+ 93488
+X1ZMQU4= 93489
+KFtdKTsKCg== 93490
+IFNlcnVt 93491
+UGF0aFBhcmFt 93492
+Zm9ybXVsYXJpbw== 93493
+IHN1bW1hcml6ZXM= 93494
+T0NS 93495
+b3JhbQ== 93496
+TERBUA== 93497
+Ymlj 93498
+cGlja2Vk 93499
+LXRoYXQ= 93500
+IGNkcw== 93501
+CWFuaW0= 93502
+IGludHJpYw== 93503
+IFdvcnQ= 93504
+IFZMQw== 93505
+IFNoaWl0ZQ== 93506
+U3R1ZGllcw== 93507
+LmRpc3BhdGNoZXI= 93508
+KGVuYWJsZQ== 93509
+Lm1peGlu 93510
+IFNleW1vdXI= 93511
+IGJpb21lZGljYWw= 93512
+IFNwb29u 93513
+IE5vcnNl 93514
+IGludGVudHM= 93515
+IMOpcXVpcA== 93516
+IERyZXNzZXM= 93517
+TFBBUkFN 93518
+LnNldFJlc3VsdA== 93519
+LmRlbGV0ZUJ5SWQ= 93520
+IG5ld2ZvdW5k 93521
+IE9TRA== 93522
+b3VzeQ== 93523
+IGVzdGFkb3M= 93524
+W0J5dGU= 93525
+Q2h1Y2s= 93526
+Lm9uVmlld0NyZWF0ZWQ= 93527
+IENvbnRyaWJ1dGlvbg== 93528
+X0VuYw== 93529
+SU5FVA== 93530
+IGZsYXZvcmZ1bA== 93531
+IOOCog== 93532
+dmlzYQ== 93533
+IEhlcmN1bGVz 93534
+LmdldEFwcA== 93535
+IFlvaw== 93536
+Lk1haW5BY3Rpdml0eQ== 93537
+KS5b 93538
+IGxhdXQ= 93539
+SW52aXRl 93540
+IENodXJjaGVz 93541
+LCcj 93542
+2YrYsQ== 93543
+KFNT 93544
+IHZlbmRh 93545
+YXNqb24= 93546
+LklOVEVS 93547
+aXBoZXJ5 93548
+KFN5bnRheA== 93549
+b25kcm91cw== 93550
+CWNlbnRlcg== 93551
+QnJhY2tldEFjY2Vzcw== 93552
+IENhcGNvbQ== 93553
+LmdldEZvbnQ= 93554
+IFZhdWx0cw== 93555
+IGRpc2XDsWFkb3I= 93556
+Om8= 93557
+KHNoZWxs 93558
+IGVDb21tZXJjZQ== 93559
+IGFsdHJl 93560
+X2F0dGFjaGVk 93561
+IGlzcg== 93562
+IG9idGFpbnM= 93563
+LkNvbnRleHRDb21wYXQ= 93564
+IGF0dGVuZGVl 93565
+IFR3aWNl 93566
+IE1vb2Q= 93567
+6YKu566x 93568
+bm9kb2M= 93569
+IFBJWEk= 93570
+c29mYXI= 93571
+IEJsb29keQ== 93572
+LkNvbXBsZXRl 93573
+IEJFUg== 93574
+IGdldENhdGVnb3J5 93575
+IGRpc3F1YWxpZmllZA== 93576
+X1RydWU= 93577
+J2Vy 93578
+LXRvbw== 93579
+IGh5cGVybGluaw== 93580
+X21heGltdW0= 93581
+TmVhbA== 93582
+IHBJbmZv 93583
+LmdldEVsZW1lbnRzQnlOYW1l 93584
+c2NoZWR1bGVk 93585
+cGF5ZXI= 93586
+CXZlcmlmeQ== 93587
+LWVudGl0eQ== 93588
+bWV0YXRhYmxl 93589
+YmlsZHVuZw== 93590
+IGRlbHRhWA== 93591
+ZW1wbGFjZQ== 93592
+IHJldmVydGVk 93593
+cmVwaWQ= 93594
+bGVhcm5lcg== 93595
+fSkpCgo= 93596
+dWNvc2U= 93597
+IHJpY28= 93598
+IGJhbmdlZA== 93599
+IEFmcm8= 93600
+KGluZXJ0aWE= 93601
+YW5zYQ== 93602
+IMOkdmVu 93603
+S2FyZW4= 93604
+IHN1cGVyc3Q= 93605
+IGZydWl0aW9u 93606
+b3RjaA== 93607
+IFBheXM= 93608
+UmVzaWRlbnRz 93609
+IHByaXNt 93610
+Jik7Cgo= 93611
+Lmptcw== 93612
+IFNsdWc= 93613
+PScnKQ== 93614
+IGd1dGVu 93615
+IFNwaWVsYmVyZw== 93616
+IFRGb3Jt 93617
+KGJlZm9yZQ== 93618
+IEZpbml0ZQ== 93619
+5paw5aKe 93620
+IG1laWxsZXVyZQ== 93621
+0L/QuNGB0LDQvdC40LU= 93622
+X0Vycg== 93623
+LWZ0 93624
+bmFubw== 93625
+LkFkZHI= 93626
+IC8vDQoNCg== 93627
+IEpvbmFo 93628
+IERpc2Nv 93629
+IGx1bmNoZXM= 93630
+IERGQQ== 93631
+ZXhwbGljaXQ= 93632
+XSc7Cg== 93633
+IHJlZmluZXJ5 93634
+IFN0cmluZ1R5cGU= 93635
+dW5zcXVlZXpl 93636
+IExpa2VseQ== 93637
+V3JpdGVz 93638
+LmJwbQ== 93639
+IHBJdGVt 93640
+b3Vuc2Vs 93641
+U3RhbmRpbmc= 93642
+IGNob2tlZA== 93643
+IGFuc2No 93644
+dXBpbA== 93645
+IERlYnVnZ2Vy 93646
+4qCA4qCA 93647
+PEdyb3Vw 93648
+IFNjYWxpYQ== 93649
+IHN1YnN0aXR1dGlvbnM= 93650
+IGNsaW1iZXJz 93651
+ICopIg== 93652
+IG5hbm9wYXJ0aWNsZXM= 93653
+IEFQUFJP 93654
+IHB1cmNoYXNlcnM= 93655
+IFFUZXN0 93656
+IEF3YWtlbmluZw== 93657
+CVNlcmlhbA== 93658
+LnJlcGFpbnQ= 93659
+IHNhdm9yeQ== 93660
+IHBvcm91cw== 93661
+IGFWYXI= 93662
+IFN1YXJleg== 93663
+LUVhc3Q= 93664
+Qm94ZXM= 93665
+IFdlaW5lcg== 93666
+IENSQQ== 93667
+IOqwkuydhA== 93668
+IHhsaW0= 93669
+Ij8KCg== 93670
+IHdhc2hpbmd0b24= 93671
+7Jq0 93672
+IHRvdGFsZW1lbnQ= 93673
+X210aW1l 93674
+LnNldFNjZW5l 93675
+IGxsYW1h 93676
+IGNibw== 93677
+ZWZk 93678
+IHVuZGVycmF0ZWQ= 93679
+cmFpc2luZw== 93680
+IE5BVElPTkFM 93681
+ICoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKi8KCg== 93682
+b3B0aWM= 93683
+aWRlYXM= 93684
+IOaPkA== 93685
+IGxhaw== 93686
+ISEs 93687
+IGtvbW0= 93688
+cGFyYWd1cw== 93689
+U2l0ZXM= 93690
+IHN0cmVzc2luZw== 93691
+IE1hdEJ1dHRvbk1vZHVsZQ== 93692
+IENvbnZlcnRlZA== 93693
+YW5hbWU= 93694
+X1JFQURPTkxZ 93695
+XT0+ 93696
+IGJvcmRlbA== 93697
+IGJpYmxpb2dyYXBoeQ== 93698
+IGdyaWRDb2x1bW4= 93699
+IGpvdXJuYWxpc3RpYw== 93700
+7J6E 93701
+IHJhc3BiZXJyeQ== 93702
+c3RpY2U= 93703
+IGFicmFzaXZl 93704
+IERCSGVscGVy 93705
+IGludGY= 93706
+IFJUQlU= 93707
+fSciLA== 93708
+IEhhbw== 93709
+c3dhbmE= 93710
+IGphbnZpZXI= 93711
+IGluc3RpdHV0ZXM= 93712
+IFNlYmFzdA== 93713
+X0NPTFM= 93714
+IGZpZ3VyYQ== 93715
+IFp1c3Q= 93716
+Zm95 93717
+PigpKTsKCg== 93718
+IExpZWJl 93719
+QWdlbmN5 93720
+IOyLnOyekQ== 93721
+IFRodW1ibmFpbHM= 93722
+dGV4dFRoZW1l 93723
+IGVjaG9pbmc= 93724
+ZW1wZXJhdHVyZQ== 93725
+IGZpcmVwb3dlcg== 93726
+ZWRi 93727
+OicpOwo= 93728
+w6lnb3I= 93729
+L2ZlZWQ= 93730
+IGh1cmw= 93731
+LWF2YWlsYWJsZQ== 93732
+IFJlbmRlcnM= 93733
+IGZkcw== 93734
+IEpTR2xvYmFs 93735
+IENpdGl6ZW5zaGlw 93736
+a2llZ28= 93737
+U3RhbmRhcmRJdGVt 93738
+LnBsYWNlcw== 93739
+IHNjYWxhYmlsaXR5 93740
+IFRyYWlscw== 93741
+Zm9sbG93ZXI= 93742
+IHNlcnZpw6dvcw== 93743
+ID8+Ii8+Cg== 93744
+W21ldGhvZA== 93745
+KGli 93746
+IHJpZGljdWxl 93747
+IGFkYXB0YWJsZQ== 93748
+ZmlsdHJv 93749
+IGtldG9nZW5pYw== 93750
+LkltYWdlVHJhbnNwYXJlbnRDb2xvcg== 93751
+IENGTw== 93752
+IFBFRA== 93753
+ICIiKTs= 93754
+b2dsb2Jpbg== 93755
+W3NpemVvZg== 93756
+QnJhbmRvbg== 93757
+LlRvU2hvcnQ= 93758
+IG5pxbw= 93759
+IFRFUk1JTg== 93760
+LmdldFN0YXR1c0NvZGU= 93761
+IGRlYnRvcg== 93762
+IENPTlNUUkFJTlQ= 93763
+CXNpZGU= 93764
+IERvbWlubw== 93765
+0YLQvtC8 93766
+IGdsYWNpZXI= 93767
+IGdyb3U= 93768
+enA= 93769
+IENhcmxh 93770
+LUZlYg== 93771
+UGVs 93772
+LnJlYWRWYWx1ZQ== 93773
+Y2xpbWF0ZQ== 93774
+IHRpbGVTaXpl 93775
+LnRyaXA= 93776
+RU5URQ== 93777
+IGNodWJieQ== 93778
+IGltcG9zaXRpb24= 93779
+TE9XRVI= 93780
+LmJ5SWQ= 93781
+Lkxvb2tBbmRGZWVs 93782
+YXJpaA== 93783
+LmZpbmRCeUlkQW5kVXBkYXRl 93784
+IFN0b3JlZA== 93785
+IGJvdXJnZW9pc2ll 93786
+SFRUUFJlcXVlc3RPcGVyYXRpb24= 93787
+IHN1Y2tlcg== 93788
+LmRlcXVldWU= 93789
+bGlja2Vu 93790
+IHN1YnJhbmdl 93791
+X01FRElVTQ== 93792
+SXNsYW0= 93793
+IFNwYXJrcw== 93794
+77yaJQ== 93795
+aW1wb3J0ZQ== 93796
+IGAt 93797
+IGpveXM= 93798
+Z3JvdXBpZA== 93799
+Rmx5aW5n 93800
+CWJz 93801
+Z3Jvc3M= 93802
+IEZpZXN0YQ== 93803
+IGNzdA== 93804
+IGFmaWNpb24= 93805
+b3Bob24= 93806
+X0NJ 93807
+am4= 93808
+QmVhdXR5 93809
+IHNjZQ== 93810
+IGNyYWNrZXJz 93811
+YXBr 93812
+IGdvcmQ= 93813
+IHByZXRleHQ= 93814
+IFtc 93815
+IENhbmRpZA== 93816
+R29hbHM= 93817
+QWN0aW9uVHlwZXM= 93818
+LG51bWJlcg== 93819
+IHBvcHVsYWNl 93820
+IGVudHJlbg== 93821
+IEF1dG9m 93822
+6Zmi 93823
+QmFzZUNvbnRleHQ= 93824
+QmFsYW5jZXI= 93825
+KEJvcmRlcg== 93826
+IG1pbmNlZA== 93827
+cmVjYWxs 93828
+Y2Jh 93829
+IGFwcHJvdmVz 93830
+IEtsb3Bw 93831
+ZXJtaW50 93832
+X2Zyb250ZW5k 93833
+ZXNjbw== 93834
+IG5pbmV0ZWVu 93835
+RHJpdmluZw== 93836
+IFhWSQ== 93837
+IFRhY3RpY3M= 93838
+IHByb2dyYW1hcw== 93839
+aWVzZW4= 93840
+TW92 93841
+ZGlldA== 93842
+YXV0w6k= 93843
+KCIuIik= 93844
+IGdvdmVybm8= 93845
+X0FuZA== 93846
+L21pdA== 93847
+IGNhZmV0ZXJpYQ== 93848
+LXRyYWNraW5n 93849
+IGNvbW11dGluZw== 93850
+LnVua25vd24= 93851
+X3R5cGVvZg== 93852
+IFNTQQ== 93853
+UFJPVE8= 93854
+Lk1lcmdl 93855
+IGZvckNlbGxSZXVzZUlkZW50aWZpZXI= 93856
+IFNhdGlzZmFjdGlvbg== 93857
+ICMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIw== 93858
+SU1QTElFRA== 93859
+IFJlc3RyaWN0ZWQ= 93860
+IE1hZ251bQ== 93861
+0L3QvtC8 93862
+S2Fuc2Fz 93863
+YXlsaWdodA== 93864
+IFRvd2FyZHM= 93865
+IFRvbWU= 93866
+IFRlbmRlcg== 93867
+X2RlcHQ= 93868
+LmNydA== 93869
+dHJlY2h0 93870
+U1RPTkU= 93871
+IGVtcHRpZWQ= 93872
+ICcpOwoK 93873
+4LiB4Liy4Lij 93874
+0Y/RgtGM 93875
+bGVjaw== 93876
+IFt+LA== 93877
+LmV4cGlyZXM= 93878
+IFRpZw== 93879
+IElyb25pY2FsbHk= 93880
+CUxM 93881
+Lk5vdE5pbA== 93882
+IOWKoA== 93883
+IEdvdmVy 93884
+IFBlcnNwZWN0aXZlcw== 93885
+IERWUg== 93886
+IGxva2FsZQ== 93887
+IHJlc2VuZA== 93888
+IGRvdWJseQ== 93889
+IGNvbXVuaWRhZA== 93890
+IEFzc2VtYmx5Q29tcGFueQ== 93891
+KHR1cm4= 93892
+IHN1Ymxpc3Q= 93893
+IGVuZG9yc2VtZW50cw== 93894
+X1JFR0lTVFJZ 93895
+ISIpDQo= 93896
+KTs7Cg== 93897
+IGdhbnpl 93898
+IEhhcm5lc3M= 93899
+X21hdGNoZWQ= 93900
+5L6h 93901
+4oCiCgo= 93902
+Q2hlZg== 93903
+CUluaXRpYWxpemU= 93904
+KTsiPgo= 93905
+IEZhcmFnZQ== 93906
+cmlzaA== 93907
+YWx0ZXQ= 93908
+RGVhbGVy 93909
+LkxvZ1dhcm5pbmc= 93910
+KGFmdGVy 93911
+IEdhcnRlbg== 93912
+IGV4cGxvZGVz 93913
+LkNMQVNT 93914
+IHVzZVJvdXRlcg== 93915
+LUxh 93916
+IHNhZGRlbmVk 93917
+YXJvdg== 93918
+VG9VcGRhdGU= 93919
+IOae 93920
+cGlp 93921
+JwoKCgo= 93922
+IFRSQU5TQUNUSU9O 93923
+b25nYQ== 93924
+bG9nYW4= 93925
+Q3Jvdw== 93926
+IGJyaXRpc2g= 93927
+IENvbnRlbnRWaWV3 93928
+X0JC 93929
+b2x2ZW5jeQ== 93930
+bG9hZE1vZGVs 93931
+VE9PTFM= 93932
+aGV0ZW4= 93933
+X25o 93934
+QUJM 93935
+LXZlcnM= 93936
+QXJlbmE= 93937
+LnNpbmdsZXRvbkxpc3Q= 93938
+KHBhdA== 93939
+CW5hbWVz 93940
+KHNx 93941
+IHZhbG9yZQ== 93942
+JHJlcQ== 93943
+IGFudGhyb3BvbG9neQ== 93944
+VGhpbmtpbmc= 93945
+IG1pc2NoaWVm 93946
+IGFyY2hpdmFs 93947
+4KS5 93948
+LlNldFRvb2xUaXA= 93949
+cHJhcg== 93950
+YW5qYQ== 93951
+IGZpcnN0bHk= 93952
+CWxpZ2h0 93953
+LS0s 93954
+IFNwZWFycw== 93955
+IG9nbA== 93956
+c3RlZW4= 93957
+aW1wbGVtZW50cw== 93958
+cmlzdHM= 93959
+K0U= 93960
+IEJhbnM= 93961
+IGZhc3RiYWxs 93962
+IEhlcm1lcw== 93963
+dmVsZWQ= 93964
+dHdlbnR5 93965
+IG5lY2VzaXRh 93966
+IE1vcm9jY2Fu 93967
+aXNMb2dnZWRJbg== 93968
+Q0xPQ0tT 93969
+LkFic3RyYWN0aW9ucw== 93970
+LlBhY2tldA== 93971
+IG1lbmFjaW5n 93972
+LXZlc20= 93973
+IExpdmluZ3N0b24= 93974
+IG9jaQ== 93975
+IGV4dHJhZGl0aW9u 93976
+ICQoJA== 93977
+IExvY2tlcg== 93978
+IFJlYmVsbGlvbg== 93979
+IG1peGlucw== 93980
+Y3RhbA== 93981
+L3JmYw== 93982
+IFNHRA== 93983
+LGlkeA== 93984
+IGJsZWlidA== 93985
+KFwk 93986
+IHBldGVy 93987
+IGJhcnJlbg== 93988
+IHBob3NwaG9yeQ== 93989
+IGdvZ2dsZXM= 93990
+LmhvbQ== 93991
+QGQ= 93992
+PSct 93993
+LmlzVXNlcg== 93994
+YWthc2g= 93995
+X2h1Yg== 93996
+aXBlbGluZXM= 93997
+IEB9 93998
+LnN1cm5hbWU= 93999
+SW50ZXJvcA== 94000
+IGluRmlsZQ== 94001
+IGVzcGVjaWFsbWVudGU= 94002
+IGF1dG9ub20= 94003
+IFphbWJpYQ== 94004
+X0NPVU5UUlk= 94005
+PENvdXJzZQ== 94006
+aWRlb2dyYXBoaWM= 94007
+IENhbWVyb29u 94008
+ZmluZEJ5SWQ= 94009
+KSIu 94010
+IERlcGVuZHM= 94011
+cml0b3M= 94012
+Lk91cg== 94013
+IHN1YnNpZGl6ZWQ= 94014
+JywnIis= 94015
+IGdsZWFu 94016
+IEFzc2VtYmx5Q29weXJpZ2h0 94017
+cGljYWJsZQ== 94018
+IHVud2l0dGluZw== 94019
+IG9tZGF0 94020
+IEVhc2U= 94021
+IGVtYm9kaWVz 94022
+KHBEWA== 94023
+IFZvdGVy 94024
+QXNzaWduZWQ= 94025
+cmV2ZWFs 94026
+IGZlbmQ= 94027
+KHBhcnNlRmxvYXQ= 94028
+IGRwcw== 94029
+dHBsaWI= 94030
+YXNzZXJ0Q291bnQ= 94031
+eG1heA== 94032
+VW51c2Vk 94033
+KGZi 94034
+IHN1Ym1pdHM= 94035
+IFJlcGxpY2E= 94036
+KGR5 94037
+IGJhbmRl 94038
+LnNlbWFudGlj 94039
+IHNlYXJjaFN0cmluZw== 94040
+IFNhbmZvcmQ= 94041
+CWZ1bGw= 94042
+cHJt 94043
+X3V0aWxpdGllcw== 94044
+VU5VU0VE 94045
+IHNjYW5uZXJz 94046
+IGJmZA== 94047
+Lk9yZ2FuaXphdGlvbg== 94048
+LWN1cg== 94049
+UmFpbA== 94050
+IHhueHg= 94051
+JSk7Cg== 94052
+IG92ZXJwb3N0aW5n 94053
+VmlldA== 94054
+IHRhcGVyZWQ= 94055
+IGNhbWVv 94056
+IFZpZXdpbmc= 94057
+IGRpc21hbnRsZQ== 94058
+IGZpc3M= 94059
+IFNlbnRyeQ== 94060
+aGVhdG1hcA== 94061
+IMOhcmVhcw== 94062
+IEdyw7w= 94063
+IGppZw== 94064
+LmNsZWFyUmVjdA== 94065
+ZXZlbnRUeXBl 94066
+IHR1cmJ1bGVuY2U= 94067
+Y2tpbGw= 94068
+LkZvY3VzZWQ= 94069
+IGludGVybWVkaWFyeQ== 94070
+IE9iZXNpdHk= 94071
+YXRlZ28= 94072
+bW9udG8= 94073
+IEFsYW1vZmlyZQ== 94074
+IFNoZWlsYQ== 94075
+IENPTExFQ1RJT04= 94076
+Q2FyZEJvZHk= 94077
+IEhhYml0 94078
+UExBTg== 94079
+LnZpc3VhbGl6YXRpb24= 94080
+JSkuCgo= 94081
+IEludGVsbGlK 94082
+IEdsb3Zlcg== 94083
+LnNwYXRpYWw= 94084
+IGdyZWV0aW5ncw== 94085
+IE9wZW5GaWxlRGlhbG9n 94086
+ey8q 94087
+IFTDqWzDqQ== 94088
+IEVm 94089
+ICJbJQ== 94090
+IG1hZ2lzdHJhdGU= 94091
+IExpdGVjb2lu 94092
+IFNlbGU= 94093
+IGNvbW1lcmM= 94094
+cHJpbnR3 94095
+bmV4dEludA== 94096
+LmdldENoaWxkQXQ= 94097
+IEdldEN1cnJlbnQ= 94098
+IGV1cm9ww6k= 94099
+IEFJUw== 94100
+ZXR0ZW4= 94101
+LkV2ZW50UXVldWU= 94102
+YW5mb3Jk 94103
+dW5ha2Fu 94104
+LnNldE91dHB1dA== 94105
+IGNtZGxpbmU= 94106
+LGdldA== 94107
+IEhlYXJk 94108
+LmNvbnRlbnRUeXBl 94109
+ZW1k 94110
+IFJldG9ybmE= 94111
+YWNk 94112
+IFBsYXlvZmY= 94113
+YWNtYW4= 94114
+LndlYnNvY2tldA== 94115
+Q2xpZW50SWQ= 94116
+LmV4YW0= 94117
+IGF0dGVudWF0aW9u 94118
+LnNldENoYXJhY3Rlcg== 94119
+CUNvbGxlY3Rpb24= 94120
+5rCX 94121
+IHByZWRpY3RvcnM= 94122
+IFNoZXJpZGFu 94123
+cmltaW5hdG9y 94124
+KFN0YWNr 94125
+X1BLRw== 94126
+PScnKToK 94127
+KHBhZA== 94128
+IE5vZG8= 94129
+IGludGVyb3Blcg== 94130
+IFRyYW5zcGFyZW5jeQ== 94131
+CWR4 94132
+emVt 94133
+IHByYXRpcXVl 94134
+IGZpYnI= 94135
+KCk/Owo= 94136
+X01PQklMRQ== 94137
+LlJFRw== 94138
+X1lFTExPVw== 94139
+VGl0YW4= 94140
+JykKCgoK 94141
+IGNvbXBvbmVudE5hbWU= 94142
+IENvb2xlcg== 94143
+aXNGdW5jdGlvbg== 94144
+LmZlZWRiYWNr 94145
+IHBlcmZlY3RlZA== 94146
+IHBhZWQ= 94147
+LXNjcmlwdHM= 94148
+U3VzcA== 94149
+PE9wdGlvbg== 94150
+IER0 94151
+7YS0 94152
+J1JF 94153
+IE5STA== 94154
+IE1hbm55 94155
+IHJvZw== 94156
+IEdhcnI= 94157
+X2Nvb2tpZXM= 94158
+U3Bs 94159
+IHByb21vdGVycw== 94160
+KmR0 94161
+XEFQSQ== 94162
+IGV2b2tl 94163
+X0VudHJ5 94164
+IGZpcmVmaWdodGVy 94165
+aXZpZGFk 94166
+SmFjb2I= 94167
+IGxlZ2lvbg== 94168
+KHBvbA== 94169
+CWZsYXNo 94170
+b29rZWVwZXI= 94171
+LmNsaXBzVG9Cb3VuZHM= 94172
+IGdyYXBoaXRl 94173
+J2h0dHA= 94174
+X1RSSUFOR0xF 94175
+IERyb3BJbmRleA== 94176
+LnNtdHA= 94177
+IFVOU0lHTkVE 94178
+X1BJQ1RVUkU= 94179
+X09SSUVOVEFUSU9O 94180
+IE9QUA== 94181
+Iyc= 94182
+w6FmaWNv 94183
+Lmhpc3RvZ3JhbQ== 94184
+IEJlbm55 94185
+Pldl 94186
+IHJlcG9zdA== 94187
+IGZpYW5jZQ== 94188
+IEJvdW50eQ== 94189
+c3RyZXNz 94190
+RGF0ZXRpbWU= 94191
+Okg= 94192
+IFNwaGlueA== 94193
+Tm9ybWFsbHk= 94194
+YXBpeGVs 94195
+IHVzZXJBZ2VudA== 94196
+IE1vcmk= 94197
+L2xhYg== 94198
+Lk1PREVM 94199
+IEVtb3Rpb25hbA== 94200
+U2NhbGVk 94201
+ZGV2aWNlSWQ= 94202
+IOqzhA== 94203
+Y2Vhc2Vk 94204
+PElN 94205
+Y2VlZGVk 94206
+IGxpYnJhcmlhbg== 94207
+KW51bGw= 94208
+IG1pY3Jvbg== 94209
+IEZvdQ== 94210
+dWxlbg== 94211
+L2xpdmU= 94212
+cnNjaGVpbg== 94213
+ZmVh 94214
+IGhhYmls 94215
+IE5hdkxpbms= 94216
+bmVjZXNzYXJ5 94217
+LmNvZGVz 94218
+LW1ha2U= 94219
+IHBQYXJlbnQ= 94220
+X3JlbGF0aW9ucw== 94221
+IHJ1c2hlcw== 94222
+IHByb3BlbnNpdHk= 94223
+IFNraW5ueQ== 94224
+V0VTVA== 94225
+X2NvcnB1cw== 94226
+KHJlb3JkZXJlZA== 94227
+ZmRi 94228
+IEdldE1lc3NhZ2U= 94229
+QnJ1bg== 94230
+LnZz 94231
+IHDFgg== 94232
+IGNydW5jaHk= 94233
+Qm9vbQ== 94234
+UEo= 94235
+SmFrZQ== 94236
+57qm 94237
+JGNsaWVudA== 94238
+IH1dKQo= 94239
+IGNvbnZlcnNl 94240
+IEdSQVQ= 94241
+IENSUw== 94242
+Lkxvdw== 94243
+KHZhbGlkYXRl 94244
+X0NMSUNLRUQ= 94245
+LmJsdWV0b290aA== 94246
+CXh0eXBl 94247
+IGNsb3NlTW9kYWw= 94248
+X2ludGVudA== 94249
+IHByb2dub3Npcw== 94250
+c2F2 94251
+Q3Rs 94252
+IGNob29zZXI= 94253
+IFN1ZG9rdQ== 94254
+PVVzZXI= 94255
+LmNsZg== 94256
+CWV4cGxpY2l0 94257
+IHBvdGVudGlhbHM= 94258
+IEdlb3JnZXM= 94259
+IGVsaWM= 94260
+IHRzbGli 94261
+IFJhZ25hcg== 94262
+X3JlcHJlc2VudGF0aW9u 94263
+LWxlZ2dlZA== 94264
+aGFtc3Rlcg== 94265
+IEZpcmVzdG9yZQ== 94266
+Y29udmVydFZpZXc= 94267
+Q29tYmluZWQ= 94268
+INC00LXQuw== 94269
+IGVzcGVjdA== 94270
+IOOCkg== 94271
+IFN0YW1pbmE= 94272
+bG9va3M= 94273
+RU5BUklP 94274
+L2ZpeHR1cmVz 94275
+LnNtcw== 94276
+IHNlbWljbGFzcw== 94277
+IHNlbWljbGFzc2ljYWw= 94278
+LlBlZWs= 94279
+XSQ= 94280
+X0RTUA== 94281
+X0xWTA== 94282
+VklSVFVBTA== 94283
+IENhcGl0YWxz 94284
+IFNDVA== 94285
+LldoaWxl 94286
+IFN1YnN0YW5jZQ== 94287
+LWRvbmU= 94288
+IGVuc2xhdmVk 94289
+Y2xhc3NpZnk= 94290
+ZW50YW55bA== 94291
+IFZlZ2V0YWJsZQ== 94292
+X0RFUEVORA== 94293
+RGFuaQ== 94294
+IHF1aWVyZXM= 94295
+IGFiYmlhbW8= 94296
+IExpYmVy 94297
+YWZj 94298
+6YCf 94299
+cHJlZGljdGVk 94300
+LlBORw== 94301
+IFdoaXA= 94302
+Ly89PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PQ== 94303
+IOKJoA== 94304
+IOWM 94305
+REVN 94306
+Q0NB 94307
+L2Nsb3Nl 94308
+IC8vLzwv 94309
+IG1lc21h 94310
+IEJlaXJ1dA== 94311
+IEluaXRpYWxpemluZw== 94312
+4buZdA== 94313
+TU9OVEg= 94314
+IO2bhA== 94315
+UGFya2luZw== 94316
+Q29tZm9ydA== 94317
+IEVuZ2luZXM= 94318
+d2VycA== 94319
+QFJlcXVlc3RQYXJhbQ== 94320
+LUtleQ== 94321
+IGJhY2tsaWdodA== 94322
+cGFzc2Vz 94323
+Lm51bWJlck9mTGluZXM= 94324
+L0xpbnV4 94325
+KEhUVFA= 94326
+IEh0dHBVUkxDb25uZWN0aW9u 94327
+b3Nvcw== 94328
+Lnh4 94329
+IGZpbG1wamVz 94330
+ID09PT4= 94331
+b3B0aW1pemU= 94332
+Q2Fub24= 94333
+IC4uLiIK 94334
+ICciJzsK 94335
+IGPDqWxpYg== 94336
+IHByaW5jaXBhbG1lbnRl 94337
+IFByb3BlcnR5VmFsdWU= 94338
+T1VOQ0U= 94339
+IGV4Y3Vyc2lvbg== 94340
+IEFjY2Vzc1Rva2Vu 94341
+cmVxdWV0ZQ== 94342
+Vm9sdGFnZQ== 94343
+ZXhwbGFpbg== 94344
+fSkoKTsKCg== 94345
+VVJMT1BU 94346
+IGZ1bmdhbA== 94347
+R3JlZWs= 94348
+LWJsaW5k 94349
+IGZldWRhbA== 94350
+IFNvbmF0YQ== 94351
+IERpYWdub3Npcw== 94352
+JHhtbA== 94353
+ZWRpdGFyeQ== 94354
+IHN0aW11bGF0ZXM= 94355
+UG9udA== 94356
+Lkhhc1ByZWZpeA== 94357
+Ym9hdHM= 94358
+IFNjYXR0ZXI= 94359
+IEdFTkVSSUM= 94360
+IGZpc2hlcw== 94361
+PWxlbmd0aA== 94362
+IG1lbGhvcmVz 94363
+c3BlbnQ= 94364
+w7Rt 94365
+IEluZ3JhbQ== 94366
+Pi4KCg== 94367
+cGFyaXR5 94368
+LlZpZGVvQ2FwdHVyZQ== 94369
+IFR1YmVz 94370
+IGNvbWVkaWM= 94371
+IHByb2Nlc3NEYXRh 94372
+QURC 94373
+KG5ld1N0YXRl 94374
+5YGc 94375
+IFdlYnNlaXRl 94376
+X09mZg== 94377
+LGJvZHk= 94378
+IHN1YmNvbnRyYWN0 94379
+IGNodXRl 94380
+IGNhcnRlc2lhbg== 94381
+dGhyZXNo 94382
+LkNhcnQ= 94383
+IG1ldG9k 94384
+Y3VzdG9taXpl 94385
+THRk 94386
+CXNvdW5k 94387
+V2ViU2VydmljZQ== 94388
+IEhpbmRlcmVk 94389
+W3Jlcw== 94390
+KFRpbGU= 94391
+Y2FwYWJpbGl0aWVz 94392
+X09WRVJGTE9X 94393
+INGB0YHRi9C7 94394
+IENvY2g= 94395
+IHRlc3ROYW1l 94396
+V09SRFM= 94397
+XE1vZHVsZXM= 94398
+P3VybA== 94399
+X2NvbnRpbnVvdXM= 94400
+IFFJY29u 94401
+IHN0YXJlcw== 94402
+IGVqZWN0ZWQ= 94403
+IEludmFzaW9u 94404
+ZmluYWxpemU= 94405
+IGdldg== 94406
+PGc= 94407
+IEVkaXRvckdVSQ== 94408
+QmVybGlu 94409
+LmxpbmVFZGl0 94410
+LXJlZ2V4cA== 94411
+IHNsZWQ= 94412
+IEVBQ0g= 94413
+dWNv 94414
+IHNlZWRpbmc= 94415
+IGxvY2FsaXpl 94416
+ZXR1 94417
+X2FsbW9zdA== 94418
+cGFuc2U= 94419
+IFNlbnNvcnM= 94420
+X1NJ 94421
+KnNw 94422
+IFByb3BlcnR5SW5mbw== 94423
+IGFwcm94aW0= 94424
+IGRhdGFHcmlkVmlld1RleHRCb3hDb2x1bW4= 94425
+16A= 94426
+IGRpZmVyZW5jaWE= 94427
+TE9PSw== 94428
+IG9tbmlw 94429
+IFR1cmluZw== 94430
+IHVuaWRhZGVz 94431
+77yfCg== 94432
+LlJvd0hlYWRlcnM= 94433
+X0FDVElPTlM= 94434
+IERhbHk= 94435
+IGZvcnRpZmllZA== 94436
+IFdhZ2U= 94437
+LnNpbXBz 94438
+KGlzc3Vl 94439
+IGxlcHQ= 94440
+T3duZXJJZA== 94441
+J29yZGVy 94442
+5Y+N 94443
+56Wo 94444
+IHJld3JpdGluZw== 94445
+Lkl0YWxpYw== 94446
+IEZvcmdvdHRlbg== 94447
+KElM 94448
+IE5vU3VjaEVsZW1lbnRFeGNlcHRpb24= 94449
+ZXdu 94450
+IHBvcHVsb3Vz 94451
+IFNoZWQ= 94452
+IyR7 94453
+IEFsbw== 94454
+RGV2aWNlSW5mbw== 94455
+KElOVk9LRQ== 94456
+IHBlbmE= 94457
+IEJCQg== 94458
+LmJi 94459
+IHRvcnM= 94460
+IGNvbmR1Y2l2ZQ== 94461
+LXB1cnBsZQ== 94462
+IHNxdWFyZWx5 94463
+Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0KCg== 94464
+0LrRgNGL 94465
+ZmFzdGE= 94466
+IGNwdA== 94467
+IEluZ2Vu 94468
+IHs/fQ== 94469
+0YPQsw== 94470
+UGVybA== 94471
+LnNreQ== 94472
+LWF1dG9tYXRpYw== 94473
+aW1wbGVtZW50 94474
+b3JubWVudA== 94475
+LklNQUdF 94476
+LVNwZWVk 94477
+CUZpZWxk 94478
+IHBvdW5kZWQ= 94479
+IExa 94480
+IGF1dG9Gb2N1cw== 94481
+IOC5gA== 94482
+LkNvbXBhbmlvbg== 94483
+IFZpbQ== 94484
+dW5jaWE= 94485
+X3NrYg== 94486
+IHVubWFycmllZA== 94487
+IFNvdXI= 94488
+Z2FhcmQ= 94489
+TGVvZA== 94490
+IOCq 94491
+LkNsb3Vk 94492
+IHJlaW5mb3JjZXM= 94493
+J10+ 94494
+IGZlbGl6 94495
+IFVBVg== 94496
+cmFuY2Vz 94497
+5Y2B 94498
+VG9MaXN0QXN5bmM= 94499
+LkV4ZWN1dG9y 94500
+LXRz 94501
+ICcuJzsK 94502
+IEtpbmVjdA== 94503
+44GE44GG 94504
+IGJldm9y 94505
+IEV4dHJhY3Rpb24= 94506
+X2RyYXdlcg== 94507
+JHN1Yg== 94508
+IHVwbGlmdGluZw== 94509
+LmJ0bkV4aXQ= 94510
+KCcvLypbQA== 94511
+UkVESVM= 94512
+c3RkZXhjZXB0 94513
+ZGVv 94514
+IGdpdmVy 94515
+X2JpbmRpbmdz 94516
+VG9EZXZpY2U= 94517
+Lm1p 94518
+IEVzdGltYXRlcw== 94519
+YWxsZWxl 94520
+Pz8/Cgo= 94521
+IFN0cmVhbXM= 94522
+IGFmZmxpY3Q= 94523
+LnNhcA== 94524
+IHF1YWxp 94525
+IEdhdWw= 94526
+U3BlY2lmaWVz 94527
+IHpr 94528
+IHNhbml0YXJ5 94529
+IG5ld0luZGV4 94530
+c3BlY3M= 94531
+IGZyYWdtZW50TWFuYWdlcg== 94532
+IE5lY2Vzc2FyeQ== 94533
+CVNwcmluZw== 94534
+PX4= 94535
+IE9NQVA= 94536
+Y2FyZWVy 94537
+KCItIik7Cg== 94538
+IERhcmxpbmc= 94539
+aXRhZw== 94540
+OnBr 94541
+IFN0ZWxsYXI= 94542
+IGluZmVydGlsaXR5 94543
+bGV4aWJsZQ== 94544
+VW5hcnk= 94545
+IDpdLA== 94546
+Lk5FVw== 94547
+Z3N1Yg== 94548
+X1VGdW5jdGlvbg== 94549
+LnNsaWRlcw== 94550
+IGRpdmVyc29z 94551
+X2xvY2Fscw== 94552
+XFwv 94553
+IHBjYXA= 94554
+IE9vaw== 94555
+LkRhdGFHcmlkVmlld0NvbnRlbnRBbGlnbm1lbnQ= 94556
+ZXJzb25pYw== 94557
+IHRyZWJ1aWU= 94558
+IHNlcXVlbnRpYWxseQ== 94559
+YWJhcg== 94560
+IElQQ0M= 94561
+IGRldm91dA== 94562
+XEhlbHBlcnM= 94563
+RVR3ZWV0 94564
+IHRyYWJhamFy 94565
+IFdpbGtpbnNvbg== 94566
+IGRhw58= 94567
+SHVtYW5z 94568
+VGVhY2hlcnM= 94569
+IERhdGFWaWV3 94570
+IFlvZw== 94571
+IGplZGU= 94572
+IGFtYmlhbmNl 94573
+dHJhbmQ= 94574
+IGVycmF0aWM= 94575
+IHThu6s= 94576
+LnJhYmJpdA== 94577
+IG5ld2JpZQ== 94578
+IGVudHJhbmNlcw== 94579
+IG9ydGhvZ29uYWw= 94580
+IERJU1BBVENI 94581
+IFNjaHJv 94582
+X1RVUk4= 94583
+Omludm9rZQ== 94584
+IHRhbnRhbA== 94585
+IFpvbmVz 94586
+c3RhdGVtZW50cw== 94587
+TGltaXRz 94588
+IEfDpA== 94589
+aWHFgmE= 94590
+LnByZWRpY2F0ZQ== 94591
+LkZS 94592
+IENocmlzdG9waA== 94593
+LkNvbnM= 94594
+IEhvcnRvbg== 94595
+X0N1c3RvbWVy 94596
+CU1E 94597
+IGVsa2Fhcg== 94598
+IE1TRQ== 94599
+IElzQWN0aXZl 94600
+XSop 94601
+XFVuaXQ= 94602
+IGVv 94603
+Rm9yT2JqZWN0 94604
+ZWxpYWM= 94605
+LWRldmVsb3BtZW50 94606
+IHRlYWw= 94607
+IHN0aXRjaGVk 94608
+IE91dGNvbWU= 94609
+b25jw6k= 94610
+ZW1iZWRkaW5n 94611
+IG9uTmV4dA== 94612
+IO2VtOuLuQ== 94613
+KGV4aXN0aW5n 94614
+LmJpZA== 94615
+CWFzc2VydEZhbHNl 94616
+e2w= 94617
+TEVycm9y 94618
+X2J1bGxldA== 94619
+KEh0bWw= 94620
+IGVCb29rcw== 94621
+cGVyUGFnZQ== 94622
+L3F1ZXN0aW9u 94623
+LmZha2U= 94624
+Lm1i 94625
+X2RsbA== 94626
+IGN1bXNob3Q= 94627
+IE1hZGFnYXNjYXI= 94628
+SE9MREVS 94629
+IHBlc3F1aXNh 94630
+X0RFQ0xT 94631
+XSxbLQ== 94632
+IEFsYmFuaWE= 94633
+LXRvYXN0 94634
+IHByb3RhZ29uaXN0cw== 94635
+IG15b2NhcmQ= 94636
+IHdhbGtlcnM= 94637
+ID09PT09PT0= 94638
+L1BhZ2U= 94639
+PTw/PQ== 94640
+IGVucXVhbnRv 94641
+X1RSVU5D 94642
+IHNlcHRlbWJyZQ== 94643
+IGxheW91dFBhcmFtcw== 94644
+ICcuLi8uLi8uLi8uLi8uLi8= 94645
+IFRyYWZmb3Jk 94646
+IHBhbGF2cmE= 94647
+IHJ1bmRvd24= 94648
+IGJyaXR0bGU= 94649
+w6RjaGU= 94650
+LllFTExPVw== 94651
+IENlcmVtb255 94652
+IG5ld1RleHQ= 94653
+dmVjcw== 94654
+IGVzc2Vu 94655
+IE1ldG9kbw== 94656
+IEdVSURF 94657
+IHBvc3Rwb25l 94658
+IFZTdGFjaw== 94659
+WyIk 94660
+IE1pY3Jvc3lzdGVtcw== 94661
+XFBhZ2U= 94662
+cG1hdA== 94663
+X0ZBVUxU 94664
+X21C 94665
+U3RhdGVNYWNoaW5l 94666
+RmFjdWx0eQ== 94667
+Lnd4 94668
+IE1vemFydA== 94669
+YW5pbWU= 94670
+IHB5dA== 94671
+IEJ1a2tpdA== 94672
+LUlORlJJTkdFTUVOVA== 94673
+IHNlYXJjaGVy 94674
+LWJhc2tldA== 94675
+IG9tYXM= 94676
+IFR1bmlz 94677
+IFBsYXR0 94678
+IHsNCg0KDQo= 94679
+eWFo 94680
+dG9sdWE= 94681
+SW50cm9kdWNlZA== 94682
+c3VwcGx5 94683
+IG1pc29neW4= 94684
+IFdhaXN0 94685
+IEVI 94686
+LW9wZXJhdG9y 94687
+IGRhcmtlbg== 94688
+IENvc21pYw== 94689
+IGdsYWNpZXJz 94690
+IA0NCg== 94691
+XVtf 94692
+Q29tcGFueUlk 94693
+IFJlY29uc3RydWN0aW9u 94694
+aXp6bGllcw== 94695
+IGzDrWRlcg== 94696
+IGNvbGxlZ2lhdGU= 94697
+IFBldHR5 94698
+T1VSTkFM 94699
+ZGVjb3JhdG9ycw== 94700
+cmFtcw== 94701
+KCgK 94702
+IEFzdHJvbm9teQ== 94703
+IHJpbw== 94704
+IEN5cmls 94705
+anVhbg== 94706
+IHJlaW5j 94707
+IFBpc3RvbnM= 94708
+IEJ1c3k= 94709
+cHRyb24= 94710
+IHBvbW9j 94711
+CVJUQ0s= 94712
+QnV5aW5n 94713
+Ly8qKgo= 94714
+IFdyYXBwZWQ= 94715
+IE1lZXI= 94716
+IGltYXA= 94717
+IGJlc3RpbW0= 94718
+IEFnaWxpdHk= 94719
+LlRvVGFibGU= 94720
+c3RpbmVuY2U= 94721
+XSkqKg== 94722
+IEF1dG9tYXRlZA== 94723
+ZHNw 94724
+IEdhcmxpYw== 94725
+aW9kZQ== 94726
+ZXhlbHM= 94727
+aW50cm9z 94728
+IGJlc3Rvd2Vk 94729
+KHZpc2libGU= 94730
+IGh5ZHJhdGVk 94731
+bm94aW91cw== 94732
+IEF1dGhlbnRpY2F0aW9uU2VydmljZQ== 94733
+IHNob3dNb2RhbA== 94734
+IGNvbXBvc2Vycw== 94735
+R0VORVJBTA== 94736
+Q1RT 94737
+IFNocg== 94738
+Y3JlYXQ= 94739
+IGNsb3NldHM= 94740
+IGdyb3VuZGluZw== 94741
+IENPTU1FTlRT 94742
+ICsj 94743
+IGdyb3VuZHdvcms= 94744
+KGluZGV4UGF0aA== 94745
+Z3JhdGlz 94746
+dXBwaWVz 94747
+IGt2bQ== 94748
+IGN1YWxlcw== 94749
+LkRlZXBFcXVhbA== 94750
+IGFsbG95cw== 94751
+LWJ1ZGdldA== 94752
+KF9fXw== 94753
+IGNvbmVjdGFy 94754
+LXJhZA== 94755
+IGl0Y2g= 94756
+bGFtcA== 94757
+LmdycA== 94758
+LWFkZG9ucw== 94759
+IHNlYWJvcm4= 94760
+IG5lZ2xpZ2VudA== 94761
+X0RldGFpbA== 94762
+IHNlcmVuZQ== 94763
+IGJhcnJhY2tz 94764
+IGJx 94765
+IFNlY3Q= 94766
+KGRhdG9z 94767
+IHRoZW1hdGlj 94768
+IHBvbGx1dGVk 94769
+CWFuaW1hdGlvbg== 94770
+SHVnaA== 94771
+RXhlY3V0YWJsZQ== 94772
+KCcvJylb 94773
+IGFwb3B0b3Npcw== 94774
+IGFiYnJldmlhdGVk 94775
+Zm9vbg== 94776
+UmFua2Vk 94777
+CWhpdA== 94778
+CQkgICAgICAgICAgICAgICAgICAgICAgIA== 94779
+Q29udGludW91cw== 94780
+IG1vdmVUbw== 94781
+REJPYmplY3Q= 94782
+IGNvbmNlaXZhYmxl 94783
+IEd3ZW4= 94784
+IMOhbGw= 94785
+X18oKQ== 94786
+IExhbmE= 94787
+IGVpbnplbA== 94788
+IHJlY291bnRz 94789
+eXN0ZW1z 94790
+b3dhbnk= 94791
+KTo/Pgo= 94792
+IEFrcm9u 94793
+b2xpbmk= 94794
+Q29ycA== 94795
+YXBocmFn 94796
+ICInLg== 94797
+IGNvbnZlbmVk 94798
+IC4uLi4KCg== 94799
+IGNhbGxlZQ== 94800
+IENsb3Zlcg== 94801
+LmRlc2NyaXB0b3I= 94802
+Lkl0ZW1TdGFjaw== 94803
+IHBlcnZlcnNl 94804
+X0NF 94805
+PUAi 94806
+LS0tDQo= 94807
+IGJldg== 94808
+c3VtYQ== 94809
+YWNjdW11bGF0b3I= 94810
+IGxpemFyZA== 94811
+INC+0Yc= 94812
+Z2V0RGVzY3JpcHRpb24= 94813
+IFNhcmFz 94814
+Lm5leHRTaWJsaW5n 94815
+IGVsYXN0aWNpdHk= 94816
+IGNoYWM= 94817
+bW92ZWQ= 94818
+X1RvcA== 94819
+dHJlcg== 94820
+KGRvd24= 94821
+ZWxlbXM= 94822
+b2JpbGk= 94823
+LnBvc3RNZXNzYWdl 94824
+ICjiiA== 94825
+Q3N2 94826
+IFlvc2VtaXRl 94827
+c3dlZXQ= 94828
+TUFUUklY 94829
+aWdyYXRlZA== 94830
+IGZvcmdpbmc= 94831
+IFBhZ2VTaXpl 94832
+dHJhbnNmb3Jtcw== 94833
+PVlFUw== 94834
+IGRpc2Nsb3Npbmc= 94835
+IFBlZGlhdHJpYw== 94836
+IERlYWRseQ== 94837
+UmVzb3VyY2VJZA== 94838
+LWJpbmFyeQ== 94839
+IFJvd2U= 94840
+IENhaXI= 94841
+X2V4dHJhY3Rpb24= 94842
+RGVjcmU= 94843
+IE9ic3Q= 94844
+cGxy 94845
+IFBoeXNpb2xvZ3k= 94846
+bXZj 94847
+aHRp 94848
+LlRl 94849
+IGV4dHJhdmFnYW50 94850
+IEFudGli 94851
+w7NzdA== 94852
+b3V0ZGly 94853
+IGNhcm5l 94854
+Vmlld1BhZ2Vy 94855
+IGltcGxhbnRlZA== 94856
+U2VhcmNoUGFyYW1z 94857
+w7xyZ2Vy 94858
+Y29uZGU= 94859
+YWNlbnRl 94860
+X0NVREE= 94861
+JHZhbA== 94862
+IldoaWxl 94863
+IHRlbXBMaXN0 94864
+IHN5bmFnb2d1ZQ== 94865
+Y21j 94866
+INGA0LDQsdC+0YLRiw== 94867
+IHNlem5hbQ== 94868
+IHNlc3N1YWxp 94869
+IGNhYmV6YQ== 94870
+ZXTDoA== 94871
+IGZhw6c= 94872
+Z2Vo 94873
+Y2VkZQ== 94874
+IlNvbWU= 94875
+Om9u 94876
+LWZvcm1lZA== 94877
+YnluYW1l 94878
+IOuwmO2ZmA== 94879
+IG5hw68= 94880
+IEFVRw== 94881
+IGVhc2Vk 94882
+XSl7 94883
+KHB0aHJlYWQ= 94884
+IGplZGVt 94885
+KGZpeHR1cmU= 94886
+IFBhcmw= 94887
+XX0pOwo= 94888
+IGV4cHVsc2lvbg== 94889
+IEluZXRBZGRyZXNz 94890
+IE1MUA== 94891
+LicpOw== 94892
+IG9ybw== 94893
+IFNldmlsbGE= 94894
+IGZvcm11bGFpcmU= 94895
+LXRlcnJvcmlzbQ== 94896
+L1dlYkFQSQ== 94897
+KmFuZ3N0cm9t 94898
+Y3Jhd2w= 94899
+X2xvYW4= 94900
+X0RJR0VTVA== 94901
+IEtub3h2aWxsZQ== 94902
+LmdjYQ== 94903
+IERpeQ== 94904
+bnRhZw== 94905
+YWJsZVZpZXdDb250cm9sbGVy 94906
+LkZlZWQ= 94907
+LXNoYXJlZA== 94908
+IGNvY2Np 94909
+X2ludml0ZQ== 94910
+IEJ1Y2tpbmdoYW0= 94911
+IEdsdXRlbg== 94912
+IGVuZGVtaWM= 94913
+UmFpc2Vk 94914
+IHF1ZXJ5SW50ZXJmYWNl 94915
+IG1hcnRpbg== 94916
+QuG6oW4= 94917
+IGhhcmU= 94918
+IGRlaW4= 94919
+cmFyaWFu 94920
+bXlmaWxl 94921
+IGFuZ3Vpc2g= 94922
+VGV4dG8= 94923
+IEJVRkY= 94924
+KGxu 94925
+bWFycw== 94926
+X3N1YnRpdGxl 94927
+X2dpZnQ= 94928
+IGJvbGRseQ== 94929
+IFNpbmd1bGFy 94930
+KExvZ0xldmVs 94931
+PEFydGljbGU= 94932
+L3N0YXRz 94933
+INC/0L7Qsg== 94934
+IGl0ZW5z 94935
+IGRlbm9taW5hdGlvbg== 94936
+LkRhdGFHcmlkVmlld1RyaVN0YXRl 94937
+X0xS 94938
+IER1Y2hlc3M= 94939
+CUJsb2Nr 94940
+dHJhY2Vy 94941
+LUNO 94942
+XEFwcERhdGE= 94943
+Lmxpc3Rz 94944
+KFJvdXRl 94945
+IEdPT0RNQU4= 94946
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCg== 94947
+IHRpbmhh 94948
+IGV2ZXJsYXN0aW5n 94949
+YURhdGE= 94950
+KGNvbXBhcmU= 94951
+IHJwdA== 94952
+XFBocA== 94953
+LkZJTEVT 94954
+IHNwYXJpbmc= 94955
+U2Nhcg== 94956
+INin2YTYqg== 94957
+IEJldGhsZWhlbQ== 94958
+IGJhY2twYWdl 94959
+c3BsaWNl 94960
+ZsO2cg== 94961
+QGR5bmFtaWM= 94962
+4bupYw== 94963
+7KY= 94964
+LnBhZ2luZw== 94965
+IEJlbG1vbnQ= 94966
+LkVYUA== 94967
+IGludGVybGU= 94968
+IENoZWNrbGlzdA== 94969
+IFVuaWNvcm4= 94970
+QkVTVA== 94971
+Z2V0UGxheWVy 94972
+LmFyZ3NvcnQ= 94973
+IHdpdGhTdHJpbmc= 94974
+IE1vZGVyYXRl 94975
+fSI+Cg== 94976
+LnNldEltYWdlQml0bWFw 94977
+IHRyZW5jaGVz 94978
+IGdlbmVyYXI= 94979
+IGZlcm1lbnRlZA== 94980
+IGRlanRpbmc= 94981
+Q3RybHM= 94982
+IGRpc2FncmVlcw== 94983
+UXVpZXQ= 94984
+KFNRTEV4Y2VwdGlvbg== 94985
+IFRlbnNvckZsb3c= 94986
+T05B 94987
+UG9ydGxhbmQ= 94988
+LlB0cg== 94989
+bGx4 94990
+YXN0b24= 94991
+Q2x1c3RlcnM= 94992
+IFVzdWFyaW9z 94993
+IGtoaQ== 94994
+IGdpYQ== 94995
+IERvbHBoaW4= 94996
+xZFz 94997
+IGx1ZGVy 94998
+IGRpc3Bvc2l0aXZv 94999
+IFZ5 95000
+b21wc29u 95001
+IO2VoA== 95002
+IGtjYWw= 95003
+IENhbGNpdW0= 95004
+U2VjdGlvbnNJbg== 95005
+IENhc2M= 95006
+IGdyYXR1aXRp 95007
+b3NvbWFs 95008
+IHVuZGVyY3V0 95009
+IENhaA== 95010
+OnBhcmFtcw== 95011
+IHJldHVyblVybA== 95012
+IEVyZQ== 95013
+w6lyYw== 95014
+IGludGw= 95015
+fS8jew== 95016
+IG91dHB1dFBhdGg= 95017
+IGZhbHNlaG9vZA== 95018
+IFVzZXJSb2xl 95019
+PEhhc2hNYXA= 95020
+IENyZWF0ZVVzZXI= 95021
+IENvd2JveQ== 95022
+CVVzZQ== 95023
+XSgK 95024
+IFNob3BpZnk= 95025
+Vmlld1N0YXRl 95026
+QWR2YW5jZQ== 95027
+LXRhbms= 95028
+IlQ= 95029
+IEplbnM= 95030
+PW9wdGlvbnM= 95031
+KCIuLg== 95032
+Lm1pbWU= 95033
+IENSVA== 95034
+IGjDpHR0ZQ== 95035
+KHNv 95036
+LlVOS05PV04= 95037
+IGRhcsO8YmVy 95038
+IENPVkVS 95039
+R2Vt 95040
+Q3Jv 95041
+X1JFQ1Y= 95042
+X2hpZXJhcmNoeQ== 95043
+Q2hvb3Npbmc= 95044
+SkVYRUM= 95045
+IGRvcnNhbA== 95046
+KyI8 95047
+IE5leQ== 95048
+V29tYW4= 95049
+QmV6aWVy 95050
+IHJpZ3M= 95051
+IG9udHZhbmc= 95052
+77yM5YiZ 95053
+IEdhdXQ= 95054
+Y21i 95055
+TmhhcA== 95056
+IG1vbm9j 95057
+IGVuZXJnaWE= 95058
+b2JzZXJ2ZU9u 95059
+c3Rha2Vz 95060
+LSot 95061
+IE5hY2s= 95062
+fX0iCg== 95063
+ZXJ2YXM= 95064
+IEhpbmRlcmVkUm90b3I= 95065
+QWRqYWNlbnQ= 95066
+IEludGVybmFjaW9uYWw= 95067
+CWFyZWE= 95068
+IPCflA== 95069
+IHNwYXJrbGU= 95070
+KCkuXw== 95071
+LmlkZWE= 95072
+IHV0cmVjaHQ= 95073
+IG1hcHBlZEJ5 95074
+IENvbG8= 95075
+CVRS 95076
+UG9zdGVy 95077
+IGNvbWJhdGluZw== 95078
+IFllbGxvd3N0b25l 95079
+aWVycmV6 95080
+YWNjdA== 95081
+IHPDoWNo 95082
+Lk5ld3M= 95083
+IGZpZWxkVmFsdWU= 95084
+IGNheg== 95085
+IEZyZWVt 95086
+CQkKCQo= 95087
+IHVzdXI= 95088
+IHNvbGE= 95089
+IGN1bWJlcnNvbWU= 95090
+IGNhdGFwdWx0 95091
+Ii4v 95092
+IEV4ZWN1dG9ycw== 95093
+IEFtZXM= 95094
+ICc8JT0= 95095
+ZmlsbG5h 95096
+LOKAlA== 95097
+OlNldFRleHQ= 95098
+LWNhdGVnb3JpZXM= 95099
+LWFyY2hpdmU= 95100
+IFBvbGx1dGlvbg== 95101
+Lk9m 95102
+4oCcQXQ= 95103
+X0NIQVJTRVQ= 95104
+KENvbHVtbg== 95105
+4oCZKQ== 95106
+IHVubWlzdGFr 95107
+IGVhcm0= 95108
+IFBsYXRmb3Jtcw== 95109
+IE1vbWVudHVt 95110
+VmVjdG9yaXplcg== 95111
+cmF3ZXI= 95112
+KHBhc3Nwb3J0 95113
+KHBsYW5l 95114
+IHJlcHJlc2VudGE= 95115
+IHB1YmtleQ== 95116
+IEphaW4= 95117
+IG1lbm5lcw== 95118
+IGluc3RhbnRhbmVvdXM= 95119
+IGV0aGVycw== 95120
+IG5lc3Rz 95121
+IFBhdHRvbg== 95122
+IEhBQ0s= 95123
+cGFja2luZw== 95124
+SVNlcnZpY2U= 95125
+IHJvY2tlcg== 95126
+IGZpY2E= 95127
+IEdsYWRpYXRvcg== 95128
+IFVQQw== 95129
+IExvd2VsbA== 95130
+YmVhcmVy 95131
+IHZpcGVy 95132
+X2dsb2I= 95133
+IG1hc2hlZA== 95134
+IGhhaXJzdHlsZQ== 95135
+IHVuZGVybWluZXM= 95136
+cmVzdGF1cmFudHM= 95137
+IHJlYWN0aW9uYXJ5 95138
+IGJpbGxpZw== 95139
+fSIpOw0K 95140
+IHZpc3Rhcw== 95141
+IG9wZW5kaXI= 95142
+CWxhYmVscw== 95143
+YWxsaXM= 95144
+IFdvbGZm 95145
+IENQQw== 95146
+IHJhaWx3YXlz 95147
+IFZhdWdoYW4= 95148
+IEFza2luZw== 95149
+Y2Fp 95150
+IEdu 95151
+X1BST0Y= 95152
+LVNlcA== 95153
+LmN1cnZl 95154
+TXVsdGlwbHk= 95155
+0YDQsNC90LjRhg== 95156
+IG1lZXR1cA== 95157
+Z2V0RGI= 95158
+KEdVSQ== 95159
+IHJlaW1idXJzZQ== 95160
+OnJlc3VsdA== 95161
+VHVtYmxy 95162
+LkNsb3NlZA== 95163
+IGNvbmZvcm1z 95164
+IEhvaw== 95165
+aWVkYWRl 95166
+TmV3TGFiZWw= 95167
+IG5hdkN0cmw= 95168
+RG9jdG9ycw== 95169
+IOyViA== 95170
+IGJvdXRz 95171
+IGlzYw== 95172
+Lyc7Cgo= 95173
+dWhs 95174
+LlVp 95175
+LXNhbWE= 95176
+IENhbm9uaWNhbA== 95177
+IG1ldGljdWxvdXM= 95178
+IGdyb3Rlcw== 95179
+IC8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8= 95180
+ZXRlcw== 95181
+IGxhbmd1ZQ== 95182
+IGZDaGFpbg== 95183
+IFR5cGVmYWNl 95184
+IEJyaWdoYW0= 95185
+aWFyZQ== 95186
+J8OpdGFpdA== 95187
+IEVGRg== 95188
+IGRlc3Ryb3llcg== 95189
+X21hdHJpY2Vz 95190
+TsO6bWVybw== 95191
+Y2FsbGFibGU= 95192
+X3BlcmlvZHM= 95193
+c3RydWs= 95194
+bWFq 95195
+LnJs 95196
+LmxpZnQ= 95197
+2YrZhA== 95198
+w5A= 95199
+UmV0VmFs 95200
+RGVudmVy 95201
+IFRyaWJ1dGU= 95202
+a2l5ZQ== 95203
+emV3 95204
+IFNwYXJl 95205
+IGxldWtlbWlh 95206
+IHdhaXRyZXNz 95207
+IHBsdXTDtHQ= 95208
+QWxpYXNlcw== 95209
+IExvY2F0ZQ== 95210
+5rY= 95211
+SWRlbnRpZmljYXRpb24= 95212
+LnRlbA== 95213
+LWRheXM= 95214
+dGVycml0 95215
+aW1idXM= 95216
+IEJ1dHRlcktuaWZl 95217
+64K0 95218
+cnVwdGN5 95219
+IEdyYWRlcw== 95220
+IHVuZGVyc2lkZQ== 95221
+IGhhcmRzaGlwcw== 95222
+dW5laQ== 95223
+LWNvbnRhaW5lZA== 95224
+IFsnLg== 95225
+T2Jzb2xldGU= 95226
+LlJldHJvZml0 95227
+IHVyYW51cw== 95228
+X3JnYmE= 95229
+IHJhcGVz 95230
+IEthcmU= 95231
+W+KApl0= 95232
+IEZpbmNo 95233
+LmJ1bmlmdUZsYXRCdXR0b24= 95234
+cXVpc2Fy 95235
+IE51cnNlcw== 95236
+ZWdhZGU= 95237
+IGhu 95238
+RXhjbHVkZQ== 95239
+IHN0b2NoYXN0aWM= 95240
+IHNvdHRv 95241
+IFBlbmFsdHk= 95242
+IHNvbnN0 95243
+IHJvc2E= 95244
+X0ZpbmQ= 95245
+IEludmFsaWRhdGU= 95246
+TGlzdEl0ZW1JY29u 95247
+JywNDQo= 95248
+X3BkdQ== 95249
+IE1lYWxz 95250
+YWrEhWM= 95251
+IE9vcHM= 95252
+IE5vdGljZXM= 95253
+IGRlcml2YXRpb24= 95254
+W10NCg== 95255
+6Lqr 95256
+eXN0ZXJ5 95257
+X2ZpdmU= 95258
+RWFybg== 95259
+PWV2ZW50 95260
+IG9ncg== 95261
+LVJFQUw= 95262
+IExpcHM= 95263
+c2VsZWN0b3Jz 95264
+YWRpZXI= 95265
+IHNldEJhY2tncm91bmRJbWFnZQ== 95266
+KHRoaW5n 95267
+IHNvZnRiYWxs 95268
+XHhhYQ== 95269
+KGlkZW50 95270
+IEp1cnk= 95271
+IFZveWFnZQ== 95272
+IFRBcnJheQ== 95273
+KFBhaW50 95274
+V2FybQ== 95275
+RVhURVJOQUw= 95276
+YXN1 95277
+ICghKCg= 95278
+LkZFVENI 95279
+IHNraXJt 95280
+T1JFRA== 95281
+Y2FuY2VsbGVk 95282
+aXR0ZWw= 95283
+IHNlZWR1 95284
+bGljaGVz 95285
+b2hv 95286
+LHJldGFpbg== 95287
+KFdlYkRyaXZlcg== 95288
+aXB0YWJsZXM= 95289
+RVJJQ0E= 95290
+IGNsZWFubGluZXNz 95291
+ZWxsb3dvcmxk 95292
+IGNvaGVzaW9u 95293
+Z2lzdA== 95294
+XS4n 95295
+ZXJnaW5n 95296
+IGlzcA== 95297
+Lm9mZnNldFRvcA== 95298
+KGZhY3Rvcg== 95299
+dW5pdmVyc2Fs 95300
+IFBsYXliYWNr 95301
+IEJ5dGVTdHJpbmc= 95302
+IGRhbW5pbmc= 95303
+IFNTUg== 95304
+YWN1cw== 95305
+IFN0YXRlbg== 95306
+IOWVhuWTgQ== 95307
+IFBlZQ== 95308
+IFNhbXBsaW5n 95309
+YXRvcmlh 95310
+c3RhcnRJbmRleA== 95311
+5ZCr 95312
+IOy0iOq4sA== 95313
+IE9saXZlaXJh 95314
+IEZsYWtl 95315
+Ym9vbQ== 95316
+X01TSw== 95317
+IEZhY2luZw== 95318
+b3JnaGluaQ== 95319
+Zm9vZHM= 95320
+VHJlZVdpZGdldEl0ZW0= 95321
+IEhBTEY= 95322
+IiIiKQo= 95323
+IENIQVBURVI= 95324
+IEV2ZWx5bg== 95325
+Pis= 95326
+IEhvcm5ldHM= 95327
+d29rZQ== 95328
+IC9b 95329
+YXRob2xpYw== 95330
+LnNlZ21lbnRz 95331
+Lm5hdmlnYXRlQnlVcmw= 95332
+IE1hbnVz 95333
+IHBlcHRpZGVz 95334
+IGZsZWV0aW5n 95335
+IEFUVg== 95336
+IFNoaWI= 95337
+SW50QXJyYXk= 95338
+IG1veg== 95339
+cHJvYmxlbXM= 95340
+b2duZQ== 95341
+Lk90aGVy 95342
+QWRtaW5pc3RyYXRpb24= 95343
+JSUqLw== 95344
+Il09PQ== 95345
+IEFuZHJlcw== 95346
+QWRh 95347
+aGludHM= 95348
+XCIiOwo= 95349
+KHBuZw== 95350
+IOqwgOuKpQ== 95351
+44OK 95352
+cmVqZWN0ZWQ= 95353
+IG1vdmVycw== 95354
+546H 95355
+IHBhcmVudGhlc2lz 95356
+KGFzc2lnbnM= 95357
+RWxpdGU= 95358
+UmVtaW5kZXI= 95359
+IHN1ZmZlcmVycw== 95360
+IFJlc291cmNlQnVuZGxl 95361
+dGhhZw== 95362
+PicNCg== 95363
+YW50aW5v 95364
+UGVyaXBo 95365
+IFNoYXJk 95366
+Q2hhcnREYXRh 95367
+KGpq 95368
+IG9zdGF0 95369
+aHVnZQ== 95370
+LWF1dGhvcmVk 95371
+LmNp 95372
+IHB5bXlzcWw= 95373
+IGxpbmVycw== 95374
+IEFUUw== 95375
+Pkxhc3Q= 95376
+KSIpCgo= 95377
+IGdldHBpZA== 95378
+R2V0U2l6ZQ== 95379
+IGV4dG9ydGlvbg== 95380
+W2Zsb2F0 95381
+IEVJTkE= 95382
+L0Jhc2U= 95383
+LnNldE9uQWN0aW9u 95384
+0L7Qu9GP 95385
+IEdsYWNpZXI= 95386
+X2F6 95387
+IHRyYW5zcG9ydGU= 95388
+IFNtcw== 95389
+dGh1bWJz 95390
+IHRyZWFzdXJlcg== 95391
+IG16 95392
+aXN0aWs= 95393
+UkVESUVOVA== 95394
+IGlzaQ== 95395
+X3N0dWZm 95396
+UE9TSVRPUlk= 95397
+c3RhcnRkYXRl 95398
+IFppbmM= 95399
+5rG9 95400
+IGthaw== 95401
+IGVyZmFocmVu 95402
+X0NPTUJP 95403
+IHVjd29yZHM= 95404
+LlBheQ== 95405
+IGtpbmdkb21z 95406
+IGV4Y2VsZW50ZQ== 95407
+aWduaXRl 95408
+X3ZhcmlhdGlvbg== 95409
+IG5hdmVnYWRvcg== 95410
+5LiT 95411
+dmlld0NvbnRyb2xsZXI= 95412
+cmlyZQ== 95413
+SG9uZXN0bHk= 95414
+Q2FzY2FkZQ== 95415
+ZXRyYWlu 95416
+QXJnZW50aW5h 95417
+Y3E= 95418
+IE1hcmlhbg== 95419
+L2Fy 95420
+IGludGVyZXNzZQ== 95421
+dXJhaGFu 95422
+KFBD 95423
+IGZyaXZvbA== 95424
+IFRydXN0ZWQ= 95425
+KElDb25maWd1cmF0aW9u 95426
+IFJpaGFubmE= 95427
+ZW5kb3ph 95428
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg 95429
+IHByb2NsYW1hdGlvbg== 95430
+IHByZWRvbWluYW50 95431
+IGNvbnN0cw== 95432
+LW5lY2s= 95433
+V29sZg== 95434
+LmNoZWNrYm94 95435
+IHN0YW56YQ== 95436
+IGVudGVuZGVy 95437
+Ly8o 95438
+SGFuZHM= 95439
+IGJpbGxlZGVy 95440
+IFRvc2hpYmE= 95441
+YWJiaXg= 95442
+RU5DSUVT 95443
+IGppbQ== 95444
+UFVS 95445
+Lmxlc3Nvbg== 95446
+IGJlcnRo 95447
+bGFyxLFu 95448
+Qmxv 95449
+CWV4dA== 95450
+ZWVs 95451
+IGRlbWFzaQ== 95452
+IGNvbG9uaXphdGlvbg== 95453
+L2Rpc2M= 95454
+77yP 95455
+Q2VydGFpbmx5 95456
+566h55CG5ZGY 95457
+IGpvZ2Fkb3I= 95458
+dcOp 95459
+Q29sdW1uc01vZGU= 95460
+IEpW 95461
+IEluc3RpdHV0 95462
+X3NwZWN0cnVt 95463
+LmRlbnNl 95464
+IFNob3J0Y3V0 95465
+IHNlYnVhaA== 95466
+IGZsYXNoeQ== 95467
+UmVnYXJkcw== 95468
+IHNoYXJwZXI= 95469
+Y2FuY2VsbGF0aW9uVG9rZW4= 95470
+X2RldGFsbGU= 95471
+IFNjYXJsZXR0 95472
+INC80LDRgg== 95473
+IG5lZ29jaW8= 95474
+4LiW 95475
+IEpX 95476
+d2ViZHJpdmVy 95477
+LndhbGw= 95478
+IHhhbWFyaW4= 95479
+b3BhcXVl 95480
+LkFkZFBhcmFtZXRlcg== 95481
+KENvbnRyb2xsZXI= 95482
+LWFib3J0aW9u 95483
+X0ZVTkNUSU9OUw== 95484
+Q3VzdG9tZXJJZA== 95485
+IHZlbmly 95486
+IEJ1c3Rlcg== 95487
+X3ByZWRpY3RlZA== 95488
+L3J1bGVz 95489
+LU1ldGhvZHM= 95490
+IGdkemll 95491
+Il0nKTsK 95492
+IFB4 95493
+Q09OUw== 95494
+LlNsaWNl 95495
+IHJldmFtcGVk 95496
+IFRhYmxlVmlldw== 95497
+IGRpY2tz 95498
+IO2YuOy2nA== 95499
+IEF1eGlsaWFyeQ== 95500
+T3BlcmE= 95501
+L3Jj 95502
+IHVudGhpbmthYmxl 95503
+IGRlZHVjdGVk 95504
+bHo= 95505
+IExhZ2U= 95506
+IFJvd2xpbmc= 95507
+cHJvdmVk 95508
+T2ZmZXJz 95509
+LHNldA== 95510
+UkdCTw== 95511
+IEZV 95512
+IENlbnRPUw== 95513
+b3pv 95514
+IFRyb2phbg== 95515
+IG1hw7FhbmE= 95516
+IC8vPQ== 95517
+Kio6 95518
+IHtcCg== 95519
+IEJvd2Vu 95520
+S25vd2luZw== 95521
+IOW6 95522
+PS09LT0tPS09LT0tPS09LQ== 95523
+IGViZW5mYWxscw== 95524
+XT17Cg== 95525
+Qk1J 95526
+KCk7KQ== 95527
+KHBlcm1pc3Npb24= 95528
+QW5kZXJzb24= 95529
+IGRlZ3JhZGU= 95530
+U29hcA== 95531
+dcWf 95532
+IFB1cHB5 95533
+IEV0aGlvcGlhbg== 95534
+IFRFU1RJTkc= 95535
+ZW5zZXg= 95536
+IGRyZXNzZXI= 95537
+IENob3Jl 95538
+VW5oYW5kbGVk 95539
+QXNzb2NpYXRl 95540
+LmFkZGl0aW9uYWw= 95541
+IGRpZmbDqXJlbnRlcw== 95542
+aXNxdWU= 95543
+IG5lY2Vzc8Ohcmlv 95544
+IGdlbmVyaWNz 95545
+KHBm 95546
+IFxg 95547
+IE5lYXJieQ== 95548
+YXBvcmF0aW9u 95549
+IFRoZW1lRGF0YQ== 95550
+V2lGaQ== 95551
+LlJlYWw= 95552
+YWN5ag== 95553
+TGl2 95554
+IHBzeWNob2xvZ2ljYWxseQ== 95555
+bWV0aG9kUG9pbnRlclR5cGU= 95556
+IE5pa29s 95557
+IERlZGljYXRlZA== 95558
+X1BPUlRT 95559
+IEphZQ== 95560
+TlNBdHRyaWJ1dGVkU3RyaW5n 95561
+IGFtYmFzc2Fkb3Jz 95562
+IEhhbmRsZXJz 95563
+IEFuYXQ= 95564
+IHZvY2FsaXN0 95565
+IHJhcg== 95566
+IGRldnVlbHZl 95567
+Lmdz 95568
+IHhjYg== 95569
+IHN1Ym1vZHVsZQ== 95570
+IEFTU0lHTg== 95571
+dXJlZW4= 95572
+IGNsYXNlcw== 95573
+ZW1vdGg= 95574
+X0NOVEw= 95575
+X2p3dA== 95576
+IOuniA== 95577
+IG91dHBvc3Q= 95578
+IEluYm94 95579
+CWZsZXg= 95580
+IEdyb2Nlcnk= 95581
+SUxJTkU= 95582
+Lm1vYg== 95583
+IENvbnN0cg== 95584
+XT1d 95585
+KHdhbGxldA== 95586
+IHNlZGU= 95587
+ZmFs 95588
+IGltcGFzcw== 95589
+PXtbJw== 95590
+IHVuZm9yZQ== 95591
+ZnVzZQ== 95592
+X0xlYW4= 95593
+IGF2YWxhbmNoZQ== 95594
+PXJhbmQ= 95595
+IGFkdWx0ZXJ5 95596
+IEdlZQ== 95597
+CUlucHV0U3RyZWFt 95598
+IGNhYmVs 95599
+X01PVU5U 95600
+IG5vdGljaWFz 95601
+IFJhdW0= 95602
+IGJ5dGVhcnJheQ== 95603
+IG9uSGlkZQ== 95604
+ICkuCg== 95605
+JGluc3RhbmNl 95606
+IGRpZFNlbGVjdFJvd0F0SW5kZXhQYXRo 95607
+YWNhbQ== 95608
+LWNvbGxlY3Rpb24= 95609
+IHVwaGU= 95610
+UG90ZW50aWFs 95611
+IFNEUw== 95612
+X2FwcHJvdmFs 95613
+RGFtbg== 95614
+OmNvbnZlcnQ= 95615
+IE1vZGlmaWNhdGlvbnM= 95616
+IOyYiA== 95617
+IHVuYWI= 95618
+IHNjcm9sbGVk 95619
+KyIpOwo= 95620
+IGdhdWNoZQ== 95621
+IEhPTA== 95622
+YW50YW5hbW8= 95623
+IGNvbHVtbkhlYWRlcg== 95624
+CVpFUEhJUg== 95625
+emFj 95626
+IG91dGluZ3M= 95627
+IGFwcGxhdWRlZA== 95628
+aG9yaWE= 95629
+bW9keA== 95630
+IG1pbGxlbm5pYQ== 95631
+Jm0= 95632
+Lkpzb25JZ25vcmU= 95633
+IHBpb25lZXJlZA== 95634
+IENhdnM= 95635
+CWpz 95636
+ZGVwYXJ0dXJlZGF5 95637
+X2ti 95638
+LlBhdGllbnQ= 95639
+IHBldGFscw== 95640
+cG9ydHJhaXQ= 95641
+In19Cg== 95642
+SG9tZUFzVXBFbmFibGVk 95643
+LnByZXR0eQ== 95644
+LGNsanM= 95645
+IG1lZGlvcw== 95646
+aGFzaGVk 95647
+ZW1vZGVs 95648
+IE1vam8= 95649
+LmZyb21SR0JP 95650
+LXBl 95651
+IGludGltYXRlbHk= 95652
+IGVsZ2c= 95653
+W107DQo= 95654
+L09ic2VydmFibGU= 95655
+IG9iZWRpZW50 95656
+IEphbWFs 95657
+UmVxdWlyZWRNaXhpbg== 95658
+IExpc3RWaWV3SXRlbQ== 95659
+CXBsYWNlaG9sZGVy 95660
+X3RyYW5zYWtzaQ== 95661
+PFNlcnZpY2U= 95662
+IGVuc3VlZA== 95663
+IFJpY2Fu 95664
+U2FnYQ== 95665
+QVVESU8= 95666
+IGpt 95667
+LXNhbGVz 95668
+LW11bHRp 95669
+JSI7Cg== 95670
+IGNsYXNzaWZpY2F0aW9ucw== 95671
+IHTDo28= 95672
+Q29hbA== 95673
+OycpOwo= 95674
+IGRlbGlnaHRz 95675
+X2h6 95676
+X2JvbGQ= 95677
+REVQRU5E 95678
+INCh0L7Qt9C0 95679
+YXRlZQ== 95680
+X3N1Ym5ldA== 95681
+IFRvd25zZW5k 95682
+IENhc3RpbGxv 95683
+IHBydA== 95684
+JC8p 95685
+IGZpbGli 95686
+KCcvJylbLQ== 95687
+IHVwaG9sc3Rlcnk= 95688
+IGNvbXBvbmVudGU= 95689
+IFhG 95690
+LlJldmVyc2U= 95691
+X3R1bm5lbA== 95692
+SW1tZWRpYXRlbHk= 95693
+LW1vdmU= 95694
+IGFsaXN0 95695
+V1ND 95696
+c3RydWN0dXJhbA== 95697
+aXN0b3JpY2Fs 95698
+VGFuZ2dhbA== 95699
+IENPVVJU 95700
+IG9ic2N1cmVk 95701
+IGxhbmRzbGlkZQ== 95702
+IGJlZHNpZGU= 95703
+IGJhcmFuZw== 95704
+LWVsZWN0ZWQ= 95705
+IGNlcmFtaWNz 95706
+LS0qLwo= 95707
+IFdhbm5h 95708
+RHlu 95709
+IHZlcnNjaGllZGVuZQ== 95710
+IGluZHVjaW5n 95711
+IGZsdXRl 95712
+LkFwcGVuZFRleHQ= 95713
+IFp1Yg== 95714
+IFB1bGl0emVy 95715
+OmJvdGg= 95716
+Lm1heExlbmd0aA== 95717
+LlByb3BlcnR5VHlwZQ== 95718
+YXd5 95719
+aXRlbU5hbWU= 95720
+IE5hcnJhdGl2ZQ== 95721
+cmV2b2x1dGlvbg== 95722
+IGhhbHRlbg== 95723
+IEVycm9yUmVzcG9uc2U= 95724
+Z2F0aGVy 95725
+L3V0aWxpdHk= 95726
+Oicn 95727
+IEtlZQ== 95728
+IE9seW1waWE= 95729
+Q2xpbmljYWw= 95730
+OmdyZWVu 95731
+IFBsZXg= 95732
+IEtlbnNpbmd0b24= 95733
+IFBob25ldGlj 95734
+IGRpc3RyaWJ1dGVz 95735
+X2V4ZW1wdA== 95736
+V2F0Y2hpbmc= 95737
+Lk1pc2M= 95738
+IGRvbWFpbmU= 95739
+OiIu 95740
+44OV44I= 95741
+X01PRFVMRVM= 95742
+IGhhYmxhcg== 95743
+IExhb3M= 95744
+LnNldFRleHRTaXpl 95745
+LnBhdXNlZA== 95746
+X1RX 95747
+IG92ZXJ3aGVsbQ== 95748
+IGhlbWF0 95749
+THVja2lseQ== 95750
+IFNFTlQ= 95751
+IEludmVzdGlnYXRvcnM= 95752
+Pih7 95753
+KGZvdXQ= 95754
+IEFVWA== 95755
+LnJhd1F1ZXJ5 95756
+LXN0cm9uZw== 95757
+IHJlc2VtYmxlZA== 95758
+IFNoYWZ0 95759
+IFhJSUk= 95760
+c3VnZ2VzdA== 95761
+IHNpbmdhcG9yZQ== 95762
+X2FiaWxpdHk= 95763
+JGs= 95764
+CWlOZEV4 95765
+XEltYWdl 95766
+Q2FkYXN0cm8= 95767
+LnBpdm90 95768
+IG1hbnBvd2Vy 95769
+X2F0dHM= 95770
+LnNldEZpbGw= 95771
+ZXdvcmxk 95772
+Y29uc3Rz 95773
+R2V0V2lkdGg= 95774
+IGdyYXR1aXRh 95775
+IFBldHI= 95776
+LWFuc3dlcg== 95777
+IEhlbWlzcGhlcmU= 95778
+IENhag== 95779
+IFRyYWRlcw== 95780
+xIdp 95781
+IEZyZWRkeQ== 95782
+T25DaGFuZ2U= 95783
+IHBvcm5vZ3JhZmlh 95784
+IFNVTU1BUlk= 95785
+X21lYXM= 95786
+IERSSVZF 95787
+IENyZWU= 95788
+X21hbGU= 95789
+IHN1aw== 95790
+IG1hbmV1dmVycw== 95791
+c2V0VmlzaWJpbGl0eQ== 95792
+YWxsaQ== 95793
+IGRpc2NyZXRpb25hcnk= 95794
+cmVnYXRpb24= 95795
+WVNUSUNL 95796
+OmhyZWY= 95797
+IHRhcmFm 95798
+IGNodQ== 95799
+IEBb 95800
+RW5vdWdo 95801
+LlRyYW5zZmVy 95802
+SWZOZWVkZWQ= 95803
+OildKQ== 95804
+CSAgICAgICAgICAgICAg 95805
+W2F4aXM= 95806
+VHJhbnNsYXRpb25z 95807
+LnNlcnZlcnM= 95808
+IEtFRVA= 95809
+JywpCg== 95810
+c3BvbnNvcg== 95811
+YXJjaGl2ZXM= 95812
+LlVsdHJhV2lu 95813
+IEhvbm91cg== 95814
+J10pKTs= 95815
+IGluZWxpZ2libGU= 95816
+IEFudHdvcnRlbg== 95817
+IEFwcGxpY2F0aW9uRXhjZXB0aW9u 95818
+IGNhdGVnb3JpZQ== 95819
+IFdFSUdIVA== 95820
+IEJ1bmR5 95821
+IFBJWEVM 95822
+IGR1a2U= 95823
+VG93ZXI= 95824
+U2NvdGxhbmQ= 95825
+IHJlZmVyZWVz 95826
+IEFzc2VtYmx5VHJhZGVtYXJr 95827
+CXN0YXJ0QWN0aXZpdHk= 95828
+Lk9uZVRvT25l 95829
+IEF1c3dhaGw= 95830
+IHN0cmVuZ3RoZW5z 95831
+LlF1aXQ= 95832
+IFVSTFJlcXVlc3Q= 95833
+ZWVj 95834
+IHJlZ2lzdHJhemlvbmU= 95835
+IGhvc2Vz 95836
+QWN0dWFsaXphcg== 95837
+L2FycmF5 95838
+IGNvbnN0cnVjdGlvbnM= 95839
+Y2Nk 95840
+IEZpbGVOb3RGb3VuZEVycm9y 95841
+VGjDqm0= 95842
+KHJlc3VsdGFkbw== 95843
+IFNFUklFUw== 95844
+U3BlYWs= 95845
+X0FIQg== 95846
+QmxvY2tlZA== 95847
+LWZvbnRhd2Vzb21l 95848
+Ol0p 95849
+b2JibGU= 95850
+KGxpbmtz 95851
+IENhdGFsb25pYQ== 95852
+R2VW 95853
+LkRhdGVGb3JtYXQ= 95854
+IGZsZWE= 95855
+LmVm 95856
+IHNvbGljaXR1ZA== 95857
+IERZ 95858
+Y29kZWdlbg== 95859
+eXRoZQ== 95860
+IGVwb2xs 95861
+X1RE 95862
+IGFmZmlybWF0aW9u 95863
+X2Zh 95864
+SVNUQQ== 95865
+IEVhdG9u 95866
+Y3JlYXRlUXVlcnk= 95867
+IGxvZ2lzdGljYWw= 95868
+IFJheWNhc3RIaXQ= 95869
+IGNhdWxpZmxvd2Vy 95870
+IHVsY2Vy 95871
+LkFscGhh 95872
+aW5rZQ== 95873
+Wy4u 95874
+RVhBTVBMRQ== 95875
+LXdhZ2U= 95876
+IHN0YXRp 95877
+ZWN0aXZl 95878
+LmdldE1pbg== 95879
+IFNVQkpFQ1Q= 95880
+IEF1ZGlvTWFuYWdlcg== 95881
+enphcmVsbGE= 95882
+IFNlbGVjdExpc3RJdGVt 95883
+ICQNCg== 95884
+IG9oaW8= 95885
+IFRhaG9l 95886
+IGtXaA== 95887
+cXVlcnlTdHJpbmc= 95888
+IGRlcGFydGFtZW50bw== 95889
+PWFkbWlu 95890
+IHdvcmtzdGF0aW9u 95891
+KSsrOwo= 95892
+SGVhZGVySW5TZWN0aW9u 95893
+IFRyaXVtcGg= 95894
+Q2hhcmxvdHRl 95895
+IFNNQQ== 95896
+Q8OzbW8= 95897
+IHZlcm0= 95898
+IHRoZWFubw== 95899
+Ymdjb2xvcg== 95900
+XCIiLAo= 95901
+IFJlbWluZGVy 95902
+QmlsbHk= 95903
+b3JhbFR5cGU= 95904
+Z2ViZXI= 95905
+KGNsb25l 95906
+IEt1dA== 95907
+Lz4u 95908
+QXBvbGxv 95909
+IHNobA== 95910
+Wkg= 95911
+VGh1bmRlcg== 95912
+IGdpZnM= 95913
+X2tlbGFz 95914
+IFJvdGhz 95915
+IH0o 95916
+IEJyb2FkY29t 95917
+IERlcHRocw== 95918
+CUlOTkVS 95919
+cGFyY2Vs 95920
+IGVqZXJjaWNpbw== 95921
+IGluZGVwZW5kZW50cw== 95922
+aWxsb3c= 95923
+ZXhlY3V0YWJsZQ== 95924
+RXZlbnRv 95925
+IHpvc3Q= 95926
+IEhNQUM= 95927
+W0RsbEltcG9ydA== 95928
+YWxsZXM= 95929
+X2Rlcml2YXRpdmU= 95930
+QXBpS2V5 95931
+IHN0ZXBwZXI= 95932
+PXBsdA== 95933
+Z2V0SW5kZXg= 95934
+IHZhbGV1cnM= 95935
+UG9saXRpY3M= 95936
+IElEWA== 95937
+IFVzYQ== 95938
+IExUQw== 95939
+Lm1pbkxlbmd0aA== 95940
+c3Rybw== 95941
+X05D 95942
+IHN0YWduYW50 95943
+IG1vbnRhZ2U= 95944
+IGJsb3VzZQ== 95945
+ZWxpZ2U= 95946
+IHR1cnF1b2lzZQ== 95947
+IFN1cGVybg== 95948
+5q2z 95949
+dmFyYQ== 95950
+TmV3SXRlbQ== 95951
+X0VYVEVOREVE 95952
+IHdvb2R3b3JraW5n 95953
+IEVwaXNjb3BhbA== 95954
+LnBhaXI= 95955
+LlVzZXJJbmZv 95956
+IGRpcmVudA== 95957
+L3RjcA== 95958
+IGZyYXVnaHQ= 95959
+U2xhdmU= 95960
+LmdldExhdGl0dWRl 95961
+IFRvb2xib3g= 95962
+IGVhcm5lcnM= 95963
+IEhPVVI= 95964
+0LDQu9Cw 95965
+cG9zYWJsZXM= 95966
+Y29uZGl0aW9uYWxseQ== 95967
+X3h4 95968
+IGxhbsOn 95969
+KHJw 95970
+Q2hh 95971
+IGluY2Fybg== 95972
+LkRhbw== 95973
+Li8o 95974
+2KfZgQ== 95975
+VGQ= 95976
+Q0VG 95977
+L3JhbmQ= 95978
+LlZpcnR1YWw= 95979
+IGRiSGVscGVy 95980
+YW1pbmVz 95981
+IGx6 95982
+IHN0b3M= 95983
+IEF0a2lucw== 95984
+X0RE 95985
+aXRvcmlv 95986
+IG1pbmltaXNl 95987
+aGlwc3Rlcg== 95988
+KHsuLi4= 95989
+X1NSVg== 95990
+W2ZyYW1l 95991
+IFJva3U= 95992
+R1JQ 95993
+IGJhcmJlcg== 95994
+LkZlY2hh 95995
+IOuwnA== 95996
+IGdyYW51bGFyaXR5 95997
+IFNheWluZw== 95998
+X2xpa2VsaWhvb2Q= 95999
+LmJhckRvY2tDb250cm9s 96000
+IGZyb250bGluZQ== 96001
+IFdoYWxl 96002
+IHNtZWxsaW5n 96003
+IENvbnRyaWJ1dGlvbnM= 96004
+aXZhbnQ= 96005
+IGNyaXBwbGluZw== 96006
+cHJlbG9hZA== 96007
+IEhlcnJlcmE= 96008
+X1dBVENI 96009
+LWV0 96010
+OmV4cHI= 96011
+aW52ZXN0bWVudA== 96012
+ZWRlcmF0aW9u 96013
+X21nbXQ= 96014
+IGhvb3Bz 96015
+bW9ua2V5 96016
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAK 96017
+aW50ZXJzZWN0 96018
+IGNyaW1zb24= 96019
+IHN1b2k= 96020
+IFtdOgo= 96021
+WE9iamVjdA== 96022
+U0ZNTA== 96023
+RVFVQUw= 96024
+KCd+ 96025
+Y2VudHJvaWQ= 96026
+CXJlc3RvcmU= 96027
+IHByZW5hdGFs 96028
+IE1pc3RyZXNz 96029
+IHF4 96030
+dHBz 96031
+IHJlc3Bhd24= 96032
+IFtdKSwK 96033
+IGtvbnRyb2w= 96034
+44GC44KK44GM44Go44GG44GU44GW 96035
+TW9kdWxlTmFtZQ== 96036
+IG5ld1BhdGg= 96037
+IFBhZ2luZw== 96038
+IHJpbnM= 96039
+X21ha2Vy 96040
+XGJyaWVm 96041
+IGJpc2hlcg== 96042
+CVJlYWQ= 96043
+IGppaGFkaXN0 96044
+LnBlcnNpc3RlbnQ= 96045
+IFJvYm90cw== 96046
+L2dycGM= 96047
+IEpvdQ== 96048
+w6RyZW4= 96049
+77yM5Zyo 96050
+LXB0 96051
+IHpkYXJtYQ== 96052
+X05N 96053
+IENvbm5lY3Rpdml0eQ== 96054
+KGJj 96055
+IEZsb3JpYW4= 96056
+IFNvY2lvbG9neQ== 96057
+X3dv 96058
+QW5kU2VydmU= 96059
+XygpOwo= 96060
+IEZMVA== 96061
+X0RFUg== 96062
+IENvbm5pZQ== 96063
+IEJyb2FkY2FzdFJlY2VpdmVy 96064
+eyg= 96065
+IGNvbW1lbnRlcg== 96066
+IGRlbW9jcmF0 96067
+IGFtcGxpZnk= 96068
+LS0tLS0tLS0tLQ0K 96069
+IEhNUw== 96070
+IHRyYWlsZWQ= 96071
+IFNvZGE= 96072
+LXRlc3RlZA== 96073
+dWxpc3Q= 96074
+KW5ldw== 96075
+X1RocmVhZA== 96076
+VG9kZA== 96077
+IGRlYmlhbg== 96078
+Vms= 96079
+IHByZXNlbnRh 96080
+IGNvbWZvcnRz 96081
+IFdhc2hlcg== 96082
+IGdhcmc= 96083
+IEh1Y2thYmVl 96084
+INGB0LDQvA== 96085
+ICEi 96086
+QWRhcHRlck1hbmFnZXI= 96087
+IEVh 96088
+IEFzc29jaWF0aW9ucw== 96089
+CQkJCQkKCQkJCQkK 96090
+LmdldFdyaXRhYmxlRGF0YWJhc2U= 96091
+IG51Y2xlaQ== 96092
+w6lnb3JpZQ== 96093
+CSAgICAgICAgICAgICAgICAg 96094
+QkFC 96095
+IHVwa2VlcA== 96096
+IFR1cA== 96097
+LndpdGhPcGFjaXR5 96098
+bHlh 96099
+IGx1eGU= 96100
+dXBybw== 96101
+LWVuZw== 96102
+IHJlbGHDp8Ojbw== 96103
+IGtleVByZXNzZWQ= 96104
+IGh5YnJpZHM= 96105
+bGZ3 96106
+T3BlcmF0aW9uQ29udHJhY3Q= 96107
+IG5hbWVMYWJlbA== 96108
+IEhvcnQ= 96109
+X2dydXBv 96110
+IGJhbmRh 96111
+SXg= 96112
+SGVhbHRoeQ== 96113
+LmdldEVuZA== 96114
+ZnJhdQ== 96115
+KFNjZW5l 96116
+KENvbGxlY3Rpb25z 96117
+IFNraXBwaW5n 96118
+dWJv 96119
+IGbDvG4= 96120
+Ij4tLT4K 96121
+IGRyb2l0cw== 96122
+IGhvbW9zZXh1YWxz 96123
+IGFiZHVjdGlvbg== 96124
+CXdpZGdldA== 96125
+JGhlYWRlcnM= 96126
+IERBUg== 96127
+IGZsYQ== 96128
+dGhyZWF0 96129
+IGxvdWlz 96130
+LkdldFByb3BlcnR5 96131
+Ikp1c3Q= 96132
+KGZyYW1lcw== 96133
+cnlv 96134
+cHJvZmVzc2lvbg== 96135
+fGk= 96136
+7ZW07ISc 96137
+KHN2 96138
+IHVucmVjb2duaXplZA== 96139
+SW9uaWM= 96140
+RmFzaGlvbg== 96141
+U2NyZWVuU3RhdGU= 96142
+IEluY29taW5n 96143
+Tm90Tmls 96144
+IHN5bmNpbmc= 96145
+ZW1pZQ== 96146
+IHRoZXJtbw== 96147
+X3Byb2Nz 96148
+IGluY29uc2lzdGVuY3k= 96149
+cmVsaWdpb3Vz 96150
+Lm1q 96151
+IHBlcnNvbm4= 96152
+IG1vbWVudG9z 96153
+b3JhcmlseQ== 96154
+IOaK 96155
+X25ldXJvbnM= 96156
+SWxsdXN0cg== 96157
+aW1vdG8= 96158
+aWxpaw== 96159
+IFdvag== 96160
+VHJhZGluZw== 96161
+IGFwcGFyZQ== 96162
+IGVudHJlcHJpc2Vz 96163
+YWNoYXQ= 96164
+IMKs 96165
+IG5laWdo 96166
+QlVUVE9ORE9XTg== 96167
+IE1haGVy 96168
+YWdoYW4= 96169
+LWhhc2g= 96170
+ImY= 96171
+IGNsaWVudGVsZQ== 96172
+LmFkZEJ1dHRvbg== 96173
+CVNQ 96174
+UWk= 96175
+IGdyYXRlZA== 96176
+UE9TSVRF 96177
+Oj4= 96178
+IEhvd2VsbA== 96179
+IENvbXBhcmF0aXZl 96180
+IElTQw== 96181
+wq1p 96182
+T2NlYW4= 96183
+RGF2aXM= 96184
+IEZpbG1l 96185
+V2lucw== 96186
+IEpJVA== 96187
+b2NjZXI= 96188
+IENvcm0= 96189
+RU5DSE1BUks= 96190
+cmNoaXZl 96191
+aWNhw6fDo28= 96192
+IG1hdGE= 96193
+IGNoaWxkYmlydGg= 96194
+IE9wdGlvbmFsbHk= 96195
+RW5z 96196
+IHhodHRw 96197
+IGVsdWNpZA== 96198
+X09zY0luaXRTdHJ1Y3Q= 96199
+KSkpOgo= 96200
+IGludHVpdA== 96201
+IERvbmF0ZQ== 96202
+IGNvcnJlbGF0ZXM= 96203
+PkRlbGV0ZQ== 96204
+IGVxdWlwZQ== 96205
+IGJvY2E= 96206
+IGluZmxhdGFibGU= 96207
+ZXJhaA== 96208
+IERhdGVUaW1lS2luZA== 96209
+IGNhbHZlcw== 96210
+XExpYg== 96211
+IGVtbHJ0 96212
+IFRyaWxvZ3k= 96213
+IFBhbmM= 96214
+IER1aXM= 96215
+IHBlbMOtY3VsYQ== 96216
+V0FSRFM= 96217
+X0RFVEVDVA== 96218
+LXNlY3Rpb25hbA== 96219
+ZGhjcA== 96220
+Rm9yUm93 96221
+LWRlc3RydWN0 96222
+IFByZXNlbnRlcg== 96223
+L3NsaWNr 96224
+LG9u 96225
+IENpdGFkZWw= 96226
+bG9nZ2VkaW4= 96227
+X3N1YnR5cGU= 96228
+IHNpZ3Vl 96229
+IGN1cmluZw== 96230
+IEZpcmV3YWxs 96231
+IGZsdW9yZXNjZW5jZQ== 96232
+IEl0YWxpYW5z 96233
+0LjRgtGB0Y8= 96234
+LmdldFN0eWxl 96235
+SW5TZWNvbmRz 96236
+amll 96237
+LVNtaXRo 96238
+IHhsaW5r 96239
+IHN1Ym1pc3NpdmU= 96240
+0L7QvdGC 96241
+YXJib25hdGU= 96242
+IEZhdWw= 96243
+X2dvYWxz 96244
+IENvbW1pc3Npb25lcnM= 96245
+Y2hhcnRJbnN0YW5jZQ== 96246
+X1BPU1RGSUVMRFM= 96247
+IG1lZGlhbA== 96248
+IG1hbm9z 96249
+IGRlbHQ= 96250
+c3Zt 96251
+LkFwaXM= 96252
+ZXBoeQ== 96253
+IGFzeW1wdA== 96254
+IGFwcERlbGVnYXRl 96255
+IGltcHJvYmFibGU= 96256
+Y2th 96257
+c2ltZA== 96258
+L0Vycm9y 96259
+LuKAkw== 96260
+IFBUUw== 96261
+ZGVlcg== 96262
+IHNpbmE= 96263
+bWFnbml0dWRl 96264
+SURBREU= 96265
+J119Jw== 96266
+IG1heW9yZXM= 96267
+CWNvbW1lbnQ= 96268
+L2NvbnNvbGU= 96269
+IkA= 96270
+dm9sdA== 96271
+LnNlbGw= 96272
+IE1hY3k= 96273
+IG1lbG9k 96274
+IGltw6FnZW5lcw== 96275
+X2NoZw== 96276
+IGlub3V0 96277
+aWRlbnRl 96278
+KScpLAo= 96279
+ZG5p 96280
+LmJsb2I= 96281
+IHR5cG9ncmFwaHk= 96282
+IGVlcmll 96283
+X09JRA== 96284
+cGVzYW4= 96285
+YWphbg== 96286
+IGNob3BwaW5n 96287
+IGJsdWZm 96288
+YWRm 96289
+X2Jhc2Vz 96290
+LkZvcm1hdHRlcg== 96291
+IFwl 96292
+IFBhZ2VJbmZv 96293
+Q2Fycmllcg== 96294
+IENhbGlicmF0aW9u 96295
+Y29tbw== 96296
+LWJvZGllZA== 96297
+IGZpbmFuY2llcg== 96298
+IElOQQ== 96299
+LkVSUg== 96300
+IGhvb2RpZQ== 96301
+IFNhbml0eQ== 96302
+Z3VhcmRlZA== 96303
+Lm9wZW5kYXlsaWdodA== 96304
+SVNNQVRDSA== 96305
+SGlnaGxpZ2h0cw== 96306
+w7xuaw== 96307
+YW5pZW0= 96308
+YW5nZXJlZA== 96309
+YXNzaWdubWVudHM= 96310
+IHJlZ2lzdHJhZG8= 96311
+IFVQUEVS 96312
+YW1waWxrYW4= 96313
+YXNoaXJl 96314
+IE5pa29sYQ== 96315
+IENGTA== 96316
+IEhEQw== 96317
+IHBvaWRz 96318
+IElQcw== 96319
+IHByZXZlbnRhdGl2ZQ== 96320
+aXBzb2lk 96321
+aWZpeA== 96322
+LmNhbWVs 96323
+Lmdh 96324
+Vm9sdW1lcw== 96325
+LXN0ZQ== 96326
+WWFob28= 96327
+X3NpYmxpbmc= 96328
+SGlnaGVzdA== 96329
+b3B0Z3JvdXA= 96330
+IGt2aW5uYQ== 96331
+4oCd44CCCgo= 96332
+IEFwcGxpYW5jZXM= 96333
+ICI+PA== 96334
+JykiKQo= 96335
+aHR0 96336
+IElkZW50aWZpZWQ= 96337
+IHBlbmNpbHM= 96338
+IG1lbWJlcklk 96339
+IGFwcGVuZFN0cmluZw== 96340
+LmxvYWREYXRh 96341
+IG1vY2tNdmM= 96342
+IGp1Yg== 96343
+IFNsdXQ= 96344
+IFRhaXBlaQ== 96345
+c3RhdHQ= 96346
+UG9saXQ= 96347
+IHBhcnRhZ2Vy 96348
+RGlkQ2hhbmdl 96349
+SW5jcmVhc2Vz 96350
+KX0u 96351
+IEJhYmE= 96352
+X0NMSVA= 96353
+W3VuaXQ= 96354
+INC60LvRjtGH 96355
+IGFsY3VuaQ== 96356
+IExvbGE= 96357
+IGNsaW5naW5n 96358
+QFBvc3RNYXBwaW5n 96359
+KGNvbmNhdA== 96360
+IHNzaWQ= 96361
+IEZhdWM= 96362
+b2tpdA== 96363
+IFJlY29yZGVk 96364
+w6FsZXo= 96365
+KCQoJzw= 96366
+LmFzc2VydElzTm90 96367
+IGthbGk= 96368
+Vm9sdA== 96369
+IHdhcm1seQ== 96370
+IHNjYXJlcw== 96371
+Z2V0dGk= 96372
+ZsO8aHJ0 96373
+X2RvZXM= 96374
+LkVNQUlM 96375
+aW1hdGlvbnM= 96376
+IHNwcmluZ2ZveA== 96377
+IERlY29t 96378
+YXJjeQ== 96379
+IGdsaXRjaGVz 96380
+IE1vZmY= 96381
+IFZvbGw= 96382
+LmJldHdlZW4= 96383
+IGNvb3JkZW4= 96384
+IFBhcnRpY3VsYXJseQ== 96385
+R0JQ 96386
+IHNlbWJsZQ== 96387
+RWFzdGVybg== 96388
+X01TQg== 96389
+XSl7DQo= 96390
+bW9yZ2Fu 96391
+IEVWQUw= 96392
+ZGVyZQ== 96393
+SE9VU0U= 96394
+bW9pcmU= 96395
+aXN0aXF1ZQ== 96396
+X2xzdG0= 96397
+LWNvbW1pdA== 96398
+eXN0ZXJpb3Vz 96399
+IHR3aW5r 96400
+LXRodW1ibmFpbHM= 96401
+ZW7DrQ== 96402
+OicnLA== 96403
+IGJsYWNrb3V0 96404
+IEZsb29ycw== 96405
+IHNvZmFz 96406
+IG91aQ== 96407
+bGVzaG9vdA== 96408
+IFJhcQ== 96409
+LWFicw== 96410
+IGtyYQ== 96411
+TWluaW5n 96412
+c2hhZnQ= 96413
+LnNldENvbHVtbnM= 96414
+Q2xheno= 96415
+UFJFVFRZ 96416
+LnBsYXlsaXN0 96417
+6Zai 96418
+LVNhaGFyYW4= 96419
+TUlORw== 96420
+CWJs 96421
+6K6u 96422
+amY= 96423
+RE9DS0VS 96424
+aG9wZWZ1bGx5 96425
+KGlnbm9yZQ== 96426
+IFVzZXJzQ29udHJvbGxlcg== 96427
+IE1pdGFyYmVpdGVy 96428
+IExFUw== 96429
+SGFtaWx0b24= 96430
+LW1ldGFkYXRh 96431
+IEtL 96432
+aWt0aWc= 96433
+IHdvbGx0ZQ== 96434
+ZWdyYXRvcg== 96435
+XWJvb2w= 96436
+LGN1cnJlbnQ= 96437
+IHZhbHVlVHlwZQ== 96438
+IGV4Y2F2YXRpb24= 96439
+b2xhbmQ= 96440
+IHZlcnY= 96441
+L2ZpbGVwYXRo 96442
+QXV0aFByb3ZpZGVy 96443
+IHByb2NyYXN0 96444
+CVVMT05H 96445
+X01FTUJFUlM= 96446
+IHVwbGlmdA== 96447
+IEF1dG9ub21vdXM= 96448
+IGFydHdvcmtz 96449
+IE91dHJlYWNo 96450
+IHBvcmU= 96451
+SG9tZXBhZ2U= 96452
+RGlhbG9nVGl0bGU= 96453
+IEdlbmVyYXRpbmc= 96454
+UEFSU0U= 96455
+IHNlbWFuYXM= 96456
+IGh1bWFubw== 96457
+SlNHbG9iYWxTY29wZQ== 96458
+IHZvbHRl 96459
+IGJlbGxh 96460
+KGlzaW5zdGFuY2U= 96461
+IHBsYw== 96462
+XENhdGFsb2c= 96463
+IGVzdGVlbWVk 96464
+6Zu3 96465
+KHN1ZmZpeA== 96466
+IHN3ZWVwcw== 96467
+CU9SREVS 96468
+IGRvaXZlbnQ= 96469
+IFN3YXJt 96470
+IENvbXBpbGVk 96471
+Z2V0UGFnZQ== 96472
+QURS 96473
+LlJpY2hUZXh0Qm94 96474
+IE5hbWluZw== 96475
+YWdnZWQ= 96476
+IEdBTkc= 96477
+cmFzaW5n 96478
+b2RlbGVk 96479
+IGdhbGE= 96480
+IEpTTmFtZQ== 96481
+ZGRm 96482
+IGlsbHVzdA== 96483
+IExhbnNpbmc= 96484
+W3BvcnQ= 96485
+LWRlYXRo 96486
+IGRpbmhlaXJv 96487
+IEVpZ2h0aA== 96488
+IGJpYW4= 96489
+c3TDpQ== 96490
+IHZlcnNpw7Nu 96491
+IExpbmVhckdyYWRpZW50 96492
+IEhhcmRpbmc= 96493
+Liop 96494
+ZWN6eQ== 96495
+JGhlYWRlcg== 96496
+IHbDpXI= 96497
+VW5jaGVja2Vk 96498
+IGtvamU= 96499
+IFBhbGFkaW4= 96500
+KCkpKSw= 96501
+R2l2aW5n 96502
+KCl9KQo= 96503
+IGRpcHM= 96504
+RnJpZW5kbHk= 96505
+IHBvcnRyYXlz 96506
+IGhlbGl1bQ== 96507
+IGluc3VyZ2VuY3k= 96508
+X2V4cGlyeQ== 96509
+IHN0cmluZ0J5QXBwZW5kaW5nU3RyaW5n 96510
+IGFhbnRhbA== 96511
+c2xvcGU= 96512
+bWFzdA== 96513
+LmdldEludGVnZXI= 96514
+ICMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIw== 96515
+X1BJUEVMSU5F 96516
+IGRlbnNlbHk= 96517
+IG11dGF0aW5n 96518
+bWlkaQ== 96519
+IFNlaXQ= 96520
+YXluZQ== 96521
+Tk9XTEVE 96522
+IERlc21vbmQ= 96523
+IEZOYW1l 96524
+IE5haXJvYmk= 96525
+XENvbnRleHQ= 96526
+IGNhbGN1bGFy 96527
+LWRlbg== 96528
+IGNvdHQ= 96529
+XSk6DQo= 96530
+IFJlY29tbWVuZGF0aW9u 96531
+IFJvbGV4 96532
+IHZhbGlkYXRpb25SZXN1bHQ= 96533
+LnBhdA== 96534
+IG7DoHk= 96535
+IFJlc3RDbGllbnQ= 96536
+IEdQSQ== 96537
+IEFzaGV2aWxsZQ== 96538
+IE9TUA== 96539
+IFBFUk1JU1NJT04= 96540
+0JTQsNGC0LA= 96541
+L25vdGlmaWNhdGlvbg== 96542
+S25pZ2h0 96543
+X1dvcmQ= 96544
+IEJlbmRlcg== 96545
+cmFua2luZw== 96546
+IHBhcnRpZGE= 96547
+X3Jlc2VydmF0aW9u 96548
+zIA= 96549
+IG1OYW1l 96550
+IGdldGNo 96551
+IGJvcnI= 96552
+IGRpbGlnZW50 96553
+RGlzY3Vzcw== 96554
+5q2j5Zyo 96555
+YXBlYWtl 96556
+aW9uZWQ= 96557
+LU5hemk= 96558
+LmN1bQ== 96559
+IEtyb24= 96560
+PSQoJyM= 96561
+L3NpbmdsZQ== 96562
+IGVyb3Rpc2No 96563
+IFZpYg== 96564
+IHJhdGlmaWVk 96565
+IGNvbmNlcnRlZA== 96566
+IFJFR0FSRA== 96567
+IGRvYnI= 96568
+LkRyaXZlck1hbmFnZXI= 96569
+J3I= 96570
+UG9ydGFibGU= 96571
+CXN1aXRl 96572
+IHJlbGFjaW9uZXM= 96573
+IERvcA== 96574
+ZW1wbG9p 96575
+RE9C 96576
+IGNydW1icw== 96577
+IHhscw== 96578
+X0FwcGxpY2F0aW9u 96579
+KCc6Jyw= 96580
+IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQo= 96581
+bXNl 96582
+IGJlcms= 96583
+IFJldHVyblZhbHVl 96584
+IEJlbGx5 96585
+IGNhbWFy 96586
+IFBlZWs= 96587
+ZWxzaW5n 96588
+IG5vdGlmaWVz 96589
+IFRyaXN0YW4= 96590
+IEdBUg== 96591
+ZW1tZQ== 96592
+IEVsZXZhdGVk 96593
+X0NTVg== 96594
+KGNoYWxr 96595
+IHR3ZW50aWVz 96596
+IFNlYXJjaFJlc3VsdA== 96597
+PXNlYXJjaA== 96598
+IE1peGluZw== 96599
+w710 96600
+IHJlY3J1aXRlcg== 96601
+IElERU9HUkFQSA== 96602
+IEFnbw== 96603
+KE9wZXJhdGlvbg== 96604
+JHZhbHVlcw== 96605
+IHdvcmxkbHk= 96606
+IFJvc2VuYmVyZw== 96607
+IENvbmZpZ3VyZVNlcnZpY2Vz 96608
+Pio8Lw== 96609
+S0FOSkk= 96610
+IGNodWNrbGVk 96611
+IHN0cmlmZQ== 96612
+IEJvbWJheQ== 96613
+IEJBQ0tHUk9VTkQ= 96614
+ZXRhdA== 96615
+ZW51bWVyYXRvcg== 96616
+IHPDu3I= 96617
+IOOBrg== 96618
+X3BlZGlkbw== 96619
+L0Rr 96620
+IGplYW4= 96621
+X0NvbHVtbg== 96622
+IGhlYXRtYXA= 96623
+LlBlbmRpbmc= 96624
+IHVuc3VjY2Vzc2Z1bGx5 96625
+CWVw 96626
+IHNpbmZ1bA== 96627
+IEFudG9ueQ== 96628
+X0ZPQ1VT 96629
+VGV4dExhYmVs 96630
+X3JlYWN0aW9u 96631
+IElEaXJlY3Q= 96632
+IGNhcm5pdg== 96633
+V29ya3NoZWV0 96634
+IHN1ZWRl 96635
+CVJUQ1Q= 96636
+IHNldGJhY2tz 96637
+LnVuYmluZA== 96638
+IHNpw6g= 96639
+TGlxdWlk 96640
+X1JFTkRFUkVS 96641
+TWF0ZQ== 96642
+IE1pbGxlbm5pYWxz 96643
+IGVwb3h5 96644
+aXp6aW5lc3M= 96645
+IGJyYXppbA== 96646
+0L7RgdGC0Yw= 96647
+JnZpZXc= 96648
+L2dwaW8= 96649
+SmFtaWU= 96650
+LkdyYXZpdHk= 96651
+PSIuJF8= 96652
+IFZBTg== 96653
+IElEUg== 96654
+YXBwZWFyYW5jZQ== 96655
+LlNlbGVuaXVt 96656
+TGVhcA== 96657
+LlJlbGF0aXZlTGF5b3V0 96658
+U2lnbmFscw== 96659
+QWNjZWxlcmF0aW9u 96660
+CUhBTkRMRQ== 96661
+L09wZW4= 96662
+IGdldExvZ2dlcg== 96663
+U3Bp 96664
+LXdyaXRpbmc= 96665
+INCy0YvQtw== 96666
+LXdvcnRoeQ== 96667
+IHdjcw== 96668
+IFFUaW1lcg== 96669
+IFBvbHltZXI= 96670
+IHZhbnQ= 96671
+CURlbGV0ZQ== 96672
+aXR0ZQ== 96673
+V2hpbHN0 96674
+IGFsZ3Vt 96675
+IHNoaWVsZGluZw== 96676
+IGttcw== 96677
+CSAgICAJCQk= 96678
+TWV0ZW9y 96679
+IGFnZ3JlZ2F0b3I= 96680
+IFNpbmQ= 96681
+SG9zdEV4Y2VwdGlvbg== 96682
+PScnLAo= 96683
+IEpTQnJhY2tldEFjY2Vzcw== 96684
+T05P 96685
+X0J1aWxk 96686
+IHN0cmlwcGVy 96687
+IExK 96688
+PENvbXBvbmVudA== 96689
+L3NvdXJjZXM= 96690
+IGVyZ29ub21pYw== 96691
+IEFjY3JlZA== 96692
+dW5jZQ== 96693
+b25pcw== 96694
+emVpZ3Q= 96695
+IFNrYXRl 96696
+IFJlY3RUcmFuc2Zvcm0= 96697
+SW5jb21wbGV0ZQ== 96698
+IGluZ2VuaW91cw== 96699
+IGNvaXNh 96700
+IGNpdHlOYW1l 96701
+aGFiaXQ= 96702
+X1RW 96703
+IEFOU1c= 96704
+Li4uIj4K 96705
+IHNub3Jr 96706
+X29wYWNpdHk= 96707
+IGluaXRXaXRoTmliTmFtZQ== 96708
+aWFkbw== 96709
+QUFD 96710
+IF0pLg== 96711
+O3o= 96712
+X3BhcmFncmFwaA== 96713
+IG5vc2Vz 96714
+c3RhbmRz 96715
+aWZy 96716
+X21F 96717
+SXJhcQ== 96718
+LlByZWRpY2F0ZQ== 96719
+ZW5haXJl 96720
+XV1dOwo= 96721
+IHVuaWRhZA== 96722
+IHJldGlyZWVz 96723
+X2hlbGxv 96724
+IG1vZGVsZQ== 96725
+IFVJVGFibGVWaWV3Q29udHJvbGxlcg== 96726
+ZndyaXRl 96727
+X251bWVybw== 96728
+X3Zpc2l0ZWQ= 96729
+IHJlY2ViZQ== 96730
+KE5vdGlmaWNhdGlvbg== 96731
+RmFudGFzdGlj 96732
+X3N1Ym1lbnU= 96733
+IFBFTQ== 96734
+IEN1cGVydGlubw== 96735
+YXBwcm94aW1hdGVseQ== 96736
+Y2xhc3NlZA== 96737
+LlJlYWRTdHJpbmc= 96738
+IGRvbWljaWxl 96739
+X1BX 96740
+IGJhbGxwYXJr 96741
+IEthbGU= 96742
+Y29udHJh 96743
+X2Zhdm9yaXRl 96744
+L29m 96745
+UXVpdGU= 96746
+IE9UQQ== 96747
+IGFjY2VsZXJvbWV0ZXI= 96748
+ZGlkbg== 96749
+fF4= 96750
+IFJvaGluZ3lh 96751
+aXZpY3Jt 96752
+YW5uYWJpbg== 96753
+0L7QsdGL0YLQuA== 96754
+b3JhZG8= 96755
+Jykr 96756
+SGF1bnRlZA== 96757
+LElE 96758
+KFVJQWxlcnRBY3Rpb24= 96759
+dXJ2 96760
+X2JlbA== 96761
+IE1leGljYW5z 96762
+L3Rlcm1z 96763
+IFBhaW50ZXI= 96764
+SW5wdXRMYWJlbA== 96765
+IFZpbmNp 96766
+IFJvc2ll 96767
+XHVj 96768
+PE1lbnU= 96769
+IGNvb2xhbnQ= 96770
+KGN1cnJlbnRVc2Vy 96771
+X2R1YWw= 96772
+KSJ9LAo= 96773
+JnA= 96774
+IGNvbnZlcmdlZA== 96775
+IHJlc3RyYWlu 96776
+IFl1Z29zbGF2aWE= 96777
+PXRhcmdldA== 96778
+IGltcHVscw== 96779
+ZHNh 96780
+U2VhcmNoVHJlZQ== 96781
+IGhib3g= 96782
+IEltcHJlc3M= 96783
+wqfDgw== 96784
+Z2V0RnVsbFllYXI= 96785
+KGRh 96786
+IFlZUw== 96787
+LmFsaWdubWVudA== 96788
+LkdldFRleHQ= 96789
+LnRva2VuaXpl 96790
+IE9seW1wdXM= 96791
+IG11cmt5 96792
+b3Jlc3RhdGlvbg== 96793
+IGRpc3NhdGlzZmFjdGlvbg== 96794
+CVRBcnJheQ== 96795
+X2tzZXM= 96796
+LkFkZFNpbmdsZXRvbg== 96797
+IFN0YXJ0VGltZQ== 96798
+IGZhbmF0aWM= 96799
+ICAgICAgICAgICAgICAgICAgICAJ 96800
+IGVudGl0eVR5cGU= 96801
+Lm92ZXJyaWRl 96802
+IC0tLS0tLS0tLS0tLS0= 96803
+IERhdGFncmFt 96804
+Zm91dA== 96805
+KHdpdGhJZA== 96806
+ICNfXw== 96807
+n+iDvQ== 96808
+ZWt5bGw= 96809
+LmZyaWVuZHM= 96810
+YW1lbGVvbg== 96811
+IHphY2g= 96812
+LnNpbXBsZUJ1dHRvbg== 96813
+cmV0b3Jubw== 96814
+IGtvbms= 96815
+L3NtYWxs 96816
+IFF1aWNrbHk= 96817
+dW5yZWFk 96818
+RG9uYXRl 96819
+RGV0YWlsVmlldw== 96820
+IGR1YQ== 96821
+IHBlbmV0cmF0ZWQ= 96822
+T01VWA== 96823
+IG5pcg== 96824
+X3BkYXRh 96825
+Il0sWyI= 96826
+IGxvd2Vz 96827
+IGRvcGluZw== 96828
+IGFzeW1tZXRyaWM= 96829
+IG5lZWRsZXNz 96830
+b3VyY2Vt 96831
+IHVwcm8= 96832
+IEd1enpsZQ== 96833
+YWZi 96834
+IHNleHRyZWZmZW4= 96835
+LWNvbGxhcg== 96836
+IGNvbG9zc2Fs 96837
+TW9ua2V5 96838
+bmlzaA== 96839
+IGhhbmRsZU1lc3NhZ2U= 96840
+SW5jcmVhc2Vk 96841
+KmR4 96842
+IENoYXR0YW5vb2dh 96843
+Zm9yZw== 96844
+IE9yZGVu 96845
+IHNocmk= 96846
+IFZhbmQ= 96847
+ICJAIg== 96848
+SW1hZ2VTaGFycA== 96849
+IFdpbGRjYXRz 96850
+cG9uaWJsZQ== 96851
+LnNjZW5lcw== 96852
+IHBhaW50ZXJz 96853
+IFBmaXplcg== 96854
+IFphaA== 96855
+VG9Mb2NhbA== 96856
+IEZsYW0= 96857
+IMOpdGFpZW50 96858
+KSle 96859
+IFNhbmRib3g= 96860
+IFRSQURF 96861
+IGNocm9taXVt 96862
+IGFjY2xhaW0= 96863
+IHBhY21hbg== 96864
+wrR0 96865
+KXJlYWRlcg== 96866
+TWFyaQ== 96867
+LkRpc3BhdGNoZXI= 96868
+LkFETUlO 96869
+IFJlbWVk 96870
+U3dlZGVu 96871
+IG92ZXJsYXlz 96872
+LmVy 96873
+IHBhbmc= 96874
+IGNsZWFubHk= 96875
+YXZlbnBvcnQ= 96876
+VG95b3Rh 96877
+cGF0Y2hlcw== 96878
+IHZ0eA== 96879
+IEVpcw== 96880
+Y2xhZG8= 96881
+IFJpdGNo 96882
+Uk9MUw== 96883
+IGhhZGU= 96884
+IGNvbnNwaWN1b3Vz 96885
+IGRvY2tz 96886
+KGpx 96887
+IFByZW1pZXJzaGlw 96888
+IEJleg== 96889
+IOKElg== 96890
+INGD0YHQuw== 96891
+X3RvdGFscw== 96892
+IHByb3Zh 96893
+IEN1ZQ== 96894
+IHNhw7pkZQ== 96895
+IEdhbWVDb250cm9sbGVy 96896
+SU1JWkU= 96897
+LHBvcnQ= 96898
+44CCKA== 96899
+LkNkZWNs 96900
+SW5zdGFudGlhdGlvbkV4Y2VwdGlvbg== 96901
+IGNvbGxhZ2U= 96902
+IElPQw== 96903
+IGJhaXM= 96904
+IG9uRmluaXNo 96905
+LXN0YXJz 96906
+c2V0U2l6ZQ== 96907
+IG1vZ3Vs 96908
+IGRpc2lsbHVzaW9u 96909
+IGNoZXZ5 96910
+KFNjaGVkdWxlcnM= 96911
+KElS 96912
+X2xvY3M= 96913
+IGNhbm5vbnM= 96914
+IGNhbmNlbGxpbmc= 96915
+L2J1cw== 96916
+IGJ1Zmlv 96917
+IFlvdXJz 96918
+IFBpa2FjaHU= 96919
+IHRlcm1l 96920
+csOl 96921
+ZmFocmVu 96922
+IG93bmVySWQ= 96923
+IG9ibGlnYXRvcnk= 96924
+IGN1bHA= 96925
+IGFjaWRpdHk= 96926
+LW11bHQ= 96927
+IEJhbWJvbw== 96928
+ICciPg== 96929
+X2dz 96930
+IGNvbXBpbA== 96931
+bmFyZA== 96932
+LWV4Yw== 96933
+IHJoeW1l 96934
+IGJ1dHRv 96935
+c2F5cw== 96936
+YW50YXN5 96937
+67g= 96938
+IGNpdHTDoA== 96939
+IGNoZWc= 96940
+VGltZVN0cmluZw== 96941
+IHBvc2l0aXZpdHk= 96942
+IERhYmVp 96943
+IHdhbmc= 96944
+IGVzY3Jl 96945
+ImM= 96946
+CXZpZGVv 96947
+IFJhbmtlZA== 96948
+LnN0cmluZ3M= 96949
+Pj4+KA== 96950
+INC40L3RgtC10YA= 96951
+IHJlc3Rh 96952
+WzosOg== 96953
+IHJlbmRyZQ== 96954
+IGRlc2Vy 96955
+Sm9z 96956
+IGRpc3J1cHRpb25z 96957
+INC+0L/QtdGA 96958
+c2FtcGxpbmc= 96959
+c3VwcHJlc3M= 96960
+IGNvbnRhaW5lclZpZXc= 96961
+IFNlYW1sZXNz 96962
+IGFpcnk= 96963
+IG9ubG9hZA== 96964
+LldpbmRvd01hbmFnZXI= 96965
+IFBMQQ== 96966
+YnJhY28= 96967
+LnNldFBvc2l0aXZlQnV0dG9u 96968
+IHBkdQ== 96969
+IGdzaQ== 96970
+IENsaQ== 96971
+X2dyYWRpZW50cw== 96972
+0Y/QtA== 96973
+IFdoaXNwZXI= 96974
+Y3N0ZGludA== 96975
+IGzDpG5n 96976
+IGZvcm11bGF0aW9ucw== 96977
+w6lub20= 96978
+b3VybmVtb3V0aA== 96979
+WyRf 96980
+IG9yZGluYXJpbHk= 96981
+LnNldFVzZXJuYW1l 96982
+IGZhY3VsdGllcw== 96983
+TUlUVEVE 96984
+L3ZhbHVlcw== 96985
+IHdlaXI= 96986
+IEFwdA== 96987
+TVo= 96988
+CWNm 96989
+dWNrZW4= 96990
+CQkJCQkJCQkJCQkJCQkJCQkJCQk= 96991
+ZGVmZW5zZQ== 96992
+W2lWYXI= 96993
+IEJ1c2luZXNzRXhjZXB0aW9u 96994
+U2VsZWN0b3Jz 96995
+KGNvb3JkaW5hdGVz 96996
+IFJlc2V0cw== 96997
+IERyaW5rcw== 96998
+b2xlYW5z 96999
+KHN0eXB5 97000
+X0lPQw== 97001
+Lnh4eA== 97002
+IFNsYXRlcg== 97003
+IEJlbGl6ZQ== 97004
+IC8qKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKio= 97005
+YWRkaW4= 97006
+X2VwaXNvZGVz 97007
+IGlzY2hlbQ== 97008
+bGVnYWxBcmd1bWVudEV4Y2VwdGlvbg== 97009
+RGFubnk= 97010
+IHBhcmVk 97011
+LmNvZGVoYXVz 97012
+IEFzc3k= 97013
+CVJlY3Q= 97014
+4p4= 97015
+Lmxpc3Rh 97016
+INCy0LDRiA== 97017
+IHZldHM= 97018
+SFdORA== 97019
+aXNvbmVy 97020
+IHhv 97021
+IG9yYWxseQ== 97022
+IFN0bXQ= 97023
+LnJubg== 97024
+IERQSQ== 97025
+IFN0cmlrZXM= 97026
+LnNldFZpZXdwb3J0Vmlldw== 97027
+IOiHquWKqOeUn+aIkA== 97028
+WUVMTE9X 97029
+R0xlbnVt 97030
+cGFydG5lcnM= 97031
+IEltcGxpY2l0 97032
+IHRha28= 97033
+4oCZZWxsZQ== 97034
+IGVybcO2Zw== 97035
+dG90YWxDb3VudA== 97036
+R2ls 97037
+CXdvcms= 97038
+IHByYXRpYw== 97039
+aW5hdGk= 97040
+YWJpZXM= 97041
+IFNraW5uZXI= 97042
+IHNwaXJpdGVk 97043
+IHBhbmNyZWF0aWM= 97044
+IGhkZg== 97045
+J2Vt 97046
+IHBzeWNob3Npcw== 97047
+b2xpY2l0 97048
+ICJ7Ig== 97049
+X2F0dWFs 97050
+IMOpbGVjdA== 97051
+VEVBTQ== 97052
+IGRhaw== 97053
+IFNXQVQ= 97054
+LkZyYWdtZW50TWFuYWdlcg== 97055
+IHByb3Zpc2lvbmluZw== 97056
+bGlmZXRpbWU= 97057
+X0VYVEVOU0lPTlM= 97058
+IENBU0NBREU= 97059
+ICFb 97060
+KEtQ 97061
+IHZlbQ== 97062
+IEludGVycmFjaWFs 97063
+J119LAo= 97064
+c3BhY2Vy 97065
+X2t2 97066
+V2FyZWhvdXNl 97067
+UkRE 97068
+X2ZzbQ== 97069
+LlN0cmV0Y2hJbWFnZQ== 97070
+LFllcw== 97071
+IFJlZnVnZWU= 97072
+IEJyaW5naW5n 97073
+IHbDoWxpZG8= 97074
+LmludGVyc2VjdGlvbg== 97075
+IHNwb29reQ== 97076
+X3BvcnRhbA== 97077
+IG1vdGg= 97078
+IFpvZGlhYw== 97079
+IFNPQ0lBTA== 97080
+TWltZVR5cGU= 97081
+J119fTwv 97082
+IHJlc2l6YWJsZQ== 97083
+5Lqb 97084
+KHBoYXNl 97085
+KG1hcHBlZEJ5 97086
+IG11bmRpYWw= 97087
+IGNvbnZv 97088
+L2xlZnQ= 97089
+L2RvY3VtZW50cw== 97090
+d2FzaGluZw== 97091
+IEFtw6lyaWNh 97092
+X3F1b3Rh 97093
+LnBvc3Rlcg== 97094
+J10iKTsK 97095
+IHN0ZWxsdA== 97096
+IERJU0NMQUlNRVI= 97097
+W29wdA== 97098
+IGVkcw== 97099
+IFJhY2Vz 97100
+dmVudGFz 97101
+IHB6 97102
+IENhcGFj 97103
+IFVzZXJEYW8= 97104
+aXRlc3Q= 97105
+UHJvdmVlZG9y 97106
+IFNob3RndW4= 97107
+IHRoaXJzdHk= 97108
+IEJhbGFuY2Vk 97109
+aXF1ZXRh 97110
+IGhlYWxlcg== 97111
+LyIp 97112
+LlNkaw== 97113
+IHRlcnQ= 97114
+ImRhdGE= 97115
+X3Byb3ZpbmNl 97116
+LkF1dG9tYXRpb24= 97117
+IGZvbnRXaXRoTmFtZQ== 97118
+X0FOVA== 97119
+55WM 97120
+b29kbGVz 97121
+IFJFUFJFU0VOVA== 97122
+X0dQUw== 97123
+IHBlcnN1YXNpb24= 97124
+IERpc2N1c3Npb25z 97125
+IGZyZWQ= 97126
+TkVH 97127
+OmJvcmRlcg== 97128
+CWluaXRpYWxpemU= 97129
+CWdsb2c= 97130
+LWNhcGl0YWw= 97131
+IEltVmVj 97132
+IGRldmlz 97133
+Q2FuZGlkYXRlcw== 97134
+LmFuaW1hdGlvbnM= 97135
+IHJhZ2F6emk= 97136
+IFByb21ldGhldXM= 97137
+IEtpZGQ= 97138
+IHByb2dyYW1tYQ== 97139
+Q2VydGlmaWNhdGVz 97140
+Q29udGE= 97141
+LmVzcHJlc3Nv 97142
+IOuQmA== 97143
+IGJlaWRl 97144
+6ZmG 97145
+LmdldFJhdw== 97146
+IEZ1bGxOYW1l 97147
+IGlhbQ== 97148
+KCopKA== 97149
+bWFpZHM= 97150
+Qkg= 97151
+IENvbnNwaXJhY3k= 97152
+X0RV 97153
+IGJsYXRhbnRseQ== 97154
+IFx8 97155
+IFdpZw== 97156
+IENvbmo= 97157
+UmVuZGVyaW5nQ29udGV4dA== 97158
+TWl0Y2g= 97159
+IGFsbGVsZXM= 97160
+IOazqOaEjw== 97161
+IHJpbXM= 97162
+IE5laWdoYm9y 97163
+IEt5bGll 97164
+LnBhcnR5 97165
+dG9ycw== 97166
+IOyhsO2ajA== 97167
+IHdlcw== 97168
+IENyYWZ0aW5n 97169
+WyIu 97170
+LnNwb25nZQ== 97171
+IOqx 97172
+SXNsYW1pYw== 97173
+IHByb3NlY3V0aW5n 97174
+IHdpaw== 97175
+Lm9zZ2k= 97176
+b25pbmdlbg== 97177
+R3JhbW1hcg== 97178
+J2lt 97179
+IGF4aWFs 97180
+Q2xlYW5pbmc= 97181
+LmdldEV4dGVybmFsU3RvcmFnZQ== 97182
+PS4v 97183
+IGNocm9tYXQ= 97184
+0LXRhQ== 97185
+YWJheQ== 97186
+IGJvbGE= 97187
+LkFnZ3Jlc3NpdmU= 97188
+J10sJF8= 97189
+aXphY2Fv 97190
+UHJlcGFyaW5n 97191
+OkFueQ== 97192
+LkVOVEVS 97193
+LXdpbmRvd3M= 97194
+IGVucmFnZWQ= 97195
+X2RpY2U= 97196
+IGRldHRh 97197
+ZWNhbA== 97198
+X09SSUdJTg== 97199
+IC0tLS0tLT4= 97200
+X0JsdWU= 97201
+IGJvdGFuaWNhbA== 97202
+IGZyYWdz 97203
+IGZhbWlsaWFs 97204
+LWR1 97205
+IHNlaXppbmc= 97206
+KGJsb2Nrcw== 97207
+LnJk 97208
+LmNoZWNrTm90TnVsbA== 97209
+IG1pc2Vy 97210
+IG1heHg= 97211
+IEtuZWU= 97212
+Vmlld0l0ZW0= 97213
+SW5uZXJIVE1M 97214
+RGFuZ2Vy 97215
+KChfXw== 97216
+IHByenlwYWQ= 97217
+Y3JlYXRlVXJs 97218
+Kios 97219
+IERlY29yYXRpbmc= 97220
+QVRFR1k= 97221
+Pz4v 97222
+LkRlc2lnbmVy 97223
+aGV4ZGlnZXN0 97224
+IEV2ZXJ5d2hlcmU= 97225
+YWxsZXJpZXM= 97226
+LlRFWFRVUkU= 97227
+LkJsb2Nrcw== 97228
+emVsbA== 97229
+IHByZcOnbw== 97230
+U3VkZGVubHk= 97231
+aW5wdXRFbWFpbA== 97232
+KHN5bmM= 97233
+LmJk 97234
+Z29sZGVu 97235
+PicpOw== 97236
+IERpY2tpbnNvbg== 97237
+Pj4oCg== 97238
+IFFVRVVF 97239
+IGdldENvbHVtbg== 97240
+IFNBTkQ= 97241
+LnBpZWNl 97242
+bGljZXI= 97243
+Rmx1dHRlcg== 97244
+IGdldFZlcnNpb24= 97245
+IHJlc291cmNlSWQ= 97246
+b2ds 97247
+xYJhdw== 97248
+LkJyYW5jaA== 97249
+CXdlYg== 97250
+IGZyYW1lcmF0ZQ== 97251
+UFBQ 97252
+IGZyYXk= 97253
+Q05U 97254
+IGluZm9ybWF0aWU= 97255
+J10NCg0K 97256
+bmVhcw== 97257
+SGVhZGVyQ29kZQ== 97258
+IOa4 97259
+IHRyZw== 97260
+cmF3dHlwZXM= 97261
+SG9uZGE= 97262
+IG1hcmtldGVy 97263
+IHJlcXVlc3REYXRh 97264
+IFBn 97265
+CW5vdA== 97266
+IHBhZ2VJbmZv 97267
+IGFrdHVlbGxlbg== 97268
+44GV44KT 97269
+IEFNUw== 97270
+cHVzaFZpZXdDb250cm9sbGVy 97271
+CUFM 97272
+IHZlc3Rz 97273
+cHJvZHVjZQ== 97274
+LW3Dqm1l 97275
+IFJhaG1hbg== 97276
+RnVubnk= 97277
+RVo= 97278
+X1ZhbGlk 97279
+IHNxdWFkcm9u 97280
+IGxhc2g= 97281
+IGlybQ== 97282
+aWFzY28= 97283
+IFBhcmFu 97284
+IHBldGl0ZXM= 97285
+IERlY2F5 97286
+IHVuaW5pdGlhbGl6ZWQ= 97287
+cHJpdmlsZWdlZA== 97288
+IG1iZWR0bHM= 97289
+5aSH5rOo 97290
+IF4u 97291
+IGVjc3RhdGlj 97292
+RGV0cm9pdA== 97293
+IHBhcnRlbg== 97294
+IHNvdXZlbmly 97295
+LmdldExvZ2lu 97296
+0LzQvtGC0YA= 97297
+ZW7Dp8Ojbw== 97298
+IG3DrW5pbW8= 97299
+IEFjY2Vzc2Vk 97300
+cmnDsw== 97301
+TWlj 97302
+IFZvY2Fs 97303
+LlNldFN0cmluZw== 97304
+IG1lbnNhamVz 97305
+5YCN 97306
+IGF0dHJhdmVycw== 97307
+IEFwaA== 97308
+ICcpOw0K 97309
+w7xuZGU= 97310
+IGVuY2hhbnRlZA== 97311
+IFJvb3RTdGF0ZQ== 97312
+IENMT1NFRA== 97313
+CQkJCQkJCQkNCg== 97314
+IGNhbGllbnRl 97315
+b3JyaXM= 97316
+IHBoeXNpY2lzdHM= 97317
+aHduZA== 97318
+X3Zp 97319
+IHLDoXBpZG8= 97320
+IGNhcGl0YWxpemVk 97321
+ZWRCeQ== 97322
+IG1hY2hpbmluZw== 97323
+IGh1YmJ5 97324
+IFN0YWN5 97325
+LkJ1cw== 97326
+ZHJpbms= 97327
+SHVy 97328
+IHByb3BpYQ== 97329
+VW5pdFRlc3Q= 97330
+IG1pc2NvbmNlcHRpb24= 97331
+X18pKTsK 97332
+L2Rj 97333
+IE1heXdlYXRoZXI= 97334
+X21D 97335
+LmNyZWF0ZUZyb20= 97336
+IFFQYWludGVy 97337
+cm9wc3ljaA== 97338
+aW5uaXR1cw== 97339
+YXlhcw== 97340
+IGdlZw== 97341
+KGR3 97342
+IHVzYWRv 97343
+IHRyaWNrbGU= 97344
+IGFubmloaWw= 97345
+IFBhc3Rh 97346
+ICsrCg== 97347
+KEV4cGVjdGVkQ29uZGl0aW9ucw== 97348
+LnBvc3RWYWx1ZQ== 97349
+aWNhcA== 97350
+IERvbmV0c2s= 97351
+X3NvdXA= 97352
+LXB1Ymxpc2g= 97353
+IFBi 97354
+bWVudGlvbnM= 97355
+QUNDRVBU 97356
+LlB1bGw= 97357
+LOKAmeKAmQ== 97358
+IHJldGFyZGVk 97359
+X0FUT00= 97360
+IFRlcm1pbmF0b3I= 97361
+LWNvdXJ0 97362
+IENMTG9jYXRpb25Db29yZGluYXRl 97363
+IHJldmVyZW5jZQ== 97364
+IFNTQw== 97365
+dXRlbHk= 97366
+IFdPTg== 97367
+IEdTTA== 97368
+ZnJlaQ== 97369
+LmdldExvbmdpdHVkZQ== 97370
+IG9wZW5GaWxlRGlhbG9n 97371
+LkJ1dHRlcg== 97372
+LWltcG9ydGFudA== 97373
+X01BTlk= 97374
+IEdvbmc= 97375
+4oCcSG93 97376
+IGdvcmdl 97377
+PW1zZw== 97378
+IEV6ZWs= 97379
+Y3JlYXRlQ29tbWFuZA== 97380
+OmNoZWNrZWQ= 97381
+IGluZm9ncmFwaGlj 97382
+LldFU1Q= 97383
+RGlycw== 97384
+IGd1YXJkYQ== 97385
+IGJlZXRsZQ== 97386
+PHNtYWxs 97387
+LWFuZHJvaWQ= 97388
+IGNyZWRpdG9y 97389
+IE3DqWQ= 97390
+IGZpbmFsaXN0 97391
+IGFibA== 97392
+bmV2 97393
+X2ludGVyYWN0aW9u 97394
+IE1vbnRlcmV5 97395
+amFo 97396
+IGNhbmRpZXM= 97397
+IFF1aW5jeQ== 97398
+6Kqt 97399
+IGJhdGNoU2l6ZQ== 97400
+YWtpdA== 97401
+IG9iZQ== 97402
+KHBhcmE= 97403
+IGV4cGVyaW1lbnRlZA== 97404
+IGNvdW5jaWxsb3Jz 97405
+IGNsYXNoZWQ= 97406
+c3F1 97407
+LXN0cm9rZXM= 97408
+IEdL 97409
+IEV4cGlyZXM= 97410
+IHByb3NlY3V0aW9ucw== 97411
+IENyZWF0dXJlcw== 97412
+IHnDtg== 97413
+eGxpbQ== 97414
+X0lNUA== 97415
+RW50cnlQb2ludA== 97416
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA= 97417
+LkRlZmF1bHRDZWxsU3R5bGU= 97418
+IGJyZXZl 97419
+IEJyaXRhbm4= 97420
+IHN3ZWF0eQ== 97421
+IGxldGg= 97422
+IGZsYXNoYmFjaw== 97423
+cGVybWFuZW50 97424
+IEpESw== 97425
+X0RldGFpbHM= 97426
+RXVybw== 97427
+cHB0 97428
+IHJpY2hUZXh0Qm94 97429
+L2JvYXJk 97430
+IHRyYW5jZQ== 97431
+LmN5Y2xl 97432
+Jyk7Iik7Cg== 97433
+IHRveGlu 97434
+X2RlaW5pdA== 97435
+IG92ZXJhcmNoaW5n 97436
+IGNvbmZpZ3BhcnNlcg== 97437
+IEthd2FzYWtp 97438
+LnRodW1i 97439
+IHBsYXlh 97440
+IEpvc2Vm 97441
+K18= 97442
+IHplcm9lcw== 97443
+IGF1cA== 97444
+IEhhcmk= 97445
+Y29tbWl0dGVk 97446
+Tml0 97447
+LmZpbGVQYXRo 97448
+IERpc2FiaWxpdGllcw== 97449
+bWFudWZhY3Q= 97450
+LWFsaWduZWQ= 97451
+LlJFU0VU 97452
+IHJ1c3R5 97453
+RXk= 97454
+IG91c3RlZA== 97455
+Y29zYQ== 97456
+U3RydWN0dXJlZA== 97457
+LmdldEQ= 97458
+IHPDoWJhZG8= 97459
+PkxvYWRpbmc= 97460
+X21B 97461
+LmdldFJhbmRvbQ== 97462
+Ymxpbmdz 97463
+IGNoZWVzZXM= 97464
+dHRp 97465
+LuKAog== 97466
+IEJ1cmdlc3M= 97467
+ZW5kZXJpdA== 97468
+LicsDQo= 97469
+KCIiKw== 97470
+YWNi 97471
+JXA= 97472
+aW5kZXhlZA== 97473
+X3ByZWRpY2F0ZQ== 97474
+bmVzaWE= 97475
+IGJpZWQ= 97476
+IENJVA== 97477
+KFBvcw== 97478
+X3JhZGk= 97479
+5Lu35qC8 97480
+Qml6 97481
+IEFkb2xlc2NlbnQ= 97482
+IHZpw6pu 97483
+Y3ljbA== 97484
+X0NhbmNlbA== 97485
+IGNvbmNsdXNpdmU= 97486
+IGFwcGVsbGF0ZQ== 97487
+aW5mb3JtYXRpY3M= 97488
+U0o= 97489
+IGVsZWN0aXZl 97490
+cm9sZUlk 97491
+RmV0Y2hlcg== 97492
+CUNvbW1hbmQ= 97493
+KCIoJQ== 97494
+IGZhcnQ= 97495
+SUxB 97496
+Z2V0QmxvY2s= 97497
+QVVTRQ== 97498
+INC00LDQvQ== 97499
+IEFydGU= 97500
+IG5vdGlmeWluZw== 97501
+IGdlbGU= 97502
+LnNhbWU= 97503
+IFJlZ2Vs 97504
+IEJhxZ8= 97505
+LmNyZWF0aW9u 97506
+IFZO 97507
+X2NvbW11bml0eQ== 97508
+IHVuc3VzdGFpbmFibGU= 97509
+U0VY 97510
+IGdyaWRTaXpl 97511
+cmVzY2lh 97512
+YXZlcnNhYmxl 97513
+KCcsJylb 97514
+IFBoZWxwcw== 97515
+4buVaQ== 97516
+QU5DRUxFRA== 97517
+LUlT 97518
+LnJ1bm5lcnM= 97519
+IFN0b2tlcw== 97520
+LlByb2R1 97521
+IHdoaXBwaW5n 97522
+X2FjcXVpcmU= 97523
+IGludmVzdGlnYWNpw7Nu 97524
+ZnJpZWQ= 97525
+LmNvcHlXaXRo 97526
+IEhhcmRjb3Zlcg== 97527
+LVNl 97528
+4Z624Z4= 97529
+aW52aXRhdGlvbg== 97530
+bGVzYWk= 97531
+IERvcm0= 97532
+INGB0L/QuNGB0LrQsA== 97533
+IGNvbmNhdGVuYXRlZA== 97534
+b3BoaWw= 97535
+IHRoaW5rZXI= 97536
+L2ZvbnRhd2Vzb21l 97537
+IExlb3BhcmQ= 97538
+ICIvIik7Cg== 97539
+IHJlc2lkdWFscw== 97540
+IE1pY3Jvd2F2ZQ== 97541
+IGNvbmZvcm1l 97542
+dGhyb3A= 97543
+IGRpc2VtYg== 97544
+IE9NRw== 97545
+IERpc2NpcGxpbmU= 97546
+IEFjcm9iYXQ= 97547
+L3JlcG9zaXRvcnk= 97548
+ZGZh 97549
+X01FRA== 97550
+YnVmaW8= 97551
+IG3DqXRob2Rl 97552
+X0hPTEQ= 97553
+aWFzaQ== 97554
+X2xlZ2FjeQ== 97555
+KQ0NCg== 97556
+5qOA 97557
+R2V0UHJvY0FkZHJlc3M= 97558
+IHlheQ== 97559
+b3RlbmNl 97560
+b3JkZXJpZA== 97561
+LXR3 97562
+IGRlYXJseQ== 97563
+SW5jb21pbmc= 97564
+L2ls 97565
+IG5ldXJvcA== 97566
+dWN6 97567
+KTsNDQ0K 97568
+IElubm92YXRpdmU= 97569
+IHByb2Z1bmQ= 97570
+aWdtYXQ= 97571
+U2VsZWN0aW9uTW9kZQ== 97572
+cmVsZXZhbnQ= 97573
+LkdP 97574
+IGJydWlzZXM= 97575
+IHNhY2g= 97576
+b2RlZg== 97577
+IHJlaW1i 97578
+L2Rlc2t0b3A= 97579
+LXNwb3Q= 97580
+dW5kYW5jZQ== 97581
+RW50cm9weQ== 97582
+XGNvcmU= 97583
+IHN1Z2Vy 97584
+IE12Yw== 97585
+IEdOT01F 97586
+X2luZHg= 97587
+IFlZU1RZUEU= 97588
+IE1hdGxhYg== 97589
+IENJRg== 97590
+ICopKQ== 97591
+IHByb2R1Y3RMaXN0 97592
+IEFscmlnaHQ= 97593
+YWNlbWFyaw== 97594
+0YLQuNCy 97595
+bW9kaWZpY2F0aW9u 97596
+aW50ZXJuYXRpb25hbA== 97597
+IGhvbWVycw== 97598
+IGRpY3Rz 97599
+IFFGb250 97600
+LlNRTGl0ZQ== 97601
+IHRyYW5zcGxhbnRhdGlvbg== 97602
+IE1lc3NhZ2VCb3hCdXR0b24= 97603
+IEVsdmVz 97604
+J11dKQo= 97605
+KFFJY29u 97606
+IGNpbmVtYXM= 97607
+Q09PUkQ= 97608
+LUNoaW5h 97609
+IGto4bqpdQ== 97610
+5oiR55qE 97611
+IHNrdWxscw== 97612
+IHBhaW5zdGFraW5n 97613
+ZmNl 97614
+LlhSTGFiZWw= 97615
+IHNwZWNpZmllcg== 97616
+IHByZWZlcnJpbmc= 97617
+L2FjdGl2aXR5 97618
+KFBob3Rv 97619
+w6FsdA== 97620
+LmxvdA== 97621
+Jycu 97622
+YW5ub25jZQ== 97623
+Lmdvb2dsZWNvZGU= 97624
+LXBkZg== 97625
+IFBva2U= 97626
+X0FDTA== 97627
+IGVuZG93ZWQ= 97628
+ZGlzY292ZXI= 97629
+Lm9tZw== 97630
+IHdvb2RsYW5k 97631
+Lk1hZ2lj 97632
+IHZvbG9udA== 97633
+Tm90QWxsb3dlZA== 97634
+IGNoYXZl 97635
+Qk1X 97636
+JywnPScs 97637
+IFNJWA== 97638
+5oiR5Lus 97639
+IGtvc2hlcg== 97640
+IGFzcGlyYXRpb24= 97641
+aW50bA== 97642
+X3JlZnB0cg== 97643
+JysK 97644
+bWVudG9y 97645
+LmNsdWI= 97646
+V2luZG93U3RhdGU= 97647
+LkFSUg== 97648
+IHp6YQ== 97649
+IG1lc3NhZ2VUeXBl 97650
+LmVxdQ== 97651
+VGhvcg== 97652
+IGluanVzdA== 97653
+IGd1bXM= 97654
+IGJvcmRlclNpZGU= 97655
+Ly8vLy8= 97656
+IFRyYW5zbWl0 97657
+IGJ1ZnNpemU= 97658
+IGhhaw== 97659
+IGVsbGFz 97660
+UkFORE9N 97661
+CW1j 97662
+IHBlYQ== 97663
+ZWtv 97664
+ZG9jdW1lbnRv 97665
+IGh5c3Rlcmlh 97666
+IGFyZW5hcw== 97667
+IGd1bm1lbg== 97668
+IG1pa2U= 97669
+IGltcHVuaXR5 97670
+YXRpc2F0aW9u 97671
+X1plcm8= 97672
+X0NPTVBBTlk= 97673
+IEdvcnM= 97674
+IHVzZUNsYXNz 97675
+KHJlZGlz 97676
+IFJVTk5JTkc= 97677
+IEJhaXI= 97678
+dmVsdGU= 97679
+ICcsJy4= 97680
+0LDRgtGM0YHRjw== 97681
+w7ZzdA== 97682
+ZW5jb2RlVVJJQ29tcG9uZW50 97683
+X3Jlc3RyaWN0 97684
+IGRlY2Fscw== 97685
+IFBlZGlkbw== 97686
+IGFsdGVyY2F0aW9u 97687
+RGlzcGxheXM= 97688
+IEFwcGxpY2FudHM= 97689
+Q1VT 97690
+VGV4dGFyZWE= 97691
+IEFuZ29sYQ== 97692
+LmZ1dHVyZQ== 97693
+IFVTSE9SVA== 97694
+IHN1cHByZXNzaW5n 97695
+IHNldHplbg== 97696
+QVBvbHlub21pYWw= 97697
+IHRvY2g= 97698
+IGhhbGxtYXJr 97699
+ICQkJA== 97700
+IENIQVJTRVQ= 97701
+LnJwbQ== 97702
+IERpY2g= 97703
+LS0tLS0tLS0tLS0tLS0tLS0tLS0= 97704
+X3Bhcm0= 97705
+6L+Y 97706
+YWNjaW9uZXM= 97707
+aGFpdA== 97708
+V0FSREVE 97709
+X3JvdXRpbmc= 97710
+IE5PTQ== 97711
+IGVuY2xhdmU= 97712
+IExvdHRv 97713
+CWZy 97714
+Y29tcGxleENvbnRlbnQ= 97715
+IEJhbGxhcmQ= 97716
+a3ViZQ== 97717
+L3dpbg== 97718
+LmdldENvbHVtbk1vZGVs 97719
+X1JFUExBQ0U= 97720
+SGVhZGVyVmFsdWU= 97721
+IGVzdHVkaWFudGVz 97722
+IGFwaXM= 97723
+IGJwbQ== 97724
+IFR5cGVOYW1l 97725
+QW5kR2V0 97726
+cml0YQ== 97727
+UGxhbnM= 97728
+Pk5vdGU= 97729
+IGZldGlzY2g= 97730
+IHRvbmVk 97731
+X2dvdG8= 97732
+b25zZW5zZQ== 97733
+IG1vbGRz 97734
+IGluZmlsdHJhdGlvbg== 97735
+IEd1ZXJyZXJv 97736
+dWJibw== 97737
+Y2tp 97738
+KCQoIi4= 97739
+X2FjdGl2aXRpZXM= 97740
+KGNoYW5nZXM= 97741
+IG9mQXBw 97742
+IEtlcGxlcg== 97743
+IERlbXA= 97744
+IENvbnRpbmVudA== 97745
+LlRpY2tz 97746
+IFVuc2lnbmVk 97747
+IEphaHJlcw== 97748
+IGZyZXNobWVu 97749
+IEFyY2hpdmVk 97750
+INC60L7RgtC+0YDRi9C5 97751
+ICc6Og== 97752
+VHV0b3JpYWw= 97753
+Q2M= 97754
+IHRhYmxlTGF5b3V0UGFuZWw= 97755
+ZnJvbUpzb24= 97756
+LmxldmVscw== 97757
+X3RyYW5zaWVudA== 97758
+IGVuZG9yc2luZw== 97759
+IERJQw== 97760
+bGF1Zg== 97761
+IHNocmVk 97762
+X0VNSVQ= 97763
+aWZpY2FudGx5 97764
+QUxB 97765
+L3Byb3Rv 97766
+IG5hcnJvd2luZw== 97767
+VXRj 97768
+RmFjdG9ycw== 97769
+IHNlbnRpZW50 97770
+5p6Q 97771
+bGl4aXI= 97772
+IENST1NT 97773
+bWV0ZW9y 97774
+IGdyb2lu 97775
+IG1kYg== 97776
+IFJvdHRlcmRhbQ== 97777
+IGNvbWlkYQ== 97778
+IE9wQ29kZQ== 97779
+IERlZmF1bHRWYWx1ZQ== 97780
+UGVybWlzc2lvbnNSZXN1bHQ= 97781
+IGhldGVyb2dlbmVvdXM= 97782
+IG1vb3Q= 97783
+IGRlY2VpdmVk 97784
+LWluZGVwZW5kZW50 97785
+IE9iamVjdE91dHB1dFN0cmVhbQ== 97786
+IG92ZXJwb3dlcg== 97787
+LmR1cA== 97788
+IGxkYg== 97789
+IGRvbWVzdGljYWxseQ== 97790
+IGJlc3RlbGxlbg== 97791
+IGxvdg== 97792
+IENvbnRyYWN0b3Jz 97793
+VHJpYW5nbGVz 97794
+IGZvZGRlcg== 97795
+IGZpbG1lcw== 97796
+5LyB 97797
+IHJldm9sdmVy 97798
+U3RhcnR1cFNjcmlwdA== 97799
+L3ZhbGlkYXRpb24= 97800
+IFJlc291cmNlVHlwZQ== 97801
+acWf 97802
+IExheg== 97803
+ZmVm 97804
+IGxzdG0= 97805
+eyo= 97806
+LmF0dGFjaG1lbnQ= 97807
+LmhpdHM= 97808
+ZXdpdGg= 97809
+RE9H 97810
+QWxhYmFtYQ== 97811
+IG1lZGl1bXM= 97812
+Lm1Db250ZXh0 97813
+LWNvbHM= 97814
+5Y+L 97815
+Lm5vdGljZQ== 97816
+IGF0dG4= 97817
+IFBhY2tpbmc= 97818
+IExu 97819
+X0NPTVBMRVg= 97820
+L1VzZXJz 97821
+LnNhdmV0eHQ= 97822
+IFJvdW5kcw== 97823
+Pyw/LD8sPyw= 97824
+IGluZ2w= 97825
+IFJPQw== 97826
+X2ZlbWFsZQ== 97827
+IFN0YXJk 97828
+XV07 97829
+IHdyZXN0bGVycw== 97830
+IHRvcnJlbnRz 97831
+IHNpbmg= 97832
+77u/Cgo= 97833
+67O1 97834
+c2Vuc2U= 97835
+aG93ZXZlcg== 97836
+LlBoeXNpY3M= 97837
+SW5mcmFzdHJ1Y3R1cmU= 97838
+IFNhY3I= 97839
+RmVs 97840
+IERJU1RSSUJVVA== 97841
+w6ltZW50cw== 97842
+IFZhbGlkYXRlcw== 97843
+IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMj 97844
+IHwv 97845
+IGVzbA== 97846
+IHLDqXNlYXU= 97847
+IEJpcA== 97848
+QllURVM= 97849
+X1dBVEVS 97850
+VHVybmluZw== 97851
+RUxT 97852
+IGp1eHRhcA== 97853
+IGxlc2Jpc2NoZQ== 97854
+w71jaA== 97855
+KFVua25vd24= 97856
+TmVv 97857
+QEpzb25Qcm9wZXJ0eQ== 97858
+IGFsdW1ub3M= 97859
+IFJhcXFh 97860
+aW1laQ== 97861
+LmdldEJvdW5kcw== 97862
+Lk1vdXNlRXZlbnRIYW5kbGVy 97863
+IyMjIyMjIw== 97864
+R2VuZXJpY1R5cGU= 97865
+L2Ntcw== 97866
+IHR1cm5v 97867
+INC80LjQvQ== 97868
+IGZvbGtsb3Jl 97869
+IEV2bw== 97870
+IGNvbmR1Y3Rpdml0eQ== 97871
+IGxlYmVu 97872
+IGdlYXJib3g= 97873
+LXZz 97874
+IM+G 97875
+IGRyaW5rZXJz 97876
+IGNvbmV4YW8= 97877
+IFRlZXRo 97878
+IGdldEFyZ3VtZW50cw== 97879
+IFJBVA== 97880
+ZW50aW91cw== 97881
+RWR1Yw== 97882
+K1c= 97883
+IEluc3RpdHV0aW9uYWw= 97884
+IEJvcmQ= 97885
+aXNFcXVhbA== 97886
+KHB3ZA== 97887
+IGlnbml0ZWQ= 97888
+IFJvdXNzZQ== 97889
+IGltcGFjdGZ1bA== 97890
+IE1hbGs= 97891
+IGdlcmFs 97892
+IFBpdm90 97893
+IGF6dA== 97894
+IGNzdmZpbGU= 97895
+IFJvcGU= 97896
+IFNPTFVUSU9O 97897
+IEFyYml0cmFyeQ== 97898
+IGxldHRv 97899
+Lk1vdXNlQWRhcHRlcg== 97900
+IH19fQ== 97901
+IFNhaWxvcg== 97902
+ZGVyYQ== 97903
+UHV0dGluZw== 97904
+IGNvbmNlbnRyYXRlcw== 97905
+IGF1dGhEb21haW4= 97906
+4oCd55qE 97907
+LWZpbmFscw== 97908
+LHN0cmxlbg== 97909
+TXVvbg== 97910
+IE9yZGluYXJ5 97911
+ZmlyZWZveA== 97912
+IExhVGVY 97913
+IEh1bmQ= 97914
+ZW5naW5lZXJpbmc= 97915
+L2JsdWU= 97916
+ZWRUZXh0Qm94 97917
+KCIiKTs= 97918
+IENEREw= 97919
+a2VwdA== 97920
+IEdldFN0cmluZw== 97921
+S2ly 97922
+KCk9Jw== 97923
+IE9DRA== 97924
+YW50aXVt 97925
+JG1lbnU= 97926
+IEFwcGFsYWNoaWFu 97927
+U2VjcmV0YXJ5 97928
+66WY 97929
+4Li14Lii 97930
+U2VtYW50aWM= 97931
+ICpb 97932
+ZXN0b25l 97933
+dW5na2lu 97934
+TWF4WQ== 97935
+LXRvbmU= 97936
+In07DQo= 97937
+X1BhcnQ= 97938
+PE1lbWJlcg== 97939
+dHJhbQ== 97940
+IHRyYW5zaXN0b3I= 97941
+IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tCg== 97942
+IERlc2Rl 97943
+IHJpZ2h0ZnVs 97944
+IENvcm5lbA== 97945
+5pE= 97946
+LkhPVVI= 97947
+IHNpZGVsaW5lZA== 97948
+cmVmZXJyZXI= 97949
+bWF6ZQ== 97950
+IGhvbHN0ZXI= 97951
+IGNyaXBwbGVk 97952
+IERhdGVGb3JtYXR0ZXI= 97953
+b3BoYWdl 97954
+X21E 97955
+IGRlc2VsZWN0 97956
+cmF1ZA== 97957
+IFBLSw== 97958
+cm93RGF0YQ== 97959
+IGxvY2tzbWl0aA== 97960
+LnJlc3BvbnNlcw== 97961
+KHByb2R1Y3RJZA== 97962
+X1NUTVQ= 97963
+S2V5VHlwZQ== 97964
+LlRoZW4= 97965
+emVl 97966
+IGNydA== 97967
+IEdyYW5kbWE= 97968
+QFJlc291cmNl 97969
+IGJpdHdpc2U= 97970
+LWNtcHI= 97971
+44CCd3d3 97972
+emVpdGln 97973
+JmRpc3BsYXk= 97974
+Q2FydEl0ZW0= 97975
+LU5v 97976
+IG51bcOpcm8= 97977
+IG1hdXI= 97978
+IGluc3RhbmNpYQ== 97979
+CWR0 97980
+X25wYw== 97981
+IHNrYXRlYm9hcmQ= 97982
+4oCcQWxs 97983
+IENyb3dk 97984
+IMOkbg== 97985
+IGJyYXo= 97986
+Y2Fl 97987
+eW5ldA== 97988
+L3Bt 97989
+L3NjcmVlbg== 97990
+T1BUQVJH 97991
+IFZCb3g= 97992
+IGxlb3BhcmQ= 97993
+X2dyZWF0ZXI= 97994
+Y3B0 97995
+PGRk 97996
+IG1lY2hhbmljYWxseQ== 97997
+b3NwZWxz 97998
+KWY= 97999
+Lmx3amds 98000
+LmdldFBvcnQ= 98001
+IFBSRUY= 98002
+LkFkZFRyYW5zaWVudA== 98003
+cHBhcmQ= 98004
+IO2ajA== 98005
+RXRoZXJuZXQ= 98006
+IHNhbGluZQ== 98007
+KGxldmVscw== 98008
+IHNlcnZpY2VQcm92aWRlcg== 98009
+LkFuZ2xl 98010
+YWx0aXR1ZGU= 98011
+aWxsYXVtZQ== 98012
+IHNjYXBl 98013
+X0NBTEM= 98014
+X3F1ZXN0 98015
+IERpc3NlcnRhdGlvbg== 98016
+IEVETQ== 98017
+LUNkcw== 98018
+IGhvbm9yYXJ5 98019
+c3RvcHM= 98020
+IHN1YmRpcg== 98021
+IFZI 98022
+IENoZWF0 98023
+IHJpZ2h0ZnVsbHk= 98024
+UUU= 98025
+LldyaXRlQnl0ZQ== 98026
+ZmlndXJlcw== 98027
+ZW5uaWU= 98028
+KERCRw== 98029
+IHZva3NuZQ== 98030
+IGV4cGVuZGVk 98031
+VU5JQ0FUSU9O 98032
+aWxpbng= 98033
+IFJlY2Fw 98034
+X3ZlcnRz 98035
+IHRyYXVtYXQ= 98036
+IGdldFBsYXllcg== 98037
+IHZlcmJlc3M= 98038
+IGN1bHRpdmF0aW5n 98039
+IGluaXRpYXRvcg== 98040
+VGjDtG5n 98041
+ZmluZEZpcnN0 98042
+X3Blcm1z 98043
+IGJ1Yw== 98044
+ICIiIg0KDQo= 98045
+VFlQRVM= 98046
+b2JqZWN0TWFuYWdlcg== 98047
+KENvbmZpZ3VyYXRpb25NYW5hZ2Vy 98048
+IHRpbWlk 98049
+IHNuYXBjaGF0 98050
+IGNvbnNlZw== 98051
+CWRpc3RhbmNl 98052
+X3JpZ2h0cw== 98053
+X0Rlcw== 98054
+IEZsZXNo 98055
+LXZlcg== 98056
+IGFmbA== 98057
+ZnJhdWVu 98058
+IGJsYXNwaA== 98059
+IFF1YWxpdMOkdA== 98060
+bWFm 98061
+TW9uaXRvcmluZw== 98062
+LkRpZmY= 98063
+IHNob3JlbGluZQ== 98064
+IHJlc3BvbnNlQm9keQ== 98065
+bWVtc2V0 98066
+PGRlY2ltYWw= 98067
+U21hcnR5SGVhZGVyQ29kZQ== 98068
+IGluc2V0cw== 98069
+IEJpbmFyeVRyZWU= 98070
+YW1lZGE= 98071
+IG5paGls 98072
+IE5heQ== 98073
+eW1vbG9neQ== 98074
+IFdH 98075
+IHRhcGk= 98076
+IEluc3RhbGxlZA== 98077
+bWFpbnRlbmFuY2U= 98078
+KX0iCg== 98079
+IFhP 98080
+LXBlcmlvZA== 98081
+c2Fy 98082
+IG5pbmd1bmE= 98083
+T1JNQVQ= 98084
+LnNldFByb3RvdHlwZU9m 98085
+IEti 98086
+IEhlbnJpaw== 98087
+w6l0aXF1ZQ== 98088
+IExhaG9yZQ== 98089
+CUFkZHJlc3M= 98090
+IG1lbHRz 98091
+Tnk= 98092
+X2FkdmFuY2U= 98093
+IHZlbG9jaWRhZA== 98094
+IGFsdW1ubw== 98095
+IHNhbml0aXplcg== 98096
+IHBoaXNoaW5n 98097
+IENvbWV0 98098
+IGNoaWFy 98099
+CXNwZWM= 98100
+dHJpbW1lZA== 98101
+KHN0YXRlYXJy 98102
+b25uZW4= 98103
+UmV2ZW51ZQ== 98104
+TGVucw== 98105
+IGNoYWlyZWQ= 98106
+IEFzc3VtZXM= 98107
+VHJhc2g= 98108
+X3Vuc2V0 98109
+XEJyaWRnZQ== 98110
+UG9pbnRTaXpl 98111
+IFBvbGlj 98112
+IHNleHVhbGVz 98113
+CWRmcw== 98114
+IFdpZGVTdHJpbmc= 98115
+IGFjY3J1ZWQ= 98116
+WVc= 98117
+X1NDSEVEVUxF 98118
+IGtpdGU= 98119
+IHBhcmFjaHV0ZQ== 98120
+W3RhYmxl 98121
+IGFjdGl2ZUNsYXNzTmFtZQ== 98122
+LlF1YWQ= 98123
+SXNyYWVsaQ== 98124
+IMWT 98125
+IGhvb2c= 98126
+IGNo4buJ 98127
+ZXdlYXI= 98128
+IHRpcmVsZXNzbHk= 98129
+c2V0RXJyb3I= 98130
+LmdldEFtb3VudA== 98131
+LnNldEl0ZW1z 98132
+IE1hbnNvbg== 98133
+IEJheWVzaWFu 98134
+X0ZsYWc= 98135
+QUNIRVI= 98136
+L29yaWdpbmFs 98137
+IGltbWFj 98138
+IExvc2luZw== 98139
+Jz4KCg== 98140
+TGlj 98141
+IE1pcmFnZQ== 98142
+IEFzc2VtYmx5RmlsZVZlcnNpb24= 98143
+VGVW 98144
+IFZhbHVlRXZlbnRMaXN0ZW5lcg== 98145
+LXNvbHZpbmc= 98146
+VGhv 98147
+cm91bGV0dGU= 98148
+X1dQ 98149
+IHVuaW50ZXJydXB0ZWQ= 98150
+IGZpZWxkVHlwZQ== 98151
+LlR5cGVk 98152
+IGFtb3Vy 98153
+IG1vY2tlcnk= 98154
+KHZvbA== 98155
+IFN1YmNvbW1pdHRlZQ== 98156
+IFJ1Zg== 98157
+ZXJveA== 98158
+OlVJQnV0dG9uVHlwZUN1c3RvbQ== 98159
+IEJsdXI= 98160
+IHd5a29u 98161
+bmNlcw== 98162
+QVNIQk9BUkQ= 98163
+ISEiKTsK 98164
+IG11cmRlcmVycw== 98165
+LmRhaWx5 98166
+IERJQUc= 98167
+amluZw== 98168
+IGRvbHBoaW4= 98169
+IGzDsm5n 98170
+IGLDtg== 98171
+IFZvY2FidWxhcnk= 98172
+LlN0T2JqZWN0 98173
+JykiPg== 98174
+IHp1bg== 98175
+IHNjcmltbWFnZQ== 98176
+dHLDqWFs 98177
+IExpZw== 98178
+W3Zp 98179
+Q29sZQ== 98180
+IGZyb3N0aW5n 98181
+LlBsYXllcnM= 98182
+LXRyYW5zbGF0ZQ== 98183
+RmVlbHM= 98184
+PVwiLw== 98185
+LkJ1dHRlcktuaWZl 98186
+ID8+Owo= 98187
+IGF2aQ== 98188
+aW5uaWU= 98189
+LkZhaWx1cmU= 98190
+IHNwaW5kbGU= 98191
+Q29uZmlndXJhdGlvbkV4Y2VwdGlvbg== 98192
+X2hvcA== 98193
+IHBvc2nDp8Ojbw== 98194
+IEF3YWl0 98195
+VUlJbWFnZVBpY2tlckNvbnRyb2xsZXI= 98196
+CWRheQ== 98197
+IGdlbm9t 98198
+Q2Fi 98199
+INGA0LXQt9GD0LvRjNGC0LDRgg== 98200
+T1JJR0lOQUw= 98201
+IGVqYWN1bGF0aW9u 98202
+KHRjcA== 98203
+U0VDT05E 98204
+IHRvbmlj 98205
+IExpc3RCb3g= 98206
+IAkJCg== 98207
+KCk+Cg== 98208
+IHF1YXRyZQ== 98209
+xrDhu6NuZw== 98210
+d2l0aEVycm9ycw== 98211
+Lk1heWJl 98212
+LOKApg== 98213
+dG9rZW5JZA== 98214
+X1VOREVG 98215
+IGZyZXNobmVzcw== 98216
+IEFtZW5kbWVudHM= 98217
+Lm1hcGJveA== 98218
+LkNW 98219
+KGJsb2c= 98220
+X2dldHRpbWU= 98221
+LnF1ZXN0 98222
+c3BhcnNl 98223
+IHJlc2FsZQ== 98224
+IGVudGh1c2lhc3RpY2FsbHk= 98225
+IFByb3N0aXR1dGFz 98226
+V2E= 98227
+Q2FyZ28= 98228
+LlBhcmNlbGFibGU= 98229
+U0VOU09S 98230
+IFJ5dQ== 98231
+TGF1Z2hz 98232
+X05hdGl2ZQ== 98233
+L3Bn 98234
+eXN0cw== 98235
+IHBob3RvYw== 98236
+566A 98237
+YWRvcHQ= 98238
+LnNwZWNpZXM= 98239
+Y29uY2lsaWF0aW9u 98240
+QWRqdXN0ZWQ= 98241
+LkZpcmViYXNlQXV0aA== 98242
+dXR0bGU= 98243
+b3JkaW5hdGlvbg== 98244
+IG11bmNo 98245
+IFN0YWtl 98246
+LnBpbmc= 98247
+YW5rZXI= 98248
+KFFTdHJpbmdMaXRlcmFs 98249
+IHN1YnNjcmlwdA== 98250
+ICAJCg== 98251
+IE1DQw== 98252
+X0NtZA== 98253
+c2V4eQ== 98254
+aW91 98255
+IE1BTlk= 98256
+IG5hbm55 98257
+VFJBSU4= 98258
+IGZsb3VyaXNoaW5n 98259
+IFdhdGNoZXM= 98260
+IFFNYXA= 98261
+IEZlcm0= 98262
+IHdhc20= 98263
+IEFiZWQ= 98264
+X1VE 98265
+IEdsYXNzZXM= 98266
+K3Y= 98267
+QXR0ZW5k 98268
+LkNoYWlu 98269
+IGRlY2VuY3k= 98270
+IFN1cHBsZW1lbnRhcnk= 98271
+aHVudGVy 98272
+LXR4dA== 98273
+ICJ9IjsK 98274
+LnNldFdpbmRvd1RpdGxl 98275
+KCI8Pw== 98276
+IG51bWJlcldpdGhJbnQ= 98277
+IGFmYXI= 98278
+56e75Yiw 98279
+cml0dGU= 98280
+L2xpc3Rz 98281
+KeKAnQ== 98282
+IGRpdmVyc2Fz 98283
+IGVtYmVy 98284
+LlJlYWN0Tm9kZQ== 98285
+IGthbmc= 98286
+IFN0YW1mb3Jk 98287
+W2F0 98288
+LmNsb3NlUGF0aA== 98289
+IGNvbnRyYWNlcHRpdmU= 98290
+KGxvY2F0aW9ucw== 98291
+IGF2YW56 98292
+IENvbnRhaW5lcnM= 98293
+IFNjaG9sYXJz 98294
+LmFjY3VyYWN5 98295
+INCy0YvQv9C+0LvQvQ== 98296
+5ZWP 98297
+PSItLQ== 98298
+IFdyZXN0bGU= 98299
+IEd1YW50YW5hbW8= 98300
+IG55bXBo 98301
+KGd1ZXNz 98302
+LnNldENvbHVtbg== 98303
+X3RF 98304
+LmNvbnRlbnRNb2Rl 98305
+IGludmFsaWRhdGVk 98306
+IFNob290ZXI= 98307
+IE1hdGVy 98308
+LlN1Ym1pdA== 98309
+IGFuZ2xlZA== 98310
+bmF2YmFyRHJvcGRvd24= 98311
+QW8= 98312
+IOa1 98313
+0LjRgdC6 98314
+IFNDQU4= 98315
+CWNt 98316
+IE1hcmt0 98317
+dHJ1Y2s= 98318
+OycK 98319
+Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8KCg== 98320
+IGdoZXR0bw== 98321
+IGJ1aXRlbg== 98322
+IENsb3du 98323
+OiE= 98324
+IGNoaW1wYW4= 98325
+J2ZpZWxk 98326
+YW1tbw== 98327
+IERlcGVuZA== 98328
+KX0p 98329
+KEZMQUdT 98330
+IFJDQQ== 98331
+IENob2ly 98332
+TG9naW5QYWdl 98333
+IEdvcmQ= 98334
+Q29tcGFjdA== 98335
+LXBvY2tldA== 98336
+IGNvbnN1bHRhcg== 98337
+IEludGVyY2VwdA== 98338
+xZ90aXI= 98339
+dWV0eXBl 98340
+b25lbnRz 98341
+IHN0YXJ0UG9zaXRpb24= 98342
+IHBvc2l4 98343
+IFdvaG51bmc= 98344
+X0VYUFJFU1NJT04= 98345
+IExvZ2luQWN0aXZpdHk= 98346
+KG9wY29kZQ== 98347
+IFRhbmdv 98348
+IE51bWJlck9m 98349
+Lm92ZXJmbG93 98350
+IFdDUw== 98351
+IE9jY3VwYXRpb24= 98352
+X2Nn 98353
+LlRvcGlj 98354
+IENhcmVlcnM= 98355
+QVJBVElPTg== 98356
+LmdldExpbmU= 98357
+IOyihQ== 98358
+IE5hY2h0 98359
+IHRvSXRlbQ== 98360
+aW5jbHVzaXZl 98361
+YXZpZXN0 98362
+LWFwcG9pbnRlZA== 98363
+KGludGVybmFs 98364
+Q09OVEVYVA== 98365
+KGRpZ2l0cw== 98366
+PXsiLw== 98367
+IHBsYXl3cmlnaHQ= 98368
+IGRlYWRsaWVzdA== 98369
+bGVhZHM= 98370
+LlBVVA== 98371
+ICp9Cgo= 98372
+IFBhY3Q= 98373
+IERpc2NvdW50cw== 98374
+TG9jYWxpemVkTWVzc2FnZQ== 98375
+IE3DpG5uZXI= 98376
+Xz4= 98377
+IG1hc2NhcmE= 98378
+KFByb2ZpbGU= 98379
+5Yqf6IO9 98380
+aW1pdMOp 98381
+IHdpbGRmaXJlcw== 98382
+LVJPTQ== 98383
+LmlzT24= 98384
+KGdyb3VwSWQ= 98385
+UmVwYWly 98386
+YWNjdW11bGF0ZQ== 98387
+IDwiLA== 98388
+IGhhbmR3cml0dGVu 98389
+IGFjaGV0ZXI= 98390
+IE1HTQ== 98391
+IElybWE= 98392
+LT57Xw== 98393
+Z2Vl 98394
+Y3JpbWluYWw= 98395
+IOiLpeimgQ== 98396
+IG1vbWVudGFyaWx5 98397
+IikhPQ== 98398
+X2xpdA== 98399
+IGV4cGlyZXNJbg== 98400
+LiIpLg== 98401
+6ZW/5bqm 98402
+IGZyw6Zra2U= 98403
+dmxj 98404
+IG9yYnM= 98405
+KSwk 98406
+IHZlbnR1cmVk 98407
+Lz5c 98408
+Y2hhcm0= 98409
+TnVpdGth 98410
+ZWxkaWc= 98411
+YXRvbmlu 98412
+V2l0bmVzcw== 98413
+LWxhdA== 98414
+IHNldEhpZGRlbg== 98415
+IHJlbGljcw== 98416
+IGNvbnN1bGF0ZQ== 98417
+LklHTk9SRQ== 98418
+IkFmdGVy 98419
+IHNldEFkZHJlc3M= 98420
+IGJlc3RlaHQ= 98421
+ICcnKQoK 98422
+LnhheGlz 98423
+IHNlcsOjbw== 98424
+IG1pc2xlZA== 98425
+X1VOSUZPUk0= 98426
+IFZJQQ== 98427
+aW5jcg== 98428
+IHplbml0aA== 98429
+IHZpc2Nvc2l0eQ== 98430
+IHRoaW5seQ== 98431
+LmdldFNoYXJlZFByZWZlcmVuY2Vz 98432
+LkVycm9yQ29kZQ== 98433
+IiksIg== 98434
+IE1pbGxpb25lbg== 98435
+IC8+KQo= 98436
+U2Nyb2xsSW5kaWNhdG9y 98437
+LXNlZWtpbmc= 98438
+IFBPTElUSUNP 98439
+YXNjYQ== 98440
+X3Js 98441
+TmF2aWc= 98442
+KGZ1bGxmaWxl 98443
+IHNvbGl0dWRl 98444
+IGp1dmVu 98445
+IGhhdWxpbmc= 98446
+IE1hY3Jvcw== 98447
+IEdyeQ== 98448
+IGV4ZXJjaXRhdGlvbg== 98449
+IEFUVEFDSw== 98450
+VGlja0NvdW50 98451
+IHJpdGVz 98452
+IGRvZQ== 98453
+UGFydGljbGVTeXN0ZW0= 98454
+IHNsdQ== 98455
+V2luZG93VGV4dA== 98456
+IENsYXNzTmFtZQ== 98457
+IHNsYW5kZXI= 98458
+CVBvcnQ= 98459
+am9uZw== 98460
+P2E= 98461
+LkRpYWw= 98462
+4oCUYXQ= 98463
+JG9ialBIUEV4Y2Vs 98464
+IHNvYXI= 98465
+RU5O 98466
+YXBwZWFyZWQ= 98467
+IHF1b3RpZA== 98468
+ZW1hY2hpbmU= 98469
+IG5pcA== 98470
+IG1pY3JvdGltZQ== 98471
+IEFsbWE= 98472
+OyE= 98473
+LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0t 98474
+IFBhc3NhZ2U= 98475
+IGR1bXBzdGVycw== 98476
+IEV4Y2x1ZGU= 98477
+IHN1Z2dlc3RpdmU= 98478
+IENpcmN1bGFyUHJvZ3Jlc3NJbmRpY2F0b3I= 98479
+X2Nscg== 98480
+QXJyYXlUeXBl 98481
+SUxMQQ== 98482
+RWxhcHNlZFRpbWU= 98483
+RHJpdmVu 98484
+IHJlc291cmNlTmFtZQ== 98485
+IEdhcnJpc29u 98486
+c2VyaXI= 98487
+LWFoZWFk 98488
+IHBpbm5hY2xl 98489
+IEVzcHJlc3Nv 98490
+U3BhcnNl 98491
+IGFzc2F5cw== 98492
+IEdpcmxmcmllbmQ= 98493
+aW1pZA== 98494
+XT0nXA== 98495
+T05HTE9ORw== 98496
+IHBvcnRyYXlpbmc= 98497
+TGFuZQ== 98498
+IGLDunNxdWVkYQ== 98499
+IHJlaW5mb3JjZW1lbnRz 98500
+IFNwcmVhZHNoZWV0 98501
+IEFycmF5Q29sbGVjdGlvbg== 98502
+LGFycg== 98503
+bGlnaHRib3g= 98504
+aWNhbmE= 98505
+PCI= 98506
+YnVpbGRlcnM= 98507
+S2lk 98508
+IE1hdFNuYWNrQmFy 98509
+RVhQUg== 98510
+b2RjYXN0 98511
+IEZvdW5kYXRpb25z 98512
+IGluZHM= 98513
+PSckew== 98514
+Rml6eg== 98515
+LWZ1bmN0aW9uYWw= 98516
+KHdvcmtzcGFjZQ== 98517
+IHN0ZW1tZWQ= 98518
+X3BhdGNoZXM= 98519
+IEphcnZpcw== 98520
+UkVBRElORw== 98521
+IGRpc3Jlc3BlY3RmdWw= 98522
+IFFEb20= 98523
+ICR7Cg== 98524
+ZXN0YXR1cw== 98525
+UmVhY2hlZA== 98526
+IS4KCg== 98527
+SUxU 98528
+IE5ERUJVRw== 98529
+IENvdXJhZ2U= 98530
+YmlydGhkYXRl 98531
+IFRpbmc= 98532
+IHV0aWxpemFkbw== 98533
+w6FuY2hleg== 98534
+T3V0ZG9vcg== 98535
+IGhhbmRndW5z 98536
+UmVmQ291bnQ= 98537
+yZk= 98538
+cm9tbw== 98539
+IHR0cw== 98540
+LlNoZQ== 98541
+IFBhbmU= 98542
+44CRLOOAkA== 98543
+IElPQ1RM 98544
+L2JsYWNr 98545
+aW5zY3JpcHRpb24= 98546
+IGJpb3BzeQ== 98547
+IFRpbWVJbnRlcnZhbA== 98548
+LlRlc3RDaGVjaw== 98549
+IEdVSVN0eWxl 98550
+IENhcGFiaWxpdHk= 98551
+IEJlaXRyYWc= 98552
+ZG9ubmVlcw== 98553
+VHJlYXRtZW50 98554
+LmJhY2t1cA== 98555
+IHNpZ25pbmdz 98556
+IEJvY2E= 98557
+ZHJt 98558
+Lk1BSU4= 98559
+IGdvZWRl 98560
+IE1hcmt1cA== 98561
+R1JFRQ== 98562
+IEJhc2VTZXJ2aWNl 98563
+LkNyZWF0b3I= 98564
+IGphaWxz 98565
+IEthaG4= 98566
+SXBBZGRyZXNz 98567
+QUNISQ== 98568
+IGluaGliaXRlZA== 98569
+IEAkXw== 98570
+IEFzc2Fzcw== 98571
+IGVudmlhZG8= 98572
+SGVyb2Vz 98573
+0J/QtdGA 98574
+IE1hdmVu 98575
+Lmxz 98576
+IGl2ZQ== 98577
+fFJG 98578
+IHJlc2l6ZU1vZGU= 98579
+IHJ1bXBl 98580
+X2F0dGFjaG1lbnRz 98581
+VFU= 98582
+IHRhY3RpbGU= 98583
+QXR0ZW1wdGluZw== 98584
+IHJvYmlu 98585
+eWF3 98586
+IG1lcmNlbmFyaWVz 98587
+IEhhYml0YXQ= 98588
+ZW5kZGF0ZQ== 98589
+IG94eQ== 98590
+CVJhbmRvbQ== 98591
+b2hvbg== 98592
+SXNOdWxs 98593
+IFZhbGlkYXRpb25SZXN1bHQ= 98594
+44Oa 98595
+dW1iZWQ= 98596
+cHB2 98597
+IGFycA== 98598
+aWNoaWNr 98599
+X3Jubg== 98600
+IFRGVA== 98601
+VGV4SW1hZ2U= 98602
+Ik9u 98603
+IFNhbXBsZXI= 98604
+dG9wbA== 98605
+IGphbmU= 98606
+eWxpbmc= 98607
+IFVOSUNPREU= 98608
+VGFiSW5kZXg= 98609
+PHsK 98610
+c3VzcGVuZA== 98611
+dXZpYW4= 98612
+LGFwcGxpY2F0aW9u 98613
+0L7Qu9C40YfQtdGB0YLQstC+ 98614
+eWF0 98615
+ZXppZXI= 98616
+IENIVU5L 98617
+IEFkbGVy 98618
+L0FkZA== 98619
+IEtleVZhbHVl 98620
+IHNwb3PDs2I= 98621
+U2FtcGxpbmc= 98622
+Y2hlcnM= 98623
+X0FNRA== 98624
+UnU= 98625
+Lk11c3RDb21waWxl 98626
+TmF0aW9u 98627
+QXNzb2M= 98628
+TWFuYWdpbmc= 98629
+IEVuZ2w= 98630
+X0dC 98631
+IHN1Y2NpbmN0 98632
+IGRpc2xpa2Vk 98633
+IElrZQ== 98634
+QnVsbGV0aW4= 98635
+X0FSQ0hJVkU= 98636
+UHJvcG9zYWw= 98637
+IGpvZ2dpbmc= 98638
+LkNSRUFURUQ= 98639
+IGNob2w= 98640
+6KOF 98641
+jKg= 98642
+LXB1c2g= 98643
+IHJlc2VydmE= 98644
+Y29yZXY= 98645
+w6h0cmU= 98646
+VEhS 98647
+IGluY29tcGV0ZW5jZQ== 98648
+IGNoYXJpc21h 98649
+5oSf 98650
+ICI9PQ== 98651
+QlRO 98652
+IExvY2F0b3I= 98653
+aXZldA== 98654
+KCcuJykK 98655
+IGZvckluZGV4UGF0aA== 98656
+w7RtZQ== 98657
+IGNhcGFjaXQ= 98658
+d2F0ZXJz 98659
+IFdST05H 98660
+aG9h 98661
+IE1JUFM= 98662
+IGVtaXNz 98663
+IEphY3F1ZWxpbmU= 98664
+KGNtcA== 98665
+IGVlbnM= 98666
+TGVv 98667
+LnRpbWluZw== 98668
+Q0xVU0lPTg== 98669
+ICgiLQ== 98670
+5ZOI 98671
+LmtvZGU= 98672
+IFVuZGVydA== 98673
+IGJld2lsZA== 98674
+IEVzc2Vu 98675
+Lmhk 98676
+IHJlbmVnb3Q= 98677
+IG1vd2Vy 98678
+IGxzcA== 98679
+IHBlbmNoYW50 98680
+IG1hbm9l 98681
+IGFnbGk= 98682
+IHJlY2Fs 98683
+IE9QRVJBVElPTg== 98684
+KF4pKA== 98685
+IM69 98686
+IFNjb3BlZA== 98687
+IEAiCg== 98688
+PWxhYmVs 98689
+W2xvYw== 98690
+SW50bA== 98691
+IE56 98692
+dGFibGV0 98693
+LkNvbHVtbk5hbWU= 98694
+IHNjcmVlblNpemU= 98695
+REJ1cw== 98696
+Y29va2Vk 98697
+LXJlZ2lzdHJhdGlvbg== 98698
+4oCcT25l 98699
+LW5vbg== 98700
+IHdpxJlj 98701
+IGNvc3Rh 98702
+LmFkZFRhYg== 98703
+LmNvbmRpdGlvbnM= 98704
+IEhlc3M= 98705
+TUVNT1JZ 98706
+IEF2YWxhbmNoZQ== 98707
+KCl9fQo= 98708
+IHRyaXBsZXQ= 98709
+IGxhYnlyaW50aA== 98710
+IE5vZGVMaXN0 98711
+IE5ZVA== 98712
+IHllbmk= 98713
+ZGZm 98714
+Lkh0bWxDb250cm9scw== 98715
+QVZJUw== 98716
+L01hdGg= 98717
+IG1lbWNtcA== 98718
+2KfYoQ== 98719
+0L7RgdGM 98720
+Y3JhcA== 98721
+KHBhZ2Vz 98722
+IGx4bWw= 98723
+IFFEYXRlVGltZQ== 98724
+X3RjYg== 98725
+IG9wZW5pZA== 98726
+IHN5bmFwdGlj 98727
+IE1ETUE= 98728
+KHNsdWc= 98729
+aWdtYXRpYw== 98730
+ZW5vcg== 98731
+IGNyYW1wZWQ= 98732
+R09Q 98733
+rZA= 98734
+LmlzRmlsZQ== 98735
+IERpZmZlcmVudGlhbA== 98736
+ID0iIjsK 98737
+CQkJICAgIAk= 98738
+IENvb2tl 98739
+CVVGVU5DVElPTg== 98740
+IHBlcnNldmVyYW5jZQ== 98741
+UmVsYXRpdmVMYXlvdXQ= 98742
+SU1QT1JUQU5U 98743
+IGV4b24= 98744
+INC+0L0= 98745
+aWJhc2U= 98746
+KENPTlQ= 98747
+bm92YXRpb24= 98748
+5L2V 98749
+W3N1Yg== 98750
+QWRtaW5Db250cm9sbGVy 98751
+SFRUUEhlYWRlcg== 98752
+Y3JlYXI= 98753
+IE5JUg== 98754
+IERyb3BEb3duTGlzdA== 98755
+IHZhbGlkZQ== 98756
+IGRlaHlkcmF0aW9u 98757
+Lidd 98758
+KFdJTg== 98759
+IC4uLlw= 98760
+IHBob3Rvc2hvcA== 98761
+CUluaXQ= 98762
+X2NvdQ== 98763
+IHRpbWVab25l 98764
+ZGFyd2lu 98765
+cm9tYXRpYw== 98766
+TmF2aWdhdGlvbkl0ZW1TZWxlY3RlZExpc3RlbmVy 98767
+YnJhdGVz 98768
+XS0tOwo= 98769
+IHRyYWdlZGllcw== 98770
+IFBlZGlhdHJpY3M= 98771
+U01BUlQ= 98772
+LUFQSQ== 98773
+IE1lc3NhZ2VMb29rdXA= 98774
+CXZv 98775
+IHByZWp1ZGljZXM= 98776
+IG1B 98777
+VXBz 98778
+IE1JU1NJTkc= 98779
+CWFk 98780
+Q3JlYW0= 98781
+IFRi 98782
+IE1vbmE= 98783
+X2dob3N0 98784
+CXR5cGVz 98785
+RW1i 98786
+IERvY3VtZW50YXJ5 98787
+Jyk7CgoKCg== 98788
+IGx1cA== 98789
+X1JlZmVyZW5jZQ== 98790
+IEJBVENI 98791
+IGludGVydHdpbmVk 98792
+PENlbGw= 98793
+IENhYnI= 98794
+bmF0aW9u 98795
+IGlzQ29ubmVjdGVk 98796
+LnJlbW92ZUxpc3RlbmVy 98797
+IGNvbmc= 98798
+X3Rp 98799
+IFNpbGljb25l 98800
+IOqysOqzvA== 98801
+IFdBTg== 98802
+IEdpYnJhbHRhcg== 98803
+L3Jlc3BvbnNl 98804
+CXBlcnNvbg== 98805
+Y2hhbnRz 98806
+VklQ 98807
+ZW1lcmdlbmN5 98808
+UGl4ZWxGb3JtYXQ= 98809
+LUFt 98810
+IHNvdXRod2VzdGVybg== 98811
+X3BsbA== 98812
+aWZlcnM= 98813
+X09OQ0U= 98814
+IEZheWV0dGU= 98815
+Lm5jYmk= 98816
+X1BhbmVs 98817
+LlF1YWw= 98818
+IHBvbHlz 98819
+IGNyZWF0ZVN0YWNrTmF2aWdhdG9y 98820
+77+9dA== 98821
+IGxheW9mZnM= 98822
+IEJsYW5jbw== 98823
+RmVhdA== 98824
+IFZpbWVv 98825
+X2NoaQ== 98826
+X2xpZmV0aW1l 98827
+UE9JTlRT 98828
+LHByaXZhdGU= 98829
+IHVuYmVhcmFibGU= 98830
+cHJpbnRpbmc= 98831
+IGNnaQ== 98832
+LkJBQ0s= 98833
+IGludGVybnM= 98834
+IE5ld2x5 98835
+aW5mZWxk 98836
+KElC 98837
+IEthdGE= 98838
+IERlZmVuZGFudHM= 98839
+VGhy 98840
+6aKE 98841
+X1ZG 98842
+RkZGRkZGRkY= 98843
+IGRhdmlkamw= 98844
+IGJpdHRlcmx5 98845
+U3VnZ2VzdGlvbnM= 98846
+LnNldENhbmNlbGFibGU= 98847
+RklOQUw= 98848
+YXNvbnM= 98849
+X3J3bG9jaw== 98850
+X1dSQVBQRVI= 98851
+IGhhcHBpZXN0 98852
+KHJvd0luZGV4 98853
+w7NzaXRv 98854
+VE9UWVBF 98855
+QXV0b21hdGlvbg== 98856
+TG9nRmlsZQ== 98857
+IGNvbnNvbGF0aW9u 98858
+44OA 98859
+IHTDqm0= 98860
+IHByZXI= 98861
+cmd5eg== 98862
+IEdlZw== 98863
+CWR0bw== 98864
+LmRlZmF1bHRWYWx1ZQ== 98865
+IEthbWk= 98866
+IEFTRQ== 98867
+b3B0aW1pemVk 98868
+IO2PrA== 98869
+IG9yaWdpbmF0ZXM= 98870
+ZXJyTXNn 98871
+IGVzcGHDp28= 98872
+KFNZUw== 98873
+IE1jQg== 98874
+ZGFuY2U= 98875
+X2RldGVjdGVk 98876
+IGZyw7w= 98877
+CQkgICAgCQk= 98878
+PERhdGU= 98879
+KGNvbWI= 98880
+IERlY2lkZQ== 98881
+XEZpZWxk 98882
+IFByb3Bvc2Vk 98883
+Umli 98884
+IGRpc2xpa2Vz 98885
+IFdpZW4= 98886
+CURvY3VtZW50 98887
+IHRyYWY= 98888
+IHN0b3JpYQ== 98889
+IFRlbGxz 98890
+Jyk9PQ== 98891
+Q3Jp 98892
+KFZBTFVF 98893
+IEJ1cm5ldHQ= 98894
+LHZvaWQ= 98895
+IGRhbmg= 98896
+IGNjcA== 98897
+QmxvY2tjaGFpbg== 98898
+OiItImAK 98899
+SUNsaWVudA== 98900
+SVNPREU= 98901
+SXNzdWVy 98902
+KX0NCg== 98903
+LGJ1dA== 98904
+IFVwaA== 98905
+KFN1Yg== 98906
+IHTDqWzDqXBob25l 98907
+IG9uRGF0YUNoYW5nZQ== 98908
+IG1hcnNoYWxsZXI= 98909
+LWFuYWx5dGljcw== 98910
+LGNvbnRlbnQ= 98911
+IGRlYmFjbGU= 98912
+X1ZhbHVlQ2hhbmdlZA== 98913
+IGZhdW5h 98914
+ICM9Pg== 98915
+IGZveWVy 98916
+J3V0aWxpc2F0aW9u 98917
+IE3DvGxsZXI= 98918
+IEZldGlzaA== 98919
+IGRlZmF1bHRNYW5hZ2Vy 98920
+IGJhY2t0cmFjaw== 98921
+QmFo 98922
+RXhwbGljaXQ= 98923
+X0FTQ0lJ 98924
+IG1BY3Rpdml0eQ== 98925
+KE1zZw== 98926
+IOqyjA== 98927
+IFRFUk1T 98928
+IEFuZ2ll 98929
+SFNW 98930
+IE1vc3F1ZQ== 98931
+Lk5hbWVz 98932
+7Yq8 98933
+cmVzdGU= 98934
+X3Bhcm1z 98935
+IGdhcGluZw== 98936
+IGNyb3BwaW5n 98937
+RGF0YUZyYW1l 98938
+IHJlc3BvbnNpdmVuZXNz 98939
+X3VuZG8= 98940
+X3RyYW4= 98941
+LnRlcm1pbmF0ZQ== 98942
+IGl0YWxpYW5l 98943
+IHdhbGt0aHJvdWdo 98944
+IGF0dHJhY3RpdmVuZXNz 98945
+0LTQtQ== 98946
+X1NUUw== 98947
+X2xlYXJu 98948
+IGNob2NvbGF0ZXM= 98949
+aWVyYXJjaGljYWw= 98950
+LXRoaW5raW5n 98951
+ICkpKQ== 98952
+aXNobWVudHM= 98953
+LkxvZ2Y= 98954
+IFRNWg== 98955
+IENhbmFyeQ== 98956
+Zm9pbA== 98957
+IFZhY2NpbmU= 98958
+LnZ4 98959
+IFN1cnJvdW5k 98960
+SW50ZXJtZWRpYXRl 98961
+IGlvdg== 98962
+dmFpcw== 98963
+JzsiOwo= 98964
+772eCgo= 98965
+6YCB5paZ 98966
+4oCmaXQ= 98967
+U2VhdHM= 98968
+Q2xhcg== 98969
+V2Fycw== 98970
+IEh1dGNoaW5zb24= 98971
+IEhhc2Fu 98972
+IScpCgo= 98973
+IFJpY2hpZQ== 98974
+Y2hlaWRlbg== 98975
+KCQoJw== 98976
+WW9yaw== 98977
+IGxpZHM= 98978
+IGFscGhhbnVtZXJpYw== 98979
+IEdsb2Nr 98980
+LnNoYXBlcw== 98981
+IHNwYXJraW5n 98982
+X2Vwc2lsb24= 98983
+dXBsaWNhdGVk 98984
+LmRpcnR5 98985
+XSk9PQ== 98986
+IOychOy5mA== 98987
+IHNjbg== 98988
+IC8qKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq 98989
+X1BSRVZJRVc= 98990
+X0hD 98991
+aWVsZGluZw== 98992
+ZmdldHM= 98993
+IEFkZGlzb24= 98994
+IHByb2R1Y3RTZXJ2aWNl 98995
+LWZpZ3VyZQ== 98996
+KHJldHZhbA== 98997
+emFubw== 98998
+IGF1dG9i 98999
+CXNk 99000
+X251bWVy 99001
+IFNldExhc3RFcnJvcg== 99002
+IEZpb3I= 99003
+aWZpY2FuY2U= 99004
+VW50aXRsZWQ= 99005
+IGluZmllbGQ= 99006
+IHt9KSk7Cg== 99007
+IHNwYWM= 99008
+IHJvb2tpZXM= 99009
+KGRlc2NyaWJpbmc= 99010
+bmdlbg== 99011
+4K6/4K4= 99012
+LnJkZg== 99013
+Lk11dGV4 99014
+IGtuZWVsaW5n 99015
+IFFF 99016
+c2V0TWF4 99017
+UmVhZFN0cmVhbQ== 99018
+IHZlbnRhcw== 99019
+c3V0 99020
+Y21wZXE= 99021
+LldyaXRlQWxsVGV4dA== 99022
+IEV4cGVyaWVuY2Vk 99023
+JF9f 99024
+IGthdW0= 99025
+IExJUw== 99026
+IGRvY3VtZW50b3M= 99027
+X0hFQUxUSA== 99028
+aWNvbnRhaW5z 99029
+IGFydGlzYW5z 99030
+T1dORVI= 99031
+IGJsaW5rZWQ= 99032
+Z2V0RGlzcGxheQ== 99033
+IHRvZW4= 99034
+IHJvd051bQ== 99035
+IGF2cmls 99036
+IGludmlz 99037
+IEtlYXI= 99038
+dG9CZUluVGhlRG9jdW1lbnQ= 99039
+YXB1cg== 99040
+IHJhY2tlZA== 99041
+IE1jTWFzdGVy 99042
+X0FUVFJJQg== 99043
+SGF6 99044
+IGZhY3R1cmE= 99045
+L3Rz 99046
+INGA0LDQt9C80LXRgA== 99047
+IHpm 99048
+IHNob3J0ZmFsbA== 99049
+LmZhc3Rh 99050
+IENPTlNUQU5U 99051
+Lm1hbmFnZWQ= 99052
+Z2Vtcw== 99053
+U2hhcmVkUG9pbnRlcg== 99054
+IGJsdXJyeQ== 99055
+YnJpZ2h0bmVzcw== 99056
+KGNvbXBvbmVudHM= 99057
+IC4uLiIKCg== 99058
+U0VMTA== 99059
+IElsbHVzdHJhdG9y 99060
+LmdldENoYW5uZWw= 99061
+IHRyb3V2w6k= 99062
+eXN0ZXJz 99063
+IHZvaXM= 99064
+IExpbmRlbg== 99065
+IGVtb2ppcw== 99066
+IGJyYXds 99067
+IE1TUg== 99068
+IEVsbw== 99069
+IENyb2F0aWFu 99070
+UG9wdXBNZW51 99071
+TGV3aXM= 99072
+LkpXVA== 99073
+IGFzdG9uaXNoZWQ= 99074
+QnVzaA== 99075
+KGl0ZW1JZA== 99076
+IGRldGFjaG1lbnQ= 99077
+IEVuY29yZQ== 99078
+5bCU 99079
+IHJla2w= 99080
+IGNyYW0= 99081
+KSQv 99082
+LmdldEhvc3Q= 99083
+X3JlY29tbWVuZA== 99084
+LUhU 99085
+X2NhbGlicmF0aW9u 99086
+QXV0aGVudGljYXRl 99087
+LmZpcmViYXNlYXBw 99088
+VU5JWA== 99089
+CUNhbWVyYQ== 99090
+IEhFQVA= 99091
+SWRlYWw= 99092
+Lm9mZmljZQ== 99093
+IGdvb2Z5 99094
+KFN5bWJvbA== 99095
+IGpvdWVy 99096
+X3BhcnRpdGlvbnM= 99097
+IHJhcGlkZW1lbnQ= 99098
+IEdOVU5FVA== 99099
+aWRVc2Vy 99100
+IHN1cGVydmlzZQ== 99101
+KENvbnRhY3Q= 99102
+QVdO 99103
+44GY 99104
+IG5hYW0= 99105
+IGF1c3Q= 99106
+5Zyo57q/ 99107
+X3NvZnRtYXg= 99108
+QWxsb3dBbm9ueW1vdXM= 99109
+YW1tYWJsZQ== 99110
+Uk9VVEU= 99111
+KkQ= 99112
+IGFkZW4= 99113
+IENyaXN0aW5h 99114
+IENyaXN0aWFubw== 99115
+IGJsb29kc3RyZWFt 99116
+c3ViY2xhc3M= 99117
+X3BlcnNvbmE= 99118
+Q0hJTEQ= 99119
+LWtub3c= 99120
+IG5hdmlnYXRpb25PcHRpb25z 99121
+IFp1a3VuZnQ= 99122
+IFBpeGFy 99123
+VHlsZXI= 99124
+IHVuZGVyd29ybGQ= 99125
+IHNpbmNlcml0eQ== 99126
+IGRpc3BlbnNlcg== 99127
+IGt0ZXI= 99128
+aWRkZXJz 99129
+LmFkZE5vZGU= 99130
+LWNoZWNrZWQ= 99131
+IGtleXN0 99132
+IFdUTw== 99133
+LnNpZ25hbHM= 99134
+IGFkdmVudHVyZXI= 99135
+IFBhbmc= 99136
+XFI= 99137
+PXBvcw== 99138
+IGRpc3BlbnNhcmllcw== 99139
+IENsb3NldA== 99140
+KCJ7XCI= 99141
+aWRlb24= 99142
+IG7DqWNlc3NhaXJl 99143
+KCkiCg== 99144
+X1JFQ0VJVkVE 99145
+IHLDqXN1bHRhdHM= 99146
+IG1vZGVu 99147
+IEljZWxhbmRpYw== 99148
+O2Q= 99149
+LmFsbG93ZWQ= 99150
+KG5ld1VzZXI= 99151
+IG1lcmNpbGVzcw== 99152
+LldhaXRGb3I= 99153
+IGRheWNhcmU= 99154
+IENvbnZleW9y 99155
+55Y= 99156
+8Kw= 99157
+54M= 99158
+55c= 99159
+56A= 99160
+6IQ= 99161
+6bI= 99162
+5aY= 99163
+552A 99164
+5b6I 99165
+6YU= 99166
+54s= 99167
+6ao= 99168
+5oI= 99169
+6aU= 99170
+6IU= 99171
+5oOz 99172
+5ag= 99173
+6bk= 99174
+54I= 99175
+5ZI= 99176
+54w= 99177
+6LSo 99178
+5qI= 99179
+5rCU 99180
+8Ks= 99181
+5pWZ 99182
+558= 99183
+5YQ= 99184
+5Y+R5bGV 99185
+5Yib 99186
+6JE= 99187
+5oU= 99188
+5Z4= 99189
+5YGa 99190
+5oiY 99191
+5pA= 99192
+5by6 99193
+5rex 99194
+5Yeg 99195
+578= 99196
+5ak= 99197
+6J4= 99198
+5aeU 99199
+5ZCE 99200
+6I4= 99201
+6bg= 99202
+6bo= 99203
+5Y+X 99204
+6IGM 99205
+5Zg= 99206
+5r0= 99207
+6aOO 99208
+6JCl 99209
+5YWa 99210
+6Jw= 99211
+6YKj 99212
+6aKG 99213
+55E= 99214
+6bM= 99215
+5pyv 99216
+5LuA 99217
+5oi/ 99218
+57K+ 99219
+5ao= 99220
+6YY= 99221
+5aSq 99222
+6IKh 99223
+6Js= 99224
+5YWJ 99225
+5p6B 99226
+5Yqe 99227
+6JM= 99228
+55g= 99229
+5bQ= 99230
+5Zc= 99231
+6Iqx 99232
+56CU 99233
+5b+r 99234
+5biI 99235
+6LaK 99236
+6KeC 99237
+5qQ= 99238
+5qY= 99239
+554= 99240
+6IKy 99241
+54ix 99242
+55m9 99243
+5LiW 99244
+5LuA5LmI 99245
+55y8 99246
+5bM= 99247
+6JI= 99248
+5pM= 99249
+6KKr 99250
+5bmy 99251
+55eF 99252
+5aOr 99253
+55I= 99254
+6Lg= 99255
+5r4= 99256
+5bel5L2c 99257
+6K6p 99258
+54Ot 99259
+6L6D 99260
+5YS/ 99261
+5Yqp 99262
+56ev 99263
+57M= 99264
+55M= 99265
+56M= 99266
+5YI= 99267
+6Lk= 99268
+6Jo= 99269
+5bex 99270
+55m+ 99271
+5Yq/ 99272
+6LWb 99273
+5qg= 99274
+5r8= 99275
+6JY= 99276
+5p2R 99277
+5bim 99278
+5aKD 99279
+5oqk 99280
+6a0= 99281
+5as= 99282
+6Ieq5bex 99283
+5rWO 99284
+5L2O 99285
+5Yy7 99286
+6Ziy 99287
+5Yac 99288
+6IY= 99289
+54Y= 99290
+6as= 99291
+5Yab 99292
+5oiP 99293
+5Y2H 99294
+5pav 99295
+5L2P 99296
+6JC9 99297
+5YW7 99298
+6Ie0 99299
+54o= 99300
+54c= 99301
+54U= 99302
+6JQ= 99303
+5LyB5Lia 99304
+5Zui 99305
+5omN 99306
+5qCh 99307
+5YeG 99308
+5aWH 99309
+5Ymv 99310
+6bw= 99311
+5ryU 99312
+6ams 99313
+6LWw 99314
+56We 99315
+5YWL 99316
+5pyb 99317
+5rK5 99318
+6L65 99319
+5Y2D 99320
+5b6A 99321
+5YiH 99322
+5qk= 99323
+57Y= 99324
+5Zk= 99325
+6ZmF 99326
+54mM 99327
+56S+5Lya 99328
+5ri45oiP 99329
+5pa9 99330
+54Wn 99331
+5o6n 99332
+5ruh 99333
+6K+G 99334
+6YeN6KaB 99335
+6Laz 99336
+55WZ 99337
+57uG 99338
+5Y2P 99339
+6YCC 99340
+5oc= 99341
+5qc= 99342
+6YQ= 99343
+6J0= 99344
+5biC5Zy6 99345
+57uP5rWO 99346
+5Lmg 99347
+5paH5YyW 99348
+6Zq+ 99349
+5LmQ 99350
+5Yaz 99351
+5qyi 99352
+6KeJ 99353
+5Zut 99354
+5YW0 99355
+5YWF 99356
+5Li+ 99357
+5om5 99358
+6JU= 99359
+5oqK 99360
+5oqA5pyv 99361
+56m2 99362
+56ys5LiA 99363
+5L6/ 99364
+5ZON 99365
+546p 99366
+5Z2a 99367
+6J6N 99368
+5Y2K 99369
+5Zac 99370
+5bGC 99371
+56a7 99372
+5LuF 99373
+6Z8= 99374
+5ZGz 99375
+5b+1 99376
+5a2j 99377
+57Sn 99378
+5LmF 99379
+6aQ= 99380
+6Z4= 99381
+6KQ= 99382
+5YCZ 99383
+5Ya1 99384
+55+z 99385
+5YGl 99386
+5oCO 99387
+5a6d 99388
+6KGA 99389
+5Z+f 99390
+5pep 99391
+55+l6YGT 99392
+6LSf 99393
+5Y2a 99394
+5be0 99395
+5Lqy 99396
+5bGe 99397
+5Lil 99398
+5LqJ 99399
+5a+f 99400
+6Lo= 99401
+57A= 99402
+5bu66K6+ 99403
+5Lqn5Lia 99404
+5ZCD 99405
+5a2p 99406
+5peF 99407
+5qC5 99408
+5p2Q 99409
+5LyX 99410
+6ZqP 99411
+5a6Y 99412
+5bqV 99413
+5b2p 99414
+5a+M 99415
+5rip 99416
+5Y2r 99417
+5Ymn 99418
+55uK 99419
+5oqX 99420
+6LSi 99421
+57qq 99422
+5oY= 99423
+55Sf5rS7 99424
+57qi 99425
+55Sf5Lqn 99426
+6L+c 99427
+6ZKx 99428
+5ZSu 99429
+576k 99430
+54+t 99431
+5qW8 99432
+6YeH 99433
+6Im6 99434
+5bGF 99435
+5YGH 99436
+6LCI 99437
+5pma 99438
+6aw= 99439
+6Iiq 99440
+5a6z 99441
+6Jc= 99442
+540= 99443
+5bU= 99444
+546L 99445
+5bq3 99446
+6I63 99447
+57ut 99448
+5Lqa 99449
+6aOf 99450
+5Y6L 99451
+5oub 99452
+6IyD 99453
+6K64 99454
+5Zu0 99455
+6b0= 99456
+6ZmN 99457
+57qz 99458
+5ZOq 99459
+5pWZ6IKy 99460
+5bey57uP 99461
+5b63 99462
+5p6X 99463
+5a6J5YWo 99464
+6b6Z 99465
+5aSn5a62 99466
+6Z2S 99467
+5bqc 99468
+5rKz 99469
+5Y+k 99470
+6I2v 99471
+5Z2H 99472
+5pm6 99473
+5Lmh 99474
+55Wl 99475
+5Ya3 99476
+56aP 99477
+5a6k 99478
+57u0 99479
+5om/ 99480
+5bGK 99481
+6K+J 99482
+5Yi7 99483
+6J8= 99484
+5qo= 99485
+5bCx5piv 99486
+6L+Z5Liq 99487
+5Lit5b+D 99488
+5LiW55WM 99489
+5Z+O5biC 99490
+6Z2e5bi4 99491
+5YiS 99492
+5Y+M 99493
+5oCO5LmI 99494
+5Yiw5LqG 99495
+5pyD 99496
+5Y+y 99497
+5L6G 99498
+5b6L 99499
+5aWW 99500
+57uI 99501
+5aqS 99502
+5a6B 99503
+6K++ 99504
+6IGM5Lia 99505
+5YWN 99506
+5rWL 99507
+5oCl 99508
+5pWR 99509
+54us 99510
+6K2m 99511
+6aSQ 99512
+5oS/ 99513
+6LSr 99514
+55aR 99515
+5Zo= 99516
+5aW5 99517
+5Y+I 99518
+5Zug5Li6 99519
+5LiN5piv 99520
+5aSf 99521
+5pa56Z2i 99522
+6ZWH 99523
+5LqS 99524
+6YWS 99525
+6K6y 99526
+55aX 99527
+5pil 99528
+5rmW 99529
+5aSc 99530
+6LSj5Lu7 99531
+5Lq65rCR 99532
+5YWw 99533
+55+t 99534
+5pWF 99535
+5YeP 99536
+5pmu 99537
+5Lqu 99538
+5L6d 99539
+5Y2w 99540
+6Z2Z 99541
+5YCL 99542
+5b6B 99543
+5ZC4 99544
+57y6 99545
+5pS7 99546
+5YeA 99547
+5YW4 99548
+5Zu6 99549
+6K6/ 99550
+57k= 99551
+54A= 99552
+5o+Q5L6b 99553
+57uH 99554
+5b6I5aSa 99555
+56CU56m2 99556
+6Lef 99557
+5Li76KaB 99558
+5oOF5Ya1 99559
+562W 99560
+5q27 99561
+5aSn5a2m 99562
+5pS/5bqc 99563
+5b2x5ZON 99564
+5Lmw 99565
+5YWt 99566
+6Zmp 99567
+5YWr 99568
+5p+Q 99569
+6LSo6YeP 99570
+5Y2g 99571
+5beu 99572
+5pu05aSa 99573
+5pyL 99574
+6Z2p 99575
+5a6j 99576
+56C0 99577
+6L27 99578
+5bqn 99579
+5pi+ 99580
+56iz 99581
+6LS1 99582
+6IOM 99583
+6Imv 99584
+55ar 99585
+5q+S 99586
+5LmO 99587
+5YCf 99588
+6L+3 99589
+562U 99590
+5r+A 99591
+5ZG8 99592
+5LqG5LiA 99593
+6Laj 99594
+5Ly0 99595
+5LyZ 99596
+6Lw= 99597
+8Kyt 99598
+5Zu95a62 99599
+5rS75Yqo 99600
+546w5Zyo 99601
+56eR5oqA 99602
+5Y2h 99603
+5LiN5ZCM 99604
+5Liq5Lq6 99605
+6K6w6ICF 99606
+5LiN5pat 99607
+6Ze7 99608
+5Lmd 99609
+6JGX 99610
+57u8 99611
+5LiD 99612
+5qCR 99613
+5pyL5Y+L 99614
+5Y2W 99615
+5Lyk 99616
+5rKZ 99617
+5ZaE 99618
+5aWX 99619
+6L2u 99620
+56m/ 99621
+6KGl 99622
+5LiA5a6a 99623
+56qB 99624
+552j 99625
+6L+9 99626
+5aiB 99627
+5Y+m 99628
+5Zuw 99629
+5p62 99630
+57ud 99631
+5pWj 99632
+5o6i 99633
+5rSX 99634
+5Li0 99635
+5Ly8 99636
+6LS4 99637
+5Liw 99638
+5piv5LiA 99639
+56ue 99640
+6L+O 99641
+6IGa 99642
+6Ks= 99643
+5o2f 99644
+5omn 99645
+6am+ 99646
+6L+d 99647
+6KU= 99648
+6KA= 99649
+5LuW5Lus 99650
+5pe25YCZ 99651
+5a6D 99652
+5Lq65ZGY 99653
+6L+Z5qC3 99654
+5bel56iL 99655
+5Yib5paw 99656
+5a2p5a2Q 99657
+5biM 99658
+6YOo5YiG 99659
+6ZO2 99660
+5Luj6KGo 99661
+6aaZ 99662
+5biu 99663
+5o6o6L+b 99664
+55uY 99665
+56ev5p6B 99666
+6YOo6Zeo 99667
+5Z+5 99668
+5q2m 99669
+5LiN5Lya 99670
+562R 99671
+6YCZ 99672
+546p5a62 99673
+5ou/ 99674
+5Y6C 99675
+5q+b 99676
+54G1 99677
+5q2M 99678
+57u/ 99679
+5aaI 99680
+55ub 99681
+6aaG 99682
+6aG6 99683
+6IS4 99684
+5bC8 99685
+5Li9 99686
+5aWl 99687
+6YGH 99688
+6K+N 99689
+5bCB 99690
+5Lid 99691
+5aW955qE 99692
+5ouF 99693
+6ISx 99694
+5oG2 99695
+5Y6a 99696
+5Yqz 99697
+55uf 99698
+5oqY 99699
+5Y+l 99700
+5oCA 99701
+5p+T 99702
+5Lmm6K6w 99703
+5Yag 99704
+6bKc 99705
+5qaC 99706
+6ZqQ 99707
+5bmF 99708
+6LWe 99709
+5bmV 99710
+5qWt 99711
+6YGX 99712
+5Yik 99713
+6Jg= 99714
+5bY= 99715
+5oqV6LWE 99716
+6KGM5Lia 99717
+5LqR 99718
+546v5aKD 99719
+5a2m55Sf 99720
+5ZCI5L2c 99721
+5YGl5bq3 99722
+6aOe 99723
+5LiA5q2l 99724
+5LiA55u0 99725
+5Y+R55Sf 99726
+6Zi/ 99727
+6aKG5a+8 99728
+5Zac5qyi 99729
+5bqU6K+l 99730
+54K6 99731
+6K6t 99732
+5p2A 99733
+5riv 99734
+5Lqk6YCa 99735
+6Zi2 99736
+6ZKi 99737
+5Luk 99738
+5bC9 99739
+5q+N 99740
+6KGj 99741
+57KJ 99742
+6aG2 99743
+5Lmf5LiN 99744
+5oqT 99745
+6Ium 99746
+5bm4 99747
+56S8 99748
+56ys5LiJ 99749
+5aSn55qE 99750
+6YGO 99751
+54Of 99752
+6YG/ 99753
+5LuN 99754
+5bqG 99755
+5oCV 99756
+6LCi 99757
+55uW 99758
+5bCE 99759
+6Zyy 99760
+5paX 99761
+54q2 99762
+5a24 99763
+5q+V 99764
+5beo 99765
+55+/ 99766
+55qH 99767
+5bit 99768
+55eH 99769
+5oms 99770
+5bu2 99771
+5L6n 99772
+5reh 99773
+55qE5LiA 99774
+57ay 99775
+5rSB 99776
+57g= 99777
+6KeI 99778
+5625 99779
+56eY 99780
+6K+K 99781
+54++ 99782
+6KqJ 99783
+5q+r 99784
+8Kg= 99785
+5Y20 99786
+5oiQ5Li6 99787
+6IO95Yqb 99788
+6buE 99789
+5peF5ri4 99790
+6Iis 99791
+5q+U6L6D 99792
+6LW35p2l 99793
+5LqG6Kej 99794
+6Ieq54S2 99795
+5LiA5qyh 99796
+5Z+65pys 99797
+5pu+ 99798
+57u85ZCI 99799
+6I+c 99800
+6KeJ5b6X 99801
+56ys5LqM 99802
+6LeR 99803
+5rOi 99804
+5YCS 99805
+56GA 99806
+5YW1 99807
+6I2J 99808
+55Sz 99809
+55Sw 99810
+5oKj 99811
+6KeE5a6a 99812
+6IOc 99813
+6LWE5Lqn 99814
+5qKm 99815
+5pyd 99816
+6L+Z6YeM 99817
+5aSr 99818
+5oyl 99819
+5L2b 99820
+5a6I 99821
+6Zu2 99822
+5pa8 99823
+56+H 99824
+5bKb 99825
+5ZOl 99826
+6a2U 99827
+5LiN5Yiw 99828
+5omY 99829
+5bqK 99830
+5qyn 99831
+6I2j 99832
+5rGH 99833
+5omp 99834
+5YGP 99835
+5aKZ 99836
+6K6v 99837
+5ama 99838
+5oOg 99839
+5rSL 99840
+5a6c 99841
+5ram 99842
+5oWi 99843
+6YCP 99844
+5a69 99845
+6aG+ 99846
+57Sv 99847
+5rGh 99848
+54iG 99849
+56ef 99850
+5oOK 99851
+5rao 99852
+6aWw 99853
+6Zi1 99854
+6aWu 99855
+5pqW 99856
+5bqf 99857
+5peX 99858
+6ZqU 99859
+57aT 99860
+5YuZ 99861
+5a+m 99862
+6YCU 99863
+5omr 99864
+54OI 99865
+6Zu7 99866
+5YiR 99867
+6Zec 99868
+6Zeq 99869
+5aWL 99870
+5YKo 99871
+57yp 99872
+5L61 99873
+5aw= 99874
+8Ky2 99875
+5Zu96ZmF 99876
+57uE57uH 99877
+5LiT5Lia 99878
+5Y+R546w 99879
+5biM5pyb 99880
+57uP6JCl 99881
+5Y+r 99882
+5p2l6K+0 99883
+6Zqc 99884
+5Lu75L2V 99885
+5Lqk5piT 99886
+6YeN54K5 99887
+55qu 99888
+57uN 99889
+5rS+ 99890
+56eR5a2m 99891
+5bqU55So 99892
+5bu6562R 99893
+6IKJ 99894
+5pS56Z2p 99895
+5Z+656GA 99896
+5rGJ 99897
+5Ye65p2l 99898
+6L+Z5LmI 99899
+5Yia 99900
+5Z2Q 99901
+5LiN5LuF 99902
+5Lya6K6u 99903
+6Z2g 99904
+5aqS5L2T 99905
+5rC4 99906
+5Yay 99907
+6IuP 99908
+5aSu 99909
+54i2 99910
+5aCC 99911
+5a6e6ZmF 99912
+6KGX 99913
+56ul 99914
+6ZiF 99915
+5LqL5oOF 99916
+5Y6f5Zug 99917
+6YW4 99918
+5Lul5p2l 99919
+5aix 99920
+5a6r 99921
+5Z2X 99922
+57up 99923
+6YeO 99924
+5LiN5b6X 99925
+5Lyg5aWH 99926
+56Gs 99927
+5Y6F 99928
+5pei 99929
+57uD 99930
+6ISR 99931
+5byx 99932
+5o6M 99933
+6LS0 99934
+5oyC 99935
+5YWz6ZSu 99936
+5bCa 99937
+6aWt 99938
+5bqE 99939
+55m8 99940
+5ZyL 99941
+5o6I 99942
+5Liq5pyI 99943
+5LqI 99944
+5biB 99945
+6Led 99946
+5rKJ 99947
+56uf 99948
+5Yas 99949
+5oq9 99950
+6YaS 99951
+5byf 99952
+6Kem 99953
+6IGY 99954
+6LGG 99955
+5pq0 99956
+5ZGK6K+J 99957
+6LGq 99958
+6LWi 99959
+6Leo 99960
+6LOH 99961
+54i4 99962
+5oqx 99963
+5rWq 99964
+6bq7 99965
+5Luq 99966
+6KGh 99967
+5aW2 99968
+54G+ 99969
+6LW2 99970
+6IKl 99971
+5aeQ 99972
+5YC6 99973
+6ZyH 99974
+6K6i 99975
+5qyK 99976
+57c= 99977
+5buJ 99978
+5L+X 99979
+5b+Y 99980
+5aaH 99981
+57yT 99982
+5a2V 99983
+5ryr 99984
+6KOB 99985
+54eD 99986
+6buY 99987
+54mi 99988
+54i3 99989
+5oq1 99990
+5a6+ 99991
+5pyJ5LiA 99992
+6L+5 99993
+6L+r 99994
+6LKM 99995
+5pyJ55qE 99996
+8KyY 99997
+6L+Y5piv 99998
+5omA5Lul 99999
+5Lmf5piv 100000
+6L+Z5Lqb 100001
+5a+55LqO 100002
+5ZCn 100003
+55uu5YmN 100004
+6Ieq5bex55qE 100005
+6IO95aSf 100006
+5aaC5L2V 100007
+5py65p6E 100008
+5Y+q5piv 100009
+572R56uZ 100010
+5YWo6Z2i 100011
+5Li65LqG 100012
+5byA5Y+R 100013
+5paw6Ze7 100014
+6YeR6J6N 100015
+57un 100016
+5a6i5oi3 100017
+5LiA6LW3 100018
+6Iy2 100019
+5YWz5rOo 100020
+5rC05bmz 100021
+5Y6G5Y+y 100022
+5aKe6ZW/ 100023
+6bE= 100024
+5Z+66YeR 100025
+5bqt 100026
+5Y+2 100027
+5L+D 100028
+6Zuo 100029
+5raI6LS5 100030
+6Ii5 100031
+55+l6K+G 100032
+5oiY55Wl 100033
+57uP6aqM 100034
+5bOw 100035
+5puy 100036
+6ISa 100037
+5Yaw 100038
+5aSP 100039
+5b2S 100040
+56yU 100041
+6JmR 100042
+55Sy 100043
+5ZyI 100044
+6K+X 100045
+6b2Q 100046
+5a655piT 100047
+56CU5Y+R 100048
+6aqo 100049
+57q4 100050
+6Le1 100051
+5pen 100052
+55W2 100053
+5Yi4 100054
+6LS3 100055
+5Y+s 100056
+56eL 100057
+5ray 100058
+6KGM5pS/ 100059
+54yu 100060
+6IKk 100061
+6YCQ 100062
+6LaK5p2l 100063
+6LaK5p2l6LaK 100064
+5oSP6KeB 100065
+6Iie 100066
+5YmC 100067
+5raJ 100068
+56iL5bqm 100069
+5YWs5YWx 100070
+5qKw 100071
+5pyr 100072
+57qv 100073
+5ZSx 100074
+5rSy 100075
+5oqi 100076
+5qSN 100077
+5b+Z 100078
+5Lyw 100079
+5by5 100080
+5rOJ 100081
+5pyA5aSn 100082
+6LaL 100083
+5ben 100084
+56aB 100085
+5om2 100086
+5Y2x 100087
+54+g 100088
+54af 100089
+5ouc 100090
+5Li75LmJ 100091
+5p2C 100092
+6ZmE 100093
+6YGN 100094
+5pCt 100095
+5oyv 100096
+5aSa5bm0 100097
+5pWs 100098
+5pGE 100099
+57q3 100100
+5byD 100101
+5rm/ 100102
+5aiY 100103
+5qGj 100104
+6am2 100105
+5pyX 100106
+5q6W 100107
+5qac 100108
+5ZOh 100109
+5LiA5L2T 100110
+5p+l55yL 100111
+57mB 100112
+5rWT 100113
+5YWs5a6J 100114
+5r2c 100115
+6LSv 100116
+6aqX 100117
+5pCc 100118
+5beh 100119
+6Kw= 100120
+6Yo= 100121
+5aeU5Lya 100122
+5oKg 100123
+5Ymp 100124
+5o+t 100125
+5a2j5bqm 100126
+8KuY 100127
+8Kys 100128
+5LQ= 100129
+8Ko= 100130
+5L2G5piv 100131
+6YO95piv 100132
+5bmz5Y+w 100133
+5a2m5Lmg 100134
+5ZOB54mM 100135
+5LiU 100136
+6L+Z56eN 100137
+5pS/562W 100138
+5ous 100139
+6K6k5Li6 100140
+5LiA6Iis 100141
+5qCH5YeG 100142
+5pSv5oyB 100143
+5qih5byP 100144
+5YWz57O7 100145
+55qE5piv 100146
+6L+Z5LiA 100147
+5LiN6KaB 100148
+55Sa 100149
+57K+56We 100150
+5oul 100151
+5Yip55So 100152
+5L+d5oqk 100153
+5L2c55So 100154
+6Iul 100155
+5Zu95YaF 100156
+5LuL57uN 100157
+5LiA5LiL 100158
+5bel5Lia 100159
+55uu5qCH 100160
+5pyA5ZCO 100161
+5Lu35YC8 100162
+5bCN 100163
+6ZOB 100164
+6LCB 100165
+57uT5p6E 100166
+6Zuq 100167
+5pm66IO9 100168
+5Lyg57uf 100169
+5L2T6IKy 100170
+55Sf5oCB 100171
+5ouN 100172
+5o6q 100173
+5Yac5Lia 100174
+54m56Imy 100175
+6KeE5qih 100176
+5pe25Luj 100177
+6L+H56iL 100178
+6ZKI 100179
+5p2+ 100180
+5ZSQ 100181
+5Yy755aX 100182
+54Gv 100183
+5Yi26YCg 100184
+5qC45b+D 100185
+5LiN5Y+v 100186
+57O75YiX 100187
+5ZCJ 100188
+5Zyj 100189
+5YCR 100190
+5L2z 100191
+5p2l55yL 100192
+5q+U6LWb 100193
+5LiL5p2l 100194
+5Ye65LqG 100195
+5bmy6YOo 100196
+5b6u5L+h 100197
+5b2T5Zyw 100198
+5Y23 100199
+5Y2r55Sf 100200
+5Lyf 100201
+55ar5oOF 100202
+6LC3 100203
+5Yeg5Liq 100204
+6Zi0 100205
+55Sf54mp 100206
+5bCk 100207
+5LyK 100208
+6IKv 100209
+6Z2i56ev 100210
+5Yib6YCg 100211
+5o+h 100212
+5ZyG 100213
+5pmT 100214
+5oiQ5LqG 100215
+5Yeh 100216
+55a+ 100217
+56ue5LqJ 100218
+6K6o 100219
+5Li76aKY 100220
+6bKB 100221
+6L+q 100222
+5L+E 100223
+5oCq 100224
+5Lim 100225
+6Jma 100226
+5r2u 100227
+54On 100228
+6ICz 100229
+5rGg 100230
+6YCC5ZCI 100231
+5qC55pys 100232
+5Yqg55uf 100233
+55S16KeG 100234
+5re3 100235
+57yY 100236
+56qX 100237
+54qv 100238
+5oOv 100239
+5oSP5LmJ 100240
+5Yqe5rOV 100241
+5LyR 100242
+5ruR 100243
+5YuH 100244
+5pWi 100245
+5a+7 100246
+6KaG 100247
+6YCD 100248
+57uP55CG 100249
+5Z2P 100250
+5rO9 100251
+5LmY 100252
+5Yi6 100253
+5bGP 100254
+6aG/ 100255
+5Lqh 100256
+6YKA 100257
+5YW8 100258
+5Yuk 100259
+5q6L 100260
+5pig 100261
+5q+V5Lia 100262
+5oiq 100263
+6LeM 100264
+5aOB 100265
+5Y+m5LiA 100266
+55yf5a6e 100267
+56Oo 100268
+6K+a 100269
+5b+F6KaB 100270
+5oGL 100271
+5oeC 100272
+5b6S 100273
+6LCT 100274
+5pWP 100275
+5pmo 100276
+6IO4 100277
+5ou8 100278
+5aaZ 100279
+6K+4 100280
+6IGK 100281
+5oKJ 100282
+6bq8 100283
+5Yet 100284
+6IiS 100285
+5raC 100286
+6L+B 100287
+5rK/ 100288
+5aGR 100289
+5pu/ 100290
+5r6z 100291
+5b+N 100292
+6ICX 100293
+6Zy4 100294
+5Yeg5bm0 100295
+5YiK 100296
+6ISJ 100297
+6IWQ 100298
+5qGM 100299
+57qg 100300
+5rua 100301
+5oKy 100302
+5YaS 100303
+5aa5 100304
+55WF 100305
+57q1 100306
+5pGH 100307
+5aS6 100308
+6Lev5LiK 100309
+5b+9 100310
+6Jaq 100311
+5oGQ 100312
+5oSP5oCd 100313
+5auM 100314
+5o+0 100315
+5rCn 100316
+6ICA 100317
+6Zi7 100318
+6L2o 100319
+5bm7 100320
+5o2V 100321
+5Z2m 100322
+5ZOI5ZOI 100323
+54uQ 100324
+5ruo 100325
+6LK7 100326
+6L+f 100327
+5Lq66YO9 100328
+57uY 100329
+5Y+5 100330
+57WQ 100331
+5omw 100332
+5ruL 100333
+5aWR 100334
+5Yuf 100335
+56K6 100336
+8KY= 100337
+6ZuG5Zui 100338
+5p2O 100339
+5byA5bGV 100340
+5o+Q5Y2H 100341
+5YWo5Zu9 100342
+5rG96L2m 100343
+5a2m5qCh 100344
+5qC55o2u 100345
+6L+Z5piv 100346
+5Ye6546w 100347
+6ZmI 100348
+572X 100349
+6I635b6X 100350
+5YiY 100351
+6ZSA5ZSu 100352
+5pyq5p2l 100353
+6ZyA5rGC 100354
+5a6e5pa9 100355
+5Z2a5oyB 100356
+5YWo55CD 100357
+6ZO26KGM 100358
+5o6n5Yi2 100359
+6aG7 100360
+5Zyw5Yy6 100361
+5omT6YCg 100362
+55qE6K+d 100363
+5biu5Yqp 100364
+5L2T57O7 100365
+6L6+5Yiw 100366
+6KeE5YiS 100367
+5Z+56K6t 100368
+5Lik5Liq 100369
+5oql5ZGK 100370
+5Zyw5pa5 100371
+5a6M5YWo 100372
+5o6J 100373
+57uT5ZCI 100374
+5a6j5Lyg 100375
+5rOV5b6L 100376
+6Im65pyv 100377
+55S15b2x 100378
+6Kqq 100379
+5LiA54K5 100380
+6LaF6L+H 100381
+55S15a2Q 100382
+5oCd5oOz 100383
+5pWZ5a2m 100384
+6Zi25q61 100385
+5ZWG5Lia 100386
+54mp5rWB 100387
+5Yib5Lia 100388
+5pa55qGI 100389
+546w5Luj 100390
+5qGl 100391
+6JC95a6e 100392
+5bim5p2l 100393
+5Lqn55Sf 100394
+56eA 100395
+5rOw 100396
+5Lmx 100397
+5YW35L2T 100398
+5Zad 100399
+6JOd 100400
+5a6X 100401
+5Y2H57qn 100402
+5rex5YWl 100403
+5L+d6Zmp 100404
+566A5Y2V 100405
+55eb 100406
+56iz5a6a 100407
+6L6G 100408
+5bGe5LqO 100409
+5bed 100410
+5LiN5bCR 100411
+5ZKo 100412
+5Lic6KW/ 100413
+5b2i5byP 100414
+5aix5LmQ 100415
+5q2j5bi4 100416
+6bih 100417
+5YWF5YiG 100418
+5a6e6Le1 100419
+6YeM6Z2i 100420
+6Lez 100421
+6JmO 100422
+5oiQ6ZW/ 100423
+5pqX 100424
+552h 100425
+572q 100426
+55CG5b+1 100427
+5oyR 100428
+6LWE5pys 100429
+5aSa5bCR 100430
+5LiL6Z2i 100431
+5bid 100432
+5YWs5byA 100433
+5riQ 100434
+6ZW3 100435
+5bGL 100436
+5qyi6L+O 100437
+5b+D55CG 100438
+54KO 100439
+5rm+ 100440
+6K6T 100441
+6YKE 100442
+57OW 100443
+5LmM 100444
+5Yqx 100445
+54mZ 100446
+6IW/ 100447
+5bKX 100448
+5LyN 100449
+5oiQ5ZGY 100450
+5a2U 100451
+5bCP57yW 100452
+6JGj 100453
+5rOh 100454
+5YWI6L+b 100455
+5YWn 100456
+5Zi0 100457
+6LSd 100458
+6Ls= 100459
+5pCe 100460
+5rOb 100461
+6bif 100462
+572y 100463
+6JuL 100464
+5Li75Lu7 100465
+55uu55qE 100466
+5LmP 100467
+5rSl 100468
+5oi0 100469
+5Lil5qC8 100470
+54Wk 100471
+54yr 100472
+5ZSv 100473
+5bCK 100474
+55Sc 100475
+5Z6D 100476
+5Zy+ 100477
+5ouf 100478
+54Sm 100479
+6auU 100480
+5a6P 100481
+5qmf 100482
+6am7 100483
+5peB 100484
+5b27 100485
+6YO95LiN 100486
+5pGp 100487
+5LuT 100488
+5Lmz 100489
+5bK4 100490
+6LCL 100491
+5aSn5aSa 100492
+54Gt 100493
+6IW+ 100494
+5p+c 100495
+6IiN 100496
+5YWa55qE 100497
+5bCY 100498
+5Y2B5bm0 100499
+5ouS 100500
+6KOh 100501
+5p+U 100502
+5bm8 100503
+6ZSB 100504
+5LiT6aG5 100505
+5omO 100506
+6am+6am2 100507
+56KO 100508
+6KKL 100509
+6ZSL 100510
+5aOu 100511
+5bCW 100512
+55S15rGg 100513
+6L+U 100514
+5ryP 100515
+5b6q 100516
+6I+M 100517
+6IOD 100518
+6L6F 100519
+6YCS 100520
+6IOO 100521
+6Zmq 100522
+5a+/ 100523
+5aWU 100524
+54yb 100525
+57q5 100526
+55+l5ZCN 100527
+5b+G 100528
+5qGD 100529
+5qOL 100530
+6YCG 100531
+54K8 100532
+57GN 100533
+54mn 100534
+5qC355qE 100535
+6L6b 100536
+5aCG 100537
+5a6e5Zyo 100538
+5LyP 100539
+5a6/ 100540
+6LWP 100541
+6KOC 100542
+5Y2K5bm0 100543
+5YC+ 100544
+5ruh5oSP 100545
+5qKv 100546
+5oSP5ZGz 100547
+5a2k 100548
+56Wd 100549
+5pm2 100550
+6LWU 100551
+5YG/ 100552
+6ISC 100553
+572a 100554
+56KN 100555
+5rKD 100556
+5pON 100557
+5bSH 100558
+5pqC 100559
+6LeD 100560
+5pCs 100561
+5amG 100562
+6Yk= 100563
+6Ym0 100564
+5YW06Laj 100565
+6JCl5Lia 100566
+6K6K 100567
+6ISP 100568
+6L6I 100569
+5bee5biC 100570
+6LSr5Zuw 100571
+56m3 100572
+5Lit5bCP 100573
+5ryC 100574
+55mM 100575
+6Jyc 100576
+5LyZ5Ly0 100577
+54m1 100578
+5oKf 100579
+6Zm3 100580
+6LWb5a2j 100581
+5qij 100582
+5YG2 100583
+5piG 100584
+6KKt 100585
+5o2Q 100586
+6Imw 100587
+5oKs 100588
+55Si 100589
+6JGh 100590
+55uX 100591
+5am0 100592
+5bCO 100593
+57q9 100594
+5YCh 100595
+5omu 100596
+6Kit 100597
+5oqR 100598
+56GV 100599
+6L6W 100600
+6YOB 100601
+6L6p 100602
+6YK7 100603
+546w5Ye6 100604
+6KaP 100605
+5b25 100606
+6ZiU 100607
+5Ym1 100608
+6K+x 100609
+5oOR 100610
+5reA 100611
+6aKI 100612
+5L6m 100613
+5oGw 100614
+5qOA5a+f 100615
+6Yar 100616
+54S25piv 100617
+5YuD 100618
+6Iyr 100619
+5JM= 100620
+8Ky4 100621
+5L2c5Li6 100622
+55qE5Lq6 100623
+6YKj5LmI 100624
+576O5Zu9 100625
+6L+Y5pyJ 100626
+5o+Q6auY 100627
+6Jm9 100628
+5YW35pyJ 100629
+5YyF5ous 100630
+5oiW6ICF 100631
+5LiN6L+H 100632
+5LiK5rW3 100633
+5Yy76Zmi 100634
+6LWE6YeR 100635
+55Sa6Iez 100636
+5Yi25bqm 100637
+6Kej5Yaz 100638
+6IGU572R 100639
+57un57ut 100640
+5bu656uL 100641
+6L+b5LiA5q2l 100642
+5p2Q5paZ 100643
+5LuK5aSp 100644
+5b+F6aG7 100645
+5ZCE56eN 100646
+546w5Zy6 100647
+5LuW55qE 100648
+5aKe5Yqg 100649
+6aKG5Z+f 100650
+5Y+C5LiO 100651
+5oyB57ut 100652
+5LmL5LiA 100653
+54m55Yir 100654
+6bG8 100655
+5YWx5ZCM 100656
+5Yqq 100657
+546J 100658
+5Lq65Lus 100659
+5YWI55Sf 100660
+5LyY5Yq/ 100661
+5L+d5oyB 100662
+5L2c5ZOB 100663
+54mb 100664
+5oiQ5pys 100665
+5pS25YWl 100666
+5Y+K5pe2 100667
+6LSf6LSj 100668
+5o6l5Y+X 100669
+6I2Q 100670
+5Y+q6KaB 100671
+55yf55qE 100672
+5a+86Ie0 100673
+5py65Yi2 100674
+6KGM5Yqo 100675
+5paw55qE 100676
+5a6M5ZaE 100677
+5Li65LuA5LmI 100678
+5Lit5aSu 100679
+5oiQ56uL 100680
+5oSf6KeJ 100681
+5Y+Y5YyW 100682
+5Y+X5Yiw 100683
+5bm25LiN 100684
+5a2Z 100685
+5pa95bel 100686
+5piO5pi+ 100687
+6L+H5Y67 100688
+5Y+R5oyl 100689
+55yf5q2j 100690
+5Z+65Zyw 100691
+5piO56Gu 100692
+6IOh 100693
+6K645aSa 100694
+5LiA5bm0 100695
+5pa55ZCR 100696
+5oGp 100697
+55u45L+h 100698
+5Zyz 100699
+6K+m57uG 100700
+5LqL5Lia 100701
+55Sf5ZG9 100702
+5ZKo6K+i 100703
+5paH5piO 100704
+55Ge 100705
+57u/6Imy 100706
+6I6r 100707
+5oSP6K+G 100708
+5oqV5YWl 100709
+5Yqg5b+r 100710
+5qKF 100711
+57+7 100712
+5byA5pS+ 100713
+5pmu6YCa 100714
+5Y2P5Lya 100715
+5oiQ57up 100716
+5LuZ 100717
+5a+S 100718
+6K+B5Yi4 100719
+6K6k6K+G 100720
+5Li5 100721
+5aSn6YeP 100722
+6L+F 100723
+5YGa5Yiw 100724
+6K6+5pa9 100725
+6LS45piT 100726
+6IO95rqQ 100727
+5pe25pyf 100728
+5LiA5aSp 100729
+5rK755CG 100730
+5ZiJ 100731
+5a6H 100732
+5Liw5a+M 100733
+5Li+6KGM 100734
+5oiQ5p6c 100735
+6IKv5a6a 100736
+54uX 100737
+5Yqo5Yqb 100738
+5qOu 100739
+5Yeg5LmO 100740
+5Zug57Sg 100741
+5rCR5peP 100742
+5rSe 100743
+572R5Y+L 100744
+5ZCI55CG 100745
+5bm/5aSn 100746
+5q6K 100747
+5rSb 100748
+5p2v 100749
+6JKZ 100750
+55So5LqO 100751
+6J6N6LWE 100752
+56WW 100753
+5py65qKw 100754
+5Li+5Yqe 100755
+6Ieq5Yqo 100756
+5Yqe5YWs 100757
+6bue 100758
+6ZuE 100759
+5YC85b6X 100760
+54yq 100761
+5Lul5Li6 100762
+5piM 100763
+6Led56a7 100764
+5ZC45byV 100765
+57uV 100766
+6ZqG 100767
+6K6h566X 100768
+6Zif5LyN 100769
+5aSn5Lya 100770
+5byV6LW3 100771
+54m554K5 100772
+6IO2 100773
+5bm06L27 100774
+5pys6Lqr 100775
+5py65YWz 100776
+5a6Y5pa5 100777
+6YOR 100778
+5rWZ 100779
+6KeS6Imy 100780
+6JGj5LqL 100781
+5Li65Li7 100782
+5peg6K66 100783
+5Lmg5oOv 100784
+5qWa 100785
+5ouT 100786
+57uf6K6h 100787
+5YWE 100788
+5bm/5rOb 100789
+5Y2A 100790
+5rGh5p+T 100791
+6KuL 100792
+6IqC55uu 100793
+5Lym 100794
+6KaG55uW 100795
+6ICQ 100796
+5om26LSr 100797
+57uP5Y6G 100798
+6YeN6KaB55qE 100799
+6IKh5Lic 100800
+5oub6IGY 100801
+5Zub5Liq 100802
+5oeJ 100803
+6IOe 100804
+5pGG 100805
+6auY6YCf 100806
+6bqm 100807
+5Y6f5YiZ 100808
+6I6x 100809
+5pu05aW9 100810
+6ZWc 100811
+5YeM 100812
+5Z6D5Zy+ 100813
+6YCy 100814
+54Gw 100815
+6ZO6 100816
+5LqL5pWF 100817
+55SY 100818
+56m65rCU 100819
+6b6E 100820
+6I+y 100821
+55O2 100822
+5pio 100823
+5pel5oql 100824
+5rWu 100825
+5Zyw5Zu+ 100826
+5ZGI 100827
+5aSn5Yqb 100828
+57uq 100829
+5biF 100830
+5pyN5YuZ 100831
+5LiN6ZSZ 100832
+5Lmh5p2R 100833
+5bGl 100834
+5bmz5pa5 100835
+6Zey 100836
+5omj 100837
+57Sg6LSo 100838
+6LW0 100839
+6YGt 100840
+6JCo 100841
+6Ieq5Li7 100842
+6YeR5bGe 100843
+6Imv5aW9 100844
+5Lik5bm0 100845
+5rOl 100846
+6aKc 100847
+57K+5b2p 100848
+5Lit5Y2O 100849
+5pmL 100850
+5Lmg6L+R 100851
+5Lmg6L+R5bmz 100852
+5oiY5aOr 100853
+5YGa55qE 100854
+6aqR 100855
+5ru0 100856
+55Oc 100857
+54mI5p2D 100858
+6IKg 100859
+5pyD5ZOh 100860
+54+N 100861
+56iu 100862
+5Lu/ 100863
+54mp5Lia 100864
+5YCL5Lq6 100865
+5aa7 100866
+5Ly4 100867
+5rGX 100868
+5pe6 100869
+55CG5oOz 100870
+5pG4 100871
+6L+d5rOV 100872
+5a6M5pW0 100873
+5Y6m 100874
+6LiP 100875
+5paR 100876
+5qGC 100877
+5L2T5Yi2 100878
+5bir 100879
+5p2G 100880
+5q6/ 100881
+5q+B 100882
+6aaI 100883
+6KeS5bqm 100884
+5qyj 100885
+54Om 100886
+6IK6 100887
+6YeH6K6/ 100888
+5pGY 100889
+5oyh 100890
+5reY 100891
+5YW76ICB 100892
+54K4 100893
+6L+I 100894
+5Y6J 100895
+5Z2K 100896
+6L6j 100897
+5Yed 100898
+5rOq 100899
+55aP 100900
+5o6Y 100901
+5YOP5piv 100902
+6ZuV 100903
+57yd 100904
+6I23 100905
+5o23 100906
+5aCh 100907
+5Y+l6K+d 100908
+55a8 100909
+5qCP 100910
+6YG1 100911
+56Kz 100912
+5bel5ZWG 100913
+5pC6 100914
+5Yil 100915
+5LmZ 100916
+5peL 100917
+5oOc 100918
+5LiA5aSn 100919
+5bGC5qyh 100920
+6LWW 100921
+5oqs 100922
+5qiC 100923
+6K+e 100924
+5YWS 100925
+56+u 100926
+6IKD 100927
+5ae/ 100928
+5oqa 100929
+55O3 100930
+55S15Yqo 100931
+5paw5Yag 100932
+5ra1 100933
+56KR 100934
+5reu 100935
+5peo 100936
+6Liq 100937
+5riU 100938
+5oSI 100939
+5Y+U 100940
+5Y2X55yB 100941
+576p 100942
+5aeU5Lmm6K6w 100943
+6LK4 100944
+5raM 100945
+6KuW 100946
+6JCE 100947
+5o+P 100948
+5b+n 100949
+6L6m 100950
+5aaG 100951
+5omt 100952
+5ZG1 100953
+6YGl 100954
+6Kix 100955
+5LuH 100956
+5Y2B5LiJ 100957
+5Ymy 100958
+6KqN 100959
+6Iiw 100960
+6aKH 100961
+6aWx 100962
+54ug 100963
+6auY55qE 100964
+57Wx 100965
+5oWO 100966
+6aKB 100967
+5ZCI6YCC 100968
+5rW0 100969
+6LWL 100970
+5oq8 100971
+5aal 100972
+6Zmi6ZW/ 100973
+6ICV 100974
+6L6o 100975
+5oWw 100976
+5Y2B5Zub 100977
+5py1 100978
+6JOE 100979
+5p6i 100980
+5bu3 100981
+5oKE 100982
+5rav 100983
+55+p 100984
+5a2Q6YeM 100985
+54q5 100986
+5bGA6ZW/ 100987
+6ZA= 100988
+5aWg 100989
+5Lya6ZW/ 100990
+5pOa 100991
+5LiN5Y+K 100992
+5Y2B5Lmd 100993
+5qy6 100994
+6Lq6 100995
+6ZiQ 100996
+57qM 100997
+6Ki7 100998
+5YaK 100999
+6K2Y 101000
+6auY562J 101001
+6IW6 101002
+5aSV 101003
+57uR 101004
+5ZSk 101005
+6JW0 101006
+55Wc 101007
+5oWL 101008
+5Y+Z 101009
+5Y+D 101010
+5bOh 101011
+5Lq65aSn 101012
+6YW/ 101013
+6YGp 101014
+5aWi 101015
+5Y+j5rCU 101016
+6YyE 101017
+6Y8= 101018
+5YuY 101019
+6LS/ 101020
+6Zqq 101021
+6Ys= 101022
+6Zq2 101023
+8KU= 101024
+8Kyj 101025
+8KM= 101026
+8KuN 101027
+8Kyz 101028
+8KuT 101029
+8KuE 101030
+8Kuf 101031
+8Kix 101032
+5Jc= 101033
+5Lul5Y+K 101034
+5pyJ6ZmQ 101035
+5ZGi 101036
+5ZCX 101037
+55yL5Yiw 101038
+6K6h5YiS 101039
+6L+b5YWl 101040
+55u05o6l 101041
+5YiG5p6Q 101042
+5Y+q5pyJ 101043
+6K6+5aSH 101044
+5YW25a6e 101045
+5Yqg5by6 101046
+5Lit55qE 101047
+5L+d6Zqc 101048
+6ICB5biI 101049
+5Lq65omN 101050
+5b6X5Yiw 101051
+6aOO6Zmp 101052
+5LiA56eN 101053
+56m66Ze0 101054
+5oiR5Zu9 101055
+5LmL5YmN 101056
+5LiT5a62 101057
+5p2o 101058
+5pel5pys 101059
+576k5LyX 101060
+5Y+C5Yqg 101061
+5pWI5p6c 101062
+5pyJ5YWz 101063
+5a625bqt 101064
+5Yy65Z+f 101065
+5Yqq5Yqb 101066
+6ZqP552A 101067
+5peg5rOV 101068
+5Lqk5rWB 101069
+6KGM5Li6 101070
+5qOA5p+l 101071
+5pyf6Ze0 101072
+5aaC5q2k 101073
+6IKh5Lu9 101074
+5b2T5pe2 101075
+6KOF5aSH 101076
+5YeG5aSH 101077
+6YWS5bqX 101078
+6L+Q5Yqo 101079
+5o+Q5Ye6 101080
+5bem5Y+z 101081
+5o6q5pa9 101082
+6aOf5ZOB 101083
+5raI6LS56ICF 101084
+5a2m6Zmi 101085
+5oyH5a+8 101086
+6L+Q6JCl 101087
+6YeN5aSn 101088
+5Yac5p2R 101089
+6YCg5oiQ 101090
+5pS/5rK7 101091
+6ZKI5a+5 101092
+5q2j5byP 101093
+5Y+W5b6X 101094
+6YKj5Liq 101095
+6ZuG5Lit 101096
+5Y+q6IO9 101097
+5b+r6YCf 101098
+6Lqr5L2T 101099
+5YWa5ZGY 101100
+6IGU5ZCI 101101
+5Yqb6YeP 101102
+6YO95pyJ 101103
+5oWn 101104
+5aGU 101105
+5Yir5Lq6 101106
+6KGo546w 101107
+5pWF5LqL 101108
+5LiA5YiH 101109
+5bCH 101110
+6LWE5paZ 101111
+5Z+55YW7 101112
+6ZiF6K+7 101113
+5pyJ5Lq6 101114
+6JCl6ZSA 101115
+55uR552j 101116
+546v5L+d 101117
+6ICD6JmR 101118
+5rex5Zyz 101119
+5Lil6YeN 101120
+6IyD5Zu0 101121
+5aeU5ZGY 101122
+55uR566h 101123
+5LiJ5Liq 101124
+6KOF5L+u 101125
+5YWs6YeM 101126
+5YiG5Yir 101127
+55CG6Kej 101128
+6Z+p 101129
+5Yqg5bel 101130
+6K6k55yf 101131
+5LiN5aW9 101132
+5Y675bm0 101133
+6ZmN5L2O 101134
+5py65Lya 101135
+5Y2P6K6u 101136
+56ym5ZCI 101137
+5aKe5by6 101138
+5oqA6IO9 101139
+6aaW5YWI 101140
+56em 101141
+5LiB 101142
+5bC+ 101143
+5pyJ5LqG 101144
+5Zyw5Lqn 101145
+5rig 101146
+5pa55L6/ 101147
+56e75Yqo 101148
+6YCf5bqm 101149
+5bCk5YW2 101150
+6YCa55+l 101151
+5Z2b 101152
+6YG/5YWN 101153
+5oGi 101154
+6LSh 101155
+6IGM5bel 101156
+5a6e5Yqb 101157
+5piv5LiA56eN 101158
+5ZCv5Yqo 101159
+55a+55eF 101160
+5p2l5LqG 101161
+55u45a+5 101162
+546w5a6e 101163
+6J6N5ZCI 101164
+5ZCM5qC3 101165
+5YWs5ZGK 101166
+54m55q6K 101167
+57Sr 101168
+5LiL5Y67 101169
+5Lyg5pKt 101170
+5pyA5aW9 101171
+5LyY6LSo 101172
+5rKS 101173
+5oy6 101174
+5pem 101175
+6K+6 101176
+5LiA5ZCN 101177
+6YGT6Lev 101178
+56S66IyD 101179
+6L+H5p2l 101180
+5ZCM5a2m 101181
+6byT 101182
+5p2t 101183
+5pys5qyh 101184
+5ZCM5oSP 101185
+5LiW57qq 101186
+576K 101187
+5qyy 101188
+5bel6Im6 101189
+55Om 101190
+5Lq65aOr 101191
+5pyJ5omA 101192
+5LuO5LqL 101193
+5pyJ5b6I5aSa 101194
+5LiN5LqG 101195
+5bKX5L2N 101196
+5Y+Y5b6X 101197
+5Yqz5Yqo 101198
+5aSE5LqO 101199
+5bmz5Z2H 101200
+5b2i6LGh 101201
+5aGe 101202
+5YWx5Lqr 101203
+552b 101204
+5Yip5ram 101205
+5q2j5piv 101206
+5b6A5b6A 101207
+55u45q+U 101208
+5qiq 101209
+5Yi3 101210
+5rWZ5rGf 101211
+5aSn6YOo5YiG 101212
+5aSa5Liq 101213
+5oKo55qE 101214
+55S15ZWG 101215
+5b6u5Y2a 101216
+5aeL57uI 101217
+54qv572q 101218
+5piv5Zyo 101219
+57uE5ZCI 101220
+5Y6f5p2l 101221
+5riF5qWa 101222
+5ZCE5Zyw 101223
+5oSf5Y+X 101224
+5b2T5Lit 101225
+6LaL5Yq/ 101226
+5pmv5Yy6 101227
+55yf5piv 101228
+5L6b5bqU 101229
+6L2s5Z6L 101230
+54uC 101231
+6Iac 101232
+6IuX 101233
+5b+g 101234
+5b6I5aSn 101235
+6IKh5p2D 101236
+576O5YWD 101237
+5o6S5ZCN 101238
+5Yqo54mp 101239
+6ZSF 101240
+5aKo 101241
+5Li75bit 101242
+5b6I5aW9 101243
+57ud5a+5 101244
+5p2c 101245
+6L2s6L29 101246
+55KD 101247
+5p2R5rCR 101248
+5ZCo 101249
+5Zut5Yy6 101250
+6auY5bqm 101251
+54mp6LSo 101252
+6L6J 101253
+5pel5bi4 101254
+5o+S 101255
+5LiJ5bm0 101256
+5L2T546w 101257
+5omN5piv 101258
+5Luj55CG 101259
+5LiN566h 101260
+5oGS 101261
+5Zyw5L2N 101262
+57Ku 101263
+6JaE 101264
+5piO55m9 101265
+5LiA6Ie0 101266
+5pu8 101267
+5ZOt 101268
+5Yek 101269
+5Yqy 101270
+5pWM 101271
+5oiY5paX 101272
+5Li75L2T 101273
+5YWs5biD 101274
+5Y+C6ICD 101275
+6Iiq56m6 101276
+5a+6 101277
+5a2m5Lya 101278
+5Y+N5pig 101279
+576O5Li9 101280
+5aSq6Ziz 101281
+5bu65oiQ 101282
+5oWi5oWi 101283
+5ZCE5Liq 101284
+6YKm 101285
+57uE5oiQ 101286
+5LiJ5aSn 101287
+6ZSm 101288
+5aSn5aSa5pWw 101289
+5qaC5b+1 101290
+6a2C 101291
+5YWs55uK 101292
+6I2S 101293
+6Lqr5Lu9 101294
+5rex5Yi7 101295
+5YWp 101296
+57uP5YW4 101297
+5ZCE6aG5 101298
+6JmV 101299
+6L+b5q2l 101300
+5Y2B5LqM 101301
+5omn5rOV 101302
+5oOz5Yiw 101303
+5oSf5p+T 101304
+5ZWG5Yqh 101305
+5bCP57uE 101306
+6JSs 101307
+54+t5a2Q 101308
+5ZCM5b+X 101309
+6Z2i5Li0 101310
+54KS 101311
+5aSa56eN 101312
+6KeC54K5 101313
+5ZOq6YeM 101314
+5bCd 101315
+5aeG 101316
+6IW5 101317
+5Z+O5Yy6 101318
+5aSq5aSa 101319
+55eF5q+S 101320
+5Zyo5LqO 101321
+5omA6LCT 101322
+5pmw 101323
+5p6d 101324
+5ouW 101325
+5a6F 101326
+5pW05rK7 101327
+5L2P5oi/ 101328
+5YG3 101329
+54aK 101330
+6LWB 101331
+5rCb 101332
+5qC85bGA 101333
+5Z+656GA5LiK 101334
+6IOG 101335
+5YW9 101336
+6Zu25ZSu 101337
+5Z2h 101338
+5aWz5a2p 101339
+5pKe 101340
+5YWo5Yqb 101341
+5ZKW 101342
+6IKp 101343
+55yJ 101344
+6Iez5LqO 101345
+5YWa57uE 101346
+5LiA5Lu2 101347
+5ouG 101348
+5LqL5a6e 101349
+5YKz 101350
+5rmY 101351
+57ay56uZ 101352
+5b6q546v 101353
+5ZCM5q+U 101354
+5ouU 101355
+5Yy76I2v 101356
+5YW75q6W 101357
+5Zu65a6a 101358
+5a6e6ZmF5LiK 101359
+6K6w5b6X 101360
+5Yip5LqO 101361
+5oKm 101362
+5ouz 101363
+6IKd 101364
+5pWI55uK 101365
+6Kmy 101366
+5rCR5Li7 101367
+55eH54q2 101368
+6aKo 101369
+5bm85YS/ 101370
+5aeR 101371
+5oiS 101372
+5LiL55qE 101373
+5rih 101374
+5bm05bqV 101375
+6K6w5b+G 101376
+5ZCQ 101377
+5aSn5bmF 101378
+5b69 101379
+5YWs5LyX 101380
+5L+h5b+D 101381
+546b 101382
+5Lya5LiK 101383
+5LmU 101384
+5pGE5b2x 101385
+5qOL54mM 101386
+6ZmV 101387
+5bqU5oCl 101388
+5pS26LS5 101389
+5o6n6IKh 101390
+5Luq5byP 101391
+556s 101392
+5omA5Zyo 101393
+56Kw 101394
+5aeT 101395
+6aGM 101396
+5pSv6YOo 101397
+5L2/5ZG9 101398
+54KJ 101399
+5a+E 101400
+57+8 101401
+5Zyw5LiL 101402
+6L6e 101403
+5L+x 101404
+5Li75oyB 101405
+6LSn5biB 101406
+5oGo 101407
+6IKM 101408
+55uI 101409
+6ZS7 101410
+5b+X5oS/ 101411
+57G75Ly8 101412
+5oyW 101413
+6YC7 101414
+57i9 101415
+57qq5b+1 101416
+5ZWl 101417
+5byv 101418
+5ZCN5a2X 101419
+5YGl6Lqr 101420
+55qE5b+D 101421
+6amx 101422
+6IOM5ZCO 101423
+5rOV5biI 101424
+57KS 101425
+6IO96YeP 101426
+6L6w 101427
+6Imz 101428
+5b28 101429
+5q615pe26Ze0 101430
+5ZCI5rOV 101431
+5pOm 101432
+5769 101433
+5Y6o 101434
+5oiR6K+0 101435
+5LqL5Yqh 101436
+5Yeg5aSp 101437
+5YWB 101438
+57y0 101439
+5Y2T 101440
+5Lik56eN 101441
+54us54m5 101442
+5bi2 101443
+6ZK7 101444
+5oOp 101445
+6aKG5YWI 101446
+6Laz5aSf 101447
+5aOz 101448
+5oSP5ZGz552A 101449
+5YiG5biD 101450
+5LmD 101451
+6YGL 101452
+5L2p 101453
+6LCx 101454
+54Gj 101455
+6I2h 101456
+6LSv5b27 101457
+5bm+ 101458
+56OB 101459
+5YW45Z6L 101460
+5YmH 101461
+5Ya7 101462
+5qyg 101463
+5LiN5LmF 101464
+5rWm 101465
+6a2F 101466
+5byA5LqG 101467
+5L2/55So6ICF 101468
+6L+Z5qy+ 101469
+5bCI 101470
+6ISx6LSr 101471
+5pS75Z2a 101472
+566X5piv 101473
+56iA 101474
+5peg5Lq6 101475
+5aC1 101476
+5aWP 101477
+6YO95biC 101478
+5Y+v6KeB 101479
+5LiN5Ye6 101480
+5re7 101481
+5LqP 101482
+576O5aW9 101483
+6IOW 101484
+6Z+1 101485
+5qCH5b+X 101486
+6IqC6IO9 101487
+5oqr 101488
+5bC6 101489
+5a+4 101490
+5LiA5Luj 101491
+6aKX 101492
+6IC2 101493
+6JK4 101494
+5Zau 101495
+5ru/ 101496
+54yc 101497
+5rWG 101498
+5Z+D 101499
+5Y2D5LiH 101500
+6LWM 101501
+6IGy 101502
+5L2c6aOO 101503
+6LOq 101504
+5a+o 101505
+5bm05Lq6 101506
+5Y2w6LGh 101507
+5qG2 101508
+5pKk 101509
+5Y2B5LqU 101510
+5q+F 101511
+5rKq 101512
+5Zu95pyJ 101513
+5aSn6YeP55qE 101514
+5b6h 101515
+5a+T 101516
+6KaW 101517
+5ryC5Lqu 101518
+55yg 101519
+54Kt 101520
+6buO 101521
+6Jm5 101522
+5Yip5Lqa 101523
+6K2J 101524
+5rWP 101525
+5Y2B5YWr 101526
+5Lii 101527
+6L69 101528
+5pyJ5LiA5Lqb 101529
+5oWI 101530
+5YGc6L2m 101531
+5a6g 101532
+6Kej5pS+ 101533
+5pyJ5aSa 101534
+6YKK 101535
+5bi46KeB 101536
+5oq5 101537
+57qk 101538
+6Kaq 101539
+5qGG 101540
+6I6e 101541
+5rCn5YyW 101542
+6L+Z5Lu2 101543
+5Yew 101544
+5p+0 101545
+5Y+R55S1 101546
+6byg 101547
+6L2s5YyW 101548
+5aiD 101549
+5oyk 101550
+572p 101551
+5a+G5YiH 101552
+5oiR5LiN 101553
+6auY5paw 101554
+5LiA56+H 101555
+6L+b56iL 101556
+6KGw 101557
+6L+Y5LiN 101558
+54WM 101559
+5paw5Y2O 101560
+6IK/ 101561
+5rup 101562
+5LiA5rWB 101563
+6K+I 101564
+5a6e5L2T 101565
+5aSW5Zu9 101566
+6Lqy 101567
+6LWg 101568
+6Ka6 101569
+5qKd 101570
+5LiN6KeB 101571
+6KiK 101572
+5Yy5 101573
+5Y21 101574
+54el 101575
+5oWV 101576
+6b2/ 101577
+5a60 101578
+6aW8 101579
+6JGh6JCE 101580
+5bCP5b+D 101581
+5oG8 101582
+6ZmM 101583
+5piC 101584
+5YO5 101585
+6Iqd 101586
+5q+P5Liq5Lq6 101587
+5YmN5o+Q 101588
+5L2T5Lya 101589
+5qiZ 101590
+5pCc54uQ 101591
+5a+55YW2 101592
+5Lin 101593
+6JyC 101594
+5rW4 101595
+6Kq/ 101596
+5Z2q 101597
+6aKW 101598
+5ZCN5Li6 101599
+56y8 101600
+6IiM 101601
+5pys5Lmm 101602
+6IGv 101603
+57q6 101604
+566A55u0 101605
+6Zui 101606
+576O55qE 101607
+6Zqo 101608
+6auY5bOw 101609
+6L+Z5a62 101610
+5YKs 101611
+5bC4 101612
+56GV5aOr 101613
+6K23 101614
+6LCo 101615
+5piP 101616
+5pS/5Y2P 101617
+6KGU 101618
+57+S 101619
+5ZyS 101620
+5Zu95rCR 101621
+5Li76KeS 101622
+6KOV 101623
+5Lyq 101624
+5bqe 101625
+5rCR6JCl 101626
+5oOn 101627
+56eY5Lmm 101628
+55eV 101629
+55m+5YiG 101630
+5rq2 101631
+5peg55aR 101632
+55qE55y8 101633
+5pOO 101634
+5Lyf5aSn 101635
+5b2w 101636
+5YWs5a6J5bGA 101637
+57OV 101638
+5byl 101639
+5YKZ 101640
+5Lm+ 101641
+5q+r5LiN 101642
+5rOo5piO 101643
+5Ymv5oC7 101644
+5oSJ 101645
+5pWm 101646
+6aao 101647
+5pSA 101648
+6YCd 101649
+5Y+v6Z2g 101650
+5aS4 101651
+5ZyY 101652
+6Z2i5LiK 101653
+5oqW 101654
+6ISG 101655
+6amw 101656
+5LyQ 101657
+5aao 101658
+5a6a5LqG 101659
+57OK 101660
+5q2h 101661
+6YOo6ZW/ 101662
+56eJ 101663
+6IiG 101664
+5YiR5LqL 101665
+5ZC1 101666
+5qSS 101667
+6KGT 101668
+6LGr 101669
+6I+p 101670
+5a21 101671
+6aWy 101672
+5bCx5aW9 101673
+5aCq 101674
+5LiJ6KeS 101675
+5Zy65q+U6LWb 101676
+5LiN5YGc 101677
+5pOF 101678
+5YWo5paH 101679
+5rOB 101680
+5a2m5L2N 101681
+5rGw 101682
+6aCY 101683
+5Y+g 101684
+6Zqb 101685
+5biQ 101686
+55yL5Ye6 101687
+5Yyg 101688
+5bGA6Z2i 101689
+5rOM 101690
+6LCK 101691
+5ZCM5pyf 101692
+5oqV5qCH 101693
+5aW0 101694
+5p2l55yL55yL 101695
+6IS+ 101696
+6J66 101697
+5q2J 101698
+55uv 101699
+56iO5Yqh 101700
+5buK 101701
+5o6p 101702
+5oWo 101703
+55u8 101704
+6IqS 101705
+6K6A 101706
+5oyj 101707
+6IyF 101708
+5pal 101709
+5qSF 101710
+5Yiw5p2l 101711
+6JGX5L2c 101712
+54ux 101713
+5LqM5omL 101714
+5LuO5p2l 101715
+55ay 101716
+5bqK5LiK 101717
+5paw5rWq 101718
+5rOE 101719
+5aKe5YC8 101720
+5Lib 101721
+5pqR 101722
+5LuO5Lia 101723
+5reL 101724
+5aSa5qC3 101725
+5py0 101726
+5Lu96aKd 101727
+5p6j 101728
+6KW/55yB 101729
+5pys6LSo 101730
+5rex5rex 101731
+6ImH 101732
+57u1 101733
+5Lqn5YC8 101734
+5ryg 101735
+6IW7 101736
+562b 101737
+5Y6M 101738
+5oGt 101739
+5auM55aR 101740
+5oi2 101741
+5rue 101742
+6IaA 101743
+5Yqj 101744
+5bqn6LCI 101745
+5bi45oCB 101746
+55qE5oOF 101747
+6Ka9 101748
+5a+C 101749
+5YyG 101750
+6Ie6 101751
+6aGv 101752
+55WP 101753
+6YGj 101754
+5Y2c 101755
+562J5aWW 101756
+6LKs 101757
+5rqv 101758
+6Y4= 101759
+54K55aS0 101760
+6JOs 101761
+5rG6 101762
+6YWs 101763
+6YGK 101764
+6LO8 101765
+6Ki75YaK 101766
+5pys5oql 101767
+57WV 101768
+5rS75oCn 101769
+5YWR 101770
+6Yyv 101771
+5Ya2 101772
+5Za7 101773
+5rqW 101774
+6IKi 101775
+5rqD 101776
+5pes 101777
+5YmK 101778
+55CG5LqL 101779
+5bGg 101780
+5rKn 101781
+6JqA 101782
+6Zu75a2Q 101783
+5Li65q2i 101784
+5bi45aeU 101785
+57WC 101786
+6Yq3 101787
+54uA 101788
+5L6j 101789
+6IOA 101790
+6K2w 101791
+55So6L2m 101792
+5Zmq 101793
+5q23 101794
+5Y2U 101795
+5Yi5 101796
+56uf5piv 101797
+6amX 101798
+6JCd 101799
+55mr 101800
+55er 101801
+5q2n 101802
+5byK 101803
+5aq9 101804
+54+K 101805
+6KG3 101806
+6ZyJ 101807
+5Z+6552j 101808
+6Zqx 101809
+5rCo 101810
+57u4 101811
+5bC85pav 101812
+54OY 101813
+5pyf5YaF 101814
+6LCF 101815
+6ZuH 101816
+6ZqZ 101817
+5ZaJ 101818
+5Yml 101819
+55eY 101820
+5oy9 101821
+55Oj 101822
+5rmb 101823
+5qix 101824
+5r6O 101825
+5rmD 101826
+5Yas5aWl 101827
+5qO1 101828
+5a6w 101829
+5Z6S 101830
+5qeL 101831
+5L6I 101832
+6IyE 101833
+5Zi/ 101834
+6I+H 101835
+55mC 101836
+5YqD 101837
+6Y0= 101838
+6JS9 101839
+556t 101840
+5pWe 101841
+5LmW 101842
+6Z+n 101843
+6L6c 101844
+5oeI 101845
+5L2j 101846
+5567 101847
+5Z+U 101848
+6IiF 101849
+5a6e5LqL 101850
+6ag= 101851
+5ael 101852
+57Wh 101853
+5Zi7 101854
+55Wi 101855
+5rKD5bCU 101856
+6L+E 101857
+6IKH 101858
+5oWR 101859
+46c= 101860
+5I8= 101861
+8KA= 101862
+8KyH 101863
+8Kut 101864
+8KuQ 101865
+47M= 101866
+qb0= 101867
+8Kug 101868
+45s= 101869
+8KyN 101870
+6b8= 101871
+8KyS 101872
+45k= 101873
+8Kyk 101874
+8Ky0 101875
+8KuW 101876
+8KQ= 101877
+46w= 101878
+5LI= 101879
+8KuU 101880
+8Kua 101881
+6KaB5rGC 101882
+5LiA5Lqb 101883
+5a6e546w 101884
+6ICM5LiU 101885
+5Zug5q2k 101886
+55Sx5LqO 101887
+5YWz5LqO 101888
+54S25ZCO 101889
+5o6o5Yqo 101890
+5LiA5qC3 101891
+5oyJ54Wn 101892
+6L+Z5qC355qE 101893
+5b2i5oiQ 101894
+5pyJ5Lqb 101895
+5pu05Yqg 101896
+57uP6L+H 101897
+5bu66K6u 101898
+5rK755aX 101899
+5L2g5Lus 101900
+5omN6IO9 101901
+5L+D6L+b 101902
+5ZGY5bel 101903
+5L2T6aqM 101904
+6IiH 101905
+5YGa5aW9 101906
+5L+d6K+B 101907
+5pW05Liq 101908
+5piv5LiA5Liq 101909
+6YeH55So 101910
+55CG6K66 101911
+5q+U5aaC 101912
+5LiK55qE 101913
+5o6o6I2Q 101914
+55Sz6K+3 101915
+5aSp56m6 101916
+6YOo6JC9 101917
+5Y2B5YiG 101918
+5p2l6Ieq 101919
+5LmL6Ze0 101920
+6LCD5pW0 101921
+5q+P5aSp 101922
+6LCD5p+l 101923
+5oKj6ICF 101924
+6L+H56iL5Lit 101925
+6aaZ5riv 101926
+5bm/5ZGK 101927
+6Z2i5a+5 101928
+5ruh6Laz 101929
+6ZW/5pyf 101930
+6KeE6IyD 101931
+5pW05L2T 101932
+5pS55Y+Y 101933
+5pm65oWn 101934
+5aaI5aaI 101935
+5aaC5LuK 101936
+5ZCI5ZCM 101937
+6YO95Lya 101938
+5YS/56ul 101939
+5YeP5bCR 101940
+6Z+z5LmQ 101941
+57uP5bi4 101942
+5LiK5biC 101943
+5LyY56eA 101944
+55qE6YeN6KaB 101945
+5LiA5p2h 101946
+5rW35aSW 101947
+5Y+m5aSW 101948
+5LiA5a62 101949
+5Y6L5Yqb 101950
+5aSn5Z6L 101951
+55yL552A 101952
+5YiA 101953
+5bm456aP 101954
+5o6o5bm/ 101955
+5ZCb 101956
+5b6Q 101957
+5om+5Yiw 101958
+5LqO5piv 101959
+6Ieq6Lqr 101960
+5LiA5L2N 101961
+5Zyf5Zyw 101962
+5Yqg5YWl 101963
+5o6i57Si 101964
+5qKB 101965
+5Li75Yqo 101966
+5bCx5Lia 101967
+5aWz5oCn 101968
+56qB56C0 101969
+5LiN5ZCM55qE 101970
+6L+Q6L6T 101971
+6Ieq55Sx 101972
+5bGF5rCR 101973
+5q2k5qyh 101974
+55qE5pe26Ze0 101975
+5a626ZW/ 101976
+5LiA5Liq5Lq6 101977
+5qOA5rWL 101978
+5YaF6YOo 101979
+5bm/5bee 101980
+55u05pKt 101981
+5LuO6ICM 101982
+6LS35qy+ 101983
+5Y+s5byA 101984
+5pS56YCg 101985
+5Lq655Sf 101986
+5bGV56S6 101987
+5q+P5bm0 101988
+5aWz5Lq6 101989
+55qE5pa55byP 101990
+5pWI546H 101991
+5bGx5Lic 101992
+5rig6YGT 101993
+5Ly85LmO 101994
+5qGI5Lu2 101995
+5Yip55uK 101996
+55yL55yL 101997
+5b+D6YeM 101998
+57u05oqk 101999
+5a6d5a6d 102000
+572R5LiK 102001
+6K665Z2b 102002
+5bCx5Y+v5Lul 102003
+5LiN6Laz 102004
+5oGi5aSN 102005
+5biD5bGA 102006
+6LSh54yu 102007
+5LiL6ZmN 102008
+5o6M5o+h 102009
+55qu6IKk 102010
+5bel5YW3 102011
+6YeN5bqG 102012
+5ZOB6LSo 102013
+5o6o5Ye6 102014
+55S35Lq6 102015
+5om/5ouF 102016
+56qB5Ye6 102017
+6ICM6KiA 102018
+5rKf 102019
+5Y2P6LCD 102020
+5piv5LuA5LmI 102021
+5rGk 102022
+5pKR 102023
+54us56uL 102024
+546v6IqC 102025
+5omp5aSn 102026
+5rSq 102027
+5p2w 102028
+55uQ 102029
+5LuB 102030
+5raJ5Y+K 102031
+6ICB5Lq6 102032
+5Y2z5L2/ 102033
+5Y2X5Lqs 102034
+6YWN5ZCI 102035
+6ay8 102036
+54i25Lqy 102037
+572X5pav 102038
+5bCP5Yy6 102039
+5pWZ5o6I 102040
+5Yaz562W 102041
+6aKE6K6h 102042
+5pys5Lq6 102043
+5Lyv 102044
+56u5 102045
+5Yiw5bqV 102046
+5biC5rCR 102047
+5Ye65Y+j 102048
+6YeH6LSt 102049
+5oC757uT 102050
+5q2m5rGJ 102051
+5Yqg5aSn 102052
+5bm/5Lic 102053
+5rWB56iL 102054
+5Lq65Y+j 102055
+5aaC5p6c5L2g 102056
+5Ye65Y67 102057
+5YeJ 102058
+5Yac5rCR 102059
+546w6LGh 102060
+5Yqb5bqm 102061
+57uZ5LqI 102062
+5YWa5aeU 102063
+6K+t6KiA 102064
+57q/5LiK 102065
+5oCO5qC3 102066
+5YS/5a2Q 102067
+56Gu5a6e 102068
+5LmL5aSW 102069
+6YO95Zyo 102070
+6Im+ 102071
+55qE5oOF5Ya1 102072
+6YeM55qE 102073
+5Zu057uV 102074
+5pu05aSa55qE 102075
+5L6d5rOV 102076
+5YWs5Zut 102077
+5a626YeM 102078
+5q+N5Lqy 102079
+5LiN5YaN 102080
+6Iu5 102081
+5rOV6Zmi 102082
+6Z+p5Zu9 102083
+55u45b2T 102084
+5LiN55+l 102085
+6K+E5Lyw 102086
+5LiN55So 102087
+6aG65Yip 102088
+6YeN6KeG 102089
+6LSi5Yqh 102090
+5LuW5YCR 102091
+5Y+R6KGM 102092
+5LiT6Zeo 102093
+5YW35aSH 102094
+5bm25LiN5piv 102095
+6Laz55CD 102096
+6Z6L 102097
+5Y+R6KGo 102098
+5rC46L+c 102099
+6JCl5YW7 102100
+6YWN5aWX 102101
+5pW05ZCI 102102
+6LS6 102103
+5Zue562U 102104
+5pS255uK 102105
+5Lmf6K64 102106
+6LuK 102107
+5o6l6Kem 102108
+5pS75Ye7 102109
+5Zub5bed 102110
+5oCn6IO9 102111
+5Zue5Yiw 102112
+6IWw 102113
+5Lmf5rKh5pyJ 102114
+5byE 102115
+6K6+56uL 102116
+6Ziy5o6n 102117
+5oqA5ben 102118
+6YCa5bi4 102119
+6LSi5pS/ 102120
+6YOo572y 102121
+5Zy65pmv 102122
+5rGf6IuP 102123
+6KGo6L6+ 102124
+5Za3 102125
+5aWz5YS/ 102126
+6Ii2 102127
+57Wm 102128
+5Lya5ZGY 102129
+5oiW6K64 102130
+5Lqp 102131
+5Lic5pa5 102132
+5aSp5rSl 102133
+6L+R5bm0 102134
+55yL5p2l 102135
+5q+U5L6L 102136
+5bKp 102137
+6ZOc 102138
+5467 102139
+5a6e6aqM 102140
+5oCd57u0 102141
+5ouF5b+D 102142
+5rKI 102143
+6Lqr6L65 102144
+5rex5YyW 102145
+57K+5YeG 102146
+56eB5pyN 102147
+5raI6Ziy 102148
+5Y675LqG 102149
+57uG6IOe 102150
+55CD6Zif 102151
+5piO5pif 102152
+6aOf54mp 102153
+5b6I5b+r 102154
+6K6p5L2g 102155
+5L+h55So 102156
+5ZSv5LiA 102157
+5YW25a6D 102158
+562J5pa56Z2i 102159
+5b6L5biI 102160
+5q275Lqh 102161
+5p+z 102162
+5LiA5om5 102163
+5LiK5rao 102164
+5py65Zy6 102165
+5b2i5Yq/ 102166
+5oS/5oSP 102167
+6ZuG5L2T 102168
+5paw5Z6L 102169
+5o2f5aSx 102170
+5pu4 102171
+5LiL5Y2I 102172
+5q+P5qyh 102173
+5oiQ5bCx 102174
+5YWs6Lev 102175
+6Jmr 102176
+5ZKx 102177
+6KW/5a6J 102178
+5pyA5L2z 102179
+56eR56CU 102180
+5aSN5p2C 102181
+5py65Zmo 102182
+54ix5oOF 102183
+54Wn54mH 102184
+5bm06b6E 102185
+6LOH5paZ 102186
+57KX 102187
+5YeG56Gu 102188
+5Yqg5LiK 102189
+5Ye654mI 102190
+6LCQ 102191
+5a625bGF 102192
+6IOM5pmv 102193
+5LiA57q/ 102194
+5LqL6aG5 102195
+5Yqo5L2c 102196
+56Wl 102197
+5oC75L2T 102198
+5oi/5a2Q 102199
+5Lmf5bCx5piv 102200
+5aSn5qaC 102201
+6auY5pWI 102202
+5ZC5 102203
+5o6I5p2D 102204
+6ZmE6L+R 102205
+5qGI5L6L 102206
+6Ze5 102207
+54i454i4 102208
+5b2p56Wo 102209
+5oCS 102210
+5Li+5oql 102211
+5pmu6YGN 102212
+55WZ5LiL 102213
+6KGj5pyN 102214
+5peg6K665piv 102215
+5YWF5ruh 102216
+5rex5bqm 102217
+5qGR 102218
+5oiq6Iez 102219
+5bim5p2l55qE 102220
+6Zm1 102221
+5oSf5oOF 102222
+6LWa 102223
+5ZOq5Lqb 102224
+5pW05pS5 102225
+5oiQ54af 102226
+5aic 102227
+6by7 102228
+55+b 102229
+55u+ 102230
+5aW95aW9 102231
+56ys5Zub 102232
+5Yag5Yab 102233
+6LSi5a+M 102234
+5pyA5aW955qE 102235
+6L2m5Z6L 102236
+6ZaA 102237
+5Y2z5bCG 102238
+5YiG5Li6 102239
+6Z2S5bKb 102240
+57q357q3 102241
+5LuK5pel 102242
+5bmz6KGh 102243
+5bmz5pa557Gz 102244
+6YKj56eN 102245
+5Ye655Sf 102246
+6Z2S5pil 102247
+5Lq6576k 102248
+5Lq65bel 102249
+5LmL5LiL 102250
+5rmW5YyX 102251
+5Zyo5q2k 102252
+5Y2a5aOr 102253
+5pe25Yi7 102254
+5rKz5YyX 102255
+5pS+5byD 102256
+6YCa6YGT 102257
+5qOu5p6X 102258
+55aG 102259
+5pW4 102260
+6Iqz 102261
+5omT5Ye7 102262
+5pu5 102263
+5YyW5a2m 102264
+5oOz6LGh 102265
+5LiH5Lq6 102266
+6LSi57uP 102267
+5YWD57Sg 102268
+5Lya6K6h 102269
+5YWo5L2T 102270
+5oSb 102271
+6auY5Lit 102272
+5py66YGH 102273
+5aOw6Z+z 102274
+5peF6KGM 102275
+5rWp 102276
+5p+x 102277
+5bCR5bm0 102278
+5Zu95aSW 102279
+6JGX5ZCN 102280
+55Sf5a2Y 102281
+5aec 102282
+5bim6aKG 102283
+6aKc6Imy 102284
+5LiK5LiL 102285
+5Lqn5Lia6ZO+ 102286
+5pu05aW955qE 102287
+5bKt 102288
+5LyY5oOg 102289
+5L6/5piv 102290
+5YWn5a65 102291
+5LiA5Y+q 102292
+55C0 102293
+5qKm5oOz 102294
+56ef6LWB 102295
+5byA5ZCv 102296
+6LSt54mp 102297
+5YyF5ZCr 102298
+5Yip546H 102299
+6LW35LqG 102300
+5pyJ5Yqb 102301
+6YKj6YeM 102302
+5a6h5om5 102303
+5a+55omL 102304
+546w6YeR 102305
+5aSp54S2 102306
+55uS 102307
+54i9 102308
+5b+F54S2 102309
+5YyW5bel 102310
+5LiT5Yip 102311
+5ZWh 102312
+5byA5b+D 102313
+5Lq65L2T 102314
+6YGT5aOr 102315
+5oCB5bqm 102316
+56m66LCD 102317
+5oub5ZWG 102318
+5ae7 102319
+56ys5LqU 102320
+5qOS 102321
+5LiA57O75YiX 102322
+5Y2x5py6 102323
+6L2s5Y+Y 102324
+5Zy65omA 102325
+6bij 102326
+5oi/6Ze0 102327
+6YC8 102328
+6K+V54K5 102329
+5a+55aSW 102330
+5Ye65Y+w 102331
+5Zyo6L+Z 102332
+5Y6C5a62 102333
+5beo5aSn 102334
+566A5LuL 102335
+55yL5LqG 102336
+5YWa5bu6 102337
+5oyH5oyl 102338
+55+z5rK5 102339
+5LiN5Y+v6IO9 102340
+6I6y 102341
+5LiN5aSq 102342
+5Yib5oSP 102343
+56ys5LiA5Liq 102344
+6LS15bee 102345
+6L+H5LqG 102346
+5pys5p2l 102347
+6YGT5b63 102348
+562U5qGI 102349
+6Zm2 102350
+5LiA6Lev 102351
+6IKW 102352
+5riF5rSB 102353
+5pyJ5py6 102354
+5ZCN5Y2V 102355
+5p2x 102356
+5ZG85ZC4 102357
+5LiI 102358
+56aP5bu6 102359
+6K+V6aqM 102360
+5byV5Y+R 102361
+5Lmf5rKh 102362
+5LiN5L2P 102363
+54af5oKJ 102364
+6JCs 102365
+5LiN6Imv 102366
+56CW 102367
+6Ie05Yqb 102368
+562+6K6i 102369
+5ZCK 102370
+5L6v 102371
+55im 102372
+5aeR5aiY 102373
+5pak 102374
+5aa75a2Q 102375
+5pil6IqC 102376
+54is 102377
+5pud 102378
+54Ot5oOF 102379
+6ZW/5rKZ 102380
+6JCl6YCg 102381
+6YW3 102382
+6ZOd 102383
+5Z+65pys5LiK 102384
+5ZGo5Zu0 102385
+5LuA6bq8 102386
+6K6k5Y+v 102387
+5YiG5a2Q 102388
+5LiA5pa56Z2i 102389
+6L20 102390
+5by3 102391
+6ams5LiK 102392
+6Zu+ 102393
+6Iej 102394
+5bC/ 102395
+55Sf5oSP 102396
+5a6J5b69 102397
+56We57uP 102398
+5Ye65bit 102399
+6I2v5ZOB 102400
+55CG55Sx 102401
+5Y2P5ZCM 102402
+5rWB5Yqo 102403
+5Y+R5Yqo 102404
+5Z2a5a6a 102405
+6KGo5piO 102406
+5ZCO6Z2i 102407
+5LmJ5Yqh 102408
+5aaW 102409
+5pyJ5Y+v6IO9 102410
+5bm06L275Lq6 102411
+5aSn6ZmG 102412
+5bKz 102413
+5LiN6LW3 102414
+556s6Ze0 102415
+5LiN5b6X5LiN 102416
+562+57qm 102417
+5ZCI5qC8 102418
+5YWa5pSv6YOo 102419
+5rWO5Y2X 102420
+5L6/5Yip 102421
+6ZqP5pe2 102422
+5aWJ 102423
+56ew5Li6 102424
+5Lqn5p2D 102425
+5ZCV 102426
+55uG 102427
+6K++5aCC 102428
+57ea 102429
+5qOJ 102430
+57q/5LiL 102431
+6Ieq6KGM 102432
+5Li+5o6q 102433
+5Y6m6Zeo 102434
+6Ieq5L+h 102435
+5b2x6KeG 102436
+5LuU 102437
+55Sf5rS75Lit 102438
+5p2D55uK 102439
+55m96Imy 102440
+5bCx5LiN 102441
+6L+b5bGV 102442
+5q+P5pel 102443
+5L6b57uZ 102444
+5p2D5Yip 102445
+5peg5pWw 102446
+55CG6LSi 102447
+5L6d5pen 102448
+5LiK5Y2I 102449
+6K+G5Yir 102450
+55uI5Yip 102451
+56CC 102452
+6K645Y+v 102453
+5ZCM5LqL 102454
+5Zib 102455
+6YG4 102456
+552A5Yqb 102457
+6Zeo5Y+j 102458
+5LiN5aSa 102459
+5YW25qyh 102460
+56Kn 102461
+54mp55CG 102462
+5YaF5b+D 102463
+55m+5aeT 102464
+5oC757uf 102465
+5bmy5YeA 102466
+56ev57Sv 102467
+5Y+N6aaI 102468
+5qCR56uL 102469
+56S+5Lqk 102470
+56ep 102471
+5Y2B5LiA 102472
+6YKT 102473
+6amx5Yqo 102474
+5bGV6KeI 102475
+6IiS6YCC 102476
+5Z+65Zug 102477
+5beu5byC 102478
+6L2s6K6p 102479
+5bCP5aeQ 102480
+5qC35a2Q 102481
+57+U 102482
+6auY5YW0 102483
+5b2x5ZON5Yqb 102484
+5omL57ut 102485
+55u45ZCM 102486
+55u45bqU 102487
+5pmS 102488
+6KeA 102489
+5biC5aeU 102490
+6Iqv 102491
+5bGV546w 102492
+5Zyw55CD 102493
+6YKq 102494
+5LiA5a6a55qE 102495
+5YWB6K64 102496
+5L+h5Lu7 102497
+5omR 102498
+6Zmi5qCh 102499
+566A56ew 102500
+5YGa5rOV 102501
+5LmL6Lev 102502
+5peX5LiL 102503
+6IWU 102504
+5raI5aSx 102505
+5LiW55WM5LiK 102506
+5Z+O5Lmh 102507
+6Iie5Y+w 102508
+5b6I5aSn55qE 102509
+57uf5625 102510
+5YWs5bmz 102511
+6IK+ 102512
+55qE5aW9 102513
+5rGB 102514
+55y85YmN 102515
+6Zuj 102516
+5bm9 102517
+5YWx5Lqn 102518
+5Li75Yqe 102519
+5aSE572a 102520
+5bqZ 102521
+6YGT55CG 102522
+5by1 102523
+5o6l552A 102524
+54yO 102525
+54GM 102526
+55Sx5q2k 102527
+5Lq65Yqb 102528
+5rWB6KGM 102529
+5L6g 102530
+5Y+v5Lul6K+0 102531
+6JKL 102532
+5b2i5oCB 102533
+5pel5a2Q 102534
+5ryG 102535
+55WZ5a2m 102536
+55u46Zec 102537
+5pyA5aSa 102538
+5Yet5YCf 102539
+5YWs5Lqk 102540
+5oyW5o6Y 102541
+5p2C5b+X 102542
+5Li75Lq6 102543
+6Zqc56KN 102544
+5qCh6ZW/ 102545
+5pa55L2N 102546
+5LiK54+t 102547
+5aSa5YWD 102548
+6IOB 102549
+6a2F5Yqb 102550
+6IyC 102551
+5YWF55S1 102552
+5by65aSn 102553
+54Ok 102554
+5aWL5paX 102555
+5a6e55So 102556
+6ZiB 102557
+57uZ5LqG 102558
+5pys56eR 102559
+5qCL 102560
+5ouo 102561
+5pWZ57uD 102562
+6YO955+l6YGT 102563
+5q+V5Lia55Sf 102564
+56KX 102565
+5Z6C 102566
+6K68 102567
+5a6B5rOi 102568
+5a2m6ICF 102569
+6LCi6LCi 102570
+5Z+O6ZWH 102571
+5oCO5LmI5Yqe 102572
+6YGU 102573
+5oiQ5Lqk 102574
+5r2c5Yqb 102575
+5Y2n 102576
+5paw5byA 102577
+6YWN5aSH 102578
+5Li75Yqb 102579
+5ZGz6YGT 102580
+54OC 102581
+6aOe6KGM 102582
+5auB 102583
+5aSn5aSn 102584
+57uZ5aSn5a62 102585
+5aSW6Z2i 102586
+6YaJ 102587
+5Y+R6KiA 102588
+5pep6aSQ 102589
+5ZCE6Ieq 102590
+5a6Z 102591
+6I2j6KqJ 102592
+5oqr6Zyy 102593
+6aGe 102594
+5YaF55qE 102595
+6IKq 102596
+6L6Q 102597
+5rO1 102598
+5oqb 102599
+5pif5pyf 102600
+5LiA5bim 102601
+55Sf57Sg 102602
+57uP6ZSA 102603
+5Ye2 102604
+5Zyw5LiK 102605
+5ZG96L+Q 102606
+5ZOy 102607
+5LiK5Y67 102608
+5paH54mp 102609
+6K+R 102610
+5oyv5YW0 102611
+6ZW/5pe26Ze0 102612
+56Wt 102613
+5ZCI6IKl 102614
+6L+d6KeE 102615
+6IGq 102616
+5L2O5LqO 102617
+6YCC5b2T 102618
+5pyJ5bqP 102619
+5pys572R 102620
+55WZ6KiA 102621
+5oOz5rOV 102622
+562+572y 102623
+5aea 102624
+5oCn5qC8 102625
+6JKZ5Y+k 102626
+5p+P 102627
+5Z6r 102628
+5a2m5Y6G 102629
+5LuF5LuF 102630
+6K6y6K+d 102631
+6ZSQ 102632
+5oCW 102633
+5Ymq 102634
+6IuN 102635
+5ZCT 102636
+5by654OI 102637
+5YGl5YWo 102638
+55av 102639
+5Y+k5Luj 102640
+5aWI 102641
+5LiN54S2 102642
+5Lmh6ZWH 102643
+5pyL5Y+L5Lus 102644
+5YKF 102645
+6IG9 102646
+5Liq5oCn 102647
+5rOV6KeE 102648
+5bCP6ZWH 102649
+55S76Z2i 102650
+56ys5YWt 102651
+57ay6Lev 102652
+5YmN5pmv 102653
+5ZCs6K+0 102654
+5Lyg5aqS 102655
+5p2h5L6L 102656
+5Yir55qE 102657
+5LiN5oeC 102658
+6aG+6Zeu 102659
+5by65bqm 102660
+6Zi/6YeM 102661
+6LWw5Yq/ 102662
+5bi9 102663
+55qE56Gu 102664
+5Yy65Yir 102665
+6Yyi 102666
+5Li7566h 102667
+5LiA55yL 102668
+5pac 102669
+5a2Y5Zyo55qE 102670
+5Luy 102671
+5Y2x5a6z 102672
+6ZOt 102673
+5ri45oiP5Lit 102674
+6YWx 102675
+6b6Z5aS0 102676
+5Lq65b+D 102677
+6YCA5LyR 102678
+5rWP6KeI 102679
+5Yqr 102680
+6Ziy5rK7 102681
+566t 102682
+5bGI 102683
+6L695a6B 102684
+5aOk 102685
+6L+O5p2l 102686
+6Z6N 102687
+55So5p2l 102688
+5aSn5Zyw 102689
+5Luw 102690
+6YCa6K6v 102691
+5byA5bel 102692
+6KOk 102693
+5aaC5ZCM 102694
+6aqk 102695
+6Zif5ZGY 102696
+6L2p 102697
+576O5pyv 102698
+6Jmf 102699
+5ZCM5LiA 102700
+5ZyW 102701
+5Lmm5rOV 102702
+5omT5Y2w 102703
+5ZCr5pyJ 102704
+6ZuG5oiQ 102705
+6Ze3 102706
+5biC5Zy65LiK 102707
+5peB6L65 102708
+5Zyw5p2/ 102709
+5Lqn55Sf55qE 102710
+57Kk 102711
+6YeN57uE 102712
+6KGA5ray 102713
+562L 102714
+5Yqe5LqL 102715
+5bi46KeB55qE 102716
+5LiK5Y2K5bm0 102717
+5bGP5bmV 102718
+5ZCJ5p6X 102719
+5bep 102720
+5Zac54ix 102721
+57+g 102722
+5LiJ56eN 102723
+5qGG5p62 102724
+5Lic6I6e 102725
+55SY6IKD 102726
+6Iqs 102727
+5Zu+5Lmm 102728
+5Yek5Yew 102729
+5rCU5YCZ 102730
+5bC0 102731
+5bCs 102732
+5Lik5aSp 102733
+6L6F5a+8 102734
+5YCf5qy+ 102735
+5pel6LW3 102736
+5rSS 102737
+5LiA5bqm 102738
+6LmI 102739
+5r2t 102740
+5omH 102741
+55mc 102742
+5paw5YW0 102743
+5YKy 102744
+6K+45aSa 102745
+6LSq 102746
+6Zm35YWl 102747
+6Iif 102748
+6IK654KO 102749
+5LiA5qC355qE 102750
+5Y6Y 102751
+5Zyw55CG 102752
+5oqV5rOo 102753
+6ZqK 102754
+5YWJ5LyP 102755
+5L+d5YGl 102756
+5YWU 102757
+5YWs5Yqh 102758
+5omT56C0 102759
+55S35a2p 102760
+5Yqz5Yqh 102761
+5L2g5Lya 102762
+55So5Zyw 102763
+5rqi 102764
+5Y+R6L6+ 102765
+6IKa 102766
+6L+H5LqO 102767
+6IeC 102768
+6YCZ5qij 102769
+6L276L27 102770
+5Lit5YWx 102771
+5ZCE5Zu9 102772
+5ZSH 102773
+5a6e5Lmg 102774
+6Jm+ 102775
+5qe9 102776
+5LiN5LiK 102777
+5YWN55ar 102778
+5Y2g5o2u 102779
+5bel5Lya 102780
+5ZuK 102781
+6Iiq5aSp 102782
+5Y+v54ix 102783
+5paX5LqJ 102784
+55ik 102785
+5aaC5pyJ 102786
+6ZuW 102787
+5a+55oiR 102788
+5Ye656ef 102789
+5aW955yL 102790
+5aSq5aSn 102791
+5rC05Yip 102792
+5Yq/5Yqb 102793
+5YWo5rCR 102794
+572i 102795
+6LWi5b6X 102796
+55S15L+h 102797
+6L2m6Ze0 102798
+5pmC5YCZ 102799
+5bCR5pWw 102800
+6ZO4 102801
+5YWz6IGU 102802
+5LiN5LuF5LuF 102803
+5Li65oKo 102804
+5ZK4 102805
+5py65Yqo 102806
+6KOZ 102807
+5ZON5bqU 102808
+6YGg 102809
+6LK3 102810
+56m0 102811
+5aKF 102812
+6ZSh 102813
+57WE 102814
+54Gr6L2m 102815
+6LOH6KiK 102816
+5Yaz6LWb 102817
+5rGh5rC0 102818
+6Kqe 102819
+5bSb 102820
+57Sn5a+G 102821
+57y65bCR 102822
+5aSa5Lq6 102823
+5oC75Lmm6K6w 102824
+6ZSI 102825
+6JGb 102826
+5b+Y6K6w 102827
+6ZmM55Sf 102828
+6ZW/5aSn 102829
+5YWI6L+b55qE 102830
+56GF 102831
+5Y+R5piO 102832
+5am05YS/ 102833
+5omO5a6e 102834
+6JuL55m9 102835
+5LiA55m+ 102836
+55uu5YWJ 102837
+5oWM 102838
+5Yqg5rK5 102839
+5ZCe 102840
+5LiA576k 102841
+5Lit5LuL 102842
+5biW 102843
+5b+M 102844
+6IGM6IO9 102845
+5bm/5pKt 102846
+55uR5a+f 102847
+56eY5a+G 102848
+54uu 102849
+6L+Z5p2h 102850
+6YCi 102851
+5oCo 102852
+5Y2B5YWt 102853
+6Kmm 102854
+6K+05Yiw 102855
+5Yed6IGa 102856
+5oyH56S6 102857
+5rCi 102858
+5byY 102859
+6ZiA 102860
+5pap 102861
+6aCF 102862
+5LiA5byA5aeL 102863
+5o6S6KGM 102864
+5Zyo5oiR 102865
+57qq5b2V 102866
+5oqE 102867
+5qCq 102868
+6K+05rOV 102869
+5Lit6I2v 102870
+5aW95aSa 102871
+5Y+q5LiN6L+H 102872
+55WZ5Zyo 102873
+5Liq5bCP5pe2 102874
+6K6k55+l 102875
+55Wr 102876
+6KeB6L+H 102877
+5bCP5b6u 102878
+5L2b5bGx 102879
+55y+ 102880
+6K6y6L+w 102881
+5qKz 102882
+56ew5Y+3 102883
+5pel5pma 102884
+6KKW 102885
+5ZWk 102886
+5pyq57uP 102887
+5pyA5pep 102888
+5omu5ryU 102889
+6KGA566h 102890
+57qx 102891
+5oOF6IqC 102892
+56ys5LiD 102893
+5o2n 102894
+5LuX 102895
+5r+A54OI 102896
+5peg57q/ 102897
+5LiN5a655piT 102898
+5byA5bmV 102899
+5paw55Sf 102900
+5LiT5rOo 102901
+6JGx 102902
+5Y2X5rW3 102903
+54ef 102904
+6LW35L6G 102905
+5rS+5Ye6 102906
+5YSS 102907
+5L6o 102908
+6LyD 102909
+5Y2a6KeI 102910
+6YC+ 102911
+5YyA 102912
+57uP5rWO5a2m 102913
+5riX 102914
+5L+d6K23 102915
+54m6 102916
+54my 102917
+546r 102918
+55Gw 102919
+5pyA5ZCO5LiA 102920
+5pS/5Yqh 102921
+5qeb 102922
+6JmV55CG 102923
+6ZqQ5oKj 102924
+5om/5YyF 102925
+5qW1 102926
+5qGp 102927
+55uy 102928
+5a+85ZCR 102929
+6Ie05a+M 102930
+57yG 102931
+5oGL54ix 102932
+5LiN5Yqo 102933
+57uZ5Lq6 102934
+5bei 102935
+6KGo5oOF 102936
+5Lic5Y2X 102937
+5YaF5aSW 102938
+6L6I5a2Q 102939
+5Y+J 102940
+5Y2a5Lya 102941
+5Yqf5pWI 102942
+5ri0 102943
+5bGs 102944
+5o6S6Zmk 102945
+6YCb 102946
+5LiA5Lya 102947
+5LiN5byA 102948
+5byA5aWW 102949
+6buR6b6Z 102950
+6buR6b6Z5rGf 102951
+5b+r5LiJ 102952
+5bqm5YGH 102953
+5Z2k 102954
+6YKu5Lu2 102955
+5oeS 102956
+5L6b55S1 102957
+5buj 102958
+5aW96K+E 102959
+56eY5Lmm6ZW/ 102960
+5oiY5Zy6 102961
+5aW95aWH 102962
+5L615p2D 102963
+5oa+ 102964
+5pyA5Yid 102965
+5om55Y+R 102966
+5Y6V 102967
+6LyV 102968
+5p6v 102969
+5Lia5YaF 102970
+6LSt5oi/ 102971
+5LiN5Zyo 102972
+57qq5aeU 102973
+5omA6ZyA 102974
+5biC6ZW/ 102975
+6LO9 102976
+5byV5pOO 102977
+54G16a2C 102978
+6YqA 102979
+5ruk 102980
+552Q 102981
+5aSa6aG5 102982
+5Zue5aS0 102983
+6ImY 102984
+5aSN5bel 102985
+6YOo5Lu2 102986
+57Sn57Sn 102987
+5p+Q56eN 102988
+5L2/5YW2 102989
+5paw5Lq6 102990
+5p6a 102991
+5rOV5a6a 102992
+5be05be0 102993
+5ra155uW 102994
+56i7 102995
+5ou+ 102996
+5pmV 102997
+6L2/ 102998
+6YCa6KGM 102999
+5ZOA 103000
+5rOK 103001
+5rip6aao 103002
+6ZuG6IGa 103003
+54aZ 103004
+5YeR 103005
+5Y2B5LiD 103006
+5rCU5oGv 103007
+5o+Q5L6b55qE 103008
+5rOz 103009
+5aWl6L+Q 103010
+54G+5a6z 103011
+5YeA5YyW 103012
+6Leo6LaK 103013
+5ZOq5oCV 103014
+6Z+/ 103015
+5aKe5re7 103016
+54SK 103017
+5q6L55a+ 103018
+56KM 103019
+5oKU 103020
+6KeB6K+B 103021
+6L6W5Yy6 103022
+5b+D6ISP 103023
+6Zqn 103024
+5Y24 103025
+5Y+v6IO95oCn 103026
+5pyJ6Laj 103027
+5Ymv5Lmm6K6w 103028
+5YyW5aaG 103029
+5L+C 103030
+5qOa 103031
+6YaH 103032
+5bim5aS0 103033
+6aCI 103034
+6L+956m2 103035
+5pGU 103036
+6L+Z6YOo 103037
+5LiN6K66 103038
+56W4 103039
+5bO7 103040
+6YGV 103041
+55Sf6IKy 103042
+5aSg 103043
+5aSW5Lqk 103044
+6K+E5Li6 103045
+5LuO5bCP 103046
+5bCP5bCP 103047
+6aW/ 103048
+5pK8 103049
+6Leo5aKD 103050
+6KKr5ZGK 103051
+5Y2X5a6B 103052
+6Lqr5b+D 103053
+5YaN55Sf 103054
+5omA6K+0 103055
+5pe26Ze05YaF 103056
+5YiX5YWl 103057
+6Z2S5rW3 103058
+54ix5aW9 103059
+56qE 103060
+6IiI 103061
+6L+H5rih 103062
+5r+f 103063
+6ZuA 103064
+5a6h6K6u 103065
+5Zu96LWE 103066
+5q2l5LyQ 103067
+6L2o6YGT 103068
+5L+h5b+1 103069
+5LiJ5YiG 103070
+54as 103071
+5a215YyW 103072
+57yg 103073
+6YOK 103074
+6IiS5pyN 103075
+57qq5qOA 103076
+5LiA5LiL5a2Q 103077
+6Zu76Kmx 103078
+6LKg 103079
+6ZKl 103080
+5YyZ 103081
+55e0 103082
+6LaB 103083
+57uj 103084
+54i1 103085
+6L2w 103086
+6aqE 103087
+5aeo 103088
+5ouY 103089
+54y0 103090
+6K62 103091
+6L+Z5bqn 103092
+542o 103093
+5reY5rGw 103094
+55eF5L6L 103095
+5rKZ5Y+R 103096
+6KeG5Li6 103097
+5aS05p2h 103098
+5b+F6KaB55qE 103099
+5Y+v6LCT 103100
+6K+d6K+0 103101
+56+E 103102
+5pep54K5 103103
+5p6i57q9 103104
+576h 103105
+54ix5Zu9 103106
+56qB5Y+R 103107
+6YCK 103108
+5r2N 103109
+6I2j6ICA 103110
+6J+5 103111
+5qaC546H 103112
+5b6I5LmF 103113
+5oOV 103114
+6Ki0 103115
+5ZyG5ruh 103116
+55qx 103117
+5YiG5rOM 103118
+5YWF6Laz 103119
+55yL5rOV 103120
+6L6f 103121
+5oum 103122
+5oup 103123
+5a+55bqU 103124
+5Li65qC45b+D 103125
+6IWK 103126
+5aSa5LmI 103127
+5rWR 103128
+5a6P6KeC 103129
+6ISW 103130
+5ZCI6LWE 103131
+55Sf5rav 103132
+5a6e6LSo 103133
+5LyY54K5 103134
+55So5rC0 103135
+5a+/5ZG9 103136
+5rKr 103137
+5ZCB 103138
+6Km5 103139
+5Zu96Ziy 103140
+5bSp 103141
+5Z2O 103142
+6IaP 103143
+5LiA6L2u 103144
+6YGX5Lqn 103145
+5rm+5Yy6 103146
+57uO 103147
+5Y2V57qv 103148
+5r6E 103149
+5YmN5YiX 103150
+6Lqr5b2x 103151
+6buY6buY 103152
+5o2J 103153
+55Kw 103154
+6I+K 103155
+5oCc 103156
+5YWL5oCd 103157
+5oC75bGA 103158
+54eD5paZ 103159
+5Lia5oCB 103160
+5ZCE5qC3 103161
+5ZK9 103162
+5Ye66Imy 103163
+5Yid5b+D 103164
+5Y+b 103165
+56CU6K6o 103166
+6KGr 103167
+5Y6G56iL 103168
+56a9 103169
+6Laz5aSf55qE 103170
+6I2G 103171
+55yL5b6F 103172
+6LSp 103173
+5Yaz5b+D 103174
+6KO5 103175
+5biI6IyD 103176
+5Z6E 103177
+5p2g 103178
+5Ye4 103179
+54q56LGr 103180
+54Ot6KGA 103181
+5ZCI5LyZ 103182
+6YW1 103183
+6JC95Zyo 103184
+5Y2g5Zyw 103185
+6KGs 103186
+6JOJ 103187
+5oSk 103188
+5riK 103189
+5YiG5pWw 103190
+56yR552A 103191
+5aSq5bmz 103192
+54Kr 103193
+5o6o5LuL 103194
+5pav5Z2m 103195
+5b2i5a65 103196
+5pOK 103197
+5oSf5YW06Laj 103198
+5Yab5Lq6 103199
+5YeM5pmo 103200
+5a+554Wn 103201
+5Y+R55eF 103202
+5be+ 103203
+6IiJ 103204
+5qqi 103205
+56yR5LqG 103206
+56Gu6K+K 103207
+6LSf5YC6 103208
+5aOu5aSn 103209
+5oia 103210
+5LqS6IGU 103211
+6Kqy 103212
+6IWm 103213
+5pex 103214
+5Y+X5qyi6L+O 103215
+5Y2J 103216
+6Zmi5aOr 103217
+5qmh 103218
+5LiA5a+5 103219
+6L6x 103220
+5rKC 103221
+5Y+y5LiK 103222
+5pCP 103223
+5bSW 103224
+5Luj6LCi 103225
+56O3 103226
+6aGY 103227
+5rWH 103228
+5bi455So 103229
+5Y2R 103230
+5Ye65Zu9 103231
+6K+g 103232
+56iz5q2l 103233
+57uP57qq 103234
+5aSa5aSa 103235
+5omA5b6X 103236
+5Li65Li76aKY 103237
+5LiA5YiG 103238
+5qC9 103239
+6aGn 103240
+57qy 103241
+5YOF 103242
+5aOT 103243
+5YSq 103244
+57+w 103245
+5o6A 103246
+5Lq65Li6 103247
+5aqz 103248
+5rS9 103249
+6J22 103250
+5aSN5YW0 103251
+5Lya5b2x5ZON 103252
+5ZCE55WM 103253
+6YKj5LiA 103254
+6aKk 103255
+54CP 103256
+54CP6Ka9 103257
+5a+e 103258
+5Y+v5oCV 103259
+5Y2z5pe2 103260
+55W0 103261
+5LiL5Y2K5bm0 103262
+56yU6K6w 103263
+6ZmE5Yqg 103264
+54Ot5rC0 103265
+5aW4 103266
+56OF 103267
+5p2J 103268
+5riF5Y2O 103269
+6Zax 103270
+57Ch 103271
+5aSE5aSE 103272
+5ZCI6YeR 103273
+5rKz5rWB 103274
+57Sw 103275
+6LSf6Z2i 103276
+55qE55yf5a6e 103277
+5Zmo5qKw 103278
+6JKQ 103279
+6KW/5Lqa 103280
+5beF 103281
+57K5 103282
+5Y6f5paH 103283
+5p6V 103284
+6KGA5Y6L 103285
+5Zq0 103286
+5biY 103287
+5YaA 103288
+5oyr 103289
+55S16Lev 103290
+5bCP5LyZ5Ly0 103291
+6J20 103292
+5pyA5b+r 103293
+5ouM 103294
+5a6q 103295
+5pa3 103296
+57+F 103297
+5ZKz 103298
+5Ze9 103299
+576e 103300
+6Lq65Zyo 103301
+6LWb6L2m 103302
+5rKQ 103303
+6ZmQ5bqm 103304
+5Li65LiA5L2T 103305
+6JKc 103306
+5bmr 103307
+5pCF 103308
+5YuL 103309
+5YmW 103310
+57qz56iO 103311
+6ZW/5pWI 103312
+572V 103313
+5Ymv5pys 103314
+56mN 103315
+6ZKp 103316
+57m8 103317
+5Zu95Zyf 103318
+6LyJ 103319
+5LiN5b+Y 103320
+6K2m56S6 103321
+54G/ 103322
+5b+D5b6X 103323
+5oSa 103324
+5b+955Wl 103325
+5Zue5LqL 103326
+5Y2g5pyJ 103327
+5reE 103328
+54mh 103329
+55uR5LqL 103330
+57+h 103331
+6ZKI5a+55oCn 103332
+56qD 103333
+6KO9 103334
+6Iad 103335
+57Of 103336
+5riv5r6z 103337
+5aSq5aSq 103338
+5r6h 103339
+57uG5YyW 103340
+5ZSu5ZCO 103341
+5a6e5Zyo5piv 103342
+56uj 103343
+542y 103344
+5YC+5ZCR 103345
+5byV55So 103346
+6bmF 103347
+56yR5a65 103348
+5LmQ6Laj 103349
+5rCR5pS/ 103350
+6Zeo5oi3 103351
+5bGB 103352
+6L+35aSx 103353
+6ZSM 103354
+5bCP5bq3 103355
+5YuJ 103356
+5rO8 103357
+5L6L5a2Q 103358
+5LiJ5L2N 103359
+5bug 103360
+6JST 103361
+5bm/6ZiU 103362
+6ICN 103363
+6ICB6JmO 103364
+5Yuf6ZuG 103365
+6ISa5q2l 103366
+5ouv 103367
+5a2X5Y+3 103368
+54Sw 103369
+6aKg 103370
+6JqC 103371
+6JqB 103372
+6aOv 103373
+5Lq65oCn 103374
+5pKw 103375
+5Y6i 103376
+5bGA6ZmQ 103377
+5pyq5oiQ 103378
+5ZOq5YS/ 103379
+5aSn5Y+R 103380
+5LiN5a6a 103381
+5b6B5rGC 103382
+6YO1 103383
+5YC65p2D 103384
+54ix5L2g 103385
+6LqB 103386
+5LuF5L6b 103387
+6L+c5aSE 103388
+6Yab 103389
+5YO1 103390
+56ev5p6B5oCn 103391
+5o6h 103392
+5YmN5LiJ 103393
+5LqO5LiA5L2T 103394
+556E 103395
+552B 103396
+5rK4 103397
+5YWx6LWi 103398
+6YCA5b25 103399
+6LSd5bCU 103400
+5o6P 103401
+5oiy 103402
+6KGN 103403
+6ZSC 103404
+5LiH5L2Z 103405
+56eR5Yib 103406
+5ryU5ZSx 103407
+5qyn5YWD 103408
+5reh5reh 103409
+6Z2S5bGx 103410
+6Jed 103411
+57u9 103412
+5Luk54mM 103413
+6ZuG576k 103414
+5L2c54mp 103415
+54CR 103416
+5aSv 103417
+572R5ri4 103418
+5YWr5aSn 103419
+6aqa 103420
+6KqT 103421
+5Lya5bGV 103422
+5YWa5Y+y 103423
+5qOA5a+f6Zmi 103424
+5ZaY 103425
+6Zix 103426
+6ICM5Ye6 103427
+6YCa6L2m 103428
+6ZKT 103429
+5oOF5Lq6 103430
+5rib 103431
+5Lit56eL 103432
+54it 103433
+5Y+q5Ymp 103434
+5piU 103435
+6YeO55Sf 103436
+56Gr 103437
+6JCd5Y2c 103438
+5oq15oqX 103439
+55mr55er 103440
+6ZmA 103441
+6JSa 103442
+5bic 103443
+5ruh5ruh 103444
+6I+x 103445
+6ZqG6YeN 103446
+5pif57qn 103447
+5r2H 103448
+5YWs5YWD 103449
+6LCj 103450
+5q+U5Lqa 103451
+5qGM5a2Q 103452
+6LWj 103453
+6LK8 103454
+5oS/5pyb 103455
+6aG9 103456
+5rS+6YGj 103457
+56Wb 103458
+5aqa 103459
+6Zic 103460
+6JGr 103461
+6Iqm 103462
+5rO7 103463
+5aGM 103464
+54ut 103465
+5buJ5pS/ 103466
+5aWR5py6 103467
+5peX6Iiw 103468
+5oOr 103469
+5Lil5Y6J 103470
+5Y+L5oOF 103471
+5aaK 103472
+5aig 103473
+5ZOq5a62 103474
+6Iao 103475
+6Laf 103476
+5oyq 103477
+6JmQ 103478
+6aCB 103479
+556p 103480
+6bqf 103481
+56ij 103482
+6IGU6YCa 103483
+5Y+u 103484
+546L6ICF 103485
+5LiN56Gu5a6a 103486
+55Gc 103487
+6LCO 103488
+54mi6K6w 103489
+56K8 103490
+5oqk6IKk 103491
+6aG3 103492
+54SV 103493
+5YGa5by6 103494
+6Zqx56eB 103495
+6Zqx56eB5qyK 103496
+5Y+X5a6z 103497
+5LiN55Sx 103498
+54O5 103499
+6aWq 103500
+6amz 103501
+5Ly9 103502
+5Lid57u4 103503
+6KWE 103504
+5Y2B5L2Z 103505
+6bqX 103506
+5qyK5Yip 103507
+6IGe 103508
+5Y+k6ICB 103509
+6YGP 103510
+5ZCE5byP 103511
+5bCx6KGM 103512
+5YWl5aKD 103513
+54OB 103514
+6JyY 103515
+6Jub 103516
+57qs 103517
+55+r 103518
+6Luf 103519
+5rSX6KGj 103520
+5oSn 103521
+6aKE5qGI 103522
+6ZyG 103523
+5rex5Y6a 103524
+6Zi/5ouJ 103525
+5YaZ5a2X 103526
+5Y2m 103527
+6ZWA 103528
+5qih5qC3 103529
+5YKN 103530
+5pCN 103531
+6Jav 103532
+5aCF 103533
+5YWs56ev 103534
+6KiO 103535
+5Lyg5p+T 103536
+5q+v 103537
+55CG5bel 103538
+5Ya36ZO+ 103539
+56uL5pa5 103540
+5qKt 103541
+5Zyj6K+e 103542
+57u86Im6 103543
+546p56yR 103544
+5oOz5LiN5Yiw 103545
+5pGH5aS0 103546
+5re5 103547
+5YGH5pel 103548
+5YCY 103549
+6IC9 103550
+6I6T 103551
+5Z+3 103552
+6Ieq6LS4 103553
+5Y2K5aSp 103554
+5qqU 103555
+5r6O5rmD 103556
+6ZWR 103557
+5Lir 103558
+6YeM56iL 103559
+5byA6I2S 103560
+6I+P 103561
+5a6d6LS1 103562
+6K2s 103563
+5ZWf 103564
+5p+g 103565
+5qqs 103566
+6amt 103567
+5rGb 103568
+54aK54yr 103569
+6JWJ 103570
+6ZqP5LmL 103571
+5bGR 103572
+6L6D5by6 103573
+6IOz 103574
+6IaK 103575
+6Z2Z6Z2Z 103576
+5ZKq 103577
+5oub5ZG8 103578
+5Luj6KiA 103579
+5L+h566x 103580
+6KOF6YWN 103581
+5oKN 103582
+5Y2V6L2m 103583
+6JCO 103584
+5aSa5b2p 103585
+6Zm4 103586
+5LuO5Lil 103587
+5qmE 103588
+5qaE 103589
+6YCu 103590
+6YeM5pav 103591
+5ae/5oCB 103592
+5aSq5p6B 103593
+6Yed 103594
+5rqJ 103595
+6L+t 103596
+56e4 103597
+56eG 103598
+5bel5aeU 103599
+5rGV 103600
+6IGG 103601
+5L2s 103602
+57yF 103603
+55S4 103604
+5Ymv5bGA6ZW/ 103605
+6Ze6 103606
+6Kqk 103607
+6KSQ 103608
+5LiN6ZmQ 103609
+6IWV 103610
+5ZGV 103611
+55+2 103612
+5Yac5a62 103613
+566h5aeU5Lya 103614
+6aW6 103615
+6Iqc 103616
+5r6I 103617
+6Kmi 103618
+5aiB5bC85pav 103619
+5L2V5Ya1 103620
+5bCP5LyZ 103621
+5aWi5L6I 103622
+6L+Z56+H 103623
+6K+1 103624
+56ug56iL 103625
+57SA 103626
+6ZCY 103627
+6YKi 103628
+57OZ 103629
+57yA 103630
+5LmS 103631
+5LmT 103632
+54mi5Zu6 103633
+5Z2e 103634
+5byI 103635
+5L6L5aSW 103636
+5buz 103637
+6KeE56ug 103638
+6IqZ 103639
+56+3 103640
+6Lqv 103641
+5qCI 103642
+5Z2a5a6e 103643
+5Z+65bu6 103644
+552A55y8 103645
+57e0 103646
+6JGp 103647
+57ya 103648
+5qaG 103649
+5Li75YuV 103650
+56WA 103651
+5LqS6YCa 103652
+5bCk5Li6 103653
+5a6b 103654
+6aq8 103655
+5rGy 103656
+5L6D 103657
+5oKg5LmF 103658
+5pGn 103659
+5ouH 103660
+6auT 103661
+6bqS 103662
+6Zmb 103663
+5p64 103664
+5p2e 103665
+6LSs 103666
+5bCP6b6Z 103667
+5ZOu 103668
+6JOs5YuD 103669
+5YyI 103670
+55Wc54mn 103671
+5aip 103672
+5Liq5aSa 103673
+5rKl 103674
+5pin 103675
+54Sa 103676
+5oqR6YOB 103677
+55ah 103678
+6JiR 103679
+6YGO56iL 103680
+5qmx 103681
+6Z2T 103682
+5aSn55CG 103683
+6aum 103684
+5YiG6L6o 103685
+5rik 103686
+55ak 103687
+5Yqo6IO9 103688
+5byg5a62 103689
+5LiH5Y2D 103690
+5rul 103691
+6aWl 103692
+5bqf5byD 103693
+5biz 103694
+5ryz 103695
+6LGQ 103696
+5LuR 103697
+5auJ 103698
+5aaS 103699
+556S 103700
+6KGF 103701
+54u4 103702
+5b6B56iL 103703
+6YKv 103704
+6YO4 103705
+56WI 103706
+56W3 103707
+6La0 103708
+57uT5p6E5oCn 103709
+6KeG5ZCs 103710
+6Kyd 103711
+55KA 103712
+55Ko 103713
+5Ye65aSE 103714
+6K+A 103715
+5b6Y 103716
+5b6K 103717
+55yo 103718
+5ZaH 103719
+5Y+t 103720
+5Ziy 103721
+55W4 103722
+5bmy5LqL 103723
+5pqn 103724
+5rKb 103725
+5YSE 103726
+5buT 103727
+5Y6/6ZW/ 103728
+6IOa 103729
+55Ci 103730
+5623 103731
+6YeL 103732
+5L6u 103733
+5ZCp 103734
+5ZKQ 103735
+5Yy/ 103736
+5oqs6LW3 103737
+5rOj 103738
+5rak 103739
+6bq9 103740
+5puZ 103741
+5Ymv6Zmi6ZW/ 103742
+5YWa5ZKM 103743
+5pWj5Y+R 103744
+5ram5ruR 103745
+5ZO6 103746
+5oOs 103747
+5ryr6ZW/ 103748
+5LiN5oeI 103749
+5Z+g 103750
+5ZeT 103751
+6ICB54i3 103752
+6K69 103753
+5oiY57uE5ZCI 103754
+5qOg 103755
+5YWo5Z+f 103756
+6KCi 103757
+6K+h 103758
+5YmN5567 103759
+5pWb 103760
+5LiA5bCB 103761
+5bmC 103762
+6I6G 103763
+6K+d6K+t 103764
+57uG5YiZ 103765
+5bG/ 103766
+5bWM 103767
+6YCN 103768
+5Zix 103769
+5riy 103770
+54Ov 103771
+5525 103772
+6aaS 103773
+6IWl 103774
+5oqX5Ye7 103775
+552r 103776
+6I2U 103777
+6ZqO 103778
+5rOJ5rC0 103779
+6KyC 103780
+54Ks 103781
+5YeP5o6S 103782
+6LiK 103783
+6Le7 103784
+5reM 103785
+6Zy+ 103786
+5aWH57qz 103787
+5a+d 103788
+5qSO 103789
+5p+s 103790
+5pav5Z+6 103791
+5YWs56uL 103792
+6KiT 103793
+6aOZ 103794
+6am/ 103795
+5YK1 103796
+6JuZ 103797
+56+H56ug 103798
+5YiG5pSv 103799
+5LiK5bm0 103800
+562d 103801
+57yk 103802
+6ICB5pen 103803
+5Zms 103804
+5pym 103805
+6IOn 103806
+5raI6LK7 103807
+5pOU 103808
+5qa0 103809
+5r+S 103810
+57Ov 103811
+5rO4 103812
+5o2G 103813
+57ua 103814
+6LWO 103815
+55CQ 103816
+6LWC 103817
+5oWu 103818
+5rKM 103819
+54SZ 103820
+5pKt5oql 103821
+5reH 103822
+5YiH5YWl 103823
+55GV 103824
+55a1 103825
+6YG0 103826
+56ia 103827
+56mp 103828
+6J6D 103829
+5qOV 103830
+5oan 103831
+5oas 103832
+5Ly6 103833
+5q+X 103834
+5o2N 103835
+5oqJ 103836
+57SK 103837
+5byb 103838
+5out 103839
+5peP6Ieq5rK7 103840
+5Z23 103841
+56u2 103842
+6Kmz 103843
+6L+E5LuK 103844
+6LC0 103845
+556t6Kej 103846
+5p+/ 103847
+6aKK 103848
+57Cn 103849
+54Of6Iqx 103850
+5L6l 103851
+552m 103852
+6YWd 103853
+5rCT 103854
+55CJ 103855
+5aeK 103856
+5rKu 103857
+5oW3 103858
+6JyV 103859
+55Ga 103860
+6YeH55+/ 103861
+5aCw 103862
+5bqV6JW0 103863
+6Iaz 103864
+6L6V 103865
+6Z+t 103866
+5ZKZ 103867
+57K9 103868
+5YmU 103869
+5rKm 103870
+6IK0 103871
+6ZW2 103872
+5pi8 103873
+6L6X 103874
+5amq 103875
+5Yyu 103876
+5paT 103877
+5rG2 103878
+6YO0 103879
+6aC7 103880
+56qS 103881
+6KKx 103882
+5Zux 103883
+6ICY 103884
+6JqM 103885
+54uZ 103886
+55e5 103887
+56WJ 103888
+5o+u 103889
+5reG 103890
+56OL 103891
+6Ziq 103892
+5qs= 103893
+47g= 103894
+mbY= 103895
+45E= 103896
+8KOy 103897
+5KI= 103898
+460= 103899
+8Kyo 103900
+8KyA 103901
+8Kyu 103902
+8Kyv 103903
+8Kyc 103904
+8Kqo 103905
+8KuX 103906
+8KyK 103907
+8Kyx 103908
+8Kyf 103909
+5I4= 103910
+8KE= 103911
+5IM= 103912
+46A= 103913
+8Kk= 103914
+8Km+ 103915
+8Ky6 103916
+8KyZ 103917
+44CU 103918
+44CV 103919
+55qE5pe25YCZ 103920
+5pyJ6ZmQ5YWs5Y+4 103921
+5LmL5ZCO 103922
+5Lia5Yqh 103923
+5ZWK 103924
+6Jm954S2 103925
+5oul5pyJ 103926
+5LqS6IGU572R 103927
+6YKj5Lqb 103928
+5L2g55qE 103929
+5Yaz5a6a 103930
+6Zmk5LqG 103931
+5Zui6Zif 103932
+5Y+v5piv 103933
+5Lul5ZCO 103934
+56S+5Yy6 103935
+55qE6Zeu6aKY 103936
+5bm25LiU 103937
+5pWZ5biI 103938
+5bCx5Lya 103939
+5aSp56m66YOo6JC9 103940
+5pyA57uI 103941
+5b2T54S2 103942
+5Lmf5pyJ 103943
+56Gu5L+d 103944
+5oOz6KaB 103945
+6LSt5Lmw 103946
+5Lq655qE 103947
+5ZC0 103948
+55qE5Y+R5bGV 103949
+5LiN55+l6YGT 103950
+6L2v5Lu2 103951
+5oiR5Lus55qE 103952
+54i25q+N 103953
+5YmR 103954
+6ICM5piv 103955
+5a6J5o6S 103956
+5ZCO5p2l 103957
+55qE5Zyw5pa5 103958
+6LW1 103959
+6ICD6K+V 103960
+56qB54S2 103961
+5LiA5a6a6KaB 103962
+5Yi25L2c 103963
+6K+E5Lu3 103964
+5YWN6LS5 103965
+6LS555So 103966
+57uf5LiA 103967
+54S26ICM 103968
+6L+Z5qyh 103969
+6Z2S5bm0 103970
+5Lq657G7 103971
+5Lqm 103972
+6K6p5Lq6 103973
+6LSf6LSj5Lq6 103974
+6YeH5Y+W 103975
+55qE5LqL5oOF 103976
+5Lmf5Lya 103977
+6L2m6L6G 103978
+5pu05piv 103979
+5by65YyW 103980
+5oiR5YCR 103981
+5Lul5YmN 103982
+5LyY5YyW 103983
+5aeU5ZGY5Lya 103984
+5Zuw6Zq+ 103985
+5bm05bqm 103986
+5L2N5LqO 103987
+5oyH5Ye6 103988
+5YaN5qyh 103989
+5Yqe55CG 103990
+5q+P5Liq 103991
+5a+55pa5 103992
+6L+b6KGM5LqG 103993
+5pyA6auY 103994
+6K++56iL 103995
+6Lqr5LiK 103996
+5pu+57uP 103997
+5Yy755Sf 103998
+5a6J6KOF 103999
+5pyx 104000
+6L+Q6KGM 104001
+5Y+M5pa5 104002
+5pyA5aSn55qE 104003
+5p6E5bu6 104004
+6L+e57ut 104005
+55qE5bCP 104006
+5aW555qE 104007
+562J562J 104008
+5pS55ZaE 104009
+5ZCE57G7 104010
+6YGH5Yiw 104011
+5pyJ552A 104012
+5Lq654mp 104013
+5oC75piv 104014
+6L+F6YCf 104015
+5Yi25a6a 104016
+5a6D5Lus 104017
+5a6Y572R 104018
+6L+Y6KaB 104019
+57uI5LqO 104020
+5oi/5Zyw5Lqn 104021
+6K+B5piO 104022
+6IKh56Wo 104023
+5bqU5b2T 104024
+6Iux5Zu9 104025
+6L+Q55So 104026
+5pyA5paw 104027
+5Lqr5Y+X 104028
+6K6p5oiR 104029
+5pma5LiK 104030
+5b6e 104031
+5bCP6K+0 104032
+5bCk5YW25piv 104033
+6K6t57uD 104034
+5YWo5biC 104035
+5oyR5oiY 104036
+5pyJ54K5 104037
+5bim552A 104038
+55qE5Lic6KW/ 104039
+6aOO5qC8 104040
+6buE6YeR 104041
+5byV5a+8 104042
+5q2k5aSW 104043
+5pyA6L+R 104044
+6L+95rGC 104045
+5by66LCD 104046
+5Lmf5Y+v5Lul 104047
+5oSf5Yiw 104048
+6Ieq5oiR 104049
+54m55Yir5piv 104050
+5oiQ6YO9 104051
+6YCQ5riQ 104052
+5b+r5LmQ 104053
+5LmL5Lit 104054
+5oqV6LWE6ICF 104055
+5LuW5Lus55qE 104056
+5rCP 104057
+5bel5L2c5Lq65ZGY 104058
+5LqG5LiA5Liq 104059
+5ZWm 104060
+5LiA5YCL 104061
+5Z+65bGC 104062
+5rKf6YCa 104063
+56ys5LiA5qyh 104064
+5bm25rKh5pyJ 104065
+55qE5bel5L2c 104066
+5Zyo6L+Z6YeM 104067
+5p6q 104068
+5pSv5pKR 104069
+5pe25bCa 104070
+5p2l5Yiw 104071
+5pS26LSt 104072
+6Z2p5ZG9 104073
+5piv5LiN5piv 104074
+6K6o6K66 104075
+5Lia57up 104076
+5bCx6IO9 104077
+56uL5Y2z 104078
+6KGX6YGT 104079
+5Zyo5LiA6LW3 104080
+5pyI5Lu9 104081
+6auY56uv 104082
+5b6I6Zq+ 104083
+5L+E572X5pav 104084
+5omL5q61 104085
+5YGa5Ye6 104086
+5LyX5aSa 104087
+5a6e6KGM 104088
+5omT5byA 104089
+5ri45a6i 104090
+5L6d54S2 104091
+5bCx5YOP 104092
+56a75byA 104093
+6K+06YGT 104094
+5paw6IO95rqQ 104095
+5rqq 104096
+5LqV 104097
+5Luk5Lq6 104098
+5LiA5Zy6 104099
+5oiR5oOz 104100
+5Lik5Lq6 104101
+6Iez5bCR 104102
+55qE55Sf5rS7 104103
+5piv5Liq 104104
+6Iux6K+t 104105
+5rKS5pyJ 104106
+5oCd6ICD 104107
+6ZmQ5Yi2 104108
+5Y+w5rm+ 104109
+5LiA5pem 104110
+55qE5LiA5Liq 104111
+6auY57qn 104112
+5Yqe5YWs5a6k 104113
+5b635Zu9 104114
+5oiR5bCx 104115
+5a6a5L2N 104116
+6YCC5bqU 104117
+5oyH5qCH 104118
+5YWo55yB 104119
+5LiK6L+w 104120
+5a6D55qE 104121
+5Zue5a62 104122
+5qyn5rSy 104123
+6ZOB6Lev 104124
+6byT5Yqx 104125
+55qE5b2x5ZON 104126
+6auY5qCh 104127
+5aSp5LiL 104128
+6auY6LSo6YeP 104129
+5p2t5bee 104130
+6LWE6K6v 104131
+5pS+5Zyo 104132
+5pyJ5LiA5Liq 104133
+5bCx6KaB 104134
+5LiK6Z2i 104135
+6Kej6YeK 104136
+6YCQ5q2l 104137
+5bC9566h 104138
+5pyJ5LuA5LmI 104139
+55qE5LqL 104140
+55m76K6w 104141
+5Lq65rCR5biB 104142
+6KeC5LyX 104143
+6KeC5a+f 104144
+55S16ISR 104145
+55qE5ZCM5pe2 104146
+5L2c5Lia 104147
+5a6j5biD 104148
+55qE5L2c55So 104149
+5Zue5p2l 104150
+6Zq+5Lul 104151
+5omA5pyJ55qE 104152
+5bCP5a2m 104153
+5o+Q5YmN 104154
+5qSN54mp 104155
+5Yev 104156
+5LiK5LqG 104157
+5bCx5Zyo 104158
+5YWI5ZCO 104159
+5omL5pyv 104160
+6YOt 104161
+6Z2i5YmN 104162
+5q+V56uf 104163
+5LqM5piv 104164
+57qi6Imy 104165
+6Ziz5YWJ 104166
+6Iu55p6c 104167
+5b6I5aSa5Lq6 104168
+57uZ5oiR 104169
+5ZOm 104170
+55y8552b 104171
+6aCt 104172
+5LiA5piv 104173
+5Y+R5bGV55qE 104174
+5Y+N5bqU 104175
+5oi/5bGL 104176
+5pyf5b6F 104177
+56eN5qSN 104178
+5paH5a2m 104179
+5Y2z5Y+v 104180
+6aaW5qyh 104181
+6Iux6ZuE 104182
+5aSa5qyh 104183
+5YyF6KOF 104184
+5rKz5Y2X 104185
+5LmL6Ze055qE 104186
+5LuN54S2 104187
+5ZCs5Yiw 104188
+6JGj5LqL6ZW/ 104189
+6KeE5YiZ 104190
+5LiA5Lu9 104191
+5aSn5LyX 104192
+5L2/5b6X 104193
+6L+b5Y+j 104194
+5LiA54mH 104195
+5oCn55qE 104196
+55qE5aSn 104197
+5oiR5piv 104198
+5LqS5Yqo 104199
+5rCj 104200
+55qG 104201
+5YWs5Y+455qE 104202
+5LiA6L65 104203
+5Y+K5YW2 104204
+6Imv5aW955qE 104205
+5ouT5bGV 104206
+5b2T5bm0 104207
+5bm/5Zy6 104208
+5YGa5LqG 104209
+5Z+65LqO 104210
+5o+Q6YaS 104211
+5YWE5byf 104212
+6ICB5p2/ 104213
+6L+R5pel 104214
+54q25Ya1 104215
+5rOo6YeN 104216
+5Yia5Yia 104217
+6LCD56CU 104218
+5b+D5Lit 104219
+5oqK5o+h 104220
+6ZqP5ZCO 104221
+5LiN5aSf 104222
+5Yib5L2c 104223
+56uZ5Zyo 104224
+55u45LqS 104225
+55ar5oOF6Ziy5o6n 104226
+5bm05Luj 104227
+5bim5Yqo 104228
+5Lyk5a6z 104229
+56uf54S2 104230
+5byV6L+b 104231
+57Sv6K6h 104232
+6K6p5oiR5Lus 104233
+5Zue5pS2 104234
+5oql5ZCN 104235
+5Yqp5Yqb 104236
+6IGU55uf 104237
+562W55Wl 104238
+5ZGo6L65 104239
+5YuS 104240
+6L+Y5Zyo 104241
+5rWB6YeP 104242
+5a+75om+ 104243
+55S15Yqb 104244
+6Ii56Ii2 104245
+6L+Y6IO9 104246
+5ouF5Lu7 104247
+55qE5oOF5Ya15LiL 104248
+55qE5Y6f5Zug 104249
+57y65LmP 104250
+55CD5ZGY 104251
+5bKB55qE 104252
+55S35a2Q 104253
+5bel6LWE 104254
+6L+R5bm05p2l 104255
+5ZGA 104256
+5o+Q5L6b5LqG 104257
+5aW55Lus 104258
+5a625YW3 104259
+54eV 104260
+6L275p2+ 104261
+5qCh5Zut 104262
+6ICD5qC4 104263
+5Y2x6Zmp 104264
+5YWa57uE57uH 104265
+5oC757uP55CG 104266
+55qE5paw 104267
+546755KD 104268
+6L+Z5L2N 104269
+5a+55q2k 104270
+5a625Lq6 104271
+55qE6KaB5rGC 104272
+5rip5bqm 104273
+5oyH5pWw 104274
+55u05Yiw 104275
+5q2k5pe2 104276
+5rmW5Y2X 104277
+6YO96KaB 104278
+5L2c5Ye6 104279
+5ZCE5L2N 104280
+6ICD55Sf 104281
+5L6d5o2u 104282
+6K+06K+d 104283
+5oiR5Lmf 104284
+5bel5Y6C 104285
+5Y+Y5oiQ 104286
+5LuW5Lq6 104287
+5oiR6KeJ5b6X 104288
+5ZCE57qn 104289
+5Lyg5aWH56eB5pyN 104290
+5LiK5Y2H 104291
+5aW95YOP 104292
+5Yqg6YCf 104293
+5LqM5Y2B 104294
+6KKB 104295
+6KOF6aWw 104296
+6YO96IO9 104297
+5LiA5byg 104298
+5Yqo5oCB 104299
+5bm055qE 104300
+6L+Z5bCx5piv 104301
+5Lmf6KaB 104302
+6LWE5qC8 104303
+5oiY5LqJ 104304
+5oSf6LCi 104305
+5Z+56IKy 104306
+5aSp5rCU 104307
+5aWz5aOr 104308
+5Y+v6IO95Lya 104309
+55qE5Lqn5ZOB 104310
+5Lmf5bCx 104311
+5Li76KaB5piv 104312
+5Yi65r+A 104313
+57uZ5L2g 104314
+5aSn5pWw5o2u 104315
+5Yy75a2m 104316
+5Yik5pat 104317
+5LuW6K+0 104318
+6KGo5ryU 104319
+5Lqa5rSy 104320
+5LiT6aKY 104321
+56ue5LqJ5Yqb 104322
+6YKj5qC3 104323
+5bGV5byA 104324
+5bmz5pe2 104325
+5o6l5LiL5p2l 104326
+5om/6K+6 104327
+5rOV5Zu9 104328
+5YWz5b+D 104329
+5Lya5pyJ 104330
+6YKA6K+3 104331
+6aKE6Ziy 104332
+5a+55o6l 104333
+5aW95LqG 104334
+5ZKx5Lus 104335
+55qE5oSf6KeJ 104336
+5oCd6Lev 104337
+6YO95rKh5pyJ 104338
+55qE5pa55rOV 104339
+5aWz5a2Q 104340
+5Y+45rOV 104341
+6L+Y5Lya 104342
+6LaK5p2l6LaK5aSa 104343
+5Zug54K6 104344
+5rW35Y2X 104345
+5Lq65pWw 104346
+5bCG5Lya 104347
+5Lia5Li7 104348
+6aSQ6aWu 104349
+5bGF5L2P 104350
+5Y+R5Ye6 104351
+6L+R5pyf 104352
+5byV6aKG 104353
+5py65Zmo5Lq6 104354
+5Ye65p2l55qE 104355
+55yL6KeB 104356
+5L+K 104357
+6K6p5LuW 104358
+5LiN5oOz 104359
+5bel5L2c55qE 104360
+6KGl5YWF 104361
+5rWF 104362
+54m55b6B 104363
+5LiK5biC5YWs5Y+4 104364
+576O6aOf 104365
+5bm/6KW/ 104366
+5q+P5LiA5Liq 104367
+6JC95Zyw 104368
+5ZOB56eN 104369
+5ZKM6LCQ 104370
+5b275bqV 104371
+6auY6ICD 104372
+5pio5aSp 104373
+5YmN5b6A 104374
+55uR5rWL 104375
+55m+5bqm 104376
+5Zyo5Lit5Zu9 104377
+55qE6ZyA5rGC 104378
+5Lq/576O5YWD 104379
+5a2m5pyv 104380
+5pS25Yiw 104381
+5p2/5Z2X 104382
+5LiA5q61 104383
+5p6E5oiQ 104384
+5LyB5Lia55qE 104385
+6KGo6Z2i 104386
+5pW055CG 104387
+57uT5ama 104388
+5Lq65a62 104389
+5YGc5q2i 104390
+5a2m56eR 104391
+5pi+5b6X 104392
+5LyR5oGv 104393
+6aKE5pyf 104394
+5oiW5piv 104395
+55qE5Li76KaB 104396
+5bqU5a+5 104397
+6LWw5LqG 104398
+5Lit6Ze0 104399
+6LWw6L+b 104400
+5ZGI546w 104401
+5pCt6YWN 104402
+6bmP 104403
+5piv5Zug5Li6 104404
+5oOF57uq 104405
+5a6a5pyf 104406
+56S+5Lya5Li75LmJ 104407
+562J57qn 104408
+55+b55u+ 104409
+6aOe5py6 104410
+6Iez5LuK 104411
+5pS26ZuG 104412
+55qE5pWF5LqL 104413
+5YiH5a6e 104414
+5a6e546w5LqG 104415
+5b2i5oiQ5LqG 104416
+5Y2X5pa5 104417
+5Lit5a2m 104418
+5rW35rSL 104419
+5ZCm5YiZ 104420
+5ouN5pGE 104421
+5aSn5a2m55Sf 104422
+5Ye6546w5LqG 104423
+5oSP5aSW 104424
+5Lmf6IO9 104425
+55qE6IO95Yqb 104426
+5Z2Q5Zyo 104427
+5YiZ5piv 104428
+6ICD5a+f 104429
+5bCK6YeN 104430
+6Ziy5q2i 104431
+57Sn5byg 104432
+6K+75Lmm 104433
+5Ye66KGM 104434
+5bCx5pyJ 104435
+5bGl6KGM 104436
+546w5Luj5YyW 104437
+5Zu95Yqh 104438
+5Zu95Yqh6Zmi 104439
+57u05L+u 104440
+5Y6f5Yib 104441
+5piv5oyH 104442
+5LyR6Zey 104443
+54Ku 104444
+5paw5pe25Luj 104445
+6YCZ5YCL 104446
+5LiN5pWi 104447
+5a6M576O 104448
+57uG6IqC 104449
+6a2P 104450
+6JSs6I+c 104451
+6aKG5a+854+t5a2Q 104452
+6LaF57qn 104453
+6KGM5oOF 104454
+5Lq65bel5pm66IO9 104455
+5Y2w5bqm 104456
+5Z+656GA6K6+5pa9 104457
+5Y+I5piv 104458
+6I2v54mp 104459
+5ZC45pS2 104460
+5Y205piv 104461
+6YOO 104462
+5aWW5Yqx 104463
+55qE5pyL5Y+L 104464
+5L+d55WZ 104465
+6KeE5b6L 104466
+5paw55aG 104467
+6L+Y5Y+v5Lul 104468
+5o6l6L+R 104469
+5q2k5YmN 104470
+5om55YeG 104471
+5oCO5LmI5qC3 104472
+55qE5L2N572u 104473
+5LiA5Z2X 104474
+5ouS57ud 104475
+6aG+5a6i 104476
+5Lmf5Zyo 104477
+5LiA55Sf 104478
+6YOo6Zif 104479
+5bm05YmN 104480
+5pa56Z2i55qE 104481
+5bCd6K+V 104482
+55yf5q2j55qE 104483
+56aB5q2i 104484
+6L+Y5rKh5pyJ 104485
+5rCR55Sf 104486
+6LWw5ZCR 104487
+6IS45LiK 104488
+5b2T5aSp 104489
+6ZuG5Zui5YWs5Y+4 104490
+55qE5LiA56eN 104491
+6KW/5pa5 104492
+5Zue5bqU 104493
+5LiA5aOw 104494
+5bi45bi4 104495
+5o+Q5Yiw 104496
+6IW+6K6v 104497
+5pyN6KOF 104498
+5Li65L2V 104499
+5LqR5Y2X 104500
+5bCx566X 104501
+5Lyg5om/ 104502
+5Y+N6ICM 104503
+5LiH5ZCo 104504
+6LSi5Lqn 104505
+5aaC5LiL 104506
+5pel5YmN 104507
+5Y6f5pys 104508
+5pyA6YeN6KaB55qE 104509
+6K6k6K+B 104510
+5LiA6YGT 104511
+5L+h5oGv5YyW 104512
+5b6X5Yiw5LqG 104513
+6YCy6KGM 104514
+5oiR6KaB 104515
+6YCa5L+h 104516
+5a6k5YaF 104517
+6LWa6ZKx 104518
+5pS26JeP 104519
+6Kej5Yaz5pa55qGI 104520
+5oi/5Lqn 104521
+54u8 104522
+5rS75Yqb 104523
+57uP5rWO5Y+R5bGV 104524
+562J5b6F 104525
+5Lmf5b6I 104526
+5Z2R 104527
+5b6I5aW955qE 104528
+6Zq+5bqm 104529
+5LiN5aaC 104530
+5Lq65rCR5pS/5bqc 104531
+5Ye65Y+R 104532
+5YmN5pyf 104533
+5ryU5ZGY 104534
+5aWz55Sf 104535
+6IGa54Sm 104536
+5a6h6K6h 104537
+6aKE5rWL 104538
+5L6d5omY 104539
+5LqU5bm0 104540
+6KGl6LS0 104541
+5riF5pmw 104542
+6aqC 104543
+55yL6LW35p2l 104544
+55qE5a2p5a2Q 104545
+6aKR6YGT 104546
+5L2P5a6F 104547
+6Z2i5ZCR 104548
+5pyA5L2O 104549
+5pei54S2 104550
+5LiA5aWX 104551
+5pWw5a2m 104552
+576k5L2T 104553
+5YyX5Lqs5biC 104554
+5bGF54S2 104555
+5rCb5Zu0 104556
+6YCU5b6E 104557
+55qE5Z+656GA5LiK 104558
+6IGM6LSj 104559
+5Y+v6IO95piv 104560
+5Yab5LqL 104561
+5oiQ5pWI 104562
+5a2p5a2Q5Lus 104563
+6K6h566X5py6 104564
+6LWk 104565
+5Lqn5Lia5Y+R5bGV 104566
+5beo5aSn55qE 104567
+5bel5Lq6 104568
+55Sf6ZW/ 104569
+6YO95Y+v5Lul 104570
+55qE5py65Lya 104571
+6LWE6LSo 104572
+55eb6Ium 104573
+57KJ5Lid 104574
+5aKT 104575
+5bmz5a6J 104576
+566h6YGT 104577
+6Lef552A 104578
+6aWu6aOf 104579
+5ZWG5a62 104580
+5aSa5a62 104581
+5Y+45py6 104582
+5bqU6K+l5piv 104583
+6YCP6Zyy 104584
+6K6k5a6a 104585
+6KGM5Lia55qE 104586
+55qE5LyB5Lia 104587
+5q+P5LiA 104588
+6IyD5Zu05YaF 104589
+6L6D5aSn 104590
+6LSk 104591
+5aSn6LWb 104592
+5aSa5LqG 104593
+6bi/ 104594
+5Li05bqK 104595
+5Zyo6L+Z5Liq 104596
+55qE5YaF5a65 104597
+6ZSA6YeP 104598
+5b6I5bCR 104599
+5a2f 104600
+57u05oyB 104601
+5ZKW5ZWh 104602
+5pys5Zyw 104603
+6Imy5b2p 104604
+5bm26Z2e 104605
+6ICM5bey 104606
+5rip5pqW 104607
+6JCn 104608
+5oqT5L2P 104609
+6ICM5LiN5piv 104610
+5ZaK 104611
+55qE5YWz57O7 104612
+54mp5ZOB 104613
+6YKj5piv 104614
+5Yac5Lqn5ZOB 104615
+6L+Z5pe2 104616
+5ama5ae7 104617
+5rC05p6c 104618
+5pS26I63 104619
+5LuY5Ye6 104620
+5a6i5oi356uv 104621
+5ryU5Ye6 104622
+5YWo5paw 104623
+6L+Z5Lmf5piv 104624
+5piv55Sx 104625
+6KeC5b+1 104626
+5pyJ5Liq 104627
+6YCg5Z6L 104628
+6IOc5Yip 104629
+5LiJ5piv 104630
+6LaF5biC 104631
+5YWa5bu65bel5L2c 104632
+5pS+5b+D 104633
+57q/6Lev 104634
+5oub55Sf 104635
+5ZCD6aWt 104636
+6L2J 104637
+5bC96YeP 104638
+6KeB5Yiw 104639
+5ZCM5q+U5aKe6ZW/ 104640
+5Y2O5Li6 104641
+5oiR5biC 104642
+5o+Q5Ye65LqG 104643
+5rCR6K2m 104644
+5Y2a54mp 104645
+5Y2a54mp6aaG 104646
+6K+a5L+h 104647
+5YmN6Z2i 104648
+5bGx6KW/ 104649
+6L6F5Yqp 104650
+6L2s56e7 104651
+5pu05Li6 104652
+5Liw5a+M55qE 104653
+5Y2i 104654
+5b+r6YCS 104655
+5pi+6JGX 104656
+54mp6LWE 104657
+5Yiw6L6+ 104658
+5pyJ5Yip5LqO 104659
+5ZGG 104660
+5a2p5a2Q55qE 104661
+5LiN5L2G 104662
+56CU56m26Zmi 104663
+55Sz5oql 104664
+5pqo 104665
+5rCR6Ze0 104666
+5Y27 104667
+55qE5aOw6Z+z 104668
+5biC5Zy655qE 104669
+5LiA5Y+l 104670
+55yB57qn 104671
+5p2l55qE 104672
+5ZOq5Liq 104673
+5omN5Lya 104674
+5YiG6YWN 104675
+6JSh 104676
+5LuW5Zyo 104677
+5YWx5pyJ 104678
+5aGY 104679
+6JKC 104680
+6ZyN 104681
+5Y+C6KeC 104682
+5LiI5aSr 104683
+5L6d6Z2g 104684
+5pyJ5pe2 104685
+5LqG5b6I5aSa 104686
+5LiW55WM5p2v 104687
+5a625peP 104688
+5LiN6ZyA6KaB 104689
+5aSn5biI 104690
+6J6N5YWl 104691
+6Z2e5rOV 104692
+55eF5Lq6 104693
+5ZCO5pyf 104694
+5aSn5a626YO9 104695
+572R5Z2A 104696
+5Y6f5paZ 104697
+5L6/5a6c 104698
+5rab 104699
+5Lu/5L2b 104700
+5beu6Led 104701
+5Y+m5LiA5pa56Z2i 104702
+5Lqn5ZOB55qE 104703
+6LWr 104704
+5oOF5Ya15LiL 104705
+6ZKi6ZOB 104706
+5pys56uZ 104707
+57qz5YWl 104708
+5bey5pyJ 104709
+5pyJ5rKh5pyJ 104710
+5Lyw6K6h 104711
+6aOY 104712
+5pyf6LSn 104713
+5YCL5Lq66LOH5paZ 104714
+5LiT5Lia55qE 104715
+54iG5Y+R 104716
+6Ie05Yqb5LqO 104717
+546w5Zyo55qE 104718
+5pyJ5ZOq5Lqb 104719
+56C05Z2P 104720
+5pWw5a2X5YyW 104721
+5Zyw6Z2i 104722
+6buR6Imy 104723
+5bm85YS/5Zut 104724
+55qE57K+56We 104725
+5Lqt 104726
+5a+85ryU 104727
+546w5pyJ 104728
+5q2m5Zmo 104729
+6IuP5bee 104730
+546E 104731
+5rGf6KW/ 104732
+5bu25Ly4 104733
+6K665paH 104734
+6L6D5Li6 104735
+546p5rOV 104736
+6byO 104737
+5ZCM5q2l 104738
+6YeK5pS+ 104739
+5pud5YWJ 104740
+5Z2a5Yaz 104741
+5aeU5omY 104742
+5bCG5Zyo 104743
+5LqI5Lul 104744
+5L2c5paH 104745
+6ICM5Zyo 104746
+5LyY5YWI 104747
+5Zue5Y67 104748
+5L+u5aSN 104749
+5Zu95YaF5aSW 104750
+562W5YiS 104751
+5Y+R5pS+ 104752
+5b+D5oOF 104753
+55qE5Y6G5Y+y 104754
+6Z2i6K+V 104755
+5Lic5YyX 104756
+5L+h5Y+3 104757
+57Ku6aOf 104758
+6K+B5Lmm 104759
+5p+Q5Lqb 104760
+6L+Q5L2c 104761
+5Yay5Ye7 104762
+54Ot54K5 104763
+5pe25pe2 104764
+5pe25pe25b2p 104765
+5Zyw54K5 104766
+5LiA5L2T5YyW 104767
+6Zq+6aKY 104768
+5puw 104769
+56uL5Yi7 104770
+5piv6Z2e5bi4 104771
+5YWx5ZKM 104772
+5YWx5ZKM5Zu9 104773
+5r+A5Yqx 104774
+5pyJ5pWI55qE 104775
+5aSE572u 104776
+6K+l5YWs5Y+4 104777
+5qOA6aqM 104778
+6K2m5pa5 104779
+6LS+ 104780
+5LqG5LiA5LiL 104781
+5LuK5ZCO 104782
+54Wu 104783
+55So5ZOB 104784
+6K+76ICF 104785
+5oiR5Zyo 104786
+5Zue5aSN 104787
+5LiA5bqn 104788
+6L+Y5rKh 104789
+5a6a5Yi2 104790
+5rKh5oOz5Yiw 104791
+5aS5 104792
+5Lyg6YCS 104793
+5LiA5qy+ 104794
+5by65aSn55qE 104795
+55qE6KGM5Li6 104796
+5aSP5aSp 104797
+5Y+R5Yqo5py6 104798
+6aKG5Z+f55qE 104799
+5a6e6aqM5a6k 104800
+5LiA5oqK 104801
+5piv5Li65LqG 104802
+6ZmV6KW/ 104803
+5ouF5L+d 104804
+6L6+5oiQ 104805
+6KaB5piv 104806
+5piO5aSp 104807
+57uZ5LuW 104808
+5bu656uL5LqG 104809
+5LiN6KGM 104810
+5Lit5paH 104811
+5Zyw6K+0 104812
+5ZCO55qE 104813
+55uR5o6n 104814
+6YC4 104815
+5oC76YOo 104816
+5pys5paH 104817
+6bm/ 104818
+5pmv6KeC 104819
+55qE55uu5qCH 104820
+6JuH 104821
+5Yav 104822
+5Lit5Yy7 104823
+5pWI5bqU 104824
+5Lqn6YeP 104825
+5a2d 104826
+6LSm5oi3 104827
+6L+d5Y+N 104828
+6JGj5LqL5Lya 104829
+5Lqs5Lic 104830
+6LSj5Lu757yW6L6R 104831
+5ZWP6aGM 104832
+54ix5b+D 104833
+6K2m5a+f 104834
+6aSQ5Y6F 104835
+5biC5pS/5bqc 104836
+5aSp5aSp 104837
+5paw6bKc 104838
+6YOR5bee 104839
+6LaF6LaK 104840
+5b2t 104841
+55+l6K+G5Lqn5p2D 104842
+5Zue5b+G 104843
+6Lev57q/ 104844
+5buJ5rSB 104845
+6Z2S5bCR5bm0 104846
+5Y+W5b6X5LqG 104847
+55yL5Yiw5LqG 104848
+6aas 104849
+57K+5ZOB 104850
+5Zyw6ZOB 104851
+5oyB5pyJ 104852
+5LiL5LqG 104853
+5pyJ5pe25YCZ 104854
+5LiA5Lq6 104855
+5pKS 104856
+5LuU57uG 104857
+6ICB5YWs 104858
+5LqL5a6e5LiK 104859
+6IGU6LWb 104860
+5L6b5bqU6ZO+ 104861
+6aKE566X 104862
+5Yi26YCg5Lia 104863
+5a6J5YWo55Sf5Lqn 104864
+5L+x5LmQ 104865
+5L+x5LmQ6YOo 104866
+55qE5qC45b+D 104867
+5omT566X 104868
+5b2x54mH 104869
+5pCt5bu6 104870
+5Lmf5LiN5Lya 104871
+5ouF5b2T 104872
+5bGC6Z2i 104873
+5a2m5ZGY 104874
+5Li05pe2 104875
+55u457uT5ZCI 104876
+5a+55q+U 104877
+5LuW5piv 104878
+5paw5Yy6 104879
+6L+b5Y67 104880
+55m+5bm0 104881
+5L+p 104882
+5bC95b+r 104883
+55S15a2Q5ZWG5Yqh 104884
+5pu05pyJ 104885
+5riF55CG 104886
+5Y+m5LiA5Liq 104887
+5YK7 104888
+5LuA5LmI5qC355qE 104889
+5piv5pyA 104890
+5ZGo5bm0 104891
+5b6I5a655piT 104892
+5Zui57uT 104893
+57SE 104894
+5pep5bey 104895
+55qE5Y+Y5YyW 104896
+6Zye 104897
+5pel5LiK5Y2I 104898
+5aSx5Y67 104899
+5Lit5ZyL 104900
+55qE5LiA5Lqb 104901
+5bCP5a2p 104902
+5LiL6LeM 104903
+6ZS754K8 104904
+6ZE= 104905
+6ZGr 104906
+5b+X5oS/6ICF 104907
+6IKh5biC 104908
+6LWb5LqL 104909
+6K645Y+v6K+B 104910
+5Y+v5oyB57ut 104911
+5ZGK6K+J6K6w6ICF 104912
+6YC76L6R 104913
+5byV5YWl 104914
+55qE6L+H56iL5Lit 104915
+6KeG6KeJ 104916
+6Ieq5rK75Yy6 104917
+6K+B5o2u 104918
+6KOF572u 104919
+56ys5LiJ5pa5 104920
+5bm05p2l 104921
+5bm/5Lic55yB 104922
+5bim5p2l5LqG 104923
+6ZW/5rGf 104924
+6K6/6Zeu 104925
+5beu5LiN5aSa 104926
+5piv5oiR 104927
+6YGt6YGH 104928
+5oqT5aW9 104929
+6auY6L6+ 104930
+5bm25Zyo 104931
+6Ieq6KeJ 104932
+5L6b5bqU5ZWG 104933
+5oOF5oSf 104934
+5L2P5LqG 104935
+55qE6IGM5Lia 104936
+55qH5bid 104937
+6KW/6YOo 104938
+5ZKM5bmz 104939
+55qE5Yqb6YeP 104940
+5rGq 104941
+5YWF5YiG5Y+R5oyl 104942
+5oqV6K+J 104943
+6LW35Yiw 104944
+5LqS55u4 104945
+5r6z6Zeo 104946
+5o6l5Yiw 104947
+5rC05rOl 104948
+5qih5Z6L 104949
+5LiA5Y2K 104950
+56ep5bqP 104951
+5oiR5Lus5Zyo 104952
+5om/6K6k 104953
+5LiA6YOo5YiG 104954
+5Y2g5q+U 104955
+5aaH5aWz 104956
+57KY 104957
+5LqG6Kej5Yiw 104958
+5LiA5a6a5Lya 104959
+5ZCE5aSn 104960
+6LWw5Ye6 104961
+5Li65aSn5a62 104962
+6auY6ZOB 104963
+5Y+v5Lul5Zyo 104964
+5L2G5Zyo 104965
+55Sf5oCB546v5aKD 104966
+6I+v 104967
+55qE5Lu35qC8 104968
+6bq754Om 104969
+5r+A5Y+R 104970
+6YKj5bCx 104971
+55qE5qC35a2Q 104972
+5Li65q2k 104973
+5aSp5Zyw 104974
+55qE55uu55qE 104975
+5YC65Yi4 104976
+5bey57aT 104977
+5Zub5aSn 104978
+5ZCM5pe25Lmf 104979
+5b285q2k 104980
+5ou/5Yiw 104981
+5ZCr6YeP 104982
+5Y2B5aSn 104983
+6Zq+6YGT 104984
+5byX 104985
+5LiA5q615pe26Ze0 104986
+54Wn6aG+ 104987
+5pWw5o2u5pi+56S6 104988
+5oiQ5Li65LqG 104989
+6LWw5Yiw 104990
+5pys5YWs5Y+4 104991
+57uI56uv 104992
+5Lmf5LiN5piv 104993
+5aS05Y+R 104994
+5aSn57qm 104995
+6aOO5pmv 104996
+5raI6ICX 104997
+5a6h5p+l 104998
+5LqJ5Y+W 104999
+5rOV5rK7 105000
+5LqL54mp 105001
+57yT6Kej 105002
+5oOo 105003
+55u45bqU55qE 105004
+55qE5pWI5p6c 105005
+5Y+N5aSN 105006
+5Y+R55Sf5LqG 105007
+6YCZ5Lqb 105008
+57uD5Lmg 105009
+5Y6o5oi/ 105010
+5byA5ouT 105011
+5qyj6LWP 105012
+5aSr5aa7 105013
+5LiN5LiA5qC3 105014
+5Lqn6IO9 105015
+6Iqv54mH 105016
+6KaB57Sg 105017
+5Y+N5a+5 105018
+546H5YWI 105019
+6LSn54mp 105020
+5pel55S1 105021
+5L2c5a62 105022
+5pS56L+b 105023
+5oiQ5YiG 105024
+5Zug6ICM 105025
+5YeP6IKl 105026
+5r2Y 105027
+5bGx5Lic55yB 105028
+5Yqd 105029
+5Z+L 105030
+5q2m6KOF 105031
+5rGH5oql 105032
+5LiA5Liq5pyI 105033
+54Ot6Zeo 105034
+5aSn6YGT 105035
+5rS75YuV 105036
+6YO95b6I 105037
+55S15qKv 105038
+57Sn5oCl 105039
+5YC65Yqh 105040
+5a6i5pyN 105041
+5LiA6YOo 105042
+5L2g5piv 105043
+546w54q2 105044
+5q2j56Gu55qE 105045
+5LmL5aSE 105046
+57yW5Yi2 105047
+5L2g5Y+v5Lul 105048
+562J5Zyw 105049
+6I6J 105050
+5a+56K+d 105051
+5reY5a6d 105052
+6LCD6IqC 105053
+5o6S5pS+ 105054
+5bqT5a2Y 105055
+57Sa 105056
+55qE5LyY5Yq/ 105057
+5p2D5aiB 105058
+5Lul5LiL566A56ew 105059
+5LiA6aG5 105060
+6IGa6ZuG 105061
+5Lyg57uf55qE 105062
+5re35ZCI 105063
+6L+Z5LiA54K5 105064
+5LiA55y8 105065
+5peg6ZmQ 105066
+6I635b6X5LqG 105067
+6YCJ5omL 105068
+5Yi25ZOB 105069
+5Y2P5L2c 105070
+54us54m555qE 105071
+5LiA57qn 105072
+6L+Z5Liq6Zeu6aKY 105073
+5paM 105074
+5piv5oiR5Lus 105075
+5pWM5Lq6 105076
+5riF5rSX 105077
+5LiA55u05Zyo 105078
+5bCP57Gz 105079
+55qE6L+H56iL 105080
+5Zyo5YyX5Lqs 105081
+5LiA5pSv 105082
+5pep5LiK 105083
+5paH6Im6 105084
+56aP5Yip 105085
+6aOf55So 105086
+5oSf5Yqo 105087
+5YWo56iL 105088
+5pSv5Ye6 105089
+5paw5bu6 105090
+5biV 105091
+5pi+54S2 105092
+55yf55qE5piv 105093
+5paw6Ze7572R 105094
+6IO95ZCm 105095
+5Y2P5Yqp 105096
+5Lqy6Ieq 105097
+5b6I5pyJ 105098
+55m85bGV 105099
+5oSP5aSn 105100
+5oSP5aSn5Yip 105101
+55S1572R 105102
+5pel55uK 105103
+54ax 105104
+6IKM6IKk 105105
+55S35oCn 105106
+57uE5bu6 105107
+562J6Zeu6aKY 105108
+5raI6Zmk 105109
+5oqk55CG 105110
+5aGR5paZ 105111
+5LmM5YWL 105112
+5LmM5YWL5YWw 105113
+5ZWG5qCH 105114
+55Cz 105115
+5paw5omL 105116
+55qE54m554K5 105117
+5ZKs 105118
+5b2T5LiL 105119
+6K6+6K6h5biI 105120
+6LWU5YG/ 105121
+56ys5Y2B 105122
+5pm66IO95YyW 105123
+5byA5Y+R5Yy6 105124
+5Y+v5Lul6YCa6L+H 105125
+5YWx5Lqn5YWa 105126
+5Y6J5a6z 105127
+54G15rS7 105128
+5pe25YWJ 105129
+6YOo5L2N 105130
+5Lq65paH 105131
+6L+b5p2l 105132
+5LmL5omA5Lul 105133
+5LiJ5Y2B 105134
+55qE5a2m55Sf 105135
+6Ziy5oqk 105136
+5Zu95Lqn 105137
+5rex5Zyz5biC 105138
+6YKj5bCx5piv 105139
+5Yiw5L2N 105140
+54m55pyX 105141
+54m55pyX5pmu 105142
+5a6e5pe2 105143
+5Y+w54Gj 105144
+6ICM5LiN 105145
+5oyH5a6a 105146
+5Z2d 105147
+6IWQ6LSl 105148
+54m55a6a 105149
+5aKe6YCf 105150
+5qCH562+ 105151
+5oi/5Lu3 105152
+5oSB 105153
+6LSv5b276JC95a6e 105154
+5oCn6LSo 105155
+56CU56m255Sf 105156
+576O5a65 105157
+5om56K+E 105158
+56m256uf 105159
+5Lq65Yqb6LWE5rqQ 105160
+6ZaL5aeL 105161
+5Zue5b2S 105162
+6JCl5ZWG 105163
+6JCl5ZWG546v5aKD 105164
+5Lit5Zu95Lq6 105165
+55qE5Z+65pys 105166
+6K+d6aKY 105167
+5qCH5YeG5YyW 105168
+6KW/6JeP 105169
+5Yu+ 105170
+55qE6K6+6K6h 105171
+566A5Y2V55qE 105172
+5aSN5Yi2 105173
+5riQ5riQ 105174
+5Lul5aSW 105175
+6IGU5Yqo 105176
+5Lik5qyh 105177
+5oCn5ZKM 105178
+5pu05aSn 105179
+55qE5ZCN5a2X 105180
+6Z+m 105181
+5L2g6KaB 105182
+5aKD5aSW 105183
+5pep5pyf 105184
+5Yid5q2l 105185
+6LSm5Y+3 105186
+5a6z5oCV 105187
+5pio5pel 105188
+5Yia5omN 105189
+56We56eY 105190
+57K+5b+D 105191
+5rWB6YCa 105192
+5YWo5pa55L2N 105193
+5Lul5b6A 105194
+5Lmf5bCG 105195
+5piv5Lit5Zu9 105196
+5Zu95a6257qn 105197
+5bCG5Yab 105198
+5pGK 105199
+5pyA5Li6 105200
+56ys5LiA5pe26Ze0 105201
+5raI5q+S 105202
+5bCG5LqO 105203
+5aiB6IOB 105204
+6Iux5paH 105205
+5omL5Lit 105206
+55CD6L+3 105207
+6KeC55yL 105208
+56a75ama 105209
+5pys5Zyf 105210
+5YiG5pWj 105211
+5pm0 105212
+6KaB5rOo5oSP 105213
+5rWq6LS5 105214
+566h5o6n 105215
+5Ye65ZSu 105216
+5oC76KOB 105217
+5LiA6Zi1 105218
+5aiH 105219
+5LqU5Liq 105220
+5b2T5Yid 105221
+57qg57q3 105222
+5LiT55So 105223
+5aSH5qGI 105224
+5Yid5pyf 105225
+5a6D5piv 105226
+5Yy65Z2X 105227
+5Yy65Z2X6ZO+ 105228
+5aSn6L+e 105229
+6L+Z57G7 105230
+5Y+Y5oiQ5LqG 105231
+6YKE5piv 105232
+5Y2a5a6i 105233
+54++5Zyo 105234
+5LiA5pa5 105235
+5a6M5oiQ5LqG 105236
+6L+Z5Liq5pe25YCZ 105237
+5YWo5bm0 105238
+5LiK57q/ 105239
+572Q 105240
+56ue6LWb 105241
+5Ye654mI56S+ 105242
+5ZOl5ZOl 105243
+5a+r 105244
+5b6X5Lul 105245
+6Iqx5Zut 105246
+5LqG6LW35p2l 105247
+6ISx6LSr5pS75Z2a 105248
+55qE5Y6f5YiZ 105249
+6K6y6Kej 105250
+5raI5YyW 105251
+5o2f5a6z 105252
+5pqC5pe2 105253
+5b6X55+l 105254
+6YCC55So 105255
+6Zeo5bqX 105256
+6Kej6K+7 105257
+5pmu5Y+K 105258
+5Lq65rCR5rOV6Zmi 105259
+5Ymv5Li75Lu7 105260
+5b+D54G1 105261
+6K+K5pat 105262
+576O5aWz 105263
+5p+v 105264
+5bm05Lul5p2l 105265
+5rS76LeD 105266
+5YCf5Yqp 105267
+5YWx5bu6 105268
+6K+J6K68 105269
+5pS+5p2+ 105270
+56qX5Y+j 105271
+5LyB5qWt 105272
+5Yqg5ou/ 105273
+5Yqg5ou/5aSn 105274
+5Lmw5LqG 105275
+5Li75rWB 105276
+5oeC5b6X 105277
+5bCG5YW2 105278
+6YCP5piO 105279
+5bel5L2c5Lit 105280
+6IKh5Lu3 105281
+5qGj5qGI 105282
+5rKh5pyJ5Lu75L2V 105283
+5ZGK55+l 105284
+5bm05Yid 105285
+5pel5LiL5Y2I 105286
+5Y6C5ZWG 105287
+6IqC5aWP 105288
+5Li75a+8 105289
+6KOd 105290
+5YWz6ZSu6K+N 105291
+6IGK5aSp 105292
+5YaZ5L2c 105293
+5pS56Z2p5byA5pS+ 105294
+5pyJ5pyb 105295
+6YCa5oql 105296
+6JCM 105297
+5oC76aKd 105298
+55+t5pyf 105299
+5LiA55Wq 105300
+55Sf5rS755qE 105301
+5YyW55qE 105302
+5pil5aSp 105303
+6L+Z5Zy6 105304
+5paw5byA5Lyg5aWH 105305
+5piv6KaB 105306
+5bCa5pyq 105307
+5Y+Y5pu0 105308
+5LiA5ZGo 105309
+5a6i6KeC 105310
+5pel6Iez 105311
+6bmw 105312
+546y 105313
+5bCG5p2l 105314
+5a6i5Lq6 105315
+5Y+Y6Z2p 105316
+6K+05LqG 105317
+5Y6f55CG 105318
+6IGM5Yqh 105319
+5Y+I5pyJ 105320
+5LiA5Y+l6K+d 105321
+5oSf5Y+X5Yiw 105322
+56yU6ICF 105323
+56e75rCR 105324
+6KW/5Y2X 105325
+5LmD6Iez 105326
+5q2j6KeE 105327
+5Yid5Lit 105328
+54qs 105329
+5b2T5LqL 105330
+5b2T5LqL5Lq6 105331
+5oiR5Lus6KaB 105332
+5YWl5Y+j 105333
+6YKj5pe2 105334
+5pyJ6ZmQ6LSj5Lu7 105335
+5bCR5aWz 105336
+6L+Z5LmI5aSa 105337
+5YiG5YWs5Y+4 105338
+5a6H5a6Z 105339
+55qE6YCJ5oup 105340
+5aeQ5aeQ 105341
+5Y+R6LW3 105342
+6LuN 105343
+5pu05aW95Zyw 105344
+6ZmG57ut 105345
+5pys5pyN5YuZ 105346
+5aup 105347
+6LW257Sn 105348
+6ISC6IKq 105349
+56ys5LqM5aSp 105350
+5oiR5Lya 105351
+5Lik5L2N 105352
+5pWy 105353
+5YWs5a6J5py65YWz 105354
+56eR5oqA5Yib5paw 105355
+5bC65a+4 105356
+6L6Q5bCE 105357
+5a6X5pWZ 105358
+6L2s5o2i 105359
+5Ye6546w5Zyo 105360
+5LiA6aKX 105361
+5pyf6ZmQ 105362
+5ZCM5a2m5Lus 105363
+5YyX5pa5 105364
+5L2g5bCx 105365
+5LiA5bim5LiA6Lev 105366
+6ICB5amG 105367
+5ri45oiP546p5a62 105368
+55qE57uT5p6c 105369
+6KGl5YG/ 105370
+5aSW6LS4 105371
+5a+55b6F 105372
+57u055Sf57Sg 105373
+57uP6ZSA5ZWG 105374
+6L+Y5bCG 105375
+5a2Q5aWz 105376
+5pu06auY 105377
+5LiN5aSn 105378
+6Ym05a6a 105379
+6K6p5LuW5Lus 105380
+5omA6LCT55qE 105381
+5q275LqG 105382
+5biu5om2 105383
+5ZOy5a2m 105384
+5Lul5LiK55qE 105385
+55qE5YWz6ZSu 105386
+5pep5bCx 105387
+5oql5Lu3 105388
+6YG15a6I 105389
+5omp5byg 105390
+5piv5b6I 105391
+5byA6YCa 105392
+5paw5Yqg 105393
+5paw5Yqg5Z2h 105394
+57+76K+R 105395
+6K+i6Zeu 105396
+6bit 105397
+5L2T5YaF 105398
+5Lik5Liq5Lq6 105399
+54i5 105400
+6Zyc 105401
+5Lmh5p2R5oyv5YW0 105402
+552h6KeJ 105403
+5a6Y5ZGY 105404
+5Yib5aeL 105405
+5Yib5aeL5Lq6 105406
+5LyX5Lq6 105407
+5Y2z5L6/ 105408
+55ar6IuX 105409
+5LyB5Lia5a62 105410
+5rij 105411
+57K+5Yqb 105412
+5aSW6YOo 105413
+6IGq5piO 105414
+6L+Z5Lmf 105415
+5b2V5Y+W 105416
+5Yay56qB 105417
+5YWo6Lqr 105418
+5a2j6IqC 105419
+5b+954S2 105420
+55qE5oCB5bqm 105421
+5YKo5aSH 105422
+5L+d5YW7 105423
+55qE5oOz5rOV 105424
+5LiK5rW35biC 105425
+5pC65omL 105426
+55qE5L+h5oGv 105427
+5ZWG5Zy6 105428
+55qE5oCd5oOz 105429
+5p2D5Yqb 105430
+5q+r5peg 105431
+5oCA5a2V 105432
+56Gs5Lu2 105433
+5YaF6JKZ5Y+k 105434
+5o6i6K6o 105435
+5YW755Sf 105436
+55qE6KGo546w 105437
+56m65Lit 105438
+5oGQ5oCW 105439
+5b6I6auY 105440
+57uP5rWO56S+5Lya 105441
+5LiK5p2l 105442
+5bu257ut 105443
+6YeN5aSN 105444
+6Ziy6IyD 105445
+55qE5b2i5byP 105446
+5pyI5bqV 105447
+6ICB5bm05Lq6 105448
+57u/5YyW 105449
+5bGx5Yy6 105450
+5ou/5Ye6 105451
+5peF5a6i 105452
+5pu05o2i 105453
+5YWs5Li7 105454
+6IqC57qm 105455
+5YWo5Y6/ 105456
+5Zue5oql 105457
+55CG5oCn 105458
+55av54uC 105459
+5raJ5auM 105460
+5Ymn5oOF 105461
+5Yas5a2j 105462
+5ZCO57ut 105463
+6L+Z5piv5LiA5Liq 105464
+5ryU6K6y 105465
+5LiA5bGC 105466
+5pyJ5YWz6YOo6Zeo 105467
+5peg5aWI 105468
+56eN57G7 105469
+55u45YWz55qE 105470
+5oiW6ICF5piv 105471
+5om25oyB 105472
+5aSa5pWw 105473
+55qE5L2c5ZOB 105474
+5LiL5LiA5q2l 105475
+5biI5YKF 105476
+6auY6YCf5YWs6Lev 105477
+5aW95Y+L 105478
+5LyY56eA55qE 105479
+6L+b5LqG 105480
+5oGQ5oCV 105481
+5LqG5ZCn 105482
+5aSn6KeE5qih 105483
+55qE5LiW55WM 105484
+5oCA55aR 105485
+5be3 105486
+5YW05aWL 105487
+5oiw 105488
+5p2R6YeM 105489
+5pyL5Y+L5ZyI 105490
+5Yas5aSp 105491
+5Lit5Y2O5Lq65rCR 105492
+5Y2P5ZWG 105493
+6K+E6YCJ 105494
+5pet 105495
+5aKe5Yqg5LqG 105496
+5Y+X5Lyk 105497
+5LiA6IKh 105498
+5L6/5o23 105499
+5LiR 105500
+6bmk 105501
+5aSW6KeC 105502
+5bel56iL5biI 105503
+5ZKM5YW25LuW 105504
+6L+Z5bCx 105505
+5Lit5bCP5LyB5Lia 105506
+6KW/5YyX 105507
+5Zu95pyJ5LyB5Lia 105508
+6Iul5piv 105509
+5Y+v5oOc 105510
+55Sf5pel 105511
+5Ye9 105512
+5Lmw5Y2W 105513
+56Wd56aP 105514
+5Lq65rCR576k5LyX 105515
+5YWJ5piO 105516
+5YWs5a+T 105517
+5piv6LCB 105518
+5oiR55+l6YGT 105519
+6K+t5paH 105520
+5pWP5oSf 105521
+5LiN6ZSZ55qE 105522
+5p2l6K6y 105523
+5rOi5Yqo 105524
+55qE56ys5LiA 105525
+5Zyw6ZyH 105526
+5Zyo5YWo5Zu9 105527
+6aqo5bmy 105528
+5a6J572u 105529
+5a6255S1 105530
+5LiO5q2k 105531
+5LiO5q2k5ZCM5pe2 105532
+5Y+X54G+ 105533
+54Ot57q/ 105534
+55qE5oqA5pyv 105535
+5rWL6YeP 105536
+5L6d6LWW 105537
+5Lit5Zu955qE 105538
+54m55oCn 105539
+6L6D6auY 105540
+6Lip 105541
+5Lya5Zyo 105542
+5bu66YCg 105543
+5a+86Iiq 105544
+5oOz6LW3 105545
+5YWo5LiW55WM 105546
+5bu65p2Q 105547
+56+A 105548
+55qE5Z+656GA 105549
+6Ieq5Yqo5YyW 105550
+5YmN5ZCO 105551
+552h55yg 105552
+5o6o6KGM 105553
+5o2u5LqG6Kej 105554
+5LuA5LmI5pe25YCZ 105555
+5LiN5Zac5qyi 105556
+54Wk54Kt 105557
+6YKj5LmI5aSa 105558
+5biC5Zy65YyW 105559
+5LiN566h5piv 105560
+56uL5Zy6 105561
+6YO95rKh 105562
+6K++6aKY 105563
+5oiR5Lus5bCG 105564
+6L+H55qE 105565
+5YaN5Yqg5LiK 105566
+54i+ 105567
+6Lqr5p2Q 105568
+55S35aWz 105569
+6L+c6L+c 105570
+55S355Sf 105571
+6Ieq6Lqr55qE 105572
+6LSf5ouF 105573
+55m+5LiH 105574
+6KW/54+t 105575
+6KW/54+t54mZ 105576
+5YeA5Yip5ram 105577
+5r6z5aSn 105578
+5r6z5aSn5Yip5Lqa 105579
+5LiN5Y67 105580
+5om/5Y+X 105581
+5qW855uY 105582
+5aKD5YaF 105583
+5re35Yed 105584
+5re35Yed5Zyf 105585
+5oCd5oOz5pS/5rK7 105586
+5biC5Yy6 105587
+5oub5qCH 105588
+5Zui5L2T 105589
+6L+b5bqm 105590
+5Yab6Zif 105591
+5Y+N5by5 105592
+5LqG5LiA5Lqb 105593
+5o6l5b6F 105594
+55qE5a2m5Lmg 105595
+6YWN6YCB 105596
+6aOf5ZOB5a6J5YWo 105597
+5pu/5Luj 105598
+5piv5Lul 105599
+6YCa55So 105600
+56CU56m25omA 105601
+56aF 105602
+5omU 105603
+6ZqU56a7 105604
+5LiH5bmz5pa557Gz 105605
+55qE6KeE5a6a 105606
+57uZ5oiR5Lus 105607
+5r+A5YWJ 105608
+5Lya5Ye6546w 105609
+55+t5L+h 105610
+56m/552A 105611
+5rKI6Ziz 105612
+5pWZ5p2Q 105613
+6Ziy55ar 105614
+5LyY6Imv 105615
+57qm5a6a 105616
+5oiR55yB 105617
+5YWs5rCR 105618
+6YG45pM= 105619
+6YG45pOH 105620
+5bey5oiQ5Li6 105621
+5LiN5b+F 105622
+56WW5Zu9 105623
+5bm25pyq 105624
+5Zyf5aOk 105625
+5b6u56yR 105626
+5LqL5Lia5Y2V5L2N 105627
+55qE5ri45oiP 105628
+5YWs56S6 105629
+5ZCI55CG55qE 105630
+56qd 105631
+5rCU6LGh 105632
+5a625Lit 105633
+5Lqu55u4 105634
+5Y2r5pif 105635
+6K6w6L29 105636
+6KeG6YeO 105637
+5Zyw5Yy655qE 105638
+5L2G5LuW 105639
+6IKM6IKJ 105640
+5LqP5o2f 105641
+5Yqe5a2m 105642
+5LiA6KGM 105643
+6K+e55Sf 105644
+5Y+R5biD55qE 105645
+55qE5pyN5Yqh 105646
+55qE56CU56m2 105647
+5ZGo5pyr 105648
+5Lqn5Lia5Zut 105649
+6auY5rip 105650
+5oiQ5Yqf55qE 105651
+5q2l6aqk 105652
+5a2Y5YKo 105653
+5a2Q5YWs5Y+4 105654
+6K6p5aW5 105655
+5Lit5pyJ 105656
+5ZiJ5a6+ 105657
+5aau 105658
+5piO5bm0 105659
+5LqG5ZCX 105660
+5LqJ6K6u 105661
+5oiI 105662
+5LiA5pys 105663
+576O5Li955qE 105664
+5L2g6K+0 105665
+5aSn5Lq6 105666
+5pS755Wl 105667
+5LiN5pyD 105668
+5b6F6YGH 105669
+5LiA6L6G 105670
+54mI5p2D5omA5pyJ 105671
+5rCR5LyX 105672
+5Yqf5aSr 105673
+5bGV5Lya 105674
+5aSn6ISR 105675
+5q+P5pyI 105676
+5bCP6bqm 105677
+5rWZ5rGf55yB 105678
+55qE5omA5pyJ 105679
+5LiL5ruR 105680
+6JOd6Imy 105681
+6KaB5oOz 105682
+5a2m55Sf55qE 105683
+5b2T5L2g 105684
+5L2c5oiY 105685
+5a625Lmh 105686
+5aSa5ZCN 105687
+6auY5LqO 105688
+5Z2a5by6 105689
+6L+e6ZSB 105690
+5ZCO5p6c 105691
+5Lq65LqL 105692
+57SF 105693
+5r+A5Yqo 105694
+6L+b5pS7 105695
+56mG 105696
+5LiY 105697
+6K6p6Ieq5bex 105698
+5Lul5q2k 105699
+5aSr5Lq6 105700
+5byA6K6+ 105701
+5rCU6LSo 105702
+6bih6JuL 105703
+54Sh5rOV 105704
+5ZCD5LqG 105705
+5YiG5Yir5Li6 105706
+6IGU5ZCI5Zu9 105707
+5b2T5Luj 105708
+5aaC5p6c5piv 105709
+6L+c56iL 105710
+5ZaC 105711
+6K6w5L2P 105712
+5riF5Y2V 105713
+5ZCI5L2c5LyZ5Ly0 105714
+5Y675YGa 105715
+5pWF6Zqc 105716
+5qih5ouf 105717
+5biI55Sf 105718
+5YmN5p2l 105719
+55S16KeG5Ymn 105720
+54Ot54ix 105721
+6Zyy5Ye6 105722
+6auY5bGC 105723
+55S15Zmo 105724
+57qq5b6L 105725
+5byA5Y+R5ZWG 105726
+6ZW/5a6J 105727
+6L295L2T 105728
+55qE5bCx5piv 105729
+6KKr5Lq6 105730
+5Y+X55CG 105731
+56+u55CD 105732
+6I6O 105733
+5Lqk57uZ 105734
+5pyq5p2l55qE 105735
+5Lik5aSn 105736
+5ZCV5biD 105737
+562J5Lq6 105738
+55qE5pel5a2Q 105739
+5ZCI5L2c56S+ 105740
+5oyR6YCJ 105741
+5a2Y5qy+ 105742
+57O757uf55qE 105743
+5oqK5a6D 105744
+5rKh5pyJ5LuA5LmI 105745
+5LuO5q2k 105746
+5Lit5Y2I 105747
+55a855eb 105748
+5bep5Zu6 105749
+5rWq5ryr 105750
+55u45YWz6YOo6Zeo 105751
+6ZW/5Z+O 105752
+57qk57u0 105753
+5LiK6Zeo 105754
+54iG54K4 105755
+6LW354K5 105756
+55qE6YCa55+l 105757
+6ICM5p2l 105758
+55qE6ICB 105759
+5omL6YeM 105760
+6K+t6Z+z 105761
+6L6b6Ium 105762
+5rGf6IuP55yB 105763
+55So5LqG 105764
+6Lqr5Lu96K+B 105765
+5pyJ5Yqp 105766
+5pyJ5Yqp5LqO 105767
+54mp6IGU572R 105768
+5Ye66Zeo 105769
+5byf5a2Q 105770
+5oO5 105771
+6L+Z5Lu25LqL 105772
+5oiR5Lus5Y+v5Lul 105773
+55qE55Sf5ZG9 105774
+5pyJ5LiA56eN 105775
+5bqX6ZO6 105776
+5Y+M5omL 105777
+55qE5raI5oGv 105778
+6ICQ5b+D 105779
+5bC05bCs 105780
+6YKj5aSp 105781
+6aaW5om5 105782
+5piv5LiA5a62 105783
+5Lq65rCU 105784
+5Y+N5q2j 105785
+5oiR5ZKM 105786
+5a6g54mp 105787
+5LiN5a+5 105788
+5a+75rGC 105789
+55u45Ly8 105790
+5Zyo576O5Zu9 105791
+5Y+r5YGa 105792
+5ZeO 105793
+56uL6Laz 105794
+55So6YCU 105795
+5YWG 105796
+5aSn5rCU 105797
+5ZCR5LiK 105798
+5LuW5bCx 105799
+6aG555uu5bu66K6+ 105800
+6Iul5bmy 105801
+5piv5pyJ 105802
+5r+A5oOF 105803
+55qE5oSP5LmJ 105804
+5pit 105805
+5Lil6YeN55qE 105806
+5a+G6ZuG 105807
+6Iie6LmI 105808
+6I2j6I63 105809
+6I635oKJ 105810
+5rGf5Y2X 105811
+5YGH5aaC 105812
+5oi35aSW 105813
+57q/57Si 105814
+56eB5Lq6 105815
+6L2s5Z6L5Y2H57qn 105816
+55qE5Lu35YC8 105817
+5Y2V54us 105818
+6ICB55m+5aeT 105819
+5bCN5pa8 105820
+5Zu96ZmF5YyW 105821
+5Lyw5YC8 105822
+5pyN5Yqh5Lia 105823
+6Iet 105824
+5o6J5LqG 105825
+6Kej5Yaz5LqG 105826
+5Lmf5LiN6IO9 105827
+5YW5 105828
+5pav54m5 105829
+5pWF5oSP 105830
+6L+H5bqm 105831
+6IqC5pel 105832
+55m955mc 105833
+55m955mc6aOO 105834
+57un5om/ 105835
+5LqG5LiN5bCR 105836
+5LqM5Lq6 105837
+6KeB6Z2i 105838
+5oOz5oOz 105839
+5aSN5ZCI 105840
+5bq35aSN 105841
+5Y6/5Z+O 105842
+5Zyo5Zu95YaF 105843
+5Zy65Zyw 105844
+6Zm255O3 105845
+6L+Z6aG5 105846
+55y85Lit 105847
+56C4 105848
+5oSf6KeJ5Yiw 105849
+5p6c54S2 105850
+5pS+5YWl 105851
+57qm5p2f 105852
+5o6S5p+l 105853
+6L2m5Li7 105854
+55qE5oSP5oCd 105855
+5paw5Z+O 105856
+5oOz552A 105857
+6YGC 105858
+6Iy25Y+2 105859
+5Lmw5oi/ 105860
+5Yac5oi3 105861
+6auY5omL 105862
+546J57Gz 105863
+5paw5Yag6IK654KO 105864
+54Wn5piO 105865
+5oyH5Y2X 105866
+6Lii 105867
+5pWR5o+0 105868
+5pmv54K5 105869
+56iO5pS2 105870
+55qE5omL 105871
+5q2j5aW9 105872
+6KaB5oqK 105873
+6ZqP5oSP 105874
+5YW25a6e5piv 105875
+57uZ6Ieq5bex 105876
+6LCI5Yik 105877
+5q+P5aSp6YO9 105878
+5oCB5Yq/ 105879
+6aKE57qm 105880
+5Y6G5Y+y5LiK 105881
+5a6d6LSd 105882
+5YmN6L+b 105883
+5Lmf5bCx5piv6K+0 105884
+55qE5oSP6KeB 105885
+5Y+j572p 105886
+5Y6Y57Gz 105887
+6Iqx6LS5 105888
+5L2T6IKy5oqV5rOo 105889
+5YWs5LyX5Y+3 105890
+6JGX5ZCN55qE 105891
+5byA5oi3 105892
+5ouN5Y2W 105893
+5bKB5pyI 105894
+5YaF5ra1 105895
+5a6M5pW055qE 105896
+6auY5Y6L 105897
+5YWs5Yqh5ZGY 105898
+5L2/55So55qE 105899
+55Sf5Lqn57q/ 105900
+5aa55aa5 105901
+6LWw6K6/ 105902
+5piv5Y+v5Lul 105903
+5Zyo5a62 105904
+5pq05Yqb 105905
+5rOw5Zu9 105906
+6LSo55aR 105907
+5LiN6YGO 105908
+5aSp54S25rCU 105909
+57y654K5 105910
+5bCP5Z6L 105911
+5LiN5LuF5piv 105912
+6buR5pqX 105913
+5qKo 105914
+5paH5peF 105915
+6KaB5pyJ 105916
+5Lit5bGx 105917
+55qE5pWw5o2u 105918
+5b6X5b6I 105919
+5Lul5L6/ 105920
+5a+55LuW 105921
+5Yqg5Lul 105922
+55m854++ 105923
+6K6+5a6a 105924
+6IKa5a2Q 105925
+6Z2W 105926
+5aWJ54yu 105927
+5LiN5Y+Y 105928
+5Y+j56KR 105929
+5Zyo5ZOq6YeM 105930
+5L2Q 105931
+6L+Z5Lik5Liq 105932
+55qE5pa55ZCR 105933
+5p6r 105934
+5LqM5qyh 105935
+54mH5Yy6 105936
+6aCQ 105937
+56OK 105938
+5ou/552A 105939
+5bey57uP5oiQ5Li6 105940
+5LmL5LiK 105941
+5a6X5peo 105942
+5aW25aW2 105943
+6auY5paw5Yy6 105944
+56S+5pyD 105945
+6Lef6Liq 105946
+5pyN5Yqh5Lit5b+D 105947
+5omv 105948
+5omL5oyH 105949
+56S854mp 105950
+5a6/6IiN 105951
+55So5b+D 105952
+5o+Q6auY5LqG 105953
+5Lqu54K5 105954
+5LiN5oS/5oSP 105955
+5pKt5pS+ 105956
+5aSa5bCR6ZKx 105957
+5rKh5LuA5LmI 105958
+5pWw5Y2B 105959
+5oC755uR 105960
+55qE5Z+O5biC 105961
+5om+5Yiw5LqG 105962
+5YaF5Zyw 105963
+5Yiw546w5Zyo 105964
+5oiY5paX5Yqb 105965
+5Y6f5aeL 105966
+5YOn 105967
+5YCS5piv 105968
+5pyA5YW3 105969
+6LSr5Zuw5oi3 105970
+6YCB5Yiw 105971
+57qn5Yir 105972
+5Ye66LWE 105973
+5oiq5q2i 105974
+56eN5a2Q 105975
+6IO95LiN6IO9 105976
+5bm46L+Q 105977
+6JaH 105978
+6aG56ZO+ 105979
+5oyC54mM 105980
+5LiA5qij 105981
+5LmY5a6i 105982
+6JC95ZCO 105983
+5L2G5oiR 105984
+5pep5Zyo 105985
+5Yqo5ryr 105986
+5bmz562J 105987
+5a+55L2g 105988
+5LiN5oCV 105989
+5aSW55WM 105990
+5aSa5bm05p2l 105991
+6aaW5Liq 105992
+5rKz5Y2X55yB 105993
+5oiW5YW25LuW 105994
+6ZWc5aS0 105995
+5Y2X5piM 105996
+5LiA6Z2i 105997
+6YCg5oiQ55qE 105998
+5bSU 105999
+562S 106000
+5pWZ6IKy6YOo 106001
+5Zyw5Z+f 106002
+5piG5piO 106003
+5be06buO 106004
+5omL5ri4 106005
+5LiA5pe2 106006
+56CN 106007
+6aG257qn 106008
+5YWx6K6h 106009
+5Y6f5rK5 106010
+6L6J54WM 106011
+6K+05piv 106012
+5paw5Y2O56S+ 106013
+57uP5Y6G5LqG 106014
+5LiN5q2i 106015
+6KaB5LmI 106016
+6ICF55qE 106017
+5oC75oqV6LWE 106018
+6KGM6am2 106019
+5LiK5bid 106020
+5bm057qq 106021
+55C8 106022
+5Lyg6K+0 106023
+57K+6Iux 106024
+5pa56ZKI 106025
+5rGf5rmW 106026
+5oiQ54K6 106027
+5oC76YeP 106028
+5oqV5pS+ 106029
+5Yqo55S7 106030
+6Jek 106031
+55S15rqQ 106032
+6ZKZ 106033
+5ZCM6KGM 106034
+5pmu6YCa55qE 106035
+5Zu+5Lmm6aaG 106036
+6K+I6aqX 106037
+5oWI5ZaE 106038
+6L+Z5Lu9 106039
+5Li75oyB5Lq6 106040
+5bCx6L+Z5qC3 106041
+6ICM5oiQ 106042
+6Ieq6KGM6L2m 106043
+5Lit5Zu954m56Imy 106044
+6IK/55ik 106045
+5ZC+ 106046
+5byf5byf 106047
+5Y+X55uK 106048
+6YCJ5oup5LqG 106049
+5piO5pi+55qE 106050
+5oql6ICD 106051
+56yR6YGT 106052
+6ZuW54S2 106053
+5rip5bee 106054
+6Z2e5rSy 106055
+56eN56eN 106056
+5Y+C5Yqg5LqG 106057
+6LSn6L+Q 106058
+6ZqP5L6/ 106059
+5bCx5rKh5pyJ 106060
+57ij 106061
+5aSu6KeG 106062
+56m/6LaK 106063
+55qE546w6LGh 106064
+5Yeg5qyh 106065
+55qE6aOO6Zmp 106066
+5q2M5puy 106067
+5pys5bGK 106068
+5bm05YaF 106069
+5LiN6LaF6L+H 106070
+6L+H5aSa 106071
+5b+F6aG76KaB 106072
+57uT6K66 106073
+5YCf6Ym0 106074
+56We5aWH 106075
+5pyf5pyb 106076
+5LiT5Lqr 106077
+6Z2e5bi46YeN6KaB 106078
+5oSP6K+G5Yiw 106079
+5ZCI5bm2 106080
+5oqK6Ieq5bex 106081
+5aWX6KOF 106082
+6a2U5rOV 106083
+5aSP5a2j 106084
+5LiN5YOP 106085
+5aKD55WM 106086
+5oOK5Zac 106087
+5pyJ5LiA5aSp 106088
+54Sm54K5 106089
+5oiR6K6k5Li6 106090
+5YWw5bee 106091
+55S15rCU 106092
+6IGU57O75oiR5Lus 106093
+56eR5pmu 106094
+5aW56K+0 106095
+55qE5paH56ug 106096
+5aWH5oCq 106097
+5Y+L5aW9 106098
+6aWu5paZ 106099
+55qE5pSv5oyB 106100
+562U5bqU 106101
+6YeN6YeP 106102
+55G2 106103
+5YeP6L27 106104
+56eR5a2m5a62 106105
+5be06KW/ 106106
+6YeR6J6N5py65p6E 106107
+5YWa5aeU5Lmm6K6w 106108
+6LK45qy+ 106109
+57K+6Ie0 106110
+5LuO5pyq 106111
+5Y2w5Yi3 106112
+5Zue6aG+ 106113
+6aaW6YO9 106114
+5Y+R6IKy 106115
+6Zeu6YGT 106116
+6L6+5Yiw5LqG 106117
+5b+N5LiN5L2P 106118
+5omN5pyJ 106119
+5o2Q6LWg 106120
+5L2b5pWZ 106121
+5LiN5riF 106122
+6Zif6ZW/ 106123
+55u45Y+N 106124
+5oql6K2m 106125
+5aSn5YWo 106126
+5qyn55uf 106127
+5biu5b+Z 106128
+55qE5pmC5YCZ 106129
+55uu5b2V 106130
+6Laz5Lul 106131
+6Imw6Zq+ 106132
+5LuW5Lmf 106133
+5bel5L2c6ICF 106134
+5aS06ISR 106135
+57y66Zm3 106136
+5oiQ56uL5LqG 106137
+5bCx5byA5aeL 106138
+6K6k5ZCM 106139
+6buE6Imy 106140
+55eF5oOF 106141
+6Ka65b6X 106142
+6L+Z5Lik 106143
+5L+h5Luw 106144
+5ZyL5a62 106145
+5LiN5LuF5LuF5piv 106146
+54us5a62 106147
+6Iis55qE 106148
+5p2Q6LSo 106149
+5rW35LiK 106150
+54K65LqG 106151
+5py65Yqo6L2m 106152
+55u45b2T5LqO 106153
+5aSa5YWD5YyW 106154
+5pu05aSn55qE 106155
+6Juu 106156
+5YGH5pyf 106157
+5byP55qE 106158
+5Lqk6YCa6L+Q6L6T 106159
+55yB5aeU 106160
+5LiN566X 106161
+5pS+5LiL 106162
+6Zev 106163
+5Lq65Zyo 106164
+5riv5Y+j 106165
+5peo5Zyo 106166
+5ZG95Luk 106167
+5p+Q5Liq 106168
+5bmz56iz 106169
+5Y+q5aW9 106170
+5Lq65Lq6 106171
+5Lqe 106172
+5LqM57u0 106173
+5LqM57u056CB 106174
+5p6B5Li6 106175
+5Yir5aKF 106176
+5YW25L2Z 106177
+5aSn5LqL 106178
+5Li7566h6YOo6Zeo 106179
+5peg6ZSh 106180
+6Ze1 106181
+6YGt5Yiw 106182
+6K+06L+H 106183
+5Li65L2g 106184
+6Kej562U 106185
+6aqM5pS2 106186
+55qE57uP6aqM 106187
+5Yy56YWN 106188
+54Gr566t 106189
+6LGq5Y2O 106190
+5p+Q5p+Q 106191
+55qE5pe25Luj 106192
+5Lmm6Z2i 106193
+5oGS5aSn 106194
+5bu26ZW/ 106195
+5LiA5ZCM 106196
+5pyq6IO9 106197
+5Lqk5o2i 106198
+55Si5ZOB 106199
+562J5Yiw 106200
+5YiG56a7 106201
+5omT55S16K+d 106202
+5bmy54el 106203
+6L6D5aSa 106204
+5aSa5bm055qE 106205
+6IOM5pmv5LiL 106206
+5Li65L6L 106207
+5pGY6KaB 106208
+5bSb6LW3 106209
+5q2k5Yi7 106210
+5pyJ5py65Lya 106211
+5p2h5qy+ 106212
+6aKG5a+85bCP57uE 106213
+55qE6Lqr5L2T 106214
+5Y2V5LiA 106215
+5aSu6KGM 106216
+5LiN5pat5o+Q6auY 106217
+5Lu35YC86KeC 106218
+6Iq9 106219
+6JCN 106220
+5rOV5b6L5rOV6KeE 106221
+5LiN6ZSI 106222
+5LiN6ZSI6ZKi 106223
+5Ye65LqO 106224
+6Jma5ouf 106225
+5o2u5oKJ 106226
+54Om5oG8 106227
+5YWo5paw55qE 106228
+5omr5o+P 106229
+55m76ZmG 106230
+6Im65pyv5a62 106231
+55qE6aOf54mp 106232
+55qE5a2Y5Zyo 106233
+5a6i5Y6F 106234
+5oiR5Lus5bCx 106235
+5p+l55yL5pu05aSa 106236
+6K+E5a6h 106237
+5biC5aC0 106238
+6Kyb 106239
+5beo5aS0 106240
+5Lit5Zu957uP5rWO 106241
+5LqG6Ieq5bex55qE 106242
+5Yaz6K6u 106243
+55uR552j566h55CG 106244
+5oqV56Wo 106245
+5YaN5bqm 106246
+6KGM54K6 106247
+5rOo5YWl 106248
+5L2c5Li65LiA5Liq 106249
+5q+P5Liq5Lq66YO9 106250
+5Y2V5YWD 106251
+6KaB55+l6YGT 106252
+6KKr56ew5Li6 106253
+5LmL6ZmF 106254
+6Kej6Zmk 106255
+5Li4 106256
+5rqr 106257
+5LiJ5pif 106258
+6bKc5piO 106259
+5Lmf6YO9 106260
+5pe25py6 106261
+5Ye65omL 106262
+5oOF5b2i 106263
+5ZWG6LS4 106264
+6YCJ5Li+ 106265
+5a+56Ieq5bex 106266
+55Sf5Yqo 106267
+5YWL5pyN 106268
+5Liq5L2T 106269
+6IuR 106270
+56ix 106271
+5aSn5Y6m 106272
+5piv5a+5 106273
+5Yip5oGv 106274
+6L+Q5Yqo5ZGY 106275
+5YyW6Kej 106276
+5YmN5rK/ 106277
+5oSf5oGp 106278
+5oC75LmL 106279
+6auY5paw5oqA5pyv 106280
+5Z2H5Li6 106281
+5YWo5Yy6 106282
+5rCU5rCb 106283
+5Y+v5Lul6K+05piv 106284
+5L2P5a6/ 106285
+5YWa5ZGY5bmy6YOo 106286
+5Zev 106287
+6Le16KGM 106288
+55qE5LiT5Lia 106289
+6ICD6aqM 106290
+6JW+ 106291
+5YWs5a2Q 106292
+55qE54q25oCB 106293
+5r2u5rWB 106294
+5L+h5omY 106295
+6LS8 106296
+5ZCE5pa5 106297
+5pWR5Yqp 106298
+6Z2e5bi455qE 106299
+5qGl5qKB 106300
+5YWs5pak 106301
+5Ly855qE 106302
+55yL5aW9 106303
+5bGA6YOo 106304
+5a6J6Z2Z 106305
+6YWN5Lu2 106306
+5bi46KeE 106307
+5byA6L2m 106308
+56ys5LqM5qyh 106309
+5LiK57qn 106310
+5Y+C6LWb 106311
+5a625bGe 106312
+5by65Yq/ 106313
+5Zyo5LuW 106314
+5ZCR5YmN 106315
+5LmL5Zyw 106316
+6YOh 106317
+6KGM56iL 106318
+6K2m5ZGK 106319
+6KeE5a6a55qE 106320
+5ZWG5Z+O 106321
+5LqU5aSn 106322
+5pWZ5a6k 106323
+5Y2B6Laz 106324
+5omA5Lul5Zyo 106325
+5bCG57un57ut 106326
+562J5pa55byP 106327
+5a625LyB5Lia 106328
+5Lqk5LuY 106329
+54K56K+E 106330
+57uT566X 106331
+5Lmf5Y+v 106332
+5aSW5rGH 106333
+6L+Z56eN5oOF5Ya1 106334
+5o6I5LqI 106335
+5biD572u 106336
+5oiQ56uL5LqO 106337
+6aKE6K2m 106338
+566h55CG5Lq65ZGY 106339
+5ama56S8 106340
+57uT5p2f5ZCO 106341
+5YWl6YCJ 106342
+5peg5q+U 106343
+5ZKM5Y+R5bGV 106344
+55m96YWS 106345
+546p5YW3 106346
+5LiH576O5YWD 106347
+55qE5oiQ57up 106348
+5ouN54Wn 106349
+6ICD6JmR5Yiw 106350
+5LyB5Lia5Y+R5bGV 106351
+5LqG5Liq 106352
+55Sf5rCU 106353
+55qE5aWz5Lq6 106354
+5LqU5Y2B 106355
+54i354i3 106356
+57q957qm 106357
+6YO96KKr 106358
+5LiK6K++ 106359
+55uh 106360
+5Lyg57uf5paH5YyW 106361
+5r2c5Zyo 106362
+5Y+R5bCE 106363
+5LiA6Lqr 106364
+6Ziy5a6I 106365
+5Yiu 106366
+6aKY55uu 106367
+5Zyo5YaF55qE 106368
+576O5aW955qE 106369
+6L+Z6YeM55qE 106370
+5LiA5Lid 106371
+5Lq65Z2H 106372
+5YCh5a+8 106373
+6Lqr5ZCO 106374
+5omp5bGV 106375
+5aSn6Zeo 106376
+5bCx6KKr 106377
+6K+l6aG555uu 106378
+5p625p6E 106379
+5LiA5Y+j 106380
+5L+h5oGv5oqA5pyv 106381
+5byA5Lia 106382
+5pS25Y+W 106383
+572R6aG1 106384
+5pSv5o+0 106385
+5bCB6Zet 106386
+5aGR6YCg 106387
+5aSn6IOG 106388
+5b+r6YCf5Y+R5bGV 106389
+55yL5Ly8 106390
+5rid 106391
+6L+Z5qC35LiA5Liq 106392
+5qih5Z2X 106393
+5rOo5oSP5Yiw 106394
+56C06Kej 106395
+6Ieq5LuO 106396
+5ZG15ZG1 106397
+5LmL5b6M 106398
+5LmL5peF 106399
+6Lef5oiR 106400
+5rOV5Lq6 106401
+5o6S6KGM5qac 106402
+5Z2a5a6I 106403
+5aW95aSE 106404
+55+z5aS0 106405
+5bm25bCG 106406
+6Iix 106407
+5q2H 106408
+5Lik5bK4 106409
+5aSa5LmF 106410
+6LGh5b6B 106411
+5Liq5oCn5YyW 106412
+55qE6KeS5bqm 106413
+5biG 106414
+56aP5bee 106415
+5p+l5aSE 106416
+5Lik5Zu9 106417
+5ZC45byV5LqG 106418
+6aaW5bit 106419
+5aSn5ZOl 106420
+6aSK 106421
+5rao5bmF 106422
+6YCJ55So 106423
+6Kix5aSa 106424
+6JC95oi3 106425
+5ZOI5bCU 106426
+5ZOI5bCU5ruo 106427
+5YGa5LuA5LmI 106428
+5Lul5YWN 106429
+6b6N 106430
+5peg6ZyA 106431
+5Yiw5bqV5piv 106432
+5oCh 106433
+5ZGK6K+J5L2g 106434
+6Ziy5rC0 106435
+6L+Z5pe25YCZ 106436
+5qyi5LmQ 106437
+6L2s5ZCR 106438
+6L+Z5Liq5Zyw5Zu+ 106439
+5YWl6am7 106440
+6I2J5Y6f 106441
+5pe25Luj55qE 106442
+5Y+Y5Yqo 106443
+5Yqg5by65a+5 106444
+5YG25bCU 106445
+5a6I5oqk 106446
+5rCU5rip 106447
+5Lq66Ze0 106448
+5pyd6bKc 106449
+57uP6LS5 106450
+5Zut5p6X 106451
+5bel5Zyw 106452
+6KeE5qC8 106453
+5Yeg5Y2B 106454
+6K+V5Zu+ 106455
+5aaD 106456
+6YKj5pe25YCZ 106457
+5byY5oms 106458
+5Lia55WM 106459
+55qE6YCf5bqm 106460
+5Lya5LiN5Lya 106461
+6JCl5pS2 106462
+5bCP5b6u5LyB5Lia 106463
+55yL6L+H 106464
+5oqK5LuW 106465
+6YG15b6q 106466
+6L+Z6L65 106467
+5rKh5pyJ5Lq6 106468
+5aO2 106469
+5rmW5Y2X55yB 106470
+5p6B5YW2 106471
+55qE5Lq655Sf 106472
+5LuW6L+Y 106473
+6L2s5YyW5Li6 106474
+6LWw6L+H 106475
+5oqx552A 106476
+54mb5aW2 106477
+5LiH5Lqp 106478
+5b+D5oCB 106479
+5pel5bi455Sf5rS7 106480
+5L2T5qOA 106481
+5pmD 106482
+562J6aKG5Z+f 106483
+5oeJ6Kmy 106484
+5Y+v5Lul55yL5Yiw 106485
+5om+5LiN5Yiw 106486
+6ICB5bm0 106487
+5oqK5oiR 106488
+56ev5YiG 106489
+5qKz55CG 106490
+57uz 106491
+55qE5pS/5rK7 106492
+5bid5Zu9 106493
+6Zmq5Ly0 106494
+5rSb6Ziz 106495
+5YWs5q2j 106496
+5byA5Y+j 106497
+54m56Imy55qE 106498
+5Zuw5aKD 106499
+5LiK5pyJ 106500
+56uL5L2T 106501
+5omT5bel 106502
+5ZWk6YWS 106503
+5Zyo6YKj6YeM 106504
+6YKj6L65 106505
+5Liq5Yir 106506
+5LiA5a6a5piv 106507
+55qE6YeN6KaB5oCn 106508
+5Li75byg 106509
+5ZKM5pyN5Yqh 106510
+5LiK572R 106511
+6KGl5Yqp 106512
+5Y+q6ZyA 106513
+5bym 106514
+6YGu 106515
+5Yqb5LqJ 106516
+5bqm6L+H 106517
+6JGs 106518
+6aG/5pe2 106519
+6YSJ 106520
+57q657uH 106521
+5Zyw5Z2X 106522
+5L+h55So5Y2h 106523
+572a5qy+ 106524
+5ZGK6K+J5oiR 106525
+6ZuZ 106526
+5Lmm55S7 106527
+6Kit6KiI 106528
+5oC75Lya 106529
+5Yik5Yaz 106530
+5L+h6KqJ 106531
+5Liq6IKh 106532
+5bmz5bi4 106533
+5oCO6bq8 106534
+5L2T546w5Zyo 106535
+6buE5rKz 106536
+5Zub5bed55yB 106537
+55yf55u4 106538
+5ZCE6aG55bel5L2c 106539
+5Yqo5ZGY 106540
+5bOw5Lya 106541
+5LiA5pyf 106542
+5pyJ5LiA5a6a55qE 106543
+6auY5bqm6YeN6KeG 106544
+57mB6I2j 106545
+5Y+R546w5LqG 106546
+572R57qi 106547
+5omL5rOV 106548
+5a625Zut 106549
+5Luq5Zmo 106550
+6L6D5L2O 106551
+55qE5a6J5YWo 106552
+5qGQ 106553
+5LuY5qy+ 106554
+5oqR5Yi2 106555
+5Y2T6LaK 106556
+5q2j6Z2i 106557
+5ZOR 106558
+5by65Yi2 106559
+5LuK5aSp55qE 106560
+5oiY6IOc 106561
+5qW85biC 106562
+5ou/5LiL 106563
+6aKc5YC8 106564
+5Lic6YOo 106565
+56CU5Yi2 106566
+55qE5oiY55Wl 106567
+5Zyo5LiA5Liq 106568
+5LiJ5Lq6 106569
+5a6M5LqG 106570
+5paw5oqA5pyv 106571
+57uP5rWO5pWI55uK 106572
+5a+M5pyJ 106573
+5r6z5rSy 106574
+5Yqp55CG 106575
+6aKG5Y+W 106576
+6LCt 106577
+54eD54On 106578
+57Sg5YW7 106579
+6YKE5pyJ 106580
+6L+b6ICM 106581
+5LuA5LmI5piv 106582
+56CU56m25Lit5b+D 106583
+6YCC55So5LqO 106584
+5o6l5pS2 106585
+5aSx5pyb 106586
+5LqM57qn 106587
+6Ze055qE 106588
+5Y6f5qCH6aKY 106589
+6KqN54K6 106590
+5o2h 106591
+5a+5552A 106592
+5a+56Z2i 106593
+5Lit5Y6f 106594
+6ZOD 106595
+55Sf5Lqn55qE 106596
+5Y+R5biD5Lya 106597
+5aOr5YW1 106598
+6L+Z5Y+l6K+d 106599
+57y057qz 106600
+5LiA5Liq5Liq 106601
+5a2455Sf 106602
+55aR6Zeu 106603
+5Lqk6K2m 106604
+56S66IyD5Yy6 106605
+5aSp5L2/ 106606
+5Zyo5LiK5rW3 106607
+5ZCM5pmC 106608
+6L275piT 106609
+5ZSv5LiA55qE 106610
+54Ot6Ze5 106611
+5LmQ6KeC 106612
+55qE6Lqr5Lu9 106613
+5ZaE5LqO 106614
+5aSn5Y6F 106615
+6IKv5a6a5piv 106616
+6Ziy54Gr 106617
+5aSW5Ye6 106618
+5o2u6K+0 106619
+6aG555uu55qE 106620
+5LiA5Y+w 106621
+6Jma5YGH 106622
+5LiA56yU 106623
+56uL5rOV 106624
+5Lil6IKD 106625
+5om/5Yqe 106626
+5Y2B5Yeg 106627
+55qE56m66Ze0 106628
+5pys572R56uZ 106629
+5YGa5b6X 106630
+5L+d5rip 106631
+5pyI5Yid 106632
+5Zyo572R5LiK 106633
+5ZCE5pa56Z2i 106634
+5LiJ5aSp 106635
+5Lqk5piT5omA 106636
+6Kej5p6Q 106637
+5YWa5Lit5aSu 106638
+6L+b5Ye65Y+j 106639
+5ZKM56S+5Lya 106640
+5qyh5pWw 106641
+5LmL5a62 106642
+57u05bqm 106643
+5rS+5Ye65omA 106644
+5Lqn55Sf5LqG 106645
+5bim5pyJ 106646
+5b6I5by6 106647
+5pyJ5Lqb5Lq6 106648
+5bm05ZCO 106649
+5LqG6K645aSa 106650
+5a+G5bqm 106651
+5a2m5pyf 106652
+54+g5rW3 106653
+5pyA5aSa55qE 106654
+6L6557yY 106655
+5a656YeP 106656
+56ys5LqM5Liq 106657
+5LiA55u05piv 106658
+5LiN56aB 106659
+5q2y 106660
+5LuL57uN5LqG 106661
+5LyY6ZuF 106662
+5q+U6LyD 106663
+6IGM5L2N 106664
+5rip5p+U 106665
+5pyJ6ZKx 106666
+5pyA6auY55qE 106667
+5Y2a6KeI5Lya 106668
+5LiN5oiQ 106669
+6ZSZ5LqG 106670
+6K+B55uR 106671
+6K+B55uR5Lya 106672
+5oiQ5Lq6 106673
+5Z2H5YyA 106674
+5pyJ5Yip 106675
+6LaK5Y2X 106676
+5omT5LqG 106677
+5aW95ZCD 106678
+57O757Wx 106679
+6Lef6ZqP 106680
+55qE5Zyw5L2N 106681
+5q2j5aaC 106682
+56iN5b6u 106683
+5Y2w5Y+R 106684
+5Yib56uL 106685
+6aOO5YWJ 106686
+5bCG5oiQ5Li6 106687
+5LiN6auY 106688
+6aKR57mB 106689
+6K6+5pyJ 106690
+5Lye 106691
+5ouG6Zmk 106692
+5b2x5YOP 106693
+5riX6YCP 106694
+5bm05byA5aeL 106695
+572R5piT 106696
+6KaB5YGa 106697
+55S15Yqo6L2m 106698
+55yf5b+D 106699
+5rW35Yab 106700
+5Lyg5p2l 106701
+5beu5Yir 106702
+6LCo5oWO 106703
+54Of5Y+w 106704
+5Y2D5bm0 106705
+6K+B5a6e 106706
+55Cq 106707
+55qE5YW35L2T 106708
+5Yiw5aSE 106709
+5LiN5a6c 106710
+6JyA 106711
+6IO95Yqb5ZKM 106712
+54m654my 106713
+55qE6ZKx 106714
+5aSn6Zif 106715
+6aaW6KaB 106716
+5LiN5oS/ 106717
+546r55Gw 106718
+5Lq65rCR572R 106719
+6L+Y5piv6KaB 106720
+5Zub5bm0 106721
+5o2f5Lyk 106722
+55qE5YGa5rOV 106723
+6Z2I 106724
+6KGU5o6l 106725
+5ZCI5oiQ 106726
+5rKh5Lq6 106727
+6Zeo5qeb 106728
+5L+h6LS3 106729
+55qE55u45YWz 106730
+5Lic6aOO 106731
+56S+5L+d 106732
+5LiL5ri4 106733
+5Z2X6ZKx 106734
+6L+H5ZCO 106735
+55qE5bqU55So 106736
+6aW2 106737
+6aKB5Y+R 106738
+5LiA5aSE 106739
+5Y2O5aSP 106740
+5Li65LyB5Lia 106741
+5Y+q5Lya 106742
+5L615a6z 106743
+55qE5Yqf6IO9 106744
+5a2457+S 106745
+5Lit5Y2O5rCR5peP 106746
+5Y+R5biD5LqG 106747
+6L+O5o6l 106748
+5oiR6Ieq5bex 106749
+6L+Y6ZyA6KaB 106750
+5aSq6Ziz6IO9 106751
+5Y675LiW 106752
+5piv5L2g 106753
+5ZCI5Yqb 106754
+57uY55S7 106755
+5Y+w5YyX 106756
+552j5L+D 106757
+5YyX6YOo 106758
+5pyJ5aSa5bCR 106759
+5b6I6YeN6KaB 106760
+5YiS5YiG 106761
+5Y+357q/ 106762
+5pS+5aSn 106763
+5Lya6KKr 106764
+6I635aWW 106765
+5LmL5YaF 106766
+5aSx5Y675LqG 106767
+546p5a625Lus 106768
+6YeH6ZuG 106769
+5aO5 106770
+5a625LyZ 106771
+55m95aSp 106772
+5Zug5Li65LuW 106773
+56S+5Lya5rK755CG 106774
+5byA5Yib 106775
+55S157yG 106776
+5paw5LiA5Luj 106777
+5bm26LSt 106778
+5bCx5bey57uP 106779
+55qE56S+5Lya 106780
+6Zmk6Z2e 106781
+5Y+v5Lul55So 106782
+5amJ 106783
+5q+U6L6D5aW9 106784
+5a6e5Lia 106785
+5Yib5Yqe 106786
+5o+Q6LW3 106787
+6buD 106788
+5L2P5Zyo 106789
+5biC5pS/ 106790
+6Z2i5Li055qE 106791
+6IO95Zyo 106792
+55+t55+t 106793
+55yf5Lq6 106794
+5piO5piO 106795
+6LWE5Yqp 106796
+55qE5LiN5ZCM 106797
+5bCP5pyL5Y+L 106798
+6aKY5p2Q 106799
+576O5ZGz 106800
+5pif5bqn 106801
+5LiN5LiA5qC355qE 106802
+55yL5LiK5Y67 106803
+5LiA5qC5 106804
+5bm/5bee5biC 106805
+5Y+R55Sf55qE 106806
+6auY56eR5oqA 106807
+5LiA6L6I5a2Q 106808
+5Lqk5Y+J 106809
+5L2T57O75bu66K6+ 106810
+5Zug5Li65oiR 106811
+54+N5oOc 106812
+5LiK5a2m 106813
+5oiY5pyv 106814
+5q2k57G7 106815
+5Lqk5b6A 106816
+5oyJ5pGp 106817
+5Lq65Lus55qE 106818
+5YW25a+m 106819
+5Y6f5p2Q5paZ 106820
+5ri05pyb 106821
+55u45aSE 106822
+5b6u5b6u 106823
+5q63 106824
+5LmY5Z2Q 106825
+5byA5bGV5LqG 106826
+6auY5ZOB6LSo 106827
+5peg5Lq65py6 106828
+5LiN5piv5b6I 106829
+55qE5oqV6LWE 106830
+6IqC55yB 106831
+6IeJ 106832
+57K+6YCJ 106833
+55qE5qCH5YeG 106834
+5Y2X6YOo 106835
+6K6k6K+G5Yiw 106836
+5bmz6Z2Z 106837
+6Jel 106838
+5omr6buR 106839
+5omr6buR6Zmk 106840
+5omr6buR6Zmk5oG2 106841
+6YCZ56iu 106842
+5bu6562R6Z2i56ev 106843
+56Gu56uL 106844
+566h55CG5Yqe5rOV 106845
+5oSP5b+X 106846
+5Lio 106847
+6K6p5a2p5a2Q 106848
+5pWR54G+ 106849
+5b2T5LuK 106850
+54Gr54G+ 106851
+5ZCE6YOo6Zeo 106852
+5L6154qv 106853
+5q+P5ZGo 106854
+5o+9 106855
+5LiA5qyh5oCn 106856
+5YW25LuW5Lq6 106857
+6ZSZ6L+H 106858
+5LiO5YW2 106859
+5YuH5rCU 106860
+54eD5rCU 106861
+6aaW5bGK 106862
+5pyN6aWw 106863
+57Kl 106864
+5a6M5q+V 106865
+5bCx5oqK 106866
+5Yqe5LqL5aSE 106867
+5LiA5Lya5YS/ 106868
+56a75LiN5byA 106869
+5aaC5p6c5oKo 106870
+5LuT5bqT 106871
+5a+85biI 106872
+5ZCI6YCC55qE 106873
+5q+r57Gz 106874
+5a6J5YWo5oCn 106875
+5L6d54Wn 106876
+5Lqn5Lia5YyW 106877
+5L2g55yL 106878
+55yf55qE5b6I 106879
+5a2k54us 106880
+6Ziy5b6h 106881
+5b6I566A5Y2V 106882
+6aOO5rC0 106883
+5L2G5Lmf 106884
+5o6o5Ye65LqG 106885
+5rCR6JCl5LyB5Lia 106886
+56CB5aS0 106887
+5aSN5p2C55qE 106888
+57uE5oiQ6YOo5YiG 106889
+5YWF5ruh5LqG 106890
+6L+R5Yeg5bm0 106891
+55yB5pS/5bqc 106892
+5pyJ5b+F6KaB 106893
+6Zmz 106894
+5LmL57G7 106895
+5LmL57G755qE 106896
+5oCn5Lu3 106897
+5oCn5Lu35q+U 106898
+5ZWG5bqX 106899
+5biC5YC8 106900
+5Lq65omN5Z+55YW7 106901
+5rex5Y+X 106902
+566h55CG5bGA 106903
+5oGQ5oOn 106904
+5LuF5pyJ 106905
+5oq16L6+ 106906
+5rW35YWz 106907
+6LWL5LqI 106908
+5LqL5YS/ 106909
+5Lu36ZKx 106910
+5omL5LiK 106911
+6Ieq5b6L 106912
+5YWz54ix 106913
+5Lqr5pyJ 106914
+6YGX5oa+ 106915
+5b6I5b+r5bCx 106916
+5pu05b+r 106917
+5qCH6K+G 106918
+5bqG56Wd 106919
+5Lmf5aW9 106920
+5LiN5piT 106921
+5oiR5b6I 106922
+5pS56Z2p5Y+R5bGV 106923
+5aSW5Zyw 106924
+5oq15oq8 106925
+6K+X5Lq6 106926
+5Y6V5omA 106927
+5paw5aqS5L2T 106928
+6Jab 106929
+6LCI6K+d 106930
+5LiA5a6a56iL5bqm 106931
+6LWw5Zyo 106932
+5pyA5by6 106933
+5Yqf546H 106934
+5YWx6K+G 106935
+5aSn5qGl 106936
+5LiL5pa5 106937
+5aSW6LWE 106938
+56Kx 106939
+5beh6KeG 106940
+5rmW5YyX55yB 106941
+5Liq55m+5YiG 106942
+5Liq55m+5YiG54K5 106943
+55qE6LSj5Lu7 106944
+55qE5ZOB54mM 106945
+5Yqp5o6o 106946
+5Yib6YCg5LqG 106947
+5Lu76IGM 106948
+5b+r5o23 106949
+5p2R5bqE 106950
+5Y6755yL 106951
+5omN6IO95aSf 106952
+5bGk 106953
+5oiR5a62 106954
+5piv5LiA5qy+ 106955
+576F 106956
+5Yaw6Zuq 106957
+5p6B5aSn 106958
+54Gv5YWJ 106959
+6YaL 106960
+5LiO5YW25LuW 106961
+5o+Q5Ye655qE 106962
+6Z2g6L+R 106963
+6LCD5Yqo 106964
+5bC95Y+v6IO9 106965
+5Y+R5Yqb 106966
+57uZ5aW5 106967
+6YCC6YeP 106968
+6Leo5Zu9 106969
+5YWI6KGM 106970
+5paw5p2Q5paZ 106971
+5L2c5LqG 106972
+5ruh5LqG 106973
+5LiN5ruh 106974
+55qE55y8552b 106975
+55yL5b6X 106976
+6L+Z5LiA5qyh 106977
+6b2Q5YWo 106978
+55qE5LiA6YOo5YiG 106979
+5LiZ 106980
+5riF5paw 106981
+6Kqq5piO 106982
+6Lqr6L6555qE 106983
+5omA5pyJ5Lq6 106984
+5b2w5pi+ 106985
+6LG5 106986
+5Y2/ 106987
+6L+Q6L2s 106988
+5oyH5byV 106989
+5biC5YWs5a6J5bGA 106990
+5Y+C5bGV 106991
+5LmL5pe2 106992
+6YeR6J6N5pyN5Yqh 106993
+6LWE5pys5biC5Zy6 106994
+6IO96K6p 106995
+5b+Y5LqG 106996
+5aSp5aCC 106997
+5q+U5aaC6K+0 106998
+6YqA6KGM 106999
+6JuL57OV 107000
+55Sp 107001
+5qC45a6e 107002
+5pmu5Lqs 107003
+5LyY576O 107004
+5Y+j6IWU 107005
+5ryr55S7 107006
+55y86YeM 107007
+5LqG5LiL5p2l 107008
+5oiR5Lus5Lmf 107009
+5L6N 107010
+5Li65Lit5b+D 107011
+5aWH6L+5 107012
+6Z2S552Q 107013
+5oiq6Iez55uu5YmN 107014
+5Ye65L6G 107015
+5oC75YWs5Y+4 107016
+5byl6KGl 107017
+566X5rOV 107018
+5bel5L2c5a6k 107019
+5omA5Lul5oiR 107020
+5rC05YiG 107021
+5omA5bGe 107022
+5LiN6K+0 107023
+5L2G5piv5Zyo 107024
+6KaB5Y67 107025
+5Yib5Lia6ICF 107026
+5LiN5riF5qWa 107027
+5Zub5ZGo 107028
+5piv5LuO 107029
+55qE5qC55pys 107030
+54G2 107031
+5q+b5rO9 107032
+5q+b5rO95Lic 107033
+5rW35Y+j 107034
+5Zub5Y2B 107035
+5Lmf6KKr 107036
+6IG3 107037
+5LiA5omL 107038
+57up5pWI 107039
+55qE55S35Lq6 107040
+5Lmm57GN 107041
+5LiA6IS4 107042
+5aSn5LqO 107043
+6Zu26YOo5Lu2 107044
+5YWz5oCA 107045
+5bmz57Gz 107046
+5pq06Zyy 107047
+5b6X5aSa 107048
+5LiJ57qn 107049
+5pys5ZGo 107050
+5Lik6ICF 107051
+5a+55Lit5Zu9 107052
+5Y+q6KeB 107053
+5qyn576O 107054
+5aaC5p6c5pyJ 107055
+5bey57uP5piv 107056
+55yL5a6M 107057
+54Gr6ZSF 107058
+6LWQ 107059
+5LiA6YGN 107060
+5oSf5YaS 107061
+57uT5bGA 107062
+5LuT5YKo 107063
+5a6e5Zyw 107064
+5Ymv5oC757uP55CG 107065
+5Lmf5LiN55+l6YGT 107066
+56Kw5Yiw 107067
+5ZCI6K6h 107068
+5a6i5oi355qE 107069
+572X6ams 107070
+5oSJ5b+r 107071
+6aOb 107072
+54Ot54OI 107073
+5Lym5pWm 107074
+5Yy75L+d 107075
+6Zi/6YeM5be05be0 107076
+5YaN6K+0 107077
+5Li65Z+656GA 107078
+55Sf5Lqn57uP6JCl 107079
+6L+Z5Lqb5Lq6 107080
+5YiX6L2m 107081
+5rKz5YyX55yB 107082
+6L+Z5q61 107083
+5rS75Yqo5Lit 107084
+5am3 107085
+55Sf55CG 107086
+5Lit5Zu95Lq65rCR 107087
+6YSC 107088
+5ZCs5Y+W 107089
+5aSN5Lmg 107090
+5pyJ55uK 107091
+5pS25ou+ 107092
+5b6I5Y+v6IO9 107093
+572R57uc5ri45oiP 107094
+5Lus55qE 107095
+6LWL6IO9 107096
+6Zq+5b6X 107097
+5YiG5omL 107098
+55yf6K+a 107099
+5YWs5Y+45Zyo 107100
+5Z2H6KGh 107101
+5Y+j5ZGz 107102
+54m15aS0 107103
+5LiA6Iis55qE 107104
+6L2/6L2m 107105
+562J5LqO 107106
+5rKJ6buY 107107
+5oiR6YO9 107108
+5bCP56iL5bqP 107109
+5LiA5Ymv 107110
+5om/6L29 107111
+5Zyw6LSo 107112
+55WM6Z2i 107113
+55S15py6 107114
+54Sm6JmR 107115
+6ZSA5ZSu6aKd 107116
+5paw6L2m 107117
+5LiK5ri4 107118
+5Li75ryU 107119
+6ZqQ56eB 107120
+5Y+R5bGV5oiY55Wl 107121
+55qE5Yqq5Yqb 107122
+5byA5YWz 107123
+6Kej5Yaz6Zeu6aKY 107124
+552j5a+8 107125
+5a+55oqX 107126
+5b6I5aSa5Lq66YO9 107127
+5peg5pWI 107128
+5Lqn5ZOB6LSo6YeP 107129
+5a6J5b+D 107130
+5Y2O5Lq6 107131
+5LiN56ym5ZCI 107132
+6Ieq5a62 107133
+6Zi15a65 107134
+55qE5ZCE56eN 107135
+55qE55CG5b+1 107136
+55qE5paH5YyW 107137
+5Li66Ieq5bex 107138
+5bGx5rC0 107139
+5ri45rOz 107140
+6ZyH6I2h 107141
+55Sf5rS75pa55byP 107142
+6L+c56a7 107143
+55+z5YyW 107144
+5q2k5LqL 107145
+5piv55yf55qE 107146
+55qE5q+U5L6L 107147
+55So55S1 107148
+5aWl6L+Q5Lya 107149
+5L+d5a6J 107150
+6JuL55m96LSo 107151
+55qE5b+D55CG 107152
+5ber 107153
+5Y+356CB 107154
+5rCU5L2T 107155
+5Y+R5pS5 107156
+5Y+R5pS55aeU 107157
+5Yy75biI 107158
+5raC5paZ 107159
+5piK 107160
+5biC57qn 107161
+5LiW55WM55qE 107162
+5YiG5Yir5piv 107163
+56C05Lqn 107164
+5LiA5p2v 107165
+5ouJ5byA 107166
+5bmz5Yeh 107167
+55qE5Y+R55Sf 107168
+5Yqo5omL 107169
+5LiA55u05Lul5p2l 107170
+5omL5bel 107171
+6YeM6Z2i55qE 107172
+5peg5YWz 107173
+5LuL5YWl 107174
+6LWw5LiK 107175
+5bCx5piv6KaB 107176
+5bm06Ze0 107177
+5Ye654++ 107178
+5b2x6Z+/ 107179
+5bmF5bqm 107180
+6ZuB 107181
+6YGT5YW3 107182
+55uu55qE5Zyw 107183
+5ZCO6ICF 107184
+5LiK5ryU 107185
+5LqG5Yeg 107186
+5q6L55a+5Lq6 107187
+5b+Z56KM 107188
+5piv5ZCm5pyJ 107189
+5bm25a+5 107190
+5Lya5a+86Ie0 107191
+5rC05bqT 107192
+57uG6Ie0 107193
+5ZCO5oKU 107194
+5b+D5oCd 107195
+5YGa5LqL 107196
+5Y6C5oi/ 107197
+552/ 107198
+6L+Q6JCl5ZWG 107199
+5aS06YOo 107200
+55qE6KeS6Imy 107201
+5piv5LuW 107202
+5pei5pyJ 107203
+5bCP5pe25YCZ 107204
+5by65Yqy 107205
+5Li75pKt 107206
+5YWo5Zu95ZCE5Zyw 107207
+5o2P 107208
+5o2f5Z2P 107209
+5ZWG5Lya 107210
+5L+d572X 107211
+55yB5biC 107212
+6Zqn6YGT 107213
+5pyJ5LiN5bCR 107214
+6KaB5Zyo 107215
+5bu66K6+6aG555uu 107216
+57OW5bC/ 107217
+57OW5bC/55eF 107218
+5p2h5Lu25LiL 107219
+5LyY6LSo55qE 107220
+6aaW5Y+R 107221
+5b2T5pe255qE 107222
+5Liw55Sw 107223
+5aSn55uY 107224
+55u457un 107225
+5a6B5aSP 107226
+5YWl5L2P 107227
+5oiR6L+Y 107228
+5YWL5pav 107229
+5a6a5Lu3 107230
+5bmz5pa55YWs6YeM 107231
+55qE55+l6K+G 107232
+5oiR5Lus5Lya 107233
+5YWD5a6d 107234
+5L2T6YeN 107235
+6LOj 107236
+5a+55oiR5Lus 107237
+55+z5a62 107238
+55+z5a625bqE 107239
+57K+5Y2O 107240
+5b2i54q2 107241
+5Y+X5Yiw5LqG 107242
+5L+u6K6i 107243
+576O5ZyL 107244
+6auY5riF 107245
+55y86ZWc 107246
+6KeJ5b6X6Ieq5bex 107247
+5bim57uZ 107248
+5ZSu5Lu3 107249
+6Zeo56Wo 107250
+5a2V5aaH 107251
+55S16KeG5Y+w 107252
+5Y+R5L2c 107253
+55qE5ZGz6YGT 107254
+6ZW/6L+c 107255
+5YWs5YWx5pyN5Yqh 107256
+5q2j5bi455qE 107257
+5pyJ6L+H 107258
+6aOO5oOF 107259
+5q+U6YeN 107260
+5ZC7 107261
+566h55CG5bel5L2c 107262
+57u85ZCI5oCn 107263
+5bey6KKr 107264
+6K+06LW3 107265
+5o6S5rC0 107266
+5LiN5pat5Zyw 107267
+5oOF5oCA 107268
+6L6T6YCB 107269
+6L+H5pWP 107270
+55qE5Y+v6IO95oCn 107271
+5pyN55So 107272
+5pyJ6K645aSa 107273
+5aeU5Ymv5Lmm6K6w 107274
+5YyW5aaG5ZOB 107275
+5pqC5YGc 107276
+5oqV6LWE5Lq6 107277
+54+t57qn 107278
+6K+0552A 107279
+5Y2X5YyX 107280
+5YiG6KGM 107281
+54+g5a6d 107282
+5a+2 107283
+5aKe5aSa 107284
+6KKr5Yqo 107285
+54m55q6K55qE 107286
+6Zec5L+C 107287
+55qE6IS4 107288
+5oOf 107289
+5LiN5LiA5a6a 107290
+57at 107291
+54Gr54iG 107292
+56ef6YeR 107293
+556n 107294
+6YeN5bu6 107295
+6Leq 107296
+5LiA56iu 107297
+55qE5ZCI5L2c 107298
+5a6J5oWw 107299
+5LuN5piv 107300
+5LiT5Lia5YyW 107301
+6LCD6Kej 107302
+5LiN5aao 107303
+6YCZ5piv 107304
+5b+F6aCI 107305
+5LyK5pyX 107306
+5b6X5LqG 107307
+5pyN5Yqh5bmz5Y+w 107308
+5aes 107309
+5YWI6ZSL 107310
+546L5a2Q 107311
+55qE5LiA5YiH 107312
+5oC755CG 107313
+5ZO8 107314
+56qR 107315
+55qE5b+D5oOF 107316
+55qE6YeN5aSn 107317
+55Gf 107318
+5LiA56yR 107319
+5Y+R5bGV5Lit 107320
+5YGl5bq35Y+R5bGV 107321
+5ZOB54mM55qE 107322
+56au 107323
+5L2Z5Lq6 107324
+5LuK5bm05Lul5p2l 107325
+5pWw56CB 107326
+562+6K+B 107327
+5Y675om+ 107328
+5Z+66YeR5Lya 107329
+5oqx5oCo 107330
+5q2j5b2T 107331
+54+t5a2Q5oiQ5ZGY 107332
+5LiN5ZCI5qC8 107333
+5Yi25a6a5LqG 107334
+57yT5oWi 107335
+5Yi257qm 107336
+5qCP55uu 107337
+5biC5Zy657uP5rWO 107338
+57uE5oiQ55qE 107339
+5Lil5bO7 107340
+5pel6K6v 107341
+5LiA54K554K5 107342
+5piv5oCO5LmI 107343
+55qE54Wn54mH 107344
+6Zi75q2i 107345
+5qih57OK 107346
+57y4 107347
+6YGV5Y+N 107348
+5pCs6L+B 107349
+6YeR6ZKx 107350
+5b2s 107351
+5LiN5a6J 107352
+5oiY55Wl5ZCI5L2c 107353
+5aGr5YaZ 107354
+6K6y56m2 107355
+5YWF5YiG5Yip55So 107356
+6IO95aSg 107357
+6JGh6JCE6YWS 107358
+6YeH55So5LqG 107359
+5Zyo5LuK5bm0 107360
+5Lit5bCP5a2m 107361
+5Zyo5oSP 107362
+55qE5Y6L5Yqb 107363
+5LiN5bm4 107364
+5Yi26I2v 107365
+5Y+v5Lul6K6p 107366
+6KKr6K+E5Li6 107367
+57uG6I+M 107368
+5oiP5Ymn 107369
+5Y2K5a+8 107370
+5Y2K5a+85L2T 107371
+6KeG6KeS 107372
+5Zac5q2h 107373
+5b6B5pS2 107374
+6LCL5YiS 107375
+5p6B5aSn55qE 107376
+54K56LWe 107377
+6K6w6ICF5LuO 107378
+5Lik5ZCN 107379
+6Ieq5Yqp 107380
+6LW35q2l 107381
+5oqk5aOr 107382
+5a6d6ams 107383
+5aSq5a2Q 107384
+5bCP5bCP55qE 107385
+5rip5rOJ 107386
+5Ye656ef6L2m 107387
+56ef5oi/ 107388
+5Lik5a62 107389
+6ZyH5pK8 107390
+56eJ5om/ 107391
+5LiA5Lu25LqL 107392
+54OI5aOr 107393
+5a6Y5YW1 107394
+6L2s6Lqr 107395
+5LmQ5Zut 107396
+55mM55eH 107397
+5qih6IyD 107398
+5oSj 107399
+6L+H5Y6755qE 107400
+5Luj5Lu3 107401
+55qE5qaC5b+1 107402
+5Yeg55m+ 107403
+6LS16Ziz 107404
+5ouF5b+n 107405
+6YCC5a6c 107406
+546v5aKD5L+d5oqk 107407
+54Or 107408
+5L2g5oOz 107409
+5q2k5ZCO 107410
+5L2g5Lmf 107411
+542O 107412
+6Zmk5q2k 107413
+6Zmk5q2k5LmL5aSW 107414
+6LCD5bqm 107415
+56eR55uu 107416
+5omA6K+055qE 107417
+5YqH 107418
+5b+96KeG 107419
+5LiJ5qyh 107420
+5LiA5pel 107421
+5Z6C55u0 107422
+56ue5oqA 107423
+6Z2i5YyF 107424
+5aSn5oiY 107425
+5pC65bim 107426
+5aaC5p6c5rKh5pyJ 107427
+5YW75oiQ 107428
+5Ye66KGA 107429
+54ix5aW96ICF 107430
+5omT6YCa 107431
+6LW36K+J 107432
+5ZGI546w5Ye6 107433
+5q2M5omL 107434
+5Zyo5aSW 107435
+6aKG5a+85bmy6YOo 107436
+5Yal 107437
+6IiG6K66 107438
+5o+Q5Y+W 107439
+6Zi/5bCU 107440
+5pyb552A 107441
+5LiJ5Lqa 107442
+6LKh 107443
+5Yi35paw 107444
+5pma5oql 107445
+6L+Y5pyJ5LiA5Liq 107446
+5Yaw566x 107447
+572R54K5 107448
+5Ye65YW3 107449
+5by654OI55qE 107450
+5oiR55u45L+h 107451
+5biM5pyb6IO9 107452
+54mZ6b2/ 107453
+5LqL5a6c 107454
+5Lia5YaF5Lq65aOr 107455
+5Luj5pu/ 107456
+5Y+Y5b2i 107457
+6Zuy 107458
+6LCD5o6n 107459
+5Yib5paw5Yib5Lia 107460
+5ouG6L+B 107461
+5qC45p+l 107462
+6YCX 107463
+5YWl5a2m 107464
+5oSP5ZCR 107465
+5o+b 107466
+5LiL5qyh 107467
+5Lyg6L6T 107468
+5LuW5Lus5Zyo 107469
+6ICM5LiU6L+Y 107470
+5pel5Zyo 107471
+5pWZ6K6t 107472
+5rS7552A 107473
+55qE5pyJ5pWI 107474
+5aSN5bel5aSN 107475
+5aSN5bel5aSN5Lqn 107476
+5piv5LiA5Lu2 107477
+562J552A 107478
+5b6p 107479
+5YuH5pWi 107480
+6YGt5Y+X 107481
+5aWU6amw 107482
+6K6y5bqn 107483
+6K+05a6M 107484
+57uZ5Ye6 107485
+6LCm 107486
+6K+K55aX 107487
+55uy55uu 107488
+5a6i6L+Q 107489
+5bCx6L+e 107490
+5byA5YWD 107491
+5byA5YWD5qOL54mM 107492
+5LiN5pat5o+Q5Y2H 107493
+55So5oi355qE 107494
+5pKV 107495
+5L6b5rC0 107496
+57aT5r+f 107497
+5Lit5Yy76I2v 107498
+6IGU5oOz 107499
+5YWs5Lqk6L2m 107500
+6Iiq54+t 107501
+5oqA6KGT 107502
+5byV6LW355qE 107503
+5bC5 107504
+6LWE5rex 107505
+5Zu96LWE5aeU 107506
+6Jit 107507
+6by75a2Q 107508
+6Ze9 107509
+5o6S6Zif 107510
+6KeC5YWJ 107511
+6YGX5Z2A 107512
+5Lic5Lqs 107513
+6aWt5bqX 107514
+5LiN5pat55qE 107515
+5bCx5piv5LiA5Liq 107516
+6ZW/5LmF 107517
+55qE6KeC54K5 107518
+5ai2 107519
+5oiR546w5Zyo 107520
+55Ww 107521
+5b6X5Ye6 107522
+5b+F5a6a 107523
+5LiN5Y+X 107524
+5Y+q6ZyA6KaB 107525
+5Zuw5omw 107526
+56eR5a2m5oqA5pyv 107527
+54mb6IKJ 107528
+6L6D6auY55qE 107529
+6LeR5q2l 107530
+5rK+ 107531
+6I+p6JCo 107532
+5pyA5b6M 107533
+5L+d5a+G 107534
+5rK75a6J 107535
+6YKx 107536
+5bi46K+G 107537
+6IS46Imy 107538
+5YyX5aSn 107539
+5rGH6IGa 107540
+5pGG6ISx 107541
+6b6Z5aS05LyB5Lia 107542
+5aWz5Y+L 107543
+562J5bel5L2c 107544
+5Lit576O 107545
+6IGM5Zy6 107546
+6ISR6KKL 107547
+5YaZ55qE 107548
+6aWy5paZ 107549
+5Yqz5Yqo5Yqb 107550
+5bGv 107551
+5oyB6IKh 107552
+5Zu+5YOP 107553
+6L+H5Y675LqG 107554
+6LKo 107555
+6L6y 107556
+6Zeu5oiR 107557
+6Lef5L2g 107558
+55Sf5q27 107559
+5a6h576O 107560
+6aKX57KS 107561
+5Lit5pa5 107562
+5Yqg54Ot 107563
+5peF6KGM56S+ 107564
+55m855Sf 107565
+5LiN5aCq 107566
+5YK3 107567
+5qWg 107568
+5Yqe5qGI 107569
+5p+E 107570
+5pei5piv 107571
+5aSE5YiG 107572
+55yf5a6e55qE 107573
+5oql57q4 107574
+5biI54i2 107575
+5a6J5b6955yB 107576
+5Ymv5Li75bit 107577
+5LmL6YGT 107578
+5a+85by5 107579
+5a2m5qCh55qE 107580
+5Z+O5biC55qE 107581
+6LCI5Yiw 107582
+5qKX 107583
+5bmz6Z2i 107584
+6K+05LuA5LmI 107585
+6aKR546H 107586
+6ZW/5LiJ6KeS 107587
+55qE5Yip55uK 107588
+6buo 107589
+6LGG6IWQ 107590
+5a6e6ZmF5oOF5Ya1 107591
+5p6X5Lia 107592
+57qq5qOA55uR5a+f 107593
+5L2P6Zmi 107594
+55qE5pW05L2T 107595
+5YmN6KGM 107596
+5oyo 107597
+54Wk55+/ 107598
+5Ymv5oC76KOB 107599
+5bCP5ZCD 107600
+5p6B56uv 107601
+5amG5amG 107602
+546w6LSn 107603
+6K+X5q2M 107604
+6ZKl5YyZ 107605
+57yp55+t 107606
+5L2G6L+Z 107607
+5paw5ZOB 107608
+6L+Z5a+5 107609
+55+l5ZCN5bqm 107610
+5b+X5oS/5pyN5Yqh 107611
+5aSn5bGA 107612
+6KGh6YeP 107613
+5L2T546w5LqG 107614
+5qGD6Iqx 107615
+5ZC45byV5Yqb 107616
+5aCk 107617
+5pOF6ZW/ 107618
+5ZKS 107619
+55u45py6 107620
+5LiA56uZ 107621
+5LiA56uZ5byP 107622
+5pyA576O 107623
+5rC45LmF 107624
+55qE6YOo5YiG 107625
+5YiG5bel 107626
+5bel56iL5bu66K6+ 107627
+5pCt6L29 107628
+5rC05Lit 107629
+6Iyo 107630
+55qE5pON5L2c 107631
+57uf5rK7 107632
+55WF6YCa 107633
+5YWa55qE5Y2B 107634
+6Ly4 107635
+5ris 107636
+576O6KeC 107637
+5LiN5Yip 107638
+5Y+N5oCd 107639
+6aqE5YKy 107640
+5qCH55qE 107641
+5p2A5Lq6 107642
+6Zi/5aeo 107643
+6aOf5p2Q 107644
+5ZCD55qE 107645
+5ZCO5YaN 107646
+55+j 107647
+5Lik5L6n 107648
+5riF5rC0 107649
+6L+b55CD 107650
+5byA5aeL5LqG 107651
+5ZCs5LqG 107652
+54SK5o6l 107653
+55+u 107654
+5aif 107655
+5Li65Lq6 107656
+6YCB57uZ 107657
+5YaS6Zmp 107658
+5pW3 107659
+57uI5q2i 107660
+5omN55+l6YGT 107661
+6L+Q5rCU 107662
+6YCa6aOO 107663
+5oOK6K62 107664
+56eR5a2m6Zmi 107665
+5o+Q6Zeu 107666
+5aSq5Y6f 107667
+55u45ZCM55qE 107668
+5LuV 107669
+6IGW 107670
+5oOF5rOB 107671
+6aKG5a+85Lq6 107672
+5Ye65p2l5LqG 107673
+5rK/57q/ 107674
+6Zm9 107675
+5oSf6Ka6 107676
+5LuN5Zyo 107677
+5qmZ 107678
+57qm5Li6 107679
+5Zad6YWS 107680
+55So6I2v 107681
+5LiL5LiA 107682
+5rOV5a6Y 107683
+6aG65bqP 107684
+5YGa5LiA5Liq 107685
+5Yui 107686
+5q2q 107687
+55S156ue 107688
+5Ly06ZqP552A 107689
+5LmL5Yqb 107690
+5LmL5Lq6 107691
+5LqR6K6h566X 107692
+5Yir5Lq655qE 107693
+56eR5a2m5Y+R5bGV 107694
+56ys5YWr 107695
+5bmy5omw 107696
+5aWz56We 107697
+6L+Z5qC35YGa 107698
+5aSE5Zyo 107699
+5rC06LSo 107700
+6ZW/5pil 107701
+5biC5Zy66ZyA5rGC 107702
+57u05p2D 107703
+6ICz5py1 107704
+5paH5YyW55qE 107705
+5aW257KJ 107706
+5Lyg6L6+ 107707
+5omL5py654mI 107708
+5pu+5Zyo 107709
+5LqM5pyf 107710
+5Y6f5Zug5piv 107711
+5rqQ5aS0 107712
+5Y+I6IO9 107713
+6KO4 107714
+5oqA5pyv5Yib5paw 107715
+5paH5YyW5peF5ri4 107716
+5Y+R56Wo 107717
+5bm057qn 107718
+5L2g5LiN 107719
+5LmL5b+D 107720
+5pWw55m+ 107721
+5ZCR5b6A 107722
+6ICB5a62 107723
+5ZyL6Zqb 107724
+55qE6auY5bqm 107725
+5pyd6Ziz 107726
+5riF6Zmk 107727
+6Ieq5pyJ 107728
+5Lmm5Lit 107729
+5ri45oiP6KOF5aSH 107730
+5LiH5aSa 107731
+6am+6am25ZGY 107732
+5L2g55+l6YGT 107733
+5Zu95bqG 107734
+6aOf5aCC 107735
+5o6l5Y+j 107736
+5oC75pWw 107737
+5YW25LuW55qE 107738
+55Sf5ZG955qE 107739
+5L2g5Zyo 107740
+55qE55uu5YWJ 107741
+6L+Z5pa56Z2i 107742
+6YO96K+0 107743
+55aX5rOV 107744
+5YuH5aOr 107745
+5Zyo5YWo55CD 107746
+5L+d6Zmp5YWs5Y+4 107747
+552j5p+l 107748
+5ZaE6Imv 107749
+6KGo5b2w 107750
+6Lmy 107751
+6Lev5q61 107752
+5pyD5ZOh6KaP 107753
+5pyD5ZOh6KaP56+E 107754
+5oi35Z6L 107755
+5L+D5L2/ 107756
+5L+u5bu6 107757
+6auY5rC05bmz 107758
+5YGa5Ye65LqG 107759
+5Li75Zy6 107760
+6KGM6LWw 107761
+56m655m9 107762
+5pyJ5Lq66K+0 107763
+6L+Z5Liq5LiW55WM 107764
+5ZCN5LmJ 107765
+5a6M576O55qE 107766
+576h5oWV 107767
+5Y+K5YW25LuW 107768
+5Y+v55So 107769
+5ouQ 107770
+6L6D5aSn55qE 107771
+5oqA5pyv5ZKM 107772
+5bC85Lqa 107773
+55m+6LSn 107774
+5o+J 107775
+6YCJ6LSt 107776
+6Zif5Y+L 107777
+5Lyg5oSf 107778
+5Lyg5oSf5Zmo 107779
+5Y+q6KaB5L2g 107780
+5Li65LuA5LmI6KaB 107781
+5LiT5rOo5LqO 107782
+5L2Z6aKd 107783
+5YW45Z6L55qE 107784
+55uu5YmN5bey 107785
+5qyy5pyb 107786
+6IGU57uc 107787
+5rWB5Lyg 107788
+55qE5a625bqt 107789
+5Y+35Y+s 107790
+54+N6LS1 107791
+5Lyf5aSn55qE 107792
+6Ym05LqO 107793
+6Lef5LuW 107794
+5Lqn54mp 107795
+5LiN5bey 107796
+6L+d5rOV6KGM5Li6 107797
+5aS05LiK 107798
+5YiG6Kej 107799
+5Y+v5Lul55yL5Ye6 107800
+5qCh5Yy6 107801
+5a2X5L2T 107802
+5L+u54K8 107803
+55Sa6Iez5piv 107804
+5b6u5L+h5YWs5LyX 107805
+5Y+W5Luj 107806
+6JCl5Lia5pS25YWl 107807
+5r2N5Z2K 107808
+5L2g6IO9 107809
+56S+5Lya5L+d6Zqc 107810
+5q+U6LWb5Lit 107811
+5rGh5rC05aSE55CG 107812
+5aSr5aaH 107813
+5LiA5bmF 107814
+5rK/5rW3 107815
+5Y+j5oSf 107816
+5L2G5Y20 107817
+5b2T5pel 107818
+55qE5pyA5aSn 107819
+5q+P5LiA5L2N 107820
+5rKh5LqL 107821
+54m55Yil 107822
+5byA5a2m 107823
+6Lev6Z2i 107824
+5b+D55CG5a2m 107825
+5pS+572u 107826
+6YeN5bqG5biC 107827
+5L2g6Ieq5bex 107828
+5raI6LS56ICF55qE 107829
+5LiA5rOi 107830
+6K2m5oOV 107831
+5Y2n5a6k 107832
+5rOo5bCE 107833
+6aOO6Zuo 107834
+5rK/552A 107835
+5ZGK6Ki0 107836
+6KGo546w5Ye6 107837
+5Zub5piv 107838
+5Y+k5YW4 107839
+5pu06YeN6KaB55qE 107840
+5aW95LqL 107841
+55y85rOq 107842
+5qiT 107843
+5a6h5Yik 107844
+56Kw5pKe 107845
+6L2m56uZ 107846
+6L+b5YWl5LqG 107847
+6ZuG5ZCI 107848
+5qC85aSW 107849
+5a6+6aaG 107850
+5pSv5LuY5a6d 107851
+5aW55piv 107852
+5piv5aaC5L2V 107853
+5Lq65qyh 107854
+55qE5oiQ5Yqf 107855
+5peg5Yqb 107856
+5rW35ouU 107857
+5pil5a2j 107858
+6YO95LiN5Lya 107859
+562J5aSa56eN 107860
+5LiA5Liq5bCP 107861
+5YGc6L2m5Zy6 107862
+6K6p5pu05aSa 107863
+6L+Z54K5 107864
+5oiQ5ZOB 107865
+6ZKJ 107866
+6YGH6KeB 107867
+54+t5Li75Lu7 107868
+5oSP5oS/ 107869
+55qE5ZCM5a2m 107870
+5ri46KeI 107871
+5Y6L57yp 107872
+5Zyo5Lyg5aWH 107873
+5by55oCn 107874
+5pel5YaF 107875
+56aP5bu655yB 107876
+6KeS6JC9 107877
+5YiG5byA 107878
+5Lya6K6p 107879
+5aSW5Zu0 107880
+54af5oKJ55qE 107881
+54aU 107882
+5LiH6L6G 107883
+5aSc6Ze0 107884
+6L2m6Lqr 107885
+5Lit5pyf 107886
+5a6M5ZaE55qE 107887
+5ZOB57G7 107888
+5Y+L6LCK 107889
+6YCJ5ouU 107890
+6aqR5aOr 107891
+5b2m 107892
+55qE55yL5rOV 107893
+5Zu9546L 107894
+6L6j5qSS 107895
+5Y+R5biD5pe26Ze0 107896
+5Y+k5Z+O 107897
+6ZqP5py6 107898
+56uW 107899
+5byA6L6f 107900
+5LyX55Sf 107901
+5rKh5Yqe5rOV 107902
+5Y2D6YeM 107903
+5p2l5rqQ5LqO 107904
+55qE5p2D5Yip 107905
+5q+U5YiG 107906
+5ruh5oSP55qE 107907
+5L+u6KGM 107908
+5Z2g 107909
+5aSn5rW3 107910
+6I65 107911
+5Ye66Lqr 107912
+6KuH 107913
+5YWz6IqC 107914
+5ZCN5Lq6 107915
+6ZyA6KaB5rOo5oSP 107916
+5pep5pmo 107917
+5aSW5Y2W 107918
+5Y+I6KaB 107919
+5raJ5qGI 107920
+55Sz6K+35Lq6 107921
+6ZmE6L+R55qE 107922
+5Yqg5b+r5o6o6L+b 107923
+5paw5bm0 107924
+5aSn6KGX 107925
+5LiA6bue 107926
+6IuP5a6B 107927
+5oKE5oKE 107928
+6IS+5rCU 107929
+5biM6IWK 107930
+6ZqP5Y2z 107931
+5pWi5LqO 107932
+5a6e6Le15Lit 107933
+5piv5rKh5pyJ 107934
+5pyJ6Laj55qE 107935
+5p2l6Ieq5LqO 107936
+6KOB5Yik 107937
+5aWz5a2p5a2Q 107938
+6Iez5YWz 107939
+6Iez5YWz6YeN6KaB 107940
+5pm65Yqb 107941
+6LWw5Ye65Y67 107942
+55+t5p2/ 107943
+5aSn5Zu9 107944
+55qE6K6k6K+G 107945
+5bm05aSc 107946
+5YaN5Yiw 107947
+5ZCM5qC355qE 107948
+5a+G5bCB 107949
+5aSW5Lqk6YOo 107950
+55Sf5pWI 107951
+5oKo5Y+v5Lul 107952
+5L2g5YCR 107953
+6L+H5bm0 107954
+5byT 107955
+6KGM5p2O 107956
+5q+U6LW3 107957
+6Lqr6auY 107958
+6L+Z5Liq5Lq6 107959
+5Lit5aSW 107960
+6YGT5q2J 107961
+55uv552A 107962
+5Lqy5a2Q 107963
+6Ze4 107964
+55m95LqR 107965
+6ISW5a2Q 107966
+5LiA5YiH6YO9 107967
+5reR 107968
+6LCc 107969
+5YG254S2 107970
+6Z2g6LCx 107971
+6auY566h 107972
+5LiL5Y+R 107973
+5pS+5Yiw 107974
+57G75Yir 107975
+5LiL5YiX 107976
+5re35Lmx 107977
+5ZCI5rOV5p2D55uK 107978
+546v55CD 107979
+5pyJ5pWI5Zyw 107980
+5ZWG5oi3 107981
+5rmW5Lq6 107982
+5rW35bK4 107983
+5oqV5Lqn 107984
+5Lik5Liq5pyI 107985
+6YO96Z2e5bi4 107986
+5aKe5by65LqG 107987
+5p2l5Yiw5LqG 107988
+5Ymp5L2Z 107989
+5oKo55qE5a2p5a2Q 107990
+5rWB5rC0 107991
+5q2j5LmJ 107992
+5aSp54yr 107993
+5YGa6L+H 107994
+5L2V5pe2 107995
+5oiR5Y67 107996
+55yB5Lu9 107997
+5aWW6YeR 107998
+6K+l5aaC5L2V 107999
+5LiL54+t 108000
+5YG25YOP 108001
+5pGG5pS+ 108002
+5paw5qih5byP 108003
+5oqV6LOH 108004
+6Lev5Y+j 108005
+5Yac5rCR5bel 108006
+5aSn5a24 108007
+5Lu25LqL 108008
+5qC55pys5LiN 108009
+5rWT5bqm 108010
+5rWT5Y6a 108011
+6L2u6IOO 108012
+5oi/5LyB 108013
+6Z2e5bi45aW9 108014
+5LuO5Lit 108015
+5Lq65qC8 108016
+57+B 108017
+5pe26Ze05ZKM 108018
+6L+Z5LiN5piv 108019
+5Yi45ZWG 108020
+5oOK5Lq6 108021
+5Zmo5a6Y 108022
+5YeG5YiZ 108023
+5oOF5pmv 108024
+5pu06auY55qE 108025
+5a2m5a62 108026
+5rOh5rKr 108027
+5Zyw5pa55pS/5bqc 108028
+5bCx55+l6YGT 108029
+5ZG85ZCB 108030
+57uP6LS4 108031
+6Iqx6ZKx 108032
+5pyJ5LiA5qyh 108033
+5oSf5oWo 108034
+5LiA5Y2D 108035
+5aSc5pma 108036
+6Km55aeG 108037
+6Km55aeG5pav 108038
+6KaB6Ze7 108039
+57uS 108040
+5rqQ5LqO 108041
+55qE6LSo6YeP 108042
+5rOo5oSP5LqL6aG5 108043
+5oWi5oCn 108044
+56iz5a6a55qE 108045
+5bu66K6+5ZKM 108046
+5pmv6LGh 108047
+6YeP5YyW 108048
+55qE6Kmx 108049
+6K+E57qn 108050
+5rqc 108051
+57qi5YyF 108052
+6YCa6YGO 108053
+56S+5Lya6LSj5Lu7 108054
+5paw5Lqn5ZOB 108055
+5Ya36Z2Z 108056
+55yL5LiN5Yiw 108057
+6IGU6YKm 108058
+6a2E 108059
+55qE5YmN5o+Q 108060
+55qE5YmN5o+Q5LiL 108061
+6L6D5aW9 108062
+55qE5oSf5oOF 108063
+5a6i5oi35o+Q5L6b 108064
+54us6Ieq 108065
+5aKe5pS2 108066
+5paH54yu 108067
+5ou85ZG9 108068
+566h55CG5ZKM 108069
+5rWB5Yqo5oCn 108070
+5YWo5a62 108071
+5LiK5pa5 108072
+5o6o5Ye655qE 108073
+5LiJ5Zu9 108074
+5LiA5Liq5piv 108075
+5paw5LiA6L2u 108076
+5paH5YyW6YGX5Lqn 108077
+5q66 108078
+5aSn5rm+5Yy6 108079
+6YO96ZyA6KaB 108080
+55qE5a6e6ZmF 108081
+57eK 108082
+5aSn5aWW 108083
+5YWJ6IqS 108084
+5L6/5LqO 108085
+55qE6KGo5oOF 108086
+5ryU57uO 108087
+57qi5Yab 108088
+5b2T5oiR 108089
+5rK75oSI 108090
+6aKd5bqm 108091
+6Z2c 108092
+5Lu75L2V5Lq6 108093
+6KGX5aS0 108094
+54m55pav 108095
+54m55pav5ouJ 108096
+5Yy755aX5py65p6E 108097
+57uZ5a2p5a2Q 108098
+6KeE55+p 108099
+6KOc 108100
+55qE6Lqr5b2x 108101
+5LiT5qCP 108102
+5p2l5Li0 108103
+56ul5bm0 108104
+5aSN6IuP 108105
+6KiC 108106
+5Z6L5Y+3 108107
+5Zu+5qGI 108108
+566A5Y6G 108109
+5oux 108110
+6I235YWw 108111
+5Lu75oSP 108112
+5om/5o6l 108113
+6L+Z5omN 108114
+5a6i6L2m 108115
+5pyd552A 108116
+6aCF55uu 108117
+5Y+w6aOO 108118
+55qE5oi/5a2Q 108119
+6aqP 108120
+5p2x6KW/ 108121
+6YGX5Lyg 108122
+6LaK5aSa 108123
+5LqG5LuW55qE 108124
+5LiK5ZGo 108125
+566h55CG5Yi25bqm 108126
+5aSx5Lia 108127
+55S35Y+L 108128
+5o6l56eN 108129
+5aiB5ZCN 108130
+55Kw5aKD 108131
+5Y+R55Sf5Zyo 108132
+5Liq5Zu95a62 108133
+5Yib5paw5Y+R5bGV 108134
+5pS55Y+Y5LqG 108135
+5YGl5bq355qE 108136
+5YC85b6X5LiA 108137
+5YC85b6X5LiA5o+Q 108138
+5Zui5LyZ 108139
+5YGH6K6+ 108140
+5Y+w5LiK 108141
+6KeE6IyD5YyW 108142
+6Zmq5ZCM 108143
+5bqn5qSF 108144
+5Y+v5oCc 108145
+5YWL5oCd5Li75LmJ 108146
+5rOV5b6L6LSj5Lu7 108147
+5LiA6aG/ 108148
+5oqs5aS0 108149
+5Li66YeN54K5 108150
+6L+c5rSL 108151
+6YCP6L+H 108152
+5YWo55CD5YyW 108153
+6Laj5ZGz 108154
+56Wo5oi/ 108155
+5q+P5Lq6 108156
+5ZCE56eN5ZCE5qC3 108157
+5LqG5Ye65p2l 108158
+57ud5a+55piv 108159
+5LiL5bGe 108160
+5LiA5Y+M 108161
+6L+Z5Z2X 108162
+5oqX55ar 108163
+6KaB54K5 108164
+5b2i5oiQ55qE 108165
+5oiR55yL 108166
+5LiH6YeM 108167
+6ICD56CU 108168
+5Li65YW2 108169
+5rCR5a6/ 108170
+5aSa5L2N 108171
+5aSn6Ie0 108172
+5LuY6LS5 108173
+5YWl5omL 108174
+5bGF5a62 108175
+5omA5Zyo5Zyw 108176
+5Lq66Lqr 108177
+6L+H5b6X 108178
+6K+V6K+V 108179
+6K6/6LCI 108180
+5Yqg6YeN 108181
+5bCx5LiN5Lya 108182
+55Sf5Lqn5LyB5Lia 108183
+5Zue5Zu9 108184
+5bqV57q/ 108185
+6LW25Yiw 108186
+5pSv6Zif 108187
+5oiR5Lus6YO9 108188
+6YKu5pS/ 108189
+55u06Iez 108190
+6ZKi55C0 108191
+5YWc 108192
+56CU6K6o5Lya 108193
+5pyI5Lqu 108194
+5Z2a5oyB5Lul 108195
+5YWs5a6J6YOo 108196
+6ZKi566h 108197
+5bCP55m9 108198
+572u5Lia 108199
+6IGL 108200
+5Lmm5YaZ 108201
+5p2P 108202
+6YWN5pa5 108203
+6ICM5Y+I 108204
+55Ge5aOr 108205
+55WM55qE 108206
+6ICB5aSn 108207
+5oiQ54af55qE 108208
+5bmy5LuA5LmI 108209
+5LiT6aG55paX5LqJ 108210
+562J5aSa5Liq 108211
+6ISx56a7 108212
+5LiJ5Liq5pyI 108213
+56CU56m25ZGY 108214
+5peL6L2s 108215
+5p6B6Ie0 108216
+5YWN6LSj 108217
+5YWN6LSj5aOw5piO 108218
+5b6I5aSa546p5a62 108219
+6L2m5LiK 108220
+5Lqk5LqS 108221
+5bey5piv 108222
+5LiA5bCP 108223
+55qE6YeN54K5 108224
+6Iqx5LqG 108225
+5LiN5piO 108226
+5pyJ5YWz6KeE5a6a 108227
+54q55aaC 108228
+55y4 108229
+5a+h 108230
+55qE6KGj5pyN 108231
+5YyF6KO5 108232
+6Lqr5a2Q 108233
+5biI6IyD5aSn5a2m 108234
+5LqL5YWI 108235
+57q/5p2h 108236
+5rOV5Yi2 108237
+5YW75oqk 108238
+56iz5a6a5oCn 108239
+6YK1 108240
+5Z6E5pat 108241
+6aGN 108242
+6ICD5Y+k 108243
+5p2g5p2G 108244
+6IuP6IGU 108245
+5rC055S1 108246
+5YW35L2T55qE 108247
+5r+A5rS7 108248
+5oiR5qCh 108249
+5Yia5byA5aeL 108250
+5Ye45pi+ 108251
+56a+ 108252
+5YW86IGM 108253
+6YCP6YGO 108254
+5Zyo5ri45oiP5Lit 108255
+56S+5Lya5Y+R5bGV 108256
+5aW9546p 108257
+5bm75oOz 108258
+5LiN5Luj6KGo 108259
+5rOo5oSP5Yqb 108260
+5qON 108261
+55So5omL 108262
+576O5Lq6 108263
+6K645aSa5Lq6 108264
+5b6I5piv 108265
+55qE56CU5Y+R 108266
+5omT5Ye6 108267
+5ZCI5LyZ5Lq6 108268
+5LiA5aSc 108269
+57yT57yT 108270
+5L+u5q2j 108271
+5oSf55+l 108272
+57uI6Lqr 108273
+5r+A57Sg 108274
+546v5aKD5LiL 108275
+5qyh5Lya6K6u 108276
+57uP5rWO5aKe6ZW/ 108277
+5omb 108278
+5Y+R6YW1 108279
+5YiG5p6Q5biI 108280
+5Zyo5pyq5p2l 108281
+5Li76KaB5pyJ 108282
+5LiA5a2j5bqm 108283
+55qE6K+05rOV 108284
+5LuO5p2l5rKh5pyJ 108285
+6LSn6L2m 108286
+57yp5bCP 108287
+5aSq6L+H 108288
+5pWI5Yqb 108289
+5LiN5LiL 108290
+5oqV56i/ 108291
+6I2v5Lia 108292
+57uE6ZW/ 108293
+56uZ54K5 108294
+5b6I5Zac5qyi 108295
+6ZC1 108296
+5Yq/5aS0 108297
+5ryP5rSe 108298
+5oSk5oCS 108299
+5YWF5a6e 108300
+5Yib5Lia5p2/ 108301
+54iq 108302
+5pyq5b+F 108303
+5bqV6YOo 108304
+5b6X5YiG 108305
+5Lq65rCR5Yy76Zmi 108306
+5LqM5omL5oi/ 108307
+5bey57uP6KKr 108308
+5aSn5qW8 108309
+5paw5oi/ 108310
+6L6m5rOV 108311
+55So5Yqb 108312
+5ouT5a69 108313
+5YaF5Zyo 108314
+5pKt5Ye6 108315
+6aWw5ryU 108316
+5Lmf6K6p 108317
+5L2c54K6 108318
+54mp5Lia566h55CG 108319
+5Y205LiN 108320
+5Li65Lit5Zu9 108321
+5bGA5Yq/ 108322
+5LiN6IKv 108323
+5pyA5paw55qE 108324
+5Y+v5Lul6YCJ5oup 108325
+5pi+546w 108326
+5bCx566X5piv 108327
+5Zyo5qCh 108328
+6b6f 108329
+5Lik5p2h 108330
+55qE5a6e5Yqb 108331
+6LaK5aW9 108332
+5aW55Zyo 108333
+5b+g6K+a 108334
+5Lmf6ZyA6KaB 108335
+5ri45oiP5pON5L2c 108336
+6LaF5Ye6 108337
+5aaC5p6c5LiN 108338
+5omA5Zyo55qE 108339
+5L2g6L+Y 108340
+5Lul5YaF 108341
+5pyJ5LiA5a6a 108342
+5Y+v6L6+ 108343
+6LeR5Yiw 108344
+5Ymb 108345
+5bu656uL5YGl5YWo 108346
+5pW06L2m 108347
+5YmN5pa5 108348
+6Ze05o6l 108349
+56255aSH 108350
+55ay5Yqz 108351
+56a75byA5LqG 108352
+5rGd 108353
+6Z2i6YOo 108354
+5LmL5YmN55qE 108355
+5Y+Y5Li6 108356
+5aaC5p6c6K+0 108357
+5a+55LuY 108358
+5Z2H5Y+v 108359
+6KKr5ZGK5Lq6 108360
+57K+576O 108361
+6IGa5Lya 108362
+552A5oCl 108363
+6LC35q2M 108364
+5LiA5Y+3 108365
+57qi5Yip 108366
+5Lyg5aWH5ri45oiP 108367
+5buW 108368
+6LSe 108369
+5Lmw5Yiw 108370
+6a2a 108371
+5L2T6LSo 108372
+5bCR5LqG 108373
+5rOJ5bee 108374
+5ZCf 108375
+57ud5LiN 108376
+6buR5oG2 108377
+6buR5oG25Yq/5Yqb 108378
+5LiK5pig 108379
+55qE6K+d6aKY 108380
+5LiH5Lq65qyh 108381
+5LiW6Ze0 108382
+55So5bel 108383
+6LSv56m/ 108384
+5a6d55+z 108385
+5L2g5aW9 108386
+5YiH5Ymy 108387
+5by65Zu9 108388
+5Zue6JC9 108389
+5rC05pm2 108390
+5qih5Lu/ 108391
+5rSq5rC0 108392
+6YCZ6bq8 108393
+5Y2B5LiJ5LqU 108394
+5L2R 108395
+6ZmE5Lu2 108396
+55qE5aKe6ZW/ 108397
+6ZmE5bGe 108398
+546w5bey 108399
+5biu5L2g 108400
+6YeR54mM 108401
+6auY5Y6f 108402
+5Zyo5a626YeM 108403
+6Ziy6IWQ 108404
+56Gu5a6e5piv 108405
+5a6j6K6y 108406
+5aSp5omN 108407
+57uP6JCl566h55CG 108408
+6ZSF54KJ 108409
+5ZCI5LiA 108410
+6KeC6LWP 108411
+6ZW/6L6+ 108412
+5Li75LmJ5oCd5oOz 108413
+6YKj6bq8 108414
+6aOO5LqR 108415
+5Li65Li755qE 108416
+5pqR5YGH 108417
+5oyB5LmF 108418
+5byC5Zyw 108419
+5byA6Zeo 108420
+5qih5p2/ 108421
+5om55qyh 108422
+5LiN5L6/ 108423
+5aSp55Sf 108424
+5Yeg5Liq5pyI 108425
+5LiT56eR 108426
+5Y+m5pyJ 108427
+5YWs5biD55qE 108428
+5oe3 108429
+5Zy65ZCI 108430
+55qE5b+D5oCB 108431
+6L+Y5aW9 108432
+5a6e5oiY 108433
+6ICB5biI55qE 108434
+5YWp5YCL 108435
+5Y+v5Zyo 108436
+6YKj5L2N 108437
+5aWg5a6a5LqG 108438
+5L+D6ZSA 108439
+5o+05Yqp 108440
+5LiH54mp 108441
+5oOF5oql 108442
+6aaW5YWI6KaB 108443
+5paH5YyW5ZKM 108444
+6YO95bey57uP 108445
+5LiK5LiW57qq 108446
+5Yac5Zy6 108447
+5aSn5om5 108448
+5piO55m95LqG 108449
+55qE5oiQ6ZW/ 108450
+55qE5q+U6LWb 108451
+5aSx6K+v 108452
+5YGa5oiQ 108453
+5LuK5aSp5bCP57yW 108454
+6aKG6KKW 108455
+5o+Q5Y2H5LqG 108456
+5b6Q5bee 108457
+5LuN5pyJ 108458
+6L+H5ruk 108459
+5bm96buY 108460
+54Ot6YeP 108461
+5LiA6aaW 108462
+5ryC5Lqu55qE 108463
+5Yeg56eN 108464
+5YCh6K6u 108465
+5bCx5Y+v5Lul5LqG 108466
+5o6S5YiX 108467
+6YeN6YeN 108468
+5LyB5Lia5ZKM 108469
+5LiT5bGe 108470
+54WO 108471
+5Lqy5oia 108472
+55m+5YiG5LmL 108473
+56i/5Lu2 108474
+6L+Y5b6X 108475
+5Lq65ZOh 108476
+5LqJ5aS6 108477
+5pu05a655piT 108478
+5aSn6Ieq54S2 108479
+6Zu76IWm 108480
+5aSq56m6 108481
+5Zyw5aSE 108482
+5aSi 108483
+5LuW5a+5 108484
+5b+F5bCG 108485
+5LiN5b2T 108486
+5Lil6LCo 108487
+5Ye65Zy6 108488
+5bey57uP5pyJ 108489
+6aKG5Yab 108490
+6auY5qGj 108491
+5LiA5omA 108492
+5qCX 108493
+6K6p5a2m55Sf 108494
+5pu55pON 108495
+5p+Q5LiA 108496
+5Ly45Ye6 108497
+6Iqx5Y2J 108498
+5riF6YaS 108499
+6IGU57O75pa55byP 108500
+5YiG5bGA 108501
+6IWz 108502
+5qmh6IO2 108503
+6ZW/5b6X 108504
+57u/5Zyw 108505
+6KKN 108506
+55qE6Im65pyv 108507
+5aWz5pyL5Y+L 108508
+5Lit6LaF 108509
+56a75a2Q 108510
+5aSa5qC35YyW 108511
+6Ziz5Y+w 108512
+5L2O56Kz 108513
+5LiA57G7 108514
+562J5pa56Z2i55qE 108515
+5b6X5aW9 108516
+5qih5YW3 108517
+5LiH5Lq/ 108518
+55WZ5oSP 108519
+5Li05rKC 108520
+5bCR6YeP 108521
+55yL5ZCR 108522
+57uP6JCl6ICF 108523
+55WZ5LiL5LqG 108524
+5Z2P5LqG 108525
+5ZGK5Yir 108526
+55yf55CG 108527
+57y06LS5 108528
+5oqK5L2g 108529
+55qE5Lu75Yqh 108530
+5oiR5a+5 108531
+5Lmw5YWl 108532
+55m75LiK 108533
+5pyJ5Lik5Liq 108534
+5LiA5aS0 108535
+5pON5o6n 108536
+5YWo6KaG55uW 108537
+552A5omL 108538
+5aKZ6Z2i 108539
+5aSa5pa5 108540
+5Y+v54ix55qE 108541
+5Lmf5Y+v6IO9 108542
+5pyA5pyJ 108543
+6L+Z5Lqb6YO95piv 108544
+5oOh 108545
+5a6u 108546
+5b6I5bCP 108547
+6Zeu6aKY5piv 108548
+5Z2H5pyJ 108549
+5b6B6ZuG 108550
+6K+05Ye6 108551
+5pyJ5oSP 108552
+6aKC 108553
+5oms5bee 108554
+5ZWG5Lia5qih5byP 108555
+55Sf6IKW 108556
+5o2Q5qy+ 108557
+5bKC 108558
+576O5pmv 108559
+6L+Y55yf 108560
+5oul5oqx 108561
+6Lqr5L2T5YGl5bq3 108562
+5rex5aSE 108563
+55y856We 108564
+55qE5b2i6LGh 108565
+5LyY6LaK 108566
+5b2T5oiQ 108567
+5Yy65YiG 108568
+5Y676Zmk 108569
+5rOo5a6a 108570
+5aeQ5aa5 108571
+5Yy65YaF 108572
+6ama 108573
+5pqX56S6 108574
+5piO5Lqu 108575
+5oWw6Zeu 108576
+5biC5Zy65Lu96aKd 108577
+54yq6IKJ 108578
+55qE6LWE6YeR 108579
+5Y6G57uP 108580
+5aeL57uI5Z2a5oyB 108581
+55Sf5py6 108582
+5LiN6aG+ 108583
+6YeR5Yia 108584
+5aSn5aOw 108585
+6ZmV6KW/55yB 108586
+6bKN 108587
+5Yac5Lia5Yac5p2R 108588
+5pyJ5a6z 108589
+6Zeo6K+K 108590
+5q+P5LiA5qyh 108591
+55qE5Zug57Sg 108592
+6aKd5aSW 108593
+5Y6/57qn 108594
+55qH5ZCO 108595
+5Zu95LyB 108596
+6aaW6YCJ 108597
+57yW5YaZ 108598
+5ou/6LW3 108599
+5YG35YG3 108600
+5LiO5Lit5Zu9 108601
+5Y2W5a62 108602
+57uZ5LuW5Lus 108603
+56We6K+d 108604
+5a245qCh 108605
+5oiR5LiA55u0 108606
+55+l6YGT5LqG 108607
+5Y2S 108608
+5ZKM5Zyw5Yy6 108609
+5LuA5LmI6YO9 108610
+55S75a62 108611
+5pys552A 108612
+5L2Z5ZCN 108613
+5a6h55CG 108614
+5LiA5ZCR 108615
+5Y+R5bGV6LaL5Yq/ 108616
+5Yy66Ze0 108617
+5rOo5YaM6LWE5pys 108618
+55Cm 108619
+5LiN5Y+v5Lul 108620
+55qE5YS/5a2Q 108621
+5YC854+t 108622
+5Lil5qC855qE 108623
+5a6e5L2T57uP5rWO 108624
+5pyJ5p2D 108625
+5oiR5Y+I 108626
+6ZO25rKz 108627
+56uL6ams 108628
+5p2A5LqG 108629
+5YyF5a65 108630
+566h5a62 108631
+6Lqr6auU 108632
+6ZOF 108633
+5bCP5a2Q 108634
+566h55CG57O757uf 108635
+5pyJ55qE5Lq6 108636
+6aOO55S1 108637
+5pm66IO95Yi26YCg 108638
+57K+56Gu 108639
+5oub5ZWG5byV 108640
+5oub5ZWG5byV6LWE 108641
+5LqM5omL6L2m 108642
+5Y6/5aeU 108643
+6Im65Lq6 108644
+5aWV 108645
+6L+O5p2l5LqG 108646
+57uT5p2f5LqG 108647
+55qE5Lyg57uf 108648
+5ou85pCP 108649
+5aWl6L+q 108650
+55aR5oOR 108651
+5LmL5pel6LW3 108652
+5qCH5b+X552A 108653
+5Zyw5Y2A 108654
+6K+g6YeK 108655
+5Yiw5pyf 108656
+5YWo6YO9 108657
+55+t5pqC 108658
+5piv5oiR5Zu9 108659
+5oiR5bey57uP 108660
+5ru05ru0 108661
+5aSp6LWL 108662
+5a+55aW5 108663
+5Y2r55Sf6Ze0 108664
+55Sf5Lqn5Z+65Zyw 108665
+5pel6K6w 108666
+55qE5pWZ5a2m 108667
+5ZOH 108668
+5rCR5LqL 108669
+6L+Y5Y6f 108670
+5omL5Lit55qE 108671
+55qE6Imv5aW9 108672
+5rer 108673
+5Lit5YWx5Lit5aSu 108674
+5YiD 108675
+5ZOE 108676
+5Zyo5LuW55qE 108677
+5bCI5qWt 108678
+5Zy66Z2i 108679
+6YK75bGF 108680
+55eS 108681
+5aaE 108682
+5aSW56eR 108683
+5LiN6YCC 108684
+5Li+5Yqe55qE 108685
+6YK5 108686
+5YWa55qE5bu66K6+ 108687
+55m86KGo 108688
+6Leo55WM 108689
+5rKJ5reA 108690
+5aSn54mH 108691
+6LaK6auY 108692
+5bCG5piv 108693
+6KeJ6YaS 108694
+5YKo5a2Y 108695
+5aKe5aSn 108696
+5LiN6K6p 108697
+5pW05b2i 108698
+5bmz5Y+w5LiK 108699
+5Yeg5L2N 108700
+6K+J5rGC 108701
+5aW95LiN5aW9 108702
+5ZyN 108703
+5paH5pys 108704
+6YCy5YWl 108705
+57SN 108706
+5qC55pOa 108707
+6I2J5qGI 108708
+5YWt5Liq 108709
+5Yu/ 108710
+5Yi25oiQ 108711
+6aWu5rC0 108712
+5rC45oGS 108713
+6Ieq5p2A 108714
+5Y+46ams 108715
+6Zq+54K5 108716
+5Li65oiR5Lus 108717
+5byn 108718
+5Ymp5LiL55qE 108719
+5YeG5aSH5aW9 108720
+55qE5pyA5L2z 108721
+6IGU5ZCI5Lya 108722
+5oKj6ICF55qE 108723
+5oiR5LiN55+l6YGT 108724
+5LiL5LiA5Liq 108725
+5Y+R5bGV5pa55ZCR 108726
+56yo 108727
+5omA5Lul5oiR5Lus 108728
+5YaZ5LqG 108729
+6YCg5oiQ5LqG 108730
+5rKZ5ryg 108731
+562b6YCJ 108732
+54G+5Yy6 108733
+5LiK55yL 108734
+6YW2 108735
+5rua5Yqo 108736
+6Zq+5YWN 108737
+5ZCJ5Yip 108738
+5LiA5LiA 108739
+57K+5a+G 108740
+5Ly45omL 108741
+56S85Luq 108742
+5YWo5piv 108743
+6LaK5aSn 108744
+5Lit5qCH 108745
+5Y+W5Yaz 108746
+5Y+W5Yaz5LqO 108747
+6YCU5Lit 108748
+6K6o5Y6M 108749
+5omL5YaM 108750
+56ys5Lmd 108751
+5a2U5a2Q 108752
+54S25b6M 108753
+5LiA5YWx 108754
+5rW35oql 108755
+5qy+5byP 108756
+5pW05aSp 108757
+6L6555WM 108758
+6Lev6L65 108759
+5pmL57qn 108760
+5ZCQ5qe9 108761
+55qE5YWz5rOo 108762
+5oiR5rKh5pyJ 108763
+5bCx5piv5Zyo 108764
+55uu55qE5piv 108765
+5Y2z5L2/5piv 108766
+6aG25bCW 108767
+5bey57uP5Zyo 108768
+5a6J5YWo6ZqQ5oKj 108769
+5qCH5p2G 108770
+5Y2X6YCa 108771
+5Lya5a+5 108772
+5bqn5L2N 108773
+6LWi5b6X5LqG 108774
+5Y6f5p2l55qE 108775
+6Lqr5Li6 108776
+5Lmm5bqX 108777
+6KKt5Ye7 108778
+5LuK5pma 108779
+5Lul6Imy 108780
+5Lul6Imy5YiX 108781
+5oqW6Z+z 108782
+5Y205rKh5pyJ 108783
+5Lin5aSx 108784
+55qE5bGA6Z2i 108785
+5Y2B5Zub5LqU 108786
+562J55u45YWz 108787
+5rGH5oC7 108788
+5aSW6KGo 108789
+5Li65rCR 108790
+6ZyH5oOK 108791
+5aWX6Lev 108792
+54qv572q5auM55aR 108793
+5bCG5Lul 108794
+546H6aKG 108795
+6YWS5ZCn 108796
+6KGM5Lia5Y+R5bGV 108797
+5bm06Iez 108798
+5Zmo5p2Q 108799
+5ZKM5oqA5pyv 108800
+5pyA5bCP 108801
+6L+Z5LiA5YiH 108802
+6IGM56ew 108803
+5b2T5L2c 108804
+5o6A6LW3 108805
+5ZKL 108806
+5Lit6YOo 108807
+5omL6IeC 108808
+572i5LqG 108809
+5aqz5aaH 108810
+5rS96LCI 108811
+5pe25Luj5Lit5Zu9 108812
+5Lq655Sf55qE 108813
+5p6B6ZmQ 108814
+56aE 108815
+5Yy65pS/5bqc 108816
+5pys6ZKx 108817
+56S85ZOB 108818
+55qE6YKj5Liq 108819
+5L6m5p+l 108820
+5aSq5aSa55qE 108821
+5a6e5pa95pa55qGI 108822
+6auY5qCH5YeG 108823
+5oyH5oyl6YOo 108824
+5YC+5pac 108825
+54m56Imy56S+5Lya 108826
+57WQ5p6c 108827
+6ZK755+z 108828
+56e75qSN 108829
+54m556eN 108830
+6Ieq5oS/ 108831
+5ouc55m7 108832
+5Y2V6Lqr 108833
+5Y205Y+I 108834
+5Yil5Lq6 108835
+5ZCI6KeE 108836
+5py655S1 108837
+54m55oSP 108838
+5b2T5YmN5L2N572u 108839
+5Lmw5a62 108840
+5ZCI57qm 108841
+6IKp6IaA 108842
+5Li65YeG 108843
+5a626KOF 108844
+55qE54Ot5oOF 108845
+6Z2e6YGX 108846
+55qE6a2F5Yqb 108847
+5Y6f5ZGK 108848
+56S+5Lya5ZCE55WM 108849
+5Lmw55qE 108850
+5aSa5ZCD 108851
+6ZuV5aGR 108852
+6LW35LmJ 108853
+5Yqg5Ymn 108854
+6YKj5LiA5Yi7 108855
+5bCG6L+b5LiA5q2l 108856
+5qGC5p6X 108857
+5pu05by6 108858
+5a+55LyB5Lia 108859
+5peg5oSP 108860
+5Lmg6L+R5bmz5paw 108861
+5rWB5aSx 108862
+5b6u6L2v 108863
+55u45a+55LqO 108864
+5bqn6LCI5Lya 108865
+5Li76JCl5Lia 108866
+5Li76JCl5Lia5Yqh 108867
+56eB5Yuf 108868
+5bGV56S65LqG 108869
+5bi45oCB5YyW 108870
+6LK0 108871
+56ym5Y+3 108872
+5bm06L2755qE 108873
+5bCx6ZyA6KaB 108874
+5Lmf5pu+ 108875
+55qE5oOF57uq 108876
+6L6+5qCH 108877
+6Ieo 108878
+5L2N5bGF 108879
+5LuF5Li6 108880
+6aaW5a62 108881
+6Zi06Ziz 108882
+5LiN5YaN5piv 108883
+5Zug5Li65a6D 108884
+5LyB5Lia5Zyo 108885
+55i+ 108886
+5ZCs6KeB 108887
+5Y6f5pyJ 108888
+5Yi26KOB 108889
+5a+C5a+e 108890
+6YCa6L+H5a+5 108891
+5ruR6Zuq 108892
+6L+Z5byg 108893
+55qE55CG6Kej 108894
+5paw5Lit5Zu9 108895
+6L+Z5YS/ 108896
+5L2O5Lu3 108897
+5oOz6L+H 108898
+55qE5L+h5b+D 108899
+5bu6562R54mp 108900
+55qE6aKc6Imy 108901
+5LiN5bqU6K+l 108902
+5peg55aR5piv 108903
+5byV6LW35LqG 108904
+5YWo5ZGY 108905
+5p2w5Ye6 108906
+6L+Z5piv5oiR 108907
+6Kqw 108908
+6JiH 108909
+6Zi15Zyw 108910
+5YWF5YC8 108911
+55+/5Lia 108912
+552A5LuW 108913
+5L+h6K6/ 108914
+5LiH6L6+ 108915
+5pGp5pOm 108916
+5byA56uv 108917
+6I+y5b6L 108918
+6I+y5b6L5a6+ 108919
+6L2m5a2Q 108920
+5pys6Lqr55qE 108921
+54Gr6L2m56uZ 108922
+5bi45bee 108923
+5Li65Luj6KGo 108924
+5Li65Luj6KGo55qE 108925
+5bm/55S1 108926
+5Lqy5Lq6 108927
+5Y+z5omL 108928
+6ZuG6KOF 108929
+6ZuG6KOF566x 108930
+55qE5Y2w6LGh 108931
+5qmf5pyD 108932
+5YyG5YyG 108933
+5YWJ55S1 108934
+5aSn5pa5 108935
+6L+Y5pyq 108936
+5Yip5aW9 108937
+57ud5aSn5aSa5pWw 108938
+5Zyo6L+Z56eN 108939
+5LiA57uE 108940
+5paw6IKh 108941
+6L2s5Y+R 108942
+5rOV5bqt 108943
+5peg5omA 108944
+6YGT6Lev5LiK 108945
+55+/5bGx 108946
+6JGJ 108947
+5pS25Zue 108948
+56ew5LmL 108949
+56ew5LmL5Li6 108950
+5o+t6Zyy 108951
+5Y+j5bK4 108952
+5ZC8 108953
+5b+D5oOz 108954
+55qE5qKm5oOz 108955
+6Zuv 108956
+5LmL5Yid 108957
+5aWW6aG5 108958
+6K6i6ZiF 108959
+6JOd5aSp 108960
+5Z2m5YWL 108961
+56uL5qGI 108962
+6IGU5omL 108963
+5L2G5piv5oiR 108964
+5biu5oiR 108965
+5LuF5Luj6KGo 108966
+6K+05oiR 108967
+55qE6LaL5Yq/ 108968
+5q+U6L6D5aSn 108969
+6LWw5buK 108970
+6YeN54K56aG555uu 108971
+6LWM5Zy6 108972
+5ZCN54mH 108973
+5oSf5Y+5 108974
+5Zyo5Zyw5LiK 108975
+5Y+R54Ot 108976
+6IyD55W0 108977
+55qE6YGT6Lev 108978
+6YeR6Imy 108979
+5LuW5Y+I 108980
+5Lya5Lqn55Sf 108981
+5rCR5Zu9 108982
+5a6Y5pa5572R56uZ 108983
+5pS255uK546H 108984
+55qE5Yiw5p2l 108985
+55qE5Yqe5rOV 108986
+5pS55Yi2 108987
+5LiH56eR 108988
+5LiN5LqI 108989
+6L+Z5Lqb6Zeu6aKY 108990
+54ix5LiK 108991
+55CD5Zy6 108992
+6LSj5Luk 108993
+5o6I6K++ 108994
+5Zyo6aaZ5riv 108995
+57uG6IW7 108996
+5aSa5LiH 108997
+5ZCM5bm0 108998
+5aSn5L2/ 108999
+5paL 109000
+5Lmf5Li6 109001
+5oOg5bee 109002
+5ZCJ56Wl 109003
+55Sw5Zut 109004
+5Zu95a626Zif 109005
+6YeN55Sf 109006
+5Zyo5YW2 109007
+6aaZ5ZGz 109008
+6LSf6I23 109009
+5Lqy5YiH 109010
+6Ieq6LGq 109011
+5rKh6ZSZ 109012
+5Zug5Li65Zyo 109013
+5pif5pif 109014
+6YKR 109015
+6L+Y5pyJ5b6I5aSa 109016
+5pGp5omY 109017
+5pGp5omY6L2m 109018
+5q2l6KGM 109019
+566h55CG5L2T57O7 109020
+6ISa5LiL 109021
+6YGO5Y67 109022
+5rGJ6K+t 109023
+5a+55LiN6LW3 109024
+55qE57uP5Y6G 109025
+5Y+K55u45YWz 109026
+5LiN5bCR5Lq6 109027
+6YeN56OF 109028
+5Yqz5Yqo6ICF 109029
+5aSn5Yqb5Y+R5bGV 109030
+5oCO5LmI5YGa 109031
+54uX54uX 109032
+5Lic5Y2X5Lqa 109033
+5YuH5LqO 109034
+5YWs6ZaL 109035
+55O356CW 109036
+5Y+C54Wn 109037
+5bm/5pKt55S16KeG 109038
+5Li+5Yqo 109039
+5rGf6KW/55yB 109040
+5pWI6IO9 109041
+5ZSv5pyJ 109042
+6Z2i6LKM 109043
+6Ieq5Yqo6am+6am2 109044
+5qac5Y2V 109045
+5b2T5oiR5Lus 109046
+5Luy6KOB 109047
+5pyo5p2Q 109048
+57Gz5YWw 109049
+55m96ZO2 109050
+55qE5Lq66YO9 109051
+5bCx5YOP5piv 109052
+5q2l5YWl 109053
+5Y2g55So 109054
+5Ye76LSl 109055
+6K6p5aSn5a62 109056
+5Lya6K6p5L2g 109057
+5Y6/5pS/5bqc 109058
+6KaB55So 109059
+562J5b2i5byP 109060
+5Y2H6auY 109061
+6LSj5Lu75oSf 109062
+5aSH55So 109063
+5LuW6K6k5Li6 109064
+5riF5Y2O5aSn5a2m 109065
+5LuW6Ieq5bex 109066
+6Zax6K6A 109067
+5aSq5bmz5rSL 109068
+6ZSB5a6a 109069
+562G 109070
+6L+Z54mH 109071
+5omn5pS/ 109072
+6L+U5Zue5pCc54uQ 109073
+5bCx5q2k 109074
+6YGH5Yiw5LqG 109075
+5byA5bmV5byP 109076
+566h55CG6YOo6Zeo 109077
+5ae/5Yq/ 109078
+6K6+5oOz 109079
+5Zub5a2j 109080
+5oqA5pyv5Lq65ZGY 109081
+5beu54K5 109082
+6L6e6IGM 109083
+6ICB5bir 109084
+55qE5oSf5Y+X 109085
+5Lmf6Z2e5bi4 109086
+5bm05LiK5Y2K5bm0 109087
+5oCq54mp 109088
+6IyD5paH 109089
+5oiY5b25 109090
+5ZCr5LmJ 109091
+5YWo6L+H56iL 109092
+6ICM6Z2e 109093
+6YCa6K6v5ZGY 109094
+6L+Z5qC35omN6IO9 109095
+5py657uE 109096
+6KOP 109097
+55W254S2 109098
+6LWM5Y2a 109099
+5ZCE5pyJ 109100
+5bel5L2c5py65Yi2 109101
+5LqL5ZCO 109102
+5Ymn6Zmi 109103
+5bGK5pe2 109104
+5Zi06YeM 109105
+5Li757q/ 109106
+5LiA5ZyI 109107
+5Li76KaB5Y6f5Zug 109108
+5bC45L2T 109109
+5Yy755aX5Zmo5qKw 109110
+5L2g5oCO5LmI 109111
+5L2G55Sx5LqO 109112
+5pe256m6 109113
+55S35pyL5Y+L 109114
+55Sc6Jyc 109115
+6auY5Zyw 109116
+5pmW 109117
+6JKQ6ZuG 109118
+5Yed6IGa5Yqb 109119
+5aSH5Y+X 109120
+5paH5Yib 109121
+6ams5p2l 109122
+6ams5p2l6KW/5Lqa 109123
+5p+05rK5 109124
+5L2/5Lq6 109125
+5pWZ5Lya 109126
+56eL5aSp 109127
+5piO54+g 109128
+5YWt5Y2B 109129
+546v5aKD5Lit 109130
+5riF5pmo 109131
+56ev5p6B5Y+C5LiO 109132
+5beF5bOw 109133
+5Li65pyf 109134
+562+5a2X 109135
+5oSf5r+A 109136
+56eL5a2j 109137
+5p2R5a2Q 109138
+5qKF6KW/ 109139
+5pq06Zuo 109140
+55Sf5rS75Zyo 109141
+56qX5oi3 109142
+5oG25Yqj 109143
+57qv57K5 109144
+5Zyo5o6l5Y+X 109145
+5rKh6IO9 109146
+6KGM5Lq6 109147
+5Yu6 109148
+5ouo5omT 109149
+5L2c5Ye65LqG 109150
+55qE5Li76aKY 109151
+5pyq5L6G 109152
+5Lit5pyA 109153
+5r6c 109154
+6auY6KGA5Y6L 109155
+5YW06LW3 109156
+5q2j6IO96YeP 109157
+5Z+56K6t54+t 109158
+5o6l5YWl 109159
+54S25ZCO5YaN 109160
+5a2m55Sf5Lus 109161
+6aKG5YWI55qE 109162
+54Gr54Ot 109163
+5LiT6IGM 109164
+5oiW6ICF6K+0 109165
+5bu66Kit 109166
+6buP 109167
+5a+55YWs5Y+4 109168
+54m55pyJ55qE 109169
+5YWJ6I2j 109170
+5b2T5Zy6 109171
+6Z2i5a2Q 109172
+6LWE5Lqn566h55CG 109173
+5pe25pyf55qE 109174
+556O 109175
+5Y2O5Lic 109176
+5Y+I5LiA5qyh 109177
+6IOO5YS/ 109178
+5a6a54K5 109179
+5aS055eb 109180
+5ray5L2T 109181
+5piv5LiA5L2N 109182
+5bi95a2Q 109183
+5bm06LW3 109184
+5LiN5L2O5LqO 109185
+6L6D5bCR 109186
+6Z2i5Li0552A 109187
+5bGC5bGC 109188
+6J206J22 109189
+6Imw6Ium 109190
+6Zi/5qC5 109191
+6Zi/5qC55bu3 109192
+5qaC5ous 109193
+6K+36Zeu 109194
+6LW35bqK 109195
+5bGA5bGA6ZW/ 109196
+56iz5YGl 109197
+5aaC5p6c5oiR5Lus 109198
+6YWS57K+ 109199
+5oi35Y+j 109200
+5oSf5oKf 109201
+5oiR5Lus6ZyA6KaB 109202
+5oqA6Im6 109203
+6Ieq5aqS5L2T 109204
+6L+b5YyW 109205
+5r+A54OI55qE 109206
+5L2T5rip 109207
+6JqV 109208
+6Ie06L6e 109209
+5a6q5rOV 109210
+5LiA562J5aWW 109211
+55O26aKI 109212
+5oOg5rCR 109213
+6LWw6Lev 109214
+546w5Lu7 109215
+5ZWG6YeP 109216
+5LiL6L2m 109217
+5Yig 109218
+6LKs5Lu7 109219
+6J6N5ZCI5Y+R5bGV 109220
+57Sg5p2Q 109221
+5rK55Lu3 109222
+5YGa5Lq6 109223
+556q 109224
+5pS56Z2p5Yib5paw 109225
+55qE5Yy65Yir 109226
+6Leo5aKD55S15ZWG 109227
+5raJ5Y+K5Yiw 109228
+5omY566h 109229
+5oiR6L+Y5piv 109230
+5Z2Q5qCH 109231
+572R6K6v 109232
+5b2T5Zyw55qE 109233
+6L+95rqv 109234
+5Zyf6ICz 109235
+5Zyf6ICz5YW2 109236
+5bqV5LiL 109237
+5Yeg5Y2B5bm0 109238
+56m/6L+H 109239
+55Sf5oCB5paH5piO 109240
+5o6o6JY= 109241
+5o6o6Jam 109242
+6aCG 109243
+5ZKz5Ze9 109244
+5YiG5oiQ 109245
+55eV6L+5 109246
+5oi357GN 109247
+6YO95LiN6IO9 109248
+5pma5Lya 109249
+5YCp 109250
+5L2T5Yqb 109251
+6L+Z5Liq6IGM5Lia 109252
+5peg5b2i 109253
+5Y+q5oOz 109254
+6L+b5Y+W 109255
+5p2A5q27 109256
+6ISK 109257
+5LqR5Y2X55yB 109258
+5pyq55+l 109259
+576O6IGU 109260
+576O6IGU5YKo 109261
+5aSW5b2i 109262
+6K+x5oOR 109263
+55uj 109264
+6KGM5L2/ 109265
+5aCG56ev 109266
+54af57uD 109267
+6ZiQ6L+w 109268
+5pyA5aSn6ZmQ5bqm 109269
+5beh5p+l 109270
+5aS65Yag 109271
+5LyB5Lia5paH5YyW 109272
+54uu5a2Q 109273
+5L+d5a6I 109274
+5Li65qC45b+D55qE 109275
+5omp5pWj 109276
+5Yi26YCg5ZWG 109277
+5p+U6L2v 109278
+5Li65LiA5L2T55qE 109279
+5ri4546p 109280
+55Sf55eF 109281
+5bmr5Yqp 109282
+5ZSx5q2M 109283
+5omN5Y+v5Lul 109284
+5a695p2+ 109285
+6KaB5q+U 109286
+5piv5oCO5qC3 109287
+54Gw6Imy 109288
+546L5Zu9 109289
+5pCF5ouM 109290
+6K6h6YeP 109291
+5ZGo5Zu055qE 109292
+5pm66IO95omL5py6 109293
+5bi45Yqh 109294
+5bi45Yqh5Ymv 109295
+6am0 109296
+5bCG6L+R 109297
+5a+75bi4 109298
+5Lit5Zu95biC5Zy6 109299
+5a655Zmo 109300
+5bGx5LiK 109301
+6IOM5ZCO55qE 109302
+5Lqy5a+G 109303
+5omA5Lul6K+0 109304
+6Y6u 109305
+55qE55CG55Sx 109306
+5aSn5Z+O5biC 109307
+5bi45bm0 109308
+5peF5ri45Lia 109309
+5bCx5piv6L+Z5qC3 109310
+5YaN5p2l 109311
+6auY5L2N 109312
+5YaF6aWw 109313
+5p6E6YCg 109314
+5LiA6LW35p2l 109315
+55Sz6KuL 109316
+5bey57uP5byA5aeL 109317
+55qE5Yqo5L2c 109318
+6KKr6L+r 109319
+6YGN5biD 109320
+5YmW5p6Q 109321
+5bCP5LqL 109322
+5b+D5Lit55qE 109323
+5L2T5Yi25pS56Z2p 109324
+55qH5a62 109325
+5pWZ5aCC 109326
+5ZCD5a6M 109327
+5Zu95rCR5YWa 109328
+5piO56Gu5LqG 109329
+5Y+R5bGV6KeE5YiS 109330
+56ys5LiA5q2l 109331
+5b6X6LW3 109332
+5Zyo5ZOq 109333
+55qE6Lev5LiK 109334
+6buU 109335
+55W25pmC 109336
+5aSn5Yqb5pSv5oyB 109337
+5Y+M6YeN 109338
+55+l6YGT6Ieq5bex 109339
+5ZCI5L2c5Y2P6K6u 109340
+5rCU5Yq/ 109341
+6ZW/5pWI5py65Yi2 109342
+572V6KeB 109343
+5Zue5p2l5LqG 109344
+5LuW5Lya 109345
+5Lit5paw 109346
+5Lit5paw572R 109347
+55qE5ZWG5ZOB 109348
+6LWg6YCB 109349
+5rG65a6a 109350
+5biC5Zy655uR566h 109351
+55WZ5a2m55Sf 109352
+55S15Y6L 109353
+5Lqa6ams 109354
+5Lqa6ams6YCK 109355
+6L+Y5piv5q+U6L6D 109356
+5L+D6L+b5LqG 109357
+5rWB5YWl 109358
+5pGE5YOP 109359
+5pGE5YOP5aS0 109360
+5o+Q5Y+K 109361
+5Y+R5o6Y 109362
+5om+5Ye6 109363
+5qKd5Lu2 109364
+57m857qM 109365
+5oiR5Zac5qyi 109366
+5aWO 109367
+5qac5qC3 109368
+5byA6Iqx 109369
+5rKJ6YeN 109370
+5Z+65YeG 109371
+5LuF5LuF5piv 109372
+6L2o6YGT5Lqk6YCa 109373
+5ZSQ5bGx 109374
+562J5LiA57O75YiX 109375
+5LiN6L+H5piv 109376
+5a2Y5Zyo552A 109377
+6Iqx55Sf 109378
+5aS3 109379
+57uI56m2 109380
+5Lmf5piv5LiA5Liq 109381
+5Y2B5a2X 109382
+6Jaq6YWs 109383
+5Lyk5b+D 109384
+5pil56eL 109385
+5Ya35Y20 109386
+57K+54G1 109387
+55qE5Zyw5Zu+ 109388
+5q+U54m5 109389
+5q+U54m55biB 109390
+5oCn5Yir 109391
+5L2Z5LiH5YWD 109392
+5LiN5b+Y5Yid5b+D 109393
+5b+D55a8 109394
+5puy57q/ 109395
+6auY5L2O 109396
+6KaP5a6a 109397
+5pmv6Imy 109398
+6KaB6K+0 109399
+5YWs5Y+45bCG 109400
+5ray5Y6L 109401
+6L+d57qm 109402
+5Y6a5bqm 109403
+5bqe5aSn55qE 109404
+6L+Y5piv5b6I 109405
+6aaW5YWI5piv 109406
+57Wy 109407
+5Yqh5a6e 109408
+5Lim5LiU 109409
+5aKe6L+b 109410
+57uE57uH5byA5bGV 109411
+6LW35p2l5LqG 109412
+6L6D5bCP 109413
+5a+85ri4 109414
+5Lik5Zyw 109415
+57+Y 109416
+54G/54OC 109417
+6aOO6YeH 109418
+5pSv57q/ 109419
+5pSv57q/5Lu75Yqh 109420
+5aix5LmQ5ZyI 109421
+5aSp5rSl5biC 109422
+5YyF5Zu0 109423
+5pys6LWb5a2j 109424
+6YeN6KaB6K6y6K+d 109425
+5Y+M5ZCR 109426
+5Y2O5Li9 109427
+6ZSk 109428
+5YS/5aWz 109429
+5Y2W5Ye6 109430
+5L6G6Kqq 109431
+5LuL57uN5LiA5LiL 109432
+5ZCm6K6k 109433
+5Yud 109434
+5pmu6YCa5Lq6 109435
+55qE5Yqo5Yqb 109436
+5rao5YGc 109437
+5Z+66YeR566h55CG 109438
+5LiA5Liq6YeN6KaB 109439
+6L+Q5rKz 109440
+54We 109441
+6LSi5pS/6YOo 109442
+6KGM5Lia5Y2P5Lya 109443
+6YO95bCG 109444
+6KiA6K66 109445
+5LiL5L6G 109446
+5aKo6KW/ 109447
+5aKo6KW/5ZOl 109448
+5Zug5Li65LuW5Lus 109449
+5oCO5LmI5Zue5LqL 109450
+5Yqg5aSn5a+5 109451
+6Iqt 109452
+54mM5a2Q 109453
+5Lya5L2/ 109454
+5aa55a2Q 109455
+56uZ6ZW/ 109456
+5b+F5aSH 109457
+5qCR5pyo 109458
+5oG25oSP 109459
+5rKz6YGT 109460
+5a+M6KOV 109461
+57mB5Y2O 109462
+5Luj6KGo5Zui 109463
+5rWR6Lqr 109464
+6aaW5L2N 109465
+6Iiq56m65YWs5Y+4 109466
+6Zu75b2x 109467
+5LiT6L6R 109468
+5rC05rqQ 109469
+5Lit5q+S 109470
+5Lim5LiN 109471
+6ICM5Y67 109472
+6YOd 109473
+5LqO5q2k 109474
+5paH5YyW5bu66K6+ 109475
+6IKv5a6a5Lya 109476
+5biM5pyb5aSn5a62 109477
+5o+P5YaZ 109478
+5L2O6LCD 109479
+5paw5YW05Lqn5Lia 109480
+5reE5Y2a 109481
+5pS+5byA 109482
+55qE5oCn5qC8 109483
+55a+55eF55qE 109484
+5pW06aG/ 109485
+57q/5LiK57q/5LiL 109486
+6YCJ6aG5 109487
+55qE6K6k5Y+v 109488
+5pW06b2Q 109489
+55Sa5LmI 109490
+55yB5YaF 109491
+5Y+k5Lq6 109492
+5rCR5L+X 109493
+54mh5Li5 109494
+6Zeo56qX 109495
+6YKj5qC355qE 109496
+55uR5LqL5Lya 109497
+57+h57+g 109498
+56a5 109499
+5Y2D5LiH5LiN6KaB 109500
+5pS257yp 109501
+55qE5paH5a2X 109502
+5ZKM5bCa 109503
+5oyH5Luk 109504
+5YWx5Lqn5YWa5ZGY 109505
+55qE54i25Lqy 109506
+5a6M5bel 109507
+5Yqh5bel 109508
+6ams5ouJ 109509
+6ams5ouJ5p2+ 109510
+5rWL6K+E 109511
+5bKa 109512
+5LiN5YGa 109513
+5LiD5bm0 109514
+5Z2H5Lu3 109515
+5Li76KeC 109516
+5b6I5LiN6ZSZ 109517
+6IKh5Lic5aSn5Lya 109518
+5LqU5LiA 109519
+6aOO5ZC5 109520
+5byA6YeH 109521
+6L+Z5LmI5aSn 109522
+6IO955yL5Yiw 109523
+6ICD6K+E 109524
+5Y2z5L6/5piv 109525
+546w5Luj5Yac5Lia 109526
+5q+U6L6D6auY 109527
+6KaB55yL 109528
+5rKh5LqG 109529
+6Kej5rG6 109530
+546v5q+U 109531
+5Yay5Yqo 109532
+5rex5aSc 109533
+5Yeg5Y2D 109534
+5L+P 109535
+572R5rCR 109536
+5bCx5rKh 109537
+5LuW6KGo56S6 109538
+6YeP5a2Q 109539
+5pep6aSQ5Yqg55uf 109540
+5Y2K5bKb 109541
+5pCe56yR 109542
+5LiK5oql 109543
+5a+p 109544
+6aKE6K6i 109545
+6JyC6Jyc 109546
+5p+l5om+ 109547
+5LyX5omA 109548
+5LyX5omA5ZGo 109549
+5LyX5omA5ZGo55+l 109550
+5pep5pel 109551
+5Y+R5oms 109552
+5ZKM5Liq5Lq6 109553
+5Yqg5YWl5LqG 109554
+5Zau5L2N 109555
+5YiG5piO 109556
+56ys5LiA5om5 109557
+576O5Yab 109558
+5p2A5omL 109559
+6Zeo5aSW 109560
+5ZWG5ZyI 109561
+5LiA5Yi7 109562
+55qE55y856We 109563
+6ZyE 109564
+5Lqb5LuA5LmI 109565
+5Yqg5rex 109566
+5q+P5L2N 109567
+5biC6Z2i5LiK 109568
+5Y+U5Y+U 109569
+55qE6YKj56eN 109570
+57Kk5riv5r6z 109571
+6LS05b+D 109572
+5paH5YyW5Lqn5Lia 109573
+57qi5peX 109574
+5ZiJ5YW0 109575
+5pS255uY 109576
+5a6M5oiQ5ZCO 109577
+5LyB5Lia566h55CG 109578
+57q15qiq 109579
+5LiN5L+h 109580
+5oiQ6YO95biC 109581
+5rSX5r6h 109582
+5Li+6KGM55qE 109583
+55Si55Sf 109584
+56m/5LiK 109585
+5Yia5aW9 109586
+5YWJ57q/ 109587
+5omT5p62 109588
+6L+Z5pys5Lmm 109589
+5ZSu5ZCO5pyN5Yqh 109590
+5Yeg5YiG 109591
+5LiK5qyh 109592
+5LiN5YiG 109593
+5Lqn5ZCO 109594
+6YG/5byA 109595
+57uI5p6B 109596
+5Luj6KGo5aSn5Lya 109597
+5ryU5oqA 109598
+5Zue6LSt 109599
+5a2m6LS5 109600
+6Zi756KN 109601
+5LiA5aSn5om5 109602
+56uj5bel 109603
+5Yaz5a6a5LqG 109604
+5L2G5aaC5p6c 109605
+55S15rWB 109606
+5Lid5q+r 109607
+6IO95aSf5Zyo 109608
+6ZSA5ZSu5pS25YWl 109609
+5Zyo5a2m5qCh 109610
+5rC05YeG 109611
+6KeG57q/ 109612
+6Ieq5Zyo 109613
+5ZWG5Lia6ZO26KGM 109614
+5Li65LqG6K6p 109615
+542y5b6X 109616
+546p5a625pyL5Y+L 109617
+6Z2i6Iac 109618
+5YiG5Ymy 109619
+5Ymn5pys 109620
+56ut 109621
+6K+05b6X 109622
+5oOz55+l6YGT 109623
+55qE5Lq654mp 109624
+6IyF5Y+w 109625
+5ZCM5LiA5Liq 109626
+5pWw5o2u5Lit5b+D 109627
+55SE 109628
+5Zac5oKm 109629
+5LiL5p2l55qE 109630
+5a6a5ZCR 109631
+5p6B5YW3 109632
+55qE5Zyf5Zyw 109633
+6YKj5YCL 109634
+5pGE5YWl 109635
+5LqG5oiR55qE 109636
+6ams6Lev 109637
+5YWo56S+5Lya 109638
+6K6u5qGI 109639
+5bGL5a2Q 109640
+5ZCN5Y+r 109641
+5Yyq 109642
+5Zyo5aSW6Z2i 109643
+5Y2O5Y2X 109644
+5Y+R6LSn 109645
+5a+S5Ya3 109646
+6auY562J5pWZ6IKy 109647
+6K+m57uG55qE 109648
+5Liq6aG555uu 109649
+55Sf5Lqn5Yqb 109650
+5pe25bi4 109651
+5bCx5pyD 109652
+5LiH6IKh 109653
+6ZmM55Sf5Lq6 109654
+5o+P57uY 109655
+5b2T54S25piv 109656
+5ouJ5Yqo 109657
+6ZO+5p2h 109658
+5omj6Zmk 109659
+5LiA55u06YO9 109660
+5bCP5a2p5a2Q 109661
+5Lyk5Y+j 109662
+56ys5LqM5bGK 109663
+6LSt572u 109664
+55qH6ams 109665
+5peg6IGK 109666
+6KGo5Yaz 109667
+6K+45aaC 109668
+5ZON6LW3 109669
+6aOO5pq0 109670
+5LiA5rWB55qE 109671
+57eo 109672
+6Kej5pS+5Yab 109673
+5a6k5aSW 109674
+5bCx6L+Z5LmI 109675
+5bO2 109676
+5omA5pyJ5Lq66YO9 109677
+5pCc57Si5byV5pOO 109678
+55qE5oiQ5pys 109679
+5YWa5pS/ 109680
+5Y+R6KGM5Lq6 109681
+55qE5LqL5a6e 109682
+5a+56K+l 109683
+5Y+X5o2f 109684
+5L+E5LmM 109685
+6bKc6Iqx 109686
+5Yac6I2v 109687
+5p6B6YCf 109688
+5oCl5oCn 109689
+5Lik5Lya 109690
+5LiA6Iis5p2l6K+0 109691
+5rW36bKc 109692
+5YaI 109693
+55So5Lq6 109694
+55So5Lq65Y2V5L2N 109695
+5YCq 109696
+5YSq5oOg 109697
+5qC55rqQ 109698
+5Zui6LSt 109699
+576O5rSy 109700
+5LiL6KGM 109701
+5bm05pyr 109702
+6Jyh 109703
+6K+B5Lu2 109704
+5Zyo5oiR5Zu9 109705
+5LiN5bqU 109706
+5oyJ5pe2 109707
+5aCq56ew 109708
+5Zy65LiK 109709
+5bmy6YOo6IGM5bel 109710
+5pyJ5b6I5aSn55qE 109711
+5pWw5a2X57uP5rWO 109712
+5ryU57uD 109713
+5o2u57uf6K6h 109714
+5b6A5p2l 109715
+5bm/5ZGK5pyN5Yqh 109716
+55qE6Led56a7 109717
+5q24 109718
+6KiA6K+t 109719
+6KKr6KqJ 109720
+6KKr6KqJ5Li6 109721
+5YuJ5by6 109722
+5bCK5pWs 109723
+5LiH5Lq/5YWD 109724
+5Lit5Zu95Zu96ZmF 109725
+5bmy6aKE 109726
+5bm05Lqn 109727
+6ICV5Zyw 109728
+6IyO 109729
+5Y2z5piv 109730
+5pio5pma 109731
+5oiQ5Li65LiA5Liq 109732
+57qg5q2j 109733
+5ZG95ZCN 109734
+6aKB5biD 109735
+54yc5rWL 109736
+5L+d6K235pS/562W 109737
+5oui 109738
+5rS75rO8 109739
+562J6YOo6Zeo 109740
+5a2m5Yiw 109741
+5aKe5YC856iO 109742
+6Iiq57q/ 109743
+5Yak 109744
+5Y2B5Yeg5bm0 109745
+5o6n6IKh6IKh5Lic 109746
+5LiA6Zeo 109747
+5Liq5bel5L2c 109748
+5Liq5bel5L2c5pel 109749
+5paw6KW/ 109750
+5paw6KW/5YWw 109751
+6K666K+B 109752
+5LuG 109753
+5Y+m5aSW5LiA5Liq 109754
+5pS557yW 109755
+5Lil56aB 109756
+5Zac5aW9 109757
+5Liq5Lq65L+h5oGv 109758
+5ruh5oSP5bqm 109759
+5ZOo 109760
+5biI6LWE 109761
+5pS55Li6 109762
+56ue5LqJ5a+55omL 109763
+5Ye654KJ 109764
+5ZWG5Lq6 109765
+5aSn5qOa 109766
+5oyH5a+85LiL 109767
+5aaH56eR 109768
+6Lyq 109769
+5omB 109770
+5ZCM5pe26L+Y 109771
+5bm26YCa6L+H 109772
+5oiY6Zif 109773
+6JST5bu2 109774
+5L+e 109775
+6YCC5b2T55qE 109776
+5YmN6L6I 109777
+5ZOB5ZGz 109778
+5rm/5Zyw 109779
+5oiQ5Z6L 109780
+5LiN5Y+q5piv 109781
+5oOp572a 109782
+5Ye65Y+w5LqG 109783
+546p5ri45oiP 109784
+5omN5Y+R546w 109785
+5bqU6IGY 109786
+5aSW5p2l 109787
+5Y2g6aKG 109788
+5bGV5pyb 109789
+5auC 109790
+5riv6IKh 109791
+5qGM5LiK 109792
+5pSv5p+x 109793
+55qE5oOF5b2i 109794
+5bm/6ZiU55qE 109795
+5pSv6KGM 109796
+5bSp5rqD 109797
+5pyI5Lit 109798
+5pyI5Lit5pes 109799
+57uN5YW0 109800
+5Li06L+R 109801
+5oqk5qCP 109802
+5pqu 109803
+5Y2V6IGM5Lia 109804
+6L655aKD 109805
+5pel54Wn 109806
+5LiA5aCG 109807
+55u05b6E 109808
+5YWx5ZCM5L2T 109809
+5paw5Y2O572R 109810
+5omT5aW9 109811
+55S15Yqo5rG96L2m 109812
+5LiN5piO55m9 109813
+6YCZ6KOh 109814
+55ub5aSn 109815
+546L5pyd 109816
+5YaN5LiA5qyh 109817
+5Yqe5YWs5Y6F 109818
+6LSo5oq8 109819
+5ZCI5Ye7 109820
+5Lq65Lus5a+5 109821
+6Zu26aOf 109822
+6YO95LiN55+l6YGT 109823
+55qE6K+t6KiA 109824
+5Yuf6ZuG6LWE6YeR 109825
+5Yqo6ISJ 109826
+5b2k 109827
+6L+Z5Yeg5bm0 109828
+55+t6KeG6aKR 109829
+5aSq6auY 109830
+5bi45aeU5Lya 109831
+5Yqg54+t 109832
+6YeN5b+D 109833
+5aqS5L2T5oql6YGT 109834
+5rKh5rOV 109835
+6Ze75ZCN 109836
+54Ot5bqm 109837
+5bm/5rOb55qE 109838
+5YWt5aSn 109839
+54mp5L2T 109840
+5LiN6K+l 109841
+6aKY5Li7 109842
+57K+5b2p55qE 109843
+5Li66L+b5LiA5q2l 109844
+6Jme 109845
+5Zu654S2 109846
+6LS15bee55yB 109847
+57qg57uT 109848
+5Luj55CG5Lq6 109849
+5rOV5a6a5Luj6KGo 109850
+5Y+m5LiA56eN 109851
+5LiN5ZCr 109852
+5ouv5pWR 109853
+5Lya57uZ 109854
+6K+X6K+N 109855
+5ZCM57G7 109856
+5b6X5LiN5Yiw 109857
+5oqT57Sn 109858
+5Lul5YW2 109859
+5YWl5YWa 109860
+6L+Y5Y+v 109861
+5pyf5YiK 109862
+5b6I5aSa5pe25YCZ 109863
+5pel5ZCO 109864
+5YWs57qm 109865
+5LiA5Li+ 109866
+5q+U6L6D5aSa 109867
+6YeR5rKZ 109868
+5o2e 109869
+5o6S5Ye6 109870
+5q2m5pyv 109871
+5LiN5pa3 109872
+5Lit6ICD 109873
+5L+h6LWW 109874
+5LuO5Lia5Lq65ZGY 109875
+54Gr54Sw 109876
+6YaS5p2l 109877
+5L2O5rip 109878
+6YC+5pyf 109879
+5Yqx5b+X 109880
+6YWl 109881
+5Y+v6LCT5piv 109882
+6L+Z5oSP5ZGz552A 109883
+6aKg6KaG 109884
+5YyX5Lqs5aSn5a2m 109885
+5LiT57q/ 109886
+5Y+K5Lul5LiK 109887
+6Kiq 109888
+6ICM5ZCO 109889
+55+l5LmO 109890
+5LiA5a+55LiA 109891
+5aiD5aiD 109892
+54G+6Zq+ 109893
+5YWo5bGA 109894
+5omA5b6X56iO 109895
+5a6e5oOg 109896
+6JqC6JqB 109897
+5Lmf55+l6YGT 109898
+5rip5ZKM 109899
+6JC95LiL 109900
+5Z6L5LyB5Lia 109901
+5YaN5Lmf 109902
+5L6b54Ot 109903
+6auY5r2u 109904
+54CP6Ka95Zmo 109905
+55qE5beo5aSn 109906
+5YWI5aSp 109907
+5bm05Lit5Zu9 109908
+57G75Ly855qE 109909
+55CG5LqL5Lya 109910
+56m66ZaT 109911
+54G15oSf 109912
+5Yqb5rCU 109913
+5bim5LiK 109914
+5LiN5aW95oSP5oCd 109915
+5pyJ5L2V 109916
+5bey5Zyo 109917
+5Y+W5Ye6 109918
+6L+d5rOV54qv572q 109919
+5a2m5Lmg6LSv5b27 109920
+5Zyw5bim 109921
+5qW85qKv 109922
+562J5oOF5Ya1 109923
+5LuO5YmN 109924
+55qE5Lmg5oOv 109925
+57Of57OV 109926
+5bCx6IO95aSf 109927
+6KmV 109928
+5LiA5b6L 109929
+5oyr5oqY 109930
+5Y6f5paH5Zyw5Z2A 109931
+5b2T5bGA 109932
+5LiN6YCa 109933
+5pWw5Y2D 109934
+6Zif5LyN5bu66K6+ 109935
+5pe26IqC 109936
+5YGa6LW3 109937
+55qE6K6w5b+G 109938
+572R57uc5a6J5YWo 109939
+5Yeh5piv 109940
+5rCv 109941
+6ZuV5Yi7 109942
+5Z+D5Y+K 109943
+5oiR5Y+v5Lul 109944
+55uR55CG 109945
+5pu05YW3 109946
+5Z+O566h 109947
+6Iuv 109948
+5Y+l5a2Q 109949
+6Iul5pyJ 109950
+5LuO5p2l5LiN 109951
+55u45YWz6LSf6LSj 109952
+5a6J5YWo5oSf 109953
+5pu06KaB 109954
+55qE5oOF5oSf 109955
+54mi54mi 109956
+6L6D5aW955qE 109957
+5rCu 109958
+56yR6K+d 109959
+6L2m5bGV 109960
+5LmL576O 109961
+566A57qm 109962
+57G75Z6L55qE 109963
+6ICB5YyW 109964
+55yL5L2g 109965
+6L+H5YiG 109966
+6Zeo5YmN 109967
+5LiA6Ze0 109968
+5oOz5Y67 109969
+5aqb 109970
+5Zyf6LGG 109971
+5Y+I56ew 109972
+5Lit5L+h 109973
+5a2Y6YeP 109974
+6ams5LqR 109975
+6Ie05L2/ 109976
+5YWI5YmN 109977
+6ICB5a2Q 109978
+5omT5omu 109979
+5q+V5Lia5LqO 109980
+5q+V5Lia5ZCO 109981
+576O5aW955Sf5rS7 109982
+5bel5Lia5LyB5Lia 109983
+5bCx5aW95LqG 109984
+6IWQ6JqA 109985
+54+N54+g 109986
+5Yiw6L+Z6YeM 109987
+5omA6ZyA55qE 109988
+6L+Z5piv5Zug5Li6 109989
+55CG5oOz55qE 109990
+5beu5byC5YyW 109991
+6a4= 109992
+6a6u 109993
+5Lqa5aSq 109994
+5peg56m3 109995
+5o+Q546w 109996
+5LiT5Lia5oqA5pyv 109997
+55Si5qWt 109998
+5a2m5a2Q 109999
+56eR5bm7 110000
+5Y2g5Zyw6Z2i56ev 110001
+5LiN5YeG 110002
+5pyq5oiQ5bm05Lq6 110003
+5pS25b2V 110004
+6L+Y5qy+ 110005
+6ZKi562L 110006
+5ryi 110007
+5b6X5oSP 110008
+57u85ZCI5L2T 110009
+5p6B6auY 110010
+5Y2V6K+N 110011
+6auY5pWI55qE 110012
+6aqo5aS0 110013
+5omn552A 110014
+55ub5LiW 110015
+5qih54m5 110016
+5pu06IO9 110017
+57ud5pyb 110018
+5a+55bqU55qE 110019
+5qiK 110020
+5paw5LiJ 110021
+5paw5LiJ5p2/ 110022
+5oGw5oGw 110023
+5ZCN5a62 110024
+5qC45b+D5oqA5pyv 110025
+5Liq5bCP 110026
+5oCO5LmI5Lya 110027
+6K+05LiN5a6a 110028
+6KW/55Oc 110029
+5ZOO 110030
+56Kf 110031
+5b+F5LiN5Y+v 110032
+5b+F5LiN5Y+v5bCR 110033
+5LmL6ZaT 110034
+5YiG566h 110035
+5Lqk6YCa5LqL5pWF 110036
+5byA5Yqe 110037
+5b6B5rGC5oSP6KeB 110038
+5Lqo 110039
+6Zu75a2Q6YO1 110040
+6Zu75a2Q6YO15Lu2 110041
+5L+h5oGv5pyN5Yqh 110042
+5L2g6KeJ5b6X 110043
+55u06KeC 110044
+5bey5a6M5oiQ 110045
+5YiG5Lya 110046
+5Zue5Y2H 110047
+6Zq7 110048
+5aW95Lq6 110049
+5LqG6Kej5LiA5LiL 110050
+5Y2r5rW0 110051
+5pyA54ix 110052
+5bqe5aSn 110053
+5a6i5oi/ 110054
+55Ge5YW4 110055
+6YO95LiN5piv 110056
+6aSo 110057
+6JeJ 110058
+55qE5ZCE6aG5 110059
+5Li655uu5qCH 110060
+55qE6K6k55+l 110061
+5b2x5ZON5Yqb55qE 110062
+5aS45byg 110063
+5L2p5oi0 110064
+5rGH546H 110065
+55qE54ix5oOF 110066
+5pil6aOO 110067
+5piv5oiR55qE 110068
+5qi5 110069
+5Y2K5bCP5pe2 110070
+5bGx5Y6/ 110071
+5bGx6KW/55yB 110072
+6ICM6L+Z 110073
+5pu05aSa5L+h5oGv 110074
+6L+Y5pyJ5LiA5Lqb 110075
+57K+57uG5YyW 110076
+576O5a2m 110077
+55Sx5pa8 110078
+5LuF5L6b5Y+C6ICD 110079
+5b6I6auY55qE 110080
+5Y+g5Yqg 110081
+6L+Z5LmI6K+0 110082
+5bGV5Ye6 110083
+5Zub5aSE 110084
+5LiH5a62 110085
+5oub5Yuf 110086
+55qE5by65aSn 110087
+5oKj5pyJ 110088
+5bCP5LqO 110089
+5Lmf6K645piv 110090
+5a+56Ieq5bex55qE 110091
+6IGM5Lia5pWZ6IKy 110092
+5p2l6L+b6KGM 110093
+5qGj5qyh 110094
+5omT6LWi 110095
+6YO95pyJ552A 110096
+5bq4 110097
+6K+t5rCU 110098
+55Sy6Yab 110099
+56m65Yab 110100
+6L2m5YaF 110101
+5Zug5Li65L2g 110102
+5a6e5pWI 110103
+5oOF5L6j 110104
+5Y+R6L6+5Zu95a62 110105
+6ZWc5a2Q 110106
+5q+N5am0 110107
+5L2G5piv5LuW 110108
+56ev5p6B5o6o6L+b 110109
+5aSn5bmF5bqm 110110
+55qE5aWz5YS/ 110111
+6aSQ5qGM 110112
+5ZCs5b6X 110113
+55qE56ev5p6B5oCn 110114
+5aW95ZCn 110115
+5pel5raI5oGv 110116
+5pyJ5Lu75L2V 110117
+5q+S5ZOB 110118
+5pep54K55Yqg55uf 110119
+56ys5LiA5aSp 110120
+5bC95Yqb 110121
+5qCW 110122
+5Li75omT 110123
+5piv5LiA5ZCN 110124
+54iG5paZ 110125
+5LqL5Lia5Y+R5bGV 110126
+5b6u5ZWG 110127
+5LqO5LiA5L2T55qE 110128
+55Sf54yq 110129
+6Ieq54S26LWE5rqQ 110130
+556E5YeG 110131
+6KeE5qih5YyW 110132
+5bm25LiO 110133
+6IKl6IOW 110134
+5a6255So 110135
+5aSn54i3 110136
+6aKE5ZGK 110137
+5p2l5YGa 110138
+6Ziz5Y6/ 110139
+5p6E562R 110140
+6aKB5aWW 110141
+5Y6G5Y+y5paH5YyW 110142
+5pyN5YuZ5oiW 110143
+5oC75Yaz6LWb 110144
+5Y+R5Z6L 110145
+5oiR55yf55qE 110146
+5pum 110147
+5Y+C5Lya 110148
+6ISG5byx 110149
+5YeG5YWl 110150
+6IW56YOo 110151
+5Y+45Luk 110152
+5oKy5Ymn 110153
+5aSp5LiK 110154
+5Y+j5Lit 110155
+5LiH5Liq 110156
+5a2m5Lia 110157
+5o+Q5YCh 110158
+5Lik6L65 110159
+5aSn6IKh5Lic 110160
+5Y+k6ZWH 110161
+6KGA57OW 110162
+55qE56iL5bqm 110163
+5qOJ6Iqx 110164
+5ZCO5Y+w 110165
+5bCx5Yy7 110166
+5pW05pW0 110167
+6JKy 110168
+55uI5Yip6IO95Yqb 110169
+57G9 110170
+6ISr 110171
+55yL6YeN 110172
+5a626ZW3 110173
+6IGY55So 110174
+6LWb6YGT 110175
+5YmN6ICF 110176
+5bu66K2w 110177
+5b6L5biI5LqL5Yqh 110178
+6Im65pyv5ZOB 110179
+5pyJ6Ieq5bex55qE 110180
+5ZCm5a6a 110181
+56S+5Zui 110182
+5ZGo5LqU 110183
+5bim5Yiw 110184
+5bel5L2c5Lya6K6u 110185
+6IKh5pys 110186
+5aSW5YyF 110187
+5a625YWs5Y+4 110188
+55uR54ux 110189
+6IiK 110190
+5ZCN5qCh 110191
+6KW/5rmW 110192
+6LaF6L+H5LqG 110193
+5Y2X5bGx 110194
+57uE5Lu2 110195
+5YC85b6X5rOo5oSP 110196
+5oyj5omO 110197
+5LqL6L+5 110198
+57aT54ef 110199
+56eR5a6k 110200
+5aW95ZCX 110201
+5qSF5a2Q 110202
+5ZyI5a2Q 110203
+5L2G5aW5 110204
+5rWB55WF 110205
+5ZCE6Ieq55qE 110206
+6IGM5ZGY 110207
+6KGN55Sf 110208
+5YWo5Zy6 110209
+5pKk6ZSA 110210
+5Y206KKr 110211
+5a6B6Z2Z 110212
+5YmN5omA 110213
+5YmN5omA5pyq 110214
+5YmN5omA5pyq5pyJ 110215
+5Li75Lia 110216
+5YyX576O 110217
+6K+E5a6a 110218
+5ZOB5bCd 110219
+5aSn5a626YO95Zyo 110220
+5Li75biF 110221
+57uG5b+D 110222
+5L+h5oGv5oqr6Zyy 110223
+55qE56ue5LqJ 110224
+6YCZ5qij55qE 110225
+56eR5Yib5p2/ 110226
+6YeH5pGY 110227
+56Wo5o2u 110228
+6YCQ5bm0 110229
+6Iux6LaF 110230
+6KGM5Lia5YaF 110231
+5Lq65a+/ 110232
+5ZCO5Yuk 110233
+5aaC5oSP 110234
+56yU6K+V 110235
+5reh5reh55qE 110236
+5LiN6IiS5pyN 110237
+5L2T56ev 110238
+5Lmf5LiN6KaB 110239
+6Z2i5paZ 110240
+5qC35pys 110241
+56WB 110242
+5oyJ6KeE5a6a 110243
+5aSn5qaC5piv 110244
+5oOF5Ya16L+b6KGM 110245
+5ZCE5Y2V5L2N 110246
+55qE56yR5a65 110247
+5Ye66Imy55qE 110248
+5Luj6KGo5oCn 110249
+55qE576O5aW9 110250
+6ZKm 110251
+5b6u55Sf54mp 110252
+6LaK5piv 110253
+5pa55Y+v 110254
+5bmy6ISG 110255
+6YGK5oiy 110256
+55qE5YW06Laj 110257
+6Zeu6LSj 110258
+5Zug5Li65oiR5Lus 110259
+6ICD6YeP 110260
+55Sf55Sf 110261
+6Zi75Yqb 110262
+5LiN5YWB6K64 110263
+5o+Q6K6u 110264
+5YeP5oyB 110265
+5Y+q5piv5LiA5Liq 110266
+5oiR5oqK 110267
+5Y+R546w6Ieq5bex 110268
+5aKe5bmF 110269
+5aaN 110270
+6Jed6KGT 110271
+5LiA5a625Lq6 110272
+5YiG57qn 110273
+55qE5pWw6YeP 110274
+6L2u6J6N6LWE 110275
+562J5Zug57Sg 110276
+5aSn5aSr 110277
+6IGY6K+3 110278
+6aOO5py6 110279
+57u95pS+ 110280
+5Lu75L2V5LiA5Liq 110281
+6aCC 110282
+6Zi257qn 110283
+5oqK5aW5 110284
+6L+b5Yab 110285
+6IO95YGa5Yiw 110286
+5Z+56K6t5py65p6E 110287
+54mp5paZ 110288
+56ul6K+d 110289
+5oyH5a+85oSP6KeB 110290
+6Ziu 110291
+5rex5YWl5o6o6L+b 110292
+5Li75py6 110293
+5riU5Lia 110294
+5LiN5pyN 110295
+5rWT6YOB 110296
+6KGX5LiK 110297
+5L6d5qyh 110298
+5pe25q61 110299
+5qK1 110300
+55qE5Zac54ix 110301
+5b6I6ZW/ 110302
+5Yid57qn 110303
+5p6c5pat 110304
+5oqi5pWR 110305
+6byT6Iie 110306
+5L6b6ZyA 110307
+5rex5YWl5byA5bGV 110308
+5Lqn5Lia6ZuG576k 110309
+5Zmq6Z+z 110310
+5ZCs552A 110311
+5rex5Yi755qE 110312
+5b+N5Y+X 110313
+55S156OB 110314
+5by66ICF 110315
+5ruL5ZGz 110316
+5pu86IGU 110317
+5Y+v5Lul55u05o6l 110318
+5aSn57Gz 110319
+5q235Y+y 110320
+5pS/5Yqh5pyN5Yqh 110321
+5YWs5byP 110322
+56S+576k 110323
+6YGT5aOr6IGM5Lia 110324
+5LmL5oOF 110325
+5rW35rC0 110326
+5ryU5aWP 110327
+5bqX6YeM 110328
+6L+56LGh 110329
+5Y+R5bGV55CG5b+1 110330
+6auY56m6 110331
+5ZGo5YiK 110332
+5Zue5Yiw5LqG 110333
+5LiN6YCC5ZCI 110334
+5aC15aGe 110335
+5YqI 110336
+5rC05LiK 110337
+54CR5biD 110338
+57qz56iO5Lq6 110339
+54eD5rK5 110340
+5bel56iL6aG555uu 110341
+5bOh6LC3 110342
+5pyJ6ZKI5a+55oCn 110343
+5ZyG5b2i 110344
+5pys5biC 110345
+6L+Z6K+d 110346
+566h55CG6ICF 110347
+56Gu6K+K55eF5L6L 110348
+5oqK5omL 110349
+5b2p6Imy 110350
+5LiK5YmN 110351
+5aSv5a6e 110352
+576K6IKJ 110353
+5b6A5bm0 110354
+5pOF6Ieq 110355
+6L+35Lq6 110356
+6Iiq5q+N 110357
+57K+57uG 110358
+5Zyo5oiR55qE 110359
+5Yib5oqV 110360
+6bqm5YWL 110361
+5pyI57uP 110362
+5YyX5rW3 110363
+5LmL5pif 110364
+5Y+25a2Q 110365
+5biC5Zy656ue5LqJ 110366
+6L+Z5LqL 110367
+5Y+D6IiH 110368
+5Lqn5Zyw 110369
+5ZSJ 110370
+5ZWG5ZOB5oi/ 110371
+6Iiq6L+Q 110372
+5LyY5byC 110373
+5LuW5Lus5piv 110374
+6Zuo5rC0 110375
+6K+N5rGH 110376
+5Yac55Sw 110377
+5qyn6Ziz 110378
+55+t57q/ 110379
+566h572R 110380
+5qC55Z+6 110381
+5Y+q5pyJ5LiA5Liq 110382
+6Z6L5a2Q 110383
+5biC5aeU5Lmm6K6w 110384
+5Yi75oSP 110385
+6KGM6L2m 110386
+5Y+I6KKr 110387
+5Y+v6Z2g5oCn 110388
+6LSx 110389
+5Lu75ZG9 110390
+5bqU5Zyo 110391
+5bCx5b6X 110392
+5pyN5Yqh5L2T57O7 110393
+5pS/5p2D 110394
+5Y+R6KiA5Lq6 110395
+6L+H5b6A 110396
+5Lik5Y+q 110397
+6Jm96K+0 110398
+6YCB5LiK 110399
+5LuA5LmI5LqL 110400
+5pWj5paH 110401
+5o6M5o6n 110402
+6JaE5byx 110403
+5LiL6Z2i5bCx 110404
+5Li76KaB5YaF5a65 110405
+5b6I6YeN6KaB55qE 110406
+5bCx6K+0 110407
+55m96Imy55qE 110408
+6YKj5Liq5pe25YCZ 110409
+57uP57qq5Lq6 110410
+55qE5q+N5Lqy 110411
+56yU6K6w5pys 110412
+5bqV5bGC 110413
+6L+R5Luj 110414
+6Kej6K+0 110415
+6LKg6LKs 110416
+5pyA5aSn5YyW 110417
+5ZWG6ZO6 110418
+5qCh5Y+L 110419
+5rKB 110420
+5LiN5Ye65p2l 110421
+6Zm36Zix 110422
+56iF 110423
+5YWs5biD5LqG 110424
+5YeA5YC8 110425
+55u45a+56L6D 110426
+56yb 110427
+5qC4566X 110428
+5Y2O5L6o 110429
+5oCl5pWR 110430
+5oy65aW9 110431
+5YWS56ul 110432
+5LqM6IOO 110433
+5Ye66Ieq 110434
+5Z2f 110435
+5omL5LiL 110436
+5bGh 110437
+5Yib6YCg5oCn 110438
+5Lil5qC85oyJ54Wn 110439
+5YaN5Y67 110440
+5Lic55uf 110441
+5Lq65rWB 110442
+5LqG5LiA5aOw 110443
+5bCP5pe25YmN 110444
+6LS15peP 110445
+6ZyW 110446
+5Lmf5piv6Z2e5bi4 110447
+6YCx 110448
+55yL5LqG55yL 110449
+57mB5q6W 110450
+6Iez5q2k 110451
+6aKE5aSH 110452
+5b6I5piO5pi+ 110453
+5ryU6Im6 110454
+5Z2Q552A 110455
+5L+E5Yab 110456
+5Zyo6L+H5Y67 110457
+5LmL5LqL 110458
+5oqT6I63 110459
+5Z2Q5LiL 110460
+55Sx5Lit5Zu9 110461
+5Lmf5byA5aeL 110462
+562U5aSN 110463
+5Z6D5Zy+5YiG57G7 110464
+6ZKT6bG8 110465
+5ZCE56iu 110466
+55u46YGH 110467
+5LiN5YGc55qE 110468
+5om56YeP 110469
+6YeN6KaB5L2c55So 110470
+5aeU5bGI 110471
+5YWt5bm0 110472
+5LiD5Y2B 110473
+5LmL5oiY 110474
+6aOO6Zmp566h55CG 110475
+6Z+z5qiC 110476
+6KGM5pS/5aSE572a 110477
+5pys5LqL 110478
+5pKw5YaZ 110479
+6IGa5ZCI 110480
+6YCC5pe2 110481
+5pCs5a62 110482
+56KO54mH 110483
+55ub5a60 110484
+566A5rSB 110485
+5Y+s6ZuG 110486
+566A5YyW 110487
+5YyX5Lqs5pe26Ze0 110488
+56ys5LiJ5bGK 110489
+5p2l5Zue 110490
+5bi455So55qE 110491
+5Lqs5rSl 110492
+5Lqs5rSl5YaA 110493
+5qKm5bm7 110494
+6K+V6KGM 110495
+5py65bqK 110496
+5Yiw5pyA5ZCO 110497
+5Yqp5omL 110498
+5YiG5b2p 110499
+5Ye65ZOB 110500
+5Yi56L2m 110501
+5ZCv5Y+R 110502
+5L6n6Z2i 110503
+5q+P5b2T 110504
+55u45YWz6KeE5a6a 110505
+5LiW5Lq6 110506
+6LSt6L2m 110507
+5b+D55uu 110508
+5b+D55uu5Lit 110509
+5LqU6YeR 110510
+6L+Y6K6w5b6X 110511
+5L6d54S25piv 110512
+5o+Q5qGI 110513
+55S15ZWG5bmz5Y+w 110514
+5YGa5Yiw5LqG 110515
+5p2c57ud 110516
+5a6J5Y2T 110517
+5LiW55WM5ZCE5Zyw 110518
+5YmN6YCU 110519
+5rSX5YeA 110520
+5aWL5Yqb 110521
+5Z+O5biC5bu66K6+ 110522
+5aSa5Yqf6IO9 110523
+5Lya6YCg5oiQ 110524
+5Y+R5biD5Lya5LiK 110525
+56m256uf5piv 110526
+5YiG57qi 110527
+55+l6K2Y 110528
+6Z2i5p2/ 110529
+5peg5aOw 110530
+5oCl6ZyA 110531
+5aSx55yg 110532
+54i45aaI 110533
+5LqC 110534
+5YWo5pmv 110535
+57uP5YW455qE 110536
+5Ymn5Lit 110537
+6aKG5a+85LiL 110538
+5YWa5YaF 110539
+5YWl5L61 110540
+5ouJ5pav 110541
+5LiA5bmV 110542
+5Yqg5LmL 110543
+6IKG 110544
+6Iux5qC8 110545
+6Iux5qC85YWw 110546
+5ben5YWL 110547
+5ben5YWL5Yqb 110548
+5LiA5b+D 110549
+6IGC 110550
+5b6A5b6A5piv 110551
+566h55CG5bGC 110552
+55m75YWl 110553
+5bu656uL6LW3 110554
+5bu65Zu9 110555
+5a2Q5a6r 110556
+5bqU5LuY 110557
+5o6i56m2 110558
+56ys5LiA5L2N 110559
+5L2Z5a62 110560
+562J5rS75Yqo 110561
+5omA6Ie0 110562
+6L6D5b+r 110563
+5piv6Z2e 110564
+5o+Q5ZCN 110565
+5LqM6ICF 110566
+5Y+q5Ymp5LiL 110567
+5YW25Lit5YyF5ous 110568
+57yW56iL 110569
+56C056KO 110570
+5Lit5Lic 110571
+5bel5L2c5oql5ZGK 110572
+562+5ZCN 110573
+6YWS5Lia 110574
+55+l5pmT 110575
+54Ot5b+D 110576
+6Z2e5Yeh 110577
+6JCl5Lia5omn 110578
+6JCl5Lia5omn54Wn 110579
+5Lq65aSn5Luj6KGo 110580
+5LiA5Liq5paw55qE 110581
+5aiB5rW3 110582
+6YKj5Lq6 110583
+5rao5Lu3 110584
+5raI54Gt 110585
+6Zq+5b+Y 110586
+57aT6amX 110587
+5Y+j6KKL 110588
+57O75pWw 110589
+5paH5Lit 110590
+5aW96L2s 110591
+5paw6Zu25ZSu 110592
+6K6y6L+w5LqG 110593
+5byA55uY 110594
+55WZ57uZ 110595
+5oWi5oWi55qE 110596
+5oKy5Lyk 110597
+5pys5pyf 110598
+5LqG5aSa5bCR 110599
+6L+Z6K6p 110600
+5ZCM562J 110601
+5riF5piO 110602
+5Liq5Z+O5biC 110603
+5rqW5YKZ 110604
+5Yeg5LmO5piv 110605
+5by65Yqb 110606
+5L+v 110607
+5rC056i7 110608
+5Zu65a6a55qE 110609
+5qC45YeG 110610
+6K+05pyN 110611
+6aGv56S6 110612
+6L+Z5aWX 110613
+5pm65oWn5Z+O5biC 110614
+5bGL6aG2 110615
+5LiN5p2l 110616
+55Sf6bKc 110617
+55+l5oOF 110618
+5oqV6Lqr 110619
+5ZGK6K+J5oiR5Lus 110620
+5LiJ5Zub 110621
+5LiH5LiA 110622
+6L6G6L2m 110623
+5Li65LmL 110624
+5Yiw5pe25YCZ 110625
+6L+Z5omN5piv 110626
+5ZCN54mM 110627
+5bqf5rC0 110628
+5Y675bm05ZCM5pyf 110629
+5bm06ZmQ 110630
+6YGL5YuV 110631
+5Y+M55y8 110632
+6KaB57Sn 110633
+5a+5562W 110634
+5Zy66aaG 110635
+55m+56eR 110636
+6LaK6YeO 110637
+5a+M5ZCr 110638
+5aSn5aSa5pWw5Lq6 110639
+5pyA5bCR 110640
+5Y+s5ZSk 110641
+5YW46IyD 110642
+5Yac5py6 110643
+5q2j5paH 110644
+5bqU55So5LqO 110645
+5rex6ICV 110646
+5L+t 110647
+5LuA5LmI5Lic6KW/ 110648
+5aWX6aSQ 110649
+5b2T6YCJ 110650
+5bem5omL 110651
+6LCD55CG 110652
+5pma6aSQ 110653
+6Zq+5YWz 110654
+5Yet6K+B 110655
+54ix5Lq6 110656
+5oyH6LSj 110657
+6LSj57yW 110658
+55qE5LiA5qy+ 110659
+6ZOy 110660
+5Y2B5Liq 110661
+6IC7 110662
+5pyN5Yqh5ZWG 110663
+5Zyw54ux 110664
+6L+e5b+Z 110665
+5Zuw5oOR 110666
+55qT 110667
+5LiN5ZCD 110668
+546w5Zyo5bey57uP 110669
+55uY54K5 110670
+5LiN5YGc5Zyw 110671
+566h55CG5qih5byP 110672
+6L+Z5q615pe26Ze0 110673
+5qSw 110674
+56S85YyF 110675
+5rWB6L2s 110676
+5omr56CB 110677
+6ZuG5Lit5Zyo 110678
+5rGC5Yqp 110679
+5Y2K5Liq 110680
+5b+r6YCf5aKe6ZW/ 110681
+5b6A5LiL 110682
+6K+E5YiG 110683
+5bCx5oOz 110684
+5ZWG5Yqh6YOo 110685
+5pyJ6Zeu6aKY 110686
+6I635Yip 110687
+5q+b55eF 110688
+5oSf5bqU 110689
+6Imv5oCn 110690
+5YiG5q2n 110691
+5YaJ 110692
+5oiR5Lus546w5Zyo 110693
+6KaB5Yqg5by6 110694
+5ben5aaZ 110695
+6J665peL 110696
+5YiH5o2i 110697
+54uE 110698
+6aG655WF 110699
+5bCk5YW25piv5Zyo 110700
+6Iqd6bq7 110701
+6Zq+6L+H 110702
+5peX5bic 110703
+5aSN5Y2w 110704
+5aSN5Y2w5Lu2 110705
+5b+F6ZyA 110706
+5a+55aSW5byA5pS+ 110707
+6Zq+5Y+X 110708
+5Y6f5p2l5piv 110709
+566X5LqG 110710
+6auY5bGx 110711
+56a76IGM 110712
+57WE57k= 110713
+57WE57mU 110714
+5bGB6IKh 110715
+55m+5a62 110716
+6YGH5LiK 110717
+5piU5pel 110718
+5LiN5a65 110719
+55uR566h6YOo6Zeo 110720
+5Li75oSP 110721
+5rWB5Z+f 110722
+6LeM5bmF 110723
+6Iez5LiK 110724
+5Yir6K+0 110725
+5piv5q+U6L6D 110726
+5a6P6KeC57uP5rWO 110727
+5biC5Zy65Li75L2T 110728
+5rGh5p+T54mp 110729
+5pWR5rK7 110730
+5Liw5pS2 110731
+5a2Y5pS+ 110732
+5YeE 110733
+6YeR5bGx 110734
+5o2i5LqG 110735
+5LiT5Lq6 110736
+6Zec5pa8 110737
+5pei6KaB 110738
+5Zu96Laz 110739
+6ZqL 110740
+5Y+N5Ye7 110741
+6LW36Lqr 110742
+5YWI5piv 110743
+5biM5pyb6IO95aSf 110744
+5Yi26K6i 110745
+5bqX6Z2i 110746
+5ZaA 110747
+5pWZ5L2g 110748
+6ZmN5rip 110749
+5Yqb5rGC 110750
+5LiJ55m+ 110751
+54mp5Lu3 110752
+5Lii5aSx 110753
+5aKZ5LiK 110754
+6YOo5Lu9 110755
+5qC35p2/ 110756
+5LmL5oSP 110757
+572R5bCP57yW 110758
+5LiW5LiK 110759
+6LCD6K+V 110760
+5rGh5p+T6Ziy5rK7 110761
+5b2x6Zmi 110762
+5a6M5YWo5Y+v5Lul 110763
+6YCa5YWz 110764
+5LmJ5Yqh5pWZ6IKy 110765
+5rKh5pyJ5Yqe5rOV 110766
+6IC/ 110767
+5aaz 110768
+5peg5oOF 110769
+5b6X55uK 110770
+5b6X55uK5LqO 110771
+5pyf55u8 110772
+5aix5LmQ5Zy6 110773
+55Sy5pa5 110774
+5LiA5rG9 110775
+55ew 110776
+55aR5Ly8 110777
+5paw5rWq5b6u5Y2a 110778
+5by66KGM 110779
+5b2T5LuW 110780
+6IO6 110781
+55So5oi35o+Q5L6b 110782
+5Yy65aeU 110783
+5oS/5pmv 110784
+5oqY5omj 110785
+5aSx6Liq 110786
+6L+r5YiH 110787
+5a2X5q+N 110788
+5ZKv 110789
+6KqN6K2Y 110790
+5LuA5LmI5oSP5oCd 110791
+55uS5a2Q 110792
+5b2V6Z+z 110793
+5bu66K6+5bel56iL 110794
+5Lia5L2Z 110795
+5a6e6Le15rS75Yqo 110796
+55yf56m6 110797
+54KW 110798
+5Zyo6Lev5LiK 110799
+5Li76KaB5YyF5ous 110800
+6K+l5oCO5LmI 110801
+5oC75pyJ 110802
+5oCn5oSf 110803
+5rCR6Iiq 110804
+5byA5bqX 110805
+5qy66aqX 110806
+56qB5Ye7 110807
+57y65aSx 110808
+5omn5Lia 110809
+5Zyw6YGT 110810
+5bm25peg 110811
+5rCR5Yqe 110812
+57uE57uH55Sf5rS7 110813
+5oiR5aaI 110814
+6KiY6ICF 110815
+566h5Yi2 110816
+5om+5Liq 110817
+6Je7 110818
+54KO55eH 110819
+5LqS5Yqp 110820
+5rWP6KeI5Zmo 110821
+546p5a625p2l6K+0 110822
+6ZmN5L2O5LqG 110823
+6KOU 110824
+5oyj6ZKx 110825
+5ZWG5py6 110826
+5pS56KOF 110827
+5rWB5rWq 110828
+5pS/5rOV 110829
+6ICB5aS0 110830
+55Sf5Lqn5ZKM 110831
+56mX 110832
+5Lqy54ix 110833
+5Lqy54ix55qE 110834
+5bGl6IGM 110835
+5Z+O6YeM 110836
+57uG5YiG 110837
+5Yqz5Yqo5ZCI5ZCM 110838
+5Zyo5pel5pys 110839
+5aiB5bCU 110840
+5Y2r6KeG 110841
+6YCj57WQ 110842
+552A6YeN 110843
+5oqY56Oo 110844
+5Zu+5Li6 110845
+55y3 110846
+5bel5bqP 110847
+5pOB 110848
+5pOB5pyJ 110849
+572R56uZ5Zyw5Zu+ 110850
+55qE5LiA5aSn 110851
+57uE57uH5a6e5pa9 110852
+5oqb5byD 110853
+5ZKM5pSv5oyB 110854
+5rOV5YiZ 110855
+5rWq5r2u 110856
+546w5pyJ55qE 110857
+5Yeg546H 110858
+5Li65a6i5oi3 110859
+5Y2B5LiH 110860
+6LmE 110861
+56qB5Ye66Zeu6aKY 110862
+5Y+D5Yqg 110863
+6YO95Lya5pyJ 110864
+55uk 110865
+6LCB6YO9 110866
+5omL5Yqo 110867
+55u06L6+ 110868
+54K55aSa 110869
+6Zi25bGC 110870
+5LiN5L2z 110871
+6YKj5q61 110872
+5ruo5rW3 110873
+5piv5Zu95YaF 110874
+5oiR5biM5pyb 110875
+5ZCb5a2Q 110876
+6KeC6Z+z 110877
+5YGa6aWt 110878
+5rG96LuK 110879
+5YWz56iO 110880
+55y85YmN55qE 110881
+5rC06Z2i 110882
+6ICz5py6 110883
+6L+96Liq 110884
+5o6o6YCB 110885
+6ZKx5YyF 110886
+5oG25b+D 110887
+5rW35Z+f 110888
+5beN 110889
+5byA5p2l 110890
+6KGo5oCB 110891
+5Luq6KGo 110892
+5bmz5Y6f 110893
+5Y2B5aSa5bm0 110894
+5Lmf5peg5rOV 110895
+5YW86aG+ 110896
+6KGj5p+c 110897
+5qC95Z+5 110898
+5oi/5rqQ 110899
+6K6+56uL5LqG 110900
+5LiH5ZCN 110901
+5pWw6aKd 110902
+6KaB5Z2a5oyB 110903
+5ZCJ5p6X55yB 110904
+6K+36IGU57O7 110905
+57uP5Y6G6L+H 110906
+55qE5pys6LSo 110907
+5YWl6Zeo 110908
+5pys5qGI 110909
+546H6L6+5Yiw 110910
+5Y+w6Zi2 110911
+6ZKe 110912
+5oiR6IO9 110913
+6I6y6Iqx 110914
+6ZKg 110915
+5LiA5LqL 110916
+5Y6f5pyJ55qE 110917
+5q+P5YCL 110918
+5q+U5Lqa6L+q 110919
+5qOL54mM5ri45oiP 110920
+5LiN5Lya5pyJ 110921
+5b2S5p2l 110922
+5LqU55m+ 110923
+6L+H6auY 110924
+6Zu36L6+ 110925
+5LiA6LW35Y67 110926
+5pWZ5a+8 110927
+5bCx6K+K 110928
+5bCx5b6I 110929
+5LiN5ZCM5LqO 110930
+5L+6 110931
+5biW5a2Q 110932
+5pS/5Y2P5aeU5ZGY 110933
+55ar5oOF5b2x5ZON 110934
+5YiG6KOC 110935
+5Li65LuA5LmI5Lya 110936
+5LqU5pif 110937
+5bCR5YS/ 110938
+5oqi6Zmp 110939
+5qKm6KeB 110940
+6K6w6ICF6YeH6K6/ 110941
+5bGx6Lev 110942
+5oiR5Liq5Lq6 110943
+5rKZ5rup 110944
+6Lmt 110945
+5pS56K6K 110946
+5paw5Z6L5Yag 110947
+5paw5Z6L5Yag54q2 110948
+5Yy75oqk 110949
+5Yy75oqk5Lq65ZGY 110950
+5rW35bCU 110951
+5YWz5LqO5oiR5Lus 110952
+6Zmk5aSW 110953
+5bqa 110954
+5a6j5ZGK 110955
+5LiJ5Y2D 110956
+5qao 110957
+56eR5oqA5aSn5a2m 110958
+5LiD5YWr 110959
+6aG65bqU 110960
+54i454i45aaI5aaI 110961
+6YCJ5Y+W 110962
+5Ymn54OI 110963
+5Lmh5p2R5peF5ri4 110964
+56ev5p6B5o6i57Si 110965
+6KGo546w5Li6 110966
+5b6I5riF5qWa 110967
+5aSn5Yab 110968
+5p2l55S1 110969
+5aWX5oi/ 110970
+546w6KGM 110971
+5Lqr5Y+X5Yiw 110972
+55yL54K5 110973
+5Zu65a6a6LWE5Lqn 110974
+5Lul5Lq65Li6 110975
+5Lul5Lq65Li65pys 110976
+5LiN5a6M 110977
+6ZmN6Zuo 110978
+5YGa55qE5LqL5oOF 110979
+5bm25LqO 110980
+6aG95by6 110981
+6IC4 110982
+5Zi05be0 110983
+55u45YWz5L+h5oGv 110984
+5oiR5rKh 110985
+5oiY55Wl5oCn 110986
+5oCd5b+1 110987
+5YiY5aSH 110988
+5Yqp5pS7 110989
+6aOO6LKM 110990
+6Z2i5a+56Z2i 110991
+56ev5p6B5byA5bGV 110992
+55aX5pWI 110993
+55yL5Lmm 110994
+57y65Y+j 110995
+5Zu95rCR57uP5rWO 110996
+5L2/55So5p2D 110997
+6YGl6L+c 110998
+5aGr6KGl 110999
+56ys5LiJ5Lq6 111000
+5Y2K5aSc 111001
+5q2m5rGJ5biC 111002
+5oiR5Y+R546w 111003
+5LyY5oOg5pS/562W 111004
+6aOO5Y+j 111005
+5bCx5LiN6IO9 111006
+5Li65Li76KaB 111007
+5rWB5Ye6 111008
+5bSH5ouc 111009
+5bm25LiN6IO9 111010
+6auY5LiJ 111011
+5LiW55WM5LiK5pyA 111012
+5oOz5b+F 111013
+5YW25omA 111014
+5YCZ6YCJ 111015
+5YCZ6YCJ5Lq6 111016
+5LiN54ix 111017
+5Ymv5L2c55So 111018
+5Lq65rCR5pel5oql 111019
+5oiR5LiN5piv 111020
+5a6e54mp 111021
+55S15Y6C 111022
+5Lmf566X5piv 111023
+5pyJ6Zec 111024
+5pyJ6IO95Yqb 111025
+5oyC5Zyo 111026
+55y85LiL 111027
+57qm57+w 111028
+5bCP5a2m55Sf 111029
+6LW35Yiw5LqG 111030
+5bel5aSr 111031
+5ZCM5b+D 111032
+5Z2m6KiA 111033
+56CM 111034
+5Y+R5oyl5LqG 111035
+6IGM5Lia6YGT5b63 111036
+6L+Z5Lqb5bm0 111037
+5b+15aS0 111038
+6ICB6byg 111039
+5YWo6LWE 111040
+5YWo6LWE5a2Q 111041
+5LiA5ZGz 111042
+5aSa5LiH5YWD 111043
+5qC85pyD 111044
+6ZW/6YCU 111045
+5bim6LWw 111046
+6Iux5a+4 111047
+5paH5L2T 111048
+5a+55LuW5Lus 111049
+5ZOt5LqG 111050
+5aGr5oql 111051
+54mI5p2D5aOw5piO 111052
+55S157q/ 111053
+6LSt54mp5Lit5b+D 111054
+6aWx5ruh 111055
+5L2O5aS0 111056
+5by66L+r 111057
+5L+d5rSB 111058
+5qyn5Yag 111059
+55u46L+e 111060
+6K6k6LSt 111061
+54Gr5pif 111062
+6auY5bCU 111063
+6auY5bCU5aSr 111064
+6JGr6Iqm 111065
+5qCH5rOo 111066
+55qE55CG5oOz 111067
+5qC46YW4 111068
+5qC46YW45qOA5rWL 111069
+5YqJ 111070
+5LiA6Iis5piv 111071
+5oCd57Si 111072
+6L2o6L+5 111073
+54Ot5bim 111074
+6Zmj 111075
+5YeG56Gu5oCn 111076
+5oi0552A 111077
+5Zyo55Sf5rS75Lit 111078
+5omA6IO9 111079
+5pyv5ZCO 111080
+5bim5L2g 111081
+56Wg 111082
+5q6L6YW3 111083
+5Lmf5Y+q5piv 111084
+55Sz6LSt 111085
+5Li+5Yqe5LqG 111086
+5pyJ5oSP5LmJ 111087
+5pe655ub 111088
+5Zyo57ay 111089
+5Zyo57ay6Lev5LiK 111090
+5b6I5aSn56iL5bqm 111091
+566h6L6W 111092
+55ar5oOF5pyf6Ze0 111093
+6Kem5pG4 111094
+6Zi25q615oCn 111095
+5Lya6KeJ5b6X 111096
+55qE55S76Z2i 111097
+5o6l5Y+X5LqG 111098
+6KGo6L6+5LqG 111099
+6YKT5bCP 111100
+6YKT5bCP5bmz 111101
+5YWa6aOO 111102
+5YWa6aOO5buJ5pS/ 111103
+5ZWG5a2m6Zmi 111104
+5YWR5o2i 111105
+6aOf5ZOB6I2v5ZOB 111106
+6Z2e5bi45aW955qE 111107
+55yv 111108
+57qz57Gz 111109
+5Yqo5pGH 111110
+5Zue6YG/ 111111
+55yL6JGX 111112
+5qy+6aG5 111113
+5YWr5bm0 111114
+5YGa5Liq 111115
+5paH5qGj 111116
+6YeR6J6N56eR5oqA 111117
+5YW25Lit5pyJ 111118
+5LqG5LiA57O75YiX 111119
+5peX6Iiw5bqX 111120
+56ew6LWe 111121
+6Zui6ZaL 111122
+5Yi25Ya3 111123
+5a626Zeo5Y+j 111124
+5Y2B5aSa 111125
+5Ly05L6j 111126
+55yL55eF 111127
+5ouJ552A 111128
+5omS 111129
+55ay5oOr 111130
+5bCR5pWw5rCR5peP 111131
+5Zu+5b2i 111132
+6L2n 111133
+5aKe6YeP 111134
+6aWy5YW7 111135
+54Gr5bGx 111136
+5q+P5Liq5pyI 111137
+5L2c5Li65LiA5ZCN 111138
+6L205om/ 111139
+5paH5Lmm 111140
+57yV 111141
+5YW35L2T5oOF5Ya1 111142
+55eb54K5 111143
+55u06ZSA 111144
+5aGK 111145
+5Lmf5pyD 111146
+54Ot5r2u 111147
+5bmz5rCR 111148
+5ryU5ZSx5Lya 111149
+5pWZ56CU 111150
+6YCD6YG/ 111151
+5LiA6LSv 111152
+5bCx6LaK 111153
+5a6e5a6e5Zyo 111154
+5a6e5a6e5Zyo5Zyo 111155
+5Lmg6L+R5bmz5oC7 111156
+5rq6 111157
+5b+D5bqV 111158
+6ZW/5b6B 111159
+5aq95aq9 111160
+56ys5LiJ5qyh 111161
+5Ye65ryU 111162
+54uA5rOB 111163
+5bCU5pav 111164
+5Luj55CG5ZWG 111165
+54aP 111166
+55qE5a+56LGh 111167
+55S16YeP 111168
+6KGM5YiX 111169
+5Zu95Lq6 111170
+6LeR5LqG 111171
+5Y2U5Yqp 111172
+6JCl6L+Q 111173
+5biI5YWE 111174
+5qau 111175
+5oOz5YOP 111176
+5oCn5by6 111177
+56eR5a2m56CU56m2 111178
+5bu25a6J 111179
+5Lil5qC86JC95a6e 111180
+6aKG5Lya 111181
+55u45beu 111182
+6Lev5Lq6 111183
+55Sr 111184
+5pyJ5Lu35YC8 111185
+5pyJ5Lu35YC855qE 111186
+576O5Zui 111187
+5rCR5Li755Sf5rS7 111188
+5oiR5omN 111189
+576O5Zu95Lq6 111190
+5rCU5ZGz 111191
+5Y+N5bCE 111192
+55qE5Yaz5b+D 111193
+5aSn6LGG 111194
+5Lqk5Luj 111195
+6L+b5Ye6 111196
+5Y+N5oqX 111197
+5oyH55qE5piv 111198
+5Lu35L2N 111199
+6L+b6am7 111200
+5LiK55m+ 111201
+5L2N5YiX 111202
+5Lit5Zu95LyB5Lia 111203
+55qE5aW95aSE 111204
+5Li757yW 111205
+5rG95rK5 111206
+5L2G5oiR5Lus 111207
+5oCO5LmI55yL 111208
+6buE5bGx 111209
+5aSa5aqS5L2T 111210
+5ZCO5Y2r 111211
+6I635b6X5pu05aSa 111212
+5Yqh5b+F 111213
+5Li65aWR5py6 111214
+6aaW6aWw 111215
+5LiH5Y2a 111216
+6LaK5p2l6LaK5aSn 111217
+5LiT6aG56KGM5Yqo 111218
+5aWL6L+b 111219
+5LuN54S25piv 111220
+6LSo5oSf 111221
+5aaC5p6c5LiN5piv 111222
+56uZ6LW35p2l 111223
+5Lm+6ZqG 111224
+5Y+v5oCV55qE 111225
+5a+M6LS1 111226
+5riF566X 111227
+5ZCR5LiL 111228
+5YCa 111229
+55qE562U5qGI 111230
+6Ii55LiK 111231
+55qE55yf5a6e5oCn 111232
+562J5Yqf6IO9 111233
+5Zac5Ymn 111234
+5aiB5Yqb 111235
+5paw6aKW 111236
+5qC455S1 111237
+5oql6ZSA 111238
+5pWF5Lmh 111239
+5Ly06ZqP 111240
+6Z6t 111241
+5aaK5aig 111242
+5YiG5YyW 111243
+5pyJ5b6I5aSn 111244
+5oCO5LmI6K+0 111245
+5pmC5Luj 111246
+5Lqn5Ye6 111247
+5LuL57uN6K+0 111248
+5aSE55CG5Zmo 111249
+6Iao6IOA 111250
+5Ymv5biC6ZW/ 111251
+55qE5aa75a2Q 111252
+5qC35ZOB 111253
+5ZCM5q+U5LiL6ZmN 111254
+5YWD5bem5Y+z 111255
+55So6Ieq5bex55qE 111256
+6auY6ZuE 111257
+5pil5pma 111258
+5Lmf5pyJ5b6I5aSa 111259
+55y855CD 111260
+5pWj5q2l 111261
+5LuW5Lus6YO9 111262
+56ys5LiA5a62 111263
+5Yqe5aW9 111264
+5a6J6Ziy 111265
+5LiA5LiH 111266
+5Zyo6YeM6Z2i 111267
+6Z+z6aKR 111268
+5Y+j5Y+3 111269
+5LiA6Laf 111270
+56aP54m5 111271
+6bOe 111272
+5oOK6Imz 111273
+5paw5aiY 111274
+57u/6Imy5Y+R5bGV 111275
+5Lit5byP 111276
+5Lmf5Y+q5pyJ 111277
+546w6Lqr 111278
+5Y+v5L6b 111279
+5q+P5LiA5Liq5Lq6 111280
+56ys5LiJ6ICF 111281
+5Zyw5b2i 111282
+6ZKi57uT5p6E 111283
+55uR552j5qOA5p+l 111284
+5Y+r5oiR 111285
+6Ie05pWs 111286
+5rSX5omL 111287
+5LiL6LCD 111288
+5bq354aZ 111289
+5oiQ5Lqk6YeP 111290
+5Lmf5oiQ5Li6 111291
+5YWJ5ruR 111292
+5a6M5pW05oCn 111293
+54G8 111294
+57ay6aCB 111295
+6ZW/5a+/ 111296
+6YGp55So 111297
+55qE5LiA6aG5 111298
+556p55uu 111299
+5oqK6Ieq5bex55qE 111300
+6ZO26KGM5Y2h 111301
+5bCx5b+F6aG7 111302
+576O55m9 111303
+6Z6N5bGx 111304
+5pys6aKG 111305
+5LiA56KX 111306
+5omT5rOV 111307
+5oKo5aW9 111308
+5a+55a2p5a2Q 111309
+5oql6YGT56ew 111310
+5Lyg5Ye6 111311
+5aSn6Iej 111312
+56yL 111313
+55uP 111314
+6b6a 111315
+55u057q/ 111316
+5pm65bqT 111317
+56ef6L2m 111318
+6aOO5ZGz 111319
+55yL5LiA5LiL 111320
+5o6o6ZSA 111321
+6YOo6YOo6ZW/ 111322
+6LSo6YeP5ZKM 111323
+5YiK55m7 111324
+5bel5Lia5YyW 111325
+546H5Li6 111326
+6Zu25Lu2 111327
+56Gs5YyW 111328
+5LiK5Y2D 111329
+57uP6aqM5YC8 111330
+5bmz6KGM 111331
+5aOw6YGT 111332
+5pyN5Yqh6LSo6YeP 111333
+55Sf55Si 111334
+5pyA5a655piT 111335
+5LiA5p6a 111336
+5bm05oql 111337
+5YWs572R 111338
+5YWs572R5a6J 111339
+5YWs572R5a6J5aSH 111340
+55qE6IO96YeP 111341
+5a6e6ZmF6KGM5Yqo 111342
+6KaB5LiN6KaB 111343
+5pel5pys5Lq6 111344
+6IC256ij 111345
+57yW5Ymn 111346
+5rap 111347
+5Y2w5bC8 111348
+5LiK5LiL5ri4 111349
+5Yeg5Y+l 111350
+5Lit6ZOB 111351
+57Ch5Zau 111352
+6Ieq5bim 111353
+55Sf5LqO 111354
+5LiA5Y+j5rCU 111355
+5Yuk5aWL 111356
+6ZmN5Lu3 111357
+5bGV546w5LqG 111358
+5biD5ouJ 111359
+5Lya6YCJ5oup 111360
+55qE57uP5YW4 111361
+5aW95pyL5Y+L 111362
+6L2m6YGT 111363
+5pW05YCL 111364
+5ZyT 111365
+6ZW/5pyf5Lul5p2l 111366
+5oqV5b2x 111367
+55qH5Yag 111368
+6L+H5aSn 111369
+5ZGK6K+J5LuW 111370
+5LyB5Lia5o+Q5L6b 111371
+5oq96LGh 111372
+6YCC5bqm 111373
+55qE5aWz5a2p 111374
+6LW35LyP 111375
+55qE5Yqf5pWI 111376
+5LiT6aG55pW05rK7 111377
+5Y+v6YCa6L+H 111378
+5LiN5ZCM56iL5bqm 111379
+5byC6K6u 111380
+5YeA6LWE5Lqn 111381
+5ZGX 111382
+5LuA5LmI5ZGi 111383
+5beh6YC7 111384
+6LiP5LiK 111385
+5L2G5a6D 111386
+57K+5bqm 111387
+566h5bGA 111388
+56ys5LiA5ZCN 111389
+5YaF5a2Y 111390
+5pGG5Zyo 111391
+5Ymp5LiL 111392
+5Li75L2T6LSj5Lu7 111393
+54K55Y2K 111394
+5Lul6Iez5LqO 111395
+5YW76ICB5L+d6Zmp 111396
+5oSf5Y+X5Yiw5LqG 111397
+55+l5ZCN55qE 111398
+5a+M6LGq 111399
+5aal5ZaE 111400
+5a2Z5a2Q 111401
+6ZOC 111402
+6K+06Ieq5bex 111403
+6K6p5oKo 111404
+5pWw5o6n 111405
+55qE55y85YWJ 111406
+5rOo6ZSA 111407
+55qE54G16a2C 111408
+6L+Y5LiN6ZSZ 111409
+6Zeu5LuW 111410
+6Ieq5Li756CU5Y+R 111411
+6JOL 111412
+57Sr6Imy 111413
+5Zu95a625a6J5YWo 111414
+6L695a6B55yB 111415
+5Lmf5q+U6L6D 111416
+576O6IKh 111417
+5LiN56Gu5a6a5oCn 111418
+5b+D5aS0 111419
+5oiz 111420
+57qn5Yir55qE 111421
+6K666L+w 111422
+55qE5Zue562U 111423
+5L+d6K+B6YeR 111424
+562J6KGM5Lia 111425
+5bm456aP5oSf 111426
+5q2n6KeG 111427
+5py656Wo 111428
+5rS+5Lq6 111429
+6Ie05ZG9 111430
+5Zi06KeS 111431
+5paw6Ze75Lit5b+D 111432
+5pS+5byD5LqG 111433
+5a6c5bGF 111434
+5YaZ5LiL 111435
+6Zeu562U 111436
+6L+Z6YeM5piv 111437
+5aSa5Zyw 111438
+5Yy65Z+f5YaF 111439
+5Ym15paw 111440
+55yL5LuW 111441
+5omn5rOV5Lq65ZGY 111442
+5Yqo5py6 111443
+6Z+z5ZON 111444
+55qE5ZG96L+Q 111445
+6aG26YOo 111446
+5ZOf 111447
+6YO95pyD 111448
+5omT6YCg5oiQ 111449
+5oSP5Zu+ 111450
+55qW 111451
+5YCS5YWl 111452
+5be06JCo 111453
+5Yqp5a2m 111454
+5aSN5Y+k 111455
+5ZCv55So 111456
+5Zu96ZmF5biC5Zy6 111457
+5YKo6IO9 111458
+6buR6b6Z5rGf55yB 111459
+5LmY6L2m 111460
+6L+Q5Yqo5Lya 111461
+5L+d5Yip 111462
+55+z5p2Q 111463
+57Wu 111464
+54KS5L2c 111465
+55qE5L+h5Lu7 111466
+5bCx5oiQ5LqG 111467
+5Y+v6KeC 111468
+55qH5LiK 111469
+6L+Z5Yeg5aSp 111470
+5LiA6ZSu 111471
+5Ya35Ya7 111472
+5L+d5Y2r 111473
+5qC45qGD 111474
+5ZCI5L2c5YWz57O7 111475
+6YCB5Ye6 111476
+5peX5LiL55qE 111477
+5Zyo5LmO 111478
+5Li65bm/5aSn 111479
+5Y2I6aSQ 111480
+5LiT6K6/ 111481
+5oiW5bCG 111482
+6Z2S5bKb5biC 111483
+5aWU6LeR 111484
+5pel5oql6YGT 111485
+5aWR5ZCI 111486
+5paw5pil 111487
+5LiN5bCP5b+D 111488
+5Lik5LiJ 111489
+5oSP5oCd5piv 111490
+5Ya36JeP 111491
+55qE55eH54q2 111492
+5oCn5ZG9 111493
+6LaF5qCH 111494
+5a+G56K8 111495
+56eR5oqA6IKh5Lu9 111496
+5LqG5LiA5om5 111497
+552j5a+f 111498
+5aqS5LuL 111499
+5bCE5omL 111500
+5L+u5YW7 111501
+54mH5Yi7 111502
+6YCC5ZCI6Ieq5bex 111503
+5Y+q6KaB5piv 111504
+5ZCD6L+H 111505
+6YeR6ZO2 111506
+55u05bGe 111507
+5a2m6Zeu 111508
+5Y6L5Yi2 111509
+56qX5aSW 111510
+5pS25Yiw5LqG 111511
+5YWo5Zu95Lq65aSn 111512
+5L2G5piv5a+55LqO 111513
+5Zyo5pW05Liq 111514
+55qE6IOM5ZCO 111515
+5YeP5bCR5LqG 111516
+5Y+N6IWQ 111517
+5Y+N6IWQ5YCh 111518
+5Y+N6IWQ5YCh5buJ 111519
+5pe3 111520
+5YiG5pyf 111521
+5Zyo5rex5Zyz 111522
+5omT552A 111523
+5omr5LiA 111524
+5omr5LiA5omr 111525
+5pS/5bqc6YOo6Zeo 111526
+5o6l6L+e 111527
+5bGe5LqO6Ieq5bex 111528
+5a2Q5by5 111529
+5ZCM5qC35piv 111530
+5oC75YWx 111531
+6L2m5LyB 111532
+5qKT 111533
+5YWs6aG3 111534
+5Y+R5aOw 111535
+6ZKb 111536
+6LWw5Yq/5Zu+ 111537
+5Li76JCl 111538
+5ZaU 111539
+5pWw5o2u5YiG5p6Q 111540
+5LiN6L+c 111541
+5pyJ5ZCN 111542
+5pyJ5ZCN55qE 111543
+5YG/6L+Y 111544
+5b6I5L2O 111545
+6K6T5Lq6 111546
+6J2J 111547
+6auY6LS1 111548
+5bCR6K64 111549
+5rCf 111550
+5bmi 111551
+5Lqy5oOF 111552
+6L+Z5Lu25LqL5oOF 111553
+55So6aSQ 111554
+55u45YWz5paw6Ze7 111555
+5bCx5bqU6K+l 111556
+57uI54K5 111557
+5piv5aSa5bCR 111558
+55m75Zy6 111559
+6K+V566h 111560
+6K+V566h5am05YS/ 111561
+5YGa5aSn 111562
+5YGa5aSn5YGa5by6 111563
+55qE5L6L5a2Q 111564
+5YWr5Liq 111565
+5piO5pel 111566
+54Kz 111567
+6LWw5Y67 111568
+6YG6 111569
+5aKp 111570
+5L2T5Lya5Yiw 111571
+5ZKP 111572
+5LiL6L6+ 111573
+5aSN5Y+R 111574
+6L+96YCQ 111575
+5omT5ZON 111576
+55qE6Zqx56eB5qyK 111577
+5YW35pyJ5LiA5a6a 111578
+6L+Z5LmI5aSa5bm0 111579
+5qCR5p6X 111580
+5pyA6ZW/ 111581
+5ZCM6IOe 111582
+5YWJ5rO9 111583
+5Z+f5ZCN 111584
+5oyH5ZCR 111585
+5Y+X5a6z6ICF 111586
+5qCR6ISC 111587
+5pyJ5aSa5aSn 111588
+5aSn6Z2i56ev 111589
+5peg57yd 111590
+5pS55q2j 111591
+5pu05aSa55qE5piv 111592
+5pyf5pyr 111593
+5q28 111594
+5LmJ5LmM 111595
+6YKj5L2g 111596
+55qE56ys5LiA5Liq 111597
+6Iy1 111598
+5bCn 111599
+6I2r 111600
+5LiN5LuF5Y+v5Lul 111601
+5raM546w 111602
+5oC76Z2i56ev 111603
+5paw6Ze75Y+R5biD 111604
+5rCR55So 111605
+5bCx6K+7 111606
+5omT6LSl 111607
+5aSW6K+t 111608
+5oiR5Lus5LiA6LW3 111609
+6aKE5a6a 111610
+54O56aWq 111611
+5pyA5Li76KaB 111612
+5pyA5Li76KaB55qE 111613
+54mM54Wn 111614
+5Zug5YW2 111615
+5L2O5LiL 111616
+5Lya5ZCM 111617
+6KeB6Kej 111618
+6Ze06ZqU 111619
+5pWZ56iL 111620
+5bCJ 111621
+5biC5Lit5b+D 111622
+5YWz6ZSu5piv 111623
+5rW35Y2X55yB 111624
+54m55Yir5piv5Zyo 111625
+5Lit5Zu95aSn6ZmG 111626
+5YWF6Laz55qE 111627
+5pei6IO9 111628
+5YKz57Wx 111629
+55Gc5Ly9 111630
+5YWl5Zu0 111631
+5oWi5oWi5Zyw 111632
+5oql6YWs 111633
+5om55aSN 111634
+5bel5Lia5Zut5Yy6 111635
+5LiO5Y+R5bGV 111636
+6IO46YOo 111637
+5Zyo572R57uc 111638
+5Zyo572R57uc5LiK 111639
+5Lqk6LCI 111640
+5pu05pS5 111641
+5Y2g5pyJ546H 111642
+5Lid57u45LmL6Lev 111643
+6KGb 111644
+56CU5Yik 111645
+5Yiq 111646
+5Yiq6Zmk 111647
+6L+Z5Y+q 111648
+55qE5rCU5oGv 111649
+5Yqg5bee 111650
+6ZKn 111651
+55CG5LqL6ZW/ 111652
+5LiW5a62 111653
+5rWB6KGM55qE 111654
+5b6I5pyJ5Y+v6IO9 111655
+5Lus6YO9 111656
+57uP6JCl5qih5byP 111657
+6KGM5Lia5Lit 111658
+6YCa55+l5Lmm 111659
+5ZG96aKY 111660
+5pys57ay56uZ 111661
+5rKZ54m5 111662
+5Y+R5YWJ 111663
+6auY5Lu3 111664
+5bey54S2 111665
+5Y+M5Y2B5LiA 111666
+5LiK6K+J 111667
+57+F6IaA 111668
+6L+Z5LiA5bm0 111669
+5aSn5Lya5LiK 111670
+6YeJ 111671
+5a6M5YWo5piv 111672
+5b6X5aSq 111673
+5LiA6Iis5Lq6 111674
+6L+Y566X 111675
+5oqY5Y+g 111676
+5oqV5py6 111677
+54K554eD 111678
+546w6YeR5rWB 111679
+5YWU5a2Q 111680
+572R5qC8 111681
+5o6l6L+H 111682
+5L6b6LSn 111683
+6Zi05b2x 111684
+5Y6f5YWI 111685
+5o2j 111686
+5bem5L6n 111687
+5YWL5ouJ 111688
+5omT5Y2h 111689
+56eR5q+U 111690
+5rGH6ZuG 111691
+5Zyw55CG5L2N572u 111692
+6K+E5aeU 111693
+57uT5ZCI6LW35p2l 111694
+6L+b5YWl5Yiw 111695
+5Y+v6KGM 111696
+5Y+v6KGM5oCn 111697
+6K6p5a6D 111698
+5Yi25bqm5pS56Z2p 111699
+55SY6IKD55yB 111700
+5ZOX 111701
+5YGP5YGP 111702
+6KGj54mp 111703
+56Wd6LS6 111704
+5rqQ6Ieq 111705
+5bm25LiN5Luj6KGo 111706
+5Zu95bqm 111707
+5aW95Z2P 111708
+5p2W 111709
+5p2t5bee5biC 111710
+5rm/5bqm 111711
+6bK4 111712
+5Y2a5b2p 111713
+5rOw5bGx 111714
+5p2R6JC9 111715
+5paw6IGe 111716
+6IKL 111717
+5Y+k6ICB55qE 111718
+55qE56eY5a+G 111719
+5LiA5Liq6Zeu6aKY 111720
+6YGP5Yi2 111721
+5Y2D5Lq/ 111722
+6L+H56Gs 111723
+5bCE5Ye7 111724
+6Ieq54S25piv 111725
+5Lqn5Yy6 111726
+54K554K55aS0 111727
+5Y+v5Lul5biu5Yqp 111728
+6K+05a6e 111729
+6K+05a6e6K+d 111730
+5oiR5Y+q5piv 111731
+5LmL5L2Z 111732
+5ZCM5pe25Lmf5piv 111733
+5Lit5Zu96Zif 111734
+5bu65oiQ5ZCO 111735
+5LmQ6KeG 111736
+5ZGo5bKB 111737
+6I2v5bqX 111738
+6YeR5Y2O 111739
+5Lil6YeN5b2x5ZON 111740
+6LSo5Zyw 111741
+5peF6YGK 111742
+5YW15Zmo 111743
+5pWZ6IKy5pWZ5a2m 111744
+56a75Y67 111745
+5ZCE5byP5ZCE5qC3 111746
+5LuL57Q= 111747
+5LuL57S5 111748
+5byA5aS0 111749
+5bCG6Ieq5bex55qE 111750
+5ZCs5Yqb 111751
+5L+h5oGv57O757uf 111752
+5LuO5qC55pys 111753
+5LuO5qC55pys5LiK 111754
+5o6M5aOw 111755
+5qyi5Zac 111756
+5bGV5Yy6 111757
+5ZW4 111758
+5aSq5aSa5LqG 111759
+6Zey572u 111760
+6IOh6JCd5Y2c 111761
+5aeU5a6j5Lyg 111762
+5aeU5a6j5Lyg6YOo 111763
+5Y2X6Ziz 111764
+5bee5Yy6 111765
+5LiO5pe2 111766
+5LiO5pe25L+x 111767
+5LiO5pe25L+x6L+b 111768
+5auM55aR5Lq6 111769
+6Imv5b+D 111770
+5aS06aG2 111771
+6LSi5oql 111772
+5L2b5rOV 111773
+5b61 111774
+5Y6f5Lu2 111775
+5Yue 111776
+55S356+u 111777
+5aSW5Zu95Lq6 111778
+6L+d57qq 111779
+5om+5LqG 111780
+5o2V5o2J 111781
+55u46K+G 111782
+5pCc6ZuG 111783
+55qE5Lyf5aSn 111784
+5LiJ57u0 111785
+5bCx6KGM5LqG 111786
+54uQ5pyI 111787
+54uQ5pyI5bGx 111788
+5biM5pyb6YCa6L+H 111789
+6ICM5a+55LqO 111790
+6Z2i5bCN 111791
+5Yab5Zui 111792
+6KGX5Yy6 111793
+5oKs5oyC 111794
+5L6/56eY 111795
+5pyJ5LiA54K5 111796
+5Lya6K6u5LiK 111797
+5LiL5omL 111798
+5buj5ZGK 111799
+5LqU6KGM 111800
+562J5YCZ 111801
+57Sn57Sn5Zu057uV 111802
+5ou/5LqG 111803
+5qGM6Z2i 111804
+56We5oOF 111805
+6ZuE5Y6a 111806
+556z 111807
+5qW85LiL 111808
+5b2q 111809
+5LqL5Y+R 111810
+5YaN6KeB 111811
+6aSY 111812
+6aKE5ZSu 111813
+5Y6755yL55yL 111814
+5oiR5Lus5bqU6K+l 111815
+5LiJ5a62 111816
+5rWK 111817
+5LmQ6Zif 111818
+55yL5LiN6KeB 111819
+6ISR5a2Q 111820
+5oyB5pyJ55qE 111821
+55m96I+c 111822
+6Zeq54OB 111823
+5Zad5rC0 111824
+5o6n5Yi257O757uf 111825
+5LiT5Yy6 111826
+5pyd5bu3 111827
+5oiR5b+D6YeM 111828
+5bGV5Y6F 111829
+6JyY6Jub 111830
+5Ya757uT 111831
+57Kq 111832
+5bqQ 111833
+5ZCR56S+5Lya 111834
+5Yaz562W6YOo572y 111835
+55+t5pyf5YaF 111836
+5paw5Lia5oCB 111837
+5pyU 111838
+5pe25oql 111839
+5L2/5LmL 111840
+5Zug5a2Q 111841
+5Y+C5LiO6ICF 111842
+55qE5bm06L275Lq6 111843
+5omL6KGo 111844
+5bCB6ZSB 111845
+5Li65LuA5LmI5LiN 111846
+5ZC454Of 111847
+5q+S57Sg 111848
+5YiR5rOV 111849
+55+r5q2j 111850
+6Lqr5peB 111851
+5Y6f6LCF 111852
+55uR5oqk 111853
+5q2k5aSE 111854
+6Zqo5pmC 111855
+5p6c5a6e 111856
+5Yy755aX5pyN5Yqh 111857
+5LiN5ZCI55CG 111858
+5pCe5aW9 111859
+55qE6ISa5q2l 111860
+5aSW5aWX 111861
+57aT6YGO 111862
+5pS+57yT 111863
+5YGc55WZ 111864
+5pif55CD 111865
+55qE5LiA6Z2i 111866
+5Yeg5L2V 111867
+6L2u5Zue 111868
+5q+b5be+ 111869
+5L+u55CG 111870
+5LiN55+l5LiN 111871
+5LiN55+l5LiN6KeJ 111872
+5pW05Liq5Lq6 111873
+5q+B54Gt 111874
+5Y+w5bee 111875
+5L2/55So5a+/5ZG9 111876
+6buR55m9 111877
+5pG457Si 111878
+6byg5qCH 111879
+6Z2p5paw 111880
+6bq1 111881
+5LiT6Zeo5Li6 111882
+5b6I5aSa5pyL5Y+L 111883
+5bel5L2c57uE 111884
+5ZCI5b2x 111885
+54K65LuA6bq8 111886
+5p6B5bqm 111887
+55qE6L+b5q2l 111888
+5b2T5LmL 111889
+5b2T5LmL5peg 111890
+5b2T5LmL5peg5oSn 111891
+6LS06L+R 111892
+5bC65bqm 111893
+5Zyo546w5Zy6 111894
+6ZmN5Li0 111895
+5YW76ICB6YeR 111896
+56OV 111897
+5Y+v5Lul5L2/ 111898
+566h55CG5rC05bmz 111899
+5pys5oql6K6w6ICF 111900
+5rOV5Luk 111901
+5Y2h6L2m 111902
+5Lic5rW3 111903
+5aSa6YeN 111904
+5YW26Ze0 111905
+57SZ 111906
+6YeN5aSn6aG555uu 111907
+5rGX5rC0 111908
+57uE5aeU5Lya 111909
+5L+h5oGv5YWs5byA 111910
+5LiN6K665piv 111911
+5LiA5ZCs 111912
+6JK45rG9 111913
+5o+t56eY 111914
+6LaF6YGO 111915
+6Kem5Y+R 111916
+5amm 111917
+5YWz6IGU5Lqk5piT 111918
+5bCx57uZ5aSn5a62 111919
+5aW95LmF 111920
+5YCf6LS3 111921
+5ri45oiP6KeS6Imy 111922
+5byA5ZCv5LqG 111923
+5o6g 111924
+5YWa55qE5Y2B5Lmd 111925
+5LiL6Zuo 111926
+55+t5pe26Ze05YaF 111927
+5a+F 111928
+5a+85YWl 111929
+5bel5L2c57uP6aqM 111930
+5Lmf5Y+q6IO9 111931
+6Zu36ZyG 111932
+6Lef6L+b 111933
+5Y2h6YCa 111934
+6aKH5pyJ 111935
+5py65L2T 111936
+5oiY5aOr6IGM5Lia 111937
+5aWz5Li7 111938
+5L2T5Yi25py65Yi2 111939
+6Laz5Y2P 111940
+6IiS6YCC55qE 111941
+5YCf5Y+j 111942
+5om55Yik 111943
+5pWw5YC8 111944
+6Ku+ 111945
+6Zi/5ouJ5Lyv 111946
+5ZiO 111947
+5oW2 111948
+6L6+5Lq6 111949
+5byA5rC0 111950
+5aSn6Zuo 111951
+5rip5a6k 111952
+5L2O6L+3 111953
+5LuN5pen 111954
+6aqX5a2Q 111955
+5Lqy5bGe 111956
+55CG5pm6 111957
+5pys5Z+66YeR 111958
+5aiF 111959
+5YaZ5a2X5qW8 111960
+5aKZ5aOB 111961
+5a61 111962
+6Jm954S25piv 111963
+6aG6552A 111964
+5YWr5Y2m 111965
+5ZWG55So 111966
+5LiN5aSx 111967
+6L+36Iyr 111968
+6aG65L6/ 111969
+5pqR5pyf 111970
+5qy66LSf 111971
+6aKR6aKR 111972
+6K+l5qCh 111973
+5paZ55CG 111974
+5rex5oOF 111975
+5YmN6ZSL 111976
+5L+d6K2J 111977
+6IGM5Lia55Sf5rav 111978
+5YWs5byA5Y+R 111979
+5YWs5byA5Y+R6KGM 111980
+5YWl5oi3 111981
+6aCT 111982
+5YC+5ZCs 111983
+6a2B 111984
+5oSJ5oKm 111985
+5Zue5ZCI 111986
+5YWo5Yqb5Lul 111987
+5YWo5Yqb5Lul6LW0 111988
+5YO55YC8 111989
+6IO95Yqb5by6 111990
+57uP5byA 111991
+57uP5byA5Yy6 111992
+6L+c5pa5 111993
+55qE6YGT55CG 111994
+55u05Y2H 111995
+55u05Y2H5py6 111996
+5Li65Li76aKY55qE 111997
+57uZ5oKo 111998
+6L+Y5oOz 111999
+5q+U5oiR 112000
+5Yac54mn 112001
+5rW35bqV 112002
+562+6K6i5LqG 112003
+5a+55LqO5oiR5Lus 112004
+5pe26K64 112005
+6ZSu55uY 112006
+5a6e6ZmF5o6n5Yi2 112007
+55qE5qih5qC3 112008
+5Y+N5pig5LqG 112009
+5Luj5Yqe 112010
+5Yy755So 112011
+6ZuG57uT 112012
+5Y+R5bGV5YmN5pmv 112013
+5oyH552A 112014
+5Y2O5YyX 112015
+6L+Z5Yeg5Liq 112016
+5ZCN5rCU 112017
+5YKN5pma 112018
+6Ieq5Y+R 112019
+5rOi5YWw 112020
+5aSn5Yqb5o6o6L+b 112021
+6Ieq56ew 112022
+6I2G5bee 112023
+5pCN5a6z 112024
+5LqG5LiA5Y+l 112025
+5pyA5Yid55qE 112026
+6YeR6J6N5Y2x5py6 112027
+5oCA5b+1 112028
+6KGM5YuV 112029
+5aWz5o6S 112030
+5LiN6Kej 112031
+5Lyg6ZSA 112032
+6L2s6L296K+3 112033
+6aWw5ZOB 112034
+5Y+q5Li6 112035
+5LiO5LyX 112036
+5LiO5LyX5LiN5ZCM 112037
+6IO96ICX 112038
+6I+p5o+Q 112039
+6L+R5Lik5bm0 112040
+6L+U5Lmh 112041
+6ams5LiK5bCx 112042
+5LqM562J5aWW 112043
+5rC0566h 112044
+5rOV5a2m 112045
+54Gt54Gr 112046
+5aSn5aeQ 112047
+5ZGo6L2s 112048
+5pyJ5pyf 112049
+5pyJ5pyf5b6S 112050
+5pyJ5pyf5b6S5YiR 112051
+5bCN5pa5 112052
+56We6Imy 112053
+5rK56ISC 112054
+5LiJ54K5 112055
+5LiN5Yip5LqO 112056
+5LqL5Lia6YOo 112057
+5bCx6Lef 112058
+5byA5pSv 112059
+5bCP5aWz5a2p 112060
+5YWx5ZCM5Yqq5Yqb 112061
+55Sa6Iez6L+Y 112062
+6L+Z5ZCN 112063
+6L+Z56yU 112064
+546v5Y2r 112065
+5pyJ56eN 112066
+6KeG5Yqb 112067
+54af55+l 112068
+5YWs56ev6YeR 112069
+5raI6Ziy5a6J5YWo 112070
+6aKH5Li6 112071
+5aSn6IW/ 112072
+6Z22 112073
+54m55pWI 112074
+5pyN5Yqh5Yy6 112075
+5byA5Ye6 112076
+5rex5bqm6J6N5ZCI 112077
+5peg5b+n 112078
+5p+l6ZiF 112079
+57uI57uT 112080
+5L+d56iO 112081
+6KiO6KuW 112082
+5b2T5YGa 112083
+6Lez6Iie 112084
+5a+n 112085
+5aWz546L 112086
+6K6w6ICF5Zyo 112087
+5YWo5Lqn5Lia6ZO+ 112088
+6LSv6YCa 112089
+5YW05Lia 112090
+6ZmN5Yiw 112091
+5bCB6Z2i 112092
+5YWo6Z2i5o6o6L+b 112093
+5aW26Iy2 112094
+6YCJ5Z2A 112095
+5LqG5LiA5Zy6 112096
+5ZCM5Ly0 112097
+6K6u6K66 112098
+5pCT 112099
+6K+46JGb 112100
+6K+46JGb5Lqu 112101
+5bmy5Zib 112102
+5rWB5oSf 112103
+5LiT5Lia55+l6K+G 112104
+55S156uZ 112105
+5YeP5byx 112106
+5Ye65YWl 112107
+5ZCE55yB 112108
+6Z2e5bi46auY 112109
+5Zyw5q+v 112110
+5Y+R5paH 112111
+54SJ 112112
+54On54Ok 112113
+5aOB57q4 112114
+5oG25YyW 112115
+6Iq4 112116
+6IOW5a2Q 112117
+54eS 112118
+55yB6ZKx 112119
+55m+5by6 112120
+55CG5bel5aSn5a2m 112121
+6ZKi5p2Q 112122
+5Zu95pyJ6LWE5Lqn 112123
+5oiY5py6 112124
+5rOE6Zyy 112125
+5ZCO6Z2i55qE 112126
+5rC06LWE5rqQ 112127
+5qKF6Iqx 112128
+5YaZ552A 112129
+5LmL5aOw 112130
+5peg5Y+v 112131
+5piO5pyd 112132
+56uL5pa557Gz 112133
+57ej 112134
+5pS+6L+H 112135
+56aP55Sw 112136
+5b6X5L2P 112137
+5Y+X5LyX 112138
+5Lit57qn 112139
+55eF5Y+Y 112140
+5LiA556s6Ze0 112141
+5p2D6YeN 112142
+5Lq65oCn5YyW 112143
+5Yy755aX5Y2r55Sf 112144
+5LiN5Yiw5L2N 112145
+5pm66IO95a625bGF 112146
+6aWu55So 112147
+5ryU5Y+Y 112148
+6auY57Sg6LSo 112149
+5LmZ5pa5 112150
+5YGc55WZ5Zyo 112151
+6I635om5 112152
+56m/5qKt 112153
+5a6i5Zy6 112154
+5oy95Zue 112155
+5Lqs5Z+O 112156
+55Sf5ZG95Yqb 112157
+5a+m6Zqb 112158
+54eI 112159
+5YaN546w 112160
+546w5a6e5Lit 112161
+5pyJ5L+h5b+D 112162
+55aP6YCa 112163
+5Zi05ZSH 112164
+6Zu36ZSL 112165
+6I+c5Y2V 112166
+6YWv 112167
+6LaF6auY 112168
+5b6I6auY5YW0 112169
+55Sf5q6W 112170
+6YCg5Lu3 112171
+6K+v5Yy6 112172
+5oaL 112173
+5aW95raI5oGv 112174
+5bSt 112175
+5Lul6Ie0 112176
+5byA546p56yR 112177
+55uR6KeG 112178
+5beh5a+f 112179
+5b635bee 112180
+5pep5pep 112181
+6Zeq55S1 112182
+5oiq5Zu+ 112183
+5Y+v5Lul5qC55o2u 112184
+5omL6Im6 112185
+5o6l6L2o 112186
+56eN5peP 112187
+5oCA6YeM 112188
+5Y675Yy76Zmi 112189
+5LiA5LqM 112190
+5byA6ZiU 112191
+5YeP6YCf 112192
+5L2G5LuO 112193
+6YCZ5LiA 112194
+5YeP5YWN 112195
+5Li76aKY5pWZ6IKy 112196
+5byA5bel5bu66K6+ 112197
+6Lmm 112198
+5pyI6aW8 112199
+5LiL5rKJ 112200
+5bCK5Lil 112201
+6ZmH 112202
+5a6e5pyo 112203
+5bug5ZWG 112204
+5aOw56ew 112205
+6ICD5Zy6 112206
+5biD6bKB 112207
+6Ieq5p2l 112208
+6Ieq5p2l5rC0 112209
+6ZK+ 112210
+5bm05Lul5LiK 112211
+5aSn5Y+U 112212
+5LuW5bey57uP 112213
+5YWo5p2R 112214
+6IGU57O755S16K+d 112215
+5Li65a+85ZCR 112216
+5Yik5aSE 112217
+5a+56Zi1 112218
+55uu5qiZ 112219
+5ZCN6aKd 112220
+5a6i5rCU 112221
+5qiq5ZCR 112222
+562J5YaF5a65 112223
+5Yeg54K5 112224
+6LCI6K66 112225
+5LiN5LmP 112226
+5bGV546w5Ye6 112227
+6L6D6ZW/ 112228
+6YCG6L2s 112229
+5bCP5pmC 112230
+5piv5aSa5LmI 112231
+5pys5pyI 112232
+6L+R6KeG 112233
+5oiQ56uL5Lul5p2l 112234
+5Luj6KGo552A 112235
+5oql5aSN 112236
+5oiP5puy 112237
+6Kit5YKZ 112238
+5YWl6IKh 112239
+5b6B5pyN 112240
+6auY5Ye6 112241
+6Iie5Y+w5LiK 112242
+5b+D5Yqo 112243
+5Lik54K5 112244
+55u455W2 112245
+6Jmb 112246
+5Li76aG1 112247
+5Yeg5a62 112248
+5peg5LiN 112249
+5Y2P5a6a 112250
+5paQ 112251
+5a+T5oSP 112252
+5YWo57q/ 112253
+5o2V6bG8 112254
+5Y+v5Lul5LuO 112255
+5pyJ6L+Z5qC355qE 112256
+5oG26a2U 112257
+5YyF5a2Q 112258
+5oGk 112259
+5byA5aWW57uT5p6c 112260
+5LiN5q27 112261
+6JeN 112262
+5byv5puy 112263
+5rW35bOh 112264
+6ZSA5q+B 112265
+55qE54us54m5 112266
+56S65oSP 112267
+5LiN6IO95YaN 112268
+6IO95oqK 112269
+6Ziy57q/ 112270
+5LiN5bCR5LqO 112271
+5rGA 112272
+55qE6YKj5LiA 112273
+55yf5oOF 112274
+5Z6u 112275
+6KKr5omT 112276
+5Zu95a6J 112277
+576O5aaZ 112278
+6L+Z5Yeg 112279
+5Ye66YGT 112280
+5pyN5Yqh5LqO 112281
+5oiQ5p6c6L2s5YyW 112282
+5omN5Y2O 112283
+5aSp6bmF 112284
+5Yeg5Liq5Lq6 112285
+5YCY6Iul 112286
+6IC96K+v 112287
+5oqX5oiY 112288
+6KGM6Yq3 112289
+5p2l6KKt 112290
+5YCf6Yyi 112291
+6I2J6I6T 112292
+5Lil5qC85omn6KGM 112293
+5Li+6KGM5LqG 112294
+5aSW57GN 112295
+5bey6L6+ 112296
+5p2R5YWa5pSv6YOo 112297
+6KGd 112298
+6ZmN6Iez 112299
+5rW36YeP 112300
+6aSQ6aaG 112301
+5oCl5b+Z 112302
+5rex6L+c 112303
+5b6A6L+U 112304
+56iO5Yqh5bGA 112305
+5bm/5rOb5bqU55So 112306
+6K6u5ZGY 112307
+5peg5pWM 112308
+55y85YWJ 112309
+54Ot6KGA5Lyg5aWH 112310
+5q2Q 112311
+5LqG5Lqb 112312
+6L+d6IOM 112313
+6L+Z5piv5LiA56eN 112314
+5LiN56iz5a6a 112315
+5aSn5a625YiG5Lqr 112316
+6KGo54++ 112317
+5YmN5Y2B 112318
+6Lev6L+H 112319
+5pKp 112320
+5ZCM5oOF 112321
+5Lmg5L+X 112322
+5Y+R6LSi 112323
+5bqU5pyJ55qE 112324
+5p2O5p+Q 112325
+6IKb 112326
+6ams5YWL 112327
+6YCa5ZGK 112328
+5beo5Lq6 112329
+5LiA5Zui 112330
+6YCZ5qyh 112331
+5LiN5LqG6Kej 112332
+5pa96KGM 112333
+6JGh6JCE54mZ 112334
+5Y+Y5b6X5pu05Yqg 112335
+5o+j 112336
+5Yib5paw6IO95Yqb 112337
+55WF6ZSA 112338
+6KGo5oms 112339
+5q+U5Yip 112340
+5q+U5Yip5pe2 112341
+5Yy755aX5L+d6Zmp 112342
+5pON57q1 112343
+5Lyk5Lqh 112344
+5rWO5a6B 112345
+5Y+Y5LqG 112346
+5pys5qyh5rS75Yqo 112347
+5Zyf6LGq 112348
+5oOz5Yqe5rOV 112349
+5piV 112350
+5b2T5pma 112351
+5Ye65bGA 112352
+54Ot6K6u 112353
+6LCI6LCI 112354
+5pmL5Y2H 112355
+5Yq/5b+F 112356
+55m75bGx 112357
+6YKj5YS/ 112358
+5ZCD5Yiw 112359
+5LmL5Z+O 112360
+5b+r5p2l 112361
+5rmb5rGf 112362
+56ys5LiJ5Liq 112363
+5YWo6Z2i5o+Q5Y2H 112364
+5aWW5a2m 112365
+5aWW5a2m6YeR 112366
+5oqV5YWl5L2/55So 112367
+6b2Q6bKB 112368
+5Y+v5Lul5oqK 112369
+5ZKM5LuW55qE 112370
+6LSt5oi/6ICF 112371
+5q2j5byP5ZCv5Yqo 112372
+5Y2O5ram 112373
+5LiN5pat5a6M5ZaE 112374
+6ZKi5p2/ 112375
+57Sv56ev 112376
+5ruh6IS4 112377
+5Zub5pa5 112378
+6LSi54mp 112379
+5LuW5Lus5Lya 112380
+5aSP5pel 112381
+6YKj5Liq5Lq6 112382
+6Z2g552A 112383
+54K55LqG 112384
+54K55LqG54K55aS0 112385
+5qmL 112386
+5Y+I5aW9 112387
+5Y+I5aW95Y+I 112388
+5Y+I5aW95Y+I5b+r 112389
+6Zi16Zi1 112390
+5bCB5bu6 112391
+5pys55Sw 112392
+54mp5Lia5pyN5Yqh 112393
+6Ieq6LS45Yy6 112394
+5ZCP 112395
+5L6/5Yip5bqX 112396
+5Zu95a625qCH5YeG 112397
+6Z2i57KJ 112398
+6Imw6L6b 112399
+5pS75YWz 112400
+5omT5YyF 112401
+6L2m6Zif 112402
+5Lq66YCJ 112403
+5Y+v5LiN5piv 112404
+5LqM5Y2B5bm0 112405
+5ZCN5biI 112406
+5rWm5Lic 112407
+5YWs6K+B 112408
+6L+Q6YCB 112409
+5piv5pyA5aW955qE 112410
+5p+U5ZKM 112411
+546L5p+Q 112412
+55eF5oi/ 112413
+5Ya26YeR 112414
+5LiA5Lu25LqL5oOF 112415
+5Y2k 112416
+5Y+v5o6n 112417
+54mf 112418
+5ouC 112419
+5bey5LqO 112420
+5Lq66YCg 112421
+55Sf54mp5Yy76I2v 112422
+5L2T546w5Ye6 112423
+6IKy5YS/ 112424
+6ICB5a6e 112425
+5ZyW54mH 112426
+6Ku4 112427
+57Sv5LqG 112428
+5oSf5YW06Laj55qE 112429
+5Zu+54mH5p2l5rqQ 112430
+5Lmf5piv5LiA56eN 112431
+5r6O5rmD5paw6Ze7 112432
+5pe26KGo56S6 112433
+5YWJ6L6J 112434
+5oql5bqf 112435
+5bKB5pe2 112436
+6YWu 112437
+5qOA5L+u 112438
+5Y+Y6YCf 112439
+5Y+Y6YCf566x 112440
+5Zyo6IGM 112441
+6Y+h 112442
+5o2C 112443
+552j5Yqe 112444
+5rC45LiN 112445
+5YGa5LiA5Lqb 112446
+5Y6G5pe2 112447
+5bel56iL5py65qKw 112448
+5oGw5b2T 112449
+5bCx5Zyo5LqO 112450
+56ew5ZG8 112451
+6YCa5bi45piv 112452
+5qC35byP 112453
+5ZGo5LiA 112454
+6Iux6ZWR 112455
+5Z2H57q/ 112456
+5Lyg6Ze7 112457
+55So5oi35L2T6aqM 112458
+6LWe5ZCM 112459
+6aqo5oqY 112460
+5Li65Li75L2T 112461
+5rGf5bGx 112462
+5riF5pyd 112463
+5pSA5Y2H 112464
+5LiN55u45L+h 112465
+6Z20 112466
+5q2m5Yqf 112467
+5Yuk5Yqz 112468
+5p2l5om+ 112469
+5bCG5oyB57ut 112470
+5Lir5aS0 112471
+5qiZ5rqW 112472
+6KO0 112473
+5rex5rex55qE 112474
+5a2V6IKy 112475
+6KeE5YiS5bu66K6+ 112476
+5riF54i9 112477
+57K+5YeG5om26LSr 112478
+5omT56C05LqG 112479
+6L+Z5LiA5aSp 112480
+5bel5L2c5oC757uT 112481
+5peF56iL 112482
+5Lic6JCl 112483
+5pS+5bCE 112484
+5pyJ5Yeg5Liq 112485
+6Z2e54mp6LSo 112486
+5ZCD5b6X 112487
+5Zeo 112488
+5Lya5Y+R55Sf 112489
+56+u5p2/ 112490
+5byA5bCB 112491
+6bq75bCG 112492
+6I+P5rO9 112493
+5LiN5ZCI 112494
+57O75YiX5Lqn5ZOB 112495
+6K2s5aaC 112496
+576O6KqJ 112497
+6Ieq5bex5Zac5qyi 112498
+5Lqk5piT5Lit5b+D 112499
+5ZCI5ZSx 112500
+5L2/5oiR 112501
+5YOP57Sg 112502
+5bim6Zif 112503
+5L2G5a+55LqO 112504
+5oqK6L+Z5Liq 112505
+6IKd6ISP 112506
+5Y2V57qv55qE 112507
+5pS75Z2a5oiY 112508
+55ub5Lya 112509
+5ZG15oqk 112510
+5qqA 112511
+6LW25LiK 112512
+5qWK 112513
+5LmF5LqG 112514
+56Gd 112515
+562U6aKY 112516
+5L+d5oyB552A 112517
+6KeB6K+G 112518
+54K55YS/ 112519
+5Y2K5Liq5pyI 112520
+5ruH 112521
+5rW45rOh 112522
+5Lyg6YCB 112523
+5Zyo5biC5Zy65LiK 112524
+5LmL5Lmh 112525
+54m56ZW/ 112526
+6Zue 112527
+6Kqg 112528
+6Lqr5aSE 112529
+5p+g5qqs 112530
+6Lqr56m/ 112531
+55yB5YWs5a6J 112532
+55yB5YWs5a6J5Y6F 112533
+5Y+Z5Yip5Lqa 112534
+5Yeg5YiG6ZKf 112535
+5Lq65YCR 112536
+5Zyw5q61 112537
+6Ieq5a2m 112538
+5Lmf6LaK5p2l6LaK 112539
+6IGM5p2D 112540
+5pan 112541
+6Ie7 112542
+5b2S57qz 112543
+6am+6amt 112544
+6YOo5YiG5Zyw5Yy6 112545
+5rKh5pyJ5oOz5Yiw 112546
+5pKH 112547
+5LmM6bKB 112548
+5LmM6bKB5pyo 112549
+5LmM6bKB5pyo6b2Q 112550
+6IKy5Lq6 112551
+55qE5q2l5LyQ 112552
+5bu25pyf 112553
+5rK55rCU 112554
+5YGa5a6M 112555
+5Zyj5Zyw 112556
+5Liw5Y6a 112557
+5a695bim 112558
+5Y+v6Z2g55qE 112559
+5bqt6Zmi 112560
+5a2c 112561
+5bCP5bq356S+5Lya 112562
+5a6J5YWo566h55CG 112563
+5bm056ys 112564
+5o6S5rGh 112565
+6IOM5YyF 112566
+5a625L2P 112567
+5YW25a6e5bCx5piv 112568
+5Lya6KeB 112569
+5biu5Yqp5LyB5Lia 112570
+572R6LSt 112571
+5piv5LiN5Lya 112572
+6aOv5bqX 112573
+5q275Y67 112574
+5YWN55ar5Yqb 112575
+5pyV 112576
+5Zad5LqG 112577
+6L275b6u 112578
+5Liq5pyI5YaF 112579
+57uE5Zui 112580
+5ZKM5a6M5ZaE 112581
+6bi9 112582
+5o+Q6YCf 112583
+6KW/5a6J5biC 112584
+5Lit5b+D5Li75Lu7 112585
+5pe26Ze05Li6 112586
+5pyf5p2D 112587
+6LaV 112588
+5LiN5LuF6KaB 112589
+5pyN5LuO 112590
+6aGY5oSP 112591
+5LiN5bCP 112592
+5LiN5bCP55qE 112593
+57CH 112594
+56qm 112595
+5YiH5oiQ 112596
+5ZOI5Yip 112597
+5aSp55yf 112598
+5LiA5qyh5qyh 112599
+6YeR5biB 112600
+5oCO5LmI6IO9 112601
+572R6LS3 112602
+5Lya6K6h5biI 112603
+55+t57y6 112604
+5a+55qCH 112605
+5Y+Y5b6X5pu0 112606
+5YmN5Yeg5aSp 112607
+6Ziy5rGb 112608
+5b2p6Jm5 112609
+5ZOB5L2N 112610
+6KGo5qC8 112611
+5Lil5a+G 112612
+5q+b5Yip546H 112613
+55qE5Y2x5a6z 112614
+5b2V5Yi2 112615
+5rC05Yqh 112616
+6IO95aSf6K6p 112617
+5bmz5p2/ 112618
+5Lmz5oi/ 112619
+6LiP5a6e 112620
+6aaW5Yib 112621
+6aaZ6JWJ 112622
+5oql6KGo 112623
+5LiA5oq5 112624
+5Ye655Sf5LqO 112625
+6LK755So 112626
+5Ye66K6p 112627
+5ZCI5rOV5oCn 112628
+5bC85YWL 112629
+5Yaw5Ya3 112630
+6aaZ5rCU 112631
+5Y+356ew 112632
+6LW356CB 112633
+5Z+O5Y6/ 112634
+546p6ICN 112635
+5LiK6ZmQ 112636
+5Lya6K6u57K+56We 112637
+5peB6L6555qE 112638
+5L6/5Lya 112639
+5o+t5pmT 112640
+546p5oSP 112641
+6Zuq5bGx 112642
+5ZCR552A 112643
+5L2T6IKy5Zyo57q/ 112644
+6K+05piO5Lmm 112645
+5YyW6IKl 112646
+5YWa57uE5Lmm6K6w 112647
+5Yqo5Lq6 112648
+5LmL5omA 112649
+5pyI6Iez 112650
+5pyA5b+r55qE 112651
+6IqC5YGH5pel 112652
+5LiT5Zy6 112653
+6ICD5LiK 112654
+56qf 112655
+6bKc6KGA 112656
+6L6D5by655qE 112657
+5oKE54S2 112658
+5aSa5Liq5Zu95a62 112659
+56qX5biY 112660
+5p6B5aSn5Zyw 112661
+5LiN55So5ouF5b+D 112662
+6L+Z5LmI5YGa 112663
+5YO55qC8 112664
+576O5Li95Lmh5p2R 112665
+5bCP5pe25YaF 112666
+57Sn6L+r 112667
+5aSn54Gr 112668
+6IOz6IaK 112669
+5pON5L2c57O757uf 112670
+5q6L55WZ 112671
+5YaZ5Ye6 112672
+56aB5b+M 112673
+5Yqg55uf5bqX 112674
+6L+R55m+ 112675
+5L6/5Y+v 112676
+5pW05pS55o6q5pa9 112677
+6YeH6K6/5pe2 112678
+5ZSQ5Luj 112679
+5rex5YyW5pS56Z2p 112680
+55+i 112681
+6YO95Zac5qyi 112682
+6LaK5p2l6LaK6auY 112683
+6Iqx5py1 112684
+5aS055a8 112685
+5a6J5bq3 112686
+5aKe6ZW/546H 112687
+55y855yL 112688
+5bCx5piv5Li65LqG 112689
+6ICM5a+86Ie0 112690
+5Yqg5b+r5bu66K6+ 112691
+6Iqx5qC3 112692
+5YaF5b+D55qE 112693
+5piG5bGx 112694
+6LOH5rqQ 112695
+5Zue5Yiw5a62 112696
+6I+K6Iqx 112697
+5rC06YeP 112698
+5b6B5L+h 112699
+6KGM5pS/5Yy6 112700
+5LmD5piv 112701
+5oqV6LWE6aG555uu 112702
+5auB57uZ 112703
+56We5Zyj 112704
+56ig 112705
+5pys5p2l5bCx 112706
+6YCQ5LiA 112707
+6IGM5Lia5oqA5pyv 112708
+5LiN6Imv5L+h5oGv 112709
+5omY6L+Q 112710
+5ZCv56S6 112711
+5LmL5YWn5a65 112712
+6Z+2 112713
+5aWi5Y2O 112714
+5o+t56S6 112715
+5oiQ5Li65Lit5Zu9 112716
+5raI6LS55ZOB 112717
+5YWs55So 112718
+5pCe5a6a 112719
+6K+35L2g 112720
+5p+a 112721
+5YaF6KGj 112722
+5L2G5LuW5Lus 112723
+5L+d5rm/ 112724
+6K+l5Y6/ 112725
+6aWx5ZKM 112726
+5o6o5ZCR 112727
+6LWE5paZ5pi+56S6 112728
+5LiN5b2x5ZON 112729
+5Lq65Lq66YO9 112730
+5Y+R5bGV5aOu5aSn 112731
+5YW76ICB5pyN5Yqh 112732
+55Sf5rS75rC05bmz 112733
+5ZCE5Y6/ 112734
+5L2g6ZyA6KaB 112735
+6K+055qE5piv 112736
+5aSW5aqS 112737
+5q2k5Lq6 112738
+5qyh6KaB 112739
+6L+96LW2 112740
+5bqU6K+l5aaC5L2V 112741
+5pel5YeM5pmo 112742
+55Wl5pyJ 112743
+6YO95oOz 112744
+5ri45LmQ 112745
+6L+Z5qy+5ri45oiP 112746
+5bmz5reh 112747
+5piv5LiA5YCL 112748
+5aSH6ICD 112749
+5Yi25q2i 112750
+5LiA5a6a6IO9 112751
+5b6S5byf 112752
+5Lul54K6 112753
+5Y2D5YWD 112754
+5LqU5YWt 112755
+6L+q5aOr 112756
+6L+q5aOr5bC8 112757
+6Ziz5oCn 112758
+5Yas5aWl5Lya 112759
+5bCx5piv5Zug5Li6 112760
+5oyC6ZKp 112761
+5qaC5Ya1 112762
+5Y+q6KaB5pyJ 112763
+5rK555S7 112764
+5Zyw5qCH 112765
+5LiK6LCD 112766
+5Lqn5Lia5Zut5Yy6 112767
+5YWr5Y2B 112768
+5qOx 112769
+5ray5pm2 112770
+5p2R5aeU5Lya 112771
+562+57qm5Luq5byP 112772
+6L+Z5YW25Lit 112773
+5YaZ6YGT 112774
+56S66IyD5Z+65Zyw 112775
+6YeO55Sf5Yqo54mp 112776
+6Zu75a2Q5L+h566x 112777
+5Zu96ZmF6LS45piT 112778
+5Lq65p2D 112779
+5L+d566h 112780
+6Iul5oKo 112781
+5Y6L5oqR 112782
+6bub 112783
+5Zyw55yL552A 112784
+6Zmw 112785
+5LiA5bm05aSa 112786
+5LuO5a65 112787
+5Lit5pat 112788
+5a+f6KeJ 112789
+56e75Lqk 112790
+6ZSv 112791
+5oiW6K645piv 112792
+57ag 112793
+5Lik6aG5 112794
+5pyA5Zac5qyi 112795
+5pyA5Zac5qyi55qE 112796
+5aSc6YeM 112797
+5ZCM5LuB 112798
+5Yib5paw6amx5Yqo 112799
+6LCB6IO9 112800
+6aO+ 112801
+5YWJ5a2m 112802
+5Y6E 112803
+6ISx6aKW 112804
+6ISx6aKW6ICM5Ye6 112805
+6L+m 112806
+5piv5LiN5Y+v6IO9 112807
+56ql 112808
+6IO95ruh6Laz 112809
+5a695bqm 112810
+5Lym55CG 112811
+5Y+v5Lul6I635b6X 112812
+6L2s5Lya 112813
+5bGx5p2R 112814
+6ZO66K6+ 112815
+5Ye65Ye7 112816
+5paH5YyW6Im65pyv 112817
+5Lya6K6u5a6k 112818
+5q2M5aOw 112819
+5ruU 112820
+6JCO57yp 112821
+5pyN5Yqh5ZGY 112822
+5Y+R6KGo5LqG 112823
+5pa85piv 112824
+5piO56Gu6KeE5a6a 112825
+57u05aWH 112826
+5rC05Lqn 112827
+5oqV5L+d 112828
+6Zi06YGT 112829
+6LW25b+r 112830
+5aS65b6X 112831
+5LiL5Y2V 112832
+54mp5rWB5YWs5Y+4 112833
+546v57uV 112834
+5b2I 112835
+5L2c6aOO5bu66K6+ 112836
+5peF5ri45pmv5Yy6 112837
+5pyJ5pu05aSa55qE 112838
+5Liw5a+M5aSa5b2p 112839
+55CG6LSi5Lqn5ZOB 112840
+5Ye65beu 112841
+5LuO5Lil5rK7 112842
+5LuO5Lil5rK75YWa 112843
+55u45bmy 112844
+5ruL5ram 112845
+5Li75Yqe5pa5 112846
+5Ymn5Zy6 112847
+5rua55CD 112848
+5qmE5qaE 112849
+6Ieq5Li75Yib5paw 112850
+6YCa5b6A 112851
+5qC85bCU 112852
+55qE5LyY54K5 112853
+6IOM5LiK 112854
+56qc 112855
+54iG5Ye6 112856
+5bmz5pW0 112857
+5LiA6ISa 112858
+5YWo5L2T5ZGY5bel 112859
+6ZmQ5a6a 112860
+5Z+O6ZWH5YyW 112861
+5rez 112862
+6YCu5o2V 112863
+6KGM5Yqo6K6h5YiS 112864
+5omT5b6X 112865
+5Y6a6YeN 112866
+57qq5b2V54mH 112867
+5Z2a5L+h 112868
+5aSu5LyB 112869
+5YaN5Lmf5LiN 112870
+5aSp5rav 112871
+5Y+C6ICD6LWE5paZ 112872
+5pyJ5q+S 112873
+5ZC457qz 112874
+6LaK5Y+R 112875
+6YeN6KaB5oSP5LmJ 112876
+5Zu96Ziy6YOo 112877
+6L+Z5Liq6KGM5Lia 112878
+5pmu5p+l 112879
+5byC5oCn 112880
+5bu26L+f 112881
+5bCP5bmF 112882
+6Imy5oOF 112883
+57u85ZCI5rK755CG 112884
+5q2j5piv5Zug5Li6 112885
+5Lqn5Lia57uT5p6E 112886
+56CU56m25oql5ZGK 112887
+5YGc5LiL 112888
+6ZW/6ICB 112889
+6Yed5bCN 112890
+5Y2X5Lqs5biC 112891
+54GM5rqJ 112892
+6L2s6L+Q 112893
+5qy66K+I 112894
+6YCg5YGH 112895
+5YiG5biD5byP 112896
+5oSf6Kem 112897
+5oiR5b2T5pe2 112898
+5Y+R6KeJ 112899
+5Zu+57q4 112900
+5pS56Imv 112901
+54ug54ug 112902
+5Yay5Yi6 112903
+5paw5Lqs 112904
+5paw5Lqs5oql 112905
+56We5Zmo 112906
+56e456eG 112907
+54i6 112908
+5bCG6L+O5p2l 112909
+5bel5L+h 112910
+5bel5L+h6YOo 112911
+6ZmQ6YeP 112912
+5q2i5o2f 112913
+5a2m5Lya5LqG 112914
+5Y2O55ub 112915
+5Y2O55ub6aG/ 112916
+5b6M5L6G 112917
+5LiL6Z2i5piv 112918
+5LiL6Z2i5piv5bCP 112919
+5pCs6L+Q 112920
+576O5pyv6aaG 112921
+5riF5YeJ 112922
+5aSa5bm05YmN 112923
+6Kme 112924
+5Y2D57Gz 112925
+6KGo6L+w 112926
+5rGf6Zeo 112927
+5Yqg5rK556uZ 112928
+5pys6IO9 112929
+5a+86K+7 112930
+5Zu06KeC 112931
+5bm25ZCR 112932
+5Z+65pys5oOF5Ya1 112933
+5omT5byA5LqG 112934
+6L+Z5LiJ5Liq 112935
+5rGV5aS0 112936
+5by65pyJ5Yqb 112937
+5by65pyJ5Yqb55qE 112938
+6L+b5Zy6 112939
+5Lmd5rGf 112940
+55CD5pif 112941
+5aW955yL55qE 112942
+5aSn5oi3 112943
+5rmv 112944
+5aWH5aaZ 112945
+5LmQ5Zmo 112946
+5oiR55qE5b+D 112947
+55yJ5aS0 112948
+5Yac5Lia55Sf5Lqn 112949
+57yW56CB 112950
+5Z+656Q= 112951
+5Z+656SO 112952
+5aSp5paH 112953
+5YCL5Lq66LOH6KiK 112954
+5Y676L+H 112955
+6IGG5ZCs 112956
+5pS+5YGH 112957
+5LiN5YW35aSH 112958
+5reA57KJ 112959
+5aSn5L2s 112960
+5YWo5aSp 112961
+5YWo6Z2i5bu65oiQ 112962
+6ZqQ5b2i 112963
+57yF55S4 112964
+5ZCz 112965
+6KGM5pS/5omn5rOV 112966
+5Z+O5aCh 112967
+6I6r5pav 112968
+6I6r5pav56eR 112969
+5omA5pyJ5p2D 112970
+6ZuG5ZyY 112971
+5bGA5Ymv5bGA6ZW/ 112972
+5Yeg5LmO5rKh5pyJ 112973
+5rSB5YeA 112974
+55S15b2x6IqC 112975
+5a2p56ul 112976
+5omA5YGa55qE 112977
+5riF5Luj 112978
+5paw54mI 112979
+6ZOd5ZCI6YeR 112980
+5Li65oqT 112981
+5Li65oqT5omL 112982
+5Yik5a6a 112983
+54m55Lqn 112984
+5omL5qmf 112985
+5LiN5Y+v5oiW 112986
+5LiN5Y+v5oiW57y6 112987
+5biC5Zy66KeE5qih 112988
+5Z2v 112989
+5Yy75a2m6Zmi 112990
+5b+r6KaB 112991
+6Iyc 112992
+5oqY6IW+ 112993
+5LqG6L+H5p2l 112994
+5oql5ZGK5pyf5YaF 112995
+54mp56eN 112996
+57uf6K6h5bGA 112997
+5omp5bu6 112998
+5raF 112999
+6LSj5Lu75Lq6 113000
+6ZiO 113001
+6K+E6K6u 113002
+5b6A5LqL 113003
+5omA56S6 113004
+5pW05rSB 113005
+6Ze66Jyc 113006
+5peF6YCU 113007
+5a6e6K6t 113008
+5LmL56ew 113009
+5be05aOr 113010
+6YCf5bqm5b+r 113011
+5LiN5LuF5aaC5q2k 113012
+5a6d6LS155qE 113013
+5bqf54mp 113014
+5rKz5rC0 113015
+5o6l57qz 113016
+57K+5rmb 113017
+5YW25qyh5piv 113018
+6aG65b63 113019
+5YWs5YWx5Y2r55Sf 113020
+6KSQ6Imy 113021
+5LiN5oOc 113022
+5oqA5pyv5pyN5Yqh 113023
+5o63 113024
+5rGC6IGM 113025
+5LiJ5bOh 113026
+5oqV5YWl5Yiw 113027
+5aSq5ZCO 113028
+5ZCv5Yqo5Luq5byP 113029
+55u05o6l5b2x5ZON 113030
+5paw5qy+ 113031
+5Liq5Lmh6ZWH 113032
+55m+5Lq/ 113033
+5bqr 113034
+5Lmf5q2j5piv 113035
+5Y+254mH 113036
+5pyA5pep55qE 113037
+5oiY57up 113038
+5bel5pyf 113039
+5pma5pyf 113040
+6L+Z5qC36K+0 113041
+6K+N6K+t 113042
+5L6E 113043
+5pWj54Ot 113044
+6ZuG5oiQ55S16Lev 113045
+5ZCN6K+N 113046
+5pm65ZWG 113047
+5oul5aC1 113048
+54uC5qyi 113049
+6L+Z6Iis 113050
+5rW05a6k 113051
+5ZGV5ZCQ 113052
+5pyq5p2l5Y+R5bGV 113053
+5LiJ5L2N5LiA5L2T 113054
+5aqS6auU 113055
+5LiN5b6X6L2s6L29 113056
+5Zug5Li65aW5 113057
+5pi+56S65bGP 113058
+5L6b5pqW 113059
+6Yar6Zmi 113060
+5pyJ5oSP5oCd 113061
+5pyJ5oSP5oCd55qE 113062
+5aix5LmQ5Z+O 113063
+5Y215bei 113064
+5Yib6YCg5Yqb 113065
+56ug6IqC 113066
+5Lq65aSn5bi45aeU 113067
+6ICM546w5Zyo 113068
+5aSW5amG 113069
+5aKe5oyB 113070
+5LqU5Y2D 113071
+6ICB5biI5Lus 113072
+5rSb5p2J 113073
+5rSb5p2J55+2 113074
+5o6M5o+h5LqG 113075
+5Lit5Zu95paH5YyW 113076
+5paw5pS/ 113077
+5Li76KaB55So5LqO 113078
+5Y+R54On 113079
+57G75Ly85LqO 113080
+5YyX5p6B 113081
+5oiR5Lus6K6k5Li6 113082
+5byl5ryr 113083
+5YWo55CD57uP5rWO 113084
+6aKQ 113085
+5LiA6LW36KOF5L+u 113086
+5pSS 113087
+5ouJ6JCo 113088
+5bi25L6G 113089
+5Ya35rC0 113090
+5LiJ5Yac 113091
+5p2/5p2Q 113092
+6L+e6L+e 113093
+6ZOu 113094
+57uP6JCl55CG5b+1 113095
+5bGx6aG2 113096
+5b6I5oOz 113097
+55ir 113098
+5aeL57uI5L+d5oyB 113099
+5Zyo5bm/5bee 113100
+5LiN5ZCM5oSP 113101
+5Y+Y5Y6L 113102
+5Y+Y5Y6L5Zmo 113103
+5Lqn6ZSA 113104
+6KGo6Z2i5LiK 113105
+5omA5Lul5LuW 113106
+57uP6aqM5Liw5a+M 113107
+6YOo5aeU 113108
+5YW15Zui 113109
+5omA6L+w 113110
+5pWm54WM 113111
+57uP6JCl6IyD5Zu0 113112
+5Y+j6K+t 113113
+5aSx5L+h 113114
+5q+P5Liq5Lq655qE 113115
+5omL5oyB 113116
+5oGQ5oWM 113117
+5aCh5Z6S 113118
+6aaF 113119
+6ZO46YCg 113120
+5ou/5Ye65p2l 113121
+5o6i5rWL 113122
+5aSn5a625LiA6LW3 113123
+5aWn 113124
+5a6e6LSo5oCn 113125
+5bCP5YS/ 113126
+6Ie65Y2X 113127
+6Ie65Y2X5biC 113128
+5byA5Y+R6ICF 113129
+5Y+v5qC55o2u 113130
+566x5a2Q 113131
+6aW65a2Q 113132
+5b+Z552A 113133
+5p2l5LiN5Y+K 113134
+55u45Lyg 113135
+5Zu9572R 113136
+6IW55rO7 113137
+6L+Z6YeM5pyJ 113138
+6aOO5pmv5Yy6 113139
+5Y+C5L+d 113140
+5q276ICF 113141
+5oi05LiK 113142
+5qmf5qeL 113143
+6K+V6aqM5Yy6 113144
+5Lyg5o6I 113145
+5rW36L65 113146
+5rOq5rC0 113147
+55u45YWz5YaF5a65 113148
+6YOR5bee5biC 113149
+5YWR546w 113150
+5Lik5ZGo 113151
+6Iqc5rmW 113152
+55S15a2Q5L+h5oGv 113153
+57qi5aSW 113154
+5peF5ri45bGA 113155
+5b6A5b6A5Lya 113156
+6L+F54yb 113157
+5Lyg55yf 113158
+5riF5r6I 113159
+5bCx6L+R 113160
+5b6u5L+h576k 113161
+57O75YiX5rS75Yqo 113162
+57uP5bi45Lya 113163
+6KeC5rWL 113164
+5b+D5b6X5L2T5Lya 113165
+6ZmI5YiX 113166
+5YyX5paX 113167
+6Kuu 113168
+6Kuu6Kmi 113169
+6L+Y5piv5Lya 113170
+5rWL566X 113171
+5pif56m6 113172
+5a695a65 113173
+54mp5Lia5YWs5Y+4 113174
+5oiS5oyH 113175
+5biF5rCU 113176
+5LiA5q2l5q2l 113177
+5YWx6bij 113178
+5Yaz5LiN 113179
+5o6l566h 113180
+5aaH6IGU 113181
+5q+U5Za7 113182
+6bKB6L+F 113183
+5oyB57qM 113184
+55u45Lqy 113185
+5aiB5bC85pav5Lq6 113186
+56uL6aG5 113187
+5Yid5aeL 113188
+6Ieq5Yi2 113189
+6L+I6L+b 113190
+5LiK5rG9 113191
+5a6P5Lyf 113192
+5qC55pys5rKh5pyJ 113193
+5paw5Yag55eF5q+S 113194
+5ZOq56eN 113195
+5bq35YW7 113196
+6KGw6ICB 113197
+5b2V5YOP 113198
+6auU6amX 113199
+57uR5a6a 113200
+6aKd5aS0 113201
+5LqU5pyI 113202
+6Iqx5byA 113203
+5LiA57q/5Z+O5biC 113204
+5Yiw5Zy6 113205
+5oqV6ZmN 113206
+55eY55eY 113207
+5Y+X5LiN5LqG 113208
+5omO5qC5 113209
+5pu05L2V5Ya1 113210
+5oq95p+l 113211
+5Ye66Lev 113212
+5a6h6K6u6YCa6L+H 113213
+5LiN5YOF 113214
+6Imy6LCD 113215
+55m+5L2Z 113216
+6IKg6YGT 113217
+5rex5Y6a55qE 113218
+6ams5Yqb 113219
+5pep5pma 113220
+5q2M6Iie 113221
+6Ziy5pmS 113222
+5pyA5ZCO5LiA5Liq 113223
+5qix6Iqx 113224
+5bCP5LyZ5a2Q 113225
+5Zyo5b2T5Zyw 113226
+5bCP5LyZ5Ly05Lus 113227
+6LW35rqQ 113228
+5YWo5aqS5L2T 113229
+57C9 113230
+6YWx5rK5 113231
+5peg6K665aaC5L2V 113232
+6KOk5a2Q 113233
+5YGc5Lqn 113234
+5LiN55Sx5b6X 113235
+54m15byV 113236
+5Lyg5Yqo 113237
+5Lmd6b6Z 113238
+5Yqg5Zu6 113239
+5Lmf5LiN5pWi 113240
+5oqA5pyv5pSv5oyB 113241
+5LiK5bKX 113242
+57uP6aqM5ZKM 113243
+5qC85p6X 113244
+5ZC46ZmE 113245
+5pyq5oiQ5bm0 113246
+5aWi5L6I5ZOB 113247
+6L+95o2n 113248
+5aW95LiN5a655piT 113249
+6JW05ZCr 113250
+5L+d5a6a 113251
+5oql5Lia 113252
+5rW35YaF5aSW 113253
+5L2g546w5Zyo 113254
+5rK56ICX 113255
+6LSo6YeP566h55CG 113256
+5r2c5rC0 113257
+5Li95rGf 113258
+6L2s5YWl 113259
+6L+Z5LmI5LmF 113260
+5piO5Luj 113261
+6LSj5Lu75Yi2 113262
+6YeN5bel 113263
+5aSn5be0 113264
+6Kem5Y+K 113265
+6LW35Yid 113266
+5aSn5aaI 113267
+5pav5aGU 113268
+5Yab5bel 113269
+5Lmm6Zmi 113270
+5bOo 113271
+5o6o55CG 113272
+6L+Z56+H5paH56ug 113273
+6L+B56e7 113274
+5Zyo5ZCM5LiA 113275
+57uG57uG 113276
+5YmK5byx 113277
+5Lmm5oi/ 113278
+57aT5bi4 113279
+6K+V6aKY 113280
+5oKj5LiK 113281
+55mr55er55eF 113282
+5Yay5rSX 113283
+5aSW5o+0 113284
+5YWL5Yi2 113285
+5Y2B5pyI 113286
+5YGa5LiN5Yiw 113287
+576O5YyW 113288
+5aaC5pyf 113289
+6L+Y6ZyA 113290
+5aSp5bqc 113291
+5bCx5oSP5ZGz552A 113292
+55qE56Gu5piv 113293
+6aqX5bGA 113294
+5bCP57uE6LWb 113295
+6Kmp 113296
+5Lmd5bm0 113297
+5pmT5b6X 113298
+56CU56m25Lq65ZGY 113299
+5aSn6YWS5bqX 113300
+56eR5a24 113301
+5YWt5ZCI 113302
+55WM5a6a 113303
+6L2m6L29 113304
+5byA552A 113305
+5q+r5peg55aR 113306
+5q+r5peg55aR6Zeu 113307
+6L+Q57u0 113308
+56aB5Yy6 113309
+6ISx6JC9 113310
+6K6y5biI 113311
+5Lqn5Lia5Z+65Zyw 113312
+6auY5oCn6IO9 113313
+5YWJ5b2p 113314
+546w6Zi25q61 113315
+5Ye/ 113316
+6L6D5beu 113317
+6aWu55So5rC0 113318
+6ZaL55m8 113319
+572R5ZCn 113320
+54y05a2Q 113321
+5q2m5p6X 113322
+5a6J5Y6/ 113323
+5LiN5Y+v5oCd 113324
+5LiN5Y+v5oCd6K6u 113325
+6Yq35ZSu 113326
+6LSr56m3 113327
+5Li65ZWl 113328
+6bqT 113329
+5bm+5YCL 113330
+6KeE5qih5Lul5LiK 113331
+5o+a 113332
+6KKr5Zuw 113333
+57y65bit 113334
+5b+r6aSQ 113335
+5oqi5Y2g 113336
+5pmf 113337
+5aSN5rS7 113338
+5pys5oql6K6v 113339
+5Yib5LiL 113340
+5rW35rup 113341
+6YeP5Lqn 113342
+5aaC5L2V5Y67 113343
+6L2m5L2N 113344
+5a+H 113345
+5LqM5Y2B5Zub 113346
+57uP5rWO5o2f5aSx 113347
+6YWN5aWX6K6+5pa9 113348
+5Z+65pys6Z2i 113349
+5LqJ6K66 113350
+5bCx5aW95YOP 113351
+56CU56m25oiQ5p6c 113352
+6ZmI6L+w 113353
+5omT5Yqo 113354
+5LiL5be0 113355
+56eS6ZKf 113356
+5a+55Lq65L2T 113357
+5oqA5pyv56CU5Y+R 113358
+5Y6f5a2Q 113359
+5piv5LiA6aG5 113360
+5LqG5LiA5Lu9 113361
+5oyH55Sy 113362
+55So6YeP 113363
+6L+Y5LiN5aSf 113364
+5pS/5bqc6YeH6LSt 113365
+55+l6K+G54K5 113366
+5Lit5Zu95qKm 113367
+5b6I5byA5b+D 113368
+56S86LKM 113369
+6Z2e5bi45aSa 113370
+6Z2e5bi45aSa55qE 113371
+5Zua 113372
+5peF6aaG 113373
+5bC95oOF 113374
+5q2M5ZSx 113375
+5rKZ6b6Z 113376
+6L2m5Y6i 113377
+5a6i5rWB 113378
+5YGP5beu 113379
+56ev57Sv5LqG 113380
+5qGU 113381
+55S755S7 113382
+5Lmf5bqU6K+l 113383
+5bqU55So56iL5bqP 113384
+6IOD6IKg 113385
+5Lul5b6M 113386
+6LGq5a6F 113387
+5rex5Yqg5bel 113388
+55u06KiA 113389
+5YyW55+z 113390
+5Zu96YGT 113391
+5LiD5Liq 113392
+5LuO6ICM5L2/ 113393
+6IKg6IOD 113394
+5pel6LaL 113395
+54i25a2Q 113396
+57ep 113397
+5oub54mM 113398
+5Lqn5aaH 113399
+55Wq6IyE 113400
+5oiR6Zmi 113401
+5bu6562R5bel56iL 113402
+5bGV6KeI5Lya 113403
+5a626ZW/5Lus 113404
+5Yac5L2c54mp 113405
+5pel5aSc 113406
+5pS75pOK 113407
+6KeE6YG/ 113408
+6Iif5bGx 113409
+5L6/5rCR 113410
+5YWr5a2X 113411
+5LiN5pu+ 113412
+5pSv6YWN 113413
+54as5aSc 113414
+5Lq66aGe 113415
+57SA6YyE 113416
+57uP6JCl5rS75Yqo 113417
+5aSn5rao 113418
+5biC5aeU5bi45aeU 113419
+5YiG6ZCY 113420
+5LiA5Liq6IGM5Lia 113421
+55eF5Zug 113422
+6L+Z5a+55LqO 113423
+5LiN5b6X5LiN6K+0 113424
+5Y+R55S15py6 113425
+5pyJ5omA5biu5Yqp 113426
+55uu5qCH5Lu75Yqh 113427
+5Zug5Zyw 113428
+5Zug5Zyw5Yi2 113429
+5Zug5Zyw5Yi25a6c 113430
+5bCG6L6+5Yiw 113431
+57KX57OZ 113432
+56iz5Zu6 113433
+5auj 113434
+546w5Zyo5b6I5aSa 113435
+5LiW55WM57qn 113436
+5byg5p+Q 113437
+54K557yA 113438
+6JG1 113439
+56S+5Lya57uE57uH 113440
+5b6A5ZCO 113441
+5Yqg5oGv 113442
+5Zmq5aOw 113443
+5pyJ5YW06Laj 113444
+5Li65oKo5o+Q5L6b 113445
+5rK55ryG 113446
+56ys5Zub5bGK 113447
+55qH5a6r 113448
+5LmS5LmT 113449
+5LmS5LmT55CD 113450
+6Zqo6JGX 113451
+6YGp5ZCI 113452
+5Y2X6Z2e 113453
+5pO0 113454
+6KW/5rSL 113455
+5Yqg5a+G 113456
+5oiQ5Yqf5Li+5Yqe 113457
+5Y+j5rC0 113458
+5oiQ5bm05Lq6 113459
+5omA5o+Q5L6b55qE 113460
+6ZqU5aOB 113461
+5Zyo5Lqs 113462
+5b2T5Zyw5pe26Ze0 113463
+562J5ZCE56eN 113464
+6aOO5rCU 113465
+5bGL6YeM 113466
+5LiA5a2X 113467
+55qE5pe26Ze06YeM 113468
+5Zi/5Zi/ 113469
+5b+r6K6v 113470
+5Lit5Zy6 113471
+5LiA55O2 113472
+5ruV 113473
+6aKG6LeR 113474
+5aW96I6x 113475
+5aW96I6x5Z2e 113476
+5rKh5YWz57O7 113477
+5Ye65aKD 113478
+5LiN5piv5LiA5Liq 113479
+6YO95piv6Z2e5bi4 113480
+6ZyH5Yqo 113481
+6I636IOc 113482
+5Y2a5byI 113483
+5oqa5YW7 113484
+5a+556uL 113485
+5pyN5Yqh5py65p6E 113486
+6LCj6KiA 113487
+56S+5Lya56eR5a2m 113488
+5ZCs6K+06L+H 113489
+5omz 113490
+5omT56Oo 113491
+5Y+j5pyN 113492
+5aW95YOP5piv 113493
+5Lul5Y+K5YW25LuW 113494
+54m56LSo 113495
+5Lqy6L+R 113496
+5LiA57uP 113497
+5rad 113498
+6a2U5pyv 113499
+6YGT6Lev5Lqk6YCa 113500
+6KeE5qih5pyA5aSn 113501
+5a6e5pa95oSP6KeB 113502
+5Lme 113503
+5LiA5LiW 113504
+5Z+36KGM 113505
+6LGG55Oj 113506
+5YiX5Li6 113507
+5pWF5a6r 113508
+55Sf5ZG95ZGo5pyf 113509
+5LiJ56eN6IGM5Lia 113510
+6K+m57uG5LuL57uN 113511
+5a6M5aSH 113512
+5bKp55+z 113513
+6ZqP5omL 113514
+6aOy 113515
+5pWI5p6c5Zu+ 113516
+56eL5Yas 113517
+5Yqf5b63 113518
+6KeE56ug5Yi25bqm 113519
+5pel5riQ 113520
+5omA6ZyA6KaB 113521
+5omA6ZyA6KaB55qE 113522
+5bKb5LiK 113523
+5Ye65Zyf 113524
+5Zu+5paH 113525
+56eR5oqA6L+b5q2l 113526
+6YCa6IOA 113527
+6ICB5aSq5aSq 113528
+6IuX5pyo 113529
+6ZO25bed 113530
+5biQ56+3 113531
+6Z2e6KaB 113532
+6YWN55S1 113533
+5aSE5aKD 113534
+6IKh5p2D5oqV6LWE 113535
+5LiA55u05Yiw 113536
+5Z2H55Sx 113537
+5oqX5pel 113538
+5o2u5LuL57uN 113539
+5L2g5Zac5qyi 113540
+5Yib5paw5Z6L 113541
+5Y+Y6L+B 113542
+6KeG5a+f 113543
+5a6M5YWo5rKh5pyJ 113544
+5YWD5pem 113545
+5Y+v5L+h 113546
+5Y+m6KGM 113547
+5p2R57qn 113548
+5YWl5Zy6 113549
+5pCt5qGj 113550
+5Lmf5Zug5q2k 113551
+5o2i5oiQ 113552
+5LiN6LSf 113553
+5LqG5aSn6YeP55qE 113554
+6YGU5Yiw 113555
+5biC5Y6/ 113556
+5bm06LyV 113557
+5b+r5omL 113558
+5biM5bCU 113559
+6Ieq6JCl 113560
+6Zuq6Iqx 113561
+5pCB 113562
+55y856eR 113563
+5q2j56K6 113564
+55qE5ae/5oCB 113565
+5Z2a5a6e55qE 113566
+5oyH57q5 113567
+5qqU5qGI 113568
+572u5LqO 113569
+5L2p5pyN 113570
+6LGq6Zeo 113571
+5ZOS 113572
+5oGw5aW9 113573
+5qqi5p+l 113574
+5Yid6KG3 113575
+5aSn5ZSQ 113576
+57qm5Lya 113577
+6JK45Y+R 113578
+56255YiS 113579
+5bm057uI 113580
+6KGM5qWt 113581
+5YWx6Z2S 113582
+5YWx6Z2S5Zui 113583
+5Lya5byV6LW3 113584
+5Lit56eR 113585
+5Lit56eR6Zmi 113586
+5oyv5Yqo 113587
+5Y205Y+R546w 113588
+5LiN5Yqo5Lqn 113589
+6Iy5 113590
+5oi/6Ze06YeM 113591
+6LSn5biB5pS/562W 113592
+5rK755mC 113593
+5oWO6YeN 113594
+5aGe5bCU 113595
+5Zu957GN 113596
+5Zug5p6c 113597
+562J54m554K5 113598
+5bGx6LC3 113599
+5LiL6LyJ 113600
+6K6T5oiR 113601
+6aWu6YWS 113602
+6L+Z5Liq5ri45oiP 113603
+57ud5aSn6YOo5YiG 113604
+5ZKo6K+i5pyN5Yqh 113605
+5bmy5rS7 113606
+6K6u5Lya 113607
+5qaC6L+w 113608
+5YiG5Yy6 113609
+5q275ZCO 113610
+56uZ552A 113611
+5Li76KaB6aKG5a+8 113612
+5ZCM5Z+O 113613
+5aSn5qCR 113614
+5a+55a2m55Sf 113615
+56S+5Lya5L+d6Zmp 113616
+5aKe6LWE 113617
+5Li75Lq65YWs 113618
+5a6j5Lyg5pWZ6IKy 113619
+5paH5YyW5Lqk5rWB 113620
+5a6i5oi2 113621
+55+l5ZCN5ZOB54mM 113622
+5rue5ZCO 113623
+5LqS6KGl 113624
+5oSf5Lq6 113625
+5Ym/ 113626
+5ZCO5Luj 113627
+5LqJ6Zy4 113628
+5pWZ6IKy5Z+56K6t 113629
+6Z2Z6ISJ 113630
+5LmP5Yqb 113631
+6K+05Ye65p2l 113632
+546L6ICF6I2j6ICA 113633
+5YCr 113634
+5Y2H6LW3 113635
+6ZWB 113636
+5Ye65ri4 113637
+6YCa6KGM6K+B 113638
+5bel5L2c5bKX5L2N 113639
+5Yyg5b+D 113640
+5ou/5p2l 113641
+5rSX6KGj5py6 113642
+5oiR5LiN5oOz 113643
+6aKE6KeB 113644
+5ryU56S6 113645
+5LiA55u05rKh5pyJ 113646
+6Lef5aW5 113647
+5a+554Wn5qOA5p+l 113648
+57C/ 113649
+5LiT5b+D 113650
+6K6u5LqL 113651
+5YmN56uv 113652
+5Y2h5bCU 113653
+6Kit5a6a 113654
+6K6+572u5LqG 113655
+5ama57qx 113656
+5Zyo5Zu95aSW 113657
+5Y+z5L6n 113658
+6LO854mp 113659
+5aWH6JGp 113660
+5aKe5Yqg5YC8 113661
+5aW96L+Q 113662
+5Zu96ZmF5py65Zy6 113663
+5LiL56ew 113664
+55uu5YmN5Li65q2i 113665
+56We5LuZ 113666
+5a6D5Y+v5Lul 113667
+5r6E5riF 113668
+6IO95L2/ 113669
+5ri45Ye7 113670
+5ri45Ye76Zif 113671
+5Ye5 113672
+5LiN6KaB5YaN 113673
+5Yaz6IOc 113674
+5Yaz5oiY 113675
+5ou9 113676
+55ub5YW4 113677
+5b6I5aW95Zyw 113678
+5pyA576O55qE 113679
+5YOa 113680
+5be05Z+6 113681
+5be05Z+65pav5Z2m 113682
+5pyA6YCC5ZCI 113683
+6auY6IGM 113684
+5L+d5aeG 113685
+5o6I5qyK 113686
+6K+05Yiw6L+Z6YeM 113687
+5o6o5byA 113688
+546H6L6+ 113689
+5LiJ5YiG5LmL5LiA 113690
+566h55CG5Lit5b+D 113691
+5Lqk5rGH 113692
+5qOu5p6X5YWs5Zut 113693
+5b6A5LiK 113694
+6aqR6KGM 113695
+5o2u5q2k 113696
+57q95bim 113697
+57ue 113698
+5LiJ5pa5 113699
+5oSP5LmJ5LiK55qE 113700
+5o6o6L+f 113701
+5aSa5qC35oCn 113702
+5oOz6LW35LqG 113703
+5o6S5ZCN56ys 113704
+5beo6aKd 113705
+5p2f57ya 113706
+5a6J5a6a 113707
+5LqL5a+m 113708
+55qE5oS/5pyb 113709
+6KOF5aSH5Yi26YCg 113710
+5Lq65bGF 113711
+5Lq65bGF546v5aKD 113712
+5b+Y6K6w5LqG 113713
+6K+l5ri45oiP 113714
+5qW85LiK 113715
+5byA5Lya 113716
+5oGz 113717
+5Y+L5oOF6ZO+5o6l 113718
+56GS 113719
+57uZ5LqI5LqG 113720
+5YGP5aW9 113721
+5ZOJ 113722
+5Lqk6YCa5a6J5YWo 113723
+6ZuM 113724
+5rK755eF 113725
+6KeJ5b6X5b6I 113726
+6KGs6KGr 113727
+5b+D5oS/ 113728
+5rSe5a+f 113729
+5rCR5qOA5a+f6Zmi 113730
+5o+Q54K8 113731
+6KaB6L+b5LiA5q2l 113732
+6am+6L2m 113733
+5pmu5oOg 113734
+5pWW 113735
+56aP6Z+z 113736
+6YCB6L6+ 113737
+6KeE5YiS6K6+6K6h 113738
+5omL5aWX 113739
+5a6J5L+d 113740
+6L+Y5LiN5aaC 113741
+5YmN6L+w 113742
+5qCH6K6w 113743
+57Sn5o6l552A 113744
+5qeQ 113745
+5rex5rex5Zyw 113746
+5ruh5ruh55qE 113747
+5pil6L+Q 113748
+5pel5Lqn 113749
+54ix5oqk 113750
+5YWo5pel 113751
+5YWo5pel5Yi2 113752
+6L2s5Yqo 113753
+56Wt56WA 113754
+5Lmw5Lic6KW/ 113755
+5a+55pyq5p2l 113756
+5raI5aSx5LqG 113757
+5Zq06YeN 113758
+5LiJ5p2h 113759
+6YW45aW2 113760
+6ZuG5Zui6IKh5Lu9 113761
+6KW/6Lev 113762
+5Y+q5b6X 113763
+6YCB5Y67 113764
+54ug5oqT 113765
+5Yip55So546H 113766
+5LiL5ZGo 113767
+5aWL5oiY 113768
+5pil6IqC5pyf6Ze0 113769
+6LSf6LSj5Lu7 113770
+5piC6LS1 113771
+5bC+5be0 113772
+56+H5paH56ug 113773
+5YWu 113774
+6K6K5oiQ 113775
+5bm5 113776
+55m76YyE 113777
+5L2I 113778
+5bel5Yyg 113779
+5ZOq5oCV5piv 113780
+5Y+N5ZON 113781
+56eD 113782
+5Ye66L2o 113783
+5pel5Yab 113784
+5ZCN6KqJ 113785
+5pWP6ZSQ 113786
+5pyN5Yqh5rC05bmz 113787
+54Wn5bCE 113788
+5LyK5ouJ 113789
+5LyK5ouJ5YWL 113790
+5YaF6ZiB 113791
+6IqS5p6c 113792
+5LiH5YiG 113793
+6YCA5qy+ 113794
+55u05pKt6Ze0 113795
+5ou/5Yiw5LqG 113796
+5bCO6Ie0 113797
+56m65rCU5Lit 113798
+5a6i5oi35pyN5Yqh 113799
+6L+Q5Yq/ 113800
+57uT55+z 113801
+5LiN5b+F6KaB55qE 113802
+6IO25ZuK 113803
+55CG5Lya 113804
+5oq95Ye6 113805
+56m65rCU6LSo6YeP 113806
+5q+V56uf5piv 113807
+5Ya35ryg 113808
+5LiA5aaC 113809
+5LiA5aaC5pei 113810
+5LiA5aaC5pei5b6A 113811
+5oKj55eF 113812
+5Yqg5oyB 113813
+6LWe5Yqp 113814
+6auu 113815
+5ZG95Lit 113816
+5oSP5LmJ5LiK 113817
+5LiN6IiN 113818
+5YGa5qKm 113819
+5omT5omr 113820
+5pif5YWJ 113821
+5pat6KOC 113822
+5YWo5aWX 113823
+6KOB5a6a 113824
+6ams5YWL5oCd 113825
+6aqo6aq8 113826
+5LiA6Lev5LiK 113827
+5a6a5pe2 113828
+5bel56iL5oqA5pyv 113829
+5b285b6X 113830
+5rGy5Y+W 113831
+5LiA6KeI 113832
+5ZC15p62 113833
+5L+X56ew 113834
+5qCq5rSy 113835
+5bqf5pen 113836
+6KGM5pif 113837
+5Y+R55Sf5Y+Y5YyW 113838
+6aaW5LuY 113839
+5Y2B5YiG6YeN6KaB 113840
+5oqK6L+Z5Lqb 113841
+56We5bee 113842
+5o+Q5L6b5ZWG 113843
+5qW3 113844
+5bGO 113845
+54q25YWD 113846
+5Z+O5aKZ 113847
+55yL5LiA55yL 113848
+55Sf5Lqn6IO95Yqb 113849
+5Z+65pys5LiK6YO9 113850
+5omT5omw 113851
+5Yid5qyh 113852
+5Ye656S6 113853
+5YW25Lit5LiA5Liq 113854
+55Sf5oCB57O757uf 113855
+5omL5o6M 113856
+5rWO5Y2X5biC 113857
+5ZyL5YWn 113858
+5q2j5YC8 113859
+5bm+5LmO 113860
+5o6o6I2Q6ZiF6K+7 113861
+6L+t5Luj 113862
+6LCD5L6D 113863
+6aWu5ZOB 113864
+5aKZ5L2T 113865
+5Y+Y546w 113866
+5LqG5aW9 113867
+5LqG5aW95Yeg 113868
+5LiN55WZ 113869
+54iy 113870
+5bC95pep 113871
+5q2j5Zyo6L+b6KGM 113872
+5Ye66Zmi 113873
+5p2A5a6z 113874
+5o+Q5qy+ 113875
+5Y+R5bGV56m66Ze0 113876
+5YmN6Lqr 113877
+5LiN5pat5aKe5by6 113878
+5rex5bGC5qyh 113879
+5a6557qz 113880
+6YKj5Lu9 113881
+5bel5L2c5pWI546H 113882
+5pys5Zu9 113883
+5aSx6JC9 113884
+5q2j5Zug5Li6 113885
+6IqC5rC0 113886
+5LiL5LiA5Luj 113887
+56CU5Y+R5Lit5b+D 113888
+5LiN55CG 113889
+5a6M5aW9 113890
+5L+d5oqk5Yy6 113891
+57uT5p6E6LCD5pW0 113892
+5aWg5a6a 113893
+5a6j56ew 113894
+6Zi75oyh 113895
+5pKk56a7 113896
+5LiN5pa55L6/ 113897
+5ZKV 113898
+56yR5LqG56yR 113899
+546v5aKD5rGh5p+T 113900
+5L2P5oi3 113901
+57ud57yY 113902
+6Zmk5bCY 113903
+6auY5bCa 113904
+5oCO5LmI5Y+v6IO9 113905
+6Z2i6Imy 113906
+5ZWG5qWt 113907
+55a5 113908
+6LWE5rqQ5LyY5Yq/ 113909
+6L6W5Yy65YaF 113910
+6ICA55y8 113911
+5pGn5q+B 113912
+5LiW55WM57uP5rWO 113913
+5byV5p2l 113914
+5LiA5YiZ 113915
+5ouH5oyH 113916
+5oq15b6h 113917
+6ZuN 113918
+5YeG5aSH5bel5L2c 113919
+54+g5LiJ6KeS 113920
+56iA5Zyf 113921
+6I635b6X5oSf 113922
+5oiQ5Yqf546H 113923
+572R57qm 113924
+572R57qm6L2m 113925
+6ISQ 113926
+5pWs5Lia 113927
+6YeR5Lu3 113928
+57K+6auT 113929
+5Lmw6L2m 113930
+5YWz5Y+j 113931
+5YaN5aSa 113932
+5p6B5ZOB 113933
+5ZCE5a62 113934
+5Li+5oql55S16K+d 113935
+6JqK 113936
+5pa55b2i 113937
+56eR5oqA5oiQ5p6c 113938
+5pyA5aW95piv 113939
+6Zeu5YCZ 113940
+57qi6YWS 113941
+5Zub56eN 113942
+57+S5oU= 113943
+57+S5oWj 113944
+5Z6m 113945
+6YKj5Y+q 113946
+6aKG5oKf 113947
+55y86YOo 113948
+5rOw5a6J 113949
+5Lu75pyf 113950
+56Oo5o2f 113951
+5pu/5o2i 113952
+5YW456S8 113953
+56ym5ZCI5p2h5Lu2 113954
+6L+Y5pyJ5LuA5LmI 113955
+5YWx5Lqr5Y2V6L2m 113956
+5Y+v5YiG5Li6 113957
+5a2j5ZCO 113958
+5a2j5ZCO6LWb 113959
+5Lic6I6e5biC 113960
+5b+D5oSP 113961
+5omt5puy 113962
+5L2c5Li65LiA56eN 113963
+6L+Z6YOo5YiG 113964
+5Y+C5LiO5Yiw 113965
+572R55CD 113966
+5a+m54++ 113967
+57uE6KOF 113968
+5ZCR5aSW 113969
+5bel5L2c5pa55qGI 113970
+5Y2B5p2h 113971
+6Kqy56iL 113972
+6aKk5oqW 113973
+5ZOp 113974
+6YKu5a+E 113975
+5Lqi 113976
+5YWN6LK7 113977
+56ek 113978
+5bqU5oCl566h55CG 113979
+5Zub5LqU 113980
+6bqS6bqf 113981
+5b6S5q2l 113982
+6KiY5b6X 113983
+55KQ 113984
+5piv5ZCm5Lya 113985
+5oSP6KeB5Y+N6aaI 113986
+6Zq+5oCq 113987
+56qN 113988
+5Lqk5o6l 113989
+5Lik5Y2D 113990
+5oeJ55So 113991
+5pyf6ZaT 113992
+5pCs5Yiw 113993
+6K6u6aKY 113994
+56Kn5qGC 113995
+56Kn5qGC5Zut 113996
+5YGa55Sf5oSP 113997
+6Zmb5LiL 113998
+6LeL 113999
+6ICB5Lq65a62 114000
+5bim5Zue 114001
+5p645p2e 114002
+6KGM6ZW/ 114003
+5YaF5a65566A5LuL 114004
+5qKi 114005
+5oyH5o6n 114006
+6YeN55eH 114007
+572R5Y+L5Lus 114008
+54++5Luj 114009
+57G75Lqn5ZOB 114010
+5aWU5rOi 114011
+5ri6 114012
+57KJ56KO 114013
+6L+Z5Y+q5piv 114014
+5qOA5a+f5py65YWz 114015
+6b2K 114016
+5oi/56ef 114017
+5b635ouJ 114018
+5bKB5Lul5LiK 114019
+57qv5YeA 114020
+5YiG5biD5Zyo 114021
+6IO95b6X5Yiw 114022
+5LiN5bC9 114023
+56ue5Lu3 114024
+55qE5bim6aKG 114025
+55qE5bim6aKG5LiL 114026
+5Lit6I2v5p2Q 114027
+5p2R6ZWH 114028
+5LiN5Y+v6YG/5YWN 114029
+6Zyy5aSp 114030
+5bCP5aeR5aiY 114031
+54mp5Lu2 114032
+6JGX5L2c5p2D 114033
+5ouY55WZ 114034
+6YO96KeJ5b6X 114035
+5puy5oqY 114036
+5re75Yqg5YmC 114037
+5Y+s5Zue 114038
+5omO5a6e5o6o6L+b 114039
+5oqE6KKt 114040
+5YyW6Lqr 114041
+55u06JCl 114042
+5Lmf5biM5pyb 114043
+6I2j6KqJ56ew5Y+3 114044
+5Y2W57uZ 114045
+5pyJ5LiN5ZCM55qE 114046
+5aWH54m5 114047
+6YO96K6k5Li6 114048
+5aae 114049
+5oiQ6ZW/5Li6 114050
+6L6p5oqk 114051
+5Li75pWZ57uD 114052
+5rOV5biI6IGM5Lia 114053
+5qSN5YWl 114054
+57Si5bC8 114055
+5ZCs6L+H 114056
+5Lmg5oOv5LqG 114057
+5aS65Y+W 114058
+6Z+T 114059
+5pys6LSo5LiK 114060
+5o6l5Yqb 114061
+5LqR56uv 114062
+6KaB5YGa5aW9 114063
+6Lev54Gv 114064
+5Y2P5ZCM5Y+R5bGV 114065
+5pyJ5b6F 114066
+5rC05Z+f 114067
+5pCc54uQ6aaW6aG1 114068
+6LSo6YeP5a6J5YWo 114069
+5Y2B5LqM5LqU 114070
+5ZOu5ZaY 114071
+6JOs5YuD5Y+R5bGV 114072
+5ZCN5aOw 114073
+6Lqr5Lqh 114074
+546L5bqc 114075
+5Y6f5YiZ5LiK 114076
+54OY5bmy 114077
+6YGX5ryP 114078
+6Z2i55uu 114079
+5Zu95Lya 114080
+5LiA55u06YO95piv 114081
+5pyJ5LiA5L2N 114082
+6YWN5pyJ 114083
+6Zmq552A 114084
+5LyB5Zu+ 114085
+5oyJ5LiL 114086
+6JOd5Zu+ 114087
+5qmY 114088
+5aSn5aSa5piv 114089
+6L6p6K66 114090
+5peL5b6L 114091
+5oql6YCB 114092
+5p2h6KeE5a6a 114093
+5Yqo6Z2Z 114094
+5YyI5aW0 114095
+5ouc6K6/ 114096
+5LiA5YiA 114097
+5LuW55+l6YGT 114098
+5Li75p2D 114099
+5LuW5pu+ 114100
+5pKt56eN 114101
+5aOB5Z6S 114102
+54mi6K6w5L2/5ZG9 114103
+5Zyo6L+Z5pa56Z2i 114104
+5omL6IWV 114105
+5pSv5p62 114106
+5L6G6Ieq 114107
+6YeN5aGR 114108
+5aSa5bGC5qyh 114109
+5LuL6LSo 114110
+6Z2i5a2U 114111
+5r2u5rm/ 114112
+5Y6/5Z+f 114113
+5ri45oiP5b2T5Lit 114114
+5aOe 114115
+5YiX5Ye6 114116
+6LWb5Yy6 114117
+5aSa5Y2K 114118
+6YeN54K55bel5L2c 114119
+5oiR5Lus5b+F6aG7 114120
+5p+P5p6X 114121
+6bKB6IO9 114122
+5pa95bGV 114123
+5ZCE5Yy6 114124
+5YWN56iO 114125
+6LWb5ZCO 114126
+5pyA6YeN6KaB 114127
+5LiA5Liq5aW955qE 114128
+6L+d5rOV6L+d6KeE 114129
+5LqG6Kej5pu05aSa 114130
+5pWs6K+3 114131
+56yR552A6K+0 114132
+5LiN5pat5Y+R5bGV 114133
+5pGE5b2x5biI 114134
+5Lul6Ziy 114135
+54K45by5 114136
+5aOw5ZON 114137
+56SB 114138
+5oe/ 114139
+6IiG5oOF 114140
+6Ieq55Sx6LS45piT 114141
+5pWP5o23 114142
+5LiJ5aSn6Zi25q61 114143
+6IuU 114144
+5pe65a2j 114145
+5LiN5ruh5oSP 114146
+5b6u5L+h5Y+3 114147
+5L+u5Li6 114148
+56C06KOC 114149
+6YCD56a7 114150
+5q+P6IKh 114151
+6L6+5LiN5Yiw 114152
+5q+P5bm06YO9 114153
+54Gv56y8 114154
+5q2k5Z+656GA5LiK 114155
+5YOP5Liq 114156
+5YiG5aip 114157
+5pm+ 114158
+5LiN6Iez5LqO 114159
+57qi57q/ 114160
+6K+v6Kej 114161
+5Lic6Lev 114162
+5reu5a6J 114163
+5Lqn5a2m 114164
+5Lqn5a2m56CU 114165
+6Im+5ruL 114166
+6Im+5ruL55eF 114167
+5YmN5o+Q5piv 114168
+5q+P5LiA5aSp 114169
+5LiD5aSn 114170
+5qCR5Y+2 114171
+6LWw5b6X 114172
+6L+Z5Lik56eN 114173
+5o6P5Ye6 114174
+5o6Q 114175
+6aKG5a+86ICF 114176
+5LiA5py1 114177
+5Liq5aSa5pyI 114178
+5Lit5YWz 114179
+5Lit5YWz5p2R 114180
+6K++5aCC5pWZ5a2m 114181
+5aSn5ZKW 114182
+6YGL55So 114183
+6K+a5oSP 114184
+57uE5Zu+ 114185
+6K+V552A 114186
+5LmU5rK7 114187
+6L+Y5LiN5piv 114188
+5pyJ5pu05aW955qE 114189
+5ZCO5aSH 114190
+5paw55Sf5YS/ 114191
+5rCU6KGA 114192
+5rKl6Z2S 114193
+5bGP6Zqc 114194
+5qWt5YuZ 114195
+5oiR5Lul5Li6 114196
+6ZW/55u4 114197
+6ICB54i4 114198
+6ZWH5rGf 114199
+5py65qKw6K6+5aSH 114200
+5L2G5piv5aaC5p6c 114201
+5Z2a5a6a5LiN 114202
+5Z2a5a6a5LiN56e7 114203
+5Yay6ZSL 114204
+566A55u05piv 114205
+5YKo6JOE 114206
+57qv55S15Yqo 114207
+5ryr5q2l 114208
+5Li+6LW3 114209
+5oG25oCn 114210
+6KiY6YyE 114211
+6IGM6IO96YOo6Zeo 114212
+5YWo6ZW/ 114213
+6Zu76KaW 114214
+5Lmz6IW6 114215
+5L2V5aSE 114216
+5raI5p6B 114217
+5q2j5aSE5LqO 114218
+5a6J5a6B 114219
+5oiQ6ZW3 114220
+5Y+Z6L+w 114221
+5rqD55ah 114222
+5L2G546w5Zyo 114223
+5aWz5pif 114224
+5am05bm85YS/ 114225
+5oqV6J6N6LWE 114226
+6Zeu6Zeu 114227
+5o+t5byA 114228
+6K+P 114229
+5ZCN5b2V 114230
+6JiR6I+H 114231
+5ZCK6aG2 114232
+5rmW5Yy6 114233
+5Y2W5Zy6 114234
+5bu6568= 114235
+5bu656+J 114236
+6I69 114237
+5ZCs5ZCs 114238
+56ue5LqJ5LyY5Yq/ 114239
+5Ye65Lu7 114240
+5pyJ5Lik56eN 114241
+5qmx5p+c 114242
+6KSq 114243
+6K+V5Y23 114244
+57uP5rWO5oqA5pyv 114245
+5rex5bGC 114246
+6YeN6KaB5YaF5a65 114247
+6aOO5o6n 114248
+54q25oCB5LiL 114249
+6YOo6ZaA 114250
+5bm/5rG9 114251
+6KeC5pGp 114252
+6YGX55WZ 114253
+6L2s6LSm 114254
+5oyB5LuT 114255
+5oC76K6h 114256
+5ZyY6ZqK 114257
+5oi/5Lic 114258
+6ZiA6Zeo 114259
+5YWs5YWz 114260
+5YWz5YiH 114261
+6IKY 114262
+5pW45pOa 114263
+5LiJ5Y2B5bm0 114264
+6KeB6K+B5LqG 114265
+5bGG 114266
+54Gw5bCY 114267
+5qac6aaW 114268
+6KaG55uW546H 114269
+5LuZ5aWz 114270
+55Sf5Lqn5oC7 114271
+55Sf5Lqn5oC75YC8 114272
+5oi/6LS3 114273
+5rGf5Yy6 114274
+5YWF55S15qGp 114275
+55m+5ZCI 114276
+56K66KqN 114277
+6L2s56e75Yiw 114278
+6YO95peg5rOV 114279
+57qq5b+16aaG 114280
+562+572y5LqG 114281
+5bm25LiN5aSa 114282
+5oyg 114283
+5LiN5aSq5aW9 114284
+5LiW5Luj 114285
+6K+v5a+8 114286
+6auY5bOw6K665Z2b 114287
+5YW85a65 114288
+6Zy45rCU 114289
+5p2l6K6/ 114290
+5omA5bim5p2l55qE 114291
+5piv5LiA6YOo 114292
+5pma6aWt 114293
+5Y6G5Luj 114294
+5ZCm5YmH 114295
+5LmF5LmF 114296
+5pyJ5pWI5pyf 114297
+6K+x5Y+R 114298
+5oC76LWE5Lqn 114299
+5pys6Lqr5bCx5piv 114300
+55Sf5Lqn5Y6C5a62 114301
+5pe26aum 114302
+6ICQ55So 114303
+5LuO5bCP5bCx 114304
+5p2h57qm 114305
+6Iux5YuH 114306
+5L+X6K+d6K+0 114307
+5a+65bqZ 114308
+5b+D55CG5YGl5bq3 114309
+5LuA5LmI5LqL5oOF 114310
+5rGJ5a2X 114311
+55WZ5L2P 114312
+5Y2X6Lev 114313
+5LiJ6aG5 114314
+5Lii5LqG 114315
+5oOz5Yiw5LqG 114316
+56256ZuG 114317
+6ZmE5Yqg5YC8 114318
+6KW/6KOF 114319
+5LmL5L2c 114320
+5YGa55qE5LqL 114321
+55W25oKo 114322
+55W25oKo5Zyo 114323
+6aaW5qy+ 114324
+5LiN5Zyo5LmO 114325
+5bel56iL5pa95bel 114326
+6ZqQ6ZqQ 114327
+5Y+Y6Lqr 114328
+5rK/6YCU 114329
+5oKg5oKg 114330
+5L+d5pqW 114331
+55Sf5rS75Z6D5Zy+ 114332
+5rik5rW3 114333
+5q2m5L6g 114334
+5aWz5Li76KeS 114335
+5Li+5L6L 114336
+5reo 114337
+55m96aKG 114338
+6KOZ5a2Q 114339
+6L+U6L+Y 114340
+6L+I5Ye6 114341
+6b6Z6Zeo 114342
+57uP5rWO5L2T 114343
+5pS25a6Y 114344
+55WM6ZmQ 114345
+6Lez5Ye6 114346
+5Y2H5YC8 114347
+57u16Ziz 114348
+55ak55eV 114349
+55yL5riF 114350
+5ouS57WV 114351
+6KWE6Ziz 114352
+6K++5aSW 114353
+5a2Q5a2Z 114354
+5q2M6K+N 114355
+5oiQ5ZCN 114356
+5rq25ray 114357
+5YSS5a62 114358
+5ZWG5Lia5YyW 114359
+6L6o5Yir 114360
+5aSa6L6+ 114361
+572R5bqX 114362
+5Lmd5aSn 114363
+5Lmd5aSn57K+56We 114364
+5q2k5Li+ 114365
+6L+e6L29 114366
+5LiA5YCL5Lq6 114367
+6Imy5rO9 114368
+5ra155uW5LqG 114369
+6KaP5YqD 114370
+5Zu95oOF 114371
+5Y2r55Sf5YGl5bq3 114372
+56ev5p6B5ZON5bqU 114373
+5ouZ 114374
+5Yi25Yqo 114375
+5oOz6LGh5Yqb 114376
+55qE5LmQ6Laj 114377
+5byg5a6255WM 114378
+5bSO 114379
+6YeN5Z6L 114380
+5aSW5aKZ 114381
+5pS+5a2m 114382
+6K6k55yf5a2m5Lmg 114383
+6LSs5YC8 114384
+5rOV5qGI 114385
+5oqk6IKk5ZOB 114386
+6Zm35YWl5LqG 114387
+6K+35oKo 114388
+5Z6i 114389
+5pWZ6IKy6LWE5rqQ 114390
+5Lqk5piT5bmz5Y+w 114391
+5pe26KOF 114392
+5Lyg5p+T55eF 114393
+5rmW5rOK 114394
+6LWE566h 114395
+5Y6o5biI 114396
+6Zec6Y0= 114397
+6Zec6Y21 114398
+5ZOI5ZOI5ZOI 114399
+55uX56qD 114400
+55Sc576O 114401
+5bqE5Zut 114402
+55uu5YmN5bey57uP 114403
+6L655LiK 114404
+54Gr6Iqx 114405
+5oql6K6w6ICF 114406
+5oGL5oOF 114407
+57Sn5YeR 114408
+5rC05rWB 114409
+6L+Z5piv5oiR5Lus 114410
+5rOl5Zyf 114411
+5pu+5Lu7 114412
+5pa56KiA 114413
+5ZGo5YWt 114414
+5Y+35qW8 114415
+5LyR5YGH 114416
+6K+v5Lya 114417
+5Zu95YC6 114418
+5YmN5aSV 114419
+5Lik5byg 114420
+6Zer 114421
+6a2U6ay8 114422
+5oqK5oyB 114423
+6IqC6IO9546v5L+d 114424
+5riF5rSB6IO95rqQ 114425
+6IKl5paZ 114426
+6auY6aKR 114427
+5bCx5pyJ5LqG 114428
+5Lqk5Lya 114429
+5rKh6ZKx 114430
+6ZuF5oCd 114431
+6KaB5Y+K5pe2 114432
+5Z+55YW75a2m55Sf 114433
+5qyj5Zac 114434
+54Ot5rC05Zmo 114435
+6b6Z5rmW 114436
+5LqM5qW8 114437
+5paw5rWq6LSi57uP 114438
+5paw5Yqo6IO9 114439
+6LWj5bee 114440
+5ouz5aS0 114441
+5rWB5ZCR 114442
+5Lmf5piv5b6I 114443
+5Y+R5ZSu 114444
+5Lit5ZCr5pyJ 114445
+5ZCT5b6X 114446
+5beo5pif 114447
+5peg5omA6LCT 114448
+5q+b5a2U 114449
+5YWs5YWx5Lqk6YCa 114450
+54KO54Ot 114451
+6LW36I2J 114452
+5Yqg55uf5ZWG 114453
+6K+05LiN5Ye6 114454
+5aSn5a2m5q+V5Lia 114455
+5bel5Lia5Zut 114456
+6aCY5Z+f 114457
+5bqG5YW4 114458
+5rWB5Lqn 114459
+6IGy6Z+z 114460
+5Ly85LmO5piv 114461
+6LSn5rqQ 114462
+5rex5YiH 114463
+5rK755aX5pa55rOV 114464
+6LWE5rqQ6YWN572u 114465
+57ay5Y+L 114466
+55Sj 114467
+5Lql 114468
+6Lqy5Zyo 114469
+56S+56eR 114470
+6Luf6auU 114471
+5aWz6KOF 114472
+5q2h6L+O 114473
+57u85ZCI5a6e5Yqb 114474
+5qC85bCH 114475
+5YWa5Y+y5a2m5Lmg 114476
+5pyA5Z+65pys 114477
+5pyA5Z+65pys55qE 114478
+55yL5pyb 114479
+5Y+X6LS/ 114480
+5LiN5LuF6IO9 114481
+5L2V5b+F 114482
+5LiA5Liq5bCP5pe2 114483
+576M 114484
+5oub5pS2 114485
+54KS6IKh 114486
+5p2R5bmy6YOo 114487
+55u454ix 114488
+5r2c6IO9 114489
+5LmN 114490
+5pe26L6w 114491
+5qyj5oWw 114492
+6ZO26KGM5Lia 114493
+54ut56qE 114494
+6YeN54K56aKG5Z+f 114495
+546w5a6e55Sf5rS7 114496
+6Yyv6Kqk 114497
+5paw6KeE 114498
+5rul55So 114499
+5pe25LiN 114500
+5pe25LiN5pe2 114501
+5biz6Jmf 114502
+56iA57y6 114503
+5ZCR5Lic 114504
+5L+d5YGl5ZOB 114505
+54+t6ZW/ 114506
+5LqS5YuV 114507
+56y8572p 114508
+5r2b 114509
+5pqW5b+D 114510
+6L2w54K4 114511
+5bqG5bm4 114512
+6LKM5Ly8 114513
+5pO6 114514
+6ICQ56Oo 114515
+5LiT5Lia5Lq65aOr 114516
+5LiA6Iis6YO95piv 114517
+5ryz5bee 114518
+5YWo6Ieq5Yqo 114519
+5b2V55So 114520
+5aSn6LeM 114521
+5pyJ5pWI5oCn 114522
+6Ieq5YuV 114523
+5LiJ5Liq5pa56Z2i 114524
+5riv5Yy6 114525
+5L+h6LK4 114526
+6YCa6K+d 114527
+6auY5rao 114528
+5rOE5ryP 114529
+6YWN5LiK 114530
+5YWa5bel5aeU 114531
+6KKr6K6k5Li6 114532
+6KKr6K6k5Li65piv 114533
+5LiN5Lya5YaN 114534
+6LCD5YmC 114535
+5Y+C6IKh 114536
+6ISx5Y+R 114537
+5b+g5a6e 114538
+5YaF5YiG5rOM 114539
+57mB5b+Z 114540
+5Y+M5Yib 114541
+6am75p2R 114542
+5YiS566X 114543
+6YGO5L6G 114544
+5Zyj57uP 114545
+6I+c6bif 114546
+5ou85aSa5aSa 114547
+5Lit5Zu95rG96L2m 114548
+54Of6I2J 114549
+55u05rWB 114550
+5LqG5LiA5Y+j5rCU 114551
+5L2O5oiQ5pys 114552
+5om+5Zue 114553
+6Ieq5Y2R 114554
+57i95piv 114555
+5paH5YyW5Yib5oSP 114556
+5aSp5rKz 114557
+5qix5qGD 114558
+6aqR5YW1 114559
+6YeM6Z2i5pyJ 114560
+546u 114561
+6IO95om+5Yiw 114562
+6YCD6LeR 114563
+5YiH5bCU 114564
+5YiH5bCU6KW/ 114565
+5Lul5LiL5piv 114566
+5bKz6Ziz 114567
+55qE5qaC546H 114568
+5oq15Yi2 114569
+5biI5LqL5Yqh 114570
+5biI5LqL5Yqh5omA 114571
+5YeG5pe2 114572
+5bGs5pa8 114573
+6K6i6LSt 114574
+5Y2g5o2u5LqG 114575
+5Lit6YCU 114576
+5bCL 114577
+6buR6ams 114578
+5Y6/5YWs5a6J5bGA 114579
+5LiD5pyI 114580
+6Imy57Sg 114581
+5b+D6ISP55eF 114582
+5pe26ZmQ 114583
+5q+N5YWs5Y+4 114584
+5bmV5ZCO 114585
+5LiK5qac 114586
+5YC+5ZCR5LqO 114587
+57q45LiK 114588
+5qGT 114589
+6ZuG5L2T57uP5rWO 114590
+5oOF5aKD 114591
+6KaB5YGa5Yiw 114592
+56mN5qW1 114593
+5Y+q5oCV 114594
+5rmY6KW/ 114595
+55qx57q5 114596
+5YWo5ZyL 114597
+54Sh6KuW 114598
+5aW95oSf 114599
+5Y2V5Lu3 114600
+6L+b56iL5Lit 114601
+5piG5LuR 114602
+5Yib5a6i 114603
+5YWF5pal 114604
+5YWI5oqK 114605
+6K+l5oCO5LmI5Yqe 114606
+5ZOB5b63 114607
+5YWo6Z2i5Y+R5bGV 114608
+6KiI5YqD 114609
+5oC75bel5Lya 114610
+5L2b5bGx5biC 114611
+5oqX6KGh 114612
+5byA5Zy6 114613
+6ZKx5biB 114614
+5Y+L5Lus 114615
+5auJ5aaS 114616
+57Si6LWU 114617
+6K6K5YyW 114618
+5oyk5Y6L 114619
+5oyR6KGF 114620
+562J5LiA5om5 114621
+5p2o5qyi 114622
+5LiT5a625a2m6ICF 114623
+6IO96L6+5Yiw 114624
+6LWw6L+R 114625
+6LSr5Zuw5Zyw5Yy6 114626
+6ZmQ5pyf 114627
+5LiN5bmz6KGh 114628
+5Zu95YaF5biC5Zy6 114629
+6LWb5Zy6 114630
+6YWN6LWE 114631
+6KaB6ICD6JmR 114632
+5LiH5Y+w 114633
+5pyI5pyr 114634
+6ZSl 114635
+5a2r 114636
+5o6l6Kem5Yiw 114637
+5Ye65Lqn 114638
+5pWZ5a24 114639
+5L2c5byK 114640
+55qE5pyA5ZCO5LiA 114641
+5L+D5oiQ 114642
+5ZC45Y+W 114643
+5r2c6ImH 114644
+6KKr6aqX 114645
+6L6T5LqG 114646
+54uQ54u4 114647
+5Y2H6ZmN 114648
+6L+Z5Lqb5Lic6KW/ 114649
+5oqV6LWE5Z+66YeR 114650
+55Sf54mp5a2m 114651
+572R57uc6JCl6ZSA 114652
+5ZCR6K6w6ICF 114653
+6I2J5Zyw 114654
+5oCv 114655
+5pyN5Yqh6IO95Yqb 114656
+6YOB6Ze3 114657
+5Y2V5ZOB 114658
+5b6X572q 114659
+5piT5LqO 114660
+5Liq5aSa5bCP5pe2 114661
+6YeN5Lu7 114662
+5LiK5a6Y 114663
+5pys6YeR 114664
+54++5aC0 114665
+5rqi5Lu3 114666
+5pif6L6w 114667
+5rS75Yqo546w5Zy6 114668
+5Li56bqm 114669
+5bid546L 114670
+5p+l5piO 114671
+5a2Y5Zyo5LqO 114672
+6aaZ5rC0 114673
+5oq95qOA 114674
+5a6e6ZmF5LiK5piv 114675
+5paw5b6B56iL 114676
+6LSi5Yqh566h55CG 114677
+5o6b 114678
+5Yac5Y6G 114679
+6YO96IO95aSf 114680
+6YKv6YO4 114681
+55yf5a+m 114682
+57uK 114683
+5Ya15LiU 114684
+572u6Lqr 114685
+56WI56W3 114686
+552B5byA 114687
+5oyH54K5 114688
+5byA5py6 114689
+6KW/5a6B 114690
+5YyX57qm 114691
+56ev5rC0 114692
+5Ye65Yqo 114693
+5Y+R5bGV5qih5byP 114694
+6L2s5oqY 114695
+6ICD54K5 114696
+5pyJ572R5Y+L 114697
+6LSr5Zuw5p2R 114698
+5oiR5Lus55+l6YGT 114699
+5YiG6ZSA 114700
+5bGx6ISJ 114701
+5q+U5ouf 114702
+5Lyw566X 114703
+5pS55bu6 114704
+5aOu6KeC 114705
+56eJ5oyB 114706
+5o+q 114707
+56aA 114708
+5YyW5a2m5ZOB 114709
+5Lit5Zu95Yi26YCg 114710
+5LiA5p62 114711
+5omN6KGM 114712
+5oub5b6F 114713
+5Y+Y5o2i 114714
+5YmN57q/ 114715
+5bm45aW9 114716
+6L+Z5qC355qE6K+d 114717
+5b+D6KGA566h 114718
+5oCn55a+55eF 114719
+5YWo6IO9 114720
+5YiR5L6m 114721
+5L+h5oGv5Y+R5biD 114722
+5pi+54S25piv 114723
+6Z2S6ZOc 114724
+5ZCD5LuA5LmI 114725
+55S15Lu3 114726
+5rOV5b6L6KeE5a6a 114727
+54Wy 114728
+55O35Zmo 114729
+6IKJ57G7 114730
+5o+S5YWl 114731
+5Zec 114732
+6L+f6L+f 114733
+5LiA54K56YO95LiN 114734
+6L+Y5YyF5ous 114735
+6IiN5LiN5b6X 114736
+5qCH5b+X5oCn 114737
+5pyI5Lul5p2l 114738
+57OW5p6c 114739
+6YO95bqU6K+l 114740
+546v5aKD5Y2r55Sf 114741
+6Iiq6KGM 114742
+6YOR6YeN 114743
+572R5oqV 114744
+5Y2B5L2z 114745
+56eB5LiL 114746
+5pq06LeM 114747
+5Yqg5b+r5Y+R5bGV 114748
+5Lqn5ZOB56CU5Y+R 114749
+5Yib6YCg5Ye6 114750
+5oC76KeJ5b6X 114751
+5bqV55uY 114752
+6JWK 114753
+5Ye65bit5Lya6K6u 114754
+5Li75p2/ 114755
+5pel5pma6Ze0 114756
+5a6Y5pa55b6u5Y2a 114757
+5byV55So5pel5pyf 114758
+5Ymv5pWZ5o6I 114759
+55S15a2Q5Lqn5ZOB 114760
+6KGw6YCA 114761
+55WZ5a2Y 114762
+54Gr5Yqb 114763
+55Kn 114764
+55qC 114765
+5YW85YW3 114766
+6YeN6L+U 114767
+6aKG55Wl 114768
+5YiH6Zmk 114769
+5YaN55Sf6IO95rqQ 114770
+5a6e5Zyo5aSq 114771
+55CG6K665LiK 114772
+5LiJ5bGC 114773
+5LiW55WM5ZCE5Zu9 114774
+5a6c5piM 114775
+6ICz6L65 114776
+5a695pWe 114777
+5rGJ5peP 114778
+55m955m9 114779
+6L+Z6YeM6Z2i 114780
+55Sf5rS75Lmg5oOv 114781
+6LWe6LWP 114782
+55S35aOr 114783
+5Lit5L+E 114784
+6L2m56W4 114785
+5YmC6YeP 114786
+6Zmk5Y67 114787
+5bem6L65 114788
+562R54mi 114789
+54mb5biC 114790
+5a625Yqh 114791
+5ZWD 114792
+572u5o2i 114793
+57Sr5aSW 114794
+57Sr5aSW57q/ 114795
+5b6A5YmN 114796
+5Yqb5a2m 114797
+57Sn6Lef 114798
+55uu55qE5Zyo5LqO 114799
+57uu 114800
+56WC 114801
+5a6j6KiA 114802
+5LqM5rCn5YyW 114803
+5LqM5rCn5YyW56Kz 114804
+5peg57yY 114805
+57K+6YCa 114806
+6Ki6 114807
+5byV5Y+R5LqG 114808
+5pyA5YWI 114809
+5rS+6am7 114810
+5LiN5b+N 114811
+5oiR54i4 114812
+5bm05LiL5Y2K5bm0 114813
+5reL5be0 114814
+5rKh6Zeu6aKY 114815
+5bqX5YaF 114816
+6Lef5oiR6K+0 114817
+55Sf5Lqn55Sf5rS7 114818
+6KeC5pyb 114819
+5riN 114820
+6KKr5omn6KGM 114821
+6KKr5omn6KGM5Lq6 114822
+6Iic 114823
+5o66 114824
+5LiA56eS 114825
+6I2J5Z2q 114826
+5ZG85ZKM 114827
+5ZG85ZKM5rWp 114828
+5ZG85ZKM5rWp54m5 114829
+5Lq65rCR6ZO26KGM 114830
+54SV5Y+R 114831
+6K+B5Yi45Lqk5piT 114832
+55WU 114833
+5py66IO9 114834
+5aa+ 114835
+5pma5bm0 114836
+5bel5ZWG6IGU 114837
+5Y6f5Z6L 114838
+6KeS5bqm55yL 114839
+5oql56S+ 114840
+6K+N5p2h 114841
+6Lqy6YG/ 114842
+6YeN5ZCv 114843
+5aSV6Ziz 114844
+6IKh5p2D6L2s6K6p 114845
+5Zyo5LiA 114846
+5Zyo5LiA5peB 114847
+56S+5Lya5YyW 114848
+5Y+R5bGV5Y6G56iL 114849
+5ouW5qyg 114850
+5L2/6ICF 114851
+5LiO5ZCm 114852
+5paw5bGA6Z2i 114853
+5LuK5aSp5oiR5Lus 114854
+6b2Q6IGa 114855
+5a+55oiR6K+0 114856
+6YCS5Lqk 114857
+5pyq5pu+ 114858
+6I6K 114859
+6ZaJ 114860
+5Lqy5omL 114861
+6KeS6YCQ 114862
+5pyJ6bue 114863
+56iO546H 114864
+5L2O5aOw 114865
+6buY5aWR 114866
+5pmu5rOV 114867
+5aSn5LiT 114868
+56ys5LqM5aSn 114869
+5L2P5Z2A 114870
+5pS+6L+b 114871
+5LqM5oiY 114872
+5Lqy6Lqr 114873
+5Zu65YyW 114874
+5LiL5Lmh 114875
+5YWz6ZSu5oqA5pyv 114876
+5Zue5oOz 114877
+5oql5YiK 114878
+5raC5oq5 114879
+6JeP552A 114880
+56Wd5oS/ 114881
+5Y2H5rip 114882
+55Sa6Iez6L+e 114883
+5YWs5YWD5YmN 114884
+576O5pa5 114885
+6K+a5a6e 114886
+5peg5YG/ 114887
+5Ym15qWt 114888
+5bCP5b+D57+8 114889
+5bCP5b+D57+857+8 114890
+5Lik5omL 114891
+5rip6aao5o+Q56S6 114892
+5Lu/55yf 114893
+5oO2 114894
+6IOh5a2Q 114895
+5bel5L2c56uZ 114896
+56Gs55uY 114897
+56u/ 114898
+5YKz6YCB 114899
+5YWo5qCh 114900
+6bKc5rS7 114901
+55KA55Ko 114902
+57uT5bC+ 114903
+5o2i5p2l 114904
+5oiA 114905
+5L2O5L2N 114906
+5LiH5YWD5Lul5LiK 114907
+5Yqg5YiG 114908
+5o6o5LuL5Lya 114909
+55CG6LWU 114910
+5b635bCU 114911
+5oqX6K6u 114912
+5rS8 114913
+5Zan 114914
+5Z+O6ZmF 114915
+5b6I5qOS 114916
+5Lq65q275Lqh 114917
+5Lya5bGV5Lit5b+D 114918
+5LqS6IGU5LqS6YCa 114919
+6JaE6Iac 114920
+6YeN6bue 114921
+56aB5q+S 114922
+5Ya356yR 114923
+5aSn5a625Y+v5Lul 114924
+6aaW55u4 114925
+6L+R6Led56a7 114926
+5rWu546w 114927
+56eY6K+A 114928
+6LW36aOe 114929
+5pC2 114930
+55yf5YGH 114931
+5oGV 114932
+5bCP5bqX 114933
+5rCR55y+ 114934
+5Y+R5biD5YWs5ZGK 114935
+5L6n6YeN 114936
+5b6Y5b6K 114937
+5oCU 114938
+5qqQ 114939
+5pWw55uu 114940
+5Ymv56eY5Lmm6ZW/ 114941
+5Lik5Y+l 114942
+6ZqQ556S 114943
+5Y+M5Y+M 114944
+5omL5oSf 114945
+6JGh5Lqs 114946
+6YGX5b+Y 114947
+6ayl 114948
+6L+Z5Liq5Zyw5pa5 114949
+6K+055qE6K+d 114950
+5beh5Zue 114951
+6L+d56ug 114952
+5om+5bel5L2c 114953
+5pSv55CD6Zif 114954
+6KOh6Z2i 114955
+5pi+56S65Ye6 114956
+6Iez5bCK 114957
+5Lik57qn 114958
+5YmN5q615pe26Ze0 114959
+55im6Lqr 114960
+6IKi5L2T 114961
+5q+N6Kaq 114962
+5omL57ut6LS5 114963
+5rG96L2m6KGM5Lia 114964
+5o6p55uW 114965
+5o6n6IKh6ZuG5Zui 114966
+5Y+j5b6E 114967
+5pS/562W5o6q5pa9 114968
+5rW357u1 114969
+5YWo6ZWH 114970
+5LqL5YWz 114971
+5bit5omn6KGM 114972
+5bit5omn6KGM5a6Y 114973
+6YKj5qyh 114974
+5Y+v6IO95Ye6546w 114975
+5Lit5b+D5Z+O5biC 114976
+57+76Lqr 114977
+5Lmf566X 114978
+5L6155Wl 114979
+5ZaH5Y+t 114980
+5q+P5qyh6YO9 114981
+6KeF 114982
+6Zmi6Zmi6ZW/ 114983
+5aeL5LqO 114984
+6K2m5Yqh 114985
+6I2v5p2Q 114986
+5bGg5p2A 114987
+5pys6Lqr5bCx 114988
+6ZqP5pe26ZqP 114989
+6ZqP5pe26ZqP5Zyw 114990
+5ZSu5Y2W 114991
+5peg5Lq66am+6am2 114992
+6aKF 114993
+5ZOB6LOq 114994
+5Ziy56yR 114995
+6LeR5Y67 114996
+5YWL6YeM5pav 114997
+55W45b2i 114998
+5L+u6aWw 114999
+55+p6Zi1 115000
+6Z+z5LmQ5Lya 115001
+5p+z5bee 115002
+6b2h 115003
+5Lya6LCI 115004
+5q2j54mI 115005
+5Lmf5ZCM5qC3 115006
+5pqn5pin 115007
+6KGM5pS/6YOo6Zeo 115008
+5LmW5LmW 115009
+6IKk6Imy 115010
+5pe25Lu7 115011
+55yf5YiH 115012
+5pyI5LiL 115013
+5pyI5LiL5pes 115014
+5Lic5pa56LSi5a+M 115015
+6KOF5L+u5YWs5Y+4 115016
+6YCA6L+Y 115017
+5YuY5a+f 115018
+5ZOl5Lym 115019
+5ZOl5Lym5q+U5Lqa 115020
+54us5LiA 115021
+54us5LiA5peg 115022
+54us5LiA5peg5LqM 115023
+6LCD5ZGz 115024
+5Y6L6L+r 115025
+5YWo55CD5pyA5aSn 115026
+5Ymv5qCh6ZW/ 115027
+5pu05L2O 115028
+5YiG6ZKf5ZCO 115029
+5Zue5L6G 115030
+5Yi25YmC 115031
+5ZGK6K+J5aSn5a62 115032
+54K56ZKf 115033
+5Y2B5LiJ5bGK 115034
+5ZGo5Zub 115035
+6L+Z5qC35LiA 115036
+6L+Z5qC35LiA5p2l 115037
+6Iuf 115038
+5pyb5Y67 115039
+5oiQ6K+t 115040
+5b2T5Y2z 115041
+56yR5aOw 115042
+5LmL5Yq/ 115043
+5YiR5LqL5qGI5Lu2 115044
+5oyC552A 115045
+5L2V56eN 115046
+5bCP5ri45oiP 115047
+5Zu95a625oiY55Wl 115048
+5Ya35Ya3 115049
+5a6c5a6+ 115050
+5pC656iL 115051
+6LaL5LqO 115052
+5Y+N55yB 115053
+5bi46K+0 115054
+5LiH5oi3 115055
+5YO15bC4 115056
+5Y2D5LiH5Yir 115057
+5Y+R546w6Zeu6aKY 115058
+5Y+v55+l 115059
+6Zeo5oi3572R56uZ 115060
+5YGl5bq35Lqn5Lia 115061
+5Y+z6L65 115062
+5rW36L+Q 115063
+6L+R5LmO 115064
+5Yy75rK7 115065
+5oC7566X 115066
+5LiA5YiG6ZKf 115067
+5oun 115068
+5Lmf5pyJ5LiA5Lqb 115069
+5L6b55S15YWs5Y+4 115070
+5buJ5Lu3 115071
+5biu5LuW 115072
+5q2k5qyh5rS75Yqo 115073
+5Y+q6IO96K+0 115074
+6IqL 115075
+54mH5q61 115076
+5a2Y5Zyo6Zeu6aKY 115077
+5L2g5Lya5Y+R546w 115078
+6L2u5buT 115079
+572R6YCa 115080
+5ruo5rGf 115081
+5o6I5L+h 115082
+6buO5piO 115083
+5LiN5bGe5LqO 115084
+57qm5Y2g 115085
+6ZW/5rKZ5biC 115086
+6IOa6IOO 115087
+5YWD5Lu2 115088
+6ZmG5Yab 115089
+6LO86LK3 115090
+5oyH5pyb 115091
+5a6e5Lmg55Sf 115092
+54m554K55piv 115093
+54+g5rGf 115094
+55yL5LiN5Ye6 115095
+5LiN6KeB5LqG 115096
+57yJ 115097
+6Zi16JCl 115098
+5ZSQ5pyd 115099
+5rKh5b+F6KaB 115100
+5Zu95Zyf6LWE5rqQ 115101
+57uP5rWO5a2m5a62 115102
+5ZCI6IKl5biC 115103
+55Ci56Oo 115104
+56Gu5YiH 115105
+5Z+O5biC5Y+R5bGV 115106
+56235a2Q 115107
+5Lq65rCR5pyN5Yqh 115108
+5ruh5YiG 115109
+6L+35L+h 115110
+5L2c6ICF5pys5Lq6 115111
+5paH56ug5p2l5rqQ 115112
+56uZ56uL 115113
+5p6E5oiQ5LqG 115114
+6L6b5Yuk 115115
+6LaF5by6 115116
+6ZSa 115117
+5YmN5LiJ5a2j5bqm 115118
+5bCx6KeJ5b6X 115119
+5bSH6auY 115120
+6LaK5L6G 115121
+6LaK5L6G6LaK 115122
+5biC5Zy66JCl6ZSA 115123
+57u85ZCI57Sg6LSo 115124
+5a2a 115125
+5L6u6L6x 115126
+5LqM5a2X 115127
+5bel5L2c5Lu75Yqh 115128
+5Y+y5LiK5pyA 115129
+5pyA5LyY 115130
+5ZCp5ZKQ 115131
+6KGo55m9 115132
+6I6r5ZCN 115133
+6I6r5ZCN5YW2 115134
+6I6r5ZCN5YW25aaZ 115135
+5bmj 115136
+5ZCM5b+X5Lus 115137
+5bu66K6+55So5Zyw 115138
+5YSA 115139
+6YWN5YG2 115140
+5byp 115141
+5ZSx54mH 115142
+5omL6ISa 115143
+5YW85Lu7 115144
+5YGc5pS+ 115145
+5q2j5a6X 115146
+5paw5Yac5p2R 115147
+5YKs55Sf 115148
+5omA5a2m5qCh 115149
+5b+15L2b 115150
+5ZSk6YaS 115151
+5YWx5Yib 115152
+5ouJ5LiB 115153
+6IOM552A 115154
+55Sf5oCB5L+d5oqk 115155
+5Y+j5aS0 115156
+5pa55ZCR55uY 115157
+6Kq/5pW0 115158
+5oub6IGY5L+h5oGv 115159
+5YW25LuW5Zu95a62 115160
+566A5piT 115161
+5Yy/5ZCN 115162
+6K+E5rWL 115163
+5piv5LiA5bqn 115164
+54m15omL 115165
+6Laz6L+5 115166
+55CG6Kej5ZKM 115167
+5pyA5Y+X 115168
+5b+D6Lez 115169
+54i26Kaq 115170
+6Z2e5bi45Zac5qyi 115171
+6Ium6Zq+ 115172
+5oqA5biI 115173
+5rCR5oSP 115174
+5oiY5Zu9 115175
+5pu/6KGl 115176
+5rSl6LS0 115177
+5Lit5Zu95Lyg57uf 115178
+5ZCE6KGM 115179
+5ZCE6KGM5ZCE 115180
+5ZCE6KGM5ZCE5Lia 115181
+56ys5LqU5bGK 115182
+6I236Iqx 115183
+5oSP6K2Y 115184
+56Wo5Lu3 115185
+5YiG5rWB 115186
+5p2O55m9 115187
+5rGf5YyX 115188
+5o6S5pal 115189
+5L2T6YeP 115190
+5YyF5ZCr5LqG 115191
+5YiY5p+Q 115192
+546w5aaC5LuK 115193
+5bel6Im65ZOB 115194
+6L+Z56eN5pa55rOV 115195
+5Yqe5YWs5qW8 115196
+55S15bel 115197
+54WZ 115198
+5Y2h54mH 115199
+5bm05bm05bqV 115200
+5LiT6aG56LWE6YeR 115201
+5Yy756eR 115202
+5Yy756eR5aSn5a2m 115203
+5Zue5aS055yL 115204
+5LiN5bGR 115205
+6Ieq6am+ 115206
+5rKh5pS2 115207
+5omT54yO 115208
+6IS46YOo 115209
+5Y+D6ICD 115210
+5bCG5aOr 115211
+6LSr5Zuw5Lq65Y+j 115212
+55CG5oOz5L+h5b+1 115213
+6aOO5bCa 115214
+5Lq65omN6Zif5LyN 115215
+55G+ 115216
+5p2l6L+Z6YeM 115217
+5rSX5rak 115218
+5bm06Jaq 115219
+6IuN55m9 115220
+5LiH5LqL 115221
+6K++5pys 115222
+5bqT6YeM 115223
+54m55rS+ 115224
+54m55rS+5ZGY 115225
+6LWe576O 115226
+56m/5oi0 115227
+6KO95L2c 115228
+6LWe5oiQ 115229
+5LiA5L6n 115230
+5b2T5Zyw5Lq6 115231
+5ouO 115232
+57q46LSo 115233
+5L2Z5Liq 115234
+6ZSC55S15rGg 115235
+5py65Z6L 115236
+6Zmi6Zmi5aOr 115237
+5YGa5bel 115238
+5byg6LS0 115239
+56Wb5paR 115240
+5q6W5rCR 115241
+5aWR57qm 115242
+5rmY5r2t 115243
+5pCW 115244
+5a2Y6LSn 115245
+5Lqk6YCa5aSn5a2m 115246
+6LaB552A 115247
+5paH54mp5L+d5oqk 115248
+5aSH5oiY 115249
+6YeH57qz 115250
+5Y2K5pyI 115251
+5pyA5YWz6ZSu 115252
+5pyA5YWz6ZSu55qE 115253
+5o6l6YCB 115254
+5pS25Ymy 115255
+5Y+N5YCS 115256
+54Ob 115257
+5r2U 115258
+5Lyf5aSn5aSN5YW0 115259
+55qE6K+d6K+t 115260
+5a655b+N 115261
+5a6a6YeP 115262
+5pWX 115263
+5ZOB54mM5b2i6LGh 115264
+5omt6L2s 115265
+5Zu95a626YeN54K5 115266
+6Iad55uW 115267
+5LiA5qW8 115268
+5aSn6Zm4 115269
+6YKq5oG2 115270
+5Zue5ZGz 115271
+54y/ 115272
+552h5YmN 115273
+5peg6L6c 115274
+55eF5q+S5oSf5p+T 115275
+5py65qKw5YyW 115276
+54K55Lqu 115277
+5rq26Kej 115278
+5Yeg5LmO5omA5pyJ 115279
+6LeR6YGT 115280
+55S16KeG5py6 115281
+5Y+o 115282
+5pGH5LqG 115283
+5pGH5LqG5pGH5aS0 115284
+6Ieq6LSf 115285
+57u85ZCI5Yip55So 115286
+6Ieq5aaC 115287
+5Y6f5L6G 115288
+5Lmf5LiN5oOz 115289
+6IqC6K++ 115290
+6L+H5Ymp 115291
+55Sy54q2 115292
+55Sy54q26IW6 115293
+5paw5LiW57qq 115294
+6Ieq5Li75ZOB54mM 115295
+6auY5bGC5qyh 115296
+5LiA6KeS 115297
+6KGM5LqL 115298
+56WW5YWI 115299
+5ama5ZCO 115300
+6Ze06ZqZ 115301
+57yd6ZqZ 115302
+6L+Z5pSv 115303
+5LiN5pat5Yib5paw 115304
+5b6u5Z6L 115305
+5puZ5YWJ 115306
+5Lqr55So 115307
+5Lit5Zu956e75Yqo 115308
+6Zet546v 115309
+5omn5oSP 115310
+5Y+R5bGV5qC85bGA 115311
+5qC45b+D5Yy6 115312
+6aqa5omw 115313
+5YWa5ZKM5Zu95a62 115314
+5Lit5Zu95pS/5bqc 115315
+5bi26JGX 115316
+5LiH5Y2D55Om 115317
+5YWp5Lq6 115318
+5LqO5piv5oiR 115319
+5Zu65L2T 115320
+56qB5aaC 115321
+56qB5aaC5YW2 115322
+56qB5aaC5YW25p2l 115323
+6YeM56iL56KR 115324
+54ix576O 115325
+5p+l6aqM 115326
+5Y+M6LWi 115327
+6Zeq5YWJ 115328
+5qW85a6H 115329
+5pmP 115330
+5pyJ6Laz5aSf55qE 115331
+5p+U5oCn 115332
+5L+h5oGv5a6J5YWo 115333
+566h57q/ 115334
+5bm25LiN5Lya 115335
+5Zmo5Lu2 115336
+5L2g5bqU6K+l 115337
+552A5a6e 115338
+5piO5riF 115339
+5oqX55Sf57Sg 115340
+5omT5q27 115341
+5a6M5YWo5LiN5ZCM 115342
+6Iqx5qSS 115343
+5pS+5a69 115344
+5L2O56uv 115345
+5Zub6IKi 115346
+5YyX5Lqs6LWb6L2m 115347
+6ZuG5biC 115348
+5pyq5ama 115349
+5aSn5bmF5o+Q5Y2H 115350
+5bu6562R6K6+6K6h 115351
+54us5pyJ55qE 115352
+5o6i6Zmp 115353
+5rKz5rWB5Z+f 115354
+5oWV5a65 115355
+6KKr55uX 115356
+5ZO65Lmz 115357
+6I+B 115358
+5oOs5oSP 115359
+6LaK5p2l6LaK5aW9 115360
+5bm/5aSn576k5LyX 115361
+5b636IKy 115362
+5biC5Zy65Lu35qC8 115363
+5aWl5be0 115364
+5aWl5be06ams 115365
+6IqC55uu5Lit 115366
+5Lik5qy+ 115367
+5LiH5L2Z5YWD 115368
+57u05bCU 115369
+55Sf54mp56eR5oqA 115370
+5ZCs6LW35p2l 115371
+56Ca 115372
+5ouf5a6a 115373
+5rK555Sw 115374
+5aOw6KqJ 115375
+5bu6562R5Lia 115376
+6ZmQ6LSt 115377
+54mH5a2Q 115378
+55Wc56a9 115379
+572R6aaW6aG1 115380
+5LyX5625 115381
+5pKe5Ye7 115382
+5YmN5LiN5LmF 115383
+5YmN5LiW 115384
+5Zub5Liq5oSP6K+G 115385
+5rWL57uY 115386
+6Ziy56m6 115387
+5ryr6ZW/55qE 115388
+5rKQ5rW0 115389
+5q+U6L6D566A5Y2V 115390
+5rWL5a6a 115391
+5Zue6LCD 115392
+6K6p5Lq65Lus 115393
+6JKL5LuL 115394
+6JKL5LuL55+z 115395
+57uT5pm2 115396
+5aKe5re75LqG 115397
+5p2h6K+E6K66 115398
+5Ymv5Lya6ZW/ 115399
+5L2P5omA 115400
+57uZ5Ye65LqG 115401
+6LCD6YWN 115402
+5rKW 115403
+5pyJ55So 115404
+5pyJ55So55qE 115405
+5LiA5p2h6b6Z 115406
+6YeO5aSW 115407
+57yY5YiG 115408
+5rC46L+c5LiN5Lya 115409
+5p6c5qCR 115410
+5aSn5Y+R5b+r5LiJ 115411
+6bq76YaJ 115412
+5LqR6ZuG 115413
+5Y675ZOq6YeM 115414
+5YWl5biC 115415
+5Lu75oCn 115416
+5bu65qGj 115417
+5bu65qGj56uL 115418
+5bu65qGj56uL5Y2h 115419
+5LiA5qO1 115420
+56S+5Y2A 115421
+55u45Ly0 115422
+5Zq3 115423
+5aGr5YWF 115424
+5LiA5peP 115425
+576B 115426
+5Y+W6K+B 115427
+6Iiw6Zif 115428
+5Y6C5Yy6 115429
+6KG35b+D 115430
+5Y+R5bGV6Zi25q61 115431
+6auY5by65bqm 115432
+5ZeT5a2Q 115433
+6aKG6KGU 115434
+5qW85Li7 115435
+5aSn6JKc 115436
+5p6V5aS0 115437
+57Ku5rK5 115438
+6buE55Oc 115439
+5pOS 115440
+5bCP54uX 115441
+5pS56Z2p5aeU 115442
+5Y2B5YiG6ZKf 115443
+6bKc6Imz 115444
+5YWz5769 115445
+54uA5oWL 115446
+5a6e55So5oCn 115447
+5bCR6KeB 115448
+6aOe5oms 115449
+55Sw6YeO 115450
+5pCC 115451
+6L+Z5Liq6K+N 115452
+5bqU5oCl6aKE5qGI 115453
+6KeS5bqm5p2l55yL 115454
+5pWs55WP 115455
+5rOV5a6d 115456
+5ZaE5oSP 115457
+5omT5pat 115458
+5a+55Yaz 115459
+57WV5bCN 115460
+5YCf5q2k 115461
+5byA5rqQ 115462
+5bCP6Kqq 115463
+56W6 115464
+5bKB5Lul5LiL 115465
+6YCA5b255Yab5Lq6 115466
+5LiN5LmF5YmN 115467
+5Ye65Y6C 115468
+6K695Yi6 115469
+5p2l55yL55yL5ZCn 115470
+6a2U5YW9 115471
+55WZ5LiL5p2l 115472
+5bGF5a6k 115473
+5aCF5oyB 115474
+55yL5LqG5LiA 115475
+55yL5LqG5LiA55y8 115476
+6ZuG5Zui5peX5LiL 115477
+5oiY5oiY57uE5ZCI 115478
+6K6k55yf6JC95a6e 115479
+5rG96L2m5Lqn5Lia 115480
+54mp55CG5a2m 115481
+5pW1 115482
+6ZKd 115483
+5Zui6ZW/ 115484
+5LiN5pat5omp5aSn 115485
+6IKp6LSf 115486
+5Y+R5bGV55uu5qCH 115487
+6LOH6YeR 115488
+5YmN572u 115489
+5Lit5Zu95Y+k5Luj 115490
+5q275YiR 115491
+5YWF5YiG5L2T546w 115492
+5YWz6Zeo 115493
+576O5oSf 115494
+5omT5YWl 115495
+5oqR6YOB55eH 115496
+5bCR54i3 115497
+5qCR5p6d 115498
+5raI5oGv56ew 115499
+5rSb5YWL 115500
+5Y2v 115501
+6L+I5ZCR 115502
+5o6o5YuV 115503
+5LuO5Lia6ICF 115504
+5Y675Lmw 115505
+5qyi5b+r 115506
+5oul5oyk 115507
+6ams5qG2 115508
+5oqK5o6n 115509
+5pS/5YWa 115510
+5byg5oms 115511
+5a6i5qCI 115512
+57qi5pif 115513
+6YCB5p2l 115514
+5YWo5Z+f5peF5ri4 115515
+6Ieq56eB 115516
+5Y2B5LqM5p2h 115517
+5Y+55oGv 115518
+5LiA6ImY 115519
+5L+d6LS5 115520
+5pa95bel546w5Zy6 115521
+5pyJ5bm4 115522
+57ut6Iiq 115523
+5Y+v6IO95pyD 115524
+6IOM5Y+b 115525
+5L2j6YeR 115526
+5LiJ562J5aWW 115527
+5b6I5ruh5oSP 115528
+5ri45oiP5Ymv5pys 115529
+576k6YeM 115530
+5p6E5Lu2 115531
+5bqP5bmV 115532
+5aSq5rmW 115533
+5pyo6LSo 115534
+5pmL5rGf 115535
+57WC5pa8 115536
+6Lez6LeD 115537
+5YC65p2D5Lq6 115538
+562J6K+45aSa 115539
+5pS+5Ye6 115540
+5YWz6ZSu5pe25Yi7 115541
+5oSf5p+T6ICF 115542
+6aOe6KGM5ZGY 115543
+6IOG5Zu6 115544
+6IOG5Zu66YaH 115545
+5oqx5q2J 115546
+5ZGo5LqM 115547
+5paw5pe25pyf 115548
+5Ya36ZO+54mp5rWB 115549
+6L+Z56eN5pa55byP 115550
+6K+l5p2R 115551
+5Zue6aaI 115552
+5Z+6552j5pWZ 115553
+5Lq65Y+C 115554
+5p6v54el 115555
+5om55Y+R5biC5Zy6 115556
+5YWF5YiG6IKv5a6a 115557
+5biC5pS/5Y2P 115558
+5LqL5qWt 115559
+6Zy4546L 115560
+54Ot5pCc 115561
+5Y2B5Lmd5aSn 115562
+5Ly05pyJ 115563
+576O5Zu95oC757uf 115564
+5Z+O5biC566h55CG 115565
+5LiL5Luk 115566
+6IO45Y+j 115567
+5Y+q55+l6YGT 115568
+5ZGo5LiJ 115569
+55So5oi2 115570
+6a2v 115571
+5b+D6KGA 115572
+5bim5aS05Lq6 115573
+5Yy75Yqh 115574
+5Yy75Yqh5Lq65ZGY 115575
+5o6n5Yi25Zmo 115576
+5L2c5ZOB5YaF5a65 115577
+5oiY5Y+L 115578
+5Y6G5bm0 115579
+5LiN5YWL 115580
+5LiN5YWL5LiN5Y+K 115581
+5pel5q2j5byP 115582
+6LGQ5a+M 115583
+56iO6LS5 115584
+5pe25pWI 115585
+5bGV5L2N 115586
+6KGh6Ziz 115587
+5oi/6LK4 115588
+54iG5qy+ 115589
+5LmQ5oSP 115590
+55S35Li7 115591
+5a+s 115592
+5pyD6K2w 115593
+5LmL5aSc 115594
+5ZCM5qij 115595
+5LiN6KaB5aSq 115596
+5LyK5pav 115597
+5LyK5pav5YWw 115598
+5Z+65pys5Y6f5YiZ 115599
+5Y675o6J 115600
+5L2O5L+d 115601
+5Liq5Lqk5piT 115602
+5Liq5Lqk5piT5pel 115603
+6IGK6IGK 115604
+5Zub5L2N 115605
+5YWa57uE5oiQ5ZGY 115606
+5Li76KaB5LuO5LqL 115607
+5b2x6Z+z 115608
+5YaS5Ye6 115609
+5ZG85ZC46YGT 115610
+6L6+5bCU 115611
+5pyo5Zyw5p2/ 115612
+6K+h5byC 115613
+54Gv5YW3 115614
+54Gr54On 115615
+6Kej6ISx 115616
+5oSI5Y+R 115617
+5rmW5bee 115618
+6aOO5L+X 115619
+5paw5b2i5Yq/ 115620
+5paw5b2i5Yq/5LiL 115621
+6LKd 115622
+6IST 115623
+5Yqo5Yqb55S15rGg 115624
+6aOe6Ii5 115625
+6Z+n5oCn 115626
+5Yip54mp 115627
+5Yip54mp5rWm 115628
+5LiN6K6k6K+G 115629
+57yW57uH 115630
+5L2c5Z2K 115631
+6IGM5Lia5oqA6IO9 115632
+55yL6KaL 115633
+5Zu05qOL 115634
+5piP6L+3 115635
+5b2S5bGe5LqO 115636
+5oKs5bSW 115637
+6Yar55mC 115638
+5a6L5Luj 115639
+5bqE5p2R 115640
+6JeV 115641
+54yb54S2 115642
+54eD5paZ55S15rGg 115643
+5a6e5L2T5bqX 115644
+5LiN6Laz5Lul 115645
+5oOF57c= 115646
+5oOF57eS 115647
+5buK5Z2K 115648
+55S15Y+w 115649
+5bqU5Yqb 115650
+5Lit5bCP5a2m55Sf 115651
+6IOh5ZCM 115652
+6Ym05Yir 115653
+5YaF572u 115654
+5Lmx6LGh 115655
+5qyK55uK 115656
+5byA5pS+5byP 115657
+5Y2a5paH 115658
+6K6y6K++ 115659
+562J5Y6f5Zug 115660
+56m35Lq6 115661
+5Lqk5pu/ 115662
+5oqk54Wn 115663
+5Y+R5bGV5py66YGH 115664
+5a6i5ZWG 115665
+5Y+N5LmL 115666
+57Gz6aWt 115667
+5bm25Y+R 115668
+5bm25Y+R55eH 115669
+5rGJ5a2Q 115670
+5p6c5Zut 115671
+5a+55oiR5p2l6K+0 115672
+5YGP5ZCR 115673
+5om556S6 115674
+6K+75ZCO 115675
+6K+75ZCO5oSf 115676
+5piO5pm6 115677
+5Zu0552A 115678
+5Y+N6L2s 115679
+5p2o5bmC 115680
+5LiT5Y2W 115681
+5LiT5Y2W5bqX 115682
+5Y+X6ZmQ 115683
+5bqf6K+d 115684
+5p6B5bCR 115685
+5Y2I5ZCO 115686
+6L+b5L+u 115687
+5YmK5YeP 115688
+5pys56eR55Sf 115689
+5LyY6YCJ 115690
+5YWJ54Wn 115691
+5Y+Z5LqL 115692
+5Y+W5pqW 115693
+5YyX6Lev 115694
+5qaV 115695
+6I6G55Sw 115696
+5qW85bGC 115697
+5aSp6Iqx 115698
+5aSp6Iqx5p2/ 115699
+54Kc 115700
+5bey57uP5pyJ5LqG 115701
+6La+ 115702
+55Sz5Y2a 115703
+55S16Zi7 115704
+5Yqf6K++ 115705
+5q2l5q2l 115706
+6YKj5LmI5a655piT 115707
+5q2k5paH 115708
+5L2w 115709
+6K6h6L6D 115710
+54mH6Z2i 115711
+55S15b2x6Zmi 115712
+5LiN5YWs5bmz 115713
+5LiJ5pyf 115714
+5peF5ri46LWE5rqQ 115715
+5aSa56eN5b2i5byP 115716
+6KOC57yd 115717
+5ZCO5o6S 115718
+56Gs5bqm 115719
+5Zue5pqW 115720
+6YGT5pWZ 115721
+6LSr6KGA 115722
+5riF6aaZ 115723
+5Lyk55eF 115724
+5oSP576p 115725
+55qE57yY 115726
+55qE57yY5pWF 115727
+5bqE5Lil 115728
+5Y+q5piv5Li65LqG 115729
+5omT5oqY 115730
+5Lul5L6G 115731
+5ru/6Laz 115732
+546b5Li9 115733
+6aKo6Zqq 115734
+5paH56eR 115735
+6YWN5aSH5LqG 115736
+6L+b6aOf 115737
+5rah 115738
+6Lev56iL 115739
+5Y+r5aOw 115740
+5Lit5b+D5Z+O5Yy6 115741
+5pyJ5omA5LiN5ZCM 115742
+5by16LK8 115743
+6aKE5oql 115744
+5pyJ5aSa5LmI 115745
+6L+b6KGM5YWo6Z2i 115746
+5pu+57aT 115747
+5LiJ5Luj 115748
+5a6P5aSn 115749
+5riF5omr 115750
+6YCJ5Ye6 115751
+5ZOq5LiA5Liq 115752
+5Li7576p 115753
+5L6d5pOa 115754
+55qu6Z2p 115755
+6LW25p2l 115756
+562b5p+l 115757
+5qif 115758
+5L+d6I2Q 115759
+5ZCD5oOK 115760
+5pyL5Y+L5Lus5a+5 115761
+5LuW5piv5LiA5Liq 115762
+5bqf5rCU 115763
+5ruF 115764
+6LSi56iO 115765
+5p2R5p2R5rCR 115766
+6LWE5Lqn6LSf5YC6 115767
+5a6J5aic 115768
+55uu5YmN5Zu95YaF 115769
+5oSf6KeJ6Ieq5bex 115770
+57WQ5ZCI 115771
+6ZSm5qCH 115772
+6ZSm5qCH6LWb 115773
+5pu05rex 115774
+5Z+65pWw 115775
+6YW/6YWS 115776
+54m56Imy5Lqn5Lia 115777
+5Y6L5a6e 115778
+5L6d5rOV6L+956m2 115779
+5reh5a6a 115780
+566A55u05bCx5piv 115781
+5aOT5Yqb 115782
+5rCR5b+D 115783
+5LiN5ZCI6YCC 115784
+55Sx5q2k5Y+v6KeB 115785
+6LWe6KqJ 115786
+5r6k 115787
+5Yeg5bm05YmN 115788
+5ZCJ5LuW 115789
+56C05o2f 115790
+6L276L275Zyw 115791
+5bKb5bG/ 115792
+5oSP5aKD 115793
+5LuA5LmI5Y+r 115794
+5YGH6KOF 115795
+6YCB6LSn 115796
+5bmV5aKZ 115797
+5aal5Y2P 115798
+5Zu95peX 115799
+5LqG5b6I5LmF 115800
+5YiG6L6o546H 115801
+57SU 115802
+6Ziz5Yy6 115803
+5Yet552A 115804
+5YGc6L2m5L2N 115805
+5Lqs6YO9 115806
+6ZSj 115807
+5pO+ 115808
+6L+b6Zeo 115809
+5YiY5rW3 115810
+5Zub57qn 115811
+5aWz6Laz 115812
+6KGM5pS/5a6h5om5 115813
+6YGl5o6n 115814
+5LiN6Yyv 115815
+5b6X5b6I5aW9 115816
+5Li655uu55qE 115817
+5LuN5pyq 115818
+57K+6KOF 115819
+6YCN6YGl 115820
+5bC95aS0 115821
+57qg57yg 115822
+6aCY5bCO 115823
+5ouF6LSf 115824
+5oiW6ICF5YW25LuW 115825
+5Y+q5LiN6L+H5piv 115826
+5Y+u5Zix 115827
+5YGH5YaS 115828
+5pqW5rCU 115829
+55uQ5Z+O 115830
+6KKr6KeG5Li6 115831
+6K+66LSd5bCU 115832
+57uZ5LqG5oiR 115833
+6L+R5Y2D 115834
+6YeN5Zue 115835
+6YaS5LqG 115836
+55S16Kej 115837
+5b+955Wl5LqG 115838
+6IOM6YOo 115839
+5paH5piO5Z+O5biC 115840
+5rqF 115841
+6LKT 115842
+5oq15oyh 115843
+5Zac5qyi5ZCD 115844
+6Z2Z6Z2Z5Zyw 115845
+5b6I5rex 115846
+5Z+656GA55+l6K+G 115847
+6L+H6ZSZ 115848
+55CG56eR 115849
+5Lqk5rWB5ZCI5L2c 115850
+6IiU 115851
+6Kq/5p+l 115852
+5oWI5oKy 115853
+6ZKw 115854
+6Ie055S1 115855
+5a6j5Lyg5rS75Yqo 115856
+5Y+Y6YeP 115857
+55qE5Lq65p2l6K+0 115858
+5pe26ZqU 115859
+5LiN566h5L2g 115860
+55u46L+R 115861
+6LS16YeR5bGe 115862
+5Lmf5LiN5Y+v6IO9 115863
+57KJ5pyr 115864
+5Y2X55Oc 115865
+55m96ams 115866
+5YWJ5rqQ 115867
+6YeR5aWW 115868
+54us6KeS 115869
+54us6KeS5YW9 115870
+5aao56KN 115871
+57uZ5Yqb 115872
+5L2G5LuN 115873
+5byg5a625Y+j 115874
+6JCs5YWD 115875
+5riy5p+T 115876
+6ZW/5aSn5LqG 115877
+6K6w6ICF5LqG6Kej 115878
+5oCA552A 115879
+6KaB5a2m5Lya 115880
+5ri45oiP5Luj 115881
+5ri45oiP5Luj57uD 115882
+5LqM55m+ 115883
+5oSP6K+G5b2i5oCB 115884
+5466 115885
+6K6h5YiS55Sf6IKy 115886
+5om+5YeG 115887
+5YWw6Iqx 115888
+6L+Z5bqn5Z+O5biC 115889
+5rGh5rOl 115890
+5a6Y5pa55b6u5L+h 115891
+5b2S5bGe 115892
+5rCn5rCU 115893
+6YGO56iL5Lit 115894
+5Y2w6LGh5rex5Yi7 115895
+56iz5aal 115896
+57WQ5p2f 115897
+5a2V5pyf 115898
+54m55p2D 115899
+5Z2a5Zu6 115900
+6aG65Yq/ 115901
+5p6c6JSs 115902
+6Yar5bir 115903
+5Y6u 115904
+5Lmf5piv5aaC5q2k 115905
+6aaS5aS0 115906
+55u45Yqp 115907
+5bmy57q/ 115908
+5LiA5pys5Lmm 115909
+57ul 115910
+5oyv5aWL 115911
+6IK+6ISP 115912
+5YuV54mp 115913
+6aOe6LeD 115914
+6I+c5ZOB 115915
+5aSa5L2Z 115916
+5aSa5L2Z55qE 115917
+6YCd5LiW 115918
+5oGL5Lq6 115919
+5byA5Y+R5Yip55So 115920
+6aG65Liw 115921
+6YeO5b+D 115922
+5qCh5aSW 115923
+5oGQ6b6Z 115924
+6Z2i5YW3 115925
+6ZW/6L6I 115926
+6ZqP5aSE 115927
+6ZqP5aSE5Y+v6KeB 115928
+57Sn57y6 115929
+6YeN5Lit 115930
+6YeN5Lit5LmL 115931
+6YeN5Lit5LmL6YeN 115932
+5aWl5pav 115933
+5aWl5pav5Y2h 115934
+5LiA5Liq5aSa 115935
+5LiA5Liq5aSa5pyI 115936
+5LiN5Y+v57y65bCR 115937
+5paw5qC85bGA 115938
+5o+Q5oyv 115939
+6KGM6LS/ 115940
+5ryC5rWB 115941
+6IGK5Z+O 115942
+5YW05bu6 115943
+6LSo5qOA 115944
+56eB5pyN5ri45oiP 115945
+5pu06YeN6KaB 115946
+6LSu 115947
+54Wc 115948
+6L2s5Y+Y5Li6 115949
+6L+Z5Lik5bm0 115950
+5L+d6bKc 115951
+5omn5pWZ 115952
+54Oo 115953
+5byA5Y+R5bu66K6+ 115954
+6L+Q6JCl566h55CG 115955
+6K+v5beu 115956
+5Lqs5Ymn 115957
+5biQ5Y+3 115958
+5bel5L2c5L2c6aOO 115959
+5LiW5L+X 115960
+55m95a6r 115961
+5aSp5Zu9 115962
+5aSp5Zu957un57ut 115963
+5be05pav 115964
+6JCl5Yip 115965
+5ZOB5qC8 115966
+5p2R5rCR5Lus 115967
+5oi/6L2m 115968
+562J55eH54q2 115969
+5aaC5a6e 115970
+5a64 115971
+5bGC57qn 115972
+6ZSZ6L+H5LqG 115973
+57uT5a6e 115974
+56yR6IS4 115975
+55yf5a6e5oCn 115976
+6YO95biC5oql 115977
+6aWt6I+c 115978
+5bqU5rOo5oSP 115979
+5oq954Of 115980
+5Lyq6YCg 115981
+5YmN5LiA5aSp 115982
+6a2U6b6Z 115983
+6a2U6b6Z5Luk54mM 115984
+57qm6LCI 115985
+57uf56255o6o6L+b 115986
+6K6p55So5oi3 115987
+5YWo6Z2i6JC95a6e 115988
+5byE5b6X 115989
+6LCI5oGL54ix 115990
+6bif5oiQ6ZW/ 115991
+6bif5oiQ6ZW/6K6w 115992
+5rSL5rSL 115993
+55aP5pWj 115994
+6Z2i56ev57qm 115995
+5rWT57yp 115996
+5pav6aG/ 115997
+55Sf5oCB5ZyI 115998
+5omn5a+8 115999
+56e76YCB 116000
+6b2/6L2u 116001
+5qC55pys5bCx5LiN 116002
+57yp5YeP 116003
+6LWw5LiL5Y67 116004
+552r5q+b 116005
+5Lmf5LiN6ZSZ 116006
+5Y+N5pig5Ye6 116007
+6Ium5oG8 116008
+55u45YWz5pS/562W 116009
+6auY5qW8 116010
+57KJ6Imy 116011
+5oqV6LWE6aKd 116012
+5LiN57uP 116013
+5LiN57uP5oSP 116014
+5a6B5oS/ 116015
+6IiM5aS0 116016
+5ruL55Sf 116017
+5a6B5Y6/ 116018
+5YmN5YiX6IW6 116019
+5Yez 116020
+6aOf5qyy 116021
+5Y+W6IOc 116022
+6Zmi5a2Q 116023
+57Sg6LSo5pWZ6IKy 116024
+5ruo5bee 116025
+5oqi5oqT 116026
+5byC5ZGz 116027
+5ZKa 116028
+5YqN 116029
+5a696ZiU 116030
+5pq05rao 116031
+5oOg5Y+K 116032
+6KeE56iL 116033
+5L6b5YW7 116034
+6YCB5b6A 116035
+5bGx5bqE 116036
+5Lic5Lqa 116037
+5bGV6aaG 116038
+6Kej6ZSB 116039
+5peg6KeG 116040
+6ZmN6JC9 116041
+6L+e5LqR 116042
+6L+e5LqR5riv 116043
+5Y+C6LCL 116044
+546W 116045
+56yD 116046
+6ICX6LS5 116047
+5om/5b63 116048
+56S+5Lya5pWI55uK 116049
+5Y2X5rW3572R 116050
+5Yib5Lyk 116051
+6JCx 116052
+5YWF5rKb 116053
+572R56uZ5bu66K6+ 116054
+5aSn5bqG 116055
+5YaN6YCg 116056
+5a2X5qC3 116057
+5YWo5rCR5YGl6Lqr 116058
+6Iyr6Iyr 116059
+5rWu5Yqo 116060
+5YmN5Y+w 116061
+5aKe6K6+ 116062
+6YCb6KGX 116063
+5YCS6Zet 116064
+5rOV5b6L6aG+6Zeu 116065
+55au 116066
+55eF55eH 116067
+56m65YmN 116068
+6K+35pWZ 116069
+6IOc5Lu7 116070
+5p2A6I+M 116071
+5oiY5paX5py6 116072
+57uY5Yi2 116073
+5aSE5pa5 116074
+56qB5Zu0 116075
+54yr5ZKq 116076
+5oql5ZGK5pi+56S6 116077
+57+f 116078
+55W25Zyw 116079
+5pyA6Zq+ 116080
+57qq5aeU5Lmm6K6w 116081
+5L2O5Y6L 116082
+6Jma56m6 116083
+6L+Z6YOo55S15b2x 116084
+5Lqn5Lia5Y2H57qn 116085
+6LC354ix 116086
+6LC354ix5YeM 116087
+5oq86YeR 116088
+5aWz5pa5 116089
+6ZK756CU 116090
+5pqX5pqX 116091
+6L+35L2g 116092
+5omA6KyC 116093
+5aiB5buJ 116094
+5byA5pyX 116095
+5bKU 116096
+54Gr54Ks 116097
+5ZCI55CG5oCn 116098
+5YWs5Yqe 116099
+5Lya5Lya6ZW/ 116100
+6Zi06LCL 116101
+5byA5bGA 116102
+5pmu6YCa6K+d 116103
+5Y2h5ouJ 116104
+5bCR5ZCD 116105
+6Zeq6ICA 116106
+5p6c5rGB 116107
+5omn6KGM5Yqb 116108
+6LCb 116109
+5oqi5Yqr 116110
+6auY6YCf5Y+R5bGV 116111
+6Z+s 116112
+5Y2X5rKZ 116113
+6auY562J5a2m5qCh 116114
+5o2i5Liq 116115
+5Y+v6IO95a2Y5Zyo 116116
+5oqS 116117
+6LCx5YaZ 116118
+6KKr5oqT 116119
+5p2v5a2Q 116120
+6IqC6IO95YeP5o6S 116121
+5rCU5YCZ5Y+Y5YyW 116122
+5YiG5Yil 116123
+5Lit5p6i 116124
+5qyi5ZG8 116125
+5YWJ57qk 116126
+6L+Z576k 116127
+55y855WM 116128
+5YWx5ZCM5Y+R5bGV 116129
+546w5LuK 116130
+6Ze76KiA 116131
+54m56Imy5bCP6ZWH 116132
+5pWR5Lq6 116133
+6ZmN5rC0 116134
+5LiW55WM5LiA5rWB 116135
+5bCx6aSQ 116136
+556l 116137
+5aSN5LuH 116138
+57695q+b 116139
+57695q+b55CD 116140
+6LSp5Y2W 116141
+5rqQ5rOJ 116142
+5oC75L2T6KeE5YiS 116143
+5Yqo5oSf 116144
+5LiA5a6h 116145
+5YCf6ZKx 116146
+6KeB5pWI 116147
+6Iqx6I2J 116148
+5ZCM5Lia 116149
+5p+l6Kmi 116150
+5Zu96ZmF5ZCI5L2c 116151
+5L6b5Zu+ 116152
+5YG0 116153
+5qCT 116154
+55u46YCa 116155
+6LCI5Y+K 116156
+6L+H56iL5b2T5Lit 116157
+6aaZ6I+H 116158
+5Y2B5Zub5p2h 116159
+5LiA5byA5aeL5bCx 116160
+5LiT5ZGY 116161
+5piO6aGv 116162
+5omT6YCg5Ye6 116163
+5LiL6Z2i5oiR5Lus 116164
+5py65rK5 116165
+5Y+w6K+N 116166
+5a2Q5byf 116167
+5pyA5bi46KeB55qE 116168
+5oiR6K6w5b6X 116169
+57uw 116170
+5oKs5rWu 116171
+6L+Y55yf5piv 116172
+5oyC5Y+3 116173
+5Y+L5ZaE 116174
+6YeN5Lyk 116175
+54Wn5Lqu 116176
+5q2m6K2m 116177
+5Ye6546w6Zeu6aKY 116178
+6LiK6LeD 116179
+5Zyw55CD5LiK 116180
+5biC5Lq65aSn 116181
+5Y+X5a6z5Lq6 116182
+5bKQ 116183
+5ZCM5a24 116184
+6YeR6J6N5biC5Zy6 116185
+5pyJ55qE546p5a62 116186
+5biC5pWZ6IKy 116187
+5biC5pWZ6IKy5bGA 116188
+5ZCE5byC 116189
+57ea5LiK 116190
+5oG6 116191
+5pyJ5aSn6YeP55qE 116192
+5ZWG5oql 116193
+5Y2V5Y2V 116194
+5YWo6aKd 116195
+5L6d5pen5piv 116196
+5aW95Yeg5Liq 116197
+5Za1 116198
+6YeN5pW0 116199
+55Sf5rS76LSo6YeP 116200
+5o6i6K6/ 116201
+5Y2w6Iqx 116202
+55ub6KGM 116203
+5b6u6KeC 116204
+6IiN5b6X 116205
+5bqf5byD54mp 116206
+56ev6JOE 116207
+5a6a5bGF 116208
+5oK8 116209
+6Iy4 116210
+55qE5biu5Yqp 116211
+55qE5biu5Yqp5LiL 116212
+5Lq/5ZCo 116213
+5a2U6ZuA 116214
+6L+Z5p2h6Lev 116215
+6aW1 116216
+5oSI5Yqg 116217
+6ZWN 116218
+5L2c5qGI 116219
+6I2U5p6d 116220
+5aSq5bCR 116221
+6Le76Lqr 116222
+5YWs55uK5rS75Yqo 116223
+55m95paR 116224
+5oqA5pyv5rC05bmz 116225
+5bin 116226
+5peg55+l 116227
+5bqU6K+l5oCO5LmI 116228
+6YCA5biC 116229
+5rit 116230
+5YW754yq 116231
+6am8 116232
+576k5bKb 116233
+5aSn5Y2r 116234
+5LmY55So6L2m 116235
+6I+y5bCU 116236
+6LS05ZCn 116237
+5YGc5LiL5p2l 116238
+5pyJ5py657uT5ZCI 116239
+5Yi76Ium 116240
+55qE5Zyw 116241
+55qE5Zyw5q2l 116242
+6K+K5omA 116243
+5byA5oiY 116244
+6ICB54mM 116245
+562556CB 116246
+5YWr5aSn5Lul5p2l 116247
+5qW85oi/ 116248
+5a2Z5oKf 116249
+5a2Z5oKf56m6 116250
+5YWS5a2Q 116251
+56ys5LiA5p2h 116252
+56S+5Lqk5aqS5L2T 116253
+5oOz6LW35p2l 116254
+5aSn5rSL 116255
+5ou86Z+z 116256
+6L+b5Y2a5Lya 116257
+6L+H5YWz 116258
+5rK8 116259
+56m/5pCt 116260
+6YKj5LiA5aSp 116261
+56C06Zeo 116262
+5oqV5qCH5Lq6 116263
+6LWi5a62 116264
+6Jma5byx 116265
+5r+D 116266
+5a6J5qOA 116267
+5a6i5a62 116268
+54us56uL6JGj5LqL 116269
+5omL5Yq/ 116270
+5Ym16YCg 116271
+5ZyG5ruh5a6M5oiQ 116272
+5Li65Li757q/ 116273
+5aW95aWH5b+D 116274
+6aKG5Zyf 116275
+56qW 116276
+5YW45Z6L5qGI5L6L 116277
+56qB5Y+R5LqL5Lu2 116278
+5bqV5rCU 116279
+5aS05pmV 116280
+5a6b5aaC 116281
+6Ke4 116282
+5riF5reh 116283
+5Zq8 116284
+5YGc55S1 116285
+57KJ5bCY 116286
+6ZmN5L2O5oiQ5pys 116287
+5pS+5omL 116288
+6K6w6ICF6KGo56S6 116289
+5ouW5bu2 116290
+6aqH 116291
+5q6L5b+N 116292
+55yB5pWZ6IKy 116293
+55yB5pWZ6IKy5Y6F 116294
+6auY6aKd 116295
+6YSZ 116296
+5qWe 116297
+5YaF56eR 116298
+6JCl5Lia6aKd 116299
+5Z+655+z 116300
+5rWB5reM 116301
+5Li75peo 116302
+6ZiQ6YeK 116303
+5bu65Y2O 116304
+5oOK5Y+5 116305
+54mi5Zu65qCR56uL 116306
+5piv5ZCm5a2Y5Zyo 116307
+5bu65Yab 116308
+6Zu+6Zy+ 116309
+5YWs6K6k 116310
+5YWs6K6k55qE 116311
+5rCo5Z+6 116312
+5rCo5Z+66YW4 116313
+5YmN5Yeg5bm0 116314
+5Yi56YKj 116315
+5rGf5Lic 116316
+5bel5qWt 116317
+5LiA54K55Lmf5LiN 116318
+5L+u5aOr 116319
+5LqG5LiA6YGN 116320
+5YiB 116321
+5rua5rua 116322
+5YiG5qCh 116323
+55yf54ix 116324
+6KGA6ISJ 116325
+5oCl5Ymn 116326
+5LiA576k5Lq6 116327
+576v 116328
+5oiQ6b6Z 116329
+57K+56We55eF 116330
+55u45YWz5Lq65ZGY 116331
+6Z2T5Li9 116332
+5LiJ5a2j5bqm 116333
+5YiS5a6a 116334
+5LiW55WM56ys5LiA 116335
+6YCa5L+X 116336
+5ZWG5Lia5Zyw5Lqn 116337
+5Yqf6IO95oCn 116338
+6LWE5pys5Li75LmJ 116339
+6K+m6KeB 116340
+5oqT5o2V 116341
+5paH5piM 116342
+5a6d5a6J 116343
+6KOF6YWN5byP 116344
+5rqQ5rqQ 116345
+5rqQ5rqQ5LiN5pat 116346
+55Sf5oCV 116347
+57q15ZCR 116348
+5aO9 116349
+55y86KKL 116350
+6IKJ5L2T 116351
+5Y+k5LuK 116352
+6J6N5aqS5L2T 116353
+5YGJ 116354
+5qC85pyD5ZOh 116355
+54O3 116356
+5Yqf55So 116357
+5omt55+p 116358
+57u/6Imy6YCa6YGT 116359
+5Ymn57uE 116360
+5byx5Yq/ 116361
+6LSo6YeP6Zeu6aKY 116362
+6ZmQ6aKd 116363
+6aqG 116364
+6YG15LmJ 116365
+5a+d5a6k 116366
+5oOz5b+1 116367
+5aCx5ZGK 116368
+5LuF5qyh 116369
+5LuF5qyh5LqO 116370
+6J6N5Yib 116371
+5oub6IGY5Lya 116372
+5bqK5Z6r 116373
+6L2s5Z6L5Y+R5bGV 116374
+5Lit5Zu955S15L+h 116375
+5ZCs6K+d 116376
+6KuL5rGC 116377
+5aSn6YOo5YiG5Lq6 116378
+5rS75b6X 116379
+5ZOt5rOj 116380
+6LaZ 116381
+5Y+R55eF546H 116382
+5LiN56ym 116383
+5Yab5a6Y 116384
+6aKI5qSO 116385
+5paw5Yag55ar5oOF 116386
+5p+s5Z+U 116387
+5p+s5Z+U5a+o 116388
+5Lu75L2V5b2i5byP 116389
+5Lq66ZmF 116390
+5Lq66ZmF5YWz57O7 116391
+5oC75om/5YyF 116392
+5bmz5Z2H5q+P 116393
+5oGt5Zac 116394
+5YSY 116395
+5YW16ams 116396
+6L+f5Yiw 116397
+5bel5Lyk 116398
+54mI5p2D5b2S 116399
+54mI5p2D5b2S5Y6f 116400
+5oul5oqk 116401
+57OK5raC 116402
+5bmy5raJ 116403
+5bCR5LiN5LqG 116404
+5oOz5om+ 116405
+6LS5546H 116406
+6K+l6Zmi 116407
+6J6N5YyW 116408
+6L+O5ZCI 116409
+6KeG5ZCs6IqC55uu 116410
+5qC857ay56uZ 116411
+55yJ5q+b 116412
+5qyi6L+O5aSn5a62 116413
+5a625bqt5pWZ6IKy 116414
+5L616JqA 116415
+57uZ5L2g5Lus 116416
+6KGA5ray5b6q546v 116417
+5a+E5omY 116418
+5bCW5Y+r 116419
+5Lul5LiL5Yeg5Liq 116420
+6L+Y5Lul5Li6 116421
+5YW25LuW546p5a62 116422
+56yR56yR 116423
+5omT5ZCs 116424
+6Ieq54S256eR5a2m 116425
+5Z+656uZ 116426
+5Lmd5bee 116427
+5L+d6am+ 116428
+5L+d6am+5oqk 116429
+5L+d6am+5oqk6Iiq 116430
+5pS+55y8 116431
+55+l5ZCN5LyB5Lia 116432
+57iu 116433
+56i9 116434
+5pqH 116435
+5L2/55So57ay6Lev 116436
+6aKE55WZ 116437
+5aSn6LGh 116438
+5Y+R5piO5LiT5Yip 116439
+5paH5aix 116440
+6YCg56aP 116441
+5rm/5ram 116442
+6Z2i5p2h 116443
+5raI6LS55Y2H57qn 116444
+6K6K5b6X 116445
+5Yeg5ZCN 116446
+5LuE 116447
+6K6k5riF 116448
+6L+c5pmv 116449
+5o+S5bqn 116450
+6K+45L6v 116451
+5Y+Y5oCB 116452
+56aP5b2p 116453
+6LSn5p62 116454
+5aSx5o6n 116455
+56e75Yqo56uv 116456
+5LiK5Y+4 116457
+6YCg57q4 116458
+5biD5pyX 116459
+55KH 116460
+5Y+w5Y2X 116461
+5YyX5Lqs5Yas5aWl 116462
+6JOd54mZ 116463
+6ZW/55+t 116464
+5oqY5bCE 116465
+57uR5p62 116466
+5a+S5YGH 116467
+6L2s5Z+65Zug 116468
+5oCl5LqO 116469
+5q2j5ZOB 116470
+5YWF5ru/ 116471
+5aSn57qy 116472
+5oqX5L2T 116473
+6KiT57e0 116474
+5pS257Sn 116475
+5q+U6LO9 116476
+5YW15Yqb 116477
+5pys5pu4 116478
+5LqM5Luj 116479
+5oCl6K+K 116480
+5paH5qGI 116481
+57uP5ZWG 116482
+5pmo5oql 116483
+5qOY 116484
+5oC75Lmm6K6w5Zyo 116485
+5Y+X6YKA 116486
+5LqU5Zub 116487
+5bKt5Y2X 116488
+54ix5ZCD 116489
+5Z+D5bCU 116490
+5b+D5aKD 116491
+6KaG55uW6Z2i 116492
+5a6e5Zyo5piv5aSq 116493
+5qC55bqV 116494
+57q357q36KGo56S6 116495
+5ZeF 116496
+6ZqP552A5pe26Ze0 116497
+5Y6G5Y+y5oKg5LmF 116498
+6YWJ 116499
+5oC76Zif 116500
+5Li76aKY5rS75Yqo 116501
+6Zeu5Y23 116502
+6am/56uZ 116503
+5o+h5L2P 116504
+5Y+v6IO95a+86Ie0 116505
+5rCR6ZaT 116506
+6ZaL5ZWf 116507
+5L2G5LiN6ZmQ 116508
+5L2G5LiN6ZmQ5LqO 116509
+5Y2B6YeM 116510
+5ail 116511
+5o2f6ICX 116512
+55aP5a+8 116513
+546v5rCn 116514
+56We6YCa 116515
+54ix5bCU 116516
+54ix5bCU5YWw 116517
+5py05a6e 116518
+5b+r5oql 116519
+5pS25Y+X 116520
+5oiW6Kix 116521
+6IOM6Z2i 116522
+5paH5YyW5Lyg5aqS 116523
+5LiJ5YCL 116524
+5pS75Yq/ 116525
+5a6J5Lic 116526
+5a6J5Lic5bC8 116527
+5Z2H5bey 116528
+6aG+6JmR 116529
+6YSt 116530
+6L+Z5a625YWs5Y+4 116531
+5YWs5ZGK56ew 116532
+5o+Q5L6b5LyY6LSo 116533
+56iz5q2l5o6o6L+b 116534
+5aSN6K+V 116535
+5bCG6aKG 116536
+6LCI6LW3 116537
+5aiE 116538
+6L+e57q/ 116539
+5qmf6Zec 116540
+5bqU55So5Zy65pmv 116541
+55S75YOP 116542
+6LSi6L+Q 116543
+5L+d6Zqq 116544
+55eF55CG 116545
+5q+b5Li75bit 116546
+5Lid5q+r5LiN 116547
+54ix5aWH 116548
+54ix5aWH6Im6 116549
+5LiT5a6257uE 116550
+5ZG85ZSk 116551
+6Yu8 116552
+54G4 116553
+6aKG5YWI5Zyw5L2N 116554
+5o+Q5ouU 116555
+6Zy46YGT 116556
+5bGx5Z2h 116557
+6J2O 116558
+5rK46IW+ 116559
+6K+l6aG5 116560
+5LuK55Sf 116561
+5LiA56+H5paH56ug 116562
+5pa55byP6L+b6KGM 116563
+6buR5a6i 116564
+5pS55Yqo 116565
+5Li76aGM 116566
+5pWj5biD 116567
+5LuA5LmI5Zyw5pa5 116568
+5YyW5ZCI 116569
+5YyW5ZCI54mp 116570
+6Z2Z55S1 116571
+5oC75pS25YWl 116572
+5aeU57uE57uH 116573
+5aeU57uE57uH6YOo 116574
+6Z2Z5oCB 116575
+6ICB5a2X5Y+3 116576
+5a6k5Y+L 116577
+6YO95LiN5pWi 116578
+5p625a2Q 116579
+54G15pWP 116580
+5a6h6KeG 116581
+5oKj5YS/ 116582
+5bGx5a+o 116583
+6Jaq6LWE 116584
+6amw5o+0 116585
+6YOo5YiG5YaF5a65 116586
+5aW95Ly8 116587
+5oiQ5ZGY5Zu9 116588
+5Zyo5oiR55yL5p2l 116589
+5YWz5rOo5bqm 116590
+6ZmI5p+Q 116591
+6L+Z56eN5LqL5oOF 116592
+6YCJ5a6a 116593
+57K+5a2Q 116594
+5aOB55S7 116595
+5rGf5reu 116596
+6auY5piC 116597
+5qC85Yqb 116598
+6Lyp 116599
+5a2m5aCC 116600
+5oKo5ZCM5oSP 116601
+5LiA5YiH6YO95piv 116602
+5r2k 116603
+6ZaD 116604
+5biM5pyb6Ieq5bex 116605
+5L+Y 116606
+5rGf5Y6/ 116607
+5rO+ 116608
+56eR5pWZ 116609
+5omT6L+b 116610
+5LiN5oWO 116611
+5a+S5Yas 116612
+5riU5rCR 116613
+6Zu35pav 116614
+5Li75a6w 116615
+5peF5ri45bqm5YGH 116616
+55S15a2Q6YKu5Lu2 116617
+5rGC5ama 116618
+6ZqO5q61 116619
+5YGl6Lqr5oi/ 116620
+5rOo5piO5Ye65aSE 116621
+5LqL5pWF5Y+R55Sf 116622
+57qn5Lul5LiK 116623
+5a2Y5rS7 116624
+5pa96IKl 116625
+6Jyc6JyC 116626
+5bWp 116627
+5oyW5o6Y5py6 116628
+5oqX5ouS 116629
+5Lyg5a+8 116630
+5piv5LuA5LmI5ZGi 116631
+5LiK5bm05ZCM5pyf 116632
+5bu65YWa 116633
+55Sf5oWL 116634
+5L+d5L2P 116635
+5qy+6L2m5Z6L 116636
+5Lq66ISJ 116637
+6ZqQ6JS9 116638
+5aSx5pWI 116639
+6YG/5a2V 116640
+566A5L6/ 116641
+6LCi6LCi5L2g 116642
+5a6I5L2P 116643
+5pS+5pig 116644
+6KiI55Wr 116645
+546w5Luj54mp5rWB 116646
+6aSQ5buz 116647
+5pWF5bGF 116648
+5aSn5aSn5bCP 116649
+5aSn5aSn5bCP5bCP 116650
+54m55Yir5aOw5piO 116651
+6YGN5Y+K 116652
+5b+D55CG5ZKo6K+i 116653
+6LO0 116654
+54yu6KGA 116655
+5bey57uP6L6+5Yiw 116656
+5omT5oub5ZG8 116657
+5Y+M6L65 116658
+5LiA5pa56Z2i5piv 116659
+5bSH5bCa 116660
+6Zi/5a+M 116661
+6Zi/5a+M5rGX 116662
+5oyB5pyJ5Lq6 116663
+6LGB 116664
+6aOO562d 116665
+5Yqo6I2h 116666
+5LqG5LiA5Lya 116667
+5LqG5LiA5Lya5YS/ 116668
+5LiH6LGh 116669
+55yL55S16KeG 116670
+5Y2B5LiJ5p2h 116671
+54yb54OI 116672
+6KaB5LiN54S2 116673
+5aSq5p6B5ouz 116674
+5byV54iG 116675
+57uP6L+H5aSa5bm0 116676
+5ri45oiP6YeM55qE 116677
+6b6Z5rOJ 116678
+5qCH6YWN 116679
+6K6T5LuW5YCR 116680
+6YCg5p6X 116681
+5Yy65Z+f5oCn 116682
+5Lq/5LiH 116683
+5oiY55Wl5biD5bGA 116684
+6ZWH5pS/5bqc 116685
+5ZSu56Wo 116686
+55Sf5Lqn5bel6Im6 116687
+6ZWH5YWa5aeU 116688
+5Lit5bCP5Z6L 116689
+5pyo6ICz 116690
+5rKz6L65 116691
+6IS+6IOD 116692
+5qyi6L+O5oKo 116693
+5Y+Y5byC 116694
+57yk57q3 116695
+5Z6D5Zy+5qG2 116696
+6L6p6K+B 116697
+6L2m5bqT 116698
+5q+U546H 116699
+5YW05pe6 116700
+6K+m57uG5LqG6Kej 116701
+5a6J5bGF 116702
+54Wn5paZ 116703
+5pa55omN 116704
+6LWm 116705
+5YaV 116706
+5aWU6LW0 116707
+5a6d6bih 116708
+5Zy65Z2H 116709
+55uu5YmN5q2j5Zyo 116710
+5ZCe5Zms 116711
+6L+w6IGM 116712
+5oe1 116713
+5aWH55Ge 116714
+5LuN5bCG 116715
+6IiJ6L6m 116716
+5bel5ZWG5bGA 116717
+5aGR6IO2 116718
+5Yqe5a6e5LqL 116719
+5pa55pa56Z2i 116720
+5pa55pa56Z2i6Z2i 116721
+5paH5YyW6IqC 116722
+5YWl6IGM 116723
+6bil 116724
+56m/6YCP 116725
+5Lul5Lmg6L+R5bmz 116726
+5Y2x6Zqq 116727
+5pym6IOn 116728
+5Y6G5Y+y5oCn 116729
+5pWe5byA 116730
+5LyZ5Ly05YWz57O7 116731
+55+/5Yy6 116732
+5Zu96ZmF5Zyo57q/ 116733
+5Lyg5aWH6YeM6Z2i 116734
+6L+R5Lqb 116735
+6L+R5Lqb5bm0 116736
+5Yqj5Yq/ 116737
+5pS75Ye75Yqb 116738
+5pm66YCg 116739
+56an 116740
+546L5YWI55Sf 116741
+6Yar55Sf 116742
+5Zub6aG5 116743
+5a6e5pmv 116744
+5Yid5Yib 116745
+5b+D6KOh 116746
+5pm25L2T 116747
+5Lqk6ZmF 116748
+6K6p5raI6LS56ICF 116749
+6K++5paH 116750
+5o6S5rCU 116751
+5bm25LiN5oSP5ZGz 116752
+55u45aOw 116753
+56ys5LiA5bGK 116754
+5Y6f6JGX 116755
+6Zuc 116756
+5rKh5pyJ5aSq5aSn 116757
+6KGl5rC0 116758
+54mp5rWB5LyB5Lia 116759
+56ys5LqM5om5 116760
+5YW25a6D6Zeu6aKY 116761
+5o6M6Zeo 116762
+6LSj5Lu75b+D 116763
+6aSQ5YW3 116764
+576K5q+b 116765
+5rKh5pyJ5b+F6KaB 116766
+5LmQ5Zui 116767
+6L+b5Z+O 116768
+5LiA54K55YS/ 116769
+6Lqr5b2i 116770
+55qu6IKk55eF 116771
+5pix 116772
+5aKe6Iez 116773
+6IGy5piO 116774
+5o+Q6LSo 116775
+5L2T6IKy5Zy6 116776
+56255bu6 116777
+6ayG 116778
+6L2m54mM 116779
+6ZqU6Z+z 116780
+6LSf6LSj5ZCM5b+X 116781
+5Liw56GV 116782
+5L2b6ZmA 116783
+5LqJ5ZC1 116784
+5bq2 116785
+5reh5rC0 116786
+5bCP55S35a2p 116787
+56eB6Ieq 116788
+5YyW6L+b56iL 116789
+5oiY5aOr5p2l6K+0 116790
+5rK56IW7 116791
+6ISx6LSr6Ie05a+M 116792
+5pel5bi45bel5L2c 116793
+5Lqk6J6N 116794
+5Yac6LS4 116795
+5Yac6LS45biC5Zy6 116796
+5ZOI55m7 116797
+55S16LS5 116798
+6LWY 116799
+5Y+M6IW/ 116800
+5pOU5b+D 116801
+5p2l5b2i5a65 116802
+5L2/5ZG95oSf 116803
+6YKj5LmI566A5Y2V 116804
+6IqZ6JOJ 116805
+5YCf5qy+5Lq6 116806
+56eA5Li9 116807
+6K6T5LuW 116808
+5Lil5Y6J5omT5Ye7 116809
+6LOe 116810
+5pqr 116811
+54Wk5rCU 116812
+54is5LiK 116813
+5r2H5rSS 116814
+5aSq5LmF 116815
+5ZG95ZCN5Li6 116816
+6Lev55Sx 116817
+6Lev55Sx5Zmo 116818
+6amv 116819
+5o+Q5pep 116820
+5oqX5Ye755ar5oOF 116821
+5Yeb 116822
+5Lqk5Y+L 116823
+6ZSA5ZSu5rig6YGT 116824
+5q+r5LiN54q56LGr 116825
+6JCl5Zyw 116826
+56CU56m26KGo5piO 116827
+6bG857G7 116828
+5o2i5bGK 116829
+5o6h5Y+W 116830
+54mG 116831
+55ub5byA 116832
+5rKn5qGR 116833
+5bqt5a6h 116834
+57uP5p+l 116835
+5Yqg5by3 116836
+55u45q+U5LqO 116837
+5LiT54+t 116838
+5L2T5Z6L 116839
+6KKr5a6z 116840
+6KKr5a6z5Lq6 116841
+5pS25qy+ 116842
+5YW35pyJ6Imv5aW9 116843
+6auY5bOw5pyf 116844
+5YGP5L2O 116845
+5YSf 116846
+5Yac5Lia56eR5oqA 116847
+54m55q6K5oOF5Ya1 116848
+5aaC5p6c546p5a62 116849
+6ZW/57qm 116850
+56ys5YWt5bGK 116851
+5YWs5byA5oub6IGY 116852
+5YiH5pat 116853
+6L+r5L2/ 116854
+55aX56iL 116855
+56ys5LqM56eN 116856
+5LiN5YWN 116857
+5bmy6K2m 116858
+55+z5qa0 116859
+5Zej 116860
+5Lik57G7 116861
+54i15aOr 116862
+5Z+O5Lmh5bGF5rCR 116863
+5q2k6aG5 116864
+55u06L6W 116865
+55u06L6W5biC 116866
+5ZG85bqU 116867
+6ZKv 116868
+56aP5b63 116869
+5py66Lqr 116870
+5pON5Zy6 116871
+5r+S5Li0 116872
+5Lq6576k5Lit 116873
+6IKh5rCR 116874
+5a29 116875
+5rOV5YWw 116876
+6aiO 116877
+57Ov57Gz 116878
+5oC755qE 116879
+5oC755qE5p2l6K+0 116880
+5YW46ZuF 116881
+5paw6ZmI 116882
+5paw6ZmI5Luj6LCi 116883
+55uu5525 116884
+6aKE6KiA 116885
+6LeM56C0 116886
+5paw56+H56ug 116887
+5q+S5oCn 116888
+5Zad6Iy2 116889
+5p+l6I63 116890
+5Lqu5Li9 116891
+55Sf5Lqn5ZWG 116892
+5pS55oiQ 116893
+5Li65LqG5pu05aW9 116894
+5rex5Lqk 116895
+5rex5Lqk5omA 116896
+5o6D 116897
+5LmZ6IKd 116898
+5rO45bee 116899
+5YWI6L+b5oqA5pyv 116900
+6L6T57uZ 116901
+5pWj5oi3 116902
+5oCd57u05pa55byP 116903
+5bqX5Li7 116904
+6LCL5rGC 116905
+5ri45oiP5oqA5ben 116906
+5LiA5bm057qn 116907
+55y86KeS 116908
+5Lit5LuL5py65p6E 116909
+5ben5ZCI 116910
+6Ziy55uX 116911
+5a+86LSt 116912
+5oiK 116913
+5pu06YCC5ZCI 116914
+5Z+65pys5L+h5oGv 116915
+6ams5LiB 116916
+5YW75q6W5Zy6 116917
+5Y+N6L+H5p2l 116918
+5o6o5bSH 116919
+5a+G5YiH5YWz5rOo 116920
+5Z+66YeR57uP55CG 116921
+5oyJ6ZSu 116922
+5YaF6YOo5o6n5Yi2 116923
+5oiQ5ZGY5Y2V5L2N 116924
+5pyv6K+t 116925
+5Yi25pyN 116926
+5Yia6ZyA 116927
+5qOA57Si 116928
+5aSn5aSn5o+Q6auY 116929
+5YGl5bq3566h55CG 116930
+6Ieq5q2k 116931
+5a6i5oi36ZyA5rGC 116932
+5Liw6IO4 116933
+6LW36YeN 116934
+6LW36YeN5py6 116935
+5qyg57y6 116936
+5qGI5a2Q 116937
+5oOF5Lq66IqC 116938
+5YWa5qCh 116939
+6KKc 116940
+6K+l5Ymn 116941
+6L+35aSx5Lyg5aWH 116942
+57ua5Li9 116943
+5ZWq 116944
+5peg56eB 116945
+6YCy5LiA5q2l 116946
+56ys5LiA56ug 116947
+5Zmo5YW3 116948
+5Yac6LWE 116949
+56K65a+m 116950
+5bqP5YiX 116951
+5aix5LmQ5bmz5Y+w 116952
+6J6N6LWE56ef6LWB 116953
+6LWE5rqQ5YWx5Lqr 116954
+6IG95Yiw 116955
+5pCe5b6X 116956
+57un57ut5L+d5oyB 116957
+5ZCv6JKZ 116958
+55y6 116959
+5Lid6Lev 116960
+6K6+5pa95bu66K6+ 116961
+5o6l5Zyw 116962
+5o6l5Zyw5rCU 116963
+56ys5LiJ5a2j5bqm 116964
+5Z+66LCD 116965
+5Y+R6Z+z 116966
+56S+5Lya6LWE5pys 116967
+6ZuH5Li7 116968
+6L+e6IOc 116969
+5rKh5ZWl 116970
+5bui 116971
+6LW26LW0 116972
+5ryU5YyW 116973
+5Y+k5oCq 116974
+546L54i3 116975
+6aKE5YWI 116976
+5byA5YW3 116977
+5Zue6aaW 116978
+5Zyw5LiL5rC0 116979
+5bCP57yW5LiA6LW3 116980
+6LWO5Zue 116981
+5Zyw6LKM 116982
+5Yid5LiJ 116983
+5Y+v55So5LqO 116984
+6YGX6L+5 116985
+6L+Z5om5 116986
+6Jaq5rC0 116987
+5b+F54S25Lya 116988
+5rK9 116989
+6Y2L 116990
+56ys5LiA6YOo 116991
+5YiK54mp 116992
+5a6e5L6L 116993
+5riF5YeA 116994
+5LiK6LWb5a2j 116995
+5Zu+6KGo 116996
+6YKu6L2u 116997
+5ZOq6KOh 116998
+55u46KeB 116999
+5omw5Lmx 117000
+5q+P5q+P 117001
+6L+Z6L6I5a2Q 117002
+56Gr6YW4 117003
+5LqJ55u4 117004
+5rqv5rqQ 117005
+5Ye65LyX 117006
+546J55+z 117007
+5YWx55Sf 117008
+5pe26Ze05q61 117009
+6YeN6KaB5oyH56S6 117010
+5raI6LS56ZyA5rGC 117011
+6ZW/6ZW/ 117012
+6ZW/6ZW/55qE 117013
+5a6J5oqa 117014
+5aKe6auY 117015
+5pys6L2u 117016
+5Lqy55y8 117017
+6aOO5rOi 117018
+6ICB5aaI 117019
+5pS26LS55qCH5YeG 117020
+5YaF6ZmG 117021
+5oyl5Y+R 117022
+5Y2H5a2m 117023
+6IO45YmN 117024
+5YGP6L+c 117025
+57qv5rSB 117026
+5pa95bel5Y2V5L2N 117027
+6Lqr5Lu3 117028
+6LSi5Yqb 117029
+57q2 117030
+6KOF55Sy 117031
+5pi+56S65Zmo 117032
+5q+r5Y2H 117033
+5rex55+l 117034
+6IC256k= 117035
+6IC256mM 117036
+6L6D6YeP 117037
+5Zyo6L+H5rih 117038
+5Zyo6L+H5rih5pyf 117039
+6IyX 117040
+5LiA5Liq5pif5pyf 117041
+6Iq3 117042
+6LS/6LWC 117043
+5r+V 117044
+5oeC5LqL 117045
+56en 117046
+5YWF5b2T 117047
+5Zu956uL 117048
+6Iqx55Oj 117049
+6YKE6KaB 117050
+5YWs5ZyS 117051
+6Kem5Yqo 117052
+5rOw5bee 117053
+5LuA5LmI5qC3 117054
+5ruL5YW7 117055
+6K+E5Yik 117056
+5oyl5omL 117057
+6ISI 117058
+5ael5ael 117059
+6L+Q6LS5 117060
+5q+F5Yqb 117061
+5b+D5pm6 117062
+5LiN5o6S6Zmk 117063
+56ys5LiJ5Luj 117064
+6YCA6LSn 117065
+5pif6ZmF 117066
+5rC45Yip 117067
+5oqk5Y2r 117068
+54+t6L2m 117069
+6KiA6KGM 117070
+57mq 117071
+5Li75Yqo5oCn 117072
+5bel56iL6LSo6YeP 117073
+6YOK5Yy6 117074
+5LiA5qCL 117075
+5L2G5a6e6ZmF5LiK 117076
+5LiJ5aSn6IGM5Lia 117077
+5ZG85Y+r 117078
+5aWz5YWS 117079
+6K+B5Yi45oqV6LWE 117080
+6ICD5oWu 117081
+54Kr6ICA 117082
+5rK75aW9 117083
+5Zi2 117084
+6IOk 117085
+5YWJ5LyP5Y+R55S1 117086
+5Yeg5q2l 117087
+5omA5omA 117088
+5omA5omA6ZW/ 117089
+54Wn5qC3 117090
+5ZOl5Lus 117091
+6K+b 117092
+6L+Z5LiA5Yi7 117093
+55+/54mp6LSo 117094
+5LiN5b6X5bey 117095
+5ZCM55uf 117096
+57uG5b6u 117097
+6Lev6JmO 117098
+55m+6Iqx 117099
+5re35rKM 117100
+5LiK5rW36K+B5Yi4 117101
+6YCA56iO 117102
+6LWe5Y+5 117103
+5omu5ryU5ri45oiP 117104
+5ZCN5YiX 117105
+5ZCN5YiX5YmN 117106
+5ZCN5YiX5YmN6IyF 117107
+57Gz5bCU 117108
+5LuA5LmI5Y6f5Zug 117109
+5a6J5YWo5L+d6Zqc 117110
+5LiA5Y+q5omL 117111
+5Lmz5Lia 117112
+5LiN55SY 117113
+5oOF5ZWG 117114
+5oyh5L2P 117115
+5Y6f5Zug5LmL5LiA 117116
+6L+Z5Lik5aSp 117117
+54OY54SZ 117118
+6LGs 117119
+5L2g5Lul5Li6 117120
+5rKh6KeB6L+H 117121
+5ZOq5a625aW9 117122
+5YmN5Lu7 117123
+6L+b6LSn 117124
+6YCA5Zue 117125
+5Liy6IGU 117126
+6Iez5pa8 117127
+5Yaw5reH 117128
+5Yaw5reH5reL 117129
+5p+l55yL6K+m5oOF 117130
+54++5a+m 117131
+5o6o5rWL 117132
+5o6l5omL 117133
+6Zq25bGe5LqO 117134
+5Z+O5biC576k 117135
+5p2O5YWI55Sf 117136
+55+/5rOJ5rC0 117137
+54m55Lu3 117138
+5pu05aSa57K+5b2p 117139
+56iL5byP 117140
+6K+75oeC 117141
+5bGP6JS9 117142
+5aWl5p6X 117143
+5aWl5p6X5Yy5 117144
+5aWl5p6X5Yy55YWL 117145
+57qi6Jav 117146
+5aWu 117147
+5a6d546J 117148
+57ay57Wh 117149
+6LKn 117150
+5qyn5byP 117151
+55m957OW 117152
+6Ieq54S254G+5a6z 117153
+5ZGK6K+J5aW5 117154
+5bua 117155
+54K55Ye75p+l55yL 117156
+6aOO5rm/ 117157
+6LWE5Lqn6YeN57uE 117158
+5Lmf5LiN5L6L5aSW 117159
+5Y2K5Liq5bCP5pe2 117160
+5ZC45byV5pu05aSa 117161
+5pe26Ze06IqC54K5 117162
+5pS257qz 117163
+5ZC45q+S 117164
+6ICB5Lmh 117165
+55CF 117166
+5pyA57WC 117167
+5Y+N5oSf 117168
+55So5b6u5L+h 117169
+55So5b6u5L+h5omr 117170
+6YCf546H 117171
+5aSn54aK54yr 117172
+5Y+v5oOz 117173
+5Y+v5oOz6ICM 117174
+5Y+v5oOz6ICM55+l 117175
+5ZKn 117176
+6LWw5YWl 117177
+56Kz6YW4 117178
+6IyD5Yaw 117179
+6IyD5Yaw5Yaw 117180
+6KKr5Yik 117181
+56ev5p6B5o6o5Yqo 117182
+6Laz6Laz 117183
+57KS5a2Q 117184
+5aSn5a6X 117185
+5aSn5a6X5ZWG5ZOB 117186
+572R57uc56eR5oqA 117187
+5pu85Z+O 117188
+5bey5LmF 117189
+5bey5LmF55qE 117190
+56em55qH 117191
+56em55qH5bKb 117192
+5Lu75pWZ 117193
+5ZSv576O 117194
+5reh5YyW 117195
+5qGC6Iqx 117196
+55+l6K+G5YiG5a2Q 117197
+5oeS5b6X 117198
+5Li75YWs 117199
+6K6+6K6h55CG5b+1 117200
+6LO6 117201
+5omA5o+Q5L6b 117202
+5omA5o+Q5L6b5LmL 117203
+5pS75YWL 117204
+5YK+ 117205
+6K+t5rOV 117206
+5Y2D5Y+k 117207
+6ZaL5pS+ 117208
+56ys5LiA6IqC 117209
+6YKE5rKS 117210
+6YCD55Sf 117211
+5rOX 117212
+5Y6/5aeU5Lmm6K6w 117213
+5L2c6ICF5omA5pyJ 117214
+54W9 117215
+57uF 117216
+5qCF 117217
+5py057Sg 117218
+55GV55a1 117219
+5YyF5YyF 117220
+5rCR5Li75YWa 117221
+5LiN6L+c5aSE 117222
+5aWH5byC 117223
+5Zi75Zi7 117224
+5om8 117225
+57+75byA 117226
+5oCO6IO9 117227
+6YG06YCJ 117228
+6Kej6YeL 117229
+5bm856ia 117230
+6KaB5aW95aW9 117231
+6La05Zyo 117232
+57Si5Y+W 117233
+57uI55Sf 117234
+5YWo5rWB56iL 117235
+6YGp55W2 117236
+5Y2P6LCD5Y+R5bGV 117237
+5oql5LuH 117238
+56eR5oqA5Zut 117239
+5LuA5LmI6YO95LiN 117240
+5pyA5ZCO5LiA5qyh 117241
+57uZ5Lq65LiA56eN 117242
+5qC45a6a 117243
+6KKr5YiX5YWl 117244
+5oSP5oOz5LiN5Yiw 117245
+6ICD5p+l 117246
+5Zyo5q2k5LmL5YmN 117247
+5omT55CD 117248
+6LaK5p2l6LaK5bCR 117249
+5a6a5b6L 117250
+6KGM5pS/5py65YWz 117251
+5L2P5oi/5YWs56ev 117252
+5bCP5aeQ5aeQ 117253
+5LiJ6I+x 117254
+5L+u6KGl 117255
+6J6D6J+5 117256
+6KW/55Sy 117257
+5oCg 117258
+562J5aSa6aG5 117259
+5Lqn5Lia6ZuG6IGa 117260
+5Lu35qC85LiK5rao 117261
+5YWs5YWx5Zy65omA 117262
+6KKL5a2Q 117263
+5oan5oas 117264
+55qE5pa55byP5p2l 117265
+5Yiw6LSm 117266
+54G9 117267
+5be06I+y 117268
+5be06I+y54m5 117269
+5ryU5Lmg 117270
+6K2m56S65pWZ6IKy 117271
+55WP5oOn 117272
+5byV5rWB 117273
+5pS25pSv 117274
+5bGC5Ye6 117275
+5bGC5Ye65LiN 117276
+5bGC5Ye65LiN56m3 117277
+5pGH5rua 117278
+6L6m55CG 117279
+57q16KeC 117280
+5pWR5rWO 117281
+5a626YO955+l6YGT 117282
+5Yyv 117283
+5bCP6bif 117284
+5Lu75YuZ 117285
+6K6h5YWl 117286
+56ue6YCJ 117287
+5byA6I2S5pe25pyf 117288
+5ZGo5oGp 117289
+5ZGo5oGp5p2l 117290
+5Lqk57uH 117291
+55Wi5qWt 117292
+5qC55o2u6Ieq5bex 117293
+5paw5Lq6546p5a62 117294
+5a215YyW5Zmo 117295
+6YeH5pqW 117296
+5bmz5Z2H5rC05bmz 117297
+5YWs5byA6K++ 117298
+5aSx5Yip 117299
+5Ly65pyN 117300
+54qB 117301
+5b+95oKg 117302
+5Li76KaB6ZuG5Lit 117303
+5qSN5qCR 117304
+5q+X6YK7 117305
+6Ie654Gj 117306
+5Ye65Zu955WZ5a2m 117307
+5oqX6ZyH 117308
+5oOp5oiS 117309
+5bm05bqV5YmN 117310
+5ZK46Ziz 117311
+5rCR5bGF 117312
+5aSn55CG55+z 117313
+6Z2z 117314
+6ZWW 117315
+5riF6L+c 117316
+6KOF6L29 117317
+6IeA 117318
+5b2x5Lia 117319
+5byf5YWE 117320
+5oKy6KeC 117321
+552A55y85LqO 117322
+5o2N5Y2r 117323
+5Yml5aS6 117324
+56+G 117325
+5b6I6ZW/5pe26Ze0 117326
+6KWf 117327
+56ys5LiA55m+ 117328
+5LiA5YiG6ZKx 117329
+5paw6Ze76K6w6ICF 117330
+6ZW35pyf 117331
+5rOV5oiY57uE5ZCI 117332
+6LCB55+l6YGT 117333
+6IWw6YOo 117334
+5rGJ5aCh 117335
+5YWl552h 117336
+5Y2W5o6J 117337
+5raI6LK76ICF 117338
+5oOv5L6L 117339
+5oOz5LqG 117340
+5oOz5LqG5oOz 117341
+6ICB5pen5bCP5Yy6 117342
+5Lyg6KiA 117343
+5YiG5pWw57q/ 117344
+5rWB5rOq 117345
+57uE57uH6aKG5a+8 117346
+5Lqa5Yab 117347
+5aKe5YC85pyN5Yqh 117348
+5b65 117349
+5Ly2 117350
+5Lqb6K64 117351
+5biD6I6x 117352
+5by65oKN 117353
+5a6r5bu3 117354
+57u/6Iy2 117355
+5Yyh 117356
+5b6I5q2j5bi4 117357
+5pil5aSP 117358
+5q+Z 117359
+6K+E5q+U 117360
+5Yeh5LqL 117361
+5oqJ5oup 117362
+5YCS6ZyJ 117363
+6YeN5bqm 117364
+5Y2P5Lya5Lya6ZW/ 117365
+5b+n6JmR 117366
+5LiL5LiA56+H 117367
+5rKq5rex 117368
+5oiO 117369
+5omT5LuX 117370
+5Y2I6aWt 117371
+5bm06b6E5q61 117372
+5Lit5Zu96Laz55CD 117373
+6K6+6K6h5pa55qGI 117374
+5bqU55So5p+l55yL 117375
+6aKE5paZ 117376
+5Zeh 117377
+56WW54i2 117378
+55qE5LiA5ZGY 117379
+5rSX5bmy5YeA 117380
+5Y6G5Y+y5paw 117381
+5Y6G5Y+y5paw6auY 117382
+54us5YW3 117383
+5oWL5bqm 117384
+5omT5Lqk 117385
+5omT5Lqk6YGT 117386
+6buE55+z 117387
+55u85pyb 117388
+54mn5Zy6 117389
+6L2s5byv 117390
+5Y2H5Y2O 117391
+5YaN5Lmf5rKh5pyJ 117392
+6Iux5omN 117393
+5pu05ZCN5Li6 117394
+5YCf55So 117395
+57qg6ZSZ 117396
+57ud5a+55LiN5Lya 117397
+546L54mM 117398
+55uG5Zyw 117399
+5aSx6LCD 117400
+5aW96LGh 117401
+6bOl 117402
+5L+d5L+u 117403
+5Zub5Liq6Ieq5L+h 117404
+5aS055qu 117405
+5Y6f5YmH 117406
+5oql5qGI 117407
+5aW06Zq2 117408
+5bOZ 117409
+6LCD5paZ 117410
+5Lmf6Kix 117411
+6JC95Yiw 117412
+6JC95Yiw5a6e 117413
+6JC95Yiw5a6e5aSE 117414
+54Sa54On 117415
+55Sf5rS7546v5aKD 117416
+5bqU5Y+K5pe2 117417
+6LaK6L+H 117418
+5oSf6Kyd 117419
+5pmv5b63 117420
+5pmv5b636ZWH 117421
+54qA 117422
+6Lqr6YKK 117423
+56iO5Yqh5oC75bGA 117424
+5YeA5Zyf 117425
+5L615Y2g 117426
+5Yqo5bel 117427
+5bm05LmL 117428
+5bm05LmL5LmF 117429
+56ys5LqM6IqC 117430
+5Yqo54mp5Zut 117431
+56ys5LiA5Lmm6K6w 117432
+6YWa 117433
+55Sf5Lqn6K6+5aSH 117434
+5p+Q56eN56iL5bqm 117435
+5Zyt 117436
+5Yet5YCf552A 117437
+6ZiF6KeI 117438
+55m95rKZ 117439
+5rK554Of 117440
+56qB56C05Y+j 117441
+5Y+X5b2x5ZON 117442
+5Y+v5Lul5pu05aW9 117443
+5bOw5YC8 117444
+5p2C6LSo 117445
+5a6/6L+B 117446
+55uY5rS7 117447
+5r+A6LW3 117448
+5YS/56eR 117449
+5Z2Q6JC95Zyo 117450
+5oyq5aiB 117451
+5rW35bKb 117452
+57uf57uf 117453
+6Zmo 117454
+5LyY5LqO 117455
+5bCI5a62 117456
+5LiA6YKK 117457
+6JCK 117458
+5LqG5LiA5Y+j 117459
+5rKD5bCU5rKD 117460
+5q2j5bi45L2/55So 117461
+5pmu6YGN5a2Y5Zyo 117462
+5Liw5ruh 117463
+55S75Y23 117464
+5bqU5pS2 117465
+5bqU5pS26LSm 117466
+5bqU5pS26LSm5qy+ 117467
+5a6M5pW054Ot 117468
+5a6M5pW054Ot5qac 117469
+5rOo6KeG 117470
+54aE 117471
+6Lqs 117472
+6ZSA5ZSu5Lq65ZGY 117473
+6LaL5ZCR 117474
+54Sm5oCl 117475
+5Y2B5bm05YmN 117476
+5Lyg57uf5Lqn5Lia 117477
+6LOq6YeP 117478
+5Yek5Yew572R 117479
+6LWE5rqQ5pW05ZCI 117480
+5raM5YWl 117481
+5paH5YyW5Lyg5pKt 117482
+55WM56ys5LiA 117483
+5rC05rO1 117484
+5a6r5q6/ 117485
+5o6i5a+7 117486
+5L+u5Ymq 117487
+5oSP6KaL 117488
+57SK5Lmx 117489
+5puJ 117490
+55m96KGj 117491
+6JmO5Y2r 117492
+57Sn5omj 117493
+5aSE5aSE6ZW/ 117494
+5Yib5bu65bel5L2c 117495
+57qi5p6j 117496
+6aW85bmy 117497
+5LqG5Y2K5aSp 117498
+5Lya5b2x5ZON5Yiw 117499
+55u45L+h5aSn5a62 117500
+6IW+6aOe 117501
+5bCx5aaC5ZCM 117502
+5LiL6Z2i5bCP57yW 117503
+5rCR6JCl57uP5rWO 117504
+5pmm 117505
+6KOF5omu 117506
+6buR5aSc 117507
+5bi45b63 117508
+5bel5Lia5aSn5a2m 117509
+5piO55+l 117510
+6Zif5ZGY5Lus 117511
+5ZCs6K++ 117512
+5q+P6ZqU 117513
+55yf5piv5aSq 117514
+5ZCI5L2c5YWx6LWi 117515
+55CG5Y+R 117516
+5omN5bmy 117517
+55yL6LW35L6G 117518
+5q6/5LiL 117519
+5a6J6Ziz 117520
+5omA5Lqn55Sf55qE 117521
+6ZuH5L2j 117522
+5oqs6LW35aS0 117523
+5o2u5oql6YGT 117524
+6ZqG6YeN5Li+6KGM 117525
+5Lqk6ZSZ 117526
+6LaF6aKd 117527
+5YyW55aX 117528
+6aGG 117529
+57q15rex 117530
+54ix5Zu95Li75LmJ 117531
+6Zmi5Ymv6Zmi6ZW/ 117532
+6K6z 117533
+55yf5q2j5YGa5Yiw 117534
+5a2k5Y2V 117535
+6Ieq54S26ICM 117536
+6Ieq54S26ICM54S2 117537
+5L+u6Lqr 117538
+6Iq5 117539
+5oGv5oGv 117540
+5oGv5oGv55u45YWz 117541
+6am+5qCh 117542
+5o6p6aWw 117543
+5rO96L+e 117544
+5rO96L+e5pav5Z+6 117545
+5Li+5q2i 117546
+566h55CG5L2T5Yi2 117547
+5YW25Lit5LmL5LiA 117548
+5p2+5byb 117549
+5oum5oiq 117550
+5Y2r5YGl 117551
+5Y2r5YGl5aeU 117552
+5LuO5Y675bm0 117553
+5YKi 117554
+6LSt56Wo 117555
+5Zu+5qCH 117556
+5rKz6KW/ 117557
+5rCR5pS/5bGA 117558
+56eB6JCl 117559
+5aSW5Zu96K+t 117560
+5bmy6LSn 117561
+5pOm5out 117562
+5Zyw5Lit 117563
+5Zyw5Lit5rW3 117564
+5rWT5rWT 117565
+5rWT5rWT55qE 117566
+5aeL5bu6 117567
+5aeL5bu65LqO 117568
+57aT5q23 117569
+6Lev5ryU 117570
+5pq06aOO 117571
+5Z+66L6F 117572
+5om26LSr5bel5L2c 117573
+5LiA55u05aSE5LqO 117574
+5oOF6Laj 117575
+5LqM5a2j5bqm 117576
+5Y6M5oG2 117577
+6aG65Yip5a6M5oiQ 117578
+5p+l5bCB 117579
+6aG256uv 117580
+5LiN5a2V 117581
+5LiA5aSn5aCG 117582
+6KKr5reY5rGw 117583
+5piv55So5p2l 117584
+5pyA5ZCI6YCC 117585
+5Lqu55y8 117586
+5bm25LiN5piv5b6I 117587
+56eR56CU6Zmi 117588
+56eR56CU6Zmi5omA 117589
+57Kf 117590
+6aKI6YOo 117591
+6buY6buY5Zyw 117592
+6auY5Lit55Sf 117593
+5peP6Ieq5rK75Y6/ 117594
+5pWZ5a2m6LSo6YeP 117595
+5oiY54Gr 117596
+5Z2O5Z23 117597
+5pCt5LmY 117598
+6K+X5oSP 117599
+5YiR6K2m 117600
+5Ye65rGX 117601
+5Y2B5YWt5p2h 117602
+6K+35Y+K5pe2 117603
+5Yac5Lia5aSn5a2m 117604
+6JC95Y+2 117605
+5oC76ICM6KiA 117606
+5oC76ICM6KiA5LmL 117607
+5p2c5YWw 117608
+5p2c5YWw54m5 117609
+6Zmq5L2g 117610
+5YWs5oql 117611
+55WZ6KiA5p2/ 117612
+6ZiF5Y6G 117613
+56u254it 117614
+57uZ5Yir5Lq6 117615
+5pel5oql56S+ 117616
+5Z2Q6JC9 117617
+5Z2Q6JC95LqO 117618
+6YeR5a2X 117619
+6YeR5a2X5aGU 117620
+5Zuk 117621
+6K+d5Ymn 117622
+5oyB57ut5o6o6L+b 117623
+5ryP5rC0 117624
+6Kmz57Sw 117625
+5oCA5oqx 117626
+5Y+Y5bm7 117627
+6aWl6aW/ 117628
+6ZqQ6Lqr 117629
+5Liq6LWb5a2j 117630
+5ZOh5bel 117631
+5oGi5aSN5q2j5bi4 117632
+5LqG5aW95aSa 117633
+5pif5be0 117634
+5pif5be05YWL 117635
+5YWJ546v 117636
+5biF5ZOl 117637
+55m96Zuq 117638
+56iN56iN 117639
+6K6h5o+Q 117640
+5oSb5oOF 117641
+6Y6W 117642
+5L+h6Ziz 117643
+6KeA5a+f 117644
+5aaC5p6c5L2g5oOz 117645
+55u45q+U5LmL5LiL 117646
+6Kej5byA 117647
+5omT5Y2w5py6 117648
+6Lqr6Lqv 117649
+57K+56We5paH5piO 117650
+6IKh5oyH 117651
+5b6u5Yib 117652
+57qi6Iy2 117653
+6Ie055mM 117654
+5oGp5pa9 117655
+6IW/6YOo 117656
+5aSn5Z6L5aSa5Lq6 117657
+5a6J5YCN 117658
+6L6F5a+85ZGY 117659
+6Iiq6YGT 117660
+5biD5bCU 117661
+5Y2X5a6B5biC 117662
+5LiK54+t5peP 117663
+5L6n57uT5p6E5oCn 117664
+6L+96ZqP 117665
+5b2T5Zyw5pS/5bqc 117666
+6LWw5Ye65p2l 117667
+6YeR6J6N5Lia 117668
+5Lib5Lmm 117669
+6aG555uu57uP55CG 117670
+6L+H5oi3 117671
+6aqo5p62 117672
+6KGZ 117673
+5LuA6bq9 117674
+6IWL 117675
+6KaB5a6z 117676
+5Zyo5bqK5LiK 117677
+5Luj6KiA5Lq6 117678
+5Lim5bCH 117679
+5ZCE5Liq5pa56Z2i 117680
+6LC06LSj 117681
+5YWx5oyv 117682
+5Y2z5bCG5Yiw5p2l 117683
+6IK655mM 117684
+5L6b6ZSA 117685
+5Lib5p6X 117686
+6LWD 117687
+5Y2B5L2Z5bm0 117688
+5YuY5o6i 117689
+6Z+15ZGz 117690
+6Ium56yR 117691
+5pyA5aSn56iL5bqm 117692
+6YeN54K55YWz5rOo 117693
+5LmL5Li+ 117694
+5ruh5oCA 117695
+5Y+X5Yiw5b2x5ZON 117696
+5oub5oqV5qCH 117697
+6KGl6b2Q 117698
+6KW/57qi 117699
+6KW/57qi5p+/ 117700
+6ayn 117701
+6KOF5Y24 117702
+6YK76YeM 117703
+6IKH5LqL 117704
+5o6S5q+S 117705
+5a2k5YS/ 117706
+6Zu26Led56a7 117707
+5a6e5bmy 117708
+55yL5p+l55yL 117709
+5pS26LS556uZ 117710
+57u3 117711
+5YWs55uK5oCn 117712
+6YCS57uZ 117713
+5pS75omT 117714
+5pif57qn6YWS5bqX 117715
+5piO5aqa 117716
+542o56uL 117717
+6K+d6K+t5p2D 117718
+5LiA5q2l5LiA5q2l 117719
+5Lmm5rOV5a62 117720
+5pyq57uP5o6I5p2D 117721
+55+z6IaP 117722
+5Yet5LuA5LmI 117723
+55qE5pel 117724
+55qE5pel5a2Q6YeM 117725
+6K+x5Lq6 117726
+55m+5YiG55m+ 117727
+6IiI6Laj 117728
+5byg5YWI55Sf 117729
+6ICB54i35a2Q 117730
+5rOi54m5 117731
+5Z+66YeR5Lu96aKd 117732
+5rKZ5Y+R5LiK 117733
+5aWL5paX55uu5qCH 117734
+5rCi6IO9 117735
+5rKD5bCU546b 117736
+576p5YuZ 117737
+6Z+z566x 117738
+5rKJ5rW4 117739
+5rKJ5rW45Zyo 117740
+6Iux5ZyL 117741
+54Gv54Gr 117742
+6L+b6aG5 117743
+5Lik56uv 117744
+5LmU5Li5 117745
+6IS46aKK 117746
+5Y+R5bGV5r2c5Yqb 117747
+5YuV5L2c 117748
+5ZOI5L2b 117749
+5a605Lya 117750
+5qeN 117751
+56uL5b+X 117752
+56GV5aOr5a2m5L2N 117753
+5YuL56ug 117754
+6L+Z5Zy65q+U6LWb 117755
+5oyB5bmz 117756
+6ZWA6ZSM 117757
+6Iux54m5 117758
+6Iux54m55bCU 117759
+5pWZ6IGM5bel 117760
+5Yqf5Yqb 117761
+6K+l5qGI 117762
+5LiA5qKd 117763
+5ZiJ5bm0 117764
+5ZiJ5bm05Y2O 117765
+6L+r5LiN5Y+K 117766
+6L+r5LiN5Y+K5b6F 117767
+6L+Z5Liq5pe25Luj 117768
+57K+5b2p5pKt5oql 117769
+5Lq66IS4 117770
+5Lq66IS46K+G5Yir 117771
+5qOA5a+f5a6Y 117772
+5bCP6IW/ 117773
+6YaS55uu 117774
+5YWa5oC7 117775
+5YWa5oC75pSv 117776
+5oif 117777
+6Iyr54S2 117778
+6LGG5rWG 117779
+5Li75rK7 117780
+6Z2S5rW355yB 117781
+5YiR5LqL6LSj5Lu7 117782
+56Cw 117783
+5LmL5qyK5Yip 117784
+5LqU5a6Y 117785
+6L+35oOR 117786
+5YWl5bqT 117787
+5a6257q6 117788
+5by557Cn 117789
+5Y2B5LqU5p2h 117790
+57uZ5a6d5a6d 117791
+6Iiq56m66Iiq5aSp 117792
+5b6A5aSW 117793
+5byV5Yqb 117794
+55y855qu 117795
+5raJ6Laz 117796
+5p2l5a6+ 117797
+5Zyo57q/6KeS6Imy 117798
+54Ot6ZSA 117799
+5rWB6YCd 117800
+5rOh5rOh 117801
+6ZmN5bmF 117802
+6LSf6Z2i5b2x5ZON 117803
+57qi5qW8 117804
+57qi5qW85qKm 117805
+6ZqU552A 117806
+5L6l5bm4 117807
+6K645LmF 117808
+5ZKM552m 117809
+6K29 117810
+5L2/55So6ICF5oiW 117811
+5Lmw5Y2V 117812
+6L+0 117813
+6aOO5omH 117814
+5pWZ5bir 117815
+5qGM5a2Q5LiK 117816
+5b6I5ryC5Lqu 117817
+5aCx5bCO 117818
+56ys5LiA5a2j5bqm 117819
+56mp5a6a 117820
+5oKy5ZOA 117821
+552A5Yqb5omT6YCg 117822
+5oyf 117823
+6Lev5qGl 117824
+5ZGQ 117825
+5Zyj6K+e6IqC 117826
+55qH5a2Q 117827
+5LuH5oGo 117828
+6YWd6YW/ 117829
+5LiN6Ze0 117830
+5LiN6Ze05pat 117831
+5oyH5bCW 117832
+5Lit5Zu9572R5ri4 117833
+5Z6j 117834
+5oSP6KeB5bu66K6u 117835
+5q+F54S2 117836
+5Lqu5bqm 117837
+6IGU6LCK 117838
+5b2V5YWl 117839
+5YSy 117840
+5aiY5a62 117841
+56eR5bCU 117842
+5Lmf5rKh5LuA5LmI 117843
+5qC55o2u5LiN5ZCM 117844
+5Y+25L+u 117845
+5YC85a6I 117846
+5pyr56uv 117847
+5Yio 117848
+5YK15YuZ 117849
+6IGv5ZCI 117850
+5aWH5bm7 117851
+6Jma5p6E 117852
+6buE5piP 117853
+5bmz5Z2m 117854
+5rWB5rCT 117855
+5paw5Z+65bu6 117856
+5oy95pWR 117857
+5Y2O5bCU 117858
+5Y2O5bCU6KGX 117859
+5pyA5Y+X5qyi6L+O 117860
+57ut57qm 117861
+5byK56uv 117862
+6a2U5rOV5biI 117863
+6a2U5rOV5biI5ZKM 117864
+5YW35L2T5YaF5a65 117865
+55CJ55KD 117866
+5omp5a65 117867
+6Iy25Zut 117868
+5Li75LmJ6ICF 117869
+56uL6Z2i 117870
+5o6l5Y+X6YeH6K6/ 117871
+5Ye65YWl5aKD 117872
+56eR5Y2P 117873
+6ZKz 117874
+57WQ5qeL 117875
+57uT5p6c5pi+56S6 117876
+5Y+w6LSm 117877
+5bCx5p2l55yL55yL 117878
+6Ieq5pWR 117879
+5Y+N5oeJ 117880
+5Y675ZOq5YS/ 117881
+6L+Z6aaW 117882
+6L+Z6aaW5q2M 117883
+5ZCs5LyX 117884
+5aSW5aOz 117885
+5L2T6IKy6aaG 117886
+5a+m5pa9 117887
+6J665Lid 117888
+5ouJ5Y2H 117889
+54yb5Zyw 117890
+5YWo5Zu95Lq65rCR 117891
+5oKJ5bC8 117892
+5peP576k 117893
+5Zui5ZGY 117894
+5Lik5Liq5bCP5pe2 117895
+5Zyo546p5a62 117896
+5Zyo546p5a625Lit 117897
+55Sc55Sc 117898
+5oqV6KGM 117899
+5Y2U5pyD 117900
+6Zmh 117901
+5Yqg5bel5Y6C 117902
+5qaG5p6X 117903
+5q276KeS 117904
+5YaF5bmV 117905
+5omA5pyJ5oOF6IqC 117906
+5Yi35Y2h 117907
+5rC06IK/ 117908
+6IOD5Y+j 117909
+5auM5byD 117910
+5rKu5Lin 117911
+5LiJ5bm057qn 117912
+5raC5bGC 117913
+5b+D5Luq 117914
+5b+D5Luq55qE 117915
+5aSt 117916
+6aaW6L2u 117917
+5peg6K665piv5YW2 117918
+6YCP5rCU 117919
+5LqM5Y2B5LqU 117920
+566r 117921
+5Yqf5Yqz 117922
+562+5LiL 117923
+5rKJ6L+3 117924
+5pWR5ZG9 117925
+6Zeq6Zeq 117926
+5ZCD5LqP 117927
+5bGV5ZOB 117928
+5Y2z5pe25Y+R55Sf 117929
+57ac 117930
+57ac5ZCI 117931
+5qCH5piO 117932
+55yL55S15b2x 117933
+5YWs56ug 117934
+6Zi/5qOu 117935
+6Zi/5qOu57qz 117936
+6Lqr5Yib6YCg 117937
+6Lqr5Yib6YCg55qE 117938
+5rib5bCR 117939
+5YC85b6X5YWz5rOo 117940
+6Zu25ZSu5ZWG 117941
+5o2G57uR 117942
+6LiP5YWl 117943
+6Juf 117944
+5p+057qz 117945
+6ICB5YW1 117946
+57u/6Imy546v5L+d 117947
+6bmt 117948
+6bq75pyo 117949
+5o+t54mM 117950
+6L+Z5qy+6L2m 117951
+576O5b63 117952
+576O5b635YWs5Y+4 117953
+5ran 117954
+6LCB55+l 117955
+5rSL6JGx 117956
+5q+N5qCh 117957
+5LiA6Zeq 117958
+55S35Li76KeS 117959
+5peg57q/55S1 117960
+5bGg5a6w 117961
+5piv6Z+p5Zu9 117962
+5piv6Z+p5Zu95aix 117963
+5a656LKM 117964
+5Z2H5L2/5YW2 117965
+5aSq5b+r 117966
+5bm055Sx 117967
+5bm055Sx55ub 117968
+6Ium6Ium 117969
+5Yqb6L+Y5piv 117970
+5Yqb6L+Y5piv6Ieq 117971
+5oap 117972
+6IGv57Wh 117973
+5ZS+ 117974
+5YW35pyJ5oiY5aOr 117975
+6L+96Zeu 117976
+5aCG5pS+ 117977
+5Y+N6amz 117978
+5a6e5LqL5rGC 117979
+5a6e5LqL5rGC5piv 117980
+5a246Zmi 117981
+5Y2B5Yeg5Liq 117982
+5pWR5oqk 117983
+5pWR5oqk6L2m 117984
+572R57uc5Lyg5pKt 117985
+5Y2B5YWr5bGK 117986
+6YOo5Ymv 117987
+6YOo5Ymv6YOo6ZW/ 117988
+55e06L+3 117989
+566h55CG5p2h5L6L 117990
+6J6N5Li65LiA5L2T 117991
+5oC75Lqn5YC8 117992
+6LOT 117993
+5LiD5pif 117994
+54+t57uE 117995
+57uf6aKG 117996
+6K+35aSn5a62 117997
+6YeR6Zm1 117998
+6IiF6IiF 117999
+5rW35rm+ 118000
+5pa9562W 118001
+5Lqr6KqJ 118002
+6bql 118003
+56uv5Y2I 118004
+57u/5Z+O 118005
+56K65L+d 118006
+5be05ouJ 118007
+5YaS552A 118008
+5oW35oWo 118009
+5Liq5Lq66KeC54K5 118010
+5LmZ54Ov 118011
+56GF6LC3 118012
+6ZaL5bGV 118013
+5bCa5Lmm 118014
+5Z2a6Z+n 118015
+5bq1 118016
+6ICB6b6E 118017
+6ICB6b6E5YyW 118018
+55yo55y8 118019
+57u/5rC0 118020
+57u/5rC06Z2S5bGx 118021
+5Lmm6aaZ 118022
+5Li75Yqb5Yab 118023
+5omN5piv55yf5q2j 118024
+5oqi5YWI 118025
+5oiQ5bCx5oSf 118026
+6YeN5p6E 118027
+6ZKi5Y6C 118028
+5oiQ5Lu9 118029
+6Iqx57q5 118030
+5LmL5LqJ 118031
+5bmy57uG6IOe 118032
+5pei5Y+v5Lul 118033
+57mB55CQ 118034
+5oSa6KCi 118035
+6Z2e5bi45piO5pi+ 118036
+5L2T5b2p 118037
+5oqA5rOV 118038
+5p2G6I+M 118039
+5bm/5rOb5YWz5rOo 118040
+5YyX5a6L 118041
+5aeK5aa5 118042
+5Y2P5Yqe 118043
+5reu5Y2X 118044
+54OP 118045
+5rSX6IS4 118046
+5Y+X6K6/ 118047
+5Y+X6K6/6ICF 118048
+6YeN6KaB5Zug57Sg 118049
+5b2x6KeG5Ymn 118050
+57u86Im66IqC55uu 118051
+6JyV5Y+Y 118052
+5LqM57q/ 118053
+5LqM57q/5Z+O5biC 118054
+5LyK5aeL 118055
+54+K55Ga 118056
+6Ieq5p+l 118057
+5YWl5Zut 118058
+5Ye25omL 118059
+5YWs6K+J 118060
+6YGH6Zq+ 118061
+6YeH55+/562J 118062
+6Ieq55CG 118063
+5Za35raC 118064
+5omp5YWF 118065
+6YCP6KeG 118066
+6auY6YCf5aKe6ZW/ 118067
+5Zu+55S7 118068
+5765 118069
+6IKH5bqG 118070
+6L6c6LSf 118071
+6LWU5LuY 118072
+6Leh 118073
+5YGl5bq35oiQ6ZW/ 118074
+5Lul5LiK5a2m5Y6G 118075
+5Y+W5b6X5Lul5Y+K 118076
+5rKJ56ev 118077
+5Y2B5Lmd5bGK 118078
+55u46Zec5pyN5YuZ 118079
+5omn5Yuk 118080
+5Ymv5Y6/6ZW/ 118081
+5a+w 118082
+5YGc5rue 118083
+5re55rKh 118084
+55+z54Gw 118085
+5424 118086
+5YCm 118087
+576O5aqS 118088
+5pWZ5qGI 118089
+5Yqg55uW 118090
+5YWs5byA6LWb 118091
+5aWg5Z+6 118092
+5piG6Jmr 118093
+556F 118094
+56O36YW4 118095
+5LqJ5Yib 118096
+546L5pmT 118097
+57yT5Yay 118098
+5Y6a5Y6a 118099
+5Y6a5Y6a55qE 118100
+5p6j5bqE 118101
+57K+55uK 118102
+57K+55uK5rGC 118103
+57K+55uK5rGC57K+ 118104
+5YiG5pSv5py65p6E 118105
+5a6e5pa957uG5YiZ 118106
+5paw6LWb5a2j 118107
+57i957Wx 118108
+6YCg6KGA 118109
+6aKH5YW3 118110
+6buE5Z+U 118111
+6KGA6ISC 118112
+5Lqk6YCa5bel5YW3 118113
+5bOl 118114
+5peP6Ieq5rK75bee 118115
+5a+66Zmi 118116
+56K65a6a 118117
+5qaC5b+16IKh 118118
+5oSf5a6Y 118119
+5p+c5Y+w 118120
+5ZSU 118121
+556t6Kej5Lim 118122
+5oC75Lu3 118123
+5ZC45YWl 118124
+5oC8 118125
+5pma6Ze0 118126
+5bGK5q+V5Lia55Sf 118127
+55Sf5aec 118128
+6ZiF6K+75YWo5paH 118129
+5b6X5Yiw5pyJ5pWI 118130
+5pCc5pWR 118131
+5Y6G5p2l 118132
+6K2J5piO 118133
+5YO7 118134
+6Iaz6aOf 118135
+5YSE5YWD 118136
+5omT5Y6L 118137
+5a6+5a6i 118138
+5ZW8 118139
+5LiA55m+5aSa 118140
+5rex5YWl5Lq65b+D 118141
+5qKF5bee 118142
+56CU5a2m 118143
+5YWz5LmO 118144
+6Lyb 118145
+5Lqy5Y+L 118146
+6YWN5paZ 118147
+5oiR54ix5L2g 118148
+6LS45piT5oiY 118149
+5pyJ6Imy 118150
+5pyJ6Imy6YeR5bGe 118151
+5o2Q5Yqp 118152
+5Li66aaW 118153
+5Li66aaW55qE 118154
+5a+M5Yqb 118155
+55S356We 118156
+6bOz 118157
+5rWH5rC0 118158
+5ZCx 118159
+5piO56Gu5o+Q5Ye6 118160
+5Y+55LqG 118161
+5Y+55LqG5Y+j5rCU 118162
+56S85ouc 118163
+6L+Z5Liq5ZCN5a2X 118164
+5L+h5b6S 118165
+5b+X5by6 118166
+6ZmQ5pe2 118167
+5pS26LK7 118168
+5Yac5a625LmQ 118169
+5bCP6b6Z6Jm+ 118170
+6JC95bmV 118171
+5qef 118172
+5a2m6Zy4 118173
+5oiW5aSa 118174
+5oiW5aSa5oiW 118175
+5oiW5aSa5oiW5bCR 118176
+5bqn6LCI5Lya5LiK 118177
+5ra8 118178
+6a2U546L 118179
+5bKx 118180
+6aG25bGC 118181
+6aG25bGC6K6+6K6h 118182
+6ISR5a2Q6YeM 118183
+6Zmi5a2Q6YeM 118184
+6L2p6L6V 118185
+6Lqr5b+D5YGl5bq3 118186
+6IWR 118187
+6Zec5rOo 118188
+5Y+C5Yqg5Lya6K6u 118189
+5Lit5Y2O5paH5YyW 118190
+6L+95a+7 118191
+5a6J54S2 118192
+6aOZ5Y2H 118193
+6Z+t6I+c 118194
+6bim 118195
+5YKo6YeP 118196
+55S35pa5 118197
+5aSH5Lu9 118198
+5pGU5YCS 118199
+5ram5ruR5rK5 118200
+6YC86L+R 118201
+55Sz6K+J 118202
+6bif57G7 118203
+55+z5rK55YyW5bel 118204
+5Z2a5p6c 118205
+6L+Z5a625LyZ 118206
+5ouS5LiN 118207
+55yf55qu 118208
+6Led6Zui 118209
+6L+Y5oy6 118210
+6ZuV5YOP 118211
+5Yid5oGL 118212
+5o+Q5L6b5pu05aSa 118213
+5p+l55yL5YWo5paH 118214
+5pWw5a2X6LSn5biB 118215
+5ZaJ5ZKZ 118216
+5Y+m5LiA5L2N 118217
+5YKs5YyW 118218
+5YKs5YyW5YmC 118219
+5LuO5p2l5rKh 118220
+5a+G5YiH55u45YWz 118221
+6YOo5Li75Lu7 118222
+5Lqn5ZOB57uP55CG 118223
+5Lim5ZCM5oSP 118224
+6JC95YWl 118225
+5bGP5bmV5LiK 118226
+5YWs5Y+456ug56iL 118227
+5o2i5Y+l6K+d 118228
+5o2i5Y+l6K+d6K+0 118229
+5L2N5pa8 118230
+5L2U 118231
+5Ye75p2A 118232
+55u46L6D 118233
+55u46L6D5LqO 118234
+57K95a2Q 118235
+5Y2X5p6B 118236
+5a6r6aKI 118237
+6KOB5ZGY 118238
+5piO57uG 118239
+5Lu35YC86ZO+ 118240
+5Zub5Liq5pa56Z2i 118241
+5oOF5Ya15p2l55yL 118242
+5oyR5YmU 118243
+5q6Y 118244
+5p6B5Yqb 118245
+55aR6Zq+ 118246
+5oq15oqX5Yqb 118247
+5oCl6YCf 118248
+5oiM 118249
+5L2O5Lyw 118250
+6Zeq6L+H 118251
+5oGs 118252
+6LWe5oms 118253
+5LuW5aaI 118254
+5oiQ5Li65LiA5ZCN 118255
+5rSX56S8 118256
+6aKE6K6h5bCG 118257
+5YWI6L+b5Y2V5L2N 118258
+6LyU 118259
+6YCD6ISx 118260
+546w5a2Y 118261
+6ICB6JmO5py6 118262
+5Y2B5LiD5p2h 118263
+5Y+m5LiA5Y2K 118264
+5rip5oOF 118265
+5Yml56a7 118266
+5LiW6LS4 118267
+5a6Y5Y+4 118268
+5b6I5beu 118269
+6Ze06Led 118270
+6K+35rOo5oSP 118271
+5Y+y6K+X 118272
+5Yip5Zmo 118273
+6L+Q566X 118274
+5rKm5Li6 118275
+6Kmy5L2/55So6ICF 118276
+6Iys 118277
+6ZSm57uj 118278
+5Y+y5paZ 118279
+54G15rS75oCn 118280
+6IGU56S+ 118281
+5peg5Yqp 118282
+5oqX5rCn5YyW 118283
+6I+c6IK0 118284
+6YCg6Ii5 118285
+5o6J6JC9 118286
+5aSN5p+l 118287
+5YuD5YuD 118288
+5ZG85aOw 118289
+57Wm5LqI 118290
+5ZCM5LqL5Lus 118291
+572w 118292
+6K+V5o6i 118293
+5YWz6ZSu5a2X 118294
+5o2Q54yu 118295
+57uf6K6h5pWw5o2u 118296
+5Yib5L2c6ICF 118297
+5LiL5Y2K 118298
+5LiL5Y2K5Zy6 118299
+5om/5ouF6LSj5Lu7 118300
+56uv5q2j 118301
+56m/6KGj 118302
+5Lyg55CD 118303
+5Yqp6ZW/ 118304
+5Yex 118305
+6ZW25bWM 118306
+6aOe57+U 118307
+6L6T5Y21 118308
+6L6T5Y21566h 118309
+5LiH5YWs6YeM 118310
+5o6o5bm/5bqU55So 118311
+5b+r5qiC 118312
+56e9 118313
+6Imw5beo 118314
+5ZCs5a6M 118315
+5Z2a56Gs 118316
+5aWl5Zyw 118317
+5aWl5Zyw5Yip 118318
+6aKT 118319
+6JmQ5b6F 118320
+5L6b5rGC 118321
+6ZyJ57Sg 118322
+5Lyq6KOF 118323
+5Lmh5Zyf 118324
+5Yeh5pys572R 118325
+5Yeh5pys572R5rOo 118326
+5LyK5Yip 118327
+6KGh5rC0 118328
+5pu05YOP5piv 118329
+5YiG6ZKf5bem5Y+z 118330
+6KaP5qih 118331
+5LqU5YiG6ZKf 118332
+5bqX5Yqg55uf 118333
+5Zuw6Zuj 118334
+5YWz5YGc 118335
+5oCd57uq 118336
+5ZK95ZaJ 118337
+55u456ym 118338
+54Om6LqB 118339
+5pmC5pyf 118340
+5ZGI54++ 118341
+6Kej5pWj 118342
+6K+x5a+8 118343
+6ZqU54Ot 118344
+54y2 118345
+5Y2X5a6L 118346
+5rex5YWl5LqG6Kej 118347
+562U55aR 118348
+5pi85aSc 118349
+5Y2D5LyP 118350
+5Yqz5Yqh5rS+6YGj 118351
+57qi6LGG 118352
+5Z2P5LqL 118353
+54K55ru0 118354
+5bCx5Lia5bKX5L2N 118355
+57qm5ZCI 118356
+5YWN6Zmk 118357
+6YCG5Yq/ 118358
+6YeN6YeR5bGe 118359
+5a6Y5a6j 118360
+5L2O5buJ 118361
+5oGo5LiN5b6X 118362
+5b6X5aSp 118363
+5b6X5aSp54us 118364
+5b6X5aSp54us5Y6a 118365
+5LiA5bCB5L+h 118366
+5oq95aWW 118367
+6L6X6L2s 118368
+55WZ5a6I 118369
+55WZ5a6I5YS/56ul 118370
+562U5Y23 118371
+5beo5Z6L 118372
+5pyA5aW95LiN6KaB 118373
+5rWZ5rGf5aSn5a2m 118374
+5oao 118375
+5o+h5omL 118376
+6ZKI57uH 118377
+5o6S6aqo 118378
+54K9 118379
+5bCB6KOF 118380
+5Y2A5Z+f 118381
+56m65rCU5YeA5YyW 118382
+5YWJ5b2x 118383
+5YCS5aGM 118384
+5aea5piO 118385
+5qSN6KKr 118386
+5a2m5YmN 118387
+5a2m5YmN5pWZ6IKy 118388
+6Iqd5Yqg 118389
+6Iqd5Yqg5ZOl 118390
+57yp5rC0 118391
+5L2f 118392
+5Zyo57q/5ZKo6K+i 118393
+6LWP5p6Q 118394
+6Z2S6JuZ 118395
+5oqx5L2P 118396
+6IyC5ZCN 118397
+5YWo5Yqb5omT6YCg 118398
+5Y2a5aOr5a2m5L2N 118399
+5rKn5bee 118400
+5Zmi 118401
+5p2C54mp 118402
+5Yi755S7 118403
+5o2F 118404
+5b6u6YeP 118405
+5b6u6YeP5YWD57Sg 118406
+5LiA5Zue5LqL 118407
+6bih6IKJ 118408
+5Yip5ram546H 118409
+5omN566X 118410
+5b6u5aaZ 118411
+5qO15qCR 118412
+6LSq5amq 118413
+5YeP5YC8 118414
+5qKm5aKD 118415
+5Y+v6KeG 118416
+5Y+v6KeG5YyW 118417
+5bm/5aSn5biC5rCR 118418
+5LiT5Lia5LuO5LqL 118419
+57uP57qs 118420
+57Sn55uv 118421
+55+l5bex 118422
+6KSa 118423
+5paH5YyW5bqV6JW0 118424
+5Y6m6Zeo5biC 118425
+5Li05riv 118426
+5a+55YW255yf5a6e 118427
+5bK46L65 118428
+6KaW54K6 118429
+5oqX55mM 118430
+5ZSQ5a6H 118431
+5LiN5b6X6LaF6L+H 118432
+5aiB5oWR 118433
+5qGG5p625Y2P6K6u 118434
+6LWw56eB 118435
+5Zui5aeU 118436
+5aS45aSn 118437
+5qyE 118438
+56We57uP57O757uf 118439
+5pGE5b2x5L2c5ZOB 118440
+6Iql 118441
+5a6J5bqG 118442
+5rW35ruo 118443
+5p6E5oCd 118444
+54m15oyC 118445
+5Y+p 118446
+6ZiQ5piO 118447
+6YGB 118448
+57K+5rK5 118449
+56m05L2N 118450
+5oqk6Lqr 118451
+5oqk6Lqr56ym 118452
+5oyH5bCO 118453
+5a2Y5Zyo5LiA5a6a 118454
+5a+C6Z2Z 118455
+5rW35aSW5biC5Zy6 118456
+6Z2h 118457
+57u85ZCI5b6B 118458
+5L+Q 118459
+6KiI566X 118460
+5piO5pyX 118461
+5Lqa6L+Q 118462
+5Lqa6L+Q5Lya 118463
+5YmN55675oCn 118464
+5Yyu5LmP 118465
+5Lqn5Lia5om26LSr 118466
+6ISR5rW3 118467
+6ISR5rW35Lit 118468
+5YWa55qE6aKG5a+8 118469
+5YiY6YKm 118470
+5rWB5pif 118471
+5pOC 118472
+5pSA55m7 118473
+5ZKU 118474
+5LiA5LiL5a2Q5bCx 118475
+6K+K5rK7 118476
+5L2/5Yqy 118477
+5Ym15L2c 118478
+6ZOt6K6w 118479
+6ZKx6LSi 118480
+5pel5oql6K6w6ICF 118481
+54Of54Gr 118482
+6IOc6LSf 118483
+5Y2a5Li7 118484
+5Lit5Zu96IGU6YCa 118485
+572R56uZ6aaW6aG1 118486
+5bCx5aSf 118487
+5bCx5aSf5LqG 118488
+5omR5YWL 118489
+5bGF5aeU5Lya 118490
+6LCs 118491
+5a6J5YWo5LqL5pWF 118492
+5ZWG55So6L2m 118493
+5b6q546v57uP5rWO 118494
+5rek 118495
+6ICD6K+B 118496
+5a6d6JeP 118497
+5a6M57uT 118498
+56CU5Y+R5oqV5YWl 118499
+5bKR 118500
+5oGt5pWs 118501
+56a76YCA5LyR 118502
+5rC05aKo 118503
+5am2 118504
+6K+X5Y+l 118505
+5a6B5rOi5biC 118506
+5byx54K5 118507
+5YGc54mM 118508
+5aW25rK5 118509
+5aWH57qz5rKz 118510
+5oaC 118511
+56S+5Lya5a6e6Le1 118512
+6LSd5aOz 118513
+56CC5rWG 118514
+6Ii55Y+q 118515
+5a6j5oms 118516
+57u85ZCI5pW05rK7 118517
+5YKR 118518
+5rCR5peP5paH5YyW 118519
+6YeN546w 118520
+56ev5reA 118521
+5YWs54S2 118522
+54WJ 118523
+55u46IGa 118524
+5rG+ 118525
+57q555CG 118526
+54eD54Wk 118527
+5q2k56eN 118528
+576O5aaG 118529
+5Y2D55Om 118530
+55Cb 118531
+6am+6am26K+B 118532
+6Zi25qKv 118533
+5Lid5Lid 118534
+5b6I5aSa5LqL5oOF 118535
+5YWJ6Zi0 118536
+6JGX5L2c5qyK 118537
+5YWn6YOo 118538
+55u45a+55p2l6K+0 118539
+6ZaS 118540
+6ZyH5oWR 118541
+6Kqq6Kmx 118542
+5oaR 118543
+56ul6KOF 118544
+5L2P5oi/5ZKM 118545
+5L2P5oi/5ZKM5Z+O 118546
+5bey57uP6LaF6L+H 118547
+5L6m5a+f 118548
+55+/54mp 118549
+5L6b5aSn5a62 118550
+54m56YKA 118551
+56iL5bqP5ZGY 118552
+55Wc54mn5Lia 118553
+5rCq 118554
+55Gq 118555
+5YCS5Zyo 118556
+5YCS5Zyo5Zyw 118557
+5q+A 118558
+5qKv6Zif 118559
+5o6l6JGX 118560
+5oqX6I+M 118561
+6KSH 118562
+56yZ 118563
+5q+U5LiK5bm0 118564
+6bih5rGk 118565
+5a2m5Lmg5oiQ57up 118566
+5paR5paT 118567
+5YWI5a+8 118568
+5YiX5Li+ 118569
+6LCD5p+l5pi+56S6 118570
+5qmr 118571
+5Lmd5Y2B 118572
+6LCi6Z+1 118573
+6Leo6LaK5byP 118574
+5aWz5oCn5pyL5Y+L 118575
+6JCl5YW75Lu35YC8 118576
+5a6e6Le157uP6aqM 118577
+6IuP5bee5biC 118578
+55O25a2Q 118579
+5paw55qE5LiA 118580
+5paw55qE5LiA5bm0 118581
+5piO5pmw 118582
+5a6g54ix 118583
+5a2X56ys 118584
+5pyX6K+1 118585
+57qz5pav 118586
+6YCG6KGM 118587
+6KuL5oKo 118588
+6KuL5oKo5o+Q5L6b 118589
+6IO45oCA 118590
+56ys5LiD5bGK 118591
+5by65aOu 118592
+5Luj5a2V 118593
+5rG25bed 118594
+5a625Za7 118595
+5a625Za75oi3 118596
+5a625Za75oi35pmT 118597
+6IWu 118598
+5ZCv6L+q 118599
+5peg6Zqc56KN 118600
+6JmV55CG5Y+K 118601
+5p2l5Y6G 118602
+5a6e5Yqh 118603
+5Lmf6ZqP5LmL 118604
+5oqA6IO95Z+56K6t 118605
+5a2k56uL 118606
+5YmB 118607
+6YO05bee 118608
+5pS25pWb 118609
+6aC76YGT 118610
+6I2j5bm4 118611
+6I6r6L+H5LqO 118612
+5q2k5pmC 118613
+57qq5aeU55uR 118614
+57qq5aeU55uR5aeU 118615
+55u46YK7 118616
+5Y+m5LiA6L65 118617
+56qS5oGv 118618
+5pyJ5b6I5aSa56eN 118619
+5q+P6YCi 118620
+6Zeu5LiW 118621
+57Sv57Sv 118622
+6Z2S5pil5pyf 118623
+6Lev5Ya1 118624
+5YWL6I6x 118625
+6L+E5LuK5Li65q2i 118626
+5oOK5aWH 118627
+6Leo5bqm 118628
+6YW/6YCg 118629
+5YeL 118630
+6L+R5LiJ5bm0 118631
+5YaF6ams 118632
+5YaF6ams5bCU 118633
+5o+N 118634
+6L+b5bGV5oOF5Ya1 118635
+6Iyn 118636
+5pyJ5bqP5o6o6L+b 118637
+5oC75Yag5Yab 118638
+5oiQ57up5Y2V 118639
+6Zu76Kmx5Y+K 118640
+57Sn5a+G57uT5ZCI 118641
+5bqK5L2N 118642
+6bmK 118643
+5pWj5Y+R552A 118644
+5Yuf6LWE 118645
+5rCo6YW4 118646
+5b2p56We 118647
+6K6A5Y+W 118648
+6YeN5rip 118649
+5Lit5a2Y5Zyo55qE 118650
+576O6bqX 118651
+5LiN5pat5aKe5Yqg 118652
+6L2u5rWB 118653
+5o6l5ZCs 118654
+5bm05Lqn5YC8 118655
+5Y2D5YWL 118656
+5oiY5Zy65LiK 118657
+54Wn6aGn 118658
+5bmy6YOo6Zif5LyN 118659
+5Y2w56ug 118660
+5LiA6Ie05oCn 118661
+6L+e5aSc 118662
+5YWF6KOV 118663
+6buR5ZCN5Y2V 118664
+5YeA5rC0 118665
+5LiA5aSn5pep 118666
+5YyF6KKx 118667
+54qv6KeE 118668
+55CG6KuW 118669
+5p6B5piT 118670
+6aq4 118671
+5aiY5aiY 118672
+5Zui5ZyG 118673
+5Lq/5YWD5Lul5LiK 118674
+5Yip55So5oKo55qE 118675
+5bim5p2l5pu05aSa 118676
+5Lit5aSu56m66LCD 118677
+5pyI6Jaq 118678
+54yc5oOz 118679
+5Yi65a6i 118680
+5L2c5oGv 118681
+5Y2V6LCD 118682
+5LqS5Yip 118683
+5aaC5pyJ5L615p2D 118684
+5bCP5ben 118685
+5Y2B5aCw 118686
+5ZOI5ZOI5ZOI5ZOI 118687
+6L656ZmF 118688
+5qCH6K+t 118689
+5YiH5YWl54K5 118690
+6YCG6KKt 118691
+6K+V5YmC 118692
+57u/6LGG 118693
+6K6a 118694
+5Z+6552j5b6S 118695
+5aOs 118696
+5YWo5piO5pif 118697
+6YCJ56eA 118698
+6IiM5bCW 118699
+5LiN5ZCM57G75Z6L 118700
+54Of5Zux 118701
+54G15rCU 118702
+5Yy6566h5aeU5Lya 118703
+5Yac5Ymv 118704
+5Yac5Ymv5Lqn5ZOB 118705
+6JSa5p2l 118706
+5rKq5oyH 118707
+5YW75q6W5oi3 118708
+5paX5b+X 118709
+6aaW6aKG 118710
+6KGA6IWl 118711
+5Yqg57Sn 118712
+5LiA6Ie05aW96K+E 118713
+56ys5LiJ6IqC 118714
+5oms5bCY 118715
+5Lqk6YCa5p6i57q9 118716
+6Zu256KO 118717
+6buR5rSe 118718
+55yL5LiN5oeC 118719
+5bGe5a6e 118720
+5Li75Z+O5Yy6 118721
+5aib 118722
+5aib5qiC 118723
+56yR5oSP 118724
+6Jm55qGl 118725
+5ZCE5Liq546v6IqC 118726
+55Wl5b6u 118727
+6ICV6ICY 118728
+5pys5Zy65q+U6LWb 118729
+5oiQ6LSl 118730
+6YCJ6IKh 118731
+6Kqe6KiA 118732
+562U6L6p 118733
+6Ieq5Lmg 118734
+5qO6 118735
+5LiH5qyn5YWD 118736
+5YGc5bel 118737
+5a+55YW26L+b6KGM 118738
+56ev5p6B6YWN5ZCI 118739
+5Lm+5Z2k 118740
+5aaW5oCq 118741
+6JqM5Z+g 118742
+6LWE5Lqn6K+E5Lyw 118743
+6LCD55qu 118744
+6Zmk5aSV 118745
+5Zu05aKZ 118746
+5pyN5b25 118747
+5rex5riK 118748
+6aKE5Yi2 118749
+54O9 118750
+5a6J56iz 118751
+5bu65p6E 118752
+54uZ5Ye7 118753
+5Li75YuV6Ki75YaK 118754
+6YO95pyJ6Ieq5bex 118755
+5o6S5ZCN56ys5LiA 118756
+6bq76L6j 118757
+54Ca 118758
+54Of6Iqx54iG 118759
+54Of6Iqx54iG56u5 118760
+6Ieq54S25L+d5oqk 118761
+5LuZ5aKD 118762
+5Li65LqG6YG/5YWN 118763
+5Ya35bqT 118764
+6Kej5pS+5oCd5oOz 118765
+5Yid5LqM 118766
+5L2T6LS0 118767
+6aaW5a+M 118768
+6L+q5ouc 118769
+5pqC57yT 118770
+5pSv5oyB5Yqb5bqm 118771
+5L6m5o6i 118772
+6ams5Yi6 118773
+5YyX5rG9 118774
+57me 118775
+6LCO6KiA 118776
+6YCj57qM 118777
+5bez 118778
+5Lu75L2V5pe25YCZ 118779
+6L2m6IGU572R 118780
+5Y2V6aG5 118781
+5bit5Y23 118782
+5bu6562R5p2Q5paZ 118783
+5Lit56eL6IqC 118784
+56GV5aOr56CU56m2 118785
+56eB56uL 118786
+5YWa5ZKM5pS/5bqc 118787
+5pys5qyh5Lqk5piT 118788
+6Lq65Zyo5bqK5LiK 118789
+572R5Y+L6K+E6K66 118790
+5aad 118791
+5a6z576e 118792
+5YWs56uL5Yy76Zmi 118793
+5Lie 118794
+55Sf54mp6LSo 118795
+5bqU6YKA 118796
+5oq95Y+W 118797
+5Yeg5byg 118798
+5pGY57yW 118799
+57uY5pys 118800
+6K+m6Kej 118801
+5by656Gs 118802
+5pyA5YWI6L+b55qE 118803
+5oub6IKh 118804
+5oub6IKh5Lmm 118805
+5Y2D5pa5 118806
+5Y2D5pa555m+ 118807
+5Y2D5pa555m+6K6h 118808
+6YWN6Z+z 118809
+6am+54Wn 118810
+5b6B5oiY 118811
+6KqT6KiA 118812
+5ouc5biI 118813
+5ouc5biI5a2m 118814
+5ouc5biI5a2m6Im6 118815
+5oqx5Zui 118816
+57Gz57KJ 118817
+6Z2e5bi46YCC5ZCI 118818
+6Iiq5rW3 118819
+5bGl57qm 118820
+5Y2B5YWr5p2h 118821
+6ZS76YCg 118822
+6YeN6KaB5Li+5o6q 118823
+5Y+R5oyl5L2c55So 118824
+5rea 118825
+5Lq656S+ 118826
+5Lq656S+5bGA 118827
+6K+V54K55bel5L2c 118828
+6Zic6Ziz 118829
+5qGD5ZyS 118830
+5rCR5LyB 118831
+5rSB55m9 118832
+6LS15a6+ 118833
+5YWs56S+ 118834
+6KeJ5oKf 118835
+6K6w5b+G5Yqb 118836
+5pyD5ZOh6Ki75YaK 118837
+5q2k5qGI 118838
+6bq755e5 118839
+54+A 118840
+5pap6I63 118841
+55S35a2p5a2Q 118842
+5bGA6ZmQ5LqO 118843
+5YuY5p+l 118844
+5ZCD6aWx 118845
+6Iqs5YWw 118846
+5qOV6Imy 118847
+56aP56WJ 118848
+55Sz6Iqx 118849
+5rW355uX 118850
+6JSR 118851
+5paH5a24 118852
+5rS75oCn54Kt 118853
+55u06YCa6L2m 118854
+6LCi6YKA 118855
+6Lq6552A 118856
+5ZyD 118857
+5q+P5pel57uP5rWO 118858
+5YWs5YWx5paH5YyW 118859
+6K6y5pWF5LqL 118860
+5a+f55yL 118861
+5oKg6Zey 118862
+5Zyw5Z2q 118863
+5raM546w5Ye6 118864
+6auY562J6Zmi5qCh 118865
+6IyE5a2Q 118866
+6Ziy5Y2r 118867
+5L6L6KGM 118868
+5pi+6Zyy 118869
+5paw5bi45oCB 118870
+57ud5L2z 118871
+5a+M5rCR 118872
+5Lul5Lq65rCR 118873
+5Lul5Lq65rCR5Li6 118874
+6YKi5Y+w 118875
+5bGV5ryU 118876
+55m85biD 118877
+6LSf6L29 118878
+5YGP56a7 118879
+5rC46YGg 118880
+6YeN6KaB5Y6f5Zug 118881
+5Y2P5Lya5Lya5ZGY 118882
+6Zq+5rCR 118883
+55Sf5Lqn6L2m6Ze0 118884
+54G15Yqo 118885
+5Lik5bm05YmN 118886
+5pa55ZyG 118887
+5rS75LiL5Y67 118888
+5LiW55WM6KeC 118889
+6aqX5Y+W 118890
+576O6LKM 118891
+6IO955yL5Ye6 118892
+55m85o+u 118893
+6KeC5b2x 118894
+5YmD 118895
+5ZCI6LWE5YWs5Y+4 118896
+5amn 118897
+5bmy5pex 118898
+5YWt5Liq5pyI 118899
+5bCk5Li66YeN6KaB 118900
+6IK9 118901
+56em5Zu9 118902
+5omY56aP 118903
+5bu6562R5biI 118904
+5Y2H57qn5pS56YCg 118905
+5bCP6aKd 118906
+5bCP6aKd6LS35qy+ 118907
+5Lik5Liq57u05oqk 118908
+5ouN5ouN 118909
+5Y+v55aR 118910
+5o2i5Y+W 118911
+5q2m5aOr 118912
+6LWW5Lul 118913
+6LWW5Lul55Sf5a2Y 118914
+5oya 118915
+5q6/5aCC 118916
+6Ieq54S255WM 118917
+56OB5Zy6 118918
+5aaC5L2V55yL5b6F 118919
+5LuK5pel5aS05p2h 118920
+6KW/5Z+f 118921
+6I636K+E 118922
+6aKo5qC8 118923
+5L+E5Zu9 118924
+5omT5ou8 118925
+5a6j5Lyg54mH 118926
+5b6I5pa55L6/ 118927
+5L6b57uZ5L6n 118928
+57qq5b+156KR 118929
+5q+r5YWL 118930
+6Iqz6aaZ 118931
+5bel5ZWG6ZO26KGM 118932
+6K+354K55Ye7 118933
+57yq 118934
+5peg5pWw5qyh 118935
+6I2v5biI 118936
+6IW4 118937
+5ri46ImH 118938
+5Yy+ 118939
+5beh6Iiq 118940
+5rK755CG5L2T57O7 118941
+6JCl6YCg6Imv5aW9 118942
+5re35reG 118943
+6YCa55WF 118944
+5Yqz57Sv 118945
+5LuT5L2N 118946
+5aKe6ZW3 118947
+6ZqQ57qm 118948
+5p2C5b+X56S+ 118949
+5YW76IKy 118950
+5Y+v6IO95Y+R55Sf 118951
+6ICD6Kmm 118952
+6KW/5L6n 118953
+5Yqg5YCN 118954
+5Li75oyB5Y+s5byA 118955
+55Wi56uf 118956
+6Zeu6K+i 118957
+5rW35qOg 118958
+6Jep 118959
+5rOo5piO5p2l5rqQ 118960
+5qOA55ar 118961
+6K+35YGH 118962
+5oqa5pG4 118963
+6JOE55S15rGg 118964
+6Lef5LiN5LiK 118965
+546w5Luj56S+5Lya 118966
+56256LWE 118967
+5L2T6IKy5b2p56Wo 118968
+5bu26K+v 118969
+6L6b6L6j 118970
+6Z2i5a65 118971
+5Y2w6K6w 118972
+54Gt5Lqh 118973
+57Sg6aOf 118974
+5YW06Ie0 118975
+6ZyA6KaB55So 118976
+6ZyA6KaB55So5Yiw 118977
+5a6d5aaI 118978
+56OL5ZWG 118979
+6Zq25bGe 118980
+6LSh54yu5Yqb6YeP 118981
+5YWs5YWx6LWE5rqQ 118982
+5aSn6Ziq 118983
+5Yab6K6t 118984
+5oKs5b+1 118985
+56S+5Lya56iz5a6a 118986
+5bmy5LqL5Yib5Lia 118987
+5pyJ5p2h5Lu2 118988
+5pyJ5p2h5Lu255qE 118989
+5LiA5bm05LiA5bqm 118990
+5Y6l 118991
+5by65aW4 118992
+6LGq6L2m 118993
+5o6M5p+c 118994
+5rC05Yip5bel56iL 118995
+5bOq 118996
+56ev5p6B5L2c55So 118997
+5rW35reA 118998
+5rW35reA5Yy6 118999
+54Ot5pKt 119000
+5Z2a5oyB5LiN5oeI 119001
+5Y+M6ISa 119002
+57uf5oiY 119003
+5Lu75L2V5Lq66YO9 119004
+5Zyw5LiL5a6k 119005
+5Ya254K8 119006
+6LCF6Kej 119007
+5riU6Ii5 119008
+5aSq6Ziz5Z+O 119009
+6KKr5o2V 119010
+6K6h566X5Zmo 119011
+6KW/5Yy7 119012
+6IiS5b+D 119013
+5qGm 119014
+6YGy 119015
+5YqR 119016
+6KiX 119017
+6I66 119018
+5Zas 119019
+55Ov 119020
+5ZiY 119021
+5aCV 119022
+5pWd 119023
+5ZGm 119024
+6Iue 119025
+5q25 119026
+5pOs 119027
+5qOE 119028
+6Ii1 119029
+5aWq 119030
+55qL 119031
+5pS4 119032
+5Zyp 119033
+56SZ 119034
+56KY 119035
+6Y+I 119036
+5oSV 119037
+57mz 119038
+6Ji4 119039
+6LKC 119040
+5ryy 119041
+5pG5 119042
+5pSd 119043
+5a2i 119044
+6JWt 119045
+6aiw 119046
+5r28 119047
+6YWw 119048
+5pKl 119049
+6Lms 119050
+6aiZ 119051
+6Li5 119052
+6YGQ 119053
+55iA 119054
+6Juk 119055
+5oKW 119056
+55Ke 119057
+56OQ 119058
+5o6w 119059
+6L6K 119060
+5b6R 119061
+5o6W 119062
+6YGe 119063
+6YK4 119064
+6ZuP 119065
+5oaO 119066
+5py9 119067
+5427 119068
+566U 119069
+6KS2 119070
+5pqi 119071
+5pi1 119072
+54+C 119073
+5oK4 119074
+5YG1 119075
+5Zmc 119076
+5aOv 119077
+5pKu 119078
+5oGN 119079
+5amV 119080
+56+x 119081
+6ZiZ 119082
+54mg 119083
+6KOY 119084
+6LOi 119085
+6Yec 119086
+6ZOg 119087
+6I6Y 119088
+5q6G 119089
+55m4 119090
+6LSP 119091
+57Kx 119092
+5auh 119093
+5Yai 119094
+6KSS 119095
+5oeK 119096
+6ZyT 119097
+5aG1 119098
+5ouj 119099
+5buf 119100
+6aO9 119101
+6aKM 119102
+5ZqO 119103
+5re6 119104
+6Iag 119105
+5Y6t 119106
+5ZqH 119107
+5ZGD 119108
+55KL 119109
+562x 119110
+5ou3 119111
+6I2n 119112
+6ZSw 119113
+5a2w 119114
+6JOT 119115
+6Ia9 119116
+5p6J 119117
+5Za9 119118
+55uU 119119
+562Q 119120
+576a 119121
+6IWM 119122
+6L6r 119123
+5rOT 119124
+55Ss 119125
+6J+y 119126
+5Zaq 119127
+5aaT 119128
+6KyA 119129
+54KK 119130
+5puc 119131
+5rGQ 119132
+6LSI 119133
+6I2A 119134
+5oqg 119135
+56K+ 119136
+5quD 119137
+6Z6g 119138
+6JGG 119139
+56Wv 119140
+5b2d 119141
+6aaN 119142
+5Yyj 119143
+5pyt 119144
+5Z2C 119145
+5L+R 119146
+6JOu 119147
+55Gb 119148
+5omJ 119149
+6Ief 119150
+6LKr 119151
+546l 119152
+5re8 119153
+5Y6y 119154
+6bOM 119155
+5bOt 119156
+5ZGb 119157
+6ac= 119158
+6aeQ 119159
+6YG3 119160
+5L+q 119161
+5oCC 119162
+6L6N 119163
+5bGN 119164
+5YuB 119165
+5aWa 119166
+6ZqF 119167
+6ZK0 119168
+6Lyd 119169
+5a6m 119170
+6JCD 119171
+55iL 119172
+5oa2 119173
+5oKF 119174
+6L6Z 119175
+5ZGc 119176
+56C6 119177
+6YCe 119178
+5rWa 119179
+6Zaj 119180
+6Jap 119181
+6ZmL 119182
+54KZ 119183
+6KqV 119184
+5Lif 119185
+6bm9 119186
+57GM 119187
+6LSw 119188
+6Yuq 119189
+55yp 119190
+5pKQ 119191
+6Ia6 119192
+6Z6Y 119193
+576y 119194
+56qu 119195
+57SQ 119196
+5q60 119197
+57q+ 119198
+6LqN 119199
+57SL 119200
+54SW 119201
+55S6 119202
+54m9 119203
+54Kv 119204
+57yU 119205
+5q+T 119206
+5ayw 119207
+5qKn 119208
+5Lqf 119209
+6KKF 119210
+542E 119211
+6L+l 119212
+5ry+ 119213
+552R 119214
+57i+ 119215
+6aaL 119216
+6aSF 119217
+5rmE 119218
+5piH 119219
+5p6t 119220
+6Jaw 119221
+5p+R 119222
+5qa7 119223
+5ZmX 119224
+5Zm0 119225
+5qOj 119226
+5ZSn 119227
+54a5 119228
+6Lyv 119229
+5aKf 119230
+6bKy 119231
+5oib 119232
+6Imm 119233
+6Iqu 119234
+5Zif 119235
+5bil 119236
+5b+7 119237
+54yd 119238
+5a+1 119239
+6LOm 119240
+6Ju+ 119241
+5ru+ 119242
+54KV 119243
+6ZOs 119244
+6JK/ 119245
+6ZKo 119246
+54OZ 119247
+57KV 119248
+5oOm 119249
+5rqn 119250
+6aKN 119251
+6YWj 119252
+5bOm 119253
+57GB 119254
+54OD 119255
+5YaX 119256
+5Y+B 119257
+55un 119258
+5721 119259
+6ZKX 119260
+5ayJ 119261
+6LCP 119262
+57On 119263
+6L6t 119264
+5res 119265
+6J+S 119266
+6K+p 119267
+6KaD 119268
+55mW 119269
+6b2S 119270
+54iQ 119271
+566N 119272
+57yO 119273
+56O6 119274
+6K+r 119275
+6KSy 119276
+5pOg 119277
+6JCm 119278
+552s 119279
+6LCN 119280
+6YSw 119281
+5qC+ 119282
+6aGP 119283
+57ix 119284
+5qGo 119285
+6Yas 119286
+6KWy 119287
+6K6q 119288
+5am6 119289
+6I2f 119290
+5Yyd 119291
+54ag 119292
+6JuK 119293
+5ria 119294
+5bS9 119295
+6bKk 119296
+5ZWw 119297
+5YyV 119298
+5LiQ 119299
+6K6l 119300
+5Y+9 119301
+5Y+8 119302
+55q/ 119303
+6L+C 119304
+5ZCG 119305
+5bG5 119306
+6Ie8 119307
+6K65 119308
+6amu 119309
+57qr 119310
+5rGe 119311
+5oqh 119312
+6IuH 119313
+5ZCg 119314
+5ZCt 119315
+5ZCu 119316
+5bKW 119317
+5L2D 119318
+54uI 119319
+5bqH 119320
+5ZCd 119321
+6Zew 119322
+5rG5 119323
+5b+x 119324
+5ouE 119325
+5ouX 119326
+6IyJ 119327
+6Iub 119328
+6IyB 119329
+55++ 119330
+6JmP 119331
+5ZG7 119332
+5ZKE 119333
+5b+/ 119334
+6IKu 119335
+54ue 119336
+55af 119337
+55aZ 119338
+55aa 119339
+5rOe 119340
+5bia 119341
+5bGJ 119342
+6L+i 119343
+6am5 119344
+5463 119345
+54+K8w== 119346
+54+K86A= 119347
+54+K86CE 119348
+54+K86CEgQ== 119349
+5oyO 119350
+5ou0 119351
+5Z6b 119352
+6I2k 119353
+5q6D 119354
+55u5 119355
+5ZOG 119356
+6LS7 119357
+5q+h 119358
+54uw 119359
+54uh 119360
+5p+S 119361
+5oGD 119362
+6K+s 119363
+6KKE 119364
+6K+y 119365
+6Jqk 119366
+6ICZ 119367
+5Z+C 119368
+5o2O 119369
+5o2M 119370
+5qKG 119371
+6YWM 119372
+56C+ 119373
+5q6J 119374
+5ZSg 119375
+5pmM 119376
+6Jqj 119377
+6Jqq 119378
+6JqT 119379
+6biv 119380
+5ZSB 119381
+5ZSG 119382
+5YCU 119383
+6IiA 119384
+6LG6 119385
+6IOw 119386
+6bi1 119387
+6biz 119388
+6aaB 119389
+576U 119390
+5raj 119391
+5raV 119392
+5oKv 119393
+6K+9 119394
+6LCG 119395
+56Wf 119396
+57ui 119397
+5o26 119398
+5o22 119399
+5o27 119400
+5o6C 119401
+6I+g 119402
+6JCk 119403
+6YWX 119404
+55y2 119405
+5ZWE 119406
+6Jqv 119407
+6JuA 119408
+5ZSs 119409
+5bi3 119410
+6ZOQ 119411
+6ZOb 119412
+5YGO 119413
+5b6Z 119414
+6ISv 119415
+6LGa 119416
+54yW 119417
+55eK 119418
+5rau 119419
+5oOt 119420
+5oK0 119421
+5oOL 119422
+6LCa 119423
+5o+p 119424
+5pCA 119425
+5pCU 119426
+5qaU 119427
+5qSt 119428
+6Zuz 119429
+5Zaz 119430
+6Leb 119431
+6JyT 119432
+6JyS 119433
+6bmD 119434
+6ZSE 119435
+55Sl 119436
+562P 119437
+54yp 119438
+54ys 119439
+54y+ 119440
+55ei 119441
+55eq 119442
+5oOw 119443
+56qY 119444
+6LCk 119445
+6ZqY 119446
+5am/ 119447
+6bmJ 119448
+55GZ 119449
+5paf 119450
+5qS/ 119451
+6YWq 119452
+6Zu5 119453
+5Zem 119454
+6Le3 119455
+6Le6 119456
+6Lek 119457
+6JyI 119458
+6JyX 119459
+5bmM 119460
+6aaP 119461
+6KqK 119462
+5ryT 119463
+6KSC 119464
+6JSX 119465
+6JS8 119466
+5YWi 119467
+6KOz 119468
+6Jy7 119469
+6J2H 119470
+5ZiA 119471
+6ZS5 119472
+566V 119473
+566p 119474
+55ip 119475
+55if 119476
+5ryx 119477
+5a+l 119478
+6aqh 119479
+5pK1 119480
+5pKs 119481
+6LGM 119482
+5Zi5 119483
+6J2g 119484
+6J2M 119485
+6J2X 119486
+6J2Z 119487
+6ZWQ 119488
+56i8 119489
+56+T 119490
+6Iab 119491
+6bKr 119492
+55iq 119493
+6bKo 119494
+5oaU 119495
+57+p 119496
+6KSl 119497
+57yt 119498
+5Zmp 119499
+55Oi 119500
+6ZyO 119501
+6Lix 119502
+6LmC 119503
+6J+G 119504
+6bmm 119505
+56+h 119506
+55i4 119507
+56q/ 119508
+57yw 119509
+6JeQ 119510
+6LmL 119511
+6J+L 119512
+6J+A 119513
+6LWh 119514
+6IeK 119515
+6bOE 119516
+57Og 119517
+5oem 119518
+5Zqj 119519
+6ZWw 119520
+6bON 119521
+57C4 119522
+55mj 119523
+6bOW 119524
+6ayT 119525
+6KCV 119526
+6Zy5 119527
+6LqP 119528
+6buv 119529
+55Ok 119530
+55+X 119531
+5LmC 119532
+5Lmc 119533
+5YWA 119534
+5byL 119535
+5a2R 119536
+5a2T 119537
+5bm6 119538
+5LqT 119539
+5bu/ 119540
+5LiP 119541
+5Y2F 119542
+5LuD 119543
+5LuJ 119544
+5LuC 119545
+5YiI 119546
+54i7 119547
+5Y2e 119548
+6Zep 119549
+6K6j 119550
+5aSs 119551
+54i/ 119552
+5q+L 119553
+6YKX 119554
+6YKb 119555
+6Im9 119556
+6Im/ 119557
+5Y+1 119558
+5LiV 119559
+5Yyc 119560
+5Yqi 119561
+5Y2f 119562
+5Y+x 119563
+5Y+7 119564
+5Luo 119565
+5Luf 119566
+5Luh 119567
+5Lur 119568
+5Lue 119569
+5Y2u 119570
+5rCQ 119571
+54qw 119572
+5YiN 119573
+6YKd 119574
+6YKZ 119575
+6K6m 119576
+6K6n 119577
+6K6r 119578
+5bC7 119579
+6Zih 119580
+5bCV 119581
+5byB 119582
+6ICS 119583
+546O 119584
+546R 119585
+5Zys 119586
+5omm 119587
+5Zyq 119588
+5Zy5 119589
+5omq 119590
+5Zyu 119591
+5Zyv 119592
+6IqK 119593
+6IqN 119594
+6IqE 119595
+6Iqo 119596
+6IqR 119597
+6IqO 119598
+6IqX 119599
+5LqY 119600
+5Y6N 119601
+5aS8 119602
+5oiN 119603
+5bCl 119604
+5Lmp 119605
+5pev 119606
+5puz 119607
+5bKM 119608
+5bG6 119609
+5Ye8 119610
+5Zuh 119611
+6ZKH 119612
+57y2 119613
+5rCY 119614
+5rCW 119615
+54md 119616
+5LyO 119617
+5Lyb 119618
+5Lyi 119619
+5L2k 119620
+5Lu1 119621
+5Lyl 119622
+5Lyn 119623
+5LyJ 119624
+5Lyr 119625
+5Zuf 119626
+5rGG 119627
+5YiW 119628
+5aSZ 119629
+5peu 119630
+5YiO 119631
+54q3 119632
+54q4 119633
+6Iib 119634
+5Yer 119635
+6YKs 119636
+6aWn 119637
+5rGU 119638
+5rGc 119639
+5rGK 119640
+5b+W 119641
+5b+P 119642
+6K60 119643
+6K61 119644
+6K63 119645
+6IG/ 119646
+6Imu 119647
+5Y6+ 119648
+5aaB 119649
+57qh 119650
+57qj 119651
+57ql 119652
+57qo 119653
+546V 119654
+546Z 119655
+5oqf 119656
+5oqU 119657
+5Zy7 119658
+5Z2N 119659
+5oqD 119660
+46eQ 119661
+6Iqr 119662
+6Iq+ 119663
+6IuI 119664
+6Iuj 119665
+6IuL 119666
+6Iq8 119667
+6IuM 119668
+6IuB 119669
+6Iqp 119670
+6Iqq 119671
+6Iqh 119672
+6Iqf 119673
+6IuE 119674
+6IuO 119675
+6Iuh 119676
+5p2M 119677
+5p2T 119678
+5p2I 119679
+5b+R 119680
+5a2b 119681
+6YK0 119682
+6YKz 119683
+5aWB 119684
+6LGV 119685
+5b+S 119686
+5qyk 119687
+6L2r 119688
+6L+T 119689
+6YK2 119690
+5b+Q 119691
+5Y2j 119692
+6YK6 119693
+5pew 119694
+5ZGL 119695
+5ZGS 119696
+5ZGT 119697
+5ZGU 119698
+5ZGW 119699
+5pe4 119700
+5ZCh 119701
+6Jms 119702
+5ZC9 119703
+5ZCj 119704
+5ZCy 119705
+5biP 119706
+5bKI 119707
+5bKY 119708
+5YWV 119709
+5Zu1 119710
+5Zur 119711
+6ZKK 119712
+6ZKL 119713
+6ZKM 119714
+6L+V 119715
+5rCZ 119716
+5rCa 119717
+54mk 119718
+5L2e 119719
+5L2a 119720
+5L2d 119721
+5L2X 119722
+5b23 119723
+5L2Y 119724
+5L2l 119725
+6LG4 119726
+5Z2M 119727
+6IKf 119728
+5aWC 119729
+5Yqs 119730
+54uB 119731
+6big 119732
+6aWo 119733
+6aWp 119734
+6aWr 119735
+6aWs 119736
+5bqR 119737
+5bqL 119738
+55aU 119739
+55aW 119740
+6IKT 119741
+6Zex 119742
+6Zez 119743
+54KA 119744
+5rKj 119745
+5rKF 119746
+5rKU 119747
+5rKk 119748
+5rKP 119749
+5rKa 119750
+5rGp 119751
+5rGo 119752
+5rKo 119753
+5rG0 119754
+5rKG 119755
+5rKp 119756
+5rOQ 119757
+5oCD 119758
+5oCE 119759
+5b+h 119760
+5b+k 119761
+5b++ 119762
+5oCF 119763
+5b+q 119764
+5oCG 119765
+5b+t 119766
+5b+4 119767
+6K+C 119768
+6K+D 119769
+6K+F 119770
+6K+L 119771
+6K+M 119772
+6K+S 119773
+6ZmC 119774
+6ZmJ 119775
+5aap 119776
+5aaq 119777
+5aaj 119778
+5aaX 119779
+5aar 119780
+5aeS 119781
+5aak 119782
+5Yqt 119783
+5Yit 119784
+6YKw 119785
+57qt 119786
+57qw 119787
+57q0 119788
+546h 119789
+546t 119790
+546g 119791
+546i 119792
+546m 119793
+55uC 119794
+5b+d 119795
+5Yym 119796
+5Z2p 119797
+5oqo 119798
+5ouk 119799
+5Z2r 119800
+5ouI 119801
+5Z6G 119802
+5oq7 119803
+5Yq8 119804
+5ouD 119805
+5ouK 119806
+5Z28 119807
+5Z27 119808
+46ef 119809
+5Z2o 119810
+5Z2t 119811
+5oq/ 119812
+5Z2z 119813
+6Iu3 119814
+6Iuk 119815
+6IyP 119816
+6Iur 119817
+6Iuc 119818
+6Iu0 119819
+6IuS 119820
+6IuY 119821
+6IyM 119822
+6Iu7 119823
+6IuT 119824
+6Iya 119825
+6IyG 119826
+6IyR 119827
+6IyT 119828
+6IyU 119829
+6IyV 119830
+6IyA 119831
+6IuV 119832
+5p6l 119833
+5p6H 119834
+5p2q 119835
+5p2z 119836
+5p6n 119837
+5p21 119838
+5p6o 119839
+5p6e 119840
+5p6L 119841
+5p27 119842
+5p23 119843
+5p28 119844
+55+4 119845
+56CA 119846
+5Yiz 119847
+5aWE 119848
+5q6B 119849
+6YOP 119850
+6L2t 119851
+6YOF 119852
+6bii 119853
+55ux 119854
+5piZ 119855
+5p2y 119856
+5piD 119857
+5ZKC 119858
+5ZG4 119859
+5piA 119860
+5pe7 119861
+5piJ 119862
+54KF 119863
+55WA 119864
+6Jmu 119865
+5ZKA 119866
+5ZG3 119867
+6bu+ 119868
+5ZGx 119869
+5ZGk 119870
+5ZKG 119871
+5ZKb 119872
+5ZG2 119873
+5ZGj 119874
+5ZKd 119875
+5bKi 119876
+5bK/ 119877
+5bKs 119878
+5bKr 119879
+5biZ 119880
+5bKj 119881
+5bOB 119882
+5Yi/ 119883
+5bK3 119884
+5YmA 119885
+5biU 119886
+5bOE 119887
+5rKT 119888
+5Zu5 119889
+572U 119890
+6ZKN 119891
+6ZKO 119892
+6ZKP 119893
+6ZKS 119894
+6ZKV 119895
+6YK+ 119896
+6L+u 119897
+54mm 119898
+56u6 119899
+6L+k 119900
+5L22 119901
+5L6R 119902
+5L6J 119903
+6Ie+ 119904
+5L6X 119905
+5L6P 119906
+5L6p 119907
+5L27 119908
+5L2+ 119909
+5L6q 119910
+5L28 119911
+5L2v 119912
+5L6s 119913
+5bib 119914
+5L6U 119915
+5b6C 119916
+5Yi9 119917
+6YOE 119918
+57G0 119919
+55Ou 119920
+5oiX 119921
+6IK8 119922
+5I+d 119923
+6IKx 119924
+6IKr 119925
+6L+p 119926
+6YOH 119927
+54uO 119928
+54uN 119929
+54uS 119930
+5ZKO 119931
+6aWv 119932
+6aW0 119933
+5Ya9 119934
+5Ya8 119935
+5bqW 119936
+55ag 119937
+55ad 119938
+5YWW 119939
+5Yq+ 119940
+8KyJ 119941
+8KyJvA== 119942
+54KY 119943
+54Kd 119944
+54KU 119945
+5rOU 119946
+5rKt 119947
+5rO3 119948
+5rOx 119949
+5rOF 119950
+5rOg 119951
+5rO6 119952
+5rOW 119953
+5rOr 119954
+5rOu 119955
+5rKx 119956
+5rOv 119957
+5oCZ 119958
+5oC1 119959
+5oCm 119960
+5oCb 119961
+5oCP 119962
+5oCN 119963
+46Q= 119964
+46SY 119965
+5oCp 119966
+5oCr 119967
+5oC/ 119968
+5a6V 119969
+56m5 119970
+5a6T 119971
+6K+T 119972
+6K+U 119973
+6K+W 119974
+6K+Y 119975
+5oi+ 119976
+6K+Z 119977
+5oi9 119978
+6YOT 119979
+6KGp 119980
+56WG 119981
+56WO 119982
+56WH 119983
+6K+c 119984
+6K+f 119985
+6K+j 119986
+6K+k 119987
+6K+n 119988
+6K+o 119989
+5oiV 119990
+6ZmU 119991
+5aay 119992
+5aav 119993
+5aeX 119994
+5biR 119995
+5a2l 119996
+6am9 119997
+6Jmx 119998
+6L+o 119999
+57uA 120000
+57uB 120001
+57uC 120002
+6am3 120003
+6am4 120004
+57uJ 120005
+57uM 120006
+6aqA 120007
+55S+ 120008
+54+P 120009
+54+Q 120010
+54+R 120011
+546z 120012
+6aG4 120013
+54+J 120014
+54+I 120015
+5ouu 120016
+5Z6t 120017
+5oyd 120018
+5oye 120019
+5Z6k 120020
+6LWz 120021
+6LSy 120022
+5Z6x 120023
+5Z6M 120024
+5Z6n 120025
+5Z6T 120026
+5oym 120027
+5Z6g 120028
+6I2a 120029
+6I2R 120030
+6LSz 120031
+6I2c 120032
+6I6S 120033
+6Iy8 120034
+6Iy0 120035
+6Iyx 120036
+6I6b 120037
+6I2e 120038
+6Iyv 120039
+6I2P 120040
+6I2H 120041
+6I2D 120042
+6I2g 120043
+6Iyt 120044
+5Z6p 120045
+6I2l 120046
+6I2m 120047
+6I2o 120048
+6I2p 120049
+5YmL 120050
+6I2q 120051
+6I2s 120052
+6I2u 120053
+5p+w 120054
+5qCJ 120055
+5p+Y 120056
+5qCK 120057
+5p+p 120058
+5p6w 120059
+5qCM 120060
+5p+Z 120061
+5p61 120062
+5p6z 120063
+5p+e 120064
+5p+d 120065
+5qCA 120066
+5p+i 120067
+5qCO 120068
+5p+I 120069
+5p+B 120070
+5p63 120071
+5p+9 120072
+5YmM 120073
+6YWK 120074
+6YOm 120075
+55St 120076
+56CX 120077
+56CY 120078
+56CS 120079
+5par 120080
+56Ct 120081
+56Cc 120082
+6IC3 120083
+6Jm6 120084
+5q6C 120085
+5q6H 120086
+5q6E 120087
+6L2x 120088
+6L2y 120089
+6L2z 120090
+6L22 120091
+6L24 120092
+6Jm/ 120093
+5q+W 120094
+6KeH 120095
+5bCc 120096
+5ZOQ 120097
+55yE 120098
+55yN 120099
+8KCz 120100
+8KCzkA== 120101
+6YOi 120102
+55yH 120103
+55yK 120104
+55yI 120105
+56a6 120106
+5ZOC 120107
+5ZK0 120108
+5pu3 120109
+5pi0 120110
+5ZKm 120111
+5ZOT 120112
+5ZOU 120113
+55WO 120114
+5ZGy 120115
+6IOE 120116
+55WL 120117
+55WI 120118
+6Jm8 120119
+6Jm7 120120
+55uF 120121
+5ZKj 120122
+5ZOV 120123
+5YmQ 120124
+6YOn 120125
+5ZK7 120126
+5Zu/ 120127
+5ZK/ 120128
+5ZOM 120129
+5ZOZ 120130
+5ZOa 120131
+5ZKp 120132
+5ZKk 120133
+5ZOd 120134
+5ZOP 120135
+5ZOe 120136
+5bOj 120137
+572Y 120138
+5bOS 120139
+5bOk 120140
+5bOL 120141
+6LS2 120142
+6ZKa 120143
+6ZKh 120144
+6ZKj 120145
+6ZKk 120146
+6ZKr 120147
+5rCh 120148
+54mv 120149
+6YOc 120150
+56eV 120151
+56et 120152
+56u9 120153
+56yI 120154
+5L+m 120155
+5L+o 120156
+5L+F 120157
+5Y+f 120158
+5Z6h 120159
+54mu 120160
+5L+j 120161
+5L+a 120162
+55qI 120163
+5L+f 120164
+6YCF 120165
+5b6H 120166
+5b6J 120167
+6Iii 120168
+6YOX 120169
+5L+O 120170
+6YOk 120171
+54iw 120172
+6YOb 120173
+55O0 120174
+6IOo 120175
+6IOq 120176
+6IOb 120177
+6IOC 120178
+6IOZ 120179
+6ION 120180
+6IOX 120181
+6IOd 120182
+5pyQ 120183
+6IOr 120184
+6bio 120185
+5YyN 120186
+54uo 120187
+54uv 120188
+6aOR 120189
+54up 120190
+54uy 120191
+6KiH 120192
+6YCE 120193
+5pid 120194
+6aW3 120195
+6aW4 120196
+6aW5 120197
+5a2q 120198
+5aiI 120199
+5bql 120200
+55as 120201
+55aj 120202
+55al 120203
+55at 120204
+5bqg 120205
+56uR 120206
+6aOS 120207
+6Ze8 120208
+6Ze+ 120209
+6Ze/ 120210
+6ZiC 120211
+576R 120212
+6L+4 120213
+57G8 120214
+6YWL 120215
+54K7 120216
+54OA 120217
+54K3 120218
+5rSx 120219
+5rS5 120220
+5rSn 120221
+5rSM 120222
+5rWD 120223
+5rSH 120224
+5rSE 120225
+5rSZ 120226
+5raO 120227
+5rSO 120228
+5rSr 120229
+5rWN 120230
+5rSu 120231
+5rS1 120232
+5rWS 120233
+5rWU 120234
+5rWV 120235
+5rSz 120236
+5oG4 120237
+5oGT 120238
+5oG5 120239
+5oGr 120240
+5oG7 120241
+5oGC 120242
+5oGq 120243
+5oG9 120244
+5a6l 120245
+5omD 120246
+6KGy 120247
+6KG9 120248
+6KG/ 120249
+6KKC 120250
+56Wc 120251
+56WT 120252
+56Wa 120253
+6K+u 120254
+56WX 120255
+56Wi 120256
+6K+w 120257
+6K+z 120258
+6bip 120259
+5pi2 120260
+5ZKr 120261
+5byt 120262
+54mB 120263
+6IOl 120264
+6Zmf 120265
+5aeu 120266
+5aiG 120267
+5aed 120268
+5aej 120269
+5aeY 120270
+5ae5 120271
+576/ 120272
+54Kx 120273
+55+c 120274
+57uU 120275
+6aqB 120276
+6aqF 120277
+57uX 120278
+57ub 120279
+6aqI 120280
+6ICW 120281
+5oyI 120282
+54+l 120283
+54+Z 120284
+6aG8 120285
+54+w 120286
+54+p 120287
+54+n 120288
+54+j 120289
+54+e 120290
+55Ck 120291
+54+y 120292
+5oGa 120293
+5Z+V 120294
+5Z+Y 120295
+5Z+Z 120296
+5Z+a 120297
+5oy5 120298
+6ICG 120299
+6ICE 120300
+5Z+S 120301
+5o2L 120302
+6LS9 120303
+5Z64 120304
+5o2D 120305
+55uN 120306
+6I24 120307
+6I6z 120308
+6I60 120309
+6I6q 120310
+6I6g 120311
+6I6c 120312
+6I6F 120313
+6I28 120314
+6I6p 120315
+6I29 120316
+6I64 120317
+6I27 120318
+6I6o 120319
+6biq 120320
+6I68 120321
+5qCy 120322
+5qCz 120323
+5qGh 120324
+5qGO 120325
+5qGi 120326
+5qGk 120327
+5qKD 120328
+5qCd 120329
+5qGV 120330
+5qGB 120331
+5qGn 120332
+5qGF 120333
+5qCf 120334
+5qGJ 120335
+5qCp 120336
+6YCR 120337
+6YCL 120338
+5b2n 120339
+6ayy 120340
+6LGH 120341
+6YWQ 120342
+6YCm 120343
+5Y6d 120344
+5a2s 120345
+56Cd 120346
+56C5 120347
+56Cn 120348
+56C3 120349
+56Cf 120350
+56C8 120351
+56Cl 120352
+56Cj 120353
+5Yme 120354
+56C7 120355
+6L28 120356
+6L2+ 120357
+6L6C 120358
+6bir 120359
+6La4 120360
+6b6A 120361
+6bis 120362
+6JmU 120363
+55ys 120364
+5ZSb 120365
+55yZ 120366
+5ZOn 120367
+5ZO9 120368
+5pmB 120369
+6biu 120370
+6La1 120371
+6La/ 120372
+55Wb 120373
+6Jqo 120374
+6Jqc 120375
+6JqN 120376
+6JqL 120377
+6Jqs 120378
+6Jqd 120379
+6Jqn 120380
+5ZSi 120381
+5ZyE 120382
+5ZSj 120383
+5ZSP 120384
+55uO 120385
+5ZSR 120386
+5bSC 120387
+5bSD 120388
+572h 120389
+572f 120390
+6KeK 120391
+6LWF 120392
+6ZKy 120393
+6ZK1 120394
+6ZK5 120395
+6ZK6 120396
+6ZK9 120397
+6ZK8 120398
+6ZK/ 120399
+6ZOA 120400
+6ZOE 120401
+6ZOG 120402
+6ZOI 120403
+6ZOJ 120404
+6ZOK 120405
+6ZOL 120406
+6ZOM 120407
+6ZON 120408
+5KU= 120409
+5KW9 120410
+6ZOO 120411
+5rCp 120412
+5rCk 120413
+5rCm 120414
+5q+q 120415
+6IiQ 120416
+56ej 120417
+56er 120418
+55uJ 120419
+56yE 120420
+56yV 120421
+56yK 120422
+56yP 120423
+56yG 120424
+5L+4 120425
+5L+1 120426
+5YGM 120427
+5L+z 120428
+5L+2 120429
+5YCs 120430
+5YCP 120431
+5oGB 120432
+5YCt 120433
+5L++ 120434
+5YCc 120435
+6Zq8 120436
+6Zq9 120437
+5YCM 120438
+5YCl 120439
+6Ies 120440
+6YOr 120441
+5YCo 120442
+6KGE 120443
+6aKA 120444
+5b6V 120445
+6Iir 120446
+6KG+ 120447
+6IOv 120448
+6IOx 120449
+6IO0 120450
+6IOt 120451
+6ISN 120452
+6IO8 120453
+6ISS 120454
+6bix 120455
+6biy 120456
+54u3 120457
+54yB 120458
+54uz 120459
+54yD 120460
+54u6 120461
+6YCW 120462
+5qGA 120463
+6aW9 120464
+5YeH 120465
+5oyb 120466
+5Lqz 120467
+55az 120468
+55a0 120469
+55a4 120470
+55a9 120471
+55eI 120472
+55ax 120473
+55eC 120474
+55eJ 120475
+6KGu 120476
+6aKD 120477
+5oGj 120478
+5peG 120479
+5peE 120480
+5peD 120481
+6ZiD 120482
+6ZiE 120483
+6Kia 120484
+6ZiG 120485
+5oGZ 120486
+57KR 120487
+54Oc 120488
+54Op 120489
+54OK 120490
+5Ymh 120491
+6YOv 120492
+54Os 120493
+5raR 120494
+5rWv 120495
+5rae 120496
+5raf 120497
+5aiR 120498
+5rag 120499
+5rWe 120500
+5raT 120501
+5rWl 120502
+5raU 120503
+5rWc 120504
+5rWg 120505
+5rWj 120506
+5oKa 120507
+5oKt 120508
+5oKd 120509
+5oKS 120510
+5oKM 120511
+5oKb 120512
+56qI 120513
+5Ymc 120514
+6K+5 120515
+6K+8 120516
+6KKS 120517
+6KKi 120518
+6K+/ 120519
+6LCA 120520
+6LCC 120521
+6LCE 120522
+6LCH 120523
+5bGQ 120524
+5bGZ 120525
+6Zms 120526
+5YuQ 120527
+5aWY 120528
+54mC 120529
+6Jqp 120530
+6Zmy 120531
+5aiM 120532
+5aiJ 120533
+5aiy 120534
+5ai0 120535
+5aij 120536
+5aiT 120537
+5amA 120538
+55Wa 120539
+6YCh 120540
+57ug 120541
+6aqK 120542
+57uh 120543
+6aqL 120544
+57um 120545
+57uo 120546
+6aqO 120547
+6YKV 120548
+6bi2 120549
+5b2X 120550
+6ICc 120551
+54SY 120552
+6IiC 120553
+55CP 120554
+55CH 120555
+6bq4 120556
+5o+2 120557
+5Z+0 120558
+5Z+v 120559
+5o2v 120560
+5o6z 120561
+5o60 120562
+5Z+4 120563
+5Z+1 120564
+6LWn 120565
+5Z+k 120566
+5o2t 120567
+6YC1 120568
+5Z+d 120569
+5aCL 120570
+5aCN 120571
+5o6s 120572
+6bi3 120573
+5o29 120574
+5o6K 120575
+5aCJ 120576
+5o64 120577
+5o2p 120578
+5o6u 120579
+5oKr 120580
+5Z+t 120581
+5Z+9 120582
+5o6H 120583
+5o68 120584
+6IGD 120585
+6JCB 120586
+6I+Y 120587
+5aCH 120588
+6JCY 120589
+6JCL 120590
+6I+9 120591
+6I+W 120592
+6JCc 120593
+6JC4 120594
+6JCR 120595
+5qO7 120596
+6I+U 120597
+6I+f 120598
+6JCP 120599
+6I+5 120600
+6I+q 120601
+6I+F 120602
+6I+A 120603
+6I+w 120604
+6I+h 120605
+5qK/ 120606
+5qKP 120607
+6KeL 120608
+5qG0 120609
+5qG3 120610
+5qOB 120611
+5qGr 120612
+5qOC 120613
+5ZWs 120614
+6YO+ 120615
+5pWV 120616
+6LGJ 120617
+6YSE 120618
+6YWe 120619
+56GO 120620
+56Gt 120621
+56GW 120622
+56GX 120623
+56GQ 120624
+56GH 120625
+56GM 120626
+6bi4 120627
+55Og 120628
+5YyP 120629
+5Y6p 120630
+5q6S 120631
+5q6T 120632
+5q6N 120633
+6LWJ 120634
+6Zup 120635
+6L6E 120636
+5aCR 120637
+55yt 120638
+55ym 120639
+5ZWn 120640
+5pmh 120641
+5pmk 120642
+55y1 120643
+5ZyK 120644
+5ZaP 120645
+5ZWJ 120646
+5YuW 120647
+5pme 120648
+5ZS1 120649
+5pmX 120650
+5ZWt 120651
+55Wm 120652
+6La6 120653
+5ZWu 120654
+6LeE 120655
+6Jq2 120656
+6JuE 120657
+6JuO 120658
+6JuG 120659
+6Jqw 120660
+5ZyJ 120661
+6Jqx 120662
+6JuJ 120663
+6JuP 120664
+6Jq0 120665
+5ZWB 120666
+5ZWV 120667
+5ZS/ 120668
+5ZWQ 120669
+5ZS8 120670
+5ZS3 120671
+5ZWW 120672
+5ZW1 120673
+5ZW2 120674
+5ZW3 120675
+5ZSz 120676
+5ZSw 120677
+5ZWc 120678
+5bi7 120679
+5bSa 120680
+5bSm 120681
+5bi8 120682
+5bSu 120683
+5bSk 120684
+5bSG 120685
+6LWH 120686
+6LWI 120687
+6LWK 120688
+6ZOR 120689
+6ZOS 120690
+6ZOX 120691
+6ZOZ 120692
+6ZOf 120693
+6ZOh 120694
+6ZOi 120695
+6ZOj 120696
+6ZOk 120697
+6ZOn 120698
+6ZOo 120699
+6ZOp 120700
+6ZOq 120701
+6ZOr 120702
+6ZOv 120703
+6ZOw 120704
+6ZOx 120705
+6ZOz 120706
+6ZO1 120707
+6ZO3 120708
+54m+ 120709
+6bi5 120710
+56e+ 120711
+6YC2 120712
+56y6 120713
+562H 120714
+56y4 120715
+56yq 120716
+56yu 120717
+56yg 120718
+56yl 120719
+56yk 120720
+56yz 120721
+56y+ 120722
+56ye 120723
+5YG+ 120724
+5YGD 120725
+5YGV 120726
+5YGI 120727
+5YKA 120728
+5YGs 120729
+5YG7 120730
+55qR 120731
+55qO 120732
+6bi7 120733
+5b6c 120734
+6Ii4 120735
+6Ii7 120736
+6Ii0 120737
+6Ii3 120738
+6b6b 120739
+57+O 120740
+6ISs 120741
+6ISY 120742
+6ISy 120743
+5YyQ 120744
+54yX 120745
+54yh 120746
+54ye 120747
+5pab 120748
+54yV 120749
+6aaX 120750
+6aaD 120751
+6aaE 120752
+6bi+ 120753
+5bq5 120754
+5bq+ 120755
+55eU 120756
+55eN 120757
+57+K 120758
+5peM 120759
+5peO 120760
+6KKk 120761
+6ZiH 120762
+6ZiI 120763
+6ZiJ 120764
+6ZiK 120765
+6ZiL 120766
+6ZiN 120767
+6ZiP 120768
+576f 120769
+57Kd 120770
+54SQ 120771
+54ST 120772
+54SX 120773
+5reF 120774
+5ree 120775
+5riO 120776
+5ra/ 120777
+5reW 120778
+5oyy 120779
+5reg 120780
+5ra4 120781
+5riR 120782
+5rem 120783
+5red 120784
+5raq 120785
+5reZ 120786
+5rar 120787
+5riM 120788
+5oK7 120789
+5oKx 120790
+5oOd 120791
+5oOY 120792
+5oOG 120793
+5oOa 120794
+5oOH 120795
+5oOu 120796
+56qV 120797
+6LCM 120798
+5omI 120799
+55qy 120800
+6LCR 120801
+6KOG 120802
+6KK3 120803
+6KOJ 120804
+6LCS 120805
+6LCU 120806
+6LCV 120807
+6LCW 120808
+6LCX 120809
+6LCZ 120810
+6LCd 120811
+6YCv 120812
+6YO/ 120813
+6ZqI 120814
+57Kc 120815
+6ZqN 120816
+6ZqX 120817
+5amK 120818
+5ai8 120819
+5ami 120820
+5am1 120821
+6IOs 120822
+6KKI 120823
+57+M 120824
+5oG/ 120825
+5qy4 120826
+57ur 120827
+6aqQ 120828
+57uv 120829
+57ux 120830
+6aqS 120831
+57uy 120832
+6aqT 120833
+57u2 120834
+57u6 120835
+57u7 120836
+57u+ 120837
+6aqW 120838
+57yB 120839
+6ICg 120840
+55Cr 120841
+55C1 120842
+55C2 120843
+55Cl 120844
+55Co 120845
+55Cw 120846
+55Cu 120847
+55Cv 120848
+55Cs 120849
+55Ca 120850
+6L6H 120851
+6byL 120852
+5o+z 120853
+5aCe 120854
+5pC9 120855
+5o+4 120856
+5o+g 120857
+5aCZ 120858
+6LaE 120859
+5o+W 120860
+6aKJ 120861
+5aGE 120862
+5o+/ 120863
+6ICL 120864
+5o+E 120865
+6Jup 120866
+6Juw 120867
+5aGG 120868
+5pGS 120869
+5o+G 120870
+5o6+ 120871
+6IGS 120872
+6JGR 120873
+6JGa 120874
+6Z2w 120875
+6Z24 120876
+6JGz 120877
+6JG6 120878
+6JG4 120879
+6JC8 120880
+6JG2 120881
+6JKM 120882
+6JGt 120883
+5qWu 120884
+5qO8 120885
+5qSf 120886
+5qO5 120887
+5qSk 120888
+5qOw 120889
+6LWN 120890
+5qSL 120891
+5qSB 120892
+5qSq 120893
+5qSQ 120894
+6bmB 120895
+6YWk 120896
+6YWi 120897
+6YWh 120898
+6bmC 120899
+5q6a 120900
+5q6b 120901
+6Zux 120902
+6L6L 120903
+5qSg 120904
+6L6O 120905
+552E 120906
+552H 120907
+552D 120908
+5oii 120909
+5ZaL 120910
+5ZeS 120911
+5ZaD 120912
+5Zax 120913
+5Za5 120914
+5pm3 120915
+5ZaI 120916
+6LeW 120917
+6LeX 120918
+6Lee 120919
+6Lea 120920
+6LeO 120921
+6LeP 120922
+6LeG 120923
+6Jux 120924
+6Juy 120925
+6Jut 120926
+6Juz 120927
+6JuQ 120928
+6JuU 120929
+6Jue 120930
+6Ju0 120931
+6JuY 120932
+5ZaB 120933
+5Zaf 120934
+5ZW+ 120935
+5ZeW 120936
+5ZaR 120937
+5Zef 120938
+5Zee 120939
+5ZaZ 120940
+5bWY 120941
+5bWW 120942
+5bS0 120943
+6YGE 120944
+6KmI 120945
+5bWO 120946
+5bWs 120947
+5bWb 120948
+5bWv 120949
+5bWd 120950
+5bWr 120951
+5bmE 120952
+5bWL 120953
+6LWV 120954
+6ZO7 120955
+6ZO8 120956
+6ZO/ 120957
+6ZSD 120958
+6ZSG 120959
+6ZSH 120960
+6ZSJ 120961
+6ZSP 120962
+6ZSR 120963
+6ZSS 120964
+6ZSU 120965
+6ZSV 120966
+5o6j 120967
+55+s 120968
+5rCw 120969
+5q+z 120970
+5q+9 120971
+54qK 120972
+54qE 120973
+54qL 120974
+6bmE 120975
+54qN 120976
+5bWH 120977
+6buN 120978
+56iD 120979
+56iC 120980
+562a 120981
+5621 120982
+562M 120983
+5YKj 120984
+5YKI 120985
+6IiE 120986
+54mN 120987
+5YKl 120988
+5YKn 120989
+6YGR 120990
+5YKp 120991
+5b6o 120992
+5aqt 120993
+55Wy 120994
+5byR 120995
+57+V 120996
+6bmG 120997
+6IWI 120998
+6IWT 120999
+6IWG 121000
+6IW0 121001
+6IWa 121002
+6IWx 121003
+6bG/ 121004
+6bKA 121005
+6bKC 121006
+54yi 121007
+54y5 121008
+54yl 121009
+6aOT 121010
+6Kee 121011
+6Kea 121012
+54yx 121013
+6aKO 121014
+6aOn 121015
+6aaH 121016
+6aaK 121017
+5Lq1 121018
+6ISU 121019
+6KOS 121020
+55ej 121021
+55eo 121022
+55em 121023
+55ee 121024
+55ek 121025
+55en 121026
+6LWT 121027
+56um 121028
+55O/ 121029
+5ZW7 121030
+6aKP 121031
+6bmH 121032
+6ZiR 121033
+6ZiS 121034
+6ZiV 121035
+57Ke 121036
+6YGS 121037
+5a2z 121038
+54Sv 121039
+54Sc 121040
+54Sx 121041
+6bmI 121042
+5rir 121043
+5rmu 121044
+5rmO 121045
+5rmc 121046
+5rmN 121047
+5rmr 121048
+5rqy 121049
+5rmf 121050
+5rqG 121051
+5rmy 121052
+5rmU 121053
+5rmJ 121054
+5ril 121055
+5ruB 121056
+5oSg 121057
+5oO6 121058
+5oSm 121059
+5oO0 121060
+5oSA 121061
+5oSO 121062
+5oSU 121063
+5Za+ 121064
+5a+Q 121065
+6LCf 121066
+6KOi 121067
+6KOO 121068
+6KOl 121069
+56W+ 121070
+6LCg 121071
+6LCh 121072
+6LCl 121073
+6LCn 121074
+5a2x 121075
+5by8 121076
+5be9 121077
+6aqY 121078
+5aqq 121079
+5bev 121080
+57+a 121081
+55q0 121082
+6aqb 121083
+57yC 121084
+57yD 121085
+57yE 121086
+5b2Y 121087
+57yH 121088
+57yI 121089
+57yM 121090
+57yR 121091
+57yS 121092
+57yX 121093
+6aOo 121094
+6ICi 121095
+55GB 121096
+55GX 121097
+55GE 121098
+6YGo 121099
+6aqc 121100
+6Z+r 121101
+6auh 121102
+5aGs 121103
+6YSi 121104
+6LaU 121105
+6LaR 121106
+5pGF 121107
+5pGB 121108
+6JyH 121109
+5pCL 121110
+5pCq 121111
+5pCQ 121112
+5pCb 121113
+5pCg 121114
+5pGI 121115
+5b2A 121116
+5q+C 121117
+5pCm 121118
+5pCh 121119
+6JOB 121120
+5oih 121121
+6JON 121122
+6YSe 121123
+6JOQ 121124
+6JOm 121125
+6bmL 121126
+6JK9 121127
+6JOW 121128
+6JOK 121129
+6JKv 121130
+6JOf 121131
+6JOR 121132
+6JK6 121133
+6JOg 121134
+6JKf 121135
+6JKh 121136
+6JK5 121137
+6JK0 121138
+6JKX 121139
+6JOl 121140
+5qWU 121141
+5qWC 121142
+5qWd 121143
+5qWr 121144
+5qW4 121145
+5qS0 121146
+5qeM 121147
+5qWv 121148
+55qZ 121149
+5qaI 121150
+5qeO 121151
+5qaJ 121152
+5qWm 121153
+5qWj 121154
+5qW5 121155
+5qS9 121156
+5Ym9 121157
+6YWp 121158
+6JyD 121159
+56Kb 121160
+56KT 121161
+56G8 121162
+56KJ 121163
+56Ka 121164
+56KH 121165
+56Kc 121166
+6bmM 121167
+6L6P 121168
+6b6D 121169
+6b6F 121170
+6Ki+ 121171
+57Ky 121172
+552a 121173
+5Zeq 121174
+6Z+q 121175
+5Ze3 121176
+5ZeJ 121177
+552o 121178
+552i 121179
+6ZuO 121180
+552l 121181
+5ZeR 121182
+5Zer 121183
+5Zes 121184
+5ZeU 121185
+5Zed 121186
+5oil 121187
+5ZeE 121188
+54Wm 121189
+5pqE 121190
+6YGi 121191
+5pqM 121192
+6Les 121193
+6Le2 121194
+6Le4 121195
+6LeQ 121196
+6Lej 121197
+6Le5 121198
+6Ju4 121199
+6JyK 121200
+6JyN 121201
+6JyJ 121202
+6Jyj 121203
+55W5 121204
+6Ju5 121205
+5Zel 121206
+5Zey 121207
+5Zez 121208
+5ZeM 121209
+5ZeN 121210
+5ZeQ 121211
+5Zek 121212
+5Ze1 121213
+572o 121214
+5bWK 121215
+5bW0 121216
+6aqw 121217
+6ZSX 121218
+6ZSb 121219
+6ZSc 121220
+6ZSd 121221
+6ZSe 121222
+6ZSf 121223
+6ZSi 121224
+6ZSo 121225
+6ZSp 121226
+6ZSt 121227
+6ZSx 121228
+6ZuJ 121229
+5rCy 121230
+54qP 121231
+5q2D 121232
+56ie 121233
+56iX 121234
+56iU 121235
+562g 121236
+562i 121237
+562u 121238
+562y 121239
+54mS 121240
+5pWr 121241
+5b6t 121242
+5oSG 121243
+6ImE 121244
+6KeO 121245
+5q+5 121246
+6LKK 121247
+6LKF 121248
+6LKJ 121249
+6aKU 121250
+6IWg 121251
+6IWp 121252
+6IW8 121253
+6IWt 121254
+6IWn 121255
+5aGN 121256
+5aq1 121257
+6bKF 121258
+6bKG 121259
+6bKH 121260
+6bKI 121261
+6bKL 121262
+6bKQ 121263
+6IKE 121264
+6bmQ 121265
+6aOV 121266
+6Kel 121267
+6YGb 121268
+6aaQ 121269
+6bmR 121270
+5Lq2 121271
+55iD 121272
+55ex 121273
+55e8 121274
+55e/ 121275
+55iQ 121276
+55iB 121277
+55iG 121278
+6bqC 121279
+5q2G 121280
+5peS 121281
+6ZiW 121282
+6ZiX 121283
+576n 121284
+6LGi 121285
+57Kz 121286
+54y3 121287
+54Wz 121288
+54Wo 121289
+54WF 121290
+54WK 121291
+54W4 121292
+54W6 121293
+5ruf 121294
+5rqx 121295
+5rqY 121296
+5ryt 121297
+5rui 121298
+5rql 121299
+5rq9 121300
+6KOf 121301
+5rq7 121302
+5rq3 121303
+5ruX 121304
+5rur 121305
+5rq0 121306
+5ruP 121307
+5ruD 121308
+5rum 121309
+5rqP 121310
+5ruC 121311
+5ruT 121312
+5rqf 121313
+5ruq 121314
+5oSr 121315
+5oWK 121316
+6bKO 121317
+6aqe 121318
+56qg 121319
+56qj 121320
+6KOx 121321
+6KOo 121322
+6KO+ 121323
+6KOw 121324
+56aK 121325
+6LCp 121326
+6LCq 121327
+5aq+ 121328
+5aur 121329
+5aqy 121330
+5auS 121331
+5auU 121332
+5aq4 121333
+57yZ 121334
+57yc 121335
+57yb 121336
+6L6U 121337
+6aqd 121338
+57yf 121339
+57yh 121340
+57yi 121341
+57yj 121342
+6aqf 121343
+6ICl 121344
+55KI 121345
+55Gt 121346
+542S 121347
+6KeP 121348
+5oWd 121349
+5aug 121350
+5Y+G 121351
+5pG9 121352
+5aKB 121353
+5pKC 121354
+5pGe 121355
+5pKE 121356
+57+l 121357
+6LiF 121358
+5pGt 121359
+5aKJ 121360
+5aKS 121361
+5qaW 121362
+57am 121363
+6JSr 121364
+6JS3 121365
+6Z26 121366
+6Z28 121367
+6Z6F 121368
+6Z2/ 121369
+55SN 121370
+6JS4 121371
+6JSf 121372
+6JS6 121373
+5ois 121374
+6JWW 121375
+6JS7 121376
+6JO/ 121377
+5pah 121378
+6bmV 121379
+6JO8 121380
+5qab 121381
+5qan 121382
+5qar 121383
+5qat 121384
+5qeU 121385
+5qax 121386
+5qeB 121387
+5qeg 121388
+5qa3 121389
+5YOw 121390
+6YW9 121391
+6YW5 121392
+56Kh 121393
+56K0 121394
+56Kj 121395
+56Ky 121396
+6Ien 121397
+6LGo 121398
+5q6h 121399
+6ZyB 121400
+6Jya 121401
+6b6H 121402
+6b6I 121403
+5IE= 121404
+5IGW 121405
+5529 121406
+5Zie 121407
+5ZiI 121408
+5ZiM 121409
+5ZiB 121410
+5pqd 121411
+6LiM 121412
+6LiJ 121413
+6Jye 121414
+6Jyl 121415
+6Jyu 121416
+6J2I 121417
+6Jy0 121418
+6Jyx 121419
+6Jyp 121420
+6Jy3 121421
+6Jy/ 121422
+6J6C 121423
+6Jyi 121424
+5Zih 121425
+6bmX 121426
+5Zij 121427
+5Zik 121428
+5Zia 121429
+5Ze+ 121430
+5Zin 121431
+5720 121432
+572x 121433
+5bmU 121434
+5baC 121435
+5bmb 121436
+6LWZ 121437
+572C 121438
+6aq3 121439
+6aq2 121440
+6bmY 121441
+6ZSy 121442
+6ZS0 121443
+6ZS2 121444
+6ZS3 121445
+6ZS4 121446
+6ZS1 121447
+6ZWC 121448
+54qS 121449
+566Q 121450
+566m 121451
+566n 121452
+5664 121453
+566s 121454
+566F 121455
+566q 121456
+566c 121457
+566i 121458
+566T 121459
+5YOW 121460
+5YSG 121461
+5YOz 121462
+5YOt 121463
+5YqB 121464
+5YOu 121465
+6a2D 121466
+6a2G 121467
+552+ 121468
+6ImL 121469
+6YSx 121470
+6IaI 121471
+6IaR 121472
+6bKR 121473
+6bKU 121474
+6bKa 121475
+6bKb 121476
+6bKf 121477
+542Q 121478
+6Ker 121479
+6ZuS 121480
+5aSk 121481
+6aaR 121482
+6Yqu 121483
+5aG+ 121484
+55iM 121485
+55iK 121486
+55iY 121487
+55iZ 121488
+5peW 121489
+6IaC 121490
+6Zia 121491
+6YSv 121492
+6bKe 121493
+57K/ 121494
+57K8 121495
+57OB 121496
+5qeK 121497
+6bma 121498
+54aY 121499
+54al 121500
+5r2i 121501
+5ryV 121502
+5ru5 121503
+5ryv 121504
+5ry2 121505
+5r2L 121506
+5r20 121507
+5ryq 121508
+5ryJ 121509
+5ryp 121510
+5r6J 121511
+5oW1 121512
+5pC0 121513
+56qo 121514
+5a+k 121515
+57au 121516
+6LCu 121517
+6KSh 121518
+6KSZ 121519
+6KST 121520
+6KSb 121521
+6KSK 121522
+6LCv 121523
+6LCw 121524
+6LCy 121525
+5bGj 121526
+6bmb 121527
+5aux 121528
+5auW 121529
+5aum 121530
+5aua 121531
+5auY 121532
+6byQ 121533
+556A 121534
+6bmc 121535
+6aqg 121536
+57yl 121537
+57ym 121538
+57yn 121539
+57yo 121540
+6aqi 121541
+57yr 121542
+6ICm 121543
+6ICn 121544
+55Kc 121545
+55KO 121546
+55KB 121547
+5aWt 121548
+6auv 121549
+6aur 121550
+5pK3 121551
+5pKF 121552
+6LWt 121553
+5pK4 121554
+6YuG 121555
+5pKZ 121556
+5pK6 121557
+5aKA 121558
+6IGp 121559
+6KeQ 121560
+6Z6R 121561
+6JWZ 121562
+6Z6S 121563
+6JWI 121564
+6JWo 121565
+6JWk 121566
+6JWe 121567
+6JW6 121568
+556i 121569
+6JWD 121570
+6JWy 121571
+6LWc 121572
+5qe/ 121573
+5qiv 121574
+5qet 121575
+5qiX 121576
+5qiY 121577
+5qey 121578
+6YaM 121579
+6YaF 121580
+6Z2l 121581
+6a2H 121582
+6aSN 121583
+56OU 121584
+56OZ 121585
+6ZyI 121586
+6L6Y 121587
+6b6J 121588
+6b6K 121589
+6KeR 121590
+556M 121591
+556L 121592
+556R 121593
+5Zit 121594
+5ZmO 121595
+5Zm2 121596
+6aKZ 121597
+5pq5 121598
+5ZmY 121599
+6LiU 121600
+6Lid 121601
+6Lif 121602
+6LiS 121603
+6Lis 121604
+6Liu 121605
+6Liv 121606
+6Li6 121607
+6Lie 121608
+6J29 121609
+6J2+ 121610
+6J27 121611
+6J2w 121612
+6J2u 121613
+6J6L 121614
+6J2T 121615
+6J2j 121616
+6J28 121617
+5Zis 121618
+6aKa 121619
+5ZmN 121620
+5ZmZ 121621
+5ZmM 121622
+5ZmU 121623
+6aKb 121624
+5bme 121625
+5bmh 121626
+5baZ 121627
+5bad 121628
+6aq6 121629
+6ZWK 121630
+6ZWJ 121631
+6ZWM 121632
+6ZWP 121633
+6ZWS 121634
+6ZWT 121635
+6ZWU 121636
+56i3 121637
+5660 121638
+56+R 121639
+56+B 121640
+56+M 121641
+54mW 121642
+5YSL 121643
+6Jmi 121644
+6bme 121645
+6IaY 121646
+6bKg 121647
+6bKh 121648
+6bKi 121649
+6bKj 121650
+6bKl 121651
+6bKn 121652
+6bKp 121653
+542X 121654
+542g 121655
+6Kev 121656
+6aaT 121657
+6aaU 121658
+6bq+ 121659
+5bub 121660
+55ib 121661
+55i8 121662
+55ii 121663
+55ig 121664
+6b2R 121665
+576w 121666
+8KW7 121667
+8KW7lw== 121668
+57OM 121669
+57ON 121670
+57OF 121671
+54ac 121672
+54a1 121673
+5r6N 121674
+5r6M 121675
+5r24 121676
+5r2m 121677
+5r2y 121678
+6YuI 121679
+5r2f 121680
+5r26 121681
+5a+u 121682
+56qz 121683
+6LCz 121684
+6KS0 121685
+6KSf 121686
+6KSr 121687
+6LC1 121688
+54ao 121689
+5bGm 121690
+5Yuw 121691
+5oiu 121692
+6J2l 121693
+57ys 121694
+57yu 121695
+57yv 121696
+6aqj 121697
+55W/ 121698
+6ICp 121699
+6ICo 121700
+6ICq 121701
+55Kf 121702
+6Z2b 121703
+55Kg 121704
+55KY 121705
+6IGx 121706
+6J6v 121707
+6au7 121708
+6aut 121709
+6au5 121710
+5pOA 121711
+55SP 121712
+5pOe 121713
+57ig 121714
+56Os 121715
+6aKe 121716
+6JW7 121717
+6aKf 121718
+6Jak 121719
+6Jao 121720
+5qqg 121721
+6JaP 121722
+6Jau 121723
+6Jac 121724
+6JaF 121725
+5qi+ 121726
+5qmb 121727
+5qmH 121728
+5qi1 121729
+5qqO 121730
+5qm5 121731
+5qi9 121732
+5qio 121733
+5qm8 121734
+5aK8 121735
+5qmQ 121736
+57+u 121737
+6YaQ 121738
+6YaN 121739
+6Yaa 121740
+56Oy 121741
+6LWd 121742
+5q6q 121743
+6ZyP 121744
+6Yy+ 121745
+6L6a 121746
+6YG9 121747
+5rCF 121748
+556f 121749
+556g 121750
+556w 121751
+5ZqE 121752
+5ZqG 121753
+5Zmk 121754
+5pq+ 121755
+6LmA 121756
+6Li1 121757
+6Li9 121758
+6LmJ 121759
+6LmB 121760
+6J6o 121761
+6J6I 121762
+6J6F 121763
+6J6t 121764
+6J6g 121765
+6J6f 121766
+5Zmx 121767
+5Zmr 121768
+5Zm7 121769
+5Zm8 121770
+5725 121771
+5Zyc 121772
+5KY= 121773
+5KaD 121774
+6ZWX 121775
+6ZWY 121776
+6ZWa 121777
+6ZWb 121778
+6ZWd 121779
+6ZWe 121780
+6ZWg 121781
+5rCH 121782
+5rCG 121783
+56mR 121784
+56+d 121785
+56+l 121786
+56+m 121787
+56+q 121788
+56+Z 121789
+55ul 121790
+5YqT 121791
+57+x 121792
+6a2J 121793
+6a2I 121794
+5b68 121795
+5q2Z 121796
+6Iam 121797
+6IaZ 121798
+6bKu 121799
+6bKx 121800
+6bKz 121801
+6bK0 121802
+6bK1 121803
+6bK3 121804
+6bK7 121805
+5420 121806
+542t 121807
+542s 121808
+6YKC 121809
+6bmn 121810
+5buo 121811
+6LWf 121812
+55iw 121813
+5buq 121814
+55i/ 121815
+55i1 121816
+55i0 121817
+55mD 121818
+55iz 121819
+6bqH 121820
+6bqI 121821
+5ay0 121822
+5aOF 121823
+57OX 121824
+55SR 121825
+54eO 121826
+54eg 121827
+54eU 121828
+54en 121829
+5r+R 121830
+5r+J 121831
+5r2e 121832
+5r6n 121833
+5r65 121834
+5r6l 121835
+5r62 121836
+5r+C 121837
+6KSw 121838
+56q4 121839
+5ayW 121840
+54qf 121841
+6Zqw 121842
+5ayX 121843
+6aKh 121844
+57yx 121845
+57yy 121846
+57yz 121847
+55Kp 121848
+55Kq 121849
+6J6r 121850
+5pOk 121851
+5aOV 121852
+6Kez 121853
+572E 121854
+5pOi 121855
+6Ja5 121856
+6Z6h 121857
+6Z6s 121858
+6Ja3 121859
+6JeT 121860
+6JeB 121861
+5qqE 121862
+5qqp 121863
+5oeL 121864
+6Yai 121865
+57+z 121866
+56SF 121867
+56O0 121868
+6bmp 121869
+6b6L 121870
+6b6M 121871
+6LGz 121872
+5aOR 121873
+6bu7 121874
+5ZqP 121875
+5ZqF 121876
+6LmR 121877
+6LmS 121878
+6LmK 121879
+6J+l 121880
+6J6s 121881
+6J61 121882
+55aD 121883
+6J6z 121884
+6J+R 121885
+5ZqT 121886
+5729 121887
+572+ 121888
+5ba3 121889
+6buc 121890
+6bud 121891
+6auB 121892
+6auA 121893
+6ZWh 121894
+6ZWi 121895
+6ZWj 121896
+6ZWm 121897
+6ZWn 121898
+6ZWp 121899
+6ZWq 121900
+6ZWr 121901
+572F 121902
+57CM 121903
+56++ 121904
+56+8 121905
+57CW 121906
+57CL 121907
+6byi 121908
+5YSh 121909
+6bmq 121910
+6by+ 121911
+55qk 121912
+6a2N 121913
+6b6g 121914
+57mH 121915
+6LKY 121916
+6YKI 121917
+6LKU 121918
+6IeM 121919
+6Ia7 121920
+6IeG 121921
+6IeD 121922
+6bK8 121923
+6bK9 121924
+6bOA 121925
+6bOD 121926
+6bOF 121927
+6bOH 121928
+6bOK 121929
+6J69 121930
+54eu 121931
+6bmr 121932
+57Oc 121933
+57i7 121934
+55mN 121935
+6bqL 121936
+5oeR 121937
+5r+h 121938
+5r+u 121939
+5r+e 121940
+5r+g 121941
+5r+v 121942
+6LmH 121943
+6KyH 121944
+6YKD 121945
+6KWB 121946
+5qqX 121947
+5pOY 121948
+5a26 121949
+6Zqz 121950
+5ay3 121951
+6J+K 121952
+6bms 121953
+6Y2q 121954
+6Y+K 121955
+6ayI 121956
+6ayD 121957
+5569 121958
+6Z6v 121959
+6Z6o 121960
+6Z6r 121961
+6Z6n 121962
+6Z6j 121963
+6Jec 121964
+6Jeg 121965
+6Yaq 121966
+6LmZ 121967
+56ST 121968
+54e5 121969
+6aSu 121970
+556/ 121971
+5pub 121972
+6aKi 121973
+6LqH 121974
+6Lma 121975
+6J+b 121976
+6J+q 121977
+6J+g 121978
+6J+u 121979
+6bmu 121980
+6bug 121981
+6buf 121982
+6auF 121983
+6auC 121984
+6ZWs 121985
+6ZWt 121986
+6ZWv 121987
+6aal 121988
+57Cf 121989
+57Cq 121990
+6bys 121991
+6Zug 121992
+6Imf 121993
+6bOO 121994
+6bOP 121995
+6bOQ 121996
+55me 121997
+55mU 121998
+57Oo 121999
+6Lmp 122000
+6Y6P 122001
+6YKL 122002
+6ayP 122003
+5pSJ 122004
+6Z6y 122005
+6Z60 122006
+6Je/ 122007
+6Jin 122008
+6JiF 122009
+6Yau 122010
+6Yav 122011
+6YWD 122012
+6Zyq 122013
+6Zyt 122014
+6Zyo 122015
+6bu8 122016
+5Zqv 122017
+6Lmw 122018
+6Lm2 122019
+6Lm9 122020
+6Lm8 122021
+6Lm0 122022
+6Lm+ 122023
+6Lm/ 122024
+6KCW 122025
+6KCT 122026
+6J++ 122027
+6KCK 122028
+6bui 122029
+6auL 122030
+6auM 122031
+6ZWy 122032
+57GA 122033
+6b2B 122034
+6a2R 122035
+6Imo 122036
+6bOT 122037
+6bOU 122038
+6bOV 122039
+6bOX 122040
+6bOZ 122041
+6Y+W 122042
+5764 122043
+47iG 122044
+54Cj 122045
+54Cb 122046
+6KWm 122047
+6LC2 122048
+6KWe 122049
+6aql 122050
+57y1 122051
+55OS 122052
+5pSY 122053
+6Jip 122054
+6JiW 122055
+6Ya0 122056
+6Zyw 122057
+6YWG 122058
+55+N 122059
+6LqF 122060
+6byN 122061
+5beJ 122062
+6bup 122063
+6bul 122064
+6buq 122065
+6ZWz 122066
+6ZW0 122067
+6bun 122068
+57qC 122069
+55K6 122070
+6byv 122071
+6Iec 122072
+6bOc 122073
+6bOd 122074
+6bOf 122075
+542+ 122076
+5a2A 122077
+6aqn 122078
+55OY 122079
+6byZ 122080
+6Ya6 122081
+56S0 122082
+6aKm 122083
+5pup 122084
+6bOi 122085
+6bqd 122086
+5aSU 122087
+54id 122088
+54GP 122089
+56az 122090
+6ZC+ 122091
+5768 122092
+6KCh 122093
+6ICx 122094
+6bmz 122095
+5rCN 122096
+6aWV 122097
+6LqQ 122098
+6auR 122099
+6ZW1 122100
+56mw 122101
+6aWU 122102
+6ay7 122103
+6ayf 122104
+6Lax 122105
+5pSr 122106
+5pSl 122107
+6aKn 122108
+6Lqc 122109
+6by5 122110
+55mv 122111
+6KCy 122112
+6KC5 122113
+6Lqe 122114
+6KGi 122115
+54Ge 122116
+6KW7 122117
+57qb 122118
+6ayj 122119
+5pSu 122120
+5ZuU 122121
+6aaV 122122
+5oiG 122123
+54io 122124
+6b2J 122125
+5LqN 122126
+5bCi 122127
+5b2z 122128
+5Y2s 122129
+5q6z 122130
+8KCZtg== 122131
+5q+M 122132
+6YKY 122133
+5oiL 122134
+5Zyi 122135
+5rCV 122136
+5LyL 122137
+5Lud 122138
+5Yau 122139
+5rC/ 122140
+5rGI 122141
+5rC+ 122142
+5b+J 122143
+5a6E 122144
+8KyjmQ== 122145
+6K6x 122146
+5ome 122147
+5Zyy 122148
+5Zyr 122149
+6IqP 122150
+6IqD 122151
+5pyz 122152
+5py4 122153
+8KiZ 122154
+8KiZuA== 122155
+6YKo 122156
+5ZCS 122157
+5ZCW 122158
+5bG8 122159
+5bG+ 122160
+6L6/ 122161
+6ZKG 122162
+5Luz 122163
+5Lyj 122164
+5LyI 122165
+55m/ 122166
+55Sq 122167
+6YKg 122168
+54q0 122169
+5Yax 122170
+6YKh 122171
+8KyHlQ== 122172
+5rGL 122173
+5Jw= 122174
+5Jyj 122175
+6K67 122176
+8Kyjng== 122177
+5a2W 122178
+8KyYkw== 122179
+57qp 122180
+546S 122181
+546T 122182
+546Y 122183
+546a 122184
+5Yis 122185
+8Kutnw== 122186
+5Z2c 122187
+5Z2J 122188
+5om9 122189
+8Kutog== 122190
+5Z2L 122191
+5om6 122192
+46eR 122193
+5q+Q 122194
+6Iqw 122195
+6Iqj 122196
+6IuK 122197
+6IuJ 122198
+6IqY 122199
+6Iq0 122200
+6Iqg 122201
+8KuH 122202
+8KuHrQ== 122203
+6Iqk 122204
+5p2V 122205
+5p2Z 122206
+5p2E 122207
+5p2n 122208
+5p2p 122209
+5bCq 122210
+5bCo 122211
+6L2q 122212
+8KuQhA== 122213
+5Z2S 122214
+6IqI 122215
+5pe0 122216
+5pe1 122217
+5ZGZ 122218
+45U= 122219
+45Wu 122220
+5bKN 122221
+8Ku1 122222
+8Ku1tw== 122223
+5bKg 122224
+5bKc 122225
+5ZGH 122226
+5YaP 122227
+6KeD 122228
+5bKZ 122229
+5Ly+ 122230
+45GH 122231
+5Lyt 122232
+5L2W 122233
+5Lyy 122234
+5L2B 122235
+6aOP 122236
+54uD 122237
+6Ze2 122238
+5rGn 122239
+5rGr 122240
+8KOymA== 122241
+8KOylw== 122242
+5rKE 122243
+5rKY 122244
+8KyHmQ== 122245
+5rGt 122246
+47OH 122247
+5rKH 122248
+5b+u 122249
+5b+z 122250
+5b+6 122251
+8KyjoQ== 122252
+56WD 122253
+6K+H 122254
+6YKy 122255
+6K+O 122256
+6K+Q 122257
+5bGD 122258
+8Ku4 122259
+8Ku4qQ== 122260
+5bKK 122261
+6Zi9 122262
+5KK6 122263
+6Zi8 122264
+5aan 122265
+5aaY 122266
+8Kia 122267
+8KialQ== 122268
+57qu 122269
+6amy 122270
+8KuYnA== 122271
+57q7 122272
+8KyYmA== 122273
+8KuYnQ== 122274
+57q8 122275
+546k 122276
+546e 122277
+546x 122278
+546f 122279
+6YK9 122280
+6YK/ 122281
+5Z2l 122282
+5Z2w 122283
+5Z2s 122284
+5Z29 122285
+5byG 122286
+6IC1 122287
+5KK8 122288
+8Kat 122289
+8KatnA== 122290
+6IyL 122291
+6Iun 122292
+6Iu+ 122293
+6Iug 122294
+5p6F 122295
+462O 122296
+5p6Y 122297
+5p6N 122298
+55+8 122299
+55+7 122300
+5Yy8 122301
+8Kyogg== 122302
+8KyAqQ== 122303
+8KyAqg== 122304
+5pe/ 122305
+5piE 122306
+5piS 122307
+5piI 122308
+5ZKJ 122309
+5ZKH 122310
+5ZKN 122311
+5bK1 122312
+5bK9 122313
+5bKo 122314
+5bKe 122315
+5bOC 122316
+458= 122317
+45+D 122318
+5Zu3 122319
+8KysqQ== 122320
+6ZKQ 122321
+6ZKU 122322
+6ZKW 122323
+54ml 122324
+5L20 122325
+5Z6I 122326
+5L6B 122327
+5L65 122328
+5L24 122329
+5L26 122330
+6Zq5 122331
+45GK 122332
+5L6C 122333
+5L29 122334
+5L6Y 122335
+6YOI 122336
+6Iig 122337
+6YOQ 122338
+6YOD 122339
+5pS9 122340
+6IKt 122341
+6IK4 122342
+6IK3 122343
+54uJ 122344
+54ud 122345
+6aWz 122346
+5b+e 122347
+54KM 122348
+54KG 122349
+5rOZ 122350
+5rK6 122351
+5rOC 122352
+5rOc 122353
+5rOD 122354
+5rOH 122355
+5oCK 122356
+5bOD 122357
+56m4 122358
+56WL 122359
+56WK 122360
+8KuNow== 122361
+8Kyjsw== 122362
+8KypvQ== 122363
+6bik 122364
+5byi 122365
+5byo 122366
+6ZmR 122367
+8Kyuvw== 122368
+6ZmO 122369
+8KyvgA== 122370
+5Y26 122371
+5Lm4 122372
+5aat 122373
+5aeI 122374
+8Kuw 122375
+8Kuwmw== 122376
+6L+z 122377
+5Y+V 122378
+8KyztQ== 122379
+6am1 122380
+8Kyztg== 122381
+5Iw= 122382
+5Iy5 122383
+6am6 122384
+8Kugig== 122385
+57uL 122386
+57uQ 122387
+56CJ 122388
+6ICU 122389
+45uD 122390
+5462 122391
+54+H 122392
+54+F 122393
+8KyNmw== 122394
+54+L 122395
+5465 122396
+54+M 122397
+546/ 122398
+6Z+o 122399
+5Z6a 122400
+5Z6v 122401
+5Z6Z 122402
+5Z6y 122403
+5Z+P 122404
+5Z6N 122405
+6ICH 122406
+6b+N 122407
+5Z6O 122408
+5Z60 122409
+5Z6f 122410
+5Z6e 122411
+5oyT 122412
+5Z61 122413
+5Z6P 122414
+5ou2 122415
+6I2W 122416
+6I2B 122417
+6I2Z 122418
+6I2b 122419
+6IyI 122420
+6Iy9 122421
+6I2E 122422
+6Iy6 122423
+8KycrA== 122424
+6I2T 122425
+6Iyz 122426
+8Kaw 122427
+8KawoQ== 122428
+6Iyb 122429
+6I2t 122430
+462V 122431
+5p+3 122432
+5p+D 122433
+5p+K 122434
+5p65 122435
+5qCQ 122436
+5p+W 122437
+6YOa 122438
+5YmF 122439
+5LST 122440
+6L+6 122441
+5Y6W 122442
+56CG 122443
+56CR 122444
+56CE 122445
+6ICP 122446
+5aWT 122447
+5LY= 122448
+5Lau 122449
+6L21 122450
+6L23 122451
+6L25 122452
+6L26 122453
+5pi6 122454
+8Kq+ 122455
+8Kq+og== 122456
+5pi9 122457
+55u3 122458
+5ZKh 122459
+5ZK6 122460
+5piz 122461
+5pij 122462
+5pik 122463
+5pir 122464
+5pih 122465
+5ZKl 122466
+5piq 122467
+6Jm3 122468
+6Jm4 122469
+5ZOD 122470
+5bOY 122471
+6ICR 122472
+5bOb 122473
+8KqosA== 122474
+5bOX 122475
+5bOn 122476
+5bih 122477
+6ZKY 122478
+8KuTpw== 122479
+6ZKc 122480
+8Kysrg== 122481
+8KyssQ== 122482
+8KysrQ== 122483
+6ZKq 122484
+6ZKs 122485
+6ZKt 122486
+55+n 122487
+56es 122488
+5L+r 122489
+6IiB 122490
+5L+c 122491
+5L+Z 122492
+5L+N 122493
+5Z6V 122494
+6KGO 122495
+6Iij 122496
+5byH 122497
+5L60 122498
+6bin 122499
+5I+h 122500
+6IOg 122501
+8KaZtg== 122502
+6IOI 122503
+6IOp 122504
+6IOj 122505
+5pyP 122506
+6aOQ 122507
+6KiE 122508
+6aW7 122509
+5bqk 122510
+55ai 122511
+54Kj 122512
+54Kf 122513
+47Y= 122514
+47ay 122515
+5rSt 122516
+5rSY 122517
+5rST 122518
+5rS/ 122519
+47Oa 122520
+5rOa 122521
+5rWI 122522
+5rWJ 122523
+5rS4 122524
+5rSR 122525
+5rSi 122526
+5rSI 122527
+5rSa 122528
+5rS6 122529
+5rSo 122530
+5rWQ 122531
+47OY 122532
+5rS0 122533
+5rSj 122534
+5oGU 122535
+5a6s 122536
+56qA 122537
+5omC 122538
+6KKG 122539
+56WP 122540
+56WQ 122541
+56WV 122542
+5Y+a 122543
+6Zmn 122544
+6Zme 122545
+5aiA 122546
+5aee 122547
+5aex 122548
+5aek 122549
+5ae2 122550
+5ae9 122551
+5p6y 122552
+57uW 122553
+6aqD 122554
+8KyYoQ== 122555
+8KyzvQ== 122556
+8KyYqQ== 122557
+8KuEpw== 122558
+5b2W 122559
+6aqJ 122560
+5oGd 122561
+54+q 122562
+54+b 122563
+54+5 122564
+55CK 122565
+5468 122566
+54+W 122567
+8Kqf 122568
+8KqfnQ== 122569
+54+9 122570
+54+m 122571
+54+r 122572
+54+S 122573
+8KyNpA== 122574
+54+i 122575
+54+V 122576
+54+d 122577
+8KutvA== 122578
+5Z+X 122579
+5Z6+ 122580
+5Z66 122581
+5Z+G 122582
+5Z6/ 122583
+5Z+M 122584
+5Z+H 122585
+6I6w 122586
+6Iyd 122587
+8Kycrw== 122588
+6YSA 122589
+6I62 122590
+6I6d 122591
+5JOW 122592
+6I6Z 122593
+5qC7 122594
+5qGg 122595
+8KyC 122596
+8KyCqQ== 122597
+5qGE 122598
+5qKg 122599
+5qC0 122600
+5qK0 122601
+5qCS 122602
+6YWO 122603
+6YWP 122604
+8Kughg== 122605
+56C1 122606
+56Cg 122607
+56Cr 122608
+56Cs 122609
+56GB 122610
+5oGn 122611
+57+D 122612
+6YOq 122613
+8KiQ 122614
+8KiQiA== 122615
+6L6A 122616
+6L6B 122617
+8KyM 122618
+8KyMlw== 122619
+5YmV 122620
+6LWA 122621
+5ZOi 122622
+5pmF 122623
+5pmK 122624
+5ZSd 122625
+5ZOz 122626
+5ZOx 122627
+5YaU 122628
+5pmU 122629
+5pmQ 122630
+55WW 122631
+6JqE 122632
+6JqG 122633
+8KuR 122634
+8KuRoQ== 122635
+5bix 122636
+5bSB 122637
+5bO/ 122638
+8Kqotg== 122639
+5bSE 122640
+5bio 122641
+5bSA 122642
+6LWG 122643
+8KysuA== 122644
+6ZK3 122645
+8Kysuw== 122646
+8KysuQ== 122647
+8Kysvw== 122648
+8KytgQ== 122649
+55ya 122650
+55Sh 122651
+56yr 122652
+5YC7 122653
+5YC0 122654
+6ISp 122655
+5YCu 122656
+5YCV 122657
+5YCe 122658
+8Kui 122659
+8KuiuA== 122660
+5YCT 122661
+5YCn 122662
+6KGD 122663
+6JmS 122664
+6Iit 122665
+6Iiv 122666
+6Iil 122667
+55Oe 122668
+6ayv 122669
+6biw 122670
+6ISO 122671
+5pyT 122672
+6IOy 122673
+6JmT 122674
+6bG9 122675
+54u0 122676
+5bOx 122677
+54u7 122678
+55yi 122679
+8KuXpw== 122680
+5YuN 122681
+55eE 122682
+55aw 122683
+55eD 122684
+56uY 122685
+576W 122686
+576T 122687
+5qGK 122688
+5pWJ 122689
+54Og 122690
+54OU 122691
+54O2 122692
+54O7 122693
+8KyKiA== 122694
+5raN 122695
+5rWh 122696
+5rWt 122697
+5rWs 122698
+5raE 122699
+5rai 122700
+5raQ 122701
+5rWw 122702
+5rWf 122703
+5rWb 122704
+5rW8 122705
+5rWy 122706
+5raY 122707
+5oKI 122708
+5oKD 122709
+5oKi 122710
+8KySiA== 122711
+5a6n 122712
+56qF 122713
+56qK 122714
+56qO 122715
+5omF 122716
+5omG 122717
+6KKq 122718
+6KKX 122719
+6KKv 122720
+56Wn 122721
+6Zq6 122722
+5aCy 122723
+55aN 122724
+8Ki6 122725
+8Ki6mQ== 122726
+6Zm0 122727
+54Od 122728
+56Cu 122729
+45ua 122730
+5ZO/ 122731
+57+A 122732
+57+C 122733
+5Ymf 122734
+8Kyzvw== 122735
+8KuEqA== 122736
+57uk 122737
+6aqN 122738
+8KyYqw== 122739
+5II= 122740
+5IKu 122741
+55CO 122742
+54+4 122743
+54+1 122744
+55CE 122745
+55CI 122746
+55CA 122747
+54+6 122748
+5o6t 122749
+5aCO 122750
+5aCQ 122751
+5Z+8 122752
+5o6O 122753
+5Z+r 122754
+5aCM 122755
+5pmi 122756
+8Kuu 122757
+8Kuugw== 122758
+5o6e 122759
+5Z+q 122760
+5aO4 122761
+45mN 122762
+6IGN 122763
+6I+d 122764
+6JCa 122765
+6I+l 122766
+6I6/ 122767
+5JOr 122768
+5Yua 122769
+5JOs 122770
+6JCG 122771
+6I+C 122772
+6I+N 122773
+6I+8 122774
+6JCj 122775
+5JOo 122776
+6I+J 122777
+5JOb 122778
+5qK8 122779
+5qK9 122780
+5qGy 122781
+5qK+ 122782
+5qGv 122783
+5qKj 122784
+5qKM 122785
+5qG5 122786
+5pWU 122787
+5Y6j 122788
+56GU 122789
+6b+O 122790
+56GZ 122791
+56Ga 122792
+56GK 122793
+56GN 122794
+5YuU 122795
+5LSV 122796
+6b6B 122797
+6YC0 122798
+5ZSq 122799
+5ZWr 122800
+57+I 122801
+46s= 122802
+46uw 122803
+5pmZ 122804
+55Wk 122805
+8Kyxlg== 122806
+6La8 122807
+6LeC 122808
+6JuD 122809
+6Jqy 122810
+8KyfvQ== 122811
+6Jq6 122812
+5ZW0 122813
+5I6D 122814
+5bSn 122815
+5bSf 122816
+5bSe 122817
+5bSS 122818
+5bSM 122819
+5bSh 122820
+6ZOP 122821
+8KuTrw== 122822
+8KufuQ== 122823
+6ZOV 122824
+8KufvA== 122825
+6ZOW 122826
+6ZOY 122827
+6ZOa 122828
+6ZOe 122829
+6ZOl 122830
+6ZO0 122831
+54m7 122832
+54m/ 122833
+56iG 122834
+56yx 122835
+56yv 122836
+5YGw 122837
+5YGh 122838
+6bi6 122839
+5YGt 122840
+5YGy 122841
+5YGB 122842
+478= 122843
+47+g 122844
+6YSF 122845
+5YGT 122846
+5b6b 122847
+6KGS 122848
+6Iiz 122849
+6Iiy 122850
+6bi8 122851
+5oKG 122852
+6YSD 122853
+55O7 122854
+5J0= 122855
+5J2Z 122856
+6IS2 122857
+6ISe 122858
+6ISf 122859
+5I+y 122860
+6bG+ 122861
+54yH 122862
+54yK 122863
+54yE 122864
+6KeW 122865
+8KCF 122866
+8KCFpA== 122867
+5bqx 122868
+5bq8 122869
+5bqz 122870
+55eT 122871
+5LSU 122872
+56ur 122873
+5aCD 122874
+6ZiM 122875
+576d 122876
+576V 122877
+54SG 122878
+54O6 122879
+54SM 122880
+5reP 122881
+8KyHuQ== 122882
+5ref 122883
+5rec 122884
+5re0 122885
+5rev 122886
+5rm0 122887
+5ra0 122888
+8KyNoQ== 122889
+46U= 122890
+46WE 122891
+5oOb 122892
+5oOU 122893
+5oKw 122894
+5oOZ 122895
+5a+B 122896
+6YCt 122897
+8Kykhw== 122898
+8KuNrw== 122899
+6KK8 122900
+6KOI 122901
+56Wy 122902
+8Kykig== 122903
+8KuNsg== 122904
+6LCe 122905
+6Im0 122906
+5by4 122907
+5by2 122908
+8Kyvjg== 122909
+6ZqD 122910
+5ame 122911
+5ai1 122912
+5am8 122913
+5aqW 122914
+5amz 122915
+5amN 122916
+5amM 122917
+5amr 122918
+5amk 122919
+5amY 122920
+5amg 122921
+8KyYrA== 122922
+8KyYrQ== 122923
+8Ky0gg== 122924
+8KuYpg== 122925
+57u5 122926
+8KufhQ== 122927
+8KyYrw== 122928
+6aqV 122929
+8KuYpw== 122930
+57Wc 122931
+54+3 122932
+55Cy 122933
+55Ch 122934
+55Cf 122935
+55CU 122936
+55Ct 122937
+5aC+ 122938
+5aC8 122939
+5o+V 122940
+45mY 122941
+5aCn 122942
+5ZaG 122943
+5aCo 122944
+5aGF 122945
+5aCg 122946
+57W3 122947
+8Kqj 122948
+8Kqjuw== 122949
+8KGO 122950
+8KGOmg== 122951
+6JGc 122952
+5oOO 122953
+6JCz 122954
+6JGZ 122955
+6Z2s 122956
+6JG0 122957
+6JKH 122958
+6JKI 122959
+6YSa 122960
+6JKJ 122961
+6JOH 122962
+6JCp 122963
+6JGw 122964
+6JGO 122965
+6YSR 122966
+6JKO 122967
+6JGW 122968
+6JKE 122969
+6JC5 122970
+5qOk 122971
+5qO9 122972
+5qOr 122973
+5qST 122974
+5qSR 122975
+8KyD 122976
+8KyDig== 122977
+6bmA 122978
+5qSG 122979
+5qOT 122980
+5qOs 122981
+5qOq 122982
+5qSA 122983
+5qWX 122984
+8Ky3 122985
+8Ky3lQ== 122986
+55Sm 122987
+6YWm 122988
+6KeM 122989
+5aWh 122990
+55qV 122991
+56Gq 122992
+5qy5 122993
+6Kmf 122994
+8KuQkA== 122995
+6L6M 122996
+5qOQ 122997
+6b6C 122998
+8Ky5 122999
+8Ky5vA== 123000
+6bu5 123001
+54ma 123002
+552O 123003
+5pmr 123004
+5pmq 123005
+5pmx 123006
+8Kc= 123007
+8Ke/ 123008
+8Ke/uQ== 123009
+6JuR 123010
+55Wv 123011
+5pad 123012
+5Zak 123013
+5bS2 123014
+5bWB 123015
+8Ku2 123016
+8Ku2hw== 123017
+5bS+ 123018
+5bWF 123019
+5bS/ 123020
+5bWa 123021
+57+Z 123022
+8KuWrg== 123023
+5ZyM 123024
+5ZyQ 123025
+6LWR 123026
+6LWS 123027
+6b+P 123028
+6ZO5 123029
+8Kytig== 123030
+6ZO9 123031
+8Kixhw== 123032
+8KuTtg== 123033
+6ZSK 123034
+6ZSN 123035
+6ZSO 123036
+8Kytjg== 123037
+6ZST 123038
+54qH 123039
+6aKL 123040
+56iM 123041
+562A 123042
+562Y 123043
+562c 123044
+562l 123045
+562F 123046
+5YKD 123047
+5YKJ 123048
+57+b 123049
+5YKS 123050
+5YKV 123051
+6Ii+ 123052
+55Ws 123053
+8KuWrw== 123054
+6IS/ 123055
+6IWY 123056
+5JA= 123057
+5JCD 123058
+6IWZ 123059
+6IWS 123060
+8Kyxnw== 123061
+6bKD 123062
+54yw 123063
+8Kub 123064
+8KubrQ== 123065
+54yv 123066
+47o= 123067
+47qE 123068
+6aaJ 123069
+5YeT 123070
+6YSX 123071
+8Ku3 123072
+8Ku3tw== 123073
+5buL 123074
+5buG 123075
+6YSM 123076
+57Ki 123077
+6YGG 123078
+5peQ 123079
+8KyusQ== 123080
+54Se 123081
+8KyKpA== 123082
+5qy7 123083
+8KO4 123084
+8KO4ow== 123085
+5rqa 123086
+5rqB 123087
+5rmd 123088
+5riw 123089
+5rmT 123090
+47Q= 123091
+47SU 123092
+5rif 123093
+5rqg 123094
+5ri8 123095
+5rqH 123096
+5rmj 123097
+5rmR 123098
+5rqe 123099
+5oSQ 123100
+5oSD 123101
+5pWp 123102
+55Sv 123103
+5qOo 123104
+5omK 123105
+6KOj 123106
+56W8 123107
+5am7 123108
+5aqG 123109
+5aqe 123110
+45u5 123111
+5aqT 123112
+5aqC 123113
+5aqE 123114
+5q+1 123115
+55+e 123116
+8Ky0gw== 123117
+8KuYqA== 123118
+57yK 123119
+57yQ 123120
+6aqZ 123121
+55GD 123122
+55GT 123123
+55GF 123124
+55GG 123125
+5LSW 123126
+55GW 123127
+55Gd 123128
+55GU 123129
+55GA 123130
+8KSn 123131
+8KSnmw== 123132
+55Gz 123133
+55GC 123134
+5baF 123135
+55GR 123136
+6YGY 123137
+6aui 123138
+5aGl 123139
+5aC9 123140
+6LWq 123141
+5pGb 123142
+5aGd 123143
+5pCS 123144
+5pCM 123145
+6JKx 123146
+6JKo 123147
+6JOP 123148
+6JSA 123149
+6JOi 123150
+6JOC 123151
+6JK7 123152
+6JOj 123153
+5qS5 123154
+5qWq 123155
+5qaD 123156
+5qaF 123157
+5qWS 123158
+5qWp 123159
+5qaH 123160
+5qS4 123161
+5qWZ 123162
+5q2F 123163
+8Kyq 123164
+8KyqqQ== 123165
+56KD 123166
+56KP 123167
+8KySlA== 123168
+56KI 123169
+5IOF 123170
+56G/ 123171
+6YSg 123172
+6L6S 123173
+8Kyojg== 123174
+8KuQkw== 123175
+6b6G 123176
+6Kec 123177
+5KM= 123178
+5KOY 123179
+5pqV 123180
+6bmN 123181
+8Kur 123182
+8Kurhw== 123183
+46yK 123184
+5pqF 123185
+6Lex 123186
+6JyQ 123187
+6JyO 123188
+5bWy 123189
+6LWX 123190
+6aqx 123191
+6ZSW 123192
+8KuTuQ== 123193
+6ZSY 123194
+6ZSz 123195
+6ZSn 123196
+6ZSq 123197
+8Kytmg== 123198
+6ZSr 123199
+6ZSs 123200
+8Kytmw== 123201
+56iR 123202
+56iZ 123203
+5IU= 123204
+5IWf 123205
+8KyV 123206
+8KyVgg== 123207
+5627 123208
+5628 123209
+5622 123210
+562m 123211
+562k 123212
+5YK6 123213
+6bmO 123214
+5YOH 123215
+6ImF 123216
+6ImJ 123217
+6LC8 123218
+6LKG 123219
+6IW9 123220
+6IWo 123221
+6IWv 123222
+6bKJ 123223
+6bKK 123224
+6bKM 123225
+5LKf 123226
+8Ky2iw== 123227
+8Ky2jQ== 123228
+6bKP 123229
+6ZuK 123230
+54y6 123231
+6aOU 123232
+6Kef 123233
+8KadvA== 123234
+6aaM 123235
+6KOb 123236
+5buS 123237
+55iF 123238
+6YSY 123239
+6bmS 123240
+6YSc 123241
+6bqA 123242
+6YSj 123243
+6ZiY 123244
+8KuUtg== 123245
+54WB 123246
+54WD 123247
+54W0 123248
+54WL 123249
+54Wf 123250
+54WT 123251
+5rug 123252
+5rqN 123253
+5rq5 123254
+5ruG 123255
+5ruJ 123256
+5rqm 123257
+5rq1 123258
+5ry3 123259
+5run 123260
+5ruY 123261
+5ruN 123262
+5oSt 123263
+5oWl 123264
+5oWG 123265
+5aGx 123266
+8KuMgA== 123267
+6KO8 123268
+56aL 123269
+56aU 123270
+56aY 123271
+56aS 123272
+6LCr 123273
+6bmU 123274
+8KuWsw== 123275
+5oSN 123276
+5auE 123277
+5aqx 123278
+5oik 123279
+5Yug 123280
+5oij 123281
+8KuYqg== 123282
+8KuYrA== 123283
+57ye 123284
+6ICk 123285
+55Gn 123286
+8Kue 123287
+8KueqQ== 123288
+55Go 123289
+55Gx 123290
+55G3 123291
+55Gi 123292
+5pag 123293
+5pGP 123294
+5aKV 123295
+5aKI 123296
+5aKQ 123297
+5aKY 123298
+5pG0 123299
+6YqO 123300
+8KGQ 123301
+8KGQkw== 123302
+5aKa 123303
+5pKW 123304
+8Kqk 123305
+8Kqklw== 123306
+6Z29 123307
+6Z6B 123308
+6JSM 123309
+6JSI 123310
+6JOw 123311
+6JS5 123312
+6JSK 123313
+5ZiP 123314
+5qaw 123315
+5qaR 123316
+5qea 123317
+8KOX 123318
+8KOXiw== 123319
+5qec 123320
+5qaN 123321
+55aQ 123322
+8Ky4mA== 123323
+6YW6 123324
+6YW+ 123325
+6YWy 123326
+6YW0 123327
+56K2 123328
+5IOO 123329
+8KySlw== 123330
+56Ko 123331
+8KWU 123332
+8KWUsg== 123333
+56K5 123334
+56Kl 123335
+5YqC 123336
+8Kualg== 123337
+5LSX 123338
+5aSl 123339
+556N 123340
+6bmW 123341
+46yO 123342
+6Le9 123343
+6Jy+ 123344
+5bmW 123345
+5baN 123346
+5ZyZ 123347
+8Kixjw== 123348
+6ZS6 123349
+6ZS8 123350
+6ZS9 123351
+8KytpA== 123352
+6ZS+ 123353
+6ZS/ 123354
+6ZWD 123355
+6ZWE 123356
+6ZWF 123357
+6aad 123358
+6bmZ 123359
+566o 123360
+566W 123361
+5YqE 123362
+5YOs 123363
+5YOm 123364
+5YOU 123365
+5YOO 123366
+5qeD 123367
+45mm 123368
+6bKS 123369
+6bKV 123370
+8KualQ== 123371
+6bKW 123372
+6bKX 123373
+6bKY 123374
+6bKZ 123375
+8Ky2kA== 123376
+8Ky2jw== 123377
+8Km9 123378
+8Km9vg== 123379
+5aSQ 123380
+542N 123381
+6aOX 123382
+8Ky4mg== 123383
+5YeY 123384
+5buR 123385
+5buZ 123386
+55iX 123387
+55il 123388
+55iV 123389
+6bKd 123390
+6YSr 123391
+54aH 123392
+5ry5 123393
+5ryW 123394
+5r2G 123395
+5ryk 123396
+5r2p 123397
+5ry8 123398
+5ry0 123399
+470= 123400
+472P 123401
+5ryI 123402
+5ryL 123403
+5ry7 123404
+5oWs 123405
+56qs 123406
+56qt 123407
+464= 123408
+466+ 123409
+8KyknQ== 123410
+6KSV 123411
+56ab 123412
+56aa 123413
+6Zqp 123414
+5auV 123415
+5aut 123416
+5auc 123417
+5auq 123418
+8KyZgg== 123419
+47s= 123420
+47us 123421
+6bq5 123422
+55KG 123423
+5rym 123424
+5Y+H 123425
+5aKj 123426
+5aKm 123427
+5aKh 123428
+5YqQ 123429
+6JaB 123430
+6JWw 123431
+6JSD 123432
+6byS 123433
+5qex 123434
+6bmd 123435
+56OP 123436
+56OJ 123437
+5q6j 123438
+5oWt 123439
+6ZyF 123440
+5pq1 123441
+5pqy 123442
+5pq2 123443
+6Lim 123444
+6Lij 123445
+5JeW 123446
+6J2Y 123447
+6J2y 123448
+6J2k 123449
+5ZmH 123450
+5ZmC 123451
+5ZmA 123452
+5722 123453
+5bay 123454
+5baT 123455
+46CH 123456
+5baf 123457
+5baS 123458
+6ZWG 123459
+6ZWI 123460
+6ZWL 123461
+6ZWO 123462
+8KytqQ== 123463
+6ZWV 123464
+56i5 123465
+5YSH 123466
+55qe 123467
+55qb 123468
+5LSY 123469
+6ImO 123470
+6ImP 123471
+6bmf 123472
+8Km+gw== 123473
+6bKm 123474
+6bKq 123475
+6bKs 123476
+5qml 123477
+6Ket 123478
+6bmg 123479
+6bmh 123480
+57OH 123481
+57OI 123482
+57+m 123483
+6bmi 123484
+6bmj 123485
+54ab 123486
+5r2W 123487
+5r21 123488
+47U= 123489
+47WQ 123490
+5r6C 123491
+5r6b 123492
+55Gs 123493
+5r29 123494
+5r2+ 123495
+5r2P 123496
+5oat 123497
+5oaV 123498
+8Ky4ow== 123499
+5oit 123500
+6KSv 123501
+56ak 123502
+8KuNvQ== 123503
+5au9 123504
+6YG5 123505
+8Ky0ig== 123506
+55Kl 123507
+55Ky 123508
+55KS 123509
+5oaZ 123510
+5pOQ 123511
+6YS5 123512
+6Jaz 123513
+6Z6U 123514
+6buH 123515
+8Kye 123516
+8Kyenw== 123517
+6JWX 123518
+6Jai 123519
+6JW5 123520
+5qme 123521
+5qmR 123522
+5qmm 123523
+6YaR 123524
+6Kex 123525
+56Oh 123526
+8KWV 123527
+8KWVog== 123528
+56Oc 123529
+6LGu 123530
+8Kufpg== 123531
+8Ky6iA== 123532
+8KugnA== 123533
+6bm+ 123534
+6Jmk 123535
+5pq/ 123536
+5puM 123537
+5puI 123538
+46ya 123539
+6LmF 123540
+6Li2 123541
+5Jeb 123542
+6J6X 123543
+55aB 123544
+46CT 123545
+5bmq 123546
+8Kqp 123547
+8KqpmA== 123548
+5bam 123549
+8KytrA== 123550
+8KixkQ== 123551
+8Kytrw== 123552
+6aae 123553
+56mE 123554
+56+a 123555
+56+v 123556
+57CJ 123557
+6by9 123558
+6KGg 123559
+55um 123560
+6J6j 123561
+57ii 123562
+6bKt 123563
+6bKv 123564
+6bKw 123565
+6bK6 123566
+6bK5 123567
+8KuXtA== 123568
+5Lq4 123569
+55mA 123570
+55it 123571
+8Ky4pg== 123572
+576x 123573
+57OS 123574
+54eL 123575
+54a7 123576
+54eK 123577
+54ea 123578
+54eP 123579
+5r+p 123580
+5r+L 123581
+5r6q 123582
+5r69 123583
+5r60 123584
+5r6t 123585
+5r68 123586
+5oa3 123587
+5oa6 123588
+5oeU 123589
+6buJ 123590
+5ayb 123591
+6bmo 123592
+57+v 123593
+8KuEtw== 123594
+55Kx 123595
+8KSpvQ== 123596
+55Ks 123597
+55Ku 123598
+6au9 123599
+5pO/ 123600
+6Ja/ 123601
+6Ja4 123602
+5qqR 123603
+5quG 123604
+5qqe 123605
+6Yao 123606
+57mE 123607
+56O5 123608
+56O7 123609
+556r 123610
+5561 123611
+6LmQ 123612
+6J+P 123613
+45g= 123614
+45iO 123615
+8Kytsw== 123616
+6ZWk 123617
+8Kyttg== 123618
+8KuUjQ== 123619
+6ZWl 123620
+6ZWo 123621
+8KytuA== 123622
+8KixlA== 123623
+8KytvA== 123624
+8KuUjg== 123625
+55+w 123626
+56mZ 123627
+56mc 123628
+56mf 123629
+57CV 123630
+57CD 123631
+57CP 123632
+5YSm 123633
+6a2L 123634
+5pa2 123635
+6Ima 123636
+8Ky4qg== 123637
+6LC/ 123638
+5LKg 123639
+8Ky2nw== 123640
+6bK+ 123641
+8Ky2oA== 123642
+6bK/ 123643
+6bOB 123644
+6bOC 123645
+6bOI 123646
+6bOJ 123647
+542v 123648
+5Jeq 123649
+6aaY 123650
+6KWV 123651
+6KWa 123652
+8Ky2qA== 123653
+6J6x 123654
+55ST 123655
+5ays 123656
+5ayl 123657
+8KaI 123658
+8KaIoQ== 123659
+8KuEuA== 123660
+55OA 123661
+6YeQ 123662
+6ay2 123663
+54iH 123664
+6Z6z 123665
+6Z6u 123666
+8KyfgQ== 123667
+6Jef 123668
+6Jem 123669
+6Jeo 123670
+6bmy 123671
+5qqr 123672
+6buh 123673
+56Se 123674
+56SM 123675
+8KWW 123676
+8KWWqA== 123677
+6Lmi 123678
+6Lmc 123679
+6J+r 123680
+5Je0 123681
+5Zqa 123682
+6auD 123683
+6ZWu 123684
+6ZWx 123685
+6YWC 123686
+6aan 123687
+57Cg 123688
+57Cd 123689
+57Cw 123690
+6byr 123691
+6byp 123692
+55qm 123693
+6IeR 123694
+5LKi 123695
+6bOR 123696
+6bOS 123697
+6bmx 123698
+6bmv 123699
+55mX 123700
+8KaS 123701
+8KaSjQ== 123702
+5pee 123703
+57+3 123704
+5YaB 123705
+5I6W 123706
+54CU 123707
+54CN 123708
+54CM 123709
+6KWc 123710
+5LSZ 123711
+8KyZig== 123712
+5Zqt 123713
+47A= 123714
+47CA 123715
+6ay3 123716
+6Yat 123717
+6Lmv 123718
+6KCL 123719
+57++ 123720
+6bOY 123721
+5YSz 123722
+5YS0 123723
+6byX 123724
+8Ky2rQ== 123725
+8Km+jA== 123726
+6bOa 123727
+6bOb 123728
+6bqR 123729
+6bqW 123730
+6KCD 123731
+5b2f 123732
+5ay/ 123733
+6ayS 123734
+6JiY 123735
+5qyC 123736
+6Ya1 123737
+6aKl 123738
+55SX 123739
+8Kif 123740
+8KifoA== 123741
+5beH 123742
+6YWF 123743
+6auO 123744
+54qo 123745
+8Ky2rg== 123746
+8Kit 123747
+8KitiQ== 123748
+47iM 123749
+54iU 123750
+54Cx 123751
+54C5 123752
+54C8 123753
+54C1 123754
+6KWr 123755
+5a2F 123756
+6aqm 123757
+8KyZiw== 123758
+6ICw 123759
+8KSr 123760
+8KSriQ== 123761
+55OW 123762
+6ayY 123763
+6Lav 123764
+8Ky6kw== 123765
+572N 123766
+6byx 123767
+6bOg 123768
+6bOh 123769
+6bOj 123770
+54if 123771
+54ia 123772
+54GI 123773
+6Z+C 123774
+57O1 123775
+6Ji8 123776
+56S1 123777
+6bm0 123778
+6LqU 123779
+55qt 123780
+6b6i 123781
+6bOk 123782
+5Lq5 123783
+57Gl 123784
+6by3 123785
+8KuarQ== 123786
+546D 123787
+6Ya+ 123788
+6b2H 123789
+6Ke/ 123790
+6KC8 123791
+16c= 123792
+16Q= 123793
+15s= 123794
+15XXqg== 123795
+16E= 123796
+15nXnQ== 123797
+16Y= 123798
+15I= 123799
+15g= 123800
+15XXqA== 123801
+150= 123802
+15XXnA== 123803
+15Y= 123804
+4LmC 123805
+77o= 123806
+8J+N 123807
+8J+Q 123808
+15nXqA== 123809
+77s= 123810
+8J+R 123811
+8J2Q 123812
+8J+P 123813
+8J+U 123814
+8J+M 123815
+8J+O 123816
+8J+T 123817
+158= 123818
+8J2R 123819
+15XXkw== 123820
+76Y= 123821
+INeV 123822
+15XXkQ== 123823
+4Lit4LiH 123824
+8J2Y 123825
+15nXqg== 123826
+8J2V 123827
+4LiX4Li14LmI 123828
+2KfYpg== 123829
+8J+k 123830
+15XXnw== 123831
+2LHZig== 123832
+15nXnA== 123833
+4Lij4Liw 123834
+4Liy4Lii 123835
+768= 123836
+764= 123837
+4Liy4Lih 123838
+4oc= 123839
+8J+l 123840
+760= 123841
+8J2Z 123842
+15XXoA== 123843
+4b0= 123844
+INeb 123845
+8J+a 123846
+4po= 123847
+76c= 123848
+15HXqA== 123849
+15nXoA== 123850
+4bQ= 123851
+INeX 123852
+4bw= 123853
+8J2X 123854
+INei 123855
+15nXlA== 123856
+44Gj44Gf 123857
+44GT44Go 123858
+4bg= 123859
+2YrZhg== 123860
+44Gq44GE 123861
+2KfYuQ== 123862
+4Lio 123863
+4LmI4LiH 123864
+15nXkw== 123865
+157XqQ== 123866
+4Yg= 123867
+16DXmQ== 123868
+15nXkQ== 123869
+76U= 123870
+8J2T 123871
+INeZ 123872
+15o= 123873
+4Lix4LiH 123874
+4pM= 123875
+76Q= 123876
+INin2YTYow== 123877
+4Liy4LiB 123878
+4LmJ4LiZ 123879
+4LmA4Lij 123880
+15XXnQ== 123881
+4bk= 123882
+4Li2 123883
+15nXpw== 123884
+4LiL 123885
+4LiE4Lij 123886
+4LiY 123887
+4Lix4LiB 123888
+8J+V 123889
+2YjZhg== 123890
+4Lit4Lii 123891
+4oo= 123892
+8J2S 123893
+INin2YTYuQ== 123894
+4Liy4LiZ 123895
+15nXnw== 123896
+2YTZig== 123897
+15nXqQ== 123898
+4Lib4Lij4Liw 123899
+4LmA4Lib 123900
+INeg 123901
+15XXoQ== 123902
+4Lig 123903
+2YXZhg== 123904
+15XXog== 123905
+15XXng== 123906
+4ow= 123907
+8J+n 123908
+4LmH4LiZ 123909
+4LiN 123910
+444= 123911
+4bU= 123912
+INin2YTYsw== 123913
+15XXpw== 123914
+4Lir4Lil 123915
+8J+H 123916
+4o8= 123917
+8J+m 123918
+INeU154= 123919
+2YjYpw== 123920
+INeq 123921
+16jXkA== 123922
+4Lit4LiZ 123923
+4Lip 123924
+4LmI4Lin 123925
+15XXpg== 123926
+7Zc= 123927
+44Q= 123928
+76g= 123929
+77k= 123930
+4o4= 123931
+77I= 123932
+8J2a 123933
+8JA= 123934
+4LiE4Lin 123935
+4Lir4LiZ 123936
+INeo 123937
+2KjZig== 123938
+4Lij4LmM 123939
+2LHYpw== 123940
+2LTYsQ== 123941
+15XXlw== 123942
+15XXpA== 123943
+15XXqQ== 123944
+15XXkg== 123945
+7Z0= 123946
+4ps= 123947
+4LiV4Li0 123948
+4LmA4LiB 123949
+77M= 123950
+77E= 123951
+4LiU4LmJ 123952
+67k= 123953
+76w= 123954
+4b8= 123955
+8J+b 123956
+8J2W 123957
+4LmI4Liy4LiH 123958
+4Li54LmJ 123959
+INeU15A= 123960
+INin2YTYrQ== 123961
+16TXqA== 123962
+2YjZhQ== 123963
+4LmA4Lil 123964
+7ZY= 123965
+15nXog== 123966
+7Ig= 123967
+7ZM= 123968
+8J+F 123969
+4aA= 123970
+4LiE4Lin4Liy4Lih 123971
+4LiI4Liw 123972
+16DXlA== 123973
+INen 123974
+4Lif 123975
+4LmJ4LiH 123976
+4Lir4Lih 123977
+2KrZhQ== 123978
+15zXmQ== 123979
+2YrYrw== 123980
+4LmI4LiZ 123981
+15fXqA== 123982
+16nXqA== 123983
+4LmA4LiX 123984
+157XqA== 123985
+65Y= 123986
+2LnZhA== 123987
+157Xog== 123988
+4rI= 123989
+15zXlA== 123990
+INek 123991
+4Lit4LiB 123992
+2LPZhA== 123993
+15nXng== 123994
+2YLZig== 123995
+7Y4= 123996
+2KrYrQ== 123997
+15nXoQ== 123998
+15nXlw== 123999
+7Zs= 124000
+77A= 124001
+4r0= 124002
+4Yk= 124003
+4Yo= 124004
+4ag= 124005
+2YfYpw== 124006
+INec15Q= 124007
+15XXkA== 124008
+2YXYpw== 124009
+4LmJ4Lit4LiH 124010
+2LHYqA== 124011
+INin2YTYrA== 124012
+157Xkw== 124013
+2YXZhA== 124014
+2KrYsQ== 124015
+4LmA4LiU 124016
+16fXqA== 124017
+7YU= 124018
+7Lw= 124019
+6r8= 124020
+44g= 124021
+4ZA= 124022
+8J+X 124023
+6qY= 124024
+4Ys= 124025
+8J2U 124026
+4LmA4Lib4LmH4LiZ 124027
+4LmD4Lir 124028
+4Lih4Liy 124029
+4Lin4LmI4Liy 124030
+4Lih4Li1 124031
+4Li14LmJ 124032
+4LmE4Lih4LmI 124033
+2YbZig== 124034
+2KQ= 124035
+4Lij4Liy 124036
+15XXmQ== 124037
+44KI44GG 124038
+4Li04LiU 124039
+15nXpA== 124040
+15fXnA== 124041
+2YLYrw== 124042
+4LmA4Liq 124043
+15nXmA== 124044
+4LiB4Lil 124045
+16jXmw== 124046
+15XXmw== 124047
+15nXmw== 124048
+64g= 124049
+64M= 124050
+8J+W 124051
+4YU= 124052
+4rw= 124053
+44k= 124054
+4LmE4LiU4LmJ 124055
+16rXmQ== 124056
+15nXkA== 124057
+INin2YTYpQ== 124058
+4Lig4Liy 124059
+4Lij4Li0 124060
+2YLYqQ== 124061
+2K3Yrw== 124062
+6rs= 124063
+7LE= 124064
+16rXlw== 124065
+7Lo= 124066
+4os= 124067
+4YQ= 124068
+4b4= 124069
+4rU= 124070
+4r4= 124071
+INmI2KfZhA== 124072
+16DXlQ== 124073
+2YA= 124074
+2YrYpw== 124075
+4LiB4LmH 124076
+157XlA== 124077
+44GE44KL 124078
+2LnYrw== 124079
+INin2YTZhg== 124080
+INeU16k= 124081
+2KY= 124082
+4Lix4LmJ4LiH 124083
+4Lij4Lix4Lia 124084
+2YjZgg== 124085
+44Gn44GN 124086
+4LmA4Lie 124087
+15vXnA== 124088
+15jXqA== 124089
+4Lix4LiU 124090
+4Lit4Liy 124091
+7KI= 124092
+4Lit4Lia 124093
+4LiV4Lij 124094
+4LmA4LiK 124095
+7JQ= 124096
+44GX44G+ 124097
+64E= 124098
+65U= 124099
+8J+Z 124100
+4pI= 124101
+4bY= 124102
+4LmB4Lil 124103
+2YbYpw== 124104
+4LmD4Lir4LmJ 124105
+4LmE4Lib 124106
+16M= 124107
+4Lix4Lin 124108
+4Liy4LiH 124109
+15PXqA== 124110
+15HXnA== 124111
+16TXmQ== 124112
+INeT 124113
+INin2YTZgQ== 124114
+4LmA4LiC 124115
+16nXlA== 124116
+15DXqA== 124117
+66w= 124118
+44Gr44Gq 124119
+0YDQvg== 124120
+4Lin4Li0 124121
+2YXYsQ== 124122
+15DXqg== 124123
+2YPYsQ== 124124
+2LPYqA== 124125
+2YbYqg== 124126
+44GX44GE 124127
+2KfYrA== 124128
+4Lit4Lij4LmM 124129
+2YPZhA== 124130
+2LPZhQ== 124131
+4Liq4Li0 124132
+15nXpg== 124133
+650= 124134
+7Zw= 124135
+7Ik= 124136
+4YY= 124137
+2YfZhQ== 124138
+4LiZ4Li14LmJ 124139
+44GC44KL 124140
+44GE44Gm 124141
+2LPZig== 124142
+15zXkA== 124143
+2K/YsQ== 124144
+44Ga 124145
+2YjYrA== 124146
+INin2YTYrg== 124147
+2LXYsQ== 124148
+7Y8= 124149
+4LmJ4Liy4LiH 124150
+4Li44LiU 124151
+15XXmA== 124152
+15HXog== 124153
+7YY= 124154
+4LiK4Liy 124155
+4Lij4Lih 124156
+16nXng== 124157
+157XoQ== 124158
+6rQ= 124159
+7LQ= 124160
+65w= 124161
+7L8= 124162
+7Kk= 124163
+67s= 124164
+4qQ= 124165
+8J+G 124166
+4Yw= 124167
+4ZU= 124168
+2LDYpw== 124169
+4LiX4Liz 124170
+4LiV4LmI 124171
+INin2YTZgg== 124172
+2YTZgw== 124173
+4Li54LmI 124174
+4LiE4Li4 124175
+2YrZhQ== 124176
+16DXmded 124177
+4Li34LmI4Lit 124178
+2YjYuQ== 124179
+44KH 124180
+2KfZgg== 124181
+INeR16I= 124182
+4LmA4Lih 124183
+2KzZhQ== 124184
+4bur 124185
+44GT44Go44GM 124186
+2KjYrw== 124187
+15XXlA== 124188
+16nXnA== 124189
+2YfYsQ== 124190
+4LmA4LiZ 124191
+44G5 124192
+7Ys= 124193
+7Ls= 124194
+7L0= 124195
+660= 124196
+7Iw= 124197
+7YA= 124198
+64w= 124199
+67o= 124200
+44o= 124201
+4LmD4LiZ 124202
+INeS 124203
+4LmG 124204
+4LiI4Liy4LiB 124205
+4Lin4Lii 124206
+4LmD4LiK 124207
+4LiH4Liy4LiZ 124208
+INin2YTYtA== 124209
+2KfYrQ== 124210
+4LmJ4Liy4LiZ 124211
+4Li34LmI4Lit4LiH 124212
+15DXmQ== 124213
+2KjZhA== 124214
+44Go5oCd 124215
+16DXoQ== 124216
+44G+44Gb 124217
+2YPZhg== 124218
+16LXqA== 124219
+INin2YTYrw== 124220
+16nXqg== 124221
+7Z4= 124222
+2YXYsw== 124223
+2LXZhA== 124224
+15XXoNeU 124225
+2KfYsdip 124226
+2YTZhQ== 124227
+4Liq4Lih 124228
+2KPZhg== 124229
+16rXqA== 124230
+15DXng== 124231
+2LnYqA== 124232
+2K7Yqg== 124233
+44KD 124234
+7KE= 124235
+7KM= 124236
+0LjQstCw 124237
+4Liq4Lix 124238
+4Li24LiB 124239
+7Lg= 124240
+64Y= 124241
+0LDQu9GM0L0= 124242
+7LM= 124243
+7I0= 124244
+6rw= 124245
+6r0= 124246
+7I8= 124247
+44w= 124248
+448= 124249
+76k= 124250
+6qo= 124251
+4Y4= 124252
+INeW 124253
+4LiB4Lix4LiZ 124254
+15nXlQ== 124255
+4LiE4LiZ 124256
+16DXldeq 124257
+4Lic4Li54LmJ 124258
+4LmD4LiI 124259
+44GE44Gf 124260
+2YHYsQ== 124261
+15jXmQ== 124262
+16bXmQ== 124263
+44KC44Gu 124264
+INin2YTYtQ== 124265
+44G+44Gb44KT 124266
+2K/YqQ== 124267
+15HXmQ== 124268
+INin2YTYsQ== 124269
+INee15A= 124270
+4Liq4Liz 124271
+4LmA4Lir 124272
+2LnYsQ== 124273
+44Gq44GP 124274
+4LiB4Lij4Liw 124275
+15HXkw== 124276
+4LmA4LiI 124277
+15nXmg== 124278
+15fXmQ== 124279
+2YrYuQ== 124280
+16nXkQ== 124281
+2YbYqQ== 124282
+2YjYtg== 124283
+2YTZgQ== 124284
+2YDZgA== 124285
+16TXog== 124286
+7Yg= 124287
+157Xpw== 124288
+4LiQ 124289
+2K3YqQ== 124290
+2KfYtQ== 124291
+0YvQstCw 124292
+4LiE4Lih 124293
+4Lin4Lix 124294
+4Lib4Lil 124295
+7J8= 124296
+7Zo= 124297
+67Q= 124298
+65E= 124299
+64k= 124300
+64c= 124301
+7Kg= 124302
+67E= 124303
+644= 124304
+4qw= 124305
+4aU= 124306
+4Zc= 124307
+4Zs= 124308
+4Y0= 124309
+xak= 124310
+4LiU4Li1 124311
+w7Rp 124312
+INeh 124313
+15zXlQ== 124314
+4budaQ== 124315
+4LiE4Li44LiT 124316
+w6J5 124317
+4LiZ4Liy 124318
+15fXkw== 124319
+15PXmQ== 124320
+4Lir4Liy 124321
+2KzZhA== 124322
+4LmA4Lin 124323
+44KH44GG 124324
+2YXYqQ== 124325
+INin2YTZgw== 124326
+INeU16I= 124327
+2KzYsQ== 124328
+15bXqA== 124329
+2KfYtw== 124330
+15vXqg== 124331
+15XXoNeZ150= 124332
+2K3ZhQ== 124333
+6rY= 124334
+2LHZgw== 124335
+INec16I= 124336
+15XXlg== 124337
+4Liq4Lij 124338
+16bXnA== 124339
+2KI= 124340
+2KfYs9iq 124341
+4LmI4Lih 124342
+2K7YsQ== 124343
+16bXog== 124344
+15nXqNeV16o= 124345
+2KfYr9ip 124346
+2LTYp9ix 124347
+157Xlw== 124348
+7ZI= 124349
+4LmA4Lij4Li14Lii 124350
+15fXpw== 124351
+2KfYqw== 124352
+4Lij4LiH 124353
+4LmA4LiV 124354
+4LiI4Liz 124355
+4Lid 124356
+4LmI4Liy4Lii 124357
+4LiE4Lil 124358
+2YLZiA== 124359
+0LjRh9C10YHQug== 124360
+4LiT4LmM 124361
+4Lix4Lii 124362
+2YXYuQ== 124363
+66g= 124364
+678= 124365
+664= 124366
+77Q= 124367
+7KU= 124368
+7Ks= 124369
+67U= 124370
+4aE= 124371
+4o0= 124372
+8JM= 124373
+4rA= 124374
+4LiC4Lit4LiH 124375
+2Ys= 124376
+4LiB4Lix4Lia 124377
+44Gu44Gn 124378
+4LmJ4Lin 124379
+4Lit4Lii4LmI4Liy4LiH 124380
+44Gt 124381
+4buHdA== 124382
+4LiV4LmJ4Lit4LiH 124383
+157XmQ== 124384
+4LmB4Lia 124385
+15LXqA== 124386
+2YjZgQ== 124387
+2YLZhA== 124388
+4Lig4Liy4Lie 124389
+16jXmQ== 124390
+4Lil4Liy 124391
+2YrYsw== 124392
+INem 124393
+2YrZgQ== 124394
+INeY 124395
+4Lic4Lil 124396
+w6FuZw== 124397
+4Lij4Lin 124398
+INee16k= 124399
+15DXldeq 124400
+15bXlA== 124401
+4Li54LiB 124402
+4LiZ4Lix4LiB 124403
+2KfZhtmK 124404
+2K/Ypw== 124405
+44Gz 124406
+15vXnw== 124407
+44KJ44KM 124408
+44KM44Gw 124409
+16rXpw== 124410
+w7pj 124411
+2YjYsg== 124412
+15nXqNeU 124413
+IG5naA== 124414
+w6FuaA== 124415
+INeV15A= 124416
+4buF 124417
+4Liq4Li44LiU 124418
+642w 124419
+2KfYtg== 124420
+2KfZhNmK 124421
+2KjYp9ix 124422
+2LnZhQ== 124423
+4Lia4Liy 124424
+2KrYrA== 124425
+4Lie4Lij 124426
+15XXqNeU 124427
+4bqjbmc= 124428
+2K7ZhA== 124429
+4LiJ 124430
+4bqvYw== 124431
+16nXmded 124432
+7ZQ= 124433
+2YHYsw== 124434
+15nXkg== 124435
+0L/RgA== 124436
+INin2YTYqw== 124437
+2LPYtw== 124438
+4Lij4Li54LmJ 124439
+4Li14LmI4Lii 124440
+4Lit4LiU 124441
+44Gq44KK 124442
+15LXkw== 124443
+44GE44G+44GX44Gf 124444
+16HXpw== 124445
+2K7YtQ== 124446
+bGHFnw== 124447
+0LXQvdC90L4= 124448
+2KjYrQ== 124449
+4Liq4LiZ 124450
+4Liu 124451
+16jXkNep 124452
+2YXZiA== 124453
+2K/Zitiv 124454
+4Lip4Liy 124455
+15XXmg== 124456
+44On44Oz 124457
+4LiV4Li4 124458
+IOq1 124459
+INGB0LLQvg== 124460
+16bXkQ== 124461
+4Lit4Lih 124462
+4Lib4Lij 124463
+2KrYuQ== 124464
+15TXqg== 124465
+2KfZhdmE 124466
+157XoA== 124467
+57aa 124468
+4Lik 124469
+7Y0= 124470
+65g= 124471
+66Q= 124472
+7JE= 124473
+4rQ= 124474
+44s= 124475
+INio2KfZhA== 124476
+4buBdQ== 124477
+INin2YTZhA== 124478
+4LiV4Lix4Lin 124479
+2LDZhw== 124480
+4Li24LiH 124481
+4LmD4LiK4LmJ 124482
+4buTbmc= 124483
+4LiZ4Lix 124484
+4Lih4Liy4LiB 124485
+44Of 124486
+157XlQ== 124487
+4LiX4Lii 124488
+4buZaQ== 124489
+4bqx 124490
+4bqjbw== 124491
+4LmC4LiU 124492
+15DXnA== 124493
+4Liq4Liy4Lih 124494
+2YjYqA== 124495
+4LiX4Li4 124496
+4Lii4Lix4LiH 124497
+16LXqg== 124498
+15XXoNeV16o= 124499
+4LiC4Li2 124500
+4LiC4Li24LmJ4LiZ 124501
+4LiB4LmI 124502
+4bqr 124503
+4buRYw== 124504
+44GX44KH44GG 124505
+4buLY2g= 124506
+INeQ15XXqg== 124507
+INep15A= 124508
+15vXldec 124509
+4buZYw== 124510
+2LnYqQ== 124511
+4LiX4Li1 124512
+4LmA4Lit 124513
+2YPYqg== 124514
+44G7 124515
+4bq7 124516
+7JeF 124517
+4Lit4Lit4LiB 124518
+2KfZhtiq 124519
+4LmE4Lij 124520
+INeQ15fXqA== 124521
+2LfYsQ== 124522
+2YbYrw== 124523
+4Li34LmJ4Lit 124524
+2LfZhA== 124525
+15DXlA== 124526
+dXnDqm4= 124527
+7ZaJ 124528
+15HXlA== 124529
+4LiE4LmI 124530
+4LiK4LmI4Lin 124531
+44GC44KK44G+44GZ 124532
+2YrYqA== 124533
+16fXnA== 124534
+44OZ 124535
+xKk= 124536
+2LPYsQ== 124537
+4Liy4Lin 124538
+44Kx 124539
+4Lia4Lij4Li0 124540
+16jXkg== 124541
+4buDdQ== 124542
+2K3Yqg== 124543
+15XXnteZ 124544
+2KjZhg== 124545
+6rWQ 124546
+xJ91 124547
+44Gq44KT 124548
+15HXpw== 124549
+INek16g= 124550
+4bqvbg== 124551
+2K3ZhA== 124552
+15HXlw== 124553
+4bqldQ== 124554
+15HXldeT 124555
+44Ov 124556
+INec16c= 124557
+4Lix4LiN 124558
+4Lie4Li0 124559
+15fXlA== 124560
+15bXmw== 124561
+44O844Og 124562
+0YLQtdC70Yw= 124563
+157XmdeT 124564
+2YrYrg== 124565
+4bqz 124566
+2KrYtQ== 124567
+4LiY4Li0 124568
+6L68 124569
+7JM= 124570
+2YPYqQ== 124571
+2YLYqA== 124572
+4LiE4LmM 124573
+4LmJ4Liy4Lii 124574
+4LiT4Liw 124575
+4Liy4Liw 124576
+65I= 124577
+6r4= 124578
+67c= 124579
+7Ic= 124580
+6ro= 124581
+7IE= 124582
+64A= 124583
+7L4= 124584
+670= 124585
+65o= 124586
+7K0= 124587
+7I4= 124588
+4ZE= 124589
+65c= 124590
+6pI= 124591
+4KE= 124592
+4Kw= 124593
+8JCM 124594
+44c= 124595
+8J2E 124596
+INec15A= 124597
+44Go44GE44GG 124598
+IG5oaQ== 124599
+15nXldeq 124600
+INep15Q= 124601
+4LmB4Lil4LmJ4Lin 124602
+xrDhu5tj 124603
+4LiU4LmJ4Lin4Lii 124604
+4LiX4Liy4LiH 124605
+16DXqg== 124606
+16TXqg== 124607
+4LmB4LiV4LmI 124608
+xrBuZw== 124609
+4Lit4Lii4Li54LmI 124610
+4LmJ4Liz 124611
+INeQ15w= 124612
+2YPZhQ== 124613
+4bqlcA== 124614
+4Lil4LiH 124615
+44Gf44KB 124616
+15LXnA== 124617
+4Lir4Lij 124618
+INGA0LU= 124619
+4LmA4LiC4LmJ4Liy 124620
+2YLYsQ== 124621
+INeU16E= 124622
+2YjZig== 124623
+4Liq4Liy4Lih4Liy4Lij 124624
+4Liq4Liy4Lih4Liy4Lij4LiW 124625
+xINu 124626
+4Lit4Li1 124627
+16TXlQ== 124628
+15nXoNeV 124629
+4Lin4Lix4LiZ 124630
+4bq3Yw== 124631
+7ZWZ 124632
+157Xqg== 124633
+w6p1 124634
+4bq5 124635
+2YHZig== 124636
+157Xpg== 124637
+4LiE4Liy 124638
+44Gd44GG 124639
+44CF 124640
+2KfYsg== 124641
+2KfZhw== 124642
+16jXmded 124643
+4bqlbg== 124644
+4Lir4Liy4Lij 124645
+4bqhdA== 124646
+2YbZhw== 124647
+4LmA4LiE4Lij 124648
+2KzZhw== 124649
+15vXmQ== 124650
+4bqvdA== 124651
+4LiE4LmJ4Liy 124652
+2LHYqQ== 124653
+44OP 124654
+2YPZiNmG 124655
+4bupbmc= 124656
+IOyasA== 124657
+4Lii4LmM 124658
+4LmI4Lin4LiZ 124659
+4LiB4Liz 124660
+2KvYsQ== 124661
+0YHQuA== 124662
+INin2YTYtw== 124663
+INeU16Y= 124664
+INi3 124665
+INin2YTZiA== 124666
+6rmM 124667
+2K3Zig== 124668
+2KfYsdin2Ko= 124669
+4LmA4LiL 124670
+2KjYpw== 124671
+0LPRgA== 124672
+4Lij4Li1 124673
+4Li34Lit4LiZ 124674
+2LnYqg== 124675
+2YLYp9mE 124676
+2K/ZhQ== 124677
+2KE= 124678
+INee16c= 124679
+15PXmded 124680
+16LXnA== 124681
+44GS 124682
+64uY 124683
+16LXlA== 124684
+IOyWtA== 124685
+0YHRjA== 124686
+2YLYtw== 124687
+44Ob 124688
+6ICD44GI 124689
+4LmB4LiZ 124690
+2YjYp9iq 124691
+w6J1 124692
+IOyCrOue 124693
+4Lir4Lin 124694
+INin2YTYo9mF 124695
+INeU157XqQ== 124696
+2KjZiA== 124697
+4LiK4LiZ 124698
+44KT44Gn44GZ 124699
+4Lin4LiZ 124700
+4LiB4Lij4Lij4Lih 124701
+157XldeT 124702
+2YPYp9mG 124703
+15XXow== 124704
+0L7Qu9C+0LM= 124705
+2KrZhg== 124706
+4LiV4LmM 124707
+6rKD 124708
+16jXmA== 124709
+4burbmc= 124710
+15XXkdeU 124711
+2YXYrQ== 124712
+INCn 124713
+16TXkg== 124714
+4Liq4LiW 124715
+44GL44KK 124716
+xLFuxLF6 124717
+4LmA4Lii 124718
+44O844Oz 124719
+44GK44KK 124720
+16TXqQ== 124721
+4Li04LiV 124722
+2LfZhg== 124723
+15nXqteZ 124724
+15DXoA== 124725
+w6dlaw== 124726
+7Ko= 124727
+157XkQ== 124728
+4Lio4Liy 124729
+44K544K/ 124730
+4Lia4Li4 124731
+15PXkdeo 124732
+44GE44GP 124733
+4Liq4Liw 124734
+4LmA4Lir4Lil 124735
+4Li04LiH 124736
+4Lie4Lix4LiZ 124737
+44GE44Gf44Gg 124738
+44KC44KJ 124739
+4LmJ4Lih 124740
+44GT44Go44GM44Gn44GN 124741
+4Liy4Lij4LmM 124742
+4Li44LiH 124743
+7ZE= 124744
+7K8= 124745
+67w= 124746
+7YI= 124747
+7Lc= 124748
+6qE= 124749
+4Y8= 124750
+4ZI= 124751
+8J2c 124752
+4ak= 124753
+8J+E 124754
+8JCk 124755
+INep15w= 124756
+INee15Q= 124757
+4LmB4Lil4Liw 124758
+INeb15w= 124759
+4bq9 124760
+4buZbmc= 124761
+2LDZig== 124762
+0LvQtQ== 124763
+16U= 124764
+44Gq44Gp 124765
+INmI2KM= 124766
+4Lir4LiZ4LmJ4Liy 124767
+44G+44Gn 124768
+4LiV4LmI4Lit 124769
+4LiX4Lix4LmJ4LiH 124770
+44Gg44GR 124771
+4LmB4Lia4Lia 124772
+4LmA4Lij4Liy 124773
+16TXnA== 124774
+44Gf44GE 124775
+4LmA4Lil4Lii 124776
+44Gj44Gm44GE44KL 124777
+4bq/cA== 124778
+4Li24LmI4LiH 124779
+6rSA 124780
+6rOE 124781
+15vXlQ== 124782
+4LmA4Lij4Li34LmI4Lit4LiH 124783
+16fXmQ== 124784
+6rWt 124785
+16TXoQ== 124786
+2KrZig== 124787
+44OE 124788
+INeU15c= 124789
+0LPQuA== 124790
+16jXkNec 124791
+157XnA== 124792
+INij2Yo= 124793
+INi52YTZig== 124794
+44GL44Gj44Gf 124795
+16nXmQ== 124796
+0LTRgw== 124797
+157Xnw== 124798
+16DXmA== 124799
+16DXmdeq 124800
+bWnFnw== 124801
+15vXnQ== 124802
+INeR16g= 124803
+INec15E= 124804
+INCb 124805
+w6dl 124806
+15XXoNeZ 124807
+44KI44GG44Gr 124808
+16TXldeo 124809
+44ON 124810
+2YPZig== 124811
+15fXqg== 124812
+2YHZhA== 124813
+INeU16c= 124814
+INeU15E= 124815
+INee16E= 124816
+4LmI4Liy4LiZ 124817
+0L/QtdGA 124818
+4LmI4Liy4Lin 124819
+INeR15A= 124820
+INmI2Yc= 124821
+4LiZ4Liz 124822
+INeR16k= 124823
+16DXpw== 124824
+44Gp44GG 124825
+16nXldeq 124826
+15PXlA== 124827
+4LmA4Lia 124828
+2YbYsw== 124829
+IOyasOumrA== 124830
+4Liq4LmI4Lin4LiZ 124831
+4Lil4Lix4LiH 124832
+2KzYsg== 124833
+INeX15k= 124834
+2YPYq9ix 124835
+4Lil4Liw 124836
+2YfYrw== 124837
+INmI2Kg= 124838
+2KfZhNmF 124839
+4LmB4Lih 124840
+xqFp 124841
+INeR15c= 124842
+4buvYQ== 124843
+4LmA4LiX4Lio 124844
+4LiV4Lix4LmJ4LiH 124845
+0L7Qs9C00LA= 124846
+15zXpw== 124847
+2K/Yrw== 124848
+4Liq4Lij4LmJ4Liy4LiH 124849
+4LiK4Li1 124850
+2YHYtg== 124851
+4LmB4Lir 124852
+dXnhu4du 124853
+4Lij4Lix4LiB 124854
+4buHbQ== 124855
+4Liq4Liy 124856
+16TXpw== 124857
+4Li14Lii4LiH 124858
+4LiV4LmI4Liy4LiH 124859
+4LiE4Lij4Lix4LmJ4LiH 124860
+2K3Zgg== 124861
+4LmA4Lit4LiH 124862
+2KfYptmK 124863
+15jXog== 124864
+2KfZhNip 124865
+4Li04LmI4Lih 124866
+44K9 124867
+2K/ZiQ== 124868
+INeo15A= 124869
+44Gj44Go 124870
+44OD44OX 124871
+2YrYsdip 124872
+6rG0 124873
+157XkA== 124874
+15XXlQ== 124875
+2KjYuQ== 124876
+44Gy 124877
+4Lij4Liy4Lii 124878
+15PXnQ== 124879
+2KrZgQ== 124880
+4LiV4LiB 124881
+4bqhbmc= 124882
+44KS6KaL 124883
+4LiK4Lix 124884
+xrDhu58= 124885
+xrDhu59uZw== 124886
+2KzYqA== 124887
+15XXnteo 124888
+IOyCrOuejA== 124889
+w7NuZw== 124890
+4Lij4Lix 124891
+INeU15Y= 124892
+16jXpg== 124893
+INeX15M= 124894
+2LDZhNmD 124895
+15XXqNeZ 124896
+44Gh44KD 124897
+2YHYuQ== 124898
+INec16Y= 124899
+w6Fp 124900
+4LmH4Lia 124901
+44GO 124902
+4LiB4Li0 124903
+4bqhYw== 124904
+66mw 124905
+44Gq44KL 124906
+15XXnNed 124907
+4LmB4LiX 124908
+15XXpQ== 124909
+0LzQtdGC 124910
+w7zFnw== 124911
+0YDRjw== 124912
+4LiS 124913
+0YHRgtC+0Y8= 124914
+2LnZiNiv 124915
+2YXYp9ix 124916
+2LfYqQ== 124917
+4Lie4Li3 124918
+0LrRgA== 124919
+4LmB4LiB 124920
+4LmC4Lij4LiH 124921
+15HXmdeY 124922
+6rKg 124923
+15XXnNeU 124924
+2K3YsQ== 124925
+4Li34LmI4Lit4LiZ 124926
+15XXkdeo 124927
+15fXqQ== 124928
+44OV44Kh 124929
+157XmA== 124930
+w7p0 124931
+IGTDtm4= 124932
+4bqvbmc= 124933
+66CH 124934
+4bqzbmc= 124935
+4Lin4LiB 124936
+2LXYrw== 124937
+2K7Ytw== 124938
+4Lit4Lix 124939
+44KP44KM 124940
+2LPZhNin2YU= 124941
+4LmA4Lij4LmH 124942
+15nXqdeZ 124943
+2KzYp9mE 124944
+44GR44KL 124945
+4LiK4Liy4LiV4Li0 124946
+2YjYp9mC 124947
+4LmC4LiZ 124948
+44Gm44GX44G+ 124949
+2KfYudip 124950
+44Kt44Oj 124951
+4LiN4Liy 124952
+2YTYp9mC 124953
+4Li04LiB 124954
+INGB0L7Qsg== 124955
+0YDQsNC6 124956
+15nXoNeZ 124957
+w7zEnw== 124958
+w7zEn8O8 124959
+16fXkQ== 124960
+4LmI4Lit4LiH 124961
+IGdlcsOnZWs= 124962
+4LiX4Lix 124963
+0L7QstCw0L3QuNGP 124964
+157Xmw== 124965
+2LPYqQ== 124966
+15nXow== 124967
+bGXFnw== 124968
+2YXYpA== 124969
+IOydmA== 124970
+4LiQ4Liy4LiZ 124971
+INGB0L7QsQ== 124972
+IOq1rQ== 124973
+16LXpg== 124974
+0LfQsg== 124975
+4Liq4LiH 124976
+2LLZhA== 124977
+44GP44KM 124978
+0LjRgNGD 124979
+2KrYow== 124980
+0L/QvtC70L0= 124981
+7JiA 124982
+2YbYtA== 124983
+15vXkA== 124984
+2YXYtA== 124985
+4LiU4LmM 124986
+2YjZitmE 124987
+4LmB4LiC 124988
+44Gj44Gm44GX44G+ 124989
+0L3QvtGB0YI= 124990
+0LLQuw== 124991
+2YXZgg== 124992
+2LHYp9is 124993
+5aSJ 124994
+65s= 124995
+4rg= 124996
+7JA= 124997
+4Ls= 124998
+4Zo= 124999
+4rs= 125000
+6pk= 125001
+4qc= 125002
+8JI= 125003
+8J2H 125004
+INeQ16o= 125005
+INmE2YQ= 125006
+INij2YY= 125007
+INeV15Q= 125008
+44Gr44Gv 125009
+INeZ16k= 125010
+2KrZhw== 125011
+w61uaA== 125012
+2YrYp9iq 125013
+INeR154= 125014
+4LiZ4Lix4LmJ4LiZ 125015
+4LiZ4LmJ4Liz 125016
+w6Bv 125017
+4LiV4Liy4Lih 125018
+44Gu44Gv 125019
+ZMSxcg== 125020
+IG5naGk= 125021
+4bq3dA== 125022
+157Xmded 125023
+44Gm44GE44KL 125024
+INeR16o= 125025
+4Lir4Lij4Li34Lit 125026
+INiz2Yo= 125027
+44Gq44KJ 125028
+4LmC4LiU4Lii 125029
+xLF5b3I= 125030
+4Lit4Li14LiB 125031
+4buHbmg= 125032
+0YvQvA== 125033
+4LiX4Li44LiB 125034
+INec15c= 125035
+INeU16g= 125036
+INeU15k= 125037
+4Lie4Lij4Liw 125038
+4LmA4Lin4Lil4Liy 125039
+INi6 125040
+4bqrbg== 125041
+bcSxxZ8= 125042
+15vXlA== 125043
+4buRbg== 125044
+44Gn44GX44KH44GG 125045
+44Oi 125046
+4Lib4Li1 125047
+16HXmQ== 125048
+44GT44KN 125049
+INec16Q= 125050
+4Lij4LiW 125051
+6riI 125052
+4LiB4Lin4LmI4Liy 125053
+66y0 125054
+4buNbmc= 125055
+44KT44Gn 125056
+44KI44GG44Gq 125057
+4buTaQ== 125058
+44Ks 125059
+4Liq4LmI4LiH 125060
+15nXoNeU 125061
+4LiW4Li54LiB 125062
+4LiI4Lix4LiU 125063
+INeU15I= 125064
+44Oc 125065
+157Xldeq 125066
+2YjZgw== 125067
+64uo 125068
+INir 125069
+44Gu44GM 125070
+4LmA4Lir4LmH4LiZ 125071
+2LnYpw== 125072
+4LiZ4Li0 125073
+xZ4= 125074
+4Lit4Liw 125075
+44GI44KL 125076
+2KvZhA== 125077
+2K3Zhdiv 125078
+4LmA4LiB4Li04LiU 125079
+16TXqdeo 125080
+16TXlA== 125081
+4Lih4Li0 125082
+2KbZitiz 125083
+4LiX4Liz4LmD4Lir4LmJ 125084
+16LXkw== 125085
+7Iuk 125086
+4LiK4LmI4Lin4Lii 125087
+INin2YTZhdmG 125088
+2LLZig== 125089
+2LnZig== 125090
+INeb15A= 125091
+4bqhbmg= 125092
+4bu5 125093
+44KT44Gq 125094
+4Liq4Li5 125095
+16bXqA== 125096
+xrDhu5tuZw== 125097
+15XXldeU 125098
+4LmC4Lil 125099
+INin2YTZhw== 125100
+4Lin4Liy 125101
+4Lir4Lil4Liy4Lii 125102
+0YnQtQ== 125103
+4LiC4LmJ4Lit 125104
+4LmJ4Lit4Lii 125105
+2KjYtw== 125106
+0LrQsNGP 125107
+INii 125108
+INC40YE= 125109
+INin2YTYug== 125110
+4LiB4Liy 125111
+4LiZ4LmI4Liy 125112
+2YrZiA== 125113
+15HXldeo 125114
+4buFbg== 125115
+4Lin4LiH 125116
+15nXlg== 125117
+7LKt 125118
+0L3QuNC8 125119
+65+w 125120
+15LXldeo 125121
+2LXYrQ== 125122
+2YTZiA== 125123
+15fXldeq 125124
+4Liq4Li4 125125
+2LHZitmC 125126
+16HXmA== 125127
+INee16I= 125128
+44OG44Kj 125129
+4LiE4Li04LiU 125130
+44KN44GG 125131
+4LmE4Lil 125132
+4LiZ4LmM 125133
+4buPaQ== 125134
+0YHRgtGA0L4= 125135
+4Liq4LiU 125136
+4Liq4Liy4Lij 125137
+2YjZhNip 125138
+4bqnbQ== 125139
+4Lij4LmI4Lin 125140
+4Lij4LmI4Lin4Lih 125141
+4Lij4Li4 125142
+INin2YTYs9mK 125143
+7JiB 125144
+INee15E= 125145
+16TXmA== 125146
+4LiV4Li04LiU 125147
+15jXmded 125148
+IOustA== 125149
+2YLYr9mF 125150
+IGTDvMWf 125151
+2KfYptmE 125152
+0LzRiw== 125153
+2K3Ysw== 125154
+2YjYtQ== 125155
+15nXp9eU 125156
+44Gn44Gv44Gq44GE 125157
+4LmA4Lir4Lih 125158
+0L7RgNGC 125159
+7Ya1 125160
+44GQ 125161
+0LrRgNCw 125162
+4Li14Lii4Lin 125163
+2LnYp9ix 125164
+2KbYqQ== 125165
+7YOA 125166
+44Gr44Gq44KK 125167
+2KzYqQ== 125168
+2YjZgti5 125169
+0YzRjw== 125170
+15XXpteU 125171
+16nXnQ== 125172
+2KjZgg== 125173
+INeZ15Q= 125174
+2YrYtw== 125175
+xLFtxLF6 125176
+0LTQtdGA0LY= 125177
+15nXqdeo15DXnA== 125178
+2LrZitix 125179
+4Lij4Lit4LiH 125180
+4LmA4Lij4Li14Lii4LiZ 125181
+INeU15g= 125182
+4Lir4Lih4Liy4Lii 125183
+2YXZhw== 125184
+2KfZgdip 125185
+INC+0YDQsw== 125186
+2YjZiQ== 125187
+44Op44Kk 125188
+157XoNeU 125189
+IMSRbw== 125190
+INCz0L7RgA== 125191
+2KfZhdip 125192
+5qW9 125193
+2KvZitix 125194
+4LiB4Li04LiI 125195
+4buTbg== 125196
+2YbYqA== 125197
+0YDRg9C0 125198
+7JeI 125199
+INeX15HXqA== 125200
+0YDQsNC2 125201
+4bqhY2g= 125202
+2KrZiA== 125203
+4LmC4Lih 125204
+15HXmdeR 125205
+IO2GtQ== 125206
+YWNhxJ/EsQ== 125207
+2KzZhNiz 125208
+4LmA4Lib4Lil 125209
+4Lin4LiU 125210
+4Lit4Lil 125211
+44Gf44KK 125212
+4Lib4Lix4LiN 125213
+IOyVjA== 125214
+2LnYsdmB 125215
+4LmE4Lif 125216
+2KPYrg== 125217
+5aSa44GE 125218
+4LiU4Lix4LiH 125219
+2LTZgQ== 125220
+44Gj44Gm44GE44G+44GZ 125221
+15vXoNeh 125222
+0YbQtQ== 125223
+0LXRgdC/ 125224
+2YXYp9mF 125225
+4Lie4Li34LmJ4LiZ 125226
+0LjRh9C10YHQutC4 125227
+2K7Yrw== 125228
+2YPZiNmF 125229
+INeU16jXkNep 125230
+2KrYp9io 125231
+6aOf44G5 125232
+4Li34LiZ 125233
+0L7RgNC+ 125234
+IGLDtmw= 125235
+15XXl9eT 125236
+2K/Zitix 125237
+4bqvbQ== 125238
+2K/YuQ== 125239
+44GV44Gb 125240
+4LiY4Lij 125241
+4LiY4Lij4Lij4Lih 125242
+44GL44KC 125243
+5aSa44GP 125244
+csOk 125245
+2LPYuQ== 125246
+15nXnNeU 125247
+2LbYsQ== 125248
+INin2YTYtNix 125249
+15bXldeo 125250
+16LXkdeo 125251
+4bqhbQ== 125252
+0LDQu9GM0L3Qvg== 125253
+2LHZhg== 125254
+2KfZhdis 125255
+15vXmg== 125256
+ZMSxxJ8= 125257
+0LTQtdC9 125258
+2LbYpw== 125259
+2YTZitmF 125260
+IOq3uOufrA== 125261
+2KrZhdin2Lk= 125262
+2KfYsdmK2K4= 125263
+4LmC4LiV 125264
+INGB0YDQtdC0 125265
+INeg15XXoQ== 125266
+2YLYqNmE 125267
+0L7RgtC+0LI= 125268
+bGXFn3Rpcg== 125269
+INC80LXRgdGC 125270
+2LPZhNmF 125271
+INei16Y= 125272
+INin2YTYs9mE 125273
+0LXRgtGM 125274
+2KfYqNip 125275
+0L3QsNC6 125276
+4Liq4LiW4Liy4LiZ 125277
+INeR16A= 125278
+4Lia4Lix4LiZ 125279
+15vXoA== 125280
+IMO2xJ8= 125281
+44Go6KiA 125282
+dXnhur9u 125283
+ZGnEnw== 125284
+4bqtdQ== 125285
+0YDQsNGB 125286
+44K344On44Oz 125287
+bsSxeg== 125288
+15XXk9eU 125289
+2KrYsw== 125290
+2YXYp9mE 125291
+4LmA4Lir4LiV4Li4 125292
+4Lii4Lin 125293
+4Lie4Lix4LiB 125294
+44GE44Gq44GE 125295
+INC60LDRhw== 125296
+4Lil4LmM 125297
+16jXm9eq 125298
+xZ90dXI= 125299
+157Xldeh 125300
+44Gl 125301
+0LHQvtC7 125302
+2LnZhdin2YQ= 125303
+15XXqNeq 125304
+0YbQuNC+0L0= 125305
+4Lio4Li24LiB 125306
+4LiP 125307
+0YDQtdC9 125308
+2KfYs9mK 125309
+2KfYptix 125310
+4LmC4Lib4Lij 125311
+IHNlw6c= 125312
+2LrZig== 125313
+0Y3Rgg== 125314
+0LXQvdC9 125315
+44Gq44Gu 125316
+15nXqdeU 125317
+15nXpNeV16g= 125318
+44Gf44KB44Gr 125319
+2LLYqQ== 125320
+IMOnb2M= 125321
+44Kv44Oq 125322
+0YjQtdC9 125323
+44KP44GR 125324
+2LHZitiv 125325
+INGA0LDRgdGB 125326
+2YPYp9iq 125327
+4Liq4Lit4Lia 125328
+Y2XEn2k= 125329
+44K/44Kk 125330
+4Lia4Lij 125331
+INin2YTYqNix 125332
+16DXldei 125333
+csO8bg== 125334
+2LHYp9i2 125335
+4Lio4Liy4Liq 125336
+4LiV4Lij4LmM 125337
+44GN44Gf 125338
+15XXnNeT 125339
+0LXRgNC4 125340
+7ZeY 125341
+4bqvcA== 125342
+2KrYudmE 125343
+2YPYrw== 125344
+0LjRgtC10LvRjNC90L4= 125345
+2LfZgQ== 125346
+INCw0LLRgtC+0Lw= 125347
+INee16Y= 125348
+0YjQuNGF 125349
+2KfYqtmB 125350
+INGF0L7Rgg== 125351
+2Y7Ypw== 125352
+44GP44KL 125353
+15TXpA== 125354
+4LmC4LiX 125355
+4LmB4Lie 125356
+4LmI4Lit4Lii 125357
+INin2YTZhdi0 125358
+4LiB4Liy4Lij4LiT4LmM 125359
+0LDQvdC40Lc= 125360
+15TXnA== 125361
+2LjZhQ== 125362
+4Lii4Li4 125363
+bGnEnw== 125364
+4LmE4LiC 125365
+4LiW4Li34Lit 125366
+w7Z6 125367
+44GR44Gm 125368
+4LmA4Lic 125369
+4Li44Lih 125370
+44OX44Os 125371
+INeU15DXl9eo 125372
+2K7YqtmE2YE= 125373
+4LiO 125374
+2YTYp9it 125375
+IGTDvHplbg== 125376
+16bXlA== 125377
+2LPYp9ih 125378
+15XXqNea 125379
+15XXk9eZ 125380
+0YDQsNGE 125381
+xZ90xLFy 125382
+44Gr5YWl 125383
+44GI44Gw 125384
+2LXZiNmE 125385
+INCc0L7RgQ== 125386
+2KfZh9ix 125387
+44Gj44E= 125388
+INC70Y7QsQ== 125389
+15nXoteU 125390
+INeU157Xpw== 125391
+4Liq4Li04LiX 125392
+4Liq4Li04LiX4LiY4Li0 125393
+15nXoNed 125394
+2YTYp9mB 125395
+4Lie4Lix4LiZ4LiY 125396
+15XXkNeU 125397
+4Lih4Lix 125398
+4LiC4LiT4Liw 125399
+0LTQvtGA 125400
+44Go44Gq 125401
+4LiB4Lij4Liw4LiX 125402
+YWPEsQ== 125403
+15XXnNeV15I= 125404
+0YPRiA== 125405
+44Ol44O8 125406
+44Om 125407
+2YXYs9iq 125408
+IGHFnw== 125409
+16nXpw== 125410
+16TXqteX 125411
+4Liy4Lii4LiZ 125412
+7Yc= 125413
+66I= 125414
+77c= 125415
+7Yk= 125416
+7LU= 125417
+7Kw= 125418
+8J2b 125419
+7JI= 125420
+65k= 125421
+6qc= 125422
+4ZY= 125423
+4qg= 125424
+4rE= 125425
+4Zg= 125426
+8JY= 125427
+4KA= 125428
+4ZQ= 125429
+8JCt 125430
+4buvbmc= 125431
+xaluZw== 125432
+INeU16o= 125433
+INin2YTYpw== 125434
+INee16o= 125435
+4LiW4Li24LiH 125436
+w7Ju 125437
+4buLbmg= 125438
+0L3Ri9C8 125439
+IGPhuqM= 125440
+4LiU4Li5 125441
+IOC5geC4leC5iA== 125442
+INeR15Q= 125443
+w7Np 125444
+44Go44GX44Gm 125445
+w7puZw== 125446
+INiw 125447
+INeU16A= 125448
+INio2YY= 125449
+2YTYp9mE 125450
+4LmE4LiX4Lii 125451
+4buHcA== 125452
+dMSx 125453
+4Lih4Lix4LiZ 125454
+4bqxbmc= 125455
+4buRdA== 125456
+0LrQvtC8 125457
+4LiL4Li24LmI4LiH 125458
+4LiE4Lij4Lix4Lia 125459
+4Lia4LmJ4Liy4LiZ 125460
+INin2YTZig== 125461
+bMO8 125462
+2YjYsw== 125463
+44Gg44Gj44Gf 125464
+4LmA4LiH 125465
+IOqztQ== 125466
+0L3Rgw== 125467
+44KI44KK 125468
+0LzRgw== 125469
+4LmA4LiC4Liy 125470
+44KA 125471
+0L3QuNC1 125472
+44Gr44Gq44KL 125473
+4bqteQ== 125474
+INmI2Kc= 125475
+66Ck 125476
+16nXlQ== 125477
+w6Fw 125478
+15PXlQ== 125479
+44Gn44GX44Gf 125480
+2LnYtg== 125481
+0YHQutC+0Lk= 125482
+5oSf44GY 125483
+0Y7RgtGB0Y8= 125484
+INeZ15vXldec 125485
+44KT44Gg 125486
+0LLQuA== 125487
+4LmA4Lil4LmI4LiZ 125488
+7J2064uk 125489
+INmE2Yc= 125490
+4LiE4Li34Lit 125491
+2KrZgw== 125492
+2YXZg9mG 125493
+YcSfxLE= 125494
+16DXkw== 125495
+66+8 125496
+4LmE4Lin 125497
+4Liq4Liz4Lir 125498
+4Liq4Liz4Lir4Lij4Lix4Lia 125499
+0YHQu9C10LQ= 125500
+dMSxcg== 125501
+INmE2Yo= 125502
+INin2YTYudmF2YQ= 125503
+15HXldeq 125504
+15HXmded 125505
+4LiE4Liz 125506
+4LmA4LiE4Lij4Li34LmI4Lit4LiH 125507
+bMSxxJ/EsQ== 125508
+4Li34Lit4LiH 125509
+2KzYrw== 125510
+7Z6I 125511
+7Ius 125512
+16LXldeq 125513
+4Liq4Li04LiZ 125514
+0YfQuA== 125515
+2LHYtg== 125516
+4LmA4Lib4Li04LiU 125517
+4LiE4LmI4Liy 125518
+7ISg 125519
+2YjYsdip 125520
+16fXmA== 125521
+7Jyg 125522
+2LnZhdmE 125523
+15DXmded 125524
+15zXmded 125525
+4LmD4Lir4LiN 125526
+4LmD4Lir4LiN4LmI 125527
+4burYQ== 125528
+4buNaQ== 125529
+44G2 125530
+w61jaA== 125531
+44OH44Kj 125532
+15XXqNeZ150= 125533
+0YHQvg== 125534
+7JW9 125535
+0L7QstCw 125536
+0YfQsNGB0YI= 125537
+4LmA4LiI4LmJ4Liy 125538
+0L/RgNC+ 125539
+INee15c= 125540
+44OO 125541
+15XXmdeV16o= 125542
+INC00LU= 125543
+66eI 125544
+7KeB 125545
+15nXpNeU 125546
+INin2YTYudin2YTZhQ== 125547
+66W0 125548
+16jXkNeU 125549
+dXnhu4Nu 125550
+16LXmQ== 125551
+4Lih4Li34Lit 125552
+2KXZhg== 125553
+4Lij4Li5 125554
+INiy 125555
+15nXlded 125556
+4LiV4LmJ4LiZ 125557
+44Gm44GE44G+44GZ 125558
+2YXYp9mG 125559
+INCl 125560
+4Lib4Lij4Liw4LmA4LiX4Lio 125561
+4buz 125562
+15zXkQ== 125563
+4LmA4LiU4LmH 125564
+44Gf44Gh 125565
+4LiX4Li14Lih 125566
+4LiZ4Liw 125567
+7Jew 125568
+IOyggA== 125569
+2YTZhw== 125570
+4bufaQ== 125571
+INin2YTYsg== 125572
+2K/Yp9ix 125573
+44Kz44Oz 125574
+0LzQuNC9 125575
+4LmB4Lir4LmI4LiH 125576
+4LiU4Lix4Lia 125577
+15vXqA== 125578
+0LbQsA== 125579
+7ZaI 125580
+157Xlg== 125581
+4bujaQ== 125582
+4LiU4Liy 125583
+INi52KjYrw== 125584
+4LmB4Lij 125585
+15DXqteo 125586
+16LXoNeZ 125587
+4LmA4LiE 125588
+15XXpteo 125589
+7KeA66eM 125590
+2KfYptmF 125591
+2KPYsw== 125592
+dXnhu4Fu 125593
+INeQ16A= 125594
+15fXoNeV 125595
+15bXmQ== 125596
+4Lij4LmJ4Liy4LiZ 125597
+INCg0L7RgQ== 125598
+INCg0L7RgdGB 125599
+2LHYqNmK2Kk= 125600
+dMO8cg== 125601
+44KL44GT44Go 125602
+2LjYsQ== 125603
+0LHRiw== 125604
+4LiX4Li14LmI4Liq4Li44LiU 125605
+INem16g= 125606
+6Ieq5YiG 125607
+0LvQsNGB 125608
+INGP0LI= 125609
+INGP0LLQu9GP 125610
+4Lie4Lij4LmJ4Lit4Lih 125611
+4Lit4Liy4LiI 125612
+4Lia4Lij4Li04LiB4Liy4Lij 125613
+IMOnxLE= 125614
+642Y 125615
+INin2YTZhdiz2Ko= 125616
+2KrYtA== 125617
+16nXldeR 125618
+44K0 125619
+IHlhcMSxbA== 125620
+INin2YTYsA== 125621
+4Li44LmI4Lih 125622
+4LiW4LmJ4Liy 125623
+7ISk 125624
+7LCo 125625
+0LLQsNGA 125626
+4LmA4Lie4Li04LmI4Lih 125627
+xrDhu5tp 125628
+2YPYsw== 125629
+4Lit4Lii4Liy4LiB 125630
+44Gm44KC 125631
+INCz0L7QtA== 125632
+2YrYp9ix 125633
+4LiV4Lit4LiZ 125634
+INC40LPRgA== 125635
+4LmE4LiU4LmJ4Lij4Lix4Lia 125636
+INin2YTZhdix 125637
+2YLYqg== 125638
+IOuY 125639
+IOuYkA== 125640
+4bqpbg== 125641
+44GZ44KL44GT44Go 125642
+15LXnQ== 125643
+INeR15E= 125644
+2KrYrw== 125645
+2YjYp9ix 125646
+44Ku 125647
+0L/QvtC7 125648
+INC80L7Qsw== 125649
+2KrYsdmD 125650
+2YjYqw== 125651
+IMOnxLFr 125652
+2KfYqQ== 125653
+4LmA4LiU4Li14Lii4Lin 125654
+4Lih4Li14LiE4Lin4Liy4Lih 125655
+INee15I= 125656
+2LXZgQ== 125657
+INCi0LDQug== 125658
+INeb16o= 125659
+15nXk9eZ 125660
+0L7QstC+0YA= 125661
+4bqneQ== 125662
+4Liq4Li04LmI4LiH 125663
+2KjYqg== 125664
+w7xyw7w= 125665
+2YbYrA== 125666
+4Lir4Lil4Lix4LiB 125667
+15nXlNed 125668
+2YLYtQ== 125669
+0LfRiw== 125670
+15vXqteR 125671
+xrB1 125672
+bcSxeg== 125673
+IOyEuA== 125674
+0LvQvtCz 125675
+2YXZitmE 125676
+2YrYrA== 125677
+7ZKI 125678
+4Lie4Lia 125679
+4Lir4Lix4Lin 125680
+0LfQvdCw 125681
+16jXpw== 125682
+4LmC4Lij 125683
+INeR16E= 125684
+IEJhxZ9rYW4= 125685
+IOuUsA== 125686
+4Lit4Lix4LiZ 125687
+4Li14LmI4Lii4Lin 125688
+0L3QtdGB 125689
+4LmA4LiU4Li04LiZ 125690
+2YrYp9mG 125691
+15XXnNeZ 125692
+2KfYrtiq 125693
+16bXldeq 125694
+44GT44GT 125695
+INin2YTYp9mG 125696
+INC/0YDQvtGG 125697
+44G+44Gg 125698
+15vXoQ== 125699
+INin2YTYog== 125700
+2YrYsg== 125701
+INin2YTYr9mI2YQ= 125702
+IO2VmOuCmA== 125703
+2LbYuQ== 125704
+6ruY 125705
+xZt3aQ== 125706
+4Lii4Li0 125707
+44Gh44KD44KT 125708
+INmF2LQ= 125709
+4LiY4Li1 125710
+44Go44GN 125711
+16DXmdeV16o= 125712
+IOuv 125713
+IOuvuA== 125714
+IHPEsQ== 125715
+64uI6rmM 125716
+INC/0Ls= 125717
+2LrZhA== 125718
+4LmB4Lij4LiH 125719
+2KjZitix 125720
+44GC44KK44G+44Gb44KT 125721
+6re8 125722
+IHnDvHo= 125723
+IGRlxJ9lcg== 125724
+5aC05ZCI 125725
+4buh 125726
+0LzQsNGC 125727
+4Lij4Liy4LiK 125728
+2YjYsdmK 125729
+0LbQtdC9 125730
+44G+44KK 125731
+44Gu5Lit 125732
+15nXk9ei 125733
+4Lit4Li4 125734
+4Lia4Lit4Lil 125735
+4Lib4Lix4LiN4Lir4Liy 125736
+2LLZhQ== 125737
+xJ9h 125738
+4Lit4Li34LmI 125739
+4Lit4Li34LmI4LiZ 125740
+0L/Quw== 125741
+INC90LXQvtCx0YXQvtC00LjQvA== 125742
+15vXkQ== 125743
+4LmA4Lio 125744
+16fXqNeU 125745
+7LKY 125746
+66Co 125747
+157Xp9eV150= 125748
+asSFYw== 125749
+2YfZhA== 125750
+INei15HXldeT 125751
+4LmE4Lih4LmJ 125752
+4LiB4Lil4Lix4Lia 125753
+15XXm9ec 125754
+16fXkw== 125755
+2KfZhNmK2Kk= 125756
+2LHZhw== 125757
+44GR44KM44Gw 125758
+INmG2YHYsw== 125759
+44Ki44Or 125760
+7JeI64uk 125761
+16fXldeo 125762
+0L3QtdGA 125763
+2KjYp9io 125764
+44K2 125765
+2LPYqNio 125766
+2YTZitmE 125767
+2LXZhg== 125768
+2LXYr9ix 125769
+4bq/bQ== 125770
+4LiK4LmI4Lin4LiH 125771
+2K3Zhg== 125772
+INeR15I= 125773
+157Xldei 125774
+15zXlw== 125775
+5aSn44GN 125776
+2KrYqA== 125777
+0L3QtdGC 125778
+15nXkdeU 125779
+0LHQuw== 125780
+44OX44Oq 125781
+2KfYtdip 125782
+44Gk44GR 125783
+15nXnteV16k= 125784
+44GM44GC 125785
+64u0 125786
+44GL44KC44GX 125787
+44GL44KC44GX44KM 125788
+44Gh44KJ 125789
+15HXmA== 125790
+IGJhxJ8= 125791
+15nXl9eh 125792
+15HXldei 125793
+4Lil4Li1 125794
+16TXoteZ15w= 125795
+0LjQvNC4 125796
+Z8WC 125797
+INC40LzQtQ== 125798
+2K7Yr9in2YU= 125799
+15DXmdeo 125800
+IHlhcHQ= 125801
+44Go44GE 125802
+4LiH4LmI4Liy4Lii 125803
+15zXmdeV 125804
+2K3Yr9ir 125805
+2LHYp9mC 125806
+IMSQaQ== 125807
+2KfYr9ix 125808
+44GT44Go44KC 125809
+15HXmdeo 125810
+INCy0Lc= 125811
+2LbYp9mB 125812
+16rXldeb 125813
+0YDQvtC8 125814
+2LHYp9iq 125815
+4LmA4LiX4LmI4Liy 125816
+44GY44KD 125817
+44Gd44GT 125818
+2KfYrNiq2YXYp9i5 125819
+4LmJ4Lit4LiZ 125820
+2YLZhQ== 125821
+67O4 125822
+xJ4= 125823
+16nXmdeV 125824
+15HXoNeZ 125825
+7JyE7JuQ 125826
+4LmB4LiI 125827
+15fXldeo 125828
+2K/ZitmG2Kk= 125829
+2KrYtw== 125830
+4bqxbQ== 125831
+w7Jh 125832
+4Lii4Lit4LiU 125833
+IOuLuQ== 125834
+4Liq4Li44LiC 125835
+15PXqNea 125836
+2K/Zhg== 125837
+2LPZitmG 125838
+2YjZgtmB 125839
+0YbRiw== 125840
+0LPQvtGC0L7Qsg== 125841
+0LXQttC00YM= 125842
+4Lie4Lin4LiB 125843
+2KfZgtiq2LU= 125844
+2KfZgtiq2LXYp9iv 125845
+Y3rEmQ== 125846
+bmnEmQ== 125847
+0YDQtdCx 125848
+2K3ZiA== 125849
+4LiX4LmM 125850
+44KI44Gt 125851
+0LTQtg== 125852
+4LiB4Lil4LmI4Liy4Lin 125853
+2K/Zitir 125854
+44Kz44Of 125855
+2YLZiNmF 125856
+INiq2K0= 125857
+4LmA4LiV4Li0 125858
+2KfZgdi4 125859
+4LiI4Li4 125860
+2LHZitin2LY= 125861
+157Xqdea 125862
+4LmC4Lii 125863
+0LXRgNC1 125864
+44G/44Gf44GE 125865
+7J206528 125866
+INin2YTZhdmI 125867
+INGB0YLQvg== 125868
+4LmA4Lij4LmH4Lin 125869
+INC00LXRgg== 125870
+INGB0LTQtdC7 125871
+4LmA4LiK4Li34LmI4Lit 125872
+16TXoNeZ 125873
+2YjYttmI2Lk= 125874
+15HXoQ== 125875
+4LmB4LiU 125876
+w7Nj 125877
+4Lij4Li04Lih 125878
+0YDQsNC0 125879
+7Iig 125880
+44O844K6 125881
+44Gr44GK 125882
+0LjQvdC+ 125883
+16TXmdec 125884
+4LiK4Lix4LmI4LiZ 125885
+15fXk9ep 125886
+4LmA4LiZ4Li34LmI4Lit4LiH 125887
+16DXmdeh 125888
+2LrYsdio 125889
+44K444Oj 125890
+4Liq4Lix4LiH 125891
+4LmA4LiX4Li14LmI 125892
+4LmA4LiX4Li14LmI4Lii4Lin 125893
+65+8 125894
+4LmB4Lif 125895
+44O844K3 125896
+44O844K344On44Oz 125897
+INCy0L7Qt9C80L7Qtg== 125898
+2KzZhdmI2Lk= 125899
+15HXqNeZ150= 125900
+44OI44Op 125901
+INC60LDRh9C10YHRgtCy 125902
+2LfZig== 125903
+0YLRjw== 125904
+16bXldei 125905
+xJ/EsW7EsQ== 125906
+2LnZhNmJ 125907
+2KfYsA== 125908
+2YjYp9mC2Lk= 125909
+2YXZiNin 125910
+2KfYptmK2YQ= 125911
+0LrQvtC7 125912
+4buBbQ== 125913
+4Lic4Lil4Li04LiV 125914
+15nXoNeY16g= 125915
+2LPZgw== 125916
+16nXmdeo 125917
+4Lio4Li24LiB4Lip4Liy 125918
+4Lia4Lix 125919
+0YfQsNGB 125920
+15XXpNeU 125921
+15nXpNeV15w= 125922
+INin2YTYs9in2Kg= 125923
+2LHZitio 125924
+INin2YTYqNmK 125925
+44K544OG 125926
+0YfQtdC9 125927
+4LmB4Lic 125928
+INeg16k= 125929
+2LLZitiv 125930
+2K3Yp9iv 125931
+642U 125932
+2LHZiNi5 125933
+4LiX4Li44LiZ 125934
+4Liq4Lih4Liy 125935
+Y3plxYQ= 125936
+15nXk9eU 125937
+44Gn44GC 125938
+IMOnb2N1aw== 125939
+2K7YqA== 125940
+4Lia4Liy4Lii 125941
+4Lib4Lij4Liw4LiK4Liy 125942
+157Xqdec 125943
+44Gq44GL 125944
+4LiB4Liy4Lii 125945
+44OB44Oj 125946
+0LDRgNC4 125947
+INGH0LA= 125948
+4LiU4Liz 125949
+4LiX4Lix4LmI4Lin 125950
+0YPRhQ== 125951
+IMO2eg== 125952
+IOyiiw== 125953
+2KzYsdmK 125954
+2KfYptmC 125955
+4Lig4Lix4Lii 125956
+2LfYp9ix 125957
+2K/Yp9ix2Kk= 125958
+xKluaA== 125959
+2KvZhg== 125960
+emVsbGlr 125961
+2KfZhNiq 125962
+IGdlbGk= 125963
+44OV44Kp 125964
+0L7Qu9C+0LQ= 125965
+2LHYqNi5 125966
+16nXqtee16k= 125967
+4Lia4Lij4Lij 125968
+7Z2s 125969
+IMO8csO8bg== 125970
+IOq3uOughw== 125971
+4Lio4Liy4Liq4LiV4Lij4LmM 125972
+44Gc 125973
+15nXkdec 125974
+INC/0YDQtdC00YHRgtCw0LI= 125975
+2LPYt9mK2YY= 125976
+44KS5L2/ 125977
+INC/0L7QvNC+0Yk= 125978
+15XXp9eo 125979
+44Ov44O8 125980
+IHnDtm5ldA== 125981
+15nXp9eo 125982
+4LiC4Liy 125983
+0LXRgNC40LDQuw== 125984
+2K3ZgQ== 125985
+INeZ16Y= 125986
+4LiX4Li0 125987
+5aOy 125988
+4LiZ4Lit4LiB 125989
+15XXm9eo 125990
+7Zmc 125991
+4buneQ== 125992
+INin2YTZgtix 125993
+15nXkdeV16o= 125994
+xZtuaQ== 125995
+2YXYtNin2LE= 125996
+xrDhu6N0 125997
+INmE2K/Zig== 125998
+0YLQtdC7 125999
+INil2YTZig== 126000
+2LnZhNmI2YU= 126001
+7JWY 126002
+0LLQuNGC 126003
+4LiE4Liw 126004
+eXLEsQ== 126005
+44Go44Gj44Gm 126006
+4LmA4LiJ 126007
+4LiW4Liy4Lih 126008
+2YLYp9ix 126009
+2LnZhNin2YU= 126010
+4bq3bmc= 126011
+2YXZkg== 126012
+15nXnteq 126013
+2LPYqNip 126014
+44Kv44Op 126015
+15XXodej 126016
+INC/0YDQuNC9 126017
+44GE44KN 126018
+2LPYp9iz 126019
+2LnYqtio2LE= 126020
+4Lin4Li04LiX4Lii 126021
+4Lin4Li04LiX4Lii4Liy 126022
+2LPZg9ix 126023
+44K344On 126024
+44GB 126025
+4Lix4LiB4Lip 126026
+15HXldeU 126027
+4Lir4Lii 126028
+44G+44KM 126029
+INC+0YDQs9Cw0L3QuNC3 126030
+0LrQsNC30LDQuw== 126031
+INGB0LLRj9C3 126032
+dXnhur90 126033
+INC/0YDQvtC40Lc= 126034
+INen15g= 126035
+4LmB4LiB4LmJ 126036
+0L/Rg9GB 126037
+IOq3uOqygw== 126038
+64qQ 126039
+0LvQtdC60YE= 126040
+44O844OX 126041
+4LiV4Liz 126042
+16rXl9eZ15w= 126043
+4Lit4LiH4LiE4LmM 126044
+4bq1 126045
+16DXpg== 126046
+2KPYtA== 126047
+2LTZhw== 126048
+4Lii4Liw 126049
+4LiB4LiO 126050
+INin2YTYpdiz2YTYp9mF 126051
+0LXQtNGM 126052
+44Gy44Go 126053
+64+E66Gd 126054
+44Gp44Gu 126055
+0YPQsg== 126056
+0LXRh9C10L3QuNC1 126057
+INin2YTYqtis 126058
+44Gr6KGM 126059
+INC/0L7Qt9Cy 126060
+44KP44KK 126061
+2YTYp9ir 126062
+7ZWY7JiA 126063
+INC80LDRgA== 126064
+IGtvbnXFnw== 126065
+44Os44K5 126066
+44KS5oyB 126067
+INC+0YHQvdC+0LI= 126068
+15fXkQ== 126069
+2YjYrNmI2K8= 126070
+16TXldef 126071
+0LLQvtGA 126072
+INC90LjQug== 126073
+44GL44KL 126074
+xZ90xLFybWE= 126075
+15nXodeY 126076
+2KPZhA== 126077
+4Lir4LmM 126078
+0LjQvtC90LA= 126079
+0LvRjNC9 126080
+INCz0L7RgQ== 126081
+INCc0L7RgdC6 126082
+0YDQvtCx 126083
+15XXkNeZ 126084
+44GK44KK44G+44GZ 126085
+44Gj44Gx 126086
+0LrQuw== 126087
+4LiZ4LiU4LmM 126088
+2LHZitmB 126089
+2KfYs9io 126090
+INGA0LXRiA== 126091
+INC00L7Quw== 126092
+44G544GN 126093
+15nXkdeV16g= 126094
+0LzQtdGJ 126095
+INC90LDRiA== 126096
+4LmB4Lib4Lil 126097
+0YDQuNGC 126098
+0LrRg9GB 126099
+0LjRgNCw 126100
+0LDRgtGD0YA= 126101
+2YjYp9i12YQ= 126102
+4LmA4Lic4Lii 126103
+4Lit4Liz 126104
+4LmA4LiB4Li04LiZ 126105
+2LrZhQ== 126106
+44GZ44GO 126107
+bMSxa2w= 126108
+xYRzaw== 126109
+6rKs 126110
+15nXm9eU 126111
+15fXqdeR 126112
+2YjYsdmK2Kk= 126113
+INC00LXQudGB0YLQsg== 126114
+15fXnNeY 126115
+INec157Xog== 126116
+16bXnNeZ15c= 126117
+0LXRh9Cw 126118
+2YHYp9i5 126119
+15LXmdeT 126120
+4bqtbQ== 126121
+xJli 126122
+2LTYuQ== 126123
+44GP44KK 126124
+4Lie4Li4 126125
+0LXQtNC10YA= 126126
+4LiC4LiZ 126127
+4LiE4Liy4Lij 126128
+INCx0L7Qu9GM0Yg= 126129
+44GP44Gq44KK 126130
+4LiT4Liy 126131
+15PXldeS 126132
+INC80L0= 126133
+5LiK44GM 126134
+57aa44GN 126135
+4Lik4Lip 126136
+4LiG 126137
+2K7Zig== 126138
+4LmA4LiX4Lie 126139
+4Liq4Lix4Lih 126140
+4LmA4Liq4LiZ 126141
+4LmA4Liq4LiZ4Lit 126142
+44O0 126143
+INC40YHRgg== 126144
+2KjYp9i02LE= 126145
+INGD0YDQvtCy 126146
+157XldeW 126147
+YWLEsQ== 126148
+d2HFvA== 126149
+15XXpteQ15Q= 126150
+0YLQstC10YA= 126151
+4Lie4Lix4LiZ4LiY4LmM 126152
+16DXkteT 126153
+44KL44GT44Go44GM44Gn44GN 126154
+INGC0YDQtdCx 126155
+4LiB4Lij4Li44LiH 126156
+2K3Yqtin2Kw= 126157
+4LmA4LiE4Lil 126158
+44Y= 126159
+xJl0cg== 126160
+IHN6Y3plZw== 126161
+INeo16k= 126162
+4LiX4LiY 126163
+INC90LXQug== 126164
+INC90LXQutC+0YLQvtGA 126165
+0LLRiA== 126166
+0Kw= 126167
+4LmI4Lin4Lii 126168
+4Lil4Li4 126169
+0LHRgNGP 126170
+4Lir4Lih4Li54LmI 126171
+4LmB4LiV4LiB 126172
+16jXm9eZ150= 126173
+IO2WiQ== 126174
+w6Np 126175
+2YPYsdip 126176
+4q0= 126177
+7ZA= 126178
+440= 126179
+4YE= 126180
+4q4= 126181
+4qU= 126182
+7K4= 126183
+4L8= 126184
+4r8= 126185
+4YI= 126186
+4aQ= 126187
+4qA= 126188
+7Z8= 126189
+8JCN 126190
+8JCw 126191
+8J2G 126192
+8J+I 126193
+INei15w= 126194
+INi52YY= 126195
+INmF2Lk= 126196
+INeW15Q= 126197
+INmF2Kc= 126198
+IG3DoA== 126199
+IGThu6U= 126200
+4buHYw== 126201
+0LDRhQ== 126202
+c8Sx 126203
+7ZWY6rOg 126204
+INeV15E= 126205
+INCf0L4= 126206
+15XXqteo 126207
+INmE2YU= 126208
+INeV15w= 126209
+44GX44Gm44GE44KL 126210
+INee15k= 126211
+INio2YrZhg== 126212
+0LfQsA== 126213
+INmD2KfZhg== 126214
+INeU15nXlA== 126215
+64WE 126216
+15DXlQ== 126217
+0LTQuA== 126218
+INC/0LXRgNC1 126219
+ZMSx 126220
+INec16k= 126221
+INep154= 126222
+44GM44GC44KL 126223
+44GE44GE 126224
+0YDQtQ== 126225
+16fXlQ== 126226
+0LjQu9C4 126227
+0LzQtQ== 126228
+2YrYqg== 126229
+44Gn44GC44KL 126230
+INCy0L4= 126231
+4LmD4Lir4Lih 126232
+4LmD4Lir4Lih4LmI 126233
+INep15E= 126234
+IOC5guC4lOC4og== 126235
+2YrZhw== 126236
+44Gn44GZ44GM 126237
+44Go44Gv 126238
+16jXlQ== 126239
+IOC4i+C4tuC5iOC4hw== 126240
+44Gn44GN44KL 126241
+0LzQvg== 126242
+4LmA4Lie4Li34LmI4Lit 126243
+16bXlQ== 126244
+15jXlQ== 126245
+7JWI 126246
+IGjhu40= 126247
+4LmA4LiH4Li04LiZ 126248
+INin2YTYqA== 126249
+IOC4oeC4tQ== 126250
+66y8 126251
+0YHQtQ== 126252
+65Ok7J20 126253
+IOunkA== 126254
+IGzhu5s= 126255
+YcWC 126256
+15fXkdeo 126257
+IGThu7E= 126258
+2YrYqw== 126259
+IHRo4buL 126260
+4LiB4LmI4Lit4LiZ 126261
+INeR15vXnA== 126262
+44G4 126263
+44Go5oCd44GE44G+44GZ 126264
+4bqjbmg= 126265
+4Lii4Liy 126266
+2YHYpw== 126267
+4Liq4Li1 126268
+4LiV4Liy 126269
+67KV 126270
+44Oq44O8 126271
+4Lij4Liy4LiE4Liy 126272
+INeV15zXkA== 126273
+44Go44GT44KN 126274
+4LmA4Lil4Li34Lit 126275
+ZGnEn2k= 126276
+2YjYp9mG 126277
+INec15TXqg== 126278
+4Lij4Lin4Lih 126279
+16TXmded 126280
+4Lic4Lih 126281
+0LbQuA== 126282
+Y8Sx 126283
+0YDQvtC0 126284
+IGthcsWfxLE= 126285
+15LXlQ== 126286
+44Gr44Gk 126287
+44Gr44Gk44GE44Gm 126288
+csOg 126289
+15nXldeq16g= 126290
+IOyGjA== 126291
+16fXlA== 126292
+0YHRgtCy0L4= 126293
+44GR44Gp 126294
+Z8Op 126295
+4LiU4LmJ4Liy4LiZ 126296
+55qE44Gr 126297
+INmK2YXZg9mG 126298
+7IaN 126299
+2YrZgw== 126300
+4LmE4Lin4LmJ 126301
+0YHQutC40Lk= 126302
+w6xt 126303
+INec15DXl9eo 126304
+4Lit4Liy4Lir4Liy4Lij 126305
+IOC5gOC4ng== 126306
+4Lij4Liy4Liw 126307
+4Lil4Li54LiB 126308
+0YHRgtCw 126309
+IOycoA== 126310
+2YLZiNmE 126311
+0LHQvtGA 126312
+0YHQutC+0LPQvg== 126313
+4Lir4Lil4Lix4LiH 126314
+4LiC4LmI4Liy4Lin 126315
+4LmA4Lih4Li34Lit4LiH 126316
+6rCB 126317
+dMOg 126318
+2YrZitmG 126319
+2LnYsdi2 126320
+67Cp 126321
+IOuPmQ== 126322
+IOC5gOC4mw== 126323
+IOC5gOC4m+C5h+C4mQ== 126324
+w6dp 126325
+bGnEn2k= 126326
+7JeQ6rKM 126327
+44K/44O8 126328
+INec16o= 126329
+16TXldeq 126330
+4LiC4Lit 126331
+2LHYsw== 126332
+7KCQ 126333
+4Lic4LmI4Liy4LiZ 126334
+0YTQuA== 126335
+2KzZhg== 126336
+7KKF 126337
+INeU16Q= 126338
+IG5nbw== 126339
+4buLYQ== 126340
+IHThu5U= 126341
+IOq3uOumrA== 126342
+4LmA4Lih4Li34LmI4Lit 126343
+2LDZg9ix 126344
+7JaR 126345
+7Jet 126346
+15jXnA== 126347
+a8Sx 126348
+INi52YXZhA== 126349
+INi52YbYrw== 126350
+4LiL4Li34LmJ4Lit 126351
+IOqxsA== 126352
+0LLQtQ== 126353
+csO8 126354
+4LmA4Lit4Liy 126355
+4Liq4LmM 126356
+4LiI4LiZ 126357
+16HXqg== 126358
+IGdp4bqj 126359
+44KL44Go 126360
+4LiB4Liz4Lil4Lix4LiH 126361
+0L3QtdC5 126362
+4LiI4Lij4Li0 126363
+4LiI4Lij4Li04LiH 126364
+IOuN 126365
+IOuNlA== 126366
+4LiE4LmI4Liw 126367
+w6xu 126368
+IHPDvHJl 126369
+IHF1eQ== 126370
+4Lia4Liy4LiH 126371
+5Y+W44KK 126372
+16jXlw== 126373
+15HXqg== 126374
+44GM44GC44KK44G+44GZ 126375
+16jXqQ== 126376
+7JeQ64qU 126377
+INeQ16TXqdeo 126378
+YXnEsQ== 126379
+44GM44KJ 126380
+2K3YqA== 126381
+0LDQvdGB 126382
+2LPZiA== 126383
+INC/0YDQtQ== 126384
+2K/ZiA== 126385
+44Gr44KI 126386
+4LmA4LiB4Lih 126387
+4Liq4Li54LiH 126388
+bWFrdA== 126389
+bWFrdGFk 126390
+bWFrdGFkxLFy 126391
+IMO2bmVt 126392
+15nXnteZ150= 126393
+0LHQvg== 126394
+2YjZitip 126395
+4Lij4Li54Lib 126396
+4LmC4Lil4LiB 126397
+2YXZiti5 126398
+0YHRgtGD0L8= 126399
+4LmC4Lit 126400
+2K/ZitmG 126401
+7KSR 126402
+44GX44GP 126403
+4LmA4Liq4Li14Lii 126404
+0LLRiw== 126405
+2YXYqg== 126406
+7ZiE 126407
+44OQ44O8 126408
+2KfYtA== 126409
+16fXoQ== 126410
+IHThu6U= 126411
+4Lil4LiU 126412
+2YHYqQ== 126413
+7ZGc 126414
+2LHYrA== 126415
+a8WCYWQ= 126416
+IMWfZXk= 126417
+INij2YU= 126418
+IOC5gOC4oQ== 126419
+INio2YQ= 126420
+0YHQutCw0Y8= 126421
+44Go44Gu 126422
+IOyLpA== 126423
+4bqlbQ== 126424
+4Lir4LmJ4Lit4LiH 126425
+4LiK4Lih 126426
+ZMO8 126427
+IMOnZWs= 126428
+IOqzoA== 126429
+15LXkQ== 126430
+4LiK4Li14Lin4Li0 126431
+4LiK4Li14Lin4Li04LiV 126432
+2YHYttmE 126433
+4Liv 126434
+w6fEsQ== 126435
+INio2LQ= 126436
+INmH2YbYpw== 126437
+44GN44G+44GX44Gf 126438
+dMO8 126439
+IOyYgQ== 126440
+IFTDvHJr 126441
+0LrRgg== 126442
+16TXqNeh 126443
+44Go44GE44GG44GT44Go 126444
+7ZSE 126445
+4LmB4Lij4LiB 126446
+16jXldef 126447
+IGFyYXM= 126448
+157XpteQ 126449
+IHThu4k= 126450
+2LPYpw== 126451
+4Lie4Lit 126452
+INin2YTZhdit 126453
+44Ok 126454
+INin2YTYp9iz2Ko= 126455
+2YHZhg== 126456
+15nXnteU 126457
+2LHYqg== 126458
+44Go44KC 126459
+INC90LDRgQ== 126460
+0L/RgNC4 126461
+INeX15U= 126462
+0LjQu9Cw 126463
+2YrYtA== 126464
+IGfDtno= 126465
+INeR16DXmQ== 126466
+xLFtxLE= 126467
+INGC0LXRhQ== 126468
+IGjhu5k= 126469
+2LrYsQ== 126470
+0LrQvtC9 126471
+2KfYrdiq 126472
+IOC4ng== 126473
+4Lit4Lit4LiZ 126474
+4Lit4Lit4LiZ4LmE4Lil 126475
+4Lit4Lit4LiZ4LmE4Lil4LiZ4LmM 126476
+0YXQvg== 126477
+0Y/Qsg== 126478
+4LmB4Liq4LiU 126479
+4LmB4Liq4LiU4LiH 126480
+4LmA4Lie4Li14Lii4LiH 126481
+0YLQvtCy 126482
+2KfZig== 126483
+INeU15M= 126484
+INeV15s= 126485
+44KJ44GE 126486
+15XXpNef 126487
+IOu2iA== 126488
+4Lil4Lit4LiH 126489
+2LfYp9mE 126490
+INC90Lg= 126491
+INmF2LPYqg== 126492
+4bq/Yw== 126493
+INep15s= 126494
+IOuVjOusuA== 126495
+4Lin4Lix4LiZ4LiX4Li14LmI 126496
+15nXnNeT 126497
+2K3Ypw== 126498
+0LXRhg== 126499
+IGPhu6k= 126500
+15PXldeo 126501
+INmF2K0= 126502
+16jXm9eR 126503
+2KjZiti5 126504
+0L3QuNC4 126505
+INin2YTYo9mI2YQ= 126506
+4LiE4Lin4Lij 126507
+44Go5oCd44GG 126508
+INCh0L4= 126509
+2KfYptmK2Kk= 126510
+2LHYp9ih 126511
+0L7RgdC+0LE= 126512
+INio2KPZhg== 126513
+16LXldeT 126514
+INGC0LU= 126515
+44GT44GG 126516
+0YHRgtGA0LA= 126517
+0LDQudC9 126518
+IHPDtno= 126519
+2KrZhtin 126520
+4Lit4Li0 126521
+4bq3cA== 126522
+IOyVhOuLiA== 126523
+7ZWt 126524
+INeo15DXqQ== 126525
+IOC5hOC4lOC5iQ== 126526
+INeS15M= 126527
+INeh16TXqA== 126528
+0L7QsdGJ0LU= 126529
+INmI2KU= 126530
+YWRhxZ8= 126531
+44Gh44KH 126532
+16fXldec 126533
+0YDQtdC3 126534
+IGTDvMWfw7xu 126535
+INeR15DXng== 126536
+IOyWtOuW 126537
+16LXqNeR 126538
+0L3QtdC1 126539
+INGB0YLRgNCw0L0= 126540
+2LPYp9mG 126541
+eW7EsQ== 126542
+INin2YTYsdim2YrYsw== 126543
+44GX44Gq 126544
+INeg16o= 126545
+44Gr44Gq44Gj44Gf 126546
+Z8O8 126547
+5Y+X44GR 126548
+15zXqg== 126549
+7KCI 126550
+64qU642w 126551
+2K7Zitix 126552
+4LiV4LmJ4Lit4LiH4LiB4Liy4Lij 126553
+INmE2KPZhg== 126554
+IGNo4buL 126555
+2YjYqQ== 126556
+4LmD4Liq 126557
+67aA7YSw 126558
+7ZWY66m0 126559
+4buvdQ== 126560
+4LmA4Lir4Lih4Li34Lit4LiZ 126561
+0LHQtdGA 126562
+IOydtOyaqQ== 126563
+INGB0LXQsQ== 126564
+d2nEmWtz 126565
+INeg16I= 126566
+0YLRg9GA 126567
+IG5naMSp 126568
+16nXldeY 126569
+dGnEn2k= 126570
+IGRlxJ9p 126571
+15DXkQ== 126572
+INee154= 126573
+44OX44Ot 126574
+d2HFgg== 126575
+4LiI4Li24LiH 126576
+2K7Yr9mF 126577
+15DXnQ== 126578
+xLHFn8Sx 126579
+Y3rEhQ== 126580
+16jXkw== 126581
+INGA0YPQsQ== 126582
+2K7YsdmJ 126583
+44Gu5pa5 126584
+INC00LXQvdGM 126585
+15fXmded 126586
+0LXRgtC1 126587
+64Kc 126588
+15DXkg== 126589
+16LXldeo 126590
+67OE 126591
+5ZCM44GY 126592
+44Ky 126593
+16jXmg== 126594
+15XXqdeQ 126595
+7Jyh 126596
+2KfYrg== 126597
+16bXmdeU 126598
+4buxYQ== 126599
+44GI44Gm 126600
+16nXlNeV 126601
+0LDQvdGC 126602
+4Lil4Liy4LiU 126603
+0LjQvdCz 126604
+66Gg 126605
+2KfYudiv 126606
+2YjYs9i3 126607
+INCy0L7Qvw== 126608
+INCy0L7Qv9GA0L7RgQ== 126609
+2YXZitmG 126610
+4LiE4LiH 126611
+15nXqNeZ150= 126612
+Y8Ozdw== 126613
+6rKp 126614
+IOq3uOufsA== 126615
+IOynhA== 126616
+INep15zXlA== 126617
+4LmA4Lij4Li04LmI4Lih 126618
+4LiK4Lit4Lia 126619
+0LTQtdGC 126620
+0Y7RidC40YU= 126621
+4Lia4Lit4LiB 126622
+5oCd44GE 126623
+2LnZitiv 126624
+16HXng== 126625
+15LXmdei 126626
+16bXkw== 126627
+2KjYp9iq 126628
+IOuUsOudvA== 126629
+4LiI4Lix4LiH 126630
+44Gg44GR44Gn 126631
+16LXmdeo 126632
+INGH0LXQuw== 126633
+INGH0LXQu9C+0LI= 126634
+INGH0LXQu9C+0LLQtdC6 126635
+44OD44OB 126636
+4LmA4LiB4Li14LmI4Lii4Lin 126637
+4LiU4Li0 126638
+INek16I= 126639
+15nXnteZ 126640
+67CY 126641
+2K7Yp9ix 126642
+15HXmdeq 126643
+16LXmded 126644
+w7x5b3I= 126645
+44KB44Gm 126646
+0LrQu9Cw0LQ= 126647
+IOC4iOC4suC4gQ== 126648
+4LmA4LiE4Lii 126649
+4Liq4Lit4LiH 126650
+4LmB4LiE4LmI 126651
+4bqrdQ== 126652
+4Lir4LiZ4Lix4LiH 126653
+16nXnNeV150= 126654
+2KfZhtmK2Kk= 126655
+5Ye65Lya 126656
+5Ye65Lya44GE 126657
+4Lig4Liy4Lii 126658
+4Lia4Liy4LiX 126659
+4LiK4Liy4Lin 126660
+bXXFnw== 126661
+INec16fXkdec 126662
+44K344Oj 126663
+IMSwxZ8= 126664
+15LXk9eV15w= 126665
+2KzYudmE 126666
+67OA 126667
+4Lii4Li04LmI4LiH 126668
+4LiZ4Liy4Lii 126669
+4LiZ4Li14LmI 126670
+4Lin4Li04LiY4Li1 126671
+44KJ44Gq44GE 126672
+66CI 126673
+IOusuOygnA== 126674
+IOC4gQ== 126675
+4LiX4Liz4LiH4Liy4LiZ 126676
+4LmA4Lin4LmH4Lia 126677
+0YTQtQ== 126678
+5qW944GX 126679
+4Liq4Liz4LiE 126680
+4Liq4Liz4LiE4Lix4LiN 126681
+2LHZhQ== 126682
+44GV44KM44Gm 126683
+INC+0LHQu9Cw 126684
+16jXkNeZ 126685
+4Lir4Lih4LiU 126686
+2YbZitip 126687
+0LvQuNC9 126688
+IGXEnw== 126689
+aXRpbQ== 126690
+66C5 126691
+2LXYp9mE 126692
+xZts 126693
+4Lic4Li04LiU 126694
+44Oe44Oz 126695
+5YWl44KM 126696
+4LmA4LiV4Lit4Lij4LmM 126697
+2KfYsdmK 126698
+INCm 126699
+ZMO8cg== 126700
+4Liq4Lin4Lii 126701
+66a9 126702
+2LHZg9ip 126703
+IGjDow== 126704
+15nXqteU 126705
+4LiC4LiZ4Liy 126706
+4LiC4LiZ4Liy4LiU 126707
+4LiI4Liz4LiZ 126708
+4LiI4Liz4LiZ4Lin4LiZ 126709
+16nXlden 126710
+INC00L7QvA== 126711
+7LGF 126712
+44GL44GR 126713
+16TXldec 126714
+4LiK4Liy4Lii 126715
+0YHQvNC+0YLRgA== 126716
+0YHQu9GD0LY= 126717
+16nXkNec 126718
+0LrRgNGL0YI= 126719
+IOyemA== 126720
+6auY44GE 126721
+INGA0YPQug== 126722
+2YbYtQ== 126723
+0LTQsNCy 126724
+xrDhu6E= 126725
+xrDhu6FuZw== 126726
+2LHYp9mF 126727
+15nXoNeZ150= 126728
+44Op44O8 126729
+64Sk 126730
+INiq2Lk= 126731
+bGtl 126732
+5aW944GN 126733
+5oyB44Gh 126734
+IOunjg== 126735
+IHnDvGs= 126736
+INGB0L7RgdGC0LDQsg== 126737
+0LXQvdGC0YA= 126738
+cGXFgg== 126739
+4LmA4Lib4Lil4Li14LmI4Lii 126740
+4LmA4Lib4Lil4Li14LmI4Lii4LiZ 126741
+7Y+J 126742
+44KE44GZ 126743
+15fXlg== 126744
+15HXqNeU 126745
+66Oo 126746
+7JSA 126747
+2KjYrdir 126748
+4LmA4LiV4LmH 126749
+w7N3aQ== 126750
+2KjZhw== 126751
+44GN44G+44GZ 126752
+INei154= 126753
+15LXldec 126754
+0LXQt9C0 126755
+2YrZgdip 126756
+4Liq4LiZ4LmD4LiI 126757
+INeq15w= 126758
+0Y/RiQ== 126759
+INiz2YY= 126760
+INmI2KfYrdiv 126761
+INGB0Lw= 126762
+bGFkxLE= 126763
+xLFsZA== 126764
+15nXqNeq 126765
+4Li14Lii4LiZ 126766
+16rXl9eq 126767
+INC20LjQtw== 126768
+4Lie4Lix 126769
+4Lie4Lix4LiS 126770
+4Lie4Lix4LiS4LiZ4Liy 126771
+4LiK4Li0 126772
+2KfYrtmE 126773
+44Gj44Gm44GE44Gf 126774
+4Lij4Lix4LiQ 126775
+44KB44KL 126776
+4LmC4LiB 126777
+IFThu5U= 126778
+IGhha2s= 126779
+2LHZgQ== 126780
+7KCA 126781
+0YHQvtCx 126782
+44Gq44GR44KM44Gw 126783
+2YfZiA== 126784
+IOuylQ== 126785
+44KG 126786
+INin2YTYs9i52YjYrw== 126787
+INeQ16rXqA== 126788
+2KfYug== 126789
+INec15M= 126790
+4LmB4LiV 126791
+4LmB4LiV4LmI4LiH 126792
+7YyM 126793
+0YPQv9C40YLRjA== 126794
+4Lie4Li34LmJ4LiZ4LiX4Li14LmI 126795
+15HXqteZ 126796
+4LmH4LiB 126797
+xYJhdA== 126798
+IOqwnOyduA== 126799
+7KCV67O0 126800
+0YLQsNC7 126801
+IGfDvHZlbg== 126802
+IMSwbA== 126803
+IOqwgQ== 126804
+INio2Ko= 126805
+157Xldeg15Q= 126806
+INin2YTYrdmD2YjZhQ== 126807
+2YLYp9iq 126808
+4LmB4LiB4LmI 126809
+4Lir4Liy4LiB 126810
+0L3RjA== 126811
+4Lib4Lij4Lix4Lia 126812
+4Lih4Liy4LiT 126813
+INC90LXRgdC6 126814
+INi2 126815
+4Liq4Lih4Lix 126816
+4Liq4Lih4Lix4LiE4Lij 126817
+44GM44GC44KK 126818
+0LzQtdGB0YI= 126819
+INeQ16bXnA== 126820
+INC60L7QvNC/0LDQvdC4 126821
+16HXqA== 126822
+2YrZhdip 126823
+INGF0L7RgNC+ 126824
+INGF0L7RgNC+0Yg= 126825
+INeZ15XXkw== 126826
+w7xz 126827
+15LXmdep 126828
+4Lia4LiX 126829
+2KrZhti4 126830
+4Lin4Liy4LiH 126831
+4Lih4Lir4Liy 126832
+INeb15XXnA== 126833
+4LiC4LmJ4Liy4LiH 126834
+67Cc 126835
+0LPQvtC0 126836
+0LTQsNC9 126837
+44GL44KC44GX44KM44G+44Gb44KT 126838
+44GT44Gh44KJ 126839
+44OQ44Kk 126840
+ZWNlxJ9p 126841
+2K/Zitiv2Kk= 126842
+2YbZiQ== 126843
+IOuLpOydjA== 126844
+4Lin4Li1 126845
+2LrYpw== 126846
+0LvQuNC3 126847
+4LmA4LiU4Li0 126848
+4LmA4LiU4Li04Lih 126849
+INmK2LPYqg== 126850
+IHnEsWzEsQ== 126851
+a2/FhA== 126852
+44Gn44GX44KH44GG44GL 126853
+44GC44Gq 126854
+44GC44Gq44Gf 126855
+0YbQtdC9 126856
+INmI2LI= 126857
+15DXmdep 126858
+4LmI4Lit 126859
+2LHYrQ== 126860
+6rSR 126861
+0YDQsNGB0YI= 126862
+INeU15w= 126863
+44GX44Gm44KC 126864
+157XqNeb 126865
+157XqNeb15Y= 126866
+6YGV44GE 126867
+44Gf44GP 126868
+INGB0YPQtA== 126869
+0LLQtdGB0YLQuA== 126870
+IO2VhOyalA== 126871
+44OV44Kn 126872
+0YLQtdC70YzQvdC+ 126873
+4LmA4Lie4Li34LmI4Lit4LiZ 126874
+xYJ1xbw= 126875
+4LmA4LiU4Li04LiZ4LiX4Liy4LiH 126876
+16nXldeo 126877
+INee15M= 126878
+15XXotec 126879
+2YTYp9mF 126880
+4LmE4LiL 126881
+0LvQtdC5 126882
+0LrRg9GA 126883
+4bqi 126884
+4LiX4Liy4LiZ 126885
+7KeR 126886
+INCz0L7RgNC+0LQ= 126887
+16jXoQ== 126888
+15zXldeS 126889
+bWFzxLFuxLE= 126890
+INC70YPRhw== 126891
+4Lil4LmI4Liy 126892
+7Jq4 126893
+16nXmA== 126894
+INCY0L0= 126895
+7YKk 126896
+2YjZhNin 126897
+7JWg 126898
+INij2YrYttin 126899
+2YPYp9ix 126900
+INin2YTYqti5 126901
+4Liq4Li54LmI 126902
+44K8 126903
+15HXmdeQ 126904
+4Lii4LiB 126905
+INit2YI= 126906
+2LHYqNmK 126907
+44GY44KD44Gq44GE 126908
+4Lij4Lix4LiB4Lip4Liy 126909
+0YXQvtC00LjRgg== 126910
+4LiV4Lit4Lia 126911
+16DXmNeZ 126912
+INin2YTZhdis 126913
+2KrZhdi5 126914
+0L7QstCw0YLRjA== 126915
+2YTZitmG 126916
+15nXnteV16o= 126917
+IG3DuQ== 126918
+bsSZ 126919
+INiv2Yo= 126920
+15vXqdeZ15U= 126921
+IGhpw6c= 126922
+65GQ 126923
+2YjYp9ih 126924
+2YjYtw== 126925
+INin2YTYqNmE 126926
+4LmB4Lih4LmJ 126927
+16fXldeq 126928
+2YjYrNiv 126929
+5aeL44KB 126930
+2YrYptip 126931
+IOunpA== 126932
+2LXYqNit 126933
+16TXkA== 126934
+0LPQvtGA 126935
+16HXlA== 126936
+2KjZitmC 126937
+4Lii4Liy4LiB 126938
+INC90LDQtA== 126939
+2YrZkQ== 126940
+INio2Yg= 126941
+16HXldeo 126942
+2YXZg9in2YY= 126943
+16jXkQ== 126944
+15LXlg== 126945
+16bXqg== 126946
+YmlsaXQ= 126947
+0LvQsNCz 126948
+IE5nbw== 126949
+15DXldeo 126950
+4LiV4LiZ 126951
+7Yq5 126952
+4LiX4Li14LmI4LiU4Li1 126953
+4Lib4Lij4Liw4LiI4Liz 126954
+0L7QstCw0L3QuNC1 126955
+44GE44Gk 126956
+44OD44Kv44K5 126957
+5ZCI44KP 126958
+5ZCI44KP44Gb 126959
+15nXoNeV15k= 126960
+4bqheQ== 126961
+2KvZgg== 126962
+INC/0YDQvtCx 126963
+INC/0YDQvtCx0LvQtdC8 126964
+xZ9laA== 126965
+xZ9laGly 126966
+2LnYp9iv2Kk= 126967
+2KfZhtmI2YY= 126968
+4LiV4Lix4Lin4LmA4Lit4LiH 126969
+7LaV 126970
+xLFsYW4= 126971
+0LHQsNC9 126972
+44Oz44OJ 126973
+4LiI4Li1 126974
+INeU16nXoNeZ 126975
+0L/QvtGC 126976
+15XXnNeZ150= 126977
+4Lil4Lix4Lia 126978
+INGN0YLQuA== 126979
+15HXp9ep 126980
+67mE7Iqk 126981
+4Lit4Lii4LmI4Liy4LiH4LmE4Lij 126982
+15nXnNeZ 126983
+4LmD4LiK4LmI 126984
+INin2YTZg9mE 126985
+44Oa44O844K4 126986
+2LXYqQ== 126987
+0YLQuNGA 126988
+44KT44Gp 126989
+0LfRi9C6 126990
+d3nFvA== 126991
+2YfZig== 126992
+INmF2YTZig== 126993
+INCy0LjQtNC1 126994
+2LjYp9mF 126995
+2K/Yp9mI2YQ= 126996
+157XqteZ 126997
+IHPEsWs= 126998
+4LmA4LiV4Li04Lih 126999
+44Ki44Kk 127000
+0LrQsNGF 127001
+16bXmdec 127002
+4LmA4LiK4LmI4LiZ 127003
+0LzQsNCz 127004
+0LzQsNCz0LDQtw== 127005
+0LzQsNCz0LDQt9C40L0= 127006
+4Lib4Lix 127007
+4Lib4Lix4LiI 127008
+INep15nXqNeV16o= 127009
+4Li14Lii4Lih 127010
+44OW44Or 127011
+INiv2YjZhA== 127012
+16fXqNeZ150= 127013
+2YfZjw== 127014
+0L7QstC+ 127015
+IMO8cmV0 127016
+2K/ZiNmG 127017
+4LmB4LiZ4Lin 127018
+4LmA4LiZ4Li34LmJ4Lit 127019
+INGE0L7Rgg== 127020
+44OY 127021
+44Gk44GL 127022
+0Y/RgQ== 127023
+IO2VmOuCmOuLmA== 127024
+2KfYpti5 127025
+INC/0LvQsNGC 127026
+7JiI 127027
+IGRvc3TEmXA= 127028
+2YjYrNmH 127029
+INeU15fXmQ== 127030
+16DXmden 127031
+0LTQtdC5 127032
+7ZuE 127033
+xLF5 127034
+2KjYrdix 127035
+4LmA4Liq4Lij4Li04Lih 127036
+INec15I= 127037
+2LDZh9io 127038
+2KzZitmE 127039
+2LHZg9iy 127040
+IOuF 127041
+IOuFuA== 127042
+16TXmdec15U= 127043
+44G+44Ga 127044
+aXJpxZ8= 127045
+INmD2YrZgQ== 127046
+INeR16Y= 127047
+IOq1kA== 127048
+0YDQvtGB0YE= 127049
+INi02Yo= 127050
+IGnDp2Vy 127051
+15LXldeR15Q= 127052
+0LzQtdC90L3Qvg== 127053
+16LXkdeZ16g= 127054
+15XXnteU 127055
+44KJ44GX44GE 127056
+44G8 127057
+0YnQuNC9 127058
+6LK344GE 127059
+2KzZhdmI2LnYqQ== 127060
+IGTDtm5lbQ== 127061
+INeR15DXqA== 127062
+0LLQtdGB0YI= 127063
+15XXqNeV16o= 127064
+2LPZgQ== 127065
+4LmB4LiX4LiZ 127066
+INC00L7QutGD0LzQtdC90YI= 127067
+INin2Yo= 127068
+2KzYp9mG 127069
+16bXldei15k= 127070
+INC+0YHQvtCx 127071
+INin2YTZhdiz 127072
+0YDQsNCx 127073
+4Lig4Li5 127074
+4LiU4Liy4Lin 127075
+0LvQtdC60YI= 127076
+2LnZgg== 127077
+15XXk9eV16o= 127078
+IG9sdQ== 127079
+IG9sdcWfdHVy 127080
+44G+44G+ 127081
+0LXQtNC40L0= 127082
+4LmA4Lit4LiB 127083
+44K144Kk 127084
+64SI 127085
+2LfZhtmK 127086
+2LfZgtip 127087
+INCg0LDQtw== 127088
+2YTZkQ== 127089
+0YfQtdC8 127090
+INec15g= 127091
+4Liq4Lix4LmI4LiH 127092
+2LPYsdin2KbZitmE 127093
+INek16jXmNeZ 127094
+0LTQtdGB0Yw= 127095
+INeg15s= 127096
+2KfZhtio 127097
+2YrYp9ip 127098
+2YXYqNix 127099
+IGvEsQ== 127100
+4Lib4LiP 127101
+4Lib4LiP4Li0 127102
+4Lia4Lix4LiV4Li0 127103
+16DXqteZ 127104
+7Iah 127105
+2LHYp9io 127106
+4LmD4LiV 127107
+4LmD4LiV4LmJ 127108
+15nXoNeq 127109
+2YjZitix 127110
+INeU157XmQ== 127111
+0LXQudGH0LDRgQ== 127112
+16fXldeR 127113
+2K/Ysdin2LM= 127114
+INmF2YI= 127115
+2LHZitmG 127116
+2K7Yp9i1 127117
+44GK6YeR 127118
+INis2K/Ypw== 127119
+44GG44Gh 127120
+64W4 127121
+xLFyxLFt 127122
+5qeY 127123
+44Gr5a8= 127124
+44Gr5a++ 127125
+0YbQtdCy 127126
+IHZhcmQ= 127127
+INCQ0L0= 127128
+ZcSf 127129
+0YHRgtCy0LXQvdC90L4= 127130
+0Kg= 127131
+2LPYrw== 127132
+4LiB4Li4 127133
+4LmB4Lic4LiZ 127134
+4Lij4Li54LmJ4Liq 127135
+4Lij4Li54LmJ4Liq4Li24LiB 127136
+2KfYqtit2KfYrw== 127137
+0ZHRgg== 127138
+15fXlden 127139
+44GZ44GQ 127140
+2LfZhNin2YI= 127141
+INen15XXkw== 127142
+4LmD4LiK4LmJ4LiH 127143
+4LmD4LiK4LmJ4LiH4Liy4LiZ 127144
+44O844K/ 127145
+IHPDvHI= 127146
+0YDQvtC6 127147
+67OR 127148
+4Liq4Lih4Liy4LiK 127149
+4Liq4Lih4Liy4LiK4Li04LiB 127150
+44OV44Os 127151
+6L6844G/ 127152
+44K744Oz 127153
+IOqwgOyngA== 127154
+4Lic4LmJ4Liy 127155
+0Y3RgtC+0LzRgw== 127156
+0LjRgtC10Ls= 127157
+4Lig4Lix 127158
+4LiR 127159
+44OW44Op 127160
+15vXqteV15E= 127161
+16DXnQ== 127162
+0LXQvdC90YvQtQ== 127163
+16LXqNeb16o= 127164
+IOyC 127165
+IOyCtA== 127166
+4LiC4LmJ4Liy 127167
+16DXldeh 127168
+44Os44OT 127169
+0YDQtdGB 127170
+4LmA4Lil4LiC 127171
+2KvYp9mE 127172
+7JeG 127173
+INGH0LDRgdGC 127174
+4Liy4Lio 127175
+44Oq44Ki 127176
+dcOn 127177
+15nXm9eV16o= 127178
+4Lil4LmJ4Liy4LiZ 127179
+acOr 127180
+44K444Kn 127181
+4LiI4Lit 127182
+2YjYrdiv 127183
+15nXpteV15E= 127184
+INeR16nXnA== 127185
+0L7QutC+ 127186
+2LbYqQ== 127187
+2LDYsQ== 127188
+INGD0LQ= 127189
+xLBM 127190
+15XXpteZ150= 127191
+15bXntef 127192
+4Lib4LiB 127193
+7ZWZ6rWQ 127194
+2LPYp9mF 127195
+4LmE4LiU 127196
+4Lil4Liw4LmA4Lit 127197
+4Lil4Liw4LmA4Lit4Li14Lii 127198
+4Lil4Liw4LmA4Lit4Li14Lii4LiU 127199
+4bqjeQ== 127200
+0LDRhtC40L7QvQ== 127201
+44K544Kv 127202
+16TXldeh 127203
+4Lij4LmI4Liy4LiH 127204
+0LXQvdC90YvQuQ== 127205
+2LnZhg== 127206
+2LnZhNmG 127207
+2KfYptmB 127208
+ZMSZ 127209
+2KTZiNmE 127210
+15zXldeV 127211
+INeR16nXkQ== 127212
+5LuK5Zue 127213
+INin2YTYrNmG 127214
+2K/Yp9iv 127215
+d2HEhw== 127216
+44Oq44Oz 127217
+IOyekOyLoA== 127218
+2KfZhtmK2Kc= 127219
+44Oh44Oq 127220
+2YTZiNmG 127221
+4LiX4LmI4Lit4LiH 127222
+4LiX4LmI4Lit4LiH4LmA4LiX4Li14LmI4Lii4Lin 127223
+2KfZgdmK 127224
+INC70LjRiA== 127225
+2YXZitip 127226
+0L7RgtCy0LXRgg== 127227
+0YfQuNC9 127228
+w4o= 127229
+44Oh44Oz 127230
+5a6f 127231
+6Zqb44Gr 127232
+INGA0LDQuQ== 127233
+44Km44Oz 127234
+15nXqNeV16k= 127235
+15nXqNeV16nXnNeZ150= 127236
+4Lih4Liw 127237
+IGFyYQ== 127238
+0LrQsNC30LDRgtGM 127239
+4LiV4Lix4LiU 127240
+0YPRjtGC 127241
+IMO8c3Q= 127242
+15LXldeR 127243
+15LXldeR15XXqg== 127244
+bWFsxLE= 127245
+0LXQs9C+0LQ= 127246
+0LXQs9C+0LTQvdGP 127247
+2KfZgdmC 127248
+4LiK4LmI4Lit4LiH 127249
+IMO2emVsbGlr 127250
+15nXpteV16g= 127251
+IG1pxJlk 127252
+IGlsacWf 127253
+INC90LDRhdC+0LQ= 127254
+16LXlteo 127255
+15zXm9eq 127256
+2YbYqtin2Kw= 127257
+INGB0LXQvA== 127258
+4LiI4LmI4Liy4Lii 127259
+4LiV4Lij4Lin 127260
+4LiV4Lij4Lin4LiI 127261
+16TXqNeV 127262
+4LiC4Lix4Lia 127263
+44Ge 127264
+INC/0LvQvg== 127265
+0LrQvtC70Yw= 127266
+157XoteY 127267
+7ZWY7Iuc 127268
+asSFY2U= 127269
+2YbYp9mG 127270
+4Lil4Li14LiB 127271
+0L3Rg9GC 127272
+INC+0LHRgNCw0Lc= 127273
+2YPYqNix 127274
+INin2YTZiNi32YY= 127275
+44GV44Gb44Gm 127276
+2YLYp9ih 127277
+157Xk9eZ16A= 127278
+ecO8 127279
+16TXmdeq 127280
+16DXldef 127281
+2YXZhti4 127282
+4Lir4LiZ4Lix4LiB 127283
+7J6I 127284
+44Kr44O844OJ 127285
+2LnZhtmK 127286
+0L/QvtC0 127287
+2LbYp9ih 127288
+4LiZ4LiV4LmM 127289
+157Xqdek 127290
+4Lin4LmM 127291
+16jXlden 127292
+4Liq4Li34LmI4Lit 127293
+16TXp9eZ15M= 127294
+44Gq44KJ44Gq44GE 127295
+IOyXrOufrA== 127296
+2YTYrA== 127297
+0YnQuNGC 127298
+44OD44K3 127299
+2YTZitiz 127300
+INmE2YXYpw== 127301
+7KCR 127302
+15HXmdef 127303
+44OB44Kn 127304
+IGfDvMOn 127305
+IGNo4bup 127306
+15XXpteQ 127307
+16fXqNeR 127308
+4LmC4Lie 127309
+0L7Rh9C90L4= 127310
+16HXp9eZ 127311
+16nXnNed 127312
+2LXYsdmB 127313
+IEzDoA== 127314
+16LXmdeq 127315
+4bu3 127316
+4LmC4Lit4LiB 127317
+4LmC4Lit4LiB4Liy 127318
+4LmC4Lit4LiB4Liy4Liq 127319
+INeU15PXkdeo 127320
+4LiZ4Lix4LmI4LiZ 127321
+2LLYsQ== 127322
+0L3QsNC60L4= 127323
+7ZqN 127324
+44KC44Gh 127325
+44KC44Gh44KN 127326
+44KC44Gh44KN44KT 127327
+2KfZhdiq 127328
+2LnYr9in2K8= 127329
+0LjQvdGL 127330
+xYJ5dw== 127331
+4LiE4LiT4Liw 127332
+4LiX4Liw 127333
+a3TDtnI= 127334
+15nXl9eU 127335
+INC80LU= 127336
+INC80LXRgdGP 127337
+16DXlNeS 127338
+INGB0YPRidC10YHRgtCy 127339
+4LiZ4Lix4LiZ 127340
+0YTRhA== 127341
+0LXQutGC0LjQsg== 127342
+2LnZhNmI2YXYp9iq 127343
+0LHRg9C0 127344
+4LiZ4Lix4LiB4LiH4Liy4LiZ 127345
+4Lir4LiZ4LmJ4Liy4LiX4Li14LmI 127346
+2YLZitmC 127347
+44K344Oz 127348
+44Gr6Zai 127349
+15DXqNeS 127350
+INC/0YDQvtGC 127351
+INC/0YDQvtGC0LjQsg== 127352
+IOyeiOyWtA== 127353
+2YLZitmC2Kk= 127354
+7JeH 127355
+a8O8cg== 127356
+44Gr44Gq44KK44G+44GX44Gf 127357
+INC00LXRj9GC 127358
+INC00LXRj9GC0LXQu9GM 127359
+16TXldeo15g= 127360
+4Lif4LmJ4Liy 127361
+4LmA4Lig 127362
+INCw0LLRgtC+0LzQsNGC 127363
+15bXmden 127364
+IG9sZHVr 127365
+2LnYp9mF 127366
+INGC0L7RgA== 127367
+eXLEsWNh 127368
+w6rM 127369
+44Kt44Oz44Kw 127370
+44Gr44Go44Gj44Gm 127371
+4LmA4LiJ4Lie 127372
+4LmA4LiJ4Lie4Liy4Liw 127373
+44Gv44Ga 127374
+157XkNeZ 127375
+4Liq4Liw4LiU 127376
+4Liq4Liw4LiU4Lin4LiB 127377
+7Jy866mw 127378
+4LiB4Li1 127379
+4Lis 127380
+INei15XXqQ== 127381
+4Lig4Liy4Lip4Liy 127382
+4LiX4Lix4LiZ 127383
+YWNha3Q= 127384
+YWNha3TEsXI= 127385
+2KfYudiv2Kk= 127386
+INGD0YHQu9GD0LM= 127387
+16HXqNeY 127388
+15XXnteV16o= 127389
+15TXldeo 127390
+157XldeR 127391
+157XldeR158= 127392
+2LPZitin2LM= 127393
+2KfYqtmB2KfZgg== 127394
+15TXptec 127395
+2YXYpNiz 127396
+IHDDsw== 127397
+INC60L3QuA== 127398
+15nXm9eV15w= 127399
+4LmA4Lir4Lil4Li34Lit 127400
+15vXnNeb 127401
+16DXlg== 127402
+0YjQuNC1 127403
+csOocw== 127404
+INin2YTYrdmC 127405
+0LvRj9GA 127406
+4Lir4LiN 127407
+4Lir4LiN4Li04LiH 127408
+16jXkteZ16k= 127409
+4LmA4Liq4LmJ4LiZ 127410
+16nXkdeV158= 127411
+w7R0ZWw= 127412
+0LDQv9GA 127413
+0LDQv9GA0LjQvNC10YA= 127414
+2KfYqNmE 127415
+INGA0LDQt9Cy0LjRgg== 127416
+INC/0L7Qu9GM0Lc= 127417
+INCh0LXRgA== 127418
+15XXkdeZ 127419
+csOzxbw= 127420
+7Iut 127421
+44Kv44OI 127422
+44GX44KI44GG 127423
+4LiB4Lij4Lih 127424
+2K3Zg9mI2YU= 127425
+4LmC4Lia 127426
+4LiX4LmJ4Liy4Lii 127427
+IE3DoQ== 127428
+INGC0Ys= 127429
+4LiE4Lij4Lix4Lin 127430
+0YDRg9Cx 127431
+4bqhcA== 127432
+IG3Fgg== 127433
+IG3Fgm9k 127434
+IGfDtnLDvMWf 127435
+IGdlbGnFnw== 127436
+xrDGoWk= 127437
+157Xqden 127438
+2YDZgNmA2YA= 127439
+4Lij4Liy4Lin 127440
+44GX44Gj 127441
+44GX44Gj44GL44KK 127442
+INCa0L7QvQ== 127443
+IGvDqg== 127444
+4LmC4LiX4Lij 127445
+6JC944Gh 127446
+5Ye644Gm 127447
+4Lil4Lix4LiB4Lip 127448
+INeS15HXldeU 127449
+44OZ44Or 127450
+6rGw64KY 127451
+66eQ 127452
+15nXnNeT15nXnQ== 127453
+IOuEiA== 127454
+157XqNeZ 127455
+4Lij4Liq 127456
+44Ot44Oz 127457
+0LjQu9C+ 127458
+0L3QvtGB0YLRjNGO 127459
+15bXqNeX 127460
+0L/QvtC9 127461
+INeU16nXnA== 127462
+6rKg7Iq164uI64uk 127463
+IGtpxZ8= 127464
+INCa0Lg= 127465
+4Lin4Lij 127466
+2K/Yp9i5 127467
+xZ9pbQ== 127468
+2YbZkQ== 127469
+0LLQsNGC 127470
+2LHYp9mD 127471
+2KjYp9mE 127472
+0LjQtNC1 127473
+INeU157Xlw== 127474
+7Ja1 127475
+2KrZgdin2Lk= 127476
+2KPYqg== 127477
+64qY 127478
+16nXmdeq 127479
+2LPYqtmF2LE= 127480
+INGE0LDQug== 127481
+INin2YTYo9mF2LHZig== 127482
+656o 127483
+2KfYs9mF 127484
+IGHEnw== 127485
+IMOnZXY= 127486
+2YPZiNix 127487
+44GV44G+ 127488
+IMOnw7Z6 127489
+INix2LM= 127490
+xIVkYQ== 127491
+4Liq4LiZ4Li4 127492
+44GX44Gm44GP44KM 127493
+0L3Rjg== 127494
+bGXFn21l 127495
+44Kq44Oz 127496
+44Go44Gq44KK 127497
+YXZhxZ8= 127498
+15jXmdeR 127499
+2K3Ytg== 127500
+15XXpteQ15XXqg== 127501
+2YbZhdmI 127502
+xLF0 127503
+INGF0LA= 127504
+INGF0LDRgNCw0Lo= 127505
+INGF0LDRgNCw0LrRgtC10YA= 127506
+IGTFgg== 127507
+44OX44Op 127508
+4LiK4Li44Lih 127509
+4LmI4Lit4LiZ 127510
+15XXkdec 127511
+0YHQvtC7 127512
+15PXkg== 127513
+0LDRgNCw0YI= 127514
+bml2ZXJz 127515
+IGdlcsOnZWtsZcWfdGly 127516
+INin2YTZhNmK 127517
+4Lij4Liw4Lii4Liw 127518
+INmF2K7YqtmE2YE= 127519
+IGfDtm5kZXI= 127520
+2YHYp9ix 127521
+ZG/Enw== 127522
+ZG/En2Fu 127523
+2LXZhNin2K0= 127524
+IHlhecSxbg== 127525
+44OG44Oz 127526
+4Lij4Lin4LiI 127527
+15nXl9eZ15M= 127528
+w7xua8O8 127529
+0YbQuNCw0LvRjNC9 127530
+4Lia4Li5 127531
+4Lih4Li4 127532
+aMOk 127533
+2K7ZgQ== 127534
+5aKX 127535
+5aKX44GI 127536
+0LXRh9C90L4= 127537
+INin2YTYs9mG 127538
+4LiC4Liy4Lin 127539
+aW1kaQ== 127540
+0Ks= 127541
+4LiZ4Lit4LiB4LiI4Liy4LiB 127542
+4Lia4Liy4Lil 127543
+16rXqQ== 127544
+IGTDvHplbmxl 127545
+0LzRi9GB0Ls= 127546
+44GP44Gq 127547
+xbx1 127548
+IHdzcMOzxYI= 127549
+INC90LDQtw== 127550
+xLFuZGFraQ== 127551
+2KrYsdip 127552
+xZ9law== 127553
+IMO2ZA== 127554
+INmI2YM= 127555
+INC/0L7Qt9Cy0L7Qu9GP 127556
+INeq15XXmw== 127557
+2YXZhtiq2Kw= 127558
+66eJ 127559
+INin2YTYq9mE2KfYqw== 127560
+0LDRhtC40Y4= 127561
+2YjYsdmI 127562
+0YvQstCw0LXRgg== 127563
+2K7Ytdi1 127564
+INin2YTZgdmE 127565
+INin2YTZgdmE2LPYt9mK2YY= 127566
+2KXYrNix 127567
+2KXYrNix2KfYoQ== 127568
+2KfZhtiq2K4= 127569
+2KfZhtiq2K7Yp9io 127570
+2KfYsdmK2Kk= 127571
+15XW 127572
+2KLZhg== 127573
+157XoteV16o= 127574
+INC80LDQuw== 127575
+INeQ15c= 127576
+4LiX4LmJ4Lit4LiH 127577
+emXFmw== 127578
+IOunjOuTpA== 127579
+2LHZiti5 127580
+5LqL44KS 127581
+4Lia4Lij4Li04Lir4Liy4Lij 127582
+15zXnteZ15M= 127583
+INC80YPQtg== 127584
+2KrYsdmI 127585
+INio2KfZhNil 127586
+16TXmden 127587
+2LLZhdip 127588
+IMO2xJ9yZW5j 127589
+44O2 127590
+2KfZhdi52Kk= 127591
+16fXkdeV16Y= 127592
+157XoNeV16o= 127593
+2LHZitmF 127594
+INC+0LrQsNC3 127595
+44Gg44GR44Gp 127596
+IGjEsXo= 127597
+INep15DXqg== 127598
+44Ki44O8 127599
+IG1vxbxsaXdv 127600
+7IS8 127601
+2YjYp9io 127602
+0L7Qs9GA0LDRhA== 127603
+INi52KjYr9in2YQ= 127604
+44KS6KGM 127605
+2KjZitmE 127606
+IMSww6c= 127607
+4Lii4Liy4Lii 127608
+INGD0YfQsNGB0YI= 127609
+0YTQtdGB0YE= 127610
+0YTQtdGB0YHQuNC+0L3QsA== 127611
+4bqk 127612
+2YbZitmG 127613
+2LnYr9mE 127614
+4Liq4Lij4Lij 127615
+2K/ZitmE 127616
+15HXmden 127617
+Y3p5xYI= 127618
+0YDQvtC80LU= 127619
+INC80LXQtA== 127620
+7JmU 127621
+44Op44Kk44Oz 127622
+INGC0LXQvw== 127623
+0LXRgNGM 127624
+acSfaQ== 127625
+0LLQtdC70Lg= 127626
+0YDQuNGB0YI= 127627
+16HXldek 127628
+157XnNeX 127629
+INin2YTYpdmG 127630
+INec15TXqQ== 127631
+6LaK44GX 127632
+INGA0Ys= 127633
+15XXkNeo 127634
+2LHZh9in2Kg= 127635
+16TXldeQ15k= 127636
+INCz0L7RgdGD0LQ= 127637
+INCz0L7RgdGD0LTQsNGA 127638
+INCz0L7RgdGD0LTQsNGA0YHRgtCy 127639
+INin2YTYo9mF2YrYsQ== 127640
+2YXYrA== 127641
+4LmA4Lir4Lih4Liy4Liw 127642
+0YDQtdCy 127643
+4LiK4Li14Lie 127644
+44OV44OI 127645
+0LjRh9C90L4= 127646
+INin2YTZhdik 127647
+IGlodA== 127648
+7YWc 127649
+2K/ZhtmK 127650
+2LHYtQ== 127651
+0LvQsNGB0YI= 127652
+4LmA4Lir4Lil4LmI4Liy 127653
+xLFsxLFy 127654
+4Lij4LiT4LmM 127655
+157XqdeZ15o= 127656
+IGThu4s= 127657
+2LfZgdin2YQ= 127658
+15jXldef 127659
+INeR15nXoA== 127660
+44G+44Gj44Gf 127661
+0LvQvtC20LXQvdC40Y8= 127662
+2KrYrdix 127663
+2KjYp9it 127664
+4LmA4Liq4Li34LmJ4Lit 127665
+44GZ44GU 127666
+bHTDvHI= 127667
+4LiH4Liy4Lih 127668
+IHTDvA== 127669
+INC/0YDQuNC8 127670
+INC/0YDQuNC80LXQvQ== 127671
+IGhheWF0 127672
+64OQ 127673
+64uM 127674
+16DXmdeV 127675
+0LLQtdC00LXQvQ== 127676
+7IWo 127677
+4LiI4Lix4Lii 127678
+4LiB4LmI4Lit 127679
+INCy0L7QtA== 127680
+0L7RgdGC0L7Rjw== 127681
+0L3QsNGC 127682
+4LmB4Lir4Lil 127683
+2LPZhdmK 127684
+4LiU4Liz4LmA4LiZ 127685
+4LiU4Liz4LmA4LiZ4Li04LiZ 127686
+d8OzZA== 127687
+w7Z5bGU= 127688
+44OA44Kk 127689
+0YjQuNC5 127690
+0LzQtdGJ0LXQvQ== 127691
+44GX44G+44GG 127692
+44OJ44Op 127693
+2YjYttit 127694
+4Lit4LiZ4Li4 127695
+INin2YTYp9is2KrZhdin2Lk= 127696
+bGHFn21h 127697
+4LiE4Lit4LiZ 127698
+157XqNeZ150= 127699
+2YbYp9mF2Kw= 127700
+16nXqNeV16o= 127701
+2KfZhNij 127702
+IGtzacSFxbw= 127703
+INCw0L0= 127704
+0YDQsNC5 127705
+2KfZh9ix2Kk= 127706
+157Xk9eU 127707
+5LiA57c= 127708
+5LiA57eS 127709
+5LiA57eS44Gr 127710
+0YDQuNGC0L7RgA== 127711
+ZMSxa2w= 127712
+4LmB4LiW 127713
+4LmB4LiC4LmI4LiH 127714
+0LXQutGC0L7RgA== 127715
+157Xodei 127716
+0YDQsNC60YLQuA== 127717
+dcSfdQ== 127718
+15XXkdeq 127719
+4Liq4Li54LiV4Lij 127720
+IMOnYWzEscWfbQ== 127721
+IMOnYWzEscWfbWFsYXI= 127722
+INCw0L3QsA== 127723
+44Ob44O844Og 127724
+IGLDtmzDvG0= 127725
+INio2LU= 127726
+0L7Qu9C+0YE= 127727
+IOyViuuKlA== 127728
+4LmI4Liw 127729
+2YjYqtix 127730
+5LmX 127731
+2LPYqtiu2K/Yp9mF 127732
+16TXmdeZ16E= 127733
+16TXmdeZ16HXkQ== 127734
+16TXmdeZ16HXkdeV16c= 127735
+INC60YDQsNGB 127736
+0LvQuNC6 127737
+2LHZitit 127738
+157Xqdec15Q= 127739
+4LmA4Lii4Li14LmI4Lii 127740
+4LmA4Lii4Li14LmI4Lii4Lih 127741
+0LLQuNGB 127742
+0L7QvNC9 127743
+xJ91bg== 127744
+44Ot44O844Oz 127745
+2KPYqtmK 127746
+4LiV4Lij4Li1 127747
+55Sz44GX 127748
+2KrZhdix 127749
+7JeI7Iq164uI64uk 127750
+INmI2LrZitix 127751
+cmVkbmk= 127752
+INin2YTYtdmB 127753
+INC90LDRgdGC0L7Rjw== 127754
+INC90LDRgdGC0L7Rj9GJ 127755
+4LiV4Lij4Liy 127756
+INGD0YHQu9C+0LI= 127757
+INGD0YHQu9C+0LLQuNGP 127758
+0YbQtdC/ 127759
+15TXl9ec15g= 127760
+2LfZiti5 127761
+IEJha2Fu 127762
+INin2YTYsdmI 127763
+0LjQu9GM0L3Qvg== 127764
+INC80LXRgg== 127765
+4LiU4Lit4LiB 127766
+44GL44KJ44Gq44GE 127767
+INC/0L7RgdGC0L7Rjw== 127768
+INC/0L7RgdGC0L7Rj9C9 127769
+INGH0LDRgQ== 127770
+w7xj 127771
+d3LDsw== 127772
+0LHRg9GA 127773
+44OQ44OD44Kv 127774
+44Op44Oz44OJ 127775
+INC+0LPRgA== 127776
+4Liq4Lix4LiN 127777
+4Liq4Lix4LiN4LiN4Liy 127778
+4Lih4Lix4LmI4LiZ 127779
+4LiE4Lit4Lih 127780
+YWzEsWs= 127781
+INC90LXQtA== 127782
+w7xtw7x6 127783
+IMWbd2ll 127784
+w6lyaW8= 127785
+15nXkNeU 127786
+2K/Zhdin2Ko= 127787
+xLFybA== 127788
+INC+0YLQtw== 127789
+INC+0YLQt9GL0LI= 127790
+5LuY44GN 127791
+IGthxbxkZQ== 127792
+0LzQuNC90LjRgdGC 127793
+44Kw44Or 127794
+67CW 127795
+0LXQt9C9 127796
+2KfZhNmB 127797
+INep16fXnA== 127798
+2YXYtg== 127799
+44Od44O844OI 127800
+2YXZhtiq 127801
+2YLZitin2YU= 127802
+2LTZhg== 127803
+15nXqNeV16I= 127804
+44Kt44Oj44Oz 127805
+0LTQvtGA0L7Qsg== 127806
+157Xmdeq15k= 127807
+2YjZhNmI2Kw= 127808
+2YPYp9mB 127809
+INGA0LDQt9C70LjRhw== 127810
+0LjRgtC10YI= 127811
+0L3QvtC70L7Qsw== 127812
+4Lil4LiH4LiX4Li44LiZ 127813
+IHlha2xhxZ8= 127814
+44Os44Kk 127815
+6rKg64uk 127816
+5rGC44KB 127817
+2LHZiNmB 127818
+IO2K 127819
+IO2KuQ== 127820
+44Gj44GP44KK 127821
+4LiE4Lin4Liy4Lih4LiE4Li04LiU 127822
+15TXmdeh15g= 127823
+2KXZgg== 127824
+44Gm44GE 127825
+4LmC4LiK 127826
+IELDvHnDvGs= 127827
+INCk0LXQtNC10YA= 127828
+0YbQuNC9 127829
+0YDQvtCy0LA= 127830
+INin2YTYp9mC2KrYtdin2K8= 127831
+IGNow6E= 127832
+4LiY4Liy4LiZ 127833
+66Wg 127834
+4LmE4LiV 127835
+w61waW8= 127836
+2YvYpw== 127837
+INC+0LHRj9C3 127838
+2YfYrA== 127839
+IOykkeyalA== 127840
+44Gu44Gn44Gv44Gq44GE 127841
+2KjYp9ix2KfYqQ== 127842
+44Kk44Or 127843
+INC90L7RgNC8 127844
+4buJbmg= 127845
+bcO2 127846
+bcO2Z2xpY2g= 127847
+0YbQuNC/ 127848
+44Ki44Kv 127849
+15TXmQ== 127850
+0YbQuNCw0LvRjNC90L4= 127851
+IMWbd2k= 127852
+2KrZgg== 127853
+INGB0YLQvtC40Lw= 127854
+2KjZiti52Yo= 127855
+INec16nXng== 127856
+0LPQu9GP 127857
+0LPQu9GP0LQ= 127858
+44Gm44GP44KM 127859
+xJlkemk= 127860
+4LiC4Lix 127861
+4LiC4Lix4LmJ4LiZ 127862
+2LfZgg== 127863
+IOyXrQ== 127864
+44Gj44Gm44GX44G+44GG 127865
+IGRlxJ9lcmw= 127866
+IGRlxJ9lcmxlbmRpcg== 127867
+IMO8bGs= 127868
+INC80L3QvtCz 127869
+4LmL 127870
+67+Q 127871
+INCj0LrRgNCw 127872
+xJ9pbmk= 127873
+INCx0LXQt9C+0L8= 127874
+INCx0LXQt9C+0L/QsNGB 127875
+4Lit4Lit4LiB4LmB4Lia4Lia 127876
+2KfYuA== 127877
+2K3Yr9in2Ks= 127878
+0LvQtdGA 127879
+15nXpQ== 127880
+15nXoNeY16jXoNeY 127881
+bGFyxLFuxLF6 127882
+2K3Zitit 127883
+xbxlbGk= 127884
+4Lit4Lix4LiH 127885
+4Lit4Lix4LiH4LiB 127886
+4Lit4Lix4LiH4LiB4Lik4Lip 127887
+INC+0YLQu9C40Yc= 127888
+4Lix4Liq 127889
+656N 127890
+0L7QttC90L4= 127891
+44K544Od 127892
+INGF0L7Rhw== 127893
+INC60LDQvw== 127894
+0LXRh9C10L0= 127895
+2K3ZhNip 127896
+2YrYp9mH 127897
+0L3QsNC7 127898
+15XXpteo15nXnQ== 127899
+IGthbGQ= 127900
+5YON 127901
+INin2YTYtNiu2LU= 127902
+INC30L3QsA== 127903
+IHd6Z2w= 127904
+xbx5Y3o= 127905
+6rCd 127906
+4Lie4Lil4Lix4LiH 127907
+7YG8 127908
+IMO2bA== 127909
+IGLhu6U= 127910
+2LTZh9ix 127911
+INC30LDQvA== 127912
+INC00LXQsg== 127913
+15nXmNeq 127914
+2KrYudmE2YI= 127915
+2YjZhdip 127916
+44KS5L2c 127917
+44GN44Gm 127918
+7YOd 127919
+cmFzxLFuZGE= 127920
+44KS5o6i 127921
+INmF2KjYp9i02LE= 127922
+2LHYp9is2Lk= 127923
+INCy0L7Qt9C0 127924
+2YXYrdin 127925
+15XXqdeo 127926
+INC40YHRgtC+0YA= 127927
+4Lih4Lix4LiB 127928
+dMSxxJ8= 127929
+2KvYp9ix 127930
+2KrYsdmG2Ko= 127931
+4LmB4LiC4LmH 127932
+4LmB4LiC4LmH4LiH 127933
+0L/QvtGH 127934
+INeR15DXldeq 127935
+66+A 127936
+652864+E 127937
+4LiK4Lix4LiU 127938
+4Liq4LiV4LmM 127939
+44OL44OD44Kv 127940
+0LjQtNC10L3Rgg== 127941
+INCz0YDRg9C/0L8= 127942
+2KrYrg== 127943
+4bqg 127944
+4Lii4Li34LiZ 127945
+4Lii4Lix4LiZ 127946
+w7NyeQ== 127947
+VMOc 127948
+44GX44KD 127949
+INC/0YDQvtCy0LXQtA== 127950
+0LvRj9C10YI= 127951
+2YXYrg== 127952
+4Lii4Lit4Lih 127953
+15vXoNeh16o= 127954
+INin2YTZhdmG2Ko= 127955
+IG9sbWFk 127956
+16jXm9eW15k= 127957
+INCy0YHRgtGA 127958
+INC40YHRgdC70LXQtA== 127959
+0YLQstC10YDQtg== 127960
+2KjYr9mI 127961
+0LXRgNGC 127962
+77u3 127963
+sYU= 127964
+4Liq4Lix4Lih4Lie4Lix4LiZ4LiY4LmM 127965
+4Li04LmI4LiZ 127966
+16bXmdeR 127967
+d2nEmXQ= 127968
+IOywuA== 127969
+IHp3acSFeg== 127970
+2LPYqNmI2Lk= 127971
+44OD44Kw 127972
+4Lib4Lil4Lit4LiU 127973
+4Lib4Lil4Lit4LiU4Lig4Lix4Lii 127974
+44KC44KK 127975
+2YLYr9iz 127976
+IHNwcno= 127977
+IHNwcnplZGE= 127978
+IGlzdGVkaQ== 127979
+IGtodQ== 127980
+INC00LXQvQ== 127981
+IGtvxYQ= 127982
+INeR15fXmQ== 127983
+4LmA4LiX4LmJ4Liy 127984
+15XXodeZ16M= 127985
+44OL44Ol44O8 127986
+INC/0YDQtdC00L7RgdGC 127987
+INC/0YDQtdC00L7RgdGC0LDQsg== 127988
+4LmC4Lif 127989
+w6l2 127990
+INin2YTYtdit 127991
+2LXYrdin2Kg= 127992
+4LmA4LiI4LmH4Lia 127993
+0LLQu9C10Lo= 127994
+4Lin4Lix4LiV 127995
+4LiW4Li4 127996
+44GT44Go44GM44Gn44GN44G+44GZ 127997
+2YLZitmC2Yo= 127998
+15XXl9eo 127999
+0YvRiA== 128000
+INC+0YLQvdC+ 128001
+INC+0YLQvdC+0Yg= 128002
+0L7QsdC40LvRjA== 128003
+2YHYrQ== 128004
+xLFudA== 128005
+xLFudMSx 128006
+INec15HXkw== 128007
+7Y6Y7J207KeA 128008
+44OK44Or 128009
+INmF2LPYp9ih 128010
+15nXmNeR 128011
+0YzQtdGA 128012
+64S3 128013
+0YvRgtCw 128014
+INC+0YfQtdGA 128015
+4LiU4Li34LmI 128016
+4LiU4Li34LmI4Lih 128017
+IE5naA== 128018
+2KrYudio 128019
+2YTYp9mC2KfYqg== 128020
+15XXnNeV15LXmdeU 128021
+IOydtOqygw== 128022
+INeU15HXqA== 128023
+7Jy1 128024
+4LmA4LiE4Lil4Li34LmI4Lit4LiZ 128025
+2YfYqQ== 128026
+4LiI4Liz4LmA4Lib4LmH4LiZ 128027
+5aSJ44GI 128028
+d2nFm2NpZQ== 128029
+Y2hvZA== 128030
+Y2hvZHrEhQ== 128031
+0LLRgNC+ 128032
+157Xl9eZ16g= 128033
+IHnEsQ== 128034
+IHnEsWxs 128035
+7KGM 128036
+4LmE4Lir4Lin 128037
+44Gq44GP44Gq 128038
+INC30LDQstC40YE= 128039
+IOyYiOyImA== 128040
+2YHYsA== 128041
+4bunbmc= 128042
+4Lie4Li44LiX4LiY 128043
+0LfQvQ== 128044
+bGF5YW4= 128045
+44Kh 128046
+4LiB4LmH4LiV4Liy4Lih 128047
+IHNhxJ9sYW0= 128048
+4Lij4LiT 128049
+INGB0LjRgg== 128050
+INGB0LjRgtGD 128051
+INin2YTYqtmG 128052
+15TXlg== 128053
+INi32YjZitmE 128054
+dGHFgg== 128055
+IGfDtnJk 128056
+5aSJ44KP 128057
+64Ol 128058
+4LiE4LmI4Lit4Lii 128059
+15DXldeY 128060
+64WQ 128061
+44Op44Oz44K5 128062
+4Lin4Lix4LiS 128063
+4Lin4Lix4LiS4LiZ 128064
+IG9sdcWf 128065
+16TXoteV15w= 128066
+IHN6Y3plZ8OzxYI= 128067
+4LiE4Liy4Liq4Li0 128068
+4LiE4Liy4Liq4Li04LmC4LiZ 128069
+cG93aWVk 128070
+INGC0LXQsQ== 128071
+4Lir4LiZ4LmI4Lin4Lii 128072
+INC80LjQuw== 128073
+2K3Zgw== 128074
+4LiX4LiU 128075
+INC80LDRgtC10YDQuNCw0Ls= 128076
+xYJvdw== 128077
+4LmA4LiB4Li14Lii 128078
+INGB0L7QstC10YA= 128079
+44Kp 128080
+4Lib4Lij4Li0 128081
+INC40Y4= 128082
+0L3QsNGH0LXQvQ== 128083
+0YDQtdC90LQ= 128084
+bXXFn3R1cg== 128085
+INC/0YDQvtC00YPQug== 128086
+0LfQtA== 128087
+0Y/RgtC4 128088
+0Y/RgtC40Y8= 128089
+4LmA4Lih4Li14Lii 128090
+2LHYp9iq2YrYrA== 128091
+IGFtYWPEsQ== 128092
+16nXldec 128093
+16nXldec15c= 128094
+4Liq4Liw4Lit4Liy 128095
+4Liq4Liw4Lit4Liy4LiU 128096
+16TXktei 128097
+2LnYqNip 128098
+ZMSxbg== 128099
+7YWU 128100
+INee16nXl9en 128101
+IGZpeWF0 128102
+INC30LDRjw== 128103
+INC30LDRj9Cy 128104
+4LmC4Lir4Lil 128105
+4LmC4Lir4Lil4LiU 128106
+4LiB4Lij4Li44LiH4LmA4LiX4Lie 128107
+16bXmdeZ158= 128108
+7Jqx 128109
+2YXYqA== 128110
+2YXYqNin2K8= 128111
+bGFuZMSxcg== 128112
+INCy0LXRgdGM 128113
+IGjDvGs= 128114
+INCS0L7Qtw== 128115
+0YfQuNGC0YvQstCw 128116
+4Lin4Lil 128117
+15XXptei 128118
+4LiC4LiT4Liw4LiX4Li14LmI 128119
+IGHFn2HEn8Sx 128120
+15zXkNeV157XmQ== 128121
+dHJ6eW0= 128122
+w6TDn2ln 128123
+b3dvxZtjaQ== 128124
+44Gd44KC 128125
+IHJvendpxIV6 128126
+IGfFgsOzd24= 128127
+0LzQvtC90YI= 128128
+157Xldee 128129
+INGB0YLQsNC9 128130
+2YTYp9mC2Kk= 128131
+cHJvd2Fk 128132
+cHJvd2Fkemk= 128133
+INGB0L7RgdGC0L7Rjw== 128134
+15nXkNeV16o= 128135
+csSx 128136
+Z8Sx 128137
+44OR44OR 128138
+INC90LDQu9C40Yc= 128139
+15TXptei 128140
+INeg15Q= 128141
+4LiE4Lix4Lia 128142
+2LnYsdin2LY= 128143
+0LjQtg== 128144
+2YfYp9im2Yo= 128145
+44KJ44GP 128146
+0L7QttC10YI= 128147
+INC+0LHQvtGA 128148
+INC+0LHQvtGA0YPQtA== 128149
+2KPYs9mE 128150
+4LmH4LiU 128151
+0YDRg9GC 128152
+2K/ZitmF2YI= 128153
+2K/ZitmF2YLYsdin 128154
+IGplc3Rl 128155
+15XXldeZ16g= 128156
+15HXk9eZ16c= 128157
+0LTQtdGA0LbQuNCy0LA= 128158
+44GK44GP 128159
+ZXduxJl0cg== 128160
+ZXduxJl0cnpu 128161
+4Lie4Lik 128162
+INeQ15XXlA== 128163
+16rXl9eV16k= 128164
+IHpvYg== 128165
+0LTRg9C8 128166
+INGB0Ys= 128167
+2YrYsdin 128168
+IHdpxJlrcw== 128169
+4LmB4LiV4LiB4LiV4LmI4Liy4LiH 128170
+bGFyYXJhcw== 128171
+bGFyYXJhc8Sx 128172
+7ZiA 128173
+64m0 128174
+15XXktec 128175
+INC+0YLQvNC10YI= 128176
+INGA0LDQvQ== 128177
+2KrZg9mE 128178
+0LjRgtC10LvRjNC9 128179
+4Lib4Lij4Liw4Lin4Lix 128180
+4Lib4Lij4Liw4Lin4Lix4LiV4Li0 128181
+7J6W 128182
+0LzQvtC20L3Qvg== 128183
+cGllY3plxYQ= 128184
+cGllY3plxYRzdA== 128185
+66q7 128186
+7Iqo 128187
+157Xodee 128188
+4bum 128189
+4Lio4Li0 128190
+4Lio4Li04Lil 128191
+4Lio4Li04Lil4Lib 128192
+IMWadw== 128193
+44OD44K344On44Oz 128194
+dW5pdMOg 128195
+IG1pZXN6a2E= 128196
+IG1pZXN6a2HFhA== 128197
+cHJ6ZWQ= 128198
+cHJ6ZWRzaQ== 128199
+cHJ6ZWRzacSZYg== 128200
+cHJ6ZWRzacSZYmlvcg== 128201
+4Lib4Lij4Liw4Liq4Li04LiX4LiY4Li0 128202
+4Lib4Lij4Liw4Liq4Li04LiX4LiY4Li04Lig4Liy4Lie 128203
+4Lii4LmI 128204
+7JWZ 128205
+4Lij4Lin4LiU 128206
+4Lij4Lin4LiU4LmA4Lij4LmH4Lin 128207
+5b2T44Gf44KK 128208
+w6RsbGU= 128209
+0YPQtdGC0YHRjw== 128210
+w6Nu 128211
+66C1 128212
+dGjDqA== 128213
+44KS5Yip55So 128214
+7LWc 128215
+7ZOo 128216
+4LiX4Lix4Lia 128217
+4Liy4LiE4Lih 128218
+44GH 128219
+64KM 128220
+4LmA4Lib4Lil4LmI4Liy 128221
+4qY= 128222
+674= 128223
+6oA= 128224
+6oc= 128225
+4qE= 128226
+8J+f 128227
+45A= 128228
+4ro= 128229
+4a0= 128230
+4Zk= 128231
+4ZM= 128232
+4bI= 128233
+8JOP 128234
+4aw= 128235
+4q8= 128236
+5Kg= 128237
+6p0= 128238
+6qs= 128239
+8JE= 128240
+8JOD 128241
+8J2F 128242
+PHVuaw== 128243
+PHVuaz4= 128244
+PHM+ 128245
+PC9z 128246
+PC9zPg== 128247
+INi52YTZiQ== 128248
+IG3hu5l0 128249
+IHbhu5tp 128250
+IG5nxrDhu51p 128251
+INil2YTZiQ== 128252
+IG5o4buvbmc= 128253
+IHRo4buD 128254
+INeQ15U= 128255
+INei150= 128256
+2KfZiw== 128257
+IOC5geC4peC4sA== 128258
+INmE2Kc= 128259
+IG5oxrA= 128260
+INin2YTYqtmK 128261
+INeU15XXkA== 128262
+IMSR4bq/bg== 128263
+INij2Yg= 128264
+IHbhu4E= 128265
+IGzDoG0= 128266
+IHPhur0= 128267
+IGPFqW5n 128268
+IOG7nw== 128269
+IMSRw7M= 128270
+IG5oaeG7gXU= 128271
+IHThuqFp 128272
+IHRyw6pu 128273
+INeS150= 128274
+IG5ow6A= 128275
+INeb15k= 128276
+IHPhu7E= 128277
+IMSR4bqndQ== 128278
+IGLhu4s= 128279
+INmH2LDYpw== 128280
+IG5o4bqldA== 128281
+IHBo4bqjaQ== 128282
+IGhp4buHbg== 128283
+IGThu6VuZw== 128284
+IMSR4buZbmc= 128285
+INin2YTZhNmH 128286
+INiM 128287
+INmD2YQ= 128288
+IHZp4buHYw== 128289
+IG7Eg20= 128290
+IHRow6w= 128291
+IGjhu41j 128292
+INmI2Ko= 128293
+dMOp 128294
+INin2YY= 128295
+IHTDtGk= 128296
+INeQ16DXmQ== 128297
+INec15k= 128298
+INee15U= 128299
+IG5nw6B5 128300
+IG7GsOG7m2M= 128301
+INeU15nXkA== 128302
+INeQ15k= 128303
+IGjGoW4= 128304
+INmH2LDZhw== 128305
+INmI2Yo= 128306
+INin2YTYsNmK 128307
+INeV154= 128308
+IGdpw6E= 128309
+IG5ow6Ju 128310
+IGNow61uaA== 128311
+IG3DrG5o 128312
+INCd0LA= 128313
+IHRo4bq/ 128314
+INeZ15XXqteo 128315
+INeQ150= 128316
+IG7Dqm4= 128317
+IGjhu6M= 128318
+IGjhu6Nw 128319
+IGPDsm4= 128320
+INmH2Yg= 128321
+IGPGoQ== 128322
+IHLhuqV0 128323
+IFZp4buHdA== 128324
+INio2LnYrw== 128325
+INep15k= 128326
+IHRo4budaQ== 128327
+IGPDoWNo 128328
+IMSR4buTbmc= 128329
+INC90L4= 128330
+IHRyxrDhu51uZw== 128331
+2J8= 128332
+IMSR4buLbmg= 128333
+IMSRaeG7gXU= 128334
+15nXmded 128335
+IHRo4buxYw== 128336
+bsSxbg== 128337
+IGjDrG5o 128338
+IG7Ds2k= 128339
+IGPDuW5n 128340
+INeU15Q= 128341
+INil2YY= 128342
+INeQ15HXnA== 128343
+IG5oxrBuZw== 128344
+IGJp4bq/dA== 128345
+INC20LU= 128346
+IGNow7puZw== 128347
+IMSRYW5n 128348
+INiw2YTZgw== 128349
+IGzDqm4= 128350
+IGtow6FjaA== 128351
+IG7DoG8= 128352
+IHPhu60= 128353
+IGtow6Fj 128354
+IOuwjw== 128355
+IGzDvQ== 128356
+15nXmQ== 128357
+IMSRw6J5 128358
+INec154= 128359
+IGPhuqdu 128360
+IHRyw6xuaA== 128361
+IHBow6F0 128362
+44Gr44KC 128363
+0L/Qvg== 128364
+IG7Eg25n 128365
+IGLhu5k= 128366
+IHbhu6U= 128367
+IMSR4buZ 128368
+0YfQtQ== 128369
+IG5o4bqtbg== 128370
+IHRyxrDhu5tj 128371
+INei15M= 128372
+IGjDoG5o 128373
+INiu2YTYp9mE 128374
+IGzGsOG7o25n 128375
+IGPhuqVw 128376
+IHThu7E= 128377
+IHbDrA== 128378
+IHTGsA== 128379
+IGNo4bqldA== 128380
+INeb157XlQ== 128381
+IGfDrA== 128382
+INep16A= 128383
+IHThur8= 128384
+16rXlQ== 128385
+IG5naGnhu4dw 128386
+IG3hurd0 128387
+INmD2YXYpw== 128388
+INeR15nXnw== 128389
+INeo16c= 128390
+IHRo4bqleQ== 128391
+IG3DoXk= 128392
+INmB2Yk= 128393
+IGTDom4= 128394
+INeQ15fXkw== 128395
+IHTDom0= 128396
+INeb15o= 128397
+INec15U= 128398
+0LLQvg== 128399
+IHTDoWM= 128400
+IHRvw6Bu 128401
+INmI2YU= 128402
+IGvhur90 128403
+IOC4q+C4o+C4t+C4rQ== 128404
+INmI2KfZhNmF 128405
+IMSRaeG7g20= 128406
+INeW15U= 128407
+INeR15U= 128408
+15vXldeq 128409
+IGjhu5lp 128410
+IGLhurFuZw== 128411
+2KrZh9in 128412
+INeb15PXmQ== 128413
+INeU150= 128414
+IHh14bqldA== 128415
+INmC2K8= 128416
+IGLhuqNv 128417
+IHThu5F0 128418
+IHTDrG5o 128419
+INmH2Yo= 128420
+IMSR4buRaQ== 128421
+IHRoaeG6v3Q= 128422
+IGhp4buHdQ== 128423
+IHRp4bq/cA== 128424
+IHThuqFv 128425
+16rXlA== 128426
+IGNo4bun 128427
+b8WbxIc= 128428
+IGdpw7o= 128429
+IGdpw7pw 128430
+IMO9 128431
+IHF14bqj 128432
+IGxv4bqhaQ== 128433
+IGPDtA== 128434
+IMO0 128435
+IMO0bmc= 128436
+INeU15U= 128437
+INin2YTZitmI2YU= 128438
+IHTDrW5o 128439
+0LPQsA== 128440
+IHBow7JuZw== 128441
+IMSDbg== 128442
+INi52KfZhQ== 128443
+IHbhu4s= 128444
+bGFyxLFuxLE= 128445
+csOtYQ== 128446
+IHThu5tp 128447
+IMSRxrDhu51uZw== 128448
+IGdp4bubaQ== 128449
+IGLhuqNu 128450
+IGPhuqd1 128451
+IG5oacOqbg== 128452
+IGLhu4duaA== 128453
+IHRoxrDhu51uZw== 128454
+INeQ15nXnw== 128455
+IMSR4buB 128456
+IGjhu4c= 128457
+INeZ16nXqNeQ15w= 128458
+IHF1w6E= 128459
+INCX0LA= 128460
+44Gu44Gn44GZ44GM 128461
+INCf0YDQuA== 128462
+IHBo4bqnbg== 128463
+INmI2YTYpw== 128464
+IGzhu5tu 128465
+IHRy4buL 128466
+IGPhuqNt 128467
+INC80L4= 128468
+IGTDuW5n 128469
+INin2YTZiQ== 128470
+INi52YTZitmH 128471
+IOyeiOyKteuLiOuLpA== 128472
+2YrZgg== 128473
+INmC2KjZhA== 128474
+IGhv4bq3Yw== 128475
+INit2YrYqw== 128476
+IOC4l+C4teC5iA== 128477
+INi62YrYsQ== 128478
+IMSR4bqhaQ== 128479
+IHPhu5FuZw== 128480
+0L3Ri9C80Lg= 128481
+IHRo4bupYw== 128482
+INek15k= 128483
+IMSRaeG7h24= 128484
+44Gq44GL44Gj44Gf 128485
+IGdp4bqjaQ== 128486
+IHbhuqtu 128487
+INC40YU= 128488
+IMO2bmNl 128489
+IHbhuq15 128490
+IG114buRbg== 128491
+IOG6o25o 128492
+4LmD4LiZ4LiB4Liy4Lij 128493
+IFF14buRYw== 128494
+IGvhur8= 128495
+16DXkA== 128496
+INeh15k= 128497
+IHnDqnU= 128498
+44Gu44GL 128499
+IMSR4bq5 128500
+IMSR4bq5cA== 128501
+IGNo4bupYw== 128502
+IHnEsWw= 128503
+IFTDvHJraXll 128504
+ZMOp 128505
+INmC2KfZhA== 128506
+IGThu4tjaA== 128507
+IG9sZHXEn3U= 128508
+IGNo4buNbg== 128509
+INiq2YU= 128510
+4Lir4LiZ4Li24LmI4LiH 128511
+44GV44KM44Gf 128512
+IHBow6Fw 128513
+7JuU 128514
+IHRp4buBbg== 128515
+44GX44G+44GX44Gf 128516
+INep15zXkA== 128517
+2YTYqQ== 128518
+INec16TXoNeZ 128519
+INeR15nXqg== 128520
+IEjDoA== 128521
+INit2Ko= 128522
+INit2KrZiQ== 128523
+INei15XXkw== 128524
+IG7Dsw== 128525
+IHRow6FuZw== 128526
+4LmA4Lil4Li34Lit4LiB 128527
+16jXlA== 128528
+IHTEg25n 128529
+IGPDoWk= 128530
+IHRyaeG7g24= 128531
+INeQ15XXqteV 128532
+7KCB7J24 128533
+IEPDtG5n 128534
+INec15TXmdeV16o= 128535
+INCz0L7QtNCw 128536
+0LjRjg== 128537
+INio2LnYtg== 128538
+IOC4geC4suC4ow== 128539
+6Imv44GE 128540
+2YjYqg== 128541
+IGxpw6pu 128542
+INCd0L4= 128543
+INCd0LU= 128544
+55qE44Gq 128545
+INmF2Ko= 128546
+INGC0LDQutC20LU= 128547
+INC60L7RgtC+0YDRi9C1 128548
+INeZ15PXmQ== 128549
+IHRy4buNbmc= 128550
+44K144Kk44OI 128551
+7KCB7Jy866Gc 128552
+IHThuq1w 128553
+INep15zXmQ== 128554
+7ZWY6rKM 128555
+IHTDoGk= 128556
+INCv 128557
+IHLhu5Np 128558
+2KfZgw== 128559
+IHRoxrDGoW5n 128560
+INeU15bXlA== 128561
+INmI2YXZhg== 128562
+4LiX4Li14LmI4Lih4Li1 128563
+IGN14buZYw== 128564
+IGLDvHnDvGs= 128565
+44Go44GL 128566
+INeR15nXldeq16g= 128567
+IGzhuqdu 128568
+IGfDtnJl 128569
+IHRy4buf 128570
+INeY15XXkQ== 128571
+0YLRjNGB0Y8= 128572
+IHRo4buRbmc= 128573
+INeb16k= 128574
+IHRpw6p1 128575
+INee15DXldeT 128576
+2Js= 128577
+a8SF 128578
+IOC5g+C4mQ== 128579
+IHbhuqVu 128580
+INep15zXlQ== 128581
+IMSR4buBdQ== 128582
+2YHYqg== 128583
+IOqyg+ydtA== 128584
+IGjDs2E= 128585
+INin2YTYudin2YU= 128586
+INmK2YjZhQ== 128587
+0LrQvtC5 128588
+IGJp4buHdA== 128589
+0YHRgtC+ 128590
+INeU15nXlQ== 128591
+4LiX4Li14LmI4LiI4Liw 128592
+INeT15k= 128593
+INeQ15o= 128594
+IMOhbg== 128595
+2LXZiNix 128596
+IHRyw60= 128597
+INCf0YDQvg== 128598
+IGzhu7Fj 128599
+44GX44Gm44GE44G+44GZ 128600
+IGLDoGk= 128601
+INeW15DXqg== 128602
+IGLDoW8= 128603
+4Lia4LiZ 128604
+IOuMgO2VnA== 128605
+IHRp4bq/ 128606
+IHRp4bq/bmc= 128607
+IGLDqm4= 128608
+44GV44KM44KL 128609
+c2nDs24= 128610
+IHTDrG0= 128611
+16LXlQ== 128612
+bcOp 128613
+0L3QuNGP 128614
+44G744Gp 128615
+IOC5gOC4nuC4o+C4suC4sA== 128616
+2KjYqQ== 128617
+IOu2hA== 128618
+INeQ15Y= 128619
+4LiX4LmI4Liy4LiZ 128620
+16rXnQ== 128621
+IHRow6pt 128622
+IGhv4bqhdA== 128623
+ecSx 128624
+15bXlQ== 128625
+IGdp4bud 128626
+IGLDoW4= 128627
+4LiC4Liy4Lii 128628
+0YfQsA== 128629
+IOC5hg== 128630
+INin2YTZhdiq 128631
+INC+0YfQtdC90Yw= 128632
+IGLhuqV0 128633
+IHRy4bq7 128634
+0YLRgA== 128635
+INij2YbZhw== 128636
+INir2YU= 128637
+INeb157XlA== 128638
+IGtow7M= 128639
+IHLhurFuZw== 128640
+INmI2YHZig== 128641
+0L3QuNC5 128642
+IGhvw6Bu 128643
+dMOz 128644
+INeQ16nXqA== 128645
+IOyDneqwgQ== 128646
+0YHQsA== 128647
+INeb15HXqA== 128648
+INGN0YLQvtC8 128649
+bGFyxLFuxLFu 128650
+IGNoxrBh 128651
+0LfQuA== 128652
+IGThuqtu 128653
+INCa0LDQug== 128654
+2KzZiA== 128655
+INCx0YvQu9C+ 128656
+INmK2Ko= 128657
+bsSx 128658
+xYJhbQ== 128659
+INmI2YfZiA== 128660
+15HXlQ== 128661
+0L/QuA== 128662
+16jXqg== 128663
+IHF14buRYw== 128664
+0LbQtA== 128665
+IMSRxqFu 128666
+2YPYqtio 128667
+IG3huq90 128668
+4Lij4Liw4Lia 128669
+4Lij4Liw4Lia4Lia 128670
+INmD2KfZhtiq 128671
+IHRow6Ju 128672
+4Liq4Li04LiZ4LiE4LmJ4Liy 128673
+15LXmQ== 128674
+IHBoxrDGoW5n 128675
+4LmE4Lih4LmI4LmE4LiU4LmJ 128676
+IOyEsQ== 128677
+IEPDoWM= 128678
+INeU157XlQ== 128679
+INGC0LXQvA== 128680
+INeT15U= 128681
+4Lit4Liw4LmE4Lij 128682
+IHbEg24= 128683
+44Gq44Gu44Gn 128684
+IE7hu5lp 128685
+INei15U= 128686
+44KJ44KM44KL 128687
+IHPDoW5n 128688
+IGfDtnN0ZXI= 128689
+44GT44Go44KS 128690
+IHRhcmFmxLFuZGFu 128691
+INC80LA= 128692
+INC/0L7RgdC70LU= 128693
+INeg15nXqg== 128694
+INeg15nXqtef 128695
+INC70LXRgg== 128696
+INec16DXlQ== 128697
+0YHRgQ== 128698
+INeZ15U= 128699
+0L/QtQ== 128700
+INmI2YTZgw== 128701
+INmI2YTZg9mG 128702
+IG5nb8OgaQ== 128703
+IMSR4buLYQ== 128704
+cnrEhWQ= 128705
+ZHppYcWC 128706
+INmF2LE= 128707
+0LjRgtGM0YHRjw== 128708
+INeQ15fXqNeZ 128709
+INec15vXnA== 128710
+4LiC4LmJ4Lit4Lih 128711
+4LiC4LmJ4Lit4Lih4Li54Lil 128712
+INCx0L7Quw== 128713
+INCx0L7Qu9C10LU= 128714
+2KzZhdi5 128715
+0LvQtdGC 128716
+IGzhu4tjaA== 128717
+INmF2KvZhA== 128718
+IOq3uOumrOqzoA== 128719
+IHRo4bup 128720
+IGRlxJ9pbA== 128721
+2YjYrQ== 128722
+INep15zXmg== 128723
+INmF2K3Zhdiv 128724
+IG7hur91 128725
+IMSR4buVaQ== 128726
+IHbhu6th 128727
+IG3hu41p 128728
+INC+0L3QuA== 128729
+IGzDumM= 128730
+INmK2YPZiNmG 128731
+7KeI 128732
+INep15zXoNeV 128733
+INCU0L4= 128734
+INep16DXmQ== 128735
+4Lil4Li0 128736
+15DXpNep16g= 128737
+IHPhu6lj 128738
+6raM 128739
+IOG7qW5n 128740
+4LmE4Lih4LmI4Lih4Li1 128741
+2LfZhNio 128742
+INGH0LXQvA== 128743
+IGNodXnDqm4= 128744
+IHRow61jaA== 128745
+INeV15k= 128746
+7ZWp 128747
+INmF2LXYsQ== 128748
+0LTQvg== 128749
+IMSR4bqldA== 128750
+IGNo4bq/ 128751
+4LiK4Li34LmI4Lit 128752
+IOyLoA== 128753
+INil2LDYpw== 128754
+INix2KbZitiz 128755
+INep15nXqQ== 128756
+IGdp4bqjbQ== 128757
+0YHQutCw 128758
+bGFyxLFuZGE= 128759
+IHPhu58= 128760
+IHTDrWNo 128761
+INmE2YPZhg== 128762
+INio2YU= 128763
+16LXldeR 128764
+16LXldeR15M= 128765
+xYLEhWN6 128766
+bGFyxLFuYQ== 128767
+INep150= 128768
+INmE2Ko= 128769
+INep15TXldeQ 128770
+dMOzdw== 128771
+IOuLpOuluA== 128772
+INij2YPYq9ix 128773
+44Gu44Gn44GZ 128774
+15vXmded 128775
+IG9sZHXEn3VudQ== 128776
+44GL44Gq 128777
+44KC44GG 128778
+2YrYrQ== 128779
+IG5ow6xu 128780
+IG5naOG7hw== 128781
+44Gr44Gq44Gj44Gm 128782
+0L/QsA== 128783
+IHF1eeG6v3Q= 128784
+2YTZgg== 128785
+dMOh 128786
+IGx1w7Ru 128787
+IMSR4bq3Yw== 128788
+INeQ16g= 128789
+IHR14buVaQ== 128790
+c8Ojbw== 128791
+7Jm4 128792
+2LHYrw== 128793
+INio2YfYpw== 128794
+INeU15nXlded 128795
+15XXldeZ 128796
+44Gn44GZ44Gt 128797
+INGC0L7Qs9C+ 128798
+IHRo4bun 128799
+44GX44Gf44GE 128800
+2LHZgg== 128801
+IGLhuq90 128802
+0LPRgw== 128803
+IHThu60= 128804
+0YjQsA== 128805
+IOC4m+C4tQ== 128806
+INeU15DXnQ== 128807
+7Y+s 128808
+xbxh 128809
+INeQ16rXlA== 128810
+IG7hu5lp 128811
+IHBow60= 128812
+IMWfZWtpbGRl 128813
+IGzhu51p 128814
+ZMSxxJ/EsQ== 128815
+INeb15DXnw== 128816
+IHTDvG0= 128817
+IG3huqFuaA== 128818
+IE3hu7k= 128819
+44Gd44KT44Gq 128820
+IG5o4buP 128821
+44Gq44GM44KJ 128822
+IGLDrG5o 128823
+xLFw 128824
+4Lie4Liy 128825
+IMSRw6FuaA== 128826
+INmI2YQ= 128827
+16jXldeq 128828
+INeQ15nXmg== 128829
+IGNodXnhu4Nu 128830
+2YPYpw== 128831
+44KM44KL 128832
+4LmB4Lih4LmI 128833
+44KI44GP 128834
+INmI2YLYrw== 128835
+7ZaI64uk 128836
+IG7GoWk= 128837
+44Gr44KI44Gj44Gm 128838
+IHZp4bq/dA== 128839
+IOC5gOC4nuC4t+C5iOC4rQ== 128840
+65CY64qU 128841
+2KfYr9mK 128842
+INmB2KXZhg== 128843
+7Kad 128844
+IMSR4bq3dA== 128845
+IGjGsOG7m25n 128846
+IHjDow== 128847
+IMO2bmVtbGk= 128848
+44Gg44Go 128849
+IG3hurk= 128850
+INeR15k= 128851
+INeT15HXqA== 128852
+IHbhuq10 128853
+IMSR4bqhbw== 128854
+IGThu7FuZw== 128855
+INGC0L7QvA== 128856
+INmB2YrZh9in 128857
+INis2YXZiti5 128858
+IHRodeG6rXQ= 128859
+c3TEmXA= 128860
+IHRp4bq/dA== 128861
+2LTZig== 128862
+INC10YnQtQ== 128863
+44GZ44KL44Go 128864
+IG3DoHU= 128865
+INGN0YLQvtCz0L4= 128866
+IHbDtA== 128867
+INCt0YLQvg== 128868
+IHRo4bqtdA== 128869
+IG7hu69h 128870
+IGJp4bq/bg== 128871
+IG7hu68= 128872
+INec15vXnQ== 128873
+15nXmdef 128874
+INiz2Ko= 128875
+INCe0YI= 128876
+IHBo4bul 128877
+6rmM7KeA 128878
+INec15o= 128879
+IGvhu7M= 128880
+4LmD4LiE4Lij 128881
+IGfDonk= 128882
+INmE2YTZhQ== 128883
+IHThu6Vj 128884
+2KrZitmG 128885
+IHRy4buj 128886
+INec16TXmQ== 128887
+IGLhu5E= 128888
+INCa0LA= 128889
+IMSRw6xuaA== 128890
+b3fEhQ== 128891
+c8SxbmRh 128892
+IGtoaeG6v24= 128893
+c8Sxeg== 128894
+INC60L7Qs9C00LA= 128895
+16HXnA== 128896
+INCx0YvQuw== 128897
+4LiZ4LmJ4Lit4Lii 128898
+0L7QsdGA0LDQtw== 128899
+IOqyg+ydtOuLpA== 128900
+65Ok7J2A 128901
+44G444Gu 128902
+IOC5gOC4oeC4t+C5iOC4rQ== 128903
+IHBo4bulYw== 128904
+INeX15zXpw== 128905
+IGjhur90 128906
+IMSRYQ== 128907
+4LmA4LiU4LmH4LiB 128908
+7ZiV 128909
+bMOt 128910
+6riJ 128911
+INi52K/Yrw== 128912
+IMSR4buT 128913
+IGfhuqdu 128914
+INeZ15XXnQ== 128915
+IHPEqQ== 128916
+0YDRj9C0 128917
+IHF1eeG7gW4= 128918
+INeQ15zXkA== 128919
+2YfZhdin 128920
+16DXmdeU 128921
+15zXldeq 128922
+INeU16jXkdeU 128923
+IHRpw6pu 128924
+IGFsxLFu 128925
+IGThu4U= 128926
+5Lq644GM 128927
+0L3QvtGB 128928
+0LvRgdGP 128929
+IMSRxrBh 128930
+4Liq4Liy4Lin 128931
+0LjRgNC+0LLQsNC9 128932
+INee16HXpNeo 128933
+15LXnw== 128934
+IGtp4bq/bg== 128935
+INCo 128936
+cMOp 128937
+0LHRgw== 128938
+0L7QstC+0Lk= 128939
+0LHQsA== 128940
+INil2YTYpw== 128941
+15DXnNeZ 128942
+IHjDonk= 128943
+IGLhu59p 128944
+INep15U= 128945
+5Lq644Gu 128946
+16fXmded 128947
+4LmA4LiU4Li34Lit4LiZ 128948
+IGtow6E= 128949
+INeV15zXlA== 128950
+15PXldeq 128951
+INei15HXldeo 128952
+INio2LTZg9mE 128953
+INmH2YbYp9mD 128954
+0YLRgNCw 128955
+IO2VmOuKlA== 128956
+4Lij4Lit4Lia 128957
+b3dhxYI= 128958
+aMOp 128959
+IGRp4buFbg== 128960
+INeU15vXnA== 128961
+INij2LM= 128962
+IGNodXnhu4du 128963
+4Lij4Liw4LiU4Lix4Lia 128964
+IE5o4buvbmc= 128965
+INeQ15fXqg== 128966
+INit2YjZhA== 128967
+0LvQvtCy 128968
+16DXqA== 128969
+INeV16A= 128970
+IGNoxqFp 128971
+IGnDp2luZGU= 128972
+0YHRgtCy0YM= 128973
+IHBo4buR 128974
+INGB0YM= 128975
+56eB44Gv 128976
+IGNo4bupbmc= 128977
+IHbhu7Fj 128978
+4LmB4Lit 128979
+IGzhuq1w 128980
+IHThu6tuZw== 128981
+5bCR44GX 128982
+IE5ndXk= 128983
+IE5ndXnhu4Vu 128984
+INmB2YrZhw== 128985
+INCx0LA= 128986
+15nXmdeq 128987
+INec16LXqdeV16o= 128988
+INee15s= 128989
+IG5naGnhu4dt 128990
+INC80L3QvtCz0L4= 128991
+INC10LU= 128992
+65CY7Ja0 128993
+IGzhu6Np 128994
+INec15zXkA== 128995
+INeb158= 128996
+IGNow60= 128997
+44Gn44Gu 128998
+15fXlQ== 128999
+16nXlded 129000
+INee16g= 129001
+INCU0LvRjw== 129002
+xYE= 129003
+INeb15DXqdeo 129004
+IE3hu5l0 129005
+INmI2KfZhNiq 129006
+IOydtOufsA== 129007
+xZ9h 129008
+IGNoaeG6v24= 129009
+IGFyYXPEsW5kYQ== 129010
+INeR15DXqteo 129011
+44GV44KM44Gm44GE44KL 129012
+2LTZg9mE 129013
+IHTGsOG7o25n 129014
+INiq2Ko= 129015
+IEPDsw== 129016
+IGLhu48= 129017
+IHThu4luaA== 129018
+IGtow60= 129019
+INC/0YDQvtGB0YI= 129020
+INC/0YDQvtGB0YLQvg== 129021
+INmI2YLYp9mE 129022
+IGdpw6Fv 129023
+IE7hur91 129024
+15DXnteo 129025
+16LXoNeZ15nXnw== 129026
+7Y64 129027
+2YfYr9mB 129028
+IELhu5k= 129029
+IGLDoG4= 129030
+IG5ndXnDqm4= 129031
+IGfDvHplbA== 129032
+4Liq4Liy4Lii 129033
+7LKc 129034
+157Xldeo 129035
+IHBow6Ju 129036
+16HXpNen 129037
+16fXkdec 129038
+INin2YTZhdiq2K0= 129039
+INin2YTZhdiq2K3Yr9ip 129040
+2KfYptiv 129041
+INeQ157XqA== 129042
+IGtpxZ9p 129043
+7KSA 129044
+IHRydXnhu4Fu 129045
+INmE2YfYpw== 129046
+INCc0LA= 129047
+4Lia4Lij4Li04Lip 129048
+4Lia4Lij4Li04Lip4Lix 129049
+4Lia4Lij4Li04Lip4Lix4LiX 129050
+INep16DXmded 129051
+INC80LXQvdGP 129052
+xZ9l 129053
+IGRp4buHbg== 129054
+INeQ16DXl9eg15U= 129055
+a8O8 129056
+IGPhu5U= 129057
+IG3hu5dp 129058
+d8Ok 129059
+2YXZig== 129060
+IGhp4buDdQ== 129061
+64us 129062
+INeU15fXnA== 129063
+IHTDqm4= 129064
+IGtp4buHbg== 129065
+2YbZgtmE 129066
+IHbhu4c= 129067
+15PXqg== 129068
+INCg0L7RgdGB0LjQuA== 129069
+0LvRgw== 129070
+INin2YTYudix2KjZitip 129071
+INi32LHZitmC 129072
+INeU15HXmdeq 129073
+0YHQtdGA 129074
+INC80L3QtQ== 129075
+w6R1 129076
+IHRyaeG7h3U= 129077
+IMSR4bun 129078
+INeo15E= 129079
+2KrZh9mF 129080
+4LiL4Li1 129081
+IOyngOq4iA== 129082
+bGnFm215 129083
+2K/YudmF 129084
+44Gg44KN44GG 129085
+0YHQutC40LU= 129086
+IGjhu49p 129087
+INen15U= 129088
+0YDRg9GB 129089
+2YbYuNix 129090
+44Gu44KC 129091
+INeU15vXmQ== 129092
+IOybkA== 129093
+2YjZhw== 129094
+INmI2Y4= 129095
+IELhuqFu 129096
+0L/Qu9Cw0YI= 129097
+INee157XqQ== 129098
+0LvRjtCx 129099
+INC90YPQttC90L4= 129100
+IHRoxrA= 129101
+44G1 129102
+44GP44KJ44GE 129103
+2LHYtA== 129104
+16jXldeX 129105
+INmK2KrZhQ== 129106
+INem16jXmdea 129107
+IHBow6E= 129108
+4Lih4Lit4LiH 129109
+INeR15DXldek158= 129110
+IGPhuqNuaA== 129111
+IO2VnOuLpA== 129112
+INeU157Xqg== 129113
+4LiV4LmI4Liy4LiH4LmG 129114
+4Lih4Li14LiB4Liy4Lij 129115
+0YHQutC40YU= 129116
+INCS0YHQtQ== 129117
+INin2Yg= 129118
+2KzZig== 129119
+44GT44Go44Gv 129120
+IGTDoGk= 129121
+IGjhu5M= 129122
+6Ieq5YiG44Gu 129123
+4LmE4Lir4LiZ 129124
+65Ok7J2E 129125
+IFbEg24= 129126
+INC00LDQtg== 129127
+INC00LDQttC1 129128
+0YvQvNC4 129129
+0LvQsNGB0Yw= 129130
+2YrZiNmG 129131
+2YbZiA== 129132
+Y8Oz 129133
+44GX44Gm44GE44Gf 129134
+44Gg44GL44KJ 129135
+2LfYp9mE2Kg= 129136
+IGPhu61h 129137
+0L/RgNC+0YE= 129138
+44Gq44Gp44Gu 129139
+4Lij4Li44LmI4LiZ 129140
+IGNoaeG6v2M= 129141
+0LvRiw== 129142
+INGP0LLQu9GP0LXRgtGB0Y8= 129143
+IG7hu5Vp 129144
+44Gu44GK 129145
+INeQ16rXnQ== 129146
+IOuVjOusuOyXkA== 129147
+4LiB4Lil4Liy4LiH 129148
+IGJhxZ9rYQ== 129149
+7ISd 129150
+INGG0LXQuw== 129151
+2YHZgg== 129152
+44Gr44KI44KL 129153
+2YLYpw== 129154
+IMOnxLFrYXI= 129155
+IGPhu6l1 129156
+2LfYpw== 129157
+INep16o= 129158
+4LmC4LiE 129159
+INee15w= 129160
+INeU16TXqA== 129161
+INCz0LTQtQ== 129162
+INiu2Lc= 129163
+5YmN44Gr 129164
+Y2rEmQ== 129165
+INeX16nXldeR 129166
+16jXktei 129167
+IGtob+G6o25n 129168
+IMSR4budaQ== 129169
+INCg0LU= 129170
+INC+0L3QsA== 129171
+INeQ16DXlQ== 129172
+44Gu44Gr 129173
+INin2YTYsNmK2YY= 129174
+0LrRg9C/ 129175
+44K144O844M= 129176
+44K144O844OT 129177
+44K144O844OT44K5 129178
+0LLQsNC7 129179
+0LPQtQ== 129180
+IGdp4buvYQ== 129181
+IEtow7RuZw== 129182
+IOKXiw== 129183
+4LiB4Lil4Li44LmI4Lih 129184
+INmF2YbYsA== 129185
+4Lit4LmI4Liy4LiZ 129186
+INGB0L/QvtGB0L7QsQ== 129187
+IMSR4buZaQ== 129188
+IGRpxJ9lcg== 129189
+IOC4luC5ieC4sg== 129190
+2YXYq9mE 129191
+INeU15DXmQ== 129192
+INiv2YjZhg== 129193
+2YrYsdin2YY= 129194
+0YnQuA== 129195
+2KjZhtin2KE= 129196
+INii2K7YsQ== 129197
+2LjZh9ix 129198
+INeR15s= 129199
+INin2YTZhdi5 129200
+44OS 129201
+IHThuqV0 129202
+IG3hu6Vj 129203
+IGRvxJ9ydQ== 129204
+44Gf44KJ 129205
+INeh15U= 129206
+IHjDoWM= 129207
+4Lij4Lit 129208
+IGPEg24= 129209
+INC+0L3Quw== 129210
+INC+0L3Qu9Cw0LnQvQ== 129211
+IGvDvQ== 129212
+IGNow6Ju 129213
+IOC5hOC4oeC5iA== 129214
+2KfYrdip 129215
+csOhbg== 129216
+16DXmdeZ150= 129217
+INeR158= 129218
+INCW 129219
+4LiV4Lij4LiH 129220
+0LTRiw== 129221
+IHPhuq9j 129222
+2YTYqg== 129223
+44Ot44O8 129224
+INmE2YY= 129225
+INeo15U= 129226
+IGTGsOG7m2k= 129227
+4LmA4LiY 129228
+4LmA4LiY4Lit 129229
+ZcSfaQ== 129230
+INeV16k= 129231
+INmE2KM= 129232
+IGfhurdw 129233
+IGPhu5E= 129234
+44Go44Gm44KC 129235
+2LHZiNiz 129236
+INec15TXmQ== 129237
+IOuzuA== 129238
+5LiK44GS 129239
+IG3hu6lj 129240
+0YXQsA== 129241
+IOyerA== 129242
+4LiJ4Lix4LiZ 129243
+0YDRg9C2 129244
+IGHDp8Sxaw== 129245
+2YjYp9mE 129246
+INeW157Xnw== 129247
+5Lq644Gv 129248
+2LnZitmG 129249
+0Y/RhQ== 129250
+INeS15PXldec 129251
+16jXldeR 129252
+Z8Oz 129253
+65286rOg 129254
+IGFya2FkYcWf 129255
+2YbYtNix 129256
+INCz0L7QtNGD 129257
+INCx0L7Qu9GM0YjQtQ== 129258
+44Gh44KH44Gj44Go 129259
+IGPDonU= 129260
+IHPDoXQ= 129261
+7ZS8 129262
+IHRp4bq/bg== 129263
+7ZW07JW8 129264
+INmI2KPZhg== 129265
+4LiZ4Liy4LiZ 129266
+INeR15DXntem16I= 129267
+INeR15DXntem16LXldeq 129268
+INec16g= 129269
+IHF14bqjbg== 129270
+INmI2KfZhNij 129271
+INeQ15XXqteU 129272
+IOyWtOuWpA== 129273
+IOqyg+ydgA== 129274
+2K3Ys9mG 129275
+IG3huqV0 129276
+4LiE4Li54LmI 129277
+44Os44O8 129278
+INCU0LA= 129279
+IG9sbWFzxLE= 129280
+IHRodeG7mWM= 129281
+16DXlw== 129282
+7Yag 129283
+IHPDtnlsZQ== 129284
+44Gd44GG44Gn44GZ 129285
+INiq2YPZiNmG 129286
+0LvRg9GH 129287
+15zXmdea 129288
+INij2K3Yrw== 129289
+0LvQuNGB0Yw= 129290
+INCy0YHQtdCz0L4= 129291
+INeU16jXkQ== 129292
+IOuquw== 129293
+b8Sf 129294
+b8SfbHU= 129295
+IOyEoA== 129296
+INC60LDRgA== 129297
+4Lig4Liy4LiE 129298
+ZcWE 129299
+IOC4geC5hw== 129300
+IGF5bsSx 129301
+IGLDoA== 129302
+44Gq44KT44Gm 129303
+IOuqqOuToA== 129304
+2YLYsdin2LE= 129305
+44GX44Gq44GE 129306
+INCS0L4= 129307
+INmI2YfZig== 129308
+0L3QuNC60Lg= 129309
+44KM44Gf 129310
+IGNodeG6qW4= 129311
+16jXog== 129312
+2YHYsdmK2YI= 129313
+44KS5Y+X44GR 129314
+IMSRw7puZw== 129315
+0LHQtQ== 129316
+15vXldeX 129317
+0L/Rgw== 129318
+INeV15LXnQ== 129319
+157XoNeZ 129320
+7Zal 129321
+16bXmded 129322
+4LiL4Li0 129323
+2YfZhg== 129324
+0L3QtdC8 129325
+INeR15HXmdeq 129326
+2LHYuQ== 129327
+IOC4qg== 129328
+IMSQw6A= 129329
+7ZWY64uk 129330
+IOG6pXk= 129331
+15fXldeT 129332
+15fXldeT16k= 129333
+INGH0LXRgNC10Lc= 129334
+0YPQuw== 129335
+IELDrG5o 129336
+IOqyg+ydhA== 129337
+INeS16g= 129338
+5LuY44GR 129339
+15fXnNen 129340
+INiq2YTZgw== 129341
+4LmD4Liq4LmI 129342
+c3rEhQ== 129343
+2YLYp9mF 129344
+2K/ZiNix 129345
+INmB2YLYtw== 129346
+IGjhu691 129347
+INC80L7Qs9GD0YI= 129348
+IGfhu41p 129349
+INen16g= 129350
+4LiI4Liw4Lih4Li1 129351
+2KrZgtiv2YU= 129352
+INi52KjYsQ== 129353
+INec15TXnQ== 129354
+INGB0LDQvNC+ 129355
+16HXk9eo 129356
+IGPDoG5n 129357
+csOt 129358
+IOyepQ== 129359
+65Ok7J2Y 129360
+INmE2YM= 129361
+0L/QvtGA0YI= 129362
+IGto4bqj 129363
+INGB0LXQsdGP 129364
+16DXnw== 129365
+INiv2YjYsQ== 129366
+IG3hu58= 129367
+IGPDonk= 129368
+IGZhcms= 129369
+IGZhcmtsxLE= 129370
+0LDRjtGC 129371
+IHRy4buxYw== 129372
+d2nEmWtzeg== 129373
+IHRodeG7kWM= 129374
+INiq2K3Yqg== 129375
+2KrZhA== 129376
+0L7QstGL0LU= 129377
+64Kg 129378
+INCy0LDQvA== 129379
+2KjZhNi6 129380
+IOqwmeydgA== 129381
+7YyQ 129382
+2YTYqA== 129383
+IG5hc8SxbA== 129384
+INC+0LTQuNC9 129385
+0LzQsNC9 129386
+INi52YTZitmH2Kc= 129387
+0LHQuA== 129388
+INek16nXldeY 129389
+15HXqNeZ 129390
+INep16DXlA== 129391
+IOuPhA== 129392
+IMSQ4bqhaQ== 129393
+INeQ15XXqted 129394
+INin2YTYrdix 129395
+INCx0L4= 129396
+4LiI4Li44LiU 129397
+IHLDtQ== 129398
+IGRlxJ9pxZ8= 129399
+IOuLqA== 129400
+INGB0LvRg9GH0LA= 129401
+INGB0LvRg9GH0LDQtQ== 129402
+INeQ16DXqdeZ150= 129403
+15PXow== 129404
+16nXkdeq 129405
+INep15zXm9ed 129406
+IGNow7o= 129407
+bmlrw7N3 129408
+IHRhbsSx 129409
+IGPDoW8= 129410
+IMSRw6E= 129411
+INeQ15PXnQ== 129412
+IOqwlQ== 129413
+IG5oaeG7h20= 129414
+INec16E= 129415
+INeb16rXkQ== 129416
+INeU16HXpNeo 129417
+IMSRxINuZw== 129418
+IOuRkA== 129419
+4Lic4Li0 129420
+4Lic4Li04Lin 129421
+2KzYpw== 129422
+IOqwkA== 129423
+2LHYow== 129424
+2LPYqtiu2K/ZhQ== 129425
+44Gr44Gq44KK44G+44GZ 129426
+IHThu7c= 129427
+15jXldeo 129428
+0LPQvtCy0L7RgA== 129429
+INCy0L7RgQ== 129430
+INmF2YbZh9in 129431
+0LjRgNC+0LLQsNGC0Yw= 129432
+IMSR4bqneQ== 129433
+16DXkg== 129434
+INmF2Yg= 129435
+INmF2YjZgti5 129436
+16jXm9eZ 129437
+2KrZjw== 129438
+66qo 129439
+INeq15U= 129440
+2YrYp9mL 129441
+4LmD4LiU 129442
+44KK44G+44GZ 129443
+4Lit4Lii4Li54LmI4LmD4LiZ 129444
+INij2YjZhA== 129445
+INij2K7YsdmJ 129446
+IGPGsA== 129447
+2LXYp9ix 129448
+157Xl9ep15E= 129449
+0LHRgNCw 129450
+xYRza2k= 129451
+0LHRgA== 129452
+INmK2Y8= 129453
+4LiB4Li04LiZ 129454
+IGNo4buRbmc= 129455
+2YXZjw== 129456
+IOC4hOC4t+C4rQ== 129457
+INiq2YY= 129458
+dMOt 129459
+ecSH 129460
+IG3huqFuZw== 129461
+2YHZiA== 129462
+IGTDvG55YQ== 129463
+16fXqNeQ 129464
+INen15w= 129465
+INit2KfZhA== 129466
+Y8OtYQ== 129467
+IOC5gOC4o+C4sg== 129468
+INeo15XXpteU 129469
+IMOhcA== 129470
+67CV 129471
+2KfZgtip 129472
+0L3QuNGO 129473
+INeQ15zXlQ== 129474
+INee16HXlQ== 129475
+44Gn44Gv44Gq44GP 129476
+IHRy4bqj 129477
+INen16nXqA== 129478
+bWnFn3Rpcg== 129479
+IGzGsHU= 129480
+IGjhu5c= 129481
+INCx0YvQu9C4 129482
+IGzhuqV5 129483
+2LnZhNmF 129484
+IMO2emVs 129485
+5rCX44GM 129486
+INeT16jXmg== 129487
+2YXYrw== 129488
+c8SxbsSx 129489
+16DXldep15A= 129490
+csOzdw== 129491
+0YfQtdGA 129492
+6rWQ7Jyh 129493
+INCc0L4= 129494
+0LvQtdCz 129495
+IFbhu5tp 129496
+4Lin4Lix4LiZ4LiZ4Li14LmJ 129497
+0Y7RidC40LU= 129498
+44GK44GZ 129499
+44GK44GZ44GZ 129500
+44GK44GZ44GZ44KB 129501
+64+F 129502
+INeZ15TXmdeU 129503
+157XmNeo 129504
+0Y/QvNC4 129505
+IGzhu7Fh 129506
+IMSR4bqldQ== 129507
+4LmA4Liq4Li14Lii4LiH 129508
+IHTGsMahbmc= 129509
+65Ox 129510
+INGB0YLQsNGA 129511
+4LmD4Lia 129512
+4Lin4Lix4LiU 129513
+IMSwc3RhbmJ1bA== 129514
+IOC4iOC4sA== 129515
+4LiV4Lil4Liy4LiU 129516
+INio2Yo= 129517
+4LmB4LiZ4Liw 129518
+4LmB4LiZ4Liw4LiZ4Liz 129519
+2LPYp9i52K8= 129520
+INio2KM= 129521
+IGtp4buDbQ== 129522
+2K3Ys9io 129523
+4LiK4Lix4LmJ4LiZ 129524
+INeV16LXldeT 129525
+0L7QstGL0YU= 129526
+0L7RgdC90L7Qsg== 129527
+IHRyxrDhu59uZw== 129528
+16bXkdei 129529
+IMOtdA== 129530
+IGvhu7k= 129531
+Y3LDqQ== 129532
+0Y/QvA== 129533
+6rWw 129534
+44GM44Gq44GE 129535
+2YrZhNip 129536
+44OV44Kj 129537
+2LHZiQ== 129538
+INmK2KzYqA== 129539
+INeQ16M= 129540
+IGPhu7Fj 129541
+44KJ44KM44Gf 129542
+IOC4nOC4ueC5iQ== 129543
+IOC4rQ== 129544
+bGFyxLFtxLF6 129545
+IGthZMSxbg== 129546
+IOq3uOuemA== 129547
+IOq3uOuemOyEnA== 129548
+IOuYkOuKlA== 129549
+IMSR4bqj 129550
+IMSR4bqjbQ== 129551
+INeQ15XXnteo 129552
+IHnhur91 129553
+Y2nEhQ== 129554
+Y2nEhWc= 129555
+IHThu5E= 129556
+INep15DXoNeZ 129557
+IGR6aWHFgmE= 129558
+0YnQsA== 129559
+IMSRw6Bu 129560
+c8SxbmE= 129561
+44GT44KM44Gv 129562
+INeR15zXmQ== 129563
+INeR15nXqdeo15DXnA== 129564
+0LvQvtGB0Yw= 129565
+IGdp4buv 129566
+6rCQ 129567
+0YDQvtC9 129568
+2KrYrNin2LE= 129569
+0LPQu9Cw0LI= 129570
+0LLQuNC9 129571
+IGjhuqFu 129572
+IHlhcMSxbGFu 129573
+2KjYsw== 129574
+IOC4nuC4o+C5ieC4reC4oQ== 129575
+6rSA66as 129576
+bcSxxZ90xLFy 129577
+YsO8 129578
+csO8Y2s= 129579
+IEJhxZ9rYW7EsQ== 129580
+INmE2YrYsw== 129581
+IHPGoQ== 129582
+4LiI4Lix4LiH4Lir4Lin 129583
+4LiI4Lix4LiH4Lir4Lin4Lix4LiU 129584
+2K/Yp9ih 129585
+INeU15s= 129586
+dsOt 129587
+16nXkNeo 129588
+IGjGsOG7n25n 129589
+IGLDs25n 129590
+IENow61uaA== 129591
+xIVj 129592
+4LmA4LiB4Li14LmI4Lii4Lin4LiB4Lix4Lia 129593
+IHThu6k= 129594
+IHThu6lj 129595
+INGG0LLQtdGC 129596
+IHThu5Fp 129597
+IG5naMSpYQ== 129598
+2YTYp9i52Kg= 129599
+2K/ZhA== 129600
+INek16LXnQ== 129601
+aMO2cg== 129602
+4LiK4Li44LiU 129603
+4Lie4Li5 129604
+4Lie4Li54LiU 129605
+0L/QsNGB 129606
+IMWfdQ== 129607
+IHTGsOG7n25n 129608
+2K7Yp9ix2Kw= 129609
+IMOibQ== 129610
+INC40L3RgtC10YDQtdGB 129611
+0LXQvdC90YvRhQ== 129612
+15DXoNeZ 129613
+2KjYr9ij 129614
+652864qU 129615
+7Lm0 129616
+5pa544GM 129617
+0LvQuNCy 129618
+IOC4hOC4mQ== 129619
+16LXqNea 129620
+4LiC4Lit4LiH4LiE4Li44LiT 129621
+0L/QsNC0 129622
+IGPhuqFuaA== 129623
+IOuCqA== 129624
+IMSRw6J1 129625
+IGJp4buDdQ== 129626
+44KC44GC44KL 129627
+15zXkg== 129628
+IOC4quC4s+C4q+C4o+C4seC4mg== 129629
+IHh14buRbmc= 129630
+16HXlQ== 129631
+INiw2KfYqg== 129632
+INCc0LU= 129633
+2LnYp9mE2YU= 129634
+15DXoQ== 129635
+2KjZitip 129636
+2LTYpw== 129637
+0LjQtdC8 129638
+IE5nxrDhu51p 129639
+7ZiR 129640
+0YHQu9C+0LI= 129641
+INC/0LA= 129642
+IG3huqt1 129643
+INC/0YDQvtGG0LXRgdGB 129644
+IE5ow6A= 129645
+0L/RgNC+0LjQtw== 129646
+0L/RgNC+0LjQt9Cy0L7QtA== 129647
+4Lig4Liy4Lii4LmD4LiZ 129648
+IOC4muC4suC4lw== 129649
+157XoNeV 129650
+INC+0YDQs9Cw0L0= 129651
+16jXpteV 129652
+15XXnteZ150= 129653
+IHlhesSx 129654
+IGTDuQ== 129655
+44Os44Oz 129656
+2YjZhNmK 129657
+4Lii4Li5 129658
+IHRyw7I= 129659
+4LmA4Lie4Lil4LiH 129660
+INee15zXkA== 129661
+4LiV4Lil 129662
+4LiV4Lil4Lit4LiU 129663
+IMSR4bqhdA== 129664
+INeX15PXqQ== 129665
+cMOzxYI= 129666
+INee15PXmQ== 129667
+dWrEhWM= 129668
+157XoNeU15w= 129669
+INep15HXlQ== 129670
+INeU157Xqdek15g= 129671
+INeQ15zXlA== 129672
+INmI2LDZhNmD 129673
+4LmA4Lie4Lij4Liy4Liw 129674
+IMSRb8Ogbg== 129675
+IO2VqOq7mA== 129676
+IGThu6Vj 129677
+2LTYqg== 129678
+IHVsYQ== 129679
+IHVsYcWf 129680
+IHF1w70= 129681
+INeU15LXk9eV15w= 129682
+4LiV4Lix4LmJ4LiH4LmB4LiV4LmI 129683
+INep16g= 129684
+2LTZh9iv 129685
+16DXqdeZ150= 129686
+4Lie4Lil 129687
+2LHZiNin 129688
+44KM44Gm 129689
+INC90LjRhQ== 129690
+INC00LXQu9Cw 129691
+44Gn44GN44Gq44GE 129692
+xYJvxbw= 129693
+15DXl9eo 129694
+7L2U 129695
+44Ki44OD44OX 129696
+2K/Zgdi5 129697
+IHRp4buHbg== 129698
+IGto4buP 129699
+IGto4buPZQ== 129700
+INin2YTYudin2YXYqQ== 129701
+44Gr44GC44KL 129702
+IMSR4buZYw== 129703
+7KGx 129704
+IGPhu6U= 129705
+0LnRgtC1 129706
+INC30LDQutC+0L0= 129707
+INC/0YDQvtC10LrRgg== 129708
+7Ja4 129709
+2YTYrQ== 129710
+IMOnYWzEscWfbWE= 129711
+44KS44GZ44KL 129712
+0YXQuA== 129713
+2LnYp9iv 129714
+INeg157XpteQ 129715
+INeo15k= 129716
+4Lit4Lit4LiB4Lih4Liy 129717
+IFTDtGk= 129718
+IHRo4bqnbg== 129719
+INmK2Kc= 129720
+4Lil4Liy4Lii 129721
+INCw0LLRgtC+ 129722
+IHPEsXJh 129723
+INmD2KvZitix 129724
+2YXZitiy 129725
+INin2YTYudmE2YU= 129726
+5pa544Gv 129727
+15XXoteT 129728
+INC+0LHQu9Cw0YHRgtC4 129729
+15nXnNeZ150= 129730
+44GM5Ye6 129731
+4LiY4Li4 129732
+4LiY4Li44Lij 129733
+4LiY4Li44Lij4LiB4Li04LiI 129734
+2YLYqtmE 129735
+16jXkNeV 129736
+IG5ndQ== 129737
+IG5ndeG7k24= 129738
+IOC4oeC4sg== 129739
+INC/0LvQsNC9 129740
+dMOzcmlv 129741
+IGN14buRaQ== 129742
+0YHQutC+0Lw= 129743
+INin2YTZhdin2LY= 129744
+INin2YTZhdin2LbZig== 129745
+INeR16LXnA== 129746
+INeo15HXmded 129747
+IGx14bqtbg== 129748
+2YPZiA== 129749
+4LiX4Lix4LmJ4LiH4Lir4Lih4LiU 129750
+0LLQsNC9 129751
+IHRob+G6oWk= 129752
+4LmE4Lit 129753
+0LHQuNGA 129754
+INin2YTYtg== 129755
+2KrYpw== 129756
+INGA0L7QtA== 129757
+IFbDoA== 129758
+157Xmdef 129759
+INCx0YvQu9Cw 129760
+0LrQsNC80Lg= 129761
+INCU0LU= 129762
+dMSxaw== 129763
+16fXqNeZ 129764
+IGXEn2l0aW0= 129765
+INmD2KjZitix 129766
+2KjZgw== 129767
+INmE2Yg= 129768
+0LLQvtC5 129769
+IOOBk+OBrg== 129770
+INGC0YDRg9C0 129771
+bXnFm2w= 129772
+IHPGsA== 129773
+4Lie4Li14LmI 129774
+IOC5geC4peC5ieC4pw== 129775
+16LXpw== 129776
+INeX15HXqNeq 129777
+4Lij4Liw4Lir4Lin 129778
+4Lij4Liw4Lir4Lin4LmI4Liy4LiH 129779
+15nXmdeU 129780
+INin2YTZhtin2LM= 129781
+w7xuw7w= 129782
+INec157XlA== 129783
+IGNoxrDGoW5n 129784
+IEjhu5M= 129785
+2KfYsdiq 129786
+44KI44GG44Gn44GZ 129787
+bMOh 129788
+16fXmdeZ150= 129789
+5pys5b2T 129790
+5pys5b2T44Gr 129791
+44GT44KT44Gq 129792
+0YHQvtCy 129793
+INeV15c= 129794
+4LmA4LiB4LmH4Lia 129795
+INC60YLQvg== 129796
+4LmC4Lij4LiE 129797
+INi02LHZg9ip 129798
+2LnYstmK 129799
+2LnYstmK2LI= 129800
+2LfZhNmC 129801
+0L/Rg9GB0YI= 129802
+2YHYqtit 129803
+656A 129804
+IGjDo3k= 129805
+2LbZhQ== 129806
+66aw 129807
+5aC05ZCI44Gv 129808
+44Kq44O8 129809
+IGjhuq9u 129810
+INeQ15HXmdeR 129811
+INep15zXlNed 129812
+INeU15nXmdeq15Q= 129813
+INin2YTYr9mI2YTYqQ== 129814
+INin2YTZiNmC 129815
+INin2YTZiNmC2Ko= 129816
+44GC44G+44KK 129817
+IHRhxZ/EsQ== 129818
+xLBO 129819
+16LXoden 129820
+44Gm44GE44Gf 129821
+IHThu5VuZw== 129822
+INin2YTYpdmG2LM= 129823
+INin2YTYpdmG2LPYp9mG 129824
+0YDQtdGI 129825
+IGfDoWk= 129826
+INGG0LXQvQ== 129827
+INmB2YLYrw== 129828
+2YXYp9iq 129829
+44GV44KT44Gu 129830
+IHBow7k= 129831
+15jXlA== 129832
+INmI2KfZhNiq2Yo= 129833
+INio2YM= 129834
+7J2064KY 129835
+0LrRgQ== 129836
+2YXZitix 129837
+IHbDuW5n 129838
+INin2YTYtNi52Kg= 129839
+IE5oxrBuZw== 129840
+44OA44O8 129841
+INeX15nXmded 129842
+INi02K7YtQ== 129843
+16fXldeT 129844
+6rKA 129845
+16LXqQ== 129846
+16LXldec150= 129847
+16bXldeo 129848
+2LnZgtiv 129849
+IGnFn2xlbQ== 129850
+INeU15HXkA== 129851
+IGTGsOG7oW5n 129852
+4Lif4Lij4Li1 129853
+IHBow61h 129854
+44Gu5Lit44Gn 129855
+INC/0Lg= 129856
+IG5nw6BuaA== 129857
+0L3QuNC80LA= 129858
+INmH2YQ= 129859
+INeV15DXqg== 129860
+IMSRw6FuZw== 129861
+w6lxdWlwZQ== 129862
+INGN0YLQvtGC 129863
+IGfDtnJldg== 129864
+66ek 129865
+IHF1w6Ju 129866
+5byV44GN 129867
+5pmC44Gr 129868
+INio2YXYpw== 129869
+157Xmdeq 129870
+IMO8bGtl 129871
+INee16fXlded 129872
+15HXnw== 129873
+5rCX5oyB44Gh 129874
+IOunjuydgA== 129875
+IHnDvGtzZWs= 129876
+0YbQtdC90YLRgA== 129877
+INmF2KzZhNiz 129878
+56eB44Gu 129879
+2YLYr9ix 129880
+IOu2gOu2hA== 129881
+IOywqA== 129882
+2K7Ysdis 129883
+44GL44Gq44KK 129884
+67O064uk 129885
+INee15nXk9ei 129886
+cGXFgm5p 129887
+IHjhu60= 129888
+7JeQ7ISc64qU 129889
+INio2KfZhNmF 129890
+INmI2YXYpw== 129891
+INGN0YLQvtC5 129892
+2KjZitmG 129893
+bsO8 129894
+2K3Ysg== 129895
+2K3Ystio 129896
+INGA0LDQsdC+0YLQsA== 129897
+IE5o4bqtdA== 129898
+2YTYp9ih 129899
+IOuTpA== 129900
+IOuTpOyWtA== 129901
+44KE44GZ44GE 129902
+15fXlten 129903
+INeU15fXkdeo15Q= 129904
+0L/QuNGC 129905
+44GL44KJ44Gu 129906
+IOunkOyUgA== 129907
+INek15U= 129908
+2YTZjg== 129909
+4LmA4LiV4LmH4Lih 129910
+INCa0L4= 129911
+IG3Ds3dp 129912
+IHTDrW4= 129913
+16jXktep 129914
+16TXqNen 129915
+IHRy4bqhbmc= 129916
+INCe0L0= 129917
+15fXldel 129918
+INi52YbYr9mF2Kc= 129919
+INio2LE= 129920
+5L2/44GE 129921
+IHLhu5luZw== 129922
+64yA66Gc 129923
+7Yis 129924
+IGt0w7NyeWNo 129925
+0LLQuNC0 129926
+4Lil4Li54LiB4LiE4LmJ4Liy 129927
+IG1vZ8SF 129928
+INep15c= 129929
+15HXl9eo 129930
+44OW44Ot44Kw 129931
+IFRow6BuaA== 129932
+INeU16jXmQ== 129933
+INGB0YLQsNGC0Yw= 129934
+IEjhu5lp 129935
+4Lia4LmJ4Liy4LiH 129936
+54m544Gr 129937
+IMSQ4bupYw== 129938
+6ICF44Gu 129939
+16LXnteV15M= 129940
+15jXqNeU 129941
+0KU= 129942
+INmF2YXYpw== 129943
+IGXFnw== 129944
+INC90LXQvtCx0YXQvtC00LjQvNC+ 129945
+0L3QuNC60L7Qsg== 129946
+IMO8emVyaW5kZQ== 129947
+YcWCYQ== 129948
+IGNo4buLdQ== 129949
+INin2YTYr9mK2YY= 129950
+2KPYrtio2KfYsQ== 129951
+IMSRYXU= 129952
+44GM5aSa44GE 129953
+asSFY3ljaA== 129954
+2K/YrtmE 129955
+bGFyxLFuZA== 129956
+bGFyxLFuZGFu 129957
+IHPhurs= 129958
+4Lie4Li04LmA4Lio 129959
+4Lie4Li04LmA4Lio4Lip 129960
+16rXnw== 129961
+dMSxxJ/EsQ== 129962
+IGx14bqtdA== 129963
+IMWeZQ== 129964
+44Kr44O8 129965
+44Gu44GC44KL 129966
+INeU15DXqteo 129967
+INin2YTYotmG 129968
+xLFsZMSx 129969
+IMOhbw== 129970
+INC90LDRh9Cw0Ls= 129971
+IHZp4buHbg== 129972
+INeR16LXldec150= 129973
+0LfQvdCw0Yc= 129974
+15nXmNeU 129975
+0LrQsNC8 129976
+INCY0Lc= 129977
+4LmA4LiC4Li14Lii4LiZ 129978
+4LiZ4LmJ4Lit4LiH 129979
+0YLRgNC+ 129980
+4LmA4Lif 129981
+INC20LjQt9C90Lg= 129982
+IOC4quC5iOC4p+C4mQ== 129983
+IHbhuq1u 129984
+IOq0gOugqA== 129985
+IGzDonU= 129986
+16HXmNeo 129987
+16fXqQ== 129988
+2LPZitix 129989
+INeQ15XXqteZ 129990
+IG3DtGk= 129991
+2KfYptio 129992
+INC+0YHRgtCw 129993
+IG3Ds24= 129994
+INeR157Xp9eV150= 129995
+INiv2KfYrtmE 129996
+INeQ15XXqA== 129997
+INCy0LDRgQ== 129998
+2YPYtNmB 129999
+7Jio 130000
+4LiW4LmI4Liy4Lii 130001
+IGt1bGxhbsSxbA== 130002
+IHTDtA== 130003
+44Gr44KI44KK 130004
+IOuYkO2VnA== 130005
+INei15HXldeT15Q= 130006
+IHJpw6o= 130007
+IHJpw6puZw== 130008
+IHlha8Sxbg== 130009
+2LLYpw== 130010
+xbs= 130011
+15DXldeb15w= 130012
+2LTYp9ix2YM= 130013
+INCx0LXRgQ== 130014
+17Q= 130015
+INin2KjZhg== 130016
+IFThu5VuZw== 130017
+2YbYuA== 130018
+xZt3aWFk 130019
+44K144O8 130020
+4Lir4Liy4Lii 130021
+IEfDvG4= 130022
+IGhha2vEsW5kYQ== 130023
+4LmA4LiC4LmJ4Liy4Lih4Liy 130024
+2LLZhg== 130025
+INCg0L4= 130026
+IGJp4buDbg== 130027
+44Gp44GT 130028
+2YHYudmE 130029
+2LLYuQ== 130030
+16TXqNeY 130031
+INeU158= 130032
+2KPZh9mE 130033
+IHRo4bqldA== 130034
+2K3ZhdmE 130035
+0YfRgw== 130036
+IOyCrOyLpA== 130037
+7LC4 130038
+IOychO2VtA== 130039
+2YjYuA== 130040
+INCf0L7QtA== 130041
+IGtob+G6o24= 130042
+0YLQtdC9 130043
+INmB2KfZhA== 130044
+0YHQsNC0 130045
+4LiZ4Lit4LiZ 130046
+INin2YTYs9i52YjYr9mK2Kk= 130047
+ItiM 130048
+INin2YTZkg== 130049
+44KJ44Ga 130050
+IHRvw6Fu 130051
+IGNo4bqvYw== 130052
+15vXmdeo 130053
+bcOpZA== 130054
+bcOpZGlh 130055
+2LLZiA== 130056
+IHlhbsSx 130057
+16TXoNeZ150= 130058
+2K3YuA== 130059
+INCx0LXRgdC/ 130060
+INCx0LXRgdC/0LvQsNGC 130061
+INCx0LXRgdC/0LvQsNGC0L3Qvg== 130062
+INij2YXYp9mF 130063
+4Lit4Liy4Lii 130064
+4Lit4Liy4Lii4Li4 130065
+16jXqdeq 130066
+IGfhu5M= 130067
+IGfhu5Nt 130068
+IHXhu5FuZw== 130069
+2LXYqA== 130070
+a8Sxcg== 130071
+44OR44O8 130072
+INec15PXoteq 130073
+INC60YPQv9C40YLRjA== 130074
+15zXldeX 130075
+2YjYtti5 130076
+2YLZitmF 130077
+4Lib4Liy 130078
+0LbQuNCy 130079
+4LiU4Li04LiZ 130080
+15DXldek 130081
+4LmA4Lil4LmH4LiB 130082
+44OD44OJ 130083
+0LjRh9C10YHQutC40YU= 130084
+IENo4bun 130085
+0LrRgNCw0YE= 130086
+2YjYtdmE 130087
+cMWCYXQ= 130088
+0LzQvtGA 130089
+INeU15DXlQ== 130090
+4Lit4Li04LiZ 130091
+IO2VnOq1rQ== 130092
+0LPRgNC1 130093
+IOygnOqztQ== 130094
+7LC9 130095
+IOqwnOyduOygleuztA== 130096
+IG5naOG7iw== 130097
+4LiL4Liy 130098
+2K3Ys9in2Kg= 130099
+IGJ5xYJh 130100
+2YXZhNmD 130101
+0LjRh9C10YHQutC40LU= 130102
+IGLDoWM= 130103
+2LbYrQ== 130104
+6ri4 130105
+16nXntei 130106
+IOyWtOuWuw== 130107
+IOyWtOuWu+qyjA== 130108
+7JuM 130109
+2KfYqtmH 130110
+4LmC4Lij4LiH4LmB 130111
+4LmC4Lij4LiH4LmB4Lij4Lih 130112
+2K7Yr9mF2Kk= 130113
+INCg0LA= 130114
+15vXldec150= 130115
+157XqdeX16c= 130116
+INmI2YPYp9mG 130117
+16HXldej 130118
+INin2YTYrdmD2YjZhdip 130119
+INeR15g= 130120
+IHRy4bqtbg== 130121
+INeU16LXldec150= 130122
+IMOtY2g= 130123
+dMSF 130124
+16nXnteV 130125
+INeU16jXkNep15XXnw== 130126
+IO2VmOqzoA== 130127
+44GV44KJ 130128
+44GV44KJ44Gr 130129
+44Gr44GX44Gm 130130
+IOC4nOC4oQ== 130131
+44Gu44KI44GG44Gq 130132
+INmI2YLYqg== 130133
+44ON44OD44OI 130134
+2YTYudio 130135
+2YjYtA== 130136
+7Jis 130137
+IOC4q+C4suC4gQ== 130138
+IG1pYcWC 130139
+4LiX4Lit4LiH 130140
+0LjRgtCw 130141
+2KfYtdix 130142
+0LjQu9GB0Y8= 130143
+0LfQtQ== 130144
+4Lib4Lij4Liw4Lih4Liy4LiT 130145
+44Gd44KM44Gv 130146
+IGLEsXI= 130147
+IGLEsXJhaw== 130148
+2LXZhtin2Lk= 130149
+0K4= 130150
+2LTYudix 130151
+INeg15LXkw== 130152
+INio2LPYqNio 130153
+44Od44Kk 130154
+44Od44Kk44Oz44OI 130155
+INin2YTYrNmI 130156
+INC90LXRgdC60L7Qu9GM0LrQvg== 130157
+IGtp4bq/bQ== 130158
+2YHZjg== 130159
+INi22K8= 130160
+15HXmdeY15XXlw== 130161
+2KrYp9io2Lk= 130162
+2YbYsg== 130163
+IELhuqNu 130164
+IGHDp8Sxa2w= 130165
+IGHDp8Sxa2xhbWE= 130166
+IOC4hOC4uOC4kw== 130167
+4LiX4Liy 130168
+xYLDs3c= 130169
+2LfYqA== 130170
+2YbYrdmG 130171
+INee16fXldeo 130172
+IMSwcw== 130173
+INC00L7QvNCw 130174
+IOC4p+C4seC4mQ== 130175
+IGTDoG5o 130176
+0Y/QvQ== 130177
+0LzQuNGA 130178
+IG3DtA== 130179
+IHbDoG5n 130180
+2LXYp9io 130181
+c8SxbsSxbg== 130182
+4LiE4Li34LiZ 130183
+2K7YqNix 130184
+15bXm9eV 130185
+INee16nXlNeV 130186
+bcO8 130187
+INC60L7QvNC/0LDQvdC40Lg= 130188
+INeU16LXmdeo 130189
+INmD2Yg= 130190
+2YLZhNio 130191
+IGzhu5tw 130192
+0LjQutC4 130193
+16DXkQ== 130194
+4LmC4LiE4Lij 130195
+4LmC4LiE4Lij4LiH 130196
+4LmC4LiE4Lij4LiH4LiB4Liy4Lij 130197
+157Xldei15M= 130198
+0Y/RgtGB0Y8= 130199
+4Lir4Lil4Lix4LiH4LiI4Liy4LiB 130200
+0LXQvdC40Y4= 130201
+INep16I= 130202
+IGLGsOG7m2M= 130203
+44Oh44O844Or 130204
+44KE44KK 130205
+INeZ15XXk9ei 130206
+IOq0gO2VnA== 130207
+INin2YTYo9mF2LE= 130208
+IGLDtmxnZQ== 130209
+INGB0LLQvtC5 130210
+2YTYsw== 130211
+INee15nXldeX15M= 130212
+IOuCtOyaqQ== 130213
+INij2KzZhA== 130214
+IMSQw7RuZw== 130215
+INee16DXqg== 130216
+IOyLnOqwhA== 130217
+2YPZjg== 130218
+44Go44GE44GG44Gu44Gv 130219
+IG5hbGXFvHk= 130220
+2KrZhti42YrZhQ== 130221
+INGB0L7Qt9C00LA= 130222
+IHBow6k= 130223
+IHBow6lw 130224
+44Gn44GN44G+44GZ 130225
+INi52YTZhQ== 130226
+5aSn44GN44Gq 130227
+44Ky44O844Og 130228
+7YWM 130229
+INeb15XXnNec 130230
+INC40L3RgtC10YDQvdC10YI= 130231
+IFThu6s= 130232
+44Go44Gq44KL 130233
+2LLYp9mE 130234
+IGt0w7NyeW0= 130235
+IG5ow6k= 130236
+7Iic 130237
+0L3QtdCy 130238
+0LTQtdGA 130239
+44Ki44OX44Oq 130240
+aeG7h3U= 130241
+15HXmdec 130242
+INiq2LM= 130243
+IMSQw6J5 130244
+INin2YTYrtin2LXYqQ== 130245
+IOC5gOC4ig== 130246
+IOC5gOC4iuC5iOC4mQ== 130247
+2LXYp9iv 130248
+IGThuqFuZw== 130249
+2LPYudix 130250
+INep15nXnteV16k= 130251
+15LXmded 130252
+44GM44GC44Gj44Gf 130253
+0L/RgNC+0LI= 130254
+0L/RgNC+0LLQvtC0 130255
+INeQ15nXoNeV 130256
+INec16jXkA== 130257
+INec16jXkNeV16o= 130258
+INij2YHYttmE 130259
+INit2YQ= 130260
+INij2KjZiA== 130261
+6rCV 130262
+IOynkQ== 130263
+44Gu44KI44GG44Gr 130264
+INek16DXmQ== 130265
+16HXmded 130266
+INmI2YfYsNin 130267
+IGthw6c= 130268
+IMOpw6lu 130269
+IOqxtA== 130270
+67CU 130271
+0YPQtw== 130272
+4LiC4Lit4LiH4LmA4Lij4Liy 130273
+acWC 130274
+INCc0Ys= 130275
+IGNo4bq/dA== 130276
+INin2YTYq9in2YbZig== 130277
+15DXpw== 130278
+INeV16LXnA== 130279
+INin2YTYt9io 130280
+15HXmNeX 130281
+INis2K/Zitiv2Kk= 130282
+INi52K/ZhQ== 130283
+2LnYsg== 130284
+4Liq4Li04LmI4LiH4LiX4Li14LmI 130285
+44GZ44KM44Gw 130286
+IMSRw7Q= 130287
+7KOg 130288
+2K/Zgg== 130289
+0L3QvtC80YM= 130290
+IGvhu4M= 130291
+44Ki44Oz 130292
+5aSa44GP44Gu 130293
+4Lib4Lij4Liw4LiB 130294
+4Lib4Lij4Liw4LiB4Lit4Lia 130295
+16TXoteZ15zXldeq 130296
+INGB0YLQvtC7 130297
+bWF5xLE= 130298
+44Gk44GE 130299
+IHnEsWzEsW5kYQ== 130300
+IOC4iOC4tuC4hw== 130301
+a2/FhGN6 130302
+IFRow7RuZw== 130303
+INCw0LrRgtC40LI= 130304
+0L3RgdGC 130305
+0L3RgdGC0YDRgw== 130306
+IMOWeg== 130307
+INeq157XmdeT 130308
+INmD2YbYqg== 130309
+0YHQuNGB0YLQtdC8 130310
+cHLDqXM= 130311
+cHLDqXNlbnQ= 130312
+IG7Dog== 130313
+IG7Dom5n 130314
+Z8WCb3M= 130315
+INmI2LLZitix 130316
+2K3YtdmE 130317
+INC40LzQtdC10YI= 130318
+2K3YsdmD2Kk= 130319
+4Lie4LmI4Lit 130320
+44KS44GK 130321
+INin2LPYqtiu2K/Yp9mF 130322
+15DXmdeo15XXog== 130323
+5LuW44Gu 130324
+INep15TXnQ== 130325
+44GX44Gf44KJ 130326
+16nXnteZ 130327
+0YHQu9Cw 130328
+bcSx 130329
+IGJhesSx 130330
+IO2VmOyngOunjA== 130331
+15PXnA== 130332
+IHlhcHTEscSfxLE= 130333
+44OK44O8 130334
+15zXmdec15Q= 130335
+44Go44GE44Gj44Gf 130336
+w6RuZGln 130337
+IMWfYQ== 130338
+INmB2YrZhdin 130339
+0LjRgtC10LvRjw== 130340
+157Xldep 130341
+4LiC4Lit4Lia 130342
+bMO8aw== 130343
+IGjhu5Np 130344
+IOuqhQ== 130345
+INin2YTZg9ir2YrYsQ== 130346
+16bXkA== 130347
+IGhhesSxcg== 130348
+2LfYsdmB 130349
+2KfZitin 130350
+IMSRw7Rp 130351
+0LXQvdC0 130352
+2YTYug== 130353
+15fXlteV16g= 130354
+INCy0YHQtdCz 130355
+INCy0YHQtdCz0LTQsA== 130356
+65CY6rOg 130357
+15PXldeT 130358
+0LDQvdCw 130359
+2K/ZiNmE2Kk= 130360
+IGhv4bqhY2g= 130361
+2LnZhNin 130362
+2LnZhNin2Kw= 130363
+INeV16LXkw== 130364
+15TXnQ== 130365
+0LrQuNC5 130366
+2YTZkA== 130367
+INei15zXmdeV 130368
+0Y7RidC40Lk= 130369
+IG5n4bun 130370
+2LXZhti5 130371
+INin2YTYudix2KfZgg== 130372
+4LiV4LmI4Lit4LmE4Lib 130373
+44Gf44GP44GV44KT 130374
+IHBo4bqhbQ== 130375
+2YTYp9mG 130376
+2KfYqtmH2Kc= 130377
+IGLDtnlsZQ== 130378
+2KrZhtmB2Yo= 130379
+2KrZhtmB2YrYsA== 130380
+INep15TXmdeQ 130381
+0YHRgw== 130382
+4Lii4Liy4Lin 130383
+INep15XXoNeZ150= 130384
+INee15XXnA== 130385
+INGB0LjQuw== 130386
+INeQ15fXqNeZ150= 130387
+IHBo4bun 130388
+2YLYt9i5 130389
+IFRo4bun 130390
+4Lib4Lij4Liw4LmA4LiX4Lio4LmE4LiX4Lii 130391
+2YbZgg== 130392
+IMSRb+G6oW4= 130393
+INio2KU= 130394
+0L/RgNC10LTQtdC7 130395
+15XXqteV 130396
+IHlhcsSx 130397
+0L/RgNC1 130398
+IGN6xJnFm2Np 130399
+2K3Zg9mF 130400
+15XXoNeZ16o= 130401
+16TXotec 130402
+44KS44GX44Gm 130403
+IGt0w7Nyenk= 130404
+15zXnQ== 130405
+IMSQaeG7gXU= 130406
+INC60L7RgtC+0YDQsNGP 130407
+IOydtOyDgQ== 130408
+44GC44Gj44Gf 130409
+INee15PXldeR16g= 130410
+16TXldei15w= 130411
+ZMSxbQ== 130412
+6YCa44KK 130413
+INCx0YPQtNGD0YI= 130414
+4LmA4Lin4LmH4Lia4LmE4LiL 130415
+4LmA4Lin4LmH4Lia4LmE4LiL4LiV4LmM 130416
+2KfYrtix 130417
+15fXmdec 130418
+INeZ15w= 130419
+INeZ15zXk9eZ150= 130420
+15fXmdek 130421
+15fXmdek15XXqQ== 130422
+IGTDsm5n 130423
+INep15bXlA== 130424
+0YzQtQ== 130425
+44GC44Go 130426
+7J6Q6rCA 130427
+15DXkw== 130428
+IMO8eg== 130429
+IMO8emVyZQ== 130430
+2LjZhA== 130431
+INeQ15XXnNeZ 130432
+INeR15nXlded 130433
+2YTYp9iq 130434
+IG3Dqg== 130435
+7Lmo 130436
+2KrYrdiv 130437
+2KrYrdiv2Ks= 130438
+INiu2KfYtdip 130439
+INio2LHZhg== 130440
+INio2LHZhtin2YXYrA== 130441
+IEjDoG4= 130442
+15fXoQ== 130443
+INmI2YTZhQ== 130444
+16LXnQ== 130445
+IG3EsQ== 130446
+4Lif4Lix4LiH 130447
+16nXoteU 130448
+2YjZgdmC 130449
+16HXkdeZ16g= 130450
+0LDQu9GM0L3Ri9C5 130451
+15fXqdeV15E= 130452
+IG7DoG5n 130453
+67O8 130454
+INC60L7RgtC+0YDRi9GF 130455
+INeX15XXpw== 130456
+dMO2cg== 130457
+INC70YPRh9GI0LU= 130458
+44OR44Oz 130459
+4Lil4LmI4Liy4Liq4Li44LiU 130460
+INis2K/Zitiv 130461
+2YrYr9ip 130462
+4LiX4Lij4LiH 130463
+44KI44KK44KC 130464
+2YTZhA== 130465
+44KC44Gj44Go 130466
+16nXmNeX 130467
+INeV15DXmQ== 130468
+IGdp4buRbmc= 130469
+2KXYttin2YE= 130470
+16fXqg== 130471
+66ed 130472
+IHpvc3RhxYI= 130473
+0YDQvtC3 130474
+15nXpNeZ150= 130475
+INeb15zXnA== 130476
+16rXldeb158= 130477
+ZMSxxJ/EsW7EsQ== 130478
+2YLYs9mF 130479
+INGB0YfQuNGC 130480
+INGB0YfQuNGC0LA= 130481
+15jXldeq 130482
+IMawdQ== 130483
+INii2YQ= 130484
+INC80L7QvA== 130485
+INC80L7QvNC10L3Rgg== 130486
+INin2YTYqti52YTZitmF 130487
+16LXnNeV16o= 130488
+IGNo4buvYQ== 130489
+IHnDtm4= 130490
+IHRyw6A= 130491
+INit2YrZhg== 130492
+4LiL4Lix 130493
+IEPDoQ== 130494
+16LXlg== 130495
+INin2YTYo9mF2YY= 130496
+Y8Ot 130497
+IHbhu5Fu 130498
+IOC4meC4suC4og== 130499
+0L7QsdGA0LA= 130500
+16fXkA== 130501
+IHRoaeG6v3U= 130502
+44Oe44O8 130503
+4Liq4Lin4LiZ 130504
+IGfhu60= 130505
+IGfhu61p 130506
+IOq5 130507
+IOq5gA== 130508
+IHRoaeG7h24= 130509
+2YLYuQ== 130510
+d8SZ 130511
+INC90LDQvA== 130512
+0YLQvtC7 130513
+IHPDom4= 130514
+16HXldeS 130515
+IGdlw6dpcg== 130516
+0YLQvtC9 130517
+0LXQstCw 130518
+INmI2LbYuQ== 130519
+INi52LTYsQ== 130520
+0YHQu9C+ 130521
+4LiI4Lix4Lia 130522
+44K344O8 130523
+44KC44GC44KK44G+44GZ 130524
+IHbhurs= 130525
+IMSQ4buD 130526
+2LHZgdi5 130527
+INin2YTYo9mI2YTZiQ== 130528
+0YLQsNGA 130529
+44Gq44GP44Gm 130530
+2YXZjg== 130531
+cXXDrQ== 130532
+16LXoNeZ15nXoA== 130533
+0LPQtdC9 130534
+IGjDtG0= 130535
+4LiI4Liy 130536
+IG5o4bub 130537
+INin2YTYudix2KjZig== 130538
+15DXnw== 130539
+IGzhu5k= 130540
+IGplxZtsaQ== 130541
+4LmA4LiX4LmI4Liy4LiZ4Lix4LmJ4LiZ 130542
+INij2YbZh9in 130543
+IHR1eQ== 130544
+IHR1eeG7h3Q= 130545
+INiq2LU= 130546
+INiq2LXZhtmK 130547
+INiq2LXZhtmK2YE= 130548
+IOq3uOufrOuCmA== 130549
+0L7RhtC10L0= 130550
+4LiB4Li04LiI4LiB4Lij4Lij4Lih 130551
+44KE44Gj44Gm 130552
+IGto4buPaQ== 130553
+IGzhu4c= 130554
+INin2YTZhdis2KrZhdi5 130555
+4Lit4Liy4LiI4LiI4Liw 130556
+4LiI4Liw4LmA4Lib4LmH4LiZ 130557
+0L7QstGL0Lk= 130558
+16jXnQ== 130559
+4Lij4LmJ4Lit4LiZ 130560
+16nXntep 130561
+5Lq644Gr 130562
+IMO8emVyaW5l 130563
+16TXqNeZ 130564
+ZHXEn3U= 130565
+0YfQuNC6 130566
+IG3DuWE= 130567
+INee16rXldea 130568
+IGPhuq1w 130569
+INiq2KfYsdmK2K4= 130570
+15HXnNeq15k= 130571
+IOyigA== 130572
+2YTYuQ== 130573
+2KjYp9mG 130574
+IGNow7p0 130575
+INeU15bXntef 130576
+bsOpZQ== 130577
+IExpw6pu 130578
+INmE2YTYow== 130579
+2K3Yr9mI2K8= 130580
+INei15vXqdeZ15U= 130581
+0LLQvtC3 130582
+IHlhcHTEsQ== 130583
+INC+0LHQvg== 130584
+4LmD4Lir4LmJ4LiB4Lix4Lia 130585
+INeR15TXnQ== 130586
+44GP44Gm 130587
+2LHYo9iz 130588
+INGB0YDQtdC00YHRgtCy 130589
+IELDoGk= 130590
+44GT44Go44Gr 130591
+IOyCrO2ajA== 130592
+IOuqqOuRkA== 130593
+15HXkA== 130594
+IHRy4bqvbmc= 130595
+INin2YTYqNmE2K8= 130596
+IEhvw6BuZw== 130597
+0LvQuNCx0L4= 130598
+INC00YDRg9Cz0LjRhQ== 130599
+xLBS 130600
+0YPQvNCw 130601
+IEplxZtsaQ== 130602
+44KC44GX 130603
+IHbDsm5n 130604
+INeQ16rXqNeZ150= 130605
+IMSR4buNYw== 130606
+INCy0L7Rgg== 130607
+44Gg44GM 130608
+67Cw 130609
+4LiU4Li54LmB4Lil 130610
+INee15vXnA== 130611
+7JeQ64+E 130612
+0LPQsNC3 130613
+INeg15XXodek15nXnQ== 130614
+44GT44Go44Gn 130615
+INiq2Yg= 130616
+44Gn44GC44KK 130617
+4LiZ4Lix4LmI4LiH 130618
+INC80L7QttC10YLQtQ== 130619
+c3rEmQ== 130620
+44Gu44Gg 130621
+INmF2YbZhw== 130622
+IGLhu5U= 130623
+IGLDvHQ= 130624
+IGLDvHTDvG4= 130625
+67O06rOg 130626
+IGNo4buTbmc= 130627
+4LmB4LiI4LmJ4LiH 130628
+IFbDrA== 130629
+INit2LE= 130630
+IGdp4bqjbg== 130631
+INmF2K/ZitmG2Kk= 130632
+2KrYt9io2YrZgg== 130633
+4LiI4Li0 130634
+5pel44Gu 130635
+0LHQuNC7 130636
+4LiB4Lit4LiH 130637
+6rOz 130638
+INij2YXYpw== 130639
+7IaQ 130640
+IHRyw6Fp 130641
+INCy0YHQtdC8 130642
+INiz2YbYqQ== 130643
+INGB0LDQudGC 130644
+INCz0L7RgtC+0LI= 130645
+0L/Riw== 130646
+IOuQoA== 130647
+INin2YTYrti3 130648
+INin2YTYsdim2YrYs9mK2Kk= 130649
+IO2VqeuLiOuLpA== 130650
+IOyVhOuLiOudvA== 130651
+IOydtOughw== 130652
+IOydtOugh+qyjA== 130653
+KdiM 130654
+aMOkbHQ= 130655
+INij2YXYsQ== 130656
+INi52YXYsQ== 130657
+4LiB4LmH4LiI4Liw 130658
+IOC4l+C4s+C5g+C4q+C5iQ== 130659
+IGPDom4= 130660
+INeR15w= 130661
+INeR15zXkdeT 130662
+16TXoden 130663
+INmK2YLZiNmE 130664
+0L3Rg9GC0Yw= 130665
+4LmB4LiE 130666
+INen16bXqg== 130667
+IG7hurFt 130668
+IGjDsmE= 130669
+YmlsaXTDoA== 130670
+IOyXhuuLpA== 130671
+INeb16TXmQ== 130672
+0YDQvtC2 130673
+0LvQsNCz0LA= 130674
+INeU16nXmQ== 130675
+IE5nb8OgaQ== 130676
+INmI2Kw= 130677
+INmI2KzZiNiv 130678
+IOychO2VnA== 130679
+IHVzxYJ1Zw== 130680
+IHR14bqnbg== 130681
+ZMW6 130682
+157Xldef 130683
+INin2YTYudiv2YrYrw== 130684
+IGNo4bqzbmc= 130685
+4Liq4Li44LiC4Lig4Liy4Lie 130686
+INeR15PXqNea 130687
+INGB0LXQsdC1 130688
+IOyeiOydhA== 130689
+INin2YTYrdin2YQ= 130690
+IGTDoQ== 130691
+IGPGsOG7nWk= 130692
+IG5naGnDqm4= 130693
+aWXFhA== 130694
+IETGsMahbmc= 130695
+77yF 130696
+2LTYrw== 130697
+44GE44Gk44KC 130698
+INCy0YvQsdC+0YA= 130699
+IGPhu5luZw== 130700
+16nXmdeg15XXmQ== 130701
+IGNo4bqheQ== 130702
+INeR16LXnNeZ 130703
+2KfYrtio2KfYsQ== 130704
+7ZWY66mw 130705
+xbzEhQ== 130706
+2KzYp9iy 130707
+INeg16jXkNeU 130708
+4Lio4Li5 130709
+4Lio4Li54LiZ 130710
+4Lio4Li54LiZ4Lii4LmM 130711
+15LXog== 130712
+INei15PXmQ== 130713
+INei15PXmdeZ158= 130714
+2KjYsdin 130715
+0YbQuNC5 130716
+IMSQ4buTbmc= 130717
+2YLYp9mG2YjZhg== 130718
+IMSR4bupbmc= 130719
+44GX44Gf44KK 130720
+INeX15nXmQ== 130721
+IOuQnA== 130722
+IOuQnOuLpA== 130723
+INC80LXQttC00YM= 130724
+4Lie4Lin4LiB4LmA4LiC4Liy 130725
+IELhuq9j 130726
+4Lil4Liz 130727
+67Cx 130728
+IO2ZlQ== 130729
+4Lih4Liy4LiB4Lih 130730
+4Lih4Liy4LiB4Lih4Liy4Lii 130731
+0LHQsNC90Lo= 130732
+4Lit4Liy4LiB4Liy4Lij 130733
+IGjDoA== 130734
+INec16A= 130735
+4Lit4Lit 130736
+IOuwlOuhnA== 130737
+0LvQvtC8 130738
+bcOhdGljYQ== 130739
+INit2K8= 130740
+2KfYqNiq 130741
+4LiX4Li14LmI4LiZ4Li14LmI 130742
+IGNvxZs= 130743
+2YHZitiv2Yo= 130744
+2YHZitiv2YrZiA== 130745
+INC80LXRgdGC0L4= 130746
+IHBow7p0 130747
+4Lih4Liy4LiB4LiB4Lin4LmI4Liy 130748
+15DXpA== 130749
+2KjZkA== 130750
+IFBow7o= 130751
+7LGE 130752
+INmI2LPZhNmF 130753
+4LiI4Li14LiZ 130754
+0L/QvtGC0YDQtdCx 130755
+INeX15PXqdeV16o= 130756
+2LTZiA== 130757
+INei16bXnteV 130758
+INi52YXZhNmK2Kk= 130759
+4LiE4Li44LiT4Lig4Liy4Lie 130760
+44G+44GZ44GM 130761
+2K/YudmI 130762
+2LfYsdmC 130763
+4LmE4Lih4LmI4LiV4LmJ4Lit4LiH 130764
+67KU 130765
+7Iq5 130766
+IGvDrWNo 130767
+IOyXhuuKlA== 130768
+INGC0LDQvA== 130769
+INmG2K3ZiA== 130770
+INin2YTZgtin2YbZiNmG 130771
+15fXlded 130772
+IGvEsXo= 130773
+INeT15nXnw== 130774
+INCy0YDQtdC80LXQvdC4 130775
+44Gj44Gf44KK 130776
+INi02YfYsQ== 130777
+IOyEnOu5hOyKpA== 130778
+16LXqdeU 130779
+IGdpw6Fj 130780
+INin2YTYs9mE2KfZhQ== 130781
+INeQ16k= 130782
+INC/0L7Qu9GD0YfQsA== 130783
+4LiI4Lix4LiU4LiB4Liy4Lij 130784
+0LrQvtGA 130785
+INeU15jXldeR 130786
+4Lij4Liy4Lii4LiB4Liy4Lij 130787
+7KO87J2Y 130788
+4LmB4LiV4LmI4Lil4Liw 130789
+IOq3uOufsOuNsA== 130790
+4LiX4Li14LmI4LmA4Lib4LmH4LiZ 130791
+INeq15XXmg== 130792
+2KjZitin2YY= 130793
+0Jk= 130794
+b8WbY2nEhQ== 130795
+0YLQvtC6 130796
+IMOU 130797
+IMOUbmc= 130798
+4LmE4Lih4LmI4LmD4LiK4LmI 130799
+44G/44Gm 130800
+0J/Qvg== 130801
+INCn0YLQvg== 130802
+7Zmp 130803
+15jXkdei 130804
+0LzQtdGC0YA= 130805
+INeR157XlA== 130806
+INeR157XlNec 130807
+INeR157XlNec15o= 130808
+0YfRjA== 130809
+16fXqdeU 130810
+0LfQvdCw0Lo= 130811
+0LfQvdCw0LrQvtC8 130812
+dWrEmQ== 130813
+15nXpteo 130814
+INin2YTZhdmE2YM= 130815
+xLF5bGE= 130816
+15DXnteq 130817
+4Lib4Li04LiU 130818
+15DXl9eT 130819
+2LHYp9iv 130820
+IG3huq10 130821
+64uk64qU 130822
+IGzhuqFuaA== 130823
+16nXnNeV16k= 130824
+2K3Yr9mK2Ks= 130825
+2KrYsg== 130826
+5bm044Gu 130827
+INC60LLQsNGA 130828
+INC60LLQsNGA0YLQuNGA 130829
+5L2c44KK 130830
+2LHZiNio 130831
+0L7QstCw0L0= 130832
+INCi0LU= 130833
+4LiI4Liz4LiB 130834
+4LiI4Liz4LiB4Lix4LiU 130835
+2KjYp9i3 130836
+15LXqg== 130837
+INC80LDRiA== 130838
+INC80LDRiNC40L0= 130839
+15nXpteU 130840
+44G744Go 130841
+44G744Go44KT44Gp 130842
+w61kbw== 130843
+INGP0LfRi9C6 130844
+4Lia4Li04LiZ 130845
+4Liq4LiW4Liy4LiZ4LiX4Li14LmI 130846
+IOyXtA== 130847
+44Km44Kn 130848
+IGPDoA== 130849
+0L/QsNC9 130850
+5Y+j44Kz44Of 130851
+INix2K8= 130852
+2KfZgtiq 130853
+INmD2Kg= 130854
+INmD2KjZitix2Kk= 130855
+0YHRgtCw0Ls= 130856
+16nXnteX 130857
+cG9zaWNpw7Nu 130858
+INmF2YTZitmI2YY= 130859
+IOydtOyVvA== 130860
+IOydtOyVvOq4sA== 130861
+IGjDunQ= 130862
+IMWbd2lhdA== 130863
+IOuwqeuylQ== 130864
+INGB0LLQtdGC 130865
+INCy0LjQtNC10L4= 130866
+INin2YTZhti42KfZhQ== 130867
+IHRy4budaQ== 130868
+IOuMgO2VtOyEnA== 130869
+16jXnteq 130870
+2KrYr9in2YjZhA== 130871
+15XXqNeT 130872
+16rXng== 130873
+16rXnteV16DXldeq 130874
+INee158= 130875
+INC00LLQsA== 130876
+INeU16fXlQ== 130877
+5pel44Gr 130878
+INeU15LXmdei 130879
+4LmA4Lie4Li04LmI4Lih4LmA4LiV4Li04Lih 130880
+2YXYp9ix2LM= 130881
+IOqyg+yeheuLiOuLpA== 130882
+44Gq44GE44Go 130883
+IG5oaeG7h3Q= 130884
+65Cp64uI64uk 130885
+INeR16DXldep15A= 130886
+IOqwgOyepQ== 130887
+IHbhu6M= 130888
+IMSRw7NuZw== 130889
+16bXmdec15XXnQ== 130890
+6rSA6rOE 130891
+0LLQsNGP 130892
+15DXmdeW 130893
+15DXmdeW15Q= 130894
+INmG2LjYp9mF 130895
+2YXYrdin2YHYuA== 130896
+IHThuqNp 130897
+6riw64+E 130898
+4Lib4Lix4LiI4LiI4Li4 130899
+4Lib4Lix4LiI4LiI4Li44Lia4Lix4LiZ 130900
+15vXk9eV16g= 130901
+IOyVhOydtA== 130902
+15vXoNeZ16E= 130903
+4LmA4LiV4Lij 130904
+4LmA4LiV4Lij4Li14Lii4Lih 130905
+IG5nb+G6oWk= 130906
+INiv2YjZhNin2LE= 130907
+IHLhurs= 130908
+IGtoxINu 130909
+2LnYr9iv 130910
+2LTYudio 130911
+Y3p5xIc= 130912
+INin2YTZg9ix 130913
+INGH0LXQu9C+0LLQtdC60LA= 130914
+INmI2KXZhg== 130915
+15DXmA== 130916
+IHRoxqE= 130917
+INin2YTYsdmK2KfYtg== 130918
+0L7Qv9GA0LXQtNC10Ls= 130919
+0L7Qv9GA0LXQtNC10LvQtdC9 130920
+15TXntep15o= 130921
+INCd0L7QstC+ 130922
+0LfRi9Cy0LA= 130923
+INin2YTYr9mI2YTZig== 130924
+IMSRw6Fw 130925
+INC60YDQtdC0 130926
+INC60YDQtdC00LjRgg== 130927
+0L7QstC+0LPQvg== 130928
+IG3DtG4= 130929
+4Lib4Lij4Liw4LmC4Lii 130930
+4Lib4Lij4Liw4LmC4Lii4LiK4LiZ 130931
+4Lib4Lij4Liw4LmC4Lii4LiK4LiZ4LmM 130932
+0YHRgtC1 130933
+IFRo4buL 130934
+2K/Zitip 130935
+157XpteV 130936
+2YHYp9iq 130937
+16fXk9ed 130938
+7J2065286rOg 130939
+2YjYrg== 130940
+INeX15Y= 130941
+INGE0L7RgtC+ 130942
+15zXmdeq 130943
+2KrZjg== 130944
+2YjYqNix 130945
+0LnRgtC4 130946
+IMO2xJ9yZW4= 130947
+INeU15bXlQ== 130948
+IHbhu41uZw== 130949
+2YLZiNip 130950
+IFTDonk= 130951
+INCd0Lg= 130952
+INep15XXkQ== 130953
+44Go6KiA44KP44KM 130954
+44Gp44KT44Gq 130955
+15fXpteZ 130956
+772c 130957
+INeV15TXldeQ 130958
+5LiA44Gk 130959
+INGB0YLQvtC40YI= 130960
+bmnEhQ== 130961
+15jXqNeZ 130962
+INC00LXRgtC10Lk= 130963
+0L3Rj9GC0Yw= 130964
+INGB0LTQtdC70LDRgtGM 130965
+IOunjuydtA== 130966
+5L2V44GL 130967
+44Gb44KL 130968
+4LmE4Lir4Lih 130969
+4LiV4Li04LiU4LiV4LmI4Lit 130970
+INeR16rXlw== 130971
+INeR16rXl9eV150= 130972
+7JmE 130973
+7KeA64qU 130974
+0YHRgtCw0YI= 130975
+0Y/RgdC9 130976
+w7xi 130977
+IHRo4bqj 130978
+INeR15DXnteq 130979
+IHR1eeG6v24= 130980
+15PXmdeo15Q= 130981
+INeQ15nXqdeZ 130982
+15bXm9eo 130983
+44Gw44GL44KK 130984
+IHjDqXQ= 130985
+15vXmdeV 130986
+15vXmdeV15XXnw== 130987
+ZGnEn2luaQ== 130988
+INin2YTZhdmI2LbZiNi5 130989
+IGjhuq11 130990
+4LiI4Liy4LiB4LiB4Liy4Lij 130991
+15HXodeZ16E= 130992
+INee15LXmdei 130993
+15HXmdei 130994
+INmI2KzZhw== 130995
+4LmB4LiU4LiH 130996
+4LiZ4Liy4LiH 130997
+IMWeYQ== 130998
+7KG0 130999
+66GA 131000
+4LiV4Liw 131001
+INeU15fXmdeZ150= 131002
+2YHZitiv 131003
+44Gn44GZ44GL44KJ 131004
+6rec 131005
+xbpuaQ== 131006
+INC70Y7QtNC10Lk= 131007
+IHnDvHpkZQ== 131008
+xLF5b3J1bQ== 131009
+INin2YTYqNit2LE= 131010
+ZcOxbw== 131011
+0L/QsNGA 131012
+2YrZgtip 131013
+0L7QsdGA 131014
+16jXldea 131015
+2KrZiNmC2Lk= 131016
+INin2YTYtNmK2K4= 131017
+5Yid44KB44Gm 131018
+INGC0LXQu9C10YQ= 131019
+INGC0LXQu9C10YTQvtC9 131020
+IHRow7Rp 131021
+INeZ15vXldec15nXnQ== 131022
+IMWfaXJr 131023
+IMWfaXJrZXQ= 131024
+IOyasOumrOqwgA== 131025
+IMSRw7RuZw== 131026
+INeq15XXk9eU 131027
+0YHQvNC+0YLRgNC10YLRjA== 131028
+INmE2YfZhQ== 131029
+INec15s= 131030
+IE7Dsw== 131031
+INit2KfZhNip 131032
+44GE44GR 131033
+16fXqNeV 131034
+YXrEsQ== 131035
+44Kz44O8 131036
+INmE2YTYqg== 131037
+c8SxbsSxeg== 131038
+IEjhuqNp 131039
+6riw7Iig 131040
+4Lii4Lix4LiH4LmE4Lih4LmI 131041
+64uk6rOg 131042
+16TXlw== 131043
+INec15LXkdeZ 131044
+INi52YbZhw== 131045
+INC60LDQtw== 131046
+INC60LDQt9C40L3Qvg== 131047
+2KjZiNix 131048
+0YTQtdGA 131049
+IOqwmeydtA== 131050
+2KrYs9is2YrZhA== 131051
+INin2YTZhdix2YPYsg== 131052
+IFRow6Fp 131053
+0LTQsNGC0Yw= 131054
+157XmdeZ15w= 131055
+IHBheWxhxZ8= 131056
+44Gk44Gu 131057
+4LmA4Lij4Li34Lit 131058
+bsOnYQ== 131059
+16DXldeX 131060
+INeQ16TXmdec15U= 131061
+44Go6ICD44GI 131062
+44Go44GX44Gm44Gv 131063
+4LmA4LiI4Lit 131064
+157XpA== 131065
+IGdpcmnFnw== 131066
+0LvQuNGC 131067
+0YLQtdC70Y8= 131068
+0ZHQvQ== 131069
+5rCX44Gr 131070
+IGfDsw== 131071
+IGfDs3A= 131072
+5YiH44KK 131073
+INeU15fXk9ep 131074
+0LbQsNC7 131075
+INeT16LXqg== 131076
+6YGV44GG 131077
+4LmA4LiC4LmJ4Liy4LmE4Lib 131078
+INeh16jXmA== 131079
+ZcOxYQ== 131080
+5paw44GX44GE 131081
+2LHZjg== 131082
+INCQ0YA= 131083
+IHBo4bqjbg== 131084
+4LiI4Liw4LmE4LiU4LmJ 131085
+INeR16bXldeo15Q= 131086
+2LTYp9mH 131087
+2LTYp9mH2K8= 131088
+2YjYsdiv 131089
+4LmA4LiZ4Li34LmI4Lit4LiH4LiI4Liy4LiB 131090
+0LjQu9C40YHRjA== 131091
+4LmB4Lil4Liw4LiB4Liy4Lij 131092
+INeU15bXmw== 131093
+INeU15bXm9eV15nXldeq 131094
+ZWnDnw== 131095
+44Oo 131096
+7IOI 131097
+IMOHYQ== 131098
+xq8= 131099
+16nXkg== 131100
+2YrZhtip 131101
+4Lij4LmJ4Lit4LiH 131102
+44K144Oz 131103
+0YDQvtGB0YHQuNC5 131104
+0YDQvtGB0YHQuNC50YHQug== 131105
+YcSfYQ== 131106
+INC90LDRh9C40L3QsA== 131107
+INi12YTZiQ== 131108
+4LiX4Li44LiB4LiE4LiZ 131109
+7ZqM7IKs 131110
+INC70LjRhg== 131111
+2LTZitix 131112
+INi02YrYoQ== 131113
+2YrZhtin 131114
+INek15fXldeq 131115
+IGnDp2VyaXM= 131116
+IGnDp2VyaXNpbmRl 131117
+INij2K3Zhdiv 131118
+IMW8ZWJ5 131119
+7LSd 131120
+INC/0L7QutCw0Lc= 131121
+INC40LzQtdC90L3Qvg== 131122
+4Lir4LiZ4Lix4LiH4Liq 131123
+4Lir4LiZ4Lix4LiH4Liq4Li34Lit 131124
+INGC0YDQtQ== 131125
+4Liq4Lix4LiH4LiE4Lih 131126
+2KXZkA== 131127
+44GM5b+F6KaB 131128
+2YrZkdip 131129
+16TXpg== 131130
+7Yuw 131131
+INmF2KzYp9mE 131132
+16DXpNep 131133
+0LrQsNC9 131134
+15fXldek 131135
+15fXldek16k= 131136
+7LKY65+8 131137
+0L7QstCw0Y8= 131138
+0LfQvtCy 131139
+IGjhuqE= 131140
+IGR6acSZa2k= 131141
+15nXqNeV 131142
+INec157Xpg== 131143
+INec157XpteV15A= 131144
+15nXk9eV 131145
+IHPhu6M= 131146
+INec15TXkteZ16I= 131147
+16fXkdei 131148
+IGNoaeG7gXU= 131149
+44Oe44Kk 131150
+IGTDoG5n 131151
+4LmB4Lif4LiZ 131152
+IMO8eWU= 131153
+15nXoNeS 131154
+4LmA4Lij4Li14Lii4LiB 131155
+56eB44GM 131156
+dGjDqQ== 131157
+INGE0LjQu9GM 131158
+INGE0LjQu9GM0Lw= 131159
+IE5nw6B5 131160
+INC20LXQvQ== 131161
+INC20LXQvdGJ0LjQvQ== 131162
+2KzZitiv 131163
+bsOn 131164
+4Lib4Lij4Liy 131165
+15nXnteV 131166
+IG7hu4Fu 131167
+15DXldec150= 131168
+INCy0L7Qt9C80L7QttC90L7RgdGC0Yw= 131169
+IOuLpOyLnA== 131170
+6KaL44Gf 131171
+4LiW4LiZ 131172
+4LiW4LiZ4LiZ 131173
+bcSxesSx 131174
+INmF2KzZhdmI2LnYqQ== 131175
+Y2rEhQ== 131176
+INCg0KQ= 131177
+4LiB4Liz4Lir4LiZ 131178
+4LiB4Liz4Lir4LiZ4LiU 131179
+IOyXrOq4sA== 131180
+bGFuZMSx 131181
+0L3QuNGG 131182
+0YHRgtCy0LU= 131183
+INeT15HXqNeZ150= 131184
+IHNrxYJhZA== 131185
+44KK44G+44GX44Gf 131186
+INC+0YLQutGA0YvRgg== 131187
+0L3Rj9GC 131188
+INGB0LLQvtC10Lk= 131189
+4LiI4Li04LiV 131190
+INC60LDRh9C10YHRgtCy0LU= 131191
+IGV0dGnEn2k= 131192
+7IKs7ZWt 131193
+INin2YTZitmF2YY= 131194
+0LjRh9C10YHQutC40Lk= 131195
+67iM 131196
+INeR15DXqNel 131197
+INin2LPZhQ== 131198
+INC40LfQstC10YHRgg== 131199
+csOjbw== 131200
+IGF0dGl2aXTDoA== 131201
+4LmA4Lib4LmH4LiZ4LiB4Liy4Lij 131202
+INin2YTYr9mD2Ko= 131203
+INin2YTYr9mD2KrZiNix 131204
+INmI2KfYrdiv2Kk= 131205
+INGB0YfQtdGC 131206
+INC/0YDQuNGH 131207
+INC/0YDQuNGH0LjQvQ== 131208
+INmI2LLYp9ix2Kk= 131209
+IGh1eeG7h24= 131210
+INmD2KrYp9io 131211
+4LmB4LiZ4LmI4LiZ 131212
+4LmB4LiZ4LmI4LiZ4Lit4LiZ 131213
+IGfDvG7DvA== 131214
+0LPRgNGD0Lc= 131215
+INin2YTYrtin2LU= 131216
+IGfDtnLDvGw= 131217
+15zXnteT 131218
+IOygleuPhA== 131219
+15XXkdeZ15w= 131220
+INee16fXpteV16LXmQ== 131221
+INC+0YHQvtCx0LXQvdC90L4= 131222
+4Lib4Lij4Liw4LiB4Liy 131223
+4Lib4Lij4Liw4LiB4Liy4Lio 131224
+YWNhxJ/EsW7EsQ== 131225
+67aB 131226
+4Lig4Li54Lih4Li0 131227
+INGN0LvQtdC60YI= 131228
+INGN0LvQtdC60YLRgNC+ 131229
+INen16nXlA== 131230
+2LPZhNi3 131231
+4LiK4LiZ4Liw 131232
+16LXmdec 131233
+INCn0LU= 131234
+4LmB4LiZ4LmI 131235
+bMSxxJ8= 131236
+bMSxxJ/EsW4= 131237
+INee16LXqNeb16o= 131238
+5aW944GN44Gq 131239
+4Lih4Liy4LiB4LiC4Li24LmJ4LiZ 131240
+157XoteR16g= 131241
+INin2YTZhdi62LHYqA== 131242
+INC/0LXRgNC4 131243
+INC/0LXRgNC40L7QtA== 131244
+IG5o4bqhYw== 131245
+2KfZiNmK 131246
+INmI2LnZhNmJ 131247
+2KPYrtiw 131248
+IEPDtA== 131249
+16rXqNeR15XXqg== 131250
+15LXlA== 131251
+IGt0w7NyZWo= 131252
+15DXmdeq 131253
+15HXldeQ 131254
+0LTQtdC70Yw= 131255
+4Lij4Li14Lin4Li0 131256
+4Lij4Li14Lin4Li04Lin 131257
+0LbRgw== 131258
+INeR15fXlQ== 131259
+0LXRiNGM 131260
+INij2YTZgQ== 131261
+INin2YTZiNi32YbZig== 131262
+INin2YTZhdmG2LfZgtip 131263
+bsSFxIc= 131264
+IHRoacOqbg== 131265
+0LjRh9C10YHQutC+0Lk= 131266
+INin2YTZhdmE 131267
+INi52YU= 131268
+16HXpNeo 131269
+IG5ow7Nt 131270
+2YjYtdmB 131271
+IENow7puZw== 131272
+INix2YLZhQ== 131273
+44G+44GX44Gf44GM 131274
+YWxpdMOp 131275
+4Lil4Lih 131276
+IOuCtOqwgA== 131277
+15zXp9eV15c= 131278
+IFPGoW4= 131279
+cG9zacOnw6Nv 131280
+bWnEmQ== 131281
+IHRyw6FuaA== 131282
+IMSQ4buZ 131283
+15vXlw== 131284
+44GC44Gj44Gm 131285
+4Lit4Lii4LmI4Liy 131286
+INee15fXmdeo 131287
+INeU15nXqteU 131288
+4Lib4LmI4Liy 131289
+4Lit4Li34LmI4LiZ4LmG 131290
+2LTZgg== 131291
+16DXodeZ 131292
+66a8 131293
+44Gm44GX44G+44GG 131294
+INee16bXkQ== 131295
+44Gr5Ye6 131296
+2YXZiNin2LfZhg== 131297
+4Lii4Lix4LiH4Lih4Li1 131298
+0LDQu9GM0L3Ri9C1 131299
+c2FuxLF6 131300
+2KXYs9ix2KfYptmK2YQ= 131301
+IHbDoGk= 131302
+7KSE 131303
+44Go5oCd44Gj44Gm 131304
+15nXldeg15k= 131305
+55Sf44GN 131306
+IHPDonU= 131307
+0YfQuNGB0YI= 131308
+IGzhu4U= 131309
+IEdpw6E= 131310
+4Lit4Li44Lib 131311
+4Lit4Li44Lib4LiB4Lij 131312
+4Lit4Li44Lib4LiB4Lij4LiT4LmM 131313
+IG5o4bq5 131314
+csO2 131315
+16HXmNeZ 131316
+44GV44KT44GM 131317
+IGThuqd1 131318
+2LnZjg== 131319
+2KrYsdin 131320
+15LXk9ec 131321
+IHTDqWNuaWNh 131322
+15vXoNeZ150= 131323
+16rXp9ep 131324
+16rXp9ep15XXqNeq 131325
+INC90LXQs9C+ 131326
+w6l0YWl0 131327
+IG3hu4Ft 131328
+0YHQtdGC 131329
+IG5o4bqtdA== 131330
+INee16LXnA== 131331
+INeU16LXkdeV15M= 131332
+INeU16LXkdeV15PXlA== 131333
+INeS15nXnA== 131334
+44Gv44Gq44GE 131335
+2KfYptit 131336
+INC30LTQtdGB0Yw= 131337
+15DXmdeg15jXqA== 131338
+2YXZkA== 131339
+INeZ15fXkw== 131340
+2LHYp9mB 131341
+7LKY66as 131342
+15PXoteV16o= 131343
+7Lmc 131344
+INCi0L4= 131345
+IFRo4bq/ 131346
+7Lap 131347
+INeg15vXldef 131348
+2LnZiti0 131349
+0L3QuNC3 131350
+INis2KfZhtio 131351
+157Xp9em15XXog== 131352
+4LmC4LiL 131353
+0YHRg9GC 131354
+7Ja07JqU 131355
+44KS6KaL44Gm 131356
+2KfYsdiv 131357
+IGHDp8SxbA== 131358
+INin2YTYrdmK2KfYqQ== 131359
+4LiB4LmH4LmE4LiU4LmJ 131360
+44Gd44KM44KS 131361
+2LnYttmI 131362
+INCz0YDQsNC2 131363
+INCz0YDQsNC20LTQsNC9 131364
+4LiI4Liw4LiV4LmJ4Lit4LiH 131365
+IOydtOufrA== 131366
+IOydtOufrO2VnA== 131367
+IHRyw6FjaA== 131368
+2YbZjg== 131369
+IGvEsXNh 131370
+w5Q= 131371
+0YjQutCw 131372
+44Gu5Lq6 131373
+INCf0L7RgQ== 131374
+INCf0L7RgdC70LU= 131375
+0YPQu9GM 131376
+2YjYp9is2Yc= 131377
+2YLYsdio 131378
+4Lib4LiP4Li04Lia4Lix4LiV4Li0 131379
+6rCZ 131380
+INee16A= 131381
+INGB0LLQvtC4 131382
+2KjYsdin2YXYrA== 131383
+INix2Yg= 131384
+0L/RgNC+0LQ= 131385
+0L/RgNC+0LTQsNC2 131386
+IGJ5xYJ5 131387
+4Lin4Lix4Lii 131388
+IGfDtnLDvG4= 131389
+IMOI 131390
+0Y7RidC40Lw= 131391
+INGC0LDQutC+0Lk= 131392
+2YHZiNix 131393
+INmB2LnZhA== 131394
+INCx0LXQuw== 131395
+65Cg 131396
+ZXLDrWE= 131397
+INGB0LLQvtGO 131398
+IGzDow== 131399
+IGzDo25o 131400
+4LmA4Lie4Li34LmI4Lit4LmD4Lir4LmJ 131401
+2YLZhg== 131402
+2KrYt9mI2YrYsQ== 131403
+IHNhecSx 131404
+INGB0LXQudGH0LDRgQ== 131405
+INeQ15fXqNeq 131406
+16fXldek15Q= 131407
+16fXldeo16E= 131408
+INiz2YU= 131409
+INeY15nXpNeV15w= 131410
+7J20652864qU 131411
+2K/Ysdin2LPYqQ== 131412
+6LW344GT 131413
+15fXmdeg 131414
+15fXmdeg15XXmg== 131415
+15PXpw== 131416
+IOunng== 131417
+INC60L7QvNCw0L3QtA== 131418
+INCR0L4= 131419
+INC40LPRgNGL 131420
+4Lia4Li1 131421
+INij2Y4= 131422
+0LLQtdC9 131423
+INin2YTYrNiv2YrYrw== 131424
+INmE2KU= 131425
+INeV15DXoNeZ 131426
+INeU16HXmQ== 131427
+0LjRh9C10YHQutC+0LPQvg== 131428
+2LHZiNit 131429
+4LiB4Liy4Lij4Lio4Li24LiB4Lip4Liy 131430
+IFRyxrDhu51uZw== 131431
+0LjQs9GA0LA= 131432
+xLFsbWFzxLE= 131433
+INC80LDRgdGB 131434
+44Go44GN44Gr 131435
+4LiX4Li14LmI4Lic4LmI4Liy4LiZ 131436
+4LiX4Li14LmI4Lic4LmI4Liy4LiZ4Lih4Liy 131437
+INin2YTYs9in2KjZgg== 131438
+INee16LXmA== 131439
+0LLQsNGC0Yw= 131440
+bcO8xZ8= 131441
+INec15vXmg== 131442
+IHThu4tjaA== 131443
+2YHZh9mF 131444
+2KrYr9ix2YrYqA== 131445
+2LTZgw== 131446
+INeR157XmQ== 131447
+INeR157XmdeV15fXkw== 131448
+2YLYt9in2Lk= 131449
+44Gq44GX 131450
+15XXpteZ15A= 131451
+INmI2LPZig== 131452
+0LfRgw== 131453
+IHlhdA== 131454
+IHlhdMSxcsSxbQ== 131455
+66eO 131456
+IHRo4bqvbmc= 131457
+44GK5a6i 131458
+44GK5a6i5qeY 131459
+IFRoacOqbg== 131460
+44Gr5a++44GX44Gm 131461
+0YDQuNGB 131462
+2YbYqtin2KY= 131463
+2YbYqtin2KbYrA== 131464
+INee16nXqA== 131465
+INee16nXqNeT 131466
+INiq2LnYp9mE 131467
+INiq2LnYp9mE2Yk= 131468
+16nXoNeZ 131469
+2YfYp9mF 131470
+15DXoNep15nXnQ== 131471
+IMW8eWNpYQ== 131472
+INGA0YPQsdC70LXQuQ== 131473
+2YrYtg== 131474
+IGthdMSxbA== 131475
+INmF2YjYttmI2Lk= 131476
+IHZhcmTEsXI= 131477
+INmF2YbYt9mC2Kk= 131478
+IFRy4bqnbg== 131479
+INCy0LXRgQ== 131480
+w7xw 131481
+2YXZiNmG 131482
+0YjQu9C4 131483
+IG7Ds25n 131484
+2K7ZhNmB 131485
+INCh0YLQsA== 131486
+INC00L7RgA== 131487
+INC00L7RgNC+0LM= 131488
+IHfFgmHFm25pZQ== 131489
+ZcSfaW4= 131490
+IGhp4buDbQ== 131491
+INCh0LDQvA== 131492
+6ruY7ISc 131493
+INGE0LA= 131494
+44G744GG 131495
+44G744GG44GM 131496
+15XXpNeZ16I= 131497
+6rCI 131498
+2K/ZiNmE 131499
+IHRodcOq 131500
+IGNo4buX 131501
+IOuLueyLoA== 131502
+44GR44KM 131503
+44GR44KM44Gp 131504
+67O07Zi4 131505
+44GV44KM44Gm44GE44G+44GZ 131506
+INC90LDQtNC+ 131507
+IOyCrOuejOuTpA== 131508
+4LmA4LiC4LiV 131509
+4Liq4Lih4Lix4Lii 131510
+esWC 131511
+2KrZiNix 131512
+INep16rXmQ== 131513
+dsOq 131514
+INeR16rXldea 131515
+4LiK4Lix4Lii 131516
+44GE44Gj44Gf 131517
+7J2R 131518
+IHThuqc= 131519
+IHThuqduZw== 131520
+16nXm9eo 131521
+IOq4gA== 131522
+INeU16nXoNeU 131523
+INin2YbZhw== 131524
+56uL44Gh 131525
+csOpcw== 131526
+ZsO8aHJlbg== 131527
+2LHYrdmF 131528
+6re5 131529
+IOKAqw== 131530
+IHN14bqldA== 131531
+4Lif4Li0 131532
+2YrZh9in 131533
+INin2YTYp9iq2K3Yp9iv 131534
+IHR1eeG7g24= 131535
+44G+44KL 131536
+IG3huqFp 131537
+IG5nw6Ju 131538
+44Kw44Op 131539
+5qyy44GX44GE 131540
+2LPYp9ix 131541
+44KC44Gu44Gn44GZ 131542
+0LrQuNC1 131543
+IHNlw6dpbQ== 131544
+5YWl44KK 131545
+44Gq44Gp44KS 131546
+0YLRgNC4 131547
+INGB0L/QtdGG 131548
+INij2K8= 131549
+INC+0LTQvdC+ 131550
+0YjQtdC7 131551
+44OH44O844K/ 131552
+44K344K544OG 131553
+44K344K544OG44Og 131554
+6KGM44GN 131555
+44Go5oCd44Gj44Gf 131556
+4LmA4LiB4Li04LiU4LiC4Li24LmJ4LiZ 131557
+INGC0L7Qtg== 131558
+INGC0L7QttC1 131559
+IHPhuqFjaA== 131560
+INGB0YDQvtC6 131561
+INC60LvQuNC10L3Rgg== 131562
+INmF2LTYsdmI2Lk= 131563
+IGFsdMSxbmRh 131564
+IOy3qA== 131565
+5Lit44Gu 131566
+44GV44Gb44KL 131567
+44GZ44G5 131568
+44GZ44G544Gm 131569
+6rCc67Cc 131570
+IMSRw6pt 131571
+44Gq44GE44Gu44Gn 131572
+7LKg 131573
+16LXkdeT 131574
+IGThuqV1 131575
+4LiE4LiZ4LiX4Li14LmI 131576
+IEPDoWNo 131577
+2KrYudmE2YrZhQ== 131578
+IGjhuqFp 131579
+44K744OV44Os 131580
+INmG2YHYs9mH 131581
+IO2Gte2VtA== 131582
+0YjQu9C+ 131583
+INC90LDQv9GA0LDQsg== 131584
+INC90LDQv9GA0LDQstC70LXQvQ== 131585
+0YDRg9GH 131586
+7ZSM 131587
+INeR16jXmdeQ 131588
+44Gu44G/ 131589
+44Gr44GK44GE44Gm 131590
+15HXoNen 131591
+44Ko44Oz 131592
+2KvZhNin2Ks= 131593
+IG3hu7k= 131594
+INGB0LDQudGC0LU= 131595
+INC10LzRgw== 131596
+2KrYutmK 131597
+2KrYutmK2YrYsQ== 131598
+2K7YtdmI2LU= 131599
+0YLQtdC70Lg= 131600
+INeV15zXm9ef 131601
+16TXoted 131602
+INC/0L7RjdGC0L7QvNGD 131603
+2LHYp9mG 131604
+0LjRgtC10LvQtdC5 131605
+0L/QuNGB0LDQvQ== 131606
+16LXpQ== 131607
+IOyCrOyXhQ== 131608
+2YXYsg== 131609
+2KzZhdmK2Lk= 131610
+66m07ISc 131611
+4Lic4Lil4Li04LiV4Lig4Lix 131612
+4Lic4Lil4Li04LiV4Lig4Lix4LiT 131613
+4Lic4Lil4Li04LiV4Lig4Lix4LiT4LiR 131614
+4Lic4Lil4Li04LiV4Lig4Lix4LiT4LiR4LmM 131615
+INC/0YDQuNC80LXRgA== 131616
+44Kt44O8 131617
+bMOi 131618
+IGNoxINt 131619
+55uu44Gu 131620
+44GE44GL 131621
+44Go6KiA44GG 131622
+15bXldeS 131623
+INeR15PXmQ== 131624
+INeR15PXmdeV16c= 131625
+44GK5bqX 131626
+4LiV4Lit4LiZ4LiZ4Li14LmJ 131627
+IHBo4buRaQ== 131628
+0L/Rgg== 131629
+4Liq4LiZ4Liy4Lih 131630
+2LfZiA== 131631
+2LXYp9it 131632
+2LXYp9it2Kg= 131633
+IETDvA== 131634
+IETDvG55YQ== 131635
+INC/0L7QutCw 131636
+0L/QsNC7 131637
+IMSR4bqjbw== 131638
+INin2YTZgdmI2LE= 131639
+INin2YTZgdmI2LHZg9iz 131640
+IG3DoXU= 131641
+0LrRgNC10L8= 131642
+INin2YTYs9in2LnYqQ== 131643
+INCz0L7RgNC+0LTQsA== 131644
+2YHYtdmE 131645
+0LDQudGC0LU= 131646
+INC00L7Qsw== 131647
+INC00L7Qs9C+0LLQvtGA 131648
+INil2LA= 131649
+INeR15vXnNec 131650
+2YrYqtmH 131651
+15LXkdeo 131652
+IGJpcsOn 131653
+IGJpcsOnb2s= 131654
+66y47ZmU 131655
+44Gd44GG44Gq 131656
+2LHYp9it 131657
+INmF2LHYqQ== 131658
+INC00LXQvdGM0LPQuA== 131659
+ZsOk 131660
+4LiC4LmJ4Liy4Lin 131661
+INGB0L7QstGA0LXQvA== 131662
+INGB0L7QstGA0LXQvNC10L3QvQ== 131663
+15zXl9el 131664
+6Imv44GP 131665
+INmB2KM= 131666
+INeV15bXlA== 131667
+INC30LDQvdC4 131668
+INC30LDQvdC40LzQsA== 131669
+IOqwgOyngOqzoA== 131670
+IGjGoWk= 131671
+44Gq44Gu44GL 131672
+44OG44Os44OT 131673
+INeo15HXldeq 131674
+4LiV4Li1 131675
+INeR16nXoNeq 131676
+IFThuqFp 131677
+IHRodeG6rW4= 131678
+0YHQtdC7 131679
+0ZHQvA== 131680
+ZHppxIc= 131681
+INGB0LrQsA== 131682
+INGB0LrQsNGH 131683
+INGB0LrQsNGH0LDRgtGM 131684
+15XXnteV 131685
+0LPQu9Cw 131686
+INC80LjQvdGD0YI= 131687
+5Ye644GZ 131688
+INeX15nXmdeR 131689
+INeq15LXldeR15Q= 131690
+4Lij4Li54Lib4LmB4Lia4Lia 131691
+0L3QuNGG0LA= 131692
+IMSwbg== 131693
+INij2Lk= 131694
+INi22YXZhg== 131695
+2YXYq9in2YQ= 131696
+IHlhxZ9hbg== 131697
+IOyXsOq1rA== 131698
+IEzDqg== 131699
+16nXnNeX 131700
+44GP44Gq44KL 131701
+7JeG7J20 131702
+INGC0YDQuA== 131703
+INGH0LDRgdGC0L4= 131704
+INC+0LHRgNCw0YI= 131705
+0L/Qu9C+ 131706
+2K/Yrg== 131707
+2K/YrtmI2YQ= 131708
+2LPZhw== 131709
+4Lit4Liy4LiB 131710
+4Lit4Liy4LiB4Liy4Lio 131711
+INeb15bXlA== 131712
+INeU16LXoden 131713
+INin2YTYo9mG 131714
+5bm044Gr 131715
+16LXqdeV 131716
+INep16LXldeq 131717
+IG3DoG4= 131718
+15DXqNeZ 131719
+c8SxeWxh 131720
+2YHYsdmC 131721
+0L3QuNGF 131722
+INiq2LPYqg== 131723
+6KaL44Gm 131724
+2K3Yp9mI2YQ= 131725
+15DXmdeb15XXqg== 131726
+IGJhxZ9sYWTEsQ== 131727
+c3TEhQ== 131728
+c3TEhXBp 131729
+4LiX4Li14LmI4LmA4Lij4Liy 131730
+2YLYsdix 131731
+2KzYp9io 131732
+INeR16jXldeo 131733
+4LmA4LiC4LmJ4Liy4LmD4LiI 131734
+157Xl9en16g= 131735
+YWzEsW0= 131736
+INeh15nXpNeV16g= 131737
+44Gn44GC44KM44Gw 131738
+INep157Xldeo15XXqg== 131739
+INeV157XlA== 131740
+44GT44Gd 131741
+aWTDqWU= 131742
+5LiL44GV44GE 131743
+2KrZhtin2YjZhA== 131744
+IOC4peC5ieC4suC4mQ== 131745
+IOyasOumrOuKlA== 131746
+2KfZhtin 131747
+0YHRgtC+0Lk= 131748
+0LHQvtGC 131749
+IHlhxZ9hbQ== 131750
+a8O2eQ== 131751
+2KXZhA== 131752
+0YDRi9Cy 131753
+6riw7JeF 131754
+INeU157Xkw== 131755
+INeU157Xk9eZ16DXlA== 131756
+2K/YqA== 131757
+16LXmdeg15k= 131758
+157XqteX 131759
+INek16jXmQ== 131760
+44OL44O8 131761
+2KfZhdmK 131762
+IG5o4bqxbQ== 131763
+44KM44Gq44GE 131764
+2KrYudix2YE= 131765
+IOuniOydjA== 131766
+7JOw 131767
+IGjhuqVw 131768
+16jXkteZ15w= 131769
+2KjZjg== 131770
+IHLEg25n 131771
+Z2zEhWQ= 131772
+INGB0LjRgdGC0LXQvNGL 131773
+IGtow7Nh 131774
+44Gn44GZ44KI44Gt 131775
+5aSn44GN44GP 131776
+6riw66W8 131777
+IGvDqW8= 131778
+2YjYoQ== 131779
+2KzYp9mF 131780
+2KzYp9mF2Lk= 131781
+INei15nXpteV15E= 131782
+dMOpcmk= 131783
+INeq16k= 131784
+INeQ15HXmQ== 131785
+IENoxrDGoW5n 131786
+4Lia4Lij4Li04LmA4Lin 131787
+4Lia4Lij4Li04LmA4Lin4LiT 131788
+44Gk44GP 131789
+INeX15XXnA== 131790
+16LXqteZ15M= 131791
+16nXmdee15Q= 131792
+64Ko 131793
+INep15DXmdef 131794
+INmI2KfZhNil 131795
+0YTQsA== 131796
+IGtow6Ft 131797
+INeY15XXkdeU 131798
+INCy0YvRgQ== 131799
+INCy0YvRgdC+0LrQvg== 131800
+INin2YTYrdiv2YrYqw== 131801
+5Lq644KC 131802
+ZMO8xJ/DvA== 131803
+15nXl9eV15M= 131804
+2KrYudmE2Yo= 131805
+2KrYudmE2YrZgg== 131806
+bMO2 131807
+2KrYrdiv2YrYrw== 131808
+0L3QtdCz0L4= 131809
+INGD0LTQvtCx 131810
+INec157XmQ== 131811
+INeo15XXpteZ150= 131812
+INis2KfYoQ== 131813
+INeR15bXntef 131814
+4Lib4LiB4LiV4Li0 131815
+6auY44GP 131816
+4Lib4Lil4Liy 131817
+IGFydMSxaw== 131818
+IGJ1Z8O8bg== 131819
+16fXoNeZ 131820
+IGtob8Oh 131821
+INmF2LHZg9iy 131822
+IOyekOq4sA== 131823
+2K/Ysdis2Kk= 131824
+157Xqdeo15M= 131825
+IGdp4bqleQ== 131826
+IGNow7NuZw== 131827
+16fXpA== 131828
+2YrYqNip 131829
+IGN6xJlzdG8= 131830
+0LLQsNC70Lg= 131831
+2YPYqA== 131832
+7J+B 131833
+4Liq4Lia4Liy4Lii 131834
+4Lib4Lij4Liw4LiK4Liy4LiK4LiZ 131835
+15LXldej 131836
+65+J 131837
+44Gu44GT44Go 131838
+4Lil4Lit 131839
+IG5naOG7iQ== 131840
+5a2Q44Gp 131841
+5a2Q44Gp44KC 131842
+4LmE4LiU4LmJ4Lit4Lii 131843
+4LmE4LiU4LmJ4Lit4Lii4LmI4Liy4LiH 131844
+15PXog== 131845
+INin2YTYqtmJ 131846
+INGB0L7QstC10YI= 131847
+IHF1YWxpdMOg 131848
+5Ye644GX 131849
+INGA0YPQutC+0LI= 131850
+INGA0YPQutC+0LLQvtC0 131851
+4Lij4Liy4Lii4Lil4Liw4LmA4Lit4Li14Lii4LiU 131852
+44Gq44GL44Gq44GL 131853
+6riw6rSA 131854
+INeX15XXqQ== 131855
+INeX15XXqdeR 131856
+0LvQvtGC 131857
+4LiZ4Liw4LiE4Lij4Lix4Lia 131858
+16fXkdeV16bXlA== 131859
+IHRow6Fp 131860
+INep15HXlA== 131861
+INGI0LrQvtC7 131862
+INmE2YPZhA== 131863
+4LmD4LiZ4LiK4LmI4Lin4LiH 131864
+INmF2YPYp9mG 131865
+65WM 131866
+IGPhuqNp 131867
+IENow60= 131868
+0YPRh9Cw 131869
+7J21 131870
+IHjhuqN5 131871
+4LiK4LiZ4Li04LiU 131872
+IGPhuq11 131873
+0LrRgNC+0LI= 131874
+c3PDqQ== 131875
+INmG2YjYuQ== 131876
+INCi0LA= 131877
+2K7Zhdiz 131878
+16TXldeh15g= 131879
+IG3huq9j 131880
+IMSRZW0= 131881
+4LiB4Liy4Lij4LmD4LiK4LmJ 131882
+16jXldeh 131883
+INCb0LU= 131884
+IHRo4but 131885
+4Lij4LmI4Liy4LiH4LiB4Liy4Lii 131886
+w7x6w7w= 131887
+5pel5pys44Gu 131888
+6rO87KCV 131889
+16nXmdeQ 131890
+IOyeiOqzoA== 131891
+15HXldec 131892
+7JWF 131893
+INmI2KfZhNin 131894
+INCb0Lg= 131895
+INCy0YHRkQ== 131896
+IHXFvHl0a293 131897
+15fXldec 131898
+2LHZgdi2 131899
+IHNvbnXDpw== 131900
+44GE44G+44Gb44KT 131901
+7IKs7JeF 131902
+64iE 131903
+0YLQtdC6 131904
+IHVkemlhxYI= 131905
+0LvQtdC3 131906
+INeU15nXmdeq15k= 131907
+44KJ44KM44Gm 131908
+2YXYs9ik2YjZhA== 131909
+2LHYp9ix 131910
+0YLQsNC9 131911
+IMSRw6Bv 131912
+INeo15XXkQ== 131913
+INeR16nXkdeZ15w= 131914
+5LuK5Zue44Gv 131915
+44K444Ol 131916
+INei15HXqA== 131917
+44Gb44Gm 131918
+0L/QvtC70Yw= 131919
+YWtsxLE= 131920
+IGvDrW5o 131921
+2K/Yqg== 131922
+0LvQvtC20LXQvdC40LU= 131923
+INin2YTZhdi1 131924
+INin2YTZhdi12LHZig== 131925
+4LiI4Lij4Li04LiH4LmG 131926
+INin2YTYtNix2YPYqQ== 131927
+IMSR4buP 131928
+44Ob44OG 131929
+44Ob44OG44Or 131930
+0Y3QutC+0L0= 131931
+0Y3QutC+0L3QvtC8 131932
+INmI2LnZhg== 131933
+INeq16A= 131934
+INeq16DXkNeZ 131935
+INin2YTYr9mI2YTZitip 131936
+IOyngOyXrQ== 131937
+44Gn44GZ44GL 131938
+INCy0LDRgNC4 131939
+INCy0LDRgNC40LDQvdGC 131940
+INin2YTYudix2Kg= 131941
+0LXQu9Cw 131942
+IHTGsOG7m25n 131943
+c2vEhQ== 131944
+IG3hurdj 131945
+4Liq4Lix4LiB 131946
+44OT44O8 131947
+INeR15LXnA== 131948
+INeR15LXnNec 131949
+44OV44Kh44Oz 131950
+15HXmdem 131951
+15HXmdem15XXog== 131952
+0LvQuNGB0YI= 131953
+4Lif4Li4 131954
+4Lif4Li44LiV 131955
+4Lif4Li44LiV4Lia4Lit4Lil 131956
+4Lid4LmI4Liy4Lii 131957
+7J6Q7J2Y 131958
+INiz2YjZgQ== 131959
+INep15TXqg== 131960
+IOqxuA== 131961
+16LXkdeV15M= 131962
+44GZ44KL44GT44Go44GM 131963
+INGH0LDRgdGC0Yw= 131964
+44Ki44Oh44Oq 131965
+44Ki44Oh44Oq44Kr 131966
+IHRha8SxbQ== 131967
+IHPhu5s= 131968
+IHPhu5tt 131969
+16nXqNeU 131970
+6KiA44GG 131971
+0LvQsNC9 131972
+7Luk 131973
+15vXoNeU 131974
+2YjZgdmK 131975
+7ZeI 131976
+bHXEn3U= 131977
+IOuMgO2VtA== 131978
+INec15HXmdeq 131979
+INeU16jXkNep15XXoNeU 131980
+2LXZhQ== 131981
+IHPDtnlsZWQ= 131982
+IHPDtnlsZWRp 131983
+4Lib4Liy4LiB 131984
+IGFyZMSxbmRhbg== 131985
+44GI44Gf 131986
+4LiX4Lix4LmI4Lin4LmE4Lib 131987
+INeg15XXodej 131988
+0LHQvtC70Yw= 131989
+44KT44Gn44GZ44GR44Gp 131990
+INC70LjRiNGM 131991
+INeR15DXmQ== 131992
+INCx0YvRgdGC0YDQvg== 131993
+4Liq4Lix4LiZ 131994
+INeR16TXoNeZ 131995
+0LvQtdGH 131996
+INin2YTYrtio2LE= 131997
+IHPDs2M= 131998
+IHRow7o= 131999
+INC/0Y/Rgg== 132000
+44GK6aGY 132001
+44GK6aGY44GE 132002
+0YLQuNC9 132003
+44Gr44Gk44GE44Gm44Gv 132004
+16TXnw== 132005
+INC00LLRg9GF 132006
+4LiN4Li14LmI 132007
+4LiN4Li14LmI4Lib 132008
+4LiN4Li14LmI4Lib4Li4 132009
+4LiN4Li14LmI4Lib4Li44LmI4LiZ 132010
+0L7Qv9C10YA= 132011
+INin2YTYqNi02LE= 132012
+INin2YTZhdin2YQ= 132013
+xLF5b3J1eg== 132014
+2KrYrdmF2YrZhA== 132015
+4LiB4Liw 132016
+6ZaT44Gr 132017
+15fXldep 132018
+IE5ndXnDqm4= 132019
+44GE44Gm44GE44KL 132020
+0LTRg9GI 132021
+16nXpNei 132022
+0YjRgw== 132023
+5a6f6Zqb44Gr 132024
+INGA0LDQudC+0L0= 132025
+IENo4buJ 132026
+2YbYtdix 132027
+IOyatA== 132028
+IOyatOyYgQ== 132029
+INeU15PXmdef 132030
+2K3Yr9iv 132031
+2LHYsg== 132032
+INin2YTYr9mF 132033
+IFBow6Fw 132034
+0YLRgdGP 132035
+6KaL44GI 132036
+IHRp4buDdQ== 132037
+IHPhu61h 132038
+0LDRjtGC0YHRjw== 132039
+IELDoQ== 132040
+INeV15vXnA== 132041
+0JY= 132042
+0YjQuNC8 132043
+7J2064qU 132044
+0LvQtdCy 132045
+ZMSxaw== 132046
+IHByw6lzZW50ZQ== 132047
+IGFyYcOn 132048
+2LXYr9mC 132049
+INC/0L7QvNC+0LM= 132050
+INin2YTYtNix2YI= 132051
+INmI2KfZhNiw2Yo= 132052
+2LHZitin 132053
+15HXoNeV16o= 132054
+IG5n4buTaQ== 132055
+16jXldek 132056
+16jXldek15A= 132057
+IHRo4bqlcA== 132058
+44KE44Gv 132059
+44KE44Gv44KK 132060
+INin2YTYrNiv2YrYr9ip 132061
+6Z2e5bi444Gr 132062
+2YrZhNmK 132063
+7Kq9 132064
+2KrYudin2YXZhA== 132065
+44Gg44Go5oCd44GE44G+44GZ 132066
+2YXZhQ== 132067
+0LjRgtC10LvQuA== 132068
+44K144Kk44K6 132069
+2KfYr9in2Ko= 132070
+INin2YTZhdin2YTZitip 132071
+2YPYp9iq2Kg= 132072
+0LrQu9C4 132073
+0LLQtdGA0YU= 132074
+0L3QuNGH 132075
+INec16LXkdeV15M= 132076
+15zXmdeU 132077
+2K3Zjg== 132078
+44Kk44OZ 132079
+44Kk44OZ44Oz44OI 132080
+INeq15LXldeR15XXqg== 132081
+0YTQvtC9 132082
+INC00YDRg9Cz0LjQtQ== 132083
+15DXlteV16g= 132084
+IHBlcsOy 132085
+7JWe 132086
+5YCf44KK 132087
+16jXpteZ 132088
+15DXlg== 132089
+0LDQu9GM0L3Ri9GF 132090
+IOqyg+ycvOuhnA== 132091
+INC/0YDQsNCy0L4= 132092
+INin2YTYo9ix2LY= 132093
+4LmA4LiX4LiE 132094
+4LmA4LiX4LiE4LmC4LiZ 132095
+4LmA4LiX4LiE4LmC4LiZ4LmC4Lil 132096
+4LmA4LiX4LiE4LmC4LiZ4LmC4Lil4Lii 132097
+4LmA4LiX4LiE4LmC4LiZ4LmC4Lil4Lii4Li1 132098
+16bXqNeZ 132099
+INCa0YM= 132100
+xLFsbWE= 132101
+5rG644KB 132102
+2KfZiA== 132103
+INeT16fXldeq 132104
+4LiE4Lij4Li5 132105
+INmF2LPYqtmI2Yk= 132106
+4Lib4LmJ4Lit4LiH 132107
+4Lib4LmJ4Lit4LiH4LiB4Lix4LiZ 132108
+15PXldee15Q= 132109
+INGB0LXQs9C+0LTQvdGP 132110
+2LPZiNmC 132111
+16jXl9eV15E= 132112
+INil2K/Yp9ix2Kk= 132113
+0YXQvtC2 132114
+6YGO44GO 132115
+4LiE4Lit 132116
+0L3Rg9C7 132117
+15XXm9eU 132118
+2YjYp9mB2YI= 132119
+15vXnNec 132120
+INeU15PXlQ== 132121
+IGzEqW5o 132122
+IGto4bqjbw== 132123
+15DXntem16I= 132124
+66i4 132125
+INeb15nXpg== 132126
+INeb15nXpteT 132127
+INC00L7Qu9C20L3Riw== 132128
+4Lir4Lin4Lix4LiH 132129
+44OH44K2 132130
+44OH44K244Kk44Oz 132131
+IG5n4bud 132132
+5Lit44Gr 132133
+4LiB4Lil4Lix4Lia4Lih4Liy 132134
+2KzZhdin2YQ= 132135
+4LiU4Lix4LiH4LiB4Lil4LmI4Liy4Lin 132136
+2LPZg9mG 132137
+2LPZhg== 132138
+IMO2emVsbGlrbGU= 132139
+0LfQtdGA 132140
+cnrEmQ== 132141
+157Xldeo15Q= 132142
+IGzhuqE= 132143
+157Xmdeg15k= 132144
+16jXmdeq 132145
+44Gd44KM44GM 132146
+44GL44KM 132147
+INmK2YXZg9mG2YM= 132148
+w7ZmZmVudGxpY2g= 132149
+0LPQsNC9 132150
+INin2YTYrdmE 132151
+IG1pxJlkenk= 132152
+INGH0LDRgdGC0Lg= 132153
+dWrEhWN5 132154
+IGJhxJ9sxLE= 132155
+IGlsacWfa2k= 132156
+2YHYp9ih 132157
+44Oq44Oz44Kw 132158
+IGjDo25n 132159
+INC60L7QvdGC0YA= 132160
+INC60L7QvdGC0YDQvtC7 132161
+0LrQvtC/ 132162
+16nXmdei 132163
+16nXmdei15XXqA== 132164
+INCS0LDRiA== 132165
+INeU16rXpw== 132166
+2YXZhti5 132167
+IHBvbMOtdGljbw== 132168
+INCz0L7Qu9C+0LI= 132169
+INil2Yo= 132170
+2KXZhtiq2KfYrA== 132171
+4Lia4Li0 132172
+INCz0L7QstC+0YA= 132173
+INCz0L7QstC+0YDQuNGC 132174
+IHBo4buV 132175
+INGB0LXQvNGM 132176
+44Gv44GC44KK44G+44Gb44KT 132177
+INmI2KfYs9iq 132178
+157Xqdek15g= 132179
+0LfQtdC8 132180
+157Xk9eR16g= 132181
+IO2BsA== 132182
+IOydtOuyiA== 132183
+6rCA64qU 132184
+IOyngOybkA== 132185
+IGNhxYJ5 132186
+IGdlbGnFn3Rpcg== 132187
+0YHQutC+0LU= 132188
+cG9zw6k= 132189
+IGtow7Q= 132190
+4LiV4Li04LiU4LiV4Liy4Lih 132191
+bWlzc8Ojbw== 132192
+INec157XqA== 132193
+INec157XqNeV16o= 132194
+IGLDsw== 132195
+4LiV4Lij4Lin4LiI4Liq4Lit4Lia 132196
+IG5naOG7gQ== 132197
+INCx0LjQtw== 132198
+INCx0LjQt9C90LXRgQ== 132199
+0YHRgtC10YA= 132200
+2YjZjg== 132201
+5qW944GX44E= 132202
+5qW944GX44G/ 132203
+44GT44KM44GL44KJ 132204
+d2nEhXphbg== 132205
+4Liq4Lit4LiZ 132206
+2YXZiNix 132207
+16DXk9ec 132208
+INeU15DXk9ed 132209
+INC80L7Qu9C+0LQ= 132210
+2K3Zhdin 132211
+2K3Zhdin2YrYqQ== 132212
+0YHRgtGA0LDQvQ== 132213
+IGJ14buVaQ== 132214
+16rXmdeZ150= 132215
+YWJpbGVjZcSfaQ== 132216
+TMSw 132217
+4LmA4Lii4Lit4Liw 132218
+4LiI4Lij 132219
+2LPZg9in2YY= 132220
+4LiZ4Lix4LiU 132221
+IG3huqV5 132222
+INCR0LA= 132223
+c8WCYXc= 132224
+INmB2YTYpw== 132225
+INC60L7RgtC+0YDQvtC5 132226
+INC/0LvQvtGJ 132227
+INC/0LvQvtGJ0LDQtA== 132228
+44KC44GC44KK 132229
+c3pjesSZ 132230
+15nXpNeV 132231
+16nXnteq 132232
+b3dhxYJh 132233
+IG7DtG5n 132234
+16bXkdeQ 132235
+IOyeiOyXiA== 132236
+44G+44Go 132237
+44G+44Go44KB 132238
+2YLZiNin2Ko= 132239
+44G/44KT44Gq 132240
+INeb157XoteY 132241
+IHjDumM= 132242
+77yG 132243
+csSZ 132244
+csSZY3o= 132245
+15PXnteZ 132246
+IHThuq1u 132247
+4LiU4Lin4LiH 132248
+6rK97KCc 132249
+0L/Rg9GC 132250
+2KPYsdio2Lk= 132251
+INee16nXqtee16k= 132252
+44K/44Kk44OX 132253
+IOygnOqwgA== 132254
+INec15vXnw== 132255
+INC+0LHRgNCw0LfQvtC8 132256
+2YrZg9in 132257
+d8WC 132258
+d8WCYXNu 132259
+INin2YTZiNi32YbZitip 132260
+2KjZitio 132261
+157XnNeZ 132262
+0LrRgNCw0YI= 132263
+6riw7JeQ 132264
+2YLYp9iv 132265
+INmE2K/ZiQ== 132266
+4LiE4Lin4Liy4Lih4Lij4Li54LmJ 132267
+157Xk9eZ16DXmdeV16o= 132268
+6rKo 132269
+IO2YhOyerA== 132270
+16nXqteZ 132271
+0LzQvtC7 132272
+IG3DoWk= 132273
+4Lie4Li04Lih 132274
+4Lie4Li04Lih4Lie 132275
+4Lie4Li04Lih4Lie4LmM 132276
+4Lir4Lil4Lin4LiH 132277
+IHh1ecOqbg== 132278
+15fXodeo 132279
+2LHZiNmG 132280
+44Gd44GG44GE44GG 132281
+44Gd44KM44Ge 132282
+44Gd44KM44Ge44KM 132283
+INeb16nXlA== 132284
+0J/RgNCw0LI= 132285
+157Xkdem16I= 132286
+2LnYsdio 132287
+IGLDvHnDvA== 132288
+16TXmdeq15XXlw== 132289
+4LiI4Lia 132290
+INij2YPYqNix 132291
+16nXqNeq 132292
+157Xm9ep15nXqA== 132293
+INmI2YXYuQ== 132294
+44Gu44Gf44KB44Gr 132295
+4LiZ4Lix4Lia 132296
+7LCw 132297
+44Oq44OV44Kp 132298
+44Oq44OV44Kp44O844Og 132299
+IGPGsOG7nW5n 132300
+IOyggO2drA== 132301
+2YXZhti42YXYqQ== 132302
+IGhpw6diaXI= 132303
+44Gn44Gv44GC44KK44G+44Gb44KT 132304
+4Lij4Lit4Lii 132305
+65Cc64uk 132306
+44GZ44GQ44Gr 132307
+0LrQu9Cw 132308
+IMO8csO8bmxlcg== 132309
+IGtp4buDdQ== 132310
+IOuCmOuKlA== 132311
+0YLQutC4 132312
+0YHQuNC8 132313
+IGNo4buJbmg= 132314
+44KC44Gq44GE 132315
+4Lio4Lij4Li1 132316
+5pu/44GI 132317
+dGHFnw== 132318
+INio2YPZhA== 132319
+INeV15nXqQ== 132320
+dmlzw6Nv 132321
+5Lyd 132322
+5Lyd44GI 132323
+2YTYrw== 132324
+15zXmdee 132325
+15zXmdee15XXkw== 132326
+dMOzcmlh 132327
+2K/ZkQ== 132328
+2KfZhdix 132329
+IOq3uOugh+qyjA== 132330
+IG1hdGVyaWHFgg== 132331
+4LiX4Lij4Liy 132332
+4LiX4Lij4Liy4Lia 132333
+44Gu5pa544GM 132334
+44Gm44GN44Gf 132335
+2LbYug== 132336
+2LbYuti3 132337
+INmK2LnZhtmK 132338
+0LXQu9C+ 132339
+15DXlNeR15Q= 132340
+16LXng== 132341
+xZ/EsWs= 132342
+7J6Q64qU 132343
+44K/44Oz 132344
+IGLhuq10 132345
+157Xqdek15fXlA== 132346
+0LrRgNC4 132347
+0LHQu9C4 132348
+4Liq4Lix4LiV 132349
+4Liq4Lix4LiV4Lin4LmM 132350
+INiz2YbZiNin2Ko= 132351
+IFBoxrDGoW5n 132352
+44Gm44GX44G+44Gj44Gf 132353
+44Gq44Gc 132354
+INeR15DXlQ== 132355
+IGPDoW4= 132356
+2LPYrNmE 132357
+IGzhur0= 132358
+44Kx44O844K5 132359
+INen15nXkdec 132360
+4Lia4LiX4LiE4Lin4Liy4Lih 132361
+INeV15vXnw== 132362
+INC/0YDQtdC00YHRgtCw0LLQu9C10L0= 132363
+IG7hu5Fp 132364
+IGNvbWVudMOhcmlv 132365
+0LXQvdC40LXQvA== 132366
+IHThu48= 132367
+bMOg 132368
+INep15TXmdeU 132369
+0YHQu9Cw0LI= 132370
+INin2YTZiNmE2Kc= 132371
+INin2YTZiNmE2KfZitin2Ko= 132372
+2YTYrNmG2Kk= 132373
+16fXldeo15A= 132374
+0LHRi9GC 132375
+IOym 132376
+IOymiQ== 132377
+44Gn44GZ44GX 132378
+4Lir4Lij4Li34Lit4LmE4Lih4LmI 132379
+0LfQsNGJ0LjRgg== 132380
+2YHZhNiz2LfZitmG 132381
+IG1p4buFbg== 132382
+4LmA4Lii4LmH4LiZ 132383
+IMOnYWzEscWfYW4= 132384
+15nXkteU 132385
+IEXEnw== 132386
+IEXEn2l0aW0= 132387
+44OD44K344Ol 132388
+INC+0L/Riw== 132389
+INC+0L/Ri9GC 132390
+2LHYug== 132391
+2LHYutio 132392
+INGB0LLQvtC40YU= 132393
+4Lib4Lij4Liw4LiV 132394
+4Lib4Lij4Liw4LiV4Li5 132395
+INee15DXkw== 132396
+15vXldeg15nXnQ== 132397
+4LiZ4Li1 132398
+INCy0YvRhdC+0LQ= 132399
+44Gu5Lit44Gr 132400
+16TXnNeQ 132401
+INmI2YTZitiz 132402
+16TXldeo16E= 132403
+16TXldeo16HXnQ== 132404
+2YXYs9mE2YU= 132405
+IG5nw7Rp 132406
+15PXnteV16o= 132407
+44KS5L2/44Gj44Gm 132408
+INC/0L7QvNC+0YnRjNGO 132409
+2KPYs9ix 132410
+0LHQu9C+0Lo= 132411
+2YLZhw== 132412
+44GX44G+44GE 132413
+44Go44GX44Gf 132414
+INC/0LXRgQ== 132415
+44OJ44Or 132416
+15fXnQ== 132417
+44GX44Gq44GM44KJ 132418
+INCf0YDQtdC0 132419
+44OB44Kn44OD44Kv 132420
+5by344GE 132421
+16nXmdeo15XXqg== 132422
+0LTQsNC10YI= 132423
+15nXkdeV 132424
+IGdlbsOn 132425
+0LjQu9Cw0YE= 132426
+0LjQu9Cw0YHRjA== 132427
+INio2YTYrw== 132428
+5oKq 132429
+5oKq44GE 132430
+INee16nXqg== 132431
+5qeY44CF 132432
+5qeY44CF44Gq 132433
+4LiY4Lij4Lij4Lih4LiK4Liy4LiV4Li0 132434
+INmD2KfZhdmE 132435
+INin2YTYs9mF 132436
+15HXmNeZ15c= 132437
+Y8Oh 132438
+Z8OqbmNpYQ== 132439
+44K544K/44O8 132440
+4LiX4Liz4LiB4Liy4Lij 132441
+15nXnNeq 132442
+INeZ15XXpteQ 132443
+d8Ozag== 132444
+4Lia4Li44LiE 132445
+4Lia4Li44LiE4LiE4Lil 132446
+2LnYqtmF 132447
+2LnYqtmF2K8= 132448
+44Gd44KM44Gr 132449
+INin2YTYqtin2LHZitiu 132450
+2YLYsdin2KE= 132451
+IHnDtm5ldGlt 132452
+16fXqdeo 132453
+INGB0L/QvtGA0YI= 132454
+INeo15DXqdeV158= 132455
+IHNlw7FhbA== 132456
+IGNo4bqvbg== 132457
+54Sh44GE 132458
+INC00L7RgdGC0LDRgg== 132459
+INC00L7RgdGC0LDRgtC+0YfQvdC+ 132460
+IMOhZ3Vh 132461
+4LiB4Lij4LiT 132462
+4LiB4Lij4LiT4Li1 132463
+INee16nXlQ== 132464
+IHRy4bqjaQ== 132465
+67KM 132466
+dWrEhWN5Y2g= 132467
+2YHYsdiv 132468
+4LmD4LiB4Lil 132469
+4LmD4LiB4Lil4LmJ 132470
+44KL44Gu44Gv 132471
+16jXldeV15c= 132472
+2YbZgw== 132473
+INin2YTZhtmC 132474
+44Gu44Gn44GX44KH44GG 132475
+44Gu44Gn44GX44KH44GG44GL 132476
+2YXYudix2YE= 132477
+2YXYudix2YHYqQ== 132478
+0YPRidC1 132479
+INeR16LXmden16g= 132480
+2KrYtdmE 132481
+INeU15DXqA== 132482
+INeU15DXqNel 132483
+IMWeaQ== 132484
+4LiC4Liy4LiU 132485
+7Z6Y 132486
+44Gq44KT44Go 132487
+IOyCrOuekQ== 132488
+bMO8xJ/DvA== 132489
+2KjYp9ih 132490
+INin2YTYotiu2LE= 132491
+IGZhbcOtbGlh 132492
+IFRow6FuZw== 132493
+0YnQtdC90LjRjw== 132494
+44Kv44Ot 132495
+IFRo4bup 132496
+5pu444GN 132497
+0LXQvdC90L7QuQ== 132498
+7J6h 132499
+0LHQu9Cw0LM= 132500
+0LHQu9Cw0LPQvg== 132501
+0L/QvtCy 132502
+4LmB4Lin 132503
+4LiH4LiE4LmM 132504
+4Lit4Lix4LiZ4LiU4Lix4Lia 132505
+44GC44GS 132506
+4Lij4LmJ4Liy4Lii 132507
+w7xuw7xu 132508
+INeZ15vXldec15Q= 132509
+0LfQvtC9 132510
+INCc0Lg= 132511
+0LzQsNGC0LXRgNC40LDQuw== 132512
+IOuztOuptA== 132513
+2K3Zgdi4 132514
+w6rMgQ== 132515
+44Gr44GZ44KL 132516
+INeq15A= 132517
+INeU16HXlQ== 132518
+INGB0YLQvtGA 132519
+INGB0YLQvtGA0L7QvQ== 132520
+44OI44OD44OX 132521
+xYJvxZvEhw== 132522
+64W8 132523
+65Od 132524
+INmI2KfZhNi5 132525
+7LaU 132526
+INeZ16bXkA== 132527
+INGA0LDQt9C00LXQuw== 132528
+0LDQu9GM0L3QsNGP 132529
+15DXoNep15k= 132530
+c3BvxYI= 132531
+c3BvxYJlYw== 132532
+c3BvxYJlY3pu 132533
+2KXYudmE 132534
+2KXYudmE2KfZhg== 132535
+2YLZiNmJ 132536
+7ZWY66m07ISc 132537
+2KrYt9mI2LE= 132538
+IHNpw6p1 132539
+4bubdA== 132540
+0LTQstC4 132541
+0LTQstC40LY= 132542
+IHF14bqnbg== 132543
+a8SxbA== 132544
+INC/0YDQuNC30L3QsA== 132545
+IEjDow== 132546
+IEjDo3k= 132547
+INio2KfZhNiq 132548
+bWFuxLFu 132549
+44Kr44Or 132550
+IGvhu7c= 132551
+16fXnNeZ 132552
+65CY7KeA 132553
+2KrYudmE2YU= 132554
+7Iuc7ISk 132555
+7Iu2 132556
+7Zi8 132557
+2YPZitmB 132558
+5aOy44KK 132559
+4Lin4Li04LiK4Liy 132560
+0LHQsNC7 132561
+INij2K0= 132562
+INC00L7Qu9C20LXQvQ== 132563
+4Lij4Liy4LiH 132564
+4Lij4Liy4LiH4Lin4Lix 132565
+4Lij4Liy4LiH4Lin4Lix4Lil 132566
+2YXYp9ih 132567
+2KzYp9ix 132568
+xZo= 132569
+INee15DXlg== 132570
+16jXnteU 132571
+44GL44KC44GX44KM44Gq44GE 132572
+w6l0dWRl 132573
+Y3rEhWM= 132574
+IGfDs3I= 132575
+16DXodeU 132576
+2YXZitiv 132577
+INCf0LXRgNC1 132578
+2KPYrtix 132579
+44Gd44Gu5b6M 132580
+4LmA4LiU4Li14Lii4Lin4LiB4Lix4LiZ 132581
+157XkteV 132582
+157XkteV15XXnw== 132583
+0LTQvtCy 132584
+bWFzxLFuYQ== 132585
+16LXoNeU 132586
+44Kx44OD44OI 132587
+16HXog== 132588
+16HXoteZ16M= 132589
+IFTGsA== 132590
+IHTDs2M= 132591
+7Zmc64+Z 132592
+INCe0LQ= 132593
+INCe0LTQvdCw0LrQvg== 132594
+IGRvbGF5xLE= 132595
+2KTZg9iv 132596
+6rOE7ZqN 132597
+15zXqA== 132598
+0LLQtdGH 132599
+IGto4bufaQ== 132600
+IHRo4buneQ== 132601
+15PXnw== 132602
+4Lij4LiB 132603
+4Lia4Lix4LiV4Lij 132604
+4LmA4LiB4LmI4Liy 132605
+INin2YTYq9in2YQ= 132606
+INin2YTYq9in2YTYqw== 132607
+IHBvZHLDoQ== 132608
+16LXqNeZ 132609
+2YbYrNin2K0= 132610
+IGto4bqvYw== 132611
+7Lih 132612
+xLBN 132613
+44K744OD44OI 132614
+xbxlbmlh 132615
+INec15fXkdeo 132616
+ZXLDoA== 132617
+7LSI 132618
+IGvDvMOn 132619
+IGvDvMOnw7xr 132620
+2KfYqtmH2YU= 132621
+4LiL4LmM 132622
+2YXYtNin2LHZg9ip 132623
+INin2YTYqNi3 132624
+IGTDonk= 132625
+0LXQvdC90YvQvA== 132626
+4LiX4Li14LmI4LmE4Lih4LmI 132627
+2YLZjg== 132628
+IHbGsOG7o3Q= 132629
+IHRyw6w= 132630
+IHdwxYJ5dw== 132631
+QcWe 132632
+0LfQvg== 132633
+INin2YTYs9mK2K8= 132634
+4LiX4Liw4LmA4Lil 132635
+INGB0L7QtNC10YDQttCw 132636
+2LnYt9mK 132637
+INin2YTYudmG 132638
+6ICF44GM 132639
+4LmA4Lir4LiZ 132640
+4LmA4Lir4LiZ4Li34Lit 132641
+IGLDrQ== 132642
+IMO8emVyaW5kZW4= 132643
+IFbFqQ== 132644
+IG51w7Rp 132645
+2YbZhQ== 132646
+0LDQu9GM0L3QvtCz0L4= 132647
+16LXmdef 132648
+2K3Yttix 132649
+INC+0YLQtNC10Ls= 132650
+66qH 132651
+7JWh 132652
+INmE2K/ZitmH 132653
+7Jmc 132654
+IHNla3TDtnI= 132655
+INCy0L7Qt9C80L7QttC90L4= 132656
+INCU0LY= 132657
+IGjDtA== 132658
+5LqL44GM 132659
+0LjRgNC+0LLQsNC90LjQtQ== 132660
+0LDQu9GM0L3QvtC5 132661
+IOuvuOq1rQ== 132662
+2LHYrdmE 132663
+INGN0LrRgQ== 132664
+0L/RgNCw0LLQu9GP 132665
+IG5o4bud 132666
+IMSR4bqp 132667
+IMSR4bqpeQ== 132668
+2YHZg9ix 132669
+INmI2KPYttin2YE= 132670
+44OQ44K5 132671
+16rXldeb16DXmdeq 132672
+0YLQtdC70LXQuQ== 132673
+INil2YTZitmH 132674
+44Go6KiA44Gj44Gm 132675
+INC00LLQtQ== 132676
+IGNo4bqlcA== 132677
+IEzDtg== 132678
+4LiE4Lil4Li0 132679
+4LiE4Lil4Li04Lib 132680
+INiz2YjYsQ== 132681
+INiz2YjYsdmK2Kc= 132682
+157Xl9eV 132683
+c3TDpA== 132684
+0LTQvtCx 132685
+IG5p4buHbQ== 132686
+44Gu5aSn 132687
+16TXqNeV15nXpw== 132688
+16TXqNeV15nXp9eY 132689
+IENow6J1 132690
+INee15TXnQ== 132691
+0YHQutC40Lw= 132692
+INC/0L7Qu9GD0YfQuNGC0Yw= 132693
+2YrZiNmF 132694
+2KvZiNix 132695
+16TXldec15nXmA== 132696
+16TXldec15nXmNeZ 132697
+INC80LXRgdGP0YY= 132698
+5YWo44Gm 132699
+INin2YTZhdis2YTYsw== 132700
+INin2YTYqtin2YTZig== 132701
+INeX16g= 132702
+5ZCR44GR 132703
+15vXnteU 132704
+0LHQtdC0 132705
+2KPYudi2 132706
+2KPYudi22KfYoQ== 132707
+2YjZhNiv 132708
+4Lin4LmI4Liy4LiI4Liw 132709
+IGLDoW5o 132710
+4LiZ4Li04Lii 132711
+4LiZ4Li04Lii4Lih 132712
+4Lib4Lij4Liw4LiB4Lix4LiZ 132713
+0YHRgtCw0LLQuNGC0Yw= 132714
+4Lie4LiZ4Lix4LiZ 132715
+INGN0YTRhA== 132716
+INGN0YTRhNC10LrRgtC40LI= 132717
+INCw0LLRgtC+0YA= 132718
+IMSQxINuZw== 132719
+IHRoxrDhu59uZw== 132720
+44KS5oSf44GY 132721
+4LiB4Lix4Lia4LiB4Liy4Lij 132722
+5b6M44Gr 132723
+IHlhxJ8= 132724
+2LPYqtin2YY= 132725
+IGxp4buBbg== 132726
+44GE44G+ 132727
+acOqdQ== 132728
+4LmC4LiU4LiZ 132729
+INmE2LDZhNmD 132730
+4LmC4Lij4LiH4LmA4Lij4Li14Lii4LiZ 132731
+16bXmdeS 132732
+INin2YTZhdi52YTZiNmF2KfYqg== 132733
+56eB44Gf44Gh 132734
+4LiX4Li14LmI4LiE4Li44LiT 132735
+44Gr44Gq44Gj44Gm44GE44KL 132736
+157Xk9eZ16DXlA== 132737
+16HXm9ed 132738
+INCy0L3QtQ== 132739
+4Lie4LiZ4Lix4LiB4LiH4Liy4LiZ 132740
+0YDQtdC5 132741
+4LmA4LiI4LmJ4Liy4Lir4LiZ4LmJ4Liy4LiX4Li14LmI 132742
+IEhp4buHbg== 132743
+IG3DqWRpY28= 132744
+INiq2K3ZgtmK2YI= 132745
+0YzRgtC1 132746
+bWnFn3Rp 132747
+2YLZitin2K/YqQ== 132748
+44KP44GL44KK 132749
+4Lih4Liy4LiI4Liy4LiB 132750
+64WA 132751
+44Gr6Zai44GZ44KL 132752
+15DXqNeS15XXnw== 132753
+bcOodHJl 132754
+INei16bXnteZ 132755
+IENow7ph 132756
+4Lij4Li54LmJ4LiI 132757
+4Lij4Li54LmJ4LiI4Lix4LiB 132758
+7KOE 132759
+64u1 132760
+4LmB4LiX4LmJ 132761
+IGdlw6dlbg== 132762
+IGxhbsOnYQ== 132763
+INin2YTYqNit2Ks= 132764
+15PXnteV 132765
+44Gv44GY 132766
+44Gv44GY44KB 132767
+IGTDtm7DvMWf 132768
+6L+R44GP 132769
+4LmA4Liq4Lih 132770
+4LmA4Liq4Lih4Lit 132771
+6529 132772
+IMO8w6c= 132773
+4bue 132774
+0YjQsNGP 132775
+4LiX4Lij 132776
+2K3ZgtmK2YLYqQ== 132777
+4LiC4Lit4LiH4LiB4Liy4Lij 132778
+IOustOyXhw== 132779
+INeU15vXqA== 132780
+INin2YTYtdmK2YY= 132781
+INC70Y7QtNC4 132782
+4LiV4Liy4Lii 132783
+2KjZiNmE 132784
+IHZpw6pt 132785
+IHRoaeG7h3U= 132786
+4LiB4LiU 132787
+INec15PXkdeo 132788
+16TXoNeU 132789
+15DXqNeR16I= 132790
+2LPZiQ== 132791
+INin2YTYs9mK2KfYsw== 132792
+INin2YTYs9mK2KfYs9mK2Kk= 132793
+eWTEsQ== 132794
+2YjYrdiv2Kk= 132795
+INC00LXRj9GC0LXQu9GM0L3QvtGB0YLQuA== 132796
+INeV15TXng== 132797
+0L/QtdGH 132798
+0L/QtdGH0LDRgg== 132799
+0LjRgNC+0LLQsNC90LjRjw== 132800
+INGB0L7Qsw== 132801
+INGB0L7Qs9C70LDRgQ== 132802
+INeb15M= 132803
+INeb15PXkNeZ 132804
+INC40YHQv9C+0LvRjNC30L7QstCw0YLRjA== 132805
+16HXpNeV16jXmA== 132806
+IGlsw6dl 132807
+ZXhww6lyaWVuY2U= 132808
+IFRo4budaQ== 132809
+xLBL 132810
+4LmE4Lif4Lif4LmJ4Liy 132811
+65Ok7JeQ6rKM 132812
+4Lib4Lij4Liw4LmA4Lig 132813
+4Lib4Lij4Liw4LmA4Lig4LiX 132814
+IG3DvG1r 132815
+IG3DvG1rw7xu 132816
+INeQ15XXqteg15U= 132817
+7ISx7J2E 132818
+IOydtOycoA== 132819
+2LLZitin2LHYqQ== 132820
+IG9sZHVrw6dh 132821
+csOzYg== 132822
+INij2YbYpw== 132823
+INeU15HXmQ== 132824
+0YHQtdC9 132825
+16LXmden16g= 132826
+15nXk9eV16I= 132827
+ZHrEhQ== 132828
+2YXYudmE2YjZhdin2Ko= 132829
+2LTYp9io 132830
+IHBhcsOnYQ== 132831
+4LiZ4Liw4LiE4Liw 132832
+2KjYp9iz 132833
+INGC0L7RgNCz 132834
+INGC0L7RgNCz0L7Qsg== 132835
+INeX15PXqA== 132836
+15vXqNeY 132837
+15vXqNeY15nXoQ== 132838
+IEF5csSxY2E= 132839
+w6rMow== 132840
+7Jyo 132841
+INGC0LDQutC40LU= 132842
+INee16bXldeZ 132843
+44Op44Oz44Kt44Oz44Kw 132844
+16nXmdeV15XXpw== 132845
+5YmN44Gu 132846
+IELhuqNv 132847
+0YnRgw== 132848
+5pep44GP 132849
+IFBow7JuZw== 132850
+4Lie4Lij4Liw4Lij4Liy4LiK 132851
+16TXl9eV16o= 132852
+INCz0Ls= 132853
+INCz0LvQsNC3 132854
+4LiX4LmI4Liy 132855
+IGThuqF5 132856
+0YDQvtGB0YI= 132857
+4LmC4LiU4Lii4LmA4LiJ4Lie4Liy4Liw 132858
+IHF14bqtbg== 132859
+INeX15HXqNeV16o= 132860
+bcOqbWU= 132861
+bcSxxZ90xLE= 132862
+INin2YTYqtiv2KfZiNmE 132863
+IG7huqFu 132864
+INeU15PXmQ== 132865
+INin2YTYt9ix2YrZgg== 132866
+15LXldeq 132867
+INeU15PXqNea 132868
+dWrEhWNl 132869
+IGNo4buv 132870
+44KC44Gu44Gu 132871
+67Cb 132872
+44GV44KT44Gv 132873
+IHlhcmTEsW0= 132874
+INin2YTYudmF 132875
+IOynhO2WiQ== 132876
+INeZ15c= 132877
+INeZ15fXodeZ 132878
+INin2YTZhdiv2YrZhtip 132879
+IGPDug== 132880
+4LiB4Li14Lis 132881
+4LiB4Li14Lis4Liy 132882
+IG5pw6pu 132883
+bWlzacOzbg== 132884
+16DXmdeh15k= 132885
+16DXmdeh15nXldef 132886
+INCy0L7Qt9GA0LDRgdGC 132887
+INei15XXqdeU 132888
+INmF2K/Zitix 132889
+0Y/RgdGM 132890
+2K3YrNmF 132891
+7ZmY6rK9 132892
+INin2YTYo9iu2LHZiQ== 132893
+dcOfZXI= 132894
+INin2YTYudin2YTZhdmK2Kk= 132895
+IE5n4buNYw== 132896
+6rWQ7ZqM 132897
+5LiK44Gn 132898
+15nXlNeV15M= 132899
+15nXlNeV15PXmded 132900
+2YXYs9in2LnYr9ip 132901
+INC20LjQt9C90Yw= 132902
+INC/0L7RgtC+0LzRgw== 132903
+INin2YTZhdmF2YQ= 132904
+INin2YTZhdmF2YTZg9ip 132905
+IEfDtnI= 132906
+2LHZkA== 132907
+157Xp9eV157Xldeq 132908
+5Ye65p2l44KL 132909
+0YTRgg== 132910
+IOydtOygnA== 132911
+INGA0LXQvA== 132912
+INGA0LXQvNC+0L3Rgg== 132913
+16rXldea 132914
+5pmC44Gv 132915
+44KJ44KM44Gq44GE 132916
+YWx0xLE= 132917
+5a6244Gu 132918
+INin2YTYpdi52YTYp9mF 132919
+66as64qU 132920
+44GL44KJ44Gv 132921
+IEjhuqE= 132922
+44GC44Gu 132923
+15PXmdeV158= 132924
+2LHZitiz 132925
+IHNvY2lldMOg 132926
+INin2YTZg9io2YrYsQ== 132927
+INeR157XoQ== 132928
+INeR157XodeS16g= 132929
+INeR157XodeS16jXqg== 132930
+IOyeiOycvOupsA== 132931
+IG7hurduZw== 132932
+2YfZiQ== 132933
+IELDoA== 132934
+157XqNeV 132935
+IGrEmQ== 132936
+IGrEmXp5 132937
+IGrEmXp5aw== 132938
+INeb157XldeR158= 132939
+16LXnNeU 132940
+4LiX4Li14LmI4LmE4LiU4LmJ 132941
+44G+44GX44KH44GG 132942
+157Xodek16g= 132943
+0KLQng== 132944
+2LPZitin2LPYqQ== 132945
+INC60LDQttC00YvQuQ== 132946
+67Kg 132947
+dMSxbQ== 132948
+eeG7h24= 132949
+4Lij4Li14LmI 132950
+INC00LXRgtGB0Lo= 132951
+4Lin4Li04LiY4Li14LiB4Liy4Lij 132952
+bcOzd2k= 132953
+15jXoted 132954
+15TXptec15fXlA== 132955
+2LbZitmB 132956
+INGF0L7RgtGP 132957
+44KT44Gn44GE44KL 132958
+4LiE4Liy4LiU 132959
+4LiE4Lij4Lia 132960
+INC60YPRgNGB 132961
+IGJhxZ9hcsSx 132962
+15HXqNeV 132963
+2YrYudip 132964
+INCd0YM= 132965
+4LiE4Lin4Liy4Lih4LmA4Lib4LmH4LiZ 132966
+INec157Xqdec 132967
+IOyii+ydgA== 132968
+2YXYpNiz2LM= 132969
+2YXYpNiz2LPYp9iq 132970
+IHByw6ljaXM= 132971
+IHRo4bqjbw== 132972
+4LiB4LmH4LiE4Li34Lit 132973
+INep15vXnA== 132974
+ZsO8aHJ1bmc= 132975
+44GE44Gn 132976
+4LmB4Lil4Liw4Lih4Li1 132977
+4LiB4LmH4Lih4Li1 132978
+INep16k= 132979
+0LzQtdC7 132980
+INC60L3QuNCz 132981
+INio2KfZhNmG 132982
+INio2KfZhNmG2LPYqNip 132983
+IGFsZMSx 132984
+0YLQsNC5 132985
+INeX15PXqdeZ150= 132986
+5a6f44Gv 132987
+2LnZiNin 132988
+IOydmOuvuA== 132989
+0LjQt9C8 132990
+0YDQsNCx0L7RgtCw0YLRjA== 132991
+2YHYtQ== 132992
+INeR16DXldeh16M= 132993
+44Go44GX44Gm44KC 132994
+4LmA4Lib4LmH4LiZ4LiX4Li14LmI 132995
+INGB0LvQtdC00YPQtdGC 132996
+6ICD44GI44Gm 132997
+INeb15nXlded 132998
+0YHRgtGL 132999
+15vXnNeb15zXmQ== 133000
+5rWB44KM 133001
+44KS44Gk44GR 133002
+0YfQsNGC 133003
+15nXm9eV158= 133004
+15nXqNeZ 133005
+bGFyxLF5bGE= 133006
+44Kk44Oh 133007
+44Kk44Oh44O844K4 133008
+16DXlten 133009
+IGNpw7I= 133010
+IHPEsW4= 133011
+IHPEsW7EsXI= 133012
+4LiZ4LiE4Lij 133013
+0LrQsNGC 133014
+IGzhu5dp 133015
+656M 133016
+2KrZgdin2LU= 133017
+2KrZgdin2LXZitmE 133018
+64aT 133019
+INmF2LY= 133020
+aWxtacWf 133021
+2KjYp9ix2YM= 133022
+0J3QmA== 133023
+IHRo4bqpbQ== 133024
+INeQ15XXqtea 133025
+INC/0YDQuNC90LjQvA== 133026
+INC/0YDQuNC90LjQvNCw 133027
+IHnDtm50 133028
+IHnDtm50ZW0= 133029
+INee16fXkdec 133030
+IGt0w7NyZWdv 133031
+6reA 133032
+2LTYsdmB 133033
+2K/Yp9mF 133034
+44GE44KN44GE44KN 133035
+IEFsw6lt 133036
+IGfDtnLDvA== 133037
+IGfDtnLDvG50 133038
+IGfDtnLDvG50w7w= 133039
+2K/Ysw== 133040
+0YjQutC4 133041
+0LPRgNCw0LQ= 133042
+IGzhuqFj 133043
+IHPhu69h 133044
+44KJ44KM44G+44GZ 133045
+b8OgaQ== 133046
+0YnQtdC9 133047
+44GL44Gq44GE 133048
+INC/0L7Qvw== 133049
+INC/0L7Qv9GD 133050
+INC/0L7Qv9GD0LvRj9GA 133051
+INin2YTZhdmI2YLYuQ== 133052
+csOkZw== 133053
+77yh 133054
+7ZWE 133055
+44KS6KaL44KL 133056
+2KfZhdin 133057
+INin2YTYrdix2Kg= 133058
+INCf0LA= 133059
+INec15DXqteo 133060
+IHThu5Fj 133061
+15HXnNeU 133062
+2LHYptmK2LM= 133063
+0LLRgw== 133064
+2YrYr9mK 133065
+0LrQsNC30LDQvQ== 133066
+INeX16nXkdeV158= 133067
+aMO0dGVs 133068
+16LXldeg15Q= 133069
+2KjZhtmK 133070
+157Xldec 133071
+INC00L3Rjw== 133072
+6Zuj44GX44GE 133073
+0LLQtdC00LXQvdC40Y8= 133074
+INeV157Xqg== 133075
+0L3QsNC/0YDQuNC80LXRgA== 133076
+2YLYp9io2YQ= 133077
+IHLDqXN1bHRhdA== 133078
+INGA0LDQt9Cy0LjRgtC40Y8= 133079
+2LHZkQ== 133080
+7KCE66y4 133081
+INin2YTZhdiy2YrYrw== 133082
+IOychO2VtOyEnA== 133083
+64aN 133084
+7ZmV 133085
+IFRoaeG6v3Q= 133086
+7Yyo 133087
+bWFsxLFkxLFy 133088
+IGN6xYI= 133089
+IGN6xYJvd2ll 133090
+IGN6xYJvd2llaw== 133091
+INmE2KjZhg== 133092
+INmE2KjZhtin2YY= 133093
+w7xzw7w= 133094
+44Gq44KT44Gg 133095
+IMW8eWNpZQ== 133096
+INGF0L7RgNC+0YjQvg== 133097
+5pa544Gr 133098
+64uk66m0 133099
+0LjRh9C10YHQutCw0Y8= 133100
+16LXqNeZ15s= 133101
+16LXqNeZ15vXqg== 133102
+44G+44Gb44KT44Gn44GX44Gf 133103
+INGB0L7QsdC+0Lk= 133104
+IGfhu5c= 133105
+INC00LXQu9Cw0YLRjA== 133106
+ZGHEhw== 133107
+0LDRgNCw 133108
+csOzxbxuaQ== 133109
+4LmA4Lil4Li14LmJ 133110
+4LmA4Lil4Li14LmJ4Lii 133111
+4LmA4Lil4Li14LmJ4Lii4LiH 133112
+4Lid4Liy4LiB 133113
+INiq2YI= 133114
+INiq2YLYr9mK 133115
+INiq2YLYr9mK2YU= 133116
+4Lir4LiZ4Li44LmI4Lih 133117
+IG3DvGNhZGU= 133118
+IG3DvGNhZGVsZQ== 133119
+7KeA66W8 133120
+44Kk44K5 133121
+INij2LPYp9iz 133122
+asSFY2Vnbw== 133123
+IMWfZWg= 133124
+0L3RgtC10YA= 133125
+0YbQuNGO 133126
+77u7 133127
+0Y7RidC10LPQvg== 133128
+4LmC4Lib4Lij4LmB 133129
+4LmC4Lib4Lij4LmB4LiB4Lij4Lih 133130
+IG1pZcSH 133131
+2K3Zg9mI2YXYqQ== 133132
+44Gn44GX44Gf44GM 133133
+15nXodeU 133134
+44KC44Gu44KS 133135
+INee15DXqg== 133136
+4Liq4Li44LiU4LiX4LmJ4Liy4Lii 133137
+IGPFqQ== 133138
+2YbYs9io 133139
+INC/0YDQvtGH 133140
+INC00L3QtdC5 133141
+INGN0YLQuNGF 133142
+15zXnteq 133143
+0L3Rj9GP 133144
+0Y3Qug== 133145
+IOyngOuCnA== 133146
+4Lih4Lir4Liy4Lin4Li04LiX4Lii4Liy 133147
+4Lih4Lir4Liy4Lin4Li04LiX4Lii4Liy4Lil 133148
+4Lih4Lir4Liy4Lin4Li04LiX4Lii4Liy4Lil4Lix4Lii 133149
+ZMOjbw== 133150
+IE3DoXk= 133151
+IOq1reqwgA== 133152
+4Lia4Li44Lij4Li1 133153
+15LXmdec 133154
+INGC0YvRgdGP 133155
+INGC0YvRgdGP0Yc= 133156
+2YHZgw== 133157
+INCY0YE= 133158
+6KGM44KP44KM 133159
+16TXqNeT 133160
+44Gk44GN 133161
+4LiE4Lij4Lit4Lia 133162
+4LiE4Lij4Lit4Lia4LiE4Lij4Lix4Lin 133163
+4LiC4Li24LmJ4LiZ4Lih4Liy 133164
+5LuK5pel44Gv 133165
+IOyCrOuejOydtA== 133166
+16LXptee15Q= 133167
+0L/QvtGA 133168
+IEvhu7M= 133169
+IMahbg== 133170
+IHRoxINt 133171
+2YHYp9mC 133172
+44Ga44Gr 133173
+INec16fXqA== 133174
+INec16fXqNeV15A= 133175
+2KfZgdmK2Kk= 133176
+2YXZjtin 133177
+0LPQsNGA 133178
+2LXZhNin 133179
+2LXZhNin2Kk= 133180
+INee15bXlA== 133181
+bMSxxJ/EsW7EsQ== 133182
+INeQ15nXoNeU 133183
+0LrRgNC+ 133184
+IG5nxrDGoWk= 133185
+INCy0L3QuNC8 133186
+INCy0L3QuNC80LDQvdC40LU= 133187
+asSFY3k= 133188
+2YDZgNmA2YDZgA== 133189
+0YHRhdC+0LQ= 133190
+44Gq44KT44GL 133191
+157Xmdec 133192
+INeU15DXlw== 133193
+44KP44Gq44GE 133194
+2LnYs9mD2LE= 133195
+IOyEuOqzhA== 133196
+INGH0LXQs9C+ 133197
+INGB0YDQtdC00YHRgtCy0LA= 133198
+INCg0LDRgQ== 133199
+44Gq44GB 133200
+2YbZgdiz 133201
+16jXmdeV158= 133202
+0YHRg9C0 133203
+IOyduOqwhA== 133204
+INin2YTZhdmC2KjZhA== 133205
+2YbYudmF 133206
+2KrZiNmB2LE= 133207
+16nXkdei 133208
+xLFsbQ== 133209
+xLFsbcSxxZ8= 133210
+INec16rXqg== 133211
+2KrYtdmB 133212
+15TXpNeV15o= 133213
+4LmD4LiZ4Lib4Li1 133214
+7J206rOg 133215
+2YHZiNiy 133216
+4Lic4Lil4LiH4Liy4LiZ 133217
+IEdpw6Fv 133218
+4Lia4Lit4LiB4Lin4LmI4Liy 133219
+IGTEscWf 133220
+IGTEscWfxLFuZGE= 133221
+7KO9 133222
+IGR6aWXFhA== 133223
+0LrRhtC40Lg= 133224
+0LjRhtC1 133225
+44Gu5LiA 133226
+2LnYtA== 133227
+0L/RgNC10YHRgQ== 133228
+4Lir4LiZ4LmI4Lit4Lii 133229
+4Lil4Lix4LiB4Lip4LiT4Liw 133230
+IHBvc3NpYmlsaXTDoA== 133231
+4LmE4LiU4LmJ4Lij4Lix4Lia4LiB4Liy4Lij 133232
+4Lir4Lii4Li44LiU 133233
+IHBoacOqbg== 133234
+55Sf44G+44KM 133235
+2LfZiNmE 133236
+0YTQuNC9 133237
+ZsO8cg== 133238
+2K3Zitin2Kk= 133239
+7ZaI7Iq164uI64uk 133240
+15vXoNeV16o= 133241
+4Lib4Lij4Liw4Liq 133242
+4Lib4Lij4Liw4Liq4Lia 133243
+4Lib4Lij4Liw4Liq4Lia4LiB4Liy4Lij4LiT4LmM 133244
+65CY7JeI 133245
+IGthxbxkeQ== 133246
+IGx1eeG7h24= 133247
+INC+0YDQs9Cw0L3QuNC30LDRhtC40Lg= 133248
+5bCR44Gq44GP 133249
+0YHRgtGA0L7QtdC9 133250
+IHTDqWNuaWNv 133251
+16fXlNec 133252
+INeV15DXlw== 133253
+INi52YTZitmD 133254
+0YnQtdC90LjQtQ== 133255
+INeU15nXnNeT15nXnQ== 133256
+2YjYs9in2KbZhA== 133257
+INeV15TXqg== 133258
+2KrZhdmK2LI= 133259
+INGB0LrQsNC30LDQuw== 133260
+INC/0L7Qu9C4 133261
+INeU157XoQ== 133262
+2YTZkdmO 133263
+2YXYpNiz2LPYqQ== 133264
+INee15nXkw== 133265
+44Gj44Gh 133266
+IOuEiOustA== 133267
+4Lie4Li1 133268
+IHThurduZw== 133269
+IHThuqVu 133270
+16jXqded 133271
+IG3DqWRpY2E= 133272
+INei15XXng== 133273
+INei15XXnteT 133274
+0YTQvtGA 133275
+2YXYsdip 133276
+IHZhdGFuZGE= 133277
+IHZhdGFuZGHFnw== 133278
+INC00LXQu9C+ 133279
+4LiZ4Lih 133280
+44Go5ZCM44GY 133281
+2YHZiQ== 133282
+0YHQvtGA 133283
+INeU16HXqNeY 133284
+IMOpcG9jYQ== 133285
+7KCV7LGF 133286
+INGB0LLRj9C30LDQvQ== 133287
+2LbYsdio 133288
+INmE2YbYpw== 133289
+IHXFvHl3YQ== 133290
+INin2YTYrNmK2LQ= 133291
+0Y7RgA== 133292
+15HXodeV16M= 133293
+INC80YM= 133294
+INC80YPQt9GL0Lo= 133295
+YmlsaXTDqQ== 133296
+IG1hw6c= 133297
+2LPZjg== 133298
+2KrZhNmD 133299
+44Gs 133300
+2YrZhNin 133301
+0YjQu9Cw 133302
+2YDZgNmA 133303
+INC+0LTQvdC+0Lk= 133304
+0LfQstCw0L0= 133305
+INGB0YDQsNC3 133306
+INGB0YDQsNC30YM= 133307
+2YbYuNmF 133308
+2LHYp9mH 133309
+INmE2YfYsNin 133310
+15vXldeo 133311
+INeU16nXkdeV16I= 133312
+INeU16nXqg== 133313
+IFF14bqjbmc= 133314
+44Or44O8 133315
+44GI44Gq44GE 133316
+15jXkA== 133317
+IG1p4buBbg== 133318
+IFBo4bqtdA== 133319
+INin2YTYs9mI2YI= 133320
+xII= 133321
+INin2YTYrNmF2Lk= 133322
+INin2YTYrNmF2LnYqQ== 133323
+0Y7RidC10Lk= 133324
+YcWCZW0= 133325
+2LnYqtmC2K8= 133326
+2KPZhNmF 133327
+0YHQutC1 133328
+IOydtO2VtA== 133329
+2YbYs9iu 133330
+6KiA44GE 133331
+0LTQvtCx0LDQsg== 133332
+2LPYqNmC 133333
+16LXldeo16g= 133334
+0YLQuNC/ 133335
+44Gd44GT44Gn 133336
+dmlzacOzbg== 133337
+2LnZiNiv2Kk= 133338
+66i5 133339
+157Xlteo15c= 133340
+INil2K0= 133341
+INec15HXmdef 133342
+INec16bXkNeq 133343
+IHlhcmTEsQ== 133344
+IHlhcmTEsW1j 133345
+IHlhcmTEsW1jxLE= 133346
+xLBa 133347
+16fXpNeU 133348
+dHLDqQ== 133349
+bGnEn2luaQ== 133350
+0LrQu9GO0YfQsA== 133351
+IMO8cmV0aW0= 133352
+IGF5csSx 133353
+IGtpxZ9pbGVy 133354
+4LiE4LmJ4LiZ 133355
+4LiE4LmJ4LiZ4Lir4Liy 133356
+IFPhu7E= 133357
+INeb16E= 133358
+INeb16HXow== 133359
+INGC0LDQutC40YU= 133360
+IFh1w6Ju 133361
+INC70LXQsw== 133362
+INC70LXQs9C60L4= 133363
+2KvZgtin2YHYqQ== 133364
+0J3Qng== 133365
+44K544K/44OD 133366
+44K544K/44OD44OV 133367
+5ZCI44GE 133368
+INeU16nXmdee15XXqQ== 133369
+bWFuxLF6 133370
+INCS0LDRgQ== 133371
+Z8O8bg== 133372
+7JyE7JuQ7ZqM 133373
+IHdzcMOzbG4= 133374
+INGB0LLQvtC1 133375
+7YOB 133376
+4LmA4LiZ4Li14Lii 133377
+2YjYqNip 133378
+0LLRj9C3 133379
+xLFkxLFy 133380
+65CY7JeI64uk 133381
+IGRlxJ9pxZ90aXI= 133382
+44KL44GT44Go44GM 133383
+INeX15PXqdeU 133384
+44KJ44KM44Gm44GE44KL 133385
+15fXmdeZ15E= 133386
+INCa0LDRgA== 133387
+16DXmdeq15XXlw== 133388
+INen15jXnw== 133389
+16jXlg== 133390
+2YjYug== 133391
+6Kqt44G/ 133392
+INiq2YLZiNmF 133393
+INmD2KfZhA== 133394
+4Lid4Li24LiB 133395
+IOuwnOyDnQ== 133396
+b2zDs2dpY28= 133397
+2LHYp9i5 133398
+4LmB4LiB4LmJ4LmE4LiC 133399
+INGA0LDQsdC+0YLRgw== 133400
+2YbZkdmO 133401
+4Lit4Lii4Li54LmI4LiX4Li14LmI 133402
+INin2YTYq9in2YbZitip 133403
+IE5ow6Ju 133404
+0YXQstCw0YI= 133405
+w7ZuZQ== 133406
+INi52K/YqQ== 133407
+4LmB4Liq4LiH 133408
+0YLQvtC/ 133409
+0L/Rg9GB0LrQsA== 133410
+2LTYsdin2KE= 133411
+INCa0L7QvA== 133412
+INek16LXldec15Q= 133413
+7IKs7J20 133414
+7IKs7J207Yq4 133415
+6KGM44Gj44Gm 133416
+INeU15TXqg== 133417
+INGB0YLQvtGA0L4= 133418
+INGB0YLQvtGA0L7QvdGL 133419
+2K/Ysdiz 133420
+4LiL4Li5 133421
+4LiV4LmI4Liz 133422
+INij2KjZig== 133423
+0L/QvtC00L7QsQ== 133424
+44Gr44Gm 133425
+2KfYsdiq2YHYp9i5 133426
+INmF2KQ= 133427
+0LjQutC+0LI= 133428
+Z2Vmw7xocnQ= 133429
+4Lih4Li34Lit4LiW4Li34Lit 133430
+INmE2YLYrw== 133431
+INij2YbZkQ== 133432
+2LPZiti32LE= 133433
+44G+44Ga44Gv 133434
+16HXkw== 133435
+0YHQutC+0LvRjNC60L4= 133436
+44G/44Gf44GE44Gq 133437
+15PXqNeS 133438
+16LXmdeT 133439
+4LmD4Lir4LmJ4Lia4Lij4Li04LiB4Liy4Lij 133440
+INCU0Lg= 133441
+15HXoteZ15XXqg== 133442
+INeU15fXlQ== 133443
+0L/QuNGB0Yw= 133444
+INin2YTYrtmE 133445
+0LHQsNCy 133446
+IMSwbGs= 133447
+INin2YTYrtmF 133448
+INin2YTYrtmF2YrYsw== 133449
+INmK2YLZiNmF 133450
+5pmC44Gu 133451
+IHPFgm93 133452
+INij2YfZhQ== 133453
+2K7ZhNmC 133454
+INij2LXYqNit 133455
+IGNo4bupYQ== 133456
+IHRow6Fj 133457
+2YHYp9mE 133458
+IGNo4bud 133459
+INin2YTYrtin2LE= 133460
+INin2YTYrtin2LHYrA== 133461
+INin2YTYrtin2LHYrNmK2Kk= 133462
+2LfYp9im2LE= 133463
+IHTDoA== 133464
+IHTDoHU= 133465
+4LiB4Lil4LmJ4Lit4LiH 133466
+INin2YTZhdix2KM= 133467
+INin2YTZhdix2KPYqQ== 133468
+5YWo44GP 133469
+IMOWbg== 133470
+55qE44Gr44Gv 133471
+IHBpw6hjZQ== 133472
+15LXmdeR 133473
+INin2YTZiNin2YLYuQ== 133474
+5LuK44Gu 133475
+INin2YTZhdmC 133476
+Y3puxIU= 133477
+2YHYudin2YQ= 133478
+0LXQvdC90L7Qs9C+ 133479
+INGE0LDQutGC 133480
+7Iug7LKt 133481
+INCe0L3QuA== 133482
+INin2YTYqNmE2KfYrw== 133483
+0L7QstC40Yc= 133484
+64+M 133485
+0YTRg9C90LrRhtC4 133486
+IOyWtOuKkA== 133487
+44OV44Kp44O8 133488
+ZMOt 133489
+0LjQu9C+0YHRjA== 133490
+2YXZiQ== 133491
+INin2YTYo9mF2LHZitmD 133492
+INin2YTYo9mF2LHZitmD2YrYqQ== 133493
+15jXmdek15XXnA== 133494
+7ZSE66Gc6re4 133495
+7ZSE66Gc6re4656o 133496
+INep15XXoNeV16o= 133497
+2LTZhdmE 133498
+INC/0LDRgNCw 133499
+INeU15fXlden 133500
+2YjYstin2LHYqQ== 133501
+44Go44GZ44KL 133502
+IHF14bqjbmc= 133503
+IGHEn8Sxcg== 133504
+INin2YTZhNis 133505
+INin2YTZhNis2YbYqQ== 133506
+6ri0 133507
+IFTDom4= 133508
+2KzZhdmE 133509
+0LTQvtC7 133510
+4LmB4Lie4LiX4Lii 133511
+4LmB4Lie4LiX4Lii4LmM 133512
+INeo15DXqdeZ 133513
+0YnQtdC5 133514
+IMOnZXZyZQ== 133515
+INC60L7QvNC/0LvQtdC60YE= 133516
+INeR157Xqdea 133517
+IGFsdMSxbg== 133518
+INij2LnZhdin2YQ= 133519
+INGB0LLQvtC10LPQvg== 133520
+44KI44GE 133521
+15fXnNeZ15g= 133522
+157XoNei 133523
+INeo15HXlA== 133524
+INij2YrYttin2Ys= 133525
+15bXnA== 133526
+INin2YTYs9mK2KfYs9mK 133527
+5oCd44GG 133528
+16fXqNen 133529
+16fXqNen16I= 133530
+INin2YTZgdix2YrZgg== 133531
+0LHQuNGC 133532
+16fXoNeU 133533
+INil2YbZhw== 133534
+INCS0LDQvA== 133535
+0KDQng== 133536
+44OI44Oq 133537
+5b+F6KaB44Gq 133538
+IGNow6J1 133539
+57aa44GR 133540
+IMOnw7Z6w7xt 133541
+Z8WCb3c= 133542
+2LnZgtmE 133543
+5aOy44KL 133544
+aeG6v3Q= 133545
+4LiK4Li04LmJ4LiZ 133546
+INit2YLZiNmC 133547
+2LfZhNi5 133548
+IMSRZW4= 133549
+INmD2KfZgdip 133550
+44Gu44GU 133551
+IOus 133552
+IOusvA== 133553
+IOusvOuhoA== 133554
+INix2LPZiNmE 133555
+0LfQsNC8 133556
+0LfQsNC80LXQvQ== 133557
+IGt1bGxhbsSxY8Sx 133558
+16LXldec 133559
+6Imy44CF 133560
+0YjQuNGA 133561
+INeX16k= 133562
+IHd5Z2w= 133563
+IHd5Z2zEhWRh 133564
+16nXmdee15XXqQ== 133565
+5b+Y44KM 133566
+16LXmdem15XXkQ== 133567
+INin2YTYs9mI2LHZig== 133568
+5bCR44Gq44GE 133569
+INC/0L7QuNGB0Lo= 133570
+4Liq4Liz4LiZ4Lix4LiB4LiH4Liy4LiZ 133571
+INee16bXkw== 133572
+IG3DvMWf 133573
+IG3DvMWfdGVy 133574
+IG3DvMWfdGVyaQ== 133575
+INmF2YbZh9mF 133576
+4LiV4Liz4LmB 133577
+4LiV4Liz4LmB4Lir4LiZ 133578
+4LiV4Liz4LmB4Lir4LiZ4LmI4LiH 133579
+xZttaWU= 133580
+INep16DXqg== 133581
+INeU16TXmQ== 133582
+16TXqNep 133583
+16LXkdeo15nXqg== 133584
+4Liq4LiZ4Lix4Lia 133585
+4Liq4LiZ4Lix4Lia4Liq4LiZ4Li4 133586
+4Liq4LiZ4Lix4Lia4Liq4LiZ4Li44LiZ 133587
+6KiA44Gj44Gm 133588
+4LiB4Liy4Lij4LiI4Lix4LiU 133589
+IE1vxbxl 133590
+0LjQt9Cw0YbQuNC4 133591
+4bupdA== 133592
+INmI2KjYudiv 133593
+IGRlxJ9pbGQ= 133594
+IGRlxJ9pbGRpcg== 133595
+INeq154= 133596
+INee157XoNeV 133597
+6Kmx44KS 133598
+INGG0LXQvdCw 133599
+IHRow7pj 133600
+15nXnteV158= 133601
+IELDoW8= 133602
+44KS5Y+W44KK 133603
+5a6J44GE 133604
+INei15XXqdeZ150= 133605
+6Ieq5YiG44GM 133606
+bMOpZQ== 133607
+44KL44Gu44Gn 133608
+0LjRgNGD0LXRgg== 133609
+44Gm44KL 133610
+2LPYqtix 133611
+INin2YTYrdmK 133612
+15nXnNeV16o= 133613
+INeX15E= 133614
+2YLYsdij 133615
+2KrZhdmD2YY= 133616
+2LPYp9im2YQ= 133617
+cHLDvGY= 133618
+44GL44GR44Gm 133619
+INGB0L7QsdGB0YLQstC10L3QvdC+ 133620
+IOychO2VmOyXrA== 133621
+15zXmdeY 133622
+44GM5aSa44GP 133623
+2YrYqtmH2Kc= 133624
+56uL44Gm 133625
+4Lih4Lit4Lia 133626
+7Iuc7J6l 133627
+0L7RgNCw 133628
+IHNhdmHFnw== 133629
+15jXmdeR15k= 133630
+15HXoNeV 133631
+2YXYp9iw2Kc= 133632
+6riw6rCE 133633
+44Gq44Gp44Gn 133634
+INee16rXl9eZ15w= 133635
+IG5oaeG7hQ== 133636
+IG5oaeG7hW0= 133637
+0LrQsNGA 133638
+0LrQsNGA0YI= 133639
+INec15TXqdeq157XqQ== 133640
+16DXmdeX 133641
+2KfYr9mK2Kk= 133642
+4Lij4Liy4Lii4LiH4Liy4LiZ 133643
+IHByenlrxYJhZA== 133644
+0YnQuNC5 133645
+2K3YttmI2LE= 133646
+IGjDtG4= 133647
+w50= 133648
+16rXldem15DXldeq 133649
+2LHYp9io2Lc= 133650
+IGLhur9w 133651
+INC/0L7Qu9GD0YfQuA== 133652
+5Ye65Lya44GE57O7 133653
+4Lib4Lil4LmI4Lit4Lii 133654
+INin2YTYtNio2KfYqA== 133655
+2KfZh9mE 133656
+5LuK44G+44Gn 133657
+2LHYrNi5 133658
+44K244O8 133659
+2YLZgQ== 133660
+IEdyb8Of 133661
+IO2ajOybkA== 133662
+2KfYrNix 133663
+INeR157Xp9eo15Q= 133664
+IHNlZ3VyYW7Dp2E= 133665
+ZsO8aGw= 133666
+44Gm44GE44GP 133667
+4Lir4Lih4Lit 133668
+INC60L7RgtC+0YDQvtC8 133669
+IE7Eg20= 133670
+IGTFgnVnbw== 133671
+2YXZhtit 133672
+16nXldeV15k= 133673
+INij2YrYp9mF 133674
+4Liq4Lig4Liy4Lie 133675
+cnrEhQ== 133676
+2LTYsdmD2KfYqg== 133677
+44KS6ICD44GI 133678
+0LTQsNGA 133679
+4Lib4Lij4Liw4LiK4Li44Lih 133680
+INeV15DXlg== 133681
+aeG7h24= 133682
+IHTGsMahaQ== 133683
+16nXmdeX 133684
+4Lit4LmI4Lit4LiZ 133685
+5pu444GE44Gm 133686
+IG5n4buv 133687
+15HXmdeY15c= 133688
+15HXmdeY15fXldef 133689
+IHPhurU= 133690
+IHPhurVu 133691
+7KeA64+E 133692
+INC/0YDQtdC/ 133693
+INC/0YDQtdC/0LDRgNCw0YI= 133694
+INC90LDRg9GH 133695
+IMOcbml2ZXJz 133696
+IMOcbml2ZXJzaXRlcw== 133697
+IMOcbml2ZXJzaXRlc2k= 133698
+INeS15PXldec15Q= 133699
+INeU16DXqg== 133700
+INeU16DXqteR16I= 133701
+44Gn44GC44Gj44Gf 133702
+IG1pZXNpxIU= 133703
+IG1pZXNpxIVj 133704
+0LPRgNCw0Lw= 133705
+0LPRgNCw0LzQvA== 133706
+INio2LTYo9mG 133707
+INGF0YA= 133708
+16fXmdeT 133709
+16fXmdeT15XXnQ== 133710
+2LTZg9ix 133711
+IOG7lQ== 133712
+IOG7lW4= 133713
+44GM44GC44Gj44Gm 133714
+44GV44KM44G+44GZ 133715
+INeX15XXkw== 133716
+INeX15XXk9ep15nXnQ== 133717
+2YXZiNin2KzZhw== 133718
+2YXZiNin2KzZh9ip 133719
+2KPYtNiu2KfYtQ== 133720
+2KjYug== 133721
+4LmA4Lij4Li14Lii4LiZ4Lij4Li54LmJ 133722
+44GX44Gm44GE44GP 133723
+IHPhuqFu 133724
+5b+F44Ga 133725
+16DXmdeS 133726
+16DXmdeS15XXkw== 133727
+2KjYp9mE2Lo= 133728
+15fXqdee 133729
+15fXqdee15w= 133730
+IG5hcHJhdw== 133731
+IG5hcHJhd2TEmQ== 133732
+2LTZh9in2K8= 133733
+15DXldeU 133734
+15DXldeU15E= 133735
+0LjRhtGL 133736
+INeU16jXm9eR 133737
+656R 133738
+INeq16I= 133739
+INeU15nXqQ== 133740
+INeU15nXqdeo15A= 133741
+INeU15nXqdeo15DXnNeZ 133742
+2KPZhdmG 133743
+0Y7RidCw0Y8= 133744
+c2vDs3I= 133745
+TEVSxLA= 133746
+INeU15DXl9eo15XXnw== 133747
+16LXoNen 133748
+INmI2YPZhA== 133749
+44GT44GT44Gn 133750
+IHF1w6Fu 133751
+bGnEn2lu 133752
+4LiB4LiO4Lir4Lih4Liy4Lii 133753
+2LfZhQ== 133754
+2KPYrNmH 133755
+2KPYrNmH2LLYqQ== 133756
+IEVyZG/En2Fu 133757
+44Gn44GK 133758
+INCy0YDQsA== 133759
+INCy0YDQsNGH 133760
+IFBow7M= 133761
+4LiK4Lix4LmI4Lin 133762
+4LiK4Lix4LmI4Lin4LmC4Lih 133763
+4LiK4Lix4LmI4Lin4LmC4Lih4LiH 133764
+IHBow7pj 133765
+15nXpNeV16o= 133766
+16LXmdeV158= 133767
+IGR1xbxv 133768
+44OB44O844Og 133769
+INmK2Y4= 133770
+INC30LDQtNCw0Yc= 133771
+INeS15HXldeU15Q= 133772
+INeb15vXnA== 133773
+0LvQvtC20LXQvQ== 133774
+w6l0YXQ= 133775
+IG5nxINu 133776
+6LW344GN 133777
+IFRp4bq/bg== 133778
+2LXYudio 133779
+IGV4cGVyacOqbmNpYQ== 133780
+2K7ZhQ== 133781
+4LiB4Liy4Lij4LiX4Liz4LiH4Liy4LiZ 133782
+2LPZitiv 133783
+IEThu7E= 133784
+INC60L7RgtC+0YDQvtCz0L4= 133785
+bGFkxLHEn8Sx 133786
+IGto4buV 133787
+IOqzhOyGjQ== 133788
+0YnQuNC6 133789
+4Liq4LmI4Lin4LiZ4LiV4Lix4Lin 133790
+0LfQvtGA 133791
+2YbZjw== 133792
+IOC4lOC4seC4hw== 133793
+IOC4lOC4seC4h+C4meC4seC5ieC4mQ== 133794
+IGPhuqV1 133795
+IMSR4buRYw== 133796
+0L7RhA== 133797
+INin2YTYo9i52YXYp9mE 133798
+44Gq44GP44Gm44KC 133799
+15XXm9eZ150= 133800
+4LmB4Lib 133801
+IELDqm4= 133802
+44Ov44Oz 133803
+IGdpw6Ft 133804
+IMWedQ== 133805
+IGTDoW5n 133806
+2LnZhNmK 133807
+4LmA4LiB4Lip 133808
+4LmA4LiB4Lip4LiV4Lij 133809
+2YjYrNio 133810
+0L3QvdGL0LU= 133811
+2YLYttin2KE= 133812
+4LiE4Lin4Lia 133813
+4LiE4Lin4Lia4LiE4Li4 133814
+4LiE4Lin4Lia4LiE4Li44Lih 133815
+44Gk44Gk 133816
+IFZp4buHYw== 133817
+157XkdeY 133818
+16nXmdeq15XXow== 133819
+INCy0LXQtNGM 133820
+a2F6YQ== 133821
+a2F6YcWC 133822
+4LiV4Liz4Lij4Lin4LiI 133823
+44K/44Or 133824
+INC/0L7QstGL 133825
+INC/0L7QstGL0YjQtdC9 133826
+IFPhu58= 133827
+IOyEpOuqhQ== 133828
+IMOHw7xua8O8 133829
+7IOd7Zmc 133830
+1r4= 133831
+44KM44Gm44GE44KL 133832
+INeR16jXkNep 133833
+16jXldeS 133834
+INC+0YTQuA== 133835
+INC+0YTQuNGG0LjQsNC70YzQvQ== 133836
+INGD0YHRgtCw0L3QvtCy 133837
+INGD0YHRgtCw0L3QvtCy0LvQtdC9 133838
+INin2YTZhdi12LE= 133839
+INin2YTZhdi12LHZitip 133840
+INCf0L7RjdGC0L7QvNGD 133841
+2YbYtdmB 133842
+INmI2KfZhNmG 133843
+IGjDoGk= 133844
+4LiE4Li0 133845
+IEFwcsOocw== 133846
+7LOQ 133847
+4LmA4LiL4Li14Lii 133848
+15PXnteU 133849
+YWN0aXZpdMOp 133850
+4LiE4Li04LiU4Lin4LmI4Liy 133851
+0YLRgNC10L0= 133852
+4LmA4Liu 133853
+44OP44Kk 133854
+44GM5aKX44GI 133855
+0LXQvdC90LDRjw== 133856
+IOyYpOuKmA== 133857
+44Oi44Oz 133858
+INC60L7QvdC10YfQvdC+ 133859
+INmF2YLYp9io2YQ= 133860
+Y2zDqQ== 133861
+IGjDvA== 133862
+IHRo4bqzbmc= 133863
+7KCB7J20 133864
+INCQ0LvQtdC60YE= 133865
+INCQ0LvQtdC60YHQsNC9 133866
+INCQ0LvQtdC60YHQsNC90LTRgA== 133867
+44Oe44Oz44K344On44Oz 133868
+44Gy44Go44Gk 133869
+44Gq44GK 133870
+4LmA4LiI4LmJ4Liy4LiC4Lit4LiH 133871
+65Oc66as 133872
+2LTYp9ih 133873
+IHNhxJ9sxLFr 133874
+IMWfaW1kaQ== 133875
+15nXkNec 133876
+2KrYo9ir2YrYsQ== 133877
+2KPYs9io 133878
+2KPYs9io2KfYqA== 133879
+INCy0YvQv9C+0LvQvdC10L0= 133880
+0LvQvtC6 133881
+16nXmdeR15Q= 133882
+IGzhuq9t 133883
+IFRyxrDhu5tj 133884
+INeU16LXnA== 133885
+66as66W8 133886
+INGA0LXQtg== 133887
+INGA0LXQttC40Lw= 133888
+aW50w6k= 133889
+aW50w6lncg== 133890
+15LXoNeZ 133891
+INin2YTYtNi52LE= 133892
+IG1pbGjDtWVz 133893
+IHBlcXVlw7Fv 133894
+44Kz44O844K5 133895
+15XXm9eX 133896
+4LmA4LiK4LmJ4Liy 133897
+2LTYsdmC 133898
+IGjGsMahbmc= 133899
+4Lij4Lix4LiQ4Lia4Liy4Lil 133900
+4LiB4Lil4Liy4Lii 133901
+4LiB4Lil4Liy4Lii4LmA4Lib4LmH4LiZ 133902
+INC/0L7QtNGF0L7QtA== 133903
+16rXqdeV15HXlA== 133904
+44GP44Gq44Gj44Gm 133905
+INin2YTYo9mF2YU= 133906
+IEjhu41j 133907
+IHdzcMOzxYJwcg== 133908
+IHdzcMOzxYJwcmFj 133909
+0YfRg9Cy 133910
+0YfRg9Cy0YHRgtCy 133911
+w61zdGljbw== 133912
+4LmA4LiB4Liy4Liw 133913
+7JuA 133914
+INC90LDQt9Cw0LQ= 133915
+44KL44KI44GG44Gr 133916
+INCh0Kg= 133917
+INCh0KjQkA== 133918
+0LzQvtC9 133919
+IEFzw60= 133920
+15XXqNeS 133921
+0L/QvtC70L3QtdC9 133922
+157Xodec 133923
+157Xodec15XXnA== 133924
+4LmA4Lil4Li34Lit4LiU 133925
+4LmA4Lij4Li04LmI4Lih4LiV4LmJ4LiZ 133926
+INin2YTYpdmF 133927
+INin2YTYpdmF2KfYsdin2Ko= 133928
+16bXlNeo 133929
+44Oh44Oq44OD44OI 133930
+INC/0L7RgtC+0Lw= 133931
+0LLQuNC3 133932
+INmB2KrYsdip 133933
+5b6M44Gu 133934
+0J3QkA== 133935
+157Xodeo 133936
+2YrYsdmK 133937
+cHLDqQ== 133938
+IHRlxZ9law== 133939
+IHRlxZ9la2vDvHI= 133940
+IMO2ZGVtZQ== 133941
+2K/Yp9mG 133942
+44G+44GX44Gm 133943
+55uu44Gr 133944
+INGC0LXRh9C10L3QuNC1 133945
+bGFyZA== 133946
+bGFyZMSxcg== 133947
+4LmA4Lij4Liy4LiI4Liw 133948
+16HXpNeZ 133949
+INmI2YPYsNmE2YM= 133950
+IGjDoXQ= 133951
+IHThu5lj 133952
+4LiE4Li44Lii 133953
+IGLhu6lj 133954
+2K3ZitmG 133955
+6IGe44GE44Gm 133956
+2YXYpNi02LE= 133957
+IE5oxrA= 133958
+INC80LXQvdC10LU= 133959
+4Lil4Liw4LiE4Lij 133960
+0YHQuNC9 133961
+INGA0LXQug== 133962
+INGA0LXQutC7 133963
+INGA0LXQutC70LDQvA== 133964
+INmB2YfZiA== 133965
+INec15Y= 133966
+15nXoNeV16o= 133967
+IMWfYXJ0 133968
+0YHRgtCw0LLQutCw 133969
+IO2PrO2VqA== 133970
+44Gr6KGM44GP 133971
+77yd 133972
+INC/0L7Qt9Cy0L7Qu9GP0LXRgg== 133973
+INeq15XXm9ec15U= 133974
+0L7QstCw0Ls= 133975
+2LXZhNip 133976
+INec16nXoNeV16o= 133977
+INCY0LPRgA== 133978
+2YXZhtiq2KzYp9iq 133979
+IHNhdMSxxZ8= 133980
+0YHQutC+ 133981
+INin2YTYq9mE2KfYq9in2KE= 133982
+INeU15PXkdeo15nXnQ== 133983
+44GX44G+44GX44KH44GG 133984
+2KjZgtmJ 133985
+5Yqb44KS 133986
+IMOHb2s= 133987
+44OB44Ol 133988
+4LmA4LiK4Li34LmJ4Lit 133989
+4Lii4Li44LiE 133990
+4Lio4Liy4Lil 133991
+INen15XXk9ed 133992
+15bXqNeZ150= 133993
+44Gu5aC05ZCI 133994
+IOyViuyVmA== 133995
+44GC44KK44G+44GZ44GM 133996
+15DXqdeo 133997
+6KGM44GP 133998
+44G744GL 133999
+5rCX44Gr44Gq44KL 134000
+0LnQtNC10YI= 134001
+7ZWY7JiA64uk 134002
+2LPYqtmF2LHYp9ix 134003
+INCf0YDQtQ== 134004
+INGB0LHQvtGA 134005
+IOyVhOustA== 134006
+56eB44KC 134007
+2LnYtQ== 134008
+INC90LjRhw== 134009
+INC90LjRh9C10LPQvg== 134010
+INC/0YDQuNC10Lw= 134011
+16fXldee 134012
+IOyImOuPhA== 134013
+IOyhtA== 134014
+IOyhtOyerA== 134015
+INij2KvZhg== 134016
+INij2KvZhtin2KE= 134017
+INmI2KfZhNit 134018
+44GM44Gn44GN44KL 134019
+INeq15Q= 134020
+INeq15TXmdeU 134021
+16jXnw== 134022
+INGB0LLRj9C30Lg= 134023
+15LXqdeq 134024
+0YHQv9C10LrRgg== 134025
+16HXkdeZ15E= 134026
+16HXkdeZ15HXlA== 134027
+IO2VhOyalO2VnA== 134028
+2KrYrti12LU= 134029
+INC20LjQsg== 134030
+INC20LjQstC+0YI= 134031
+IE1hecSxcw== 134032
+2KrYudin 134033
+2KrYudin2YjZhg== 134034
+INi52YbZh9in 134035
+w7N3a2k= 134036
+INin2YTZgdmE2LPYt9mK2YbZig== 134037
+44Gg44GR44Gn44Gq44GP 134038
+7J247KeA 134039
+INin2YTYs9mI2K8= 134040
+INin2YTYs9mI2K/Yp9mG 134041
+2KXYrNix2KfYodin2Ko= 134042
+IGvDtnTDvA== 134043
+INeZ16rXqA== 134044
+15LXmdep15Q= 134045
+INem15XXqNea 134046
+4Lij4LiW4Lii 134047
+4Lij4LiW4Lii4LiZ4LiV4LmM 134048
+0YXQvtGC 134049
+0KDQkA== 134050
+2YjYt9mG 134051
+IHNhecSxc8Sx 134052
+16HXl9eo 134053
+2YXZiNmE 134054
+44KS5oyB44Gj44Gm 134055
+2LnYp9mG 134056
+IHThu5lp 134057
+INCy0YvRiNC1 134058
+IHThuqdt 134059
+44OI44Os 134060
+15nXpteV 134061
+4Lih4Li44Lih 134062
+2LPZiNiv 134063
+7KCE7J6Q 134064
+44K144Ot44Oz 134065
+7IKw7JeF 134066
+INC+0YHQvdC+0LLQsNC9 134067
+2K7Zgdi2 134068
+16jXpteU 134069
+2KjZiti2 134070
+15XWuQ== 134071
+16HXmdeZ16I= 134072
+INep15DXmQ== 134073
+INin2YTZgtix2KLZhg== 134074
+INCi0LDQutC20LU= 134075
+157Xqdee16LXldeq 134076
+2LPZh9mE 134077
+INeU16DXlA== 134078
+44KS44GX44Gm44GE44KL 134079
+15nXmdeh 134080
+15TXldeQ 134081
+IELDrQ== 134082
+INC80LDQu9C+ 134083
+IOuUsOudvOyEnA== 134084
+INeo15fXkQ== 134085
+44GM6auY44GE 134086
+2YjYp9iz 134087
+7IK8 134088
+16DXog== 134089
+44Gj44Gh44KD 134090
+IFTDvG0= 134091
+4Lit4Li14LiB4LiU4LmJ4Lin4Lii 134092
+44GX44Gm44GP44Gg44GV44GE 134093
+2YbYtNin2Lc= 134094
+44OX44Op44Oz 134095
+0LDQu9C40YHRjA== 134096
+15PXnNeq 134097
+IHdjemXFmw== 134098
+IHdjemXFm25pZWo= 134099
+INGN0YLQuNC8 134100
+IHRo4buLdA== 134101
+4Lia4Lix4LiN 134102
+4Lia4Lix4LiN4LiK4Li1 134103
+44Ga44Gj44Go 134104
+0YDQuNC9 134105
+IHN3b2rEhQ== 134106
+7ZWY64qU642w 134107
+IOunjOuTpOyWtA== 134108
+2KrYtNmD 134109
+2KrYtNmD2YrZhA== 134110
+2KfYptmH 134111
+INec16TXl9eV16o= 134112
+44OL44Ol 134113
+44OL44Ol44O844K5 134114
+15vXkNef 134115
+44Gn44GN44Gf 134116
+0LfQstC+0L0= 134117
+IHN0YcWC 134118
+15fXkdeo16rXmQ== 134119
+INij2LnZhNmG 134120
+4LmB4Lia4Lia4LiZ4Li14LmJ 134121
+2KjYr9ih 134122
+44KB44Gf 134123
+INee16nXntei15XXqg== 134124
+INee16nXntei15XXqteZ 134125
+w7Zyw7w= 134126
+IGjhuqFuaA== 134127
+esOkaGw= 134128
+IEzDvQ== 134129
+INeR15TXqg== 134130
+INeR15TXqteQ150= 134131
+0LHQsNGA 134132
+7KaI 134133
+5LuK5Zue44Gu 134134
+IHnDvA== 134135
+IHnDvGtz 134136
+IHnDvGtzZWw= 134137
+44K944O8 134138
+44GC44KM 134139
+16rXnNee15nXkw== 134140
+44Gk44Gq 134141
+15HXoNeZ150= 134142
+IHjhur9w 134143
+INC80YPQttGH0LjQvQ== 134144
+INin2YTZg9iq2KfYqA== 134145
+15vXnteV16o= 134146
+IMOnZQ== 134147
+IMOnZcWf 134148
+IMOnZcWfaXQ= 134149
+IMOnZcWfaXRsaQ== 134150
+15PXmdeo15XXqg== 134151
+4Lia4Li44LiN 134152
+INin2YTYpdmE2YM= 134153
+INin2YTYpdmE2YPYqtix2Yg= 134154
+INin2YTYpdmE2YPYqtix2YjZhtmK 134155
+INio2KfZhNil2LY= 134156
+INio2KfZhNil2LbYp9mB2Kk= 134157
+IHnDtm5lbA== 134158
+IHnDtm5lbGlr 134159
+bXlzxYI= 134160
+4LiU4LmJ4Lin4Lii4LiB4Liy4Lij 134161
+4LiB4Liy4Lij4LiX4Liz 134162
+0L7QstGL0Lw= 134163
+2KPYstmF2Kk= 134164
+5o6i44GX 134165
+7Zqo 134166
+INeV15DXnQ== 134167
+IG5naGnDqm0= 134168
+0YjQuNC9 134169
+0LrQsNC7 134170
+IGNyaWFuw6dhcw== 134171
+6Ieq5YiG44Gn 134172
+INC90LDQuQ== 134173
+INC90LDQudGC0Lg= 134174
+IFPhu5E= 134175
+IMO2xJ9yZW5jaWxlcg== 134176
+44O25pyI 134177
+0YHQsNC9 134178
+IErDoQ== 134179
+IGtvbnXFn21h 134180
+2LTYsdi3 134181
+64iI 134182
+YXJyacOocmU= 134183
+2LbYsdmI2LHYqQ== 134184
+44OU44Oz 134185
+16LXqdeo 134186
+0LDRgNGM 134187
+2KzZhdin2Lk= 134188
+IGTDqWNv 134189
+INeZ15TXldeT15k= 134190
+4Lie4Lil4Liy4LiU 134191
+INmK2YPZhg== 134192
+INis2KfZhdi52Kk= 134193
+2LfYqNmC 134194
+IGJvxZ8= 134195
+15XXldeQ 134196
+157Xk9ei 134197
+16fXkdeV16bXqg== 134198
+16TXmdeo 134199
+asSFY3lt 134200
+2YXYtNin 134201
+2YXYtNin2YPZhA== 134202
+16bXpNeV158= 134203
+2KXYs9iq 134204
+157Xm9eo 134205
+2LPZhdi5 134206
+INC60LDQutC+0Lk= 134207
+0YLQstC+0YA= 134208
+2K3YrA== 134209
+2YHYsdi2 134210
+0L/RgNCw0LLQu9C10L0= 134211
+INC90LjQutCw0Lo= 134212
+IG1p4buH 134213
+IG1p4buHbmc= 134214
+w7zDnw== 134215
+0LjRgNC+0LLQsNC7 134216
+15zXnteV16o= 134217
+5qyh44Gu 134218
+2YTYtw== 134219
+4LiV4Lix4LiZ 134220
+15TXqteX15nXnA== 134221
+IGZvdG/Enw== 134222
+IGZvdG/En3JhZg== 134223
+2LfYsdit 134224
+4Lit4Lit4LiB4LmE4Lib 134225
+IHnDqm4= 134226
+INC/0L7Qug== 134227
+INC/0L7QutGD0L8= 134228
+INC/0L7QutGD0L/QsA== 134229
+0YbRgw== 134230
+INC60L7QvNC/0YzRjg== 134231
+INC60L7QvNC/0YzRjtGC0LXRgA== 134232
+INin2YTZg9ix2YrZhQ== 134233
+2KrYtdmF 134234
+2KrYtdmF2YrZhQ== 134235
+INC+0LrQsNC30LA= 134236
+IHphcsOzd24= 134237
+IHphcsOzd25v 134238
+64yA7Lac 134239
+44K744Oz44K/44O8 134240
+IGpha2/Fm2Np 134241
+5oKp 134242
+5oKp44G/ 134243
+2KPZhtmI 134244
+2KPZhtmI2KfYuQ== 134245
+67mg 134246
+IOygleunkA== 134247
+IGvhurs= 134248
+INGB0LDQudGC0LA= 134249
+INeU16LXqNeR 134250
+2YfYsg== 134251
+cHJlc2nDs24= 134252
+INGB0YLQtdC9 134253
+44Gj44Gm44KL 134254
+IGjEsXpsxLE= 134255
+0JrQkA== 134256
+157Xqdek15fXqg== 134257
+INmG2YfYpw== 134258
+INmG2YfYp9mK2Kk= 134259
+44G+44GE 134260
+0L7RhdGA0LDQvQ== 134261
+4Lij4LmJ4Lit4Lii 134262
+4Lil4Li24LiB 134263
+INmI2KjYp9mE 134264
+44KC44Gu44GM 134265
+16jXm9eZ15E= 134266
+44Kk44Ok 134267
+2LPYpA== 134268
+2LPYpNin2YQ= 134269
+INmE2KPZhtmH 134270
+IGtvbnXFn3R1 134271
+0JrRg9C/0LjRgtGM 134272
+INep15DXqteU 134273
+INmI2KfZhNiz 134274
+IG1vxbxsaXdvxZtjaQ== 134275
+IHByw7Ni 134276
+65Sw 134277
+44Gp44KM 134278
+INCc0LjQvQ== 134279
+INC+0YDQs9Cw0L3QuNC30Lw= 134280
+44Gr5a++44GZ44KL 134281
+IFByw6k= 134282
+IHByaXbDqQ== 134283
+Y2jDqA== 134284
+44GE44Gf44Gg44GN 134285
+4Liq4LiZ4Li44LiB 134286
+YWrEhWNl 134287
+IER6aQ== 134288
+IER6acSZa2k= 134289
+xYJhdHc= 134290
+csOkbg== 134291
+csOkbms= 134292
+5p2l44Gf 134293
+INeU15nXlNeV15PXmQ== 134294
+44Ks44O8 134295
+INGA0LDQtA== 134296
+INGA0LDQtNC4 134297
+0LrRgtC40LI= 134298
+2KPZh9iv 134299
+2KPZh9iv2KfZgQ== 134300
+16nXkNeZ16g= 134301
+44Gm44GE44Gq44GE 134302
+IGZyw7xo 134303
+INC+0LrQvtC7 134304
+INC+0LrQvtC70L4= 134305
+IHJlZ2nDo28= 134306
+INGH0LjRgdC70LU= 134307
+IHBvbmlldw== 134308
+IHBvbmlld2HFvA== 134309
+7IS87YSw 134310
+IGLhuqd1 134311
+IOq3 134312
+IOq3nA== 134313
+IOq3nOyglQ== 134314
+IEjDsmE= 134315
+INGC0L7Rgg== 134316
+44KC5aSa44GE 134317
+INin2YTYpdiz2YTYp9mF2YrYqQ== 134318
+44GL44GE 134319
+0Y3QvQ== 134320
+INGD0LrQsNC30LDQvQ== 134321
+INGC0LDQutC+0LU= 134322
+77yz 134323
+64yA7ZWZ 134324
+IGdlbmnFnw== 134325
+INin2YTYrtmK 134326
+INin2YTYrtmK2KfYsdin2Ko= 134327
+44KS6KGM44GG 134328
+16nXnteU 134329
+IEzDoG0= 134330
+2YjZhtmK 134331
+INeQ15zXmdeV 134332
+xJg= 134333
+4LmE4Lih4LmI4Liq4Liy4Lih4Liy4Lij4LiW 134334
+5Lq644Go 134335
+2KjYsdiy 134336
+15nXodeV15M= 134337
+15LXnNeZ 134338
+INmK2YbYpw== 134339
+INmK2YbYp9mK2LE= 134340
+INC60LDRgNGC0LjQvQ== 134341
+IHTDtG4= 134342
+4LmA4LiB4Lij 134343
+4LiE4LiU4Li1 134344
+INec15DXldeo15o= 134345
+44KC44KJ44GG 134346
+44GL44GL44KL 134347
+0LDQvdC40Lg= 134348
+IGFyYcWfdMSxcm1h 134349
+2YTYp9it2Lg= 134350
+44GE44KE 134351
+IFTDoGk= 134352
+IOC4meC4reC4geC4iOC4suC4gQ== 134353
+IOC4meC4reC4geC4iOC4suC4geC4meC4teC5iQ== 134354
+IMSQ4bqjbmc= 134355
+44Gj44Gm44GN44Gf 134356
+IOC4i+C4tuC5iOC4h+C5gOC4m+C5h+C4mQ== 134357
+IHThuqM= 134358
+IG1vxbxsaXdvxZvEhw== 134359
+IFPhuqNu 134360
+IMSwa2k= 134361
+IGPhuq90 134362
+2LPYo9mE 134363
+IGJha8SxbQ== 134364
+2LTYqA== 134365
+4LiV4Li14LmJ 134366
+4Lie4Lii4Liy4Lii 134367
+4Lie4Lii4Liy4Lii4Liy4Lih 134368
+4Liq4Lix4Lib 134369
+4Liq4Lix4Lib4LiU4Liy 134370
+4Liq4Lix4Lib4LiU4Liy4Lir4LmM 134371
+67CA 134372
+0LXRgNGL 134373
+IGPDoW5o 134374
+IHRodeG6vw== 134375
+2KrYqNi5 134376
+44Gr5YWl44KM 134377
+0Y7RgdGM 134378
+7ZqM7J2Y 134379
+57Ch5Y0= 134380
+57Ch5Y2Y 134381
+57Ch5Y2Y44Gr 134382
+IHRyw7pj 134383
+INin2YTZg9mI2Yo= 134384
+INin2YTZg9mI2YrYqg== 134385
+44KP44GR44Gn44GZ 134386
+INGB0LLQvtCx 134387
+INGB0LLQvtCx0L7QtA== 134388
+INGD0YfQsNGB0YLQvdC40Lo= 134389
+4Liq4Li04LmJ4LiZ 134390
+INC/0YDQvtGE0LXRgdGB0LjQvtC90LA= 134391
+INC/0YDQvtGE0LXRgdGB0LjQvtC90LDQu9GM0L0= 134392
+0YHQv9C+0YA= 134393
+15fXldeR15Q= 134394
+2YXYudmG2Yk= 134395
+INin2YTZgdiq2LHYqQ== 134396
+4Liq4Li54LiH4Liq4Li44LiU 134397
+44KP44Ga 134398
+IMSRw6g= 134399
+IMSRw6hu 134400
+5q+U44G5 134401
+4Liy4LiY4Li0 134402
+IG1vxbxlbXk= 134403
+4LmB4LiL 134404
+4LiI4Liw4LmE4Lih4LmI 134405
+IHPhuq9w 134406
+0JrQng== 134407
+IHByw6FjdGljYQ== 134408
+2YjZg9in2YTYqQ== 134409
+6L6844KT44Gn 134410
+b2zDs2dpY2E= 134411
+INC10Yk= 134412
+INC10YnRkQ== 134413
+2KrYudiv2YrZhA== 134414
+INij2YPYrw== 134415
+INem16jXmdeb 134416
+INem16jXmdeb15nXnQ== 134417
+2KvZhQ== 134418
+INC60YDRgw== 134419
+INC60YDRg9C/ 134420
+15HXmden15XXqNeq 134421
+IOyhsOq4iA== 134422
+44Go44GN44Gv 134423
+IGLhuqFj 134424
+INGA0LDRgdC/0L7Quw== 134425
+INGA0LDRgdC/0L7Qu9C+0LY= 134426
+INGA0LDRgdC/0L7Qu9C+0LbQtdC9 134427
+2LLZitmG 134428
+INCa0YDQvtC80LU= 134429
+INin2YTZhti42LE= 134430
+15TXldeT 134431
+INin2YTYs9io2Ko= 134432
+44Go5oCd44GE 134433
+IHBhxYRzdA== 134434
+IHBhxYRzdHc= 134435
+INmE2YrYs9iq 134436
+INCx0YPQtNGD 134437
+4LiX4Lix4LiZ4LiX4Li1 134438
+4Lij4Liy4Lih 134439
+2K3YtdmI2YQ= 134440
+44GX44Gm44GP44KM44KL 134441
+INin2YTYpdiz2LHYp9im2YrZhA== 134442
+INin2YTYpdiz2LHYp9im2YrZhNmK 134443
+44GT44KM44G+44Gn 134444
+7IKs66W8 134445
+IHPDvHLDvA== 134446
+4LmA4Lin4Lit4Lij4LmM 134447
+4LmA4LiL4Lit4Lij4LmM 134448
+IHV0aWxpc8Op 134449
+INGB0LjRgdGC0LXQvNCw 134450
+IGR3w7M= 134451
+IGR3w7NjaA== 134452
+IHByw7Nwcmlv 134453
+IOuTseydhA== 134454
+YXJyw6p0 134455
+INCn0LA= 134456
+15DXnteg15XXqg== 134457
+2LnYp9ix2LY= 134458
+4LmA4LiB4Lih4Liq4LmM 134459
+INec15TXkdeZ158= 134460
+INec15HXlw== 134461
+INec15HXl9eV16g= 134462
+4Liq4Liy4LiC4Liy 134463
+INCc0L7RgdC60LLQtQ== 134464
+2KjYudiv 134465
+INin2YTZgtix2KfYsQ== 134466
+IMSQ4buLYQ== 134467
+INeX15I= 134468
+2YHYqtix 134469
+2YjZhtip 134470
+INeU15bXkNeq 134471
+5biC44Gu 134472
+44G744GX44GE 134473
+INeR16LXmdeo 134474
+INGC0LXQv9C10YDRjA== 134475
+7Iq164uI6rmM 134476
+4LmE4Lih4LmI4Lin 134477
+4LmE4Lih4LmI4Lin4LmI4Liy 134478
+4LmE4Lih4LmI4Lin4LmI4Liy4LiI4Liw 134479
+157XkNeU 134480
+5oOF5aCx 134481
+5oOF5aCx44KS 134482
+2LrZhg== 134483
+INC/0L7Rjw== 134484
+INC/0L7Rj9Cy0Lg= 134485
+6YGO44GU 134486
+2KrYtNi6 134487
+2KrYtNi62YrZhA== 134488
+0LLQtdC7 134489
+INeX154= 134490
+44Go44Gq44KK44G+44GZ 134491
+IHJhxJ8= 134492
+IHJhxJ9tZW4= 134493
+44GL44Gp44GG 134494
+44GL44Gp44GG44GL 134495
+0LXQvdC60L4= 134496
+7KeA6rOg 134497
+INeQ15zXmdeU 134498
+INij2YQ= 134499
+4LiI4Liz4Lir4LiZ 134500
+4LiI4Liz4Lir4LiZ4LmI4Liy4Lii 134501
+bsSxesSx 134502
+INec16fXl9eq 134503
+2KPZh9mF 134504
+2KPZh9mF2YrYqQ== 134505
+2KrYutmK2LE= 134506
+16nXl9eo 134507
+16HXldek16g= 134508
+15PXmdeo 134509
+6Imv44GL44Gj44Gf 134510
+157XnNeX157XlA== 134511
+0YHRgtCy0LjQtQ== 134512
+0YLRgNCw0YI= 134513
+INin2YTYo9iu 134514
+INin2YTYo9iu2YrYsdip 134515
+INin2YTYrdi12YjZhA== 134516
+IGNyw6lkaXRv 134517
+16bXmdei 134518
+44Os44OZ44Or 134519
+2KjYsdmK 134520
+65CQ 134521
+44Gg44Gj44Gm 134522
+IHJlYWx0w6A= 134523
+2LPZgdix 134524
+15XXoNeV 134525
+15LXldeT 134526
+15LXldeT15w= 134527
+4Liu4Liy 134528
+44GX44Gm44GK44KK44G+44GZ 134529
+IGfDoA== 134530
+INec15HXptei 134531
+5byV6LaK44GX 134532
+INee15nXnNeZ 134533
+INee15nXnNeZ15XXnw== 134534
+2YXYr9ix 134535
+2YXYr9ix2LPYqQ== 134536
+16TXldeY 134537
+4LiZ4LmJ4Liz4Lih4Lix4LiZ 134538
+64Gd 134539
+2LnZg9iz 134540
+INmC2LY= 134541
+INGA0YvQsQ== 134542
+2K7Yt9i3 134543
+157Xldeh15M= 134544
+INeb15zXnNeZ 134545
+INC60L7RgtC+0YDQvtC1 134546
+16bXmdeV158= 134547
+INC80LXRgdGC0LA= 134548
+44GL44Gk 134549
+0LPRgNGD0L/Qvw== 134550
+15zXmdec 134551
+16rXldeQ16g= 134552
+67O17KeA 134553
+4LmB4Lic4LmI4LiZ 134554
+INeR16LXqg== 134555
+5pmC6ZaT44KS 134556
+77yj 134557
+44Go44GE44GG44GT44Go44Gn 134558
+INec15TXpw== 134559
+INec15bXlA== 134560
+IOyggOuKlA== 134561
+INin2YTYpdix2YfYp9io 134562
+IOyeiOuKlOuNsA== 134563
+INGC0L7Qs9C00LA= 134564
+INeU16bXmQ== 134565
+15XXnNeY 134566
+INeo16TXldeQ15k= 134567
+44GT44Go44Gn44GZ 134568
+IMSRw61jaA== 134569
+2K3Zitin 134570
+INeU157XqdeX16c= 134571
+44Gc44Gy 134572
+INee15DXpNep16g= 134573
+44G/44G+44GX44Gf 134574
+INin2YTYo9mF2YrYsdmD2Yo= 134575
+2YXYrNiq2YXYuQ== 134576
+INiz2KfYqA== 134577
+INiz2KfYqNmC 134578
+15vXmdec 134579
+4bq+ 134580
+44Oq44K544OI 134581
+IOyD 134582
+IOyDiA== 134583
+IOyDiOuhnA== 134584
+IOyDiOuhnOyatA== 134585
+IEThu4tjaA== 134586
+4LmA4Lir4Lih4Liy4Liw4Liq4Lih 134587
+INin2YTZhtio2Yo= 134588
+15zXnA== 134589
+2YbYuQ== 134590
+0JPQu9Cw0LI= 134591
+0JPQu9Cw0LLQvdCw0Y8= 134592
+2YXYsdi2 134593
+INeV15M= 134594
+2KrZgtmK 134595
+2KrZgtmK2YrZhQ== 134596
+IGLhuqNuZw== 134597
+INmB2YLYp9mE 134598
+16LXnteZ 134599
+0LTRgNCw 134600
+IHN14buRdA== 134601
+2LPYsdi52Kk= 134602
+IGPhu60= 134603
+INeU15nXl9eZ15M= 134604
+2LPYudmK2K8= 134605
+4Lit4Liy4LiK4Li14Lie 134606
+INiz2YjYp9ih 134607
+44K944OV44OI 134608
+INC70LjRh9C90L4= 134609
+INCa0L7RgA== 134610
+2KfZh9iq2YU= 134611
+2KfZh9iq2YXYp9mF 134612
+4Lit4LiU4Li1 134613
+4Lit4LiU4Li14LiV 134614
+44GQ44KJ44GE 134615
+IGlodGl5YQ== 134616
+IGlodGl5YcOn 134617
+44G+44Gn44Gu 134618
+7Iuc7Iqk 134619
+7Iuc7Iqk7YWc 134620
+0YDRg9GI 134621
+44KE44Gj44Gx 134622
+44KE44Gj44Gx44KK 134623
+0LrQtdGA 134624
+IMW8eQ== 134625
+IMW8eXc= 134626
+0LrQu9C+0L0= 134627
+IGzGsOG7o3Q= 134628
+w74= 134629
+0LTQsNGH0Lg= 134630
+dMO8cms= 134631
+2LrZiA== 134632
+INC40LPRgNC+0Lo= 134633
+IHBow6o= 134634
+INep16LXnA== 134635
+INin2YTZhdiv2YbZig== 134636
+IOyXrOufrOu2hA== 134637
+16LXqNeZ150= 134638
+0YXQvtC00Y/Rgg== 134639
+IHjhu6k= 134640
+0JfQsA== 134641
+INmB2LHYtQ== 134642
+4LiI4Liw4LiX4Liz4LmD4Lir4LmJ 134643
+7YG0 134644
+16LXkdeV16g= 134645
+4LmA4Lir4Lil4LmI4Liy4LiZ4Li14LmJ 134646
+6ICD44GI44KL 134647
+0YDQtdGB0YI= 134648
+0L3QvdGL0Lk= 134649
+IGPhuqdt 134650
+2K/Yp9iu2YQ= 134651
+INmF2YTZitin2LE= 134652
+INCQ0Ls= 134653
+INCy0YDQtdC80LXQvQ== 134654
+4LiK4LmI4Lin4Lii4LmD4Lir4LmJ 134655
+16jXmdeV16o= 134656
+65Ov 134657
+6aOy44G/ 134658
+16DXnA== 134659
+16nXqtej 134660
+INin2YTYs9i52YjYr9mK 134661
+dcOf 134662
+7J24642w 134663
+IOydvOuwmA== 134664
+xYLEmQ== 134665
+IG3hu5Fp 134666
+157Xmdeg 134667
+INin2YTYo9i32YHYp9mE 134668
+IMOnxLFrYW4= 134669
+w6ljb2xl 134670
+16fXmdep 134671
+16fXmdep15XXqA== 134672
+INC+0YHRg9GJ0LXRgdGC0LI= 134673
+INC+0YHRg9GJ0LXRgdGC0LLQu9GP 134674
+15HXkNeo 134675
+4LmE4Lib4LiU4LmJ4Lin4Lii 134676
+INei15XXnNeU 134677
+4LiB4LmH4LmE4Lih4LmI 134678
+44Oi44OH 134679
+44Oi44OH44Or 134680
+2KrYrdmI2YQ= 134681
+INC+0LTQvdC+0LPQvg== 134682
+16rXl9eZ15zXqg== 134683
+INiq2K4= 134684
+IGNoY2lh 134685
+IGNoY2lhxYI= 134686
+44OQ44Oz 134687
+6ICF44Gv 134688
+INmF2K3ZhA== 134689
+0YHQu9C+0LY= 134690
+0YHQu9C+0LbQvQ== 134691
+IHTEmQ== 134692
+IMOnxLFrdA== 134693
+IMOnxLFrdMSx 134694
+IEPGoQ== 134695
+4LmE4LiU4LmJ4LmA4Lil4Lii 134696
+xLFya2Vu 134697
+4LmA4LiC4LmJ4Liy4Liq4Li54LmI 134698
+2YXYrdmD 134699
+2YXYrdmD2YXYqQ== 134700
+4LiE4Li44LmJ4Lih 134701
+4LiZ4LmI4Liy4LiI4Liw 134702
+0LvRjtC0 134703
+0LTQtdGB0Y8= 134704
+0LTQtdGB0Y/Rgg== 134705
+INC70Y7QsdC+0Lk= 134706
+2KrYrdix2YrYsQ== 134707
+16bXoteT 134708
+INC10ZE= 134709
+INin2YTYrdmD2YU= 134710
+INi12KjYp9it 134711
+4LmA4Lia4Lit4Lij4LmM 134712
+IHLDs8W8bnljaA== 134713
+0LPQuNCx 134714
+INGB0L7Rgg== 134715
+INGB0L7RgtGA0YPQtA== 134716
+INGB0L7RgtGA0YPQtNC90LjQug== 134717
+INC+0LHRitC10Lw= 134718
+16TXmNeo 134719
+44GZ44GU44GP 134720
+44Gr6Zai44GX44Gm 134721
+0LLQvtC7 134722
+2KvZhdin2YY= 134723
+IGThuqdu 134724
+5oqc 134725
+5oqc44GR 134726
+INei16k= 134727
+INei16nXldeZ 134728
+16HXldef 134729
+44Gq44Gu44Gn44GZ 134730
+44Gv44Gp44GG 134731
+157Xoteo15E= 134732
+77yw 134733
+2YXYtdix 134734
+2YXZhtin2LPYqA== 134735
+2YXZhtin2LPYqNip 134736
+5LiK44Gu 134737
+15DXmdep15XXqA== 134738
+IOyEpOy5mA== 134739
+157Xk9eZ16DXldeq 134740
+157XqNeq 134741
+44KL44Gu44GM 134742
+2K/Zjg== 134743
+INin2YTYtNix2YPYp9iq 134744
+7Iuc6rCE 134745
+INGA0LXRiNC10L3QuNC1 134746
+44GZ44KL44Gu44Gv 134747
+IOyekOyLoOydmA== 134748
+15zXnteV 134749
+44Go44GT44KN44Gn 134750
+INen16bXqA== 134751
+IG3Do2k= 134752
+IGvDvGx0w7xy 134753
+44Op44Kk44OW 134754
+4Lic4Li54LmJ4Lir4LiN4Li04LiH 134755
+5pmC6ZaT44GM 134756
+0LrQu9GO0YfQuA== 134757
+ZGnEn2luaXo= 134758
+4Lih4Liy4LiB4LmG 134759
+2KrYrdmF2YQ= 134760
+IGjhuqF0 134761
+44Km44Kj 134762
+0L/Qu9C1 134763
+157XnNeQ 134764
+xYLDsw== 134765
+IGfhu5Fj 134766
+INeQ15XXk9eV16o= 134767
+4Lir4Lin4Liy4LiZ 134768
+INin2YTZiNiy 134769
+INin2YTZiNiy2LHYp9ih 134770
+65Ok6rO8 134771
+INi12K0= 134772
+INi12K3ZitmB2Kk= 134773
+INC80Lw= 134774
+2KrYr9iu2YQ= 134775
+IHBlcnPDtm5saWNo 134776
+INiy2Yo= 134777
+INiy2YrYp9iv2Kk= 134778
+44K344Ki 134779
+IG5n4bqvbg== 134780
+4LiE4Lil4Li04LiB 134781
+IHPDtG5n 134782
+IHTDvGtldA== 134783
+0Y3RhNGE 134784
+0Y3RhNGE0LXQutGC 134785
+16nXmdeR 134786
+INin2LnYqg== 134787
+2KrYtg== 134788
+2KrYttmF2YY= 134789
+INin2YTZhdi02LHZiNi5 134790
+IHByb2R1w6fDo28= 134791
+INC/0YDQuNC80LXQvdGP 134792
+0L3QuNGG0Ys= 134793
+7KO864qU 134794
+2LHZjw== 134795
+IG3GoQ== 134796
+IGhheWF0xLE= 134797
+65+9 134798
+IMO8Y3JldA== 134799
+IHlhbsSxbmRh 134800
+IHByw6F0aWNh 134801
+15HXmden15XXqA== 134802
+w5xO 134803
+0YHQvtGC 134804
+44KP44GR44Gn 134805
+INC00L7Qu9Cz0L4= 134806
+16rXm9eV 134807
+IOyVhOuLjA== 134808
+642w7J20 134809
+IMOnaXo= 134810
+IGNob8SH 134811
+INeU15nXqg== 134812
+INeU15nXqteo 134813
+IHNvw6F0 134814
+15vXkdeT 134815
+4LmA4Lil4LmI4Liy 134816
+INC00LXRgA== 134817
+INC00LXRgNC10LI= 134818
+44KS5YWl44KM 134819
+15fXldeh 134820
+15fXldeh16g= 134821
+2KzZitmG 134822
+dMOzbg== 134823
+b25uw6k= 134824
+INC/0L7Qu9C90L7RgdGC0YzRjg== 134825
+5Lq644Gf44Gh 134826
+IHByw6p0 134827
+66C4 134828
+IGTDqWNlbWJyZQ== 134829
+Y8SxbGFy 134830
+INeq16o= 134831
+IOqyveyasOyXkOuKlA== 134832
+2YjYudiv 134833
+6KaL44KL 134834
+4Lin4Li04LiI4Lix4Lii 134835
+67aI 134836
+2LLZiNin 134837
+2LLZiNin2Kw= 134838
+ZMOs 134839
+44Gn44GZ44KI 134840
+INCy0L7QtNC+ 134841
+INmK2YjYrNiv 134842
+0YHQvtGB0YLQvtGP 134843
+0J7QoQ== 134844
+IMSQw7M= 134845
+15fXpNep 134846
+INem15nXkdeV16g= 134847
+INin2YTZgti3 134848
+INin2YTZgti32KfYuQ== 134849
+INC40LzQtdGO0YI= 134850
+IHBo4bqtbg== 134851
+15vXodek15k= 134852
+0L/QvtC70L3QuNGC0LXQu9GM 134853
+6ZmQ44KK 134854
+INGB0YDQsNCy 134855
+INGB0YDQsNCy0L0= 134856
+2YXYp9mE2YM= 134857
+15PXqNeV150= 134858
+55qG44GV44KT 134859
+2K3ZgtmC 134860
+4LmB4Lir4Lil4LmI4LiH 134861
+INin2YTYsdiz2YXZig== 134862
+0L7Rh9C60Lg= 134863
+15jXkdeX 134864
+IGNhbmzEsQ== 134865
+INec15w= 134866
+INec15zXnteV15M= 134867
+157XkdeV 134868
+16rXmw== 134869
+16rXm9eg15nXqg== 134870
+INin2YTZhdi02KfYsQ== 134871
+INin2YTZhdi02KfYsdmD2Kk= 134872
+xLDFng== 134873
+INiz2YrYp9iz2Yo= 134874
+0LLQvtC70Yw= 134875
+INGB0L/RgNCw0LI= 134876
+5p2l44Gm 134877
+16TXldeo15XXnQ== 134878
+4Liq4Liz4LmA4Lij4LmH 134879
+4Liq4Liz4LmA4Lij4LmH4LiI 134880
+IMWfw7Z5bGU= 134881
+IHpvc3RhxYJh 134882
+IEjDvA== 134883
+16jXldep 134884
+2K/ZhNmK2YQ= 134885
+0YDQuNC0 134886
+16nXnw== 134887
+157Xp9eV16g= 134888
+INGD0Yc= 134889
+INGD0YfQtdCx 134890
+INGN0YLQsA== 134891
+0LrQvtCy0LA= 134892
+4LiV4LiZ4LmA4Lit4LiH 134893
+2YbZkA== 134894
+4Lit4Li14LiB4LiE4Lij4Lix4LmJ4LiH 134895
+4Lij4Liw4Lia4Li4 134896
+IGThu68= 134897
+INin2YTYrdin2YTZig== 134898
+15vXldeb 134899
+15vXldeb15E= 134900
+INee15DXqdeo 134901
+IHRy4bul 134902
+0YLQtdC70LXQvA== 134903
+INCy0LvQuA== 134904
+INCy0LvQuNGP 134905
+INep15DXqted 134906
+IHV3YWc= 134907
+IHV3YWfEmQ== 134908
+15jXmdeq 134909
+15DXk9ed 134910
+4LiU4Li4 134911
+INeU15DXnNeU 134912
+IGthcsSxxZ8= 134913
+IMSQ4buRaQ== 134914
+0LTQsNGO0YI= 134915
+44Gq44Gu44Gr 134916
+xIVjeWNo 134917
+4LmA4LiZ4LmJ4LiZ 134918
+44GX44Gm44GX44G+44GG 134919
+aW50w6lyaWV1cg== 134920
+IGbDrXNpY2E= 134921
+INCf0L7Quw== 134922
+44GX44GV 134923
+4LiX4Liz4LmE4Lih 134924
+IEzDom0= 134925
+INin2YTZhdiz2YTZhQ== 134926
+INin2YTZhdiz2YTZhdmK2YY= 134927
+2LXYrdip 134928
+7JeE 134929
+4LmA4LiU4LmH4LiU 134930
+INGD0YfQtdGC 134931
+w6LMgQ== 134932
+INio2YTYpw== 134933
+INin2YTYp9is2KrZhdin2LnZig== 134934
+16TXqNeh150= 134935
+44OV44Op 134936
+INCa0L7Qs9C00LA= 134937
+bWllxZtjaQ== 134938
+INio2YrZhtmF2Kc= 134939
+INee15DXnteo15nXnQ== 134940
+INeR15DXlteV16g= 134941
+15XXqdeZ150= 134942
+INGB0LTQtdC70LA= 134943
+ZW50csOpZQ== 134944
+4LmA4LiE4LmJ4Liy 134945
+0YPQs9C7 134946
+INin2YTZgdmG2Yo= 134947
+INCS0L7Rgg== 134948
+4LiX4Li14LmI4Lih4Liy 134949
+15XXpteS 134950
+2YLYr9ix2Kk= 134951
+IOuqqQ== 134952
+IOuqqeyggQ== 134953
+7Y+J6rCA 134954
+INin2YTYo9ix2KjYuQ== 134955
+INin2YTYo9ix2KjYudin2KE= 134956
+16TXodeZ16c= 134957
+INGP0LLQu9GP0Y7RgtGB0Y8= 134958
+2KjZiNmG 134959
+7LC+ 134960
+157Xoteo15s= 134961
+157Xoteo15vXldeq 134962
+44K344Kn 134963
+INio2KfZhNij 134964
+7ZaI642Y 134965
+INin2YTYqNix2YbYp9mF2Kw= 134966
+INin2YTYo9it2K8= 134967
+IG3FqQ== 134968
+IG3FqWk= 134969
+0L/QsNGC 134970
+2KjYqw== 134971
+INGG0LXQvdGL 134972
+INeR16rXnA== 134973
+6KiA44KP44KM 134974
+INin2YTZhdis2KfZhA== 134975
+IOyEuOyDgQ== 134976
+INeS15XXpA== 134977
+INC90LDRiNC10Lk= 134978
+INC60L7QvNC/0LDQvdC40Y8= 134979
+0LHQuNC9 134980
+w7Zsw7w= 134981
+15nXmdeY 134982
+INee16HXpNeZ16c= 134983
+4Lii4Lix4LiH4LiE4LiH 134984
+INCn0Lg= 134985
+INCw0L3RgtC4 134986
+INGB0YDQtdC00Lg= 134987
+4Liq4LmI4Lin4LiZ4LmD4Lir4LiN4LmI 134988
+0L7Rh9C60LA= 134989
+7Yq567OE 134990
+4Lin4LmI4Liy4LiH 134991
+0LPQvtGA0L7QtA== 134992
+2KjYp9mD 134993
+4LmA4Liq4Li14LmI4Lii 134994
+4LmA4Liq4Li14LmI4Lii4LiH 134995
+44KC44KJ44GE 134996
+16fXlded 134997
+44Gb44Ga 134998
+INin2YTZgtin2YfYsdip 134999
+INeR15vXmg== 135000
+2YXYtNin2LHZiti5 135001
+2KjYp9it2Ks= 135002
+INC/0L7Rhw== 135003
+INC/0L7Rh9GC0Lg= 135004
+INGE0L7RgNC80LA= 135005
+U8Sw 135006
+INee16bXmdei 135007
+4Lil4Li3 135008
+4Lil4Li34Lih 135009
+INGC0LXRgA== 135010
+INGC0LXRgNGA0LjRgtC+0YA= 135011
+INGC0LXRgNGA0LjRgtC+0YDQuNC4 135012
+INCy0LzQtdGB0YI= 135013
+INCy0LzQtdGB0YLQtQ== 135014
+ZMSxa2xhcsSx 135015
+b3DDqXJhdGlvbg== 135016
+4LmC4Lir 135017
+2LXYr9mK 135018
+2LXYr9mK2YI= 135019
+7ZaJ7KCV 135020
+2KrYrNin 135021
+2KrYrNin2YjYsg== 135022
+IHN1w6c= 135023
+IGFydHk= 135024
+IGFydHlrdQ== 135025
+IGFydHlrdcWC 135026
+44K344On44OD44OX 135027
+16nXpA== 135028
+16nXpNeZ16I= 135029
+INeU16nXmdeo15XXqg== 135030
+4LmB4LiW4Lih 135031
+67iU 135032
+IHVrxYJhZA== 135033
+INeV15vXmQ== 135034
+4Lir4Lil4Liy4LiB 135035
+4Lir4Lil4Liy4LiB4Lir4Lil4Liy4Lii 135036
+5pa544KC 135037
+IHBvZHLDs8W8 135038
+IEXEn2Vy 135039
+INC60L7QvNC90LDRgg== 135040
+INGB0LDQvNGL0YU= 135041
+INCy0LrRg9GB 135042
+0LHQtdC2 135043
+INeR16fXlQ== 135044
+5o6b44GR 135045
+44G/44KL44Go 135046
+IGlsacWfa2lu 135047
+INmK2LnZhdmE 135048
+INC/0L7QtNCw0YA= 135049
+IHlhesSxbMSx 135050
+44KS5b6X 135051
+IHd5c3TEmXA= 135052
+4LiX4Li14LmI4LmD4LiK4LmJ 135053
+2K3Yp9iv2Ks= 135054
+2YjZitiv 135055
+0LrRg9C70YzRgg== 135056
+0LrRg9C70YzRgtGD0YA= 135057
+4LiB4Liy4Lij4LmB4LiC4LmI4LiH 135058
+4LiB4Liy4Lij4LmB4LiC4LmI4LiH4LiC 135059
+4LiB4Liy4Lij4LmB4LiC4LmI4LiH4LiC4Lix4LiZ 135060
+2YXZiNi4 135061
+2YXZiNi42YE= 135062
+2YrZhdmK 135063
+44KT44Gn44GZ44GM 135064
+ZGnEn2lt 135065
+ZGnEn2ltaXo= 135066
+INCf0LXRgA== 135067
+INCf0LXRgNCy 135068
+IG3Do28= 135069
+INGB0LXQtw== 135070
+INGB0LXQt9C+0L0= 135071
+INeU157Xog== 135072
+2YXYrNmF2YjYudip 135073
+INC40L3RhNC+0YDQvNCw0YbQuNC4 135074
+aeG6v2M= 135075
+w6NuZw== 135076
+IMSR4bqleQ== 135077
+44GU57Q= 135078
+44GU57S5 135079
+44GU57S55LuL 135080
+IGFkxLFt 135081
+4LmE4Lir4Lil 135082
+INC/0YDQsNC60YLQuA== 135083
+INC/0YDQsNC60YLQuNGH 135084
+INC/0YDQsNC60YLQuNGH0LXRgQ== 135085
+INC/0YDQsNC60YLQuNGH0LXRgdC60Lg= 135086
+INin2YTZhtmB2LM= 135087
+INGA0LDQsdC+0YLQtQ== 135088
+2YTZitmB 135089
+INin2YTYrNmG2YjYqA== 135090
+INCy0L7QtNGL 135091
+7LmZ 135092
+INC80LjRgNCw 135093
+IMSR4burbmc= 135094
+INC/0YDQvtGC0LjQstC+ 135095
+INGB0YLRgNCw0L3Riw== 135096
+4Lil4Li5 135097
+7IK2 135098
+a3JlxZts 135099
+IGJ1bHVuZA== 135100
+IGJ1bHVuZHXEn3U= 135101
+4LmB4Liq4LiZ 135102
+44Kx44Ki 135103
+16rXl9eV157XmQ== 135104
+16jXm9eU 135105
+INec16fXldeX 135106
+INec16fXldeX15XXqg== 135107
+INeb16rXldeR16o= 135108
+INmE2YPZhQ== 135109
+2KjYtNix 135110
+IHLDoG5n 135111
+INee15TXng== 135112
+INeQ15fXqNeV16o= 135113
+INCx0L7QvQ== 135114
+INCx0L7QvdGD0YE= 135115
+772X 135116
+4LmB4Lii4LiB 135117
+44GC44Gq44Gf44Gu 135118
+INGD0YfQsNGB0YLQuNC1 135119
+IEV5bA== 135120
+IEV5bMO8bA== 135121
+IMOnYWzEscWfbWFsYXLEsQ== 135122
+2K7Yt9ix 135123
+7J29 135124
+4LiB4Liy4Lij4LmD4LiK4LmJ4LiH4Liy4LiZ 135125
+INCw0L3QsNC70LjQtw== 135126
+16rXp9eR15w= 135127
+0L3QuNC10Lw= 135128
+IMSwbnM= 135129
+IMSwbnNhbg== 135130
+INio2YjYp9iz 135131
+INio2YjYp9iz2LfYqQ== 135132
+INeg15vXoNeh 135133
+INeU157XmdeT16I= 135134
+IMOnbw== 135135
+IMOnb8SfdQ== 135136
+4buY 135137
+IOq1reuvvA== 135138
+44KC44GE44GE 135139
+INeb15zXmQ== 135140
+INGB0YDQtdC00L3QtQ== 135141
+Z8WCbw== 135142
+Z8WCb8Wb 135143
+IG5lZ8Oz 135144
+IG5lZ8OzY2lv 135145
+INGA0LXQs9C40YHRgg== 135146
+INGA0LXQs9C40YHRgtGA0LA= 135147
+INGA0LXQs9C40YHRgtGA0LDRhtC40Lg= 135148
+IHRy4buTbmc= 135149
+INC/0YDRjw== 135150
+INC/0YDRj9C80L4= 135151
+66CI7J20 135152
+IGvDqW0= 135153
+0LrQu9C1 135154
+4LiZ4Liz4Lih4Liy 135155
+INGE0LjQvQ== 135156
+INGE0LjQvdCw0L3RgQ== 135157
+INGE0LjQvdCw0L3RgdC+0LI= 135158
+IGtp4buHbQ== 135159
+4Lii4Lix4LiH4LmE 135160
+4Lii4Lix4LiH4LmE4LiH 135161
+4Lii4Li04LiH 135162
+4LmC4Lib 135163
+INC/0L7Qu9GD0YfQuNC7 135164
+15nXlted 135165
+4LmB4Lil4Liw4LiE4Lin4Liy4Lih 135166
+INCy0L7QvtCx0YnQtQ== 135167
+2LXZitix 135168
+44OP44Oz 135169
+INin2YTZgtin2K8= 135170
+INin2YTZgtin2K/ZhQ== 135171
+INio2K/ZiNmG 135172
+2LnYuNmF 135173
+16rXoNeV16I= 135174
+16rXoNeV16LXlA== 135175
+2KPZhdmE 135176
+44GV44GI 135177
+0YLQtdC8 135178
+0YLQtdC80L/QtdGA 135179
+0YLQtdC80L/QtdGA0LDRgtGD0YA= 135180
+INec15nXpteV16g= 135181
+IHLEmWs= 135182
+2LHYs9mE 135183
+7J6Q66W8 135184
+INeZ16bXmdeo16o= 135185
+2YbYqNmK 135186
+0YfQvdCw0Y8= 135187
+2KrYrdmE2YrZhA== 135188
+INC80LjQug== 135189
+INC80LjQutGA0L4= 135190
+IFPDtno= 135191
+IGZvcsOnYQ== 135192
+0YHQvtC9 135193
+INin2YTYudix2Kc= 135194
+INin2YTYudix2KfZgtmK 135195
+IEjhu5NuZw== 135196
+44GZ44KL44Gf44KB44Gr 135197
+4LiX4Li14LmI4Lit4Lii4Li54LmI 135198
+INeV15DXow== 135199
+2LXZitiv 135200
+IOyViuqzoA== 135201
+4Lij4Lix4LiH 135202
+INin2YTYqtmI2KfYtdmE 135203
+4LmA4Lih4LiV4Lij 135204
+0YPRgdGC0YDQvtC5 135205
+0YPRgdGC0YDQvtC50YHRgtCy 135206
+bcSxeW9y 135207
+INio2KfYs9mF 135208
+INeV15vXlQ== 135209
+IEfDvGw= 135210
+4buQ 135211
+w4l0YXQ= 135212
+2LrYp9mE 135213
+2KXZhti0 135214
+2KXZhti02KfYoQ== 135215
+VMSw 135216
+4LiC4LmJ4Liy4Lih 135217
+IHRyb2No 135218
+IHRyb2NoxJk= 135219
+2KXYtQ== 135220
+2KXYtdin2KjYqQ== 135221
+INir2KfZhtmK 135222
+INin2YTYtdit2Kk= 135223
+INeW15TXlQ== 135224
+asSFY2Vq 135225
+44OA44Oz 135226
+7J247J20 135227
+INCy0L7Qu9C+0YE= 135228
+65CY66m0 135229
+IHpha8WCYWQ= 135230
+44GZ44GT44Go 135231
+5Lul5LiK44Gu 135232
+INeU157Xp9eV150= 135233
+2YXYtNin2Yc= 135234
+2YXYtNin2YfYr9ip 135235
+0YfQuNCy 135236
+2KjYtA== 135237
+4Lii4LmJ4Liy4Lii 135238
+IHPDvHJkw7xy 135239
+IE7hurU= 135240
+IE7hurVuZw== 135241
+INC40LPRgNCw0YLRjA== 135242
+IOq3uOufrOuptA== 135243
+44OV44Or 135244
+4Lil4LmI4Liw 135245
+IHRlbmRyw6E= 135246
+IGLDoHk= 135247
+4LmA4Lib4LmH4LiZ4Lic4Li54LmJ 135248
+IG9rbw== 135249
+IG9rb8WCbw== 135250
+d8WCYQ== 135251
+d8WCYcWbY2k= 135252
+d8WCYcWbY2l3 135253
+5oCd44KP 135254
+IFlhxZ8= 135255
+IELhu4duaA== 135256
+7Y+t 135257
+2KjZitiv 135258
+16fXqNef 135259
+4LmA4Lio4Lij 135260
+4LmA4Lio4Lij4Lip 135261
+4LmA4Lio4Lij4Lip4LiQ 135262
+4LmA4Lio4Lij4Lip4LiQ4LiB4Li04LiI 135263
+INin2YTYo9mI2LHZiA== 135264
+INin2YTYo9mI2LHZiNio2Yo= 135265
+ZmzDpGNoZQ== 135266
+5LmX44KK 135267
+IGLhu4Fu 135268
+2YfYqA== 135269
+5pyA44KC 135270
+IHNhw6c= 135271
+4Lit4Liz4LmA4Lig 135272
+4Lit4Liz4LmA4Lig4Lit 135273
+INij2Kw= 135274
+INin2YTYr9in2K7ZhA== 135275
+INin2YTYr9in2K7ZhNmK2Kk= 135276
+15jXldeR 135277
+44KC44Gq44GP 135278
+INC70LjRhtCw 135279
+4LmB4Lil4LmJ4Lin4LiB4LmH 135280
+15bXm9eZ16g= 135281
+IHF1w6A= 135282
+INmD2LDZhNmD 135283
+2LXYrdmB 135284
+IMOCdQ== 135285
+2YjYqNin 135286
+4LmA4Lib4Lil4Li14LmI4Lii4LiZ4LmB4Lib4Lil 135287
+4LmA4Lib4Lil4Li14LmI4Lii4LiZ4LmB4Lib4Lil4LiH 135288
+4LiV4Lix4Lin4Lit4Lii4LmI4Liy4LiH 135289
+IHLDoXBpZGE= 135290
+IHRhc2Fy 135291
+IHRhc2FyxLFt 135292
+INi52YTZitmH2YU= 135293
+16HXldec 135294
+Y8SxbMSx 135295
+Y8SxbMSxaw== 135296
+INix2LrZhQ== 135297
+7Iuc7YKk 135298
+INeQ15zXpw== 135299
+INeQ15zXp9eY16g= 135300
+INeQ15zXp9eY16jXldeg15k= 135301
+4LmB4Lia4LmI4LiH 135302
+IGjhuqFuZw== 135303
+44Gj44Gm44GP44KM 135304
+INmG2KrZig== 135305
+INmG2KrZitis2Kk= 135306
+xLFrbMSx 135307
+2LrYp9mG 135308
+4LiC4LmJ4Lit4LiE4Lin4Liy4Lih 135309
+4Lib4Lil4Liy4Lii 135310
+INij2YXYsw== 135311
+4LiX4Li14LmI4LmA4LiB4Li14LmI4Lii4Lin 135312
+4LiX4Li14LmI4LmA4LiB4Li14LmI4Lii4Lin4LiC 135313
+4LiX4Li14LmI4LmA4LiB4Li14LmI4Lii4Lin4LiC4LmJ4Lit4LiH 135314
+IGTDqWZpbg== 135315
+IGTDqWZpbmk= 135316
+2YHZhtin2K8= 135317
+2YHZhtin2K/Zgg== 135318
+4LmE4LiU4LmJ4Lin4LmI4Liy 135319
+44Gq44GE44KI44GG44Gr 135320
+IHByw7Nwcmlh 135321
+IFBow6F0 135322
+44KE44GZ44GP 135323
+4Liq4Lin4Lii4LiH4Liy4Lih 135324
+6rOg7JqU 135325
+0Y/QtdGC 135326
+44GL44KC44GX44KM44G+44Gb44KT44GM 135327
+2KrYsdis2YU= 135328
+INC60YDQsNGB0LjQsg== 135329
+INee16jXkNep 135330
+0LTQtdC2 135331
+INmK2YjZhg== 135332
+INmK2YjZhtmK2Yg= 135333
+0YHQutC+0YA= 135334
+IEthc8SxbQ== 135335
+6rOE7JW9 135336
+0LrQvtGB 135337
+INC90LDRgNGD 135338
+INC90LDRgNGD0YjQtdC9 135339
+IGR1xbxl 135340
+YWNjw6hz 135341
+IGjhu5NuZw== 135342
+IHbFqQ== 135343
+44GE44Gf44GX44G+44GZ 135344
+INeY15k= 135345
+INeY15nXldec 135346
+bMSxa2xhcsSx 135347
+IHF1w6o= 135348
+64W464+Z 135349
+7JWU 135350
+Q0nDk04= 135351
+IHThuq9j 135352
+cHJlc3PDo28= 135353
+IOyeiOycvA== 135354
+4Liq4Li04LiX4LiY4Li04LmM 135355
+7YOE 135356
+INeU157Xntep15zXlA== 135357
+5ayJ44GX44GE 135358
+IMSQ4bq3Yw== 135359
+2YbYstmE 135360
+INC00YDRg9Cz0L7QuQ== 135361
+0LTRg9GC 135362
+7IiZ 135363
+IHRo4bul 135364
+4LmA4Liq4Lij 135365
+4LmA4Liq4Lij4LmH 135366
+4LmA4Liq4Lij4LmH4LiI 135367
+IHRvcGxhbnQ= 135368
+IHRvcGxhbnTEsQ== 135369
+15DXntef 135370
+15XXnNeq 135371
+0L/QvtC80L0= 135372
+IHlvxJ91bg== 135373
+xYRza2llZ28= 135374
+7LCp 135375
+INir2YTYp9ir 135376
+INir2YTYp9ir2Kk= 135377
+IGzhuq9uZw== 135378
+66a0 135379
+4Lij4Liy4LiK4LiB4Liy4Lij 135380
+INGB0LvQvtCy0LA= 135381
+4buG 135382
+4LiU4Li14LiB4Lin4LmI4Liy 135383
+44GU44GW44GE44G+44GZ 135384
+INC00LjQtw== 135385
+INC00LjQt9Cw0LnQvQ== 135386
+ZsOpcmVuY2U= 135387
+bMSxa2xhcg== 135388
+44Gq44KT44Gn44GZ 135389
+YWrEhWN5 135390
+IOuLpOyWkQ== 135391
+IOuLpOyWke2VnA== 135392
+16fXmdeo 135393
+2K3Yp9ix 135394
+4Liq4Li54LmJ 135395
+IHpybw== 135396
+IHpyb2Jp 135397
+IHpyb2JpxIc= 135398
+157Xmdeb15Q= 135399
+4LiK4LmI4Lin4Lii4LmA4Lir4Lil4Li34Lit 135400
+INGN0YLRgw== 135401
+67SJ 135402
+5qW944GX44GE 135403
+2LPZiNix 135404
+7ZWY6rGw64KY 135405
+2YXYpNiq2YXYsQ== 135406
+IHBvY3rEhQ== 135407
+IHBvY3rEhXRr 135408
+IHBvY3rEhXRrdQ== 135409
+INi52LHYqNmK 135410
+2KfZhNij2LE= 135411
+2KfZhNij2LHYr9mG 135412
+4LiU4Lij 135413
+xZN1dnJl 135414
+INmI2YPYp9mG2Ko= 135415
+IMWbcmVkbmk= 135416
+2K7Yttix 135417
+IGNodXnhur9u 135418
+0L3Rgg== 135419
+IOyVjOqzoA== 135420
+IHbhu51p 135421
+INeR15nXk9eZ 135422
+157Xk9eV15HXqA== 135423
+2YjZgdix 135424
+2YrYoQ== 135425
+16DXm9eh 135426
+INCb0LA= 135427
+0LvQvtC9 135428
+IHjhuqV1 135429
+2YHZitmG 135430
+IGbDqXZyaWVy 135431
+INCe0L3QsA== 135432
+IFbhu4E= 135433
+IMWfZXlsZXI= 135434
+INC/0L7Qu9GD0YfQtdC9 135435
+0LfQsNC0 135436
+IG7DqXQ= 135437
+4LmE4Lib4Lii4Lix4LiH 135438
+15fXqdeR15U= 135439
+4Lia4Lix4LiZ4LiX 135440
+4Lia4Lix4LiZ4LiX4Li24LiB 135441
+IGdlcsOnZWtsZcWf 135442
+0LjRh9C10YHQutC+0LU= 135443
+7IiY6rCA 135444
+2KvYqNiq 135445
+44Gk44G+44KK 135446
+INGD0YHQu9C+0LLQuNGP0YU= 135447
+64uk6rCA 135448
+4Lij4Liy4Lii4LmE4LiU4LmJ 135449
+15vXkNeR 135450
+4LmC4Lib4Lij4LmC4Lih 135451
+4LmC4Lib4Lij4LmC4Lih4LiK4Lix4LmI4LiZ 135452
+asOkaHI= 135453
+asOkaHJpZ2U= 135454
+16fXoNeZ150= 135455
+157Xlden 135456
+157Xlden15M= 135457
+44Gr6KGM44Gj44Gm 135458
+2KLZhA== 135459
+0LLQtdC00LXQvdC40LU= 135460
+INec15vXqteV15E= 135461
+2KzZhdmH 135462
+2KzZhdmH2YjYsdmK2Kk= 135463
+4LiJ4Lia 135464
+4LiJ4Lia4Lix4Lia 135465
+IEPDsm4= 135466
+4Lic4Liq4Lih 135467
+44Gq44Gp44GM 135468
+15DXlNeR 135469
+INC00LXQudGB0YLQstC40Y8= 135470
+ecSxeg== 135471
+4LmE4Lih4LmI4LmA4LiE4Lii 135472
+2KzZiNiy 135473
+15TXl9ec15jXlA== 135474
+ZsOkbGx0 135475
+44OT44K4 135476
+44OT44K444ON 135477
+44OT44K444ON44K5 135478
+INeQ15nXoNed 135479
+INC90LDRhdC+0LTQuNGC0YHRjw== 135480
+IGR6acWb 135481
+2LPYqti32YrYuQ== 135482
+15zXmdef 135483
+2K7ZhNin2YE= 135484
+2YfZkA== 135485
+IGF0csOhcw== 135486
+7ZiB 135487
+44KS44GU 135488
+INeU157Xldem16g= 135489
+IEJha2FubMSxxJ/EsQ== 135490
+0Y7RidC10LU= 135491
+2YXZhtin2Lc= 135492
+2YXZhtin2LfZgg== 135493
+2YHYrw== 135494
+4LiZ4Liz4LmE4Lib 135495
+INCy0LDQtg== 135496
+INCy0LDQttC90L4= 135497
+IG3huqFjaA== 135498
+15vXoNeV 135499
+2KjYudir 135500
+bGFubWFzxLE= 135501
+IGF5cg== 135502
+IGF5csSxbA== 135503
+7IKs7ZqM 135504
+ZMOtYQ== 135505
+cMWCeXc= 135506
+2KfZhdmK2Kk= 135507
+7Zic 135508
+15DXoNeS15w= 135509
+15DXoNeS15zXmdeq 135510
+IOyeiOuLpOuKlA== 135511
+INiz2KfYudip 135512
+IOuCmO2DgA== 135513
+YsO2 135514
+4LiE4Lix4LiZ 135515
+IGR6aWHFgmFuaWE= 135516
+2KnZiw== 135517
+IG5nxak= 135518
+16DXpteX 135519
+44Gv44GC44KL 135520
+IHlhxZ/EsW5kYQ== 135521
+c3TDvGNr 135522
+Y2FyYWN0ZXI= 135523
+Y2FyYWN0ZXLDrXN0aWNhcw== 135524
+IHLhu61h 135525
+INmF2K7YqtmE2YHYqQ== 135526
+44Gr44GK44GR44KL 135527
+4LmB4Lie4LiH 135528
+4Lin4Li04LmI4LiH 135529
+16rXpNeV 135530
+2LPYp9mH2YU= 135531
+5L2/44GG 135532
+2YPYsdmK 135533
+15DXpNeZ 135534
+Li4uLi4uLi4uLi4uLi4u 135535
+INGC0LDQutC40Lw= 135536
+15nXm9eV15k= 135537
+2LTYqNmH 135538
+2KzZitix 135539
+44Gd44Gu44G+44G+ 135540
+YWNqxJk= 135541
+INin2YTYqtix2YM= 135542
+INin2YTYqtix2YPZig== 135543
+INC/0YDQsNCy0LjQu9GM0L3Qvg== 135544
+INiq2LnZhdmE 135545
+4LiB4Lil4LmJ4Liy 135546
+IGJpw6pu 135547
+INeR16DXmdeZ16o= 135548
+INC60LvRg9Cx 135549
+INee16nXlA== 135550
+0LLRiNC40Lk= 135551
+44GT44Go44GM44Gn44GN44KL 135552
+4Lie4Lix4LiZ4LiY4Li4 135553
+4Lie4Lix4LiZ4LiY4Li44LmM 135554
+16jXlded 135555
+INin2YTZgdix2YY= 135556
+INin2YTZgdix2YbYs9mK 135557
+4LmA4Lib4LmH4LiZ4LiE4LiZ 135558
+44GX44Gm44GK44KK 135559
+IHRo4bqneQ== 135560
+44KT44Gg44GR44Gp 135561
+7JSo 135562
+2YXYr9mG 135563
+2KrZiNmG 135564
+INC80LXRgtCw0Ls= 135565
+INC80LXRgtCw0LvQuw== 135566
+IGluw61jaW8= 135567
+4Lit4Lit4LiB4LiI4Liy4LiB 135568
+65Kk 135569
+IGN14buRbg== 135570
+IGJ14buZYw== 135571
+2YbYs9mK 135572
+w6RjaHQ= 135573
+157Xmdeg15nXnQ== 135574
+44GV44Gm 135575
+44GM44Gn44GN 135576
+0YrQtdC8 135577
+IHTDoWk= 135578
+INCn0YI= 135579
+INCn0YLQvtCx0Ys= 135580
+4Lib4Lil4Li54LiB 135581
+4LiK4Li44Lih4LiK4LiZ 135582
+0L3RgdC60LjQuQ== 135583
+IHbhu69uZw== 135584
+INeU15zXkQ== 135585
+w6tsZQ== 135586
+INep16LXkdeo 135587
+0LLQsNGC0YzRgdGP 135588
+0LHQvtC5 135589
+2LnZiNmG 135590
+4LmB4LiU4LiZ 135591
+INeh16TXqNeZ150= 135592
+IHR1ecOqbg== 135593
+IG5oacOqdQ== 135594
+IFF1w70= 135595
+IGh1eeG6v3Q= 135596
+44KP44GL44KJ44Gq44GE 135597
+INee15vXnw== 135598
+INeU16fXnA== 135599
+INec15DXldeo 135600
+IMSQaeG7h24= 135601
+2LTYpA== 135602
+2LTYpNmI2YY= 135603
+INee15fXpNep 135604
+INC/0L7RgdGC0L7Rj9C90L3Qvg== 135605
+157Xmdeo 135606
+7IWU 135607
+0J7RgQ== 135608
+0J7RgdC90L7Qsg== 135609
+15bXmdeq 135610
+IEjDoQ== 135611
+INGH0LDRgdC+0LI= 135612
+15DXldec15k= 135613
+IG3DoXQ= 135614
+2K7YsdmI 135615
+2K7YsdmI2Kw= 135616
+2YLYttin 135617
+2YLYttin2YrYpw== 135618
+4LmA4Lib4Lit4Lij4LmM 135619
+INmK2YjZhA== 135620
+INmK2YjZhNmK2Yg= 135621
+4LmC4LiX4Lip 135622
+16DXpNec 135623
+16rXldep 135624
+16rXldep15HXmQ== 135625
+IHbDoXJpb3M= 135626
+157XqNeQ15Q= 135627
+65287J20 135628
+2YbYug== 135629
+15HXptei 135630
+0LPQvtC9 135631
+IMSQxrDhu6Nj 135632
+2LnZjw== 135633
+0L/Rg9GB0Lo= 135634
+INmI2KfZhNmB 135635
+w7xjw7w= 135636
+15nXp9eZ150= 135637
+INiz2KjZitmE 135638
+15zXkdef 135639
+INin2YTZgtix2YY= 135640
+16HXldeq 135641
+IFF14bqtbg== 135642
+44GT44KM44GM 135643
+44OW44Op44Oz44OJ 135644
+15LXnteo 135645
+IHdhcnRvxZtjaQ== 135646
+INmI2KjZitmG 135647
+IGThuqE= 135648
+0JDQsg== 135649
+0JDQstGC0L4= 135650
+IG9sYWNha3TEsXI= 135651
+4LiZ4LiX4LmM 135652
+2YXYt9in2LE= 135653
+INei16fXkQ== 135654
+INeq16Q= 135655
+44GX44Gm44GE44Gm 135656
+16bXnteX 135657
+4LiI4Lit4LiH 135658
+IMO2ZGU= 135659
+7I2o 135660
+2YbYp9iz 135661
+6Kq/44G5 135662
+INC+0LPRgNC+0LzQvQ== 135663
+67O07ZeY 135664
+15jXpw== 135665
+15jXp9eh15g= 135666
+IGJhxZ92 135667
+IGJhxZ92dXJ1 135668
+IHBvbXlz 135669
+IHBvbXlzxYI= 135670
+44Gr5LmX 135671
+INep15vXnw== 135672
+INin2YTZhdiz2KTZiNmE 135673
+INC30LDQvQ== 135674
+INC30LDQvdGP0YI= 135675
+IGTGsMahbmc= 135676
+44OX44Os44Kk 135677
+4Lil4Lia 135678
+0YLQuNC60LA= 135679
+IEFyYWzEsWs= 135680
+INC90LXQtNC+ 135681
+IG3hu5k= 135682
+IG9yYW4= 135683
+IG9yYW7EsQ== 135684
+IGt0w7Ny 135685
+IGt0w7NyxIU= 135686
+INeU15DXl9eo15XXoNeV16o= 135687
+2KfYptmG 135688
+xYRz 135689
+xYRza2E= 135690
+5Zu944Gu 135691
+157XmNeZ 135692
+INCy0L7Qv9GA0L7RgdGL 135693
+4Lit4LiH4LiE4LmM4LiB4Lij 135694
+157Xldem15A= 135695
+IHDDs8W6 135696
+IHDDs8W6bmllag== 135697
+16nXnteQ15w= 135698
+IGthcHM= 135699
+IGthcHNhbQ== 135700
+IGthcHNhbcSxbmRh 135701
+IG3DoXF1aW5h 135702
+IMWbd2llY2ll 135703
+IGhvw6BuZw== 135704
+IMO2emfDvA== 135705
+15LXldeo150= 135706
+44GC44Gf44KK 135707
+4LiV4Lix4LiU4Liq4Li04LiZ 135708
+4LiV4Lix4LiU4Liq4Li04LiZ4LmD4LiI 135709
+0LHRgNC4 135710
+44Gr44Gq44KL44Go 135711
+2KrZg9mI2YY= 135712
+INeV15TXmdeQ 135713
+IGNoaeG6v3U= 135714
+0YHRgtCw0L3QsNCy 135715
+0YHRgtCw0L3QsNCy0LvQuA== 135716
+0YHRgtCw0L3QsNCy0LvQuNCy0LA= 135717
+157XldeS 135718
+Y2l0w6k= 135719
+IEvDtnJwZXI= 135720
+INep15LXnQ== 135721
+2LnYuA== 135722
+2LnYuNmK2YU= 135723
+INeU15DXmdep15k= 135724
+IG1hdGnDqHJl 135725
+INmB2YjZgg== 135726
+IGt0bw== 135727
+IGt0b8Wb 135728
+4LiZ4LmC4Lii 135729
+4LiZ4LmC4Lii4Lia4Liy4Lii 135730
+5b6F44Gh 135731
+4LmA4Lih4LiZ 135732
+4LmA4Lih4LiZ4Li5 135733
+QcOHw4NP 135734
+IHTDuQ== 135735
+IHTDuXk= 135736
+44OI44Oz 135737
+INC+0YLQutCw0Lc= 135738
+INee15XXpteo 135739
+w7xsw7w= 135740
+44GV44KT44Gr 135741
+INeX15XXkQ== 135742
+16fXqNeZ15DXlA== 135743
+INin2YTYrtiv2YXYp9iq 135744
+INmE2YXYr9ip 135745
+2LHYpA== 135746
+2LHYpNmK2Kk= 135747
+44KS6KaL44Gk44GR 135748
+4Lif4Liy 135749
+IHLDqXVzc2k= 135750
+4LiZ4Lix4LiB4LmA4Lij4Li14Lii4LiZ 135751
+INGH0LjRgdC7 135752
+4LiB4Liy4Lij4LmA4Lil4LmI4LiZ 135753
+IGhhesSxcmw= 135754
+IGhhesSxcmxhbg== 135755
+INC/0LXRgNCy0YvQuQ== 135756
+0LvQuNC8 135757
+INC+0YLQt9GL0LLRiw== 135758
+IHd5asSF 135759
+IHd5asSFdGs= 135760
+INij2YLZhA== 135761
+16HXmg== 135762
+IOqysOyglQ== 135763
+INec157Xotep15Q= 135764
+IGzhuq9w 135765
+4LmB4Lia4Lij 135766
+4LmB4Lia4Lij4LiZ4LiU4LmM 135767
+4Lin4LmI4Liy4LmA4Lib4LmH4LiZ 135768
+INio2K/Ypw== 135769
+INio2K/Yp9mK2Kk= 135770
+44Go44GE44GG44Gu44GM 135771
+0LjRh9C10YHQutC40Lw= 135772
+4LiB4Liy4Lij4Lie4Lix4LiS4LiZ4Liy 135773
+IGLDoG8= 135774
+IG1pYcWCYQ== 135775
+eXdhxIc= 135776
+IE3DpHJ6 135777
+INmG2LPYqNip 135778
+IMOpY29ub21pcXVl 135779
+15bXng== 135780
+15bXnteg15nXnQ== 135781
+5q2i44KB 135782
+IHThu6c= 135783
+7ZWY7Iug 135784
+IGthxbxkZWdv 135785
+c3RyYcOfZQ== 135786
+4LiK4Li14LmJ 135787
+4LmA4Lia4Liy 135788
+0YDQtdGB0YPRgNGB 135789
+0LXQstC+0Lk= 135790
+2LTYqNin2Kg= 135791
+4LiV4LmI4Liy4LiH4Lib4Lij4Liw4LmA4LiX4Lio 135792
+INeQ15nXqQ== 135793
+INeQ15nXqdeZ16o= 135794
+15nXldek 135795
+15nXldek15k= 135796
+IOyalOq1rA== 135797
+7KGw7IKs 135798
+44Gj44Gf44KJ 135799
+15zXmden 135800
+0LzQuNC90LjRgdGC0YA= 135801
+44KC44Gu44Gv 135802
+IGzGsMahbmc= 135803
+INC90LDQuA== 135804
+INC90LDQuNCx0L7Quw== 135805
+INC90LDQuNCx0L7Qu9C10LU= 135806
+7Y6Y 135807
+4LmB4Lie4LmJ 135808
+44Kt44Ol 135809
+INC60L7RgtC+0YDRi9C8 135810
+4LmB4LiX4LiH 135811
+4LmB4LiX4LiH4Lia4Lit4Lil 135812
+INeg15nXlA== 135813
+INeg15nXlNeV15w= 135814
+4oKq 135815
+IEdp4bqjaQ== 135816
+INC40YHQv9C+0LvRjNC30L7QstCw 135817
+66Cl7J2E 135818
+44GX44GL44KC 135819
+4LiB4LmH4LiV4LmJ4Lit4LiH 135820
+INGA0LXQsQ== 135821
+INGA0LXQsdC10L0= 135822
+INGA0LXQsdC10L3QutCw 135823
+2KrZiNin2LXZhA== 135824
+44Kw44Or44O844OX 135825
+44KE44KJ 135826
+4LmA4Lib4Li04LiU4LiV4Lix4Lin 135827
+0LHRgNC+ 135828
+67CW7JeQ 135829
+2YbZjtin 135830
+15TXkg== 135831
+15TXkteg15Q= 135832
+4LiX4Lij4Lix 135833
+4LiX4Lij4Lix4Lie 135834
+4LiX4Lij4Lix4Lie4Lii4LmM 135835
+IGto4buRaQ== 135836
+16LXptee15U= 135837
+0LHQvtC70LXQt9C9 135838
+IOuwm+yVhA== 135839
+4Lih4LiZ 135840
+4Lih4LiZ4Li4 135841
+4Lih4LiZ4Li44Lip 135842
+4Lih4LiZ4Li44Lip4Lii4LmM 135843
+4peG 135844
+157Xptec15nXlw== 135845
+0Y/QstC70LXQvdC40LU= 135846
+2YXYt9mE 135847
+2YXYt9mE2YjYqA== 135848
+2K7Yp9mE2YE= 135849
+2KrZiNmC2YE= 135850
+44Gn44GN44G+44Gb44KT 135851
+0L7RgdGC0LXQuQ== 135852
+0LzQtdGH0LA= 135853
+6riw64qU 135854
+16rXqdei 135855
+2LXZitio 135856
+INeR16LXldeT 135857
+4LiC4Lit4LiH4LmA4LiC4Liy 135858
+0YLRj9C2 135859
+INGD0L/RgNCw0LI= 135860
+INGD0L/RgNCw0LLQu9C10L3QuNGP 135861
+IGfDqW7DqXI= 135862
+IHRow60= 135863
+16TXmg== 135864
+INix2YXYtg== 135865
+INix2YXYttin2YY= 135866
+IHRydXnhu4du 135867
+2KXYudiv2KfYrw== 135868
+44K144Od44O844OI 135869
+INC/0L7Qu9C90L4= 135870
+2K7Yp9mF 135871
+0J/QtdGC 135872
+0J/QtdGC0LXRgA== 135873
+0J/QtdGC0LXRgNCx0YPRgA== 135874
+0J/QtdGC0LXRgNCx0YPRgNCz 135875
+2YXZhtiq2K/ZiQ== 135876
+44GV44KM44G+44GX44Gf 135877
+IOuMgO2VmOyXrA== 135878
+4Lic4Li54LmJ4LiX4Li14LmI 135879
+INee15DXlQ== 135880
+15zXoNeT 135881
+0L7Rh9C90YvQtQ== 135882
+INC90LDRh9Cw0LvQsA== 135883
+INec15nXnNeT15nXnQ== 135884
+0L7QstC+0LU= 135885
+44GZ44KL44GT44Go44Gn 135886
+INin2YTZhtmB 135887
+INin2YTZhtmB2Lc= 135888
+7J6I64qU 135889
+2LrZhtmK 135890
+16TXkw== 135891
+44K+ 135892
+IENyw6k= 135893
+44Gp44Gh44KJ 135894
+2KvYp9mG 135895
+0YDQsNCx0LDRgg== 135896
+0YDQsNCx0LDRgtGL0LLQsA== 135897
+IOqwmeuLpA== 135898
+4LiI4Lix 135899
+4LiI4Lix4LiB4Lij 135900
+IGNo4bul 135901
+IGNo4bulcA== 135902
+INC80LDRgdGC 135903
+INC80LDRgdGC0LXRgA== 135904
+IG7huq9t 135905
+INGB0YLQsNC70Lg= 135906
+INeU15DXmdeo15XXog== 135907
+44K944Oz 135908
+5YiG44GL44KK 135909
+2LfYqNi5 135910
+2KjYr9in 135911
+Z3LDoWZpY28= 135912
+0LPQtdGA 135913
+4LiU4Liz4LmA4LiZ4Li04LiZ4LiB4Liy4Lij 135914
+IHNhbGTEsXI= 135915
+IHNhbGTEsXLEsQ== 135916
+0LLRiNC40YU= 135917
+44GL44Gj44Gf44Gn44GZ 135918
+IHlhcMSxeW9y 135919
+INin2YTZgdiq 135920
+16bXqNek16o= 135921
+0LfQtNC+0YDQvtCy 135922
+15HXotec 135923
+INeQ157Xmdeq15k= 135924
+INC+0LHRiw== 135925
+INC+0LHRi9GH 135926
+INC+0LHRi9GH0L3Qvg== 135927
+INec15XXnteo 135928
+2KrZg9mG 135929
+2KrZg9mG2YjZhNmI2Kw= 135930
+2KrZg9mG2YjZhNmI2KzZitin 135931
+IGhha2vEsQ== 135932
+INGA0LDQsg== 135933
+INGA0LDQstC90L4= 135934
+2LHZitmD 135935
+INeR157XmdeT 135936
+INeR157XmdeT15Q= 135937
+4LmB4LiB4LmJ4Lin 135938
+IOyWmA== 135939
+IOyWmOq4sA== 135940
+44GX44Gm44GE44G+44GX44Gf 135941
+IGvEsXNt 135942
+IGvEsXNtxLE= 135943
+6rG4 135944
+5YaF44Gu 135945
+7KeV 135946
+4LmA4Lir4Lih4Li34Lit4LiZ4LiB4Lix4LiZ 135947
+INmB2ZA= 135948
+INmB2ZDZig== 135949
+2YLYp9i52K/YqQ== 135950
+IG1vxbxlc3o= 135951
+2YXYtdin2YQ= 135952
+2YXYtdin2YTYrQ== 135953
+44G+44Gf44Gv 135954
+0LHQtdCz 135955
+IHPEsWM= 135956
+IHPEsWNhaw== 135957
+0YfQuNGB 135958
+0YfQuNGB0LvQtdC9 135959
+INC90L7Qsw== 135960
+44OB44Oj44Oz 135961
+44Or44OJ 135962
+IGdpw7M= 135963
+IHPEsW7EsQ== 135964
+IHPEsW7EsWY= 135965
+0LjQstCw0YLRjA== 135966
+IHF1w6pu 135967
+IOyggQ== 135968
+IOyggeyaqQ== 135969
+IEpvw6Nv 135970
+2YHYp9iv 135971
+IEdsw7xjaw== 135972
+4LiX4Lit4LiU 135973
+IGfDs2k= 135974
+77yK 135975
+IGTDqXRhaWw= 135976
+INiv2YrYs9mF 135977
+INiv2YrYs9mF2KjYsQ== 135978
+66Gc7ISc 135979
+157XldeX 135980
+4LmE4Liu 135981
+INC+0YLQtA== 135982
+INC+0YLQtNGL0YU= 135983
+IGtodXnhur9u 135984
+4LiE4Lit4Lii 135985
+INis2YbZig== 135986
+INis2YbZitmH 135987
+INin2YTYr9mB2KfYuQ== 135988
+4LiZ4LmJ4Liz4Lir4LiZ4Lix4LiB 135989
+IOyCrOuejOuTpOydtA== 135990
+IHRo4burYQ== 135991
+IMO2xJ9yZW5jaQ== 135992
+INC/0L7QvNC+0YnQuA== 135993
+IGN6xJnFm8SH 135994
+16nXmNeo 135995
+IE5oaQ== 135996
+IE5oaeG7gXU= 135997
+16DXpteZ 135998
+INC90LDRiNC10Lw= 135999
+IGthcsWfxLFsYcWf 136000
+INeU16nXoNeZ150= 136001
+IMSQxrDhu51uZw== 136002
+IHRyw7o= 136003
+INGA0LDQt9C70LjRh9C90YvRhQ== 136004
+INin2YTYtNmH2LE= 136005
+INec16LXldec150= 136006
+2K3YrNix 136007
+IMSR4buV 136008
+IOydmO2VtA== 136009
+4Lia4LmI4Lit4Lii 136010
+INeU15nXnNeT 136011
+44Go44Gq44Gj44Gf 136012
+INeX15XXldeq 136013
+INep15nXqNeV16rXmQ== 136014
+xIVjeQ== 136015
+2LPYsdmK 136016
+S8Sw 136017
+16TXoNeV 136018
+0YHRgtGA0YPQutGC0YPRgA== 136019
+0YLRgNGD0LQ= 136020
+INeU16fXqA== 136021
+INeU16fXqNeV15E= 136022
+IHRo4bqtbQ== 136023
+6IGe44GN 136024
+2YLZiNmK 136025
+0LrQu9GO0YfQtdC9 136026
+0YLQtdGF 136027
+0YLQtdGF0L3QvtC70L7Qsw== 136028
+6KGM44Gj44Gf 136029
+INeV15DXmdef 136030
+IMWfZWtsaW4= 136031
+IMWfZWtsaW5kZQ== 136032
+csO0 136033
+0YDQvtCz 136034
+INC90L7QstGL0LU= 136035
+INeh15HXmdeR 136036
+IHRlY25vbG9nw61h 136037
+16HXmw== 136038
+16HXm9eV150= 136039
+IMWedWI= 136040
+IMWedWJhdA== 136041
+INeU157XnNeQ 136042
+IHd5cG9z 136043
+IHd5cG9zYcW8 136044
+44Gv5L2V 136045
+44Ks44Oz 136046
+6rCW 136047
+INC60LDQutC40LU= 136048
+IMOnb2N1a2xhcg== 136049
+INec16bXkw== 136050
+IGthecSxdA== 136051
+INC80LXRgdGC0LU= 136052
+2YXYr9mK2YbYqQ== 136053
+INeb15I= 136054
+INeb15LXldef 136055
+44GX44Gm44KL 136056
+INmF2KfZitmI 136057
+44Gj44Gm44GX44G+44Gj44Gf 136058
+INC/0YDQvtCz0YDQsNC80LzRiw== 136059
+4LmB4Lil4LiZ4LiU4LmM 136060
+44Ov44Kk 136061
+16LXqNeV16U= 136062
+0YHQuNC0 136063
+IELDtnlsZQ== 136064
+IOyymOydjA== 136065
+INeq16TXp9eZ15M= 136066
+IFRyw6pu 136067
+7YOI 136068
+INCg0L7RgdGB0LjQuQ== 136069
+INCg0L7RgdGB0LjQudGB0LrQvtC5 136070
+IHPDoG4= 136071
+IHLDqGdsZQ== 136072
+IHlha2xhxZ/EsWs= 136073
+4LmA4Lil4Li04LiB 136074
+INiv2KfYptmF 136075
+INeV15I= 136076
+2KfYqNix 136077
+IGLDqA== 136078
+INin2YTZgtiv2YU= 136079
+INGA0LXRiNC10L3QuNGP 136080
+aGnDqm4= 136081
+0YLQuNC6 136082
+xIQ= 136083
+4Lia4Lij4Lij4Lii4Liy4LiB 136084
+4Lia4Lij4Lij4Lii4Liy4LiB4Liy4Lio 136085
+16jXpteV158= 136086
+5YuV44GN 136087
+IEfDpHN0ZQ== 136088
+IOq4sOuzuA== 136089
+INmK2LnYsdmB 136090
+IFPhu60= 136091
+Z8WCxJli 136092
+4LmA4Lit4Liq 136093
+15DXnteZ158= 136094
+INC/0YPQvdC6 136095
+INC/0YPQvdC60YI= 136096
+INeZ15XXk9ei15nXnQ== 136097
+44Kr44Op44O8 136098
+INeR16HXk9eo 136099
+IGJ14buTbg== 136100
+0LnRgg== 136101
+0LnRgtC10YHRjA== 136102
+44KS5rGC44KB 136103
+INeQ16rXm9ed 136104
+IOuqqOultA== 136105
+2LjYsdmI2YE= 136106
+0YfQtdGB0YLQstC+ 136107
+7Ja07ISc 136108
+INC+0LTQvdCw 136109
+IGthcMSx 136110
+IOuFuOugpQ== 136111
+IEvDvGNoZQ== 136112
+INin2YTYqti0 136113
+2LfZitio 136114
+IO2Kue2eiA== 136115
+INCy0YvQv9GD0YE= 136116
+INCy0YvQv9GD0YHQug== 136117
+15PXqteZ 136118
+IHXEnw== 136119
+IHXEn3Jh 136120
+2KfYptmH2Kc= 136121
+IHRob8OhdA== 136122
+44Gq44KC44Gu 136123
+0ZHRgA== 136124
+6riw6rCA 136125
+IGdlbGnFn21l 136126
+2KrYrdmC 136127
+2KrYrdmC2YI= 136128
+INC+0L/QsNGB 136129
+0LHRgNC+0YE= 136130
+4Lir4Li4 136131
+4Lir4Li44LmJ4LiZ 136132
+7LyA 136133
+44K544Oe 136134
+44K544Oe44Ob 136135
+2KPZgdix 136136
+2KPZgdix2KfYrw== 136137
+IFRo4buxYw== 136138
+IHRo4bqv 136139
+44Oq44Oz44Kv 136140
+IG5p4buBbQ== 136141
+IEjDtmhl 136142
+2LnZhdin2LE= 136143
+2YPZiNix2YjZhg== 136144
+2YPZiNix2YjZhtin 136145
+IMSQ4bq/bg== 136146
+INGB0LDQvNC+0Lw= 136147
+INGC0LXQu9C1 136148
+IMSRb8Ohbg== 136149
+4LiE4Lin4Liy4Lih4LiE4Li04LiU4LmA4Lir4LmH4LiZ 136150
+INC00LjRgdC6 136151
+2KPYt9mB2KfZhA== 136152
+4Lih4Liy4Lij4LmM 136153
+4LiX4Lir4Liy4Lij 136154
+4LiX4LiZ 136155
+INio2LnZitiv 136156
+INin2YTZh9mG2K8= 136157
+5Ye644GX44Gm 136158
+IGthcmRl 136159
+IGthcmRlxZ8= 136160
+15TXmdeh15jXldeo 136161
+15TXmdeh15jXldeo15nXlA== 136162
+6YG444Gz 136163
+2LnYp9mF2YQ= 136164
+4LiC4Lii4Liy4Lii 136165
+IHTDvHJs 136166
+IHTDvHJsw7w= 136167
+IOydvOydtA== 136168
+IG1hdMOpcmlh 136169
+INeb15zXldee16g= 136170
+44OB44Oj44O8 136171
+2KzZhdin2LnYqQ== 136172
+INGB0LLQvtC40Lw= 136173
+2KXZgtin2YXYqQ== 136174
+5L6L44GI44Gw 136175
+2LPYp9io 136176
+2KLYrtix 136177
+2YLYr9mK2LE= 136178
+15DXnteZ 136179
+7Ja7 136180
+INeg15XXodek16o= 136181
+INCS0LvQsNC0 136182
+INCS0LvQsNC00LjQvA== 136183
+INCS0LvQsNC00LjQvNC40YA= 136184
+IGVzdGFyw6E= 136185
+44GT44GG44GE44GG 136186
+44KS5L2/55So 136187
+4Lih4Liy4LiV4Lij 136188
+4Lih4Liy4LiV4Lij4LiQ4Liy4LiZ 136189
+44Gj44G9 136190
+IG7Dug== 136191
+IG7Dumk= 136192
+4Lii4Liy4LiH 136193
+INin2YTYrNmG2LM= 136194
+IMO8c3TDvG4= 136195
+65y7 136196
+44K744Or 136197
+44Gm44GE44GN44G+44GZ 136198
+INeX15XXlg== 136199
+INeX15XXlteo 136200
+INCT0LvQsNCy 136201
+4LmC4LiK4LiE 136202
+7Y+Q 136203
+2YbYqti42LE= 136204
+INeS15HXmQ== 136205
+2LnZgtio 136206
+aW50w6ly 136207
+aW50w6lyw6p0 136208
+157XpNeS 136209
+157XpNeS16k= 136210
+IHRow7k= 136211
+2KfZgdiq 136212
+INee16nXpA== 136213
+INee16nXpNeY15k= 136214
+INmF2YjYp9mC2Lk= 136215
+6Kaa 136216
+6Kaa44GI 136217
+15PXmdef 136218
+4LmA4Lij4Li34LmI4Lit4LiH4Lij4Liy4Lin 136219
+44G+44GC 136220
+IGdo4bq/ 136221
+0LjRgNGD0Y7Rgg== 136222
+4LiB4Lin 136223
+4LiB4Lin4LmJ4Liy4LiH 136224
+INC/0L7QstC10YA= 136225
+INC/0L7QstC10YDRhQ== 136226
+INC/0L7QstC10YDRhdC90L7RgdGC 136227
+16DXk9eo 136228
+INC60L7QvdGG0LU= 136229
+INC00L7Qu9C20L3QsA== 136230
+INeZ16nXmdeo 136231
+YWNhxJ/EsXo= 136232
+7JeU 136233
+IG7DrXZlbA== 136234
+IMO2cg== 136235
+IMO2cm5law== 136236
+2YPZgQ== 136237
+INCk0LXQtNC10YDQsNGG0LjQuA== 136238
+IOq1rOyEsQ== 136239
+4Lir4Lix4Lin4LmD4LiI 136240
+IFbhuq15 136241
+0LzQtdC0 136242
+0LzQtdC00Lg= 136243
+0LzQtdC00LjRhtC40L0= 136244
+0LzQtdC00LjRhtC40L3RgdC6 136245
+2KfYstmK 136246
+15LXkdeV15w= 136247
+0YTRgA== 136248
+IHp1c8OkdHpsaWNo 136249
+4LiB4LiB 136250
+INin2YTYp9mC2KrYtdin2K/Zitip 136251
+IGjDqA== 136252
+bHXEn3Vu 136253
+2KzZjg== 136254
+4LmE4Lif4Lil4LmM 136255
+xJBU 136256
+44Gd44Gu5LuW 136257
+4LiX4Li04LmJ4LiH 136258
+INin2YTYo9mI 136259
+2LHYs9mF 136260
+5rCX44Gl 136261
+7J2066mw 136262
+0YzQtdCy 136263
+2LXYtw== 136264
+INin2YTYp9iz2KrYqw== 136265
+INin2YTYp9iz2KrYq9mF2KfYsQ== 136266
+4Lit4Liy4LiE4Liy4Lij 136267
+INGC0L7Rh9C90L4= 136268
+IFbDom4= 136269
+4Lit4Lij 136270
+4Lit4Lij4LmI4Lit4Lii 136271
+INin2YTYs9mG2Kk= 136272
+IGPGsOG7m2k= 136273
+15nXlNef 136274
+7Y28 136275
+6Kmx44GX 136276
+4peL 136277
+IOyViuydgA== 136278
+44Oh44O844I= 136279
+44Oh44O844Kr 136280
+44Oh44O844Kr44O8 136281
+INGC0LXQv9C70L4= 136282
+5b2844KJ 136283
+IMSweg== 136284
+IMSwem1pcg== 136285
+7ZmN 136286
+IHLGsOG7ow== 136287
+IHLGsOG7o3U= 136288
+5oCd44GE5Ye6 136289
+IFBo4bqhbQ== 136290
+IGNow6F1 136291
+16bXmdeV16o= 136292
+IOydvOuzuA== 136293
+7IKs64qU 136294
+INGB0L7Qt9C00LDQvQ== 136295
+IGFyYWPEsQ== 136296
+INei16g= 136297
+INei16jXmdeb15Q= 136298
+IO2VmOuCmOuLmOydmA== 136299
+ZHppxYI= 136300
+4Lib4Lij4Liw4LiY4Liy4LiZ 136301
+IHNlcsOtYQ== 136302
+IOyeiOuPhOuhnQ== 136303
+2K/Ysdis 136304
+7ZWc64uk64qU 136305
+4Lit4Liy4LiX 136306
+4Lit4Liy4LiX4Li04LiV 136307
+4Lit4Liy4LiX4Li04LiV4Lii4LmM 136308
+0YLQtdC70YzQvdGL0Lk= 136309
+INiu2K/Zhdin2Ko= 136310
+157XoNeY 136311
+IGzGsOG7o2M= 136312
+IFPDoGk= 136313
+INmI2KfYtg== 136314
+INmI2KfYttit 136315
+2LrYp9iy 136316
+IGRvxJ9hbA== 136317
+INeR16nXnQ== 136318
+INC00LvQuNC9 136319
+INil2LfYp9ix 136320
+INeR16HXpNeo 136321
+44KS5LiO 136322
+44KS5LiO44GI 136323
+IOuyleuloA== 136324
+INGD0LLQtdC70Lg= 136325
+INGD0LLQtdC70LjRh9C4 136326
+4Liq4LmE4LiV 136327
+4Liq4LmE4LiV4Lil4LmM 136328
+4LmE4LiB4Lil 136329
+15HXl9ef 136330
+IOydtO2bhA== 136331
+IG11bmlj 136332
+IG11bmljw61waW8= 136333
+2KrZhdir2YQ= 136334
+IMSRw6Fv 136335
+SMO0dGVs 136336
+IGzhu61h 136337
+IMSR4bqzbmc= 136338
+0YfQutC4 136339
+2LTYsdmI 136340
+2LTYsdmI2Lc= 136341
+IOydtOulvA== 136342
+2YrZi9in 136343
+157XnNea 136344
+157XlNeZ16jXldeq 136345
+INC+0LHRj9C30LDRgtC10LvRjA== 136346
+INC+0LHRj9C30LDRgtC10LvRjNC90L4= 136347
+w6luZXJnaWU= 136348
+IG11ZGFuw6dh 136349
+IG3hu6U= 136350
+IG3hu6Vu 136351
+IG7Cug== 136352
+INin2YTYqti52Kc= 136353
+INin2YTYqti52KfZiNmG 136354
+INin2YTYp9is2KrZhdin2LnZitip 136355
+INC/0LvQsNGB0YI= 136356
+IOuTseydmA== 136357
+44OQ44Kk44Kv 136358
+2YfYrNmI2YU= 136359
+IFNhw7pkZQ== 136360
+IOykkeyalO2VnA== 136361
+INeU16bXmdeR15XXqA== 136362
+16rXp9ef 136363
+INin2YTYudin2YTZhdmK 136364
+INCx0L7Qu9GM0YjQvtC5 136365
+INmD2YTZhQ== 136366
+INmD2YTZhdip 136367
+44Gu44Gn44Gv44Gq44GE44Gn44GX44KH44GG44GL 136368
+INmF2KjYp9ix2KfYqQ== 136369
+INep15DXoA== 136370
+INep15DXoNeX16DXlQ== 136371
+44K544K/44Kk44Or 136372
+IFNhxJ8= 136373
+IFNhxJ9sxLFr 136374
+IGjGsA== 136375
+16DXl9eU 136376
+INeR16fXqNeR 136377
+2LfYudmF 136378
+4Lir4Li04LiZ 136379
+4LiX4Li44LiB4Lin4Lix4LiZ 136380
+4LiE4Lij4Lix4LmJ4LiH4LiX4Li14LmI 136381
+IGzDoG5o 136382
+IGRvbm7DqQ== 136383
+44Gb44GE 136384
+2KzYstmK2LHYqQ== 136385
+0LTQvtGA0L7Qtg== 136386
+7Lyc 136387
+2KrZhti42YrZgQ== 136388
+44OB44On 136389
+IGFsZMSxxJ/EsQ== 136390
+2KzYp9is 136391
+INGC0L7QvNGD 136392
+4Lib4Li0 136393
+INeR16jXqdeq 136394
+44GP44Gq44KK44G+44GZ 136395
+INC/0YDQuNC90YbQuNC/ 136396
+INeX15zXlQ== 136397
+64+8 136398
+15XXktep 136399
+2LPYsw== 136400
+4Lib4Li5 136401
+IGjhuqd1 136402
+5oSf44GY44KL 136403
+77y0 136404
+2K/ZiNin 136405
+INGB0LzQvtCz 136406
+c2NyacOnw6Nv 136407
+IHRo4bqtbg== 136408
+INeo15XXkNeU 136409
+0L7QsdGA0LDQttC10L0= 136410
+INin2YTYqtis2KfYsdmK2Kk= 136411
+2LfYqNmK2Lk= 136412
+asSFY8SF 136413
+7ZaJ7JyE 136414
+INC90L7QstGL0Lk= 136415
+INee15fXk9ep 136416
+5oyv44KK 136417
+Z3XDqQ== 136418
+INeQ15nXqNeV16I= 136419
+INeQ15nXqNeV16LXmded 136420
+INin2YTYsNmH2Kg= 136421
+15PXkA== 136422
+2KrYp9mG 136423
+44Gg44GX 136424
+4Lit4Lix4LiV4Lij4Liy 136425
+4LmC4LiI 136426
+2KjZhNin2K8= 136427
+15TXmdeZ16DXlQ== 136428
+INGB0L/QtQ== 136429
+INGB0L/QtdGG0LjQsNC70YzQvdC+ 136430
+IMWbd2lhdGE= 136431
+44KT44Gn44GZ44KI 136432
+2LTYsdmD2Kk= 136433
+IHDFgnl0 136434
+IHNpdHXDqQ== 136435
+INeb15DXnNeU 136436
+16HXkdeo 136437
+IGthxbxk 136438
+IGthxbxkeW0= 136439
+44KS5oyB44Gk 136440
+15zXlNec 136441
+15zXlNec158= 136442
+IHfFgmFz 136443
+IHfFgmFzbmU= 136444
+IHNhxJ9sYW4= 136445
+157Xotec15Q= 136446
+INin2YTYp9mI2YQ= 136447
+7JeQ7ISc64+E 136448
+15DXmdeo15XXpNeU 136449
+2KrZgtmG2YrYqQ== 136450
+2YXYp9im 136451
+2YXYp9im2Kk= 136452
+IGNvbXBhw7HDrWE= 136453
+IHPDvHJlaw== 136454
+IHPDvHJla2xp 136455
+INC40YHQutGD0YE= 136456
+INC40YHQutGD0YHRgdGC0LI= 136457
+IELDvHJnZXI= 136458
+16rXl9eo 136459
+16rXl9eo15XXqg== 136460
+4Lie4Lij4LmJ4Lit4Lih4LiB4Lix4Lia 136461
+2LTZhQ== 136462
+4LiW4Li34Lit4Lin4LmI4Liy 136463
+6L6844KA 136464
+5LyR44G/ 136465
+INin2YTYo9io 136466
+INGB0YLQvtC40LzQvtGB0YLRjA== 136467
+INC/0YDQsNCy0LA= 136468
+bWF5xLFu 136469
+4Lir4Lin4Lii 136470
+INin2YTYt9io2YrYudmK 136471
+4LiX4Li14LmI4Lie4Lix4LiB 136472
+IEVzdMOh 136473
+0YvQstCw0Y7Rgg== 136474
+2KjYs9mK 136475
+2KjYs9mK2Lc= 136476
+INeR16LXkdeo 136477
+5Y+v6IO944Gn44GZ 136478
+INeT15XXnA== 136479
+INeT15XXnNeo 136480
+2YfZjtin 136481
+0LLQvtGA0L7Rgg== 136482
+44Gm44GE44G+44GX44Gf 136483
+4LmC4LiX4Lij4Lio 136484
+4LmC4LiX4Lij4Lio4Lix 136485
+4LmC4LiX4Lij4Lio4Lix4Lie 136486
+4LmC4LiX4Lij4Lio4Lix4Lie4LiX4LmM 136487
+INen16A= 136488
+INin2YTYq9mG 136489
+INin2YTYq9mG2KfYptmK2Kk= 136490
+IGNvw7t0 136491
+4LiV4Li04LiU4LiV4Lix4LmJ4LiH 136492
+IMO2cmc= 136493
+IMO2cmfDvHQ= 136494
+INin2YTYrtmE2Yo= 136495
+INin2YTYrtmE2YrYrA== 136496
+IGLhu41u 136497
+15XXnNeV15LXmQ== 136498
+656c 136499
+INCR0L7Qu9GM 136500
+INCR0L7Qu9GM0Yg= 136501
+15LXkdeo15nXnQ== 136502
+2YLZitiv 136503
+15HXmdeY15XXmQ== 136504
+5omT44Gh 136505
+IG9sbXXFnw== 136506
+ZsOkaA== 136507
+ZsOkaGln 136508
+4Lil4Liy4LiZ 136509
+INmC2LfYsQ== 136510
+16nXpNeU 136511
+6Kqt44KT44Gn 136512
+4LiC4Lin4Liy 136513
+IGNoaeG6v20= 136514
+44Kk44Oz44K/ 136515
+44Kk44Oz44K/44O844M= 136516
+44Kk44Oz44K/44O844ON 136517
+44Kk44Oz44K/44O844ON44OD44OI 136518
+INec16nXnteV16g= 136519
+INiq2LHZgw== 136520
+INiq2LHZg9mK2Kc= 136521
+16jXldeY 136522
+44Go5oCd44GE44G+44GX44Gf 136523
+INin2YTYqtmC 136524
+IGTGsA== 136525
+44Gm44GP44KM44KL 136526
+44GX44Gf44GT44Go 136527
+IHLDs8W8bmU= 136528
+INin2YTYt9mB2YQ= 136529
+IFBvc3TDqQ== 136530
+INee16nXlded 136531
+0Y3RgA== 136532
+INGA0LDQsdC+0YLQsNC10YI= 136533
+44K344Oq 136534
+44K344Oq44O844K6 136535
+INeR15TXl9ec15g= 136536
+16fXlNeZ15zXlA== 136537
+44Kr44Oh 136538
+44Kr44Oh44Op 136539
+77yv 136540
+IOyCrOydtA== 136541
+IGvDrA== 136542
+IHRoxrDhu5tj 136543
+2LbYqNi3 136544
+2YLYqNmI2YQ= 136545
+5Yil44Gu 136546
+IHBhcnRpY3VsacOocmU= 136547
+INGB0LLQvtC10Lw= 136548
+INei16HXpw== 136549
+INei16HXp9eZ150= 136550
+15HXl9eZ16jXldeq 136551
+15HXmdeg15U= 136552
+4LiL4Lit 136553
+INei15XXkdeo 136554
+44Gg44Gj44Gf44Gu44Gn 136555
+xLFsZMSxxJ/EsQ== 136556
+2YXYr9in2LE= 136557
+2YXYr9in2LHYsw== 136558
+7KO87Iuc 136559
+4Lit4Liy4Lio 136560
+4Lit4Liy4Lio4Lix4Lii 136561
+IHThuqVt 136562
+4Lie4Li04LiI 136563
+4Lie4Li04LiI4Liy4Lij 136564
+4Lie4Li04LiI4Liy4Lij4LiT4Liy 136565
+0YLQtdC70YzQvdGL0LU= 136566
+0YHQutGD0Y4= 136567
+0JzQmA== 136568
+4LmA4LiB4Liy 136569
+4LmA4LiB4Liy4Lir4Lil 136570
+4LmA4LiB4Liy4Lir4Lil4Li1 136571
+15PXlw== 136572
+4LmA4LiK4Li04LiH 136573
+INiv2YLZitmC2Kk= 136574
+7ZWZ7IOd 136575
+INep15DXnNeU 136576
+IGNvbnRyw7RsZQ== 136577
+IHNpdHVhw6fDo28= 136578
+4LiC4Lit4LiH4Lic4Li54LmJ 136579
+2YbYt9mC 136580
+6rO87ZWZ 136581
+4Lir4Lil4Liy4Lii4LiE4LiZ 136582
+IG7huq9uZw== 136583
+2YLZjw== 136584
+7KGw6rG0 136585
+0ZU= 136586
+44OD44Go 136587
+157Xmdec15Q= 136588
+R3LDvG4= 136589
+15nXmdei 136590
+15nXmdei15XXpQ== 136591
+157XoNeb 136592
+662Q 136593
+157Xotee15M= 136594
+4Liq4Liz4LiZ4Lix4LiB 136595
+2KzYr9iv 136596
+4LiE4Lix4LiU 136597
+INeU157Xqdek 136598
+INeU157Xqdek15fXlA== 136599
+157Xqden15w= 136600
+2YTZjw== 136601
+IHR5dHU= 136602
+IHR5dHXFgg== 136603
+0YjQtdC5 136604
+IOydvOu2gA== 136605
+0YjQtdC90LjQtQ== 136606
+IHBow7NuZw== 136607
+IOyXreyCrA== 136608
+44Kr44Oz 136609
+IHTDumk= 136610
+INmG2YjZgQ== 136611
+INmG2YjZgdmF2KjYsQ== 136612
+Z3LDvG4= 136613
+INin2YTYtNmF2KfZhA== 136614
+xZt3aWFkYw== 136615
+xZt3aWFkY3plbmll 136616
+16LXqNeU 136617
+INei15XXkQ== 136618
+INei15XXkdeT15nXnQ== 136619
+15PXldeS157XkA== 136620
+5LuK44Gv 136621
+IHbDo28= 136622
+INCi0LXQvA== 136623
+0YHQuNC70Yw= 136624
+IGNo4buj 136625
+2YXYsdin 136626
+2YXYsdin2YLYqA== 136627
+4LmE4Lih4LmI4Lij4Li54LmJ 136628
+INix2KfYpti5 136629
+15DXoNeX16DXlQ== 136630
+4Liq4LmI4LiH4LmA4Liq4Lij4Li04Lih 136631
+16bXlw== 136632
+IOyeiOyWtOyEnA== 136633
+IGt1cnVsdQ== 136634
+IGt1cnVsdcWf 136635
+IMOWemVsbGlr 136636
+IMOWemVsbGlrbGU= 136637
+INeq15nXpw== 136638
+IGdow6k= 136639
+IHNwcnrEmQ== 136640
+IHNwcnrEmXQ= 136641
+16LXqNeV16o= 136642
+2LHYp9it2Kk= 136643
+44Gj44GN 136644
+44Gj44GN44KK 136645
+IOyVhOuemA== 136646
+c3RpdHVpw6fDo28= 136647
+INC00L7Qu9C20L3Qvg== 136648
+15TXqNep 136649
+15TXqNep157XlA== 136650
+15TXnNea 136651
+44Gh44Gq 136652
+44Gh44Gq44G/ 136653
+44Gh44Gq44G/44Gr 136654
+16TXl9eT 136655
+INin2YTYrNmF2YrYuQ== 136656
+15HXotec15k= 136657
+IHRyw7luZw== 136658
+INek16rXlw== 136659
+157XnNeX157Xqg== 136660
+44OG44O844M= 136661
+44OG44O844Oe 136662
+2YXYqtin2Kg= 136663
+2YXYqtin2KjYudip 136664
+IOuqqOyKtQ== 136665
+2YrYtQ== 136666
+5ZCI44GG 136667
+IFlhcA== 136668
+IFlhcMSx 136669
+INGB0LrQsNC30LDRgtGM 136670
+66qw 136671
+4LiX4Li14LmI4Liq4Liz4LiE4Lix4LiN 136672
+IOyXhuyKteuLiOuLpA== 136673
+IG5o4bqvYw== 136674
+IMO8bGtlbGVy 136675
+INC80L3QvtCz0LjQtQ== 136676
+7ZWY7IWo 136677
+4Lih4Liy4LiB4LiX4Li14LmI4Liq4Li44LiU 136678
+4LiB4LmJ4Liy 136679
+4LiB4LmJ4Liy4Lin 136680
+IMSweWk= 136681
+0LvQtdC2 136682
+0LvQtdC20LA= 136683
+44K444On 136684
+4LiX4Lix4Lie 136685
+2KfZiNix 136686
+INeX15HXqNeZ 136687
+INec16nXnQ== 136688
+7LKr 136689
+IFThu60= 136690
+157Xldeg15k= 136691
+2YLZiNiv 136692
+4LiB4Lij4Liw4LmA4Lib 136693
+4LiB4Lij4Liw4LmA4Lib4LmL 136694
+4LiB4Lij4Liw4LmA4Lib4LmL4Liy 136695
+INC/0YDQvtCx0LvQtdC80Ys= 136696
+IGHDp8Sxcw== 136697
+IGHDp8Sxc8SxbmRhbg== 136698
+INeU157Xmw== 136699
+INmF2LnYuNmF 136700
+2YLZitin2LM= 136701
+INC/0YDQvtC00L7Qu9C2 136702
+INC/0YDQvtC00L7Qu9C20LA= 136703
+IHZlcmRpxJ9p 136704
+INC/0YDQtdC00LzQtdGC 136705
+44GE44G+44GZ44GM 136706
+IOuUsOuluA== 136707
+INin2YTZgtmK2KfZhQ== 136708
+INil2YTZitmH2Kc= 136709
+0KLQkA== 136710
+0L/QvtC3 136711
+44K344Ol 136712
+5LiK44GM44KK 136713
+4LmA4LiU4Li04Lih4Lie4Lix4LiZ 136714
+4LiB4Li44Lil 136715
+2K3YsdmK2Kk= 136716
+16fXkdeV16bXldeq 136717
+66+/ 136718
+INin2YTZhdmG2Kc= 136719
+INin2YTZhdmG2KfYt9mC 136720
+INCy0YvQv9C+0Ls= 136721
+INCy0YvQv9C+0LvQvdGP 136722
+44OL44Ki 136723
+IOqysOq1rQ== 136724
+15fXldee 136725
+15fXldee16jXmded 136726
+INCj0LrRgNCw0LjQvdGL 136727
+4Lir4Lit4Lih 136728
+16jXmdeh 136729
+INGF0L7RgtC10Ls= 136730
+INC+0LHRgNCw0LfQvtCy0LDQvdC40Y8= 136731
+IGto4bqzbmc= 136732
+IG3GsGE= 136733
+IGfDtnJtZQ== 136734
+IGfDvMOnbMO8 136735
+2LPYudmJ 136736
+4Lih4Lix4LmI4LiZ4LmD4LiI 136737
+7ZWY6rKg7Iq164uI64uk 136738
+INC/0L7Qu9GD 136739
+IGbDvG5m 136740
+44Go5oCd44Gj44Gm44GE44G+44GZ 136741
+IOq3uOqyg+ydgA== 136742
+IGTDvMWfw7xuY2U= 136743
+7J6g 136744
+IEjGsOG7m25n 136745
+IFRp4buDdQ== 136746
+IMOnaWZ0 136747
+44GR44Gw 136748
+4LiI4LiZ4LiW4Li24LiH 136749
+4LiX4Liz4LmE4LiU4LmJ 136750
+IOyekOyytA== 136751
+IGTDtQ== 136752
+IGTDtWk= 136753
+4LiI4Lix4LiZ 136754
+4LiI4Lix4LiZ4LiX 136755
+4LiI4Lix4LiZ4LiX4Lij4LmM 136756
+ZWNlxJ9pbmk= 136757
+16DXldei16g= 136758
+2LrYp9ix 136759
+INin2YTYo9mF2LHZitmD2Yo= 136760
+2K/Yp9i52LQ= 136761
+INCx0LXQt9C+0L/QsNGB0L3QvtGB0YLQuA== 136762
+INCx0Y4= 136763
+INCx0Y7QtNC2 136764
+INCx0Y7QtNC20LXRgg== 136765
+44OK44Kk 136766
+4Lie4Lia4Lin4LmI4Liy 136767
+ZGHEnw== 136768
+15DXldek158= 136769
+7ZeM 136770
+44OA44Kk44Ko 136771
+44OA44Kk44Ko44OD44OI 136772
+IOuMgO2GtQ== 136773
+IOuMgO2GteuguQ== 136774
+RMSw 136775
+2KPYrdiv2KfYqw== 136776
+IEHEnw== 136777
+IEHEn3VzdA== 136778
+IEHEn3VzdG9z 136779
+2K3ZhNmI2YQ= 136780
+IHfFmw== 136781
+IHfFm3LDs2Q= 136782
+INGB0L7QvtGC0LLQtdGC 136783
+INGB0L7QvtGC0LLQtdGC0YHRgtCy 136784
+INGB0L7QvtGC0LLQtdGC0YHRgtCy0LjQuA== 136785
+IEx14bqtdA== 136786
+INeb15zXpNeZ 136787
+INCy0LXRiQ== 136788
+INCy0LXRidC10YHRgtCy 136789
+16fXmdel 136790
+INio2YfYsNin 136791
+2LnYp9i0 136792
+4LmA4Lib4LmH4LiZ4LmA4Lij4Li34LmI4Lit4LiH 136793
+0KLQlQ== 136794
+INeR15DXmdeg15jXqNeg15g= 136795
+2LPYudiv 136796
+INeU15jXmdek15XXnA== 136797
+16TXmdeh 136798
+4LiH4LmI4Liy4Lii4LmG 136799
+IEdlcsOkdA== 136800
+15zXmdeT15Q= 136801
+INGA0LjRgdC6 136802
+15zXp9eX 136803
+0L3QvdCw0Y8= 136804
+16jXmdeT 136805
+0L/RgNCw0LrRgtC4 136806
+0L/RgNCw0LrRgtC40Lo= 136807
+4LiC4Lix4LmJ4LiZ4LiV4Lit4LiZ 136808
+4LiZ4LmI4Liy4Lij4Lix4LiB 136809
+bGFyxLFuxLF6xLE= 136810
+4Lit4LiZ4Li44LiN4Liy 136811
+4Lit4LiZ4Li44LiN4Liy4LiV 136812
+IHpkasSZY2lh 136813
+IGLDonk= 136814
+0YHRgA== 136815
+0YHRgNC+0Yc= 136816
+44OL44Oz44Kw 136817
+IMO2bmVy 136818
+IMO2bmVyaQ== 136819
+INC90L7QstGL0YU= 136820
+2K/YudmI2Kk= 136821
+IGfhuq9u 136822
+INin2YTZhNio2YY= 136823
+INin2YTZhNio2YbYp9mG2Yo= 136824
+44OG44Kj44O8 136825
+INi12K3Zitit 136826
+0LXQvNGL0YU= 136827
+55ay44KM 136828
+INC/0YDQvtC40YE= 136829
+INC/0YDQvtC40YHRhdC+0LTQuNGC 136830
+4Liq4LiV4Li0 136831
+IFThur90 136832
+INeU15zXnNeV 136833
+4LmA4Lij4Li34LmI4Lit4LiH4LiZ4Li14LmJ 136834
+157Xkdeg15Q= 136835
+IGNvbnRlw7pkbw== 136836
+INin2K7Yqg== 136837
+INin2K7YqtmK2KfYsQ== 136838
+2YXYs9mE 136839
+2YXYs9mE2LPZhA== 136840
+64+I 136841
+INec15nXkw== 136842
+4Lie4Li04LiY4Li1 136843
+INGB0L7QstGB 136844
+INGB0L7QstGB0LXQvA== 136845
+44GM44GC44KK44G+44GX44Gf 136846
+IHPDs25n 136847
+2KXYtdmE2KfYrQ== 136848
+66eB 136849
+2YHZitix 136850
+IEplxbxlbGk= 136851
+7KCc64+E 136852
+ZMWCdWc= 136853
+7IOB7J2E 136854
+IGPhuq1u 136855
+IGjhu41w 136856
+2KPYs9iq 136857
+2KPYs9iq2KfYsA== 136858
+INee15nXqdeU 136859
+INee15nXqdeU15U= 136860
+IGTDoHk= 136861
+IGNow6BuZw== 136862
+44Gh44KD44KT44Go 136863
+IMSRw6Ft 136864
+IHN3w7Nq 136865
+IHBvZGVyw6E= 136866
+INC+0YLQu9C40YfQsA== 136867
+IHDDqXJpb2Rl 136868
+w7xuZGln 136869
+15jXotef 136870
+0YHRgtGA0L7QuNGC0LXQu9GM 136871
+16jXqteZ 136872
+INeZ15TXmdeV 136873
+15zXoQ== 136874
+INin2YTZhdmG2LLZhA== 136875
+4LiZ4Li04LmJ4Lin 136876
+0LjRhNC40LrQsA== 136877
+0LjRhNC40LrQsNGG0Lg= 136878
+8J+YiQ== 136879
+IGFkxLFuYQ== 136880
+44CC44CC44CC 136881
+15DXmdef 136882
+16HXmdeo 136883
+INmK2LnYrw== 136884
+562U44GI 136885
+2KfZhNis2LI= 136886
+2KfZhNis2LLYp9im2LE= 136887
+0LXQvdGM0Lo= 136888
+4Lij4Lir 136889
+4Lij4Lir4Lix4Liq 136890
+IFTDvHJrw6dl 136891
+6r64 136892
+INeZ15XXm9ec 136893
+INep15XXoNeU 136894
+INeR157XpteR 136895
+INC00LXQudGB0YLQstC40YLQtdC70YzQvdC+ 136896
+INio2KPZhtmH 136897
+157Xp9eT 136898
+INeU16nXpw== 136899
+2K7Zitin2LHYp9iq 136900
+IGbEsQ== 136901
+IGbEsXJz 136902
+IGbEsXJzYXQ= 136903
+65GY 136904
+IOyEnOyauA== 136905
+INeU15LXldej 136906
+2LHYudin 136907
+2LHYudin2YrYqQ== 136908
+IEvhur90 136909
+0LrRgdC4 136910
+INGD0YHQu9GD0LPQuA== 136911
+0L3QvtGB0YLQtdC5 136912
+7Jq064+Z 136913
+INC+0LHRitGP 136914
+INC+0LHRitGP0LLQuw== 136915
+0L3QtdC2 136916
+15TXpNea 136917
+INeR16LXmdeg15k= 136918
+64aS 136919
+INC/0YDQvtGG0LXQtA== 136920
+INC/0YDQvtGG0LXQtNGD0YA= 136921
+IGlodGl5 136922
+IGlodGl5YWPEsQ== 136923
+IOuwlOuejQ== 136924
+IOuwlOuejeuLiOuLpA== 136925
+4LiB4Lil4Lix4Lin 136926
+INGB0LvQvtC20L3Qvg== 136927
+16fXmdeZ157Xqg== 136928
+IMSQw6xuaA== 136929
+INmF2YTZgQ== 136930
+IOC5guC4lOC4ouC4oeC4tQ== 136931
+IGthdGvEsQ== 136932
+2KrYrdmI2YrZhA== 136933
+4LmE4Lie 136934
+IEjhu40= 136935
+w7Fl 136936
+INC00L7RhdC+0LQ= 136937
+IHRob+G6o2k= 136938
+7ZWY7Jes7JW8 136939
+44K544Od44O844M= 136940
+44K544Od44O844OE 136941
+IEfDsm4= 136942
+IGvDqA== 136943
+IGvDqG0= 136944
+6YCy44KB 136945
+44K544O844M= 136946
+44K544O844OR 136947
+44K544O844OR44O8 136948
+IGdpw6B1 136949
+INil2LnYp9iv2Kk= 136950
+INec15XXpw== 136951
+INec15XXp9eX 136952
+INGF0L7Rh9C10YI= 136953
+15jXnNeV15U= 136954
+15jXnNeV15XXmdeW 136955
+15jXnNeV15XXmdeW15nXlA== 136956
+IHRodXnhur90 136957
+44Gd44KM44Gn 136958
+IHZhcmTEsQ== 136959
+4LmE4Lij4LmJ 136960
+2LnYqNiv 136961
+IFJlcMO6YmxpY2E= 136962
+44O844K/44O8 136963
+INee15DXldeq 136964
+4LmE4Lib4LmB4Lil4LmJ4Lin 136965
+IHlhcMSxbGFjYWs= 136966
+44K544K/44O844OI 136967
+44G744G8 136968
+IGtvxZ8= 136969
+INC80LDRgtC10YDQuA== 136970
+IHNpw6hjbGU= 136971
+INin2YTZhdiu2KrZhNmB 136972
+INin2YTZhdiu2KrZhNmB2Kk= 136973
+INec16fXqNeQ 136974
+INec16fXqNeQ16o= 136975
+INeU16TXldei15w= 136976
+IHTDsmE= 136977
+IHLGoWk= 136978
+5ZGo44KK 136979
+4Lid4LiZ 136980
+asWbxIc= 136981
+IOyViuydhA== 136982
+2KfZhtiq2YLYp9mE 136983
+65ag 136984
+0LjQstCw0LXRgg== 136985
+44OI44Or 136986
+INin2YTZgdmE2LPYt9mK2YbZitip 136987
+4LiB4Lil4LmI4Liy4Lin4Lin4LmI4Liy 136988
+2KfZg9iq 136989
+IMOWbA== 136990
+INGA0LXRiNC4 136991
+INGA0LXRiNC40Ls= 136992
+INeg15XXodek15XXqg== 136993
+IOygley5mA== 136994
+0LLQu9C10YfQtdC9 136995
+2YXYsdit2YTYqQ== 136996
+IGNvbWXDp2E= 136997
+IHnEsWs= 136998
+7IK0 136999
+4LiY4LiZ4Liy 137000
+4LiY4LiZ4Liy4LiE4Liy4Lij 137001
+4Lit4LiZ4Liy 137002
+4Lit4LiZ4Liy4LiE 137003
+4Lit4LiZ4Liy4LiE4LiV 137004
+IHBlcXVlw7Fh 137005
+5LuV5LqL44KS 137006
+INio2LDZhNmD 137007
+INC90L7QstC+0LPQvg== 137008
+44GX44Gm44GE44Gq44GE 137009
+INin2YTZhdmK2KfZhw== 137010
+4LiB4LmH4LmA4Lib4LmH4LiZ 137011
+INC20YPRgA== 137012
+INC20YPRgNC90LDQuw== 137013
+0LLQtdGB 137014
+2K7Yqtin2LE= 137015
+IOunpOyasA== 137016
+IE3Dow== 137017
+INCw0LLRgtC+0LzQsNGC0Ys= 137018
+2LbYudmB 137019
+INin2YTZgdmD2LE= 137020
+44Gn44GZ44Gu44Gn 137021
+44Oh44Oz44OQ44O8 137022
+INC60YDRg9Cz 137023
+INin2YTYs9mE2LfYqQ== 137024
+4LiE4Lij4Lix4LmJ4LiH4LmB4Lij4LiB 137025
+4LiB4Lij4Liw4LiX4Lij4Lin 137026
+4LiB4Lij4Liw4LiX4Lij4Lin4LiH 137027
+0YbQvtCy 137028
+6ZW344GE 137029
+5aSn44GN44GE 137030
+IGdlw6dtacWf 137031
+7ISx7J20 137032
+INem16jXmdeb15Q= 137033
+INC80L7RiQ== 137034
+INC80L7RidC9 137035
+INen15nXqQ== 137036
+INen15nXqdeV16jXmded 137037
+IE5hc8SxbA== 137038
+0LPRgNCw0L0= 137039
+INee15XXpteo15nXnQ== 137040
+INee16HXldeS 137041
+IHnDvHI= 137042
+IHnDvHLDvHQ= 137043
+INec15fXpteV 137044
+15XWvA== 137045
+IOyeiOyXiOuLpA== 137046
+IHRlcsO2cg== 137047
+IFRoxrDGoW5n 137048
+INmI2YrZhQ== 137049
+INmI2YrZhdmD2YY= 137050
+2KzZiNmG 137051
+INmI2LrZitix2YfYpw== 137052
+157XpNeV 137053
+15LXldeo157Xmded 137054
+15vXkdeZ16k= 137055
+INin2YTZhNi6 137056
+INin2YTZhNi62Kk= 137057
+2LTYsdmD 137058
+INin2YTYsdin2Kg= 137059
+INin2YTYsdin2KjYuQ== 137060
+INC/0YDQtdC6 137061
+INC/0YDQtdC60YDQsNGB 137062
+INC/0YDQtdC60YDQsNGB0L0= 137063
+IGVuZXJnw61h 137064
+16fXk9ee15k= 137065
+44GP44Gq44Gj44Gf 137066
+IMSR4bup 137067
+IMSR4bupYQ== 137068
+U2Vydmk= 137069
+U2VydmnDp28= 137070
+IGthbGTEsXI= 137071
+5YON44GN 137072
+INC+0LTQtdC2 137073
+INC+0LTQtdC20LQ= 137074
+66y87J2E 137075
+44Gd44GG44Gn 137076
+44GM44GC44KM44Gw 137077
+7JmV 137078
+16bXk9en 137079
+IGFydMSxcg== 137080
+IGlsZXRp 137081
+IGlsZXRpxZ9pbQ== 137082
+44KI44GG44Gn 137083
+44OI44O8 137084
+44Ki44OL 137085
+44Ki44OL44Oh 137086
+15jXmdeZ15w= 137087
+44OV44Oq44O8 137088
+44Od44Oz 137089
+0J/RgNC+ 137090
+INi52KfZhNmK2Kk= 137091
+IMO2xJ9yZXQ= 137092
+IMO2xJ9yZXRtZW4= 137093
+INC60LDRh9C10YHRgtCy0LA= 137094
+INeU15jXkdei 137095
+INC30L3QsNGO 137096
+44Gm44GP44KL 137097
+IG3hu6tuZw== 137098
+2YXZiNiq 137099
+16nXldee16g= 137100
+15fXnNeR 137101
+IHd6Z2zEmQ== 137102
+IHd6Z2zEmWR1 137103
+67KI7Ke4 137104
+IHThu5M= 137105
+IHThu5Nu 137106
+44Ov44O844Kv 137107
+IHBvxbx5Y3o= 137108
+IHBvxbx5Y3pr 137109
+15nXldem16jXmded 137110
+2YPYsdmF 137111
+INCz0LDRgA== 137112
+INCz0LDRgNCw0L0= 137113
+INCz0LDRgNCw0L3RgtC4 137114
+4Lil4LmJ4Liy4LiH 137115
+IOyYge2ZlA== 137116
+15jXmdeh 137117
+IHRo4bq7 137118
+IOyeiOuLpOqzoA== 137119
+2KfZhNiq2LI= 137120
+2KfZhNiq2LLYp9mF 137121
+INC90LDRiNC4 137122
+aXPDqWU= 137123
+44GT44KM44KS 137124
+IG3hur0= 137125
+2LbZhA== 137126
+2KjZiNiq 137127
+INeb15vXlA== 137128
+aOG7nw== 137129
+INin2YTYs9mI2LHZitip 137130
+INec16LXldee 137131
+INec16LXldee16o= 137132
+IGJhxZ9hcg== 137133
+IGJhxZ9hcsSxbMSx 137134
+0LXRgdGC0Yw= 137135
+4LiE4Lij4Li1 137136
+4LiE4Lij4Li14Lih 137137
+IOyghOyytA== 137138
+INiz2YrZg9mI2YY= 137139
+INee15PXldei 137140
+IOuVjOusuOydtOuLpA== 137141
+IGPhu6luZw== 137142
+Z2Vyw6R0 137143
+INC80LjRgA== 137144
+INC80LjRgNC1 137145
+INmD2YrZgdmK2Kk= 137146
+INek16jXmNeZ150= 137147
+IGdvxZtjaQ== 137148
+0LjRgtC10YHRjA== 137149
+0YPRiNC60Lg= 137150
+2KTZhdmG 137151
+INeQ15vXnw== 137152
+INin2YTYsdis2YQ= 137153
+IGzhu41j 137154
+4LmA4Lij4Li14Lii4LiB4Lin4LmI4Liy 137155
+44GT44Gu44KI44GG44Gq 137156
+66eM7YG8 137157
+INC/0LXRhw== 137158
+2YjZhNin2Ko= 137159
+IMOceWU= 137160
+bGnEn2luZGU= 137161
+4LiE4Liw4LmB4LiZ 137162
+4LiE4Liw4LmB4LiZ4LiZ 137163
+44KL44GT44Go44Gv 137164
+4Lin4Li04LmA4LiE4Lij 137165
+4Lin4Li04LmA4LiE4Lij4Liy4Liw 137166
+4Lin4Li04LmA4LiE4Lij4Liy4Liw4Lir4LmM 137167
+INCy0L7Qt9C80L7QttC90L7RgdGC0Lg= 137168
+INin2YTZhtiz2KfYoQ== 137169
+44OJ44Op44Oe 137170
+IGfDvGM= 137171
+IGfDvGPDvA== 137172
+IHTGsOG7nW5n 137173
+IGFjb21wYcOxYQ== 137174
+44Kk44Op 137175
+16fXpteR 137176
+IFnDtg== 137177
+IFnDtm5ldA== 137178
+IFnDtm5ldGlt 137179
+4Liq4Lix4Lih4Lic 137180
+4Liq4Lix4Lih4Lic4Lix4Liq 137181
+4LiZ4Liy4Lih 137182
+IMSR4bujaQ== 137183
+4LmB4Lir4LmI4LiH4LiK4Liy4LiV4Li0 137184
+44Gd44KM44Gn44KC 137185
+w6R0aWc= 137186
+16rXlded 137187
+IGJhxZ9sYXQ= 137188
+INCy0YHQtdC5 137189
+16rXmden 137190
+16rXmden15XXnw== 137191
+IE5nw7Q= 137192
+IEdlc2Now6Q= 137193
+IEdlc2Now6RmdHM= 137194
+2KPZhQ== 137195
+2KPZhdix2KfYtg== 137196
+4LmA4LiX4LiE4LiZ 137197
+4LmA4LiX4LiE4LiZ4Li0 137198
+4LmA4LiX4LiE4LiZ4Li04LiE 137199
+INC80LXQvdGM 137200
+INC80LXQvdGM0YjQtQ== 137201
+IMO2bMOn 137202
+IMO2bMOnw7w= 137203
+INmK2KzYudmE 137204
+IMSR4buh 137205
+16nXmdec 137206
+16nXmdec15XXkQ== 137207
+IEdyw7bDn2U= 137208
+INmH2KfYqtmB 137209
+4Lij4LmJ4Liy4LiZ4Lit4Liy4Lir4Liy4Lij 137210
+15TXnNeZ15s= 137211
+15TXnNeZ15vXmQ== 137212
+0LjRgNGD0Y7RiQ== 137213
+6Iul44GE 137214
+IMOWemVs 137215
+44GE44Gf44KJ 137216
+4LiE4Liz4LiW4Liy4Lih 137217
+IHpvc3RhxYJ5 137218
+INeU16HXmdek15XXqA== 137219
+15TXldec 137220
+15TXldec15o= 137221
+4LmA4LiK4LmI4LiZ4LiB4Lix4LiZ 137222
+4LmC4LiG 137223
+4LmC4LiG4Lip 137224
+4LmC4LiG4Lip4LiT4Liy 137225
+15DXqNem15XXqg== 137226
+15LXqNek15k= 137227
+IGFvw7t0 137228
+INmK2LHZitiv 137229
+2KrZiNis 137230
+2KrZiNis2YrZhw== 137231
+INGN0YLQsNC/ 137232
+44K544K/44Oz 137233
+IGtyw7M= 137234
+IGtyw7N0aw== 137235
+44KS5L2/44GG 137236
+7Leo 137237
+6Zai44KP 137238
+4LiU4LmJ4Lin4Lii4LiE4Lin4Liy4Lih 137239
+4LiZ4Liz4LmA4Liq4LiZ4Lit 137240
+IGF5csSxY2E= 137241
+4LiI4LmJ4Liy4LiH 137242
+INGE0L7RgtC+0LPRgNCw0YQ= 137243
+INCy0LXRhw== 137244
+INCy0LXRh9C10YA= 137245
+5Ye644GX44Gf 137246
+INCl0L4= 137247
+INee16jXkteZ16k= 137248
+4LmD4Lir4LmJ4LmA4Lib4LmH4LiZ 137249
+44KS55uu 137250
+44KS55uu5oyH 137251
+15zXnteZ150= 137252
+bsSFxYI= 137253
+INGB0YLQsNC90LQ= 137254
+INGB0YLQsNC90LTQsNGA0YI= 137255
+IFPDvGQ= 137256
+IFTDom0= 137257
+2KfYrtiq2KjYp9ix 137258
+4LmA4LiB4Lit4Lij4LmM 137259
+2YXYs9ix2K0= 137260
+IGJp4buHbg== 137261
+2KjZjw== 137262
+INi12KfZhA== 137263
+INi12KfZhNit 137264
+IFBo4bul 137265
+7Zy0 137266
+44Os44OT44Ol44O8 137267
+IGLhu6VuZw== 137268
+IHLDqWdpbWU= 137269
+INij2LTZh9ix 137270
+INGA0LDQsdC+0YLQvdC40Lo= 137271
+4Lid4Lix4LiZ 137272
+2KfYudiq2YU= 137273
+2KfYudiq2YXYp9iv 137274
+INC30LDQvNC10YI= 137275
+44G+44Gj44Gm 137276
+IGNo4bq3dA== 137277
+5p2l44KL 137278
+INin2YTZgtmI2KfYqg== 137279
+44Gr5YWl44Gj44Gm 137280
+2KrYrdin2YTZgQ== 137281
+2YXYstmK2K8= 137282
+INmK2LXZhA== 137283
+7Je8 137284
+4LmA4LiK4LmH 137285
+4LmA4LiK4LmH4LiE 137286
+IGvhu4s= 137287
+IGvhu4tw 137288
+IOyVhOyngQ== 137289
+15DXoNeS 137290
+INC+0LHQu9Cw0YHRgtGM 137291
+IHBvbW9jxIU= 137292
+INeV16nXnA== 137293
+65Og7KeA 137294
+IEdpw6Ft 137295
+IFN0w7xjaw== 137296
+IGNow6F5 137297
+IOuCmOyYpA== 137298
+16nXmdeY16o= 137299
+157Xk9eo 137300
+157Xk9eo15nXmg== 137301
+IHPDvHJlw6c= 137302
+0LrQstCw 137303
+15HXnNeZ150= 137304
+15TXqteZ 137305
+15TXqteZ15nXl9eh 137306
+2YLYqNin2YQ= 137307
+INeh15XXkg== 137308
+INeh15XXkteZ 137309
+0YHRgtC+0LvRjA== 137310
+5L2V44KC 137311
+15bXm9eV16g= 137312
+6LK344GG 137313
+5a6J44GP 137314
+4LiE4Lij4Lix4LmJ4LiH4LiZ4Li14LmJ 137315
+a8O2cA== 137316
+INGB0LXRgNCy0LjRgQ== 137317
+0L7Rh9C90YvRhQ== 137318
+6rGw656Y 137319
+2KrYo9mD 137320
+2KrYo9mD2YrYrw== 137321
+15PXnNen 137322
+INC/0L7Rh9C10Lw= 137323
+INC/0L7Rh9C10LzRgw== 137324
+0L/QuNGB0LDRgtGM 137325
+15HXqdeo 137326
+IEjDoG5n 137327
+IFTDrG0= 137328
+IHRy4bur 137329
+44K744OD44Kv44K5 137330
+15XXoNeS 137331
+bcSxemRh 137332
+0L/RgdC4 137333
+IOyeiOq4sA== 137334
+IHLDunQ= 137335
+2LLYp9mG 137336
+2KrZhtmI2Lk= 137337
+2YXZgtin 137338
+2YXZgtin2YjZhdip 137339
+INec16bXldeo15o= 137340
+INeR15nXqNeV16nXnNeZ150= 137341
+44O044Kj 137342
+ZWJpbGU= 137343
+ZWJpbGVjZcSfaQ== 137344
+44Om44O844I= 137345
+44Om44O844K2 137346
+44Om44O844K244O8 137347
+44KS5L2c44KL 137348
+0YHQvNC10YA= 137349
+0YHQvNC10YDRgg== 137350
+IOyngQ== 137351
+IOyngeygkQ== 137352
+INCf0LDRgA== 137353
+2K3Yp9i2 137354
+2K3Yp9i22LE= 137355
+2YXZg9in2YE= 137356
+2YXZg9in2YHYrdip 137357
+4Lil4Li04LiZ 137358
+44Gm44GN44Gm 137359
+0YDQvtGB0Ls= 137360
+IMSwxZ90ZQ== 137361
+2YLYtdmK2LE= 137362
+INeR15LXmdec 137363
+INee16rXkNeZ150= 137364
+INeU15fXkw== 137365
+INeU15fXk9ep15Q= 137366
+16jXldei 137367
+IHByb2R1a3TDs3c= 137368
+INmF2LXYr9ix 137369
+0L3QtdGG 137370
+INin2YTYudmF2YTYp9iq 137371
+IMOnxLFrbWE= 137372
+INiv2KjZig== 137373
+16fXmdef 137374
+16rXkNeo 137375
+16rXkNeo15nXmg== 137376
+16DXmdeZ15M= 137377
+2LXYsdin2Lk= 137378
+bMOodmU= 137379
+16bXmdeo 137380
+4LiU4Lix4LiZ 137381
+4LmD4Lir4LmJ4LmE4LiU4LmJ 137382
+44K/44Kk44Og 137383
+IGdp4bqjbmc= 137384
+0KHQnw== 137385
+INin2YTZhdit2YQ= 137386
+INin2YTZhdit2YTZitip 137387
+IFThuqV0 137388
+15zXldeY 137389
+aOG7lQ== 137390
+IGFtw6lyaWM= 137391
+IGFtw6lyaWNhaW4= 137392
+INeR16nXnNeR 137393
+INec15DXldee15k= 137394
+IHBlw6dh 137395
+INGA0LDQt9C90YvRhQ== 137396
+44GE44KL44Go 137397
+44OH44Oz 137398
+16HXp9eo 137399
+INeU157Xl9eZ16g= 137400
+44Go44GE44GG44KC44Gu 137401
+2LHYqtio2Lc= 137402
+INC40YHRgtC+0Yc= 137403
+INC40YHRgtC+0YfQvdC40Lo= 137404
+4Liq4Lih4Lix4LiE4Lij4Liq4Lih4Liy4LiK4Li04LiB 137405
+IOC4l+C4seC5ieC4hw== 137406
+IOC4l+C4seC5ieC4h+C4meC4teC5iQ== 137407
+IFThuq1w 137408
+44Gj44Gm44GE44GG 137409
+INin2YTZiNi12YjZhA== 137410
+IGTDqWNhZGE= 137411
+INC+0YTQvtGA0Lw= 137412
+INC+0YTQvtGA0LzQu9C10L0= 137413
+4Liq4Liz4Lir4Lij4Lix4Lia4LiB4Liy4Lij 137414
+IG9nw7Nsbg== 137415
+44GG44Gh44Gr 137416
+IHbDoXJpYXM= 137417
+44GZ44GO44KL 137418
+2YjZh9in 137419
+4LmC4Lib4Lij4LiU 137420
+INCg0L7RgdGB0LjRjw== 137421
+5Lq644CF 137422
+44GX44Gm44GN44Gf 137423
+IHPEsXJhc8SxbmRh 137424
+IG5nw7Ru 137425
+2LPZhtip 137426
+2KrZhdiq2Lk= 137427
+157Xm9eR15k= 137428
+IG5o4bqlbg== 137429
+16LXnteZ15M= 137430
+4buo 137431
+0LbQuNGC0Yw= 137432
+44KJ44Gb 137433
+Z3LDoWY= 137434
+Z3LDoWZpY2E= 137435
+INmC2YjZhA== 137436
+INmC2YjZhNmH 137437
+64uo7LK0 137438
+4Lir4LmJ4Liy 137439
+4Lir4LmJ4Liy4Lih 137440
+5L2/44Gj44Gm 137441
+16rXmdeR 137442
+16rXmdeR16o= 137443
+aeG7g3U= 137444
+4LmB4LiK4Lih 137445
+4LmB4LiK4Lih4Lib 137446
+4LmB4LiK4Lih4Lib4LmM 137447
+4bqs 137448
+IOuCmOudvA== 137449
+INmF2KjYp9i02LHYqQ== 137450
+IHRyxINt 137451
+2LPZg9mI 137452
+INin2YTYsNmJ 137453
+IGJpw6c= 137454
+IGJpw6dpbQ== 137455
+2KrYsdin2KzYuQ== 137456
+INC+0LHQtdGB0L8= 137457
+INC+0LHQtdGB0L/QtdGH 137458
+INC+0LHQtdGB0L/QtdGH0LjQstCw 137459
+INCy0L7Qt9C00YPRhQ== 137460
+0YvQstCw0YLRjA== 137461
+2YTYrdmC 137462
+IE3DvGTDvA== 137463
+IE3DvGTDvHJs 137464
+IE3DvGTDvHJsw7zEn8O8 137465
+IHlhcHTEsXI= 137466
+INek16jXoQ== 137467
+INek16jXodeV150= 137468
+2LfZiNix 137469
+0YHRgtCy0L7QstCw0YLRjA== 137470
+7J6l7J2E 137471
+4LiX4Li14LmI4LiU4Li14LiX4Li14LmI4Liq4Li44LiU 137472
+4Lit4Lix4Lil 137473
+0YDRjg== 137474
+2YXYs9iq2YLYqNmE 137475
+0YHQu9GD0Yg= 137476
+0YHQu9GD0YjQsA== 137477
+6KqN44KB 137478
+INec15nXng== 137479
+INec15nXnteV15PXmQ== 137480
+16rXqdeV15E= 137481
+16rXqdeV15HXldeq 137482
+IGdlcsOnZWtsZcWfdGlyaWw= 137483
+INin2YTYp9iq2YHYp9mC 137484
+INGD0YDQvtCy0L3QtQ== 137485
+INGC0YDQsNCy 137486
+INeU157Xldef 137487
+2K3Zgdin2Lg= 137488
+INmF2ZA= 137489
+INmF2ZDZhg== 137490
+INmF2ZDZhtmS 137491
+IGRlbcOhcw== 137492
+157XldeW15nXp9eU 137493
+16nXmdeX15Q= 137494
+IGLDug== 137495
+0LDQu9GM0L3Ri9C8 137496
+44KP44Gf 137497
+44KP44Gf44GX 137498
+INin2YTZhdmI2KfYrw== 137499
+16rXm9eg 137500
+16rXm9eg15XXnw== 137501
+44Ot44OD44Kv 137502
+aGnhur91 137503
+INGD0LzQtQ== 137504
+2YXYrdin2YjZhNip 137505
+15DXldep16g= 137506
+INC60L7QvdC60YPRgA== 137507
+INC60L7QvdC60YPRgNGB 137508
+INee15HXlw== 137509
+INee15HXl9eZ16DXqg== 137510
+IGFubGFt 137511
+IGFubGFtxLE= 137512
+IGxp4buHdA== 137513
+INCy0YXQvtC0 137514
+IEjDrG5o 137515
+INmG2Yo= 137516
+INmG2YrZiNiy 137517
+44K444Oj44O8 137518
+15HXmdel 137519
+0YLQtdC70YzQvdGL0YU= 137520
+4LiX4Li44LiB4Lit4Lii4LmI4Liy4LiH 137521
+IGtpxZ9pbmlu 137522
+2KPZg9ir2LE= 137523
+INC40YHRgtC+0YDQuNC4 137524
+IOuzgO2ZlA== 137525
+16TXnNeh15g= 137526
+16TXnNeh15jXmdeg15k= 137527
+INGB0LXRgg== 137528
+INGB0LXRgtC4 137529
+ZMSxxJ/EsW3EsXo= 137530
+7ZWY64+E66Gd 137531
+15TXqA== 137532
+15TXqNeR15Q= 137533
+44GZ44KL44GT44Go44Gv 137534
+IHBoaeG6v3U= 137535
+2KrYrdiz2YrZhg== 137536
+IMWbcm9k 137537
+IMWbcm9kb3c= 137538
+IMWbcm9kb3dpc2s= 137539
+INGA0LDRgdGF0L7QtA== 137540
+2KjYsdmK2K8= 137541
+INix2Yo= 137542
+INix2YrYp9mE 137543
+INeV15vXmg== 137544
+7KeA7JqU 137545
+15vXnteV 137546
+INei15zXmdeU150= 137547
+ZsOtY2lv 137548
+IGthcmFyxLE= 137549
+dMSxxJ/EsW7EsQ== 137550
+INCh0L7Qsg== 137551
+INCh0L7QstC10YI= 137552
+44GK6YeR44KS 137553
+0LzQtdC20LTRgw== 137554
+0LzQtdC20LTRg9C90LA= 137555
+0LzQtdC20LTRg9C90LDRgNC+0LQ= 137556
+0LzQtdC20LTRg9C90LDRgNC+0LTQvQ== 137557
+IG3hu51p 137558
+INin2YTYpdmK2LE= 137559
+INin2YTYpdmK2LHYp9mG2Yo= 137560
+INin2YTYsdmI2LPZig== 137561
+2LXZhtiv 137562
+2LXZhtiv2YjZgg== 137563
+INin2YTYpdmG2KrYsdmG2Ko= 137564
+IHThuq9t 137565
+INGC0LDQutC+0LPQvg== 137566
+INeR15zXldeS 137567
+IMO8Y3JldHM= 137568
+IMO8Y3JldHNpeg== 137569
+15fXlteZ16g= 137570
+7Ja07JW8 137571
+IFBo4bqnbg== 137572
+77yc 137573
+INeY15HXog== 137574
+INeY15HXoteZ 137575
+15DXnteQ 137576
+2KfZgtmE 137577
+IGNvbmRpw6fDtWVz 137578
+2YLYp9iq2YQ= 137579
+INGA0LXQt9GD0LvRjNGC0LDRgtC1 137580
+INGB0LLQvtC40LzQuA== 137581
+16bXkdeZ16I= 137582
+Z8Opbmk= 137583
+IHplcw== 137584
+IHplc3Bv 137585
+IHplc3BvxYI= 137586
+0YjQuNCy 137587
+INek16jXmNeZ15XXqg== 137588
+2YXYs9iq2LTZgQ== 137589
+2YXYs9iq2LTZgdmJ 137590
+2LTYsdi5 137591
+IGtvxZtjaQ== 137592
+INeU15DXmdeg15jXqNeg15g= 137593
+INCn0LXRgA== 137594
+0L/QvtGH0YI= 137595
+IGFjdGl2aXTDqXM= 137596
+55+l44Gj44Gm 137597
+INeR15bXlA== 137598
+IHnDvHpkZW4= 137599
+44Gq44KK44G+44Gb44KT 137600
+IO2YuQ== 137601
+IO2YueydgA== 137602
+INee16nXoNeU 137603
+INCS0LXRgA== 137604
+INeR15DXldeq15U= 137605
+6Z2i55m9 137606
+6Z2i55m944GE 137607
+2LTYsdit 137608
+Z3LDvG5kZQ== 137609
+2YHYtA== 137610
+2YHYtNmE 137611
+IHPDqWpvdXI= 137612
+67SQ 137613
+IHLDtGxl 137614
+2LTYudin2LE= 137615
+0LXQvNGL0LU= 137616
+INin2YTYrNiz2YU= 137617
+0LDQu9GM0L3QvtC1 137618
+IOyDge2DnA== 137619
+77yk 137620
+66+A66Gc 137621
+INmG2YLYtw== 137622
+INmG2YLYt9ip 137623
+44Gd44GG44Gg 137624
+44GZ44KL44Gu44GM 137625
+4Lir4Li5 137626
+IG5o4buL 137627
+IGVjb27Ds21pY2E= 137628
+16HXmNeV15M= 137629
+16HXmNeV15PXoNeY 137630
+4Lih4Li14LmC4Lit4LiB4Liy4Liq 137631
+IGdlc3TDo28= 137632
+4Lij4Li54LmJ4Lin4LmI4Liy 137633
+IGxv4bqhdA== 137634
+INin2YTZhdmP 137635
+INin2YTYrdmF2YQ= 137636
+INin2YTYudmF2YTZitip 137637
+IOqyg+uPhA== 137638
+INCc0L7RgdC60LLQsA== 137639
+16fXmNeV16g= 137640
+INC/0L7QtNGA0L7QsQ== 137641
+INC/0L7QtNGA0L7QsdC9 137642
+IGzGsG5n 137643
+2KrZgdiz 137644
+2KrZgdiz2YrYsQ== 137645
+INin2YTYqNi5 137646
+INin2YTYqNi52LY= 137647
+2KbYqg== 137648
+0JXQnQ== 137649
+7Jew6rWs 137650
+4LmD4Lir4LmJ4LiE4Li44LiT 137651
+44GC44KK44G+44GX44Gf 137652
+IGJpcmth 137653
+IGJpcmthw6c= 137654
+IMSwc2w= 137655
+IMSwc2xhbQ== 137656
+55eb44G/ 137657
+IGjhuqNv 137658
+INC80LDRjw== 137659
+IGnFn8OnaQ== 137660
+16nX 137661
+16nXgQ== 137662
+4LiB4Liy4Lij4LmA4Lih4Li34Lit4LiH 137663
+15XXlNeo 137664
+IGNow7M= 137665
+64aA 137666
+IHlhbmzEsQ== 137667
+IHlhbmzEscWf 137668
+5bm444Gb 137669
+15DXqNeS15XXoNeZ 137670
+4Lit4Liy4LiI4Liy4Lij 137671
+4Lit4Liy4LiI4Liy4Lij4Lii4LmM 137672
+INC40L3RhNC+0YDQvNCw0YbQuNGO 137673
+0JPQng== 137674
+16DXl9ep 137675
+IOyVjOyVhA== 137676
+INGF0LDRgNCw0LrRgtC10YDQuNGB0YI= 137677
+INGF0LDRgNCw0LrRgtC10YDQuNGB0YLQuNC6 137678
+4LiE4Li44LiT4Liq4Liy4Lih4Liy4Lij4LiW 137679
+6KaL44GI44KL 137680
+4LiK4Lix4LiU4LmA4LiI 137681
+4LiK4Lix4LiU4LmA4LiI4LiZ 137682
+IGR6aWHFgmFs 137683
+IGR6aWHFgmFsbm/Fm2Np 137684
+4LmC4Lie4Liq4LiV4LmM 137685
+INCa0L7Quw== 137686
+INmB2YfZig== 137687
+INee16TXoNeZ 137688
+INeU16fXqdeo 137689
+2YXYsdmD 137690
+2YXYsdmD2LI= 137691
+IGhvw6E= 137692
+INCw0L/Qvw== 137693
+INCw0L/Qv9Cw0YDQsNGC 137694
+IHBhbWk= 137695
+IHBhbWnEmQ== 137696
+IHBhbWnEmXRh 137697
+IMOnw7xua8O8 137698
+15PXldef 137699
+44Gv44GT44Gh44KJ 137700
+IE3DoA== 137701
+INmK2YLYr9mF 137702
+INC/0YDQtdC3 137703
+INC/0YDQtdC30LjQtNC10L3Rgg== 137704
+4Lit4Li44LiV 137705
+4Lit4Li44LiV4Liq4Liy 137706
+4Lit4Li44LiV4Liq4Liy4Lir 137707
+4Lit4Li44LiV4Liq4Liy4Lir4LiB4Lij4Lij4Lih 137708
+7KeA7JuQ 137709
+INeQ16TXqdeo15XXqg== 137710
+c2Now7x0 137711
+c2Now7x0eg== 137712
+IFRpw6pu 137713
+IHNhecSxbMSx 137714
+INCz0YDRg9C/0L/Riw== 137715
+0L7Rh9C90YvQuQ== 137716
+INec16LXnteV15M= 137717
+IHdyemXFmw== 137718
+IHdyemXFm25pYQ== 137719
+IMSQ4bqndQ== 137720
+4LmA4LiC4LmJ4Liy4Lij4LmI4Lin4Lih 137721
+bsSxemRh 137722
+2K7Ziti1 137723
+IGfDvG5j 137724
+IGfDvG5jZWw= 137725
+INmE2YfYsNmH 137726
+INmK2LnYqtio2LE= 137727
+bMOpZ2k= 137728
+44KP44GL44KL 137729
+IHLhu6tuZw== 137730
+2LjZhw== 137731
+2LjZh9mI2LE= 137732
+INee15HXmdef 137733
+IOq4sO2DgA== 137734
+5YiH44KM 137735
+bGFubcSxxZ8= 137736
+4LiX4Li14LmI4Lih4Li14LiE4Lin4Liy4Lih 137737
+IGjhu4E= 137738
+2KrZiNis2Yc= 137739
+INin2YTYpdiv2KfYsdip 137740
+IMO6dGls 137741
+16HXpNeV 137742
+4LiE4Lin4Liy4Lih4Lij4Lix4LiB 137743
+4LmC4Liu 137744
+INC/0L7Qu9C40YI= 137745
+INC/0L7Qu9C40YLQuNC6 137746
+IHNhdMSxbg== 137747
+IMWeaW1kaQ== 137748
+157Xldeo15nXnQ== 137749
+7JWY64uk 137750
+15fXldeV 137751
+15fXldeV15nXlA== 137752
+4LiE4Lit4Lih4Lie4Li0 137753
+4LiE4Lit4Lih4Lie4Li04Lin 137754
+4LiE4Lit4Lih4Lie4Li04Lin4LmA4LiV4Lit4Lij4LmM 137755
+INin2LDYpw== 137756
+2KrYrtin2LA= 137757
+44Ko44Or 137758
+IHBvc3NpYmlsaXTDqQ== 137759
+4Lii4Li34LiZ4Lii4Lix4LiZ 137760
+IMO8bml2ZXJz 137761
+IMO8bml2ZXJzaXRl 137762
+INin2YTYr9mI2LHZig== 137763
+IOyViuuKlOuLpA== 137764
+IOyEnOuhnA== 137765
+2K3Yp9mE 137766
+IOuo 137767
+IOuovA== 137768
+IOuovOyggA== 137769
+4LiX4Li14LmI4LiW4Li54LiB 137770
+7Kec 137771
+IHNrw7NyeQ== 137772
+0LvRjNGG 137773
+4LmD4LiK4LmJ4LmA4Lin4Lil4Liy 137774
+15HXp9ep16o= 137775
+INiw2Yg= 137776
+5pel44CF 137777
+INC60L7RgtC+0YDRg9GO 137778
+INGD0YDQvtCy0LXQvdGM 137779
+6rmo 137780
+4LmE4LiX 137781
+44K144OX44Oq 137782
+44K444On44Oz 137783
+44GZ44G544GN 137784
+IEfDs3I= 137785
+44OI44Kk 137786
+44OI44Kk44Os 137787
+IHlhxZ9hbWE= 137788
+IGThu4tw 137789
+IGLhu69h 137790
+4LiL4Li4 137791
+IMO2bMO8bQ== 137792
+44Gj44Gm44GP44KL 137793
+4LiB4Liy4Lij4LiE4LmJ4Liy 137794
+16nXoteo 137795
+INGC0LjQv9Cw 137796
+INCz0LXRgA== 137797
+INCz0LXRgNC+ 137798
+16jXp9ei 137799
+IHV3YcW8 137800
+IHV3YcW8YQ== 137801
+16nXntef 137802
+IGhhc3RhbMSxaw== 137803
+44KP44KM44KL 137804
+YmHFn8Sx 137805
+0YfRgtC+ 137806
+INeR157XqNeb15Y= 137807
+IOyasOumrOydmA== 137808
+INmD2KfZhtmI2Kc= 137809
+INij2KjYsQ== 137810
+INij2KjYsdmK2YQ= 137811
+7Li1 137812
+4LmE4LiC4LmI 137813
+INmI2YTZiA== 137814
+4LiX4Lix4Lin 137815
+4LiX4Lix4Lin4Lij4LmM 137816
+INmI2KPZg9iv 137817
+4LiK4Lin4LiZ 137818
+15zXlden 137819
+5o2o 137820
+5o2o44Gm 137821
+IMSww6dpbg== 137822
+cMOpcmk= 137823
+IHlhbA== 137824
+IHlhbG7EsXo= 137825
+0YzRj9C9 137826
+IGfhuq9uZw== 137827
+4LiB4LmH4Lii4Lix4LiH 137828
+INCj0LrRgNCw0LjQvQ== 137829
+INGB0LDQvNC4 137830
+INC/0YDQvtCy0LXQtNC10L0= 137831
+4LiV4LiB4LmB4LiV4LmI4LiH 137832
+IFF1w6Ju 137833
+w6lwYXJhdGlvbg== 137834
+IGJhxZ/EsW5kYQ== 137835
+IHpuYWxl 137836
+IHpuYWxlxbo= 137837
+IHpuYWxlxbrEhw== 137838
+44Kx44O8 137839
+44OO44O8 137840
+4LiW4Li54LiB4LiV4LmJ4Lit4LiH 137841
+66q4 137842
+IOuPjA== 137843
+IOuPjOyVhA== 137844
+IFNjaMO8bGVy 137845
+INC/0L7QtNCz0L7RgtC+0LI= 137846
+INC/0L7QtNCz0L7RgtC+0LLQug== 137847
+2LnYsdmI 137848
+2LnYsdmI2LY= 137849
+bGHFn3TEsXI= 137850
+INGB0L7RgdGC0LDQstC70Y/QtdGC 137851
+INC/0YDQvtC40LfQstC+0LQ= 137852
+INC/0YDQvtC40LfQstC+0LTRgdGC0LLQsA== 137853
+INC+0YHQvdC+0LLQtQ== 137854
+INi02YXYp9mE 137855
+4LiB4Lij4Li1 137856
+IGfDtnLDvMWfbWU= 137857
+0L7Rh9C10Lo= 137858
+INeX15HXqNeZ150= 137859
+2YXYrtin2Lc= 137860
+2YXYrtin2LfYsQ== 137861
+77yt 137862
+16jXpNeQ 137863
+IE3hurk= 137864
+4Lii4Lit4Lih4Lij4Lix4Lia 137865
+IHbhur90 137866
+2K7YsA== 137867
+INin2YTYqti3 137868
+INin2YTYqti32KjZitmC 137869
+4LiZ4Li24LiB 137870
+INeU15vXoNeh16o= 137871
+INC+0LPRgNCw0L3QuA== 137872
+INC+0LPRgNCw0L3QuNGH0LXQvQ== 137873
+IMOHYWzEscWf 137874
+INin2YTZhdmG2KrYr9mJ 137875
+4LiI4Liz4LiZ4Lin4LiZ4Lih4Liy4LiB 137876
+INGC0L7RgNGA 137877
+INGC0L7RgNGA0LXQvdGC 137878
+IOyCtOyVhA== 137879
+4Lie4Lil4Lix4LiH4LiH4Liy4LiZ 137880
+4LiK4Lix4LiZ 137881
+INCQ0L3QtNGA 137882
+IHLDqWFsaXPDqQ== 137883
+157XqdeQ 137884
+4LmB4LiK 137885
+4LmB4LiK4Lij4LmM 137886
+INCx0L7Qsw== 137887
+4Lih4Liy4LmB4Lil4LmJ4Lin 137888
+INin2YTZhtin2LE= 137889
+IG9sbWFkxLHEn8Sx 137890
+15PXoteU 137891
+INGD0LLQtdGA 137892
+INGD0LLQtdGA0LXQvQ== 137893
+44KL44KC44Gu 137894
+2KPYrw== 137895
+2KPYr9mI2KfYqg== 137896
+INeU15bXldeS 137897
+2KXYudmE2KfZhQ== 137898
+aOG7jw== 137899
+IE7DpGhl 137900
+INGC0LXRgdGC 137901
+INee15XXm9eo 137902
+IOusuOygnOqwgA== 137903
+16rXldem15DXlA== 137904
+bcOz 137905
+bcOzdmVs 137906
+INin2YTYqtis2KfYsdip 137907
+INC80L3QvtCz0LjRhQ== 137908
+0L7QsdGJ0LA= 137909
+INei16HXp9eZ 137910
+IEVkdWNhw6fDo28= 137911
+16fXqdeZ150= 137912
+w6l0YWJs 137913
+w6l0YWJsaXNzZW1lbnQ= 137914
+INC00LXQu9C1 137915
+0LjRgNGD0LXRgtGB0Y8= 137916
+2KLYq9in2LE= 137917
+INeU157XqNeb15bXmQ== 137918
+44OQ44Or 137919
+INCy0YHRgtGA0LXRhw== 137920
+44GS44KL 137921
+IGNpxIU= 137922
+IGNpxIVndQ== 137923
+2YrYs9iq 137924
+4Lig4Liy4Lin 137925
+4Lig4Liy4Lin4Liw 137926
+2KPZhdix 137927
+INC+0LbQuA== 137928
+INC+0LbQuNC00LA= 137929
+IOG7p3k= 137930
+44Oe44Or 137931
+2LHYp9iz 137932
+0L7Rh9C90L7QuQ== 137933
+16rXkteV15HXldeq 137934
+2KrYudix2YrZgQ== 137935
+INGB0L7RhtC40LDQu9GM0L3Qvg== 137936
+44KS6ZaL 137937
+INC40YHRgdC70LXQtNC+0LLQsA== 137938
+IGTDug== 137939
+IGTDunZpZGE= 137940
+IHNrxYI= 137941
+IHNrxYJhZGE= 137942
+IGjDpHVmaWc= 137943
+INCy0YvQsdGA 137944
+INCy0YvQsdGA0LDRgtGM 137945
+44Gu44Gn44Gv44Gq44GE44GL 137946
+INGB0LjQu9GM0L3Qvg== 137947
+0YLQstC10YDQttC00LXQvQ== 137948
+16jXpA== 137949
+16jXpNeV15DXlA== 137950
+5oCd44GE44G+44GZ 137951
+2K3Ysdi1 137952
+16nXldeq16M= 137953
+2YXYs9is2K8= 137954
+4LmC4LiK4Lin4LmM 137955
+0LXQvNGB0Y8= 137956
+0LLRiNC40LU= 137957
+INC80Ls= 137958
+INC80LvQvQ== 137959
+INec15TXkdeZ15A= 137960
+INmK2KrYudmE2YI= 137961
+4LiV4Li54LmJ 137962
+INC/0YDQsNC3 137963
+INC/0YDQsNC30LQ= 137964
+INC/0YDQsNC30LTQvdC40Lo= 137965
+INC90LXQvA== 137966
+INC90LXQvNC90L7Qs9C+ 137967
+IHPDoG5n 137968
+2KrZhtiz2Yo= 137969
+2KrZhtiz2YrZgg== 137970
+IHThu50= 137971
+INC80LXQtNC4 137972
+44Gr5og= 137973
+44Gr5oi7 137974
+4LiE4Lin4LmJ4Liy 137975
+44GL44GR44KL 137976
+15HXnNeV16o= 137977
+INGN0LrRgdC/ 137978
+INGN0LrRgdC/0LXRgNGC 137979
+INC00LXQstGD0Yg= 137980
+INC00LXQstGD0YjQug== 137981
+INit2LU= 137982
+2YbYtNij 137983
+44GM44GC44KL44Gu44Gn 137984
+INiq2LHYp9mF 137985
+INiq2LHYp9mF2Kg= 137986
+2KPYs9mI2KfZgg== 137987
+INec16TXoNeV16o= 137988
+INin77u3 137989
+44Gr44GP 137990
+44Gr44GP44GE 137991
+INij2LnZhNmJ 137992
+INec15TXntep15nXmg== 137993
+csOkdQ== 137994
+16nXnteZ150= 137995
+5YiG44GR 137996
+44GZ44Gn 137997
+44GZ44Gn44Gr 137998
+15TXnNeb15Q= 137999
+15fXnNeZ16M= 138000
+IOyxhQ== 138001
+IOyxheyehA== 138002
+4LmA4LiI4Lij4Li0 138003
+4LmA4LiI4Lij4Li04LiN 138004
+6YGK44Gz 138005
+2KzYs9iv 138006
+4Liq4Liy4LiY 138007
+4Liq4Liy4LiY4Liy4Lij 138008
+4Liq4Liy4LiY4Liy4Lij4LiT 138009
+IGJhc8Sxbg== 138010
+0YDQsNCz 138011
+0LPQsNC0 138012
+IGhvxZ8= 138013
+7ZW1 138014
+15HXl9eZ16jXlA== 138015
+157Xodea 138016
+IOygnO2SiA== 138017
+2KrZhdmI2YrZhA== 138018
+IEzGsHU= 138019
+66Gc67aA7YSw 138020
+INC/0L7QsQ== 138021
+INC/0L7QsdC10LQ= 138022
+2YXZhtiw 138023
+5bi444Gr 138024
+2YLYsw== 138025
+INin2YTZhdi12K/YsQ== 138026
+INmI2KfZhNin2LPYqg== 138027
+IGto4bqvcA== 138028
+INin2YTYrNin2YbYqA== 138029
+IG5ndXnhu4du 138030
+6ZaT6YGV44GE 138031
+INGB0YLRgNCw 138032
+INGB0YLRgNCw0YU= 138033
+INGB0YLRgNCw0YXQvtCy 138034
+4Lij4Li14Lia 138035
+IHjGsMahbmc= 138036
+IOywvg== 138037
+IOywvuyVhA== 138038
+IG5n4bqhaQ== 138039
+0LPQsNC7 138040
+4LiL4Li14LmI 138041
+INeR16TXmdeZ16HXkdeV16c= 138042
+0KbQtdC90YLRgA== 138043
+IGF2YWxpYcOnw6Nv 138044
+IGVjb27Ds21pY28= 138045
+15bXnw== 138046
+INCc0LDQug== 138047
+IGludGVyw6lz 138048
+4LiB4Lil4Li04LmI4LiZ 138049
+0YHRgtGM0Y4= 138050
+IMSRxrDGoW5n 138051
+5by344GP 138052
+IEtow6FjaA== 138053
+4LmA4LiZ4Li34LmJ4Lit4Lir4Liy 138054
+IFlhesSx 138055
+6LK344Gj44Gm 138056
+0KDQlQ== 138057
+4LmA4Lie4Li04LmI4Lih4LiC4Li24LmJ4LiZ 138058
+4Liq4Lih4Lia4Li5 138059
+4Liq4Lih4Lia4Li54Lij4LiT4LmM 138060
+INC80LjRgNC+0LI= 138061
+15LXoNeZ150= 138062
+IMSR4bupYw== 138063
+4Lit4Liy4Lij4LmM 138064
+2LXYp9i1 138065
+44GK44KI 138066
+44GK44KI44Gz 138067
+w6rMiQ== 138068
+INin2YTZhdik2KrZhdix 138069
+INin2YTZhdix2K3ZhNip 138070
+4Liq4Lit4Lia4LiW4Liy4Lih 138071
+IOC4iOC4suC4geC4meC4seC5ieC4mQ== 138072
+INiq2LnYrw== 138073
+44Gd44Gu44Gf44KB 138074
+IGtow6FuZw== 138075
+4LiZ4Li04LiU 138076
+44OK44Oz 138077
+64Sk7JqU 138078
+INin2YTYp9it2Ko= 138079
+INin2YTYp9it2KrZhNin2YQ= 138080
+7JqV 138081
+INC80L7QtNC10LvQuA== 138082
+INC/0YDQvtGG0LXQvdGC 138083
+4Lie4Lin4LiB4LmA4Lij4Liy 138084
+INeU16bXkw== 138085
+INeU16bXk9eT15nXnQ== 138086
+c3TDpG5kZQ== 138087
+16DXkteo 138088
+IGRvdHlj 138089
+IGRvdHljesSF 138090
+IGRvdHljesSFY2U= 138091
+IMWbd2nEmXQ= 138092
+157XqNeU 138093
+44GZ44GU44GE 138094
+44OH44Kj44Oz44Kw 138095
+4LiB4Liy4Lij4Liq4Lij4LmJ4Liy4LiH 138096
+64Ks 138097
+IOywuOyXrA== 138098
+0YHRhQ== 138099
+0YHRhdC10Lw= 138100
+2YXZiNiz 138101
+IG7huqV1 138102
+INec157Xotec15Q= 138103
+4LmA4Lib4LmJ4Liy 138104
+4LmA4Lib4LmJ4Liy4Lir4Lih4Liy4Lii 138105
+IG3DuWk= 138106
+2KfYptiy 138107
+7ZuI 138108
+15fXkdeV16jXlA== 138109
+4Lic4Li54LmJ4LmD4LiK4LmJ 138110
+IHBhxbo= 138111
+IHBhxbpkemk= 138112
+IHBhxbpkemllcm4= 138113
+IHBhxbpkemllcm5pa2E= 138114
+4Lil4LiH4LmE4Lib 138115
+2YLYp9i5 138116
+IGNo4bqtbQ== 138117
+IMO2emVsbGlrbGVyaQ== 138118
+IMSQbw== 138119
+IMSQb8Ogbg== 138120
+0LbQtdC90LjQtQ== 138121
+IGjhurM= 138122
+IGjhurNu 138123
+IGHFn2s= 138124
+772N 138125
+44OR44K5 138126
+15TXldeo15DXldeq 138127
+IMW7 138128
+IMW7eQ== 138129
+157Xltec 138130
+INGD0LrRgNCw 138131
+INGD0LrRgNCw0LjQvQ== 138132
+4LmA4LiK4Li0 138133
+4LmA4LiK4Li04LiN 138134
+0KDQmA== 138135
+IHp3acSFemt1 138136
+15TXl9ec15jXqg== 138137
+44KT44Gn44GZ44KI44Gt 138138
+44Gm44GK44KK 138139
+0LvQvtC20LjRgtGM 138140
+157Xldeg15nXnQ== 138141
+4Liu4Li0 138142
+7LCs 138143
+INin2YTZhdi02KrYsdmD 138144
+IGTDvMWfw7xr 138145
+0LDQs9C10L3Rgg== 138146
+INin2YTYo9iz2KjZiNi5 138147
+INmC2LHZitio 138148
+0LjQvdC0 138149
+0LjQvdC00LjQsg== 138150
+0LjQvdC00LjQstC40LQ= 138151
+0LjQvdC00LjQstC40LTRgw== 138152
+0LjQvdC00LjQstC40LTRg9Cw0LvRjNC9 138153
+ZsO2cmRlcg== 138154
+IHNlw6dlbg== 138155
+IHNlw6dlbmVr 138156
+IMOpdGFudA== 138157
+INC70Y7QsdC40Lw= 138158
+0LrQsNC30YvQstCw0LXRgg== 138159
+4Lin4Li04LiZ 138160
+INeU15HXkNeZ150= 138161
+INC00L7Qsg== 138162
+INC00L7QstC+0LvRjA== 138163
+INC00L7QstC+0LvRjNC90L4= 138164
+16LXk9eZ16M= 138165
+IG9rcmU= 138166
+IG9rcmXFmw== 138167
+IG9rcmXFm2xvbg== 138168
+INiq2LHZitiv 138169
+4LmA4Lih4Li34LmI4Lit4Lin4Lix4LiZ4LiX4Li14LmI 138170
+44KI44GL44Gj44Gf 138171
+Q3VtaA== 138172
+Q3VtaHVy 138173
+Q3VtaHVyYmE= 138174
+Q3VtaHVyYmHFnw== 138175
+Q3VtaHVyYmHFn2thbg== 138176
+Q3VtaHVyYmHFn2thbsSx 138177
+IG7hu6M= 138178
+4Lic4Li54LmJ4LmA4Lil4LmI4LiZ 138179
+IGNvbXBsw6h0ZQ== 138180
+4LmA4Lie4Lio 138181
+2K/ZkA== 138182
+IGTDvHo= 138183
+IGTDvHpleQ== 138184
+44Gn44GC44KL44GT44Go 138185
+ZXh0w6lyaWV1cg== 138186
+17M= 138187
+IGluZm9ybWHDp8Ojbw== 138188
+44Kv44Oq44OL44OD44Kv 138189
+IFB1Ymxp 138190
+IFB1Ymxpw6k= 138191
+16jXldeT 138192
+4LiE4Lin4Liy4Lih4Lib4Lil4Lit4LiU4Lig4Lix4Lii 138193
+INij2YrYtg== 138194
+INij2YrYttmL2Kc= 138195
+2KrYs9io2Kg= 138196
+44Gk44KC44KK 138197
+0LjQt9C80LA= 138198
+4LiC4Li24LmJ4LiZ4LmE4Lib 138199
+2YPZkA== 138200
+2YTZiNmF 138201
+INep16bXqA== 138202
+INep16bXqNeZ15o= 138203
+44Gv44KC44Gh44KN44KT 138204
+INC60LDQvQ== 138205
+INC60LDQvdCw0Ls= 138206
+44Gr44Gq44Gj44Gm44GE44G+44GZ 138207
+INin2YTYo9mD2KvYsQ== 138208
+2KrYp9it 138209
+2YbYqtmH 138210
+2YbYqtmH2KfYoQ== 138211
+2KfZiNmK2Kk= 138212
+IEJ1Z8O8bg== 138213
+0L3RgdC60L7Qs9C+ 138214
+4LiU4LmI4Lin4LiZ 138215
+w6l2b2x1dGlvbg== 138216
+44Gj44Gm44GE44G+44GX44Gf 138217
+44KF 138218
+IFbGsMahbmc= 138219
+4Lig4Liy4Lie4Lii 138220
+4Lig4Liy4Lie4Lii4LiZ 138221
+4Lig4Liy4Lie4Lii4LiZ4LiV4Lij4LmM 138222
+INeU16bXnNeZ15c= 138223
+INin2YTYpdiz2YTYp9mF2Yo= 138224
+2YTZitio 138225
+IGVkacOnw6Nv 138226
+0YHRgtGA0LXQuw== 138227
+IGtow7pj 138228
+2YbZhdmI2LA= 138229
+2YbZhdmI2LDYrA== 138230
+15zXpteU 138231
+0YHRgtCw0LLQuNC7 138232
+4LiW4Liy 138233
+4Liq4Lij4LmJ4Liy4LiH4LiE4Lin4Liy4Lih 138234
+44GE44Gj44Gx 138235
+44GE44Gj44Gx44GE 138236
+0YHRgtCw0LLQu9C10L0= 138237
+INin2YTZgtiv2LM= 138238
+IG5nxrDhu6Nj 138239
+2KjYrg== 138240
+4Liq4Lir4Lij 138241
+4Liq4Lir4Lij4Lix 138242
+4Liq4Lir4Lij4Lix4LiQ 138243
+INij2Lo= 138244
+INij2LrYs9i3 138245
+INij2LrYs9i32LM= 138246
+44GG44G+ 138247
+44GG44G+44GP 138248
+IOq1reygnA== 138249
+2K3Yttin2LE= 138250
+IGThu6tuZw== 138251
+5oq844GX 138252
+2KrZiNin 138253
+2KrZiNin2KzYrw== 138254
+16nXnteX15Q= 138255
+44GP44KT 138256
+INeR16LXpg== 138257
+INeR16LXpted 138258
+157XoNeZ15XXqg== 138259
+15XXmdeT 138260
+15XXmdeT15DXlQ== 138261
+4LiK4Li04LiH 138262
+IHByYWPEmQ== 138263
+INC30LDRgg== 138264
+INC30LDRgtC10Lw= 138265
+IOyekOycoA== 138266
+IOykgA== 138267
+IOykgOu5hA== 138268
+IGLhuq0= 138269
+IGLhuq1j 138270
+INeU157XpteR 138271
+INmC2YrZhdip 138272
+4LmA4Lit4LmA4LiK 138273
+4LmA4Lit4LmA4LiK4Li14Lii 138274
+IHBlcmNow6g= 138275
+INin2YTYudiz2YPYsQ== 138276
+INin2YTYudiz2YPYsdmK2Kk= 138277
+2KzZitio 138278
+6561 138279
+2YXZh9ix 138280
+2YXZh9ix2KzYp9mG 138281
+2YXYsdin2YM= 138282
+2YXYsdin2YPYsg== 138283
+INC+0LTQvdCw0LrQvg== 138284
+4LiU4Li14LmG 138285
+INem16TXlQ== 138286
+IGt1bGxhbsSxbGFu 138287
+INC60LjQvdC+ 138288
+44OG44Kj44Oz44Kw 138289
+IEdp4bubaQ== 138290
+2KrZiNiy 138291
+2KrZiNiy2YrYuQ== 138292
+4Lii4Li04LiZ 138293
+4Lii4Li04LiZ4LiU4Li1 138294
+IGPFk3Vy 138295
+IGnFn2FyZXQ= 138296
+INeR16LXlteo 138297
+INeR16LXlteo16o= 138298
+INC/0LDRhtC4 138299
+INC/0LDRhtC40LXQvdGC 138300
+44G/44Gf44GE44Gn44GZ 138301
+0LLQtdC3 138302
+0LvQuNC90LA= 138303
+0L7QtNC1 138304
+INeQ15XXqtef 138305
+ZMSxxJ/EsW7EsXo= 138306
+INCQ0LI= 138307
+INCQ0LLRgtC+0YA= 138308
+77yu 138309
+IEPhuqdu 138310
+INin2YTYp9iu 138311
+INin2YTYp9iu2KjYp9ix 138312
+IOqxsOydmA== 138313
+IGF0ZW7Dp8Ojbw== 138314
+IGdlbGRpxJ9p 138315
+44Kq44K5 138316
+44Kq44K544K5 138317
+44Kq44K544K544Oh 138318
+0LXQstGL0LU= 138319
+0LrRgNGL0Ls= 138320
+4LmA4LiK4Li14Lii4LiH 138321
+4LmA4LiK4Li14Lii4LiH4LmD4Lir4Lih4LmI 138322
+IG1hcsOnbw== 138323
+INin2YTZhdin2K/YqQ== 138324
+INCz0L7Quw== 138325
+IHNwcnplZGHFvHk= 138326
+IO2VtOqysA== 138327
+INCV0LPQvg== 138328
+6rmA 138329
+INec16fXkdec16o= 138330
+INin2YTZgdmG2KfZhg== 138331
+IGNvbXVuaWNhY2nDs24= 138332
+4LmA4Liq4LmJ4LiZ4LiX4Liy4LiH 138333
+7Zi5 138334
+4LiK4Liz 138335
+4LiK4Liz4Lij4Liw 138336
+INeb15DXng== 138337
+INeb15DXnteV16g= 138338
+4LiK4LmI4Liy4LiH 138339
+2LLZh9ix 138340
+IGtsaWVudMOzdw== 138341
+0LjQstCw0Y7Rgg== 138342
+0LDQvdCz 138343
+16DXmg== 138344
+IGfhu41u 138345
+w5xS 138346
+7JiB7IOB 138347
+INi62LLYqQ== 138348
+7J2M7J2E 138349
+IGJlenBv 138350
+IGJlenBvxZs= 138351
+IGJlenBvxZtyZWRuaQ== 138352
+INin2YTZhdmI2Kc= 138353
+INin2YTZhdmI2KfYt9mG 138354
+INin2YTZhdmI2KfYt9mG2YrZhg== 138355
+44KM44G+44GZ 138356
+INC80LDRgtGH 138357
+15DXldef 138358
+INix2LPZhdmK 138359
+INGN0LrQvtC9 138360
+INGN0LrQvtC90L7QvA== 138361
+INGN0LrQvtC90L7QvNC40YfQtdGB0Lo= 138362
+44Oc44O8 138363
+INC00LjRgA== 138364
+INC00LjRgNC10LrRgtC+0YA= 138365
+INGB0LrQvtGA0L4= 138366
+4Lia4Liz 138367
+4Lia4Liz4Lij 138368
+4Lia4Liz4Lij4Li44LiH 138369
+INGE0YPRgg== 138370
+INGE0YPRgtCx0L7Quw== 138371
+INeQ15nXnA== 138372
+IOykkeq1rQ== 138373
+7Jyk 138374
+ZcSfZQ== 138375
+4LmE4LiB4LmI 138376
+dHJhw64= 138377
+dHJhw65u 138378
+INGC0YDRg9Cx 138379
+4LmA4Lia4Li3 138380
+4LmA4Lia4Li34LmJ4Lit4LiH 138381
+4LmB4Lih4LiZ 138382
+INiq2K3Yr9mK2Ks= 138383
+INeb16LXqg== 138384
+2K3Yp9iz2Kg= 138385
+bMSxxJ9h 138386
+16fXmdeZ157Xmded 138387
+0L7RgdGC0YzRjg== 138388
+4Lid4Lix 138389
+4Lid4Lix4LmI4LiH 138390
+2LTYutmE 138391
+7Ju5 138392
+INC60LDQttC00L7Qs9C+ 138393
+IGLDtmzDvG3DvA== 138394
+4Lir4LiZ4Li1 138395
+IGlzdGVkacSfaQ== 138396
+IHRyxrBuZw== 138397
+44OM 138398
+4Liu4Lit 138399
+2KPZhti0 138400
+2KPZhti02LfYqQ== 138401
+INin2YTZhdiz2Yo= 138402
+INin2YTZhdiz2YrYrQ== 138403
+4Lil4Lix4LiB4Lip4LiT4LmM 138404
+IG7hu61h 138405
+4LiX4Li14LmI4LiV4LmJ4Lit4LiH4LiB4Liy4Lij 138406
+0YjQtdC6 138407
+0LvRkQ== 138408
+INep15nXlA== 138409
+INep15nXlNeZ15Q= 138410
+IGtodcO0bg== 138411
+INGC0YDQtdCx0L7QstCw0L3QuNGP 138412
+INec16LXlteV16g= 138413
+INin2YTYudmF2LE= 138414
+4Lij4Liy4LiE4Liy4LiW4Li54LiB 138415
+2YfZj9mF2ZI= 138416
+w7xzdA== 138417
+w7xzdMO8 138418
+INC00LXQvdC10LM= 138419
+IG7huqE= 138420
+4LiC4LiZ4Lih 138421
+INCx0LvQsNCz 138422
+INCx0LvQsNCz0L7QtA== 138423
+INCx0LvQsNCz0L7QtNCw0YA= 138424
+INCx0LvQsNCz0L7QtNCw0YDRjw== 138425
+2KXYs9mE2KfZhQ== 138426
+4LiZ4Li04Lin 138427
+55+l44KJ44Gq44GE 138428
+2KvZgtip 138429
+INCz0L7Qu9C+0YE= 138430
+15DXldeo15c= 138431
+IHRy4bupbmc= 138432
+INC+0LTQvdC+0Lw= 138433
+IGtvxYRjdQ== 138434
+INeV16jXpw== 138435
+V2nEmQ== 138436
+V2nEmWNlag== 138437
+INeQ15nXm9eV16o= 138438
+INeQ15nXm9eV16rXmQ== 138439
+0YHQvtGB 138440
+IGplxbxlbGk= 138441
+5Lul5LiL44Gu 138442
+5bCP44GV 138443
+5bCP44GV44Gq 138444
+0L7Qu9C+0LPQuNC4 138445
+INC+0LHRgdC70YPQtg== 138446
+INC+0LHRgdC70YPQttC40LLQsA== 138447
+2YPYqtin2KjYqQ== 138448
+IOq0gOyLrA== 138449
+16LXqdeZ16g= 138450
+IGFyYXPEsW5kYWtp 138451
+INGA0LDQudC+0L3QsA== 138452
+2YjYp9is2Kg= 138453
+INeR15fXmdeZ 138454
+7ZW07KO8 138455
+IGfDs2M= 138456
+0LDQudC7 138457
+IFTDrG5o 138458
+5pqu44KJ 138459
+5pqu44KJ44GX 138460
+5pmC44Gr44Gv 138461
+INCz0L7RgNC+0LTQtQ== 138462
+INeb15DXmdec 138463
+INeb15DXmdec15U= 138464
+IEPhu5luZw== 138465
+44Gp44GG44GX44Gm44KC 138466
+15fXldej 138467
+2KrYrdix2YM= 138468
+INGB0LvQvtCy0LDQvA== 138469
+4LiI4Liw4LiK4LmI4Lin4Lii 138470
+INin2YTZhdiz2KrZgtio2YQ= 138471
+2YLYtg== 138472
+2YLYttmK 138473
+15HXodeV16Q= 138474
+15HXodeV16TXlQ== 138475
+acSZxIc= 138476
+IFnEsWw= 138477
+2LTZitiu 138478
+4LiE4Li44LiT4LiI4Liw 138479
+16nXnteV16o= 138480
+INiq2LnYsdi2 138481
+IGFuw6FsaXNl 138482
+INGB0L7QsdC40YDQsA== 138483
+4LmA4Lie4LiK 138484
+4LmA4Lie4LiK4Lij 138485
+INCy0LXQu9C4 138486
+INCy0LXQu9C40Lo= 138487
+4Liq4Lix4LmJ4LiZ 138488
+IHBvcHVsYcOnw6Nv 138489
+4Lij4LmI4Lin4Lih4LiB4Lix4LiZ 138490
+15fXng== 138491
+15fXnteZ16nXmQ== 138492
+16HXmdeh 138493
+5YaF44Gn 138494
+IHNvYsSF 138495
+IFlheQ== 138496
+IFlhecSxbg== 138497
+44Oh44OL44Ol44O8 138498
+INC/0YDQtdC00L7RgdGC0LDQstC70Y8= 138499
+44Gg44Go5oCd44GG 138500
+IOqzoOqwnQ== 138501
+INC+0LTQvdC40Lw= 138502
+4LmD4LiZ4LmA4Lij4Li34LmI4Lit4LiH 138503
+IHPhu5U= 138504
+INCX0LTQtdGB0Yw= 138505
+INC40LfQvNC10L3QtdC90LjRjw== 138506
+IOydvOydhA== 138507
+44Gq44Gu44Gg 138508
+0LrQu9Cw0LTRi9Cy0LA= 138509
+0YDQvNCw 138510
+INeV15HXm9ec 138511
+2KrYo9mF2YrZhg== 138512
+INC/0YDQuNGP0YI= 138513
+INC/0YDQuNGP0YLQvQ== 138514
+2YXZhdin2LE= 138515
+2YXZhdin2LHYs9ip 138516
+44Go44Gq44Gj44Gm 138517
+INis2YXZitmE 138518
+IOyniA== 138519
+IOyniOusuA== 138520
+IHF1ZXN0w6Nv 138521
+acOp 138522
+acOpbmRv 138523
+4Lir4LmJ4Lit4LiH4Lie4Lix4LiB 138524
+44OR44O844OI 138525
+0YLQstC10YDQttC00LA= 138526
+0L3RgdC60L7QuQ== 138527
+0LfQsNC7 138528
+4Lih4Li44LmI4LiH 138529
+4buK 138530
+INeU15DXl9eo15XXoNeU 138531
+IFRoxrA= 138532
+7KO866+8 138533
+INin2YTYudio 138534
+w6l2w6lu 138535
+w6l2w6luZW1lbnQ= 138536
+2YLZiNin2LnYrw== 138537
+2K/Zjw== 138538
+IOyViuyKteuLiOuLpA== 138539
+IOuztOq4sA== 138540
+IHlhcMSxbG1hc8Sx 138541
+4LmA4Lij4Liy4LiB 138542
+4LmA4Lij4Liy4LiB4LmH 138543
+2K3YsNix 138544
+2YLYtdix 138545
+44Gm44GX44G+44GE44G+44GX44Gf 138546
+IOC5gOC4m+C5h+C4meC4leC5ieC4mQ== 138547
+44Go44Gr 138548
+44Go44Gr44GL 138549
+44Go44Gr44GL44GP 138550
+0L3RhtC1 138551
+0LfQstGD0Lo= 138552
+44GX44KI44GG44Go 138553
+INin2YTYtdit2YrYqQ== 138554
+INep15TXmdeV 138555
+IERpxJ9lcg== 138556
+2YLZhNmC 138557
+44K444Oj44Oz 138558
+IHLhu51p 138559
+INC70LXRhw== 138560
+INC70LXRh9C10L3QuNGP 138561
+2KrYqNin2K8= 138562
+2KrYqNin2K/ZhA== 138563
+16bXpNeU 138564
+4LiE4Lin4Liy4Lih4LmA4Lir4LmH4LiZ 138565
+INi02Kg= 138566
+INi02KjZg9ip 138567
+16jXmden 138568
+2YXYudiv 138569
+2YXYudiv2KfYqg== 138570
+ZMSxxJ/EsW5kYQ== 138571
+INeR16nXoNeZ150= 138572
+INeU15nXqdeo15DXnA== 138573
+INeU15nXqdeo15DXnNeZ16o= 138574
+IHPEsW5hdg== 138575
+16DXpteZ15I= 138576
+4Lin4Lix4LiV4LiW4Li4 138577
+INin2YTYqNix2YTZhQ== 138578
+INin2YTYqNix2YTZhdin2YY= 138579
+dGl2aXTDoA== 138580
+44KT44Gg44KN44GG 138581
+16fXmdeZ154= 138582
+2YTZitmD 138583
+IMSRw7I= 138584
+IMSRw7Jp 138585
+INCY0L3RgtC10YA= 138586
+INCY0L3RgtC10YDQvdC10YI= 138587
+44Gr44Go44Gj44Gm44Gv 138588
+44Gj44GT 138589
+16fXldeh 138590
+2LPYqtit2YI= 138591
+5pWZ44GI44Gm 138592
+44OA44Oh 138593
+INmF2YbYstmE 138594
+4LmA4LiL4LmH4LiZ 138595
+5L2/44GI44KL 138596
+6KaL56mN 138597
+6KaL56mN44KC44KK 138598
+2KPZgQ== 138599
+2KPZgdmD2KfYsQ== 138600
+INC40LPRgNC+0LI= 138601
+INC40LPRgNC+0LLRi9C1 138602
+IG3EmcW8 138603
+IG3EmcW8Y3p5 138604
+IG3EmcW8Y3p5em4= 138605
+INin2YTYrdmC2YrZgtmK 138606
+2LnYqNix 138607
+15vXldec16DXlQ== 138608
+7Z2l 138609
+157XkNeV15fXqA== 138610
+2K7Yqti1 138611
+44Oe44Oe 138612
+INeQ15fXldeW 138613
+7YyA 138614
+IHLhu5Fp 138615
+INCy0YLQvtGA 138616
+INCy0YLQvtGA0L7QuQ== 138617
+IGzhuqtu 138618
+0L/RgNC+0Lw= 138619
+0L/RgNC+0LzRi9GI 138620
+0L/RgNC+0LzRi9GI0LvQtdC9 138621
+0L/RgNC+0LzRi9GI0LvQtdC90L0= 138622
+INC+0YLQvdC+0YjQtdC90LjRjw== 138623
+IHPhu6k= 138624
+INC80L7QsdC40LvRjA== 138625
+INC80L7QsdC40LvRjNC9 138626
+INGN0YLQvtC80YM= 138627
+IHThuqFw 138628
+IOyCrOqxtA== 138629
+IOyVjOugpA== 138630
+2YPZjw== 138631
+2YPZj9mF2ZI= 138632
+INen15XXqNeU 138633
+INGE0LjRgA== 138634
+INGE0LjRgNC8 138635
+IHPEsWvEsW50xLE= 138636
+16DXmw== 138637
+16DXm9eV158= 138638
+2YjZhNmI2KzZig== 138639
+2K3Yp9mG 138640
+IGxv4bqhbg== 138641
+INeQ15zXow== 138642
+IG3huq9u 138643
+YWJow6RuZw== 138644
+YWJow6RuZ2ln 138645
+INGD0YDQvtCy0L3Rjw== 138646
+INec15HXk9eV16c= 138647
+2YrZhdmG 138648
+bGF5xLFu 138649
+IGjhuqNp 138650
+INC30LDQstC+0LQ= 138651
+IOyVhOyjvA== 138652
+4Liq4LiW4Liy 138653
+4Liq4LiW4Liy4Lia4Lix4LiZ 138654
+IGfDvHZlbmxpaw== 138655
+4LmA4LiU4LmI4LiZ 138656
+15HXk9en 138657
+IOuI 138658
+IOuIhA== 138659
+IOuIhOq1rA== 138660
+6YeN6KaB44Gq 138661
+4Lij4Lit4LiH4Lij4Lix4Lia 138662
+c2NobGll 138663
+c2NobGllw59lbg== 138664
+IOyWvA== 138665
+IOyWvOuniA== 138666
+IOyWvOuniOuCmA== 138667
+0YLQuNC60Lg= 138668
+7ZWc64uk6rOg 138669
+44Gg44Gj44Gf44KJ 138670
+INeU15nXmNeR 138671
+44Gq44GR44KM44Gw44Gq44KJ44Gq44GE 138672
+w6LM 138673
+w6LMow== 138674
+IHBo4bqhdA== 138675
+YWvEscWf 138676
+44Gm44GX44G+44GE44G+44GZ 138677
+4LmA4LiL4LmH 138678
+INCh0LXQs9C+0LTQvdGP 138679
+IGluc2FubGFyxLFu 138680
+IGTDqXZlbG9wcGU= 138681
+16rXpNeo 138682
+16rXpNeo15nXmA== 138683
+2KfZhtiq2LTYp9ix 138684
+6rCR 138685
+RnJhbsOnb2lz 138686
+2KPZhNi5 138687
+2KPZhNi52KfYqA== 138688
+44KS6LaF 138689
+44KS6LaF44GI 138690
+IOqwmeyKteuLiOuLpA== 138691
+44Kz44Os 138692
+INC80LXRgdGP0YbQtdCy 138693
+7YyF 138694
+INin2YTYrNin2YXYudip 138695
+7J247YSw 138696
+7J247YSw64S3 138697
+15PXqNeV16k= 138698
+INmI2KPYtNin2LE= 138699
+INC/0YDQsNCy0LjQu9Cw 138700
+44Gd44GT44Gr 138701
+15fXnteT 138702
+4LmA4Lir4LiV4Li44LiB4Liy4Lij4LiT4LmM 138703
+IOqyve2XmA== 138704
+44G244KK 138705
+15zXqQ== 138706
+15zXqdeV158= 138707
+4LmA4LiW 138708
+IERvxJ91 138709
+INC40YHQv9C+0LvRjNC30L7QstCw0L3QuNC1 138710
+IMOnb2N1xJ91 138711
+0LzQsNCz0LDQt9C40L3QtQ== 138712
+IMSRaeG7g24= 138713
+IGFzbMSx 138714
+IGFzbMSxbmRh 138715
+IGRvZW7Dp2E= 138716
+INiz2KfYuQ== 138717
+INiz2KfYudin2Ko= 138718
+INC40YHQv9C+0LvRjNC30L7QstCw0L3QuNGP 138719
+16jXldem15nXnQ== 138720
+INC30L3QsNGH0LjRgg== 138721
+INGA0LDQvA== 138722
+INGA0LDQvNC60LDRhQ== 138723
+6rGw66as 138724
+INC/0YvRgtCw 138725
+44OB44Oz 138726
+INC/0L7RgdC6 138727
+INC/0L7RgdC60L7Qu9GM 138728
+INC/0L7RgdC60L7Qu9GM0LrRgw== 138729
+2KXYqNix 138730
+2KXYqNix2KfZhw== 138731
+2KXYqNix2KfZh9mK2YU= 138732
+INGC0YDQtdGF 138733
+IEdlbsOn 138734
+2LPZiNmB 138735
+IHZlw61jdWxv 138736
+IE5nw6Ju 138737
+INC+0YfQtdGA0LXQtNGM 138738
+4LiE4Lij4Li24LmI4LiH 138739
+15DXkdeZ 138740
+4LiV4LmJ4Lih 138741
+44KS6KGM44GE 138742
+INin2YTYs9in2KjZgtip 138743
+0L3QsNGG0Lg= 138744
+0L3QsNGG0LjQvtC90LA= 138745
+0L3QsNGG0LjQvtC90LDQu9GM0L0= 138746
+IGdlc3Rpw7Nu 138747
+2KrZgtiv 138748
+INin2YTYqNmK2KfZhg== 138749
+INin2YTYqNmK2KfZhtin2Ko= 138750
+INin2YTYp9mG2KrYrtin2Kg= 138751
+INin2YTYp9mG2KrYrtin2KjYp9iq 138752
+4LmA4LiK4LmI4Liy 138753
+15PXkNeS 138754
+INec15LXnteo15k= 138755
+INiq2K3Yqtin2Kw= 138756
+IHRow7Ru 138757
+4LiV4LmJ4Lit4LiZ 138758
+4LiV4LmJ4Lit4LiZ4Lij4Lix4Lia 138759
+5aWz44Gu 138760
+5aWz44Gu5a2Q 138761
+IHRo4buf 138762
+2LfYrdmG 138763
+4Liy4Lij4LmM4LiU 138764
+16rXnteZ15M= 138765
+INGB0LDQvNGL0Lw= 138766
+IOyLnO2WiQ== 138767
+2KXYtdiv 138768
+2KXYtdiv2KfYsQ== 138769
+IE5naOG7hw== 138770
+7JWV 138771
+2LPYpg== 138772
+2LPYptmE 138773
+4Lit4Liy4Lij 138774
+4Lit4Liy4Lij4Lih 138775
+4Lit4Liy4Lij4Lih4LiT4LmM 138776
+4LmB4Liu 138777
+16DXmNec 138778
+IOyii+yVhA== 138779
+15XXnNec 138780
+INeR15vXqteR 138781
+44Kr44Op 138782
+16bXoteZ16jXmded 138783
+2KrYudio2YrYsQ== 138784
+INee16fXqNeU 138785
+INGE0LDQutGC0L7RgA== 138786
+INiq2YXYp9mF 138787
+INiq2YXYp9mF2Kc= 138788
+642V 138789
+IHbGsOG7nQ== 138790
+IHbGsOG7nW4= 138791
+IGTEscWfxLE= 138792
+44GE44Gh 138793
+INec16fXoNeV16o= 138794
+INin2YTYudmE2KfZgtin2Ko= 138795
+0L/Rg9Cx 138796
+0L/Rg9Cx0LvQuA== 138797
+2KXZitmF 138798
+2KXZitmF2KfZhg== 138799
+4Lit4Liz4LiZ4Liy 138800
+4Lit4Liz4LiZ4Liy4LiI 138801
+5ZCr44G+44KM 138802
+44KL44Gf44KB44Gr 138803
+16HXkg== 138804
+16HXkteg15XXnw== 138805
+2KrYrdiv2Yo= 138806
+IGF1cHLDqHM= 138807
+INin2YTYrNmH2Kc= 138808
+INin2YTYrNmH2KfYsg== 138809
+INee16rXl9eq 138810
+0LXQvdC90YPRjg== 138811
+INC30LjQvA== 138812
+4LiB4Liy4LmB4Lif 138813
+INeR16rXldeo 138814
+IG5naMOo 138815
+IG5naMOobw== 138816
+INCb0Y4= 138817
+INCb0Y7QsQ== 138818
+16rXp9em15nXkQ== 138819
+157Xotep15Q= 138820
+INin2YTYqNmK2Ko= 138821
+16bXmdek 138822
+INC+0LHRj9C30LDQvQ== 138823
+IE3hu5dp 138824
+INCi0YPRgA== 138825
+INmI2KjYp9mE2Ko= 138826
+INmI2KjYp9mE2KrYp9mE2Yo= 138827
+IGTDqWNpc2lvbg== 138828
+INio2K8= 138829
+INio2K/Yo9iq 138830
+IGPhu6Vj 138831
+IGJhc2s= 138832
+IGJhc2vEsQ== 138833
+IGhhdMSxcmw= 138834
+IGhhdMSxcmxh 138835
+5bCP44GV44GE 138836
+IGdlcsOnZWt0ZW4= 138837
+4Lic4Lix4LiB 138838
+5Y+v6IO944Gq 138839
+157XkNeh 138840
+IGNyw610aWNh 138841
+IOydmOybkA== 138842
+2LnZgtmI2K8= 138843
+15jXm9eg 138844
+15jXm9eg15XXnNeV15LXmdeU 138845
+6KiA44GI44Gw 138846
+INmC2YbYpw== 138847
+INmC2YbYp9ip 138848
+IOydtOqyg+ydgA== 138849
+2KrYtdix 138850
+4Lif4Lix4LiZ 138851
+INGA0LXRhtC10L8= 138852
+INGA0LXRhtC10L/Rgg== 138853
+INio2YbZgdiz 138854
+0YDQvtGI 138855
+INC80LDRgNGC0LA= 138856
+IHNvbnJhcw== 138857
+IHNvbnJhc8Sx 138858
+15XXkdep 138859
+44Oq44K544Kv 138860
+IEZyYW7Dp2Fpcw== 138861
+4bua 138862
+6rCU 138863
+INeU15HXqNeZ16o= 138864
+16TXmdem 138865
+16TXmdem15XXmQ== 138866
+INmE2YXYp9iw2Kc= 138867
+INCa0LjQtdCy 138868
+INGB0LzRi9GB0Ls= 138869
+6riI7Jy1 138870
+44K344Oj44Or 138871
+44Op44Kk44OI 138872
+7JuD 138873
+157Xl9eo 138874
+44aN 138875
+IGt1bGxhbsSxbQ== 138876
+INeQ16bXnNeg15U= 138877
+IHTDoG4= 138878
+44OP44O8 138879
+44Go44Go44KC 138880
+44Go44Go44KC44Gr 138881
+0YDQtdCz 138882
+0YDQtdCz0Lg= 138883
+0YDQtdCz0LjQvtC9 138884
+44Gq44GP44Gq44KL 138885
+IGNo4bqjeQ== 138886
+INis2YfYqQ== 138887
+xYRza2llag== 138888
+4Lit4Li14LmA4Lih 138889
+4Lit4Li14LmA4Lih4Lil 138890
+44GN44Gj44Go 138891
+IOyYiOyCsA== 138892
+IGtpdGFixLE= 138893
+IGVkdWNhw6fDo28= 138894
+IGJ1bHXFnw== 138895
+0L7Qu9C+0LPQuNGP 138896
+INC60L7QvdC60YA= 138897
+INC60L7QvdC60YDQtdGC 138898
+15LXmdeo 138899
+INC/0YDQtdC00LvQsNCz 138900
+INC/0YDQtdC00LvQsNCz0LDQtdGC 138901
+IFnDqm4= 138902
+IO2VnOuyiA== 138903
+INee16jXm9eW15k= 138904
+4LmA4Lib4Li04LiU4LmA4Lic4Lii 138905
+0YLQstC10YDQtA== 138906
+IEjhu4c= 138907
+INCT0YA= 138908
+4Lid4LmJ4Liy 138909
+15TXqden 138910
+15TXqden16LXlA== 138911
+INC90LDRg9C6 138912
+7KCQ7J2E 138913
+INC90LXQu9GM 138914
+INC90LXQu9GM0Lc= 138915
+INC90LXQu9GM0LfRjw== 138916
+0LPQuNC9 138917
+IELDtmw= 138918
+IELDtmxnZQ== 138919
+INCy0LvQsA== 138920
+INCy0LvQsNGB0YLQuA== 138921
+4LmA4LiZ4LmH 138922
+4LmA4LiZ4LmH4LiV 138923
+6rOo 138924
+IMO2bGQ= 138925
+IMO2bGTDvHI= 138926
+15vXoNei 138927
+INin2YTZh9mK2KbYqQ== 138928
+2KrYp9ix2YrYrg== 138929
+INCR0YA= 138930
+INGB0LzQvtC2 138931
+INGB0LzQvtC20LXRgtC1 138932
+IEzDumM= 138933
+4LmE4Lib4LiW4Li24LiH 138934
+IEJha2FuxLE= 138935
+IGVya2zDpHJ0 138936
+INCQ0L3QsA== 138937
+IHNjw6huZQ== 138938
+5ZWP44GE 138939
+5ZWP44GE5ZCI44KP44Gb 138940
+2YXZh9mG2K8= 138941
+2YXZh9mG2K/Ysw== 138942
+INC90LDQt9Cy0LDQvdC40LU= 138943
+0LjQstCw0L3QuNGP 138944
+44KS5aSJ44GI 138945
+5LuY44GN5ZCI 138946
+44OR44K9 138947
+44OR44K944Kz44Oz 138948
+5piO44KJ 138949
+5piO44KJ44GL 138950
+4LmA4Lit4LiB4Liq4Liy4Lij 138951
+4LmA4LiB4Li04LiZ4LmE4Lib 138952
+0LvQtdC/ 138953
+44GX44Gf44KC44Gu 138954
+IEPDom0= 138955
+IEPDom1hcmE= 138956
+16fXldec16DXldei 138957
+INeR15LXmdef 138958
+IG9jenk= 138959
+IG9jenl3acWbY2ll 138960
+YXR0aXZpdMOg 138961
+44OT44Ol44O8 138962
+IGVkdWNhY2nDs24= 138963
+xLBZRQ== 138964
+6rmM7JqU 138965
+44Ko44Oq44Ki 138966
+0L3QtdGB0YLQuA== 138967
+IG3Ds2c= 138968
+IG3Ds2fFgg== 138969
+INen15jXoNeZ150= 138970
+IFByw6Q= 138971
+INec16LXkdeV16g= 138972
+2KjZhtmJ 138973
+0LfQvtC7 138974
+0LfQvtC70L7Rgg== 138975
+IHduxJl0cg== 138976
+IHduxJl0cno= 138977
+IGNvbnN0cnXDp8Ojbw== 138978
+4Lij4Lix4Lia4Lij4Lit4LiH 138979
+2LPYrNmG 138980
+INen15XXoA== 138981
+16HXmdek15XXqA== 138982
+INmF2K/ZiQ== 138983
+2LHYttmJ 138984
+0L/Qu9Cw0LI= 138985
+77yl 138986
+IGlsYQ== 138987
+IGlsYcOn 138988
+44KL44G544GN 138989
+INmF2YjZgtmB 138990
+4LiB4Lij4Li4 138991
+4LiB4Lij4Li44LiT4Liy 138992
+Y2hvZHrEhWM= 138993
+INGC0YvRgQ== 138994
+0JXQstGA0L4= 138995
+INmK2K3Yr9ir 138996
+44Oh44Kk44Oz 138997
+INin2YTYtdit2Yo= 138998
+INCU0LDQvQ== 138999
+2K/Yudin2KE= 139000
+44K044O844Or 139001
+16nXoNeq15k= 139002
+16nXoNeq15nXmded 139003
+4LiU4LmJ4Lin4Lii4LiB4Lix4LiZ 139004
+IG9sYWNhxJ/EsQ== 139005
+INeR157Xl9eZ16g= 139006
+15TXpw== 139007
+15TXp9ee16o= 139008
+44Oi44OO 139009
+IMOnYWzEscWfdMSx 139010
+IGrDs3ZlbmVz 139011
+44GE44GP44KJ 139012
+INmF2LnYr9mE 139013
+IEPFqW5n 139014
+IFNlZ8O6bg== 139015
+IGTDtm5lbWRl 139016
+INec15nXk9eZ 139017
+44GN44Gh 139018
+44GN44Gh44KT 139019
+44GN44Gh44KT44Go 139020
+2YHYsdmG2LM= 139021
+2YHYsdmG2LPYpw== 139022
+5ZCR44GN 139023
+IGNhbXBhw7Fh 139024
+INGB0LDQvNC+0YHRgtC+0Y8= 139025
+INGB0LDQvNC+0YHRgtC+0Y/RgtC10LvRjNC90L4= 139026
+4buA 139027
+2YLZiNin 139028
+2LPZhNin2K0= 139029
+4LiB4Lij4Liw4LmB 139030
+4LiB4Lij4Liw4LmB4Liq 139031
+INC/0L7Qu9GM0LfRgw== 139032
+bnF1 139033
+bnF1w6p0ZQ== 139034
+4Lij4LmI4Lin4Lih4LiB4Lix4Lia 139035
+64qQ64OQ 139036
+4LiX4Li14Lih4LiK4Liy4LiV4Li0 139037
+IHnEsWxsxLFr 139038
+7Iqs 139039
+INij2LXYrdin2Kg= 139040
+aWxsw6k= 139041
+IGTDs2xh 139042
+IGTDs2xhcmVz 139043
+INC60L7Qtg== 139044
+INC60L7QttC4 139045
+4Lil4LmJ4Lit 139046
+4LmA4Lij4Li14Lii4Lia4Lij 139047
+4LmA4Lij4Li14Lii4Lia4Lij4LmJ4Lit4Lii 139048
+4LmA4Lie4Li0 139049
+4LmA4Lie4Li04LmI4LiH 139050
+0YDQuNGC0L7RgNC4 139051
+IO2RnA== 139052
+IO2RnO2YhA== 139053
+INC/0LXRgNC10LI= 139054
+INC/0LXRgNC10LLQvtC0 139055
+16TXkteZ16LXlA== 139056
+IGRlxJ9lcmxlbmRpcm1l 139057
+2YHYp9im 139058
+INCy0YvQs9C+0LQ= 139059
+xLFuxLF6xLE= 139060
+15XXm9eZ15c= 139061
+INC00L7RgdGC0LjQsw== 139062
+IG5nw6Bu 139063
+5oCd44Gj44Gf 139064
+INCV0YHRgtGM 139065
+INin2YTYsdi62YU= 139066
+IHp3acSFemFuZQ== 139067
+2LHYqNi3 139068
+4LiZ4Li24LiH 139069
+INec15fXlden 139070
+IHN6Y3plZ8OzbG4= 139071
+IHN6Y3plZ8OzbG5pZQ== 139072
+INio2KfYs9iq2K7Yr9in2YU= 139073
+IGbDrXNpY28= 139074
+16LXoQ== 139075
+16LXodeV16c= 139076
+2LPZhNmI2YM= 139077
+INin2K3Yrw== 139078
+0YfRkdGC 139079
+15bXm9eU 139080
+IGzhu4duaA== 139081
+INmI2K3Yqg== 139082
+INmI2K3YqtmJ 139083
+4LiE4Lin4Liy4Lih4Liq4Liy4Lih4Liy4Lij4LiW 139084
+4Lit4Lii4Li54LmI4LmB4Lil4LmJ4Lin 139085
+4LiB4Liy4Lij4LmA4LiU4Li04LiZ4LiX4Liy4LiH 139086
+2KrYrtiw 139087
+16bXmdeV15M= 139088
+INin2YTYo9iz 139089
+INin2YTYo9iz2YfZhQ== 139090
+IHThu4c= 139091
+44Gj44Gm44GE44Gm 139092
+4Liq4Lij4Li4 139093
+4Liq4Lij4Li44Lib 139094
+INC60L7QvNGE 139095
+INC60L7QvNGE0L7RgNGC 139096
+7Jik64qU 139097
+INGA0LDQt9Cy 139098
+INGA0LDQt9Cy0LjQstCw 139099
+0LvQsNC90LQ= 139100
+aMOkbmdl 139101
+INio2YbYs9io2Kk= 139102
+4LmA4LiC4Li14Lii4Lin 139103
+16LXpted 139104
+INec15zXm9eq 139105
+0YHQvtGG0LjQsNC70YzQvQ== 139106
+IOuLpOydjOqzvA== 139107
+INeo16nXldee 139108
+157XqNeX15E= 139109
+2LPZgti3 139110
+IGFsYW7EsQ== 139111
+IMSR4buH 139112
+6aOf44G544KL 139113
+4LiU4Li24LiH 139114
+IGdlZ2Vuw7xiZXI= 139115
+INio2YfYsNmH 139116
+4LiW4Li34Lit4LmA4Lib4LmH4LiZ 139117
+65WF 139118
+4LiE4LiZ4LmE4LiX4Lii 139119
+44Ki44Km 139120
+44Ki44Km44OI 139121
+4Lio4Lix4LiB 139122
+4Lio4Lix4LiB4LiU4Li0 139123
+4Lio4Lix4LiB4LiU4Li04LmM 139124
+2YLZiNin2YY= 139125
+2YLZiNin2YbZitmG 139126
+IGjhu5lw 139127
+44Gq44GP44Gq44Gj44Gm 139128
+INeQ157XoA== 139129
+INeQ157XoNed 139130
+4LmA4LiV4Li34Lit4LiZ 139131
+INC30LDQstC40YHQuNC8 139132
+INC30LDQstC40YHQuNC80L7RgdGC0Lg= 139133
+16rXmdeQ 139134
+16rXmdeQ15XXqA== 139135
+5aeL44KB44Gf 139136
+IG5n4buN 139137
+IG5n4buNdA== 139138
+7ZKN 139139
+6rO87J6l 139140
+IGLhuqFp 139141
+44Gn44GN44Gm 139142
+IGNvbWXDp2Fy 139143
+4Lib4Lij4Liy4LiB 139144
+4Lib4Lij4Liy4LiB4LiP 139145
+INCz0L7QtNGL 139146
+0LzQtdGB 139147
+INin2YTZhdiz2KrZiNmJ 139148
+INGB0LDQvNGL0LU= 139149
+0LvQu9C10YA= 139150
+44Gj44Gm44GX44G+44GE44G+44GZ 139151
+44Go44Gu44GT44Go 139152
+YmnDsw== 139153
+4LiB4Lil4LmI4Lit4LiH 139154
+INin2YTYstmI2Kw= 139155
+44Gr6KGM44Gj44Gf 139156
+4LiE4LmI4Lit4LiZ 139157
+4LiE4LmI4Lit4LiZ4LiC4LmJ4Liy4LiH 139158
+IGJhxJ9s 139159
+IGJhxJ9sYW50 139160
+IGJhxJ9sYW50xLE= 139161
+56K644GL 139162
+56K644GL44Gr 139163
+44Oc44O844Or 139164
+57WC44KP44KK 139165
+16nXnteo 139166
+4LiX4Li14LmI4Liq4Liy4Lih4Liy4Lij4LiW 139167
+2YTYstmF 139168
+0LTQsNC10YLRgdGP 139169
+4Lij4Lix4Lia4Lib4Lij4Liw 139170
+4Lij4Lix4Lia4Lib4Lij4Liw4LiX4Liy4LiZ 139171
+5aSJ44KP44KK 139172
+77yi 139173
+IOyYiOyImOuLmA== 139174
+44KI44GG44Go 139175
+4Lih4Lix4LiB4LiI4Liw 139176
+IEjGsMahbmc= 139177
+2YbZgdiw 139178
+157Xk9eT 139179
+IOyduOyglQ== 139180
+0YXQvtC00LjRgtGM 139181
+INC30LDQstC40YHQuNGC 139182
+15XXk9eZ16I= 139183
+44GT44Go44GM44GC44KK44G+44GZ 139184
+2LnYsdin2YI= 139185
+2LPYt9it 139186
+4LiB4Liz4LmE4Lij 139187
+65Ok64+E 139188
+15nXpteZ16jXlA== 139189
+44GG44GT44Go 139190
+2YTYp9it2YI= 139191
+44GE44KM44Gw 139192
+INC40YHQv9C+0LvRjNC30YPRjtGC 139193
+IELhu59p 139194
+INep16fXnNeZ150= 139195
+0YbQuNC60Ls= 139196
+0JDQng== 139197
+INeR16nXoNeU 139198
+2YbYtNi3 139199
+INep15nXoNeV15k= 139200
+INep15nXoNeV15nXmded 139201
+IHBvYmxhY2nDs24= 139202
+IEjGsG5n 139203
+4Lij4Liw4Lin 139204
+4Lij4Liw4Lin4Lix4LiH 139205
+2LHZitin2LbYqQ== 139206
+2LHYtdiv 139207
+2KrZgtmE2Yo= 139208
+2KrZgtmE2YrYrw== 139209
+IMO8bGtlbQ== 139210
+IMO8bGtlbWl6 139211
+4LiK4Liw 139212
+44Kv44Oq44O844Og 139213
+6IGe44GE44Gf 139214
+IHdhxbw= 139215
+IHdhxbxuZQ== 139216
+6rGw65Og 139217
+6rGw65Og7JqU 139218
+157XkNeR16c= 139219
+15fXk9ep15XXqg== 139220
+IFdyb2M= 139221
+IFdyb2PFgmF3 139222
+IEvDvGx0w7xy 139223
+c2lzdA== 139224
+c2lzdMOqbmNpYQ== 139225
+16LXlteo15Q= 139226
+IGfGsMahbmc= 139227
+4Lij4LmJ4Liy4LiZ4LiE4LmJ4Liy 139228
+INmI2KPZiNi22K0= 139229
+w6FuZG9zZQ== 139230
+44K344O844Oz 139231
+15DXoNeo15I= 139232
+15DXoNeo15LXmdeU 139233
+44Gq44GE44Gn44GZ 139234
+IGto4bunbmc= 139235
+IOusuOyEnA== 139236
+INeR15PXkdeo 139237
+15PXmdeV 139238
+15PXmdeV15XXlw== 139239
+IHLDqWds 139240
+2YXZiNin2K8= 139241
+0L7QsdC+0YA= 139242
+0L7QsdC+0YDQvtGC 139243
+INeU15HXnA== 139244
+INeU15HXnNeV15I= 139245
+2K3Yp9mF 139246
+INin2YTYudin2LU= 139247
+INin2YTYudin2LXZhdip 139248
+0L/QtdGA0LDRgtC+0YA= 139249
+2KrYrtmE 139250
+2KrYrtmE2LU= 139251
+44Gf44Gg44GX 139252
+2KrYs9mF 139253
+4LmC4Lij4LiH4Lie 139254
+4LmC4Lij4LiH4Lie4Lii4Liy 139255
+4LmC4Lij4LiH4Lie4Lii4Liy4Lia4Liy4Lil 139256
+IFnDvGs= 139257
+IFnDvGtzZWs= 139258
+INep16DXmdeq 139259
+INep16DXmdeq158= 139260
+bGnEn2U= 139261
+INek16o= 139262
+INek16rXldeX 139263
+IGJlxJ8= 139264
+IGJlxJ9lbg== 139265
+INee15XXqA== 139266
+INee15XXqNeb15E= 139267
+INix2LPYp9mE2Kk= 139268
+7Ya17Iug 139269
+IGF2YWxpYQ== 139270
+IGF2YWxpYcOnw7Vlcw== 139271
+IG1hbmg= 139272
+IG1hbmjDow== 139273
+IOyVng== 139274
+IOyVnuycvOuhnA== 139275
+2YLYqtix 139276
+2YLYqtix2K0= 139277
+4LmA4LiB4Li34Lit 139278
+4LmA4LiB4Li34Lit4Lia 139279
+IHByb3Bvc8Op 139280
+2KPZhdin 139281
+2KPZhdin2YPZhg== 139282
+INCe0J4= 139283
+INCe0J7Qng== 139284
+2YXZgtin2LE= 139285
+2YXZgtin2LHZhtip 139286
+64SQ 139287
+44GE44Gf44Gg44GP 139288
+2YLZitmE 139289
+INC90LDRiNC40YU= 139290
+44Kr44OD44OX 139291
+15fXnNeq 139292
+IOuLpOunjA== 139293
+4LiX4Lix4LmI4Lin4LmC4Lil4LiB 139294
+44ON44K/ 139295
+2K3Ys9in2LM= 139296
+44Gr44Gq44KM 139297
+2KzYp9im 139298
+2KzYp9im2LLYqQ== 139299
+w6ljaGFuZ2U= 139300
+w6ljb25vbQ== 139301
+w6ljb25vbWll 139302
+0KLQmA== 139303
+16HXqteb15w= 139304
+4LiX4Lix4LmJ4LiH4Liq4Lit4LiH 139305
+INin2YTYrtin2YU= 139306
+INin2YTYrtin2YXYsw== 139307
+16fXmNei 139308
+YXV3YcW8 139309
+4Lic4Li54LmJ4LiK4Liy4Lii 139310
+4LmB4Lib4Lil4LiB 139311
+5ZCM5pmC44Gr 139312
+0LfQvdCw0L3QuNGP 139313
+44GE44Gf44Gg44GN44G+44GX44Gf 139314
+INee15HXnNeZ 139315
+4LiC4Lit4LmD4Lir4LmJ 139316
+INin2YTYqtix2KjZitip 139317
+IGTDqWNvdXZlcnQ= 139318
+IMW8eWNpdQ== 139319
+YXByw6hz 139320
+IHlhYg== 139321
+IHlhYmFuYw== 139322
+IHlhYmFuY8Sx 139323
+IGJhxZ9sYXlhbg== 139324
+7JeI642Y 139325
+IGhlc2FixLE= 139326
+IOunjOyVvQ== 139327
+66eI64uk 139328
+IFRow6FuaA== 139329
+44O044Kh 139330
+4Lib4Lij4Lix4Lia4Lib4Lij 139331
+4Lib4Lij4Lix4Lia4Lib4Lij4Li44LiH 139332
+IE3hurdj 139333
+4LmA4Lir4LiV4Li44Lic4Lil 139334
+INCR0LXQtw== 139335
+IGNhcGFjaXTDoA== 139336
+xYJlxZs= 139337
+INC/0YDQtdC40Lw= 139338
+INC/0YDQtdC40LzRg9GJ0LXRgdGC0LI= 139339
+IMWad2nEmXQ= 139340
+IHB1Ymxpw6k= 139341
+157Xotem15E= 139342
+2YXYtNin2LHZg9in2Ko= 139343
+4Lig4Liy4Lip 139344
+4Lig4Liy4Lip4Li1 139345
+IGRldXhpw6htZQ== 139346
+INmF2K3Yp9mB2Lg= 139347
+INmF2K3Yp9mB2LjYqQ== 139348
+IFNjaMO2bg== 139349
+772k 139350
+INeU15HXog== 139351
+INeU15HXoteZ15Q= 139352
+INmI2KfZhNmE2Yc= 139353
+6KiA44Gj44Gf 139354
+4LiV4LmJ4Liy4LiZ 139355
+4Lin4Lij4Lij4LiT 139356
+4LiX4Li04Lio 139357
+IGJhxZ/EsW5h 139358
+IG1vZ8SZ 139359
+16nXmdek15XXqA== 139360
+INmI2LnYrw== 139361
+INmI2LnYr9mF 139362
+IGhpc3TDs3JpY28= 139363
+IGvEsXPEsQ== 139364
+IOydtOqyjA== 139365
+IFBvbMOtdGljYQ== 139366
+INGB0LjRgtGD0LDRhtC40Lg= 139367
+IGtvxYRjYQ== 139368
+15HXk9eZ16fXlA== 139369
+INin2YTYs9mK2KfYsdin2Ko= 139370
+44Gq44KJ44Gw 139371
+44K144Op 139372
+44KL44GT44Go44GM44Gn44GN44KL 139373
+IGRlY2lzw6Nv 139374
+15XXldeT 139375
+bMOkc3M= 139376
+bMOkc3NpZw== 139377
+INec15nXqdeo15DXnA== 139378
+INmK2KPYqtmK 139379
+16jXldeW 139380
+w7bEnw== 139381
+w7bEn3JldA== 139382
+w7bEn3JldGlt 139383
+INC00LXQug== 139384
+INC00LXQutCw0LE= 139385
+INC00LXQutCw0LHRgNGP 139386
+INep15fXldeo 139387
+44Gm44GP44KM44Gf 139388
+2LnYqNin2LHYqQ== 139389
+IMOpbGVjdHJpcXVl 139390
+INin2YTYqtmG2YXZitip 139391
+2KzYsdmJ 139392
+IOyImO2WiQ== 139393
+4LiX4Li5 139394
+INGA0LXQsNC70YzQvdC+ 139395
+0YHQv9C+0YHQvtCx 139396
+4LiE4Lil4LmJ4Liy4Lii 139397
+INiz2LnZiNiv 139398
+w7Zuw7w= 139399
+INmB2YXZhg== 139400
+2KrZg9mI 139401
+2KrZg9mI2YrZhg== 139402
+INC60LDRh9C10YHRgtCy0L4= 139403
+INC60L7QvdGC0LDQug== 139404
+INC60L7QvdGC0LDQutGC 139405
+IHPDtnpsZcWfbWU= 139406
+4Lit4LmJ4Liy4LiH 139407
+INiq2YjZgQ== 139408
+INiq2YjZgdmK2LE= 139409
+15TXlteT 139410
+15TXlteT157XoNeV16o= 139411
+INi32YjZitmE2Kk= 139412
+IHTDqXJtaW5v 139413
+INeQ15nXpNeU 139414
+44OT44Or 139415
+4Liq4LmC4Lih 139416
+4Liq4LmC4Lih4Liq4Lij 139417
+INin2YTYp9ir 139418
+INin2YTYp9ir2YbZitmG 139419
+0LXQstC40Yc= 139420
+IG9waW5pw7Nu 139421
+4Lib4Lin4LiU 139422
+5Y+k44GE 139423
+4Lij4LmI4Liy 139424
+IEJpYcWC 139425
+INGB0YLQsNC7 139426
+INGB0YLQsNC70L4= 139427
+w7Nsb2dv 139428
+IOyVhOuLiOuLpA== 139429
+INeQ15nXqg== 139430
+INeQ15nXqteV 139431
+4LmA4Lir4LmH4LiZ4Lin4LmI4Liy 139432
+4Lia4Liy4Lij4LmM 139433
+54S8 139434
+54S844GN 139435
+IOydtOyaqeyekA== 139436
+INC90LXQutC+0YLQvtGA0YvQtQ== 139437
+a3N6 139438
+a3N6dGHFgg== 139439
+a3N6dGHFgmM= 139440
+44Kt44Oj44OD44K3 139441
+44Kt44Oj44OD44K344Oz44Kw 139442
+IHJvxZs= 139443
+IHJvxZtsaW4= 139444
+0YDQsNC20LA= 139445
+15HXoNeZ15nXlA== 139446
+4Lib4Lij4Liq4Li0 139447
+4Lib4Lij4Liq4Li04LiV 139448
+IGfDtnJkw7w= 139449
+157XoNeU15nXkg== 139450
+5aSJ44KP44Gj44Gm 139451
+INeQ15Q= 139452
+INeQ15TXkdeq15k= 139453
+4LmA4Lij4LmI4LiH 139454
+IMO2bsO8bmRl 139455
+IOq3uOuDpQ== 139456
+0L/QvtC70LjRgg== 139457
+0L/QvtC70LjRgtC40YfQtdGB0Lo= 139458
+44Oh44OH44Kj 139459
+44Oh44OH44Kj44Ki 139460
+IERldGF5 139461
+IERldGF5bMSx 139462
+INin2YTYtdmB2K3YqQ== 139463
+4LiB4Liy4Lij4LmA4LiH4Li04LiZ 139464
+IOy1nOq3vA== 139465
+15vXqdec 139466
+77yp 139467
+0LLRiNC10LPQvg== 139468
+7ZWY7Iuk 139469
+INCt0YI= 139470
+INCt0YLQvtGC 139471
+4Liq4Li3 139472
+4Liq4Li34Lia 139473
+IG5n4burbmc= 139474
+INC00L7QutGD0LzQtdC90YLQvtCy 139475
+0LTQsNCy0LDRgtGM 139476
+INin2YTYtNiu2LXZitip 139477
+INem16LXmdeo 139478
+2K/YsdmD 139479
+2LPYrdio 139480
+4LmE4Lih4LmI4LiE4LmI4Lit4Lii 139481
+INeU157Xp9eV157XmQ== 139482
+4Liq4Lix4LmI4LiH4LiL4Li34LmJ4Lit 139483
+IOq3uOqyg+ydhA== 139484
+44GC44KL44GE 139485
+44GC44KL44GE44Gv 139486
+15DXldeY15XXkQ== 139487
+15DXldeY15XXkdeV16E= 139488
+0LrRhtC40L7QvQ== 139489
+INCc0L7QttC90L4= 139490
+44GP44Gg 139491
+44GP44Gg44GV 139492
+INC40L3RhNC+0YDQvNCw0YbQuNGP 139493
+77uf 139494
+IOyekeyXhQ== 139495
+INeZ15XXodej 139496
+2KXYr9in2LHYqQ== 139497
+INin2YTYrdin2Kw= 139498
+16DXodeZ16LXlA== 139499
+0LjQt9Cw0YbQuNGP 139500
+15DXnNeR 139501
+15DXnNeR15XXnQ== 139502
+0L/QtdC0 139503
+INen15jXoNeU 139504
+INmG2YHYs9mH2Kc= 139505
+IE1pbmlzdMOpcmlv 139506
+INC/0LXQvQ== 139507
+INC/0LXQvdGB0Lg= 139508
+44OQ44Op44Oz44K5 139509
+INeU16rXldeo15Q= 139510
+IHThuqFt 139511
+IOyXreyLnA== 139512
+772h 139513
+IHRo4bux 139514
+IMSxc8Sx 139515
+7Luo 139516
+44GX44Gj44GL44KK44Go 139517
+IHjGsGE= 139518
+IGPhurdw 139519
+15fXmdeR15XXqA== 139520
+4Lin4Lix4LiS4LiZ4LiY4Lij4Lij4Lih 139521
+c3TDpHI= 139522
+c3TDpHJrZQ== 139523
+INGB0LDQvNGL0Lk= 139524
+cGlzYQ== 139525
+cGlzYcSH 139526
+IG9sdcWfYW4= 139527
+INin2YTYpdmF2KfZhQ== 139528
+IGPEg25n 139529
+IGfDvG5s 139530
+IGfDvG5sw7xr 139531
+INeg16nXkNeo 139532
+IGtoaeG7g24= 139533
+57aa44GR44KL 139534
+c3RpdHVjacOzbg== 139535
+IGNhcGFjaXTDqQ== 139536
+IGpha2k= 139537
+IGpha2nFmw== 139538
+0LLRiNC40YE= 139539
+0LLRiNC40YHRjA== 139540
+16TXoteV15zXldeq 139541
+INit2YrYp9iq 139542
+INit2YrYp9iq2Yc= 139543
+INC90LjQutC+0LPQtNCw 139544
+0JvQrA== 139545
+INeU16LXldeR 139546
+INeU16LXldeR15PXlA== 139547
+IGNow6Bv 139548
+4Lir4Lil4Liy4Lii4LmG 139549
+INGP0L0= 139550
+INGP0L3QstCw0YA= 139551
+INGP0L3QstCw0YDRjw== 139552
+4LiI4Liz4LmA4Lib4LmH4LiZ4LiV4LmJ4Lit4LiH 139553
+IGjDtmhlcg== 139554
+44GV44KM44Gm44GE44Gf 139555
+4Liq4LiH4Liq4Lix 139556
+4Liq4LiH4Liq4Lix4Lii 139557
+INin2YTYp9iz 139558
+INin2YTYp9iz2YTYp9mF 139559
+INin2YTYtNmF2LM= 139560
+4Liq4LiW4Liy4LiZ4Li1 139561
+44Kv44Op44K5 139562
+4Lie4Lij4Lij 139563
+4Lie4Lij4Lij4LiE 139564
+cMO1 139565
+cMO1ZQ== 139566
+IHBvcsOpbQ== 139567
+4Lib4Lij4Liw4Liq4LiH 139568
+4Lib4Lij4Liw4Liq4LiH4LiE4LmM 139569
+cG93aWVkemll 139570
+cG93aWVkemllxIc= 139571
+INC80L7Qs9GD 139572
+INC20LXQuw== 139573
+INC20LXQu9C10Lc= 139574
+INin2YTYq9mC 139575
+INin2YTYq9mC2KfZgdmK 139576
+INC/0YDQsNCy0LjQu9C+ 139577
+IGdkecW8 139578
+16TXqdeV15g= 139579
+0YDQsNCx0L7RgtC60LA= 139580
+INmD2LHYqQ== 139581
+2LTYr9iv 139582
+2YXYp9ix2YM= 139583
+2YXZg9ip 139584
+INC/0L7QtNC/0LjRgQ== 139585
+15jXldeV15c= 139586
+IMWbYw== 139587
+IMWbY2lhbg== 139588
+INix2KzYp9mE 139589
+INeq15zXldeZ 139590
+0LjRiA== 139591
+0LjRiNGM 139592
+IG3DqWRlYw== 139593
+IG3DqWRlY2lu 139594
+642U652864+E 139595
+INGC0LXQsdGP 139596
+INec15TXldeh15nXow== 139597
+44GK6Kmx 139598
+IOC5geC4leC5iOC4geC5hw== 139599
+2K/Yp9mB 139600
+2K/Yp9mB2Lk= 139601
+IEPDuW5n 139602
+44O744O744O744O7 139603
+6raB 139604
+IGRlYmVyw61h 139605
+4Lir4LiZ4LmI4Lin4Lii4LiH4Liy4LiZ 139606
+IHZhzIA= 139607
+INei16bXng== 139608
+INei16bXnted 139609
+4LmA4LiK4Li34LmI4Lit4Lin4LmI4Liy 139610
+16nXp9ei 139611
+INeU15vXldec 139612
+INeU15vXldec15w= 139613
+0L3QuNCx0YPQtA== 139614
+0L3QuNCx0YPQtNGM 139615
+IOuEiO2drA== 139616
+INC+0LHRgNCw0Yk= 139617
+INC+0LHRgNCw0YnQsA== 139618
+INei15HXldeT16o= 139619
+INin2YTZhdmG2KrYrtio 139620
+xLF5b3Jk 139621
+xLF5b3JkdQ== 139622
+2YjYsA== 139623
+15fXqdeZ15HXldeq 139624
+INeU16LXmden 139625
+INeU16LXmden16jXmQ== 139626
+7KKM 139627
+4Lii4Li44LmC4Lij 139628
+4Lii4Li44LmC4Lij4Lib 139629
+INCw0L/RgA== 139630
+INCw0L/RgNC10LvRjw== 139631
+c3plZA== 139632
+c3plZMWC 139633
+0LTQvtC9 139634
+4LmA4LiV4Li04Lia 139635
+4LmA4LiV4Li04Lia4LmC4LiV 139636
+0LrQvtC70L4= 139637
+IGthxbxkZWo= 139638
+5biw 139639
+5biw44KK 139640
+INC80LjQu9C70Lg= 139641
+INC80LjQu9C70LjQvtC9 139642
+576O5ZGz44GX44GE 139643
+2KrZgtin2LE= 139644
+2KrZgtin2LHZitix 139645
+IOydtOujqA== 139646
+IOydtOujqOyWtA== 139647
+IHNwcnplZGHFvA== 139648
+15TXldem15DXldeq 139649
+44Ki44Kv44K7 139650
+44Ki44Kv44K744K5 139651
+16jXldel 139652
+INCz0L7RgdGD0LTQsNGA0YHRgtCy0LXQvdC9 139653
+2KPYrdmD 139654
+2KPYrdmD2KfZhQ== 139655
+IG9sdcWfdQ== 139656
+IEHDpw== 139657
+IEHDp8Sxaw== 139658
+44K444O8 139659
+57Sg5pm0 139660
+57Sg5pm044KJ44GX44GE 139661
+INeR16nXkdeV16I= 139662
+2KjYsA== 139663
+2KjYsNmE 139664
+4Liq4Liy4LmA4Lir4LiV4Li4 139665
+IHBvem9zdGE= 139666
+IHBvem9zdGHFgg== 139667
+2K3YsdmF 139668
+IGltcG9ydMOibmNpYQ== 139669
+bGXFn3Rpcm1l 139670
+INC00YDQtdCy 139671
+IG3Ds3ZpbA== 139672
+IEF5bsSx 139673
+INC90LDQu9C+0LM= 139674
+INC90LDQu9C+0LPQvtCy 139675
+INeX15nXpNeU 139676
+INGE0L7RgNC80YM= 139677
+4LiX4LiU4Liq4Lit4Lia 139678
+IGtzacSFxbxraQ== 139679
+IG1hxYJl 139680
+2YXYs9ij2YQ= 139681
+2YXYs9ij2YTYqQ== 139682
+77y+77y+ 139683
+w6fDo2VzdGU= 139684
+w6l2aXRlcg== 139685
+INC60L7QvdGB0YLRgNGD0Lo= 139686
+INC60L7QvdGB0YLRgNGD0LrRhtC4 139687
+776e 139688
+INeq15XXm9eg 139689
+44K544OI44Os44K5 139690
+INin2YTYp9mC2KrYtdin2K/Zig== 139691
+157Xk9eZ 139692
+IHfFgmFk 139693
+IHfFgmFkeg== 139694
+2K7ZiNmB 139695
+INC80LDRgtC10YDQuNCw0LvQvtCy 139696
+44Go44Gj44Gm44KC 139697
+IHpuYWpkdQ== 139698
+IHpuYWpkdWrEhQ== 139699
+2YHYptip 139700
+44Gp44Gu44KI44GG44Gq 139701
+5oqR44GI 139702
+16DXl9ec 139703
+IGTDvG55 139704
+IGTDvG55YW4= 139705
+IGTDvG55YW7EsW4= 139706
+0LPRgNCw0L3QuA== 139707
+0LPRgNCw0L3QuNGH 139708
+INeU16nXnNeZ16nXmQ== 139709
+INeU15DXqQ== 139710
+5Y+K44Gz 139711
+7Iut7Iuc 139712
+7Iut7Iuc7Jik 139713
+INC00L7Qu9C7 139714
+INC00L7Qu9C70LDRgA== 139715
+INC/0L7QstGC0L7RgA== 139716
+INeX15nXoNed 139717
+16rXpNeq15c= 139718
+0YPQstC10LvQuA== 139719
+0YPQstC10LvQuNGH0LXQvQ== 139720
+44Kr44Oq 139721
+cmF3aWQ= 139722
+cmF3aWTFgm93 139723
+15XXldec 139724
+44Of44Ol 139725
+7L2Y 139726
+IEJ5xYI= 139727
+0JzQkA== 139728
+2LnZkA== 139729
+INGB0L7QstC10YDRiA== 139730
+INGB0L7QstC10YDRiNC10L3QvdC+ 139731
+INC80L7QuQ== 139732
+INeV15zXkNeX16g= 139733
+5oWj 139734
+5oWj44KM 139735
+2K3Yp9mB2Lg= 139736
+IOustOujjA== 139737
+4LiE4LiT4Liw4LiB4Lij4Lij4Lih 139738
+4LiE4LiT4Liw4LiB4Lij4Lij4Lih4LiB4Liy4Lij 139739
+IOyWtOuUlA== 139740
+IGRpZmVyZW4= 139741
+IGRpZmVyZW7Dp2E= 139742
+INin2YTYo9iz2KfYsw== 139743
+INin2YTYo9iz2KfYs9mK2Kk= 139744
+INec15DXl9eo15XXoNeU 139745
+6reg 139746
+INeU16nXoNeZ15nXlA== 139747
+7JyE7JuQ7J6l 139748
+4Lil4Li44LiB 139749
+w6dpbGVy 139750
+INeU15DXnNeV 139751
+6IGe44GP 139752
+INeV15DXpNeZ15zXlQ== 139753
+INGA0LXQsNC70LjQtw== 139754
+INGA0LXQsNC70LjQt9Cw0YbQuA== 139755
+4Lij4Liw4Lii4Liw4LmA4Lin4Lil4Liy 139756
+INis2K/Yp9mL 139757
+2KrYqNin2Lk= 139758
+IHZlaMOtY3Vsbw== 139759
+INC00L7Qu9Cz 139760
+4Lib4Lij4Li04Lih4Liy4LiT 139761
+7KaQ 139762
+INec157Xp9eV150= 139763
+IOyCrOynhA== 139764
+4LiK4LmJ4Liy 139765
+INee16LXldec15Q= 139766
+IGfDtnJt 139767
+IGfDtnJtZWs= 139768
+INmI2YfYsNmH 139769
+0L/QtdGA0LI= 139770
+0L/QtdGA0LLRi9GF 139771
+6re4656Y 139772
+INin2YTYqNix2YrYtw== 139773
+INin2YTYqNix2YrYt9in2YbZig== 139774
+INC40Y7QvdGP 139775
+INCT0L7RgA== 139776
+INec16nXnNed 139777
+0JDQnQ== 139778
+INC90LDQt9C90LDRh9C10L0= 139779
+0L7QvtGA 139780
+0L7QvtGA0YPQtg== 139781
+IMO2emVsbGk= 139782
+IMO2emVsbGnEn2k= 139783
+INC90LjQttC1 139784
+57aa44GR44Gm 139785
+INCw0YDQtdC90LQ= 139786
+IGthdMSxbMSx 139787
+IGthdMSxbMSxbQ== 139788
+INil2LfZhNin2YI= 139789
+INmI2KXYsNin 139790
+INC+0LrRgtGP 139791
+INC+0LrRgtGP0LHRgNGP 139792
+4LmC4LiV4Lk= 139793
+4LmC4LiV4LmK 139794
+4LmC4LiV4LmK4Liw 139795
+IG9sZHVrbGFyxLE= 139796
+2YXZiNmC2Lk= 139797
+64Kp 139798
+44Go5oCd44Gj44Gm44GE44KL 139799
+INep15nXm9eV15w= 139800
+4Lin4Liy4LiU 139801
+2LPZitmE 139802
+4LiC4Lin4Lix 139803
+4LiC4Lin4Lix4LiN 139804
+2KrYrdmD2YU= 139805
+7IKt 139806
+IGNvbm5hw650 139807
+16DXpNeq15c= 139808
+IGNo4bq3 139809
+IGNo4bq3bg== 139810
+INmF2K3ZhQ== 139811
+INmF2K3ZhdmI2K8= 139812
+44G0 139813
+INC/0YDQvtC00YPQutGG0LjQuA== 139814
+0LfQtNGA0LDQsg== 139815
+44GU6KY= 139816
+44GU6Kan 139817
+15DXkdeQ 139818
+IHbDqXJpdGFibGU= 139819
+INi32YHZhA== 139820
+44OI44Op44OW44Or 139821
+6rOh 139822
+INeq157Xldeg15Q= 139823
+IGtpw6pu 139824
+INmC2KfYr9ix 139825
+2KXZgtmE2YrZhQ== 139826
+INC/0YDQtdC00L/RgNC4 139827
+INC/0YDQtdC00L/RgNC40Y/RgtC40Y8= 139828
+IGLEg25n 139829
+IGF5xLFuZGE= 139830
+IGfhuqVw 139831
+0LXRhdCw0Ls= 139832
+IGdpw6BuaA== 139833
+INC00LDQsg== 139834
+INC00LDQstC90L4= 139835
+7JiA64uk 139836
+4LiZ4Lix4LiB4LmA4LiV 139837
+4LiZ4Lix4LiB4LmA4LiV4Liw 139838
+2YXYs9iq2LTYp9ix 139839
+2LPYqtix2KfYqtmK2Kw= 139840
+2LPYqtix2KfYqtmK2KzZig== 139841
+2LHZhdiy 139842
+IHTEqW5o 139843
+66Gt 139844
+INGH0LXRgg== 139845
+INGH0LXRgtGL 139846
+INGH0LXRgtGL0YDQtQ== 139847
+IEVudMOjbw== 139848
+INi12Lo= 139849
+INi12LrZitix2Kk= 139850
+15HXmdeY15XXnA== 139851
+2K7Yt9mI2Lc= 139852
+INGA0LDQt9Cy0LjRgtC40LU= 139853
+IGFtYWPEsXlsYQ== 139854
+4LiX4Li14Lin4Li1 139855
+INC+0YHRgg== 139856
+INC+0YHRgtCw0LvRjNC9 139857
+16nXldec15fXnw== 139858
+INeb16DXmdeh 139859
+INeb16DXmdeh15Q= 139860
+IGThuq15 139861
+IHlhxZ9heWFu 139862
+INee15TXldeV15Q= 139863
+INGD0YHQuA== 139864
+INGD0YHQuNC70Lg= 139865
+157XpNeZ 139866
+INC/0YDQvtCy0LXQtNC10L3QuNGP 139867
+INix2Kg= 139868
+INix2KjZhdin 139869
+INin2YTYo9mI2LPYtw== 139870
+IOycoOyngA== 139871
+IHByYWNvd25paw== 139872
+IHByYWNvd25pa8Ozdw== 139873
+157XodeV16jXqg== 139874
+2YLYp9ix2Kg= 139875
+4LiE4Lin4Liy4Lih4Lij4Li54LmJ4Liq4Li24LiB 139876
+4LmB4Lir4Lil4Liw 139877
+INin2YTZhtmC2K8= 139878
+INeQ15zXpNeZ 139879
+2YXYs9im 139880
+2YXYs9im2YjZhA== 139881
+0LXQstGL0YU= 139882
+0LrQu9GO0YfQtdC90LjRjw== 139883
+15HXmdeg 139884
+15HXmdeg15nXlNed 139885
+16nXldeQ15Q= 139886
+IMWfYXJr 139887
+IMWfYXJrxLE= 139888
+IHPDvHJlYw== 139889
+IHPDvHJlY2lu 139890
+4LmA4LiE4Lij4LiU 139891
+4LmA4LiE4Lij4LiU4Li04LiV 139892
+44OQ44Os 139893
+INi02KPZhg== 139894
+4LmA4Lit4Liy4LmE4Lin4LmJ 139895
+bmnEmWNpZQ== 139896
+16jXpteX 139897
+IGHFn2FtYQ== 139898
+16DXpNeS16I= 139899
+IHRo4bud 139900
+IGtodeG6qW4= 139901
+ZGnEn2luZGU= 139902
+0Y/RidC40YU= 139903
+44OY44Or 139904
+IMO8YmVyaA== 139905
+IMO8YmVyaGF1cHQ= 139906
+INGC0YDQtdCx0L7QstCw 139907
+IGTFgnVnaQ== 139908
+15jXmdef 139909
+4LiC4LiZ4Liy4LiU4LmD4Lir4LiN4LmI 139910
+INin2YTYo9mH 139911
+INin2YTYo9mH2YTZig== 139912
+IE3DvGQ= 139913
+IE3DvGTDvHLDvA== 139914
+INeZ15TXldeT15Q= 139915
+0YvQstCw0LXRgtGB0Y8= 139916
+2LPYp9i3 139917
+15TXqteg15TXkg== 139918
+15TXqteg15TXkteV16o= 139919
+4LiB4Liy4Lij4Lic4Lil4Li04LiV 139920
+7ZKA 139921
+4Liq4LiW4Liy4LiZ4LiB4Liy4Lij4LiT4LmM 139922
+INC+0YQ= 139923
+INC+0YTQuNGB 139924
+INmE2LnYqNip 139925
+IHN0cm9uxJk= 139926
+INeo15DXldeZ 139927
+15fXkdec 139928
+INGA0YvQvQ== 139929
+INGA0YvQvdC60LU= 139930
+INec157Xotef 139931
+2KfYs9mE 139932
+4Lir4Lix4LiZ 139933
+INeQ15fXmQ== 139934
+INC/0YDQvtC00L7Quw== 139935
+6rCA7J6F 139936
+INeR16jXlw== 139937
+INeR16jXl9eR15k= 139938
+0LTQttC10YA= 139939
+INec15fXnA== 139940
+INec15fXnNeV15g= 139941
+INec15fXnNeV15jXmdef 139942
+4Lio4Liy4Liq4LiZ4Liy 139943
+44Ki44Kk44OG 139944
+44Ki44Kk44OG44Og 139945
+INek16jXldek 139946
+2KzYstin2KE= 139947
+4Lil4Lit4Lii 139948
+IGNpYcWCYQ== 139949
+IGdp4bq/dA== 139950
+INC30L3QsNGH0LjRgtC10LvRjNC90L4= 139951
+IG9sbWFkxLHEnw== 139952
+IG9sbWFkxLHEn8SxbsSx 139953
+0L3QtA== 139954
+0L3QtNC10LrRgQ== 139955
+2KrYo9mD2K8= 139956
+IOyWuA== 139957
+IOyWuOygnA== 139958
+YXlkxLFu 139959
+44OJ44Os44K5 139960
+IHPhuq90 139961
+IO2YuO2FlA== 139962
+IOu2gQ== 139963
+IOu2ge2VnA== 139964
+44OR44Kk 139965
+INee16nXl9en15k= 139966
+4LiE4LiZ4Lit4Li34LmI4LiZ 139967
+INC40LfQs9C+0YLQvtCy 139968
+INC40LfQs9C+0YLQvtCy0LvQtdC9 139969
+4LmA4LiB4Li14Lii4Lij 139970
+4LmA4LiB4Li14Lii4Lij4LiV4Li0 139971
+16rXp9ep16g= 139972
+INGA0LDRgdGH0LXRgg== 139973
+4Liq4LmA4LiV 139974
+IGzDpG5nZXI= 139975
+IGnFn2xldA== 139976
+IGnFn2xldG1l 139977
+INi52YTZitmG 139978
+INi52YTZitmG2Kc= 139979
+w6lsZWN0aW9u 139980
+INin2YTYutix2KjZitip 139981
+7YuA 139982
+44KC44KJ44GI 139983
+INC60L3QuNCz0Lg= 139984
+2KPYs9mF 139985
+2KPYs9mF2KfYoQ== 139986
+IHRo4buP 139987
+IHRo4buPYQ== 139988
+4Lir4LiZ4Li5 139989
+INeg16LXqdeU 139990
+4Lig4Liy4Lii4LmD4LiV4LmJ 139991
+4Lie4Li34LiK 139992
+2LHZiti3 139993
+2YHZiNi2 139994
+44GC44KK44GM44Go44GG44GU44GW44GE44G+44GX44Gf 139995
+16nXk9eU 139996
+IG5n4buxYw== 139997
+INGB0LXRgNGM 139998
+INGB0LXRgNGM0LXQt9C9 139999
+VMO0aQ== 140000
+IGZpeWF0bGFyxLE= 140001
+INCy0YHRjg== 140002
+IEPDs2RpZ28= 140003
+INeU16nXkA== 140004
+INeU16nXkNec15Q= 140005
+IFDDumJsaWNh 140006
+2KXYrg== 140007
+2KXYrtmI2KfZhg== 140008
+INC30LDRj9Cy0LjQuw== 140009
+44Om44O8 140010
+16jXkNeZ16o= 140011
+dm9sdWNpw7Nu 140012
+IHN6a28= 140013
+IHN6a2/Fgnk= 140014
+2KzYsdmK2K/YqQ== 140015
+IHBlbnPDqQ== 140016
+7Ims 140017
+IELDvHnDvGvFn2VoaXI= 140018
+INij2YXYsdmK 140019
+INij2YXYsdmK2YPZig== 140020
+4LiZ4Lix4LiB4Lio4Li24LiB4Lip4Liy 140021
+IHRvZGF2 140022
+IHRvZGF2w61h 140023
+INCh0LDQvQ== 140024
+INCh0LDQvdC60YI= 140025
+7ZWY7J6Q 140026
+2K3ZiNin2YQ= 140027
+15vXldep16g= 140028
+4LmA4Lil4Lii4LiE4Lij4Lix4Lia 140029
+IGFsZ3U= 140030
+IGFsZ3XDqW0= 140031
+2YHYsg== 140032
+IMOnZWtpbA== 140033
+INeT16jXm9eZ150= 140034
+44OQ44Op 140035
+4LiB4LmH4Liq4Liy4Lih4Liy4Lij4LiW 140036
+4Liq4LmI4Lin4LiZ4Lil4LiU 140037
+7Y+w 140038
+IFDDumI= 140039
+IFDDumJsaWNv 140040
+4LmB4LiZ4Lin4LiX4Liy4LiH 140041
+15DXqteS16g= 140042
+2LTYp9i0 140043
+2LTYp9i02Kk= 140044
+Y2nFm25p 140045
+IMOccsO8bg== 140046
+2YTZiNit 140047
+INin2YTYqNmG 140048
+INin2YTYqNmG2YM= 140049
+7KGw7LmY 140050
+IG9yZ2FuaXphY2nDs24= 140051
+44GC44KK44GM44Go44GG44GU44GW44GE44G+44GZ 140052
+c8OkdHpl 140053
+INGB0LXQvNC10Lk= 140054
+2YLYtdiv 140055
+0YHRgtCy0LXQvdC90YvQtQ== 140056
+IHByw6ljw6lk 140057
+IHByw6ljw6lkZW50 140058
+4LiB4Lij4Li44LiH4LmA4LiX4Lie4Liv 140059
+44Go6KiA44GE 140060
+15HXoNeZ15nXnw== 140061
+INit2Yg= 140062
+INit2YjYp9mE2Yo= 140063
+16HXp9eh 140064
+IHNhxJ9sYW1haw== 140065
+INec16bXmdeZ158= 140066
+16fXk9ep 140067
+INeU157Xoteo15vXqg== 140068
+INec15TXoteR15nXqA== 140069
+IGfDvG5k 140070
+IGfDvG5kZW0= 140071
+INC90LDRiNC10LPQvg== 140072
+4LmD4LiZ4Lie4Li34LmJ4LiZ4LiX4Li14LmI 140073
+4LmA4LiE4Lij4Li34Lit 140074
+4LmA4LiE4Lij4Li34Lit4LiC 140075
+4LmA4LiE4Lij4Li34Lit4LiC4LmI4Liy4Lii 140076
+2LjYp9mH2LHYqQ== 140077
+2YXZhti42YU= 140078
+2YXZhti42YXYp9iq 140079
+2YXYqtin2LI= 140080
+6L+944GE 140081
+ZMSxa3Q= 140082
+ZMSxa3Rhbg== 140083
+IOuNlOyasQ== 140084
+INCd0LDQv9GA0LjQvNC10YA= 140085
+dHfDs3I= 140086
+157Xldei16bXlA== 140087
+2YPZiNmD 140088
+0Kk= 140089
+157XmNek15w= 140090
+w7NsaWNh 140091
+6Kiq44KM 140092
+IOuMgOu2gA== 140093
+IOuMgOu2gOu2hA== 140094
+44Kv44Oq44OD44Kv 140095
+44KS6YG4 140096
+44KS6YG444G2 140097
+IHBvd3N0YQ== 140098
+IHBvd3N0YcWC 140099
+IHJhesOzbg== 140100
+15HXldeX16g= 140101
+INGB0L7QvtCx0YnQuNC7 140102
+INen15HXldei 140103
+csOqdA== 140104
+4LiU4Li14LiC4Li24LmJ4LiZ 140105
+157Xodei15M= 140106
+157Xodei15PXldeq 140107
+IMOWc3RlcnJlaWNo 140108
+INeg15fXqdeR 140109
+2YXYqNin2K/Ysdip 140110
+7LSJ 140111
+15LXoNeY15k= 140112
+5L+h44GY 140113
+ZHXEnw== 140114
+ZHXEn3VudQ== 140115
+IHBow7o= 140116
+INin2YTYo9iu2YrYsQ== 140117
+INiq2LnYqtio2LE= 140118
+bGFuZMSxcsSxbA== 140119
+44Go44Gv44GE 140120
+44Go44Gv44GE44GI 140121
+INin2YTYt9mE 140122
+INin2YTYt9mE2KfYqA== 140123
+IE7Cug== 140124
+6YG/44GR 140125
+2KfZhNmF2Lk= 140126
+2KfZhNmF2LnYsdmI2YE= 140127
+4Liq4Lig4Liy 140128
+6Zui44KM 140129
+INC/0L7QvNC+0YnRjA== 140130
+INC30L3QsNC10YI= 140131
+44OX44Os44K8 140132
+44OX44Os44K844Oz44OI 140133
+IHN1cMOpcmlldXI= 140134
+INep15zXmdep15k= 140135
+INin2YTZhtmI2Lk= 140136
+44KT44Gn44GZ44Gt 140137
+4Lit4Lia4Lij4Lih 140138
+IGdp4buNbmc= 140139
+IHd6Z2zEmWQ= 140140
+INin2YTZgdmC2LE= 140141
+w6hyZW50 140142
+INee15DXlw== 140143
+INee15DXl9eV16jXmQ== 140144
+15LXkg== 140145
+15nXmdeR 140146
+2YXZhNin2Kg= 140147
+2YXZhNin2KjYsw== 140148
+IGjDvGvDvA== 140149
+IGjDvGvDvG1ldA== 140150
+INee15LXmdeR 140151
+INCe0Yc= 140152
+INCe0YfQtdC90Yw= 140153
+5pep44GE 140154
+IGNvbnN0cnVjY2nDs24= 140155
+IHRoxrDhu6NuZw== 140156
+77yL 140157
+IGNvcmHDp8Ojbw== 140158
+4LmA4Lir4Lil4LmH4LiB 140159
+IEJhxZ9i 140160
+IEJhxZ9iYWthbg== 140161
+6YCj44KM 140162
+44GZ44KL44GT44Go44GM44Gn44GN44G+44GZ 140163
+INmC2KfZhdiq 140164
+INin2YPYq9ix 140165
+2YHYp9i52YQ= 140166
+INGE0L7RgA== 140167
+INGE0L7RgNGD0Lw= 140168
+2LrYsNmK 140169
+IGnFn2xl 140170
+IGnFn2xlbWw= 140171
+IGnFn2xlbWxlcmk= 140172
+IOyCrOuejOydgA== 140173
+IOyekeyEsQ== 140174
+IOuniOugqA== 140175
+2YXYrNmE2LM= 140176
+4Lir4Lih4Li5 140177
+0LTQsg== 140178
+0LTQstC40LM= 140179
+0LTQstC40LPQsA== 140180
+4LmA4Liq4Li14Lii4LiK4Li14Lin4Li04LiV 140181
+15TXqtek16rXlw== 140182
+15TXqtek16rXl9eV16o= 140183
+INC80LXRgtGA0L4= 140184
+INGB0LXQvdGC 140185
+INGB0LXQvdGC0Y8= 140186
+INGB0LXQvdGC0Y/QsdGA0Y8= 140187
+6rOn 140188
+INec16TXog== 140189
+INec16TXotee15nXnQ== 140190
+4LmA4Lia4Li14Lii 140191
+6Kmz44GX44GP 140192
+55Ww44Gq44KL 140193
+IMSwbMOnZQ== 140194
+IEF0YXQ= 140195
+IEF0YXTDvHI= 140196
+IEF0YXTDvHJr 140197
+4Lij4Li44LmI4LiH 140198
+IGthbGTEsQ== 140199
+IOyjvOyepQ== 140200
+IHByw6lzZW5jZQ== 140201
+INC90LDQsQ== 140202
+INC90LDQsdC70Y4= 140203
+INC90LDQsdC70Y7QtNCw 140204
+INGB0LDQvNC+0LPQvg== 140205
+15LXldep 140206
+157XmNeV16Q= 140207
+157XmNeV16TXnA== 140208
+INCy0YvQsdC40YDQsA== 140209
+IOyekOumrA== 140210
+5YiG44GL44KJ44Gq44GE 140211
+INC30YPQsQ== 140212
+INep15vXkdeo 140213
+INiv2KfYpg== 140214
+INiv2KfYptmF2Kc= 140215
+INC/0LDRgNGC0Lg= 140216
+77yy 140217
+INin2YrYttin 140218
+INGF0L7Qtw== 140219
+INGF0L7Qt9GP 140220
+INGF0L7Qt9GP0Lk= 140221
+INGF0L7Qt9GP0LnRgdGC0LI= 140222
+INin2YTYo9is 140223
+INin2YTYo9is2YbYqA== 140224
+INin2YTYo9is2YbYqNmK2Kk= 140225
+INCX0L3QsA== 140226
+IEFww7Nz 140227
+INGN0L3QtdGA 140228
+INGN0L3QtdGA0LPQuA== 140229
+IHlhbnM= 140230
+IHlhbnPEsQ== 140231
+IEp1c3Rp 140232
+IEp1c3Rpw6dh 140233
+IHByw6l2dQ== 140234
+4Lih4Lin4Lil 140235
+7J6l64uY 140236
+4LiB4Lij4Liw4Lia 140237
+4LiB4Lij4Liw4Lia4Lin4LiZ 140238
+4LiB4Lij4Liw4Lia4Lin4LiZ4LiB4Liy4Lij 140239
+157Xng== 140240
+157XnteV16bXog== 140241
+IGjhurk= 140242
+IGjhurlu 140243
+0LfQtNCw0L3QuNC1 140244
+IGFrxZ8= 140245
+IGFrxZ9hbQ== 140246
+15jXldek 140247
+IGdlcmVrdA== 140248
+IGdlcmVrdGk= 140249
+IGdlcmVrdGnEn2luaQ== 140250
+IG5hcno= 140251
+IG5hcnrEmWR6aQ== 140252
+w6lwbw== 140253
+w6lwb3F1ZQ== 140254
+IFRo4bqnbg== 140255
+IHd5c29rbw== 140256
+IHd5c29rb8WbY2k= 140257
+4Lic4Li54LmJ4Lib 140258
+4Lic4Li54LmJ4Lib4LmI4Lin4Lii 140259
+INmK2KjYr9mI 140260
+0YLQtdC70YzQvdC+0LPQvg== 140261
+INCy0LfQs9C70Y/QtA== 140262
+IGplZG7EhQ== 140263
+IOydmOqyrA== 140264
+IOC4guC4k+C4sOC4l+C4teC5iA== 140265
+16TXmdeT 140266
+7IOB64u0 140267
+IG3hu6E= 140268
+15TXntec 140269
+15TXntec16bXldeq 140270
+INGB0L7RgdGC0L4= 140271
+INGB0L7RgdGC0L7QuNGC 140272
+INCw0LLQuA== 140273
+INCw0LLQuNCw 140274
+IEzDpG5kZXI= 140275
+2KrYtdmI2YrYsQ== 140276
+157Xk9eZ15Q= 140277
+7KCI7LCo 140278
+44Go44KK 140279
+44Go44KK44GC 140280
+44Go44KK44GC44GI 140281
+44Go44KK44GC44GI44Ga 140282
+INGA0Y/QtA== 140283
+INGA0Y/QtNC+0Lw= 140284
+IE5o4bqldA== 140285
+INin2YTZg9in2YXZhA== 140286
+15fXnNec 140287
+IEdp4bqleQ== 140288
+16bXmNeo 140289
+16bXmNeo16M= 140290
+INec15HXmNec 140291
+INC40LzQtdGC0Yw= 140292
+16HXnteV15o= 140293
+IHBhcnRpY2lwYcOnw6Nv 140294
+7ZWc64uk66m0 140295
+2YXZhtiq2K/Zig== 140296
+2YXZhtiq2K/Zitin2Ko= 140297
+IGXEn2xlbg== 140298
+Z8Okbmdl 140299
+2LHYqNit 140300
+44Ku44Oj 140301
+INin2YTYsdmC2YU= 140302
+4LiL4LmJ4Liz 140303
+IEjDs2E= 140304
+157XqNeX16c= 140305
+2K3Zhdin2YU= 140306
+2KjZiNmD 140307
+IEFydMOtY3Vsbw== 140308
+44OE44Ki44O8 140309
+15TXpNeb15Q= 140310
+15fXnNeV158= 140311
+INC/0LXRgNC10YXQvtC0 140312
+bGVubWnFnw== 140313
+2LLYsdin2LnYqQ== 140314
+IHNlw7Fvcg== 140315
+44Gj44Gm44GN44Gm 140316
+2KXYtA== 140317
+2KXYtNin2LHYqQ== 140318
+IHBvZMOtYQ== 140319
+IMOcbGtl 140320
+0L3RgdC60LDRjw== 140321
+IGFkYXB0w6k= 140322
+IGTDvHplbmxlbg== 140323
+IGTDvHplbmxlbmVu 140324
+INGB0YLQsNC70LA= 140325
+INmK2K3Yqtin2Kw= 140326
+IG5pZXI= 140327
+IG5pZXJ1Y2g= 140328
+IG5pZXJ1Y2hvbW8= 140329
+IG5pZXJ1Y2hvbW/Fm2Np 140330
+44GT44Go44GM44GC44KL 140331
+4Lii4Lit4LiU4LmA4Lii4Li14LmI4Lii4Lih 140332
+INmF2Kw= 140333
+INmF2KzYp9mG2Yo= 140334
+INC30LDQsQ== 140335
+INC30LDQsdC+0Ls= 140336
+INC30LDQsdC+0LvQtdCy 140337
+INC30LDQsdC+0LvQtdCy0LDQvdC40Y8= 140338
+IMWbcm8= 140339
+IMWbcm9kaw== 140340
+IMWbcm9ka8Ozdw== 140341
+INeU15zXkNeV157XmQ== 140342
+IGRva8WCYWQ= 140343
+IGRva8WCYWRuaWU= 140344
+44Gf44GP44Gq44GE 140345
+44Gv44Ga44Gn44GZ 140346
+44Go5oCd44Gj44Gm44GE44Gf 140347
+w6ljcmFu 140348
+7JeF7LK0 140349
+dHJ6eW1hxYI= 140350
+0YHRgtCy0LXQvdC90YvQuQ== 140351
+IE5vdMOtYw== 140352
+IE5vdMOtY2lhcw== 140353
+2YXYsdmK 140354
+2YXYsdmK2LY= 140355
+5rCX6Ls= 140356
+5rCX6Lu9 140357
+5rCX6Lu944Gr 140358
+65Oj 140359
+INeT15XXkNeo 140360
+INec157XoA== 140361
+INec157XoNeV16I= 140362
+IMOnYWzEscWfxLF5b3I= 140363
+IMWfaWRk 140364
+IMWfaWRkZXQ= 140365
+IE3hurd0 140366
+IGF0ZcWf 140367
+INC/0L7Qu9GD0YfQtdC90LjRjw== 140368
+4LmA4LiE4Lij4Li34LmI4Lit4LiH4Lih4Li34Lit 140369
+IGdyw7bDn2Vy 140370
+2K/Yp9im 140371
+2K/Yp9im2LHYqQ== 140372
+IGJ1bHVu 140373
+IGJ1bHVubWFrdGFkxLFy 140374
+4LmA4Lir4Lij 140375
+4LmA4Lir4Lij4Li14Lii 140376
+4LmA4Lir4Lij4Li14Lii4LiN 140377
+4LiZ4Lix4LiB4LiX4LmI4Lit4LiH4LmA4LiX4Li14LmI4Lii4Lin 140378
+IGFsYW7EsW5kYQ== 140379
+INGD0LfQvdCw 140380
+INC70LXRh9C10L3QuNC1 140381
+5aOy44KM 140382
+IMOnZXZpcg== 140383
+IGRlc3RlxJ9p 140384
+IGhlacOfdA== 140385
+4pay 140386
+2K3Ytw== 140387
+4LiE4Liz4LiV4Lit4Lia 140388
+44Kq44Oz44Op44Kk44Oz 140389
+INeR15fXmdeZ150= 140390
+44Om44OL 140391
+IGTDvHplbmxlbWU= 140392
+IG1vZGFsaXTDoA== 140393
+2LPYsdi3 140394
+2LPYsdi32KfZhg== 140395
+157Xm9eV158= 140396
+INC00LDQvdC90YvQuQ== 140397
+2KrYsdiq 140398
+2KrYsdiq2YrYqA== 140399
+4Lia4Liy4LiH4LiE4LiZ 140400
+IMSQ4buLbmg= 140401
+4Lih4Li54Lil 140402
+4Lih4Li54Lil4LiE4LmI4Liy 140403
+2YbZgti1 140404
+4LiB4Liy4Lij4Lij4Lix4LiB4Lip4Liy 140405
+INGE0L7QvQ== 140406
+INGE0L7QvdC0 140407
+44KI44GG44Gr44Gq44Gj44Gf 140408
+2YXYudin2YQ= 140409
+2YXYudin2YTYrNip 140410
+IE9zbWFu 140411
+IE9zbWFubMSx 140412
+0LjRh9C10YHQutC+0Lw= 140413
+4Lit4Lii4Liy4LiB4LiI4Liw 140414
+44GV44G+44GW 140415
+44GV44G+44GW44G+ 140416
+44GV44G+44GW44G+44Gq 140417
+INeq15XXm9ec 140418
+16LXpteR 140419
+INin2YTYudiz2YM= 140420
+INin2YTYudiz2YPYsdmK 140421
+IHbDqWhpYw== 140422
+IHbDqWhpY3VsZQ== 140423
+INeZ16bXl9en 140424
+INin2YTZiNit 140425
+INin2YTZiNit2YrYrw== 140426
+INin2YTYudiv2Yg= 140427
+IFF14bqjbg== 140428
+IOqzteuPmQ== 140429
+2KjYr9mE 140430
+IMSR4bqjbmc= 140431
+IG3hu4duaA== 140432
+IG5pZXpi 140433
+IG5pZXpixJk= 140434
+IG5pZXpixJlkbg== 140435
+IHlhecSxbmxhbg== 140436
+0L7QsdGJ0Lg= 140437
+IGfDtnTDvHI= 140438
+16bXpA== 140439
+16bXpNeV15k= 140440
+INmE2YrYqNmK 140441
+INmE2YrYqNmK2Kc= 140442
+2K3ZiNin 140443
+INC00L7QsQ== 140444
+INC00L7QsdGA0L4= 140445
+0LjRgNGD0LXQvA== 140446
+INin2YTYrdmD2YjZhdmK2Kk= 140447
+bcOkw59pZw== 140448
+IGVkaWNpw7Nu 140449
+0LLQu9C10LrQsNGC0LXQu9GM 140450
+0LLQu9C10LrQsNGC0LXQu9GM0L0= 140451
+INeq16nXnNeV150= 140452
+INeU16nXldeg15nXnQ== 140453
+4Lih4Li04LiW4Li4 140454
+4Lih4Li04LiW4Li44LiZ 140455
+4Lih4Li04LiW4Li44LiZ4Liy4Lii4LiZ 140456
+6aOf44G544Gm 140457
+IOyImOynkQ== 140458
+16HXkdeZ 140459
+INC40Y7Qu9GP 140460
+IOC5hOC4lOC5ieC5geC4geC5iA== 140461
+15zXl9ed 140462
+dHLDpA== 140463
+dHLDpGd0 140464
+44Gd44KC44Gd44KC 140465
+0J3QlQ== 140466
+INCy0L3Rg9GC 140467
+INCy0L3Rg9GC0YDQuA== 140468
+44Go5LiA57eS44Gr 140469
+44Kr44OV44Kn 140470
+INeR15fXk9eo 140471
+15fXntep 140472
+44Ko44ON 140473
+44Ko44ON44Or 140474
+44Ko44ON44Or44Ku 140475
+44Ko44ON44Or44Ku44O8 140476
+4LiC4Lit4LiH4LiV4Lix4Lin4LmA4Lit4LiH 140477
+2KjZgtin2KE= 140478
+16TXodeZ15s= 140479
+16TXodeZ15vXldec15XXkg== 140480
+44Oh44OD 140481
+44Oh44OD44K7 140482
+44Oh44OD44K744O844K4 140483
+2YTZgtio 140484
+QcSe 140485
+16nXp9eZ16I= 140486
+2YLYs9in2YU= 140487
+15PXldeS157XlA== 140488
+5rex44GE 140489
+7ZaI64qU642w 140490
+IHJvendpxIV6YW5pZQ== 140491
+4LiZ4Lix4LmI4LiZ4LmA4Lit4LiH 140492
+15nXpteR 140493
+IHRyw7RuZw== 140494
+4LmD4LiK4LmJ4Lia4Lij4Li04LiB4Liy4Lij 140495
+INin2YTZhdmI2LPZhQ== 140496
+INC00LXRgtC4 140497
+44GX44GL44Gq44GE 140498
+16HXmdef 140499
+IHLDqWbDqXJlbmNl 140500
+4LmB4Lir4LmJ4LiH 140501
+44KC44KJ44Gj44Gf 140502
+INec16jXmw== 140503
+INec16jXm9eV16k= 140504
+2LTYudmI2LE= 140505
+INCR0L7Qsw== 140506
+IGxhesSxbQ== 140507
+INeZ16nXoNed 140508
+INC/0LDRgNGC 140509
+INC/0LDRgNGC0L3QtdGA 140510
+INGD0L3QuNC60LA= 140511
+INGD0L3QuNC60LDQu9GM0L0= 140512
+IG1hdMOpcmllbA== 140513
+157XqNen 140514
+IHBoxrDhu51uZw== 140515
+INC30LDQuQ== 140516
+INC30LDQudC8 140517
+2YHZgtiv 140518
+VW5pdmVyc2l0w6A= 140519
+16LXqNeb15nXnQ== 140520
+IGJhw7Fv 140521
+INC90L7Rjw== 140522
+INC90L7Rj9Cx0YDRjw== 140523
+4Lib4LmJ4Liy4Lii 140524
+IHRhdHM= 140525
+IHRhdHPDpGNo 140526
+IHRhdHPDpGNobGljaA== 140527
+INGC0YDQtdGC0Yw= 140528
+0Y3QvA== 140529
+44OZ44O844K5 140530
+IG5o4buxYQ== 140531
+7Iqk7YGs 140532
+INi52KjYr9in2YTZhNmH 140533
+INeq15XXqNeU 140534
+2KPYtNmK 140535
+2KPYtNmK2KfYoQ== 140536
+INmE2YTYutin 140537
+INmE2YTYutin2YrYqQ== 140538
+2YXZiNin2YI= 140539
+2YXZiNin2YLZgQ== 140540
+IGfFgsOzd25h 140541
+IGFydMSxxZ8= 140542
+INee16fXldee15k= 140543
+44Kv44Op44OW 140544
+INiz2YjZiQ== 140545
+IOyXrOyEsQ== 140546
+2KfYs9ix 140547
+2KfYs9ix2KfYptmK2YQ= 140548
+INeg15vXqteR 140549
+4Lii4LmJ4Lit4LiZ 140550
+IGRlYmVyw6E= 140551
+IHBo4bqrdQ== 140552
+0Y7RidC10Lw= 140553
+INmE2K/ZitmG2Kc= 140554
+157XmNeU 140555
+INeg15XXnNeT 140556
+INCy0YHRgtGA0LXRh9Cw 140557
+44KJ44KM44Gm44GE44G+44GZ 140558
+IGNhxYJlag== 140559
+4Lii4Li2 140560
+4Lii4Li24LiU 140561
+0L/QvtGC0LXQvQ== 140562
+0L/QvtGC0LXQvdGG0Lg= 140563
+INC70LjRgg== 140564
+INC70LjRgtC10YA= 140565
+INC70LjRgtC10YDQsNGC0YPRgA== 140566
+INC60LDQttC00L7QvA== 140567
+IO2MkA== 140568
+IO2MkOuLqA== 140569
+4LiI4Li5 140570
+IHByZXNlbsOnYQ== 140571
+44Gq44KT44Gn 140572
+2YXZitin2Yc= 140573
+0LjQvdGE0L7RgNC8 140574
+0LjQvdGE0L7RgNC80LDRhtC40L7QvQ== 140575
+0LjQvdGE0L7RgNC80LDRhtC40L7QvdC9 140576
+IOyekOyXsA== 140577
+16jXm9ep 140578
+IMO2ZMO8bA== 140579
+57aa44GP 140580
+INC/0YE= 140581
+INC/0YHQuNGF 140582
+INC/0YHQuNGF0L7Qu9C+0LM= 140583
+2KrYsNmD2LE= 140584
+IOyeheyepQ== 140585
+4Lil4LiU4LmM 140586
+7ISg6rGw 140587
+44Gj44Gm44GK44KK44G+44GZ 140588
+INeZ16I= 140589
+INeZ16LXp9eR 140590
+INin2YTYt9i52KfZhQ== 140591
+44OG44K544OI 140592
+IFR14bqlbg== 140593
+IHBhcnRpY2lwYWNpw7Nu 140594
+157Xldee15fXlA== 140595
+15LXqNeh15Q= 140596
+INin2YTYqtmG2YHZig== 140597
+INin2YTYqtmG2YHZitiw2Yo= 140598
+INCx0LXQt9C+0L/QsNGB0L0= 140599
+Z2Vm 140600
+Z2Vmw6Rocg== 140601
+2LTZiNix 140602
+IG15xZtsaQ== 140603
+2YjYp9i02YY= 140604
+2YjYp9i02YbYt9mG 140605
+16DXldeh16I= 140606
+2YPZhw== 140607
+2YPZh9ix2Kg= 140608
+2YPZh9ix2KjYp9ih 140609
+IG11c2lhxYI= 140610
+7Iu4 140611
+44OW44Op44OD44Kv 140612
+IGNyw6nDqQ== 140613
+2YbZh9in2LE= 140614
+b3dvxZvEhw== 140615
+2YXYrdin2YPZhQ== 140616
+IHfFgmHFmw== 140617
+IHfFgmHFm2M= 140618
+IHfFgmHFm2NpY2llbA== 140619
+INmK2KQ= 140620
+INmK2KTYr9mK 140621
+157XoteV16A= 140622
+15DXkdec 140623
+2K7Yt9ij 140624
+INGF0L7Qu9C+0LQ= 140625
+15bXldec 140626
+44GT44KM44KJ 140627
+44GT44KM44KJ44Gu 140628
+IGLDoXNpY2E= 140629
+4Lik4LiU 140630
+4Lik4LiU4Li54LiB 140631
+4Lik4LiU4Li54LiB4Liy 140632
+4Lik4LiU4Li54LiB4Liy4Lil 140633
+6JC944Gh552A 140634
+44Gq44GE44GT44Go 140635
+2LXZiNmF 140636
+2YbYrNit 140637
+16DXp9eV15M= 140638
+16DXp9eV15PXqg== 140639
+0LrQu9Cw0YHRgQ== 140640
+7ZWY7Iuc64qU 140641
+64SY 140642
+INep15DXmdeg15U= 140643
+INCh0LXQudGH0LDRgQ== 140644
+bWF5YWNhxJ/EsQ== 140645
+IHlhcMSxbMSxcg== 140646
+IGNhdGVnb3LDrWE= 140647
+2LnYqNin2K8= 140648
+INCi0LXQvw== 140649
+INCi0LXQv9C10YDRjA== 140650
+15TXmdeh15jXldeo15k= 140651
+aOG6vw== 140652
+44Kz44O844OJ 140653
+IGNhYmXDp2E= 140654
+2KzZhdin 140655
+2KzZhdin2Yc= 140656
+2KzZhdin2YfZitix 140657
+5L2O44GE 140658
+INGC0L7QstCw0YDQvtCy 140659
+4LiK4Liy4Lin4Lia4LmJ4Liy4LiZ 140660
+INGB0YLQsNC90L7Qsg== 140661
+INGB0YLQsNC90L7QstC40YLRgdGP 140662
+INCw0LLRgtC+0LzQvtCx0LjQu9GM 140663
+INGB0LvRg9GH0LDQuQ== 140664
+4Lit4Lix4Lie 140665
+IEdpcmnFnw== 140666
+IOydvOuLqA== 140667
+INC/0YDQvtGB 140668
+INC/0YDQvtGB0LzQvtGC0YA= 140669
+44Gq44GP44Gq44Gj44Gf 140670
+4Lih4Li14Lib4Lix4LiN4Lir4Liy 140671
+77qO 140672
+w6ljb3V0ZQ== 140673
+INmF2YjYrNmI2K8= 140674
+INiz2LHZiti5 140675
+INmI2YfZhtin 140676
+INmI2YfZhtin2YM= 140677
+4LiE4Li44LiT4Liq4Lih 140678
+4LiE4Li44LiT4Liq4Lih4Lia4Lix4LiV4Li0 140679
+IOyasOyEoA== 140680
+4Lie4Lij4Liw4Lie4Li44LiX4LiY 140681
+5aW944G/ 140682
+2LjZhNmF 140683
+INC80LDQutGB 140684
+INC80LDQutGB0LjQvNCw0LvRjA== 140685
+INC80LDQutGB0LjQvNCw0LvRjNC90L4= 140686
+44Oq44Ki44Or 140687
+4LmB4Lih4LmJ4Lin4LmI4Liy 140688
+INin2YTYrdmI2KfYsQ== 140689
+44OX44Op44K5 140690
+INi52YTYp9mC2Kk= 140691
+IO2WieuPmQ== 140692
+IGfDtm5kZXJpbA== 140693
+IGzDo2k= 140694
+IHNhxJ9sxLFrbA== 140695
+IHNhxJ9sxLFrbMSx 140696
+INGI0LDQsw== 140697
+INeR15DXqNeU 140698
+cHJvd2FkemnEhw== 140699
+44GE44GP44Gk44GL 140700
+INio2KrYp9ix2YrYrg== 140701
+INeR15DXldeq15Q= 140702
+IG3Ds2M= 140703
+INCc0L3QtQ== 140704
+44OX44Os44O8 140705
+15DXlteo15c= 140706
+5aC05ZCI44Gr44Gv 140707
+5L2/44GI 140708
+4LmA4Lij4Li34Lit4LiZ 140709
+INCf0LXRgg== 140710
+INCf0LXRgtGA 140711
+44Gr5YWl44KL 140712
+2YXYp9iv2Kk= 140713
+4LmA4LiH4Li34LmI4Lit4LiZ 140714
+4LmA4LiH4Li34LmI4Lit4LiZ4LmE4LiC 140715
+INGB0L7RgdGC0L7Rj9C90LjQtQ== 140716
+w7RuaWNh 140717
+INGE0LXQsg== 140718
+INGE0LXQstGA0LA= 140719
+INGE0LXQstGA0LDQu9GP 140720
+INeV15Y= 140721
+INeV15bXkNeq 140722
+4LiE4Lij4Li0 140723
+4LiE4Lij4Li04Liq 140724
+INCV0YnQtQ== 140725
+44Gj44Gm44GX44G+44GE44G+44GX44Gf 140726
+INC/0YDQsNCy0LjRgtC10LvRjA== 140727
+INC/0YDQsNCy0LjRgtC10LvRjNGB0YLQsg== 140728
+IHTDpGdsaWNo 140729
+IOuLueyLnA== 140730
+157Xldei157Xkw== 140731
+INC00LLQvtGA 140732
+5omV 140733
+5omV44GE 140734
+INGB0YLQsNC90LXRgg== 140735
+INCy0L7Qt9C00LXQudGB0YLQsg== 140736
+INCy0L7Qt9C00LXQudGB0YLQstC4 140737
+IGbDqnRl 140738
+4LmA4Liq4Liy 140739
+16rXp9eV15XXlA== 140740
+IHV5YXI= 140741
+IHV5YXLEsQ== 140742
+4LiB4Lil4Lix4Lia4LmE4Lib 140743
+IGdpxrDhu51uZw== 140744
+INCy0LA= 140745
+INCy0LDRiNC4 140746
+IMSR4bqtdQ== 140747
+IFNwYcOf 140748
+IOyVhOuniA== 140749
+4LmE4LiU4LmJ4LiH4LmI4Liy4Lii 140750
+INeU157Xkden16k= 140751
+5paw44Gf 140752
+5paw44Gf44Gq 140753
+xLFsxLF5b3I= 140754
+0L/Qu9Cw0L0= 140755
+INeU15HXqNeZ15DXldeq 140756
+IGHEn3LEsQ== 140757
+IHNheWfEsQ== 140758
+5bu644Gm 140759
+IG5hand5xbw= 140760
+IG5hand5xbxzeg== 140761
+2LPZitin2LPYp9iq 140762
+44GK5b6X 140763
+INin2YTYudmE2Yo= 140764
+INin2YTYudmE2YrYpw== 140765
+IGNvcmF6w7Nu 140766
+7LmY66OM 140767
+4Lir4Lix4Lin4LiC4LmJ4Lit 140768
+INio2K3Zig== 140769
+INio2K3Zitir 140770
+0LfQstC10LfQtA== 140771
+2KjZiNin2KjYqQ== 140772
+0JvQmA== 140773
+2YTYp9iy2YU= 140774
+IHJvenA= 140775
+IHJvenBvYw== 140776
+IHJvenBvY3rEmQ== 140777
+6Kem44KM 140778
+INin2YTYrNmF2Yc= 140779
+INin2YTYrNmF2YfZiNix 140780
+IHNwxJlk 140781
+IHNwxJlkeg== 140782
+4Lin4Li04LiX4Lii4Liy4Lio4Liy4Liq4LiV4Lij4LmM 140783
+0LjQstCw0LXRgtGB0Y8= 140784
+INC00LDQvdC90L7QuQ== 140785
+IHJlcHLDqXNlbnRl 140786
+IMSR4buLY2g= 140787
+INei157Xlden 140788
+4Lit4Lix4LiZ4LiV4Lij 140789
+4Lit4Lix4LiZ4LiV4Lij4Liy4Lii 140790
+IGVzdHJhdMOpZw== 140791
+IGVzdHJhdMOpZ2lh 140792
+cGFkxYI= 140793
+INCy0L/QvtC70L0= 140794
+INCy0L/QvtC70L3QtQ== 140795
+INC/0YDQtdC00L7RgdGC0LDQstC70LXQvQ== 140796
+15fXnNeV16c= 140797
+15fXnNeV16fXqg== 140798
+44Ki44OK 140799
+INin2YTYutiw 140800
+INin2YTYutiw2KfYptmK 140801
+INGD0LfQvQ== 140802
+INGD0LfQvdCw0YLRjA== 140803
+4LiL4LmJ4Liy4Lii 140804
+5b2T44Gm 140805
+2K3Zitin2KE= 140806
+IGLDoXNpY28= 140807
+16fXldeR16I= 140808
+INin2YTZhdio2KfYsdin2Kk= 140809
+INin2YTZh9in2KrZgQ== 140810
+INeb16DXkteT 140811
+4Lib4Lij4Liw4Lir4Lii 140812
+4Lib4Lij4Liw4Lir4Lii4Lix4LiU 140813
+0JrQsNC6 140814
+4LiX4Li14LmI4LiZ4LmI4Liy 140815
+4LiX4Li14LmI4LiZ4LmI4Liy4Liq4LiZ4LmD4LiI 140816
+44G+44GB 140817
+772i 140818
+0YHQutC+0L8= 140819
+IHNvbnJhc8SxbmRh 140820
+IHVyesSFZA== 140821
+IHVyesSFZHplbmlh 140822
+15vXldeV16A= 140823
+15vXldeV16DXqg== 140824
+INec15TXqtee15XXkw== 140825
+INec15TXqtee15XXk9eT 140826
+INGB0LvQuA== 140827
+INGB0LvQuNGI 140828
+INGB0LvQuNGI0LrQvtC8 140829
+INGB0YLRg9C0 140830
+INGB0YLRg9C00LXQvdGC 140831
+INeU15XXkw== 140832
+INeU15XXk9ei15Q= 140833
+67mE7Jqp 140834
+4Lit4Lii4Liy4LiB4LmD4Lir4LmJ 140835
+IGLhu4E= 140836
+4Lii4Li44LiX4LiY 140837
+0JjQnQ== 140838
+2LPYp9im2LE= 140839
+2KPYtdmI2YQ= 140840
+INin2YTYutix2YE= 140841
+44GT44Go44KC44GC44KK44G+44GZ 140842
+6L6844G+44KM 140843
+INin2YTYs9in2KjYuQ== 140844
+IGPhu6c= 140845
+44GE44Gf44Gg44GE44Gf 140846
+7KeT 140847
+7IKs66y0 140848
+cG93aWVkxbo= 140849
+2KrZgdmD 140850
+2KrZgdmD2YrYsQ== 140851
+0LjRgNC+0LLQutC4 140852
+IO2Gte2VtOyEnA== 140853
+44Ko44K544OG 140854
+INC00LXRj9GC0LXQu9GM0L3QvtGB0YLRjA== 140855
+INC00LDQvdC90YvQvA== 140856
+INei15XXqA== 140857
+INei15XXqNeb15k= 140858
+15XXk9ei16o= 140859
+IGhheWF0xLFuxLE= 140860
+IGLEhWQ= 140861
+IGLEhWTFug== 140862
+b2JzxYJ1Zw== 140863
+4LmA4Lie4Li14Lii4LiH4LmB4LiE4LmI 140864
+4LiL4LmI4Liy 140865
+6LKg44GR 140866
+INGB0YLRgNC10Lw= 140867
+IMSR4buJbmg= 140868
+INCg0YPRgQ== 140869
+IE7hu68= 140870
+INec15TXqdeZ15I= 140871
+IGplZG5vYw== 140872
+IGplZG5vY3pl 140873
+IGplZG5vY3plxZtuaWU= 140874
+INeU15LXkdeV15Q= 140875
+2KPYrtmE2KfZgg== 140876
+INC90LDRgdC10Ls= 140877
+INC90LDRgdC10LvQtdC90LjRjw== 140878
+INmK2YbYqA== 140879
+INmK2YbYqNi62Yo= 140880
+44GM44GL 140881
+44GM44GL44GL 140882
+15LXoteq 140883
+0J7QoA== 140884
+INC90LDQu9C40YfQuNC4 140885
+IOuniOyngA== 140886
+IOuniOyngOuniQ== 140887
+IO2WieyCrA== 140888
+IHRyZcWbY2k= 140889
+IOqwgOy5mA== 140890
+7KaY 140891
+INCw0L3QsNC70L7Qsw== 140892
+15TXptei16o= 140893
+0LLQu9Cw0LQ= 140894
+0LLQu9Cw0LTQtQ== 140895
+INGB0LTQtdC70LDQuw== 140896
+INeg15LXmdep 140897
+INeg15LXmdep15XXqg== 140898
+0L/QvtC70L3QtdC90LjQtQ== 140899
+4LiG4LmI4Liy 140900
+IETDtm4= 140901
+15vXnNeb15zXlA== 140902
+157XlteS 140903
+2YXZgQ== 140904
+2YXZgdmH 140905
+2YXZgdmH2YjZhQ== 140906
+15TXkw== 140907
+15TXk9ek16E= 140908
+15TXk9ek16HXlA== 140909
+44GZ44GO44Gm 140910
+INCz0YA= 140911
+INCz0YDQvQ== 140912
+157XmNeV16E= 140913
+IOq4sOyWtQ== 140914
+776f 140915
+IHDFgnlu 140916
+IEdyw7xuZGU= 140917
+IELDvGNoZXI= 140918
+IHdlZMWCdWc= 140919
+44G+44Gg44G+44Gg 140920
+INeg15TXk9eo 140921
+INmK2LPYqti32YrYuQ== 140922
+IEhp4buHcA== 140923
+44Kt44Oj44Oz44Oa 140924
+44Kt44Oj44Oz44Oa44O844Oz 140925
+IHRo4buV 140926
+IGV1cm9ww6llbm5l 140927
+4Lia4Lix4LiH 140928
+4Lia4Lix4LiH4LiE4Lix4Lia 140929
+IHN6Y3plZ8OzxYJvd28= 140930
+16DXqden 140931
+44OV44Op44Oz44K5 140932
+157Xldee15fXmQ== 140933
+IGNvbcO6bg== 140934
+IMOnYXJw 140935
+2K3YqtmK2Kc= 140936
+2K3YqtmK2KfYrA== 140937
+2K3YqtmK2KfYrNin2Ko= 140938
+64u064u5 140939
+5L2V5bqm 140940
+5L2V5bqm44KC 140941
+15PXkden 140942
+44GN44KM 140943
+44GN44KM44GE 140944
+INC60LDQvA== 140945
+INC60LDQvNC10YA= 140946
+IGVzcGVjw61maWNv 140947
+IHRlbMOpZm9ubw== 140948
+4LiV4Lix4LmJ4LiH4Lit4Lii4Li54LmI 140949
+ScWe 140950
+44Gp44KT44Gp 140951
+44Gp44KT44Gp44KT 140952
+16LXptee15DXmQ== 140953
+4LiU4Lix4LiH4LiZ4Li14LmJ 140954
+INGE0L7RgNC80LjRgNC+0LI= 140955
+INGE0L7RgNC80LjRgNC+0LLQsA== 140956
+15XXnteR 140957
+IGt1bGxhbsSxbcSx 140958
+0JzQng== 140959
+16LXqdeZ 140960
+16LXqdeZ15nXlA== 140961
+IMO2bmxlbQ== 140962
+4LmA4Lit4LmH 140963
+4LmA4Lit4LmH4Lih 140964
+157Xqden15nXog== 140965
+16jXmdeX 140966
+4LiC4Lix4LiU 140967
+IO2ZnA== 140968
+IO2ZnOyaqQ== 140969
+4LiL4Liw 140970
+44KI44GG44Gr44Gq44KK44G+44GX44Gf 140971
+INGA0LDRgdC/0YA= 140972
+INGA0LDRgdC/0YDQvtGB0YI= 140973
+INGA0LDRgdC/0YDQvtGB0YLRgNCw0L0= 140974
+INGA0LDRgdC/0YDQvtGB0YLRgNCw0L3QtdC9 140975
+15vXmdeV158= 140976
+2YLYqNi2 140977
+2KrYtdix2YrYrQ== 140978
+2KrYtdix2YrYrdin2Ko= 140979
+INC+0YDQuA== 140980
+INC+0YDQuNCz 140981
+INC+0YDQuNCz0LjQvdCw 140982
+INC+0YDQuNCz0LjQvdCw0Ls= 140983
+INin2YTYudin2YTZig== 140984
+4LmB4Lir4LmI4LiH4LiZ4Li14LmJ 140985
+44OV44Kh44O8 140986
+44Gm44GE44GN 140987
+44Gm44GE44GN44Gf44GE 140988
+16TXqteo 140989
+16TXqteo15XXoNeV16o= 140990
+INeR15nXlw== 140991
+INeR15nXl9eT 140992
+IG9kYnk= 140993
+IG9kYnnFgg== 140994
+INC+0YfQtdGA0LXQtA== 140995
+IHRyxrDGoW5n 140996
+44Kt44Oz 140997
+157Xldek 140998
+157Xldek16I= 140999
+65Oc66a9 141000
+65Oc66a964uI64uk 141001
+4Lie4Li34LmJ4LiZ4LiQ4Liy4LiZ 141002
+7J6Q6rKp 141003
+IFZp4buHbg== 141004
+IERlc3B1w6lz 141005
+INeQ15zXmdeg15U= 141006
+IGR1csOpZQ== 141007
+7Ye0 141008
+IG3DvHppaw== 141009
+aeG6v3U= 141010
+INGA0LDQt9C80LXRidC10L0= 141011
+INC60YPQtA== 141012
+INC60YPQtNCw 141013
+2LrYtg== 141014
+2LrYttio 141015
+IFRhbWLDqW0= 141016
+4LiI4Lix4LiU4Liq4LmI4LiH 141017
+4LiB4Liy4Lij4LmB4Liq4LiU4LiH 141018
+b25vbcOtYQ== 141019
+INCw0L3Qsw== 141020
+INCw0L3Qs9C70Lg= 141021
+INCw0L3Qs9C70LjQuQ== 141022
+INCw0L3Qs9C70LjQudGB0Lo= 141023
+IHpuYWw= 141024
+IHpuYWxheg== 141025
+IHpuYWxhesWC 141026
+16rXqNeS 141027
+16rXqNeS15XXnQ== 141028
+INGB0L3QvtCy 141029
+INGB0L3QvtCy0LA= 141030
+INGH0LDRgdCw 141031
+IGNvbW11bmF1dMOp 141032
+IGVzcGVjw61maWNh 141033
+IEzhu4tjaA== 141034
+IGxpw6k= 141035
+2YHYrNix 141036
+4LmA4LiB4LmI4LiH 141037
+2LnYp9mE 141038
+2LnYp9mE2Kw= 141039
+2KPZhti4 141040
+2KPZhti42YXYqQ== 141041
+RVPEsA== 141042
+INin2YTYrdiv2YrYrw== 141043
+4Lie4Lij4Liw4Lit4LiH4LiE4LmM 141044
+INek16jXqdeq 141045
+INC00LLQuNC2 141046
+INC00LLQuNC20LXQvdC40Y8= 141047
+INin2YTYrNin2LHZig== 141048
+4LiY4Liy4LiZ4Li1 141049
+0L3QtdGB0LXQvQ== 141050
+INin2YTZhtmH2KfYptmK 141051
+INCx0LXRgA== 141052
+INCx0LXRgNC10Lw= 141053
+INCx0LXRgNC10LzQtdC90L0= 141054
+IGTDqXBhcnRlbWVudA== 141055
+4LmA4LiX4Li14Lii 141056
+4LmA4LiX4Li14Lii4Lia 141057
+INCc0LDRgNC4 141058
+INC90LXQutC+0YLQvtGA0YvRhQ== 141059
+0L7QsdC10YHQvw== 141060
+0L7QsdC10YHQv9C10YfQtdC9 141061
+15fXldeW 141062
+15fXldeW15Q= 141063
+2YbYqtis 141064
+4LiI4Liw4LmE4LiU4LmJ4Lij4Lix4Lia 141065
+4buw 141066
+IMOpbMOpbWVudHM= 141067
+2LnYtw== 141068
+2LnYt9in2KE= 141069
+IHThuq90 141070
+aeG7h20= 141071
+0Y7RidC40YXRgdGP 141072
+44GX44Gw 141073
+44GX44Gw44KJ44GP 141074
+INC/0L7QvNC+0LbQtdGC 141075
+4LiC4LiT4Liw4LiZ4Li14LmJ 141076
+INei16nXqNeV16o= 141077
+6YGV44Gj44Gm 141078
+INC/0YDQvtCz 141079
+INC/0YDQvtCz0L0= 141080
+INC/0YDQvtCz0L3QvtC3 141081
+IHTFgg== 141082
+IHTFgnVt 141083
+IHTFgnVtYWN6 141084
+VMO8cg== 141085
+VMO8cmtpeWU= 141086
+44GN44Gj 141087
+44GN44Gj44GL44GR 141088
+INeU16DXldeb 141089
+INeU16DXldeb15fXmQ== 141090
+IOyDneyCsA== 141091
+INGE0L7RgNC80Ys= 141092
+576O44GX44GE 141093
+4Lib4Lij4Li24LiB 141094
+4Lib4Lij4Li24LiB4Lip4Liy 141095
+IGx1bWnDqHJl 141096
+44Kq44O844OX 141097
+44Kq44O844OX44Oz 141098
+4Lib4Li34LiZ 141099
+4Lin4Lix4Liq4LiU 141100
+4Lin4Lix4Liq4LiU4Li4 141101
+0LXRgNGC0LI= 141102
+2YPZhNmB 141103
+772j 141104
+4LiY4Lij4Lij4Lih4LiU4Liy 141105
+16DXmNeo 141106
+INC/0YDQtdC00YHRgtCw0LLQu9GP0LXRgg== 141107
+IGFuw6FsaXNpcw== 141108
+IGLDo2k= 141109
+2KjYp9mC2Yo= 141110
+4Lib4Lij4Liw4LmA4LiU 141111
+4Lib4Lij4Liw4LmA4LiU4LmH4LiZ 141112
+INGB0LvRg9GH0LDRjw== 141113
+INGB0LvRg9GH0LDRj9GF 141114
+0JvQkA== 141115
+4Liq4Lix4LiH4LmA4LiB 141116
+4Liq4Lix4LiH4LmA4LiB4LiV 141117
+IHByemVj 141118
+IHByemVjaWXFvA== 141119
+2YXYtdmE 141120
+2YXYtdmE2K3YqQ== 141121
+16nXlden15XXnNeT 141122
+INC+0LHQvtGA0YPQtNC+0LLQsNC90LjRjw== 141123
+IHRyd2HFgg== 141124
+2LHZiNmF 141125
+7JWI64K0 141126
+IE5naOG7iw== 141127
+2K7YtA== 141128
+4Lia4Liy4LiE4Liy4Lij 141129
+4Lia4Liy4LiE4Liy4Lij4LmI4Liy 141130
+INC+0L/RhtC40L7QvQ== 141131
+INGB0L7Qt9C00LDQvdC40Y8= 141132
+44Kz44K544OI 141133
+INeU16LXnNeZ 141134
+INeU16LXnNeZ15XXnw== 141135
+bMOkdWZ0 141136
+44OZ44K544OI 141137
+IHLDqg== 141138
+IHLDqnZl 141139
+15DXkdeZ15E= 141140
+15nXmdea 141141
+67aZ 141142
+44Kk44Oz44OJ 141143
+xYJvxbx5 141144
+xYJvxbx5xIc= 141145
+2LnYp9im2YQ= 141146
+2LnYp9im2YTYqQ== 141147
+2KPZiNix 141148
+2KPZiNix2KfZgg== 141149
+4LiX4LmJ4Lit4LiH4LiW 141150
+4LiX4LmJ4Lit4LiH4LiW4Li04LmI4LiZ 141151
+IMOkaG4= 141152
+IMOkaG5saWNo 141153
+44Of44OL 141154
+4Lic4Li5 141155
+4Lic4Li54LmJ4LiZ 141156
+4Lic4Li54LmJ4LiZ4Liz 141157
+INC80LDRgtC10YDQuNCw0LvRiw== 141158
+INC60LDQv9C40YI= 141159
+INC60LDQv9C40YLQsNC7 141160
+77ym 141161
+IHNlw6dpbA== 141162
+IGjhu6luZw== 141163
+IGludMOpcmVzc2FudA== 141164
+44Gj44Gm44GE44GP 141165
+IGXEn2Vy 141166
+65CY7JeI7Iq164uI64uk 141167
+IGFubGHFn21h 141168
+44GU5Yip55So 141169
+INeR15bXmw== 141170
+INeR15bXm9eV16o= 141171
+652866m0 141172
+INmK2YjYsw== 141173
+INmK2YjYs9mB 141174
+2KPYs9mE2K3YqQ== 141175
+IEdlZsO8aGw= 141176
+INC90L7RgNC80LDQu9GM0L0= 141177
+44OZ44Oz 141178
+44GV44KM44KL44GT44Go 141179
+INCR0LXRgQ== 141180
+44Go44GE44GI44Gw 141181
+INmF2YfZhQ== 141182
+INmF2YfZhdip 141183
+44Gn44GX44KH44GG44Gt 141184
+IOq1reuCtA== 141185
+4LmA4Lih4LmH4LiU 141186
+157Xkden16g= 141187
+INin2YTYr9mG2Yo= 141188
+INin2YTYr9mG2YrYpw== 141189
+4LiK4Li5 141190
+0LrRgNGD0YI= 141191
+IHRob8Ohbmc= 141192
+INeg15PXqA== 141193
+INeg15PXqNep 141194
+INGA0LDRgdGB0LrQsNC30LDQuw== 141195
+IEF1w59lcmRlbQ== 141196
+16TXkNeo 141197
+16TXkNeo16c= 141198
+INee16nXl9en15nXnQ== 141199
+16bXqNeb15nXnQ== 141200
+157Xk9eV 141201
+157Xk9eV15nXpw== 141202
+6Ium44GX 141203
+INGB0LjQsw== 141204
+INGB0LjQs9C90LDQuw== 141205
+IE3hu41p 141206
+IHRy4buv 141207
+IG5hc3TEmXA= 141208
+IG5hc3TEmXBuaWU= 141209
+IOy2lOynhA== 141210
+INin2YTZgdmG2K8= 141211
+INin2YTZgdmG2K/Zgg== 141212
+a2/FhGN6ecWC 141213
+4Liq4Li14LmI 141214
+16fXmdeR 141215
+16fXmdeR15XXpQ== 141216
+INC90YPQttC90Ys= 141217
+5aSn5YiH 141218
+5aSn5YiH44Gq 141219
+5o+b44GI 141220
+16rXldeh 141221
+16rXldeh16TXqg== 141222
+44Gj44Gm44GE44Gq44GE 141223
+INC80Y8= 141224
+INC80Y/Qsw== 141225
+INC80Y/Qs9C6 141226
+IGpha2ll 141227
+IGpha2llxZs= 141228
+4LiV4Liz4Lia 141229
+4LiV4Liz4Lia4Lil 141230
+IOyeiOyngA== 141231
+15HXmNeQ 141232
+INC+0YLQu9C40YfQvdC+ 141233
+2YLZkA== 141234
+INCw0LLRgtC+0LzQvtCx 141235
+INCw0LLRgtC+0LzQvtCx0Lg= 141236
+INCw0LLRgtC+0LzQvtCx0LjQu9GP 141237
+2K/ZitmF2YLYsdin2LfZig== 141238
+INin2YTZiNin 141239
+INin2YTZiNin2K3Yrw== 141240
+INiz2YjYsdmK2Kk= 141241
+2KPYutmE 141242
+2KPYutmE2Kg= 141243
+INGN0LrRgNCw0L0= 141244
+44OX44Op44Kk 141245
+IGplc3RlxZs= 141246
+44OQ44Oq 141247
+INeU15DXldeV15nXqA== 141248
+2KfYptmD 141249
+4Lit4Lii4LmI4Liy4LiH4Lii4Li04LmI4LiH 141250
+0YDQtdC60YI= 141251
+IHVtbw== 141252
+IHVtb8W8 141253
+IHVtb8W8bGk= 141254
+IHVtb8W8bGl3 141255
+IHVtb8W8bGl3aWE= 141256
+IG7DpGNoc3Rl 141257
+IOyeiOyngOunjA== 141258
+INC/0YDQtdC00L0= 141259
+INC/0YDQtdC00L3QsNC3 141260
+INC/0YDQtdC00L3QsNC30L3QsNGH0LXQvQ== 141261
+IG1hw6fEsQ== 141262
+IHBvbWk= 141263
+IHBvbWnEmWQ= 141264
+IHBvbWnEmWR6eQ== 141265
+INin2YTZhNmC2KfYoQ== 141266
+4LmA4LiU4Lit4Liw 141267
+INC90L7QstC+0YHRgtC4 141268
+157Xl9ec15Q= 141269
+2LHZitin2LbZig== 141270
+4LiU4LiZ 141271
+4LiU4LiZ4LiV4Lij4Li1 141272
+2KjYtdix 141273
+7Iqk7YOA 141274
+c2NyaXBjacOzbg== 141275
+IG5hcGlzYQ== 141276
+IG5hcGlzYcWC 141277
+INeg16nXntei 141278
+INin2YTZhdit2YTZig== 141279
+IGhp4buDbg== 141280
+15DXlw== 141281
+15DXl9eo15DXmQ== 141282
+INCz0YDQsNC90LjRhg== 141283
+5omL57aa44GN 141284
+2YPYs9io 141285
+IOC5geC4leC5iOC4luC5ieC4sg== 141286
+4LiU4Liy4Lin4LiZ4LmM 141287
+4LiU4Liy4Lin4LiZ4LmM4LmC4Lir4Lil4LiU 141288
+44KL44GT44Go44GM44Gn44GN44G+44GZ 141289
+5Z+65pys55qE44Gr 141290
+2YjZhNin2K8= 141291
+csOkdW1l 141292
+2K/Zgdin2Lk= 141293
+15nXptei 141294
+IE9jenk= 141295
+IE9jenl3acWbY2ll 141296
+IMWB 141297
+IMWBYQ== 141298
+2KfZhNmK2KfYqA== 141299
+2KfZhNmK2KfYqNin2YY= 141300
+4bqgSQ== 141301
+IEJpcmxpxJ9p 141302
+15TXldem 141303
+15TXldem15DXqg== 141304
+IMSRdWE= 141305
+IOq3uOufrOuLiOq5jA== 141306
+IHLDqWFsaXTDqQ== 141307
+2LnZhNin2YLYp9iq 141308
+SmVzdGU= 141309
+SmVzdGXFmw== 141310
+INC80L3QvtC2 141311
+INC80L3QvtC20LXRgdGC0LLQvg== 141312
+77yr 141313
+44OX44Ot44K444Kn 141314
+44OX44Ot44K444Kn44Kv44OI 141315
+INGE0Ls= 141316
+2LjZhg== 141317
+15LXnNeS15w= 141318
+IG3Fgm9kemll 141319
+IG3Fgm9kemllxbw= 141320
+4LiZ4LmJ4Liz4LiV4Liy 141321
+4LiZ4LmJ4Liz4LiV4Liy4Lil 141322
+0JvQlQ== 141323
+15HXldeY 141324
+INec15TXkteZ15M= 141325
+44GT44Go44KC44GC44KL 141326
+2LLYp9iv 141327
+157XmdeT16I= 141328
+IGfFgsOzd25pZQ== 141329
+44OP44Km 141330
+44OP44Km44K5 141331
+0LHQtdC7 141332
+IMOpdGFwZQ== 141333
+8J+YgA== 141334
+INC80L7QtNC10LvRjA== 141335
+YcSfxLFuxLE= 141336
+16nXl9en 141337
+16nXl9en158= 141338
+IG5pw7Fv 141339
+4LiK4LmJ4Liy4LiH 141340
+4LmA4Lil4Li14Lii 141341
+INGE0L7RgNC80LU= 141342
+INin2YTYtNix2YrZgQ== 141343
+INGD0LTQsNGA 141344
+YXJyaXY= 141345
+YXJyaXbDqWU= 141346
+IG1pZXNpxJk= 141347
+IG1pZXNpxJljeQ== 141348
+2K3YsdmD 141349
+2K3YsdmD2KfYqg== 141350
+IERp4buFbg== 141351
+0J3Qqw== 141352
+44G+44Gj44Gf44GP 141353
+INeZ16jXlden 141354
+0LXRgdGC0LXRgdGC0LI= 141355
+0LXRgdGC0LXRgdGC0LLQtdC90L0= 141356
+IOq3uOufvA== 141357
+INin2YTZhdiq2Yg= 141358
+INin2YTZhdiq2YjYs9i3 141359
+IGLDqW7DqWZpYw== 141360
+IGLDqW7DqWZpY2ll 141361
+IHd5YnJh 141362
+IHd5YnJhxIc= 141363
+INin2YTYstmF2YY= 141364
+INC/0YDQuNC90Y8= 141365
+INC/0YDQuNC90Y/Quw== 141366
+2YHYsdit 141367
+IGtzeg== 141368
+IGtzenRhxYI= 141369
+IGtzenRhxYJ0 141370
+16fXnNeY 141371
+15HXk9eZ16fXqg== 141372
+IGdp4bql 141373
+IGdp4bqlYw== 141374
+IHByb3ByaWV0w6A= 141375
+0LTQtdGA0LbQsNC9 141376
+IEvDtmxu 141377
+IEfDvHplbA== 141378
+15nXpNeV15k= 141379
+IEN14buZYw== 141380
+0Y3RgtCw0LY= 141381
+2KrYsdmD2Yo= 141382
+2KrYsdmD2YrYsg== 141383
+0LvQvtC20LXQvdC40Lk= 141384
+INC/0YM= 141385
+INC/0YPRgtC4 141386
+2KfYrtiq2YTYp9mB 141387
+5Ye644Gm44GP44KL 141388
+4Lia4Li44LiB 141389
+4p2k 141390
+0YTQsNC9 141391
+16TXqdeY 141392
+4Lia4Lix4LiZ4LmA4LiX 141393
+4Lia4Lix4LiZ4LmA4LiX4Li04LiH 141394
+INin2YTYs9in2K8= 141395
+INin2YTYs9in2K/Ysw== 141396
+INin2YTZgtmI2YU= 141397
+INin2YTZgtmI2YXZig== 141398
+IHnDtm5ldGljaQ== 141399
+2YfZiNin2Ko= 141400
+2YfZiNin2KrZgQ== 141401
+IHJlc3BvbnPDoXZlbA== 141402
+INC/0L7QtNC00LXRgNC20LjQstCw 141403
+INin2YTYs9mE2Lc= 141404
+INin2YTYs9mE2LfYp9iq 141405
+44GX44Gm44GK44GP 141406
+44Oa44OD44OI 141407
+4Lib4Li44LmI4Lih 141408
+IG9nbMSFZGE= 141409
+2YbYp9mC 141410
+2YbYp9mC2LQ= 141411
+4LiE4Lit4LiZ4LmC4LiU 141412
+IE3DvHNs 141413
+IE3DvHNsw7w= 141414
+IE3DvHNsw7xtYW4= 141415
+IE1vxbw= 141416
+IE1vxbxuYQ== 141417
+IG51bcOpcmlxdWU= 141418
+IHbhu48= 141419
+INiz2YrYqtmF 141420
+IHllcmxlxZ8= 141421
+0LzQvtC90YLQsNC2 141422
+IGdvw7t0 141423
+44Gm44GK44KK44G+44GZ 141424
+IEtow6FuaA== 141425
+INC10LTQuNC9 141426
+INC10LTQuNC90YHRgtCy 141427
+2KfZhtiu2YE= 141428
+2KfZhtiu2YHYp9i2 141429
+7Iuc7ZeY 141430
+IGzhurduZw== 141431
+INGA0L7Qu9GM 141432
+4LiV4Lix4Lin4LmB4LiX4LiZ 141433
+4LiE4LmI4Liy4LmD4LiK4LmJ 141434
+4LiE4LmI4Liy4LmD4LiK4LmJ4LiI4LmI4Liy4Lii 141435
+IHZlcmbDvGc= 141436
+IHZlcmbDvGdiYXI= 141437
+7JmU64uk 141438
+44GE44Ga 141439
+44GE44Ga44KM 141440
+INC40YHRgdC70LXQtNC+0LLQsNC90LjRjw== 141441
+0LzQtdGJ0LA= 141442
+15TXlw== 141443
+15TXl9eW16g= 141444
+4LmB4Lif4LiK4Lix4LmI4LiZ 141445
+2KrYtdix2YE= 141446
+2KXYsdmH2KfYqA== 141447
+IGV4ZXJjw61jaW8= 141448
+IMOpbGV2 141449
+IMOpbGV2w6k= 141450
+4Liq4Lix4LiN4LiN4Liy4LiT 141451
+w5Za 141452
+44OX44Ot44Kw 141453
+44OX44Ot44Kw44Op 141454
+44OX44Ot44Kw44Op44Og 141455
+IHdld27EmXRyem4= 141456
+IGhlbsO8eg== 141457
+6aOb44Gz 141458
+4LmA4LiU4Lit4Lij4LmM 141459
+0YHRg9C2 141460
+0YHRg9C20LTQtdC9 141461
+2LTYudmI2Kg= 141462
+44Gy44Go44KK 141463
+IHd5xYLEhQ== 141464
+IHd5xYLEhWN6bmll 141465
+INC/0LvQvtGF0L4= 141466
+0JTQlQ== 141467
+4bqm 141468
+2YHYudin2YTZig== 141469
+2YHYudin2YTZitin2Ko= 141470
+INin2YTYudi02LE= 141471
+0YHRgtGD0L/QuNC7 141472
+IHlhcmc= 141473
+IHlhcmfEsQ== 141474
+0L3RjtGO 141475
+15XXkNeR 141476
+IHXDpw== 141477
+IHXDp2Fr 141478
+67K9 141479
+2KrZiNmC2Yo= 141480
+2KrZiNmC2YrYuQ== 141481
+IOykkeyLrA== 141482
+16DXmdeV15XXmA== 141483
+2KPZg9mE 141484
+572u44GE44Gm 141485
+6aCC44GN 141486
+INeU16rXkQ== 141487
+INeU16rXkdeZ16LXlA== 141488
+IGTDvHJmZW4= 141489
+2YXZgtin2YQ= 141490
+2YXZgtin2YTYp9iq 141491
+INiy2YXZhg== 141492
+4Lie4Lik4Lio 141493
+4Lie4Lik4Lio4LiI 141494
+4Lie4Lik4Lio4LiI4Li04LiB 141495
+4Lie4Lik4Lio4LiI4Li04LiB4Liy4Lii4LiZ 141496
+INC90LXRgdC60L7Qu9GM 141497
+INC90LXRgdC60L7Qu9GM0LrQuA== 141498
+INC90LXRgdC60L7Qu9GM0LrQuNGF 141499
+IGNyaWFuw6dh 141500
+4Lih4Li04LiV4Lij 141501
+157Xm9eZ16jXldeq 141502
+4LiB4Liy4Lij4Lia4Lij4Li04Lir4Liy4Lij 141503
+IHTDqWzDqWNoYXJn 141504
+INeQ15XXlNeR16o= 141505
+IELDvHJv 141506
+5L2c44Gj44Gf 141507
+IEtpxZ9p 141508
+576O5ZGz44GX 141509
+4LmA4Lil4Lii4LiE4LmI4Liw 141510
+4Lie4Lia4LiB4Lix4Lia 141511
+4LiI4LmJ4Liy 141512
+IMOnZXI= 141513
+IMOnZXLDpw== 141514
+IMOnZXLDp2V2ZQ== 141515
+44KS5L2c44Gj44Gm 141516
+INC/0LXRgNCy0YPRjg== 141517
+157Xpteo15nXnQ== 141518
+15DXnNeV15Q= 141519
+15DXnNeV15TXmded 141520
+IGFncsOp 141521
+IGFncsOpYWJsZQ== 141522
+IGF5xLFy 141523
+xLBMxLA= 141524
+44Kl 141525
+IO2YhA== 141526
+IO2YhOyLpA== 141527
+2KvYp9mE2Ks= 141528
+16rXlg== 141529
+16rXlteV16DXlA== 141530
+44Go44GE44Gj44Gm 141531
+44Go44GE44Gj44Gm44KC 141532
+INin2KjZiA== 141533
+INGB0L7QsdCw0Lo= 141534
+6aOf44G544Gf 141535
+INC00LDQvdC90L7QvA== 141536
+4LmA4Lil4Li0 141537
+4LmA4Lil4Li04Lio 141538
+IO2a 141539
+IO2aqA== 141540
+IO2aqOqzvA== 141541
+44KC44KJ44GI44KL 141542
+16DXptec 141543
+0YTQuNC6 141544
+0YTQuNC60YE= 141545
+IGplc3RlxZtteQ== 141546
+16rXl9eV16nXlA== 141547
+4LmE4Lih4LmI4LiE4Lin4Lij 141548
+INit2LPZitmG 141549
+4LiB4Liy4Lij4Lil4LiH4LiX4Li44LiZ 141550
+67Sk 141551
+INCY0LzQtdC90L3Qvg== 141552
+4Lia4Lit4Lij4LmM 141553
+4Lia4Lit4Lij4LmM4LiU 141554
+IEPhuqNuaA== 141555
+7ISc67mE7Iqk 141556
+INC/0L7Qu9C+0LI= 141557
+INC/0L7Qu9C+0LLQuNC9 141558
+INC30LDQvNC10YfQsA== 141559
+44GE44KN44KT44Gq 141560
+INeR15nXpw== 141561
+INeR15nXp9ep 141562
+0LvRg9GI 141563
+44KS6L+O 141564
+44KS6L+O44GI 141565
+2KzYsdmK2YXYqQ== 141566
+IHTDonk= 141567
+INin2YTZhtmI 141568
+INin2YTZhtmI2YjZig== 141569
+w4JO 141570
+7L+g 141571
+4Lir4LiZ4Liy4Lin 141572
+INeR15fXqdeR15XXnw== 141573
+2LLYp9ix 141574
+4LiU4Liy4Lij 141575
+4LiU4Liy4Lij4Liy 141576
+IMWbbA== 141577
+IMWbbHVi 141578
+4Lih4Li14LiE4Lin4Liy4Lih4Liq4Li44LiC 141579
+IG5odQ== 141580
+IG5odeG6rW4= 141581
+2YXYrdi32Kk= 141582
+4LmA4Liq4Li34LmJ4Lit4Lic4LmJ4Liy 141583
+INCi0L7Qu9GM0LrQvg== 141584
+INmD2LM= 141585
+INmD2LPYp9ix2Kk= 141586
+2YXYtNix2YjYuQ== 141587
+bmnEmWNpYQ== 141588
+16LXm9ep15nXlQ== 141589
+2KrZhNmB 141590
+2KrZhNmB2LLZig== 141591
+2KrZhNmB2LLZitmI2YY= 141592
+IGzGsOG7m2k= 141593
+INCc0L7RgdC60LLRiw== 141594
+IHLDqXNlcnZl 141595
+IGFubGHFnw== 141596
+IGFubGHFn8SxbA== 141597
+IGVkZWNlxJ9p 141598
+4Lij4Lit4LiH4LmA4LiX4LmJ4Liy 141599
+INio2Lc= 141600
+INio2LfYsdmK 141601
+INio2LfYsdmK2YLYqQ== 141602
+44Gm44GX44G+44Gj44Gm 141603
+44KC44KJ44Gj44Gm 141604
+2KjYsdis 141605
+5rGa 141606
+5rGa44KM 141607
+IGNob2M= 141608
+IGNob2NpYQ== 141609
+IGNob2NpYcW8 141610
+IHpvYmFj 141611
+IHpvYmFjennEhw== 141612
+0L/RgNGP 141613
+0L/RgNGP0LbQtdC9 141614
+INGG0LjRhA== 141615
+INGG0LjRhNGA 141616
+INC80LDQvA== 141617
+INCy0LfRj9GC0Yw= 141618
+IGNo4bqhbQ== 141619
+2KzYs9mF 141620
+2K3Zhdin2LM= 141621
+4LmA4Lil4LmI4Lih 141622
+4Lie4Li04Lip 141623
+15TXpNeb15U= 141624
+4LiK4LmI4Lit4LiH4LiX4Liy4LiH 141625
+INCy0LXQug== 141626
+INCy0LXQutCw 141627
+xqHMgQ== 141628
+xqHMgWk= 141629
+IFRp4buBbg== 141630
+IHRy4bqnbQ== 141631
+0LzRi9GI 141632
+0LzRi9GI0Ls= 141633
+INGC0YM= 141634
+INGC0YPRgNC40YHRgg== 141635
+IGNoYw== 141636
+IGNoY8SF 141637
+INCw0LLQsw== 141638
+INCw0LLQs9GD0YHRgg== 141639
+INCw0LLQs9GD0YHRgtCw 141640
+16HXkNeV16o= 141641
+INeo15LXnA== 141642
+4Lic4Lil4LiB4Lij4Liw4LiX 141643
+4Lic4Lil4LiB4Lij4Liw4LiX4Lia 141644
+5aSJ44KP44KL 141645
+INeU15DXl9eo15XXoNeZ150= 141646
+2LPZgdmK2LE= 141647
+INGH0LDRidC1 141648
+44GE44KJ 141649
+44GE44KJ44Gj 141650
+44GE44KJ44Gj44GX44KD 141651
+15XXnteg15nXnQ== 141652
+IGFydHTEsXI= 141653
+IENo4buL 141654
+IOyhsOyngQ== 141655
+INGD0YHQv9C10YU= 141656
+INei15XXoQ== 141657
+INei15XXoden 141658
+IOyDneuqhQ== 141659
+0YbQuNGC 141660
+IHJlZ2nDs24= 141661
+0J7QnQ== 141662
+IGRvxJ91bQ== 141663
+IHlhxZ9hZA== 141664
+IHlhxZ9hZMSxxJ/EsQ== 141665
+4LiX4LiU4Lil4Lit4LiH 141666
+IGfDtnrDvA== 141667
+16nXmdeo15Q= 141668
+0LTRg9C80LDQuw== 141669
+IGRhxJ/EsQ== 141670
+IGRhxJ/EsXQ= 141671
+4LiX4Li14Lih4LiH4Liy4LiZ 141672
+IHRp4buBbQ== 141673
+INin2YTZg9io2LE= 141674
+INin2YTZg9io2LHZiQ== 141675
+7Lmt 141676
+IEfDvG5j 141677
+IEfDvG5jZWxsZQ== 141678
+IEfDvG5jZWxsZW1l 141679
+6rmK 141680
+INC+0LHQvtGA0YPQtNC+0LLQsNC90LjQtQ== 141681
+INGA0LXRiNCw 141682
+4buk 141683
+INC/0LjRgg== 141684
+INC/0LjRgtCw0L3QuNGP 141685
+4LmA4Lij4Li14Lii4Lia 141686
+15vXqteZ15HXlA== 141687
+INC/0L7QvQ== 141688
+INC/0L7QvdGA0LDQsg== 141689
+INC/0L7QvdGA0LDQstC4 141690
+INeU15XXnNeT 141691
+INeU15XXnNeT16o= 141692
+IOqygQ== 141693
+IOqygeuLiOuLpA== 141694
+INC/0LXRgNCy0L7QuQ== 141695
+44Op44Kk44OV 141696
+IMWfaWly 141697
+a3LEmQ== 141698
+a3LEmWM= 141699
+IHRoaeG7g3U= 141700
+4LmA4Lil4Lii4LiX4Li1 141701
+4LmA4Lil4Lii4LiX4Li14LmA4LiU4Li14Lii4Lin 141702
+15jXoteg15XXqg== 141703
+2KfYptmH2YU= 141704
+INeQ16HXldeo 141705
+INC/0LvQsNGC0LXQtg== 141706
+2KrYsdiv2K8= 141707
+IG1vxbxsaXdl 141708
+IGto4bub 141709
+IGto4bubcA== 141710
+2KrZgdin2LnZhA== 141711
+INGI0LrQvtC70Yw= 141712
+INGI0LrQvtC70YzQvQ== 141713
+INmC2LXYqQ== 141714
+IG3DqXRpZXI= 141715
+bsSZxYJh 141716
+4Lir4Lil4LmI4Lit 141717
+IOG7p25n 141718
+IHByemVnbA== 141719
+IHByemVnbMSFZA== 141720
+INin2YTZhdiq2LnZhA== 141721
+INin2YTZhdiq2LnZhNmC2Kk= 141722
+INGB0YvQvQ== 141723
+INCy0L7Qu9C9 141724
+44OH44O844OI 141725
+INCt0YLQuA== 141726
+INC60YDQvtC80LU= 141727
+4LiE4Liy4Lij4LmM 141728
+16DXp9eV15PXlA== 141729
+INec16nXnteV16I= 141730
+INeW15XXm9eo 141731
+77yn 141732
+2YrZjtin 141733
+IGdp4buPaQ== 141734
+5YON44GP 141735
+INGB0L3QuA== 141736
+INGB0L3QuNC20LXQvQ== 141737
+4LmB4LiU4LiU 141738
+4Lij4Li44LiZ 141739
+4Lij4Li44LiZ4LmB4Lij4LiH 141740
+IGhp4buHcA== 141741
+b2dyYWbDrWE= 141742
+4LmA4LiI4Lit4Lij4LmM 141743
+INC00LLQuNCz 141744
+INC00LLQuNCz0LDRgg== 141745
+INC00LLQuNCz0LDRgtC10Ls= 141746
+IMO8eQ== 141747
+IMO8eWVsZXI= 141748
+IMO8eWVsZXJp 141749
+INCx0YPQug== 141750
+INCx0YPQutCy 141751
+44KC5aSa44GP 141752
+IHRoaeG7h3Q= 141753
+IFBhw61z 141754
+INi32KjZiti52Yo= 141755
+4LmB4LiI4LiB 141756
+INin2YTYtdit2YrYrQ== 141757
+IGFwcHLDqQ== 141758
+IGFwcHLDqWNp 141759
+IGRlY2lzacOzbg== 141760
+IOuwmOuTnA== 141761
+IOuwmOuTnOyLnA== 141762
+INGC0LXQsdC1 141763
+44K344O844K6 141764
+44K344O844K644Oz 141765
+INC00LDQu9GM0L0= 141766
+IOyKpA== 141767
+IOyKpOyKpA== 141768
+IOyKpOyKpOuhnA== 141769
+IFRo4buD 141770
+IGthcsWf 141771
+IGthcsWfxLFz 141772
+IGthcsWfxLFzxLFuZGE= 141773
+IEvDtm4= 141774
+IEvDtm5pZw== 141775
+0LjQstCw0L3QuNC1 141776
+15HXldem16I= 141777
+0LPQu9Cw0YE= 141778
+IHR3w7M= 141779
+IHR3w7NyYw== 141780
+4Lib4LiB4LiE4Lij 141781
+4Lib4LiB4LiE4Lij4Lit4LiH 141782
+IEfFgg== 141783
+IEfFgsOzd24= 141784
+IFVudGVyc3TDvHQ= 141785
+IFVudGVyc3TDvHR6dW5n 141786
+INC00YPRhQ== 141787
+INC00YPRhdC+0LI= 141788
+2KPZhdin2YY= 141789
+15fXqdep 141790
+2KrYuA== 141791
+2KrYuNin2YfYsQ== 141792
+INC70Y7QsdC+0Lw= 141793
+4LiV4Liy4Lij 141794
+4LiV4Liy4Lij4Liy4LiH 141795
+IGtyw7Ns 141796
+2KPYrdiv2Ks= 141797
+7KGM64uk 141798
+0JrRg9GA0YE= 141799
+44OD44OE 141800
+157Xp9eV15HXnA== 141801
+INGB0LjQvNCy0L7Quw== 141802
+IGTDqXNvcm0= 141803
+IGTDqXNvcm1haXM= 141804
+d8O8bnM= 141805
+d8O8bnNjaGU= 141806
+0YPQvdC4 141807
+0YPQvdC40YbQuNC/ 141808
+0YPQvdC40YbQuNC/0LDQu9GM0L0= 141809
+4Lir4Lil4Lix4LiB4Liq4Li54LiV4Lij 141810
+2YbYqti02LE= 141811
+INCw0Ls= 141812
+INCw0LvQug== 141813
+INCw0LvQutC+0LM= 141814
+INCw0LvQutC+0LPQvtC7 141815
+INGD0YfQuNGC0YvQstCw 141816
+4LiB4Liz4LiB4Lix4Lia 141817
+INec16TXoteV15w= 141818
+IOyXsOqysA== 141819
+c8SFZA== 141820
+INin2YTYo9mK 141821
+INin2YTYo9mK2KfZhQ== 141822
+2LrZitin2Kg= 141823
+INC90LDRgA== 141824
+INC90LDRgNC60L4= 141825
+157XldeT16I= 141826
+INGB0LXRgNC40Lg= 141827
+0L/QuNGB0YvQstCw 141828
+4Liq4Li04Lin 141829
+57aa44GE44Gm 141830
+55Sz44GX6L6844G/ 141831
+INec15LXqA== 141832
+INec15LXqNeV150= 141833
+INC00LXQvA== 141834
+INC00LXQvNC+ 141835
+IOuztOuCtA== 141836
+2KrZh9iv2YrYrw== 141837
+INmF2LTZitix2Kc= 141838
+IGR1eQ== 141839
+IGR1eeG7h3Q= 141840
+IHdpxJlrc3pl 141841
+2YXYudin2Yo= 141842
+2YXYudin2YrZitix 141843
+IEdkYQ== 141844
+IEdkYcWEc2s= 141845
+IHJhaA== 141846
+IHJhaGF0cw== 141847
+IHJhaGF0c8Sxeg== 141848
+16jXldem15Q= 141849
+bMO2cw== 141850
+bMO2c3VuZw== 141851
+INCi0LDQutC40Lw= 141852
+0YjQtdC0 141853
+0YjQtdC00Yg= 141854
+2LnYstmE 141855
+INeo16nXmdee16o= 141856
+INec15TXmdeb 141857
+INec15TXmdeb16DXoQ== 141858
+INC/0YPRgg== 141859
+INC/0YPRgtC10Yg= 141860
+INC/0YPRgtC10YjQtdGB0YLQsg== 141861
+IG5vdMOtY2lh 141862
+IGFsxLHFnw== 141863
+IGFsxLHFn3Zlcg== 141864
+IGFsxLHFn3ZlcmnFnw== 141865
+IHfFgm9z 141866
+IHfFgm9zw7N3 141867
+INio2Lo= 141868
+INio2LrYr9in2K8= 141869
+IHZlcsO2ZmZlbnQ= 141870
+IHZlcsO2ZmZlbnRsaWNodA== 141871
+IEtow6E= 141872
+IHTDoW4= 141873
+65CY6riw 141874
+IOuwqeusuA== 141875
+2YHZitmE 141876
+4LmA4LiB4Li04LiU4LiI4Liy4LiB 141877
+5Y+v5oSb 141878
+5Y+v5oSb44GE 141879
+4LiW4Li44LiH 141880
+IHpld27EmXRyem4= 141881
+4Lig4Liy4Lip4Liy4Lit4Lix4LiH4LiB4Lik4Lip 141882
+IG3DoXhpbWE= 141883
+IHVsdXM= 141884
+IHVsdXNsYXJhcmFzxLE= 141885
+INeg15TXoA== 141886
+4LiC4LmI4Liy4Lin4Liq4Liy4Lij 141887
+IOydmOyCrA== 141888
+4LmA4Lir4Lil4Li34Lit4LiH 141889
+INiv2YI= 141890
+INiv2YLYp9im2YI= 141891
+4Liq4Li34LmI4Lit4Liq4Liy4Lij 141892
+66i8 141893
+INGB0L7RgdGC0L7Rj9C90LjQuA== 141894
+4Liq4Lih4Liy4LiE4Lih 141895
+4buC 141896
+INCc0L7RgdC60L7Qsg== 141897
+INCc0L7RgdC60L7QstGB0Lo= 141898
+157XodeV15LXnA== 141899
+44GL44GL44KK 141900
+IFRydXnhu4Fu 141901
+4LmB4LiC4LmH4LiH4LmB4Lij4LiH 141902
+157Xl9eW15nXpw== 141903
+4LmC4LiB4LmJ 141904
+2YrYs9ix 141905
+7JSp 141906
+15DXlden 141907
+15DXlden15g= 141908
+15DXlden15jXldeR16g= 141909
+IHByb3hpbWl0w6k= 141910
+2YXZhtmH2Kw= 141911
+INin2YTYrNiy 141912
+INin2YTYrNiy2KfYpg== 141913
+INin2YTYrNiy2KfYptix2Yo= 141914
+IMSQaeG7g20= 141915
+INC00LXQvdC10LY= 141916
+INC00LXQvdC10LbQvQ== 141917
+2YHYrdi1 141918
+2YHYpg== 141919
+INCR0YPQtA== 141920
+15LXmdeT15XXnA== 141921
+INCS0LXQtNGM 141922
+2LnZhNin2YXYqQ== 141923
+INeQ15fXqNeV16DXldeq 141924
+44GE44Gf44Gg44GE44Gm 141925
+2LPZhNit 141926
+2K3ZhNmF 141927
+2LLZiNin2LE= 141928
+2YPYs9ix 141929
+15jXp9eh 141930
+INCx0LDQvQ== 141931
+INCx0LDQvdC60L7Qsg== 141932
+INC/0YDQvtC2 141933
+INC/0YDQvtC20LjQstCw 141934
+bGl3bw== 141935
+bGl3b8WbY2k= 141936
+IFRp4bq/cA== 141937
+INin2YTZhdmG2KfYs9io 141938
+INin2YTYrtmK2KfYsQ== 141939
+44GK44GL 141940
+44GK44GL44GS 141941
+4LiU4Lit4LiB4LmE4Lih4LmJ 141942
+w6RtcA== 141943
+w6RtcGZl 141944
+4LiV4Lix4LmJ4LiH4LmD4LiI 141945
+INC30LDRidC40YI= 141946
+INC30LDRidC40YLRiw== 141947
+IFRoxrDhu51uZw== 141948
+INi12YE= 141949
+INi12YHYrdip 141950
+15fXldeo16M= 141951
+44OQ44OD44Kw 141952
+INeT15nXkg== 141953
+INeT15nXkteZ15g= 141954
+INeT15nXkteZ15jXnNeZ 141955
+INeU15fXldec15nXnQ== 141956
+0LLQtdGJ 141957
+0LLQtdGJ0LA= 141958
+INC60YPQu9GM0YI= 141959
+INC60YPQu9GM0YLRgw== 141960
+INC60YPQu9GM0YLRg9GA0Ys= 141961
+INin2YTYp9mG2KrYsdmG2Ko= 141962
+IGjDtmNo 141963
+IGjDtmNoc3Q= 141964
+IO2YlQ== 141965
+IO2Yle2DnA== 141966
+INCy0L7QuQ== 141967
+INCy0L7QudC90Ys= 141968
+0JvQng== 141969
+7Iug7Jqp 141970
+INee15HXldeh 141971
+INee15HXldeh16E= 141972
+157XoNeZ16I= 141973
+IGZpeWF0xLE= 141974
+INGB0LvRg9C2 141975
+INGB0LvRg9C20LHRiw== 141976
+4LiX4Lix4Lio 141977
+4LiX4Lix4Lio4LiZ 141978
+44GT44Go44GM5aSa44GE 141979
+INeU157Xqdeq 141980
+INeU157Xqdeq157XqQ== 141981
+5a+E44Gb 141982
+157Xqdec15XXlw== 141983
+5pmC54K5 141984
+5pmC54K544Gn 141985
+4Lie4Lij4Li1 141986
+4Lie4Lij4Li14LmA4Lih4Li14Lii 141987
+4Lie4Lij4Li14LmA4Lih4Li14Lii4Lij4LmM 141988
+4Lie4Lij4Li14LmA4Lih4Li14Lii4Lij4LmM4Lil4Li14LiB 141989
+IGRpZmZpY29sdA== 141990
+IGRpZmZpY29sdMOg 141991
+44Os44K544OI 141992
+44Os44K544OI44Op44Oz 141993
+4Liq4Lih4LmA4LiU4LmH 141994
+4Liq4Lih4LmA4LiU4LmH4LiI 141995
+INC20LjQtA== 141996
+INC20LjQtNC6 141997
+IHp1cGXFgg== 141998
+IHp1cGXFgm5pZQ== 141999
+INmF2KzYsQ== 142000
+INmF2KzYsdiv 142001
+44GM5aeL 142002
+44GM5aeL44G+ 142003
+44Kt44Oj44Op 142004
+INeQ15XXldeZ16g= 142005
+44GK5LqS 142006
+44GK5LqS44GE 142007
+IHBvdHLDoA== 142008
+IFBhxYRzdA== 142009
+IFBhxYRzdHdv 142010
+INio2YrYp9mG 142011
+INio2YrYp9mG2KfYqg== 142012
+INC40L3QvtCz0LTQsA== 142013
+INGA0LA= 142014
+INGA0LDRgdGC0LI= 142015
+INGA0LDRgdGC0LLQvtGA 142016
+INeW157XoA== 142017
+4Lii4Li04LmJ4Lih 142018
+xIY= 142019
+44G+44GV 142020
+44G+44GV44Gr 142021
+44OV44Kh44Kk44Or 142022
+IGfDtnJkw7zEn8O8 142023
+4Liq4LiH4LiE4Lij 142024
+4Liq4LiH4LiE4Lij4Liy4Lih 142025
+IEFya2FkYcWf 142026
+IHJvendpxIV6YW5pYQ== 142027
+157XldeY 142028
+cGnEmQ== 142029
+cGnEmXQ= 142030
+2LXYutix 142031
+4Liq4Lii 142032
+4Liq4Lii4Liy4Lih 142033
+44KG44Gj44GP44KK 142034
+IHRy4bqnbg== 142035
+IGVjb25vbcOtYQ== 142036
+IGdlaMO2cmVu 142037
+44K344On44O8 142038
+IHPFgnVjaGE= 142039
+4Lie4Lit4LmD4LiI 142040
+INC+0YLQvNC10YLQuNC7 142041
+2YbYqtmC2YQ= 142042
+IHByb3DDs3NpdG8= 142043
+INCy0LDRiNC10LPQvg== 142044
+IG5o4bqvbg== 142045
+4LmB4LiW4Lin 142046
+INC60L7QvNC40YE= 142047
+INC60L7QvNC40YHRgdC4 142048
+d2HFvG5pZQ== 142049
+IHlhdmHFnw== 142050
+157Xmden 142051
+157Xmden15XXnQ== 142052
+16nXkNec16o= 142053
+IHnEsWxsYXJkYQ== 142054
+INCu 142055
+INCu0YA= 142056
+16DXodeZ15HXldeq 142057
+16rXpg== 142058
+16rXpteV15I= 142059
+INC+0LTQvdGD 142060
+IOC4reC4ouC5iOC4suC4h+C5hOC4ow== 142061
+IOC4reC4ouC5iOC4suC4h+C5hOC4o+C4geC5h+C4leC4suC4oQ== 142062
+64G8 142063
+4LmE4Lil4LmI 142064
+2KrYs9mE2YrZhQ== 142065
+2KjZhNin2Lo= 142066
+IOyJ 142067
+IOyJvQ== 142068
+IOyJveqyjA== 142069
+44Oa44Oz 142070
+0LfQstGD0Yc= 142071
+IFfDpGg= 142072
+IFfDpGhyZW5k 142073
+INeZ15nXqg== 142074
+INeZ15nXqteb158= 142075
+IGtodXnDqm4= 142076
+IHbhur0= 142077
+INCw0LzQtdGA 142078
+INCw0LzQtdGA0LjQug== 142079
+INCw0LzQtdGA0LjQutCw0L0= 142080
+INCw0LzQtdGA0LjQutCw0L3RgdC6 142081
+2LnYrNio 142082
+44Ob44O844Og44Oa44O844K4 142083
+INC90LjQutGC0L4= 142084
+INmC2Y4= 142085
+INmC2Y7Yp9mE 142086
+INmC2Y7Yp9mE2Y4= 142087
+0JDQlw== 142088
+2YXYrNmF2YjYuQ== 142089
+2YXYrNmF2YjYudin2Ko= 142090
+IG5lY2Vzc2l0w6A= 142091
+IHBvYmxp 142092
+IHBvYmxpxbx1 142093
+IHBo4bqlbg== 142094
+INCh0L7QvtCx0Yk= 142095
+2YXZgtin2Lc= 142096
+2YXZgtin2LfYuQ== 142097
+INeU16bXldeo15o= 142098
+bGHFn3TEsXJtYQ== 142099
+4Lin4Li04LiU 142100
+4Lin4Li04LiU4Li1 142101
+4Lin4Li04LiU4Li14LmC4Lit 142102
+IOq3uOumrOyKpA== 142103
+IOq3uOumrOyKpOuPhA== 142104
+44K/44Kk44Of 142105
+44K/44Kk44Of44Oz44Kw 142106
+16fXmNeS15XXqA== 142107
+16fXmNeS15XXqNeZ15Q= 142108
+INeX15XXpA== 142109
+INeX15XXpNep15k= 142110
+2KPYrNix 142111
+INC40LzQtdC90Lg= 142112
+INGA0LDQvdC10LU= 142113
+4LmA4Lie4Li34LmI4Lit4LiZ4LmG 142114
+IEplc8O6cw== 142115
+0YHQvtC10LTQuNC9 142116
+0YHQvtC10LTQuNC90LXQvQ== 142117
+INeo15fXlden 142118
+4LmC4Lia4Lij4Liy 142119
+4LmC4Lia4Lij4Liy4LiT 142120
+IEjGoW4= 142121
+IHRo4bqtcA== 142122
+2KrYudmK2YrZhg== 142123
+IHRhcnTEscWf 142124
+IHRhcnTEscWfbWE= 142125
+IEdlc3By 142126
+IEdlc3Byw6RjaA== 142127
+16rXqNeV16Q= 142128
+16rXqNeV16TXldeq 142129
+IGNhdMOpZ29yaWU= 142130
+INC+0LrQsNC30YvQstCw 142131
+INC90LDQu9C40YfQuNC1 142132
+IHByw6lzZW50w6k= 142133
+IGt1bGw= 142134
+IGt1bGxhbmQ= 142135
+IGt1bGxhbmTEsQ== 142136
+IMO8bmw= 142137
+IMO8bmzDvA== 142138
+INmB2YPYsdip 142139
+0LjQt9Cw0YLQvtGA 142140
+15DXldeg 142141
+15DXldeg15nXkQ== 142142
+15DXldeg15nXkdeo16E= 142143
+15DXldeg15nXkdeo16HXmdeY16o= 142144
+INGA0LDRgdGB0LzQsNGC 142145
+INGA0LDRgdGB0LzQsNGC0YA= 142146
+INGA0LDRgdGB0LzQsNGC0YDQuNCy0LA= 142147
+2KrZg9mE2YU= 142148
+2YPYqtix2Yg= 142149
+2YPYqtix2YjZhtmK 142150
+INGB0L7Rh9C10YI= 142151
+INGB0L7Rh9C10YLQsA== 142152
+44KS6KaL44Gb 142153
+IG5n4burYQ== 142154
+INCg0LXRgdC/ 142155
+INCg0LXRgdC/0YPQsQ== 142156
+INCg0LXRgdC/0YPQsdC70LjQug== 142157
+44Km44Kp 142158
+44Km44Kp44O8 142159
+INCc0LXQttC00YM= 142160
+IOyeiOqyjA== 142161
+IG3Dog== 142162
+IOyalOyyrQ== 142163
+2LbYp9ix 142164
+4Lil4Li44LmJ4LiZ 142165
+64yA7ZWZ6rWQ 142166
+15bXmdeb 142167
+15bXmdeb16jXldef 142168
+44K544Oa 142169
+44K544Oa44O844K5 142170
+INC60YDQsNGB0L7Rgg== 142171
+77yo 142172
+6ryt 142173
+44KS6ZuG 142174
+44KS6ZuG44KB 142175
+67Cd 142176
+INeU16DXkA== 142177
+INeU16DXkNep150= 142178
+IOqwgOyatA== 142179
+IOqwgOyatOuNsA== 142180
+2KrZg9mE2YHYqQ== 142181
+INit2YLZitmC2Yo= 142182
+IGhhbGs= 142183
+IGhhbGvEsW4= 142184
+0Y7RidGD0Y4= 142185
+INGB0L/QuNC9 142186
+16HXqNeY158= 142187
+INC/0LXRgNCy0L7Qs9C+ 142188
+INC/0L7Qu9C+0LY= 142189
+INC/0L7Qu9C+0LbQuNGC0LXQu9GM0L0= 142190
+INC00Ls= 142191
+INC00LvQuNGC0LXQu9GM0L0= 142192
+IFbEqW5o 142193
+6rS0 142194
+INGB0YvRgA== 142195
+IO2Gte2VmOyXrA== 142196
+67OR7JuQ 142197
+4LmC4Lij4LiH4LiH4Liy4LiZ 142198
+4Lij4Lix4Lia4Lic4Li04LiU 142199
+4Lij4Lix4Lia4Lic4Li04LiU4LiK4Lit4Lia 142200
+2KrYrNmG2Kg= 142201
+c8WC 142202
+c8WCdWNo 142203
+44Ki44Or44OQ 142204
+44Ki44Or44OQ44Og 142205
+64m07Iqk 142206
+IHBhdGnDqw== 142207
+IHBhdGnDq250 142208
+IOyYpO0= 142209
+IOyYpO2e 142210
+IOyYpO2eiA== 142211
+IOyYpO2eiOugpA== 142212
+IERlcm5l 142213
+IERlcm5lxJ9p 142214
+d3LDs2Np 142215
+d3LDs2NpxIc= 142216
+INC+0LHRiQ== 142217
+INC+0LHRidC10YHRgtCy 142218
+INC+0LHRidC10YHRgtCy0LXQvdC90L4= 142219
+IOq1kOyImA== 142220
+dMSxxJ/EsW3EsXo= 142221
+INeU157XqdeZ15E= 142222
+a8O2cnBlcg== 142223
+INC/0L7Qt9Cy0L7Quw== 142224
+INC/0L7Qt9Cy0L7Qu9C40YI= 142225
+IENoaeG6v24= 142226
+2KPYrtmI 142227
+IEF5ZMSxbg== 142228
+4LiU4LmJ4Liy4LiZ4Lil 142229
+4LiU4LmJ4Liy4LiZ4Lil4LmI4Liy4LiH 142230
+IGRydQ== 142231
+IGRydcW8 142232
+IGRydcW8eW4= 142233
+IOuwnO2RnA== 142234
+IFRo4bqjbw== 142235
+2KzZh9in2K8= 142236
+4LiB4Lij4Liw4LiX4Li54LmJ 142237
+INC60YDQvtCy 142238
+INC60YDQvtCy0Lg= 142239
+IGnDp2VyaWs= 142240
+IG5hZHppZQ== 142241
+IG5hZHppZWrEmQ== 142242
+INCh0LzQvtGC0YA= 142243
+IHBo4bupYw== 142244
+2KzYqtmF2KfYuQ== 142245
+2KzYqtmF2KfYudmK2Kk= 142246
+0LrQvtC80L/QvtC9 142247
+0LrQvtC80L/QvtC90LXQvdGC 142248
+INCx0LjQuw== 142249
+INCx0LjQu9C10YI= 142250
+44OQ44Oz44OJ 142251
+IFBvbMOtY2lh 142252
+2KfZhNiq2Yc= 142253
+2KfZhNiq2YfYp9io 142254
+2K3YsdmB 142255
+2KrYrti3 142256
+2KrYrti32YrYtw== 142257
+44Kz44O844M= 142258
+44Kz44O844OS 142259
+44Kz44O844OS44O8 142260
+772l772l772l 142261
+4LiL4Lit4Lii 142262
+IGNyw6lkaXQ= 142263
+6LK344Gj44Gf 142264
+INC/0L7RgNGP0LQ= 142265
+INC/0L7RgNGP0LTQutC1 142266
+IHBow7M= 142267
+IHdpZGE= 142268
+IHdpZGHEhw== 142269
+2KzYsdin2KbZhQ== 142270
+4Lic4Li1 142271
+IGLEmWTEmQ== 142272
+INee16TXqteX 142273
+44OR44O844M= 142274
+44OR44O844OG 142275
+44OR44O844OG44Kj 142276
+44OR44O844OG44Kj44O8 142277
+IEthxbw= 142278
+IEthxbxkeQ== 142279
+INC90LXQvtCx0YXQvtC00LjQvNC+0YHRgtC4 142280
+4Lif4Lit4Lij4LmM 142281
+4Lif4Lit4Lij4LmM4Lih 142282
+INC80LDQu9GL0Yg= 142283
+INC/0LvQvtGC 142284
+INGD0YHRgtGA0L7QuQ== 142285
+INGD0YHRgtGA0L7QudGB0YLQstCw 142286
+4LiW4Lit4LiZ 142287
+IG9sdcWfdHVydWw= 142288
+IMWbd2lhZA== 142289
+IMWbd2lhZG9t 142290
+2YXYudmH2K8= 142291
+INC/0YDQvtC40LfQstC10LTQtdC9 142292
+xqA= 142293
+16jXmdep 142294
+2YXYs9iq2Ks= 142295
+2YXYs9iq2KvZhdix 142296
+16DXmdeZ16g= 142297
+cGHDsQ== 142298
+IDstKQ== 142299
+IOuwnOqyrA== 142300
+IGfDtnLDvHlvcg== 142301
+2YXYpNmE2YE= 142302
+IMSQ4buB 142303
+INin2YTZhtmI2KfYqA== 142304
+15fXp9eZ16jXlA== 142305
+IG3hu49p 142306
+6L+w44G5 142307
+0J3QuNC6 142308
+7J6W7JWE 142309
+7J6W7JWE7JqU 142310
+cHJvd2FkemnFgg== 142311
+bMOzZw== 142312
+bMOzZ2ljYQ== 142313
+16TXodeY 142314
+16TXodeY15nXkdec 142315
+INee15PXlA== 142316
+INee15PXlNeZ150= 142317
+44GT44GT44G+44Gn 142318
+15TXqteX 142319
+15TXqteX15zXlA== 142320
+INek15XXoQ== 142321
+INek15XXodeY15nXnQ== 142322
+INC90LXQsg== 142323
+INC90LXQstC+0Lc= 142324
+INC90LXQstC+0LfQvNC+0LbQvdC+ 142325
+IGRvc3TEmXBueQ== 142326
+INi62KfZhA== 142327
+INi62KfZhNio 142328
+IGJlenBpZWN6ZcWEc3Q= 142329
+IGJlenBpZWN6ZcWEc3R3YQ== 142330
+5YiG44GL44KL 142331
+IEbDvGhydW5n 142332
+4LiB4Li14LmJ 142333
+Z2Vtw6TDnw== 142334
+4LiK4LmI4Lin4LiH4LmA4Lin4Lil4Liy 142335
+IOyasOumrOuCmA== 142336
+IOyasOumrOuCmOudvA== 142337
+44Gl44GP44KK 142338
+INin2YTZhdiz2YQ= 142339
+INin2YTZhdiz2YTYrdip 142340
+IGxpYmVydMOp 142341
+0LrQu9GO0YfQtdC90LjQtQ== 142342
+IHphbcOzdw== 142343
+IHphbcOzd2llbmlh 142344
+4Lij4LiW4LmE4Lif 142345
+2KPZgdmE 142346
+2KPZgdmE2KfZhQ== 142347
+2YXYsdin2Kw= 142348
+2YXYsdin2KzYudip 142349
+IOu5hOq1kA== 142350
+INin2YTYqtin2Kg= 142351
+INin2YTYqtin2KjYudip 142352
+IOunjOuCmA== 142353
+INCx0YPQvA== 142354
+INCx0YPQvNCw0LM= 142355
+IGfDqW5lcm8= 142356
+IOyemOuquw== 142357
+157XpNeV16jXmA== 142358
+6LK344GE54mp 142359
+INmE2K/ZitmD 142360
+INec16LXmdeq 142361
+INec16LXmdeq15nXnQ== 142362
+IHPFgmFi 142363
+INC/0YDQtdC00YHRgtCw0LLQu9GP 142364
+44K/44Kk44OI 142365
+44K/44Kk44OI44Or 142366
+2YXYtQ== 142367
+2YXYtdi32YE= 142368
+2YXYtdi32YHZiQ== 142369
+IGRpZmZpY3VsdMOp 142370
+44OG44Kj44OW 142371
+IHBld25vxZtjaQ== 142372
+IHBld25vxZtjacSF 142373
+IOustOyKqA== 142374
+2KXYsdiz 142375
+2KXYsdiz2KfZhA== 142376
+INC00LDQu9GM 142377
+INC00LDQu9GM0YjQtQ== 142378
+INec16DXoQ== 142379
+INec16DXodeV16o= 142380
+4Lir4Lih4Li54LmI4Lia4LmJ4Liy4LiZ 142381
+157Xodee15vXmQ== 142382
+2KPYs9mE2YjYqA== 142383
+IHp3xYI= 142384
+IHp3xYJhcw== 142385
+IHp3xYJhc3pj 142386
+IHp3xYJhc3pjemE= 142387
+INC/0YDQtdC2 142388
+INC/0YDQtdC20LTQtQ== 142389
+INC+0YDQs9Cw0L3QuNC30LDRhtC40Y8= 142390
+IGTDtm5lbWlu 142391
+IGTDtm5lbWluZGU= 142392
+IOG7pg== 142393
+IOG7pnk= 142394
+5LiL44GS 142395
+INC/0L7RgdC70LXQtNC90LjQtQ== 142396
+IGfDvG5l 142397
+IGfDvG5lxZ8= 142398
+INeQ15bXqA== 142399
+INeQ15bXqNeX15k= 142400
+44Gn44GC44KN44GG 142401
+INmG2YI= 142402
+INmG2YLYp9i3 142403
+5q2j44GX44GE 142404
+INGA0LXQsw== 142405
+INGA0LXQs9C40L7QvdCw 142406
+IEbDtnJkZXI= 142407
+6rK97JiB 142408
+ZMSxa2xhcg== 142409
+ZMSxa2xhcsSxbsSx 142410
+dHJ6eW1hxIc= 142411
+2KPYtNmD 142412
+2KPYtNmD2KfZhA== 142413
+15TXqteQ 142414
+15TXqteQ157XlA== 142415
+4LiX4Liz4LmD4Lir4LmJ4LmA4LiB4Li04LiU 142416
+IEdlYsOk 142417
+IEdlYsOkdWRl 142418
+INCh0LXRgNCz 142419
+INCh0LXRgNCz0LXQuQ== 142420
+INC30LTQvtGA0L7Qsg== 142421
+INC30LTQvtGA0L7QstGM0Y8= 142422
+IHLDo2k= 142423
+INC/0YDQtdC00YPRgQ== 142424
+INC/0YDQtdC00YPRgdC80L7RgtGA 142425
+INC/0YDQtdC00YPRgdC80L7RgtGA0LXQvQ== 142426
+INeU16bXmdeR 142427
+INeU16bXmdeR15XXqNeZ 142428
+IGTDqXNpcg== 142429
+INC90L7Rhw== 142430
+INC90L7Rh9GM 142431
+bcO2Z2xpY2hrZWl0ZW4= 142432
+INeQ15fXqNeV16DXmded 142433
+IHNvaXLDqWU= 142434
+IE5o4bqtbg== 142435
+2ao= 142436
+4Lib4Lij4Liw4Lin4Lix4LiV4Li04Lio4Liy4Liq4LiV4Lij4LmM 142437
+6rWQ7Ya1 142438
+INij2K7Zig== 142439
+IGTDqWNpZA== 142440
+IGTDqWNpZMOp 142441
+IHd5amE= 142442
+IHd5amHFm25p 142443
+IOC4quC4tA== 142444
+IOC4quC4tOC4hw== 142445
+IOC4quC4tOC4h+C4q+C4sg== 142446
+IOC4quC4tOC4h+C4q+C4suC4hOC4oQ== 142447
+4LmB4Lit4Lij4LmM 142448
+4Lir4LiZ4LmJ4Liy4LiI4Lit 142449
+16HXqteo 142450
+IOq2 142451
+IOq2jA== 142452
+IOq2jOumrA== 142453
+cGzDpHR6ZQ== 142454
+2KjYt9mE 142455
+6rG07ISk 142456
+INeQ15nXnteZ 142457
+INeQ15nXnteZ15nXnA== 142458
+44G9 142459
+2KrYsdin2Ks= 142460
+15DXnNeZ157Xldeq 142461
+IGRpc3BvbsOtdmVpcw== 142462
+IHphbGU= 142463
+IHphbGXFvHk= 142464
+4Lib4Lij4Liw4LiK4Liy4Liq4Lix4Lih4Lie4Lix4LiZ4LiY4LmM 142465
+IMWad2lhdA== 142466
+IHBvcsOzd24= 142467
+IHBvcsOzd25h 142468
+INec15jXldeR16o= 142469
+15TXltee16DXlA== 142470
+INeb16rXldem15DXlA== 142471
+INeR16fXnA== 142472
+INeR16fXnNeV16o= 142473
+INC+0YLQutGA 142474
+INC+0YLQutGA0YvQstCw 142475
+44OR44Ov44O8 142476
+67+Q66eM 142477
+INCy0YHRjw== 142478
+INCy0YHRj9C6 142479
+44Go44Gq44Gj44Gm44GE44KL 142480
+IGdp4bqtbg== 142481
+INC+0LrRgNGD 142482
+INC+0LrRgNGD0LbQsA== 142483
+INC+0LrRgNGD0LbQsNGO0Yk= 142484
+IFVuaXZlcnNpdMOkdA== 142485
+INGA0L7Qtg== 142486
+INGA0L7QttC0 142487
+INGA0L7QttC00LXQvdC40Y8= 142488
+2K7ZitmE 142489
+INC60L7QvNC/0LDQvdC40Lk= 142490
+INGA0LDQt9C70LjRh9C90YvQtQ== 142491
+INCm0LXQvdCw 142492
+16DXmdeV15Y= 142493
+16DXmdeV15bXnA== 142494
+16DXmdeV15bXnNeY16g= 142495
+IOqzteqwhA== 142496
+IOqwnOuFkA== 142497
+bGFuZMSxcm1h 142498
+INGD0LTQsNC70LXQvQ== 142499
+4Lie4Lix4LiB4Lic 142500
+4Lie4Lix4LiB4Lic4LmI4Lit4LiZ 142501
+IHByb3RlY2Npw7Nu 142502
+IGLFgg== 142503
+IGLFgsSZZA== 142504
+w4g= 142505
+IO2WieuztQ== 142506
+IMWfw7w= 142507
+IMWfw7xwaGU= 142508
+IO2U 142509
+IO2UvA== 142510
+IO2UvO2VtA== 142511
+IOuLpOultA== 142512
+4LmE4Lih4LmI4LmA4LiB4Li04LiZ 142513
+44G/44Gq 142514
+44G/44Gq44GV44KT 142515
+INC/0L7RgtGA0LXQsQ== 142516
+INC/0L7RgtGA0LXQsdC40YLQtdC7 142517
+INin2YTZg9mE2KfZhQ== 142518
+7JWE67KE 142519
+7JWE67KE7KeA 142520
+44KS5L2/44Gj44Gf 142521
+IGLhu6Vp 142522
+INC/0L7RgtC10YA= 142523
+INC/0L7RgtC10YDRjw== 142524
+INii2YTYp9mB 142525
+INC90LDRgdGC0L7Rj9GJ0LXQtQ== 142526
+44GP44Gq44KK44G+44GX44Gf 142527
+Y2x1c8Ojbw== 142528
+44Kz44OU44O8 142529
+16bXpNeZ 142530
+16bXpNeZ15nXlA== 142531
+2K7ZhNin 142532
+2K7ZhNin2LU= 142533
+4Lil4LmJ4Liz 142534
+44Ov44Kk44Oz 142535
+IOC4oeC4teC4meC4sg== 142536
+IOC4oeC4teC4meC4suC4hOC4oQ== 142537
+2LTYrti1 142538
+2LTYrti12YrYp9iq 142539
+INeW16c= 142540
+INeW16fXlden 142541
+15nXmdem 142542
+15nXmdem15I= 142543
+6ICD44GI5pa5 142544
+IMO8csO8bsO8 142545
+INC40YHQv9C+0Ls= 142546
+INC40YHQv9C+0LvQvdC4 142547
+IGNvbXBhw7Flcm8= 142548
+16fXpteU 142549
+157Xoteg15nXpw== 142550
+2YXYrdmF2K8= 142551
+IGPDoW1hcmE= 142552
+INC/0LXQtA== 142553
+INC/0LXQtNCw0LM= 142554
+INC/0LXQtNCw0LPQvtCz 142555
+0LzQsNGA 142556
+0LzQsNGA0Lo= 142557
+15TXqteg15LXkw== 142558
+IOyGjOqwnA== 142559
+IGNvbXVuaXTDoA== 142560
+6rOk 142561
+IE5nw6Bp 142562
+4Liq4LiH4Lia 142563
+IG1pZXN6a2HFhGPDs3c= 142564
+INmG2YfYp9im2Yo= 142565
+aXZpdMOp 142566
+INC40LTQtQ== 142567
+INC40LTQtdCw0LvRjNC9 142568
+INij2LPYqNmI2Lk= 142569
+INeZ16LXnA== 142570
+INec16jXkNep 142571
+INec16jXkNep15XXoNeU 142572
+INC30LDQv9C40YHQuA== 142573
+INC60L7RgNC/0YPRgQ== 142574
+4Lin4LiH4Lio 142575
+4Lin4LiH4Lio4LmM 142576
+INCU0Lw= 142577
+INCU0LzQuNGC 142578
+INCU0LzQuNGC0YA= 142579
+IGvDtm5udA== 142580
+IGLDtmxnZXM= 142581
+IGLDtmxnZXNpbmRl 142582
+15vXmdeb 142583
+15vXmdeb16g= 142584
+INin2YTYpdir2YY= 142585
+INin2YTYpdir2YbZitmG 142586
+IG5n4buZ 142587
+7Lmg 142588
+2K/Ysdin2Kw= 142589
+IHVkYQ== 142590
+IHVkYcWCbw== 142591
+7LqQ 142592
+2KjYsdmG2KfZhdis 142593
+INGB0YPQtNC10LE= 142594
+INGB0YPQtNC10LHQvQ== 142595
+IHp1bsOkY2hzdA== 142596
+IEVkdWNhY2nDs24= 142597
+44Go44Gq44Gj44Gm44GE44G+44GZ 142598
+INeU15DXnteZ16rXmQ== 142599
+IMSwbnQ= 142600
+IMSwbnRlcm5ldA== 142601
+IGNhxYJlZ28= 142602
+44OX44Oq44Oz 142603
+2KXYqNiv 142604
+2KXYqNiv2KfYuQ== 142605
+INC/0L7RgNGC0LDQuw== 142606
+4LmC4LiV4LmJ 142607
+INeU16fXqdeV16g= 142608
+0L/Qu9C+0LQ= 142609
+INmF2K8= 142610
+INmF2K/YsdmK2K8= 142611
+157Xodei15PXlA== 142612
+INi02YrYpg== 142613
+INi02YrYptin 142614
+4LiB4LmI4Lit4Liq4Lij4LmJ4Liy4LiH 142615
+IOywuOqzoA== 142616
+4LmA4LiX4Lij 142617
+4LmA4LiX4Lij4LiU 142618
+INeR157Xp9eo15nXnQ== 142619
+IGLDonQ= 142620
+IGLDonRpbWVudA== 142621
+5ZG844Gz 142622
+57Sg5pW1 142623
+57Sg5pW144Gq 142624
+cHJ6ZWRzacSZYmlvcnN0 142625
+cHJ6ZWRzacSZYmlvcnN0dw== 142626
+INeg16rXldeg15nXnQ== 142627
+15fXnNeV150= 142628
+4Lij4Lin4Lii 142629
+2YXZiNi22YjYuQ== 142630
+INGB0L7QsdGA0LDQvQ== 142631
+0LLQtdC00YPRiQ== 142632
+INGC0LXQsNGC 142633
+INGC0LXQsNGC0YA= 142634
+bWV5ZQ== 142635
+bWV5ZWNlxJ9p 142636
+IHBpZW5pxIU= 142637
+IHBpZW5pxIVk 142638
+IHBpZW5pxIVkemU= 142639
+0YDQtdC30LjQtNC10L3Rgg== 142640
+2K3Ytdix 142641
+7Jil 142642
+4LmA4Lii4Li34Lit4LiZ 142643
+INGD0L3QuA== 142644
+INGD0L3QuNCy0LXRgA== 142645
+INGD0L3QuNCy0LXRgNGB 142646
+INGD0L3QuNCy0LXRgNGB0LjRgtC10YI= 142647
+INin2YTYsdit 142648
+INin2YTYsdit2YXZhg== 142649
+INGC0LXRhdC90L7Qu9C+0LM= 142650
+INGC0LXRhdC90L7Qu9C+0LPQuNC4 142651
+7JeQ64SI 142652
+7JeQ64SI7KeA 142653
+IO2VrQ== 142654
+IO2VreyDgQ== 142655
+4LiY4Liy 142656
+4LiY4Liy4LiV4Li4 142657
+IEVzcGHDsW9s 142658
+15PXktep 142659
+IOq1iQ== 142660
+IOq1ieyepQ== 142661
+IOq1ieyepe2eiA== 142662
+IMWCYXQ= 142663
+IMWCYXR3bw== 142664
+IGvhu4tjaA== 142665
+2KXYsg== 142666
+2KXYstin2YTYqQ== 142667
+INC00LXQudGB0YLQstC40LU= 142668
+IHNhxJ9sYXlhbg== 142669
+4Liq4Li44LiU4Lii4Lit4LiU 142670
+IHpvc3RhxIc= 142671
+IGRpc3BvbsOtdmVs 142672
+77qN 142673
+dmVyc3TDpG5k 142674
+dmVyc3TDpG5kbGljaA== 142675
+dHdvcg== 142676
+dHdvcnp5xIc= 142677
+2LnYrNiy 142678
+4LmA4LiC4LmJ4Lih 142679
+4Lii4LmI4Lit4Lih 142680
+IHN0cmF0w6ln 142681
+IHN0cmF0w6lnaWU= 142682
+4Lic4Lil4LmE4Lih4LmJ 142683
+IOqwgeyihQ== 142684
+INmF2YjYpw== 142685
+INmF2YjYp9i2 142686
+INmF2YjYp9i22YrYuQ== 142687
+2KfYrdiq2Kw= 142688
+2KfYrdiq2KzYp9is 142689
+IOG6pA== 142690
+IOG6pG4= 142691
+157Xntep15zXlA== 142692
+IMWfZWtpbA== 142693
+157Xl9ec 142694
+157Xl9ec15XXqg== 142695
+IOC4mA== 142696
+IOC4mOC4seC4mQ== 142697
+IOC4mOC4seC4meC4p+C4sg== 142698
+IOC4mOC4seC4meC4p+C4suC4hOC4oQ== 142699
+IOyLpOygnA== 142700
+IOyLpOygnOuhnA== 142701
+7KSR7JWZ 142702
+642U6528 142703
+INGI0LjRgA== 142704
+INGI0LjRgNC+0LrQvg== 142705
+IHNvbHVjacOzbg== 142706
+4Lin4Liy4LiH4LmB4Lic4LiZ 142707
+15DXldeY15XXng== 142708
+15DXldeY15XXnteY15k= 142709
+INGA0LXRgdGC 142710
+INGA0LXRgdGC0L7RgA== 142711
+INGA0LXRgdGC0L7RgNCw0L0= 142712
+6424 142713
+0YLRgNCw0LQ= 142714
+0YLRgNCw0LTQuA== 142715
+0YLRgNCw0LTQuNGG0LjQvtC9 142716
+0YLRgNCw0LTQuNGG0LjQvtC90L0= 142717
+4Lih4Liw4LmA4Lij4LmH 142718
+4Lih4Liw4LmA4Lij4LmH4LiH 142719
+4LmC4Liq 142720
+IG9sbWFzxLFuxLE= 142721
+157Xldeh16g= 142722
+INC+0YLQvdC+0YjQtdC90LjQuA== 142723
+IOqwgOuKpeyEsQ== 142724
+IHl1aw== 142725
+IHl1a2FyxLE= 142726
+7IaU 142727
+INGB0YQ= 142728
+INGB0YTQtdGA0LU= 142729
+INen15XXpA== 142730
+44Kx44O844I= 142731
+44Kx44O844Kt 142732
+4oCV4oCV 142733
+INin2YTYo9mE2YU= 142734
+INin2YTYo9mE2YXYp9mG2Yo= 142735
+4bqiTg== 142736
+16rXldeb16DXmdeV16o= 142737
+INGB0YPRidC10YHRgtCy0YPQtdGC 142738
+5oiR44CF 142739
+INin2YTYtdin2K/YsQ== 142740
+IFRy4buNbmc= 142741
+INCw0LQ= 142742
+INCw0LTQvNC40L3QuNGB0YI= 142743
+INCw0LTQvNC40L3QuNGB0YLRgNCw 142744
+INCw0LTQvNC40L3QuNGB0YLRgNCw0YbQuA== 142745
+INC00YDRg9Cz0LjQvNC4 142746
+0YHQv9C10Yg= 142747
+2LnZhNin2YXYp9iq 142748
+INCw0LE= 142749
+INCw0LHRgdC+0Ls= 142750
+INCw0LHRgdC+0LvRjtGC 142751
+INCw0LHRgdC+0LvRjtGC0L3Qvg== 142752
+4Lik4LiU4Li5 142753
+w6l0cg== 142754
+w6l0cmFuZ2Vy 142755
+0L3Rj9GC0Lg= 142756
+0L3Rj9GC0LjQtQ== 142757
+16LXldeg 142758
+16LXldeg16k= 142759
+INmC2KfYpg== 142760
+INmC2KfYptmE2Kc= 142761
+INC80LDRgQ== 142762
+INC80LDRgdC70L4= 142763
+44OJ44Kk 142764
+44OJ44Kk44OE 142765
+5b+F6KaB44GM44GC44KK44G+44GZ 142766
+157XldeW15nXkA== 142767
+157XldeW15nXkNeV158= 142768
+IE5nb+G6oWk= 142769
+IGvDqm5o 142770
+4LiB4Liy4Lij4Lit4Lit4LiB4LmB4Lia4Lia 142771
+157XpNen 142772
+157XpNen15M= 142773
+2YXZhtin2LI= 142774
+2YXZhtin2LLZhA== 142775
+67ew 142776
+7Zek 142777
+2YXZh9in2LHYp9iq 142778
+IHByb3ByacOpdMOp 142779
+16TXkteZ16nXlA== 142780
+0YfRgA== 142781
+0YfRgNC10LY= 142782
+0YfRgNC10LbQtNC10L0= 142783
+15TXldem15DXlA== 142784
+2K3Zg9mK2YU= 142785
+IO2ZiA== 142786
+IO2ZiO2OmOydtOyngA== 142787
+5Y6z 142788
+5Y6z44GX44GE 142789
+16LXnteT15Q= 142790
+IEF1w59lbg== 142791
+2LPZiNih 142792
+67mI 142793
+INmI2K4= 142794
+INmI2K7Yp9i12Kk= 142795
+0LjQvdGC0LXRgA== 142796
+0LjQvdGC0LXRgNC10YE= 142797
+6Ie044GX44G+44GZ 142798
+IGjDvGvDvG0= 142799
+4LmE4LiC4Lih4Lix4LiZ 142800
+IGRhdnJhbg== 142801
+IGRhdnJhbsSxxZ8= 142802
+4LmA4LiV4Li14Lii4LiH 142803
+0LLRgNC10Lw= 142804
+0LLRgNC10LzQtdC90L3Qvg== 142805
+4LmA4LiX4Lio4LiB4Liy 142806
+4LmA4LiX4Lio4LiB4Liy4Lil 142807
+5byV44Gj 142808
+5byV44Gj6LaK44GX 142809
+15DXqNeV15c= 142810
+15DXqNeV15fXqg== 142811
+4LmA4Lin4Li0 142812
+4LmA4Lin4Li04Lij4LmM 142813
+4Lit4Lii4LmI4Liy4LiH4Lij4Lin4LiU4LmA4Lij4LmH4Lin 142814
+IOyXrO2WiQ== 142815
+INGA0LDQvdGM 142816
+INGA0LDQvdGM0YjQtQ== 142817
+IHpvYm93 142818
+IHpvYm93acSF 142819
+IHpvYm93acSFeg== 142820
+INeV15vXnteV15HXnw== 142821
+INin2YTZhdmH 142822
+INin2YTZhdmH2YbZig== 142823
+44Ki44K4 142824
+44Ki44K444Ki 142825
+67Cp7Iah 142826
+4Lit4Lit4LiB4LiB4Liz4Lil4Lix4LiH 142827
+4Lit4Lit4LiB4LiB4Liz4Lil4Lix4LiH4LiB4Liy4Lii 142828
+YW3DqWxp 142829
+YW3DqWxpb3Jlcg== 142830
+5b2T44Gf44KK5YmN 142831
+IHJlZ2VsbQ== 142832
+IHJlZ2VsbcOkw59pZw== 142833
+44GK5Ys= 142834
+44GK5Yun 142835
+44GK5Yun44KB 142836
+IG3GsOG7nWk= 142837
+2KjYsdmF2Kw= 142838
+IE5hdMO8cmxpY2g= 142839
+IETFqW5n 142840
+INin2YTYsdis2KfZhA== 142841
+IHRow6lw 142842
+IG9sbXXFn3R1cg== 142843
+157Xldeh15nXp9eU 142844
+ZsOkbGxl 142845
+7KO87YOd 142846
+INin2YTZgdix2LU= 142847
+IG5handpxJlrcw== 142848
+IG5handpxJlrc3p5 142849
+IMOnYcSf 142850
+IMOnYcSfcsSx 142851
+7Lig 142852
+IHbDrWN0 142853
+IHbDrWN0aW1h 142854
+INGB0L7QstC10YDRiNC10L0= 142855
+15TXmdeZ16rXmQ== 142856
+4LmA4LiU4Li1 142857
+4LmA4LiU4Li14LmL 142858
+4LmA4LiU4Li14LmL4Lii4Lin 142859
+w7x5w7w= 142860
+INC00L7Qvw== 142861
+INC00L7Qv9C+0LvQvQ== 142862
+INC00L7Qv9C+0LvQvdC40YLQtdC70YzQvdC+ 142863
+4LmB4LiV4LiB4LiV4LmI4Liy4LiH4LiB4Lix4LiZ 142864
+IMOhbA== 142865
+IMOhbGJ1bQ== 142866
+4Lib4Lij4Liw4LiI4Liz4Lib4Li1 142867
+INGE0LXQtNC10YA= 142868
+INGE0LXQtNC10YDQsNC70YzQvQ== 142869
+IG9ic8WC 142870
+IG9ic8WCdWdp 142871
+4LmA4Lij4Li34LmI 142872
+4LmA4Lij4Li34LmI4Lit4Lii 142873
+4LmA4Lij4Li34LmI4Lit4Lii4LmG 142874
+64GM 142875
+IG5naMOsbg== 142876
+IEJhxZ9rYW5sxLHEn8Sx 142877
+2KrYo9iz2Yo= 142878
+2KrYo9iz2YrYsw== 142879
+INeR15HXlden16g= 142880
+INei15HXldeT15XXqg== 142881
+INio2LXZiNix2Kk= 142882
+44KP44GR44Gn44Gv44Gq44GE 142883
+ZsO8aHJlcg== 142884
+44K544Kt 142885
+44K544Kt44Or 142886
+INin2YTZgti2 142887
+INin2YTZgti22YrYqQ== 142888
+INC00L7Qu9C20L3QvtGB0YI= 142889
+2YHYp9ix2YI= 142890
+IGNvbWXDp291 142891
+IG9yZ2FuaXPDqQ== 142892
+IHh1w6Ju 142893
+INGB0L7QvtCx0YnQsNC10YI= 142894
+INC/0YDQuNC0 142895
+INC/0YDQuNC00LXRgtGB0Y8= 142896
+VMOcUks= 142897
+44Os44O844K344On44Oz 142898
+S2jDtG5n 142899
+2KfYs9iq2YE= 142900
+2KfYs9iq2YHYp9iv2Kk= 142901
+5LiK44GM44Gj44Gm 142902
+IHVtaWU= 142903
+IHVtaWVqxJk= 142904
+IHVtaWVqxJl0bg== 142905
+IHVtaWVqxJl0bm/Fm2Np 142906
+64K4 142907
+4LmA4LiZ4Lit4Lij4LmM 142908
+15PXldeV15c= 142909
+w61zaW1v 142910
+ScOK 142911
+ScOKTg== 142912
+IGFsY2Fuw6c= 142913
+IOC4leC4uA== 142914
+IOC4leC4uOC4peC4sg== 142915
+IOC4leC4uOC4peC4suC4hOC4oQ== 142916
+16nXnNeY15XXnw== 142917
+IMOpbMOo 142918
+IMOpbMOodmVz 142919
+IMSRdQ== 142920
+IMSRdeG7lWk= 142921
+INij2YE= 142922
+INij2YHYsdmK 142923
+INij2YHYsdmK2YLZig== 142924
+INij2YHYsdmK2YLZitin 142925
+44KS5o6i44GZ 142926
+INC/0YDQtdC00LvQvtC20LXQvdC40Y8= 142927
+2KzYp9iv 142928
+INGF0L7RgtGM 142929
+0YHQsNC7 142930
+0YHQsNC70L7QvQ== 142931
+4Lib4Lij4Liw4LmA4Lih 142932
+4Lib4Lij4Liw4LmA4Lih4Li04LiZ 142933
+44Kt44OD44OB 142934
+44Kt44OD44OB44Oz 142935
+15HXk9eZ16fXldeq 142936
+IGNow7k= 142937
+IGNow7lh 142938
+0JLQuNC00LU= 142939
+0JLQuNC00LXQvg== 142940
+0LjRgNC+0LLQutCw 142941
+INGF0L7RgtC40YLQtQ== 142942
+IHNww6ljaWZpcXVl 142943
+4Lij4Liq4LiK4Liy4LiV4Li0 142944
+6L6844KT44Gg 142945
+5Ly444Gz 142946
+15TXptec15fXqg== 142947
+44Gp44Gu44KI44GG44Gr 142948
+2LPYudin2K/YqQ== 142949
+INC70LjQtA== 142950
+INC70LjQtNC10YA= 142951
+4Lih4LiH 142952
+4Lih4LiH4LiE4Lil 142953
+2K3Yp9mF2YQ= 142954
+4Lir4Lil4Li44LiU 142955
+4Lit4Lii4LmI4Liy4LiH4LiV4LmI4Lit 142956
+4Lit4Lii4LmI4Liy4LiH4LiV4LmI4Lit4LmA4LiZ4Li34LmI4Lit4LiH 142957
+44GV44Gb44Gm6aCC 142958
+2KrYs9mI2Yo= 142959
+2KrYs9mI2YrZgg== 142960
+IGHFn2HEn8SxZA== 142961
+IGHFn2HEn8SxZGFraQ== 142962
+INGG0LXQu9GM 142963
+INGG0LXQu9GM0Y4= 142964
+IEFyYcWfdMSxcm1h 142965
+4LiC4Lix4Lia4Lij4LiW 142966
+2YfYsNmH 142967
+4Lil4LiH4LiX4Liw 142968
+4Lil4LiH4LiX4Liw4LmA4Lia 142969
+4Lil4LiH4LiX4Liw4LmA4Lia4Li14Lii4LiZ 142970
+2KrZg9in2YXZhA== 142971
+IGNpbw== 142972
+IGNpb8Oo 142973
+44Gm44GK44GP 142974
+INin2YTYtdit2YHZig== 142975
+IO2KueyglQ== 142976
+0L/QvtC70L3QuNGC0Yw= 142977
+44KT44GY44KD44Gq44GE 142978
+44KT44GY44KD44Gq44GE44GL 142979
+INin2YTYrNmH 142980
+INin2YTYrNmH2KfYqg== 142981
+INGD0YHQv9C10YjQvdC+ 142982
+INCy0L7Qug== 142983
+INCy0L7QutGA0YPQsw== 142984
+INGB0LjRgtGD0LDRhtC40Y8= 142985
+INeU15DXnteo 142986
+INeU15DXnteo15nXpw== 142987
+INeU15DXnteo15nXp9eQ15k= 142988
+157XkteW 142989
+157XkteW15nXnw== 142990
+INCw0LrRgtGD 142991
+INCw0LrRgtGD0LDQu9GM0L0= 142992
+w6l0YQ== 142993
+w6l0YWlz 142994
+IG1vZ8WCYQ== 142995
+INGC0L7Rh9C60Lg= 142996
+INee15TXntei 142997
+INee15TXntei16jXm9eq 142998
+4Lih4Li14Lib4Lij4Liw4Liq4Li04LiX4LiY4Li04Lig4Liy4Lie 142999
+15nXqNeZ15PXlA== 143000
+15LXqNee16A= 143001
+15LXqNee16DXmdeU 143002
+INCz0LvQsNCy 143003
+INCz0LvQsNCy0L3QvtC1 143004
+IOuvuOuemA== 143005
+INeg15vXldeg15Q= 143006
+INmI2LfZhtmK 143007
+b3Bwb3J0 143008
+b3Bwb3J0dW5pdMOg 143009
+IGjhu6d5 143010
+INmE2KrYrQ== 143011
+INmE2KrYrdmC2YrZgg== 143012
+IMOzcmc= 143013
+IMOzcmfDo28= 143014
+44K544OU 143015
+44K544OU44O844OJ 143016
+IMO2bsO8 143017
+IMO2bsO8bmU= 143018
+2YXYudin2YXZhA== 143019
+16nXnteZ16jXlA== 143020
+INCy0LXRgdGM0LzQsA== 143021
+IHdpxJlrc3pv 143022
+IHdpxJlrc3pvxZvEhw== 143023
+INin2LPYqtix2KfYqtmK2Kw= 143024
+INin2LPYqtix2KfYqtmK2KzZitip 143025
+INmB2KU= 143026
+INmB2KXYsNin 143027
+4LmA4LiK4Li34LmI4Lit4Lih 143028
+4LmA4LiK4Li34LmI4Lit4Lih4LiV4LmI4Lit 143029
+INec16TXqA== 143030
+INec16TXqNeY15nXnQ== 143031
+2YXYttmK 143032
+IEdlcsOnZWs= 143033
+IMOnb2N1a2xhcsSxbg== 143034
+2YjYq9in2KbZgg== 143035
+INmF2LPYp9ih2Ys= 143036
+IHVudGVyc3TDvHR6dA== 143037
+IHByw6lzdA== 143038
+IHByw6lzdGFtbw== 143039
+INCg0LDQt9C80LXRgA== 143040
+IMWfZWtlcg== 143041
+IHPDqWN1bG8= 143042
+15HXlNeZ16g= 143043
+2LTZh9mI2LE= 143044
+IOC4reC4teC4gQ== 143045
+IOC4reC4teC4geC4l+C4seC5ieC4hw== 143046
+IGxsZWfDsw== 143047
+4Lio4Li04Lil4Lib4Liw 143048
+5oiR44GM 143049
+5oiR44GM5a62 143050
+2LnZgtmI 143051
+2LnZgtmI2KjYp9iq 143052
+IEbDpGxsZQ== 143053
+IHPFgnXFvA== 143054
+IHPFgnXFvGI= 143055
+INin2YTYrdmC2YjZgg== 143056
+INC/0LvQuNGC 143057
+INC40L3QvtGB0YI= 143058
+INC40L3QvtGB0YLRgNCw0L0= 143059
+INC40L3QvtGB0YLRgNCw0L3QvQ== 143060
+4LmD4LiZ4LiC4LiT4Liw4LiX4Li14LmI 143061
+44Kr44OG 143062
+44Kr44OG44K0 143063
+44Kr44OG44K044Oq 143064
+4Lit4Li04Liq 143065
+4Lit4Li04Liq4Lij4Liw 143066
+4LmA4Lic4Lii4LmB 143067
+4LmA4Lic4Lii4LmB4Lie4Lij 143068
+4LmA4Lic4Lii4LmB4Lie4Lij4LmI 143069
+44GK44GE 143070
+44GK44GE44GX44GE 143071
+2KfYs9iq2YLZhA== 143072
+2KfYs9iq2YLZhNin2YQ= 143073
+2KrYrdi2 143074
+2KrYrdi22YrYsQ== 143075
+5Yqp44GR 143076
+2YXYsdin2YHZgg== 143077
+INeT15XXqA== 143078
+INeT15XXqNep 143079
+157XqteZ15nXl9eh 143080
+16HXmdeb 143081
+16HXmdeb15XXnQ== 143082
+7YyM7Yq4 143083
+IHd5xZs= 143084
+IHd5xZt3 143085
+IHd5xZt3aWV0 143086
+IHd5xZt3aWV0bA== 143087
+INin2YTYp9mG2LPYp9mG 143088
+IFN0cmHDn2Vu 143089
+77ys 143090
+44Gr5Z+6 143091
+44Gr5Z+644Gl 143092
+IGNhcMOtdHVsbw== 143093
+4Lil4Li44Lii 143094
+INeU157Xp9em15XXoteZ 143095
+44GC44KL56iL5bqm 143096
+4bui 143097
+INin2YTZhNin 143098
+INin2YTZhNin2LLZhdip 143099
+5pWZ44GI 143100
+INeo16nXkNeZ 143101
+0LfQsNCy 143102
+0LfQsNCy0LjRgQ== 143103
+0LfQsNCy0LjRgdC40Lw= 143104
+4Lib4Lix4LiI4LiI4Lix4Lii 143105
+4LmA4LiL4Lil 143106
+4LmA4LiL4Lil4Lil4LmM 143107
+IGRpZmbDqXJlbmNl 143108
+IEFsdMSxbg== 143109
+INC60YDQsNC5 143110
+INC60YDQsNC50L3QtQ== 143111
+INC30LvQvg== 143112
+IGfDvG7DvG3DvHo= 143113
+INC90LDRgtGD0YA= 143114
+INC90LDRgtGD0YDQsNC70YzQvQ== 143115
+15LXldec16nXmded 143116
+INC60LDRgtC10LPQvtGA 143117
+INC60LDRgtC10LPQvtGA0LjQuA== 143118
+INC30L3QsNC6 143119
+4LiB4LmI4Lit4LiZ4Lir4LiZ4LmJ4Liy 143120
+4LiB4LmI4Lit4LiZ4Lir4LiZ4LmJ4Liy4LiZ4Li14LmJ 143121
+INmF2YbYqg== 143122
+INmF2YbYqtiu2Kg= 143123
+44Ob44O844Or 143124
+INC10LLRgNC+ 143125
+4Liq4Lin 143126
+4Liq4Lin4Lih 143127
+IOychOybkA== 143128
+IOychOybkOuLmA== 143129
+INin2YTYrdmI2Ks= 143130
+INin2YTYrdmI2KvZig== 143131
+INGB0L7QtNC10YDQttC40YI= 143132
+44OV44Kh44OD44K344On44Oz 143133
+IOC4geC4seC4mQ== 143134
+IOC4geC4seC4meC4og== 143135
+IOC4geC4seC4meC4ouC4suC4ouC4mQ== 143136
+44Kq44Oq 143137
+44Kq44Oq44K4 143138
+44Kq44Oq44K444OK44Or 143139
+INCx0YDQtdC90LQ= 143140
+44KS5oyB44Gj44Gm44GE44KL 143141
+IGludmVyc2nDs24= 143142
+IOqwlg== 143143
+IOqwluqzoA== 143144
+IG5vdml0w6A= 143145
+6rSA6rSR 143146
+IOC4nuC4pOC4qQ== 143147
+IOC4nuC4pOC4qeC4oOC4sg== 143148
+IOC4nuC4pOC4qeC4oOC4suC4hOC4oQ== 143149
+15XXqNeX15nXnQ== 143150
+15vXnNeV15w= 143151
+IG5n4bqhYw== 143152
+15nXmdep 143153
+15nXmdep15XXkQ== 143154
+ZsOkbGw= 143155
+ZsOkbGxpZw== 143156
+INGC0YDQtdCx0YPQtdGC0YHRjw== 143157
+IGNhcsOh 143158
+IGNhcsOhY3Rlcg== 143159
+IHByaW5jw61waW8= 143160
+IMWCYXo= 143161
+IMWCYXppZW4= 143162
+IMWCYXppZW5r 143163
+IGdpw6Nu 143164
+0YHRgtGA0LDQuNCy0LA= 143165
+2YXYs9in2Kg= 143166
+2YXYs9in2KjZgtip 143167
+4LmA4LiE4Lij4Li34LmI4Lit4LiH4LiU4Li34LmI4Lih 143168
+2KrYsdmD2YrYqA== 143169
+dm9sdcOnw6Nv 143170
+INCf0L7Rhw== 143171
+INCf0L7Rh9C10Lw= 143172
+INCf0L7Rh9C10LzRgw== 143173
+0LrQsNC30LDQu9C+0YHRjA== 143174
+INC/0YDQuNC80LXQvdC10L3QuNGP 143175
+4LmA4LiX4Li14Lii4Lih 143176
+7YyU 143177
+4LiC4LmJ4Lit4LmA4Liq4LiZ4Lit 143178
+4Lib4Lix4LiN4LiN4Liy 143179
+INC+0LHRg9GH 143180
+INC+0LHRg9GH0LXQvdC40Y8= 143181
+INGB0LXRgNC4 143182
+INGB0LXRgNC40LDQuw== 143183
+IGluZ2zDqXM= 143184
+INmE2YPYsdip 143185
+INeY15w= 143186
+INeY15zXpNeV158= 143187
+IOygkQ== 143188
+IOygkeq3vA== 143189
+15DXldeS 143190
+15DXldeS15XXoQ== 143191
+15DXldeS15XXodeY 143192
+INCx0L7Qu9GM0YjQvtC1 143193
+INCa0L7QvdC10YfQvdC+ 143194
+16LXmdeq15XXoA== 143195
+16LXmdeq15XXoNeQ15k= 143196
+INC60L3QvtC/0Lo= 143197
+INC30L0= 143198
+INC30L3QsNGC0Yw= 143199
+IMSR4bux 143200
+IMSR4buxbmc= 143201
+0LLQu9Cw0LY= 143202
+0LLQu9Cw0LbQvQ== 143203
+157XmdeY15E= 143204
+44Ks44Kk 143205
+44Ks44Kk44OJ 143206
+Li4uLi4uLi4uLg== 143207
+IOC4geC4uOC4oQ== 143208
+IOC4geC4uOC4oeC4oOC4suC4ng== 143209
+IOC4geC4uOC4oeC4oOC4suC4nuC4seC4mQ== 143210
+IOC4geC4uOC4oeC4oOC4suC4nuC4seC4meC4mA== 143211
+IOC4geC4uOC4oeC4oOC4suC4nuC4seC4meC4mOC5jA== 143212
+YmV6 143213
+YmV6cGllY3plxYRzdA== 143214
+YmV6cGllY3plxYRzdHc= 143215
+44OR44OR5rS7 143216
+2LnYp9i3 143217
+2LnYp9i32YE= 143218
+IMSR4bqtbQ== 143219
+INC30YA= 143220
+INC30YDQtdC90LjRjw== 143221
+IGJvcsOn 143222
+INC90LXQtNC10Ls= 143223
+INC90LXQtNC10LvRjg== 143224
+IGjhu48= 143225
+IGjhu49uZw== 143226
+7J6l7JWg 143227
+7J6l7JWg7J24 143228
+INin2YTYudmE2KfZgtip 143229
+IO2BrA== 143230
+IO2BrOqyjA== 143231
+4LmE4Lij4LmI 143232
+4Lia4Liy4LiU 143233
+4Lia4Liy4LiU4LmA4LiI4LmH4Lia 143234
+4Lid4Lij4Lix 143235
+4Lid4Lij4Lix4LmI4LiH 143236
+4Lid4Lij4Lix4LmI4LiH4LmA4Lio 143237
+4Lid4Lij4Lix4LmI4LiH4LmA4Lio4Liq 143238
+16jXoteZ 143239
+16jXoteZ15XXoNeV16o= 143240
+IOuM 143241
+IOuMkw== 143242
+IOuMk+q4gA== 143243
+IG5hamI= 143244
+IG5hamJsaQ== 143245
+IG5hamJsacW8 143246
+IG5hamJsacW8c3o= 143247
+INC40YHQv9C+0LvRjNC30YPQtdGC0YHRjw== 143248
+IGNpZW50w61m 143249
+IGNpZW50w61maWNv 143250
+16LXnten 143251
+IGfhu6Np 143252
+2LTYrdmG 143253
+IMWbbQ== 143254
+IMWbbWllcg== 143255
+IMWbbWllcmNp 143256
+4LiE4Liy4Liq4Li04LmC4LiZ4Lit4Lit4LiZ4LmE4Lil4LiZ4LmM 143257
+15fXqdeR16rXmQ== 143258
+IG5pbmd1 143259
+IG5pbmd1w6lt 143260
+6L6844KB 143261
+44G3 143262
+INGD0LM= 143263
+INGD0LPQvtC7 143264
+772w 143265
+16TXqteZ15c= 143266
+16TXqteZ15fXqg== 143267
+INeU16jXkNep15XXoNeZ150= 143268
+cMOzc2l0bw== 143269
+44Kt44Os44Kk 143270
+44Gp44GT44KN 143271
+4LmA4LiX4LmI4Liy4LmE 143272
+4LmA4LiX4LmI4Liy4LmE4Lir4Lij 143273
+4LmA4LiX4LmI4Liy4LmE4Lir4Lij4LmI 143274
+INC40L3RgtC10YDRjNC10YA= 143275
+INit2KfYrA== 143276
+INit2KfYrNip 143277
+4Liq4Li14LiC4Liy4Lin 143278
+7Ja8 143279
+IG7hu5k= 143280
+IG7hu5lw 143281
+IMOtbmQ= 143282
+IMOtbmRpY2U= 143283
+4Liq4Liz4Lij4Lin4LiI 143284
+INC60LDQttC00L7QuQ== 143285
+IGhvdMOpaXM= 143286
+IG5hc3TEmQ== 143287
+IG5hc3TEmXBu 143288
+INeU16fXldeT 143289
+INeU16fXldeT150= 143290
+16TXldek 143291
+16TXldek15XXnA== 143292
+16TXldek15XXnNeo15k= 143293
+0LLRiNC10Lk= 143294
+44K344Oz44OX 143295
+44K344Oz44OX44Or 143296
+IHpkasSZxIc= 143297
+INCz0YDRg9C/0L/QsA== 143298
+INC/0L7QvNC10Yk= 143299
+INC/0L7QvNC10YnQtdC90LjRjw== 143300
+44Gp44GG44GE44GG 143301
+INC40YHQv9GL0YLQsA== 143302
+IG9nxYI= 143303
+IG9nxYJvcw== 143304
+IG9nxYJvc3plbg== 143305
+IG9nxYJvc3plbmk= 143306
+4Liq4Lij4LmJ4Liy4LiH4Liq4Lij4Lij 143307
+4Liq4Lij4LmJ4Liy4LiH4Liq4Lij4Lij4LiE4LmM 143308
+4Lie4Lij4Lij4LiT 143309
+IMOnxLFrxLHFnw== 143310
+INGH0LDRgdGC0L3QvtGB0YLQuA== 143311
+INeV15nXldeq16g= 143312
+57aa44GN44KS 143313
+57aa44GN44KS6Kqt 143314
+57aa44GN44KS6Kqt44KA 143315
+4LiB4Lij4Lix 143316
+4LiB4Lij4Lix4Lih 143317
+0LPRgNCw0YQ= 143318
+INCy0LvQsNC0 143319
+INCy0LvQsNC00LXQu9GM 143320
+INCy0LvQsNC00LXQu9GM0YY= 143321
+IGlzdGVkacSf 143322
+IGlzdGVkacSfaW5peg== 143323
+15HXnNei 143324
+15HXnNei15PXmQ== 143325
+2YXZiNin2YE= 143326
+2YXZiNin2YHZgtip 143327
+INeZ15XXqA== 143328
+INeZ15XXqNen 143329
+44Kr44O844OJ44Ot44O844Oz 143330
+INin2YTZhdi02YPZhA== 143331
+INin2YTZhdi02YPZhNip 143332
+IOq1re2ajA== 143333
+16HXpNeY 143334
+16HXpNeY154= 143335
+16HXpNeY157Xkdeo 143336
+IOyWtOugtQ== 143337
+2YPYp9mF 143338
+2YPYp9mF2YrYsdin 143339
+c2NobMO8 143340
+c2NobMO8c3Nl 143341
+INir2YY= 143342
+INir2YbYp9im2Yo= 143343
+7Im9 143344
+INCe0YHQvtCx 143345
+INCe0YHQvtCx0LXQvdC90L4= 143346
+INC40L3QstC10YHRgtC4 143347
+INC40L3QstC10YHRgtC40YbQuA== 143348
+2KfYrdiq2YU= 143349
+2KfYrdiq2YXYp9mE 143350
+RcSe 143351
+RcSexLA= 143352
+7ZWY6rKg64uk 143353
+INeQ15HXqNeU 143354
+INeQ15HXqNeU150= 143355
+INeR15fXmdeg150= 143356
+2KPZiNi2 143357
+2KPZiNi22KfYuQ== 143358
+IGTDqWw= 143359
+IGTDqWxhaQ== 143360
+INeQ15XXlNeR15nXnQ== 143361
+INGB0L7RhQ== 143362
+INGB0L7RhdGA 143363
+INGB0L7RhdGA0LDQvdC4 143364
+INC00L7RgdGC0LjQtg== 143365
+INC00L7RgdGC0LjQttC10L3QuA== 143366
+4Liq4Li04LmI4LiH4LmB 143367
+4Liq4Li04LmI4LiH4LmB4Lin4LiU 143368
+4Liq4Li04LmI4LiH4LmB4Lin4LiU4Lil 143369
+4Liq4Li04LmI4LiH4LmB4Lin4LiU4Lil4LmJ4Lit4Lih 143370
+INin2YTZhdio2KfYtNix 143371
+INGE0LjQsw== 143372
+INGE0LjQs9GD0YA= 143373
+0LzQvtC20LXQvA== 143374
+15zXnteZ15PXlA== 143375
+IGNpbsOp 143376
+IGNpbsOpbWE= 143377
+IGJhZGE= 143378
+IGJhZGHFhA== 143379
+2KzYqNmH2Kk= 143380
+INC00LXQvw== 143381
+INC00LXQv9GD0YI= 143382
+INC00LXQv9GD0YLQsNGC 143383
+IGRpc3TDom5jaWE= 143384
+INin2YTZhdi52KfYsQ== 143385
+INin2YTZhdi52KfYsdi22Kk= 143386
+dGjDqHNl 143387
+w7xuYw== 143388
+w7xuY8O8 143389
+INC00LDQvdC90L7Qs9C+ 143390
+IEJlbGdp 143391
+IEJlbGdpw6s= 143392
+INeR15HXpw== 143393
+INeR15HXp9ep15Q= 143394
+4Lii4LmI4Liy4LiZ 143395
+IHNvbHXDp8Ojbw== 143396
+INeU16bXmNeo 143397
+INeU16bXmNeo16TXlQ== 143398
+INij2YbYrQ== 143399
+INij2YbYrdin2KE= 143400
+INiv2YXYtA== 143401
+INiv2YXYtNmC 143402
+4Lih4Lix4LmJ 143403
+4Lih4Lix4LmJ4Lii 143404
+2YXYutix2Kg= 143405
+2KfYs9iq2LnZhdin2YQ= 143406
+IFPFgm93 143407
+IOuPmeyLnA== 143408
+IOuPmeyLnOyXkA== 143409
+INGB0L7RgQ== 143410
+INGB0L7RgdC10LQ= 143411
+7LKt7IaM 143412
+7LKt7IaM64WE 143413
+INCz0YDQsNGE 143414
+INCz0YDQsNGE0LjQug== 143415
+IOyekeydgA== 143416
+IHlldGk= 143417
+IHlldGnFn3Rpcg== 143418
+IOydtOqyg+ydtA== 143419
+4Lir4LmI4Liy4LiH 143420
+2KXZhdmD2KfZhg== 143421
+2KXZhdmD2KfZhtmK2Kk= 143422
+2KfYs9iq2LnYsdin2LY= 143423
+2YXYrtiv2LE= 143424
+INGH0YPRgtGM 143425
+2YXYr9mK2LE= 143426
+2YXYr9mK2LHZitip 143427
+IOC5gOC4oeC4qQ== 143428
+IOC5gOC4oeC4qeC4suC4ouC4mQ== 143429
+INC80LXRhQ== 143430
+INC80LXRhdCw0L3QuNC3 143431
+INC80LXRhdCw0L3QuNC30Lw= 143432
+INGB0YPQvA== 143433
+INGB0YPQvNC80YM= 143434
+IHbDtg== 143435
+IHbDtmxs 143436
+IHbDtmxsaWc= 143437
+INC00YDRg9C3 143438
+INC00YDRg9C30YzRjw== 143439
+44KS5Yip55So44GX44Gm 143440
+4Lia4Lij4Lij4LiI4Li4 143441
+cG/FvHljeg== 143442
+157Xqdeb 143443
+157Xqdeb16DXqg== 143444
+157Xqdeb16DXqteQ 143445
+IGV1cm9ww6llbg== 143446
+IHByb3ByacOp 143447
+IHByb3ByacOpdGFpcmU= 143448
+IGto4bqldQ== 143449
+44GE44Gf44Gg44GR44KL 143450
+IHRlY3LDvA== 143451
+IHRlY3LDvGJl 143452
+15TXkQ== 143453
+15TXkdeg15Q= 143454
+IGN1zA== 143455
+IGN1zIk= 143456
+IGN1zIlh 143457
+15DXldeV 143458
+15DXldeV15nXqNeU 143459
+INeb15XXnNeV 143460
+VWx1cw== 143461
+VWx1c2xhcmFyYXPEsQ== 143462
+INeg15XXqg== 143463
+INeg15XXqtef 143464
+44Gr5ZCR 143465
+44Gr5ZCR44GR44Gm 143466
+67mb 143467
+4LiX4Lix4LiB4Lip 143468
+4LiX4Lix4LiB4Lip4Liw 143469
+2LPZgtmI 143470
+2LPZgtmI2Lc= 143471
+INCy0L0= 143472
+INCy0L3QtdGI 143473
+INCy0L3QtdGI0L3QtQ== 143474
+IHVyeg== 143475
+IHVyesSZZA== 143476
+IMOhbWI= 143477
+IMOhbWJpdG8= 143478
+4Lit4LiY4Li0 143479
+4Lit4LiY4Li04Lia4Liy4Lii 143480
+IMWCYWQ= 143481
+IMWCYWRu 143482
+6rG07LaV 143483
+d8OzZHp0 143484
+d8OzZHp0dw== 143485
+IHF1ZXN0w7Vlcw== 143486
+INep16c= 143487
+INep16fXmdeR15w= 143488
+IG1pZWpzY293b8WbY2k= 143489
+INCy0LDQuw== 143490
+INCy0LDQu9GO0YI= 143491
+aMOkdXNlcg== 143492
+4Lir4LiZ4Lit4LiH 143493
+44Go5YWx 143494
+44Go5YWx44Gr 143495
+44OP44O844OJ 143496
+IOqwnOy1nA== 143497
+INC+0YHQvdC+0LLQvdC+0Lw= 143498
+INC80Y/RgQ== 143499
+2KfYudiq 143500
+2KfYudiq2YLYp9mE 143501
+4Liq4LiW4Li0 143502
+4Liq4LiW4Li04LiV4Li0 143503
+Tmd1 143504
+Tmd14buTbg== 143505
+INmF2KzZhA== 143506
+INmF2KzZhNip 143507
+4LmB4LiC4LiZ 143508
+INin2YTZhNmK2KjZig== 143509
+16TXoteZ15zXldeZ15XXqg== 143510
+INeU16jXpNeV15DXmQ== 143511
+16TXqNeV16Q= 143512
+16TXqNeV16TXmdec 143513
+16fXnNeQ 143514
+16fXnNeQ16HXmQ== 143515
+2YPYqti02YE= 143516
+44Gr44Gq44Gj44Gm44GX44G+44GG 143517
+4LmA4LiE4Lil4LmH4LiU 143518
+4LmA4LiE4Lil4LmH4LiU4Lil4Lix4Lia 143519
+IOy7tA== 143520
+IOy7tO2TqA== 143521
+IOy7tO2TqO2EsA== 143522
+INeX15nXldeR15k= 143523
+IG7DpG0= 143524
+IG7DpG1saWNo 143525
+5ZG844Gw 143526
+5ZG844Gw44KM 143527
+INGA0L7Quw== 143528
+INGA0L7Qu9C4 143529
+IHNww6ljaWFsaXPDqQ== 143530
+4LiZ4Lin4Lix4LiV 143531
+4LiZ4Lin4Lix4LiV4LiB4Lij4Lij4Lih 143532
+2YbYtdmI2LU= 143533
+0L/QtdGA0LXQtA== 143534
+0L/QtdGA0LXQtNCw0Yc= 143535
+dGjDqHF1ZQ== 143536
+INeo15DXmdeq15k= 143537
+44OA44Km44Oz 143538
+44KP44GL 143539
+44KP44GL44Gj44Gm 143540
+0LHQtdGA0LXQtg== 143541
+INGB0LXQug== 143542
+INGB0LXQutGA 143543
+INGB0LXQutGA0LXRgg== 143544
+INC/0L7RgdGC0L7Rj9C90L0= 143545
+4LiC4LiZ4Liq4LmI4LiH 143546
+IG3DvGs= 143547
+IG3DvGtlbQ== 143548
+IG3DvGtlbW1lbA== 143549
+0LXRgtC10YHRjA== 143550
+INin2YTYs9mG2YjYp9iq 143551
+IOyghO2YgA== 143552
+INeU157Xp9eV16jXmQ== 143553
+IG3DvGQ= 143554
+IG3DvGRhaA== 143555
+IG3DvGRhaGFsZQ== 143556
+IHd5Yg== 143557
+IHd5YsOzcg== 143558
+IHRlbmTDqm5jaWE= 143559
+2KXYr9in2LE= 143560
+2KXYr9in2LHZitip 143561
+IHVudGVyc3TDvHR6ZW4= 143562
+16rXkdeo 143563
+16rXkdeo16g= 143564
+IGRpw6E= 143565
+IGRpw6Fsb2dv 143566
+IMOWbmNl 143567
+IMOWbmNla2k= 143568
+44K544Od44OD44OI 143569
+64Sj 143570
+IEdlbGk= 143571
+IEdlbGnFnw== 143572
+44KS6YCa 143573
+44KS6YCa44GX44Gm 143574
+IEZ1w59iYWxs 143575
+IHNhbGFyaQ== 143576
+IHNhbGFyacOp 143577
+INC/0YDQvtC00YPQutGC0L7Qsg== 143578
+2LXZgdmC2Kk= 143579
+4Lij4Lin4Lia 143580
+4Lij4Lin4Lia4Lij4Lin4Lih 143581
+4LmD4LiZ4LiQ4Liy4LiZ 143582
+4LmD4LiZ4LiQ4Liy4LiZ4Liw 143583
+IGtheW5h 143584
+IGtheW5hxJ/EsQ== 143585
+IOyeke2SiA== 143586
+INCy0YvRgNCw0LY= 143587
+INCy0YvRgNCw0LbQtdC9 143588
+INGB0YLQtdC/ 143589
+INGB0YLQtdC/0LXQvdC4 143590
+INin2YTZhdmI2KzZiNiv 143591
+INin2YTZhdmI2KzZiNiv2Kk= 143592
+4Lil4LmJ4Lih 143593
+IG5hamN6xJk= 143594
+IG5hamN6xJnFm2NpZQ== 143595
+IG5hamN6xJnFm2NpZWo= 143596
+IHp3eQ== 143597
+IHp3eWs= 143598
+IHp3eWvFgg== 143599
+IOq3uOugh+yngA== 143600
+4LiB4Lij4Liw4LiI 143601
+4LiB4Lij4Liw4LiI4Liy4Lii 143602
+IOuLtQ== 143603
+IOuLteuzgA== 143604
+INGA0LXQsNC6 143605
+INGA0LXQsNC60YbQuA== 143606
+IMWbd2llxbw= 143607
+INGB0YLQvtC40LzQvtGB0YLQuA== 143608
+2YXZhtin2YI= 143609
+2YXZhtin2YLYtA== 143610
+2YXZhtin2YLYtNip 143611
+INGF0L7Rh9GD 143612
+44Oc44O844OJ 143613
+IHLDs8W8bmlj 143614
+INC60YDRiw== 143615
+INC60YDRi9GI 143616
+4pyT 143617
+44Kz44Oz44OG44Oz 143618
+44Kz44Oz44OG44Oz44OE 143619
+INC/0YDQtdC00L/QvtGH 143620
+157XqNeR15nXqg== 143621
+INi02YM= 143622
+INi02YPYsdin 143623
+INC00LDQuw== 143624
+INC00LDQu9C10Lo= 143625
+INC00LDQu9C10LrQvg== 143626
+2KjYsdmK2Lc= 143627
+2KjYsdmK2LfYp9mG2YrYpw== 143628
+2LnZhtin 143629
+2LnZhtin2YrYqQ== 143630
+INGA0LDRgdGB0LrQsNC3 143631
+INGA0LDRgdGB0LrQsNC30YvQstCw 143632
+2KPZhNmI 143633
+2KPZhNmI2KfZhg== 143634
+5oyB44Gj44Gm 143635
+5oyB44Gj44Gm44GE 143636
+2YXYqNin2K/Ypg== 143637
+15TXoteR16g= 143638
+15TXoteR16jXqg== 143639
+IHlhecSx 143640
+IHlhecSxbWw= 143641
+IHlhecSxbWxh 143642
+bcOhdA== 143643
+bcOhdGljb3M= 143644
+4LiB4Lix4LiH 143645
+4LiB4Lix4LiH4Lin4Lil 143646
+INec16TXqg== 143647
+INec16TXqteV15c= 143648
+4Lie4Lik4LiV4Li0 143649
+4Lie4Lik4LiV4Li04LiB4Lij4Lij4Lih 143650
+7YKs 143651
+INC+0LrRgNGD0LM= 143652
+INee16bXldeV15Q= 143653
+0JvQtdC90Lg= 143654
+0JvQtdC90LjQvQ== 143655
+IFRyaeG7gXU= 143656
+44Kz44Of44Ol 143657
+44Kz44Of44Ol44OL 143658
+44Kz44Of44Ol44OL44Kx 143659
+44Kz44Of44Ol44OL44Kx44O844K344On44Oz 143660
+2YPZhtmK 143661
+2YPZhtmK2LPYqQ== 143662
+44KS5Lit5b+D 143663
+44KS5Lit5b+D44Gr 143664
+IG1pxJlkeg== 143665
+IG1pxJlkenlu 143666
+IG1pxJlkenluYXI= 143667
+IG1pxJlkenluYXJvZA== 143668
+IG1pxJlkenluYXJvZG93 143669
+2YTZhg== 143670
+2YTZhtiv2Kc= 143671
+2KjYsdi0 143672
+2KjYsdi02YTZiNmG 143673
+2KjYsdi02YTZiNmG2Kk= 143674
+4LiB4Lij4Liw4LiV4Li4 143675
+4LiB4Lij4Liw4LiV4Li44LmJ4LiZ 143676
+IGfEsQ== 143677
+IGfEsWRh 143678
+4Lib4Lij4Liw4LiX4Lix4Lia 143679
+4Lib4Lij4Liw4LiX4Lix4Lia4LmD4LiI 143680
+IOu2iOq1rA== 143681
+IOu2iOq1rO2VmOqzoA== 143682
+INmG2Lc= 143683
+INmG2LfYp9mC 143684
+INCc0L7QttC10YI= 143685
+UHLDpHM= 143686
+UHLDpHNpZGVudA== 143687
+INGB0LrQvtGA 143688
+INGB0LrQvtGA0L7RgdGC0Yw= 143689
+INeU15HXlden16g= 143690
+0LXRhdCw0YLRjA== 143691
+IGfhuqFv 143692
+INep15DXmdeg150= 143693
+INeR16DXldeS 143694
+INeR16DXldeS16I= 143695
+INC+0L/QuNGB0LDQvdC40LU= 143696
+IHVjem5p 143697
+IHVjem5pw7N3 143698
+4LmA4Lit4LmH4LiZ 143699
+INiq2LQ= 143700
+INiq2LTYsdmK2YY= 143701
+IG5ow6Nu 143702
+67mo 143703
+IGNhcmFjdMOocmU= 143704
+16LXnNeZ 143705
+16LXnNeZ15nXlA== 143706
+5qW944GX44KB44KL 143707
+INGB0LDRhQ== 143708
+INGB0LDRhdCw0YA= 143709
+0LTRg9C80LDRgtGM 143710
+INCS0L7Qt9C80L7QttC90L4= 143711
+2LXZitin2YY= 143712
+2LXZitin2YbYqQ== 143713
+w7Ztw7xy 143714
+4Liq4Lil 143715
+4Liq4Lil4LmH 143716
+4Liq4Lil4LmH4Lit 143717
+4Liq4Lil4LmH4Lit4LiV 143718
+66Gv 143719
+IHRow7Np 143720
+Z3LDtsOfZQ== 143721
+IGtzacSZ 143722
+IGtzacSZZw== 143723
+INGA0L7QvA== 143724
+INGA0L7QvNCw0L0= 143725
+2YLYp9iz2YU= 143726
+157XkdeV15I= 143727
+157XkdeV15LXqNeZ150= 143728
+YmVzY2g= 143729
+YmVzY2jDpGZ0 143730
+YmVzY2jDpGZ0aWc= 143731
+15TXptei15Q= 143732
+IMOBcmVh 143733
+INC30LDRj9Cy0Lo= 143734
+xLk= 143735
+INC70Y7QsdC+0LPQvg== 143736
+IOC4oQ== 143737
+IOC4oeC4geC4ow== 143738
+IOC4oeC4geC4o+C4suC4hOC4oQ== 143739
+0YTQuNC3 143740
+0YTQuNC30LjRh9C10YHQug== 143741
+0LjQvdGE 143742
+0LjQvdGE0LXQug== 143743
+0LjQvdGE0LXQutGG0Lg= 143744
+2KfZhNi3 143745
+2KfZhNi32KfYptmB 143746
+INC60L7Qu9C7 143747
+INC60L7Qu9C70LXQutGC0LjQsg== 143748
+0LXQt9C20LA= 143749
+INiz2KjYrQ== 143750
+INiz2KjYrdin2YY= 143751
+INiz2KjYrdin2YbZhw== 143752
+c2NobMOk 143753
+c2NobMOkZ2U= 143754
+INC00Lg= 143755
+INC00LjQsNCz 143756
+INC00LjQsNCz0L3QvtGB0YI= 143757
+INC+0YLQvNC10YLQuNGC0Yw= 143758
+0KLQrA== 143759
+INin2YTYr9ix 143760
+INin2YTYr9ix2KfYs9mK 143761
+16LXptee 143762
+16LXptee15DXldeq 143763
+IGTDqW1hcmNo 143764
+IGTDqW1hcmNoZQ== 143765
+INeY15XXog== 143766
+INeY15XXotef 143767
+IGZ1bmNpb27DoXJpb3M= 143768
+4bu1 143769
+15zXm9eQ 143770
+15zXm9eQ15XXqNeU 143771
+4LiL4LmI 143772
+4LiL4LmI4Lit4Lih 143773
+INGH0YPQsg== 143774
+INGH0YPQstGB0YLQstC+ 143775
+4pa8 143776
+0L/Rg9GJ 143777
+0L/Rg9GJ0LXQvQ== 143778
+INC80LXRgA== 143779
+INC80LXRgNC+0L8= 143780
+INC80LXRgNC+0L/RgNC4 143781
+INC80LXRgNC+0L/RgNC40Y/RgtC40Y8= 143782
+IHXDp3U= 143783
+IHXDp3XFnw== 143784
+44KS5Yip55So44GZ44KL 143785
+YcSf 143786
+YcSfbMSx 143787
+7JiI7Iig 143788
+4LmB4Lii4LmI 143789
+INin2YTZg9mF 143790
+INin2YTZg9mF2KjZig== 143791
+INin2YTZg9mF2KjZitmI2KrYsQ== 143792
+2KrZiNmK 143793
+2KrZiNmK2KrYsQ== 143794
+4LmA4LiK4Li14LmI4Lii4Lin 143795
+4LmA4LiK4Li14LmI4Lii4Lin4LiK4Liy 143796
+4LmA4LiK4Li14LmI4Lii4Lin4LiK4Liy4LiN 143797
+4buU 143798
+IGhp4bq/bQ== 143799
+2LDYp9mD2LHYqQ== 143800
+INeU157XmdeV15fXkw== 143801
+IOyInA== 143802
+IOyInOqwhA== 143803
+IEvEsQ== 143804
+IEvEsXNh 143805
+IGdlbGVjZcSfaQ== 143806
+0L/RgNC+0YTQtdGB0YHQuNC+0L3QsA== 143807
+0L/RgNC+0YTQtdGB0YHQuNC+0L3QsNC7 143808
+IG9nw7M= 143809
+IG9nw7NsZQ== 143810
+IGfFgsOzdw== 143811
+IGfFgsOzd25l 143812
+INGB0YLQuNC70Yw= 143813
+15DXpNec 143814
+15DXpNec15nXpw== 143815
+15DXpNec15nXp9em15nXlA== 143816
+4Liq4Lih4Liy4Lij4LmM 143817
+4Liq4Lih4Liy4Lij4LmM4LiX 143818
+4Liq4Lih4Liy4Lij4LmM4LiX4LmC4Lif 143819
+4Liq4Lih4Liy4Lij4LmM4LiX4LmC4Lif4LiZ 143820
+IHRow6FuaA== 143821
+0J/QvtC0 143822
+0J/QvtC00YDQvtCx 143823
+0J/QvtC00YDQvtCx0L3QtdC1 143824
+INin2YTYqtmI2YY= 143825
+INin2YTYqtmI2YbYs9mK 143826
+IGJhaMOnZQ== 143827
+4LmB4LiB4LmJ4Lib4Lix4LiN4Lir4Liy 143828
+w6lkdWNhdGlvbg== 143829
+ZXVyb3A= 143830
+ZXVyb3DDpA== 143831
+ZXVyb3DDpGlzY2hl 143832
+IEtzaQ== 143833
+IEtzacSZ 143834
+IOuEmA== 143835
+IOuEmOyWtA== 143836
+IHbDvGM= 143837
+IHbDvGN1ZA== 143838
+IHlheWc= 143839
+IHlheWfEsW4= 143840
+IG5pZWt0 143841
+IG5pZWt0w7NyeQ== 143842
+IG5pZWt0w7NyeWNo 143843
+44Gt44GH 143844
+INC60LDQtg== 143845
+INC60LDQttC10YLRgdGP 143846
+0LrQsNC2 143847
+0LrQsNC20LXRgg== 143848
+INin2YTYr9mK2YXZgtix2Kc= 143849
+INin2YTYr9mK2YXZgtix2KfYtw== 143850
+INin2YTYr9mK2YXZgtix2KfYt9mK2Kk= 143851
+5q2p 143852
+5q2p44GE44Gm 143853
+IHZheg== 143854
+IHZhemdl 143855
+IHZhemdlw6c= 143856
+INC80LjQvdC40LzQsNC70Yw= 143857
+INC80LjQvdC40LzQsNC70YzQvQ== 143858
+44OR44K/ 143859
+44OR44K/44O844Oz 143860
+IOuK 143861
+IOuKkA== 143862
+IOuKkOuCjA== 143863
+44Gh44KH44GG 143864
+44Gh44KH44GG44Gp 143865
+IOC4geC4ow== 143866
+IOC4geC4o+C4geC4jg== 143867
+IOC4geC4o+C4geC4juC4suC4hOC4oQ== 143868
+2KrYrNiv2YrYrw== 143869
+INi02KfZhdmE 143870
+4Lir4Lil4Lix4LiB4LiQ4Liy4LiZ 143871
+INC80LDRgNGI 143872
+INC80LDRgNGI0YDRg9GC 143873
+IHbDrXQ= 143874
+IHbDrXRpbWE= 143875
+IHF1aXrDoQ== 143876
+YXlnxLE= 143877
+15PXkdeo15nXlQ== 143878
+INC40LfQtA== 143879
+INC40LfQtNC10LvQuA== 143880
+INC40LfQtNC10LvQuNGP 143881
+0L/Qu9Cw 143882
+0L/Qu9Cw0Yc= 143883
+0L/Qu9Cw0YfQuNCy0LA= 143884
+5Lu744Gb 143885
+IMOpcXVpcMOp 143886
+5LmF44GX44E= 143887
+5LmF44GX44G2 143888
+5LmF44GX44G244KK 143889
+INC60LDRgg== 143890
+INC60LDRgtCw0Ls= 143891
+INC60LDRgtCw0LvQvtCz 143892
+4Liq4LmJ4Lih 143893
+INGA0LXQuQ== 143894
+INGA0LXQudGC 143895
+INGA0LXQudGC0LjQvdCz 143896
+IHRodXnhu4Fu 143897
+INin2YTZhdmC2K/Ysw== 143898
+ZXNww6hyZQ== 143899
+44Gr5YWl44Gj44Gf 143900
+4Lir4Lih4Liy4Lii4LmA4Lil4LiC 143901
+16rXl9eV16nXqg== 143902
+4LiZ4LmI4Liw 143903
+IHBlxYI= 143904
+IHBlxYJuZQ== 143905
+IHDDqXJk 143906
+IHDDqXJkaWRh 143907
+4Lir4Lih4Lin4LiU 143908
+4Lir4Lih4Lin4LiU4Lir4Lih4Li54LmI 143909
+0LjRh9C10YHQutGD0Y4= 143910
+57WC44KP 143911
+57WC44KP44Gj44Gf 143912
+INeS15XXktec 143913
+4LiX4Liz4LiE4Lin4Liy4Lih 143914
+4LiX4Liz4LiE4Lin4Liy4Lih4Liq4Liw4Lit4Liy4LiU 143915
+SG90w6lpcw== 143916
+INC30LDRgA== 143917
+INC30LDRgNC10LPQuNGB0YI= 143918
+INC30LDRgNC10LPQuNGB0YLRgNC4 143919
+INC30LDRgNC10LPQuNGB0YLRgNC40YDQvtCy0LA= 143920
+INGB0L7QsdGL0YLQuA== 143921
+INGB0L7QsdGL0YLQuNGP 143922
+INeW15vXkA== 143923
+2YXZhti42YjZhdip 143924
+INeU157Xpg== 143925
+INeU157XpteZ15DXldeq 143926
+2YXZg9mI2YY= 143927
+2YXZg9mI2YbYp9iq 143928
+5LiK44GM44KL 143929
+IG3EmQ== 143930
+IG3EmXNr 143931
+4Lir4Lij4Li34Lit4LmA4Lib4Lil4LmI4Liy 143932
+64Ku 143933
+IG5va3Rhcw== 143934
+IG5va3Rhc8Sx 143935
+INCx0L7Qu9GM0YjQuNC8 143936
+INC70YPRh9GI0LjRhQ== 143937
+2LTZh9mK2K8= 143938
+4Lit4Liz4LiZ 143939
+4Lit4Liz4LiZ4Lin4Lii 143940
+4Lit4Liz4LiZ4Lin4Lii4LiE4Lin4Liy4Lih 143941
+4Lit4Liz4LiZ4Lin4Lii4LiE4Lin4Liy4Lih4Liq4Liw4LiU4Lin4LiB 143942
+INC10LI= 143943
+INC10LLRgA== 143944
+INC10LLRgNC+0L8= 143945
+INC10LLRgNC+0L/QtdC5 143946
+4LiJ4Liy4Lii 143947
+7ISt 143948
+2YXZgdin 143949
+2YXZgdin2YjYtg== 143950
+2YXZgdin2YjYttin2Ko= 143951
+67mM 143952
+6LWk44Gh44KD44KT 143953
+INGD0LTQsNC70L7RgdGM 143954
+INCl0L7Rgg== 143955
+INCl0L7RgtGP 143956
+cHJ6ZWRzacSZYmlvcmM= 143957
+IEjDtG0= 143958
+7ZWY7JiA7Iq164uI64uk 143959
+INC90LDQsw== 143960
+INC90LDQs9GA0YPQtw== 143961
+INC90LDQs9GA0YPQt9C6 143962
+INeR15nXoNec15DXldee15k= 143963
+IOqwgOuKpe2VnA== 143964
+IEjhu691 143965
+4Lit4Li44LiU 143966
+4Lit4Li44LiU4Lih 143967
+16rXldek 143968
+16rXldek16LXlA== 143969
+IG1pxYJv 143970
+IG1pxYJvxZtjaQ== 143971
+a3NpxIXFvA== 143972
+a3NpxIXFvGth 143973
+INin2YTZhNi52KjYqQ== 143974
+4LiJ4Liy4LiB 143975
+4Liq4Liw4Liq4Lih 143976
+157Xqteo 143977
+157Xqteo15fXqQ== 143978
+IGzDqWfDqHJl 143979
+INec16bXpA== 143980
+INec16bXpNeZ15Q= 143981
+INC40YHRgtC+0YDQuNGP 143982
+IOODiOODqQ== 143983
+IOODiOODqeODg+OCrw== 143984
+IOODiOODqeODg+OCr+ODkOODg+OCrw== 143985
+INC60LA= 143986
+INC60LDRhNC1 143987
+157Xodee15o= 143988
+IGPDvG0= 143989
+IGPDvG1sZQ== 143990
+4LmA4LiE4Lil4Li34LmI4Lit4LiZ4LmE4Lir4Lin 143991
+44GK44Gd 143992
+44GK44Gd44KJ44GP 143993
+7J6Q64+Z 143994
+7J6Q64+Z7LCo 143995
+4Lit4Lix4LiV 143996
+4Lit4Lix4LiV4LmC4LiZ 143997
+4Lit4Lix4LiV4LmC4LiZ4Lih4Lix 143998
+4Lit4Lix4LiV4LmC4LiZ4Lih4Lix4LiV4Li0 143999
+IMWfaWs= 144000
+IMWfaWtheQ== 144001
+IMWfaWtheWV0 144002
+ZXh0csOqbWU= 144003
+a3LDpA== 144004
+a3LDpGZ0ZQ== 144005
+64KZ 144006
+7ZWR 144007
+7LKZ 144008
+7ZiI 144009
+7LCN 144010
+4pmh 144011
+7J6U 144012
+66Kw 144013
+7Z2U 144014
+7Z2Q 144015
+4oeS 144016
+66eb 144017
+7IqI 144018
+4buS 144019
+7Ji1 144020
+4peO 144021
+7YKo 144022
+6r+I 144023
+7Iio 144024
+7Juo 144025
+66el 144026
+772A 144027
+77yq 144028
+4bqo 144029
+44WO 144030
+0Zc= 144031
+7ISs 144032
+7Lm8 144033
+77y2 144034
+7Jug 144035
+65+0 144036
+xYM= 144037
+64K8 144038
+64uQ 144039
+4oC5 144040
+66at 144041
+7KeQ 144042
+4oCk 144043
+w4U= 144044
+65yo 144045
+7YS4 144046
+7ZyY 144047
+6rKB 144048
+67SF 144049
+w5g= 144050
+662U 144051
+65iR 144052
+4peH 144053
+7JeY 144054
+77u0 144055
+66e5 144056
+776d 144057
+7Iq3 144058
+7YOV 144059
+77yg 144060
+7Lu0 144061
+66CM 144062
+7L2c 144063
+77u5 144064
+44Wg 144065
+7KG4 144066
+64W5 144067
+4oK6 144068
+4pa2 144069
+7YOQ 144070
+6rW0 144071
+7ZG4 144072
+0ZQ= 144073
+7ZS9 144074
+0IU= 144075
+67Ck 144076
+1IE= 144077
+7LKo 144078
+7LaY 144079
+67KX 144080
+66m4 144081
+77y7 144082
+77y9 144083
+77y3 144084
+7LCM 144085
+w5I= 144086
+7Y+0 144087
+7JO4 144088
+7LSM 144089
+64GU 144090
+65Sp 144091
+64eM 144092
+66mA 144093
+67Ko 144094
+77y1 144095
+66eh 144096
+64ur 144097
+4Li/ 144098
+44Gx 144099
+7Ie8 144100
+7Lqg 144101
+666k 144102
+6rGx 144103
+7Lus 144104
+4oSD 144105
+65Sx 144106
+64OI 144107
+7Iux 144108
+7ZmI 144109
+656Q 144110
+7IWA 144111
+7KCg 144112
+0IY= 144113
+66CJ 144114
+772F 144115
+772P 144116
+7ZmA 144117
+65uw 144118
+4buu 144119
+7YK5 144120
+6r2D 144121
+77uk 144122
+77qU 144123
+6rq8 144124
+7JWJ 144125
+4pmm 144126
+772B 144127
+7JO0 144128
+44CJ 144129
+7LCu 144130
+7KSY 144131
+4buq 144132
+64GE 144133
+65Co 144134
+7JWM 144135
+7Z2Y 144136
+7YWQ 144137
+44CI 144138
+6rKq 144139
+64ul 144140
+6rK8 144141
+4buM 144142
+66eo 144143
+64GK 144144
+67Kk 144145
+65GU 144146
+7Z2h 144147
+4bus 144148
+66yY 144149
+44GJ 144150
+656r 144151
+7ZSI 144152
+7YWN 144153
+7J6D 144154
+772J 144155
+7IGc 144156
+4pa9 144157
+66y7 144158
+4paz 144159
+77y4 144160
+7IGY 144161
+7Law 144162
+7Iq0 144163
+7JWx 144164
+7IeE 144165
+4bqu 144166
+77S/ 144167
+77S+ 144168
+4oK9 144169
+64ST 144170
+66Op 144171
+7LOk 144172
+6rSc 144173
+w5k= 144174
+4buc 144175
+77+j 144176
+65Ot 144177
+66mY 144178
+6ru0 144179
+66C0 144180
+0IM= 144181
+66y1 144182
+7Ked 144183
+44G6 144184
+8J+Ygg== 144185
+656s 144186
+7KCK 144187
+6rSE 144188
+7J6K 144189
+7Z6M 144190
+7ISv 144191
+4oiA 144192
+4pah 144193
+64CM 144194
+656Z 144195
+772D 144196
+4bq2 144197
+776E 144198
+77qY 144199
+67m8 144200
+w4w= 144201
+4pa3 144202
+6riN 144203
+66mL 144204
+44GD 144205
+7JiG 144206
+7Jiu 144207
+66qs 144208
+66Gk 144209
+66Cs 144210
+64qm 144211
+4paq 144212
+7LyT 144213
+7JyI 144214
+7Ken 144215
+7729 144216
+64OJ 144217
+776M 144218
+65iQ 144219
+77yD 144220
+4buE 144221
+7LSs 144222
+7Lak 144223
+77y5 144224
+77ut 144225
+4oKr 144226
+772H 144227
+7Ji3 144228
+65ao 144229
+4omr 144230
+66a/ 144231
+4pyo 144232
+2bE= 144233
+7K+k 144234
+6rmU 144235
+8J+Yig== 144236
+7Iir 144237
+6rOx 144238
+6rWz 144239
+772L 144240
+4LiM 144241
+xKA= 144242
+65S4 144243
+67CR 144244
+7IWL 144245
+7Y60 144246
+4pyF 144247
+7YOR 144248
+64iH 144249
+7Y+8 144250
+8J+YjQ== 144251
+7Jib 144252
+77uj 144253
+0Zg= 144254
+7KmM 144255
+66aF 144256
+7J2N 144257
+7724 144258
+642c 144259
+44GF 144260
+7Y68 144261
+64ud 144262
+67+M 144263
+7Lyw 144264
+7Iur 144265
+67Cl 144266
+7ZuM 144267
+7KiM 144268
+67mZ 144269
+772O 144270
+67SE 144271
+7IS5 144272
+772y 144273
+7IyT 144274
+0pE= 144275
+67CN 144276
+66CA 144277
+7Yak 144278
+772v 144279
+66SE 144280
+6r2k 144281
+772S 144282
+7JWo 144283
+7728 144284
+6rmQ 144285
+7YGQ 144286
+4oSW 144287
+66e6 144288
+77qu 144289
+64WB 144290
+6rK4 144291
+77ug 144292
+7Yqc 144293
+xbk= 144294
+66Wt 144295
+64iJ 144296
+772U 144297
+7Yys 144298
+7J6H 144299
+76yB 144300
+77uo 144301
+65Gl 144302
+656E 144303
+2aw= 144304
+7Yu0 144305
+7J6J 144306
+2r4= 144307
+7JuF 144308
+77uu 144309
+64uJ 144310
+4omq 144311
+4peE 144312
+64iM 144313
+7Zu8 144314
+7KSN 144315
+xbg= 144316
+7KSs 144317
+7L6M 144318
+772T 144319
+776K 144320
+8J+Puw== 144321
+776J 144322
+0IE= 144323
+7ZiQ 144324
+776Z 144325
+6rys 144326
+7Z6Q 144327
+4oCl 144328
+65+t 144329
+66ee 144330
+7IOk 144331
+77qS 144332
+7Yux 144333
+672R 144334
+w5U= 144335
+4oia 144336
+64KE 144337
+6rmd 144338
+64aI 144339
+4bq6 144340
+7IWI 144341
+7IyN 144342
+4oCh 144343
+77yx 144344
+7IGo 144345
+4pi6 144346
+65K3 144347
+7Jiz 144348
+8J+RjQ== 144349
+66q9 144350
+64Kt 144351
+77qt 144352
+66mI 144353
+4buI 144354
+7ZWA 144355
+64uZ 144356
+66aH 144357
+7JWk 144358
+7I28 144359
+44O1 144360
+0aM= 144361
+7JyX 144362
+4q2Q 144363
+776Y 144364
+7Zes 144365
+6r68 144366
+7JWX 144367
+77uM 144368
+6rG3 144369
+64WV 144370
+66Gx 144371
+7JWK 144372
+776A 144373
+7Ieg 144374
+7Yyp 144375
+77qq 144376
+66eZ 144377
+77y/ 144378
+6r+U 144379
+7Y6c 144380
+66O4 144381
+7ZSU 144382
+77uz 144383
+64+V 144384
+7Iu8 144385
+4buO 144386
+66eY 144387
+7KKL 144388
+7Yah 144389
+772x 144390
+7Z2R 144391
+4bu4 144392
+7KaM 144393
+7Lm4 144394
+662Y 144395
+776X 144396
+77uL 144397
+7YqA 144398
+66WZ 144399
+7L2p 144400
+64GX 144401
+6420 144402
+7IWc 144403
+wrg= 144404
+67uQ 144405
+7IO1 144406
+6rKQ 144407
+65Os 144408
+66Ow 144409
+44WL 144410
+7JeJ 144411
+4buW 144412
+64SM 144413
+7722 144414
+67SH 144415
+64Kz 144416
+44Kc 144417
+65a7 144418
+7Y6A 144419
+642p 144420
+7ZW4 144421
+w7c= 144422
+6ry8 144423
+65Sc 144424
+67C0 144425
+66mN 144426
+4pev 144427
+7JeR 144428
+7Jm8 144429
+77qR 144430
+67aV 144431
+66Gs 144432
+772M 144433
+7Yao 144434
+77q0 144435
+66CY 144436
+6rCk 144437
+7Iiy 144438
+0ZM= 144439
+7IWJ 144440
+77uT 144441
+64iU 144442
+642n 144443
+4oC8 144444
+77uy 144445
+6rCx 144446
+6r+A 144447
+64u3 144448
+4bq4 144449
+4bqq 144450
+xpI= 144451
+642k 144452
+7Iit 144453
+772C 144454
+772I 144455
+xaA= 144456
+66Os 144457
+0bU= 144458
+65ah 144459
+64OE 144460
+7ISw 144461
+65OI 144462
+776D 144463
+64eo 144464
+772Q 144465
+6rW9 144466
+7Je9 144467
+64KA 144468
+66y2 144469
+7723 144470
+7I+f 144471
+7ZiU 144472
+6ryI 144473
+64GI 144474
+7KWQ 144475
+77qX 144476
+xIw= 144477
+64ig 144478
+65a8 144479
+7YC0 144480
+4oml 144481
+64ut 144482
+7LGZ 144483
+6ruP 144484
+66mk 144485
+7IOY 144486
+642u 144487
+66Oh 144488
+7IK9 144489
+44ic 144490
+xKg= 144491
+4oCn 144492
+7726 144493
+xKM= 144494
+7KaJ 144495
+77y8 144496
+26k= 144497
+4oiZ 144498
+67CP 144499
+67mF 144500
+8J+Ymw== 144501
+7Yi0 144502
+8J+SlQ== 144503
+44CS 144504
+7J6Y 144505
+77qk 144506
+772W 144507
+66mc 144508
+67K8 144509
+652E 144510
+65qc 144511
+77uY 144512
+7IOM 144513
+772E 144514
+7KmU 144515
+772Z 144516
+77qp 144517
+254= 144518
+4piO 144519
+7KCk 144520
+65Cp 144521
+xZ0= 144522
+4p6h 144523
+77un 144524
+0I8= 144525
+7KuT 144526
+6rO9 144527
+yZE= 144528
+44Oy 144529
+64Kr 144530
+66aJ 144531
+7KKB 144532
+67Ct 144533
+8J+YgQ== 144534
+67m1 144535
+7LKp 144536
+7Lu1 144537
+8J+YmA== 144538
+67GF 144539
+4omI 144540
+67ma 144541
+77uc 144542
+8J+Zjw== 144543
+7YGw 144544
+7ISe 144545
+776a 144546
+7Ji5 144547
+67yI 144548
+64Kv 144549
+656p 144550
+7Zqh 144551
+772V 144552
+7YOT 144553
+652g 144554
+6rOB 144555
+65OA 144556
+7Jeg 144557
+77y6 144558
+66eR 144559
+64u/ 144560
+7L+o 144561
+446h 144562
+0Io= 144563
+7YSx 144564
+xag= 144565
+77qz 144566
+776P 144567
+4ouF 144568
+6ry0 144569
+4omk 144570
+7YyB 144571
+zqk= 144572
+6rak 144573
+7IiN 144574
+4py/ 144575
+7L2k 144576
+64iF 144577
+7Yax 144578
+44Wc 144579
+4ZCF 144580
+xZI= 144581
+8J+RiQ== 144582
+77um 144583
+0Ko= 144584
+66Wc 144585
+7ZWr 144586
+776L 144587
+4pmr 144588
+6rmc 144589
+67C4 144590
+65SY 144591
+7Z2J 144592
+776B 144593
+776b 144594
+66Cb 144595
+6rK5 144596
+7L+8 144597
+77us 144598
+4p6k 144599
+8J+ZgQ== 144600
+77qg 144601
+64ao 144602
+66+5 144603
+6riL 144604
+67uU 144605
+6rmD 144606
+65GR 144607
+7Yu4 144608
+7Y6Z 144609
+4p6W 144610
+44O9 144611
+7Kea 144612
+772s 144613
+77ul 144614
+7Yy9 144615
+4oCS 144616
+7IyA 144617
+7K2J 144618
+65qx 144619
+44Ke 144620
+7YuI 144621
+44KQ 144622
+64mY 144623
+zqM= 144624
+6rOw 144625
+67mX 144626
+776O 144627
+8J+YrQ== 144628
+7Z2g 144629
+7Je/ 144630
+6rCa 144631
+7KSM 144632
+66e1 144633
+772z 144634
+44Gi 144635
+77uX 144636
+4omm 144637
+2qQ= 144638
+66CB 144639
+6ry9 144640
+77ur 144641
+4omn 144642
+7LSb 144643
+7KCd 144644
+4bqw 144645
+4pmj 144646
+7LqY 144647
+4oiH 144648
+6rKJ 144649
+67Cf 144650
+77uU 144651
+7ZaH 144652
+4paS 144653
+8J+Rjw== 144654
+w54= 144655
+8J+Yhg== 144656
+77q8 144657
+4p2X 144658
+7LqU 144659
+7Lmp 144660
+65ak 144661
+64OF 144662
+4pSc 144663
+7727 144664
+zpQ= 144665
+4YOm 144666
+7J6O 144667
+4piA 144668
+4oi8 144669
+8J+UpQ== 144670
+67CM 144671
+7KCW 144672
+7Zeb 144673
+zpU= 144674
+77qD 144675
+67aJ 144676
+4oie 144677
+7YOt 144678
+w4s= 144679
+4oGE 144680
+44WH 144681
+64Sl 144682
+64uu 144683
+66C3 144684
+7Yyd 144685
+7Lqh 144686
+67eU 144687
+7KmN 144688
+7YK0 144689
+65qr 144690
+4pOS 144691
+7ZWN 144692
+4pmC 144693
+776G 144694
+4oap 144695
+7I2p 144696
+77qV 144697
+7Z2Z 144698
+0Zw= 144699
+7YK3 144700
+7Z2w 144701
+7YOx 144702
+65WQ 144703
+776S 144704
+14M= 144705
+64yE 144706
+7Ji0 144707
+7JW1 144708
+6rml 144709
+656t 144710
+7Kq8 144711
+446d 144712
+8J+YhQ== 144713
+64+L 144714
+66qr 144715
+77q4 144716
+666s 144717
+67KF 144718
+65Gg 144719
+7IWw 144720
+7Lu3 144721
+65Sq 144722
+64WU 144723
+44Wh 144724
+7JS7 144725
+7ZWP 144726
+642x 144727
+77qo 144728
+776N 144729
+7721 144730
+7KKA 144731
+7Y6M 144732
+77uw 144733
+77qj 144734
+xqM= 144735
+8J+kow== 144736
+77e6 144737
+64Ka 144738
+4ouG 144739
+67ON 144740
+8J+YhA== 144741
+7JaA 144742
+7Jmg 144743
+64aU 144744
+7Zeo 144745
+77ub 144746
+77ud 144747
+4bu2 144748
+7JaY 144749
+7I6E 144750
+2oY= 144751
+77ue 144752
+64CQ 144753
+6rKU 144754
+77u1 144755
+4pem 144756
+7Zqf 144757
+6rmB 144758
+6rCT 144759
+65S0 144760
+7I+Y 144761
+65qd 144762
+4bug 144763
+6560 144764
+64SJ 144765
+4pie 144766
+772Y 144767
+xb0= 144768
+66aO 144769
+4pas 144770
+662J 144771
+4oeb 144772
+7I2s 144773
+77qf 144774
+y5w= 144775
+67aT 144776
+7Juw 144777
+xZw= 144778
+662H 144779
+4buy 144780
+y5o= 144781
+65WA 144782
+4piR 144783
+8J+PvA== 144784
+7Ja9 144785
+4oyS 144786
+0I4= 144787
+yb4= 144788
+7Yyh 144789
+776F 144790
+7J6t 144791
+772o 144792
+7Lmr 144793
+7JyM 144794
+0ps= 144795
+6rW/ 144796
+64um 144797
+4pSU 144798
+776R 144799
+7KeW 144800
+7LqE 144801
+44CD 144802
+yrw= 144803
+6rKf 144804
+772n 144805
+xKI= 144806
+7Y6g 144807
+66e3 144808
+6rCH 144809
+7Iu5 144810
+8J+Spg== 144811
+776c 144812
+64qZ 144813
+67Kh 144814
+xb8= 144815
+8J+Yiw== 144816
+8J+Sqg== 144817
+7L+E 144818
+66mV 144819
+7K2k 144820
+64qE 144821
+8J+MuA== 144822
+44Kd 144823
+x44= 144824
+772a 144825
+xJc= 144826
+64GT 144827
+6raQ 144828
+4bWJ 144829
+44OC 144830
+6ruN 144831
+8J+Ypg== 144832
+44Cd 144833
+8J+klw== 144834
+0Z8= 144835
+7JeO 144836
+4pyM 144837
+7ImQ 144838
+w4Y= 144839
+7ZeQ 144840
+8J+OiQ== 144841
+zpE= 144842
+772t 144843
+8J+SmQ== 144844
+7Jus 144845
+7YCY 144846
+77ui 144847
+8J+Yjg== 144848
+7ZG8 144849
+7Z2p 144850
+77uE 144851
+7YWA 144852
+66CQ 144853
+7KWs 144854
+0Is= 144855
+7IO3 144856
+65ys 144857
+8J+Ygw== 144858
+64Ss 144859
+66Wo 144860
+7JuN 144861
+772G 144862
+7720 144863
+44OF 144864
+w48= 144865
+77uq 144866
+4pmg 144867
+64qs 144868
+67GA 144869
+67CL 144870
+7IOA 144871
+772+ 144872
+64Kx 144873
+7Lu4 144874
+8J+Slg== 144875
+8J+RjA== 144876
+0Z4= 144877
+7Kex 144878
+y4Y= 144879
+8J+Tmg== 144880
+4q2V 144881
+76yC 144882
+77uh 144883
+65Gs 144884
+7Yi8 144885
+4pa4 144886
+6rCv 144887
+6rmF 144888
+772u 144889
+65il 144890
+xKE= 144891
+7Yyf 144892
+0Iw= 144893
+7Iaf 144894
+77qT 144895
+77u8 144896
+w5s= 144897
+44O+ 144898
+64yT 144899
+7ZKL 144900
+7JWT 144901
+7725 144902
+64Kh 144903
+8J+Rhw== 144904
+4bq8 144905
+44Cf 144906
+8J+Mnw== 144907
+7YOg 144908
+44CG 144909
+4oCf 144910
+67iQ 144911
+8J+MuQ== 144912
+7KC8 144913
+8J+TjA== 144914
+7JSs 144915
+4peA 144916
+8J+Skw== 144917
+6rmO 144918
+7IKQ 144919
+7JSM 144920
+0Zs= 144921
+4pSI 144922
+67Kz 144923
+446e 144924
+1aE= 144925
+7YK1 144926
+8J+klA== 144927
+64CU 144928
+7IqQ 144929
+7ZmJ 144930
+4pym 144931
+65yv 144932
+7KCv 144933
+65Sn 144934
+zqY= 144935
+y4g= 144936
+7Im8 144937
+4peK 144938
+65yp 144939
+65yw 144940
+776Q 144941
+67+U 144942
+7Jeu 144943
+7LeM 144944
+77qn 144945
+zpI= 144946
+67WZ 144947
+77uK 144948
+7LCU 144949
+7Y6E 144950
+8J+Slw== 144951
+4bq0 144952
+7LCi 144953
+7Zy8 144954
+6r2C 144955
+7LGU 144956
+7Im0 144957
+4pa+ 144958
+7Yiw 144959
+64ub 144960
+4p2j 144961
+772q 144962
+8J+SnA== 144963
+y5g= 144964
+44Wk 144965
+4oaX 144966
+7ZaE 144967
+4pms 144968
+7JWw 144969
+77qc 144970
+4omh 144971
+44CT 144972
+7JGl 144973
+7YyN 144974
+7YmB 144975
+67uX 144976
+7Zyg 144977
+7Zyp 144978
+4pyI 144979
+7YCE 144980
+7JaH 144981
+7KKH 144982
+7Z6Z 144983
+66q5 144984
+44Kb 144985
+8J+YsQ== 144986
+642f 144987
+4LmF 144988
+6rW2 144989
+2as= 144990
+7JSB 144991
+4pyq 144992
+776I 144993
+8J+ZjA== 144994
+4pqh 144995
+zpo= 144996
+7LyI 144997
+776U 144998
+776C 144999
+6rWJ 145000
+77q7 145001
+8J+Siw== 145002
+4bmj 145003
+05k= 145004
+7Iac 145005
+7Jej 145006
+4pyp 145007
+7JyZ 145008
+77qw 145009
+4bqy 145010
+7J6j 145011
+4p2M 145012
+4piB 145013
+7JWO 145014
+xL0= 145015
+24E= 145016
+44Sx 145017
+65+/ 145018
+7Yy4 145019
+6r2J 145020
+7I+g 145021
+8J+NgA== 145022
+4oaU 145023
+662h 145024
+77uB 145025
+77yE 145026
+8J+SpQ== 145027
+4pib 145028
+7Ze3 145029
+65Gh 145030
+zqA= 145031
+zqQ= 145032
+4oST 145033
+77q3 145034
+zpk= 145035
+64+U 145036
+7Kek 145037
+4pSD 145038
+44S3 145039
+x5I= 145040
+8J+lsA== 145041
+65SV 145042
+7Jql 145043
+7LiE 145044
+7ZuU 145045
+77qH 145046
+77qs 145047
+8J+Yog== 145048
+67mh 145049
+7JS5 145050
+xbM= 145051
+y50= 145052
+7Y6R 145053
+776T 145054
+8J+Smg== 145055
+64qR 145056
+6rq+ 145057
+7Yaw 145058
+w78= 145059
+0IQ= 145060
+64yQ 145061
+672A 145062
+7LeE 145063
+8J+TjQ== 145064
+8J+ZiA== 145065
+4peI 145066
+6r+H 145067
+7LyE 145068
+7Y6r 145069
+8J+Htw== 145070
+4pSL 145071
+4pqg 145072
+67GJ 145073
+7I2w 145074
+7JmI 145075
+yao= 145076
+77qL 145077
+8J+YnA== 145078
+zp8= 145079
+8J+Zgg== 145080
+4pq9 145081
+xYg= 145082
+67mU 145083
+7Yyc 145084
+4LmP 145085
+7Ja5 145086
+7Yit 145087
+8J+lhw== 145088
+44S0 145089
+65Sl 145090
+7K2I 145091
+4oiG 145092
+65az 145093
+67GD 145094
+7J6m 145095
+77uQ 145096
+zpw= 145097
+4pyn 145098
+z40= 145099
+7KCT 145100
+4peV 145101
+65KA 145102
+77uA 145103
+8J+UtA== 145104
+6r2B 145105
+64yI 145106
+646M 145107
+44KO 145108
+4qaB 145109
+7L2n 145110
+76++ 145111
+4p2v 145112
+4LiF 145113
+8J+ZhA== 145114
+4p2A 145115
+8J+UuQ== 145116
+4oeQ 145117
+6rW1 145118
+4oeU 145119
+67aQ 145120
+8J+Smw== 145121
+zr4= 145122
+7YOs 145123
+4p2E 145124
+0qM= 145125
+44Cw 145126
+4oiR 145127
+4pi8 145128
+4omg 145129
+0q8= 145130
+77qv 145131
+6r+o 145132
+4pyW 145133
+ypY= 145134
+7YCA 145135
+6r6A 145136
+7Zed 145137
+4pSj 145138
+446c 145139
+65Sb 145140
+65y4 145141
+77qr 145142
+6r+w 145143
+8J+HuQ== 145144
+x5A= 145145
+25I= 145146
+66O7 145147
+77qW 145148
+0Zo= 145149
+64qg 145150
+25U= 145151
+6rmh 145152
+67+c 145153
+7LK8 145154
+76iR 145155
+66W1 145156
+7I24 145157
+7YWF 145158
+7ZG5 145159
+1oA= 145160
+77OM 145161
+44Wj 145162
+7JGk 145163
+7L2V 145164
+65Wg 145165
+8J+Mvw== 145166
+7YOU 145167
+7JuB 145168
+zrY= 145169
+4p6c 145170
+7IqY 145171
+7ZuX 145172
+66mn 145173
+7ImY 145174
+1bY= 145175
+4bmH 145176
+8J+OgQ== 145177
+772/ 145178
+77yC 145179
+4byQ 145180
+4pyV 145181
+4p6i 145182
+64So 145183
+7Lur 145184
+7K+U 145185
+7LCc 145186
+8J+SsA== 145187
+7YWd 145188
+446P 145189
+67O2 145190
+0pM= 145191
+4oaz 145192
+7IO0 145193
+7YGY 145194
+4paA 145195
+67KZ 145196
+4LiD 145197
+4b22 145198
+xJU= 145199
+4qyH 145200
+66SY 145201
+8J+OtQ== 145202
+4pya 145203
+77qP 145204
+zqE= 145205
+4peJ 145206
+8J+Sqw== 145207
+0Ig= 145208
+7JaE 145209
+7KeZ 145210
+77uD 145211
+8J2Rkg== 145212
+662E 145213
+4p2l 145214
+4p2W 145215
+4pid 145216
+yrk= 145217
+4bil 145218
+4oC/ 145219
+44WF 145220
+6riB 145221
+65Wh 145222
+642l 145223
+4oip 145224
+6ruE 145225
+666M 145226
+0rE= 145227
+4oiX 145228
+66CZ 145229
+77qM 145230
+y5A= 145231
+8J+Ysw== 145232
+8J+RqQ== 145233
+8J+Otg== 145234
+7L+1 145235
+8J+kqQ== 145236
+6rek 145237
+64yU 145238
+77qQ 145239
+z44= 145240
+7Lal 145241
+772K 145242
+4bmt 145243
+66S8 145244
+4par 145245
+7Keg 145246
+4byA 145247
+6ruR 145248
+64yB 145249
+7YC4 145250
+4pmb 145251
+8J+Sng== 145252
+4paw 145253
+8J2Rlg== 145254
+652k 145255
+4KSm 145256
+7LSY 145257
+8J+Yhw== 145258
+65Sk 145259
+zpc= 145260
+8J+Zhw== 145261
+y5s= 145262
+7Kmh 145263
+4oin 145264
+1aU= 145265
+0Zk= 145266
+65Cs 145267
+65aE 145268
+8J+Mtw== 145269
+7JeM 145270
+8J+YpQ== 145271
+64i0 145272
+77ua 145273
+yZs= 145274
+77qE 145275
+77uP 145276
+xYw= 145277
+67Ka 145278
+7Iuj 145279
+77qA 145280
+zpM= 145281
+8J+YjA== 145282
+y5k= 145283
+656P 145284
+8J+UuA== 145285
+8J+Ttw== 145286
+64G9 145287
+7YG9 145288
+8J+SoQ== 145289
+8J+MsQ== 145290
+67qP 145291
+7IGg 145292
+7IOQ 145293
+64+X 145294
+7Liw 145295
+64iV 145296
+zp0= 145297
+4oGJ 145298
+8J+MvA== 145299
+7Yyg 145300
+4ouv 145301
+4YOY 145302
+4pyk 145303
+6rGU 145304
+7YyO 145305
+8J+Srw== 145306
+7I+Z 145307
+7ZeJ 145308
+2a0= 145309
+7L2w 145310
+77q/ 145311
+77ux 145312
+7LGM 145313
+4piV 145314
+8J+OgA== 145315
+xJ0= 145316
+67Cn 145317
+7IK/ 145318
+4ZGV 145319
+8J+Ngw== 145320
+4oeo 145321
+zps= 145322
+66e0 145323
+67OV 145324
+4ZGQ 145325
+4paT 145326
+8J2RnA== 145327
+4pm7 145328
+7YKl 145329
+1bg= 145330
+44ix 145331
+67qA 145332
+7LK4 145333
+77qb 145334
+8J+Phg== 145335
+8J+Hqg== 145336
+4p2T 145337
+xIA= 145338
+7L2l 145339
+8J+Hpw== 145340
+4b23 145341
+4pyC 145342
+7J68 145343
+76eh 145344
+8J+TuA== 145345
+4pmv 145346
+yZQ= 145347
+4b24 145348
+4oyq 145349
+77uW 145350
+76Wn 145351
+4pqr 145352
+4pSX 145353
+8J+MiA== 145354
+77up 145355
+8J+Tsg== 145356
+z4g= 145357
+8J+YoQ== 145358
+8J2Rjg== 145359
+7Jy9 145360
+7Kes 145361
+7KeK 145362
+4b2z 145363
+7Iyk 145364
+64KN 145365
+4omS 145366
+8J+RqA== 145367
+4piY 145368
+06k= 145369
+4oKT 145370
+4oiC 145371
+77mB 145372
+8J+SkA== 145373
+7YWD 145374
+8J+PvQ== 145375
+6reE 145376
+8J+Yjw== 145377
+8J+Mug== 145378
+8J+YlA== 145379
+772r 145380
+4pyO 145381
+67WI 145382
+8J+HuA== 145383
+4oCj 145384
+4p6U 145385
+65iY 145386
+7IOs 145387
+yoM= 145388
+4qyF 145389
+7KmQ 145390
+8J+Zhg== 145391
+8J+OhA== 145392
+xL4= 145393
+4p+2 145394
+4YOQ 145395
+4pi7 145396
+7LGV 145397
+7IGp 145398
+672V 145399
+7Lqj 145400
+8J+RiA== 145401
+8J+Ziw== 145402
+776W 145403
+0po= 145404
+1as= 145405
+7IyI 145406
+67Kn 145407
+8J+Hrg== 145408
+772d 145409
+8J+NgQ== 145410
+7Jel 145411
+xLM= 145412
+672Q 145413
+7Y29 145414
+7ZuR 145415
+4oK5 145416
+44WB 145417
+7JS9 145418
+8J+UgQ== 145419
+4KSv 145420
+6r65 145421
+64mc 145422
+4peh 145423
+7ZWM 145424
+zpg= 145425
+66O5 145426
+7JmT 145427
+8J+Hpg== 145428
+8J+RgA== 145429
+4pSM 145430
+4b+m 145431
+64Sb 145432
+7ISj 145433
+7K2Z 145434
+77Gg 145435
+zp4= 145436
+yrs= 145437
+4b+2 145438
+4p2d 145439
+6rGA 145440
+65a0 145441
+44S5 145442
+8J+Sjg== 145443
+z7k= 145444
+4puF 145445
+77uV 145446
+44Ox 145447
+772b 145448
+64yV 145449
+67m9 145450
+7KWU 145451
+7L+k 145452
+8J+WpA== 145453
+0ZI= 145454
+6rmN 145455
+646A 145456
+7Iuv 145457
+67uk 145458
+8J+Tng== 145459
+8J+Tow== 145460
+8J+YnQ== 145461
+7I25 145462
+7Jeh 145463
+7LCQ 145464
+4b2Q 145465
+77uI 145466
+4pyN 145467
+xI8= 145468
+8J+Mng== 145469
+4oSm 145470
+6r2d 145471
+67uY 145472
+7Iix 145473
+4pSY 145474
+8J+Muw== 145475
+4oK0 145476
+4p6o 145477
+7ZCB 145478
+6raI 145479
+4pii 145480
+8J+YiA== 145481
+772p 145482
+4oSX 145483
+6rCt 145484
+6rC4 145485
+67uR 145486
+7KW0 145487
+7Lul 145488
+76SK 145489
+77uS 145490
+8J+YlQ== 145491
+4piU 145492
+7JiQ 145493
+8J+alw== 145494
+65eE 145495
+66eP 145496
+1b0= 145497
+4pa7 145498
+4p+1 145499
+7Imw 145500
+77uR 145501
+4pmp 145502
+zqU= 145503
+8J+Yow== 145504
+4oqC 145505
+44WC 145506
+7IW4 145507
+7Y+E 145508
+4py9 145509
+7KaZ 145510
+4paj 145511
+6rGN 145512
+6r+L 145513
+7KuE 145514
+7LqH 145515
+8J+HtQ== 145516
+8J+RkQ== 145517
+4pyY 145518
+8J2Rmw== 145519
+7I29 145520
+7LqJ 145521
+76y1 145522
+8J+Uug== 145523
+4oSu 145524
+7YOk 145525
+8J+Hug== 145526
+8J+StQ== 145527
+7YWo 145528
+772R 145529
+zqg= 145530
+7IO5 145531
+7JaV 145532
+7Lm1 145533
+8J+TsQ== 145534
+4KS1 145535
+8J+Rig== 145536
+8J+ShA== 145537
+8J+SnQ== 145538
+44yU 145539
+7JmB 145540
+0Ic= 145541
+4K6Q 145542
+4pa5 145543
+4bSb 145544
+4peY 145545
+67qo 145546
+7YOJ 145547
+7JaM 145548
+8J+Qtg== 145549
+44KR 145550
+y4c= 145551
+xY8= 145552
+4b25 145553
+7IWn 145554
+77mw 145555
+8J2RoQ== 145556
+8J+UnQ== 145557
+8J+Yuw== 145558
+8J+Sgw== 145559
+8J+kpg== 145560
+8J+Nkg== 145561
+7YC1 145562
+4pyG 145563
+67m0 145564
+76ek 145565
+77uZ 145566
+4bSX 145567
+8J+MtA== 145568
+zb4= 145569
+64yR 145570
+7KiL 145571
+7LW4 145572
+8J+OiA== 145573
+8J+PoA== 145574
+4b2x 145575
+24Y= 145576
+4b+W 145577
+4oCb 145578
+7LC8 145579
+7ZWl 145580
+7Ze0 145581
+8J+HrA== 145582
+7LCd 145583
+4oig 145584
+77yH 145585
+4oqZ 145586
+4p2R 145587
+64SL 145588
+656X 145589
+67CJ 145590
+7JeK 145591
+7KKG 145592
+7Yyl 145593
+77Cy 145594
+8J+Tlg== 145595
+8J+Yrg== 145596
+4pqq 145597
+8J+Ymg== 145598
+4p2e 145599
+8J2Rnw== 145600
+8J+Ogg== 145601
+xZU= 145602
+4ZCI 145603
+6rq9 145604
+7LGg 145605
+77qd 145606
+6r+J 145607
+4YOg 145608
+8J+Pgw== 145609
+8J+SuA== 145610
+4p2B 145611
+4pe+ 145612
+2qo= 145613
+4bmD 145614
+7Yqs 145615
+8J+HsQ== 145616
+7Y6t 145617
+8J+Yng== 145618
+676w 145619
+4bmb 145620
+65u4 145621
+4p2C 145622
+6pKz 145623
+4pSQ 145624
+7ZOw 145625
+4p6g 145626
+6rSY 145627
+64WY 145628
+67ul 145629
+7L6F 145630
+8J+YkA== 145631
+4oiq 145632
+8J+RgQ== 145633
+4oi0 145634
+4peB 145635
+67qQ 145636
+7J6k 145637
+7LGX 145638
+8J+Pvg== 145639
+zqc= 145640
+4b27 145641
+4p6l 145642
+7J+I 145643
+77uJ 145644
+4paM 145645
+44Ou 145646
+8J+kpA== 145647
+4oeT 145648
+7Lyg 145649
+4bSP 145650
+66es 145651
+67uj 145652
+8J+SrA== 145653
+8J+Nkw== 145654
+xLg= 145655
+2bk= 145656
+yr8= 145657
+4b2w 145658
+65Wc 145659
+7LCh 145660
+7LC7 145661
+7Y6N 145662
+8J+Orw== 145663
+8J+Ngg== 145664
+8J+Rpw== 145665
+4pmi 145666
+4Yae 145667
+4pmn 145668
+4pqc 145669
+4pyJ 145670
+65Om 145671
+662j 145672
+7IiP 145673
+7JOx 145674
+xa0= 145675
+yoo= 145676
+4pK4 145677
+4oep 145678
+8J+SlA== 145679
+1bU= 145680
+0Ik= 145681
+0rs= 145682
+66ej 145683
+7Juc 145684
+7L+h 145685
+7ZuF 145686
+7Zuk 145687
+77qi 145688
+4pyL 145689
+4oiI 145690
+8J+MjQ== 145691
+ypw= 145692
+64qq 145693
+65K5 145694
+77qy 145695
+4paE 145696
+44WI 145697
+65qk 145698
+7Y6p 145699
+4oio 145700
+8J+kqg== 145701
+4YOa 145702
+6rO2 145703
+7YqV 145704
+8J+YrA== 145705
+4oir 145706
+8J+Riw== 145707
+0pA= 145708
+7Yq/ 145709
+8J+UtQ== 145710
+8J+SqA== 145711
+8J+MmQ== 145712
+64ep 145713
+4pyz 145714
+66iB 145715
+67qE 145716
+7JmR 145717
+7LqF 145718
+7Y+I 145719
+8J2RmQ== 145720
+8J+SmA== 145721
+446l 145722
+4p2P 145723
+4pyw 145724
+76+/ 145725
+67WQ 145726
+7LyQ 145727
+77qx 145728
+1bQ= 145729
+76yA 145730
+4py0 145731
+8J+krQ== 145732
+8J+Rhg== 145733
+4puU 145734
+6reT 145735
+7IyM 145736
+8J+ktw== 145737
+25Q= 145738
+8J+noQ== 145739
+8J+Ykw== 145740
+zpY= 145741
+4o+w 145742
+6rKc 145743
+64uz 145744
+646F 145745
+67CI 145746
+766Q 145747
+8J+PoQ== 145748
+4oaq 145749
+4pOU 145750
+4pyK 145751
+z7I= 145752
+3JA= 145753
+8J+Hsw== 145754
+1oI= 145755
+4pyP 145756
+7JaX 145757
+7KuZ 145758
+8J+Ysg== 145759
+xK0= 145760
+4pmt 145761
+4pSP 145762
+4peM 145763
+8J+Yrw== 145764
+4bWS 145765
+7Yqg 145766
+xLc= 145767
+yoE= 145768
+4KSf 145769
+4bmB 145770
+4byw 145771
+4b+G 145772
+4qs= 145773
+4qu4 145774
+642r 145775
+7LOH 145776
+7Lyk 145777
+7Zuo 145778
+8J+Snw== 145779
+yoA= 145780
+yrM= 145781
+65OQ 145782
+4pWw 145783
+4p2H 145784
+x4A= 145785
+x5Q= 145786
+ybQ= 145787
+4pia 145788
+4pic 145789
+6raC 145790
+7KuS 145791
+7LGI 145792
+8J+HqA== 145793
+8J+OpQ== 145794
+8J+TnQ== 145795
+xKc= 145796
+8J2RkA== 145797
+24g= 145798
+4KSs 145799
+7KyQ 145800
+7Zel 145801
+4pmo 145802
+8J+NtA== 145803
+77mP 145804
+y4s= 145805
+8J+lug== 145806
+4pao 145807
+7ZmL 145808
+4oiF 145809
+64GZ 145810
+656g 145811
+7Ial 145812
+4oCW 145813
+8J+kmA== 145814
+8J+Quw== 145815
+4bWV 145816
+x50= 145817
+4piP 145818
+77qa 145819
+77uC 145820
+8J+aqQ== 145821
+7Iif 145822
+y4o= 145823
+4qS1 145824
+8J+Spw== 145825
+44WN 145826
+66mp 145827
+xqw= 145828
+zoc= 145829
+4oen 145830
+4pOa 145831
+7IKv 145832
+7Iiv 145833
+64aL 145834
+4pyv 145835
+8J+agA== 145836
+2pg= 145837
+2qg= 145838
+4pyt 145839
+6rKF 145840
+7Yyw 145841
+7ZyZ 145842
+8J+Mig== 145843
+8J+Okw== 145844
+8J+YmQ== 145845
+y4M= 145846
+8J+SgQ== 145847
+8J+Rjg== 145848
+4pi5 145849
+8J+Yqw== 145850
+8J+Suw== 145851
+64K1 145852
+7J2K 145853
+7Yy7 145854
+0rM= 145855
+4b2y 145856
+4p6e 145857
+64KR 145858
+652I 145859
+7KOk 145860
+77uv 145861
+8J+HqQ== 145862
+8J+lsw== 145863
+4pK8 145864
+8J+miw== 145865
+4piC 145866
+8J+YsA== 145867
+8J+Zgw== 145868
+8J+Ykg== 145869
+244= 145870
+z5U= 145871
+4bik 145872
+66O9 145873
+7Iql 145874
+8J2RiQ== 145875
+yZA= 145876
+8J+Njg== 145877
+4pWv 145878
+4pW5 145879
+4Lqy 145880
+776g 145881
+67mV 145882
+77qG 145883
+yro= 145884
+06c= 145885
+4oag 145886
+64OH 145887
+7I6I 145888
+7J+k 145889
+77Gi 145890
+4pWs 145891
+4pig 145892
+8J+Oig== 145893
+442N 145894
+446O 145895
+4piw 145896
+4pyD 145897
+44WJ 145898
+66+I 145899
+67mk 145900
+7I+t 145901
+8J2Rog== 145902
+8J+Qvg== 145903
+xYs= 145904
+8J+Rtg== 145905
+4pSb 145906
+77+i 145907
+4YOh 145908
+xLw= 145909
+xYY= 145910
+0ZA= 145911
+7IOb 145912
+7JiM 145913
+7LGk 145914
+7YWB 145915
+7ZqD 145916
+77OK 145917
+8J2RlA== 145918
+8J+Hqw== 145919
+4ouw 145920
+8J+YqA== 145921
+4oKp 145922
+1aw= 145923
+4biN 145924
+4bu0 145925
+4oaY 145926
+4piv 145927
+44WP 145928
+7KCs 145929
+4pmU 145930
+8J+UlA== 145931
+8J+YoA== 145932
+8J+Zig== 145933
+4K6c 145934
+4bmF 145935
+4peQ 145936
+4p2I 145937
+4p69 145938
+7IOF 145939
+8J2RoA== 145940
+xqI= 145941
+4ouZ 145942
+6rCb 145943
+6521 145944
+66Of 145945
+7I+c 145946
+77qB 145947
+8J+SrQ== 145948
+4oqD 145949
+8J+QsA== 145950
+44WM 145951
+3JM= 145952
+4p6V 145953
+4b2B 145954
+7JWz 145955
+8J2RnQ== 145956
+8J+OrA== 145957
+yaE= 145958
+4KSX 145959
+4ZCJ 145960
+7Kmc 145961
+7Lan 145962
+77OJ 145963
+77uF 145964
+8J2Qng== 145965
+4KS2 145966
+8J+Tog== 145967
+8J+Niw== 145968
+8J+ShQ== 145969
+776V 145970
+4qyG 145971
+4oi1 145972
+8J+kkQ== 145973
+4YOj 145974
+xoQ= 145975
+0bk= 145976
+4byU 145977
+6rCg 145978
+6rSM 145979
+6reQ 145980
+65u0 145981
+7LGY 145982
+766t 145983
+77q5 145984
+77q+ 145985
+4pyX 145986
+4p2m 145987
+8J+Rpg== 145988
+4YOX 145989
+2bI= 145990
+4b20 145991
+4oiP 145992
+4pyu 145993
+6rmw 145994
+67K1 145995
+7ISA 145996
+7Kmd 145997
+77qe 145998
+77q9 145999
+8J+HrQ== 146000
+y4I= 146001
+8J+NkQ== 146002
+8J+NjA== 146003
+8J+Uuw== 146004
+6rms 146005
+7Iqt 146006
+7Jy3 146007
+8J+bkQ== 146008
+x6c= 146009
+67yb 146010
+77qh 146011
+77q6 146012
+8J2Rmg== 146013
+8J+Tpg== 146014
+8J+Ujg== 146015
+8J+Xkw== 146016
+4YOU 146017
+4pyS 146018
+4pyh 146019
+8J+MtQ== 146020
+4pSV 146021
+64Cd 146022
+8J+Nig== 146023
+4piD 146024
+7JiF 146025
+4Kas 146026
+8J+mgQ== 146027
+4o6v 146028
+8J+QlQ== 146029
+0b8= 146030
+4KWk 146031
+4LyL 146032
+6reI 146033
+7KuM 146034
+8J+HsA== 146035
+4p2J 146036
+7KuA 146037
+7Z2E 146038
+8J2Qog== 146039
+8J+aqA== 146040
+4pmk 146041
+8J+YqQ== 146042
+8J+NjQ== 146043
+8J+YkQ== 146044
+8J+amg== 146045
+1oQ= 146046
+66s= 146047
+66u8 146048
+4KSP 146049
+4b+3 146050
+4oyp 146051
+4piQ 146052
+4p6j 146053
+6rix 146054
+6ry/ 146055
+64Sd 146056
+7I+0 146057
+7Jqk 146058
+7L+x 146059
+7Y6Q 146060
+8J+Sog== 146061
+7LSQ 146062
+4oeR 146063
+4pST 146064
+4oG+ 146065
+3J0= 146066
+8J+NsA== 146067
+4rSw 146068
+xo8= 146069
+z58= 146070
+2ro= 146071
+24M= 146072
+4YSS 146073
+4oif 146074
+4p2N 146075
+44Sy 146076
+7JyF 146077
+7KSP 146078
+8J+Hsg== 146079
+6rqE 146080
+8J+OpA== 146081
+4pyj 146082
+4rid 146083
+77i1 146084
+4Lqn 146085
+4YCZ 146086
+4pWg 146087
+1a8= 146088
+4o+p 146089
+8J2Row== 146090
+8J+Sow== 146091
+xZg= 146092
+4KWQ 146093
+4oGD 146094
+4oyY 146095
+6ruM 146096
+7IyU 146097
+8J2RmA== 146098
+8J+kkw== 146099
+1b8= 146100
+4KSt 146101
+4oya 146102
+4pyd 146103
+8J+QvA== 146104
+y4w= 146105
+4pWa 146106
+76aX 146107
+4p2V 146108
+4pWj 146109
+8J+QsQ== 146110
+4K6k 146111
+0b4= 146112
+4KSa 146113
+4KSc 146114
+7IiE 146115
+7Jqc 146116
+8J+Org== 146117
+yZI= 146118
+2rc= 146119
+4LqN 146120
+4oa1 146121
+4oiY 146122
+4p2K 146123
+67+N 146124
+7JCI 146125
+7JqY 146126
+7K+n 146127
+7YOv 146128
+7JaP 146129
+77iw 146130
+8J+Hrw== 146131
+8J+nmg== 146132
+8J+YtQ== 146133
+8J+Ytw== 146134
+8J+Msw== 146135
+4Lql 146136
+xIk= 146137
+xKU= 146138
+4py2 146139
+4b++ 146140
+4oqx 146141
+4pi+ 146142
+6rCJ 146143
+6ryw 146144
+67qR 146145
+8J+Uig== 146146
+8J+WkA== 146147
+xaQ= 146148
+0qs= 146149
+4K6u 146150
+4oyI 146151
+4peX 146152
+64S1 146153
+64Wc 146154
+65y5 146155
+8J2RpQ== 146156
+8J+Svw== 146157
+8J+bkg== 146158
+ypI= 146159
+4Z6T 146160
+8J+QnQ== 146161
+8J+mhA== 146162
+8J+Ntw== 146163
+4pif 146164
+77i2 146165
+8J+knw== 146166
+1LE= 146167
+4oay 146168
+4oiO 146169
+4pyr 146170
+64e9 146171
+64+Q 146172
+65WE 146173
+76az 146174
+76ed 146175
+77qZ 146176
+8J+Ruw== 146177
+8J+Tug== 146178
+6rW8 146179
+7Iyp 146180
+8J+Msg== 146181
+yLE= 146182
+7ZSV 146183
+8J+YpA== 146184
+44yi 146185
+ypQ= 146186
+4KSh 146187
+4byI 146188
+646D 146189
+66mx 146190
+666I 146191
+8J2Qqw== 146192
+4oqV 146193
+64Og 146194
+67us 146195
+7YuU 146196
+1aQ= 146197
+4byx 146198
+4pyl 146199
+4piE 146200
+4oil 146201
+4pqV 146202
+8J+RhA== 146203
+8J+OhQ== 146204
+4LqZ 146205
+4pSs 146206
+4b21 146207
+1b4= 146208
+1oE= 146209
+4peU 146210
+6r+N 146211
+65a1 146212
+66mO 146213
+6660 146214
+7JW0 146215
+4YOc 146216
+4byh 146217
+4pSK 146218
+4pWu 146219
+4pe8 146220
+8J+Nvg== 146221
+8J+bjQ== 146222
+8J+Rlw== 146223
+8J+kng== 146224
+4pyE 146225
+1YA= 146226
+4Kay 146227
+y4k= 146228
+4p+o 146229
+xK8= 146230
+z4o= 146231
+4bSc 146232
+67mz 146233
+77OL 146234
+77+g 146235
+xKo= 146236
+4oK4 146237
+4pyx 146238
+6ruQ 146239
+64u7 146240
+66e4 146241
+7J6/ 146242
+7Kmo 146243
+7K2Q 146244
+7LC/ 146245
+7YWf 146246
+8J2Qpw== 146247
+8J2RkQ== 146248
+8J+Mjg== 146249
+8J+Trg== 146250
+8J+VlA== 146251
+4peZ 146252
+4pe7 146253
+4p6n 146254
+7J+d 146255
+4pys 146256
+44Ow 146257
+4oGI 146258
+4pOY 146259
+8J+SjA== 146260
+76yD 146261
+4LqU 146262
+7JSw 146263
+8J+Yqg== 146264
+14A= 146265
+7IOo 146266
+762L 146267
+8J+NlQ== 146268
+8J+YtA== 146269
+z7M= 146270
+4byE 146271
+4b2F 146272
+4oei 146273
+4pWt 146274
+7Ji7 146275
+7Yqk 146276
+3Jg= 146277
+4qS0 146278
+4peN 146279
+4Z6f 146280
+8J+Nug== 146281
+4Z6a 146282
+8J+Pig== 146283
+8J+Qtw== 146284
+yow= 146285
+4b26 146286
+4oG7 146287
+6r2M 146288
+64iX 146289
+65eP 146290
+7L+w 146291
+7YC8 146292
+7Y2F 146293
+77ey 146294
+8J+Mjw== 146295
+8J+Nqw== 146296
+8J+Nsw== 146297
+8J+OsA== 146298
+8J+RsA== 146299
+8J+Ssg== 146300
+4aWZ 146301
+8J+Qnw== 146302
+77+h 146303
+8J+Xow== 146304
+8J+NnA== 146305
+4pyy 146306
+446i 146307
+8J+UsA== 146308
+4by4 146309
+4b2R 146310
+xI4= 146311
+4YSA 146312
+4pmV 146313
+66Cd 146314
+7Ii0 146315
+762t 146316
+05w= 146317
+1IA= 146318
+64Cc 146319
+64OU 146320
+7Iqb 146321
+7KuR 146322
+7Lql 146323
+7Lqs 146324
+8J2Rpg== 146325
+8J+Utg== 146326
+7L6o 146327
+8J2Qmg== 146328
+8J+Nuw== 146329
+8J+SjQ== 146330
+8J+koQ== 146331
+8J+Vig== 146332
+4r2H 146333
+4pOQ 146334
+8J+NrQ== 146335
+8J+Nqg== 146336
+8J+Uhg== 146337
+0qE= 146338
+4bSH 146339
+yZc= 146340
+3JQ= 146341
+4oSO 146342
+4p2D 146343
+65eA 146344
+77KU 146345
+77qI 146346
+8J2Quw== 146347
+8J+Sig== 146348
+8J+aqw== 146349
+0bA= 146350
+0bM= 146351
+4KS3 146352
+4peg 146353
+8J+RpA== 146354
+776H 146355
+4piT 146356
+8J+NtQ== 146357
+8J+kqA== 146358
+4pat 146359
+4K60 146360
+3KI= 146361
+3Kw= 146362
+4LSu 146363
+8J+Vug== 146364
+1Lk= 146365
+1aM= 146366
+4LSv 146367
+4bSA 146368
+4oyJ 146369
+4pyQ 146370
+4p6m 146371
+6rm9 146372
+64yc 146373
+8J+PpQ== 146374
+8J+TqQ== 146375
+0rk= 146376
+05g= 146377
+4KSF 146378
+4p2n 146379
+xpc= 146380
+4pe9 146381
+8J+Rqw== 146382
+8J+Opw== 146383
+8J+Row== 146384
+4py7 146385
+8J+ZhQ== 146386
+8J+Ylg== 146387
+8J+Srg== 146388
+4Lqw 146389
+8J+UnA== 146390
+8J+NhA== 146391
+8J+knQ== 146392
+4YOd 146393
+4Z6A 146394
+4oem 146395
+yr4= 146396
+0q4= 146397
+1bw= 146398
+4KSG 146399
+4peF 146400
+4pqT 146401
+4pqW 146402
+6r+p 146403
+66+E 146404
+7JCQ 146405
+7J6w 146406
+7Ket 146407
+7YuL 146408
+7Y6o 146409
+7Zmn 146410
+77KR 146411
+8J+Olw== 146412
+2bM= 146413
+8J+RuA== 146414
+4Kau 146415
+8J+RlQ== 146416
+2rU= 146417
+4oC+ 146418
+4p6w 146419
+8J+Rrw== 146420
+8J+OvA== 146421
+8J+PgQ== 146422
+xLo= 146423
+yo8= 146424
+2rM= 146425
+4o+x 146426
+6r2I 146427
+652M 146428
+7IyJ 146429
+7Je3 146430
+7J60 146431
+7Ze5 146432
+7Zyo 146433
+8J2Xsg== 146434
+8J+MkA== 146435
+8J+OmQ== 146436
+8J+PtQ== 146437
+7ZuZ 146438
+8J2RhQ== 146439
+8J+Ytg== 146440
+4pOF 146441
+4pWl 146442
+8J+Njw== 146443
+76aO 146444
+1ak= 146445
+8J2QhA== 146446
+06M= 146447
+2r8= 146448
+4pma 146449
+8J+Ulw== 146450
+4bir 146451
+4ouu 146452
+4pam 146453
+4pu9 146454
+4py1 146455
+44WG 146456
+44WK 146457
+64SZ 146458
+652o 146459
+66WE 146460
+7ISm 146461
+7Kew 146462
+7Ke5 146463
+7YmI 146464
+76eR 146465
+77uH 146466
+8J+Mvg== 146467
+8J+Plg== 146468
+8J+QkQ== 146469
+8J+Ssw== 146470
+8J+Thg== 146471
+24c= 146472
+3JU= 146473
+4b29 146474
+64Sc 146475
+4LSy 146476
+4LSz 146477
+4Lqt 146478
+4YOb 146479
+4p2U 146480
+4pGF 146481
+4YOl 146482
+8J+ThQ== 146483
+4p6z 146484
+4bS1 146485
+77mh 146486
+77m2 146487
+zoY= 146488
+4KSl 146489
+4Ym1 146490
+4p2Z 146491
+4p2x 146492
+64mg 146493
+646g 146494
+64+b 146495
+67+F 146496
+7JS4 146497
+7ZGv 146498
+7Z6J 146499
+7Z6b 146500
+76eE 146501
+762Y 146502
+77qm 146503
+77u4 146504
+8J2Rgg== 146505
+8J2Rjw== 146506
+z5E= 146507
+2qA= 146508
+4YCU 146509
+4Z6U 146510
+4bmi 146511
+64S4 146512
+8J2QqA== 146513
+8J+HtA== 146514
+1bA= 146515
+8J+RoA== 146516
+8J+Nhg== 146517
+8J+PgA== 146518
+8J+RkA== 146519
+8J+Nhw== 146520
+8J+Qow== 146521
+4Yit 146522
+3Ko= 146523
+8J+MgA== 146524
+4Z6Y 146525
+4oeE 146526
+8J2QgA== 146527
+ypk= 146528
+4pS8 146529
+8J+Pvw== 146530
+xrc= 146531
+yKA= 146532
+0b0= 146533
+4oKo 146534
+6rSt 146535
+6rm7 146536
+65So 146537
+7IiA 146538
+7L6w 146539
+7YaI 146540
+766n 146541
+76+9 146542
+8J+UhQ== 146543
+8J+Urg== 146544
+xaI= 146545
+yrA= 146546
+0bg= 146547
+4KSj 146548
+4oqX 146549
+66qE 146550
+77m3 146551
+77qF 146552
+8J2QtQ== 146553
+8J+Mtg== 146554
+8J+TsA== 146555
+8J+Utw== 146556
+8J+Wkg== 146557
+8J+ksg== 146558
+64mp 146559
+8J+Ohg== 146560
+8J+nkA== 146561
+8J+Nrg== 146562
+4oa6 146563
+4p2i 146564
+8J+Rqg== 146565
+8J+RsQ== 146566
+4oah 146567
+4Z6P 146568
+2pU= 146569
+8J+NuQ== 146570
+8J+SgA== 146571
+y64= 146572
+06g= 146573
+1oU= 146574
+4KSH 146575
+4oKh 146576
+4oiV 146577
+4piJ 146578
+6rm8 146579
+6ryQ 146580
+7L24 146581
+8J2QrA== 146582
+8J+PhQ== 146583
+8J+RmQ== 146584
+8J+SiQ== 146585
+8J+kmQ== 146586
+yJg= 146587
+ybM= 146588
+ybk= 146589
+2bo= 146590
+4YCE 146591
+4b+z 146592
+4pqY 146593
+4p2G 146594
+64aJ 146595
+7JaN 146596
+7JiH 146597
+7KWY 146598
+7ZaF 146599
+7ZmR 146600
+766K 146601
+77+t 146602
+8J2SkA== 146603
+8J2Xog== 146604
+8J+Ulg== 146605
+8J+UqA== 146606
+8J+akQ== 146607
+8J+asg== 146608
+xrg= 146609
+4pel 146610
+8J2QrQ== 146611
+8J+NvQ== 146612
+4peR 146613
+4pOH 146614
+8J+UsQ== 146615
+4py8 146616
+77mD 146617
+4pWx 146618
+44CX 146619
+8J+Piw== 146620
+8J+atA== 146621
+8J2Qrg== 146622
+xJo= 146623
+1Y8= 146624
+xLY= 146625
+4YOR 146626
+4bms 146627
+xIg= 146628
+xJI= 146629
+0rA= 146630
+05U= 146631
+4pA= 146632
+4pCj 146633
+4pei 146634
+4pqZ 146635
+44WX 146636
+6rCs 146637
+6rOq 146638
+6ruA 146639
+64S0 146640
+646B 146641
+652U 146642
+66y9 146643
+662N 146644
+7Iez 146645
+7LC5 146646
+7Yy5 146647
+7Z6d 146648
+766L 146649
+77aI 146650
+8J2Sgg== 146651
+8J+lgA== 146652
+8J+mhQ== 146653
+ypg= 146654
+4byR 146655
+4oGO 146656
+8J+Nng== 146657
+4oaW 146658
+4oaZ 146659
+8J+Ogw== 146660
+4oSh 146661
+4oux 146662
+8J+UjQ== 146663
+4LKo 146664
+4bWD 146665
+4pSr 146666
+4qa/ 146667
+8J+Huw== 146668
+xqQ= 146669
+0o8= 146670
+0rc= 146671
+24k= 146672
+4K6V 146673
+4biz 146674
+76yx 146675
+8J+GlA== 146676
+2q0= 146677
+26Y= 146678
+4YWh 146679
+4oS5 146680
+6r+O 146681
+65WU 146682
+67yJ 146683
+7Jqn 146684
+7LK1 146685
+7LSo 146686
+7YqI 146687
+7ZaQ 146688
+8J2XmA== 146689
+8J+Hvw== 146690
+8J+Olg== 146691
+8J+RhQ== 146692
+8J+TmA== 146693
+8J+amQ== 146694
+8J+btQ== 146695
+4La9 146696
+4pu1 146697
+8J2Qsw== 146698
+8J2QuA== 146699
+4pqU 146700
+8J+RrQ== 146701
+05E= 146702
+4pSv 146703
+8J+Fvw== 146704
+8J+YuQ== 146705
+77+r 146706
+4ryk 146707
+8J+Shw== 146708
+8J+Tjg== 146709
+8J+Wiw== 146710
+4Ka4 146711
+8J2QjQ== 146712
+xLI= 146713
+z4s= 146714
+0aw= 146715
+2qw= 146716
+3JI= 146717
+4bSs 146718
+76iE 146719
+yaM= 146720
+y5E= 146721
+z7U= 146722
+0p0= 146723
+26U= 146724
+3KA= 146725
+4Lmb 146726
+4YOV 146727
+4YqV 146728
+4b62 146729
+4oK3 146730
+4oe+ 146731
+4pWp 146732
+4paQ 146733
+4piq 146734
+4piu 146735
+4p2a 146736
+4p2t 146737
+4p6x 146738
+4rWO 146739
+44+K 146740
+66mT 146741
+7Je+ 146742
+7KqE 146743
+7ZOM 146744
+7ZW8 146745
+762s 146746
+8J2Rhg== 146747
+8J2Rng== 146748
+8J2Wig== 146749
+8J+OuA== 146750
+8J+PhA== 146751
+8J+RtQ== 146752
+8J+SoA== 146753
+8J+UmA== 146754
+8J+lgg== 146755
+xao= 146756
+4LeD 146757
+4bS8 146758
+4oqw 146759
+67OP 146760
+67Sj 146761
+76Wc 146762
+8J+TiA== 146763
+8J+Vrw== 146764
+8J+ngA== 146765
+4pmQ 146766
+8J+Glw== 146767
+8J+TlQ== 146768
+8J+ngQ== 146769
+3Ks= 146770
+4p2Q 146771
+1ZU= 146772
+4L2V 146773
+4p6d 146774
+4KaV 146775
+8J2Qtg== 146776
+yaI= 146777
+zoQ= 146778
+4Yai 146779
+4oKx 146780
+1Y0= 146781
+4KGV 146782
+4bSw 146783
+4bip 146784
+4pu3 146785
+4p2u 146786
+6qGT 146787
+64+k 146788
+65eQ 146789
+67WM 146790
+7JGI 146791
+7Y+/ 146792
+7Ze1 146793
+8J2Qjg== 146794
+8J+GmA== 146795
+8J+Pnw== 146796
+yaU= 146797
+1bs= 146798
+4KGU 146799
+4KSW 146800
+4bS4 146801
+4o6Z 146802
+4o6l 146803
+4o+z 146804
+64GV 146805
+64qJ 146806
+7KGN 146807
+7Lmh 146808
+76a2 146809
+76yf 146810
+766r 146811
+766v 146812
+77GD 146813
+77e7 146814
+77q1 146815
+8J2XlA== 146816
+8J2XoQ== 146817
+8J+OqA== 146818
+8J+Ukg== 146819
+2ps= 146820
+4KSn 146821
+4p65 146822
+4YCA 146823
+8J+NhQ== 146824
+4pek 146825
+4KSg 146826
+8J+QpQ== 146827
+4YOS 146828
+8J+PnQ== 146829
+8J+NvA== 146830
+44yn 146831
+4p2b 146832
+8J+QiA== 146833
+4Kav 146834
+4YCe 146835
+44CW 146836
+4Z6Z 146837
+4Kaq 146838
+1YY= 146839
+4oqG 146840
+4py+ 146841
+8J+Qlw== 146842
+77m/ 146843
+xKY= 146844
+3J8= 146845
+4LKg 146846
+4LKl 146847
+4Z6J 146848
+4bSl 146849
+4bSp 146850
+4b2A 146851
+4b2h 146852
+4oaV 146853
+4p6v 146854
+6qGR 146855
+65Gj 146856
+67GM 146857
+7IiR 146858
+7JyU 146859
+7J69 146860
+7KiN 146861
+8J2RgA== 146862
+8J+MjA== 146863
+8J+Npg== 146864
+8J+NqQ== 146865
+8J+Qmg== 146866
+8J+Tkg== 146867
+8J+TuQ== 146868
+8J+lkQ== 146869
+xIs= 146870
+y5c= 146871
+0as= 146872
+1aI= 146873
+2rA= 146874
+4oyA 146875
+4peC 146876
+4pej 146877
+4pyb 146878
+4p2S 146879
+4p2Y 146880
+4p6Z 146881
+4p6y 146882
+446N 146883
+6qGQ 146884
+656W 146885
+7Iqd 146886
+7Juk 146887
+7KGL 146888
+7Kiw 146889
+7ZeZ 146890
+76W4 146891
+77ON 146892
+77uO 146893
+8J2Rkw== 146894
+8J+Tig== 146895
+8J+avA== 146896
+76aB 146897
+8J2Vkg== 146898
+8J+RnA== 146899
+8J+Rvw== 146900
+8J+HvQ== 146901
+4LeE 146902
+4pa0 146903
+442J 146904
+4oqH 146905
+8J+nuA== 146906
+2qE= 146907
+4r6D 146908
+8J+Xuw== 146909
+4pOR 146910
+8J+kuA== 146911
+8J+krw== 146912
+6pKw 146913
+8J2Qkw== 146914
+4pS0 146915
+6pKx 146916
+4YCY 146917
+4puE 146918
+77m5 146919
+05Q= 146920
+4YOx 146921
+3KE= 146922
+354= 146923
+4pmP 146924
+4py4 146925
+7JGo 146926
+8J2QnQ== 146927
+8J2QpQ== 146928
+8J+NiQ== 146929
+8J+RvA== 146930
+8J+lnQ== 146931
+xpQ= 146932
+3aw= 146933
+4KSr 146934
+4Lqa 146935
+4bS0 146936
+4b2W 146937
+4oK2 146938
+4o6i 146939
+4p2F 146940
+4p+r 146941
+446b 146942
+666o 146943
+67qM 146944
+67yY 146945
+7Iad 146946
+7Jyz 146947
+7J6M 146948
+7KOX 146949
+7KqY 146950
+7Lu5 146951
+77e8 146952
+77qC 146953
+8J2QtA== 146954
+8J2QvA== 146955
+8J+Mmg== 146956
+8J+Pqw== 146957
+8J+SpA== 146958
+8J+Stg== 146959
+8J+SvA== 146960
+ypU= 146961
+yr0= 146962
+4rKf 146963
+44mg 146964
+6qGS 146965
+65yA 146966
+7IO+ 146967
+7Lik 146968
+76WB 146969
+8J2aig== 146970
+8J+agw== 146971
+4p6b 146972
+7IW0 146973
+4YSL 146974
+4oeX 146975
+76e3 146976
+4piW 146977
+8J+Qpg== 146978
+4ric 146979
+8J+StA== 146980
+8J+kmg== 146981
+44qX 146982
+4oyb 146983
+4Yib 146984
+4Ly6 146985
+4r2J 146986
+8J+Pog== 146987
+4pOe 146988
+4pi9 146989
+44CZ 146990
+8J+krg== 146991
+xZA= 146992
+4YOs 146993
+8J2Xuw== 146994
+8J+Nlg== 146995
+xoo= 146996
+yp8= 146997
+34s= 146998
+4KSL 146999
+4bWU 147000
+4b+D 147001
+4oSJ 147002
+4oyL 147003
+4o+y 147004
+4pOI 147005
+4pOi 147006
+4pWU 147007
+4pqR 147008
+4p2L 147009
+4p2O 147010
+4rWc 147011
+4rWj 147012
+65KI 147013
+65yB 147014
+67aH 147015
+7I27 147016
+7Jit 147017
+7Kei 147018
+7ZeA 147019
+76eK 147020
+76y4 147021
+77Gh 147022
+8J2Qug== 147023
+8J2Rpw== 147024
+8J2Ypg== 147025
+8J+TpQ== 147026
+8J+Ynw== 147027
+8J+lkA== 147028
+xJY= 147029
+yag= 147030
+4YCQ 147031
+4YOT 147032
+4bqT 147033
+4by2 147034
+4b2E 147035
+4oKk 147036
+4oyc 147037
+4oyf 147038
+4o6g 147039
+4pu4 147040
+4rWN 147041
+4rWP 147042
+4rWT 147043
+44CY 147044
+67e4 147045
+7YW8 147046
+76aM 147047
+762E 147048
+762O 147049
+8J2Zmg== 147050
+8J2amA== 147051
+4LyT 147052
+662F 147053
+4ZCb 147054
+446+ 147055
+76iA 147056
+8J+XvQ== 147057
+4pme 147058
+y5Y= 147059
+4pee 147060
+8J+kqw== 147061
+8J+Ylw== 147062
+772m 147063
+8J+kog== 147064
+4oGH 147065
+44C1 147066
+8J+NlA== 147067
+4Yqg 147068
+8J+YvA== 147069
+8J2Xrg== 147070
+8J+Qsw== 147071
+8J2Qiw== 147072
+8J+Gmg== 147073
+8J+Umw== 147074
+0bs= 147075
+3Kg= 147076
+4K6y 147077
+4pye 147078
+4rWZ 147079
+6rWj 147080
+7Lio 147081
+8J2QnA== 147082
+8J2YsA== 147083
+8J+UvQ== 147084
+x7s= 147085
+x78= 147086
+yoc= 147087
+zpA= 147088
+0IA= 147089
+0aE= 147090
+0bI= 147091
+0pI= 147092
+2bY= 147093
+35U= 147094
+4Lax 147095
+4ZCB 147096
+4oGe 147097
+4pan 147098
+4puI 147099
+4pyc 147100
+4py5 147101
+4p+5 147102
+4qSH 147103
+6rKK 147104
+6r6c 147105
+66+Q 147106
+67OQ 147107
+7IWp 147108
+7JCs 147109
+7JG5 147110
+76SU 147111
+76aa 147112
+76yg 147113
+762U 147114
+77q2 147115
+8J2Sjw== 147116
+8J2Whg== 147117
+8J2Xtg== 147118
+8J+Pgg== 147119
+8J+QvQ== 147120
+8J+SqQ== 147121
+8J+TvQ== 147122
+8J+XqA== 147123
+8J+Xug== 147124
+8J+YuA== 147125
+8J+lpw== 147126
+xZc= 147127
+yo4= 147128
+0pk= 147129
+17I= 147130
+4KSI 147131
+4by0 147132
+4b+R 147133
+4rWJ 147134
+44WT 147135
+7L20 147136
+8J2Wkw== 147137
+8J+Tlw== 147138
+8J+Uqg== 147139
+8J+WjQ== 147140
+z5I= 147141
+8J+RrA== 147142
+4YOZ 147143
+4oas 147144
+4pSk 147145
+4pu5 147146
+4pmf 147147
+8J+atg== 147148
+8J+Rvg== 147149
+4oiL 147150
+8J+Qrw== 147151
+4LyO 147152
+4py3 147153
+76iZ 147154
+4pS7 147155
+8J+RuQ== 147156
+4YSJ 147157
+4Lqq 147158
+4r6P 147159
+4r2F 147160
+446W 147161
+0bQ= 147162
+1a4= 147163
+2rw= 147164
+4YCV 147165
+4Ya8 147166
+662P 147167
+8J+QuA== 147168
+8J+aow== 147169
+xp0= 147170
+1Ls= 147171
+4YOi 147172
+8J+Nrw== 147173
+yaY= 147174
+1aY= 147175
+4pmL 147176
+76yr 147177
+8J2Xpg== 147178
+x5o= 147179
+ybE= 147180
+4KSJ 147181
+4bSE 147182
+4pmT 147183
+4puw 147184
+4p+q 147185
+64OY 147186
+66K4 147187
+7IKR 147188
+766U 147189
+8J2Vlg== 147190
+8J2Xpw== 147191
+8J+HvA== 147192
+8J+Tiw== 147193
+8J+anA== 147194
+8J+lpA== 147195
+xK4= 147196
+xbc= 147197
+34o= 147198
+4KWl 147199
+4K6q 147200
+4Z6E 147201
+4bWA 147202
+4biF 147203
+4byi 147204
+4oid 147205
+4oq5 147206
+4pK2 147207
+4pW0 147208
+4pux 147209
+4puz 147210
+4pu6 147211
+4p6f 147212
+44+E 147213
+6riU 147214
+6rmf 147215
+64ew 147216
+67m7 147217
+7IKl 147218
+7Ju7 147219
+7LCf 147220
+7YOw 147221
+7Ya6 147222
+7Zq9 147223
+76S0 147224
+76W+ 147225
+77Od 147226
+8J2Qpg== 147227
+8J2SnA== 147228
+8J2Snw== 147229
+8J2alw== 147230
+8J+OrQ== 147231
+8J+Pkw== 147232
+8J+Psw== 147233
+8J+Pug== 147234
+8J+QjQ== 147235
+8J+Rgw== 147236
+8J+Sjw== 147237
+8J+klg== 147238
+8J+ktQ== 147239
+1bI= 147240
+4rWU 147241
+65is 147242
+76aj 147243
+yoI= 147244
+4Yar 147245
+4Z6R 147246
+8J2Wjg== 147247
+8J2Xlg== 147248
+4YSD 147249
+4oeg 147250
+4YCh 147251
+4L2E 147252
+4p64 147253
+76aZ 147254
+4oea 147255
+8J+QrA== 147256
+8J+Qog== 147257
+4r6S 147258
+8J+QpA== 147259
+8J+Uqw== 147260
+44Ce 147261
+77i6 147262
+8J+Yug== 147263
+4r20 147264
+8J+GlQ== 147265
+4oG/ 147266
+8J+NqA== 147267
+4LKV 147268
+8J+amA== 147269
+4Z6F 147270
+4KaF 147271
+4Z6i 147272
+4Kic 147273
+4pqM 147274
+44C9 147275
+4Le0 147276
+4pOb 147277
+4YCc 147278
+7Iao 147279
+y6k= 147280
+3Jc= 147281
+4ou8 147282
+8J+ZiQ== 147283
+xYo= 147284
+yZM= 147285
+yrI= 147286
+zrA= 147287
+0bw= 147288
+1L8= 147289
+4KGQ 147290
+4Lyc 147291
+4L2m 147292
+4bac 147293
+4oKy 147294
+4oao 147295
+4oql 147296
+4pWn 147297
+4pmc 147298
+44uh 147299
+67Ss 147300
+67aR 147301
+7Im/ 147302
+7I6F 147303
+7KCx 147304
+7LCn 147305
+77Kh 147306
+8J2Smw== 147307
+8J2Vow== 147308
+8J2XnA== 147309
+8J+Nsg== 147310
+8J+OqQ== 147311
+8J+QkA== 147312
+8J+QoA== 147313
+8J+RvQ== 147314
+8J+SkQ== 147315
+8J+TnA== 147316
+8J+VtQ== 147317
+8J+ajA== 147318
+8J+bow== 147319
+yos= 147320
+068= 147321
+2bg= 147322
+35Q= 147323
+35k= 147324
+4KGT 147325
+4bSN 147326
+4bi/ 147327
+4o+6 147328
+4pal 147329
+66S9 147330
+7ZyR 147331
+8J2QuQ== 147332
+8J2WlA== 147333
+8J2ajg== 147334
+8J+ThA== 147335
+8J+mtw== 147336
+xoM= 147337
+4Kaf 147338
+4oyC 147339
+4pit 147340
+4rKa 147341
+652V 147342
+8J+Oow== 147343
+4K6H 147344
+4L2G 147345
+4YW1 147346
+4Zec 147347
+4oC9 147348
+4oyj 147349
+4oG9 147350
+8J+TrA== 147351
+8J+kpw== 147352
+4oeq 147353
+4r2j 147354
+4pef 147355
+76iX 147356
+6pKq 147357
+8J+bgA== 147358
+x4I= 147359
+8J+ltg== 147360
+8J+OjQ== 147361
+77+p 147362
+8J+Rkg== 147363
+4bWI 147364
+77i/ 147365
+4YWp 147366
+4r6m 147367
+4LCk 147368
+4bSW 147369
+4Kis 147370
+4LqX 147371
+4Ly7 147372
+0bo= 147373
+4Kiq 147374
+4bSz 147375
+8J2QiA== 147376
+4LuA 147377
+4bS/ 147378
+4oKN 147379
+4oeh 147380
+4puq 147381
+8J2Qgg== 147382
+8J2SlQ== 147383
+8J+QnA== 147384
+yo0= 147385
+0bE= 147386
+4L2D 147387
+666Q 147388
+7Juh 147389
+7JyB 147390
+8J2Qvw== 147391
+8J2VoA== 147392
+8J+Rmw== 147393
+xqo= 147394
+z7o= 147395
+06w= 147396
+2b8= 147397
+3aM= 147398
+4KqJ 147399
+4K65 147400
+4L2R 147401
+4Yav 147402
+4bWH 147403
+4oel 147404
+4o+q 147405
+4pmw 147406
+4pqt 147407
+4pq+ 147408
+44WE 147409
+6oCw 147410
+6rCX 147411
+6rKL 147412
+6rK7 147413
+6rac 147414
+6ryH 147415
+6r25 147416
+64Kf 147417
+64WI 147418
+64ui 147419
+66ef 147420
+66qG 147421
+67WA 147422
+7L2x 147423
+7YeY 147424
+7Zyc 147425
+76e+ 147426
+77G1 147427
+77Ki 147428
+77Kk 147429
+8J2Sig== 147430
+8J2Yrw== 147431
+8J+Nlw== 147432
+8J+PjQ== 147433
+8J+QmA== 147434
+8J+ToQ== 147435
+8J+Ung== 147436
+8J+ksw== 147437
+8J+lgQ== 147438
+8J+llw== 147439
+8J+mig== 147440
+xLU= 147441
+xqY= 147442
+x7U= 147443
+ya8= 147444
+zo8= 147445
+1YQ= 147446
+3KU= 147447
+4L2B 147448
+4aig 147449
+4pWr 147450
+446J 147451
+67e0 147452
+7IaO 147453
+7I6M 147454
+7KO1 147455
+7Zug 147456
+76eq 147457
+77OP 147458
+77u6 147459
+8J2RgQ== 147460
+8J2Rhw== 147461
+8J2Shg== 147462
+8J+OoA== 147463
+8J+QlA== 147464
+8J+Rnw== 147465
+xZY= 147466
+4KSM 147467
+4b69 147468
+6qaS 147469
+4K6f 147470
+4bSx 147471
+8J+PsA== 147472
+8J+Qng== 147473
+4L2A 147474
+4YCF 147475
+4oq/ 147476
+8J+Qpw== 147477
+4ZuB 147478
+4ryI 147479
+4pS/ 147480
+8J+ltA== 147481
+4ry/ 147482
+8J+nnA== 147483
+44W/ 147484
+4oSr 147485
+44Cz 147486
+44qZ 147487
+4ryA 147488
+76as 147489
+8J+PrA== 147490
+8J+Tuw== 147491
+4Yqb 147492
+4YSF 147493
+4LqK 147494
+4Lqb 147495
+4YWz 147496
+8J+Rrg== 147497
+4K6x 147498
+4piH 147499
+8J2Qjw== 147500
+4LS1 147501
+4LuB 147502
+4L2P 147503
+4L2i 147504
+4aWx 147505
+4oKj 147506
+76Wm 147507
+762Z 147508
+77Sp 147509
+77mC 147510
+8J+Now== 147511
+8J+VuQ== 147512
+z5Y= 147513
+4La4 147514
+4Lqi 147515
+4Yut 147516
+4o6d 147517
+4ped 147518
+4pmI 147519
+4pmO 147520
+6r2l 147521
+7LOU 147522
+7LyR 147523
+77Gw 147524
+8J2Rgw== 147525
+8J+Mqg== 147526
+8J+NoQ== 147527
+xY4= 147528
+yqY= 147529
+0ac= 147530
+044= 147531
+1LQ= 147532
+2og= 147533
+35M= 147534
+36c= 147535
+4KSU 147536
+4Yir 147537
+4Yi1 147538
+4Zep 147539
+4bSg 147540
+4byg 147541
+4oCX 147542
+4oGR 147543
+4oSP 147544
+4paH 147545
+4rKj 147546
+44Sz 147547
+44mu 147548
+6rOX 147549
+64SS 147550
+65ar 147551
+66GE 147552
+67mw 147553
+672B 147554
+7ISB 147555
+7IyY 147556
+7J+M 147557
+7LOJ 147558
+7LyV 147559
+76y7 147560
+77OO 147561
+77m4 147562
+77m+ 147563
+8J2Qhg== 147564
+8J2Rtw== 147565
+8J2bvA== 147566
+8J+Ojw== 147567
+8J+Ong== 147568
+8J+QmQ== 147569
+8J+Rgg== 147570
+8J+TgQ== 147571
+8J+WsQ== 147572
+8J+ajQ== 147573
+8J+apw== 147574
+8J+boQ== 147575
+8J+kkg== 147576
+8J+lng== 147577
+8J+lqQ== 147578
+8J+mgA== 147579
+8J+mlg== 147580
+y6I= 147581
+3Jo= 147582
+4K61 147583
+4YCB 147584
+4Ymw 147585
+4o+t 147586
+4pm/ 147587
+6rOY 147588
+64+d 147589
+65WD 147590
+7IWM 147591
+7JK4 147592
+7Juf 147593
+7YWE 147594
+7Zyr 147595
+76eY 147596
+77+s 147597
+8J+Ptw== 147598
+8J+Upw== 147599
+8J+liA== 147600
+xpY= 147601
+4Z6H 147602
+4Z6W 147603
+4oG6 147604
+4pec 147605
+4p6p 147606
+6qat 147607
+65mk 147608
+7628 147609
+8J2Zlg== 147610
+8J2Zow== 147611
+8J2ZpA== 147612
+8J+MnQ== 147613
+8J+UkQ== 147614
+8J+boA== 147615
+4LqH 147616
+4pij 147617
+44So 147618
+8J2Wlw== 147619
+05M= 147620
+4oaj 147621
+8J+liQ== 147622
+8J+MoA== 147623
+8J+YvQ== 147624
+446g 147625
+xac= 147626
+8J+Qkg== 147627
+76eQ 147628
+8J+Yvw== 147629
+4ois 147630
+8J+Qrg== 147631
+4p+x 147632
+4LKh 147633
+4r68 147634
+4LCy 147635
+y7Y= 147636
+4pa/ 147637
+1Yg= 147638
+4Z6O 147639
+4YWl 147640
+4Z6X 147641
+1ac= 147642
+8J+kkA== 147643
+8J+NoA== 147644
+4Kak 147645
+4La6 147646
+4pmN 147647
+7JiZ 147648
+7ZiT 147649
+77m6 147650
+8J+bsw== 147651
+xYk= 147652
+4bSO 147653
+4o+c 147654
+4pSz 147655
+6ri3 147656
+7KGU 147657
+8J2SiA== 147658
+8J2SjQ== 147659
+8J2SuQ== 147660
+8J2Thw== 147661
+8J2Vnw== 147662
+8J2XuQ== 147663
+8J+MhQ== 147664
+8J+PtA== 147665
+xJQ= 147666
+xKQ= 147667
+xbU= 147668
+x74= 147669
+z54= 147670
+z7Y= 147671
+1LM= 147672
+3IY= 147673
+36k= 147674
+4KGS 147675
+4KSY 147676
+4Laa 147677
+4L2W 147678
+4YGK 147679
+4YOe 147680
+4YSC 147681
+4Yur 147682
+4bS6 147683
+4bij 147684
+4biq 147685
+4bmC 147686
+4by3 147687
+4b+H 147688
+4oeM 147689
+4o+s 147690
+4pmM 147691
+4q6f 147692
+4rS7 147693
+4rWf 147694
+6qaV 147695
+6qaq 147696
+6qau 147697
+6rKE 147698
+6r6Q 147699
+64OR 147700
+65WL 147701
+66G4 147702
+66yA 147703
+7Iek 147704
+7Iip 147705
+7JyV 147706
+7K2Y 147707
+7Lew 147708
+7Le4 147709
+7ZyA 147710
+76Sj 147711
+76eN 147712
+77GE 147713
+77OR 147714
+8J2QpA== 147715
+8J2Skw== 147716
+8J2Stg== 147717
+8J2XvA== 147718
+8J2Zig== 147719
+8J+Hvg== 147720
+8J+Mmw== 147721
+8J+Mrg== 147722
+8J+Ohw== 147723
+8J+Osg== 147724
+8J+Pmw== 147725
+8J+RpQ== 147726
+8J+RtA== 147727
+8J+Shg== 147728
+8J+Tgg== 147729
+8J+Tpw== 147730
+8J+VkA== 147731
+8J+WlQ== 147732
+8J+Ypw== 147733
+8J+ZgA== 147734
+8J+akg== 147735
+8J+bqw== 147736
+8J+koA== 147737
+8J+lmg== 147738
+8J+lmw== 147739
+8J+low== 147740
+x68= 147741
+yKc= 147742
+zoo= 147743
+0rI= 147744
+17A= 147745
+25E= 147746
+4YOp 147747
+4YSM 147748
+4YiN 147749
+4Yml 147750
+4Y+C 147751
+4oGx 147752
+4oqi 147753
+4peT 147754
+4p2w 147755
+67+h 147756
+7Jup 147757
+7YGt 147758
+7Yaz 147759
+7YqE 147760
+7ZO4 147761
+76Wj 147762
+76W0 147763
+77GQ 147764
+77Gv 147765
+77Oa 147766
+8J2WmA== 147767
+8J2YgA== 147768
+8J+Qig== 147769
+8J+QjA== 147770
+8J+Rmg== 147771
+8J+Tgw== 147772
+8J+amw== 147773
+8J+aqg== 147774
+8J+ksA== 147775
+xLQ= 147776
+4YOu 147777
+4Zeo 147778
+4pmu 147779
+4rKe 147780
+44iU 147781
+7IWN 147782
+44WD 147783
+76Wh 147784
+4Lqh 147785
+1Y4= 147786
+1bo= 147787
+4qyb 147788
+4r2k 147789
+8J2Qsg== 147790
+4p61 147791
+4YCb 147792
+4pSF 147793
+4oaf 147794
+4ryK 147795
+8J+MvQ== 147796
+8J+avw== 147797
+76aK 147798
+44Sj 147799
+4pup 147800
+76mb 147801
+8J+NsQ== 147802
+4r6o 147803
+4LSk 147804
+4Z6B 147805
+4Lqe 147806
+ypo= 147807
+8J2Qkg== 147808
+4LSx 147809
+4Z6c 147810
+4K6p 147811
+4LCX 147812
+4LSa 147813
+4oej 147814
+76aV 147815
+1YU= 147816
+xpg= 147817
+4oKm 147818
+4pSE 147819
+76af 147820
+76ar 147821
+8J2QgQ== 147822
+8J2Qgw== 147823
+8J+NuA== 147824
+8J+Qsg== 147825
+xbY= 147826
+yZY= 147827
+35g= 147828
+4Lim 147829
+4L2U 147830
+4Ya3 147831
+4oGV 147832
+4pOC 147833
+4p2c 147834
+76Wl 147835
+76yu 147836
+8J2XnQ== 147837
+8J2Xvw== 147838
+8J+Ovg== 147839
+8J+XnQ== 147840
+8J+mjA== 147841
+xoU= 147842
+x6o= 147843
+0pc= 147844
+3Js= 147845
+36A= 147846
+4KGR 147847
+4Ymj 147848
+4Yqt 147849
+4bmh 147850
+4p68 147851
+4p6+ 147852
+4rSx 147853
+44mh 147854
+6rOv 147855
+672I 147856
+7IKY 147857
+7ImR 147858
+7KuY 147859
+7YyD 147860
+7Zmw 147861
+76SX 147862
+8J+MrA== 147863
+8J+MsA== 147864
+8J+NpA== 147865
+xLs= 147866
+xYc= 147867
+xqg= 147868
+yZU= 147869
+0qI= 147870
+0ro= 147871
+1o0= 147872
+17E= 147873
+2rE= 147874
+2r0= 147875
+25A= 147876
+4KSb 147877
+4LeA 147878
+4Lma 147879
+4Lqr 147880
+4bS5 147881
+4b2U 147882
+4b6z 147883
+4oKS 147884
+4oa0 147885
+4oed 147886
+4omF 147887
+4oyo 147888
+4pOT 147889
+4pai 147890
+4pqs 147891
+4p6t 147892
+4rKS 147893
+446/ 147894
+6r+0 147895
+64ix 147896
+642s 147897
+646Q 147898
+65Cr 147899
+65Sr 147900
+67GB 147901
+7IOl 147902
+7Yy8 147903
+762T 147904
+766l 147905
+77Kw 147906
+8J2Qhw== 147907
+8J2QkQ== 147908
+8J2RjA== 147909
+8J2Tqg== 147910
+8J2Vmg== 147911
+8J2Yqg== 147912
+8J2YvA== 147913
+8J2amw== 147914
+8J+Htg== 147915
+8J+MhA== 147916
+8J+MlQ== 147917
+8J+MpA== 147918
+8J+Mpw== 147919
+8J+NrA== 147920
+8J+Oiw== 147921
+8J+Ouw== 147922
+8J+PqA== 147923
+8J+Qhw== 147924
+8J+Rkw== 147925
+8J+TkA== 147926
+8J+TmQ== 147927
+8J+UvA== 147928
+8J+Vkg== 147929
+8J+Wjw== 147930
+8J+WpQ== 147931
+8J+krA== 147932
+8J+lig== 147933
+8J+lkg== 147934
+34w= 147935
+4LqE 147936
+4by1 147937
+4pWh 147938
+4rKk 147939
+4rS8 147940
+4rWi 147941
+44iv 147942
+65O4 147943
+65+H 147944
+67qN 147945
+8J2Zpw== 147946
+8J+NiA== 147947
+8J+UrA== 147948
+8J+Wig== 147949
+8J+kvg== 147950
+y6E= 147951
+3Kk= 147952
+4oyh 147953
+4q2R 147954
+4rKm 147955
+66mJ 147956
+7Lyt 147957
+77+k 147958
+8J2Sjg== 147959
+8J2XpQ== 147960
+8J+QtQ== 147961
+8J+Vtg== 147962
+8J+VuA== 147963
+8J+knA== 147964
+1ao= 147965
+4YiL 147966
+8J+ltQ== 147967
+77CB 147968
+4bWQ 147969
+4pWT 147970
+4YCW 147971
+4ouI 147972
+yZ4= 147973
+4p6u 147974
+4KWw 147975
+44aB 147976
+8J+SsQ== 147977
+8J+PrQ== 147978
+4Yao 147979
+8J+Nmg== 147980
+8J+mkA== 147981
+4bS7 147982
+4piM 147983
+4LSV 147984
+1bE= 147985
+4YWu 147986
+8J2QjA== 147987
+xaY= 147988
+4LqV 147989
+4pyZ 147990
+y7M= 147991
+1LU= 147992
+4pWS 147993
+8J2Xlw== 147994
+8J2XoA== 147995
+2po= 147996
+4Kan 147997
+4oad 147998
+4pmJ 147999
+44y7 148000
+7LmK 148001
+8J2Xug== 148002
+8J+nmA== 148003
+7LOj 148004
+76yd 148005
+8J+Rug== 148006
+x58= 148007
+zog= 148008
+zqs= 148009
+0aU= 148010
+1LI= 148011
+1ag= 148012
+3KY= 148013
+4KaG 148014
+4Kal 148015
+4ZCi 148016
+4byB 148017
+4byY 148018
+4bym 148019
+4pOd 148020
+44iw 148021
+446X 148022
+6rKh 148023
+66iA 148024
+7KOU 148025
+7LSk 148026
+7LWd 148027
+76e0 148028
+762K 148029
+77Kf 148030
+8J2Qtw== 148031
+8J2Riw== 148032
+8J2TiQ== 148033
+8J2YtQ== 148034
+8J+Stw== 148035
+8J+bqQ== 148036
+8J+nuQ== 148037
+xZQ= 148038
+yp4= 148039
+y6U= 148040
+zow= 148041
+0ak= 148042
+05A= 148043
+06A= 148044
+2pE= 148045
+2pI= 148046
+36g= 148047
+4KqI 148048
+4ZCD 148049
+4bmv 148050
+4oKL 148051
+4oK1 148052
+4oSF 148053
+4oSg 148054
+4oij 148055
+4om6 148056
+4om7 148057
+4oqb 148058
+4oyQ 148059
+4o6T 148060
+4pi4 148061
+4pmS 148062
+4pqS 148063
+4pyH 148064
+4pyg 148065
+4rS3 148066
+4rWW 148067
+44S4 148068
+44mi 148069
+44mw 148070
+6oe0 148071
+6rS4 148072
+6rqg 148073
+64KP 148074
+64Ki 148075
+65CA 148076
+67q0 148077
+7IOc 148078
+7I2F 148079
+7KSr 148080
+7LGm 148081
+7LqR 148082
+7LyB 148083
+7L+z 148084
+7YKB 148085
+7YWh 148086
+7ZKC 148087
+7ZKJ 148088
+7ZyE 148089
+762q 148090
+766s 148091
+76+m 148092
+77Gq 148093
+77KP 148094
+77SA 148095
+77uG 148096
+77+m 148097
+8J2Rlw== 148098
+8J2WmQ== 148099
+8J+MoQ== 148100
+8J+NnQ== 148101
+8J+Npw== 148102
+8J+Oqw== 148103
+8J+PmA== 148104
+8J+Pqg== 148105
+8J+Qiw== 148106
+8J+Qmw== 148107
+8J+Qug== 148108
+8J+Rlg== 148109
+8J+Rng== 148110
+8J+Rtw== 148111
+8J+TgA== 148112
+8J+UhA== 148113
+8J+UjA== 148114
+8J+VmQ== 148115
+8J+ZjQ== 148116
+8J+Zjg== 148117
+8J+mjQ== 148118
+x7A= 148119
+yZ8= 148120
+yoY= 148121
+1Lw= 148122
+2pw= 148123
+4Kah 148124
+4Ka2 148125
+4ZKD 148126
+4byp 148127
+4pOV 148128
+4rKI 148129
+6rCw 148130
+6rmg 148131
+6rqF 148132
+64S5 148133
+66+T 148134
+7ZCI 148135
+76e2 148136
+766R 148137
+77Ko 148138
+8J2SiQ== 148139
+8J2SlA== 148140
+8J2XqA== 148141
+8J2Zng== 148142
+8J2akg== 148143
+8J2alQ== 148144
+8J+Qjg== 148145
+8J+klQ== 148146
+8J+nlA== 148147
+z7A= 148148
+1J0= 148149
+4oyK 148150
+4pK+ 148151
+44mj 148152
+762p 148153
+8J2ang== 148154
+ypE= 148155
+4Kam 148156
+4YSH 148157
+4omD 148158
+4rKA 148159
+7J+O 148160
+8J2Rtg== 148161
+8J2Tsg== 148162
+8J+Otw== 148163
+8J+auQ== 148164
+4LqB 148165
+4aCg 148166
+44Sa 148167
+8J+Qvw== 148168
+4Zua 148169
+4pWz 148170
+8J+QrQ== 148171
+4pK5 148172
+8J2Wmg== 148173
+4pmW 148174
+44iy 148175
+4oa+ 148176
+4YSG 148177
+4pWb 148178
+8J+kjQ== 148179
+4r2l 148180
+8J+MqA== 148181
+4oiu 148182
+44yY 148183
+442R 148184
+77mA 148185
+4pOX 148186
+4oqE 148187
+8J+PuQ== 148188
+y5I= 148189
+8J+ksQ== 148190
+44+c 148191
+8J+OjA== 148192
+76Wt 148193
+4Kaj 148194
+8J+OuQ== 148195
+44qf 148196
+4LSw 148197
+8J2QlA== 148198
+4LSo 148199
+4L2a 148200
+4py6 148201
+1bc= 148202
+8J+Rsw== 148203
+4Kac 148204
+4piL 148205
+4pmK 148206
+44Cb 148207
+yIs= 148208
+4K6w 148209
+4YOo 148210
+4oSV 148211
+7ZGA 148212
+8J2Tgw== 148213
+8J+mlA== 148214
+xL8= 148215
+xYA= 148216
+xrM= 148217
+yZo= 148218
+1oM= 148219
+3KM= 148220
+358= 148221
+4Kat 148222
+4Keh 148223
+4La7 148224
+4Lqj 148225
+4L2H 148226
+4bio 148227
+4b2I 148228
+4r2s 148229
+6qGU 148230
+7LOE 148231
+76iJ 148232
+8J2QoQ== 148233
+8J2Yog== 148234
+8J+Nvw== 148235
+8J+Onw== 148236
+8J+PiQ== 148237
+8J+UkA== 148238
+8J+ahQ== 148239
+8J+kvQ== 148240
+xo0= 148241
+x6s= 148242
+x70= 148243
+yJo= 148244
+zok= 148245
+06Q= 148246
+06o= 148247
+1Yo= 148248
+2bw= 148249
+2rQ= 148250
+350= 148251
+4Lac 148252
+4byV 148253
+4b+l 148254
+4o6e 148255
+44Ca 148256
+44mk 148257
+6rO4 148258
+6reB 148259
+65OE 148260
+65OV 148261
+7KiU 148262
+7LGo 148263
+8J2Qvg== 148264
+8J2Ruw== 148265
+8J2UvA== 148266
+8J2VnQ== 148267
+8J2YrQ== 148268
+8J+GmQ== 148269
+8J+TpA== 148270
+8J+Unw== 148271
+8J+XvA== 148272
+xJw= 148273
+xoE= 148274
+xr8= 148275
+x7M= 148276
+x7c= 148277
+yYM= 148278
+yaA= 148279
+yok= 148280
+yqc= 148281
+y7I= 148282
+z7Q= 148283
+1YE= 148284
+1Z4= 148285
+1oc= 148286
+24I= 148287
+25M= 148288
+35c= 148289
+36Y= 148290
+4Ka5 148291
+4K6z 148292
+4LS4 148293
+4LuC 148294
+4Yid 148295
+4Yiq 148296
+4Yu1 148297
+4ZCK 148298
+4ZKq 148299
+4ZqW 148300
+4Z6b 148301
+4bSi 148302
+4bWP 148303
+4bWt 148304
+4bar 148305
+4biP 148306
+4bqS 148307
+4byl 148308
+4b2V 148309
+4b28 148310
+4oKK 148311
+4oSC 148312
+4oSp 148313
+4oeJ 148314
+4omj 148315
+4oyg 148316
+4o6f 148317
+4o+u 148318
+4pWY 148319
+4peW 148320
+4pip 148321
+4pmR 148322
+4pmy 148323
+4pqb 148324
+44Sf 148325
+44mx 148326
+446a 148327
+6qGV 148328
+6qqW 148329
+6rC5 148330
+6rKG 148331
+6rWE 148332
+64es 148333
+64uv 148334
+64+g 148335
+65Ks 148336
+65aI 148337
+65a9 148338
+65iU 148339
+6564 148340
+67iF 148341
+67ug 148342
+67+f 148343
+7IK1 148344
+7IqJ 148345
+7Jyw 148346
+7KCL 148347
+7KCU 148348
+7KWh 148349
+7K2d 148350
+7Lys 148351
+7YiH 148352
+7Ymc 148353
+7Y2E 148354
+7Zu+ 148355
+7Z2j 148356
+76Sp 148357
+76Sv 148358
+76ac 148359
+76an 148360
+76ec 148361
+76iI 148362
+76yq 148363
+76y0 148364
+7629 148365
+766J 148366
+76+e 148367
+77CS 148368
+77GH 148369
+77+E 148370
+8J2QhQ== 148371
+8J2RhA== 148372
+8J2Rug== 148373
+8J2Slw== 148374
+8J2Trg== 148375
+8J2Vmw== 148376
+8J2Vng== 148377
+8J2WkQ== 148378
+8J2YgQ== 148379
+8J2Yhg== 148380
+8J2Ytg== 148381
+8J2Zog== 148382
+8J2anA== 148383
+8J+Mgw== 148384
+8J+Mpg== 148385
+8J+Nnw== 148386
+8J+Ojg== 148387
+8J+PmQ== 148388
+8J+QqQ== 148389
+8J+Qqw== 148390
+8J+QtA== 148391
+8J+RlA== 148392
+8J+TiQ== 148393
+8J+Tmw== 148394
+8J+UiQ== 148395
+8J+WvA== 148396
+8J+Xgw== 148397
+8J+Xrw== 148398
+8J+ahw== 148399
+8J+akA== 148400
+8J+atQ== 148401
+8J+ktg== 148402
+8J+liw== 148403
+8J+lkw== 148404
+8J+lrg== 148405
+8J+mjg== 148406
+8J+moA== 148407
+8J+nkg== 148408
+8J+nqA== 148409
+xpA= 148410
+x40= 148411
+04A= 148412
+1Js= 148413
+4LKw 148414
+4LSZ 148415
+4YCS 148416
+6rKd 148417
+6rm5 148418
+66ml 148419
+7JaU 148420
+76SB 148421
+76SP 148422
+76aJ 148423
+76aT 148424
+76eJ 148425
+77Kd 148426
+8J2Xng== 148427
+8J2XsQ== 148428
+8J+Miw== 148429
+8J+Ntg== 148430
+4Kaa 148431
+7JWc 148432
+8J2Qrw== 148433
+8J2anQ== 148434
+4LCo 148435
+4L2Y 148436
+4L2g 148437
+4aGl 148438
+4b6w 148439
+4oGN 148440
+4pSw 148441
+4qyc 148442
+8J2QoA== 148443
+8J2Rrw== 148444
+8J2Xmw== 148445
+8J2Tuw== 148446
+8J2WiA== 148447
+4p67 148448
+4Z6g 148449
+4qGx 148450
+4ruR 148451
+8J+ntQ== 148452
+76ai 148453
+8J+RmA== 148454
+44KU 148455
+4ryf 148456
+44qk 148457
+76ad 148458
+44ym 148459
+4oC4 148460
+8J+UmQ== 148461
+47k= 148462
+47mm 148463
+77mF 148464
+76mM 148465
+44mo 148466
+77i9 148467
+4o2l 148468
+8J+aiQ== 148469
+8J+lnA== 148470
+4pOc 148471
+4rud 148472
+76ic 148473
+8J+Skg== 148474
+4YSR 148475
+4r6e 148476
+76iB 148477
+4LSq 148478
+4YSO 148479
+4p60 148480
+4Ka3 148481
+4YWs 148482
+4Z6n 148483
+4oai 148484
+4pWm 148485
+4pyR 148486
+y6w= 148487
+1ZA= 148488
+4LyU 148489
+yqQ= 148490
+y6g= 148491
+4KSe 148492
+4LuD 148493
+4Lya 148494
+4pOl 148495
+4pWc 148496
+8J+Qlg== 148497
+4byZ 148498
+4byk 148499
+7Iaw 148500
+yII= 148501
+yrE= 148502
+4K6a 148503
+4YOn 148504
+4bSL 148505
+4bSu 148506
+4p2h 148507
+4p63 148508
+652h 148509
+76ei 148510
+76+h 148511
+8J2VlQ== 148512
+8J+FsA== 148513
+8J+muA== 148514
+x7g= 148515
+054= 148516
+1LY= 148517
+1oY= 148518
+2oE= 148519
+24s= 148520
+4Y6l 148521
+4b6/ 148522
+4pSt 148523
+4pSu 148524
+6oCA 148525
+6rGY 148526
+65Ct 148527
+672E 148528
+7JSQ 148529
+7LiM 148530
+7YGg 148531
+7Zmx 148532
+76WJ 148533
+76iW 148534
+8J2RtA== 148535
+8J2Wkg== 148536
+8J2YqA== 148537
+8J2ajA== 148538
+8J+QoQ== 148539
+8J+Rog== 148540
+8J+TlA== 148541
+xYU= 148542
+xo4= 148543
+yKk= 148544
+0qo= 148545
+1IM= 148546
+4YOr 148547
+4biH 148548
+4puf 148549
+6rut 148550
+66iE 148551
+7J+A 148552
+7KS0 148553
+7ZqQ 148554
+76Sz 148555
+8J+fog== 148556
+xqc= 148557
+yLw= 148558
+yp0= 148559
+y4Q= 148560
+y4U= 148561
+y40= 148562
+y6c= 148563
+0qU= 148564
+1ZQ= 148565
+2I8= 148566
+2Lw= 148567
+35A= 148568
+35w= 148569
+4KST 148570
+4KaZ 148571
+4K6T 148572
+4La0 148573
+4LyN 148574
+4LyS 148575
+4L2j 148576
+4YCC 148577
+4YCK 148578
+4YSE 148579
+4YiY 148580
+4YuK 148581
+4YyN 148582
+4ZGL 148583
+4Z6C 148584
+4aCi 148585
+4aGd 148586
+4bSm 148587
+4bWN 148588
+4bWo 148589
+4bih 148590
+4biv 148591
+4byj 148592
+4oGC 148593
+4oSY 148594
+4oSc 148595
+4oSz 148596
+4oS1 148597
+4oam 148598
+4oeG 148599
+4oi3 148600
+4oqa 148601
+4oyr 148602
+4oyv 148603
+4o6b 148604
+4o6c 148605
+4o6k 148606
+4o6m 148607
+4o6u 148608
+4pGJ 148609
+4pSJ 148610
+4pWZ 148611
+4paC 148612
+4pet 148613
+4piK 148614
+4piN 148615
+4piS 148616
+4pqG 148617
+4pun 148618
+4puy 148619
+4p6Y 148620
+4qWE 148621
+4rSz 148622
+4rS9 148623
+4rWI 148624
+44mv 148625
+446R 148626
+46es 148627
+6pms 148628
+6qeB 148629
+6rOs 148630
+6rSe 148631
+6ruc 148632
+64WT 148633
+64u8 148634
+642W 148635
+65ax 148636
+652w 148637
+66G5 148638
+66K0 148639
+66OA 148640
+66Sg 148641
+66iV 148642
+662l 148643
+7IS2 148644
+7IWk 148645
+7IyV 148646
+7I2q 148647
+7I+p 148648
+7JKA 148649
+7JSv 148650
+7J2U 148651
+7J2c 148652
+7KCt 148653
+7Kem 148654
+7Kip 148655
+7LKs 148656
+7LOl 148657
+7Lyv 148658
+7YCr 148659
+7YCt 148660
+7YO4 148661
+7ZOB 148662
+7ZWs 148663
+7Ze4 148664
+7ZuV 148665
+7Zyt 148666
+7Z2X 148667
+76SM 148668
+76Sq 148669
+76e/ 148670
+76yE 148671
+76yF 148672
+762R 148673
+762r 148674
+7626 148675
+766C 148676
+766i 148677
+766o 148678
+77CO 148679
+77Cg 148680
+77Kj 148681
+77OQ 148682
+77OS 148683
+77OY 148684
+77Oc 148685
+77m8 148686
+77+o 148687
+8J2QqQ== 148688
+8J2Smg== 148689
+8J2VlA== 148690
+8J2VpA== 148691
+8J2WjA== 148692
+8J2Xow== 148693
+8J2XsA== 148694
+8J2XtA== 148695
+8J2Ygg== 148696
+8J2YpQ== 148697
+8J2Yrg== 148698
+8J2YuA== 148699
+8J2ZgA== 148700
+8J2bvg== 148701
+8J2cjw== 148702
+8J+MgQ== 148703
+8J+MnA== 148704
+8J+MpQ== 148705
+8J+Mrw== 148706
+8J+NkA== 148707
+8J+Okg== 148708
+8J+PlA== 148709
+8J+PlQ== 148710
+8J+Prg== 148711
+8J+Qgg== 148712
+8J+QiQ== 148713
+8J+QuQ== 148714
+8J+UlQ== 148715
+8J+Umg== 148716
+8J+VkQ== 148717
+8J+Vow== 148718
+8J+Xng== 148719
+8J+XoQ== 148720
+8J+Xvw== 148721
+8J+ahg== 148722
+8J+aig== 148723
+8J+akw== 148724
+8J+alQ== 148725
+8J+avg== 148726
+8J+bgQ== 148727
+8J+bjg== 148728
+8J+bjw== 148729
+8J+ktA== 148730
+8J+llQ== 148731
+8J+llg== 148732
+8J+loA== 148733
+8J+lpQ== 148734
+8J+mhg== 148735
+8J+miQ== 148736
+8J+mmg== 148737
+8J+nkQ== 148738
+8J+npQ== 148739
+8J+nvw== 148740
+xbA= 148741
+xro= 148742
+yac= 148743
+4KqH 148744
+4K6j 148745
+4YiI 148746
+4Yqk 148747
+4Yuu 148748
+4YyI 148749
+4Yy1 148750
+4aWy 148751
+4pOf 148752
+6pmz 148753
+6rCK 148754
+65WB 148755
+65Wo 148756
+7IqB 148757
+76a1 148758
+76yy 148759
+8J2WjQ== 148760
+8J2YjA== 148761
+8J2Ysw== 148762
+8J2ZqQ== 148763
+8J+NmQ== 148764
+8J+Wlg== 148765
+4Ymz 148766
+4Yuo 148767
+4ZaH 148768
+4Z6M 148769
+4bmn 148770
+4pWq 148771
+4p6a 148772
+4rKY 148773
+6pU= 148774
+6pWl 148775
+76S3 148776
+766j 148777
+76+g 148778
+8J2Slg== 148779
+8J2VmA== 148780
+8J2Whw== 148781
+8J2Xnw== 148782
+8J2Xqg== 148783
+8J2Xrw== 148784
+8J2ZoA== 148785
+8J+Tjw== 148786
+4KaX 148787
+4pK7 148788
+4rKg 148789
+8J2TtQ== 148790
+yqM= 148791
+4LCc 148792
+4Yqi 148793
+4Z6Q 148794
+4bi3 148795
+4oSb 148796
+4oeA 148797
+4oeK 148798
+6pKm 148799
+6qag 148800
+766k 148801
+8J+Nmw== 148802
+8J+kmw== 148803
+4ai+ 148804
+4p66 148805
+4ZWv 148806
+4ZuP 148807
+4oeC 148808
+4pS5 148809
+4pmX 148810
+8J+WqA== 148811
+6qaP 148812
+4Kqw 148813
+4Zqo 148814
+8J+kpQ== 148815
+8J+nog== 148816
+45CC 148817
+44Sl 148818
+8J+WjA== 148819
+4ryS 148820
+44qn 148821
+4o2p 148822
+8J+mkQ== 148823
+4pS3 148824
+76mQ 148825
+76mh 148826
+8JOI 148827
+8JOIkg== 148828
+4ruE 148829
+76iS 148830
+4oSq 148831
+0qc= 148832
+2ow= 148833
+4oC2 148834
+4rqg 148835
+4ruB 148836
+4oa4 148837
+4YSQ 148838
+44WQ 148839
+4LuE 148840
+4Zeq 148841
+4oa8 148842
+4oeL 148843
+4oeY 148844
+4oyR 148845
+4pap 148846
+8J2Qlw== 148847
+xIo= 148848
+4KaJ 148849
+7Img 148850
+yaQ= 148851
+340= 148852
+348= 148853
+4bWX 148854
+4oKl 148855
+4pOJ 148856
+4pSg 148857
+4pSo 148858
+4pWE 148859
+5KQ= 148860
+5KSA 148861
+6ru4 148862
+766B 148863
+8JOC 148864
+8JOCgw== 148865
+8J+mlQ== 148866
+xps= 148867
+4KaH 148868
+44+Y 148869
+7668 148870
+2pM= 148871
+2p0= 148872
+4KaT 148873
+4Lav 148874
+4bSF 148875
+4b2Z 148876
+4oG8 148877
+4paO 148878
+4ryp 148879
+5JQ= 148880
+5JSA 148881
+67uh 148882
+7Ju9 148883
+7YGE 148884
+76W8 148885
+77GJ 148886
+77m7 148887
+8J2Wiw== 148888
+8J2ZiA== 148889
+8J2Zqg== 148890
+8J2Ztg== 148891
+8J+QhA== 148892
+8J+Qhg== 148893
+4Y6i 148894
+4biM 148895
+4p20 148896
+8J+PuA== 148897
+yJ0= 148898
+ybg= 148899
+zoU= 148900
+z5w= 148901
+06I= 148902
+1bk= 148903
+4LSF 148904
+4LqI 148905
+4Yuw 148906
+4ZGO 148907
+4aC1 148908
+4aGg 148909
+4bSJ 148910
+4bi1 148911
+4b+0 148912
+4pOj 148913
+4pS2 148914
+4r2v 148915
+6rKl 148916
+6r+Y 148917
+64GO 148918
+646I 148919
+65Sv 148920
+67Kw 148921
+7Jiv 148922
+7Ju4 148923
+7J6X 148924
+7KeY 148925
+7Kys 148926
+7Les 148927
+7YGF 148928
+7ZOU 148929
+7Zud 148930
+76Su 148931
+76S5 148932
+76Wy 148933
+76+W 148934
+8J2ThQ== 148935
+8J2ZhA== 148936
+8J+Ttg== 148937
+8J+Xkg== 148938
+8J+llA== 148939
+8J+lrQ== 148940
+xa4= 148941
+xbQ= 148942
+xok= 148943
+xqs= 148944
+x4E= 148945
+x6M= 148946
+x7o= 148947
+x7w= 148948
+yI0= 148949
+yK8= 148950
+yZw= 148951
+yqw= 148952
+y4E= 148953
+y6Q= 148954
+y7U= 148955
+z5s= 148956
+0qQ= 148957
+0qw= 148958
+048= 148959
+05s= 148960
+06E= 148961
+07M= 148962
+1Iw= 148963
+1Kw= 148964
+1bM= 148965
+2bs= 148966
+2ok= 148967
+2qc= 148968
+3Jw= 148969
+36o= 148970
+4KSd 148971
+4Kab 148972
+4KiG 148973
+4KqV 148974
+4Kqh 148975
+4K6O 148976
+4LCs 148977
+4LW7 148978
+4LW8 148979
+4Lag 148980
+4Lat 148981
+4La2 148982
+4LeG 148983
+4Ly9 148984
+4YCa 148985
+4YWi 148986
+4Ya4 148987
+4YiA 148988
+4YiV 148989
+4Yiw 148990
+4Ymh 148991
+4Ymk 148992
+4Yqm 148993
+4Yqr 148994
+4YuL 148995
+4YuN 148996
+4Y6v 148997
+4ZGt 148998
+4ZWX 148999
+4Z+b 149000
+4aWS 149001
+4amJ 149002
+4a26 149003
+4bSh 149004
+4bWY 149005
+4bWb 149006
+4bag 149007
+4biB 149008
+4biL 149009
+4bmZ 149010
+4bmd 149011
+4bmm 149012
+4bqF 149013
+4byC 149014
+4b2D 149015
+4b2N 149016
+4b2n 149017
+4b63 149018
+4oC1 149019
+4oKO 149020
+4oSd 149021
+4oWA 149022
+4oae 149023
+4oan 149024
+4oeF 149025
+4oiD 149026
+4omP 149027
+4om9 149028
+4oqe 149029
+4oqh 149030
+4oqn 149031
+4oq2 149032
+4ouE 149033
+4o6S 149034
+4o6h 149035
+4o6j 149036
+4o6q 149037
+4o+O 149038
+4pOD 149039
+4pOW 149040
+4pOo 149041
+4pWL 149042
+4pWW 149043
+4pWi 149044
+4pWy 149045
+4paG 149046
+4paK 149047
+4paN 149048
+4pau 149049
+4pih 149050
+4pim 149051
+4pix 149052
+4pi/ 149053
+4pmY 149054
+4pmd 149055
+4pqw 149056
+4puR 149057
+4p6q 149058
+4qSd 149059
+4qSi 149060
+4qS3 149061
+4qer 149062
+4qit 149063
+4qiv 149064
+4rGj 149065
+4rKO 149066
+4rWb 149067
+44WU 149068
+44iP 149069
+44my 149070
+44mz 149071
+44qR 149072
+44ub 149073
+446Q 149074
+6rKk 149075
+6re/ 149076
+6rme 149077
+6ruo 149078
+6ryN 149079
+6r+4 149080
+64Os 149081
+64eQ 149082
+64ug 149083
+642v 149084
+65eM 149085
+65eR 149086
+66WA 149087
+66qD 149088
+66qv 149089
+67Gh 149090
+67OT 149091
+67O9 149092
+67Wc 149093
+7IKz 149094
+7IWl 149095
+7Ie9 149096
+7I+o 149097
+7I+4 149098
+7JWN 149099
+7JaW 149100
+7J+o 149101
+7KKD 149102
+7KKN 149103
+7KWR 149104
+7Ke8 149105
+7KmD 149106
+7K6c 149107
+7K64 149108
+7LOR 149109
+7LSl 149110
+7L6D 149111
+7YWm 149112
+7Yi/ 149113
+7ZO9 149114
+7ZWz 149115
+7ZaP 149116
+7Zeg 149117
+7Z2r 149118
+76ST 149119
+76SY 149120
+76WO 149121
+76W2 149122
+76aF 149123
+76a9 149124
+76eH 149125
+76yG 149126
+76yz 149127
+766H 149128
+766I 149129
+766d 149130
+766p 149131
+766x 149132
+76+Y 149133
+76+Z 149134
+76+i 149135
+76+j 149136
+76+k 149137
+76+l 149138
+77GC 149139
+77KG 149140
+77Kq 149141
+77S8 149142
+77qJ 149143
+77qK 149144
+77ql 149145
+8J2RqA== 149146
+8J2RqQ== 149147
+8J2Rsg== 149148
+8J2SjA== 149149
+8J2Sqg== 149150
+8J2Srg== 149151
+8J2Tgg== 149152
+8J2TiA== 149153
+8J2Trw== 149154
+8J2UqA== 149155
+8J2VgA== 149156
+8J2Vhg== 149157
+8J2Vpg== 149158
+8J2Vpw== 149159
+8J2Vqw== 149160
+8J2Vtw== 149161
+8J2XtQ== 149162
+8J2XuA== 149163
+8J2YhA== 149164
+8J2YmQ== 149165
+8J2YoA== 149166
+8J2YrA== 149167
+8J2ZjQ== 149168
+8J2ZkQ== 149169
+8J2ZoQ== 149170
+8J2ZqA== 149171
+8J2Ztw== 149172
+8J2ajQ== 149173
+8J2bvw== 149174
+8J+D 149175
+8J+Djw== 149176
+8J+FmA== 149177
+8J+J 149178
+8J+JkQ== 149179
+8J+OoQ== 149180
+8J+Oqg== 149181
+8J+OsQ== 149182
+8J+Osw== 149183
+8J+Oug== 149184
+8J+Pjg== 149185
+8J+Plw== 149186
+8J+Pmg== 149187
+8J+Png== 149188
+8J+Ppg== 149189
+8J+Ppw== 149190
+8J+QgQ== 149191
+8J+QhQ== 149192
+8J+Qkw== 149193
+8J+Sgg== 149194
+8J+TkQ== 149195
+8J+Tkw== 149196
+8J+TqA== 149197
+8J+Tqw== 149198
+8J+Uiw== 149199
+8J+UrQ== 149200
+8J+Urw== 149201
+8J+Vlw== 149202
+8J+agg== 149203
+8J+aog== 149204
+8J+apg== 149205
+8J+arA== 149206
+8J+biw== 149207
+8J+bjA== 149208
+8J+brA== 149209
+8J+btg== 149210
+8J+foQ== 149211
+8J+lmA== 149212
+8J+lnw== 149213
+8J+lpg== 149214
+8J+mhw== 149215
+8J+miA== 149216
+8J+nig== 149217
+8J+nlw== 149218
+8J+npA== 149219
+yrc= 149220
+y7k= 149221
+4bma 149222
+4b2l 149223
+4oSf 149224
+6rKv 149225
+6rur 149226
+67C3 149227
+7IOG 149228
+7Jud 149229
+7KiJ 149230
+7KuP 149231
+76+V 149232
+8J2ciw== 149233
+ybI= 149234
+0q0= 149235
+04g= 149236
+4L2b 149237
+4YuT 149238
+4Zmt 149239
+4aCp 149240
+4bmu 149241
+4oSS 149242
+4oa7 149243
+4rWD 149244
+64Co 149245
+66Cn 149246
+7Iml 149247
+7Iyc 149248
+7Je2 149249
+7KiI 149250
+7Kq+ 149251
+7Y+9 149252
+7ZqU 149253
+7Zu1 149254
+76S4 149255
+76aQ 149256
+76eX 149257
+76ea 149258
+76yv 149259
+8J2Qig== 149260
+8J2Vlw== 149261
+8J2Xmg== 149262
+8J2alg== 149263
+8J+FtA== 149264
+yIM= 149265
+yZ0= 149266
+z7E= 149267
+05c= 149268
+4KSi 149269
+4YWg 149270
+4Ymm 149271
+4ZGM 149272
+4ZK8 149273
+4Z6h 149274
+4aCo 149275
+4aCt 149276
+4aiF 149277
+4aiU 149278
+4bSY 149279
+4bam 149280
+4biO 149281
+4byF 149282
+4by5 149283
+4oav 149284
+4pOO 149285
+44+M 149286
+6ok= 149287
+6omC 149288
+64an 149289
+652x 149290
+7KKh 149291
+7Yi9 149292
+76SH 149293
+76Sb 149294
+8J2QlQ== 149295
+8J2TuA== 149296
+8J2TvA== 149297
+8J2XlQ== 149298
+8J2YiA== 149299
+8J+Pow== 149300
+8J+PpA== 149301
+8J+XhA== 149302
+0bc= 149303
+0qA= 149304
+4bWW 149305
+4byo 149306
+66yE 149307
+77C0 149308
+4oi9 149309
+1a0= 149310
+2rk= 149311
+4KWf 149312
+4YCG 149313
+4Z6S 149314
+44C2 149315
+6qar 149316
+77iT 149317
+8J2Qmw== 149318
+8J2Ylw== 149319
+8J+PnA== 149320
+7Kut 149321
+8J+nng== 149322
+4L2C 149323
+4oa/ 149324
+4oeP 149325
+4pOB 149326
+4pSn 149327
+4pWB 149328
+4pWk 149329
+6qaX 149330
+6qak 149331
+8J+PiA== 149332
+4Z6V 149333
+1L0= 149334
+4KqX 149335
+4KyG 149336
+4pWV 149337
+772g 149338
+4rym 149339
+4ryv 149340
+4r63 149341
+4pSW 149342
+4KyT 149343
+4piX 149344
+4o2L 149345
+76id 149346
+4ryl 149347
+76aq 149348
+4oSK 149349
+44C0 149350
+4o2i 149351
+8KGI 149352
+8KGIvQ== 149353
+76mo 149354
+44C7 149355
+44+D 149356
+76ah 149357
+76iY 149358
+8J+Qgw== 149359
+8J+Glg== 149360
+8J+Xvg== 149361
+44SH 149362
+3os= 149363
+4ry8 149364
+76it 149365
+3oA= 149366
+3oQ= 149367
+3og= 149368
+3pA= 149369
+4oyE 149370
+4ruY 149371
+45+i 149372
+4YWn 149373
+8JCMvw== 149374
+y7s= 149375
+4LKX 149376
+4YCH 149377
+4Z6K 149378
+4pWH 149379
+44e8 149380
+446w 149381
+1ZI= 149382
+3Ig= 149383
+36U= 149384
+4L+Q 149385
+4YCf 149386
+4oal 149387
+4pWM 149388
+4r2A 149389
+4r2w 149390
+4r6K 149391
+5IQ= 149392
+5ISA 149393
+8JOQ 149394
+8JOQjQ== 149395
+8J+Opg== 149396
+4oKv 149397
+4oqY 149398
+4oSN 149399
+yrU= 149400
+0bY= 149401
+2oM= 149402
+4KaU 149403
+4LSm 149404
+4Y62 149405
+4ZOV 149406
+4bmo 149407
+4oKg 149408
+4oew 149409
+4peS 149410
+4r+K 149411
+6rex 149412
+7LmV 149413
+7Yip 149414
+762A 149415
+8J2SuA== 149416
+8J2Tig== 149417
+8J2YqQ== 149418
+x6Y= 149419
+yas= 149420
+4Yqo 149421
+yLk= 149422
+yq8= 149423
+zqo= 149424
+2oA= 149425
+4Yy4 149426
+4Y67 149427
+4Y+V 149428
+4Y+0 149429
+4bKC 149430
+4b2o 149431
+4o+d 149432
+4piZ 149433
+64Oo 149434
+64S8 149435
+64iZ 149436
+66OF 149437
+7JS8 149438
+7JWd 149439
+7Jqs 149440
+7Jyx 149441
+76WC 149442
+76a5 149443
+76y5 149444
+762B 149445
+77OI 149446
+8J2UhQ== 149447
+8J2YpA== 149448
+8J2Zjw== 149449
+8J2ZmQ== 149450
+8J+ViQ== 149451
+8J+nmQ== 149452
+4biR 149453
+6rS8 149454
+64GN 149455
+65e0 149456
+652z 149457
+67Ce 149458
+67Ci 149459
+67WY 149460
+7IKU 149461
+7ISE 149462
+7Lya 149463
+7YCg 149464
+7Yqx 149465
+7YyW 149466
+76SR 149467
+76a0 149468
+76a4 149469
+77SN 149470
+8J2Ytw== 149471
+xKw= 149472
+xaw= 149473
+xoA= 149474
+xos= 149475
+xpw= 149476
+x5E= 149477
+x5g= 149478
+x54= 149479
+x6U= 149480
+x64= 149481
+ybA= 149482
+ybY= 149483
+ybc= 149484
+yb0= 149485
+yog= 149486
+ypA= 149487
+y44= 149488
+y58= 149489
+y6Y= 149490
+y68= 149491
+z5A= 149492
+z5M= 149493
+z6I= 149494
+z6Q= 149495
+z6o= 149496
+z60= 149497
+z64= 149498
+z7s= 149499
+0aA= 149500
+0a0= 149501
+0qg= 149502
+050= 149503
+1KE= 149504
+1Lc= 149505
+1Yk= 149506
+1ZM= 149507
+1ZY= 149508
+1Zo= 149509
+1Z0= 149510
+1o4= 149511
+2L8= 149512
+2oU= 149513
+2o0= 149514
+2pQ= 149515
+24o= 149516
+274= 149517
+3Jk= 149518
+3ZI= 149519
+3Zg= 149520
+35I= 149521
+35Y= 149522
+4KSK 149523
+4KSQ 149524
+4KaP 149525
+4KaW 149526
+4Kef 149527
+4Kqu 149528
+4Kq5 149529
+4K6F 149530
+4K6G 149531
+4LCh 149532
+4LCw 149533
+4LKa 149534
+4LKu 149535
+4LKv 149536
+4LSf 149537
+4LS3 149538
+4LW+ 149539
+4LaR 149540
+4Lae 149541
+4Ly8 149542
+4L2T 149543
+4YCT 149544
+4YKm 149545
+4YOW 149546
+4YOt 149547
+4YOv 149548
+4YWo 149549
+4YWq 149550
+4Yaw 149551
+4YiB 149552
+4YiO 149553
+4YiT 149554
+4Yil 149555
+4Yiy 149556
+4Yi0 149557
+4Yi7 149558
+4Ymg 149559
+4Ymy 149560
+4Ym2 149561
+4Yqj 149562
+4Yql 149563
+4Yqq 149564
+4YuY 149565
+4Yuy 149566
+4Yu2 149567
+4Yyj 149568
+4Y2h 149569
+4Y2j 149570
+4Y6s 149571
+4Y6+ 149572
+4ZCh 149573
+4ZWV 149574
+4Zax 149575
+4ZeQ 149576
+4Zet 149577
+4ZiJ 149578
+4Zqx 149579
+4Zuf 149580
+4Z6l 149581
+4Z+U 149582
+4aCj 149583
+4aCq 149584
+4aCw 149585
+4aC0 149586
+4aSW 149587
+4aWj 149588
+4a4= 149589
+4a6g 149590
+4a8= 149591
+4a+Z 149592
+4bA= 149593
+4bCN 149594
+4bSK 149595
+4bS+ 149596
+4bWB 149597
+4bWO 149598
+4bWe 149599
+4bWk 149600
+4baF 149601
+4baY 149602
+4baf 149603
+4bai 149604
+4bak 149605
+4bax 149606
+4ba7 149607
+4biJ 149608
+4bie 149609
+4bi6 149610
+4bmT 149611
+4bmX 149612
+4bmq 149613
+4bqK 149614
+4bqP 149615
+4bqb 149616
+4byD 149617
+4byM 149618
+4by/ 149619
+4b2C 149620
+4b2T 149621
+4b2X 149622
+4b2m 149623
+4b6x 149624
+4b60 149625
+4b+Y 149626
+4b+f 149627
+4b+4 149628
+4oGY 149629
+4oKR 149630
+4oKb 149631
+4oK/ 149632
+4oSH 149633
+4oSe 149634
+4oSx 149635
+4oef 149636
+4oey 149637
+4oik 149638
+4oi2 149639
+4omC 149640
+4om+ 149641
+4oqo 149642
+4oqz 149643
+4oq3 149644
+4ouM 149645
+4ouY 149646
+4oyV 149647
+4oyl 149648
+4oy1 149649
+4oy6 149650
+4o2j 149651
+4o2y 149652
+4o21 149653
+4o6H 149654
+4o+D 149655
+4o+Q 149656
+4o+g 149657
+4o+k 149658
+4o+2 149659
+4o+4 149660
+4o+5 149661
+4pGC 149662
+4pK3 149663
+4pK6 149664
+4pOh 149665
+4pOk 149666
+4pS+ 149667
+4paY 149668
+4pa1 149669
+4peq 149670
+4pe3 149671
+4pio 149672
+4pir 149673
+4piy 149674
+4piz 149675
+4pmG 149676
+4pqk 149677
+4pql 149678
+4puT 149679
+4pu0 149680
+4pu+ 149681
+4p6r 149682
+4p6/ 149683
+4p+3 149684
+4qSR 149685
+4qSr 149686
+4qS2 149687
+4qS9 149688
+4qeq 149689
+4qiA 149690
+4qm9 149691
+4qyh 149692
+4qyi 149693
+4qyk 149694
+4rKW 149695
+4rKq 149696
+4rWA 149697
+4riu 149698
+4ri9 149699
+44Cg 149700
+44C3 149701
+44SM 149702
+44SY 149703
+44WR 149704
+44iO 149705
+44iQ 149706
+44qc 149707
+44yT 149708
+44yg 149709
+446f 149710
+446k 149711
+446n 149712
+46yu 149713
+5Ig= 149714
+5IiA 149715
+5LA= 149716
+5LCA 149717
+6oU= 149718
+6oWJ 149719
+6oeX 149720
+6og= 149721
+6oiN 149722
+6qeC 149723
+6qeK 149724
+6qqA 149725
+6rKI 149726
+6rKN 149727
+6rOA 149728
+6rWg 149729
+6r2Q 149730
+6r6I 149731
+6r+x 149732
+64OP 149733
+64SR 149734
+64Wk 149735
+64e4 149736
+64i8 149737
+64mF 149738
+64qj 149739
+64u6 149740
+642e 149741
+65CM 149742
+65W4 149743
+65ig 149744
+65mH 149745
+65mI 149746
+65y9 149747
+656U 149748
+66Cc 149749
+66OQ 149750
+66eA 149751
+66eK 149752
+66qA 149753
+66yt 149754
+66++ 149755
+67Oc 149756
+67SK 149757
+67WJ 149758
+67ec 149759
+67iA 149760
+67mL 149761
+7IGE 149762
+7IKj 149763
+7IK7 149764
+7IS1 149765
+7IWS 149766
+7ImI 149767
+7ImU 149768
+7IqM 149769
+7IqZ 149770
+7JC0 149771
+7JO6 149772
+7JWa 149773
+7JW6 149774
+7Jac 149775
+7Jeq 149776
+7Jic 149777
+7Jmk 149778
+7Jqb 149779
+7Jq6 149780
+7J2F 149781
+7J2P 149782
+7J2t 149783
+7J22 149784
+7KCb 149785
+7KGI 149786
+7KKJ 149787
+7KKU 149788
+7Kmg 149789
+7K2M 149790
+7K+p 149791
+7LSj 149792
+7LiV 149793
+7Lmf 149794
+7L6h 149795
+7L+Z 149796
+7YGH 149797
+7YGJ 149798
+7YeA 149799
+7Yi2 149800
+7ZaR 149801
+7Zak 149802
+7ZeF 149803
+7ZyP 149804
+7Z2d 149805
+76SS 149806
+76SV 149807
+76Ss 149808
+76WF 149809
+76WH 149810
+76WP 149811
+76Wa 149812
+76Wf 149813
+76aE 149814
+76aI 149815
+76ao 149816
+76ap 149817
+76ay 149818
+76eB 149819
+76eD 149820
+76eU 149821
+76eg 149822
+76ej 149823
+76eu 149824
+762Q 149825
+762W 149826
+762m 149827
+7620 149828
+7621 149829
+7622 149830
+7624 149831
+766M 149832
+766O 149833
+766e 149834
+766f 149835
+766h 149836
+766q 149837
+76+U 149838
+76+X 149839
+76+a 149840
+76+b 149841
+76+d 149842
+76+f 149843
+76+n 149844
+76+o 149845
+76+r 149846
+76+v 149847
+76+w 149848
+76+x 149849
+76+y 149850
+76+z 149851
+76+0 149852
+76+1 149853
+76+2 149854
+77CA 149855
+77GF 149856
+77GU 149857
+77G0 149858
+77KB 149859
+77OV 149860
+77e9 149861
+77iV 149862
+77ix 149863
+77mj 149864
+77m9 149865
+77uN 149866
+776x 149867
+8J2QmQ== 149868
+8J2QvQ== 149869
+8J2RpA== 149870
+8J2Rrg== 149871
+8J2RtQ== 149872
+8J2Sgw== 149873
+8J2ShA== 149874
+8J2TrQ== 149875
+8J2Ttw== 149876
+8J2Ulg== 149877
+8J2Ung== 149878
+8J2Uog== 149879
+8J2Upg== 149880
+8J2UrA== 149881
+8J2VhA== 149882
+8J2Vig== 149883
+8J2Vjg== 149884
+8J2VmQ== 149885
+8J2VnA== 149886
+8J2VrQ== 149887
+8J2Vsw== 149888
+8J2VuA== 149889
+8J2Vvg== 149890
+8J2WiQ== 149891
+8J2Wjw== 149892
+8J2Yhw== 149893
+8J2YiQ== 149894
+8J2Ylg== 149895
+8J2Ymw== 149896
+8J2Yng== 149897
+8J2Yqw== 149898
+8J2Yvg== 149899
+8J2Zhw== 149900
+8J2ZiQ== 149901
+8J2Ziw== 149902
+8J2Zjg== 149903
+8J2ZmA== 149904
+8J2ZpQ== 149905
+8J2agw== 149906
+8J2akA== 149907
+8J2alA== 149908
+8J2cgw== 149909
+8J+Etw== 149910
+8J+FnQ== 149911
+8J+Fvg== 149912
+8J+Ggg== 149913
+8J+Gkw== 149914
+8J+Mgg== 149915
+8J+Mhg== 149916
+8J+MiQ== 149917
+8J+MkQ== 149918
+8J+MmA== 149919
+8J+MqQ== 149920
+8J+Mqw== 149921
+8J+Nog== 149922
+8J+NpQ== 149923
+8J+Omw== 149924
+8J+Oog== 149925
+8J+OtA== 149926
+8J+RoQ== 149927
+8J+Svg== 149928
+8J+TrQ== 149929
+8J+UiA== 149930
+8J+Upg== 149931
+8J+Usg== 149932
+8J+Usw== 149933
+8J+Vkw== 149934
+8J+VlQ== 149935
+8J+VmA== 149936
+8J+Vnw== 149937
+8J+Vtw== 149938
+8J+Xsw== 149939
+8J+ahA== 149940
+8J+alA== 149941
+8J+alg== 149942
+8J+bkA== 149943
+8J+bpA== 149944
+8J+buA== 149945
+8J+g 149946
+8J+gsw== 149947
+8J+kuQ== 149948
+8J+lgw== 149949
+8J+lqA== 149950
+8J+lqg== 149951
+8J+lvg== 149952
+8J+mgw== 149953
+8J+mkg== 149954
+8J+mmQ== 149955
+8J+mtg== 149956
+8J+noA== 149957
+8J+nqg== 149958
+8J+nrQ== 149959
+8J+nsg== 149960
+8KO3 149961
+8KO3rQ== 149962
+8KaY 149963
+8KaYkg== 149964
+xpE= 149965
+x5k= 149966
+yK4= 149967
+2KA= 149968
+2oQ= 149969
+3IA= 149970
+36I= 149971
+4YmA 149972
+4YqQ 149973
+4Y6g 149974
+4bqe 149975
+64ie 149976
+65Wf 149977
+66OB 149978
+66SX 149979
+7ISl 149980
+7IWR 149981
+7JaQ 149982
+7Jub 149983
+7KOV 149984
+7Y6P 149985
+7ZuT 149986
+76W6 149987
+77Ob 149988
+77Sr 149989
+8Jan 149990
+8Jantw== 149991
+8J2VgQ== 149992
+8J+Qqg== 149993
+8J+SiA== 149994
+8J+ToA== 149995
+8J+Vmw== 149996
+8J+VtA== 149997
+0Z0= 149998
+04o= 149999
+4KWy 150000
+4Kqq 150001
+4YOk 150002
+4Y2Q 150003
+4baw 150004
+4byd 150005
+4b2p 150006
+4ouL 150007
+4pK9 150008
+4pm+ 150009
+4r2U 150010
+4r6v 150011
+44SS 150012
+44Wa 150013
+65CN 150014
+67eB 150015
+7IuA 150016
+7Jqd 150017
+7KWw 150018
+7Lq0 150019
+7YuJ 150020
+7Z29 150021
+76aA 150022
+76a/ 150023
+76eF 150024
+76eT 150025
+762v 150026
+766G 150027
+8JCklQ== 150028
+8J2Qnw== 150029
+8J2ShQ== 150030
+8J2TnA== 150031
+8J2UsA== 150032
+8J2Uuw== 150033
+8J2YjQ== 150034
+8J2Zrw== 150035
+8J+EvQ== 150036
+8J+Fgg== 150037
+8J+FlA== 150038
+8J+FvQ== 150039
+8J+TtA== 150040
+8J+nlg== 150041
+05I= 150042
+4biy 150043
+64m8 150044
+x48= 150045
+yJM= 150046
+yrg= 150047
+1YI= 150048
+24U= 150049
+36E= 150050
+36M= 150051
+4K6v 150052
+4LCI 150053
+4LK4 150054
+4Lqu 150055
+4LyV 150056
+4YCO 150057
+4Yah 150058
+4ZCL 150059
+4ZCV 150060
+4ZGv 150061
+4Z6G 150062
+4aiV 150063
+4amI 150064
+4oGF 150065
+4oaa 150066
+4pSO 150067
+4qCp 150068
+4rKC 150069
+4rKU 150070
+4rKo 150071
+44qa 150072
+7ZOy 150073
+8J2RiA== 150074
+8J2RrA== 150075
+8J2RuQ== 150076
+8J2Svg== 150077
+8J2TsQ== 150078
+8J2TvQ== 150079
+8J2Vrw== 150080
+8J2Vuw== 150081
+8J2YvQ== 150082
+8J2ahg== 150083
+8J+EsA== 150084
+8J+QqA== 150085
+0pU= 150086
+4LKF 150087
+76iG 150088
+8J2RsA== 150089
+8J+EuA== 150090
+1I4= 150091
+2I0= 150092
+2bU= 150093
+4LK2 150094
+4YCI 150095
+4ZiX 150096
+4aC4 150097
+4aGh 150098
+4aiy 150099
+4amB 150100
+4bS3 150101
+4bWn 150102
+4pWo 150103
+4pqB 150104
+4r6d 150105
+44C8 150106
+44SP 150107
+6pKr 150108
+6qal 150109
+6qap 150110
+6qay 150111
+7Ji8 150112
+7ZOQ 150113
+8JOH 150114
+8JOHvA== 150115
+8J2Vvw== 150116
+8J+btA== 150117
+66ic 150118
+4LK1 150119
+4LSO 150120
+4LyA 150121
+4oeW 150122
+44ir 150123
+4pOA 150124
+4YW0 150125
+4Zq+ 150126
+4Zue 150127
+4Zur 150128
+4aW0 150129
+4oab 150130
+4oa2 150131
+4oek 150132
+4pWf 150133
+4pi3 150134
+4pqQ 150135
+8J+ntA== 150136
+4bmz 150137
+4pSN 150138
+4pSS 150139
+4pSp 150140
+4pSm 150141
+4r61 150142
+4Kqc 150143
+4Kqk 150144
+4oeZ 150145
+4pSx 150146
+4pWA 150147
+4r2K 150148
+772f 150149
+4Kyh 150150
+8KCu 150151
+8KCutw== 150152
+4pWD 150153
+4rCU 150154
+44qm 150155
+8J+OkA== 150156
+44ew 150157
+4ryd 150158
+4r6U 150159
+4r2S 150160
+4qCS 150161
+76im 150162
+76mS 150163
+76iy 150164
+76mW 150165
+8JOPuA== 150166
+44yD 150167
+8Jak 150168
+8JakkA== 150169
+76at 150170
+4oqF 150171
+4r6z 150172
+5LSl 150173
+76mV 150174
+8J+MlA== 150175
+4Z6L 150176
+4pqN 150177
+4ryL 150178
+446Y 150179
+8JCMsg== 150180
+yak= 150181
+4Y6R 150182
+4oau 150183
+4oeD 150184
+4pqO 150185
+44ex 150186
+44up 150187
+44y2 150188
+6pmq 150189
+646s 150190
+76iQ 150191
+76ib 150192
+76mK 150193
+76mN 150194
+8JOF 150195
+8JOFug== 150196
+z6E= 150197
+yJE= 150198
+yYI= 150199
+1JM= 150200
+344= 150201
+4LSn 150202
+4YCJ 150203
+4YCL 150204
+4YCR 150205
+4YCg 150206
+4ZqZ 150207
+4aiE 150208
+4aip 150209
+4ai5 150210
+4amT 150211
+4ayc 150212
+4bSZ 150213
+4bWR 150214
+4oKt 150215
+4oaw 150216
+4pyB 150217
+4r2Q 150218
+44uv 150219
+44y9 150220
+7Yai 150221
+76S/ 150222
+8J+C 150223
+8J+Cuw== 150224
+yJI= 150225
+zbo= 150226
+1KU= 150227
+1ZE= 150228
+2rY= 150229
+4KeO 150230
+4Lau 150231
+4LqW 150232
+4Lqc 150233
+4Lq9 150234
+4YO7 150235
+4YWv 150236
+4Yue 150237
+4ZaV 150238
+4bSI 150239
+4baG 150240
+4bic 150241
+4bm8 150242
+4b+o 150243
+4oSL 150244
+4oSt 150245
+4oix 150246
+4oyT 150247
+4pSH 150248
+4pSi 150249
+4rGu 150250
+4rKE 150251
+44e+ 150252
+44is 150253
+67ih 150254
+7JCJ 150255
+7Zmb 150256
+8J2Vqg== 150257
+xrk= 150258
+zbI= 150259
+04E= 150260
+27w= 150261
+4Kar 150262
+4YWf 150263
+4YmG 150264
+4Y2I 150265
+4bqW 150266
+4b2J 150267
+4pS4 150268
+4r2p 150269
+6pw= 150270
+6pyl 150271
+6rWF 150272
+64KU 150273
+64Sg 150274
+64eX 150275
+65md 150276
+7Jqv 150277
+7Jq3 150278
+7J+b 150279
+7LeQ 150280
+7Z+s 150281
+7Z+u 150282
+7Z+w 150283
+76aG 150284
+76ax 150285
+77Ke 150286
+77Ok 150287
+77Ol 150288
+8JCMuA== 150289
+8J2Ujw== 150290
+8J2Vrg== 150291
+8J2Yow== 150292
+4KaI 150293
+4o+P 150294
+44SW 150295
+6rKH 150296
+65aY 150297
+65y3 150298
+656S 150299
+66GT 150300
+66KJ 150301
+66OD 150302
+66eL 150303
+67KL 150304
+7IK3 150305
+7IiV 150306
+7Iyo 150307
+7JO7 150308
+7JaK 150309
+7Jms 150310
+7J27 150311
+7KaB 150312
+7LWk 150313
+7LeD 150314
+7YCc 150315
+7YWJ 150316
+7Y2g 150317
+7Y+F 150318
+7ZGx 150319
+7ZWV 150320
+7Zag 150321
+7Z2V 150322
+xpk= 150323
+xpo= 150324
+xp4= 150325
+x4M= 150326
+x4o= 150327
+x5w= 150328
+x6Q= 150329
+x60= 150330
+x7k= 150331
+yIA= 150332
+yIE= 150333
+yIU= 150334
+yIk= 150335
+yJc= 150336
+yJ8= 150337
+yKQ= 150338
+yKU= 150339
+yKg= 150340
+yLU= 150341
+yLo= 150342
+yLs= 150343
+yYw= 150344
+ya4= 150345
+yoU= 150346
+yqU= 150347
+yqg= 150348
+y5M= 150349
+y5Q= 150350
+y6A= 150351
+y6M= 150352
+y7g= 150353
+zbQ= 150354
+z5c= 150355
+z5g= 150356
+z5k= 150357
+z5o= 150358
+z50= 150359
+z6g= 150360
+z6w= 150361
+z74= 150362
+z78= 150363
+0ao= 150364
+0oA= 150365
+0pw= 150366
+0rw= 150367
+0r0= 150368
+04I= 150369
+04U= 150370
+04c= 150371
+040= 150372
+05Y= 150373
+058= 150374
+06s= 150375
+07E= 150376
+1IY= 150377
+1Ic= 150378
+1Lo= 150379
+1Ys= 150380
+1ok= 150381
+2Ig= 150382
+2Io= 150383
+2L0= 150384
+2L4= 150385
+2bc= 150386
+2oI= 150387
+2oo= 150388
+2pY= 150389
+2pc= 150390
+2qM= 150391
+2qs= 150392
+2rg= 150393
+24A= 150394
+240= 150395
+270= 150396
+3Ik= 150397
+3KQ= 150398
+3ac= 150399
+3bQ= 150400
+3oM= 150401
+3qQ= 150402
+3qU= 150403
+35o= 150404
+35s= 150405
+36Q= 150406
+4KCN 150407
+4KCT 150408
+4KCz 150409
+4KGi 150410
+4KWg 150411
+4Keg 150412
+4Ke6 150413
+4KiK 150414
+4KiQ 150415
+4Kiu 150416
+4Kiv 150417
+4Kiw 150418
+4Ki4 150419
+4KqG 150420
+4Kqz 150421
+4Kq1 150422
+4Kq9 150423
+4KyM 150424
+4KyY 150425
+4Ky9 150426
+4K6D 150427
+4K64 150428
+4LCG 150429
+4LCV 150430
+4LCm 150431
+4LKG 150432
+4LKK 150433
+4LKM 150434
+4LKQ 150435
+4LKb 150436
+4LKk 150437
+4LKm 150438
+4LKq 150439
+4LKy 150440
+4LK5 150441
+4LSG 150442
+4LSP 150443
+4LSX 150444
+4LSr 150445
+4LS5 150446
+4LW6 150447
+4LW9 150448
+4LaF 150449
+4LaK 150450
+4LaU 150451
+4Lan 150452
+4Lar 150453
+4Law 150454
+4LyE 150455
+4LyF 150456
+4LyK 150457
+4L2Z 150458
+4L2h 150459
+4L2n 150460
+4L+A 150461
+4L+Z 150462
+4YCd 150463
+4YCn 150464
+4YCp 150465
+4YC/ 150466
+4YG1 150467
+4YKB 150468
+4YK9 150469
+4YOC 150470
+4YOq 150471
+4YSK 150472
+4YSi 150473
+4YWm 150474
+4YWt 150475
+4Yau 150476
+4Yax 150477
+4Ya7 150478
+4Yc= 150479
+4YeC 150480
+4YiF 150481
+4YiJ 150482
+4YiM 150483
+4YiQ 150484
+4YiS 150485
+4YiZ 150486
+4Yia 150487
+4Yic 150488
+4Yie 150489
+4Yip 150490
+4Yiz 150491
+4Yi6 150492
+4Yi9 150493
+4YmF 150494
+4Ymi 150495
+4Ymx 150496
+4Ym0 150497
+4YqD 150498
+4YqN 150499
+4YqW 150500
+4Yqu 150501
+4Yq4 150502
+4Yub 150503
+4Yud 150504
+4Yuz 150505
+4YyB 150506
+4YyF 150507
+4Yyl 150508
+4Yym 150509
+4Yyo 150510
+4Y2K 150511
+4Y2N 150512
+4Y2V 150513
+4Y2W 150514
+4Y2i 150515
+4Y2k 150516
+4Y6S 150517
+4Y6q 150518
+4Y+B 150519
+4Y+Q 150520
+4Y+f 150521
+4ZCC 150522
+4ZCW 150523
+4ZCd 150524
+4ZCe 150525
+4ZCf 150526
+4ZCg 150527
+4ZGW 150528
+4ZKL 150529
+4ZKN 150530
+4ZKh 150531
+4ZOr 150532
+4ZSV 150533
+4ZWL 150534
+4ZWR 150535
+4ZWZ 150536
+4ZWa 150537
+4ZWb 150538
+4ZWk 150539
+4ZWm 150540
+4ZWu 150541
+4ZW8 150542
+4ZaT 150543
+4ZeX 150544
+4Zei 150545
+4Zev 150546
+4Ze3 150547
+4ZiE 150548
+4ZiR 150549
+4ZuC 150550
+4ZuZ 150551
+4Z6N 150552
+4aCG 150553
+4aCh 150554
+4aCm 150555
+4aCu 150556
+4aCv 150557
+4aCy 150558
+4aC3 150559
+4aGN 150560
+4aGe 150561
+4aGk 150562
+4aG0 150563
+4aG1 150564
+4aST 150565
+4aWW 150566
+4aWw 150567
+4aim 150568
+4ain 150569
+4aio 150570
+4aiq 150571
+4ais 150572
+4aiv 150573
+4aiz 150574
+4ai1 150575
+4amD 150576
+4ayV 150577
+4a2j 150578
+4bE= 150579
+4bGa 150580
+4bKg 150581
+4bST 150582
+4bS2 150583
+4bWC 150584
+4bWM 150585
+4bWl 150586
+4bW0 150587
+4baH 150588
+4biI 150589
+4big 150590
+4bin 150591
+4bi0 150592
+4bi+ 150593
+4bmA 150594
+4bmW 150595
+4bmf 150596
+4bmg 150597
+4bmr 150598
+4bmx 150599
+4bm3 150600
+4bm/ 150601
+4bqE 150602
+4bqN 150603
+4bqR 150604
+4bqX 150605
+4byJ 150606
+4byT 150607
+4byt 150608
+4b2L 150609
+4b2S 150610
+4b2g 150611
+4b2j 150612
+4b6E 150613
+4b6P 150614
+4b6R 150615
+4b6X 150616
+4b6m 150617
+4b6n 150618
+4b6+ 150619
+4b+E 150620
+4b+T 150621
+4b+h 150622
+4b+s 150623
+4oGa 150624
+4oKM 150625
+4oSB 150626
+4oSU 150627
+4oSj 150628
+4oSn 150629
+4oSv 150630
+4oSw 150631
+4oS0 150632
+4oWF 150633
+4oac 150634
+4oar 150635
+4oat 150636
+4oax 150637
+4oa5 150638
+4oa9 150639
+4oeH 150640
+4oec 150641
+4oe1 150642
+4oiJ 150643
+4oiK 150644
+4oiW 150645
+4oic 150646
+4oi+ 150647
+4omA 150648
+4omL 150649
+4omM 150650
+4omT 150651
+4omc 150652
+4om0 150653
+4om/ 150654
+4oqK 150655
+4oqL 150656
+4oqU 150657
+4oqW 150658
+4oqj 150659
+4oqm 150660
+4ouO 150661
+4ouq 150662
+4ouy 150663
+4oym 150664
+4oyn 150665
+4o26 150666
+4o6I 150667
+4o6o 150668
+4o6s 150669
+4o6z 150670
+4o68 150671
+4o6+ 150672
+4o+M 150673
+4o+a 150674
+4o+r 150675
+4o+v 150676
+4o+1 150677
+4pKc 150678
+4pKd 150679
+4pKr 150680
+4pOE 150681
+4pOK 150682
+4pOZ 150683
+4pOp 150684
+4pSR 150685
+4pSZ 150686
+4pSa 150687
+4pSl 150688
+4pWF 150689
+4pWJ 150690
+4pWN 150691
+4pWP 150692
+4pWe 150693
+4paa 150694
+4pav 150695
+4peD 150696
+4pea 150697
+4pes 150698
+4pe0 150699
+4piI 150700
+4pik 150701
+4pil 150702
+4pin 150703
+4pis 150704
+4pmB 150705
+4pmx 150706
+4pqD 150707
+4pqE 150708
+4pqF 150709
+4pqP 150710
+4pqa 150711
+4pqe 150712
+4pqf 150713
+4pqx 150714
+4pqy 150715
+4pyA 150716
+4pyf 150717
+4pyi 150718
+4p21 150719
+4p+h 150720
+4p+m 150721
+4p+n 150722
+4p+z 150723
+4p++ 150724
+4p+/ 150725
+4qCH 150726
+4qSE 150727
+4qS6 150728
+4qWC 150729
+4qW5 150730
+4qeJ 150731
+4qe8 150732
+4qe9 150733
+4qiN 150734
+4qyK 150735
+4qyf 150736
+4q2e 150737
+4q6e 150738
+4q6z 150739
+4q+I 150740
+4q+R 150741
+4rGg 150742
+4rGx 150743
+4rKt 150744
+4rS5 150745
+4rWV 150746
+4ri+ 150747
+4rqr 150748
+4ryG 150749
+4ryg 150750
+4r2f 150751
+4r28 150752
+4r6b 150753
+4r6n 150754
+4r+D 150755
+4r+7 150756
+44KV 150757
+44Kf 150758
+44Sb 150759
+44Sh 150760
+44S2 150761
+44S6 150762
+44WS 150763
+44Wf 150764
+44aA 150765
+44e7 150766
+44iR 150767
+44it 150768
+44iu 150769
+44iz 150770
+44i5 150771
+44ml 150772
+44mm 150773
+44m5 150774
+44m/ 150775
+44qe 150776
+44qo 150777
+44uR 150778
+44ul 150779
+44u0 150780
+44u6 150781
+446E 150782
+446V 150783
+446v 150784
+44+C 150785
+44+I 150786
+44+T 150787
+44+W 150788
+44+x 150789
+45Cx 150790
+45+B 150791
+46I= 150792
+46Ko 150793
+46g= 150794
+46iz 150795
+46uq 150796
+46u0 150797
+47az 150798
+47q+ 150799
+5IA= 150800
+5ICA 150801
+5Is= 150802
+5IuM 150803
+5IyA 150804
+5JCA 150805
+5KCA 150806
+5KA= 150807
+5KC8 150808
+5Kc= 150809
+5Kee 150810
+5Kiw 150811
+5Ki6 150812
+5LSA 150813
+5Lc= 150814
+5LeF 150815
+5Le4 150816
+6oI= 150817
+6oKr 150818
+6ow= 150819
+6oy8 150820
+6o0= 150821
+6o2y 150822
+6pK1 150823
+6pM= 150824
+6pO9 150825
+6pmt 150826
+6p2b 150827
+6p2l 150828
+6p4= 150829
+6p6K 150830
+6qaG 150831
+6qaH 150832
+6qaf 150833
+6qao 150834
+6qeI 150835
+6qk= 150836
+6qmf 150837
+6qqL 150838
+6qqR 150839
+6qqV 150840
+6qqX 150841
+6qqc 150842
+6qqu 150843
+6qqx 150844
+6qq7 150845
+6qq8 150846
+6quA 150847
+6qud 150848
+6rCD 150849
+6rCY 150850
+6rGc 150851
+6rKT 150852
+6rKa 150853
+6rOZ 150854
+6rO+ 150855
+6rSX 150856
+6rSZ 150857
+6rWb 150858
+6raD 150859
+6raV 150860
+6rao 150861
+6rip 150862
+6ri/ 150863
+6rmE 150864
+6rmG 150865
+6rmJ 150866
+6rmT 150867
+6rmi 150868
+6rmj 150869
+6rm4 150870
+6rqz 150871
+6r+P 150872
+6r+V 150873
+6r+n 150874
+64Cp 150875
+64GF 150876
+64O1 150877
+64SW 150878
+64SX 150879
+64Si 150880
+64WC 150881
+64aQ 150882
+64ec 150883
+64iL 150884
+64ia 150885
+64mN 150886
+64mo 150887
+64qa 150888
+64qh 150889
+64uc 150890
+64uq 150891
+64yY 150892
+64yk 150893
+64y4 150894
+646f 150895
+64+o 150896
+65CE 150897
+65CP 150898
+65C0 150899
+65C4 150900
+65GB 150901
+65G/ 150902
+65Ko 150903
+65O3 150904
+65Su 150905
+65Sy 150906
+65Wn 150907
+65aU 150908
+65aq 150909
+65it 150910
+65qA 150911
+65qg 150912
+65uU 150913
+65up 150914
+65yF 150915
+656V 150916
+656w 150917
+65+Q 150918
+66Ch 150919
+66Ge 150920
+66Gj 150921
+66G1 150922
+66OE 150923
+66ON 150924
+66Sz 150925
+66aN 150926
+66aP 150927
+66az 150928
+66eE 150929
+66eG 150930
+66eN 150931
+66ec 150932
+66er 150933
+66e7 150934
+66iu 150935
+66mC 150936
+66mt 150937
+66q0 150938
+66yc 150939
+66yg 150940
+66yr 150941
+66y+ 150942
+662s 150943
+666Y 150944
+6665 150945
+66+V 150946
+66+c 150947
+67Co 150948
+67Cq 150949
+67GU 150950
+67KY 150951
+67Kb 150952
+67Kx 150953
+67K0 150954
+67S9 150955
+67Wk 150956
+67Wo 150957
+67eX 150958
+67eY 150959
+67iT 150960
+67ic 150961
+67mq 150962
+67qD 150963
+67qY 150964
+67q1 150965
+67u0 150966
+67yQ 150967
+676U 150968
+7IGt 150969
+7IKg 150970
+7IKu 150971
+7IOP 150972
+7IOZ 150973
+7IS6 150974
+7IWi 150975
+7IaA 150976
+7IaF 150977
+7Iak 150978
+7Iam 150979
+7Ias 150980
+7Iex 150981
+7Ii1 150982
+7Iuo 150983
+7Iu0 150984
+7Iyw 150985
+7I2c 150986
+7I6X 150987
+7I6Y 150988
+7I68 150989
+7JGJ 150990
+7JGd 150991
+7JG7 150992
+7JKU 150993
+7JKv 150994
+7JOp 150995
+7JWQ 150996
+7JWW 150997
+7Jag 150998
+7Ja+ 150999
+7JeD 151000
+7JeX 151001
+7Jec 151002
+7Jeo 151003
+7JiC 151004
+7JiE 151005
+7JiP 151006
+7Ji+ 151007
+7Ji/ 151008
+7Jyn 151009
+7J2Q 151010
+7J2W 151011
+7J23 151012
+7J6N 151013
+7J6P 151014
+7J6o 151015
+7J6q 151016
+7J6z 151017
+7KCh 151018
+7KC0 151019
+7KC5 151020
+7KGA 151021
+7KGq 151022
+7KG1 151023
+7KKQ 151024
+7KKo 151025
+7KOM 151026
+7KOZ 151027
+7KOz 151028
+7KaR 151029
+7Kel 151030
+7Ke0 151031
+7Ke+ 151032
+7KiT 151033
+7KiV 151034
+7Kmw 151035
+7Km7 151036
+7Km8 151037
+7KqX 151038
+7KyU 151039
+7KyY 151040
+7K6u 151041
+7K+V 151042
+7K+Y 151043
+7LCO 151044
+7LCv 151045
+7LGD 151046
+7LG1 151047
+7LKn 151048
+7LKu 151049
+7LKv 151050
+7LOs 151051
+7LSL 151052
+7LSi 151053
+7LWl 151054
+7Laj 151055
+7LiI 151056
+7LiZ 151057
+7Lqk 151058
+7Lqt 151059
+7Lu9 151060
+7LyZ 151061
+7L2s 151062
+7L6A 151063
+7L+F 151064
+7L+9 151065
+7YCF 151066
+7YGm 151067
+7YKF 151068
+7YO2 151069
+7YO5 151070
+7YSU 151071
+7YWj 151072
+7YaE 151073
+7Yan 151074
+7Ya5 151075
+7Ye8 151076
+7Ymk 151077
+7Yq9 151078
+7YuC 151079
+7YuR 151080
+7Y2I 151081
+7Y2Z 151082
+7Y2/ 151083
+7Y62 151084
+7ZCd 151085
+7ZKc 151086
+7ZOd 151087
+7ZOq 151088
+7ZOx 151089
+7ZO3 151090
+7ZO8 151091
+7ZSZ 151092
+7ZSg 151093
+7ZWa 151094
+7ZWb 151095
+7ZWe 151096
+7ZWf 151097
+7ZWn 151098
+7ZW2 151099
+7ZaK 151100
+7ZaL 151101
+7ZaN 151102
+7ZaU 151103
+7ZaY 151104
+7Zah 151105
+7Zas 151106
+7Zej 151107
+7Ze/ 151108
+7ZiW 151109
+7Zit 151110
+7Zqw 151111
+7ZuN 151112
+7Zu9 151113
+7Z2f 151114
+7Z2t 151115
+7Z20 151116
+7Z6c 151117
+76SJ 151118
+76St 151119
+76Sy 151120
+76S1 151121
+76S8 151122
+76WA 151123
+76WR 151124
+76WS 151125
+76WV 151126
+76WY 151127
+76WZ 151128
+76Wr 151129
+76Ws 151130
+76Ww 151131
+76W/ 151132
+76aL 151133
+76aP 151134
+76aU 151135
+76aW 151136
+76aY 151137
+76ab 151138
+76ag 151139
+76au 151140
+76av 151141
+76a6 151142
+76a7 151143
+76a+ 151144
+76eG 151145
+76eW 151146
+76eb 151147
+76ee 151148
+76ef 151149
+76en 151150
+76ez 151151
+76e6 151152
+76e9 151153
+76iD 151154
+76ia 151155
+76ii 151156
+76mf 151157
+76yk 151158
+76ys 151159
+76y8 151160
+762S 151161
+762V 151162
+762b 151163
+762d 151164
+762e 151165
+762f 151166
+762k 151167
+762n 151168
+762o 151169
+762u 151170
+762w 151171
+762x 151172
+7623 151173
+7625 151174
+7627 151175
+766A 151176
+766D 151177
+766E 151178
+766F 151179
+766N 151180
+766S 151181
+766T 151182
+766V 151183
+766m 151184
+766u 151185
+766w 151186
+76+T 151187
+76+c 151188
+76+p 151189
+76+q 151190
+76+s 151191
+76+t 151192
+76+u 151193
+76+3 151194
+76+5 151195
+76+7 151196
+76+8 151197
+77CD 151198
+77CM 151199
+77CQ 151200
+77CY 151201
+77CZ 151202
+77Cc 151203
+77Ce 151204
+77Ci 151205
+77Cu 151206
+77Cw 151207
+77C8 151208
+77C/ 151209
+77GA 151210
+77GB 151211
+77GI 151212
+77GL 151213
+77GP 151214
+77Gt 151215
+77KA 151216
+77KH 151217
+77KI 151218
+77KL 151219
+77KO 151220
+77KS 151221
+77Kc 151222
+77Kg 151223
+77Ks 151224
+77K7 151225
+77OH 151226
+77OU 151227
+77Oj 151228
+77Or 151229
+77SY 151230
+77Sw 151231
+77S9 151232
+77Y= 151233
+77aw 151234
+77iW 151235
+77i0 151236
+77i5 151237
+77mN 151238
+77mX 151239
+77mi 151240
+77mk 151241
+77mp 151242
+77mx 151243
+776w 151244
+77+C 151245
+77+u 151246
+8JCMsA== 151247
+8JCMuQ== 151248
+8JCMug== 151249
+8JCMvQ== 151250
+8JCNgg== 151251
+8JCNgw== 151252
+8JCNhA== 151253
+8JCO 151254
+8JCOuQ== 151255
+8JCkgg== 151256
+8JCkjQ== 151257
+8JCkjw== 151258
+8JCkkw== 151259
+8JCtiQ== 151260
+8JCtjQ== 151261
+8JCwhw== 151262
+8JCwsA== 151263
+8JGC 151264
+8JGChA== 151265
+8JGY 151266
+8JGYgQ== 151267
+8JKA 151268
+8JKAuA== 151269
+8JKB 151270
+8JKBug== 151271
+8JKE 151272
+8JKEtw== 151273
+8JKK 151274
+8JKKkQ== 151275
+8JKL 151276
+8JKLlw== 151277
+8JKM 151278
+8JKMqA== 151279
+8JODog== 151280
+8JODsA== 151281
+8Jag 151282
+8Jagmg== 151283
+8J2Egw== 151284
+8J2EhQ== 151285
+8J2ElQ== 151286
+8J2EmQ== 151287
+8J2EsQ== 151288
+8J2EtA== 151289
+8J2EuQ== 151290
+8J2Fjg== 151291
+8J2Fqg== 151292
+8J2Gow== 151293
+8J2Gsw== 151294
+8J2GuQ== 151295
+8J2Hig== 151296
+8J2Hlw== 151297
+8J2Hmg== 151298
+8J2HnA== 151299
+8J2HoA== 151300
+8J2QiQ== 151301
+8J2Qlg== 151302
+8J2QmA== 151303
+8J2Qow== 151304
+8J2QsQ== 151305
+8J2Rig== 151306
+8J2RrQ== 151307
+8J2RvA== 151308
+8J2RvQ== 151309
+8J2SsA== 151310
+8J2Stw== 151311
+8J2Svw== 151312
+8J2TgQ== 151313
+8J2Tiw== 151314
+8J2Tjg== 151315
+8J2Tkg== 151316
+8J2TmA== 151317
+8J2Tog== 151318
+8J2Tpg== 151319
+8J2Tqw== 151320
+8J2Tvw== 151321
+8J2Ujg== 151322
+8J2UsQ== 151323
+8J2UtA== 151324
+8J2Utw== 151325
+8J2UuA== 151326
+8J2UvQ== 151327
+8J2Vgg== 151328
+8J2Vgw== 151329
+8J2Viw== 151330
+8J2Vjw== 151331
+8J2VkA== 151332
+8J2VpQ== 151333
+8J2VtA== 151334
+8J2Vug== 151335
+8J2WkA== 151336
+8J2Wmw== 151337
+8J2WnQ== 151338
+8J2Wng== 151339
+8J2XqQ== 151340
+8J2Xsw== 151341
+8J2XvQ== 151342
+8J2Yig== 151343
+8J2Yiw== 151344
+8J2YlA== 151345
+8J2YsQ== 151346
+8J2YtA== 151347
+8J2Yvw== 151348
+8J2Zkg== 151349
+8J2ZnQ== 151350
+8J2Znw== 151351
+8J2ZrA== 151352
+8J2ZrQ== 151353
+8J2Zuw== 151354
+8J2Zvg== 151355
+8J2aiA== 151356
+8J2aiw== 151357
+8J2akQ== 151358
+8J2anw== 151359
+8J2aoA== 151360
+8J2aow== 151361
+8J2bvQ== 151362
+8J2cgg== 151363
+8J2clA== 151364
+8J2cmQ== 151365
+8J+A 151366
+8J+AhA== 151367
+8J+Esg== 151368
+8J+Etg== 151369
+8J+FkA== 151370
+8J+Flg== 151371
+8J+Fmg== 151372
+8J+Fmw== 151373
+8J+Fpg== 151374
+8J+Ftg== 151375
+8J+Fuw== 151376
+8J+FvA== 151377
+8J+Ggw== 151378
+8J+Ghg== 151379
+8J+Gjg== 151380
+8J+Irw== 151381
+8J+Isg== 151382
+8J+IuQ== 151383
+8J+Mhw== 151384
+8J+Mkw== 151385
+8J+NmA== 151386
+8J+OkQ== 151387
+8J+Ovw== 151388
+8J+Pjw== 151389
+8J+Pkg== 151390
+8J+PqQ== 151391
+8J+Prw== 151392
+8J+QgA== 151393
+8J+RnQ== 151394
+8J+SuQ== 151395
+8J+Sug== 151396
+8J+Tnw== 151397
+8J+Tqg== 151398
+8J+TvA== 151399
+8J+UgA== 151400
+8J+Ugg== 151401
+8J+Ugw== 151402
+8J+Uhw== 151403
+8J+Ukw== 151404
+8J+Uog== 151405
+8J+UpA== 151406
+8J+UqQ== 151407
+8J+Vlg== 151408
+8J+Vmg== 151409
+8J+VnA== 151410
+8J+VnQ== 151411
+8J+Vng== 151412
+8J+VoA== 151413
+8J+Vog== 151414
+8J+Vsw== 151415
+8J+Whw== 151416
+8J+WkQ== 151417
+8J+Wtg== 151418
+8J+XgQ== 151419
+0ag= 151420
+2o4= 151421
+4aGM 151422
+4biw 151423
+4bqA 151424
+4byu 151425
+4b2d 151426
+4oSs 151427
+4pqn 151428
+4puk 151429
+47Os 151430
+6pmL 151431
+6riR 151432
+65SJ 151433
+65eN 151434
+66GR 151435
+66+R 151436
+67uF 151437
+67yd 151438
+7ISQ 151439
+7Imh 151440
+7Iuy 151441
+7I+x 151442
+7Jek 151443
+7J2p 151444
+7J2/ 151445
+7J+Z 151446
+7KCw 151447
+7KWJ 151448
+7Yqt 151449
+7ZWu 151450
+766P 151451
+8J+FsQ== 151452
+8J+Gkg== 151453
+8J+Viw== 151454
+yZg= 151455
+ypM= 151456
+1YM= 151457
+4LS0 151458
+4L2F 151459
+4Ya6 151460
+4YiK 151461
+4Yio 151462
+4Yi+ 151463
+4YmQ 151464
+4YyD 151465
+4Yy9 151466
+4ZSt 151467
+4aCC 151468
+4aCs 151469
+4ai4 151470
+4amL 151471
+4baP 151472
+4b6U 151473
+4b+Q 151474
+4b+a 151475
+4pmZ 151476
+4pqC 151477
+4pqX 151478
+4qGi 151479
+4qSm 151480
+65aw 151481
+66SC 151482
+66eg 151483
+67GL 151484
+67GQ 151485
+7Jui 151486
+7Jy+ 151487
+7LOF 151488
+7LuB 151489
+7YG7 151490
+7YOZ 151491
+7ZOW 151492
+7ZOt 151493
+7ZWx 151494
+7Zuc 151495
+76SF 151496
+76SG 151497
+76aD 151498
+76ep 151499
+76iC 151500
+8JCklA== 151501
+8JCtkw== 151502
+8JCwvA== 151503
+8J2Tng== 151504
+8J2TsA== 151505
+8J2ZnA== 151506
+8J2agQ== 151507
+8J+Fog== 151508
+8J+Phw== 151509
+yLI= 151510
+yrY= 151511
+1Ig= 151512
+1JE= 151513
+3ZM= 151514
+3aU= 151515
+4KSR 151516
+4KWx 151517
+4KyJ 151518
+4LCz 151519
+4LC1 151520
+4LKf 151521
+4YCP 151522
+4YG8 151523
+4Ymo 151524
+4YqS 151525
+4Yup 151526
+4YyE 151527
+4YyU 151528
+4ZCn 151529
+4ZKM 151530
+4ZSF 151531
+4ZSK 151532
+4aCE 151533
+4aiB 151534
+4biD 151535
+4bi7 151536
+4pSe 151537
+4pi1 151538
+4pqj 151539
+4rKi 151540
+44iq 151541
+5La1 151542
+6rKZ 151543
+6rK0 151544
+6rOC 151545
+66G8 151546
+7IaK 151547
+7LyH 151548
+7YuN 151549
+7ZOs 151550
+7ZOu 151551
+7ZO2 151552
+7ZO7 151553
+76Sm 151554
+76Wg 151555
+76Wx 151556
+762y 151557
+8JCtig== 151558
+8JCxhQ== 151559
+8Jal 151560
+8JalqA== 151561
+8J2Rsw== 151562
+8J2TlQ== 151563
+8J2TrA== 151564
+8J2TuQ== 151565
+8J2Tvg== 151566
+8J2Ukw== 151567
+8J2VjQ== 151568
+8J2VoQ== 151569
+8J2VsQ== 151570
+8J2Wlg== 151571
+8J2Yjw== 151572
+8J2YkA== 151573
+8J2Ymg== 151574
+8J2Zrg== 151575
+8J2ZsA== 151576
+8J2ZuA== 151577
+8J2Zug== 151578
+8J2ZvA== 151579
+8J2ZvQ== 151580
+8J2Zvw== 151581
+8J2ahA== 151582
+8J2ajw== 151583
+8J+FhQ== 151584
+8J+Fkw== 151585
+xog= 151586
+4KCM 151587
+4Zmz 151588
+4ZqM 151589
+4ZuF 151590
+4ZuQ 151591
+4aSK 151592
+4biK 151593
+4pS9 151594
+4pWK 151595
+4puH 151596
+4puP 151597
+4p2q 151598
+4p2r 151599
+4p+w 151600
+44SN 151601
+44ST 151602
+44Sn 151603
+44WW 151604
+44mr 151605
+6qaU 151606
+77GK 151607
+4LqC 151608
+4YWj 151609
+4aWU 151610
+4aWk 151611
+4oak 151612
+4oa3 151613
+4oee 151614
+4pak 151615
+4p62 151616
+44i8 151617
+76i3 151618
+8JOPpw== 151619
+4pSy 151620
+4oC0 151621
+4pKf 151622
+4pKh 151623
+4rCC 151624
+4rCN 151625
+4rCO 151626
+4rCQ 151627
+4rCR 151628
+4rCf 151629
+4rCg 151630
+4rCh 151631
+4ryt 151632
+44ql 151633
+4pKg 151634
+4r26 151635
+44e6 151636
+44e9 151637
+76iK 151638
+4ZW3 151639
+4o2o 151640
+4rqf 151641
+4r2X 151642
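The added vocabulary lines above appear to follow the tiktoken-style format: each entry is the base64 encoding of a token's raw UTF-8 bytes, followed by its integer token ID. A minimal decoding sketch, assuming that format (the example line is copied from the diff; the format itself is an inference, not stated by the diff):

    import base64

    # One added line from the diff above, assumed to be "<b64-token-bytes> <token-id>".
    line = "8J+QqA== 150085"
    b64_token, token_id = line.split()
    token_bytes = base64.b64decode(b64_token)          # b'\xf0\x9f\x90\xa8'
    print(token_bytes.decode("utf-8"), int(token_id))  # koala emoji, 150085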
diff --git a/nemo/collections/multimodal_autoregressive/tokenizer/special_tokens_map.json b/nemo/collections/multimodal_autoregressive/tokenizer/special_tokens_map.json
new file mode 100644
index 000000000000..d13c7acc42c9
--- /dev/null
+++ b/nemo/collections/multimodal_autoregressive/tokenizer/special_tokens_map.json
@@ -0,0 +1,5 @@
+{
+ "bos_token": "<|extra_203|>",
+ "eos_token": "<|extra_204|>",
+ "pad_token": "<|endoftext|>"
+ }
\ No newline at end of file
diff --git a/nemo/collections/multimodal_autoregressive/tokenizer/tokenizer_config.json b/nemo/collections/multimodal_autoregressive/tokenizer/tokenizer_config.json
new file mode 100644
index 000000000000..883a5e06ae69
--- /dev/null
+++ b/nemo/collections/multimodal_autoregressive/tokenizer/tokenizer_config.json
@@ -0,0 +1,15 @@
+{
+ "added_tokens_decoder": {},
+ "auto_map":{
+ "AutoTokenizer": [
+ "cosmos_multimodal_tokenizer.CosmosMultiModalTokenizer",
+ null
+ ]
+ },
+ "bos_token": "<|extra_203|>",
+ "clean_up_tokenization_spaces": true,
+ "eos_token": "<|extra_204|>",
+ "model_max_length": 1000000000000000019884624838656,
+ "pad_token": "<|endoftext|>",
+ "tokenizer_class": "CosmosMultiModalTokenizer"
+ }
\ No newline at end of file
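Together, special_tokens_map.json and the auto_map entry in tokenizer_config.json let Hugging Face's AutoTokenizer resolve the custom class. A minimal usage sketch, assuming the same directory also ships cosmos_multimodal_tokenizer.py defining CosmosMultiModalTokenizer (the path below is illustrative):

    from transformers import AutoTokenizer

    # trust_remote_code is required so auto_map can import the custom class
    # from cosmos_multimodal_tokenizer.py next to the config files.
    tokenizer = AutoTokenizer.from_pretrained(
        "nemo/collections/multimodal_autoregressive/tokenizer",
        trust_remote_code=True,
    )
    print(tokenizer.bos_token, tokenizer.eos_token, tokenizer.pad_token)
    # expected: <|extra_203|> <|extra_204|> <|endoftext|>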
diff --git a/nemo/collections/nlp/data/language_modeling/megatron/base_prompt_learning_dataset.py b/nemo/collections/nlp/data/language_modeling/megatron/base_prompt_learning_dataset.py
index bbd14f47a651..ea5f8c5a930b 100644
--- a/nemo/collections/nlp/data/language_modeling/megatron/base_prompt_learning_dataset.py
+++ b/nemo/collections/nlp/data/language_modeling/megatron/base_prompt_learning_dataset.py
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import omegaconf
import torch
from nemo.collections.nlp.modules.common import VirtualPromptSource
@@ -70,8 +71,55 @@ def __init__(
# Datasets are a list of file path strings to .json or .jsonl files
elif isinstance(datasets[0], str):
for path in datasets:
- dataset = open(path, 'r', encoding='utf-8')
- self.load_data(dataset)
+ with open(path, 'r', encoding='utf-8') as dataset:
+ dataset_examples = self.load_data(dataset)
+ self.examples.extend(dataset_examples)
+ elif isinstance(datasets[0], omegaconf.ListConfig) or isinstance(datasets[0], list):
+ # Dataset is a list of tuples with the first element being the probability of sampling from the dataset
+ # This code repeats the smaller datasets to approximately match the target probabilities
+ total_examples = 0
+ dataset_lengths = []
+ target_probs = []
+ datasets_examples_list = []
+ for prob_and_path in datasets:
+ prob = prob_and_path[0]
+ path = prob_and_path[1]
+ with open(path, 'r', encoding='utf-8') as dataset:
+ dataset_examples = self.load_data(dataset)
+ datasets_examples_list.append(dataset_examples)
+ dataset_lengths.append(len(dataset_examples))
+ total_examples += len(dataset_examples)
+ target_probs.append(prob)
+
+ # Normalize the target probs
+ target_probs = [prob / sum(target_probs) for prob in target_probs]
+ current_probs = [dataset_lengths[i] / total_examples for i in range(len(dataset_lengths))]
+
+ # Increase number of examples needed without reducing the larger datasets with low target probs
+ new_total_examples = total_examples
+ for dataset_idx in range(len(datasets)):
+ if target_probs[dataset_idx] < current_probs[dataset_idx]:
+ target_total_examples = int(dataset_lengths[dataset_idx] / target_probs[dataset_idx])
+ new_total_examples = max(new_total_examples, target_total_examples)
+
+ final_total_examples = 0
+ final_dataset_lengths = []
+ for dataset_idx in range(len(datasets)):
+ num_samples_required = int(new_total_examples * target_probs[dataset_idx])
+ num_repeat = max(
+ int(round(num_samples_required // dataset_lengths[dataset_idx])), 1
+ ) # At least 1 repeat
+ logging.info("dataset idx {}, num_repeat {}".format(dataset_idx, num_repeat))
+ dataset_examples_repeated = datasets_examples_list[dataset_idx] * num_repeat
+ final_dataset_lengths.append(len(dataset_examples_repeated))
+ final_total_examples += len(dataset_examples_repeated)
+ self.examples.extend(dataset_examples_repeated)
+
+ final_probs = [final_dataset_lengths[i] / final_total_examples for i in range(len(final_dataset_lengths))]
+ logging.info("Target probs: {}".format(target_probs))
+ logging.info("Final probs: {}".format(final_probs))
+ logging.info("Initial total examples: {}".format(total_examples))
+ logging.info("Final total examples: {}".format(final_total_examples))
else:
raise ValueError("Datasets must be a list of dicts or a list of filepath strings")
diff --git a/nemo/collections/nlp/data/machine_translation/preproc_mt_data.py b/nemo/collections/nlp/data/machine_translation/preproc_mt_data.py
index b95993ded69e..59181d8cb89f 100644
--- a/nemo/collections/nlp/data/machine_translation/preproc_mt_data.py
+++ b/nemo/collections/nlp/data/machine_translation/preproc_mt_data.py
@@ -21,8 +21,8 @@
import tempfile
from joblib import Parallel, delayed
+from lightning.pytorch import Trainer
from omegaconf import ListConfig, OmegaConf
-from pytorch_lightning import Trainer
from nemo.collections.common.tokenizers.sentencepiece_tokenizer import SentencePieceTokenizer, create_spt_model
from nemo.collections.nlp.data.language_modeling.sentence_dataset import SentenceDataset
@@ -33,23 +33,23 @@
class MTDataPreproc:
- """ Automatically trains tokenizers and preprocesses machine translation data based on the MTEncDecModelConfig.
- For training NMT models with datasets larger than 5M sentence pairs,
- it can be inefficient to train them without first creating a tarred dataset.
- If the user wants to change the tokenizer, vocab size, or batch size, for example,
- they must reprocess the data with the correct configuration.
- With MTDataPreproc users can sweep through data configurations and the tarred dataset will
- be automatically created according to the model configuration.
- To train tokenizer model and create tarred dataset specify in configuration:
- model.preproc_out_dir=/path/to/preproc_out
- model.encoder_tokenizer.vocab_size=32000
- model.decoder_tokenizer.vocab_size=32000
- model.train_ds.use_tarred_dataset=True
- model.train_ds.src_file_name=/path/to/src.txt
- model.train_ds.tgt_file_name=/path/to/tgt.txt
- model.train_ds.tokens_in_batch=16000
- Once a dataset has been constructed based on this configuration, MTDataPreproc will not process it again.
- If a previously trained tokenizer model or tarred dataset is found, MTDataPreproc will not preprocess the data.
+ """Automatically trains tokenizers and preprocesses machine translation data based on the MTEncDecModelConfig.
+ For training NMT models with datasets larger than 5M sentence pairs,
+ it can be inefficient to train them without first creating a tarred dataset.
+ If the user wants to change the tokenizer, vocab size, or batch size, for example,
+ they must reprocess the data with the correct configuration.
+ With MTDataPreproc users can sweep through data configurations and the tarred dataset will
+ be automatically created according to the model configuration.
+ To train tokenizer model and create tarred dataset specify in configuration:
+ model.preproc_out_dir=/path/to/preproc_out
+ model.encoder_tokenizer.vocab_size=32000
+ model.decoder_tokenizer.vocab_size=32000
+ model.train_ds.use_tarred_dataset=True
+ model.train_ds.src_file_name=/path/to/src.txt
+ model.train_ds.tgt_file_name=/path/to/tgt.txt
+ model.train_ds.tokens_in_batch=16000
+ Once a dataset has been constructed based on this configuration, MTDataPreproc will not process it again.
+ If a previously trained tokenizer model or tarred dataset is found, MTDataPreproc will not preprocess the data.
"""
def __init__(self, cfg: MTEncDecModelConfig, trainer: Trainer = None) -> None:
@@ -147,12 +147,16 @@ def __init__(self, cfg: MTEncDecModelConfig, trainer: Trainer = None) -> None:
global_rank=self.global_rank,
encoder_training_sample_size=cfg.encoder_tokenizer.get('training_sample_size', -1),
decoder_training_sample_size=cfg.decoder_tokenizer.get('training_sample_size', -1),
- encoder_special_tokens=OmegaConf.to_container(cfg.encoder_tokenizer.special_tokens)
- if cfg.encoder_tokenizer.special_tokens
- else None,
- decoder_special_tokens=OmegaConf.to_container(cfg.decoder_tokenizer.special_tokens)
- if cfg.decoder_tokenizer.special_tokens
- else None,
+ encoder_special_tokens=(
+ OmegaConf.to_container(cfg.encoder_tokenizer.special_tokens)
+ if cfg.encoder_tokenizer.special_tokens
+ else None
+ ),
+ decoder_special_tokens=(
+ OmegaConf.to_container(cfg.decoder_tokenizer.special_tokens)
+ if cfg.decoder_tokenizer.special_tokens
+ else None
+ ),
spt_symbols=spt_symbols,
)
# update config
@@ -280,10 +284,10 @@ def __init__(self, cfg: MTEncDecModelConfig, trainer: Trainer = None) -> None:
)
def tar_files_to_string(self, tar_files):
- """ Tar files are generated in the following format: basename.number.tar
+ """Tar files are generated in the following format: basename.number.tar
Where number is an integer from 1 to the number of tar files.
We convert this list to a string that can be used in the model config to specify
- tarred datasets: basename_OP_1..num_tar_files_CL_.tar
+ tarred datasets: basename_OP_1..num_tar_files_CL_.tar
Args:
tar_files (List[str]): List of tar files generated by preprocess_parallel_dataset
@@ -337,7 +341,9 @@ def get_enc_dec_tokenizers(
@staticmethod
def get_monolingual_tokenizer(
- tokenizer_name=None, tokenizer_model=None, bpe_dropout=0.0,
+ tokenizer_name=None,
+ tokenizer_model=None,
+ bpe_dropout=0.0,
):
if tokenizer_name == 'sentencepiece':
tokenizer = SentencePieceTokenizer(model_path=tokenizer_model)
@@ -385,14 +391,14 @@ def preprocess_parallel_dataset(
src_fname (str): path to source text data
tgt_fname (str): path to target text data
out_dir (str): path to write tarred dataset
- encoder_tokenizer (Any): tokenizer for encoder
+ encoder_tokenizer (Any): tokenizer for encoder
decoder_tokenizer (Any): tokenizer for decoder
- max_seq_length (int): maximum sequence length
- min_seq_length (int): minimum sequence length
- tokens_in_batch (int): tokens per batch per GPU, effectively batch size
+ max_seq_length (int): maximum sequence length
+ min_seq_length (int): minimum sequence length
+ tokens_in_batch (int): tokens per batch per GPU, effectively batch size
lines_per_dataset_fragment (int): number of lines to consider for bucketing and padding
num_batches_per_tarfile (int): number of batches (pickle files) within each tarfile
- tar_file_prefix (str) : add string prefix to tar files
+ tar_file_prefix (str) : add string prefix to tar files
n_jobs (int): number of processes to use for data processing (-2 to use all but 2)
"""
@@ -471,7 +477,10 @@ def preprocess_parallel_dataset(
out_dir,
f'remainder-batches.tokens.{tokens_in_batch}.tar_file_{remainder_tar_file_ctr}.tar',
)
- remainder_tar_file_ptr = tarfile.open(remainder_tar_file_path, 'w',)
+ remainder_tar_file_ptr = tarfile.open(
+ remainder_tar_file_path,
+ 'w',
+ )
batch_in_tar_ctr = 0
tar_file_ptr.close()
os.remove(tar_file_path)
@@ -631,9 +640,9 @@ def preprocess_monolingual_dataset(
fname (str): Path to source text data
out_dir (str): Path to write tarred dataset
tokenizer (Any): Path to tokenizer model
- max_seq_length (int): maximum sequence length
- min_seq_length (int): minimum sequence length
- tokens_in_batch (int): tokens per batch per GPU, effectively batch size
+ max_seq_length (int): maximum sequence length
+ min_seq_length (int): minimum sequence length
+ tokens_in_batch (int): tokens per batch per GPU, effectively batch size
lines_per_dataset_fragment (int): number of lines to consider for bucketing and padding
num_batches_per_tarfile (int): number of batches (pickle files) within each tarfile
global_rank (int): if set to zero, data will be processed on this node
@@ -808,7 +817,8 @@ def train_tokenizers(
split_by_whitespace=split_by_whitespace,
)
os.rename(
- os.path.join(out_dir, 'tokenizer.model'), encoder_tokenizer_model,
+ os.path.join(out_dir, 'tokenizer.model'),
+ encoder_tokenizer_model,
)
else:
if encoder_tokenizer_name in supported_train_tokenizers:
@@ -1007,7 +1017,10 @@ def write_parallel_batches_to_tarfiles(
tar_file_path = os.path.join(
out_dir, 'fragment-%s-batches.tokens.%d.%d.tar' % (fragment_index, num_tokens, tar_file_ctr)
)
- tar_file_ptr = tarfile.open(tar_file_path, 'w',)
+ tar_file_ptr = tarfile.open(
+ tar_file_path,
+ 'w',
+ )
batch_ctr = 0
# return tar files paths that have batches remaining
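The tar-file naming scheme documented in tar_files_to_string above can be illustrated with a small sketch. This is a hypothetical re-implementation inferred from the docstring (_OP_ and _CL_ stand in for the brace characters used in webdataset-style expansion); the exact delimiters follow the docstring, not the helper's verified output:

    import re

    def tar_files_to_string(tar_files):
        # Collapse ["base.1.tar", ..., "base.N.tar"] into "base_OP_1..N_CL_.tar".
        num_tar_files = len(tar_files)
        base = re.sub(r"\.\d+\.tar$", "", tar_files[0])
        return f"{base}_OP_1..{num_tar_files}_CL_.tar"

    tars = [f"batches.tokens.16000.{i}.tar" for i in range(1, 4)]
    print(tar_files_to_string(tars))
    # -> batches.tokens.16000_OP_1..3_CL_.tar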
diff --git a/nemo/collections/nlp/models/dialogue/dialogue_gpt_classification_model.py b/nemo/collections/nlp/models/dialogue/dialogue_gpt_classification_model.py
index 07ca790866c7..6c7472b95c42 100644
--- a/nemo/collections/nlp/models/dialogue/dialogue_gpt_classification_model.py
+++ b/nemo/collections/nlp/models/dialogue/dialogue_gpt_classification_model.py
@@ -21,8 +21,8 @@
import numpy as np
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from torch.utils.data import DataLoader
from transformers import AutoModelWithLMHead
diff --git a/nemo/collections/nlp/models/dialogue/dialogue_gpt_generation_model.py b/nemo/collections/nlp/models/dialogue/dialogue_gpt_generation_model.py
index 116605b65d52..7fb0ba770189 100644
--- a/nemo/collections/nlp/models/dialogue/dialogue_gpt_generation_model.py
+++ b/nemo/collections/nlp/models/dialogue/dialogue_gpt_generation_model.py
@@ -19,8 +19,8 @@
import numpy as np
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from torch.utils.data import DataLoader
from transformers import AutoModelWithLMHead
diff --git a/nemo/collections/nlp/models/dialogue/dialogue_nearest_neighbour_model.py b/nemo/collections/nlp/models/dialogue/dialogue_nearest_neighbour_model.py
index 29e2627fa038..9bf7ae2a9116 100644
--- a/nemo/collections/nlp/models/dialogue/dialogue_nearest_neighbour_model.py
+++ b/nemo/collections/nlp/models/dialogue/dialogue_nearest_neighbour_model.py
@@ -19,8 +19,8 @@
import numpy as np
import torch
import torch.nn.functional as F
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from transformers import AutoModel
from nemo.collections.nlp.data.dialogue import DialogueSGDDataProcessor
diff --git a/nemo/collections/nlp/models/dialogue/dialogue_s2s_generation_model.py b/nemo/collections/nlp/models/dialogue/dialogue_s2s_generation_model.py
index 48f3e5127a88..3f0d09d7dc66 100644
--- a/nemo/collections/nlp/models/dialogue/dialogue_s2s_generation_model.py
+++ b/nemo/collections/nlp/models/dialogue/dialogue_s2s_generation_model.py
@@ -18,8 +18,8 @@
import numpy as np
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf, open_dict
-from pytorch_lightning import Trainer
from torch.utils.data import DataLoader
from transformers import AutoModelForSeq2SeqLM
diff --git a/nemo/collections/nlp/models/dialogue/dialogue_zero_shot_intent_model.py b/nemo/collections/nlp/models/dialogue/dialogue_zero_shot_intent_model.py
index 5298c060df08..1df19cf8a556 100644
--- a/nemo/collections/nlp/models/dialogue/dialogue_zero_shot_intent_model.py
+++ b/nemo/collections/nlp/models/dialogue/dialogue_zero_shot_intent_model.py
@@ -19,8 +19,8 @@
import numpy as np
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from transformers import AutoModelForSequenceClassification, AutoTokenizer
from nemo.collections.nlp.data.dialogue import DialogueSGDDataProcessor
diff --git a/nemo/collections/nlp/models/dialogue/intent_slot_classification_model.py b/nemo/collections/nlp/models/dialogue/intent_slot_classification_model.py
index 777d468084e2..09a81b33c973 100644
--- a/nemo/collections/nlp/models/dialogue/intent_slot_classification_model.py
+++ b/nemo/collections/nlp/models/dialogue/intent_slot_classification_model.py
@@ -16,8 +16,8 @@
from typing import Dict, List, Optional
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf
-from pytorch_lightning import Trainer
from torch.utils.data import DataLoader
from nemo.collections.common.losses import AggregatorLoss, CrossEntropyLoss
diff --git a/nemo/collections/nlp/models/dialogue/sgdqa_model.py b/nemo/collections/nlp/models/dialogue/sgdqa_model.py
index 3b30dfccd9ce..6cd2243423a4 100644
--- a/nemo/collections/nlp/models/dialogue/sgdqa_model.py
+++ b/nemo/collections/nlp/models/dialogue/sgdqa_model.py
@@ -22,8 +22,8 @@
from typing import List, Optional
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from torch.utils.data import DataLoader
from nemo.collections.nlp.data.dialogue import DialogueSGDBERTDataset, DialogueSGDDataProcessor
diff --git a/nemo/collections/nlp/models/duplex_text_normalization/duplex_decoder.py b/nemo/collections/nlp/models/duplex_text_normalization/duplex_decoder.py
index 7d4cac46cc28..253962e55621 100644
--- a/nemo/collections/nlp/models/duplex_text_normalization/duplex_decoder.py
+++ b/nemo/collections/nlp/models/duplex_text_normalization/duplex_decoder.py
@@ -19,8 +19,8 @@
from typing import Dict, List, Optional, Union
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer, DataCollatorForSeq2Seq
import nemo.collections.nlp.data.text_normalization.constants as constants
@@ -307,7 +307,7 @@ def _infer(
span_ends: List[List[int]],
inst_directions: List[str],
):
- """ Main function for Inference
+ """Main function for Inference
Args:
sents: A list of inputs tokenized by a basic tokenizer.
nb_spans: A list of ints where each int indicates the number of semiotic spans in each input.
@@ -521,9 +521,9 @@ def _setup_dataloader_from_config(self, cfg: DictConfig, data_split: str):
tokenizer_name=self.transformer_name,
mode=self.mode,
max_len=self.max_sequence_len,
- decoder_data_augmentation=cfg.get('decoder_data_augmentation', False)
- if data_split == "train"
- else False,
+ decoder_data_augmentation=(
+ cfg.get('decoder_data_augmentation', False) if data_split == "train" else False
+ ),
lang=self.lang,
use_cache=cfg.get('use_cache', False),
max_insts=cfg.get('max_insts', -1),
diff --git a/nemo/collections/nlp/models/duplex_text_normalization/duplex_tagger.py b/nemo/collections/nlp/models/duplex_text_normalization/duplex_tagger.py
index feeda99bdbe5..1ce005403999 100644
--- a/nemo/collections/nlp/models/duplex_text_normalization/duplex_tagger.py
+++ b/nemo/collections/nlp/models/duplex_text_normalization/duplex_tagger.py
@@ -16,8 +16,8 @@
from typing import Dict, List, Optional
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from torch import nn
from transformers import AutoModelForTokenClassification, AutoTokenizer, DataCollatorForTokenClassification
from transformers.tokenization_utils_base import BatchEncoding
@@ -151,7 +151,7 @@ def on_test_epoch_end(self):
# Functions for inference
@torch.no_grad()
def _infer(self, sents: List[List[str]], inst_directions: List[str]):
- """ Main function for Inference
+ """Main function for Inference
Args:
sents: A list of inputs tokenized by a basic tokenizer.
@@ -248,7 +248,7 @@ def _infer(self, sents: List[List[str]], inst_directions: List[str]):
return all_tag_preds, nb_spans, span_starts, span_ends
def _postprocess_tag_preds(self, words: List[str], inst_dir: str, preds: List[str]):
- """ Function for postprocessing the raw tag predictions of the model. It
+ """Function for postprocessing the raw tag predictions of the model. It
corrects obvious mistakes in the tag predictions such as a TRANSFORM span
starts with I_TRANSFORM_TAG (instead of B_TRANSFORM_TAG).
@@ -280,7 +280,7 @@ def _postprocess_tag_preds(self, words: List[str], inst_dir: str, preds: List[st
return final_preds
def decode_tag_preds(self, tag_preds: List[List[str]]):
- """ Decoding the raw tag predictions to locate the semiotic spans in the
+ """Decoding the raw tag predictions to locate the semiotic spans in the
input texts.
Args:
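_postprocess_tag_preds above repairs BIO-style tag sequences, e.g. a span that opens with an I_ tag instead of a B_ tag. A simplified sketch of that correction (generic BIO labels, not the exact NeMo tag constants):

    def fix_bio(tags):
        # Promote an I- tag that begins a span (no preceding span tag) to B-.
        fixed = []
        for i, tag in enumerate(tags):
            if tag.startswith("I-") and (i == 0 or fixed[i - 1] == "O"):
                tag = "B-" + tag[2:]
            fixed.append(tag)
        return fixed

    print(fix_bio(["O", "I-TRANSFORM", "I-TRANSFORM", "O"]))
    # -> ['O', 'B-TRANSFORM', 'I-TRANSFORM', 'O']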
diff --git a/nemo/collections/nlp/models/enc_dec_nlp_model.py b/nemo/collections/nlp/models/enc_dec_nlp_model.py
index d9aa3c017bae..60c6b616c20a 100644
--- a/nemo/collections/nlp/models/enc_dec_nlp_model.py
+++ b/nemo/collections/nlp/models/enc_dec_nlp_model.py
@@ -15,8 +15,8 @@
from dataclasses import dataclass
from typing import Any
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf.omegaconf import MISSING
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.models.nlp_model import NLPModel
from nemo.collections.nlp.modules.common.decoder_module import DecoderModule
@@ -35,8 +35,7 @@ class EncDecNLPModelConfig(ModelConfig):
class EncDecNLPModel(NLPModel):
- """Base class for encoder-decoder NLP models.
- """
+ """Base class for encoder-decoder NLP models."""
def __init__(self, cfg: EncDecNLPModelConfig, trainer: Trainer = None):
super().__init__(cfg=cfg, trainer=trainer)
diff --git a/nemo/collections/nlp/models/entity_linking/entity_linking_model.py b/nemo/collections/nlp/models/entity_linking/entity_linking_model.py
index 4afae81e3893..640520cdaaa7 100644
--- a/nemo/collections/nlp/models/entity_linking/entity_linking_model.py
+++ b/nemo/collections/nlp/models/entity_linking/entity_linking_model.py
@@ -15,8 +15,8 @@
from typing import Dict, Optional
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from transformers import AutoTokenizer
from nemo.collections.common.losses import MultiSimilarityLoss
diff --git a/nemo/collections/nlp/models/glue_benchmark/glue_benchmark_model.py b/nemo/collections/nlp/models/glue_benchmark/glue_benchmark_model.py
index 4447ebb89386..e90cf9d88c30 100644
--- a/nemo/collections/nlp/models/glue_benchmark/glue_benchmark_model.py
+++ b/nemo/collections/nlp/models/glue_benchmark/glue_benchmark_model.py
@@ -19,8 +19,8 @@
import numpy as np
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from nemo.collections.common.losses import CrossEntropyLoss, MSELoss
from nemo.collections.nlp.data.glue_benchmark.glue_benchmark_dataset import GLUE_TASKS_NUM_LABELS, GLUEDataset
diff --git a/nemo/collections/nlp/models/information_retrieval/base_ir_model.py b/nemo/collections/nlp/models/information_retrieval/base_ir_model.py
index 67424320d185..91d86fef1851 100644
--- a/nemo/collections/nlp/models/information_retrieval/base_ir_model.py
+++ b/nemo/collections/nlp/models/information_retrieval/base_ir_model.py
@@ -17,8 +17,8 @@
import numpy as np
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf
-from pytorch_lightning import Trainer
from nemo.collections.nlp.data import BertInformationRetrievalDataset
from nemo.collections.nlp.models.nlp_model import NLPModel
diff --git a/nemo/collections/nlp/models/information_retrieval/bert_dpr_model.py b/nemo/collections/nlp/models/information_retrieval/bert_dpr_model.py
index 03b62d91170c..bfbec123d13e 100644
--- a/nemo/collections/nlp/models/information_retrieval/bert_dpr_model.py
+++ b/nemo/collections/nlp/models/information_retrieval/bert_dpr_model.py
@@ -15,8 +15,8 @@
from typing import Dict, Optional
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from nemo.collections.common.losses import SmoothedCrossEntropyLoss
from nemo.collections.nlp.data import BertInformationRetrievalDataset
@@ -63,29 +63,50 @@ def __init__(self, cfg: DictConfig, trainer: Trainer = None):
@typecheck()
def forward(
- self, q_input_ids, q_token_type_ids, q_attention_mask, p_input_ids, p_token_type_ids, p_attention_mask,
+ self,
+ q_input_ids,
+ q_token_type_ids,
+ q_attention_mask,
+ p_input_ids,
+ p_token_type_ids,
+ p_attention_mask,
):
q_vectors = self.q_encoder(
- input_ids=q_input_ids, token_type_ids=q_token_type_ids, attention_mask=q_attention_mask,
+ input_ids=q_input_ids,
+ token_type_ids=q_token_type_ids,
+ attention_mask=q_attention_mask,
)
q_vectors = q_vectors[:, 0]
batch_size, hidden_size = q_vectors.size()
p_vectors = self.p_encoder(
- input_ids=p_input_ids, token_type_ids=p_token_type_ids, attention_mask=p_attention_mask,
+ input_ids=p_input_ids,
+ token_type_ids=p_token_type_ids,
+ attention_mask=p_attention_mask,
)
num_passages = p_vectors.shape[0] // batch_size
p_vectors = p_vectors[:, 0].view(-1, num_passages, hidden_size)
p_positives, p_negatives = p_vectors[:, 0], p_vectors[:, 1:]
scores = torch.cat(
- (torch.matmul(q_vectors, p_positives.T), torch.einsum("ij,ipj->ip", q_vectors, p_negatives),), dim=1,
+ (
+ torch.matmul(q_vectors, p_positives.T),
+ torch.einsum("ij,ipj->ip", q_vectors, p_negatives),
+ ),
+ dim=1,
)
return scores
def compute_scores_and_loss(self, inputs):
- (q_input_ids, q_input_mask, q_input_type_ids, p_input_ids, p_input_mask, p_input_type_ids,) = inputs
+ (
+ q_input_ids,
+ q_input_mask,
+ q_input_type_ids,
+ p_input_ids,
+ p_input_mask,
+ p_input_type_ids,
+ ) = inputs
batch_size, num_passages, p_seq_length = p_input_ids.size()
q_seq_length = q_input_ids.size()[-1]
@@ -100,10 +121,17 @@ def compute_scores_and_loss(self, inputs):
normalized_scores = torch.log_softmax(scores, dim=-1)
labels = torch.arange(batch_size)[:, None].long().to(normalized_scores.device)
- loss = self.loss(log_probs=normalized_scores, labels=labels, output_mask=torch.ones_like(labels),)
+ loss = self.loss(
+ log_probs=normalized_scores,
+ labels=labels,
+ output_mask=torch.ones_like(labels),
+ )
scores = scores[:, 0]
- scores = torch.cat((torch.diag(scores)[:, None], scores[:, batch_size:]), dim=1,)
+ scores = torch.cat(
+ (torch.diag(scores)[:, None], scores[:, batch_size:]),
+ dim=1,
+ )
return scores, loss
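The reformatted scoring above is the standard DPR in-batch-negatives setup: each query is scored against every positive in the batch plus its own hard negatives, and the diagonal holds the correct pairs. A shape-level sketch with toy dimensions (plain cross-entropy stands in for the SmoothedCrossEntropyLoss used by the model):

    import torch
    import torch.nn.functional as F

    batch_size, num_neg, hidden = 4, 2, 8
    q = torch.randn(batch_size, hidden)               # query [CLS] vectors
    p_pos = torch.randn(batch_size, hidden)           # one positive per query
    p_neg = torch.randn(batch_size, num_neg, hidden)  # hard negatives per query

    scores = torch.cat(
        (torch.matmul(q, p_pos.T),                    # (B, B) in-batch positives
         torch.einsum("ij,ipj->ip", q, p_neg)),       # (B, num_neg) hard negatives
        dim=1,
    )                                                 # -> (B, B + num_neg)
    labels = torch.arange(batch_size)                 # query i's positive is column i
    print(scores.shape, F.cross_entropy(scores, labels).item())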
diff --git a/nemo/collections/nlp/models/information_retrieval/bert_joint_ir_model.py b/nemo/collections/nlp/models/information_retrieval/bert_joint_ir_model.py
index a4dc4356342a..33885e6b50c6 100644
--- a/nemo/collections/nlp/models/information_retrieval/bert_joint_ir_model.py
+++ b/nemo/collections/nlp/models/information_retrieval/bert_joint_ir_model.py
@@ -15,8 +15,8 @@
from typing import Dict, Optional
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from nemo.collections.common.losses import SmoothedCrossEntropyLoss
from nemo.collections.nlp.models.information_retrieval.base_ir_model import BaseIRModel
@@ -53,7 +53,9 @@ def __init__(self, cfg: DictConfig, trainer: Trainer = None):
self.bert_model = self.get_lm_model_with_padded_embedding(cfg)
hidden_size = self.bert_model.config.hidden_size
self.sim_score_regressor = SequenceRegression(
- hidden_size=hidden_size, num_layers=1, dropout=cfg.language_model.sim_score_dropout,
+ hidden_size=hidden_size,
+ num_layers=1,
+ dropout=cfg.language_model.sim_score_dropout,
)
self.loss = SmoothedCrossEntropyLoss(pad_id=self.tokenizer.pad_id)
@@ -61,7 +63,9 @@ def __init__(self, cfg: DictConfig, trainer: Trainer = None):
def forward(self, input_ids, attention_mask, token_type_ids):
hidden_states = self.bert_model(
- input_ids=input_ids, token_type_ids=token_type_ids, attention_mask=attention_mask,
+ input_ids=input_ids,
+ token_type_ids=token_type_ids,
+ attention_mask=attention_mask,
)
if isinstance(hidden_states, tuple):
hidden_states = hidden_states[0]
diff --git a/nemo/collections/nlp/models/information_retrieval/megatron_bert_embedding_model.py b/nemo/collections/nlp/models/information_retrieval/megatron_bert_embedding_model.py
index 5e38b61938c9..a5b71d5bcb69 100644
--- a/nemo/collections/nlp/models/information_retrieval/megatron_bert_embedding_model.py
+++ b/nemo/collections/nlp/models/information_retrieval/megatron_bert_embedding_model.py
@@ -16,13 +16,11 @@
import os
import numpy as np
-
-
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from megatron.core.models.bert.bert_layer_specs import bert_layer_with_transformer_engine_spec
from omegaconf import DictConfig, OmegaConf, open_dict
from omegaconf.dictconfig import DictConfig
-from pytorch_lightning.trainer.trainer import Trainer
from torch.distributed import all_gather as all_gather_no_backprop
from torch.distributed.nn.functional import all_gather as all_gather_with_backprop
@@ -46,7 +44,6 @@
from nemo.collections.nlp.parts.utils_funcs import get_last_rank
from nemo.utils import logging
-
try:
from megatron.core import parallel_state
from megatron.core.pipeline_parallel.schedules import get_forward_backward_func
diff --git a/nemo/collections/nlp/models/information_retrieval/megatron_gpt_embedding_model.py b/nemo/collections/nlp/models/information_retrieval/megatron_gpt_embedding_model.py
index c7565f45358e..b5240ec2e170 100644
--- a/nemo/collections/nlp/models/information_retrieval/megatron_gpt_embedding_model.py
+++ b/nemo/collections/nlp/models/information_retrieval/megatron_gpt_embedding_model.py
@@ -17,8 +17,8 @@
import numpy as np
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import DictConfig, ListConfig
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.data.information_retrieval.gpt_embedding_dataset import GPTEmbeddingDataset
from nemo.collections.nlp.data.language_modeling.megatron.base_dataset_utils import (
diff --git a/nemo/collections/nlp/models/information_retrieval/megatron_gpt_reranker_model.py b/nemo/collections/nlp/models/information_retrieval/megatron_gpt_reranker_model.py
index e316871fe607..fa593adf5c8f 100644
--- a/nemo/collections/nlp/models/information_retrieval/megatron_gpt_reranker_model.py
+++ b/nemo/collections/nlp/models/information_retrieval/megatron_gpt_reranker_model.py
@@ -17,8 +17,8 @@
import numpy as np
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import DictConfig, ListConfig
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.data.information_retrieval.gpt_embedding_dataset import GPTRerankerDataset
from nemo.collections.nlp.data.language_modeling.megatron.base_dataset_utils import (
diff --git a/nemo/collections/nlp/models/intent_slot_classification/intent_slot_classification_model.py b/nemo/collections/nlp/models/intent_slot_classification/intent_slot_classification_model.py
index 0cd1d07af5dd..a49bc699ab24 100644
--- a/nemo/collections/nlp/models/intent_slot_classification/intent_slot_classification_model.py
+++ b/nemo/collections/nlp/models/intent_slot_classification/intent_slot_classification_model.py
@@ -17,8 +17,8 @@
from typing import Dict, List, Optional
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf
-from pytorch_lightning import Trainer
from torch.utils.data import DataLoader
from nemo.collections.common.losses import AggregatorLoss, CrossEntropyLoss
@@ -38,8 +38,7 @@
class IntentSlotClassificationModel(NLPModel):
def __init__(self, cfg: DictConfig, trainer: Trainer = None):
- """ Initializes BERT Joint Intent and Slot model.
- """
+ """Initializes BERT Joint Intent and Slot model."""
self.max_seq_length = cfg.language_model.max_seq_length
# init superclass
# Check the presence of data_dir.
@@ -75,7 +74,7 @@ def _set_defaults_data_desc(self, cfg):
OmegaConf.set_struct(cfg, True)
def _set_data_desc_to_cfg(self, cfg, data_dir, train_ds, validation_ds):
- """ Method creates IntentSlotDataDesc and copies generated values to cfg.data_desc. """
+ """Method creates IntentSlotDataDesc and copies generated values to cfg.data_desc."""
# Save data from data desc to config - so it can be reused later, e.g. in inference.
data_desc = IntentSlotDataDesc(data_dir=data_dir, modes=[train_ds.prefix, validation_ds.prefix])
OmegaConf.set_struct(cfg, False)
@@ -109,7 +108,7 @@ def _set_data_desc_to_cfg(self, cfg, data_dir, train_ds, validation_ds):
OmegaConf.set_struct(cfg, True)
def _save_label_ids(self, label_ids: Dict[str, int], filename: str) -> None:
- """ Saves label ids map to a file """
+ """Saves label ids map to a file"""
with open(filename, 'w') as out:
labels, _ = zip(*sorted(label_ids.items(), key=lambda x: x[1]))
out.write('\n'.join(labels))
@@ -117,7 +116,7 @@ def _save_label_ids(self, label_ids: Dict[str, int], filename: str) -> None:
logging.info(f'Labels mapping saved to : {out.name}')
def _reconfigure_classifier(self):
- """ Method reconfigures the classifier depending on the settings of model cfg.data_desc """
+ """Method reconfigures the classifier depending on the settings of model cfg.data_desc"""
self.classifier = SequenceTokenClassifier(
hidden_size=self.hidden_size,
diff --git a/nemo/collections/nlp/models/intent_slot_classification/multi_label_intent_slot_classification_model.py b/nemo/collections/nlp/models/intent_slot_classification/multi_label_intent_slot_classification_model.py
index c689b97ab0a5..7a2bec1f2cc0 100644
--- a/nemo/collections/nlp/models/intent_slot_classification/multi_label_intent_slot_classification_model.py
+++ b/nemo/collections/nlp/models/intent_slot_classification/multi_label_intent_slot_classification_model.py
@@ -18,8 +18,8 @@
import numpy as np
import numpy.typing as npt
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf
-from pytorch_lightning import Trainer
from sklearn.metrics import f1_score, precision_score, recall_score
from torch.utils.data import DataLoader
@@ -38,10 +38,10 @@
class MultiLabelIntentSlotClassificationModel(IntentSlotClassificationModel):
def __init__(self, cfg: DictConfig, trainer: Trainer = None):
- """
+ """
Initializes BERT Joint Intent and Slot model.
- Args:
+ Args:
cfg: configuration object
trainer: trainer for Pytorch Lightning
"""
@@ -69,12 +69,12 @@ def __init__(self, cfg: DictConfig, trainer: Trainer = None):
def _set_data_desc_to_cfg(
self, cfg: DictConfig, data_dir: str, train_ds: DictConfig, validation_ds: DictConfig
) -> None:
- """
- Creates MultiLabelIntentSlotDataDesc and copies generated values to Configuration object's data descriptor.
-
- Args:
+ """
+ Creates MultiLabelIntentSlotDataDesc and copies generated values to Configuration object's data descriptor.
+
+ Args:
cfg: configuration object
- data_dir: data directory
+ data_dir: data directory
train_ds: training dataset file name
validation_ds: validation dataset file name
@@ -101,7 +101,10 @@ def _set_data_desc_to_cfg(
if not hasattr(cfg, "class_labels") or cfg.class_labels is None:
cfg.class_labels = {}
cfg.class_labels = OmegaConf.create(
- {"intent_labels_file": "intent_labels.csv", "slot_labels_file": "slot_labels.csv",}
+ {
+ "intent_labels_file": "intent_labels.csv",
+ "slot_labels_file": "slot_labels.csv",
+ }
)
slot_labels_file = os.path.join(data_dir, cfg.class_labels.slot_labels_file)
@@ -114,7 +117,7 @@ def _set_data_desc_to_cfg(
OmegaConf.set_struct(cfg, True)
def _reconfigure_classifier(self) -> None:
- """ Method reconfigures the classifier depending on the settings of model cfg.data_desc """
+ """Method reconfigures the classifier depending on the settings of model cfg.data_desc"""
self.classifier = SequenceTokenClassifier(
hidden_size=self.bert_model.config.hidden_size,
@@ -135,7 +138,8 @@ def _reconfigure_classifier(self) -> None:
self.slot_loss = CrossEntropyLoss(logits_ndim=3)
self.total_loss = AggregatorLoss(
- num_inputs=2, weights=[self.cfg.intent_loss_weight, 1.0 - self.cfg.intent_loss_weight],
+ num_inputs=2,
+ weights=[self.cfg.intent_loss_weight, 1.0 - self.cfg.intent_loss_weight],
)
# setup to track metrics
@@ -161,12 +165,22 @@ def validation_step(self, batch, batch_idx) -> None:
batch: batches of data from DataLoader
batch_idx: batch idx from DataLoader
- Returns:
+ Returns:
None
"""
- (input_ids, input_type_ids, input_mask, loss_mask, subtokens_mask, intent_labels, slot_labels,) = batch
+ (
+ input_ids,
+ input_type_ids,
+ input_mask,
+ loss_mask,
+ subtokens_mask,
+ intent_labels,
+ slot_labels,
+ ) = batch
intent_logits, slot_logits = self(
- input_ids=input_ids, token_type_ids=input_type_ids, attention_mask=input_mask,
+ input_ids=input_ids,
+ token_type_ids=input_type_ids,
+ attention_mask=input_mask,
)
# calculate combined loss for intents and slots
@@ -201,7 +215,7 @@ def _setup_dataloader_from_config(self, cfg: DictConfig) -> DataLoader:
Args:
cfg: configuration object
-
+
Returns:
DataLoader for model's data
"""
@@ -289,8 +303,8 @@ def prediction_probabilities(self, queries: List[str], test_ds: DictConfig) -> n
def optimize_threshold(self, test_ds: DictConfig, file_name: str) -> None:
"""
- Set the optimal threshold of the model from performance on validation set. This threshold is used to round the
- logits to 0 or 1.
+ Set the optimal threshold of the model from performance on validation set. This threshold is used to round the
+ logits to 0 or 1.
Args:
test_ds: location of test dataset
@@ -361,16 +375,16 @@ def predict_from_examples(
queries: text sequences
test_ds: Dataset configuration section.
threshold: Threshold for rounding prediction logits
-
+
Returns:
predicted_intents: model intent predictions with their probabilities
- Example: [[('flight', 0.84)], [('airfare', 0.54),
+ Example: [[('flight', 0.84)], [('airfare', 0.54),
('flight', 0.73), ('meal', 0.24)]]
predicted_slots: model slot predictions
Example: ['O B-depart_date.month_name B-depart_date.day_number',
'O O B-flight_stop O O O']
- predicted_vector: model intent predictions for each individual query. Binary values within each list
+ predicted_vector: model intent predictions for each individual query. Binary values within each list
+ indicate whether a class is predicted for the given query (1 for True, 0 for False)
Example: [[1,0,0,0,0,0], [0,0,1,0,0,0]]
"""
diff --git a/nemo/collections/nlp/models/language_modeling/bert_lm_model.py b/nemo/collections/nlp/models/language_modeling/bert_lm_model.py
index 6b03d86982b0..dc7103b67aa6 100644
--- a/nemo/collections/nlp/models/language_modeling/bert_lm_model.py
+++ b/nemo/collections/nlp/models/language_modeling/bert_lm_model.py
@@ -16,8 +16,8 @@
from typing import Dict, Optional
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf
-from pytorch_lightning import Trainer
from torch.utils.data import DataLoader
from nemo.collections.common.losses import AggregatorLoss, CrossEntropyLoss, SmoothedCrossEntropyLoss
@@ -75,7 +75,11 @@ def __init__(self, cfg: DictConfig, trainer: Trainer = None):
config_file = self.register_artifact('language_model.config_file', cfg.language_model.config_file)
self.bert_model = get_lm_model(
- config_file=config_file, config_dict=config_dict, vocab_file=vocab_file, trainer=trainer, cfg=cfg,
+ config_file=config_file,
+ config_dict=config_dict,
+ vocab_file=vocab_file,
+ trainer=trainer,
+ cfg=cfg,
)
self.hidden_size = self.bert_model.config.hidden_size
@@ -127,7 +131,9 @@ def forward(self, input_ids, attention_mask, token_type_ids):
in the `nn.Module` in vanilla PyTorch.
"""
hidden_states = self.bert_model(
- input_ids=input_ids, token_type_ids=token_type_ids, attention_mask=attention_mask,
+ input_ids=input_ids,
+ token_type_ids=token_type_ids,
+ attention_mask=attention_mask,
)
if isinstance(hidden_states, tuple):
hidden_states = hidden_states[0]
@@ -225,7 +231,9 @@ def _setup_preprocessed_dataloader(self, cfg: Optional[DictConfig]):
files = [dataset]
files.sort()
dl = BertPretrainingPreprocessedDataloader(
- data_files=files, max_predictions_per_seq=max_predictions_per_seq, batch_size=batch_size,
+ data_files=files,
+ max_predictions_per_seq=max_predictions_per_seq,
+ batch_size=batch_size,
)
return dl
diff --git a/nemo/collections/nlp/models/language_modeling/megatron/bert/bert_model.py b/nemo/collections/nlp/models/language_modeling/megatron/bert/bert_model.py
index 67a4802d83f6..c629db5af3c3 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron/bert/bert_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron/bert/bert_model.py
@@ -206,6 +206,9 @@ def forward(
context=None,
context_mask=None,
rotary_pos_emb=None,
+ rotary_pos_cos=None,
+ rotary_pos_sin=None,
+ attention_bias=None,
inference_params=None,
packed_seq_params=None,
):
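The same three keyword arguments are threaded through several layer overrides in this patch. A self-contained sketch of the pattern, with a stand-in base class: accepting the newer Megatron-core kwargs (rotary_pos_cos, rotary_pos_sin, attention_bias) keeps these forwards callable from an updated TransformerBlock even when the subclass ignores them.

class BaseLayer:
    """Stand-in for the Megatron-core layer being subclassed."""

    def forward(self, hidden_states, attention_mask=None):
        return hidden_states


class CompatLayer(BaseLayer):
    def forward(
        self,
        hidden_states,
        attention_mask=None,
        rotary_pos_emb=None,
        rotary_pos_cos=None,
        rotary_pos_sin=None,
        attention_bias=None,
        inference_params=None,
        packed_seq_params=None,
    ):
        # Accept the new optional kwargs so newer callers don't raise a
        # TypeError; this sketch, like some of the diffed layers, simply
        # does not use them.
        return super().forward(hidden_states, attention_mask=attention_mask)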
diff --git a/nemo/collections/nlp/models/language_modeling/megatron/falcon/falcon_decoder_layer.py b/nemo/collections/nlp/models/language_modeling/megatron/falcon/falcon_decoder_layer.py
index 1783d5f5f3fd..7c3f3c194f14 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron/falcon/falcon_decoder_layer.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron/falcon/falcon_decoder_layer.py
@@ -55,7 +55,7 @@ class FalconTransformerLayer(TransformerLayer):
Transformer layer takes input with size [s, b, h] and returns an
output of the same size.
-
+
"""
def __init__(
@@ -106,6 +106,9 @@ def forward(
context=None,
context_mask=None,
rotary_pos_emb=None,
+ rotary_pos_cos=None,
+ rotary_pos_sin=None,
+ attention_bias=None,
inference_params=None,
packed_seq_params=None,
):
diff --git a/nemo/collections/nlp/models/language_modeling/megatron/gemma2/gemma2_modules.py b/nemo/collections/nlp/models/language_modeling/megatron/gemma2/gemma2_modules.py
index 5113ee745895..9ea1b4afe318 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron/gemma2/gemma2_modules.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron/gemma2/gemma2_modules.py
@@ -49,7 +49,8 @@ class Gemma2DotProductAttention(MegatronModule):
Region where selective activation recomputation is applied.
This region is memory intensive but less compute intensive which
makes activation checkpointing more efficient for LLMs (20B+).
- See Reducing Activation Recomputation in Large Transformer Models: https://arxiv.org/abs/2205.05198 for more details.
+ See Reducing Activation Recomputation in Large Transformer Models:
+ https://arxiv.org/abs/2205.05198 for more details.
We use the following notation:
h: hidden size
@@ -126,7 +127,12 @@ def forward(
attention_mask: Tensor,
attn_mask_type: AttnMaskType = None,
packed_seq_params: PackedSeqParams = None,
+ **kwargs,
):
+ """Forward.
+ Modified from mcore.transformer.dot_product_attention to support Gemma2-specific
+ final_logit_softcapping.
+ """
assert packed_seq_params is None, (
"Packed sequence is not supported by DotProductAttention." "Please use TEDotProductAttention instead."
)
@@ -243,6 +249,8 @@ def forward(
class TERowParallelLinearLayerNorm(TERowParallelLinear):
+ """Modified From TERowParallelLinear with an additional Post-LN."""
+
def __init__(
self,
input_size: int,
@@ -270,12 +278,16 @@ def __init__(
self.post_layernorm = TENorm(config, output_size)
def forward(self, x):
+ """Forward with additional Post LN on output"""
output, bias = super().forward(x)
return self.post_layernorm(output), bias
class Gemma2OutputLayer(ColumnParallelLinear):
+ """Extends from ColumnParallelLinear with logit soft capping."""
+
def forward(self, *args, **kwargs):
+ """Forward with logit soft capping."""
output, bias = super().forward(*args, **kwargs)
output = logit_softcapping(output, self.config.final_logit_softcapping)
return output, bias
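Gemma2's final-logit soft capping is, as commonly described, a scaled tanh squash; a standalone sketch assuming the cap * tanh(x / cap) form of the logit_softcapping used above:

import torch

def logit_softcapping_sketch(logits: torch.Tensor, cap: float) -> torch.Tensor:
    """Smoothly bound logits to (-cap, cap) via cap * tanh(logits / cap)."""
    if cap is None:
        return logits
    return cap * torch.tanh(logits / cap)

x = torch.tensor([-100.0, 0.0, 100.0])
print(logit_softcapping_sketch(x, cap=30.0))  # approximately [-30., 0., 30.]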
diff --git a/nemo/collections/nlp/models/language_modeling/megatron/gpt_full_te_layer_autocast_spec.py b/nemo/collections/nlp/models/language_modeling/megatron/gpt_full_te_layer_autocast_spec.py
index e9fb1833fc08..1def214113ee 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron/gpt_full_te_layer_autocast_spec.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron/gpt_full_te_layer_autocast_spec.py
@@ -250,6 +250,9 @@ def forward(
context=None,
context_mask=None,
rotary_pos_emb=None,
+ rotary_pos_cos=None,
+ rotary_pos_sin=None,
+ attention_bias=None,
inference_params=None,
packed_seq_params=None, # TODO: handle this
):
diff --git a/nemo/collections/nlp/models/language_modeling/megatron/griffin/griffin_model.py b/nemo/collections/nlp/models/language_modeling/megatron/griffin/griffin_model.py
index 7a327a3a35cb..e0e3a2339ca1 100755
--- a/nemo/collections/nlp/models/language_modeling/megatron/griffin/griffin_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron/griffin/griffin_model.py
@@ -160,7 +160,9 @@ def forward(
rotary_pos_emb = None
self.decoder.input_tensor = None
if self.position_embedding_type == 'rope':
- rotary_seq_len = self.rotary_pos_emb.get_rotary_seq_len(None, self.decoder, hidden_states, self.config)
+ rotary_seq_len = self.rotary_pos_emb.get_rotary_seq_len(
+ None, self.decoder, hidden_states, self.config, None
+ )
rotary_pos_emb = self.rotary_pos_emb(rotary_seq_len)
hidden_states = self.decoder(hidden_states, attention_mask=attention_mask, rotary_pos_emb=rotary_pos_emb)
diff --git a/nemo/collections/nlp/models/language_modeling/megatron_bart_model.py b/nemo/collections/nlp/models/language_modeling/megatron_bart_model.py
index 1c768829e3e2..4a53edacb566 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_bart_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_bart_model.py
@@ -12,8 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf.dictconfig import DictConfig
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.models.language_modeling.megatron_t5_model import MegatronT5Model
@@ -48,7 +48,9 @@ def _validate_cfg(self):
@property
def _build_train_valid_test_datasets_kwargs(self):
"""allows child classes to add kwargs to dataset building"""
- return dict(delete_mask_prob=self._cfg.data.get('delete_mask_prob', 0.0),)
+ return dict(
+ delete_mask_prob=self._cfg.data.get('delete_mask_prob', 0.0),
+ )
def list_available_models(self):
pass
diff --git a/nemo/collections/nlp/models/language_modeling/megatron_base_model.py b/nemo/collections/nlp/models/language_modeling/megatron_base_model.py
index d2a21e50e486..37ec8a82cef1 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_base_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_base_model.py
@@ -23,12 +23,12 @@
import omegaconf
import torch
import torch.nn as nn
+from lightning.pytorch.plugins.precision import MixedPrecisionPlugin
+from lightning.pytorch.trainer.connectors.logger_connector.fx_validator import _FxValidator
+from lightning.pytorch.trainer.trainer import Trainer
+from lightning.pytorch.utilities.exceptions import MisconfigurationException
from omegaconf import OmegaConf, open_dict
from omegaconf.dictconfig import DictConfig
-from pytorch_lightning.plugins.precision import MixedPrecisionPlugin
-from pytorch_lightning.trainer.connectors.logger_connector.fx_validator import _FxValidator
-from pytorch_lightning.trainer.trainer import Trainer
-from pytorch_lightning.utilities.exceptions import MisconfigurationException
from nemo.collections.nlp.models.nlp_model import NLPModel
from nemo.collections.nlp.modules.common.megatron.attention import HAVE_FLASH_ATTENTION
@@ -200,7 +200,8 @@ def __init__(self, cfg: DictConfig, trainer: Trainer, no_lm_init=True):
global_batch_size=cfg.get('global_batch_size'),
rampup_batch_size=cfg.get('rampup_batch_size', None),
use_fp8=cfg.get('fp8', False),
- init_mpi_proc_group=cfg.get('ub_tp_comm_overlap', False),
+ init_mpi_proc_group=cfg.get('ub_tp_comm_overlap', False)
+ and cfg.get('ub_tp_comm_bootstrap_backend', 'nccl') == 'mpi',
seed=self.cfg.get('seed', 1234),
apex_transformer_log_level=self.cfg.get('apex_transformer_log_level', 30),
use_te_rng_tracker=self.cfg.get('use_te_rng_tracker', False),
@@ -1173,6 +1174,7 @@ def build_model_parallel_config(self) -> ModelParallelConfig:
"grad_sync_func": None, # set dynamically during training
"param_sync_func": None, # set dynamically during training
"tp_comm_overlap": self.cfg.get('ub_tp_comm_overlap', False),
+ "tp_comm_bootstrap_backend": self.cfg.get('ub_tp_comm_bootstrap_backend', 'nccl'),
}
# instantiate ModelParallelConfig from this dict
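The effect of the new condition, sketched with a plain dict standing in for the Hydra cfg: the MPI process group is initialized only when TP-comm overlap is enabled and the bootstrap backend is explicitly 'mpi' ('nccl' being the default).

def needs_mpi_proc_group(cfg: dict) -> bool:
    # Mirrors the condition above: overlap enabled AND backend == 'mpi'.
    return (
        cfg.get('ub_tp_comm_overlap', False)
        and cfg.get('ub_tp_comm_bootstrap_backend', 'nccl') == 'mpi'
    )

print(needs_mpi_proc_group({'ub_tp_comm_overlap': True}))  # False: defaults to nccl
print(needs_mpi_proc_group(
    {'ub_tp_comm_overlap': True, 'ub_tp_comm_bootstrap_backend': 'mpi'}
))  # True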
diff --git a/nemo/collections/nlp/models/language_modeling/megatron_base_prompt_learning_model.py b/nemo/collections/nlp/models/language_modeling/megatron_base_prompt_learning_model.py
index 2a356012c728..b00b6fcf0302 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_base_prompt_learning_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_base_prompt_learning_model.py
@@ -18,9 +18,9 @@
from typing import Any, Optional
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf.dictconfig import DictConfig
from omegaconf.omegaconf import open_dict
-from pytorch_lightning.trainer.trainer import Trainer
from torch import Tensor
from nemo.collections.common.tokenizers.sentencepiece_tokenizer import SentencePieceTokenizer
diff --git a/nemo/collections/nlp/models/language_modeling/megatron_bert_model.py b/nemo/collections/nlp/models/language_modeling/megatron_bert_model.py
index 0eb5ea1c0048..e6945d1ada56 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_bert_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_bert_model.py
@@ -18,8 +18,8 @@
import torch
import torch.nn.functional as F
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf.dictconfig import DictConfig
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.data.language_modeling.megatron import dataset_utils
from nemo.collections.nlp.data.language_modeling.megatron.data_samplers import (
diff --git a/nemo/collections/nlp/models/language_modeling/megatron_glue_model.py b/nemo/collections/nlp/models/language_modeling/megatron_glue_model.py
index c0a4b6351530..d3829c3e8de1 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_glue_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_glue_model.py
@@ -11,8 +11,8 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf.dictconfig import DictConfig
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.data.glue_benchmark.glue_benchmark_dataset import (
TextToTextGLUEDataset,
diff --git a/nemo/collections/nlp/models/language_modeling/megatron_gpt_adapter_model.py b/nemo/collections/nlp/models/language_modeling/megatron_gpt_adapter_model.py
index c6b4d055ef6e..44860c3178f6 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_gpt_adapter_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_gpt_adapter_model.py
@@ -21,8 +21,8 @@
import os
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf.dictconfig import DictConfig
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.common.parts.adapter_modules import LinearAdapterConfig
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
@@ -162,7 +162,7 @@ def state_dict(self, destination=None, prefix=None, keep_vars=False):
def load_state_dict(self, state_dict, strict: bool = True):
"""
- Loads a state_dict expecting the state_dict to contain key,values
+ Loads a state_dict expecting the state_dict to contain key/value pairs
only for the adapter parameters.
"""
for name, module in self.frozen_model.named_modules():
@@ -176,13 +176,13 @@ def load_state_dict(self, state_dict, strict: bool = True):
def setup_optimizer_param_groups(self):
"""
- ModelPT override. Optimizer will get self._optimizer_param_groups.
+ ModelPT override. Optimizer will get self._optimizer_param_groups.
Makes two optimizer param groups, one for the frozen model params
- and one for the prompt-table/prompt-encoder params. The learning
+ and one for the prompt-table/prompt-encoder params. The learning
rate for the frozen model's params will always be zero effectively
freezing the model's params but still allowing for the needed gradients
- to be passed around in pipeline parallel models. The prompt-encoder
- and/or prompt table will use the learning rate set by the user.
+ to be passed around in pipeline parallel models. The prompt-encoder
+ and/or prompt table will use the learning rate set by the user.
"""
self.frozen_model.freeze() # Freeze the entire model
opt_params = []
@@ -246,8 +246,8 @@ class MegatronGPTAdapterLearningModel(MegatronGPTBaseAdapterModel):
Two adapters are inserted into each Transformer layer in the base GPT Model.
It is assumed that this set of adapters will then be trained for a specific task.
- Once trained, the adapter weights will be saved and can be re-loaded
- and infused into the same GPT Model for inference.
+ Once trained, the adapter weights will be saved and can be re-loaded
+ and infused into the same GPT Model for inference.
"""
def __init__(self, cfg: DictConfig, trainer: Trainer):
@@ -295,7 +295,8 @@ def __init__(self, cfg: DictConfig, trainer: Trainer):
for adapter_key in self.adapter_name_keys:
if model_utils.import_class_by_path(adapter_cfg._target_) in module.get_accepted_adapter_types():
module.add_adapter(
- name=adapter_key, cfg=adapter_cfg,
+ name=adapter_key,
+ cfg=adapter_cfg,
)
logging.info(f'After adding adapters:\n{self.frozen_model.summarize()}')
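A rough, simplified miniature of the adapter-insertion loop this hunk reformats; the type check and config fields here are illustrative stand-ins, not NeMo's actual adapter-mixin API.

class ToyAdapterModule:
    """Stand-in for a module implementing an adapter-mixin interface."""

    def __init__(self):
        self._adapters = {}

    def get_accepted_adapter_types(self):
        return {dict}  # this sketch accepts plain dict configs

    def add_adapter(self, name, cfg):
        self._adapters[name] = cfg


adapter_name_keys = ['adapter_1', 'adapter_2']
adapter_cfg = {'in_features': 1024, 'dim': 32}  # hypothetical fields

module = ToyAdapterModule()
for adapter_key in adapter_name_keys:
    if type(adapter_cfg) in module.get_accepted_adapter_types():
        module.add_adapter(
            name=adapter_key,
            cfg=adapter_cfg,
        )
print(sorted(module._adapters))  # ['adapter_1', 'adapter_2']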
@@ -313,8 +314,8 @@ class MegatronGPTInfusedAdapterModel(MegatronGPTBaseAdapterModel):
Three adapters are inserted into each Transformer layer in the base GPT Model. Each adapter is basically a vector that simply scales the key, value, or FFN hidden representations.
It is assumed that this set of adapters will then be trained for a specific task.
- Once trained, the adapter weights will be saved and can be re-loaded
- and infused into the same GPT Model for inference.
+ Once trained, the adapter weights will be saved and can be re-loaded
+ and infused into the same GPT Model for inference.
"""
def __init__(self, cfg: DictConfig, trainer: Trainer):
diff --git a/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py b/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py
index 8f541e5703e6..f71b1ad13c6d 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py
@@ -24,11 +24,11 @@
import packaging
import torch
+from lightning.pytorch.accelerators import CPUAccelerator
+from lightning.pytorch.loops.fetchers import _DataFetcherWrapper
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf
from omegaconf.dictconfig import DictConfig
-from pytorch_lightning.accelerators import CPUAccelerator
-from pytorch_lightning.loops.fetchers import _DataFetcherWrapper
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.common.parts.utils import apply_rope_scaling, extend_instance
from nemo.collections.nlp.data.language_modeling.megatron.data_samplers import (
@@ -803,6 +803,7 @@ def initialize_ub_func(self):
tp_size=self.cfg.get('tensor_model_parallel_size'),
use_fp8=self.cfg.get('fp8'),
ub_cfgs=ub_cfgs,
+ bootstrap_backend=self.cfg.get('ub_tp_comm_bootstrap_backend', 'nccl'),
)
self.initialize_ub = False
diff --git a/nemo/collections/nlp/models/language_modeling/megatron_gpt_prompt_learning_model.py b/nemo/collections/nlp/models/language_modeling/megatron_gpt_prompt_learning_model.py
index 78f671142c1b..7d39459ae654 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_gpt_prompt_learning_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_gpt_prompt_learning_model.py
@@ -18,9 +18,9 @@
from typing import Any, List, Optional, Union
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf.dictconfig import DictConfig
from omegaconf.omegaconf import open_dict
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.common.tokenizers.sentencepiece_tokenizer import SentencePieceTokenizer
from nemo.collections.nlp.data.language_modeling.megatron.gpt_prompt_learning_dataset import GPTPromptLearningDataset
diff --git a/nemo/collections/nlp/models/language_modeling/megatron_gpt_sft_model.py b/nemo/collections/nlp/models/language_modeling/megatron_gpt_sft_model.py
index 08bc5501363c..2d3f43b2f2a8 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_gpt_sft_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_gpt_sft_model.py
@@ -17,9 +17,9 @@
from typing import Any, Optional
import torch
+from lightning.pytorch.loops.fetchers import _DataFetcherWrapper
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import DictConfig, ListConfig
-from pytorch_lightning.loops.fetchers import _DataFetcherWrapper
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.common.metrics import MetricStringToTorchMetric
from nemo.collections.nlp.data.language_modeling.megatron.base_dataset_utils import (
diff --git a/nemo/collections/nlp/models/language_modeling/megatron_griffin_model.py b/nemo/collections/nlp/models/language_modeling/megatron_griffin_model.py
index 1e5a2f0c15c0..40e147b90903 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_griffin_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_griffin_model.py
@@ -13,8 +13,8 @@
# limitations under the License.
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf.dictconfig import DictConfig
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.models.language_modeling.megatron.griffin.griffin_model import GriffinModel
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
diff --git a/nemo/collections/nlp/models/language_modeling/megatron_griffin_sft_model.py b/nemo/collections/nlp/models/language_modeling/megatron_griffin_sft_model.py
index c53d231b2719..584a4b0572f7 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_griffin_sft_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_griffin_sft_model.py
@@ -12,9 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import DictConfig
from omegaconf.dictconfig import DictConfig
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.models.language_modeling.megatron_base_model import MegatronBaseModel
from nemo.collections.nlp.models.language_modeling.megatron_gpt_sft_model import MegatronGPTSFTModel
@@ -48,8 +48,8 @@ def _reset_activation_checkpointing_args(self):
def on_validation_model_zero_grad(self) -> None:
"""
- Skip gradient zeroing at the beginning of validation routine.
- This is needed when overlapping the AllGather of the updated parameters with the following valdation step.
- """
+ Skip gradient zeroing at the beginning of validation routine.
+ This is needed when overlapping the AllGather of the updated parameters with the following validation step.
+ """
if not self.validation_param_sync_overlap:
MegatronBaseModel.on_validation_model_zero_grad(self)
diff --git a/nemo/collections/nlp/models/language_modeling/megatron_lm_encoder_decoder_model.py b/nemo/collections/nlp/models/language_modeling/megatron_lm_encoder_decoder_model.py
index 7b92b9e25d69..e530a40d8aaa 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_lm_encoder_decoder_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_lm_encoder_decoder_model.py
@@ -18,11 +18,11 @@
from typing import Any, Dict, List, Optional
import torch
+from lightning.pytorch.accelerators import CPUAccelerator
+from lightning.pytorch.loops.fetchers import _DataFetcherWrapper
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf, open_dict
from omegaconf.dictconfig import DictConfig
-from pytorch_lightning.accelerators import CPUAccelerator
-from pytorch_lightning.loops.fetchers import _DataFetcherWrapper
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.data.language_modeling.megatron.data_samplers import (
MegatronPretrainingRandomSampler,
@@ -32,12 +32,10 @@
from nemo.collections.nlp.modules.common.megatron.build_model import build_model
from nemo.collections.nlp.modules.common.megatron.module import Float16Module
from nemo.collections.nlp.modules.common.megatron.token_level_encoder_decoder import (
- AttnMaskType,
MegatronTokenLevelEncoderDecoderModule,
)
from nemo.collections.nlp.modules.common.megatron.utils import (
average_losses_across_data_parallel_group,
- build_attention_mask_3d,
get_params_for_weight_decay_optimization,
)
from nemo.collections.nlp.modules.common.text_generation_utils import (
@@ -683,14 +681,13 @@ def fwd_output_and_loss_func(dataloader_iter, model):
if self.mcore_t5:
# attn mask logic follows megatron.data.t5_dataset.py in Megatron-LM
- encoder_attn_mask_3d = build_attention_mask_3d(
- encoder_attn_mask, encoder_attn_mask, AttnMaskType.padding
- )
- decoder_attn_mask_3d = build_attention_mask_3d(
- decoder_attn_mask, decoder_attn_mask, AttnMaskType.causal
- )
- enc_dec_attn_mask_3d = build_attention_mask_3d(
- decoder_attn_mask, encoder_attn_mask, AttnMaskType.padding
+ encoder_attn_mask = encoder_attn_mask < 0.5
+ decoder_attn_mask = decoder_attn_mask < 0.5
+ encoder_attn_mask_3d = encoder_attn_mask.unsqueeze(1).unsqueeze(1)
+ decoder_attn_mask_3d = decoder_attn_mask.unsqueeze(1).unsqueeze(1)
+ enc_dec_attn_mask_3d = (
+ decoder_attn_mask_3d,
+ encoder_attn_mask_3d,
)
output = model( # model is MCoreT5Model
@@ -816,10 +813,8 @@ def fwd_output_only_func(dataloader_iter, model):
encoder_attn_mask,
) = batch
- # attn mask logic follows megatron.data.t5_dataset.py in Megatron-LM
- encoder_attn_mask_3d = build_attention_mask_3d(
- encoder_attn_mask, encoder_attn_mask, AttnMaskType.padding
- )
+ encoder_attn_mask = encoder_attn_mask < 0.5
+ encoder_attn_mask_3d = encoder_attn_mask.unsqueeze(1).unsqueeze(1)
output = model(
encoder_input_ids=encoder_input_ids,
@@ -841,15 +836,13 @@ def fwd_output_only_func(dataloader_iter, model):
decoder_attn_mask,
) = batch
- # attn mask logic follows megatron.data.t5_dataset.py in Megatron-LM
- encoder_attn_mask_3d = build_attention_mask_3d(
- encoder_attn_mask, encoder_attn_mask, AttnMaskType.padding
- )
- decoder_attn_mask_3d = build_attention_mask_3d(
- decoder_attn_mask, decoder_attn_mask, AttnMaskType.causal
- )
- enc_dec_attn_mask_3d = build_attention_mask_3d(
- decoder_attn_mask, encoder_attn_mask, AttnMaskType.padding
+ encoder_attn_mask = encoder_attn_mask < 0.5
+ decoder_attn_mask = decoder_attn_mask < 0.5
+ encoder_attn_mask_3d = encoder_attn_mask.unsqueeze(1).unsqueeze(1)
+ decoder_attn_mask_3d = decoder_attn_mask.unsqueeze(1).unsqueeze(1)
+ enc_dec_attn_mask_3d = (
+ decoder_attn_mask_3d,
+ encoder_attn_mask_3d,
)
# re-transpose encoder_hidden_states from [batch, seq_len, hidden] to [seq_len, batch, hidden]
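What the new mask construction produces, on toy tensors: a [batch, seq] 1/0 padding mask becomes a boolean [batch, 1, 1, seq] mask in which True marks positions to exclude, which (as I read it) is the convention the Megatron-core T5 path expects in place of build_attention_mask_3d.

import torch

# Toy padding masks: 1 = real token, 0 = padding; shape [batch, seq].
encoder_attn_mask = torch.tensor([[1, 1, 1, 0]])
decoder_attn_mask = torch.tensor([[1, 1, 0, 0]])

# '< 0.5' flips the convention: True now means "masked out" (padding).
encoder_attn_mask = encoder_attn_mask < 0.5
decoder_attn_mask = decoder_attn_mask < 0.5

# Insert broadcast dims -> [batch, 1, 1, seq], ready to apply to scores.
encoder_attn_mask_3d = encoder_attn_mask.unsqueeze(1).unsqueeze(1)
decoder_attn_mask_3d = decoder_attn_mask.unsqueeze(1).unsqueeze(1)
print(encoder_attn_mask_3d.shape)  # torch.Size([1, 1, 1, 4])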
diff --git a/nemo/collections/nlp/models/language_modeling/megatron_mamba_model.py b/nemo/collections/nlp/models/language_modeling/megatron_mamba_model.py
index 4f0000dafaa2..ad92421ee607 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_mamba_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_mamba_model.py
@@ -13,8 +13,8 @@
# limitations under the License.
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf.dictconfig import DictConfig
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
from nemo.utils import logging
diff --git a/nemo/collections/nlp/models/language_modeling/megatron_mamba_sft_model.py b/nemo/collections/nlp/models/language_modeling/megatron_mamba_sft_model.py
index ebcc47004711..cacdb1c190e7 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_mamba_sft_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_mamba_sft_model.py
@@ -12,15 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import DictConfig
from omegaconf.dictconfig import DictConfig
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.models.language_modeling.megatron_base_model import MegatronBaseModel
from nemo.collections.nlp.models.language_modeling.megatron_gpt_sft_model import MegatronGPTSFTModel
from nemo.collections.nlp.models.language_modeling.megatron_mamba_model import MegatronMambaModel
-
__all__ = ['MegatronMambaSFTModel']
diff --git a/nemo/collections/nlp/models/language_modeling/megatron_retrieval_model.py b/nemo/collections/nlp/models/language_modeling/megatron_retrieval_model.py
index 42323e503f7d..147c832f4b9a 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_retrieval_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_retrieval_model.py
@@ -16,8 +16,8 @@
from typing import Any, List, Optional, Union
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import DictConfig
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.data.language_modeling.megatron.data_samplers import (
MegatronPretrainingRandomSampler,
@@ -294,7 +294,10 @@ def training_step(self, batch, batch_idx):
self.log('lr', lr, batch_size=1)
self.log('global_step', self.trainer.global_step, prog_bar=True, batch_size=1)
self.log(
- 'consumed_samples', self._compute_consumed_samples_after_training_step(), prog_bar=True, batch_size=1,
+ 'consumed_samples',
+ self._compute_consumed_samples_after_training_step(),
+ prog_bar=True,
+ batch_size=1,
)
self._reduced_loss_buffer = []
return lm_loss
@@ -427,7 +430,10 @@ def build_pretraining_data_loader(self, dataset, consumed_samples):
# Torch dataloader.
return torch.utils.data.DataLoader(
- dataset, batch_sampler=batch_sampler, num_workers=self.cfg.data.num_workers, pin_memory=True,
+ dataset,
+ batch_sampler=batch_sampler,
+ num_workers=self.cfg.data.num_workers,
+ pin_memory=True,
)
def setup(self, stage=None):
diff --git a/nemo/collections/nlp/models/language_modeling/megatron_retro_fine_tune_model.py b/nemo/collections/nlp/models/language_modeling/megatron_retro_fine_tune_model.py
index 1eaec4238648..924da5825024 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_retro_fine_tune_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_retro_fine_tune_model.py
@@ -15,8 +15,8 @@
from functools import partial
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import DictConfig, ListConfig
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.common.data import ConcatMapDataset
from nemo.collections.common.metrics import MetricStringToTorchMetric
@@ -50,11 +50,13 @@
def build_all_datasets(
- cfg, tokenizer, train_valid_test_num_samples,
+ cfg,
+ tokenizer,
+ train_valid_test_num_samples,
):
"""Build train, valid, and test RETRO datasets.
- There is one to one mapping between data_prefix and knn_map_path.
- Currently only supports one retrieval dataset.
+ There is a one-to-one mapping between data_prefix and knn_map_path.
+ Currently only supports one retrieval dataset.
"""
train_dataset = RetroQAFineTuneDataset(
cfg.train_ds.get('file_name'),
@@ -97,7 +99,7 @@ def build_all_datasets(
class MegatronRetroFinetuneModel(MegatronRetrievalModel):
- """Finetune RETRO Model """
+ """Finetune RETRO Model"""
def build_train_valid_test_datasets(self):
logging.info('Building RETRO datasets.')
@@ -114,7 +116,9 @@ def build_train_valid_test_datasets(self):
]
self._train_ds, self._validation_ds, self._test_ds = build_all_datasets(
- cfg=self.cfg.data, tokenizer=self.tokenizer, train_valid_test_num_samples=train_valid_test_num_samples,
+ cfg=self.cfg.data,
+ tokenizer=self.tokenizer,
+ train_valid_test_num_samples=train_valid_test_num_samples,
)
if self._train_ds is not None:
logging.info(f'Length of train dataset: {len(self._train_ds)}')
@@ -143,5 +147,9 @@ def build_pretraining_data_loader(self, dataset, consumed_samples):
drop_last=True,
)
return torch.utils.data.DataLoader(
- dataset, batch_sampler=batch_sampler, collate_fn=collate_fn, num_workers=0, pin_memory=True,
+ dataset,
+ batch_sampler=batch_sampler,
+ collate_fn=collate_fn,
+ num_workers=0,
+ pin_memory=True,
)
diff --git a/nemo/collections/nlp/models/language_modeling/megatron_retro_model.py b/nemo/collections/nlp/models/language_modeling/megatron_retro_model.py
index a6bf75fb9444..493d512fd30e 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_retro_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_retro_model.py
@@ -23,10 +23,10 @@
from typing import Any, Dict, Iterator, List, Optional, Union
import torch
+from lightning.pytorch.accelerators import CPUAccelerator
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf, open_dict
from omegaconf.dictconfig import DictConfig
-from pytorch_lightning.accelerators import CPUAccelerator
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.data.language_modeling.megatron.data_samplers import (
MegatronPretrainingRandomSampler,
diff --git a/nemo/collections/nlp/models/language_modeling/megatron_t0_model.py b/nemo/collections/nlp/models/language_modeling/megatron_t0_model.py
index cee1b11a160b..92827b31a259 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_t0_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_t0_model.py
@@ -12,8 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import DictConfig, ListConfig
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.data.language_modeling.megatron.base_dataset_utils import (
get_datasets_weights_and_num_samples,
diff --git a/nemo/collections/nlp/models/language_modeling/megatron_t5_adapter_model.py b/nemo/collections/nlp/models/language_modeling/megatron_t5_adapter_model.py
index 31eb4519ded2..a6e6afc8b7eb 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_t5_adapter_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_t5_adapter_model.py
@@ -21,9 +21,9 @@
from typing import Any
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf.dictconfig import DictConfig
from omegaconf.omegaconf import open_dict
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.common.parts.adapter_modules import LinearAdapterConfig
from nemo.collections.nlp.models.language_modeling.megatron_t5_model import MegatronT5Model
@@ -60,7 +60,15 @@ def __init__(self, cfg: DictConfig, trainer: Trainer):
self.adapter_name_keys = []
def forward(
- self, input_ids, dec_input, enc_mask, dec_mask, position_ids, taskname_ids, labels=None, inference=False,
+ self,
+ input_ids,
+ dec_input,
+ enc_mask,
+ dec_mask,
+ position_ids,
+ taskname_ids,
+ labels=None,
+ inference=False,
):
# Call forward on T5 model with preprocessed embeddings
if self.autocast_dtype == torch.float32:
@@ -195,13 +203,13 @@ def predict_step(self, batch: Any, batch_idx: int, dataloader_idx: int = 0) -> A
def setup_optimizer_param_groups(self):
"""
- ModelPT override. Optimizer will get self._optimizer_param_groups.
+ ModelPT override. Optimizer will get self._optimizer_param_groups.
Makes two optimizer param groups, one for the frozen model params
- and one for the prompt-table/prompt-encoder params. The learning
+ and one for the prompt-table/prompt-encoder params. The learning
rate for the frozen model's params will always be zero effectively
freezing the model's params but still allowing for the needed gradients
- to be passed around in pipeline parallel models. The prompt-encoder
- and/or prompt table will use the learning rate set by the user.
+ to be passed around in pipeline parallel models. The prompt-encoder
+ and/or prompt table will use the learning rate set by the user.
"""
self.frozen_model.freeze() # Freeze the entire model
opt_params = []
@@ -266,7 +274,7 @@ def state_dict(self, destination=None, prefix=None, keep_vars=False):
def load_state_dict(self, state_dict, strict: bool = True):
"""
- Loads a state_dict expecting the state_dict to contain key,values
+ Loads a state_dict expecting the state_dict to contain key/value pairs
only for the adapter parameters.
"""
for name, module in self.frozen_model.named_modules():
@@ -319,7 +327,7 @@ def on_validation_epoch_end(self):
gather_results_dedup = list(set(itertools.chain(*gather_results)))
correct = 0
- for (input, pred, label) in gather_results_dedup:
+ for input, pred, label in gather_results_dedup:
if pred == label:
correct += 1
@@ -559,8 +567,8 @@ class MegatronT5InfusedAdapterModel(MegatronT5BaseAdapterModel):
Three adapters are inserted into each Transformer layer in the base GPT Model. Each adapter is basically a vector that simply scales the key, value, or FFN hidden representations.
It is assumed that this set of adapters will then be trained for a specific task.
- Once trained, the adapter weights will be saved and can be re-loaded
- and infused into the same GPT Model for inference.
+ Once trained, the adapter weights will be saved and can be re-loaded
+ and infused into the same GPT Model for inference.
"""
def __init__(self, cfg: DictConfig, trainer: Trainer):
@@ -670,7 +678,7 @@ def state_dict(self, destination=None, prefix=None, keep_vars=False):
def load_state_dict(self, state_dict, strict: bool = True):
"""
- Loads a state_dict expecting the state_dict to contain key,values
+ Loads a state_dict expecting the state_dict to contain key/value pairs
only for the adapter parameters.
"""
encoder = self.frozen_model.enc_dec_model.enc_dec_model.encoder
diff --git a/nemo/collections/nlp/models/language_modeling/megatron_t5_model.py b/nemo/collections/nlp/models/language_modeling/megatron_t5_model.py
index 0f5022795446..1df10403a9e7 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_t5_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_t5_model.py
@@ -15,8 +15,8 @@
import enum
import math
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf.dictconfig import DictConfig
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.data.language_modeling.megatron.dataset_utils import build_train_valid_test_datasets
from nemo.collections.nlp.models.language_modeling.megatron_lm_encoder_decoder_model import (
@@ -79,7 +79,9 @@ def _validate_cfg(self):
@property
def _build_train_valid_test_datasets_kwargs(self):
"""allows child classes to add kwargs to dataset building"""
- return dict(max_seq_length_dec=self._cfg.data.seq_length_dec,)
+ return dict(
+ max_seq_length_dec=self._cfg.data.seq_length_dec,
+ )
def _build_vocab(self):
self.num_sentinel_tokens = self._cfg.tokenizer.num_sentinel_tokens
@@ -210,9 +212,9 @@ def build_train_valid_test_datasets(self):
]
if self.trainer.limit_val_batches <= 1.0 and isinstance(self.trainer.limit_val_batches, float):
- train_valid_test_num_samples[
- 1
- ] = 1 # This is to make sure we only have one epoch on every validation iteration
+ train_valid_test_num_samples[1] = (
+ 1 # This is to make sure we only have one epoch on every validation iteration
+ )
self._train_ds, self._validation_ds, self._test_ds = build_train_valid_test_datasets(
cfg=self._cfg,
diff --git a/nemo/collections/nlp/models/language_modeling/megatron_t5_prompt_learning_model.py b/nemo/collections/nlp/models/language_modeling/megatron_t5_prompt_learning_model.py
index 1f54cb87428e..187f24c884b7 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_t5_prompt_learning_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_t5_prompt_learning_model.py
@@ -16,10 +16,10 @@
from typing import Any, List
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf
from omegaconf.dictconfig import DictConfig
from omegaconf.omegaconf import open_dict
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.data.language_modeling.megatron.t5_prompt_learning_dataset import T5PromptLearningDataset
from nemo.collections.nlp.models.language_modeling.megatron_base_prompt_learning_model import (
diff --git a/nemo/collections/nlp/models/language_modeling/megatron_t5_sft_model.py b/nemo/collections/nlp/models/language_modeling/megatron_t5_sft_model.py
index c70f44925d33..6f9a69f27529 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_t5_sft_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_t5_sft_model.py
@@ -16,9 +16,9 @@
from typing import Dict, List
import torch
+from lightning.pytorch.loops.fetchers import _DataFetcherWrapper
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import DictConfig, ListConfig
-from pytorch_lightning.loops.fetchers import _DataFetcherWrapper
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.common.data import ConcatMapDataset
from nemo.collections.common.metrics import MetricStringToTorchMetric
diff --git a/nemo/collections/nlp/models/language_modeling/transformer_lm_model.py b/nemo/collections/nlp/models/language_modeling/transformer_lm_model.py
index 69db0d46e75e..3b8e1f819ea1 100644
--- a/nemo/collections/nlp/models/language_modeling/transformer_lm_model.py
+++ b/nemo/collections/nlp/models/language_modeling/transformer_lm_model.py
@@ -19,8 +19,8 @@
import numpy as np
import torch
import torch.utils.data as pt_data
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf
-from pytorch_lightning import Trainer
from nemo.collections.common.losses import SmoothedCrossEntropyLoss
from nemo.collections.common.metrics import GlobalAverageLossMetric
@@ -59,9 +59,11 @@ def __init__(self, cfg: DictConfig, trainer: Trainer = None):
tokenizer_model=cfg.tokenizer.get("tokenizer_model", None),
vocab_file=cfg.tokenizer.get("vocab_file", None),
bpe_dropout=cfg.tokenizer.get("bpe_dropout", 0.0),
- special_tokens=OmegaConf.to_container(cfg.tokenizer.special_tokens)
- if cfg.tokenizer.get("special_tokens", None)
- else None,
+ special_tokens=(
+ OmegaConf.to_container(cfg.tokenizer.special_tokens)
+ if cfg.tokenizer.get("special_tokens", None)
+ else None
+ ),
)
# init superclass
@@ -99,7 +101,7 @@ def __init__(self, cfg: DictConfig, trainer: Trainer = None):
# tie weights of embedding and softmax matrices
self.log_softmax.mlp.layer0.weight = self.encoder.embedding.token_embedding.weight
- std_init_range = 1 / self.encoder.hidden_size ** 0.5
+ std_init_range = 1 / self.encoder.hidden_size**0.5
# initialize weights if not using pretrained encoder
if not self._cfg.encoder.get('pretrained', False):
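The surrounding (pre-existing) logic in miniature: tie the softmax projection to the token embedding, then initialize with std = 1 / hidden_size**0.5 (the old `** 0.5` and new `**0.5` spellings are the same operator, only reformatted).

import torch.nn as nn

hidden_size, vocab = 8, 100
embedding = nn.Embedding(vocab, hidden_size)
proj = nn.Linear(hidden_size, vocab, bias=False)

# Weight tying: the output projection reuses the embedding matrix.
proj.weight = embedding.weight

std_init_range = 1 / hidden_size**0.5
nn.init.normal_(proj.weight, mean=0.0, std=std_init_range)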
@@ -199,7 +201,12 @@ def on_test_epoch_end(self):
self.test_step_outputs.clear() # free memory
def setup_tokenizer(
- self, tokenizer_name=None, tokenizer_model=None, vocab_file=None, bpe_dropout=0.0, special_tokens=None,
+ self,
+ tokenizer_name=None,
+ tokenizer_model=None,
+ vocab_file=None,
+ bpe_dropout=0.0,
+ special_tokens=None,
):
supported_tokenizers = ['huggingface', 'sentencepiece', 'word']
diff --git a/nemo/collections/nlp/models/machine_translation/megatron_nmt_model.py b/nemo/collections/nlp/models/machine_translation/megatron_nmt_model.py
index 4461b417f311..b5f228f21e1a 100644
--- a/nemo/collections/nlp/models/machine_translation/megatron_nmt_model.py
+++ b/nemo/collections/nlp/models/machine_translation/megatron_nmt_model.py
@@ -19,10 +19,10 @@
import numpy as np
import torch
+from lightning.pytorch.loops.fetchers import _DataFetcherWrapper
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf.dictconfig import DictConfig
from omegaconf.listconfig import ListConfig
-from pytorch_lightning.loops.fetchers import _DataFetcherWrapper
-from pytorch_lightning.trainer.trainer import Trainer
from sacrebleu import corpus_bleu
from nemo.collections.nlp.data.common.sequence_to_sequence_dataset import (
diff --git a/nemo/collections/nlp/models/machine_translation/mt_enc_dec_bottleneck_model.py b/nemo/collections/nlp/models/machine_translation/mt_enc_dec_bottleneck_model.py
index 41c6125ba05f..96077c4da82e 100644
--- a/nemo/collections/nlp/models/machine_translation/mt_enc_dec_bottleneck_model.py
+++ b/nemo/collections/nlp/models/machine_translation/mt_enc_dec_bottleneck_model.py
@@ -16,7 +16,7 @@
import numpy as np
import torch
-from pytorch_lightning import Trainer
+from lightning.pytorch import Trainer
from nemo.collections.common.losses import NLLLoss
from nemo.collections.nlp.models.machine_translation.mt_enc_dec_config import MTBottleneckModelConfig
@@ -184,7 +184,11 @@ def loss(
output_mask = (tgt_labels != self.decoder_tokenizer.pad_id).type_as(tgt_log_probs)
log_p_x_given_z_per_token = (
- -recon_loss_fn(log_probs=tgt_log_probs, labels=tgt_labels,).view(tgt_log_probs.shape[:2]) * output_mask
+ -recon_loss_fn(
+ log_probs=tgt_log_probs,
+ labels=tgt_labels,
+ ).view(tgt_log_probs.shape[:2])
+ * output_mask
)
# probability per sample
@@ -216,7 +220,10 @@ def loss(
if self.model_type in ["mim", "vae"]:
# tokens = tgt_mask.sum()
- q_z_given_x = torch.distributions.Normal(loc=z_mean, scale=torch.exp(0.5 * z_logv),)
+ q_z_given_x = torch.distributions.Normal(
+ loc=z_mean,
+ scale=torch.exp(0.5 * z_logv),
+ )
# average latent distribution to match averaging of observations
if self.recon_per_token:
# average latent per dimension - to heuristically match per-token reconstruction
@@ -225,7 +232,10 @@ def loss(
log_q_z_given_x = q_z_given_x.log_prob(z).sum(-1).sum(-1).mean()
# build prior distribution
- p_z = torch.distributions.Normal(loc=torch.zeros_like(z), scale=torch.ones_like(z),)
+ p_z = torch.distributions.Normal(
+ loc=torch.zeros_like(z),
+ scale=torch.ones_like(z),
+ )
if self.recon_per_token:
# average latent distribution similar to averaging of observations
log_p_z = p_z.log_prob(z).mean(-1).mean(-1).mean()
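A compact sketch of the MIM/VAE bookkeeping these hunks reformat, with illustrative shapes: a diagonal-Gaussian posterior built from (z_mean, z_logv), a standard-normal prior, and log-probabilities reduced over the latent and sequence dimensions as in the non-per-token branch.

import torch

z_mean = torch.zeros(2, 5, 16)  # [batch, seq, latent_dim], illustrative
z_logv = torch.zeros(2, 5, 16)

q_z_given_x = torch.distributions.Normal(
    loc=z_mean,
    scale=torch.exp(0.5 * z_logv),  # log-variance -> standard deviation
)
z = q_z_given_x.rsample()  # reparameterized sample keeps gradients flowing

p_z = torch.distributions.Normal(
    loc=torch.zeros_like(z),
    scale=torch.ones_like(z),
)

log_q_z_given_x = q_z_given_x.log_prob(z).sum(-1).sum(-1).mean()
log_p_z = p_z.log_prob(z).sum(-1).sum(-1).mean()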
@@ -267,7 +277,11 @@ def forward(self, src, src_mask, tgt, tgt_mask, timer=None):
if timer is not None:
timer.start("encoder")
- enc_hiddens, enc_mask = self.encoder(input_ids=src, encoder_mask=src_mask, return_mask=True,)
+ enc_hiddens, enc_mask = self.encoder(
+ input_ids=src,
+ encoder_mask=src_mask,
+ return_mask=True,
+ )
# build posterior distribution q(x|z)
z, z_mean, z_logv = self.encode_latent(hidden=enc_hiddens)
@@ -283,7 +297,10 @@ def forward(self, src, src_mask, tgt, tgt_mask, timer=None):
context_hiddens = self.latent2hidden(z)
tgt_hiddens = self.decoder(
- input_ids=tgt, decoder_mask=tgt_mask, encoder_embeddings=context_hiddens, encoder_mask=enc_mask,
+ input_ids=tgt,
+ decoder_mask=tgt_mask,
+ encoder_embeddings=context_hiddens,
+ encoder_mask=enc_mask,
)
# build decoding distribution
@@ -426,18 +443,25 @@ def eval_step(self, batch, batch_idx, mode, dataloader_idx=0):
return_info=True,
)
# pass cache to sampler in order to reuse encoder's output
- cache = dict(z=z, z_mean=z_mean, z_mask=z_mask, timer=timer,)
+ cache = dict(
+ z=z,
+ z_mean=z_mean,
+ z_mask=z_mask,
+ timer=timer,
+ )
inputs, translations = self.batch_translate(src=src_ids, src_mask=src_mask, cache=cache)
num_measurements = labels.shape[0] * labels.shape[1]
if dataloader_idx == 0:
getattr(self, f'{mode}_loss')(
- loss=eval_loss, num_measurements=num_measurements,
+ loss=eval_loss,
+ num_measurements=num_measurements,
)
else:
getattr(self, f'{mode}_loss_{dataloader_idx}')(
- loss=eval_loss, num_measurements=num_measurements,
+ loss=eval_loss,
+ num_measurements=num_measurements,
)
np_tgt = tgt_ids.detach().cpu().numpy()
ground_truths = [self.decoder_tokenizer.ids_to_text(tgt) for tgt in np_tgt]
diff --git a/nemo/collections/nlp/models/machine_translation/mt_enc_dec_model.py b/nemo/collections/nlp/models/machine_translation/mt_enc_dec_model.py
index 708d4236be7f..78b701699259 100644
--- a/nemo/collections/nlp/models/machine_translation/mt_enc_dec_model.py
+++ b/nemo/collections/nlp/models/machine_translation/mt_enc_dec_model.py
@@ -25,9 +25,9 @@
import torch
import torch.distributed as dist
import torch.utils.data as pt_data
+from lightning.pytorch import Trainer
+from lightning.pytorch.utilities import rank_zero_only
from omegaconf import DictConfig, ListConfig, OmegaConf
-from pytorch_lightning import Trainer
-from pytorch_lightning.utilities import rank_zero_only
from sacrebleu import corpus_bleu
from nemo.collections.common.data import ConcatDataset
@@ -120,17 +120,21 @@ def __init__(self, cfg: MTEncDecModelConfig, trainer: Trainer = None):
encoder_tokenizer, decoder_tokenizer = MTEncDecModel.setup_enc_dec_tokenizers(
encoder_tokenizer_library=self.encoder_tokenizer_library,
encoder_tokenizer_model=encoder_tokenizer_model,
- encoder_bpe_dropout=cfg.encoder_tokenizer.get('bpe_dropout', 0.0)
- if cfg.encoder_tokenizer.get('bpe_dropout', 0.0) is not None
- else 0.0,
+ encoder_bpe_dropout=(
+ cfg.encoder_tokenizer.get('bpe_dropout', 0.0)
+ if cfg.encoder_tokenizer.get('bpe_dropout', 0.0) is not None
+ else 0.0
+ ),
encoder_model_name=cfg.encoder.get('model_name') if hasattr(cfg.encoder, 'model_name') else None,
encoder_r2l=cfg.encoder_tokenizer.get('r2l', False),
decoder_tokenizer_library=self.decoder_tokenizer_library,
encoder_tokenizer_vocab_file=encoder_vocab_file,
decoder_tokenizer_model=decoder_tokenizer_model,
- decoder_bpe_dropout=cfg.decoder_tokenizer.get('bpe_dropout', 0.0)
- if cfg.decoder_tokenizer.get('bpe_dropout', 0.0) is not None
- else 0.0,
+ decoder_bpe_dropout=(
+ cfg.decoder_tokenizer.get('bpe_dropout', 0.0)
+ if cfg.decoder_tokenizer.get('bpe_dropout', 0.0) is not None
+ else 0.0
+ ),
decoder_model_name=cfg.decoder.get('model_name') if hasattr(cfg.decoder, 'model_name') else None,
decoder_r2l=cfg.decoder_tokenizer.get('r2l', False),
special_tokens=self.special_tokens,
@@ -254,7 +258,7 @@ def __init__(self, cfg: MTEncDecModelConfig, trainer: Trainer = None):
self.log_softmax.mlp.layer0.weight = self.decoder.embedding.token_embedding.weight
# TODO: encoder and decoder with different hidden size?
- std_init_range = 1 / self.encoder.hidden_size ** 0.5
+ std_init_range = 1 / self.encoder.hidden_size**0.5
# initialize weights if not using pretrained encoder/decoder
if not self._cfg.encoder.get('pretrained', False):
@@ -341,7 +345,10 @@ def filter_predicted_ids(cls, ids, decoder_tokenizer):
return ids
def test_encoder_ids(self, ids, raise_error=False):
- invalid_ids = torch.logical_or((ids >= self.encoder_tokenizer.vocab_size).any(), (ids < 0).any(),)
+ invalid_ids = torch.logical_or(
+ (ids >= self.encoder_tokenizer.vocab_size).any(),
+ (ids < 0).any(),
+ )
if raise_error and invalid_ids:
raise ValueError("Encoder ids are out of range (tip: check encoder tokenizer)")
@@ -349,7 +356,10 @@ def test_encoder_ids(self, ids, raise_error=False):
return not invalid_ids
def test_decoder_ids(self, ids, raise_error=False):
- invalid_ids = torch.logical_or((ids >= self.decoder_tokenizer.vocab_size).any(), (ids < 0).any(),)
+ invalid_ids = torch.logical_or(
+ (ids >= self.decoder_tokenizer.vocab_size).any(),
+ (ids < 0).any(),
+ )
if raise_error and invalid_ids:
raise ValueError("Decoder ids are out of range (tip: check decoder tokenizer)")
@@ -655,7 +665,10 @@ def setup_training_data(self, train_data_config: Optional[DictConfig]):
multilingual=self.multilingual,
multilingual_ids=self.multilingual_ids,
)
- self._train_dl = MTEncDecModel._setup_dataloader_from_config(cfg=train_data_config, dataset=self._train_ds,)
+ self._train_dl = MTEncDecModel._setup_dataloader_from_config(
+ cfg=train_data_config,
+ dataset=self._train_ds,
+ )
# Need to set this because if using an IterableDataset, the length of the dataloader is the total number
# of samples rather than the number of batches, and this messes up the tqdm progress bar.
@@ -714,7 +727,9 @@ def setup_validation_data(self, val_data_config: Optional[DictConfig]):
for dataloader_idx in range(len(self._validation_dl)):
if dataloader_idx == 0:
setattr(
- self, f'val_loss', GlobalAverageLossMetric(dist_sync_on_step=False, take_avg_loss=True),
+ self,
+ f'val_loss',
+ GlobalAverageLossMetric(dist_sync_on_step=False, take_avg_loss=True),
)
else:
setattr(
@@ -737,7 +752,9 @@ def setup_test_data(self, test_data_config: Optional[DictConfig]):
for dataloader_idx in range(len(self._test_dl)):
if dataloader_idx == 0:
setattr(
- self, f'test_loss', GlobalAverageLossMetric(dist_sync_on_step=False, take_avg_loss=True),
+ self,
+ f'test_loss',
+ GlobalAverageLossMetric(dist_sync_on_step=False, take_avg_loss=True),
)
else:
setattr(
@@ -886,13 +903,15 @@ def _setup_dataloader_from_config(cls, cfg, dataset):
return torch.utils.data.DataLoader(
dataset=dataset,
batch_size=1,
- sampler=None
- if (
- cfg.get("use_tarred_dataset", False)
- or cfg.get("dataset_type", "") == "tarred"
- or isinstance(dataset, ConcatDataset)
- )
- else sampler,
+ sampler=(
+ None
+ if (
+ cfg.get("use_tarred_dataset", False)
+ or cfg.get("dataset_type", "") == "tarred"
+ or isinstance(dataset, ConcatDataset)
+ )
+ else sampler
+ ),
num_workers=cfg.get("num_workers", 2),
pin_memory=cfg.get("pin_memory", False),
drop_last=cfg.get("drop_last", False),
@@ -983,9 +1002,11 @@ def _setup_eval_dataloader_from_config(cls, cfg, datasets):
torch.utils.data.DataLoader(
dataset=dataset,
batch_size=1,
- sampler=None
- if (cfg.get("use_tarred_dataset", False) or isinstance(datasets[0], ConcatDataset))
- else sampler,
+ sampler=(
+ None
+ if (cfg.get("use_tarred_dataset", False) or isinstance(datasets[0], ConcatDataset))
+ else sampler
+ ),
num_workers=cfg.get("num_workers", 2),
pin_memory=cfg.get("pin_memory", False),
drop_last=cfg.get("drop_last", False),
@@ -1188,7 +1209,10 @@ def translate(
)
if return_beam_scores:
_, all_translations, scores, best_translations = self.batch_translate(
- src, src_mask, return_beam_scores=True, cache=cache,
+ src,
+ src_mask,
+ return_beam_scores=True,
+ cache=cache,
)
return_val = all_translations, scores, best_translations
else:
diff --git a/nemo/collections/nlp/models/nlp_model.py b/nemo/collections/nlp/models/nlp_model.py
index b27c00c5d7c3..6a87eb28723c 100644
--- a/nemo/collections/nlp/models/nlp_model.py
+++ b/nemo/collections/nlp/models/nlp_model.py
@@ -19,13 +19,13 @@
from typing import Any, Mapping, Optional, Union
import torch
-from lightning_fabric.utilities.cloud_io import _load as pl_load
+from lightning.fabric.utilities.cloud_io import _load as pl_load
+from lightning.pytorch import Trainer
+from lightning.pytorch.core.saving import _load_state as ptl_load_state
+from lightning.pytorch.core.saving import load_hparams_from_tags_csv, load_hparams_from_yaml
+from lightning.pytorch.utilities import rank_zero_only
+from lightning.pytorch.utilities.migration import pl_legacy_patch
from omegaconf import DictConfig, OmegaConf
-from pytorch_lightning import Trainer
-from pytorch_lightning.core.saving import _load_state as ptl_load_state
-from pytorch_lightning.core.saving import load_hparams_from_tags_csv, load_hparams_from_yaml
-from pytorch_lightning.utilities import rank_zero_only
-from pytorch_lightning.utilities.migration import pl_legacy_patch
from transformers import TRANSFORMERS_CACHE
from nemo.collections.common.tokenizers.huggingface.auto_tokenizer import AutoTokenizer
@@ -397,7 +397,22 @@ def dummy():
model.trainer.strategy.launcher.launch(dummy, trainer=model.trainer)
model.trainer.strategy.setup_environment()
sharded_state_dict = model.sharded_state_dict()
- checkpoint['state_dict'] = sharded_state_dict
+ if kwargs.get("load_mlm", False):
+ mlm_sharded_state_dict = {}
+ for k, v in sharded_state_dict.items():
+ # Remove 'model.' from the sharded_state_dict keys
+ new_key = k.replace('model.', '', 1)
+
+ # Update the key attribute of the ShardedTensor value
+ new_value = v
+ if hasattr(v, 'key'):
+ new_value.key = v.key.replace('model.', '', 1)
+
+ # Add the updated key-value pair to the new dictionary
+ mlm_sharded_state_dict[new_key] = new_value
+ checkpoint['state_dict'] = mlm_sharded_state_dict
+ else:
+ checkpoint['state_dict'] = sharded_state_dict
# load the checkpoint from disk
checkpoint = dist_checkpointing.load(sharded_state_dict=checkpoint, checkpoint_dir=checkpoint_dir)
# restore the weights
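What the new load_mlm branch does to the sharded state dict, sketched with a minimal stand-in for ShardedTensor (the real type comes from megatron.core.dist_checkpointing):

class FakeShardedTensor:
    """Minimal stand-in carrying the `key` attribute the loop rewrites."""

    def __init__(self, key):
        self.key = key


sharded_state_dict = {
    'model.decoder.weight': FakeShardedTensor('model.decoder.weight'),
    'model.embedding.weight': FakeShardedTensor('model.embedding.weight'),
}

mlm_sharded_state_dict = {}
for k, v in sharded_state_dict.items():
    # Strip only the leading 'model.' prefix (count=1), leaving inner ones intact.
    new_key = k.replace('model.', '', 1)
    if hasattr(v, 'key'):
        v.key = v.key.replace('model.', '', 1)
    mlm_sharded_state_dict[new_key] = v

print(sorted(mlm_sharded_state_dict))  # ['decoder.weight', 'embedding.weight']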
diff --git a/nemo/collections/nlp/models/question_answering/qa_base_model.py b/nemo/collections/nlp/models/question_answering/qa_base_model.py
index 7ca78f2e136e..cb07e43c3dc1 100644
--- a/nemo/collections/nlp/models/question_answering/qa_base_model.py
+++ b/nemo/collections/nlp/models/question_answering/qa_base_model.py
@@ -15,8 +15,8 @@
from typing import Optional
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf
-from pytorch_lightning import Trainer
from nemo.collections.nlp.data.question_answering.data_processor.qa_processing import (
EVALUATION_MODE,
diff --git a/nemo/collections/nlp/models/question_answering/qa_bert_model.py b/nemo/collections/nlp/models/question_answering/qa_bert_model.py
index d4bdef6d871d..4036b23999d8 100644
--- a/nemo/collections/nlp/models/question_answering/qa_bert_model.py
+++ b/nemo/collections/nlp/models/question_answering/qa_bert_model.py
@@ -17,8 +17,8 @@
import numpy as np
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from transformers.models.bert.tokenization_bert import BasicTokenizer
from nemo.collections.common.losses import SpanningLoss
diff --git a/nemo/collections/nlp/models/question_answering/qa_gpt_model.py b/nemo/collections/nlp/models/question_answering/qa_gpt_model.py
index 059cf5625f15..f8c883643fe0 100644
--- a/nemo/collections/nlp/models/question_answering/qa_gpt_model.py
+++ b/nemo/collections/nlp/models/question_answering/qa_gpt_model.py
@@ -16,8 +16,8 @@
from typing import List, Optional
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from transformers import AutoModelForCausalLM
from nemo.collections.nlp.data.question_answering.data_processor.qa_processing import QAProcessor
diff --git a/nemo/collections/nlp/models/question_answering/qa_model.py b/nemo/collections/nlp/models/question_answering/qa_model.py
index 2147d7d6a5bf..01b07bb8b3b0 100644
--- a/nemo/collections/nlp/models/question_answering/qa_model.py
+++ b/nemo/collections/nlp/models/question_answering/qa_model.py
@@ -16,8 +16,8 @@
from typing import Optional
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf
-from pytorch_lightning import Trainer
from torch.cuda.amp import autocast
from nemo.collections.common.losses import SpanningLoss
diff --git a/nemo/collections/nlp/models/question_answering/qa_s2s_model.py b/nemo/collections/nlp/models/question_answering/qa_s2s_model.py
index 5ad959fd1b6f..a703e23bc837 100644
--- a/nemo/collections/nlp/models/question_answering/qa_s2s_model.py
+++ b/nemo/collections/nlp/models/question_answering/qa_s2s_model.py
@@ -16,8 +16,8 @@
from typing import List, Optional
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf, open_dict
-from pytorch_lightning import Trainer
from torch.cuda.amp import autocast
from transformers import AutoModelForSeq2SeqLM
diff --git a/nemo/collections/nlp/models/rag/custom_bert_embedder.py b/nemo/collections/nlp/models/rag/custom_bert_embedder.py
index d27ee98a14ef..84361e2728b5 100644
--- a/nemo/collections/nlp/models/rag/custom_bert_embedder.py
+++ b/nemo/collections/nlp/models/rag/custom_bert_embedder.py
@@ -15,10 +15,10 @@
from typing import Any, List
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from llama_index.core.bridge.pydantic import PrivateAttr
from llama_index.core.embeddings import BaseEmbedding
from omegaconf import DictConfig
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.models.information_retrieval.megatron_bert_embedding_model import MegatronBertEmbeddingModel
from nemo.collections.nlp.parts.nlp_overrides import NLPDDPStrategy
diff --git a/nemo/collections/nlp/models/rag/custom_gpt_llm.py b/nemo/collections/nlp/models/rag/custom_gpt_llm.py
index f26a86cfaaf7..1bbeed38991b 100644
--- a/nemo/collections/nlp/models/rag/custom_gpt_llm.py
+++ b/nemo/collections/nlp/models/rag/custom_gpt_llm.py
@@ -14,10 +14,10 @@
from typing import Any
+from lightning.pytorch.trainer.trainer import Trainer
from llama_index.core.bridge.pydantic import PrivateAttr
from llama_index.core.llms import CompletionResponse, CompletionResponseGen, CustomLLM, LLMMetadata
from llama_index.core.llms.callbacks import llm_completion_callback
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
from nemo.collections.nlp.modules.common.transformer.text_generation import LengthParam, SamplingParam
diff --git a/nemo/collections/nlp/models/spellchecking_asr_customization/spellchecking_model.py b/nemo/collections/nlp/models/spellchecking_asr_customization/spellchecking_model.py
index d9e08f6764fc..6d4974993bcb 100644
--- a/nemo/collections/nlp/models/spellchecking_asr_customization/spellchecking_model.py
+++ b/nemo/collections/nlp/models/spellchecking_asr_customization/spellchecking_model.py
@@ -17,8 +17,8 @@
from typing import Dict, Optional
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from nemo.collections.common.losses import CrossEntropyLoss
from nemo.collections.nlp.data.spellchecking_asr_customization import (
diff --git a/nemo/collections/nlp/models/text2sparql/text2sparql_model.py b/nemo/collections/nlp/models/text2sparql/text2sparql_model.py
index 6503364fc07e..df7eefa310bb 100644
--- a/nemo/collections/nlp/models/text2sparql/text2sparql_model.py
+++ b/nemo/collections/nlp/models/text2sparql/text2sparql_model.py
@@ -19,8 +19,8 @@
from typing import Dict, List, Optional, Tuple
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf
-from pytorch_lightning import Trainer
from transformers import AutoModel, BartForConditionalGeneration, EncoderDecoderModel
from nemo.collections.common.metrics import Perplexity
@@ -145,7 +145,10 @@ def training_step(self, batch: Tuple, batch_idx: int) -> Dict:
"""
input_ids, input_mask, decoder_input_ids, labels = batch
loss = self.forward(
- input_ids=input_ids, attention_mask=input_mask, decoder_input_ids=decoder_input_ids, labels=labels,
+ input_ids=input_ids,
+ attention_mask=input_mask,
+ decoder_input_ids=decoder_input_ids,
+ labels=labels,
)[0]
tensorboard_logs = {"train_loss": loss, "lr": self._optimizer.param_groups[0]["lr"]}
@@ -159,7 +162,10 @@ def validation_step(self, batch: Tuple, batch_idx: int) -> Dict:
"""
input_ids, input_mask, decoder_input_ids, labels = batch
loss, logits = self.forward(
- input_ids=input_ids, attention_mask=input_mask, decoder_input_ids=decoder_input_ids, labels=labels,
+ input_ids=input_ids,
+ attention_mask=input_mask,
+ decoder_input_ids=decoder_input_ids,
+ labels=labels,
)[:2]
self.validation_perplexity(logits=logits)
diff --git a/nemo/collections/nlp/models/text_classification/text_classification_model.py b/nemo/collections/nlp/models/text_classification/text_classification_model.py
index 033447304bbf..b2da2fe21701 100644
--- a/nemo/collections/nlp/models/text_classification/text_classification_model.py
+++ b/nemo/collections/nlp/models/text_classification/text_classification_model.py
@@ -17,8 +17,8 @@
from typing import Dict, List, Optional
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from nemo.collections.common.losses import CrossEntropyLoss
from nemo.collections.nlp.data.text_classification import TextClassificationDataset, calc_class_weights
diff --git a/nemo/collections/nlp/models/text_normalization_as_tagging/thutmose_tagger.py b/nemo/collections/nlp/models/text_normalization_as_tagging/thutmose_tagger.py
index 4c11dc157b2b..ddcb3a774055 100644
--- a/nemo/collections/nlp/models/text_normalization_as_tagging/thutmose_tagger.py
+++ b/nemo/collections/nlp/models/text_normalization_as_tagging/thutmose_tagger.py
@@ -17,8 +17,8 @@
from typing import Dict, List, Optional
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from nemo.collections.common.losses import CrossEntropyLoss
from nemo.collections.nlp.data.text_normalization_as_tagging import (
@@ -289,7 +289,7 @@ def on_test_epoch_end(self):
# Functions for inference
@torch.no_grad()
def _infer(self, sents: List[str]) -> List[List[int]]:
- """ Main function for Inference
+ """Main function for Inference
Args:
sents: A list of input sentences (lowercase spoken-domain words separated by space).
diff --git a/nemo/collections/nlp/models/token_classification/punctuation_capitalization_lexical_audio_model.py b/nemo/collections/nlp/models/token_classification/punctuation_capitalization_lexical_audio_model.py
index 69df9b6ac009..bd42517a5720 100644
--- a/nemo/collections/nlp/models/token_classification/punctuation_capitalization_lexical_audio_model.py
+++ b/nemo/collections/nlp/models/token_classification/punctuation_capitalization_lexical_audio_model.py
@@ -17,8 +17,8 @@
import numpy as np
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, open_dict
-from pytorch_lightning import Trainer
from torch.nn import Linear
from tqdm import tqdm
@@ -53,27 +53,27 @@ def update_model_config_to_support_adapter(model_cfg):
class PunctuationCapitalizationLexicalAudioModel(PunctuationCapitalizationModel):
"""
- A model for restoring punctuation and capitalization in text using lexical and audio features.
-
- The model consists of a language model and two multilayer perceptrons (MLP) on top the fusion of LM and AM. The first
- MLP serves for punctuation prediction and the second is for capitalization prediction. You can use only BERT-like
- HuggingFace language models (model ``forward`` method accepts ``input_ids``, ``token_types_ids``,
- ``attention_mask`` arguments). See more about model config options :ref:`here`.
- And any :class:`~nemo.collections.asr.models.EncDecCTCModel` which has encoder module which is used as an AM.
-
- For training and testing use dataset
- :class:`~nemo.collections.nlp.data.token_classification.punctuation_capitalization_dataset.BertPunctuationCapitalizationDataset` with parameter ``use_audio`` set to ``True``,
- for training on huge amounts of data which cannot be loaded into memory simultaneously use
- :class:`~nemo.collections.nlp.data.token_classification.punctuation_capitalization_tarred_dataset.BertPunctuationCapitalizationTarredDataset` with parameter ``use_audio`` set to ``True``.
-
- Args:
- cfg: a model configuration. It should follow dataclass
- :class:`~nemo.collections.nlp.models.token_classification.punctuation_capitalization_config.PunctuationCapitalizationLexicalAudioModelConfig`
- See an example of full config in
- `nemo/examples/nlp/token_classification/conf/punctuation_capitalization_lexical_audio_config.yaml
- `_
- trainer: an instance of a PyTorch Lightning trainer
- """
+ A model for restoring punctuation and capitalization in text using lexical and audio features.
+
+ The model consists of a language model and two multilayer perceptrons (MLP) on top of the fusion of LM and AM. The first
+ MLP serves for punctuation prediction and the second for capitalization prediction. You can use only BERT-like
+ HuggingFace language models (the model ``forward`` method accepts ``input_ids``, ``token_type_ids``,
+ ``attention_mask`` arguments). See more about model config options :ref:`here`.
+ Any :class:`~nemo.collections.asr.models.EncDecCTCModel` which has an encoder module can be used as the AM.
+
+ For training and testing use the dataset
+ :class:`~nemo.collections.nlp.data.token_classification.punctuation_capitalization_dataset.BertPunctuationCapitalizationDataset` with parameter ``use_audio`` set to ``True``.
+ For training on huge amounts of data which cannot be loaded into memory simultaneously, use
+ :class:`~nemo.collections.nlp.data.token_classification.punctuation_capitalization_tarred_dataset.BertPunctuationCapitalizationTarredDataset` with parameter ``use_audio`` set to ``True``.
+
+ Args:
+ cfg: a model configuration. It should follow the dataclass
+ :class:`~nemo.collections.nlp.models.token_classification.punctuation_capitalization_config.PunctuationCapitalizationLexicalAudioModelConfig`.
+ See an example of a full config in
+ `nemo/examples/nlp/token_classification/conf/punctuation_capitalization_lexical_audio_config.yaml
+ `_
+ trainer: an instance of a PyTorch Lightning trainer
+ """
def __init__(self, cfg: DictConfig, trainer: Trainer = None) -> None:
super().__init__(cfg, trainer)
@@ -199,31 +199,31 @@ def forward(
features_length: torch.Tensor = None,
) -> Tuple[torch.Tensor, torch.Tensor]:
"""
- Executes a forward pass through the model. For more details see ``forward`` method of :class:`~nemo.collections.nlp.models.token_classification.punctuation_capitalization_config.PunctuationCapitalizationLexicalAudioModelConfig`
- and ``forward`` method of :class:'~nemo.collections.asr.models.EncDecCTCModel'
-
- Args:
- input_ids (:obj:`torch.Tensor`): an integer torch tensor of shape ``[Batch, Time]``. Contains encoded
- source tokens.
- attention_mask (:obj:`torch.Tensor`): a boolean torch tensor of shape ``[Batch, Time]``. Contains an
- attention mask for excluding paddings.
- token_type_ids (:obj:`torch.Tensor`): an integer torch Tensor of shape ``[Batch, Time]``. Contains an index
- of segment to which a token belongs. If ``token_type_ids`` is not ``None``, then it should be a zeros
- tensor.
- features (:obj:`torch.Tensor`): tensor that represents a batch of raw audio signals,
- of shape [B, T]. T here represents timesteps, with 1 second of audio represented as
- sample_rate number of floating point values.
- features_length (:obj:`torch.Tensor`): Vector of length B, that contains the individual lengths of the audio
- sequences.
-
- Returns:
- :obj:`Tuple[torch.Tensor, torch.Tensor]`: a tuple containing
-
- - ``punct_logits`` (:obj:`torch.Tensor`): a float torch tensor of shape
- ``[Batch, Time, NumPunctuationLabels]`` containing punctuation logits
- - ``capit_logits`` (:obj:`torch.Tensor`): a float torch tensor of shape
- ``[Batch, Time, NumCapitalizationLabels]`` containing capitalization logits
- """
+ Executes a forward pass through the model. For more details see the ``forward`` method of :class:`~nemo.collections.nlp.models.token_classification.punctuation_capitalization_config.PunctuationCapitalizationLexicalAudioModelConfig`
+ and the ``forward`` method of :class:`~nemo.collections.asr.models.EncDecCTCModel`
+
+ Args:
+ input_ids (:obj:`torch.Tensor`): an integer torch tensor of shape ``[Batch, Time]``. Contains encoded
+ source tokens.
+ attention_mask (:obj:`torch.Tensor`): a boolean torch tensor of shape ``[Batch, Time]``. Contains an
+ attention mask for excluding paddings.
+ token_type_ids (:obj:`torch.Tensor`): an integer torch tensor of shape ``[Batch, Time]``. Contains the index
+ of the segment to which a token belongs. If ``token_type_ids`` is not ``None``, then it should be a zeros
+ tensor.
+ features (:obj:`torch.Tensor`): a tensor representing a batch of raw audio signals,
+ of shape ``[B, T]``. ``T`` is the number of timesteps, with 1 second of audio represented by
+ ``sample_rate`` floating point values.
+ features_length (:obj:`torch.Tensor`): a vector of length ``B`` containing the individual lengths of the audio
+ sequences.
+
+ Returns:
+ :obj:`Tuple[torch.Tensor, torch.Tensor]`: a tuple containing
+
+ - ``punct_logits`` (:obj:`torch.Tensor`): a float torch tensor of shape
+ ``[Batch, Time, NumPunctuationLabels]`` containing punctuation logits
+ - ``capit_logits`` (:obj:`torch.Tensor`): a float torch tensor of shape
+ ``[Batch, Time, NumCapitalizationLabels]`` containing capitalization logits
+ """
self.update_max_seq_length(seq_length=features.size(1), device=features.device)
lexical_hidden_states = self.bert_model(
input_ids=input_ids, token_type_ids=token_type_ids, attention_mask=attention_mask
@@ -232,7 +232,8 @@ def forward(
lexical_hidden_states = lexical_hidden_states[0]
processed_signal, processed_signal_length = self.audio_encoder.preprocessor(
- input_signal=features, length=features_length,
+ input_signal=features,
+ length=features_length,
)
if self.audio_encoder.spec_augmentation is not None and self.training:
@@ -301,49 +302,49 @@ def add_punctuation_capitalization(
target_sr: Optional[int] = None,
) -> List[str]:
"""
- Adds punctuation and capitalization to the queries. Use this method for inference.
-
- Parameters ``max_seq_length``, ``step``, ``margin`` are for controlling the way queries are split into segments
- which are processed by the model. Parameter ``max_seq_length`` is a length of a segment after tokenization
- including special tokens [CLS] in the beginning and [SEP] in the end of a segment. Parameter ``step`` is a
- shift between consequent segments. Parameter ``margin`` is used to exclude negative effect of subtokens near
- borders of segments which have only one side context.
-
- If segments overlap, probabilities of overlapping predictions are multiplied and then the label with
- corresponding to the maximum probability is selected.
-
- Args:
- queries (:obj:`List[str]`): lower cased text without punctuation.
- batch_size (:obj:`List[str]`, `optional`): batch size to use during inference. If ``batch_size`` parameter
- is not provided, then it will be equal to length of ``queries`` list.
- max_seq_length (:obj:`int`, `optional`, defaults to :obj:`64`): maximum sequence length of a segment after
- tokenization including :code:`[CLS]` and :code:`[SEP]` tokens.
- step (:obj:`int`, `optional`, defaults to :obj:`8`): relative shift of consequent segments into which long
- queries are split. Long queries are split into segments which can overlap. Parameter ``step`` controls
- such overlapping. Imagine that queries are tokenized into characters, ``max_seq_length=5``, and
- ``step=2``. In such case, query ``"hello"`` is tokenized into segments
- ``[['[CLS]', 'h', 'e', 'l', '[SEP]'], ['[CLS]', 'l', 'l', 'o', '[SEP]']]``.
- margin (:obj:`int`, `optional`, defaults to :obj:`16`): number of subtokens in the beginning and the end of
- segments which are not used for prediction computation. The first segment does not have left margin and
- the last segment does not have right margin. For example, if an input sequence is tokenized into
- characters, ``max_seq_length=5``, ``step=1``, and ``margin=1``, then query ``"hello"`` will be
- tokenized into segments ``[['[CLS]', 'h', 'e', 'l', '[SEP]'], ['[CLS]', 'e', 'l', 'l', '[SEP]'],
- ['[CLS]', 'l', 'l', 'o', '[SEP]']]``. These segments are passed to the model. Before final predictions
- computation, margins are removed. In the next list, subtokens which logits are not used for final
- predictions computation are marked with asterisk: ``[['[CLS]'*, 'h', 'e', 'l'*, '[SEP]'*],
- ['[CLS]'*, 'e'*, 'l', 'l'*, '[SEP]'*], ['[CLS]'*, 'l'*, 'l', 'o', '[SEP]'*]]``.
- return_labels (:obj:`bool`, `optional`, defaults to :obj:`False`): whether to return labels in NeMo format
- (see :ref:`nlp/punctuation_and_capitalization/NeMo Data Format`) instead of queries with restored
- punctuation and capitalization.
- dataloader_kwargs (:obj:`Dict[str, Any]`, `optional`): an optional dictionary with parameters of PyTorch
- data loader. May include keys: ``'num_workers'``, ``'pin_memory'``, ``'worker_init_fn'``,
- ``'prefetch_factor'``, ``'persistent_workers'``.
- audio_queries (:obj:`List[str]`, `optional`): paths to audio files.
- target_sr (:obj:`int`, `optional`): target sample rate for audios.
- Returns:
- :obj:`List[str]`: a list of queries with restored capitalization and punctuation if
- ``return_labels=False``, else a list of punctuation and capitalization labels strings for all queries
- """
+ Adds punctuation and capitalization to the queries. Use this method for inference.
+
+ Parameters ``max_seq_length``, ``step``, and ``margin`` control the way queries are split into segments
+ which are processed by the model. Parameter ``max_seq_length`` is the length of a segment after tokenization,
+ including the special tokens [CLS] at the beginning and [SEP] at the end of a segment. Parameter ``step`` is the
+ shift between consecutive segments. Parameter ``margin`` is used to exclude the negative effect of subtokens near
+ segment borders, which have context on only one side.
+
+ If segments overlap, the probabilities of overlapping predictions are multiplied, and then the label
+ corresponding to the maximum probability is selected.
+
+ Args:
+ queries (:obj:`List[str]`): lowercased text without punctuation.
+ batch_size (:obj:`int`, `optional`): batch size to use during inference. If the ``batch_size`` parameter
+ is not provided, it will be equal to the length of the ``queries`` list.
+ max_seq_length (:obj:`int`, `optional`, defaults to :obj:`64`): maximum sequence length of a segment after
+ tokenization including :code:`[CLS]` and :code:`[SEP]` tokens.
+ step (:obj:`int`, `optional`, defaults to :obj:`8`): relative shift of consecutive segments into which long
+ queries are split. Long queries are split into segments which can overlap. Parameter ``step`` controls
+ such overlapping. Imagine that queries are tokenized into characters, ``max_seq_length=5``, and
+ ``step=2``. In that case, query ``"hello"`` is tokenized into segments
+ ``[['[CLS]', 'h', 'e', 'l', '[SEP]'], ['[CLS]', 'l', 'l', 'o', '[SEP]']]``.
+ margin (:obj:`int`, `optional`, defaults to :obj:`16`): number of subtokens at the beginning and the end of
+ segments which are not used for prediction computation. The first segment does not have a left margin and
+ the last segment does not have a right margin. For example, if an input sequence is tokenized into
+ characters, ``max_seq_length=5``, ``step=1``, and ``margin=1``, then query ``"hello"`` will be
+ tokenized into segments ``[['[CLS]', 'h', 'e', 'l', '[SEP]'], ['[CLS]', 'e', 'l', 'l', '[SEP]'],
+ ['[CLS]', 'l', 'l', 'o', '[SEP]']]``. These segments are passed to the model. Before final predictions
+ computation, margins are removed. In the next list, subtokens whose logits are not used for the final
+ prediction computation are marked with an asterisk: ``[['[CLS]'*, 'h', 'e', 'l'*, '[SEP]'*],
+ ['[CLS]'*, 'e'*, 'l', 'l'*, '[SEP]'*], ['[CLS]'*, 'l'*, 'l', 'o', '[SEP]'*]]``.
+ return_labels (:obj:`bool`, `optional`, defaults to :obj:`False`): whether to return labels in NeMo format
+ (see :ref:`nlp/punctuation_and_capitalization/NeMo Data Format`) instead of queries with restored
+ punctuation and capitalization.
+ dataloader_kwargs (:obj:`Dict[str, Any]`, `optional`): an optional dictionary with parameters of PyTorch
+ data loader. May include keys: ``'num_workers'``, ``'pin_memory'``, ``'worker_init_fn'``,
+ ``'prefetch_factor'``, ``'persistent_workers'``.
+ audio_queries (:obj:`List[str]`, `optional`): paths to audio files.
+ target_sr (:obj:`int`, `optional`): target sample rate for the audio files.
+ Returns:
+ :obj:`List[str]`: a list of queries with restored capitalization and punctuation if
+ ``return_labels=False``, else a list of punctuation and capitalization label strings for all queries
+ """
if len(queries) == 0:
return []
@@ -408,7 +409,9 @@ def add_punctuation_capitalization(
acc_probs[q_i] = b_probs_i
else:
all_preds[q_i], acc_probs[q_i] = self._move_acc_probs_to_token_preds(
- all_preds[q_i], acc_probs[q_i], start_word_id - len(all_preds[q_i]),
+ all_preds[q_i],
+ acc_probs[q_i],
+ start_word_id - len(all_preds[q_i]),
)
acc_probs[q_i] = self._update_accumulated_probabilities(acc_probs[q_i], b_probs_i)
for all_preds, acc_probs in [(all_punct_preds, acc_punct_probs), (all_capit_preds, acc_capit_probs)]:
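
A minimal standalone sketch of the segment splitting described in the ``add_punctuation_capitalization`` docstring above. The helper below is illustrative only (not part of NeMo) and "tokenizes" queries into characters, as in the docstring examples:

    def split_into_segments(tokens, max_seq_length=5, step=2):
        """Split tokens into overlapping segments of at most max_seq_length,
        reserving two slots for [CLS]/[SEP] and shifting by `step`."""
        body = max_seq_length - 2  # room left after [CLS] and [SEP]
        segments, start = [], 0
        while True:
            segments.append(['[CLS]'] + tokens[start:start + body] + ['[SEP]'])
            if start + body >= len(tokens):
                break
            start += step
        return segments

    print(split_into_segments(list("hello")))
    # [['[CLS]', 'h', 'e', 'l', '[SEP]'], ['[CLS]', 'l', 'l', 'o', '[SEP]']]

With ``step=1`` the same query yields the three segments used in the ``margin`` example.
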
diff --git a/nemo/collections/nlp/models/token_classification/punctuation_capitalization_model.py b/nemo/collections/nlp/models/token_classification/punctuation_capitalization_model.py
index 6e2d1f5762ec..8cf153dfdf76 100644
--- a/nemo/collections/nlp/models/token_classification/punctuation_capitalization_model.py
+++ b/nemo/collections/nlp/models/token_classification/punctuation_capitalization_model.py
@@ -20,8 +20,8 @@
import numpy as np
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf
-from pytorch_lightning import Trainer
from tqdm import tqdm
from nemo.collections.common.losses import AggregatorLoss, CrossEntropyLoss
@@ -812,7 +812,13 @@ def _setup_dataloader_from_config(self, cfg: DictConfig, train: bool) -> torch.u
raise ValueError(
f"If `use_tarred_dataset` is `False`, then you need to provide `tokens_in_batch` parameter."
)
- text_file, labels_file, = Path(cfg.ds_item) / cfg.text_file, Path(cfg.ds_item) / cfg.labels_file
+ (
+ text_file,
+ labels_file,
+ ) = (
+ Path(cfg.ds_item) / cfg.text_file,
+ Path(cfg.ds_item) / cfg.labels_file,
+ )
if cfg.audio_file:
audio_file = Path(cfg.ds_item) / cfg.audio_file
if self.label_ids_are_set:
@@ -1010,7 +1016,8 @@ def _transform_logit_to_prob_and_remove_margins_and_extract_word_probs(
stm = self._remove_margins(stm, margin, keep_left=first, keep_right=last)
for b_probs, logits in [(b_punct_probs, pl), (b_capit_probs, cl)]:
p = torch.nn.functional.softmax(
- self._remove_margins(logits, margin, keep_left=first, keep_right=last)[stm], dim=-1,
+ self._remove_margins(logits, margin, keep_left=first, keep_right=last)[stm],
+ dim=-1,
)
b_probs.append(p.detach().cpu().numpy())
return b_punct_probs, b_capit_probs, new_start_word_ids
@@ -1191,7 +1198,9 @@ def add_punctuation_capitalization(
):
inp_ids, inp_type_ids, inp_mask, subtokens_mask, start_word_ids, query_ids, is_first, is_last = batch
punct_logits, capit_logits = self.forward(
- input_ids=inp_ids.to(d), token_type_ids=inp_type_ids.to(d), attention_mask=inp_mask.to(d),
+ input_ids=inp_ids.to(d),
+ token_type_ids=inp_type_ids.to(d),
+ attention_mask=inp_mask.to(d),
)
_res = self._transform_logit_to_prob_and_remove_margins_and_extract_word_probs(
punct_logits, capit_logits, subtokens_mask, start_word_ids, margin, is_first, is_last
@@ -1208,7 +1217,9 @@ def add_punctuation_capitalization(
acc_probs[q_i] = b_probs_i
else:
all_preds[q_i], acc_probs[q_i] = self._move_acc_probs_to_token_preds(
- all_preds[q_i], acc_probs[q_i], start_word_id - len(all_preds[q_i]),
+ all_preds[q_i],
+ acc_probs[q_i],
+ start_word_id - len(all_preds[q_i]),
)
acc_probs[q_i] = self._update_accumulated_probabilities(acc_probs[q_i], b_probs_i)
for all_preds, acc_probs in [(all_punct_preds, acc_punct_probs), (all_capit_preds, acc_capit_probs)]:
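
A small sketch of how the accumulated probabilities above combine overlapping segments: per-token label probabilities coming from different segments are multiplied element-wise, and the label with the maximum accumulated probability is selected (illustrative NumPy only, not NeMo code):

    import numpy as np

    # label probabilities for the same token from two overlapping segments
    probs_segment_a = np.array([0.7, 0.2, 0.1])
    probs_segment_b = np.array([0.6, 0.3, 0.1])

    accumulated = probs_segment_a * probs_segment_b  # element-wise product
    print(accumulated, int(np.argmax(accumulated)))  # [0.42 0.06 0.01] 0
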
diff --git a/nemo/collections/nlp/models/token_classification/token_classification_model.py b/nemo/collections/nlp/models/token_classification/token_classification_model.py
index 0b465bae663c..99bb2328b956 100644
--- a/nemo/collections/nlp/models/token_classification/token_classification_model.py
+++ b/nemo/collections/nlp/models/token_classification/token_classification_model.py
@@ -16,8 +16,8 @@
from typing import List, Optional, Union
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf
-from pytorch_lightning import Trainer
from torch.utils.data import DataLoader
from nemo.collections.common.losses import CrossEntropyLoss
diff --git a/nemo/collections/nlp/models/zero_shot_intent_recognition/zero_shot_intent_model.py b/nemo/collections/nlp/models/zero_shot_intent_recognition/zero_shot_intent_model.py
index e65f3d7749eb..07e0826c712c 100644
--- a/nemo/collections/nlp/models/zero_shot_intent_recognition/zero_shot_intent_model.py
+++ b/nemo/collections/nlp/models/zero_shot_intent_recognition/zero_shot_intent_model.py
@@ -18,8 +18,8 @@
import numpy as np
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from nemo.collections.nlp.data.zero_shot_intent_recognition.zero_shot_intent_dataset import (
ZeroShotIntentDataset,
@@ -155,7 +155,6 @@ def predict(
entailment_idx=1,
contradiction_idx=0,
) -> List[Dict]:
-
"""
Given a list of queries and a list of candidate labels, return a ranked list of labels and scores for each query.
diff --git a/nemo/collections/nlp/modules/common/lm_utils.py b/nemo/collections/nlp/modules/common/lm_utils.py
index af6fc9ecb0a7..86792059b28f 100644
--- a/nemo/collections/nlp/modules/common/lm_utils.py
+++ b/nemo/collections/nlp/modules/common/lm_utils.py
@@ -17,8 +17,8 @@
from typing import List, Optional, Union
from attr import asdict
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from nemo.collections.nlp.modules.common.bert_module import BertModule
from nemo.collections.nlp.modules.common.decoder_module import DecoderModule
diff --git a/nemo/collections/nlp/modules/common/megatron/adapters/mcore_mixins.py b/nemo/collections/nlp/modules/common/megatron/adapters/mcore_mixins.py
index 5128b4ca6b16..e306a0a9b6b7 100644
--- a/nemo/collections/nlp/modules/common/megatron/adapters/mcore_mixins.py
+++ b/nemo/collections/nlp/modules/common/megatron/adapters/mcore_mixins.py
@@ -80,11 +80,22 @@ def forward(
context: Tensor = None,
context_mask: Tensor = None,
rotary_pos_emb: Tensor = None,
+ rotary_pos_cos: Tensor = None,
+ rotary_pos_sin: Tensor = None,
+ attention_bias: Tensor = None,
inference_params: InferenceParams = None,
packed_seq_params: PackedSeqParams = None,
):
hidden_states = super().forward(
- hidden_states, attention_mask, context, context_mask, rotary_pos_emb, inference_params, packed_seq_params
+ hidden_states=hidden_states,
+ attention_mask=attention_mask,
+ context=context,
+ context_mask=context_mask,
+ rotary_pos_emb=rotary_pos_emb,
+ rotary_pos_cos=rotary_pos_cos,
+ rotary_pos_sin=rotary_pos_sin,
+ inference_params=inference_params,
+ packed_seq_params=packed_seq_params,
)
mlp_head_adapter = self.get_adapter_module(AdapterName.MLP_HEAD_ADAPTER)
@@ -220,6 +231,9 @@ def forward(
inference_params=None,
rotary_pos_emb=None,
packed_seq_params=None,
+ rotary_pos_cos=None,
+ rotary_pos_sin=None,
+ attention_bias=None,
):
# hidden_states: [sq, b, h]
@@ -237,8 +251,8 @@ def forward(
# ===================================================
# Adjust key, value, and rotary_pos_emb for inference
# ===================================================
- key, value, rotary_pos_emb, attn_mask_type = self._adjust_key_value_for_inference(
- inference_params, key, value, rotary_pos_emb
+ query, key, value, rotary_pos_emb, attn_mask_type = self._adjust_key_value_for_inference(
+ inference_params, query, key, value, rotary_pos_emb
)
if packed_seq_params is not None:
diff --git a/nemo/collections/nlp/modules/common/megatron/adapters/parallel_adapters.py b/nemo/collections/nlp/modules/common/megatron/adapters/parallel_adapters.py
index 042dbb95979e..22f669fb5544 100644
--- a/nemo/collections/nlp/modules/common/megatron/adapters/parallel_adapters.py
+++ b/nemo/collections/nlp/modules/common/megatron/adapters/parallel_adapters.py
@@ -136,6 +136,26 @@ class MLPInfusedAdapterConfig(InfusedAdapterConfig):
_target_: str = "{0}.{1}".format(MLPInfusedAdapter.__module__, MLPInfusedAdapter.__name__)
+def pad_seq_to_mult(x, mult):
+ import torch.nn.functional as F
+
+ if x.shape[0] % mult == 0:
+ return x, 0
+ pad_len = mult - (x.shape[0] % mult)
+ with torch.no_grad():
+ # pad at the tail
+ x = F.pad(x, (0, 0, 0, pad_len))
+ return x, pad_len
+
+
+def unpad_seq_to_mult(x, pad_len):
+ if pad_len <= 0:
+ return x
+ with torch.no_grad():
+ # prune tail padding
+ return x[:-pad_len, :]
+
+
class ParallelLinearAdapter(nn.Module, AdapterModuleUtil):
def __init__(
self,
@@ -154,6 +174,7 @@ def __init__(
alpha: float | None = None,
dropout_position: str = 'post',
a2a_experimental: bool = False, # TODO: should rename this or make it a default feature
+ is_expert: bool = False,
**kwargs,
):
super().__init__()
@@ -167,6 +188,7 @@ def __init__(
self.input_is_parallel = input_is_parallel
self.dropout_position = dropout_position
self.use_a2a = a2a_experimental
+ self.is_expert = is_expert
# megatron_gpt_peft_models will provide this arg, but deprecated ones do not.
# in case this arg is not provided, use the dummy default config.
@@ -292,6 +314,10 @@ def forward(self, x):
if self.dropout is not None and self.dropout_position == 'pre':
x = self.dropout(x)
+ pad_len = 0
+ if self.is_expert:
+ x, pad_len = pad_seq_to_mult(x, self.config.tensor_model_parallel_size)
+
if self.norm_position == 'pre':
x = self.layer_norm(x)
if self._sequence_parallel and not self.input_is_parallel:
@@ -311,7 +337,7 @@ def forward(self, x):
x.activation_offloading = True
x, _ = self.linear_out(x)
- if self._sequence_parallel and self.input_is_parallel:
+ if self._sequence_parallel and self.input_is_parallel and not self.is_expert:
# for attention_dense and linear_fc2
# layernorm after lora is impacted by sequence parallel,
# hence seq dim need to be scattered right after lora linear layers
@@ -331,6 +357,10 @@ def forward(self, x):
x = x * (self.alpha / self.dim)
+ if pad_len > 0:
+ # Remove MoE padding.
+ x = unpad_seq_to_mult(x, pad_len)
+
return x
def sharded_state_dict(
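
A quick round-trip check of the new ``pad_seq_to_mult``/``unpad_seq_to_mult`` helpers: the sequence (first) dimension is padded at the tail up to a multiple of the tensor-parallel size before the expert adapter's linear layers, and the padding is stripped afterwards. The sketch re-declares equivalent helpers so it runs standalone:

    import torch
    import torch.nn.functional as F

    def pad_seq_to_mult(x, mult):
        # pad dim 0 at the tail up to a multiple of `mult`
        if x.shape[0] % mult == 0:
            return x, 0
        pad_len = mult - (x.shape[0] % mult)
        return F.pad(x, (0, 0, 0, pad_len)), pad_len

    def unpad_seq_to_mult(x, pad_len):
        # drop the tail padding added by pad_seq_to_mult
        return x[:-pad_len, :] if pad_len > 0 else x

    x = torch.randn(7, 4)                    # [sequence, hidden]
    padded, pad_len = pad_seq_to_mult(x, 4)  # e.g. tensor_model_parallel_size = 4
    assert padded.shape[0] == 8 and pad_len == 1
    assert torch.equal(unpad_seq_to_mult(padded, pad_len), x)
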
diff --git a/nemo/collections/nlp/modules/common/megatron/attention.py b/nemo/collections/nlp/modules/common/megatron/attention.py
index c1b4e3023e42..d5784081f6f0 100644
--- a/nemo/collections/nlp/modules/common/megatron/attention.py
+++ b/nemo/collections/nlp/modules/common/megatron/attention.py
@@ -380,6 +380,7 @@ def forward(
rotary_pos_emb=None, # rotary positional embedding
relative_position_bias=None,
checkpoint_core_attention=False,
+ return_scores=False,
):
# hidden_states: [sq, b, h]
@@ -398,7 +399,9 @@ def forward(
# Some consistency check.
if inference_max_sequence_len:
- assert self.inference_current_sequence_len < self.inference_key_memory.size(0)
+ # "<=" is used because for cross-attention the inference key memory size
+ # is already equal to the current sequence length
+ assert self.inference_current_sequence_len <= self.inference_key_memory.size(0)
assert inference_max_sequence_len == self.inference_key_memory.size(0)
# This is added for safety. In case inference_max_sequence_len
# is not provided, make sure there is no potential memory left
@@ -433,28 +436,40 @@ def forward(
(query_layer, key_layer, value_layer) = tensor_parallel.split_tensor_along_last_dim(
mixed_x_layer, 3, contiguous_split_chunks=True
)
- else:
+ else: # cross-attention
# Attention heads [sk, b, h] --> [sk, b, (np * 2 * hn)]
- mixed_kv_layer, _ = self.key_value(encoder_output)
- if self.is_adapter_available():
- lora_kv_adapter = self.get_adapter_module(AdapterName.LORA_KV_ADAPTER)
- if lora_kv_adapter and self.adapter_cfg[AdapterName.LORA_KV_ADAPTER]['enabled']:
- lora_mixed_kv_layer = lora_kv_adapter(encoder_output)
- mixed_kv_layer = mixed_kv_layer + lora_mixed_kv_layer
-
- # [sk, b, (np * 2 * hn)] --> [sk, b, np, 2 * hn]
- new_tensor_shape = mixed_kv_layer.size()[:-1] + (
- self.num_attention_heads_per_partition,
- 2 * self.hidden_size_per_attention_head,
- )
- if self.megatron_legacy:
- mixed_kv_layer = self._transpose_last_dim(mixed_kv_layer, 2, True)
- mixed_kv_layer = mixed_kv_layer.view(*new_tensor_shape)
+ if (
+ inference_max_sequence_len is None
+ ) or self.inference_current_sequence_len < inference_max_sequence_len:
+ # If we are in training (inference_max_sequence_len is None),
+ # or we have not yet cached the cross-attention key and value in the decoder on step 0,
+ # do the caching
+ mixed_kv_layer, _ = self.key_value(encoder_output)
+ if self.is_adapter_available():
+ lora_kv_adapter = self.get_adapter_module(AdapterName.LORA_KV_ADAPTER)
+ if lora_kv_adapter and self.adapter_cfg[AdapterName.LORA_KV_ADAPTER]['enabled']:
+ lora_mixed_kv_layer = lora_kv_adapter(encoder_output)
+ mixed_kv_layer = mixed_kv_layer + lora_mixed_kv_layer
+
+ # [sk, b, (np * 2 * hn)] --> [sk, b, np, 2 * hn]
+ new_tensor_shape = mixed_kv_layer.size()[:-1] + (
+ self.num_attention_heads_per_partition,
+ 2 * self.hidden_size_per_attention_head,
+ )
+ if self.megatron_legacy:
+ mixed_kv_layer = self._transpose_last_dim(mixed_kv_layer, 2, True)
+ mixed_kv_layer = mixed_kv_layer.view(*new_tensor_shape)
- # [sk, b, np, 2 * hn] --> 2 [sk, b, np, hn]
- (key_layer, value_layer) = tensor_parallel.split_tensor_along_last_dim(
- mixed_kv_layer, 2, contiguous_split_chunks=True
- )
+ # [sk, b, np, 2 * hn] --> 2 [sk, b, np, hn]
+ (key_layer, value_layer) = tensor_parallel.split_tensor_along_last_dim(
+ mixed_kv_layer, 2, contiguous_split_chunks=True
+ )
+ else:
+ # otherwise, during inference, key and value are already cached, so just read the cache
+ key_layer = self.inference_key_memory[: self.inference_current_sequence_len, ...]
+ value_layer = self.inference_value_memory[: self.inference_current_sequence_len, ...]
+ if attention_mask is not None:
+ attention_mask = attention_mask[..., -1, :].unsqueeze(-2)
# Attention head [sq, b, h] --> [sq, b, hp]
query_layer, _ = self.query(hidden_states)
@@ -490,7 +505,9 @@ def forward(
if rotary_pos_emb is not None:
rotary_pos_emb = rotary_pos_emb if isinstance(rotary_pos_emb, tuple) else ((rotary_pos_emb,) * 2)
- if inference_max_sequence_len:
+ # For cross-attention (inference_current_sequence_len == inference_max_sequence_len == inference_key_memory.size(0)),
+ # we only need to cache this once
+ if inference_max_sequence_len and self.inference_current_sequence_len < inference_max_sequence_len:
# Adjust the range variables.
start = self.inference_current_sequence_len
self.inference_current_sequence_len += key_layer.size(0)
@@ -501,7 +518,7 @@ def forward(
key_layer = self.inference_key_memory[:end, ...]
value_layer = self.inference_value_memory[:end, ...]
# Adjust attention mask
- if attention_mask is not None:
+ if attention_mask is not None and self.attention_type == AttnType.self_attn:
attention_mask = attention_mask[..., start:end, :end]
# adjust the key rotary positional embedding
if rotary_pos_emb is not None:
@@ -569,7 +586,10 @@ def forward(
relative_position_bias=relative_position_bias,
headscale_tensor=self.head_scale_tensor if self.headscale else None,
inference_mode=inference_max_sequence_len is not None and query_layer.shape[0] == 1,
+ return_scores=return_scores,
)
+ if return_scores:
+ context_layer, attention_probs = context_layer
# =================
# Output. [sq, b, h]
@@ -585,6 +605,9 @@ def forward(
if get_key_value:
output = [output, present]
+ if return_scores:
+ output = [output, attention_probs]
+
return output, bias
@@ -857,6 +880,7 @@ def forward(
relative_position_bias=None,
headscale_tensor=None,
inference_mode=None,
+ return_scores=None,
):
b, np, sq, sk, hn = (
query_layer.size(1),
@@ -914,9 +938,27 @@ def forward(
# relative_position_bias [b, np, sq, sk]
# context_layer [b, np, sq, hn]
# ==================================================
- context_layer = self.attn_fn(
- query_layer, key_layer, value_layer, attention_mask, relative_position_bias, inference_mode
- )
+ if not return_scores:
+ context_layer = self.attn_fn(
+ query_layer,
+ key_layer,
+ value_layer,
+ attention_mask,
+ relative_position_bias,
+ inference_mode,
+ )
+ else:
+ # SpeechLLM TTS modifications
+ context_layer = self.torch_attention_with_prior(
+ query_layer,
+ key_layer,
+ value_layer,
+ attention_mask,
+ relative_position_bias,
+ inference_mode,
+ return_scores=return_scores,
+ )
+ context_layer, attention_probs = context_layer
if headscale_tensor is not None:
context_layer = context_layer * headscale_tensor
@@ -928,7 +970,10 @@ def forward(
new_context_layer_shape = context_layer.size()[:-2] + (self.hidden_size_per_partition,)
context_layer = context_layer.view(*new_context_layer_shape)
- return context_layer
+ if return_scores:
+ return context_layer, attention_probs
+ else:
+ return context_layer
def torch_attention(self, query_layer, key_layer, value_layer, attention_mask, attention_bias, inference_mode):
sq, b, np, hn = query_layer.shape
@@ -986,6 +1031,69 @@ def torch_attention(self, query_layer, key_layer, value_layer, attention_mask, a
return context_layer
+ def torch_attention_with_prior(
+ self, query_layer, key_layer, value_layer, attention_mask, attention_bias, inference_mode, return_scores=False
+ ):
+ sq, b, np, hn = query_layer.shape
+ sk = key_layer.shape[0]
+
+ if self.multi_query_attention:
+ query_layer = rearrange(query_layer, 'sq b np hn -> b (np sq) hn')
+ key_layer = rearrange(key_layer, 'sk b 1 hn -> b hn sk')
+ value_layer = rearrange(value_layer, 'sv b np hn -> (b np) sv hn')
+ else:
+ query_layer = rearrange(query_layer, 'sq b np hn -> (b np) sq hn')
+ key_layer = rearrange(key_layer, 'sk b np hn -> (b np) hn sk')
+ value_layer = rearrange(value_layer, 'sv b np hn -> (b np) sv hn')
+
+ matmul_input_buffer = torch.empty(
+ query_layer.shape[0],
+ query_layer.shape[1],
+ key_layer.shape[2],
+ dtype=query_layer.dtype,
+ device=query_layer.device,
+ )
+
+ matmul_result = torch.baddbmm(
+ matmul_input_buffer,
+ query_layer,
+ key_layer,
+ beta=0.0,
+ alpha=(1.0 / self.norm_factor) if self.normalize_attention_scores else 1.0,
+ )
+
+ # change view to [b, np, sq, sk]
+ attention_scores = matmul_result.view(b, np, sq, sk)
+
+ if attention_bias is not None:
+ # attention_bias is not None only for cross attention layers right now in T5
+ attention_scores = torch.log_softmax(attention_scores, dim=-1) + attention_bias
+
+ _attention_probs = self.scale_mask_softmax(attention_scores, attention_mask)
+ # This is actually dropping out entire tokens to attend to, which might
+ # seem a bit unusual, but is taken from the original Transformer paper.
+
+ if not self.sequence_parallel:
+ with tensor_parallel.random.get_cuda_rng_tracker().fork():
+ attention_probs = self.attention_dropout(_attention_probs)
+ else:
+ attention_probs = self.attention_dropout(_attention_probs)
+
+ # change view [b * np, sq, sk]
+ attention_probs = rearrange(attention_probs, 'b np sq sk -> (b np) sq sk')
+
+ # matmul: [b * np, sq, hn]
+ context_layer = torch.bmm(attention_probs, value_layer)
+
+ # change view [b, np, sq, hn]
+ context_layer = rearrange(context_layer, '(b np) sq hn -> b np sq hn', np=np)
+
+ if return_scores:
+ # NOTE: returns the prior-adjusted attention scores rather than the post-softmax probabilities
+ return context_layer, attention_scores
+ else:
+ return context_layer
+
def flash_attention(self, query_layer, key_layer, value_layer, attention_mask, attention_bias, inference_mode):
query_layer = rearrange(query_layer, 'sq b np hn -> b sq np hn')
key_layer = rearrange(key_layer, 'sk b np hn -> b sk np hn')
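
The core of ``torch_attention_with_prior`` above is ``torch.log_softmax(attention_scores, dim=-1) + attention_bias``: the prior is added in log space, so after the subsequent softmax it acts as a multiplicative prior over the keys. A standalone sketch of that identity (illustrative, not NeMo code):

    import torch

    scores = torch.randn(2, 5)                                # unnormalized attention scores
    log_prior = torch.log_softmax(torch.randn(2, 5), dim=-1)  # log of a prior over keys

    # applying the prior in log space, then softmax ...
    probs_a = torch.softmax(torch.log_softmax(scores, dim=-1) + log_prior, dim=-1)

    # ... equals multiplying the probabilities by the prior and renormalizing
    p = torch.softmax(scores, dim=-1) * log_prior.exp()
    probs_b = p / p.sum(dim=-1, keepdim=True)

    assert torch.allclose(probs_a, probs_b, atol=1e-6)
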
diff --git a/nemo/collections/nlp/modules/common/megatron/megatron_decoders.py b/nemo/collections/nlp/modules/common/megatron/megatron_decoders.py
index 712ce10b81b5..d2945a061584 100644
--- a/nemo/collections/nlp/modules/common/megatron/megatron_decoders.py
+++ b/nemo/collections/nlp/modules/common/megatron/megatron_decoders.py
@@ -13,7 +13,7 @@
# limitations under the License.
"""Transformer based language model."""
-from ast import Mod
+from nemo.collections.nlp.modules.common.megatron.layer_type import LayerType
from nemo.collections.nlp.modules.common.megatron.megatron_transformer_decoder import MegatronTransformerDecoderModule
from nemo.collections.nlp.modules.common.megatron.retrieval_transformer import (
MegatronRetrievalTransformerDecoderModule,
@@ -87,7 +87,7 @@ def get_decoder_model(
transformer_block_type="pre_ln",
hidden_steps=-1,
parent_model_type=ModelType.encoder_or_decoder,
- layer_type=None,
+ layer_type=LayerType.decoder,
chunk_size=64,
layer_number_offset=0, # this is use only for attention norm_factor scaling
megatron_legacy=False,
@@ -158,6 +158,7 @@ def get_decoder_model(
moe_dropout=moe_dropout,
position_embedding_type=position_embedding_type,
use_flash_attention=use_flash_attention,
+ layer_type=layer_type,
)
elif arch == "retro":
decoder = MegatronRetrievalTransformerDecoderModule(
diff --git a/nemo/collections/nlp/modules/common/megatron/megatron_encoder_decoder.py b/nemo/collections/nlp/modules/common/megatron/megatron_encoder_decoder.py
index c4192dacb45a..744a6e18c8b1 100644
--- a/nemo/collections/nlp/modules/common/megatron/megatron_encoder_decoder.py
+++ b/nemo/collections/nlp/modules/common/megatron/megatron_encoder_decoder.py
@@ -13,7 +13,6 @@
# limitations under the License.
"""Transformer based language model."""
-from ast import Mod
import torch
@@ -46,8 +45,7 @@
class MegatronTransformerEncoderDecoderModule(MegatronModule):
- """Transformer encoder-decoder model.
- """
+ """Transformer encoder-decoder model."""
def __init__(
self,
@@ -85,6 +83,8 @@ def __init__(
encoder_attn_mask_type = AttnMaskType.padding
elif hasattr(encoder.model, 'self_attn_mask_type'):
encoder_attn_mask_type = encoder.model.self_attn_mask_type
+ elif isinstance(encoder.model, torch.nn.ModuleList) and hasattr(encoder.model[0], 'self_attn_mask_type'):
+ encoder_attn_mask_type = encoder.model[0].self_attn_mask_type
else:
raise AttributeError(
"Could not find an attribute for encoder self_attn_mask_type, make sure it is set when instatiating the encoder or pass it to the constructor of this class."
@@ -142,7 +142,11 @@ def encode(
# apply hidden transformations if needed
if self.hiddens_module is not None:
enc_output = self.hiddens_module.apply_hidden_transforms(
- {"hiddens": enc_output, "hiddens_mask": self.get_hiddens_mask(enc_attn_mask),}, batch_data=batch_data,
+ {
+ "hiddens": enc_output,
+ "hiddens_mask": self.get_hiddens_mask(enc_attn_mask),
+ },
+ batch_data=batch_data,
)
return enc_output
@@ -157,6 +161,11 @@ def decode(
dec_get_key_value=False,
dec_self_attention_relative_position_bias=None,
dec_cross_attention_relative_position_bias=None,
+ return_all_crossattention_probs=False,
+ set_inference_key_value_memory=False,
+ decoder_max_sequence_len=None,
+ encoder_max_sequence_len=None,
+ enc_output_to_layers=None,
):
if self.decoder is None:
raise ValueError(f"Cannot call .decode(...) when self.decoder is None.")
@@ -170,6 +179,11 @@ def decode(
enc_attn_mask=enc_attn_mask,
dec_self_attention_relative_position_bias=dec_self_attention_relative_position_bias,
dec_cross_attention_relative_position_bias=dec_cross_attention_relative_position_bias,
+ return_all_crossattention_probs=return_all_crossattention_probs,
+ set_inference_key_value_memory=set_inference_key_value_memory,
+ decoder_max_sequence_len=decoder_max_sequence_len,
+ encoder_max_sequence_len=encoder_max_sequence_len,
+ enc_output_to_layers=enc_output_to_layers,
)
return dec_output
@@ -191,6 +205,11 @@ def forward(
dec_self_attention_relative_position_bias=None,
dec_cross_attention_relative_position_bias=None,
batch_data=None,
+ return_all_crossattention_probs=False,
+ set_inference_key_value_memory=False,
+ decoder_max_sequence_len=None,
+ encoder_max_sequence_len=None,
+ enc_output_to_layers=None,
):
# encoder
if enc_output is None:
@@ -207,7 +226,10 @@ def forward(
assert self.encoder_hidden_state is not None
enc_output = self.encoder_hidden_state
else:
- enc_attn_mask = enc_output_attn_mask.to(enc_attn_mask)
+ if isinstance(enc_output_attn_mask, list):
+ enc_attn_mask = [mask.to(enc_attn_mask[midx]) for midx, mask in enumerate(enc_output_attn_mask)]
+ else:
+ enc_attn_mask = enc_output_attn_mask.to(enc_attn_mask)
if self.decoder is None or output_enc_hidden_only:
return enc_output
@@ -216,15 +238,22 @@ def forward(
dec_output = self.decode(
dec_input=dec_input,
dec_attn_mask=dec_attn_mask,
- enc_output=enc_output["enc_output"] # enc_output is a dict if we used hidden transformations
- if self.hiddens_module is not None
- else enc_output,
+ enc_output=(
+ enc_output["enc_output"] # enc_output is a dict if we used hidden transformations
+ if self.hiddens_module is not None
+ else enc_output
+ ),
# Adjust encoder attention mask if encoder is a perceiver.
enc_attn_mask=self.get_hiddens_mask(enc_attn_mask),
dec_layer_past=dec_layer_past,
dec_get_key_value=dec_get_key_value,
dec_self_attention_relative_position_bias=dec_self_attention_relative_position_bias,
dec_cross_attention_relative_position_bias=dec_cross_attention_relative_position_bias,
+ return_all_crossattention_probs=return_all_crossattention_probs,
+ set_inference_key_value_memory=set_inference_key_value_memory,
+ decoder_max_sequence_len=decoder_max_sequence_len,
+ encoder_max_sequence_len=encoder_max_sequence_len,
+ enc_output_to_layers=enc_output_to_layers,
)
# if self.hiddens_module is not None enc_output is a dict, else it is a torch.tensor
@@ -246,7 +275,10 @@ def state_dict_for_save_checkpoint(self, destination=None, prefix='', keep_vars=
def load_state_dict(self, state_dict, strict=True):
"""Customized load."""
- self.encoder.load_state_dict(state_dict[self._encoder_key], strict=strict)
- self.decoder.load_state_dict(state_dict[self._decoder_key], strict=strict)
- if self.hiddens_module is not None:
- self.hiddens_module.load_state_dict(state_dict[self._hiddens_module], strict=strict)
+ try:
+ self.encoder.load_state_dict(state_dict[self._encoder_key], strict=strict)
+ self.decoder.load_state_dict(state_dict[self._decoder_key], strict=strict)
+ if self.hiddens_module is not None:
+ self.hiddens_module.load_state_dict(state_dict[self._hiddens_module], strict=strict)
+ except KeyError:
+ super().load_state_dict(state_dict, strict=strict)
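
The ``load_state_dict`` override above first tries the nested layout (separate encoder/decoder sub-dicts) and falls back to the flat module layout on ``KeyError``. A minimal standalone sketch of the pattern, with hypothetical module names:

    import torch

    class TinyEncDec(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.encoder = torch.nn.Linear(4, 4)
            self.decoder = torch.nn.Linear(4, 4)

        def load_state_dict(self, state_dict, strict=True):
            try:
                # nested layout: {'encoder': {...}, 'decoder': {...}}
                self.encoder.load_state_dict(state_dict['encoder'], strict=strict)
                self.decoder.load_state_dict(state_dict['decoder'], strict=strict)
            except KeyError:
                # flat layout: {'encoder.weight': ..., 'decoder.bias': ...}
                super().load_state_dict(state_dict, strict=strict)

    model = TinyEncDec()
    model.load_state_dict(model.state_dict())  # flat layout hits the fallback
    model.load_state_dict({'encoder': model.encoder.state_dict(),
                           'decoder': model.decoder.state_dict()})  # nested layout
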
diff --git a/nemo/collections/nlp/modules/common/megatron/megatron_encoders.py b/nemo/collections/nlp/modules/common/megatron/megatron_encoders.py
index 601eb320e8fc..3d2b2c1ecc13 100644
--- a/nemo/collections/nlp/modules/common/megatron/megatron_encoders.py
+++ b/nemo/collections/nlp/modules/common/megatron/megatron_encoders.py
@@ -14,7 +14,10 @@
"""Transformer based language model."""
from nemo.collections.nlp.modules.common.megatron.megatron_perceiver_encoders import MegatronPerceiverEncoderModule
-from nemo.collections.nlp.modules.common.megatron.megatron_transformer_encoder import MegatronTransformerEncoderModule
+from nemo.collections.nlp.modules.common.megatron.megatron_transformer_encoder import (
+ MegatronTransformerEncoderModule,
+ MultiMegatronTransformerEncoderModule,
+)
from nemo.collections.nlp.modules.common.megatron.retrieval_transformer import (
MegatronRetrievalTransformerEncoderModule,
)
@@ -108,6 +111,7 @@ def get_encoder_model(
version=1, # model version
position_embedding_type='learned_absolute',
use_flash_attention=False,
+ n_transformers=1,
):
"""Build language model and return along with the key to save."""
@@ -167,6 +171,51 @@ def get_encoder_model(
position_embedding_type=position_embedding_type,
use_flash_attention=use_flash_attention,
)
+ elif arch == "multi_transformer":
+ encoder = MultiMegatronTransformerEncoderModule(
+ config=config,
+ n_transformers=n_transformers,
+ init_method=init_method,
+ output_layer_init_method=scaled_init_method,
+ hidden_size=hidden_size,
+ num_layers=num_layers,
+ num_attention_heads=num_attention_heads,
+ apply_query_key_layer_scaling=apply_query_key_layer_scaling,
+ kv_channels=kv_channels,
+ ffn_hidden_size=ffn_hidden_size,
+ encoder_attn_mask_type=encoder_attn_mask_type,
+ pre_process=pre_process,
+ post_process=post_process,
+ hidden_dropout=hidden_dropout,
+ attention_dropout=attention_dropout,
+ ffn_dropout=ffn_dropout,
+ precision=precision,
+ fp32_residual_connection=fp32_residual_connection,
+ activations_checkpoint_method=activations_checkpoint_method,
+ activations_checkpoint_num_layers=activations_checkpoint_num_layers,
+ activations_checkpoint_granularity=activations_checkpoint_granularity,
+ layernorm_epsilon=layernorm_epsilon,
+ bias_activation_fusion=bias_activation_fusion,
+ bias_dropout_add_fusion=bias_dropout_add_fusion,
+ masked_softmax_fusion=masked_softmax_fusion,
+ persist_layer_norm=persist_layer_norm,
+ openai_gelu=openai_gelu,
+ onnx_safe=onnx_safe,
+ activation=activation,
+ bias=bias,
+ normalization=normalization,
+ transformer_block_type=transformer_block_type,
+ headscale=headscale,
+ parent_model_type=parent_model_type,
+ megatron_legacy=megatron_legacy,
+ normalize_attention_scores=normalize_attention_scores,
+ num_moe_experts=num_moe_experts,
+ moe_frequency=moe_frequency,
+ moe_dropout=moe_dropout,
+ position_embedding_type=position_embedding_type,
+ use_flash_attention=use_flash_attention,
+ )
+
elif arch == "retro":
encoder = MegatronRetrievalTransformerEncoderModule(
config=config,
diff --git a/nemo/collections/nlp/modules/common/megatron/megatron_transformer_decoder.py b/nemo/collections/nlp/modules/common/megatron/megatron_transformer_decoder.py
index 4a05a08820e7..14677552492b 100644
--- a/nemo/collections/nlp/modules/common/megatron/megatron_transformer_decoder.py
+++ b/nemo/collections/nlp/modules/common/megatron/megatron_transformer_decoder.py
@@ -52,8 +52,7 @@
class MegatronTransformerDecoderModule(MegatronModule, Exportable, MegatronDecoderModule):
- """Transformer decoder model.
- """
+ """Transformer decoder model."""
def __init__(
self,
@@ -97,6 +96,7 @@ def __init__(
moe_dropout=0.0,
position_embedding_type='learned_absolute',
use_flash_attention=False,
+ layer_type=LayerType.decoder,
):
super(MegatronTransformerDecoderModule, self).__init__(config=config)
@@ -121,7 +121,7 @@ def __init__(
# Transformer.
self.model = ParallelTransformer(
config=config,
- layer_type=LayerType.decoder,
+ layer_type=layer_type,
init_method=self.init_method,
output_layer_init_method=self.output_layer_init_method,
num_layers=self.num_layers,
@@ -165,7 +165,7 @@ def __init__(
self._model_key = 'model'
def set_input_tensor(self, input_tensor):
- """ See megatron.model.transformer.set_input_tensor()"""
+ """See megatron.model.transformer.set_input_tensor()"""
self.model.set_input_tensor(input_tensor)
def forward(
@@ -178,15 +178,41 @@ def forward(
get_key_value=False,
dec_self_attention_relative_position_bias=None,
dec_cross_attention_relative_position_bias=None,
+ return_all_crossattention_probs=False,
+ set_inference_key_value_memory=False,
+ decoder_max_sequence_len=None,
+ encoder_max_sequence_len=None,
+ enc_output_to_layers=None,
):
# convert to Megatron mask
dec_attn_mask_3d = build_attention_mask_3d(
- source_mask=dec_attn_mask, target_mask=dec_attn_mask, attn_mask_type=self.model_attn_mask_type,
- )
- enc_dec_attn_mask_3d = build_attention_mask_3d(
- source_mask=dec_attn_mask, target_mask=enc_attn_mask, attn_mask_type=AttnMaskType.padding,
+ source_mask=dec_attn_mask,
+ target_mask=dec_attn_mask,
+ attn_mask_type=self.model_attn_mask_type,
)
+ if isinstance(enc_output, list):
+ assert len(enc_output) == len(enc_attn_mask)
+ enc_dec_attn_mask_3d = []
+ for i in range(len(enc_output)):
+ enc_dec_attn_mask_3d.append(
+ attn_mask_postprocess(
+ build_attention_mask_3d(
+ source_mask=dec_attn_mask,
+ target_mask=enc_attn_mask[i],
+ attn_mask_type=AttnMaskType.padding,
+ )
+ )
+ )
+ else:
+ enc_dec_attn_mask_3d = attn_mask_postprocess(
+ build_attention_mask_3d(
+ source_mask=dec_attn_mask,
+ target_mask=enc_attn_mask,
+ attn_mask_type=AttnMaskType.padding,
+ )
+ )
+
# transformer decoder
dec_output = self.model(
dec_input,
@@ -194,9 +220,14 @@ def forward(
layer_past=layer_past,
get_key_value=get_key_value,
encoder_output=enc_output,
- enc_dec_attn_mask=attn_mask_postprocess(enc_dec_attn_mask_3d),
+ enc_dec_attn_mask=enc_dec_attn_mask_3d,
self_attention_relative_position_bias=dec_self_attention_relative_position_bias,
cross_attention_relative_position_bias=dec_cross_attention_relative_position_bias,
+ return_all_crossattention_probs=return_all_crossattention_probs,
+ set_inference_key_value_memory=set_inference_key_value_memory,
+ decoder_max_sequence_len=decoder_max_sequence_len,
+ encoder_max_sequence_len=encoder_max_sequence_len,
+ enc_output_to_layers=enc_output_to_layers,
)
return dec_output
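
When ``enc_output`` is a list (the multi-encoder path above), one padding-type 3D mask is built per encoder from the shared decoder mask and that encoder's mask. A standalone sketch of the idea, assuming boolean masks where ``True`` marks valid positions; the real ``build_attention_mask_3d``/``attn_mask_postprocess`` helpers may differ in convention:

    import torch

    def padding_mask_3d(source_mask, target_mask):
        # [b, sq] x [b, sk] -> [b, sq, sk]; True marks attendable pairs (assumed convention)
        return source_mask.unsqueeze(2) & target_mask.unsqueeze(1)

    dec_mask = torch.tensor([[True, True, False]])  # decoder positions, [b, sq]
    enc_masks = [torch.tensor([[True, False]]),     # one mask per encoder, [b, sk]
                 torch.tensor([[True, True]])]

    enc_dec_masks_3d = [padding_mask_3d(dec_mask, m) for m in enc_masks]
    assert all(m.shape == (1, 3, 2) for m in enc_dec_masks_3d)
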
diff --git a/nemo/collections/nlp/modules/common/megatron/megatron_transformer_encoder.py b/nemo/collections/nlp/modules/common/megatron/megatron_transformer_encoder.py
index 7a41e1300066..a9b80868558f 100644
--- a/nemo/collections/nlp/modules/common/megatron/megatron_transformer_encoder.py
+++ b/nemo/collections/nlp/modules/common/megatron/megatron_transformer_encoder.py
@@ -13,6 +13,8 @@
# limitations under the License.
"""Transformer based language model."""
+import torch
+
from nemo.collections.nlp.modules.common.megatron.layer_type import LayerType
from nemo.collections.nlp.modules.common.megatron.megatron_encoder_module import MegatronEncoderModule
from nemo.collections.nlp.modules.common.megatron.module import MegatronModule
@@ -163,7 +165,7 @@ def __init__(
self._model_key = 'model'
def set_input_tensor(self, input_tensor):
- """ See megatron.model.transformer.set_input_tensor()"""
+ """See megatron.model.transformer.set_input_tensor()"""
self.model.set_input_tensor(input_tensor)
def forward(
@@ -173,6 +175,7 @@ def forward(
layer_past=None,
get_key_value=False,
enc_self_attention_relative_position_bias=None,
+ set_inference_key_value_memory=False,
):
# convert to Megatron mask
if self.use_flash_attention:
@@ -180,7 +183,9 @@ def forward(
else:
enc_attn_mask_3d = attn_mask_postprocess(
build_attention_mask_3d(
- source_mask=enc_attn_mask, target_mask=enc_attn_mask, attn_mask_type=self.model_attn_mask_type,
+ source_mask=enc_attn_mask,
+ target_mask=enc_attn_mask,
+ attn_mask_type=self.model_attn_mask_type,
)
)
@@ -192,6 +197,7 @@ def forward(
get_key_value=get_key_value,
self_attention_relative_position_bias=enc_self_attention_relative_position_bias,
cross_attention_relative_position_bias=None,
+ set_inference_key_value_memory=set_inference_key_value_memory,
)
return enc_output
@@ -231,3 +237,214 @@ def load_state_dict(self, state_dict, strict=True):
state_dict_ = state_dict_self_attention
self.model.load_state_dict(state_dict_, strict=strict)
+
+
+class MultiMegatronTransformerEncoderModule(MegatronModule, Exportable, MegatronEncoderModule):
+ """Transformer encoder model."""
+
+ def __init__(
+ self,
+ config: ModelParallelConfig,
+ n_transformers,
+ init_method,
+ output_layer_init_method,
+ hidden_size,
+ ffn_hidden_size,
+ num_layers,
+ num_attention_heads,
+ apply_query_key_layer_scaling=True,
+ kv_channels=None,
+ pre_process=True,
+ post_process=True,
+ encoder_attn_mask_type=AttnMaskType.padding,
+ hidden_dropout=0.1,
+ attention_dropout=0.1,
+ ffn_dropout=0.0,
+ precision=16,
+ fp32_residual_connection=False,
+ activations_checkpoint_method=None,
+ activations_checkpoint_num_layers=1,
+ activations_checkpoint_granularity=None,
+ layernorm_epsilon=1e-5,
+ bias_activation_fusion=True,
+ bias_dropout_add_fusion=True,
+ masked_softmax_fusion=True,
+ persist_layer_norm=False,
+ openai_gelu=False,
+ onnx_safe=False,
+ activation='gelu',
+ bias=True,
+ normalization='layernorm',
+ transformer_block_type='pre_ln',
+ headscale=False,
+ parent_model_type=ModelType.encoder_or_decoder,
+ megatron_legacy=False,
+ normalize_attention_scores=True,
+ num_moe_experts=1,
+ moe_frequency=1,
+ moe_dropout=0.0,
+ position_embedding_type='learned_absolute',
+ use_flash_attention=False,
+ ):
+ super(MultiMegatronTransformerEncoderModule, self).__init__(config=config)
+
+ self.pre_process = pre_process
+ self.post_process = post_process
+ self.hidden_size = hidden_size
+ self.num_layers = num_layers
+ self.init_method = init_method
+ self.model_attn_mask_type = encoder_attn_mask_type
+ self.hidden_dropout = hidden_dropout
+ self.output_layer_init_method = output_layer_init_method
+ self.parent_model_type = parent_model_type
+ self.normalization = normalization
+ self.transformer_block_type = transformer_block_type
+ self.use_flash_attention = use_flash_attention
+
+ if kv_channels is None:
+ assert (
+ hidden_size % num_attention_heads == 0
+ ), 'hidden_size must be divisible by num_attention_heads if kv_channels is None'
+ kv_channels = hidden_size // num_attention_heads
+
+ # Transformer List
+ self.model = []
+ for i in range(n_transformers):
+ transformer = ParallelTransformer(
+ config=config,
+ layer_type=LayerType.encoder,
+ init_method=self.init_method,
+ output_layer_init_method=self.output_layer_init_method,
+ num_layers=self.num_layers,
+ hidden_size=self.hidden_size,
+ num_attention_heads=num_attention_heads,
+ apply_query_key_layer_scaling=apply_query_key_layer_scaling,
+ kv_channels=kv_channels,
+ ffn_hidden_size=ffn_hidden_size,
+ self_attn_mask_type=self.model_attn_mask_type,
+ pre_process=self.pre_process,
+ post_process=self.post_process,
+ precision=precision,
+ fp32_residual_connection=fp32_residual_connection,
+ activations_checkpoint_method=activations_checkpoint_method,
+ activations_checkpoint_num_layers=activations_checkpoint_num_layers,
+ activations_checkpoint_granularity=activations_checkpoint_granularity,
+ layernorm_epsilon=layernorm_epsilon,
+ hidden_dropout=hidden_dropout,
+ attention_dropout=attention_dropout,
+ ffn_dropout=ffn_dropout,
+ bias_activation_fusion=bias_activation_fusion,
+ bias_dropout_add_fusion=bias_dropout_add_fusion,
+ masked_softmax_fusion=masked_softmax_fusion,
+ persist_layer_norm=persist_layer_norm,
+ openai_gelu=openai_gelu,
+ onnx_safe=onnx_safe,
+ activation=activation,
+ bias=bias,
+ normalization=normalization,
+ transformer_block_type=transformer_block_type,
+ headscale=headscale,
+ model_type=parent_model_type,
+ megatron_legacy=megatron_legacy,
+ normalize_attention_scores=normalize_attention_scores,
+ num_moe_experts=num_moe_experts,
+ moe_frequency=moe_frequency,
+ moe_dropout=moe_dropout,
+ position_embedding_type=position_embedding_type,
+ use_flash_attention=use_flash_attention,
+ )
+ self.model.append(transformer)
+
+ self.model = torch.nn.ModuleList(self.model)
+
+ self._model_key = 'model'
+
+ def set_input_tensor(self, input_tensor):
+ """See megatron.model.transformer.set_input_tensor()"""
+ for mi in range(len(self.model)):
+ self.model[mi].set_input_tensor(input_tensor)
+
+ def forward(
+ self,
+ enc_input,
+ enc_attn_mask,
+ layer_past=None,
+ get_key_value=False,
+ enc_self_attention_relative_position_bias=None,
+ set_inference_key_value_memory=False,
+ ):
+
+ assert isinstance(enc_input, list)
+ assert len(enc_input) == len(self.model)
+ assert isinstance(enc_attn_mask, list)
+ assert len(enc_attn_mask) == len(self.model)
+ assert isinstance(enc_self_attention_relative_position_bias, list)
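+        # Inputs are parallel lists: encoder i consumes enc_input[i], enc_attn_mask[i], and
+        # enc_self_attention_relative_position_bias[i], and produces enc_outputs[i].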
+ # convert to Megatron mask
+ enc_outputs = []
+ for encoder_number in range(len(self.model)):
+ enc_input_ = enc_input[encoder_number]
+ enc_attn_mask_ = enc_attn_mask[encoder_number]
+ enc_self_attention_relative_position_bias_ = enc_self_attention_relative_position_bias[encoder_number]
+
+ if self.use_flash_attention:
+ enc_attn_mask_3d = enc_attn_mask_ < 0.5
+ else:
+ enc_attn_mask_3d = attn_mask_postprocess(
+ build_attention_mask_3d(
+ source_mask=enc_attn_mask_,
+ target_mask=enc_attn_mask_,
+ attn_mask_type=self.model_attn_mask_type,
+ )
+ )
+
+ # transformer encoder
+ enc_output = self.model[encoder_number](
+ enc_input_,
+ enc_attn_mask_3d,
+ layer_past=layer_past,
+ get_key_value=get_key_value,
+ self_attention_relative_position_bias=enc_self_attention_relative_position_bias_,
+ cross_attention_relative_position_bias=None,
+ set_inference_key_value_memory=set_inference_key_value_memory,
+ )
+
+ enc_outputs.append(enc_output)
+
+ return enc_outputs
+
+ def state_dict_for_save_checkpoint(self, destination=None, prefix='', keep_vars=False):
+ """For easy load."""
+
+ state_dict_ = {}
+
+        # torch.nn.ModuleList has no state_dict_for_save_checkpoint(); use the regular state_dict.
+        state_dict_[self._model_key] = self.model.state_dict(destination=destination, prefix=prefix, keep_vars=keep_vars)
+
+ return state_dict_
+
+ def load_state_dict(self, state_dict, strict=True):
+ """Customized load."""
+
+ # Encoder.
+ if self._model_key in state_dict:
+ state_dict_ = state_dict[self._model_key]
+ # for backward compatibility.
+ elif 'transformer' in state_dict:
+ state_dict_ = state_dict['transformer']
+ else:
+ # for backward compatibility.
+ state_dict_ = {}
+ for key in state_dict.keys():
+ if 'transformer.' in key:
+ state_dict_[key.split('transformer.')[1]] = state_dict[key]
+
+ # for backward compatibility.
+ state_dict_self_attention = {}
+ for key in state_dict_.keys():
+ if '.attention.' in key:
+ state_dict_self_attention[key.replace(".attention.", ".self_attention.")] = state_dict_[key]
+ else:
+ state_dict_self_attention[key] = state_dict_[key]
+ state_dict_ = state_dict_self_attention
+
+ self.model.load_state_dict(state_dict_, strict=strict)
diff --git a/nemo/collections/nlp/modules/common/megatron/module.py b/nemo/collections/nlp/modules/common/megatron/module.py
index ccd485427c3c..a4efb2992166 100644
--- a/nemo/collections/nlp/modules/common/megatron/module.py
+++ b/nemo/collections/nlp/modules/common/megatron/module.py
@@ -113,7 +113,7 @@ def decoder_cross_attention_relative_position_embeddings_weight(self):
def initialize_word_embeddings(self, init_method, vocab_size, hidden_size):
if not self.share_token_embeddings:
- raise Exception('initialize_word_embeddings() was called but ' 'share_token_embeddings is false')
+ raise Exception('initialize_word_embeddings() was called but share_token_embeddings is false')
# This function just initializes the word embeddings in the final stage
# when we are using pipeline parallelism. If we aren't using pipeline
@@ -140,7 +140,10 @@ def initialize_word_embeddings(self, init_method, vocab_size, hidden_size):
# set word_embeddings weights to 0 here, then copy first
# stage's weights using all_reduce below.
self.word_embeddings = tensor_parallel.VocabParallelEmbedding(
- vocab_size, hidden_size, init_method=init_method, config=self.config,
+ vocab_size,
+ hidden_size,
+ init_method=init_method,
+ config=self.config,
)
self.word_embeddings.weight.data.fill_(0)
self.word_embeddings.weight.shared = True
diff --git a/nemo/collections/nlp/modules/common/megatron/token_level_encoder_decoder.py b/nemo/collections/nlp/modules/common/megatron/token_level_encoder_decoder.py
index b7b377940eb4..e68113949aa7 100644
--- a/nemo/collections/nlp/modules/common/megatron/token_level_encoder_decoder.py
+++ b/nemo/collections/nlp/modules/common/megatron/token_level_encoder_decoder.py
@@ -42,6 +42,7 @@
)
from nemo.collections.nlp.modules.common.megatron.vocab_parallel_cross_entropy import vocab_parallel_cross_entropy
from nemo.core.classes.mixins import adapter_mixins
+from nemo.utils import logging
try:
from apex.transformer.enums import AttnMaskType, ModelType
@@ -67,7 +68,11 @@
HAVE_MEGATRON_CORE = False
-__all__ = ["MegatronTokenLevelHead", "MegatronTokenLevelEncoderDecoderModule"]
+__all__ = [
+ "MegatronTokenLevelHead",
+ "MegatronTokenLevelEncoderDecoderModule",
+ "MegatronTokenLevelEncoderDecoderSpeechLLMModule",
+]
class MegatronTokenLevelHead(MegatronModule):
@@ -252,6 +257,7 @@ def __init__(
moe_dropout=encoder_cfg.get('moe_dropout', 0.0),
position_embedding_type=encoder_cfg.get('position_embedding_type', 'learned_absolute'),
use_flash_attention=encoder_cfg.get('use_flash_attention', False),
+ n_transformers=encoder_cfg.get('n_transformers', 1),
)
if add_decoder:
@@ -388,6 +394,7 @@ def __init__(
moe_dropout=decoder_cfg.get('moe_dropout', 0.0),
position_embedding_type=decoder_cfg.get('position_embedding_type', 'learned_absolute'),
use_flash_attention=decoder_cfg.get('use_flash_attention', False),
+ layer_type=decoder_cfg.get('layer_type', LayerType.decoder),
)
hiddens_module = get_hiddens_module(hiddens_cfg, model_parallel_cfg=config)
@@ -410,6 +417,7 @@ def __init__(
if add_decoder and post_process:
if share_decoder_tokens_head_embeddings:
+            # parallel_output is True when tensor model parallelism is used (e.g., TP > 1 for the 3B model)
self.tokens_head = MegatronTokenLevelHead(
self.word_embeddings_weight().size(0), parallel_output, bias=tokens_head_bias
)
@@ -469,7 +477,7 @@ def _validate_config(self):
return encoder_kv_channels, decoder_kv_channels
def set_input_tensor(self, input_tensor):
- """ See megatron.model.transformer.set_input_tensor()"""
+ """See megatron.model.transformer.set_input_tensor()"""
# This is usually handled in schedules.py but some inference code still
# gives us non-lists or None
@@ -566,7 +574,8 @@ def forward(
if self.add_encoder and self.encoder_relative_position_embedding is not None:
encoder_self_attention_relative_position_bias = self.encoder_relative_position_embedding(
- query_seq_length=enc_seq_length, key_seq_length=enc_seq_length,
+ query_seq_length=enc_seq_length,
+ key_seq_length=enc_seq_length,
)
if output_enc_hidden_only:
@@ -604,8 +613,11 @@ def forward(
query_seq_length=dec_input_ids.size(1), key_seq_length=dec_input_ids.size(1)
)
if not self.decoder_cfg.relative_position_bias_self_attention_only:
- decoder_cross_attention_relative_position_bias = self.decoder_cross_attention_relative_position_embedding(
- query_seq_length=dec_input_ids.size(1), key_seq_length=enc_seq_length,
+ decoder_cross_attention_relative_position_bias = (
+ self.decoder_cross_attention_relative_position_embedding(
+ query_seq_length=dec_input_ids.size(1),
+ key_seq_length=enc_seq_length,
+ )
)
else:
decoder_cross_attention_relative_position_bias = None
@@ -656,7 +668,8 @@ def forward(
# check if hiddens is used
if self.hiddens_cfg is not None:
loss_dict = self.enc_dec_model.hiddens_module.apply_loss_transforms(
- outputs=enc_output, batch_data=batch_data,
+ outputs=enc_output,
+ batch_data=batch_data,
)
loss_dict["tokens_loss"] = tokens_loss
# We need to store default output in a known key, so that we can mimic default behaviour
@@ -708,8 +721,437 @@ def state_dict_for_save_checkpoint(self, destination=None, prefix='', keep_vars=
def load_state_dict(self, state_dict, strict=True):
"""Customized load."""
-
- self.encoder_embedding.encoder_embeddingload_state_dict(state_dict[self._encoder_embedding_key], strict=strict)
+ self.encoder_embedding.load_state_dict(state_dict[self._encoder_embedding_key], strict=strict)
self.decoder_embedding.load_state_dict(state_dict[self._decoder_embedding_key], strict=strict)
self.enc_dec_model.load_state_dict(state_dict[self._enc_dec_model_key], strict=strict)
self.tokens_head.load_state_dict(state_dict[self._tokens_head_key], strict=strict)
+
+
+class MegatronTokenLevelEncoderDecoderSpeechLLMModule(MegatronTokenLevelEncoderDecoderModule):
+ def __init__(self, *args, **kwargs):
+ super(MegatronTokenLevelEncoderDecoderSpeechLLMModule, self).__init__(*args, **kwargs)
+ # Overridden in MegatronT5SpeechLMModel constructor
+ self.seq_pattern = "parallel"
+ self.speech_head_type = "token_level"
+ self.attn_prior_scaledown_start_step = 10000
+ self.attn_prior_end_step = 11000
+ self.use_alignment_loss = False
+ self.return_all_crossattention_probs = False
+ self.logging_step = False
+ self.num_cross_attention_heads = 12 # 12 for 220m T5, 16 for 11b T5
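+        # When set, maps each encoder output to the decoder layer indices that cross-attend to it
+        # (consumed by ParallelTransformer.forward via enc_output_to_layers).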
+ self.enc_output_to_layers = None
+
+ def get_decoder_embeddings(self, dec_input_ids, dec_position_ids, token_type_ids):
+ if dec_input_ids.dim() <= 2:
+ dec_input = self.decoder_embedding(dec_input_ids, dec_position_ids, token_type_ids=token_type_ids)
+ else:
+ dec_input = None
+ for i in range(dec_input_ids.size()[1]):
+ if i == 0:
+ # For the first channel (text + first layer of speech), use the decoder embedding layer
+ dec_input = self.decoder_embedding(
+ dec_input_ids[:, i, :], dec_position_ids, token_type_ids=token_type_ids
+ )
+ else:
+                    # For the remaining channels (speech), use the speech embedding layers. No position embeddings are needed; they were already added with the first channel.
+ current = self.speech_tokens_embeddings[i - 1](dec_input_ids[:, i, :]).permute(1, 0, 2)
+ # @pneekhara - Commenting the below because we always want to include all channels for speech.
+ # @pneekhara - include_channel_flag can become 0 when doing autoregressive inference and the first timestep is zeros
+ # For text inputs, only include 1st channel embeddings. Zero-out others.
+ # include_channel_flag = (torch.sum(dec_input_ids[:, i, :], dim=1) > 0).float() # [B]
+ # current = current * include_channel_flag.unsqueeze(0).unsqueeze(2)
+ dec_input = dec_input + current
+
+ return dec_input
+
+ def forward(
+ self,
+ enc_input_ids=None,
+ enc_attn_mask=None,
+ dec_input_ids=None,
+ dec_attn_mask=None,
+ token_type_ids=None,
+ labels=None,
+ batch_data=None, # additional data to be passed to hiddens module
+ enc_output=None, # Result of running the entire encoder
+ enc_output_attn_mask=None,
+ enc_input=None, # Result of running encoder embedding only
+ output_enc_hidden_only=False,
+ speech_mask=None,
+ cross_attention_prior=None,
+ text_limits=None,
+ global_step=None,
+ set_inference_key_value_memory=False,
+ decoder_max_sequence_len=None,
+ encoder_max_sequence_len=None,
+ ):
+ """
+        Return value is per token / per dimension (i.e., the non-collapsed loss value)
+ """
+ (
+ encoder_self_attention_relative_position_bias,
+ decoder_self_attention_relative_position_bias,
+ decoder_cross_attention_relative_position_bias,
+ ) = (None, None, None)
+
+ if enc_input is not None and enc_output is not None:
+ raise ValueError(
+                """Only one of enc_input and enc_output may be provided:
+                enc_input is the output of the encoder embedding layer;
+                enc_output is the output of running the entire transformer encoder."""
+ )
+
+ # In order of precedence, we use enc_output, enc_input, and then enc_input_ids to determine the encoder sequence length.
+ if enc_output is not None:
+ # If enc_output is provided in `batch_for_pipeline`, we need to transpose it from [B x S x H] -> [S x B x H].
+ if isinstance(enc_output, list):
+ encoder_self_attention_relative_position_bias = [None for _ in enc_output]
+ enc_output = [x.transpose(0, 1) for x in enc_output]
+ enc_seq_length = [x.size(0) for x in enc_output]
+ else:
+ enc_output = enc_output.transpose(0, 1)
+ enc_seq_length = enc_output.size(0)
+ elif enc_input is not None:
+ # If enc_input is provided, we need to transpose it from [B x S x H] -> [S x B x H].
+ if isinstance(enc_input, list):
+ encoder_self_attention_relative_position_bias = [None for _ in enc_input]
+ enc_input = [x.transpose(0, 1) for x in enc_input]
+ enc_seq_length = [x.size(0) for x in enc_input]
+ else:
+ enc_input = enc_input.transpose(0, 1)
+ enc_seq_length = enc_input.size(0)
+ # Only need to run encoder embedding and position ids if enc_input or enc_output is not provided.
+ elif enc_input_ids is not None:
+ enc_seq_length = enc_input_ids.size(1)
+ if self.pre_process and self.add_encoder:
+ # We don't need position ids for RPE, because the embedding layer does not have position embeddings.
+ if self.encoder_relative_position_embedding is None:
+ enc_input_ids_p = enc_input_ids[:, 0, :] if enc_input_ids.dim() == 3 else enc_input_ids
+ enc_position_ids = build_position_ids(enc_input_ids_p)
+ else:
+ enc_position_ids = None
+ enc_input = self.encoder_embedding(enc_input_ids, enc_position_ids, token_type_ids=token_type_ids)
+ if self.is_adapter_available():
+ _sq, _bs, _hs = enc_input.size()
+ ptuning_adapter = self.get_adapter_module(AdapterName.PTUNING_ADAPTER)
+                    v = ptuning_adapter.virtual_tokens if ptuning_adapter else 0
+                    if (
+                        ptuning_adapter and _sq >= v
+                    ):  # The sequence should be longer than v to insert virtual embeddings.
+ virtual_embeddings = ptuning_adapter(_bs)
+ enc_input = enc_input[
+ v:, :, :
+ ] # the first v tokens are pads so that they can be swapped out with virtual embeddings.
+ enc_input = torch.concat([virtual_embeddings, enc_input], dim=0)
+ else:
+ enc_input = None
+ else:
+ # This should only happen with PP > 1 for enc-dec prompt learning models
+ enc_seq_length = enc_attn_mask.size(1)
+
+ if self.add_encoder and self.encoder_relative_position_embedding is not None:
+ encoder_self_attention_relative_position_bias = self.encoder_relative_position_embedding(
+ query_seq_length=enc_seq_length,
+ key_seq_length=enc_seq_length,
+ )
+
+ if output_enc_hidden_only:
+            # When pipeline parallel > 1, make sure the encoder exists (it will be missing on decoder-only ranks)
+ # SpeechT5 should not go here for inference
+ if enc_output is None and self.enc_dec_model.encoder is not None:
+ enc_output = self.enc_dec_model.encode(
+ enc_input=enc_input,
+ enc_attn_mask=enc_attn_mask,
+ enc_layer_past=None,
+ enc_get_key_value=False,
+ enc_self_attention_relative_position_bias=encoder_self_attention_relative_position_bias,
+ batch_data=batch_data,
+ )
+ else:
+ enc_output = self.enc_dec_model.encoder_hidden_state
+
+ return enc_output
+ else:
+ if enc_output_attn_mask is None:
+ enc_output_attn_mask = enc_attn_mask
+
+ if self.pre_process and self.add_decoder:
+ # We don't need position ids for RPE, because the embedding layer does not have position embeddings.
+ if self.decoder_relative_position_embedding is None:
+ dec_input_ids_p = dec_input_ids[:, 0, :] if dec_input_ids.dim() == 3 else dec_input_ids
+ dec_position_ids = build_position_ids(dec_input_ids_p)
+ else:
+ dec_position_ids = None
+ dec_input = self.get_decoder_embeddings(dec_input_ids, dec_position_ids, token_type_ids)
+ if not set_inference_key_value_memory and (decoder_max_sequence_len or encoder_max_sequence_len):
+                    # Inference: on step 0 (set_inference_key_value_memory=True) keep all inputs,
+                    # since a decoder context may be used; on steps >= 1 only the last input is needed.
+                    logging.debug("Clipping dec_input and keeping only the last input.")
+                    dec_input = dec_input[-1, :, :].unsqueeze(0)  # shape (1, b, embed_dim)
+ else:
+ # Note: This is when the decoder itself is split across PP ranks.
+ dec_input = None
+
+ if self.add_decoder and self.decoder_relative_position_embedding is not None:
+ decoder_self_attention_relative_position_bias = self.decoder_relative_position_embedding(
+ query_seq_length=dec_input_ids.size(1), key_seq_length=dec_input_ids.size(1)
+ )
+ if not self.decoder_cfg.relative_position_bias_self_attention_only:
+ decoder_cross_attention_relative_position_bias = (
+ self.decoder_cross_attention_relative_position_embedding(
+ query_seq_length=dec_input_ids.size(1),
+ key_seq_length=enc_seq_length,
+ )
+ )
+ else:
+ decoder_cross_attention_relative_position_bias = None
+
+ return_all_crossattention_probs = self.return_all_crossattention_probs
+ single_encoder = False
+ if not isinstance(cross_attention_prior, list):
+ single_encoder = True
+ cross_attention_prior = [cross_attention_prior]
+
+ decoder_cross_attention_relative_position_bias = []
+ for _cross_attention_prior in cross_attention_prior:
+ _decoder_cross_attention_relative_position_bias = None
+ if _cross_attention_prior is not None:
+ # cross_attention_prior shape [B, dec_len, enc_len]
+                    # Repeat it to [B, num_cross_attention_heads, dec_len, enc_len]
+ attn_prior_end_step = self.attn_prior_end_step
+ attn_prior_scaledown_start_step = self.attn_prior_scaledown_start_step
+ num_attention_heads = self.num_cross_attention_heads
+ assert attn_prior_scaledown_start_step <= attn_prior_end_step
+ logging.debug(
+                        f"attn_prior_scaledown_start_step: {attn_prior_scaledown_start_step}, attn_prior_end_step: {attn_prior_end_step}"
+ )
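+                    # Prior annealing schedule: keep the full prior until attn_prior_scaledown_start_step,
+                    # then blend it linearly toward an all-ones (uniform) prior until attn_prior_end_step,
+                    # after which no prior is applied; log() turns the prior into an additive attention bias.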
+ if global_step >= attn_prior_end_step:
+ _decoder_cross_attention_relative_position_bias = None
+ elif global_step > attn_prior_scaledown_start_step and global_step < attn_prior_end_step:
+ total_annealing_steps = attn_prior_end_step - attn_prior_scaledown_start_step
+ curr_annealing_step = global_step - attn_prior_scaledown_start_step
+ curr_cross_attention_prior = _cross_attention_prior + (
+ (1.0 - _cross_attention_prior) * curr_annealing_step / total_annealing_steps
+ )
+ _decoder_cross_attention_relative_position_bias = curr_cross_attention_prior.unsqueeze(
+ 1
+ ).repeat(1, num_attention_heads, 1, 1)
+ _decoder_cross_attention_relative_position_bias = torch.log(
+ _decoder_cross_attention_relative_position_bias + 1e-8
+ )
+ else:
+ _decoder_cross_attention_relative_position_bias = _cross_attention_prior.unsqueeze(1).repeat(
+ 1, num_attention_heads, 1, 1
+ )
+ _decoder_cross_attention_relative_position_bias = torch.log(
+ _decoder_cross_attention_relative_position_bias + 1e-8
+ )
+ decoder_cross_attention_relative_position_bias.append(_decoder_cross_attention_relative_position_bias)
+
+ return_all_crossattention_probs = return_all_crossattention_probs or self.logging_step
+
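+            # If the caller passed a single prior (not a list), unwrap back to a single bias tensor.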
+ if single_encoder:
+ decoder_cross_attention_relative_position_bias = decoder_cross_attention_relative_position_bias[0]
+
+ output = self.enc_dec_model(
+ enc_input=enc_input,
+ enc_attn_mask=enc_attn_mask,
+ dec_input=dec_input,
+ dec_attn_mask=dec_attn_mask,
+ enc_layer_past=None,
+ enc_get_key_value=False,
+ enc_output=enc_output,
+ enc_output_attn_mask=enc_output_attn_mask,
+ dec_layer_past=None,
+ dec_get_key_value=False,
+ enc_self_attention_relative_position_bias=encoder_self_attention_relative_position_bias,
+ dec_self_attention_relative_position_bias=decoder_self_attention_relative_position_bias,
+ dec_cross_attention_relative_position_bias=decoder_cross_attention_relative_position_bias,
+ return_all_crossattention_probs=return_all_crossattention_probs,
+ batch_data=batch_data,
+ set_inference_key_value_memory=set_inference_key_value_memory,
+ decoder_max_sequence_len=decoder_max_sequence_len,
+ encoder_max_sequence_len=encoder_max_sequence_len,
+ enc_output_to_layers=self.enc_output_to_layers,
+ )
+
+ alignment_loss = None
+ if self.post_process and self.add_decoder:
+ dec_output, enc_output = output # [s, b, h]
+ if return_all_crossattention_probs:
+ dec_output, attention_scores = dec_output
+ attention_probs = [
+ torch.softmax(attention_score, dim=-1)
+ for lidx, attention_score in enumerate(attention_scores)
+ if lidx in self.alignment_decoder_layerids
+ ]
+
+ if text_limits is not None and self.use_alignment_loss and hasattr(self, "forward_sum_loss"):
+ attention_scores_filtered = [
+ attention_scores[lidx] for lidx in self.alignment_decoder_layerids
+ ]
+ attention_scores_combined = torch.cat(attention_scores_filtered, dim=1)
+ text_start_idx = text_limits[0, 0].item()
+ assert torch.all(
+ text_limits[:, 0] == text_start_idx
+ ) # all texts should start at the same index
+ end_offset = self.alignment_text_end_offset
+                        # align_every_n_head: e.g., if set to 2, use every other head;
+                        # if set to 12, select one head from every layer
+ align_every_n_head = self.align_every_n_head
+ dec_start_idx = self.decoder_context_len + 1 # +1 to remove bos
+ attention_scores_sliced = attention_scores_combined[
+ :, ::align_every_n_head, dec_start_idx:, text_start_idx : -(2 + end_offset)
+ ] # -2 to remove eos and pad
+ attention_logprobs = (
+ attention_scores_sliced # not taking log_softmax, since we will do that in loss function
+ )
+ attention_logprobs = torch.mean(attention_logprobs, dim=1, keepdim=True)
+ dec_len = torch.sum(dec_attn_mask, dim=1) - dec_start_idx
+ enc_len = text_limits[:, 1] - text_limits[:, 0] - end_offset
+ alignment_loss = self.forward_sum_loss(
+ attn_logprob=attention_logprobs, in_lens=enc_len, out_lens=dec_len
+ )
+ else:
+ attention_probs = None
+ # project decoder output to vocabulary-size dimensions
+ if self.share_decoder_tokens_head_embeddings:
+ first_layer_vocabsize = (
+ self.speech_offset + self.speech_codebook_size
+ ) # variables set in __init__ of speechlm model
+ token_logits = self.tokens_head(dec_output, self.word_embeddings_weight()) # s, b, vocab
+ if self.seq_pattern in ["parallel", "delay_parallel"]:
+ # For flat seq_pattern we need all the logits
+ token_logits = token_logits[:, :, :first_layer_vocabsize]
+ speech_layers = self.num_speech_codebooks - 1
+
+ # speech_logits_list will be used in loss calculation (parallel output)
+ speech_logits_list = []
+ if self.seq_pattern in ["parallel", "delay_parallel"] and torch.count_nonzero(speech_mask) > 0:
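+                        # One projection head per remaining codebook; each produces [T, B, codebook_size] logits.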
+ for i in range(speech_layers):
+ last_layer_logits = self.speech_tokens_heads[i](dec_output)[0] # T, B, 1024
+ speech_logits_list.append(last_layer_logits) # T, B, 1024
+ else:
+ token_logits = self.tokens_head(dec_output)[0] # T, B, WordEmbSize
+
+ if labels is not None:
+ if labels.dim() == 2:
+ # [b, s] -> [s, b]
+ labels = labels.transpose(0, 1).contiguous()
+ elif labels.dim() == 3:
+ # [b, c, s] -> [c, s, b]
+ labels = labels.permute(1, 2, 0).contiguous()
+
+ # Set label smoothing to 0 if in eval mode.
+ label_smoothing = self.label_smoothing if self.training else 0.0
+
+                    # tensor_parallel.vocab_parallel_cross_entropy performs log_softmax and returns log p(x_i|z) per token i
+ if self.fp16_cross_entropy:
+ assert token_logits.dtype == torch.half
+ if labels.dim() == 3:
+                            raise NotImplementedError("fp16_cross_entropy is not supported for labels of dimension 3")
+ tokens_loss = vocab_parallel_cross_entropy(token_logits, labels, label_smoothing)
+ else:
+ if labels.dim() == 2:
+ tokens_loss = vocab_parallel_cross_entropy(token_logits.float(), labels, label_smoothing)
+ elif labels.dim() == 3:
+ if token_logits.size()[0] != labels[0, :, :].size()[0]:
+                                raise Exception("token_logits and labels have mismatched time dimensions; a permute is required but not implemented")
+ tokens_loss = vocab_parallel_cross_entropy(
+ token_logits.float(), labels[0, :, :], label_smoothing
+ )
+ logging.debug(f"token_loss: {tokens_loss}")
+                            logging.debug(f"token_loss is finite: {torch.all(torch.isfinite(tokens_loss))}")
+ if (
+ self.seq_pattern in ["parallel", "delay_parallel"]
+ and torch.count_nonzero(speech_mask) > 0
+ ):
+ for i in range(speech_layers):
+ if speech_logits_list[i].size()[0] != labels[i + 1, :, :].size()[0]:
+                                    raise Exception("speech_logits and labels have mismatched time dimensions; a permute is required but not implemented")
+ curr_codebook_loss = (
+ vocab_parallel_cross_entropy(
+ speech_logits_list[i].float(), labels[i + 1, :, :], label_smoothing
+ )
+ * speech_mask.T
+ )
+ tokens_loss += curr_codebook_loss
+ logging.debug(f"token_loss_{i}: {tokens_loss}")
+                                logging.debug(f"token_loss_{i} is finite: {torch.all(torch.isfinite(tokens_loss))}")
+
+ # [s, b] -> [b, s]
+ tokens_loss = tokens_loss.transpose(0, 1).contiguous()
+
+ # check if hiddens is used
+ if self.hiddens_cfg is not None:
+ raise NotImplementedError("Not currently implemented for speechllm")
+ else:
+ return tokens_loss, [token_logits, speech_logits_list, attention_probs, alignment_loss]
+ else:
+ # else return token logits (and hiddens if needed)
+ # [s, b, h] -> [b, s, h]
+ # If labels is None then we are in inference mode and we return the gathered logits
+ if self.parallel_output:
+ # Gather logits from tensor parallel if in parallel_output mode
+ token_logits = tensor_parallel.gather_from_tensor_model_parallel_region(
+ token_logits
+ ) # T, B, 30208
+ for _i in range(len(speech_logits_list)):
+ speech_logits_list[_i] = tensor_parallel.gather_from_tensor_model_parallel_region(
+ speech_logits_list[_i]
+ ) # T, B, 1024
+
+ token_logits = token_logits.transpose(0, 1).contiguous() # (B, T, 30208)
+ speech_logits = torch.stack(speech_logits_list, dim=-1) # T, B, 1024, 7
+ speech_logits = speech_logits.transpose(0, 1).contiguous() # (B, T, 1024, 7)
+
+ _si = self.speech_offset
+ _ei = _si + self.speech_codebook_size
+                    first_layer_speech_logits = token_logits[:, :, _si:_ei].unsqueeze(-1)  # (b, s, 1024, 1)
+
+ all_speech_logits = torch.cat(
+ [first_layer_speech_logits, speech_logits], dim=-1
+ ) # (b, s, 1024, 8)
+
+ if self.hiddens_cfg is not None:
+ raise NotImplementedError("Not currently implemented for speechllm")
+ else:
+ # all_speech_logits: tensor, (b, s, 1024, 8), all layers of speech.
+ # token_logits: tensor, (b, s, vocab_size), text token logits.
+ # speech_logits: tensor, (b, s, 1024, 7), 1-7 layers of speech.
+ # attention_probs: tensor or None, (b, s, )
+ # enc_output: tensor, (virtual_token_len+context_token_len+question_token_len+extra_id_0+[SEP], b, )
+ return all_speech_logits, [token_logits, speech_logits, attention_probs, enc_output]
+
+ elif self.add_decoder and not self.add_encoder:
+ decoder_output, _ = output
+ return decoder_output
+ else:
+ encoder_output = output
+ return encoder_output
+
+ def state_dict(self):
+ """For easy load when model is combined with other heads,
+ add an extra key."""
+
+ state_dict_ = {}
+ state_dict_[self._encoder_embedding_key] = self.encoder_embedding.state_dict()
+ state_dict_[self._decoder_embedding_key] = self.decoder_embedding.state_dict()
+ state_dict_[self._enc_dec_model_key] = self.enc_dec_model.state_dict()
+ state_dict_[self._tokens_head_key] = self.tokens_head.state_dict()
+ if hasattr(self, "speech_tokens_heads"):
+ state_dict_["speech_tokens_heads"] = self.speech_tokens_heads.state_dict()
+ if hasattr(self, "speech_tokens_embeddings"):
+ state_dict_["speech_tokens_embeddings"] = self.speech_tokens_embeddings.state_dict()
+ return state_dict_
+
+ def load_state_dict(self, state_dict, strict=True):
+ """Customized load."""
+ super().load_state_dict(state_dict, strict=strict)
+ if hasattr(self, "speech_tokens_heads"):
+ self.speech_tokens_heads.load_state_dict(state_dict["speech_tokens_heads"], strict=strict)
+ if hasattr(self, "speech_tokens_embeddings"):
+ self.speech_tokens_embeddings.load_state_dict(state_dict["speech_tokens_embeddings"], strict=strict)
diff --git a/nemo/collections/nlp/modules/common/megatron/transformer.py b/nemo/collections/nlp/modules/common/megatron/transformer.py
index ab10b0d0e8b3..c5108d8e3801 100644
--- a/nemo/collections/nlp/modules/common/megatron/transformer.py
+++ b/nemo/collections/nlp/modules/common/megatron/transformer.py
@@ -22,6 +22,7 @@
import torch
import torch.nn as nn
from einops import rearrange
+from omegaconf.listconfig import ListConfig
from nemo.collections.common.parts.adapter_modules import LinearAdapterConfig
from nemo.collections.nlp.modules.common.megatron.adapters.parallel_adapters import (
@@ -479,6 +480,10 @@ def forward(
self_attention_relative_position_bias=None,
cross_attention_relative_position_bias=None,
checkpoint_core_attention=False,
+ return_crossattention_scores=False,
+ return_selfattention_scores=False,
+ decoder_max_sequence_len=None,
+ encoder_max_sequence_len=None,
):
# Self attention.
if rotary_pos_emb is not None:
@@ -489,6 +494,12 @@ def forward(
self_attention_pos_emb = None
cross_attention_pos_emb = None
+ if return_crossattention_scores and return_selfattention_scores:
+ raise NotImplementedError(
+                "Only one of cross-attention scores and self-attention scores can be returned, not both (yet)."
+ )
+ attention_probs = None
+
if self.layer_type != LayerType.retrieval_decoder_after_self_attn:
# hidden_states: [b, s, h]
@@ -507,12 +518,16 @@ def forward(
layer_past=layer_past,
get_key_value=get_key_value,
set_inference_key_value_memory=set_inference_key_value_memory,
- inference_max_sequence_len=inference_max_sequence_len,
+ inference_max_sequence_len=inference_max_sequence_len or decoder_max_sequence_len,
rotary_pos_emb=self_attention_pos_emb,
relative_position_bias=self_attention_relative_position_bias,
checkpoint_core_attention=checkpoint_core_attention,
+ return_scores=return_selfattention_scores,
)
+ if return_selfattention_scores:
+ attention_output, attention_probs = attention_output
+
if get_key_value:
attention_output, presents = attention_output
@@ -526,7 +541,7 @@ def forward(
attention_bias = None
# jit scripting for a nn.module (with dropout) is not
- # trigerring the fusion kernel. For now, we use two
+ # triggering the fusion kernel. For now, we use two
# different nn.functional routines to account for varying
# dropout semantics during training and inference phases.
@@ -553,6 +568,9 @@ def forward(
elif self.transformer_block_type in ['pre_ln', 'normformer']:
# Layer norm post the self attention.
normalization_output = self.post_attention_layernorm(layernorm_input)
+ else:
+ normalization_output = None
+            logging.warning(f"normalization_output is None for transformer_block_type '{self.transformer_block_type}'; this is a rare case.")
else:
layernorm_input, normalization_output = hidden_states
@@ -579,7 +597,7 @@ def forward(
checkpoint_core_attention=checkpoint_core_attention,
)
else:
-
+            # return_scores is passed only to inter_attention here, not to self-attention
attention_output, attention_bias = self.inter_attention(
normalization_output,
enc_dec_attn_mask,
@@ -587,7 +605,12 @@ def forward(
rotary_pos_emb=cross_attention_pos_emb,
relative_position_bias=cross_attention_relative_position_bias,
checkpoint_core_attention=checkpoint_core_attention,
+ return_scores=return_crossattention_scores,
+ set_inference_key_value_memory=set_inference_key_value_memory,
+ inference_max_sequence_len=encoder_max_sequence_len,
)
+ if return_crossattention_scores:
+ attention_output, attention_probs = attention_output
# If normformer, apply norm on the output of the self attention.
if self.transformer_block_type == 'normformer':
@@ -632,6 +655,9 @@ def forward(
if get_key_value:
output = [output, presents]
+ if attention_probs is not None:
+ output = [output, attention_probs]
+
return output
@@ -735,6 +761,10 @@ def forward(
self_attention_relative_position_bias=None,
cross_attention_relative_position_bias=None,
checkpoint_core_attention=False,
+ return_crossattention_scores=False,
+ return_selfattention_scores=False,
+ decoder_max_sequence_len=None,
+ encoder_max_sequence_len=None,
):
if self.dtype == torch.float32:
return super().forward(
@@ -750,6 +780,10 @@ def forward(
self_attention_relative_position_bias,
cross_attention_relative_position_bias,
checkpoint_core_attention,
+ return_crossattention_scores=return_crossattention_scores,
+ return_selfattention_scores=return_selfattention_scores,
+ decoder_max_sequence_len=decoder_max_sequence_len,
+ encoder_max_sequence_len=encoder_max_sequence_len,
)
with torch.autocast(device_type="cuda", dtype=self.dtype):
return super().forward(
@@ -765,6 +799,10 @@ def forward(
self_attention_relative_position_bias,
cross_attention_relative_position_bias,
checkpoint_core_attention,
+ return_crossattention_scores=return_crossattention_scores,
+ return_selfattention_scores=return_selfattention_scores,
+ decoder_max_sequence_len=decoder_max_sequence_len,
+ encoder_max_sequence_len=encoder_max_sequence_len,
)
@@ -1072,10 +1110,12 @@ def __init__(
# Transformer layers.
def build_layer(layer_number):
- if isinstance(layer_type, list):
+ if isinstance(layer_type, (list, ListConfig)):
lt = layer_type[layer_number - 1]
else:
lt = layer_type
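+            # Layer types loaded from an OmegaConf config may be plain ints; coerce them to the LayerType enum.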
+ if isinstance(lt, int):
+ lt = LayerType(lt)
if self.transformer_engine:
transformer_layer_args = {
@@ -1493,7 +1533,16 @@ def forward(
self_attention_relative_position_bias=None,
cross_attention_relative_position_bias=None,
checkpoint_activations_all_layers=None,
+ return_all_crossattention_probs=False,
+ return_all_selfattention_probs=False,
+ decoder_max_sequence_len=None,
+ encoder_max_sequence_len=None,
+ enc_output_to_layers=None,
):
+ if return_all_crossattention_probs and return_all_selfattention_probs:
+ raise NotImplementedError(
+            "Only one of cross-attention probs and self-attention probs can be returned, not both (yet)."
+ )
# Checks.
if inference_max_sequence_len:
assert self.activations_checkpoint_method is None, 'inference does not work with activation checkpointing'
@@ -1580,6 +1629,7 @@ def forward(
if self.inference_params != None:
self.inference_params.sequence_len_offset = self.inference_current_sequence_len
+ attention_probs_list = []
if self.return_select_layer < 0:
assert (
parallel_state.get_pipeline_model_parallel_world_size() == 1
@@ -1588,10 +1638,32 @@ def forward(
logging.warning("Returning embeddings states only!")
return hidden_states
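+        # With multiple encoders, enc_output_to_layers lists, for each encoder output, the layer
+        # indices that should cross-attend to it; invert the mapping so each layer can find its encoder.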
+ layer_to_encoder_num_mapping = {}
+ if enc_output_to_layers is not None:
+ assert len(enc_output_to_layers) == len(encoder_output)
+ for encoder_idx in range(len(encoder_output)):
+ for layer_idx in enc_output_to_layers[encoder_idx]:
+ layer_to_encoder_num_mapping[layer_idx] = encoder_idx
+
for index in range(self.num_layers):
layer = self._get_layer(index)
past = None
+ _encoder_output = encoder_output
+ _enc_dec_attn_mask = enc_dec_attn_mask
+ _cross_attention_relative_position_bias = cross_attention_relative_position_bias
+ _encoder_max_sequence_len = encoder_max_sequence_len
+ if index in layer_to_encoder_num_mapping:
+ _encoder_output = encoder_output[layer_to_encoder_num_mapping[index]]
+ _enc_dec_attn_mask = enc_dec_attn_mask[layer_to_encoder_num_mapping[index]]
+ _cross_attention_relative_position_bias = cross_attention_relative_position_bias[
+ layer_to_encoder_num_mapping[index]
+ ]
+ if encoder_max_sequence_len is not None:
+ _encoder_max_sequence_len = encoder_max_sequence_len[
+ layer_to_encoder_num_mapping[index]
+ ]
+
if layer_past is not None:
past = layer_past[index]
@@ -1625,27 +1697,65 @@ def forward(
hidden_states = layer(
hidden_states,
attention_mask,
- encoder_output=encoder_output,
- enc_dec_attn_mask=enc_dec_attn_mask,
+ encoder_output=_encoder_output,
+ enc_dec_attn_mask=_enc_dec_attn_mask,
inference_params=self.inference_params,
is_first_microbatch=is_first_microbatch,
checkpoint_core_attention=checkpoint_core_attention,
)
else:
- hidden_states = layer(
- hidden_states,
- attention_mask,
- encoder_output=encoder_output,
- enc_dec_attn_mask=enc_dec_attn_mask,
- layer_past=past,
- get_key_value=get_key_value,
- set_inference_key_value_memory=set_inference_key_value_memory,
- inference_max_sequence_len=inference_max_sequence_len,
- rotary_pos_emb=rotary_pos_emb,
- self_attention_relative_position_bias=self_attention_relative_position_bias,
- cross_attention_relative_position_bias=cross_attention_relative_position_bias,
- checkpoint_core_attention=checkpoint_core_attention,
- )
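+                    # Decoder layers may additionally return cross-attention scores and encoder
+                    # layers self-attention scores; otherwise fall through to a plain layer call.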
+ if layer.layer_type == LayerType.decoder and return_all_crossattention_probs:
+ hidden_states, attention_probs = layer(
+ hidden_states,
+ attention_mask,
+ encoder_output=_encoder_output,
+ enc_dec_attn_mask=_enc_dec_attn_mask,
+ layer_past=past,
+ set_inference_key_value_memory=set_inference_key_value_memory,
+ inference_max_sequence_len=inference_max_sequence_len,
+ rotary_pos_emb=rotary_pos_emb,
+ self_attention_relative_position_bias=self_attention_relative_position_bias,
+ cross_attention_relative_position_bias=_cross_attention_relative_position_bias,
+ checkpoint_core_attention=checkpoint_core_attention,
+ return_crossattention_scores=return_all_crossattention_probs,
+ decoder_max_sequence_len=decoder_max_sequence_len,
+ encoder_max_sequence_len=_encoder_max_sequence_len,
+ )
+ attention_probs_list.append(attention_probs)
+ elif layer.layer_type == LayerType.encoder and return_all_selfattention_probs:
+ hidden_states, attention_probs = layer(
+ hidden_states,
+ attention_mask,
+ encoder_output=_encoder_output,
+ enc_dec_attn_mask=_enc_dec_attn_mask,
+ layer_past=past,
+ get_key_value=get_key_value,
+ set_inference_key_value_memory=set_inference_key_value_memory,
+ inference_max_sequence_len=inference_max_sequence_len,
+ rotary_pos_emb=rotary_pos_emb,
+ self_attention_relative_position_bias=self_attention_relative_position_bias,
+ cross_attention_relative_position_bias=_cross_attention_relative_position_bias,
+ checkpoint_core_attention=checkpoint_core_attention,
+ return_selfattention_scores=return_all_selfattention_probs,
+ )
+ attention_probs_list.append(attention_probs)
+ else:
+ hidden_states = layer(
+ hidden_states,
+ attention_mask,
+ encoder_output=_encoder_output,
+ enc_dec_attn_mask=_enc_dec_attn_mask,
+ layer_past=past,
+ get_key_value=get_key_value,
+ set_inference_key_value_memory=set_inference_key_value_memory,
+ inference_max_sequence_len=inference_max_sequence_len,
+ rotary_pos_emb=rotary_pos_emb,
+ self_attention_relative_position_bias=self_attention_relative_position_bias,
+ cross_attention_relative_position_bias=_cross_attention_relative_position_bias,
+ checkpoint_core_attention=checkpoint_core_attention,
+ decoder_max_sequence_len=decoder_max_sequence_len,
+ encoder_max_sequence_len=_encoder_max_sequence_len,
+ )
if self.return_select_layer < 0:
assert (
@@ -1679,4 +1789,7 @@ def forward(
if get_key_value:
output = [output, presents]
+ if return_all_crossattention_probs or return_all_selfattention_probs:
+ output = [output, attention_probs_list]
+
return output
diff --git a/nemo/collections/nlp/modules/common/megatron/utils.py b/nemo/collections/nlp/modules/common/megatron/utils.py
index 601cb7a4d7e8..b0a6f755a9cc 100644
--- a/nemo/collections/nlp/modules/common/megatron/utils.py
+++ b/nemo/collections/nlp/modules/common/megatron/utils.py
@@ -18,7 +18,6 @@
from typing import Dict, Iterator, List, Optional, Tuple, Union
import torch
-import torch.nn as nn
from torch import Tensor
from nemo.utils import logging, logging_mode
@@ -474,9 +473,25 @@ def get_iterator_k_split(
else:
# Split a list of torch tensors
assert batch[0].shape[0] % num_microbatches == 0, "Issue with batch size configuration!"
- split_batch = [
- torch.tensor_split(item, num_microbatches, dim=0) if torch.is_tensor(item) else item for item in batch
- ]
+ split_batch = []
+ for item in batch:
+ if torch.is_tensor(item):
+ split_batch.append(torch.tensor_split(item, num_microbatches, dim=0))
+ elif isinstance(item, list):
+ if isinstance(item[0], torch.Tensor):
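+                # Split every tensor in the list along dim 0, then regroup so that microbatch i
+                # receives the i-th split of each tensor: e.g. [t0, t1] with 2 microbatches
+                # yields ([t0_a, t1_a], [t0_b, t1_b]).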
+ split_tensors = [torch.tensor_split(elem, num_microbatches, dim=0) for elem in item]
+ split_tuple = []
+ for mbi in range(num_microbatches):
+ split_tuple.append([split_tensors[i][mbi] for i in range(len(split_tensors))])
+ split_tuple = tuple(split_tuple)
+ split_batch.append(split_tuple)
+ else:
+ split_batch.append(split_list(item, num_microbatches))
+ elif item is None:
+ split_batch.append(item)
+ else:
+ raise ValueError(f"Unsupported item type: {type(item)}")
+
microbatches = [
[elem[i] if elem is not None else elem for elem in split_batch] for i in range(num_microbatches)
]
diff --git a/nemo/collections/nlp/modules/common/text_generation_utils.py b/nemo/collections/nlp/modules/common/text_generation_utils.py
index eeaaea26beac..4743c3216e6a 100644
--- a/nemo/collections/nlp/modules/common/text_generation_utils.py
+++ b/nemo/collections/nlp/modules/common/text_generation_utils.py
@@ -24,7 +24,7 @@
import numpy as np
import torch
import torch.nn.functional as F
-from lightning_fabric.utilities.seed import seed_everything
+from lightning.fabric.utilities.seed import seed_everything
from nemo.collections.common.tokenizers.tabular_tokenizer import TabularTokenizer
from nemo.collections.multimodal.data.neva.conversation import (
diff --git a/nemo/collections/nlp/parts/megatron_trainer_builder.py b/nemo/collections/nlp/parts/megatron_trainer_builder.py
index 7c7360ba3400..11f79baa819a 100644
--- a/nemo/collections/nlp/parts/megatron_trainer_builder.py
+++ b/nemo/collections/nlp/parts/megatron_trainer_builder.py
@@ -15,11 +15,11 @@
import sys
from typing import Optional, Union
-from lightning_fabric.utilities.exceptions import MisconfigurationException
+from lightning.fabric.utilities.exceptions import MisconfigurationException
+from lightning.pytorch import Trainer
+from lightning.pytorch.callbacks import ModelSummary
+from lightning.pytorch.plugins.environments import TorchElasticEnvironment
from omegaconf import DictConfig, open_dict
-from pytorch_lightning import Trainer
-from pytorch_lightning.callbacks import ModelSummary
-from pytorch_lightning.plugins.environments import TorchElasticEnvironment
from nemo.collections.common.metrics.perf_metrics import FLOPsMeasurementCallback
from nemo.collections.nlp.parts.nlp_overrides import (
diff --git a/nemo/collections/nlp/parts/mixins/nlp_adapter_mixins.py b/nemo/collections/nlp/parts/mixins/nlp_adapter_mixins.py
index e2ccffeebdfa..8f7870b7d4c7 100644
--- a/nemo/collections/nlp/parts/mixins/nlp_adapter_mixins.py
+++ b/nemo/collections/nlp/parts/mixins/nlp_adapter_mixins.py
@@ -101,15 +101,18 @@ def _unwrap_model(self):
else:
return self.model
+ def _unwrap_model_list(self):
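+        """Return self.model as a list; with virtual pipeline parallelism, self.model is already a list of chunks."""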
+ m = getattr(self, "model", [])
+ return m if isinstance(m, list) else [m]
+
+ def _unwrap_layers_model_list(self):
+        layers = torch.nn.ModuleList([])
+        for m in self._unwrap_model_list():
+            layers.extend(self._get_layers_from_model(m))
+        return layers
+
def first_stage_of_pipeline(self):
- if hasattr(self._unwrap_model(), "pre_process"):
- return self._unwrap_model().pre_process
- elif hasattr(self._unwrap_model(), "module") and hasattr(self._unwrap_model().module, "pre_process"):
- # (guyueh1): this if condition is used to handle amp O2
- # when amp_O2 is on, self.model will be wrapped by the Float16Module class
- return self._unwrap_model().module.pre_process
- logging.warning("no attribute named model or no model.pre_process found. Can not detect stage of pipeline...")
- return False
+ return parallel_state.is_pipeline_first_stage()
def _get_all_keys(
self,
@@ -117,11 +120,12 @@ def _get_all_keys(
"""
Returns all the keys in the model
"""
- k = [n for n, p in self._unwrap_model().named_parameters(prefix="model")]
+ k = [n for m in self._unwrap_model_list() for n, p in m.named_parameters(prefix="model")]
b = [
n
- for n, p in self._unwrap_model().named_buffers(prefix="model")
- if n.replace("model.module.", "model.", 1) in self._unwrap_model().state_dict(prefix="model.").keys()
+ for m in self._unwrap_model_list()
+ for n, p in m.named_buffers(prefix="model")
+ if n.replace("model.module.", "model.", 1) in m.state_dict(prefix="model.").keys()
]
# we include buffers because ptuning representations are cached in a buffer and saved to state_dict for inference time use.
return set(k + b)
@@ -195,7 +199,7 @@ def _check_and_add_peft_cfg(self, peft_cfg):
f"{self.__class__.__name__} + {adapter_name})"
)
- layers = self._get_layers_from_model(self._unwrap_model())
+ layers = self._unwrap_layers_model_list()
for layer in layers:
if layer.layer_number in (layer_selection or list(range(1, self.cfg.num_layers + 1))):
for name, module in layer.named_modules():
@@ -312,13 +316,15 @@ def setup_optimizer_param_groups(self):
self.freeze(training=True) # Freeze the entire model
if not self.ptuning_only_and_non_first_stage:
opt_params = []
- for _, module in self._unwrap_model().named_modules(prefix="model"):
+ for _, module in [elem for m in self._unwrap_model_list() for elem in m.named_modules(prefix="model")]:
if isinstance(module, AdapterModuleMixin) and module.is_adapter_available():
module.set_enabled_adapters(enabled=True)
module.unfreeze_enabled_adapters() # selectively unfreeze the adapter modules.
opt_params += [p for p in module.parameters() if p.requires_grad]
- for name, param in self._unwrap_model().named_parameters(prefix="model"):
+ for name, param in [
+ elem for m in self._unwrap_model_list() for elem in m.named_parameters(prefix="model")
+ ]:
if name in self.tunable_base_param_keys:
param.requires_grad = True
opt_params += [param]
@@ -380,7 +386,7 @@ def load_adapters(
super().load_state_dict(state_dict, strict=False)
def set_tunable_base_params(self, peft_cfg):
- for n, p in self.named_parameters():
+ for n, p in self._unwrap_model().named_parameters(prefix="model"):
for tpn in peft_cfg.tunable_base_param_names:
# TODO: simplistic param name matching, should support regex-like syntax @adithyare
if f".{tpn}." in n:
@@ -390,7 +396,7 @@ def set_tunable_base_params(self, peft_cfg):
def tie_weights(self, peft_cfg):
pos_idx = 0
- layers = self._get_layers_from_model(self._unwrap_model())
+ layers = self._unwrap_layers_model_list()
if isinstance(peft_cfg, LoraPEFTConfig):
layer0 = layers[0].self_attention
@@ -419,12 +425,22 @@ def get_peft_state_dict(self):
"""
Gets the keys associated with the adapters only.
"""
- state_dict = self._unwrap_model().state_dict(prefix="model.")
- peft_state_dict = {}
- for k in self.adapter_keys.union(self.tunable_base_param_keys):
- # state_dict keys needs to be in non-O2 format and will be corrected in PEFTSaveRestoreConnector if O2=True
- new_k = k.replace("model.module.", "model.", 1)
- peft_state_dict[new_k] = state_dict[new_k]
+
+ def filter_state_dict(state_dict):
+ peft_state_dict = {}
+ for k in self.adapter_keys.union(self.tunable_base_param_keys):
+            # state_dict keys need to be in non-O2 format and will be corrected in PEFTSaveRestoreConnector if O2=True
+ new_k = k.replace("model.module.", "model.", 1)
+ peft_state_dict[new_k] = state_dict[new_k] if new_k in state_dict else state_dict[k]
+ return peft_state_dict
+
+ if hasattr(self, 'model') and isinstance(self.model, list):
+ peft_state_dict = {}
+ for i, m in enumerate(self.model):
+ peft_state_dict[f"model_{i}"] = filter_state_dict(m.state_dict(prefix="model."))
+ else:
+ peft_state_dict = filter_state_dict(self._unwrap_model().state_dict(prefix="model."))
+
return peft_state_dict
def state_dict(self, destination=None, prefix=None, keep_vars=False):
@@ -447,8 +463,15 @@ def sharded_state_dict(self, prefix: str = ''):
return super().sharded_state_dict(prefix=prefix)
def load_state_dict(self, state_dict, strict: bool = True):
- if len(state_dict) == 0:
- return # checkpoint is loaded in on_load_checkpoint()
+ # If state_dict is empty, or if state_dict contains keys for virtual pipeline
+ # parallel chunks (starting from model_0) but those chunks are empty, skip this function.
+ # Checkpoint is loaded in on_load_checkpoint() instead.
+ if len(state_dict) == 0 or (
+ parallel_state.get_virtual_pipeline_model_parallel_world_size() is not None
+ and "model_0" in state_dict
+ and len(state_dict["model_0"]) == 0
+ ):
+ return
if self.use_peft and self.setup_complete:
# at this stage only adapter params will appear in the state_dict arg
# so we only update those while the rest of the model is frozen.
diff --git a/nemo/collections/nlp/parts/nlp_overrides.py b/nemo/collections/nlp/parts/nlp_overrides.py
index 2100e9c1ba8f..73263896af82 100644
--- a/nemo/collections/nlp/parts/nlp_overrides.py
+++ b/nemo/collections/nlp/parts/nlp_overrides.py
@@ -23,24 +23,24 @@
from pathlib import Path
from typing import Any, Callable, Dict, Generator, Iterator, List, Literal, Mapping, Optional, Sized, Union
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
-from lightning_fabric.plugins import TorchCheckpointIO
-from lightning_fabric.utilities.cloud_io import get_filesystem
-from lightning_fabric.utilities.optimizer import _optimizer_to_device
+from lightning.fabric.plugins import TorchCheckpointIO
+from lightning.fabric.utilities.cloud_io import get_filesystem
+from lightning.fabric.utilities.optimizer import _optimizer_to_device
+from lightning.pytorch.callbacks.progress import TQDMProgressBar
+from lightning.pytorch.callbacks.progress.tqdm_progress import _update_n
+from lightning.pytorch.core.optimizer import LightningOptimizer
+from lightning.pytorch.loops.fetchers import _DataFetcher
+from lightning.pytorch.plugins import ClusterEnvironment
+from lightning.pytorch.plugins.io.checkpoint_plugin import CheckpointIO
+from lightning.pytorch.plugins.io.wrapper import _WrappingCheckpointIO
+from lightning.pytorch.plugins.precision import MixedPrecisionPlugin
+from lightning.pytorch.plugins.precision.fsdp import FSDPPrecision
+from lightning.pytorch.strategies import DDPStrategy, FSDPStrategy
+from lightning.pytorch.trainer.states import TrainerFn
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf
-from pytorch_lightning.callbacks.progress import TQDMProgressBar
-from pytorch_lightning.callbacks.progress.tqdm_progress import _update_n
-from pytorch_lightning.core.optimizer import LightningOptimizer
-from pytorch_lightning.loops.fetchers import _DataFetcher
-from pytorch_lightning.plugins import ClusterEnvironment
-from pytorch_lightning.plugins.io.checkpoint_plugin import CheckpointIO
-from pytorch_lightning.plugins.io.wrapper import _WrappingCheckpointIO
-from pytorch_lightning.plugins.precision import MixedPrecisionPlugin
-from pytorch_lightning.plugins.precision.fsdp import FSDPPrecision
-from pytorch_lightning.strategies import DDPStrategy, FSDPStrategy
-from pytorch_lightning.trainer.states import TrainerFn
-from pytorch_lightning.trainer.trainer import Trainer
from torch._C._distributed_c10d import ReduceOp
from torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks import noop_hook
from torch.distributed.fsdp import BackwardPrefetch, FullStateDictConfig
@@ -107,6 +107,7 @@
from megatron.core.tensor_parallel.layers import param_is_not_tensor_parallel_duplicate
from megatron.core.transformer.module import Float16Module as MCoreFloat16Module
from megatron.core.transformer.transformer_layer import TransformerLayer as MCoreTransformerLayer
+
from nemo.utils.callbacks.dist_ckpt_io import DistributedCheckpointIO
HAVE_MEGATRON_CORE = True
@@ -175,9 +176,14 @@ def init_model_parallel(
app_state.data_parallel_size = parallel_state.get_data_parallel_world_size()
app_state.pipeline_model_parallel_group = parallel_state.get_pipeline_model_parallel_group()
- # create MPI process group for UCX-based communication APIs
if app_state.init_mpi_proc_group:
- torch.distributed.new_group(backend='mpi')
+            import packaging.version
+
+ te_version = packaging.version.Version(version('transformer_engine'))
+ if te_version < packaging.version.Version("1.9"):
+ # Create MPI process group for bootstrapping at old TE versions.
+ # From TE version v1.9, the process group is initialized in TE.
+ torch.distributed.new_group(backend='mpi')
class NLPDDPStrategy(DDPStrategy):
@@ -376,7 +382,7 @@ def save_checkpoint(
self, checkpoint: Dict[str, Any], filepath: Union[str, Path], storage_options: Optional[Any] = None
) -> None:
app_state = AppState()
- """ PTL method which we override to accomodate distributed checkpoints and
+        """ PTL method which we override to accommodate distributed checkpoints and
the legacy model parallel checkpoints.
When using megatron core, the distributed checkpointing library expects save functions to be
@@ -1269,6 +1275,7 @@ def restore_from(
return_config: bool = False,
trainer: Trainer = None,
validate_access_integrity: bool = True,
+ replace_sharded_tensor_key: Optional[str] = None,
):
"""
Restores model instance (weights and configuration) into .nemo file
@@ -1356,6 +1363,9 @@ def dummy():
checkpoint = {}
sharded_state_dict = instance.sharded_state_dict()
checkpoint['state_dict'] = sharded_state_dict
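+            # Optionally rename the sharded-tensor key prefix; note that str.replace
+            # substitutes every occurrence of "model" in the key.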
+ if replace_sharded_tensor_key:
+ for v in checkpoint["state_dict"].values():
+ v.key = v.key.replace("model", replace_sharded_tensor_key)
checkpoint_io = DistributedCheckpointIO.from_config(conf)
checkpoint = checkpoint_io.load_checkpoint(
diff --git a/nemo/collections/nlp/parts/utils_funcs.py b/nemo/collections/nlp/parts/utils_funcs.py
index a989ff3f606c..87fc1aa6f73c 100644
--- a/nemo/collections/nlp/parts/utils_funcs.py
+++ b/nemo/collections/nlp/parts/utils_funcs.py
@@ -28,9 +28,9 @@
import numpy as np
import torch
import torch.nn.functional as F
+from lightning.pytorch.trainer.trainer import Trainer
from matplotlib import pyplot as plt
from omegaconf.dictconfig import DictConfig
-from pytorch_lightning.trainer.trainer import Trainer
from sklearn.metrics import classification_report, confusion_matrix
from torch import Tensor
diff --git a/nemo/collections/tts/data/speechllm/__init__.py b/nemo/collections/tts/data/speechllm/__init__.py
new file mode 100644
index 000000000000..9df65818d226
--- /dev/null
+++ b/nemo/collections/tts/data/speechllm/__init__.py
@@ -0,0 +1,13 @@
+# Copyright (c) 2024, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/nemo/collections/tts/data/speechllm/t5_speechllm_dataset.py b/nemo/collections/tts/data/speechllm/t5_speechllm_dataset.py
new file mode 100644
index 000000000000..32f0a14f5e65
--- /dev/null
+++ b/nemo/collections/tts/data/speechllm/t5_speechllm_dataset.py
@@ -0,0 +1,1355 @@
+# Copyright (c) 2024, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import enum
+import json
+import random
+from dataclasses import dataclass, field
+from pathlib import Path
+from typing import ClassVar, List, Optional, Union
+
+import numpy as np
+import torch
+from hydra.utils import instantiate
+from omegaconf import OmegaConf
+from tqdm.auto import tqdm
+
+from nemo.collections.asr.parts.preprocessing.features import WaveformFeaturizer
+from nemo.collections.asr.parts.preprocessing.segment import AudioSegment
+from nemo.collections.common.tokenizers.text_to_speech.ipa_lexicon import get_ipa_punctuation_list
+from nemo.collections.common.tokenizers.text_to_speech.tokenizer_utils import any_locale_text_preprocessing
+from nemo.collections.nlp.data.language_modeling.megatron.base_prompt_learning_dataset import BasePromptLearningDataset
+from nemo.collections.nlp.models.language_modeling.megatron_t5_model import T5Sentinel
+from nemo.collections.nlp.modules.common import VirtualPromptSource
+from nemo.collections.nlp.modules.common.megatron.utils import build_position_ids
+from nemo.collections.tts.parts.utils.helpers import get_mask_from_lengths
+from nemo.collections.tts.parts.utils.tts_dataset_utils import (
+ BetaBinomialInterpolator,
+ beta_binomial_prior_distribution,
+ general_padding,
+ get_base_dir,
+)
+from nemo.utils import logging
+
+__all__ = ['T5SpeechLMDataset', 'Lang']
+
+
+def get_full_list_puncts():
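+    """Return the union of IPA punctuation lists across the supported locales, sorted for determinism."""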
+ punct_set = set()
+ for locale_id in ["en-US", "de-DE", "fr-FR"]:
+ punct_list = get_ipa_punctuation_list(locale=locale_id)
+ punct_set.update(punct_list)
+ return sorted(punct_set)
+
+
+@dataclass
+class G2PConfig:
+ _target_: str = "nemo.collections.tts.g2p.models.en_us_arpabet.EnglishG2p"
+ phoneme_dict: str = "scripts/tts_dataset_files/cmudict-0.7b_nv22.10"
+ heteronyms: str = "scripts/tts_dataset_files/heteronyms-052722"
+ phoneme_probability: float = 0.5
+
+
+@dataclass
+class EnglishIpaG2pConfig:
+ _target_: str = "nemo.collections.tts.g2p.models.i18n_ipa.IpaG2p"
+ phoneme_dict: str = "scripts/tts_dataset_files/ipa_cmudict-0.7b_nv23.01.txt"
+ locale: str = "en-US"
+ heteronyms: str = "scripts/tts_dataset_files/heteronyms-052722"
+ phoneme_probability: float = 0.5
+ grapheme_case: str = "upper"
+ use_stresses: bool = True
+ use_chars: bool = True
+ ignore_ambiguous_words: bool = False
+
+
+@dataclass
+class TextTokenizer:
+ _target_: str = "nemo.collections.common.tokenizers.text_to_speech.tts_tokenizers.EnglishPhonemesTokenizer"
+ punct: bool = True
+ stresses: bool = True
+ chars: bool = True
+ apostrophe: bool = True
+ pad_with_space: bool = True
+ add_blank_at: bool = True
+    g2p: G2PConfig = field(default_factory=G2PConfig)  # default_factory avoids a shared mutable default
+
+
+@dataclass
+class EnglishIpaTextTokenizer:
+ _target_: str = "nemo.collections.common.tokenizers.text_to_speech.tts_tokenizers.IPATokenizer"
+ locale: str = "en-US"
+ punct: bool = True
+ # Define non_default_punct_list as a ClassVar to explicitly mark it as a class variable
+ non_default_punct_list: ClassVar[List[str]] = get_full_list_puncts()
+ apostrophe: bool = True
+ pad_with_space: bool = True
+ add_blank_at: bool = True
+    g2p: EnglishIpaG2pConfig = field(default_factory=EnglishIpaG2pConfig)
+
+
+@dataclass
+class TextTokenizerConfig:
+    text_tokenizer: TextTokenizer = field(default_factory=TextTokenizer)
+
+
+@dataclass
+class EnglishIpaTextTokenizerConfig:
+    text_tokenizer: EnglishIpaTextTokenizer = field(default_factory=EnglishIpaTextTokenizer)
+
+
+def _get_default_text_tokenizer_conf(phoneme_probability: float = 0.5, use_ipa: bool = False):
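+    """Build the default English text-tokenizer config (IPA or ARPABET G2P) as an OmegaConf object."""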
+ if use_ipa:
+ g2p = EnglishIpaG2pConfig(phoneme_probability=phoneme_probability)
+ _text_tokenizer = EnglishIpaTextTokenizer(g2p=g2p)
+ text_tokenizer: EnglishIpaTextTokenizerConfig = EnglishIpaTextTokenizerConfig(text_tokenizer=_text_tokenizer)
+ else:
+ g2p = G2PConfig(phoneme_probability=phoneme_probability)
+ _text_tokenizer = TextTokenizer(g2p=g2p)
+ text_tokenizer: TextTokenizerConfig = TextTokenizerConfig(text_tokenizer=_text_tokenizer)
+ return OmegaConf.create(OmegaConf.to_yaml(text_tokenizer))
+
+
+def pad_text_to_speech_dims(text_tensor, pad_id, pad_size=7):
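+    """Expand a 1-D text-token tensor of shape (T,) to (1 + pad_size, T) so it
+    matches the (num_speech_codebooks, T) layout of speech tokens; every row
+    below the first is filled with pad_id. Example: a (5,) tensor with
+    pad_size=7 becomes an (8, 5) tensor."""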
+ token_len = text_tensor.shape[0]
+ empty_padding = torch.ones((pad_size, token_len), dtype=text_tensor.dtype, device=text_tensor.device) * pad_id
+ return torch.cat((text_tensor.unsqueeze(0), empty_padding), dim=0)
+
+
+class Lang(enum.Enum):
+ en = 1
+ es = 2
+ fr = 3
+ zh = 4
+    de = 5
+
+
+class T5SpeechLMDataset(BasePromptLearningDataset):
+ """
+ The dataset class for prompt-tuning or p-tuning pretrained T5 SpeechLM models.
+ """
+
+ def __init__(
+ self,
+ datasets,
+ tokenizer,
+ virtual_prompt_source: VirtualPromptSource,
+ task_templates: dict,
+ pseudo_tokens,
+ pad_token_id: str,
+ max_seq_length: int,
+ sample_rate: int,
+ min_seq_length: int = 1,
+ add_bos: bool = False,
+ add_eos: bool = True,
+ for_train: bool = True,
+ decoder_starts_with_pad: bool = False,
+ add_eos_to_decoder_output: bool = True,
+ add_sentinel_to_input: bool = True,
+ ul2_prompt_token: str = None,
+ segment_max_duration: Optional[int] = None,
+ trim: bool = False,
+ trim_ref: Optional[float] = None,
+ trim_top_db: Optional[int] = None,
+ trim_frame_length: Optional[int] = None,
+ trim_hop_length: Optional[int] = None,
+ pad_multiple: int = 1,
+ pitch_augment: bool = False,
+ sup_data_path: Optional[Union[Path, str]] = None,
+ speech_offset: Optional[int] = None,
+ train_task: Optional[str] = None,
+ seq_pattern: Optional[str] = "parallel",
+ use_attention_prior: Optional[bool] = False,
+ attention_prior_scaling_factor: Optional[float] = 1.0,
+ spec_aug=False,
+ spec_aug_time_width=0.2,
+ spec_aug_time_masks=2,
+ cross_attention_epsilon: Optional[float] = 0.0,
+ lm_vocab_size: Optional[int] = None,
+ num_speech_codebooks: Optional[int] = 8,
+ codebook_fps: Optional[int] = 86,
+ add_special_tokens_to_only_first_codebook: Optional[bool] = False,
+ context_pattern: Optional[str] = "parallel",
+ context_duration_min: Optional[float] = 3.0,
+ context_duration_max: Optional[float] = 5.0,
+ skip_datasets: Optional[List[str]] = [], # substrings of dataset names to skip
+ english_only_model: Optional[bool] = False,
+ context_conditioning: Optional[str] = "decoder", # encoder or decoder
+        use_beta_binomial_interpolator: Optional[bool] = False,  # whether to use BetaBinomialInterpolator for the attention prior
+ context_slice_method: Optional[str] = "random", # random or fixed
+ phoneme_probability: Optional[float] = 0.5,
+ encoder_type: Optional[str] = "single_transformer",
+ use_ipa: bool = False,
+ **kwargs,
+ ):
+ """
+ Only speech parameters are explained here.
+ segment_max_duration: Optional[int] = None, - Speech max segment duration
+ trim: bool = False, - speech parameter
+ trim_ref: Optional[float] = None, - speech parameter
+ trim_top_db: Optional[int] = None, - speech parameter
+ trim_frame_length: Optional[int] = None, - speech parameter
+ trim_hop_length: Optional[int] = None, - speech parameter
+ pad_multiple: int = 1, - speech parameter
+ pitch_augment: bool = False, - speech parameter
+ sup_data_path: Optional[Union[Path, str]] = None, - Supplementary folder path where codecs are stored.
+ speech_offset: Optional[int] = None, - if speech tokens then add this offset to the token indices to distinguish between text and speech tokens.
+ lm_vocab_size: Optional[int] = None, - vocab size of the original language model (phoneme tokens start from this index)
+ english_only_model: Optional[bool] = False, specify if monolingual or multi-lingual modeling.
+ use_ipa: bool = False, specify if using IPA tokens or default ARPABET tokens. Either choice still mixes chars.
+ **kwargs,
+ """
+        # These attributes must be set before calling super().__init__() because the parent class calls `load_data()`, which requires them.
+ self._rng = random.Random()
+ self.spec_aug = spec_aug if for_train else False
+ self.time_width = spec_aug_time_width
+ self.time_masks = spec_aug_time_masks
+ self.decoder_starts_with_pad = decoder_starts_with_pad
+ self.add_eos_to_decoder_output = add_eos_to_decoder_output
+ self.add_sentinel_to_input = add_sentinel_to_input
+ self.ul2_prompt_token = ul2_prompt_token
+ # Speech related variables
+ self.base_data_dir = None
+ self.segment_max_duration = segment_max_duration
+ self.sample_rate = sample_rate
+ self.featurizer = WaveformFeaturizer(sample_rate=self.sample_rate)
+ self.pad_multiple = pad_multiple
+ self.pitch_augment = pitch_augment
+ self.trim = trim
+ self.trim_ref = trim_ref if trim_ref is not None else np.max
+ self.trim_top_db = trim_top_db if trim_top_db is not None else 60
+ self.trim_frame_length = trim_frame_length if trim_frame_length is not None else 2048
+ self.trim_hop_length = trim_hop_length if trim_hop_length is not None else 512
+ self.speech_offset = speech_offset if speech_offset is not None else 3
+ self.seq_pattern = seq_pattern
+ self.use_attention_prior = use_attention_prior
+ self.attention_prior_scaling_factor = attention_prior_scaling_factor
+ self.cross_attention_epsilon = cross_attention_epsilon # value of prior for context tokens (b/w 0 and 1)
+        assert 0.0 <= self.cross_attention_epsilon <= 1.0
+ self.lm_vocab_size = tokenizer.vocab_size if lm_vocab_size is None else lm_vocab_size
+ self.num_speech_codebooks = num_speech_codebooks
+ self.codebook_fps = codebook_fps
+ self.add_special_tokens_to_only_first_codebook = add_special_tokens_to_only_first_codebook
+ # context_pattern and duration arguments are supported only if context_type is REFSPEAKERCODEC in the manifest
+ self.context_pattern = context_pattern
+ self.context_duration_min = context_duration_min
+ self.context_duration_max = context_duration_max
+ self.english_only_model = english_only_model
+ self.phoneme_tokenizer = None
+ if english_only_model:
+ self.phoneme_tokenizer = instantiate(
+ _get_default_text_tokenizer_conf(phoneme_probability=phoneme_probability, use_ipa=use_ipa)
+ ).text_tokenizer
+ else:
+ self.g2p = {"fr": lambda x: x}
+ if kwargs.get("g2p", None):
+ if "english" in kwargs["g2p"]:
+ english_g2p = instantiate(kwargs["g2p"]["english"])
+ self.g2p["en"] = lambda x: english_g2p(x)
+ if "spanish" in kwargs["g2p"]:
+ spanish_g2p = instantiate(kwargs["g2p"]["spanish"])
+ self.g2p["es"] = lambda x: spanish_g2p(x)
+ if "mandarin" in kwargs["g2p"]:
+ mandarin_g2p = instantiate(kwargs["g2p"]["mandarin"])
+ self.g2p["zh"] = lambda x: mandarin_g2p(x)
+ if "german" in kwargs["g2p"]:
+ german_g2p = instantiate(kwargs["g2p"]["german"])
+ self.g2p["de"] = lambda x: german_g2p(x)
+
+ self.context_conditioning = context_conditioning
+ if self.context_conditioning == "decoder":
+ assert (
+ self.context_duration_min == self.context_duration_max
+ ), "For decoder conditioning, context_duration_min and context_duration_max should be same"
+ self.decoder_context_len = int(
+ self.context_duration_min * self.codebook_fps
+ ) # TODO: Just take from model var?
+
+        # Initialize sup_data_path, sup_data_types and run preprocessing methods for every supplementary data type
+ self.sup_data_path = None
+ if sup_data_path is not None:
+ Path(sup_data_path).mkdir(parents=True, exist_ok=True)
+ self.sup_data_path = sup_data_path
+
+ self.codec_folder = kwargs.pop('codec_folder', None)
+ self.train_task = train_task
+ if self.codec_folder is None and self.sup_data_path is not None:
+ self.codec_folder = Path(self.sup_data_path) / "codec"
+ elif isinstance(self.codec_folder, str):
+ self.codec_folder = Path(self.codec_folder)
+
+        if self.codec_folder is not None:
+            self.codec_folder.mkdir(exist_ok=True, parents=True)
+
+ self.context_length = kwargs.pop('context_length', None) # only used in gpt dataset atm
+ self.transformer_type = kwargs.pop('transformer_type', 'T5')
+ self.skip_datasets = skip_datasets
+
+ self.beta_binomial_interpolator = (
+ BetaBinomialInterpolator(scaling_factor=self.attention_prior_scaling_factor)
+ if use_beta_binomial_interpolator
+ else None
+ )
+ self.context_slice_method = context_slice_method
+ self.encoder_type = encoder_type
+ super().__init__(
+ datasets=datasets,
+ tokenizer=tokenizer,
+ virtual_prompt_source=virtual_prompt_source,
+ task_templates=task_templates,
+ pseudo_tokens=pseudo_tokens,
+ pad_token_id=pad_token_id,
+ max_seq_length=max_seq_length,
+ min_seq_length=min_seq_length,
+ add_bos=add_bos,
+ add_eos=add_eos,
+ for_train=for_train,
+ )
+
+ def load_data(self, dataset):
+ """
+ Loads a dataset by filling in the task templates specified in the config file
+ with the information from each training/inference example. Converts all input
+ text into token ids. Also replaces the <|VIRTUAL_PROMPT_#|> placeholders in
+ the task templates with the actual virtual prompt token ids.
+
+ params:
+ dataset: A list of json objects or a dictionary objects each
+ containing the information needed for a training example
+ """
+ copy_dataset = list(dataset)
+ audio_filelist = []
+ # This loop is needed to calculate self.base_data_dir.
+ for json_line in copy_dataset:
+            if isinstance(json_line, dict):
+ doc = json_line
+ else:
+ doc = json.loads(json_line)
+ taskname = doc["taskname"]
+ prompt_template_fields = self.task_templates[taskname]["prompt_template_fields"]
+
+ for p in prompt_template_fields:
+ if f"{p}_type" in doc and doc[f"{p}_type"] == "SPEECH":
+ audio_filelist.append(doc[p])
+ self.base_data_dir = get_base_dir(audio_filelist)
+
+ skipped = 0
+ tts = 0
+ asr = 0
+ i = 0
+ logging.info(f"copy_dataset len === {len(copy_dataset)}")
+ examples = []
+ for json_line in tqdm(copy_dataset):
+ i += 1
+
+ # Read example dict or load the information for a single example from .json file
+            if isinstance(json_line, dict):
+ doc = json_line
+ else:
+ doc = json.loads(json_line)
+
+ if self.context_conditioning == "decoder":
+                # Modify doc to combine context and answer
+ assert ";" not in doc['context'], "Multiple contexts not supported in decoder conditioning"
+ doc['answer'] = "{};{}".format(doc['context'], doc['answer'])
+ doc['answer_duration'] = self.context_duration_min + doc['answer_duration']
+ doc['answer_type'] = "CONTEXTANSWER"
+ doc['context_type'] = "DUMMYCONTEXT"
+ doc['context'] = "DUMMYCONTEXT"
+
+ question_in_manifest = doc['question']
+
+ if "Text to speech this" in question_in_manifest or "Phoneme TTS" in question_in_manifest:
+ tts += 1
+ if self.train_task not in ['tts', 'all']:
+ continue
+ elif "Next token prediction" in question_in_manifest:
+ if self.train_task != 'tts':
+ asr += 1
+ else:
+ tts += 1
+ continue
+ else:
+ if self.train_task == 'tts':
+ continue
+ asr += 1
+
+ if doc["context_type"] == "SPEECH":
+ assert "context_duration" in doc, f"context_duration key not in document {doc}"
+ approx_context_len = 3 * (self.codebook_fps + 1) # +1 just to be safe
+ if self.context_length is not None and doc["context_duration"] < self.context_length:
+ logging.debug(
+ f"skipped as context_length of {doc['context_duration']} is less than {self.context_length}"
+ )
+ skipped += 1
+ continue
+ elif "Remove Noise" in question_in_manifest:
+ approx_context_len = doc["answer_duration"] * (self.codebook_fps + 1)
+ elif "Extract Speaker Audio" in question_in_manifest:
+ approx_context_len = (
+ doc["answer_duration"] * (self.codebook_fps + 1) + 400
+ ) # 400 is the max ref speaker audio
+ elif ("Text to speech this" in question_in_manifest) or ('Phoneme TTS' in question_in_manifest):
+ # approx_context_len = 400
+ approx_context_len = 5 * (
+ self.codebook_fps + 1
+ ) # better than 400. TODO: pneekhara: Need to change things for multi-encoder vs single encoder based filtering.
+ elif "Edit Speech" in question_in_manifest:
+ approx_context_len = doc["answer_duration"] * (self.codebook_fps + 1)
+ else:
+ raise NotImplementedError(f"Unknown context type {doc['context_type']}")
+
+ approx_question_len = len(doc["question"].split(' ')) + 3
+ if 'Phoneme TTS' in question_in_manifest:
+ # approx len is equal to num of characters
+ approx_question_len = len(question_in_manifest)
+
+ if doc["answer_type"] in ["SPEECH", "AUDIOCODEC", "CONTEXTANSWER"]:
+ assert "answer_duration" in doc, f"answer_duration key not in document {doc}"
+ approx_answer_len = doc["answer_duration"] * (self.codebook_fps + 1) + 3 # +3 for EOS, BOS padding
+ if self.seq_pattern == "delay_parallel":
+ # In delay parallel, there is padding so add 8 frames
+ approx_answer_len = approx_answer_len + self.num_speech_codebooks
+ else:
+ approx_answer_len = len(doc["answer"].split(' ')) + 3
+
+ skip_record = False
+ for skip_dataset in self.skip_datasets:
+ if skip_dataset in doc['answer']:
+ skip_record = True
+
+ if not skip_record:
+ if (self.transformer_type == "GPT") and (
+ self.min_seq_length
+ < approx_context_len + approx_question_len + approx_answer_len
+ < self.max_seq_length
+ ):
+ examples.append(doc)
+ elif (self.transformer_type == "T5") and (
+ self.min_seq_length < approx_context_len + approx_question_len < self.max_seq_length
+ and self.min_seq_length < approx_answer_len < self.max_seq_length
+ ):
+ examples.append(doc)
+ else:
+ logging.debug(f"skipped for {approx_context_len + approx_question_len} {approx_answer_len} len")
+ skipped += 1
+ else:
+ print("Skipping", doc['answer'])
+ logging.debug(f"skipped for {doc['answer']} as it is in skip_datasets")
+ skipped += 1
+
+        logging.info(f'Skipped {skipped} examples: sequence length out of range, context too short, or dataset in skip_datasets')
+
+ return examples
+
+ def __getitem__(self, idx):
+ doc = self.examples[idx]
+ taskname = doc["taskname"]
+ prompt_template = self.task_templates[taskname]["prompt_template"]
+ prompt_template_fields = self.task_templates[taskname]["prompt_template_fields"]
+ total_virtual_tokens = self.task_templates[taskname]["total_virtual_tokens"]
+ virtual_token_splits = self.task_templates[taskname]["virtual_token_splits"]
+ truncation_field = self.task_templates[taskname]['truncate_field']
+ answer_field = self.task_templates[taskname]["answer_field"]
+
+ input_example = prompt_template
+
+ self._input_sanity_checks(
+ total_virtual_tokens=total_virtual_tokens,
+ virtual_token_splits=virtual_token_splits,
+ prompt_template=prompt_template,
+            prompt_template_fields=doc.keys(), # passing doc keys effectively skips this check; it is not needed for TTS
+ truncation_field=truncation_field,
+ answer_field=answer_field,
+ doc=doc,
+ )
+ question_in_manifest = doc['question']
+
+ # Format the input example according to the template
+ # Get context, question and answer codes in a dict.
+ # TODO @xueyang: declare the instructions when initializing the dataset so that they can be re-used. Temporally
+ # hardcode them here.
+ question_text = doc["question"].strip()
+ instructions = ["Phoneme TTS", "Text to speech this"]
+ for prefix in instructions:
+ if doc["question"].startswith(prefix):
+ question_text = doc["question"][len(prefix) :].strip()
+ break
+
+ input_dict = self._insert_data_in_template(prompt_template_fields, doc, answer_field)
+ lang = Lang[doc.get("lang", "en")]
+ context_tokens = input_dict['context']
+ question_tokens = input_dict['question']
+
+ # Logic to prune context
+ # In case of TTS task, the entire reference speech is not required, so we randomly select a portion
+ # of the reference audio.
+ # In case of Next token prediction, We want context[:T] to go in the encoder and context[T+1:] to be
+ # predicted by the decoder.
+ start_token_index = 0
+ end_token_index = -1
+ if ("Text to speech this" in question_in_manifest) and (doc["context_type"] == "SPEECH"):
+ total_context_len = context_tokens[0].size()[1]
+ reduced_len = min(
+ 400,
+ (
+ int(total_context_len * 0.2)
+ if total_context_len > 600
+ else int(total_context_len * random.uniform(0.2, 0.5))
+ ),
+ )
+ start_token_index = random.randint(
+ 0, total_context_len - reduced_len
+ ) # start index can be greater than 440
+ context_tokens[0] = context_tokens[0][
+ :, start_token_index : min(start_token_index + 440, start_token_index + reduced_len)
+ ]
+ elif "Next token prediction" in question_in_manifest:
+ total_context_len = context_tokens[0].size()[1]
+ end_token_index = int(total_context_len * random.uniform(0.01, 0.2))
+ context_tokens[0] = context_tokens[0][:, :end_token_index]
+
+ # Get virtual tokens
+ # `virtual_tokens` is "".
+ virtual_tokens = self._insert_virtual_token_placeholders(input_example.split(' ')[0], virtual_token_splits)
+
+ # a trick to align with the data format in t5 pretraining
+ virtual_tokens = self.tokenizer.text_to_ids(virtual_tokens)
+ if self.add_sentinel_to_input:
+ question_tokens = question_tokens + self.tokenizer.text_to_ids(T5Sentinel.FIRST.value)
+
+ # Add BOS/EOS to the input of encoder if desired, adds EOS by default
+ if self.ul2_prompt_token is not None:
+ ul2_prompt_token_id = self.tokenizer.text_to_ids(self.ul2_prompt_token)
+ assert len(ul2_prompt_token_id) == 1
+ context_tokens = ul2_prompt_token_id + context_tokens
+ if self.add_bos:
+ context_tokens = [self.tokenizer.bos_id] + context_tokens
+ if self.add_eos:
+ question_tokens = question_tokens + [self.tokenizer.eos_id]
+
+ # Try to truncate input text to fit into the max sequence length
+ if self._get_len(context_tokens, question_tokens, virtual_tokens) > self.max_seq_length:
+ context_tokens, question_tokens, virtual_tokens = self._truncate_input_speech(
+ context_tokens, question_tokens, virtual_tokens
+ )
+
+ virtual_tokens, virtual_tokens_len = self.list_to_tensor(virtual_tokens)
+ context_tokens, context_tokens_len = self.list_to_tensor(context_tokens)
+ question_tokens, question_tokens_len = self.list_to_tensor(question_tokens)
+
+ if doc["question_type"] == "TEXT" and doc["context_type"] != "TEXT":
+ question_tokens = pad_text_to_speech_dims(
+ question_tokens, self.tokenizer.pad_id, self.num_speech_codebooks - 1
+ )
+ if doc["context_type"] == "TEXT" and doc["question_type"] != "TEXT":
+ context_tokens = pad_text_to_speech_dims(
+ context_tokens, self.tokenizer.pad_id, self.num_speech_codebooks - 1
+ )
+ if doc["context_type"] == "TEXT" and doc["question_type"] == "TEXT":
+ context_tokens = pad_text_to_speech_dims(
+ context_tokens, self.tokenizer.pad_id, self.num_speech_codebooks - 1
+ )
+ question_tokens = pad_text_to_speech_dims(
+ question_tokens, self.tokenizer.pad_id, self.num_speech_codebooks - 1
+ )
+
+        # context_tokens: tensor, (num_speech_codebooks, audio_context_len)
+        # question_tokens: tensor, (num_speech_codebooks, instruction len + question len + 1 (sentinel) + 1 ([SEP])); only the first row holds token ids, all other rows are pad
+ if self.encoder_type == "multi_transformer":
+ context_and_question_tokens = [context_tokens, question_tokens]
+ else:
+ context_and_question_tokens = torch.cat([context_tokens, question_tokens], dim=1)
+
+ # get answer ids
+ if answer_field in doc.keys(): # training and validation
+ answer_ids = self._get_tokens(doc, answer_field, doc[answer_field])
+ if end_token_index > -1:
+ answer_ids[0] = answer_ids[0][:, end_token_index:]
+
+ if self.decoder_starts_with_pad:
+ answer_text_ids = [self.tokenizer.pad_id]
+ else:
+ answer_text_ids = [self.tokenizer.bos_id]
+ # a trick to align with the data format in t5 pretraining
+ # if self.add_sentinel_to_input:
+ # answer_text_ids += self.tokenizer.text_to_ids(T5Sentinel.FIRST.value)
+ answer_text_ids += answer_ids
+
+ if self.add_eos_to_decoder_output:
+ answer_text_ids += [self.tokenizer.eos_id]
+ else:
+ answer_text_ids += self.tokenizer.text_to_ids(T5Sentinel.END.value)
+
+ if self.virtual_prompt_source == VirtualPromptSource.PROMPT_ENCODER:
+ taskname_id = self.tokenizer.text_to_ids(taskname)
+ elif (
+ self.virtual_prompt_source == VirtualPromptSource.NO_PROMPT
+ ): # TODO (@adithyare) this class and GPTPromptLearningDataset should be merged.
+ taskname_id = -1
+ else:
+ raise ValueError("Invalid virtual prompt source specified")
+
+ dec_input = None
+ dec_labels = None
+
+ # if single-encoder and context_condition is decoder, answer_text_ids = [CLS_id, context audio code tensors, zero-pad, answer audio code tensor, SEP_id]
+ # if multi-encoder, answer_text_ids = [CLS_id, answer audio codec tensor, SEP_id], so dec_input will not include audio context anymore.
+ if answer_field in doc.keys(): # training and validation
+ dec_input = answer_text_ids[:-1]
+ dec_labels = answer_text_ids[1:]
+
+ # if single-encoder and context_condition is decoder:
+ # dec_input: shape=(self.num_speech_codebooks, 1([CLS]) + len(context audio frames) + 1([PAD]) + len(answer audio frames))
+ # dec_labels: shape=(self.num_speech_codebooks, len(context audio frames) + 1([PAD]) + len(answer audio frames) + 1([SEP]))
+ # if multi-encoder:
+ # dec_input: (num_speech_codebooks, 1([CLS]) + len(answer audio frames))
+ # dec_labels: (num_speech_codebooks, len(answer audio frames) + 1([SEP]))
+ dec_input, dec_input_len = self.list_to_tensor(dec_input, True)
+ dec_labels, dec_labels_len = self.list_to_tensor(dec_labels, True)
+        is_speech = doc["answer_type"] != "TEXT"
+ if is_speech:
+ assert dec_input.dim() == 2 and dec_labels.dim() == 2
+ if self.seq_pattern == "delay_parallel":
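+                # Delay pattern: zero-pad num_codebooks frames on each side, then
+                # slice so codebook c is delayed by c frames relative to codebook 0
+                # (output[c, t] corresponds to original[c, t - c]).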
+ num_codebooks = dec_input.shape[0]
+ dec_input_padded = torch.cat(
+ [
+ torch.zeros_like(dec_input[:, 0:num_codebooks]),
+ dec_input,
+ torch.zeros_like(dec_input[:, 0:num_codebooks]),
+ ],
+ dim=1,
+ )
+ dec_labels_padded = torch.cat(
+ [
+ torch.zeros_like(dec_labels[:, 0:num_codebooks]),
+ dec_labels,
+ torch.zeros_like(dec_labels[:, 0:num_codebooks]),
+ ],
+ dim=1,
+ )
+ dec_input_new = []
+ dec_labels_new = []
+ for _c in range(self.num_speech_codebooks):
+ st = num_codebooks - _c
+ et_decoder_input = dec_input_padded.shape[1] - _c
+ et_decoder_labels = dec_labels_padded.shape[1] - _c
+ dec_input_new.append(dec_input_padded[_c, st:et_decoder_input])
+ dec_labels_new.append(dec_labels_padded[_c, st:et_decoder_labels])
+ dec_input = torch.stack(dec_input_new, dim=0)
+ dec_labels = torch.stack(dec_labels_new, dim=0)
+ dec_input_len = torch.tensor(dec_input.shape[1]).long()
+ dec_labels_len = torch.tensor(dec_labels.shape[1]).long()
+
+ if self.encoder_type == "multi_transformer":
+ enc_len = question_tokens_len + virtual_tokens_len
+ else:
+ enc_len = context_tokens_len + question_tokens_len + virtual_tokens_len
+ # TODO: Remove hardcoding
+        start_of_question_offset = 4  # For both "Text to speech this" and "Phoneme TTS"
+ end_of_question_offset = 2
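+        # The beta-binomial prior is applied only over the transcript portion of the
+        # question; these offsets exclude the instruction prefix at the start and the
+        # sentinel/EOS tokens at the end of the encoder input.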
+ cross_attention_prior = torch.zeros(dec_labels_len, enc_len) + self.cross_attention_epsilon
+ if self.use_attention_prior:
+ prior_dec_len = dec_labels_len.item()
+ prior_dec_start_idx = 0
+ if self.context_conditioning == "decoder":
+ prior_dec_len = dec_labels_len.item() - (self.decoder_context_len + 1)
+ prior_dec_start_idx = self.decoder_context_len + 1
+ text_len = question_tokens_len.item() - start_of_question_offset - end_of_question_offset
+ audio_len = prior_dec_len
+ if self.beta_binomial_interpolator is not None:
+ cross_attention_question_prior = torch.from_numpy(self.beta_binomial_interpolator(audio_len, text_len))
+ else:
+ cross_attention_question_prior = torch.from_numpy(
+ beta_binomial_prior_distribution(
+ text_len,
+ audio_len,
+ scaling_factor=self.attention_prior_scaling_factor,
+ )
+ )
+ if self.encoder_type == "multi_transformer":
+ cross_attention_prior[
+ prior_dec_start_idx:, virtual_tokens_len + start_of_question_offset : -end_of_question_offset
+ ] = cross_attention_question_prior
+ else:
+ cross_attention_prior[
+ prior_dec_start_idx:,
+ virtual_tokens_len + context_tokens_len + start_of_question_offset : -end_of_question_offset,
+ ] = cross_attention_question_prior
+
+ if self.encoder_type == "multi_transformer":
+ context_and_question_len = [context_tokens_len, question_tokens_len]
+ else:
+ context_and_question_len = context_tokens_len + question_tokens_len
+ return (
+ taskname_id, # List, only one item. token id for "squad"
+ virtual_tokens, # Tensor, shape=(3,). token id for ['', '', '']
+ virtual_tokens_len, # tensor, 3
+ context_tokens_len, # tensor, 1
+            # tensor if single encoder and context_condition is encoder, shape=(self.num_speech_codebooks, context len + question len + 1 (sentinel) + 1 ([SEP])). only first row includes token ids while all other rows are all zeros (pad).
+ # list if multi-encoder and context_condition is encoder.
+ context_and_question_tokens,
+ # tensor scalar if single encoder and context_condition is decoder, 1 + (question len + 1 + 1).
+ # list if multi-encoder and context_condition is encoder.
+ context_and_question_len,
+ dec_input, # tensor, shape=(self.num_speech_codebooks, 1 CLS + context audio frame len + 1 pad + answer audio frame len), first column is [CLS_id, 0*7]^T
+ dec_input_len, # scalar tensor, 1 CLS + context audio frame len + 1 pad + answer audio frame len. 1 corresponds to CLS id
+ dec_labels, # tensor, shape=(self.num_speech_codebooks, context audio frame len + 1 pad + answer frame len + 1 SEP).
+ dec_labels_len, # tensor, context audio frame len + 1 PAD + answer frame len + 1 SEP. 1 corresponds to SEP id.
+ is_speech, # True
+ cross_attention_prior, # tensor, shape=(dec_labels_len, context_tokens_len + question_tokens_len + virtual_tokens_len).
+ lang.value, # int,
+ question_text, # str, answer transcript without question type (Phoneme TTS or Text to speech this).
+ )
+
+ def _truncate_input_speech(self, context_tokens, question_tokens, virtual_tokens):
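+        """Trim frames from the start of the speech context so the combined
+        virtual + context + question input fits within max_seq_length."""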
+ total_len = self._get_len(context_tokens, question_tokens, virtual_tokens)
+ context_len = self._get_element_len(context_tokens)
+ truncation_length = total_len - self.max_seq_length + 1
+ context_tokens[0] = context_tokens[0][:, min(truncation_length, context_len) :]
+ return context_tokens, question_tokens, virtual_tokens
+
+ def list_to_tensor(self, element, fill=False):
+ """
+ Convert list to tensor. The list might contain integers, 2D-tensors (speech tokens) and combination of two.
+ If all of them are ints, simply convert to tensor
+ If combination of 2D-tensor and ints. Convert int to the dimension of the tensor.
+ example: [2, 4, 5] -> torch.tensor([2, 4, 5])
+ example: [2, torch.tensor([[4, 5, 6], [6, 7, 8]])] -> torch.tensor( [[-1, 4, 5, 6], [2, 6, 7, 8]] )
+ """
+ ret, ln = None, None
+ if element is None:
+ return ret, ln
+
+ max_len = max([1 if isinstance(item, int) else len(item) for item in element])
+ if max_len == 1:
+ ret = torch.as_tensor(element).long()
+ ln = torch.tensor(ret.size()[0]).long()
+ else:
+ ret = []
+ for e in element:
+ if isinstance(e, int):
+ tmp = torch.full((self.num_speech_codebooks, 1), e if fill else -1)
+ tmp[self.num_speech_codebooks - 1] = e
+ if self.add_special_tokens_to_only_first_codebook:
+ # Fill zeros in all other codebooks (to avoid out of range when getting embeddings)
+ tmp[1:] = 0
+ else:
+ tmp = e
+ ret.append(tmp)
+ ret = torch.cat(ret, dim=1)
+ ln = torch.tensor(ret.size()[1]).long()
+ return ret, ln
+
+ def _get_text_tokens(self, text):
+ input_ids = self.tokenizer.text_to_ids(text)
+ return input_ids
+
+ def _get_phoneme_tokens(self, text, lang="en"):
+ if self.english_only_model:
+ input_ids = self.phoneme_tokenizer.encode(text)
+ input_ids_adjusted = [_id + self.lm_vocab_size for _id in input_ids]
+ return input_ids_adjusted
+ else:
+ text = any_locale_text_preprocessing(text)
+ input_ids = self.g2p[lang](text)
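+            # Wrap each phoneme as "p{...}" before tokenizing; this assumes the
+            # tokenizer vocabulary contains these phoneme-wrapped pieces.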
+ input_ids_adjusted = []
+ for i in input_ids:
+ input_ids_adjusted.append(f"p{{{i}}}")
+ input_ids_adjusted = self.tokenizer.text_to_ids("".join(input_ids_adjusted))
+ return input_ids_adjusted
+
+ def _pad_wav_to_multiple(self, wav):
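+        """Right-pad a 1-D waveform with zeros so its length is a multiple of self.pad_multiple."""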
+ if self.pad_multiple > 1:
+ if wav.shape[0] % self.pad_multiple != 0:
+ wav = torch.cat(
+ [wav, torch.zeros(self.pad_multiple - wav.shape[0] % self.pad_multiple, dtype=torch.float)]
+ )
+ return wav
+
+ def _get_element_len(self, element):
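+        """Length of an element in tokens/frames: ints count as 1, 1-D tensors by
+        their length, and 2-D (codebook, time) tensors by their time dimension."""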
+ length = 0
+ if isinstance(element, list):
+ for e in element:
+ if isinstance(e, int):
+ length += 1
+ else:
+ if e.dim() > 1:
+ length += e.size()[1]
+ else:
+ length += e.size()[0]
+ else:
+ if element.dim() > 1:
+ length += element.size()[1]
+ else:
+ length += element.size()[0]
+ return length
+
+ def _get_len(self, context_tokens, question_tokens, virtual_tokens):
+ length = 0
+ length += self._get_element_len(context_tokens)
+ length += self._get_element_len(question_tokens)
+ length += self._get_element_len(virtual_tokens)
+ return length
+
+ def _load_audio(self, audio_filepath, dur=-1):
+ if self.segment_max_duration is not None and dur > 0 and dur > self.segment_max_duration:
+ # this case has been added for segmenting audio for speaker verification task of SSLDisentangler
+ n_segments = int(self.segment_max_duration * self.sample_rate)
+ features = AudioSegment.segment_from_file(
+ audio_filepath, target_sr=self.sample_rate, n_segments=n_segments, trim=self.trim
+ )
+
+ features = torch.tensor(features.samples)
+ if self.pad_multiple > 1:
+ features = self._pad_wav_to_multiple(features)
+ audio, audio_length = features, torch.tensor(features.shape[0]).long()
+ else:
+ features = self.featurizer.process(
+ audio_filepath,
+ trim=self.trim,
+ trim_ref=self.trim_ref,
+ trim_top_db=self.trim_top_db,
+ trim_frame_length=self.trim_frame_length,
+ trim_hop_length=self.trim_hop_length,
+ )
+
+ if self.pad_multiple > 1:
+ features = self._pad_wav_to_multiple(features)
+
+ audio, audio_length = features, torch.tensor(features.shape[0]).long()
+
+ return audio, audio_length
+
+ def convert_audio(self, audio, sample_rate, target_sample_rate, target_channels):
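+        """Reshape mono/stereo audio to (batch, channels, time) and expand it to the
+        target channel count; the sample-rate arguments are currently unused (no
+        resampling is performed)."""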
+ if audio.dim() == 1:
+ audio = audio.unsqueeze(0).unsqueeze(0)
+ assert audio.shape[1] in [1, 2], "Audio must be mono or stereo."
+ # assert sample_rate == target_sample_rate, "sample rate of FastPitch and Encodec model has to be same"
+ if target_channels == 2:
+ *shape, _, length = audio.shape
+ audio = audio.expand(*shape, target_channels, length)
+ return audio
+
+ def get_codec(self, audio):
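+        """Encode a waveform into codec tokens of shape (num_codebooks, T).
+        Assumes self.encodec_model has been attached to the dataset beforehand."""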
+ wav1 = self.convert_audio(audio, self.sample_rate, self.encodec_model.sample_rate, self.encodec_model.channels)
+ encoded_frames = self.encodec_model.encode(wav1)
+ codes = torch.cat([encoded[0] for encoded in encoded_frames], dim=-1)
+ return codes.squeeze(0)
+
+ def get_quantizer_codebook(self, reference_codec, reference_codec_length):
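+        """Sum the per-codebook quantizer embeddings of a codec sequence into a
+        single continuous representation of shape (128, T)."""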
+ out = torch.zeros((1, 128, reference_codec_length.item()))
+ for i in range(reference_codec.size()[0]):
+ out += self.encodec_model.quantizer.vq.layers[i].decode(reference_codec[i, :].unsqueeze(0))
+ return out.squeeze(0)
+
+ def _get_speech_tokens(self, audio_filepath, dur=-1):
+ # Let's keep audio name and all internal directories in rel_audio_path_as_text_id to avoid any collisions
+ rel_audio_path = Path(audio_filepath).relative_to(self.base_data_dir).with_suffix("")
+ rel_audio_path_as_text_id = str(rel_audio_path).replace("/", "_")
+
+ # Load audio features
+ audio, audio_length = self._load_audio(audio_filepath, dur)
+
+ # Convert to codes
+ codec_path = self.codec_folder / f"{rel_audio_path_as_text_id}.pt"
+
+ if codec_path.exists():
+ try:
+ codec_codes = torch.load(codec_path).long()
+ except Exception as e:
+ print(f"[ERROR IN LOADING {codec_path}] e")
+ codec_codes = self.get_codec(audio).long()
+ torch.save(codec_codes, codec_path)
+ else:
+ codec_codes = self.get_codec(audio).long()
+ torch.save(codec_codes, codec_path)
+
+ # Convert codes to codes corresponding to megatron embedding layer
+ codec_codes[0] = (codec_codes[0] + self.speech_offset).long()
+
+ return codec_codes
+
+ def _get_tokens(self, doc, field, field_data):
+ if self.context_slice_method == "random":
+ # During training, we want a random slice of the context
+            rng = random.Random()  # fresh, unseeded generator, independent of any global random seed
+ elif self.context_slice_method == "fixed":
+ # During inference, we want a fixed slice of the context
+ rng = random
+ else:
+ raise ValueError(f"Invalid context_slice_method {self.context_slice_method}")
+ if f"{field}_type" not in doc.keys():
+ field_tokens = self._get_text_tokens(field_data.strip(" ")) # list of ids
+ elif doc[f"{field}_type"] == 'TEXT':
+ _text = field_data.strip(" ")
+ if _text.startswith("Phoneme TTS"):
+ lang = doc.get("lang", "en")
+ instruction_tokens = self._get_text_tokens("Phoneme TTS")
+ field_tokens = self._get_phoneme_tokens(_text[len("Phoneme TTS") :].strip(), lang=lang)
+ field_tokens = instruction_tokens + field_tokens
+ elif _text.startswith("Edit Speech"):
+ # Always use phoneme tokenizer for edit speech
+ instruction_tokens = self._get_text_tokens("Edit Speech")
+ field_tokens = self._get_phoneme_tokens(_text[len("Edit Speech") :].strip())
+ field_tokens = instruction_tokens + field_tokens
+ elif _text.startswith("TEXT CONTEXT:"):
+ # Speaker id conditioning
+ field_tokens = self._get_text_tokens(_text)
+ # pad field tokens to fixed length
+ # assert self.context_duration_min == self.context_duration_max, "TEXT CONTEXT only supports fixed context duration"
+ # To keep context length the same for audio or tex context
+ # _fixed_context_len = int(self.context_duration_min * self.codebook_fps)
+ field_tokens = field_tokens + [self.tokenizer.eos_id]
+ else:
+ # if starts with Text to speech this
+ field_tokens = self._get_text_tokens(field_data.strip(" ")) # list of ids
+ elif doc[f"{field}_type"] == 'SPEECH':
+ dur = -1
+ if f"{field}_duration" in doc:
+ dur = doc[f"{field}_duration"]
+ field_tokens = self._get_speech_tokens(field_data, dur) # list of ids
+ if not isinstance(field_tokens, list):
+ field_tokens = [field_tokens]
+ elif doc[f"{field}_type"] == 'AUDIOCODEC':
+ reference_codec_paths = field_data.split(";")
+ reference_codec_path = rng.choice(reference_codec_paths)
+ if self.codec_folder is not None:
+ reference_codec_path = self.codec_folder / reference_codec_path
+ field_tokens = torch.load(reference_codec_path).long()
+ field_tokens[0] = (field_tokens[0] + self.speech_offset).long()
+ field_tokens = [field_tokens]
+ # print("AUDIOCODEC", field_tokens.shape)
+ elif doc[f"{field}_type"] == 'REFSPEAKERCODEC':
+ reference_codec_paths = field_data.split(";")
+ reference_codec_path = rng.choice(reference_codec_paths)
+ if self.codec_folder is not None:
+ reference_codec_path = self.codec_folder / reference_codec_path
+ field_tokens = torch.load(reference_codec_path).long()
+ field_tokens[0] = (field_tokens[0] + self.speech_offset).long()
+ _min_len = int(self.context_duration_min * self.codebook_fps)
+ _max_len = int(self.context_duration_max * self.codebook_fps)
+ reference_codec_len = rng.randint(_min_len, _max_len)
+ reference_codec_len = min(reference_codec_len, field_tokens.shape[1])
+ si = rng.randint(0, field_tokens.shape[1] - reference_codec_len)
+ field_tokens = field_tokens[:, si : si + reference_codec_len]
+ if self.context_pattern == "delay_parallel":
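+                # Same delay pattern as delay_parallel answers: codebook c is
+                # delayed by c frames relative to codebook 0.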
+ field_tokens = torch.cat(
+ [
+ torch.zeros(self.num_speech_codebooks, self.num_speech_codebooks).long(),
+ field_tokens,
+ torch.zeros(self.num_speech_codebooks, self.num_speech_codebooks).long(),
+ ],
+ dim=1,
+ )
+ new_field_tokens = []
+ for _c in range(self.num_speech_codebooks):
+ st = self.num_speech_codebooks - _c
+ et = field_tokens.shape[1] - _c
+ new_field_tokens.append(field_tokens[_c, st:et])
+ field_tokens = torch.stack(new_field_tokens, dim=0)
+ field_tokens = [field_tokens]
+ elif doc[f"{field}_type"] == 'DUMMYCONTEXT':
+ field_tokens = torch.zeros(self.num_speech_codebooks, 1).long()
+ return [field_tokens]
+ elif doc[f"{field}_type"] == 'CONTEXTANSWER':
+ # Both Context and Answer are in the field
+ context_info, answer_codec_path = field_data.split(";")
+ if self.codec_folder is not None:
+ context_codec_path = self.codec_folder / context_info
+ answer_codec_path = self.codec_folder / answer_codec_path
+ if context_info.startswith("TEXT CONTEXT:"):
+ context_tokens = self._get_text_tokens(context_info.strip(" "))
+ # pad field tokens to fixed length
+ assert (
+ self.context_duration_min == self.context_duration_max
+ ), "TEXT CONTEXT only supports fixed context duration"
+ _fixed_context_len = int(self.context_duration_min * self.codebook_fps)
+ context_tokens = context_tokens + [self.tokenizer.pad_id] * (_fixed_context_len - len(context_tokens))
+
+ answer_tokens = torch.load(answer_codec_path).long()
+ answer_tokens[0] = (answer_tokens[0] + self.speech_offset).long()
+ field_tokens = context_tokens + [self.tokenizer.pad_id] + [answer_tokens]
+ else:
+ context_tokens = torch.load(context_codec_path).long()
+ context_tokens[0] = (context_tokens[0] + self.speech_offset).long()
+ assert (
+ self.context_duration_min == self.context_duration_max
+ ), "CONTEXTANSWER only supports fixed context duration"
+ reference_codec_len = int(self.context_duration_min * self.codebook_fps)
+ if context_tokens.shape[1] < reference_codec_len:
+ # Repeat the context to match the reference_codec_len
+ context_tokens = torch.cat(
+ [context_tokens] * (reference_codec_len // context_tokens.shape[1] + 1), dim=1
+ )
+ assert (
+ context_tokens.shape[1] >= reference_codec_len
+ ), "CONTEXTANSWER context duration is less than min duration {} {} {}".format(
+ context_tokens.shape[1], reference_codec_len, context_codec_path
+ )
+ si = rng.randint(0, context_tokens.shape[1] - reference_codec_len)
+ context_tokens = context_tokens[:, si : si + reference_codec_len]
+
+ answer_tokens = torch.load(answer_codec_path).long()
+ answer_tokens[0] = (answer_tokens[0] + self.speech_offset).long()
+ pad_tokens = torch.zeros(self.num_speech_codebooks, 1).long()
+ # padding between context and answer
+ field_tokens = torch.cat([context_tokens, pad_tokens, answer_tokens], dim=1)
+ field_tokens = [field_tokens]
+ elif doc[f"{field}_type"] == 'SEPARATIONCODECS':
+ mixed_codec_path, reference_codec_paths = field_data.split(",")
+ reference_codec_paths = reference_codec_paths.split(";")
+ reference_codec_path = rng.choice(reference_codec_paths)
+ mixed_codec = torch.load(mixed_codec_path).long()
+ reference_codec = torch.load(reference_codec_path).long()
+ reference_codec_len = rng.randint(240, 400)
+ reference_codec = reference_codec[:, :reference_codec_len]
+ # MIXED AUDIO AND REF AUDIO ARE SEPARATED BY 8 TIMESTEPS OF 1023 TOKENS IN ALL CODEBOOKS
+ mask_tokens = (torch.ones(self.num_speech_codebooks, self.num_speech_codebooks) * 1023).long()
+ field_tokens = torch.cat([mixed_codec, mask_tokens, reference_codec], dim=1)
+ field_tokens[0] = (field_tokens[0] + self.speech_offset).long()
+ field_tokens = [field_tokens]
+ elif doc[f"{field}_type"] == 'EDITINGCODECS':
+ reference_audio_path = field_data
+ reference_codec = torch.load(reference_audio_path).long()
+            assert reference_codec.shape[1] > 80  # ensure reference audio is at least 1 second
+ mask_len = rng.randint(40, 320) # ~0.5 second to 4 seconds
+ mask_len = min(mask_len, reference_codec.shape[1] - 80)
+ mask_start = rng.randint(0, reference_codec.shape[1] - mask_len)
+ mask_end = mask_start + mask_len
+ mask_tokens = (torch.ones(self.num_speech_codebooks, self.num_speech_codebooks) * 1023).long()
+ seg1 = reference_codec[:, :mask_start]
+ seg2 = reference_codec[:, mask_end:]
+ field_tokens = torch.cat([seg1, mask_tokens, seg2], dim=1)
+ # MISSING AUDIO IS REPLACED WITH 8 TIMESTEPS OF 1023 TOKENS IN ALL CODEBOOKS
+ field_tokens[0] = (field_tokens[0] + self.speech_offset).long()
+ field_tokens = [field_tokens]
+ else:
+ raise Exception(f"{field}_type not recognized")
+ return field_tokens
+
+ def _insert_data_in_template(self, prompt_template_fields, doc, answer_field):
+ """Format the input example according to the template"""
+ out_dict = {}
+        for field in prompt_template_fields:
+            # Skip the answer field ({label} / {answer}) and any field absent from
+            # the doc (e.g. {answer} during inference), leaving that slot blank.
+            if field == answer_field or field not in doc.keys():
+                continue
+            field_data = doc[field]
+            if f"{field}_type" not in doc.keys():
+                raise Exception(f"{field}_type does not exist in doc")
+            out_dict[field] = self._get_tokens(doc, field, field_data)
+        return out_dict
+
+    def get_position_ids(self, virtual_token, context_and_question):
+        enc_input = []
+        enc_input.append(virtual_token)
+        if context_and_question.dim() > 2:
+            enc_input.append(context_and_question[:, 0, :])
+        else:
+            enc_input.append(context_and_question)
+
+        enc_input = torch.cat(enc_input, dim=1)
+
+        enc_input_p = enc_input[:, 0, :] if enc_input.dim() == 3 else enc_input
+        return build_position_ids(enc_input_p).contiguous()
+
+ def collate_fn(self, batch):
+ """Prepares enc_input, dec_input, labels, loss_mask, enc_mask, dec_mask, position_ids, taskname_ids for global batch"""
+
+ data_dict = self.pad_batch_and_build_loss_mask(batch)
+
+ if self.encoder_type == "multi_transformer":
+ position_ids = [
+ self.get_position_ids(data_dict['virtual_tokens'], data_dict['context_and_question_tokens'][0]),
+ self.get_position_ids(data_dict['virtual_tokens'], data_dict['context_and_question_tokens'][1]),
+ ]
+ else:
+ position_ids = self.get_position_ids(data_dict['virtual_tokens'], data_dict['context_and_question_tokens'])
+
+ return (
+ data_dict['virtual_tokens'],
+ data_dict['context_and_question_tokens'],
+ data_dict['enc_mask'],
+ data_dict['dec_input'],
+ data_dict['dec_input_mask'],
+ data_dict['dec_labels'],
+ data_dict['dec_labels_mask'],
+ position_ids,
+ data_dict['taskname_id'],
+ data_dict['speech_mask'],
+ data_dict['context_and_question_tokens_lens'],
+ data_dict['cross_attention_prior'],
+ data_dict['text_limits'],
+ data_dict['lang'],
+ data_dict['question_texts'],
+ )
+
+ def pad_batch_and_build_loss_mask(self, batch):
+ """Pad enc_input, dec_input, labels in batch to max batch length while building loss_mask, enc_mask, and dec_mask"""
+ (
+ taskname_ids,
+ _,
+ virtual_tokens_len,
+ _,
+ _,
+ context_and_question_tokens_len,
+ _,
+ dec_input_len,
+ _,
+ dec_labels_len,
+ _,
+ _,
+ _,
+ question_texts,
+ ) = zip(*batch)
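+        # The tuple above is unpacked in the same order as the 14-tuple returned by
+        # __getitem__; only the length entries, taskname_ids and question_texts are needed here.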
+
+ taskname_ids = self.pad_taskname_ids(taskname_ids)
+
+ max_virtual_tokens_len = max(virtual_tokens_len).item() if virtual_tokens_len is not None else 0
+ if isinstance(virtual_tokens_len, tuple):
+ virtual_tokens_len = torch.stack(virtual_tokens_len)
+ virtual_mask = get_mask_from_lengths(virtual_tokens_len)
+
+ if self.encoder_type == "multi_transformer":
+ max_context_len = (
+ max(_c[0] for _c in context_and_question_tokens_len)
+ if context_and_question_tokens_len is not None
+ else 0
+ )
+ max_question_len = (
+ max(_c[1] for _c in context_and_question_tokens_len)
+ if context_and_question_tokens_len is not None
+ else 0
+ )
+ max_context_and_question_tokens_len = [max_context_len, max_question_len]
+ context_len = torch.stack([_c[0] for _c in context_and_question_tokens_len])
+ question_len = torch.stack([_c[1] for _c in context_and_question_tokens_len])
+ context_mask = get_mask_from_lengths(context_len)
+ question_mask = get_mask_from_lengths(question_len)
+ context_and_question_tokens_len = [context_len, question_len]
+ context_and_question_mask = [context_mask, question_mask]
+ enc_mask = [
+ torch.cat([virtual_mask, context_and_question_mask[0]], dim=1),
+ torch.cat([virtual_mask, context_and_question_mask[1]], dim=1),
+ ]
+ else:
+ max_context_and_question_tokens_len = (
+ max(context_and_question_tokens_len).item() if context_and_question_tokens_len is not None else 0
+ )
+ if isinstance(context_and_question_tokens_len, tuple):
+ context_and_question_tokens_len = torch.stack(context_and_question_tokens_len)
+ context_and_question_mask = get_mask_from_lengths(context_and_question_tokens_len)
+ enc_mask = torch.cat([virtual_mask, context_and_question_mask], dim=1)
+
+ max_dec_input_len = max(dec_input_len).item() if dec_input_len is not None else 0
+ max_dec_labels_len = max(dec_labels_len).item() if dec_labels_len is not None else 0
+
+ (
+ virtual_tokens_list,
+ context_question_tokens_list,
+ dec_input_list,
+ dec_input_mask_list,
+ dec_labels_list,
+ dec_labels_mask_list,
+ speech_mask_list,
+ cross_attention_prior_list,
+ text_limits,
+ lang_list,
+ ) = (
+ [],
+ [],
+ [],
+ [],
+ [],
+ [],
+ [],
+ [],
+ [],
+ [],
+ )
+
+ for i, sample_tuple in enumerate(batch):
+ (
+ _,
+ virtual_token,
+ virtual_token_len,
+ context_token_len,
+ context_and_question_token,
+ context_and_question_token_len,
+ dec_input,
+ dec_input_len,
+ dec_label,
+ dec_label_len,
+ is_speech,
+ cross_attention_prior,
+ lang,
+ _,
+ ) = sample_tuple
+
+ virtual_tokens_list.append(
+ general_padding(
+ virtual_token, virtual_token_len.item(), max_virtual_tokens_len, pad_value=self.tokenizer.pad_id
+ )
+ )
+
+ if self.encoder_type == "multi_transformer":
+ context_tokens_padded = general_padding(
+ context_and_question_token[0],
+ context_and_question_token_len[0].item(),
+ max_context_and_question_tokens_len[0],
+ pad_value=self.tokenizer.pad_id,
+ )
+ if len(context_tokens_padded.shape) < 2:
+ context_tokens_padded = pad_text_to_speech_dims(
+ context_tokens_padded, self.tokenizer.pad_id, self.num_speech_codebooks - 1
+ )
+ question_tokens_padded = general_padding(
+ context_and_question_token[1],
+ context_and_question_token_len[1].item(),
+ max_context_and_question_tokens_len[1],
+ pad_value=self.tokenizer.pad_id,
+ )
+ if len(question_tokens_padded.shape) < 2:
+ question_tokens_padded = pad_text_to_speech_dims(
+ question_tokens_padded, self.tokenizer.pad_id, self.num_speech_codebooks - 1
+ )
+ context_question_tokens_list.append([context_tokens_padded, question_tokens_padded])
+ else:
+ # This means context and questions are concatenated together
+ context_tokens_padded = general_padding(
+ context_and_question_token,
+ context_and_question_token_len.item(),
+ max_context_and_question_tokens_len,
+ pad_value=self.tokenizer.pad_id,
+ )
+ if len(context_tokens_padded.shape) < 2:
+ context_tokens_padded = pad_text_to_speech_dims(
+ context_tokens_padded, self.tokenizer.pad_id, self.num_speech_codebooks - 1
+ )
+ context_question_tokens_list.append(context_tokens_padded)
+
+ if max_dec_input_len > 0:
+ dec_input_padded = general_padding(
+ dec_input, dec_input_len.item(), max_dec_input_len, pad_value=self.tokenizer.pad_id
+ )
+ if len(dec_input_padded.shape) < 2:
+ dec_input_padded = pad_text_to_speech_dims(
+ dec_input_padded, self.tokenizer.pad_id, self.num_speech_codebooks - 1
+ )
+ dec_input_list.append(dec_input_padded)
+ dec_mask = (
+ torch.as_tensor(([1] * dec_input_len) + ([0] * (max_dec_input_len - dec_input_len)))
+ .long()
+ .contiguous()
+ )
+ dec_input_mask_list.append(dec_mask)
+ speech_mask = dec_mask if is_speech else torch.zeros(dec_mask.shape)
+ speech_mask_list.append(speech_mask)
+
+ if max_dec_labels_len > 0:
+ loss_mask = (
+ torch.as_tensor(([1] * dec_label_len) + ([0] * (max_dec_labels_len - dec_label_len)))
+ .long()
+ .contiguous()
+ )
+ dec_label_padded = general_padding(
+ dec_label, dec_label_len.item(), max_dec_labels_len, pad_value=self.tokenizer.pad_id
+ )
+ if len(dec_label_padded.shape) < 2:
+ dec_label_padded = pad_text_to_speech_dims(
+ dec_label_padded, self.tokenizer.pad_id, self.num_speech_codebooks - 1
+ )
+ dec_labels_list.append(dec_label_padded)
+ dec_labels_mask_list.append(loss_mask)
+
+ _p0 = max_dec_labels_len - dec_label_len
+ if self.encoder_type == "multi_transformer":
+ _p1 = (
+ max_virtual_tokens_len
+ + max_context_and_question_tokens_len[1]
+ - context_and_question_token_len[1]
+ - virtual_token_len
+ )
+ else:
+ _p1 = (
+ max_virtual_tokens_len
+ + max_context_and_question_tokens_len
+ - context_and_question_token_len
+ - virtual_token_len
+ )
+
+ cross_attention_prior_padded = torch.nn.functional.pad(
+ cross_attention_prior,
+ pad=(0, _p1, 0, _p0),
+ mode="constant",
+ value=1,
+ )
+ cross_attention_prior_list.append(cross_attention_prior_padded)
+
+ if self.encoder_type == "multi_transformer":
+ _start_of_text_id = virtual_token_len + 4
+ _end_of_text_id = _start_of_text_id + (
+ context_and_question_token_len[1] - 2 - 4
+ ) # -2 for some end tokens
+ else:
+ _start_of_text_id = virtual_token_len + context_token_len + 4
+ _end_of_text_id = _start_of_text_id + (
+ context_and_question_token_len - context_token_len - 2 - 4
+ ) # -2 for some end tokens
+ text_limits.append(torch.tensor([_start_of_text_id.item(), _end_of_text_id.item()]))
+ lang_list.append(torch.tensor(lang))
+
+ dec_labels_mask = torch.stack(dec_labels_mask_list) if len(dec_labels_mask_list) > 0 else None
+ if dec_labels_mask is not None and self.context_conditioning == 'decoder':
+ # Mask out context tokens from loss computation. +1 for bos/pad in the beginning
+ dec_labels_mask[:, : self.decoder_context_len + 1] = 0
+
+ if self.encoder_type == "multi_transformer":
+ context_batch = torch.stack([c[0] for c in context_question_tokens_list])
+ question_batch = torch.stack([c[1] for c in context_question_tokens_list])
+ context_and_question_tokens = [context_batch, question_batch]
+ else:
+ context_and_question_tokens = torch.stack(context_question_tokens_list)
+
+ data_dict = {
+ "taskname_id": taskname_ids,
+ "virtual_tokens": torch.stack(virtual_tokens_list),
+ "context_and_question_tokens": context_and_question_tokens,
+ "enc_mask": enc_mask,
+ "dec_input": torch.stack(dec_input_list) if len(dec_input_list) > 0 else None,
+ "dec_input_mask": torch.stack(dec_input_mask_list) if len(dec_input_mask_list) > 0 else None,
+ "dec_labels": torch.stack(dec_labels_list) if len(dec_labels_list) > 0 else None,
+ "dec_labels_mask": dec_labels_mask,
+ "speech_mask": torch.stack(speech_mask_list) if len(speech_mask_list) > 0 else None,
+ "context_and_question_tokens_lens": context_and_question_tokens_len,
+ "cross_attention_prior": (
+ torch.stack(cross_attention_prior_list) if len(cross_attention_prior_list) > 0 else None
+ ),
+ "text_limits": (
+ torch.stack(text_limits) if len(text_limits) > 0 else None
+ ), # tensor, valid range of answer transcripts without virtual/instruction/end tokens.
+ "lang": torch.stack(lang_list),
+ "question_texts": question_texts,
+ }
+
+ return data_dict
diff --git a/nemo/collections/tts/data/speechllm/t5_speechllm_tarred_dataset.py b/nemo/collections/tts/data/speechllm/t5_speechllm_tarred_dataset.py
new file mode 100644
index 000000000000..9b0a4f8d06c2
--- /dev/null
+++ b/nemo/collections/tts/data/speechllm/t5_speechllm_tarred_dataset.py
@@ -0,0 +1,986 @@
+# Copyright (c) 2024, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import io
+import os
+import random
+from dataclasses import dataclass, field
+from typing import List, Optional, Union
+
+import numpy as np
+import torch
+import webdataset as wd
+from omegaconf import OmegaConf
+
+from nemo.collections.asr.data.audio_to_text import (
+ _speech_collate_fn,
+ cache_datastore_manifests,
+ expand_sharded_filepaths,
+ shard_manifests_if_needed,
+)
+from nemo.collections.common.parts.preprocessing import collections
+from nemo.collections.nlp.models.language_modeling.megatron_t5_model import T5Sentinel
+from nemo.collections.nlp.modules.common import VirtualPromptSource
+from nemo.collections.nlp.modules.common.megatron.utils import build_position_ids
+from nemo.collections.tts.parts.utils.helpers import get_mask_from_lengths
+from nemo.collections.tts.parts.utils.tts_dataset_utils import beta_binomial_prior_distribution, general_padding
+from nemo.core.classes import IterableDataset
+from nemo.utils import logging
+
+__all__ = ['T5SpeechLMTarredDataset']
+
+
+@dataclass
+class G2PConfig:
+ _target_: str = "nemo.collections.tts.g2p.models.en_us_arpabet.EnglishG2p"
+ phoneme_dict: str = "scripts/tts_dataset_files/cmudict-0.7b_nv22.10"
+ heteronyms: str = "scripts/tts_dataset_files/heteronyms-052722"
+ phoneme_probability: float = 0.5
+
+
+@dataclass
+class TextTokenizer:
+ _target_: str = "nemo.collections.common.tokenizers.text_to_speech.tts_tokenizers.EnglishPhonemesTokenizer"
+ punct: bool = True
+ stresses: bool = True
+ chars: bool = True
+ apostrophe: bool = True
+ pad_with_space: bool = True
+ add_blank_at: bool = True
+    g2p: G2PConfig = field(default_factory=G2PConfig)  # default_factory avoids a shared mutable default
+
+
+@dataclass
+class TextTokenizerConfig:
+    text_tokenizer: TextTokenizer = field(default_factory=TextTokenizer)
+
+
+def _get_default_text_tokenizer_conf():
+ text_tokenizer: TextTokenizerConfig = TextTokenizerConfig()
+ return OmegaConf.create(OmegaConf.to_yaml(text_tokenizer))
+
+
+def pad_text_to_speech_dims(text_tensor, pad_id):
+ token_len = text_tensor.shape[0]
+ empty_padding = torch.ones((7, token_len), dtype=text_tensor.dtype, device=text_tensor.device) * pad_id
+ return torch.cat((text_tensor.unsqueeze(0), empty_padding), dim=0)
+
+
+class InstructionTuningManifestProcessor:
+ """
+ Class that processes a manifest json file containing paths to audio files, transcripts, and durations (in seconds).
+ Each new line is a different sample. Example below:
+ {"audio_filepath": "/path/to/audio.wav", "text_filepath": "/path/to/audio.txt", "duration": 23.147}
+ ...
+ {"audio_filepath": "/path/to/audio.wav", "text": "the transcription", "offset": 301.75, "duration": 0.82, "utt":
+ "utterance_id", "ctm_utt": "en_4156", "side": "A"}
+ Args:
+ manifest_filepath: Path to manifest json as described above. Can be comma-separated paths.
+        max_duration: If audio exceeds this length, do not include in dataset.
+        min_duration: If audio is less than this length, do not include in dataset.
+        max_seq_length: If the estimated total sequence length exceeds this, do not include in dataset.
+        max_utts: Limit number of utterances.
+        index_by_file_id: If True, index manifest entries by file ID (required for tarred datasets).
+        decoder_only_model: Passed through to the manifest collection.
+        use_phoneme_tokenizer: Passed through to the manifest collection.
+ """
+
+ def __init__(
+ self,
+ manifest_filepath: str,
+ max_duration: Optional[float] = None,
+ min_duration: Optional[float] = None,
+ max_seq_length: Optional[float] = None,
+ max_utts: int = 0,
+ index_by_file_id: bool = False,
+ decoder_only_model: bool = False,
+ use_phoneme_tokenizer: bool = False,
+ ):
+
+        self.collection = collections.InstructionTuningAudioText(
+ manifests_files=manifest_filepath,
+ min_duration=min_duration,
+ max_duration=max_duration,
+ max_seq_length=max_seq_length,
+ max_number=max_utts,
+ index_by_file_id=index_by_file_id,
+ decoder_only_model=decoder_only_model,
+ use_phoneme_tokenizer=use_phoneme_tokenizer,
+ )
+
+
+class _TarredInstructionTuningDataset(IterableDataset):
+ """
+    A Dataset similar to AudioToCharDataset/AudioToBPEDataset, but one that loads tarred audio files.
+ """
+
+ def __init__(
+ self,
+ audio_tar_filepaths: Union[str, List[str]],
+ manifest_filepath: str,
+ sample_rate: int,
+ shuffle_n: int = 0,
+ min_duration: Optional[float] = None,
+ max_duration: Optional[float] = None,
+ max_seq_length: Optional[float] = None,
+ shard_strategy: str = "scatter",
+ shard_manifests: bool = False,
+ global_rank: int = 0,
+ world_size: int = 0,
+ return_sample_id: bool = False,
+ decoder_only_model: bool = False,
+ use_phoneme_tokenizer: bool = False,
+ ):
+ self.shard_manifests = shard_manifests
+
+ # Shard manifests if necessary and possible and then expand the paths
+ manifest_filepath = shard_manifests_if_needed(
+ shard_manifests=shard_manifests,
+ shard_strategy=shard_strategy,
+ manifest_filepaths=manifest_filepath,
+ world_size=world_size,
+ global_rank=global_rank,
+ )
+
+ # If necessary, cache manifests from object store
+ cache_datastore_manifests(manifest_filepaths=manifest_filepath)
+
+ self.manifest_processor = InstructionTuningManifestProcessor(
+ manifest_filepath=manifest_filepath,
+ max_duration=max_duration,
+ min_duration=min_duration,
+ max_seq_length=max_seq_length,
+ max_utts=0,
+ index_by_file_id=True, # Must set this so the manifest lines can be indexed by file ID
+ decoder_only_model=decoder_only_model,
+ use_phoneme_tokenizer=use_phoneme_tokenizer,
+ )
+
+ self.len = self._compute_len()
+ self.return_sample_id = return_sample_id
+
+ audio_tar_filepaths = expand_sharded_filepaths(
+ sharded_filepaths=audio_tar_filepaths,
+ shard_strategy=shard_strategy,
+ world_size=world_size,
+ global_rank=global_rank,
+ )
+
+ if shuffle_n > 0:
+ # Only shuffle training data tar files
+ logging.info("Shuffling Tar files")
+ custom_rng = random.Random()
+ custom_rng.shuffle(audio_tar_filepaths)
+ logging.info("Done shuffling Tar files")
+ logging.info(audio_tar_filepaths[:10])
+
+ self.sample_rate = sample_rate
+
+ # Put together WebDataset
+ self._dataset = wd.WebDataset(urls=audio_tar_filepaths, nodesplitter=None)
+
+ if shuffle_n > 0:
+ self._dataset = self._dataset.shuffle(shuffle_n)
+ else:
+ logging.info("WebDataset will not shuffle files within the tar files.")
+
+ self._dataset = (
+ self._dataset.rename(key='__key__', answer='pt', context='context.pt')
+ .to_tuple('key', 'answer', 'context')
+ .pipe(self._filter)
+ .pipe(self._loop_offsets)
+ .map(f=self._build_sample)
+ )
+
+ def _filter(self, iterator):
+ """This function is used to remove samples that have been filtered out by ASRAudioText already.
+ Otherwise, we would get a KeyError as _build_sample attempts to find the manifest entry for a sample
+ that was filtered out (e.g. for duration).
+ Note that if using multi-GPU training, filtering may lead to an imbalance in samples in each shard,
+ which may make your code hang as one process will finish before the other.
+ """
+
+ class TarredAudioFilter:
+ def __init__(self, collection):
+ self.iterator = iterator
+ self.collection = collection
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ while True:
+ audio_filename, answer_bytes, context_bytes = next(self.iterator)
+ file_id, _ = os.path.splitext(os.path.basename(audio_filename))
+ if file_id in self.collection.mapping:
+ return audio_filename, answer_bytes, context_bytes
+
+ return TarredAudioFilter(self.manifest_processor.collection)
+
+ def _loop_offsets(self, iterator):
+ """This function is used to iterate through utterances with different offsets for each file."""
+
+ class TarredAudioLoopOffsets:
+ def __init__(self, collection):
+ self.iterator = iterator
+ self.collection = collection
+ self.current_fn = None
+ self.current_bytes = None
+ self.current_context_bytes = None
+ self.offset_id = 0
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ if self.current_fn is None:
+ self.current_fn, self.current_bytes, self.current_context_bytes = next(self.iterator)
+ self.offset_id = 0
+ else:
+ offset_list = self.collection.mapping[self.current_fn]
+ if len(offset_list) == self.offset_id + 1:
+ self.current_fn, self.current_bytes, self.current_context_bytes = next(self.iterator)
+ self.offset_id = 0
+ else:
+ self.offset_id += 1
+
+ return self.current_fn, self.current_bytes, self.current_context_bytes, self.offset_id
+
+ return TarredAudioLoopOffsets(self.manifest_processor.collection)
+
+ def _collate_fn(self, batch):
+ return _speech_collate_fn(batch)
+
+ def _build_sample(self, tup):
+ """Builds the training sample by combining the data from the WebDataset with the manifest info."""
+ audio_filename, encodec, ref_encodec, offset_id = tup
+ return audio_filename, encodec, ref_encodec, offset_id
+
+ def get_manifest_sample(self, sample_id):
+ return self.manifest_processor.collection[sample_id]
+
+ def __iter__(self):
+ return self._dataset.__iter__()
+
+ def _compute_len(self):
+ if self.shard_manifests and torch.distributed.is_available() and torch.distributed.is_initialized():
+ my_len = torch.tensor(len(self.manifest_processor.collection), dtype=torch.int32).cuda()
+ torch.distributed.all_reduce(my_len)
+ my_len = my_len.int()
+ logging.info(f'Sharded manifests: Total length: {my_len}')
+ else:
+ my_len = len(self.manifest_processor.collection)
+
+ return my_len
+
+ def __len__(self):
+ return self.len
+
+
+class T5SpeechLMTarredDataset(_TarredInstructionTuningDataset):
+ """
+ The dataset class for prompt-tuning or p-tuning pretrained T5 SpeechLM models.
+ """
+
+ def __init__(
+ self,
+ audio_tar_filepaths: Union[str, List[str]],
+ manifest_filepath: str,
+ tokenizer,
+ virtual_prompt_source: VirtualPromptSource,
+ task_templates: dict,
+ pseudo_tokens,
+        pad_token_id: int,
+ max_seq_length: int,
+ sample_rate: int,
+ shuffle_n: int = 0,
+ min_seq_length: int = 1,
+ add_bos: bool = False,
+ add_eos: bool = True,
+ for_train: bool = True,
+ decoder_starts_with_pad: bool = False,
+ add_eos_to_decoder_output: bool = True,
+ add_sentinel_to_input: bool = True,
+        ul2_prompt_token: Optional[str] = None,
+ segment_max_duration: Optional[int] = None,
+ trim: bool = False,
+ trim_ref: Optional[float] = None,
+ trim_top_db: Optional[int] = None,
+ trim_frame_length: Optional[int] = None,
+ trim_hop_length: Optional[int] = None,
+ pad_multiple: int = 1,
+ pitch_augment: bool = False,
+ speech_offset: Optional[int] = None,
+ train_task: Optional[str] = None,
+ seq_pattern: Optional[str] = "parallel",
+ shard_strategy: str = "scatter",
+ shard_manifests: bool = False,
+ global_rank: int = 0,
+ world_size: int = 0,
+ return_sample_id: bool = False,
+ decoder_only_model: bool = False,
+ use_phoneme_tokenizer: Optional[bool] = False,
+ lm_vocab_size: Optional[int] = None,
+ use_attention_prior: Optional[bool] = False,
+ attention_prior_scaling_factor: Optional[float] = 1.0,
+ cross_attention_epsilon: Optional[float] = 0.0,
+ num_speech_codebooks: Optional[int] = 8,
+ **kwargs,
+ ):
+ """
+ Only speech parameters are explained here.
+ segment_max_duration: Optional[int] = None, - Speech max segment duration
+ trim: bool = False, - speech parameter
+ trim_ref: Optional[float] = None, - speech parameter
+ trim_top_db: Optional[int] = None, - speech parameter
+ trim_frame_length: Optional[int] = None, - speech parameter
+ trim_hop_length: Optional[int] = None, - speech parameter
+ pad_multiple: int = 1, - speech parameter
+ pitch_augment: bool = False, - speech parameter
+        speech_offset: Optional[int] = None, - if speech tokens are used, add this offset to their indices to distinguish them from text tokens.
+ **kwargs,
+ """
+        # These attributes must be set before calling super().__init__(), because the parent constructor builds the sample pipeline that relies on them.
+ self.decoder_starts_with_pad = decoder_starts_with_pad
+ self.add_eos_to_decoder_output = add_eos_to_decoder_output
+ self.add_sentinel_to_input = add_sentinel_to_input
+ self.ul2_prompt_token = ul2_prompt_token
+ # Speech related variables
+ self.base_data_dir = None
+ self.segment_max_duration = segment_max_duration
+ self.sample_rate = sample_rate
+ self.pad_multiple = pad_multiple
+ self.pitch_augment = pitch_augment
+ self.trim = trim
+ self.trim_ref = trim_ref if trim_ref is not None else np.max
+ self.trim_top_db = trim_top_db if trim_top_db is not None else 60
+ self.trim_frame_length = trim_frame_length if trim_frame_length is not None else 2048
+ self.trim_hop_length = trim_hop_length if trim_hop_length is not None else 512
+ self.speech_offset = speech_offset if speech_offset is not None else 3
+ self.seq_pattern = seq_pattern
+ self.min_duration = kwargs.get('min_duration', 0.1)
+ self.max_duration = kwargs.get('max_duration', 20)
+ self.use_attention_prior = use_attention_prior
+ self.attention_prior_scaling_factor = attention_prior_scaling_factor
+ self.cross_attention_epsilon = cross_attention_epsilon # value of prior for context tokens (b/w 0 and 1)
+        assert 0.0 <= self.cross_attention_epsilon <= 1.0
+
+ self.train_task = train_task
+
+ # Initialized super part
+ self.tokenizer = tokenizer
+ self.virtual_prompt_source = virtual_prompt_source
+ self.task_templates = task_templates
+ self.pseudo_tokens = pseudo_tokens
+ self.pseudo_token_ids = set(self.tokenizer.tokens_to_ids(self.pseudo_tokens))
+ self.pad_token_id = pad_token_id
+ self.max_seq_length = max_seq_length
+ self.min_seq_length = min_seq_length
+ self.add_bos = add_bos
+ self.add_eos = add_eos
+ self.for_train = for_train
+ self.use_phoneme_tokenizer = use_phoneme_tokenizer
+ self.examples = []
+ self.lm_vocab_size = tokenizer.vocab_size if lm_vocab_size is None else lm_vocab_size
+ self.num_speech_codebooks = num_speech_codebooks
+
+ assert self.min_seq_length <= max_seq_length, "Min sequence length should be less than or equal to max"
+ assert self.max_seq_length > 0, "Max sequence length should be greater than 0"
+
+        self.context_length = kwargs.pop('context_length', None)  # only used in the GPT dataset at the moment
+
+ logging.info("Loading and tokenizing dataset ... ")
+
+ super().__init__(
+ audio_tar_filepaths=audio_tar_filepaths,
+ manifest_filepath=manifest_filepath,
+ sample_rate=sample_rate,
+ shuffle_n=shuffle_n,
+ min_duration=self.min_duration,
+ max_duration=self.max_duration,
+ max_seq_length=max_seq_length,
+ shard_strategy=shard_strategy,
+ shard_manifests=shard_manifests,
+ global_rank=global_rank,
+ world_size=world_size,
+ return_sample_id=return_sample_id,
+ decoder_only_model=decoder_only_model,
+ use_phoneme_tokenizer=use_phoneme_tokenizer,
+ )
+
+ self.encodec, self.ref_encodec = None, None
+
+ def _insert_virtual_token_placeholders(self, input_example, virtual_token_splits):
+ """Insert the correct number of pseudo tokens at the <|VIRTUAL_PROMPT_n|> markers"""
+ total_inserted_tokens = 0
+
+ for idx in range(len(virtual_token_splits)):
+ split_start = total_inserted_tokens
+ split_end = total_inserted_tokens + virtual_token_splits[idx]
+ pseudo_tokens_for_split = "".join(self.pseudo_tokens[split_start:split_end])
+ input_example = input_example.replace(f'<|VIRTUAL_PROMPT_{idx}|>', pseudo_tokens_for_split)
+ total_inserted_tokens = split_end
+
+ return input_example
+
+ def pad_taskname_ids(self, taskname_ids):
+ # Pad taskname_ids to be the same length for the prompt encoder
+ if self.virtual_prompt_source == VirtualPromptSource.PROMPT_ENCODER:
+ max_taskname_length = max(len(ids) for ids in taskname_ids)
+ taskname_ids = [ids + [self.pad_token_id] * (max_taskname_length - len(ids)) for ids in taskname_ids]
+ taskname_ids = torch.tensor(taskname_ids)
+
+        # Task ids are just used to look up embeddings in the prompt table
+ elif self.virtual_prompt_source == VirtualPromptSource.NO_PROMPT:
+ taskname_ids = torch.tensor(taskname_ids)
+
+ return taskname_ids
+
+ def _build_sample(self, tup):
+ audio_filename, self.encodec, self.ref_encodec, offset_id = tup
+
+ file_id, _ = os.path.splitext(os.path.basename(audio_filename))
+ manifest_idx = self.manifest_processor.collection.mapping[file_id][offset_id]
+ manifest_entry = self.manifest_processor.collection[manifest_idx]
+ doc = {}
+ doc['context'] = manifest_entry.context
+ doc['context_type'] = manifest_entry.context_type
+ doc['context_duration'] = manifest_entry.context_duration
+ doc['answer'] = manifest_entry.answer
+ doc['answer_type'] = manifest_entry.answer_type
+ doc['answer_duration'] = manifest_entry.answer_duration
+ doc['question'] = manifest_entry.question
+ doc['question_type'] = manifest_entry.question_type
+
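+        # NOTE: the task template lookup is currently hardcoded to the "squad" template.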
+ taskname = "squad"
+ prompt_template = self.task_templates[taskname]["prompt_template"]
+ prompt_template_fields = self.task_templates[taskname]["prompt_template_fields"]
+ virtual_token_splits = self.task_templates[taskname]["virtual_token_splits"]
+ answer_field = self.task_templates[taskname]["answer_field"]
+
+ input_example = prompt_template
+
+ question_in_manifest = manifest_entry.question
+
+ # Format the input example according to the template
+ # Get context, question and answer codes in a dict.
+ input_dict = self._insert_data_in_template(input_example, prompt_template_fields, doc, answer_field)
+ context_tokens = input_dict['context']
+ question_tokens = input_dict['question']
+
+ # Logic to prune context
+ # In case of TTS task, the entire reference speech is not required, so we randomly select a portion
+ # of the reference audio.
+ # In case of Next token prediction, We want context[:T] to go in the encoder and context[T+1:] to be
+ # predicted by the decoder.
+ start_token_index = 0
+ end_token_index = -1
+ if "Text to speech this" in question_in_manifest:
+ total_context_len = context_tokens[0].size()[1]
+ reduced_len = min(
+ 400,
+ (
+ int(total_context_len * 0.2)
+ if total_context_len > 600
+ else int(total_context_len * random.uniform(0.2, 0.5))
+ ),
+ )
+ start_token_index = random.randint(
+ 0, total_context_len - reduced_len
+ ) # start index can be greater than 440
+ context_tokens[0] = context_tokens[0][
+ :, start_token_index : min(start_token_index + 440, start_token_index + reduced_len)
+ ]
+ elif "Next token prediction" in question_in_manifest:
+ total_context_len = context_tokens[0].size()[1]
+ end_token_index = int(total_context_len * random.uniform(0.01, 0.2))
+ context_tokens[0] = context_tokens[0][:, :end_token_index]
+
+ # Get virtual tokens
+ virtual_tokens = self._insert_virtual_token_placeholders(input_example.split(' ')[0], virtual_token_splits)
+
+        # A trick to align with the data format used in T5 pretraining
+        virtual_tokens = self.tokenizer.text_to_ids(virtual_tokens)
+ if self.add_sentinel_to_input:
+ question_tokens = question_tokens + self.tokenizer.text_to_ids(T5Sentinel.FIRST.value)
+
+ # Add BOS/EOS to the input of encoder if desired, adds EOS by default
+ if self.ul2_prompt_token is not None:
+ ul2_prompt_token_id = self.tokenizer.text_to_ids(self.ul2_prompt_token)
+ assert len(ul2_prompt_token_id) == 1
+ context_tokens = ul2_prompt_token_id + context_tokens
+ if self.add_bos:
+ context_tokens = [self.tokenizer.bos_id] + context_tokens
+ if self.add_eos:
+ question_tokens = question_tokens + [self.tokenizer.eos_id]
+
+ # Try to truncate input text to fit into the max sequence length
+ if self._get_len(context_tokens, question_tokens, virtual_tokens) > self.max_seq_length:
+ context_tokens, question_tokens, virtual_tokens = self._truncate_input_speech(
+ context_tokens, question_tokens, virtual_tokens
+ )
+
+ virtual_tokens, virtual_tokens_len = self.list_to_tensor(virtual_tokens)
+ context_tokens, context_tokens_len = self.list_to_tensor(context_tokens)
+ question_tokens, question_tokens_len = self.list_to_tensor(question_tokens)
+
+ if doc["question_type"] != "SPEECH" and doc["context_type"] == "SPEECH":
+ question_tokens = pad_text_to_speech_dims(question_tokens, self.tokenizer.pad_id)
+ if doc["context_type"] != "SPEECH" and doc["question_type"] == "SPEECH":
+ context_tokens = pad_text_to_speech_dims(context_tokens, self.tokenizer.pad_id)
+ context_tokens = context_tokens.to(question_tokens.device)
+ context_and_question_tokens = torch.cat([context_tokens, question_tokens], dim=1)
+
+ # get answer ids
+ if answer_field in doc.keys(): # training and validation
+ answer_ids = self._get_tokens(doc, answer_field, doc[answer_field])
+ if end_token_index > -1:
+ answer_ids[0] = answer_ids[0][:, end_token_index:]
+
+ if self.decoder_starts_with_pad:
+ answer_text_ids = [self.tokenizer.pad_id]
+ else:
+ answer_text_ids = [self.tokenizer.bos_id]
+
+ answer_text_ids += answer_ids
+
+ if self.add_eos_to_decoder_output:
+ answer_text_ids += [self.tokenizer.eos_id]
+ else:
+ answer_text_ids += self.tokenizer.text_to_ids(T5Sentinel.END.value)
+
+ # Skip example if the final length doesn't fit length requirements even after truncation
+ if (
+ self.min_seq_length
+ <= self._get_element_len(context_and_question_tokens) + self._get_element_len(virtual_tokens)
+ <= self.max_seq_length
+ and self.min_seq_length <= self._get_element_len(answer_text_ids) <= self.max_seq_length
+ ):
+ if self.virtual_prompt_source == VirtualPromptSource.PROMPT_ENCODER:
+ taskname_id = self.tokenizer.text_to_ids(taskname)
+ elif (
+ self.virtual_prompt_source == VirtualPromptSource.NO_PROMPT
+ ): # TODO (@adithyare) this class and GPTPromptLearningDataset should be merged.
+ taskname_id = -1
+ else:
+ raise ValueError("Invalid virtual prompt source specified")
+
+ dec_input = None
+ dec_labels = None
+
+ if answer_field in doc.keys(): # training and validation
+ dec_input = answer_text_ids[:-1]
+ dec_labels = answer_text_ids[1:]
+
+ dec_input, dec_input_len = self.list_to_tensor(dec_input, True)
+ dec_labels, dec_labels_len = self.list_to_tensor(dec_labels, True)
+            is_speech = doc["answer_type"] == "SPEECH"
+ if is_speech:
+ assert dec_input.dim() == 2 and dec_labels.dim() == 2
+ if self.seq_pattern == "delay_parallel":
+ num_codebooks = dec_input.shape[0]
+ dec_input_padded = torch.cat(
+ [
+ torch.zeros_like(dec_input[:, 0:num_codebooks]),
+ dec_input,
+ torch.zeros_like(dec_input[:, 0:num_codebooks]),
+ ],
+ dim=1,
+ )
+ dec_labels_padded = torch.cat(
+ [
+ torch.zeros_like(dec_labels[:, 0:num_codebooks]),
+ dec_labels,
+ torch.zeros_like(dec_labels[:, 0:num_codebooks]),
+ ],
+ dim=1,
+ )
+ dec_input_new = []
+ dec_labels_new = []
+ for _c in range(self.num_speech_codebooks):
+ st = num_codebooks - _c
+ et_decoder_input = dec_input_padded.shape[1] - _c
+ et_decoder_labels = dec_labels_padded.shape[1] - _c
+ dec_input_new.append(dec_input_padded[_c, st:et_decoder_input])
+ dec_labels_new.append(dec_labels_padded[_c, st:et_decoder_labels])
+ dec_input = torch.stack(dec_input_new, dim=0)
+ dec_labels = torch.stack(dec_labels_new, dim=0)
+ dec_input_len = torch.tensor(dec_input.shape[1]).long()
+ dec_labels_len = torch.tensor(dec_labels.shape[1]).long()
+
+ enc_len = context_tokens_len + question_tokens_len + virtual_tokens_len
+ # TODO: Remove hardcoding
+ num_question_offset = 4 # For "Text to Speech this"
+
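+            # The attention prior biases each decoder step toward question-token positions
+            # via a beta-binomial alignment; virtual-token and context positions keep the
+            # epsilon floor applied on the next line.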
+ cross_attention_prior = torch.zeros(dec_labels_len, enc_len) + self.cross_attention_epsilon
+ if self.use_attention_prior:
+ cross_attention_question_prior = torch.from_numpy(
+ beta_binomial_prior_distribution(
+ question_tokens_len.item() - num_question_offset,
+ dec_labels_len.item(),
+ scaling_factor=self.attention_prior_scaling_factor,
+ )
+ )
+ cross_attention_prior[:, virtual_tokens_len + context_tokens_len + num_question_offset :] = (
+ cross_attention_question_prior
+ )
+
+ return (
+ taskname_id,
+ virtual_tokens,
+ virtual_tokens_len,
+ context_and_question_tokens,
+ context_tokens_len + question_tokens_len,
+ dec_input,
+ dec_input_len,
+ dec_labels,
+ dec_labels_len,
+ is_speech,
+ cross_attention_prior,
+ )
+ else:
+ return None
+
+ def _truncate_input_speech(self, context_tokens, question_tokens, virtual_tokens):
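+        """Drop tokens from the start of the speech context so the total encoder input
+        fits within max_seq_length (at most the entire context is dropped)."""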
+ total_len = self._get_len(context_tokens, question_tokens, virtual_tokens)
+ context_len = self._get_element_len(context_tokens)
+ truncation_length = total_len - self.max_seq_length + 1
+ context_tokens[0] = context_tokens[0][:, min(truncation_length, context_len) :]
+ return context_tokens, question_tokens, virtual_tokens
+
+ def list_to_tensor(self, element, fill=False):
+ """
+ Convert list to tensor. The list might contain integers, 2D-tensors (speech tokens) and combination of two.
+ If all of them are ints, simply convert to tensor
+ If combination of 2D-tensor and ints. Convert int to the dimension of the tensor.
+ example: [2, 4, 5] -> torch.tensor([2, 4, 5])
+ example: [2, torch.tensor([[4, 5, 6], [6, 7, 8]])] -> torch.tensor( [[-1, 4, 5, 6], [2, 6, 7, 8]] )
+ """
+ ret, ln = None, None
+ if element is None:
+ return ret, ln
+
+ max_len = max([1 if isinstance(item, int) else len(item) for item in element])
+ if max_len == 1:
+ ret = torch.as_tensor(element).long()
+ ln = torch.tensor(ret.size()[0]).long()
+ else:
+ ret = []
+ for e in element:
+ if isinstance(e, int):
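+                    # ints expand to an (8, 1) column (8 codebooks hardcoded): the last
+                    # row holds the id, the rest hold -1 (or the id itself when fill=True)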
+ tmp = torch.full((8, 1), e if fill else -1)
+ tmp[7] = e
+ else:
+ tmp = e
+ ret.append(tmp)
+ ret = torch.cat(ret, dim=1)
+ ln = torch.tensor(ret.size()[1]).long()
+ return ret, ln
+
+ def _get_text_tokens(self, text):
+ input_ids = self.tokenizer.text_to_ids(text)
+ return input_ids
+
+ def _get_phoneme_tokens(self, text):
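+        """Phoneme-tokenize `text` and shift the ids past the LM vocab size so they do not collide with text token ids."""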
+ input_ids = phoneme_tokenizer.encode(text)
+ input_ids_adjusted = [_id + self.lm_vocab_size for _id in input_ids]
+ return input_ids_adjusted
+
+ def _pad_wav_to_multiple(self, wav):
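+        """Right-pad the waveform with zeros so its length is a multiple of `pad_multiple`."""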
+ if self.pad_multiple > 1:
+ if wav.shape[0] % self.pad_multiple != 0:
+ wav = torch.cat(
+ [wav, torch.zeros(self.pad_multiple - wav.shape[0] % self.pad_multiple, dtype=torch.float)]
+ )
+ return wav
+
+ def _get_element_len(self, element):
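+        """Sequence length of an element: each int counts as 1; tensors contribute their time dimension."""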
+ length = 0
+ if isinstance(element, list):
+ for e in element:
+ if isinstance(e, int):
+ length += 1
+ else:
+ if e.dim() > 1:
+ length += e.size()[1]
+ else:
+ length += e.size()[0]
+ else:
+ if element.dim() > 1:
+ length += element.size()[1]
+ else:
+ length += element.size()[0]
+ return length
+
+ def _get_len(self, context_tokens, question_tokens, virtual_tokens):
+ length = 0
+ length += self._get_element_len(context_tokens)
+ length += self._get_element_len(question_tokens)
+ length += self._get_element_len(virtual_tokens)
+ return length
+
+ def _get_speech_tokens(self, field):
+
+        # Deserialize the codec codes for the requested field
+        codec_codes = None
+
+ if self.train_task == 'tts':
+ if field == 'context':
+ self.ref_encodec = torch.load(io.BytesIO(self.ref_encodec), map_location="cpu").long()
+ codec_codes = self.ref_encodec
+ elif field == 'answer':
+ self.encodec = torch.load(io.BytesIO(self.encodec), map_location="cpu").long()
+ codec_codes = self.encodec
+ elif self.train_task == 'asr':
+ if field == 'context':
+ self.ref_encodec = torch.load(io.BytesIO(self.ref_encodec), map_location="cpu").long()
+ codec_codes = self.ref_encodec
+
+        # Shift the codes into the id range of the Megatron embedding layer
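+        # (only the first codebook row is shifted, placing its ids beyond the text vocabulary)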
+ codec_codes[0] = (codec_codes[0] + self.speech_offset).long()
+
+ return codec_codes
+
+ def _get_tokens(self, doc, field, field_data):
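+        """Dispatch tokenization based on the field's declared type: plain text, phonemes, speech codes, or pre-tokenized ids."""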
+ if f"{field}_type" not in doc.keys():
+ field_tokens = self._get_text_tokens(field_data.strip(" ")) # list of ids
+ elif doc[f"{field}_type"] == 'TEXT':
+ _text = field_data.strip(" ")
+ if self.use_phoneme_tokenizer:
+ instruction_tokens = self._get_text_tokens("Phoneme TTS")
+ field_tokens = self._get_phoneme_tokens(_text.replace("Text to speech this ", ""))
+ field_tokens = instruction_tokens + field_tokens
+ else:
+ field_tokens = self._get_text_tokens(_text) # list of ids
+ elif doc[f"{field}_type"] == 'SPEECH':
+ field_tokens = self._get_speech_tokens(field) # list of ids
+ if not isinstance(field_tokens, list):
+ field_tokens = [field_tokens]
+ elif doc[f"{field}_type"] == 'TOKENS':
+ # Do nothing; already tokenized
+ field_tokens = field_data
+ else:
+ raise Exception(f"{field}_type not recognized")
+ return field_tokens
+
+ def _insert_data_in_template(self, input_example, prompt_template_fields, doc, answer_field):
+ """Format the input example according to the template"""
+ out_dict = {}
+ for field in prompt_template_fields:
+            # Skip the answer field ({label} / {answer}) and any template field missing
+            # from the doc (e.g. {answer} during inference), leaving that slot blank
+            if field == answer_field or field not in doc.keys():
+                continue
+            else:
+                field_data = doc[field]
+                if f"{field}_type" not in doc.keys():
+                    raise Exception(f"{field}_type does not exist in doc")
+                else:
+                    out_dict[field] = self._get_tokens(doc, field, field_data)
+ return out_dict
+
+    def get_position_ids(self, virtual_token, context_and_question):
+        enc_input = []
+        enc_input.append(virtual_token)
+        if context_and_question.dim() > 2:
+            enc_input.append(context_and_question[:, 0, :])
+        else:
+            enc_input.append(context_and_question)
+
+        enc_input = torch.cat(enc_input, dim=1)
+
+        enc_input_p = enc_input[:, 0, :] if enc_input.dim() == 3 else enc_input
+        return build_position_ids(enc_input_p).contiguous()
+
+ def collate_fn(self, batch):
+ """Prepares enc_input, dec_input, labels, loss_mask, enc_mask, dec_mask, position_ids, taskname_ids for global batch"""
+
+ data_dict = self.pad_batch_and_build_loss_mask(batch)
+
+ position_ids = self.get_position_ids(data_dict['virtual_tokens'], data_dict['context_and_question_tokens'])
+
+ return (
+ data_dict['virtual_tokens'],
+ data_dict['context_and_question_tokens'],
+ data_dict['enc_mask'],
+ data_dict['dec_input'],
+ data_dict['dec_input_mask'],
+ data_dict['dec_labels'],
+ data_dict['dec_labels_mask'],
+ position_ids,
+ data_dict['taskname_id'],
+ data_dict['speech_mask'],
+ data_dict['context_and_question_tokens_lens'],
+ data_dict['cross_attention_prior'],
+ )
+
+ def pad_batch_and_build_loss_mask(self, batch):
+ """Pad enc_input, dec_input, labels in batch to max batch length while building loss_mask, enc_mask, and dec_mask"""
+ (
+ taskname_ids,
+ _,
+ virtual_tokens_len,
+ _,
+ context_and_question_tokens_len,
+ _,
+ dec_input_len,
+ _,
+ dec_labels_len,
+ _,
+ _,
+ ) = zip(*batch)
+
+ taskname_ids = self.pad_taskname_ids(taskname_ids)
+
+ max_virtual_tokens_len = max(virtual_tokens_len).item() if virtual_tokens_len is not None else 0
+ if isinstance(virtual_tokens_len, tuple):
+ virtual_tokens_len = torch.stack(virtual_tokens_len)
+ virtual_mask = get_mask_from_lengths(virtual_tokens_len)
+
+ max_context_and_question_tokens_len = (
+ max(context_and_question_tokens_len).item() if context_and_question_tokens_len is not None else 0
+ )
+ if isinstance(context_and_question_tokens_len, tuple):
+ context_and_question_tokens_len = torch.stack(context_and_question_tokens_len)
+ context_and_question_mask = get_mask_from_lengths(context_and_question_tokens_len)
+
+ max_dec_input_len = max(dec_input_len).item() if dec_input_len is not None else 0
+ max_dec_labels_len = max(dec_labels_len).item() if dec_labels_len is not None else 0
+ enc_mask = torch.cat([virtual_mask, context_and_question_mask], dim=1)
+
+ (
+ virtual_tokens_list,
+ context_question_tokens_list,
+ dec_input_list,
+ dec_input_mask_list,
+ dec_labels_list,
+ dec_labels_mask_list,
+ speech_mask_list,
+ cross_attention_prior_list,
+ ) = (
+ [],
+ [],
+ [],
+ [],
+ [],
+ [],
+ [],
+ [],
+ )
+
+        for sample_tuple in batch:
+ (
+ _,
+ virtual_token,
+ virtual_token_len,
+ context_and_question_token,
+ context_and_question_token_len,
+ dec_input,
+ dec_input_len,
+ dec_label,
+ dec_label_len,
+ is_speech,
+ cross_attention_prior,
+ ) = sample_tuple
+
+ virtual_tokens_list.append(
+ general_padding(
+ virtual_token, virtual_token_len.item(), max_virtual_tokens_len, pad_value=self.tokenizer.pad_id
+ )
+ )
+
+ context_tokens_padded = general_padding(
+ context_and_question_token,
+ context_and_question_token_len.item(),
+ max_context_and_question_tokens_len,
+ pad_value=self.tokenizer.pad_id,
+ )
+ if len(context_tokens_padded.shape) < 2:
+ context_tokens_padded = pad_text_to_speech_dims(context_tokens_padded, self.tokenizer.pad_id)
+ context_question_tokens_list.append(context_tokens_padded)
+
+ if max_dec_input_len > 0:
+ dec_input_padded = general_padding(
+ dec_input, dec_input_len.item(), max_dec_input_len, pad_value=self.tokenizer.pad_id
+ )
+ if len(dec_input_padded.shape) < 2:
+ dec_input_padded = pad_text_to_speech_dims(dec_input_padded, self.tokenizer.pad_id)
+ dec_input_list.append(dec_input_padded)
+ dec_mask = (
+ torch.as_tensor(([1] * dec_input_len) + ([0] * (max_dec_input_len - dec_input_len)))
+ .long()
+ .contiguous()
+ )
+ dec_input_mask_list.append(dec_mask)
+ speech_mask = dec_mask if is_speech else torch.zeros(dec_mask.shape)
+ speech_mask_list.append(speech_mask)
+
+ if max_dec_labels_len > 0:
+ loss_mask = (
+ torch.as_tensor(([1] * dec_label_len) + ([0] * (max_dec_labels_len - dec_label_len)))
+ .long()
+ .contiguous()
+ )
+ dec_label_padded = general_padding(
+ dec_label, dec_label_len.item(), max_dec_labels_len, pad_value=self.tokenizer.pad_id
+ )
+ if len(dec_label_padded.shape) < 2:
+ dec_label_padded = pad_text_to_speech_dims(dec_label_padded, self.tokenizer.pad_id)
+ dec_labels_list.append(dec_label_padded)
+ dec_labels_mask_list.append(loss_mask)
+
+ _p0 = max_dec_labels_len - dec_label_len
+ _p1 = (
+ max_virtual_tokens_len
+ + max_context_and_question_tokens_len
+ - context_and_question_token_len
+ - virtual_token_len
+ )
+
+ cross_attention_prior_padded = torch.nn.functional.pad(
+ cross_attention_prior,
+ pad=(0, _p1, 0, _p0),
+ mode="constant",
+ value=1,
+ )
+ cross_attention_prior_list.append(cross_attention_prior_padded)
+
+ data_dict = {
+ "taskname_id": taskname_ids,
+ "virtual_tokens": torch.stack(virtual_tokens_list),
+ "context_and_question_tokens": torch.stack(context_question_tokens_list),
+ "enc_mask": enc_mask,
+ "dec_input": torch.stack(dec_input_list) if len(dec_input_list) > 0 else None,
+ "dec_input_mask": torch.stack(dec_input_mask_list) if len(dec_input_mask_list) > 0 else None,
+ "dec_labels": torch.stack(dec_labels_list) if len(dec_labels_list) > 0 else None,
+ "dec_labels_mask": torch.stack(dec_labels_mask_list) if len(dec_labels_mask_list) > 0 else None,
+ "speech_mask": torch.stack(speech_mask_list) if len(speech_mask_list) > 0 else None,
+ "context_and_question_tokens_lens": context_and_question_tokens_len,
+ "cross_attention_prior": (
+ torch.stack(cross_attention_prior_list) if len(cross_attention_prior_list) > 0 else None
+ ),
+ }
+
+ return data_dict
diff --git a/nemo/collections/tts/g2p/models/ctc.py b/nemo/collections/tts/g2p/models/ctc.py
index 2e180e766211..1859b09594ff 100644
--- a/nemo/collections/tts/g2p/models/ctc.py
+++ b/nemo/collections/tts/g2p/models/ctc.py
@@ -19,8 +19,8 @@
import torch
from hydra.utils import instantiate
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, ListConfig, OmegaConf, open_dict
-from pytorch_lightning import Trainer
from transformers import AutoConfig, AutoModel, AutoTokenizer
from nemo.collections.tts.g2p.data.ctc import CTCG2PBPEDataset
@@ -101,11 +101,21 @@ def __init__(self, cfg: DictConfig, trainer: Trainer = None):
self.decoding = CTCBPEDecoding(self.cfg.decoding, tokenizer=self.tokenizer)
- self.wer = WER(decoding=self.decoding, use_cer=False, log_prediction=False, dist_sync_on_step=True,)
- self.per = WER(decoding=self.decoding, use_cer=True, log_prediction=False, dist_sync_on_step=True,)
+ self.wer = WER(
+ decoding=self.decoding,
+ use_cer=False,
+ log_prediction=False,
+ dist_sync_on_step=True,
+ )
+ self.per = WER(
+ decoding=self.decoding,
+ use_cer=True,
+ log_prediction=False,
+ dist_sync_on_step=True,
+ )
def setup_grapheme_tokenizer(self, cfg):
- """ Initialized grapheme tokenizer """
+ """Initialized grapheme tokenizer"""
if self.mode == "byt5":
# Load appropriate tokenizer from HuggingFace
@@ -315,7 +325,10 @@ def _setup_infer_dataloader(self, cfg: DictConfig) -> 'torch.utils.data.DataLoad
)
@torch.no_grad()
- def _infer(self, config: DictConfig,) -> List[int]:
+ def _infer(
+ self,
+ config: DictConfig,
+ ) -> List[int]:
"""
Runs model inference.
diff --git a/nemo/collections/tts/g2p/models/heteronym_classification.py b/nemo/collections/tts/g2p/models/heteronym_classification.py
index 54b9a8b07413..47d08eb16e17 100644
--- a/nemo/collections/tts/g2p/models/heteronym_classification.py
+++ b/nemo/collections/tts/g2p/models/heteronym_classification.py
@@ -19,8 +19,8 @@
import torch
from hydra.utils import instantiate
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from nemo.collections.common.losses import CrossEntropyLoss
from nemo.collections.nlp.metrics.classification_report import ClassificationReport
@@ -113,9 +113,9 @@ def make_step(self, batch):
def training_step(self, batch, batch_idx):
"""
- Lightning calls this inside the training loop with the data from the training dataloader
- passed in as `batch`.
- """
+ Lightning calls this inside the training loop with the data from the training dataloader
+ passed in as `batch`.
+ """
loss, logits = self.make_step(batch)
self.log('train_loss', loss)
@@ -267,7 +267,11 @@ def disambiguate(
item = {"text_graphemes": cur_sentence, "start_end": cur_start_ends, "heteronym_span": cur_heteronyms}
f.write(json.dumps(item, ensure_ascii=False) + '\n')
- all_preds = self._disambiguate(manifest=tmp_manifest, batch_size=batch_size, num_workers=num_workers,)
+ all_preds = self._disambiguate(
+ manifest=tmp_manifest,
+ batch_size=batch_size,
+ num_workers=num_workers,
+ )
if wordid_to_phonemes_file is not None:
self.set_wordid_to_phonemes(wordid_to_phonemes_file)
diff --git a/nemo/collections/tts/g2p/models/t5.py b/nemo/collections/tts/g2p/models/t5.py
index 19f976081687..4c673b18dc4a 100644
--- a/nemo/collections/tts/g2p/models/t5.py
+++ b/nemo/collections/tts/g2p/models/t5.py
@@ -17,8 +17,8 @@
import torch
from hydra.utils import instantiate
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf
-from pytorch_lightning import Trainer
from transformers import AutoTokenizer, T5ForConditionalGeneration
from nemo.collections.asr.metrics.wer import word_error_rate
diff --git a/nemo/collections/tts/g2p/models/zh_cn_pinyin.py b/nemo/collections/tts/g2p/models/zh_cn_pinyin.py
index 985897d8df3f..2fe0ac3f6077 100644
--- a/nemo/collections/tts/g2p/models/zh_cn_pinyin.py
+++ b/nemo/collections/tts/g2p/models/zh_cn_pinyin.py
@@ -93,7 +93,7 @@ def __init__(
self.ascii_letter_dict = {
x: ascii_letter_prefix + x for x in get_grapheme_character_set(locale="en-US", case=ascii_letter_case)
}
- self.ascii_letter_list = sorted(self.ascii_letter_dict)
+ self.ascii_letter_list = sorted(self.ascii_letter_dict.values())
self.ascii_letter_case = ascii_letter_case
if apply_to_oov_word is None:
@@ -181,6 +181,7 @@ def __call__(self, text: str) -> List[str]:
`['wo3', 'jin1', 'tian1', 'qu4', 'le5', 'A', 'p', 'p', 'l', 'e', ' ', 'S', 't', 'o', 'r', 'e', ',', ' ', 'mai3',
'le5', 'yi2', 'ge4', 'i', 'P', 'h', 'o', 'n', 'e', 'γ']`
"""
+ err = False
text = set_grapheme_case(text, case=self.ascii_letter_case)
pinyin_seq = []
@@ -201,7 +202,15 @@ def __call__(self, text: str) -> List[str]:
tone_hyp = pinyin[-1]
if tone_hyp in self.tone_dict:
syllable = pinyin[:-1]
- assert syllable in self.phoneme_dict, f"Syllable <{syllable}> does not exist in the dictionary."
+                    # TODO: skipping a syllable that does not exist in the dictionary leads to deletion errors in the
+                    # synthesized speech. Even though this case is uncommon, it should be fixed in the future.
+                    if syllable not in self.phoneme_dict:
+                        err = True
+                        logging.error(
+                            f"Syllable <{syllable}> does not exist in the dictionary and will be skipped, "
+                            f"which risks symbol deletions in the synthesized speech."
+                        )
+ continue
phoneme_seq += self.phoneme_dict[syllable]
phoneme_seq.append(self.tone_dict[tone_hyp])
# All pinyin would end up with a number in 1-5, which represents tones of the pinyin.
@@ -211,4 +220,6 @@ def __call__(self, text: str) -> List[str]:
phoneme_seq.append(self.ascii_letter_dict[tone_hyp])
else:
phoneme_seq.append(pinyin)
+ if err:
+ logging.error(f"|{text}| contained unknown syllables")
return phoneme_seq
diff --git a/nemo/collections/tts/models/aligner.py b/nemo/collections/tts/models/aligner.py
index d8e65d6e6821..5fea8615f7f2 100644
--- a/nemo/collections/tts/models/aligner.py
+++ b/nemo/collections/tts/models/aligner.py
@@ -18,9 +18,9 @@
import omegaconf
import torch
from hydra.utils import instantiate
+from lightning.pytorch import Trainer
+from lightning.pytorch.loggers import WandbLogger
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
-from pytorch_lightning.loggers import WandbLogger
from torch import nn
from nemo.collections.tts.losses.aligner_loss import BinLoss, ForwardSumLoss
diff --git a/nemo/collections/tts/models/audio_codec.py b/nemo/collections/tts/models/audio_codec.py
index 0c5e41157613..230a24e36cb0 100644
--- a/nemo/collections/tts/models/audio_codec.py
+++ b/nemo/collections/tts/models/audio_codec.py
@@ -21,8 +21,8 @@
import torch.nn.functional as F
from einops import rearrange
from hydra.utils import instantiate
+from lightning.pytorch import Trainer
from omegaconf import DictConfig, OmegaConf, open_dict
-from pytorch_lightning import Trainer
from nemo.collections.tts.losses.audio_codec_loss import (
FeatureMatchingLoss,
diff --git a/nemo/collections/tts/models/fastpitch.py b/nemo/collections/tts/models/fastpitch.py
index b1e702c89124..34213303abf4 100644
--- a/nemo/collections/tts/models/fastpitch.py
+++ b/nemo/collections/tts/models/fastpitch.py
@@ -18,9 +18,9 @@
import torch
from hydra.utils import instantiate
+from lightning.pytorch import Trainer
+from lightning.pytorch.loggers import TensorBoardLogger
from omegaconf import DictConfig, OmegaConf, open_dict
-from pytorch_lightning import Trainer
-from pytorch_lightning.loggers import TensorBoardLogger
from nemo.collections.common.parts.preprocessing import parsers
from nemo.collections.tts.losses.aligner_loss import BinLoss, ForwardSumLoss
diff --git a/nemo/collections/tts/models/fastpitch_ssl.py b/nemo/collections/tts/models/fastpitch_ssl.py
index fe743edf8783..f2384c41c5b5 100644
--- a/nemo/collections/tts/models/fastpitch_ssl.py
+++ b/nemo/collections/tts/models/fastpitch_ssl.py
@@ -16,9 +16,9 @@
import torch
from hydra.utils import instantiate
+from lightning.pytorch import Trainer
+from lightning.pytorch.loggers import TensorBoardLogger
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
-from pytorch_lightning.loggers import TensorBoardLogger
from nemo.collections.tts.losses.fastpitchloss import DurationLoss, MelLoss, PitchLoss
from nemo.collections.tts.modules.fastpitch import FastPitchSSLModule, average_features
@@ -34,7 +34,7 @@
class FastPitchModel_SSL(ModelPT):
"""
FastPitch based model that can synthesize mel spectrograms from content and speaker embeddings
- obtained from SSLDisentangler. This model can be used for voice conversion by swapping the speaker embedding
+ obtained from SSLDisentangler. This model can be used for voice conversion by swapping the speaker embedding
of a given source utterance, with the speaker embedding of a target speaker.
"""
@@ -133,9 +133,21 @@ def tb_logger(self):
return self._tb_logger
def forward(
- self, *, enc_out=None, enc_mask=None, durs=None, pitch=None, pace=1.0,
+ self,
+ *,
+ enc_out=None,
+ enc_mask=None,
+ durs=None,
+ pitch=None,
+ pace=1.0,
):
- return self.fastpitch(enc_out=enc_out, enc_mask=enc_mask, durs=durs, pitch=pitch, pace=pace,)
+ return self.fastpitch(
+ enc_out=enc_out,
+ enc_mask=enc_mask,
+ durs=durs,
+ pitch=pitch,
+ pace=pace,
+ )
def compute_encoding(self, content_embedding, speaker_embedding, dataset_id=None):
# content embedding is (B, C, T)
@@ -177,7 +189,11 @@ def training_step(self, batch, batch_idx):
enc_mask = enc_mask[:, :, None]
mels_pred, _, _, log_durs_pred, pitch_pred, pitch = self(
- enc_out=enc_out, enc_mask=enc_mask, durs=durs, pitch=pitch, pace=1.0,
+ enc_out=enc_out,
+ enc_mask=enc_mask,
+ durs=durs,
+ pitch=pitch,
+ pace=1.0,
)
loss = 0
@@ -208,7 +224,10 @@ def training_step(self, batch, batch_idx):
)
spec_predict = mels_pred[0].data.cpu().float().numpy()
self.tb_logger.add_image(
- "train_mel_predicted", plot_spectrogram_to_numpy(spec_predict), self.global_step, dataformats="HWC",
+ "train_mel_predicted",
+ plot_spectrogram_to_numpy(spec_predict),
+ self.global_step,
+ dataformats="HWC",
)
return loss
@@ -286,7 +305,10 @@ def on_validation_epoch_end(self, outputs):
)
spec_predict = spec_predict[_rand_idx].data.cpu().float().numpy()
self.tb_logger.add_image(
- "val_mel_predicted", plot_spectrogram_to_numpy(spec_predict), self.global_step, dataformats="HWC",
+ "val_mel_predicted",
+ plot_spectrogram_to_numpy(spec_predict),
+ self.global_step,
+ dataformats="HWC",
)
if self.pitch_conditioning:
@@ -321,10 +343,10 @@ def generate_wav(
):
"""
Args:
- content_embedding : Content embedding from SSL backbone (B, C, T)
+ content_embedding : Content embedding from SSL backbone (B, C, T)
speaker_embedding : Speaker embedding from SSL backbone (B, C)
pitch_contour : Normalized Pitch contour derived from the mel spectrogram
- encoded_len: Length of each content embedding, optional if batch size is 1.
+ encoded_len: Length of each content embedding, optional if batch size is 1.
compute_pitch: if true, predict pitch contour from content and speaker embedding.
compute_duration: if true, predict duration from content and speaker embedding.
durs_gt: Ground truth duration of each content embedding, ignored if compute_duration is True.
diff --git a/nemo/collections/tts/models/hifigan.py b/nemo/collections/tts/models/hifigan.py
index 7a9a6d30671f..1a5462349c4d 100644
--- a/nemo/collections/tts/models/hifigan.py
+++ b/nemo/collections/tts/models/hifigan.py
@@ -18,8 +18,8 @@
import torch
import torch.nn.functional as F
from hydra.utils import instantiate
+from lightning.pytorch.loggers.wandb import WandbLogger
from omegaconf import DictConfig, OmegaConf, open_dict
-from pytorch_lightning.loggers.wandb import WandbLogger
from nemo.collections.tts.losses.hifigan_losses import DiscriminatorLoss, FeatureMatchingLoss, GeneratorLoss
from nemo.collections.tts.models.base import Vocoder
@@ -313,7 +313,7 @@ def stft(x):
comp = torch.stft(x.squeeze(1), n_fft=1024, hop_length=256, win_length=1024, return_complex=True)
comp = torch.view_as_real(comp)
real, imag = comp[..., 0], comp[..., 1]
- mags = torch.sqrt(real ** 2 + imag ** 2)
+ mags = torch.sqrt(real**2 + imag**2)
phase = torch.atan2(imag, real)
return mags, phase
diff --git a/nemo/collections/tts/models/mixer_tts.py b/nemo/collections/tts/models/mixer_tts.py
index c260df22e3c0..58b7f6f9706b 100644
--- a/nemo/collections/tts/models/mixer_tts.py
+++ b/nemo/collections/tts/models/mixer_tts.py
@@ -20,9 +20,9 @@
import transformers
import wandb
from hydra.utils import instantiate
+from lightning.pytorch import Trainer
+from lightning.pytorch.loggers import WandbLogger
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
-from pytorch_lightning.loggers import WandbLogger
from torch import nn
from torch.nn import functional as F
from transformers import AlbertTokenizer
diff --git a/nemo/collections/tts/models/radtts.py b/nemo/collections/tts/models/radtts.py
index 82f85d1ed6a2..3f04f2ca3908 100644
--- a/nemo/collections/tts/models/radtts.py
+++ b/nemo/collections/tts/models/radtts.py
@@ -15,9 +15,9 @@
import torch
from hydra.utils import instantiate
+from lightning.pytorch import Trainer
+from lightning.pytorch.loggers import TensorBoardLogger
from omegaconf import DictConfig, OmegaConf
-from pytorch_lightning import Trainer
-from pytorch_lightning.loggers import TensorBoardLogger
from nemo.collections.common.tokenizers.text_to_speech.tts_tokenizers import BaseTokenizer
from nemo.collections.tts.losses.radttsloss import AttentionBinarizationLoss, RADTTSLoss
diff --git a/nemo/collections/tts/models/spectrogram_enhancer.py b/nemo/collections/tts/models/spectrogram_enhancer.py
index 65934d9a10ce..3644a77eb6fe 100644
--- a/nemo/collections/tts/models/spectrogram_enhancer.py
+++ b/nemo/collections/tts/models/spectrogram_enhancer.py
@@ -43,9 +43,9 @@
import torch.nn.functional as F
from einops import rearrange
from hydra.utils import instantiate
+from lightning.pytorch import Trainer
+from lightning.pytorch.loggers import TensorBoardLogger, WandbLogger
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
-from pytorch_lightning.loggers import TensorBoardLogger, WandbLogger
from torch.utils.tensorboard.writer import SummaryWriter
from nemo.collections.common.parts.utils import mask_sequence_tensor
diff --git a/nemo/collections/tts/models/speechllm/__init__.py b/nemo/collections/tts/models/speechllm/__init__.py
new file mode 100644
index 000000000000..9df65818d226
--- /dev/null
+++ b/nemo/collections/tts/models/speechllm/__init__.py
@@ -0,0 +1,13 @@
+# Copyright (c) 2024, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/nemo/collections/tts/models/speechllm/megatron_base_speechllm_prompt_model.py b/nemo/collections/tts/models/speechllm/megatron_base_speechllm_prompt_model.py
new file mode 100644
index 000000000000..658ace21726f
--- /dev/null
+++ b/nemo/collections/tts/models/speechllm/megatron_base_speechllm_prompt_model.py
@@ -0,0 +1,444 @@
+# Copyright (c) 2024, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+
+import torch
+from lightning.pytorch.trainer.trainer import Trainer
+from omegaconf.dictconfig import DictConfig
+from torch import Tensor
+
+from nemo.collections.common.tokenizers.sentencepiece_tokenizer import SentencePieceTokenizer
+from nemo.collections.nlp.metrics.prompt_learning_metrics import AccuracyScore, BLEUScore, ROUGEScores
+from nemo.collections.nlp.models.language_modeling.megatron_base_model import MegatronBaseModel
+from nemo.collections.nlp.modules.common import (
+ PromptEncoder,
+ PromptEncoderType,
+ VirtualPromptPlaceholderToken,
+ VirtualPromptSource,
+ VirtualPromptStyle,
+)
+from nemo.collections.nlp.modules.common.transformer.text_generation import TextGeneration
+from nemo.collections.nlp.parts import utils_funcs
+from nemo.utils import AppState
+
+try:
+ from apex.transformer.pipeline_parallel.utils import _reconfigure_microbatch_calculator
+
+ HAVE_APEX = True
+
+except (ImportError, ModuleNotFoundError):
+ HAVE_APEX = False
+
+try:
+ from megatron.core import parallel_state
+
+ HAVE_MEGATRON_CORE = True
+
+except (ImportError, ModuleNotFoundError):
+
+ HAVE_MEGATRON_CORE = False
+
+
+__all__ = ['MegatronBaseSpeechLM']
+
+
+class MegatronBaseSpeechLM(MegatronBaseModel, TextGeneration):
+ """
+ Model class for prompt-tuning or p-tuning a pretrained Megatron model.
+
+    Prompt Tuning initializes virtual prompt embeddings directly from a copy of
+    certain token embeddings from the pretrained model's vocabulary
+    and directly tunes these embedding weights. The token embeddings used in
+    initialization are specified by the user in the config file. The model can
+    be prompt-tuned for multiple tasks at once. Virtual prompts are stored in a
+    prompt table and can be added or deleted without disrupting virtual prompts
+    for other tasks.
+
+    P-tuning initializes an LSTM encoder model that generates virtual prompt
+    embeddings for every task. Each task shares the same encoder. After p-tuning
+    is complete, the learned virtual prompts can be saved to the prompt table
+    using add_ptuned_prompts_to_prompt_table(). Thus, if a user wants to add a
+    new virtual prompt via p-tuning, they do not need to retrain on all previous
+    tasks. This gives p-tuning the same task flexibility as prompt-tuning.
+ """
+
+ def __init__(self, cfg: DictConfig, trainer: Trainer):
+ super().__init__(cfg, trainer)
+ self.init_model(cfg, trainer)
+ self.config = self.model_parallel_config
+
+ def init_model(self, cfg: DictConfig, trainer: Trainer):
+ self.cfg = cfg
+
+ self.load_frozen_model(cfg, trainer)
+ self.prompt_encoder = None
+ self.tokenizer = self.frozen_model.tokenizer
+
+ if hasattr(self.frozen_model.cfg, "encoder") and hasattr(self.frozen_model.cfg, "decoder"):
+ self.hidden_size = (
+ self.frozen_model.cfg.encoder.hidden_size
+ ) # Encoder and decoder need to have the same hidden size and we check for this in the frozen enc-dec model.
+ else:
+ self.hidden_size = self.frozen_model.cfg.hidden_size
+
+ self.existing_tasks = list(self.cfg.get('existing_tasks', []))
+ self.new_tasks = list(self.cfg.get('new_tasks', []))
+ self.virtual_prompt_style = VirtualPromptStyle(cfg.virtual_prompt_style)
+
+ # Load templates for assigning virtual prompt token positions
+ self.load_task_templates(self.cfg.task_templates)
+
+ if self.first_stage_of_pipeline() and self.virtual_prompt_style in [
+ VirtualPromptStyle.P_TUNING,
+ ]:
+ # TODO: Handle this when moving GPT prompt learning to the base class.
+ self.word_embeddings = self.frozen_model.enc_dec_model.encoder_embedding.word_embeddings
+
+ # P-Tuning uses an LSTM Encoder to produce virtual token embeddings
+ if self.virtual_prompt_style == VirtualPromptStyle.P_TUNING:
+ self.virtual_prompt_source = VirtualPromptSource.PROMPT_ENCODER
+ elif self.virtual_prompt_style == VirtualPromptStyle.NO_PROMPT:
+ self.virtual_prompt_source = VirtualPromptSource.NO_PROMPT
+ else:
+ raise ValueError(f"\nvirtual prompt style '{cfg.virtual_prompt_style}'")
+
+ self._reduced_loss_buffer = []
+ self._inference_config = None
+
+ # Prepare pseudo token ids for virtual/virtual prompt tokens
+ self.pseudo_tokens = get_pseudo_tokens(self.max_virtual_tokens)
+ if isinstance(self.tokenizer, SentencePieceTokenizer):
+ self.tokenizer.add_special_tokens(self.pseudo_tokens)
+ else:
+ self.tokenizer.add_special_tokens({'additional_special_tokens': self.pseudo_tokens})
+ self.pseudo_token_ids = self.tokenizer.tokens_to_ids(self.pseudo_tokens)
+ self.pseudo_token_ids_start = self.pseudo_token_ids[0] if self.pseudo_token_ids else None
+ self.pad_token_id = self.tokenizer.pad_id if self.tokenizer.pad_id is not None else self.tokenizer.unk_id
+ self.decoder_seq_length = cfg.get('decoder_seq_length', 40)
+
+ self.autocast_dtype = utils_funcs.torch_dtype_from_precision(self.cfg.precision) # Mixed precision datatype
+        # Use the default PyTorch Lightning gradient clipping from the base model
+ self.grad_clip_pl_default = True
+ self.lowest_val_loss = None
+ self.prompt_encoder = None
+
+ self.enable_autocast = not self.megatron_amp_O2 and self.autocast_dtype in [torch.float16, torch.bfloat16]
+
+ # define validation metric
+ if self.cfg.get('report_validation_metric', False):
+ validation_metric = self.cfg.get('validation_metric', 'accuracy')
+ if validation_metric == 'accuracy':
+ self.validation_metric = AccuracyScore()
+ elif validation_metric == 'bleu':
+ self.validation_metric = BLEUScore()
+ elif validation_metric == 'rouge':
+ self.validation_metric = ROUGEScores()
+
+ def load_task_templates(self, task_templates):
+ """
+ Takes in the task template portion of the config and turns
+ it into a table where each task's prompt template and
+ the number of virtual tokens to insert in a given part of
+ the prompt template are specified.
+ """
+ self.task_templates = {}
+ self.task_id_num_to_name = {}
+ self.max_virtual_tokens = 0
+
+ task_id_num = 0
+ for task in task_templates:
+ self.task_templates[task.taskname] = {
+ "prompt_template": task.prompt_template,
+ "prompt_template_fields": re.findall("\{(.*?)\}", task.prompt_template),
+ "answer_only_loss": task.get("answer_only_loss", False),
+ "answer_field": task.get("answer_field", None),
+ "truncate_field": task.truncate_field,
+ "total_virtual_tokens": task.total_virtual_tokens,
+ "virtual_token_splits": task.virtual_token_splits,
+ "task_id_num": task_id_num,
+ }
+
+ self.max_virtual_tokens = max(self.max_virtual_tokens, task.total_virtual_tokens)
+ self.task_id_num_to_name[task_id_num] = task.taskname
+ task_id_num += 1
+
+ # Check that all new tasks have the same total num virtual tokens
+        # The number of virtual tokens for new tasks does not need to match the number used for previously tuned tasks
+ if self.new_tasks:
+ new_task_name = self.new_tasks[0]
+ self.total_new_task_virtual_tokens = self.task_templates[new_task_name]["total_virtual_tokens"]
+
+ assert all(
+ self.task_templates[taskname]["total_virtual_tokens"] == self.total_new_task_virtual_tokens
+ for taskname in self.new_tasks
+ ), "Total virtual tokens for each task tuned simultaneously must match. If you want to use a different number of virtual tokens for different tasks, tune them separately."
+
+ def init_prompt_encoder(self):
+ """
+ Init the prompt encoder needed for p-tuning on a new task
+ """
+ # Total virtual tokens should be the same across all new tasks, so just need one
+ new_task = self.new_tasks[0]
+ total_virtual_tokens = self.task_templates[new_task]["total_virtual_tokens"]
+
+ encoder_type = PromptEncoderType(self.cfg.p_tuning.get("encoder_type", "tpmlp").lower())
+ self.prompt_encoder = PromptEncoder(
+ config=self.model_parallel_config,
+ encoder_type=encoder_type,
+ total_virtual_tokens=total_virtual_tokens,
+ token_dim=self.hidden_size,
+ hidden_size=self.cfg.p_tuning.get("encoder_hidden", self.hidden_size // 2),
+ lstm_dropout=self.cfg.p_tuning.get("dropout", 0.0),
+ num_layers=self.cfg.p_tuning.get("num_layers", 2),
+ init_std=self.cfg.p_tuning.get("init_std", 0.023),
+ taskname=new_task,
+ )
+
+ def freeze_existing_word_embeddings(self):
+ """Freeze params of existing virtual prompts that should not be tuned further"""
+ # Make sure word embeddings are frozen
+ for params in self.word_embeddings.parameters():
+ params.requires_grad = False
+
+ def state_dict(self):
+ """
+ Custom state dict that only contains prompt table and prompt encoder parameters.
+ No frozen model parameters are stored in the state dict. Prompt encoder parameters
+ are only in state dict for intermediate checkpoints saved during training. Final
+ nemo checkpoints at the end of training will contain prompt table parameters only.
+ """
+ state_dict_ = {}
+ state_dict_["frozen_model_enc_dec_model"] = self.frozen_model.enc_dec_model.state_dict()
+ state_dict_["word_embeddings"] = self.word_embeddings.state_dict()
+ if self.prompt_encoder is not None:
+ state_dict_["prompt_encoder"] = self.prompt_encoder.state_dict()
+
+ return state_dict_
+
+ def load_state_dict(self, state_dict, strict: bool = True):
+ """
+ Custom load state dict method that only loads prompt table and prompt encoder
+ parameters. Matching load method for this class' custom state dict method.
+ """
+ self.init_prompt_encoder()
+ self.frozen_model.enc_dec_model.load_state_dict(state_dict["frozen_model_enc_dec_model"], strict)
+ self.word_embeddings.load_state_dict(state_dict["word_embeddings"], strict)
+ if 'prompt_encoder' in state_dict:
+ self.prompt_encoder.load_state_dict(state_dict["prompt_encoder"], strict)
+
+        # When resuming training, the prompt encoder ends up on CPU because it is created
+        # here rather than in init (it should really be moved to init). Move it to GPU.
+        self.prompt_encoder.to("cuda")
+
+ def embed_input(self, input_ids: Tensor, taskname_ids: Tensor, use_cached_reps: bool):
+ """
+ Replaces the virtual tokens in the input_ids with embeddings
+ calculated from either the 'prompt_table' or 'prompt_encoder'.
+ The virtual token placeholders have token_ids listed in
+ `self.pseudo_token_ids`.
+
+ params:
+ input_ids: the input token ids
+ taskname_ids: the NLP task tag token ids
+ returns:
+ the token embedding for the LM model.
+ """
+ # Replace virtual token ids with padding for forward pass through vocab embeddings
+ discrete_token_ids = input_ids.clone()
+ discrete_token_ids[(input_ids >= self.pseudo_token_ids_start)] = self.pad_token_id
+ discrete_token_embeds = self.word_embeddings(discrete_token_ids).clone()
+
+        # Find the indices where virtual tokens should be inserted
+ virtual_token_locations = input_ids >= self.pseudo_token_ids_start
+
+ # If there are no virtual tokens, just return discrete token embeds
+ if not virtual_token_locations.any():
+ return discrete_token_embeds
+
+ if self.virtual_prompt_source == VirtualPromptSource.PROMPT_ENCODER:
+ # taskname_embeddings = self.word_embeddings(taskname_ids)
+ batch_size, _ = taskname_ids.size()
+ virtual_token_embeds = self.prompt_encoder(batch_size=batch_size, use_cached_reps=use_cached_reps)
+ else:
+ raise ValueError("invalid VirtualPromptSource.")
+
+ # Create index template specifying where virtual token embeddings should be placed
+ batch_size, _, embedding_size = discrete_token_embeds.shape
+ virtual_token_index = virtual_token_locations.nonzero().reshape((batch_size, -1, 2))[:, :, 1][:, :, None]
+ virtual_token_index = virtual_token_index.expand(
+ batch_size, self.total_new_task_virtual_tokens, embedding_size
+ )
+
+ # Make sure discrete_token_embeds and virtual_token_embeds share the same dtype
+ discrete_token_embeds = discrete_token_embeds.type(virtual_token_embeds.dtype)
+
+        # Insert virtual token embeddings where they belong among the discrete token embeddings
+ discrete_token_embeds.scatter_(1, virtual_token_index, virtual_token_embeds)
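+        # scatter_ semantics above (a sketch): for each batch b, virtual slot p, and dim d,
+        # discrete_token_embeds[b, virtual_token_index[b, p, d], d] = virtual_token_embeds[b, p, d],
+        # i.e. each virtual-token embedding overwrites the pad embedding at its sequence position.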
+ input_embeds = discrete_token_embeds
+
+ return input_embeds
+
+ def on_train_end(self):
+ # Save p-tuned prompts to prompt table for inference or future task training
+ self.save_to(save_path=self.cfg.nemo_path)
+
+ def setup(self, stage=None):
+ if stage == 'predict' and self.first_stage_of_pipeline():
+ return
+
+ self.setup_test_data()
+ if stage == 'test':
+ return
+
+ if self.first_stage_of_pipeline():
+ if self.virtual_prompt_style == VirtualPromptStyle.P_TUNING:
+ if self.prompt_encoder is None:
+ self.init_prompt_encoder()
+
+ self.setup_training_data()
+ self.setup_validation_data()
+
+ def setup_training_data(self, training_data_config=None):
+ if self.cfg.data.get('train_ds', None):
+ self._train_ds, self._train_dl = self.build_virtual_prompt_dataset(
+ dataset_paths=self.cfg.data.train_ds,
+ batch_size=self.cfg.global_batch_size,
+ for_train=True,
+ drop_last=True,
+ shuffle=True,
+ num_workers=self.cfg.data.num_workers,
+ pin_memory=True,
+ )
+ elif self.cfg.data.get('train_manifest', None):
+ self._train_ds, self._train_dl = self.build_virtual_prompt_tarred_dataset(
+ dataset_paths=self.cfg.data.train_manifest,
+ audio_path=self.cfg.data.train_audio_path,
+ batch_size=self.cfg.global_batch_size,
+ for_train=True,
+ drop_last=True,
+ shuffle=self.cfg.data.shuffle,
+ num_workers=self.cfg.data.num_workers,
+ pin_memory=True,
+ )
+
+ def setup_validation_data(self, validation_data_config=None):
+ if self.cfg.data.get('validation_ds', None):
+ self._validation_ds, self._validation_dl = self.build_virtual_prompt_dataset(
+ dataset_paths=self.cfg.data.validation_ds,
+ batch_size=self.cfg.get("validation_global_batch_size", self.cfg.global_batch_size),
+ for_train=True,
+ drop_last=self.cfg.get("validation_drop_last", True),
+ shuffle=False,
+ num_workers=self.cfg.data.num_workers,
+ pin_memory=True,
+ )
+ elif self.cfg.data.get('validation_manifest', None):
+ self._validation_ds, self._validation_dl = self.build_virtual_prompt_tarred_dataset(
+ dataset_paths=self.cfg.data.validation_manifest,
+ audio_path=self.cfg.data.validation_audio_path,
+ batch_size=self.cfg.get("validation_global_batch_size", self.cfg.global_batch_size),
+ for_train=True,
+ drop_last=self.cfg.get("validation_drop_last", True),
+                shuffle=False,
+ num_workers=self.cfg.data.num_workers,
+ pin_memory=True,
+ )
+
+ def setup_test_data(self, test_data_config=None):
+ if self.cfg.data.get('test_ds', None):
+ self._test_ds, self._test_dl = self.build_virtual_prompt_dataset(
+ dataset_paths=self.cfg.data.test_ds,
+ batch_size=self.cfg.get("validation_global_batch_size", self.cfg.global_batch_size),
+ for_train=False,
+ drop_last=False,
+ shuffle=False,
+ num_workers=self.cfg.data.num_workers,
+ pin_memory=True,
+ )
+ elif self.cfg.data.get('test_manifest', None):
+ self._test_ds, self._test_dl = self.build_virtual_prompt_tarred_dataset(
+ dataset_paths=self.cfg.data.test_manifest,
+ audio_path=self.cfg.data.test_audio_path,
+ batch_size=self.cfg.global_batch_size,
+ for_train=False,
+ drop_last=False,
+                shuffle=False,
+ num_workers=self.cfg.data.num_workers,
+ pin_memory=True,
+ )
+
+ def _reconfigure_and_process_inference_batch(self, global_batch_size_per_gpu, gbs):
+ # This should happen only on the last batch of the dataset.
+ if global_batch_size_per_gpu != gbs // parallel_state.get_data_parallel_world_size():
+ # NOTE: This is reconfiguring to make sure there is no grad-acc for validation batches.
+ app_state = AppState()
+ _reconfigure_microbatch_calculator(
+ rank=app_state.global_rank,
+ rampup_batch_size=None,
+ global_batch_size=global_batch_size_per_gpu * parallel_state.get_data_parallel_world_size(),
+ micro_batch_size=global_batch_size_per_gpu,
+ data_parallel_size=parallel_state.get_data_parallel_world_size(),
+ )
+
+ def _reconfigure_batch_sizes(self, gbs: int, mbs: int):
+ app_state = AppState()
+ _reconfigure_microbatch_calculator(
+ rank=app_state.global_rank,
+ rampup_batch_size=None,
+ global_batch_size=gbs,
+ micro_batch_size=mbs,
+ data_parallel_size=parallel_state.get_data_parallel_world_size(),
+ )
+
+ def set_inference_config(self, inference_config):
+ self._inference_config = inference_config
+
+ def get_inference_config(self):
+ return self._inference_config
+
+ def set_input_tensor(self, input_tensor):
+ pass
+
+ def first_stage_of_pipeline(self):
+ pass
+
+ @classmethod
+ def list_available_models(cls):
+ pass
+
+ def load_frozen_model(self, cfg, trainer):
+ pass
+
+
+def get_pseudo_tokens(num_virtual_tokens):
+ """
+ Takes in an integer and returns a list of strings where each string
+ is a numbered virtual token placeholder. If
+ num_virtual_tokens = 3, then this function returns:
+
+ ["", "", ""]
+
+ Args:
+ num_virtual_tokens: (int) Number of virtual token strings you want to make
+
+ returns a list of string.
+
+ """
+ pseudo_tokens = [
+ VirtualPromptPlaceholderToken.BASE.value + str(i) + VirtualPromptPlaceholderToken.END.value
+ for i in range(num_virtual_tokens)
+ ]
+
+ return pseudo_tokens
diff --git a/nemo/collections/tts/models/speechllm/megatron_t5_speechllm_model.py b/nemo/collections/tts/models/speechllm/megatron_t5_speechllm_model.py
new file mode 100644
index 000000000000..d35d53b3cac7
--- /dev/null
+++ b/nemo/collections/tts/models/speechllm/megatron_t5_speechllm_model.py
@@ -0,0 +1,2672 @@
+# Copyright (c) 2024, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import itertools
+import json
+import os
+import random
+import string
+from functools import partial
+from typing import Any, List
+
+import editdistance
+import imageio
+import numpy as np
+import soundfile as sf
+import torch
+from lightning.pytorch.trainer.trainer import Trainer
+from omegaconf import OmegaConf
+from omegaconf.dictconfig import DictConfig
+from omegaconf.omegaconf import open_dict
+
+import nemo.collections.asr as nemo_asr
+from nemo.collections.asr.metrics.wer import word_error_rate
+from nemo.collections.common.tokenizers.sentencepiece_tokenizer import SentencePieceSpeechLLMTTSTokenizer
+from nemo.collections.nlp.models.language_modeling.megatron_t5_model import MegatronT5Model
+from nemo.collections.nlp.models.language_modeling.megatron_t5_sft_model import MegatronT5SFTModel
+from nemo.collections.nlp.modules.common.megatron.token_level_encoder_decoder import (
+ MegatronTokenLevelEncoderDecoderSpeechLLMModule,
+)
+from nemo.collections.nlp.modules.common.megatron.utils import (
+ average_losses_across_data_parallel_group,
+ get_iterator_k_split,
+ init_method_normal,
+)
+from nemo.collections.nlp.parts.nlp_overrides import NLPSaveRestoreConnector
+from nemo.collections.nlp.parts.utils_funcs import get_last_rank
+from nemo.collections.tts.data.speechllm.t5_speechllm_dataset import Lang, T5SpeechLMDataset
+from nemo.collections.tts.data.speechllm.t5_speechllm_tarred_dataset import T5SpeechLMTarredDataset
+from nemo.collections.tts.losses.aligner_loss import ForwardSumLoss
+from nemo.collections.tts.models import AudioCodecModel
+from nemo.collections.tts.models.speechllm.megatron_base_speechllm_prompt_model import MegatronBaseSpeechLM
+from nemo.collections.tts.parts.utils.helpers import plot_alignment_to_numpy_for_speechllm, plot_codec_to_numpy
+from nemo.utils import AppState, logging
+
+try:
+ from apex.transformer.pipeline_parallel.utils import get_micro_batch_size, get_num_microbatches
+
+ HAVE_APEX = True
+
+except (ImportError, ModuleNotFoundError):
+
+ HAVE_APEX = False
+
+try:
+ from megatron.core import parallel_state, tensor_parallel
+ from megatron.core.enums import ModelType
+ from megatron.core.pipeline_parallel.schedules import get_forward_backward_func
+
+ HAVE_MEGATRON_CORE = True
+
+except (ImportError, ModuleNotFoundError):
+
+ HAVE_MEGATRON_CORE = False
+
+
+import time
+
+import librosa
+from torchaudio.pipelines import SQUIM_SUBJECTIVE
+from transformers import Wav2Vec2FeatureExtractor, WavLMForXVector
+
+__all__ = ['MegatronT5SpeechLMModel']
+
+
+class MegatronT5OverrideModel(MegatronT5Model):
+ def _build_tokenizer(self):
+ if self._cfg.tokenizer.library == "sentencepiece":
+ if hasattr(self._cfg.tokenizer, "sentencepiece_legacy"):
+ legacy = self._cfg.tokenizer.sentencepiece_legacy
+ else:
+ legacy = True if self._cfg.tokenizer.library == 'sentencepiece' else False
+ self.tokenizer = SentencePieceSpeechLLMTTSTokenizer(
+ model_path=self.register_artifact("tokenizer.model", self._cfg.tokenizer.get('model', None)),
+ legacy=legacy,
+ )
+
+ if self._cfg.tokenizer.get('additional_special_tokens', None) is not None:
+ tokens_list = OmegaConf.to_object(self._cfg.tokenizer.additional_special_tokens)
+ self.tokenizer.add_special_tokens(tokens_list)
+ else:
+ super()._build_tokenizer()
+
+ def model_provider_func(self, pre_process, post_process, add_encoder, add_decoder):
+ if not hasattr(self.cfg, 'encoder') or not hasattr(self.cfg, 'decoder'):
+ logging.warning(
+ 'Could not find encoder or decoder in config. This is probably because of restoring an old checkpoint. Copying shared model configs to encoder and decoder configs.'
+ )
+ # After the call below, self.cfg.encoder and self.cfg.decoder will be populated with the cfg.model configs from old checkpoints.
+ self._populate_encoder_decoder_configs_for_backward_compatibility(self.cfg)
+
+ if parallel_state.get_pipeline_model_parallel_world_size() > 1 and self.cfg.encoder.arch == 'perceiver':
+ raise ValueError(f"Perceivers with pipeline parallel > 1 is not supported yet.")
+
+ if not hasattr(self.cfg, 'embedding_init_method_std'):
+ embedding_init_method_std = self.cfg.encoder.init_method_std
+ else:
+ embedding_init_method_std = self.cfg.embedding_init_method_std
+
+ if not hasattr(self.cfg, 'embedding_dropout'):
+ embedding_dropout = self.cfg.encoder.hidden_dropout
+ else:
+ embedding_dropout = self.cfg.embedding_dropout
+
+ model = MegatronTokenLevelEncoderDecoderSpeechLLMModule(
+ config=self.model_parallel_config,
+ encoder_cfg=self.cfg.encoder,
+ decoder_cfg=self.cfg.decoder,
+ vocab_size=self.padded_vocab_size,
+ max_position_embeddings=self.cfg.max_position_embeddings,
+ num_tokentypes=0,
+ parallel_output=True,
+ pre_process=pre_process,
+ post_process=post_process,
+ fp16_cross_entropy=self.cfg.get('fp16_lm_cross_entropy', False),
+ precision=self.cfg.get('precision', 16),
+ embedding_init_method_std=embedding_init_method_std,
+ embedding_dropout=embedding_dropout,
+ label_smoothing=self.cfg.get('label_smoothing', 0.0),
+ add_encoder=add_encoder,
+ add_decoder=add_decoder,
+ share_token_embeddings=self.cfg.get('share_token_embeddings', True),
+ share_decoder_tokens_head_embeddings=self.cfg.get('share_decoder_tokens_head_embeddings', True),
+ tokens_head_bias=self.cfg.get('tokens_head_bias', True),
+ hiddens_cfg=self.cfg.get('hiddens', None),
+ )
+ return model
+
+
+class MegatronT5SpeechLMModel(MegatronBaseSpeechLM):
+ """
+ Model class for prompt-tuning or p-tuning a pretrained Megatron T5 model.
+
+ Prompt Tuning initializes virtual prompt embeddings directly from a copy of
+ certain token embeddings from the pretrained T5 model's vocabulary
+ and directly tunes these embedding weights. The token embeddings used in
+ initialization are specified by the user in the config file. The model can
+ be prompt-tuned for multiple tasks at once. Virtual prompts are stored in a
+ prompt table and can be added or deleted without disrupting virtual prompts
+ for other tasks.
+
+ P-tuning initializes an LSTM encoder model that generates virtual prompt
+ embeddings for every task. Each task shares the same encoder. After p-tuning
+ is complete, the learned virtual prompts can be saved to the prompt table
+ using add_ptuned_prompts_to_prompt_table(). Thus, if a user wants to add a
+ new virtual prompt via p-tuning, they do not need to retrain on all previous
+ tasks. This gives p-tuning the same task flexibility as prompt-tuning.
+ """
+
+ def __init__(self, cfg: DictConfig, trainer: Trainer):
+ super().__init__(cfg, trainer)
+ self.model_type = ModelType.encoder_and_decoder
+ speech_codebook_size = cfg.data.get('speech_codebook_size', 1024)
+ num_speech_codebooks = cfg.data.get('num_speech_codebooks', 8)
+ speech_offset = cfg.data.get('speech_offset', 30000)
+ codecmodel_type = cfg.get('codecmodel_type', 'nemo_codec')
+ attn_prior_scaledown_start_step = cfg.get('attn_prior_scaledown_start_step', 10000)
+ attn_prior_end_step = cfg.get('attn_prior_end_step', 11000)
+ num_cross_attention_heads = cfg.get('num_cross_attention_heads', 12)
+ self.lm_vocab_size = cfg.get('lm_vocab_size', 30000)
+ self.context_pattern = cfg.data.get('context_pattern', 'parallel')
+ self.context_conditioning = cfg.get('context_conditioning', "decoder")
+ self.context_duration_min = cfg.data.get('context_duration_min', 2.9)
+ self.context_duration_max = cfg.data.get('context_duration_max', 2.9)
+ self.codebook_fps = cfg.data.get('codebook_fps', 86)
+ self.decoder_context_len = 0
+ if self.context_conditioning == "decoder":
+ assert self.context_duration_min == self.context_duration_max, "Decoder context duration must be fixed"
+ self.decoder_context_len = int(self.codebook_fps * self.context_duration_min)
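+            # e.g. with the default codebook_fps=86 and a fixed 2.9 s context,
+            # decoder_context_len = int(86 * 2.9) = 249 codec frames.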
+
+ self.speech_offset = speech_offset
+ self.speech_codebook_size = speech_codebook_size
+ self.num_speech_codebooks = num_speech_codebooks
+ self.codecmodel_type = codecmodel_type
+ self.enc_output_to_layers = cfg.get('enc_output_to_layers', None)
+ if self.enc_output_to_layers is not None:
+ # Convert from listconfig to list
+ self.enc_output_to_layers = [[l for l in encoder_layer] for encoder_layer in self.enc_output_to_layers]
+
+ self.frozen_model.enc_dec_model.speech_offset = speech_offset
+ self.frozen_model.enc_dec_model.speech_codebook_size = speech_codebook_size
+ self.frozen_model.enc_dec_model.num_speech_codebooks = num_speech_codebooks
+ self.frozen_model.enc_dec_model.seq_pattern = cfg.get('seq_pattern', 'parallel')
+ self.frozen_model.enc_dec_model.attn_prior_scaledown_start_step = attn_prior_scaledown_start_step
+ self.frozen_model.enc_dec_model.attn_prior_end_step = attn_prior_end_step
+ self.frozen_model.enc_dec_model.alignment_decoder_layerids = cfg.get(
+ 'alignment_decoder_layerids', list(range(0, 12))
+ )
+ self.frozen_model.enc_dec_model.return_all_crossattention_probs = cfg.get(
+ 'return_all_crossattention_probs', False
+ )
+ self.frozen_model.enc_dec_model.num_cross_attention_heads = num_cross_attention_heads
+ self.frozen_model.enc_dec_model.context_conditioning = self.context_conditioning
+ self.frozen_model.enc_dec_model.decoder_context_len = self.decoder_context_len
+ self.frozen_model.enc_dec_model.enc_output_to_layers = self.enc_output_to_layers
+
+ self.alignment_loss_start_step = 0
+ self.alignment_loss_end_step = float('inf')
+ self.use_alignment_loss = cfg.get('use_alignment_loss', False)
+ if self.use_alignment_loss:
+ alignment_loss_scale = cfg.get('alignment_loss_scale', 1.0)
+ self.frozen_model.enc_dec_model.use_alignment_loss = True
+ self.frozen_model.enc_dec_model.forward_sum_loss = ForwardSumLoss(loss_scale=alignment_loss_scale)
+ self.frozen_model.enc_dec_model.alignment_text_end_offset = cfg.get('alignment_text_end_offset', 0)
+ self.frozen_model.enc_dec_model.align_every_n_head = cfg.get('align_every_n_head', 1)
+ self.alignment_loss_start_step = cfg.get('alignment_loss_start_step', 0)
+ self.alignment_loss_end_step = cfg.get('alignment_loss_end_step', float('inf'))
+
+ # Need to explicitly set this since it is already initialized
+ self.frozen_model.enc_dec_model.tokens_head.parallel_output = self.frozen_model.enc_dec_model.parallel_output
+
+ list_of_speech_heads = []
+ list_of_speech_tokens_embeddings = []
+ for _ in range(self.num_speech_codebooks - 1):
+            # init is NOT used since we overwrite the weight below anyway
+ _speech_head_embedding = tensor_parallel.VocabParallelEmbedding(
+ speech_codebook_size,
+ embedding_dim=self.word_embeddings.embedding_dim,
+ init_method=lambda x: x.data.fill_(0),
+ config=self.model_parallel_config,
+ )
+ _speech_head_embedding.weight.data.fill_(0)
+ _speech_head_embedding.shared = True
+ list_of_speech_tokens_embeddings.append(_speech_head_embedding)
+ # Linear layer that maps from hidden size to speech codebook size
+ hidden_size = self.frozen_model.enc_dec_model.decoder_cfg.hidden_size
+ init_method_std = self.frozen_model.enc_dec_model.decoder_cfg.init_method_std
+ # Changing to ColumnParallelLinear instead of Linear to support 3b Tensor Parallelism
+ _speech_head = tensor_parallel.ColumnParallelLinear(
+ input_size=hidden_size,
+ output_size=speech_codebook_size,
+ bias=True,
+ gather_output=not self.frozen_model.enc_dec_model.parallel_output,
+ init_method=init_method_normal(init_method_std),
+ config=self.model_parallel_config,
+ )
+ list_of_speech_heads.append(_speech_head)
+
+ self.frozen_model.enc_dec_model.speech_tokens_heads = torch.nn.ModuleList(list_of_speech_heads)
+ self.frozen_model.enc_dec_model.speech_tokens_embeddings = torch.nn.ModuleList(
+ list_of_speech_tokens_embeddings
+ )
+
+ self.sample_rate = 24000
+ if codecmodel_type == 'nemo_codec':
+ codec_model = AudioCodecModel.restore_from(cfg.get('codecmodel_path'))
+ codec_model.to('cuda')
+ codec_model.eval()
+ self.sample_rate = 22050
+ else:
+ raise NotImplementedError()
+
+ self.additional_models = {'codec': codec_model}
+ self.train_check_interval = self.cfg.get('train_check_interval', 500)
+ self.plot_alignments_sliced = self.cfg.get('plot_alignments_sliced', True)
+ app_state = AppState()
+ self.is_rank_zero = app_state.global_rank == 0
+ self.predict_step_outputs = []
+ self.phoneme_tokenizer = None
+
+        # classifier-free guidance (CFG) option during training. The probability (0.0 <= ε <= 1.0) controls how
+        # often the text or audio tokens in a batch are replaced by [UNK], mimicking the text- or audio-free scenario.
+        # If a random number is greater than ε, the text or audio tokens are kept as-is; otherwise, they are
+        # replaced by [UNK]. Defaults to 0.0, meaning CFG is disabled.
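+        # e.g. with train_text_cfg_prob=0.1, roughly 10% of training batches have their question
+        # text tokens replaced by [UNK] (an illustrative sketch of the options configured below).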
+ self.train_text_cfg_prob = cfg.get('train_text_cfg_prob', 0.0)
+ self.train_audio_cfg_prob = cfg.get('train_audio_cfg_prob', 0.0)
+ self._rng = random.Random()
+
+        # control the strength of the classifier-free guidance during inference: Logits_cfg = w*Logits_cond + (1-w)*Logits_uncond,
+        # equivalent to Logits_cfg = Logits_cond + alpha*(Logits_cond - Logits_uncond) where alpha = w - 1.
+        # w defaults to 1.0, indicating no interpolation is applied.
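+        # worked example (illustrative): with w = 1.5, Logits_cfg = 1.5*Logits_cond - 0.5*Logits_uncond,
+        # i.e. alpha = 0.5 pushes predictions away from the unconditioned logits.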
+ self.inference_cfg_interpolation_scale = cfg.get('inference_cfg_interpolation_scale', 1.0)
+ self.inference_apply_text_cfg = cfg.get('inference_apply_text_cfg', False)
+ self.inference_apply_audio_cfg = cfg.get('inference_apply_audio_cfg', False)
+ if self.inference_cfg_interpolation_scale == 1.0:
+ self.inference_apply_text_cfg = False
+ self.inference_apply_audio_cfg = False
+
+ # whether to apply cfg filter to address faster speech rate.
+ self.inference_apply_cfg_filter = cfg.get("inference_apply_cfg_filter", False)
+
+        # this scale is suggested to be smaller than `self.question_guidance_scale`; it balances the weights
+        # between the conditioned logits after applying the cfg filter and the original unconditioned logits.
+        # Defaults to 1.0, indicating that only conditioned logits are used.
+ if not self.inference_apply_cfg_filter:
+ self.inference_cfg_filter_interpolation_scale = None
+ else:
+ self.inference_cfg_filter_interpolation_scale = cfg.get('inference_cfg_filter_interpolation_scale', 1.0)
+
+ # whether to estimate MOS in predict_step.
+ self.estimate_mos = cfg.get('estimate_mos', True)
+ if self.estimate_mos:
+            # requires specifying a non-matching, high-quality, clean reference audio file, used to estimate MOS.
+ self.non_matching_ref_audio_filepath = cfg.get('non_matching_ref_audio_filepath', None)
+ if self.non_matching_ref_audio_filepath is None:
+ raise ValueError(
+ f"Please provide a high-quality reference audio to estimate the MOS. Alternatively, "
+ f"set `model.estimate_mos=False` to disable MOS estimation."
+ )
+ if not os.path.exists(self.non_matching_ref_audio_filepath):
+ raise FileNotFoundError(
+ f"Please provide a valid file path for a high-quality reference audio to estimate"
+ f" the MOS. Alternatively, set `model.estimate_mos=False` to disable MOS estimation."
+ )
+
+ def decode_wav_from_codec_model(self, codes):
+ codec_model = self.additional_models['codec']
+ if self.codecmodel_type == 'nemo_codec':
+ codec_len = torch.Tensor([codes.shape[1]]).long().cuda()
+ if codec_len < 10:
+                # return one second of silence
+ return torch.zeros(24000).cuda()
+ wav, _ = codec_model.decode(tokens=codes.unsqueeze(0), tokens_len=codec_len)
+ wav = wav[0]
+ else:
+ raise NotImplementedError()
+ return wav
+
+ def first_stage_of_pipeline(self):
+ if self.frozen_model.enc_dec_model.pre_process and parallel_state.get_pipeline_model_parallel_rank() == 0:
+ return True
+ return False
+
+ def forward(
+ self,
+ virtual_tokens,
+ context_and_question_tokens,
+ enc_mask,
+ dec_input,
+ dec_mask,
+ position_ids,
+ taskname_ids,
+ labels=None,
+ speech_mask=None,
+ inference=False,
+ inference_step=0,
+ cross_attention_prior=None,
+ text_limits=None,
+ decoder_max_sequence_len=None,
+ encoder_max_sequence_len=None,
+ ):
+ """
+ Special forward method for p-tuning/prompt-tuning pretrained
+ T5 style models.
+ """
+ if isinstance(context_and_question_tokens, list):
+ multi_encoder = True
+ assert isinstance(enc_mask, list)
+ assert isinstance(position_ids, list)
+ if cross_attention_prior is None:
+ cross_attention_prior = [None for _ in range(len(context_and_question_tokens))]
+ assert isinstance(cross_attention_prior, list)
+ assert len(context_and_question_tokens) == len(enc_mask) == len(position_ids) == len(cross_attention_prior)
+ else:
+ multi_encoder = False
+ context_and_question_tokens = [context_and_question_tokens]
+ enc_mask = [enc_mask]
+ position_ids = [position_ids]
+ cross_attention_prior = [cross_attention_prior]
+
+ enc_output = None
+ logging.debug(
+ f"self.first_stage_of_pipeline()={self.first_stage_of_pipeline()}\tinference_step={inference_step}"
+ )
+ if self.first_stage_of_pipeline() and inference_step == 0:
+ # Get embeddings for text tokens and insert virtual token embeddings
+ encoder_input_list = []
+ for ei in range(len(context_and_question_tokens)):
+ input_embeds = self.get_embeddings_and_combine(
+ [virtual_tokens, context_and_question_tokens[ei]], taskname_ids, inference
+ )
+ # TODO: This check needs to be revisited with PP support.
+ if hasattr(self.frozen_model.enc_dec_model.encoder_embedding, 'position_embeddings'):
+ position_embeddings = self.frozen_model.enc_dec_model.encoder_embedding.position_embeddings(
+ position_ids[ei]
+ )
+ encoder_input = input_embeds + position_embeddings
+ else:
+ encoder_input = input_embeds
+ encoder_input_list.append(encoder_input)
+ else:
+ encoder_input_list = None
+ encoder_input = None
+ if inference_step != 0:
+ enc_output = context_and_question_tokens if multi_encoder else context_and_question_tokens[0]
+
+        # If the decoder input starts with <pad> instead of <bos>, which is the case for huggingface T5 models, we don't want to mask the first token.
+        # For NeMo-Megatron, the sequence starts with <bos>, which is never masked, so we can always set index 0 to be unmasked.
+ dec_mask[:, 0] = 1
+
+ if not self.cfg.data.get('use_attention_prior', False):
+ cross_attention_prior = [None for _ in range(len(cross_attention_prior))]
+
+ _encoder_input = encoder_input_list
+ if not multi_encoder:
+ enc_mask = enc_mask[0]
+ cross_attention_prior = cross_attention_prior[0]
+ _encoder_input = encoder_input_list[0] if encoder_input_list is not None else None
+
+ # Call forward on T5 model with preprocessed embeddings
+ if inference and inference_step == 0:
+ set_inference_key_value_memory = True
+ else:
+ set_inference_key_value_memory = False
+
+ if self.autocast_dtype == torch.float32:
+ output, out_logits = self.frozen_model.enc_dec_model(
+ enc_input_ids=None,
+ enc_attn_mask=enc_mask,
+ dec_input_ids=dec_input,
+ dec_attn_mask=dec_mask,
+ token_type_ids=None,
+ labels=labels,
+ output_enc_hidden_only=False,
+ enc_input=_encoder_input,
+ enc_output=enc_output,
+ speech_mask=speech_mask,
+ cross_attention_prior=cross_attention_prior,
+ text_limits=text_limits,
+ global_step=self.global_step,
+ set_inference_key_value_memory=set_inference_key_value_memory,
+ decoder_max_sequence_len=decoder_max_sequence_len,
+ encoder_max_sequence_len=encoder_max_sequence_len,
+ )
+ else:
+ with torch.autocast(device_type="cuda", dtype=self.autocast_dtype):
+ output, out_logits = self.frozen_model.enc_dec_model(
+ enc_input_ids=None,
+ enc_attn_mask=enc_mask,
+ dec_input_ids=dec_input,
+ dec_attn_mask=dec_mask,
+ token_type_ids=None,
+ labels=labels,
+ output_enc_hidden_only=False,
+ enc_input=_encoder_input,
+ enc_output=enc_output,
+ speech_mask=speech_mask,
+ cross_attention_prior=cross_attention_prior,
+ text_limits=text_limits,
+ global_step=self.global_step,
+ set_inference_key_value_memory=set_inference_key_value_memory,
+ decoder_max_sequence_len=decoder_max_sequence_len,
+ encoder_max_sequence_len=encoder_max_sequence_len,
+ )
+
+ return output, encoder_input, out_logits
+
+ def load_frozen_model(self, cfg, trainer):
+ self.megatron_amp_O2 = cfg.get('megatron_amp_o2', False)
+
+ # TODO: Fix this once apex patches FusedScaledMaskedSoftmax.
+ # This is a workaround for the fact that `masked_softmax_fusion` has issues with certain input sizes that may be present while finetuning.
+ cfg_language_model_path = cfg.get('language_model_path', None)
+ cfg_frozen_model = cfg.get('frozen_model', None)
+ if not (bool(cfg_language_model_path) ^ bool(cfg_frozen_model)):
+ raise ValueError(
+ "T5-TTS requires either 'language_model_path' or 'frozen_model' in its config, but not both."
+ )
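+        # i.e. exactly one of 'language_model_path' and 'frozen_model' must be provided:
+        # the XOR above is False when both are set or both are missing.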
+
+ if cfg_language_model_path:
+ t5_cfg = MegatronT5Model.restore_from(cfg_language_model_path, trainer=trainer, return_config=True)
+ else:
+ t5_cfg = cfg_frozen_model
+
+ OmegaConf.set_struct(t5_cfg, True)
+ with open_dict(t5_cfg):
+ if hasattr(t5_cfg, 'encoder') and hasattr(t5_cfg, 'decoder'):
+ t5_cfg.encoder.masked_softmax_fusion = False
+ t5_cfg.decoder.masked_softmax_fusion = False
+ else:
+ t5_cfg.masked_softmax_fusion = False
+ t5_cfg.megatron_amp_O2 = self.megatron_amp_O2
+ # hack to make the _GLOBAL_NUM_MICROBATCHES_CALCULATOR initialize
+ t5_cfg.micro_batch_size = cfg.get('micro_batch_size', 4)
+ t5_cfg.global_batch_size = cfg.get('global_batch_size', 4)
+ t5_cfg.precision = trainer.precision
+ t5_cfg.tokenizer.num_sentinel_tokens = cfg.get('num_sentinel_tokens', 39184 - 29056)
+ t5_cfg.seq_length = cfg.data.max_seq_length
+ if cfg.get('max_position_embeddings', None) is None:
+ t5_cfg.max_position_embeddings = cfg.data.max_seq_length
+ else:
+ t5_cfg.max_position_embeddings = cfg.get('max_position_embeddings')
+ t5_cfg.use_flash_attention = cfg.get('use_flash_attention', False)
+ if cfg.get('override_token_model', None):
+ t5_cfg.tokenizer.model = cfg['override_token_model']
+ if cfg.get('override_tokenizer_vocab_file', None):
+ t5_cfg.tokenizer.vocab_file = cfg['override_tokenizer_vocab_file']
+
+ if cfg.get('train_from_scratch', False):
+ print("Training from scratch!")
+ # Defaults for 220m model
+            # To override any of these, add +model.override_<key>=<value> to the config file.
+            # E.g. +model.override_hidden_size=1024
+            override_keys = [
+ 'hidden_size', # 768
+ 'num_layers', # 12
+ 'num_attention_heads', # 12
+ 'hidden_dropout', # 0.1
+ 'attention_dropout', # 0.1
+ 'kv_channels', # 64
+ 'ffn_hidden_size', # 2048
+ ]
+            for k in override_keys:
+ if cfg.get(f'override_{k}') is not None:
+ t5_cfg[k] = cfg.get(f'override_{k}')
+
+ self.frozen_model = MegatronT5OverrideModel(t5_cfg, trainer=trainer)
+ num_params = sum(p.numel() for p in self.frozen_model.parameters() if p.requires_grad)
+ print(f"Number of parameters: {num_params}")
+ else:
+ print(f"Loading from pretrained checkpoint: {cfg_language_model_path}")
+ if cfg_language_model_path is None:
+ raise ValueError(
+ "T5-TTS SFT on pretrained model checkpoint requires `langauge_model_path` in its config."
+ )
+
+ self.frozen_model = MegatronT5OverrideModel.restore_from(
+ cfg_language_model_path,
+ trainer=trainer,
+ override_config_path=t5_cfg,
+ save_restore_connector=NLPSaveRestoreConnector(),
+ )
+
+ if not cfg.get('english_only_model', False):
+ self.frozen_model.tokenizer.add_phone_tokens_to_special_tokens()
+
+ logging.info(f"self.frozen_model {self.frozen_model}")
+
+ def fwd_bwd_step(self, dataloader_iter, batch_idx, forward_only):
+ """
+ Dataloader produces a global batch which is turned into a list of microbatches.
+ The list of microbatches is then piped through the pipeline using megatron-core fwd/bwd functions.
+ """
+ # Get seq length of batch
+ batch = next(dataloader_iter)
+ _, seq_length = batch[0].shape
+ if batch[4].dim() > 2:
+ _, _, dec_seq_length = batch[4].shape
+ else:
+ _, dec_seq_length = batch[4].shape
+ data_iter = get_iterator_k_split(batch, get_num_microbatches())
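+        # get_iterator_k_split slices the global batch into microbatches; e.g. a global batch of
+        # 16 with get_num_microbatches() == 4 yields 4 microbatches of 4 samples each (a sketch;
+        # the actual sizes come from the megatron microbatch calculator).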
+
+ fwd_bwd_function = get_forward_backward_func()
+
+ losses_reduced_per_micro_batch = fwd_bwd_function(
+ forward_step_func=self.get_forward_output_and_loss_func(forward_only),
+ data_iterator=data_iter,
+ model=[self],
+ num_microbatches=get_num_microbatches(),
+ forward_only=forward_only,
+ seq_length=seq_length,
+ micro_batch_size=get_micro_batch_size(),
+ decoder_seq_length=dec_seq_length,
+ )
+
+ # only the last stages of the pipeline return losses
+ if losses_reduced_per_micro_batch:
+ # average loss across micro batches
+ loss_tensors_list = [loss_reduced['avg'] for loss_reduced in losses_reduced_per_micro_batch]
+ loss_tensor = torch.concat(loss_tensors_list)
+ loss_mean = loss_tensor.mean()
+ else:
+ # we're not on the last pipeline stage so no losses
+ loss_mean = torch.tensor(0.0).cuda()
+
+ return loss_mean
+
+ def convert_tokens_to_range(self, tokens, apply_offset_correction=True, pattern=None):
+        # convert tokens to range [0, speech_codebook_size - 1]
+ output_tokens = tokens.clone()
+ if apply_offset_correction:
+ output_tokens[0] = output_tokens[0] - self.speech_offset
+ output_tokens = torch.clamp(output_tokens, min=0, max=self.speech_codebook_size - 1)
+ if pattern is None:
+ pattern = self.cfg.get('seq_pattern', 'delay_parallel')
+ if pattern == "delay_parallel":
+ output_tokens_new = []
+ for _c in range(output_tokens.shape[0]):
+ si = _c
+ ei = _c + output_tokens.shape[1] - self.num_speech_codebooks
+ output_tokens_new.append(output_tokens[_c, si:ei])
+ output_tokens_new = torch.stack(output_tokens_new)
+ output_tokens = output_tokens_new
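+            # sketch of the slicing above, assuming num_speech_codebooks=8 and T timesteps:
+            # codebook 0 keeps steps [0, T-8), codebook 1 keeps [1, T-7), ..., codebook 7 keeps
+            # [7, T-1), undoing the one-step-per-codebook shift of the delay_parallel pattern.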
+
+ return output_tokens
+
+ def get_forward_output_and_loss_func(self, validation_step=False):
+ def fwd_output_and_loss_func(dataloader_iter, model):
+ batch = next(dataloader_iter)
+ _batch = []
+ for x in batch:
+ if isinstance(x, torch.Tensor):
+ x = x.cuda(non_blocking=True)
+ elif isinstance(x, list):
+ if isinstance(x[0], torch.Tensor):
+ x = [y.cuda(non_blocking=True) for y in x]
+ _batch.append(x)
+ batch = _batch
+ # batch = [x.cuda(non_blocking=True) if isinstance(x, torch.Tensor) else x for x in batch]
+ (
+ virtual_tokens,
+ context_and_question_tokens,
+ enc_mask,
+ dec_input,
+ dec_input_mask,
+ labels,
+ loss_mask,
+ position_ids,
+ taskname_ids,
+ speech_mask,
+ context_and_question_tokens_lens,
+ cross_attention_prior,
+ text_limits,
+ _, # TODO: text limit and lang not in tarred dataset
+ _,
+ ) = batch
+
+ if self.trainer.global_step % self.train_check_interval == 0 and not validation_step and self.is_rank_zero:
+ self.frozen_model.enc_dec_model.logging_step = True
+
+ _cross_attention_prior = cross_attention_prior
+ if isinstance(context_and_question_tokens, list):
+ # None for context and prior for question
+ _cross_attention_prior = [None, cross_attention_prior]
+
+ output_tensor, encoder_input, out_logits = model(
+ virtual_tokens,
+ context_and_question_tokens,
+ enc_mask,
+ dec_input,
+ dec_input_mask,
+ position_ids,
+ taskname_ids,
+ labels=labels,
+ speech_mask=speech_mask,
+ cross_attention_prior=_cross_attention_prior,
+ text_limits=text_limits,
+ inference=False,
+ )
+ output_tensor = output_tensor.contiguous()
+
+ alignment_loss = out_logits[3]
+ if alignment_loss is not None:
+ self.logger.experiment.add_scalar('train_alignment_loss', alignment_loss, self.global_step)
+
+ if self.trainer.global_step % self.train_check_interval == 0 and not validation_step and self.is_rank_zero:
+ self.frozen_model.enc_dec_model.logging_step = False
+ with torch.no_grad():
+ with torch.cuda.amp.autocast(enabled=False):
+ if torch.count_nonzero(speech_mask) == 0:
+ text_labels = labels[:, 0, :] # [B, 8, T] -> [B, T]
+ token_logits = out_logits[0] * 1 # [T, B, V]
+ if self.frozen_model.enc_dec_model.parallel_output:
+ # Gather from tensor parallel region
+ token_logits = tensor_parallel.gather_from_tensor_model_parallel_region(token_logits)
+ token_logits = token_logits.argmax(dim=2) # [T, B]
+ token_logits = token_logits.t() # [B, T]
+ score = 0
+ for i in range(text_labels.size()[0]):
+ r = text_labels[i].long()
+ nzm = r != 0
+ r = r.tolist()
+ h = token_logits[i].long() * nzm
+ h = h.tolist()
+ score += editdistance.eval(r, h)
+ score /= text_labels.size()[0]
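+                            # note: this is a batch-averaged token-level edit distance (logged
+                            # below as "WER"), not a length-normalized word error rate.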
+ logging.info(f"wer score : {score}")
+ self.logger.experiment.add_scalar('WER', score, self.global_step)
+ else:
+ audio_len = (
+ self.decoder_context_len + (labels[0][0][self.decoder_context_len :] != 0).sum().item()
+ )
+ labels_to_1024 = self.convert_tokens_to_range(labels[0, :, 0:audio_len])
+ label_wav = self.decode_wav_from_codec_model(labels_to_1024)
+ dec_input_to_1024 = self.convert_tokens_to_range(dec_input[0, :, 0:audio_len])
+ dec_input_wav = self.decode_wav_from_codec_model(dec_input_to_1024)
+ self.logger.experiment.add_audio(
+ "train_label_wav", label_wav, self.global_step, self.sample_rate
+ )
+ self.logger.experiment.add_audio(
+ "train_dec_input_wav", dec_input_wav, self.global_step, self.sample_rate
+ )
+ if isinstance(context_and_question_tokens, list):
+ context_tokens = context_and_question_tokens[0]
+ question_tokens = context_and_question_tokens[1]
+ input_token_list_all = [
+ question_tokens[0, 0, i].item() for i in range(question_tokens.shape[2])
+ ]
+ input_token_list = [
+ (ti, t)
+ for ti, t in enumerate(input_token_list_all)
+ if t != 0 and t < self.speech_offset
+ ]
+ context_end_step = context_and_question_tokens_lens[0][0].item()
+ _context_tokens = context_tokens[0, :, :context_end_step]
+ else:
+ input_token_list_all = [
+ context_and_question_tokens[0, 0, i].item()
+ for i in range(context_and_question_tokens.shape[2])
+ ]
+ input_token_list = [
+ (ti, t)
+ for ti, t in enumerate(input_token_list_all)
+ if t != 0 and t < self.speech_offset
+ ]
+ context_end_step = input_token_list[0][0]
+ _context_tokens = context_and_question_tokens[0, :, :context_end_step]
+
+ if context_end_step > 1:
+ is_speech_context = _context_tokens[1, :].sum().item() > 0
+ if is_speech_context:
+ _context_tokens = self.convert_tokens_to_range(
+ _context_tokens, pattern=self.context_pattern
+ )
+ _context_wav = self.decode_wav_from_codec_model(_context_tokens)
+ self.logger.experiment.add_audio(
+ "train_context_wav", _context_wav, self.global_step, self.sample_rate
+ )
+ else:
+ _context_token_list = [v.item() for v in _context_tokens[0, :]]
+ _context_text = self.frozen_model.tokenizer.ids_to_text(
+ [v for v in _context_token_list if v < self.lm_vocab_size]
+ )
+ self.logger.experiment.add_text(
+ "train_context_text", _context_text, self.global_step
+ )
+
+ question_si = text_limits[0, 0].item() - virtual_tokens.shape[1]
+ question_ei = text_limits[0, 1].item() - virtual_tokens.shape[1]
+ text_si = text_limits[0, 0].item()
+ text_ei = text_limits[0, 1].item()
+ input_text = self.frozen_model.tokenizer.ids_to_text(
+ [v for v in input_token_list_all[question_si:question_ei] if v < self.lm_vocab_size]
+ )
+ self.logger.experiment.add_text("Train Input Text", input_text, self.global_step)
+
+ input_phoneme_tokens = [
+ v - self.lm_vocab_size
+ for v in input_token_list_all[question_si:question_ei]
+ if v >= self.lm_vocab_size
+ ]
+
+ if len(input_phoneme_tokens) > 0:
+ phoneme_text = self.phoneme_tokenizer.decode(input_phoneme_tokens)
+ self.logger.experiment.add_text(
+ "Train Input Phoneme Text", phoneme_text, self.global_step
+ )
+
+ token_logits = out_logits[0]
+ speech_logits_list = out_logits[1]
+
+ attention_probs_list = out_logits[2] # list of (BS, 12, out_length, in_length)
+ if attention_probs_list is not None:
+ attention_sliced_list = []
+ for lidx in range(len(attention_probs_list)):
+ attention_probs = attention_probs_list[lidx]
+ for _i in range(attention_probs.shape[1]):
+ name = f"Attention Probs Layer {lidx} Head {_i}"
+ attention_to_plot = attention_probs[0, _i, :audio_len, :text_ei]
+ if self.plot_alignments_sliced:
+ attention_to_plot = attention_probs[0, _i, 0:audio_len, text_si:text_ei]
+ # 4 to offset "Text to Speech this"
+ name += " Sliced"
+ alignment_image = plot_alignment_to_numpy_for_speechllm(
+ attention_to_plot.cpu().float().numpy().T,
+ phoneme_ver=0 if self.plot_alignments_sliced else 1,
+ phoneme_seq=None if self.plot_alignments_sliced else [text_si],
+ )
+ self.logger.experiment.add_image(
+ name,
+ alignment_image,
+ self.global_step,
+ dataformats="HWC",
+ )
+ attention_sliced_list.append(
+ attention_probs[
+ 0, _i, self.decoder_context_len : audio_len, text_si:text_ei
+ ]
+ )
+ attention_sliced = torch.stack(attention_sliced_list)
+ attention_sliced = torch.mean(attention_sliced, 0)
+ text = None
+ if len(input_text) > 0:
+ text = self.frozen_model.tokenizer.ids_to_tokens(
+ [
+ v
+ for v in input_token_list_all[question_si:question_ei]
+ if v < self.lm_vocab_size
+ ]
+ )
+ if len(input_phoneme_tokens) > 0:
+ text = phoneme_text.split("|")
+ alignment_image_sliced = plot_alignment_to_numpy_for_speechllm(
+ attention_sliced.cpu().float().numpy().T,
+ phoneme_seq=text,
+ phoneme_ver=2,
+ vmin=0.0,
+ phone_offset=0,
+ h_offset=False,
+ )
+ self.logger.experiment.add_image(
+ f"Attention Probs Average Sliced",
+ alignment_image_sliced,
+ self.global_step,
+ dataformats="HWC",
+ )
+ if self.frozen_model.enc_dec_model.parallel_output:
+ # Gather from tensor parallel region
+ token_logits = tensor_parallel.gather_from_tensor_model_parallel_region(token_logits)
+ for _i in range(len(speech_logits_list)):
+ speech_logits_list[_i] = tensor_parallel.gather_from_tensor_model_parallel_region(
+ speech_logits_list[_i]
+ )
+ speech_logits = torch.stack(speech_logits_list, dim=-1) # (t, b, 1024, 7)
+ token_logits_example = token_logits[:, 0, :] * 1
+ speech_logits_example = speech_logits[:, 0, :, :] * 1
+ first_layer_tokens = token_logits_example.argmax(dim=1) - self.speech_offset
+ other_layer_tokens = []
+ for _i in range(speech_logits_example.shape[2]):
+ other_layer_tokens.append(speech_logits_example[:, :, _i].argmax(dim=1))
+
+ all_layer_tokens = torch.stack([first_layer_tokens] + other_layer_tokens) # (8, t)
+ all_layer_tokens = self.convert_tokens_to_range(
+ all_layer_tokens, apply_offset_correction=False
+ )
+ # all_layer_tokens = torch.clip(all_layer_tokens, 0, 1023)
+ predicted_wav = self.decode_wav_from_codec_model(all_layer_tokens)
+ self.logger.experiment.add_audio(
+ "train_tf_pred_wav", predicted_wav, self.global_step, self.sample_rate
+ )
+
+ def loss_func(loss_args):
+ output_tensor, out_logits, curr_step = loss_args
+ alignment_loss = out_logits[3]
+ loss = self.frozen_model.loss_func(loss_mask, output_tensor)
+ if (
+ (alignment_loss is not None)
+ and (curr_step > self.alignment_loss_start_step)
+ and (curr_step < self.alignment_loss_end_step)
+ ):
+ logging.debug(f"Adding alignment loss. cur:{curr_step} start:{self.alignment_loss_start_step}")
+ loss = loss + alignment_loss
+ reduced_loss = average_losses_across_data_parallel_group([loss])
+ return loss, {'avg': reduced_loss}
+
+ return [output_tensor, out_logits, self.global_step], loss_func
+
+ return fwd_output_and_loss_func
+
+ def get_forward_output_only_func(self):
+ """Used in inference / predict"""
+
+ def fwd_output_only_func(dataloader_iter, model):
+ batch = next(dataloader_iter)
+ _batch = []
+ for x in batch:
+ if isinstance(x, torch.Tensor):
+ x = x.cuda(non_blocking=True)
+ elif isinstance(x, list):
+ if isinstance(x[0], torch.Tensor):
+ x = [y.cuda(non_blocking=True) for y in x]
+ _batch.append(x)
+ batch = _batch
+ # batch = [x.cuda(non_blocking=True) if isinstance(x, torch.Tensor) else x for x in batch]
+ (
+ decoder_max_sequence_len,
+ encoder_max_sequence_len,
+ context_and_question_tokens,
+ enc_mask,
+ dec_input,
+ dec_input_mask,
+ position_ids,
+ taskname_ids,
+ speech_mask,
+ ) = batch
+
+ output_logits, _, token_and_speech_logits = model(
+ context_and_question_tokens,
+ context_and_question_tokens,
+ enc_mask,
+ dec_input,
+ dec_input_mask,
+ position_ids,
+ taskname_ids,
+ labels=None,
+ speech_mask=speech_mask,
+ inference=True,
+ inference_step=1,
+ decoder_max_sequence_len=decoder_max_sequence_len,
+ encoder_max_sequence_len=encoder_max_sequence_len,
+ )
+ output_tensor = [output_logits, token_and_speech_logits]
+
+ def id_func(output_tensor):
+ return 0, {'output_logits': output_tensor[0], 'token_and_speech_logits': output_tensor[1]}
+
+ return output_tensor, id_func
+
+ return fwd_output_only_func
+
+ def backward(self, *args, **kwargs):
+ """LightningModule hook to do backward.
+ We want this to do nothing since we run backward in the fwd/bwd functions from megatron-core.
+ No need to call it here.
+ """
+ return
+
+ def optimizer_zero_grad(self, *args, **kwargs):
+ """LightningModule hook to zero grad.
+ We want this to do nothing as we are zeroing grads during the training_step.
+ """
+ return
+
+ def set_input_tensor(self, input_tensor):
+ """Set input tensor to be used instead of forward()'s input.
+ When using pipeline parallelism the input from the previous
+ stage comes from communication, not from the input, so the
+ model's forward_step_func won't have it. This function is thus
+ used by internal code to bypass the input provided by the
+ forward_step_func"""
+ self.frozen_model.enc_dec_model.set_input_tensor(input_tensor)
+
+ def on_train_epoch_start(self) -> None:
+ gbs = self.cfg.global_batch_size
+ mbs = self.cfg.micro_batch_size
+ self._reconfigure_batch_sizes(gbs, mbs)
+ return super().on_train_epoch_start()
+
+ def on_validation_epoch_start(self) -> None:
+ gbs = self.cfg.get('validation_global_batch_size', self.cfg.global_batch_size)
+ mbs = self.cfg.get('validation_micro_batch_size', self.cfg.micro_batch_size)
+ self._reconfigure_batch_sizes(gbs, mbs)
+ return super().on_validation_epoch_start()
+
+ def training_step(self, dataloader_iter, batch_idx):
+ self._optimizer.zero_grad()
+ batch = next(dataloader_iter)
+
+ # apply text classifier-free guidance by replacing input question tokens with [UNK].
+ if self.train_text_cfg_prob > 0.0:
+ if self._rng.random() < self.train_text_cfg_prob:
+ logging.info(f"Text Classifier-Free Guidance is triggered for the {batch_idx}-th batch.")
+
+                # temporarily disable computing the CTC alignment loss.
+ if self.use_alignment_loss:
+ self.frozen_model.enc_dec_model.use_alignment_loss = False
+
+                # set the cross-attention prior to None to remove the prior.
+ batch[11] = None
+
+                # replace question token IDs with [UNK]'s id. No speech offset is applied to the phoneme [UNK].
+                # instruction token IDs are BPE token IDs obtained directly from self.tokenizer without any offset.
+                # under the "Phoneme TTS" instruction, question token IDs are phoneme and grapheme token IDs
+                # offset by self.lm_vocab_size, so there is no overlap between instruction and question token IDs.
+                # under the "Text to speech this" instruction, question token IDs are BPE token IDs without any
+                # offset, so instruction and question token IDs can overlap.
+ context_and_question_tokens = batch[
+ 1
+ ] # (batch_size, self.num_speech_codebooks, max_context_question_tokens_len)
+ text_limits = batch[12]
+ virtual_tokens = batch[0]
+ question_limits = text_limits - virtual_tokens.size(
+ 1
+ ) # (b, 2), reset question range to start from [pad] context, same start position as context_and_question_tokens.
+ question_start = question_limits[:, 0].unsqueeze(1) # (b, 1)
+ question_end = question_limits[:, 1].unsqueeze(1) # (b, 1)
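+                # sketch: with question_start=[[3]] and question_end=[[7]] for a batch item, the
+                # masks built below mark positions 3..6 as question tokens to be replaced by [UNK].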
+
+                if isinstance(context_and_question_tokens, list):  # indicates self.encoder_type == 'multi_transformers'.
+ context_tokens, question_tokens = context_and_question_tokens
+ question_tokens_unconditioned = question_tokens.clone()
+ time_range = torch.arange(
+ question_tokens_unconditioned.size(2), device=question_tokens_unconditioned.device
+ ).unsqueeze(0)
+ question_mask = (time_range >= question_start) & (
+ time_range < question_end
+ ) # create a mask for question only tokens.
+ question_tokens_unconditioned[:, 0][
+ question_mask
+ ] = self.tokenizer.unk_id # only the first layer has non-zero IDs.
+ batch[1] = [context_tokens, question_tokens_unconditioned]
+ else:
+ context_and_question_tokens_unconditioned = (
+ context_and_question_tokens.clone()
+ ) # (batch_size, self.num_speech_codebooks, max_context_question_tokens_len)
+ time_range = torch.arange(
+ context_and_question_tokens_unconditioned.size(2),
+ device=context_and_question_tokens_unconditioned.device,
+ ).unsqueeze(
+ 0
+ ) # (1, max_context_question_tokens_len)
+ question_mask = (time_range >= question_start) & (
+ time_range < question_end
+ ) # create a mask for question only tokens.
+ context_and_question_tokens_unconditioned[:, 0][
+ question_mask
+ ] = self.tokenizer.unk_id # only the first layer has non-zero IDs.
+ batch[1] = context_and_question_tokens_unconditioned
+
+ del question_limits, question_start, question_end, time_range, question_mask
+ else:
+ # recover to original alignment loss config.
+ self.frozen_model.enc_dec_model.use_alignment_loss = self.use_alignment_loss
+
+ # apply audio context classifier-free guidance by replacing audio codec with [UNK]
+ if self.train_audio_cfg_prob > 0.0:
+ if self._rng.random() < self.train_audio_cfg_prob:
+ logging.info(f"Audio Classifier-Free Guidance is triggered for the {batch_idx}-th batch.")
+
+ context_and_question_tokens = batch[
+ 1
+ ] # (batch_size, self.num_speech_codebooks, max_context_question_tokens_len)
+
+                if isinstance(context_and_question_tokens, list):  # indicates self.encoder_type == 'multi_transformers'.
+ context_tokens, question_tokens = context_and_question_tokens
+ context_tokens_unconditioned = context_tokens.clone()
+ context_tokens_unconditioned[:, :, :] = (
+ self.tokenizer.unk_id
+ ) # TODO @xueyang: verify if extra tokens other than audio codec tokens are appended.
+ batch[1] = [context_tokens_unconditioned, question_tokens]
+ else:
+ # dec_input
+ dec_input = batch[3]
+ dec_input_unconditioned = dec_input.clone()
+ dec_input_unconditioned[:, :, 1 : self.decoder_context_len + 1] = (
+ self.tokenizer.unk_id
+ ) # TODO @xueyang: switch to other token id if this one is conflict with text unk.
+ batch[3] = dec_input_unconditioned
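+                    # note: the slice above starts at 1, so the decoder token at index 0 is kept and
+                    # only the audio-context frames of the decoder input are replaced by [UNK].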
+
+ loss_mean = self.fwd_bwd_step(itertools.chain([batch]), batch_idx, forward_only=False)
+ self.allreduce_gradients()
+
+        ## logging
+        # we can only log on the rank-zero process, so we broadcast the loss from the last pipeline rank.
+        # this broadcast could be avoided by updating the PTL log function to accept specific ranks.
+ torch.distributed.broadcast(loss_mean, get_last_rank())
+
+ if self.cfg.precision == 16 and hasattr(self.trainer.precision_plugin.scaler, "_scale"):
+ loss_scale = self.trainer.precision_plugin.scaler._scale
+ if loss_scale is not None:
+ self.log('loss_scale', loss_scale, batch_size=1)
+
+ self.log('reduced_train_loss', loss_mean, prog_bar=True, rank_zero_only=True, batch_size=1)
+ lr = self._optimizer.param_groups[0]['lr']
+ self.log('lr', lr, rank_zero_only=True, batch_size=1)
+ self.log('global_step', self.trainer.global_step, prog_bar=True, rank_zero_only=True, batch_size=1)
+ return loss_mean
+
+ def get_predictions(self, input_ids, enc_mask, encoder_input, labels):
+ predicted_token_ids, log_probs = self.frozen_model.decode(
+ tokens_enc=input_ids,
+ enc_mask=enc_mask,
+ num_tokens_to_generate=self.decoder_seq_length,
+ encoder_input=encoder_input,
+ bos_id=(
+ self.tokenizer.pad_id if self.cfg.data.get('decoder_starts_with_pad', False) else self.tokenizer.bos_id
+ ),
+ )
+ # Special ids to text function to handle stripping and special tokens with sentencepiece tokenizers.
+ preds_text = MegatronT5SFTModel.ids_to_text(predicted_token_ids, self.tokenizer)
+ labels_text = MegatronT5SFTModel.ids_to_text(labels, self.tokenizer)
+ input_text = MegatronT5SFTModel.ids_to_text(input_ids, self.tokenizer)
+ return {
+ 'predicted_token_ids': preds_text,
+ 'labels': labels_text,
+ 'enc_inputs': input_text,
+ }
+
+ def get_embeddings(self, tokens, taskname_ids, inference=False):
+ out = None
+ if tokens.dim() > 2:
+ for i in range(tokens.size()[1]): # for 8 channels
+ if i == 0:
+ # Embed first layer using word embeddings
+ out = self.embed_input(tokens[:, i, :], taskname_ids, inference) # (B, T, D)
+ else:
+ # Embed other layers using speech embeddings
+ cur = self.frozen_model.enc_dec_model.speech_tokens_embeddings[i - 1](tokens[:, i, :])
+ # do not add embeddings of zero tokens of other channels (except the first channel)
+ non_zero_flag = tokens[:, i, :] != 0 # (B, T)
+ cur = cur * non_zero_flag.unsqueeze(2)
+ out = out + cur
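+                    # running sum (a sketch): the final embedding per timestep is word_emb(channel 0)
+                    # plus the speech embeddings of all non-zero tokens in channels 1..num_speech_codebooks-1.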
+ else:
+ out = self.embed_input(tokens, taskname_ids, inference)
+ return out
+
+ def get_embeddings_and_combine(self, token_list, taskname_ids, inference):
+ embedding_list = []
+ for tokens in token_list:
+ embedding_list.append(self.get_embeddings(tokens, taskname_ids, inference))
+ return torch.cat(embedding_list, dim=1)
+
+ def validation_step(self, batch, batch_idx, dataloader_idx=0):
+ (
+ virtual_tokens,
+ context_and_question_tokens,
+ enc_mask,
+ dec_input,
+ dec_input_mask,
+ labels,
+ loss_mask,
+ position_ids,
+ taskname_ids,
+ speech_mask,
+ context_and_question_tokens_lens,
+ cross_attention_prior,
+ text_limits,
+ _,
+ _,
+ ) = batch
+ # loss_mask (b, t)
+ # does not use dataloader_iter due to device placement issues arising from PTL
+
+ mode = self.training
+ self.eval()
+ gbs = self.cfg.get('validation_global_batch_size', self.cfg.global_batch_size)
+ self._reconfigure_and_process_inference_batch(virtual_tokens.size(0), gbs)
+
+ loss_mean = self.fwd_bwd_step(
+ itertools.chain([batch]), batch_idx, forward_only=True
+ ) # comment this out and add custom forward function to calculate WER
+ # # logging.info (f'loss_mean {loss_mean}')
+
+ if batch_idx == 0 and self.is_rank_zero:
+ self.frozen_model.enc_dec_model.logging_step = True
+ self.predict_step_outputs = []
+ # log_scalars=False avoids logging scalar TTS metrics in the predict_step
+ # Images, audio and texts will still be logged
+ self.predict_step(batch=batch, batch_idx=batch_idx, log_scalars=False, global_step=self.global_step)
+ for inf_key in self.predict_step_outputs[0]:
+ if self.predict_step_outputs[0][inf_key] is not None:
+ self.logger.experiment.add_scalar(
+ f'Val_{inf_key}', self.predict_step_outputs[0][inf_key], self.global_step
+ )
+
+ labels_original = labels.clone() # (b, 8, t)
+
+ _cross_attention_prior = cross_attention_prior
+ if isinstance(context_and_question_tokens, list):
+ _cross_attention_prior = [None, cross_attention_prior]
+
+ output_loss, _, output_logits = self.forward(
+ virtual_tokens,
+ context_and_question_tokens,
+ enc_mask,
+ dec_input,
+ dec_input_mask,
+ position_ids,
+ taskname_ids,
+ labels=labels,
+ speech_mask=speech_mask,
+ cross_attention_prior=_cross_attention_prior,
+ text_limits=text_limits,
+ inference=False,
+ )
+
+ if batch_idx == 0 and self.is_rank_zero:
+ self.frozen_model.enc_dec_model.logging_step = False
+ with torch.cuda.amp.autocast(enabled=False):
+ if torch.count_nonzero(speech_mask) == 0:
+ text_labels = labels[:, 0, :] # [B, 8, T] -> [B, T]
+ token_logits = output_logits[0] * 1 # [T, B, V]
+ if self.frozen_model.enc_dec_model.parallel_output:
+ # Gather from tensor parallel region
+ token_logits = tensor_parallel.gather_from_tensor_model_parallel_region(token_logits)
+ token_logits = token_logits.argmax(dim=2) # [T, B]
+ token_logits = token_logits.t() # [B, T]
+ score = 0
+ for i in range(text_labels.size()[0]):
+ r = text_labels[i].long()
+ nzm = r != 0
+ r = r.tolist()
+ h = token_logits[i].long() * nzm
+ h = h.tolist()
+ score += editdistance.eval(r, h)
+ score /= text_labels.size()[0]
+ logging.info(f"wer score : {score}")
+ self.logger.experiment.add_scalar('WER', score, self.global_step)
+ else:
+ audio_len = self.decoder_context_len + (labels[0][0][self.decoder_context_len :] != 0).sum().item()
+ labels_to_1024 = self.convert_tokens_to_range(labels[0, :, 0:audio_len])
+ label_wav = self.decode_wav_from_codec_model(labels_to_1024)
+ dec_input_to_1024 = self.convert_tokens_to_range(dec_input[0, :, 0:audio_len])
+ dec_input_wav = self.decode_wav_from_codec_model(dec_input_to_1024)
+ self.logger.experiment.add_audio("val_label_wav", label_wav, self.global_step, self.sample_rate)
+ self.logger.experiment.add_audio(
+ "val_dec_input_wav", dec_input_wav, self.global_step, self.sample_rate
+ )
+
+ if isinstance(context_and_question_tokens, list):
+ context_tokens = context_and_question_tokens[0]
+ question_tokens = context_and_question_tokens[1]
+ input_token_list_all = [
+ question_tokens[0, 0, i].item() for i in range(question_tokens.shape[2])
+ ]
+ input_token_list = [
+ (ti, t) for ti, t in enumerate(input_token_list_all) if t != 0 and t < self.speech_offset
+ ]
+ context_end_step = context_and_question_tokens_lens[0][0].item()
+ _context_tokens = context_tokens[0, :, :context_end_step]
+
+ else:
+ input_token_list_all = [
+ context_and_question_tokens[0, 0, i].item()
+ for i in range(context_and_question_tokens.shape[2])
+ ]
+ input_token_list = [
+ (ti, t) for ti, t in enumerate(input_token_list_all) if t != 0 and t < self.speech_offset
+ ]
+ context_end_step = input_token_list[0][0]
+ _context_tokens = context_and_question_tokens[0, :, :context_end_step]
+ if context_end_step > 1:
+ is_speech_context = _context_tokens[1, :].sum().item() > 0
+ if is_speech_context:
+ _context_tokens = self.convert_tokens_to_range(
+ _context_tokens, pattern=self.context_pattern
+ )
+ _context_wav = self.decode_wav_from_codec_model(_context_tokens)
+ self.logger.experiment.add_audio(
+ "val_context_wav", _context_wav, self.global_step, self.sample_rate
+ )
+ else:
+ _context_token_list = [v.item() for v in _context_tokens[0, :]]
+ _context_text = self.frozen_model.tokenizer.ids_to_text(
+ [v for v in _context_token_list if v < self.lm_vocab_size]
+ )
+ self.logger.experiment.add_text("val_context_text", _context_text, self.global_step)
+
+ question_si = text_limits[0, 0].item() - virtual_tokens.shape[1]
+ question_ei = text_limits[0, 1].item() - virtual_tokens.shape[1]
+
+ text_si = text_limits[0, 0].item()
+ text_ei = text_limits[0, 1].item()
+
+ input_text = self.frozen_model.tokenizer.ids_to_text(
+ [v for v in input_token_list_all[question_si:question_ei] if v < self.lm_vocab_size]
+ )
+ self.logger.experiment.add_text("Val Input Text", input_text, self.global_step)
+
+ input_phoneme_tokens = [
+ v - self.lm_vocab_size
+ for v in input_token_list_all[question_si:question_ei]
+ if v >= self.lm_vocab_size
+ ]
+ if len(input_phoneme_tokens) > 0:
+ phoneme_text = self.phoneme_tokenizer.decode(input_phoneme_tokens)
+ self.logger.experiment.add_text("Val Input Phoneme Text", phoneme_text, self.global_step)
+
+ token_logits = output_logits[0]
+ speech_logits_list = output_logits[1]
+
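+ # Average cross-attention probs over all layers and heads within the question span for an alignment plot.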
+ attention_probs_list = output_logits[2] # list of (BS, 12, out_length, in_length)
+ if attention_probs_list is not None:
+ attention_sliced_list = []
+ for lidx in range(len(attention_probs_list)):
+ attention_probs = attention_probs_list[lidx]
+ for _i in range(attention_probs.shape[1]):
+ attention_sliced_list.append(
+ attention_probs[0, _i, self.decoder_context_len : audio_len, text_si:text_ei]
+ )
+ attention_sliced = torch.stack(attention_sliced_list)
+ attention_sliced = torch.mean(attention_sliced, 0)
+ text = None
+ if len(input_text) > 0:
+ text = self.frozen_model.tokenizer.ids_to_tokens(
+ [v for v in input_token_list_all[question_si:question_ei] if v < self.lm_vocab_size]
+ )
+ if len(input_phoneme_tokens) > 0:
+ text = phoneme_text.split("|")
+ alignment_image_sliced = plot_alignment_to_numpy_for_speechllm(
+ attention_sliced.cpu().float().numpy().T,
+ phoneme_seq=text,
+ phoneme_ver=2,
+ vmin=0.0,
+ phone_offset=0,
+ h_offset=False,
+ )
+ self.logger.experiment.add_image(
+ f"Val Attention Probs Average Sliced",
+ alignment_image_sliced,
+ self.global_step,
+ dataformats="HWC",
+ )
+ if self.frozen_model.enc_dec_model.parallel_output:
+ # Gather from tensor parallel region
+ token_logits = tensor_parallel.gather_from_tensor_model_parallel_region(token_logits)
+ for _i in range(len(speech_logits_list)):
+ speech_logits_list[_i] = tensor_parallel.gather_from_tensor_model_parallel_region(
+ speech_logits_list[_i]
+ )
+ speech_logits = torch.stack(speech_logits_list, dim=-1) # (t, b, 1024, 7)
+ token_logits_example = token_logits[:, 0, :] * 1
+ speech_logits_example = speech_logits[:, 0, :, :] * 1
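+ # Rebuild a teacher-forced waveform: argmax of the first codebook (minus the speech offset)
+ # stacked with the remaining codebooks, then decoded by the codec model.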
+ first_layer_tokens = token_logits_example.argmax(dim=1) - self.speech_offset
+ other_layer_tokens = []
+ for _i in range(speech_logits_example.shape[2]):
+ other_layer_tokens.append(speech_logits_example[:, :, _i].argmax(dim=1))
+
+ all_layer_tokens = torch.stack([first_layer_tokens] + other_layer_tokens) # (8, t)
+ all_layer_tokens = self.convert_tokens_to_range(all_layer_tokens, apply_offset_correction=False)
+ all_layer_tokens = torch.clip(all_layer_tokens, 0, self.speech_codebook_size - 1)
+ predicted_wav = self.decode_wav_from_codec_model(all_layer_tokens)
+ self.logger.experiment.add_audio(
+ "val_tf_pred_wav", predicted_wav, self.global_step, self.sample_rate
+ )
+
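+ # Accuracy and cross-entropy are tracked per codebook: the first codebook comes from the token
+ # logits, the remaining codebooks from the speech logits.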
+ first_layer_logits = output_logits[0]
+ speech_logits_list = output_logits[1]
+
+ if self.frozen_model.enc_dec_model.parallel_output:
+ # Gather from tensor parallel region
+ first_layer_logits = tensor_parallel.gather_from_tensor_model_parallel_region(first_layer_logits)
+ if torch.count_nonzero(speech_mask) > 0:
+ for _i in range(len(speech_logits_list)):
+ speech_logits_list[_i] = tensor_parallel.gather_from_tensor_model_parallel_region(
+ speech_logits_list[_i]
+ )
+ speech_logits = torch.stack(speech_logits_list, dim=-1) # (t, b, 1024, 7)
+ first_layer_preds = first_layer_logits.argmax(dim=2) # (t,bs)
+ first_layer_preds = first_layer_preds.transpose(0, 1) # (bs,t)
+ labels_first_layer = labels_original[:, 0, :] # (bs,t)
+ correct_predictions = first_layer_preds == labels_first_layer # (bs,t)
+ correct_predictions = correct_predictions * loss_mask # (bs,t)
+ total_correct_predictions = torch.sum(correct_predictions)
+ total_predictions = torch.sum(loss_mask)
+ first_layer_accuracy = total_correct_predictions / total_predictions
+ first_layer_loss = torch.nn.functional.cross_entropy(
+ first_layer_logits.permute(1, 2, 0), labels_first_layer, reduction='none'
+ ) # (bs,t)
+ first_layer_loss = torch.sum(first_layer_loss * loss_mask) / total_predictions
+
+ metrics = {
+ 'loss': loss_mean,
+ 'first_layer_accuracy': first_layer_accuracy,
+ 'first_layer_loss': first_layer_loss,
+ }
+ loss_total = first_layer_loss
+ for i in range(self.num_speech_codebooks - 1):
+ if torch.count_nonzero(speech_mask) > 0:
+ speech_logits_i = speech_logits[:, :, :, i]
+ speech_preds_i = speech_logits_i.argmax(dim=2) # (t,bs)
+ speech_preds_i = speech_preds_i.transpose(0, 1) # (bs,t)
+ labels_i = labels_original[:, i + 1, :] # (bs,t)
+ correct_predictions_i = speech_preds_i == labels_i # (bs,t)
+ correct_predictions_i = correct_predictions_i * loss_mask * speech_mask # (bs,t)
+ total_correct_predictions_i = torch.sum(correct_predictions_i)
+ total_predictions_i = torch.sum(loss_mask * speech_mask)
+ speech_accuracy_i = total_correct_predictions_i / total_predictions_i
+ loss_i = torch.nn.functional.cross_entropy(
+ speech_logits_i.permute(1, 2, 0), labels_i, reduction='none'
+ ) # (bs,t)
+ loss_i = torch.sum(loss_i * loss_mask * speech_mask) / total_predictions_i
+ else:
+ speech_accuracy_i = torch.tensor(0.0)
+ loss_i = torch.tensor(0.0)
+ metrics[f'speech_accuracy_{i+1}'] = speech_accuracy_i
+ metrics[f'speech_loss_{i+1}'] = loss_i
+ loss_total += loss_i
+
+ metrics['loss_total_check'] = loss_total
+ self.validation_step_outputs.append(metrics)
+ self.train(mode=mode)
+ self.frozen_model.train()
+ return metrics['loss']
+
+ def on_validation_epoch_end(self):
+ outputs = self.validation_step_outputs
+ if self.cfg.get('pipeline_model_parallel_size', 1) > 1:
+ if parallel_state.is_pipeline_last_stage():
+ # only the last pipeline parallel stages return loss
+ averaged_loss = torch.stack([item['loss'] for item in outputs]).mean()
+ averaged_loss_total_check = torch.stack([item['loss_total_check'] for item in outputs]).mean()
+ averaged_first_layer_accuracy = torch.stack([item['first_layer_accuracy'] for item in outputs]).mean()
+
+ self.log(
+ 'val_loss_total_check',
+ averaged_loss_total_check,
+ prog_bar=False,
+ rank_zero_only=True,
+ batch_size=1,
+ )
+ self.log(
+ 'val_first_layer_accuracy',
+ averaged_first_layer_accuracy,
+ prog_bar=True,
+ rank_zero_only=True,
+ batch_size=1,
+ )
+ logging.info(f'Validation first_layer_accuracy: {averaged_first_layer_accuracy}')
+ logging.info(f'Validation loss_total_check: {averaged_loss_total_check}')
+
+ for i in range(1, self.num_speech_codebooks):
+ averaged_speech_accuracy = torch.stack([item[f'speech_accuracy_{i}'] for item in outputs]).mean()
+ averaged_speech_loss = torch.stack([item[f'speech_loss_{i}'] for item in outputs]).mean()
+ self.log(
+ f'val_speech_accuracy_{i}',
+ averaged_speech_accuracy,
+ prog_bar=True,
+ rank_zero_only=True,
+ batch_size=1,
+ )
+ self.log(
+ f'val_speech_loss_{i}', averaged_speech_loss, prog_bar=True, rank_zero_only=True, batch_size=1
+ )
+ logging.info(f'Validation speech_accuracy_{i}: {averaged_speech_accuracy}')
+ logging.info(f'Validation speech_loss_{i}: {averaged_speech_loss}')
+ else:
+ averaged_loss = torch.tensor(0.0).cuda()
+
+ # self.log must run on rank zero, but only the last pipeline stage has the loss, so broadcast from the last rank
+ torch.distributed.broadcast(averaged_loss, get_last_rank())
+
+ self.log('val_loss', averaged_loss, prog_bar=True, rank_zero_only=True, batch_size=1)
+ logging.info(f'Validation loss: {averaged_loss}')
+
+ else:
+ if len(outputs) > 0:
+ averaged_loss = torch.stack([item['loss'] for item in outputs]).mean()
+ averaged_loss_total_check = torch.stack([item['loss_total_check'] for item in outputs]).mean()
+ logging.info(f'Validation loss: {averaged_loss}')
+ self.log('val_loss', averaged_loss, prog_bar=True, rank_zero_only=True, batch_size=1)
+ self.log(
+ 'val_loss_total_check',
+ averaged_loss_total_check,
+ prog_bar=False,
+ rank_zero_only=True,
+ batch_size=1,
+ )
+
+ averaged_first_layer_accuracy = torch.stack([item['first_layer_accuracy'] for item in outputs]).mean()
+ logging.info(f'Validation first_layer_accuracy: {averaged_first_layer_accuracy}')
+ self.log(
+ 'val_first_layer_accuracy',
+ averaged_first_layer_accuracy,
+ prog_bar=True,
+ rank_zero_only=True,
+ batch_size=1,
+ )
+
+ for i in range(1, self.num_speech_codebooks):
+ averaged_speech_accuracy = torch.stack([item[f'speech_accuracy_{i}'] for item in outputs]).mean()
+ averaged_speech_loss = torch.stack([item[f'speech_loss_{i}'] for item in outputs]).mean()
+ logging.info(f'Validation speech_accuracy_{i}: {averaged_speech_accuracy}')
+ logging.info(f'Validation speech_loss_{i}: {averaged_speech_loss}')
+ self.log(
+ f'val_speech_accuracy_{i}',
+ averaged_speech_accuracy,
+ prog_bar=True,
+ rank_zero_only=True,
+ batch_size=1,
+ )
+ self.log(
+ f'val_speech_loss_{i}', averaged_speech_loss, prog_bar=True, rank_zero_only=True, batch_size=1
+ )
+
+ if self.cfg.get("report_validation_metric", False):
+ gather_results = [None for _ in range(parallel_state.get_data_parallel_world_size())]
+
+ all_preds = list(itertools.chain(*[item['predicted_token_ids'] for item in outputs]))
+ all_labels = list(itertools.chain(*[item['labels'] for item in outputs]))
+ all_inputs = list(itertools.chain(*[item['enc_inputs'] for item in outputs]))
+
+ assert len(all_preds) == len(all_labels)
+ assert len(all_preds) == len(all_inputs)
+
+ # Gather inputs, preds, labels from all workers
+ torch.distributed.all_gather_object(
+ gather_results,
+ [(input, pred, label) for (input, pred, label) in zip(all_inputs, all_preds, all_labels)],
+ group=parallel_state.get_data_parallel_group(),
+ )
+
+ # Deduplicate sentences that may have been distributed across multiple data parallel ranks.
+ if parallel_state.get_data_parallel_rank() == 0:
+
+ gather_results_dedup = list(set(itertools.chain(*gather_results)))
+
+ val_metric_dict = self.validation_metric.get_score(
+ [i[2] for i in gather_results_dedup],
+ [i[1] for i in gather_results_dedup],
+ )
+
+ for metric, val in val_metric_dict.items():
+ logging.info(f'Validation {metric}: {val}')
+ val_metric = list(val_metric_dict.items())[0][1]
+ metric_name = list(val_metric_dict.items())[0][0]
+ else:
+ val_metric = torch.tensor(0.0).cuda()
+ metric_name = ''
+
+ self.log(f'val_{metric_name}', val_metric, prog_bar=True, rank_zero_only=True, batch_size=1)
+
+ gbs = self.cfg.global_batch_size
+ mbs = self.cfg.micro_batch_size
+ self._reconfigure_batch_sizes(gbs, mbs)
+ self.validation_step_outputs.clear()
+
+ def test_step(self, batch, batch_idx):
+ result = self.predict_step(batch, batch_idx)
+ return result
+
+ def on_test_epoch_end(self):
+ """
+ This might still be broken for Lightning 2.0. To fix, see
+ https://github.com/NVIDIA/NeMo/blob/9bdf4d12276ee8f95a340cf2f7f340e9b5b74a7e/docs/source/starthere/migration-guide.rst
+ """
+ outputs = self.predict_step_outputs
+ average_metrics = {}
+ for output in outputs:
+ for key in output:
+ if key not in average_metrics:
+ average_metrics[key] = []
+ if isinstance(output[key], torch.Tensor):
+ average_metrics[key].append(output[key].item())
+ elif output[key] is None:
+ continue
+ else:
+ average_metrics[key].append(output[key])
+
+ for key in average_metrics:
+ average_metrics[key] = np.mean(average_metrics[key]).item()
+ logging.info(f'Test {key}: {average_metrics[key]}')
+ self.log(f'test_{key}', average_metrics[key], prog_bar=True, rank_zero_only=True, batch_size=1)
+ self.logger.experiment.add_scalar(f'Inf Cumulative {key}', average_metrics[key], 0)
+
+ # save average metrics into json file
+ with open(os.path.join(self.logger.log_dir, 'output_metrics.json'), 'w') as f:
+ json.dump(average_metrics, f)
+
+ def build_virtual_prompt_dataset(
+ self, dataset_paths, batch_size, for_train, drop_last, shuffle, num_workers, pin_memory
+ ):
+ dataset = T5SpeechLMDataset(
+ datasets=dataset_paths,
+ tokenizer=self.tokenizer,
+ sample_rate=self.cfg.data.get('sample_rate', 24000),
+ virtual_prompt_source=self.virtual_prompt_source,
+ task_templates=self.task_templates,
+ pseudo_tokens=self.pseudo_tokens,
+ pad_token_id=self.pad_token_id,
+ max_seq_length=self.cfg.data.get('max_seq_length', self.frozen_model.cfg.max_position_embeddings),
+ min_seq_length=self.cfg.data.get('min_seq_length', 1),
+ add_bos=self.cfg.data.get('add_bos', False),
+ add_eos=self.cfg.data.get('add_eos', True),
+ decoder_starts_with_pad=self.cfg.data.get('decoder_starts_with_pad', False),
+ add_eos_to_decoder_output=self.cfg.data.get('add_eos_to_decoder_output', True),
+ add_sentinel_to_input=self.cfg.data.get('add_sentinel_to_input', True),
+ ul2_prompt_token=self.cfg.data.get('ul2_prompt_token', None),
+ for_train=for_train,
+ segment_max_duration=self.cfg.data.get('segment_max_duration', None),
+ trim=self.cfg.data.get('trim', None),
+ trim_ref=self.cfg.data.get('trim_ref', None),
+ trim_top_db=self.cfg.data.get('trim_top_db', None),
+ trim_frame_length=self.cfg.data.get('trim_frame_length', None),
+ trim_hop_length=self.cfg.data.get('trim_hop_length', None),
+ pad_multiple=self.cfg.data.get('pad_multiple', 1),
+ pitch_augment=self.cfg.data.get('pitch_augment', None),
+ sup_data_path=self.cfg.data.get('sup_data_path', None),
+ codec_folder=self.cfg.data.get('codec_folder', None),
+ speech_offset=self.cfg.data.get('speech_offset', None),
+ train_task=self.cfg.data.get('train_task', "tts"),
+ seq_pattern=self.cfg.get('seq_pattern', 'delay_parallel'),
+ use_attention_prior=self.cfg.data.get('use_attention_prior', False),
+ attention_prior_scaling_factor=self.cfg.data.get('attention_prior_scaling_factor', 1.0),
+ cross_attention_epsilon=self.cfg.data.get('cross_attention_epsilon', 0.0),
+ lm_vocab_size=self.lm_vocab_size,
+ num_speech_codebooks=self.num_speech_codebooks,
+ codebook_fps=self.cfg.data.get('codebook_fps', 86),
+ add_special_tokens_to_only_first_codebook=self.cfg.data.get(
+ 'add_special_tokens_to_only_first_codebook', False
+ ),
+ context_pattern=self.cfg.data.get('context_pattern', 'parallel'),
+ context_duration_min=self.cfg.data.get('context_duration_min', 3.0),
+ context_duration_max=self.cfg.data.get('context_duration_max', 5.0),
+ g2p=self.cfg.data.get('g2p', None),
+ skip_datasets=self.cfg.data.get('skip_datasets', []),
+ english_only_model=self.cfg.get('english_only_model', False),
+ use_ipa=self.cfg.data.get('use_ipa', False),
+ context_conditioning=self.cfg.get('context_conditioning', "decoder"),
+ use_beta_binomial_interpolator=self.cfg.get('use_beta_binomial_interpolator', False),
+ context_slice_method=self.cfg.data.get('context_slice_method', 'random'),
+ phoneme_probability=self.cfg.data.get('phoneme_probability', 0.5),
+ encoder_type=self.cfg.data.get('encoder_type', 'single_transformer'),
+ )
+
+ rank = parallel_state.get_data_parallel_rank()
+ world_size = parallel_state.get_data_parallel_world_size()
+ sampler = torch.utils.data.distributed.DistributedSampler(
+ dataset, num_replicas=world_size, rank=rank, shuffle=shuffle, seed=self.cfg.seed
+ )
+
+ dataloader = torch.utils.data.DataLoader(
+ dataset,
+ collate_fn=dataset.collate_fn,
+ sampler=sampler,
+ batch_size=batch_size // world_size,
+ drop_last=drop_last,
+ num_workers=num_workers,
+ pin_memory=pin_memory,
+ persistent_workers=num_workers > 0, # (@adithyare and @eharper) We need to set this to True to get around issues with spawn=True
+ )
+ logging.info(f'build success: {len(dataloader)} {dataset_paths}')
+ if self.phoneme_tokenizer is None:
+ self.phoneme_tokenizer = dataset.phoneme_tokenizer
+ return dataset, dataloader
+
+ def build_virtual_prompt_tarred_dataset(
+ self, dataset_paths, audio_path, batch_size, for_train, drop_last, shuffle, num_workers, pin_memory
+ ):
+ dataset = T5SpeechLMTarredDataset(
+ audio_tar_filepaths=audio_path,
+ manifest_filepath=dataset_paths,
+ tokenizer=self.tokenizer,
+ sample_rate=self.cfg.data.get('sample_rate', 24000),
+ virtual_prompt_source=self.virtual_prompt_source,
+ task_templates=self.task_templates,
+ pseudo_tokens=self.pseudo_tokens,
+ pad_token_id=self.pad_token_id,
+ max_seq_length=self.cfg.data.get('max_seq_length', self.frozen_model.cfg.max_position_embeddings),
+ min_seq_length=self.cfg.data.get('min_seq_length', 1),
+ shuffle_n=shuffle,
+ add_bos=self.cfg.data.get('add_bos', False),
+ add_eos=self.cfg.data.get('add_eos', True),
+ decoder_starts_with_pad=self.cfg.data.get('decoder_starts_with_pad', False),
+ add_eos_to_decoder_output=self.cfg.data.get('add_eos_to_decoder_output', True),
+ add_sentinel_to_input=self.cfg.data.get('add_sentinel_to_input', True),
+ ul2_prompt_token=self.cfg.data.get('ul2_prompt_token', None),
+ for_train=for_train,
+ segment_max_duration=self.cfg.data.get('segment_max_duration', None),
+ trim=self.cfg.data.get('trim', None),
+ trim_ref=self.cfg.data.get('trim_ref', None),
+ trim_top_db=self.cfg.data.get('trim_top_db', None),
+ trim_frame_length=self.cfg.data.get('trim_frame_length', None),
+ trim_hop_length=self.cfg.data.get('trim_hop_length', None),
+ pad_multiple=self.cfg.data.get('pad_multiple', 1),
+ pitch_augment=self.cfg.data.get('pitch_augment', None),
+ speech_offset=self.cfg.data.get('speech_offset', None),
+ train_task=self.cfg.data.get('train_task', "tts"),
+ seq_pattern=self.cfg.get('seq_pattern', 'delay_parallel'),
+ use_attention_prior=self.cfg.data.get('use_attention_prior', False),
+ attention_prior_scaling_factor=self.cfg.data.get('attention_prior_scaling_factor', 1.0),
+ cross_attention_epsilon=self.cfg.data.get('cross_attention_epsilon', 0.0),
+ lm_vocab_size=self.lm_vocab_size,
+ num_speech_codebooks=self.num_speech_codebooks,
+ )
+ rank = parallel_state.get_data_parallel_rank()
+ world_size = parallel_state.get_data_parallel_world_size()
+ dataloader = torch.utils.data.DataLoader(
+ dataset,
+ collate_fn=dataset.collate_fn,
+ batch_size=batch_size // world_size,
+ drop_last=drop_last,
+ num_workers=num_workers,
+ pin_memory=pin_memory,
+ persistent_workers=num_workers > 0, # (@adithyare and @eharper) We need to set this to True to get around issues with spawn=True
+ )
+ logging.info(f'build success: {len(dataloader)} {dataset_paths}')
+
+ return dataset, dataloader
+
+ def process_text(self, input_text):
+ """
+ Normalizes text for CER/WER calculation.
+ Taken from hallucination_eval.py
+ """
+ # Convert text to lowercase
+ lower_case_text = input_text.lower()
+
+ # Remove commas from text
+ no_comma_text = lower_case_text.replace(",", "")
+
+ # Replace "-" with spaces
+ no_dash_text = no_comma_text.replace("-", " ")
+
+ # Replace double spaces with single space
+ single_space_text = " ".join(no_dash_text.split())
+
+ single_space_text = single_space_text.translate(str.maketrans('', '', string.punctuation))
+
+ return single_space_text
+
+ def predict_step(
+ self, batch: Any, batch_idx: int, dataloader_idx: int = 0, log_scalars=True, global_step=None
+ ) -> Any:
+
+ with torch.no_grad():
+ (
+ virtual_tokens,
+ context_and_question_tokens,
+ enc_mask,
+ dec_input_raw,
+ dec_input_mask_raw,
+ labels,
+ loss_mask,
+ position_ids,
+ taskname_ids,
+ speech_mask,
+ context_and_question_tokens_lens,
+ cross_attention_prior,
+ text_limits, # [question start index, question end index) within [0, enc_mask.size(1))
+ lang,
+ question_texts,
+ ) = batch
+
+ batch_size = virtual_tokens.size(0)
+ dec_input = dec_input_raw * 1 # (B, 8, T) # TODO @xueyang: use clone() instead of * 1 to copy.
+ dec_input_mask = dec_input_mask_raw * 1 # (B, T)
+ dec_input_mask[:, :] = 1 # values do not matter during autoregressive inference; every position is attended
+ output_token_list = []
+
+ end_indices = {}
+ # pad dec_input (B, 8, T) to max_inference_timesteps
+ max_inference_timesteps = self.cfg.get('max_inference_timesteps', 2000)
+ # TODO @xueyang: potential bug: if max_inference_timesteps < dec_input.shape[2], the pad width is negative and dec_input gets clipped.
+ dec_input = torch.nn.functional.pad(dec_input, (0, max_inference_timesteps - dec_input.shape[2]), value=0)
+ dec_input[:, :, self.decoder_context_len + 1 :].zero_()
+ # TODO @xueyang: why not just declare torch.ones(dec_input_raw.size(0), max_inference_timesteps)?
+ dec_input_mask = torch.nn.functional.pad(
+ dec_input_mask, (0, max_inference_timesteps - dec_input_mask.shape[1]), value=1
+ )
+
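+ # Classifier-free guidance (CFG): duplicate the batch with an unconditioned copy ([UNK]-masked text
+ # and/or audio context), then interpolate conditioned and unconditioned logits during decoding.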
+ if self.inference_apply_text_cfg and self.inference_apply_audio_cfg:
+ question_limits = text_limits - virtual_tokens.size(1) # (b, 2), reset question range to start from [pad] context, same start position as context_and_question_tokens.
+ question_start = question_limits[:, 0].unsqueeze(1) # (b, 1)
+ question_end = question_limits[:, 1].unsqueeze(1) # (b, 1)
+
+ # duplicate and glue two batches into a single one.
+ virtual_tokens = torch.cat((virtual_tokens, virtual_tokens), dim=0)
+ taskname_ids = torch.cat((taskname_ids, taskname_ids), dim=0)
+ speech_mask = torch.cat((speech_mask, speech_mask), dim=0)
+ dec_input_mask = torch.cat((dec_input_mask, dec_input_mask), dim=0)
+
+ if isinstance(context_and_question_tokens, list): # indicates self.encoder_type == "multi_transformers".
+ context_tokens, question_tokens = context_and_question_tokens
+
+ # text
+ question_tokens_unconditioned = question_tokens.clone()
+ time_range = torch.arange(
+ question_tokens_unconditioned.size(2), device=question_tokens_unconditioned.device
+ ).unsqueeze(0)
+ question_mask = (time_range >= question_start) & (
+ time_range < question_end
+ ) # create a mask for question only tokens.
+ question_tokens_unconditioned[:, 0][question_mask] = self.tokenizer.unk_id # only the first layer has non-zero IDs.
+
+ # audio
+ context_tokens_unconditioned = context_tokens.clone()
+ context_tokens_unconditioned[:, :, :] = self.tokenizer.unk_id
+
+ # concatenate both conditioned and unconditioned batches as a single one.
+ context_and_question_tokens = [
+ torch.cat((context_tokens, context_tokens_unconditioned), dim=0),
+ torch.cat((question_tokens, question_tokens_unconditioned), dim=0),
+ ]
+ enc_mask = [torch.cat((mask, mask), dim=0) for mask in enc_mask]
+ dec_input = torch.cat((dec_input, dec_input), dim=0)
+ position_ids = [torch.cat((pos_ids, pos_ids), dim=0) for pos_ids in position_ids]
+ else:
+ assert (
+ self.context_conditioning == "decoder"
+ ), f"The encoder_type is single_transformer, so context_conditioning is expected to be 'decoder', got: {self.context_conditioning}"
+
+ # text
+ context_and_question_tokens_unconditioned = context_and_question_tokens.clone()
+ time_range = torch.arange(
+ context_and_question_tokens_unconditioned.size(2),
+ device=context_and_question_tokens_unconditioned.device,
+ ).unsqueeze(0) # (1, max_context_question_tokens_len)
+ question_mask = (time_range >= question_start) & (
+ time_range < question_end
+ ) # create a mask for question only tokens.
+ context_and_question_tokens_unconditioned[:, 0][question_mask] = self.tokenizer.unk_id # only the first layer has non-zero IDs.
+
+ # audio
+ dec_input_unconditioned = dec_input.clone()
+ dec_input_unconditioned[:, :, 1 : self.decoder_context_len + 1] = self.tokenizer.unk_id # TODO @xueyang: switch to another token id if this one conflicts with the text unk.
+
+ # concatenate both conditioned and unconditioned batches as a single one.
+ context_and_question_tokens = torch.cat(
+ (context_and_question_tokens, context_and_question_tokens_unconditioned), dim=0
+ )
+ enc_mask = torch.cat((enc_mask, enc_mask), dim=0)
+ dec_input = torch.cat((dec_input, dec_input_unconditioned), dim=0)
+ position_ids = torch.cat((position_ids, position_ids), dim=0)
+
+ # clean up useless variables.
+ del question_limits, question_start, question_end, time_range, question_mask
+ elif self.inference_apply_text_cfg:
+ # replace question token IDs with [UNK]'s id. No speech offset for Phoneme's [UNK]. Same op as train.
+ # instruction token IDs are bpe token IDs obtained directly from self.tokenizer without any offset.
+ # question token IDs are phoneme and grapheme token IDs offset by self.lm_vocab_size
+ # under the "Phoneme TTS" instruction, so there is no overlap between instruction and question token IDs.
+ # under the "Text to speech this" instruction, question token IDs are bpe token IDs without any offset,
+ # so they do overlap with instruction token IDs.
+ question_limits = text_limits - virtual_tokens.size(1) # (b, 2), reset question range to start from [pad] context, same start position as context_and_question_tokens.
+ question_start = question_limits[:, 0].unsqueeze(1) # (b, 1)
+ question_end = question_limits[:, 1].unsqueeze(1) # (b, 1)
+
+ # duplicate and glue two batches into a single one.
+ virtual_tokens = torch.cat((virtual_tokens, virtual_tokens), dim=0)
+ taskname_ids = torch.cat((taskname_ids, taskname_ids), dim=0)
+ speech_mask = torch.cat((speech_mask, speech_mask), dim=0)
+ dec_input_mask = torch.cat((dec_input_mask, dec_input_mask), dim=0)
+
+ if isinstance(context_and_question_tokens, list): # indicates self.encoder_type == "multi_transformers".
+ context_tokens, question_tokens = context_and_question_tokens
+ question_tokens_unconditioned = question_tokens.clone()
+
+ time_range = torch.arange(
+ question_tokens_unconditioned.size(2), device=question_tokens_unconditioned.device
+ ).unsqueeze(0)
+ question_mask = (time_range >= question_start) & (
+ time_range < question_end
+ ) # create a mask for question only tokens.
+ question_tokens_unconditioned[:, 0][question_mask] = self.tokenizer.unk_id # only the first layer has non-zero IDs.
+
+ # concatenate both conditioned and unconditioned batches as a single one.
+ context_and_question_tokens = [
+ torch.cat((context_tokens, context_tokens), dim=0),
+ torch.cat((question_tokens, question_tokens_unconditioned), dim=0),
+ ]
+ enc_mask = [torch.cat((mask, mask), dim=0) for mask in enc_mask]
+ dec_input = torch.cat((dec_input, dec_input), dim=0)
+ position_ids = [torch.cat((pos_ids, pos_ids), dim=0) for pos_ids in position_ids]
+ else:
+ assert (
+ self.context_conditioning == "decoder"
+ ), f"The encoder_type is single_transformer, so context_conditioning is expected to be 'decoder', got: {self.context_conditioning}"
+ context_and_question_tokens_unconditioned = context_and_question_tokens.clone()
+ time_range = torch.arange(
+ context_and_question_tokens_unconditioned.size(2),
+ device=context_and_question_tokens_unconditioned.device,
+ ).unsqueeze(0) # (1, max_context_question_tokens_len)
+ question_mask = (time_range >= question_start) & (
+ time_range < question_end
+ ) # create a mask for question only tokens.
+ context_and_question_tokens_unconditioned[:, 0][question_mask] = self.tokenizer.unk_id # only the first layer has non-zero IDs.
+
+ # concatenate both conditioned and unconditioned batches as a single one.
+ context_and_question_tokens = torch.cat(
+ (context_and_question_tokens, context_and_question_tokens_unconditioned), dim=0
+ )
+ enc_mask = torch.cat((enc_mask, enc_mask), dim=0)
+ dec_input = torch.cat((dec_input, dec_input), dim=0)
+ position_ids = torch.cat((position_ids, position_ids), dim=0)
+
+ # clean up useless variables.
+ del question_limits, question_start, question_end, time_range, question_mask
+ elif self.inference_apply_audio_cfg:
+ # duplicate and glue two batches into a single one.
+ virtual_tokens = torch.cat((virtual_tokens, virtual_tokens), dim=0)
+ taskname_ids = torch.cat((taskname_ids, taskname_ids), dim=0)
+ speech_mask = torch.cat((speech_mask, speech_mask), dim=0)
+ dec_input_mask = torch.cat((dec_input_mask, dec_input_mask), dim=0)
+
+ if isinstance(context_and_question_tokens, list): # indicates that self.encoder_type == "multi_transformers"
+ context_tokens, question_tokens = context_and_question_tokens
+ context_tokens_unconditioned = context_tokens.clone()
+ context_tokens_unconditioned[:, :, :] = self.tokenizer.unk_id # TODO @xueyang: verify whether extra tokens other than audio codec tokens are appended.
+
+ # concatenate both conditioned and unconditioned batches as a single one.
+ context_and_question_tokens = [
+ torch.cat((context_tokens, context_tokens_unconditioned), dim=0),
+ torch.cat((question_tokens, question_tokens), dim=0),
+ ]
+ enc_mask = [torch.cat((mask, mask), dim=0) for mask in enc_mask]
+ dec_input = torch.cat((dec_input, dec_input), dim=0)
+ position_ids = [torch.cat((pos_ids, pos_ids), dim=0) for pos_ids in position_ids]
+ else:
+ assert (
+ self.context_conditioning == "decoder"
+ ), f"The encoder_type is single_transformer, so context_conditioning is expected to be 'decoder', got: {self.context_conditioning}"
+ dec_input_unconditioned = dec_input.clone()
+ dec_input_unconditioned[:, :, 1 : self.decoder_context_len + 1] = self.tokenizer.unk_id # TODO @xueyang: switch to another token id if this one conflicts with the text unk.
+
+ # concatenate both conditioned and unconditioned batches as a single one.
+ context_and_question_tokens = torch.cat(
+ (context_and_question_tokens, context_and_question_tokens), dim=0
+ )
+ enc_mask = torch.cat((enc_mask, enc_mask), dim=0)
+ dec_input = torch.cat((dec_input, dec_input_unconditioned), dim=0)
+ position_ids = torch.cat((position_ids, position_ids), dim=0)
+ else:
+ logging.debug(
+ f"Neither text or audio cfg logits are applied:"
+ f" self.inference_apply_text_cfg={self.inference_apply_text_cfg},"
+ f" self.inference_apply_audio_cfg={self.inference_apply_audio_cfg}"
+ )
+
+ end_inference_loop_at = None
+ fwd_bwd_function = get_forward_backward_func()
+ encoder_output = None
+ attention_probs_all = []
+ start_time = time.time()
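+ # Autoregressive decoding loop: one timestep per iteration, sampling all codebooks in parallel.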
+ for t in range(self.decoder_context_len + 1, dec_input.shape[2] - 1):
+ # Start at 0 if encoder context, else context_len
+ if t % 100 == 0:
+ logging.info("Timestep {}".format(t))
+ if t == end_inference_loop_at:
+ print("All ends detected")
+ break
+
+ if isinstance(enc_mask, list):
+ encoder_max_sequence_len = [e.size(1) for e in enc_mask]
+ else:
+ encoder_max_sequence_len = enc_mask.size(1)
+
+ # if context_condition is decoder, then t starts at [PAD] token represented as [0] * 8.
+ # if context_condition is encoder, then t starts at [CLS].
+ if t == self.decoder_context_len + 1:
+ # Run first step manually
+ output_logits, _, token_and_speech_logits = self.forward(
+ virtual_tokens,
+ context_and_question_tokens,
+ enc_mask,
+ dec_input[:, :, : t + 1], # [CLS] + context audio tokens + [PAD] if context_condition is decoder; otherwise just [CLS].
+ dec_input_mask[:, : t + 1], # doesn't matter because of all ones.
+ position_ids,
+ taskname_ids,
+ labels=None,
+ speech_mask=speech_mask,
+ inference=True,
+ inference_step=0,
+ decoder_max_sequence_len=max_inference_timesteps,
+ encoder_max_sequence_len=encoder_max_sequence_len,
+ )
+ encoder_output = token_and_speech_logits[-1]
+
+ if isinstance(encoder_output, list):
+ encoder_output = [e.transpose(0, 1) for e in encoder_output]
+ else:
+ encoder_output = encoder_output.transpose(0, 1)
+
+ else:
+ # Prepare batch
+ batch = [
+ max_inference_timesteps,
+ encoder_max_sequence_len,
+ encoder_output,
+ enc_mask,
+ dec_input[:, :, : t + 1],
+ dec_input_mask[:, : t + 1],
+ position_ids,
+ taskname_ids,
+ speech_mask,
+ ]
+
+ output_tensor = fwd_bwd_function(
+ forward_step_func=self.get_forward_output_only_func(),
+ data_iterator=iter(
+ [
+ batch,
+ ]
+ ),
+ model=[self],
+ num_microbatches=get_num_microbatches(),
+ forward_only=True,
+ seq_length=t,
+ micro_batch_size=dec_input.shape[0],
+ )
+ output_logits = output_tensor[0]['output_logits'] # (B, T, V, 8) or (2B, T, V, 8)
+ token_and_speech_logits = output_tensor[0]['token_and_speech_logits']
+
+ # when return_all_crossattention is False, attention_probs is None.
+ if self.frozen_model.enc_dec_model.return_all_crossattention_probs:
+ attention_probs = token_and_speech_logits[2]
+ attention_probs_mean = torch.stack(attention_probs).mean(dim=0) # B, 12, 1, enc_timesteps
+ attention_probs_all.append(attention_probs_mean)
+
+ if self.inference_apply_text_cfg or self.inference_apply_audio_cfg:
+ # interpolate conditioned and unconditioned logits
+ token_logits = (
+ self.inference_cfg_interpolation_scale * token_and_speech_logits[0][:batch_size]
+ + (1 - self.inference_cfg_interpolation_scale) * token_and_speech_logits[0][batch_size:]
+ )
+ output_speech_logits = (
+ self.inference_cfg_interpolation_scale * output_logits[:batch_size]
+ + (1 - self.inference_cfg_interpolation_scale) * output_logits[batch_size:]
+ )
+ else:
+ token_logits = token_and_speech_logits[0] # (B, T, V)
+ output_speech_logits = output_logits
+
+ token_logits_currtimestep = token_logits[:, -1, :] # (B, V)
+ token_preds = token_logits_currtimestep.argmax(dim=1) # (B,)
+
+ if torch.count_nonzero(speech_mask) > 0:
+ output_logits_currtimestep = (
+ output_speech_logits[:, -1, :, :]
+ .permute(0, 2, 1)
+ .contiguous()
+ .view(-1, self.speech_codebook_size)
+ ) # (B*8, V)
+ output_logits_currtimestep_conditioned = (
+ output_logits[:batch_size][:, -1, :, :]
+ .permute(0, 2, 1)
+ .contiguous()
+ .view(-1, self.speech_codebook_size)
+ )
+ output_logits_currtimestep_unconditioned = (
+ output_logits[batch_size:][:, -1, :, :]
+ .permute(0, 2, 1)
+ .contiguous()
+ .view(-1, self.speech_codebook_size)
+ )
+ else:
+ output_logits_currtimestep = token_logits_currtimestep # (B, V)
+ output_logits_currtimestep_conditioned = token_logits_currtimestep
+ output_logits_currtimestep_unconditioned = token_logits_currtimestep
+
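+ # Sample the next tokens with top-k filtering and temperature scaling; under CFG the filtered
+ # logits can additionally be re-interpolated with the unconditioned logits below.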
+ top_k = self.cfg.get('top_k', 80)
+
+ # (B*8, 80) or (B, 80)
+ output_logits_currtimestep_topk = torch.topk(output_logits_currtimestep, top_k, dim=1)[0]
+
+ # find indices which are not top k
+ indices_to_remove = output_logits_currtimestep < output_logits_currtimestep_topk[:, -1].unsqueeze(1)
+ # (B*8, 1024) or (B, 1024)
+
+ if self.inference_apply_cfg_filter:
+ output_logits_currtimestep_rescored = output_logits_currtimestep_conditioned.clone()
+ else:
+ output_logits_currtimestep_rescored = output_logits_currtimestep.clone()
+
+ output_logits_currtimestep_rescored[indices_to_remove] = -float('Inf')
+
+ # logits interpolation between conditioned and unconditioned logits.
+ if (
+ self.inference_apply_text_cfg or self.inference_apply_audio_cfg
+ ) and self.inference_apply_cfg_filter:
+ output_logits_currtimestep_rescored = (
+ self.inference_cfg_filter_interpolation_scale * output_logits_currtimestep_rescored
+ + (1 - self.inference_cfg_filter_interpolation_scale)
+ * output_logits_currtimestep_unconditioned
+ )
+
+ temperature = self.cfg.get('temperature', 0.85) # set temperature to ~0.01 to approximate greedy decoding
+ output_logits_currtimestep_rescored = output_logits_currtimestep_rescored / temperature
+ output_logits_currtimestep_rescored = torch.nn.functional.softmax(
+ output_logits_currtimestep_rescored, dim=1
+ )
+
+ output_tokens_curr_timestep = torch.multinomial(
+ output_logits_currtimestep_rescored, num_samples=1
+ ) # (B*8, 1)
+
+ if torch.count_nonzero(speech_mask) > 0:
+ # Convert back to (B, 8)
+ output_tokens_curr_timestep = output_tokens_curr_timestep.view(
+ batch_size, self.num_speech_codebooks
+ )
+
+ for _b in range(token_preds.shape[0]):
+ if t > self.decoder_context_len + 10 and token_preds[_b] == self.tokenizer.eos_id:
+ if _b not in end_indices:
+ logging.info("End detected for item {}".format(_b) + " at timestep {}".format(t))
+ end_indices[_b] = t
+ if len(end_indices) == token_preds.shape[0]:
+ end_inference_loop_at = t + self.num_speech_codebooks
+
+ output_token_list.append(output_tokens_curr_timestep)
+
+ # duplicate to 2b dim as input for the next iteration if enabling cfg.
+ if self.inference_apply_text_cfg or self.inference_apply_audio_cfg:
+ output_tokens_curr_timestep = torch.cat(
+ (output_tokens_curr_timestep, output_tokens_curr_timestep), dim=0
+ )
+
+ if torch.count_nonzero(speech_mask) > 0:
+ dec_input_next_timestep = output_tokens_curr_timestep * 1 # (B,8)
+ dec_input_next_timestep[:, 0] = (
+ dec_input_next_timestep[:, 0] + self.speech_offset
+ ) # add offset to first codebook
+ dec_input[:, :, t + 1] = dec_input_next_timestep * 1
+ else:
+ dec_input[:, 0, t + 1] = output_tokens_curr_timestep.squeeze(1)
+
+ # end of for loop
+ output_tokens_combined = torch.stack(output_token_list) # (T, B, 8) if speech else (T, B)
+ if torch.count_nonzero(speech_mask) > 0:
+ output_tokens_combined = output_tokens_combined.permute(1, 2, 0) # (B, 8, T)
+ else:
+ output_tokens_combined = output_tokens_combined.squeeze(2)
+ output_tokens_combined = output_tokens_combined.permute(1, 0) # (B, T)
+
+ # RTF accounts only for autoregressive decoding time; loading the evaluation models below is excluded
+ total_process_time = time.time() - start_time
+
+ # Layerwise token error rate
+ ter_dict = {}
+ for i in range(self.num_speech_codebooks):
+ ter_dict[i] = {'hypothesis': [], 'gt': []}
+
+ device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+
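+ # Lazily load and cache evaluation models (speaker verification, ASR, optional MOS) in self.additional_models.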
+ if 'nemo_sv_model' not in self.additional_models:
+ nemo_sv_model = nemo_asr.models.EncDecSpeakerLabelModel.from_pretrained(model_name='titanet_large')
+ nemo_sv_model = nemo_sv_model.to(device)
+ nemo_sv_model.encoder.disable_torch_distributed = True # For multi-gpu training validation
+ nemo_sv_model.eval()
+ self.additional_models['nemo_sv_model'] = nemo_sv_model
+ logging.info(f"Loaded SV Model: {nemo_sv_model}")
+ else:
+ nemo_sv_model = self.additional_models['nemo_sv_model']
+
+ if 'asr_model' not in self.additional_models:
+ asr_model = self.cfg.get("asr_model_name", "stt_multilingual_fastconformer_hybrid_large_pc_blend_eu")
+
+ if "hybrid" in asr_model:
+ model = nemo_asr.models.EncDecHybridRNNTCTCBPEModel
+ else:
+ model = nemo_asr.models.EncDecRNNTBPEModel
+ asr_model = model.from_pretrained(model_name=asr_model)
+ asr_model = asr_model.to(device)
+ asr_model.encoder.disable_torch_distributed = True # For multi-gpu training validation
+ asr_model.eval()
+ self.additional_models['asr_model'] = asr_model
+ logging.info(f"Loaded ASR Model: {asr_model}")
+ else:
+ asr_model = self.additional_models['asr_model']
+
+ asr_model_zh = None
+ if Lang.zh.value in lang:
+ if 'asr_model_zh' not in self.additional_models:
+ asr_model_zh = nemo_asr.models.EncDecRNNTModel.from_pretrained(
+ model_name="stt_zh_conformer_transducer_large"
+ )
+ asr_model_zh = asr_model_zh.to(device)
+ asr_model_zh.eval()
+ self.additional_models['asr_model_zh'] = asr_model_zh
+ else:
+ asr_model_zh = self.additional_models['asr_model_zh']
+
+ if 'wavlm_sv_model' not in self.additional_models:
+ wavlm_sv_extractor = Wav2Vec2FeatureExtractor.from_pretrained('microsoft/wavlm-base-plus-sv')
+ wavlm_sv_model = WavLMForXVector.from_pretrained('microsoft/wavlm-base-plus-sv')
+ wavlm_sv_model = wavlm_sv_model.to(device)
+ wavlm_sv_model = wavlm_sv_model.eval()
+ self.additional_models['wavlm_sv_model'] = wavlm_sv_model
+ self.additional_models['wavlm_sv_extractor'] = wavlm_sv_extractor
+ logging.info(f"Loaded SV Model: {wavlm_sv_model}")
+ else:
+ wavlm_sv_model = self.additional_models['wavlm_sv_model']
+ wavlm_sv_extractor = self.additional_models['wavlm_sv_extractor']
+
+ # load MOS estimator model only if True.
+ if self.estimate_mos:
+ # load mos estimator.
+ if 'squim_mos_model' not in self.additional_models:
+ squim_mos_model_full = SQUIM_SUBJECTIVE.get_model().to(device)
+ self.additional_models['squim_mos_model'] = squim_mos_model_full
+ else:
+ squim_mos_model_full = self.additional_models['squim_mos_model']
+
+ # load non-matching reference clean audio.
+ ref_16khz_wav, _ = librosa.load(self.non_matching_ref_audio_filepath, sr=16000)
+
+ # prepare MOS estimator by taking a single audio example as an input.
+ squim_mos_model = partial(
+ squim_mos_model_full, reference=torch.from_numpy(ref_16khz_wav).to(device).unsqueeze(0)
+ )
+
+ _exp_dir_path = os.path.join(self.logger.log_dir, 'Sample_Audios')
+ if not os.path.exists(_exp_dir_path):
+ os.mkdir(_exp_dir_path)
+
+ squim_mos_list_pred = []
+ squim_mos_list_context = []
+ squim_mos_list_gt = []
+ similarity_list = []
+ similarity_list_wavlm = []
+ pred_context_similarity_list = []
+ pred_context_similarity_list_wavlm = []
+ gt_context_similarity_list = []
+ gt_context_similarity_list_wavlm = []
+ question_type = []
+
+ # predicting audio
+ batch_size = output_tokens_combined.shape[0]
+ test_dataloader_batch_size = batch_size
+ # during validation self.test_dataloader() may not return a DataLoader, so guard before reading batch_size
+ if isinstance(self.test_dataloader(), torch.utils.data.DataLoader):
+ test_dataloader_batch_size = self.test_dataloader().batch_size
+
+ # logging attention maps.
+ # empty attention_probs_all indicates self.frozen_model.enc_dec_model.return_all_crossattention_probs is False.
+ if len(attention_probs_all) != 0:
+ attention_probs_all = torch.cat(attention_probs_all, dim=2) # B, 12, dec_timesteps, enc_timesteps
+ attention_probs_all = attention_probs_all.mean(dim=1) # B, dec_timesteps, enc_timesteps
+
+ for i in range(batch_size):
+ text_end_step = text_limits[i, 1].item()
+ text_start_step = text_limits[i, 0].item()
+ end_index = end_indices.get(i, output_tokens_combined.shape[2])
+ if len(attention_probs_all) != 0:
+ attention_probs_example = attention_probs_all[i][
+ : end_index - (1 + self.decoder_context_len), text_start_step:text_end_step
+ ] # T, enc_timesteps
+ attention_map = attention_probs_example.float().cpu().numpy().T
+ alignment_image = plot_alignment_to_numpy_for_speechllm(
+ attention_map,
+ phoneme_ver=1,
+ phoneme_seq=None,
+ )
+
+ if global_step is not None:
+ # During validation, step is simply global_step + i
+ step = global_step + i
+ else:
+ # During inference, step is the index of the sample
+ step = batch_idx * test_dataloader_batch_size + i
+
+ self.logger.experiment.add_image(
+ "Inf Attention Map",
+ alignment_image,
+ step,
+ dataformats="HWC",
+ )
+ # Save attention image to file
+ alignment_fp = os.path.join(_exp_dir_path, f'attention_map_{step}.png')
+ imageio.imwrite(alignment_fp, alignment_image)
+
+ wer_score = 0
+ audio_to_pred = []
+ audio_to_pred_zh = []
+ total_audio_seconds = 0
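+ # Per-sample evaluation: decode predicted codec tokens to audio, write wav files, and accumulate
+ # speaker-verification similarity, MOS, and transcripts for WER/CER.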
+ for i in range(batch_size):
+ if global_step is not None:
+ # During validation, step is simply global_step + i
+ step = global_step + i
+ else:
+ # During inference, step is the index of the sample
+ step = batch_idx * test_dataloader_batch_size + i
+
+ audio_len = self.decoder_context_len + (labels[i][0][self.decoder_context_len :] != 0).sum().item()
+
+ if torch.count_nonzero(speech_mask) > 0:
+ dec_input_to_1024 = self.convert_tokens_to_range(dec_input_raw[i, :, 0:audio_len])
+ dec_input_to_1024_answer = dec_input_to_1024[:, self.decoder_context_len + 1 :]
+ dec_input_wav = self.decode_wav_from_codec_model(dec_input_to_1024_answer)
+ self.logger.experiment.add_audio("Inf Dec Input Wav", dec_input_wav, step, self.sample_rate)
+
+ predicted_tokens = output_tokens_combined[i] # Should not contain context even if decoder context
+ if i in end_indices:
+ logging.info(f"Clipping until end index for audio {i}")
+ if self.cfg.get('seq_pattern', 'parallel') == 'delay_parallel':
+ predicted_tokens = predicted_tokens[
+ :, 0 : end_indices[i] - (1 + self.decoder_context_len) + self.num_speech_codebooks
+ ] # trim to audio length
+ else:
+ predicted_tokens = predicted_tokens[
+ :, 0 : end_indices[i] - (1 + self.decoder_context_len)
+ ] # trim to audio length
+
+ pred_img = predicted_tokens.data.cpu().float().numpy()
+ dec_inp_img = dec_input_to_1024.data.cpu().float().numpy()
+ start_time = time.time()
+ predicted_tokens = self.convert_tokens_to_range(predicted_tokens, apply_offset_correction=False)
+ predicted_wav = self.decode_wav_from_codec_model(predicted_tokens)
+ # accumulate audio length in seconds and process time in seconds to the RTF
+ total_process_time = total_process_time + (time.time() - start_time)
+ total_audio_seconds = total_audio_seconds + predicted_wav.size(-1) / self.sample_rate
+
+ self.logger.experiment.add_audio("Inf Pred Wav", predicted_wav, step, self.sample_rate)
+ self.logger.experiment.add_image(
+ "Inf Pred Tokens",
+ plot_codec_to_numpy(pred_img),
+ step,
+ dataformats="HWC",
+ )
+ self.logger.experiment.add_image(
+ "Inf Dec Input Tokens",
+ plot_codec_to_numpy(dec_inp_img),
+ step,
+ dataformats="HWC",
+ )
+
+ # save predicted_wav and gt_wav to a wav files in dir_path
+ if global_step is not None:
+ # During training, overwrite the wav file from the previous validation
+ wav_num = i
+ else:
+ wav_num = step
+
+ audio_fp_pred = os.path.join(_exp_dir_path, f'predicted_wav_{wav_num}.wav')
+ sf.write(audio_fp_pred, predicted_wav.cpu().numpy(), self.sample_rate)
+ audio_fp_gt = os.path.join(_exp_dir_path, f'dec_input_wav_{wav_num}.wav')
+ sf.write(audio_fp_gt, dec_input_wav.cpu().numpy(), self.sample_rate)
+
+ # speaker verification evaluation using nemo model
+ spk_embedding_pred = nemo_sv_model.get_embedding(audio_fp_pred)
+ spk_embedding_pred = spk_embedding_pred.cpu().detach().numpy().flatten()
+ spk_embedding_gt = nemo_sv_model.get_embedding(audio_fp_gt)
+ spk_embedding_gt = spk_embedding_gt.cpu().detach().numpy().flatten()
+ similarity = np.dot(spk_embedding_pred, spk_embedding_gt) / (
+ np.linalg.norm(spk_embedding_pred) * np.linalg.norm(spk_embedding_gt)
+ )
+
+ if log_scalars:
+ self.logger.experiment.add_scalar('Inf SV Cossim Individual Sample', similarity, step)
+ similarity_list.append(similarity)
+
+ # speaker verification evaluation using wavlm model
+ gt_16khz_wav, _ = librosa.load(audio_fp_gt, sr=16000)
+ pred_16khz_wav, _ = librosa.load(audio_fp_pred, sr=16000)
+ inputs_wavlm = wavlm_sv_extractor(
+ [pred_16khz_wav, gt_16khz_wav], padding=True, return_tensors="pt", sampling_rate=16000
+ )
+ for key in inputs_wavlm.keys():
+ inputs_wavlm[key] = inputs_wavlm[key].to(device)
+
+ with torch.no_grad():
+ wavlm_embeddings = wavlm_sv_model(**inputs_wavlm).embeddings
+ wavlm_embeddings = torch.nn.functional.normalize(wavlm_embeddings, dim=-1).cpu()
+
+ spk_embedding_pred_wavlm = wavlm_embeddings[0].cpu().detach().numpy().flatten()
+ spk_embedding_gt_wavlm = wavlm_embeddings[1].cpu().detach().numpy().flatten()
+ similarity_wavlm = np.dot(spk_embedding_pred_wavlm, spk_embedding_gt_wavlm) / (
+ np.linalg.norm(spk_embedding_pred_wavlm) * np.linalg.norm(spk_embedding_gt_wavlm)
+ )
+ similarity_list_wavlm.append(similarity_wavlm)
+
+ if lang[i] == Lang.zh.value:
+ audio_to_pred_zh.append({"step": i, "audio": audio_fp_pred})
+ audio_to_pred_zh.append({"step": i, "audio": audio_fp_gt})
+ else:
+ audio_to_pred.append({"step": i, "audio": audio_fp_pred})
+ audio_to_pred.append({"step": i, "audio": audio_fp_gt})
+
+ if isinstance(context_and_question_tokens, list):
+ context_tokens, question_tokens = context_and_question_tokens
+ input_token_list = [
+ question_tokens[i, 0, j].item()
+ for j in range(context_and_question_tokens_lens[1][i].item())
+ ]
+ input_token_list = [
+ (ti, t) for ti, t in enumerate(input_token_list) if t != 0 and t < self.speech_offset
+ ]
+ context_end_step = context_and_question_tokens_lens[0][i]
+ context_tokens = context_tokens[i][:, :context_end_step]
+ else:
+ input_token_list = [
+ context_and_question_tokens[i, 0, j].item()
+ for j in range(context_and_question_tokens.shape[2])
+ ]
+ input_token_list = [
+ (ti, t) for ti, t in enumerate(input_token_list) if t != 0 and t < self.speech_offset
+ ]
+ context_end_step = input_token_list[0][0]
+ context_tokens = context_and_question_tokens[i][:, :context_end_step]
+
+ spk_embedding_context = spk_embedding_gt
+ spk_embedding_context_wavlm = spk_embedding_gt_wavlm
+ if self.decoder_context_len > 0:
+ context_tokens = dec_input_to_1024[:, : self.decoder_context_len + 1]
+ context_wav = self.decode_wav_from_codec_model(context_tokens)
+ elif context_end_step > 1:
+ is_speech_context = context_tokens[1, :].sum().item() > 0
+ if is_speech_context:
+ context_tokens = self.convert_tokens_to_range(context_tokens, pattern=self.context_pattern)
+ context_wav = self.decode_wav_from_codec_model(context_tokens)
+ else:
+ context_wav = None
+ _context_token_list = [v.item() for v in context_tokens[0, :]]
+ _context_text = self.frozen_model.tokenizer.ids_to_text(
+ [v for v in _context_token_list if v < self.lm_vocab_size]
+ )
+ self.logger.experiment.add_text("Context Text", _context_text, self.global_step)
+
+ else:
+ context_wav = None
+
+ if context_wav is not None:
+ self.logger.experiment.add_audio("Context Wav", context_wav, step, self.sample_rate)
+ context_wav_fp = os.path.join(_exp_dir_path, f'context_wav_{wav_num}.wav')
+ sf.write(context_wav_fp, context_wav.cpu().numpy(), self.sample_rate)
+ # titanet
+ spk_embedding_context = nemo_sv_model.get_embedding(context_wav_fp)
+ spk_embedding_context = spk_embedding_context.cpu().detach().numpy().flatten()
+ # wavlm
+ context_wavlm_wav, _ = librosa.load(context_wav_fp, sr=16000)
+ inputs_wavlm = wavlm_sv_extractor(
+ [context_wavlm_wav], padding=True, return_tensors="pt", sampling_rate=16000
+ )
+ for key in inputs_wavlm.keys():
+ inputs_wavlm[key] = inputs_wavlm[key].to(device)
+
+ with torch.no_grad():
+ wavlm_embeddings = wavlm_sv_model(**inputs_wavlm).embeddings
+ wavlm_embeddings = torch.nn.functional.normalize(wavlm_embeddings, dim=-1).cpu()
+
+ spk_embedding_context_wavlm = wavlm_embeddings[0].cpu().detach().numpy().flatten()
+
+ pred_similarity_context = np.dot(spk_embedding_context, spk_embedding_pred) / (
+ np.linalg.norm(spk_embedding_context) * np.linalg.norm(spk_embedding_pred)
+ )
+ gt_similarity_context = np.dot(spk_embedding_context, spk_embedding_gt) / (
+ np.linalg.norm(spk_embedding_context) * np.linalg.norm(spk_embedding_gt)
+ )
+
+ pred_similarity_context_wavlm = np.dot(spk_embedding_context_wavlm, spk_embedding_pred_wavlm) / (
+ np.linalg.norm(spk_embedding_context_wavlm) * np.linalg.norm(spk_embedding_pred_wavlm)
+ )
+ gt_similarity_context_wavlm = np.dot(spk_embedding_context_wavlm, spk_embedding_gt_wavlm) / (
+ np.linalg.norm(spk_embedding_context_wavlm) * np.linalg.norm(spk_embedding_gt_wavlm)
+ )
+
+ if log_scalars:
+ self.logger.experiment.add_scalar('Inf SV Cossim Context Pred', pred_similarity_context, step)
+ self.logger.experiment.add_scalar('Inf SV Cossim Context GT', gt_similarity_context, step)
+ pred_context_similarity_list.append(pred_similarity_context)
+ gt_context_similarity_list.append(gt_similarity_context)
+ pred_context_similarity_list_wavlm.append(pred_similarity_context_wavlm)
+ gt_context_similarity_list_wavlm.append(gt_similarity_context_wavlm)
+
+ task_question = self.frozen_model.tokenizer.ids_to_text(
+ [v[1] for v in input_token_list if v[1] < self.lm_vocab_size]
+ )
+ self.logger.experiment.add_text("Inf Task Question", task_question, step)
+ if "Phoneme TTS" in task_question:
+ question_type.append("Phoneme TTS")
+ elif "Text to speech this" in task_question:
+ question_type.append("Text to speech this")
+ else:
+ question_type.append("Other")
+
+ task_question_phoneme_tokens = [
+ v[1] - self.lm_vocab_size for v in input_token_list if v[1] >= self.lm_vocab_size
+ ]
+ if len(task_question_phoneme_tokens) > 0:
+ phoneme_text = self.phoneme_tokenizer.decode(task_question_phoneme_tokens)
+ self.logger.experiment.add_text("Inf Task Question Phoneme Text", phoneme_text, step)
+
+ # store predicted_tokens for each layer to compute token error rate
+ for layer_idx in range(self.num_speech_codebooks):
+ ter_dict[layer_idx]['hypothesis'].append(predicted_tokens[layer_idx].cpu().numpy().tolist())
+ ter_dict[layer_idx]['gt'].append(dec_input_to_1024_answer[layer_idx].cpu().numpy().tolist())
+
+ # estimate MOS scores.
+ if self.estimate_mos:
+ squim_mos_score_pred = squim_mos_model(
+ torch.from_numpy(pred_16khz_wav).to(device).unsqueeze(0)
+ ).item()
+ squim_mos_score_gt = squim_mos_model(
+ torch.from_numpy(gt_16khz_wav).to(device).unsqueeze(0)
+ ).item()
+ if context_wav is not None:
+ squim_mos_score_context = squim_mos_model(context_wav.to(device).unsqueeze(0)).item()
+ squim_mos_list_context.append(squim_mos_score_context)
+ squim_mos_list_pred.append(squim_mos_score_pred)
+ squim_mos_list_gt.append(squim_mos_score_gt)
+ else:
+ r = labels[i, 0].long()
+ nzm = r != 0
+ r = r.tolist()[:-1]
+ nzm = nzm[:-1]
+ h = output_tokens_combined[i].long() * nzm
+ h = h.tolist()
+ cur_wer_score = editdistance.eval(r, h)
+ if log_scalars:
+ self.logger.experiment.add_scalar('WER', cur_wer_score, step)
+ logging.info(f"current wer score : {cur_wer_score}")
+ wer_score += cur_wer_score
+ if wer_score > 0:
+ wer_score /= batch_size
+ if log_scalars:
+ self.logger.experiment.add_scalar('AVG WER', wer_score, step)
+ logging.info(f"average wer score : {wer_score}")
+
+ # compute token error rate for each layer
+ if log_scalars:
+ for layer_idx in range(self.num_speech_codebooks):
+ wer = word_error_rate(ter_dict[layer_idx]['hypothesis'], ter_dict[layer_idx]['gt'], use_cer=True)
+ self.logger.experiment.add_scalar(f'Inf TER Layer {layer_idx}', wer, 0)
+
+ greedy_transcripts = []
+ if len(audio_to_pred) > 0:
+ greedy_transcripts.extend(asr_model.transcribe([i["audio"] for i in audio_to_pred])[0])
+ if len(audio_to_pred_zh) > 0:
+ greedy_transcripts.extend(asr_model_zh.transcribe([i["audio"] for i in audio_to_pred_zh])[0])
+
+ all_audio_to_pred = audio_to_pred + audio_to_pred_zh
+ # Note: the batch WER below is not exactly the mean of per-sample WERs; it is an approximation
+
+ # These are between ASR outputs of GT audio and predicted audio
+ wer_batch = []
+ cer_batch = []
+ cer_phoneme = []
+ wer_phoneme = []
+ cer_tts = []
+ wer_tts = []
+
+ # These are between ASR output of Pred audio and GT text
+ wer_batch_gt = []
+ cer_batch_gt = []
+ cer_phoneme_gt = []
+ wer_phoneme_gt = []
+ cer_tts_gt = []
+ wer_tts_gt = []
+
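+ # greedy_transcripts holds (pred, GT) transcript pairs back to back, so step through them two at a time.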
+ for i in range(0, len(greedy_transcripts) - 1, 2):
+ assert all_audio_to_pred[i]["step"] == all_audio_to_pred[i + 1]["step"]
+ step = batch_idx * test_dataloader_batch_size + all_audio_to_pred[i]["step"]
+ question_text = question_texts[i // 2]
+
+ # No need to process text since both are ASR outputs
+ cer_sample = word_error_rate([greedy_transcripts[i]], [greedy_transcripts[i + 1]], use_cer=True)
+ wer_sample = word_error_rate([greedy_transcripts[i]], [greedy_transcripts[i + 1]], use_cer=False)
+
+ # Processing text since one is ASR output and the other is the GT text
+ cer_gt = word_error_rate(
+ [self.process_text(greedy_transcripts[i])], [self.process_text(question_text)], use_cer=True
+ )
+ wer_gt = word_error_rate(
+ [self.process_text(greedy_transcripts[i])], [self.process_text(question_text)], use_cer=False
+ )
+
+ self.logger.experiment.add_text("Inf Predicted Text", greedy_transcripts[i], step)
+ self.logger.experiment.add_text("Inf GT Text", greedy_transcripts[i + 1], step)
+ self.logger.experiment.add_text("Inf Question Text", question_text, step)
+ if log_scalars:
+ self.logger.experiment.add_scalar('Inf CER Transcript', cer_sample, step)
+ self.logger.experiment.add_scalar('Inf WER Transcript', wer_sample, step)
+ self.logger.experiment.add_scalar('Inf CER GT Transcript', cer_gt, step)
+ cer_batch.append(cer_sample)
+ wer_batch.append(wer_sample)
+ cer_batch_gt.append(cer_gt)
+ wer_batch_gt.append(wer_gt)
+ if question_type[all_audio_to_pred[i]["step"]] == "Phoneme TTS":
+ if log_scalars:
+ self.logger.experiment.add_scalar('Inf CER Phoneme Task', cer_sample, step)
+ self.logger.experiment.add_scalar('Inf WER Phoneme Task', wer_sample, step)
+ self.logger.experiment.add_scalar('Inf CER GT Phoneme Task', cer_gt, step)
+ cer_phoneme.append(cer_sample)
+ wer_phoneme.append(wer_sample)
+ cer_phoneme_gt.append(cer_gt)
+ wer_phoneme_gt.append(wer_gt)
+ elif question_type[all_audio_to_pred[i]["step"]] == "Text to speech this":
+ if log_scalars:
+ self.logger.experiment.add_scalar('Inf CER TTS Task', cer_sample, step)
+ self.logger.experiment.add_scalar('Inf WER TTS Task', wer_sample, step)
+ self.logger.experiment.add_scalar('Inf CER GT TTS Task', cer_gt, step)
+ cer_tts.append(cer_sample)
+ wer_tts.append(wer_sample)
+ cer_tts_gt.append(cer_gt)
+ wer_tts_gt.append(wer_gt)
+
+ # compute average similarity
+ similarity_avg = np.mean(similarity_list)
+ pred_context_similarity_avg = np.mean(pred_context_similarity_list)
+ gt_context_similarity_avg = np.mean(gt_context_similarity_list)
+ similarity_avg_wavlm = np.mean(similarity_list_wavlm)
+ pred_context_similarity_avg_wavlm = np.mean(pred_context_similarity_list_wavlm)
+ gt_context_similarity_avg_wavlm = np.mean(gt_context_similarity_list_wavlm)
+
+ if log_scalars:
+            self.logger.experiment.add_scalar('Inf SV Avg Cossim', similarity_avg, batch_idx)
+ self.predict_step_outputs.append(
+ {
+ 'titanet_avg_cossim': similarity_avg,
+ 'titanet_avg_cossim_context_pred': pred_context_similarity_avg,
+ 'titanet_avg_cossim_context_gt': gt_context_similarity_avg,
+ 'wavlm_avg_cossim': similarity_avg_wavlm,
+ 'wavlm_avg_cossim_context_pred': pred_context_similarity_avg_wavlm,
+ 'wavlm_avg_cossim_context_gt': gt_context_similarity_avg_wavlm,
+ 'squim_mos_pred': np.mean(squim_mos_list_pred) if len(squim_mos_list_pred) > 0 else None,
+ 'squim_mos_context': np.mean(squim_mos_list_context) if len(squim_mos_list_context) > 0 else None,
+ 'squim_mos_gt': np.mean(squim_mos_list_gt) if len(squim_mos_list_gt) > 0 else None,
+ 'cer_transcript': np.mean(cer_batch),
+ 'wer_transcript': np.mean(wer_batch),
+ 'cer_phoneme': np.mean(cer_phoneme) if len(cer_phoneme) > 0 else None,
+ 'wer_phoneme': np.mean(wer_phoneme) if len(wer_phoneme) > 0 else None,
+ 'cer_tts': np.mean(cer_tts) if len(cer_tts) > 0 else None,
+ 'wer_tts': np.mean(wer_tts) if len(wer_tts) > 0 else None,
+ 'cer_transcript_gt': np.mean(cer_batch_gt),
+ 'wer_transcript_gt': np.mean(wer_batch_gt),
+ 'cer_phoneme_gt': np.mean(cer_phoneme_gt) if len(cer_phoneme_gt) > 0 else None,
+ 'wer_phoneme_gt': np.mean(wer_phoneme_gt) if len(wer_phoneme_gt) > 0 else None,
+ 'cer_tts_gt': np.mean(cer_tts_gt) if len(cer_tts_gt) > 0 else None,
+ 'wer_tts_gt': np.mean(wer_tts_gt) if len(wer_tts_gt) > 0 else None,
+ "RTF": total_process_time / total_audio_seconds,
+ }
+ )
+
+ # TODO @xueyang: PTL 2.0+ patch. Signature of method `on_predict_epoch_end` does not match signature of the base method in PTL class 'ModelHooks'.
+ # Remove the `outputs` param and choose `self.predict_step_output` instead.
+ def on_predict_epoch_end(self, outputs: List[Any]) -> None:
+
+ gather_results = [None for _ in range(parallel_state.get_data_parallel_world_size())]
+ all_preds = list(itertools.chain(*[item['preds_text'] for item in outputs[0]]))
+ all_labels = list(itertools.chain(*[item['labels_text'] for item in outputs[0]]))
+ all_inputs = list(itertools.chain(*[item['input_text'] for item in outputs[0]]))
+
+ assert len(all_preds) == len(all_labels)
+ assert len(all_preds) == len(all_inputs)
+
+ # Gather inputs, predictions, and ground truths from all workers
+ torch.distributed.all_gather_object(
+ gather_results,
+ [(input, pred, label) for (input, pred, label) in zip(all_inputs, all_preds, all_labels)],
+ group=parallel_state.get_data_parallel_group(),
+ )
+
+ # Deduplicate sentences that may have been distributed across multiple data parallel ranks.
+ if parallel_state.get_data_parallel_rank() == 0:
+ gather_results_dedup = list(set(itertools.chain(*gather_results)))
+
+ input_prediction_pair = []
+ correct = 0
+ for input, pred, label in gather_results_dedup:
+ input_prediction_pair.append((input, pred))
+                if label and pred == label:
+                    correct += 1
+
+ acc = correct / len(gather_results_dedup) if all_labels[0] else None
+ logging.info(f'Prediction results: {acc}')
+            logging.info('Test finished.')
diff --git a/nemo/collections/tts/models/ssl_tts.py b/nemo/collections/tts/models/ssl_tts.py
index 298a1a599008..f2cc4f798ec5 100644
--- a/nemo/collections/tts/models/ssl_tts.py
+++ b/nemo/collections/tts/models/ssl_tts.py
@@ -18,10 +18,10 @@
import librosa
import torch
from hydra.utils import instantiate
+from lightning.pytorch import Trainer
+from lightning.pytorch.loggers import TensorBoardLogger
+from lightning.pytorch.utilities.combined_loader import CombinedLoader
from omegaconf import DictConfig, OmegaConf
-from pytorch_lightning import Trainer
-from pytorch_lightning.loggers import TensorBoardLogger
-from pytorch_lightning.utilities.combined_loader import CombinedLoader
from nemo.collections.asr.losses.angularloss import AngularSoftmaxLoss
from nemo.collections.tts.data.dataset import TTSDataset
@@ -38,10 +38,10 @@
class SSLDisentangler(ModelPT):
"""
SSLDisentangler is a Conformer based model for extracting disentangled content and speaker embeddings
- from an audio waveform. This model uses a pre-trained Conformer SSL model. To extract the linguistic content
- and speaker representations using a pre-trained Conformer, two randomly initialized downstream
- heads are added and the entire setup is finetuned in multi-task manner for speech recognition and speaker verification.
- These representations can be used by FastPitchModel_SSL for voice conversion by swapping the speaker embedding
+ from an audio waveform. This model uses a pre-trained Conformer SSL model. To extract the linguistic content
+ and speaker representations using a pre-trained Conformer, two randomly initialized downstream
+    heads are added and the entire setup is fine-tuned in a multi-task manner for speech recognition and speaker verification.
+ These representations can be used by FastPitchModel_SSL for voice conversion by swapping the speaker embedding
of a given source utterance, with the speaker embedding of a target speaker.
"""
@@ -92,7 +92,10 @@ def __init__(self, cfg: DictConfig, trainer: Trainer = None):
librosa_mel_filter = librosa.filters.mel(
sr=stft_cfg.sample_rate, n_fft=stft_cfg.n_fft, n_mels=stft_cfg.features, fmin=0, fmax=8000
)
- fb = torch.tensor(librosa_mel_filter, dtype=torch.float,).unsqueeze(0)
+ fb = torch.tensor(
+ librosa_mel_filter,
+ dtype=torch.float,
+ ).unsqueeze(0)
self.register_buffer("fb", fb)
@@ -212,7 +215,10 @@ def configure_optimizers(self):
sched_downstream_config = optim_downstream_config.pop("sched", None)
OmegaConf.set_struct(optim_downstream_config, True)
- optim_backbone = instantiate(optim_backbone_config, params=self.encoder.parameters(),)
+ optim_backbone = instantiate(
+ optim_backbone_config,
+ params=self.encoder.parameters(),
+ )
optim_downstream = instantiate(
optim_downstream_config,
params=itertools.chain(
@@ -254,7 +260,8 @@ def configure_optimizers(self):
def forward(self, input_signal=None, input_signal_length=None, normalize_content=True):
processed_signal, processed_signal_length = self.preprocessor_disentangler(
- input_signal=input_signal, length=input_signal_length,
+ input_signal=input_signal,
+ length=input_signal_length,
)
encoded, encoded_len = self.encoder(audio_signal=processed_signal, length=processed_signal_length) # b,c,t
@@ -292,7 +299,9 @@ def forward_for_export(self, input_signal=None, input_signal_length=None, normal
# Same as forward right now. Earlier version of encoder had a different forward for export.
# This function is still kept for compatibility with older evaluation/inference scripts.
return self.forward(
- input_signal=input_signal, input_signal_length=input_signal_length, normalize_content=normalize_content,
+ input_signal=input_signal,
+ input_signal_length=input_signal_length,
+ normalize_content=normalize_content,
)
def training_step(self, batch, batch_idx):
diff --git a/nemo/collections/tts/models/tacotron2.py b/nemo/collections/tts/models/tacotron2.py
index 2fb005d80ca6..33d476029011 100644
--- a/nemo/collections/tts/models/tacotron2.py
+++ b/nemo/collections/tts/models/tacotron2.py
@@ -18,9 +18,9 @@
import torch
from hydra.utils import instantiate
+from lightning.pytorch.loggers import TensorBoardLogger, WandbLogger
from omegaconf import MISSING, DictConfig, OmegaConf, open_dict
from omegaconf.errors import ConfigAttributeError
-from pytorch_lightning.loggers import TensorBoardLogger, WandbLogger
from torch import nn
from nemo.collections.common.parts.preprocessing import parsers
diff --git a/nemo/collections/tts/models/univnet.py b/nemo/collections/tts/models/univnet.py
index 64ee891b0754..12500be8d180 100644
--- a/nemo/collections/tts/models/univnet.py
+++ b/nemo/collections/tts/models/univnet.py
@@ -18,8 +18,8 @@
import torch
import torch.nn.functional as F
from hydra.utils import instantiate
+from lightning.pytorch.loggers.wandb import WandbLogger
from omegaconf import DictConfig, OmegaConf, open_dict
-from pytorch_lightning.loggers.wandb import WandbLogger
from nemo.collections.tts.losses.hifigan_losses import DiscriminatorLoss, GeneratorLoss
from nemo.collections.tts.losses.stftlosses import MultiResolutionSTFTLoss
@@ -114,8 +114,14 @@ def configure_optimizers(self):
if sched_config is None and 'sched' in self._cfg:
sched_config = self._cfg.sched
- optim_g = instantiate(optim_config, params=self.generator.parameters(),)
- optim_d = instantiate(optim_config, params=itertools.chain(self.mrd.parameters(), self.mpd.parameters()),)
+ optim_g = instantiate(
+ optim_config,
+ params=self.generator.parameters(),
+ )
+ optim_d = instantiate(
+ optim_config,
+ params=itertools.chain(self.mrd.parameters(), self.mpd.parameters()),
+ )
if sched_config is not None:
max_steps = self._cfg.get("max_steps", None)
@@ -290,7 +296,7 @@ def stft(x):
comp = torch.stft(x.squeeze(1), n_fft=1024, hop_length=256, win_length=1024, return_complex=True)
comp = torch.view_as_real(comp)
real, imag = comp[..., 0], comp[..., 1]
- mags = torch.sqrt(real ** 2 + imag ** 2)
+ mags = torch.sqrt(real**2 + imag**2)
phase = torch.atan2(imag, real)
return mags, phase
diff --git a/nemo/collections/tts/models/vits.py b/nemo/collections/tts/models/vits.py
index 4a891fa8823e..3c53442a0863 100644
--- a/nemo/collections/tts/models/vits.py
+++ b/nemo/collections/tts/models/vits.py
@@ -18,9 +18,9 @@
import omegaconf
import torch
from hydra.utils import instantiate
+from lightning.pytorch import Trainer
+from lightning.pytorch.loggers import WandbLogger
from omegaconf import DictConfig, OmegaConf
-from pytorch_lightning import Trainer
-from pytorch_lightning.loggers import WandbLogger
from torch.cuda.amp import autocast
from torch.nn import functional as F
diff --git a/nemo/collections/tts/models/waveglow.py b/nemo/collections/tts/models/waveglow.py
index 728b5b94b084..04eec734b26e 100644
--- a/nemo/collections/tts/models/waveglow.py
+++ b/nemo/collections/tts/models/waveglow.py
@@ -15,8 +15,8 @@
import torch
from hydra.utils import instantiate
+from lightning.pytorch.loggers import TensorBoardLogger
from omegaconf import DictConfig, open_dict
-from pytorch_lightning.loggers import TensorBoardLogger
from nemo.collections.tts.losses.waveglowloss import WaveGlowLoss
from nemo.collections.tts.models.base import GlowVocoder
diff --git a/nemo/collections/tts/parts/utils/callbacks.py b/nemo/collections/tts/parts/utils/callbacks.py
index c4ec09031cf9..1856dee0ce0f 100644
--- a/nemo/collections/tts/parts/utils/callbacks.py
+++ b/nemo/collections/tts/parts/utils/callbacks.py
@@ -23,10 +23,10 @@
import soundfile as sf
import torch
from einops import rearrange
-from pytorch_lightning import Callback, LightningModule, Trainer
-from pytorch_lightning.loggers import TensorBoardLogger
-from pytorch_lightning.loggers.logger import Logger
-from pytorch_lightning.loggers.wandb import WandbLogger
+from lightning.pytorch import Callback, LightningModule, Trainer
+from lightning.pytorch.loggers import TensorBoardLogger
+from lightning.pytorch.loggers.logger import Logger
+from lightning.pytorch.loggers.wandb import WandbLogger
from torch import Tensor
from nemo.collections.tts.parts.utils.helpers import create_plot
@@ -194,7 +194,10 @@ def _log_audio(self, audio: AudioArtifact, log_dir: Path, step: int):
if self.tensorboard_logger:
self.tensorboard_logger.add_audio(
- tag=audio.id, snd_tensor=audio.data, global_step=step, sample_rate=audio.sample_rate,
+ tag=audio.id,
+ snd_tensor=audio.data,
+ global_step=step,
+ sample_rate=audio.sample_rate,
)
if self.wandb_logger:
@@ -212,7 +215,10 @@ def _log_image(self, image: ImageArtifact, log_dir: Path, step: int):
if self.tensorboard_logger:
self.tensorboard_logger.add_image(
- tag=image.id, img_tensor=image_plot, global_step=step, dataformats="HWC",
+ tag=image.id,
+ img_tensor=image_plot,
+ global_step=step,
+ dataformats="HWC",
)
if self.wandb_logger:
@@ -220,8 +226,7 @@ def _log_image(self, image: ImageArtifact, log_dir: Path, step: int):
self.wandb_logger.log({image.id: wandb_image})
def _log_artifacts(self, audio_list: list, image_list: list, log_dir: Optional[Path] = None, global_step: int = 0):
- """Log audio and image artifacts.
- """
+ """Log audio and image artifacts."""
if log_dir is not None:
log_dir.mkdir(parents=True, exist_ok=True)
@@ -232,8 +237,7 @@ def _log_artifacts(self, audio_list: list, image_list: list, log_dir: Optional[P
self._log_image(image=image, log_dir=log_dir, step=global_step)
def on_fit_start(self, trainer: Trainer, model: LightningModule):
- """Log initial data artifacts.
- """
+ """Log initial data artifacts."""
audio_list = []
image_list = []
for batch_dict in self.data_loader:
@@ -255,8 +259,7 @@ def on_fit_start(self, trainer: Trainer, model: LightningModule):
self._log_artifacts(audio_list=audio_list, image_list=image_list, log_dir=log_dir)
def on_train_epoch_end(self, trainer: Trainer, model: LightningModule):
- """Log artifacts at the end of an epoch.
- """
+ """Log artifacts at the end of an epoch."""
epoch = 1 + model.current_epoch
if (epoch not in self.log_epochs) and (epoch % self.epoch_frequency != 0):
return
@@ -306,7 +309,10 @@ def generate_artifacts(
audio_gt_path = Path(f"{dataset_name}/{audio_id}_gt.wav")
audio_gt_i = audio[i, : audio_len[i]].cpu().numpy()
audio_artifact = AudioArtifact(
- id=f"audio_gt_{audio_id}", data=audio_gt_i, filepath=audio_gt_path, sample_rate=model.sample_rate,
+ id=f"audio_gt_{audio_id}",
+ data=audio_gt_i,
+ filepath=audio_gt_path,
+ sample_rate=model.sample_rate,
)
audio_artifacts.append(audio_artifact)
return audio_artifacts, []
@@ -321,7 +327,10 @@ def generate_artifacts(
audio_pred_path = Path(f"{dataset_name}/{audio_id}.wav")
audio_pred_i = audio_pred[i, : audio_len[i]].cpu().numpy()
audio_artifact = AudioArtifact(
- id=f"audio_{audio_id}", data=audio_pred_i, filepath=audio_pred_path, sample_rate=model.sample_rate,
+ id=f"audio_{audio_id}",
+ data=audio_pred_i,
+ filepath=audio_pred_path,
+ sample_rate=model.sample_rate,
)
audio_artifacts.append(audio_artifact)
@@ -378,7 +387,10 @@ def _generate_audio(
audio_pred_path = Path(f"{dataset_name}/{audio_id}_audio_out.wav")
audio_pred_i = audio_pred[i, : audio_pred_len[i]].cpu().numpy()
audio_artifact = AudioArtifact(
- id=f"audio_out_{audio_id}", data=audio_pred_i, filepath=audio_pred_path, sample_rate=model.sample_rate,
+ id=f"audio_out_{audio_id}",
+ data=audio_pred_i,
+ filepath=audio_pred_path,
+ sample_rate=model.sample_rate,
)
audio_artifacts.append(audio_artifact)
@@ -388,7 +400,10 @@ def _generate_audio(
audio_in_path = Path(f"{dataset_name}/{audio_id}_audio_in.wav")
audio_in_i = audio[i, : audio_len[i]].cpu().numpy()
audio_artifact = AudioArtifact(
- id=f"audio_in_{audio_id}", data=audio_in_i, filepath=audio_in_path, sample_rate=model.sample_rate,
+ id=f"audio_in_{audio_id}",
+ data=audio_in_i,
+ filepath=audio_in_path,
+ sample_rate=model.sample_rate,
)
audio_artifacts.append(audio_artifact)
@@ -538,7 +553,11 @@ def _create_ground_truth_artifacts(
spec_gt_path = Path(f"{dataset_name}/{audio_id}_spec_gt.png")
spec_gt_i = spec[i, :, : spec_len[i]].cpu().numpy()
spec_artifact = ImageArtifact(
- id=f"spec_{audio_id}", data=spec_gt_i, filepath=spec_gt_path, x_axis="Audio Frames", y_axis="Channels",
+ id=f"spec_{audio_id}",
+ data=spec_gt_i,
+ filepath=spec_gt_path,
+ x_axis="Audio Frames",
+ y_axis="Channels",
)
image_artifacts.append(spec_artifact)
@@ -565,14 +584,22 @@ def _generate_predictions(
with torch.no_grad():
# [B, C, T_spec]
- mels_pred, mels_pred_len, *_ = model.forward(text=text, input_lens=text_lens, speaker=speaker,)
+ mels_pred, mels_pred_len, *_ = model.forward(
+ text=text,
+ input_lens=text_lens,
+ speaker=speaker,
+ )
if self.log_spectrogram:
for i, (dataset_name, audio_id) in enumerate(zip(dataset_names, audio_ids)):
spec_path = Path(f"{dataset_name}/{audio_id}_spec.png")
spec_i = mels_pred[i, :, : mels_pred_len[i]].cpu().numpy()
spec_artifact = ImageArtifact(
- id=f"spec_{audio_id}", data=spec_i, filepath=spec_path, x_axis="Audio Frames", y_axis="Channels",
+ id=f"spec_{audio_id}",
+ data=spec_i,
+ filepath=spec_path,
+ x_axis="Audio Frames",
+ y_axis="Channels",
)
image_artifacts.append(spec_artifact)
diff --git a/nemo/collections/tts/parts/utils/helpers.py b/nemo/collections/tts/parts/utils/helpers.py
index a4c65f9ed0e5..28be259502c5 100644
--- a/nemo/collections/tts/parts/utils/helpers.py
+++ b/nemo/collections/tts/parts/utils/helpers.py
@@ -48,8 +48,8 @@
import librosa
import matplotlib.pylab as plt
import numpy as np
+import seaborn as sns
import torch
-from einops import rearrange
from numba import jit, prange
from nemo.collections.tts.torch.tts_data_types import DATA_STR2DATA_CLASS, MAIN_DATA_TYPES, WithLens
@@ -63,7 +63,7 @@
HAVE_WANDB = False
try:
- from pytorch_lightning.utilities import rank_zero_only
+ from lightning.pytorch.utilities import rank_zero_only
except ModuleNotFoundError:
from functools import wraps
@@ -468,6 +468,74 @@ def plot_alignment_to_numpy(alignment, title='', info=None, phoneme_seq=None, vm
return data
+def plot_alignment_to_numpy_for_speechllm(
+ alignment,
+ title='',
+ info=None,
+ phoneme_seq=None,
+ vmin=None,
+ vmax=None,
+ phoneme_ver=0,
+ phone_offset=2,
+ h_offset=True,
+):
+ alignment = np.clip(alignment, a_min=0, a_max=None)
+ fig, ax = plt.subplots(figsize=(8, 6))
+ im = ax.imshow(alignment, aspect='auto', origin='lower', interpolation='none', vmin=vmin, vmax=vmax)
+ ax.set_title(title)
+ fig.colorbar(im, ax=ax)
+ xlabel = 'Decoder timestep'
+ if info is not None:
+ xlabel += '\n\n' + info
+ plt.xlabel(xlabel)
+ plt.ylabel('Encoder timestep')
+
+ if phoneme_seq is not None:
+ if phoneme_ver == 0:
+            # for debugging phonemes and durations in alignment maps; not used by default in training code
+ ax.set_yticks(np.arange(len(phoneme_seq)))
+ ax.set_yticklabels(phoneme_seq)
+ ax.hlines(np.arange(len(phoneme_seq)), xmin=0.0, xmax=max(ax.get_xticks()))
+ elif phoneme_ver == 1:
+ yticks = ax.get_yticks()
+ new_yticks = []
+ for tick in yticks:
+ if tick < 0 or tick > alignment.shape[0]:
+ continue
+ new_yticks.append(tick)
+ new_yticks += phoneme_seq
+ ax.set_yticks(new_yticks)
+ elif phoneme_ver == 2:
+ phones = phoneme_seq[phone_offset:]
+ ax.set_yticks(np.arange(len(phones)))
+ ax.set_yticklabels(phones)
+ ax.hlines(np.arange(0.5, len(phones) - 0.5, 1.0), xmin=0.0, xmax=alignment.shape[1] - 0.5, colors="black")
+
+ if h_offset:
+ xticks = ax.get_xticks()
+ new_xticks = []
+ for tick in xticks:
+ new_xticks.append(f"{tick+phoneme_seq[1]:.0f}")
+ ax.set_xticklabels(new_xticks)
+
+ plt.tight_layout()
+ fig.canvas.draw()
+ data = save_figure_to_numpy(fig)
+ plt.close()
+ return data
+
+
+def plot_codec_to_numpy(codes, title=''):
+ fig, ax = plt.subplots(figsize=(10, 3))
+ sns.heatmap(codes, ax=ax)
+
+ plt.tight_layout()
+ fig.canvas.draw()
+ data = save_figure_to_numpy(fig)
+ plt.close()
+ return data
+
+
def plot_pitch_to_numpy(pitch, ylim_range=None):
fig, ax = plt.subplots(figsize=(12, 3))
plt.plot(pitch)
diff --git a/nemo/collections/tts/parts/utils/tts_dataset_utils.py b/nemo/collections/tts/parts/utils/tts_dataset_utils.py
index 5f1185c2c399..96806f633a54 100644
--- a/nemo/collections/tts/parts/utils/tts_dataset_utils.py
+++ b/nemo/collections/tts/parts/utils/tts_dataset_utils.py
@@ -67,8 +67,7 @@ def get_audio_filepaths(manifest_entry: Dict[str, Any], audio_dir: Path) -> Tupl
def normalize_volume(audio: np.array, volume_level: float = 0.95) -> np.array:
- """Apply peak normalization to the input audio.
- """
+ """Apply peak normalization to the input audio."""
if not (0.0 <= volume_level <= 1.0):
raise ValueError(f"Volume must be in range [0.0, 1.0], received {volume_level}")
@@ -88,10 +87,11 @@ class BetaBinomialInterpolator:
The implementation is taken from https://github.com/NVIDIA/DeepLearningExamples/blob/master/PyTorch/SpeechSynthesis/FastPitch/fastpitch/data_function.py
"""
- def __init__(self, round_mel_len_to=50, round_text_len_to=10, cache_size=500):
+ def __init__(self, round_mel_len_to=50, round_text_len_to=10, cache_size=500, scaling_factor: float = 1.0):
self.round_mel_len_to = round_mel_len_to
self.round_text_len_to = round_text_len_to
- self.bank = functools.lru_cache(maxsize=cache_size)(beta_binomial_prior_distribution)
+ cached_func = lambda x, y: beta_binomial_prior_distribution(x, y, scaling_factor=scaling_factor)
+ self.bank = functools.lru_cache(maxsize=cache_size)(cached_func)
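+        # caching a wrapper (rather than beta_binomial_prior_distribution itself)
+        # bakes scaling_factor into every call while still keying the LRU cache
+        # on the (mel_len, text_len) arguments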
@staticmethod
def round(val, to):
@@ -315,7 +315,11 @@ def load_audio(
def sample_audio(
- manifest_entry: Dict[str, Any], audio_dir: Path, sample_rate: int, n_samples: int, volume_norm: bool = False,
+ manifest_entry: Dict[str, Any],
+ audio_dir: Path,
+ sample_rate: int,
+ n_samples: int,
+ volume_norm: bool = False,
) -> Tuple[np.ndarray, Path, Path]:
"""
Randomly sample an audio segment from a manifest entry.
diff --git a/nemo/collections/vision/models/megatron_vit_classification_models.py b/nemo/collections/vision/models/megatron_vit_classification_models.py
index 5cffdd6d12a3..c4024a5a47a7 100644
--- a/nemo/collections/vision/models/megatron_vit_classification_models.py
+++ b/nemo/collections/vision/models/megatron_vit_classification_models.py
@@ -17,9 +17,9 @@
from typing import Any, Optional
import torch
+from lightning.pytorch.accelerators import CPUAccelerator
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf.dictconfig import DictConfig
-from pytorch_lightning.accelerators import CPUAccelerator
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.data.language_modeling.megatron.data_samplers import MegatronPretrainingSampler
from nemo.collections.nlp.models.language_modeling.megatron_base_model import MegatronBaseModel
diff --git a/nemo/collections/vision/modules/common/megatron/vision_transformer.py b/nemo/collections/vision/modules/common/megatron/vision_transformer.py
index 80793067128c..2abaf6dfe224 100644
--- a/nemo/collections/vision/modules/common/megatron/vision_transformer.py
+++ b/nemo/collections/vision/modules/common/megatron/vision_transformer.py
@@ -169,6 +169,8 @@ def forward(
self_attention_relative_position_bias=None,
cross_attention_relative_position_bias=None,
checkpoint_core_attention=False,
+ decoder_max_sequence_len=None,
+ encoder_max_sequence_len=None,
):
# Self attention.
if rotary_pos_emb is not None:
@@ -373,6 +375,8 @@ def forward(
self_attention_relative_position_bias=None,
cross_attention_relative_position_bias=None,
checkpoint_core_attention=False,
+ decoder_max_sequence_len=None,
+ encoder_max_sequence_len=None,
):
kwargs = locals()
for key in ["self", "__class__"]:
diff --git a/nemo/collections/vlm/__init__.py b/nemo/collections/vlm/__init__.py
index 2aeeae299a7d..266790f3af71 100644
--- a/nemo/collections/vlm/__init__.py
+++ b/nemo/collections/vlm/__init__.py
@@ -1,28 +1,58 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from nemo.collections.vlm.mllama.data import MLlamaLazyDataModule, MLlamaMockDataModule
+from nemo.collections.vlm.mllama.model.base import (
+ CrossAttentionTextConfig,
+ CrossAttentionVisionConfig,
+ MLlamaModel,
+ MLlamaModelConfig,
+)
+from nemo.collections.vlm.mllama.model.mllama import (
+ MLlamaConfig11B,
+ MLlamaConfig11BInstruct,
+ MLlamaConfig90B,
+ MLlamaConfig90BInstruct,
+)
from nemo.collections.vlm.neva.data import (
DataConfig,
ImageDataConfig,
ImageToken,
- MockDataModule,
+ LlavaNextTaskEncoder,
MultiModalToken,
NevaLazyDataModule,
+ NevaMockDataModule,
VideoDataConfig,
VideoToken,
)
-from nemo.collections.vlm.neva.model import (
+from nemo.collections.vlm.neva.model.base import (
CLIPViTConfig,
HFCLIPVisionConfig,
- Llava1_5Config7B,
- Llava1_5Config13B,
- LlavaConfig,
- LlavaModel,
MultimodalProjectorConfig,
NevaConfig,
NevaModel,
)
+from nemo.collections.vlm.neva.model.llava import Llava15Config7B, Llava15Config13B, LlavaConfig, LlavaModel
+from nemo.collections.vlm.neva.model.vit_config import CLIPViTL_14_336_Config, SigLIPViT400M_14_384_Config
+from nemo.collections.vlm.peft import LoRA
+from nemo.collections.vlm.recipes import *
__all__ = [
- "MockDataModule",
+ "NevaMockDataModule",
"NevaLazyDataModule",
+ "MLlamaMockDataModule",
+ "MLlamaLazyDataModule",
"DataConfig",
"ImageDataConfig",
"VideoDataConfig",
@@ -31,11 +61,24 @@
"VideoToken",
"CLIPViTConfig",
"HFCLIPVisionConfig",
+ "CLIPViTL_14_336_Config",
+ "SigLIPViT400M_14_384_Config",
"MultimodalProjectorConfig",
"NevaConfig",
"NevaModel",
"LlavaConfig",
- "Llava1_5Config7B",
- "Llava1_5Config13B",
+ "Llava15Config7B",
+ "Llava15Config13B",
"LlavaModel",
+ "LlavaNextTaskEncoder",
+ "MLlamaModel",
+ "MLlamaModelConfig",
+ "CrossAttentionTextConfig",
+ "CrossAttentionVisionConfig",
+ "MLlamaConfig11B",
+ "MLlamaConfig11BInstruct",
+ "MLlamaConfig90B",
+ "MLlamaConfig90BInstruct",
+ "mllama_11b",
+ "mllama_90b",
]
diff --git a/nemo/collections/vlm/layer_specs.py b/nemo/collections/vlm/layer_specs.py
new file mode 100644
index 000000000000..11c4d697a5aa
--- /dev/null
+++ b/nemo/collections/vlm/layer_specs.py
@@ -0,0 +1,131 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from megatron.core.fusions.fused_bias_dropout import get_bias_dropout_add
+from megatron.core.tensor_parallel.layers import ColumnParallelLinear, RowParallelLinear
+from megatron.core.transformer.attention import SelfAttention, SelfAttentionSubmodules
+from megatron.core.transformer.dot_product_attention import DotProductAttention
+from megatron.core.transformer.enums import AttnMaskType
+from megatron.core.transformer.identity_op import IdentityOp
+from megatron.core.transformer.mlp import MLP, MLPSubmodules
+from megatron.core.transformer.spec_utils import ModuleSpec
+from megatron.core.transformer.transformer_layer import TransformerLayer, TransformerLayerSubmodules
+
+try:
+ from megatron.core.extensions.transformer_engine import (
+ TEColumnParallelLinear,
+ TEDotProductAttention,
+ TELayerNormColumnParallelLinear,
+ TENorm,
+ TERowParallelLinear,
+ )
+
+ HAVE_TE = True
+except ImportError:
+ HAVE_TE = False
+
+try:
+ from megatron.core.fusions.fused_layer_norm import FusedLayerNorm
+
+ HAVE_APEX = True
+ LNImpl = FusedLayerNorm
+except ImportError:
+ import warnings
+
+ from megatron.core.transformer.torch_layer_norm import WrappedTorchLayerNorm
+
+    warnings.warn('Apex is not installed. Falling back to Torch LayerNorm')
+ LNImpl = WrappedTorchLayerNorm
+
+
+def get_layer_spec(is_vit, normalization) -> ModuleSpec:
+ """Transformer Layer Spec"""
+ attn_mask_type = AttnMaskType.no_mask if is_vit else AttnMaskType.causal
+ if normalization == "LayerNorm":
+ norm = LNImpl
+ elif normalization == "RMSNorm":
+ norm = TENorm
+ else:
+        raise RuntimeError(f"Unknown normalization: {normalization}")
+
+ mlp = get_mlp_module_spec(use_te=False) # doesn't include norm.
+
+ return ModuleSpec(
+ module=TransformerLayer,
+ submodules=TransformerLayerSubmodules(
+ input_layernorm=norm,
+ self_attention=ModuleSpec(
+ module=SelfAttention,
+ params={"attn_mask_type": attn_mask_type},
+ submodules=SelfAttentionSubmodules(
+ linear_qkv=ColumnParallelLinear,
+ core_attention=DotProductAttention,
+ linear_proj=RowParallelLinear,
+ q_layernorm=IdentityOp,
+ k_layernorm=IdentityOp,
+ ),
+ ),
+ self_attn_bda=get_bias_dropout_add,
+ pre_mlp_layernorm=norm,
+ mlp=mlp,
+ mlp_bda=get_bias_dropout_add,
+ ),
+ )
+
+
+def get_layer_spec_te(is_vit=False) -> ModuleSpec:
+ """Transformer Layer Spec w/ TE Modules"""
+ attn_mask_type = AttnMaskType.no_mask if is_vit else AttnMaskType.causal
+
+ mlp = get_norm_mlp_module_spec_te()
+ return ModuleSpec(
+ module=TransformerLayer,
+ submodules=TransformerLayerSubmodules(
+ self_attention=ModuleSpec(
+ module=SelfAttention,
+ params={"attn_mask_type": attn_mask_type},
+ submodules=SelfAttentionSubmodules(
+ linear_qkv=TELayerNormColumnParallelLinear,
+ core_attention=TEDotProductAttention,
+ linear_proj=TERowParallelLinear,
+ q_layernorm=IdentityOp,
+ k_layernorm=IdentityOp,
+ ),
+ ),
+ self_attn_bda=get_bias_dropout_add,
+ pre_mlp_layernorm=IdentityOp,
+ mlp=mlp,
+ mlp_bda=get_bias_dropout_add,
+ ),
+ )
+
+
+def get_mlp_module_spec(use_te: bool = True) -> ModuleSpec:
+ """MLP Submodule Spec"""
+ # Dense MLP w/ or w/o TE modules.
+ return ModuleSpec(
+ module=MLP,
+ submodules=MLPSubmodules(
+ linear_fc1=TEColumnParallelLinear if use_te else ColumnParallelLinear,
+ linear_fc2=TERowParallelLinear if use_te else RowParallelLinear,
+ ),
+ )
+
+
+def get_norm_mlp_module_spec_te() -> ModuleSpec:
+ """Norm + MLP Submodule Spec"""
+ return ModuleSpec(
+ module=MLP,
+ submodules=MLPSubmodules(linear_fc1=TELayerNormColumnParallelLinear, linear_fc2=TERowParallelLinear),
+ )
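+
+
+# Illustrative usage (a sketch; assumes an already-built megatron-core
+# TransformerConfig named `config`):
+#
+#   from megatron.core.transformer.spec_utils import build_module
+#
+#   spec = get_layer_spec_te(is_vit=True) if HAVE_TE else get_layer_spec(True, "LayerNorm")
+#   layer = build_module(spec, config=config, layer_number=1)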
diff --git a/nemo/collections/vlm/mllama/__init__.py b/nemo/collections/vlm/mllama/__init__.py
new file mode 100644
index 000000000000..94a1021ca0f8
--- /dev/null
+++ b/nemo/collections/vlm/mllama/__init__.py
@@ -0,0 +1,17 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from transformers import PreTrainedTokenizerFast
+from nemo.lightning.io import track_io
+
+track_io(PreTrainedTokenizerFast)
diff --git a/nemo/collections/vlm/mllama/data/__init__.py b/nemo/collections/vlm/mllama/data/__init__.py
new file mode 100644
index 000000000000..0e89762a4c9a
--- /dev/null
+++ b/nemo/collections/vlm/mllama/data/__init__.py
@@ -0,0 +1,21 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from nemo.collections.vlm.mllama.data.lazy import MLlamaLazyDataModule
+from nemo.collections.vlm.mllama.data.mock import MockDataModule as MLlamaMockDataModule
+
+__all__ = [
+ "MLlamaMockDataModule",
+ "MLlamaLazyDataModule",
+]
diff --git a/nemo/collections/vlm/mllama/data/lazy.py b/nemo/collections/vlm/mllama/data/lazy.py
new file mode 100644
index 000000000000..5069f8593377
--- /dev/null
+++ b/nemo/collections/vlm/mllama/data/lazy.py
@@ -0,0 +1,308 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import os
+import re
+from typing import Any, Dict, List, Optional, Sequence
+
+import lightning.pytorch as pl
+import torch
+import torch.nn.functional as F
+from lightning.pytorch.utilities.types import EVAL_DATALOADERS, TRAIN_DATALOADERS
+from torch.utils import data
+from torch.utils.data import DataLoader, default_collate
+
+from nemo.collections.nlp.modules.common.megatron.utils import get_ltor_masks_and_position_ids
+from nemo.collections.vlm.mllama.model.utils import create_vision_mask_tensor
+from nemo.collections.vlm.neva.data.config import DataConfig, ImageDataConfig
+from nemo.collections.vlm.neva.data.lazy import IGNORE_INDEX, LazySupervisedDataset
+from nemo.lightning.pytorch.plugins import MegatronDataSampler
+
+
+class MLlamaDataset(LazySupervisedDataset):
+ """Dataset for supervised fine-tuning."""
+
+ def __init__(
+ self,
+ data_path,
+ data_config,
+ tokenizer,
+ image_processor,
+ sequence_length,
+ ):
+
+ if data_path.endswith(".json"):
+ super().__init__(data_path, data_config, tokenizer, image_processor, sequence_length)
+
+ elif data_path.endswith(".jsonl"):
+ super().__init__(None, data_config, tokenizer, image_processor, sequence_length)
+ logging.warning("Loading image inputs from SteerLM Dataset...")
+ if data_config.media_type == 'image':
+ image_folder = data_config.image_folder
+ for line in open(data_path, "r"):
+ record = json.loads(line)
+
+                    # This currently supports only a single image per record:
+                    # find the <img src="..."> tag, record the image, and swap
+                    # the tag for the <image> placeholder.
+                    record['image'] = []
+                    for turn in record['conversations']:
+                        matches = re.finditer(r'<img src="([^"]+)"', turn['value'])
+                        for match in matches:
+                            image_name = match.group(1).split("/")[-1]
+                            record['image'].append(os.path.join(image_folder, image_name))
+                            turn['value'] = turn['value'].replace(match.group(0), "<image>")
+
+                    self.list_data_dict.append(record)
+
+ else:
+ raise ValueError(f"Formatting of {data_path} is not supported in MLlama.")
+
+ def __getitem__(self, i) -> Dict[str, torch.Tensor]:
+ source = self.list_data_dict[i]
+ conversations = self._apply_prompt_templates(source, use_plain=self.conv_template == "plain")
+        conversations = conversations.replace("<image>", "<|image|>")
+ tokens, labels = self._tokenize_and_label(conversations)
+
+ image_dict = self._process_images(source)
+ data_dict = dict(
+ **image_dict,
+ tokens=tokens,
+ labels=labels,
+ )
+ return data_dict
+
+ def _process_images(self, source):
+ images = []
+ if 'image' in source:
+ if not isinstance(source['image'], list):
+ source['image'] = [source['image']]
+ for image_file in source['image']:
+ image = self.image_loader.open_image(image_file)
+ if image is None:
+ logging.warning(f"Image {image_file} could not be found!")
+ images.append(image)
+
+ if len(images) > 0:
+ image_dict = self.image_processor.preprocess(images, return_tensors='pt')
+ image_dict = {
+ k: v[0] for k, v in image_dict.items() if k in ["pixel_values", "aspect_ratio_ids", "num_tiles"]
+ } # remove batch dim
+ else:
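+            # Text-only sample: substitute a single all-zero "image" with
+            # num_tiles=0 so every entry in the batch has a uniform structure.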
+ image_dict = dict(
+ pixel_values=torch.zeros(
+ 1, 4, 3, self.image_processor.size['height'], self.image_processor.size['width']
+ ),
+ aspect_ratio_ids=torch.tensor([0], dtype=torch.long),
+ num_tiles=[0],
+ )
+
+ return image_dict
+
+ def collate_fn(self, instances: Sequence[Dict]) -> Dict[str, torch.Tensor]:
+ data_config = self.data_config
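+        # Round the longest sequence in the batch up to the next multiple of 64
+        # (a common tensor-core-friendly alignment), capped by the configured
+        # sequence length below.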
+ max_len = (max(instance['tokens'].shape[0] for instance in instances) - 1) // 64 * 64 + 64
+ if max_len > self.sequence_length:
+ logging.warning(f"Truncating sequence length {max_len} to {self.seq_length}.")
+ max_len = self.sequence_length
+ max_num_concurrent_media = max(instance['pixel_values'].shape[0] for instance in instances)
+ for instance in instances:
+ pad_len = max_len - instance['tokens'].shape[0]
+ instance['tokens'] = F.pad(instance['tokens'], (0, pad_len), 'constant', 0)
+ instance['labels'] = F.pad(instance['labels'], (0, pad_len), 'constant', IGNORE_INDEX)
+ pad_num_images = max_num_concurrent_media - instance['pixel_values'].shape[0]
+ instance['pixel_values'] = F.pad(
+ instance['pixel_values'], (0, 0, 0, 0, 0, 0, 0, 0, 0, pad_num_images), 'constant', 0
+ )
+ instance['aspect_ratio_ids'] = F.pad(
+ instance['aspect_ratio_ids'], (0, max(pad_num_images - 1, 0)), 'constant', 0
+ )
+ instance['num_tiles'] = F.pad(
+ torch.tensor(instance['num_tiles']), (0, max(pad_num_images - 1, 0)), 'constant', 0
+ )
+
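+        # 128256 is assumed to be the <|image|> special-token id in the
+        # Llama 3.2 vision vocabulary; the mask records, per image, the token
+        # span it covers.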
+ batch_masks = [create_vision_mask_tensor(instance['tokens'], 128256) for instance in instances]
+ batch = default_collate(instances)
+
+ tokenizer = self.tokenizer
+
+ tokens = batch['tokens']
+ labels = batch['labels']
+
+ attention_mask, loss_mask, position_ids = get_ltor_masks_and_position_ids(
+ data=tokens,
+ eod_token=tokenizer.eos_token_id,
+ eod_mask_loss=data_config.eod_mask_loss,
+ reset_attention_mask=data_config.reset_attention_mask,
+ reset_position_ids=data_config.reset_position_ids,
+ )
+
+ loss_mask[labels < 0] = 0.0
+ batch = {
+ 'tokens': tokens,
+ 'labels': labels,
+ 'batch_images': batch['pixel_values'],
+ 'batch_masks': batch_masks,
+ 'num_chunks': batch['num_tiles'],
+ 'attention_mask': attention_mask,
+ "aspect_ratio_ids": batch['aspect_ratio_ids'],
+ 'loss_mask': loss_mask,
+ 'position_ids': position_ids,
+ }
+ return batch
+
+
+class MLlamaLazyDataModule(pl.LightningDataModule):
+ def __init__(
+ self,
+ paths: str | List[str],
+ weights: Optional[List[float]] = None,
+ data_config: Optional[DataConfig] = ImageDataConfig,
+ seq_length: int = 2048,
+ decoder_seq_length: Optional[int] = None,
+ tokenizer: Optional = None,
+ image_processor: Optional = None,
+ micro_batch_size: int = 4,
+ global_batch_size: int = 8,
+ num_train_samples: int = 10_000,
+ num_val_samples: int = 10_000,
+ num_test_samples: int = 10_000,
+ num_workers: int = 8,
+ pin_memory: bool = True,
+ persistent_workers: bool = False,
+ use_packed_sequence: bool = False,
+ seed: int = 1234,
+ ) -> None:
+ super().__init__()
+ if not isinstance(paths, (list, tuple)):
+ paths = [paths]
+ if weights is not None:
+ assert len(weights) == len(paths)
+ if len(weights) == 1:
+ # weights must be None if there is only one dataset
+ weights = None
+
+ self.paths = paths
+ self.weights = weights
+ self.data_config = data_config
+ self.seq_length = seq_length
+ self.decoder_seq_length = decoder_seq_length
+ self.tokenizer = tokenizer
+ self.image_processor = image_processor
+ self.num_train_samples = num_train_samples
+ self.num_val_samples = num_val_samples
+ self.num_test_samples = num_test_samples
+ self.num_workers = num_workers
+ self.pin_memory = pin_memory
+ self.persistent_workers = persistent_workers
+ self.seed = seed
+ self.use_packed_sequence = use_packed_sequence
+ self.init_global_step = 0
+
+ if tokenizer is None or image_processor is None:
+ logging.warning(
+ f"Processor and tokenizer are not provided! Fall back to `meta-llama/Llama-3.2-11B-Vision-Instruct`."
+ )
+ from transformers import AutoProcessor
+
+ processor = AutoProcessor.from_pretrained("meta-llama/Llama-3.2-11B-Vision-Instruct")
+ self.tokenizer = tokenizer or processor.tokenizer
+ self.image_processor = image_processor or processor.image_processor
+
+ self.data_sampler = MegatronDataSampler(
+ seq_len=self.seq_length,
+ decoder_seq_len=self.decoder_seq_length,
+ micro_batch_size=micro_batch_size,
+ global_batch_size=global_batch_size,
+ dataloader_type="cyclic",
+ )
+
+ def setup(self, stage: str = "") -> None:
+        assert len(self.paths) == 1, "Blending multiple datasets is not yet supported in MLlama!"
+ if self.use_packed_sequence:
+ pass # TODO
+ else:
+ # TODO:
+ # rng = torch.Generator().manual_seed(self.seed)
+ # train_dataset, val_dataset, test_dataset = random_split(dataset, [train_size, val_size, test_size], generator=rng)
+ self._train_ds = MLlamaDataset(
+ self.paths[0], self.data_config, self.tokenizer, self.image_processor, self.seq_length
+ )
+ self._validation_ds = MLlamaDataset(
+ self.paths[0], self.data_config, self.tokenizer, self.image_processor, self.seq_length
+ )
+
+ def train_dataloader(self) -> TRAIN_DATALOADERS:
+ return self._create_dataloader(self._train_ds)
+
+ def val_dataloader(self) -> EVAL_DATALOADERS:
+ return self._create_dataloader(self._validation_ds)
+
+ def test_dataloader(self) -> EVAL_DATALOADERS:
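+        # NOTE: setup() above builds only train/validation datasets, so this
+        # assumes self._test_ds is assigned elsewhere before test time.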
+ return self._create_dataloader(self._test_ds)
+
+ def _create_dataloader(self, dataset, **kwargs) -> DataLoader:
+ self.init_global_step = self.trainer.global_step
+ self.data_sampler.init_global_step = self.init_global_step
+ return DataLoader(
+ dataset,
+ num_workers=self.num_workers,
+ pin_memory=self.pin_memory,
+ persistent_workers=self.persistent_workers,
+ collate_fn=getattr(dataset, 'collate_fn', data.dataloader.default_collate),
+ **kwargs,
+ )
+
+ def state_dict(self) -> Dict[str, Any]:
+ """Called when saving a checkpoint, implement to generate and save datamodule state.
+
+ Returns:
+ A dictionary containing datamodule state.
+
+ """
+ consumed_samples = self.data_sampler.compute_consumed_samples(self.trainer.global_step - self.init_global_step)
+ return {'consumed_samples': consumed_samples}
+
+ def load_state_dict(self, state_dict: Dict[str, Any]) -> None:
+ """Called when loading a checkpoint, implement to reload datamodule state given datamodule stat
+
+ Args:
+ state_dict: the datamodule state returned by ``state_dict``.
+
+ """
+ try:
+ from apex.transformer.pipeline_parallel.utils import _GLOBAL_NUM_MICROBATCHES_CALCULATOR
+ except ModuleNotFoundError:
+ from nemo.lightning.apex_utils import _GLOBAL_NUM_MICROBATCHES_CALCULATOR
+ consumed_samples = state_dict['consumed_samples']
+ self.data_sampler.init_consumed_samples = consumed_samples
+ self.data_sampler.prev_consumed_samples = consumed_samples
+ self.if_first_step = 1
+
+ if _GLOBAL_NUM_MICROBATCHES_CALCULATOR is not None:
+ num_microbatch_calculator = _GLOBAL_NUM_MICROBATCHES_CALCULATOR # noqa: SLF001
+
+ num_microbatch_calculator.update(
+ consumed_samples=consumed_samples,
+ consistency_check=False,
+ )
diff --git a/nemo/collections/vlm/mllama/data/mock.py b/nemo/collections/vlm/mllama/data/mock.py
new file mode 100644
index 000000000000..fae92b097200
--- /dev/null
+++ b/nemo/collections/vlm/mllama/data/mock.py
@@ -0,0 +1,188 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Dict, List, Optional, Tuple
+
+import lightning.pytorch as pl
+import numpy as np
+import torch
+from lightning.pytorch.utilities.types import EVAL_DATALOADERS, TRAIN_DATALOADERS
+from torch.utils import data
+from torch.utils.data import DataLoader, Dataset
+
+from nemo.lightning.pytorch.plugins import MegatronDataSampler
+
+
+class MockDataModule(pl.LightningDataModule):
+ def __init__(
+ self,
+ seq_length: int = 2048,
+        decoder_seq_length: Optional[int] = None,
+ vocab_size: int = 128256,
+ crop_size: Tuple[int, int] = (560, 560),
+ micro_batch_size: int = 4,
+ global_batch_size: int = 8,
+ rampup_batch_size: Optional[List[int]] = None,
+ tokenizer: Optional = None,
+ image_processor: Optional = None,
+ num_train_samples: int = 10_000,
+ num_val_samples: int = 10_000,
+ num_test_samples: int = 10_000,
+ num_workers: int = 8,
+ pin_memory: bool = True,
+ persistent_workers: bool = False,
+ ):
+ super().__init__()
+ self.seq_length = seq_length
+ self.decoder_seq_length = decoder_seq_length
+ self.num_train_samples = num_train_samples
+ self.num_val_samples = num_val_samples
+ self.num_test_samples = num_test_samples
+ self.num_workers = num_workers
+ self.pin_memory = pin_memory
+ self.persistent_workers = persistent_workers
+ self.vocab_size = vocab_size
+ self.crop_size = crop_size
+ self.tokenizer = tokenizer
+ self.image_processor = image_processor
+
+ self.data_sampler = MegatronDataSampler(
+ seq_len=self.seq_length,
+ decoder_seq_len=self.decoder_seq_length,
+ micro_batch_size=micro_batch_size,
+ global_batch_size=global_batch_size,
+ rampup_batch_size=rampup_batch_size,
+ )
+
+ def setup(self, stage: str = "") -> None:
+        self._train_ds = _MockMLlamaDataset(
+            self.vocab_size, self.crop_size, "train", self.num_train_samples, self.seq_length
+        )
+        self._validation_ds = _MockMLlamaDataset(
+            self.vocab_size, self.crop_size, "valid", self.num_val_samples, self.seq_length
+        )
+        self._test_ds = _MockMLlamaDataset(
+            self.vocab_size, self.crop_size, "test", self.num_test_samples, self.seq_length
+        )
+
+ def train_dataloader(self) -> TRAIN_DATALOADERS:
+ if not hasattr(self, "_train_ds"):
+ self.setup()
+ return self._create_dataloader(self._train_ds)
+
+ def val_dataloader(self) -> EVAL_DATALOADERS:
+ if not hasattr(self, "_validation_ds"):
+ self.setup()
+ return self._create_dataloader(self._validation_ds)
+
+ def test_dataloader(self) -> EVAL_DATALOADERS:
+ if not hasattr(self, "_test_ds"):
+ self.setup()
+ return self._create_dataloader(self._test_ds)
+
+ def _create_dataloader(self, dataset, **kwargs) -> DataLoader:
+ return DataLoader(
+ dataset,
+ num_workers=self.num_workers,
+ pin_memory=self.pin_memory,
+ persistent_workers=self.persistent_workers,
+ collate_fn=dataset.collate_fn,
+ **kwargs,
+ )
+
+
+class _MockMLlamaDataset(Dataset):
+ def __init__(
+ self,
+ vocab_size,
+ crop_size,
+ name: str,
+ num_samples: int,
+ seq_length: int,
+ seed: int = 42,
+ ) -> None:
+ super().__init__()
+ self.name = name
+ self.seq_length = seq_length
+
+ self.vocab_size = vocab_size
+
+ self.image_height, self.image_width = crop_size
+
+ self.length = num_samples
+ self.seed = seed
+
+ self.loss_mask = torch.ones(self.seq_length, dtype=torch.float)
+ self.position_ids = torch.arange(self.seq_length, dtype=torch.int64)
+
+ def __len__(self) -> int:
+ return self.length
+
+ def _get_text(self, idx: int) -> np.ndarray:
+ np_gen = np.random.default_rng(seed=(self.seed + idx))
+ return np_gen.integers(self.vocab_size, size=[self.seq_length], dtype=np.int64)
+
+ def __getitem__(self, idx) -> Dict[str, torch.Tensor]:
+ # Generate data of the expected size and datatype (based on GPTDataset).
+ np_gen = np.random.default_rng(seed=(self.seed + idx))
+ tokens = torch.from_numpy(np_gen.integers(self.vocab_size, size=[self.seq_length + 1], dtype=np.int64))
+ images = torch.from_numpy(np_gen.standard_normal((1, 4, 3, self.image_height, self.image_width)))
+ aspect_ratio_ids = torch.from_numpy(np_gen.integers(8, size=[1], dtype=np.int64)) + 1
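+        # +1 because aspect-ratio ids appear to be 1-based in the HF MLlama
+        # processor (0 is reserved for padded/absent images)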
+
+ labels = tokens.clone()
+ tokens = tokens[:-1]
+ labels = labels[1:]
+
+ return {
+ "images": images,
+ "masks": torch.tensor([[5, 512]]),
+ "num_chunks": torch.tensor([4]),
+ "tokens": tokens,
+ "aspect_ratio_ids": aspect_ratio_ids,
+ "loss_mask": self.loss_mask,
+ "position_ids": self.position_ids,
+ "labels": labels,
+ }
+
+ def _collate_fn(self, batch):
+ """
+ A default implementation of a collation function.
+ Users should override this method to define custom data loaders.
+ """
+ collated_batch = {}
+ collated_batch["batch_masks"] = [sample.pop("masks") for sample in batch]
+ collated_batch["attention_mask"] = None
+ collated_batch.update(data.dataloader.default_collate(batch))
+ collated_batch["batch_images"] = collated_batch.pop("images")
+ return collated_batch
+
+ def collate_fn(self, batch):
+ """Method that user pass as functor to DataLoader.
+
+ The method optionally performs neural type checking and add types to the outputs.
+
+ Please note, subclasses of Dataset should not implement `input_types`.
+
+ # Usage:
+ dataloader = torch.utils.data.DataLoader(
+ ....,
+ collate_fn=dataset.collate_fn,
+ ....
+ )
+
+ Returns
+ -------
+ Collated batch, with or without types.
+ """
+ return self._collate_fn(batch)
diff --git a/nemo/collections/vlm/mllama/data/sample_encoder.py b/nemo/collections/vlm/mllama/data/sample_encoder.py
new file mode 100644
index 000000000000..d7bfa08978c8
--- /dev/null
+++ b/nemo/collections/vlm/mllama/data/sample_encoder.py
@@ -0,0 +1,144 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+from dataclasses import field
+from typing import Dict
+
+import torch
+from megatron.energon import VQASample
+
+from nemo.collections.multimodal.data.energon.config import ImageTextSample, MultiModalSampleConfig
+from nemo.collections.multimodal.data.energon.sample_encoder import VQASampleEncoder
+from nemo.collections.vlm.mllama.model.utils import create_vision_mask_tensor
+from nemo.utils import logging
+
+
+class LlamaImageTextSample(ImageTextSample):
+ vision_mask: torch.Tensor = field(default_factory=lambda: torch.empty(0, dtype=torch.float))
+ aspect_ratio_ids: torch.Tensor = field(default_factory=lambda: torch.empty(0, dtype=torch.float))
+ aspect_ratio_mask: torch.Tensor = field(default_factory=lambda: torch.empty(0, dtype=torch.float))
+ num_tiles: torch.Tensor = field(default_factory=lambda: torch.empty(0, dtype=torch.float))
+
+
+class Llama3SampleEncoder(VQASampleEncoder):
+ def __init__(self, tokenizer, image_processor, multimodal_sample_config=MultiModalSampleConfig()):
+ """
+ Initialize the VQASampleEncoder.
+
+ Parameters:
+ tokenizer (Tokenizer): The HF tokenizer used for processing text.
+ image_processor (ImageProcessor): The HF image processor used for preprocessing images.
+ multimodal_sample_config (MultiModalSampleConfig, optional): Configuration object for multimodal samples.
+ Defaults to MultiModalSampleConfig().
+ """
+ super().__init__(tokenizer, image_processor, multimodal_sample_config)
+ self.conversation_template_config = multimodal_sample_config.conversation_template_config
+
+ def process_image(self, image) -> Dict[str, torch.Tensor]:
+ image_dict = self.image_processor.preprocess(image, return_tensors='pt', do_rescale=False)
+ return image_dict
+
+ def apply_prompt_template(self, input_text: VQASample, use_plain=False):
+ if self.conversation_template_config.chat_template:
+ self.tokenizer.chat_template = self.conversation_template_config.chat_template
+ elif self.tokenizer.chat_template is None:
+ raise ValueError(
+ "Both tokenizer and conversation template does not have chat template defined. Refer to "
+ "https://huggingface.co/docs/transformers/main/en/chat_templating "
+ )
+ logging.debug(f"apply_conversation_template context {input_text.context} answer {input_text.answers}")
+
+ messages = []
+ if self.conversation_template_config.system:
+ messages.append(
+ {'role': 'system', 'content': [{'type': 'text', 'text': self.conversation_template_config.system}]}
+ )
+
+ if isinstance(input_text.context, list) and isinstance(input_text.answers, list):
+            # Pair each context with its answer; extra items in the longer list are dropped
+ min_length = min(len(input_text.context), len(input_text.answers))
+ for i in range(min_length):
+ messages.append(
+ {
+ 'role': self.conversation_template_config.roles[0],
+ 'content': [{'type': 'text', 'text': input_text.context[i]}],
+ }
+ )
+ messages.append(
+ {
+ 'role': self.conversation_template_config.roles[1],
+ 'content': [{'type': 'text', 'text': input_text.answers[i]}],
+ }
+ )
+ elif isinstance(input_text.context, str) and isinstance(input_text.answers, str):
+ # Handle single context and answer as strings
+ messages.append(
+ {
+ 'role': self.conversation_template_config.roles[0],
+ 'content': [{'type': 'text', 'text': input_text.context}],
+ }
+ )
+ messages.append(
+ {
+ 'role': self.conversation_template_config.roles[1],
+ 'content': [{'type': 'text', 'text': input_text.answers}],
+ }
+ )
+ else:
+ raise ValueError(
+ f"VQA Sample context/answers should either be a List[str] or str. Other types not supported"
+ )
+
+ templated_prompt = self.tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=False)
+ logging.debug(f"apply prompt template templated_prompt {templated_prompt}")
+ return templated_prompt
+
+ def tokenize(self, prompt: str) -> torch.Tensor:
+ regex_pattern = '(' + '|'.join(re.escape(token) for token in [self.image_token.token_str]) + ')'
+ chunks = re.split(regex_pattern, prompt)
+ # Tokenize each chunk and replace special tokens with their indices
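+        # e.g. "<|image|>\nDescribe this." -> [image_token_id, t1, t2, ...],
+        # with the placeholder mapped directly to its special-token id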
+ tokenized_chunks = []
+ for chunk in chunks:
+ if chunk == self.image_token.token_str:
+ tokenized_chunks.append(self.image_token.token_id)
+ elif len(chunk) > 0:
+ tokenized_chunks.extend(self.tokenizer(chunk, add_special_tokens=False).input_ids)
+
+ return torch.tensor(tokenized_chunks, dtype=torch.long)
+
+ def encode(self, input_sample: VQASample, output_sample: LlamaImageTextSample):
+ conversation_prompt = self.apply_prompt_template(input_sample)
+ logging.debug(f"[Energon] task encoder encode_sample conversation_prompt {conversation_prompt}")
+ # tokenize prompt
+ tokens = self.tokenize(conversation_prompt)
+ labels = self.compute_labels(tokens, input_sample)
+
+ tokens = tokens[:-1].contiguous()
+ labels = labels[1:].contiguous()
+ logging.debug(f"[Energon] task encoder encode_sample after tokenize prompt tokens {tokens}")
+ logging.debug(f"[Energon] task encoder encode_sample labels {labels}")
+ loss_mask = self.compute_loss_mask(labels)
+ vision_mask = create_vision_mask_tensor(tokens=tokens, vision_token_id=self.image_token.token_id)
+ processed_image_dict = self.process_image(input_sample.image)
+ output_sample.__key__ = input_sample.__key__
+ output_sample.images = processed_image_dict['pixel_values'][0]
+ output_sample.aspect_ratio_ids = processed_image_dict['aspect_ratio_ids'][0]
+ output_sample.aspect_ratio_mask = processed_image_dict['aspect_ratio_mask'][0]
+ output_sample.num_tiles = processed_image_dict['num_tiles'][0]
+ output_sample.tokens = tokens
+ output_sample.labels = labels
+ output_sample.loss_mask = loss_mask
+ output_sample.vision_mask = vision_mask
+ return output_sample
diff --git a/nemo/collections/vlm/mllama/data/task_encoder.py b/nemo/collections/vlm/mllama/data/task_encoder.py
new file mode 100644
index 000000000000..a7dcd3c8fb2c
--- /dev/null
+++ b/nemo/collections/vlm/mllama/data/task_encoder.py
@@ -0,0 +1,108 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from dataclasses import dataclass, field
+from typing import Dict, List
+
+import torch
+import torch.nn.functional as F
+from megatron.energon import VQASample, batch_list, batch_pad_stack
+from torch.nn.utils.rnn import pad_sequence
+
+from nemo.collections.multimodal.data.energon.sample_encoder import SampleEncoder
+from nemo.collections.multimodal.data.energon.task_encoder import MultiModalTaskEncoder
+from nemo.collections.vlm.mllama.data.sample_encoder import Llama3SampleEncoder, LlamaImageTextSample
+
+
+def pad_or_truncate(sequence_batch, seq_length: int, padding_value: int):
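+ """Pad sequences in `sequence_batch` (B, T) with `padding_value` up to `seq_length`, or truncate them to it."""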
+ # Pad the sequence if it's shorter than seq_length
+ if sequence_batch.size(1) < seq_length:
+ pad_size = seq_length - sequence_batch.size(1)
+ sequence_batch = F.pad(sequence_batch, (0, pad_size), value=padding_value)
+ else:
+ # Truncate the sequence if it's longer than seq_length
+ sequence_batch = sequence_batch[:, :seq_length]
+
+ return sequence_batch
+
+
+@dataclass
+class LlamaImageTextRawBatch:
+ __keys__: List[str] = field(default_factory=list)
+
+ tokens: torch.Tensor = field(default_factory=lambda: torch.empty(0, dtype=torch.long))
+ labels: torch.Tensor = field(default_factory=lambda: torch.empty(0, dtype=torch.long))
+ loss_mask: torch.Tensor = field(default_factory=lambda: torch.empty(0, dtype=torch.float))
+
+ batch_images: torch.Tensor = field(default_factory=lambda: torch.empty(0))
+ batch_masks: torch.Tensor = field(default_factory=lambda: torch.empty(0))
+
+ aspect_ratio_ids: torch.Tensor = field(default_factory=lambda: torch.empty(0, dtype=torch.float))
+ aspect_ratio_mask: torch.Tensor = field(default_factory=lambda: torch.empty(0, dtype=torch.float))
+ num_chunks: torch.Tensor = field(default_factory=lambda: torch.empty(0, dtype=torch.float))
+
+
+class LlamaTaskEncoder(MultiModalTaskEncoder):
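+ """Energon task encoder that encodes VQA samples and batches them for Llama vision models."""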
+ def __init__(self, tokenizer, image_processor, multimodal_sample_config, seq_length=None):
+ super().__init__(tokenizer, image_processor, multimodal_sample_config)
+ self.encoders: Dict[str, SampleEncoder] = {
+ VQASample.__name__: Llama3SampleEncoder(tokenizer, image_processor, multimodal_sample_config)
+ }
+ self.seq_length = seq_length
+ self.ignore_index = multimodal_sample_config.ignore_place_holder
+
+ def batch(self, samples: List[LlamaImageTextSample]) -> LlamaImageTextRawBatch:
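+ """Collate encoded samples into a single padded LlamaImageTextRawBatch."""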
+
+ keys, images, tokens, labels, loss_mask, vision_mask = [], [], [], [], [], []
+ aspect_ratio_ids, aspect_ratio_mask, num_tiles = [], [], []
+ for sample in samples:
+ keys.append(sample.__key__)
+ images.append(sample.images)
+ tokens.append(sample.tokens)
+ labels.append(sample.labels)
+ loss_mask.append(sample.loss_mask)
+ vision_mask.append(sample.vision_mask)
+ aspect_ratio_ids.append(sample.aspect_ratio_ids)
+ aspect_ratio_mask.append(sample.aspect_ratio_mask)
+ num_tiles.append(sample.num_tiles)
+
+ batch_keys = batch_list(keys)
+ batch_images = batch_pad_stack(images)
+
+ batch_tokens = pad_sequence(tokens, batch_first=True, padding_value=self.tokenizer.pad_token_id)
+ batch_labels = pad_sequence(labels, batch_first=True, padding_value=self.ignore_index)
+ batch_loss_mask = batch_pad_stack(loss_mask)
+ if self.seq_length is not None:
+ seq_length = self.seq_length
+ else:
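+ # no fixed length configured: round up to the next multiple of 64 (e.g. 100 -> 128)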
+ seq_length = (batch_tokens.size(1) - 1) // 64 * 64 + 64
+ batch_tokens = pad_or_truncate(batch_tokens, seq_length, self.tokenizer.pad_token_id)
+ batch_labels = pad_or_truncate(batch_labels, seq_length, self.ignore_index)
+ batch_loss_mask = pad_or_truncate(batch_loss_mask, seq_length, 0)
+ assert batch_loss_mask.sum() > 0, "This batch has nothing to predict! Will trigger a nan loss."
+ batch_vision_mask = batch_pad_stack(vision_mask)
+ batch_aspect_ratio_ids = batch_pad_stack(aspect_ratio_ids)
+ batch_aspect_ratio_mask = batch_pad_stack(aspect_ratio_mask)
+ batch_num_tiles = torch.tensor(num_tiles)
+ return LlamaImageTextRawBatch(
+ __keys__=batch_keys,
+ batch_images=batch_images,
+ batch_masks=batch_vision_mask,
+ tokens=batch_tokens,
+ labels=batch_labels,
+ loss_mask=batch_loss_mask,
+ aspect_ratio_ids=batch_aspect_ratio_ids,
+ aspect_ratio_mask=batch_aspect_ratio_mask,
+ num_chunks=batch_num_tiles,
+ )
diff --git a/nemo/collections/vlm/mllama/model/__init__.py b/nemo/collections/vlm/mllama/model/__init__.py
new file mode 100644
index 000000000000..9eb076609f84
--- /dev/null
+++ b/nemo/collections/vlm/mllama/model/__init__.py
@@ -0,0 +1,20 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from nemo.collections.vlm.mllama.model.base import (
+ CrossAttentionTextConfig,
+ CrossAttentionVisionConfig,
+ MLlamaModel,
+ MLlamaModelConfig,
+)
diff --git a/nemo/collections/vlm/mllama/model/base.py b/nemo/collections/vlm/mllama/model/base.py
new file mode 100644
index 000000000000..d417af27aedd
--- /dev/null
+++ b/nemo/collections/vlm/mllama/model/base.py
@@ -0,0 +1,580 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import copy
+import math
+from dataclasses import dataclass
+from typing import Callable, Dict, List, Optional, Tuple
+
+import lightning.pytorch as L
+import torch
+import torch.distributed
+from einops import rearrange
+from megatron.core.enums import ModelType
+from megatron.core.models.vision.multimodal_projector import MultimodalProjector
+from megatron.core.optimizer import OptimizerConfig
+from megatron.core.tensor_parallel.layers import ColumnParallelLinear
+from megatron.core.transformer import MegatronModule
+from megatron.core.transformer.mlp import MLPSubmodules
+from megatron.core.transformer.spec_utils import ModuleSpec
+from megatron.core.transformer.transformer_config import TransformerConfig
+from PIL import Image as PIL_Image
+from torch import nn
+
+from nemo.collections.common.tokenizers.tokenizer_spec import TokenizerSpec
+from nemo.collections.llm import fn
+from nemo.collections.llm.gpt.model import local_layer_spec, transformer_engine_layer_spec
+from nemo.collections.llm.gpt.model.base import get_batch_on_this_context_parallel_rank, get_packed_seq_params
+from nemo.collections.llm.gpt.model.llama import Llama31Config, apply_rope_scaling
+from nemo.collections.vlm.mllama.model.language import CrossAttentionTextModel
+from nemo.collections.vlm.mllama.model.utils import _generate_cross_attention_mask, _pad_attention_masks
+from nemo.collections.vlm.mllama.model.vision import VisionEncoder
+from nemo.collections.vlm.neva.model.base import MODEL_CONFIG_ATTR
+from nemo.lightning import get_vocab_size, io
+from nemo.lightning.megatron_parallel import MaskedTokenLossReduction
+from nemo.lightning.pytorch.optim import MegatronOptimizerModule, OptimizerModule
+from nemo.utils import logging
+
+
+def llama_data_step(dataloader_iter) -> Dict[str, torch.Tensor]:
+ from megatron.core import parallel_state
+
+ # Based on: https://github.com/NVIDIA/Megatron-LM/blob/main/pretrain_gpt.py#L87
+ # https://github.com/NVIDIA/NeMo/blob/main/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py#L828-L842
+
+ batch = next(dataloader_iter)
+
+ _batch: dict
+ if isinstance(batch, tuple) and len(batch) == 3:
+ _batch = batch[0]
+ else:
+ _batch = batch
+
+ required_keys = set()
+ required_keys.update(
+ (
+ "attention_mask",
+ "tokens",
+ "batch_masks",
+ "position_ids",
+ "num_chunks",
+ )
+ )
+ if parallel_state.is_pipeline_first_stage():
+ required_keys.update(
+ (
+ "batch_images",
+ "aspect_ratio_ids",
+ )
+ )
+ if parallel_state.is_pipeline_last_stage():
+ required_keys.update(
+ (
+ "labels",
+ "loss_mask",
+ )
+ )
+
+ _batch = {
+ key: val.cuda(non_blocking=True) if key in required_keys and isinstance(val, torch.Tensor) else val
+ for key, val in _batch.items()
+ }
+ # slice batch along sequence dimension for context parallelism
+ output = get_batch_on_this_context_parallel_rank(_batch)
+
+ return output
+
+
+def llama_forward_step(model, batch) -> torch.Tensor:
+ forward_config = {
+ "batch_images": batch["batch_images"],
+ "batch_masks": batch["batch_masks"],
+ "tokens": batch["tokens"],
+ "position_ids": batch["position_ids"],
+ "aspect_ratio_ids": batch["aspect_ratio_ids"],
+ "num_chunks": batch["num_chunks"],
+ "labels": batch.get("labels", None),
+ }
+
+ if 'cu_seqlens' in batch:
+ forward_config['packed_seq_params'] = get_packed_seq_params(batch)
+
+ return model(**forward_config)
+
+
+def set_input_tensor(self, tensor):
+ pass
+
+
+@dataclass
+class CrossAttentionVisionConfig(TransformerConfig, io.IOMixin):
+ # core params
+
+ bias_activation_fusion: bool = True
+ bias_dropout_add_fusion: bool = True
+
+ # vision model params
+ num_layers: int = 32
+ hidden_size: int = 1280
+ num_attention_heads: int = 16
+ vision_chunk_size: int = -1 # image resolution for image models
+ vision_max_num_chunks: int = 4
+ num_global_layers: int = 8
+ max_num_tiles: int = 4
+ text_hidden_size: int = 4096
+ hidden_dropout: float = 0.0
+ attention_dropout: float = 0.0
+ ffn_dropout: float = 0.0
+ gated: bool = False
+ supported_aspect_ratios: Tuple[Tuple[int, int], ...] = (
+ (1, 1),
+ (1, 2),
+ (1, 3),
+ (1, 4),
+ (2, 1),
+ (2, 2),
+ (3, 1),
+ (4, 1),
+ )
+
+ @property
+ def max_aspect_ratio_id(self) -> int:
+ return len(self.supported_aspect_ratios)
+
+ def configure_model(self) -> "CrossAttentionVisionModel":
+ return CrossAttentionVisionModel(
+ self,
+ )
+
+
+@dataclass
+class CrossAttentionTextConfig(Llama31Config):
+ rotary_base: int = 500_000
+ seq_length: int = 8192
+ num_layers: int = 32
+ hidden_size: int = 4096
+ ffn_hidden_size: int = 14336
+ num_attention_heads: int = 32
+ num_cross_attention_layers: int = 8
+ vocab_size: int = 128256
+ apply_rope_fusion: bool = False
+
+ def _init_fusion_schedule(self, num_layers: int) -> List[int]:
+ llama_layers = list(range(self.num_layers))
+ # uniformly spread the cross-attention layers across the decoder layers
+ k = math.ceil(len(llama_layers) / num_layers)
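+ # e.g. num_layers=32, num_cross_attention_layers=8 -> k=4 and the schedule is
+ # [3, 7, 11, 15, 19, 23, 27, 31]: every k-th layer, anchored at the final layer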
+ return llama_layers[::-1][::k][:num_layers][::-1]
+
+ def configure_model(self, tokenizer, pre_process=True, post_process=True):
+ self.fusion_schedule = self._init_fusion_schedule(self.num_cross_attention_layers)
+ vp_size = self.virtual_pipeline_model_parallel_size
+ if vp_size:
+ p_size = self.pipeline_model_parallel_size
+ assert (
+ self.num_layers // p_size
+ ) % vp_size == 0, "Make sure the number of model chunks is the same across all pipeline stages."
+
+ transformer_layer_spec = self.transformer_layer_spec
+ if not isinstance(transformer_layer_spec, ModuleSpec):
+ transformer_layer_spec = transformer_layer_spec(self)
+
+ if hasattr(self, 'vocab_size'):
+ vocab_size = self.vocab_size
+ logging.info(
+ f"Use preset vocab_size: {vocab_size}, original vocab_size: {tokenizer.vocab_size}, dummy tokens:"
+ f" {vocab_size - tokenizer.vocab_size}."
+ )
+ else:
+ vocab_size = get_vocab_size(self, tokenizer.vocab_size, self.make_vocab_size_divisible_by)
+
+ model = CrossAttentionTextModel(
+ self,
+ transformer_layer_spec=transformer_layer_spec,
+ vocab_size=vocab_size,
+ max_sequence_length=self.seq_length,
+ fp16_lm_cross_entropy=self.fp16_lm_cross_entropy,
+ parallel_output=self.parallel_output,
+ share_embeddings_and_output_weights=self.share_embeddings_and_output_weights,
+ position_embedding_type=self.position_embedding_type,
+ rotary_percent=self.rotary_percent,
+ rotary_base=self.rotary_base,
+ seq_len_interpolation_factor=self.seq_len_interpolation_factor,
+ pre_process=pre_process,
+ post_process=post_process,
+ )
+ model.rotary_pos_emb.inv_freq = apply_rope_scaling(
+ model.rotary_pos_emb.inv_freq,
+ factor=self.scale_factor,
+ low_freq_factor=self.low_freq_factor,
+ high_freq_factor=self.high_freq_factor,
+ old_context_len=self.old_context_len,
+ )
+ return model
+
+
+@dataclass
+class MLlamaModelConfig(TransformerConfig, io.IOMixin):
+ language_model_config: Optional[CrossAttentionTextConfig] = None
+ vision_model_config: Optional[CrossAttentionVisionConfig] = None
+
+ encoder_pipeline_model_parallel_size: int = 0
+ encoder_tensor_model_parallel_size: int = 1
+ vision_num_cross_attention_layers: int = -1
+ num_layers: int = 1 # Placeholder, NOT used!
+ num_attention_heads: int = 8 # Placeholder, NOT used!
+
+ language_model_from_pretrained: Optional[str] = None # TODO
+ vision_model_from_pretrained: Optional[str] = None # TODO
+
+ forward_step_fn: Callable = llama_forward_step
+ data_step_fn: Callable = llama_data_step
+
+ def __post_init__(self):
+ if self.language_model_config is not None:
+ for attr in MODEL_CONFIG_ATTR:
+ setattr(self, attr, getattr(self.language_model_config, attr))
+
+ def configure_model(self, tokenizer) -> "MLlamaBaseModel":
+ from megatron.core import parallel_state as ps
+
+ self.language_model_config.tensor_model_parallel_size = self.tensor_model_parallel_size
+ self.vision_model_config.tensor_model_parallel_size = self.tensor_model_parallel_size
+ self.language_model_config.pipeline_model_parallel_size = self.pipeline_model_parallel_size
+
+ if self.encoder_pipeline_model_parallel_size > 0:
+ assert self.encoder_pipeline_model_parallel_size == 1, "ViT can only live on 1 pipeline stage."
+ self.vision_model_config.pipeline_model_parallel_size = self.encoder_pipeline_model_parallel_size
+ self.language_model_config.encoder_pipeline_model_parallel_size = self.encoder_pipeline_model_parallel_size
+ if self.encoder_tensor_model_parallel_size > 0:
+ self.vision_model_config.tensor_model_parallel_size = self.encoder_tensor_model_parallel_size
+
+ model = MLlamaBaseModel(
+ config=self,
+ tokenizer=tokenizer,
+ pre_process=ps.is_pipeline_first_stage()
+ or ps.get_pipeline_model_parallel_rank() == self.encoder_pipeline_model_parallel_size,
+ post_process=ps.is_pipeline_last_stage(),
+ add_encoder=ps.is_pipeline_first_stage(),
+ add_decoder=ps.is_pipeline_last_stage()
+ or ps.get_pipeline_model_parallel_rank() >= self.encoder_pipeline_model_parallel_size,
+ )
+
+ return model
+
+
+class CrossAttentionVisionModel(MegatronModule):
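+ """Vision encoder plus an affine projection producing vision tokens at the text hidden size."""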
+ def __init__(self, config) -> None:
+ super().__init__(config=config)
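+ # indices of intermediate encoder layers whose hidden states are concatenated with
+ # the final output, giving (5 + 1) * 1280 = 7680 input features for the projection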
+ return_intermediate = "3,7,15,23,30"
+ self.vision_input_dim = 1280
+ self.image_res = config.vision_chunk_size
+ self.max_num_chunks = config.vision_max_num_chunks
+ if return_intermediate is not None:
+ return_intermediate = [int(layer_idx) for layer_idx in return_intermediate.split(",")]
+ self.vision_input_dim = (len(return_intermediate) + 1) * self.vision_input_dim
+ self.patch_size = 14
+ self.vision_encoder = VisionEncoder(
+ config=config,
+ image_size=config.vision_chunk_size,
+ patch_size=self.patch_size,
+ return_intermediate=return_intermediate,
+ ).to(config.params_dtype)
+
+ projection_config = copy.deepcopy(config)
+ projection_config.hidden_size = config.text_hidden_size
+ affine_layer_spec = MLPSubmodules(linear_fc1=ColumnParallelLinear, linear_fc2=None)
+ self.vision_projection = MultimodalProjector(
+ config=projection_config,
+ submodules=affine_layer_spec,
+ projector_type="affine",
+ input_size=self.vision_input_dim,
+ )
+ self.vision_projection.encoder.skip_bias_add = False # Temporary fix for an MCore-side bug
+
+ def forward(self, images: torch.Tensor, aspect_ratio_ids: torch.Tensor) -> torch.Tensor:
+ # aspect_ratio_ids: (B, 1)
+ # vision_tokens: encoder output, projected below to the text hidden size
+ vision_tokens = self.vision_encoder(images.to(dtype=torch.bfloat16), aspect_ratio_ids)
+ vision_shape = vision_tokens.shape
+ vision_tokens = self.vision_projection(vision_tokens.reshape(-1, *vision_shape[-2:]))
+ vision_tokens = vision_tokens.reshape(*vision_shape[:-1], -1)
+ return vision_tokens
+
+ def set_input_tensor(self, tensor):
+ pass
+
+
+class MLlamaBaseModel(MegatronModule):
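+ """Megatron module combining the Mllama vision model and the cross-attention language model."""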
+ def __init__(
+ self,
+ config: MLlamaModelConfig,
+ tokenizer: Optional["TokenizerSpec"] = None,
+ pre_process: bool = True,
+ post_process: bool = True,
+ add_encoder: bool = True,
+ add_decoder: bool = True,
+ ) -> None:
+ super().__init__(config=config)
+
+ language_model_config = config.language_model_config
+ vision_model_config = config.vision_model_config
+ self.pre_process = pre_process
+ self.post_process = post_process
+
+ self.encoder_hidden_state = None
+ self.vision_model: Optional[CrossAttentionVisionModel] = None
+ self.language_model: Optional[CrossAttentionTextModel] = None
+
+ self.share_embeddings_and_output_weights = False
+ self.add_decoder = (language_model_config is not None) and add_decoder
+ self.add_encoder = (vision_model_config is not None) and add_encoder
+
+ if self.add_decoder:
+ self.language_model = language_model_config.configure_model(
+ tokenizer=tokenizer, pre_process=pre_process, post_process=post_process
+ )
+ self.share_embeddings_and_output_weights = self.language_model.share_embeddings_and_output_weights
+
+ if self.add_encoder:
+ self.vision_model = vision_model_config.configure_model()
+
+ self.model_type = ModelType.encoder_and_decoder
+ self.xattn_needed = True
+
+ self.patch_size = 14
+ self.image_res = vision_model_config.vision_chunk_size
+ self.max_num_chunks = vision_model_config.vision_max_num_chunks
+ logging.warning("[WARNING] NeMo Mllama will always pad images to max number of tiles. A fix is coming soon!")
+
+ def setup_cache(self, max_batch_size: int, dtype: torch.dtype):
+ self.language_model.setup_cache(max_batch_size, dtype)
+
+ def compute_xattn_caches_masks(
+ self,
+ vision_tokens: torch.Tensor,
+ vision_orig_shape: Tuple[int, int, int, int, int],
+ batch_masks: torch.Tensor,
+ num_chunks: torch.Tensor,
+ total_len: int,
+ ) -> Tuple[List, torch.Tensor, torch.Tensor]:
+ bsz, nimg, nchunk, ntok, image_token_dim = vision_orig_shape
+
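+ # dummy (non-fused) cross-attention layers contribute None entries here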
+ xattn_caches = [
+ layer.compute_xattn_kv_cache(vision_tokens) for layer in self.language_model.decoder.xattn_layers
+ ]
+
+ padded_masks = _pad_attention_masks(
+ batch_masks,
+ num_chunks,
+ total_len,
+ self.max_num_chunks,
+ vision_tokens.device,
+ )
+ vision_tokens = rearrange(
+ vision_tokens, "(nimg nchk ntok) b dim -> b nimg nchk ntok dim", nimg=nimg, nchk=nchunk, ntok=ntok
+ )
+ cross_attention_masks, full_text_row_masked_out_mask = _generate_cross_attention_mask(
+ text_token_count=total_len,
+ text_device="cuda",
+ text_dtype=next(self.language_model.parameters()).dtype,
+ vision_tokens=vision_tokens,
+ cross_attention_masks=padded_masks,
+ )
+
+ return (xattn_caches, cross_attention_masks, full_text_row_masked_out_mask)
+
+ def forward(
+ self,
+ position_ids: torch.Tensor,
+ tokens: torch.Tensor,
+ labels: Optional[torch.Tensor] = None,
+ batch_images: Optional[torch.Tensor] = None,
+ batch_masks: Optional[torch.Tensor] = None,
+ num_chunks: Optional[torch.Tensor] = None,
+ aspect_ratio_ids: Optional[torch.Tensor] = None,
+ cross_attention_masks: Optional[torch.Tensor] = None,
+ full_text_row_masked_out_mask: Optional[torch.Tensor] = None,
+ xattn_caches: Optional[List] = None,
+ ) -> torch.Tensor:
+ if xattn_caches is None:
+ bsz, max_num_images = batch_images.size(0), batch_images.size(1)
+ vision_orig_shape = (
+ bsz,
+ max_num_images,
+ self.max_num_chunks,
+ int((self.image_res / self.patch_size) ** 2 + 1),
+ self.config.hidden_size,
+ )
+ skip_vision_encoder = False
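+ # images are always padded to the maximum number of tiles (see warning in __init__)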
+ num_chunks[num_chunks > 0] = self.max_num_chunks
+ if max_num_images == 0:
+ skip_vision_encoder = True
+
+ if self.encoder_hidden_state is not None:
+ vision_tokens = self.encoder_hidden_state
+ else:
+ if skip_vision_encoder:
+ vision_tokens = torch.zeros(
+ vision_orig_shape,
+ device="cuda",
+ dtype=torch.bfloat16,
+ )
+ else:
+ vision_tokens = self.vision_model(batch_images, aspect_ratio_ids)
+ vision_tokens = rearrange(
+ vision_tokens, "b nimg nchk ntok dim -> (nimg nchk ntok) b dim"
+ ).contiguous()
+
+ if not self.add_decoder:
+ return vision_tokens
+
+ xattn_caches, cross_attention_masks, full_text_row_masked_out_mask = self.compute_xattn_caches_masks(
+ vision_tokens=vision_tokens,
+ vision_orig_shape=vision_orig_shape,
+ batch_masks=batch_masks,
+ num_chunks=num_chunks,
+ total_len=position_ids.shape[1],
+ )
+
+ assert self.add_decoder, "Language model required for forward pass."
+ language_embeddings = None
+ if self.pre_process:
+ language_embeddings = self.language_model.get_partially_trainable_embedding(tokens)
+ language_embeddings = language_embeddings.transpose(1, 0).contiguous() # [text_seq_len, b, h_language]
+
+ full_text_row_masked_out_mask = (
+ full_text_row_masked_out_mask[:, :, position_ids[0]].permute(2, 0, 1, 3).squeeze(2)
+ if cross_attention_masks is not None
+ else None
+ )
+ output = self.language_model(
+ input_ids=tokens,
+ position_ids=position_ids,
+ labels=labels,
+ decoder_input=language_embeddings,
+ attention_mask=None,
+ cross_attention_masks=(
+ cross_attention_masks[:, :, position_ids[0]] if cross_attention_masks is not None else None
+ ),
+ full_text_row_masked_out_mask=full_text_row_masked_out_mask,
+ xattn_caches=xattn_caches,
+ )
+ return output
+
+ def set_input_tensor(self, input_tensor) -> None:
+ """Set model chunk input tensor."""
+ if not isinstance(input_tensor, list):
+ input_tensor = [input_tensor]
+
+ if self.add_encoder:
+ self.vision_model.set_input_tensor(input_tensor[0])
+ elif self.add_decoder and self.pre_process:
+ self.encoder_hidden_state = input_tensor[0]
+ else:
+ assert len(input_tensor) == 2, 'input_tensor should contain encoder output.'
+ self.language_model.set_input_tensor(input_tensor[0])
+ self.encoder_hidden_state = input_tensor[1]
+
+
+class MLlamaModel(L.LightningModule, io.IOMixin, io.ConnectorMixin, fn.FNMixin):
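+ """LightningModule wrapper exposing training/validation steps and loss reduction for MLlamaBaseModel."""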
+ def __init__(
+ self,
+ config: MLlamaModelConfig,
+ optim: Optional[OptimizerModule] = None,
+ tokenizer: Optional["TokenizerSpec"] = None,
+ model_transform: Optional[Callable[[nn.Module], nn.Module]] = None,
+ ):
+ super().__init__()
+ self.config = config
+ self.tokenizer = tokenizer
+ self.optim = optim or MegatronOptimizerModule(config=OptimizerConfig(lr=1e-4, use_distributed_optimizer=True))
+ self.optim.connect(self) # This will bind the `configure_optimizers` method
+ self.model_transform = model_transform
+ self._training_loss_reduction = None
+ self._validation_loss_reduction = None
+
+ def configure_model(self) -> None:
+ if not hasattr(self, "module"):
+ self.module: MLlamaBaseModel = self.config.configure_model(self.tokenizer)
+
+ def forward(
+ self,
+ batch_images: List[List[PIL_Image.Image]],
+ tokens: torch.LongTensor,
+ position_ids: torch.LongTensor,
+ batch_masks: Optional[torch.Tensor] = None,
+ num_chunks: Optional[torch.Tensor] = None,
+ aspect_ratio_ids: Optional[torch.Tensor] = None,
+ labels: Optional[torch.Tensor] = None,
+ cross_attention_masks: Optional[torch.Tensor] = None,
+ full_text_row_masked_out_mask: Optional[torch.Tensor] = None,
+ xattn_caches: Optional[torch.Tensor] = None,
+ ) -> torch.Tensor:
+
+ output_tensor = self.module(
+ position_ids=position_ids,
+ tokens=tokens,
+ batch_images=batch_images,
+ batch_masks=batch_masks,
+ num_chunks=num_chunks,
+ aspect_ratio_ids=aspect_ratio_ids,
+ labels=labels,
+ cross_attention_masks=cross_attention_masks,
+ full_text_row_masked_out_mask=full_text_row_masked_out_mask,
+ xattn_caches=xattn_caches,
+ )
+
+ return output_tensor
+
+ def data_step(self, dataloader_iter) -> Dict[str, torch.Tensor]:
+ return self.config.data_step_fn(dataloader_iter)
+
+ def forward_step(self, batch) -> torch.Tensor:
+ return self.config.forward_step_fn(self, batch)
+
+ def training_step(self, batch, batch_idx=None) -> torch.Tensor:
+ # In mcore the loss-function is part of the forward-pass (when labels are provided)
+ return self.forward_step(batch)
+
+ def validation_step(self, batch, batch_idx=None) -> torch.Tensor:
+ # In mcore the loss-function is part of the forward-pass (when labels are provided)
+ return self.forward_step(batch)
+
+ @property
+ def training_loss_reduction(self) -> MaskedTokenLossReduction:
+ if not self._training_loss_reduction:
+ self._training_loss_reduction = MaskedTokenLossReduction()
+
+ return self._training_loss_reduction
+
+ @property
+ def validation_loss_reduction(self) -> MaskedTokenLossReduction:
+ if not self._validation_loss_reduction:
+ self._validation_loss_reduction = MaskedTokenLossReduction(validation_step=True)
+
+ return self._validation_loss_reduction
+
+
+__all__ = [
+ "MLlamaModel",
+ "MLlamaModelConfig",
+ "CrossAttentionTextConfig",
+ "CrossAttentionVisionConfig",
+ "llama_data_step",
+ "llama_forward_step",
+ "transformer_engine_layer_spec",
+ "local_layer_spec",
+]
diff --git a/nemo/collections/vlm/mllama/model/language.py b/nemo/collections/vlm/mllama/model/language.py
new file mode 100644
index 000000000000..b8985e53c54c
--- /dev/null
+++ b/nemo/collections/vlm/mllama/model/language.py
@@ -0,0 +1,722 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import math
+from contextlib import nullcontext
+from dataclasses import dataclass
+from typing import List, Literal, Optional, Union
+
+import torch
+from megatron.core import InferenceParams, parallel_state, tensor_parallel
+from megatron.core.dist_checkpointing.mapping import ShardedStateDict
+from megatron.core.dist_checkpointing.utils import replace_prefix_for_sharding
+from megatron.core.fusions.fused_bias_dropout import get_bias_dropout_add
+
+from megatron.core.models.gpt.gpt_model import GPTModel as MCoreGPTModel
+from megatron.core.packed_seq_params import PackedSeqParams
+from megatron.core.transformer.attention import Attention
+from megatron.core.transformer.custom_layers.transformer_engine import (
+ TEColumnParallelLinear,
+ TEDotProductAttention,
+ TELayerNormColumnParallelLinear,
+ TERowParallelLinear,
+)
+from megatron.core.transformer.enums import AttnMaskType
+from megatron.core.transformer.identity_op import IdentityOp
+from megatron.core.transformer.mlp import MLP, MLPSubmodules
+from megatron.core.transformer.module import MegatronModule
+from megatron.core.transformer.spec_utils import ModuleSpec, build_module
+from megatron.core.transformer.transformer_block import TransformerBlock
+from megatron.core.transformer.transformer_config import TransformerConfig
+from megatron.core.transformer.transformer_layer import TransformerLayer, TransformerLayerSubmodules
+from megatron.core.transformer.utils import sharded_state_dict_default
+from megatron.core.utils import make_viewless_tensor
+from torch import Tensor, nn
+
+from nemo.utils import logging
+
+try:
+ from megatron.core.transformer.custom_layers.transformer_engine import TEDelayedScaling, TENorm
+
+ HAVE_TE = True
+ LayerNormImpl = TENorm
+except ImportError:
+ from megatron.core.transformer.torch_layer_norm import WrappedTorchLayerNorm
+
+ HAVE_TE = False
+ LayerNormImpl = WrappedTorchLayerNorm
+
+
+@dataclass
+class MLlamaCrossAttentionSubmodules:
+ linear_q: Union[ModuleSpec, type] = None
+ linear_kv: Union[ModuleSpec, type] = None
+ core_attention: Union[ModuleSpec, type] = None
+ linear_proj: Union[ModuleSpec, type] = None
+ q_layernorm: Union[ModuleSpec, type] = None
+ k_layernorm: Union[ModuleSpec, type] = None
+
+
+class CrossAttentionTextModel(MCoreGPTModel):
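+ """GPT language model whose decoder interleaves cross-attention layers following a fusion schedule."""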
+ def __init__(
+ self,
+ config: TransformerConfig,
+ transformer_layer_spec: ModuleSpec,
+ vocab_size: int,
+ max_sequence_length: int,
+ pre_process: bool = True,
+ post_process: bool = True,
+ fp16_lm_cross_entropy: bool = False,
+ parallel_output: bool = True,
+ share_embeddings_and_output_weights: bool = False,
+ position_embedding_type: Literal['learned_absolute', 'rope', 'none'] = 'learned_absolute',
+ rotary_percent: float = 1.0,
+ rotary_base: int = 10000,
+ seq_len_interpolation_factor: Optional[float] = None,
+ ):
+ super().__init__(
+ config,
+ transformer_layer_spec,
+ vocab_size,
+ max_sequence_length,
+ pre_process,
+ post_process,
+ fp16_lm_cross_entropy,
+ parallel_output,
+ share_embeddings_and_output_weights,
+ position_embedding_type,
+ rotary_percent,
+ rotary_base,
+ seq_len_interpolation_factor,
+ )
+
+ # Overwrite the self.decoder
+ self.decoder = CrossAttentionTransformerBlock(
+ config=self.config,
+ spec=transformer_layer_spec,
+ pre_process=self.pre_process,
+ post_process=self.post_process,
+ )
+
+ if self.pre_process:
+ self.learnable_embedding = tensor_parallel.VocabParallelEmbedding(
+ num_embeddings=8,
+ embedding_dim=self.config.hidden_size,
+ init_method=self.config.init_method,
+ reduce_scatter_embeddings=False, # TODO double check this
+ config=self.config,
+ )
+
+ self.num_frozen_embeddings = self.embedding.word_embeddings.num_embeddings
+ self._thresh = self.num_frozen_embeddings - 1
+
+ def get_partially_trainable_embedding(self, x):
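+ # Ids below num_frozen_embeddings hit the frozen pretrained embedding table; ids at or
+ # above it are rebased and looked up in the small trainable learnable_embedding table.
+ # The 0/1 masks below blend the two lookups into one output.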
+ xz = torch.zeros_like(x, device=x.device)
+ oz = torch.ones_like(x, device=x.device)
+ x_orig = torch.minimum(x, torch.tensor(self._thresh, device=x.device))
+ x_new = torch.maximum(x, torch.tensor(self._thresh + 1, device=x.device)) - self.num_frozen_embeddings
+
+ mask_orig = torch.where(x >= self.num_frozen_embeddings, xz, oz).unsqueeze(-1)
+ mask_new = torch.where(x < self.num_frozen_embeddings, xz, oz).unsqueeze(-1)
+
+ x_orig = self.embedding(x_orig, None).transpose(0, 1)
+ x_new = self.learnable_embedding(x_new).type_as(x_orig)
+ return x_orig * mask_orig.type_as(x_orig) + x_new * mask_new.type_as(x_new)
+
+ def forward(
+ self,
+ input_ids: Tensor,
+ position_ids: Tensor,
+ attention_mask: Tensor,
+ decoder_input: Tensor = None,
+ cross_attention_masks: Tensor = None,
+ full_text_row_masked_out_mask: Tensor = None,
+ xattn_caches: Optional[List] = None,
+ labels: Tensor = None,
+ inference_params: InferenceParams = None,
+ packed_seq_params: PackedSeqParams = None,
+ extra_block_kwargs: dict = None,
+ ) -> Tensor:
+
+ # Decoder embedding.
+ if decoder_input is not None:
+ pass
+ elif self.pre_process:
+ raise ValueError("Require: decoder_input is not None or self.pre_process is False")
+ else:
+ # intermediate stage of pipeline
+ # decoder will get hidden_states from encoder.input_tensor
+ decoder_input = None
+
+ # Rotary positional embeddings (embedding is None for PP intermediate devices)
+ rotary_pos_emb = None
+ if self.position_embedding_type == 'rope':
+ rotary_seq_len = self.rotary_pos_emb.get_rotary_seq_len(
+ inference_params,
+ self.decoder,
+ decoder_input,
+ self.config,
+ packed_seq_params=None,
+ )
+ rotary_pos_emb = self.rotary_pos_emb(rotary_seq_len)
+
+ # Run decoder.
+ hidden_states = self.decoder(
+ hidden_states=decoder_input,
+ attention_mask=attention_mask,
+ inference_params=inference_params,
+ rotary_pos_emb=rotary_pos_emb,
+ packed_seq_params=packed_seq_params,
+ cross_attention_masks=cross_attention_masks,
+ full_text_row_masked_out_mask=full_text_row_masked_out_mask,
+ xattn_caches=xattn_caches,
+ **(extra_block_kwargs or {}),
+ )
+
+ if not self.post_process:
+ return hidden_states
+
+ # logits and loss
+ output_weight = None
+ if self.share_embeddings_and_output_weights:
+ output_weight = self.shared_embedding_or_output_weight()
+ logits, _ = self.output_layer(hidden_states, weight=output_weight)
+
+ if labels is None:
+ # [s b h] => [b s h]
+ return logits.transpose(0, 1).contiguous()
+
+ loss = self.compute_language_model_loss(labels, logits)
+
+ return loss
+
+
+class CrossAttentionTransformerBlock(TransformerBlock):
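+ """Transformer block that runs a (possibly dummy) cross-attention layer before each self-attention layer."""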
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
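+ # keep only cross-attention layer indices on this pipeline rank, rebased to local numbering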
+ self.fusion_schedule = [
+ x - self._get_layer_offset()
+ for x in self.config.fusion_schedule
+ if 0 <= (x - self._get_layer_offset()) < self.num_layers_per_pipeline_rank
+ ]
+ self.xattn_layers = []
+
+ for i in range(self.num_layers_per_pipeline_rank):
+ if i in self.fusion_schedule:
+ layer_spec = ModuleSpec(
+ module=CrossAttentionTransformerLayer,
+ submodules=TransformerLayerSubmodules(
+ cross_attention=ModuleSpec(
+ module=MLlamaCrossAttention,
+ params={"attn_mask_type": AttnMaskType.arbitrary},
+ submodules=MLlamaCrossAttentionSubmodules(
+ linear_q=TELayerNormColumnParallelLinear, # This wraps attention_norm before attention
+ linear_kv=TEColumnParallelLinear,
+ core_attention=TEDotProductAttention,
+ linear_proj=TERowParallelLinear,
+ q_layernorm=TENorm,
+ k_layernorm=TENorm,
+ ),
+ ),
+ cross_attn_bda=get_bias_dropout_add,
+ pre_mlp_layernorm=IdentityOp,
+ mlp=ModuleSpec(
+ module=MLP,
+ submodules=MLPSubmodules(
+ linear_fc1=TELayerNormColumnParallelLinear, # This wraps ffn_norm before feed_forward
+ linear_fc2=TERowParallelLinear,
+ ),
+ ),
+ mlp_bda=get_bias_dropout_add,
+ ),
+ )
+ self.xattn_layers.append(build_module(layer_spec, config=self.config, layer_number=i + 1))
+ else:
+ self.xattn_layers.append(DummyCrossAttentionTransformerLayer(config=self.config))
+ self.xattn_layers = torch.nn.ModuleList(self.xattn_layers)
+
+ assert len(self.xattn_layers) == len(self.layers), 'Check PP implementation for cross attention layers!'
+
+ def _get_layer_offset(self):
+ encoder_pipeline_model_parallel_size = getattr(self.config, "encoder_pipeline_model_parallel_size", 0)
+ decoder_pipeline_model_parallel_rank = (
+ parallel_state.get_pipeline_model_parallel_rank() - encoder_pipeline_model_parallel_size
+ )
+ return decoder_pipeline_model_parallel_rank * self.num_layers_per_pipeline_rank
+
+ def forward(
+ self,
+ hidden_states: Tensor,
+ attention_mask: Tensor,
+ xattn_caches: Optional[List] = None,
+ cross_attention_masks: Tensor = None,
+ full_text_row_masked_out_mask: Tensor = None,
+ rotary_pos_emb: Tensor = None,
+ inference_params: InferenceParams = None,
+ packed_seq_params: PackedSeqParams = None,
+ ):
+ # hidden_states (float): [s, b, h]
+ # attention_mask (bool): [1, 1, s, s]
+
+ if not self.pre_process:
+ hidden_states = self.input_tensor
+
+ hidden_states = make_viewless_tensor(
+ inp=hidden_states,
+ requires_grad=True,
+ keep_graph=True,
+ )
+
+ if self.config.sequence_parallel:
+ rng_context = tensor_parallel.get_cuda_rng_tracker().fork()
+ else:
+ rng_context = nullcontext()
+
+ if self.config.fp8:
+ import transformer_engine # To keep out TE dependency when not training in fp8
+
+ if self.config.fp8 == "e4m3":
+ fp8_format = transformer_engine.common.recipe.Format.E4M3
+ elif self.config.fp8 == "hybrid":
+ fp8_format = transformer_engine.common.recipe.Format.HYBRID
+ else:
+ raise ValueError("E4M3 and HYBRID are the only supported FP8 formats.")
+
+ fp8_recipe = TEDelayedScaling(
+ config=self.config,
+ fp8_format=fp8_format,
+ override_linear_precision=(False, False, not self.config.fp8_wgrad),
+ )
+ fp8_group = None
+ if parallel_state.model_parallel_is_initialized():
+ fp8_group = parallel_state.get_amax_reduction_group(with_context_parallel=True)
+ fp8_context = transformer_engine.pytorch.fp8_autocast(
+ enabled=True, fp8_recipe=fp8_recipe, fp8_group=fp8_group
+ )
+ else:
+ fp8_context = nullcontext()
+
+ with rng_context and fp8_context:
+ # Forward pass.
+ if self.config.recompute_granularity == 'full' and self.training:
+ raise NotImplementedError
+ else:
+ for l_no, (layer, xattn_layer) in enumerate(zip(self.layers, self.xattn_layers)):
+ layer: TransformerLayer
+ xattn_layer: Union[DummyCrossAttentionTransformerLayer, CrossAttentionTransformerLayer]
+ with self.offload_context:
+ if (len(self.cuda_graphs) == 0) or (not self.training):
+ hidden_states, context = xattn_layer(
+ hidden_states=hidden_states,
+ cross_attention_masks=cross_attention_masks,
+ xattn_cache=xattn_caches[l_no],
+ full_text_row_masked_out_mask=full_text_row_masked_out_mask,
+ rotary_pos_emb=rotary_pos_emb,
+ inference_params=inference_params,
+ packed_seq_params=packed_seq_params,
+ )
+ hidden_states, context = layer(
+ hidden_states=hidden_states,
+ attention_mask=attention_mask,
+ rotary_pos_emb=rotary_pos_emb,
+ inference_params=inference_params,
+ packed_seq_params=packed_seq_params,
+ )
+ # CUDA graph doesn't output context and is expected to be None
+ assert (context is None) or (not self.config.enable_cuda_graph) or (not self.training)
+ else:
+ assert (len(self.cuda_graphs) > l_no) and (
+ self.current_microbatch < len(self.cuda_graphs[l_no])
+ )
+ hidden_states = self.cuda_graphs[l_no][self.current_microbatch](
+ hidden_states, is_first_microbatch=(self.current_microbatch == 0)
+ )
+
+ if (
+ torch.is_grad_enabled()
+ and self.config.cpu_offloading
+ and self.group_prefetch_offload_commit_async is not None
+ ):
+ hidden_states = self.group_prefetch_offload_commit_async(hidden_states)
+
+ # Final layer norm.
+ if self.final_layernorm is not None:
+ hidden_states = self.final_layernorm(hidden_states)
+ hidden_states = make_viewless_tensor(inp=hidden_states, requires_grad=True, keep_graph=True)
+
+ return hidden_states
+
+ def sharded_state_dict(
+ self, prefix: str = '', sharded_offsets: tuple = (), metadata: dict = None
+ ) -> ShardedStateDict:
+ sharded_state_dict = {}
+
+ layer_prefix = f'{prefix}layers.'
+ num_layers = self.config.num_layers
+ for layer in self.layers:
+ offset = layer._get_layer_offset()
+ global_layer_offset = layer.layer_number - 1 # self.layer_number starts at 1
+ state_dict_prefix = f'{layer_prefix}{global_layer_offset - offset}.' # module list index in TransformerBlock # pylint: disable=line-too-long
+ sharded_prefix = layer_prefix
+ sharded_pp_offset = [(0, global_layer_offset, num_layers)] # PP sharding offset for ShardedTensors
+ layer_sharded_state_dict = layer.sharded_state_dict(state_dict_prefix, sharded_pp_offset, metadata)
+ replace_prefix_for_sharding(layer_sharded_state_dict, state_dict_prefix, sharded_prefix)
+ sharded_state_dict.update(layer_sharded_state_dict)
+
+ xlayer_prefix = f'{prefix}xattn_layers.'
+ for xlayer in self.xattn_layers:
+ if isinstance(xlayer, DummyCrossAttentionTransformerLayer):
+ continue
+ offset = xlayer._get_layer_offset()
+ global_layer_offset = xlayer.layer_number - 1
+ state_dict_prefix = f'{xlayer_prefix}{global_layer_offset - offset}.' # module list index in TransformerBlock # pylint: disable=line-too-long
+ sharded_prefix = f'{xlayer_prefix}{global_layer_offset}.'
+ sharded_pp_offset = []
+ xlayer_sharded_state_dict = xlayer.sharded_state_dict(state_dict_prefix, sharded_pp_offset, metadata)
+ replace_prefix_for_sharding(xlayer_sharded_state_dict, state_dict_prefix, sharded_prefix)
+ sharded_state_dict.update(xlayer_sharded_state_dict)
+
+ # Add modules other than self.layers
+ for name, module in self.named_children():
+ if module is not self.layers and module is not self.xattn_layers:
+ sharded_state_dict.update(
+ sharded_state_dict_default(module, f'{prefix}{name}.', sharded_offsets, metadata)
+ )
+
+ return sharded_state_dict
+
+
+class CrossAttentionTransformerLayer(TransformerLayer):
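+ """Cross-attention transformer layer with tanh-gated attention and feedforward."""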
+ def __init__(
+ self,
+ config: TransformerConfig,
+ submodules: TransformerLayerSubmodules,
+ layer_number: int = 1,
+ hidden_dropout: Optional[float] = None,
+ ):
+ super().__init__(
+ config=config,
+ submodules=submodules,
+ layer_number=layer_number,
+ hidden_dropout=hidden_dropout,
+ )
+
+ self.gate_attn = nn.Parameter(torch.zeros(1, dtype=self.config.params_dtype))
+ self.gate_ffn = nn.Parameter(torch.zeros(1, dtype=self.config.params_dtype))
+
+ def compute_xattn_kv_cache(self, xattn_tokens: Tensor) -> Tensor:
+ return self.cross_attention._compute_xattn_kv_cache(xattn_tokens)
+
+ def forward(
+ self,
+ hidden_states,
+ cross_attention_masks,
+ xattn_cache=None,
+ full_text_row_masked_out_mask=None,
+ rotary_pos_emb=None,
+ inference_params=None,
+ packed_seq_params=None,
+ ):
+ # hidden_states: [s, b, h]
+
+ # Residual connection.
+ residual = hidden_states
+
+ # Optional Layer norm after self-attention
+ pre_cross_attn_layernorm_output = self.pre_cross_attn_layernorm(hidden_states)
+
+ # Cross attention.
+ attention_output_with_bias = self.cross_attention(
+ pre_cross_attn_layernorm_output,
+ cross_attention_masks=cross_attention_masks,
+ xattn_cache=xattn_cache,
+ full_text_row_masked_out_mask=full_text_row_masked_out_mask,
+ rotary_pos_emb=rotary_pos_emb,
+ inference_params=inference_params,
+ )
+
+ _gate_attn = self.gate_attn.tanh()
+ assert isinstance(
+ attention_output_with_bias, tuple
+ ), "`attention_output_with_bias` needs to be tuple for gating."
+ attention_output_with_bias = tuple(
+ _gate_attn * output if output is not None else None for output in attention_output_with_bias
+ )
+
+ # TODO: could we move `bias_dropout_add_exec_handler` itself
+ # inside the module provided in the `bias_dropout_add_spec` module?
+ with self.bias_dropout_add_exec_handler():
+ hidden_states = self.cross_attn_bda(self.training, self.config.bias_dropout_fusion)(
+ attention_output_with_bias, residual, self.hidden_dropout
+ )
+
+ # Residual connection.
+ residual = hidden_states
+
+ # Optional Layer norm post the cross-attention.
+ pre_mlp_layernorm_output = self.pre_mlp_layernorm(hidden_states)
+
+ # MLP.
+ mlp_output_with_bias = self.mlp(pre_mlp_layernorm_output)
+
+ _gate_ffn = self.gate_ffn.tanh() * full_text_row_masked_out_mask
+ assert isinstance(mlp_output_with_bias, tuple), "`mlp_output_with_bias` needs to be a tuple for gating."
+ mlp_output_with_bias = tuple(
+ _gate_ffn * output if output is not None else None for output in mlp_output_with_bias
+ )
+
+ # TODO: could we move `bias_dropout_add_exec_handler` itself
+ # inside the module provided in the `bias_dropout_add_spec` module?
+ with self.bias_dropout_add_exec_handler():
+ hidden_states = self.mlp_bda(self.training, self.config.bias_dropout_fusion)(
+ mlp_output_with_bias, residual, self.hidden_dropout
+ )
+
+ # Jit compiled function creates 'view' tensor. This tensor
+ # potentially gets saved in the MPU checkpoint function context,
+ # which rejects view tensors. While making a viewless tensor here
+ # won't result in memory savings (like the data loader, or
+ # p2p_communication), it serves to document the origin of this
+ # 'view' tensor.
+ output = make_viewless_tensor(inp=hidden_states, requires_grad=hidden_states.requires_grad, keep_graph=True)
+
+ return output, None # context
+
+
+class DummyCrossAttentionTransformerLayer(MegatronModule):
+ """Dummy cross-attention transformer block with tanh-gated attention and feedforward."""
+
+ def __call__(
+ self,
+ hidden_states: Tensor,
+ *args,
+ **kwargs,
+ ):
+ return hidden_states, None
+
+ def compute_xattn_kv_cache(self, xattn_tokens: Tensor) -> Optional[Tensor]:
+ return None
+
+
+class MLlamaCrossAttention(Attention):
+ """Cross-attention layer class for Llama VLM support
+
+ Cross-attention layer takes input with size [s, b, h] and context with size
+ [s, b, h] and returns output of the same size.
+ """
+
+ def __init__(
+ self,
+ config: TransformerConfig,
+ submodules: MLlamaCrossAttentionSubmodules,
+ layer_number: int,
+ attn_mask_type=AttnMaskType.padding,
+ ):
+ super().__init__(
+ config=config,
+ submodules=submodules,
+ layer_number=layer_number,
+ attn_mask_type=attn_mask_type,
+ attention_type="cross",
+ )
+
+ # TODO might need special care when TP>8
+ assert self.query_projection_size % self.kv_projection_size == 0
+
+ self.linear_q = build_module(
+ submodules.linear_q,
+ self.config.hidden_size,
+ self.query_projection_size,
+ config=self.config,
+ init_method=self.config.init_method,
+ gather_output=False,
+ bias=self.config.add_bias_linear,
+ skip_bias_add=False,
+ is_expert=False,
+ )
+
+ self.linear_kv = build_module(
+ submodules.linear_kv,
+ self.config.hidden_size,
+ 2 * self.kv_projection_size,
+ config=self.config,
+ init_method=self.config.init_method,
+ gather_output=False,
+ bias=self.config.add_bias_linear,
+ skip_bias_add=False,
+ is_expert=False,
+ )
+
+ self.q_layernorm = build_module(
+ submodules.q_layernorm,
+ hidden_size=self.hidden_size_per_attention_head,
+ config=self.config,
+ eps=self.config.layernorm_epsilon,
+ )
+
+ self.k_layernorm = build_module(
+ submodules.k_layernorm,
+ hidden_size=self.hidden_size_per_attention_head,
+ config=self.config,
+ eps=self.config.layernorm_epsilon,
+ )
+
+ def get_key_value_tensors(self, key_value_states):
+ mixed_kv, _ = self.linear_kv(key_value_states)
+
+ # [sk, b, (np * 2 * hn)] --> [sk, b, np, 2 * hn]
+ new_tensor_shape = mixed_kv.size()[:-1] + (
+ self.num_query_groups_per_partition,
+ 2 * self.hidden_size_per_attention_head,
+ )
+ mixed_kv = mixed_kv.view(*new_tensor_shape)
+
+ # [sk, b, np, 2 * hn] --> 2 [sk, b, np, hn]
+ (key, value) = tensor_parallel.split_tensor_along_last_dim(mixed_kv, 2)
+ # Apply LayerNorm
+ key = self.k_layernorm(key.contiguous())
+ return key, value
+
+ def get_query_tensor(self, hidden_states):
+
+ # Attention head [sq, b, h] --> [sq, b, hp]
+ query, _ = self.linear_q(hidden_states)
+
+ # [sq, b, hp] --> [sq, b, np, hn]
+ new_tensor_shape = query.size()[:-1] + (
+ self.num_attention_heads_per_partition,
+ self.hidden_size_per_attention_head,
+ )
+ query = query.view(*new_tensor_shape)
+
+ # Apply LayerNorm
+ query = self.q_layernorm(query)
+
+ return query
+
+ def get_query_key_value_tensors(self, hidden_states, key_value_states):
+ query = self.get_query_tensor(hidden_states)
+ key, value = self.get_key_value_tensors(key_value_states)
+ return query, key, value
+
+ def forward(
+ self,
+ hidden_states,
+ cross_attention_masks,
+ xattn_cache=None,
+ full_text_row_masked_out_mask=None,
+ inference_params=None,
+ rotary_pos_emb=None,
+ packed_seq_params=None,
+ ):
+
+ # Duplicate rotary_pos_emb into a tuple if it isn't one already (parity with self-attention)
+ if rotary_pos_emb is not None and not isinstance(rotary_pos_emb, tuple):
+ rotary_pos_emb = (rotary_pos_emb,) * 2
+
+ # =====================
+ # Query, Key, and Value
+ # =====================
+ # Get the query, key and value tensors based on the type of attention -
+ # self or cross attn.
+ query = self.get_query_tensor(hidden_states)
+ key, value = xattn_cache
+
+ # ===================================================
+ # Adjust key, value, and rotary_pos_emb for inference
+ # ===================================================
+ key, value, rotary_pos_emb, attn_mask_type = self._adjust_key_value_for_inference(
+ inference_params, key, value, rotary_pos_emb
+ )
+
+ if packed_seq_params is not None:
+ query = query.squeeze(1)
+ key = key.squeeze(1)
+ value = value.squeeze(1)
+
+ # ==================================
+ # core attention computation
+ # ==================================
+
+ # In TE "True" means masked out
+ cross_attention_masks = torch.where(cross_attention_masks == 0, False, True)
+
+ if self.checkpoint_core_attention and self.training:
+ core_attn_out = self._checkpointed_attention_forward(
+ query,
+ key,
+ value,
+ cross_attention_masks,
+ attn_mask_type=attn_mask_type,
+ packed_seq_params=packed_seq_params,
+ )
+ else:
+ core_attn_out = self.core_attention(
+ query,
+ key,
+ value,
+ cross_attention_masks,
+ attn_mask_type=attn_mask_type,
+ packed_seq_params=packed_seq_params,
+ )
+
+ if packed_seq_params is not None:
+ # reshape to same output shape as unpacked case
+ # (t, np, hn) -> (t, b=1, h=np*hn)
+ # t is the pack size = sum (sq_i)
+ # note that batch is a dummy dimension in the packed case
+ core_attn_out = core_attn_out.reshape(core_attn_out.size(0), 1, -1)
+
+ # [b, head, s, dim]
+ core_attn_out = core_attn_out * full_text_row_masked_out_mask
+
+ # =================
+ # Output. [sq, b, h]
+ # =================
+
+ output, bias = self.linear_proj(core_attn_out)
+
+ return output, bias
+
+ def _compute_xattn_kv_cache(self, xattn_tokens: Tensor) -> Tensor:
+ key, value = self.get_key_value_tensors(xattn_tokens)
+ return torch.stack([key, value])
+
+
+def apply_rope_scaling(
+ inv_freq,
+ factor: int = 8,
+ low_freq_factor: int = 1,
+ high_freq_factor: int = 4,
+ old_context_len: int = 8192,
+):
+ logging.info(
+ f"Apply rope scaling with factor={factor}, low_freq_factor={low_freq_factor}, high_freq_factor={high_freq_factor}, old_context_len={old_context_len}."
+ )
+
+ low_freq_wavelen = old_context_len / low_freq_factor
+ high_freq_wavelen = old_context_len / high_freq_factor
+
+ wavelen = 2 * math.pi / inv_freq
+ # wavelen < high_freq_wavelen: do nothing
+ # wavelen > low_freq_wavelen: divide by factor
+ inv_freq_llama = torch.where(wavelen > low_freq_wavelen, inv_freq / factor, inv_freq)
+ # otherwise: interpolate between the two, using a smooth factor
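+ # smooth_factor rises linearly from 0 at wavelen == low_freq_wavelen to 1 at
+ # wavelen == high_freq_wavelen, blending scaled (inv_freq / factor) and unscaled values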
+ smooth_factor = (old_context_len / wavelen - low_freq_factor) / (high_freq_factor - low_freq_factor)
+ smoothed_inv_freq = (1 - smooth_factor) * inv_freq_llama / factor + smooth_factor * inv_freq_llama
+ is_medium_freq = ~(wavelen < high_freq_wavelen) * ~(wavelen > low_freq_wavelen)
+ inv_freq_llama = torch.where(is_medium_freq, smoothed_inv_freq, inv_freq_llama)
+
+ return inv_freq_llama
diff --git a/nemo/collections/vlm/mllama/model/mllama.py b/nemo/collections/vlm/mllama/model/mllama.py
new file mode 100644
index 000000000000..ce618f6c36df
--- /dev/null
+++ b/nemo/collections/vlm/mllama/model/mllama.py
@@ -0,0 +1,461 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+from dataclasses import dataclass, field
+from pathlib import Path
+from typing import Dict, Optional
+
+import torch
+import torch.distributed
+from megatron.core.transformer import TransformerConfig
+from torch import Tensor
+
+from nemo.collections.vlm.mllama.model.base import (
+ CrossAttentionTextConfig,
+ CrossAttentionVisionConfig,
+ MLlamaModel,
+ MLlamaModelConfig,
+)
+from nemo.lightning import MegatronStrategy, Trainer, io, teardown
+from nemo.lightning.pytorch.utils import dtype_from_hf
+
+
+@dataclass
+class MLlamaConfig11B(MLlamaModelConfig):
+ language_model_config: Optional[TransformerConfig] = field(default_factory=lambda: CrossAttentionTextConfig())
+ vision_model_config: Optional[TransformerConfig] = field(
+ default_factory=lambda: CrossAttentionVisionConfig(vision_chunk_size=448)
+ )
+
+
+@dataclass
+class MLlamaConfig11BInstruct(MLlamaModelConfig):
+ language_model_config: Optional[TransformerConfig] = field(default_factory=lambda: CrossAttentionTextConfig())
+ vision_model_config: Optional[TransformerConfig] = field(
+ default_factory=lambda: CrossAttentionVisionConfig(vision_chunk_size=560)
+ )
+
+
+@dataclass
+class MLlamaConfig90B(MLlamaModelConfig):
+ language_model_config: Optional[TransformerConfig] = field(
+ default_factory=lambda: CrossAttentionTextConfig(
+ hidden_size=8192,
+ ffn_hidden_size=28672,
+ num_attention_heads=64,
+ num_layers=80,
+ num_cross_attention_layers=20,
+ )
+ )
+ vision_model_config: Optional[TransformerConfig] = field(
+ default_factory=lambda: CrossAttentionVisionConfig(vision_chunk_size=560, text_hidden_size=8192)
+ )
+
+
+@dataclass
+class MLlamaConfig90BInstruct(MLlamaConfig90B):
+ pass
+
+
+@io.model_importer(MLlamaModel, "hf")
+class HFMLlamaImporter(io.ModelConnector["MLlamaModel", MLlamaModel]):
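+ """Importer that converts Hugging Face Mllama checkpoints to NeMo format."""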
+ def init(self) -> MLlamaModel:
+ return MLlamaModel(self.config, tokenizer=self.tokenizer)
+
+ def local_path(self, base_path: Optional[Path] = None) -> Path:
+ # note: kept only as a debugging hook; it simply forwards to the parent implementation
+ output_path = super().local_path(base_path)
+ return output_path
+
+ def apply(self, output_path: Path) -> Path:
+ from transformers import MllamaForConditionalGeneration
+
+ source = MllamaForConditionalGeneration.from_pretrained(str(self), torch_dtype='auto')
+
+ class ModelState:
+ def __init__(self, state_dict):
+ self._state_dict = state_dict
+
+ def state_dict(self):
+ return self._state_dict
+
+ state_dict = _rename_xattn_layer_nums_hf(source.state_dict())
+ source = ModelState(state_dict)
+ target = self.init()
+ dummy_trainer = Trainer(
+ devices=1,
+ accelerator="cpu",
+ strategy=MegatronStrategy(
+ store_optimizer_states=False,
+ save_ckpt_format='torch_dist',
+ ),
+ )
+ trainer = self.nemo_setup(target, dummy_trainer)
+ self.convert_state(source, target)
+ self.nemo_save(output_path, trainer)
+
+ print(f"Converted Mllama model to Nemo, model saved to {output_path}")
+
+ teardown(trainer, target)
+ del trainer, target
+
+ return output_path
+
+ def convert_state(self, source, target):
+ mapping = {}
+ transforms = []
+ mapping.update(
+ {
+ "language_model.model.layers.*.self_attn.o_proj.weight": "language_model.decoder.layers.*.self_attention.linear_proj.weight",
+ "language_model.model.xattn_layers.*.cross_attn.o_proj.weight": "language_model.decoder.xattn_layers.*.cross_attention.linear_proj.weight",
+ "language_model.model.xattn_layers.*.cross_attn.q_proj.weight": "language_model.decoder.xattn_layers.*.cross_attention.linear_q.weight",
+ "language_model.model.norm.weight": "language_model.decoder.final_layernorm.weight",
+ "language_model.lm_head.weight": "language_model.output_layer.weight",
+ "language_model.model.layers.*.post_attention_layernorm.weight": "language_model.decoder.layers.*.mlp.linear_fc1.layer_norm_weight",
+ "language_model.model.layers.*.mlp.down_proj.weight": "language_model.decoder.layers.*.mlp.linear_fc2.weight",
+ "language_model.model.layers.*.input_layernorm.weight": "language_model.decoder.layers.*.self_attention.linear_qkv.layer_norm_weight",
+ "language_model.model.xattn_layers.*.cross_attn.k_norm.weight": "language_model.decoder.xattn_layers.*.cross_attention.k_layernorm.weight",
+ "language_model.model.xattn_layers.*.input_layernorm.weight": "language_model.decoder.xattn_layers.*.cross_attention.linear_q.layer_norm_weight",
+ "language_model.model.xattn_layers.*.cross_attn.q_norm.weight": "language_model.decoder.xattn_layers.*.cross_attention.q_layernorm.weight",
+ "language_model.model.xattn_layers.*.post_attention_layernorm.weight": "language_model.decoder.xattn_layers.*.mlp.linear_fc1.layer_norm_weight",
+ "language_model.model.xattn_layers.*.mlp.down_proj.weight": "language_model.decoder.xattn_layers.*.mlp.linear_fc2.weight",
+ }
+ )
+
+ transforms.extend(
+ [
+ io.state_transform(
+ source_key="language_model.model.xattn_layers.*.cross_attn_attn_gate",
+ target_key="language_model.decoder.xattn_layers.*.gate_attn",
+ fn=_import_gate,
+ ),
+ io.state_transform(
+ source_key="language_model.model.xattn_layers.*.cross_attn_mlp_gate",
+ target_key="language_model.decoder.xattn_layers.*.gate_ffn",
+ fn=_import_gate,
+ ),
+ io.state_transform(
+ source_key=(
+ "language_model.model.layers.*.self_attn.q_proj.weight",
+ "language_model.model.layers.*.self_attn.k_proj.weight",
+ "language_model.model.layers.*.self_attn.v_proj.weight",
+ ),
+ target_key="language_model.decoder.layers.*.self_attention.linear_qkv.weight",
+ fn=_import_text_qkv,
+ ),
+ io.state_transform(
+ source_key=(
+ "language_model.model.layers.*.mlp.gate_proj.weight",
+ "language_model.model.layers.*.mlp.up_proj.weight",
+ ),
+ target_key="language_model.decoder.layers.*.mlp.linear_fc1.weight",
+ fn=_import_simple_concat,
+ ),
+ io.state_transform(
+ source_key=(
+ "language_model.model.xattn_layers.*.cross_attn.k_proj.weight",
+ "language_model.model.xattn_layers.*.cross_attn.v_proj.weight",
+ ),
+ target_key="language_model.decoder.xattn_layers.*.cross_attention.linear_kv.weight",
+ fn=_import_text_kv,
+ ),
+ io.state_transform(
+ source_key=(
+ "language_model.model.xattn_layers.*.mlp.gate_proj.weight",
+ "language_model.model.xattn_layers.*.mlp.up_proj.weight",
+ ),
+ target_key="language_model.decoder.xattn_layers.*.mlp.linear_fc1.weight",
+ fn=_import_simple_concat,
+ ),
+ io.state_transform(
+ source_key="language_model.model.embed_tokens.weight",
+ target_key=(
+ "language_model.embedding.word_embeddings.weight",
+ "language_model.learnable_embedding.weight",
+ ),
+ fn=_import_embedding_hf,
+ ),
+ ]
+ )
+
+ v = "vision_model.vision_encoder"
+ mapping.update(
+ {
+ "vision_model.global_transformer.layers.*.self_attn.o_proj.weight": f"{v}.global_transformer.layers.*.self_attention.linear_proj.weight",
+ "vision_model.global_transformer.layers.*.gate_attn": f"{v}.global_transformer.layers.*.gate_attn",
+ "vision_model.global_transformer.layers.*.gate_ffn": f"{v}.global_transformer.layers.*.gate_ffn",
+ "vision_model.global_transformer.layers.*.input_layernorm.bias": f"{v}.global_transformer.layers.*.input_layernorm.bias",
+ "vision_model.global_transformer.layers.*.input_layernorm.weight": f"{v}.global_transformer.layers.*.input_layernorm.weight",
+ "vision_model.global_transformer.layers.*.post_attention_layernorm.bias": f"{v}.global_transformer.layers.*.pre_mlp_layernorm.bias",
+ "vision_model.global_transformer.layers.*.post_attention_layernorm.weight": f"{v}.global_transformer.layers.*.pre_mlp_layernorm.weight",
+ "vision_model.global_transformer.layers.*.mlp.fc1.bias": f"{v}.global_transformer.layers.*.mlp.linear_fc1.bias",
+ "vision_model.global_transformer.layers.*.mlp.fc1.weight": f"{v}.global_transformer.layers.*.mlp.linear_fc1.weight",
+ "vision_model.global_transformer.layers.*.mlp.fc2.bias": f"{v}.global_transformer.layers.*.mlp.linear_fc2.bias",
+ "vision_model.global_transformer.layers.*.mlp.fc2.weight": f"{v}.global_transformer.layers.*.mlp.linear_fc2.weight",
+ "vision_model.transformer.layers.*.self_attn.o_proj.weight": f"{v}.transformer.layers.*.self_attention.linear_proj.weight",
+ "vision_model.transformer.layers.*.input_layernorm.bias": f"{v}.transformer.layers.*.input_layernorm.bias",
+ "vision_model.transformer.layers.*.input_layernorm.weight": f"{v}.transformer.layers.*.input_layernorm.weight",
+ "vision_model.transformer.layers.*.post_attention_layernorm.bias": f"{v}.transformer.layers.*.pre_mlp_layernorm.bias",
+ "vision_model.transformer.layers.*.post_attention_layernorm.weight": f"{v}.transformer.layers.*.pre_mlp_layernorm.weight",
+ "vision_model.transformer.layers.*.mlp.fc1.bias": f"{v}.transformer.layers.*.mlp.linear_fc1.bias",
+ "vision_model.transformer.layers.*.mlp.fc1.weight": f"{v}.transformer.layers.*.mlp.linear_fc1.weight",
+ "vision_model.transformer.layers.*.mlp.fc2.bias": f"{v}.transformer.layers.*.mlp.linear_fc2.bias",
+ "vision_model.transformer.layers.*.mlp.fc2.weight": f"{v}.transformer.layers.*.mlp.linear_fc2.weight",
+ "vision_model.class_embedding": f"{v}.class_embedding",
+ "vision_model.gated_positional_embedding.embedding": f"{v}.positional_embedding",
+ "vision_model.gated_positional_embedding.tile_embedding.weight": f"{v}.gated_tile_positional_embedding.weight",
+ "vision_model.gated_positional_embedding.gate": f"{v}.gated_positional_embedding_gate",
+ "vision_model.layernorm_post.bias": f"{v}.ln_post.bias",
+ "vision_model.layernorm_post.weight": f"{v}.ln_post.weight",
+ "vision_model.layernorm_pre.bias": f"{v}.ln_pre.bias",
+ "vision_model.layernorm_pre.weight": f"{v}.ln_pre.weight",
+ "vision_model.post_tile_positional_embedding.embedding.weight": f"{v}.post_tile_pos_embed.embedding.weight",
+ "vision_model.post_tile_positional_embedding.gate": f"{v}.post_tile_pos_embed.gate",
+ "vision_model.pre_tile_positional_embedding.embedding.weight": f"{v}.pre_tile_pos_embed.embedding.weight",
+ "vision_model.pre_tile_positional_embedding.gate": f"{v}.pre_tile_pos_embed.gate",
+ "multi_modal_projector.bias": "vision_model.vision_projection.encoder.bias",
+ "multi_modal_projector.weight": "vision_model.vision_projection.encoder.weight",
+ }
+ )
+ transforms.extend(
+ [
+ io.state_transform(
+ source_key=(
+ "vision_model.global_transformer.layers.*.self_attn.q_proj.weight",
+ "vision_model.global_transformer.layers.*.self_attn.k_proj.weight",
+ "vision_model.global_transformer.layers.*.self_attn.v_proj.weight",
+ ),
+ target_key=(f"{v}.global_transformer.layers.*.self_attention.linear_qkv.weight"),
+ fn=_import_vision_qkv,
+ ),
+ io.state_transform(
+ source_key=(
+ "vision_model.transformer.layers.*.self_attn.q_proj.weight",
+ "vision_model.transformer.layers.*.self_attn.k_proj.weight",
+ "vision_model.transformer.layers.*.self_attn.v_proj.weight",
+ ),
+ target_key=(f"{v}.transformer.layers.*.self_attention.linear_qkv.weight"),
+ fn=_import_vision_qkv,
+ ),
+ io.state_transform(
+ source_key="vision_model.patch_embedding.weight",
+ target_key=f"{v}.conv1._linear.weight",
+ fn=_import_patch_embedding_hf,
+ ),
+ ]
+ )
+
+ return io.apply_transforms(source, target, mapping=mapping, transforms=transforms)
+
+ @property
+ def tokenizer(self) -> "AutoTokenizer":
+ from nemo.collections.common.tokenizers.huggingface.auto_tokenizer import AutoTokenizer
+
+ return AutoTokenizer(self.save_hf_tokenizer_assets(str(self)))
+
+ @property
+ def config(self) -> MLlamaModelConfig:
+ from transformers import AutoConfig
+
+ source = AutoConfig.from_pretrained(str(self))
+
+ return MLlamaModelConfig(
+ language_model_config=self._language_model_config(source),
+ vision_model_config=self._vision_model_config(source),
+ )
+
+ def _language_model_config(self, source) -> Optional[CrossAttentionTextConfig]:
+ def _calculate_num_layers(num_hidden_layers, cross_attention_layers):
+ return num_hidden_layers - len(cross_attention_layers)
+
+ return CrossAttentionTextConfig(
+ rotary_base=source.text_config.rope_theta,
+ seq_length=8192,
+ num_layers=_calculate_num_layers(
+ source.text_config.num_hidden_layers, source.text_config.cross_attention_layers
+ ),
+ num_cross_attention_layers=len(source.text_config.cross_attention_layers),
+ hidden_size=source.text_config.hidden_size,
+ ffn_hidden_size=source.text_config.intermediate_size,
+ num_attention_heads=source.text_config.num_attention_heads,
+ num_query_groups=source.text_config.num_key_value_heads,
+ vocab_size=source.text_config.vocab_size,
+ fp16=(dtype_from_hf(source) == torch.float16),
+ bf16=(dtype_from_hf(source) == torch.bfloat16),
+ params_dtype=dtype_from_hf(source),
+ )
+
+ def _vision_model_config(self, source) -> Optional[CrossAttentionVisionConfig]:
+ return CrossAttentionVisionConfig(
+ num_layers=source.vision_config.num_hidden_layers,
+ hidden_size=source.vision_config.hidden_size,
+ num_attention_heads=source.vision_config.attention_heads,
+ vision_chunk_size=source.vision_config.image_size,
+ vision_max_num_chunks=source.vision_config.max_num_tiles,
+ text_hidden_size=source.text_config.hidden_size,
+ fp16=(dtype_from_hf(source) == torch.float16),
+ bf16=(dtype_from_hf(source) == torch.bfloat16),
+ params_dtype=dtype_from_hf(source),
+ )
+
+
+def _rename_xattn_layer_nums_hf(source: Dict):
+ def convert_layer_num(match):
+ layer_num = int(match.group(1))
+ cross_num = (layer_num - 3) // (cross_attention_frequency + 1)
+ if (layer_num - 3) % (cross_attention_frequency + 1) == 0:
+ new_layer_num = cross_num * cross_attention_frequency + 3
+ return f'xattn_layers.{new_layer_num}.'
+
+ new_layer_num = layer_num - cross_num - 1
+ return f'layers.{new_layer_num}.'
+
+ cross_attention_frequency = 4
+
+ output_dict = {}
+ for k, v in source.items():
+ if "language_model" in k:
+ output_dict[re.sub(r"layers\.(\d+)\.", convert_layer_num, k)] = v
+ else:
+ output_dict[k] = v
+ return output_dict
+
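+# Worked example (illustrative): with cross_attention_frequency = 4, the HF
+# checkpoint interleaves one cross-attention layer after every fourth
+# self-attention layer starting at index 3, so the renaming behaves as
+#   layers.0 -> layers.0     layers.3 -> xattn_layers.3
+#   layers.4 -> layers.3     layers.8 -> xattn_layers.7
+#   layers.9 -> layers.7     layers.13 -> xattn_layers.11
+# i.e. cross-attention layers keep a stride-4 index while the remaining
+# self-attention layers are renumbered contiguously.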
+
+def _import_embedding_hf(a):
+ return torch.split(a, a.shape[0] - 8, dim=0)
+
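+# Note: the importer assumes the HF embedding table carries 8 extra rows for
+# the multimodal special tokens, so the split sizes along dim 0 are
+# [vocab_size, 8]: the first chunk maps to `word_embeddings.weight` and the
+# trailing 8 rows to `learnable_embedding.weight` (see the state_transform above).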
+
+def _import_patch_embedding_hf(a):
+ return a.reshape(a.shape[0], -1)
+
+
+def _import_gate(gate):
+ return gate[0:1]
+
+
+def _import_vision_qkv(ctx: io.TransformCTX, q, k, v):
+ vision_config = ctx.target.config.vision_model_config
+
+ head_num = vision_config.num_attention_heads
+ num_query_groups = vision_config.num_query_groups
+ head_size = vision_config.kv_channels
+ hidden_size = vision_config.hidden_size
+ return _merge_qkv(q, k, v, head_num, num_query_groups, head_size, hidden_size)
+
+
+def _import_text_qkv(ctx: io.TransformCTX, q, k, v):
+ text_config = ctx.target.config.language_model_config
+
+ head_num = text_config.num_attention_heads
+ num_query_groups = text_config.num_query_groups
+ head_size = text_config.kv_channels
+ hidden_size = text_config.hidden_size
+ return _merge_qkv(q, k, v, head_num, num_query_groups, head_size, hidden_size)
+
+
+def _import_text_kv(ctx: io.TransformCTX, k, v):
+ text_config = ctx.target.config.language_model_config
+
+ head_num = text_config.num_attention_heads
+ num_query_groups = text_config.num_query_groups
+ head_size = text_config.kv_channels
+ hidden_size = text_config.hidden_size
+ return _merge_kv(k, v, head_num, num_query_groups, head_size, hidden_size)
+
+
+def _merge_kv(k: Tensor, v: Tensor, head_num: int, num_query_groups: int, head_size: int, hidden_size: int):
+ old_tensor_shape = k.size()
+ new_kv_tensor_shape = (num_query_groups, head_size) + old_tensor_shape[1:]
+
+ k = k.view(*new_kv_tensor_shape)
+ v = v.view(*new_kv_tensor_shape)
+
+ kv_weights = torch.stack((k, v), dim=1)
+ kv_weights = kv_weights.reshape(-1, *new_kv_tensor_shape[1:])
+ assert kv_weights.ndim == 3, kv_weights.shape
+ assert kv_weights.shape[0] == 2 * num_query_groups, kv_weights.shape
+ assert kv_weights.shape[1] == head_size, kv_weights.shape
+ assert kv_weights.shape[2] == old_tensor_shape[1], kv_weights.shape
+
+ kv_weights = kv_weights.reshape([head_size * 2 * num_query_groups, hidden_size])
+ return kv_weights
+
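+# Layout sketch (hypothetical sizes): with num_query_groups = 8 and
+# head_size = 128, `torch.stack((k, v), dim=1)` interleaves one K head and one
+# V head per group, so the fused weight has 2 * 8 * 128 = 2048 rows ordered
+# [k0 v0 k1 v1 ... k7 v7] -- the per-group layout the fused linear_kv weight
+# is expected to consume.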
+
+def _merge_qkv(
+ q: Tensor, k: Tensor, v: Tensor, head_num: int, num_query_groups: int, head_size: int, hidden_size: int
+):
+ heads_per_group = head_num // num_query_groups
+ old_tensor_shape = q.size()
+ new_q_tensor_shape = (head_num, head_size) + old_tensor_shape[1:]
+ new_kv_tensor_shape = (num_query_groups, head_size) + old_tensor_shape[1:]
+
+ q = q.view(*new_q_tensor_shape)
+ k = k.view(*new_kv_tensor_shape)
+ v = v.view(*new_kv_tensor_shape)
+
+ qkv_weights_l = []
+ for i in range(num_query_groups):
+ qkv_weights_l.append(q[i * heads_per_group : (i + 1) * heads_per_group, :, :])
+ qkv_weights_l.append(k[i : i + 1, :, :])
+ qkv_weights_l.append(v[i : i + 1, :, :])
+ qkv_weights = torch.cat(qkv_weights_l)
+ assert qkv_weights.ndim == 3, qkv_weights.shape
+ assert qkv_weights.shape[0] == (heads_per_group + 2) * num_query_groups, qkv_weights.shape
+ assert qkv_weights.shape[1] == head_size, qkv_weights.shape
+ assert qkv_weights.shape[2] == old_tensor_shape[1], qkv_weights.shape
+
+ qkv_weights = qkv_weights.reshape([head_size * (head_num + 2 * num_query_groups), hidden_size])
+
+ return qkv_weights
+
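+# Layout sketch (hypothetical sizes): for head_num = 32, num_query_groups = 8,
+# head_size = 128 and hidden_size = 4096, each group contributes its 4 query
+# heads followed by one K and one V head, giving a fused weight of
+# 128 * (32 + 2 * 8) = 6144 rows ordered [q q q q k v] * 8 along dim 0.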
+
+def _split_qkv(qkv, head_num: int, num_query_groups: int, head_size: int, hidden_size: int):
+ heads_per_group = head_num // num_query_groups
+ qkv_total_dim = head_num + 2 * num_query_groups
+
+ linear_qkv = qkv.reshape([qkv_total_dim, head_size, hidden_size])
+ q_slice = torch.cat(
+ [
+ torch.arange((heads_per_group + 2) * i, (heads_per_group + 2) * i + heads_per_group)
+ for i in range(num_query_groups)
+ ]
+ )
+ k_slice = torch.arange(heads_per_group, qkv_total_dim, (heads_per_group + 2))
+ v_slice = torch.arange(heads_per_group + 1, qkv_total_dim, (heads_per_group + 2))
+
+ q_proj = linear_qkv[q_slice].reshape(-1, hidden_size).cpu()
+ k_proj = linear_qkv[k_slice].reshape(-1, hidden_size).cpu()
+ v_proj = linear_qkv[v_slice].reshape(-1, hidden_size).cpu()
+
+ return q_proj, k_proj, v_proj
+
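+# Inverse of _merge_qkv. Small example: head_num = 4, num_query_groups = 2
+# gives heads_per_group = 2 and qkv_total_dim = 8, so the index slices are
+#   q_slice = [0, 1, 4, 5],  k_slice = [2, 6],  v_slice = [3, 7]
+# undoing the per-group [q q k v] interleaving.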
+
+def _import_simple_concat(a, b):
+ # for both (w1, w3) -> fc1, and (wk, wv) -> wkv
+ return torch.cat((a, b), dim=0)
+
+
+def _rename_xattn_layer_nums(source: Dict):
+ def convert_layer_num(match):
+ new_layer_num = int(match.group(1)) * 4 + 3
+ return f'.{new_layer_num}.'
+
+ output_dict = {}
+ for k, v in source.items():
+ if "cross_attention_layers" in k:
+ output_dict[re.sub(r"\.(\d+)\.", convert_layer_num, k)] = v
+ else:
+ output_dict[k] = v
+ return output_dict
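+
+# Example (hypothetical key): Meta-style checkpoints number their
+# cross-attention layers 0..N-1 consecutively, so
+# "...cross_attention_layers.1.wq.weight" becomes
+# "...cross_attention_layers.7.wq.weight" (i * 4 + 3), matching the stride-4
+# placement assumed by the HF renaming above.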
diff --git a/nemo/collections/vlm/mllama/model/utils.py b/nemo/collections/vlm/mllama/model/utils.py
new file mode 100644
index 000000000000..786be18020a4
--- /dev/null
+++ b/nemo/collections/vlm/mllama/model/utils.py
@@ -0,0 +1,180 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from typing import Tuple
+
+import torch
+
+
+def _pad_attention_masks(
+ masks: torch.Tensor,
+ num_chunks: torch.Tensor,
+ total_length: int,
+ max_chunks: int,
+ device: torch.device,
+ dtype=torch.bfloat16,
+) -> torch.Tensor:
+ """
+ Pads the provided masks to a uniform shape for batching.
+
+ Args:
+ masks (List[torch.Tensor]): Attention masks, one per sample in the batch.
+ num_chunks (torch.Tensor): Number of chunks for each mask.
+ total_length (int): Total sequence length for padding.
+ max_chunks (int): Maximum number of chunks to pad each mask to.
+ device (torch.device): Device to place the output tensor on.
+ dtype (torch.dtype): Data type for the output tensor. Default is `torch.bfloat16`.
+
+ Returns:
+ torch.Tensor: A padded tensor of shape [B, total_length, max_num_media, max_chunks]
+ where `B` is the batch size.
+ """
+ mask_value = 1.0
+ batch_size = len(masks)
+ max_num_media = max([len(m) for m in masks])
+
+ padded_masks = torch.full(
+ (batch_size, total_length, max_num_media, max_chunks),
+ mask_value,
+ dtype=dtype,
+ device=device,
+ )
+
+ for idx, (mask_group, chunks) in enumerate(zip(masks, num_chunks)):
+ for media_idx, (mask, chunk_count) in enumerate(zip(mask_group, chunks)):
+ if len(mask) == 2:
+ mask[1] = min(mask[1], total_length)
+ if mask[1] == -1:
+ mask[1] = total_length
+ padded_masks[idx, mask[0] : mask[1], media_idx, :chunk_count].fill_(0.0)
+
+ return padded_masks
+
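+# Usage sketch (hypothetical values): masks = [[[5, -1]]], num_chunks = [[2]],
+# total_length = 128 and max_chunks = 4 yield a [1, 128, 1, 4] tensor that is
+# 1.0 (masked) everywhere except rows 5..127 of the first two chunk columns,
+# which are zeroed (attendable).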
+
+def _get_full_row_masked_out_mask(
+ attention_bias: torch.Tensor,
+ mask_value: float,
+):
+ """
+ Determines whether each row in the attention bias tensor contains masked values.
+
+ Args:
+ attention_bias (torch.Tensor): A 4D tensor of shape [B, H, S1, S2], where:
+ - B: Batch size.
+ - H: Number of attention heads.
+ - S1: Length of the first sequence.
+ - S2: Length of the second sequence.
+ mask_value (float): The value used to represent masked positions in `attention_bias`.
+
+ Returns:
+ torch.Tensor: A 4D tensor of shape [B, H, S1, 1] with entries in {0, 1}:
+ 0 if the corresponding row along the last dimension is fully masked, 1 otherwise.
+ """
+ return (attention_bias != mask_value).any(dim=-1).type_as(attention_bias)[..., None]
+
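+# Example: with mask_value = 1.0, a bias row that is all ones (fully masked)
+# maps to 0 while a row with at least one unmasked entry maps to 1; the
+# trailing unit dimension lets the result broadcast-multiply against the mask.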
+
+def _generate_cross_attention_mask(
+ text_token_count: int,
+ text_device: torch.device,
+ text_dtype: torch.dtype,
+ vision_tokens: torch.Tensor,
+ cross_attention_masks: torch.Tensor,
+) -> Tuple[torch.Tensor, torch.Tensor]:
+ """
+ Generates a cross-attention mask for aligning text and vision tokens.
+
+ Args:
+ text_token_count (int): Number of tokens in the text sequence.
+ text_device (torch.device): Device to place the output tensor on.
+ text_dtype (torch.dtype): Data type for the output tensor.
+ vision_tokens (torch.Tensor): Vision tokens tensor of shape [B, I, C, T, D] where:
+ - B: Batch size.
+ - I: Number of images.
+ - C: Number of chunks per image.
+ - T: Number of vision tokens per chunk.
+ - D: Dimension of each vision token.
+ cross_attention_masks (torch.Tensor): Cross attention masks of shape [B, N, I, C], where:
+ - B: Batch size.
+ - N: Number of text tokens.
+ - I: Number of images.
+ - C: Number of chunks.
+
+ Returns:
+ Tuple[torch.Tensor, torch.Tensor]: A tuple containing:
+ - The adjusted cross-attention masks of shape [B, 1, N, I * C * T].
+ - The full row mask status tensor of shape [B, 1, N, 1].
+ """
+ assert vision_tokens is not None, "Vision tokens must be provided"
+ vision_token_length = vision_tokens.shape[3]
+ assert (
+ vision_tokens.shape[1] == cross_attention_masks.shape[2]
+ ), f"Mismatch in number of images given and number of masks provided: {vision_tokens.shape} vs {cross_attention_masks.shape}"
+ assert (
+ vision_tokens.shape[2] == cross_attention_masks.shape[3]
+ ), f"Mismatch between vision tokens and cross-attention masks: {vision_tokens.shape} vs {cross_attention_masks.shape}"
+ assert (
+ text_token_count == cross_attention_masks.shape[1]
+ ), f"Text sequence length {text_token_count} does not match cross-attention mask length {cross_attention_masks.shape[1]}"
+
+ batch_size, _, num_images, num_chunks = cross_attention_masks.shape
+ cross_attention_masks = cross_attention_masks.view(batch_size, text_token_count, -1).unsqueeze(1)
+
+ full_row_mask_status = _get_full_row_masked_out_mask(cross_attention_masks, mask_value=1.0)
+ cross_attention_masks = cross_attention_masks.repeat_interleave(vision_token_length, dim=3)
+ cross_attention_masks *= full_row_mask_status
+
+ return (
+ cross_attention_masks.to(device=text_device, dtype=text_dtype),
+ full_row_mask_status.to(device=text_device, dtype=text_dtype),
+ )
+
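+# Shape walkthrough (hypothetical sizes): with B = 1, N = 32 text tokens,
+# I = 2 images, C = 4 chunks and T = vision_tokens.shape[3] tokens per chunk,
+# the [1, 32, 2, 4] input mask is viewed to [1, 1, 32, 8] and expanded by
+# repeat_interleave to [1, 1, 32, 8 * T]; fully masked rows are zeroed and
+# flagged in full_row_mask_status so downstream code can suppress their
+# cross-attention output.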
+
+def create_vision_mask_tensor(tokens: torch.Tensor, vision_token_id: int = 128256) -> torch.Tensor:
+ """
+ Create a vision mask from a tensor of tokens and a vision token ID.
+
+ Args:
+ tokens (torch.Tensor): A 1D tensor of token IDs.
+ vision_token_id (int): The ID of the vision token.
+
+ Returns:
+ torch.Tensor: A tensor containing vision masks in the format [start, end].
+ """
+ # Get the locations of the vision tokens
+ vision_token_locations = (tokens == vision_token_id).nonzero(as_tuple=False).squeeze()
+
+ # If no vision token is found, return a 1x2 placeholder mask
+ # (note: torch.empty leaves the values uninitialized)
+ if vision_token_locations.numel() == 0:
+ return torch.empty(1, 2, dtype=torch.long)
+
+ vision_masks = []
+
+ # Handle case with only one vision token
+ if vision_token_locations.numel() == 1:
+ vision_masks.append([vision_token_locations.item(), len(tokens)])
+ else:
+ # Multiple vision tokens, pairwise masks
+ for i in range(len(vision_token_locations) - 1):
+ vision_masks.append([vision_token_locations[i].item(), vision_token_locations[i + 1].item()])
+ # Last vision token attends to all subsequent text
+ vision_masks.append([vision_token_locations[-1].item(), len(tokens)])
+
+ # Handle consecutive vision tokens
+ last_mask_end = vision_masks[-1][1]
+ for vision_mask in reversed(vision_masks):
+ if vision_mask[0] == vision_mask[1] - 1:
+ vision_mask[1] = last_mask_end
+ last_mask_end = vision_mask[1]
+
+ return torch.tensor(vision_masks, dtype=torch.long)
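+
+# Example: for tokens [IMG, a, b, IMG, c, d] (vision token at indices 0 and 3)
+# the masks are [[0, 3], [3, 6]]; for consecutive vision tokens [IMG, IMG, a, b]
+# the first mask is extended through the second one's span, giving
+# [[0, 4], [1, 4]].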
diff --git a/nemo/collections/vlm/mllama/model/vision.py b/nemo/collections/vlm/mllama/model/vision.py
new file mode 100644
index 000000000000..f023cc7bf943
--- /dev/null
+++ b/nemo/collections/vlm/mllama/model/vision.py
@@ -0,0 +1,722 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+import copy
+import math
+import types
+from contextlib import nullcontext
+from typing import TYPE_CHECKING, List, Optional, Tuple, Union
+
+import torch
+import torch.nn.functional as F
+from megatron.core import InferenceParams, parallel_state, tensor_parallel
+from megatron.core.fusions.fused_bias_dropout import get_bias_dropout_add
+
+from megatron.core.packed_seq_params import PackedSeqParams
+from megatron.core.transformer.attention import SelfAttention, SelfAttentionSubmodules
+from megatron.core.transformer.custom_layers.transformer_engine import (
+ TEColumnParallelLinear,
+ TEDotProductAttention,
+ TERowParallelLinear,
+)
+from megatron.core.transformer.enums import AttnMaskType
+from megatron.core.transformer.identity_op import IdentityOp
+from megatron.core.transformer.mlp import MLP, MLPSubmodules
+from megatron.core.transformer.module import MegatronModule
+from megatron.core.transformer.spec_utils import ModuleSpec, build_module
+from megatron.core.transformer.transformer_block import TransformerBlock
+from megatron.core.transformer.transformer_config import TransformerConfig
+from megatron.core.transformer.transformer_layer import TransformerLayer, TransformerLayerSubmodules
+from megatron.core.utils import make_viewless_tensor
+from PIL import Image as PIL_Image
+from torch import Tensor, nn
+
+if TYPE_CHECKING:
+ from nemo.collections.vlm import CrossAttentionVisionConfig
+
+try:
+ from megatron.core.transformer.custom_layers.transformer_engine import TEDelayedScaling, TENorm
+
+ HAVE_TE = True
+ LayerNormImpl = TENorm
+except ImportError:
+ from megatron.core.transformer.torch_layer_norm import WrappedTorchLayerNorm
+
+ HAVE_TE = False
+ LayerNormImpl = WrappedTorchLayerNorm
+
+
+def to_2tuple(x):
+ """
+ Convert an input to a 2-tuple.
+ """
+ if isinstance(x, collections.abc.Iterable):
+ return x
+ return (x, x)
+
+
+def _stack_images(
+ images: List[List[torch.Tensor]],
+ max_num_chunks: int,
+ image_res: int,
+ max_num_images: int,
+) -> Tuple[torch.Tensor, List[int]]:
+ """
+ Stack a list of image lists into a tensor while accounting for varying resolutions and aspect ratios.
+
+ Args:
+ images (List[List[torch.Tensor]]): Per-sample lists of pre-chunked image tensors to stack.
+ max_num_chunks (int): Maximum number of chunks per image.
+ image_res (int): Target resolution for each image.
+ max_num_images (int): Maximum number of images to stack.
+
+ Returns:
+ Tuple[torch.Tensor, List[int]]: Tensor of stacked images and a list of chunk counts for each image.
+ """
+ out_images, out_num_chunks = [], []
+ for imgs_sample in images:
+ out_images_i = torch.zeros(
+ max_num_images,
+ max_num_chunks,
+ 3,
+ image_res,
+ image_res,
+ )
+ _num_chunks = []
+ for j, chunks_image in enumerate(imgs_sample):
+ out_images_i[j, : chunks_image.shape[0]] = chunks_image
+ _num_chunks.append(chunks_image.shape[0])
+ out_images.append(out_images_i)
+ out_num_chunks.append(_num_chunks)
+ return torch.stack(out_images), out_num_chunks
+
+
+def build_encoder_attention_mask(
+ x: torch.Tensor, ar_ids: torch.Tensor, ntok: int, num_chunks: int, supported_aspect_ratios: List[List[int]]
+):
+ """
+ Build attention masks for a vision encoder to handle padding and token alignment.
+
+ Args:
+ x (torch.Tensor): Input tensor of shape (batch_size, sequence_length, hidden_dim).
+ ar_ids (torch.Tensor): Aspect ratio IDs for masking.
+ ntok (int): Number of tokens.
+ num_chunks (int): Number of chunks in the data.
+ supported_aspect_ratios (List[List[int]]): List of supported aspect ratios.
+
+ Returns:
+ torch.Tensor: Tensor containing the attention mask.
+ """
+ masks = []
+ for ar_id in ar_ids:
+ arx = supported_aspect_ratios[ar_id - 1]
+ mask_i = torch.ones((num_chunks, x.shape[1] // num_chunks), device=x.device)
+ mask_i[: arx[0] * arx[1], :ntok] = 0
+ mask_i = mask_i.view(num_chunks * x.shape[1] // num_chunks, -1)
+ mask_i = (mask_i @ mask_i.T).type(torch.bool)
+ mask_i = mask_i.unsqueeze(0)
+ masks.append(mask_i)
+ masks = torch.stack(masks)
+ return masks
+
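+# Note: valid (non-padded) positions are written as 0 and padding as 1, so the
+# outer product mask_i @ mask_i.T is True exactly where both query and key are
+# padding positions; stacking yields one [1, S, S] boolean mask per sample,
+# where S = x.shape[1].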
+
+def apply_scaling(freqs: torch.Tensor):
+ """
+ Scale frequency values based on predefined thresholds and a smoothing factor.
+ """
+ # Values obtained from grid search
+ scale_factor = 8
+ low_freq_factor = 1
+ high_freq_factor = 4
+ old_context_len = 8192 # original llama3 length
+
+ low_freq_wavelen = old_context_len / low_freq_factor
+ high_freq_wavelen = old_context_len / high_freq_factor
+ new_freqs = []
+ for freq in freqs:
+ wavelen = 2 * math.pi / freq
+ if wavelen < high_freq_wavelen:
+ new_freqs.append(freq)
+ elif wavelen > low_freq_wavelen:
+ new_freqs.append(freq / scale_factor)
+ else:
+ assert low_freq_wavelen != high_freq_wavelen
+ smooth = (old_context_len / wavelen - low_freq_factor) / (high_freq_factor - low_freq_factor)
+ new_freqs.append((1 - smooth) * freq / scale_factor + smooth * freq)
+ return torch.tensor(new_freqs, dtype=freqs.dtype, device=freqs.device)
+
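+# With these constants, low_freq_wavelen = 8192 and high_freq_wavelen = 2048:
+# frequencies with wavelength < 2048 tokens are kept, those with wavelength
+# > 8192 are divided by scale_factor = 8, and the band in between is blended
+# linearly between freq / 8 and freq -- the Llama-3.1-style long-context RoPE
+# adjustment.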
+
+# Use this spec for an implementation using modules in TE
+def get_image_transformer_layer_spec() -> ModuleSpec:
+ """
+ Create a specification for an image transformer layer.
+ """
+ image_transformer_submodules = TransformerLayerSubmodules(
+ input_layernorm=TENorm,
+ self_attention=ModuleSpec(
+ module=SelfAttentionNoBias,
+ params={"attn_mask_type": AttnMaskType.no_mask},
+ submodules=SelfAttentionSubmodules(
+ linear_qkv=TEColumnParallelLinear,
+ core_attention=TEDotProductAttention,
+ linear_proj=TERowParallelLinear,
+ q_layernorm=IdentityOp,
+ k_layernorm=IdentityOp,
+ ),
+ ),
+ self_attn_bda=get_bias_dropout_add,
+ pre_mlp_layernorm=TENorm,
+ mlp=ModuleSpec(
+ module=MLP,
+ submodules=MLPSubmodules(
+ linear_fc1=TEColumnParallelLinear,
+ linear_fc2=TERowParallelLinear,
+ ),
+ ),
+ mlp_bda=get_bias_dropout_add,
+ )
+ return ModuleSpec(module=ImageTransformerLayer, submodules=image_transformer_submodules)
+
+
+def forward_with_return_intermediate(
+ self,
+ hidden_states: Tensor,
+ attention_mask: Tensor,
+ context: Tensor = None,
+ context_mask: Tensor = None,
+ rotary_pos_emb: Tensor = None,
+ inference_params: InferenceParams = None,
+ packed_seq_params: PackedSeqParams = None,
+ return_intermediate: List[int] = None,
+):
+ """
+ Perform a forward pass through the transformer layers, optionally collecting the hidden
+ states of selected intermediate layers. Overrides the regular MCore TransformerBlock forward pass.
+ """
+ # hidden_states (float): [s, b, h]
+ # attention_mask (bool): [1, 1, s, s]
+
+ if not self.pre_process:
+ # See set_input_tensor()
+ hidden_states = self.input_tensor
+
+ hidden_states = make_viewless_tensor(inp=hidden_states, requires_grad=True, keep_graph=True)
+
+ if self.config.sequence_parallel:
+ rng_context = tensor_parallel.get_cuda_rng_tracker().fork()
+ else:
+ rng_context = nullcontext()
+
+ if self.config.fp8:
+ import transformer_engine # To keep out TE dependency when not training in fp8
+
+ if self.config.fp8 == "e4m3":
+ fp8_format = transformer_engine.common.recipe.Format.E4M3
+ elif self.config.fp8 == "hybrid":
+ fp8_format = transformer_engine.common.recipe.Format.HYBRID
+ else:
+ raise ValueError("E4M3 and HYBRID are the only supported FP8 formats.")
+
+ fp8_recipe = TEDelayedScaling(
+ config=self.config,
+ fp8_format=fp8_format,
+ override_linear_precision=(False, False, not self.config.fp8_wgrad),
+ )
+ fp8_group = None
+ if parallel_state.model_parallel_is_initialized():
+ fp8_group = parallel_state.get_amax_reduction_group(with_context_parallel=True)
+ fp8_context = transformer_engine.pytorch.fp8_autocast(enabled=True, fp8_recipe=fp8_recipe, fp8_group=fp8_group)
+ else:
+ fp8_context = nullcontext()
+
+ with rng_context and fp8_context:
+ # Forward pass.
+ if self.config.recompute_granularity == 'full' and self.training:
+ assert return_intermediate is None, (
+ "Config `return_intermediate` cannot be used with " "`recompute_granularity='full'`. "
+ )
+ hidden_states = self._checkpointed_forward(
+ hidden_states=hidden_states,
+ attention_mask=attention_mask,
+ context=context,
+ context_mask=context_mask,
+ rotary_pos_emb=rotary_pos_emb,
+ packed_seq_params=packed_seq_params,
+ )
+ else:
+ intermediate_hidden_states = []
+ for l_no, layer in enumerate(self.layers):
+ if return_intermediate is not None and l_no in return_intermediate:
+ intermediate_hidden_states.append(hidden_states)
+
+ with self.offload_context:
+ if (len(self.cuda_graphs) == 0) or (not self.training):
+ hidden_states, context = layer(
+ hidden_states=hidden_states,
+ attention_mask=attention_mask,
+ context=context,
+ context_mask=context_mask,
+ rotary_pos_emb=rotary_pos_emb,
+ inference_params=inference_params,
+ packed_seq_params=packed_seq_params,
+ )
+ # CUDA graph doesn't output context and is expected to be None
+ assert (context is None) or (not self.config.enable_cuda_graph) or (not self.training)
+ else:
+ # CUDA graph replay for layer `l_no` and microbatch `self.current_microbatch`
+ # CUDA graph requires positional arguments with the exception of is_first_microbatch.
+ # Also CUDA graph accepts only Tensor inputs and outputs. Hence, the arg list and
+ # returned list is limited to `hidden_states`.
+ assert (len(self.cuda_graphs) > l_no) and (
+ self.current_microbatch < len(self.cuda_graphs[l_no])
+ )
+ hidden_states = self.cuda_graphs[l_no][self.current_microbatch](
+ hidden_states, is_first_microbatch=(self.current_microbatch == 0)
+ )
+
+ if (
+ torch.is_grad_enabled()
+ and self.config.cpu_offloading
+ and self.group_prefetch_offload_commit_async is not None
+ ):
+ hidden_states = self.group_prefetch_offload_commit_async(hidden_states)
+
+ # Final layer norm.
+ if self.final_layernorm is not None:
+ hidden_states = self.final_layernorm(hidden_states)
+ # TENorm produces a "viewed" tensor. This will result in schedule.py's
+ # deallocate_output_tensor() throwing an error, so a viewless tensor is
+ # created to prevent this.
+ hidden_states = make_viewless_tensor(inp=hidden_states, requires_grad=True, keep_graph=True)
+
+ if return_intermediate is not None:
+ return hidden_states, torch.stack(intermediate_hidden_states, dim=-1)
+
+ return hidden_states
+
+
+class ColumnParallelConv2dPatch(MegatronModule):
+ """
+ Conv2D Patching layer with model parallelism. Applies convolution in a column-parallel fashion.
+
+ Args:
+ config (TransformerConfig): Configuration object for the layer.
+ in_channels (int): Number of input channels.
+ out_channels (int): Number of output channels.
+ kernel_size (Union[int, Tuple[int, int]]): Size of the convolution kernel.
+ stride (Union[int, Tuple[int, int]]): Stride of the convolution.
+ bias (Optional[bool], default=False): Whether to include a bias term.
+
+ Input:
+ torch.Tensor: Input tensor of shape (batch_size, in_channels, width, height).
+
+ Output:
+ torch.Tensor: Output tensor of shape (batch_size, num_tokens, out_channels).
+ """
+
+ def __init__(
+ self,
+ config: TransformerConfig,
+ in_channels: int,
+ out_channels: int,
+ kernel_size: Union[int, Tuple[int, int]],
+ stride: Union[int, Tuple[int, int]],
+ bias: Optional[bool] = False,
+ ) -> None:
+ super().__init__(config=config)
+ if isinstance(kernel_size, int):
+ kernel_size = (kernel_size, kernel_size)
+ self._unfold = torch.nn.Unfold(kernel_size=kernel_size, stride=stride)
+ self._linear = TEColumnParallelLinear(
+ in_channels * kernel_size[0] * kernel_size[1],
+ out_channels,
+ bias=bias,
+ config=self.config,
+ init_method=self.config.init_method,
+ gather_output=False,
+ skip_bias_add=False,
+ is_expert=False,
+ tp_comm_buffer_name='conv1',
+ )
+
+ def forward(self, x: torch.Tensor) -> torch.Tensor:
+ """Forward."""
+ x = self._unfold(x)
+ x = x.permute(0, 2, 1)
+ x = F.linear(x, self._linear.weight)
+ x = tensor_parallel.gather_from_tensor_model_parallel_region(x)
+ return x
+
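+# Note: ColumnParallelConv2dPatch's Unfold + Linear is mathematically
+# equivalent to a Conv2d whose stride equals its kernel size (non-overlapping
+# patches). For a hypothetical [B, 3, 560, 560] input with patch_size = 14,
+# _unfold yields [B, 3 * 14 * 14, 1600], the permute gives [B, 1600, 588], and
+# the linear projects each of the 40 * 40 = 1600 patch tokens to out_channels.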
+
+class PrecomputedTilePositionEmbedding(torch.nn.Module):
+ """
+ Module to compute positional embeddings for tiles with optional gating.
+
+ Args:
+ config (TransformerConfig): Configuration object.
+ gated (bool, default=False): Whether to apply gating to the embeddings.
+ """
+
+ def __init__(
+ self,
+ config: TransformerConfig,
+ gated: bool = False,
+ ):
+ super().__init__()
+ self.max_num_tiles = config.max_num_tiles
+ self.hidden_size = config.hidden_size
+ self.max_aspect_ratio_id = config.max_aspect_ratio_id
+
+ self.embedding = nn.Embedding(self.max_aspect_ratio_id + 1, self.max_num_tiles * self.hidden_size)
+ self.gated = gated
+ if gated:
+ self.gate = nn.Parameter(torch.zeros(1))
+
+ def forward(self, hidden_states: torch.Tensor, aspect_ratio_ids: torch.Tensor) -> torch.Tensor:
+ """Forward."""
+ embeddings = self.embedding(aspect_ratio_ids)
+ embeddings = embeddings.reshape(-1, self.max_num_tiles, 1, self.hidden_size)
+
+ if self.gated:
+ embeddings = embeddings * self.gate.tanh()
+
+ hidden_states = hidden_states + embeddings
+ return hidden_states
+
+
+class SelfAttentionNoBias(SelfAttention):
+ """
+ Self-attention layer implementation without bias.
+
+ Args:
+ config (TransformerConfig): Configuration for the transformer.
+ submodules (SelfAttentionSubmodules): Submodules required for self-attention.
+ layer_number (int): The layer number in the transformer stack.
+ attn_mask_type (AttnMaskType): Type of attention mask to apply.
+ """
+
+ def __init__(
+ self,
+ config: TransformerConfig,
+ submodules: SelfAttentionSubmodules,
+ layer_number: int,
+ attn_mask_type=AttnMaskType.padding,
+ ):
+ super().__init__(
+ config=config,
+ submodules=submodules,
+ layer_number=layer_number,
+ attn_mask_type=attn_mask_type,
+ )
+
+ # Override to remove bias since we don't have a good config for this.
+ self.linear_qkv = build_module(
+ submodules.linear_qkv,
+ self.config.hidden_size,
+ self.query_projection_size + 2 * self.kv_projection_size,
+ config=self.config,
+ init_method=self.config.init_method,
+ gather_output=False,
+ bias=False,
+ skip_bias_add=False,
+ is_expert=False,
+ tp_comm_buffer_name='qkv',
+ )
+
+ self.linear_proj = build_module(
+ submodules.linear_proj,
+ self.query_projection_size,
+ self.config.hidden_size,
+ config=self.config,
+ init_method=self.config.output_layer_init_method,
+ bias=False,
+ input_is_parallel=True,
+ skip_bias_add=True,
+ is_expert=False,
+ tp_comm_buffer_name='proj',
+ )
+
+
+class ImageTransformerLayer(TransformerLayer):
+ """
+ Transformer layer adapted for processing image data with optional gating.
+
+ Args:
+ config (TransformerConfig): Transformer configuration object.
+ submodules (TransformerLayerSubmodules): Submodules to use in the layer.
+ layer_number (int, default=1): Layer number in the transformer.
+ hidden_dropout (float, optional): Dropout rate for hidden layers.
+ """
+
+ def __init__(
+ self,
+ config: TransformerConfig,
+ submodules: TransformerLayerSubmodules,
+ layer_number: int = 1,
+ hidden_dropout: float = None,
+ ):
+ super().__init__(
+ config=config,
+ submodules=submodules,
+ layer_number=layer_number,
+ hidden_dropout=hidden_dropout,
+ )
+ self.gated = self.config.gated
+ if self.gated:
+ self.gate_attn = nn.Parameter(torch.zeros(1, dtype=self.config.params_dtype))
+ self.gate_ffn = nn.Parameter(torch.zeros(1, dtype=self.config.params_dtype))
+
+ def forward(
+ self,
+ hidden_states,
+ attention_mask=None,
+ context=None,
+ context_mask=None,
+ rotary_pos_emb=None,
+ rotary_pos_cos=None,
+ rotary_pos_sin=None,
+ attention_bias=None,
+ inference_params=None,
+ packed_seq_params=None,
+ ):
+ """Forward."""
+ # hidden_states: [s, b, h]
+
+ # Residual connection.
+ residual = hidden_states
+
+ # Optional Input Layer norm
+ input_layernorm_output = self.input_layernorm(hidden_states)
+
+ # Self attention.
+ attention_output_with_bias = self.self_attention(
+ input_layernorm_output,
+ attention_mask=attention_mask,
+ inference_params=inference_params,
+ rotary_pos_emb=rotary_pos_emb,
+ packed_seq_params=packed_seq_params,
+ )
+
+ _gate_attn = 1 if not self.gated else self.gate_attn.tanh()
+ assert isinstance(
+ attention_output_with_bias, tuple
+ ), "`attention_output_with_bias` needs to be tuple for gating."
+ attention_output_with_bias = tuple(
+ _gate_attn * output if output is not None else None for output in attention_output_with_bias
+ )
+
+ with self.bias_dropout_add_exec_handler():
+ hidden_states = self.self_attn_bda(self.training, self.config.bias_dropout_fusion)(
+ attention_output_with_bias, residual, self.hidden_dropout
+ )
+
+ # Residual connection.
+ residual = hidden_states
+
+ # Optional Layer norm post the cross-attention.
+ pre_mlp_layernorm_output = self.pre_mlp_layernorm(hidden_states)
+
+ # MLP.
+ mlp_output_with_bias = self.mlp(pre_mlp_layernorm_output)
+
+ _gate_ffn = 1 if not self.gated else self.gate_ffn.tanh()
+ assert isinstance(mlp_output_with_bias, tuple), "`mlp_output_with_bias` needs to be tuple for gating."
+ mlp_output_with_bias = tuple(
+ _gate_ffn * output if output is not None else None for output in mlp_output_with_bias
+ )
+
+ with self.bias_dropout_add_exec_handler():
+ hidden_states = self.mlp_bda(self.training, self.config.bias_dropout_fusion)(
+ mlp_output_with_bias, residual, self.hidden_dropout
+ )
+
+ output = make_viewless_tensor(inp=hidden_states, requires_grad=hidden_states.requires_grad, keep_graph=True)
+
+ # CUDA graph requires returned values to be Tensors
+ if self.config.external_cuda_graph and self.training:
+ return output
+ return output, context
+
+
+class VisionEncoder(MegatronModule):
+ """
+ Vision encoder module for processing image inputs with patch-based embeddings.
+
+ Args:
+ config ('CrossAttentionVisionConfig'): Configuration object for the encoder.
+ image_size (int, default=560): Input image size.
+ patch_size (int, default=14): Size of patches extracted from the image.
+ in_channels (int, default=3): Number of input channels.
+ pre_process (bool, default=True): Whether to preprocess input.
+ post_process (bool, default=True): Whether to postprocess output.
+ return_intermediate (Optional[List[int]]): Indices of layers whose intermediate hidden states are also returned.
+ """
+
+ def __init__(
+ self,
+ config: 'CrossAttentionVisionConfig',
+ image_size: int = 560,
+ patch_size: int = 14,
+ in_channels: int = 3,
+ pre_process: bool = True,
+ post_process: bool = True,
+ return_intermediate=None,
+ ):
+ super().__init__(config=config)
+ self.return_intermediate = return_intermediate
+ self.image_size = to_2tuple(image_size)
+ self.patch_size = to_2tuple(patch_size)
+ self.grid_size = (
+ self.image_size[0] // self.patch_size[0],
+ self.image_size[1] // self.patch_size[1],
+ )
+ self.pre_process = pre_process
+ self.post_process = post_process
+
+ self.max_aspect_ratio_id = self.config.max_aspect_ratio_id
+ self.max_num_tiles = config.max_num_tiles
+ width = config.hidden_size
+ self.conv1 = ColumnParallelConv2dPatch(
+ config=config,
+ in_channels=in_channels,
+ out_channels=width,
+ kernel_size=patch_size,
+ stride=patch_size,
+ bias=False,
+ )
+ scale = width**-0.5
+ self.class_embedding = nn.Parameter(scale * torch.randn(width))
+ self.positional_embedding = nn.Parameter(scale * torch.randn(self.grid_size[0] * self.grid_size[1] + 1, width))
+ self.ln_post = LayerNormImpl(config=config, hidden_size=width)
+ self.ln_pre = LayerNormImpl(config=config, hidden_size=width)
+ self.transformer = TransformerBlock(
+ config=self.config,
+ spec=get_image_transformer_layer_spec(),
+ post_layer_norm=False,
+ pre_process=self.pre_process,
+ post_process=self.post_process,
+ )
+ self.transformer.forward = types.MethodType(forward_with_return_intermediate, self.transformer)
+ # pre and post tile position embedding
+ global_config = copy.deepcopy(self.config)
+ global_config.num_layers = self.config.num_global_layers
+ global_config.gated = True
+ self.global_transformer = TransformerBlock(
+ config=global_config,
+ spec=get_image_transformer_layer_spec(),
+ post_layer_norm=False,
+ pre_process=self.pre_process,
+ post_process=self.post_process,
+ )
+ # pre and post tile position embedding
+ self.pre_tile_pos_embed = PrecomputedTilePositionEmbedding(
+ config=config,
+ gated=True,
+ )
+ self.post_tile_pos_embed = PrecomputedTilePositionEmbedding(
+ config=config,
+ gated=True,
+ )
+ self.gated_tile_positional_embedding = nn.Embedding(
+ self.max_aspect_ratio_id + 1, self.max_num_tiles * (self.grid_size[0] * self.grid_size[1] + 1) * width
+ )
+ self.gated_positional_embedding_gate = nn.Parameter(torch.zeros(1))
+
+ def apply_positional_embedding(self, x, aspect_ratio_ids):
+ """Apply regular position embedding and tile positonal embedding."""
+ bsz, num_chunks, num_tokens, dim = x.shape
+ x = x.view(bsz * num_chunks, num_tokens, dim)
+ x = x + self.positional_embedding * (1 - self.gated_positional_embedding_gate.tanh())
+ x = x.view(bsz, num_chunks, num_tokens, dim)
+ tile_position_embedding = self.gated_tile_positional_embedding(aspect_ratio_ids)
+ tile_position_embedding = tile_position_embedding.reshape(bsz, num_chunks, num_tokens, dim)
+ x = x + self.gated_positional_embedding_gate.tanh() * tile_position_embedding
+ return x
+
+ def apply_class_embedding(self, x):
+ """Concat class embedding tokens."""
+ x = torch.cat(
+ [
+ self.class_embedding.to(x.dtype)
+ + torch.zeros(x.shape[0], 1, x.shape[-1], dtype=x.dtype, device=x.device),
+ x,
+ ],
+ dim=1,
+ ) # shape = [*, grid ** 2 + 1, width]
+ return x
+
+ def forward(self, images: torch.Tensor, ar_ids: torch.Tensor) -> torch.Tensor:
+ """Forward."""
+ if images.ndim == 5:
+ num_concurrent_media = 1
+ bsz, num_chunks, nch, w, h = images.shape
+ else:
+ bsz, num_concurrent_media, num_chunks, nch, w, h = images.shape
+
+ images = images.reshape(bsz * num_concurrent_media * num_chunks, nch, w, h)
+ ar_ids = ar_ids.reshape(bsz * num_concurrent_media, 1)
+
+ # patch embedding
+ x = images.reshape(bsz * num_concurrent_media * num_chunks, nch, w, h)
+ x = self.conv1(x) # shape = [*, width, grid ** 2]
+ _, ntok, dim = x.shape
+ x = x.reshape(bsz * num_concurrent_media, num_chunks, ntok, dim)
+
+ # tile embeddings
+ x = self.pre_tile_pos_embed(x, ar_ids)
+ x = x.reshape(bsz * num_concurrent_media * num_chunks, ntok, dim)
+
+ # apply cls token
+ x = self.apply_class_embedding(x)
+ ntok += 1
+
+ # apply position embeddings
+ x = x.reshape(bsz * num_concurrent_media, num_chunks, ntok, dim)
+ x = self.apply_positional_embedding(x, ar_ids)
+
+ x = self.ln_pre(x)
+ x = x.view(bsz * num_concurrent_media, -1, dim)
+
+ npad = 0
+ attn_mask = build_encoder_attention_mask(x, ar_ids, ntok, num_chunks, self.config.supported_aspect_ratios)
+ x = x.transpose(0, 1).contiguous()
+ x, int_x = self.transformer(
+ hidden_states=x,
+ attention_mask=attn_mask,
+ return_intermediate=self.return_intermediate,
+ )
+
+ # [ntok * num_concurrent_media * num_chunks, bsz, hidden_size]
+ # -> [bsz, ntok * num_concurrent_media * num_chunks, hidden_size]
+ x, int_x = x.transpose(0, 1).contiguous(), int_x.transpose(0, 1).contiguous()
+ x = self.ln_post(x)
+ x = x.reshape(bsz * num_concurrent_media, num_chunks, ntok + npad, dim)
+ x = self.post_tile_pos_embed(x, ar_ids)
+ x = x.reshape(bsz * num_concurrent_media, num_chunks * (ntok + npad), dim)
+ x = x.transpose(0, 1).contiguous()
+ x = self.global_transformer(
+ hidden_states=x,
+ attention_mask=None,
+ )
+ x = x.transpose(0, 1)
+ x = x.reshape(bsz * num_concurrent_media, num_chunks, ntok + npad, dim)
+
+ # adding back intermediate layer outputs
+ x = x.reshape(bsz, num_concurrent_media, num_chunks, ntok, dim)
+ int_x = int_x.reshape(bsz * num_concurrent_media, num_chunks, ntok + npad, -1)
+ # int_x = contract_num_tokens_from_mult8(int_x, npad)
+ int_x = int_x.reshape(bsz, num_concurrent_media, num_chunks, ntok, -1)
+ x = torch.cat([x, int_x], dim=-1)
+ return x
diff --git a/nemo/collections/vlm/neva/data/__init__.py b/nemo/collections/vlm/neva/data/__init__.py
index bbd502e21c80..df9716fe5610 100644
--- a/nemo/collections/vlm/neva/data/__init__.py
+++ b/nemo/collections/vlm/neva/data/__init__.py
@@ -14,16 +14,18 @@
from nemo.collections.vlm.neva.data.config import DataConfig, ImageDataConfig, VideoDataConfig
from nemo.collections.vlm.neva.data.lazy import NevaLazyDataModule
-from nemo.collections.vlm.neva.data.mock import MockDataModule
+from nemo.collections.vlm.neva.data.llava_next_energon import LlavaNextTaskEncoder
+from nemo.collections.vlm.neva.data.mock import MockDataModule as NevaMockDataModule
from nemo.collections.vlm.neva.data.multimodal_tokens import ImageToken, MultiModalToken, VideoToken
__all__ = [
"NevaLazyDataModule",
- "MockDataModule",
+ "NevaMockDataModule",
"DataConfig",
"ImageDataConfig",
"VideoDataConfig",
"MultiModalToken",
"ImageToken",
"VideoToken",
+ "LlavaNextTaskEncoder",
]
diff --git a/nemo/collections/vlm/neva/data/api.py b/nemo/collections/vlm/neva/data/api.py
index c2e51e033d8a..15ba45c82fd9 100644
--- a/nemo/collections/vlm/neva/data/api.py
+++ b/nemo/collections/vlm/neva/data/api.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from nemo.collections.vlm.neva.data.lazy import NevaLazyDataModule
from nemo.collections.vlm.neva.data.mock import MockDataModule
diff --git a/nemo/collections/vlm/neva/data/conversation.py b/nemo/collections/vlm/neva/data/conversation.py
index 22c435cb1fd2..58953dc53b7a 100644
--- a/nemo/collections/vlm/neva/data/conversation.py
+++ b/nemo/collections/vlm/neva/data/conversation.py
@@ -34,6 +34,7 @@ class SeparatorStyle(Enum):
CHATML = auto()
LLAMA_2 = auto()
LLAMA_3 = auto()
+ MLLAMA = auto()
MISTRAL = auto()
NVGPT = auto()
QWEN = auto()
@@ -76,7 +77,6 @@ def process_chat_template(self, tokenizer_name_or_path, messages):
def get_prompt(self):
messages = self.messages
- messages = self.process_prompt_with_images(messages)
if self.sep_style == SeparatorStyle.SINGLE:
ret = self.system + self.sep
@@ -99,6 +99,8 @@ def get_prompt(self):
if type(message) is tuple:
message, _, _ = message
ret += role + ": " + message + seps[i % 2]
+ # Add space to make sure the labels can be correctly generated.
+ self.messages[i][1] = " " + self.messages[i][1]
else:
ret += role + ":"
@@ -153,6 +155,10 @@ def get_prompt(self):
tokenizer_name_or_path = self.tokenizer_name_or_path or "meta-llama/Meta-Llama-3-8B-Instruct"
ret = self.process_chat_template(tokenizer_name_or_path, messages)
+ elif self.sep_style == SeparatorStyle.MLLAMA:
+ tokenizer_name_or_path = self.tokenizer_name_or_path or "meta-llama/Llama-3.2-11B-Vision-Instruct"
+ ret = self.process_chat_template(tokenizer_name_or_path, messages)
+
elif self.sep_style == SeparatorStyle.NVGPT:
ret = self.sep2 + self.system + self.sep
for role, message in messages:
@@ -458,6 +464,18 @@ def dict(self):
stop_str="<|eot_id|>",
)
+conv_mllama = Conversation(
+ system="",
+ roles=("user", "assistant"),
+ version="llama_v3_2",
+ messages=[],
+ offset=0,
+ sep="<|eot_id|>",
+ sep_style=SeparatorStyle.MLLAMA,
+ tokenizer_name_or_path="meta-llama/Llama-3.2-11B-Vision-Instruct",
+ stop_str="<|eot_id|>",
+)
+
conv_mistral_instruct = Conversation(
system="",
roles=("USER", "ASSISTANT"),
@@ -648,6 +666,7 @@ def dict(self):
"v1": conv_vicuna_v1,
"vicuna_v1": conv_vicuna_v1,
"llama_2": conv_llama_2,
+ "mllama": conv_mllama,
"mistral_instruct": conv_mistral_instruct,
"mistral_orca": conv_mistral_orca,
"mistral_zephyr": conv_mistral_zephyr,
diff --git a/nemo/collections/vlm/neva/data/lazy.py b/nemo/collections/vlm/neva/data/lazy.py
index ca1179e24033..5bc2cbe0458e 100644
--- a/nemo/collections/vlm/neva/data/lazy.py
+++ b/nemo/collections/vlm/neva/data/lazy.py
@@ -12,37 +12,29 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from typing import TYPE_CHECKING, Optional
-
-import pytorch_lightning as pl
-from pytorch_lightning.utilities.types import EVAL_DATALOADERS, TRAIN_DATALOADERS
-from torch.utils import data
-from torch.utils.data import DataLoader
-
-from nemo.collections.vlm.neva.data.config import DataConfig, ImageDataConfig
-from nemo.collections.vlm.neva.data.conversation import conv_templates as supported_conv_templates
-from nemo.lightning.pytorch.plugins import MegatronDataSampler
-
-if TYPE_CHECKING:
- pass
-
import json
import logging
import os
import re
import tarfile
-from typing import Any, Dict, List, Sequence
+from typing import Any, Dict, List, Optional, Sequence
import decord
+import lightning.pytorch as pl
import numpy as np
import torch
import torch.nn.functional as F
+from lightning.pytorch.utilities.types import EVAL_DATALOADERS, TRAIN_DATALOADERS
from PIL import Image
-from torch.utils.data import Dataset, default_collate
+from torch.utils import data
+from torch.utils.data import DataLoader, Dataset, default_collate
from transformers import CLIPImageProcessor, SiglipImageProcessor
from nemo.collections.nlp.modules.common.megatron.utils import get_ltor_masks_and_position_ids
+from nemo.collections.vlm.neva.data.config import DataConfig, ImageDataConfig
+from nemo.collections.vlm.neva.data.conversation import conv_templates as supported_conv_templates
from nemo.collections.vlm.neva.data.multimodal_tokens import IGNORE_INDEX, SPECIAL_TOKEN_MAP
+from nemo.lightning.pytorch.plugins import MegatronDataSampler
class TarOrFolderImageLoader:
@@ -259,6 +251,7 @@ def __init__(
data_config,
tokenizer,
image_processor,
+ sequence_length=None,
):
super().__init__()
if data_path is not None:
@@ -270,7 +263,13 @@ def __init__(
logging.warning("Formatting inputs...Skip in lazy mode")
self.data_config = data_config
self.tokenizer = tokenizer
+ from nemo.collections.common.tokenizers.huggingface.auto_tokenizer import AutoTokenizer
+
+ if isinstance(self.tokenizer, AutoTokenizer):
+ self.tokenizer = self.tokenizer.tokenizer
+
self.image_processor = image_processor
+ self.sequence_length = sequence_length
self.conv_template = data_config.conv_template
self.conv = supported_conv_templates[self.conv_template]
@@ -323,8 +322,13 @@ def _apply_prompt_templates(self, source, use_plain=False):
roles = {"human": conv.roles[0], "gpt": conv.roles[1]}
source = source['conversations']
- if roles[source[0]["from"]] != conv.roles[0]:
- source = source[1:]
+
+ def _fix_roles(roles):
+ if len(source) < 2:
+ return roles
+ return {source[0]["from"]: conv.roles[0], source[1]["from"]: conv.roles[1]}
+
+ roles = _fix_roles(roles)
conv.messages = []
for j, sentence in enumerate(source):
@@ -354,6 +358,7 @@ def _tokenize_and_label(self, conversations):
return_tensors="pt",
)[0]
answer_start, answer_end = find_pattern_indices(tokens, answer_tokens, search_start_index)
+ assert answer_start > 0, "No valid answer found in the conversation."
labels[answer_start:answer_end] = tokens[answer_start:answer_end]
search_start_index = answer_end
tokens = tokens[:-1]
@@ -492,6 +497,7 @@ def __init__(
weights: Optional[List[float]] = None,
data_config: Optional[DataConfig] = ImageDataConfig,
seq_length: int = 2048,
+ decoder_seq_length: Optional[int] = None,
tokenizer: Optional = None,
image_processor: Optional = None,
micro_batch_size: int = 4,
@@ -518,6 +524,7 @@ def __init__(
self.weights = weights
self.data_config = data_config
self.seq_length = seq_length
+ self.decoder_seq_length = decoder_seq_length
self.tokenizer = tokenizer
self.image_processor = image_processor
self.num_train_samples = num_train_samples
@@ -533,13 +540,15 @@ def __init__(
if tokenizer is None or image_processor is None:
logging.warning(f"Processor and tokenizer are not provided! Fall back to `llava-hf/llava-1.5-7b-hf`.")
from transformers import AutoProcessor
+ from nemo.collections.common.tokenizers.huggingface.auto_tokenizer import AutoTokenizer
processor = AutoProcessor.from_pretrained("llava-hf/llava-1.5-7b-hf")
- self.tokenizer = tokenizer or processor.tokenizer
+ self.tokenizer = tokenizer or AutoTokenizer("llava-hf/llava-1.5-7b-hf")
self.image_processor = image_processor or processor.image_processor
self.data_sampler = MegatronDataSampler(
seq_len=self.seq_length,
+ decoder_seq_len=self.decoder_seq_length,
micro_batch_size=micro_batch_size,
global_batch_size=global_batch_size,
dataloader_type="cyclic",
diff --git a/nemo/collections/vlm/neva/data/llava_next_energon.py b/nemo/collections/vlm/neva/data/llava_next_energon.py
new file mode 100644
index 000000000000..c45ee50e5be3
--- /dev/null
+++ b/nemo/collections/vlm/neva/data/llava_next_energon.py
@@ -0,0 +1,161 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from dataclasses import dataclass, field
+from typing import Dict, List
+
+import torch
+from megatron.energon import VQASample, batch_list, batch_pad_stack
+from torch.nn.utils.rnn import pad_sequence
+
+from nemo.collections.multimodal.data.energon.config import ImageTextRawBatch, ImageTextSample, MultiModalSampleConfig
+from nemo.collections.multimodal.data.energon.sample_encoder import SampleEncoder, VQASampleEncoder
+from nemo.collections.multimodal.data.energon.task_encoder import MultiModalTaskEncoder
+from nemo.utils import logging
+
+
+@dataclass
+class LlavaNextTextSample(ImageTextSample):
+ num_media_tiles: int = 0
+
+
+@dataclass
+class LlavaNextTextRawBatch(ImageTextRawBatch):
+ num_media_tiles: List[int] = field(default_factory=list)
+
+
+class LlavaNextSampleEncoder(VQASampleEncoder):
+ def __init__(self, tokenizer, image_processor, multimodal_sample_config=MultiModalSampleConfig()):
+ """
+ Initialize the LlavaNextSampleEncoder, a VQASampleEncoder subclass for encoding
+ VQA-style multimodal samples in the LLaVA-NeXT format.
+
+ Parameters:
+ tokenizer (Tokenizer): The HF tokenizer used for processing text.
+ image_processor (ImageProcessor): The HF image processor used for preprocessing images.
+ multimodal_sample_config (MultiModalSampleConfig, optional): Configuration object for multimodal samples.
+ Defaults to MultiModalSampleConfig().
+ """
+ super().__init__(tokenizer, image_processor, multimodal_sample_config)
+
+ def process_image(self, image):
+ """
+ Process and prepare an image sample for encoding.
+
+ This method preprocesses the image using the HF image_processor, converting it to
+ a tensor.
+
+ Parameters:
+ image: The input image to be processed.
+
+ Returns:
+ torch.Tensor: The processed image tensor.
+ """
+ image_array = self.image_processor.preprocess(image, return_tensors='pt', do_rescale=False)['pixel_values'][0]
+ return image_array
+
+ def encode(self, input_sample: VQASample, output_sample: LlavaNextTextSample):
+ """
+ Encode a single sample into a format suitable for model input.
+
+ This method prepares the conversation prompt, tokenizes it, and processes
+ the associated image. It fills the output sample with tokens, labels, loss mask,
+ and other required fields for multimodal processing.
+
+ Parameters:
+ input_sample (VQASample): The input VQA sample containing an image and conversation text.
+ output_sample (LlavaNextTextSample): The output sample structure where encoded results are stored.
+
+ Returns:
+ LlavaNextTextSample: The encoded output sample, containing processed tokens, labels,
+ images, loss masks, and metadata.
+ """
+ conversation_prompt = self.apply_prompt_template(input_sample)
+ logging.debug(f"task encoder encode_sample conversation_prompt {conversation_prompt}")
+ # tokenize prompt
+ tokens = self.tokenize(conversation_prompt)
+ labels = self.compute_labels(tokens, input_sample)
+
+ tokens = tokens[:-1].contiguous()
+ labels = labels[1:].contiguous()
+ logging.debug(f"task encoder encode_sample after tokenize prompt tokens {tokens}")
+ logging.debug(f"task encoder encode_sample lables {labels}")
+ loss_mask = self.compute_loss_mask(labels)
+ processed_image = self.process_image(input_sample.image)
+ output_sample.__key__ = input_sample.__key__
+ output_sample.images = processed_image
+ output_sample.tokens = tokens
+ output_sample.labels = labels
+ output_sample.loss_mask = loss_mask
+ output_sample.num_media_tiles = processed_image.shape[0]
+ return output_sample
+
+
+class LlavaNextTaskEncoder(MultiModalTaskEncoder):
+ def __init__(self, tokenizer, image_processor, multimodal_sample_config):
+ """
+ Initialize the LlavaNextTaskEncoder.
+
+ This encoder extends MultiModalTaskEncoder to specifically handle LlavaNeXT,
+ overriding encoders for VQA sample type.
+
+ Parameters:
+ tokenizer (Tokenizer): The tokenizer for processing text data across sample types.
+ image_processor (ImageProcessor): The image processor for preprocessing images.
+ multimodal_sample_config (MultiModalSampleConfig): Configuration settings for multimodal samples.
+ """
+ super().__init__(tokenizer, image_processor, multimodal_sample_config)
+ self.encoders: Dict[str, SampleEncoder] = {
+ VQASample.__name__: LlavaNextSampleEncoder(tokenizer, image_processor, multimodal_sample_config)
+ }
+
+ def batch(self, samples: List[LlavaNextTextSample]) -> LlavaNextTextRawBatch:
+ """
+ Batch multiple encoded samples into a single batch structure for model input.
+
+ This method combines individual sample fields (keys, images, tokens, labels, etc.) and
+ pads or stacks them as needed to create a unified batch.
+
+ Parameters:
+ samples (List[LlavaNextTextSample]): A list of LlavaNextTextSample instances to be batched.
+
+ Returns:
+ LlavaNextTextRawBatch: A batch containing all input samples' images, tokens, labels,
+ loss masks, and other metadata prepared for model processing.
+ """
+ keys, images, tokens, labels, loss_mask, num_media_tiles = [], [], [], [], [], []
+ for sample in samples:
+ keys.append(sample.__key__)
+ images.append(sample.images)
+ tokens.append(sample.tokens)
+ labels.append(sample.labels)
+ loss_mask.append(sample.loss_mask)
+ num_media_tiles.append(sample.num_media_tiles)
+
+ batch_keys = batch_list(keys)
+
+ batch_images = torch.cat(images, dim=0)
+
+ batch_tokens = pad_sequence(tokens, batch_first=True)
+ batch_labels = pad_sequence(labels, batch_first=True)
+
+ batch_loss_mask = batch_pad_stack(loss_mask)
+ batch_num_media_tiles = torch.tensor(batch_list(num_media_tiles), dtype=torch.int)
+ return LlavaNextTextRawBatch(
+ __keys__=batch_keys,
+ images=batch_images,
+ tokens=batch_tokens,
+ labels=batch_labels,
+ loss_mask=batch_loss_mask,
+ num_media_tiles=batch_num_media_tiles,
+ )
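A minimal, hypothetical sketch of what LlavaNextTaskEncoder.batch does with two encoded samples may help here; the shapes and values below are illustrative only, not taken from a real dataset:

    import torch
    from torch.nn.utils.rnn import pad_sequence

    # Two fake samples: 2 and 3 image tiles, token lengths 5 and 7.
    images = [torch.zeros(2, 3, 336, 336), torch.zeros(3, 3, 336, 336)]
    tokens = [torch.ones(5, dtype=torch.long), torch.ones(7, dtype=torch.long)]

    # Tiles from all samples are concatenated along dim 0 ...
    batch_images = torch.cat(images, dim=0)                # [5, 3, 336, 336]
    # ... while token tensors are padded to the longest sequence.
    batch_tokens = pad_sequence(tokens, batch_first=True)  # [2, 7]
    # Per-sample tile counts let the model split batch_images back up.
    num_media_tiles = torch.tensor([2, 3], dtype=torch.int)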
diff --git a/nemo/collections/vlm/neva/data/mock.py b/nemo/collections/vlm/neva/data/mock.py
index ac4bc56a068c..9e2308752641 100644
--- a/nemo/collections/vlm/neva/data/mock.py
+++ b/nemo/collections/vlm/neva/data/mock.py
@@ -14,35 +14,38 @@
from typing import Dict, List, Optional
+import lightning.pytorch as pl
import numpy as np
-import pytorch_lightning as pl
import torch
-from pytorch_lightning.utilities.types import EVAL_DATALOADERS, TRAIN_DATALOADERS
+from lightning.pytorch.utilities.types import EVAL_DATALOADERS, TRAIN_DATALOADERS
from torch.utils import data
from torch.utils.data import DataLoader, Dataset
from nemo.collections.vlm.neva.data.multimodal_tokens import IMAGE_TOKEN_INDEX
from nemo.lightning.pytorch.plugins import MegatronDataSampler
+from nemo.utils import logging
class MockDataModule(pl.LightningDataModule):
def __init__(
self,
seq_length: int = 2048,
+ decoder_seq_length: Optional[int] = None,
tokenizer: Optional = None,
image_processor: Optional = None,
micro_batch_size: int = 4,
global_batch_size: int = 8,
rampup_batch_size: Optional[List[int]] = None,
- num_train_samples: int = 10_000,
- num_val_samples: int = 10_000,
- num_test_samples: int = 10_000,
+ num_train_samples: int = 10_000_000,
+ num_val_samples: int = 10_000_000,
+ num_test_samples: int = 10_000_000,
num_workers: int = 8,
pin_memory: bool = True,
persistent_workers: bool = False,
):
super().__init__()
self.seq_length = seq_length
+ self.decoder_seq_len = decoder_seq_length
self.num_train_samples = num_train_samples
self.num_val_samples = num_val_samples
self.num_test_samples = num_test_samples
@@ -51,13 +54,16 @@ def __init__(
self.persistent_workers = persistent_workers
if tokenizer is None or image_processor is None:
+ logging.warning(f"Processor or tokenizer are not provided! Fall back to `llava-hf/llava-1.5-7b-hf`.")
from transformers import AutoProcessor
+ from nemo.collections.common.tokenizers.huggingface.auto_tokenizer import AutoTokenizer
processor = AutoProcessor.from_pretrained("llava-hf/llava-1.5-7b-hf")
- self.tokenizer = tokenizer or processor.tokenizer
+ self.tokenizer = tokenizer or AutoTokenizer("llava-hf/llava-1.5-7b-hf")
self.image_processor = image_processor or processor.image_processor
self.data_sampler = MegatronDataSampler(
seq_len=self.seq_length,
+ decoder_seq_len=self.decoder_seq_len,
micro_batch_size=micro_batch_size,
global_batch_size=global_batch_size,
rampup_batch_size=rampup_batch_size,
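For orientation, a hedged usage sketch of the updated MockDataModule; the argument names follow the signature above, and the trainer wiring is omitted:

    from nemo.collections.vlm.neva.data.mock import MockDataModule

    # decoder_seq_length is only needed for encoder-decoder-style data sampling;
    # leaving it None keeps the previous behavior. Omitting tokenizer and
    # image_processor triggers the llava-1.5-7b-hf fallback warned about above.
    data = MockDataModule(
        seq_length=2048,
        decoder_seq_length=None,
        micro_batch_size=4,
        global_batch_size=8,
    )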
diff --git a/nemo/collections/vlm/neva/model/__init__.py b/nemo/collections/vlm/neva/model/__init__.py
index 25842186ecfe..99862f97b9ed 100644
--- a/nemo/collections/vlm/neva/model/__init__.py
+++ b/nemo/collections/vlm/neva/model/__init__.py
@@ -19,16 +19,19 @@
NevaConfig,
NevaModel,
)
-from nemo.collections.vlm.neva.model.llava import Llava1_5Config7B, Llava1_5Config13B, LlavaConfig, LlavaModel
+from nemo.collections.vlm.neva.model.llava import Llava15Config7B, Llava15Config13B, LlavaConfig, LlavaModel
+from nemo.collections.vlm.neva.model.vit_config import CLIPViTL_14_336_Config, SigLIPViT400M_14_384_Config
__all__ = [
"CLIPViTConfig",
+ "CLIPViTL_14_336_Config",
+ "SigLIPViT400M_14_384_Config",
"HFCLIPVisionConfig",
"MultimodalProjectorConfig",
"NevaConfig",
"NevaModel",
"LlavaConfig",
- "Llava1_5Config7B",
- "Llava1_5Config13B",
+ "Llava15Config7B",
+ "Llava15Config13B",
"LlavaModel",
]
diff --git a/nemo/collections/vlm/neva/model/api.py b/nemo/collections/vlm/neva/model/api.py
index 62374d536712..13444632464e 100644
--- a/nemo/collections/vlm/neva/model/api.py
+++ b/nemo/collections/vlm/neva/model/api.py
@@ -12,20 +12,20 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
-from nemo.collections.vlm.neva.model import Llava1_5Config7B, Llava1_5Config13B, LlavaModel
+from nemo.collections.vlm.neva.model import Llava15Config7B, Llava15Config13B, LlavaModel
-def llava1_5_7b() -> pl.LightningModule:
- return LlavaModel(Llava1_5Config7B())
+def llava15_7b() -> pl.LightningModule:
+ return LlavaModel(Llava15Config7B())
-def llava1_5_13b() -> pl.LightningModule:
- return LlavaModel(Llava1_5Config13B())
+def llava15_13b() -> pl.LightningModule:
+ return LlavaModel(Llava15Config13B())
__all__ = [
- "llava1_5_7b",
- "llava1_5_13b",
+ "llava15_7b",
+ "llava15_13b",
]
diff --git a/nemo/collections/vlm/neva/model/base.py b/nemo/collections/vlm/neva/model/base.py
index 7d0c53b79321..360874152cf7 100644
--- a/nemo/collections/vlm/neva/model/base.py
+++ b/nemo/collections/vlm/neva/model/base.py
@@ -17,22 +17,25 @@
from dataclasses import dataclass
from typing import Callable, Dict, List, Optional, Union
-import pytorch_lightning as L
+import lightning.pytorch as L
import torch
import torch.distributed
import torch.nn.functional as F
from megatron.core import dist_checkpointing
+from megatron.core import parallel_state as ps
+from megatron.core.enums import ModelType
+from megatron.core.extensions.transformer_engine import TEDotProductAttention
from megatron.core.inference_params import InferenceParams
from megatron.core.models.multimodal.llava_model import LLaVAModel as MCoreLLaVAModel
from megatron.core.models.vision.clip_vit_model import CLIPViTModel as MCoreCLIPViTModel
from megatron.core.models.vision.multimodal_projector import MultimodalProjector as MCoreMultimodalProjector
from megatron.core.optimizer import OptimizerConfig
+from megatron.core.tensor_parallel import gather_from_sequence_parallel_region
from megatron.core.transformer.custom_layers.transformer_engine import (
TEColumnParallelLinear,
TENorm,
TERowParallelLinear,
)
-from megatron.core.transformer.enums import ModelType
from megatron.core.transformer.mlp import MLP, MLPSubmodules
from megatron.core.transformer.spec_utils import ModuleSpec
from megatron.core.transformer.transformer_config import TransformerConfig
@@ -41,15 +44,43 @@
from nemo.collections.common.tokenizers.tokenizer_spec import TokenizerSpec
from nemo.collections.llm import fn
-from nemo.collections.llm.gpt.model import local_layer_spec, transformer_engine_layer_spec
+from nemo.collections.llm.gpt.model import transformer_engine_layer_spec
from nemo.collections.llm.gpt.model.base import get_batch_on_this_context_parallel_rank, get_packed_seq_params
-from nemo.collections.nlp.modules.common.megatron.module import MegatronModule
-from nemo.collections.vlm.neva.data.multimodal_tokens import IGNORE_INDEX, IMAGE_TOKEN_INDEX
+from nemo.collections.vlm.neva.data.multimodal_tokens import IMAGE_TOKEN_INDEX
from nemo.lightning import io
+from nemo.lightning.io.pl import ckpt_to_weights_subdir
from nemo.lightning.megatron_parallel import MaskedTokenLossReductionWithLossMask
from nemo.lightning.pytorch.optim import MegatronOptimizerModule, OptimizerModule
from nemo.utils import logging
+MODEL_CONFIG_ATTR = [
+ 'num_layers',
+ 'hidden_size',
+ 'num_attention_heads',
+ 'num_query_groups',
+ 'ffn_hidden_size',
+ 'kv_channels',
+ 'hidden_dropout',
+ 'attention_dropout',
+ 'fp32_residual_connection',
+ 'apply_residual_connection_post_layernorm',
+ 'layernorm_epsilon',
+ 'layernorm_zero_centered_gamma',
+ 'add_bias_linear',
+ 'add_qkv_bias',
+ 'gated_linear_unit',
+ 'activation_func',
+ 'activation_func_fp8_input_store',
+ 'num_moe_experts',
+ 'rotary_interleaved',
+ 'window_size',
+ 'normalization',
+ 'qk_layernorm',
+ 'test_mode',
+ 'calculate_per_token_loss',
+ 'seq_length',
+]
+
def get_image_sequence_length(img_h, img_w, patch_dim, add_class_token, class_token_len):
"""Get image sequence length given image size, patch size, and class token."""
@@ -64,9 +95,7 @@ def neva_data_step(dataloader_iter) -> Dict[str, torch.Tensor]:
# Based on: https://github.com/NVIDIA/Megatron-LM/blob/main/pretrain_gpt.py#L87
# https://github.com/NVIDIA/NeMo/blob/main/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py#L828-L842
-
batch = next(dataloader_iter)
-
_batch: dict
if isinstance(batch, tuple) and len(batch) == 3:
_batch = batch[0]
@@ -74,11 +103,23 @@ def neva_data_step(dataloader_iter) -> Dict[str, torch.Tensor]:
_batch = batch
required_keys = set()
- required_keys.add("attention_mask")
+ required_keys.update(
+ (
+ "tokens",
+ "attention_mask",
+ "media",
+ "num_media_tiles",
+ )
+ )
if parallel_state.is_pipeline_first_stage():
- required_keys.update(("media", "tokens", "position_ids"))
+ required_keys.update(("position_ids",))
if parallel_state.is_pipeline_last_stage():
- required_keys.update(("labels", "loss_mask"))
+ required_keys.update(
+ (
+ "labels",
+ "loss_mask",
+ )
+ )
_batch = {
key: val.cuda(non_blocking=True) if key in required_keys and val is not None else None
@@ -98,6 +139,7 @@ def neva_forward_step(model, batch) -> torch.Tensor:
"attention_mask": batch.get("attention_mask", None),
"loss_mask": batch.get("loss_mask", None),
"labels": batch.get("labels", None),
+ "num_media_tiles": batch.get("num_media_tiles", None),
}
if 'cu_seqlens' in batch:
@@ -139,6 +181,9 @@ def configure_model(self) -> "MCoreMultimodalProjector":
),
)
self.layer_spec = self.layer_spec.submodules
+ elif self.projector_type == "mcore_affine":
+ self.projector_type = "affine" # strip "mcore_" for mcore init
+ self.layer_spec = MLPSubmodules(linear_fc1=TEColumnParallelLinear, linear_fc2=None)
else:
raise NotImplementedError(f"Not supported projector type `{self.projector_type}`")
@@ -173,10 +218,11 @@ class HFCLIPVisionConfig(CLIPVisionConfig, io.IOMixin):
https://github.com/huggingface/transformers/blob/v4.44.0/src/transformers/models/clip/configuration_clip.py#L261
"""
+ hidden_size: int = 1024
pretrained_model_name_or_path: Optional[Union[str, os.PathLike]] = None
- def configure_hf_config(self, *args, **kwargs) -> None:
- CLIPVisionConfig.__init__(self, *args, **kwargs)
+ def __post_init__(self, *args, **kwargs) -> None:
+ CLIPVisionConfig.__init__(self, *args, **kwargs, hidden_size=self.hidden_size)
def configure_model(self) -> "CLIPVisionModel":
# Monkey patch the method to the vision encoder
@@ -195,26 +241,40 @@ def configure_model(self) -> "CLIPVisionModel":
@dataclass
class CLIPViTConfig(TransformerConfig, io.IOMixin):
ln_pre_impl: Union[ModuleSpec, type] = TENorm
+ ln_post_impl: Union[ModuleSpec, type] = TENorm
add_class_token: bool = True
class_token_len: int = 1
patch_dim: int = 14
img_h: int = 336
img_w: int = 336
+ vision_model_type: str = "clip" # ["clip", "siglip"]
transformer_layer_spec: ModuleSpec = transformer_engine_layer_spec
- def configure_model(self) -> "MCoreCLIPViTModel":
+ num_layers: int = 1 # Placeholder, NOT used!
+ num_attention_heads: int = 8 # Placeholder, NOT used!
+
+ def __post_init__(self):
+ if self.vision_model_type == "siglip":
+ self.add_class_token = False
+ self.class_token_len = 0
+
+ def configure_model(self) -> "CLIPViTModel":
transformer_layer_spec = self.transformer_layer_spec
if not isinstance(transformer_layer_spec, ModuleSpec):
- transformer_layer_spec = transformer_layer_spec(self)
- return MCoreCLIPViTModel(
+ from nemo.collections.vlm.layer_specs import get_layer_spec_te
+
+ transformer_layer_spec = get_layer_spec_te(is_vit=True)
+ return CLIPViTModel(
self,
transformer_layer_spec,
ln_pre_impl=self.ln_pre_impl,
+ ln_post_impl=self.ln_post_impl,
add_class_token=self.add_class_token,
class_token_len=self.class_token_len,
patch_dim=self.patch_dim,
img_h=self.img_h,
img_w=self.img_w,
+ model_subtype=self.vision_model_type,
)
@@ -223,283 +283,173 @@ class NevaConfig(TransformerConfig, io.IOMixin):
language_transformer_config: Optional[TransformerConfig] = None
vision_transformer_config: Optional[TransformerConfig] = None
vision_projection_config: Optional[TransformerConfig] = None
+
drop_vision_class_token: bool = True
+ vision_feature_layer: int = -2
+
+ encoder_pipeline_model_parallel_size: int = 0
+ encoder_tensor_model_parallel_size: int = 1
num_layers: int = 1 # Placeholder, NOT used!
num_attention_heads: int = 8 # Placeholder, NOT used!
- vision_feature_layer: int = -2
+
+ seq_length: int = 1024
language_model_from_pretrained: Optional[str] = None
vision_model_from_pretrained: Optional[str] = None # TODO
vision_projection_from_pretrained: Optional[str] = None # TODO
- freeze_language_model: bool = True
- freeze_vision_model: bool = True
+ freeze_language_model: bool = False
+ freeze_vision_model: bool = False
freeze_vision_projection: bool = False
forward_step_fn: Callable = neva_forward_step
data_step_fn: Callable = neva_data_step
- def configure_model(self, tokenizer) -> "MCoreLLaVAModel":
- language_model = self.language_transformer_config.configure_model(tokenizer=tokenizer)
- vision_model = self.vision_transformer_config.configure_model()
- vision_projection = self.vision_projection_config.configure_model()
-
- if self.language_model_from_pretrained is not None:
- sharded_state_dict = dict(state_dict=language_model.sharded_state_dict(prefix="module."))
- loaded_state_dict = dist_checkpointing.load(
- sharded_state_dict=sharded_state_dict, checkpoint_dir=self.language_model_from_pretrained
+ def __post_init__(self):
+ if self.language_transformer_config is not None:
+ for attr in MODEL_CONFIG_ATTR:
+ setattr(self, attr, getattr(self.language_transformer_config, attr))
+
+ def configure_model(self, tokenizer) -> "MCoreNevaModel":
+ from megatron.core import parallel_state as ps
+
+ self.language_transformer_config.tensor_model_parallel_size = self.tensor_model_parallel_size
+ self.language_transformer_config.sequence_parallel = self.sequence_parallel
+ self.vision_transformer_config.tensor_model_parallel_size = self.tensor_model_parallel_size
+ self.vision_projection_config.tensor_model_parallel_size = self.tensor_model_parallel_size
+ self.language_transformer_config.pipeline_model_parallel_size = self.pipeline_model_parallel_size
+
+ if self.encoder_pipeline_model_parallel_size > 0:
+ assert self.encoder_pipeline_model_parallel_size == 1, "ViT can only live on 1 pipeline stage."
+ self.vision_transformer_config.pipeline_model_parallel_size = self.encoder_pipeline_model_parallel_size
+ self.vision_projection_config.pipeline_model_parallel_size = self.encoder_pipeline_model_parallel_size
+ self.language_transformer_config.encoder_pipeline_model_parallel_size = (
+ self.encoder_pipeline_model_parallel_size
)
- loaded_state_dict = {k.removeprefix("module."): v for k, v in loaded_state_dict["state_dict"].items()}
- language_model.load_state_dict(loaded_state_dict)
- logging.info(f"Restored language model weights from {self.language_model_from_pretrained}")
+ if self.encoder_tensor_model_parallel_size > 0:
+ self.vision_transformer_config.tensor_model_parallel_size = self.encoder_tensor_model_parallel_size
+ self.vision_projection_config.tensor_model_parallel_size = self.encoder_tensor_model_parallel_size
+
model = MCoreNevaModel(
- transformer_config=self,
- language_model=language_model,
- vision_model=vision_model,
- vision_projection=vision_projection,
+ config=self,
+ tokenizer=tokenizer,
+ pre_process=ps.is_pipeline_first_stage()
+ or ps.get_pipeline_model_parallel_rank() == self.encoder_pipeline_model_parallel_size,
+ post_process=ps.is_pipeline_last_stage(),
+ add_encoder=ps.is_pipeline_first_stage(),
+ add_decoder=ps.is_pipeline_last_stage()
+ or ps.get_pipeline_model_parallel_rank() >= self.encoder_pipeline_model_parallel_size,
drop_vision_class_token=self.drop_vision_class_token,
)
- model.freeze(
- freeze_language_model=self.freeze_language_model,
- freeze_vision_model=self.freeze_vision_model,
- freeze_vision_projection=self.freeze_vision_projection,
- )
+
return model
+class CLIPViTModel(MCoreCLIPViTModel):
+ """CLIP ViT vision model."""
+
+ def forward(
+ self, x: torch.Tensor, attention_mask: Optional[torch.Tensor] = None, num_unused_layers: int = 0
+ ) -> torch.Tensor:
+ if num_unused_layers > 0:
+ unused_layers = self.decoder.layers[-num_unused_layers:]
+ self.decoder.layers = self.decoder.layers[:-num_unused_layers]
+ x = super().forward(x, attention_mask)
+ self.decoder.layers.extend(unused_layers)  # re-attach the temporarily removed layers
+ return x
+
+ return super().forward(x, attention_mask)
+
+
class MCoreNevaModel(MCoreLLaVAModel):
def __init__(
self,
- transformer_config: TransformerConfig,
- language_model: MegatronModule,
- vision_model: MegatronModule,
- vision_projection: MegatronModule,
+ config: NevaConfig,
+ tokenizer: Optional = None,
pre_process: bool = True,
post_process: bool = True,
+ add_encoder: bool = True,
+ add_decoder: bool = True,
drop_vision_class_token: bool = False,
) -> None:
- super(MCoreLLaVAModel, self).__init__(config=transformer_config)
+ super(MCoreLLaVAModel, self).__init__(config=config)
- logging.warning("LLaVA model is under development and may be missing features.")
+ language_transformer_config = config.language_transformer_config
+ vision_transformer_config = config.vision_transformer_config
+ vision_projection_config = config.vision_projection_config
self.pre_process = pre_process
self.post_process = post_process
+ self.add_encoder = add_encoder
+ self.add_decoder = add_decoder
self.encoder_hidden_state = None
- self.vision_model = vision_model
- self.vision_projection = vision_projection
- self.language_model = language_model
- self.model_type = ModelType.encoder_or_decoder
- # This attribute is needed to check if an all-reduce is required
- # on the word embeddings inside `finalize_model_grads._allreduce_word_embedding_grads`.
- self.share_embeddings_and_output_weights = False
- if self.language_model is not None:
- self.share_embeddings_and_output_weights = self.language_model.share_embeddings_and_output_weights
- self._language_max_sequence_length = self.language_model.max_sequence_length
+ self.vision_model = None
+ self.vision_projection = None
+ self.language_model = None
- if self.vision_model is not None:
- self._drop_vision_class_token = drop_vision_class_token
+ self.sequence_parallel_lm = language_transformer_config.sequence_parallel
+ self.tp_comm_overlap_lm = language_transformer_config.tp_comm_overlap
- self.add_encoder = self.vision_model is not None
- self.add_decoder = self.language_model is not None
- self.vision_model_from_hf = str(self.vision_model.__class__.__module__).startswith("transformers.")
+ self.share_embeddings_and_output_weights = False
if self.add_decoder:
- vision_config = self.config.vision_transformer_config
- if self.vision_model_from_hf:
- # img_h, img_w, patch_dim, add_class_token, class_token_len
- self._img_seq_len = get_image_sequence_length(
- img_h=vision_config.image_size,
- img_w=vision_config.image_size,
- patch_dim=vision_config.patch_size,
- add_class_token=not drop_vision_class_token,
- class_token_len=0 if "siglip" in vision_config.model_type else 1,
+ self.language_model = language_transformer_config.configure_model(
+ tokenizer=tokenizer, pre_process=pre_process, post_process=post_process
+ )
+ self.share_embeddings_and_output_weights = self.language_model.share_embeddings_and_output_weights
+ self._language_max_sequence_length = self.language_model.max_sequence_length
+ self._language_is_pipeline_parallel = language_transformer_config.pipeline_model_parallel_size > 1
+ if config.language_model_from_pretrained is not None:
+ sharded_state_dict = dict(state_dict=self.language_model.sharded_state_dict(prefix="module."))
+ loaded_state_dict = dist_checkpointing.load(
+ sharded_state_dict=sharded_state_dict,
+ checkpoint_dir=ckpt_to_weights_subdir(config.language_model_from_pretrained, is_saving=False),
+ validate_access_integrity=False,
)
- else:
- self._img_seq_len = 576 # TODO(yuya): Fix hardcode
+ loaded_state_dict = {k.removeprefix("module."): v for k, v in loaded_state_dict["state_dict"].items()}
+ self.language_model.load_state_dict(loaded_state_dict)
+ logging.info(f"Restored language model weights from {config.language_model_from_pretrained}")
else:
- self._img_seq_len = 0
-
- def _preprocess_data(
- self,
- image_embeddings,
- language_embeddings,
- input_ids,
- loss_mask,
- labels,
- use_inference_kv_cache,
- image_token_index,
- num_image_tiles,
- ):
- # TODO (yuya): remove this and use the mcore method
- """Preprocess input data before input to language model.
-
- This function is adopted from
- https://github.com/huggingface/transformers/blob/85817d98fb60977c97e3014196a462b732d2ed1a/src/transformers/models/llava_next/modeling_llava_next.py#L409
- for our input data conventions.
-
- image_token_index = -200 indicates the image position in the input_ids = [0, 1, -200, 2, 3] and labels = [1, -200, 2, 3, 4], for example.
- We want to replace the image position (-200) with image_embeddings and return the following:
- - final_embeddings = [0, 1, image_embeddings, 2, 3],
- - final_labels = [1, -100, 2, 3, 4]
- - final_loss_mask = [1, 0, 0, 1, 1]
-
- This function also handles the case where the input does not contain an image (text-only sample). It also handles the case where a single input
- image is split into multiple tiles.
-
- If pipeline parallelism is not used, then self.pre_process and self.post_process are both True and we update both
- input embeddings, labels and loss masks (if available).
-
- If pipeline parallelism is used, then we do the following
- - the first language model chunk has self.pre_process = True and self.post_process = False. We update input embeddings.
- - the middle language model chunk(s) has self.pre_process = False and self.post_process = False. We don't need to update anything.
- - the last language model chunk has self.pre_process = False and self.post_process = True. We update labels and loss mask.
+ if config.language_model_from_pretrained is not None:
+ dist_checkpointing.load(
+ sharded_state_dict=dict(state_dict={}),
+ checkpoint_dir=config.language_model_from_pretrained,
+ validate_access_integrity=False,
+ )
- TODO: This function should adjust the attention mask too. Currently, we assume the language model uses a causal mask.
+ if self.add_encoder:
+ self.vision_model = vision_transformer_config.configure_model()
+ self.vision_projection = vision_projection_config.configure_model()
+ self._drop_vision_class_token = drop_vision_class_token
- Returns:
- final_embedding (torch.Tensor): image and text embeddings concated [combined_seq_len, b, h].
- final_labels (torch.Tensor): labels for image and text positions [b, combined_seq_len].
- final_loss_mask (torch.Tensor): loss mask for image and text positions [b, combined_seq_len].
- """
- assert self.add_decoder, "input text preprocessing is only needed for the language model"
+ self.freeze(
+ freeze_language_model=config.freeze_language_model,
+ freeze_vision_model=config.freeze_vision_model,
+ freeze_vision_projection=config.freeze_vision_projection,
+ )
- # No pre- or postprocessing needed. With pipeline parallel > 2, this means a chunk in the middle of the model.
- if not self.pre_process and not self.post_process:
- return language_embeddings, loss_mask, labels
+ self.model_type = ModelType.encoder_or_decoder
+ # This attribute is needed to check if an all-reduce is required
+ # on the word embeddings inside `finalize_model_grads._allreduce_word_embedding_grads`.
- # If using the inference KV cache, the image tokens are already computed.
- if use_inference_kv_cache:
- return language_embeddings, loss_mask, labels
-
- img_seq_len = self._img_seq_len
- batch_size, text_seq_len = input_ids.shape
-
- has_labels = labels is not None
- if has_labels:
- assert (
- labels.shape == loss_mask.shape
- ), f"mismatching labels shape {labels.shape} and loss mask shape {loss_mask.shape}"
-
- # Create indices for new text and label positions.
- with torch.no_grad():
- image_token_mask = input_ids == image_token_index
- num_image_tokens = torch.sum(image_token_mask, dim=-1)
-
- # Number of tiles per sample.
- num_image_tiles_batch = num_image_tiles.split(num_image_tokens.tolist(), dim=0)
- num_image_tiles_batch = torch.tensor([x.sum() for x in num_image_tiles_batch], device=input_ids.device)
-
- # Sequence length for each sample is the image sequence length multiplied by the number of tiles for that image, minus image token indices,
- # plus text sequence length.
- seq_lens = num_image_tiles_batch * img_seq_len - num_image_tokens + text_seq_len
- max_seq_len = seq_lens.max()
- batch_indices, non_image_indices = torch.where(input_ids != image_token_index)
-
- # New position ids for the text tokens, shifted by the image sequence length.
- # E.g. for input_ids = [-200, 1, 2, 3] and img_seq_len = 576, we get new_position_ids = [576, 577, 578, 579].
- # text_position_ids are then [577, 578, 579].
- image_token_mask_lens = image_token_mask.int().clone()
- # -1 is for the removed image token index.
- image_token_mask_lens[image_token_mask] = num_image_tiles * img_seq_len - 1
- # +1 is needed here for the cumulative sum. -1 is adjusting for zero-based indexing.
- new_position_ids = torch.cumsum((image_token_mask_lens + 1), dim=-1) - 1
- text_position_ids = new_position_ids[batch_indices, non_image_indices]
-
- # Labels are shifted to left by one. So, shift text position ids and non-image indices to left by one.
- if has_labels:
- label_text_position_ids = text_position_ids - 1
- valid_label_text_position_ids = label_text_position_ids >= 0
- label_text_position_ids = label_text_position_ids[valid_label_text_position_ids]
-
- label_batch_indices = batch_indices[valid_label_text_position_ids]
-
- label_non_image_indices = non_image_indices - 1
- valid_label_non_image_indices = label_non_image_indices >= 0
- label_non_image_indices = label_non_image_indices[valid_label_non_image_indices]
-
- # Create a mask for the image embedding positions.
- with torch.no_grad():
- images_mask = torch.full((batch_size, max_seq_len), True, dtype=torch.bool, device=input_ids.device)
- # No images in the text positions.
- images_mask[batch_indices, text_position_ids] = False
- # Samples can have different amount of images tokens. new_position_ids[:, -1] gives the last text position id for each sample.
- # Padding is needed when the number of image tokens differs.
- first_padding_idx = new_position_ids[:, -1] + 1
- images_mask[
- torch.arange(max_seq_len, device=first_padding_idx.device).repeat(batch_size, 1)
- >= first_padding_idx.unsqueeze(1)
- ] = False
-
- # Create the final input embedding (if this is the first language model stage).
- final_embedding = None
- if self.pre_process:
- embed_dim = language_embeddings.shape[-1]
- final_embedding = torch.zeros(
- batch_size,
- max_seq_len,
- embed_dim,
- dtype=image_embeddings.dtype,
- device=image_embeddings.device,
+ self.vision_model_from_hf = hasattr(vision_transformer_config, "image_size")
+ if self.vision_model_from_hf:
+ # img_h, img_w, patch_dim, add_class_token, class_token_len
+ self._img_seq_len = get_image_sequence_length(
+ img_h=vision_transformer_config.image_size,
+ img_w=vision_transformer_config.image_size,
+ patch_dim=vision_transformer_config.patch_size,
+ add_class_token=not drop_vision_class_token,
+ class_token_len=0 if "siglip" in vision_transformer_config.model_type else 1,
)
-
- # Put text embeddings to the text positions in the result tensor.
- final_embedding[batch_indices, text_position_ids] = language_embeddings[batch_indices, non_image_indices]
-
- # Put image embeddings to image positions.
- final_embedding[images_mask] = image_embeddings.reshape(-1, embed_dim).contiguous()
-
- # Create the final labels and loss mask (if this is the last language model stage).
- final_labels, final_loss_mask = None, None
- if has_labels:
- final_labels = torch.full(
- (batch_size, max_seq_len), IGNORE_INDEX, dtype=labels.dtype, device=labels.device
+ else:
+ self._img_seq_len = get_image_sequence_length(
+ img_h=vision_transformer_config.img_h,
+ img_w=vision_transformer_config.img_w,
+ patch_dim=vision_transformer_config.patch_dim,
+ add_class_token=not drop_vision_class_token,
+ class_token_len=vision_transformer_config.class_token_len,
)
- final_loss_mask = torch.full((batch_size, max_seq_len), 0, dtype=loss_mask.dtype, device=loss_mask.device)
-
- # Put text labels and loss mask to the text positions.
- final_labels[label_batch_indices, label_text_position_ids] = labels[
- label_batch_indices, label_non_image_indices
- ]
-
- final_loss_mask[batch_indices, text_position_ids] = loss_mask[batch_indices, non_image_indices]
-
- # For labels, we need to pick the last label index that got dropped by the shift to left.
- label_extra_text_position_ids = seq_lens - 1
- batch_range = torch.arange(len(label_extra_text_position_ids))
- final_labels[batch_range, label_extra_text_position_ids] = labels[batch_range, -1]
-
- # Loss mask the image positions.
- final_loss_mask[images_mask] = 0
-
- # Loss mask last text position just before an image so that text token does not need to predict the first image token.
- batch_image_indices, image_indices = torch.where(image_token_mask)
- # Indices just before image tokens. If it's -1, skip it.
- before_image_indices = image_indices - 1
- valid = before_image_indices >= 0
- valid_batch_image_indices = batch_image_indices[valid]
- valid_before_image_indices = before_image_indices[valid]
- # Map those indices those position ids.
- valid_before_image_indices = new_position_ids[valid_batch_image_indices, valid_before_image_indices]
-
- final_loss_mask[valid_batch_image_indices, valid_before_image_indices] = 0
-
- if final_embedding is not None and has_labels:
- assert (
- final_embedding.shape[:2] == final_labels.shape == final_loss_mask.shape
- ), "unexpected shapes after data preprocessing"
-
- if final_embedding is not None:
- final_embedding = final_embedding.transpose(1, 0).contiguous()
-
- # Truncate if exceeding the language model's max sequence length.
- if final_embedding is not None and final_embedding.shape[0] > self._language_max_sequence_length:
- final_embedding = final_embedding[: self._language_max_sequence_length]
-
- if has_labels and final_labels.shape[1] > self._language_max_sequence_length:
- final_labels = final_labels[:, : self._language_max_sequence_length]
- final_loss_mask = final_loss_mask[:, : self._language_max_sequence_length]
-
- return final_embedding, final_labels, final_loss_mask
def forward(
self,
@@ -512,6 +462,7 @@ def forward(
inference_params: Optional[InferenceParams] = None,
num_media_tiles: Optional[List[int]] = None,
media_token_index: Optional[int] = IMAGE_TOKEN_INDEX,
+ runtime_gather_output: Optional[bool] = None,
) -> torch.Tensor:
"""Forward function of the LLaVA model.
@@ -530,34 +481,44 @@ def forward(
output (torch.Tensor): Loss of shape [b, s] if labels are provided, otherwise logits of shape [b, s, vocab_size].
loss_mask (torch.Tensor): Loss mask expanded to combined sequence length. Shape [b, s].
"""
+
use_inference_kv_cache = (
inference_params is not None and "image_tokens_count" in inference_params.key_value_memory_dict
)
+ has_images = media.shape[0] > 0
+
# If running inference, we can skip media token computation if they were computed already earlier for this sample.
- if use_inference_kv_cache or media is None:
+ if use_inference_kv_cache:
media_embeddings = None
- elif self.add_encoder:
+ elif self.add_encoder and not has_images:
+ # If no images provided, use an empty image embeddings tensor.
+ media_embeddings = torch.tensor([], dtype=media.dtype, device=media.device).reshape(0, 0, 0)
+ elif self.add_encoder and has_images:
# media is in shape of (num_images_in_mbs, c, h, w)
# note num_images_in_mbs is not mbs but total images in this mbs.
if self.vision_model_from_hf:
- media_embeddings = self.vision_model(
- media, output_hidden_states=True
- ) # [num_images, img_seq_len, h_vision]
+ self.vision_model = self.vision_model.eval()
+ media_embeddings = self.vision_model(media, output_hidden_states=True)
media_embeddings = media_embeddings[-1][
self.config.vision_feature_layer
- ] # take second from last layer
+ ] # [num_images, img_seq_len, h_vision]
else:
# TODO(yuya): MCore Clip path not yet support taking a specific layer hidden states
- media_embeddings = self.vision_model(media)
+ media = media.to(next(self.vision_model.parameters()).dtype)
+ media_embeddings = self.vision_model(media, num_unused_layers=-self.config.vision_feature_layer - 1)
if self._drop_vision_class_token:
class_token_len = getattr(self.vision_model, "class_token_len", 1)
media_embeddings = media_embeddings[:, class_token_len:, :]
+ # contiguous() required as `permute` can sparsify the tensor and this breaks pipelining
+ media_embeddings = media_embeddings.permute(1, 0, 2).contiguous() # [img_seq_len, num_tiles, h_vision]
+
# map vision model output size to language model input size.
- media_embeddings = self.vision_projection(media_embeddings) # [img_seq_len, num_tiles, h_vision]
+ media_embeddings = self.vision_projection(media_embeddings) # [img_seq_len, num_tiles, h_language]
- # If running inference, the language model KV cache will be updated for media token positions.
- # Here we store the media tokens sequence length, which can be used as an offset to the KV cache later.
+ # TODO: Support batched inference.
+ # In inference, the language model KV cache will be updated for image token positions.
+ # Store the image tokens sequence length to be used as an offset to the KV cache later.
if inference_params is not None:
inference_params.key_value_memory_dict["media_tokens_count"] = (
media_embeddings.shape[0] * media_embeddings.shape[1]
@@ -566,40 +527,61 @@ def forward(
media_embeddings = self.encoder_hidden_state
if not self.add_decoder:
- return media_embeddings, loss_mask
+ return media_embeddings
language_embeddings = None
if self.pre_process:
input_ids_text = input_ids.clone()
# MultiModal Token indices are assumed to be values
input_ids_text[input_ids_text < 0] = 0
- # Note: This adds absolute position embedding but not RoPE. Each image is counted as one position.
- # RoPE is added in language_model forward call. Each image embedding is one position.
+ # Note: This adds absolute position embedding but not RoPE.
+ # Each image is counted as one position.
+ # RoPE is added in language_model forward. Each image embedding is one position.
+ if self.sequence_parallel_lm:
+ # Pad to nearest multiple of TP world size for embedding.
+ tp_world_size = ps.get_tensor_model_parallel_world_size()
+ padded_seq_len = (
+ int((input_ids_text.shape[1] + tp_world_size - 1) // tp_world_size * tp_world_size)
+ - input_ids_text.shape[1]
+ )
+ if padded_seq_len != 0:
+ input_ids_text = torch.nn.functional.pad(input_ids_text, (0, padded_seq_len))
+ if position_ids is not None:
+ position_ids = torch.nn.functional.pad(position_ids, (0, padded_seq_len))
language_embeddings = self.language_model.embedding(
input_ids=input_ids_text, position_ids=position_ids
) # [text_seq_len, b, h_language]
+ if self.sequence_parallel_lm:
+ # Gather the language embeddings back.
+ # We use the full embedding to insert image embeddings
+ # and then scatter to avoid load imbalance.
+ language_embeddings = gather_from_sequence_parallel_region(
+ language_embeddings, tensor_parallel_output_grad=False
+ )
+ # Remove the padding done for SP as we'll need new padding calculation
+ # after image embeddings are inserted.
+ if padded_seq_len != 0:
+ language_embeddings = language_embeddings[:-padded_seq_len]
language_embeddings = language_embeddings.transpose(1, 0).contiguous() # [b, text_seq_len, h_language]
- if media is None:
- combined_embeddings = language_embeddings.transpose(1, 0).contiguous()
- final_labels = labels
- final_loss_mask = loss_mask
- else:
- # Assume 1 tile per image if the number of tiles is not provided.
- if num_media_tiles is None:
- num_media_tiles = torch.ones(media.shape[0], dtype=torch.int, device=input_ids.device)
-
- # Preprocess input, labels and loss mask.
- combined_embeddings, final_labels, final_loss_mask = self._preprocess_data(
- media_embeddings,
- language_embeddings,
- input_ids,
- loss_mask,
- labels,
- use_inference_kv_cache,
- media_token_index,
- num_media_tiles,
- ) # [combined_seq_len, b, h_language], [b, combined_seq_len], [b, combined_seq_len]
+ # Assume 1 tile per image if the number of tiles is not provided.
+ if num_media_tiles is None:
+ num_media_tiles = torch.ones(media.shape[0], dtype=torch.int, device=input_ids.device)
+ elif isinstance(num_media_tiles, list):
+ num_media_tiles = torch.tensor(num_media_tiles, dtype=torch.int, device=input_ids.device)
+
+ # Preprocess input, labels and loss mask.
+ combined_embeddings, final_labels, final_loss_mask, final_attention_mask = self._preprocess_data(
+ media_embeddings,
+ language_embeddings,
+ input_ids,
+ loss_mask,
+ labels,
+ use_inference_kv_cache,
+ media_token_index,
+ num_media_tiles,
+ attention_mask,
+ ) # [combined_seq_len, b, h_language], [b, combined_seq_len], [b, combined_seq_len]
output = self.language_model(
input_ids=None,
@@ -608,6 +590,7 @@ def forward(
decoder_input=combined_embeddings,
labels=final_labels,
inference_params=inference_params,
+ runtime_gather_output=runtime_gather_output,
)
if labels is None or loss_mask is None:
@@ -615,12 +598,28 @@ def forward(
return output, final_loss_mask.contiguous()
+ def set_input_tensor(self, input_tensor) -> None:
+ """Set model chunk input tensor."""
+ # This is usually handled in schedules.py but some inference code still
+ # gives us non-lists or None
+ if not isinstance(input_tensor, list):
+ input_tensor = [input_tensor]
+ assert len(input_tensor) == 1, 'input_tensor should only be length 1 for llava'
+
+ if self.add_encoder and self.add_decoder:
+ self.vision_model.set_input_tensor(input_tensor[0])
+ elif self.add_encoder:
+ self.vision_model.set_input_tensor(input_tensor[0])
+ elif self.pre_process:
+ self.encoder_hidden_state = input_tensor[0]
+ else:
+ self.language_model.set_input_tensor(input_tensor[0])
+
class NevaModel(L.LightningModule, io.IOMixin, io.ConnectorMixin, fn.FNMixin):
def __init__(
self,
config: NevaConfig,
- # TODO: Add transformer_layer_spec when we update mcore
optim: Optional[OptimizerModule] = None,
tokenizer: Optional["TokenizerSpec"] = None,
model_transform: Optional[Callable[[nn.Module], nn.Module]] = None,
@@ -647,6 +646,7 @@ def forward(
media: Optional[torch.Tensor] = None,
labels: Optional[torch.Tensor] = None,
inference_params: InferenceParams = None,
+ num_media_tiles: Optional[List[int]] = None,
) -> torch.Tensor:
output_tensor = self.module(
media=media,
@@ -656,6 +656,7 @@ def forward(
attention_mask=attention_mask,
labels=labels,
inference_params=inference_params,
+ num_media_tiles=num_media_tiles,
)
return output_tensor
@@ -695,6 +696,4 @@ def validation_loss_reduction(self) -> MaskedTokenLossReductionWithLossMask:
"NevaConfig",
"neva_data_step",
"neva_forward_step",
- "transformer_engine_layer_spec",
- "local_layer_spec",
]
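The image-token sequence lengths assigned to _img_seq_len above follow directly from the patch grid. A small worked check, assuming the usual (img_size // patch_dim) ** 2 computation; the 576 figure matches the hardcode this change removes:

    from nemo.collections.vlm.neva.model.base import get_image_sequence_length

    # CLIP ViT-L/14 at 336x336: (336 // 14) ** 2 = 24 * 24 = 576 patch tokens,
    # plus one class token when it is kept (drop_vision_class_token=False).
    seq_len = get_image_sequence_length(
        img_h=336, img_w=336, patch_dim=14, add_class_token=True, class_token_len=1
    )
    print(seq_len)  # expected: 577 with the class token, 576 without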
diff --git a/nemo/collections/vlm/neva/model/llava.py b/nemo/collections/vlm/neva/model/llava.py
index 52b55b6f9c2d..5e02b4f9e9d7 100644
--- a/nemo/collections/vlm/neva/model/llava.py
+++ b/nemo/collections/vlm/neva/model/llava.py
@@ -43,7 +43,7 @@ class LlavaConfig(NevaConfig):
@dataclass
-class Llava1_5Config7B(LlavaConfig):
+class Llava15Config7B(LlavaConfig):
from transformers import PretrainedConfig
language_transformer_config: TransformerConfig = field(default_factory=lambda: Llama2Config7B())
@@ -56,7 +56,7 @@ class Llava1_5Config7B(LlavaConfig):
@dataclass
-class Llava1_5Config13B(LlavaConfig):
+class Llava15Config13B(LlavaConfig):
from transformers import PretrainedConfig
language_transformer_config: TransformerConfig = field(default_factory=lambda: Llama2Config13B())
@@ -111,7 +111,6 @@ def convert_state(self, source, target):
"language_model.model.layers.*.post_attention_layernorm.weight": "language_model.decoder.layers.*.mlp.linear_fc1.layer_norm_weight",
"language_model.model.norm.weight": "language_model.decoder.final_layernorm.weight",
"language_model.lm_head.weight": "language_model.output_layer.weight",
- "vision_tower.vision_model.**": "vision_model.vision_model.**",
}
if "vision_projection.encoder.linear_fc1.weight" in target.module.state_dict().keys():
mapping.update(
@@ -134,7 +133,45 @@ def convert_state(self, source, target):
else:
raise KeyError("Unable to map vision projection keys.")
- return io.apply_transforms(source, target, mapping=mapping, transforms=[_import_qkv, _import_linear_fc1])
+ if "vision_model.vision_model.embeddings.class_embedding" in target.module.state_dict().keys():
+ mapping.update(
+ {
+ "vision_tower.vision_model.**": "vision_model.vision_model.**",
+ }
+ )
+ elif "vision_model.class_token" in target.module.state_dict().keys():
+ mapping.update(
+ {
+ "vision_tower.vision_model.embeddings.patch_embedding.weight": "vision_model.conv1.weight",
+ "vision_tower.vision_model.embeddings.position_embedding.weight": "vision_model.position_embeddings.weight",
+ "vision_tower.vision_model.encoder.layers.*.layer_norm1.weight": "vision_model.decoder.layers.*.self_attention.linear_qkv.layer_norm_weight",
+ "vision_tower.vision_model.encoder.layers.*.layer_norm1.bias": "vision_model.decoder.layers.*.self_attention.linear_qkv.layer_norm_bias",
+ "vision_tower.vision_model.encoder.layers.*.layer_norm2.weight": "vision_model.decoder.layers.*.mlp.linear_fc1.layer_norm_weight",
+ "vision_tower.vision_model.encoder.layers.*.layer_norm2.bias": "vision_model.decoder.layers.*.mlp.linear_fc1.layer_norm_bias",
+ "vision_tower.vision_model.encoder.layers.*.self_attn.out_proj.weight": "vision_model.decoder.layers.*.self_attention.linear_proj.weight",
+ "vision_tower.vision_model.encoder.layers.*.self_attn.out_proj.bias": "vision_model.decoder.layers.*.self_attention.linear_proj.bias",
+ "vision_tower.vision_model.encoder.layers.*.mlp.fc1.weight": "vision_model.decoder.layers.*.mlp.linear_fc1.weight",
+ "vision_tower.vision_model.encoder.layers.*.mlp.fc1.bias": "vision_model.decoder.layers.*.mlp.linear_fc1.bias",
+ "vision_tower.vision_model.encoder.layers.*.mlp.fc2.weight": "vision_model.decoder.layers.*.mlp.linear_fc2.weight",
+ "vision_tower.vision_model.encoder.layers.*.mlp.fc2.bias": "vision_model.decoder.layers.*.mlp.linear_fc2.bias",
+ "vision_tower.vision_model.pre_layrnorm.weight": "vision_model.ln_pre.weight",
+ "vision_tower.vision_model.pre_layrnorm.bias": "vision_model.ln_pre.bias",
+ }
+ )
+ else:
+ raise KeyError("Unable to map vision encoder keys.")
+ return io.apply_transforms(
+ source,
+ target,
+ mapping=mapping,
+ transforms=[
+ _import_language_qkv,
+ _import_vision_qkv,
+ _import_vision_qkv_bias,
+ _import_cls_token,
+ _import_linear_fc1,
+ ],
+ )
@property
def tokenizer(self) -> "AutoTokenizer":
@@ -183,80 +220,7 @@ def make_vocab_size_divisible_by(vocab_size):
return output
-@io.model_exporter(LlavaModel, "hf")
-class HFLlavaExporter(io.ModelConnector[LlavaModel, "LlavaForConditionalGeneration"]):
- def init(self) -> "LlavaForConditionalGeneration":
- raise NotImplementedError("Neva Exporter hasn't been verified!")
-
- from transformers import AutoModelForCausalLM
-
- return AutoModelForCausalLM.from_config(self.config)
-
- def apply(self, output_path: Path) -> Path:
- target = self.init()
- source, _ = self.nemo_load(str(self))
-
- target = self.convert_state(source, target)
-
- target = target.cpu()
- target.save_pretrained(output_path)
- self.tokenizer.save_pretrained(output_path)
-
- return output_path
-
- def convert_state(self, source, target):
- mapping = {
- "language_model.embedding.word_embeddings.weight": "language_model.model.embed_tokens.weight",
- "language_model.decoder.layers.*.self_attention.linear_proj.weight": "language_model.model.layers.*.self_attn.o_proj.weight",
- "language_model.decoder.layers.*.mlp.linear_fc2.weight": "language_model.model.layers.*.mlp.down_proj.weight",
- "language_model.decoder.layers.*.self_attention.linear_qkv.layer_norm_weight": "language_model.model.layers.*.input_layernorm.weight",
- "language_model.decoder.layers.*.mlp.linear_fc1.layer_norm_weight": "language_model.model.layers.*.post_attention_layernorm.weight",
- "language_model.decoder.final_layernorm.weight": "language_model.model.norm.weight",
- "language_model.output_layer.weight": "language_model.lm_head.weight",
- }
-
- return io.apply_transforms(source, target, mapping=mapping, transforms=[_export_qkv, _export_linear_fc1])
-
- @property
- def tokenizer(self):
- return io.load_context(str(self)).model.tokenizer.tokenizer
-
- @property
- def config(self) -> "HFLlavaConfig":
- source: LlavaConfig = io.load_context(str(self)).model.config
-
- from transformers import LlavaConfig as HFLlavaConfig
-
- return HFLlavaConfig(
- num_hidden_layers=source.num_layers,
- hidden_size=source.hidden_size,
- intermediate_size=source.ffn_hidden_size,
- num_attention_heads=source.num_attention_heads,
- max_position_embeddings=source.seq_length,
- initializer_range=source.init_method_std,
- rms_norm_eps=source.layernorm_epsilon,
- num_key_value_heads=source.num_query_groups,
- rope_theta=source.rotary_base,
- vocab_size=self.tokenizer.vocab_size,
- )
-
-
-@io.state_transform(
- source_key=(
- "language_model.model.layers.*.self_attn.q_proj.weight",
- "language_model.model.layers.*.self_attn.k_proj.weight",
- "language_model.model.layers.*.self_attn.v_proj.weight",
- ),
- target_key="language_model.decoder.layers.*.self_attention.linear_qkv.weight",
-)
-def _import_qkv(ctx: io.TransformCTX, q, k, v):
- megatron_config = ctx.target.config.language_transformer_config
- head_num = megatron_config.num_attention_heads
- num_query_groups = megatron_config.num_query_groups
- heads_per_group = head_num // num_query_groups
- hidden_size = megatron_config.hidden_size
- head_size = megatron_config.kv_channels
-
+def import_qkv(q, k, v, head_num, num_query_groups, heads_per_group, hidden_size, head_size):
old_tensor_shape = q.size()
new_q_tensor_shape = (head_num, head_size) + old_tensor_shape[1:]
new_kv_tensor_shape = (num_query_groups, head_size) + old_tensor_shape[1:]
@@ -282,59 +246,85 @@ def _import_qkv(ctx: io.TransformCTX, q, k, v):
@io.state_transform(
- source_key="language_model.decoder.layers.*.self_attention.linear_qkv.weight",
- target_key=(
+ source_key=(
"language_model.model.layers.*.self_attn.q_proj.weight",
"language_model.model.layers.*.self_attn.k_proj.weight",
"language_model.model.layers.*.self_attn.v_proj.weight",
),
+ target_key="language_model.decoder.layers.*.self_attention.linear_qkv.weight",
)
-def _export_qkv(ctx: io.TransformCTX, linear_qkv):
- megatron_config = ctx.source.config
-
- head_num = megatron_config.num_attention_heads
- num_query_groups = megatron_config.num_query_groups
- heads_per_group = head_num // num_query_groups
- hidden_size = megatron_config.hidden_size
- head_size = megatron_config.kv_channels
- qkv_total_dim = head_num + 2 * num_query_groups
-
- linear_qkv = linear_qkv.reshape([qkv_total_dim, head_size, hidden_size])
- q_slice = torch.cat(
- [
- torch.arange((heads_per_group + 2) * i, (heads_per_group + 2) * i + heads_per_group)
- for i in range(num_query_groups)
- ]
+def _import_language_qkv(ctx: io.TransformCTX, q, k, v):
+ megatron_config = ctx.target.config.language_transformer_config
+ return import_qkv(
+ q,
+ k,
+ v,
+ head_num=megatron_config.num_attention_heads,
+ num_query_groups=megatron_config.num_query_groups,
+ heads_per_group=megatron_config.num_attention_heads // megatron_config.num_query_groups,
+ hidden_size=megatron_config.hidden_size,
+ head_size=megatron_config.kv_channels,
)
- k_slice = torch.arange(heads_per_group, qkv_total_dim, (heads_per_group + 2))
- v_slice = torch.arange(heads_per_group + 1, qkv_total_dim, (heads_per_group + 2))
- q_proj = linear_qkv[q_slice].reshape(-1, hidden_size).cpu()
- k_proj = linear_qkv[k_slice].reshape(-1, hidden_size).cpu()
- v_proj = linear_qkv[v_slice].reshape(-1, hidden_size).cpu()
- return q_proj, k_proj, v_proj
+@io.state_transform(
+ source_key=(
+ "vision_tower.vision_model.encoder.layers.*.self_attn.q_proj.weight",
+ "vision_tower.vision_model.encoder.layers.*.self_attn.k_proj.weight",
+ "vision_tower.vision_model.encoder.layers.*.self_attn.v_proj.weight",
+ ),
+ target_key="vision_model.decoder.layers.*.self_attention.linear_qkv.weight",
+)
+def _import_vision_qkv(ctx: io.TransformCTX, q, k, v):
+ megatron_config = ctx.target.config.vision_transformer_config
+ return import_qkv(
+ q,
+ k,
+ v,
+ head_num=megatron_config.num_attention_heads,
+ num_query_groups=megatron_config.num_query_groups,
+ heads_per_group=megatron_config.num_attention_heads // megatron_config.num_query_groups,
+ hidden_size=megatron_config.hidden_size,
+ head_size=megatron_config.kv_channels,
+ )
@io.state_transform(
source_key=(
- "language_model.model.layers.*.mlp.gate_proj.weight",
- "language_model.model.layers.*.mlp.up_proj.weight",
+ "vision_tower.vision_model.encoder.layers.*.self_attn.q_proj.bias",
+ "vision_tower.vision_model.encoder.layers.*.self_attn.k_proj.bias",
+ "vision_tower.vision_model.encoder.layers.*.self_attn.v_proj.bias",
),
- target_key="language_model.decoder.layers.*.mlp.linear_fc1.weight",
+ target_key="vision_model.decoder.layers.*.self_attention.linear_qkv.bias",
)
-def _import_linear_fc1(down, gate):
- return torch.cat((down, gate), axis=0)
+def _import_vision_qkv_bias(ctx: io.TransformCTX, q_bias, k_bias, v_bias):
+ megatron_config = ctx.target.config.vision_transformer_config
+ return import_qkv(
+ q_bias.unsqueeze(-1),
+ k_bias.unsqueeze(-1),
+ v_bias.unsqueeze(-1),
+ head_num=megatron_config.num_attention_heads,
+ num_query_groups=megatron_config.num_query_groups,
+ heads_per_group=megatron_config.num_attention_heads // megatron_config.num_query_groups,
+ hidden_size=1,
+ head_size=megatron_config.kv_channels,
+ ).squeeze(-1)
+
+
+@io.state_transform(
+ source_key=("vision_tower.vision_model.embeddings.class_embedding",),
+ target_key="vision_model.class_token",
+)
+def _import_cls_token(ctx: io.TransformCTX, cls_token):
+ return cls_token.reshape(1, 1, -1)
@io.state_transform(
- source_key="language_model.decoder.layers.*.mlp.linear_fc1.weight",
- target_key=(
+ source_key=(
"language_model.model.layers.*.mlp.gate_proj.weight",
"language_model.model.layers.*.mlp.up_proj.weight",
),
+ target_key="language_model.decoder.layers.*.mlp.linear_fc1.weight",
)
-def _export_linear_fc1(linear_fc1):
- gate_proj, up_proj = torch.chunk(linear_fc1, 2, dim=0)
-
- return gate_proj, up_proj
+def _import_linear_fc1(down, gate):
+ return torch.cat((down, gate), axis=0)
diff --git a/nemo/collections/vlm/neva/model/vit_config.py b/nemo/collections/vlm/neva/model/vit_config.py
new file mode 100644
index 000000000000..5d60a84313ca
--- /dev/null
+++ b/nemo/collections/vlm/neva/model/vit_config.py
@@ -0,0 +1,79 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from dataclasses import dataclass
+from typing import Callable
+
+from nemo.collections.llm.fn.activation import openai_gelu, quick_gelu
+
+from nemo.collections.vlm.neva.model.base import CLIPViTConfig
+
+
+@dataclass
+class CLIPViTL_14_336_Config(CLIPViTConfig):
+ """Clip vit large patch14 config"""
+
+ vision_model_type = "clip"
+ patch_dim = 14
+ img_h = 336
+ img_w = 336
+ num_layers = 24
+ num_attention_heads = 16
+ add_bias_linear = True
+ add_qkv_bias = True
+ hidden_size = 1024
+ hidden_dropout = 0.0
+ attention_dropout = 0.0
+ ffn_hidden_size = 4096
+ gated_linear_unit = False
+ activation_func = quick_gelu
+ kv_channels = 64
+ num_query_groups = 16
+ layernorm_zero_centered_gamma = False
+ apply_query_key_layer_scaling = False
+ bias_activation_fusion = False
+ bias_dropout_fusion = False
+ attention_softmax_in_fp32 = True
+ normalization = 'LayerNorm'
+ apply_rope_fusion = False
+
+
+@dataclass
+class SigLIPViT400M_14_384_Config(CLIPViTConfig):
+ """Siglip so400m patch14 384 config"""
+
+ vision_model_type = "siglip"
+ patch_dim = 14
+ img_h = 384
+ img_w = 384
+ num_layers = 27
+ num_attention_heads = 16
+ add_bias_linear = True
+ add_qkv_bias = True
+ hidden_size = 1152
+ hidden_dropout = 0.0
+ attention_dropout = 0.0
+ ffn_hidden_size = 4304
+ gated_linear_unit = False
+ activation_func = openai_gelu
+ kv_channels = 72
+ num_query_groups = 16
+ layernorm_zero_centered_gamma = False
+ apply_query_key_layer_scaling = False
+ bias_activation_fusion = False
+ bias_dropout_fusion = False
+ attention_softmax_in_fp32 = True
+ normalization = 'LayerNorm'
+ apply_rope_fusion = False
+ qk_layernorm = False
+ layernorm_epsilon = 1e-6
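A minimal sketch of selecting one of these configs; it assumes Megatron's parallel state is already initialized, which configure_model requires:

    from nemo.collections.vlm.neva.model.vit_config import (
        CLIPViTL_14_336_Config,
        SigLIPViT400M_14_384_Config,
    )

    # The siglip variant drops the class token via CLIPViTConfig.__post_init__
    # (add_class_token=False, class_token_len=0).
    vision_cfg = SigLIPViT400M_14_384_Config()
    vision_model = vision_cfg.configure_model()  # builds a CLIPViTModel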
diff --git a/nemo/collections/vlm/peft/__init__.py b/nemo/collections/vlm/peft/__init__.py
new file mode 100644
index 000000000000..ab0c451a7d9d
--- /dev/null
+++ b/nemo/collections/vlm/peft/__init__.py
@@ -0,0 +1,17 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from nemo.collections.vlm.peft.lora import LoRA
+
+__all__ = ["LoRA"]
diff --git a/nemo/collections/vlm/peft/lora.py b/nemo/collections/vlm/peft/lora.py
new file mode 100644
index 000000000000..7a80b7e06883
--- /dev/null
+++ b/nemo/collections/vlm/peft/lora.py
@@ -0,0 +1,62 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from dataclasses import dataclass
+
+from torch import nn
+
+from nemo.collections.llm.peft.lora import LoRA as LLMLoRA
+
+
+@dataclass
+class LoRA(LLMLoRA):
+ """
+ Built on top of llm.LoRA, vlm.LoRA additionally allows the user to specify whether the language or vision
+ models should be frozen.
+ For example, a common finetuning workload for multimodal models is to apply adapters to the language model and
+ fully finetune the vision model.
+
+ For detailed usage of the LoRA API, see the llm.LoRA docstrings.
+
+ Example:
+ --------
+ >>> from nemo.collections import vlm
+ >>> lora = vlm.peft.LoRA(target_modules=["*.language_model.*.linear_qkv"], freeze_vision_model=False, dim=32)
+ >>> model = vlm.MLlamaModel(model_transform=lora)
+ >>> # (set up trainer and data)
+ >>> trainer.fit(model, data)
+
+ References:
+ -----------
+ Hu, E. J., Shen, Y., Wallis, P., Allen-Zhu, Z., Li, Y., Wang, S., Wang, L., & Chen, W. (2021).
+ LoRA: Low-Rank Adaptation of Large Language Models. arXiv preprint arXiv:2106.09685.
+ https://arxiv.org/abs/2106.09685
+
+ """
+
+ freeze_language_model: bool = True
+ freeze_vision_model: bool = True
+
+ def freeze_model(self, model: nn.Module) -> None:
+ modules = []
+ if self.freeze_language_model and model.module.module.language_model is not None:
+ modules.append(model.module.module.language_model)
+ if self.freeze_vision_model and model.module.module.vision_model is not None:
+ modules.append(model.module.module.vision_model)
+
+ for module in modules:
+ for param in module.parameters():
+ param.requires_grad = False
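
Reviewer note: a minimal usage sketch of the class added above, mirroring its docstring example (the target pattern and `dim=32` are illustrative, not prescribed defaults):

```python
from nemo.collections import vlm

# Adapt only the language model's attention projections with LoRA; the vision
# tower remains fully trainable because freeze_vision_model is False.
lora = vlm.peft.LoRA(
    target_modules=["*.language_model.*.linear_qkv"],  # wildcard module matching
    freeze_language_model=True,
    freeze_vision_model=False,
    dim=32,
)
```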
diff --git a/nemo/collections/vlm/recipes/__init__.py b/nemo/collections/vlm/recipes/__init__.py
new file mode 100644
index 000000000000..ba8706437c56
--- /dev/null
+++ b/nemo/collections/vlm/recipes/__init__.py
@@ -0,0 +1,23 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from nemo.collections.vlm.recipes import llava15_7b, llava15_13b, mllama_11b, mllama_90b
+
+__all__ = [
+ "llava15_7b",
+ "llava15_13b",
+ "mllama_11b",
+ "mllama_90b",
+]
diff --git a/nemo/collections/vlm/recipes/llava15_13b.py b/nemo/collections/vlm/recipes/llava15_13b.py
new file mode 100644
index 000000000000..97b77b82d3de
--- /dev/null
+++ b/nemo/collections/vlm/recipes/llava15_13b.py
@@ -0,0 +1,150 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from typing import Optional
+
+import nemo_run as run
+import pytorch_lightning as pl
+import torch
+
+from nemo import lightning as nl
+from nemo.collections import llm, vlm
+from nemo.collections.llm.recipes.finetune_default import nemo_resume
+from nemo.collections.llm.recipes.log.default import tensorboard_logger
+from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
+from nemo.collections.llm.recipes.precision.mixed_precision import bf16_mixed
+from nemo.collections.vlm.neva.data.mock import MockDataModule
+
+NAME = "llava15_13b"
+
+
+@run.cli.factory(name=NAME)
+def model() -> run.Config[pl.LightningModule]:
+ """
+ Factory function to create a Llava 1.5 13B model configuration.
+
+ Returns:
+        run.Config[pl.LightningModule]: Configuration for the Llava 1.5 13B model.
+
+ Examples:
+ CLI usage:
+ $ nemo llm pretrain model=llava15_13b ...
+
+ Python API usage:
+ >>> model_config = model()
+ >>> print(model_config)
+ """
+ return run.Config(vlm.LlavaModel, config=run.Config(vlm.Llava15Config13B))
+
+
+@run.cli.factory(target=llm.finetune, name=NAME)
+def finetune_recipe(
+ dir: Optional[str] = None,
+ name: str = "default",
+ num_nodes: int = 1,
+ num_gpus_per_node: int = 8,
+ peft_scheme: Optional[str] = 'lora',
+) -> run.Partial:
+ """
+    Create a fine-tuning recipe for the Llava 1.5 13B model.
+
+ This function sets up a complete configuration for fine-tuning, including
+ model, trainer, data, logging, optimization, and resumption settings.
+ The recipe uses LoRA (Low-Rank Adaptation) for efficient fine-tuning, unless peft_scheme is set to None.
+
+ Args:
+ dir (Optional[str]): Directory for saving logs and checkpoints.
+ name (str): Name of the fine-tuning run.
+ num_nodes (int): Number of compute nodes to use.
+        num_gpus_per_node (int): Number of GPUs per node.
+        peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora'/'none'/None.
+
+ Returns:
+ run.Partial: Partial configuration for fine-tuning.
+
+ Examples:
+ CLI usage:
+ $ nemo llm finetune --factory llava15_13b
+
+ Python API usage:
+ >>> recipe = finetune_recipe(name="llava15_13b_finetune", num_nodes=1)
+ >>> print(recipe)
+
+ Note:
+        This recipe uses a mock dataset for fine-tuning. For more information
+ on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
+ `examples/llm/finetune/` directory.
+ """
+
+ strategy = run.Config(
+ nl.MegatronStrategy,
+ tensor_model_parallel_size=1,
+ pipeline_model_parallel_size=1,
+ encoder_pipeline_model_parallel_size=0,
+ pipeline_dtype=torch.bfloat16,
+ )
+
+ trainer = run.Config(
+ nl.Trainer,
+ accelerator="gpu",
+ accumulate_grad_batches=1,
+ devices=num_gpus_per_node,
+ limit_val_batches=10,
+ log_every_n_steps=1,
+ max_steps=5190,
+ num_nodes=num_nodes,
+ plugins=bf16_mixed(),
+ strategy=strategy,
+ val_check_interval=1000,
+ )
+
+ recipe = run.Partial(
+ llm.finetune,
+ model=model(),
+ trainer=trainer,
+ data=run.Config(
+ MockDataModule,
+ seq_length=4096,
+ global_batch_size=128,
+ micro_batch_size=1,
+ tokenizer=None,
+ image_processor=None,
+ num_workers=4,
+ ),
+ log=llm.default_log(dir=dir, name=name, tensorboard_logger=tensorboard_logger(name=name)),
+ optim=distributed_fused_adam_with_cosine_annealing(max_lr=2.0e-05, min_lr=2.0e-07, warmup_steps=150),
+ resume=nemo_resume("llava-hf/llava-1.5-13b-hf"),
+ )
+
+ if peft_scheme is None or peft_scheme.lower() == 'none':
+ recipe.trainer.strategy.tensor_model_parallel_size = 2
+ recipe.optim.config.lr = 2e-05
+ elif peft_scheme.lower() == 'lora':
+ recipe.peft = run.Config(
+ vlm.LoRA,
+ freeze_vision_model=False,
+ target_modules=[
+ "*.language_model.*.linear_qkv",
+ "*.language_model.*.linear_q",
+ "*.language_model.*.linear_kv",
+ "*.language_model.*.linear_proj",
+ "*.language_model.*.linear_fc1",
+ "*.language_model.*.linear_fc2",
+ ],
+ )
+ recipe.optim.config.lr = 1e-4
+ else:
+ raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
+
+ return recipe
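
A hedged sketch of consuming this factory from Python (it assumes `nemo_run`'s `LocalExecutor`; the overrides are illustrative). Every field of the returned `run.Partial` is addressable with attribute access, just as the function body above does:

```python
import nemo_run as run

from nemo.collections.vlm.recipes import llava15_13b

# Build the default LoRA recipe, then override fields before launching.
recipe = llava15_13b.finetune_recipe(name="llava15_13b_lora", num_nodes=1)
recipe.trainer.max_steps = 100      # shorten the run for a smoke test
recipe.data.global_batch_size = 32  # illustrative override

run.run(recipe, executor=run.LocalExecutor())
```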
diff --git a/nemo/collections/vlm/recipes/llava15_7b.py b/nemo/collections/vlm/recipes/llava15_7b.py
new file mode 100644
index 000000000000..04e6bd36f4d4
--- /dev/null
+++ b/nemo/collections/vlm/recipes/llava15_7b.py
@@ -0,0 +1,152 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from typing import Optional
+
+import nemo_run as run
+import pytorch_lightning as pl
+import torch
+
+from nemo import lightning as nl
+from nemo.collections import llm, vlm
+from nemo.collections.llm.recipes.finetune_default import nemo_resume
+from nemo.collections.llm.recipes.log.default import tensorboard_logger
+from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
+from nemo.collections.llm.recipes.precision.mixed_precision import bf16_mixed
+from nemo.collections.vlm.neva.data.mock import MockDataModule
+from nemo.utils.exp_manager import TimingCallback
+
+NAME = "llava15_7b"
+
+
+@run.cli.factory(name=NAME)
+def model() -> run.Config[pl.LightningModule]:
+ """
+ Factory function to create a Llava 1.5 7B model configuration.
+
+ Returns:
+        run.Config[pl.LightningModule]: Configuration for the Llava 1.5 7B model.
+
+ Examples:
+ CLI usage:
+ $ nemo llm pretrain model=llava15_7b ...
+
+ Python API usage:
+ >>> model_config = model()
+ >>> print(model_config)
+ """
+ return run.Config(vlm.LlavaModel, config=run.Config(vlm.Llava15Config7B))
+
+
+@run.cli.factory(target=llm.finetune, name=NAME)
+def finetune_recipe(
+ dir: Optional[str] = None,
+ name: str = "default",
+ num_nodes: int = 1,
+ num_gpus_per_node: int = 8,
+ peft_scheme: Optional[str] = 'none',
+) -> run.Partial:
+ """
+    Create a fine-tuning recipe for the Llava 1.5 7B model.
+
+ This function sets up a complete configuration for fine-tuning, including
+ model, trainer, data, logging, optimization, and resumption settings.
+ The recipe uses LoRA (Low-Rank Adaptation) for efficient fine-tuning, unless peft_scheme is set to None.
+
+ Args:
+ dir (Optional[str]): Directory for saving logs and checkpoints.
+ name (str): Name of the fine-tuning run.
+ num_nodes (int): Number of compute nodes to use.
+        num_gpus_per_node (int): Number of GPUs per node.
+        peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora'/'none'/None.
+
+ Returns:
+ run.Partial: Partial configuration for fine-tuning.
+
+ Examples:
+ CLI usage:
+ $ nemo llm finetune --factory llava15_7b
+
+ Python API usage:
+ >>> recipe = finetune_recipe(name="llava15_7b_finetune", num_nodes=1)
+ >>> print(recipe)
+
+ Note:
+        This recipe uses a mock dataset for fine-tuning. For more information
+ on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
+ `examples/llm/finetune/` directory.
+ """
+
+ strategy = run.Config(
+ nl.MegatronStrategy,
+ tensor_model_parallel_size=1,
+ pipeline_model_parallel_size=1,
+ encoder_pipeline_model_parallel_size=0,
+ pipeline_dtype=torch.bfloat16,
+ )
+
+ trainer = run.Config(
+ nl.Trainer,
+ accelerator="gpu",
+ accumulate_grad_batches=1,
+ devices=num_gpus_per_node,
+ limit_val_batches=10,
+ log_every_n_steps=1,
+ max_steps=5190,
+ num_nodes=num_nodes,
+ plugins=bf16_mixed(),
+ strategy=strategy,
+ val_check_interval=1000,
+ callbacks=[run.Config(TimingCallback)],
+ )
+
+ recipe = run.Partial(
+ llm.finetune,
+ model=model(),
+ trainer=trainer,
+ data=run.Config(
+ MockDataModule,
+ seq_length=4096,
+ global_batch_size=128,
+ micro_batch_size=2,
+ tokenizer=None,
+ image_processor=None,
+ num_workers=4,
+ ),
+ log=llm.default_log(dir=dir, name=name, tensorboard_logger=tensorboard_logger(name=name)),
+ optim=distributed_fused_adam_with_cosine_annealing(max_lr=2.0e-05, min_lr=2.0e-07, warmup_steps=150),
+ resume=nemo_resume("llava-hf/llava-1.5-7b-hf"),
+ )
+
+ if peft_scheme is None or peft_scheme.lower() == 'none':
+ recipe.trainer.strategy.tensor_model_parallel_size = 2
+ recipe.optim.config.lr = 2e-05
+ elif peft_scheme.lower() == 'lora':
+ recipe.peft = run.Config(
+ vlm.LoRA,
+ freeze_vision_model=False,
+ target_modules=[
+ "*.language_model.*.linear_qkv",
+ "*.language_model.*.linear_q",
+ "*.language_model.*.linear_kv",
+ "*.language_model.*.linear_proj",
+ "*.language_model.*.linear_fc1",
+ "*.language_model.*.linear_fc2",
+ ],
+ )
+ recipe.optim.config.lr = 1e-4
+ else:
+ raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
+
+ return recipe
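
For clarity on the `peft_scheme` branch above, a small sketch (values reflect the recipe body): 'lora' attaches adapters at lr=1e-4, while 'none'/None switches to full finetuning with TP=2 and lr=2e-5:

```python
from nemo.collections.vlm.recipes import llava15_7b

lora_recipe = llava15_7b.finetune_recipe(peft_scheme="lora")  # adapters, lr=1e-4
full_recipe = llava15_7b.finetune_recipe(peft_scheme="none")  # full finetune, TP=2, lr=2e-5
```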
diff --git a/nemo/collections/vlm/recipes/mllama_11b.py b/nemo/collections/vlm/recipes/mllama_11b.py
new file mode 100644
index 000000000000..e4842ae63d52
--- /dev/null
+++ b/nemo/collections/vlm/recipes/mllama_11b.py
@@ -0,0 +1,151 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from typing import Optional
+
+import lightning.pytorch as pl
+import nemo_run as run
+import torch
+
+from nemo import lightning as nl
+from nemo.collections import llm, vlm
+from nemo.collections.llm.recipes.finetune_default import nemo_resume
+from nemo.collections.llm.recipes.log.default import tensorboard_logger
+from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
+from nemo.collections.llm.recipes.precision.mixed_precision import bf16_mixed
+from nemo.collections.vlm.mllama.data.mock import MockDataModule
+
+NAME = "mllama_11b"
+
+
+@run.cli.factory(name=NAME)
+def model() -> run.Config[pl.LightningModule]:
+ """
+ Factory function to create a Llama-3.2-Vision 11B model configuration.
+
+ Returns:
+ run.Config[pl.LightningModule]: Configuration for the Llama-3.2-Vision 11B model.
+
+ Examples:
+ CLI usage:
+ $ nemo llm pretrain model=mllama_11b ...
+
+ Python API usage:
+ >>> model_config = model()
+ >>> print(model_config)
+ """
+ return run.Config(vlm.MLlamaModel, config=run.Config(vlm.MLlamaConfig11B))
+
+
+@run.cli.factory(target=llm.finetune, name=NAME)
+def finetune_recipe(
+ dir: Optional[str] = None,
+ name: str = "default",
+ num_nodes: int = 1,
+ num_gpus_per_node: int = 8,
+ peft_scheme: Optional[str] = 'lora',
+) -> run.Partial:
+ """
+    Create a fine-tuning recipe for the Llama-3.2-Vision 11B model.
+
+ This function sets up a complete configuration for fine-tuning, including
+ model, trainer, data, logging, optimization, and resumption settings.
+ The recipe uses LoRA (Low-Rank Adaptation) for efficient fine-tuning, unless peft_scheme is set to None.
+
+ Args:
+ dir (Optional[str]): Directory for saving logs and checkpoints.
+ name (str): Name of the fine-tuning run.
+ num_nodes (int): Number of compute nodes to use.
+        num_gpus_per_node (int): Number of GPUs per node.
+        peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Allowed values: 'lora'/'none'/None.
+
+ Returns:
+ run.Partial: Partial configuration for fine-tuning.
+
+ Examples:
+ CLI usage:
+ $ nemo llm finetune --factory mllama_11b
+
+ Python API usage:
+ >>> recipe = finetune_recipe(name="mllama_11b_finetune", num_nodes=1)
+ >>> print(recipe)
+
+ Note:
+        This recipe uses a mock dataset for fine-tuning. For more information
+ on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
+ `examples/llm/finetune/` directory.
+ """
+
+ strategy = run.Config(
+ nl.MegatronStrategy,
+ tensor_model_parallel_size=1,
+ pipeline_model_parallel_size=1,
+ encoder_pipeline_model_parallel_size=0,
+ pipeline_dtype=torch.bfloat16,
+ )
+
+ trainer = run.Config(
+ nl.Trainer,
+ accelerator="gpu",
+ accumulate_grad_batches=1,
+ devices=num_gpus_per_node,
+ limit_val_batches=2,
+ log_every_n_steps=10,
+ max_steps=5190,
+ num_nodes=num_nodes,
+ plugins=bf16_mixed(),
+ strategy=strategy,
+ val_check_interval=100,
+ )
+
+ recipe = run.Partial(
+ llm.finetune,
+ model=model(),
+ trainer=trainer,
+ data=run.Config(
+ MockDataModule,
+ seq_length=4100, # encoder (vision) seq length
+ decoder_seq_length=512, # decoder (llm) seq length
+ global_batch_size=16,
+ micro_batch_size=2,
+ vocab_size=128256,
+ crop_size=(448, 448),
+ num_workers=0,
+ ),
+ log=llm.default_log(dir=dir, name=name, tensorboard_logger=tensorboard_logger(name=name)),
+ optim=distributed_fused_adam_with_cosine_annealing(max_lr=1e-4, min_lr=2.0e-07, warmup_steps=150),
+ resume=nemo_resume("meta-llama/Llama-3.2-11B-Vision"),
+ )
+
+ if peft_scheme is None or peft_scheme.lower() == 'none':
+ recipe.trainer.strategy.tensor_model_parallel_size = 2
+ recipe.optim.config.lr = 2e-05
+ elif peft_scheme.lower() == 'lora':
+ recipe.peft = run.Config(
+ vlm.LoRA,
+ freeze_vision_model=False,
+ target_modules=[
+ "*.language_model.*.linear_qkv",
+ "*.language_model.*.linear_q",
+ "*.language_model.*.linear_kv",
+ "*.language_model.*.linear_proj",
+ "*.language_model.*.linear_fc1",
+ "*.language_model.*.linear_fc2",
+ ],
+ )
+ recipe.optim.config.lr = 1e-4
+ else:
+ raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
+
+ return recipe
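
The mock data module used above distinguishes encoder and decoder sequence lengths; a standalone sketch with the same values as the recipe (all kwargs come straight from the `run.Config` above):

```python
from nemo.collections.vlm.mllama.data.mock import MockDataModule

data = MockDataModule(
    seq_length=4100,         # encoder (vision) sequence length
    decoder_seq_length=512,  # decoder (llm) sequence length
    global_batch_size=16,
    micro_batch_size=2,
    vocab_size=128256,
    crop_size=(448, 448),
    num_workers=0,
)
```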
diff --git a/nemo/collections/vlm/recipes/mllama_90b.py b/nemo/collections/vlm/recipes/mllama_90b.py
new file mode 100644
index 000000000000..28a6ff7ff9a6
--- /dev/null
+++ b/nemo/collections/vlm/recipes/mllama_90b.py
@@ -0,0 +1,150 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from typing import Optional
+
+import lightning.pytorch as pl
+import nemo_run as run
+import torch
+
+from nemo import lightning as nl
+from nemo.collections import llm, vlm
+from nemo.collections.llm.recipes.finetune_default import nemo_resume
+from nemo.collections.llm.recipes.log.default import tensorboard_logger
+from nemo.collections.llm.recipes.optim.adam import distributed_fused_adam_with_cosine_annealing
+from nemo.collections.llm.recipes.precision.mixed_precision import bf16_mixed
+from nemo.collections.vlm.mllama.data.mock import MockDataModule
+
+NAME = "mllama_90b"
+
+
+@run.cli.factory(name=NAME)
+def model() -> run.Config[pl.LightningModule]:
+ """
+ Factory function to create a Llama-3.2-Vision 90B model configuration.
+
+ Returns:
+ run.Config[pl.LightningModule]: Configuration for the Llama-3.2-Vision 90B model.
+
+ Examples:
+ CLI usage:
+ $ nemo llm pretrain model=mllama_90b ...
+
+ Python API usage:
+ >>> model_config = model()
+ >>> print(model_config)
+ """
+ return run.Config(vlm.MLlamaModel, config=run.Config(vlm.MLlamaConfig90B))
+
+
+@run.cli.factory(target=llm.finetune, name=NAME)
+def finetune_recipe(
+ dir: Optional[str] = None,
+ name: str = "default",
+ num_nodes: int = 1,
+ num_gpus_per_node: int = 8,
+ peft_scheme: Optional[str] = 'lora',
+) -> run.Partial:
+ """
+    Create a fine-tuning recipe for the Llama-3.2-Vision 90B model.
+
+ This function sets up a complete configuration for fine-tuning, including
+ model, trainer, data, logging, optimization, and resumption settings.
+ The recipe uses LoRA (Low-Rank Adaptation) for efficient fine-tuning, unless peft_scheme is set to None.
+
+ Args:
+ dir (Optional[str]): Directory for saving logs and checkpoints.
+ name (str): Name of the fine-tuning run.
+ num_nodes (int): Number of compute nodes to use.
+        num_gpus_per_node (int): Number of GPUs per node.
+        peft_scheme (Optional[str]): Name of the peft scheme to use for fine-tuning. Only 'lora' is currently supported.
+
+ Returns:
+ run.Partial: Partial configuration for fine-tuning.
+
+ Examples:
+ CLI usage:
+ $ nemo llm finetune --factory mllama_90b
+
+ Python API usage:
+ >>> recipe = finetune_recipe(name="mllama_90b_finetune", num_nodes=1)
+ >>> print(recipe)
+
+ Note:
+        This recipe uses a mock dataset for fine-tuning. For more information
+ on fine-tuning LLMs with NeMo, see the fine-tuning guide in the
+ `examples/llm/finetune/` directory.
+ """
+
+ strategy = run.Config(
+ nl.MegatronStrategy,
+ tensor_model_parallel_size=8,
+ pipeline_model_parallel_size=1,
+ encoder_pipeline_model_parallel_size=0,
+ pipeline_dtype=torch.bfloat16,
+ )
+
+ trainer = run.Config(
+ nl.Trainer,
+ accelerator="gpu",
+ accumulate_grad_batches=1,
+ devices=num_gpus_per_node,
+ limit_val_batches=2,
+ log_every_n_steps=10,
+ max_steps=5190,
+ num_nodes=num_nodes,
+ plugins=bf16_mixed(),
+ strategy=strategy,
+ val_check_interval=100,
+ )
+
+ recipe = run.Partial(
+ llm.finetune,
+ model=model(),
+ trainer=trainer,
+ data=run.Config(
+ MockDataModule,
+ seq_length=6404, # encoder (vision) seq length
+ decoder_seq_length=512, # decoder (llm) seq length
+ global_batch_size=16,
+ micro_batch_size=2,
+ vocab_size=128256,
+ crop_size=(560, 560),
+ num_workers=0,
+ ),
+ log=llm.default_log(dir=dir, name=name, tensorboard_logger=tensorboard_logger(name=name)),
+ optim=distributed_fused_adam_with_cosine_annealing(max_lr=1e-4, min_lr=2.0e-07, warmup_steps=150),
+ resume=nemo_resume("meta-llama/Llama-3.2-90B-Vision"),
+ )
+
+ if peft_scheme is None or peft_scheme.lower() == 'none':
+ raise ValueError("Full finetuning recipe for Llama-3.2-90B model will be supported soon.")
+ elif peft_scheme.lower() == 'lora':
+ recipe.peft = run.Config(
+ vlm.LoRA,
+ freeze_vision_model=False,
+ target_modules=[
+ "*.language_model.*.linear_qkv",
+ "*.language_model.*.linear_q",
+ "*.language_model.*.linear_kv",
+ "*.language_model.*.linear_proj",
+ "*.language_model.*.linear_fc1",
+ "*.language_model.*.linear_fc2",
+ ],
+ )
+ recipe.optim.config.lr = 1e-4
+ else:
+ raise ValueError(f"Unrecognized peft scheme: {peft_scheme}")
+
+ return recipe
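
Since the 90B recipe raises for `peft_scheme='none'`, only the LoRA path is usable today; a minimal sketch (single node, eight GPUs, matching the TP=8 strategy above):

```python
from nemo.collections.vlm.recipes import mllama_90b

# Only LoRA is supported for now; peft_scheme='none' raises a ValueError.
recipe = mllama_90b.finetune_recipe(
    name="mllama_90b_lora",
    num_nodes=1,
    num_gpus_per_node=8,  # must cover tensor_model_parallel_size=8
)
```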
diff --git a/nemo/core/classes/__init__.py b/nemo/core/classes/__init__.py
index 3a6db2602648..e773972c6d7b 100644
--- a/nemo/core/classes/__init__.py
+++ b/nemo/core/classes/__init__.py
@@ -14,8 +14,8 @@
import hydra
+import lightning.pytorch
import omegaconf
-import pytorch_lightning
from nemo.core.classes.common import (
FileIO,
diff --git a/nemo/core/classes/exportable.py b/nemo/core/classes/exportable.py
index aab09d42d907..ba284e7c28cd 100644
--- a/nemo/core/classes/exportable.py
+++ b/nemo/core/classes/exportable.py
@@ -15,7 +15,7 @@
from typing import Dict, List, Optional, Union
import torch
-from pytorch_lightning.core.module import _jit_is_scripting
+from lightning.pytorch.core.module import _jit_is_scripting
from nemo.core.classes import typecheck
from nemo.core.neural_types import NeuralType
diff --git a/nemo/core/classes/modelPT.py b/nemo/core/classes/modelPT.py
index 5b8d414ac85b..88ff47caf8c2 100644
--- a/nemo/core/classes/modelPT.py
+++ b/nemo/core/classes/modelPT.py
@@ -35,9 +35,9 @@
HAVE_MEGATRON_CORE = False
+from lightning.pytorch import LightningModule, Trainer
+from lightning.pytorch.utilities import model_summary, rank_zero_only
from omegaconf import DictConfig, OmegaConf, open_dict
-from pytorch_lightning import LightningModule, Trainer
-from pytorch_lightning.utilities import model_summary, rank_zero_only
from nemo import package_info
from nemo.core import optim
@@ -79,7 +79,7 @@ def __init__(self, cfg: DictConfig, trainer: Trainer = None):
"""
if trainer is not None and not isinstance(trainer, Trainer):
raise ValueError(
- f"trainer constructor argument must be either None or pytorch_lightning.Trainer. But got {type(trainer)} instead."
+ f"trainer constructor argument must be either None or lightning.pytorch.Trainer. But got {type(trainer)} instead."
)
super().__init__()
@@ -211,6 +211,12 @@ def __init__(self, cfg: DictConfig, trainer: Trainer = None):
self._memory_profile_started = False
self._memory_profile_complete = False
+ # Setup chakra profiling if it has been enabled in the model config
+ self._setup_chakra_profiling()
+
+ # A flag for the profile generation
+ self._chakra_profile_in_progress = False
+
def __init_subclass__(cls) -> None:
cls._save_restore_connector = SaveRestoreConnector()
@@ -1027,6 +1033,7 @@ def on_validation_epoch_end(self) -> Optional[Dict[str, Dict[str, torch.Tensor]]
if 'log' in output_dict:
self.log_dict(output_dict.pop('log'), on_epoch=True)
+
# return everything else
return output_dict
@@ -1646,6 +1653,14 @@ def hparams(self):
self._cfg (e.g., in self.setup_optimization()) that was not done via `self.cfg = new_cfg`.
"""
self._set_hparams(OmegaConf.create({'cfg': self._cfg}))
+
+ if (
+ hasattr(self, '_hparams_initial')
+ and 'cfg' in self._hparams_initial
+ and isinstance(self._hparams_initial['cfg'], DictConfig)
+ ):
+ self._hparams_initial['cfg'] = OmegaConf.to_object(self._hparams_initial['cfg'])
+
return super().hparams
@property
@@ -1735,6 +1750,78 @@ def update_save_restore_connector(cls, save_restore_connector):
else:
setattr(cls, '_save_restore_connector', save_restore_connector)
+ def _setup_chakra_profiling(self):
+ """Enables chakra profiling
+ To use, add the following options to the model config:
+ ## Chakra profiling options
+ chakra_profile:
+ enabled: False
+ start_step: 2 # Global batch to start profiling
+ end_step: 2 # Global batch to end profiling
+            warmup_steps: 0 # Number of warmup steps the profiler runs before capturing
+            active_steps: 1 # Number of steps the profiler captures
+ trace_dir: None # Path to store the profile output file
+ """
+ if self.cfg.get('chakra_profile', None) is not None:
+ if self.cfg.chakra_profile.get('enabled', False):
+
+ from torch.profiler import ExecutionTraceObserver
+ from nemo.utils.env_var_parsing import get_envint
+
+ self._chakra_profile_enabled = True
+ self._chakra_profile_start_step = self.cfg.chakra_profile.get('start_step', 0)
+ self._chakra_profile_end_step = self.cfg.chakra_profile.get('end_step', 0)
+ trace_dir = self.cfg.chakra_profile.get('trace_dir', None)
+
+ if trace_dir is None or not os.path.isdir(trace_dir):
+ raise ValueError(f'chakra profile output path ({trace_dir}) is not set or does not exist.')
+
+ trace_dir = Path(trace_dir)
+ warmup_steps = self.cfg.chakra_profile.get('warmup_steps', 0)
+ active_steps = self.cfg.chakra_profile.get('active_steps', 1)
+
+ job_id = get_envint("SLURM_JOB_ID", 0)
+
+ self._chakra_trace_dir = trace_dir / f'{job_id}_chakra'
+ self._kineto_trace_dir = trace_dir / f'{job_id}_kineto'
+
+ self._chakra_trace_dir.mkdir(parents=True, exist_ok=True)
+ self._kineto_trace_dir.mkdir(parents=True, exist_ok=True)
+
+ if isinstance(self._chakra_profile_start_step, int):
+ logging.info(f'chakra profiling setup with start_step: {self._chakra_profile_start_step}')
+ else:
+ raise ValueError(
+ f'chakra start_step must be of type int. Found: {type(self._chakra_profile_start_step)}'
+ )
+
+ if isinstance(self._chakra_profile_end_step, int):
+ logging.info(f'chakra profiling setup with end_step: {self._chakra_profile_end_step}')
+ else:
+ raise ValueError(
+ f'chakra end_step must be of type int. Found: {type(self._chakra_profile_end_step)}'
+ )
+
+                if self._chakra_profile_end_step < self._chakra_profile_start_step:
+                    raise ValueError('chakra end_step must be greater than or equal to chakra start_step')
+
+ if self.cfg.nsys_profile.get('enabled', False):
+ raise Exception(
+                        "Profiler conflict: Chakra profiling and Nsys profiling cannot be enabled at the same time."
+ )
+
+ self._et = ExecutionTraceObserver()
+ self._prof = torch.profiler.profile(
+ activities=[
+ torch.profiler.ProfilerActivity.CPU,
+ torch.profiler.ProfilerActivity.CUDA,
+ ],
+ schedule=torch.profiler.schedule(wait=0, warmup=warmup_steps, active=active_steps),
+ execution_trace_observer=self._et,
+ )
+
def _setup_profiling(self):
"""Enables nsys profiling
        To use, add the following options to the model config:
@@ -1839,11 +1926,22 @@ def on_train_start(self):
def on_train_batch_start(self, batch: Any, batch_idx: int, unused: int = 0) -> Optional[int]:
"""PyTorch Lightning hook:
https://pytorch-lightning.readthedocs.io/en/stable/common/lightning_module.html#on-train-batch-start
- We use it here to enable nsys profiling and dynamic freezing.
+ We use it here to enable profiling and dynamic freezing.
"""
-
- # nsys profiling
if self.device.type == 'cuda':
+ if hasattr(self, '_chakra_profile_enabled'):
+ if self._chakra_profile_enabled and not self._chakra_profile_in_progress:
+ if (
+ self.trainer.global_step >= self._chakra_profile_start_step
+ and self.trainer.global_step <= self._chakra_profile_end_step
+ ):
+ logging.info(
+ f"====== Start chakra profiling from global_step {self.trainer.global_step} ======"
+ )
+ self._et.register_callback(str(self._chakra_trace_dir / f'rank-{get_rank()}.json'))
+ self._prof.start()
+ self._chakra_profile_in_progress = True
+
if hasattr(self, '_nsys_profile_enabled'):
if self._nsys_profile_enabled and not self._nsys_profile_started:
if batch_idx >= self._nsys_profile_start_step and get_rank() in self._nsys_profile_ranks:
@@ -1889,6 +1987,18 @@ def on_train_batch_end(self, outputs, batch: Any, batch_idx: int, unused: int =
"""
if self.device.type == 'cuda':
+ if hasattr(self, '_chakra_profile_enabled'):
+                # self.trainer.global_step is increased before on_train_batch_end
+ if self._chakra_profile_enabled and self._chakra_profile_in_progress:
+ if self.trainer.global_step - 1 >= self._chakra_profile_end_step:
+ logging.info(f"====== End chakra profiling at global_step {self.trainer.global_step} ======")
+ self._prof.stop()
+ self._prof.export_chrome_trace(str(self._kineto_trace_dir / f'rank-{get_rank()}.json'))
+ self._et.unregister_callback()
+ self._chakra_profile_in_progress = False
+ elif self.trainer.global_step - 1 >= self._chakra_profile_start_step:
+ self._prof.step()
+
if hasattr(self, '_nsys_profile_enabled'):
if self._nsys_profile_enabled and not self._nsys_profile_complete:
if batch_idx >= self._nsys_profile_end_step and get_rank() in self._nsys_profile_ranks:
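
To make the chakra options above concrete, a hedged sketch of enabling them on a model config with OmegaConf (the config path and trace directory are placeholders; note the setup code requires `trace_dir` to already exist):

```python
from omegaconf import OmegaConf

cfg = OmegaConf.load("megatron_gpt_config.yaml")  # hypothetical model config
cfg.model.chakra_profile = OmegaConf.create(
    {
        "enabled": True,
        "start_step": 2,    # global step at which profiling starts
        "end_step": 4,      # global step at which profiling stops
        "warmup_steps": 0,  # profiler warmup steps
        "active_steps": 1,  # steps captured by the profiler
        "trace_dir": "/results/chakra",  # must exist before training starts
    }
)
# Note: chakra profiling and nsys profiling are mutually exclusive.
```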
diff --git a/nemo/core/connectors/save_restore_connector.py b/nemo/core/connectors/save_restore_connector.py
index cd9971a9c383..2c4c826d1daf 100644
--- a/nemo/core/connectors/save_restore_connector.py
+++ b/nemo/core/connectors/save_restore_connector.py
@@ -23,9 +23,9 @@
from typing import Callable, Generator, Optional, Set, Union
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import DictConfig, OmegaConf
from omegaconf.omegaconf import open_dict
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.core import classes as nemo_classes # to avoid circular import do not import ModelPT directly
from nemo.utils import logging, model_utils
diff --git a/nemo/core/utils/k2_guard.py b/nemo/core/utils/k2_guard.py
index a9f64ce39c6b..b0e86d319ec0 100644
--- a/nemo/core/utils/k2_guard.py
+++ b/nemo/core/utils/k2_guard.py
@@ -21,8 +21,9 @@
import textwrap
+from lightning.pytorch.utilities.imports import package_available
from packaging.version import Version
-from pytorch_lightning.utilities.imports import package_available
+
from nemo.core.utils.k2_utils import K2_INSTALLATION_MESSAGE
__K2_MINIMUM_MAJOR_VERSION = 1
diff --git a/nemo/deploy/deploy_base.py b/nemo/deploy/deploy_base.py
index 63746199bac6..41e0e7ddbdc9 100644
--- a/nemo/deploy/deploy_base.py
+++ b/nemo/deploy/deploy_base.py
@@ -18,7 +18,7 @@
use_pytorch_lightning = True
try:
- from pytorch_lightning import Trainer
+ from lightning.pytorch import Trainer
except Exception:
use_pytorch_lightning = False
diff --git a/nemo/deploy/nlp/megatronllm_deployable.py b/nemo/deploy/nlp/megatronllm_deployable.py
index 64cf6114ceba..0ce5991cdc95 100644
--- a/nemo/deploy/nlp/megatronllm_deployable.py
+++ b/nemo/deploy/nlp/megatronllm_deployable.py
@@ -20,7 +20,7 @@
import numpy as np
import torch
import wrapt
-from pytorch_lightning.trainer.trainer import Trainer
+from lightning.pytorch.trainer.trainer import Trainer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
from nemo.collections.nlp.modules.common.text_generation_utils import (
diff --git a/nemo/deploy/nlp/query_llm.py b/nemo/deploy/nlp/query_llm.py
index 7e873db6b5b1..e1d21bb54b76 100644
--- a/nemo/deploy/nlp/query_llm.py
+++ b/nemo/deploy/nlp/query_llm.py
@@ -174,6 +174,7 @@ def query_llm(
end_strings=None,
init_timeout=60.0,
openai_format_response: bool = False,
+ output_generation_logits: bool = False,
):
"""
Query the Triton server synchronously and return a list of responses.
@@ -190,6 +191,8 @@ def query_llm(
no_repeat_ngram_size (int): no repeat ngram size.
task_id (str): downstream task id if virtual tokens are used.
            init_timeout (float): timeout for the connection.
+            openai_format_response (bool): return the response in an OpenAI API compatible format.
+            output_generation_logits (bool): return generation logits from the model on PyTriton.
"""
prompts = str_list2numpy(prompts)
@@ -248,6 +251,9 @@ def query_llm(
if end_strings is not None:
inputs["end_strings"] = str_list2numpy(end_strings)
+ if output_generation_logits is not None:
+ inputs["output_generation_logits"] = np.full(prompts.shape, output_generation_logits, dtype=np.bool_)
+
with ModelClient(self.url, self.model_name, init_timeout_s=init_timeout) as client:
result_dict = client.infer_batch(**inputs)
output_type = client.model_config.outputs[0].dtype
@@ -269,6 +275,9 @@ def query_llm(
"model": self.model_name,
"choices": [{"text": str(sentences)}],
}
+            # Convert generation logits to a list to make it json serializable and add it to the openai_response dict
+ if output_generation_logits:
+ openai_response["choices"][0]["generation_logits"] = result_dict["generation_logits"].tolist()
return openai_response
else:
return sentences
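
A hedged client-side sketch of the new flag (URL and model name are placeholders); with `openai_format_response=True`, the logits come back under `choices[0]['generation_logits']` as serialized above:

```python
from nemo.deploy.nlp import NemoQueryLLM

nq = NemoQueryLLM(url="localhost:8000", model_name="gpt-2b")  # hypothetical deployment
out = nq.query_llm(
    prompts=["What is the color of a banana?"],
    max_output_len=16,
    openai_format_response=True,
    output_generation_logits=True,  # requires an engine built with gather_generation_logits
)
print(out["choices"][0]["generation_logits"])
```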
diff --git a/nemo/deploy/service/rest_model_api.py b/nemo/deploy/service/rest_model_api.py
index fbc774883faa..64afea167295 100644
--- a/nemo/deploy/service/rest_model_api.py
+++ b/nemo/deploy/service/rest_model_api.py
@@ -8,8 +8,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-
-import json
import os
from pathlib import Path
import requests
@@ -19,6 +17,7 @@
from pydantic_settings import BaseSettings
from nemo.deploy.nlp import NemoQueryLLM
+from nemo.utils import logging
class TritonSettings(BaseSettings):
@@ -29,14 +28,13 @@ class TritonSettings(BaseSettings):
def __init__(self):
super(TritonSettings, self).__init__()
try:
- with open(os.path.join(Path.cwd(), 'nemo/deploy/service/config.json')) as config:
- config_json = json.load(config)
- self._triton_service_port = config_json["triton_service_port"]
- self._triton_service_ip = config_json["triton_service_ip"]
- self._triton_request_timeout = config_json["triton_request_timeout"]
- self._openai_format_response = config_json["openai_format_response"]
+ self._triton_service_port = int(os.environ.get('TRITON_PORT', 8080))
+ self._triton_service_ip = os.environ.get('TRITON_HTTP_ADDRESS', '0.0.0.0')
+ self._triton_request_timeout = int(os.environ.get('TRITON_REQUEST_TIMEOUT', 60))
+ self._openai_format_response = os.environ.get('OPENAI_FORMAT_RESPONSE', 'False').lower() == 'true'
+ self._output_generation_logits = os.environ.get('OUTPUT_GENERATION_LOGITS', 'False').lower() == 'true'
except Exception as error:
- print("An exception occurred:", error)
+            logging.error("An exception occurred while retrieving settings in the TritonSettings class: %s", error)
return
@property
@@ -54,11 +52,17 @@ def triton_request_timeout(self):
@property
def openai_format_response(self):
"""
- Retuns the response from Triton server in OpenAI compatible formar if set to True,
- default set in config.json is false.
+        Returns the response from the Triton server in OpenAI-compatible format if set to True.
"""
return self._openai_format_response
+ @property
+ def output_generation_logits(self):
+ """
+        Returns the generation logits along with text in the Triton server output if set to True.
+ """
+ return self._output_generation_logits
+
app = FastAPI()
triton_settings = TritonSettings()
@@ -70,19 +74,27 @@ class CompletionRequest(BaseModel):
max_tokens: int = 512
temperature: float = 1.0
top_p: float = 0.0
- n: int = 1
+ top_k: int = 1
stream: bool = False
stop: str | None = None
frequency_penalty: float = 1.0
-@app.get("/triton_health")
+@app.get("/v1/health")
+def health_check():
+ return {"status": "ok"}
+
+
+@app.get("/v1/triton_health")
async def check_triton_health():
"""
This method exposes endpoint "/triton_health" which can be used to verify if Triton server is accessible while running the REST or FastAPI application.
- Verify by running: curl http://service_http_address:service_port/triton_health and the returned status should inform if the server is accessible.
+ Verify by running: curl http://service_http_address:service_port/v1/triton_health and the returned status should inform if the server is accessible.
"""
- triton_url = f"triton_settings.triton_service_ip:str(triton_settings.triton_service_port)/v2/health/ready"
+ triton_url = (
+ f"http://{triton_settings.triton_service_ip}:{str(triton_settings.triton_service_port)}/v2/health/ready"
+ )
+ logging.info(f"Attempting to connect to Triton server at: {triton_url}")
try:
response = requests.get(triton_url, timeout=5)
if response.status_code == 200:
@@ -101,11 +113,13 @@ def completions_v1(request: CompletionRequest):
output = nq.query_llm(
prompts=[request.prompt],
max_output_len=request.max_tokens,
- top_k=request.n,
+            # sampling parameters passed through from the request
+ top_k=request.top_k,
top_p=request.top_p,
temperature=request.temperature,
init_timeout=triton_settings.triton_request_timeout,
openai_format_response=triton_settings.openai_format_response,
+ output_generation_logits=triton_settings.output_generation_logits,
)
if triton_settings.openai_format_response:
return output
@@ -114,5 +128,5 @@ def completions_v1(request: CompletionRequest):
"output": output[0][0],
}
except Exception as error:
- print("An exception occurred:", error)
+        logging.error("An exception occurred with the post request to /v1/completions/ endpoint: %s", error)
return {"error": "An exception occurred"}
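
Since the service now reads its settings from the environment instead of config.json, a minimal sketch of configuring it (values are illustrative; they must be set before `TritonSettings()` is constructed, i.e. before the FastAPI app is imported):

```python
import os

os.environ["TRITON_PORT"] = "8000"
os.environ["TRITON_HTTP_ADDRESS"] = "127.0.0.1"
os.environ["TRITON_REQUEST_TIMEOUT"] = "120"
os.environ["OPENAI_FORMAT_RESPONSE"] = "true"
os.environ["OUTPUT_GENERATION_LOGITS"] = "false"

# Then start the app, e.g.: uvicorn nemo.deploy.service.rest_model_api:app --port 8080
```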
diff --git a/nemo/export/tensorrt_llm.py b/nemo/export/tensorrt_llm.py
index fb43224d59a9..8f2b0db20341 100644
--- a/nemo/export/tensorrt_llm.py
+++ b/nemo/export/tensorrt_llm.py
@@ -30,19 +30,20 @@
import wrapt
from tensorrt_llm._utils import numpy_to_torch
+from nemo.collections.nlp.parts.utils_funcs import torch_dtype_from_precision
from nemo.deploy import ITritonDeployable
from nemo.export.tarutils import TarPath, unpack_tarball
from nemo.export.trt_llm.converter.model_converter import model_to_trtllm_ckpt
-from nemo.export.trt_llm.converter.model_to_trt_llm_ckpt import dist_model_to_trt_llm_ckpt
+from nemo.export.trt_llm.converter.model_to_trt_llm_ckpt import dist_model_to_trt_llm_ckpt, get_layer_prefix
from nemo.export.trt_llm.converter.utils import init_model_parallel_from_nemo
from nemo.export.trt_llm.nemo_ckpt_loader.nemo_file import (
build_tokenizer,
- get_tokenzier,
+ get_tokenizer,
is_nemo_file,
load_nemo_model,
)
from nemo.export.trt_llm.qnemo import qnemo_to_tensorrt_llm
-from nemo.export.trt_llm.qnemo.tokenizer_utils import get_nmt_tokenizer
+from nemo.export.trt_llm.qnemo.tokenizer_utils import TOKENIZER_CONFIG_FILE, get_nmt_tokenizer
from nemo.export.trt_llm.qnemo.utils import is_qnemo_checkpoint
from nemo.export.trt_llm.tensorrt_llm_build import build_and_save_engine
from nemo.export.trt_llm.tensorrt_llm_run import (
@@ -65,6 +66,8 @@
@wrapt.decorator
def noop_decorator(func):
+ """No op decorator"""
+
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
@@ -80,6 +83,7 @@ def wrapper(*args, **kwargs):
use_pytriton = False
+# pylint: disable=line-too-long
class TensorRTLLM(ITritonDeployable):
"""
Exports nemo checkpoints to TensorRT-LLM and run fast inference.
@@ -180,6 +184,8 @@ def export(
reduce_fusion: bool = True,
fp8_quantized: Optional[bool] = None,
fp8_kvcache: Optional[bool] = None,
+ gather_context_logits: Optional[bool] = False,
+ gather_generation_logits: Optional[bool] = False,
):
"""
Exports nemo checkpoints to TensorRT-LLM.
@@ -218,6 +224,8 @@ def export(
reduce_fusion (bool): enables fusing extra kernels after custom TRT-LLM allReduce
fp8_quantized (Optional[bool]): enables exporting to FP8 TRT-LLM checkpoints. If not set, autodetects the type.
fp8_kvcache (Optional[bool]): enables FP8 KV-cache quantization. If not set, autodetects the type.
+ gather_context_logits (Optional[bool]): if True, enables gather_context_logits while building trtllm engine. Default: False
+ gather_generation_logits (Optional[bool]): if True, enables gather_generation_logits while building trtllm engine. Default: False
"""
if n_gpus is not None:
warnings.warn(
@@ -294,7 +302,14 @@ def export(
else:
unpack_tarball(nemo_checkpoint_path, tmp_dir.name)
nemo_checkpoint_path = tmp_dir.name
- self.tokenizer = get_nmt_tokenizer(nemo_checkpoint_path)
+
+ if os.path.exists(os.path.join(nemo_checkpoint_path, TOKENIZER_CONFIG_FILE)):
+ # Instantiate tokenizer for a legacy "Nemo 1" quantized checkpoint from a tokenizer config.
+ # Note that using the config is deprecated and it will be removed in future releases.
+ LOGGER.warning("Detected legacy tokenizer_config.yaml, using it to build tokenizer.")
+ self.tokenizer = get_nmt_tokenizer(nemo_checkpoint_path)
+ else:
+ self.tokenizer = get_tokenizer(nemo_checkpoint_path)
qnemo_to_tensorrt_llm(
nemo_checkpoint_path=nemo_checkpoint_path,
@@ -336,43 +351,14 @@ def export(
DEFAULT_CONVERSION_DICT,
)
from megatron.core.export.trtllm.trtllm_helper import TRTLLMHelper
- from megatron.core.transformer.transformer_config import TransformerConfig
from tensorrt_llm.layers import MoeConfig
- def get_transformer_config(nemo_model_config):
- normalization = nemo_model_config.get('normalization', 'layernorm')
- transformer_config_normalization = 'LayerNorm'
- layernorm_zero_centered_gamma = False
- if normalization == 'layernorm1p':
- layernorm_zero_centered_gamma = True
- elif normalization == 'rmsnorm':
- transformer_config_normalization = 'RMSNorm'
-
- conf = TransformerConfig(
- num_layers=nemo_model_config.get('num_layers'),
- moe_router_topk=nemo_model_config.get('moe_router_topk', 0),
- num_attention_heads=nemo_model_config.get('num_attention_heads'),
- num_query_groups=nemo_model_config.get(
- 'num_query_groups', nemo_model_config['num_attention_heads']
- ),
- kv_channels=nemo_model_config.get("kv_channels", None),
- hidden_size=nemo_model_config.get('hidden_size'),
- ffn_hidden_size=nemo_model_config.get('ffn_hidden_size'),
- layernorm_epsilon=nemo_model_config.get('layernorm_epsilon'),
- add_bias_linear=nemo_model_config.get('bias'),
- num_moe_experts=nemo_model_config.get('num_moe_experts', None),
- normalization=transformer_config_normalization,
- layernorm_zero_centered_gamma=layernorm_zero_centered_gamma,
- )
-
- return conf
-
# We build the transformer config using the nemo model config.
- transformer_config = get_transformer_config(model_configs)
+ transformer_config = self.get_transformer_config(model_configs)
input_model_type = getattr(ModelType, model_type)
# MCore export supports some default conversion dictionaries
- mcore_model_conversion_dict = DEFAULT_CONVERSION_DICT[input_model_type]
+ mcore_model_conversion_dict = DEFAULT_CONVERSION_DICT
# All Mcore conversion dicts start with "decoder.layers.4.blah.blah" , while nemo models start with "model.decoder.layers.4.blahblah". so we append model. to the keys
nemo_model_conversion_dict = {
f'model.{key}': value for key, value in mcore_model_conversion_dict.items()
@@ -488,6 +474,8 @@ def get_transformer_config(nemo_model_config):
multiple_profiles=multiple_profiles,
gpt_attention_plugin=gpt_attention_plugin,
gemm_plugin=gemm_plugin,
+ gather_context_logits=gather_context_logits,
+ gather_generation_logits=gather_generation_logits,
)
tokenizer_path = os.path.join(nemo_export_dir, "tokenizer.model")
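
A hedged sketch of the two new export flags (paths are placeholders; `gather_generation_logits` must be enabled at build time for generation logits to be retrievable at inference):

```python
from nemo.export.tensorrt_llm import TensorRTLLM

exporter = TensorRTLLM(model_dir="/tmp/trtllm_engine", load_model=False)
exporter.export(
    nemo_checkpoint_path="/models/llama.nemo",  # hypothetical checkpoint
    model_type="llama",
    gather_context_logits=False,
    gather_generation_logits=True,
)
```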
@@ -511,6 +499,34 @@ def get_transformer_config(nemo_model_config):
if load_model:
self._load()
+ def get_transformer_config(self, nemo_model_config):
+ """Given nemo model config get transformer config"""
+ from megatron.core.transformer.transformer_config import TransformerConfig
+
+ normalization = nemo_model_config.get('normalization', 'layernorm')
+ transformer_config_normalization = 'LayerNorm'
+ layernorm_zero_centered_gamma = False
+ if normalization == 'layernorm1p':
+ layernorm_zero_centered_gamma = True
+ elif normalization == 'rmsnorm':
+ transformer_config_normalization = 'RMSNorm'
+
+ conf = TransformerConfig(
+ num_layers=nemo_model_config.get('num_layers'),
+ moe_router_topk=nemo_model_config.get('moe_router_topk', 0),
+ num_attention_heads=nemo_model_config.get('num_attention_heads'),
+ num_query_groups=nemo_model_config.get('num_query_groups', nemo_model_config['num_attention_heads']),
+ kv_channels=nemo_model_config.get("kv_channels", None),
+ hidden_size=nemo_model_config.get('hidden_size'),
+ ffn_hidden_size=nemo_model_config.get('ffn_hidden_size'),
+ layernorm_epsilon=nemo_model_config.get('layernorm_epsilon'),
+ add_bias_linear=nemo_model_config.get('bias'),
+ num_moe_experts=nemo_model_config.get('num_moe_experts', None),
+ normalization=transformer_config_normalization,
+ layernorm_zero_centered_gamma=layernorm_zero_centered_gamma,
+ )
+ return conf
+
def convert_to_safe_tensors(
self,
nemo_checkpoint_path: str,
@@ -523,6 +539,7 @@ def convert_to_safe_tensors(
use_embedding_sharing: bool = False,
dtype: str = "bfloat16",
):
+ """Convert to safe tensor"""
gpus_per_node = tensor_parallelism_size if gpus_per_node is None else gpus_per_node
if Path(self.model_dir).exists():
@@ -588,6 +605,167 @@ def convert_to_safe_tensors(
if tensorrt_llm.mpi_world_size() > 1:
tensorrt_llm.mpi_barrier()
+ def gather_and_reshard_model(self, model_config, model, storage_dtype):
+ """
+ Accumulate all vp model chunks together, and reshard model (i.e) gather all pp ranks
+ if required and return the final model state dict
+ """
+
+ def _get_layer_index(split_key):
+ for index, key in enumerate(split_key):
+ if key == "layers":
+ return index + 1
+ raise ValueError(f"Unknown layer name format: {split_key}")
+
+ def rename_layer_num(param_name, layer_num):
+ split_key = param_name.split(".")
+ layer_index = int(_get_layer_index(split_key))
+ split_key[layer_index] = str(layer_num)
+ return ".".join(split_key)
+
+ def get_layer_num(param_name):
+ split_key = param_name.split(".")
+ layer_index = int(_get_layer_index(split_key))
+ return int(split_key[layer_index])
+
+ from megatron.core import parallel_state
+
+ tp_size = parallel_state.get_tensor_model_parallel_world_size()
+ pp_rank = parallel_state.get_pipeline_model_parallel_rank()
+ pp_first_rank = parallel_state.get_pipeline_model_parallel_first_rank()
+ pp_last_rank = parallel_state.get_pipeline_model_parallel_last_rank()
+ pp_size = parallel_state.get_pipeline_model_parallel_world_size()
+ pp_group = parallel_state.get_pipeline_model_parallel_group()
+ vp_size = parallel_state.get_virtual_pipeline_model_parallel_world_size()
+ if not vp_size:
+ vp_size = 1
+
+ inference_tp_size = self.tp_size
+ inference_pp_size = self.pp_size
+ reshard_model = False
+ if inference_tp_size != tp_size or inference_pp_size != pp_size:
+ LOGGER.info("Training/Generation model parallelism resharding enabled")
+ if inference_pp_size == 1 and pp_size > 1 and inference_tp_size == tp_size:
+ reshard_model = True
+ else:
+ raise NotImplementedError(
+                    "NeMo currently only supports PP>1 -> PP=1 resharding, other types of resharding will come in future releases."
+ )
+
+ num_layers = model_config["num_layers"]
+ layers_per_pp = num_layers // pp_size
+ layers_per_chunk = layers_per_pp // vp_size
+
+ tl_params = {}
+ model_level_params = {}
+ if vp_size > 1: # consolidate params across model chunks
+ for idx, model_chunk in enumerate(model):
+ for key, val in model_chunk.state_dict().items():
+ if torch.is_tensor(val):
+ if 'layers' in key:
+ key2 = rename_layer_num(key, get_layer_num(key) + idx * pp_size * layers_per_chunk)
+ tl_params[key2] = val
+ else:
+ model_level_params[key] = val
+ else:
+ for key, val in model.state_dict().items():
+ if torch.is_tensor(val):
+ if 'decoder.layers' in key:
+ tl_params[key] = val
+ else:
+ model_level_params[key] = val
+
+ if vp_size > 1 or reshard_model:
+ # gather layers across pp ranks
+ gathered_params = {}
+ for key, val in tl_params.items():
+ weight_list = [torch.zeros_like(val) for _ in range(pp_size)]
+ torch.distributed.all_gather(weight_list, val, group=pp_group)
+ for idx in range(pp_size):
+ layer_num = get_layer_num(key) + idx * layers_per_chunk
+ key2 = rename_layer_num(key, layer_num)
+ if not reshard_model: # Save only layers of 1 single PP stage
+ layers_start = layers_per_pp * pp_rank
+ layers_end = layers_per_pp * (pp_rank + 1) - 1
+ if layer_num >= layers_start and layer_num <= layers_end:
+ key2 = rename_layer_num(key, layer_num % layers_per_pp)
+ gathered_params[key2] = weight_list[idx]
+ else:
+ gathered_params[key2] = weight_list[idx]
+ tl_params = gathered_params
+
+ model_state_dict = model_level_params
+ model_state_dict.update(tl_params)
+
+ def get_tensor_if_available(key, pp_src_idx, group):
+ tensor = model_state_dict.get(key)
+ if tensor is not None:
+ tensor_shape = [tensor.shape]
+ else:
+ tensor_shape = [None]
+
+ torch.distributed.broadcast_object_list(tensor_shape, pp_src_idx, group=group)
+
+ if tensor_shape[0] is None:
+ return None
+ if torch.distributed.get_rank() != pp_src_idx:
+ tensor = torch.empty(tensor_shape[0], dtype=storage_dtype).cuda()
+
+ torch.distributed.broadcast(tensor.contiguous(), pp_src_idx, group=pp_group)
+ return tensor
+
+ if reshard_model:
+ key = 'decoder.final_layernorm.weight'
+ tensor = get_tensor_if_available(key, pp_last_rank, pp_group)
+ if tensor is not None:
+ model_state_dict[key] = tensor
+
+ key = 'decoder.final_layernorm.bias'
+ tensor = get_tensor_if_available(key, pp_last_rank, pp_group)
+ if tensor is not None:
+ model_state_dict[key] = tensor
+
+ key = 'embedding.word_embeddings.weight'
+ tensor = get_tensor_if_available(key, pp_first_rank, pp_group)
+ if tensor is not None:
+ model_state_dict[key] = tensor
+
+ key = 'output_layer.weight'
+ tensor = get_tensor_if_available(key, pp_last_rank, pp_group)
+ if tensor is not None:
+ model_state_dict[key] = tensor
+
+ return model_state_dict
+
+ def get_input_dtype(self, storage_dtype):
+ """
+ Return mcore export dtype given torch dtype
+ """
+ from megatron.core.export.data_type import DataType
+
+ if storage_dtype == torch.bfloat16:
+ return DataType.bfloat16
+ elif storage_dtype == torch.float32:
+ return DataType.float32
+ elif storage_dtype == torch.float16:
+ return DataType.float16
+
+ def get_nemo_to_trtllm_conversion_dict(self, model_state_dict):
+ """MCore export supports some default conversion dictionaries
+        All Mcore conversion dicts start with "decoder.layers.4.blah.blah", while nemo models sometimes start with "model.decoder.layers.4.blahblah", so we prepend the model prefix to the keys.
+ """
+ from megatron.core.export.trtllm.model_to_trllm_mapping.default_conversion_dict import DEFAULT_CONVERSION_DICT
+
+ model_prefix, _ = get_layer_prefix(layer_names=model_state_dict.keys(), is_mcore=True)
+
+ nemo_model_conversion_dict = {}
+ for key, value in DEFAULT_CONVERSION_DICT.items():
+ if 'layers' in key:
+ nemo_model_conversion_dict[f'{model_prefix}.{key}'] = value
+ else:
+ nemo_model_conversion_dict[key] = value
+ return nemo_model_conversion_dict
+
def build(
self,
model,
@@ -600,6 +778,7 @@ def build(
max_batch_size: int = 4,
use_refit: bool = True,
reshard_model: bool = False,
+ use_mcore_path: bool = True,
):
"""
Convert a model parallel nemo model to TensorRT-LLM.
@@ -614,31 +793,103 @@ def build(
if self.dp_size > 1:
self.model_dir = os.path.join(self.model_dir, f"dp_rank{self.dp_rank}")
- weights, model_config = model_to_trtllm_ckpt(
- model=model,
- nemo_model_config=model_config,
- nemo_export_dir=self.model_dir,
- decoder_type=model_type,
- tensor_parallel_size=self.tp_size,
- pipeline_parallel_size=self.pp_size,
- gpus_per_node=gpus_per_node,
- use_parallel_embedding=True,
- use_distributed_convert=True,
- model_parallel_rank=self.mp_rank,
- vocab_size=self.tokenizer.vocab_size,
- )
+ if use_mcore_path:
+ from megatron.core.export.model_type import ModelType
+ from megatron.core.export.trtllm.trtllm_helper import TRTLLMHelper
+ from tensorrt_llm.layers import MoeConfig
+
+ storage_dtype = torch_dtype_from_precision(model_config.precision)
+ model_state_dict = self.gather_and_reshard_model(model_config, model, storage_dtype)
+ # We build the transformer config using the nemo model config.
+ transformer_config = self.get_transformer_config(model_config)
+ input_model_type = getattr(ModelType, model_type)
+
+ nemo_model_conversion_dict = self.get_nemo_to_trtllm_conversion_dict(model_state_dict)
+
+ self.trtllm_helper = TRTLLMHelper(
+ transformer_config=transformer_config,
+ model_type=input_model_type,
+ trtllm_conversion_dict=nemo_model_conversion_dict,
+ position_embedding_type=model_config.get('position_embedding_type'),
+ max_position_embeddings=model_config.get('max_position_embeddings'),
+ rotary_percentage=model_config.get('rotary_percentage', 1.0),
+ rotary_base=model_config.get('rotary_base', 10000),
+ moe_tp_mode=model_config.get('moe_tp_mode', 2),
+ multi_query_mode=model_config.get("multi_query_mode", False),
+ activation=model_config.get('activation', "gelu"),
+ seq_len_interpolation_factor=model_config.get("seq_len_interpolation_factor"),
+ moe_renorm_mode=model_config.get(
+ 'moe_renorm_mode', MoeConfig.ExpertScaleNormalizationMode.RENORMALIZE
+ ),
+ share_embeddings_and_output_weights=model_config.get("share_embeddings_and_output_weights", False),
+ )
+
+ input_dtype = self.get_input_dtype(storage_dtype)
+
+ trtllm_model_weights_list, trtllm_model_config_list = (
+ self.trtllm_helper.get_trtllm_pretrained_config_and_model_weights(
+ model_state_dict=model_state_dict,
+ dtype=input_dtype,
+ state_dict_split_by_layer_numbers=True,
+ on_device_distributed_conversion=True,
+ vocab_size=self.tokenizer.vocab_size,
+ gpus_per_node=gpus_per_node,
+ )
+ )
+ trtllm_model_config = trtllm_model_config_list[0]
+ trtllm_model_weights = trtllm_model_weights_list[0]
+
+ if reshard_model:
+ assert self.pp_size == 1, 'Reshard is true, but pp size is not one'
+                # MCore export will use parallel_state to determine pp.
+ # Since we reshard to pp = 1, we need to modify the config and mapping
+ world_size = self.tp_size * self.pp_size
+ trtllm_model_config.pp_size = self.pp_size
+ trtllm_model_config.world_size = world_size
+ trtllm_model_config.mapping = tensorrt_llm.Mapping(
+ world_size=world_size,
+ rank=self.mp_rank,
+ tp_size=self.tp_size,
+ pp_size=self.pp_size,
+ )
+
+ engine = self.trtllm_helper.build_and_save_engine(
+ max_input_len=max_input_len,
+ max_output_len=max_output_len,
+ max_seq_len=max_input_len + max_output_len,
+ max_batch_size=max_batch_size,
+ trtllm_model_config=trtllm_model_config,
+ trtllm_model_weights=trtllm_model_weights,
+ engine_dir=self.model_dir,
+ use_refit=use_refit,
+ )
+ else:
+ weights, model_config = model_to_trtllm_ckpt(
+ model=model,
+ nemo_model_config=model_config,
+ nemo_export_dir=self.model_dir,
+ decoder_type=model_type,
+ tensor_parallel_size=self.tp_size,
+ pipeline_parallel_size=self.pp_size,
+ gpus_per_node=gpus_per_node,
+ use_parallel_embedding=True,
+ use_distributed_convert=True,
+ model_parallel_rank=self.mp_rank,
+ vocab_size=self.tokenizer.vocab_size,
+ )
+
+ engine = build_and_save_engine(
+ max_input_len=max_input_len,
+ max_output_len=max_output_len,
+ max_seq_len=max_input_len + max_output_len,
+ max_batch_size=max_batch_size,
+ model_config=model_config[0],
+ model_weights=weights[0],
+ model_dir=self.model_dir,
+ model_type=model_type,
+ use_refit=use_refit,
+ )
- engine = build_and_save_engine(
- max_input_len=max_input_len,
- max_output_len=max_output_len,
- max_seq_len=max_input_len + max_output_len,
- max_batch_size=max_batch_size,
- model_config=model_config[0],
- model_weights=weights[0],
- model_dir=self.model_dir,
- model_type=model_type,
- use_refit=use_refit,
- )
torch.distributed.barrier()
cfg_path = Path(os.path.join(self.model_dir, f'config_{torch.distributed.get_rank()}.json'))
@@ -647,18 +898,33 @@ def build(
load_distributed(self.model_dir, self.mp_rank, gpus_per_node)
- def refit(self, model, model_config):
+ def refit(self, model, model_config, use_mcore_path=True):
"""
Refits an TensorRT engine using an instantiated nemo model.
This function should only be used after calling build()
"""
- weights_dict = dist_model_to_trt_llm_ckpt(
- model=model,
- nemo_model_config=model_config,
- inference_tp_size=self.tp_size,
- inference_pp_size=self.pp_size,
- tokenizer_vocab_size=self.tokenizer.vocab_size,
- )
+ weights_dict = None
+ if use_mcore_path:
+ storage_dtype = torch_dtype_from_precision(model_config.precision)
+
+ model_state_dict = self.gather_and_reshard_model(model_config, model, storage_dtype)
+
+ nemo_model_conversion_dict = self.get_nemo_to_trtllm_conversion_dict(model_state_dict)
+ self.trtllm_helper.weights_converter.convert(
+ model_state_dict=model_state_dict,
+ tokenizer_vocab_size=self.tokenizer.vocab_size,
+ trtllm_conversion_dict=nemo_model_conversion_dict,
+ )
+ weights_dict = self.trtllm_helper.weights_converter.trtllm_model_weights
+
+ else:
+ weights_dict = dist_model_to_trt_llm_ckpt(
+ model=model,
+ nemo_model_config=model_config,
+ inference_tp_size=self.tp_size,
+ inference_pp_size=self.pp_size,
+ tokenizer_vocab_size=self.tokenizer.vocab_size,
+ )
load_distributed(self.model_dir, self.mp_rank, self.gpus_per_node)
gc.collect()
torch.cuda.empty_cache()
@@ -681,6 +947,7 @@ def forward(
prompt_embeddings_checkpoint_path: str = None,
streaming: bool = False,
output_log_probs: bool = False,
+ output_generation_logits: bool = False,
**sampling_kwargs,
):
"""
@@ -699,6 +966,7 @@ def forward(
task_ids (List(str)): list of the task ids for the prompt tables.
prompt_embeddings_table (List(float)): prompt embeddings table.
prompt_embeddings_checkpoint_path (str): path for the nemo checkpoint for the prompt embedding table.
+            output_generation_logits (bool): if True, returns generation_logits in the output of the generate method.
sampling_kwargs: Additional kwargs to set in the SamplingConfig.
"""
@@ -777,6 +1045,7 @@ def forward(
no_repeat_ngram_size=no_repeat_ngram_size,
output_log_probs=output_log_probs,
multiprocessed_env=multiprocessed_env,
+ output_generation_logits=output_generation_logits,
**sampling_kwargs,
)
else:
@@ -799,6 +1068,7 @@ def forward(
)
def add_prompt_table(self, task_name: str, prompt_embeddings_checkpoint_path: str):
+ """Add prompt table"""
if self.model is None:
raise Exception(
"A nemo checkpoint should be exported to TensorRT-LLM and "
@@ -820,6 +1090,7 @@ def add_prompt_table(self, task_name: str, prompt_embeddings_checkpoint_path: st
self._prep_ptuning_table()
def remove_prompt_table(self, task_name: str):
+ """Remove prompt table"""
if self.ptuning_tables is not None:
for i in range(len(self.ptuning_tables)):
if self.ptuning_tables[i]["task_name"] == task_name:
@@ -831,11 +1102,13 @@ def remove_prompt_table(self, task_name: str):
@property
def get_supported_models_list(self):
+ """Supported model list"""
# gpt and gptnext are the same. Keeping the gptnext due to backward compatibility.
return ["gpt", "gptnext", "llama", "falcon", "starcoder", "mixtral", "gemma"]
@property
def get_hidden_size(self):
+ """Get hidden size"""
if self.config is None:
return None
else:
@@ -843,6 +1116,7 @@ def get_hidden_size(self):
@property
def get_triton_input(self):
+ """Get triton input"""
inputs = (
Tensor(name="prompts", shape=(-1,), dtype=bytes),
Tensor(name="max_output_len", shape=(-1,), dtype=np.int_, optional=True),
@@ -855,16 +1129,22 @@ def get_triton_input(self):
Tensor(name="no_repeat_ngram_size", shape=(-1,), dtype=np.single, optional=True),
Tensor(name="task_id", shape=(-1,), dtype=bytes, optional=True),
Tensor(name="lora_uids", shape=(-1,), dtype=bytes, optional=True),
+ Tensor(name="output_generation_logits", shape=(-1,), dtype=np.bool_, optional=False),
)
return inputs
@property
def get_triton_output(self):
- outputs = (Tensor(name="outputs", shape=(-1,), dtype=bytes),)
+ outputs = (
+ Tensor(name="outputs", shape=(-1,), dtype=bytes),
+ Tensor(name="generation_logits", shape=(-1,), dtype=np.single),
+ )
return outputs
@batch
def triton_infer_fn(self, **inputs: np.ndarray):
+ """Triton infer function for streaming"""
+ output_dict = {}
try:
infer_input = {"input_texts": str_ndarray2list(inputs.pop("prompts"))}
if "max_output_len" in inputs:
@@ -891,17 +1171,24 @@ def triton_infer_fn(self, **inputs: np.ndarray):
if "lora_uids" in inputs:
lora_uids = np.char.decode(inputs.pop("lora_uids").astype("bytes"), encoding="utf-8")
infer_input["lora_uids"] = lora_uids[0].tolist()
+ if "output_generation_logits" in inputs:
+ infer_input["output_generation_logits"] = inputs.pop("output_generation_logits")[0][0]
- output_texts = self.forward(**infer_input)
- output = cast_output(output_texts, np.bytes_)
+ if infer_input["output_generation_logits"]:
+ output_texts, generation_logits = self.forward(**infer_input)
+ output_dict["generation_logits"] = np.array(generation_logits.cpu().numpy())
+ else:
+ output_texts = self.forward(**infer_input)
+ output_dict["outputs"] = cast_output(output_texts, np.bytes_)
except Exception as error:
err_msg = "An error occurred: {0}".format(str(error))
- output = cast_output([err_msg], np.bytes_)
+ output_dict["outputs"] = cast_output([err_msg], np.bytes_)
- return {"outputs": output}
+ return output_dict
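For reference, a client-side sketch of exercising the new generation-logits flag, assuming a running PyTriton server that serves this exporter under the name "model"; the exact input shapes are assumptions inferred from the [0][0] indexing above:

    import numpy as np
    from pytriton.client import ModelClient

    with ModelClient("localhost", "model") as client:
        result = client.infer_batch(
            prompts=np.array(["What is AI?"], dtype=np.bytes_),
            output_generation_logits=np.array([[True]], dtype=np.bool_),
        )
        texts = result["outputs"]                 # always present
        logits = result.get("generation_logits")  # populated only when the flag is set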
@batch
def triton_infer_fn_streaming(self, **inputs: np.ndarray):
+ """Triton infer function for streaming"""
try:
infer_input = {"input_texts": str_ndarray2list(inputs.pop("prompts"))}
if "max_output_len" in inputs:
@@ -1092,7 +1379,7 @@ def _load(self):
if len(folders) > 0:
try:
self._load_config_file()
- self.tokenizer = get_tokenzier(Path(os.path.join(self.model_dir)))
+ self.tokenizer = get_tokenizer(self.model_dir)
self.model = load(
tokenizer=self.tokenizer,
engine_dir=self.model_dir,
@@ -1111,4 +1398,5 @@ def _load(self):
) from error
def unload_engine(self):
+ """Unload engine"""
unload_engine()
diff --git a/nemo/export/trt_llm/converter/model_converter.py b/nemo/export/trt_llm/converter/model_converter.py
index e5e9f8154d24..9729781e6eba 100755
--- a/nemo/export/trt_llm/converter/model_converter.py
+++ b/nemo/export/trt_llm/converter/model_converter.py
@@ -254,6 +254,8 @@ def model_to_trtllm_ckpt(
layer_num = int(new_key.split(".")[2])
if layer_num in layers_range:
new_key = new_key.replace(f"layers.{layer_num}", f"layers.{layer_num-layers_range[0]}")
+ else:
+ continue
if config.get("new_decoder_architecture", False) and "post_layernorm" in new_key:
new_key = new_key.replace("post_layernorm", "mlp_layernorm")
weights_dict_local[new_key] = v
diff --git a/nemo/export/trt_llm/converter/model_to_trt_llm_ckpt.py b/nemo/export/trt_llm/converter/model_to_trt_llm_ckpt.py
index db1aec0f5a55..b0e134ab0c35 100644
--- a/nemo/export/trt_llm/converter/model_to_trt_llm_ckpt.py
+++ b/nemo/export/trt_llm/converter/model_to_trt_llm_ckpt.py
@@ -161,7 +161,7 @@ def convert_model_to_trt_llm_ckpt(
or nemo_model_config.get("layernorm_zero_centered_gamma", False),
"tp_size": training_tp_size,
"split_gated_activation": nemo_model_config.get("activation", "gelu")
- in ["swiglu", "geglu", "fast-swiglu", "fast-geglu"]
+ in ["swiglu", "geglu", "fast-swiglu", "fast-geglu", "openai-gelu"]
and (decoder_type == "gptnext" or is_mcore),
"num_attention_heads": num_attention_heads,
"num_kv_heads": num_kv_heads,
@@ -336,7 +336,7 @@ def dist_model_to_trt_llm_ckpt(
"apply_layernorm_1p": nemo_model_config.get("normalization", "") == "layernorm1p",
"tp_size": tp_size,
"split_gated_activation": nemo_model_config.get("activation", "gelu")
- in ["swiglu", "geglu", "fast-swiglu", "fast-geglu"],
+ in ["swiglu", "geglu", "fast-swiglu", "fast-geglu", "openai-gelu"],
"num_attention_heads": nemo_model_config["num_attention_heads"],
"num_kv_heads": nemo_model_config.get('num_query_groups', nemo_model_config['num_attention_heads']),
"convert_on_device": True,
diff --git a/nemo/export/trt_llm/nemo_ckpt_loader/nemo_file.py b/nemo/export/trt_llm/nemo_ckpt_loader/nemo_file.py
index 171932d84cfb..f3cb73811af1 100644
--- a/nemo/export/trt_llm/nemo_ckpt_loader/nemo_file.py
+++ b/nemo/export/trt_llm/nemo_ckpt_loader/nemo_file.py
@@ -117,7 +117,7 @@ def load_scaling_factors(state_dict: dict, basename: str, size: int) -> Optional
def filter_experts_extra_states(state_dict: dict):
- pattern = r'module\.decoder\.layers\.mlp\.experts\.experts\.linear_fc\d+\._extra_state/shard_\d+\.\d+_\d+\.\d+'
+ pattern = r'model\.decoder\.layers\.mlp\.experts\.experts\.linear_fc\d+\._extra_state/shard_\d+\.\d+_\d+\.\d+'
return {k: v for k, v in state_dict.items() if not re.fullmatch(pattern, k)}
@@ -283,16 +283,17 @@ def copy_tokenizer_files(config, out_dir):
outfile.write(infile.read())
-def get_tokenzier(tokenizer_dir_or_path: Path) -> PreTrainedTokenizer:
- """Loads the tokenizer from the decoded NEMO weights dir."""
+def get_tokenizer(tokenizer_dir_or_path: Union[str, Path]) -> PreTrainedTokenizer:
+ """Loads the tokenizer from the decoded NeMo weights dir."""
+ tokenizer_dir_or_path = Path(tokenizer_dir_or_path)
if (tokenizer_dir_or_path / "nemo_context").exists():
from nemo.lightning import io
tokenizer_spec = io.load_context((tokenizer_dir_or_path / "nemo_context"), subpath="model.tokenizer")
return build_tokenizer(tokenizer_spec)
else:
- if os.path.isdir(os.path.join(tokenizer_dir_or_path, "huggingface_tokenizer")):
- return AutoTokenizer.from_pretrained(os.path.join(tokenizer_dir_or_path, "huggingface_tokenizer"))
+ if (tokenizer_dir_or_path / "huggingface_tokenizer").is_dir():
+ return AutoTokenizer.from_pretrained(tokenizer_dir_or_path / "huggingface_tokenizer")
model_path = (
tokenizer_dir_or_path / "tokenizer.model" if tokenizer_dir_or_path.is_dir() else tokenizer_dir_or_path
@@ -316,24 +317,31 @@ def build_tokenizer(tokenizer):
if tokenizer.eos_token_id is None:
tokenizer.add_special_tokens({"eos_token": ""})
else:
- try:
- # If NeMo tokenizer, monkey patch interface
- from nemo.collections.common.tokenizers.tokenizer_spec import TokenizerSpec
-
- if isinstance(tokenizer, TokenizerSpec):
-
- def batch_encode_patch(self, ids):
+ # For NeMo tokenizers, monkey patch encode & batch_decode methods for unified interface
+ import nemo.collections.common.tokenizers as nemo_tokenizers
+
+ if isinstance(tokenizer, nemo_tokenizers.TokenizerSpec):
+ if isinstance(tokenizer, nemo_tokenizers.AutoTokenizer):
+ # Unwrap the original methods of HF tokenizer
+ batch_decode = tokenizer.tokenizer.batch_decode
+ encode = tokenizer.tokenizer.encode
+ elif isinstance(tokenizer, nemo_tokenizers.SentencePieceTokenizer):
+ # Define HF equivalents based on available SP methods
+ def batch_decode(self, ids):
if torch.is_tensor(ids):
ids = ids.cpu().numpy()
- ids = ids[0] if len(ids.shape) > 1 else ids
- return self.ids_to_text(ids)
+ if isinstance(ids, np.ndarray):
+ ids = ids.tolist()
+ return self.tokenizer.decode(ids)
+
+ encode = tokenizer.tokenizer.encode_as_ids
+ else:
+ raise NotImplementedError(f"Patching tokenizer methods for {type(tokenizer)} is not available")
- tokenizer.bos_token_id = tokenizer.bos_id
- tokenizer.eos_token_id = tokenizer.eos_id
- tokenizer.encode = tokenizer.text_to_ids
- TokenizerSpec.batch_decode = batch_encode_patch
- except:
- raise TypeError(f'Unsupported tokenizer build input: {type(tokenizer)}')
+ tokenizer.bos_token_id = tokenizer.bos_id
+ tokenizer.eos_token_id = tokenizer.eos_id
+ nemo_tokenizers.TokenizerSpec.encode = encode
+ nemo_tokenizers.TokenizerSpec.batch_decode = batch_decode
return tokenizer
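After the patching above, NeMo tokenizers expose the HF-style surface that the TensorRT-LLM runtime expects. A minimal usage sketch, assuming tokenizer is a NeMo TokenizerSpec (either the AutoTokenizer or SentencePieceTokenizer wrapper):

    tokenizer = build_tokenizer(tokenizer)
    ids = tokenizer.encode("Hello world")   # encode patched onto TokenizerSpec
    texts = tokenizer.batch_decode([ids])   # accepts a batch; tensors/arrays also work
    assert tokenizer.eos_token_id == tokenizer.eos_id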
@@ -394,7 +402,10 @@ def load_nemo_model(nemo_ckpt: Union[str, Path], nemo_export_dir: Union[str, Pat
if isinstance(v, (float, int, str, bool)):
nemo_model_config[k] = v
elif k == "activation_func":
- nemo_model_config["activation"] = v.__name__
+ if isinstance(v, torch.jit.ScriptFunction):
+ nemo_model_config["activation"] = v.name
+ else:
+ nemo_model_config["activation"] = v.__name__
if nemo_model_config.get("num_moe_experts") is None:
nemo_model_config["num_moe_experts"] = 0
@@ -402,10 +413,13 @@ def load_nemo_model(nemo_ckpt: Union[str, Path], nemo_export_dir: Union[str, Pat
if nemo_model_config["activation"] == "silu":
nemo_model_config["activation"] = "fast-swiglu"
elif nemo_model_config["activation"] == "openai_gelu":
- nemo_model_config["activation"] = "geglu"
+ nemo_model_config["activation"] = "openai-gelu"
+ elif nemo_model_config["activation"] == "squared_relu":
+ nemo_model_config["activation"] = "squared-relu"
nemo_model_config["mcore_gpt"] = True
nemo_model_config["max_position_embeddings"] = nemo_model_config.get("seq_length", 4096)
+ nemo_model_config["rotary_percentage"] = nemo_model_config.get("rotary_percent", 1.0)
shutil.copytree(io_folder, nemo_export_dir / "nemo_context")
else:
diff --git a/nemo/export/trt_llm/qnemo/qnemo_to_tensorrt_llm.py b/nemo/export/trt_llm/qnemo/qnemo_to_tensorrt_llm.py
index 7a1f7a6cc31d..f601c8cb1c5a 100644
--- a/nemo/export/trt_llm/qnemo/qnemo_to_tensorrt_llm.py
+++ b/nemo/export/trt_llm/qnemo/qnemo_to_tensorrt_llm.py
@@ -18,7 +18,6 @@
import warnings
from typing import List, Optional
-import tensorrt_llm
from tensorrt_llm.models import PretrainedConfig
from nemo.export.trt_llm.qnemo.utils import CONFIG_NAME, WEIGHTS_NAME
@@ -51,7 +50,7 @@ def qnemo_to_tensorrt_llm(
warnings.warn(
"Note that setting tensor_parallel_size, pipeline_parallel_size and use_parallel_embedding "
- " parameters for quantized models is done on calibration step with nemo.export.quantize module."
+ " parameters for quantized models is done on the calibration step (in PTQ workflow)."
" These parameters are ignored when building and running TensorRT-LLM engine below.",
UserWarning,
stacklevel=3,
@@ -77,8 +76,6 @@ def qnemo_to_tensorrt_llm(
use_qdq = quant_algo in ["FP8", "W8A8_SQ_PER_CHANNEL"]
- builder_opt = 4 if "RecurrentGemma" not in config.architecture else 0
-
speculative_decoding_mode = "medusa" if "Medusa" in config.architecture else None
build_cmd = "trtllm-build "
@@ -90,17 +87,12 @@ def qnemo_to_tensorrt_llm(
build_cmd += f"--max_input_len {max_input_len} "
build_cmd += f"--max_beam_width {max_beam_width} "
build_cmd += f"--max_prompt_embedding_table_size {max_prompt_embedding_table_size} "
- build_cmd += f"--builder_opt {builder_opt} "
build_cmd += f"--paged_kv_cache {'enable' if paged_kv_cache else 'disable'} "
build_cmd += f"--use_paged_context_fmha {'enable' if paged_context_fmha else 'disable'} "
build_cmd += f"--remove_input_padding {'enable' if remove_input_padding else 'disable'} "
build_cmd += f"--multiple_profiles {'enable' if multiple_profiles else 'disable'} "
build_cmd += f"--reduce_fusion {'enable' if reduce_fusion else 'disable'} "
- # TODO: resolve version check for setting use_fused_mlp once we move to 0.13.0 in the NeMo container
- if tensorrt_llm.__version__ >= "0.13.0":
- build_cmd += f"--use_fused_mlp {'enable' if use_fused_mlp else 'disable'} "
- else:
- build_cmd += "--use_fused_mlp " if use_fused_mlp else ""
+ build_cmd += f"--use_fused_mlp {'enable' if use_fused_mlp else 'disable'} "
if not use_qdq:
build_cmd += f"--gemm_plugin auto "
diff --git a/nemo/export/trt_llm/qnemo/tokenizer_utils.py b/nemo/export/trt_llm/qnemo/tokenizer_utils.py
index 36efa9259f9d..5a6b6280d7c1 100644
--- a/nemo/export/trt_llm/qnemo/tokenizer_utils.py
+++ b/nemo/export/trt_llm/qnemo/tokenizer_utils.py
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import logging
import os
from omegaconf import OmegaConf
@@ -24,27 +25,23 @@
TOKENIZER_CONFIG_FILE = "tokenizer_config.yaml"
TOKENIZER_DIR = "tokenizer"
+LOGGER = logging.getLogger("NeMo")
def get_nmt_tokenizer(nemo_checkpoint_path: str):
"""Build tokenizer from Nemo tokenizer config."""
- tokenizer_dir = os.path.join(nemo_checkpoint_path, TOKENIZER_DIR)
- if os.path.exists(tokenizer_dir):
- print(f"Initializing tokenizer from {TOKENIZER_DIR} directory")
- return AutoTokenizer.from_pretrained(tokenizer_dir)
-
- print(f"Initializing tokenizer from {TOKENIZER_CONFIG_FILE}")
+ LOGGER.info(f"Initializing tokenizer from {TOKENIZER_CONFIG_FILE}")
tokenizer_cfg = OmegaConf.load(os.path.join(nemo_checkpoint_path, TOKENIZER_CONFIG_FILE))
library = tokenizer_cfg.library
legacy = tokenizer_cfg.get("sentencepiece_legacy", library == "sentencepiece")
if library == "huggingface":
- print(f"Getting HuggingFace AutoTokenizer with pretrained_model_name: {tokenizer_cfg.type}")
+ LOGGER.info(f"Getting HuggingFace AutoTokenizer with pretrained_model_name: {tokenizer_cfg.type}")
tokenizer = AutoTokenizer.from_pretrained(tokenizer_cfg["type"], use_fast=tokenizer_cfg.get("use_fast", False))
elif library == "sentencepiece":
- print(f"Getting SentencePieceTokenizer with model: {tokenizer_cfg.model}")
+ LOGGER.info(f"Getting SentencePieceTokenizer with model: {tokenizer_cfg.model}")
tokenizer = SentencePieceTokenizer(
model_path=os.path.join(nemo_checkpoint_path, tokenizer_cfg.model), legacy=legacy
)
diff --git a/nemo/export/trt_llm/tensorrt_llm_build.py b/nemo/export/trt_llm/tensorrt_llm_build.py
index cdf8eaac6b1c..b2b761483700 100755
--- a/nemo/export/trt_llm/tensorrt_llm_build.py
+++ b/nemo/export/trt_llm/tensorrt_llm_build.py
@@ -54,6 +54,8 @@ def build_and_save_engine(
gpt_attention_plugin: str = "auto",
gemm_plugin: str = "auto",
reduce_fusion: bool = False,
+ gather_context_logits: bool = False,
+ gather_generation_logits: bool = False,
):
architecture = "LLaMAForCausalLM" if model_config.architecture == "LlamaForCausalLM" else model_config.architecture
try:
@@ -96,8 +98,8 @@ def build_and_save_engine(
'max_num_tokens': max_num_tokens,
'opt_num_tokens': opt_num_tokens,
'max_prompt_embedding_table_size': max_prompt_embedding_table_size,
- 'gather_context_logits': False,
- 'gather_generation_logits': False,
+ 'gather_context_logits': gather_context_logits,
+ 'gather_generation_logits': gather_generation_logits,
'strongly_typed': False,
'builder_opt': None,
'use_refit': use_refit,
diff --git a/nemo/export/trt_llm/tensorrt_llm_run.py b/nemo/export/trt_llm/tensorrt_llm_run.py
index 1772c071a745..ef67c918290f 100644
--- a/nemo/export/trt_llm/tensorrt_llm_run.py
+++ b/nemo/export/trt_llm/tensorrt_llm_run.py
@@ -32,17 +32,23 @@
from tensorrt_llm.lora_manager import LoraManager
from tensorrt_llm.mapping import Mapping
from tensorrt_llm.quantization import QuantMode
-from tensorrt_llm.runtime import GenerationSession, ModelConfig, ModelRunner, ModelRunnerCpp, SamplingConfig
+from tensorrt_llm.runtime import ModelConfig, ModelRunner, ModelRunnerCpp, SamplingConfig
from transformers import PreTrainedTokenizer
LOGGER = logging.getLogger("NeMo")
use_trtllm_bindings = True
try:
- from tensorrt_llm.bindings import GptJsonConfig, KvCacheConfig, WorldConfig
+ from tensorrt_llm.bindings import GptJsonConfig
except Exception as e:
use_trtllm_bindings = False
+TRTLLM_SUPPORTS_DEVICE_DISABLE = True
+try:
+ from tensorrt_llm.runtime.generation import DISABLE_TORCH_DEVICE_SET
+except (ImportError, ModuleNotFoundError):
+ TRTLLM_SUPPORTS_DEVICE_DISABLE = False
+
@dataclass
class TensorrtLLMHostContext:
@@ -494,12 +500,20 @@ def load_distributed(engine_dir, model_parallel_rank, gpus_per_node):
json_config_str = f.read()
engine = Engine.from_buffer(engine_buffer=engine_data, json_config_str=json_config_str, rank=model_parallel_rank)
+
+ if not TRTLLM_SUPPORTS_DEVICE_DISABLE:
+ raise RuntimeError(
+ f"TensorRT-LLM does not support torch device disabling. Please upgrade TensorRT-LLM to make use of this feature."
+ )
+ elif not DISABLE_TORCH_DEVICE_SET:
+ raise RuntimeError(
+ f"To use TensorRT-LLM's python ModelRunner API in load_distributed(...) you must set the env var DISABLE_TORCH_DEVICE_SET=1"
+ )
decoder = ModelRunner.from_engine(
engine=engine,
# We want the engine to have the mp_rank, but the python runtime to not reassign the device of the current process
# So we will set it to the current device
rank=torch.cuda.current_device(),
- _disable_torch_cuda_device_set=True,
)
tensorrt_llm_worker_context.decoder = decoder
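Since DISABLE_TORCH_DEVICE_SET is imported from tensorrt_llm.runtime.generation, the environment variable presumably has to be set before tensorrt_llm is first imported; a sketch:

    import os

    # Must be set before the first tensorrt_llm import (assumption: the flag is
    # read from the environment at module import time).
    os.environ["DISABLE_TORCH_DEVICE_SET"] = "1"

    from nemo.export.trt_llm.tensorrt_llm_run import load_distributed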
@@ -633,6 +647,7 @@ def generate(
streaming: bool = False,
output_log_probs=False,
multiprocessed_env=False,
+ output_generation_logits=False,
**sampling_kwargs,
) -> Optional[List[List[str]]]:
"""Generate the output sequence from the input sequence.
@@ -678,6 +693,7 @@ def generate(
multiprocessed_env=multiprocessed_env,
**sampling_kwargs,
)
+
assert outputs is not None
if tensorrt_llm.mpi_rank() != 0:
return None
@@ -691,8 +707,8 @@ def generate(
for b in range(output_ids.shape[0])
]
- if output_log_probs:
- return output_lines_list, log_probs
+ if output_generation_logits:
+ return output_lines_list, outputs['generation_logits']
return output_lines_list
diff --git a/nemo/export/vllm_exporter.py b/nemo/export/vllm_exporter.py
index 0ce7d49126d3..97575058bd1c 100644
--- a/nemo/export/vllm_exporter.py
+++ b/nemo/export/vllm_exporter.py
@@ -222,7 +222,6 @@ def export(
max_num_seqs=256,
# Note: max_model_len can be derived by model_config if the input value is None
max_model_len=model_config.max_model_len,
- use_v2_block_manager=False,
num_lookahead_slots=0,
delay_factor=0.0,
enable_chunked_prefill=False,
@@ -403,6 +402,7 @@ def get_triton_input(self):
Tensor(name="top_p", shape=(-1,), dtype=numpy.single, optional=True),
Tensor(name="temperature", shape=(-1,), dtype=numpy.single, optional=True),
Tensor(name="lora_uids", shape=(-1,), dtype=bytes, optional=True),
+ Tensor(name="output_generation_logits", shape=(-1,), dtype=numpy.bool_, optional=True),
)
return inputs
@@ -455,6 +455,7 @@ def forward(
prompt_embeddings_checkpoint_path: Optional[str] = None,
streaming: bool = False,
output_log_probs: bool = False,
+ output_generation_logits: bool = False,
) -> Union[List[List[str]], Iterable[List[List[str]]]]:
"""
The forward function performs LLM evaluation on the provided array of prompts with other parameters shared,
@@ -484,6 +485,9 @@ def forward(
if output_log_probs:
raise NotImplementedError("output_log_probs is not supported")
+ if output_generation_logits:
+ raise NotImplementedError("output_generation_logits is not supported")
+
request_ids = []
for index in range(len(input_texts)):
prompt = input_texts[index]
diff --git a/nemo/export/vllm_hf_exporter.py b/nemo/export/vllm_hf_exporter.py
new file mode 100755
index 000000000000..2d9754e08767
--- /dev/null
+++ b/nemo/export/vllm_hf_exporter.py
@@ -0,0 +1,115 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from typing import List
+
+import numpy as np
+from pytriton.decorators import batch
+from pytriton.model_config import Tensor
+from vllm import LLM, SamplingParams
+
+from nemo.deploy import ITritonDeployable
+from nemo.deploy.utils import cast_output, str_ndarray2list
+
+
+class vLLMHFExporter(ITritonDeployable):
+ """
+ The vLLMHFExporter class uses vLLM APIs to load a HF model into vLLM and makes it
+ deployable with the Triton server.
+
+ Example:
+ from nemo.export import vLLMHFExporter
+ from nemo.deploy import DeployPyTriton
+
+ exporter = vLLMHFExporter()
+ exporter.export(model="/path/to/model/")
+
+ server = DeployPyTriton(
+ model=exporter,
+ triton_model_name='model'
+ )
+
+ server.deploy()
+ server.serve()
+ server.stop()
+ """
+
+ def __init__(self):
+ self.model = None
+
+ def export(self, model):
+ """
+ Exports the HF checkpoint to vLLM and initializes the engine.
+ Args:
+ model (str): model name or the path
+ """
+ self.model = LLM(model=model)
+
+ @property
+ def get_triton_input(self):
+ inputs = (
+ Tensor(name="prompts", shape=(-1,), dtype=bytes),
+ Tensor(name="max_output_len", shape=(-1,), dtype=np.int_, optional=True),
+ Tensor(name="top_k", shape=(-1,), dtype=np.int_, optional=True),
+ Tensor(name="top_p", shape=(-1,), dtype=np.single, optional=True),
+ Tensor(name="temperature", shape=(-1,), dtype=np.single, optional=True),
+ )
+ return inputs
+
+ @property
+ def get_triton_output(self):
+ outputs = (Tensor(name="outputs", shape=(-1,), dtype=bytes),)
+ return outputs
+
+ @batch
+ def triton_infer_fn(self, **inputs: np.ndarray):
+ try:
+ infer_input = {"input_texts": str_ndarray2list(inputs.pop("prompts"))}
+ if "max_output_len" in inputs:
+ infer_input["max_output_len"] = inputs.pop("max_output_len")[0][0]
+ if "top_k" in inputs:
+ infer_input["top_k"] = inputs.pop("top_k")[0][0]
+ if "top_p" in inputs:
+ infer_input["top_p"] = inputs.pop("top_p")[0][0]
+ if "temperature" in inputs:
+ infer_input["temperature"] = inputs.pop("temperature")[0][0]
+
+ output_texts = self.forward(**infer_input)
+ output = cast_output(output_texts, np.bytes_)
+ except Exception as error:
+ err_msg = "An error occurred: {0}".format(str(error))
+ output = cast_output([err_msg], np.bytes_)
+
+ return {"outputs": output}
+
+ def forward(
+ self,
+ input_texts: List[str],
+ max_output_len: int = 64,
+ top_k: int = 1,
+ top_p: float = 0.0,
+ temperature: float = 1.0,
+ ):
+ assert self.model is not None, "Model is not initialized."
+
+ sampling_params = SamplingParams(
+ max_tokens=max_output_len, temperature=temperature, top_k=int(top_k), top_p=top_p
+ )
+ request_output = self.model.generate(input_texts, sampling_params)
+ output = []
+ for o in request_output:
+ output.append(o.outputs[0].text)
+
+ return output
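Besides the Triton deployment shown in the class docstring, the exporter can also be exercised directly; a sketch (the model id is a placeholder for any HF hub id or local path):

    exporter = vLLMHFExporter()
    exporter.export(model="meta-llama/Llama-2-7b-hf")  # placeholder model id

    outputs = exporter.forward(
        input_texts=["What is the color of a banana?"],
        max_output_len=32,
        top_k=1,
        top_p=0.0,
        temperature=1.0,
    )
    print(outputs[0])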
diff --git a/nemo/lightning/__init__.py b/nemo/lightning/__init__.py
index 2cc720e148d4..e01a2d5e5765 100644
--- a/nemo/lightning/__init__.py
+++ b/nemo/lightning/__init__.py
@@ -14,8 +14,8 @@
from typing import Union
-from lightning_fabric.plugins.environments import slurm
-from pytorch_lightning import plugins as _pl_plugins
+from lightning.fabric.plugins.environments import slurm
+from lightning.pytorch import plugins as _pl_plugins
# This is here to import it once, which improves the speed of launch when in debug-mode
from nemo.utils.import_utils import safe_import
@@ -33,7 +33,7 @@
from nemo.lightning.pytorch.plugins import data_sampler as _data_sampler
from nemo.lightning.pytorch.strategies import FSDPStrategy, MegatronStrategy
from nemo.lightning.pytorch.strategies.utils import RestoreConfig
-from nemo.lightning.pytorch.trainer import Trainer
+from nemo.lightning.pytorch.trainer import Trainer, configure_no_restart_validation_training_loop
from nemo.lightning.resume import AutoResume
@@ -66,6 +66,7 @@ def _is_slurm_interactive_mode():
"ModelCheckpoint",
"OptimizerModule",
"Trainer",
+ "configure_no_restart_validation_training_loop",
"get_vocab_size",
"teardown",
]
diff --git a/nemo/lightning/_strategy_lib.py b/nemo/lightning/_strategy_lib.py
index 1bee71e26e17..182454012d79 100644
--- a/nemo/lightning/_strategy_lib.py
+++ b/nemo/lightning/_strategy_lib.py
@@ -28,7 +28,7 @@
if TYPE_CHECKING:
- from lightning_fabric.utilities.types import Optimizable
+ from lightning.fabric.utilities.types import Optimizable
from megatron.core.model_parallel_config import ModelParallelConfig
@@ -89,7 +89,8 @@ def init_parallel_ranks(
seed=seed,
pipeline_model_parallel_split_rank=getattr(parallel_config, "pipeline_model_parallel_split_rank", None),
use_fp8=fp8,
- init_mpi_proc_group=getattr(parallel_config, "tp_comm_overlap", False),
+ init_mpi_proc_group=getattr(parallel_config, "tp_comm_overlap", False)
+ and getattr(parallel_config, "tp_comm_bootstrap_backend", None) == 'mpi',
# apex_transformer_log_level=self.cfg.get('apex_transformer_log_level', 30),
)
diff --git a/nemo/lightning/base.py b/nemo/lightning/base.py
index b6ba14726818..3b0b1c0c7234 100644
--- a/nemo/lightning/base.py
+++ b/nemo/lightning/base.py
@@ -19,7 +19,7 @@
import torch
import torch.distributed
-from pytorch_lightning import Trainer
+from lightning.pytorch import Trainer
from torch import nn
diff --git a/nemo/lightning/ckpt_utils.py b/nemo/lightning/ckpt_utils.py
index ae1fe520a119..fa588092497a 100644
--- a/nemo/lightning/ckpt_utils.py
+++ b/nemo/lightning/ckpt_utils.py
@@ -33,12 +33,6 @@ def idempotent_path_append(base_dir: Union[str, Path], suffix) -> Path:
return base_dir
-def ckpt_to_weights_subdir(filepath: Union[str, Path]) -> Path:
- """Given an input checkpoint filepath, clean it using `ckpt_to_dir` and then return the weights subdirectory."""
- base_dir = ckpt_to_dir(filepath=filepath)
- return idempotent_path_append(base_dir, WEIGHTS_PATH)
-
-
def ckpt_to_context_subdir(filepath: Union[str, Path]) -> Path:
"""Given an input checkpoint filepath, clean it using `ckpt_to_dir` and then return the context subdirectory."""
base_dir = ckpt_to_dir(filepath=filepath)
diff --git a/nemo/lightning/data.py b/nemo/lightning/data.py
index 6c7fd128e530..9cb685a096fa 100644
--- a/nemo/lightning/data.py
+++ b/nemo/lightning/data.py
@@ -19,7 +19,7 @@
from typing import List, Literal, Optional
import torch
-from pytorch_lightning.overrides.distributed import _IndexBatchSamplerWrapper
+from lightning.pytorch.overrides.distributed import _IndexBatchSamplerWrapper
from torch.utils.data import DataLoader, Dataset
@@ -375,6 +375,7 @@ def __init__(
drop_last: bool = True,
global_batch_size: Optional[int] = None,
pad_samples_to_global_batch_size: Optional[bool] = False,
+ seed: int = 0,
) -> None:
super().__init__(
total_samples=total_samples,
@@ -389,7 +390,30 @@ def __init__(
assert (
not pad_samples_to_global_batch_size
), "`MegatronPretrainingRandomSampler` does not support sample padding"
+ if (not drop_last) and self.micro_batch_times_data_parallel_size > 1:
+ raise RuntimeError(
+ "`MegatronPretrainingRandomSampler` does not support drop_last=False when micro_batch_size * data_parallel_size > 1. \
+ please reduce your MBS and data parallelism to 1 if you want to use drop_last=False, or switch to drop_last=True to avoid this error"
+ )
self.last_batch_size = self.total_samples % self.micro_batch_times_data_parallel_size
+ self.seed = seed
+
+ def __len__(self):
+ active_total_samples = self.total_samples - (self.last_batch_size if self.drop_last else 0)
+ num_available_samples = active_total_samples - self.consumed_samples % active_total_samples
+ if self.global_batch_size is not None:
+ if self.drop_last:
+ num_global_batches = num_available_samples // self.global_batch_size
+ else:
+ num_global_batches = (num_available_samples + self.global_batch_size - 1) // self.global_batch_size
+ # return len of dataloader in terms of micro batches to avoid discrepancy between len of dataloader and
+ # num of batches fetched (as training step fetches in terms of micro batches)
+ return num_global_batches * (self.global_batch_size // self.micro_batch_times_data_parallel_size)
+ else:
+ if self.drop_last:
+ return num_available_samples // self.micro_batch_times_data_parallel_size
+ else:
+ return (num_available_samples - 1) // self.micro_batch_times_data_parallel_size
def __iter__(self):
active_total_samples = self.total_samples - self.last_batch_size
@@ -404,7 +428,7 @@ def __iter__(self):
start_idx = self.data_parallel_rank * bucket_size
g = torch.Generator()
- g.manual_seed(self.epoch)
+ g.manual_seed(self.seed + self.epoch)
random_idx = torch.randperm(bucket_size, generator=g).tolist()
idx_range = [start_idx + x for x in random_idx[bucket_offset:]]
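A worked example of the new __len__, under the constraint enforced above (drop_last=True whenever micro_batch_size * data_parallel_size > 1): take total_samples=100, consumed_samples=0, micro_batch_size=2, data_parallel_size=2, global_batch_size=8:

    total_samples, global_batch_size = 100, 8
    micro_batch_times_dp = 2 * 2                             # micro_batch_size * data_parallel_size
    last_batch_size = total_samples % micro_batch_times_dp   # 0, so all samples are active
    num_global_batches = total_samples // global_batch_size  # 12 with drop_last=True
    length = num_global_batches * (global_batch_size // micro_batch_times_dp)
    assert length == 24  # reported in micro-batches, matching what the training step fetches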
diff --git a/nemo/lightning/fabric/conversion.py b/nemo/lightning/fabric/conversion.py
index 9ad713ec5261..d1c7affe3f40 100644
--- a/nemo/lightning/fabric/conversion.py
+++ b/nemo/lightning/fabric/conversion.py
@@ -15,10 +15,10 @@
from functools import singledispatch
from typing import Any, TypeVar
-from lightning_fabric import plugins as fl_plugins
-from lightning_fabric import strategies as fl_strategies
-from pytorch_lightning import plugins as pl_plugins
-from pytorch_lightning import strategies as pl_strategies
+from lightning.fabric import plugins as fl_plugins
+from lightning.fabric import strategies as fl_strategies
+from lightning.pytorch import plugins as pl_plugins
+from lightning.pytorch import strategies as pl_strategies
T = TypeVar('T')
FabricT = TypeVar('FabricT')
@@ -39,8 +39,8 @@ def to_fabric(obj: Any) -> Any:
NotImplementedError: If no converter is registered for the object's type.
Example:
- >>> from pytorch_lightning.strategies import Strategy as PLStrategy
- >>> from lightning_fabric.strategies import Strategy as FabricStrategy
+ >>> from lightning.pytorch.strategies import Strategy as PLStrategy
+ >>> from lightning.fabric.strategies import Strategy as FabricStrategy
>>> from nemo.lightning.fabric.conversion import to_fabric
>>>
>>> # Define a custom PyTorch Lightning strategy
@@ -70,7 +70,7 @@ def to_fabric(obj: Any) -> Any:
f"No Fabric converter registered for {type(obj).__name__}. "
f"To register a new conversion, use the @to_fabric.register decorator:\n\n"
f"from nemo.lightning.fabric.conversion import to_fabric\n"
- f"from lightning_fabric import strategies as fl_strategies\n\n"
+ f"from lightning.fabric import strategies as fl_strategies\n\n"
f"@to_fabric.register({type(obj).__name__})\n"
f"def _{type(obj).__name__.lower()}_converter(obj: {type(obj).__name__}) -> fl_strategies.Strategy:\n"
f" return fl_strategies.SomeStrategy(\n"
diff --git a/nemo/lightning/fabric/fabric.py b/nemo/lightning/fabric/fabric.py
index b1ca867cab83..7d604de749d6 100644
--- a/nemo/lightning/fabric/fabric.py
+++ b/nemo/lightning/fabric/fabric.py
@@ -17,12 +17,12 @@
from typing import TYPE_CHECKING, Optional, Protocol, Sequence, Type, TypeVar, Union, runtime_checkable
import fiddle as fdl
-import lightning_fabric as lb
-import pytorch_lightning as pl
+import lightning.fabric as lb
+import lightning.pytorch as pl
from torch import nn
from typing_extensions import Self, override
-from nemo.lightning.ckpt_utils import ckpt_to_context_subdir, ckpt_to_weights_subdir
+from nemo.lightning.ckpt_utils import ckpt_to_context_subdir
from nemo.lightning.io.mixin import IOMixin, serialization, track_io
if TYPE_CHECKING:
@@ -83,7 +83,7 @@ def load_model(
model = context.model
dist_model = self.setup_module(model)
- self.load(ckpt_to_weights_subdir(path), {"state_dict": dist_model})
+ self.load(path, {"state_dict": dist_model})
return dist_model
diff --git a/nemo/lightning/fabric/plugins.py b/nemo/lightning/fabric/plugins.py
index 723b48b6b357..58bf5f5ca9f9 100644
--- a/nemo/lightning/fabric/plugins.py
+++ b/nemo/lightning/fabric/plugins.py
@@ -16,7 +16,7 @@
from typing import TYPE_CHECKING, Any, Generator, Literal, TypeVar
import torch
-from lightning_fabric.plugins.precision import MixedPrecision
+from lightning.fabric.plugins.precision import MixedPrecision
from torch import nn
from torch.optim import Optimizer
diff --git a/nemo/lightning/fabric/strategies.py b/nemo/lightning/fabric/strategies.py
index 7445413b612e..30a03504060f 100644
--- a/nemo/lightning/fabric/strategies.py
+++ b/nemo/lightning/fabric/strategies.py
@@ -29,21 +29,21 @@
)
import torch
-from lightning_fabric.accelerators import CPUAccelerator
-from lightning_fabric.accelerators.accelerator import Accelerator
-from lightning_fabric.plugins.collectives.torch_collective import default_pg_timeout
-from lightning_fabric.plugins.environments.cluster_environment import ClusterEnvironment
-from lightning_fabric.plugins.io.checkpoint_io import CheckpointIO
-from lightning_fabric.plugins.precision import Precision
-from lightning_fabric.strategies import DDPStrategy
-from lightning_fabric.strategies.strategy import _validate_keys_for_strict_loading
-from lightning_fabric.utilities.types import _PATH, _Stateful
+from lightning.fabric.accelerators import CPUAccelerator
+from lightning.fabric.accelerators.accelerator import Accelerator
+from lightning.fabric.plugins.collectives.torch_collective import default_pg_timeout
+from lightning.fabric.plugins.environments.cluster_environment import ClusterEnvironment
+from lightning.fabric.plugins.io.checkpoint_io import CheckpointIO
+from lightning.fabric.plugins.precision import Precision
+from lightning.fabric.strategies import DDPStrategy
+from lightning.fabric.strategies.strategy import _validate_keys_for_strict_loading
+from lightning.fabric.utilities.types import _PATH, _Stateful
+from lightning.pytorch import LightningDataModule
+from lightning.pytorch.loops.fetchers import _DataFetcher
+from lightning.pytorch.plugins.io.wrapper import _WrappingCheckpointIO
+from lightning.pytorch.utilities.combined_loader import CombinedLoader
from megatron.core.distributed import DistributedDataParallelConfig
from megatron.core.optimizer import OptimizerConfig
-from pytorch_lightning import LightningDataModule
-from pytorch_lightning.loops.fetchers import _DataFetcher
-from pytorch_lightning.plugins.io.wrapper import _WrappingCheckpointIO
-from pytorch_lightning.utilities.combined_loader import CombinedLoader
from torch import Tensor, nn
from torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks import noop_hook
from torch.nn import Module
@@ -72,6 +72,7 @@ def __init__(
tensor_model_parallel_size: int = 1,
pipeline_model_parallel_size: int = 1,
virtual_pipeline_model_parallel_size: Optional[int] = None,
+ microbatch_group_size_per_vp_stage: Optional[int] = None,
context_parallel_size: int = 1,
sequence_parallel: bool = False,
expert_model_parallel_size: int = 1,
@@ -108,6 +109,11 @@ def __init__(
self.data_sampler: Optional['DataSampler'] = data_sampler
self.tensor_model_parallel_size = tensor_model_parallel_size
self.pipeline_model_parallel_size = pipeline_model_parallel_size
+ self.microbatch_group_size_per_vp_stage = (
+ microbatch_group_size_per_vp_stage
+ if microbatch_group_size_per_vp_stage is not None
+ else pipeline_model_parallel_size
+ )
self.context_parallel_size = context_parallel_size
self.expert_model_parallel_size = expert_model_parallel_size
self.moe_extended_tp = moe_extended_tp
@@ -406,6 +412,7 @@ def parallelism(self):
tensor_model_parallel_size=self.tensor_model_parallel_size,
pipeline_model_parallel_size=self.pipeline_model_parallel_size,
virtual_pipeline_model_parallel_size=self.virtual_pipeline_model_parallel_size,
+ microbatch_group_size_per_vp_stage=self.microbatch_group_size_per_vp_stage,
context_parallel_size=self.context_parallel_size,
sequence_parallel=self.sequence_parallel,
expert_model_parallel_size=self.expert_model_parallel_size,
diff --git a/nemo/lightning/io/api.py b/nemo/lightning/io/api.py
index 4abc8fa7cca3..869ec6e613cb 100644
--- a/nemo/lightning/io/api.py
+++ b/nemo/lightning/io/api.py
@@ -16,7 +16,7 @@
from typing import Callable, Optional, Type, overload
import fiddle as fdl
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from nemo.lightning.io.mixin import ConnectorMixin, ConnT, ModelConnector, load
from nemo.lightning.io.pl import TrainerContext
@@ -51,7 +51,17 @@ def load_context(path: Path, subpath: Optional[str] = None, build: bool = True):
checkpoint: TrainerContext = load_ckpt("/path/to/checkpoint", subpath="model.config")
"""
- return load(path, output_type=TrainerContext, subpath=subpath, build=build)
+ if not isinstance(path, Path):
+ path = Path(path)
+ try:
+ return load(path, output_type=TrainerContext, subpath=subpath, build=build)
+ except FileNotFoundError:
+ # Maintain backwards compatibility with checkpoints that don't have '/context' dir.
+ if path.parts[-1] == 'context':
+ path = path.parent
+ else:
+ path = path / 'context'
+ return load(path, output_type=TrainerContext, subpath=subpath, build=build)
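With this fallback, both the new-style path that ends in context and a bare checkpoint directory resolve to the same load; a sketch (paths are placeholders):

    from nemo.lightning.io.api import load_context

    cfg_new = load_context("/ckpts/my_run/context", subpath="model.config")
    cfg_old = load_context("/ckpts/my_run", subpath="model.config")  # retries under /context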
def model_importer(target: Type[ConnectorMixin], ext: str) -> Callable[[Type[ConnT]], Type[ConnT]]:
diff --git a/nemo/lightning/io/connector.py b/nemo/lightning/io/connector.py
index be1dcc3c088d..a38be6ee8f0a 100644
--- a/nemo/lightning/io/connector.py
+++ b/nemo/lightning/io/connector.py
@@ -18,11 +18,11 @@
from pathlib import Path, PosixPath, PurePath, WindowsPath
from typing import Generic, Optional, Tuple, TypeVar
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from filelock import FileLock, Timeout
-from pytorch_lightning.trainer.states import TrainerFn
+from lightning.pytorch.trainer.states import TrainerFn
-from nemo.lightning.ckpt_utils import ckpt_to_context_subdir, ckpt_to_weights_subdir
+from nemo.lightning.ckpt_utils import ckpt_to_context_subdir
# Dynamically inherit from the correct Path subclass based on the operating system.
if os.name == 'nt':
@@ -198,7 +198,7 @@ def nemo_save(self, output_path: Path, trainer: pl.Trainer, dump_io: bool = True
trainer.strategy.setup(trainer)
output_path = Path(output_path)
output_path.mkdir(parents=True, exist_ok=True)
- trainer.save_checkpoint(ckpt_to_weights_subdir(output_path))
+ trainer.save_checkpoint(output_path)
if getattr(trainer.strategy, "async_save", False):
trainer.strategy.checkpoint_io.maybe_finalize_save_checkpoint(blocking=True)
@@ -228,7 +228,9 @@ def nemo_load(
model = load_context(path).model
_trainer = trainer or Trainer(
- devices=1, accelerator="cpu" if cpu else "gpu", strategy=MegatronStrategy(ddp="pytorch")
+ devices=1,
+ accelerator="cpu" if cpu else "gpu",
+ strategy=MegatronStrategy(ddp="pytorch", setup_optimizers=False),
)
_trainer.strategy.connect(model)
@@ -255,10 +257,12 @@ def local_path(self, base_path: Optional[Path] = None) -> Path:
_base = Path(NEMO_MODELS_CACHE)
- # If the useu supplied `hf:///path/to/downloaded/my-model/`
+ # If the user supplied `hf:///path/to/downloaded/my-model/`
# then extract the last dir-name (i.e. my-model) and append it to _base
if str(self).startswith('/'):
- return _base / PurePath((str(self))).name
+ if self.suffix in ['.pt', '.pth']:
+ return _base / self.parent.name
+ return _base / self.name
return _base / str(self).replace("://", "/")
def on_import_ckpt(self, model: pl.LightningModule):
diff --git a/nemo/lightning/io/mixin.py b/nemo/lightning/io/mixin.py
index e2b9d7f409ae..33b7afdf1e76 100644
--- a/nemo/lightning/io/mixin.py
+++ b/nemo/lightning/io/mixin.py
@@ -81,15 +81,33 @@ def _partial_representer_with_defaults(dumper, data):
def _safe_object_representer(dumper, data):
- if not inspect.isclass(data):
- cls = data.__class__
- call = True
- else:
- cls = data
+ """
+ Represent a given object as YAML using the specified dumper.
+
+ This function is a fallback for objects that don't have specific representers.
+ If the object has a __qualname__ attr, _target_ is set to f"{inspect.getmodule(obj).__name__}.{obj.__qualname__}".
+ If the object does not have a __qualname__ attr, _target_ is derived from its __class__ attr instead.
+ The _call_ key indicates whether the target should be called to create an instance.
+
+ Args:
+ dumper (yaml.Dumper): The YAML dumper to use for serialization.
+ data (Any): The data to serialize. This can be any Python object,
+ but if it's a class or a class instance, special handling will be applied.
+
+ Returns:
+ str: The YAML representation of the data.
+ """
+ try:
+ obj = data
+ target = f"{inspect.getmodule(obj).__name__}.{obj.__qualname__}"
call = False
+ except AttributeError:
+ obj = data.__class__
+ target = f"{inspect.getmodule(obj).__name__}.{obj.__qualname__}"
+ call = True
value = {
- "_target_": f"{inspect.getmodule(cls).__name__}.{cls.__qualname__}", # type: ignore
+ "_target_": target, # type: ignore
"_call_": call,
}
return dumper.represent_data(value)
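To make the two branches concrete, a small standalone sketch that mirrors the representer's logic (illustration only, not the function above):

    import inspect

    import torch


    def sketch_represent(data):
        # Mirrors _safe_object_representer: objects with __qualname__ are referenced
        # directly (_call_: false); everything else is referenced via its class
        # and marked for instantiation (_call_: true).
        try:
            target = f"{inspect.getmodule(data).__name__}.{data.__qualname__}"
            call = False
        except AttributeError:
            cls = data.__class__
            target = f"{inspect.getmodule(cls).__name__}.{cls.__qualname__}"
            call = True
        return {"_target_": target, "_call_": call}


    print(sketch_represent(torch.nn.ReLU))    # {'_target_': 'torch.nn.modules.activation.ReLU', '_call_': False}
    print(sketch_represent(torch.nn.ReLU()))  # {'_target_': 'torch.nn.modules.activation.ReLU', '_call_': True}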
@@ -590,7 +608,9 @@ def _io_flatten_object(instance):
def _io_unflatten_object(values, metadata):
- assert hasattr(_thread_local, "output_dir")
+ if not hasattr(_thread_local, "output_dir"):
+ return fdl.Config.__unflatten__(values, metadata)
+
output_dir = _thread_local.output_dir
if len(values) == 1:
diff --git a/nemo/lightning/io/pl.py b/nemo/lightning/io/pl.py
index fb6ef707ab7c..788697887e39 100644
--- a/nemo/lightning/io/pl.py
+++ b/nemo/lightning/io/pl.py
@@ -17,12 +17,12 @@
from pathlib import Path
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
-from lightning_fabric.plugins import CheckpointIO
-from lightning_fabric.plugins.io.checkpoint_io import CheckpointIO
-from lightning_fabric.utilities.cloud_io import get_filesystem
-from lightning_fabric.utilities.types import _PATH
+from lightning.fabric.plugins import CheckpointIO
+from lightning.fabric.plugins.io.checkpoint_io import CheckpointIO
+from lightning.fabric.utilities.cloud_io import get_filesystem
+from lightning.fabric.utilities.types import _PATH
from megatron.core.dist_checkpointing.serialization import (
get_default_load_sharded_strategy,
get_default_save_sharded_strategy,
@@ -37,7 +37,7 @@
from torch import nn
from typing_extensions import Self, override
-from nemo.lightning.ckpt_utils import ckpt_to_dir
+from nemo.lightning.ckpt_utils import WEIGHTS_PATH, ckpt_to_dir
from nemo.lightning.io.capture import IOProtocol
from nemo.lightning.io.mixin import IOMixin
@@ -78,6 +78,26 @@ def construct_extra(cls, trainer: pl.Trainer) -> Dict[str, Any]:
return extra
+def ckpt_to_weights_subdir(filepath: Union[str, Path], is_saving: bool) -> Path:
+ """Given an input checkpoint filepath, clean it using `ckpt_to_dir` and then return the weights subdirectory, if it exists."""
+ filepath = ckpt_to_dir(filepath=filepath)
+ base_dir = filepath
+ assert isinstance(base_dir, Path)
+ if base_dir.parts[-1] != WEIGHTS_PATH:
+ maybe_base_dir = base_dir / WEIGHTS_PATH
+ if maybe_base_dir.is_dir() or is_saving:
+ base_dir = maybe_base_dir
+ ## handle adapter paths
+ if hasattr(base_dir, "base_model_path") and base_dir.base_model_path.parts[-1] != WEIGHTS_PATH:
+ maybe_base_model_path = base_dir.base_model_path / WEIGHTS_PATH
+ if maybe_base_model_path.is_dir() or is_saving:
+ base_dir.base_model_path = base_dir.base_model_path / WEIGHTS_PATH
+ if is_saving:
+ assert base_dir.parts[-1] == WEIGHTS_PATH
+ assert base_dir.parent == Path(filepath)
+ return base_dir
+
+
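A sketch of the resulting layout, assuming WEIGHTS_PATH == "weights" and placeholder paths:

    ckpt_to_weights_subdir("/ckpts/step=100-last", is_saving=True)
    # -> /ckpts/step=100-last/weights   (the subdir is always appended when saving)

    ckpt_to_weights_subdir("/ckpts/legacy_ckpt", is_saving=False)
    # -> /ckpts/legacy_ckpt             (unchanged on load: no weights/ subdir on disk)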
class MegatronCheckpointIO(AsyncCompatibleCheckpointIO, IOMixin):
"""CheckpointIO that utilizes :func:`torch.save` and :func:`torch.load` to save and load checkpoints respectively,
common for most use cases.
@@ -132,7 +152,8 @@ def save_checkpoint(self, checkpoint: Dict[str, Any], path: _PATH, storage_optio
f" storage_options, but {storage_options=} was provided."
f" Ignoring given storage_options"
)
- checkpoint_dir = ckpt_to_dir(path)
+ checkpoint_dir = ckpt_to_weights_subdir(path, is_saving=True)
+
fs = get_filesystem(checkpoint_dir)
if fs.isdir(checkpoint_dir) and dist_checkpointing.check_is_distributed_checkpoint(checkpoint_dir):
logging.info(f'Distributed checkpoint at path {checkpoint_dir} already exists, skipping saving')
@@ -142,21 +163,13 @@ def save_checkpoint(self, checkpoint: Dict[str, Any], path: _PATH, storage_optio
validate_sharding_integrity = not (self.validated_consistency and self.assume_constant_structure)
self.validated_consistency = True
- try:
- return dist_checkpointing.save(
- sharded_state_dict=checkpoint,
- checkpoint_dir=checkpoint_dir,
- sharded_strategy=self.save_sharded_strategy,
- validate_access_integrity=validate_sharding_integrity,
- async_sharded_save=self.async_save,
- )
- except:
- logging.error(f"Failed to save checkpoint to {checkpoint_dir}")
- # Do cleanup.
- import shutil
-
- shutil.rmtree(checkpoint_dir)
- raise
+ return dist_checkpointing.save(
+ sharded_state_dict=checkpoint,
+ checkpoint_dir=checkpoint_dir,
+ sharded_strategy=self.save_sharded_strategy,
+ validate_access_integrity=validate_sharding_integrity,
+ async_sharded_save=self.async_save,
+ )
@override
def load_checkpoint(
@@ -188,6 +201,11 @@ def load_checkpoint(
if not fs.isdir(path):
raise ValueError(f"Distributed checkpoints should be a directory. Found: {path}.")
+ # Load from ckpt_path/weights (new format) if it exists
+ path = ckpt_to_weights_subdir(path, is_saving=False)
+ if hasattr(path, "base_model_path") and not path.base_model_path.exists():
+ path.base_model_path = path.base_model_path.parent
+
if self.save_ckpt_format == 'zarr' and self.load_directly_on_device:
from megatron.core.dist_checkpointing.strategies.tensorstore import TensorStoreLoadShardedStrategy
diff --git a/nemo/lightning/io/state.py b/nemo/lightning/io/state.py
index 6632768ec8dd..f2c26aa4d495 100644
--- a/nemo/lightning/io/state.py
+++ b/nemo/lightning/io/state.py
@@ -242,7 +242,12 @@ def __call__(self, ctx: TransformCTX) -> TransformCTX:
source_matches_dict = {k: _match_keys(list(source_dict.keys()), v) for k, v in source_key_dict.items()}
target_matches = _match_keys(list(target_dict.keys()), target_key)
param_names = list(filter(lambda x: x in source_matches_dict, fn_params))
- for layer_names_group in zip(*([source_matches_dict[v] for v in param_names] + [target_matches])):
+ source_matches = [
+ source_matches_dict[v] if source_matches_dict[v].ndim > 0 else [source_matches_dict[v].item()]
+ for v in param_names
+ ]
+ target_matches = [target_matches if target_matches.ndim > 0 else [target_matches.item()]]
+ for layer_names_group in zip(*(source_matches + target_matches)):
# Wrap in a list if it's a single layer (ie non-expert)
if isinstance(layer_names_group[0], str):
layer_names_group = [[x] for x in layer_names_group]
diff --git a/nemo/lightning/megatron_parallel.py b/nemo/lightning/megatron_parallel.py
index 2a0e346ced2a..0f84f3be0a23 100644
--- a/nemo/lightning/megatron_parallel.py
+++ b/nemo/lightning/megatron_parallel.py
@@ -42,12 +42,12 @@
import torch
import torch.distributed
+from lightning.pytorch.utilities import move_data_to_device
from megatron.core import parallel_state
from megatron.core.distributed import DistributedDataParallel as McoreDDP
from megatron.core.distributed import DistributedDataParallelConfig
from megatron.core.optimizer import OptimizerConfig
from megatron.core.transformer.transformer_config import TransformerConfig
-from pytorch_lightning.utilities import move_data_to_device
from torch import Tensor, nn
from typing_extensions import override
@@ -57,7 +57,7 @@
STEP_OUTPUT = Optional[Union[Tensor, Mapping[str, Any]]]
if TYPE_CHECKING:
- import pytorch_lightning as pl
+ import lightning.pytorch as pl
@runtime_checkable
@@ -835,7 +835,7 @@ def add(self, *callbacks) -> "CallbackConnector":
"""
_pl_callback = None
try:
- import pytorch_lightning as pl
+ import lightning.pytorch as pl
_pl_callback = pl.Callback
except ImportError:
@@ -1040,6 +1040,7 @@ class MegatronStep(Generic[ModelT, DataT]):
micro_batch_size (Optional[int]): Size of each micro-batch.
seq_length (Optional[int]): Sequence length for the current step.
num_microbatches (Optional[int]): Number of micro-batches in this step.
+ decoder_seq_length (Optional[int]): Sequence length of decoder (used only in encoder-decoder style models) for the current step.
Type Parameters:
ModelT: The type of the model being used.
@@ -1054,6 +1055,7 @@ class MegatronStep(Generic[ModelT, DataT]):
seq_length: Optional[int] = None
num_microbatches: Optional[int] = None
step_i: Optional[int] = None
+ decoder_seq_length: Optional[int] = None
@classmethod
def infer(
@@ -1131,6 +1133,7 @@ def __call__(self) -> List[Any]:
seq_length=self.seq_length,
micro_batch_size=self.micro_batch_size,
forward_only=self.forward_only,
+ decoder_seq_length=self.decoder_seq_length,
)
def to_data_iterator_list(
diff --git a/nemo/lightning/nemo_logger.py b/nemo/lightning/nemo_logger.py
index 8b10f9aca50a..79f622ebc6a8 100644
--- a/nemo/lightning/nemo_logger.py
+++ b/nemo/lightning/nemo_logger.py
@@ -18,10 +18,10 @@
from pathlib import Path
from typing import List, Optional, Union
-import lightning_fabric as fl
-import pytorch_lightning as pl
-from pytorch_lightning.callbacks.model_checkpoint import ModelCheckpoint as PTLModelCheckpoint
-from pytorch_lightning.loggers import Logger, TensorBoardLogger, WandbLogger
+import lightning.fabric as fl
+import lightning.pytorch as pl
+from lightning.pytorch.callbacks.model_checkpoint import ModelCheckpoint as PTLModelCheckpoint
+from lightning.pytorch.loggers import Logger, TensorBoardLogger, WandbLogger
from nemo.lightning.io.mixin import IOMixin
from nemo.lightning.pytorch.callbacks import ModelCheckpoint
@@ -220,7 +220,7 @@ def _setup_trainer_model_checkpoint(self, trainer, log_dir, ckpt=None):
if callback.dirpath is None:
callback.dirpath = Path(log_dir / "checkpoints")
if callback.filename is None:
- callback.filename = f"{self.name}--{{{callback.monitor}:.4f}}-{{epoch}}"
+ callback.filename = f"{self.name}--{{{callback.monitor}:.4f}}-{{epoch}}-{{consumed_samples}}"
ModelCheckpoint.CHECKPOINT_NAME_LAST = callback.filename + "-last"
def _handle_task_config(self, task_config, log_dir):
diff --git a/nemo/lightning/pytorch/accelerate/__init__.py b/nemo/lightning/pytorch/accelerate/__init__.py
new file mode 100644
index 000000000000..d9155f923f18
--- /dev/null
+++ b/nemo/lightning/pytorch/accelerate/__init__.py
@@ -0,0 +1,13 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/nemo/lightning/pytorch/accelerate/transformer_engine.py b/nemo/lightning/pytorch/accelerate/transformer_engine.py
new file mode 100755
index 000000000000..8e621352d099
--- /dev/null
+++ b/nemo/lightning/pytorch/accelerate/transformer_engine.py
@@ -0,0 +1,123 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from types import MethodType
+
+import torch
+from nemo.utils import logging
+from nemo.utils.import_utils import safe_import_from
+
+te, HAVE_TE = safe_import_from("transformer_engine", "pytorch")
+
+
+def te_accelerate(model, fp8_autocast=False):
+ """
+ Replaces original model layers with TE's accelerated layers
+ Args:
+ model: HF model
+ fp8_autocast (bool): apply autocast or not
+ """
+
+ if not HAVE_TE:
+ logging.warning("Transformer Engine is not available and the module replacements " "will not be applied.")
+ else:
+ _apply_basic_module_replacement(model)
+ if fp8_autocast:
+ apply_fp8_autocast(model)
+
+
+@torch.no_grad
+def _apply_basic_module_replacement(model):
+ for name, module in model.named_children():
+ if isinstance(module, torch.nn.Linear):
+ has_bias = module.bias is not None
+ if any(p % 16 != 0 for p in module.weight.shape):
+ continue
+ te_module = te.Linear(
+ module.in_features, module.out_features, bias=has_bias, params_dtype=module.weight.dtype
+ )
+ te_module.weight.copy_(module.weight)
+ if has_bias:
+ te_module.bias.copy_(module.bias)
+
+ setattr(model, name, te_module)
+ elif isinstance(module, torch.nn.LayerNorm):
+ te_module = te.LayerNorm(module.normalized_shape[0], eps=module.eps, params_dtype=module.weight.dtype)
+ te_module.weight.copy_(module.weight)
+ te_module.bias.copy_(module.bias)
+ setattr(model, name, te_module)
+ elif isinstance(module, torch.nn.RMSNorm):
+ te_module = te.RMSNorm(module.normalized_shape[0], eps=module.eps, dtype=module.weight.dtype)
+ te_module.weight.copy_(module.weight)
+ # torch.nn.RMSNorm has no bias parameter, so only the weight is copied
+ setattr(model, name, te_module)
+ else:
+ _apply_basic_module_replacement(module)
+
+
+def is_te_accelerated(model):
+ """
+ Checks whether model has TE layers or not
+ Args:
+ model: HF model
+ """
+
+ if not HAVE_TE:
+ logging.warning("Transformer Engine is not available.")
+ return False
+ else:
+ for name, module in model.named_modules():
+ if isinstance(module, (te.LayerNorm, te.Linear, te.TransformerLayer)):
+ return True
+
+ return False
+
+
+def apply_fp8_autocast(model, fp8_recipe_handler=None):
+ """
+ Applies TE's autocast
+ Args:
+ model: HF model
+ fp8_recipe_handler: FP8 recipe handler providing kwargs for TE's DelayedScaling recipe
+ """
+
+ if not HAVE_TE:
+ logging.warning("Transformer Engine is not available and the FP8 autocast " "will not be applied.")
+ else:
+ import transformer_engine.common.recipe as te_recipe
+
+ kwargs = fp8_recipe_handler.to_kwargs() if fp8_recipe_handler is not None else {}
+ if "fp8_format" in kwargs:
+ kwargs["fp8_format"] = getattr(te_recipe.Format, kwargs["fp8_format"])
+ use_during_eval = kwargs.pop("use_autocast_during_eval", False)
+ fp8_recipe = te_recipe.DelayedScaling(**kwargs)
+ new_forward = _contextual_fp8_autocast(model.forward, fp8_recipe, use_during_eval)
+
+ if hasattr(model.forward, "__func__"):
+ model.forward = MethodType(new_forward, model)
+ else:
+ model.forward = new_forward
+
+
+def _contextual_fp8_autocast(model_forward, fp8_recipe, use_during_eval=False):
+ from transformer_engine.pytorch import fp8_autocast
+
+ def forward(self, *args, **kwargs):
+ enabled = use_during_eval or self.training
+ with fp8_autocast(enabled=enabled, fp8_recipe=fp8_recipe):
+ return model_forward(*args, **kwargs)
+
+ forward.__wrapped__ = model_forward
+
+ return forward
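A usage sketch for the new helpers, assuming Transformer Engine is installed and using a placeholder HF model id; only modules that pass the shape checks above are swapped:

    from transformers import AutoModelForCausalLM

    from nemo.lightning.pytorch.accelerate.transformer_engine import is_te_accelerated, te_accelerate

    model = AutoModelForCausalLM.from_pretrained("gpt2")  # placeholder model id
    te_accelerate(model, fp8_autocast=False)              # swap eligible Linear/LayerNorm/RMSNorm
    assert is_te_accelerated(model)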
diff --git a/nemo/lightning/pytorch/callbacks/__init__.py b/nemo/lightning/pytorch/callbacks/__init__.py
old mode 100644
new mode 100755
index 8da1a50dcd64..031f027e63b2
--- a/nemo/lightning/pytorch/callbacks/__init__.py
+++ b/nemo/lightning/pytorch/callbacks/__init__.py
@@ -16,6 +16,7 @@
from nemo.lightning.pytorch.callbacks.debugging import ParameterDebugger
from nemo.lightning.pytorch.callbacks.garbage_collection import GarbageCollectionCallback
from nemo.lightning.pytorch.callbacks.memory_profiler import MemoryProfileCallback
+from nemo.lightning.pytorch.callbacks.model_callback import ModelCallback
from nemo.lightning.pytorch.callbacks.model_checkpoint import ModelCheckpoint
from nemo.lightning.pytorch.callbacks.model_transform import ModelTransform
from nemo.lightning.pytorch.callbacks.nsys import NsysCallback
@@ -36,4 +37,5 @@
"DdpParityChecker",
"GarbageCollectionCallback",
"ParameterDebugger",
+ "ModelCallback",
]
diff --git a/nemo/lightning/pytorch/callbacks/ddp_parity_checker.py b/nemo/lightning/pytorch/callbacks/ddp_parity_checker.py
index 391666fb8f32..320140d76f3a 100644
--- a/nemo/lightning/pytorch/callbacks/ddp_parity_checker.py
+++ b/nemo/lightning/pytorch/callbacks/ddp_parity_checker.py
@@ -15,8 +15,8 @@
from functools import cache
import torch
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core.utils import check_param_hashes_across_dp_replicas
-from pytorch_lightning.callbacks.callback import Callback
from nemo.lightning import io
from nemo.utils import logging
diff --git a/nemo/lightning/pytorch/callbacks/debugging.py b/nemo/lightning/pytorch/callbacks/debugging.py
index 5f6e722ef89b..135e8e486837 100644
--- a/nemo/lightning/pytorch/callbacks/debugging.py
+++ b/nemo/lightning/pytorch/callbacks/debugging.py
@@ -14,9 +14,9 @@
from typing import Callable, Dict, List, Optional, Union
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
-from pytorch_lightning.callbacks import Callback
+from lightning.pytorch.callbacks import Callback
from nemo.lightning.pytorch.optim.megatron import MegatronOptimizerModule
from nemo.utils import logging
diff --git a/nemo/lightning/pytorch/callbacks/garbage_collection.py b/nemo/lightning/pytorch/callbacks/garbage_collection.py
index ba4d378ee893..90e122f6d3e4 100644
--- a/nemo/lightning/pytorch/callbacks/garbage_collection.py
+++ b/nemo/lightning/pytorch/callbacks/garbage_collection.py
@@ -15,7 +15,7 @@
import gc
from typing import Any
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from nemo.utils import logging
diff --git a/nemo/lightning/pytorch/callbacks/megatron_comm_overlap.py b/nemo/lightning/pytorch/callbacks/megatron_comm_overlap.py
index 2f53babfe559..172aeaeb855d 100644
--- a/nemo/lightning/pytorch/callbacks/megatron_comm_overlap.py
+++ b/nemo/lightning/pytorch/callbacks/megatron_comm_overlap.py
@@ -13,12 +13,12 @@
# limitations under the License.
from dataclasses import asdict, dataclass, fields
-import pytorch_lightning as pl
+import lightning.pytorch as pl
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core import ModelParallelConfig
from megatron.core.distributed import DistributedDataParallelConfig
from megatron.core.optimizer import OptimizerConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo.collections.llm.recipes.tp_overlap_configs.userbuffers import TransformerLayerTPOverlapCfg
from nemo.lightning.pytorch.strategies.megatron_strategy import MegatronStrategy, ParallelismConfig
@@ -43,6 +43,7 @@ class _CommOverlapConfig:
# Tensor parallel communication overlap (experimental)
tp_comm_overlap: bool = None
tp_comm_overlap_cfg: dict = None
+ tp_comm_bootstrap_backend: str = None
# Pipeline parallel communication overlap
overlap_p2p_comm: bool = None
batch_p2p_comm: bool = None
@@ -88,6 +89,7 @@ def __init__(
self,
tp_comm_overlap: bool = None,
tp_comm_overlap_cfg: TransformerLayerTPOverlapCfg = None,
+ tp_comm_bootstrap_backend: str = None,
overlap_p2p_comm: bool = None,
batch_p2p_comm: bool = None,
overlap_grad_reduce: bool = None,
@@ -102,6 +104,7 @@ def __init__(
self.user_comm_overlap_cfg = _CommOverlapConfig(
tp_comm_overlap=tp_comm_overlap,
tp_comm_overlap_cfg=tp_comm_overlap_cfg,
+ tp_comm_bootstrap_backend=tp_comm_bootstrap_backend,
overlap_p2p_comm=overlap_p2p_comm,
batch_p2p_comm=batch_p2p_comm,
overlap_grad_reduce=overlap_grad_reduce,
@@ -114,6 +117,7 @@ def __init__(
)
self.tp_comm_overlap_cfg = None
+ self.tp_comm_bootstrap_backend = None
self.need_tp_overlap_ub_init = False
def _get_model_comm_overlap_cfgs(
@@ -129,6 +133,7 @@ def _get_model_comm_overlap_cfgs(
# Optimizations disabled by default, can be overridden by user
comm_overlap_cfg.tp_comm_overlap = False
comm_overlap_cfg.tp_comm_overlap_cfg = None
+ comm_overlap_cfg.tp_comm_bootstrap_backend = None
comm_overlap_cfg.defer_embedding_wgrad_compute = False
comm_overlap_cfg.wgrad_deferral_limit = -1
@@ -181,7 +186,8 @@ def _get_optimizer_overlap_cfgs(self, parallelism_cfg: ParallelismConfig) -> _Co
comm_overlap_cfg.overlap_grad_reduce = True
comm_overlap_cfg.overlap_param_gather = True
if parallelism_cfg.pipeline_model_parallel_size > 1 and vp_size > 1:
- comm_overlap_cfg.overlap_param_gather_with_optimizer_step = True
+ # Currently disabled due to an issue with checkpointing
+ # comm_overlap_cfg.overlap_param_gather_with_optimizer_step = True
comm_overlap_cfg.align_param_gather = True
comm_overlap_cfg = self._override_user_cfgs(comm_overlap_cfg)
@@ -215,6 +221,7 @@ def setup(self, trainer: pl.Trainer, pl_module: pl.LightningModule, stage: str)
if trainer.model.config.tp_comm_overlap:
self.tp_comm_overlap_cfg = comm_overlap_cfg.tp_comm_overlap_cfg
+ self.tp_comm_bootstrap_backend = comm_overlap_cfg.tp_comm_bootstrap_backend
self.need_tp_overlap_ub_init = True
# Data parallel overlap is only available with the Megatron DDP and Distributed optimizer
@@ -257,6 +264,7 @@ def _init_te_userbuffers(self, model_parallel_cfg: ModelParallelConfig):
tp_size=parallel_state.get_tensor_model_parallel_world_size(),
use_fp8=fp8,
ub_cfgs=self.tp_comm_overlap_cfg,
+ bootstrap_backend=self.tp_comm_bootstrap_backend,
)
except Exception as error:
raise Exception(f"Tensor parallel overlap: userbuffer initialization failed with {error}")
diff --git a/nemo/lightning/pytorch/callbacks/memory_profiler.py b/nemo/lightning/pytorch/callbacks/memory_profiler.py
index 5b2ee1d46e11..2813bd141a7a 100644
--- a/nemo/lightning/pytorch/callbacks/memory_profiler.py
+++ b/nemo/lightning/pytorch/callbacks/memory_profiler.py
@@ -15,7 +15,7 @@
import os
import torch
-from pytorch_lightning.callbacks.callback import Callback
+from lightning.pytorch.callbacks.callback import Callback
from torch.utils.viz._cycles import warn_tensor_cycles
from nemo.lightning import io
diff --git a/nemo/lightning/pytorch/callbacks/model_callback.py b/nemo/lightning/pytorch/callbacks/model_callback.py
new file mode 100755
index 000000000000..0625e3f006c5
--- /dev/null
+++ b/nemo/lightning/pytorch/callbacks/model_callback.py
@@ -0,0 +1,145 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import inspect
+from typing import Callable, Optional
+from lightning.pytorch.callbacks import LambdaCallback
+
+
+class ModelCallback(LambdaCallback):
+ """
+ A callback that extends LambdaCallback to intelligently handle function parameters.
+ Functions can take either (trainer, pl_module), just (pl_module), or just (trainer).
+
+ Supported parameter names:
+ - trainer, pl_trainer
+ - model, pl_model, pl_module, module
+
+ Example:
+ >>> # Using with torch.compile
+ >>> callback = ModelCallback(on_train_start=torch.compile)
+ >>>
+ >>> # Using with thunder_compile
+ >>> callback = ModelCallback(on_train_start=thunder_compile)
+ >>>
+ >>> # Mix different callbacks
+ >>> callback = ModelCallback(
+ ... on_train_start=lambda model: torch.compile(model),
+ ... on_fit_start=lambda trainer, model: print(f"Starting fit with {model}")
+ ... )
+ """
+
+ TRAINER_PARAMS = {'trainer', 'pl_trainer'}
+ MODEL_PARAMS = {'model', 'pl_model', 'pl_module', 'module'}
+
+ def __init__(
+ self,
+ setup: Optional[Callable] = None,
+ teardown: Optional[Callable] = None,
+ on_fit_start: Optional[Callable] = None,
+ on_fit_end: Optional[Callable] = None,
+ on_sanity_check_start: Optional[Callable] = None,
+ on_sanity_check_end: Optional[Callable] = None,
+ on_train_batch_start: Optional[Callable] = None,
+ on_train_batch_end: Optional[Callable] = None,
+ on_train_epoch_start: Optional[Callable] = None,
+ on_train_epoch_end: Optional[Callable] = None,
+ on_validation_epoch_start: Optional[Callable] = None,
+ on_validation_epoch_end: Optional[Callable] = None,
+ on_test_epoch_start: Optional[Callable] = None,
+ on_test_epoch_end: Optional[Callable] = None,
+ on_validation_batch_start: Optional[Callable] = None,
+ on_validation_batch_end: Optional[Callable] = None,
+ on_test_batch_start: Optional[Callable] = None,
+ on_test_batch_end: Optional[Callable] = None,
+ on_train_start: Optional[Callable] = None,
+ on_train_end: Optional[Callable] = None,
+ on_validation_start: Optional[Callable] = None,
+ on_validation_end: Optional[Callable] = None,
+ on_test_start: Optional[Callable] = None,
+ on_test_end: Optional[Callable] = None,
+ on_exception: Optional[Callable] = None,
+ on_save_checkpoint: Optional[Callable] = None,
+ on_load_checkpoint: Optional[Callable] = None,
+ on_before_backward: Optional[Callable] = None,
+ on_after_backward: Optional[Callable] = None,
+ on_before_optimizer_step: Optional[Callable] = None,
+ on_before_zero_grad: Optional[Callable] = None,
+ on_predict_start: Optional[Callable] = None,
+ on_predict_end: Optional[Callable] = None,
+ on_predict_batch_start: Optional[Callable] = None,
+ on_predict_batch_end: Optional[Callable] = None,
+ on_predict_epoch_start: Optional[Callable] = None,
+ on_predict_epoch_end: Optional[Callable] = None,
+ ):
+ # Create a dictionary of non-None callbacks
+ callbacks = {
+ name: self._wrap_func(func)
+ for name, func in locals().items()
+ if name != 'self' and name != '__class__' and func is not None
+ }
+
+ super().__init__(**callbacks)
+
+ def _get_param_type(self, param_name: str) -> Optional[str]:
+ """Determine if a parameter name refers to trainer or model."""
+ param_name = param_name.lower()
+ if param_name in self.TRAINER_PARAMS:
+ return 'trainer'
+ if param_name in self.MODEL_PARAMS:
+ return 'model'
+ return None
+
+ def _wrap_func(self, func: Callable) -> Callable:
+ """Wraps a function to handle parameter inspection and passing."""
+ sig = inspect.signature(func)
+ params = sig.parameters
+
+ def wrapped(trainer, pl_module, *args, **kwargs):
+ call_args = {}
+
+ for param_name, param in params.items():
+ param_type = self._get_param_type(param_name)
+
+ if param_type == 'trainer':
+ call_args[param_name] = trainer
+ elif param_type == 'model':
+ call_args[param_name] = pl_module
+ else:
+ # If parameter name is not recognized, use position to determine
+ if len(params) == 1:
+ call_args[param_name] = pl_module
+ elif len(params) == 2:
+ if len(call_args) == 0:
+ call_args[param_name] = trainer
+ else:
+ call_args[param_name] = pl_module
+ else:
+ raise ValueError(
+ f"Unable to determine parameter mapping for '{param_name}'. "
+ f"Please use recognized parameter names: "
+ f"trainer/pl_trainer for trainer, "
+ f"model/pl_model/pl_module/module for model."
+ )
+
+ try:
+ return func(**call_args)
+ except TypeError as e:
+ raise TypeError(
+ f"Failed to call callback function {func.__name__ if hasattr(func, '__name__') else func}. "
+ f"Attempted to pass arguments: {call_args.keys()}. Error: {str(e)}"
+ ) from e
+
+ return wrapped
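A short sketch of the parameter mapping in practice: a one-argument hook receives the LightningModule, and hooks that use the recognized names are matched by name rather than position. This is a usage illustration only:

```python
# Hedged usage sketch for ModelCallback's parameter inspection.
import torch
from nemo.lightning.pytorch.callbacks import ModelCallback

def compile_model(model):              # single parameter -> receives pl_module
    return torch.compile(model)

def announce(trainer, module):         # recognized names -> mapped by name
    print(f"fit starts at step {trainer.global_step} with {type(module).__name__}")

callback = ModelCallback(on_train_start=compile_model, on_fit_start=announce)
# trainer = Trainer(..., callbacks=[callback])
```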
diff --git a/nemo/lightning/pytorch/callbacks/model_checkpoint.py b/nemo/lightning/pytorch/callbacks/model_checkpoint.py
index 1bfe9ccd8052..455022b1ba44 100644
--- a/nemo/lightning/pytorch/callbacks/model_checkpoint.py
+++ b/nemo/lightning/pytorch/callbacks/model_checkpoint.py
@@ -17,14 +17,14 @@
import shutil
from datetime import timedelta
from pathlib import Path
-from typing import Any, Dict, Iterable, List, Optional, Union
+from typing import Any, Dict, Iterable, List, Literal, Optional, Union
-import pytorch_lightning
+import lightning.pytorch
import torch
from _weakref import proxy
-from pytorch_lightning.callbacks.model_checkpoint import ModelCheckpoint as PTLModelCheckpoint
-from pytorch_lightning.callbacks.model_checkpoint import _is_local_file_protocol
-from pytorch_lightning.utilities import rank_zero_info
+from lightning.pytorch.callbacks.model_checkpoint import ModelCheckpoint as PTLModelCheckpoint
+from lightning.pytorch.callbacks.model_checkpoint import _is_local_file_protocol
+from lightning.pytorch.utilities import rank_zero_info
from nemo.lightning.ckpt_utils import ckpt_to_dir
from nemo.lightning.io.pl import TrainerContext
@@ -58,13 +58,12 @@ class ModelCheckpoint(PTLModelCheckpoint):
"""
UNFINISHED_CHECKPOINT_SUFFIX = "-unfinished"
- WEIGHTS_PATH = "weights"
def __init__(
self,
monitor: Optional[str] = "val_loss",
verbose: bool = True,
- save_last: Optional[bool] = True,
+ save_last: Optional[Union[bool, Literal["link"]]] = True,
save_top_k: int = 3,
save_weights_only: bool = False, ## TODO: check support
mode: str = "min",
@@ -196,7 +195,9 @@ def nemo_topk_check_previous_run(self):
match = re.search('[A-z]', checkpoint[index:])
if match:
value = checkpoint[index : index + match.start() - 1] # -1 due to separator hyphen
- self.best_k_models[checkpoint] = float(value)
+ else:
+ value = checkpoint[index:]
+ self.best_k_models[checkpoint] = float(value)
if len(self.best_k_models) < 1:
return # No saved checkpoints yet
@@ -311,7 +312,7 @@ def _del_model_without_trainer(self, filepath: str) -> None:
if torch.distributed.is_initialized():
torch.distributed.barrier()
- def _ema_callback(self, trainer: 'pytorch_lightning.Trainer'):
+ def _ema_callback(self, trainer: 'lightning.pytorch.Trainer'):
from nemo.collections.common.callbacks import EMA
ema_callback = None
@@ -392,7 +393,7 @@ def remove_checkpoint_unfinished_marker(checkpoint_path: Union[Path, str], barri
except:
return
- def file_exists(self, filepath: str, trainer: "pytorch_lightning.Trainer", check_dist_ckpt: bool = True) -> bool:
+ def file_exists(self, filepath: str, trainer: "lightning.pytorch.Trainer", check_dist_ckpt: bool = True) -> bool:
"""Checks if a file or a file without a suffix (distributed checkpoint) exists."""
exists = self._fs.exists(filepath) or (check_dist_ckpt and self._fs.exists(ckpt_to_dir(filepath)))
return trainer.strategy.broadcast(exists)
@@ -431,12 +432,11 @@ def _link_checkpoint(self, trainer: "pl.Trainer", filepath: str, linkpath: str,
linkpath = ckpt_to_dir(linkpath)
super()._link_checkpoint(trainer, filepath, linkpath)
- def _save_checkpoint(self, trainer: 'pytorch_lightning.Trainer', filepath: str) -> None:
+ def _save_checkpoint(self, trainer: 'lightning.pytorch.Trainer', filepath: str) -> None:
from nemo.utils.get_rank import is_global_rank_zero
# barrier_after=True, so all ranks continue after the unfinished checkpoint marker is placed.
# if anything goes wrong during checkpointing, we should be able to detect that data is incomplete.
- ckpt_filepath = ckpt_to_dir(filepath) / ModelCheckpoint.WEIGHTS_PATH
self.set_checkpoint_unfinished_marker(filepath, barrier_after=True)
ema_callback = self._ema_callback(trainer)
@@ -453,15 +453,15 @@ def _save_checkpoint(self, trainer: 'pytorch_lightning.Trainer', filepath: str)
if self.async_save:
raise ValueError('async_save with EMA not supported')
with ema_callback.save_original_optimizer_state(trainer):
- super()._save_checkpoint(trainer, ckpt_filepath)
+ super()._save_checkpoint(trainer, filepath)
# save EMA copy of the model as well.
with ema_callback.save_ema_model(trainer):
- rank_zero_info(f"Saving EMA weights to separate checkpoint {ckpt_filepath}")
- ckpt_filepath = self._ema_format_filepath(ckpt_filepath)
+ rank_zero_info(f"Saving EMA weights to separate checkpoint {filepath}")
+ filepath = self._ema_format_filepath(filepath)
if self.verbose:
- rank_zero_info(f"Saving EMA weights to separate checkpoint {ckpt_filepath}")
- super()._save_checkpoint(trainer, ckpt_filepath)
+ rank_zero_info(f"Saving EMA weights to separate checkpoint {filepath}")
+ super()._save_checkpoint(trainer, filepath)
self.remove_checkpoint_unfinished_marker(filepath, barrier_before=True)
else:
## Determine whether to include optimizer states in the checkpoint
@@ -487,7 +487,7 @@ def _save_checkpoint(self, trainer: 'pytorch_lightning.Trainer', filepath: str)
self.deferred_ckpts_to_remove.append([])
else:
storage_options = None
- trainer.save_checkpoint(ckpt_filepath, save_weights_only, storage_options=storage_options)
+ trainer.save_checkpoint(filepath, save_weights_only, storage_options=storage_options)
if self.always_save_context and is_global_rank_zero():
TrainerContext.from_trainer(trainer).io_dump(ckpt_to_dir(filepath) / "context", yaml_attrs=["model"])
@@ -499,7 +499,7 @@ def _save_checkpoint(self, trainer: 'pytorch_lightning.Trainer', filepath: str)
finalize_fn()
def _get_finalize_save_checkpoint_callback(
- self, trainer: 'pytorch_lightning.Trainer', filepath: str, global_step: int
+ self, trainer: 'lightning.pytorch.Trainer', filepath: str, global_step: int
):
"""Creates a callback that can be used to finalize async (and sync) ckpt saves."""
@@ -534,7 +534,7 @@ def _cb():
return _cb
- def _remove_checkpoint(self, trainer: "pytorch_lightning.Trainer", filepath: str, override_async=False) -> None:
+ def _remove_checkpoint(self, trainer: "lightning.pytorch.Trainer", filepath: str, override_async=False) -> None:
"""Performs checkpoint removal.
With async save, `self._remove_checkpoint` is called before the checkpoint
@@ -596,11 +596,11 @@ def _remove_unfinished_checkpoints(checkpoint_dir: Union[Path, str]) -> None:
}
checkpoint_filepaths = {f.resolve() for f in checkpoint_dir.rglob("*.ckpt")}
- for ckpt_filepath in checkpoint_filepaths:
- possible_marker_path = ModelCheckpoint.format_checkpoint_unfinished_marker_path(ckpt_filepath)
+ for filepath in checkpoint_filepaths:
+ possible_marker_path = ModelCheckpoint.format_checkpoint_unfinished_marker_path(filepath)
if possible_marker_path in existing_marker_filepaths:
- logging.warning(f'Removing unfinished checkpoint: {ckpt_filepath}')
- os.remove(ckpt_filepath)
+ logging.warning(f'Removing unfinished checkpoint: {filepath}')
+ os.remove(filepath)
# some directories might be distributed checkpoints, we remove these if they have a unfinished marker
all_dirpaths = {d.resolve() for d in checkpoint_dir.glob("*") if d.is_dir()}
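Two behavioral notes fall out of this hunk: `save_last` now also accepts the literal `"link"` (the "last" checkpoint becomes a symlink to the most recent real checkpoint instead of a second full copy, matching the upstream Lightning option), and checkpoints are saved at `filepath` directly now that the `weights/` subdirectory handling lives in `ckpt_to_weights_subdir`. A hedged sketch of the new `save_last` usage:

```python
# Hedged sketch; arguments follow the constructor shown above.
from nemo.lightning.pytorch.callbacks import ModelCheckpoint

checkpoint_callback = ModelCheckpoint(
    monitor="val_loss",
    save_top_k=3,
    save_last="link",  # symlink "last" to the newest checkpoint instead of re-saving
)
```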
diff --git a/nemo/lightning/pytorch/callbacks/model_transform.py b/nemo/lightning/pytorch/callbacks/model_transform.py
index 64602b501ac3..b3c3310aa30f 100644
--- a/nemo/lightning/pytorch/callbacks/model_transform.py
+++ b/nemo/lightning/pytorch/callbacks/model_transform.py
@@ -15,7 +15,7 @@
from functools import wraps
from typing import Any, Callable, Optional, TypeVar
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from torch import nn
from nemo.utils import logging
@@ -85,7 +85,7 @@ def _maybe_apply_transform(self, trainer):
def apply_transform(self, trainer):
self.model_transform(trainer.model)
- from pytorch_lightning.utilities import model_summary
+ from lightning.pytorch.utilities import model_summary
logging.info(
f"After applying model_transform:\n" f"{model_summary.summarize(trainer.lightning_module, max_depth=1)}"
diff --git a/nemo/lightning/pytorch/callbacks/moe_token_drop.py b/nemo/lightning/pytorch/callbacks/moe_token_drop.py
index 10483dca5096..b0c7ff7999eb 100644
--- a/nemo/lightning/pytorch/callbacks/moe_token_drop.py
+++ b/nemo/lightning/pytorch/callbacks/moe_token_drop.py
@@ -12,9 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytorch_lightning as pl
+import lightning.pytorch as pl
+from lightning.pytorch.callbacks.callback import Callback
from megatron.core import ModelParallelConfig
-from pytorch_lightning.callbacks.callback import Callback
from nemo.lightning.pytorch.strategies.megatron_strategy import MegatronStrategy
diff --git a/nemo/lightning/pytorch/callbacks/nsys.py b/nemo/lightning/pytorch/callbacks/nsys.py
index 2a5707d3166c..f350eae40730 100644
--- a/nemo/lightning/pytorch/callbacks/nsys.py
+++ b/nemo/lightning/pytorch/callbacks/nsys.py
@@ -15,7 +15,7 @@
from typing import List, Optional
import torch
-from pytorch_lightning.callbacks.callback import Callback
+from lightning.pytorch.callbacks.callback import Callback
from nemo.utils import logging
from nemo.utils.get_rank import get_rank
diff --git a/nemo/lightning/pytorch/callbacks/peft.py b/nemo/lightning/pytorch/callbacks/peft.py
index 2e32b1f0b73e..fb846043c8aa 100644
--- a/nemo/lightning/pytorch/callbacks/peft.py
+++ b/nemo/lightning/pytorch/callbacks/peft.py
@@ -18,16 +18,17 @@
from pathlib import Path
from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Tuple
-import pytorch_lightning as pl
+import lightning.pytorch as pl
+import torch
import torch.nn as nn
-from lightning_fabric.utilities.types import _PATH
-from pytorch_lightning.plugins.io.wrapper import _WrappingCheckpointIO
-from pytorch_lightning.trainer.states import TrainerFn
+from lightning.fabric.utilities.types import _PATH
+from lightning.pytorch.plugins.io.wrapper import _WrappingCheckpointIO
+from lightning.pytorch.trainer.states import TrainerFn
from typing_extensions import override
from nemo.lightning.ckpt_utils import ADAPTER_META_FILENAME
from nemo.lightning.io.mixin import IOMixin
-from nemo.lightning.io.pl import ckpt_to_dir
+from nemo.lightning.io.pl import ckpt_to_dir, ckpt_to_weights_subdir
from nemo.lightning.megatron_parallel import MegatronParallel
from nemo.lightning.pytorch.callbacks.model_transform import ModelTransform
from nemo.lightning.pytorch.optim.megatron import MegatronOptimizerModule
@@ -93,17 +94,19 @@ def __call__(self, model: nn.Module) -> nn.Module:
Returns:
nn.Module: The transformed model with PEFT applied.
"""
+ self.freeze_model(model)
- # If using megatron virtual pipeline parallelism, model is a list of
- # model chunks so iterate over model
+ # apply walk to model(s)
if isinstance(model, MegatronParallel) and len(model) > 1:
for model_chunk in model:
- model_chunk.freeze()
model_chunk.walk(self.transform)
+ elif isinstance(model, torch.nn.parallel.distributed.DistributedDataParallel):
+ model.module.walk(self.transform)
else:
- model.freeze()
model.walk(self.transform)
+ if hasattr(model, "trainer") and model.trainer.state.fn != TrainerFn.FITTING:
+ self.freeze_model(model)
return model
def freeze_model(self, model: nn.Module) -> None:
@@ -118,36 +121,56 @@ def freeze_model(self, model: nn.Module) -> None:
Returns:
nn.Module: The transformed model with PEFT applied.
"""
- model.freeze()
- model.train(mode=True)
+ if isinstance(model, MegatronParallel) and len(model) > 1:
+ for model_chunk in model:
+ model_chunk.freeze()
+ if isinstance(model, torch.nn.parallel.distributed.DistributedDataParallel):
+ model.module.freeze()
+ else:
+ model.freeze()
+ if hasattr(model, "trainer") and model.trainer.state.fn == TrainerFn.FITTING:
+ model.train(mode=True)
def setup(self, trainer: pl.Trainer, pl_module: pl.LightningModule, stage: str) -> None:
+ """PTL callback setup function."""
from nemo.lightning.pytorch.strategies.utils import create_checkpoint_io
super().setup(trainer, pl_module, stage=stage)
trainer.strategy.trainer = trainer
wrapped_io = partial(WrappedAdapterIO, peft=self)
+
+ ckpt_io_kwarg_names = [
+ "save_ckpt_format",
+ "async_save",
+ "torch_dist_multiproc",
+ "assume_constant_structure",
+ "parallel_save",
+ "parallel_save_within_dp",
+ "parallel_load",
+ "load_directly_on_device",
+ ]
ckpt_io_kwargs = {
- "save_ckpt_format": trainer.strategy.save_ckpt_format,
- "async_save": trainer.strategy.async_save,
- "torch_dist_multiproc": trainer.strategy.torch_dist_multiproc,
- "assume_constant_structure": trainer.strategy.assume_constant_structure,
- "parallel_save": trainer.strategy.parallel_save,
- "parallel_save_within_dp": trainer.strategy.parallel_save_within_dp,
- "parallel_load": trainer.strategy.parallel_load,
- "load_directly_on_device": trainer.strategy.load_directly_on_device,
+ arg: getattr(trainer.strategy, arg)
+ for arg in filter(lambda x: hasattr(trainer.strategy, x), ckpt_io_kwarg_names)
}
trainer.strategy._checkpoint_io = create_checkpoint_io(wrapping_ckpt_io=wrapped_io, **ckpt_io_kwargs)
self.wrapped_io = (
trainer.strategy._checkpoint_io._checkpoint_io
- if trainer.strategy.async_save
+ if getattr(trainer.strategy, 'async_save', False)
else trainer.strategy._checkpoint_io
)
trainer.strategy._init_model_parallel = False
trainer.strategy._setup_optimizers = False
def apply_transform(self, trainer):
+ """
+ This function does the following:
+ 1. Apply PEFT model transform.
+ 2. Set up model parallel and optimizer, which were skipped in setup
+ 3. Load weights and optimizer state dict
+ 4. Set up `finalize_model_grads` from mcore.
+ """
super().apply_transform(trainer)
self.trainable_params = set(
name for name, param in trainer.lightning_module.named_parameters() if param.requires_grad
@@ -193,6 +216,10 @@ def apply_transform(self, trainer):
)
def adapter_key_filter(self, key: str) -> bool:
+ """
+ Given a key in the state dict, return whether the key is an adapter (or base model).
+ This function can be subclassed in each PEFT method class.
+ """
return key in self.trainable_params or ".adapter." in key or key.endswith(".adapters")
@@ -229,6 +256,36 @@ def __init__(self, to_wrap: nn.Module, adapter: nn.Module):
self.to_wrap = to_wrap
self.adapter = adapter
+ def base_linear_forward(self, x):
+ """
+ Run the forward method of the linear module `to_wrap`.
+ Return a tuple of three elements: linear_output, bias, layernorm_output
+
+ x -> [layernorm/identity] -> layernorm_output -> [linear] -> linear_output, bias
+
+ layernorm_output differs from the input x only when the linear layer is LayerNormColumnParallelLinear.
+ """
+ linear_output = self.to_wrap(x)
+ assert isinstance(
+ linear_output, tuple
+ ), f"{self.to_wrap} should return a tuple but instead returns {linear_output}"
+ """ Four cases for the wrapped module's return values
+ 1. nothing: (out, None)
+ 2. return_bias: (out, bias)
+ 3. return_layernorm_output: ((out, ln_out), None)
+ 4. both: (out, bias, ln_out)
+ """
+ bias = None
+ layernorm_output = x
+ if len(linear_output) == 2:
+ linear_output, bias = linear_output
+ if isinstance(linear_output, tuple) and len(linear_output) == 2:
+ linear_output, layernorm_output = linear_output
+ elif len(linear_output) == 3:
+ linear_output, bias, layernorm_output = linear_output
+
+ return linear_output, bias, layernorm_output
+
def state_dict(self, destination=None, prefix='', keep_vars=False):
"""Retrieve the state dictionary of the wrapped module and adapter.
@@ -283,33 +340,38 @@ def sharded_state_dict(
sharded_state_dict.update(self.adapter.sharded_state_dict(f"{prefix}adapter.", sharded_offsets, metadata))
return sharded_state_dict
- def load_state_dict(self, state_dict, strict=True):
- """Load a state dictionary into the wrapped module and adapter.
- This method overrides the default load_state_dict behavior to handle
- loading states for both the main module and the adapter.
+class WrappedAdapterIO(_WrappingCheckpointIO, AsyncCompatibleCheckpointIO):
+ """
+ A wrapper class for checkpoint I/O operations, specifically designed for PEFT (Parameter-Efficient Fine-Tuning).
- Args:
- state_dict (dict): The state dictionary to load.
- strict (bool): Whether to strictly enforce that the keys in state_dict
- match the keys returned by this module's state_dict()
- function. Defaults to True.
- """
- # Check if the 'adapters' key is present in the state_dict
- if 'adapters' in state_dict:
- adapter_state_dict = state_dict.pop('adapters')
- else:
- adapter_state_dict = {}
+ This class handles the complexities of saving and loading checkpoints for both initial PEFT training and resuming
+ PEFT training. It ensures that only the necessary adapter weights are saved and loaded, while also preserving the
+ base model weights.
- # Load the main module state dict
- self.to_wrap.load_state_dict(state_dict, strict)
+ **Usage:**
- # Load the adapter module state dict if present
- if adapter_state_dict:
- self.adapter.load_state_dict(adapter_state_dict, strict)
+ 1. **Initial PEFT Training:**
+ - The class handles the saving of only adapter weights.
+ - Metadata about the base model checkpoint is stored for future reference.
+ 2. **PEFT Resume:**
+ - The class loads both base model and adapter weights.
+ - The previously stored metadata is used to locate the correct base model checkpoint.
+
+ **Attributes:**
+
+ - `peft`: The PEFT instance associated with the wrapped checkpoint I/O.
+ - `model_ckpt_path`: The path to the base model checkpoint.
+ - `adapter_ckpt_path`: The path to the adapter checkpoint.
+ Note that the paths are set by save/load functions and users do not need to set them.
+
+ **Methods:**
+
+ - `save_checkpoint`: Saves the adapter weights and metadata to the specified path.
+ - `load_checkpoint`: Loads the base model and adapter weights based on the specified path and metadata.
+ """
-class WrappedAdapterIO(_WrappingCheckpointIO, AsyncCompatibleCheckpointIO):
peft: Optional[PEFT] = None
model_ckpt_path: Optional[Path] = None
adapter_ckpt_path: Optional[Path] = None
@@ -334,7 +396,7 @@ def save_checkpoint(self, checkpoint: Dict[str, Any], path: _PATH, storage_optio
if is_global_rank_zero():
metadata = {"model_ckpt_path": str(self.model_ckpt_path)}
- base_dir = ckpt_to_dir(path)
+ base_dir = ckpt_to_weights_subdir(path, is_saving=True)
base_dir.mkdir(parents=True, exist_ok=True)
adapter_meta_path = base_dir / ADAPTER_META_FILENAME
with open(adapter_meta_path, "w") as f:
@@ -362,7 +424,8 @@ def load_checkpoint(
As such, this function will be entered twice during PEFT training resume.
For the FIRST TIME this function is called by trainer._checkpoint_connector._restore_modules_and_callbacks.
- `path = AdapterPath(, base_model_path=)`, and sharded_state_dict contains only base model weights
+ `path = AdapterPath(, base_model_path=)`, and sharded_state_dict contains only base
+ model weights
For the SECOND TIME this function is called by PEFT.apply_transform (above, in the same file).
`path = PosixPath()`, and sharded_state_dict contains only adapter weights.
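The new `base_linear_forward` normalizes the wrapped layer's varied return shapes so that concrete PEFT methods can stay small. A hedged, LoRA-style sketch of how an adapter subclass might build on it; `AdapterWrapper` is the wrapper class defined in this file (name per NeMo), and the exact combination in each NeMo PEFT method may differ:

```python
# Hedged sketch of an AdapterWrapper subclass using base_linear_forward.
from nemo.lightning.pytorch.callbacks.peft import AdapterWrapper

class LoRALinear(AdapterWrapper):
    def forward(self, x):
        linear_output, bias, layernorm_output = self.base_linear_forward(x)
        # the adapter sees the (possibly layer-normed) input; its output is a
        # residual correction on top of the frozen base projection
        adapter_output = self.adapter(layernorm_output)
        return linear_output + adapter_output, bias
```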
diff --git a/nemo/lightning/pytorch/callbacks/preemption.py b/nemo/lightning/pytorch/callbacks/preemption.py
index 69ac378ed698..98b59a9da0d0 100644
--- a/nemo/lightning/pytorch/callbacks/preemption.py
+++ b/nemo/lightning/pytorch/callbacks/preemption.py
@@ -18,8 +18,8 @@
from typing import Optional
import torch
-from pytorch_lightning.callbacks import Callback
-from pytorch_lightning.trainer.trainer import Trainer
+from lightning.pytorch.callbacks import Callback
+from lightning.pytorch.trainer.trainer import Trainer
from nemo.lightning.io.mixin import IOMixin
from nemo.utils import logging
diff --git a/nemo/lightning/pytorch/callbacks/progress_bar.py b/nemo/lightning/pytorch/callbacks/progress_bar.py
index 6912c3fc57d4..f3c3c4555bac 100644
--- a/nemo/lightning/pytorch/callbacks/progress_bar.py
+++ b/nemo/lightning/pytorch/callbacks/progress_bar.py
@@ -12,8 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from pytorch_lightning.callbacks.progress import TQDMProgressBar
-from pytorch_lightning.callbacks.progress.tqdm_progress import _update_n
+from lightning.pytorch.callbacks.progress import TQDMProgressBar
+from lightning.pytorch.callbacks.progress.tqdm_progress import _update_n
class MegatronProgressBar(TQDMProgressBar):
diff --git a/nemo/lightning/pytorch/callbacks/progress_printer.py b/nemo/lightning/pytorch/callbacks/progress_printer.py
index d32f7d70cbdd..12d05ed2950c 100644
--- a/nemo/lightning/pytorch/callbacks/progress_printer.py
+++ b/nemo/lightning/pytorch/callbacks/progress_printer.py
@@ -15,9 +15,9 @@
from collections import defaultdict
from typing import Any
+from lightning.pytorch.callbacks.progress import ProgressBar
+from lightning.pytorch.utilities.types import STEP_OUTPUT
from megatron.core.num_microbatches_calculator import get_num_microbatches
-from pytorch_lightning.callbacks.progress import ProgressBar
-from pytorch_lightning.utilities.types import STEP_OUTPUT
from typing_extensions import override
diff --git a/nemo/lightning/pytorch/optim/base.py b/nemo/lightning/pytorch/optim/base.py
index 1d476142941a..fec3b7c118a4 100644
--- a/nemo/lightning/pytorch/optim/base.py
+++ b/nemo/lightning/pytorch/optim/base.py
@@ -17,8 +17,8 @@
from copy import deepcopy
from typing import List, Optional
-import pytorch_lightning as L
-from pytorch_lightning.utilities.types import OptimizerLRScheduler
+import lightning.pytorch as L
+from lightning.pytorch.utilities.types import OptimizerLRScheduler
from torch.optim import Optimizer
from nemo.lightning.io.mixin import IOMixin
diff --git a/nemo/lightning/pytorch/optim/megatron.py b/nemo/lightning/pytorch/optim/megatron.py
index 7ac413d4544f..9f9d2029be9e 100644
--- a/nemo/lightning/pytorch/optim/megatron.py
+++ b/nemo/lightning/pytorch/optim/megatron.py
@@ -15,7 +15,7 @@
import inspect
from typing import Callable, List, Optional
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from megatron.core.distributed import finalize_model_grads
from megatron.core.optimizer import OptimizerConfig
from megatron.core.utils import get_model_config
diff --git a/nemo/lightning/pytorch/optim/pytorch.py b/nemo/lightning/pytorch/optim/pytorch.py
index 6600fc0cf0a4..ccd03f563ef8 100644
--- a/nemo/lightning/pytorch/optim/pytorch.py
+++ b/nemo/lightning/pytorch/optim/pytorch.py
@@ -14,8 +14,10 @@
from typing import Callable, List, Optional
-import pytorch_lightning as pl
+import lightning.pytorch as pl
+import lightning.pytorch as L
from torch.optim import Optimizer
+from torch.optim.optimizer import ParamsT
from nemo.lightning.megatron_parallel import MegatronParallel
from nemo.lightning.pytorch.optim.base import LRSchedulerModule, OptimizerModule
@@ -25,20 +27,43 @@ def _param_does_not_have_wd(param_name, param):
return 'bias' in param_name
+def _extract_model_params_for_optim(model, weight_decay=0, no_weight_decay_cond=None):
+ params_with_wd, params_without_wd = [], []
+ if no_weight_decay_cond is not None:
+ for name, param in model.named_parameters():
+ if no_weight_decay_cond(name, param):
+ params_without_wd.append(param)
+ else:
+ params_with_wd.append(param)
+ else:
+ # materialize the generator so the len() check below works
+ params_with_wd = list(model.parameters())
+
+ assert max(map(len, (params_with_wd, params_without_wd))) > 0, "Expected at least one parameter for the optimizer"
+
+ return [
+ {'params': params, 'weight_decay': wd}
+ for params, wd in zip((params_with_wd, params_without_wd), (weight_decay, 0))
+ ]
+
+
class PytorchOptimizerModule(OptimizerModule):
"""A OptimizerModule for pytorch optimizers.
Attributes:
- config (OptimizerConfig): Configuration for the optimizer.
+ optimizer_fn (Callable[[ParamsT], Optimizer]): Factory that builds the optimizer from the model parameters.
no_weight_decay_cond (Optional[Callable]): Condition for no weight decay.
scale_lr_cond (Optional[Callable]): Condition for scaling learning rate.
lr_mult (float): Learning rate multiplier.
Example::
- config = OptimizerConfig(...)
+ optimizer_fn = run.Partial(
+ SGD,
+ lr=lr,
+ weight_decay=wd,
+ )
lr_scheduler = MyLRSchedulerModule(...)
- optimizer_module = PytorchOptimizerModule(config, lr_scheduler)
+ optimizer_module = PytorchOptimizerModule(optimizer_fn, lr_scheduler)
Methods:
setup(model): Sets up the optimizer.
@@ -47,8 +72,7 @@ class PytorchOptimizerModule(OptimizerModule):
def __init__(
self,
- optim_cls,
- config: dict = {'lr': 3e-4},
+ optimizer_fn: Callable[[ParamsT], Optimizer],
lr_scheduler: Optional[LRSchedulerModule] = None,
no_weight_decay_cond: Optional[Callable] = _param_does_not_have_wd,
scale_lr_cond: Optional[Callable] = None,
@@ -57,7 +81,7 @@ def __init__(
"""Initializes the PytorchOptimizerModule.
Args:
- config (OptimizerConfig): Configuration for the optimizer.
+ optimizer_fn (Callable[[ParamsT], Optimizer]): Factory that builds the optimizer from the model parameters.
lr_scheduler (Optional[LRSchedulerModule]): The learning rate scheduler module.
no_weight_decay_cond (Optional[Callable]): Condition for no weight decay.
scale_lr_cond (Optional[Callable]): Condition for scaling learning rate.
@@ -65,12 +89,10 @@ def __init__(
"""
super().__init__(lr_scheduler=lr_scheduler)
- self.optim_cls = optim_cls
- self.config = config
+ self.optimizer_fn = optimizer_fn
self.no_weight_decay_cond = no_weight_decay_cond
self.scale_lr_cond = scale_lr_cond
self.lr_mult = lr_mult
- self.optim_cls = optim_cls
def on_fit_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule"):
# Noop
@@ -92,41 +114,17 @@ def optimizers(self, model) -> List[Optimizer]:
if isinstance(model, MegatronParallel):
raise ValueError("Model cannot be an instance of MegatronParallel")
- params_with_wd, params_without_wd = [], []
- if self.no_weight_decay_cond is not None:
- for name, param in model.named_parameters():
- if self.no_weight_decay_cond(name, param):
- params_without_wd.append(param)
- else:
- params_with_wd.append(param)
- else:
- params_with_wd = model.parameters()
-
- optimizers = []
- if len(params_with_wd) > 0:
- optimizers.append(
- self.optim_cls(
- params_with_wd,
- **self.config,
- )
- )
-
- if len(params_without_wd) > 0:
- wd = self.config.get('weight_decay', None)
- kwargs['weight_decay'] = 0
- optimizers.append(
- self.optim_cls(
- params_without_wd,
- **kwargs,
- )
- )
- # restore value
- if wd is not None:
- kwargs['weight_decay'] = wd
-
- assert len(optimizers) > 0, "Expected at least one optimizer with params"
- return optimizers
+ # optimizer_fn is expected to be a functools.partial (or run.Partial);
+ # fall back to 0 when no keywords are attached
+ wd = getattr(self.optimizer_fn, 'keywords', {}).get('weight_decay', 0)
+ return self.optimizer_fn(_extract_model_params_for_optim(model, wd, self.no_weight_decay_cond))
def finalize_model_grads(self, *args, **kwargs):
# Noop
pass
+
+ def connect(self, model: L.LightningModule) -> None:
+ """Connects the optimizer module to the model and trainer.
+
+ Args:
+ model (L.LightningModule): The model to which the optimizer module is being connected.
+ """
+ model.configure_optimizers = lambda: self.optimizers(model)
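Under the refactor, `PytorchOptimizerModule` takes a single factory `optimizer_fn: Callable[[ParamsT], Optimizer]` instead of `(optim_cls, config)`; reading `weight_decay` back via `.keywords` implies the factory is a `functools.partial` or `run.Partial`. A hedged construction sketch:

```python
# Hedged sketch; a plain functools.partial stands in for run.Partial here.
from functools import partial
from torch.optim import SGD
from nemo.lightning.pytorch.optim.pytorch import PytorchOptimizerModule

optim_module = PytorchOptimizerModule(
    optimizer_fn=partial(SGD, lr=3e-4, weight_decay=0.01),
)
# optim_module.connect(model) later installs
# model.configure_optimizers = lambda: optim_module.optimizers(model)
```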
diff --git a/nemo/lightning/pytorch/plugins/data_sampler.py b/nemo/lightning/pytorch/plugins/data_sampler.py
index f37fd38adf53..479e442d5ccb 100644
--- a/nemo/lightning/pytorch/plugins/data_sampler.py
+++ b/nemo/lightning/pytorch/plugins/data_sampler.py
@@ -16,7 +16,7 @@
import logging
from typing import List, Literal, Optional
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from torch.utils.data import DataLoader
from nemo.lightning.megatron_parallel import MegatronStep
@@ -44,8 +44,10 @@ def __init__(
init_consumed_samples: int = 0,
init_global_step: int = 0,
output_log: bool = True,
+ decoder_seq_len: Optional[int] = None,
):
self.seq_len = seq_len
+ self.decoder_seq_len = decoder_seq_len
self.output_log = output_log
self.micro_batch_size = micro_batch_size
self.global_batch_size = global_batch_size
@@ -110,6 +112,7 @@ def on_megatron_step_start(self, step: MegatronStep) -> MegatronStep:
seq_length=self.seq_len,
micro_batch_size=self.micro_batch_size,
num_microbatches=self.num_microbatches,
+ decoder_seq_length=self.decoder_seq_len,
)
def on_megatron_microbatches_start(self, step: MegatronStep) -> None:
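The new `decoder_seq_len` simply rides along into `MegatronStep` as `decoder_seq_length`, which encoder-decoder (T5-style) models need when the two sequence lengths differ. A hedged sketch; the sampler class name follows this file's NeMo module and the remaining arguments follow the signature above:

```python
# Hedged sketch of a data sampler for an encoder-decoder model.
from nemo.lightning.pytorch.plugins.data_sampler import MegatronDataSampler

sampler = MegatronDataSampler(
    seq_len=512,           # encoder sequence length
    decoder_seq_len=128,   # new: decoder sequence length
    micro_batch_size=4,
    global_batch_size=128,
)
```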
diff --git a/nemo/lightning/pytorch/plugins/mixed_precision.py b/nemo/lightning/pytorch/plugins/mixed_precision.py
index 5c318b59e54a..830978ba11e7 100644
--- a/nemo/lightning/pytorch/plugins/mixed_precision.py
+++ b/nemo/lightning/pytorch/plugins/mixed_precision.py
@@ -16,9 +16,8 @@
from dataclasses import dataclass, fields
from typing import Any, Callable, Generator, List, Literal, Tuple, TypeVar, Union
-import pytorch_lightning as pl
import torch
-from pytorch_lightning.plugins.precision import Precision
+from lightning.pytorch.plugins.precision import Precision
from torch.nn import Module
from torch.optim import Optimizer
diff --git a/nemo/lightning/pytorch/strategies/fsdp_strategy.py b/nemo/lightning/pytorch/strategies/fsdp_strategy.py
index 83d5781c0dde..4c5a165c2d8d 100644
--- a/nemo/lightning/pytorch/strategies/fsdp_strategy.py
+++ b/nemo/lightning/pytorch/strategies/fsdp_strategy.py
@@ -17,14 +17,14 @@
from pathlib import Path
from typing import Any, Dict, Optional, Union
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
-from lightning_fabric.plugins import CheckpointIO
-from lightning_fabric.strategies.fsdp import _get_sharded_state_dict_context
+from lightning.fabric.plugins import CheckpointIO
+from lightning.fabric.strategies.fsdp import _get_sharded_state_dict_context
+from lightning.pytorch.strategies.fsdp import FSDPStrategy as PLFSDPStrategy
+from lightning.pytorch.trainer.states import TrainerFn
+from lightning.pytorch.utilities.types import STEP_OUTPUT
from megatron.core.transformer.transformer_layer import TransformerLayer
-from pytorch_lightning.strategies.fsdp import FSDPStrategy as PLFSDPStrategy
-from pytorch_lightning.trainer.states import TrainerFn
-from pytorch_lightning.utilities.types import STEP_OUTPUT
from torch.distributed.checkpoint.state_dict import ( # get_state_dict,
StateDictOptions,
get_optimizer_state_dict,
diff --git a/nemo/lightning/pytorch/strategies/megatron_strategy.py b/nemo/lightning/pytorch/strategies/megatron_strategy.py
index c22df7cc9dfe..870cc0aaaddd 100644
--- a/nemo/lightning/pytorch/strategies/megatron_strategy.py
+++ b/nemo/lightning/pytorch/strategies/megatron_strategy.py
@@ -35,20 +35,20 @@
cast,
)
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
import torch.distributed
-from lightning_fabric.plugins import CheckpointIO, ClusterEnvironment
-from lightning_fabric.utilities.optimizer import _optimizer_to_device, _optimizers_to_device
+from lightning.fabric.plugins import CheckpointIO, ClusterEnvironment
+from lightning.fabric.utilities.optimizer import _optimizer_to_device, _optimizers_to_device
+from lightning.pytorch.accelerators import CPUAccelerator
+from lightning.pytorch.loops import _AutomaticOptimization, evaluation_loop, fit_loop, prediction_loop
+from lightning.pytorch.loops.fetchers import _DataLoaderIterDataFetcher
+from lightning.pytorch.overrides.distributed import _sync_module_states
+from lightning.pytorch.strategies.ddp import DDPStrategy
+from lightning.pytorch.trainer.states import RunningStage, TrainerFn
+from lightning.pytorch.utilities.types import STEP_OUTPUT
from megatron.core.distributed import DistributedDataParallelConfig
from megatron.core.optimizer import OptimizerConfig
-from pytorch_lightning.accelerators import CPUAccelerator
-from pytorch_lightning.loops import _AutomaticOptimization, evaluation_loop, fit_loop, prediction_loop
-from pytorch_lightning.loops.fetchers import _DataLoaderIterDataFetcher
-from pytorch_lightning.overrides.distributed import _sync_module_states
-from pytorch_lightning.strategies.ddp import DDPStrategy
-from pytorch_lightning.trainer.states import RunningStage, TrainerFn
-from pytorch_lightning.utilities.types import STEP_OUTPUT
from torch import nn
from torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks import noop_hook
from torch.nn.parallel import DistributedDataParallel
@@ -57,13 +57,7 @@
from nemo.core.optim.mcore_optim import McoreDistributedOptimizer
from nemo.lightning import _strategy_lib, io
-from nemo.lightning.ckpt_utils import ckpt_to_weights_subdir
-from nemo.lightning.megatron_parallel import (
- CallbackConnector,
- MegatronParallel,
- _ModuleStepFunction,
- aggregate_moe_loss_stats,
-)
+from nemo.lightning.megatron_parallel import CallbackConnector, MegatronParallel, aggregate_moe_loss_stats
from nemo.lightning.pytorch.callbacks import ModelTransform
from nemo.lightning.pytorch.strategies.utils import (
RestoreConfig,
@@ -74,7 +68,6 @@
setup_data_sampler,
setup_parallel_ranks,
)
-from nemo.lightning.resume import AdapterPath
from nemo.utils import logging
from nemo.utils.callbacks.dist_ckpt_io import AsyncFinalizerCallback
@@ -89,9 +82,16 @@
@dataclass
class ParallelismConfig:
+ """
+ Plain-old-data (POD) container for the parallelism configuration.
+ The configuration is passed to MegatronStrategy via constructor arguments,
+ then copied to the model's config during model setup.
+ """
+
tensor_model_parallel_size: int
pipeline_model_parallel_size: int
virtual_pipeline_model_parallel_size: int
+ microbatch_group_size_per_vp_stage: int
context_parallel_size: int
sequence_parallel: bool
expert_model_parallel_size: int
@@ -115,6 +115,9 @@ class MegatronStrategy(DDPStrategy, io.IOMixin):
across GPU ranks. Defaults to 1.
virtual_pipeline_model_parallel_size (Optional[int]): Interleaved pipeline parallelism used to
improve performance by reducing the pipeline bubble. Defaults to None.
+ microbatch_group_size_per_vp_stage (Optional[int]): the number of micro-batches that are executed
+ at a time for a given virtual stage (both forward and backward). Defaults to None, in which case
+ it is set to pipeline_model_parallel_size, giving a depth-first schedule.
context_parallel_size (int): Splits network input along sequence dimension across GPU ranks.
Defaults to 1.
sequence_parallel (bool): Makes tensor parallelism more memory efficient for LLMs (20B+) by
@@ -175,6 +178,7 @@ def __init__(
tensor_model_parallel_size: int = 1,
pipeline_model_parallel_size: int = 1,
virtual_pipeline_model_parallel_size: Optional[int] = None,
+ microbatch_group_size_per_vp_stage: Optional[int] = None,
context_parallel_size: int = 1,
sequence_parallel: bool = False,
expert_model_parallel_size: int = 1,
@@ -219,6 +223,11 @@ def __init__(
self.data_sampler: Optional["DataSampler"] = data_sampler
self.tensor_model_parallel_size = tensor_model_parallel_size
self.pipeline_model_parallel_size = pipeline_model_parallel_size
+ self.microbatch_group_size_per_vp_stage = (
+ microbatch_group_size_per_vp_stage
+ if microbatch_group_size_per_vp_stage is not None
+ else pipeline_model_parallel_size
+ )
self.context_parallel_size = context_parallel_size
self.expert_model_parallel_size = expert_model_parallel_size
self.moe_extended_tp = moe_extended_tp
@@ -266,22 +275,26 @@ def __init__(
@override
def connect(self, model: pl.LightningModule) -> None:
+ """Attaches a model to strategy."""
super().connect(model)
assert not 'is_hf_model' in model.__dict__, "Cannot use HfAutoModelForCausalLM with MegatronParallel"
+ dtype_config = getattr(self._precision_plugin, "dtype_config", None)
+ if self.pipeline_dtype is None and dtype_config:
+ self.pipeline_dtype = dtype_config.pipeline_dtype
+
_maybe_mcore_config = _strategy_lib.set_model_parallel_attributes(model, self.parallelism)
if _maybe_mcore_config:
self._mcore_config = _maybe_mcore_config
- dtype_config = getattr(self._precision_plugin, "dtype_config", None)
if dtype_config:
from nemo.lightning.pytorch.plugins.mixed_precision import update_config_with_dtype_overrides
model.config = update_config_with_dtype_overrides(dtype_config, model.config)
has_optim = getattr(model, "optim", None)
- if has_optim:
+ if has_optim and self._setup_optimizers:
opt_config = getattr(model.optim, "config", None)
if isinstance(opt_config, OptimizerConfig):
mcore_opt_config: OptimizerConfig = cast(OptimizerConfig, opt_config)
@@ -299,6 +312,7 @@ def connect(self, model: pl.LightningModule) -> None:
@override
def setup(self, trainer: pl.Trainer) -> None:
+ """Setups the strategy"""
assert self.accelerator is not None
self.accelerator.setup(trainer)
self.trainer = trainer
@@ -308,7 +322,8 @@ def setup(self, trainer: pl.Trainer) -> None:
logging.info(f"Copying Trainer's 'max_steps' ({trainer.max_steps}) to LR scheduler's 'max_steps'.")
except AttributeError:
logging.warning(
- "Could not copy Trainer's 'max_steps' to LR scheduler's 'max_steps'. If you are not using an LR scheduler, this warning can safely be ignored."
+ "Could not copy Trainer's 'max_steps' to LR scheduler's 'max_steps'. "
+ "If you are not using an LR scheduler, this warning can safely be ignored."
)
# move the model to the correct device
@@ -380,6 +395,7 @@ def setup(self, trainer: pl.Trainer) -> None:
@override
def setup_distributed(self) -> None:
+ """Setups dist env"""
setup_parallel_ranks(self)
super().setup_distributed()
init_model_parallel(self.model)
@@ -390,12 +406,14 @@ def setup_distributed(self) -> None:
@override
def process_dataloader(self, dataloader: DataLoader) -> DataLoader:
+ """Setups dataloader"""
if self.data_sampler:
return self.data_sampler.transform_dataloader(dataloader)
return dataloader
def setup_megatron_parallel(self, trainer: pl.Trainer) -> None:
+ """Configures megatron parallel"""
assert self.model is not None, "Model is not set"
convert_module_fn = None
@@ -439,10 +457,12 @@ def setup_megatron_parallel(self, trainer: pl.Trainer) -> None:
self.model.callbacks.add(datamodule)
def init_model_parallel(self):
+ """Initializes megatron parallel"""
self.megatron_parallel.init_model_parallel()
@override
def configure_ddp(self) -> None:
+ """Configures ddp"""
logging.debug(f"{self.__class__.__name__}: configuring MegatronParallel")
self.model = self._setup_model(self.model)
if self.ddp_config is None:
@@ -475,6 +495,7 @@ def _setup_model(self, model: nn.Module) -> nn.Module:
@override
def setup_optimizers(self, trainer: "pl.Trainer") -> None:
+ """Setups optimizers"""
super().setup_optimizers(trainer)
if hasattr(self.precision_plugin, "convert_optimizer"):
_optimizers = [*self.optimizers]
@@ -485,6 +506,7 @@ def setup_optimizers(self, trainer: "pl.Trainer") -> None:
@override
def training_step(self, dataloader_iter, *args: Any, **kwargs: Any) -> STEP_OUTPUT:
+ """Runs one training step"""
assert self.lightning_module is not None
assert isinstance(self.model, MegatronParallel)
@@ -558,6 +580,7 @@ def optimizer_step(
model: Optional[Union["pl.LightningModule", nn.Module]] = None,
**kwargs: Any,
) -> Any:
+ """Runs one optimizer step"""
optimizer_output = super().optimizer_step(optimizer, closure, model, **kwargs)
if isinstance(optimizer, McoreDistributedOptimizer):
@@ -571,6 +594,7 @@ def optimizer_step(
@override
def validation_step(self, dataloader_iter, *args: Any, **kwargs: Any) -> STEP_OUTPUT:
+ """Runs one validation step"""
assert self.lightning_module is not None
assert isinstance(self.model, MegatronParallel)
@@ -598,6 +622,7 @@ def validation_step(self, dataloader_iter, *args: Any, **kwargs: Any) -> STEP_OU
@override
def test_step(self, dataloader_iter, *args: Any, **kwargs: Any) -> STEP_OUTPUT:
+ """Runs one test step"""
assert self.lightning_module is not None
assert isinstance(self.model, MegatronParallel)
@@ -606,6 +631,7 @@ def test_step(self, dataloader_iter, *args: Any, **kwargs: Any) -> STEP_OUTPUT:
@override
def predict_step(self, dataloader_iter, *args: Any, **kwargs: Any) -> STEP_OUTPUT:
+ """Runs one prediction step"""
assert self.lightning_module is not None
assert isinstance(self.model, MegatronParallel)
@@ -614,10 +640,12 @@ def predict_step(self, dataloader_iter, *args: Any, **kwargs: Any) -> STEP_OUTPU
@override
def teardown(self) -> None:
+ """Tearsdown the strategy"""
super().teardown()
@override
def model_sharded_context(self) -> ContextManager:
+ """Model sharded context"""
if self.lazy_init and hasattr(self, "_mcore_config"):
stack = ExitStack()
stack.enter_context(_strategy_lib.megatron_lazy_init_context(self._mcore_config))
@@ -659,6 +687,7 @@ def optimizer_sharded_state_dict(self, is_loading=False):
def save_checkpoint(
self, checkpoint: Dict[str, Any], filepath: Union[str, Path], storage_options: Optional[Any] = None
) -> None:
+ """Saves checkpoint"""
checkpoint["state_dict"] = OrderedDict([]) # remove device state_dict
# retrieve `sharded_state_dict` if it has not already been configured in `on_save_checkpoint`
if "sharded_state_dict" not in checkpoint:
@@ -678,6 +707,7 @@ def save_checkpoint(
self.checkpoint_io.save_checkpoint(checkpoint, filepath, storage_options=storage_options)
def should_restore_optimizer_states(self, selective_restore: bool = False) -> bool:
+ """Determines whether to restore optimizer states or not"""
if selective_restore:
return self.restore_config.load_optim_state if self.restore_config else False
@@ -703,17 +733,12 @@ def load_checkpoint(self, checkpoint_path: Union[str, Path], selective_restore:
if self.lightning_module.optimizers(use_pl_optimizer=False):
sharded_state_dict["optimizer"] = [self.optimizer_sharded_state_dict(is_loading=True)]
- # Load from ckpt_path/weights (new format) if it exists, otherwise load from ckpt_path (legacy format)
- load_dir = ckpt_to_weights_subdir(checkpoint_path)
- if not load_dir.exists():
- load_dir = checkpoint_path
- if isinstance(load_dir, AdapterPath) and not load_dir.base_model_path.exists():
- load_dir.base_model_path = load_dir.base_model_path.parent
- checkpoint = self.checkpoint_io.load_checkpoint(load_dir, sharded_state_dict=sharded_state_dict)
+ checkpoint = self.checkpoint_io.load_checkpoint(checkpoint_path, sharded_state_dict=sharded_state_dict)
return checkpoint
def selective_restore(self) -> None:
+ """Implements selective restoration of checkpoint"""
if not self.restore_config:
return
@@ -736,6 +761,7 @@ def selective_restore(self) -> None:
@override
def load_optimizer_state_dict(self, checkpoint: Mapping[str, Any], selective_restore: bool = False) -> None:
+ """Loads optimizer state-dict"""
if not self.should_restore_optimizer_states(selective_restore=selective_restore):
return
@@ -745,6 +771,7 @@ def load_optimizer_state_dict(self, checkpoint: Mapping[str, Any], selective_res
_optimizer_to_device(optimizer, self.root_device)
def remove_checkpoint(self, filepath: Union[str, Path]) -> None:
+ """Deletes checkpoint"""
ckpt = ckpt_to_dir(filepath)
if self.is_global_zero:
if os.path.islink(ckpt):
@@ -753,6 +780,7 @@ def remove_checkpoint(self, filepath: Union[str, Path]) -> None:
shutil.rmtree(ckpt)
def load_model_state_dict(self, checkpoint: Mapping[str, Any], strict: bool = True) -> None:
+ """loads model state dict"""
assert self.megatron_parallel is not None
_strategy_lib.load_model_state_dict(self.megatron_parallel, checkpoint, strict=strict)
@@ -764,6 +792,7 @@ def load_model_state_dict(self, checkpoint: Mapping[str, Any], strict: bool = Tr
@property
@override
def checkpoint_io(self) -> CheckpointIO:
+ """Creates & returns checkpoint io"""
if not self._checkpoint_io:
self._checkpoint_io = create_checkpoint_io(
save_ckpt_format=self.save_ckpt_format,
@@ -780,6 +809,7 @@ def checkpoint_io(self) -> CheckpointIO:
@checkpoint_io.setter
def checkpoint_io(self, io: CheckpointIO) -> None:
+ """CheckpointIO setter"""
self._checkpoint_io = io
@property
@@ -794,6 +824,7 @@ def current_epoch_step(self) -> int:
@property
def distributed_sampler_kwargs(self) -> Dict[str, Any]:
+ """Returns dist-sampler's kwargs"""
from nemo.utils import AppState
app_state = AppState()
@@ -819,10 +850,12 @@ def restore_checkpoint_after_setup(self) -> bool:
@property
def parallelism(self) -> ParallelismConfig:
+ """Returns parallelism config from class attrs as a POD"""
return ParallelismConfig(
tensor_model_parallel_size=self.tensor_model_parallel_size,
pipeline_model_parallel_size=self.pipeline_model_parallel_size,
virtual_pipeline_model_parallel_size=self.virtual_pipeline_model_parallel_size,
+ microbatch_group_size_per_vp_stage=self.microbatch_group_size_per_vp_stage,
context_parallel_size=self.context_parallel_size,
sequence_parallel=self.sequence_parallel,
expert_model_parallel_size=self.expert_model_parallel_size,
@@ -835,6 +868,7 @@ def parallelism(self) -> ParallelismConfig:
@contextmanager
@override
def tensor_init_context(self, empty_init: Optional[bool] = None):
+ """Context manager used for initialization"""
# Materialization happens in `setup()`
# @akoumparouli: using Parent's tensor_init_context causes mcore
# parameters to be initialized on GPU instead of (assumed) CPU.
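The net effect of the strategy changes: a new `microbatch_group_size_per_vp_stage` knob (defaulting to the pipeline-parallel size, i.e. a depth-first schedule), the pipeline dtype inherited from the precision plugin when unset, and per-method docstrings. A hedged construction sketch; the values are illustrative only:

```python
# Hedged sketch of the new interleaved-pipelining knob.
from nemo.lightning.pytorch.strategies.megatron_strategy import MegatronStrategy

strategy = MegatronStrategy(
    tensor_model_parallel_size=2,
    pipeline_model_parallel_size=4,
    virtual_pipeline_model_parallel_size=2,
    microbatch_group_size_per_vp_stage=4,  # omitted -> defaults to PP size (depth-first)
)
```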
diff --git a/nemo/lightning/pytorch/strategies/utils.py b/nemo/lightning/pytorch/strategies/utils.py
index 43a5a9243aa5..4f5a78419d6d 100644
--- a/nemo/lightning/pytorch/strategies/utils.py
+++ b/nemo/lightning/pytorch/strategies/utils.py
@@ -17,15 +17,14 @@
from pathlib import Path
from typing import Any, Dict, Iterable, List, Optional, Tuple, Union, cast
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
-from lightning_fabric.plugins import ClusterEnvironment
+from lightning.fabric.plugins import ClusterEnvironment
+from lightning.pytorch.callbacks import TQDMProgressBar
from megatron.core import parallel_state
from megatron.core.dist_checkpointing.mapping import ShardedBase, ShardedObject, ShardedTensor
from megatron.core.dist_checkpointing.strategies.torch import sharded_tensor_to_torch_sharded_tensor
from megatron.core.transformer.utils import _get_extra_state_offsets
-from pytorch_lightning.callbacks import TQDMProgressBar
-from pytorch_lightning.plugins.io.wrapper import _WrappingCheckpointIO
from torch.distributed._sharded_tensor import ShardedTensor as TorchShardedTensor
from torch.distributed._tensor import DTensor, Replicate, Shard
from torch.distributed.device_mesh import DeviceMesh
diff --git a/nemo/lightning/pytorch/trainer.py b/nemo/lightning/pytorch/trainer.py
index 0d71c49bf198..701c1cde4eaf 100644
--- a/nemo/lightning/pytorch/trainer.py
+++ b/nemo/lightning/pytorch/trainer.py
@@ -12,10 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import warnings
from copy import deepcopy
import fiddle as fdl
-import pytorch_lightning as pl
+import lightning.pytorch as pl
+from lightning.pytorch.loops import _TrainingEpochLoop
+from lightning.pytorch.loops.fetchers import _DataFetcher
from typing_extensions import Self
from nemo.lightning.fabric.conversion import to_fabric
@@ -23,8 +26,40 @@
from nemo.lightning.io.mixin import IOMixin, serialization, track_io
-class Trainer(pl.Trainer, IOMixin):
+class NoValOnRestartTrainingLoop(_TrainingEpochLoop):
+ """
+ Extends the PTL epoch loop to skip validation when restarting.
+ This is needed when resuming from a checkpoint saved after validation already ran: loading
+ restores the training state to just before that validation, which would otherwise re-run it.
+ """
+
+ skip_val_on_restart: bool = False  # default for fresh (non-restart) runs
+
+ def _should_check_val_fx(self, data_fetcher) -> bool:
+ if self.skip_val_on_restart:
+ return False
+ return super()._should_check_val_fx(data_fetcher)
+
+ def load_state_dict(self, state_dict: dict, prefix: str = "") -> None:
+ super().load_state_dict(state_dict, prefix)
+
+ self.skip_val_on_restart = True
+
+ def advance(self, data_fetcher: _DataFetcher) -> None:
+ super().advance(data_fetcher)
+
+ self.skip_val_on_restart = False
+
+def configure_no_restart_validation_training_loop(trainer: pl.Trainer) -> None:
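+ """Swap the trainer's epoch loop for NoValOnRestartTrainingLoop, so validation is skipped right after a restart."""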
+ if not isinstance(trainer.fit_loop.epoch_loop, _TrainingEpochLoop):
+ warnings.warn("Detected custom epoch loop. Skipping no validation on restart support.", UserWarning)
+ return
+
+ ## Pass trainer object to avoid trainer getting overwritten as None
+ loop = NoValOnRestartTrainingLoop(trainer, trainer.min_steps, trainer.max_steps)
+ trainer.fit_loop.epoch_loop = loop
+
+
+class Trainer(pl.Trainer, IOMixin):
def add_io(self, obj):
"""Recurse to the leaves of a container and add io functionality to non-serializable leaves"""
if isinstance(obj, (dict, list)):
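For context, a minimal usage sketch of the new loop wiring. The `model` variable and checkpoint path are placeholders, not part of this change:

    from nemo.lightning.pytorch.trainer import Trainer, configure_no_restart_validation_training_loop

    trainer = Trainer(max_steps=1000, val_check_interval=100)
    # Swap in the restart-aware epoch loop before calling fit(); validation is
    # skipped only for the step restored from the checkpoint, then re-enabled
    # as soon as the loop advances.
    configure_no_restart_validation_training_loop(trainer)
    trainer.fit(model, ckpt_path="last")  # `model`: placeholder LightningModule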
diff --git a/nemo/lightning/resume.py b/nemo/lightning/resume.py
index 412ca8665b84..7b534646731c 100644
--- a/nemo/lightning/resume.py
+++ b/nemo/lightning/resume.py
@@ -18,8 +18,8 @@
from pathlib import Path, PosixPath, WindowsPath
from typing import Optional, Union
-import lightning_fabric as fl
-import pytorch_lightning as pl
+import lightning.fabric as fl
+import lightning.pytorch as pl
from nemo.lightning import io
from nemo.lightning.base import NEMO_MODELS_CACHE
diff --git a/nemo/lightning/run/plugins.py b/nemo/lightning/run/plugins.py
index 9d2936e567ec..645665723706 100644
--- a/nemo/lightning/run/plugins.py
+++ b/nemo/lightning/run/plugins.py
@@ -20,17 +20,22 @@
import nemo_run as run
import yaml
+from lightning.pytorch import Callback
+from lightning.pytorch.loggers import WandbLogger
from nemo_run.core.serialization.yaml import YamlSerializer
-from pytorch_lightning import Callback
-from pytorch_lightning.loggers import WandbLogger
from nemo.lightning.pytorch.callbacks import NsysCallback, PreemptionCallback
from nemo.lightning.pytorch.strategies.megatron_strategy import MegatronStrategy
from nemo.utils import logging
+from nemo.utils.import_utils import safe_import
+
+res_module, HAVE_RES = safe_import('nvidia_resiliency_ext.ptl_resiliency')
+
# This file contains plugins based on NeMo-Run's run.Plugin API.
# Plugins operate both on a configured task and an executor at the same time, and are specific to NeMo-Run.
-# If you are adding functionality that goes directly into the Pytorch Lightning trainer, you may consider adding a callback instead of a plugin.
+# If you are adding functionality that goes directly into the Pytorch Lightning trainer,
+# you may consider adding a callback instead of a plugin.
def _merge_callbacks(partial: run.Partial, callbacks: list[run.Config[Callback]]):
@@ -79,6 +84,55 @@ def setup(self, task: run.Partial | run.Script, executor: run.Executor):
_merge_callbacks(task, callbacks=self.callbacks)
+@dataclass(kw_only=True)
+class FaultTolerancePlugin(run.Plugin):
+ """
+ A plugin for setting up the fault tolerance callback from nvidia-resiliency-ext.
+ This plugin enables workload hang detection, automatic calculation of the timeouts used for
+ hang detection, detection of rank(s) terminated due to an error, and workload respawning in
+ case of a failure.
+ Note: FaultTolerancePlugin does not work with the NsysPlugin.
+ Args:
+ num_in_process_restarts (int): Max number of restarts on failure, within the same job. Default is 3.
+ num_job_retries_on_failure (int): Max number of new job restarts on failure. Default is 2.
+ initial_rank_heartbeat_timeout (int): Max timeout for the initial heartbeat. Timeouts are time
+ intervals used by the rank monitor to detect that a rank is not alive. Default is 1800.
+ rank_heartbeat_timeout (int): Timeout for subsequent heartbeats after the initial one. Default is 300.
+ """
+
+ num_in_process_restarts: int = 3
+ num_job_retries_on_failure: int = 2
+ initial_rank_heartbeat_timeout: int = 1800
+ rank_heartbeat_timeout: int = 300
+
+ def setup(self, task: run.Partial | run.Script, executor: run.Executor):
+
+ assert HAVE_RES, "nvidia-resiliency-ext.ptl_resiliency is required to use the FaultTolerancePlugin."
+
+ executor.launcher = run.FaultTolerance(
+ max_restarts=self.num_in_process_restarts,
+ initial_rank_heartbeat_timeout=self.initial_rank_heartbeat_timeout,
+ rank_heartbeat_timeout=self.rank_heartbeat_timeout,
+ )
+ executor.retries = self.num_job_retries_on_failure
+
+ assert isinstance(task, run.Partial)
+
+ callbacks = [
+ run.Config(
+ res_module.FaultToleranceCallback, autoresume=True, calculate_timeouts=True, exp_dir=task.log.log_dir
+ )
+ ]
+
+ assert not executor.launcher.nsys_profile, "Nsys not supported with the FaultTolerancePlugin."
+ if hasattr(task, "trainer") and hasattr(task.trainer, "callbacks"):
+ assert all(
+ getattr(cb, "__fn_or_cls__", None) != NsysCallback for cb in task.trainer.callbacks
+ ), "Nsys not supported with FaultTolerancePlugin."
+
+ _merge_callbacks(task, callbacks=callbacks)
+
+
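A hedged usage sketch for the new plugin, assuming an already-configured `run.Partial` task and `run.Executor`, and that NeMo-Run's `run.run` accepts a `plugins` list as in other recipes:

    import nemo_run as run
    from nemo.lightning.run.plugins import FaultTolerancePlugin

    # `task` and `executor` are assumed to exist; the values below are illustrative.
    ft = FaultTolerancePlugin(num_in_process_restarts=3, num_job_retries_on_failure=2)
    run.run(task, executor=executor, plugins=[ft])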
@dataclass(kw_only=True)
class NsysPlugin(run.Plugin):
"""
@@ -260,8 +314,11 @@ class PerfEnvPlugin(run.Plugin):
enable_layernorm_sm_margin: bool = True
layernorm_sm_margin: int = 16
enable_vboost: bool = False
+ nccl_pp_comm_chunksize: Optional[int] = None
def get_vboost_srun_cmd(self, nodes, job_dir):
+ "Create the vboost `sudo nvidia-smi boost-slider --vboost 1` command"
+
import shlex
vboost_cmd = " ".join(
@@ -281,12 +338,13 @@ def get_vboost_srun_cmd(self, nodes, job_dir):
return vboost_cmd
def setup(self, task: run.Partial | run.Script, executor: run.Executor):
+ """Enable the performance environment settings"""
if task.trainer.strategy.__fn_or_cls__ == MegatronStrategy:
# Force program order kernel launch for TP, CP overlap
tp_size = task.trainer.strategy.tensor_model_parallel_size
cp_size = task.trainer.strategy.context_parallel_size
- if tp_size > 1 and cp_size > 1:
+ if tp_size > 1 or cp_size > 1:
executor.env_vars["CUDA_DEVICE_MAX_CONNECTIONS"] = "1"
# Set LayerNorm SM margin to support the overlap with LayerNorm kernel
@@ -294,6 +352,13 @@ def setup(self, task: run.Partial | run.Script, executor: run.Executor):
executor.env_vars["NVTE_FWD_LAYERNORM_SM_MARGIN"] = str(self.layernorm_sm_margin)
executor.env_vars["NVTE_BWD_LAYERNORM_SM_MARGIN"] = str(self.layernorm_sm_margin)
+ # Set the chunk size of P2P communications. Using a large chunk size reduces the
+ # buffering overhead from the communication kernel execution time
+ pp_size = task.trainer.strategy.pipeline_model_parallel_size
+ if pp_size > 1 and self.nccl_pp_comm_chunksize is not None:
+ assert isinstance(self.nccl_pp_comm_chunksize, int) and self.nccl_pp_comm_chunksize > 1
+ executor.env_vars["NCCL_P2P_NET_CHUNKSIZE"] = str(self.nccl_pp_comm_chunksize)
+
# Improve perf by steering power to tensor cores, may not work on all systems
if self.enable_vboost and isinstance(executor, run.SlurmExecutor):
vboost_cmd = self.get_vboost_srun_cmd(executor.nodes, executor.job_dir)
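Illustrative configuration for the new knob; the chunk size value is hypothetical, and the plugin only exports `NCCL_P2P_NET_CHUNKSIZE` when `pipeline_model_parallel_size > 1`:

    from nemo.lightning.run.plugins import PerfEnvPlugin

    # 4 MiB P2P chunks, applied only when pipeline parallelism is active.
    perf = PerfEnvPlugin(enable_vboost=True, nccl_pp_comm_chunksize=4 * 1024 * 1024)
    # Manual equivalent: export NCCL_P2P_NET_CHUNKSIZE=4194304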
diff --git a/nemo/utils/callbacks/cuda_graph.py b/nemo/utils/callbacks/cuda_graph.py
index c78196934108..b44006828963 100644
--- a/nemo/utils/callbacks/cuda_graph.py
+++ b/nemo/utils/callbacks/cuda_graph.py
@@ -37,15 +37,15 @@
from types import MethodType
from typing import Any, Dict
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
-from pytorch_lightning import LightningModule
-from pytorch_lightning.callbacks import Callback
-from pytorch_lightning.loops.optimization.automatic import ClosureResult
-from pytorch_lightning.trainer.connectors.logger_connector.result import _ResultCollection, _ResultMetric
-from pytorch_lightning.utilities import CombinedLoader, rank_zero_info
-from pytorch_lightning.utilities.signature_utils import is_param_in_hook_signature
-from pytorch_lightning.utilities.types import STEP_OUTPUT
+from lightning.pytorch import LightningModule
+from lightning.pytorch.callbacks import Callback
+from lightning.pytorch.loops.optimization.automatic import ClosureResult
+from lightning.pytorch.trainer.connectors.logger_connector.result import _ResultCollection, _ResultMetric
+from lightning.pytorch.utilities import CombinedLoader, rank_zero_info
+from lightning.pytorch.utilities.signature_utils import is_param_in_hook_signature
+from lightning.pytorch.utilities.types import STEP_OUTPUT
from torch.nn.parallel import DistributedDataParallel
__all__ = ["CUDAGraphCallback"]
@@ -431,8 +431,8 @@ def on_save_checkpoint(
Called when saving a checkpoint to give you a chance to store anything else you might want to save.
Args:
- trainer: the current :class:`~pytorch_lightning.trainer.Trainer` instance.
- pl_module: the current :class:`~pytorch_lightning.core.module.LightningModule` instance.
+ trainer: the current :class:`~lightning.pytorch.trainer.Trainer` instance.
+ pl_module: the current :class:`~lightning.pytorch.core.module.LightningModule` instance.
checkpoint: the checkpoint dictionary that will be saved.
"""
# Since we've add bound method to optimizer and lr_scheduler, it can lead to more
diff --git a/nemo/utils/callbacks/dist_ckpt_io.py b/nemo/utils/callbacks/dist_ckpt_io.py
index 091075488878..b78ec9b4ac0f 100644
--- a/nemo/utils/callbacks/dist_ckpt_io.py
+++ b/nemo/utils/callbacks/dist_ckpt_io.py
@@ -19,12 +19,12 @@
from time import time
from typing import Any, Dict, Optional, Union
-import pytorch_lightning as pl
-from lightning_fabric.plugins import CheckpointIO
-from lightning_fabric.utilities.cloud_io import get_filesystem
-from lightning_fabric.utilities.types import _PATH
-from pytorch_lightning import Callback
-from pytorch_lightning.plugins.io.wrapper import _WrappingCheckpointIO
+import lightning.pytorch as pl
+from lightning.fabric.plugins import CheckpointIO
+from lightning.fabric.utilities.cloud_io import get_filesystem
+from lightning.fabric.utilities.types import _PATH
+from lightning.pytorch import Callback
+from lightning.pytorch.plugins.io.wrapper import _WrappingCheckpointIO
from nemo.utils import logging
diff --git a/nemo/utils/callbacks/nemo_model_checkpoint.py b/nemo/utils/callbacks/nemo_model_checkpoint.py
index dc1da9ce1875..8fe3beaaa985 100644
--- a/nemo/utils/callbacks/nemo_model_checkpoint.py
+++ b/nemo/utils/callbacks/nemo_model_checkpoint.py
@@ -19,14 +19,13 @@
from pathlib import Path
from typing import Any, Dict, Iterable, List, Optional, Tuple, Union
-import pytorch_lightning
+import lightning.pytorch
import torch
from _weakref import proxy
-
-from lightning_fabric.utilities.cloud_io import get_filesystem
-from pytorch_lightning.callbacks.model_checkpoint import ModelCheckpoint, _is_local_file_protocol
-from pytorch_lightning.trainer import call
-from pytorch_lightning.utilities import rank_zero_info
+from lightning.fabric.utilities.cloud_io import get_filesystem
+from lightning.pytorch.callbacks.model_checkpoint import ModelCheckpoint, _is_local_file_protocol
+from lightning.pytorch.trainer import call
+from lightning.pytorch.utilities import rank_zero_info
from nemo.collections.common.callbacks import EMA
from nemo.utils import logging
@@ -357,7 +356,7 @@ def _del_model_without_trainer(self, filepath: str) -> None:
except:
logging.info(f"Tried to remove checkpoint: {filepath} but failed.")
- def _ema_callback(self, trainer: 'pytorch_lightning.Trainer') -> Optional[EMA]:
+ def _ema_callback(self, trainer: 'lightning.pytorch.Trainer') -> Optional[EMA]:
ema_callback = None
for callback in trainer.callbacks:
if isinstance(callback, EMA):
@@ -506,12 +505,12 @@ def remove_checkpoint_unfinished_marker(checkpoint_path: Union[Path, str], barri
except:
return
- def file_exists(self, filepath: str, trainer: "pytorch_lightning.Trainer", check_dist_ckpt: bool = True) -> bool:
+ def file_exists(self, filepath: str, trainer: "lightning.pytorch.Trainer", check_dist_ckpt: bool = True) -> bool:
"""Checks if a file or a file without a suffix (distributed checkpoint) exists."""
exists = self._fs.exists(filepath) or (check_dist_ckpt and self._fs.exists(ckpt_to_dir(filepath)))
return trainer.strategy.broadcast(exists)
- def _save_checkpoint(self, trainer: 'pytorch_lightning.Trainer', filepath: str) -> None:
+ def _save_checkpoint(self, trainer: 'lightning.pytorch.Trainer', filepath: str) -> None:
# barrier_after=True, so all ranks continue after the unfinished checkpoint marker is placed.
# if anything goes wrong during checkpointing, we should be able to detect that data is incomplete.
self.set_checkpoint_unfinished_marker(filepath, barrier_after=True)
@@ -552,7 +551,7 @@ def _save_checkpoint(self, trainer: 'pytorch_lightning.Trainer', filepath: str)
self._drop_optimizer_states(trainer, filepath, storage_options)
def _get_finalize_save_checkpoint_callback(
- self, trainer: 'pytorch_lightning.Trainer', filepath: str, global_step: int
+ self, trainer: 'lightning.pytorch.Trainer', filepath: str, global_step: int
):
"""Creates a callback that can be used to finalize async (and sync) ckpt saves."""
@@ -585,7 +584,7 @@ def _cb():
return _cb
- def _remove_checkpoint(self, trainer: "pytorch_lightning.Trainer", filepath: str, override_async=False) -> None:
+ def _remove_checkpoint(self, trainer: "lightning.pytorch.Trainer", filepath: str, override_async=False) -> None:
"""Performs checkpoint removal or deferred removal.
With async save, `self._remove_checkpoint` is called before the checkpoint
diff --git a/nemo/utils/callbacks/preemption.py b/nemo/utils/callbacks/preemption.py
index e9b5f95022f3..178fe94cee7c 100644
--- a/nemo/utils/callbacks/preemption.py
+++ b/nemo/utils/callbacks/preemption.py
@@ -16,7 +16,7 @@
import sys
import torch
-from pytorch_lightning.callbacks import Callback
+from lightning.pytorch.callbacks import Callback
from nemo.utils import logging
@@ -24,7 +24,7 @@
class PreemptionCallback(Callback):
"""
PreemptionCallback class creates a callback that checks for preemption during training at the end of every step.
- Upon preemption the callback provides a function to gracefully exit the training immediately and also saves the current state in a checkpoint as *last.ckpt.
+ Upon preemption the callback provides a function to gracefully exit the training immediately and also saves the current state in a checkpoint as *last.ckpt.
(to be able to start from the same step without wasting any compute while resuming the next time).
PreemptionCallback is always enabled by default via the arg create_preemption_callback under ExpManagerConfig. To disable please pass
@@ -47,7 +47,7 @@ def interrupted(self):
def on_train_start(self, trainer, pl_module):
"""
- Defines custom handlers at the beginning of training to be executed when the
+ Defines custom handlers at the beginning of training to be executed when the
preemption signal is received.
"""
diff --git a/nemo/utils/callbacks/s3_checkpoint_io.py b/nemo/utils/callbacks/s3_checkpoint_io.py
index 7a9f984fee1b..4a48198311a2 100644
--- a/nemo/utils/callbacks/s3_checkpoint_io.py
+++ b/nemo/utils/callbacks/s3_checkpoint_io.py
@@ -22,7 +22,7 @@
from typing import Any, Callable, Dict, Optional, Union
import torch
-from lightning_fabric.plugins.io.checkpoint_io import CheckpointIO
+from lightning.fabric.plugins.io.checkpoint_io import CheckpointIO
from nemo.utils import logging
from nemo.utils.s3_utils import (
diff --git a/nemo/utils/cloud.py b/nemo/utils/cloud.py
index 7245567d636c..d565028bdf8c 100644
--- a/nemo/utils/cloud.py
+++ b/nemo/utils/cloud.py
@@ -17,8 +17,8 @@
from time import sleep
import wget
-from pytorch_lightning.plugins.environments import LightningEnvironment
-from pytorch_lightning.strategies import DDPStrategy, StrategyRegistry
+from lightning.pytorch.plugins.environments import LightningEnvironment
+from lightning.pytorch.strategies import DDPStrategy, StrategyRegistry
from nemo.utils import logging
@@ -105,7 +105,10 @@ def initialize_sagemaker() -> None:
"""
StrategyRegistry.register(
- name='smddp', strategy=SageMakerDDPStrategy, process_group_backend="smddp", find_unused_parameters=False,
+ name='smddp',
+ strategy=SageMakerDDPStrategy,
+ process_group_backend="smddp",
+ find_unused_parameters=False,
)
def _install_system_libraries() -> None:
diff --git a/nemo/utils/exp_manager.py b/nemo/utils/exp_manager.py
index b512bc57cbab..04c43c46d247 100644
--- a/nemo/utils/exp_manager.py
+++ b/nemo/utils/exp_manager.py
@@ -26,18 +26,18 @@
from shutil import copy, move
from typing import Any, Collection, Dict, List, Optional, Tuple, Union
-import pytorch_lightning
+import lightning.pytorch
import torch
from hydra.core.hydra_config import HydraConfig
from hydra.utils import get_original_cwd
+from lightning.pytorch.callbacks import Callback, ModelCheckpoint
+from lightning.pytorch.callbacks.early_stopping import EarlyStopping
+from lightning.pytorch.callbacks.timer import Interval, Timer
+from lightning.pytorch.loggers import MLFlowLogger, NeptuneLogger, TensorBoardLogger, WandbLogger
+from lightning.pytorch.loops import _TrainingEpochLoop
+from lightning.pytorch.strategies.ddp import DDPStrategy
+from lightning.pytorch.trainer.connectors.checkpoint_connector import _CheckpointConnector
from omegaconf import DictConfig, OmegaConf, open_dict
-from pytorch_lightning.callbacks import Callback, ModelCheckpoint
-from pytorch_lightning.callbacks.early_stopping import EarlyStopping
-from pytorch_lightning.callbacks.timer import Interval, Timer
-from pytorch_lightning.loggers import MLFlowLogger, NeptuneLogger, TensorBoardLogger, WandbLogger
-from pytorch_lightning.loops import _TrainingEpochLoop
-from pytorch_lightning.strategies.ddp import DDPStrategy
-from pytorch_lightning.trainer.connectors.checkpoint_connector import _CheckpointConnector
from nemo.collections.common.callbacks import EMA
from nemo.constants import NEMO_ENV_VARNAME_TESTING, NEMO_ENV_VARNAME_VERSION
@@ -343,7 +343,7 @@ def on_validation_batch_end(self, trainer, pl_module, outputs, batch, batch_idx)
self._on_batch_end("validation_step_timing in s", trainer, pl_module)
-def exp_manager(trainer: 'pytorch_lightning.Trainer', cfg: Optional[Union[DictConfig, Dict]] = None) -> Optional[Path]:
+def exp_manager(trainer: 'lightning.pytorch.Trainer', cfg: Optional[Union[DictConfig, Dict]] = None) -> Optional[Path]:
"""
exp_manager is a helper function used to manage folders for experiments. It follows the pytorch lightning paradigm
of exp_dir/model_or_experiment_name/version. If the lightning trainer has a logger, exp_manager will get exp_dir,
@@ -362,7 +362,7 @@ def exp_manager(trainer: 'pytorch_lightning.Trainer', cfg: Optional[Union[DictCo
resume_if_exists is set to True, creating the version folders is ignored.
Args:
- trainer (pytorch_lightning.Trainer): The lightning trainer.
+ trainer (lightning.pytorch.Trainer): The lightning trainer.
cfg (DictConfig, dict): Can have the following keys:
- explicit_log_dir (str, Path): Can be used to override exp_dir/name/version folder creation. Defaults to
@@ -680,7 +680,7 @@ def exp_manager(trainer: 'pytorch_lightning.Trainer', cfg: Optional[Union[DictCo
return log_dir
-def error_checks(trainer: 'pytorch_lightning.Trainer', cfg: Optional[Union[DictConfig, Dict]] = None):
+def error_checks(trainer: 'lightning.pytorch.Trainer', cfg: Optional[Union[DictConfig, Dict]] = None):
"""
Checks that the passed trainer is compliant with NeMo and exp_manager's passed configuration. Checks that:
- Throws error when hydra has changed the working directory. This causes issues with lightning's DDP
@@ -728,7 +728,7 @@ def _filter_out_unfinished_checkpoints(checkpoint_paths: Collection[Union[Path,
def check_resume(
- trainer: 'pytorch_lightning.Trainer',
+ trainer: 'lightning.pytorch.Trainer',
log_dir: str,
resume_if_exists: bool = False,
resume_past_end: bool = False,
@@ -886,7 +886,7 @@ def check_resume(
def check_explicit_log_dir(
- trainer: 'pytorch_lightning.Trainer', explicit_log_dir: Union[Path, str], exp_dir: str, name: str, version: str
+ trainer: 'lightning.pytorch.Trainer', explicit_log_dir: Union[Path, str], exp_dir: str, name: str, version: str
) -> Tuple[Path, str, str, str]:
"""Checks that the passed arguments are compatible with explicit_log_dir.
@@ -917,7 +917,7 @@ def check_explicit_log_dir(
def get_log_dir(
- trainer: 'pytorch_lightning.Trainer',
+ trainer: 'lightning.pytorch.Trainer',
exp_dir: str = None,
name: str = None,
version: str = None,
@@ -1025,7 +1025,7 @@ def get_git_diff():
def configure_loggers(
- trainer: 'pytorch_lightning.Trainer',
+ trainer: 'lightning.pytorch.Trainer',
exp_dir: [Path, str],
log_dir: [Path, str],
name: str,
@@ -1136,7 +1136,7 @@ def resume_start(self, checkpoint_path=None) -> None:
def configure_checkpointing(
- trainer: 'pytorch_lightning.Trainer',
+ trainer: 'lightning.pytorch.Trainer',
log_dir: Path,
name: str,
resume: bool,
@@ -1257,12 +1257,12 @@ def _check_time_remaining(self, trainer: "pl.Trainer") -> None:
monitor_candidates = checkpoint_callback._monitor_candidates(trainer)
checkpoint_callback._save_last_checkpoint(trainer, monitor_candidates)
# Throw this exception to signal to Lightning to terminate gracefully.
- from pytorch_lightning.utilities.exceptions import _TunerExitException
+ from lightning.pytorch.utilities.exceptions import _TunerExitException
raise _TunerExitException()
-def configure_no_restart_validation_training_loop(trainer: pytorch_lightning.Trainer) -> None:
+def configure_no_restart_validation_training_loop(trainer: lightning.pytorch.Trainer) -> None:
if type(trainer.fit_loop.epoch_loop) != _TrainingEpochLoop:
warnings.warn("Detected custom epoch loop. Skipping no validation on restart support.", UserWarning)
return
diff --git a/nemo/utils/lightning_logger_patch.py b/nemo/utils/lightning_logger_patch.py
index 1b21ce3b1ae5..1528146c64b5 100644
--- a/nemo/utils/lightning_logger_patch.py
+++ b/nemo/utils/lightning_logger_patch.py
@@ -15,7 +15,7 @@
import logging as _logging
from logging.handlers import MemoryHandler
-import pytorch_lightning as pl
+import lightning.pytorch as pl
HANDLERS = {}
PATCHED = False
diff --git a/nemo/utils/loggers/clearml_logger.py b/nemo/utils/loggers/clearml_logger.py
index 4e2063705b4f..c7c3945ad853 100644
--- a/nemo/utils/loggers/clearml_logger.py
+++ b/nemo/utils/loggers/clearml_logger.py
@@ -19,11 +19,11 @@
from typing import Any, List, Literal, Mapping, Optional, Union
import pandas as pd
+from lightning.pytorch.callbacks import Checkpoint
+from lightning.pytorch.loggers import Logger
+from lightning.pytorch.utilities.parsing import AttributeDict
from lightning_utilities.core.apply_func import apply_to_collection
from omegaconf import DictConfig, ListConfig, OmegaConf
-from pytorch_lightning.callbacks import Checkpoint
-from pytorch_lightning.loggers import Logger
-from pytorch_lightning.utilities.parsing import AttributeDict
from torch import Tensor
from nemo.utils import logging
diff --git a/nemo/utils/loggers/dllogger.py b/nemo/utils/loggers/dllogger.py
index cdeef63b75f7..871d7ee3f7a2 100644
--- a/nemo/utils/loggers/dllogger.py
+++ b/nemo/utils/loggers/dllogger.py
@@ -17,11 +17,11 @@
from pathlib import Path
from typing import Optional
+from lightning.pytorch.loggers import Logger
+from lightning.pytorch.utilities import rank_zero_only
+from lightning.pytorch.utilities.parsing import AttributeDict
from lightning_utilities.core.apply_func import apply_to_collection
from omegaconf import DictConfig, ListConfig, OmegaConf
-from pytorch_lightning.loggers import Logger
-from pytorch_lightning.utilities import rank_zero_only
-from pytorch_lightning.utilities.parsing import AttributeDict
from nemo.utils import logging
@@ -34,7 +34,7 @@
HAVE_DLLOGGER = False
try:
- from lightning_fabric.utilities.logger import _convert_params, _flatten_dict, _sanitize_callable_params
+ from lightning.fabric.utilities.logger import _convert_params, _flatten_dict, _sanitize_callable_params
PL_LOGGER_UTILITIES = True
except (ImportError, ModuleNotFoundError):
diff --git a/nemo/utils/model_utils.py b/nemo/utils/model_utils.py
index b417c088b22e..5d7d019c6099 100644
--- a/nemo/utils/model_utils.py
+++ b/nemo/utils/model_utils.py
@@ -724,6 +724,10 @@ def save_artifacts(model, output_dir: str, use_abspath: bool = False) -> None:
app_state = AppState()
model_file = app_state.model_restore_path
model_cfg = copy.deepcopy(model.cfg)
+
+ if model_cfg.tokenizer.library == "huggingface":
+ model.tokenizer.save_pretrained(os.path.join(output_dir, "huggingface_tokenizer"))
+
if not hasattr(model, "artifacts"):
if hasattr(model_cfg, "tokenizer"):
OmegaConf.save(model_cfg.tokenizer, os.path.join(output_dir, "tokenizer_config.yaml"))
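The tokenizer exported above can be reloaded directly with Transformers; a sketch with a hypothetical output directory:

    import os
    from transformers import AutoTokenizer

    output_dir = "/path/to/artifacts"  # hypothetical export directory
    tok = AutoTokenizer.from_pretrained(os.path.join(output_dir, "huggingface_tokenizer"))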
diff --git a/requirements/requirements_common.txt b/requirements/requirements_common.txt
index d8ad52452c7c..91e6ce671878 100644
--- a/requirements/requirements_common.txt
+++ b/requirements/requirements_common.txt
@@ -1,6 +1,7 @@
datasets
einops
inflect
+mediapy==1.1.6
pandas
sacremoses>=0.0.43
sentencepiece<1.0.0
diff --git a/requirements/requirements_lightning.txt b/requirements/requirements_lightning.txt
index e8020f244821..adca2283f577 100644
--- a/requirements/requirements_lightning.txt
+++ b/requirements/requirements_lightning.txt
@@ -1,8 +1,8 @@
cloudpickle
fiddle
hydra-core>1.3,<=1.3.2
+lightning>2.2.1
omegaconf<=2.3
-pytorch-lightning>2.2.1
torchmetrics>=0.11.0
transformers>=4.45.0
wandb
diff --git a/requirements/requirements_multimodal.txt b/requirements/requirements_multimodal.txt
index 18abe82c9f96..aa33b3b55127 100644
--- a/requirements/requirements_multimodal.txt
+++ b/requirements/requirements_multimodal.txt
@@ -5,7 +5,7 @@ diffusers>=0.19.3
einops_exts
imageio
kornia
-megatron-energon
+megatron-energon<3.0.0
nerfacc>=0.5.3
open_clip_torch==2.24.0
PyMCubes
diff --git a/requirements/requirements_nlp.txt b/requirements/requirements_nlp.txt
index 16b6c574d2fa..6a86dacbfefb 100644
--- a/requirements/requirements_nlp.txt
+++ b/requirements/requirements_nlp.txt
@@ -14,7 +14,7 @@ matplotlib>=3.3.2
#megatron_core>0.6.0 # add back once mcore on pypi is compatible again
nltk>=3.6.5
numpy<2 # tensorstore has an implicit compiled dependency on numpy<2
-opencc<1.1.7
+opencc
pangu
prettytable
rapidfuzz
diff --git a/requirements/requirements_tts.txt b/requirements/requirements_tts.txt
index 0d499feb3b1f..6d20e0f2250f 100644
--- a/requirements/requirements_tts.txt
+++ b/requirements/requirements_tts.txt
@@ -11,3 +11,5 @@ nltk
pandas
pypinyin
pypinyin-dict
+seaborn
+
diff --git a/requirements/requirements_vllm.txt b/requirements/requirements_vllm.txt
index 414e05078680..6f5c8880f632 100644
--- a/requirements/requirements_vllm.txt
+++ b/requirements/requirements_vllm.txt
@@ -1 +1,19 @@
-vllm==0.5.3.post1
+# Minimal set of NeMo requirements to run vLLM export & deployment in /opt/venv in a NeMo container
+braceexpand
+faiss-cpu
+h5py
+hydra-core>1.3,<=1.3.2
+ijson
+jieba
+lightning>2.2.1
+matplotlib>=3.3.2
+omegaconf<=2.3
+onnx>=1.7.0
+OpenCC
+pangu
+rouge_score
+sacrebleu
+scikit-learn
+vllm==0.6.3
+webdataset>=0.2.86
+wget
diff --git a/scripts/checkpoint_averaging/average_model_checkpoints.py b/scripts/checkpoint_averaging/average_model_checkpoints.py
index 06c522f1e192..ce88bba9716b 100644
--- a/scripts/checkpoint_averaging/average_model_checkpoints.py
+++ b/scripts/checkpoint_averaging/average_model_checkpoints.py
@@ -60,7 +60,7 @@
import os
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
from omegaconf import OmegaConf, open_dict
diff --git a/scripts/checkpoint_averaging/megatron_checkpoint_averaging.py b/scripts/checkpoint_averaging/megatron_checkpoint_averaging.py
index 59f02a117da4..7b964fd7bade 100755
--- a/scripts/checkpoint_averaging/megatron_checkpoint_averaging.py
+++ b/scripts/checkpoint_averaging/megatron_checkpoint_averaging.py
@@ -35,8 +35,8 @@
import sys
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf.omegaconf import OmegaConf, open_dict
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.parts.nlp_overrides import NLPDDPStrategy, NLPSaveRestoreConnector
from nemo.core import ModelPT
@@ -60,7 +60,10 @@ def main():
help='A list of Python file names to "from FILE import *" (Needed when some classes were defined in __main__ of a script)',
)
parser.add_argument(
- '--class_path', type=str, default='', help='A path to class "module.submodule.class" (if given)',
+ '--class_path',
+ type=str,
+ default='',
+ help='A path to class "module.submodule.class" (if given)',
)
args = parser.parse_args()
diff --git a/scripts/checkpoint_converters/convert_baichuan2_hf_to_nemo.py b/scripts/checkpoint_converters/convert_baichuan2_hf_to_nemo.py
index b87f7e028cdb..b35fb201865e 100644
--- a/scripts/checkpoint_converters/convert_baichuan2_hf_to_nemo.py
+++ b/scripts/checkpoint_converters/convert_baichuan2_hf_to_nemo.py
@@ -25,9 +25,9 @@
from collections import OrderedDict
import torch
+from lightning.pytorch.core.saving import _load_state as ptl_load_state
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf
-from pytorch_lightning.core.saving import _load_state as ptl_load_state
-from pytorch_lightning.trainer.trainer import Trainer
from transformers import AutoModelForCausalLM, AutoTokenizer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
@@ -158,7 +158,7 @@ def convert(args):
scaler = None
if precision in [16, '16', '16-mixed']:
scaler = GradScaler(
- init_scale=nemo_config.get('native_amp_init_scale', 2 ** 32),
+ init_scale=nemo_config.get('native_amp_init_scale', 2**32),
growth_interval=nemo_config.get('native_amp_growth_interval', 1000),
hysteresis=nemo_config.get('hysteresis', 2),
)
diff --git a/scripts/checkpoint_converters/convert_baichuan2_nemo_to_hf.py b/scripts/checkpoint_converters/convert_baichuan2_nemo_to_hf.py
index ec048e4b6f19..335989309791 100644
--- a/scripts/checkpoint_converters/convert_baichuan2_nemo_to_hf.py
+++ b/scripts/checkpoint_converters/convert_baichuan2_nemo_to_hf.py
@@ -17,7 +17,7 @@
from collections import OrderedDict
import torch
-from pytorch_lightning import Trainer
+from lightning.pytorch import Trainer
from transformers import AutoModelForCausalLM
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
@@ -128,7 +128,7 @@ def convert(input_nemo_file, output_hf_file, precision=None, cpu_only=False) ->
ffn_hidden_size = model.cfg.ffn_hidden_size
num_query_groups = model.cfg.get("num_query_groups", head_num) # different num_query_groups for 70B
- head_size = hidden_size // head_num
+ head_size = model.cfg.get("kv_channels") or (hidden_size // head_num) # equivalent to hf's head_dim
heads_per_group = head_num // num_query_groups
qkv_total_dim = head_num + 2 * num_query_groups
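The switch to an `or` fallback (here and in the other converters below) matters when the NeMo config carries an explicit `kv_channels: null`, which a `.get(key, default)` would return as-is. A quick illustration with a plain dict standing in for `model.cfg`:

    cfg = {"kv_channels": None, "hidden_size": 4096, "num_attention_heads": 32}
    head_num = cfg["num_attention_heads"]

    # dict.get with a default only applies when the key is missing:
    cfg.get("kv_channels", cfg["hidden_size"] // head_num)    # -> None
    # `or` also covers an explicit None (or 0) value:
    cfg.get("kv_channels") or cfg["hidden_size"] // head_num  # -> 128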
diff --git a/scripts/checkpoint_converters/convert_bert_nemo_to_hf.py b/scripts/checkpoint_converters/convert_bert_nemo_to_hf.py
index e970ea29fca2..0ec5cc1e474b 100644
--- a/scripts/checkpoint_converters/convert_bert_nemo_to_hf.py
+++ b/scripts/checkpoint_converters/convert_bert_nemo_to_hf.py
@@ -26,7 +26,7 @@
import torch
import torch.nn.functional as F
-from pytorch_lightning import Trainer
+from lightning.pytorch import Trainer
from transformers import AutoTokenizer, BertConfig, BertModel
from nemo.collections.nlp.models.language_modeling.megatron_bert_model import MegatronBertModel
@@ -207,10 +207,16 @@ def convert_config(ref_config, hf_state_dict):
def get_args():
parser = ArgumentParser()
parser.add_argument(
- "--input_name_or_path", type=str, required=True, help="Path to .nemo file",
+ "--input_name_or_path",
+ type=str,
+ required=True,
+ help="Path to .nemo file",
)
parser.add_argument(
- "--output_path", type=str, required=True, help="Output HF model path",
+ "--output_path",
+ type=str,
+ required=True,
+ help="Output HF model path",
)
args = parser.parse_args()
diff --git a/scripts/checkpoint_converters/convert_chatglm_hf_to_nemo.py b/scripts/checkpoint_converters/convert_chatglm_hf_to_nemo.py
index 363e4de09ef7..2545181ce968 100644
--- a/scripts/checkpoint_converters/convert_chatglm_hf_to_nemo.py
+++ b/scripts/checkpoint_converters/convert_chatglm_hf_to_nemo.py
@@ -25,8 +25,8 @@
from collections import OrderedDict
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf
-from pytorch_lightning.trainer.trainer import Trainer
from transformers import AutoModel, AutoTokenizer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
@@ -126,7 +126,7 @@ def convert(args):
scaler = None
if precision in [16, '16', '16-mixed']:
scaler = GradScaler(
- init_scale=nemo_config.get('native_amp_init_scale', 2 ** 32),
+ init_scale=nemo_config.get('native_amp_init_scale', 2**32),
growth_interval=nemo_config.get('native_amp_growth_interval', 1000),
hysteresis=nemo_config.get('hysteresis', 2),
)
@@ -211,7 +211,11 @@ def convert(args):
qkv_bias = torch.cat((qkv_bias, q[i * heads_per_group : (i + 1) * heads_per_group, :]))
qkv_bias = torch.cat((qkv_bias, k[i : i + 1, :]))
qkv_bias = torch.cat((qkv_bias, v[i : i + 1, :]))
- qkv_bias = qkv_bias.reshape([head_size * (head_num + 2 * num_query_groups),])
+ qkv_bias = qkv_bias.reshape(
+ [
+ head_size * (head_num + 2 * num_query_groups),
+ ]
+ )
if mcore_gpt:
qkv_weights_base_name = f'model.decoder.layers.{l}.self_attention.linear_qkv.weight'
diff --git a/scripts/checkpoint_converters/convert_chatglm_nemo_to_hf.py b/scripts/checkpoint_converters/convert_chatglm_nemo_to_hf.py
index 5a8e52ee8be5..241e4254a9be 100644
--- a/scripts/checkpoint_converters/convert_chatglm_nemo_to_hf.py
+++ b/scripts/checkpoint_converters/convert_chatglm_nemo_to_hf.py
@@ -17,7 +17,7 @@
from collections import OrderedDict
import torch
-from pytorch_lightning import Trainer
+from lightning.pytorch import Trainer
from transformers import AutoModel
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
@@ -126,7 +126,7 @@ def convert(input_nemo_file, output_hf_file, precision=None, cpu_only=False) ->
num_layers = model.cfg.num_layers
num_query_groups = model.cfg.get("num_query_groups", head_num) # different num_query_groups for 70B
- head_size = hidden_size // head_num
+ head_size = model.cfg.get("kv_channels") or (hidden_size // head_num) # equivalent to hf's head_dim
heads_per_group = head_num // num_query_groups # 32 / 2 = 16
qkv_total_dim = head_num + 2 * num_query_groups # 32 + 2 * 2 = 36
diff --git a/scripts/checkpoint_converters/convert_clip_hf_to_nemo.py b/scripts/checkpoint_converters/convert_clip_hf_to_nemo.py
index 2b8156ad4b26..c47444534604 100644
--- a/scripts/checkpoint_converters/convert_clip_hf_to_nemo.py
+++ b/scripts/checkpoint_converters/convert_clip_hf_to_nemo.py
@@ -38,9 +38,9 @@
from argparse import ArgumentParser
import torch
+from lightning.pytorch.plugins.environments import TorchElasticEnvironment
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf
-from pytorch_lightning.plugins.environments import TorchElasticEnvironment
-from pytorch_lightning.trainer.trainer import Trainer
from transformers import CLIPModel
from nemo.collections.multimodal.models.vision_language_foundation.clip.megatron_clip_models import MegatronCLIPModel
diff --git a/scripts/checkpoint_converters/convert_falcon_hf_to_nemo.py b/scripts/checkpoint_converters/convert_falcon_hf_to_nemo.py
index ae8885f4de93..8a880a290484 100644
--- a/scripts/checkpoint_converters/convert_falcon_hf_to_nemo.py
+++ b/scripts/checkpoint_converters/convert_falcon_hf_to_nemo.py
@@ -32,7 +32,7 @@
import time
from typing import Dict
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
import yaml
from omegaconf import OmegaConf
@@ -83,11 +83,11 @@ def get_new_key(old_key):
def load_falcon_config(args) -> FalconConfig:
- """ Helper utility to load FalconConfig.
+ """Helper utility to load FalconConfig.
Legacy Falcon-7B and Falcon-40B are not compatible with `transformers.FalconConfig` and
`transformers.FalconModel`. need to manually set the config values
- and force to `falcon` model type.
+ and force to `falcon` model type.
"""
config = FalconConfig.from_pretrained(args.input_name_or_path)
if config.model_type == 'RefinedWeb':
diff --git a/scripts/checkpoint_converters/convert_falcon_nemo_to_hf.py b/scripts/checkpoint_converters/convert_falcon_nemo_to_hf.py
index da8f15b92649..cc1d99b6d1c6 100644
--- a/scripts/checkpoint_converters/convert_falcon_nemo_to_hf.py
+++ b/scripts/checkpoint_converters/convert_falcon_nemo_to_hf.py
@@ -17,7 +17,7 @@
from collections import OrderedDict
import torch
-from pytorch_lightning import Trainer
+from lightning.pytorch import Trainer
from transformers import AutoModelForCausalLM
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
diff --git a/scripts/checkpoint_converters/convert_gpt_nemo_to_mcore.py b/scripts/checkpoint_converters/convert_gpt_nemo_to_mcore.py
index 35039f8d02e9..61443a3bcb28 100644
--- a/scripts/checkpoint_converters/convert_gpt_nemo_to_mcore.py
+++ b/scripts/checkpoint_converters/convert_gpt_nemo_to_mcore.py
@@ -17,8 +17,8 @@
from collections import OrderedDict
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf, open_dict
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
from nemo.collections.nlp.parts.nlp_overrides import NLPDDPStrategy
diff --git a/scripts/checkpoint_converters/convert_llama_hf_to_nemo.py b/scripts/checkpoint_converters/convert_llama_hf_to_nemo.py
index 4eb8cb6330ca..44de38497b44 100644
--- a/scripts/checkpoint_converters/convert_llama_hf_to_nemo.py
+++ b/scripts/checkpoint_converters/convert_llama_hf_to_nemo.py
@@ -27,8 +27,8 @@
from collections import OrderedDict
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf
-from pytorch_lightning.trainer.trainer import Trainer
from transformers import AutoTokenizer, LlamaForCausalLM, LlamaTokenizer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
diff --git a/scripts/checkpoint_converters/convert_llama_hf_to_nemo_load.py b/scripts/checkpoint_converters/convert_llama_hf_to_nemo_load.py
index 42d3e77ce4c8..75bd0d0ab6ed 100644
--- a/scripts/checkpoint_converters/convert_llama_hf_to_nemo_load.py
+++ b/scripts/checkpoint_converters/convert_llama_hf_to_nemo_load.py
@@ -28,8 +28,8 @@
from collections import OrderedDict
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf
-from pytorch_lightning.trainer.trainer import Trainer
from transformers import AutoTokenizer, LlamaForCausalLM, LlamaTokenizer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
diff --git a/scripts/checkpoint_converters/convert_llama_hf_to_nemo_save_dict.py b/scripts/checkpoint_converters/convert_llama_hf_to_nemo_save_dict.py
index f7096996e5b1..4a8a409a88fd 100644
--- a/scripts/checkpoint_converters/convert_llama_hf_to_nemo_save_dict.py
+++ b/scripts/checkpoint_converters/convert_llama_hf_to_nemo_save_dict.py
@@ -27,8 +27,8 @@
from collections import OrderedDict
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf
-from pytorch_lightning.trainer.trainer import Trainer
from transformers import AutoTokenizer, LlamaForCausalLM, LlamaTokenizer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
diff --git a/scripts/checkpoint_converters/convert_llama_nemo_to_hf.py b/scripts/checkpoint_converters/convert_llama_nemo_to_hf.py
index a3c40676a980..87b7151aa961 100644
--- a/scripts/checkpoint_converters/convert_llama_nemo_to_hf.py
+++ b/scripts/checkpoint_converters/convert_llama_nemo_to_hf.py
@@ -17,8 +17,8 @@
from collections import OrderedDict
import torch
+from lightning.pytorch import Trainer
from omegaconf import open_dict
-from pytorch_lightning import Trainer
from transformers import AutoModelForCausalLM, LlamaTokenizer, LlamaTokenizerFast, convert_slow_tokenizer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
@@ -26,7 +26,7 @@
from nemo.utils import logging
"""
-Script to convert a llama2 checkpoint in nemo (mcore path) into a HuggingFace checkpoint.
+Script to convert a llama checkpoint in nemo (mcore path) into a HuggingFace checkpoint.
This script can be used to 1) generate only the HF weights, or 2) generate an entire HF model folder.
1) Generate only HF weights from a nemo file:
@@ -37,13 +37,21 @@
2) Generate the full HF model folder
+ python convert_llama_nemo_to_hf.py \
+ --input_name_or_path /path/to/file.nemo or /path/to/extracted_folder \
+ --output_path /path/to/pytorch_model.bin \
+ --hf_input_path /path/to/input_hf_folder \
+ --hf_output_path /path/to/output_hf_folder
+
+3) Generate the full HF model folder with a custom tokenizer
+
python convert_llama_nemo_to_hf.py \
--input_name_or_path /path/to/file.nemo or /path/to/extracted_folder \
--output_path /path/to/pytorch_model.bin \
--hf_input_path /path/to/input_hf_folder \
--hf_output_path /path/to/output_hf_folder \
- --input_tokenizer /path/to/tokenizer \
- --hf_output_tokenizer /path/to/output_tokenizer \
+ --input_tokenizer /path/to/custom_nemo_tokenizer.model \
+ --hf_output_tokenizer /path/to/output_tokenizer
Use the --cpu-only flag if the model cannot fit in the GPU (e.g. Llama2 70b).
However this option makes the conversion script significantly slower.
@@ -143,7 +151,7 @@ def convert(input_nemo_file, output_hf_file, precision=None, cpu_only=False) ->
ffn_hidden_size = model.cfg.ffn_hidden_size
num_query_groups = model.cfg.get("num_query_groups", head_num) # different num_query_groups for 70B
- head_size = hidden_size // head_num
+ head_size = model.cfg.get("kv_channels") or (hidden_size // head_num) # equivalent to hf's head_dim
heads_per_group = head_num // num_query_groups
qkv_total_dim = head_num + 2 * num_query_groups
@@ -246,21 +254,25 @@ def replace_hf_weights_and_tokenizer(
nemo_exported = torch.load(weights_file)
if tokenizer_path:
- tokenizer = LlamaTokenizer.from_pretrained(
- tokenizer_path,
- local_files_only=True,
- legacy=False,
- )
- tmp_tokenizer = convert_slow_tokenizer.convert_slow_tokenizer(tokenizer)
- fast_tokenizer = LlamaTokenizerFast(tokenizer_object=tmp_tokenizer)
- tokenizer_length = len(fast_tokenizer)
- model.resize_token_embeddings(tokenizer_length)
+ try:
+ tokenizer = LlamaTokenizer.from_pretrained(
+ tokenizer_path,
+ local_files_only=True,
+ legacy=False,
+ )
+ tmp_tokenizer = convert_slow_tokenizer.convert_slow_tokenizer(tokenizer)
+ fast_tokenizer = LlamaTokenizerFast(tokenizer_object=tmp_tokenizer)
+ tokenizer_length = len(fast_tokenizer)
+ model.resize_token_embeddings(tokenizer_length)
+ except Exception:
+ tokenizer = None
+ logging.warning("Could not load custom tokenizer, proceeding with default tokenizer")
model.load_state_dict(nemo_exported)
model.save_pretrained(output_hf_path)
logging.info(f"Full HF model saved to {output_hf_path}")
- if tokenizer_path:
+ if tokenizer_path and (tokenizer is not None):
fast_tokenizer.save_pretrained(output_hf_tokenizer)
tokenizer.save_pretrained(output_hf_tokenizer)
logging.info(f"Tokenizer saved to {output_hf_tokenizer}")
diff --git a/scripts/checkpoint_converters/convert_mistral_7b_hf_to_nemo.py b/scripts/checkpoint_converters/convert_mistral_7b_hf_to_nemo.py
index 425a6c696120..3cf5bbd4acf9 100644
--- a/scripts/checkpoint_converters/convert_mistral_7b_hf_to_nemo.py
+++ b/scripts/checkpoint_converters/convert_mistral_7b_hf_to_nemo.py
@@ -29,9 +29,9 @@
import torch
import torch.nn
+from lightning.pytorch.core.saving import _load_state as ptl_load_state
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf
-from pytorch_lightning.core.saving import _load_state as ptl_load_state
-from pytorch_lightning.trainer.trainer import Trainer
from transformers import AutoModelForCausalLM, AutoTokenizer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
@@ -46,6 +46,7 @@
def get_args():
+ """parses cli args"""
parser = ArgumentParser()
parser.add_argument(
"--input_name_or_path",
@@ -63,6 +64,7 @@ def get_args():
def restore_model_from_checkpoint(cls, checkpoint, strict, **kwargs):
+ """Loads mcore ckpt"""
try:
if 'cfg' in kwargs:
model = ptl_load_state(cls, checkpoint, strict=strict, **kwargs)
@@ -103,6 +105,7 @@ def restore_model_from_checkpoint(cls, checkpoint, strict, **kwargs):
def load_config(mistral_config, tokenizer, config_path):
+ """Create mcor config"""
nemo_config = OmegaConf.load(
os.path.join(os.path.dirname(__file__), '../../examples/nlp/language_modeling/conf/megatron_llama_config.yaml')
).model
@@ -130,7 +133,7 @@ def load_config(mistral_config, tokenizer, config_path):
nemo_config.activation = 'fast-swiglu'
# Tokenizer config
- if hasattr(tokenizer, 'vocab_file'):
+ if getattr(tokenizer, 'vocab_file', None) is not None:
nemo_config.tokenizer.model = tokenizer.vocab_file
elif os.path.exists(os.path.join(config_path, 'tekken.json')):
# Load tekken.json, extract the 'vocab' field & write it to file.
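The `hasattr` → `getattr(..., None) is not None` change above matters because some HF tokenizers expose a `vocab_file` attribute set to None; a minimal illustration:

    class Tok:
        vocab_file = None  # attribute exists but carries no path

    hasattr(Tok(), "vocab_file")                    # True: the old check took the wrong branch
    getattr(Tok(), "vocab_file", None) is not None  # False: falls through to tekken.json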
@@ -177,6 +180,8 @@ def load_config(mistral_config, tokenizer, config_path):
class LazyStateDict:
+ """Lazy"""
+
def __init__(self, ckpt_index, root):
self.map = ckpt_index
self.root = root
@@ -192,6 +197,7 @@ def __getitem__(self, key):
def load_mistral_ckpt(in_dir, load_model=True):
+ """loads mistral hf ckpt"""
params_file = os.path.join(in_dir, 'config.json')
assert os.path.exists(params_file)
with open(params_file, 'r') as fp:
@@ -217,6 +223,7 @@ def load_mistral_ckpt(in_dir, load_model=True):
def parse_precision(precision):
+ """parses precision string"""
if precision in ["32", "16"]:
return int(float(precision))
elif precision in ["bf16", "bf16-mixed"]:
@@ -230,6 +237,7 @@ def parse_precision(precision):
def make_trainer(args, nemo_config):
+ """creates PTL trainer"""
model_args, ckpt, tokenizer = load_mistral_ckpt(args.input_name_or_path, load_model=False)
nemo_config = load_config(model_args, tokenizer, args.input_name_or_path)
precision = parse_precision(args.precision)
@@ -269,6 +277,7 @@ def make_trainer(args, nemo_config):
def convert(args):
+ """converts chceckpoint from hf to nemo"""
logging.info(f"loading checkpoint {args.input_name_or_path}")
model_args, ckpt, tokenizer = load_mistral_ckpt(args.input_name_or_path)
@@ -408,6 +417,7 @@ def convert(args):
def merge(a: dict, b: dict, path=[]):
+ """merges two state dicts"""
is_dict = lambda x: isinstance(x, OrderedDict) or isinstance(x, dict)
for key in b:
if key in a:
@@ -421,6 +431,7 @@ def merge(a: dict, b: dict, path=[]):
def save_to_nemo(args, checkpoint):
+ """saves checkpoint to nemo format"""
logging.info(f"loading checkpoint {args.input_name_or_path}")
model_args, ckpt, tokenizer = load_mistral_ckpt(args.input_name_or_path, load_model=False)
diff --git a/scripts/checkpoint_converters/convert_mistral_7b_nemo_to_hf.py b/scripts/checkpoint_converters/convert_mistral_7b_nemo_to_hf.py
index b8c30a1b929d..1f0a31076f8e 100644
--- a/scripts/checkpoint_converters/convert_mistral_7b_nemo_to_hf.py
+++ b/scripts/checkpoint_converters/convert_mistral_7b_nemo_to_hf.py
@@ -25,7 +25,7 @@
import torch
import torch.nn
-from pytorch_lightning.trainer.trainer import Trainer
+from lightning.pytorch.trainer.trainer import Trainer
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
@@ -134,7 +134,7 @@ def convert(in_file, precision=None, cpu_only=True) -> None:
num_layers = model.cfg.num_layers
num_query_groups = model.cfg.get("num_query_groups", head_num) # different num_query_groups for 70B
- head_size = model.cfg.get('kv_channels', hidden_size // head_num)
+ head_size = model.cfg.get("kv_channels") or (hidden_size // head_num) # equivalent to hf's head_dim
heads_per_group = head_num // num_query_groups
qkv_total_dim = head_num + 2 * num_query_groups
diff --git a/scripts/checkpoint_converters/convert_mixtral_hf_to_nemo.py b/scripts/checkpoint_converters/convert_mixtral_hf_to_nemo.py
index 36e4c0c2c3ea..a75c6876e70a 100644
--- a/scripts/checkpoint_converters/convert_mixtral_hf_to_nemo.py
+++ b/scripts/checkpoint_converters/convert_mixtral_hf_to_nemo.py
@@ -30,9 +30,9 @@
import megatron.core.parallel_state as parallel_state
import torch
import torch.nn
+from lightning.pytorch.core.saving import _load_state as ptl_load_state
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf
-from pytorch_lightning.core.saving import _load_state as ptl_load_state
-from pytorch_lightning.trainer.trainer import Trainer
from transformers import AutoModelForCausalLM, AutoTokenizer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
diff --git a/scripts/checkpoint_converters/convert_mixtral_nemo_to_hf.py b/scripts/checkpoint_converters/convert_mixtral_nemo_to_hf.py
index 2bac2eaad616..eb934803f164 100644
--- a/scripts/checkpoint_converters/convert_mixtral_nemo_to_hf.py
+++ b/scripts/checkpoint_converters/convert_mixtral_nemo_to_hf.py
@@ -26,7 +26,7 @@
import megatron.core.parallel_state as parallel_state
import torch
import torch.nn
-from pytorch_lightning.trainer.trainer import Trainer
+from lightning.pytorch.trainer.trainer import Trainer
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
@@ -137,7 +137,7 @@ def convert(in_file, precision=None) -> None:
num_layers = model.cfg.num_layers
num_query_groups = model.cfg.get("num_query_groups", head_num) # different num_query_groups for 70B
- head_size = hidden_size // head_num
+ head_size = model.cfg.get("kv_channels") or (hidden_size // head_num) # equivalent to hf's head_dim
heads_per_group = head_num // num_query_groups
qkv_total_dim = head_num + 2 * num_query_groups
diff --git a/scripts/checkpoint_converters/convert_mpt_hf_to_nemo.py b/scripts/checkpoint_converters/convert_mpt_hf_to_nemo.py
index e7d81f709092..d4a450a8e046 100644
--- a/scripts/checkpoint_converters/convert_mpt_hf_to_nemo.py
+++ b/scripts/checkpoint_converters/convert_mpt_hf_to_nemo.py
@@ -56,7 +56,7 @@
import argparse
import os
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
import yaml
from omegaconf import OmegaConf
@@ -68,7 +68,11 @@
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
- "--input_name_or_path", type=str, default=None, required=True, help="Path to Huggingface MPT checkpoints",
+ "--input_name_or_path",
+ type=str,
+ default=None,
+ required=True,
+ help="Path to Huggingface MPT checkpoints",
)
parser.add_argument("--output_path", type=str, default=None, required=True, help="Path to output .nemo file.")
parser.add_argument(
diff --git a/scripts/checkpoint_converters/convert_nemo1_to_nemo2.py b/scripts/checkpoint_converters/convert_nemo1_to_nemo2.py
new file mode 100644
index 000000000000..12e56e9f1793
--- /dev/null
+++ b/scripts/checkpoint_converters/convert_nemo1_to_nemo2.py
@@ -0,0 +1,242 @@
+# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+r"""
+Script to convert NeMo 1.0 checkpoints to NeMo 2.0 format.
+Available models are listed in MODEL_CONFIG_MAPPING.
+Example usage:
+
+a. Convert a .nemo checkpoint
+ python /opt/NeMo/scripts/checkpoint_converters/convert_nemo1_to_nemo2.py \
+ --input_path=Meta-Llama-3-8B.nemo \
+ --output_path=your_output_dir \
+ --model_id=meta-llama/Meta-Llama-3-8B
+
+b. Convert a model weight directory.
+ The checkpoint should be similar to the `model_weights` subdir left after extracting the .nemo file.
+ Please also provide tokenizer_library and tokenizer_path when loading from weight directory.
+ python /opt/NeMo/scripts/checkpoint_converters/convert_nemo1_to_nemo2.py \
+ --input_path=nemotron3-8b-extracted/model_weights \
+ --tokenizer_path=path_to_your_tokenizer_model.model \
+ --tokenizer_library=sentencepiece \
+ --output_path=your_output_dir \
+ --model_id=nvidia/nemotron-3-8b-base-4k
+
+"""
+
+import os
+import shutil
+import tempfile
+from argparse import ArgumentParser
+from pathlib import Path
+
+import torch
+from megatron.core.dist_checkpointing.dict_utils import dict_list_map_inplace
+from megatron.core.dist_checkpointing.mapping import LocalNonpersistentObject, ShardedObject
+from omegaconf import OmegaConf
+from transformers import AutoTokenizer as HFAutoTokenizer
+
+from nemo.collections import llm
+from nemo.collections.common.tokenizers.huggingface.auto_tokenizer import AutoTokenizer
+from nemo.collections.llm.recipes.precision.mixed_precision import bf16_mixed
+from nemo.collections.nlp.modules.common.tokenizer_utils import get_nmt_tokenizer
+from nemo.collections.nlp.parts.nlp_overrides import NLPSaveRestoreConnector
+from nemo.lightning import MegatronStrategy, Trainer, _strategy_lib
+from nemo.lightning.ckpt_utils import ckpt_to_context_subdir
+from nemo.lightning.io.pl import TrainerContext, ckpt_to_weights_subdir
+from nemo.utils import logging
+
+MODEL_CONFIG_MAPPING = {
+ "meta-llama/Llama-2-7b-hf": (llm.LlamaModel, llm.Llama2Config7B),
+ "meta-llama/Llama-2-13b-hf": (llm.LlamaModel, llm.Llama2Config13B),
+ "meta-llama/Llama-2-70b-hf": (llm.LlamaModel, llm.Llama2Config70B),
+ "meta-llama/Meta-Llama-3-8B": (llm.LlamaModel, llm.Llama3Config8B),
+ "meta-llama/Meta-Llama-3-70B": (llm.LlamaModel, llm.Llama3Config70B),
+ "mistralai/Mixtral-8x7B-v0.1": (llm.MixtralModel, llm.MixtralConfig8x7B),
+ "mistralai/Mixtral-8x22B-v0.1": (llm.MixtralModel, llm.MixtralConfig8x22B),
+ "mistralai/Mistral-7B-v0.1": (llm.MistralModel, llm.MistralConfig7B),
+ "nvidia/nemotron-3-8b-base-4k": (llm.NemotronModel, llm.Nemotron3Config8B),
+ "nemotron4-22b": (llm.NemotronModel, llm.Nemotron3Config22B),
+ "nemotron4-15b": (llm.NemotronModel, llm.Nemotron4Config15B),
+ "nemotron4-340b": (llm.NemotronModel, llm.Nemotron4Config340B),
+}
+
+
+def get_args():
+ """
+ Parse the command line arguments.
+ """
+ parser = ArgumentParser(
+ description="""Script to convert NeMo 1.0 checkpoints to NeMo 2.0 format.
+ This script may download from Hugging Face; make sure you have
+ access to the gated repo and have logged into Hugging Face (e.g. huggingface-cli login)"""
+ )
+ parser.add_argument(
+ "--input_path",
+ type=str,
+ default=None,
+ required=True,
+ help="""Path to NeMo 1.0 checkpoints. Could be .nemo file, or `model_weights` directory a
+ fter untar the .nemo. Please also provide tokenizer_library and tokenizer_path if you pass
+ in `model_weights` directory.""",
+ )
+ parser.add_argument(
+ "--output_path", type=str, default=None, required=True, help="Path to output NeMo 2.0 directory."
+ )
+ parser.add_argument(
+ "--model_id", type=str, default=None, required=True, help="Hugging Face or nemotron model id for the model"
+ )
+ parser.add_argument(
+ "--tokenizer_path",
+ type=str,
+ default=None,
+ required=False,
+ help="""Path to tokenizer. If not provided, will 1. try instantiate from nemo1 config
+ 2. pull AutoTokenizer from Hugging Face according to model_id if 1 fails""",
+ )
+ parser.add_argument(
+ "--tokenizer_library",
+ type=str,
+ default=None,
+ required=False,
+ help="Tokenizer library, e.g. `sentencepiece`, `megatron`. Defaults to `sentencepiece`",
+ )
+ args = parser.parse_args()
+ return args
+
+
+def get_nemo2_model(model_id, tokenizer) -> llm.GPTModel:
+ """
+    Instantiate a NeMo 2.0 model from model_id and tokenizer. Uses bf16, since NeMo 1.0 checkpoints are bf16.
+
+ Returns:
+ llm.GPTModel: NeMo 2.0 model instance
+ """
+
+ if model_id not in MODEL_CONFIG_MAPPING:
+ valid_ids = "\n- ".join([""] + list(MODEL_CONFIG_MAPPING.keys()))
+ raise ValueError(f"Unsupported model_id: {model_id}. Please provide a valid model_id from {valid_ids}")
+ model_cls, config_cls = MODEL_CONFIG_MAPPING[model_id]
+ # nemo1 ckpts are bf16
+ return model_cls(config_cls(bf16=True, params_dtype=torch.bfloat16), tokenizer=tokenizer)
+
+
+def get_tokenizer(input_path: Path, tokenizer_tmp_dir: Path) -> AutoTokenizer:
+ """
+ Get tokenizer from input .nemo file, or args.tokenizer_path, or Hugging Face.
+ Only SentencePiece and Hugging Face tokenizers are supported.
+
+ Returns:
+ AutoTokenizer: tokenizer instance
+ """
+ if not input_path.is_dir(): # if .nemo tar
+ with tempfile.TemporaryDirectory() as tmp_dir: # we want to clean up this tmp dir
+ NLPSaveRestoreConnector._unpack_nemo_file(input_path, tmp_dir)
+ cfg = OmegaConf.load(f"{tmp_dir}/model_config.yaml")
+ tokenizer_lib = cfg.tokenizer.library
+ tokenizer_model = cfg.tokenizer.get("model") and cfg.tokenizer.get("model").split("nemo:", 1)[-1]
+ if tokenizer_model:
+ shutil.copy(f"{tmp_dir}/{tokenizer_model}", f"{tokenizer_tmp_dir}/{tokenizer_model}")
+ elif cfg.tokenizer.library == "huggingface":
+ HFAutoTokenizer.from_pretrained(cfg.tokenizer.type).save_pretrained(tokenizer_tmp_dir)
+ tokenizer_model = f"{tokenizer_tmp_dir}/{tokenizer_model}" if tokenizer_model else None
+ else:
+        if args.tokenizer_path:  # not a .nemo file; for a plain weights dir, tokenizer lib and path must be specified
+            tokenizer_lib = args.tokenizer_library or "sentencepiece"
+            if args.tokenizer_library is None:
+                logging.warning(
+                    "You specified tokenizer_path but did not provide tokenizer_library; defaulting to sentencepiece"
+                )
+            tokenizer_model = args.tokenizer_path
+        else:  # no .nemo config and no tokenizer path specified; download from Hugging Face and reload
+ tokenizer_lib = "huggingface"
+ HFAutoTokenizer.from_pretrained(args.model_id).save_pretrained(tokenizer_tmp_dir)
+
+ if tokenizer_lib == "huggingface":
+ return AutoTokenizer(tokenizer_tmp_dir)
+    else:  # do not route the huggingface tokenizer through get_nmt_tokenizer, since that pulls from HF instead of reloading the saved copy
+ return get_nmt_tokenizer(library=tokenizer_lib, tokenizer_model=tokenizer_model)
+
+
+def main() -> None:
+ """
+ Main function to convert NeMo 1.0 checkpoint to NeMo 2.0 format.
+ """
+ tokenizer_tmp_dir = Path("/tmp/nemo_tokenizer")
+ tokenizer_tmp_dir.mkdir(parents=True, exist_ok=True)
+ tokenizer = get_tokenizer(Path(args.input_path), tokenizer_tmp_dir)
+ model = get_nemo2_model(args.model_id, tokenizer=tokenizer)
+ model.optim = None
+
+ trainer = Trainer(
+ devices=1,
+ accelerator="cpu",
+ strategy=MegatronStrategy(ddp="pytorch", setup_optimizers=False, plugins=bf16_mixed()),
+ )
+
+ trainer.strategy.connect(model)
+ trainer.strategy.setup_environment()
+ if not model.state_dict():
+ with _strategy_lib.megatron_cpu_init_context(model.config):
+ model.configure_model()
+
+ trainer.strategy.setup(trainer)
+
+ logging.info(f"loading checkpoint {args.input_path}")
+
+ sharded_state_dict = {"state_dict": trainer.strategy.megatron_parallel.sharded_state_dict()}
+
+ for key in list(sharded_state_dict['state_dict'].keys()):
+ new_key = key.replace('module', 'model', 1)
+ sharded_state_dict['state_dict'][new_key] = sharded_state_dict['state_dict'].pop(key)
+ sharded_state_dict['state_dict'][new_key].key = sharded_state_dict['state_dict'][new_key].key.replace(
+ 'module', 'model', 1
+ )
+
+ def skip_fp8_load(x):
+ if isinstance(x, ShardedObject) and 'core_attention' in x.key and '_extra_state' in x.key:
+ x = LocalNonpersistentObject(x.data) # use the FP8 state from initialization, not from ckpt
+ return x
+
+ dict_list_map_inplace(skip_fp8_load, sharded_state_dict)
+ if not Path(args.input_path).is_dir():
+ with tempfile.TemporaryDirectory() as tmp_dir:
+ NLPSaveRestoreConnector._unpack_nemo_file(args.input_path, tmp_dir)
+ model_weight_dir = f"{tmp_dir}/model_weights"
+ model_ckpt = trainer.strategy.checkpoint_io.load_checkpoint(model_weight_dir, sharded_state_dict, None)
+ else:
+ model_ckpt = trainer.strategy.checkpoint_io.load_checkpoint(args.input_path, sharded_state_dict, None)
+
+ logging.info(f"Saving checkpoint to {args.output_path}")
+ model_ckpt['state_dict'] = {k.replace('model', 'module', 1): v for k, v in model_ckpt['state_dict'].items()}
+ trainer.model.module.load_state_dict(model_ckpt['state_dict'])
+ trainer.save_checkpoint(ckpt_to_weights_subdir(args.output_path, is_saving=False))
+ if getattr(trainer.strategy, "async_save", False):
+ trainer.strategy.checkpoint_io.maybe_finalize_save_checkpoint(blocking=True)
+
+ # Corresponding to Connector: on_import_ckpt
+ if hasattr(trainer.model, "__io__") and hasattr(trainer.model.tokenizer, '__io__'):
+ trainer.model.__io__.tokenizer = trainer.model.tokenizer.__io__
+ TrainerContext.from_trainer(trainer).io_dump(ckpt_to_context_subdir(args.output_path), yaml_attrs=["model"])
+
+ # remove tmp dir
+ if os.path.isdir(tokenizer_tmp_dir):
+ shutil.rmtree(tokenizer_tmp_dir)
+
+ logging.info(f"NeMo 2.0 checkpoint saved at {args.output_path}")
+
+
+if __name__ == '__main__':
+ args = get_args()
+ main()
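A minimal sketch of the `module` -> `model` key renaming performed in `main()` above, using a plain dict with made-up key names in place of Megatron's sharded state-dict objects (real entries also carry a `.key` attribute that is rewritten the same way):

```python
# Toy illustration of the prefix rename in main(); key names are invented.
state_dict = {
    "module.embedding.word_embeddings.weight": "tensor-0",
    "module.decoder.layers.0.self_attention.linear_qkv.weight": "tensor-1",
}

for key in list(state_dict.keys()):
    # replace only the first occurrence, mirroring key.replace('module', 'model', 1)
    state_dict[key.replace("module", "model", 1)] = state_dict.pop(key)

assert all(k.startswith("model.") for k in state_dict)
print(sorted(state_dict))
```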
diff --git a/scripts/checkpoint_converters/convert_nemotron_nemo_to_hf.py b/scripts/checkpoint_converters/convert_nemotron_nemo_to_hf.py
index fc0f660cbd42..2f66773f8724 100644
--- a/scripts/checkpoint_converters/convert_nemotron_nemo_to_hf.py
+++ b/scripts/checkpoint_converters/convert_nemotron_nemo_to_hf.py
@@ -19,9 +19,9 @@
from collections import OrderedDict
import torch
-from pytorch_lightning import Trainer
+from lightning.pytorch import Trainer
from transformers import LlamaTokenizer, PreTrainedTokenizerFast
-from transformers.convert_slow_tokenizer import LlamaConverter
+from transformers.convert_slow_tokenizer import LlamaConverter, TikTokenConverter
from nemo.collections.common.tokenizers.huggingface.auto_tokenizer import AutoTokenizer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
@@ -130,6 +130,20 @@ def convert_hf_config(nemo_config, tokenizer, vocab_size, dtype, hf_output_path,
json.dump(hf_config, open(f"{hf_output_path}/config.json", "w"), indent=2)
+def convert_tiktoken(vocab_file) -> None:
+    """Rewrite a JSON tiktoken vocab in place as plain '<token_bytes> <rank>' lines."""
+    with open(vocab_file, 'r') as f:
+        vocab = json.load(f)
+    os.remove(vocab_file)
+
+    lines = [f"{line['token_bytes']} {line['rank']}" for line in vocab]
+
+    # write all lines in a single pass instead of reopening the file per line
+    with open(vocab_file, 'w') as f:
+        f.write('\n'.join(lines) + '\n')
+
+
def convert(input_nemo_file, output_hf_file, precision=None, cpu_only=False) -> None:
"""
Convert NeMo weights to HF weights
@@ -323,6 +337,28 @@ def extract_nemotron_tokenizer(nemo_file, model_config, output_hf_path, nemo_tok
)
tokenizer.save_pretrained(output_hf_path)
logging.info(f"Setencepiece tokenizer has been saved to {output_tokenizer}")
+ elif tokenizer_cfg.library == "tiktoken":
+        tokenizer_fn = tokenizer_cfg.model[5:]  # strip the 'nemo:' prefix
+        special_tokens = ["<unk>", "<s>", "</s>"]
+ import tarfile
+
+ archive = tarfile.open(nemo_file, "r")
+ tokenizer_filename = "./" + tokenizer_fn # exclude 'nemo:' prefix
+ archive.extract(tokenizer_filename, output_hf_path)
+ archive.close()
+ vocab_file = os.path.join(output_hf_path, tokenizer_fn)
+ convert_tiktoken(vocab_file)
+ converted_tokenizer = TikTokenConverter(
+ vocab_file=vocab_file, additional_special_tokens=special_tokens
+ ).converted()
+ os.remove(vocab_file)
+ tokenizer = PreTrainedTokenizerFast(
+ tokenizer_object=converted_tokenizer,
+ model_input_names=["input_ids", "attention_mask"],
+ bos_token="",
+ eos_token="",
+ )
+ tokenizer.save_pretrained(output_hf_path)
elif isinstance(nemo_tokenizer, AutoTokenizer):
nemo_tokenizer.tokenizer.save_pretrained(output_hf_path)
logging.info(f"HF AutoTokenizer has been saved to {output_hf_path}")
diff --git a/scripts/checkpoint_converters/convert_qwen2_hf_to_nemo.py b/scripts/checkpoint_converters/convert_qwen2_hf_to_nemo.py
index a29a58557c0c..565220efc515 100644
--- a/scripts/checkpoint_converters/convert_qwen2_hf_to_nemo.py
+++ b/scripts/checkpoint_converters/convert_qwen2_hf_to_nemo.py
@@ -25,8 +25,8 @@
from collections import OrderedDict
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf
-from pytorch_lightning.trainer.trainer import Trainer
from transformers import Qwen2ForCausalLM, Qwen2Tokenizer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
diff --git a/scripts/checkpoint_converters/convert_qwen2_nemo_to_hf.py b/scripts/checkpoint_converters/convert_qwen2_nemo_to_hf.py
index 6080499ffdf8..968caade917c 100644
--- a/scripts/checkpoint_converters/convert_qwen2_nemo_to_hf.py
+++ b/scripts/checkpoint_converters/convert_qwen2_nemo_to_hf.py
@@ -17,7 +17,7 @@
from collections import OrderedDict
import torch
-from pytorch_lightning import Trainer
+from lightning.pytorch import Trainer
from transformers import Qwen2ForCausalLM, Qwen2Tokenizer, Qwen2TokenizerFast, convert_slow_tokenizer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
@@ -142,7 +142,7 @@ def convert(input_nemo_file, output_hf_file, precision=None, cpu_only=False) ->
ffn_hidden_size = model.cfg.ffn_hidden_size
num_query_groups = model.cfg.get("num_query_groups", head_num)
- head_size = hidden_size // head_num
+ head_size = model.cfg.get("kv_channels") or (hidden_size // head_num) # equivalent to hf's head_dim
heads_per_group = head_num // num_query_groups
qkv_total_dim = head_num + 2 * num_query_groups
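The `head_size` change in this hunk (and the identical one in the starcoder2 converter below) exists because some configs set `kv_channels` explicitly, in which case the head dimension is not `hidden_size // head_num`. A minimal sketch of the fallback, with invented numbers:

```python
# kv_channels, when present and non-None, takes precedence; using `or` also
# covers configs that store kv_channels explicitly as None.
def head_size(cfg: dict) -> int:
    return cfg.get("kv_channels") or (cfg["hidden_size"] // cfg["num_attention_heads"])

assert head_size({"hidden_size": 4096, "num_attention_heads": 32}) == 128
assert head_size({"hidden_size": 4096, "num_attention_heads": 32, "kv_channels": 256}) == 256
assert head_size({"hidden_size": 4096, "num_attention_heads": 32, "kv_channels": None}) == 128
```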
diff --git a/scripts/checkpoint_converters/convert_starcoder2_hf_to_nemo.py b/scripts/checkpoint_converters/convert_starcoder2_hf_to_nemo.py
index fc898c797a9e..862777cf52a8 100644
--- a/scripts/checkpoint_converters/convert_starcoder2_hf_to_nemo.py
+++ b/scripts/checkpoint_converters/convert_starcoder2_hf_to_nemo.py
@@ -28,9 +28,9 @@
import torch
import torch.nn
+from lightning.pytorch.core.saving import _load_state as ptl_load_state
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf
-from pytorch_lightning.core.saving import _load_state as ptl_load_state
-from pytorch_lightning.trainer.trainer import Trainer
from transformers import AutoModelForCausalLM, AutoTokenizer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
@@ -168,7 +168,7 @@ def convert(args):
scaler = None
if precision in [16, '16', '16-mixed']:
scaler = GradScaler(
- init_scale=nemo_config.get('native_amp_init_scale', 2 ** 32),
+ init_scale=nemo_config.get('native_amp_init_scale', 2**32),
growth_interval=nemo_config.get('native_amp_growth_interval', 1000),
hysteresis=nemo_config.get('hysteresis', 2),
)
diff --git a/scripts/checkpoint_converters/convert_starcoder2_nemo_to_hf.py b/scripts/checkpoint_converters/convert_starcoder2_nemo_to_hf.py
index 4b65533b74ec..c418a714be0a 100644
--- a/scripts/checkpoint_converters/convert_starcoder2_nemo_to_hf.py
+++ b/scripts/checkpoint_converters/convert_starcoder2_nemo_to_hf.py
@@ -25,7 +25,7 @@
import torch
import torch.nn
-from pytorch_lightning.trainer.trainer import Trainer
+from lightning.pytorch.trainer.trainer import Trainer
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
@@ -141,7 +141,7 @@ def convert(in_file, precision=None, cpu_only=True) -> None:
num_layers = model.cfg.num_layers
num_query_groups = model.cfg.get("num_query_groups", head_num) # different num_query_groups for 70B
- head_size = hidden_size // head_num
+ head_size = model.cfg.get("kv_channels") or (hidden_size // head_num) # equivalent to hf's head_dim
heads_per_group = head_num // num_query_groups
qkv_total_dim = head_num + 2 * num_query_groups
diff --git a/scripts/checkpoint_converters/convert_starcoder_hf_to_nemo.py b/scripts/checkpoint_converters/convert_starcoder_hf_to_nemo.py
index e600c65e6de1..6b9f30ab427b 100644
--- a/scripts/checkpoint_converters/convert_starcoder_hf_to_nemo.py
+++ b/scripts/checkpoint_converters/convert_starcoder_hf_to_nemo.py
@@ -52,7 +52,7 @@
import os
from typing import Dict
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
import yaml
from omegaconf import OmegaConf
diff --git a/scripts/checkpoint_converters/quantize_model_to_nf4.py b/scripts/checkpoint_converters/quantize_model_to_nf4.py
index db3a48aaa16d..8fbaeb875f7a 100644
--- a/scripts/checkpoint_converters/quantize_model_to_nf4.py
+++ b/scripts/checkpoint_converters/quantize_model_to_nf4.py
@@ -16,7 +16,7 @@
from typing import List
import torch
-from pytorch_lightning import Trainer
+from lightning.pytorch import Trainer
from torch import nn
from nemo.collections.nlp.models.language_modeling.megatron_gpt_sft_model import MegatronGPTSFTModel
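The import rename repeated throughout this patch reflects the standalone `pytorch_lightning` package becoming the `lightning.pytorch` subpackage of the unified `lightning` distribution; the API itself is unchanged. A quick check, assuming `lightning` is installed:

```python
from lightning.pytorch import Trainer  # new-style import used across this PR

trainer = Trainer(accelerator="cpu", devices=1, max_steps=1, logger=False)
print(type(trainer).__module__)  # lightning.pytorch.trainer.trainer
```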
diff --git a/scripts/confidence_ensembles/build_ensemble.py b/scripts/confidence_ensembles/build_ensemble.py
index 4c05e2e4ff3f..dfb3793b42f4 100644
--- a/scripts/confidence_ensembles/build_ensemble.py
+++ b/scripts/confidence_ensembles/build_ensemble.py
@@ -80,8 +80,8 @@
from typing import Dict, List, Optional, Tuple
import joblib
+import lightning.pytorch as pl
import numpy as np
-import pytorch_lightning as pl
from omegaconf import MISSING, DictConfig, OmegaConf
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import confusion_matrix
@@ -215,7 +215,12 @@ class BuildEnsembleConfig:
preserve_frame_confidence=True,
exclude_blank=True,
aggregation="mean",
- method_cfg=ConfidenceMethodConfig(name="entropy", entropy_type="renyi", alpha=0.25, entropy_norm="lin",),
+ method_cfg=ConfidenceMethodConfig(
+ name="entropy",
+ entropy_type="renyi",
+ alpha=0.25,
+ entropy_norm="lin",
+ ),
)
)
temperature: float = 1.0
@@ -499,7 +504,12 @@ def find_best_confidence(
dev_features = np.array(list(zip(*cur_dev_confidences)))
dev_labels = np.array(dev_labels)
pipe, score = train_model_selection(
- training_features, training_labels, dev_features, dev_labels, tune_lr, tune_lr_config,
+ training_features,
+ training_labels,
+ dev_features,
+ dev_labels,
+ tune_lr,
+ tune_lr_config,
)
if max_score < score:
max_score = score
@@ -513,7 +523,7 @@ def find_best_confidence(
@hydra_runner(config_name="BuildEnsembleConfig", schema=BuildEnsembleConfig)
def main(cfg: BuildEnsembleConfig):
# silencing all messages from nemo/ptl to avoid dumping tons of configs to the stdout
- logging.getLogger('pytorch_lightning').setLevel(logging.CRITICAL)
+ logging.getLogger('lightning.pytorch').setLevel(logging.CRITICAL)
logging.getLogger('nemo_logger').setLevel(logging.CRITICAL)
LOG.info(f'Build ensemble config:\n{OmegaConf.to_yaml(cfg)}')
diff --git a/scripts/deploy/nlp/deploy_triton.py b/scripts/deploy/nlp/deploy_triton.py
index e3394726fa1c..154ffc90dc9c 100755
--- a/scripts/deploy/nlp/deploy_triton.py
+++ b/scripts/deploy/nlp/deploy_triton.py
@@ -419,13 +419,14 @@ def nemo_deploy(argv):
LOGGER.info("Triton deploy function will be called.")
nm.deploy()
+ nm.run()
except Exception as error:
LOGGER.error("Error message has occurred during deploy function. Error message: " + str(error))
return
try:
LOGGER.info("Model serving on Triton is will be started.")
- if args.start_rest_service == "True":
+ if args.start_rest_service:
try:
LOGGER.info("REST service will be started.")
uvicorn.run(
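For context on the `== "True"` fix above: when a flag is declared with `type=str`, any non-empty value is truthy, so the old code needed the exact string comparison. The bare truthiness check is only correct if the flag is parsed as a real boolean elsewhere in this script (its declaration is outside this hunk). A sketch of the pitfall, with a hypothetical string-typed flag of the same name:

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--start_rest_service", type=str, default="False")  # hypothetical declaration
args = parser.parse_args(["--start_rest_service", "false"])

print(bool(args.start_rest_service))      # True -- any non-empty string is truthy
print(args.start_rest_service == "True")  # False -- case-sensitive string match
```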
diff --git a/scripts/deploy/nlp/deploy_vllm_triton.py b/scripts/deploy/nlp/deploy_vllm_triton.py
index ab9f13a1b8da..a3cf5e8ec762 100755
--- a/scripts/deploy/nlp/deploy_vllm_triton.py
+++ b/scripts/deploy/nlp/deploy_vllm_triton.py
@@ -41,7 +41,7 @@ def get_args(argv):
"-mt",
"--model_type",
type=str,
- required=False,
+ required=True,
choices=["llama", "mistral", "mixtral", "starcoder2", "gemma"],
help="Type of the model",
)
diff --git a/scripts/diffusion_model_lora_merge/merge_lora_weights_into_base_model.py b/scripts/diffusion_model_lora_merge/merge_lora_weights_into_base_model.py
index 57d9964cad3d..a80d9d2639e3 100644
--- a/scripts/diffusion_model_lora_merge/merge_lora_weights_into_base_model.py
+++ b/scripts/diffusion_model_lora_merge/merge_lora_weights_into_base_model.py
@@ -16,7 +16,7 @@
from typing import Any, Dict
import torch
-from pytorch_lightning import Trainer
+from lightning.pytorch import Trainer
from nemo.collections.multimodal.models.text_to_image.stable_diffusion.ldm.ddpm import MegatronLatentDiffusion
from nemo.collections.multimodal.parts.utils import setup_trainer_and_model_for_inference
diff --git a/scripts/export.py b/scripts/export.py
index acfd3e3e3450..6e0b9b72e15b 100644
--- a/scripts/export.py
+++ b/scripts/export.py
@@ -30,8 +30,8 @@
import sys
import torch
+from lightning.pytorch import Trainer
from omegaconf import OmegaConf
-from pytorch_lightning import Trainer
import nemo
from nemo.core import ModelPT
diff --git a/scripts/export/export_to_trt_llm.py b/scripts/export/export_to_trt_llm.py
index 6b246131b69e..d9e846547c68 100644
--- a/scripts/export/export_to_trt_llm.py
+++ b/scripts/export/export_to_trt_llm.py
@@ -44,7 +44,7 @@ def get_args(argv):
parser.add_argument(
"-mr", "--model_repository", required=True, default=None, type=str, help="Folder for the trt-llm model files"
)
- parser.add_argument("-ng", "--num_gpus", default=1, type=int, help="Number of GPUs for the deployment")
+ parser.add_argument("-ng", "--num_gpus", default=None, type=int, help="Number of GPUs for the deployment")
parser.add_argument("-tps", "--tensor_parallelism_size", default=1, type=int, help="Tensor parallelism size")
parser.add_argument("-pps", "--pipeline_parallelism_size", default=1, type=int, help="Pipeline parallelism size")
parser.add_argument(
@@ -64,7 +64,14 @@ def get_args(argv):
"-mpet", "--max_prompt_embedding_table_size", default=None, type=int, help="Max prompt embedding table size"
)
parser.add_argument(
- "-npkc", "--no_paged_kv_cache", default=False, action='store_true', help="Enable paged kv cache."
+ "-upe",
+ "--use_parallel_embedding",
+ default=False,
+ action='store_true',
+ help="Use parallel embedding.",
+ )
+ parser.add_argument(
+ "-npkc", "--no_paged_kv_cache", default=False, action='store_true', help="Disable paged kv cache."
)
parser.add_argument(
"-drip",
@@ -183,6 +190,7 @@ def nemo_export_trt_llm(argv):
max_num_tokens=args.max_num_tokens,
opt_num_tokens=args.opt_num_tokens,
max_prompt_embedding_table_size=args.max_prompt_embedding_table_size,
+ use_parallel_embedding=args.use_parallel_embedding,
paged_kv_cache=(not args.no_paged_kv_cache),
remove_input_padding=(not args.disable_remove_input_padding),
dtype=args.dtype,
@@ -191,6 +199,7 @@ def nemo_export_trt_llm(argv):
max_lora_rank=args.max_lora_rank,
fp8_quantized=args.export_fp8_quantized,
fp8_kvcache=args.use_fp8_kv_cache,
+ load_model=False,
)
LOGGER.info("Export is successful.")
diff --git a/scripts/llm/ptq.py b/scripts/llm/ptq.py
index 0fd2c5682e8a..c04d32290e5f 100644
--- a/scripts/llm/ptq.py
+++ b/scripts/llm/ptq.py
@@ -92,7 +92,7 @@ def main():
quantizer = quantization.Quantizer(quantization_config, export_config)
model = quantization.load_with_modelopt_layer_spec(args.nemo_checkpoint, args.calib_tp, args.calib_pp)
model = quantizer.quantize(model)
- quantizer.export(model)
+ quantizer.export(model, args.nemo_checkpoint)
if __name__ == '__main__':
diff --git a/scripts/nemo_legacy_import/nlp_checkpoint_port.py b/scripts/nemo_legacy_import/nlp_checkpoint_port.py
index b7541ffdb8cd..058f9e072f5f 100644
--- a/scripts/nemo_legacy_import/nlp_checkpoint_port.py
+++ b/scripts/nemo_legacy_import/nlp_checkpoint_port.py
@@ -30,7 +30,7 @@
import logging
import sys
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import OmegaConf, open_dict
from nemo.collections.nlp.parts.nlp_overrides import NLPSaveRestoreConnector
diff --git a/scripts/nlp_language_modeling/convert_prompt_learning_ckpt_to_nemo.py b/scripts/nlp_language_modeling/convert_prompt_learning_ckpt_to_nemo.py
index 334b3415a93b..3e96186552a5 100644
--- a/scripts/nlp_language_modeling/convert_prompt_learning_ckpt_to_nemo.py
+++ b/scripts/nlp_language_modeling/convert_prompt_learning_ckpt_to_nemo.py
@@ -14,7 +14,7 @@
import os
-from pytorch_lightning.trainer.trainer import Trainer
+from lightning.pytorch.trainer.trainer import Trainer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_prompt_learning_model import (
MegatronGPTPromptLearningModel,
diff --git a/scripts/nlp_language_modeling/hf_t5-v1_1_to_nemo.py b/scripts/nlp_language_modeling/hf_t5-v1_1_to_nemo.py
index 6a94e8f501bb..2361e000ef7e 100644
--- a/scripts/nlp_language_modeling/hf_t5-v1_1_to_nemo.py
+++ b/scripts/nlp_language_modeling/hf_t5-v1_1_to_nemo.py
@@ -53,8 +53,8 @@
from argparse import ArgumentParser
import torch
+from lightning.pytorch import Trainer
from omegaconf.omegaconf import OmegaConf, open_dict
-from pytorch_lightning import Trainer
from transformers import AutoTokenizer, T5ForConditionalGeneration
from nemo.collections.nlp.models.language_modeling.megatron_t5_model import MegatronT5Model
diff --git a/scripts/nlp_language_modeling/merge_lora_weights/merge.py b/scripts/nlp_language_modeling/merge_lora_weights/merge.py
index 55d50502705c..3a6d110997ba 100644
--- a/scripts/nlp_language_modeling/merge_lora_weights/merge.py
+++ b/scripts/nlp_language_modeling/merge_lora_weights/merge.py
@@ -33,8 +33,8 @@
from typing import Any, Dict, List
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from omegaconf import OmegaConf, open_dict
-from pytorch_lightning.trainer.trainer import Trainer
from torch.utils.data import DataLoader, Dataset
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
diff --git a/scripts/nlp_language_modeling/service_launch_scripts/start_retro_model_service.py b/scripts/nlp_language_modeling/service_launch_scripts/start_retro_model_service.py
index ee32f69bf734..dd7c1a3656be 100644
--- a/scripts/nlp_language_modeling/service_launch_scripts/start_retro_model_service.py
+++ b/scripts/nlp_language_modeling/service_launch_scripts/start_retro_model_service.py
@@ -15,8 +15,8 @@
import os
import torch
+from lightning.pytorch import Trainer
from omegaconf.omegaconf import OmegaConf, open_dict
-from pytorch_lightning import Trainer
from nemo.collections.nlp.models.language_modeling.megatron_retrieval_model import MegatronRetrievalModel
from nemo.collections.nlp.modules.common.text_generation_server import MegatronServer
@@ -66,7 +66,10 @@ def main(cfg) -> None:
save_restore_connector.model_extracted_dir = model_path
model_cfg = MegatronRetrievalModel.restore_from(
- model_path, trainer=trainer, return_config=True, save_restore_connector=save_restore_connector,
+ model_path,
+ trainer=trainer,
+ return_config=True,
+ save_restore_connector=save_restore_connector,
)
with open_dict(model_cfg):
@@ -76,7 +79,10 @@ def main(cfg) -> None:
model_cfg.activations_checkpoint_method = None
model = MegatronRetrievalModel.restore_from(
- model_path, trainer=trainer, save_restore_connector=save_restore_connector, override_config_path=model_cfg,
+ model_path,
+ trainer=trainer,
+ save_restore_connector=save_restore_connector,
+ override_config_path=model_cfg,
)
# check whether the DDP is initialized
diff --git a/scripts/speech_recognition/confidence/benchmark_asr_confidence.py b/scripts/speech_recognition/confidence/benchmark_asr_confidence.py
index 9c42ef6cca5b..7208867ff938 100644
--- a/scripts/speech_recognition/confidence/benchmark_asr_confidence.py
+++ b/scripts/speech_recognition/confidence/benchmark_asr_confidence.py
@@ -18,7 +18,7 @@
from pathlib import Path
from typing import Optional
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
from omegaconf import MISSING, OmegaConf
from sklearn.model_selection import ParameterGrid
diff --git a/scripts/speech_recognition/oomptimizer.py b/scripts/speech_recognition/oomptimizer.py
index 3d5eb5a4dbb1..8d215cbc14eb 100755
--- a/scripts/speech_recognition/oomptimizer.py
+++ b/scripts/speech_recognition/oomptimizer.py
@@ -20,7 +20,7 @@
from typing import Iterable, Literal
import click
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
from lhotse import compute_num_samples
from omegaconf import OmegaConf
diff --git a/tests/collections/asr/confidence/test_asr_confidence.py b/tests/collections/asr/confidence/test_asr_confidence.py
index 015264a9debe..89beb61f50bf 100644
--- a/tests/collections/asr/confidence/test_asr_confidence.py
+++ b/tests/collections/asr/confidence/test_asr_confidence.py
@@ -19,8 +19,8 @@
import numpy as np
import pytest
+from lightning.pytorch import Trainer
from omegaconf import OmegaConf
-from pytorch_lightning import Trainer
from nemo.collections.asr.models import ASRModel, EncDecCTCModelBPE, EncDecRNNTBPEModel
from nemo.collections.asr.parts.submodules.ctc_decoding import CTCDecodingConfig
diff --git a/tests/collections/asr/conftest.py b/tests/collections/asr/conftest.py
index dba29f949fb0..a9bc13153164 100644
--- a/tests/collections/asr/conftest.py
+++ b/tests/collections/asr/conftest.py
@@ -19,6 +19,8 @@
import pytest
import torch
+from nemo.collections.asr.models import ASRModel
+
class RNNTTestHelper:
@staticmethod
@@ -353,3 +355,18 @@ def rnnt_test_helper() -> Type[RNNTTestHelper]:
@pytest.fixture(scope="session")
def rnn_loss_sample_data() -> Type[RnntLossSampleData]:
return RnntLossSampleData
+
+
+@pytest.fixture(scope='session')
+def fast_conformer_transducer_model():
+ return ASRModel.from_pretrained("stt_en_fastconformer_transducer_large")
+
+
+@pytest.fixture(scope='session')
+def fast_conformer_ctc_model():
+ return ASRModel.from_pretrained("stt_en_fastconformer_ctc_large")
+
+
+@pytest.fixture(scope='session')
+def fast_conformer_hybrid_model():
+ return ASRModel.from_pretrained("parakeet-tdt_ctc-110m")
diff --git a/tests/collections/asr/decoding/test_rnnt_decoding.py b/tests/collections/asr/decoding/test_rnnt_decoding.py
index 82b5d00bede6..b5250ad5f144 100644
--- a/tests/collections/asr/decoding/test_rnnt_decoding.py
+++ b/tests/collections/asr/decoding/test_rnnt_decoding.py
@@ -22,8 +22,9 @@
from nemo.collections.asr.models import ASRModel
from nemo.collections.asr.modules import RNNTDecoder, RNNTJoint
from nemo.collections.asr.parts.mixins import mixins
-from nemo.collections.asr.parts.submodules import rnnt_beam_decoding as beam_decode
+from nemo.collections.asr.parts.submodules import rnnt_beam_decoding
from nemo.collections.asr.parts.submodules import rnnt_greedy_decoding as greedy_decode
+from nemo.collections.asr.parts.submodules import tdt_beam_decoding
from nemo.collections.asr.parts.submodules.rnnt_decoding import RNNTBPEDecoding, RNNTDecoding, RNNTDecodingConfig
from nemo.collections.asr.parts.utils import rnnt_utils
from nemo.core.utils import numba_utils
@@ -166,6 +167,39 @@ def check_subword_timestamps(hyp: rnnt_utils.Hypothesis, decoding: RNNTBPEDecodi
assert len(hyp.timestep['segment']) == segments_count
+def check_beam_decoding(test_data_dir, beam_config):
+ beam_size = beam_config.pop("beam_size", 1)
+ model, encoded, encoded_len = get_model_encoder_output(test_data_dir, 'nvidia/parakeet-tdt_ctc-110m')
+
+ model_config = model.to_config_dict()
+ durations = list(model_config["model_defaults"]["tdt_durations"])
+
+ beam = tdt_beam_decoding.BeamTDTInfer(
+ model.decoder,
+ model.joint,
+ beam_size=beam_size,
+ return_best_hypothesis=False,
+ durations=durations,
+ **beam_config,
+ )
+
+ enc_out = encoded
+ enc_len = encoded_len
+
+ with torch.no_grad():
+ hyps: rnnt_utils.Hypothesis = beam(encoder_output=enc_out, encoded_lengths=enc_len)[0]
+ _, all_hyps = decode_text_from_nbest_hypotheses(hyps, model.decoding)
+ all_hyps = all_hyps[0]
+
+ print("Beam search algorithm :", beam_config['search_type'])
+ for idx, hyp_ in enumerate(all_hyps):
+ print("Hyp index", idx + 1, "text :", hyp_.text)
+
+ assert len(hyp_.timestep) > 0
+ print("Timesteps", hyp_.timestep)
+ print()
+
+
class TestRNNTDecoding:
@pytest.mark.unit
def test_constructor(self):
@@ -312,10 +346,10 @@ def test_batched_greedy_decoding_preserve_alignments(self, test_data_dir, loop_l
{"search_type": "maes", "maes_num_steps": 3, "maes_expansion_beta": 1, "beam_size": 2},
],
)
- def test_beam_decoding_preserve_alignments(self, test_data_dir, beam_config):
+ def test_rnnt_beam_decoding_preserve_alignments(self, test_data_dir, beam_config):
beam_size = beam_config.pop("beam_size", 1)
model, encoded, encoded_len = get_model_encoder_output(test_data_dir, 'stt_en_conformer_transducer_small')
- beam = beam_decode.BeamRNNTInfer(
+ beam = rnnt_beam_decoding.BeamRNNTInfer(
model.decoder,
model.joint,
beam_size=beam_size,
@@ -442,3 +476,51 @@ def test_char_decoding_compute_timestamps(self, test_data_dir, decoding_strategy
hyps, _ = decoding.rnnt_decoder_predictions_tensor(encoded, encoded_len, return_hypotheses=True)
check_char_timestamps(hyps[0], decoding)
+
+ @pytest.mark.skipif(
+ not NUMBA_RNNT_LOSS_AVAILABLE,
+ reason='RNNTLoss has not been compiled with appropriate numba version.',
+ )
+ @pytest.mark.with_downloads
+ @pytest.mark.unit
+ @pytest.mark.parametrize(
+ "beam_config",
+ [
+ {
+ "search_type": "default",
+ "beam_size": 2,
+ },
+ {"search_type": "maes", "maes_num_steps": 2, "maes_expansion_beta": 2, "beam_size": 2},
+ {"search_type": "maes", "maes_num_steps": 2, "maes_expansion_beta": 1, "beam_size": 4},
+ ],
+ )
+ def test_tdt_beam_decoding(self, test_data_dir, beam_config):
+ check_beam_decoding(test_data_dir, beam_config)
+
+ @pytest.mark.skipif(
+ not NUMBA_RNNT_LOSS_AVAILABLE,
+ reason='RNNTLoss has not been compiled with appropriate numba version.',
+ )
+ @pytest.mark.with_downloads
+ @pytest.mark.unit
+ @pytest.mark.parametrize(
+ "beam_config",
+ [
+ {
+ "search_type": "maes",
+ "maes_num_steps": 2,
+ "maes_expansion_beta": 1,
+ "beam_size": 4,
+ "ngram_lm_alpha": 0.3,
+ },
+ ],
+ )
+ def test_tdt_beam_decoding_with_kenlm(self, test_data_dir, beam_config):
+ # skipping if kenlm is not installed
+ pytest.importorskip("kenlm", reason="Skipping test because 'kenlm' is not installed.")
+
+ kenlm_model_path = os.path.join(
+ test_data_dir, "asr", "kenlm_ngram_lm", "parakeet-tdt_ctc-110m-libri-1024.kenlm.tmp.arpa"
+ )
+ beam_config["ngram_lm_model"] = kenlm_model_path
+ check_beam_decoding(test_data_dir, beam_config)
diff --git a/tests/collections/asr/mixins/test_transcription.py b/tests/collections/asr/mixins/test_transcription.py
index 1a6f38681d0c..6e2d5fe16c68 100644
--- a/tests/collections/asr/mixins/test_transcription.py
+++ b/tests/collections/asr/mixins/test_transcription.py
@@ -23,7 +23,6 @@
from torch.utils.data import DataLoader, Dataset
from nemo.collections.asr.data.audio_to_text import _speech_collate_fn
-from nemo.collections.asr.models import ASRModel
from nemo.collections.asr.parts.mixins import TranscribeConfig, TranscriptionMixin
from nemo.collections.asr.parts.mixins.transcription import GenericTranscriptionType
from nemo.collections.asr.parts.utils import Hypothesis
@@ -44,6 +43,23 @@ def forward(self, x):
return out
+@pytest.mark.with_downloads()
+@pytest.fixture()
+def audio_files(test_data_dir):
+ """
+ Returns a list of audio files for testing.
+ """
+ import soundfile as sf
+
+ audio_file1 = os.path.join(test_data_dir, "asr", "train", "an4", "wav", "an46-mmap-b.wav")
+ audio_file2 = os.path.join(test_data_dir, "asr", "train", "an4", "wav", "an104-mrcb-b.wav")
+
+ audio1, _ = sf.read(audio_file1, dtype='float32')
+ audio2, _ = sf.read(audio_file2, dtype='float32')
+
+ return audio1, audio2
+
+
class TranscribableDummy(DummyModel, TranscriptionMixin):
def _transcribe_on_begin(self, audio, trcfg: TranscribeConfig):
super()._transcribe_on_begin(audio, trcfg)
@@ -297,12 +313,11 @@ class OverrideConfig(TranscribeConfig):
pytest.mark.with_downloads()
@pytest.mark.unit
- def test_transcribe_return_hypothesis(self, test_data_dir):
- model = ASRModel.from_pretrained("stt_en_conformer_ctc_small")
+ def test_transcribe_return_hypothesis(self, test_data_dir, fast_conformer_ctc_model):
audio_file = os.path.join(test_data_dir, "asr", "train", "an4", "wav", "an46-mmap-b.wav")
- # Numpy array test
- outputs = model.transcribe(audio_file, batch_size=1, return_hypotheses=True)
+ # Audio file test
+ outputs = fast_conformer_ctc_model.transcribe(audio_file, batch_size=1, return_hypotheses=True)
assert len(outputs) == 1
assert isinstance(outputs[0], Hypothesis)
@@ -313,62 +328,82 @@ def test_transcribe_return_hypothesis(self, test_data_dir):
@pytest.mark.with_downloads()
@pytest.mark.unit
- def test_transcribe_tensor(self, test_data_dir):
- model = ASRModel.from_pretrained("stt_en_conformer_ctc_small")
-
- # Load audio file
- import soundfile as sf
-
- audio_file = os.path.join(test_data_dir, "asr", "train", "an4", "wav", "an46-mmap-b.wav")
- audio, sr = sf.read(audio_file, dtype='float32')
+ def test_transcribe_tensor(self, audio_files, fast_conformer_ctc_model):
+ audio, _ = audio_files
# Numpy array test
- outputs = model.transcribe(audio, batch_size=1)
+ outputs = fast_conformer_ctc_model.transcribe(audio, batch_size=1)
assert len(outputs) == 1
assert isinstance(outputs[0], str)
@pytest.mark.with_downloads()
@pytest.mark.unit
- def test_transcribe_multiple_tensor(self, test_data_dir):
- model = ASRModel.from_pretrained("stt_en_conformer_ctc_small")
-
- # Load audio file
- import soundfile as sf
-
- audio_file = os.path.join(test_data_dir, "asr", "train", "an4", "wav", "an46-mmap-b.wav")
- audio, sr = sf.read(audio_file, dtype='float32')
+ def test_transcribe_multiple_tensor(self, audio_files, fast_conformer_ctc_model):
- audio_file_2 = os.path.join(test_data_dir, "asr", "train", "an4", "wav", "an104-mrcb-b.wav")
- audio_2, sr = sf.read(audio_file_2, dtype='float32')
+ audio, audio_2 = audio_files
# Mix second audio to torch.tensor()
audio_2 = torch.tensor(audio_2)
# Numpy array test
- outputs = model.transcribe([audio, audio_2], batch_size=2)
+ outputs = fast_conformer_ctc_model.transcribe([audio, audio_2], batch_size=2)
assert len(outputs) == 2
assert isinstance(outputs[0], str)
assert isinstance(outputs[1], str)
@pytest.mark.with_downloads()
@pytest.mark.unit
- def test_transcribe_dataloader(self, test_data_dir):
- model = ASRModel.from_pretrained("stt_en_conformer_ctc_small")
-
- # Load audio file
- import soundfile as sf
-
- audio_file = os.path.join(test_data_dir, "asr", "train", "an4", "wav", "an46-mmap-b.wav")
- audio, sr = sf.read(audio_file, dtype='float32')
+ def test_transcribe_dataloader(self, audio_files, fast_conformer_ctc_model):
- audio_file2 = os.path.join(test_data_dir, "asr", "train", "an4", "wav", "an152-mwhw-b.wav")
- audio2, sr = sf.read(audio_file2, dtype='float32')
+ audio, audio2 = audio_files
dataset = DummyDataset([audio, audio2])
collate_fn = lambda x: _speech_collate_fn(x, pad_id=0)
dataloader = DataLoader(dataset, batch_size=2, shuffle=False, num_workers=0, collate_fn=collate_fn)
# DataLoader test
- outputs = model.transcribe(dataloader, batch_size=1)
+ outputs = fast_conformer_ctc_model.transcribe(dataloader, batch_size=1)
assert len(outputs) == 2
assert isinstance(outputs[0], str)
assert isinstance(outputs[1], str)
+
+ @pytest.mark.with_downloads()
+ @pytest.mark.unit
+ def test_timestamps_with_transcribe(self, audio_files, fast_conformer_ctc_model):
+ audio1, audio2 = audio_files
+
+ output = fast_conformer_ctc_model.transcribe([audio1, audio2], timestamps=True)
+
+ # check len of output
+ assert len(output) == 2
+
+ # check hypothesis object
+ assert isinstance(output[0], Hypothesis)
+ # check transcript
+ assert output[0].text == 'stop'
+ assert output[1].text == 'start'
+
+ # check timestamp
+ assert output[0].timestep['segment'][0]['start'] == pytest.approx(0.4)
+ assert output[0].timestep['segment'][0]['end'] == pytest.approx(0.48)
+
+ @pytest.mark.with_downloads()
+ @pytest.mark.unit
+ def test_timestamps_with_transcribe_hybrid(self, audio_files, fast_conformer_hybrid_model):
+ audio1, audio2 = audio_files
+
+ output = fast_conformer_hybrid_model.transcribe([audio1, audio2], timestamps=True)
+
+ # check len of output
+ assert len(output) == 2
+
+ output = output[1] # Transducer returns tuple
+
+ # check hypothesis object
+ assert isinstance(output[0], Hypothesis)
+ # check transcript
+ assert output[0].text == 'Stop?'
+ assert output[1].text == 'Start.'
+
+ # check timestamp
+ assert output[0].timestep['segment'][0]['start'] == pytest.approx(0.48)
+ assert output[0].timestep['segment'][0]['end'] == pytest.approx(0.72)
diff --git a/tests/collections/asr/numba/rnnt_loss/test_rnnt_pytorch.py b/tests/collections/asr/numba/rnnt_loss/test_rnnt_pytorch.py
index 1a29a14f540d..9f38bf6dbe8a 100644
--- a/tests/collections/asr/numba/rnnt_loss/test_rnnt_pytorch.py
+++ b/tests/collections/asr/numba/rnnt_loss/test_rnnt_pytorch.py
@@ -33,6 +33,7 @@
if torch.cuda.is_available():
DEVICES.append('cuda')
+CUDA_ONLY_DEVICE = ['cuda']
DTYPES = [np.float32]
if numba_utils.is_numba_cuda_fp16_supported():
@@ -542,65 +543,86 @@ def test_case_randomized_act_label(self, device):
class TestTDTLoss:
@pytest.mark.unit
- @pytest.mark.parametrize('device', DEVICES)
+ @pytest.mark.parametrize('device', CUDA_ONLY_DEVICE)
def test_case_randomized_act_label(self, device):
- if device == 'cuda':
- numba_utils.skip_numba_cuda_test_if_unsupported(__NUMBA_MINIMUM_VERSION__)
+ numba_utils.skip_numba_cuda_test_if_unsupported(__NUMBA_MINIMUM_VERSION__)
- B, T, U, V = 4, 8, 4, 8 # here V is number of non blank labels
- durations = [0, 1, 2, 3, 4, 5]
- sigma = 0.05
+ B, T, U, V = 4, 8, 4, 8 # here V is number of non blank labels
+ durations = [0, 1, 2, 3, 4, 5]
+ sigma = 0.05
- acts = torch.rand([B, T, U, V + 1 + len(durations)])
- labels = [[random.randrange(0, V) for i in range(U - 1)] for j in range(B)]
+ acts = torch.rand([B, T, U, V + 1 + len(durations)])
+ labels = [[random.randrange(0, V) for i in range(U - 1)] for j in range(B)]
- fn_pt = TDTLossNumba(blank=V, reduction='sum', durations=durations, sigma=sigma)
- pt_cost, pt_grads = wrap_and_call(fn_pt, acts, labels, device)
+ fn_pt = TDTLossNumba(blank=V, reduction='sum', durations=durations, sigma=sigma)
+ pt_cost, pt_grads = wrap_and_call(fn_pt, acts, labels, device)
- fn_ag = TDTLossPytorch(
- blank=V, reduction='sum', durations=durations, sigma=sigma
- ) # ag for automatic gradient computation
- ag_cost, ag_grads = wrap_and_call(fn_ag, acts, labels, device)
+ fn_ag = TDTLossPytorch(
+ blank=V, reduction='sum', durations=durations, sigma=sigma
+ ) # ag for automatic gradient computation
+ ag_cost, ag_grads = wrap_and_call(fn_ag, acts, labels, device)
- assert np.allclose(pt_cost, ag_cost, rtol=1e-6), "tdt costs mismatch."
- assert np.allclose(pt_grads, ag_grads, rtol=1e-2), "td gradient mismatch."
+ assert np.allclose(pt_cost, ag_cost, rtol=1e-6), "tdt costs mismatch."
+        assert np.allclose(pt_grads, ag_grads, rtol=1e-2), "tdt gradient mismatch."
@pytest.mark.unit
- @pytest.mark.parametrize('device', DEVICES)
+ @pytest.mark.parametrize('device', CUDA_ONLY_DEVICE)
+ def test_case_randomized_act_label_no_0_duration(self, device):
+ numba_utils.skip_numba_cuda_test_if_unsupported(__NUMBA_MINIMUM_VERSION__)
+
+ B, T, U, V = 4, 8, 4, 8 # here V is number of non blank labels
+ durations = [1, 2, 3, 4, 5]
+ sigma = 0.05
+
+ acts = torch.rand([B, T, U, V + 1 + len(durations)])
+ labels = [[random.randrange(0, V) for i in range(U - 1)] for j in range(B)]
+
+ fn_pt = TDTLossNumba(blank=V, reduction='sum', durations=durations, sigma=sigma)
+ pt_cost, pt_grads = wrap_and_call(fn_pt, acts, labels, device)
+
+ fn_ag = TDTLossPytorch(
+ blank=V, reduction='sum', durations=durations, sigma=sigma
+ ) # ag for automatic gradient computation
+ ag_cost, ag_grads = wrap_and_call(fn_ag, acts, labels, device)
+
+ assert np.allclose(pt_cost, ag_cost, rtol=1e-6), "tdt costs mismatch."
+        assert np.allclose(pt_grads, ag_grads, rtol=1e-2), "tdt gradient mismatch."
+
+ @pytest.mark.unit
+ @pytest.mark.parametrize('device', CUDA_ONLY_DEVICE)
def test_case_fixed_case_act_label(self, device):
- if device == 'cuda':
- numba_utils.skip_numba_cuda_test_if_unsupported(__NUMBA_MINIMUM_VERSION__)
+ numba_utils.skip_numba_cuda_test_if_unsupported(__NUMBA_MINIMUM_VERSION__)
- B, T, U, V = 1, 3, 2, 3 # here V is number of non blank labels
- durations = [0, 1, 2]
- sigma = 0.05
+ B, T, U, V = 1, 3, 2, 3 # here V is number of non blank labels
+ durations = [0, 1, 2]
+ sigma = 0.05
- acts = torch.zeros([B, T, U, V + 1 + len(durations)])
- labels = [[(i + j) % (V - 1) for i in range(U - 1)] for j in range(B)]
+ acts = torch.zeros([B, T, U, V + 1 + len(durations)])
+ labels = [[(i + j) % (V - 1) for i in range(U - 1)] for j in range(B)]
- fn_pt = TDTLossNumba(blank=V, reduction='sum', durations=durations, sigma=sigma)
- pt_cost, pt_grads = wrap_and_call(fn_pt, acts, labels, device)
+ fn_pt = TDTLossNumba(blank=V, reduction='sum', durations=durations, sigma=sigma)
+ pt_cost, pt_grads = wrap_and_call(fn_pt, acts, labels, device)
- expected_cost = 4.155739
- expected_grads = [
+ expected_cost = 4.155739
+ expected_grads = [
+ [
[
- [
- [-0.64962804, 0.25, 0.25, 0.14962798, 0.2672583, -0.16792619, -0.09933221],
- [0.01651875, 0.01651875, 0.01651875, -0.04955626, 0.022025, -0.01227201, -0.009753],
- ],
- [
- [-0.04892651, 0.01714851, 0.01714851, 0.01462949, -0.01143234, -0.01143234, 0.02286467],
- [0.12531489, 0.12531489, 0.12531489, -0.37594467, 0.16708651, 0.13027048, -0.29735702],
- ],
- [
- [-0.02572276, 0.00857425, 0.00857425, 0.00857425, -0.02286468, 0.01143234, 0.01143234],
- [0.13388914, 0.13388914, 0.13388914, -0.40166742, 0.17851885, -0.35703772, 0.17851885],
- ],
- ]
+ [-0.64962804, 0.25, 0.25, 0.14962798, 0.2672583, -0.16792619, -0.09933221],
+ [0.01651875, 0.01651875, 0.01651875, -0.04955626, 0.022025, -0.01227201, -0.009753],
+ ],
+ [
+ [-0.04892651, 0.01714851, 0.01714851, 0.01462949, -0.01143234, -0.01143234, 0.02286467],
+ [0.12531489, 0.12531489, 0.12531489, -0.37594467, 0.16708651, 0.13027048, -0.29735702],
+ ],
+ [
+ [-0.02572276, 0.00857425, 0.00857425, 0.00857425, -0.02286468, 0.01143234, 0.01143234],
+ [0.13388914, 0.13388914, 0.13388914, -0.40166742, 0.17851885, -0.35703772, 0.17851885],
+ ],
]
+ ]
- assert np.allclose(pt_cost, expected_cost, rtol=1e-6), "tdt costs mismatch."
- assert np.allclose(pt_grads, expected_grads, rtol=1e-2), "td gradient mismatch."
+ assert np.allclose(pt_cost, expected_cost, rtol=1e-6), "tdt costs mismatch."
+        assert np.allclose(pt_grads, expected_grads, rtol=1e-2), "tdt gradient mismatch."
if __name__ == "__main__":
diff --git a/tests/collections/asr/test_asr_context_biasing.py b/tests/collections/asr/test_asr_context_biasing.py
index 0fa76fdfb95d..b23b12655a8d 100644
--- a/tests/collections/asr/test_asr_context_biasing.py
+++ b/tests/collections/asr/test_asr_context_biasing.py
@@ -19,7 +19,7 @@
import numpy as np
import pytest
import torch
-from pytorch_lightning import Trainer
+from lightning.pytorch import Trainer
from nemo.collections.asr.models import EncDecCTCModelBPE
from nemo.collections.asr.parts import context_biasing
@@ -105,25 +105,43 @@ def test_merge_alignment_with_ws_hyps(self, conformer_ctc_bpe_model):
# ctc argmax predictions
preds = np.array([120, 29, blank_idx, blank_idx])
pred_text, raw_text = context_biasing.merge_alignment_with_ws_hyps(
- preds, asr_model, ws_results, decoder_type="ctc", blank_idx=blank_idx,
+ preds,
+ asr_model,
+ ws_results,
+ decoder_type="ctc",
+ blank_idx=blank_idx,
)
assert raw_text == "gp"
assert pred_text == "gpu"
# rnnt token predictions
preds = rnnt_utils.Hypothesis(
- y_sequence=torch.tensor([120, 29]), score=0.0, timestep=torch.tensor([0, 1, 2, 3]),
+ y_sequence=torch.tensor([120, 29]),
+ score=0.0,
+ timestep=torch.tensor([0, 1, 2, 3]),
)
pred_text, raw_text = context_biasing.merge_alignment_with_ws_hyps(
- preds, asr_model, ws_results, decoder_type="rnnt", blank_idx=blank_idx,
+ preds,
+ asr_model,
+ ws_results,
+ decoder_type="rnnt",
+ blank_idx=blank_idx,
)
assert raw_text == "gp"
assert pred_text == "gpu"
# rnnt empty token predictions
- preds = rnnt_utils.Hypothesis(y_sequence=[], score=0.0, timestep=[],)
+ preds = rnnt_utils.Hypothesis(
+ y_sequence=[],
+ score=0.0,
+ timestep=[],
+ )
pred_text, raw_text = context_biasing.merge_alignment_with_ws_hyps(
- preds, asr_model, ws_results, decoder_type="rnnt", blank_idx=blank_idx,
+ preds,
+ asr_model,
+ ws_results,
+ decoder_type="rnnt",
+ blank_idx=blank_idx,
)
assert raw_text == ""
assert pred_text == "gpu"
diff --git a/tests/collections/asr/test_asr_interctc_models.py b/tests/collections/asr/test_asr_interctc_models.py
index 8d5e4b0b689c..a8d7101033ab 100644
--- a/tests/collections/asr/test_asr_interctc_models.py
+++ b/tests/collections/asr/test_asr_interctc_models.py
@@ -13,8 +13,8 @@
# limitations under the License.
from typing import Dict
+import lightning.pytorch as pl
import pytest
-import pytorch_lightning as pl
import torch
from omegaconf import DictConfig, ListConfig
@@ -68,7 +68,8 @@ def squeezeformer_encoder_config() -> Dict:
class TestInterCTCLoss:
@pytest.mark.unit
@pytest.mark.parametrize(
- "model_class", [EncDecCTCModel, EncDecHybridRNNTCTCModel],
+ "model_class",
+ [EncDecCTCModel, EncDecHybridRNNTCTCModel],
)
@pytest.mark.parametrize(
"encoder_config",
@@ -241,10 +242,12 @@ def __getitem__(self, idx):
trainer.fit(
asr_model,
train_dataloaders=torch.utils.data.DataLoader(
- DummyDataset([input_signal, input_length, target, target_length]), collate_fn=lambda x: x[0],
+ DummyDataset([input_signal, input_length, target, target_length]),
+ collate_fn=lambda x: x[0],
),
val_dataloaders=torch.utils.data.DataLoader(
- DummyDataset([input_signal, input_length, target, target_length]), collate_fn=lambda x: x[0],
+ DummyDataset([input_signal, input_length, target, target_length]),
+ collate_fn=lambda x: x[0],
),
)
required_metrics = ['final_loss'] if len(loss_weights) > 0 else []
@@ -264,7 +267,8 @@ def __getitem__(self, idx):
trainer.test(
asr_model,
dataloaders=torch.utils.data.DataLoader(
- DummyDataset([input_signal, input_length, target, target_length]), collate_fn=lambda x: x[0],
+ DummyDataset([input_signal, input_length, target, target_length]),
+ collate_fn=lambda x: x[0],
),
)
required_metrics = [f'inter_ctc_loss_l{idx}' for idx in apply_at_layers]
diff --git a/tests/collections/asr/test_asr_local_attn.py b/tests/collections/asr/test_asr_local_attn.py
index 257dc0949af3..3013c0efbddf 100644
--- a/tests/collections/asr/test_asr_local_attn.py
+++ b/tests/collections/asr/test_asr_local_attn.py
@@ -15,8 +15,8 @@
import shutil
import tempfile
+import lightning.pytorch as pl
import pytest
-import pytorch_lightning as pl
import torch
from omegaconf import DictConfig
@@ -89,10 +89,12 @@ def test_change_save_restore(self):
@pytest.mark.unit
@pytest.mark.parametrize(
- "global_tokens", [0, 1, 4],
+ "global_tokens",
+ [0, 1, 4],
)
@pytest.mark.parametrize(
- "global_tokens_spacing", [1, 4],
+ "global_tokens_spacing",
+ [1, 4],
)
def test_train(self, global_tokens, global_tokens_spacing):
preprocessor_config = {'_target_': 'nemo.collections.asr.modules.AudioToMelSpectrogramPreprocessor'}
@@ -178,15 +180,18 @@ def __getitem__(self, idx):
trainer.fit(
asr_model,
train_dataloaders=torch.utils.data.DataLoader(
- DummyDataset([input_signal, input_length, target, target_length]), collate_fn=lambda x: x[0],
+ DummyDataset([input_signal, input_length, target, target_length]),
+ collate_fn=lambda x: x[0],
),
val_dataloaders=torch.utils.data.DataLoader(
- DummyDataset([input_signal, input_length, target, target_length]), collate_fn=lambda x: x[0],
+ DummyDataset([input_signal, input_length, target, target_length]),
+ collate_fn=lambda x: x[0],
),
)
trainer.test(
asr_model,
dataloaders=torch.utils.data.DataLoader(
- DummyDataset([input_signal, input_length, target, target_length]), collate_fn=lambda x: x[0],
+ DummyDataset([input_signal, input_length, target, target_length]),
+ collate_fn=lambda x: x[0],
),
)
diff --git a/tests/collections/common/test_ema.py b/tests/collections/common/test_ema.py
index 98f733f1c568..18ee04e371e2 100644
--- a/tests/collections/common/test_ema.py
+++ b/tests/collections/common/test_ema.py
@@ -15,13 +15,13 @@
import os.path
from typing import Any, Dict, Union
+import lightning.pytorch as pl
import pytest
-import pytorch_lightning as pl
import torch
+from lightning.pytorch import Callback, Trainer
+from lightning.pytorch.utilities.exceptions import MisconfigurationException
+from lightning.pytorch.utilities.types import STEP_OUTPUT
from omegaconf import DictConfig, OmegaConf
-from pytorch_lightning import Callback, Trainer
-from pytorch_lightning.utilities.exceptions import MisconfigurationException
-from pytorch_lightning.utilities.types import STEP_OUTPUT
from nemo.collections.common.callbacks import EMA
from nemo.collections.common.callbacks.ema import EMAOptimizer
@@ -349,7 +349,12 @@ class TestEMATrain:
@pytest.mark.parametrize("validate_original_weights", [True, False])
@pytest.mark.run_only_on('GPU')
def test_ema_run_cuda(
- self, test_data_dir, precision, accumulate_grad_batches, validate_original_weights, tmpdir,
+ self,
+ test_data_dir,
+ precision,
+ accumulate_grad_batches,
+ validate_original_weights,
+ tmpdir,
):
self.run_training_test(
accumulate_grad_batches=accumulate_grad_batches,
diff --git a/tests/collections/llm/bitexact/mixtral/run.sh b/tests/collections/llm/bitexact/mixtral/run.sh
index c32dbbc95b98..87bf7c382b99 100644
--- a/tests/collections/llm/bitexact/mixtral/run.sh
+++ b/tests/collections/llm/bitexact/mixtral/run.sh
@@ -43,4 +43,4 @@ python3 /workspace/tests/collections/llm/bitexact/mixtral/pretrain_mini_mixtral.
# Compare outputs
python3 /workspace/tests/collections/llm/bitexact/mixtral/compare_ckpts.py \
- "$NEMO_OUTPUT_PATH/checkpoints/--None=0.0000-epoch=0/" "$MCORE_OUTPUT_PATH/iter_0000010/"
+ "$NEMO_OUTPUT_PATH/checkpoints/--None=0.0000-epoch=0-consumed_samples=20.0/weights" "$MCORE_OUTPUT_PATH/iter_0000010/"
diff --git a/tests/collections/llm/common.py b/tests/collections/llm/common.py
index 95b8bc0de584..c17243936bd1 100644
--- a/tests/collections/llm/common.py
+++ b/tests/collections/llm/common.py
@@ -14,7 +14,7 @@
import os
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch
from nemo import lightning as nl
diff --git a/tests/collections/llm/gpt/model/megatron_ssm_finetuning.py b/tests/collections/llm/gpt/model/megatron_ssm_finetuning.py
index 67174974f9a3..d0f95811edf5 100644
--- a/tests/collections/llm/gpt/model/megatron_ssm_finetuning.py
+++ b/tests/collections/llm/gpt/model/megatron_ssm_finetuning.py
@@ -109,7 +109,7 @@ def get_args():
global_batch_size=4,
tokenizer=model.tokenizer,
num_workers=0,
- pad_to_max_length=True,
+ dataset_kwargs={"pad_to_max_length": True},
)
app_state = _setup(
diff --git a/tests/collections/llm/gpt/model/megatron_ssm_pretraining.py b/tests/collections/llm/gpt/model/megatron_ssm_pretraining.py
index d7ecaafaaf8c..55bea59d6274 100644
--- a/tests/collections/llm/gpt/model/megatron_ssm_pretraining.py
+++ b/tests/collections/llm/gpt/model/megatron_ssm_pretraining.py
@@ -16,9 +16,11 @@
## There are no guarantees that this script is up-to-date with latest NeMo.
import argparse
+
import torch
+from lightning.pytorch.loggers import TensorBoardLogger
from megatron.core.optimizer import OptimizerConfig
-from pytorch_lightning.loggers import TensorBoardLogger
+
from nemo import lightning as nl
from nemo.collections import llm
from nemo.collections.llm.api import train
diff --git a/tests/collections/llm/gpt/model/test_nemotron.py b/tests/collections/llm/gpt/model/test_nemotron.py
index 5fdd1547a75d..994227518b6f 100644
--- a/tests/collections/llm/gpt/model/test_nemotron.py
+++ b/tests/collections/llm/gpt/model/test_nemotron.py
@@ -16,8 +16,8 @@
from nemo.collections.llm.gpt.model.nemotron import (
Nemotron3Config4B,
Nemotron3Config8B,
+ Nemotron3Config22B,
Nemotron4Config15B,
- Nemotron4Config22B,
Nemotron4Config340B,
NemotronConfig,
)
@@ -72,28 +72,28 @@ def test_nemotron3_config_8b():
assert config.init_method_std == 0.010
-def test_nemotron4_config_15b():
- config = Nemotron4Config15B()
- assert config.num_layers == 32
+def test_nemotron3_config_22b():
+ config = Nemotron3Config22B()
+ assert config.num_layers == 40
assert config.seq_length == 4096
assert config.hidden_size == 6144
assert config.ffn_hidden_size == 24576
assert config.num_attention_heads == 48
- assert config.num_query_groups == 8
+ assert config.num_query_groups == 48
assert config.kv_channels == 6144 // 48
- assert config.init_method_std == 0.0134
+ assert config.init_method_std == 0.008
-def test_nemotron4_config_22b():
- config = Nemotron4Config22B()
- assert config.num_layers == 40
+def test_nemotron4_config_15b():
+ config = Nemotron4Config15B()
+ assert config.num_layers == 32
assert config.seq_length == 4096
assert config.hidden_size == 6144
assert config.ffn_hidden_size == 24576
assert config.num_attention_heads == 48
- assert config.num_query_groups == 48
+ assert config.num_query_groups == 8
assert config.kv_channels == 6144 // 48
- assert config.init_method_std == 0.008
+ assert config.init_method_std == 0.0134
def test_nemotron4_config_340b():
diff --git a/tests/collections/llm/gpt_finetuning.py b/tests/collections/llm/gpt_finetuning.py
index 7eaa7744729c..91796585bf96 100644
--- a/tests/collections/llm/gpt_finetuning.py
+++ b/tests/collections/llm/gpt_finetuning.py
@@ -94,8 +94,8 @@ def get_args():
),
)
- if args.peft == 'lora':
- peft = llm.peft.LoRA()
+ if args.peft in ['lora', 'dora']:
+ peft = llm.peft.PEFT_STR2CLS[args.peft]()
else:
peft = None
diff --git a/tests/collections/llm/lora_mistralai.py b/tests/collections/llm/lora_mistralai.py
new file mode 100644
index 000000000000..0415569304ac
--- /dev/null
+++ b/tests/collections/llm/lora_mistralai.py
@@ -0,0 +1,139 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+
+import lightning.pytorch as pl
+import torch
+from megatron.core.optimizer import OptimizerConfig
+
+from nemo import lightning as nl
+from nemo.collections import llm
+from nemo.lightning.io.mixin import track_io
+
+
+def get_args():
+ parser = argparse.ArgumentParser(description='Finetune a small GPT model using NeMo 2.0')
+    parser.add_argument('--model', type=str.lower, choices=['mistral', 'mixtral'], help="model family to finetune")
+    parser.add_argument('--max-steps', type=int, default=9, help="maximum number of training steps")
+ parser.add_argument('--mbs', type=int, default=2, help="micro batch size")
+ parser.add_argument('--gbs', type=int, default=4, help="global batch size")
+ parser.add_argument('--tp', type=int, default=1, help="tensor parallel size")
+ parser.add_argument('--ep', type=int, default=1, help="expert parallel size")
+ parser.add_argument('--dist-opt', action='store_true', help='use dist opt')
+ return parser.parse_args()
+
+
+def trainer(devices, tp, ep, sp, max_steps) -> nl.Trainer:
+ strategy = nl.MegatronStrategy(
+ tensor_model_parallel_size=tp,
+ expert_model_parallel_size=ep,
+ sequence_parallel=sp,
+ )
+
+ return nl.Trainer(
+ devices=max(ep, tp),
+ max_steps=max_steps,
+ accelerator="gpu",
+ strategy=strategy,
+ plugins=nl.MegatronMixedPrecision(precision="bf16-mixed"),
+ log_every_n_steps=1,
+ limit_val_batches=0,
+ val_check_interval=0,
+ num_sanity_val_steps=0,
+ )
+
+
+@track_io
+class OrdTokenizer:
+ def __init__(self, vocab_size=30_000, num_reserved_tokens=128, special_token_names=['bos_id', 'eos_id', 'pad_id']):
+ self.vocab_size = vocab_size
+ self.num_reserved_tokens = num_reserved_tokens
+ self.special_token_names = special_token_names
+ assert len(self.special_token_names) < num_reserved_tokens
+
+ def __getattr__(self, name):
+ if name in self.__dict__.get('special_token_names', {}):
+ return self.__dict__['special_token_names'].index(name)
+ elif name in self.__dict__:
+ return self.__dict__[name]
+ else:
+ raise AttributeError
+
+ def text_to_ids(self, text):
+ token_ids = list(map(lambda x: self.num_reserved_tokens + ord(x), list(text)))
+ assert max(token_ids) < self.vocab_size
+ return token_ids
+
+
+def logger() -> nl.NeMoLogger:
+ ckpt = nl.ModelCheckpoint(
+ save_last=True,
+ every_n_train_steps=10,
+ monitor="reduced_train_loss",
+ save_top_k=1,
+ save_on_train_epoch_end=True,
+ save_optim_on_train_end=True,
+ )
+
+ return nl.NeMoLogger(
+ name="nemo2_peft",
+ log_dir="/tmp/peft_logs",
+ use_datetime_version=False, # must be false if using auto resume
+ ckpt=ckpt,
+ wandb=None,
+ )
+
+
+def squad(mbs, gbs) -> pl.LightningDataModule:
+ return llm.SquadDataModule(seq_length=2048, micro_batch_size=mbs, global_batch_size=gbs, num_workers=0)
+
+
+def mixtral_8x7b() -> tuple[pl.LightningModule, llm.peft.LoRA]:
+ tokenizer = OrdTokenizer()
+ model = llm.MixtralModel(llm.MixtralConfig8x7B(num_layers=2), tokenizer=tokenizer)
+ lora = llm.peft.LoRA()
+ return model, lora
+
+
+def mistral_7b() -> tuple[pl.LightningModule, llm.peft.LoRA]:
+ tokenizer = OrdTokenizer()
+ model = llm.MistralModel(llm.MistralConfig7B(num_layers=2), tokenizer=tokenizer)
+ lora = llm.peft.LoRA()
+ return model, lora
+
+
+if __name__ == '__main__':
+ args = get_args()
+ if args.model == 'mistral':
+ model, lora = mistral_7b()
+ else:
+ model, lora = mixtral_8x7b()
+ llm.finetune(
+ model=model,
+ data=squad(args.mbs, args.gbs),
+ trainer=trainer(args.tp, args.tp, args.ep, args.tp > 1, args.max_steps),
+ peft=lora,
+ log=logger(),
+ optim=nl.MegatronOptimizerModule(
+ config=OptimizerConfig(
+ optimizer="adam",
+ lr=0.0001,
+ adam_beta2=0.98,
+ use_distributed_optimizer=args.dist_opt,
+ clip_grad=1.0,
+ bf16=True,
+ ),
+ ),
+ )
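Two notes on the new script. `OrdTokenizer` is a deliberately tiny CI tokenizer: the first `num_reserved_tokens` ids are reserved (each special token resolves to its index in `special_token_names` via `__getattr__`), and everything else is a raw code point shifted by that offset. A quick sanity check, assuming it runs in the new file's context:

```python
tok = OrdTokenizer()  # vocab_size=30_000, num_reserved_tokens=128
assert (tok.bos_id, tok.eos_id, tok.pad_id) == (0, 1, 2)          # indices into special_token_names
assert tok.text_to_ids("Hi") == [128 + ord("H"), 128 + ord("i")]  # reserved offset + code point
```

The script itself is CLI-driven, e.g. `python tests/collections/llm/lora_mistralai.py --model mistral --tp 2 --dist-opt`.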
diff --git a/tests/collections/llm/megatron_gpt_pretraining.py b/tests/collections/llm/megatron_gpt_pretraining.py
index a73b2a694c76..9722ba9d6c68 100644
--- a/tests/collections/llm/megatron_gpt_pretraining.py
+++ b/tests/collections/llm/megatron_gpt_pretraining.py
@@ -18,8 +18,8 @@
import argparse
import torch
+from lightning.pytorch.loggers import TensorBoardLogger
from megatron.core.optimizer import OptimizerConfig
-from pytorch_lightning.loggers import TensorBoardLogger
from nemo import lightning as nl
from nemo.collections import llm
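The import churn repeated across this diff tracks the unified `lightning` package, which exposes PyTorch Lightning as `lightning.pytorch`; the rename is mechanical:

```python
# Before: standalone distribution
# import pytorch_lightning as pl
# from pytorch_lightning.loggers import TensorBoardLogger

# After: unified `lightning` package, as used throughout this diff
import lightning.pytorch as pl
from lightning.pytorch.loggers import TensorBoardLogger

trainer = pl.Trainer(max_steps=1, logger=TensorBoardLogger("/tmp/tb_logs"))
```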
diff --git a/tests/collections/llm/megatron_mixtral_pretraining.py b/tests/collections/llm/megatron_mixtral_pretraining.py
index 82188f75351e..4123c7b37987 100644
--- a/tests/collections/llm/megatron_mixtral_pretraining.py
+++ b/tests/collections/llm/megatron_mixtral_pretraining.py
@@ -158,7 +158,7 @@ def main(args):
)
# Confirm checkpoint directory structure
- output_path = Path(args.experiment_dir) / "checkpoints/--None=0.0000-epoch=0/"
+ output_path = Path(args.experiment_dir) / "checkpoints/--None=0.0000-epoch=0-consumed_samples=8.0/weights"
assert output_path.exists(), f"Expected {output_path} to exist"
assert output_path.is_dir(), f"Expected {output_path} to be a directory"
output_files = ['__0_0.distcp', '__0_1.distcp', 'common.pt', 'metadata.json', '.metadata']
diff --git a/tests/collections/llm/megatron_t5_finetuning.py b/tests/collections/llm/megatron_t5_finetuning.py
index e8f4947c9674..976ad5c48053 100644
--- a/tests/collections/llm/megatron_t5_finetuning.py
+++ b/tests/collections/llm/megatron_t5_finetuning.py
@@ -18,8 +18,8 @@
import argparse
import torch
+from lightning.pytorch.loggers import WandbLogger
from megatron.core.optimizer import OptimizerConfig
-from pytorch_lightning.loggers import WandbLogger
from nemo import lightning as nl
from nemo.collections import llm
@@ -103,7 +103,7 @@ def get_args():
optimizer='adam',
lr=2.0e-5,
use_distributed_optimizer=False,
- bf16=False,
+ bf16=True,
weight_decay=0.1,
)
opt = MegatronOptimizerModule(
@@ -124,7 +124,7 @@ def get_args():
log_every_n_steps=1,
limit_val_batches=2,
val_check_interval=50,
- plugins=nl.MegatronMixedPrecision(precision="32"),
+ plugins=nl.MegatronMixedPrecision(precision="bf16-mixed"),
)
if args.wandb_project is not None:
diff --git a/tests/collections/llm/megatron_t5_pretraining.py b/tests/collections/llm/megatron_t5_pretraining.py
index a5460be3d154..ad63ae88fb73 100644
--- a/tests/collections/llm/megatron_t5_pretraining.py
+++ b/tests/collections/llm/megatron_t5_pretraining.py
@@ -18,8 +18,8 @@
import argparse
import torch
+from lightning.pytorch.loggers import WandbLogger
from megatron.core.optimizer import OptimizerConfig
-from pytorch_lightning.loggers import WandbLogger
from nemo import lightning as nl
from nemo.collections import llm
diff --git a/tests/collections/llm/peft/lora_merge.py b/tests/collections/llm/peft/lora_merge.py
new file mode 100644
index 000000000000..2ca7390ea7e6
--- /dev/null
+++ b/tests/collections/llm/peft/lora_merge.py
@@ -0,0 +1,42 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import argparse
+from dataclasses import dataclass
+
+from nemo.collections import llm
+
+
+@dataclass
+class Llama3ConfigCI(llm.Llama3Config8B):
+ seq_length: int = 2048
+ num_layers: int = 2
+ hidden_size: int = 768
+ ffn_hidden_size: int = 3072
+ num_attention_heads: int = 8
+
+
+def get_args():
+ parser = argparse.ArgumentParser(description='Merge LoRA weights with base LLM')
+    parser.add_argument('--lora_checkpoint_path', type=str, help="Path to the finetuned LoRA checkpoint")
+ parser.add_argument('--output_path', type=str, help="Path to save merged checkpoint")
+ return parser.parse_args()
+
+
+if __name__ == '__main__':
+ args = get_args()
+
+ llm.peft.merge_lora(
+ lora_checkpoint_path=args.lora_checkpoint_path,
+ output_path=args.output_path,
+ )
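The merge script is a thin argparse wrapper over a single API call; a sketch with hypothetical placeholder paths:

```python
from nemo.collections import llm

llm.peft.merge_lora(
    lora_checkpoint_path="/results/peft_run/checkpoints/last",  # adapter checkpoint from finetuning (placeholder)
    output_path="/results/peft_run/merged",                     # destination for the merged base+LoRA weights (placeholder)
)
```

From the command line: `python tests/collections/llm/peft/lora_merge.py --lora_checkpoint_path <ckpt> --output_path <out>`.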
diff --git a/tests/collections/llm/recipes/test_nemotron4_22b.py b/tests/collections/llm/recipes/test_nemotron3_22b.py
similarity index 91%
rename from tests/collections/llm/recipes/test_nemotron4_22b.py
rename to tests/collections/llm/recipes/test_nemotron3_22b.py
index fef24c216766..50ce3028c5bb 100644
--- a/tests/collections/llm/recipes/test_nemotron4_22b.py
+++ b/tests/collections/llm/recipes/test_nemotron3_22b.py
@@ -17,15 +17,15 @@
from nemo.collections.llm.api import pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
-from nemo.collections.llm.gpt.model.nemotron import Nemotron4Config22B, NemotronModel
-from nemo.collections.llm.recipes import nemotron4_22b
+from nemo.collections.llm.gpt.model.nemotron import Nemotron3Config22B, NemotronModel
+from nemo.collections.llm.recipes import nemotron3_22b
from nemo.lightning import Trainer
-class TestNemotron4_22B:
+class TestNemotron3_22B:
@pytest.fixture(scope="class")
def recipe_module(self):
- return nemotron4_22b
+ return nemotron3_22b
def test_model(self, recipe_module):
model = recipe_module.model()
@@ -36,7 +36,7 @@ def test_model_config_parameters(self, recipe_module):
model = recipe_module.model()
nemotron_config = model.config
assert isinstance(nemotron_config, run.Config)
- assert nemotron_config.__fn_or_cls__ == Nemotron4Config22B
+ assert nemotron_config.__fn_or_cls__ == Nemotron3Config22B
assert nemotron_config.num_layers == 40
assert nemotron_config.hidden_size == 6144
assert nemotron_config.seq_length == 4096
diff --git a/tests/collections/llm/recipes/test_nemotron4_22b_16k.py b/tests/collections/llm/recipes/test_nemotron3_22b_16k.py
similarity index 93%
rename from tests/collections/llm/recipes/test_nemotron4_22b_16k.py
rename to tests/collections/llm/recipes/test_nemotron3_22b_16k.py
index 313d838067fb..607c655a6703 100644
--- a/tests/collections/llm/recipes/test_nemotron4_22b_16k.py
+++ b/tests/collections/llm/recipes/test_nemotron3_22b_16k.py
@@ -17,15 +17,15 @@
from nemo.collections.llm.api import pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
-from nemo.collections.llm.gpt.model.nemotron import Nemotron4Config22B, NemotronModel
-from nemo.collections.llm.recipes import nemotron4_22b_16k
+from nemo.collections.llm.gpt.model.nemotron import Nemotron3Config22B, NemotronModel
+from nemo.collections.llm.recipes import nemotron3_22b_16k
from nemo.lightning import Trainer
-class TestNemotron4_22B_16K:
+class TestNemotron3_22B_16K:
@pytest.fixture(scope="class")
def recipe_module(self):
- return nemotron4_22b_16k
+ return nemotron3_22b_16k
def test_model(self, recipe_module):
model = recipe_module.model()
@@ -36,7 +36,7 @@ def test_model_config_parameters(self, recipe_module):
model = recipe_module.model()
nemotron_config = model.config
assert isinstance(nemotron_config, run.Config)
- assert nemotron_config.__fn_or_cls__ == Nemotron4Config22B
+ assert nemotron_config.__fn_or_cls__ == Nemotron3Config22B
assert nemotron_config.num_layers == 40
assert nemotron_config.hidden_size == 6144
assert nemotron_config.seq_length == 16384
diff --git a/tests/collections/llm/recipes/test_nemotron4_22b_64k.py b/tests/collections/llm/recipes/test_nemotron3_22b_64k.py
similarity index 93%
rename from tests/collections/llm/recipes/test_nemotron4_22b_64k.py
rename to tests/collections/llm/recipes/test_nemotron3_22b_64k.py
index 5ac90e971cbb..423ca5a4fa1b 100644
--- a/tests/collections/llm/recipes/test_nemotron4_22b_64k.py
+++ b/tests/collections/llm/recipes/test_nemotron3_22b_64k.py
@@ -17,15 +17,15 @@
from nemo.collections.llm.api import pretrain
from nemo.collections.llm.gpt.data.mock import MockDataModule
-from nemo.collections.llm.gpt.model.nemotron import Nemotron4Config22B, NemotronModel
-from nemo.collections.llm.recipes import nemotron4_22b_64k
+from nemo.collections.llm.gpt.model.nemotron import Nemotron3Config22B, NemotronModel
+from nemo.collections.llm.recipes import nemotron3_22b_64k
from nemo.lightning import Trainer
-class TestNemotron4_22B_64K:
+class TestNemotron3_22B_64K:
@pytest.fixture(scope="class")
def recipe_module(self):
- return nemotron4_22b_64k
+ return nemotron3_22b_64k
def test_model(self, recipe_module):
model = recipe_module.model()
@@ -36,7 +36,7 @@ def test_model_config_parameters(self, recipe_module):
model = recipe_module.model()
nemotron_config = model.config
assert isinstance(nemotron_config, run.Config)
- assert nemotron_config.__fn_or_cls__ == Nemotron4Config22B
+ assert nemotron_config.__fn_or_cls__ == Nemotron3Config22B
assert nemotron_config.num_layers == 40
assert nemotron_config.hidden_size == 6144
assert nemotron_config.seq_length == 65536
diff --git a/tests/collections/llm/recipes/test_nemotron3_8b.py b/tests/collections/llm/recipes/test_nemotron3_8b.py
index 2cf1c440080a..c64c9ae655af 100644
--- a/tests/collections/llm/recipes/test_nemotron3_8b.py
+++ b/tests/collections/llm/recipes/test_nemotron3_8b.py
@@ -66,13 +66,13 @@ def test_pretrain_recipe_with_different_configurations(self, recipe_module, num_
def test_finetune_recipe(self, recipe_module):
recipe = recipe_module.finetune_recipe()
assert isinstance(recipe, run.Partial)
- assert recipe.__fn_or_cls__ == finetune
+ assert isinstance(recipe, run.Partial)
assert isinstance(recipe.model, run.Config)
assert recipe.model.__fn_or_cls__ == NemotronModel
assert isinstance(recipe.trainer, run.Config)
assert recipe.trainer.__fn_or_cls__ == Trainer
assert isinstance(recipe.data, run.Config)
assert recipe.data.__fn_or_cls__ == SquadDataModule
- assert recipe.data.seq_length == 4096
- assert recipe.data.global_batch_size == 32
- assert recipe.data.micro_batch_size == 2
+ assert recipe.data.seq_length == 2048
+ assert recipe.data.global_batch_size == 128
+ assert recipe.data.micro_batch_size == 1
diff --git a/tests/collections/llm/recipes/test_nemotron4_340b.py b/tests/collections/llm/recipes/test_nemotron4_340b.py
index 603c16696429..968e2cf76e3e 100644
--- a/tests/collections/llm/recipes/test_nemotron4_340b.py
+++ b/tests/collections/llm/recipes/test_nemotron4_340b.py
@@ -64,7 +64,7 @@ def test_pretrain_recipe_with_different_configurations(self, recipe_module, num_
assert recipe.trainer.devices == num_gpus_per_node
def test_finetune_recipe(self, recipe_module):
- recipe = recipe_module.finetune_recipe()
+ recipe = recipe_module.finetune_recipe(num_nodes=4)
assert isinstance(recipe, run.Partial)
assert recipe.__fn_or_cls__ == finetune
assert isinstance(recipe.model, run.Config)
@@ -73,6 +73,6 @@ def test_finetune_recipe(self, recipe_module):
assert recipe.trainer.__fn_or_cls__ == Trainer
assert isinstance(recipe.data, run.Config)
assert recipe.data.__fn_or_cls__ == SquadDataModule
- assert recipe.data.seq_length == 4096
- assert recipe.data.global_batch_size == 2304
+ assert recipe.data.seq_length == 2048
+ assert recipe.data.global_batch_size == 128
assert recipe.data.micro_batch_size == 1
diff --git a/tests/collections/llm/recipes/test_t5_11b.py b/tests/collections/llm/recipes/test_t5_11b.py
index 8c4ab8332c18..5a0130323672 100644
--- a/tests/collections/llm/recipes/test_t5_11b.py
+++ b/tests/collections/llm/recipes/test_t5_11b.py
@@ -15,9 +15,11 @@
import nemo_run as run
import pytest
-from nemo.collections.llm.api import pretrain
+from nemo.collections.llm.api import finetune, pretrain
+from nemo.collections.llm.peft.lora import LoRA
from nemo.collections.llm.recipes import t5_11b
from nemo.collections.llm.t5.data.mock import MockDataModule
+from nemo.collections.llm.t5.data.squad import SquadDataModule
from nemo.collections.llm.t5.model.t5 import T5Config11B, T5Model
from nemo.lightning import Trainer
@@ -82,6 +84,22 @@ def test_pretrain_recipe(self, recipe_module):
assert recipe.data.seq_length_dec == 128
assert recipe.data.global_batch_size == 1920
+ def test_finetune_recipe(self, recipe_module):
+ recipe = recipe_module.finetune_recipe()
+ assert isinstance(recipe, run.Partial)
+ assert recipe.__fn_or_cls__ == finetune
+ assert isinstance(recipe.model, run.Config)
+ assert recipe.model.__fn_or_cls__ == T5Model
+ assert isinstance(recipe.trainer, run.Config)
+ assert recipe.trainer.__fn_or_cls__ == Trainer
+ assert isinstance(recipe.data, run.Config)
+ assert recipe.data.__fn_or_cls__ == SquadDataModule
+ assert recipe.data.seq_length == 512
+ assert recipe.data.seq_length_dec == 128
+ assert recipe.data.global_batch_size == 128
+ assert isinstance(recipe.peft, run.Config)
+ assert recipe.peft.__fn_or_cls__ == LoRA
+
@pytest.mark.parametrize("num_nodes,num_gpus_per_node", [(1, 8), (2, 4), (4, 2)])
def test_pretrain_recipe_with_different_configurations(self, recipe_module, num_nodes, num_gpus_per_node):
recipe = recipe_module.pretrain_recipe(num_nodes=num_nodes, num_gpus_per_node=num_gpus_per_node)
diff --git a/tests/collections/llm/recipes/test_t5_220m.py b/tests/collections/llm/recipes/test_t5_220m.py
index 744598e3b01b..725061280b4f 100644
--- a/tests/collections/llm/recipes/test_t5_220m.py
+++ b/tests/collections/llm/recipes/test_t5_220m.py
@@ -15,9 +15,11 @@
import nemo_run as run
import pytest
-from nemo.collections.llm.api import pretrain
+from nemo.collections.llm.api import finetune, pretrain
+from nemo.collections.llm.peft.lora import LoRA
from nemo.collections.llm.recipes import t5_220m
from nemo.collections.llm.t5.data.mock import MockDataModule
+from nemo.collections.llm.t5.data.squad import SquadDataModule
from nemo.collections.llm.t5.model.t5 import T5Config220M, T5Model
from nemo.lightning import Trainer
@@ -82,6 +84,22 @@ def test_pretrain_recipe(self, recipe_module):
assert recipe.data.seq_length_dec == 128
assert recipe.data.global_batch_size == 512
+ def test_finetune_recipe(self, recipe_module):
+ recipe = recipe_module.finetune_recipe()
+ assert isinstance(recipe, run.Partial)
+ assert recipe.__fn_or_cls__ == finetune
+ assert isinstance(recipe.model, run.Config)
+ assert recipe.model.__fn_or_cls__ == T5Model
+ assert isinstance(recipe.trainer, run.Config)
+ assert recipe.trainer.__fn_or_cls__ == Trainer
+ assert isinstance(recipe.data, run.Config)
+ assert recipe.data.__fn_or_cls__ == SquadDataModule
+ assert recipe.data.seq_length == 512
+ assert recipe.data.seq_length_dec == 128
+ assert recipe.data.global_batch_size == 128
+ assert isinstance(recipe.peft, run.Config)
+ assert recipe.peft.__fn_or_cls__ == LoRA
+
@pytest.mark.parametrize("num_nodes,num_gpus_per_node", [(1, 8), (2, 4), (4, 2)])
def test_pretrain_recipe_with_different_configurations(self, recipe_module, num_nodes, num_gpus_per_node):
recipe = recipe_module.pretrain_recipe(num_nodes=num_nodes, num_gpus_per_node=num_gpus_per_node)
diff --git a/tests/collections/llm/recipes/test_t5_3b.py b/tests/collections/llm/recipes/test_t5_3b.py
index 7672b95426cb..e81bb3e27f47 100644
--- a/tests/collections/llm/recipes/test_t5_3b.py
+++ b/tests/collections/llm/recipes/test_t5_3b.py
@@ -15,9 +15,11 @@
import nemo_run as run
import pytest
-from nemo.collections.llm.api import pretrain
+from nemo.collections.llm.api import finetune, pretrain
+from nemo.collections.llm.peft.lora import LoRA
from nemo.collections.llm.recipes import t5_3b
from nemo.collections.llm.t5.data.mock import MockDataModule
+from nemo.collections.llm.t5.data.squad import SquadDataModule
from nemo.collections.llm.t5.model.t5 import T5Config3B, T5Model
from nemo.lightning import Trainer
@@ -82,6 +84,22 @@ def test_pretrain_recipe(self, recipe_module):
assert recipe.data.seq_length_dec == 128
assert recipe.data.global_batch_size == 1920
+ def test_finetune_recipe(self, recipe_module):
+ recipe = recipe_module.finetune_recipe()
+ assert isinstance(recipe, run.Partial)
+ assert recipe.__fn_or_cls__ == finetune
+ assert isinstance(recipe.model, run.Config)
+ assert recipe.model.__fn_or_cls__ == T5Model
+ assert isinstance(recipe.trainer, run.Config)
+ assert recipe.trainer.__fn_or_cls__ == Trainer
+ assert isinstance(recipe.data, run.Config)
+ assert recipe.data.__fn_or_cls__ == SquadDataModule
+ assert recipe.data.seq_length == 512
+ assert recipe.data.seq_length_dec == 128
+ assert recipe.data.global_batch_size == 128
+ assert isinstance(recipe.peft, run.Config)
+ assert recipe.peft.__fn_or_cls__ == LoRA
+
@pytest.mark.parametrize("num_nodes,num_gpus_per_node", [(1, 8), (2, 4), (4, 2)])
def test_pretrain_recipe_with_different_configurations(self, recipe_module, num_nodes, num_gpus_per_node):
recipe = recipe_module.pretrain_recipe(num_nodes=num_nodes, num_gpus_per_node=num_gpus_per_node)
diff --git a/tests/collections/llm/test_mnist_model_nemo2.py b/tests/collections/llm/test_mnist_model_nemo2.py
index a5c2aa96fc03..92cffc2a35bb 100644
--- a/tests/collections/llm/test_mnist_model_nemo2.py
+++ b/tests/collections/llm/test_mnist_model_nemo2.py
@@ -23,16 +23,16 @@
from pathlib import Path
from typing import Any, Dict, Iterable, Iterator, List, Optional, Sequence, Tuple, TypedDict, TypeVar, Union
+import lightning.pytorch as pl
import megatron.core.num_microbatches_calculator
import pytest
-import pytorch_lightning as pl
import torch
import torch.distributed
+from lightning.pytorch.loggers import TensorBoardLogger
from megatron.core import ModelParallelConfig, parallel_state
from megatron.core.optimizer import OptimizerConfig
from megatron.core.transformer.enums import ModelType
from megatron.core.transformer.module import MegatronModule
-from pytorch_lightning.loggers import TensorBoardLogger
from torch import Tensor, nn
from torch.utils.data import DataLoader
from torchvision import transforms
diff --git a/tests/collections/llm/test_mnist_model_nemo2_fsdp.py b/tests/collections/llm/test_mnist_model_nemo2_fsdp.py
index 8a6c1f993d28..da45d0e1fc38 100644
--- a/tests/collections/llm/test_mnist_model_nemo2_fsdp.py
+++ b/tests/collections/llm/test_mnist_model_nemo2_fsdp.py
@@ -23,16 +23,16 @@
from pathlib import Path
from typing import Any, Dict, Iterable, Iterator, List, Optional, Sequence, Tuple, TypedDict, TypeVar, Union
+import lightning.pytorch as pl
import megatron.core.num_microbatches_calculator
import pytest
-import pytorch_lightning as pl
import torch
import torch.distributed
+from lightning.pytorch.loggers import TensorBoardLogger
from megatron.core import ModelParallelConfig, parallel_state
from megatron.core.optimizer import OptimizerConfig
from megatron.core.transformer.enums import ModelType
from megatron.core.transformer.module import MegatronModule
-from pytorch_lightning.loggers import TensorBoardLogger
from torch import Tensor, nn
from torch.optim import Adam
from torch.utils.data import DataLoader
diff --git a/tests/collections/multimodal/data/energon/test_data_module.py b/tests/collections/multimodal/data/energon/test_data_module.py
index 23edc0dd3094..179d3f09f2df 100644
--- a/tests/collections/multimodal/data/energon/test_data_module.py
+++ b/tests/collections/multimodal/data/energon/test_data_module.py
@@ -93,14 +93,14 @@ def test_data_module(self):
self.assertIn('attention_mask', batch)
print(batch)
decoded_text = self.decode_vqa_tokens_to_text(batch['tokens'][0].tolist())
- system_message = re.escape(self.data_module.multimodal_sample_config.conversation_template_config.system)
+ # system_message = re.escape(self.data_module.multimodal_sample_config.conversation_template_config.system)
user_context = re.escape(self.vqa_json[0]['value'])
assistant_answer = re.escape(self.vqa_json[1]['value'])
- self.assertRegex(
- decoded_text,
- rf"{system_message}",
- msg="System message block does not match the expected format.",
- )
+ # self.assertRegex(
+ # decoded_text,
+ # rf"{system_message}",
+ # msg="System message block does not match the expected format.",
+ # )
self.assertRegex(decoded_text, user_context, msg="User context did not match in decoded text")
self.assertRegex(
decoded_text, assistant_answer, msg="Assistant answer block did not match in decoded text"
@@ -117,14 +117,14 @@ def test_data_module(self):
self.assertIn('attention_mask', batch)
print(batch)
decoded_text = self.decode_vqa_tokens_to_text(batch['tokens'][0].tolist())
- system_message = re.escape(self.data_module.multimodal_sample_config.conversation_template_config.system)
+ # system_message = re.escape(self.data_module.multimodal_sample_config.conversation_template_config.system)
user_context = re.escape(self.vqa_json[0]['value'])
assistant_answer = re.escape(self.vqa_json[1]['value'])
- self.assertRegex(
- decoded_text,
- rf"{system_message}",
- msg="System message block does not match the expected format.",
- )
+ # self.assertRegex(
+ # decoded_text,
+ # rf"{system_message}",
+ # msg="System message block does not match the expected format.",
+ # )
self.assertRegex(decoded_text, user_context, msg="User context did not match in decoded text")
self.assertRegex(
decoded_text, assistant_answer, msg="Assistant answer block did not match in decoded text"
diff --git a/tests/collections/multimodal/test_speechllm_models.py b/tests/collections/multimodal/test_speechllm_models.py
index 8698fed205ea..09149064b657 100644
--- a/tests/collections/multimodal/test_speechllm_models.py
+++ b/tests/collections/multimodal/test_speechllm_models.py
@@ -16,13 +16,13 @@
import tempfile
from pathlib import Path
+import lightning.pytorch as pl
import numpy as np
import pytest
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.plugins.environments import TorchElasticEnvironment
from megatron.core import parallel_state
from omegaconf import DictConfig, OmegaConf
-from pytorch_lightning.plugins.environments import TorchElasticEnvironment
from nemo.collections.multimodal.speech_llm.models import modular_models
from nemo.collections.multimodal.speech_llm.parts.utils.data_utils import shift_tokens_by_multi_audios
diff --git a/tests/collections/nlp/test_falcon_model.py b/tests/collections/nlp/test_falcon_model.py
index 23430ad36300..62a4591092a9 100644
--- a/tests/collections/nlp/test_falcon_model.py
+++ b/tests/collections/nlp/test_falcon_model.py
@@ -14,8 +14,8 @@
import pytest
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from nemo.collections.common.tokenizers.huggingface.auto_tokenizer import AutoTokenizer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
diff --git a/tests/collections/nlp/test_flash_attention.py b/tests/collections/nlp/test_flash_attention.py
index f5585ddc1636..c8309b34b433 100644
--- a/tests/collections/nlp/test_flash_attention.py
+++ b/tests/collections/nlp/test_flash_attention.py
@@ -16,8 +16,8 @@
import pytest
import torch
+from lightning.pytorch.trainer.trainer import Trainer
from megatron.core import ModelParallelConfig
-from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.modules.common.megatron.attention import CoreAttention
from nemo.collections.nlp.modules.common.megatron.megatron_init import initialize_model_parallel_for_nemo
diff --git a/tests/collections/nlp/test_gpt_eval.py b/tests/collections/nlp/test_gpt_eval.py
index fb3f9fda5ac3..020185ec7385 100644
--- a/tests/collections/nlp/test_gpt_eval.py
+++ b/tests/collections/nlp/test_gpt_eval.py
@@ -16,7 +16,7 @@
import numpy as np
import pytest
-from pytorch_lightning.trainer.trainer import Trainer
+from lightning.pytorch.trainer.trainer import Trainer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
from nemo.collections.nlp.modules.common.transformer.text_generation import LengthParam, SamplingParam
diff --git a/tests/collections/nlp/test_gpt_model.py b/tests/collections/nlp/test_gpt_model.py
index 7b6c02f948a4..334167f3dcf8 100644
--- a/tests/collections/nlp/test_gpt_model.py
+++ b/tests/collections/nlp/test_gpt_model.py
@@ -16,8 +16,8 @@
import pytest
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from nemo.collections.common.tokenizers.huggingface.auto_tokenizer import AutoTokenizer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
diff --git a/tests/collections/nlp/test_nlp_exportables.py b/tests/collections/nlp/test_nlp_exportables.py
index b404764e7eed..6da0f8c93cc0 100644
--- a/tests/collections/nlp/test_nlp_exportables.py
+++ b/tests/collections/nlp/test_nlp_exportables.py
@@ -14,9 +14,9 @@
import os
import tempfile
+import lightning.pytorch as pl
import onnx
import pytest
-import pytorch_lightning as pl
import torch
import wget
from omegaconf import DictConfig, OmegaConf
diff --git a/tests/collections/nlp/test_pretrained_models_performance.py b/tests/collections/nlp/test_pretrained_models_performance.py
index 82ff6ed103f1..b51f00681f57 100644
--- a/tests/collections/nlp/test_pretrained_models_performance.py
+++ b/tests/collections/nlp/test_pretrained_models_performance.py
@@ -17,8 +17,8 @@
from shutil import rmtree
from unittest import TestCase
+import lightning.pytorch as pl
import pytest
-import pytorch_lightning as pl
from omegaconf import OmegaConf
import nemo.collections.nlp.models as models
diff --git a/tests/collections/nlp/test_rampup_batch_size.py b/tests/collections/nlp/test_rampup_batch_size.py
index c7efb5f57f4c..763dfaaf3c51 100644
--- a/tests/collections/nlp/test_rampup_batch_size.py
+++ b/tests/collections/nlp/test_rampup_batch_size.py
@@ -16,8 +16,8 @@
import pytest
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
from nemo.collections.nlp.parts.nlp_overrides import NLPDDPStrategy
diff --git a/tests/collections/nlp/test_retrieval_module.py b/tests/collections/nlp/test_retrieval_module.py
index 426e393c85bf..381d009f0e02 100644
--- a/tests/collections/nlp/test_retrieval_module.py
+++ b/tests/collections/nlp/test_retrieval_module.py
@@ -16,7 +16,7 @@
import pytest
import torch
from einops import rearrange
-from pytorch_lightning.trainer.trainer import Trainer
+from lightning.pytorch.trainer.trainer import Trainer
from nemo.collections.nlp.modules.common.megatron.attention import ParallelChunkedCrossAttention
from nemo.collections.nlp.modules.common.megatron.layer_type import LayerType
@@ -73,7 +73,13 @@ def setup_class(cls):
MB_SIZE = 4
GB_SIZE = 8
SEED = 1234
- trainer = Trainer(strategy=NLPDDPStrategy(), devices=GPUS, accelerator='gpu', num_nodes=1, logger=None,)
+ trainer = Trainer(
+ strategy=NLPDDPStrategy(),
+ devices=GPUS,
+ accelerator='gpu',
+ num_nodes=1,
+ logger=None,
+ )
initialize_model_parallel_for_nemo(
world_size=trainer.world_size,
@@ -134,7 +140,9 @@ def test_cross_attn(self, model_parallel_config):
dec_attn_mask = rearrange(hidden_mask, '(k n) b -> (b k) n', k=chunks)
context_attn_mask = rearrange(context_mask, 'k r n b -> (b k) (r n)')
enc_dec_attn_mask_3d = build_attention_mask_3d(
- source_mask=dec_attn_mask, target_mask=context_attn_mask, attn_mask_type=AttnMaskType.padding,
+ source_mask=dec_attn_mask,
+ target_mask=context_attn_mask,
+ attn_mask_type=AttnMaskType.padding,
)
enc_dec_attn_mask_3d = enc_dec_attn_mask_3d[:, None, :, :]
diff --git a/tests/collections/nlp/test_retrieval_module_inference.py b/tests/collections/nlp/test_retrieval_module_inference.py
index ccb426ce4ab1..a7da05340708 100644
--- a/tests/collections/nlp/test_retrieval_module_inference.py
+++ b/tests/collections/nlp/test_retrieval_module_inference.py
@@ -17,7 +17,7 @@
import torch
import torch.nn.functional as F
from einops import rearrange
-from pytorch_lightning.trainer.trainer import Trainer
+from lightning.pytorch.trainer.trainer import Trainer
from nemo.collections.nlp.modules.common.megatron.attention import ParallelChunkedCrossAttention
from nemo.collections.nlp.modules.common.megatron.layer_type import LayerType
@@ -73,7 +73,13 @@ def setup_class(cls):
MB_SIZE = 4
GB_SIZE = 8
SEED = 1234
- trainer = Trainer(strategy=NLPDDPStrategy(), devices=GPUS, accelerator='gpu', num_nodes=1, logger=None,)
+ trainer = Trainer(
+ strategy=NLPDDPStrategy(),
+ devices=GPUS,
+ accelerator='gpu',
+ num_nodes=1,
+ logger=None,
+ )
initialize_model_parallel_for_nemo(
world_size=trainer.world_size,
@@ -176,15 +182,33 @@ def test_retrieval_encoder_inference(self, model_parallel_config):
neighbors=neighbors,
)
assert (encoder.encoder_output - hidden_emb[:, :64]).abs().max().item() < 1e-5
- assert (out_gt[:, 0,] - out_2[:, 0]).abs().max().item() < 1e-2
+ assert (
+ out_gt[
+ :,
+ 0,
+ ]
+ - out_2[:, 0]
+ ).abs().max().item() < 1e-2
out_test = encoder(
retrieved_emb[:, :1],
context_mask[:, :1],
context_attn_mask=hidden_mask[:, :64],
encoder_output=hidden_emb[:, :64],
)
- assert (out_gt[:, 0,] - out_test[:, 0]).abs().max().item() < 1e-2
- assert (out_gt[:, 0,] - out_2[:, 0]).abs().max().item() < 1e-2
+ assert (
+ out_gt[
+ :,
+ 0,
+ ]
+ - out_test[:, 0]
+ ).abs().max().item() < 1e-2
+ assert (
+ out_gt[
+ :,
+ 0,
+ ]
+ - out_2[:, 0]
+ ).abs().max().item() < 1e-2
for i in range(64, 127):
out_3 = encoder(
@@ -207,7 +231,13 @@ def test_retrieval_encoder_inference(self, model_parallel_config):
neighbors=neighbors,
)
assert (encoder.encoder_output - hidden_emb[:, 64:128]).abs().max().item() < 1e-5
- assert (out_gt[:, :2,] - out_3).abs().max().item() < 1e-2
+ assert (
+ out_gt[
+ :,
+ :2,
+ ]
+ - out_3
+ ).abs().max().item() < 1e-2
# test inference
for i in range(128, 191):
out_4 = encoder(
@@ -231,7 +261,13 @@ def test_retrieval_encoder_inference(self, model_parallel_config):
)
assert (encoder.encoder_output - hidden_emb[:, 128:192]).abs().max().item() < 1e-5
- assert (out_gt[:, :3,] - out_4).abs().max().item() < 1e-2
+ assert (
+ out_gt[
+ :,
+ :3,
+ ]
+ - out_4
+ ).abs().max().item() < 1e-2
out_2 = encoder(
retrieved_emb[:, :2],
@@ -263,7 +299,13 @@ def test_retrieval_encoder_inference(self, model_parallel_config):
neighbors=neighbors,
)
assert (encoder.encoder_output - hidden_emb[:, 128:192]).abs().max().item() < 1e-5
- assert (out_gt[:, :3,] - out_4).abs().max().item() < 1e-2
+ assert (
+ out_gt[
+ :,
+ :3,
+ ]
+ - out_4
+ ).abs().max().item() < 1e-2
@pytest.mark.unit
def test_cross_attn_inference(self, model_parallel_config):
@@ -309,7 +351,9 @@ def get_attn_mask_3d(hidden_mask, context_mask, chunks):
dec_attn_mask = rearrange(hidden_mask, '(k n) b -> (b k) n', k=chunks)
context_attn_mask = rearrange(context_mask, 'k r n b -> (b k) (r n)')
enc_dec_attn_mask_3d = build_attention_mask_3d(
- source_mask=dec_attn_mask, target_mask=context_attn_mask, attn_mask_type=AttnMaskType.padding,
+ source_mask=dec_attn_mask,
+ target_mask=context_attn_mask,
+ attn_mask_type=AttnMaskType.padding,
)
enc_dec_attn_mask_3d = enc_dec_attn_mask_3d[:, None, :, :]
return enc_dec_attn_mask_3d
diff --git a/tests/collections/nlp/test_retro_model.py b/tests/collections/nlp/test_retro_model.py
index b96016c8d7ec..e91590915ba5 100644
--- a/tests/collections/nlp/test_retro_model.py
+++ b/tests/collections/nlp/test_retro_model.py
@@ -16,8 +16,8 @@
import pytest
import torch
+from lightning.pytorch import Trainer
from omegaconf import DictConfig
-from pytorch_lightning import Trainer
from nemo.collections.nlp.models.language_modeling.megatron_retro_model import MegatronRetroModel
from nemo.collections.nlp.modules.common.megatron.utils import get_ltor_masks_and_position_ids
diff --git a/tests/core/test_config_utils.py b/tests/core/test_config_utils.py
index bb0a0f177dfb..9716fc160629 100644
--- a/tests/core/test_config_utils.py
+++ b/tests/core/test_config_utils.py
@@ -15,9 +15,9 @@
from dataclasses import dataclass
from typing import Any
+import lightning.pytorch as ptl
import pytest
-import pytorch_lightning as ptl
-from pytorch_lightning.callbacks.early_stopping import EarlyStopping
+from lightning.pytorch.callbacks.early_stopping import EarlyStopping
from nemo.core.config.pytorch_lightning import TrainerConfig
from nemo.utils import config_utils
@@ -126,7 +126,9 @@ def test_ptl_config(self):
assert dataclass_subset is None
@pytest.mark.unit
- def test_early_stopping_config(self,):
+ def test_early_stopping_config(
+ self,
+ ):
result = config_utils.assert_dataclass_signature_match(EarlyStopping, EarlyStoppingParams)
signatures_match, cls_subset, dataclass_subset = result
diff --git a/tests/core/test_dist_ckpt.py b/tests/core/test_dist_ckpt.py
index 0a483c0f58ab..6c066d1856a2 100644
--- a/tests/core/test_dist_ckpt.py
+++ b/tests/core/test_dist_ckpt.py
@@ -17,11 +17,11 @@
from pathlib import Path
from typing import Any, Dict
+import lightning.pytorch as pl
import pytest
-import pytorch_lightning as pl
import torch
-from lightning_fabric.plugins import TorchCheckpointIO
-from pytorch_lightning.demos.boring_classes import BoringModel
+from lightning.fabric.plugins import TorchCheckpointIO
+from lightning.pytorch.demos.boring_classes import BoringModel
from nemo.collections.nlp.parts.nlp_overrides import NLPDDPStrategy
from nemo.utils.callbacks.dist_ckpt_io import (
diff --git a/tests/core/test_exp_manager.py b/tests/core/test_exp_manager.py
index d4b1d37c1938..32d401b2051f 100644
--- a/tests/core/test_exp_manager.py
+++ b/tests/core/test_exp_manager.py
@@ -18,13 +18,13 @@
from pathlib import Path
from typing import Any
+import lightning.pytorch as pl
import pytest
-import pytorch_lightning as pl
import torch
+from lightning.pytorch import Callback
+from lightning.pytorch.loops import _TrainingEpochLoop
from omegaconf import OmegaConf
from omegaconf.errors import OmegaConfBaseException
-from pytorch_lightning import Callback
-from pytorch_lightning.loops import _TrainingEpochLoop
from nemo.collections.nlp.parts.nlp_overrides import NLPDDPStrategy
from nemo.constants import NEMO_ENV_VARNAME_VERSION
diff --git a/tests/core/test_fault_tolerance.py b/tests/core/test_fault_tolerance.py
index 5b4e0ecba4aa..f916a7b44454 100644
--- a/tests/core/test_fault_tolerance.py
+++ b/tests/core/test_fault_tolerance.py
@@ -13,8 +13,8 @@
# limitations under the License.
import os
+import lightning.pytorch as pl
import pytest
-import pytorch_lightning as pl
from nemo.utils.exp_manager import exp_manager
diff --git a/tests/core/test_optimizers_schedulers.py b/tests/core/test_optimizers_schedulers.py
index 5e5d1ee20c83..419db309a918 100644
--- a/tests/core/test_optimizers_schedulers.py
+++ b/tests/core/test_optimizers_schedulers.py
@@ -15,12 +15,12 @@
import math
import random
+import lightning.pytorch as pl
import omegaconf
import pytest
-import pytorch_lightning as pl
import torch
import torch.optim
-from pytorch_lightning.utilities import rank_zero_only
+from lightning.pytorch.utilities import rank_zero_only
from nemo.core import config, optim
from nemo.core.optim.lr_scheduler import AVAILABLE_SCHEDULERS
@@ -936,7 +936,13 @@ def train(
enable_progress_bar=False,
)
max_steps = optim.lr_scheduler.compute_max_steps(
- max_epochs, accumulate_grad_batches, limit_train_batches, devices, dataset_len, batch_size, drop_last,
+ max_epochs,
+ accumulate_grad_batches,
+ limit_train_batches,
+ devices,
+ dataset_len,
+ batch_size,
+ drop_last,
)
model = ExampleModel(batch_size, dataset_len, drop_last, max_steps)
trainer.callbacks.append(Callback())
@@ -991,7 +997,13 @@ def train(
dataset_len = random.randint(20, devices * 500)
batch_size = random.randint(math.ceil(5.0 / devices), min(dataset_len // devices, 128))
train(
- max_epochs, accumulate_grad_batches, limit_train_batches, devices, batch_size, dataset_len, drop_last,
+ max_epochs,
+ accumulate_grad_batches,
+ limit_train_batches,
+ devices,
+ batch_size,
+ dataset_len,
+ drop_last,
)
@pytest.mark.unit
diff --git a/tests/core/test_save_restore.py b/tests/core/test_save_restore.py
index 394ced55a452..8ac9dfeca1ae 100644
--- a/tests/core/test_save_restore.py
+++ b/tests/core/test_save_restore.py
@@ -12,10 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import filecmp
+import json
import os
import shutil
import tempfile
-from typing import Callable, Dict, Optional, Set, Union
+from typing import Any, Callable, Dict, Optional, Set, Union
import pytest
import torch
@@ -59,6 +60,18 @@ def getattr2(object, attr):
return getattr2(getattr(object, arr[0]), '.'.join(arr[1:]))
+def _is_json_serializable(value: Any) -> bool:
+ """Test whether a variable can be encoded as json."""
+ if value is None or isinstance(value, (bool, int, float, str, list, dict)): # fast path
+ return True
+ try:
+ json.dumps(value)
+ return True
+ except (TypeError, OverflowError):
+ # OverflowError is raised if number is too large to encode
+ return False
+
+
class MockModel(ModelPT):
def __init__(self, cfg, trainer=None):
super(MockModel, self).__init__(cfg=cfg, trainer=trainer)
@@ -1193,6 +1206,8 @@ def test_mock_model_nested_child_from_pretrained(self):
parent = self.__test_restore_elsewhere(parent, map_location='cpu')
assert isinstance(parent.ctc_model, EncDecCTCModel)
+ assert _is_json_serializable(parent.ctc_model.hparams_initial)
+
@pytest.mark.unit
def test_mock_model_nested_custom_config_field(self):
"""
diff --git a/tests/core/test_straggler_det.py b/tests/core/test_straggler_det.py
index ee5222854889..1f938214d792 100644
--- a/tests/core/test_straggler_det.py
+++ b/tests/core/test_straggler_det.py
@@ -14,8 +14,8 @@
import sys
+import lightning.pytorch as pl
import pytest
-import pytorch_lightning as pl
import torch
from omegaconf import OmegaConf
diff --git a/tests/core_ptl/check_for_ranks.py b/tests/core_ptl/check_for_ranks.py
index a1eae66790c4..dfbc05166c5a 100644
--- a/tests/core_ptl/check_for_ranks.py
+++ b/tests/core_ptl/check_for_ranks.py
@@ -16,9 +16,9 @@
import shutil
import torch
+from lightning.pytorch import Trainer
+from lightning.pytorch.utilities import rank_zero_only
from omegaconf import OmegaConf
-from pytorch_lightning import Trainer
-from pytorch_lightning.utilities import rank_zero_only
from nemo.core import ModelPT
from nemo.utils import logging
diff --git a/tests/core_ptl/check_manual_upload_to_hf_hub.py b/tests/core_ptl/check_manual_upload_to_hf_hub.py
index f411ee72332c..912eabb805bf 100644
--- a/tests/core_ptl/check_manual_upload_to_hf_hub.py
+++ b/tests/core_ptl/check_manual_upload_to_hf_hub.py
@@ -14,7 +14,7 @@
import shutil
from huggingface_hub import HfApi
-from pytorch_lightning.utilities import rank_zero_only
+from lightning.pytorch.utilities import rank_zero_only
from nemo.core import ModelPT
from nemo.utils import AppState, logging
@@ -40,7 +40,9 @@ def load_model_from_unpacked_hf_dir(repo_id):
def upload_model_as_single_nemo_file(model: ModelPT, repo_id, token):
# Upload the model to HF Hub
model.push_to_hf_hub(
- repo_id=repo_id, pack_nemo_file=True, token=token,
+ repo_id=repo_id,
+ pack_nemo_file=True,
+ token=token,
)
@@ -48,7 +50,9 @@ def upload_model_as_single_nemo_file(model: ModelPT, repo_id, token):
def upload_model_as_single_nemo_file(model: ModelPT, repo_id, token):
# Upload the model to HF Hub
model.push_to_hf_hub(
- repo_id=repo_id, pack_nemo_file=True, token=token,
+ repo_id=repo_id,
+ pack_nemo_file=True,
+ token=token,
)
@@ -56,7 +60,9 @@ def upload_model_as_single_nemo_file(model: ModelPT, repo_id, token):
def upload_model_as_unpacked_files(model: ModelPT, repo_id, token):
# Upload the model to HF Hub
model.push_to_hf_hub(
- repo_id=repo_id, pack_nemo_file=False, token=token,
+ repo_id=repo_id,
+ pack_nemo_file=False,
+ token=token,
)
diff --git a/tests/core_ptl/test_ptl_stateless_timer.py b/tests/core_ptl/test_ptl_stateless_timer.py
index 25f354a23c0d..5cfbbda39bbf 100644
--- a/tests/core_ptl/test_ptl_stateless_timer.py
+++ b/tests/core_ptl/test_ptl_stateless_timer.py
@@ -17,8 +17,8 @@
import pytest
import torch
+from lightning.pytorch import Trainer
from omegaconf import OmegaConf
-from pytorch_lightning import Trainer
from nemo.core import ModelPT
from nemo.utils import logging
diff --git a/tests/export/nemo_export.py b/tests/export/nemo_export.py
index e929f2601022..16aca9ccea4b 100644
--- a/tests/export/nemo_export.py
+++ b/tests/export/nemo_export.py
@@ -849,6 +849,9 @@ def run_inference_tests(args):
"Use the same value for --min_tps and --max_tps."
)
+ if args.debug:
+ LOGGER.setLevel(logging.DEBUG)
+
result_dic: Dict[int, Tuple[FunctionalResult, Optional[AccuracyResult]]] = {}
if args.existing_test_models:
diff --git a/tests/lightning/_fabric/test_conversion.py b/tests/lightning/_fabric/test_conversion.py
index e690557ec2eb..e97e766c86a7 100644
--- a/tests/lightning/_fabric/test_conversion.py
+++ b/tests/lightning/_fabric/test_conversion.py
@@ -13,10 +13,10 @@
# limitations under the License.
import pytest
-from lightning_fabric import plugins as fl_plugins
-from lightning_fabric import strategies as fl_strategies
-from pytorch_lightning import plugins as pl_plugins
-from pytorch_lightning import strategies as pl_strategies
+from lightning.fabric import plugins as fl_plugins
+from lightning.fabric import strategies as fl_strategies
+from lightning.pytorch import plugins as pl_plugins
+from lightning.pytorch import strategies as pl_strategies
from nemo import lightning as nl
from nemo.lightning.fabric.conversion import to_fabric
diff --git a/tests/lightning/_io/test_api.py b/tests/lightning/_io/test_api.py
index a4d458cef17b..e0aaac1a6aa2 100644
--- a/tests/lightning/_io/test_api.py
+++ b/tests/lightning/_io/test_api.py
@@ -19,7 +19,7 @@
import fiddle as fdl
import pytest
import yaml
-from pytorch_lightning.loggers import TensorBoardLogger
+from lightning.pytorch.loggers import TensorBoardLogger
from nemo import lightning as nl
from nemo.collections import llm
diff --git a/tests/lightning/pytorch/callbacks/test_model_checkpoint.py b/tests/lightning/pytorch/callbacks/test_model_checkpoint.py
index 802f2b28c25c..edaa8a6f4ec9 100644
--- a/tests/lightning/pytorch/callbacks/test_model_checkpoint.py
+++ b/tests/lightning/pytorch/callbacks/test_model_checkpoint.py
@@ -17,12 +17,12 @@
from pathlib import Path
from typing import Iterator, Optional, Sequence, Tuple
+import lightning.pytorch as pl
import megatron
import pytest
-import pytorch_lightning as pl
import torch
+from lightning.pytorch.utilities.types import EVAL_DATALOADERS, TRAIN_DATALOADERS
from megatron.core import ModelParallelConfig, parallel_state
-from pytorch_lightning.utilities.types import EVAL_DATALOADERS, TRAIN_DATALOADERS
from torch import Tensor
import nemo.lightning as nl
diff --git a/tests/lightning/pytorch/callbacks/test_model_transform.py b/tests/lightning/pytorch/callbacks/test_model_transform.py
index c59a82895125..cfae55cf99a9 100644
--- a/tests/lightning/pytorch/callbacks/test_model_transform.py
+++ b/tests/lightning/pytorch/callbacks/test_model_transform.py
@@ -12,8 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import lightning.pytorch as pl
import pytest
-import pytorch_lightning as pl
from torch import nn
from nemo.lightning.pytorch.callbacks.model_transform import ModelTransform
diff --git a/tests/lightning/pytorch/callbacks/test_peft.py b/tests/lightning/pytorch/callbacks/test_peft.py
index 95caca4d2784..fb6728acee8f 100644
--- a/tests/lightning/pytorch/callbacks/test_peft.py
+++ b/tests/lightning/pytorch/callbacks/test_peft.py
@@ -15,7 +15,7 @@
from unittest.mock import MagicMock, call, patch
import torch.nn as nn
-from pytorch_lightning.trainer.states import TrainerFn
+from lightning.pytorch.trainer.states import TrainerFn
from nemo.collections.llm import fn
from nemo.lightning.pytorch.callbacks.peft import PEFT, WrappedAdapterIO
from nemo.utils.callbacks.dist_ckpt_io import AsyncFinalizableCheckpointIO
@@ -26,6 +26,11 @@ class DummyPEFT(PEFT):
def transform(self, module, name=None, prefix=None):
return module # No-op transform for testing
+ def freeze_model(self, module):
+ super().freeze_model(module)
+ self.is_called = True
+ return module
+
class DummyModel(nn.Module, fn.FNMixin):
def __init__(self):
super().__init__()
@@ -38,6 +43,9 @@ def test_peft_call(self):
transformed_model = peft(model)
+ assert (
+ hasattr(peft, "is_called") and peft.is_called == True
+        ), "PEFT subclasses may override `freeze_model()`, so it must be called during peft(model)"
assert transformed_model.linear.weight.requires_grad == False
assert transformed_model.conv.weight.requires_grad == False
diff --git a/tests/lightning/pytorch/callbacks/test_preemption.py b/tests/lightning/pytorch/callbacks/test_preemption.py
index 4152f7fcce59..802d898c5a2b 100644
--- a/tests/lightning/pytorch/callbacks/test_preemption.py
+++ b/tests/lightning/pytorch/callbacks/test_preemption.py
@@ -17,7 +17,7 @@
import pytest
import torch
-from pytorch_lightning import Trainer
+from lightning.pytorch import Trainer
from nemo.lightning.pytorch.callbacks.preemption import PreemptionCallback
diff --git a/tests/lightning/test_dist_ckpt.py b/tests/lightning/test_dist_ckpt.py
index 886b1085ed55..107d15061792 100644
--- a/tests/lightning/test_dist_ckpt.py
+++ b/tests/lightning/test_dist_ckpt.py
@@ -21,8 +21,8 @@ def set_env():
from pathlib import Path
+import lightning.pytorch as pl
import pytest
-import pytorch_lightning as pl
import torch
import nemo.lightning as nl
diff --git a/tests/lightning/test_nemo_logger.py b/tests/lightning/test_nemo_logger.py
index a5a5ec32c886..8a63a92f0ee6 100644
--- a/tests/lightning/test_nemo_logger.py
+++ b/tests/lightning/test_nemo_logger.py
@@ -19,8 +19,8 @@
from unittest.mock import patch
import pytest
-from pytorch_lightning.callbacks import ModelCheckpoint as PTLModelCheckpoint
-from pytorch_lightning.loggers import WandbLogger
+from lightning.pytorch.callbacks import ModelCheckpoint as PTLModelCheckpoint
+from lightning.pytorch.loggers import WandbLogger
from nemo import lightning as nl
from nemo.constants import NEMO_ENV_VARNAME_VERSION
diff --git a/tests/lightning/test_nemo_run.py b/tests/lightning/test_nemo_run.py
index 2d63031a5ad0..1371b9adaa8e 100644
--- a/tests/lightning/test_nemo_run.py
+++ b/tests/lightning/test_nemo_run.py
@@ -30,7 +30,12 @@
("llama3_70b", "finetune_recipe", "llama3_70b_finetune"),
("llama3_70b_16k", "pretrain_recipe", "llama3_70b_16k_pretrain"),
("llama3_70b_64k", "pretrain_recipe", "llama3_70b_64k_pretrain"),
+ ("llama31_8b", "pretrain_recipe", "llama31_8b_pretrain"),
+ ("llama31_8b", "finetune_recipe", "llama31_8b_finetune"),
+ ("llama31_70b", "pretrain_recipe", "llama31_70b_pretrain"),
+ ("llama31_70b", "finetune_recipe", "llama31_70b_finetune"),
("llama31_405b", "pretrain_recipe", "llama31_405b_pretrain"),
+ ("llama31_405b", "finetune_recipe", "llama31_405b_finetune"),
("mistral_7b", "pretrain_recipe", "mistral_pretrain"),
("mistral_7b", "finetune_recipe", "mistral_finetune"),
("mixtral_8x7b", "pretrain_recipe", "mixtral_8x7b_pretrain"),
@@ -42,12 +47,12 @@
("nemotron3_4b", "pretrain_recipe", "nemotron3_4b_pretrain"),
("nemotron3_8b", "pretrain_recipe", "nemotron3_8b_pretrain"),
("nemotron3_8b", "finetune_recipe", "nemotron3_8b_finetune"),
+ ("nemotron3_22b", "pretrain_recipe", "nemotron3_22b_pretrain"),
+ ("nemotron3_22b_16k", "pretrain_recipe", "nemotron3_22b_16k_pretrain"),
+ ("nemotron3_22b_64k", "pretrain_recipe", "nemotron3_22b_64k_pretrain"),
("nemotron4_15b", "pretrain_recipe", "nemotron4_15b_pretrain"),
("nemotron4_15b_16k", "pretrain_recipe", "nemotron4_15b_16k_pretrain"),
("nemotron4_15b_64k", "pretrain_recipe", "nemotron4_15b_64k_pretrain"),
- ("nemotron4_22b", "pretrain_recipe", "nemotron4_22b_pretrain"),
- ("nemotron4_22b_16k", "pretrain_recipe", "nemotron4_22b_16k_pretrain"),
- ("nemotron4_22b_64k", "pretrain_recipe", "nemotron4_22b_64k_pretrain"),
("nemotron4_340b", "pretrain_recipe", "nemotron4_340b_pretrain"),
("nemotron4_340b", "finetune_recipe", "nemotron4_340b_finetune"),
("gpt3_175b", "pretrain_recipe", "gpt3_175b_pretrain"),
diff --git a/tests/lightning/test_precision_plugin.py b/tests/lightning/test_precision_plugin.py
index 44ffa5939fab..960e658187c5 100644
--- a/tests/lightning/test_precision_plugin.py
+++ b/tests/lightning/test_precision_plugin.py
@@ -12,8 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import lightning.pytorch as pl
import pytest
-import pytorch_lightning as pl
import torch
from megatron.core.optimizer import OptimizerConfig
diff --git a/tests/lightning/test_state_restoration.py b/tests/lightning/test_state_restoration.py
index 44e0673a1a39..59c5cc2234f7 100644
--- a/tests/lightning/test_state_restoration.py
+++ b/tests/lightning/test_state_restoration.py
@@ -17,8 +17,8 @@
import pytest
import torch
+from lightning.pytorch.callbacks import Callback
from megatron.core.optimizer import OptimizerConfig
-from pytorch_lightning.callbacks import Callback
from nemo import lightning as nl
from nemo.collections import llm
@@ -239,7 +239,7 @@ def run_resume_train(mbs, gbs, num_dev):
resume=AutoResume(
resume_if_exists=True,
resume_ignore_no_checkpoint=False,
- resume_from_path=f'{EXP_DIR}default/v1/checkpoints/default--None=0.0000-epoch=0/',
+ resume_from_path=f'{EXP_DIR}default/v1/checkpoints/default--None=0.0000-epoch=0-consumed_samples=20.0/',
),
)
trainer._teardown()
diff --git a/tests/utils/test_trainer_utils.py b/tests/utils/test_trainer_utils.py
index 55eee92a523c..251e59d4b648 100644
--- a/tests/utils/test_trainer_utils.py
+++ b/tests/utils/test_trainer_utils.py
@@ -12,8 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from lightning.pytorch.strategies import DDPStrategy
from omegaconf import OmegaConf
-from pytorch_lightning.strategies import DDPStrategy
from nemo.utils.trainer_utils import resolve_trainer_cfg
@@ -25,7 +25,7 @@ def test_resolve_trainer_cfg_strategy():
assert ans["strategy"] == "ddp"
cfg = OmegaConf.create(
- {"strategy": {"_target_": "pytorch_lightning.strategies.DDPStrategy", "gradient_as_bucket_view": True}}
+ {"strategy": {"_target_": "lightning.pytorch.strategies.DDPStrategy", "gradient_as_bucket_view": True}}
)
ans = resolve_trainer_cfg(cfg)
assert isinstance(ans, dict)
diff --git a/tutorials/01_NeMo_Models.ipynb b/tutorials/01_NeMo_Models.ipynb
index 4255a6656b8a..eb76e00cd981 100644
--- a/tutorials/01_NeMo_Models.ipynb
+++ b/tutorials/01_NeMo_Models.ipynb
@@ -984,7 +984,7 @@
"id": "0TsfmCYthMux"
},
"source": [
- "import pytorch_lightning as ptl\n",
+ "import lightning.pytorch as ptl\n",
"from nemo.core import ModelPT\n",
"from omegaconf import OmegaConf"
],
diff --git a/tutorials/asr/ASR_CTC_Language_Finetuning.ipynb b/tutorials/asr/ASR_CTC_Language_Finetuning.ipynb
index a02ee4f99714..6ad3307da496 100644
--- a/tutorials/asr/ASR_CTC_Language_Finetuning.ipynb
+++ b/tutorials/asr/ASR_CTC_Language_Finetuning.ipynb
@@ -1292,7 +1292,7 @@
},
"source": [
"import torch\n",
- "import pytorch_lightning as ptl\n",
+ "import lightning.pytorch as ptl\n",
"\n",
"if torch.cuda.is_available():\n",
" accelerator = 'gpu'\n",
@@ -2088,7 +2088,7 @@
},
"source": [
"import torch\n",
- "import pytorch_lightning as ptl\n",
+ "import lightning.pytorch as ptl\n",
"\n",
"if torch.cuda.is_available():\n",
" accelerator = 'gpu'\n",
diff --git a/tutorials/asr/ASR_TTS_Tutorial.ipynb b/tutorials/asr/ASR_TTS_Tutorial.ipynb
index 709f96d14ba5..544255f76d06 100644
--- a/tutorials/asr/ASR_TTS_Tutorial.ipynb
+++ b/tutorials/asr/ASR_TTS_Tutorial.ipynb
@@ -172,7 +172,7 @@
"import tempfile\n",
"\n",
"from omegaconf import OmegaConf\n",
- "import pytorch_lightning as pl\n",
+ "import lightning.pytorch as pl\n",
"import torch\n",
"from tqdm.auto import tqdm\n",
"import wget\n",
diff --git a/tutorials/asr/ASR_with_NeMo.ipynb b/tutorials/asr/ASR_with_NeMo.ipynb
index bd95c7194655..bb62e2f5eb9d 100644
--- a/tutorials/asr/ASR_with_NeMo.ipynb
+++ b/tutorials/asr/ASR_with_NeMo.ipynb
@@ -619,7 +619,7 @@
"id": "GUfR6tAK0k2u"
},
"source": [
- "import pytorch_lightning as pl\n",
+ "import lightning.pytorch as pl\n",
"trainer = pl.Trainer(devices=1, accelerator='gpu', max_epochs=50)"
],
"execution_count": null,
diff --git a/tutorials/asr/ASR_with_Subword_Tokenization.ipynb b/tutorials/asr/ASR_with_Subword_Tokenization.ipynb
index ff15a5f75532..7a69735ae542 100644
--- a/tutorials/asr/ASR_with_Subword_Tokenization.ipynb
+++ b/tutorials/asr/ASR_with_Subword_Tokenization.ipynb
@@ -765,7 +765,7 @@
"id": "3rslHEKeq9qy"
},
"source": [
- "import pytorch_lightning as pl\r\n",
+ "import lightning.pytorch as pl\r\n",
"trainer = pl.Trainer(devices=1, accelerator='gpu', max_epochs=50)"
],
"execution_count": null,
diff --git a/tutorials/asr/ASR_with_Transducers.ipynb b/tutorials/asr/ASR_with_Transducers.ipynb
index d20042b9b970..95eecbfb8916 100644
--- a/tutorials/asr/ASR_with_Transducers.ipynb
+++ b/tutorials/asr/ASR_with_Transducers.ipynb
@@ -754,7 +754,7 @@
"outputs": [],
"source": [
"import torch\n",
- "from pytorch_lightning import Trainer\n",
+ "from lightning.pytorch import Trainer\n",
"\n",
"if torch.cuda.is_available():\n",
" accelerator = 'gpu'\n",
diff --git a/tutorials/asr/Confidence_Ensembles.ipynb b/tutorials/asr/Confidence_Ensembles.ipynb
index 734ddc9a0604..5a999df304b0 100644
--- a/tutorials/asr/Confidence_Ensembles.ipynb
+++ b/tutorials/asr/Confidence_Ensembles.ipynb
@@ -214,7 +214,7 @@
"# check out https://github.com/NVIDIA/NeMo/blob/main/tutorials/asr/ASR_CTC_Language_Finetuning.ipynb\n",
"# to learn more about finetuning NeMo ASR models\n",
"from omegaconf import open_dict, OmegaConf\n",
- "from pytorch_lightning import Trainer\n",
+ "from lightning.pytorch import Trainer\n",
"\n",
"from nemo.collections.asr.models.ctc_bpe_models import EncDecCTCModelBPE\n",
"import nemo.utils.exp_manager as exp_manager\n",
diff --git a/tutorials/asr/Multilang_ASR.ipynb b/tutorials/asr/Multilang_ASR.ipynb
index 612271a8baab..800f8a2d2ded 100644
--- a/tutorials/asr/Multilang_ASR.ipynb
+++ b/tutorials/asr/Multilang_ASR.ipynb
@@ -1527,7 +1527,7 @@
"outputs": [],
"source": [
"import torch\n",
- "import pytorch_lightning as ptl"
+ "import lightning.pytorch as ptl"
]
},
{
diff --git a/tutorials/asr/Self_Supervised_Pre_Training.ipynb b/tutorials/asr/Self_Supervised_Pre_Training.ipynb
index b055f14f5885..0506bafb56e3 100644
--- a/tutorials/asr/Self_Supervised_Pre_Training.ipynb
+++ b/tutorials/asr/Self_Supervised_Pre_Training.ipynb
@@ -17,7 +17,9 @@
"3. Connect to an instance with a GPU (Runtime -> Change runtime type -> select \"GPU\" for hardware accelerator)\n",
"4. Run this cell to set up dependencies.\n",
"5. Restart the runtime (Runtime -> Restart Runtime) for any upgraded packages to take effect\n",
- "\n\nNOTE: User is responsible for checking the content of datasets and the applicable licenses and determining if suitable for the intended use.\n",
+ "\n",
+ "\n",
+ "NOTE: User is responsible for checking the content of datasets and the applicable licenses and determining if suitable for the intended use.\n",
"\"\"\"\n",
"# If you're using Google Colab and not running locally, run this cell.\n",
"\n",
@@ -272,8 +274,8 @@
"source": [
"## Grab the configs we'll use in this example\n",
"!mkdir configs\n",
- "!wget -P configs/ https://raw.githubusercontent.com/NVIDIA/NeMo/main/examples/asr/conf/ssl/citrinet/citrinet_ssl_1024.yaml\n",
- "!wget -P configs/ https://raw.githubusercontent.com/NVIDIA/NeMo/main/examples/asr/conf/citrinet/citrinet_1024.yaml\n"
+ "!wget -P configs/ https://raw.githubusercontent.com/NVIDIA/NeMo/$BRANCH/examples/asr/conf/ssl/citrinet/citrinet_ssl_1024.yaml\n",
+ "!wget -P configs/ https://raw.githubusercontent.com/NVIDIA/NeMo/$BRANCH/examples/asr/conf/citrinet/citrinet_1024.yaml\n"
]
},
{
@@ -380,16 +382,16 @@
},
{
"cell_type": "markdown",
+ "metadata": {
+ "id": "4JnepitBZ3ta"
+ },
"source": [
"Note that for this loss the outputs must match the inputs, so since we are using Citrinet architecture with 8x stride, we would need to either set \"combine_time_steps\" to 8, or put additional stride layers in the decoder. By default for Citrinet with 8x stride we use \"combine_time_steps=4\" and \"stride_layers=1\" to match the 8x stride.\n",
"\n",
"Since in MaskedPatchAugmentation we set mask_patches to 0.5 and our min_durations are set to 3.2, we are guaranteed to have 1.6 masked second per utterance, or 160 masked steps. Since combine_time_steps is set to 4, this means that 160 / 4 = 40 total negatives can be sampled, so we set num_negatives to 40 (unless you set sample_from_same_utterance_only to false or sample_from_non_masked to true, but this tends to make results worse).\n",
"\n",
"In the default configs we assume that min_duration for samples is higher (8 seconds by default), so there we can set patch_size to 48 for a total of 480 masked steps, and use 100 sampled negatives. If the min_duration of samples that you are training on allows, the amount of masked steps as well as negatives can be increased further (masking around 50% of the sample duration tends to work well)."
- ],
- "metadata": {
- "id": "4JnepitBZ3ta"
- }
+ ]
},
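{
"cell_type": "markdown",
"metadata": {},
"source": [
"As a quick sanity check of the masking arithmetic above, the hedged sketch below recomputes the numbers, assuming the default 10 ms feature hop (i.e. 100 encoder steps per second of audio):"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Hedged sketch: recompute the masked-step arithmetic from the text above.\n",
"# Assumption: a 10 ms feature hop, i.e. 100 steps per second of audio.\n",
"min_duration_s = 3.2\n",
"mask_patches = 0.5\n",
"combine_time_steps = 4\n",
"steps_per_second = 100\n",
"\n",
"masked_seconds = min_duration_s * mask_patches             # 1.6 s guaranteed masked\n",
"masked_steps = round(masked_seconds * steps_per_second)    # 160 masked steps\n",
"num_negatives = masked_steps // combine_time_steps         # 160 / 4 = 40 negatives\n",
"print(masked_seconds, masked_steps, num_negatives)"
]
},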
{
"cell_type": "markdown",
@@ -431,7 +433,7 @@
},
"outputs": [],
"source": [
- "import pytorch_lightning as pl\n",
+ "import lightning.pytorch as pl\n",
"from omegaconf import OmegaConf\n",
"\n",
"from nemo.collections.asr.models.ssl_models import SpeechEncDecSelfSupervisedModel\n",
@@ -482,7 +484,7 @@
"outputs": [],
"source": [
"!mkdir scripts\n",
- "!wget -P scripts/ https://raw.githubusercontent.com/NVIDIA/NeMo/main/scripts/tokenizers/process_asr_text_tokenizer.py\n",
+ "!wget -P scripts/ https://raw.githubusercontent.com/NVIDIA/NeMo/$BRANCH/scripts/tokenizers/process_asr_text_tokenizer.py\n",
"\n",
"!python ./scripts/process_asr_text_tokenizer.py \\\n",
" --manifest=\"{data_dir}/an4/train_manifest.json\" \\\n",
@@ -650,23 +652,23 @@
},
{
"cell_type": "markdown",
- "source": [
- "We can optionally freeze the encoder and only fine-tune the decoder during training. This can be done to lower the memory and time requirements of fine-tuning, but will likely result in a higher word error rate."
- ],
"metadata": {
"id": "S5aVb2F8WuAR"
- }
+ },
+ "source": [
+ "We can optionally freeze the encoder and only fine-tune the decoder during training. This can be done to lower the memory and time requirements of fine-tuning, but will likely result in a higher word error rate."
+ ]
},
{
"cell_type": "code",
- "source": [
- "#asr_model.encoder.freeze()"
- ],
+ "execution_count": null,
"metadata": {
"id": "LpF_YQUmXUR8"
},
- "execution_count": null,
- "outputs": []
+ "outputs": [],
+ "source": [
+ "#asr_model.encoder.freeze()"
+ ]
},
{
"cell_type": "markdown",
@@ -711,7 +713,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.7.7"
+ "version": "3.10.12"
},
"pycharm": {
"stem_cell": {
diff --git a/tutorials/asr/Speech_Commands.ipynb b/tutorials/asr/Speech_Commands.ipynb
index 438533f0f03a..c8a54e5135b2 100644
--- a/tutorials/asr/Speech_Commands.ipynb
+++ b/tutorials/asr/Speech_Commands.ipynb
@@ -408,7 +408,7 @@
},
"source": [
"import torch\n",
- "import pytorch_lightning as pl"
+ "import lightning.pytorch as pl"
],
"execution_count": null,
"outputs": []
diff --git a/tutorials/asr/Transducers_with_HF_Datasets.ipynb b/tutorials/asr/Transducers_with_HF_Datasets.ipynb
index a47cd00a0b9a..82f17fe8c1ac 100644
--- a/tutorials/asr/Transducers_with_HF_Datasets.ipynb
+++ b/tutorials/asr/Transducers_with_HF_Datasets.ipynb
@@ -554,7 +554,7 @@
"outputs": [],
"source": [
"import torch\n",
- "from pytorch_lightning import Trainer\n",
+ "from lightning.pytorch import Trainer\n",
"\n",
"if torch.cuda.is_available():\n",
" accelerator = 'gpu'\n",
diff --git a/tutorials/asr/Voice_Activity_Detection.ipynb b/tutorials/asr/Voice_Activity_Detection.ipynb
index 123a03efc28e..fb3cef1b44ea 100644
--- a/tutorials/asr/Voice_Activity_Detection.ipynb
+++ b/tutorials/asr/Voice_Activity_Detection.ipynb
@@ -425,7 +425,7 @@
"outputs": [],
"source": [
"import torch\n",
- "import pytorch_lightning as pl"
+ "import lightning.pytorch as pl"
]
},
{
diff --git a/tutorials/asr/asr_adapters/ASR_with_Adapters.ipynb b/tutorials/asr/asr_adapters/ASR_with_Adapters.ipynb
index c9c547a8383e..c3334a59b0d2 100644
--- a/tutorials/asr/asr_adapters/ASR_with_Adapters.ipynb
+++ b/tutorials/asr/asr_adapters/ASR_with_Adapters.ipynb
@@ -260,7 +260,7 @@
"source": [
"import torch\n",
"from omegaconf import OmegaConf, open_dict\n",
- "from pytorch_lightning import Trainer\n",
+ "from lightning.pytorch import Trainer\n",
"\n",
"import nemo.collections.asr as nemo_asr"
],
diff --git a/tutorials/asr/asr_adapters/Multi_Task_Adapters.ipynb b/tutorials/asr/asr_adapters/Multi_Task_Adapters.ipynb
index 852b3e838d5c..0d35feb11a9a 100644
--- a/tutorials/asr/asr_adapters/Multi_Task_Adapters.ipynb
+++ b/tutorials/asr/asr_adapters/Multi_Task_Adapters.ipynb
@@ -1,1660 +1,1666 @@
{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "b0373c4a-e565-4e8f-a87f-aae932d3aeed",
- "metadata": {
- "id": "b0373c4a-e565-4e8f-a87f-aae932d3aeed"
- },
- "outputs": [],
- "source": [
- "\"\"\"\n",
- "You can run either this notebook locally (if you have all the dependencies and a GPU) or on Google Colab.\n",
- "\n",
- "Instructions for setting up Colab are as follows:\n",
- "1. Open a new Python 3 notebook.\n",
- "2. Import this notebook from GitHub (File -> Upload Notebook -> \"GitHub\" tab -> copy/paste GitHub URL)\n",
- "3. Connect to an instance with a GPU (Runtime -> Change runtime type -> select \"GPU\" for hardware accelerator)\n",
- "4. Run this cell to set up dependencies.\n",
- "5. Restart the runtime (Runtime -> Restart Runtime) for any upgraded packages to take effect\n",
- "\n",
- "\n",
- "NOTE: User is responsible for checking the content of datasets and the applicable licenses and determining if suitable for the intended use.\n",
- "\"\"\"\n",
- "# If you're using Google Colab and not running locally, run this cell.\n",
- "import os\n",
- "\n",
- "# Install dependencies\n",
- "!pip install wget\n",
- "!apt-get install sox libsndfile1 ffmpeg\n",
- "!pip install text-unidecode\n",
- "!pip install matplotlib>=3.3.2\n",
- "\n",
- "## Install NeMo\n",
- "BRANCH = 'main'\n",
- "!python -m pip install \"nemo_toolkit[asr] @ git+https://github.com/NVIDIA/NeMo.git@$BRANCH\""
- ]
- },
- {
- "cell_type": "markdown",
- "id": "6c021f07-0576-491d-b73c-6c65c8501351",
- "metadata": {
- "id": "6c021f07-0576-491d-b73c-6c65c8501351"
- },
- "source": [
- "# Multi Task Adaptation with Adapters\n",
- "\n",
- "\n",
- "In earlier tutorials, we utilized a specific model for one task - for example, an ASR model (CTC, RNN-T etc) for the singular task of Speech Recognition. This is very useful if we want to specialize one task per model, but it can be expensive to deploy a fleet of models for each task, and learn routers to pass user tasks to correct models.\n",
- "\n",
- "We now support Multi Task models in NeMo, such that a single model can perform multiple tasks such as speech recognition, speech translation, voice activity detection, and more in the future. With one model supporting multiple tasks, we can simplify the task of deploying models and also hope to leverage individual tasks to improve each other (for example: you do need strong speech recognition first before you start doing translation).\n",
- "\n",
- "---\n",
- "\n",
- "Multi Task (Canary) models are highly capable large neural networks capable of things like speech recognition, X to English and English to X translation and able to select whether to transcribe speech with punctuation and capitalization. These huge models are trained on several thousand hours of speech and text data, making it challenging to adapt to new datasets.\n",
- "\n",
- "In the previous tutorial for [ASR Adapters](https://github.com/NVIDIA/NeMo/blob/main/tutorials/asr/asr_adapters/ASR_with_Adapters.ipynb), we used small adapter modules to tune a large ASR model on a small amount of data. In this tutorial, we will adapt a [Nvidia Canary](https://huggingface.co/nvidia/canary-1b) model onto a small amount of speech data for both Automatic Speech Recognition (ASR) and Automatic Speech Translation (AST).\n",
- "\n",
- "In this tutorial, we will also demonstrate a simple way of creating custom Data Modules from PyTorch Lightning to design custom datasets and data loaders for the highly flexible Multi Task Models in NeMo ASR. This offers users more flexibility in designing new tasks, and finetuning the models on small amounts of data."
- ]
- },
- {
- "cell_type": "markdown",
- "id": "cbe2f8eb-204f-4d90-bb0a-a49d994f1ed7",
- "metadata": {
- "id": "cbe2f8eb-204f-4d90-bb0a-a49d994f1ed7"
- },
- "source": [
- "----\n",
- "\n",
- "First, lets instantiate the [Canary](https://huggingface.co/nvidia/canary-1b) model"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "46c3e5c1-b4f2-4f84-89d6-c77bbe7ebe4f",
- "metadata": {
- "id": "46c3e5c1-b4f2-4f84-89d6-c77bbe7ebe4f"
- },
- "outputs": [],
- "source": [
- "import os\n",
- "import json\n",
- "\n",
- "import nemo.collections.asr as nemo_asr"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "48b9677b-b1d9-4361-becf-ee84fe8d53ca",
- "metadata": {
- "id": "48b9677b-b1d9-4361-becf-ee84fe8d53ca"
- },
- "outputs": [],
- "source": [
- "model = nemo_asr.models.ASRModel.from_pretrained(\"nvidia/canary-1b\")"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "6c0c87c9-5290-4634-9338-818f181c936a",
- "metadata": {
- "id": "6c0c87c9-5290-4634-9338-818f181c936a"
- },
- "source": [
- "# Enable Adapter Support in Model\n",
- "\n",
- "New in NeMo 2.0, we now have a simple utility function to convert the model into one that supports adapters, called `replace_adapter_compatible_modules()`.\n",
- "\n",
- "This will go through the full model and check modules if they support adapters, and then enable that ability. Once used, you can freely use adapter methods."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "bfd72316-630b-43c3-9a02-65bb2dabe624",
- "metadata": {
- "scrolled": true,
- "id": "bfd72316-630b-43c3-9a02-65bb2dabe624"
- },
- "outputs": [],
- "source": [
- "model.replace_adapter_compatible_modules()"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "30505bd5-323f-4e90-a941-d0de3f6e55e3",
- "metadata": {
- "id": "30505bd5-323f-4e90-a941-d0de3f6e55e3"
- },
- "source": [
- "## Check Which Targets Are Supported For This Model\n",
- "\n",
- "Now that the model has enabled adapter support, lets take a look at which of its modules support adapter modules to be attached to them.\n",
- "\n",
- "**Note**\n",
- "Below, you might see an adapter module with no name `''` - this corresponds to the \"default\" model target if the target isn't specified. Users can chose to simply skip the module name when adding an adapter, and the model will by default add adapters to the encoder module."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "13bcf42e-d33a-4364-8d0f-ab59a26ffa7c",
- "metadata": {
- "id": "13bcf42e-d33a-4364-8d0f-ab59a26ffa7c"
- },
- "outputs": [],
- "source": [
- "model.adapter_module_names"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "67324f6a-ffff-47a7-9ee5-dc93819f6ffd",
- "metadata": {
- "id": "67324f6a-ffff-47a7-9ee5-dc93819f6ffd"
- },
- "source": [
- "## Prepare the Adapter\n",
- "\n",
- "Now that we know which modules are supported, lets create a simple adapter module for the encoder and decoder modules."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "65ec3b2b-3f84-43ed-8a90-085aee383ea6",
- "metadata": {
- "id": "65ec3b2b-3f84-43ed-8a90-085aee383ea6"
- },
- "outputs": [],
- "source": [
- "from nemo.collections.common.parts import LinearAdapterConfig"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "47aab832-bfec-4cca-b4ee-868ea1af9869",
- "metadata": {
- "id": "47aab832-bfec-4cca-b4ee-868ea1af9869"
- },
- "outputs": [],
- "source": [
- "input_dim = model.cfg.encoder.d_model\n",
- "adapter_dim = 8"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "cd519281-ad45-4719-9ad6-561e6192717f",
- "metadata": {
- "id": "cd519281-ad45-4719-9ad6-561e6192717f"
- },
- "outputs": [],
- "source": [
- "enc_adapter_cfg = LinearAdapterConfig(in_features=input_dim, dim=adapter_dim)\n",
- "dec_adapter_cfg = LinearAdapterConfig(in_features=input_dim, dim=adapter_dim)"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "f147fc89-ab93-4454-ad6b-909288a452a2",
- "metadata": {
- "id": "f147fc89-ab93-4454-ad6b-909288a452a2"
- },
- "source": [
- "## Add Adapter Modules\n",
- "\n",
- "Now that we have the adapter configs prepared, lets add them to the model !\n",
- "\n",
- "We provide the target module by using `target:adapter_name` when calling `add_adapter()` - this tells the model to setup an adapter called `adapter_name` to the module denoted by `target` with the config `cfg`."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "a23256ce-bc09-4fb0-8c3b-214519b8774b",
- "metadata": {
- "id": "a23256ce-bc09-4fb0-8c3b-214519b8774b"
- },
- "outputs": [],
- "source": [
- "model.add_adapter(name=\"encoder:enc\", cfg=enc_adapter_cfg)\n",
- "model.add_adapter(name=\"transf_decoder:dec\", cfg=dec_adapter_cfg)\n",
- "\n",
- "print(\"Added adapters!\")"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "2dbe9b7b-9a3d-4504-a652-1d90701cbbf8",
- "metadata": {
- "id": "2dbe9b7b-9a3d-4504-a652-1d90701cbbf8"
- },
- "source": [
- "## Freeze Original Module Parameters and Unfreeze Adapter Weights Only\n",
- "\n",
- "When tuning adapters, we usually freeze the entire base model and only tune the adapters. This prevents the need for large amounts of data, preserves a lot of memory (since the full model doesnt need backward pass, only the adapters) and makes it easier to adapt huge models."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "2f8162dd-0373-4e65-aa8a-f458a1633578",
- "metadata": {
- "scrolled": true,
- "id": "2f8162dd-0373-4e65-aa8a-f458a1633578"
- },
- "outputs": [],
- "source": [
- "model.freeze()\n",
- "model.unfreeze_enabled_adapters()"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "0b3795a4-fcfe-49ee-a76f-1cb77d99ace1",
- "metadata": {
- "id": "0b3795a4-fcfe-49ee-a76f-1cb77d99ace1"
- },
- "source": [
- "----\n",
- "\n",
- "Lets make sure that the number of trainable parameters is a lot smaller (< 1 M) than the total number of params (1 B)."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "58453f40-d72d-4f9b-a427-3fb63787f3d6",
- "metadata": {
- "id": "58453f40-d72d-4f9b-a427-3fb63787f3d6"
- },
- "outputs": [],
- "source": [
- "model.summarize()"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "aa713f4a-ec16-4e2a-aeb3-ac7c4090f20f",
- "metadata": {
- "id": "aa713f4a-ec16-4e2a-aeb3-ac7c4090f20f"
- },
- "source": [
- "## Check Enabled Adapters\n",
- "\n",
- "Here, we check that the adapters that we named above (`enc` and `dec`) are both setup and enabled."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "d69f09d9-411e-420e-8f17-c86391e88fc3",
- "metadata": {
- "id": "d69f09d9-411e-420e-8f17-c86391e88fc3"
- },
- "outputs": [],
- "source": [
- "model.get_enabled_adapters()"
- ]
- },
- {
- "cell_type": "markdown",
- "source": [
- "# Customizing Multi Task Models\n",
- "\n",
- "In the following section, we will take a deeper look into what are the components that compose a Multi Task Model and how users can override each of these parts to create their own customizable multi task models.\n",
- "\n",
- "---\n",
- "\n",
- "In this tutorial, we will only see the internal components such as the prompt format and dataset construction, but not change them.\n",
- "\n",
- "In a following tutorial, we will show how to add an additional task to a pre-trained Multi Task Model using a pre-trained model as a starting point."
- ],
- "metadata": {
- "id": "f_XpTJx9hQXy"
- },
- "id": "f_XpTJx9hQXy"
- },
- {
- "cell_type": "markdown",
- "id": "6f0beb8c-7b12-4169-a3f7-1639bdaf6160",
- "metadata": {
- "id": "6f0beb8c-7b12-4169-a3f7-1639bdaf6160"
- },
- "source": [
- "# Prompt Handling for Multi Task Models\n",
- "Nvidia Canary is our first model that is a Multi Task Model.\n",
- "\n",
- "Multi Task models utilize a prompt format, similar to those used in Large Language Models, in order to denote to the model which task is to be performed, which langauge is being spoken and what language should the output transcript be in, whether to provide punctuation and capitalization or not, and so much more in the future !\n",
- "\n",
- "Lets take a look at the model's `prompt` for the Canary model that we have created -"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "56a78cd0-afaf-4272-898f-d9e13ba871d3",
- "metadata": {
- "id": "56a78cd0-afaf-4272-898f-d9e13ba871d3"
- },
- "outputs": [],
- "source": [
- "model.prompt_format"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "9cbaf28a-1f10-4da3-a3ed-53b2239baa49",
- "metadata": {
- "id": "9cbaf28a-1f10-4da3-a3ed-53b2239baa49"
- },
- "source": [
- "----\n",
- "\n",
- "This gives us the prompt format functions name, which we will see below points to a prompt format function that reads in manifest items and maps it to the template."
- ]
- },
- {
- "cell_type": "markdown",
- "id": "087d1f60-3679-4593-840f-8d0fbd8a0e3e",
- "metadata": {
- "id": "087d1f60-3679-4593-840f-8d0fbd8a0e3e"
- },
- "source": [
- "## Reuse / Register a Prompt Format Function\n",
- "\n",
- "When we print `model.prompt_format` it writes `canary` which is one of the registered prompt templates available in NeMo ASR.\n",
- "For simplicity's sake, we will continue to use the same prompt format for this tutorial. However, we enable users to define their own prompt formats and register them as needed.\n",
- "\n",
- "Let's see what the `canary` prompt format looks like:"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "c202abaf-63ca-4475-a2bb-3b487be8e375",
- "metadata": {
- "id": "c202abaf-63ca-4475-a2bb-3b487be8e375"
- },
- "outputs": [],
- "source": [
- "from nemo.collections.common.prompts.fn import get_prompt_format_fn, registered_prompt_format_fn"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "07c56dc3-fe42-49fc-936c-770ec17a29ac",
- "metadata": {
- "scrolled": true,
- "id": "07c56dc3-fe42-49fc-936c-770ec17a29ac"
- },
- "outputs": [],
- "source": [
- "canary_prompt_format_fn = get_prompt_format_fn(\"canary\")\n",
- "canary_prompt_format_fn?"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "1170b57c-f4c7-432f-91bb-1dbf73063d60",
- "metadata": {
- "id": "1170b57c-f4c7-432f-91bb-1dbf73063d60"
- },
- "source": [
- "### Registering a New Prompt Format Function"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "d11a8a05-6ba7-41f3-97ab-43453a59c860",
- "metadata": {
- "id": "d11a8a05-6ba7-41f3-97ab-43453a59c860"
- },
- "source": [
- "Just to show that this is user-configurable, we show how to register a dummy prompt format below:"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "f77378ff-d5de-4b86-bfaf-e62b51c7f9ce",
- "metadata": {
- "id": "f77378ff-d5de-4b86-bfaf-e62b51c7f9ce"
- },
- "outputs": [],
- "source": [
- "@registered_prompt_format_fn\n",
- "def canary2(cuts, tokenizer, inference: bool):\n",
- " \"\"\" Users can implement this as needed \"\"\"\n",
- " raise NotImplementedError()\n",
- "\n",
- "print(\"Registered prompt\")"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "cb02f068-8fee-46e1-8096-910062668173",
- "metadata": {
- "id": "cb02f068-8fee-46e1-8096-910062668173"
- },
- "outputs": [],
- "source": [
- "temp = get_prompt_format_fn('canary2')\n",
- "temp.__name__"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "f14aa85b-71cb-4813-837b-b28a384685dc",
- "metadata": {
- "id": "f14aa85b-71cb-4813-837b-b28a384685dc"
- },
- "source": [
- "## Create / Reuse a Prompt Format\n",
- "\n",
- "Canary Multi Task Model comes with a pre-defined prompt template, so we need to provide it data in a format that can be handled by that prompt format class.\n",
- "\n",
- "A `PromptFormatter` is a special class that defines the dialog template of the order of turns that occur in a model's prompt. For example, in Language Models, we normally may begin with either a `System` or `User` turn, followed by an `Assistant` turn which produces an output from the model. Similarly in Multi Task models, we enable support for such a usage pattern.\n",
- "\n",
- "Do note: Current generation of Canary models are not trained to operate on multi turn conversations, however future variants of Multi Task models may support such usage."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "35530cad-84d7-422b-82c5-1bda5c1a4497",
- "metadata": {
- "scrolled": true,
- "id": "35530cad-84d7-422b-82c5-1bda5c1a4497"
- },
- "outputs": [],
- "source": [
- "# Let's review the actual prompt formatter clas docs\n",
- "model.prompt?"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "0cd0c0d1-da8a-4de6-9efc-86a7dd3ed660",
- "metadata": {
- "id": "0cd0c0d1-da8a-4de6-9efc-86a7dd3ed660"
- },
- "outputs": [],
- "source": [
- "# Let's see the actual template of this prompt formatter\n",
- "model.prompt.TEMPLATE"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "72956a2f-f051-42d2-9e08-47e954d88e5c",
- "metadata": {
- "id": "72956a2f-f051-42d2-9e08-47e954d88e5c"
- },
- "source": [
- "---\n",
- "\n",
- "We see that the template contains two turns - `user` and `assistant`.\n",
- "\n",
- "User template looks as follows: `<|startoftranscript|>|source_lang||task||target_lang||pnc|`\n",
- "During execution, we remove the `|` in order to fill in the actual value of the slots provided by the the data loader.\n",
- "\n",
- "User holds the following allowed slots -\n",
- "* `source_lang`\n",
- "* `target_lang`\n",
- "* `task`\n",
- "* `pnc`\n",
- "\n",
- "Similarly, for Assistant template : `|text|<|endoftext|>`\n",
- "\n",
- "Assistant holds the following allowed slots -\n",
- "* `text`"
- ]
- },
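- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "As a hedged illustration of how these slots get filled (the real encoding is done by the prompt formatter and tokenizer, and the exact special tokens are internal model details that may differ between Canary versions), the cell below substitutes example slot values into the user template by hand:"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# Hedged sketch only - the token strings (e.g. <|transcribe|>, <|nopnc|>) are illustrative assumptions.\n",
- "user_template = '<|startoftranscript|>|source_lang||task||target_lang||pnc|'\n",
- "slots = {'source_lang': '<|en|>', 'task': '<|transcribe|>', 'target_lang': '<|en|>', 'pnc': '<|nopnc|>'}\n",
- "\n",
- "filled = user_template\n",
- "for name, value in slots.items():\n",
- "    filled = filled.replace(f'|{name}|', value)\n",
- "print(filled)"
- ]
- },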
- {
- "cell_type": "markdown",
- "id": "540c04af-34d1-4b46-b935-40b16f54ca03",
- "metadata": {
- "id": "540c04af-34d1-4b46-b935-40b16f54ca03"
- },
- "source": [
- "### Creating and Using a Custom Prompt Formatter\n",
- "\n",
- "While we provide a pre-trained model with a pre-defined prompt format, we also enable users to create their own PromptFormatter subclass and change it as needed.\n",
- "\n",
- "Below, we show a simple modification to the model's PromptFormatter and show how to change it."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "0adb576c-df58-4b66-b8fa-8e653da6fead",
- "metadata": {
- "id": "0adb576c-df58-4b66-b8fa-8e653da6fead"
- },
- "outputs": [],
- "source": [
- "# Create a new prompt formatter using the original CanaryPromptFormatter class as baseclass\n",
- "class CanaryPromptFormatterV2(model.prompt.__class__):\n",
- "\n",
- " # make sure to provide a new name\n",
- " NAME: str = \"canary2\"\n",
- "\n",
- " # Make any changes as necessary.\n",
- " # For this demonstration, we will not change anything other than the name"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "f7d85683-ddd0-40c5-956d-e14d09243424",
- "metadata": {
- "id": "f7d85683-ddd0-40c5-956d-e14d09243424"
- },
- "outputs": [],
- "source": [
- "# Next, lets update the model's prompt formatter\n",
- "model.change_prompt(\"canary2\")"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "6581f934-a55b-41df-864a-351d1fb0029e",
- "metadata": {
- "id": "6581f934-a55b-41df-864a-351d1fb0029e"
- },
- "source": [
- "---\n",
- "\n",
- "We have now successfully changed the prompt format to `canary2`.\n",
- "\n",
- "**Note**: It is important to know that when changing the prompt format, the name of the new prompt format class (`canary2` in this case) **has to match** the name of the prompt function registered with `@registered_prompt_format_fn`!"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "c1d84948-8f73-4c31-923f-eaf01d877835",
- "metadata": {
- "scrolled": true,
- "id": "c1d84948-8f73-4c31-923f-eaf01d877835"
- },
- "outputs": [],
- "source": [
- "# Check if everything is ok -\n",
- "model.prompt.__class__.__name__"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "f617cda0-d16b-400a-b495-dac213d318e1",
- "metadata": {
- "id": "f617cda0-d16b-400a-b495-dac213d318e1"
- },
- "outputs": [],
- "source": [
- "model.prompt_format"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "cb964964-e978-43e9-befa-9bb0904db82f",
- "metadata": {
- "id": "cb964964-e978-43e9-befa-9bb0904db82f"
- },
- "source": [
- "---\n",
- "For the rest of the tutorial, we will revert back to the original prompt formatter"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "526093a8-86ba-48f0-a60b-55642720fc4e",
- "metadata": {
- "id": "526093a8-86ba-48f0-a60b-55642720fc4e"
- },
- "outputs": [],
- "source": [
- "model.change_prompt('canary')"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "9c4d2986-89b4-4589-ab0e-69683084cfd4",
- "metadata": {
- "id": "9c4d2986-89b4-4589-ab0e-69683084cfd4"
- },
- "source": [
- "## Creating / Using a Multi Task Dataset\n",
- "\n",
- "Now that we have learned how to modify the model's prompt formatter and the underlying format function that maps manifest items into slots to inject into the prompt template, next let's take a look at how to use and create custom datasets for training multi task models.\n",
- "\n",
- "---\n",
- "\n",
- "Unlike previous tutorials that showcase how to use pre-defined datasets and point them to your manifest files, we will take a slightly more hands-on approach for multi task modes. This is due to shear flexibility of multi task models - they can do almost any task that you can formulate into a \"speech in - text out\" problem.\n",
- "\n",
- "So it is not easy to have a pre-defined dataset class that can handle all new ideas and tasks that researchers can come up with.\n",
- "\n",
- "Instead, we showcase how to build a custom dataset for yourself and use it with the Multi Task model instead."
- ]
- },
- {
- "cell_type": "markdown",
- "id": "b35ca0c2-8ceb-423f-b9ef-7dd6ec5a6952",
- "metadata": {
- "id": "b35ca0c2-8ceb-423f-b9ef-7dd6ec5a6952"
- },
- "source": [
- "---\n",
- "\n",
- "However, we also provide a base class that can be used as is by users if they dont want the hassle of writing their own datasets.\n",
- "\n",
- "This is handled by the `PromptedAudioToTextLhotseDataset` - it maps user defined manifest items to the items defined in the prompt template of the model, so as long as the manifest corresponds to the slots supported by the model, it will be managed by the Dataset automatically."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "3d35d513-8538-4bcb-b892-898f16ad3f0f",
- "metadata": {
- "scrolled": true,
- "id": "3d35d513-8538-4bcb-b892-898f16ad3f0f"
- },
- "outputs": [],
- "source": [
- "from nemo.collections.asr.data.audio_to_text_lhotse_prompted import PromptedAudioToTextLhotseDataset\n",
- "\n",
- "# Uncomment below line to see the class definition of PromptedAudioToTextLhotseDataset\n",
- "# PromptedAudioToTextLhotseDataset??"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "51e3a150-40b9-4599-8c6e-0f01698989b4",
- "metadata": {
- "id": "51e3a150-40b9-4599-8c6e-0f01698989b4"
- },
- "source": [
- "### Creating a New Prompted Dataset"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "56208452-ea18-44c8-8c71-0daef431dc31",
- "metadata": {
- "id": "56208452-ea18-44c8-8c71-0daef431dc31"
- },
- "outputs": [],
- "source": [
- "import torch.utils.data\n",
- "from lhotse import CutSet\n",
- "from lhotse.cut import MixedCut, MonoCut\n",
- "from lhotse.dataset import AudioSamples\n",
- "from lhotse.dataset.collation import collate_vectors\n",
- "\n",
- "from nemo.collections.asr.data.audio_to_text_lhotse import TokenizerWrapper\n",
- "from nemo.collections.asr.data.audio_to_text_lhotse_prompted import PromptedAudioToTextLhotseDataset\n",
- "\n",
- "class MyCanaryPromptedAudioToTextLhotseDataset(torch.utils.data.Dataset):\n",
- " \"\"\"\n",
- " This dataset is based on :class:`~nemo.collections.asr.data.audio_to_text_lhotse.LhotseSpeechToTextBpeDataset`.\n",
- " It is a Lhotse-style dataset that converts a mini-batch of Cuts into tensors.\n",
- " The main difference from ``LhotseSpeechToTextBpeDataset`` is that we introduce\n",
- " a special prompt format for multitask encoder-decoder models.\n",
- "\n",
- " To perform the prompt formatting, we accept a ``prompt_format_fn``.\n",
- " It's expected to accept:\n",
- " * a ``CutSet`` which it will internally iterate over for utterances, and\n",
- " * a ``TokenizerWrapper`` object that will be internally used to tokenize the utterances\n",
- "\n",
- " Tokenized utterances will be extended with special prompt tokens according to ``prompt_format_fn`` logic.\n",
- " We support cuts with multiple supervision segments -- their tokenized texts will be concatenated before we add the prompt tokens.\n",
- " This is useful, for example, in code-switched scenarios where each segment is spoken in a different language.\n",
- " \"\"\"\n",
- "\n",
- " def __init__(\n",
- " self,\n",
- " tokenizer: 'TokenizerSpec',\n",
- " inference: bool = False,\n",
- " ):\n",
- " super().__init__()\n",
- " self.tokenizer = TokenizerWrapper(tokenizer)\n",
- " self.load_audio = AudioSamples(fault_tolerant=True)\n",
- " self.padding_value = self.tokenizer._tokenizer.pad_id\n",
- " self.prompt_format_fn = get_prompt_format_fn('canary') # Use the default canary prompt function\n",
- " self.inference = inference\n",
- "\n",
- " def __getitem__(self, cuts: CutSet) -> tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]:\n",
- " audio, audio_lens, cuts = self.load_audio(cuts)\n",
- "\n",
- " prompts_with_answers, prompts = self.prompt_format_fn(cuts, self.tokenizer, inference=self.inference)\n",
- "\n",
- " prompts_with_answers = [torch.as_tensor(t) for t in prompts_with_answers]\n",
- " prompts_with_answers_lens = torch.tensor([t.size(0) for t in prompts_with_answers], dtype=torch.long)\n",
- " prompts_with_answers = collate_vectors(prompts_with_answers, padding_value=self.padding_value)\n",
- "\n",
- " if self.inference:\n",
- " prompts = [torch.as_tensor(t) for t in prompts]\n",
- " prompts_lens = torch.tensor([t.size(0) for t in prompts], dtype=torch.long)\n",
- " prompts = collate_vectors(prompts, padding_value=self.padding_value)\n",
- " else:\n",
- " prompts = None\n",
- " prompts_lens = None\n",
- "\n",
- " return audio, audio_lens, prompts_with_answers, prompts_with_answers_lens, prompts, prompts_lens"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "5cb71ba1-ce2e-49c7-8126-be7e7851c812",
- "metadata": {
- "id": "5cb71ba1-ce2e-49c7-8126-be7e7851c812"
- },
- "source": [
- "---\n",
- "\n",
- "The above class is mostly a demonstration, but it showcases how users might flexibly change the prompt formatter, prompt format function and even the data set that handles these two in a flexible way.\n",
- "\n",
- "The order of operations is usually this -\n",
- "\n",
- "1) Create a new Prompt Formatter class - this denotes the slots that each turn can have (including new task inputs or other values). This class is auto registered.\n",
- "2) Create a new Prompt Format function - Using `@registered_prompt_format_fn` decorator, write a custom function that accepts args and processes the provided input data from a manifest.\n",
- "3) Create a new Dataset class (usually based on the `PromptedAudioToTextLhotseDataset` dataset) that uses the Prompt Format function to convert manifest items into nicely formatted samples that can be passed to the Prompt Formatter."
- ]
- },
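- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Here is that three-step recipe as a skeletal, hedged sketch - every name (`MyPromptFormatter`, `my_prompt`, `MyPromptedDataset`) is a placeholder, and only the registration utilities demonstrated earlier are reused:"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# Hedged skeleton of the three-step recipe above; all names are placeholders.\n",
- "import torch.utils.data\n",
- "from nemo.collections.common.prompts.fn import get_prompt_format_fn, registered_prompt_format_fn\n",
- "\n",
- "# 1) A new Prompt Formatter class - auto-registered under its NAME\n",
- "class MyPromptFormatter(model.prompt.__class__):\n",
- "    NAME: str = 'my_prompt'\n",
- "\n",
- "# 2) A new Prompt Format function, registered under the matching name\n",
- "@registered_prompt_format_fn\n",
- "def my_prompt(cuts, tokenizer, inference: bool):\n",
- "    raise NotImplementedError('map manifest fields to prompt slots here')\n",
- "\n",
- "# 3) A Dataset that applies the format function to each mini-batch of cuts\n",
- "class MyPromptedDataset(torch.utils.data.Dataset):\n",
- "    def __init__(self, tokenizer):\n",
- "        self.prompt_format_fn = get_prompt_format_fn('my_prompt')"
- ]
- },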
- {
- "cell_type": "markdown",
- "id": "a7bf8078-663e-43cb-b045-0c8b6ef08e30",
- "metadata": {
- "id": "a7bf8078-663e-43cb-b045-0c8b6ef08e30"
- },
- "source": [
- "# Preparing a Canary Dataset\n",
- "\n",
- "Now that we have all the pieces together on the model side, let's take a look on the data side."
- ]
- },
- {
- "cell_type": "markdown",
- "id": "83c9eabc-0473-463e-be1f-ab6d5f519a79",
- "metadata": {
- "id": "83c9eabc-0473-463e-be1f-ab6d5f519a79"
- },
- "source": [
- "## Required Roles Defined by Prompt Format\n",
- "\n",
- "These are the available 'roles' available in the prompt format - they denote at each turn, one role can be enabled and its input or output can be calculated."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "11ff9641-53fd-4481-b414-0edc12bf4dc3",
- "metadata": {
- "id": "11ff9641-53fd-4481-b414-0edc12bf4dc3"
- },
- "outputs": [],
- "source": [
- "model.prompt.get_roles()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "203a67e2-74fd-440c-9658-451f41239f36",
- "metadata": {
- "id": "203a67e2-74fd-440c-9658-451f41239f36"
- },
- "outputs": [],
- "source": [
- "for role in model.prompt.get_roles():\n",
- " print(role, model.prompt.get_slots(role))\n",
- " print()"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "8e887f9d-94e7-4843-9da8-f914e24651f3",
- "metadata": {
- "id": "8e887f9d-94e7-4843-9da8-f914e24651f3"
- },
- "source": [
- "## Create a Data Module\n",
- "\n",
- "Data Modules are one way of organizing datasets in PyTorch Lightning. It provides a unified place where data loading and processing can be potentially handled.\n",
- "\n",
- "**Note**: This isn't strictly necessary - you can achieve the same using just Pytorch dataloaders directly and passing it to Trainer.fit() but we showcase a data module codebase that can be extended by the user."
- ]
- },
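- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "For reference, a minimal hedged sketch of the direct-dataloader alternative (it assumes `train_loader` and `val_loader` were already built, e.g. with `get_lhotse_dataloader_from_config`, so it is left commented out):"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# import pytorch_lightning as pl\n",
- "# trainer = pl.Trainer(max_steps=200, logger=False, enable_checkpointing=False)\n",
- "# trainer.fit(model, train_dataloaders=train_loader, val_dataloaders=val_loader)"
- ]
- },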
- {
- "cell_type": "markdown",
- "id": "51d58931-4166-4ab9-a755-4c5268001192",
- "metadata": {
- "id": "51d58931-4166-4ab9-a755-4c5268001192"
- },
- "source": [
- "----\n",
- "\n",
- "In our CanaryAN4DataModule - we will perform two tasks. One is En ASR - transcribing the AN4 English dataset. Another is En to De AST - directly translating the english audio to German text.\n",
- "\n",
- "For simplicity's sake, we will use a small off-the-shelf model to perform the translation of English Transcripts to German."
- ]
- },
- {
- "cell_type": "markdown",
- "id": "91ed74ca-5d5e-412d-a813-0659014aa9a3",
- "metadata": {
- "id": "91ed74ca-5d5e-412d-a813-0659014aa9a3"
- },
- "source": [
- "---\n",
- "\n",
- "In NeMo 2.0, we utilize [Lhotse](https://github.com/lhotse-speech/lhotse) as our data backbone for speech tasks, which simplifies using custom speech datasets.\n",
- "\n",
- "Most of the magic is handled by the following code\n",
- "\n",
- "```python\n",
- "from nemo.collections.common.data.lhotse import get_lhotse_dataloader_from_config\n",
- "\n",
- "get_lhotse_dataloader_from_config(\n",
- " OmegaConf.create(config), # Pass in a config that points to the manifest files and other arguments\n",
- " global_rank=self.trainer.global_rank,\n",
- " world_size=self.trainer.world_size,\n",
- " # Pass in the dataset class for Lhotse to handle. This class now receives CutSet as input.\n",
- " dataset=MyCanaryPromptedAudioToTextLhotseDataset(tokenizer=self.tokenizer, inference=inference),\n",
- ")\n",
- "```"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "4a15ab9b-7603-4ac5-890c-92a541a0527c",
- "metadata": {
- "id": "4a15ab9b-7603-4ac5-890c-92a541a0527c"
- },
- "outputs": [],
- "source": [
- "import os\n",
- "import glob\n",
- "import json\n",
- "import copy\n",
- "import subprocess\n",
- "import tarfile\n",
- "import wget\n",
- "import librosa\n",
- "import tqdm\n",
- "from omegaconf import OmegaConf\n",
- "\n",
- "from torch.utils.data import DataLoader, Dataset\n",
- "\n",
- "import pytorch_lightning as L\n",
- "\n",
- "from transformers import T5Tokenizer, T5ForConditionalGeneration\n",
- "\n",
- "from nemo.collections.asr.parts.utils.manifest_utils import read_manifest, write_manifest\n",
- "from nemo.collections.common.data.lhotse import get_lhotse_dataloader_from_config\n",
- "\n",
- "\n",
- "# Function to build a manifest\n",
- "def build_manifest(transcripts_path, manifest_path, wav_path, data_dir):\n",
- " with open(transcripts_path, 'r') as fin:\n",
- " with open(manifest_path, 'w') as fout:\n",
- " for line in fin:\n",
- " # Lines look like this:\n",
- " # transcript (fileID)\n",
- " transcript = line[: line.find('(')-1].lower()\n",
- " transcript = transcript.replace('', '').replace('', '')\n",
- " transcript = transcript.strip()\n",
- "\n",
- " file_id = line[line.find('(')+1 : -2] # e.g. \"cen4-fash-b\"\n",
- " audio_path = os.path.join(\n",
- " data_dir, wav_path,\n",
- " file_id[file_id.find('-')+1 : file_id.rfind('-')],\n",
- " file_id + '.wav')\n",
- "\n",
- " duration = librosa.core.get_duration(path=audio_path)\n",
- "\n",
- " # Write the metadata to the manifest\n",
- " metadata = {\n",
- " \"audio_filepath\": audio_path,\n",
- " \"duration\": duration,\n",
- " \"text\": transcript,\n",
- " \"pnc\": \"no\",\n",
- " \"source_lang\": \"en\",\n",
- " \"target_lang\": \"en\",\n",
- " \"task\": \"asr\",\n",
- " }\n",
- " json.dump(metadata, fout)\n",
- " fout.write('\\n')\n",
- "\n",
- " return manifest_path\n",
- "\n",
- "\n",
- "class CanaryAN4DataModule(L.LightningDataModule):\n",
- "\n",
- " def __init__(self, tokenizer, data_dir: str = \"./an4/\", batch_size=8):\n",
- " super().__init__()\n",
- " self.tokenizer = tokenizer\n",
- " self.data_dir = data_dir\n",
- " self.batch_size = batch_size\n",
- "\n",
- " # ASR manifests\n",
- " self.train_manifest = data_dir + '/an4/train_manifest.json'\n",
- " self.test_manifest = data_dir + '/an4/test_manifest.json'\n",
- "\n",
- " # AST manifests\n",
- " self.ast_train_manifest = data_dir + '/an4/ast_train_manifest.json'\n",
- " self.ast_test_manifest = data_dir + '/an4/ast_test_manifest.json'\n",
- "\n",
- " # Combined manifests\n",
- " self.combined_train_manifest = data_dir + '/an4/combined_train_manifest.json'\n",
- " self.combined_test_manifest = data_dir + '/an4/combined_test_manifest.json'\n",
- "\n",
- " def setup(self, stage):\n",
- " # make assignments here (val/train/test split)\n",
- " # called on every process in DDP\n",
- " # Assign train/val datasets for use in dataloaders\n",
- " pass\n",
- "\n",
- " def train_dataloader(self):\n",
- " config = {'manifest_filepath': self.combined_train_manifest, 'batch_size': self.batch_size,\n",
- " 'num_workers': 4, 'shuffle': True, 'min_duration': 0.3, 'max_duration': 10.0}\n",
- " return self._setup_dataloader(config)\n",
- "\n",
- " def val_dataloader(self):\n",
- " config = {'manifest_filepath': self.combined_test_manifest, 'batch_size': self.batch_size,\n",
- " 'num_workers': 4, 'shuffle': False, 'min_duration': 0.3, 'max_duration': 10.0}\n",
- " return self._setup_dataloader(config, inference=True)\n",
- "\n",
- " def test_dataloader(self):\n",
- " config = {'manifest_filepath': self.combined_test_manifest, 'batch_size': self.batch_size,\n",
- " 'num_workers': 4, 'shuffle': False, 'min_duration': 0.3, 'max_duration': 10.0}\n",
- " return self._setup_dataloader(config, inference=True)\n",
- "\n",
- " def teardown(self, stage):\n",
- " # clean up after fit or test\n",
- " # called on every process in DDP\n",
- " pass\n",
- "\n",
- " def _setup_dataloader(self, config, inference: bool = False):\n",
- " \"\"\"\n",
- " The main function that creates the data loader using Lhotse's integration with NeMo.\n",
- " \"\"\"\n",
- " return get_lhotse_dataloader_from_config(\n",
- " OmegaConf.create(config),\n",
- " global_rank=self.trainer.global_rank,\n",
- " world_size=self.trainer.world_size,\n",
- " # Note the passing of our custom dataset\n",
- " dataset=MyCanaryPromptedAudioToTextLhotseDataset(tokenizer=self.tokenizer, inference=inference),\n",
- " )\n",
- "\n",
- " def prepare_data(self):\n",
- " # download, split, etc...\n",
- " # only called on 1 GPU/TPU in distributed\n",
- " if not os.path.exists(self.data_dir):\n",
- " os.makedirs(self.data_dir)\n",
- "\n",
- " data_dir = self.data_dir\n",
- " if not os.path.exists(data_dir + '/an4_sphere.tar.gz'):\n",
- " an4_url = 'https://dldata-public.s3.us-east-2.amazonaws.com/an4_sphere.tar.gz'\n",
- " an4_path = wget.download(an4_url, data_dir)\n",
- " print(f\"Dataset downloaded at: {an4_path}\")\n",
- " else:\n",
- " print(\"Tarfile already exists.\")\n",
- " an4_path = data_dir + '/an4_sphere.tar.gz'\n",
- "\n",
- " if not os.path.exists(data_dir + '/an4/'):\n",
- " # Untar and convert .sph to .wav (using sox)\n",
- " tar = tarfile.open(an4_path)\n",
- " tar.extractall(path=data_dir)\n",
- "\n",
- " print(\"Converting .sph to .wav...\")\n",
- " sph_list = glob.glob(data_dir + '/an4/**/*.sph', recursive=True)\n",
- " for sph_path in sph_list:\n",
- " wav_path = sph_path[:-4] + '.wav'\n",
- " cmd = [\"sox\", sph_path, wav_path]\n",
- " subprocess.run(cmd)\n",
- " print(\"Finished conversion.\\n******\")\n",
- "\n",
- " # Building Manifests\n",
- " print(\"******\")\n",
- " train_transcripts = data_dir + '/an4/etc/an4_train.transcription'\n",
- " train_manifest = self.train_manifest\n",
- " if not os.path.isfile(train_manifest):\n",
- " build_manifest(train_transcripts, train_manifest, 'an4/wav/an4_clstk', data_dir)\n",
- " print(\"Training manifest created.\")\n",
- "\n",
- " test_transcripts = data_dir + '/an4/etc/an4_test.transcription'\n",
- " test_manifest = self.test_manifest\n",
- " if not os.path.isfile(test_manifest):\n",
- " build_manifest(test_transcripts, test_manifest, 'an4/wav/an4test_clstk', data_dir)\n",
- " print(\"Test manifest created.\")\n",
- " print(\"*** Wrote manifests for Eng ***\")\n",
- "\n",
- " train_manifest_data = read_manifest(self.train_manifest)\n",
- " test_manifest_data = read_manifest(self.test_manifest)\n",
- "\n",
- " if not os.path.isfile(self.ast_train_manifest) or not os.path.isfile(self.ast_test_manifest) or not os.path.isfile(self.combined_train_manifest) or not os.path.isfile(self.combined_test_manifest):\n",
- " tokenizer = T5Tokenizer.from_pretrained(\"google-t5/t5-small\")\n",
- " t5_model = T5ForConditionalGeneration.from_pretrained(\"google-t5/t5-small\")\n",
- "\n",
- " if torch.cuda.is_available():\n",
- " t5_model = t5_model.cuda()\n",
- "\n",
- " def pipe(text):\n",
- " if isinstance(text, str):\n",
- " text = [text]\n",
- "\n",
- " prefix = \"translate English to German\"\n",
- " prompts = [prefix + \": \" + x for x in text]\n",
- " input_ids = tokenizer(prompts, return_tensors=\"pt\", padding=True, truncation=True).input_ids\n",
- " input_ids = input_ids.to(t5_model.device)\n",
- " outputs = t5_model.generate(input_ids, max_new_tokens=64)\n",
- " return [tokenizer.decode(output, skip_special_tokens=True) for output in outputs]\n",
- "\n",
- " ast_train_manifest_data = copy.deepcopy(train_manifest_data)\n",
- " ast_test_manifest_data = copy.deepcopy(test_manifest_data)\n",
- "\n",
- " print(\"Translating train set\")\n",
- " train_texts = [x['text'] for x in train_manifest_data]\n",
- " BATCH_SIZE = 32\n",
- "\n",
- " for i in tqdm.tqdm(range(0, len(train_texts), BATCH_SIZE), total=len(train_texts) // BATCH_SIZE):\n",
- " batch_texts = train_texts[i:i+BATCH_SIZE]\n",
- " batch_texts = pipe(batch_texts)\n",
- " for j, text in enumerate(batch_texts):\n",
- " ast_train_manifest_data[i+j]['text'] = text\n",
- " ast_train_manifest_data[i+j]['task'] = 'ast'\n",
- " ast_train_manifest_data[i+j]['target_lang'] = 'de'\n",
- "\n",
- " print(\"Translating test set\")\n",
- " for data in tqdm.tqdm(ast_test_manifest_data, total=len(ast_test_manifest_data)):\n",
- " data['text'] = pipe(data['text'])[0]\n",
- " data['task'] = 'ast'\n",
- " data['target_lang'] = 'de'\n",
- "\n",
- " write_manifest(self.ast_train_manifest, ast_train_manifest_data)\n",
- " write_manifest(self.ast_test_manifest, ast_test_manifest_data)\n",
- "\n",
- " print(\"*** Wrote ast manifests ***\")\n",
- "\n",
- " combined_train, combined_test = [], []\n",
- " combined_train.extend(train_manifest_data)\n",
- " combined_train.extend(ast_train_manifest_data)\n",
- "\n",
- " combined_test.extend(test_manifest_data)\n",
- " combined_test.extend(ast_test_manifest_data)\n",
- "\n",
- " write_manifest(self.combined_train_manifest, combined_train)\n",
- " write_manifest(self.combined_test_manifest, combined_test)\n",
- " print(\"*** Wrote combined manifests ***\")\n",
- "\n",
- " else:\n",
- " print(\"*** Wrote ast and combined manifests ***\")\n"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "e06e697d-7dc2-489f-a52f-195946bfbf6e",
- "metadata": {
- "id": "e06e697d-7dc2-489f-a52f-195946bfbf6e"
- },
- "source": [
- "---\n",
- "\n",
- "Each item in the prepared manifest has the following items by default.\n",
- "\n",
- "As you will recognize, these are the same keys provided by the `CanaryPromptFormatter` classes `slots` argument, so each of these values in the is mapped back to those slots.\n",
- "\n",
- "```python\n",
- "metadata = {\n",
- " \"audio_filepath\": audio_path,\n",
- " \"duration\": duration,\n",
- " \"text\": transcript,\n",
- " \"pnc\": \"no\",\n",
- " \"source_lang\": \"en\",\n",
- " \"target_lang\": \"en\",\n",
- " \"task\": \"asr\",\n",
- "}\n",
- "```\n",
- "\n",
- "The most important function in the Data Module above is `prepare_data()`:\n",
- "\n",
- "1) It first downloads and converts the AN4 audio files to wav files.\n",
- "2) Then it writes a new manifest file with the above keys for ASR task\n",
- "3) It then translates the En transcripts with a `t5-small` model to generate German transcripts\n",
- "4) Finally it writes another manifest for the AST task with these translated texts.\n",
- "5) Finally it builds a combined manifest item for both ASR (en) and AST (en to de) multi-task training\n",
- "\n",
- "**Note**: We are using prepare_data() only for demonstration. Normally, users should process before experimentation, and so they would only need to implement methods above prepare_data() in their Data Module."
- ]
- },
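- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "To make the two tasks concrete, here are two hypothetical manifest rows of the shape `prepare_data()` produces (the path, duration, and texts are illustrative, not real AN4 entries):"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# Hypothetical rows for illustration only - the real rows are written by prepare_data().\n",
- "asr_row = {\"audio_filepath\": \"an4/wav/an4_clstk/fash/cen4-fash-b.wav\", \"duration\": 2.1,\n",
- "           \"text\": \"good morning\", \"pnc\": \"no\", \"source_lang\": \"en\", \"target_lang\": \"en\", \"task\": \"asr\"}\n",
- "ast_row = {**asr_row, \"task\": \"ast\", \"target_lang\": \"de\", \"text\": \"guten morgen\"}\n",
- "\n",
- "print(asr_row)\n",
- "print(ast_row)"
- ]
- },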
- {
- "cell_type": "markdown",
- "id": "739f0141-1e0e-4db7-b1f6-9d13589bf50c",
- "metadata": {
- "id": "739f0141-1e0e-4db7-b1f6-9d13589bf50c"
- },
- "source": [
- "## Download and Prepare Dataset"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "323287f1-9a44-49ab-8438-dcbf34bf2ebe",
- "metadata": {
- "id": "323287f1-9a44-49ab-8438-dcbf34bf2ebe"
- },
- "outputs": [],
- "source": [
- "data_module = CanaryAN4DataModule(tokenizer=model.tokenizer, batch_size=16)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "123faf0d-05b2-4f12-850f-350a175ba7c1",
- "metadata": {
- "scrolled": true,
- "id": "123faf0d-05b2-4f12-850f-350a175ba7c1"
- },
- "outputs": [],
- "source": [
- "data_module.prepare_data()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "fbec085b-9600-49bd-8739-73e5e8e3773f",
- "metadata": {
- "id": "fbec085b-9600-49bd-8739-73e5e8e3773f"
- },
- "outputs": [],
- "source": [
- "!head -n 5 {data_module.train_manifest}"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "66bad9ac-3bad-4d84-8b30-830856c06804",
- "metadata": {
- "id": "66bad9ac-3bad-4d84-8b30-830856c06804"
- },
- "outputs": [],
- "source": [
- "!head -n 5 {data_module.ast_train_manifest}"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "cde19c46-e78c-4d7c-adbf-f1559c9203e1",
- "metadata": {
- "id": "cde19c46-e78c-4d7c-adbf-f1559c9203e1"
- },
- "source": [
- "# Evaluate Model before Training\n",
- "\n",
- "Canary Multi Task model is already very capable, achieving strong scores on multiple benchmarks. So we first evaluate the baseline numbers on the two tasks\n",
- "\n",
- "1) ASR: WER calculation on transcripts\n",
- "\n",
- "2) AST: SacreBLEU calculation on translations"
- ]
- },
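- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "One detail worth knowing about `torchmetrics`' `SacreBLEUScore`: it expects a *list of references per hypothesis*, which is why the ground truths are wrapped in single-element lists further below. A tiny self-contained check:"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "from torchmetrics.text import SacreBLEUScore\n",
- "\n",
- "metric = SacreBLEUScore(n_gram=4)\n",
- "# One hypothesis, and one list of references for it - an exact match scores 1.0\n",
- "metric.update([\"das ist ein kleiner test\"], [[\"das ist ein kleiner test\"]])\n",
- "print(metric.compute())"
- ]
- },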
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "eb4588b4-7d52-4c4e-bb81-2bcb5a227afd",
- "metadata": {
- "id": "eb4588b4-7d52-4c4e-bb81-2bcb5a227afd"
- },
- "outputs": [],
- "source": [
- "from nemo.collections.asr.metrics.wer import word_error_rate\n",
- "from torchmetrics.text import SacreBLEUScore"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "a1c71044-3cb3-453c-bfcd-ee551cecdddf",
- "metadata": {
- "id": "a1c71044-3cb3-453c-bfcd-ee551cecdddf"
- },
- "outputs": [],
- "source": [
- "asr_test = read_manifest(data_module.test_manifest)\n",
- "ast_test = read_manifest(data_module.ast_test_manifest)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "f1d8acd2-aa08-4ba0-b0c6-c5d662243b00",
- "metadata": {
- "id": "f1d8acd2-aa08-4ba0-b0c6-c5d662243b00"
- },
- "outputs": [],
- "source": [
- "asr_filepaths = [x['audio_filepath'] for x in asr_test]\n",
- "asr_gt = [x['text'] for x in asr_test]\n",
- "\n",
- "ast_filepaths = [x['audio_filepath'] for x in ast_test]\n",
- "ast_gt = [x['text'] for x in ast_test]\n",
- "\n",
- "print(\"Num files:\", len(asr_filepaths))"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "85ace700-97bf-4697-8e1a-5793eb21e678",
- "metadata": {
- "id": "85ace700-97bf-4697-8e1a-5793eb21e678"
- },
- "outputs": [],
- "source": [
- "if torch.cuda.is_available():\n",
- " model = model.cuda() # move model to gpu\n",
- " model = model.to(torch.bfloat16) # cast full model to bfloat16"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "00f2607a-2f67-47fe-9903-0adae4d9adf5",
- "metadata": {
- "id": "00f2607a-2f67-47fe-9903-0adae4d9adf5"
- },
- "outputs": [],
- "source": [
- "asr_preds = model.transcribe(asr_filepaths, pnc='no', task='asr', source_lang='en', target_lang='en', batch_size=32)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "eea5ab20-60d4-4e19-87fb-71f6835941e8",
- "metadata": {
- "id": "eea5ab20-60d4-4e19-87fb-71f6835941e8"
- },
- "outputs": [],
- "source": [
- "ast_preds = model.transcribe(ast_filepaths, pnc='no', task='ast', source_lang='en', target_lang='de', batch_size=32)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "69e5bb54-5193-4268-98e1-dc6daae8f6eb",
- "metadata": {
- "id": "69e5bb54-5193-4268-98e1-dc6daae8f6eb"
- },
- "outputs": [],
- "source": [
- "wer = word_error_rate(asr_preds, asr_gt)\n",
- "print(\"WER\", wer)\n",
- "\n",
- "sacrebleu = SacreBLEUScore(n_gram=4)\n",
- "scores = []\n",
- "preds = []\n",
- "gts = []\n",
- "for pred, gt in zip(ast_preds, ast_gt):\n",
- " preds.append(pred)\n",
- " gts.append([gt])\n",
- "\n",
- "# bleu = sum(scores) / len(scores)\n",
- "sacrebleu.update(preds, gts)\n",
- "bleu = sacrebleu.compute()\n",
- "print(\"BLEU\", bleu.item() * 100)"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "5ee530c9-36a3-47d2-83b9-b2a64080c0eb",
- "metadata": {
- "id": "5ee530c9-36a3-47d2-83b9-b2a64080c0eb"
- },
- "source": [
- "# Train Model\n",
- "\n",
- "Finally, now that adapters have been prepared, model has been evaluated for a baseline and the dataset is prepared, it's time to train the adapter weights on the new datasets.\n",
- "\n",
- "---\n",
- "\n",
- "First, we update the optimizer and scheduler config"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "d0a40461-d739-436c-967a-1a0f8a3ad197",
- "metadata": {
- "id": "d0a40461-d739-436c-967a-1a0f8a3ad197"
- },
- "outputs": [],
- "source": [
- "print(OmegaConf.to_yaml(model.cfg.optim))"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "4ba5811a-fc42-4de5-add5-0d26d1c84219",
- "metadata": {
- "id": "4ba5811a-fc42-4de5-add5-0d26d1c84219"
- },
- "outputs": [],
- "source": [
- "# Setup optimization\n",
- "model.cfg.optim.lr = 3e-4\n",
- "model.cfg.optim.sched.warmup_steps = 25"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "d1de270a-d1cb-4080-b571-7acf365d7b99",
- "metadata": {
- "id": "d1de270a-d1cb-4080-b571-7acf365d7b99"
- },
- "source": [
- "---\n",
- "\n",
- "Next, we setup a Lightning Trainer and Experiment Manager"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "b9e34369-21ec-41bf-beae-30b60ab46c14",
- "metadata": {
- "id": "b9e34369-21ec-41bf-beae-30b60ab46c14"
- },
- "outputs": [],
- "source": [
- "from omegaconf import OmegaConf\n",
- "from nemo.utils import exp_manager"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "46f74863-a34d-4ad0-9d8e-3337ea5edd63",
- "metadata": {
- "id": "46f74863-a34d-4ad0-9d8e-3337ea5edd63"
- },
- "outputs": [],
- "source": [
- "trainer = L.Trainer(max_steps=200, accumulate_grad_batches=1, logger=False, enable_checkpointing=False, check_val_every_n_epoch=5)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "414d7887-bed5-46a2-bfe1-8349db1e6b5b",
- "metadata": {
- "id": "414d7887-bed5-46a2-bfe1-8349db1e6b5b"
- },
- "outputs": [],
- "source": [
- "# # Environment variable generally used for multi-node multi-gpu training.\n",
- "# # In notebook environments, this flag is unnecessary and can cause logs of multiple training runs to overwrite each other.\n",
- "# os.environ.pop('NEMO_EXPM_VERSION', None)\n",
- "\n",
- "# config = exp_manager.ExpManagerConfig(\n",
- "# exp_dir=f'experiments/canary/',\n",
- "# name=f\"Canary-Model-Adapter-Training\",\n",
- "# checkpoint_callback_params=exp_manager.CallbackParams(\n",
- "# monitor=\"val_wer\",\n",
- "# mode=\"min\",\n",
- "# always_save_nemo=False,\n",
- "# save_best_model=False,\n",
- "# ),\n",
- "# )\n",
- "\n",
- "# config = OmegaConf.structured(config)\n",
- "\n",
- "# logdir = exp_manager.exp_manager(trainer, config)"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "60769859-8ed5-4f9c-b93a-a6875c7c1c73",
- "metadata": {
- "id": "60769859-8ed5-4f9c-b93a-a6875c7c1c73"
- },
- "source": [
- "---\n",
- "\n",
- "Begin training !"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "2adb8607-a011-440d-bfa8-976c2871e8ef",
- "metadata": {
- "scrolled": true,
- "id": "2adb8607-a011-440d-bfa8-976c2871e8ef"
- },
- "outputs": [],
- "source": [
- "trainer.fit(model, data_module)"
- ]
- },
- {
- "cell_type": "markdown",
- "source": [
- "---\n",
- "\n",
- "Save just the adapter parameters - which is less than 2 MB !"
- ],
- "metadata": {
- "id": "MImbKiqQ6ng-"
- },
- "id": "MImbKiqQ6ng-"
- },
- {
- "cell_type": "code",
- "source": [
- "model.save_adapters(\"adapters.pt\")\n",
- "!ls -l -- *.pt\n",
- "!du -sh *.pt"
- ],
- "metadata": {
- "id": "-akTdyGM6gum"
- },
- "id": "-akTdyGM6gum",
- "execution_count": null,
- "outputs": []
- },
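- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "A hedged sketch of restoring the saved adapters into a fresh base model later (left commented out to avoid re-downloading the checkpoint here; `load_adapters` is assumed to be the counterpart of `save_adapters`):"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# restored = nemo_asr.models.ASRModel.from_pretrained(\"nvidia/canary-1b\")\n",
- "# restored.replace_adapter_compatible_modules()\n",
- "# restored.load_adapters(\"adapters.pt\")"
- ]
- },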
- {
- "cell_type": "markdown",
- "id": "2525bec5-c42b-48c1-b03c-e8126c346238",
- "metadata": {
- "id": "2525bec5-c42b-48c1-b03c-e8126c346238"
- },
- "source": [
- "# Evaluate after Adaptation\n",
- "\n",
- "Now that the model is done training, lets evaluate its scores on the test set again.\n",
- "We should see a markedly higher translation BLEU and lower WER from above."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "6edb5528-b1b6-4505-8cdc-ee68c715415e",
- "metadata": {
- "id": "6edb5528-b1b6-4505-8cdc-ee68c715415e"
- },
- "outputs": [],
- "source": [
- "asr_test = read_manifest(data_module.test_manifest)\n",
- "ast_test = read_manifest(data_module.ast_test_manifest)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "384aa5f2-89d5-4080-a717-4d65776fae6b",
- "metadata": {
- "id": "384aa5f2-89d5-4080-a717-4d65776fae6b"
- },
- "outputs": [],
- "source": [
- "asr_filepaths = [x['audio_filepath'] for x in asr_test]\n",
- "asr_gt = [x['text'] for x in asr_test]\n",
- "\n",
- "ast_filepaths = [x['audio_filepath'] for x in ast_test]\n",
- "ast_gt = [x['text'] for x in ast_test]\n",
- "\n",
- "print(\"Num files:\", len(asr_filepaths))"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "48ce5b4c-d349-4d86-ad3c-ee930bb569ee",
- "metadata": {
- "id": "48ce5b4c-d349-4d86-ad3c-ee930bb569ee"
- },
- "outputs": [],
- "source": [
- "if torch.cuda.is_available():\n",
- " model = model.cuda()\n",
- " model = model.to(torch.bfloat16)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "49a37806-286e-4954-8f27-3829cf61d755",
- "metadata": {
- "id": "49a37806-286e-4954-8f27-3829cf61d755"
- },
- "outputs": [],
- "source": [
- "asr_preds = model.transcribe(asr_filepaths, pnc='no', task='asr', source_lang='en', target_lang='en', batch_size=32)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "b701e014-2f71-487c-9300-a3ea89a43a45",
- "metadata": {
- "id": "b701e014-2f71-487c-9300-a3ea89a43a45"
- },
- "outputs": [],
- "source": [
- "ast_preds = model.transcribe(ast_filepaths, pnc='no', task='ast', source_lang='en', target_lang='de', batch_size=32)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "087054e5-c511-4094-a115-faf4a3b49d51",
- "metadata": {
- "id": "087054e5-c511-4094-a115-faf4a3b49d51"
- },
- "outputs": [],
- "source": [
- "from nemo.collections.asr.metrics.wer import word_error_rate\n",
- "from torchmetrics.text import SacreBLEUScore"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "ef938f8f-b2db-45f6-9b30-4b3bbce2423f",
- "metadata": {
- "id": "ef938f8f-b2db-45f6-9b30-4b3bbce2423f"
- },
- "outputs": [],
- "source": [
- "wer = word_error_rate(asr_preds, asr_gt)\n",
- "print(\"WER\", wer)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "5a7c2820-d394-4627-8438-0d810d89b72d",
- "metadata": {
- "id": "5a7c2820-d394-4627-8438-0d810d89b72d"
- },
- "outputs": [],
- "source": [
- "sacrebleu = SacreBLEUScore(n_gram=4)\n",
- "scores = []\n",
- "preds = []\n",
- "gts = []\n",
- "for pred, gt in zip(ast_preds, ast_gt):\n",
- " preds.append(pred)\n",
- " gts.append([gt])\n",
- "\n",
- "# bleu = sum(scores) / len(scores)\n",
- "sacrebleu.update(preds, gts)\n",
- "bleu = sacrebleu.compute()\n",
- "print(\"BLEU\", bleu.item() * 100)"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "521df0e6-1d3c-4709-a080-63638315c514",
- "metadata": {
- "id": "521df0e6-1d3c-4709-a080-63638315c514"
- },
- "source": [
- "# Conclusion\n",
- "\n",
- "In this tutorial we added adapters to a Multi Task model (Nvidia Canary) and show how to create a custom dataset to finetune a canary model to a new dataset with previous tasks such as ASR and AST. The primary goal of this tutorial was to show how to flexibly adapt a Canary model to any of the pre-existing tasks.\n",
- "\n",
- "In a future tutorial, we will show how to add additional tasks to a pre-trained Canary, so that you can leverage the pre-trained encoder and decoder for your own custom tasks!"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.10.14"
- },
- "colab": {
- "provenance": [],
- "gpuType": "T4"
- },
- "accelerator": "GPU"
- },
- "nbformat": 4,
- "nbformat_minor": 5
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "b0373c4a-e565-4e8f-a87f-aae932d3aeed",
+ "metadata": {
+ "id": "b0373c4a-e565-4e8f-a87f-aae932d3aeed"
+ },
+ "outputs": [],
+ "source": [
+ "\"\"\"\n",
+ "You can run either this notebook locally (if you have all the dependencies and a GPU) or on Google Colab.\n",
+ "\n",
+ "Instructions for setting up Colab are as follows:\n",
+ "1. Open a new Python 3 notebook.\n",
+ "2. Import this notebook from GitHub (File -> Upload Notebook -> \"GitHub\" tab -> copy/paste GitHub URL)\n",
+ "3. Connect to an instance with a GPU (Runtime -> Change runtime type -> select \"GPU\" for hardware accelerator)\n",
+ "4. Run this cell to set up dependencies.\n",
+ "5. Restart the runtime (Runtime -> Restart Runtime) for any upgraded packages to take effect\n",
+ "\n",
+ "\n",
+ "NOTE: User is responsible for checking the content of datasets and the applicable licenses and determining if suitable for the intended use.\n",
+ "\"\"\"\n",
+ "# If you're using Google Colab and not running locally, run this cell.\n",
+ "import os\n",
+ "\n",
+ "# Install dependencies\n",
+ "!pip install wget\n",
+ "!apt-get install sox libsndfile1 ffmpeg\n",
+ "!pip install text-unidecode\n",
+ "!pip install matplotlib>=3.3.2\n",
+ "\n",
+ "## Install NeMo\n",
+ "BRANCH = 'main'\n",
+ "!python -m pip install \"nemo_toolkit[asr] @ git+https://github.com/NVIDIA/NeMo.git@$BRANCH\""
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "6c021f07-0576-491d-b73c-6c65c8501351",
+ "metadata": {
+ "id": "6c021f07-0576-491d-b73c-6c65c8501351"
+ },
+ "source": [
+ "# Multi Task Adaptation with Adapters\n",
+ "\n",
+ "\n",
+ "In earlier tutorials, we utilized a specific model for one task - for example, an ASR model (CTC, RNN-T etc) for the singular task of Speech Recognition. This is very useful if we want to specialize one task per model, but it can be expensive to deploy a fleet of models for each task, and learn routers to pass user tasks to correct models.\n",
+ "\n",
+ "We now support Multi Task models in NeMo, such that a single model can perform multiple tasks such as speech recognition, speech translation, voice activity detection, and more in the future. With one model supporting multiple tasks, we can simplify the task of deploying models and also hope to leverage individual tasks to improve each other (for example: you do need strong speech recognition first before you start doing translation).\n",
+ "\n",
+ "---\n",
+ "\n",
+ "Multi Task (Canary) models are highly capable large neural networks capable of things like speech recognition, X to English and English to X translation and able to select whether to transcribe speech with punctuation and capitalization. These huge models are trained on several thousand hours of speech and text data, making it challenging to adapt to new datasets.\n",
+ "\n",
+ "In the previous tutorial for [ASR Adapters](https://github.com/NVIDIA/NeMo/blob/main/tutorials/asr/asr_adapters/ASR_with_Adapters.ipynb), we used small adapter modules to tune a large ASR model on a small amount of data. In this tutorial, we will adapt a [Nvidia Canary](https://huggingface.co/nvidia/canary-1b) model onto a small amount of speech data for both Automatic Speech Recognition (ASR) and Automatic Speech Translation (AST).\n",
+ "\n",
+ "In this tutorial, we will also demonstrate a simple way of creating custom Data Modules from PyTorch Lightning to design custom datasets and data loaders for the highly flexible Multi Task Models in NeMo ASR. This offers users more flexibility in designing new tasks, and finetuning the models on small amounts of data."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "cbe2f8eb-204f-4d90-bb0a-a49d994f1ed7",
+ "metadata": {
+ "id": "cbe2f8eb-204f-4d90-bb0a-a49d994f1ed7"
+ },
+ "source": [
+ "----\n",
+ "\n",
+ "First, lets instantiate the [Canary](https://huggingface.co/nvidia/canary-1b) model"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "46c3e5c1-b4f2-4f84-89d6-c77bbe7ebe4f",
+ "metadata": {
+ "id": "46c3e5c1-b4f2-4f84-89d6-c77bbe7ebe4f"
+ },
+ "outputs": [],
+ "source": [
+ "import os\n",
+ "import json\n",
+ "\n",
+ "import nemo.collections.asr as nemo_asr"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "48b9677b-b1d9-4361-becf-ee84fe8d53ca",
+ "metadata": {
+ "id": "48b9677b-b1d9-4361-becf-ee84fe8d53ca"
+ },
+ "outputs": [],
+ "source": [
+ "model = nemo_asr.models.ASRModel.from_pretrained(\"nvidia/canary-1b\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "6c0c87c9-5290-4634-9338-818f181c936a",
+ "metadata": {
+ "id": "6c0c87c9-5290-4634-9338-818f181c936a"
+ },
+ "source": [
+ "# Enable Adapter Support in Model\n",
+ "\n",
+ "New in NeMo 2.0, we now have a simple utility function to convert the model into one that supports adapters, called `replace_adapter_compatible_modules()`.\n",
+ "\n",
+ "This will go through the full model and check modules if they support adapters, and then enable that ability. Once used, you can freely use adapter methods."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "bfd72316-630b-43c3-9a02-65bb2dabe624",
+ "metadata": {
+ "id": "bfd72316-630b-43c3-9a02-65bb2dabe624",
+ "scrolled": true
+ },
+ "outputs": [],
+ "source": [
+ "model.replace_adapter_compatible_modules()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "30505bd5-323f-4e90-a941-d0de3f6e55e3",
+ "metadata": {
+ "id": "30505bd5-323f-4e90-a941-d0de3f6e55e3"
+ },
+ "source": [
+ "## Check Which Targets Are Supported For This Model\n",
+ "\n",
+ "Now that the model has enabled adapter support, lets take a look at which of its modules support adapter modules to be attached to them.\n",
+ "\n",
+ "**Note**\n",
+ "Below, you might see an adapter module with no name `''` - this corresponds to the \"default\" model target if the target isn't specified. Users can chose to simply skip the module name when adding an adapter, and the model will by default add adapters to the encoder module."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "13bcf42e-d33a-4364-8d0f-ab59a26ffa7c",
+ "metadata": {
+ "id": "13bcf42e-d33a-4364-8d0f-ab59a26ffa7c"
+ },
+ "outputs": [],
+ "source": [
+ "model.adapter_module_names"
+ ]
+ },
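+ {
+ "cell_type": "markdown",
+ "id": "default-target-example",
+ "metadata": {
+ "id": "default-target-example"
+ },
+ "source": [
+ "---\n",
+ "\n",
+ "In other words, per the note above, the two calls sketched below should be equivalent. This is shown for illustration only - `adapter_cfg` is a hypothetical config, like the `LinearAdapterConfig` we create in the next section:\n",
+ "\n",
+ "```python\n",
+ "# Explicit target\n",
+ "model.add_adapter(name=\"encoder:my_adapter\", cfg=adapter_cfg)  # adapter_cfg is hypothetical here\n",
+ "# Equivalent: no target given, so the '' default resolves to the encoder\n",
+ "# model.add_adapter(name=\"my_adapter\", cfg=adapter_cfg)\n",
+ "```"
+ ]
+ },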
+ {
+ "cell_type": "markdown",
+ "id": "67324f6a-ffff-47a7-9ee5-dc93819f6ffd",
+ "metadata": {
+ "id": "67324f6a-ffff-47a7-9ee5-dc93819f6ffd"
+ },
+ "source": [
+ "## Prepare the Adapter\n",
+ "\n",
+ "Now that we know which modules are supported, lets create a simple adapter module for the encoder and decoder modules."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "65ec3b2b-3f84-43ed-8a90-085aee383ea6",
+ "metadata": {
+ "id": "65ec3b2b-3f84-43ed-8a90-085aee383ea6"
+ },
+ "outputs": [],
+ "source": [
+ "from nemo.collections.common.parts import LinearAdapterConfig"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "47aab832-bfec-4cca-b4ee-868ea1af9869",
+ "metadata": {
+ "id": "47aab832-bfec-4cca-b4ee-868ea1af9869"
+ },
+ "outputs": [],
+ "source": [
+ "input_dim = model.cfg.encoder.d_model\n",
+ "adapter_dim = 8"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "cd519281-ad45-4719-9ad6-561e6192717f",
+ "metadata": {
+ "id": "cd519281-ad45-4719-9ad6-561e6192717f"
+ },
+ "outputs": [],
+ "source": [
+ "enc_adapter_cfg = LinearAdapterConfig(in_features=input_dim, dim=adapter_dim)\n",
+ "dec_adapter_cfg = LinearAdapterConfig(in_features=input_dim, dim=adapter_dim)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "f147fc89-ab93-4454-ad6b-909288a452a2",
+ "metadata": {
+ "id": "f147fc89-ab93-4454-ad6b-909288a452a2"
+ },
+ "source": [
+ "## Add Adapter Modules\n",
+ "\n",
+ "Now that we have the adapter configs prepared, lets add them to the model !\n",
+ "\n",
+ "We provide the target module by using `target:adapter_name` when calling `add_adapter()` - this tells the model to setup an adapter called `adapter_name` to the module denoted by `target` with the config `cfg`."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "a23256ce-bc09-4fb0-8c3b-214519b8774b",
+ "metadata": {
+ "id": "a23256ce-bc09-4fb0-8c3b-214519b8774b"
+ },
+ "outputs": [],
+ "source": [
+ "model.add_adapter(name=\"encoder:enc\", cfg=enc_adapter_cfg)\n",
+ "model.add_adapter(name=\"transf_decoder:dec\", cfg=dec_adapter_cfg)\n",
+ "\n",
+ "print(\"Added adapters!\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "2dbe9b7b-9a3d-4504-a652-1d90701cbbf8",
+ "metadata": {
+ "id": "2dbe9b7b-9a3d-4504-a652-1d90701cbbf8"
+ },
+ "source": [
+ "## Freeze Original Module Parameters and Unfreeze Adapter Weights Only\n",
+ "\n",
+ "When tuning adapters, we usually freeze the entire base model and only tune the adapters. This prevents the need for large amounts of data, preserves a lot of memory (since the full model doesnt need backward pass, only the adapters) and makes it easier to adapt huge models."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "2f8162dd-0373-4e65-aa8a-f458a1633578",
+ "metadata": {
+ "id": "2f8162dd-0373-4e65-aa8a-f458a1633578",
+ "scrolled": true
+ },
+ "outputs": [],
+ "source": [
+ "model.freeze()\n",
+ "model.unfreeze_enabled_adapters()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "0b3795a4-fcfe-49ee-a76f-1cb77d99ace1",
+ "metadata": {
+ "id": "0b3795a4-fcfe-49ee-a76f-1cb77d99ace1"
+ },
+ "source": [
+ "----\n",
+ "\n",
+ "Lets make sure that the number of trainable parameters is a lot smaller (< 1 M) than the total number of params (1 B)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "58453f40-d72d-4f9b-a427-3fb63787f3d6",
+ "metadata": {
+ "id": "58453f40-d72d-4f9b-a427-3fb63787f3d6"
+ },
+ "outputs": [],
+ "source": [
+ "model.summarize()"
+ ]
+ },
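+ {
+ "cell_type": "markdown",
+ "id": "manual-param-count",
+ "metadata": {
+ "id": "manual-param-count"
+ },
+ "source": [
+ "---\n",
+ "\n",
+ "If you prefer to verify the numbers by hand, a plain PyTorch sketch such as the following (not NeMo-specific, shown purely as an illustration) counts trainable vs. total parameters directly:\n",
+ "\n",
+ "```python\n",
+ "# Only the adapter weights should still require grad after model.freeze()\n",
+ "trainable = sum(p.numel() for p in model.parameters() if p.requires_grad)\n",
+ "total = sum(p.numel() for p in model.parameters())\n",
+ "print(f\"Trainable: {trainable:,} / Total: {total:,} ({100 * trainable / total:.4f}%)\")\n",
+ "```"
+ ]
+ },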
+ {
+ "cell_type": "markdown",
+ "id": "aa713f4a-ec16-4e2a-aeb3-ac7c4090f20f",
+ "metadata": {
+ "id": "aa713f4a-ec16-4e2a-aeb3-ac7c4090f20f"
+ },
+ "source": [
+ "## Check Enabled Adapters\n",
+ "\n",
+ "Here, we check that the adapters that we named above (`enc` and `dec`) are both setup and enabled."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "d69f09d9-411e-420e-8f17-c86391e88fc3",
+ "metadata": {
+ "id": "d69f09d9-411e-420e-8f17-c86391e88fc3"
+ },
+ "outputs": [],
+ "source": [
+ "model.get_enabled_adapters()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "f_XpTJx9hQXy",
+ "metadata": {
+ "id": "f_XpTJx9hQXy"
+ },
+ "source": [
+ "# Customizing Multi Task Models\n",
+ "\n",
+ "In the following section, we will take a deeper look into what are the components that compose a Multi Task Model and how users can override each of these parts to create their own customizable multi task models.\n",
+ "\n",
+ "---\n",
+ "\n",
+ "In this tutorial, we will only see the internal components such as the prompt format and dataset construction, but not change them.\n",
+ "\n",
+ "In a following tutorial, we will show how to add an additional task to a pre-trained Multi Task Model using a pre-trained model as a starting point."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "6f0beb8c-7b12-4169-a3f7-1639bdaf6160",
+ "metadata": {
+ "id": "6f0beb8c-7b12-4169-a3f7-1639bdaf6160"
+ },
+ "source": [
+ "# Prompt Handling for Multi Task Models\n",
+ "Nvidia Canary is our first model that is a Multi Task Model.\n",
+ "\n",
+ "Multi Task models utilize a prompt format, similar to those used in Large Language Models, in order to denote to the model which task is to be performed, which langauge is being spoken and what language should the output transcript be in, whether to provide punctuation and capitalization or not, and so much more in the future !\n",
+ "\n",
+ "Lets take a look at the model's `prompt` for the Canary model that we have created -"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "56a78cd0-afaf-4272-898f-d9e13ba871d3",
+ "metadata": {
+ "id": "56a78cd0-afaf-4272-898f-d9e13ba871d3"
+ },
+ "outputs": [],
+ "source": [
+ "model.prompt_format"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "9cbaf28a-1f10-4da3-a3ed-53b2239baa49",
+ "metadata": {
+ "id": "9cbaf28a-1f10-4da3-a3ed-53b2239baa49"
+ },
+ "source": [
+ "----\n",
+ "\n",
+ "This gives us the prompt format functions name, which we will see below points to a prompt format function that reads in manifest items and maps it to the template."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "087d1f60-3679-4593-840f-8d0fbd8a0e3e",
+ "metadata": {
+ "id": "087d1f60-3679-4593-840f-8d0fbd8a0e3e"
+ },
+ "source": [
+ "## Reuse / Register a Prompt Format Function\n",
+ "\n",
+ "When we print `model.prompt_format` it writes `canary` which is one of the registered prompt templates available in NeMo ASR.\n",
+ "For simplicity's sake, we will continue to use the same prompt format for this tutorial. However, we enable users to define their own prompt formats and register them as needed.\n",
+ "\n",
+ "Let's see what the `canary` prompt format looks like:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "c202abaf-63ca-4475-a2bb-3b487be8e375",
+ "metadata": {
+ "id": "c202abaf-63ca-4475-a2bb-3b487be8e375"
+ },
+ "outputs": [],
+ "source": [
+ "from nemo.collections.common.prompts.fn import get_prompt_format_fn, registered_prompt_format_fn"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "07c56dc3-fe42-49fc-936c-770ec17a29ac",
+ "metadata": {
+ "id": "07c56dc3-fe42-49fc-936c-770ec17a29ac",
+ "scrolled": true
+ },
+ "outputs": [],
+ "source": [
+ "canary_prompt_format_fn = get_prompt_format_fn(\"canary\")\n",
+ "canary_prompt_format_fn?"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "1170b57c-f4c7-432f-91bb-1dbf73063d60",
+ "metadata": {
+ "id": "1170b57c-f4c7-432f-91bb-1dbf73063d60"
+ },
+ "source": [
+ "### Registering a New Prompt Format Function"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "d11a8a05-6ba7-41f3-97ab-43453a59c860",
+ "metadata": {
+ "id": "d11a8a05-6ba7-41f3-97ab-43453a59c860"
+ },
+ "source": [
+ "Just to show that this is user-configurable, we show how to register a dummy prompt format below:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "f77378ff-d5de-4b86-bfaf-e62b51c7f9ce",
+ "metadata": {
+ "id": "f77378ff-d5de-4b86-bfaf-e62b51c7f9ce"
+ },
+ "outputs": [],
+ "source": [
+ "@registered_prompt_format_fn\n",
+ "def canary2(cuts, tokenizer, inference: bool):\n",
+ " \"\"\" Users can implement this as needed \"\"\"\n",
+ " raise NotImplementedError()\n",
+ "\n",
+ "print(\"Registered prompt\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "cb02f068-8fee-46e1-8096-910062668173",
+ "metadata": {
+ "id": "cb02f068-8fee-46e1-8096-910062668173"
+ },
+ "outputs": [],
+ "source": [
+ "temp = get_prompt_format_fn('canary2')\n",
+ "temp.__name__"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "f14aa85b-71cb-4813-837b-b28a384685dc",
+ "metadata": {
+ "id": "f14aa85b-71cb-4813-837b-b28a384685dc"
+ },
+ "source": [
+ "## Create / Reuse a Prompt Format\n",
+ "\n",
+ "Canary Multi Task Model comes with a pre-defined prompt template, so we need to provide it data in a format that can be handled by that prompt format class.\n",
+ "\n",
+ "A `PromptFormatter` is a special class that defines the dialog template of the order of turns that occur in a model's prompt. For example, in Language Models, we normally may begin with either a `System` or `User` turn, followed by an `Assistant` turn which produces an output from the model. Similarly in Multi Task models, we enable support for such a usage pattern.\n",
+ "\n",
+ "Do note: Current generation of Canary models are not trained to operate on multi turn conversations, however future variants of Multi Task models may support such usage."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "35530cad-84d7-422b-82c5-1bda5c1a4497",
+ "metadata": {
+ "id": "35530cad-84d7-422b-82c5-1bda5c1a4497",
+ "scrolled": true
+ },
+ "outputs": [],
+ "source": [
+ "# Let's review the actual prompt formatter clas docs\n",
+ "model.prompt?"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "0cd0c0d1-da8a-4de6-9efc-86a7dd3ed660",
+ "metadata": {
+ "id": "0cd0c0d1-da8a-4de6-9efc-86a7dd3ed660"
+ },
+ "outputs": [],
+ "source": [
+ "# Let's see the actual template of this prompt formatter\n",
+ "model.prompt.TEMPLATE"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "72956a2f-f051-42d2-9e08-47e954d88e5c",
+ "metadata": {
+ "id": "72956a2f-f051-42d2-9e08-47e954d88e5c"
+ },
+ "source": [
+ "---\n",
+ "\n",
+ "We see that the template contains two turns - `user` and `assistant`.\n",
+ "\n",
+ "User template looks as follows: `<|startoftranscript|>|source_lang||task||target_lang||pnc|`\n",
+ "During execution, we remove the `|` in order to fill in the actual value of the slots provided by the the data loader.\n",
+ "\n",
+ "User holds the following allowed slots -\n",
+ "* `source_lang`\n",
+ "* `target_lang`\n",
+ "* `task`\n",
+ "* `pnc`\n",
+ "\n",
+ "Similarly, for Assistant template : `|text|<|endoftext|>`\n",
+ "\n",
+ "Assistant holds the following allowed slots -\n",
+ "* `text`"
+ ]
+ },
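+ {
+ "cell_type": "markdown",
+ "id": "slot-filling-sketch",
+ "metadata": {
+ "id": "slot-filling-sketch"
+ },
+ "source": [
+ "---\n",
+ "\n",
+ "To make the slot mechanics concrete, here is a purely illustrative sketch of the substitution - it is not the formatter's internal implementation, and the slot values used below (`<|en|>`, `<|transcribe|>`, `<|nopnc|>`) are assumed special tokens for demonstration only:\n",
+ "\n",
+ "```python\n",
+ "template = \"<|startoftranscript|>|source_lang||task||target_lang||pnc|\"\n",
+ "slots = {\n",
+ "    \"source_lang\": \"<|en|>\",\n",
+ "    \"task\": \"<|transcribe|>\",\n",
+ "    \"target_lang\": \"<|en|>\",\n",
+ "    \"pnc\": \"<|nopnc|>\",\n",
+ "}\n",
+ "for name, value in slots.items():\n",
+ "    template = template.replace(f\"|{name}|\", value)\n",
+ "print(template)  # <|startoftranscript|><|en|><|transcribe|><|en|><|nopnc|>\n",
+ "```"
+ ]
+ },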
+ {
+ "cell_type": "markdown",
+ "id": "540c04af-34d1-4b46-b935-40b16f54ca03",
+ "metadata": {
+ "id": "540c04af-34d1-4b46-b935-40b16f54ca03"
+ },
+ "source": [
+ "### Creating and Using a Custom Prompt Formatter\n",
+ "\n",
+ "While we provide a pre-trained model with a pre-defined prompt format, we also enable users to create their own PromptFormatter subclass and change it as needed.\n",
+ "\n",
+ "Below, we show a simple modification to the model's PromptFormatter and show how to change it."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "0adb576c-df58-4b66-b8fa-8e653da6fead",
+ "metadata": {
+ "id": "0adb576c-df58-4b66-b8fa-8e653da6fead"
+ },
+ "outputs": [],
+ "source": [
+ "# Create a new prompt formatter using the original CanaryPromptFormatter class as baseclass\n",
+ "class CanaryPromptFormatterV2(model.prompt.__class__):\n",
+ "\n",
+ " # make sure to provide a new name\n",
+ " NAME: str = \"canary2\"\n",
+ "\n",
+ " # Make any changes as necessary.\n",
+ " # For this demonstration, we will not change anything other than the name"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "f7d85683-ddd0-40c5-956d-e14d09243424",
+ "metadata": {
+ "id": "f7d85683-ddd0-40c5-956d-e14d09243424"
+ },
+ "outputs": [],
+ "source": [
+ "# Next, lets update the model's prompt formatter\n",
+ "model.change_prompt(\"canary2\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "6581f934-a55b-41df-864a-351d1fb0029e",
+ "metadata": {
+ "id": "6581f934-a55b-41df-864a-351d1fb0029e"
+ },
+ "source": [
+ "---\n",
+ "\n",
+ "We have now successfully changed the prompt format to `canary2`.\n",
+ "\n",
+ "**Note**: It is important to know that when changing the prompt format, the name of the new prompt format class (`canary2` in this case) **has to match** the name of the prompt function registered with `@registered_prompt_format_fn`!"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "c1d84948-8f73-4c31-923f-eaf01d877835",
+ "metadata": {
+ "id": "c1d84948-8f73-4c31-923f-eaf01d877835",
+ "scrolled": true
+ },
+ "outputs": [],
+ "source": [
+ "# Check if everything is ok -\n",
+ "model.prompt.__class__.__name__"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "f617cda0-d16b-400a-b495-dac213d318e1",
+ "metadata": {
+ "id": "f617cda0-d16b-400a-b495-dac213d318e1"
+ },
+ "outputs": [],
+ "source": [
+ "model.prompt_format"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "cb964964-e978-43e9-befa-9bb0904db82f",
+ "metadata": {
+ "id": "cb964964-e978-43e9-befa-9bb0904db82f"
+ },
+ "source": [
+ "---\n",
+ "For the rest of the tutorial, we will revert back to the original prompt formatter"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "526093a8-86ba-48f0-a60b-55642720fc4e",
+ "metadata": {
+ "id": "526093a8-86ba-48f0-a60b-55642720fc4e"
+ },
+ "outputs": [],
+ "source": [
+ "model.change_prompt('canary')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "9c4d2986-89b4-4589-ab0e-69683084cfd4",
+ "metadata": {
+ "id": "9c4d2986-89b4-4589-ab0e-69683084cfd4"
+ },
+ "source": [
+ "## Creating / Using a Multi Task Dataset\n",
+ "\n",
+ "Now that we have learned how to modify the model's prompt formatter and the underlying format function that maps manifest items into slots to inject into the prompt template, next let's take a look at how to use and create custom datasets for training multi task models.\n",
+ "\n",
+ "---\n",
+ "\n",
+ "Unlike previous tutorials that showcase how to use pre-defined datasets and point them to your manifest files, we will take a slightly more hands-on approach for multi task modes. This is due to shear flexibility of multi task models - they can do almost any task that you can formulate into a \"speech in - text out\" problem.\n",
+ "\n",
+ "So it is not easy to have a pre-defined dataset class that can handle all new ideas and tasks that researchers can come up with.\n",
+ "\n",
+ "Instead, we showcase how to build a custom dataset for yourself and use it with the Multi Task model instead."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "b35ca0c2-8ceb-423f-b9ef-7dd6ec5a6952",
+ "metadata": {
+ "id": "b35ca0c2-8ceb-423f-b9ef-7dd6ec5a6952"
+ },
+ "source": [
+ "---\n",
+ "\n",
+ "However, we also provide a base class that can be used as is by users if they dont want the hassle of writing their own datasets.\n",
+ "\n",
+ "This is handled by the `PromptedAudioToTextLhotseDataset` - it maps user defined manifest items to the items defined in the prompt template of the model, so as long as the manifest corresponds to the slots supported by the model, it will be managed by the Dataset automatically."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "3d35d513-8538-4bcb-b892-898f16ad3f0f",
+ "metadata": {
+ "id": "3d35d513-8538-4bcb-b892-898f16ad3f0f",
+ "scrolled": true
+ },
+ "outputs": [],
+ "source": [
+ "from nemo.collections.asr.data.audio_to_text_lhotse_prompted import PromptedAudioToTextLhotseDataset\n",
+ "\n",
+ "# Uncomment below line to see the class definition of PromptedAudioToTextLhotseDataset\n",
+ "# PromptedAudioToTextLhotseDataset??"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "51e3a150-40b9-4599-8c6e-0f01698989b4",
+ "metadata": {
+ "id": "51e3a150-40b9-4599-8c6e-0f01698989b4"
+ },
+ "source": [
+ "### Creating a New Prompted Dataset"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "56208452-ea18-44c8-8c71-0daef431dc31",
+ "metadata": {
+ "id": "56208452-ea18-44c8-8c71-0daef431dc31"
+ },
+ "outputs": [],
+ "source": [
+ "import torch.utils.data\n",
+ "from lhotse import CutSet\n",
+ "from lhotse.cut import MixedCut, MonoCut\n",
+ "from lhotse.dataset import AudioSamples\n",
+ "from lhotse.dataset.collation import collate_vectors\n",
+ "\n",
+ "from nemo.collections.asr.data.audio_to_text_lhotse_prompted import PromptedAudioToTextLhotseDataset, PromptedAudioToTextMiniBatch\n",
+ "\n",
+ "class MyCanaryPromptedAudioToTextLhotseDataset(torch.utils.data.Dataset):\n",
+ " \"\"\"\n",
+ " This dataset is based on :class:`~nemo.collections.asr.data.audio_to_text_lhotse.LhotseSpeechToTextBpeDataset`.\n",
+ " It is a Lhotse-style dataset that converts a mini-batch of Cuts into tensors.\n",
+ " The main difference from ``LhotseSpeechToTextBpeDataset`` is that we introduce\n",
+ " a special prompt format for multitask encoder-decoder models.\n",
+ "\n",
+ " To perform the prompt formatting, we accept a ``prompt_format_fn``.\n",
+ " It's expected to accept:\n",
+ " * a ``CutSet`` which it will internally iterate over for utterances, and\n",
+ " * a ``tokenizer`` object that will be internally used to tokenize the utterances\n",
+ "\n",
+ " Tokenized utterances will be extended with special prompt tokens according to ``prompt_format_fn`` logic.\n",
+ " We support cuts with multiple supervision segments -- their tokenized texts will be concatenated before we add the prompt tokens.\n",
+ " This is useful, for example, in code-switched scenarios where each segment is spoken in a different language.\n",
+ " \"\"\"\n",
+ "\n",
+ " def __init__(\n",
+ " self,\n",
+ " tokenizer: 'TokenizerSpec',\n",
+ " ):\n",
+ " super().__init__()\n",
+ " self.tokenizer = tokenizer\n",
+ " self.load_audio = AudioSamples(fault_tolerant=True)\n",
+ " self.padding_value = self.tokenizer.pad_id\n",
+ " self.prompt_format_fn = get_prompt_format_fn('canary') # Use the default canary prompt function\n",
+ "\n",
+ "\n",
+ " def __getitem__(self, cuts: CutSet) -> PromptedAudioToTextMiniBatch:\n",
+ " audio, audio_lens, cuts = self.load_audio(cuts)\n",
+ "\n",
+ " prompts_with_answers, prompts, answers = self.prompt_format_fn(cuts, self.tokenizer)\n",
+ " \n",
+ " transcript, transcript_lens = self._collate_tokens(answers)\n",
+ " prompts_with_answers, prompts_with_answers_lens = self._collate_tokens(prompts_with_answers)\n",
+ " prompts, prompt_lens = self._collate_tokens(prompts)\n",
+ "\n",
+ " return PromptedAudioToTextMiniBatch(\n",
+ " audio=audio,\n",
+ " audio_lens=audio_lens,\n",
+ " transcript=transcript,\n",
+ " transcript_lens=transcript_lens,\n",
+ " prompt=prompts,\n",
+ " prompt_lens=prompt_lens,\n",
+ " prompted_transcript=prompts_with_answers,\n",
+ " prompted_transcript_lens=prompts_with_answers_lens,\n",
+ " cuts=cuts.drop_in_memory_data(),\n",
+ " )\n",
+ "\n",
+ " def _collate_tokens(self, tokens: list[list[int] | torch.Tensor]) -> tuple[torch.Tensor, torch.Tensor]:\n",
+ " tokens = [torch.as_tensor(t) for t in tokens]\n",
+ " token_lens = torch.tensor([t.size(0) for t in tokens], dtype=torch.long)\n",
+ " tokens = collate_vectors(tokens, padding_value=self.padding_value)\n",
+ " return tokens, token_lens\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "5cb71ba1-ce2e-49c7-8126-be7e7851c812",
+ "metadata": {
+ "id": "5cb71ba1-ce2e-49c7-8126-be7e7851c812"
+ },
+ "source": [
+ "---\n",
+ "\n",
+ "The above class is mostly a demonstration, but it showcases how users might flexibly change the prompt formatter, prompt format function and even the data set that handles these two in a flexible way.\n",
+ "\n",
+ "The order of operations is usually this -\n",
+ "\n",
+ "1) Create a new Prompt Formatter class - this denotes the slots that each turn can have (including new task inputs or other values). This class is auto registered.\n",
+ "2) Create a new Prompt Format function - Using `@registered_prompt_format_fn` decorator, write a custom function that accepts args and processes the provided input data from a manifest.\n",
+ "3) Create a new Dataset class (usually based on the `PromptedAudioToTextLhotseDataset` dataset) that uses the Prompt Format function to convert manifest items into nicely formatted samples that can be passed to the Prompt Formatter."
+ ]
+ },
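+ {
+ "cell_type": "markdown",
+ "id": "customization-skeleton",
+ "metadata": {
+ "id": "customization-skeleton"
+ },
+ "source": [
+ "---\n",
+ "\n",
+ "A minimal skeleton of those three steps, with hypothetical names (`MyPromptFormatter`, `my_prompt`) and placeholder bodies rather than working implementations:\n",
+ "\n",
+ "```python\n",
+ "# 1) New prompt formatter - auto-registered via its class definition\n",
+ "class MyPromptFormatter(model.prompt.__class__):\n",
+ "    NAME = \"my_prompt\"  # must match the prompt format function's name below\n",
+ "\n",
+ "# 2) New prompt format function - registered by the decorator\n",
+ "@registered_prompt_format_fn\n",
+ "def my_prompt(cuts, tokenizer, inference: bool):\n",
+ "    raise NotImplementedError()  # map manifest fields to prompt slots here\n",
+ "\n",
+ "# 3) Dataset that applies the prompt format function to each mini-batch\n",
+ "dataset = MyCanaryPromptedAudioToTextLhotseDataset(tokenizer=model.tokenizer)\n",
+ "```"
+ ]
+ },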
+ {
+ "cell_type": "markdown",
+ "id": "a7bf8078-663e-43cb-b045-0c8b6ef08e30",
+ "metadata": {
+ "id": "a7bf8078-663e-43cb-b045-0c8b6ef08e30"
+ },
+ "source": [
+ "# Preparing a Canary Dataset\n",
+ "\n",
+ "Now that we have all the pieces together on the model side, let's take a look on the data side."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "83c9eabc-0473-463e-be1f-ab6d5f519a79",
+ "metadata": {
+ "id": "83c9eabc-0473-463e-be1f-ab6d5f519a79"
+ },
+ "source": [
+ "## Required Roles Defined by Prompt Format\n",
+ "\n",
+ "These are the available 'roles' available in the prompt format - they denote at each turn, one role can be enabled and its input or output can be calculated."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "11ff9641-53fd-4481-b414-0edc12bf4dc3",
+ "metadata": {
+ "id": "11ff9641-53fd-4481-b414-0edc12bf4dc3"
+ },
+ "outputs": [],
+ "source": [
+ "model.prompt.get_roles()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "203a67e2-74fd-440c-9658-451f41239f36",
+ "metadata": {
+ "id": "203a67e2-74fd-440c-9658-451f41239f36"
+ },
+ "outputs": [],
+ "source": [
+ "for role in model.prompt.get_roles():\n",
+ " print(role, model.prompt.get_slots(role))\n",
+ " print()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "8e887f9d-94e7-4843-9da8-f914e24651f3",
+ "metadata": {
+ "id": "8e887f9d-94e7-4843-9da8-f914e24651f3"
+ },
+ "source": [
+ "## Create a Data Module\n",
+ "\n",
+ "Data Modules are one way of organizing datasets in PyTorch Lightning. It provides a unified place where data loading and processing can be potentially handled.\n",
+ "\n",
+ "**Note**: This isn't strictly necessary - you can achieve the same using just Pytorch dataloaders directly and passing it to Trainer.fit() but we showcase a data module codebase that can be extended by the user."
+ ]
+ },
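+ {
+ "cell_type": "markdown",
+ "id": "direct-dataloader-sketch",
+ "metadata": {
+ "id": "direct-dataloader-sketch"
+ },
+ "source": [
+ "---\n",
+ "\n",
+ "For reference, here is an illustrative sketch of the DataModule-free route. The config keys mirror the ones used in the DataModule below, and `global_rank=0` / `world_size=1` assume single-GPU, non-distributed training:\n",
+ "\n",
+ "```python\n",
+ "train_dl = get_lhotse_dataloader_from_config(\n",
+ "    OmegaConf.create({'manifest_filepath': 'combined_train_manifest.json', 'batch_size': 16,\n",
+ "                      'num_workers': 4, 'shuffle': True}),\n",
+ "    global_rank=0,\n",
+ "    world_size=1,\n",
+ "    dataset=MyCanaryPromptedAudioToTextLhotseDataset(tokenizer=model.tokenizer),\n",
+ ")\n",
+ "trainer.fit(model, train_dataloaders=train_dl)\n",
+ "```"
+ ]
+ },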
+ {
+ "cell_type": "markdown",
+ "id": "51d58931-4166-4ab9-a755-4c5268001192",
+ "metadata": {
+ "id": "51d58931-4166-4ab9-a755-4c5268001192"
+ },
+ "source": [
+ "----\n",
+ "\n",
+ "In our CanaryAN4DataModule - we will perform two tasks. One is En ASR - transcribing the AN4 English dataset. Another is En to De AST - directly translating the english audio to German text.\n",
+ "\n",
+ "For simplicity's sake, we will use a small off-the-shelf model to perform the translation of English Transcripts to German."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "91ed74ca-5d5e-412d-a813-0659014aa9a3",
+ "metadata": {
+ "id": "91ed74ca-5d5e-412d-a813-0659014aa9a3"
+ },
+ "source": [
+ "---\n",
+ "\n",
+ "In NeMo 2.0, we utilize [Lhotse](https://github.com/lhotse-speech/lhotse) as our data backbone for speech tasks, which simplifies using custom speech datasets.\n",
+ "\n",
+ "Most of the magic is handled by the following code\n",
+ "\n",
+ "```python\n",
+ "from nemo.collections.common.data.lhotse import get_lhotse_dataloader_from_config\n",
+ "\n",
+ "get_lhotse_dataloader_from_config(\n",
+ " OmegaConf.create(config), # Pass in a config that points to the manifest files and other arguments\n",
+ " global_rank=self.trainer.global_rank,\n",
+ " world_size=self.trainer.world_size,\n",
+ " # Pass in the dataset class for Lhotse to handle. This class now receives CutSet as input.\n",
+ " dataset=MyCanaryPromptedAudioToTextLhotseDataset(tokenizer=self.tokenizer),\n",
+ ")\n",
+ "```"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "4a15ab9b-7603-4ac5-890c-92a541a0527c",
+ "metadata": {
+ "id": "4a15ab9b-7603-4ac5-890c-92a541a0527c"
+ },
+ "outputs": [],
+ "source": [
+ "import os\n",
+ "import glob\n",
+ "import json\n",
+ "import copy\n",
+ "import subprocess\n",
+ "import tarfile\n",
+ "import wget\n",
+ "import librosa\n",
+ "import tqdm\n",
+ "from omegaconf import OmegaConf\n",
+ "\n",
+ "from torch.utils.data import DataLoader, Dataset\n",
+ "\n",
+ "import lightning.pytorch as L\n",
+ "\n",
+ "from transformers import T5Tokenizer, T5ForConditionalGeneration\n",
+ "\n",
+ "from nemo.collections.asr.parts.utils.manifest_utils import read_manifest, write_manifest\n",
+ "from nemo.collections.common.data.lhotse import get_lhotse_dataloader_from_config\n",
+ "\n",
+ "\n",
+ "# Function to build a manifest\n",
+ "def build_manifest(transcripts_path, manifest_path, wav_path, data_dir):\n",
+ " with open(transcripts_path, 'r') as fin:\n",
+ " with open(manifest_path, 'w') as fout:\n",
+ " for line in fin:\n",
+ " # Lines look like this:\n",
+ " # transcript (fileID)\n",
+ " transcript = line[: line.find('(')-1].lower()\n",
+ " transcript = transcript.replace('', '').replace('', '')\n",
+ " transcript = transcript.strip()\n",
+ "\n",
+ " file_id = line[line.find('(')+1 : -2] # e.g. \"cen4-fash-b\"\n",
+ " audio_path = os.path.join(\n",
+ " data_dir, wav_path,\n",
+ " file_id[file_id.find('-')+1 : file_id.rfind('-')],\n",
+ " file_id + '.wav')\n",
+ "\n",
+ " duration = librosa.core.get_duration(path=audio_path)\n",
+ "\n",
+ " # Write the metadata to the manifest\n",
+ " metadata = {\n",
+ " \"audio_filepath\": audio_path,\n",
+ " \"duration\": duration,\n",
+ " \"text\": transcript,\n",
+ " \"pnc\": \"no\",\n",
+ " \"source_lang\": \"en\",\n",
+ " \"target_lang\": \"en\",\n",
+ " \"task\": \"asr\",\n",
+ " }\n",
+ " json.dump(metadata, fout)\n",
+ " fout.write('\\n')\n",
+ "\n",
+ " return manifest_path\n",
+ "\n",
+ "\n",
+ "class CanaryAN4DataModule(L.LightningDataModule):\n",
+ "\n",
+ " def __init__(self, tokenizer, data_dir: str = \"./an4/\", batch_size=8):\n",
+ " super().__init__()\n",
+ " self.tokenizer = tokenizer\n",
+ " self.data_dir = data_dir\n",
+ " self.batch_size = batch_size\n",
+ "\n",
+ " # ASR manifests\n",
+ " self.train_manifest = data_dir + '/an4/train_manifest.json'\n",
+ " self.test_manifest = data_dir + '/an4/test_manifest.json'\n",
+ "\n",
+ " # AST manifests\n",
+ " self.ast_train_manifest = data_dir + '/an4/ast_train_manifest.json'\n",
+ " self.ast_test_manifest = data_dir + '/an4/ast_test_manifest.json'\n",
+ "\n",
+ " # Combined manifests\n",
+ " self.combined_train_manifest = data_dir + '/an4/combined_train_manifest.json'\n",
+ " self.combined_test_manifest = data_dir + '/an4/combined_test_manifest.json'\n",
+ "\n",
+ " def setup(self, stage):\n",
+ " # make assignments here (val/train/test split)\n",
+ " # called on every process in DDP\n",
+ " # Assign train/val datasets for use in dataloaders\n",
+ " pass\n",
+ "\n",
+ " def train_dataloader(self):\n",
+ " config = {'manifest_filepath': self.combined_train_manifest, 'batch_size': self.batch_size,\n",
+ " 'num_workers': 4, 'shuffle': True, 'min_duration': 0.3, 'max_duration': 10.0}\n",
+ " return self._setup_dataloader(config)\n",
+ "\n",
+ " def val_dataloader(self):\n",
+ " config = {'manifest_filepath': self.combined_test_manifest, 'batch_size': self.batch_size,\n",
+ " 'num_workers': 4, 'shuffle': False, 'min_duration': 0.3, 'max_duration': 10.0}\n",
+ " return self._setup_dataloader(config)\n",
+ "\n",
+ " def test_dataloader(self):\n",
+ " config = {'manifest_filepath': self.combined_test_manifest, 'batch_size': self.batch_size,\n",
+ " 'num_workers': 4, 'shuffle': False, 'min_duration': 0.3, 'max_duration': 10.0}\n",
+ " return self._setup_dataloader(config)\n",
+ "\n",
+ " def teardown(self, stage):\n",
+ " # clean up after fit or test\n",
+ " # called on every process in DDP\n",
+ " pass\n",
+ "\n",
+ " def _setup_dataloader(self, config):\n",
+ " \"\"\"\n",
+ " The main function that creates the data loader using Lhotse's integration with NeMo.\n",
+ " \"\"\"\n",
+ " return get_lhotse_dataloader_from_config(\n",
+ " OmegaConf.create(config),\n",
+ " global_rank=self.trainer.global_rank,\n",
+ " world_size=self.trainer.world_size,\n",
+ " # Note the passing of our custom dataset\n",
+ " dataset=MyCanaryPromptedAudioToTextLhotseDataset(tokenizer=self.tokenizer),\n",
+ " )\n",
+ "\n",
+ " def prepare_data(self):\n",
+ " # download, split, etc...\n",
+ " # only called on 1 GPU/TPU in distributed\n",
+ " if not os.path.exists(self.data_dir):\n",
+ " os.makedirs(self.data_dir)\n",
+ "\n",
+ " data_dir = self.data_dir\n",
+ " if not os.path.exists(data_dir + '/an4_sphere.tar.gz'):\n",
+ " an4_url = 'https://dldata-public.s3.us-east-2.amazonaws.com/an4_sphere.tar.gz'\n",
+ " an4_path = wget.download(an4_url, data_dir)\n",
+ " print(f\"Dataset downloaded at: {an4_path}\")\n",
+ " else:\n",
+ " print(\"Tarfile already exists.\")\n",
+ " an4_path = data_dir + '/an4_sphere.tar.gz'\n",
+ "\n",
+ " if not os.path.exists(data_dir + '/an4/'):\n",
+ " # Untar and convert .sph to .wav (using sox)\n",
+ " tar = tarfile.open(an4_path)\n",
+ " tar.extractall(path=data_dir)\n",
+ "\n",
+ " print(\"Converting .sph to .wav...\")\n",
+ " sph_list = glob.glob(data_dir + '/an4/**/*.sph', recursive=True)\n",
+ " for sph_path in sph_list:\n",
+ " wav_path = sph_path[:-4] + '.wav'\n",
+ " cmd = [\"sox\", sph_path, wav_path]\n",
+ " subprocess.run(cmd)\n",
+ " print(\"Finished conversion.\\n******\")\n",
+ "\n",
+ " # Building Manifests\n",
+ " print(\"******\")\n",
+ " train_transcripts = data_dir + '/an4/etc/an4_train.transcription'\n",
+ " train_manifest = self.train_manifest\n",
+ " if not os.path.isfile(train_manifest):\n",
+ " build_manifest(train_transcripts, train_manifest, 'an4/wav/an4_clstk', data_dir)\n",
+ " print(\"Training manifest created.\")\n",
+ "\n",
+ " test_transcripts = data_dir + '/an4/etc/an4_test.transcription'\n",
+ " test_manifest = self.test_manifest\n",
+ " if not os.path.isfile(test_manifest):\n",
+ " build_manifest(test_transcripts, test_manifest, 'an4/wav/an4test_clstk', data_dir)\n",
+ " print(\"Test manifest created.\")\n",
+ " print(\"*** Wrote manifests for Eng ***\")\n",
+ "\n",
+ " train_manifest_data = read_manifest(self.train_manifest)\n",
+ " test_manifest_data = read_manifest(self.test_manifest)\n",
+ "\n",
+ " if not os.path.isfile(self.ast_train_manifest) or not os.path.isfile(self.ast_test_manifest) or not os.path.isfile(self.combined_train_manifest) or not os.path.isfile(self.combined_test_manifest):\n",
+ " tokenizer = T5Tokenizer.from_pretrained(\"google-t5/t5-small\")\n",
+ " t5_model = T5ForConditionalGeneration.from_pretrained(\"google-t5/t5-small\")\n",
+ "\n",
+ " if torch.cuda.is_available():\n",
+ " t5_model = t5_model.cuda()\n",
+ "\n",
+ " def pipe(text):\n",
+ " if isinstance(text, str):\n",
+ " text = [text]\n",
+ "\n",
+ " prefix = \"translate English to German\"\n",
+ " prompts = [prefix + \": \" + x for x in text]\n",
+ " input_ids = tokenizer(prompts, return_tensors=\"pt\", padding=True, truncation=True).input_ids\n",
+ " input_ids = input_ids.to(t5_model.device)\n",
+ " outputs = t5_model.generate(input_ids, max_new_tokens=64)\n",
+ " return [tokenizer.decode(output, skip_special_tokens=True) for output in outputs]\n",
+ "\n",
+ " ast_train_manifest_data = copy.deepcopy(train_manifest_data)\n",
+ " ast_test_manifest_data = copy.deepcopy(test_manifest_data)\n",
+ "\n",
+ " print(\"Translating train set\")\n",
+ " train_texts = [x['text'] for x in train_manifest_data]\n",
+ " BATCH_SIZE = 32\n",
+ "\n",
+ " for i in tqdm.tqdm(range(0, len(train_texts), BATCH_SIZE), total=len(train_texts) // BATCH_SIZE):\n",
+ " batch_texts = train_texts[i:i+BATCH_SIZE]\n",
+ " batch_texts = pipe(batch_texts)\n",
+ " for j, text in enumerate(batch_texts):\n",
+ " ast_train_manifest_data[i+j]['text'] = text\n",
+ " ast_train_manifest_data[i+j]['task'] = 'ast'\n",
+ " ast_train_manifest_data[i+j]['target_lang'] = 'de'\n",
+ "\n",
+ " print(\"Translating test set\")\n",
+ " for data in tqdm.tqdm(ast_test_manifest_data, total=len(ast_test_manifest_data)):\n",
+ " data['text'] = pipe(data['text'])[0]\n",
+ " data['task'] = 'ast'\n",
+ " data['target_lang'] = 'de'\n",
+ "\n",
+ " write_manifest(self.ast_train_manifest, ast_train_manifest_data)\n",
+ " write_manifest(self.ast_test_manifest, ast_test_manifest_data)\n",
+ "\n",
+ " print(\"*** Wrote ast manifests ***\")\n",
+ "\n",
+ " combined_train, combined_test = [], []\n",
+ " combined_train.extend(train_manifest_data)\n",
+ " combined_train.extend(ast_train_manifest_data)\n",
+ "\n",
+ " combined_test.extend(test_manifest_data)\n",
+ " combined_test.extend(ast_test_manifest_data)\n",
+ "\n",
+ " write_manifest(self.combined_train_manifest, combined_train)\n",
+ " write_manifest(self.combined_test_manifest, combined_test)\n",
+ " print(\"*** Wrote combined manifests ***\")\n",
+ "\n",
+ " else:\n",
+ " print(\"*** Wrote ast and combined manifests ***\")\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "e06e697d-7dc2-489f-a52f-195946bfbf6e",
+ "metadata": {
+ "id": "e06e697d-7dc2-489f-a52f-195946bfbf6e"
+ },
+ "source": [
+ "---\n",
+ "\n",
+ "Each item in the prepared manifest has the following items by default.\n",
+ "\n",
+ "As you will recognize, these are the same keys provided by the `CanaryPromptFormatter` classes `slots` argument, so each of these values in the is mapped back to those slots.\n",
+ "\n",
+ "```python\n",
+ "metadata = {\n",
+ " \"audio_filepath\": audio_path,\n",
+ " \"duration\": duration,\n",
+ " \"text\": transcript,\n",
+ " \"pnc\": \"no\",\n",
+ " \"source_lang\": \"en\",\n",
+ " \"target_lang\": \"en\",\n",
+ " \"task\": \"asr\",\n",
+ "}\n",
+ "```\n",
+ "\n",
+ "The most important function in the Data Module above is `prepare_data()`:\n",
+ "\n",
+ "1) It first downloads and converts the AN4 audio files to wav files.\n",
+ "2) Then it writes a new manifest file with the above keys for ASR task\n",
+ "3) It then translates the En transcripts with a `t5-small` model to generate German transcripts\n",
+ "4) Finally it writes another manifest for the AST task with these translated texts.\n",
+ "5) Finally it builds a combined manifest item for both ASR (en) and AST (en to de) multi-task training\n",
+ "\n",
+ "**Note**: We are using prepare_data() only for demonstration. Normally, users should process before experimentation, and so they would only need to implement methods above prepare_data() in their Data Module."
+ ]
+ },
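+ {
+ "cell_type": "markdown",
+ "id": "ast-manifest-example",
+ "metadata": {
+ "id": "ast-manifest-example"
+ },
+ "source": [
+ "---\n",
+ "\n",
+ "For comparison, an AST entry produced by steps 3 and 4 differs from the ASR entry only in its `text`, `task`, and `target_lang` values. The German text below is illustrative, not an actual output of the `t5-small` model:\n",
+ "\n",
+ "```python\n",
+ "metadata = {\n",
+ "    \"audio_filepath\": audio_path,  # same audio as the ASR entry\n",
+ "    \"duration\": duration,\n",
+ "    \"text\": \"ein deutscher beispielsatz\",  # translated transcript (illustrative)\n",
+ "    \"pnc\": \"no\",\n",
+ "    \"source_lang\": \"en\",\n",
+ "    \"target_lang\": \"de\",\n",
+ "    \"task\": \"ast\",\n",
+ "}\n",
+ "```"
+ ]
+ },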
+ {
+ "cell_type": "markdown",
+ "id": "739f0141-1e0e-4db7-b1f6-9d13589bf50c",
+ "metadata": {
+ "id": "739f0141-1e0e-4db7-b1f6-9d13589bf50c"
+ },
+ "source": [
+ "## Download and Prepare Dataset"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "323287f1-9a44-49ab-8438-dcbf34bf2ebe",
+ "metadata": {
+ "id": "323287f1-9a44-49ab-8438-dcbf34bf2ebe"
+ },
+ "outputs": [],
+ "source": [
+ "data_module = CanaryAN4DataModule(tokenizer=model.tokenizer, batch_size=16)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "123faf0d-05b2-4f12-850f-350a175ba7c1",
+ "metadata": {
+ "id": "123faf0d-05b2-4f12-850f-350a175ba7c1",
+ "scrolled": true
+ },
+ "outputs": [],
+ "source": [
+ "data_module.prepare_data()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "fbec085b-9600-49bd-8739-73e5e8e3773f",
+ "metadata": {
+ "id": "fbec085b-9600-49bd-8739-73e5e8e3773f"
+ },
+ "outputs": [],
+ "source": [
+ "!head -n 5 {data_module.train_manifest}"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "66bad9ac-3bad-4d84-8b30-830856c06804",
+ "metadata": {
+ "id": "66bad9ac-3bad-4d84-8b30-830856c06804"
+ },
+ "outputs": [],
+ "source": [
+ "!head -n 5 {data_module.ast_train_manifest}"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "cde19c46-e78c-4d7c-adbf-f1559c9203e1",
+ "metadata": {
+ "id": "cde19c46-e78c-4d7c-adbf-f1559c9203e1"
+ },
+ "source": [
+ "# Evaluate Model before Training\n",
+ "\n",
+ "Canary Multi Task model is already very capable, achieving strong scores on multiple benchmarks. So we first evaluate the baseline numbers on the two tasks\n",
+ "\n",
+ "1) ASR: WER calculation on transcripts\n",
+ "\n",
+ "2) AST: SacreBLEU calculation on translations"
+ ]
+ },
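+ {
+ "cell_type": "markdown",
+ "id": "metric-sanity-check",
+ "metadata": {
+ "id": "metric-sanity-check"
+ },
+ "source": [
+ "---\n",
+ "\n",
+ "As a quick sanity check of what the two metrics measure, here is a toy example on hand-written strings (the values are illustrative and unrelated to the model):\n",
+ "\n",
+ "```python\n",
+ "from nemo.collections.asr.metrics.wer import word_error_rate\n",
+ "from torchmetrics.text import SacreBLEUScore\n",
+ "\n",
+ "# 1 missing word against a 4-word reference -> WER = 0.25\n",
+ "print(word_error_rate(hypotheses=[\"the cat sat\"], references=[\"the cat sat down\"]))\n",
+ "\n",
+ "# SacreBLEU takes one or more references per hypothesis\n",
+ "bleu = SacreBLEUScore(n_gram=4)\n",
+ "bleu.update([\"ein kleiner test satz\"], [[\"ein kleiner test satz\"]])\n",
+ "print(bleu.compute())  # 1.0 for an exact match\n",
+ "```"
+ ]
+ },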
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "eb4588b4-7d52-4c4e-bb81-2bcb5a227afd",
+ "metadata": {
+ "id": "eb4588b4-7d52-4c4e-bb81-2bcb5a227afd"
+ },
+ "outputs": [],
+ "source": [
+ "from nemo.collections.asr.metrics.wer import word_error_rate\n",
+ "from torchmetrics.text import SacreBLEUScore"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "a1c71044-3cb3-453c-bfcd-ee551cecdddf",
+ "metadata": {
+ "id": "a1c71044-3cb3-453c-bfcd-ee551cecdddf"
+ },
+ "outputs": [],
+ "source": [
+ "asr_test = read_manifest(data_module.test_manifest)\n",
+ "ast_test = read_manifest(data_module.ast_test_manifest)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "f1d8acd2-aa08-4ba0-b0c6-c5d662243b00",
+ "metadata": {
+ "id": "f1d8acd2-aa08-4ba0-b0c6-c5d662243b00"
+ },
+ "outputs": [],
+ "source": [
+ "asr_filepaths = [x['audio_filepath'] for x in asr_test]\n",
+ "asr_gt = [x['text'] for x in asr_test]\n",
+ "\n",
+ "ast_filepaths = [x['audio_filepath'] for x in ast_test]\n",
+ "ast_gt = [x['text'] for x in ast_test]\n",
+ "\n",
+ "print(\"Num files:\", len(asr_filepaths))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "85ace700-97bf-4697-8e1a-5793eb21e678",
+ "metadata": {
+ "id": "85ace700-97bf-4697-8e1a-5793eb21e678"
+ },
+ "outputs": [],
+ "source": [
+ "if torch.cuda.is_available():\n",
+ " model = model.cuda() # move model to gpu\n",
+ " model = model.to(torch.bfloat16) # cast full model to bfloat16"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "00f2607a-2f67-47fe-9903-0adae4d9adf5",
+ "metadata": {
+ "id": "00f2607a-2f67-47fe-9903-0adae4d9adf5"
+ },
+ "outputs": [],
+ "source": [
+ "asr_preds = model.transcribe(asr_filepaths, pnc='no', task='asr', source_lang='en', target_lang='en', batch_size=32)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "eea5ab20-60d4-4e19-87fb-71f6835941e8",
+ "metadata": {
+ "id": "eea5ab20-60d4-4e19-87fb-71f6835941e8"
+ },
+ "outputs": [],
+ "source": [
+ "ast_preds = model.transcribe(ast_filepaths, pnc='no', task='ast', source_lang='en', target_lang='de', batch_size=32)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "69e5bb54-5193-4268-98e1-dc6daae8f6eb",
+ "metadata": {
+ "id": "69e5bb54-5193-4268-98e1-dc6daae8f6eb"
+ },
+ "outputs": [],
+ "source": [
+ "wer = word_error_rate(asr_preds, asr_gt)\n",
+ "print(\"WER\", wer)\n",
+ "\n",
+ "sacrebleu = SacreBLEUScore(n_gram=4)\n",
+ "scores = []\n",
+ "preds = []\n",
+ "gts = []\n",
+ "for pred, gt in zip(ast_preds, ast_gt):\n",
+ " preds.append(pred)\n",
+ " gts.append([gt])\n",
+ "\n",
+ "# bleu = sum(scores) / len(scores)\n",
+ "sacrebleu.update(preds, gts)\n",
+ "bleu = sacrebleu.compute()\n",
+ "print(\"BLEU\", bleu.item() * 100)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "5ee530c9-36a3-47d2-83b9-b2a64080c0eb",
+ "metadata": {
+ "id": "5ee530c9-36a3-47d2-83b9-b2a64080c0eb"
+ },
+ "source": [
+ "# Train Model\n",
+ "\n",
+ "Finally, now that adapters have been prepared, model has been evaluated for a baseline and the dataset is prepared, it's time to train the adapter weights on the new datasets.\n",
+ "\n",
+ "---\n",
+ "\n",
+ "First, we update the optimizer and scheduler config"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "d0a40461-d739-436c-967a-1a0f8a3ad197",
+ "metadata": {
+ "id": "d0a40461-d739-436c-967a-1a0f8a3ad197"
+ },
+ "outputs": [],
+ "source": [
+ "print(OmegaConf.to_yaml(model.cfg.optim))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "4ba5811a-fc42-4de5-add5-0d26d1c84219",
+ "metadata": {
+ "id": "4ba5811a-fc42-4de5-add5-0d26d1c84219"
+ },
+ "outputs": [],
+ "source": [
+ "# Setup optimization\n",
+ "model.cfg.optim.lr = 3e-4\n",
+ "model.cfg.optim.sched.warmup_steps = 25"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "d1de270a-d1cb-4080-b571-7acf365d7b99",
+ "metadata": {
+ "id": "d1de270a-d1cb-4080-b571-7acf365d7b99"
+ },
+ "source": [
+ "---\n",
+ "\n",
+ "Next, we setup a Lightning Trainer and Experiment Manager"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "b9e34369-21ec-41bf-beae-30b60ab46c14",
+ "metadata": {
+ "id": "b9e34369-21ec-41bf-beae-30b60ab46c14"
+ },
+ "outputs": [],
+ "source": [
+ "from omegaconf import OmegaConf\n",
+ "from nemo.utils import exp_manager"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "46f74863-a34d-4ad0-9d8e-3337ea5edd63",
+ "metadata": {
+ "id": "46f74863-a34d-4ad0-9d8e-3337ea5edd63"
+ },
+ "outputs": [],
+ "source": [
+ "trainer = L.Trainer(max_steps=200, accumulate_grad_batches=1, logger=False, enable_checkpointing=False, check_val_every_n_epoch=5)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "414d7887-bed5-46a2-bfe1-8349db1e6b5b",
+ "metadata": {
+ "id": "414d7887-bed5-46a2-bfe1-8349db1e6b5b"
+ },
+ "outputs": [],
+ "source": [
+ "# # Environment variable generally used for multi-node multi-gpu training.\n",
+ "# # In notebook environments, this flag is unnecessary and can cause logs of multiple training runs to overwrite each other.\n",
+ "# os.environ.pop('NEMO_EXPM_VERSION', None)\n",
+ "\n",
+ "# config = exp_manager.ExpManagerConfig(\n",
+ "# exp_dir=f'experiments/canary/',\n",
+ "# name=f\"Canary-Model-Adapter-Training\",\n",
+ "# checkpoint_callback_params=exp_manager.CallbackParams(\n",
+ "# monitor=\"val_wer\",\n",
+ "# mode=\"min\",\n",
+ "# always_save_nemo=False,\n",
+ "# save_best_model=False,\n",
+ "# ),\n",
+ "# )\n",
+ "\n",
+ "# config = OmegaConf.structured(config)\n",
+ "\n",
+ "# logdir = exp_manager.exp_manager(trainer, config)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "60769859-8ed5-4f9c-b93a-a6875c7c1c73",
+ "metadata": {
+ "id": "60769859-8ed5-4f9c-b93a-a6875c7c1c73"
+ },
+ "source": [
+ "---\n",
+ "\n",
+ "Begin training !"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "2adb8607-a011-440d-bfa8-976c2871e8ef",
+ "metadata": {
+ "id": "2adb8607-a011-440d-bfa8-976c2871e8ef"
+ },
+ "outputs": [],
+ "source": [
+ "trainer.fit(model, data_module)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "MImbKiqQ6ng-",
+ "metadata": {
+ "id": "MImbKiqQ6ng-"
+ },
+ "source": [
+ "---\n",
+ "\n",
+ "Save just the adapter parameters - which is less than 2 MB !"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "-akTdyGM6gum",
+ "metadata": {
+ "id": "-akTdyGM6gum"
+ },
+ "outputs": [],
+ "source": [
+ "model.save_adapters(\"adapters.pt\")\n",
+ "!ls -l -- *.pt\n",
+ "!du -sh *.pt"
+ ]
+ },
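+ {
+ "cell_type": "markdown",
+ "id": "load-adapters-sketch",
+ "metadata": {
+ "id": "load-adapters-sketch"
+ },
+ "source": [
+ "---\n",
+ "\n",
+ "To restore the tuned adapters later (for example, in a fresh session), a sketch like the following should work, assuming the same base checkpoint is used:\n",
+ "\n",
+ "```python\n",
+ "# Re-create the base model, enable adapter support, then load the saved adapter weights\n",
+ "model = nemo_asr.models.ASRModel.from_pretrained(\"nvidia/canary-1b\")\n",
+ "model.replace_adapter_compatible_modules()\n",
+ "model.load_adapters(\"adapters.pt\")\n",
+ "model.eval()\n",
+ "```"
+ ]
+ },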
+ {
+ "cell_type": "markdown",
+ "id": "2525bec5-c42b-48c1-b03c-e8126c346238",
+ "metadata": {
+ "id": "2525bec5-c42b-48c1-b03c-e8126c346238"
+ },
+ "source": [
+ "# Evaluate after Adaptation\n",
+ "\n",
+ "Now that the model is done training, lets evaluate its scores on the test set again.\n",
+ "We should see a markedly higher translation BLEU and lower WER from above."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "6edb5528-b1b6-4505-8cdc-ee68c715415e",
+ "metadata": {
+ "id": "6edb5528-b1b6-4505-8cdc-ee68c715415e"
+ },
+ "outputs": [],
+ "source": [
+ "asr_test = read_manifest(data_module.test_manifest)\n",
+ "ast_test = read_manifest(data_module.ast_test_manifest)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "384aa5f2-89d5-4080-a717-4d65776fae6b",
+ "metadata": {
+ "id": "384aa5f2-89d5-4080-a717-4d65776fae6b"
+ },
+ "outputs": [],
+ "source": [
+ "asr_filepaths = [x['audio_filepath'] for x in asr_test]\n",
+ "asr_gt = [x['text'] for x in asr_test]\n",
+ "\n",
+ "ast_filepaths = [x['audio_filepath'] for x in ast_test]\n",
+ "ast_gt = [x['text'] for x in ast_test]\n",
+ "\n",
+ "print(\"Num files:\", len(asr_filepaths))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "48ce5b4c-d349-4d86-ad3c-ee930bb569ee",
+ "metadata": {
+ "id": "48ce5b4c-d349-4d86-ad3c-ee930bb569ee"
+ },
+ "outputs": [],
+ "source": [
+ "if torch.cuda.is_available():\n",
+ " model = model.cuda()\n",
+ " model = model.to(torch.bfloat16)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "49a37806-286e-4954-8f27-3829cf61d755",
+ "metadata": {
+ "id": "49a37806-286e-4954-8f27-3829cf61d755"
+ },
+ "outputs": [],
+ "source": [
+ "asr_preds = model.transcribe(asr_filepaths, pnc='no', task='asr', source_lang='en', target_lang='en', batch_size=32)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "b701e014-2f71-487c-9300-a3ea89a43a45",
+ "metadata": {
+ "id": "b701e014-2f71-487c-9300-a3ea89a43a45"
+ },
+ "outputs": [],
+ "source": [
+ "ast_preds = model.transcribe(ast_filepaths, pnc='no', task='ast', source_lang='en', target_lang='de', batch_size=32)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "087054e5-c511-4094-a115-faf4a3b49d51",
+ "metadata": {
+ "id": "087054e5-c511-4094-a115-faf4a3b49d51"
+ },
+ "outputs": [],
+ "source": [
+ "from nemo.collections.asr.metrics.wer import word_error_rate\n",
+ "from torchmetrics.text import SacreBLEUScore"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "ef938f8f-b2db-45f6-9b30-4b3bbce2423f",
+ "metadata": {
+ "id": "ef938f8f-b2db-45f6-9b30-4b3bbce2423f"
+ },
+ "outputs": [],
+ "source": [
+ "wer = word_error_rate(asr_preds, asr_gt)\n",
+ "print(\"WER\", wer)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "5a7c2820-d394-4627-8438-0d810d89b72d",
+ "metadata": {
+ "id": "5a7c2820-d394-4627-8438-0d810d89b72d"
+ },
+ "outputs": [],
+ "source": [
+ "sacrebleu = SacreBLEUScore(n_gram=4)\n",
+ "scores = []\n",
+ "preds = []\n",
+ "gts = []\n",
+ "for pred, gt in zip(ast_preds, ast_gt):\n",
+ " preds.append(pred)\n",
+ " gts.append([gt])\n",
+ "\n",
+ "# bleu = sum(scores) / len(scores)\n",
+ "sacrebleu.update(preds, gts)\n",
+ "bleu = sacrebleu.compute()\n",
+ "print(\"BLEU\", bleu.item() * 100)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "521df0e6-1d3c-4709-a080-63638315c514",
+ "metadata": {
+ "id": "521df0e6-1d3c-4709-a080-63638315c514"
+ },
+ "source": [
+ "# Conclusion\n",
+ "\n",
+ "In this tutorial we added adapters to a Multi Task model (Nvidia Canary) and show how to create a custom dataset to finetune a canary model to a new dataset with previous tasks such as ASR and AST. The primary goal of this tutorial was to show how to flexibly adapt a Canary model to any of the pre-existing tasks.\n",
+ "\n",
+ "In a future tutorial, we will show how to add additional tasks to a pre-trained Canary, so that you can leverage the pre-trained encoder and decoder for your own custom tasks!"
+ ]
+ }
+ ],
+ "metadata": {
+ "accelerator": "GPU",
+ "colab": {
+ "gpuType": "T4",
+ "provenance": []
+ },
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.13"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
}
diff --git a/tutorials/audio/speech_enhancement/Speech_Enhancement_with_NeMo.ipynb b/tutorials/audio/speech_enhancement/Speech_Enhancement_with_NeMo.ipynb
index 5c697840ba09..faef27d18abf 100644
--- a/tutorials/audio/speech_enhancement/Speech_Enhancement_with_NeMo.ipynb
+++ b/tutorials/audio/speech_enhancement/Speech_Enhancement_with_NeMo.ipynb
@@ -91,7 +91,7 @@
"import IPython.display as ipd\n",
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
- "import pytorch_lightning as pl\n",
+ "import lightning.pytorch as pl\n",
"import soundfile as sf\n",
"\n",
"from omegaconf import OmegaConf, open_dict\n",
diff --git a/tutorials/audio/speech_enhancement/Speech_Enhancement_with_Online_Augmentation.ipynb b/tutorials/audio/speech_enhancement/Speech_Enhancement_with_Online_Augmentation.ipynb
index ff6970d98522..e8b734537a41 100644
--- a/tutorials/audio/speech_enhancement/Speech_Enhancement_with_Online_Augmentation.ipynb
+++ b/tutorials/audio/speech_enhancement/Speech_Enhancement_with_Online_Augmentation.ipynb
@@ -93,7 +93,7 @@
"import IPython.display as ipd\n",
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
- "import pytorch_lightning as pl\n",
+ "import lightning.pytorch as pl\n",
"import soundfile as sf\n",
"from pathlib import Path\n",
"from omegaconf import OmegaConf, open_dict\n",
@@ -981,4 +981,4 @@
},
"nbformat": 4,
"nbformat_minor": 5
-}
\ No newline at end of file
+}
diff --git a/tutorials/llm/llama-3/README.rst b/tutorials/llm/llama-3/README.rst
index bb6171e6f582..1d12b8847c0d 100755
--- a/tutorials/llm/llama-3/README.rst
+++ b/tutorials/llm/llama-3/README.rst
@@ -2,7 +2,7 @@
Getting Started with Llama 3 and Llama 3.1
==========================================
-This repository contains jupyter notebook tutorials using NeMo Framework for Llama-3 and Llama-3.1 models by Meta.
+This repository contains Jupyter Notebook tutorials using the NeMo Framework for Llama-3 and Llama-3.1 models by Meta.
.. list-table::
:widths: 100 25 100
@@ -16,7 +16,7 @@ This repository contains jupyter notebook tutorials using NeMo Framework for Lla
- Perform LoRA PEFT on Llama 3 8B Instruct using a dataset for bio-medical domain question answering. Deploy multiple LoRA adapters with NVIDIA NIM.
* - `Llama 3.1 Law-Domain LoRA Fine-Tuning and Deployment with NeMo Framework and NVIDIA NIM <./sdg-law-title-generation>`_
- `Law StackExchange `_
- - Perform LoRA PEFT on Llama 3.1 8B Instruct using a synthetically augmented version of Law StackExchange with NeMo Framework, followed by deployment with NVIDIA NIM. As a pre-requisite, follow the tutorial for `data curation using NeMo Curator `__.
- * - `Llama 3.1 WikiText Pruning and Distillation with NeMo Framework <./pruning-distillation>`_
+ - Perform LoRA PEFT on Llama 3.1 8B Instruct using a synthetically augmented version of Law StackExchange with NeMo Framework, followed by deployment with NVIDIA NIM. As a prerequisite, follow the tutorial for `data curation using NeMo Curator `_.
+ * - `Llama 3.1 Pruning and Distillation with NeMo Framework <./pruning-distillation>`_
- `WikiText-103-v1 `_
- - Perform pruning and distillation on Llama 3.1 8B Instruct using the WikiText-103-v1 dataset with NeMo Framework.
+ - Perform pruning and distillation on Llama 3.1 8B using the WikiText-103-v1 dataset with NeMo Framework.
diff --git a/tutorials/llm/llama-3/nemo2-sft-peft/README.rst b/tutorials/llm/llama-3/nemo2-sft-peft/README.rst
new file mode 100644
index 000000000000..74a581c52f44
--- /dev/null
+++ b/tutorials/llm/llama-3/nemo2-sft-peft/README.rst
@@ -0,0 +1,61 @@
+Llama 3 Supervised Fine-Tuning and Parameter Efficient Fine-Tuning with NeMo 2.0
+================================================================================
+
+`Llama 3 `_ is an open-source large language model by Meta that delivers state-of-the-art performance on popular industry benchmarks. It has been pretrained on over 15 trillion tokens and supports an 8K token context length. It is available in two sizes, 8B and 70B, and each size has two variants: base pretrained and instruction tuned.
+
+Supervised Fine-Tuning (SFT) refers to unfreezing all the weights and layers in our model and training on a newly labeled set of examples. We can fine-tune to incorporate new, domain-specific knowledge, or teach the foundation model what type of response to provide.
+
+`Low-Rank Adaptation (LoRA) `__ has emerged as a popular Parameter-Efficient Fine-Tuning (PEFT) technique that tunes a very small number of additional parameters as compared to full fine-tuning, thereby reducing the compute required.
+
+`NVIDIA NeMo
+Framework `__ provides tools to perform SFT and LoRA on Llama 3 to fit your use case.
+
+Requirements
+------------
+
+* System Configuration
+ * For SFT: access to at least 2 NVIDIA GPUs with a cumulative memory of at least 80GB, for example: 2 x H100-80GB or 2 x A100-80GB.
+  * For LoRA: access to at least 1 NVIDIA GPU with at least 80GB of memory, for example: 1 x H100-80GB or 1 x A100-80GB.
+ * A Docker-enabled environment, with `NVIDIA Container Runtime `_ installed, which will make the container GPU-aware.
+
+* Software Requirements
+  * Use the latest `NeMo Framework Container <https://catalog.ngc.nvidia.com/orgs/nvidia/containers/nemo/tags>`_. Note that you must be logged in to the container registry to view this page.
+  * This notebook uses the container: `nvcr.io/nvidia/nemo:dev`.
+  * Get your Hugging Face `access token <https://huggingface.co/docs/hub/en/security-tokens>`_, which will be used to obtain the tokenizer required during training.
+
+* NeMo 2.0 and NeMo-Run
+ * We will use NeMo 2.0 and NeMo-Run to perform SFT and LoRA on Llama 3. Both are already available in the NeMo Framework Container.
+
+
+Start the NeMo Framework Container
+----------------------------------
+
+1. You can start and enter the dev container by running:
+
+.. code:: bash
+
+ docker run \
+      --gpus all \
+ --shm-size=2g \
+ --net=host \
+ --ulimit memlock=-1 \
+ --rm -it \
+ -v ${PWD}:/workspace \
+ -w /workspace \
+ nvcr.io/nvidia/nemo:dev bash
+
+
+2. You need to request download permission from Meta and Hugging Face. Then, from within the container, log in through `huggingface-cli` using your Hugging Face token.
+
+.. code:: bash
+
+ huggingface-cli login
+
+
+3. From within the container, start JupyterLab:
+
+.. code:: bash
+
+ jupyter lab --ip 0.0.0.0 --port=8888 --allow-root
+
+4. Then, navigate to `the SFT notebook <./nemo2-sft.ipynb>`__ or `the LoRA notebook <./nemo2-peft.ipynb>`__ to perform SFT or LoRA on Llama 3, respectively.
diff --git a/tutorials/llm/llama-3/nemo2-sft-peft/nemo2-peft.ipynb b/tutorials/llm/llama-3/nemo2-sft-peft/nemo2-peft.ipynb
new file mode 100644
index 000000000000..aa463e2b84be
--- /dev/null
+++ b/tutorials/llm/llama-3/nemo2-sft-peft/nemo2-peft.ipynb
@@ -0,0 +1,572 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Learning Goals\n",
+ "\n",
+ "## Optimizing Foundation Models with Parameter-Efficient Fine-Tuning (PEFT)\n",
+ "\n",
+ "This notebook aims to demonstrate how to adapt or customize foundation models to improve performance on specific tasks using NeMo 2.0.\n",
+ "\n",
+ "This optimization process is known as fine-tuning, which involves adjusting the weights of a pre-trained foundation model with custom data.\n",
+ "\n",
+ "Considering that foundation models can be significantly large, a variant of fine-tuning has gained traction recently known as PEFT. PEFT encompasses several methods, including P-Tuning, LoRA, Adapters, IA3, etc. NeMo 2.0 currently supports [Low-Rank Adaptation (LoRA)](https://arxiv.org/pdf/2106.09685) method.\n",
+ "\n",
+ "NeMo 2.0 introduces Python-based configurations, PyTorch Lightningβs modular abstractions, and NeMo-Run for scaling experiments across multiple GPUs. In this notebook, we will use NeMo-Run to streamline the configuration and execution of our experiments.\n",
+ "\n",
+ "## Data\n",
+ "This notebook uses the SQuAD dataset. For more details about the data, refer to [SQuAD: 100,000+ Questions for Machine Comprehension of Text](https://arxiv.org/abs/1606.05250)\n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "\n",
+ "## Step 1. Import the Hugging Face Checkpoint\n",
+ "We use the `llm.import_ckpt` API to download the specified model using the \"hf://\" URL format. It will then convert the model into NeMo 2.0 format. For all model supported in NeMo 2.0, refer to [Large Language Models](https://docs.nvidia.com/nemo-framework/user-guide/24.09/llms/index.html#large-language-models) section of NeMo Framework User Guide."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "import nemo_run as run\n",
+ "from nemo import lightning as nl\n",
+ "from nemo.collections import llm\n",
+ "from megatron.core.optimizer import OptimizerConfig\n",
+ "from nemo.collections.llm.peft.lora import LoRA\n",
+ "import torch\n",
+ "import pytorch_lightning as pl\n",
+ "from pathlib import Path\n",
+ "from nemo.collections.llm.recipes.precision.mixed_precision import bf16_mixed\n",
+ "\n",
+ "\n",
+ "# llm.import_ckpt is the nemo2 API for converting Hugging Face checkpoint to NeMo format\n",
+ "# example usage:\n",
+ "# llm.import_ckpt(model=llm.llama3_8b.model(), source=\"hf://meta-llama/Meta-Llama-3-8B\")\n",
+ "#\n",
+ "# We use run.Partial to configure this function\n",
+ "def configure_checkpoint_conversion():\n",
+ " return run.Partial(\n",
+ " llm.import_ckpt,\n",
+ " model=llm.llama3_8b.model(),\n",
+ " source=\"hf://meta-llama/Meta-Llama-3-8B\",\n",
+ " overwrite=False,\n",
+ " )\n",
+ "\n",
+ "# configure your function\n",
+ "import_ckpt = configure_checkpoint_conversion()\n",
+ "# define your executor\n",
+ "local_executor = run.LocalExecutor()\n",
+ "\n",
+ "# run your experiment\n",
+ "run.run(import_ckpt, executor=local_executor)\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Step 2. Prepare the Data\n",
+ "\n",
+ "We will be using SQuAD for this notebook. NeMo 2.0 already provides a `SquadDataModule`. Example usage:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "def squad() -> run.Config[pl.LightningDataModule]:\n",
+ " return run.Config(llm.SquadDataModule, seq_length=2048, micro_batch_size=1, global_batch_size=8, num_workers=0)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "To learn how to use your own data to create a custom `DataModule` for performing PEFT, refer to [NeMo 2.0 SFT notebook](./nemo2-sft.ipynb)."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Step 3.1: Configure PEFT with NeMo 2.0 API and NeMo-Run\n",
+ "\n",
+ "The following Python script utilizes the NeMo 2.0 API to perform PEFT. In this script, we are configuring the following components for training. These components are similar between SFT and PEFT. SFT and PEFT both use `llm.finetune` API. To switch from SFT to PEFT, you just need to add `peft` with the LoRA adapter to the API parameter.\n",
+ "\n",
+ "### Configure the Trainer\n",
+ "The NeMo 2.0 Trainer works similarly to the PyTorch Lightning trainer.\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "def trainer() -> run.Config[nl.Trainer]:\n",
+ " strategy = run.Config(\n",
+ " nl.MegatronStrategy,\n",
+ " tensor_model_parallel_size=1\n",
+ " )\n",
+ " trainer = run.Config(\n",
+ " nl.Trainer,\n",
+ " devices=1,\n",
+ " max_steps=20,\n",
+ " accelerator=\"gpu\",\n",
+ " strategy=strategy,\n",
+ " plugins=bf16_mixed(),\n",
+ " log_every_n_steps=1,\n",
+ " limit_val_batches=2,\n",
+ " val_check_interval=2,\n",
+ " num_sanity_val_steps=0,\n",
+ " )\n",
+ " return trainer\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Configure the Logger\n",
+ "Configure your training steps, output directories and logging through `NeMoLogger`. In the following example, the experiment output will be saved at `./results/nemo2_peft`.\n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "def logger() -> run.Config[nl.NeMoLogger]:\n",
+ " ckpt = run.Config(\n",
+ " nl.ModelCheckpoint,\n",
+ " save_last=True,\n",
+ " every_n_train_steps=10,\n",
+ " monitor=\"reduced_train_loss\",\n",
+ " save_top_k=1,\n",
+ " save_on_train_epoch_end=True,\n",
+ " save_optim_on_train_end=True,\n",
+ " )\n",
+ "\n",
+ " return run.Config(\n",
+ " nl.NeMoLogger,\n",
+ " name=\"nemo2_peft\",\n",
+ " log_dir=\"./results\",\n",
+ " use_datetime_version=False,\n",
+ " ckpt=ckpt,\n",
+ " wandb=None\n",
+ " )"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "\n",
+ "\n",
+ "### Configure the Optimizer\n",
+ "In the following example, we will be using the distributed adam optimizer and pass in the optimizer configuration through `OptimizerConfig`: "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "def adam_with_cosine_annealing() -> run.Config[nl.OptimizerModule]:\n",
+ " opt_cfg = run.Config(\n",
+ " OptimizerConfig,\n",
+ " optimizer=\"adam\",\n",
+ " lr=0.0001,\n",
+ " adam_beta2=0.98,\n",
+ " use_distributed_optimizer=True,\n",
+ " clip_grad=1.0,\n",
+ " bf16=True,\n",
+ " )\n",
+ " return run.Config(\n",
+ " nl.MegatronOptimizerModule,\n",
+ " config=opt_cfg\n",
+ " )\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Pass in the LoRA Adapter\n",
+ "We need to pass in the LoRA adapter to our fine-tuning API to perform LoRA fine-tuning. We can configure the adapter as follows. The target module we support includes: `linear_qkv`, `linear_proj`, `linear_fc1` and `linear_fc2`. In the final script, we used the default configurations for LoRA (`llm.peft.LoRA()`), which will use the full list with `dim=32`."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "def lora() -> run.Config[nl.pytorch.callbacks.PEFT]:\n",
+ " return run.Config(LoRA)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Configure the Base Model\n",
+ "We will perform PEFT on top of Llama-3-8b, so we create a `LlamaModel` to pass to the NeMo 2.0 finetune API."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "def llama3_8b() -> run.Config[pl.LightningModule]:\n",
+ " return run.Config(llm.LlamaModel, config=run.Config(llm.Llama3Config8B))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Auto Resume\n",
+ "In NeMo 2.0, we can directly pass in the Llama3-8b Hugging Face ID to start PEFT without manually converting it into the NeMo checkpoint, as required in NeMo 1.0."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "def resume() -> run.Config[nl.AutoResume]:\n",
+ " return run.Config(\n",
+ " nl.AutoResume,\n",
+ " restore_config=run.Config(nl.RestoreConfig,\n",
+ " path=\"nemo://meta-llama/Meta-Llama-3-8B\"\n",
+ " ),\n",
+ " resume_if_exists=True,\n",
+ " )"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "\n",
+ "### Configure the NeMo 2.0 finetune API\n",
+ "Using all the components we created above, we can call the NeMo 2.0 finetune API. The python example usage is as below:\n",
+ "```\n",
+ "llm.finetune(\n",
+ " model=llama3_8b(),\n",
+ " data=squad(),\n",
+ " trainer=trainer(),\n",
+ " peft=lora(),\n",
+ " log=logger(),\n",
+ " optim=adam_with_cosine_annealing(),\n",
+ " resume=resume(),\n",
+ ")\n",
+ "```\n",
+ "We configure the `llm.finetune` API as below:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def configure_finetuning_recipe():\n",
+ " return run.Partial(\n",
+ " llm.finetune,\n",
+ " model=llama3_8b(),\n",
+ " trainer=trainer(),\n",
+ " data=squad(),\n",
+ " log=logger(),\n",
+ " peft=lora(),\n",
+ " optim=adam_with_cosine_annealing(),\n",
+ " resume=resume(),\n",
+ " )"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Step 3.2: Run PEFT with NeMo 2.0 API and NeMo-Run\n",
+ "\n",
+ "We use `LocalExecutor` for executing our configured finetune function. For more details on the NeMo-Run executor, refer to [Execute NeMo Run](https://github.com/NVIDIA/NeMo-Run/blob/main/docs/source/guides/execution.md) of NeMo-Run Guides. "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "def local_executor_torchrun(nodes: int = 1, devices: int = 1) -> run.LocalExecutor:\n",
+ " # Env vars for jobs are configured here\n",
+ " env_vars = {\n",
+ " \"TORCH_NCCL_AVOID_RECORD_STREAMS\": \"1\",\n",
+ " \"NCCL_NVLS_ENABLE\": \"0\",\n",
+ " \"NVTE_DP_AMAX_REDUCE_INTERVAL\": \"0\",\n",
+ " \"NVTE_ASYNC_AMAX_REDUCTION\": \"1\",\n",
+ " \"NVTE_FUSED_ATTN\": \"0\",\n",
+ " }\n",
+ "\n",
+ " executor = run.LocalExecutor(ntasks_per_node=devices, launcher=\"torchrun\", env_vars=env_vars)\n",
+ "\n",
+ " return executor\n",
+ "\n",
+ "if __name__ == '__main__':\n",
+ " run.run(configure_finetuning_recipe(), executor=local_executor_torchrun())\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Step 4. Generate Results from Trained PEFT Checkpoints \n",
+ "\n",
+ "We use the `llm.generate` API in NeMo 2.0 to generate results from the trained PEFT checkpoint. Find your last saved checkpoint from your experiment dir: `results/nemo2_peft/checkpoints`. "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "peft_ckpt_path=str(next((d for d in Path(\"./results/nemo2_peft/checkpoints/\").iterdir() if d.is_dir() and d.name.endswith(\"-last\")), None))\n",
+ "print(\"We will load PEFT checkpoint from:\", peft_ckpt_path)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The SQuAD test set contains over 10,000 samples. For a quick demonstration, we will use the first 100 lines as an example input. "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "%%bash\n",
+ "head -n 100 /root/.cache/nemo/datasets/squad/test.jsonl > toy_testset.jsonl\n",
+ "head -n 3 /root/.cache/nemo/datasets/squad/test.jsonl"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "You should see something like:\n",
+ "```\n",
+ "{\"input\": \"Context: Super Bowl 50 was an American football game to determine the champion of the National Football League (NFL) for the 2015 season. The American Football Conference (AFC) champion Denver Broncos defeated the National Football Conference (NFC) champion Carolina Panthers 24\\u201310 to earn their third Super Bowl title. The game was played on February 7, 2016, at Levi's Stadium in the San Francisco Bay Area at Santa Clara, California. As this was the 50th Super Bowl, the league emphasized the \\\"golden anniversary\\\" with various gold-themed initiatives, as well as temporarily suspending the tradition of naming each Super Bowl game with Roman numerals (under which the game would have been known as \\\"Super Bowl L\\\"), so that the logo could prominently feature the Arabic numerals 50. Question: Which NFL team represented the AFC at Super Bowl 50? Answer:\", \"output\": \"Denver Broncos\", \"original_answers\": [\"Denver Broncos\", \"Denver Broncos\", \"Denver Broncos\"]}\n",
+ "{\"input\": \"Context: Super Bowl 50 was an American football game to determine the champion of the National Football League (NFL) for the 2015 season. The American Football Conference (AFC) champion Denver Broncos defeated the National Football Conference (NFC) champion Carolina Panthers 24\\u201310 to earn their third Super Bowl title. The game was played on February 7, 2016, at Levi's Stadium in the San Francisco Bay Area at Santa Clara, California. As this was the 50th Super Bowl, the league emphasized the \\\"golden anniversary\\\" with various gold-themed initiatives, as well as temporarily suspending the tradition of naming each Super Bowl game with Roman numerals (under which the game would have been known as \\\"Super Bowl L\\\"), so that the logo could prominently feature the Arabic numerals 50. Question: Which NFL team represented the NFC at Super Bowl 50? Answer:\", \"output\": \"Carolina Panthers\", \"original_answers\": [\"Carolina Panthers\", \"Carolina Panthers\", \"Carolina Panthers\"]}\n",
+ "{\"input\": \"Context: Super Bowl 50 was an American football game to determine the champion of the National Football League (NFL) for the 2015 season. The American Football Conference (AFC) champion Denver Broncos defeated the National Football Conference (NFC) champion Carolina Panthers 24\\u201310 to earn their third Super Bowl title. The game was played on February 7, 2016, at Levi's Stadium in the San Francisco Bay Area at Santa Clara, California. As this was the 50th Super Bowl, the league emphasized the \\\"golden anniversary\\\" with various gold-themed initiatives, as well as temporarily suspending the tradition of naming each Super Bowl game with Roman numerals (under which the game would have been known as \\\"Super Bowl L\\\"), so that the logo could prominently feature the Arabic numerals 50. Question: Where did Super Bowl 50 take place? Answer:\", \"output\": \"Santa Clara, California\", \"original_answers\": [\"Santa Clara, California\", \"Levi's Stadium\", \"Levi's Stadium in the San Francisco Bay Area at Santa Clara, California.\"]}\n",
+ "\n",
+ "```"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "We will pass the string `toy_testset.jsonl` to the `input_dataset` parameter of `llm.generate`. To evaluate the entire test set, you can instead pass the SQuAD data module directly, using `input_dataset=squad()`. The input JSONL file should follow the format shown above, containing `input` and `output` fields (additional keys are optional)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "from megatron.core.inference.common_inference_params import CommonInferenceParams\n",
+ "\n",
+ "\n",
+ "def trainer() -> run.Config[nl.Trainer]:\n",
+ " strategy = run.Config(\n",
+ " nl.MegatronStrategy,\n",
+ " tensor_model_parallel_size=1\n",
+ " )\n",
+ " trainer = run.Config(\n",
+ " nl.Trainer,\n",
+ " accelerator=\"gpu\",\n",
+ " devices=1,\n",
+ " num_nodes=1,\n",
+ " strategy=strategy,\n",
+ " plugins=bf16_mixed(),\n",
+ " )\n",
+ " return trainer\n",
+ "\n",
+ "def configure_inference():\n",
+ " return run.Partial(\n",
+ " llm.generate,\n",
+ " path=str(peft_ckpt_path),\n",
+ " trainer=trainer(),\n",
+ " input_dataset=\"toy_testset.jsonl\",\n",
+ " inference_params=CommonInferenceParams(num_tokens_to_generate=20, top_k=1),\n",
+ " output_path=\"peft_prediction.jsonl\",\n",
+ " )\n",
+ "\n",
+ "\n",
+ "def local_executor_torchrun(nodes: int = 1, devices: int = 1) -> run.LocalExecutor:\n",
+ " # Env vars for jobs are configured here\n",
+ " env_vars = {\n",
+ " \"TORCH_NCCL_AVOID_RECORD_STREAMS\": \"1\",\n",
+ " \"NCCL_NVLS_ENABLE\": \"0\",\n",
+ " \"NVTE_DP_AMAX_REDUCE_INTERVAL\": \"0\",\n",
+ " \"NVTE_ASYNC_AMAX_REDUCTION\": \"1\",\n",
+ " \"NVTE_FUSED_ATTN\": \"0\",\n",
+ " }\n",
+ "\n",
+ " executor = run.LocalExecutor(ntasks_per_node=devices, launcher=\"torchrun\", env_vars=env_vars)\n",
+ "\n",
+ " return executor\n",
+ "\n",
+ "if __name__ == '__main__':\n",
+ " run.run(configure_inference(), executor=local_executor_torchrun())\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "After the inference is complete, you will see results similar to the following:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "%%bash\n",
+ "head -n 3 peft_prediction.jsonl"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "You should see outputs similar to the following:\n",
+ "```\n",
+ "{\"input\": \"Context: Super Bowl 50 was an American football game to determine the champion of the National Football League (NFL) for the 2015 season. The American Football Conference (AFC) champion Denver Broncos defeated the National Football Conference (NFC) champion Carolina Panthers 24\\u201310 to earn their third Super Bowl title. The game was played on February 7, 2016, at Levi's Stadium in the San Francisco Bay Area at Santa Clara, California. As this was the 50th Super Bowl, the league emphasized the \\\"golden anniversary\\\" with various gold-themed initiatives, as well as temporarily suspending the tradition of naming each Super Bowl game with Roman numerals (under which the game would have been known as \\\"Super Bowl L\\\"), so that the logo could prominently feature the Arabic numerals 50. Question: Which NFL team represented the AFC at Super Bowl 50? Answer:\", \"original_answers\": [\"Denver Broncos\", \"Denver Broncos\", \"Denver Broncos\"], \"label\": \"Denver Broncos\", \"prediction\": \" Denver Broncos\"}\n",
+ "{\"input\": \"Context: Super Bowl 50 was an American football game to determine the champion of the National Football League (NFL) for the 2015 season. The American Football Conference (AFC) champion Denver Broncos defeated the National Football Conference (NFC) champion Carolina Panthers 24\\u201310 to earn their third Super Bowl title. The game was played on February 7, 2016, at Levi's Stadium in the San Francisco Bay Area at Santa Clara, California. As this was the 50th Super Bowl, the league emphasized the \\\"golden anniversary\\\" with various gold-themed initiatives, as well as temporarily suspending the tradition of naming each Super Bowl game with Roman numerals (under which the game would have been known as \\\"Super Bowl L\\\"), so that the logo could prominently feature the Arabic numerals 50. Question: Which NFL team represented the NFC at Super Bowl 50? Answer:\", \"original_answers\": [\"Carolina Panthers\", \"Carolina Panthers\", \"Carolina Panthers\"], \"label\": \"Carolina Panthers\", \"prediction\": \" Carolina Panthers\"}\n",
+ "{\"input\": \"Context: Super Bowl 50 was an American football game to determine the champion of the National Football League (NFL) for the 2015 season. The American Football Conference (AFC) champion Denver Broncos defeated the National Football Conference (NFC) champion Carolina Panthers 24\\u201310 to earn their third Super Bowl title. The game was played on February 7, 2016, at Levi's Stadium in the San Francisco Bay Area at Santa Clara, California. As this was the 50th Super Bowl, the league emphasized the \\\"golden anniversary\\\" with various gold-themed initiatives, as well as temporarily suspending the tradition of naming each Super Bowl game with Roman numerals (under which the game would have been known as \\\"Super Bowl L\\\"), so that the logo could prominently feature the Arabic numerals 50. Question: Where did Super Bowl 50 take place? Answer:\", \"original_answers\": [\"Santa Clara, California\", \"Levi's Stadium\", \"Levi's Stadium in the San Francisco Bay Area at Santa Clara, California.\"], \"label\": \"Santa Clara, California\", \"prediction\": \" Levi's Stadium\"}\n",
+ "```"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Step 5. Calculate Evaluation Metrics\n",
+ "\n",
+ "We can evaluate the model's predictions by calculating the Exact Match (EM) and F1 scores.\n",
+ "- Exact Match is a binary measure (0 or 1) checking if the model outputs match one of the\n",
+ "ground truth answer exactly.\n",
+ "- F1 score is the harmonic mean of precision and recall for the answer words.\n",
+ "\n",
+ "Below is a script that computes these metrics. The sample scores can be improved by training the model further and performing hyperparameter tuning. In this notebook, we only train for 20 steps.\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "!python /opt/NeMo/scripts/metric_calculation/peft_metric_calc.py --pred_file peft_prediction.jsonl --label_field \"original_answers\" --pred_field \"prediction\""
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# NeMo Tools and Resources\n",
+ "1. [NeMo GitHub repo](https://github.com/NVIDIA/NeMo)\n",
+ "\n",
+ "2. [NeMo-Run GitHub repo](https://github.com/NVIDIA/NeMo-Run/)\n",
+ "\n",
+ "3. NeMo Framework Container: `nvcr.io/nvidia/nemo:dev`\n",
+ "\n",
+ "\n",
+ "\n",
+ "# Educational Resources\n",
+ "1. Blog: [Mastering LLM Techniques: Customization](https://developer.nvidia.com/blog/selecting-large-language-model-customization-techniques/)\n",
+ "\n",
+ "2. Whitepaper: [LoRA: Low-Rank Adaptation of Large Language Models](https://arxiv.org/abs/2106.09685)\n",
+ "\n",
+ "3. [NeMo 2.0 Overview](https://docs.nvidia.com/nemo-framework/user-guide/latest/nemo-2.0/index.html)\n",
+ "\n",
+ "4. Blog: [Tune and Deploy LoRA LLMs with NVIDIA TensorRT-LLM](https://developer.nvidia.com/blog/tune-and-deploy-lora-llms-with-nvidia-tensorrt-llm/)\n"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.12"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
diff --git a/tutorials/llm/llama-3/nemo2-sft-peft/nemo2-sft.ipynb b/tutorials/llm/llama-3/nemo2-sft-peft/nemo2-sft.ipynb
new file mode 100644
index 000000000000..e84ff916fc4e
--- /dev/null
+++ b/tutorials/llm/llama-3/nemo2-sft-peft/nemo2-sft.ipynb
@@ -0,0 +1,657 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Learning Goals\n",
+ "\n",
+ "## Optimizing Foundation Models with Supervised Fine-Tuning (SFT)\n",
+ "\n",
+ "Often, we want to adapt or customize foundation models to be more performant on our specific task. Fine-tuning refers to how we can modify the weights of a pre-trained foundation model with additional custom data. Supervised Fine-Tuning (SFT) refers to unfreezing all the weights and layers in our model and training on a newly labeled set of examples. We can fine-tune to incorporate new, domain-specific knowledge, or teach the foundation model what type of response to provide. One specific type of SFT is also referred to as βinstruction tuningβ where we use SFT to teach a model to follow instructions better. In this tutorial, will demonstrate how to perform SFT with Llama3-8b using NeMo 2.0.\n",
+ "\n",
+ "NeMo 2.0 introduces Python-based configurations, PyTorch Lightningβs modular abstractions, and NeMo-Run for scaling experiments across multiple GPUs. In this notebook, we will use NeMo-Run to streamline the configuration and execution of our experiments.\n",
+ "\n",
+ "## Data\n",
+ "Databricks-dolly-15k is an open-source dataset created by the collaborative efforts of Databricks employees. It consists of high-quality, human-generated prompt/response pairs specifically designed for instruction tuning LLMs. These pairs cover a diverse range of behaviors, from brainstorming and content generation to information extraction and summarization. \n",
+ "\n",
+ "For more information, refer to [databricks-dolly-15k | Hugging Face](https://huggingface.co/datasets/databricks/databricks-dolly-15k)."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "\n",
+ "## Step 1. Import the Hugging Face Checkpoint\n",
+ "We use the `llm.import_ckpt` API to download the specified model using the \"hf://\" URL format. It will then convert the model into NeMo 2.0 format. For all model supported in NeMo 2.0, refer to [Large Language Models](https://docs.nvidia.com/nemo-framework/user-guide/24.09/llms/index.html#large-language-models) section of NeMo Framework User Guide."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "import nemo_run as run\n",
+ "from nemo import lightning as nl\n",
+ "from nemo.collections import llm\n",
+ "from megatron.core.optimizer import OptimizerConfig\n",
+ "import torch\n",
+ "import pytorch_lightning as pl\n",
+ "from pathlib import Path\n",
+ "from nemo.collections.llm.recipes.precision.mixed_precision import bf16_mixed\n",
+ "\n",
+ "\n",
+ "# llm.import_ckpt is the nemo2 API for converting Hugging Face checkpoint to NeMo format\n",
+ "# example python usage:\n",
+ "# llm.import_ckpt(model=llm.llama3_8b.model(), source=\"hf://meta-llama/Meta-Llama-3-8B\")\n",
+ "#\n",
+ "# We use run.Partial to configure this function\n",
+ "def configure_checkpoint_conversion():\n",
+ " return run.Partial(\n",
+ " llm.import_ckpt,\n",
+ " model=llm.llama3_8b.model(),\n",
+ " source=\"hf://meta-llama/Meta-Llama-3-8B\",\n",
+ " overwrite=False,\n",
+ " )\n",
+ "\n",
+ "# configure your function\n",
+ "import_ckpt = configure_checkpoint_conversion()\n",
+ "# define your executor\n",
+ "local_executor = run.LocalExecutor()\n",
+ "\n",
+ "# run your experiment\n",
+ "run.run(import_ckpt, executor=local_executor)\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Step 2. Prepare the Data and Customize the DataModule\n",
+ "\n",
+ "We will be using Databricks-dolly-15k for this notebook. NeMo 2.0 already provides a `DollyDataModule`. For all data modules that are included in NeMo 2.0, refer to the [data module directory](https://github.com/NVIDIA/NeMo/tree/main/nemo/collections/llm/gpt/data). Example usage:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "def dolly() -> run.Config[pl.LightningDataModule]:\n",
+ " return run.Config(llm.DollyDataModule, seq_length=2048, micro_batch_size=1, global_batch_size=8, num_workers=0)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "To use your own data, you will need to create a custom `DataModule`. This involves extending the base class `FineTuningDataModule` so that you have access to existing data handling logic, such as packed sequences. Here we walk you through the process step by step, using the already existing [`DollyDataModule`](https://github.com/NVIDIA/NeMo/blob/main/nemo/collections/llm/gpt/data/dolly.py) as an example. \n",
+ "\n",
+ "### Subclass the FineTuningDataModule\n",
+ "You need to extend the `FineTuningDataModule` if you're fine-tuning NeMo models. This provides access to existing data handling logic, such as packed sequences. The `data_root` parameter is where you store your generated `train/validation/test.jsonl` in NeMo format. Below is how `DollyDataModule` does it:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "from typing import List, Optional\n",
+ "from nemo.lightning.io.mixin import IOMixin\n",
+ "from nemo.collections.llm.gpt.data.fine_tuning import FineTuningDataModule\n",
+ "\n",
+ "class DollyDataModule(FineTuningDataModule, IOMixin):\n",
+ " def __init__(\n",
+ " self,\n",
+ " seq_length: int = 2048,\n",
+ " tokenizer: Optional[\"TokenizerSpec\"] = None,\n",
+ " micro_batch_size: int = 4,\n",
+ " global_batch_size: int = 8,\n",
+ " rampup_batch_size: Optional[List[int]] = None,\n",
+ " force_redownload: bool = False,\n",
+ " delete_raw: bool = True,\n",
+ " seed: int = 1234,\n",
+ " memmap_workers: int = 1,\n",
+ " num_workers: int = 8,\n",
+ " pin_memory: bool = True,\n",
+ " persistent_workers: bool = False,\n",
+ " pad_to_max_length: bool = False,\n",
+ " packed_sequence_size: int = -1,\n",
+ " ):\n",
+ " self.force_redownload = force_redownload\n",
+ " self.delete_raw = delete_raw\n",
+ "\n",
+ " super().__init__(\n",
+ " dataset_root=get_dataset_root(\"dolly\"),\n",
+ " seq_length=seq_length,\n",
+ " tokenizer=tokenizer,\n",
+ " micro_batch_size=micro_batch_size,\n",
+ " global_batch_size=global_batch_size,\n",
+ " rampup_batch_size=rampup_batch_size,\n",
+ " seed=seed,\n",
+ " memmap_workers=memmap_workers,\n",
+ " num_workers=num_workers,\n",
+ " pin_memory=pin_memory,\n",
+ " persistent_workers=persistent_workers,\n",
+ " pad_to_max_length=pad_to_max_length,\n",
+ " packed_sequence_size=packed_sequence_size,\n",
+ " )"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Override the `prepare_data` Method\n",
+ "\n",
+ "The `prepare_data` method is responsible for downloading and preprocessing data if needed. If the dataset is already downloaded, you can skip this step.\n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "def prepare_data(self) -> None:\n",
+ " # if train file is specified, no need to do anything\n",
+ " if not self.train_path.exists() or self.force_redownload:\n",
+ " dset = self._download_data()\n",
+ " self._preprocess_and_split_data(dset)\n",
+ " super().prepare_data()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Implement Data Download and Preprocessing Logic\n",
+ "\n",
+ "If your dataset requires downloading or preprocessing, implement this logic within the helper methods. Skip the download part if it's not needed."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "def _download_data(self):\n",
+ " logging.info(f\"Downloading {self.__class__.__name__}...\")\n",
+ " return load_dataset(\n",
+ " \"databricks/databricks-dolly-15k\",\n",
+ " cache_dir=str(self.dataset_root),\n",
+ " download_mode=\"force_redownload\" if self.force_redownload else None,\n",
+ " )\n",
+ "\n",
+ "def _preprocess_and_split_data(self, dset, train_ratio: float = 0.80, val_ratio: float = 0.15):\n",
+ " logging.info(f\"Preprocessing {self.__class__.__name__} to jsonl format and splitting...\")\n",
+ "\n",
+ " test_ratio = 1 - train_ratio - val_ratio\n",
+ " save_splits = {}\n",
+ " dataset = dset.get('train')\n",
+ " split_dataset = dataset.train_test_split(test_size=val_ratio + test_ratio, seed=self.seed)\n",
+ " split_dataset2 = split_dataset['test'].train_test_split(\n",
+ " test_size=test_ratio / (val_ratio + test_ratio), seed=self.seed\n",
+ " )\n",
+ " save_splits['training'] = split_dataset['train']\n",
+ " save_splits['validation'] = split_dataset2['train']\n",
+ " save_splits['test'] = split_dataset2['test']\n",
+ "\n",
+ " for split_name, dataset in save_splits.items():\n",
+ " output_file = self.dataset_root / f\"{split_name}.jsonl\"\n",
+ " with output_file.open(\"w\", encoding=\"utf-8\") as f:\n",
+ " for example in dataset:\n",
+ " context = example[\"context\"].strip()\n",
+ " if context != \"\":\n",
+ " # Randomize context and instruction order.\n",
+ " context_first = np.random.randint(0, 2) == 0\n",
+ " if context_first:\n",
+ " instruction = example[\"instruction\"].strip()\n",
+ " assert instruction != \"\"\n",
+ " _input = f\"{context}\\n\\n{instruction}\"\n",
+ " _output = example[\"response\"]\n",
+ " else:\n",
+ " instruction = example[\"instruction\"].strip()\n",
+ " assert instruction != \"\"\n",
+ " _input = f\"{instruction}\\n\\n{context}\"\n",
+ " _output = example[\"response\"]\n",
+ " else:\n",
+ " _input = example[\"instruction\"]\n",
+ " _output = example[\"response\"]\n",
+ "\n",
+ " f.write(json.dumps({\"input\": _input, \"output\": _output, \"category\": example[\"category\"]}) + \"\\n\")\n",
+ "\n",
+ " logging.info(f\"{split_name} split saved to {output_file}\")\n",
+ "\n",
+ " if self.delete_raw:\n",
+ " for p in self.dataset_root.iterdir():\n",
+ " if p.is_dir():\n",
+ " shutil.rmtree(p)\n",
+ " elif '.jsonl' not in str(p.name):\n",
+ " p.unlink()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The original example in Dolly dataset looks like:\n",
+ "```\n",
+ "{'instruction': 'Extract all the movies from this passage and the year they were released out. Write each movie as a separate sentence', 'context': \"The genre has existed since the early years of silent cinema, when Georges Melies' A Trip to the Moon (1902) employed trick photography effects. The next major example (first in feature length in the genre) was the film Metropolis (1927). From the 1930s to the 1950s, the genre consisted mainly of low-budget B movies. After Stanley Kubrick's landmark 2001: A Space Odyssey (1968), the science fiction film genre was taken more seriously. In the late 1970s, big-budget science fiction films filled with special effects became popular with audiences after the success of Star Wars (1977) and paved the way for the blockbuster hits of subsequent decades.\", 'response': 'A Trip to the Moon was released in 1902. Metropolis came out in 1927. 2001: A Space Odyssey was released in 1968. Star Wars came out in 1977.', 'category': 'information_extraction'}\n",
+ "```\n",
+ "After the preprocessing logic, the data examples are transformed into NeMo format, as below:\n",
+ "```\n",
+ "{'input': \"Extract all the movies from this passage and the year they were released out. Write each movie as a separate sentence\\n\\nThe genre has existed since the early years of silent cinema, when Georges Melies' A Trip to the Moon (1902) employed trick photography effects. The next major example (first in feature length in the genre) was the film Metropolis (1927). From the 1930s to the 1950s, the genre consisted mainly of low-budget B movies. After Stanley Kubrick's landmark 2001: A Space Odyssey (1968), the science fiction film genre was taken more seriously. In the late 1970s, big-budget science fiction films filled with special effects became popular with audiences after the success of Star Wars (1977) and paved the way for the blockbuster hits of subsequent decades.\", 'output': 'A Trip to the Moon was released in 1902. Metropolis came out in 1927. 2001: A Space Odyssey was released in 1968. Star Wars came out in 1977.', 'category': 'information_extraction'}\n",
+ "```\n",
+ "Each data example is saved as a json string as one line in the `train/validation/test.jsonl` file, under `data_root` directory you specified earlier."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Step 3.1: Configure SFT with the NeMo 2.0 API \n",
+ "\n",
+ "In this notebook we use NeMo 2.0 API to perform SFT. First we configure the following components for training. These components are similar between SFT and PEFT. SFT and PEFT both uses `llm.finetune` API. To switch from PEFT to SFT, you just need to remove the `peft` parameter.\n",
+ "\n",
+ "### Configure the Trainer\n",
+ "The NeMo 2.0 Trainer works similarly to the PyTorch Lightning trainer."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "def trainer() -> run.Config[nl.Trainer]:\n",
+ " strategy = run.Config(\n",
+ " nl.MegatronStrategy,\n",
+ " tensor_model_parallel_size=2\n",
+ " )\n",
+ " trainer = run.Config(\n",
+ " nl.Trainer,\n",
+ " devices=2,\n",
+ " max_steps=20,\n",
+ " accelerator=\"gpu\",\n",
+ " strategy=strategy,\n",
+ " plugins=bf16_mixed(),\n",
+ " log_every_n_steps=1,\n",
+ " limit_val_batches=2,\n",
+ " val_check_interval=2,\n",
+ " num_sanity_val_steps=0,\n",
+ " )\n",
+ " return trainer"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "\n",
+ "### Configure the Logger\n",
+ "Configure your training steps, output directories and logging through `NeMoLogger`. In the following example, the experiment output will be saved at `./results/nemo2_sft`."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "def logger() -> run.Config[nl.NeMoLogger]:\n",
+ " ckpt = run.Config(\n",
+ " nl.ModelCheckpoint,\n",
+ " save_last=True,\n",
+ " every_n_train_steps=10,\n",
+ " monitor=\"reduced_train_loss\",\n",
+ " save_top_k=1,\n",
+ " save_on_train_epoch_end=True,\n",
+ " save_optim_on_train_end=True,\n",
+ " )\n",
+ "\n",
+ " return run.Config(\n",
+ " nl.NeMoLogger,\n",
+ " name=\"nemo2_sft\",\n",
+ " log_dir=\"./results\",\n",
+ " use_datetime_version=False,\n",
+ " ckpt=ckpt,\n",
+ " wandb=None\n",
+ " )"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "\n",
+ "\n",
+ "### Configure the Optimizer\n",
+ "In the following example, we will be using the distributed adam optimizer and pass in the optimizer configuration through `OptimizerConfig`: "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "def adam_with_cosine_annealing() -> run.Config[nl.OptimizerModule]:\n",
+ " opt_cfg = run.Config(\n",
+ " OptimizerConfig,\n",
+ " optimizer=\"adam\",\n",
+ " lr=5e-6,\n",
+ " adam_beta2=0.98,\n",
+ " use_distributed_optimizer=True,\n",
+ " clip_grad=1.0,\n",
+ " bf16=True,\n",
+ " )\n",
+ " return run.Config(\n",
+ " nl.MegatronOptimizerModule,\n",
+ " config=opt_cfg\n",
+ " )\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Configure the Base Model\n",
+ "We will perform SFT on top of Llama3-8B, so we create a `LlamaModel` to pass to the finetune API."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "def llama3_8b() -> run.Config[pl.LightningModule]:\n",
+ " return run.Config(llm.LlamaModel, config=run.Config(llm.Llama3Config8B))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Auto Resume\n",
+ "In NeMo 2.0, we can directly pass in the Llama3-8b Hugging Face ID to start SFT without manually converting it into the NeMo checkpoint format, as required in NeMo 1.0."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "def resume() -> run.Config[nl.AutoResume]:\n",
+ " return run.Config(\n",
+ " nl.AutoResume,\n",
+ " restore_config=run.Config(nl.RestoreConfig,\n",
+ " path=\"nemo://meta-llama/Meta-Llama-3-8B\"\n",
+ " ),\n",
+ " resume_if_exists=True,\n",
+ " )"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "\n",
+ "### Configure NeMo 2.0 finetune API\n",
+ "Using all the components we created above, we can call the NeMo 2.0 finetune API. The python example usage is as below:\n",
+ "```\n",
+ "llm.finetune(\n",
+ " model=llama3_8b(),\n",
+ " data=dolly(),\n",
+ " trainer=trainer(),\n",
+ " log=logger(),\n",
+ " optim=adam_with_cosine_annealing(),\n",
+ " resume=resume(),\n",
+ ")\n",
+ "```\n",
+ "We configure the `llm.finetune` API as below:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def configure_finetuning_recipe():\n",
+ " return run.Partial(\n",
+ " llm.finetune,\n",
+ " model=llama3_8b(),\n",
+ " trainer=trainer(),\n",
+ " data=dolly(),\n",
+ " log=logger(),\n",
+ " optim=adam_with_cosine_annealing(),\n",
+ " resume=resume(),\n",
+ " )"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Step 3.2: Run SFT with NeMo 2.0 API and NeMo-Run\n",
+ "\n",
+ "We use `LocalExecutor` for executing our configured finetune function. For more details on the NeMo-Run executor, refer to [Execute NeMo Run](https://github.com/NVIDIA/NeMo-Run/blob/main/docs/source/guides/execution.md) of NeMo-Run Guides. "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "def local_executor_torchrun(nodes: int = 1, devices: int = 2) -> run.LocalExecutor:\n",
+ " # Env vars for jobs are configured here\n",
+ " env_vars = {\n",
+ " \"TORCH_NCCL_AVOID_RECORD_STREAMS\": \"1\",\n",
+ " \"NCCL_NVLS_ENABLE\": \"0\",\n",
+ " \"NVTE_DP_AMAX_REDUCE_INTERVAL\": \"0\",\n",
+ " \"NVTE_ASYNC_AMAX_REDUCTION\": \"1\",\n",
+ " \"NVTE_FUSED_ATTN\": \"0\",\n",
+ " }\n",
+ "\n",
+ " executor = run.LocalExecutor(ntasks_per_node=devices, launcher=\"torchrun\", env_vars=env_vars)\n",
+ "\n",
+ " return executor\n",
+ "\n",
+ "if __name__ == '__main__':\n",
+ " run.run(configure_finetuning_recipe(), executor=local_executor_torchrun())"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Step 4. Generate Results from Trained SFT Checkpoints\n",
+ "\n",
+ "We use the `llm.generate` API in NeMo 2.0 to generate results from the trained SFT checkpoint. Find your last saved checkpoint from your experiment dir: `results/nemo2_sft/checkpoints`. "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "sft_ckpt_path=str(next((d for d in Path(\"./results/nemo2_sft/checkpoints/\").iterdir() if d.is_dir() and d.name.endswith(\"-last\")), None))\n",
+ "print(\"We will load SFT checkpoint from:\", sft_ckpt_path)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "When using `llm.generate` API, you can pass a data module such as dolly: `input_dataset=dolly()`. This will use the test set from the specified data module to generate predictions. In the following example, the generated predictions are saved to the `sft_predictions.txt` file. Note that while fine-tuning required a minimum of 2 GPUs with `tensor_model_parallel_size=2`, generating predictions only requires `tensor_model_parallel_size=1`. However, using multiple GPUs can speed up the inference."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "from megatron.core.inference.common_inference_params import CommonInferenceParams\n",
+ "\n",
+ "\n",
+ "def trainer() -> run.Config[nl.Trainer]:\n",
+ " strategy = run.Config(\n",
+ " nl.MegatronStrategy,\n",
+ " tensor_model_parallel_size=1,\n",
+ " )\n",
+ " trainer = run.Config(\n",
+ " nl.Trainer,\n",
+ " accelerator=\"gpu\",\n",
+ " devices=1,\n",
+ " num_nodes=1,\n",
+ " strategy=strategy,\n",
+ " plugins=bf16_mixed(),\n",
+ " )\n",
+ " return trainer\n",
+ "\n",
+ "def configure_inference():\n",
+ " return run.Partial(\n",
+ " llm.generate,\n",
+ " path=str(sft_ckpt_path),\n",
+ " trainer=trainer(),\n",
+ " input_dataset=dolly(),\n",
+ " inference_params=CommonInferenceParams(num_tokens_to_generate=20, top_k=1),\n",
+ " output_path=\"sft_prediction.jsonl\",\n",
+ " )\n",
+ "\n",
+ "\n",
+ "def local_executor_torchrun(nodes: int = 1, devices: int = 1) -> run.LocalExecutor:\n",
+ " # Env vars for jobs are configured here\n",
+ " env_vars = {\n",
+ " \"TORCH_NCCL_AVOID_RECORD_STREAMS\": \"1\",\n",
+ " \"NCCL_NVLS_ENABLE\": \"0\",\n",
+ " \"NVTE_DP_AMAX_REDUCE_INTERVAL\": \"0\",\n",
+ " \"NVTE_ASYNC_AMAX_REDUCTION\": \"1\",\n",
+ " \"NVTE_FUSED_ATTN\": \"0\",\n",
+ " }\n",
+ "\n",
+ " executor = run.LocalExecutor(ntasks_per_node=devices, launcher=\"torchrun\", env_vars=env_vars)\n",
+ "\n",
+ " return executor\n",
+ "\n",
+ "if __name__ == '__main__':\n",
+ " run.run(configure_inference(), executor=local_executor_torchrun())\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "After the inference is complete, you will see results similar to the following:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "%%bash\n",
+ "head -n 3 sft_prediction.jsonl"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "You should see output similar to the following:\n",
+ "```\n",
+ "{\"input\": \"What is best creator's platform\", \"category\": \"brainstorming\", \"label\": \"Youtube. Youtube should be best creator platform\", \"prediction\": \" for video content creators. YouTube is best creator's platform for video content creators.\"}\n",
+ "{\"input\": \"When was the last time the Raiders won the Super Bowl?\", \"category\": \"open_qa\", \"label\": \"The Raiders have won three Super Bowl championships (1977, 1981, and 1984), one American Football League (AFL) championship (1967), and four American Football Conference (AFC) titles. The most recent Super Bowl ring was won in 1984 against the Washington Redskins of the NFC.\", \"prediction\": \" 2003\"}\n",
+ "{\"input\": \"Muckle Water is a long, narrow fresh water loch on Ward Hill on Rousay, Orkney, Scotland. It is the biggest loch on the island and is popular for fishing. It can be reached by a track from the roadside. The Suso Burn on the north eastern shore drains the loch into the Sound of Rousay.\\n\\nWhere is Muckle Water?\", \"category\": \"closed_qa\", \"label\": \"Muckle water is located in Rousay, Orkney, Scotland.\", \"prediction\": \" Muckle Water is a long, narrow fresh water loch on Ward Hill on Rousay,\"}\n",
+ "```"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Step 5. Calculate Evaluation Metrics\n",
+ "\n",
+ "We can evaluate the model's predictions by calculating the Exact Match (EM) and F1 scores.\n",
+ "- Exact Match is a binary measure (0 or 1) checking if the model outputs match one of the\n",
+ "ground truth answer exactly.\n",
+ "- F1 score is the harmonic mean of precision and recall for the answer words.\n",
+ "\n",
+ "Below is a script that computes these metrics. The sample scores can be improved by training the model further and performing hyperparameter tuning. In this notebook, we only train for 20 steps."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "!python /opt/NeMo/scripts/metric_calculation/peft_metric_calc.py --pred_file sft_prediction.jsonl --label_field \"label\" --pred_field \"prediction\""
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.12"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
diff --git a/tutorials/llm/llama-3/pruning-distillation/01_data_preparation.ipynb b/tutorials/llm/llama-3/pruning-distillation/01_data_preparation.ipynb
new file mode 100644
index 000000000000..8548c0cfb1d0
--- /dev/null
+++ b/tutorials/llm/llama-3/pruning-distillation/01_data_preparation.ipynb
@@ -0,0 +1,102 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "ab9e2e97-7f10-4353-859e-693842bde465",
+ "metadata": {},
+ "source": [
+ "### Step 1: Prepare the dataset\n",
+ "\n",
+ "The dataset has to be preprocessed using the [preprocess_data_for_megatron.py](https://github.com/NVIDIA/NeMo/blob/main/scripts/nlp_language_modeling/preprocess_data_for_megatron.py) script included in the NeMo Framework. This step will also tokenize data using the `meta-llama/Meta-Llama-3.1-8B` tokenizer model to convert the data into a memory map format.\n",
+ "\n",
+ "> `NOTE:` In the block of code below, pass the paths to your train, test, and validation data files."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "6505c00b-9eb4-4087-9e49-423f6228e690",
+ "metadata": {
+ "scrolled": true,
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "!python /opt/NeMo/scripts/nlp_language_modeling/preprocess_data_for_megatron.py \\\n",
+ "--input=\"./wikitext-data/wikitext-train.jsonl\" \\\n",
+ "--tokenizer-library='huggingface' \\\n",
+ "--tokenizer-type='meta-llama/Meta-Llama-3.1-8B' \\\n",
+ "--output-prefix=wikitext_tokenized_train \\\n",
+ "--append-eod \\\n",
+ "--workers=32"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "fb1aa80f-70bc-4dff-8b08-3bff48d9a1c3",
+ "metadata": {
+ "scrolled": true,
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "!python /opt/NeMo/scripts/nlp_language_modeling/preprocess_data_for_megatron.py \\\n",
+ "--input=\"./wikitext-data/wikitext-test.jsonl\" \\\n",
+ "--tokenizer-library='huggingface' \\\n",
+ "--tokenizer-type='meta-llama/Meta-Llama-3.1-8B' \\\n",
+ "--output-prefix=wikitext_tokenized_test \\\n",
+ "--append-eod \\\n",
+ "--workers=32"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "42bec54a-94f6-4c87-8e14-2726ef6c2625",
+ "metadata": {
+ "scrolled": true,
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "!python /opt/NeMo/scripts/nlp_language_modeling/preprocess_data_for_megatron.py \\\n",
+ "--input=\"./wikitext-data/wikitext-val.jsonl\" \\\n",
+ "--tokenizer-library='huggingface' \\\n",
+ "--tokenizer-type='meta-llama/Meta-Llama-3.1-8B' \\\n",
+ "--output-prefix=wikitext_tokenized_val \\\n",
+ "--append-eod \\\n",
+ "--workers=32"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "5d77ee8a-e0dc-44f7-b5e8-3b6025d979d7",
+ "metadata": {},
+ "source": [
+ "After running the above scripts, you will see the preprocesed `wikitext_tokenized_{train/val/test}_text_document.{idx/bin}`files. These output files will be used in the next step."
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.12"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/tutorials/llm/llama-3/pruning-distillation/02_teacher_finetuning.ipynb b/tutorials/llm/llama-3/pruning-distillation/02_teacher_finetuning.ipynb
new file mode 100644
index 000000000000..7d58ac4779aa
--- /dev/null
+++ b/tutorials/llm/llama-3/pruning-distillation/02_teacher_finetuning.ipynb
@@ -0,0 +1,153 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "84b146ba-08b6-4adb-a858-8e4294c5e781",
+ "metadata": {},
+ "source": [
+ "\n",
+ "### Step 2: Fine-tune the teacher on the dataset\n",
+ "\n",
+ "NeMo Framework includes a standard Python script, [megatron_gpt_pretraining.py](https://github.com/NVIDIA/NeMo/blob/main/examples/nlp/language_modeling/megatron_gpt_pretraining.py), for training a model. Once you have your model downloaded and the dataset ready, fine-tuning the teacher model with NeMo is essentially just running this script!\n",
+ "\n",
+ "We fine-tune the unpruned model on our dataset to correct the distribution shift from the original dataset the model was trained on. According to the [blog](https://developer.nvidia.com/blog/how-to-prune-and-distill-llama-3-1-8b-to-an-nvidia-llama-3-1-minitron-4b-model/) and [tech report](https://arxiv.org/pdf/2408.11796), experiments showed that without correcting for this distribution shift, the teacher provides suboptimal guidance on the dataset during distillation.\n",
+ "\n",
+ "For this demonstration, this training run is capped by `STEPS`, and validation is carried out every `VAL_INTERVAL` steps.\n",
+ "\n",
+ "> `NOTE:` In the block of code below, pass the paths to your pre-processed train, test, and validation data files, as well as the path to the teacher .nemo model."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "12007ac8-2fd5-4de8-8964-97821c2198c0",
+ "metadata": {
+ "scrolled": true,
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "%%bash \n",
+ "\n",
+ "export CUDA_DEVICE_MAX_CONNECTIONS=1\n",
+ "\n",
+ "# Set path(s) if different:\n",
+ "\n",
+ "MODEL=\"/workspace/llama-3_1-8b-nemo_v1.0/llama3_1_8b.nemo\"\n",
+ "\n",
+ "# Can change these to accommodate resources:\n",
+ "\n",
+ "TENSOR_PARALLEL_SIZE=8\n",
+ "NODES=1\n",
+ "MICRO_BATCH_SIZE=4\n",
+ "\n",
+ "# Don't change the following:\n",
+ "\n",
+ "EXPERIMENT_DIR=\"distill_trainings\"\n",
+ "EXPERIMENT_NAME=\"megatron_llama_ft\"\n",
+ "\n",
+ "DATA_TRAIN='wikitext_tokenized_train_text_document'\n",
+ "DATA_VAL='wikitext_tokenized_test_text_document'\n",
+ "DATA_TEST='wikitext_tokenized_val_text_document'\n",
+ "\n",
+ "STEPS=30\n",
+ "GLOBAL_BATCH_SIZE=128\n",
+ "\n",
+ "LOG_INTERVAL=1\n",
+ "VAL_INTERVAL=10\n",
+ "NUM_VAL_BATCHES=5\n",
+ "\n",
+ "LR=1e-4\n",
+ "MIN_LR=1e-5\n",
+ "WARMUP_STEPS=2\n",
+ "\n",
+ "cmd=\"torchrun --nproc-per-node=${TENSOR_PARALLEL_SIZE}\"\n",
+ "\n",
+ "${cmd} /opt/NeMo/examples/nlp/language_modeling/megatron_gpt_pretraining.py \\\n",
+ " --config-path /opt/NeMo/examples/nlp/language_modeling/conf/ \\\n",
+ " --config-name megatron_llama_distill.yaml \\\n",
+ " \\\n",
+ " name=${EXPERIMENT_NAME} \\\n",
+ " \\\n",
+ " exp_manager.exp_dir=${EXPERIMENT_DIR} \\\n",
+ " exp_manager.checkpoint_callback_params.save_top_k=1 \\\n",
+ " exp_manager.checkpoint_callback_params.save_nemo_on_train_end=True \\\n",
+ " \\\n",
+ " trainer.max_steps=${STEPS} \\\n",
+ " trainer.log_every_n_steps=${LOG_INTERVAL} \\\n",
+ " trainer.val_check_interval=${VAL_INTERVAL} \\\n",
+ " trainer.limit_val_batches=${NUM_VAL_BATCHES} \\\n",
+ " +trainer.num_sanity_val_steps=0 \\\n",
+ " \\\n",
+ " trainer.precision=bf16 \\\n",
+ " trainer.devices=${TENSOR_PARALLEL_SIZE} \\\n",
+ " trainer.num_nodes=${NODES} \\\n",
+ " \\\n",
+ " \"model.data.data_prefix={train:[1.0,$DATA_TRAIN],validation:[$DATA_VAL],test:[$DATA_TEST]}\" \\\n",
+ " \\\n",
+ " model.restore_from_path=${MODEL} \\\n",
+ " +model.dist_ckpt_load_strictness=log_all \\\n",
+ " \\\n",
+ " ~model.tokenizer \\\n",
+ " +model.tokenizer='{library: huggingface, type: meta-llama/Meta-Llama-3.1-8B, use_fast: True}' \\\n",
+ " \\\n",
+ " model.tensor_model_parallel_size=${TENSOR_PARALLEL_SIZE} \\\n",
+ " model.sequence_parallel=True \\\n",
+ " model.micro_batch_size=${MICRO_BATCH_SIZE} \\\n",
+ " model.global_batch_size=${GLOBAL_BATCH_SIZE} \\\n",
+ " \\\n",
+ " model.encoder_seq_length=8192 \\\n",
+ " model.num_layers=32 \\\n",
+ " model.hidden_size=4096 \\\n",
+ " model.ffn_hidden_size=14336 \\\n",
+ " model.num_attention_heads=32 \\\n",
+ " model.hidden_dropout=0.0 \\\n",
+ " model.attention_dropout=0.0 \\\n",
+ " model.apply_query_key_layer_scaling=True \\\n",
+ " model.normalization='rmsnorm' \\\n",
+ " model.bias=False \\\n",
+ " model.activation='fast-swiglu' \\\n",
+ " model.position_embedding_type='rope' \\\n",
+ " model.share_embeddings_and_output_weights=False \\\n",
+ " model.num_query_groups=8 \\\n",
+ " ++model.scale_positional_embedding=True \\\n",
+ " ++model.rotary_base=500000.0 \\\n",
+ " \\\n",
+ " model.optim.name=distributed_fused_adam \\\n",
+ " model.optim.lr=${LR} \\\n",
+ " model.optim.sched.min_lr=${MIN_LR} \\\n",
+ " model.optim.sched.warmup_steps=${WARMUP_STEPS}"
+ ]
+ },
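+ {
+ "cell_type": "markdown",
+ "id": "2b3c4d5e-6f7a-4b2c-9d3e-1f2a3b4c5d01",
+ "metadata": {},
+ "source": [
+ "> `TIP:` Once training finishes, you can optionally verify that the fine-tuned checkpoint was written. This quick check assumes the default `EXPERIMENT_DIR` and `EXPERIMENT_NAME` values set above."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "2b3c4d5e-6f7a-4b2c-9d3e-1f2a3b4c5d02",
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "# Optional sanity check: the fine-tuned teacher model should appear here\n",
+ "# (assumes EXPERIMENT_DIR=distill_trainings and EXPERIMENT_NAME=megatron_llama_ft).\n",
+ "!ls -lh distill_trainings/megatron_llama_ft/checkpoints/"
+ ]
+ },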
+ {
+ "cell_type": "markdown",
+ "id": "3040a993-8423-475f-8bc6-d1dd1ce16a83",
+ "metadata": {},
+ "source": [
+ "This will create a fine-tuned teacher model named `megatron_llama_ft.nemo` in `./distill_trainings/megatron_llama_ft/checkpoints/`. We'll use this later.\n",
+ "> `NOTE:`This script takes at least 20 minutes to run (depending on GPU) and will generate the fine-tuned teacher model."
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.12"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/tutorials/llm/llama-3/pruning-distillation/03_a_depth_pruning.ipynb b/tutorials/llm/llama-3/pruning-distillation/03_a_depth_pruning.ipynb
new file mode 100644
index 000000000000..d64f8c15bd00
--- /dev/null
+++ b/tutorials/llm/llama-3/pruning-distillation/03_a_depth_pruning.ipynb
@@ -0,0 +1,77 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "8bc99d2f-9ac6-40c2-b072-12b6cb7b9aca",
+ "metadata": {},
+ "source": [
+ "### Step 3: Prune the fine-tuned teacher model to create a student\n",
+ "In this step, we will explore two methods to prune the fine-tuned teacher model. Refer to the ``NOTE`` in the **_step-by-step instructions_** section of [introduction.ipynb](./introduction.ipynb) to decide which pruning techniques you would like to explore.\n",
+ "\n",
+ "In the first method, depth-pruning, we trim the layers of the model."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "72fa494e-6268-4044-a1d6-c0518d450cfd",
+ "metadata": {},
+ "source": [
+ "#### Step 3.a.: Using depth-pruning \n",
+ "To depth-prune, we will trim the last 16 layers in the finetined teacher model. For depth-pruning, we would be using the [megatron_gpt_drop_layers](https://github.com/NVIDIA/NeMo/blob/main/examples/nlp/language_modeling/megatron_gpt_drop_layers.py) script. \n",
+ "\n",
+ "Per the [blog](https://developer.nvidia.com/blog/how-to-prune-and-distill-llama-3-1-8b-to-an-nvidia-llama-3-1-minitron-4b-model/) and [tech report](https://arxiv.org/pdf/2408.11796), removing contiguous layers from the second last block (layers 16 to 31 continuously) yields the best overall results. \n",
+ "\n",
+ "> `NOTE:` In the block of code below, pass the paths to your fine-tuned teacher .nemo model."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "60cae073-a192-4d47-b220-b09736d39a93",
+ "metadata": {
+ "scrolled": true,
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "!python -m torch.distributed.launch --nproc_per_node=8 \\\n",
+ " /opt/NeMo/examples/nlp/language_modeling/megatron_gpt_drop_layers.py \\\n",
+ " --path_to_nemo \"./distill_trainings/megatron_llama_ft/checkpoints/megatron_llama_ft.nemo\" \\\n",
+ " --path_to_save \"/workspace/4b_depth_pruned_model.nemo\" \\\n",
+ " --tensor_model_parallel_size 8 \\\n",
+ " --pipeline_model_parallel_size 1 \\\n",
+ " --gpus_per_node 8 \\\n",
+ " --drop_layers 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31"
+ ]
+ },
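+ {
+ "cell_type": "markdown",
+ "id": "3c4d5e6f-7a8b-4c3d-9e4f-2a3b4c5d6e01",
+ "metadata": {},
+ "source": [
+ "> `TIP:` You can optionally confirm that the pruned checkpoint was saved before proceeding. The check below assumes the `--path_to_save` value used above."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "3c4d5e6f-7a8b-4c3d-9e4f-2a3b4c5d6e02",
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "# Optional sanity check: confirm the depth-pruned student checkpoint exists\n",
+ "# (assumes the --path_to_save value used above).\n",
+ "!ls -lh /workspace/4b_depth_pruned_model.nemo"
+ ]
+ },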
+ {
+ "cell_type": "markdown",
+ "id": "375f298a-0363-4f44-b40c-2c8e9bab7d76",
+ "metadata": {},
+ "source": [
+ "Running this script will save the depth-pruned model `4b_depth_pruned_model.nemo` to your workspace."
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.12"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/tutorials/llm/llama-3/pruning-distillation/03_b_width_pruning.ipynb b/tutorials/llm/llama-3/pruning-distillation/03_b_width_pruning.ipynb
new file mode 100644
index 000000000000..5c4a47872afb
--- /dev/null
+++ b/tutorials/llm/llama-3/pruning-distillation/03_b_width_pruning.ipynb
@@ -0,0 +1,92 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "8bc99d2f-9ac6-40c2-b072-12b6cb7b9aca",
+ "metadata": {},
+ "source": [
+ "### Step 3: Step 3: Prune the fine-tuned teacher model to create a student\n",
+ "In the second method, we will width-prune. In width-pruning, we trim the neurons, attention heads, and embedding channels.\n",
+ "\n",
+ "Refer to the ``NOTE`` in the **_step-by-step instructions_** section of [introduction.ipynb](./introduction.ipynb) to decide which pruning techniques you would like to explore."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "9207ed14-2f37-4712-88f3-543a128663ac",
+ "metadata": {
+ "tags": []
+ },
+ "source": [
+ "#### Step 3.b.: Using width-pruning\n",
+ "To width-prune the model, we do the following:\n",
+ "- Prune (trim) the MLP intermediate dimension from 14336 to 9216.\n",
+ "- Prune the hidden size from 4096 to 3072.\n",
+ "- Retrain the attention headcount and number of layers\n",
+ "\n",
+ "For width-pruning, we will use the [megatron_gpt_prune.py](https://github.com/NVIDIA/NeMo/blob/main/examples/nlp/language_modeling/megatron_gpt_prune.py) script in the NeMo Framework. To see the detailed list of parameters for width-pruning, you can view the [megatron_gpt_prune.yaml](https://github.com/NVIDIA/NeMo/blob/main/examples/nlp/language_modeling/conf/megatron_gpt_prune.yaml) file.\n",
+ "\n",
+ "We use the above parameters to get a competitive model for this demonstration. You can use other strategies or parameters from the [blog](https://developer.nvidia.com/blog/how-to-prune-and-distill-llama-3-1-8b-to-an-nvidia-llama-3-1-minitron-4b-model/) or the [tech report](https://arxiv.org/pdf/2408.11796) for your experiments. \n",
+ "\n",
+ "> `NOTE:` In the block of code below, pass the paths to your fine-tuned teacher .nemo model.\n",
+ "\n",
+ "> `TIP:` You can increase the ``batch_size`` (upto 1024) to speed up the width-pruning script execution."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "571d1483-dd4c-403e-b321-293342e7a62a",
+ "metadata": {
+ "scrolled": true,
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "!torchrun --nproc-per-node=8 /opt/NeMo/examples/nlp/language_modeling/megatron_gpt_prune.py \\\n",
+ " model.restore_from_path=\"./distill_trainings/megatron_llama_ft/checkpoints/megatron_llama_ft.nemo\" \\\n",
+ " model.tensor_model_parallel_size=1 \\\n",
+ " model.pipeline_model_parallel_size=8 \\\n",
+ " +model.dist_ckpt_load_strictness=log_all \\\n",
+ " inference.batch_size=64 \\\n",
+ " trainer.num_nodes=1 \\\n",
+ " trainer.precision=bf16 \\\n",
+ " trainer.devices=8 \\\n",
+ " prune.ffn_hidden_size=9216 \\\n",
+ " prune.num_attention_heads=null \\\n",
+ " prune.num_query_groups=null \\\n",
+ " prune.hidden_size=3072 \\\n",
+ " export.save_path=\"/workspace/4b_width_pruned_model.nemo\""
+ ]
+ },
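+ {
+ "cell_type": "markdown",
+ "id": "4d5e6f7a-8b9c-4d4e-9f5a-3b4c5d6e7f01",
+ "metadata": {},
+ "source": [
+ "> `TIP:` You can optionally confirm that the pruned checkpoint was saved before proceeding. The check below assumes the `export.save_path` value used above."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "4d5e6f7a-8b9c-4d4e-9f5a-3b4c5d6e7f02",
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "# Optional sanity check: confirm the width-pruned student checkpoint exists\n",
+ "# (assumes the export.save_path value used above).\n",
+ "!ls -lh /workspace/4b_width_pruned_model.nemo"
+ ]
+ },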
+ {
+ "cell_type": "markdown",
+ "id": "e9fb0977-5c02-4ecc-b602-54d74b2e2184",
+ "metadata": {},
+ "source": [
+ "Running this script will save the width-pruned model `4b_width_pruned_model.nemo` to your workspace."
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.12"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/tutorials/llm/llama-3/pruning-distillation/04_a_distilling_depth_pruned_student.ipynb b/tutorials/llm/llama-3/pruning-distillation/04_a_distilling_depth_pruned_student.ipynb
new file mode 100644
index 000000000000..488225837731
--- /dev/null
+++ b/tutorials/llm/llama-3/pruning-distillation/04_a_distilling_depth_pruned_student.ipynb
@@ -0,0 +1,136 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "09d30e35-8e9d-4d2e-bd14-738c627a3963",
+ "metadata": {},
+ "source": [
+ "### Step 4: Distill knowledge from teacher into student\n",
+ "Distillation of a model with NeMo Framework is also possible using a Python script: [megatron_gpt_distillation.py](https://github.com/NVIDIA/NeMo/blob/main/examples/nlp/language_modeling/megatron_gpt_distillation.py). In this notebook, we will explore distillation with the depth-pruned model as the `STUDENT` model.\n",
+ "\n",
+ "For this demonstration, the `TEACHER` would be the fine-tuned teacher model `megatron_llama_ft.nemo` and the `STUDENT` model would be the pruned 4B model. This training run is capped by `STEPS`, and validation is carried out every `VAL_INTERVAL` steps."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "c33cf641-0d27-417f-b3ee-c06701698184",
+ "metadata": {},
+ "source": [
+ "#### Step 4.a.: Using depth-pruned student\n",
+ "While distilling knowledge from the teacher to depth-pruned model, the `STUDENT` model would be `4b_depth_pruned_model.nemo` as produced by the [depth-pruning](./03_a_depth_pruning.ipynb) notebook. This training run is capped by `STEPS`, and validation is carried out every `VAL_INTERVAL` steps.\n",
+ "\n",
+ "> `NOTE:` In the block of code below, pass the paths to your pre-processed train, test, and validation data files, as well as path to the teacher and student .nemo models."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "5d23a01e-4912-47cb-bf21-b4fd72007ec1",
+ "metadata": {
+ "scrolled": true,
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "%%bash \n",
+ "\n",
+ "export CUDA_DEVICE_MAX_CONNECTIONS=1\n",
+ "\n",
+ "# Can change these to accommodate resources:\n",
+ "\n",
+ "TENSOR_PARALLEL_SIZE=8\n",
+ "NODES=1\n",
+ "MICRO_BATCH_SIZE=4\n",
+ "\n",
+ "# Don't change the following:\n",
+ "\n",
+ "EXPERIMENT_DIR=\"distill_trainings\"\n",
+ "EXPERIMENT_NAME=\"megatron_llama_distill_depth_pruned_student\"\n",
+ "\n",
+ "TEACHER=\"${EXPERIMENT_DIR}/megatron_llama_ft/checkpoints/megatron_llama_ft.nemo\"\n",
+ "STUDENT=\"/workspace/4b_depth_pruned_model.nemo\"\n",
+ "\n",
+ "FINAL_MODEL_PATH=\"${EXPERIMENT_DIR}/${EXPERIMENT_NAME}/checkpoints/depth_pruned_distilled_4b_model.nemo\"\n",
+ "\n",
+ "DATA_TRAIN='wikitext_tokenized_train_text_document'\n",
+ "DATA_VAL='wikitext_tokenized_test_text_document'\n",
+ "DATA_TEST='wikitext_tokenized_val_text_document'\n",
+ "\n",
+ "STEPS=30\n",
+ "GLOBAL_BATCH_SIZE=128\n",
+ "\n",
+ "LOG_INTERVAL=1\n",
+ "VAL_INTERVAL=10\n",
+ "NUM_VAL_BATCHES=5\n",
+ "\n",
+ "LR=1e-4\n",
+ "MIN_LR=1e-5\n",
+ "WARMUP_STEPS=2\n",
+ "\n",
+ "cmd=\"torchrun --nproc-per-node=${TENSOR_PARALLEL_SIZE}\"\n",
+ "\n",
+ "${cmd} /opt/NeMo/examples/nlp/language_modeling/megatron_gpt_distillation.py \\\n",
+ " name=${EXPERIMENT_NAME} \\\n",
+ " \\\n",
+ " exp_manager.exp_dir=${EXPERIMENT_DIR} \\\n",
+ " exp_manager.checkpoint_callback_params.save_top_k=1 \\\n",
+ " \\\n",
+ " trainer.max_steps=${STEPS} \\\n",
+ " trainer.log_every_n_steps=${LOG_INTERVAL} \\\n",
+ " trainer.val_check_interval=${VAL_INTERVAL} \\\n",
+ " trainer.limit_val_batches=${NUM_VAL_BATCHES} \\\n",
+ " +trainer.num_sanity_val_steps=0 \\\n",
+ " \\\n",
+ " trainer.precision=bf16 \\\n",
+ " trainer.devices=${TENSOR_PARALLEL_SIZE} \\\n",
+ " trainer.num_nodes=${NODES} \\\n",
+ " \\\n",
+ " \"model.data.data_prefix={train:[1.0,$DATA_TRAIN],validation:[$DATA_VAL],test:[$DATA_TEST]}\" \\\n",
+ " \\\n",
+ " model.restore_from_path=${STUDENT} \\\n",
+ " model.kd_teacher_restore_from_path=${TEACHER} \\\n",
+ " model.nemo_path=${FINAL_MODEL_PATH} \\\n",
+ " \\\n",
+ " model.tensor_model_parallel_size=${TENSOR_PARALLEL_SIZE} \\\n",
+ " model.sequence_parallel=True \\\n",
+ " model.micro_batch_size=${MICRO_BATCH_SIZE} \\\n",
+ " model.global_batch_size=${GLOBAL_BATCH_SIZE} \\\n",
+ " \\\n",
+ " model.optim.name=distributed_fused_adam \\\n",
+ " model.optim.lr=${LR} \\\n",
+ " model.optim.sched.min_lr=${MIN_LR} \\\n",
+ " model.optim.sched.warmup_steps=${WARMUP_STEPS}"
+ ]
+ },
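+ {
+ "cell_type": "markdown",
+ "id": "5e6f7a8b-9c0d-4e5f-8a6b-4c5d6e7f8a01",
+ "metadata": {},
+ "source": [
+ "> `TIP:` After the run completes, you can optionally confirm that the distilled checkpoint was written. The check below assumes the default `FINAL_MODEL_PATH` set above."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "5e6f7a8b-9c0d-4e5f-8a6b-4c5d6e7f8a02",
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "# Optional sanity check: the distilled student should be saved at FINAL_MODEL_PATH\n",
+ "# (assumes the default EXPERIMENT_DIR and EXPERIMENT_NAME values above).\n",
+ "!ls -lh distill_trainings/megatron_llama_distill_depth_pruned_student/checkpoints/"
+ ]
+ },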
+ {
+ "cell_type": "markdown",
+ "id": "42d910d9-14dd-44ba-bf2c-0064737c70fa",
+ "metadata": {},
+ "source": [
+ "This will create the final distilled model named `depth_pruned_distilled_4b_model.nemo` in `./distill_trainings/megatron_llama_distill_depth_pruned_student/checkpoints`.\n",
+ "> `NOTE:`This script takes at least 35 minutes to run (depends on GPU) and generate the final distilled model."
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.12"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/tutorials/llm/llama-3/pruning-distillation/04_b_distilling_width_pruned_student.ipynb b/tutorials/llm/llama-3/pruning-distillation/04_b_distilling_width_pruned_student.ipynb
new file mode 100644
index 000000000000..95110dd19dd9
--- /dev/null
+++ b/tutorials/llm/llama-3/pruning-distillation/04_b_distilling_width_pruned_student.ipynb
@@ -0,0 +1,138 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "d5062f23-c604-479b-9a4e-69989598b131",
+ "metadata": {},
+ "source": [
+ "### Step 4: Distill knowledge from teacher into student\n",
+ "Distillation of a model with NeMo Framework is also possible using a Python script: [megatron_gpt_distillation.py](https://github.com/NVIDIA/NeMo/blob/main/examples/nlp/language_modeling/megatron_gpt_distillation.py). \n",
+ "In this notebook, we will explore distillation with the width-pruned model as the `STUDENT` model.\n",
+ "\n",
+ "For this demonstration, the `TEACHER` would be the fine-tuned teacher model `megatron_llama_ft.nemo` and the `STUDENT` model would be the pruned 4B model. This training run is capped by `STEPS`, and validation is carried out every `VAL_INTERVAL` steps."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "be7de691-dd1d-4719-9872-98501a22e3c9",
+ "metadata": {},
+ "source": [
+ "#### Step 4.b.: Using width-pruned student\n",
+ "While distilling knowledge from the teacher to width-pruned model, the `STUDENT` model would be `4b_width_pruned_model.nemo` as produced by the [width-pruning](./03_b_width_pruning.ipynb) notebook. This training run is capped by `STEPS`, and validation is carried out every `VAL_INTERVAL` steps.\n",
+ "\n",
+ "> `NOTE:` In the block of code below, pass the paths to your pre-processed train, test, and validation data files, as well as path to the teacher and student .nemo models."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "0070b526-771a-4a8d-b0ba-ab218b382bd9",
+ "metadata": {
+ "scrolled": true,
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "%%bash \n",
+ "\n",
+ "export CUDA_DEVICE_MAX_CONNECTIONS=1\n",
+ "\n",
+ "# Can change these to accommodate resources:\n",
+ "\n",
+ "TENSOR_PARALLEL_SIZE=8\n",
+ "NODES=1\n",
+ "MICRO_BATCH_SIZE=4\n",
+ "\n",
+ "# Don't change the following:\n",
+ "\n",
+ "EXPERIMENT_DIR=\"distill_trainings\"\n",
+ "EXPERIMENT_NAME=\"megatron_llama_distill_width_pruned_student\"\n",
+ "\n",
+ "TEACHER=\"${EXPERIMENT_DIR}/megatron_llama_ft/checkpoints/megatron_llama_ft.nemo\"\n",
+ "STUDENT=\"/workspace/4b_width_pruned_model.nemo\"\n",
+ "\n",
+ "FINAL_MODEL_PATH=\"${EXPERIMENT_DIR}/${EXPERIMENT_NAME}/checkpoints/width_pruned_distilled_4b_model.nemo\"\n",
+ "\n",
+ "DATA_TRAIN='wikitext_tokenized_train_text_document'\n",
+ "DATA_VAL='wikitext_tokenized_test_text_document'\n",
+ "DATA_TEST='wikitext_tokenized_val_text_document'\n",
+ "\n",
+ "STEPS=30\n",
+ "GLOBAL_BATCH_SIZE=128\n",
+ "\n",
+ "LOG_INTERVAL=1\n",
+ "VAL_INTERVAL=10\n",
+ "NUM_VAL_BATCHES=5\n",
+ "\n",
+ "LR=1e-4\n",
+ "MIN_LR=1e-5\n",
+ "WARMUP_STEPS=2\n",
+ "\n",
+ "cmd=\"torchrun --nproc-per-node=${TENSOR_PARALLEL_SIZE}\"\n",
+ "\n",
+ "${cmd} /opt/NeMo/examples/nlp/language_modeling/megatron_gpt_distillation.py \\\n",
+ " name=${EXPERIMENT_NAME} \\\n",
+ " \\\n",
+ " exp_manager.exp_dir=${EXPERIMENT_DIR} \\\n",
+ " exp_manager.checkpoint_callback_params.save_top_k=1 \\\n",
+ " \\\n",
+ " trainer.max_steps=${STEPS} \\\n",
+ " trainer.log_every_n_steps=${LOG_INTERVAL} \\\n",
+ " trainer.val_check_interval=${VAL_INTERVAL} \\\n",
+ " trainer.limit_val_batches=${NUM_VAL_BATCHES} \\\n",
+ " +trainer.num_sanity_val_steps=0 \\\n",
+ " \\\n",
+ " trainer.precision=bf16 \\\n",
+ " trainer.devices=${TENSOR_PARALLEL_SIZE} \\\n",
+ " trainer.num_nodes=${NODES} \\\n",
+ " \\\n",
+ " \"model.data.data_prefix={train:[1.0,$DATA_TRAIN],validation:[$DATA_VAL],test:[$DATA_TEST]}\" \\\n",
+ " \\\n",
+ " model.restore_from_path=${STUDENT} \\\n",
+ " model.kd_teacher_restore_from_path=${TEACHER} \\\n",
+ " model.nemo_path=${FINAL_MODEL_PATH} \\\n",
+ " \\\n",
+ " model.tensor_model_parallel_size=${TENSOR_PARALLEL_SIZE} \\\n",
+ " model.sequence_parallel=True \\\n",
+ " model.micro_batch_size=${MICRO_BATCH_SIZE} \\\n",
+ " model.global_batch_size=${GLOBAL_BATCH_SIZE} \\\n",
+ " \\\n",
+ " model.optim.name=distributed_fused_adam \\\n",
+ " model.optim.lr=${LR} \\\n",
+ " model.optim.sched.min_lr=${MIN_LR} \\\n",
+ " model.optim.sched.warmup_steps=${WARMUP_STEPS} \\\n",
+ " +model.dist_ckpt_load_strictness=log_all"
+ ]
+ },
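+ {
+ "cell_type": "markdown",
+ "id": "6f7a8b9c-0d1e-4f6a-9b7c-5d6e7f8a9b01",
+ "metadata": {},
+ "source": [
+ "> `TIP:` After the run completes, you can optionally confirm that the distilled checkpoint was written. The check below assumes the default `FINAL_MODEL_PATH` set above."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "6f7a8b9c-0d1e-4f6a-9b7c-5d6e7f8a9b02",
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "# Optional sanity check: the distilled student should be saved at FINAL_MODEL_PATH\n",
+ "# (assumes the default EXPERIMENT_DIR and EXPERIMENT_NAME values above).\n",
+ "!ls -lh distill_trainings/megatron_llama_distill_width_pruned_student/checkpoints/"
+ ]
+ },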
+ {
+ "cell_type": "markdown",
+ "id": "d9dbc377-e19a-49e0-b245-fa828cca415a",
+ "metadata": {},
+ "source": [
+ "This will create the final width-pruned distilled model named `width_pruned_distilled_4b_model.nemo` in `./distill_trainings/megatron_llama_distill_width_pruned_student/checkpoints`.\n",
+ "> `NOTE:`This script takes at least 20 minutes to run (depends on GPU) and generate the final distilled model."
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.12"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/tutorials/llm/llama-3/pruning-distillation/05_display_results.ipynb b/tutorials/llm/llama-3/pruning-distillation/05_display_results.ipynb
new file mode 100644
index 000000000000..dcb483c55ab6
--- /dev/null
+++ b/tutorials/llm/llama-3/pruning-distillation/05_display_results.ipynb
@@ -0,0 +1,161 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "6c91263b-b312-4ab2-b13f-0ee4b6e8bd0f",
+ "metadata": {},
+ "source": [
+ "### Step 5: Display the validation loss\n",
+ "\n",
+ "Now that the results are in, let's visualize the validation loss of the two distilled models using the `tensorboard` library. \n",
+ "\n",
+ "> `NOTE:` This notebook demonstrates the use of the teacher fine-tuning, pruning, and the distillation script. These scripts should ideally be run on a multi-node cluster with a larger `GLOBAL_BATCH_SIZE` and `STEPS` to see improvement in the validation loss."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "b5822d62-8131-4046-8c22-0bf0fce81df7",
+ "metadata": {},
+ "source": [
+ "#### Validation Loss Using Depth-Pruned Model as Student in Distillation Script\n",
+ "Here is an image of the validation loss over 30 steps of running the training step in the distillation script, where we distill the knowledge from the fine-tuned teacher model to the depth-pruned student."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "0a665fe1-df45-4126-8694-f182af113133",
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "%load_ext tensorboard\n",
+ "%tensorboard --logdir \"distill_trainings/megatron_llama_distill_depth_pruned_student/\" --port=6007"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "id": "db6fcf26-8ae8-40e1-875a-0a10bf85be81",
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "Validation Loss over 30 Training Steps with Depth-Pruned Model as Student
"
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/html": [
+ ""
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "from IPython.display import Image, display, HTML\n",
+ "title = \"Validation Loss over 30 Training Steps with Depth-Pruned Model as Student\"\n",
+ "display(HTML(f\"{title}
\"))\n",
+ "display(Image(url=\"https://github.com/NVIDIA/NeMo/releases/download/r2.0.0rc1/val_loss_depth_pruned_student_distillation.png\", width=400))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "f10041ae-6533-47de-9f76-f97d4469c27a",
+ "metadata": {},
+ "source": [
+ "#### Validation Loss Using Width-Pruned Model as Student in Distillation Script\n",
+ "Here is an image of the validation loss over 30 steps of running the training step in the distillation script, where we distill the knowledge from the fine-tuned teacher model to the width-pruned student."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "7b0c3118-4987-4df3-88bd-fcffdb521c5d",
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "%load_ext tensorboard\n",
+ "%tensorboard --logdir \"distill_trainings/megatron_llama_distill_width_pruned_student/\" --port=6008"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "id": "ecd79583-f662-40c6-a690-9f4bb847de4e",
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "Validation Loss over 30 Training Steps with Width-Pruned Model as Student
"
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/html": [
+ ""
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "from IPython.display import Image, display, HTML\n",
+ "title = \"Validation Loss over 30 Training Steps with Width-Pruned Model as Student\"\n",
+ "display(HTML(f\"{title}
\"))\n",
+ "display(Image(url=\"https://github.com/NVIDIA/NeMo/releases/download/r2.0.0rc1/val_loss_width_pruned_student_distillation.png\", width=400))"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.12"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/tutorials/llm/llama-3/pruning-distillation/README.rst b/tutorials/llm/llama-3/pruning-distillation/README.rst
index 9d4207a5c968..45cb119ffcd8 100644
--- a/tutorials/llm/llama-3/pruning-distillation/README.rst
+++ b/tutorials/llm/llama-3/pruning-distillation/README.rst
@@ -1,44 +1,57 @@
-Llama 3.1 WikiText Pruning and Distillation with NeMo Framework
+Llama 3.1 Pruning and Distillation with NeMo Framework
=======================================================================================
-`Llama 3.1 `_ are open-source large language models by Meta that deliver state-of-the-art performance on popular industry benchmarks. They have been pretrained on over 15 trillion tokens, and support a 128K token context length. They are available in three sizes, 8B, 70B, and 405B, and each size has two variantsβbase pretrained and instruction tuned.
+`Llama 3.1 `_ models, developed by Meta, are open-source large language models that deliver state-of-the-art performance on popular industry benchmarks. Pretrained on over 15 trillion tokens, they support a 128K token context length. These models are available in three sizes: 8B, 70B, and 405B. Each size offers two variants: base pretrained and instruction tuned.
-`NVIDIA NeMo Framework `_ provides tools to perform teacher finetuning, pruning and distillation on Llama 3.1 to fit your use case.
+`NVIDIA NeMo Framework `_ provides tools to perform teacher fine-tuning, pruning, and distillation on Llama 3.1 to fit your use case.
-`LLM Pruning and Distillation in Practice: The Minitron Approach `_ provides tools to perform teacher finetuning, pruning and distillation on Llama 3.1 as described in the `tech report `_.
+`NVIDIA TensorRT Model Optimizer `_ is a library (referred to as **Model Optimizer**, or **ModelOpt**) comprising state-of-the-art model optimization techniques including `quantization `_, `sparsity `_, `distillation `_, and `pruning `_ to compress models.
+
+`LLM Pruning and Distillation in Practice: The Minitron Approach `_ provides tools to perform teacher fine-tuning, pruning, and distillation on Llama 3.1 as described in the `tech report <https://arxiv.org/pdf/2408.11796>`_.
+
+`How to Prune and Distill Llama-3.1 8B to an NVIDIA Llama-3.1-Minitron 4B Model <https://developer.nvidia.com/blog/how-to-prune-and-distill-llama-3-1-8b-to-an-nvidia-llama-3-1-minitron-4b-model/>`_ provides practical and effective structured compression best practices for LLMs that combine depth, width, attention, and MLP pruning with knowledge distillation-based retraining. These strategies are presented in the `Compact Language Models via Pruning and Knowledge Distillation `_ paper.
+
+`Mistral-NeMo-Minitron 8B Model Delivers Unparalleled Accuracy `_ introduces the Mistral-NeMo-Minitron 8B, a state-of-the-art 8 billion parameter language model created by pruning and distilling the larger Mistral NeMo 12B model.
Objectives
----------
-This tutorial shows how to perform depth-pruning, teacher finetuning and distillation on **Llama 3.1 8B Instruct** using the `WikiText-103-v1 `_ dataset with NeMo Framework. The `WikiText-103-v1 `_ language modeling dataset is a collection of over 100 million tokens extracted from the set of verified Good and Featured articles on Wikipedia. For this demonstration, we will perform a light finetuning procedure on the ``Meta Llama 3.1 8B Instruct`` teacher model to generate a finetuned teacher model ``megatron_llama_ft.nemo`` needed for optimal distillation. This finetuned teacher model is then depth-pruned to create a trimmed model ``4b_trimmed_model.nemo``. These models will serve as a starting point for distillation to create a final distilled 4B model.
+This tutorial demonstrates how to perform depth-pruning, width-pruning, teacher fine-tuning, and distillation on **Llama 3.1 8B** using the `WikiText-103-v1 <https://huggingface.co/datasets/Salesforce/wikitext/viewer/wikitext-103-v1>`_ dataset with the NeMo Framework. The WikiText-103-v1 language modeling dataset comprises over 100 million tokens extracted from verified Good and Featured articles on Wikipedia.
+
+For this demonstration, we will perform teacher correction by running a light fine-tuning procedure on the ``Meta Llama 3.1 8B`` teacher model to generate a fine-tuned teacher model, ``megatron_llama_ft.nemo``, needed for optimal distillation. This fine-tuned teacher model is then trimmed. There are two methods to prune a model: depth-pruning and width-pruning. We will explore both techniques, yielding ``4b_depth_pruned_model.nemo`` and ``4b_width_pruned_model.nemo``, respectively. These models will serve as starting points for distillation to create the final distilled 4B models.
+
We are using models utilizing the ``meta-llama/Meta-Llama-3.1-8B`` tokenizer for this demonstration.
+``NOTE:`` A subset of functions is being demonstrated in the notebooks. Some features like Neural Architecture Search (NAS) are unavailable, but will be supported in future releases.
+
Requirements
-------------
* System Configuration
- * Access to at least 8 NVIDIA GPU with an individual memory of at least 80GB, for example: 8 x H100-80GB or 8 x A100-80GB.
+ * Access to at least 8 NVIDIA GPUs, each with a memory of at least 80GB (e.g., 8 x H100-80GB or 8 x A100-80GB).
* A Docker-enabled environment, with `NVIDIA Container Runtime `_ installed, which will make the container GPU-aware.
-* `Authenticate with NVIDIA NGC `_, and download `NGC CLI Tool `_. You will use this tool to download the model and customize it with NeMo Framework.
+* `Authenticate with NVIDIA NGC `_ and download `NGC CLI Tool `_. You will use this tool to download the model and customize it with NeMo Framework.
* Get your Hugging Face `access token `_, which will be used to obtain the tokenizer required during training.
-``NOTE:`` The default configuration in the notebook runs on 8 x 80GB NVIDIA GPUs but you can potentially reduce Tensor Parallel size ``(TENSOR_PARALLEL_SIZE)`` along with the Micro-Batchsize ``(MICRO_BATCH_SIZE)`` in the teacher finetuning and distillation scripts to accommodate lower resource availability.
+``NOTE:`` The default configuration in the notebook runs on 8 x 80GB NVIDIA GPUs. However, you can potentially reduce the Tensor Parallel size ``(TENSOR_PARALLEL_SIZE)`` along with the micro-batch size ``(MICRO_BATCH_SIZE)`` in the teacher fine-tuning and distillation scripts to accommodate lower resource availability.
-Create a pruned and distilled model with NeMo Framework
+Create a Pruned and Distilled Model with NeMo Framework
------------------------------------------------------------------------------
-For pruning and distilling the model, you will use the NeMo Framework which is available as a `docker container `_.
+For pruning and distilling the model, you will use the NeMo Framework, which is available as a `Docker container `_.
+``NOTE:`` These notebooks use the `NVIDIA TensorRT Model Optimizer `_ under the hood for pruning and distillation.
-1. Download the `Llama 3.1 8B Instruct .nemo `_ from NVIDIA NGC using the `NGC CLI `_. Generate the ``NGC_API_KEY`` following these `instructions `_. The following command saves the ``.nemo`` format model in a folder named ``llama-3_1-8b-instruct-nemo_v1.0`` in the current directory. You can specify another path using the ``-d`` option in the CLI tool.
+
+1. Download the `Llama 3.1 8B .nemo `_ from NVIDIA NGC using the `NGC CLI `_. Generate the ``NGC_API_KEY`` following these `instructions `_. The following command saves the ``.nemo`` format model in a folder named ``llama-3_1-8b-nemo_v1.0`` in the current directory. You can specify another path using the ``-d`` option in the CLI tool.
.. code:: bash
- ngc registry model download-version "nvidia/nemo/llama-3_1-8b-instruct-nemo:1.0"
+ ngc registry model download-version "nvidia/nemo/llama-3_1-8b-nemo:1.0"
-2. Run the container using the following command. It is assumed that you have the dataset, notebook(s), and the ``llama-3.1-8b-instruct`` model available in the current directory. If not, mount the appropriate folder to ``/workspace``.
+2. Run the container using the following command. It is assumed that you have the dataset, notebook(s), and the ``llama3_1_8b.nemo`` model available in the current directory. If not, mount the appropriate folder to ``/workspace``.
.. code:: bash
@@ -63,17 +76,38 @@ For pruning and distilling the model, you will use the NeMo Framework which is a
jupyter lab --ip 0.0.0.0 --port=8888 --allow-root
-4. Then, navigate to `this notebook <./llama3-pruning-distillation-nemofw.ipynb>`_.
+4. Then, navigate to `this notebook <./introduction.ipynb>`_ to get started.
+
+This directory contains a list of notebooks that cover all the steps to create a distilled 4B model.
+
+::
+ <$pruning_distillation>
+    ├── introduction.ipynb
+    ├── 01_data_preparation.ipynb
+    ├── 02_teacher_finetuning.ipynb
+    ├── 03_a_depth_pruning.ipynb
+    ├── 03_b_width_pruning.ipynb
+    ├── 04_a_distilling_depth_pruned_student.ipynb
+    ├── 04_b_distilling_width_pruned_student.ipynb
+    └── 05_display_results.ipynb
+
Results
------------------------------------------------------------------------------
-``NOTE:`` This notebook demonstrates the use of the teacher finetuning, pruning and the distillation script. These scripts should ideally be run on a multi-node cluster with a larger ``GLOBAL_BATCH_SIZE`` and ``STEPS`` to see improvement in the validation loss.
+``NOTE:`` This notebook demonstrates the use of the teacher fine-tuning, pruning, and the distillation scripts. These scripts should ideally be run on a multi-node cluster with a larger ``GLOBAL_BATCH_SIZE`` and ``STEPS`` to see improvement in the validation loss.
-Here is the validation loss over 30 steps of running the training step in the distillation script (at the end of the `notebook <./llama3-pruning-distillation-nemofw.ipynb>`_).
+Here are the validation loss plots over 30 steps of running the training step in the distillation script (at the end of the `notebook <./05_display_results.ipynb>`_).
+
+.. figure:: https://github.com/NVIDIA/NeMo/releases/download/r2.0.0rc1/val_loss_depth_pruned_student_distillation.png
+ :width: 400px
+ :alt: Diagram showing the validation loss over 30 steps of running the training step in the distillation script when using the depth-pruned model as the student
+ :align: center
-.. figure:: https://github.com/NVIDIA/NeMo/releases/download/r2.0.0rc1/val_loss_distillation.png
+ Figure 1: Validation Loss Plot When Using the Depth-Pruned Model as the Student
+
+.. figure:: https://github.com/NVIDIA/NeMo/releases/download/r2.0.0rc1/val_loss_width_pruned_student_distillation.png
:width: 400px
- :alt: Diagram showing the validation loss over 30 steps of running the training step in the distillation script
+ :alt: Diagram showing the validation loss over 30 steps of running the training step in the distillation script when using the width-pruned model as the student
:align: center
- Figure 1: Validation Loss Plot
\ No newline at end of file
+ Figure 2: Validation Loss Plot When Using the Width-Pruned Model as the Student
\ No newline at end of file
diff --git a/tutorials/llm/llama-3/pruning-distillation/introduction.ipynb b/tutorials/llm/llama-3/pruning-distillation/introduction.ipynb
new file mode 100644
index 000000000000..71a5a6cfb03c
--- /dev/null
+++ b/tutorials/llm/llama-3/pruning-distillation/introduction.ipynb
@@ -0,0 +1,190 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "411e6711-60fc-4488-8aa1-c6463cac8695",
+ "metadata": {
+ "tags": []
+ },
+ "source": [
+ "# Efficient Model Reduction with Pruning and Distillation of Llama 3.1 Using NeMo Framework"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "03fd1cf4-c67a-4b8d-a5e5-46531be0f991",
+ "metadata": {},
+ "source": [
+ "This tutorial demonstrates how to perform depth-pruning, teacher fine-tuning, and distillation on **Llama 3.1-8B** using the [WikiText-103-v1](https://huggingface.co/datasets/Salesforce/wikitext/viewer/wikitext-103-v1) dataset with NeMo Framework. The [WikiText-103-v1](https://huggingface.co/datasets/Salesforce/wikitext/viewer/wikitext-103-v1) language modeling dataset comprises over 100 million tokens extracted from verified Good and Featured articles on Wikipedia.\n",
+ "\n",
+ "For this demonstration, we will perform teacher correction by running a light fine-tuning procedure on the `Meta Llama 3.1 8B` teacher model to generate a fine-tuned teacher model, `megatron_llama_ft.nemo`, needed for optimal distillation. This fine-tuned teacher model is then trimmed. There are two methods to prune a model: depth-pruning and width-pruning. We will explore both techniques, yielding `4b_depth_pruned_model.nemo` and `4b_width_pruned_model.nemo`, respectively. These models will serve as starting points for distillation to create the final distilled 4B models.\n",
+ "\n",
+ "> We are using models utilizing the `meta-llama/Meta-Llama-3.1-8B` tokenizer for this demonstration.\n",
+ "\n",
+ "> `NOTE:` Ensure that you run this notebook inside the [NeMo Framework container](https://catalog.ngc.nvidia.com/orgs/nvidia/containers/nemo) which has all the required dependencies. \n",
+ "\n",
+ "**Instructions for downloading the model and the container are available in the [README](./README.rst).**"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "5a5026ce-39f1-43e3-93af-4c4f1e9da1f2",
+ "metadata": {
+ "scrolled": true,
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "!pip install --upgrade ipywidgets notebook\n",
+ "!pip install datasets"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "afe59b07-bb48-4913-90cc-bb416b48196c",
+ "metadata": {
+ "tags": []
+ },
+ "source": [
+ "---\n",
+ "## Prerequisites\n",
+ "Ensure you meet the prerequisites listed in this section.\n",
+ "1. **Get the teacher model**: Download the `Meta Llama 3.1 8B .nemo` model. You must follow the instructions in the associated README to download and mount the folder to the NeMo Framework container."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "b9d48b81-e978-4894-8ba4-4f183f698bb1",
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "!ls /workspace/llama-3_1-8b-nemo_v1.0/llama3_1_8b.nemo"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "7129d44e-0536-4e62-bdbc-0f1ad44dc84a",
+ "metadata": {},
+ "source": [
+ "2. **Set the Hugging Face Access Token**: You can obtain this from your [Hugging Face account](https://huggingface.co/docs/hub/en/security-tokens). "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "481417ed-1456-4962-8f67-4350bde1aabd",
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "from huggingface_hub import login\n",
+ "login(token=\"\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "245eda8d-c999-431e-9ebc-5c92c4f21f3b",
+ "metadata": {},
+ "source": [
+ "3. **Obtain the dataset**: Generate the `wikitext-{train/val/test}.jsonl` splits after loading the [WikiText-103-v1](https://huggingface.co/datasets/Salesforce/wikitext/viewer/wikitext-103-v1) dataset."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "eaef2c7d-41f7-41ad-a76a-2d714e9c35de",
+ "metadata": {
+ "scrolled": true,
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "# Split into train, test and val files\n",
+ "\n",
+ "import json\n",
+ "import os\n",
+ "from datasets import load_dataset\n",
+ "\n",
+ "# Load the WikiText-103 dataset\n",
+ "dataset = load_dataset(\"wikitext\", \"wikitext-103-v1\")\n",
+ "\n",
+ "# Define the destination folder\n",
+ "data_folder = 'wikitext-data'\n",
+ "os.makedirs(data_folder, exist_ok=True)\n",
+ "\n",
+ "# Define file paths and destination paths\n",
+ "file_paths = {\n",
+ " 'train': os.path.join(data_folder, 'wikitext-train.jsonl'),\n",
+ " 'validation': os.path.join(data_folder, 'wikitext-val.jsonl'),\n",
+ " 'test': os.path.join(data_folder, 'wikitext-test.jsonl')\n",
+ "}\n",
+ "\n",
+ "# Function to save dataset split to a JSONL file\n",
+ "def save_to_jsonl(file_path, data):\n",
+ " with open(file_path, 'w') as file:\n",
+ " for item in data:\n",
+ " file.write(json.dumps(item) + '\\n')\n",
+ "\n",
+ "# Define splits\n",
+ "splits = [\"train\", \"validation\", \"test\"]\n",
+ "\n",
+ "# Save splits to JSONL files and calculate their sizes\n",
+ "for split in splits:\n",
+ " if split in dataset:\n",
+ " save_to_jsonl(file_paths[split], dataset[split])\n",
+ " else:\n",
+ " print(f\"Split {split} not found in the dataset.\")\n"
+ ]
+ },
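+ {
+ "cell_type": "markdown",
+ "id": "7a8b9c0d-1e2f-4a7b-8c8d-6e7f8a9b0c01",
+ "metadata": {},
+ "source": [
+ "> `TIP:` Optionally, take a quick look at the generated splits before preprocessing. The check below assumes the default `wikitext-data` folder and file names defined above."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "7a8b9c0d-1e2f-4a7b-8c8d-6e7f8a9b0c02",
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "# Optional sanity check: count records per split and preview the first record\n",
+ "# (assumes the default data_folder and file names defined above).\n",
+ "!wc -l wikitext-data/wikitext-train.jsonl wikitext-data/wikitext-val.jsonl wikitext-data/wikitext-test.jsonl\n",
+ "!head -c 300 wikitext-data/wikitext-train.jsonl"
+ ]
+ },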
+ {
+ "cell_type": "markdown",
+ "id": "2d0cc359-0598-40aa-af80-9503ecd4dac1",
+ "metadata": {
+ "tags": []
+ },
+ "source": [
+ "---\n",
+ "## Step-by-Step Instructions\n",
+ "\n",
+ "This workflow is structured into seven notebooks:\n",
+ "1. [Prepare the dataset](./01_data_preparation.ipynb)\n",
+ "2. [Fine-tune the teacher on the dataset](./02_teacher_finetuning.ipynb)\n",
+ "3. Prune the fine-tuned teacher model to create a student \n",
+ " - 3.a. [Using depth-pruning](./03_a_depth_pruning.ipynb)\n",
+ " - 3.b. [Using width-pruning](./03_b_width_pruning.ipynb)\n",
+ "4. Distill knowledge from teacher into student\n",
+ " - 4.a. [Using depth-pruned student](./04_a_distilling_depth_pruned_student.ipynb)\n",
+ " - 4.b. [Using width-pruned student](./04_b_distilling_width_pruned_student.ipynb)\n",
+ "5. [Display the validation loss](./05_display_results.ipynb)\n",
+ "\n",
+ "> `NOTE:` We are exploring two methods to prune the fine-tuned teacher model: [depth-pruning](./03_a_depth_pruning.ipynb) and [width-pruning](./03_b_width_pruning.ipynb). Per the [tech report](https://arxiv.org/pdf/2408.11796), we can observe that width-pruning generally outperforms depth-pruning so users can choose to perform either [depth-pruning](./03_a_depth_pruning.ipynb) or [width-pruning](./03_b_width_pruning.ipynb) or both methods."
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.12"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/tutorials/llm/llama-3/pruning-distillation/llama3-pruning-distillation-nemofw.ipynb b/tutorials/llm/llama-3/pruning-distillation/llama3-pruning-distillation-nemofw.ipynb
deleted file mode 100644
index 8b31ad4de018..000000000000
--- a/tutorials/llm/llama-3/pruning-distillation/llama3-pruning-distillation-nemofw.ipynb
+++ /dev/null
@@ -1,587 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "markdown",
- "id": "363a6974-810c-41c5-84da-4751a92fb72b",
- "metadata": {
- "tags": []
- },
- "source": [
- "# Pruning and Distillation of Llama 3.1 model with NeMo Framework"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "c6d4ed6d-8ecd-4647-bd0a-e48fec64c199",
- "metadata": {},
- "source": [
- "This notebook showcases performing pruning and distillation on **Llama 3.1-8B-Instruct** with the [WikiText-103-v1](https://huggingface.co/datasets/Salesforce/wikitext/viewer/wikitext-103-v1) dataset using NeMo Framework. The [WikiText-103-v1](https://huggingface.co/datasets/Salesforce/wikitext/viewer/wikitext-103-v1) language modeling dataset is a collection of over 100 million tokens extracted from the set of verified Good and Featured articles on Wikipedia. \n",
- "\n",
- "For this demonstration, we will perform a light finetuning procedure on the `Meta Llama 3.1 8B Instruct` teacher model to generate a finetuned teacher model. This finetuned teacher model will then be trimmed to create a depth-pruned model `4b_trimmed_model.nemo` that will serve as a starting point for distillation to a final 4B model. \n",
- "\n",
- "> We are using models utilizing the `meta-llama/Meta-Llama-3.1-8B` tokenizer for this demonstration.\n",
- "\n",
- "> `NOTE:` Ensure that you run this notebook inside the [NeMo Framework container](https://catalog.ngc.nvidia.com/orgs/nvidia/containers/nemo) which has all the required dependencies. \n",
- "\n",
- "**Instructions are available in the associated tutorial README to download the model and the container.**"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "1d0dc714-5bbf-4266-805a-9841ff486c05",
- "metadata": {
- "scrolled": true,
- "tags": []
- },
- "outputs": [],
- "source": [
- "!pip install --upgrade ipywidgets notebook\n",
- "!pip install datasets"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "2658505d-7990-40a5-a269-866ddd8a0181",
- "metadata": {
- "tags": []
- },
- "source": [
- "---\n",
- "## Prerequisites\n",
- "Ensure you have the following -\n",
- "1. **Get the teacher model**: Download the `Meta Llama 3.1 8B Instruct .nemo` model. You must follow the instructions in the associated README to download and mount the folder to the NeMo FW container."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "a30cfe8a-87a8-4511-be5f-e20d7fe558d4",
- "metadata": {},
- "outputs": [],
- "source": [
- "!ls /workspace/llama-3_1-8b-instruct-nemo_v1.0"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "251a670e-9636-4807-bc98-a91c6137454d",
- "metadata": {},
- "source": [
- "2. **Set the Hugging Face Access Token**: You can obtain this from your [Hugging Face account](https://huggingface.co/docs/hub/en/security-tokens). "
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "47d7887d-b582-4a1e-81cd-fdc1be8d9afb",
- "metadata": {
- "tags": []
- },
- "outputs": [],
- "source": [
- "from huggingface_hub import login\n",
- "login(token=\"\")"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "b5384e9a-6c40-4454-abe8-413ad9d5db96",
- "metadata": {},
- "source": [
- "3. **Obtain the dataset**: Generate the `wikitext-{train/val/test}.jsonl` splits after loading the [WikiText-103-v1](https://huggingface.co/datasets/Salesforce/wikitext/viewer/wikitext-103-v1) dataset."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "b420bd44-3628-45e2-92e7-df38f72a658a",
- "metadata": {
- "scrolled": true,
- "tags": []
- },
- "outputs": [],
- "source": [
- "# Split into train, test and val files\n",
- "\n",
- "import json\n",
- "import os\n",
- "from datasets import load_dataset\n",
- "\n",
- "# Load the WikiText-103 dataset\n",
- "dataset = load_dataset(\"wikitext\", \"wikitext-103-v1\")\n",
- "\n",
- "# Define the destination folder\n",
- "data_folder = 'wikitext-data'\n",
- "os.makedirs(data_folder, exist_ok=True)\n",
- "\n",
- "# Define file paths and destination paths\n",
- "file_paths = {\n",
- " 'train': os.path.join(data_folder, 'wikitext-train.jsonl'),\n",
- " 'validation': os.path.join(data_folder, 'wikitext-val.jsonl'),\n",
- " 'test': os.path.join(data_folder, 'wikitext-test.jsonl')\n",
- "}\n",
- "\n",
- "# Function to save dataset split to a JSONL file\n",
- "def save_to_jsonl(file_path, data):\n",
- " with open(file_path, 'w') as file:\n",
- " for item in data:\n",
- " file.write(json.dumps(item) + '\\n')\n",
- "\n",
- "# Define splits\n",
- "splits = [\"train\", \"validation\", \"test\"]\n",
- "\n",
- "# Save splits to JSONL files and calculate their sizes\n",
- "for split in splits:\n",
- " if split in dataset:\n",
- " save_to_jsonl(file_paths[split], dataset[split])\n",
- " else:\n",
- " print(f\"Split {split} not found in the dataset.\")\n"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "0185a0a9-904d-46de-a450-db4c84c4cde4",
- "metadata": {
- "tags": []
- },
- "source": [
- "---\n",
- "## Step-by-step instructions\n",
- "\n",
- "This notebook is structured into five steps:\n",
- "1. Prepare the dataset\n",
- "2. Finetune the teacher on the dataset\n",
- "3. Prune the finetuned-teacher model to create a student\n",
- "3. Distill knowledge from teacher into student\n",
- "4. Display the validation loss"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "cf1d41ff-2cba-4efc-84e3-7d713df0cdb8",
- "metadata": {},
- "source": [
- "### Step 1: Prepare the dataset\n",
- "\n",
- "The dataset has to be preprocessed using the [preprocess_data_for_megatron.py](https://github.com/NVIDIA/NeMo/blob/main/scripts/nlp_language_modeling/preprocess_data_for_megatron.py) script included in the NeMo Framework. This step will also tokenize data using the `meta-llama/Meta-Llama-3.1-8B` tokenizer model to convert the data into a memory map format.\n",
- "\n",
- "> `NOTE:` In the block of code below, pass the paths to your train, test and validation data files."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "2c49c1b8-2447-426c-9f24-bf5956aa2941",
- "metadata": {
- "scrolled": true,
- "tags": []
- },
- "outputs": [],
- "source": [
- "!python /opt/NeMo/scripts/nlp_language_modeling/preprocess_data_for_megatron.py \\\n",
- "--input=\"./wikitext-data/wikitext-train.jsonl\" \\\n",
- "--tokenizer-library='huggingface' \\\n",
- "--tokenizer-type='meta-llama/Meta-Llama-3.1-8B' \\\n",
- "--output-prefix=wikitext_tokenized_train \\\n",
- "--append-eod \\\n",
- "--workers=32"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "72d14fd7-702f-4b74-a6e5-af3a60eef3a9",
- "metadata": {
- "scrolled": true,
- "tags": []
- },
- "outputs": [],
- "source": [
- "!python /opt/NeMo/scripts/nlp_language_modeling/preprocess_data_for_megatron.py \\\n",
- "--input=\"./wikitext-data/wikitext-test.jsonl\" \\\n",
- "--tokenizer-library='huggingface' \\\n",
- "--tokenizer-type='meta-llama/Meta-Llama-3.1-8B' \\\n",
- "--output-prefix=wikitext_tokenized_test \\\n",
- "--append-eod \\\n",
- "--workers=32"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "1338a1ce-f0e2-4151-ad3d-d34db75ea1bd",
- "metadata": {
- "scrolled": true,
- "tags": []
- },
- "outputs": [],
- "source": [
- "!python /opt/NeMo/scripts/nlp_language_modeling/preprocess_data_for_megatron.py \\\n",
- "--input=\"./wikitext-data/wikitext-val.jsonl\" \\\n",
- "--tokenizer-library='huggingface' \\\n",
- "--tokenizer-type='meta-llama/Meta-Llama-3.1-8B' \\\n",
- "--output-prefix=wikitext_tokenized_val \\\n",
- "--append-eod \\\n",
- "--workers=32"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "eb80e212-c343-4e51-a92d-184db43df011",
- "metadata": {},
- "source": [
- "After running the above scripts, you will see the preprocesed `wikitext_tokenized_{train/val/test}_text_document.{idx/bin}`files. These output files will be used in the next step."
- ]
- },
- {
- "cell_type": "markdown",
- "id": "e9f30c0a-4315-4017-b014-add4291a3fde",
- "metadata": {},
- "source": [
- "\n",
- "### Step 2: Finetune the teacher on the dataset\n",
- "\n",
- "NeMo framework includes a standard python script [megatron_gpt_pretraining.py](https://github.com/NVIDIA/NeMo/blob/main/examples/nlp/language_modeling/megatron_gpt_pretraining.py) for training a model. Once you have your model downloaded and the dataset ready, fine-tuning the teacher model with NeMo is essentially just running this script!\n",
- "\n",
- "For this demonstration, this training run is capped by `STEPS`, and validation is carried out every `VAL_INTERVAL` steps.\n",
- "\n",
- "> `NOTE:` In the block of code below, pass the paths to your pre-processed train, test and validation data files as well as path to the teacher .nemo model."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "c31fd642-0304-43ed-9211-041dc36f22c3",
- "metadata": {
- "scrolled": true,
- "tags": []
- },
- "outputs": [],
- "source": [
- "%%bash \n",
- "\n",
- "export CUDA_DEVICE_MAX_CONNECTIONS=1\n",
- "\n",
- "\n",
- "# Set path(s) if different:\n",
- "\n",
- "MODEL=\"/workspace/llama-3_1-8b-instruct-nemo_v1.0/llama3_1_8b_instruct.nemo\"\n",
- "\n",
- "# Can change these to accommodate resources:\n",
- "\n",
- "TENSOR_PARALLEL_SIZE=8\n",
- "NODES=1\n",
- "MICRO_BATCH_SIZE=4\n",
- "\n",
- "# Don't change the following:\n",
- "\n",
- "EXPERIMENT_DIR=\"distill_trainings\"\n",
- "EXPERIMENT_NAME=\"megatron_llama_ft\"\n",
- "\n",
- "DATA_TRAIN='wikitext_tokenized_train_text_document'\n",
- "DATA_VAL='wikitext_tokenized_test_text_document'\n",
- "DATA_TEST='wikitext_tokenized_val_text_document'\n",
- "\n",
- "STEPS=30\n",
- "GLOBAL_BATCH_SIZE=128\n",
- "\n",
- "LOG_INTERVAL=1\n",
- "VAL_INTERVAL=10\n",
- "NUM_VAL_BATCHES=5\n",
- "\n",
- "LR=1e-4\n",
- "MIN_LR=1e-5\n",
- "WARMUP_STEPS=2\n",
- "\n",
- "\n",
- "cmd=\"torchrun --nproc-per-node=${TENSOR_PARALLEL_SIZE}\"\n",
- "\n",
- "${cmd} /opt/NeMo/examples/nlp/language_modeling/megatron_gpt_pretraining.py \\\n",
- " --config-path /opt/NeMo/examples/nlp/language_modeling/conf/ \\\n",
- " --config-name megatron_llama_distill.yaml \\\n",
- " \\\n",
- " name=${EXPERIMENT_NAME} \\\n",
- " \\\n",
- " exp_manager.exp_dir=${EXPERIMENT_DIR} \\\n",
- " exp_manager.checkpoint_callback_params.save_top_k=1 \\\n",
- " exp_manager.checkpoint_callback_params.save_nemo_on_train_end=True \\\n",
- " \\\n",
- " trainer.max_steps=${STEPS} \\\n",
- " trainer.log_every_n_steps=${LOG_INTERVAL} \\\n",
- " trainer.val_check_interval=${VAL_INTERVAL} \\\n",
- " trainer.limit_val_batches=${NUM_VAL_BATCHES} \\\n",
- " +trainer.num_sanity_val_steps=0 \\\n",
- " \\\n",
- " trainer.precision=bf16 \\\n",
- " trainer.devices=${TENSOR_PARALLEL_SIZE} \\\n",
- " trainer.num_nodes=${NODES} \\\n",
- " \\\n",
- " \"model.data.data_prefix={train:[1.0,$DATA_TRAIN],validation:[$DATA_VAL],test:[$DATA_TEST]}\" \\\n",
- " \\\n",
- " model.restore_from_path=${MODEL} \\\n",
- " \\\n",
- " ~model.tokenizer \\\n",
- " +model.tokenizer='{library: huggingface, type: meta-llama/Meta-Llama-3.1-8B, use_fast: True}' \\\n",
- " \\\n",
- " model.tensor_model_parallel_size=${TENSOR_PARALLEL_SIZE} \\\n",
- " model.sequence_parallel=True \\\n",
- " model.micro_batch_size=${MICRO_BATCH_SIZE} \\\n",
- " model.global_batch_size=${GLOBAL_BATCH_SIZE} \\\n",
- " \\\n",
- " model.encoder_seq_length=8192 \\\n",
- " model.num_layers=32 \\\n",
- " model.hidden_size=4096 \\\n",
- " model.ffn_hidden_size=14336 \\\n",
- " model.num_attention_heads=32 \\\n",
- " model.hidden_dropout=0.0 \\\n",
- " model.attention_dropout=0.0 \\\n",
- " model.apply_query_key_layer_scaling=True \\\n",
- " model.normalization='rmsnorm' \\\n",
- " model.bias=False \\\n",
- " model.activation='fast-swiglu' \\\n",
- " model.position_embedding_type='rope' \\\n",
- " model.share_embeddings_and_output_weights=False \\\n",
- " model.num_query_groups=8 \\\n",
- " ++model.scale_positional_embedding=True \\\n",
- " ++model.rotary_base=500000.0 \\\n",
- " \\\n",
- " model.optim.name=distributed_fused_adam \\\n",
- " model.optim.lr=${LR} \\\n",
- " model.optim.sched.min_lr=${MIN_LR} \\\n",
- " model.optim.sched.warmup_steps=${WARMUP_STEPS}"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "8aaf604a-efc0-4908-9055-5cf3bb0a05ae",
- "metadata": {},
- "source": [
- "This will create a finetuned teacher model named `megatron_llama_ft.nemo` in `./distill_trainings/megatron_llama_ft/checkpoints/`. We'll use this later.\n",
- "> `NOTE:` This script takes at least 20 minutes to run (depending on GPU) and will generate the finetuned teacher model."
- ]
- },
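- {
- "cell_type": "code",
- "execution_count": null,
- "id": "check-finetuned-teacher",
- "metadata": {},
- "outputs": [],
- "source": [
- "# Optional check (a sketch, not part of the original tutorial): confirm the\n",
- "# finetuned teacher checkpoint was written where the following steps expect it.\n",
- "!ls -lh ./distill_trainings/megatron_llama_ft/checkpoints/"
- ]
- },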
- {
- "cell_type": "markdown",
- "id": "2709ccc0-bbb8-44ba-b00d-15b1dc5d60a7",
- "metadata": {},
- "source": [
- "### Step 3: Prune the finetuned-teacher model to create a student\n",
- "\n",
- "The next step is to trim the last 16 layers of the finetuned teacher model. In this notebook, we use depth-pruning via the [megatron_gpt_drop_layers](https://github.com/NVIDIA/NeMo/blob/main/examples/nlp/language_modeling/megatron_gpt_drop_layers.py) script. \n",
- "> `NOTE:` In the block of code below, pass the path to your finetuned teacher .nemo model."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "a9715a1b-7a23-437f-b5e1-feec8e6c68e0",
- "metadata": {
- "scrolled": true,
- "tags": []
- },
- "outputs": [],
- "source": [
- "!python -m torch.distributed.launch --nproc_per_node=8 \\\n",
- " /opt/NeMo/examples/nlp/language_modeling/megatron_gpt_drop_layers.py \\\n",
- " --path_to_nemo \"./distill_trainings/megatron_llama_ft/checkpoints/megatron_llama_ft.nemo\" \\\n",
- " --path_to_save \"/workspace/4b_trimmed_model.nemo\" \\\n",
- " --tensor_model_parallel_size 8 \\\n",
- " --pipeline_model_parallel_size 1 \\\n",
- " --gpus_per_node 8 \\\n",
- " --drop_layers 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "1e9553db-9478-4074-9de1-1fa01a0e835c",
- "metadata": {},
- "source": [
- "Running this script will save the depth-pruned model `4b_trimmed_model.nemo` to your workspace."
- ]
- },
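- {
- "cell_type": "markdown",
- "id": "depth-pruning-math-md",
- "metadata": {},
- "source": [
- "As a rough sanity check on the \"4B\" label, the cell below estimates the parameter count before and after dropping 16 of the 32 layers. It is a back-of-the-envelope sketch that uses the architecture values from the training command above plus Llama-3.1's 128,256-token vocabulary, and it ignores norm parameters."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "depth-pruning-math",
- "metadata": {},
- "outputs": [],
- "source": [
- "# Per-layer parameters of Llama-3.1-8B: GQA attention + SwiGLU MLP (biases/norms ignored).\n",
- "hidden, ffn, vocab = 4096, 14336, 128256\n",
- "heads, kv_groups = 32, 8\n",
- "head_dim = hidden // heads\n",
- "\n",
- "attn = 2 * hidden * hidden + 2 * hidden * (kv_groups * head_dim)  # Q and O, plus K and V\n",
- "mlp = 3 * hidden * ffn  # gate, up, and down projections\n",
- "per_layer = attn + mlp\n",
- "emb = 2 * vocab * hidden  # untied input embeddings + output projection\n",
- "\n",
- "print(f\"32 layers: ~{(32 * per_layer + emb) / 1e9:.2f}B parameters\")\n",
- "print(f\"16 layers: ~{(16 * per_layer + emb) / 1e9:.2f}B parameters\")"
- ]
- },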
- {
- "cell_type": "markdown",
- "id": "b8ada696-5d77-4113-9d15-a603113fdd58",
- "metadata": {},
- "source": [
- "\n",
- "### Step 4: Distill knowledge from teacher into student\n",
- "\n",
- "Distillation of a model with the NeMo Framework is likewise done with a Python script: [megatron_gpt_distillation.py](https://github.com/NVIDIA/NeMo/blob/main/examples/nlp/language_modeling/megatron_gpt_distillation.py). \n",
- "\n",
- "For this demonstration, the `TEACHER` is the finetuned teacher model `megatron_llama_ft.nemo` and the `STUDENT` is the pruned 4B model `4b_trimmed_model.nemo`. The training run is capped at `STEPS` steps, and validation is carried out every `VAL_INTERVAL` steps.\n",
- "\n",
- "> `NOTE:` In the block of code below, pass the paths to your pre-processed train, test, and validation data files, as well as the paths to the teacher and student .nemo models."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "61c0c69d-9401-4355-8725-78aa72eee8da",
- "metadata": {
- "scrolled": true,
- "tags": []
- },
- "outputs": [],
- "source": [
- "%%bash \n",
- "\n",
- "export CUDA_DEVICE_MAX_CONNECTIONS=1\n",
- "\n",
- "\n",
- "# Can change these to accommodate resources:\n",
- "\n",
- "TENSOR_PARALLEL_SIZE=8\n",
- "NODES=1\n",
- "MICRO_BATCH_SIZE=4\n",
- "\n",
- "# Don't change the following:\n",
- "\n",
- "EXPERIMENT_DIR=\"distill_trainings\"\n",
- "EXPERIMENT_NAME=\"megatron_llama_distill\"\n",
- "\n",
- "TEACHER=\"${EXPERIMENT_DIR}/megatron_llama_ft/checkpoints/megatron_llama_ft.nemo\"\n",
- "STUDENT=\"/workspace/4b_trimmed_model.nemo\"\n",
- "\n",
- "FINAL_MODEL_PATH=\"${EXPERIMENT_DIR}/${EXPERIMENT_NAME}/checkpoints/distilled_4b_model.nemo\"\n",
- "\n",
- "DATA_TRAIN='wikitext_tokenized_train_text_document'\n",
- "DATA_VAL='wikitext_tokenized_test_text_document'\n",
- "DATA_TEST='wikitext_tokenized_val_text_document'\n",
- "\n",
- "STEPS=30\n",
- "GLOBAL_BATCH_SIZE=128\n",
- "\n",
- "LOG_INTERVAL=1\n",
- "VAL_INTERVAL=10\n",
- "NUM_VAL_BATCHES=5\n",
- "\n",
- "LR=1e-4\n",
- "MIN_LR=1e-5\n",
- "WARMUP_STEPS=2\n",
- "\n",
- "\n",
- "cmd=\"torchrun --nproc-per-node=${TENSOR_PARALLEL_SIZE}\"\n",
- "\n",
- "${cmd} /opt/NeMo/examples/nlp/language_modeling/megatron_gpt_distillation.py \\\n",
- " name=${EXPERIMENT_NAME} \\\n",
- " \\\n",
- " exp_manager.exp_dir=${EXPERIMENT_DIR} \\\n",
- " exp_manager.checkpoint_callback_params.save_top_k=1 \\\n",
- " \\\n",
- " trainer.max_steps=${STEPS} \\\n",
- " trainer.log_every_n_steps=${LOG_INTERVAL} \\\n",
- " trainer.val_check_interval=${VAL_INTERVAL} \\\n",
- " trainer.limit_val_batches=${NUM_VAL_BATCHES} \\\n",
- " +trainer.num_sanity_val_steps=0 \\\n",
- " \\\n",
- " trainer.precision=bf16 \\\n",
- " trainer.devices=${TENSOR_PARALLEL_SIZE} \\\n",
- " trainer.num_nodes=${NODES} \\\n",
- " \\\n",
- " \"model.data.data_prefix={train:[1.0,$DATA_TRAIN],validation:[$DATA_VAL],test:[$DATA_TEST]}\" \\\n",
- " \\\n",
- " model.restore_from_path=${STUDENT} \\\n",
- " model.kd_teacher_restore_from_path=${TEACHER} \\\n",
- " model.nemo_path=${FINAL_MODEL_PATH} \\\n",
- " \\\n",
- " model.tensor_model_parallel_size=${TENSOR_PARALLEL_SIZE} \\\n",
- " model.sequence_parallel=True \\\n",
- " model.micro_batch_size=${MICRO_BATCH_SIZE} \\\n",
- " model.global_batch_size=${GLOBAL_BATCH_SIZE} \\\n",
- " \\\n",
- " model.optim.name=distributed_fused_adam \\\n",
- " model.optim.lr=${LR} \\\n",
- " model.optim.sched.min_lr=${MIN_LR} \\\n",
- " model.optim.sched.warmup_steps=${WARMUP_STEPS}\n"
- ]
- },
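- {
- "cell_type": "markdown",
- "id": "kd-loss-sketch-md",
- "metadata": {},
- "source": [
- "Conceptually, logits distillation trains the student to match the teacher's output distribution. The cell below is a generic PyTorch sketch of such a loss, for intuition only; it is not NeMo's actual implementation, which lives in `megatron_gpt_distillation.py`."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "kd-loss-sketch",
- "metadata": {},
- "outputs": [],
- "source": [
- "# Generic knowledge-distillation loss sketch: KL(teacher || student) on\n",
- "# temperature-softened logits. Illustrative only, not NeMo's exact loss.\n",
- "import torch\n",
- "import torch.nn.functional as F\n",
- "\n",
- "def kd_loss(student_logits, teacher_logits, temperature=1.0):\n",
- "    log_p_student = F.log_softmax(student_logits / temperature, dim=-1)\n",
- "    p_teacher = F.softmax(teacher_logits / temperature, dim=-1)\n",
- "    return F.kl_div(log_p_student, p_teacher, reduction=\"batchmean\") * temperature**2\n",
- "\n",
- "# Tiny usage example with random logits over a 16-token vocabulary:\n",
- "student, teacher = torch.randn(4, 16), torch.randn(4, 16)\n",
- "print(kd_loss(student, teacher))"
- ]
- },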
- {
- "cell_type": "markdown",
- "id": "fe7034ba-8c69-4edb-8c0f-84fdca43c152",
- "metadata": {},
- "source": [
- "This will create the final distilled model named `distilled_4b_model.nemo` in `./distill_trainings/megatron_llama_distill/checkpoints`.\n",
- "> `NOTE:` This script takes at least 35 minutes to run and will generate the final distilled model."
- ]
- },
- {
- "cell_type": "markdown",
- "id": "c9a66d44-5028-47f9-9df3-9f07692e9461",
- "metadata": {},
- "source": [
- "### Step 5: Display the validation loss\n",
- "\n",
- "Now that the results are in, let's visualize the validation loss of the distilled model using the `tensorboard` library. \n",
- "> `NOTE:` This notebook demonstrates the use of the teacher fine-tuning, pruning, and distillation scripts. Ideally, these scripts should be run on a multi-node cluster with a larger `GLOBAL_BATCH_SIZE` and more `STEPS` to see a clear improvement in the validation loss."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "be4da14c-c03f-4c28-accd-8f676dbef8a9",
- "metadata": {},
- "outputs": [],
- "source": [
- "%load_ext tensorboard\n",
- "%tensorboard --logdir \"distill_trainings/megatron_llama_distill/\" --port=6007"
- ]
- },
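- {
- "cell_type": "markdown",
- "id": "read-val-loss-md",
- "metadata": {},
- "source": [
- "If the TensorBoard UI is inconvenient in your environment, the logged scalars can also be read programmatically. The cell below is a sketch using TensorBoard's `EventAccumulator`; the `val_loss` tag name is an assumption and may differ depending on the experiment-manager configuration."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "read-val-loss",
- "metadata": {},
- "outputs": [],
- "source": [
- "# Sketch: read logged scalars directly from the TensorBoard event files.\n",
- "import glob\n",
- "\n",
- "from tensorboard.backend.event_processing.event_accumulator import EventAccumulator\n",
- "\n",
- "for event_file in glob.glob(\"distill_trainings/megatron_llama_distill/**/events.*\", recursive=True):\n",
- "    ea = EventAccumulator(event_file)\n",
- "    ea.Reload()\n",
- "    if \"val_loss\" in ea.Tags().get(\"scalars\", []):  # tag name is an assumption\n",
- "        for s in ea.Scalars(\"val_loss\"):\n",
- "            print(f\"step {s.step}: val_loss = {s.value:.3f}\")"
- ]
- },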
- {
- "cell_type": "markdown",
- "id": "08c63b80-0f24-4dde-b5d6-11db444726ed",
- "metadata": {},
- "source": [
- "Here is an image of the validation loss over the 30 training steps run by the distillation script."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 4,
- "id": "648424fc-6a51-43ca-8f19-6ad05f949054",
- "metadata": {
- "tags": []
- },
- "outputs": [
- {
- "data": {
- "text/html": [
- ""
- ],
- "text/plain": [
- ""
- ]
- },
- "metadata": {},
- "output_type": "display_data"
- }
- ],
- "source": [
- "from IPython.display import Image, display\n",
- "display(Image(url=\"https://github.com/NVIDIA/NeMo/releases/download/r2.0.0rc1/val_loss_distillation.png\", width=400))"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3 (ipykernel)",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.10.12"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
diff --git a/tutorials/nlp/ITN_with_Thutmose_Tagger.ipynb b/tutorials/nlp/ITN_with_Thutmose_Tagger.ipynb
index 6204bf2516bb..b028b2d5c190 100644
--- a/tutorials/nlp/ITN_with_Thutmose_Tagger.ipynb
+++ b/tutorials/nlp/ITN_with_Thutmose_Tagger.ipynb
@@ -249,7 +249,7 @@
"\n",
"import wget \n",
"import torch\n",
- "import pytorch_lightning as pl\n",
+ "import lightning.pytorch as pl\n",
"from omegaconf import OmegaConf\n",
"import pandas as pd"
]
diff --git a/tutorials/nlp/Punctuation_and_Capitalization.ipynb b/tutorials/nlp/Punctuation_and_Capitalization.ipynb
index f88c33fada34..cbdab3941b6f 100644
--- a/tutorials/nlp/Punctuation_and_Capitalization.ipynb
+++ b/tutorials/nlp/Punctuation_and_Capitalization.ipynb
@@ -72,7 +72,7 @@
"import os\n",
"import wget \n",
"import torch\n",
- "import pytorch_lightning as pl\n",
+ "import lightning.pytorch as pl\n",
"from omegaconf import OmegaConf"
]
},
diff --git a/tutorials/nlp/Punctuation_and_Capitalization_Lexical_Audio.ipynb b/tutorials/nlp/Punctuation_and_Capitalization_Lexical_Audio.ipynb
index 2afbb19c0e66..51d3a66c91fc 100644
--- a/tutorials/nlp/Punctuation_and_Capitalization_Lexical_Audio.ipynb
+++ b/tutorials/nlp/Punctuation_and_Capitalization_Lexical_Audio.ipynb
@@ -74,7 +74,7 @@
"import os\n",
"import wget\n",
"import torch\n",
- "import pytorch_lightning as pl\n",
+ "import lightning.pytorch as pl\n",
"from omegaconf import OmegaConf"
]
},
diff --git a/tutorials/nlp/Relation_Extraction-BioMegatron.ipynb b/tutorials/nlp/Relation_Extraction-BioMegatron.ipynb
index d6b1e98b428e..3c9e427e7e09 100644
--- a/tutorials/nlp/Relation_Extraction-BioMegatron.ipynb
+++ b/tutorials/nlp/Relation_Extraction-BioMegatron.ipynb
@@ -71,7 +71,7 @@
"import os\n",
"import wget\n",
"import torch\n",
- "import pytorch_lightning as pl\n",
+ "import lightning.pytorch as pl\n",
"from omegaconf import OmegaConf"
]
},
diff --git a/tutorials/nlp/Text_Classification_Sentiment_Analysis.ipynb b/tutorials/nlp/Text_Classification_Sentiment_Analysis.ipynb
index fdcff979ea46..0ed846881d02 100644
--- a/tutorials/nlp/Text_Classification_Sentiment_Analysis.ipynb
+++ b/tutorials/nlp/Text_Classification_Sentiment_Analysis.ipynb
@@ -58,7 +58,7 @@
"import os\n",
"import wget \n",
"import torch\n",
- "import pytorch_lightning as pl\n",
+ "import lightning.pytorch as pl\n",
"from omegaconf import OmegaConf"
]
},
diff --git a/tutorials/nlp/Token_Classification-BioMegatron.ipynb b/tutorials/nlp/Token_Classification-BioMegatron.ipynb
index 85cb769b28c0..a59eae67dde1 100644
--- a/tutorials/nlp/Token_Classification-BioMegatron.ipynb
+++ b/tutorials/nlp/Token_Classification-BioMegatron.ipynb
@@ -45,7 +45,7 @@
"import os\n",
"import wget \n",
"import torch\n",
- "import pytorch_lightning as pl\n",
+ "import lightning.pytorch as pl\n",
"from omegaconf import OmegaConf"
]
},
diff --git a/tutorials/nlp/Token_Classification_Named_Entity_Recognition.ipynb b/tutorials/nlp/Token_Classification_Named_Entity_Recognition.ipynb
index 3ab98f6c19fd..4c34c293dcca 100644
--- a/tutorials/nlp/Token_Classification_Named_Entity_Recognition.ipynb
+++ b/tutorials/nlp/Token_Classification_Named_Entity_Recognition.ipynb
@@ -94,7 +94,7 @@
"import os\n",
"import wget \n",
"import torch\n",
- "import pytorch_lightning as pl\n",
+ "import lightning.pytorch as pl\n",
"from omegaconf import OmegaConf"
],
"execution_count": null,
diff --git a/tutorials/nlp/Zero_Shot_Intent_Recognition.ipynb b/tutorials/nlp/Zero_Shot_Intent_Recognition.ipynb
index 7f1baf536d87..b1eca63b8fd1 100644
--- a/tutorials/nlp/Zero_Shot_Intent_Recognition.ipynb
+++ b/tutorials/nlp/Zero_Shot_Intent_Recognition.ipynb
@@ -66,7 +66,7 @@
"from nemo.utils import logging\n",
"from omegaconf import OmegaConf\n",
"import pandas as pd\n",
- "import pytorch_lightning as pl\n",
+ "import lightning.pytorch as pl\n",
"import torch\n",
"import wget "
]
diff --git a/tutorials/nlp/lora.ipynb b/tutorials/nlp/lora.ipynb
index c67fa6c2de15..0429dd7f053c 100644
--- a/tutorials/nlp/lora.ipynb
+++ b/tutorials/nlp/lora.ipynb
@@ -422,7 +422,7 @@
"source": [
"from nemo.collections.nlp.parts.nlp_overrides import NLPDDPStrategy\n",
"import torch\n",
- "import pytorch_lightning as pl\n",
+ "import lightning.pytorch as pl\n",
"from nemo.collections.nlp.parts.megatron_trainer_builder import MegatronTrainerBuilder\n",
"\n",
"# let's modify some trainer configs\n",
diff --git a/tutorials/speaker_tasks/Speaker_Diarization_Inference.ipynb b/tutorials/speaker_tasks/Speaker_Diarization_Inference.ipynb
index 1fd0f1b140d5..439e28496715 100644
--- a/tutorials/speaker_tasks/Speaker_Diarization_Inference.ipynb
+++ b/tutorials/speaker_tasks/Speaker_Diarization_Inference.ipynb
@@ -21,6 +21,7 @@
"!pip install wget\n",
"!apt-get install sox libsndfile1 ffmpeg\n",
"!pip install text-unidecode\n",
+ "!pip install ipython\n",
"\n",
"# ## Install NeMo\n",
"BRANCH = 'main'\n",
diff --git a/tutorials/speaker_tasks/Speaker_Diarization_Training.ipynb b/tutorials/speaker_tasks/Speaker_Diarization_Training.ipynb
index 7db905b6d225..c193e6600666 100644
--- a/tutorials/speaker_tasks/Speaker_Diarization_Training.ipynb
+++ b/tutorials/speaker_tasks/Speaker_Diarization_Training.ipynb
@@ -777,7 +777,7 @@
"metadata": {},
"outputs": [],
"source": [
- "import pytorch_lightning as pl\n",
+ "import lightning.pytorch as pl\n",
"from nemo.collections.asr.models import EncDecDiarLabelModel\n",
"from nemo.utils.exp_manager import exp_manager\n",
"\n",
diff --git a/tutorials/speaker_tasks/Speaker_Identification_Verification.ipynb b/tutorials/speaker_tasks/Speaker_Identification_Verification.ipynb
index 27a01b894eae..c4f7fbaca67e 100644
--- a/tutorials/speaker_tasks/Speaker_Identification_Verification.ipynb
+++ b/tutorials/speaker_tasks/Speaker_Identification_Verification.ipynb
@@ -438,7 +438,7 @@
"outputs": [],
"source": [
"import torch\n",
- "import pytorch_lightning as pl"
+ "import lightning.pytorch as pl"
]
},
{
diff --git a/tutorials/tools/DefinedCrowd_x_NeMo_ASR_Training_Tutorial.ipynb b/tutorials/tools/DefinedCrowd_x_NeMo_ASR_Training_Tutorial.ipynb
index afd202f99d4a..8b0114690540 100644
--- a/tutorials/tools/DefinedCrowd_x_NeMo_ASR_Training_Tutorial.ipynb
+++ b/tutorials/tools/DefinedCrowd_x_NeMo_ASR_Training_Tutorial.ipynb
@@ -1636,7 +1636,7 @@
"outputId": "67209ee3-5161-40dc-a179-83d8219c3d71"
},
"source": [
- "import pytorch_lightning as pl\n",
+ "import lightning.pytorch as pl\n",
"from omegaconf import DictConfig\n",
"import copy\n",
"\n",
diff --git a/tutorials/tts/Tacotron2_Training.ipynb b/tutorials/tts/Tacotron2_Training.ipynb
index 79546bb79db9..edc814cf12ec 100644
--- a/tutorials/tts/Tacotron2_Training.ipynb
+++ b/tutorials/tts/Tacotron2_Training.ipynb
@@ -178,7 +178,7 @@
"Let's take a look at the tacotron2.py file\n",
"\n",
"```python\n",
- "import pytorch_lightning as pl\n",
+ "import lightning.pytorch as pl\n",
"\n",
"from nemo.collections.common.callbacks import LogEpochTimeCallback\n",
"from nemo.collections.tts.models import Tacotron2Model\n",
diff --git a/tutorials/tts/Vits_Training.ipynb b/tutorials/tts/Vits_Training.ipynb
index 9d3919e8dc6a..060c6bda43bb 100644
--- a/tutorials/tts/Vits_Training.ipynb
+++ b/tutorials/tts/Vits_Training.ipynb
@@ -191,7 +191,7 @@
"Let's take a look at the vits.py file\n",
"\n",
"```python\n",
- "import pytorch_lightning as pl\n",
+ "import lightning.pytorch as pl\n",
"\n",
"from nemo.collections.tts.models.vits import VitsModel\n",
"from nemo.core.config import hydra_runner\n",