Upload 48 files
Browse files- .gitattributes +1 -0
- LICENSE +21 -0
- analysis/SA_Score/README.md +1 -0
- analysis/SA_Score/fpscores.pkl.gz +3 -0
- analysis/SA_Score/sascorer.py +173 -0
- analysis/docking.py +197 -0
- analysis/docking_py27.py +27 -0
- analysis/metrics.py +251 -0
- analysis/molecule_builder.py +250 -0
- analysis/visualization.py +472 -0
- colab/DiffSBDD.ipynb +468 -0
- configs/crossdock_ca_cond.yml +62 -0
- configs/crossdock_ca_joint.yml +62 -0
- configs/crossdock_fullatom_cond.yml +62 -0
- configs/crossdock_fullatom_joint.yml +62 -0
- configs/moad_ca_cond.yml +62 -0
- configs/moad_ca_joint.yml +62 -0
- configs/moad_fullatom_cond.yml +63 -0
- configs/moad_fullatom_joint.yml +63 -0
- constants.py +183 -0
- data/moad_test.txt +1 -0
- data/moad_train.txt +0 -0
- data/moad_val.txt +1 -0
- data/prepare_crossdocked.py +55 -0
- data/timesplit_no_lig_or_rec_overlap_train +14037 -0
- data/timesplit_no_lig_or_rec_overlap_val +1223 -0
- data/timesplit_test +363 -0
- dataset.py +70 -0
- environment.yaml +29 -0
- equivariant_diffusion/conditional_model.py +746 -0
- equivariant_diffusion/dynamics.py +187 -0
- equivariant_diffusion/egnn_new.py +335 -0
- equivariant_diffusion/en_diffusion.py +1190 -0
- example/3rfm.pdb +0 -0
- example/3rfm_B_CFF.sdf +77 -0
- example/5ndu.pdb +0 -0
- example/5ndu_C_8V2.sdf +236 -0
- example/fragments.sdf +41 -0
- generate_ligands.py +60 -0
- geometry_utils.py +141 -0
- img/overview.png +3 -0
- inpaint.py +230 -0
- lightning_modules.py +914 -0
- optimize.py +249 -0
- process_bindingmoad.py +652 -0
- process_crossdock.py +443 -0
- test.py +176 -0
- train.py +122 -0
- utils.py +234 -0
.gitattributes
CHANGED
|
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
| 36 |
+
img/overview.png filter=lfs diff=lfs merge=lfs -text
|
LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
MIT License
|
| 2 |
+
|
| 3 |
+
Copyright (c) 2022 Arne Schneuing, Yuanqi Du, Charles Harris
|
| 4 |
+
|
| 5 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 6 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 7 |
+
in the Software without restriction, including without limitation the rights
|
| 8 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 9 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 10 |
+
furnished to do so, subject to the following conditions:
|
| 11 |
+
|
| 12 |
+
The above copyright notice and this permission notice shall be included in all
|
| 13 |
+
copies or substantial portions of the Software.
|
| 14 |
+
|
| 15 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 16 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 17 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 18 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 19 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 20 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 21 |
+
SOFTWARE.
|
analysis/SA_Score/README.md
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
Files taken from: https://github.com/rdkit/rdkit/tree/master/Contrib/SA_Score
|
analysis/SA_Score/fpscores.pkl.gz
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:10dcef9340c873e7b987924461b0af5365eb8dd96be607203debe8ddf80c1e73
|
| 3 |
+
size 3848394
|
analysis/SA_Score/sascorer.py
ADDED
|
@@ -0,0 +1,173 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# calculation of synthetic accessibility score as described in:
|
| 3 |
+
#
|
| 4 |
+
# Estimation of Synthetic Accessibility Score of Drug-like Molecules based on Molecular Complexity and Fragment Contributions
|
| 5 |
+
# Peter Ertl and Ansgar Schuffenhauer
|
| 6 |
+
# Journal of Cheminformatics 1:8 (2009)
|
| 7 |
+
# http://www.jcheminf.com/content/1/1/8
|
| 8 |
+
#
|
| 9 |
+
# several small modifications to the original paper are included
|
| 10 |
+
# particularly slightly different formula for macrocyclic penalty
|
| 11 |
+
# and taking into account also molecule symmetry (fingerprint density)
|
| 12 |
+
#
|
| 13 |
+
# for a set of 10k diverse molecules the agreement between the original method
|
| 14 |
+
# as implemented in PipelinePilot and this implementation is r2 = 0.97
|
| 15 |
+
#
|
| 16 |
+
# peter ertl & greg landrum, september 2013
|
| 17 |
+
#
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
from rdkit import Chem
|
| 21 |
+
from rdkit.Chem import rdMolDescriptors
|
| 22 |
+
import pickle
|
| 23 |
+
|
| 24 |
+
import math
|
| 25 |
+
from collections import defaultdict
|
| 26 |
+
|
| 27 |
+
import os.path as op
|
| 28 |
+
|
| 29 |
+
_fscores = None
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def readFragmentScores(name='fpscores'):
    """Load the fragment-score table into the module-level ``_fscores`` dict.

    Each row of the pickled data is a list where row[0] is the score and
    the remaining entries are fragment (bit) ids that share that score.

    :param name: base name of the gzipped pickle; the default 'fpscores'
        is resolved relative to this module's directory.
    """
    import gzip
    global _fscores
    # generate the full path filename:
    if name == "fpscores":
        name = op.join(op.dirname(__file__), name)
    # use a context manager so the gzip handle is closed promptly
    # (previously the file object from gzip.open() was never closed)
    with gzip.open('%s.pkl.gz' % name) as f:
        data = pickle.load(f)
    outDict = {}
    for i in data:
        for j in range(1, len(i)):
            outDict[i[j]] = float(i[0])
    _fscores = outDict
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def numBridgeheadsAndSpiro(mol, ri=None):
    """Return (bridgehead_count, spiro_count) for *mol*.

    The ``ri`` (ring info) argument is accepted for API compatibility but
    is not used by the RDKit descriptor calls.
    """
    spiro_count = rdMolDescriptors.CalcNumSpiroAtoms(mol)
    bridgehead_count = rdMolDescriptors.CalcNumBridgeheadAtoms(mol)
    return bridgehead_count, spiro_count
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def calculateScore(m):
    """Compute the synthetic accessibility (SA) score of an RDKit molecule.

    Returns a float in [1, 10]: ~1 means easy to synthesize, ~10 very hard.
    Lazily loads the fragment-score table on first call.
    """
    if _fscores is None:
        readFragmentScores()

    # fragment score (radius-2 Morgan fingerprint)
    fp = rdMolDescriptors.GetMorganFingerprint(m,
                                               2)  # <- 2 is the *radius* of the circular fingerprint
    fps = fp.GetNonzeroElements()
    score1 = 0.
    nf = 0
    for bitId, v in fps.items():
        nf += v
        # fragments missing from the table get the worst contribution (-4)
        score1 += _fscores.get(bitId, -4) * v
    score1 /= nf

    # features score
    nAtoms = m.GetNumAtoms()
    nChiralCenters = len(Chem.FindMolChiralCenters(m, includeUnassigned=True))
    ri = m.GetRingInfo()
    nBridgeheads, nSpiro = numBridgeheadsAndSpiro(m, ri)
    nMacrocycles = 0
    for x in ri.AtomRings():
        if len(x) > 8:
            nMacrocycles += 1

    sizePenalty = nAtoms**1.005 - nAtoms
    stereoPenalty = math.log10(nChiralCenters + 1)
    spiroPenalty = math.log10(nSpiro + 1)
    bridgePenalty = math.log10(nBridgeheads + 1)
    macrocyclePenalty = 0.
    # ---------------------------------------
    # This differs from the paper, which defines:
    #   macrocyclePenalty = math.log10(nMacrocycles+1)
    # This form generates better results when 2 or more macrocycles are present
    if nMacrocycles > 0:
        macrocyclePenalty = math.log10(2)

    score2 = (0. - sizePenalty - stereoPenalty - spiroPenalty
              - bridgePenalty - macrocyclePenalty)

    # correction for the fingerprint density
    # not in the original publication, added in version 1.1
    # to make highly symmetrical molecules easier to synthetise
    score3 = 0.
    if nAtoms > len(fps):
        score3 = math.log(float(nAtoms) / len(fps)) * .5

    sascore = score1 + score2 + score3

    # need to transform "raw" value into scale between 1 and 10
    # (renamed from `min`/`max` to avoid shadowing the builtins)
    raw_min = -4.0
    raw_max = 2.5
    sascore = 11. - (sascore - raw_min + 1) / (raw_max - raw_min) * 9.
    # smooth the 10-end
    if sascore > 8.:
        sascore = 8. + math.log(sascore + 1. - 9.)
    if sascore > 10.:
        sascore = 10.0
    elif sascore < 1.:
        sascore = 1.0

    return sascore
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
def processMols(mols):
    """Print a tab-separated table of SMILES, name and SA score for every
    parseable molecule in *mols* (None entries are skipped)."""
    print('smiles\tName\tsa_score')
    for mol in mols:
        if mol is None:
            continue

        score = calculateScore(mol)

        smi = Chem.MolToSmiles(mol)
        print(smi + "\t" + mol.GetProp('_Name') + "\t%3f" % score)
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
if __name__ == '__main__':
    import sys
    import time

    # time the score-table load and the per-molecule work separately
    read_start = time.time()
    readFragmentScores("fpscores")
    read_end = time.time()

    suppl = Chem.SmilesMolSupplier(sys.argv[1])
    calc_start = time.time()
    processMols(suppl)
    calc_end = time.time()

    print('Reading took %.2f seconds. Calculating took %.2f seconds' %
          ((read_end - read_start), (calc_end - calc_start)),
          file=sys.stderr)
|
| 143 |
+
|
| 144 |
+
#
|
| 145 |
+
# Copyright (c) 2013, Novartis Institutes for BioMedical Research Inc.
|
| 146 |
+
# All rights reserved.
|
| 147 |
+
#
|
| 148 |
+
# Redistribution and use in source and binary forms, with or without
|
| 149 |
+
# modification, are permitted provided that the following conditions are
|
| 150 |
+
# met:
|
| 151 |
+
#
|
| 152 |
+
# * Redistributions of source code must retain the above copyright
|
| 153 |
+
# notice, this list of conditions and the following disclaimer.
|
| 154 |
+
# * Redistributions in binary form must reproduce the above
|
| 155 |
+
# copyright notice, this list of conditions and the following
|
| 156 |
+
# disclaimer in the documentation and/or other materials provided
|
| 157 |
+
# with the distribution.
|
| 158 |
+
# * Neither the name of Novartis Institutes for BioMedical Research Inc.
|
| 159 |
+
# nor the names of its contributors may be used to endorse or promote
|
| 160 |
+
# products derived from this software without specific prior written permission.
|
| 161 |
+
#
|
| 162 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 163 |
+
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 164 |
+
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 165 |
+
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 166 |
+
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 167 |
+
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 168 |
+
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 169 |
+
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 170 |
+
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 171 |
+
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 172 |
+
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 173 |
+
#
|
analysis/docking.py
ADDED
|
@@ -0,0 +1,197 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import re
|
| 3 |
+
import tempfile
|
| 4 |
+
import numpy as np
|
| 5 |
+
import torch
|
| 6 |
+
from pathlib import Path
|
| 7 |
+
import argparse
|
| 8 |
+
import pandas as pd
|
| 9 |
+
from rdkit import Chem
|
| 10 |
+
from tqdm import tqdm
|
| 11 |
+
|
| 12 |
+
try:
|
| 13 |
+
import utils
|
| 14 |
+
except ModuleNotFoundError as e:
|
| 15 |
+
print(e)
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def calculate_smina_score(pdb_file, sdf_file):
    """Run smina in --score_only mode and parse the reported affinities.

    Returns one float (kcal/mol) per ligand smina scored; empty list when
    no affinity lines are found in the output.
    """
    # add '-o <name>_smina.sdf' if you want to see the output
    output = os.popen(f'smina.static -l {sdf_file} -r {pdb_file} '
                      f'--score_only').read()
    affinities = re.findall(
        r"Affinity:[ ]+([+-]?[0-9]*[.]?[0-9]+)[ ]+\(kcal/mol\)", output)
    return [float(a) for a in affinities]
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def smina_score(rdmols, receptor_file):
    """
    Calculate smina score
    :param rdmols: List of RDKit molecules
    :param receptor_file: Receptor pdb/pdbqt file or list of receptor files
    :return: Smina score for each input molecule (list)
    """
    if isinstance(receptor_file, list):
        # one receptor per molecule: score each pair separately
        scores = []
        for mol, rec_file in zip(rdmols, receptor_file):
            with tempfile.NamedTemporaryFile(suffix='.sdf') as tmp:
                utils.write_sdf_file(tmp.name, [mol])
                scores.extend(calculate_smina_score(rec_file, tmp.name))
        return scores

    # Use same receptor file for all molecules
    with tempfile.NamedTemporaryFile(suffix='.sdf') as tmp:
        utils.write_sdf_file(tmp.name, rdmols)
        return calculate_smina_score(receptor_file, tmp.name)
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
def sdf_to_pdbqt(sdf_file, pdbqt_outfile, mol_id):
    """Convert molecule number *mol_id* (0-based) of an SDF file to PDBQT
    with OpenBabel and return the output path."""
    index = mol_id + 1  # obabel numbers molecules starting at 1
    os.popen(f'obabel {sdf_file} -O {pdbqt_outfile} '
             f'-f {index} -l {index}').read()
    return pdbqt_outfile
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
def calculate_qvina2_score(receptor_file, sdf_file, out_dir, size=20,
                           exhaustiveness=16, return_rdmol=False):
    """Dock every ligand in *sdf_file* against *receptor_file* with QuickVina 2.

    :param receptor_file: receptor as .pdb (converted via prepare_receptor4.py)
        or ready-made .pdbqt
    :param sdf_file: SDF file; may contain several ligands
    :param out_dir: directory for intermediate and output files
    :param size: edge length of the cubic search box passed to qvina
    :param exhaustiveness: qvina exhaustiveness parameter
    :param return_rdmol: if True, also return docked poses as RDKit molecules
    :return: list of best scores (one per ligand, NaN when docking failed),
        plus the list of docked RDKit molecules if return_rdmol is True
    """

    receptor_file = Path(receptor_file)
    sdf_file = Path(sdf_file)

    if receptor_file.suffix == '.pdb':
        # prepare receptor, requires Python 2.7
        # NOTE(review): os.popen without .read() does not wait for the
        # conversion to finish -- confirm the .pdbqt exists before docking
        receptor_pdbqt_file = Path(out_dir, receptor_file.stem + '.pdbqt')
        os.popen(f'prepare_receptor4.py -r {receptor_file} -O {receptor_pdbqt_file}')
    else:
        receptor_pdbqt_file = receptor_file

    scores = []
    rdmols = []  # for if return rdmols
    suppl = Chem.SDMolSupplier(str(sdf_file), sanitize=False)
    for i, mol in enumerate(suppl):  # sdf file may contain several ligands
        ligand_name = f'{sdf_file.stem}_{i}'
        # prepare ligand
        ligand_pdbqt_file = Path(out_dir, ligand_name + '.pdbqt')
        out_sdf_file = Path(out_dir, ligand_name + '_out.sdf')

        if out_sdf_file.exists():
            # cached result from a previous run: take the best (lowest)
            # VINA RESULT energy recorded in the output SDF
            with open(out_sdf_file, 'r') as f:
                scores.append(
                    min([float(x.split()[2]) for x in f.readlines()
                         if x.startswith(' VINA RESULT:')])
                )

        else:
            sdf_to_pdbqt(sdf_file, ligand_pdbqt_file, i)

            # center box at ligand's center of mass
            # NOTE(review): assumes mol is not None (SDF entry parsed) and
            # has a 3D conformer -- confirm for generated inputs
            cx, cy, cz = mol.GetConformer().GetPositions().mean(0)

            # run QuickVina 2
            out = os.popen(
                f'qvina2.1 --receptor {receptor_pdbqt_file} '
                f'--ligand {ligand_pdbqt_file} '
                f'--center_x {cx:.4f} --center_y {cy:.4f} --center_z {cz:.4f} '
                f'--size_x {size} --size_y {size} --size_z {size} '
                f'--exhaustiveness {exhaustiveness}'
            ).read()

            # clean up
            ligand_pdbqt_file.unlink()

            # qvina prints a table of binding modes; without this separator
            # line the run failed -> record NaN for this ligand
            if '-----+------------+----------+----------' not in out:
                scores.append(np.nan)
                continue

            # first row below the separator is the best mode
            out_split = out.splitlines()
            best_idx = out_split.index('-----+------------+----------+----------') + 1
            best_line = out_split[best_idx].split()
            assert best_line[0] == '1'
            scores.append(float(best_line[1]))

            # convert the docked pose back to SDF for caching / inspection
            out_pdbqt_file = Path(out_dir, ligand_name + '_out.pdbqt')
            if out_pdbqt_file.exists():
                os.popen(f'obabel {out_pdbqt_file} -O {out_sdf_file}').read()

                # clean up
                out_pdbqt_file.unlink()

        if return_rdmol:
            rdmol = Chem.SDMolSupplier(str(out_sdf_file))[0]
            rdmols.append(rdmol)

    if return_rdmol:
        return scores, rdmols
    else:
        return scores
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
if __name__ == '__main__':
    # Batch QuickVina evaluation: dock each ligand SDF against its receptor
    # PDBQT and optionally write results as CSV and/or a torch-saved dict.
    parser = argparse.ArgumentParser('QuickVina evaluation')
    parser.add_argument('--pdbqt_dir', type=Path,
                        help='Receptor files in pdbqt format')
    parser.add_argument('--sdf_dir', type=Path, default=None,
                        help='Ligand files in sdf format')
    parser.add_argument('--sdf_files', type=Path, nargs='+', default=None)
    parser.add_argument('--out_dir', type=Path)
    parser.add_argument('--write_csv', action='store_true')
    parser.add_argument('--write_dict', action='store_true')
    parser.add_argument('--dataset', type=str, default='moad')
    args = parser.parse_args()

    # exactly one of --sdf_dir / --sdf_files must be provided
    assert (args.sdf_dir is not None) ^ (args.sdf_files is not None)

    args.out_dir.mkdir(exist_ok=True)

    results = {'receptor': [], 'ligand': [], 'scores': []}
    results_dict = {}
    # '[!.]*.sdf' skips hidden files
    sdf_files = list(args.sdf_dir.glob('[!.]*.sdf')) \
        if args.sdf_dir is not None else args.sdf_files
    pbar = tqdm(sdf_files)
    for sdf_file in pbar:
        pbar.set_description(f'Processing {sdf_file.name}')

        if args.dataset == 'moad':
            """
            Ligand file names should be of the following form:
            <receptor-name>_<pocket-id>_<some-suffix>.sdf
            where <receptor-name> and <pocket-id> cannot contain any
            underscores, e.g.: 1abc-bio1_pocket0_gen.sdf
            """
            ligand_name = sdf_file.stem
            receptor_name, pocket_id, *suffix = ligand_name.split('_')
            suffix = '_'.join(suffix)
            receptor_file = Path(args.pdbqt_dir, receptor_name + '.pdbqt')
        elif args.dataset == 'crossdocked':
            # CrossDocked convention: ligand stem = receptor stem + 4 chars
            ligand_name = sdf_file.stem
            receptor_name = ligand_name[:-4]
            receptor_file = Path(args.pdbqt_dir, receptor_name + '.pdbqt')
        # NOTE(review): any other --dataset value leaves receptor_file and
        # ligand_name undefined (NameError below) -- consider raising here

        # try:
        scores, rdmols = calculate_qvina2_score(
            receptor_file, sdf_file, args.out_dir, return_rdmol=True)
        # except AttributeError as e:
        #     print(e)
        #     continue
        results['receptor'].append(str(receptor_file))
        results['ligand'].append(str(sdf_file))
        results['scores'].append(scores)

        if args.write_dict:
            # NOTE(review): 'rmdols' looks like a typo for 'rdmols'; kept
            # as-is because downstream readers must use the same key
            results_dict[ligand_name] = {
                'receptor': str(receptor_file),
                'ligand': str(sdf_file),
                'scores': scores,
                'rmdols': rdmols
            }

    if args.write_csv:
        df = pd.DataFrame.from_dict(results)
        df.to_csv(Path(args.out_dir, 'qvina2_scores.csv'))

    if args.write_dict:
        torch.save(results_dict, Path(args.out_dir, 'qvina2_scores.pt'))
|
analysis/docking_py27.py
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import sys
|
| 3 |
+
import glob
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def pdbs_to_pdbqts(pdb_dir, pdbqt_dir, dataset):
    """Convert every .pdb file found in *pdb_dir* to a .pdbqt file of the
    same stem in *pdbqt_dir*, using the dataset-specific recipe."""
    pattern = os.path.join(pdb_dir, '*.pdb')
    for pdb_path in glob.glob(pattern):
        stem = os.path.splitext(os.path.basename(pdb_path))[0]
        target = os.path.join(pdbqt_dir, stem + '.pdbqt')
        pdb_to_pdbqt(pdb_path, target, dataset)
        print('Wrote converted file to {}'.format(target))
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def pdb_to_pdbqt(pdb_file, pdbqt_file, dataset):
    """Convert one receptor PDB file to PDBQT via prepare_receptor4.py.

    Skips the conversion when the target already exists. Raises
    NotImplementedError for unknown dataset names.
    """
    if os.path.exists(pdbqt_file):
        return pdbqt_file
    if dataset == 'crossdocked':
        command = 'prepare_receptor4.py -r {} -o {}'.format(pdb_file, pdbqt_file)
    elif dataset == 'bindingmoad':
        command = 'prepare_receptor4.py -r {} -o {} -A checkhydrogens -e'.format(pdb_file, pdbqt_file)
    else:
        raise NotImplementedError
    os.system(command)
    return pdbqt_file
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
if __name__ == '__main__':
    # CLI entry point: python docking_py27.py <pdb_dir> <pdbqt_dir> <dataset>
    argv = sys.argv
    pdbs_to_pdbqts(argv[1], argv[2], argv[3])
|
analysis/metrics.py
ADDED
|
@@ -0,0 +1,251 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
from tqdm import tqdm
|
| 3 |
+
from rdkit import Chem, DataStructs
|
| 4 |
+
from rdkit.Chem import Descriptors, Crippen, Lipinski, QED
|
| 5 |
+
from analysis.SA_Score.sascorer import calculateScore
|
| 6 |
+
|
| 7 |
+
from analysis.molecule_builder import build_molecule
|
| 8 |
+
from copy import deepcopy
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class CategoricalDistribution:
    """Reference categorical distribution built from a histogram dict.

    Compares generated samples against a dataset distribution through an
    (epsilon-stabilized) KL divergence.
    """
    EPS = 1e-10

    def __init__(self, histogram_dict, mapping):
        counts = np.zeros(len(mapping))
        for key, count in histogram_dict.items():
            counts[mapping[key]] = count

        # Normalize histogram
        self.p = counts / counts.sum()
        self.mapping = deepcopy(mapping)

    def kl_divergence(self, other_sample):
        """Approximate KL(p || q), with q estimated from *other_sample*
        (a sequence of already-mapped category indices)."""
        sample_counts = np.zeros(len(self.mapping))
        for idx in other_sample:
            # entries are assumed to be indices already (not raw keys)
            sample_counts[idx] += 1

        # Normalize
        q = sample_counts / sample_counts.sum()

        return -np.sum(self.p * np.log(q / self.p + self.EPS))
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def rdmol_to_smiles(rdmol):
    """Canonical SMILES of *rdmol*, without stereochemistry or explicit Hs.

    Works on a copy so the input molecule is left untouched.
    """
    working_copy = Chem.Mol(rdmol)
    Chem.RemoveStereochemistry(working_copy)
    working_copy = Chem.RemoveHs(working_copy)
    return Chem.MolToSmiles(working_copy)
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
class BasicMolecularMetrics(object):
    """Validity / connectivity / uniqueness / novelty metrics for generated
    molecules.

    Args:
        dataset_info: dict describing the dataset; must contain 'atom_decoder'.
        dataset_smiles_list: training-set SMILES used for the novelty metric;
            stored as a set for O(1) membership tests. May be None.
        connectivity_thresh: minimum fraction of atoms the largest fragment
            must contain for a molecule to count as connected.
    """
    def __init__(self, dataset_info, dataset_smiles_list=None,
                 connectivity_thresh=1.0):
        self.atom_decoder = dataset_info['atom_decoder']
        if dataset_smiles_list is not None:
            dataset_smiles_list = set(dataset_smiles_list)
        self.dataset_smiles_list = dataset_smiles_list
        self.dataset_info = dataset_info
        self.connectivity_thresh = connectivity_thresh

    def compute_validity(self, generated):
        """Return (valid_mols, validity_ratio); a molecule is valid if RDKit
        sanitization succeeds."""
        if len(generated) < 1:
            return [], 0.0

        valid = []
        for mol in generated:
            try:
                Chem.SanitizeMol(mol)
            except ValueError:
                continue

            valid.append(mol)

        return valid, len(valid) / len(generated)

    def compute_connectivity(self, valid):
        """ Consider molecule connected if its largest fragment contains at
        least x% of all atoms, where x is determined by
        self.connectivity_thresh (defaults to 100%).

        Returns (connected_mols, connectivity_ratio, connected_smiles). """
        if len(valid) < 1:
            # BUGFIX: previously returned a 2-tuple here, breaking the
            # 3-way unpacking in evaluate_rdmols() for empty input
            return [], 0.0, []

        connected = []
        connected_smiles = []
        for mol in valid:
            mol_frags = Chem.rdmolops.GetMolFrags(mol, asMols=True)
            largest_mol = \
                max(mol_frags, default=mol, key=lambda m: m.GetNumAtoms())
            if largest_mol.GetNumAtoms() / mol.GetNumAtoms() >= self.connectivity_thresh:
                smiles = rdmol_to_smiles(largest_mol)
                if smiles is not None:
                    connected_smiles.append(smiles)
                    connected.append(largest_mol)

        return connected, len(connected_smiles) / len(valid), connected_smiles

    def compute_uniqueness(self, connected):
        """Return (unique_smiles, uniqueness_ratio); *connected* is a list of
        SMILES strings. Yields ([], 0.0) when no reference list is set."""
        if len(connected) < 1 or self.dataset_smiles_list is None:
            return [], 0.0

        return list(set(connected)), len(set(connected)) / len(connected)

    def compute_novelty(self, unique):
        """Return (novel_smiles, novelty_ratio) w.r.t. the dataset SMILES."""
        if len(unique) < 1:
            return [], 0.0

        num_novel = 0
        novel = []
        for smiles in unique:
            if smiles not in self.dataset_smiles_list:
                novel.append(smiles)
                num_novel += 1
        return novel, num_novel / len(unique)

    def evaluate_rdmols(self, rdmols):
        """Run the full metric cascade on RDKit molecules, print a summary
        and return ([validity, connectivity, uniqueness, novelty],
        [valid_mols, connected_mols])."""
        valid, validity = self.compute_validity(rdmols)
        print(f"Validity over {len(rdmols)} molecules: {validity * 100 :.2f}%")

        connected, connectivity, connected_smiles = \
            self.compute_connectivity(valid)
        print(f"Connectivity over {len(valid)} valid molecules: "
              f"{connectivity * 100 :.2f}%")

        unique, uniqueness = self.compute_uniqueness(connected_smiles)
        print(f"Uniqueness over {len(connected)} connected molecules: "
              f"{uniqueness * 100 :.2f}%")

        _, novelty = self.compute_novelty(unique)
        print(f"Novelty over {len(unique)} unique connected molecules: "
              f"{novelty * 100 :.2f}%")

        return [validity, connectivity, uniqueness, novelty], [valid, connected]

    def evaluate(self, generated):
        """ generated: list of pairs (positions: n x 3, atom_types: n [int])
        the positions and atom types should already be masked. """

        rdmols = [build_molecule(*graph, self.dataset_info)
                  for graph in generated]
        return self.evaluate_rdmols(rdmols)
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
class MoleculeProperties:
|
| 137 |
+
|
| 138 |
+
    @staticmethod
    def calculate_qed(rdmol):
        # Quantitative Estimate of Drug-likeness in [0, 1]; higher is better
        return QED.qed(rdmol)
|
| 141 |
+
|
| 142 |
+
@staticmethod
|
| 143 |
+
def calculate_sa(rdmol):
|
| 144 |
+
sa = calculateScore(rdmol)
|
| 145 |
+
return round((10 - sa) / 9, 2) # from pocket2mol
|
| 146 |
+
|
| 147 |
+
    @staticmethod
    def calculate_logp(rdmol):
        # Wildman-Crippen LogP estimate
        return Crippen.MolLogP(rdmol)
|
| 150 |
+
|
| 151 |
+
@staticmethod
|
| 152 |
+
def calculate_lipinski(rdmol):
|
| 153 |
+
rule_1 = Descriptors.ExactMolWt(rdmol) < 500
|
| 154 |
+
rule_2 = Lipinski.NumHDonors(rdmol) <= 5
|
| 155 |
+
rule_3 = Lipinski.NumHAcceptors(rdmol) <= 10
|
| 156 |
+
rule_4 = (logp := Crippen.MolLogP(rdmol) >= -2) & (logp <= 5)
|
| 157 |
+
rule_5 = Chem.rdMolDescriptors.CalcNumRotatableBonds(rdmol) <= 10
|
| 158 |
+
return np.sum([int(a) for a in [rule_1, rule_2, rule_3, rule_4, rule_5]])
|
| 159 |
+
|
| 160 |
+
@classmethod
|
| 161 |
+
def calculate_diversity(cls, pocket_mols):
|
| 162 |
+
if len(pocket_mols) < 2:
|
| 163 |
+
return 0.0
|
| 164 |
+
|
| 165 |
+
div = 0
|
| 166 |
+
total = 0
|
| 167 |
+
for i in range(len(pocket_mols)):
|
| 168 |
+
for j in range(i + 1, len(pocket_mols)):
|
| 169 |
+
div += 1 - cls.similarity(pocket_mols[i], pocket_mols[j])
|
| 170 |
+
total += 1
|
| 171 |
+
return div / total
|
| 172 |
+
|
| 173 |
+
@staticmethod
|
| 174 |
+
def similarity(mol_a, mol_b):
|
| 175 |
+
# fp1 = AllChem.GetMorganFingerprintAsBitVect(
|
| 176 |
+
# mol_a, 2, nBits=2048, useChirality=False)
|
| 177 |
+
# fp2 = AllChem.GetMorganFingerprintAsBitVect(
|
| 178 |
+
# mol_b, 2, nBits=2048, useChirality=False)
|
| 179 |
+
fp1 = Chem.RDKFingerprint(mol_a)
|
| 180 |
+
fp2 = Chem.RDKFingerprint(mol_b)
|
| 181 |
+
return DataStructs.TanimotoSimilarity(fp1, fp2)
|
| 182 |
+
|
| 183 |
+
def evaluate(self, pocket_rdmols):
|
| 184 |
+
"""
|
| 185 |
+
Run full evaluation
|
| 186 |
+
Args:
|
| 187 |
+
pocket_rdmols: list of lists, the inner list contains all RDKit
|
| 188 |
+
molecules generated for a pocket
|
| 189 |
+
Returns:
|
| 190 |
+
QED, SA, LogP, Lipinski (per molecule), and Diversity (per pocket)
|
| 191 |
+
"""
|
| 192 |
+
|
| 193 |
+
for pocket in pocket_rdmols:
|
| 194 |
+
for mol in pocket:
|
| 195 |
+
Chem.SanitizeMol(mol)
|
| 196 |
+
assert mol is not None, "only evaluate valid molecules"
|
| 197 |
+
|
| 198 |
+
all_qed = []
|
| 199 |
+
all_sa = []
|
| 200 |
+
all_logp = []
|
| 201 |
+
all_lipinski = []
|
| 202 |
+
per_pocket_diversity = []
|
| 203 |
+
for pocket in tqdm(pocket_rdmols):
|
| 204 |
+
all_qed.append([self.calculate_qed(mol) for mol in pocket])
|
| 205 |
+
all_sa.append([self.calculate_sa(mol) for mol in pocket])
|
| 206 |
+
all_logp.append([self.calculate_logp(mol) for mol in pocket])
|
| 207 |
+
all_lipinski.append([self.calculate_lipinski(mol) for mol in pocket])
|
| 208 |
+
per_pocket_diversity.append(self.calculate_diversity(pocket))
|
| 209 |
+
|
| 210 |
+
print(f"{sum([len(p) for p in pocket_rdmols])} molecules from "
|
| 211 |
+
f"{len(pocket_rdmols)} pockets evaluated.")
|
| 212 |
+
|
| 213 |
+
qed_flattened = [x for px in all_qed for x in px]
|
| 214 |
+
print(f"QED: {np.mean(qed_flattened):.3f} \pm {np.std(qed_flattened):.2f}")
|
| 215 |
+
|
| 216 |
+
sa_flattened = [x for px in all_sa for x in px]
|
| 217 |
+
print(f"SA: {np.mean(sa_flattened):.3f} \pm {np.std(sa_flattened):.2f}")
|
| 218 |
+
|
| 219 |
+
logp_flattened = [x for px in all_logp for x in px]
|
| 220 |
+
print(f"LogP: {np.mean(logp_flattened):.3f} \pm {np.std(logp_flattened):.2f}")
|
| 221 |
+
|
| 222 |
+
lipinski_flattened = [x for px in all_lipinski for x in px]
|
| 223 |
+
print(f"Lipinski: {np.mean(lipinski_flattened):.3f} \pm {np.std(lipinski_flattened):.2f}")
|
| 224 |
+
|
| 225 |
+
print(f"Diversity: {np.mean(per_pocket_diversity):.3f} \pm {np.std(per_pocket_diversity):.2f}")
|
| 226 |
+
|
| 227 |
+
return all_qed, all_sa, all_logp, all_lipinski, per_pocket_diversity
|
| 228 |
+
|
| 229 |
+
def evaluate_mean(self, rdmols):
|
| 230 |
+
"""
|
| 231 |
+
Run full evaluation and return mean of each property
|
| 232 |
+
Args:
|
| 233 |
+
rdmols: list of RDKit molecules
|
| 234 |
+
Returns:
|
| 235 |
+
QED, SA, LogP, Lipinski, and Diversity
|
| 236 |
+
"""
|
| 237 |
+
|
| 238 |
+
if len(rdmols) < 1:
|
| 239 |
+
return 0.0, 0.0, 0.0, 0.0, 0.0
|
| 240 |
+
|
| 241 |
+
for mol in rdmols:
|
| 242 |
+
Chem.SanitizeMol(mol)
|
| 243 |
+
assert mol is not None, "only evaluate valid molecules"
|
| 244 |
+
|
| 245 |
+
qed = np.mean([self.calculate_qed(mol) for mol in rdmols])
|
| 246 |
+
sa = np.mean([self.calculate_sa(mol) for mol in rdmols])
|
| 247 |
+
logp = np.mean([self.calculate_logp(mol) for mol in rdmols])
|
| 248 |
+
lipinski = np.mean([self.calculate_lipinski(mol) for mol in rdmols])
|
| 249 |
+
diversity = self.calculate_diversity(rdmols)
|
| 250 |
+
|
| 251 |
+
return qed, sa, logp, lipinski, diversity
|
analysis/molecule_builder.py
ADDED
|
@@ -0,0 +1,250 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import warnings
|
| 2 |
+
import tempfile
|
| 3 |
+
|
| 4 |
+
import torch
|
| 5 |
+
import numpy as np
|
| 6 |
+
from rdkit import Chem
|
| 7 |
+
from rdkit.Chem.rdForceFieldHelpers import UFFOptimizeMolecule, UFFHasAllMoleculeParams
|
| 8 |
+
import openbabel
|
| 9 |
+
|
| 10 |
+
import utils
|
| 11 |
+
from constants import bonds1, bonds2, bonds3, margin1, margin2, margin3, \
|
| 12 |
+
bond_dict
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def get_bond_order(atom1, atom2, distance):
|
| 16 |
+
distance = 100 * distance # We change the metric
|
| 17 |
+
|
| 18 |
+
if atom1 in bonds3 and atom2 in bonds3[atom1] and distance < bonds3[atom1][atom2] + margin3:
|
| 19 |
+
return 3 # Triple
|
| 20 |
+
|
| 21 |
+
if atom1 in bonds2 and atom2 in bonds2[atom1] and distance < bonds2[atom1][atom2] + margin2:
|
| 22 |
+
return 2 # Double
|
| 23 |
+
|
| 24 |
+
if atom1 in bonds1 and atom2 in bonds1[atom1] and distance < bonds1[atom1][atom2] + margin1:
|
| 25 |
+
return 1 # Single
|
| 26 |
+
|
| 27 |
+
return 0 # No bond
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def get_bond_order_batch(atoms1, atoms2, distances, dataset_info):
|
| 31 |
+
if isinstance(atoms1, np.ndarray):
|
| 32 |
+
atoms1 = torch.from_numpy(atoms1)
|
| 33 |
+
if isinstance(atoms2, np.ndarray):
|
| 34 |
+
atoms2 = torch.from_numpy(atoms2)
|
| 35 |
+
if isinstance(distances, np.ndarray):
|
| 36 |
+
distances = torch.from_numpy(distances)
|
| 37 |
+
|
| 38 |
+
distances = 100 * distances # We change the metric
|
| 39 |
+
|
| 40 |
+
bonds1 = torch.tensor(dataset_info['bonds1'], device=atoms1.device)
|
| 41 |
+
bonds2 = torch.tensor(dataset_info['bonds2'], device=atoms1.device)
|
| 42 |
+
bonds3 = torch.tensor(dataset_info['bonds3'], device=atoms1.device)
|
| 43 |
+
|
| 44 |
+
bond_types = torch.zeros_like(atoms1) # 0: No bond
|
| 45 |
+
|
| 46 |
+
# Single
|
| 47 |
+
bond_types[distances < bonds1[atoms1, atoms2] + margin1] = 1
|
| 48 |
+
|
| 49 |
+
# Double (note that already assigned single bonds will be overwritten)
|
| 50 |
+
bond_types[distances < bonds2[atoms1, atoms2] + margin2] = 2
|
| 51 |
+
|
| 52 |
+
# Triple
|
| 53 |
+
bond_types[distances < bonds3[atoms1, atoms2] + margin3] = 3
|
| 54 |
+
|
| 55 |
+
return bond_types
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def make_mol_openbabel(positions, atom_types, atom_decoder):
|
| 59 |
+
"""
|
| 60 |
+
Build an RDKit molecule using openbabel for creating bonds
|
| 61 |
+
Args:
|
| 62 |
+
positions: N x 3
|
| 63 |
+
atom_types: N
|
| 64 |
+
atom_decoder: maps indices to atom types
|
| 65 |
+
Returns:
|
| 66 |
+
rdkit molecule
|
| 67 |
+
"""
|
| 68 |
+
atom_types = [atom_decoder[x] for x in atom_types]
|
| 69 |
+
|
| 70 |
+
with tempfile.NamedTemporaryFile() as tmp:
|
| 71 |
+
tmp_file = tmp.name
|
| 72 |
+
|
| 73 |
+
# Write xyz file
|
| 74 |
+
utils.write_xyz_file(positions, atom_types, tmp_file)
|
| 75 |
+
|
| 76 |
+
# Convert to sdf file with openbabel
|
| 77 |
+
# openbabel will add bonds
|
| 78 |
+
obConversion = openbabel.OBConversion()
|
| 79 |
+
obConversion.SetInAndOutFormats("xyz", "sdf")
|
| 80 |
+
ob_mol = openbabel.OBMol()
|
| 81 |
+
obConversion.ReadFile(ob_mol, tmp_file)
|
| 82 |
+
|
| 83 |
+
obConversion.WriteFile(ob_mol, tmp_file)
|
| 84 |
+
|
| 85 |
+
# Read sdf file with RDKit
|
| 86 |
+
tmp_mol = Chem.SDMolSupplier(tmp_file, sanitize=False)[0]
|
| 87 |
+
|
| 88 |
+
# Build new molecule. This is a workaround to remove radicals.
|
| 89 |
+
mol = Chem.RWMol()
|
| 90 |
+
for atom in tmp_mol.GetAtoms():
|
| 91 |
+
mol.AddAtom(Chem.Atom(atom.GetSymbol()))
|
| 92 |
+
mol.AddConformer(tmp_mol.GetConformer(0))
|
| 93 |
+
|
| 94 |
+
for bond in tmp_mol.GetBonds():
|
| 95 |
+
mol.AddBond(bond.GetBeginAtomIdx(), bond.GetEndAtomIdx(),
|
| 96 |
+
bond.GetBondType())
|
| 97 |
+
|
| 98 |
+
return mol
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
def make_mol_edm(positions, atom_types, dataset_info, add_coords):
|
| 102 |
+
"""
|
| 103 |
+
Equivalent to EDM's way of building RDKit molecules
|
| 104 |
+
"""
|
| 105 |
+
n = len(positions)
|
| 106 |
+
|
| 107 |
+
# (X, A, E): atom_types, adjacency matrix, edge_types
|
| 108 |
+
# X: N (int)
|
| 109 |
+
# A: N x N (bool) -> (binary adjacency matrix)
|
| 110 |
+
# E: N x N (int) -> (bond type, 0 if no bond)
|
| 111 |
+
pos = positions.unsqueeze(0) # add batch dim
|
| 112 |
+
dists = torch.cdist(pos, pos, p=2).squeeze(0).view(-1) # remove batch dim & flatten
|
| 113 |
+
atoms1, atoms2 = torch.cartesian_prod(atom_types, atom_types).T
|
| 114 |
+
E_full = get_bond_order_batch(atoms1, atoms2, dists, dataset_info).view(n, n)
|
| 115 |
+
E = torch.tril(E_full, diagonal=-1) # Warning: the graph should be DIRECTED
|
| 116 |
+
A = E.bool()
|
| 117 |
+
X = atom_types
|
| 118 |
+
|
| 119 |
+
mol = Chem.RWMol()
|
| 120 |
+
for atom in X:
|
| 121 |
+
a = Chem.Atom(dataset_info["atom_decoder"][atom.item()])
|
| 122 |
+
mol.AddAtom(a)
|
| 123 |
+
|
| 124 |
+
all_bonds = torch.nonzero(A)
|
| 125 |
+
for bond in all_bonds:
|
| 126 |
+
mol.AddBond(bond[0].item(), bond[1].item(),
|
| 127 |
+
bond_dict[E[bond[0], bond[1]].item()])
|
| 128 |
+
|
| 129 |
+
if add_coords:
|
| 130 |
+
conf = Chem.Conformer(mol.GetNumAtoms())
|
| 131 |
+
for i in range(mol.GetNumAtoms()):
|
| 132 |
+
conf.SetAtomPosition(i, (positions[i, 0].item(),
|
| 133 |
+
positions[i, 1].item(),
|
| 134 |
+
positions[i, 2].item()))
|
| 135 |
+
mol.AddConformer(conf)
|
| 136 |
+
|
| 137 |
+
return mol
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
def build_molecule(positions, atom_types, dataset_info, add_coords=False,
|
| 141 |
+
use_openbabel=True):
|
| 142 |
+
"""
|
| 143 |
+
Build RDKit molecule
|
| 144 |
+
Args:
|
| 145 |
+
positions: N x 3
|
| 146 |
+
atom_types: N
|
| 147 |
+
dataset_info: dict
|
| 148 |
+
add_coords: Add conformer to mol (always added if use_openbabel=True)
|
| 149 |
+
use_openbabel: use OpenBabel to create bonds
|
| 150 |
+
Returns:
|
| 151 |
+
RDKit molecule
|
| 152 |
+
"""
|
| 153 |
+
if use_openbabel:
|
| 154 |
+
mol = make_mol_openbabel(positions, atom_types,
|
| 155 |
+
dataset_info["atom_decoder"])
|
| 156 |
+
else:
|
| 157 |
+
mol = make_mol_edm(positions, atom_types, dataset_info, add_coords)
|
| 158 |
+
|
| 159 |
+
return mol
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
def process_molecule(rdmol, add_hydrogens=False, sanitize=False, relax_iter=0,
|
| 163 |
+
largest_frag=False):
|
| 164 |
+
"""
|
| 165 |
+
Apply filters to an RDKit molecule. Makes a copy first.
|
| 166 |
+
Args:
|
| 167 |
+
rdmol: rdkit molecule
|
| 168 |
+
add_hydrogens
|
| 169 |
+
sanitize
|
| 170 |
+
relax_iter: maximum number of UFF optimization iterations
|
| 171 |
+
largest_frag: filter out the largest fragment in a set of disjoint
|
| 172 |
+
molecules
|
| 173 |
+
Returns:
|
| 174 |
+
RDKit molecule or None if it does not pass the filters
|
| 175 |
+
"""
|
| 176 |
+
|
| 177 |
+
# Create a copy
|
| 178 |
+
mol = Chem.Mol(rdmol)
|
| 179 |
+
|
| 180 |
+
if sanitize:
|
| 181 |
+
try:
|
| 182 |
+
Chem.SanitizeMol(mol)
|
| 183 |
+
except ValueError:
|
| 184 |
+
warnings.warn('Sanitization failed. Returning None.')
|
| 185 |
+
return None
|
| 186 |
+
|
| 187 |
+
if add_hydrogens:
|
| 188 |
+
mol = Chem.AddHs(mol, addCoords=(len(mol.GetConformers()) > 0))
|
| 189 |
+
|
| 190 |
+
if largest_frag:
|
| 191 |
+
mol_frags = Chem.GetMolFrags(mol, asMols=True, sanitizeFrags=False)
|
| 192 |
+
mol = max(mol_frags, default=mol, key=lambda m: m.GetNumAtoms())
|
| 193 |
+
if sanitize:
|
| 194 |
+
# sanitize the updated molecule
|
| 195 |
+
try:
|
| 196 |
+
Chem.SanitizeMol(mol)
|
| 197 |
+
except ValueError:
|
| 198 |
+
return None
|
| 199 |
+
|
| 200 |
+
if relax_iter > 0:
|
| 201 |
+
if not UFFHasAllMoleculeParams(mol):
|
| 202 |
+
warnings.warn('UFF parameters not available for all atoms. '
|
| 203 |
+
'Returning None.')
|
| 204 |
+
return None
|
| 205 |
+
|
| 206 |
+
try:
|
| 207 |
+
uff_relax(mol, relax_iter)
|
| 208 |
+
if sanitize:
|
| 209 |
+
# sanitize the updated molecule
|
| 210 |
+
Chem.SanitizeMol(mol)
|
| 211 |
+
except (RuntimeError, ValueError) as e:
|
| 212 |
+
return None
|
| 213 |
+
|
| 214 |
+
return mol
|
| 215 |
+
|
| 216 |
+
|
| 217 |
+
def uff_relax(mol, max_iter=200):
|
| 218 |
+
"""
|
| 219 |
+
Uses RDKit's universal force field (UFF) implementation to optimize a
|
| 220 |
+
molecule.
|
| 221 |
+
"""
|
| 222 |
+
more_iterations_required = UFFOptimizeMolecule(mol, maxIters=max_iter)
|
| 223 |
+
if more_iterations_required:
|
| 224 |
+
warnings.warn(f'Maximum number of FF iterations reached. '
|
| 225 |
+
f'Returning molecule after {max_iter} relaxation steps.')
|
| 226 |
+
return more_iterations_required
|
| 227 |
+
|
| 228 |
+
|
| 229 |
+
def filter_rd_mol(rdmol):
|
| 230 |
+
"""
|
| 231 |
+
Filter out RDMols if they have a 3-3 ring intersection
|
| 232 |
+
adapted from:
|
| 233 |
+
https://github.com/luost26/3D-Generative-SBDD/blob/main/utils/chem.py
|
| 234 |
+
"""
|
| 235 |
+
ring_info = rdmol.GetRingInfo()
|
| 236 |
+
ring_info.AtomRings()
|
| 237 |
+
rings = [set(r) for r in ring_info.AtomRings()]
|
| 238 |
+
|
| 239 |
+
# 3-3 ring intersection
|
| 240 |
+
for i, ring_a in enumerate(rings):
|
| 241 |
+
if len(ring_a) != 3:
|
| 242 |
+
continue
|
| 243 |
+
for j, ring_b in enumerate(rings):
|
| 244 |
+
if i <= j:
|
| 245 |
+
continue
|
| 246 |
+
inter = ring_a.intersection(ring_b)
|
| 247 |
+
if (len(ring_b) == 3) and (len(inter) > 0):
|
| 248 |
+
return False
|
| 249 |
+
|
| 250 |
+
return True
|
analysis/visualization.py
ADDED
|
@@ -0,0 +1,472 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
import numpy as np
|
| 3 |
+
import os
|
| 4 |
+
import glob
|
| 5 |
+
import random
|
| 6 |
+
import matplotlib
|
| 7 |
+
import imageio
|
| 8 |
+
|
| 9 |
+
matplotlib.use('Agg')
|
| 10 |
+
import matplotlib.pyplot as plt
|
| 11 |
+
from analysis.molecule_builder import get_bond_order
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
##############
|
| 15 |
+
### Files ####
|
| 16 |
+
###########-->
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def save_xyz_file(path, one_hot, positions, atom_decoder, id_from=0,
|
| 20 |
+
name='molecule', batch_mask=None):
|
| 21 |
+
try:
|
| 22 |
+
os.makedirs(path)
|
| 23 |
+
except OSError:
|
| 24 |
+
pass
|
| 25 |
+
|
| 26 |
+
if batch_mask is None:
|
| 27 |
+
batch_mask = torch.zeros(len(one_hot))
|
| 28 |
+
|
| 29 |
+
for batch_i in torch.unique(batch_mask):
|
| 30 |
+
cur_batch_mask = (batch_mask == batch_i)
|
| 31 |
+
n_atoms = int(torch.sum(cur_batch_mask).item())
|
| 32 |
+
f = open(path + name + '_' + "%03d.xyz" % (batch_i + id_from), "w")
|
| 33 |
+
f.write("%d\n\n" % n_atoms)
|
| 34 |
+
atoms = torch.argmax(one_hot[cur_batch_mask], dim=1)
|
| 35 |
+
batch_pos = positions[cur_batch_mask]
|
| 36 |
+
for atom_i in range(n_atoms):
|
| 37 |
+
atom = atoms[atom_i]
|
| 38 |
+
atom = atom_decoder[atom]
|
| 39 |
+
f.write("%s %.9f %.9f %.9f\n" % (atom, batch_pos[atom_i, 0], batch_pos[atom_i, 1], batch_pos[atom_i, 2]))
|
| 40 |
+
f.close()
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def load_molecule_xyz(file, dataset_info):
|
| 44 |
+
with open(file, encoding='utf8') as f:
|
| 45 |
+
n_atoms = int(f.readline())
|
| 46 |
+
one_hot = torch.zeros(n_atoms, len(dataset_info['atom_decoder']))
|
| 47 |
+
positions = torch.zeros(n_atoms, 3)
|
| 48 |
+
f.readline()
|
| 49 |
+
atoms = f.readlines()
|
| 50 |
+
for i in range(n_atoms):
|
| 51 |
+
atom = atoms[i].split(' ')
|
| 52 |
+
atom_type = atom[0]
|
| 53 |
+
one_hot[i, dataset_info['atom_encoder'][atom_type]] = 1
|
| 54 |
+
position = torch.Tensor([float(e) for e in atom[1:]])
|
| 55 |
+
positions[i, :] = position
|
| 56 |
+
return positions, one_hot
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
def load_xyz_files(path, shuffle=True):
|
| 60 |
+
files = glob.glob(path + "/*.xyz")
|
| 61 |
+
if shuffle:
|
| 62 |
+
random.shuffle(files)
|
| 63 |
+
return files
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
# <----########
|
| 67 |
+
### Files ####
|
| 68 |
+
##############
|
| 69 |
+
def draw_sphere(ax, x, y, z, size, color, alpha):
|
| 70 |
+
u = np.linspace(0, 2 * np.pi, 100)
|
| 71 |
+
v = np.linspace(0, np.pi, 100)
|
| 72 |
+
|
| 73 |
+
xs = size * np.outer(np.cos(u), np.sin(v))
|
| 74 |
+
ys = size * np.outer(np.sin(u), np.sin(v)) * 0.8 # Correct for matplotlib.
|
| 75 |
+
zs = size * np.outer(np.ones(np.size(u)), np.cos(v))
|
| 76 |
+
# for i in range(2):
|
| 77 |
+
# ax.plot_surface(x+random.randint(-5,5), y+random.randint(-5,5), z+random.randint(-5,5), rstride=4, cstride=4, color='b', linewidth=0, alpha=0.5)
|
| 78 |
+
|
| 79 |
+
ax.plot_surface(x + xs, y + ys, z + zs, rstride=2, cstride=2, color=color,
|
| 80 |
+
linewidth=0,
|
| 81 |
+
alpha=alpha)
|
| 82 |
+
# # calculate vectors for "vertical" circle
|
| 83 |
+
# a = np.array([-np.sin(elev / 180 * np.pi), 0, np.cos(elev / 180 * np.pi)])
|
| 84 |
+
# b = np.array([0, 1, 0])
|
| 85 |
+
# b = b * np.cos(rot) + np.cross(a, b) * np.sin(rot) + a * np.dot(a, b) * (
|
| 86 |
+
# 1 - np.cos(rot))
|
| 87 |
+
# ax.plot(np.sin(u), np.cos(u), 0, color='k', linestyle='dashed')
|
| 88 |
+
# horiz_front = np.linspace(0, np.pi, 100)
|
| 89 |
+
# ax.plot(np.sin(horiz_front), np.cos(horiz_front), 0, color='k')
|
| 90 |
+
# vert_front = np.linspace(np.pi / 2, 3 * np.pi / 2, 100)
|
| 91 |
+
# ax.plot(a[0] * np.sin(u) + b[0] * np.cos(u), b[1] * np.cos(u),
|
| 92 |
+
# a[2] * np.sin(u) + b[2] * np.cos(u), color='k', linestyle='dashed')
|
| 93 |
+
# ax.plot(a[0] * np.sin(vert_front) + b[0] * np.cos(vert_front),
|
| 94 |
+
# b[1] * np.cos(vert_front),
|
| 95 |
+
# a[2] * np.sin(vert_front) + b[2] * np.cos(vert_front), color='k')
|
| 96 |
+
#
|
| 97 |
+
# ax.view_init(elev=elev, azim=0)
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
def plot_molecule(ax, positions, atom_type, alpha, spheres_3d, hex_bg_color,
|
| 101 |
+
dataset_info):
|
| 102 |
+
# draw_sphere(ax, 0, 0, 0, 1)
|
| 103 |
+
# draw_sphere(ax, 1, 1, 1, 1)
|
| 104 |
+
|
| 105 |
+
x = positions[:, 0]
|
| 106 |
+
y = positions[:, 1]
|
| 107 |
+
z = positions[:, 2]
|
| 108 |
+
# Hydrogen, Carbon, Nitrogen, Oxygen, Flourine
|
| 109 |
+
|
| 110 |
+
# ax.set_facecolor((1.0, 0.47, 0.42))
|
| 111 |
+
colors_dic = np.array(dataset_info['colors_dic'])
|
| 112 |
+
radius_dic = np.array(dataset_info['radius_dic'])
|
| 113 |
+
area_dic = 1500 * radius_dic ** 2
|
| 114 |
+
# areas_dic = sizes_dic * sizes_dic * 3.1416
|
| 115 |
+
|
| 116 |
+
areas = area_dic[atom_type]
|
| 117 |
+
radii = radius_dic[atom_type]
|
| 118 |
+
colors = colors_dic[atom_type]
|
| 119 |
+
|
| 120 |
+
if spheres_3d:
|
| 121 |
+
for i, j, k, s, c in zip(x, y, z, radii, colors):
|
| 122 |
+
draw_sphere(ax, i.item(), j.item(), k.item(), 0.7 * s, c, alpha)
|
| 123 |
+
else:
|
| 124 |
+
ax.scatter(x, y, z, s=areas, alpha=0.9 * alpha,
|
| 125 |
+
c=colors) # , linewidths=2, edgecolors='#FFFFFF')
|
| 126 |
+
|
| 127 |
+
for i in range(len(x)):
|
| 128 |
+
for j in range(i + 1, len(x)):
|
| 129 |
+
p1 = np.array([x[i], y[i], z[i]])
|
| 130 |
+
p2 = np.array([x[j], y[j], z[j]])
|
| 131 |
+
dist = np.sqrt(np.sum((p1 - p2) ** 2))
|
| 132 |
+
atom1, atom2 = dataset_info['atom_decoder'][atom_type[i]], \
|
| 133 |
+
dataset_info['atom_decoder'][atom_type[j]]
|
| 134 |
+
s = (atom_type[i], atom_type[j])
|
| 135 |
+
|
| 136 |
+
draw_edge_int = get_bond_order(dataset_info['atom_decoder'][s[0]],
|
| 137 |
+
dataset_info['atom_decoder'][s[1]],
|
| 138 |
+
dist)
|
| 139 |
+
line_width = 2
|
| 140 |
+
|
| 141 |
+
draw_edge = draw_edge_int > 0
|
| 142 |
+
if draw_edge:
|
| 143 |
+
if draw_edge_int == 4:
|
| 144 |
+
linewidth_factor = 1.5
|
| 145 |
+
else:
|
| 146 |
+
# linewidth_factor = draw_edge_int # Prop to number of
|
| 147 |
+
# edges.
|
| 148 |
+
linewidth_factor = 1
|
| 149 |
+
ax.plot([x[i], x[j]], [y[i], y[j]], [z[i], z[j]],
|
| 150 |
+
linewidth=line_width * linewidth_factor,
|
| 151 |
+
c=hex_bg_color, alpha=alpha)
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
def plot_data3d(positions, atom_type, dataset_info, camera_elev=0,
|
| 155 |
+
camera_azim=0, save_path=None, spheres_3d=False,
|
| 156 |
+
bg='black', alpha=1.):
|
| 157 |
+
black = (0, 0, 0)
|
| 158 |
+
white = (1, 1, 1)
|
| 159 |
+
hex_bg_color = '#FFFFFF' if bg == 'black' else '#666666'
|
| 160 |
+
|
| 161 |
+
from mpl_toolkits.mplot3d import Axes3D
|
| 162 |
+
fig = plt.figure()
|
| 163 |
+
ax = fig.add_subplot(projection='3d')
|
| 164 |
+
ax.set_aspect('auto')
|
| 165 |
+
ax.view_init(elev=camera_elev, azim=camera_azim)
|
| 166 |
+
if bg == 'black':
|
| 167 |
+
ax.set_facecolor(black)
|
| 168 |
+
else:
|
| 169 |
+
ax.set_facecolor(white)
|
| 170 |
+
# ax.xaxis.pane.set_edgecolor('#D0D0D0')
|
| 171 |
+
ax.xaxis.pane.set_alpha(0)
|
| 172 |
+
ax.yaxis.pane.set_alpha(0)
|
| 173 |
+
ax.zaxis.pane.set_alpha(0)
|
| 174 |
+
ax._axis3don = False
|
| 175 |
+
|
| 176 |
+
if bg == 'black':
|
| 177 |
+
ax.w_xaxis.line.set_color("black")
|
| 178 |
+
else:
|
| 179 |
+
ax.w_xaxis.line.set_color("white")
|
| 180 |
+
|
| 181 |
+
plot_molecule(ax, positions, atom_type, alpha, spheres_3d,
|
| 182 |
+
hex_bg_color, dataset_info)
|
| 183 |
+
|
| 184 |
+
# if 'qm9' in dataset_info['name']:
|
| 185 |
+
max_value = positions.abs().max().item()
|
| 186 |
+
|
| 187 |
+
# axis_lim = 3.2
|
| 188 |
+
axis_lim = min(40, max(max_value / 1.5 + 0.3, 3.2))
|
| 189 |
+
ax.set_xlim(-axis_lim, axis_lim)
|
| 190 |
+
ax.set_ylim(-axis_lim, axis_lim)
|
| 191 |
+
ax.set_zlim(-axis_lim, axis_lim)
|
| 192 |
+
# elif dataset_info['name'] == 'geom':
|
| 193 |
+
# max_value = positions.abs().max().item()
|
| 194 |
+
#
|
| 195 |
+
# # axis_lim = 3.2
|
| 196 |
+
# axis_lim = min(40, max(max_value / 1.5 + 0.3, 3.2))
|
| 197 |
+
# ax.set_xlim(-axis_lim, axis_lim)
|
| 198 |
+
# ax.set_ylim(-axis_lim, axis_lim)
|
| 199 |
+
# ax.set_zlim(-axis_lim, axis_lim)
|
| 200 |
+
# elif dataset_info['name'] == 'pdbbind':
|
| 201 |
+
# max_value = positions.abs().max().item()
|
| 202 |
+
#
|
| 203 |
+
# # axis_lim = 3.2
|
| 204 |
+
# axis_lim = min(40, max(max_value / 1.5 + 0.3, 3.2))
|
| 205 |
+
# ax.set_xlim(-axis_lim, axis_lim)
|
| 206 |
+
# ax.set_ylim(-axis_lim, axis_lim)
|
| 207 |
+
# ax.set_zlim(-axis_lim, axis_lim)
|
| 208 |
+
# else:
|
| 209 |
+
# raise ValueError(dataset_info['name'])
|
| 210 |
+
|
| 211 |
+
dpi = 120 if spheres_3d else 50
|
| 212 |
+
|
| 213 |
+
if save_path is not None:
|
| 214 |
+
plt.savefig(save_path, bbox_inches='tight', pad_inches=0.0, dpi=dpi)
|
| 215 |
+
|
| 216 |
+
if spheres_3d:
|
| 217 |
+
img = imageio.imread(save_path)
|
| 218 |
+
img_brighter = np.clip(img * 1.4, 0, 255).astype('uint8')
|
| 219 |
+
imageio.imsave(save_path, img_brighter)
|
| 220 |
+
else:
|
| 221 |
+
plt.show()
|
| 222 |
+
plt.close()
|
| 223 |
+
|
| 224 |
+
|
| 225 |
+
def plot_data3d_uncertainty(
|
| 226 |
+
all_positions, all_atom_types, dataset_info, camera_elev=0,
|
| 227 |
+
camera_azim=0,
|
| 228 |
+
save_path=None, spheres_3d=False, bg='black', alpha=1.):
|
| 229 |
+
black = (0, 0, 0)
|
| 230 |
+
white = (1, 1, 1)
|
| 231 |
+
hex_bg_color = '#FFFFFF' if bg == 'black' else '#666666'
|
| 232 |
+
|
| 233 |
+
from mpl_toolkits.mplot3d import Axes3D
|
| 234 |
+
fig = plt.figure()
|
| 235 |
+
ax = fig.add_subplot(projection='3d')
|
| 236 |
+
ax.set_aspect('auto')
|
| 237 |
+
ax.view_init(elev=camera_elev, azim=camera_azim)
|
| 238 |
+
if bg == 'black':
|
| 239 |
+
ax.set_facecolor(black)
|
| 240 |
+
else:
|
| 241 |
+
ax.set_facecolor(white)
|
| 242 |
+
# ax.xaxis.pane.set_edgecolor('#D0D0D0')
|
| 243 |
+
ax.xaxis.pane.set_alpha(0)
|
| 244 |
+
ax.yaxis.pane.set_alpha(0)
|
| 245 |
+
ax.zaxis.pane.set_alpha(0)
|
| 246 |
+
ax._axis3don = False
|
| 247 |
+
|
| 248 |
+
if bg == 'black':
|
| 249 |
+
ax.w_xaxis.line.set_color("black")
|
| 250 |
+
else:
|
| 251 |
+
ax.w_xaxis.line.set_color("white")
|
| 252 |
+
|
| 253 |
+
for i in range(len(all_positions)):
|
| 254 |
+
positions = all_positions[i]
|
| 255 |
+
atom_type = all_atom_types[i]
|
| 256 |
+
plot_molecule(ax, positions, atom_type, alpha, spheres_3d,
|
| 257 |
+
hex_bg_color, dataset_info)
|
| 258 |
+
|
| 259 |
+
if 'qm9' in dataset_info['name']:
|
| 260 |
+
max_value = all_positions[0].abs().max().item()
|
| 261 |
+
|
| 262 |
+
# axis_lim = 3.2
|
| 263 |
+
axis_lim = min(40, max(max_value + 0.3, 3.2))
|
| 264 |
+
ax.set_xlim(-axis_lim, axis_lim)
|
| 265 |
+
ax.set_ylim(-axis_lim, axis_lim)
|
| 266 |
+
ax.set_zlim(-axis_lim, axis_lim)
|
| 267 |
+
elif dataset_info['name'] == 'geom':
|
| 268 |
+
max_value = all_positions[0].abs().max().item()
|
| 269 |
+
|
| 270 |
+
# axis_lim = 3.2
|
| 271 |
+
axis_lim = min(40, max(max_value / 2 + 0.3, 3.2))
|
| 272 |
+
ax.set_xlim(-axis_lim, axis_lim)
|
| 273 |
+
ax.set_ylim(-axis_lim, axis_lim)
|
| 274 |
+
ax.set_zlim(-axis_lim, axis_lim)
|
| 275 |
+
elif dataset_info['name'] == 'pdbbind':
|
| 276 |
+
max_value = all_positions[0].abs().max().item()
|
| 277 |
+
|
| 278 |
+
# axis_lim = 3.2
|
| 279 |
+
axis_lim = min(40, max(max_value / 2 + 0.3, 3.2))
|
| 280 |
+
ax.set_xlim(-axis_lim, axis_lim)
|
| 281 |
+
ax.set_ylim(-axis_lim, axis_lim)
|
| 282 |
+
ax.set_zlim(-axis_lim, axis_lim)
|
| 283 |
+
else:
|
| 284 |
+
raise ValueError(dataset_info['name'])
|
| 285 |
+
|
| 286 |
+
dpi = 120 if spheres_3d else 50
|
| 287 |
+
|
| 288 |
+
if save_path is not None:
|
| 289 |
+
plt.savefig(save_path, bbox_inches='tight', pad_inches=0.0, dpi=dpi)
|
| 290 |
+
|
| 291 |
+
if spheres_3d:
|
| 292 |
+
img = imageio.imread(save_path)
|
| 293 |
+
img_brighter = np.clip(img * 1.4, 0, 255).astype('uint8')
|
| 294 |
+
imageio.imsave(save_path, img_brighter)
|
| 295 |
+
else:
|
| 296 |
+
plt.show()
|
| 297 |
+
plt.close()
|
| 298 |
+
|
| 299 |
+
|
| 300 |
+
def plot_grid():
|
| 301 |
+
import matplotlib.pyplot as plt
|
| 302 |
+
from mpl_toolkits.axes_grid1 import ImageGrid
|
| 303 |
+
|
| 304 |
+
im1 = np.arange(100).reshape((10, 10))
|
| 305 |
+
im2 = im1.T
|
| 306 |
+
im3 = np.flipud(im1)
|
| 307 |
+
im4 = np.fliplr(im2)
|
| 308 |
+
|
| 309 |
+
fig = plt.figure(figsize=(10., 10.))
|
| 310 |
+
grid = ImageGrid(fig, 111, # similar to subplot(111)
|
| 311 |
+
nrows_ncols=(6, 6), # creates 2x2 grid of axes
|
| 312 |
+
axes_pad=0.1, # pad between axes in inch.
|
| 313 |
+
)
|
| 314 |
+
|
| 315 |
+
for ax, im in zip(grid, [im1, im2, im3, im4]):
|
| 316 |
+
# Iterating over the grid returns the Axes.
|
| 317 |
+
|
| 318 |
+
ax.imshow(im)
|
| 319 |
+
|
| 320 |
+
plt.show()
|
| 321 |
+
|
| 322 |
+
|
| 323 |
+
def visualize(path, dataset_info, max_num=25, wandb=None, spheres_3d=False):
|
| 324 |
+
files = load_xyz_files(path)[0:max_num]
|
| 325 |
+
for file in files:
|
| 326 |
+
positions, one_hot = load_molecule_xyz(file, dataset_info)
|
| 327 |
+
atom_type = torch.argmax(one_hot, dim=1).numpy()
|
| 328 |
+
dists = torch.cdist(positions.unsqueeze(0),
|
| 329 |
+
positions.unsqueeze(0)).squeeze(0)
|
| 330 |
+
dists = dists[dists > 0]
|
| 331 |
+
# print("Average distance between atoms", dists.mean().item())
|
| 332 |
+
plot_data3d(positions, atom_type, dataset_info=dataset_info,
|
| 333 |
+
save_path=file[:-4] + '.png',
|
| 334 |
+
spheres_3d=spheres_3d)
|
| 335 |
+
|
| 336 |
+
if wandb is not None:
|
| 337 |
+
path = file[:-4] + '.png'
|
| 338 |
+
# Log image(s)
|
| 339 |
+
im = plt.imread(path)
|
| 340 |
+
wandb.log({'molecule': [wandb.Image(im, caption=path)]})
|
| 341 |
+
|
| 342 |
+
|
| 343 |
+
def visualize_chain(path, dataset_info, wandb=None, spheres_3d=False,
|
| 344 |
+
mode="chain"):
|
| 345 |
+
files = load_xyz_files(path)
|
| 346 |
+
files = sorted(files)
|
| 347 |
+
save_paths = []
|
| 348 |
+
|
| 349 |
+
for i in range(len(files)):
|
| 350 |
+
file = files[i]
|
| 351 |
+
|
| 352 |
+
positions, one_hot = load_molecule_xyz(file, dataset_info=dataset_info)
|
| 353 |
+
|
| 354 |
+
atom_type = torch.argmax(one_hot, dim=1).numpy()
|
| 355 |
+
fn = file[:-4] + '.png'
|
| 356 |
+
plot_data3d(positions, atom_type, dataset_info=dataset_info,
|
| 357 |
+
save_path=fn, spheres_3d=spheres_3d, alpha=1.0)
|
| 358 |
+
save_paths.append(fn)
|
| 359 |
+
|
| 360 |
+
imgs = [imageio.imread(fn) for fn in save_paths]
|
| 361 |
+
dirname = os.path.dirname(save_paths[0])
|
| 362 |
+
gif_path = dirname + '/output.gif'
|
| 363 |
+
print(f'Creating gif with {len(imgs)} images')
|
| 364 |
+
# Add the last frame 10 times so that the final result remains temporally.
|
| 365 |
+
# imgs.extend([imgs[-1]] * 10)
|
| 366 |
+
imageio.mimsave(gif_path, imgs, subrectangles=True)
|
| 367 |
+
|
| 368 |
+
if wandb is not None:
|
| 369 |
+
wandb.log({mode: [wandb.Video(gif_path, caption=gif_path)]})
|
| 370 |
+
|
| 371 |
+
|
| 372 |
+
def visualize_chain_uncertainty(
        path, dataset_info, wandb=None, spheres_3d=False, mode="chain"):
    """Render a chain of frames with uncertainty, overlaying each frame with
    its two successors, and combine the renders into a GIF.

    Args:
        path: directory containing the per-frame molecule files.
        dataset_info: dataset metadata used to decode one-hot atom types.
        wandb: optional wandb module/run; if given, the GIF is logged under `mode`.
        spheres_3d: if True, atoms are drawn as 3D spheres.
        mode: wandb log key for the resulting video.
    """
    files = sorted(load_xyz_files(path))
    save_paths = []

    for i in range(len(files)):
        # Stop once a full window of three consecutive frames is no longer
        # available. Bug fix: the original `i + 2 == len(files)` comparison
        # never triggered for len(files) < 2 and then indexed out of range.
        if i + 2 >= len(files):
            break

        file = files[i]
        file2 = files[i + 1]
        file3 = files[i + 2]

        # NOTE(review): load_molecule_xyz is unpacked into three values here
        # but only two in visualize_chain — confirm which signature is current.
        positions, one_hot, _ = load_molecule_xyz(file,
                                                  dataset_info=dataset_info)
        positions2, one_hot2, _ = load_molecule_xyz(
            file2, dataset_info=dataset_info)
        positions3, one_hot3, _ = load_molecule_xyz(
            file3, dataset_info=dataset_info)

        all_positions = torch.stack([positions, positions2, positions3], dim=0)
        one_hot = torch.stack([one_hot, one_hot2, one_hot3], dim=0)

        all_atom_type = torch.argmax(one_hot, dim=2).numpy()
        fn = os.path.splitext(file)[0] + '.png'
        plot_data3d_uncertainty(
            all_positions, all_atom_type, dataset_info=dataset_info,
            save_path=fn, spheres_3d=spheres_3d, alpha=0.5)
        save_paths.append(fn)

    imgs = [imageio.imread(fn) for fn in save_paths]
    dirname = os.path.dirname(save_paths[0])
    gif_path = os.path.join(dirname, 'output.gif')
    print(f'Creating gif with {len(imgs)} images')
    # Optionally repeat the last frame so the final molecule stays visible
    # for a moment before the animation loops.
    # imgs.extend([imgs[-1]] * 10)
    imageio.mimsave(gif_path, imgs, subrectangles=True)

    if wandb is not None:
        wandb.log({mode: [wandb.Video(gif_path, caption=gif_path)]})
|
| 413 |
+
|
| 414 |
+
|
| 415 |
+
if __name__ == '__main__':
    # Ad-hoc visualization driver: renders molecules either from a QM9
    # dataloader or from pre-generated GEOM sample files on disk.
    # plot_grid()
    import qm9.dataset as dataset
    from configs.datasets_config import qm9_with_h, geom_with_h

    matplotlib.use('macosx')

    task = "visualize_molecules"
    task_dataset = 'geom'

    if task_dataset == 'qm9':
        dataset_info = qm9_with_h


        class Args:
            # Minimal stand-in for the argparse namespace expected by
            # dataset.retrieve_dataloaders.
            batch_size = 1
            num_workers = 0
            filter_n_atoms = None
            datadir = 'qm9/temp'
            dataset = 'qm9'
            remove_h = False


        cfg = Args()

        dataloaders, charge_scale = dataset.retrieve_dataloaders(cfg)

        for i, data in enumerate(dataloaders['train']):
            positions = data['positions'].view(-1, 3)
            # Center the molecule at the origin before plotting.
            positions_centered = positions - positions.mean(dim=0, keepdim=True)
            one_hot = data['one_hot'].view(-1, 5).type(torch.float32)
            atom_type = torch.argmax(one_hot, dim=1).numpy()

            plot_data3d(
                positions_centered, atom_type, dataset_info=dataset_info,
                spheres_3d=True)

    elif task_dataset == 'geom':
        files = load_xyz_files('outputs/data')
        matplotlib.use('macosx')
        for file in files:
            x, one_hot, _ = load_molecule_xyz(file, dataset_info=geom_with_h)

            positions = x.view(-1, 3)
            positions_centered = positions - positions.mean(dim=0, keepdim=True)
            one_hot = one_hot.view(-1, 16).type(torch.float32)
            atom_type = torch.argmax(one_hot, dim=1).numpy()

            # Rows whose coordinates are exactly (0, 0, 0) are treated as
            # padding and dropped before plotting.
            mask = (x == 0).sum(1) != 3
            positions_centered = positions_centered[mask]
            atom_type = atom_type[mask]

            plot_data3d(
                positions_centered, atom_type, dataset_info=geom_with_h,
                spheres_3d=False)

    else:
        # Bug fix: previously raised ValueError(dataset), which referenced the
        # imported qm9.dataset module instead of the unknown dataset name.
        raise ValueError(task_dataset)
|
colab/DiffSBDD.ipynb
ADDED
|
@@ -0,0 +1,468 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"nbformat": 4,
|
| 3 |
+
"nbformat_minor": 0,
|
| 4 |
+
"metadata": {
|
| 5 |
+
"colab": {
|
| 6 |
+
"provenance": [],
|
| 7 |
+
"include_colab_link": true
|
| 8 |
+
},
|
| 9 |
+
"kernelspec": {
|
| 10 |
+
"name": "python3",
|
| 11 |
+
"display_name": "Python 3"
|
| 12 |
+
},
|
| 13 |
+
"language_info": {
|
| 14 |
+
"name": "python"
|
| 15 |
+
},
|
| 16 |
+
"accelerator": "GPU",
|
| 17 |
+
"widgets": {
|
| 18 |
+
"application/vnd.jupyter.widget-state+json": {
|
| 19 |
+
"e293d4b7fb5e4d60a9303400a571d481": {
|
| 20 |
+
"model_module": "@jupyter-widgets/controls",
|
| 21 |
+
"model_name": "TextModel",
|
| 22 |
+
"model_module_version": "1.5.0",
|
| 23 |
+
"state": {
|
| 24 |
+
"_dom_classes": [],
|
| 25 |
+
"_model_module": "@jupyter-widgets/controls",
|
| 26 |
+
"_model_module_version": "1.5.0",
|
| 27 |
+
"_model_name": "TextModel",
|
| 28 |
+
"_view_count": null,
|
| 29 |
+
"_view_module": "@jupyter-widgets/controls",
|
| 30 |
+
"_view_module_version": "1.5.0",
|
| 31 |
+
"_view_name": "TextView",
|
| 32 |
+
"continuous_update": true,
|
| 33 |
+
"description": "",
|
| 34 |
+
"description_tooltip": null,
|
| 35 |
+
"disabled": false,
|
| 36 |
+
"layout": "IPY_MODEL_9ccaa9faa36646678a0278cd651c3f30",
|
| 37 |
+
"placeholder": "",
|
| 38 |
+
"style": "IPY_MODEL_50a9c77fae984dab908378418b726eff",
|
| 39 |
+
"value": "A:330"
|
| 40 |
+
}
|
| 41 |
+
},
|
| 42 |
+
"9ccaa9faa36646678a0278cd651c3f30": {
|
| 43 |
+
"model_module": "@jupyter-widgets/base",
|
| 44 |
+
"model_name": "LayoutModel",
|
| 45 |
+
"model_module_version": "1.2.0",
|
| 46 |
+
"state": {
|
| 47 |
+
"_model_module": "@jupyter-widgets/base",
|
| 48 |
+
"_model_module_version": "1.2.0",
|
| 49 |
+
"_model_name": "LayoutModel",
|
| 50 |
+
"_view_count": null,
|
| 51 |
+
"_view_module": "@jupyter-widgets/base",
|
| 52 |
+
"_view_module_version": "1.2.0",
|
| 53 |
+
"_view_name": "LayoutView",
|
| 54 |
+
"align_content": null,
|
| 55 |
+
"align_items": null,
|
| 56 |
+
"align_self": null,
|
| 57 |
+
"border": null,
|
| 58 |
+
"bottom": null,
|
| 59 |
+
"display": null,
|
| 60 |
+
"flex": null,
|
| 61 |
+
"flex_flow": null,
|
| 62 |
+
"grid_area": null,
|
| 63 |
+
"grid_auto_columns": null,
|
| 64 |
+
"grid_auto_flow": null,
|
| 65 |
+
"grid_auto_rows": null,
|
| 66 |
+
"grid_column": null,
|
| 67 |
+
"grid_gap": null,
|
| 68 |
+
"grid_row": null,
|
| 69 |
+
"grid_template_areas": null,
|
| 70 |
+
"grid_template_columns": null,
|
| 71 |
+
"grid_template_rows": null,
|
| 72 |
+
"height": null,
|
| 73 |
+
"justify_content": null,
|
| 74 |
+
"justify_items": null,
|
| 75 |
+
"left": null,
|
| 76 |
+
"margin": null,
|
| 77 |
+
"max_height": null,
|
| 78 |
+
"max_width": null,
|
| 79 |
+
"min_height": null,
|
| 80 |
+
"min_width": null,
|
| 81 |
+
"object_fit": null,
|
| 82 |
+
"object_position": null,
|
| 83 |
+
"order": null,
|
| 84 |
+
"overflow": null,
|
| 85 |
+
"overflow_x": null,
|
| 86 |
+
"overflow_y": null,
|
| 87 |
+
"padding": null,
|
| 88 |
+
"right": null,
|
| 89 |
+
"top": null,
|
| 90 |
+
"visibility": null,
|
| 91 |
+
"width": null
|
| 92 |
+
}
|
| 93 |
+
},
|
| 94 |
+
"50a9c77fae984dab908378418b726eff": {
|
| 95 |
+
"model_module": "@jupyter-widgets/controls",
|
| 96 |
+
"model_name": "DescriptionStyleModel",
|
| 97 |
+
"model_module_version": "1.5.0",
|
| 98 |
+
"state": {
|
| 99 |
+
"_model_module": "@jupyter-widgets/controls",
|
| 100 |
+
"_model_module_version": "1.5.0",
|
| 101 |
+
"_model_name": "DescriptionStyleModel",
|
| 102 |
+
"_view_count": null,
|
| 103 |
+
"_view_module": "@jupyter-widgets/base",
|
| 104 |
+
"_view_module_version": "1.2.0",
|
| 105 |
+
"_view_name": "StyleView",
|
| 106 |
+
"description_width": ""
|
| 107 |
+
}
|
| 108 |
+
}
|
| 109 |
+
}
|
| 110 |
+
}
|
| 111 |
+
},
|
| 112 |
+
"cells": [
|
| 113 |
+
{
|
| 114 |
+
"cell_type": "markdown",
|
| 115 |
+
"metadata": {
|
| 116 |
+
"id": "view-in-github",
|
| 117 |
+
"colab_type": "text"
|
| 118 |
+
},
|
| 119 |
+
"source": [
|
| 120 |
+
"<a href=\"https://colab.research.google.com/github/arneschneuing/DiffSBDD/blob/main/colab/DiffSBDD.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
|
| 121 |
+
]
|
| 122 |
+
},
|
| 123 |
+
{
|
| 124 |
+
"cell_type": "markdown",
|
| 125 |
+
"source": [
|
| 126 |
+
"# DiffSBDD: Structure-based Drug Design with Equivariant Diffusion Models\n",
|
| 127 |
+
"\n",
|
| 128 |
+
"[**[Paper]**](https://arxiv.org/abs/2210.13695)\n",
|
| 129 |
+
"[**[Code]**](https://github.com/arneschneuing/DiffSBDD)\n",
|
| 130 |
+
"\n",
|
| 131 |
+
"Make sure to select `Runtime` -> `Change runtime type` -> `GPU` before you run the script.\n",
|
| 132 |
+
"\n",
|
| 133 |
+
"<img src=\"https://raw.githubusercontent.com/arneschneuing/DiffSBDD/main/img/overview.png\" height=250>"
|
| 134 |
+
],
|
| 135 |
+
"metadata": {
|
| 136 |
+
"id": "m12HrhIsNKkS"
|
| 137 |
+
}
|
| 138 |
+
},
|
| 139 |
+
{
|
| 140 |
+
"cell_type": "code",
|
| 141 |
+
"source": [
|
| 142 |
+
"#@title Install condacolab (the kernel will be restarted, after that you can execute the remaining cells)\n",
|
| 143 |
+
"!pip install -q condacolab\n",
|
| 144 |
+
"import condacolab\n",
|
| 145 |
+
"condacolab.install()"
|
| 146 |
+
],
|
| 147 |
+
"metadata": {
|
| 148 |
+
"cellView": "form",
|
| 149 |
+
"id": "iAiLo8O8klSG"
|
| 150 |
+
},
|
| 151 |
+
"execution_count": null,
|
| 152 |
+
"outputs": []
|
| 153 |
+
},
|
| 154 |
+
{
|
| 155 |
+
"cell_type": "code",
|
| 156 |
+
"source": [
|
| 157 |
+
"#@title Install dependencies (this will take about 5-10 minutes)\n",
|
| 158 |
+
"%cd /content\n",
|
| 159 |
+
"\n",
|
| 160 |
+
"import os\n",
|
| 161 |
+
"\n",
|
| 162 |
+
"commands = [\n",
|
| 163 |
+
" \"pip install torch==2.0.1 --extra-index-url https://download.pytorch.org/whl/cu118\",\n",
|
| 164 |
+
" \"pip install pytorch-lightning==1.8.4\",\n",
|
| 165 |
+
" \"pip install wandb==0.13.1\",\n",
|
| 166 |
+
" \"pip install rdkit==2022.3.3\",\n",
|
| 167 |
+
" \"pip install biopython==1.79\",\n",
|
| 168 |
+
" \"pip install imageio==2.21.2\",\n",
|
| 169 |
+
" \"pip install scipy==1.7.3\",\n",
|
| 170 |
+
" \"pip install pyg-lib torch-scatter -f https://data.pyg.org/whl/torch-2.0.1+cu118.html\",\n",
|
| 171 |
+
" \"pip install networkx==2.8.6\",\n",
|
| 172 |
+
" \"pip install py3Dmol==1.8.1\",\n",
|
| 173 |
+
" \"conda install openbabel -c conda-forge\",\n",
|
| 174 |
+
" \"git clone https://github.com/arneschneuing/DiffSBDD.git\",\n",
|
| 175 |
+
" \"mkdir -p /content/DiffSBDD/checkpoints\",\n",
|
| 176 |
+
" \"wget -P /content/DiffSBDD/checkpoints https://zenodo.org/record/8183747/files/moad_fullatom_cond.ckpt\",\n",
|
| 177 |
+
" \"wget -P /content/DiffSBDD/checkpoints https://zenodo.org/record/8183747/files/moad_fullatom_joint.ckpt\",\n",
|
| 178 |
+
"]\n",
|
| 179 |
+
"\n",
|
| 180 |
+
"errors = {}\n",
|
| 181 |
+
"\n",
|
| 182 |
+
"if not os.path.isfile(\"/content/READY\"):\n",
|
| 183 |
+
" for cmd in commands:\n",
|
| 184 |
+
" # os.system(cmd)\n",
|
| 185 |
+
" with os.popen(cmd) as f:\n",
|
| 186 |
+
" out = f.read()\n",
|
| 187 |
+
" status = f.close()\n",
|
| 188 |
+
"\n",
|
| 189 |
+
" if status is not None:\n",
|
| 190 |
+
" errors[cmd] = out\n",
|
| 191 |
+
" print(f\"\\n\\nAn error occurred while running '{cmd}'\\n\")\n",
|
| 192 |
+
" print(\"Status:\\t\", status)\n",
|
| 193 |
+
" print(\"Message:\\t\", out)\n",
|
| 194 |
+
"\n",
|
| 195 |
+
"if len(errors) == 0:\n",
|
| 196 |
+
" os.system(\"touch /content/READY\")"
|
| 197 |
+
],
|
| 198 |
+
"metadata": {
|
| 199 |
+
"id": "bZ5Q_kLdVIp7",
|
| 200 |
+
"cellView": "form"
|
| 201 |
+
},
|
| 202 |
+
"execution_count": null,
|
| 203 |
+
"outputs": []
|
| 204 |
+
},
|
| 205 |
+
{
|
| 206 |
+
"cell_type": "markdown",
|
| 207 |
+
"source": [
|
| 208 |
+
"## Choose target PDB"
|
| 209 |
+
],
|
| 210 |
+
"metadata": {
|
| 211 |
+
"id": "e46yNUYbdqZ3"
|
| 212 |
+
}
|
| 213 |
+
},
|
| 214 |
+
{
|
| 215 |
+
"cell_type": "code",
|
| 216 |
+
"source": [
|
| 217 |
+
"from google.colab import files\n",
|
| 218 |
+
"from google.colab import output\n",
|
| 219 |
+
"output.enable_custom_widget_manager()\n",
|
| 220 |
+
"import os.path\n",
|
| 221 |
+
"from pathlib import Path\n",
|
| 222 |
+
"import urllib\n",
|
| 223 |
+
"import os\n",
|
| 224 |
+
"\n",
|
| 225 |
+
"input_dir = Path(\"/content/input_pdbs/\")\n",
|
| 226 |
+
"output_dir = Path(\"/content/output_sdfs/\")\n",
|
| 227 |
+
"input_dir.mkdir(exist_ok=True)\n",
|
| 228 |
+
"output_dir.mkdir(exist_ok=True)\n",
|
| 229 |
+
"\n",
|
| 230 |
+
"target = \"example (3rfm)\" #@param [\"example (3rfm)\", \"upload structure\"]\n",
|
| 231 |
+
"\n",
|
| 232 |
+
"if target == \"example (3rfm)\":\n",
|
| 233 |
+
" pdbfile = Path(input_dir, '3rfm.pdb')\n",
|
| 234 |
+
" urllib.request.urlretrieve('http://files.rcsb.org/download/3rfm.pdb', pdbfile)\n",
|
| 235 |
+
"\n",
|
| 236 |
+
"elif target == \"upload structure\":\n",
|
| 237 |
+
" uploaded = files.upload()\n",
|
| 238 |
+
" fn = list(uploaded.keys())[0]\n",
|
| 239 |
+
" pdbfile = Path(input_dir, fn)\n",
|
| 240 |
+
" Path(fn).rename(pdbfile)"
|
| 241 |
+
],
|
| 242 |
+
"metadata": {
|
| 243 |
+
"cellView": "form",
|
| 244 |
+
"id": "tzkQJJeNdJMa"
|
| 245 |
+
},
|
| 246 |
+
"execution_count": 2,
|
| 247 |
+
"outputs": []
|
| 248 |
+
},
|
| 249 |
+
{
|
| 250 |
+
"cell_type": "markdown",
|
| 251 |
+
"source": [
|
| 252 |
+
"## Define binding pocket\n",
|
| 253 |
+
"\n",
|
| 254 |
+
"You can choose between two options to define the binding pocket:\n",
|
| 255 |
+
"1. **list of residues:** provide a list where each residue is specified as `<chain_id>:<res_id>`, e.g, `A:1 A:2 A:3 A:4 A:5 A:6 A:7`\n",
|
| 256 |
+
"2. **reference ligand:** if the uploaded PDB structure contains a reference ligand in the target pocket, you can specify its location as `<chain_id>:<res_id>` and the pocket will be extracted automatically"
|
| 257 |
+
],
|
| 258 |
+
"metadata": {
|
| 259 |
+
"id": "eif7HG0Bd2qj"
|
| 260 |
+
}
|
| 261 |
+
},
|
| 262 |
+
{
|
| 263 |
+
"cell_type": "code",
|
| 264 |
+
"source": [
|
| 265 |
+
"#@title { run: \"auto\" }\n",
|
| 266 |
+
"import ipywidgets as widgets\n",
|
| 267 |
+
"\n",
|
| 268 |
+
"#@markdown **Note:** This cell is an interactive widget and the values will be updated automatically every time you change them. You do not need to execute the cell again. If you do, the default values will be reinserted.\n",
|
| 269 |
+
"\n",
|
| 270 |
+
"pocket_definition = \"reference ligand\" #@param [\"list of residues\", \"reference ligand\"]\n",
|
| 271 |
+
"\n",
|
| 272 |
+
"if pocket_definition == \"list of residues\":\n",
|
| 273 |
+
" print('pocket_residues:')\n",
|
| 274 |
+
" w = widgets.Text(value='A:9 A:59 A:60 A:62 A:63 A:64 A:66 A:67 A:80 A:81 A:84 A:85 A:88 A:167 A:168 A:169 A:170 A:172 A:174 A:177 A:181 A:246 A:249 A:250 A:252 A:253 A:256 A:265 A:267 A:270 A:271 A:273 A:274 A:275 A:277 A:278')\n",
|
| 275 |
+
" pocket_flag = \"--resi_list\"\n",
|
| 276 |
+
"elif pocket_definition == \"reference ligand\":\n",
|
| 277 |
+
" print('reference_ligand:')\n",
|
| 278 |
+
" w = widgets.Text(value='A:330')\n",
|
| 279 |
+
" pocket_flag = \"--ref_ligand\"\n",
|
| 280 |
+
"\n",
|
| 281 |
+
"display(w)"
|
| 282 |
+
],
|
| 283 |
+
"metadata": {
|
| 284 |
+
"colab": {
|
| 285 |
+
"base_uri": "https://localhost:8080/",
|
| 286 |
+
"height": 67,
|
| 287 |
+
"referenced_widgets": [
|
| 288 |
+
"e293d4b7fb5e4d60a9303400a571d481",
|
| 289 |
+
"9ccaa9faa36646678a0278cd651c3f30",
|
| 290 |
+
"50a9c77fae984dab908378418b726eff"
|
| 291 |
+
]
|
| 292 |
+
},
|
| 293 |
+
"cellView": "form",
|
| 294 |
+
"id": "E9AMhAo_VlUo",
|
| 295 |
+
"outputId": "b4f9f05f-123c-40d4-c4bd-77ef5e19cb63"
|
| 296 |
+
},
|
| 297 |
+
"execution_count": 3,
|
| 298 |
+
"outputs": [
|
| 299 |
+
{
|
| 300 |
+
"output_type": "stream",
|
| 301 |
+
"name": "stdout",
|
| 302 |
+
"text": [
|
| 303 |
+
"reference_ligand:\n"
|
| 304 |
+
]
|
| 305 |
+
},
|
| 306 |
+
{
|
| 307 |
+
"output_type": "display_data",
|
| 308 |
+
"data": {
|
| 309 |
+
"text/plain": [
|
| 310 |
+
"Text(value='A:330')"
|
| 311 |
+
],
|
| 312 |
+
"application/vnd.jupyter.widget-view+json": {
|
| 313 |
+
"version_major": 2,
|
| 314 |
+
"version_minor": 0,
|
| 315 |
+
"model_id": "e293d4b7fb5e4d60a9303400a571d481"
|
| 316 |
+
}
|
| 317 |
+
},
|
| 318 |
+
"metadata": {
|
| 319 |
+
"application/vnd.jupyter.widget-view+json": {
|
| 320 |
+
"colab": {
|
| 321 |
+
"custom_widget_manager": {
|
| 322 |
+
"url": "https://ssl.gstatic.com/colaboratory-static/widgets/colab-cdn-widget-manager/b3e629b1971e1542/manager.min.js"
|
| 323 |
+
}
|
| 324 |
+
}
|
| 325 |
+
}
|
| 326 |
+
}
|
| 327 |
+
}
|
| 328 |
+
]
|
| 329 |
+
},
|
| 330 |
+
{
|
| 331 |
+
"cell_type": "markdown",
|
| 332 |
+
"source": [
|
| 333 |
+
"## Settings\n",
|
| 334 |
+
"\n",
|
| 335 |
+
"Notes:\n",
|
| 336 |
+
"- `timesteps < 500` is an experimental feature\n",
|
| 337 |
+
"- `resamplings` and `jump_length` only pertain to the inpainting model"
|
| 338 |
+
],
|
| 339 |
+
"metadata": {
|
| 340 |
+
"id": "eYgXjygkeG14"
|
| 341 |
+
}
|
| 342 |
+
},
|
| 343 |
+
{
|
| 344 |
+
"cell_type": "code",
|
| 345 |
+
"source": [
|
| 346 |
+
"#@markdown ## Sampling\n",
|
| 347 |
+
"n_samples = 10 #@param {type:\"slider\", min:1, max:100, step:1}\n",
|
| 348 |
+
"ligand_nodes = 20 #@param {type:\"integer\"}\n",
|
| 349 |
+
"\n",
|
| 350 |
+
"model = \"Conditional model (Binding MOAD)\" #@param [\"Conditional model (Binding MOAD)\", \"Inpainting model (Binding MOAD)\"]\n",
|
| 351 |
+
"checkpoint = Path('/content', 'DiffSBDD', 'checkpoints', 'moad_fullatom_cond.ckpt') if model == \"Conditional model (Binding MOAD)\" else Path('DiffSBDD', 'checkpoints', 'moad_fullatom_joint.ckpt')\n",
|
| 352 |
+
"\n",
|
| 353 |
+
"timesteps = 100 #@param {type:\"slider\", min:1, max:500, step:1}\n",
|
| 354 |
+
"\n",
|
| 355 |
+
"#@markdown ## Inpainting parameters\n",
|
| 356 |
+
"resamplings = 1 #@param {type:\"integer\"}\n",
|
| 357 |
+
"jump_length = 1 #@param {type:\"integer\"}\n",
|
| 358 |
+
"\n",
|
| 359 |
+
"#@markdown ## Post-processing\n",
|
| 360 |
+
"keep_all_fragments = False #@param {type:\"boolean\"}\n",
|
| 361 |
+
"keep_all_fragments = \"--all_frags\" if keep_all_fragments else \"\"\n",
|
| 362 |
+
"sanitize = True #@param {type:\"boolean\"}\n",
|
| 363 |
+
"sanitize = \"--sanitize\" if sanitize else \"\"\n",
|
| 364 |
+
"relax = True #@param {type:\"boolean\"}\n",
|
| 365 |
+
"relax = \"--relax\" if relax else \"\""
|
| 366 |
+
],
|
| 367 |
+
"metadata": {
|
| 368 |
+
"id": "VQ6xa7EPMtyI",
|
| 369 |
+
"cellView": "form"
|
| 370 |
+
},
|
| 371 |
+
"execution_count": 4,
|
| 372 |
+
"outputs": []
|
| 373 |
+
},
|
| 374 |
+
{
|
| 375 |
+
"cell_type": "code",
|
| 376 |
+
"source": [
|
| 377 |
+
"#@title Run sampling (this will take a few minutes; runtime depends on the input parameters `n_samples`, `timesteps` etc.)\n",
|
| 378 |
+
"%%capture\n",
|
| 379 |
+
"%cd /content/DiffSBDD\n",
|
| 380 |
+
"\n",
|
| 381 |
+
"import argparse\n",
|
| 382 |
+
"from pathlib import Path\n",
|
| 383 |
+
"import torch\n",
|
| 384 |
+
"import utils\n",
|
| 385 |
+
"from lightning_modules import LigandPocketDDPM\n",
|
| 386 |
+
"\n",
|
| 387 |
+
"\n",
|
| 388 |
+
"pdb_id = Path(pdbfile).stem\n",
|
| 389 |
+
"pocket = w.value\n",
|
| 390 |
+
"\n",
|
| 391 |
+
"device = 'cuda' if torch.cuda.is_available() else 'cpu'\n",
|
| 392 |
+
"\n",
|
| 393 |
+
"# Load model\n",
|
| 394 |
+
"model = LigandPocketDDPM.load_from_checkpoint(checkpoint, map_location=device)\n",
|
| 395 |
+
"model = model.to(device)\n",
|
| 396 |
+
"\n",
|
| 397 |
+
"num_nodes_lig = torch.ones(n_samples, dtype=int) * ligand_nodes\n",
|
| 398 |
+
"\n",
|
| 399 |
+
"if pocket_flag == '--ref_ligand':\n",
|
| 400 |
+
" resi_list = None\n",
|
| 401 |
+
" ref_ligand = pocket\n",
|
| 402 |
+
"else:\n",
|
| 403 |
+
" resi_list = pocket.split()\n",
|
| 404 |
+
" ref_ligand = None\n",
|
| 405 |
+
"\n",
|
| 406 |
+
"molecules = model.generate_ligands(\n",
|
| 407 |
+
" pdbfile, n_samples, resi_list, ref_ligand,\n",
|
| 408 |
+
" num_nodes_lig, (sanitize == '--sanitize'),\n",
|
| 409 |
+
" largest_frag=not (keep_all_fragments == \"--all_frags\"),\n",
|
| 410 |
+
" relax_iter=(200 if (relax == \"--relax\") else 0),\n",
|
| 411 |
+
" resamplings=resamplings, jump_length=jump_length,\n",
|
| 412 |
+
" timesteps=timesteps\n",
|
| 413 |
+
")\n",
|
| 414 |
+
"\n",
|
| 415 |
+
"# Make SDF files\n",
|
| 416 |
+
"utils.write_sdf_file(Path(output_dir, f'{pdb_id}_mol.sdf'), molecules)"
|
| 417 |
+
],
|
| 418 |
+
"metadata": {
|
| 419 |
+
"id": "pWitBCDUoRBw",
|
| 420 |
+
"cellView": "form"
|
| 421 |
+
},
|
| 422 |
+
"execution_count": 5,
|
| 423 |
+
"outputs": []
|
| 424 |
+
},
|
| 425 |
+
{
|
| 426 |
+
"cell_type": "code",
|
| 427 |
+
"source": [
|
| 428 |
+
"#@title Show generated molecules\n",
|
| 429 |
+
"\n",
|
| 430 |
+
"import sys\n",
|
| 431 |
+
"sys.path.append(\"/usr/local/lib/python3.9/site-packages\")\n",
|
| 432 |
+
"import py3Dmol\n",
|
| 433 |
+
"\n",
|
| 434 |
+
"view = py3Dmol.view(js='https://3dmol.org/build/3Dmol.js',)\n",
|
| 435 |
+
"view.addModel(open(pdbfile, 'r').read(), 'pdb')\n",
|
| 436 |
+
"view.setStyle({'model': -1}, {'cartoon': {'color': 'lime'}})\n",
|
| 437 |
+
"# view.addSurface(py3Dmol.VDW, {'opacity': 0.4, 'color': 'lime'})\n",
|
| 438 |
+
"view.addModelsAsFrames(open(Path(output_dir, f\"{pdbfile.stem}_mol.sdf\"), 'r').read())\n",
|
| 439 |
+
"view.setStyle({'model': -1}, {'stick': {}})\n",
|
| 440 |
+
"view.zoomTo({'model': -1})\n",
|
| 441 |
+
"view.zoom(0.5)\n",
|
| 442 |
+
"if target == \"example (3rfm)\":\n",
|
| 443 |
+
" view.rotate(90, 'y')\n",
|
| 444 |
+
"view.animate({'loop': \"forward\", 'interval': 1000})\n",
|
| 445 |
+
"view.show()"
|
| 446 |
+
],
|
| 447 |
+
"metadata": {
|
| 448 |
+
"cellView": "form",
|
| 449 |
+
"id": "lVyysoc0Rp_-"
|
| 450 |
+
},
|
| 451 |
+
"execution_count": null,
|
| 452 |
+
"outputs": []
|
| 453 |
+
},
|
| 454 |
+
{
|
| 455 |
+
"cell_type": "code",
|
| 456 |
+
"source": [
|
| 457 |
+
"#@title Download .sdf file\n",
|
| 458 |
+
"files.download(Path(output_dir, f\"{pdbfile.stem}_mol.sdf\"))"
|
| 459 |
+
],
|
| 460 |
+
"metadata": {
|
| 461 |
+
"cellView": "form",
|
| 462 |
+
"id": "3lQUv8rmQRd_"
|
| 463 |
+
},
|
| 464 |
+
"execution_count": null,
|
| 465 |
+
"outputs": []
|
| 466 |
+
}
|
| 467 |
+
]
|
| 468 |
+
}
|
configs/crossdock_ca_cond.yml
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
run_name: 'SE3-cond-CA'
|
| 2 |
+
logdir: '/path/to/logdir'
|
| 3 |
+
wandb_params:
|
| 4 |
+
mode: 'online' # disabled, offline, online
|
| 5 |
+
entity: 'my_username'
|
| 6 |
+
dataset: 'crossdock'
|
| 7 |
+
datadir: '/path/to/processed_crossdock_noH_ca_only'
|
| 8 |
+
enable_progress_bar: True
|
| 9 |
+
num_sanity_val_steps: 0
|
| 10 |
+
|
| 11 |
+
mode: 'pocket_conditioning' # joint, pocket_conditioning
|
| 12 |
+
pocket_representation: 'CA' # CA, full-atom
|
| 13 |
+
batch_size: 96
|
| 14 |
+
lr: 1.0e-3
|
| 15 |
+
noise_factor: 1.0
|
| 16 |
+
n_epochs: 1000
|
| 17 |
+
num_workers: 0
|
| 18 |
+
gpus: 4
|
| 19 |
+
clip_grad: True
|
| 20 |
+
augment_rotation: False
|
| 21 |
+
augment_noise: 0
|
| 22 |
+
accumulate_grad_batches: 1
|
| 23 |
+
|
| 24 |
+
auxiliary_loss: False
|
| 25 |
+
loss_params:
|
| 26 |
+
max_weight: 0.001
|
| 27 |
+
schedule: 'linear'
|
| 28 |
+
clamp_lj: 3.0
|
| 29 |
+
|
| 30 |
+
egnn_params:
|
| 31 |
+
device: 'cuda'
|
| 32 |
+
edge_cutoff_ligand: null
|
| 33 |
+
edge_cutoff_pocket: 5.0
|
| 34 |
+
edge_cutoff_interaction: 5.0
|
| 35 |
+
reflection_equivariant: False
|
| 36 |
+
joint_nf: 128
|
| 37 |
+
hidden_nf: 256
|
| 38 |
+
n_layers: 6
|
| 39 |
+
attention: True
|
| 40 |
+
tanh: True
|
| 41 |
+
norm_constant: 1
|
| 42 |
+
inv_sublayers: 1
|
| 43 |
+
sin_embedding: False
|
| 44 |
+
aggregation_method: 'sum'
|
| 45 |
+
normalization_factor: 100 #1 # used if aggregation_method='sum'
|
| 46 |
+
|
| 47 |
+
diffusion_params:
|
| 48 |
+
diffusion_steps: 500
|
| 49 |
+
diffusion_noise_schedule: 'polynomial_2' # learned, cosine
|
| 50 |
+
diffusion_noise_precision: 5.0e-4
|
| 51 |
+
diffusion_loss_type: 'l2' # vlb, l2
|
| 52 |
+
normalize_factors: [1, 1] #[10, 4] # [x, h]
|
| 53 |
+
|
| 54 |
+
eval_epochs: 50
|
| 55 |
+
visualize_sample_epoch: 50
|
| 56 |
+
visualize_chain_epoch: 50
|
| 57 |
+
eval_params:
|
| 58 |
+
n_eval_samples: 100
|
| 59 |
+
eval_batch_size: 100
|
| 60 |
+
smiles_file: '/path/to/train_smiles.npy'
|
| 61 |
+
n_visualize_samples: 5
|
| 62 |
+
keep_frames: 100
|
configs/crossdock_ca_joint.yml
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
run_name: 'SE3-inpaint-CA'
|
| 2 |
+
logdir: '/path/to/logdir'
|
| 3 |
+
wandb_params:
|
| 4 |
+
mode: 'online' # disabled, offline, online
|
| 5 |
+
entity: 'my_username'
|
| 6 |
+
dataset: 'crossdock'
|
| 7 |
+
datadir: '/path/to/processed_crossdock_noH_ca_only'
|
| 8 |
+
enable_progress_bar: True
|
| 9 |
+
num_sanity_val_steps: 0
|
| 10 |
+
|
| 11 |
+
mode: 'joint' # joint, pocket_conditioning
|
| 12 |
+
pocket_representation: 'CA' # CA, full-atom
|
| 13 |
+
batch_size: 96
|
| 14 |
+
lr: 1.0e-3
|
| 15 |
+
noise_factor: 1.0
|
| 16 |
+
n_epochs: 1000
|
| 17 |
+
num_workers: 0
|
| 18 |
+
gpus: 4
|
| 19 |
+
clip_grad: True
|
| 20 |
+
augment_rotation: False
|
| 21 |
+
augment_noise: 0
|
| 22 |
+
accumulate_grad_batches: 1
|
| 23 |
+
|
| 24 |
+
auxiliary_loss: False
|
| 25 |
+
loss_params:
|
| 26 |
+
max_weight: 0.001
|
| 27 |
+
schedule: 'linear'
|
| 28 |
+
clamp_lj: 3.0
|
| 29 |
+
|
| 30 |
+
egnn_params:
|
| 31 |
+
device: 'cuda'
|
| 32 |
+
edge_cutoff_ligand: null
|
| 33 |
+
edge_cutoff_pocket: 5.0
|
| 34 |
+
edge_cutoff_interaction: 5.0
|
| 35 |
+
reflection_equivariant: False
|
| 36 |
+
joint_nf: 128
|
| 37 |
+
hidden_nf: 256
|
| 38 |
+
n_layers: 6
|
| 39 |
+
attention: True
|
| 40 |
+
tanh: True
|
| 41 |
+
norm_constant: 1
|
| 42 |
+
inv_sublayers: 1
|
| 43 |
+
sin_embedding: False
|
| 44 |
+
aggregation_method: 'sum'
|
| 45 |
+
normalization_factor: 100 #1 # used if aggregation_method='sum'
|
| 46 |
+
|
| 47 |
+
diffusion_params:
|
| 48 |
+
diffusion_steps: 500
|
| 49 |
+
diffusion_noise_schedule: 'polynomial_2' # learned, cosine
|
| 50 |
+
diffusion_noise_precision: 5.0e-4
|
| 51 |
+
diffusion_loss_type: 'l2' # vlb, l2
|
| 52 |
+
normalize_factors: [1, 1] #[10, 4] # [x, h]
|
| 53 |
+
|
| 54 |
+
eval_epochs: 50
|
| 55 |
+
visualize_sample_epoch: 50
|
| 56 |
+
visualize_chain_epoch: 50
|
| 57 |
+
eval_params:
|
| 58 |
+
n_eval_samples: 100
|
| 59 |
+
eval_batch_size: 100
|
| 60 |
+
smiles_file: '/path/to/train_smiles.npy'
|
| 61 |
+
n_visualize_samples: 5
|
| 62 |
+
keep_frames: 100
|
configs/crossdock_fullatom_cond.yml
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
run_name: 'SE3-cond-full'
|
| 2 |
+
logdir: '/path/to/logdir'
|
| 3 |
+
wandb_params:
|
| 4 |
+
mode: 'online' # disabled, offline, online
|
| 5 |
+
entity: 'my_username'
|
| 6 |
+
dataset: 'crossdock'
|
| 7 |
+
datadir: '/path/to/processed_crossdock_noH_full'
|
| 8 |
+
enable_progress_bar: True
|
| 9 |
+
num_sanity_val_steps: 0
|
| 10 |
+
|
| 11 |
+
mode: 'pocket_conditioning' # joint, pocket_conditioning
|
| 12 |
+
pocket_representation: 'full-atom' # CA, full-atom
|
| 13 |
+
batch_size: 16
|
| 14 |
+
lr: 1.0e-3
|
| 15 |
+
noise_factor: 1.0
|
| 16 |
+
n_epochs: 1000
|
| 17 |
+
num_workers: 0
|
| 18 |
+
gpus: 4
|
| 19 |
+
clip_grad: True
|
| 20 |
+
augment_rotation: False
|
| 21 |
+
augment_noise: 0
|
| 22 |
+
accumulate_grad_batches: 1
|
| 23 |
+
|
| 24 |
+
auxiliary_loss: False
|
| 25 |
+
loss_params:
|
| 26 |
+
max_weight: 0.001
|
| 27 |
+
schedule: 'linear'
|
| 28 |
+
clamp_lj: 3.0
|
| 29 |
+
|
| 30 |
+
egnn_params:
|
| 31 |
+
device: 'cuda'
|
| 32 |
+
edge_cutoff_ligand: null
|
| 33 |
+
edge_cutoff_pocket: 5.0
|
| 34 |
+
edge_cutoff_interaction: 5.0
|
| 35 |
+
reflection_equivariant: False
|
| 36 |
+
joint_nf: 128
|
| 37 |
+
hidden_nf: 256
|
| 38 |
+
n_layers: 6
|
| 39 |
+
attention: True
|
| 40 |
+
tanh: True
|
| 41 |
+
norm_constant: 1
|
| 42 |
+
inv_sublayers: 1
|
| 43 |
+
sin_embedding: False
|
| 44 |
+
aggregation_method: 'sum'
|
| 45 |
+
normalization_factor: 100 #1 # used if aggregation_method='sum'
|
| 46 |
+
|
| 47 |
+
diffusion_params:
|
| 48 |
+
diffusion_steps: 500
|
| 49 |
+
diffusion_noise_schedule: 'polynomial_2' # learned, cosine
|
| 50 |
+
diffusion_noise_precision: 5.0e-4
|
| 51 |
+
diffusion_loss_type: 'l2' # vlb, l2
|
| 52 |
+
normalize_factors: [1, 4] #[10, 4] # [x, h]
|
| 53 |
+
|
| 54 |
+
eval_epochs: 50
|
| 55 |
+
visualize_sample_epoch: 50
|
| 56 |
+
visualize_chain_epoch: 50
|
| 57 |
+
eval_params:
|
| 58 |
+
n_eval_samples: 100
|
| 59 |
+
eval_batch_size: 100
|
| 60 |
+
smiles_file: '/path/to/train_smiles.npy'
|
| 61 |
+
n_visualize_samples: 5
|
| 62 |
+
keep_frames: 100
|
configs/crossdock_fullatom_joint.yml
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
run_name: 'SE3-inpaint-full'
|
| 2 |
+
logdir: '/path/to/logdir'
|
| 3 |
+
wandb_params:
|
| 4 |
+
mode: 'online' # disabled, offline, online
|
| 5 |
+
entity: 'my_username'
|
| 6 |
+
dataset: 'crossdock'
|
| 7 |
+
datadir: '/path/to/processed_crossdock_noH_full'
|
| 8 |
+
enable_progress_bar: True
|
| 9 |
+
num_sanity_val_steps: 0
|
| 10 |
+
|
| 11 |
+
mode: 'joint' # joint, pocket_conditioning
|
| 12 |
+
pocket_representation: 'full-atom' # CA, full-atom
|
| 13 |
+
batch_size: 8
|
| 14 |
+
lr: 1.0e-3
|
| 15 |
+
noise_factor: 1.0
|
| 16 |
+
n_epochs: 1000
|
| 17 |
+
num_workers: 0
|
| 18 |
+
gpus: 4
|
| 19 |
+
clip_grad: True
|
| 20 |
+
augment_rotation: False
|
| 21 |
+
augment_noise: 0
|
| 22 |
+
accumulate_grad_batches: 4
|
| 23 |
+
|
| 24 |
+
auxiliary_loss: False
|
| 25 |
+
loss_params:
|
| 26 |
+
max_weight: 0.001
|
| 27 |
+
schedule: 'linear'
|
| 28 |
+
clamp_lj: 3.0
|
| 29 |
+
|
| 30 |
+
egnn_params:
|
| 31 |
+
device: 'cuda'
|
| 32 |
+
edge_cutoff_ligand: null
|
| 33 |
+
edge_cutoff_pocket: 5.0
|
| 34 |
+
edge_cutoff_interaction: 5.0
|
| 35 |
+
reflection_equivariant: False
|
| 36 |
+
joint_nf: 32
|
| 37 |
+
hidden_nf: 128
|
| 38 |
+
n_layers: 5
|
| 39 |
+
attention: True
|
| 40 |
+
tanh: True
|
| 41 |
+
norm_constant: 1
|
| 42 |
+
inv_sublayers: 1
|
| 43 |
+
sin_embedding: False
|
| 44 |
+
aggregation_method: 'sum'
|
| 45 |
+
normalization_factor: 100 #1 # used if aggregation_method='sum'
|
| 46 |
+
|
| 47 |
+
diffusion_params:
|
| 48 |
+
diffusion_steps: 500
|
| 49 |
+
diffusion_noise_schedule: 'polynomial_2' # learned, cosine
|
| 50 |
+
diffusion_noise_precision: 5.0e-4
|
| 51 |
+
diffusion_loss_type: 'l2' # vlb, l2
|
| 52 |
+
normalize_factors: [1, 4] #[10, 4] # [x, h]
|
| 53 |
+
|
| 54 |
+
eval_epochs: 50
|
| 55 |
+
visualize_sample_epoch: 50
|
| 56 |
+
visualize_chain_epoch: 50
|
| 57 |
+
eval_params:
|
| 58 |
+
n_eval_samples: 100
|
| 59 |
+
eval_batch_size: 100
|
| 60 |
+
smiles_file: '/path/to/train_smiles.npy'
|
| 61 |
+
n_visualize_samples: 5
|
| 62 |
+
keep_frames: 100
|
configs/moad_ca_cond.yml
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
run_name: 'SE3-cond-CA'
|
| 2 |
+
logdir: '/path/to/logdir'
|
| 3 |
+
wandb_params:
|
| 4 |
+
mode: 'online' # disabled, offline, online
|
| 5 |
+
entity: 'my_username'
|
| 6 |
+
group: 'bindingmoad'
|
| 7 |
+
dataset: 'bindingmoad'
|
| 8 |
+
datadir: '/path/to/processed_noH_ca/'
|
| 9 |
+
enable_progress_bar: False
|
| 10 |
+
num_sanity_val_steps: 0
|
| 11 |
+
|
| 12 |
+
mode: 'pocket_conditioning'
|
| 13 |
+
pocket_representation: 'CA'
|
| 14 |
+
virtual_nodes: False
|
| 15 |
+
batch_size: 64
|
| 16 |
+
lr: 5.0e-4
|
| 17 |
+
n_epochs: 1000
|
| 18 |
+
num_workers: 2
|
| 19 |
+
gpus: 1
|
| 20 |
+
clip_grad: True
|
| 21 |
+
augment_rotation: False
|
| 22 |
+
augment_noise: 0
|
| 23 |
+
|
| 24 |
+
auxiliary_loss: False
|
| 25 |
+
loss_params:
|
| 26 |
+
max_weight: 0.001
|
| 27 |
+
schedule: 'linear'
|
| 28 |
+
clamp_lj: 3.0
|
| 29 |
+
|
| 30 |
+
egnn_params:
|
| 31 |
+
device: 'cuda'
|
| 32 |
+
edge_cutoff_ligand: null
|
| 33 |
+
edge_cutoff_pocket: 8.0
|
| 34 |
+
edge_cutoff_interaction: 8.0
|
| 35 |
+
reflection_equivariant: False
|
| 36 |
+
edge_embedding_dim: null
|
| 37 |
+
joint_nf: 32
|
| 38 |
+
hidden_nf: 128
|
| 39 |
+
n_layers: 5
|
| 40 |
+
attention: True
|
| 41 |
+
tanh: True
|
| 42 |
+
norm_constant: 1
|
| 43 |
+
inv_sublayers: 1
|
| 44 |
+
sin_embedding: False
|
| 45 |
+
aggregation_method: 'sum'
|
| 46 |
+
normalization_factor: 100 # used if aggregation_method='sum'
|
| 47 |
+
|
| 48 |
+
diffusion_params:
|
| 49 |
+
diffusion_steps: 500
|
| 50 |
+
diffusion_noise_schedule: 'polynomial_2' # learned, cosine
|
| 51 |
+
diffusion_noise_precision: 1.0e-5
|
| 52 |
+
diffusion_loss_type: 'l2' # vlb, l2
|
| 53 |
+
normalize_factors: [1, 4] # [x, h]
|
| 54 |
+
|
| 55 |
+
eval_epochs: 25
|
| 56 |
+
visualize_sample_epoch: 25
|
| 57 |
+
visualize_chain_epoch: 25
|
| 58 |
+
eval_params:
|
| 59 |
+
n_eval_samples: 100
|
| 60 |
+
smiles_file: '/path/to/train_smiles.npy'
|
| 61 |
+
n_visualize_samples: 5
|
| 62 |
+
keep_frames: 100
|
configs/moad_ca_joint.yml
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
run_name: 'SE3-joint-CA'
|
| 2 |
+
logdir: '/path/to/logdir'
|
| 3 |
+
wandb_params:
|
| 4 |
+
mode: 'online' # disabled, offline, online
|
| 5 |
+
entity: 'my_username'
|
| 6 |
+
group: 'bindingmoad'
|
| 7 |
+
dataset: 'bindingmoad'
|
| 8 |
+
datadir: '/path/to/processed_noH_ca/'
|
| 9 |
+
enable_progress_bar: False
|
| 10 |
+
num_sanity_val_steps: 0
|
| 11 |
+
|
| 12 |
+
mode: 'joint'
|
| 13 |
+
pocket_representation: 'CA'
|
| 14 |
+
virtual_nodes: False
|
| 15 |
+
batch_size: 64
|
| 16 |
+
lr: 5.0e-4
|
| 17 |
+
n_epochs: 1000
|
| 18 |
+
num_workers: 2
|
| 19 |
+
gpus: 1
|
| 20 |
+
clip_grad: True
|
| 21 |
+
augment_rotation: False
|
| 22 |
+
augment_noise: 0
|
| 23 |
+
|
| 24 |
+
auxiliary_loss: False
|
| 25 |
+
loss_params:
|
| 26 |
+
max_weight: 0.001
|
| 27 |
+
schedule: 'linear'
|
| 28 |
+
clamp_lj: 3.0
|
| 29 |
+
|
| 30 |
+
egnn_params:
|
| 31 |
+
device: 'cuda'
|
| 32 |
+
edge_cutoff_ligand: null
|
| 33 |
+
edge_cutoff_pocket: 8.0
|
| 34 |
+
edge_cutoff_interaction: 8.0
|
| 35 |
+
reflection_equivariant: False
|
| 36 |
+
edge_embedding_dim: null
|
| 37 |
+
joint_nf: 32
|
| 38 |
+
hidden_nf: 128
|
| 39 |
+
n_layers: 5
|
| 40 |
+
attention: True
|
| 41 |
+
tanh: True
|
| 42 |
+
norm_constant: 1
|
| 43 |
+
inv_sublayers: 1
|
| 44 |
+
sin_embedding: False
|
| 45 |
+
aggregation_method: 'sum'
|
| 46 |
+
normalization_factor: 100 # used if aggregation_method='sum'
|
| 47 |
+
|
| 48 |
+
diffusion_params:
|
| 49 |
+
diffusion_steps: 500
|
| 50 |
+
diffusion_noise_schedule: 'polynomial_2' # learned, cosine
|
| 51 |
+
diffusion_noise_precision: 1.0e-5
|
| 52 |
+
diffusion_loss_type: 'l2' # vlb, l2
|
| 53 |
+
normalize_factors: [1, 4] # [x, h]
|
| 54 |
+
|
| 55 |
+
eval_epochs: 25
|
| 56 |
+
visualize_sample_epoch: 25
|
| 57 |
+
visualize_chain_epoch: 25
|
| 58 |
+
eval_params:
|
| 59 |
+
n_eval_samples: 100
|
| 60 |
+
smiles_file: '/path/to/train_smiles.npy'
|
| 61 |
+
n_visualize_samples: 5
|
| 62 |
+
keep_frames: 100
|
configs/moad_fullatom_cond.yml
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
run_name: 'SE3-cond-fullAtom'
|
| 2 |
+
logdir: '/path/to/logdir'
|
| 3 |
+
wandb_params:
|
| 4 |
+
mode: 'online' # disabled, offline, online
|
| 5 |
+
entity: 'my_username'
|
| 6 |
+
group: 'bindingmoad'
|
| 7 |
+
dataset: 'bindingmoad'
|
| 8 |
+
datadir: '/path/to/processed_noH_full/'
|
| 9 |
+
enable_progress_bar: False
|
| 10 |
+
num_sanity_val_steps: 0
|
| 11 |
+
|
| 12 |
+
mode: 'pocket_conditioning'
|
| 13 |
+
pocket_representation: 'full-atom'
|
| 14 |
+
virtual_nodes: False
|
| 15 |
+
batch_size: 32
|
| 16 |
+
lr: 5.0e-4
|
| 17 |
+
n_epochs: 1000
|
| 18 |
+
num_workers: 2
|
| 19 |
+
gpus: 2
|
| 20 |
+
clip_grad: True
|
| 21 |
+
augment_rotation: False
|
| 22 |
+
augment_noise: 0
|
| 23 |
+
|
| 24 |
+
auxiliary_loss: False
|
| 25 |
+
loss_params:
|
| 26 |
+
max_weight: 0.001
|
| 27 |
+
schedule: 'linear'
|
| 28 |
+
clamp_lj: 3.0
|
| 29 |
+
|
| 30 |
+
egnn_params:
|
| 31 |
+
device: 'cuda'
|
| 32 |
+
edge_cutoff_ligand: null
|
| 33 |
+
edge_cutoff_pocket: 4.0
|
| 34 |
+
edge_cutoff_interaction: 7.0
|
| 35 |
+
reflection_equivariant: False
|
| 36 |
+
edge_embedding_dim: 8
|
| 37 |
+
joint_nf: 128
|
| 38 |
+
hidden_nf: 192
|
| 39 |
+
n_layers: 6
|
| 40 |
+
attention: True
|
| 41 |
+
tanh: True
|
| 42 |
+
norm_constant: 1
|
| 43 |
+
inv_sublayers: 1
|
| 44 |
+
sin_embedding: False
|
| 45 |
+
aggregation_method: 'sum'
|
| 46 |
+
normalization_factor: 100 # used if aggregation_method='sum'
|
| 47 |
+
|
| 48 |
+
diffusion_params:
|
| 49 |
+
diffusion_steps: 500
|
| 50 |
+
diffusion_noise_schedule: 'polynomial_2' # learned, cosine
|
| 51 |
+
diffusion_noise_precision: 1.0e-5
|
| 52 |
+
diffusion_loss_type: 'l2' # vlb, l2
|
| 53 |
+
normalize_factors: [1, 4] # [x, h]
|
| 54 |
+
|
| 55 |
+
eval_epochs: 25
|
| 56 |
+
visualize_sample_epoch: 25
|
| 57 |
+
visualize_chain_epoch: 25
|
| 58 |
+
eval_params:
|
| 59 |
+
n_eval_samples: 100
|
| 60 |
+
eval_batch_size: 50
|
| 61 |
+
smiles_file: '/path/to/train_smiles.npy'
|
| 62 |
+
n_visualize_samples: 5
|
| 63 |
+
keep_frames: 100
|
configs/moad_fullatom_joint.yml
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
run_name: 'SE3-joint-fullAtom'
|
| 2 |
+
logdir: '/path/to/logdir'
|
| 3 |
+
wandb_params:
|
| 4 |
+
mode: 'online' # disabled, offline, online
|
| 5 |
+
entity: 'my_username'
|
| 6 |
+
group: 'bindingmoad'
|
| 7 |
+
dataset: 'bindingmoad'
|
| 8 |
+
datadir: '/path/to//processed_noH_full/'
|
| 9 |
+
enable_progress_bar: False
|
| 10 |
+
num_sanity_val_steps: 0
|
| 11 |
+
|
| 12 |
+
mode: 'joint'
|
| 13 |
+
pocket_representation: 'full-atom'
|
| 14 |
+
virtual_nodes: False
|
| 15 |
+
batch_size: 16
|
| 16 |
+
lr: 5.0e-4
|
| 17 |
+
n_epochs: 1000
|
| 18 |
+
num_workers: 2
|
| 19 |
+
gpus: 2
|
| 20 |
+
clip_grad: True
|
| 21 |
+
augment_rotation: False
|
| 22 |
+
augment_noise: 0
|
| 23 |
+
|
| 24 |
+
auxiliary_loss: False
|
| 25 |
+
loss_params:
|
| 26 |
+
max_weight: 0.001
|
| 27 |
+
schedule: 'linear'
|
| 28 |
+
clamp_lj: 3.0
|
| 29 |
+
|
| 30 |
+
egnn_params:
|
| 31 |
+
device: 'cuda'
|
| 32 |
+
edge_cutoff_ligand: null
|
| 33 |
+
edge_cutoff_pocket: 0.8 # = 4.0 / 5.0
|
| 34 |
+
edge_cutoff_interaction: 1.4 # = 7.0 / 5.0
|
| 35 |
+
reflection_equivariant: False
|
| 36 |
+
edge_embedding_dim: 8
|
| 37 |
+
joint_nf: 128
|
| 38 |
+
hidden_nf: 192
|
| 39 |
+
n_layers: 6
|
| 40 |
+
attention: True
|
| 41 |
+
tanh: True
|
| 42 |
+
norm_constant: 1
|
| 43 |
+
inv_sublayers: 1
|
| 44 |
+
sin_embedding: False
|
| 45 |
+
aggregation_method: 'sum'
|
| 46 |
+
normalization_factor: 100 # used if aggregation_method='sum'
|
| 47 |
+
|
| 48 |
+
diffusion_params:
|
| 49 |
+
diffusion_steps: 500
|
| 50 |
+
diffusion_noise_schedule: 'polynomial_2' # learned, cosine
|
| 51 |
+
diffusion_noise_precision: 1.0e-5
|
| 52 |
+
diffusion_loss_type: 'l2' # vlb, l2
|
| 53 |
+
normalize_factors: [5, 5] # [x, h]
|
| 54 |
+
|
| 55 |
+
eval_epochs: 25
|
| 56 |
+
visualize_sample_epoch: 25
|
| 57 |
+
visualize_chain_epoch: 25
|
| 58 |
+
eval_params:
|
| 59 |
+
n_eval_samples: 100
|
| 60 |
+
eval_batch_size: 50
|
| 61 |
+
smiles_file: '/path/to/train_smiles.npy'
|
| 62 |
+
n_visualize_samples: 5
|
| 63 |
+
keep_frames: 100
|
constants.py
ADDED
|
@@ -0,0 +1,183 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
from rdkit import Chem
|
| 3 |
+
import torch
|
| 4 |
+
|
| 5 |
+
# ------------------------------------------------------------------------------
|
| 6 |
+
# Computational
|
| 7 |
+
# ------------------------------------------------------------------------------
|
| 8 |
+
FLOAT_TYPE = torch.float32
|
| 9 |
+
INT_TYPE = torch.int64
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
# ------------------------------------------------------------------------------
|
| 13 |
+
# Bond parameters
|
| 14 |
+
# ------------------------------------------------------------------------------
|
| 15 |
+
|
| 16 |
+
# margin1, margin2, margin3 = 10, 5, 3
|
| 17 |
+
margin1, margin2, margin3 = 3, 2, 1
|
| 18 |
+
|
| 19 |
+
allowed_bonds = {'H': 1, 'C': 4, 'N': 3, 'O': 2, 'F': 1, 'B': 3, 'Al': 3,
|
| 20 |
+
'Si': 4, 'P': [3, 5],
|
| 21 |
+
'S': 4, 'Cl': 1, 'As': 3, 'Br': 1, 'I': 1, 'Hg': [1, 2],
|
| 22 |
+
'Bi': [3, 5]}
|
| 23 |
+
|
| 24 |
+
# Bond lengths from:
|
| 25 |
+
# http://www.wiredchemist.com/chemistry/data/bond_energies_lengths.html
|
| 26 |
+
# And:
|
| 27 |
+
# http://chemistry-reference.com/tables/Bond%20Lengths%20and%20Enthalpies.pdf
|
| 28 |
+
bonds1 = {'H': {'H': 74, 'C': 109, 'N': 101, 'O': 96, 'F': 92,
|
| 29 |
+
'B': 119, 'Si': 148, 'P': 144, 'As': 152, 'S': 134,
|
| 30 |
+
'Cl': 127, 'Br': 141, 'I': 161},
|
| 31 |
+
'C': {'H': 109, 'C': 154, 'N': 147, 'O': 143, 'F': 135,
|
| 32 |
+
'Si': 185, 'P': 184, 'S': 182, 'Cl': 177, 'Br': 194,
|
| 33 |
+
'I': 214},
|
| 34 |
+
'N': {'H': 101, 'C': 147, 'N': 145, 'O': 140, 'F': 136,
|
| 35 |
+
'Cl': 175, 'Br': 214, 'S': 168, 'I': 222, 'P': 177},
|
| 36 |
+
'O': {'H': 96, 'C': 143, 'N': 140, 'O': 148, 'F': 142,
|
| 37 |
+
'Br': 172, 'S': 151, 'P': 163, 'Si': 163, 'Cl': 164,
|
| 38 |
+
'I': 194},
|
| 39 |
+
'F': {'H': 92, 'C': 135, 'N': 136, 'O': 142, 'F': 142,
|
| 40 |
+
'S': 158, 'Si': 160, 'Cl': 166, 'Br': 178, 'P': 156,
|
| 41 |
+
'I': 187},
|
| 42 |
+
'B': {'H': 119, 'Cl': 175},
|
| 43 |
+
'Si': {'Si': 233, 'H': 148, 'C': 185, 'O': 163, 'S': 200,
|
| 44 |
+
'F': 160, 'Cl': 202, 'Br': 215, 'I': 243 },
|
| 45 |
+
'Cl': {'Cl': 199, 'H': 127, 'C': 177, 'N': 175, 'O': 164,
|
| 46 |
+
'P': 203, 'S': 207, 'B': 175, 'Si': 202, 'F': 166,
|
| 47 |
+
'Br': 214},
|
| 48 |
+
'S': {'H': 134, 'C': 182, 'N': 168, 'O': 151, 'S': 204,
|
| 49 |
+
'F': 158, 'Cl': 207, 'Br': 225, 'Si': 200, 'P': 210,
|
| 50 |
+
'I': 234},
|
| 51 |
+
'Br': {'Br': 228, 'H': 141, 'C': 194, 'O': 172, 'N': 214,
|
| 52 |
+
'Si': 215, 'S': 225, 'F': 178, 'Cl': 214, 'P': 222},
|
| 53 |
+
'P': {'P': 221, 'H': 144, 'C': 184, 'O': 163, 'Cl': 203,
|
| 54 |
+
'S': 210, 'F': 156, 'N': 177, 'Br': 222},
|
| 55 |
+
'I': {'H': 161, 'C': 214, 'Si': 243, 'N': 222, 'O': 194,
|
| 56 |
+
'S': 234, 'F': 187, 'I': 266},
|
| 57 |
+
'As': {'H': 152}
|
| 58 |
+
}
|
| 59 |
+
|
| 60 |
+
bonds2 = {'C': {'C': 134, 'N': 129, 'O': 120, 'S': 160},
|
| 61 |
+
'N': {'C': 129, 'N': 125, 'O': 121},
|
| 62 |
+
'O': {'C': 120, 'N': 121, 'O': 121, 'P': 150},
|
| 63 |
+
'P': {'O': 150, 'S': 186},
|
| 64 |
+
'S': {'P': 186, 'C': 160}}
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
bonds3 = {'C': {'C': 120, 'N': 116, 'O': 113},
|
| 68 |
+
'N': {'C': 116, 'N': 110},
|
| 69 |
+
'O': {'C': 113}}
|
| 70 |
+
|
| 71 |
+
bond_dict = [None, Chem.rdchem.BondType.SINGLE, Chem.rdchem.BondType.DOUBLE,
|
| 72 |
+
Chem.rdchem.BondType.TRIPLE, Chem.rdchem.BondType.AROMATIC]
|
| 73 |
+
|
| 74 |
+
# https://en.wikipedia.org/wiki/Covalent_radius#Radii_for_multiple_bonds
|
| 75 |
+
# (2022/08/14)
|
| 76 |
+
covalent_radii = {'H': 32, 'C': 60, 'N': 54, 'O': 53, 'F': 53, 'B': 73,
|
| 77 |
+
'Al': 111, 'Si': 102, 'P': 94, 'S': 94, 'Cl': 93, 'As': 106,
|
| 78 |
+
'Br': 109, 'I': 125, 'Hg': 133, 'Bi': 135}
|
| 79 |
+
|
| 80 |
+
# ------------------------------------------------------------------------------
|
| 81 |
+
# Backbone geometry
|
| 82 |
+
# Taken from: Bhagavan, N. V., and C. E. Ha.
|
| 83 |
+
# "Chapter 4-Three-dimensional structure of proteins and disorders of protein misfolding."
|
| 84 |
+
# Essentials of Medical Biochemistry (2015): 31-51.
|
| 85 |
+
# https://www.sciencedirect.com/science/article/pii/B978012416687500004X
|
| 86 |
+
# ------------------------------------------------------------------------------
|
| 87 |
+
N_CA_DIST = 1.47
|
| 88 |
+
CA_C_DIST = 1.53
|
| 89 |
+
N_CA_C_ANGLE = 110 * np.pi / 180
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
# ------------------------------------------------------------------------------
|
| 93 |
+
# Dataset-specific constants
|
| 94 |
+
# ------------------------------------------------------------------------------
|
| 95 |
+
dataset_params = {}
|
| 96 |
+
dataset_params['bindingmoad'] = {
|
| 97 |
+
'atom_encoder': {'C': 0, 'N': 1, 'O': 2, 'S': 3, 'B': 4, 'Br': 5, 'Cl': 6, 'P': 7, 'I': 8, 'F': 9},
|
| 98 |
+
'atom_decoder': ['C', 'N', 'O', 'S', 'B', 'Br', 'Cl', 'P', 'I', 'F'],
|
| 99 |
+
'aa_encoder': {'A': 0, 'C': 1, 'D': 2, 'E': 3, 'F': 4, 'G': 5, 'H': 6, 'I': 7, 'K': 8, 'L': 9, 'M': 10, 'N': 11, 'P': 12, 'Q': 13, 'R': 14, 'S': 15, 'T': 16, 'V': 17, 'W': 18, 'Y': 19},
|
| 100 |
+
'aa_decoder': ['A', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'K', 'L', 'M', 'N', 'P', 'Q', 'R', 'S', 'T', 'V', 'W', 'Y'],
|
| 101 |
+
# PyMOL colors, see: https://pymolwiki.org/index.php/Color_Values#Chemical_element_colours
|
| 102 |
+
'colors_dic': ['#33ff33', '#3333ff', '#ff4d4d', '#e6c540', '#ffb5b5', '#A62929', '#1FF01F', '#ff8000', '#940094', '#B3FFFF', '#b3e3f5'],
|
| 103 |
+
'radius_dic': [0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3],
|
| 104 |
+
'bonds1': [
|
| 105 |
+
[154.0, 147.0, 143.0, 182.0, 0.0, 194.0, 177.0, 184.0, 214.0, 135.0],
|
| 106 |
+
[147.0, 145.0, 140.0, 168.0, 0.0, 214.0, 175.0, 177.0, 222.0, 136.0],
|
| 107 |
+
[143.0, 140.0, 148.0, 151.0, 0.0, 172.0, 164.0, 163.0, 194.0, 142.0],
|
| 108 |
+
[182.0, 168.0, 151.0, 204.0, 0.0, 225.0, 207.0, 210.0, 234.0, 158.0],
|
| 109 |
+
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 175.0, 0.0, 0.0, 0.0],
|
| 110 |
+
[194.0, 214.0, 172.0, 225.0, 0.0, 228.0, 214.0, 222.0, 0.0, 178.0],
|
| 111 |
+
[177.0, 175.0, 164.0, 207.0, 175.0, 214.0, 199.0, 203.0, 0.0, 166.0],
|
| 112 |
+
[184.0, 177.0, 163.0, 210.0, 0.0, 222.0, 203.0, 221.0, 0.0, 156.0],
|
| 113 |
+
[214.0, 222.0, 194.0, 234.0, 0.0, 0.0, 0.0, 0.0, 266.0, 187.0],
|
| 114 |
+
[135.0, 136.0, 142.0, 158.0, 0.0, 178.0, 166.0, 156.0, 187.0, 142.0]],
|
| 115 |
+
'bonds2': [[134.0, 129.0, 120.0, 160.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
|
| 116 |
+
[129.0, 125.0, 121.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
|
| 117 |
+
[120.0, 121.0, 121.0, 0.0, 0.0, 0.0, 0.0, 150.0, 0.0, 0.0],
|
| 118 |
+
[160.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 186.0, 0.0, 0.0],
|
| 119 |
+
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
|
| 120 |
+
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
|
| 121 |
+
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
|
| 122 |
+
[0.0, 0.0, 150.0, 186.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
|
| 123 |
+
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
|
| 124 |
+
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
|
| 125 |
+
'bonds3': [[120.0, 116.0, 113.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
|
| 126 |
+
[116.0, 110.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
|
| 127 |
+
[113.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
|
| 128 |
+
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
|
| 129 |
+
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
|
| 130 |
+
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
|
| 131 |
+
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
|
| 132 |
+
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
|
| 133 |
+
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
|
| 134 |
+
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
|
| 135 |
+
'lennard_jones_rm': [
|
| 136 |
+
[120.0, 116.0, 113.0, 160.0, 133.0, 194.0, 177.0, 184.0, 214.0, 135.0],
|
| 137 |
+
[116.0, 110.0, 121.0, 168.0, 127.0, 214.0, 175.0, 177.0, 222.0, 136.0],
|
| 138 |
+
[113.0, 121.0, 121.0, 151.0, 126.0, 172.0, 164.0, 150.0, 194.0, 142.0],
|
| 139 |
+
[160.0, 168.0, 151.0, 204.0, 167.0, 225.0, 207.0, 186.0, 234.0, 158.0],
|
| 140 |
+
[133.0, 127.0, 126.0, 167.0, 146.0, 182.0, 175.0, 167.0, 198.0, 126.0],
|
| 141 |
+
[194.0, 214.0, 172.0, 225.0, 182.0, 228.0, 214.0, 222.0, 234.0, 178.0],
|
| 142 |
+
[177.0, 175.0, 164.0, 207.0, 175.0, 214.0, 199.0, 203.0, 218.0, 166.0],
|
| 143 |
+
[184.0, 177.0, 150.0, 186.0, 167.0, 222.0, 203.0, 221.0, 219.0, 156.0],
|
| 144 |
+
[214.0, 222.0, 194.0, 234.0, 198.0, 234.0, 218.0, 219.0, 266.0, 187.0],
|
| 145 |
+
[135.0, 136.0, 142.0, 158.0, 126.0, 178.0, 166.0, 156.0, 187.0, 142.0]],
|
| 146 |
+
'atom_hist': {'C': 545542, 'N': 90205, 'O': 132965, 'S': 9342, 'B': 109,
|
| 147 |
+
'Br': 1424, 'Cl': 5516, 'P': 5154, 'I': 445, 'F': 9742},
|
| 148 |
+
'aa_hist': {'A': 109798, 'C': 31556, 'D': 83921, 'E': 79405, 'F': 97083,
|
| 149 |
+
'G': 139319, 'H': 62661, 'I': 99008, 'K': 62403, 'L': 155105,
|
| 150 |
+
'M': 59977, 'N': 70437, 'P': 58833, 'Q': 48254, 'R': 74215,
|
| 151 |
+
'S': 103286, 'T': 90972, 'V': 119954, 'W': 42017, 'Y': 90596},
|
| 152 |
+
}
|
| 153 |
+
|
| 154 |
+
dataset_params['crossdock_full'] = {
|
| 155 |
+
'atom_encoder': {'C': 0, 'N': 1, 'O': 2, 'S': 3, 'B': 4, 'Br': 5, 'Cl': 6, 'P': 7, 'I': 8, 'F': 9, 'others': 10},
|
| 156 |
+
'atom_decoder': ['C', 'N', 'O', 'S', 'B', 'Br', 'Cl', 'P', 'I', 'F', 'others'],
|
| 157 |
+
'aa_encoder': {'C': 0, 'N': 1, 'O': 2, 'S': 3, 'B': 4, 'Br': 5, 'Cl': 6, 'P': 7, 'I': 8, 'F': 9, 'others': 10},
|
| 158 |
+
'aa_decoder': ['C', 'N', 'O', 'S', 'B', 'Br', 'Cl', 'P', 'I', 'F', 'others'],
|
| 159 |
+
'colors_dic': ['#33ff33', '#3333ff', '#ff4d4d', '#e6c540', '#ffb5b5', '#A62929', '#1FF01F', '#ff8000', '#940094', '#B3FFFF', '#ffb5b5'],
|
| 160 |
+
'radius_dic': [0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3],
|
| 161 |
+
'bonds1': [[154.0, 147.0, 143.0, 182.0, 0.0, 194.0, 177.0, 184.0, 214.0, 135.0, 0.0], [147.0, 145.0, 140.0, 168.0, 0.0, 214.0, 175.0, 177.0, 222.0, 136.0, 0.0], [143.0, 140.0, 148.0, 151.0, 0.0, 172.0, 164.0, 163.0, 194.0, 142.0, 0.0], [182.0, 168.0, 151.0, 204.0, 0.0, 225.0, 207.0, 210.0, 234.0, 158.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 175.0, 0.0, 0.0, 0.0, 0.0], [194.0, 214.0, 172.0, 225.0, 0.0, 228.0, 214.0, 222.0, 0.0, 178.0, 0.0], [177.0, 175.0, 164.0, 207.0, 175.0, 214.0, 199.0, 203.0, 0.0, 166.0, 0.0], [184.0, 177.0, 163.0, 210.0, 0.0, 222.0, 203.0, 221.0, 0.0, 156.0, 0.0], [214.0, 222.0, 194.0, 234.0, 0.0, 0.0, 0.0, 0.0, 266.0, 187.0, 0.0], [135.0, 136.0, 142.0, 158.0, 0.0, 178.0, 166.0, 156.0, 187.0, 142.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
|
| 162 |
+
'bonds2': [[134.0, 129.0, 120.0, 160.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [129.0, 125.0, 121.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [120.0, 121.0, 121.0, 0.0, 0.0, 0.0, 0.0, 150.0, 0.0, 0.0, 0.0], [160.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 186.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 150.0, 186.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
|
| 163 |
+
'bonds3': [[120.0, 116.0, 113.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [116.0, 110.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [113.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
|
| 164 |
+
'lennard_jones_rm': [[120.0, 116.0, 113.0, 160.0, 133.0, 194.0, 177.0, 184.0, 214.0, 135.0, 0.0], [116.0, 110.0, 121.0, 168.0, 127.0, 214.0, 175.0, 177.0, 222.0, 136.0, 0.0], [113.0, 121.0, 121.0, 151.0, 126.0, 172.0, 164.0, 150.0, 194.0, 142.0, 0.0], [160.0, 168.0, 151.0, 204.0, 167.0, 225.0, 207.0, 186.0, 234.0, 158.0, 0.0], [133.0, 127.0, 126.0, 167.0, 146.0, 182.0, 175.0, 167.0, 198.0, 126.0, 0.0], [194.0, 214.0, 172.0, 225.0, 182.0, 228.0, 214.0, 222.0, 234.0, 178.0, 0.0], [177.0, 175.0, 164.0, 207.0, 175.0, 214.0, 199.0, 203.0, 218.0, 166.0, 0.0], [184.0, 177.0, 150.0, 186.0, 167.0, 222.0, 203.0, 221.0, 219.0, 156.0, 0.0], [214.0, 222.0, 194.0, 234.0, 198.0, 234.0, 218.0, 219.0, 266.0, 187.0, 0.0], [135.0, 136.0, 142.0, 158.0, 126.0, 178.0, 166.0, 156.0, 187.0, 142.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
|
| 165 |
+
'atom_hist': {'C': 1570767, 'N': 273858, 'O': 396837, 'S': 26352, 'B': 0, 'Br': 0, 'Cl': 15058, 'P': 25994, 'I': 0, 'F': 30687, 'others': 0},
|
| 166 |
+
'aa_hist': {'C': 23302704, 'N': 6093090, 'O': 6701210, 'S': 276805, 'B': 0, 'Br': 0, 'Cl': 0, 'P': 0, 'I': 0, 'F': 0, 'others': 0},
|
| 167 |
+
}
|
| 168 |
+
|
| 169 |
+
dataset_params['crossdock'] = {
|
| 170 |
+
'atom_encoder': {'C': 0, 'N': 1, 'O': 2, 'S': 3, 'B': 4, 'Br': 5, 'Cl': 6, 'P': 7, 'I': 8, 'F': 9},
|
| 171 |
+
'atom_decoder': ['C', 'N', 'O', 'S', 'B', 'Br', 'Cl', 'P', 'I', 'F'],
|
| 172 |
+
'aa_encoder': {'A': 0, 'C': 1, 'D': 2, 'E': 3, 'F': 4, 'G': 5, 'H': 6, 'I': 7, 'K': 8, 'L': 9, 'M': 10, 'N': 11, 'P': 12, 'Q': 13, 'R': 14, 'S': 15, 'T': 16, 'V': 17, 'W': 18, 'Y': 19},
|
| 173 |
+
'aa_decoder': ['A', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'K', 'L', 'M', 'N', 'P', 'Q', 'R', 'S', 'T', 'V', 'W', 'Y'],
|
| 174 |
+
# PyMOL colors, see: https://pymolwiki.org/index.php/Color_Values#Chemical_element_colours
|
| 175 |
+
'colors_dic': ['#33ff33', '#3333ff', '#ff4d4d', '#e6c540', '#ffb5b5', '#A62929', '#1FF01F', '#ff8000', '#940094', '#B3FFFF'],
|
| 176 |
+
'radius_dic': [0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3],
|
| 177 |
+
'bonds1': [[154.0, 147.0, 143.0, 182.0, 0.0, 194.0, 177.0, 184.0, 214.0, 135.0], [147.0, 145.0, 140.0, 168.0, 0.0, 214.0, 175.0, 177.0, 222.0, 136.0], [143.0, 140.0, 148.0, 151.0, 0.0, 172.0, 164.0, 163.0, 194.0, 142.0], [182.0, 168.0, 151.0, 204.0, 0.0, 225.0, 207.0, 210.0, 234.0, 158.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 175.0, 0.0, 0.0, 0.0], [194.0, 214.0, 172.0, 225.0, 0.0, 228.0, 214.0, 222.0, 0.0, 178.0], [177.0, 175.0, 164.0, 207.0, 175.0, 214.0, 199.0, 203.0, 0.0, 166.0], [184.0, 177.0, 163.0, 210.0, 0.0, 222.0, 203.0, 221.0, 0.0, 156.0], [214.0, 222.0, 194.0, 234.0, 0.0, 0.0, 0.0, 0.0, 266.0, 187.0], [135.0, 136.0, 142.0, 158.0, 0.0, 178.0, 166.0, 156.0, 187.0, 142.0]],
|
| 178 |
+
'bonds2': [[134.0, 129.0, 120.0, 160.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [129.0, 125.0, 121.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [120.0, 121.0, 121.0, 0.0, 0.0, 0.0, 0.0, 150.0, 0.0, 0.0], [160.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 186.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 150.0, 186.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
|
| 179 |
+
'bonds3': [[120.0, 116.0, 113.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [116.0, 110.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [113.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
|
| 180 |
+
'lennard_jones_rm': [[120.0, 116.0, 113.0, 160.0, 133.0, 194.0, 177.0, 184.0, 214.0, 135.0], [116.0, 110.0, 121.0, 168.0, 127.0, 214.0, 175.0, 177.0, 222.0, 136.0], [113.0, 121.0, 121.0, 151.0, 126.0, 172.0, 164.0, 150.0, 194.0, 142.0], [160.0, 168.0, 151.0, 204.0, 167.0, 225.0, 207.0, 186.0, 234.0, 158.0], [133.0, 127.0, 126.0, 167.0, 146.0, 182.0, 175.0, 167.0, 198.0, 126.0], [194.0, 214.0, 172.0, 225.0, 182.0, 228.0, 214.0, 222.0, 234.0, 178.0], [177.0, 175.0, 164.0, 207.0, 175.0, 214.0, 199.0, 203.0, 218.0, 166.0], [184.0, 177.0, 150.0, 186.0, 167.0, 222.0, 203.0, 221.0, 219.0, 156.0], [214.0, 222.0, 194.0, 234.0, 198.0, 234.0, 218.0, 219.0, 266.0, 187.0], [135.0, 136.0, 142.0, 158.0, 126.0, 178.0, 166.0, 156.0, 187.0, 142.0]],
|
| 181 |
+
'atom_hist': {'C': 1570032, 'N': 273792, 'O': 396623, 'S': 26339, 'B': 0, 'Br': 0, 'Cl': 15055, 'P': 25975, 'I': 0, 'F': 30673},
|
| 182 |
+
'aa_hist': {'A': 277175, 'C': 92406, 'D': 254046, 'E': 201833, 'F': 234995, 'G': 376966, 'H': 147704, 'I': 290683, 'K': 173210, 'L': 421883, 'M': 157813, 'N': 174241, 'P': 148581, 'Q': 120232, 'R': 173848, 'S': 274430, 'T': 247605, 'V': 326134, 'W': 88552, 'Y': 226668},
|
| 183 |
+
}
|
data/moad_test.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
5NDU-bio2_8V2:B:201,6PB9-bio1_PAM:A:301,2GM1-bio1_2AZ:A:371,3K3B-bio1_L31:A:371,3L9H-bio1_EMQ:A:601,2UYI-bio1_K02:A:604,5NCF-bio2_8T5:B:204,3F7H-bio2_419:B:1,1LT6-bio2_GAA:M:104,3F7I-bio1_G13:A:1,1EEI-bio1_GAA:G:507,2PG2-bio1_K01:A:604,1FD7-bio1_AI1:H:104,1YRS-bio2_L47:B:603,2PG2-bio1_K01:B:604,2VL8-bio1_CTS:A:1544,1J6Z-bio1_RHO:A:1381,3KEN-bio1_ZZD:A:370,1LT6-bio1_GAA:H:104,2FL6-bio2_N5T:B:605,2Q2Y-bio2_MKR:B:605,3F7I-bio2_G13:B:1,1YRS-bio1_L47:A:604,6C0B-bio1_PAM:B:202,6P7R-bio1_PAM:A:301,1MFI-bio1_FHC:C:117,1EFI-bio1_GAT:G:107,1FD7-bio2_AI1:P:104,5NCG-bio2_8TB:B:202,1DTL-bio1_BEP:A:206,1LT6-bio1_GAA:D:104,3CJO-bio1_K30:A:1,1FD7-bio2_AI1:N:104,1EFI-bio1_GAT:F:106,2UYI-bio2_K02:B:604,1RVX-bio1_NAG:A:3017,1EEI-bio1_GAA:E:505,1MFI-bio1_FHC:A:115,5NEG-bio1_8VK:B:201,5NEG-bio1_8VK:A:201,2VL8-bio3_CTS:C:1544,1DTL-bio1_BEP:A:204,7AKI-bio1_RJQ:A:203,1LT6-bio1_GAA:F:104,1Q0B-bio1_NAT:A:604,1RVX-bio2_NAG:G:3012,1EFI-bio1_GAT:H:108,3EKU-bio1_CY9:A:903,1RDW-bio1_LAR:X:391,3MN5-bio1_LAB:A:376,1RV0-bio1_NDG:H:642,3GT9-bio2_516:B:1,1FD7-bio1_AI1:E:104,3GTA-bio2_851:B:1,1BXM-bio1_ERG:A:99,4PA0-bio2_2OW:B:1101,2UYM-bio1_K03:A:604,2GM1-bio3_2AZ:D:371,1MFI-bio1_FHC:B:116,1DTL-bio1_BEP:A:205,3K3B-bio2_L31:B:372,1OKE-bio1_BOG:B:1400,2A5X-bio1_LAR:A:379,1LT6-bio2_GAA:P:104,2FME-bio1_3QC:A:370,3F7H-bio1_419:A:1,1FD7-bio1_AI1:G:104,6G6Y-bio1_EOK:A:403,1J78-bio2_VDY:B:500,1EFI-bio1_GAT:E:105,2FL2-bio1_N4T:A:604,1FD7-bio2_AI1:O:104,3GTA-bio1_851:A:1,7AKI-bio1_RJQ:A:202,5XDU-bio1_ZI6:A:403,3ZJX-bio3_BOG:C:1296,1EEI-bio1_GAA:F:506,2GM1-bio2_2AZ:B:371,1LT6-bio1_GAA:E:104,2X7E-bio2_X7E:B:1365,2UYM-bio2_K03:B:604,1FD7-bio1_AI1:D:104,3L9H-bio2_EMQ:B:602,2I3I-bio2_618:B:501,2VL8-bio2_CTS:B:1544,5NCG-bio1_8TB:A:203,2Q0U-bio1_LAB:A:401,2FME-bio2_3QC:B:370,2FL6-bio1_N5T:A:604,1Q0B-bio2_NAT:B:605,1LT6-bio1_GAA:G:104,5NCF-bio1_8T5:A:201,1EEI-bio1_GAA:D:504,1LT6-bio2_GAA:O:104,1FD7-bio2_AI1:M:104,1RV0-bio1_NDG:J:640,3GT9-bio1_516:A:1,2FKY-bio2_N2T:B:605,1RVX-bio1_NAG:A:3018,5NCP-bio1_EG5:A:201,5ZZB-bio
2_LAB:D:401,3UKR-bio1_CKH:B:401,3PA8-bio1_621:B:300,2Q2Y-bio1_MKR:A:604,2FKY-bio1_N2T:A:604,1RV0-bio1_DAN:L:701,1RVT-bio1_NDG:H:742,1LT6-bio2_GAA:L:104,5NCG-bio1_8TB:A:201,1SQK-bio1_LAR:A:378,1ESV-bio1_LAR:A:401,1FD7-bio1_AI1:F:104,5NCG-bio1_8TB:A:202,1EEI-bio1_GAA:H:508,3EKS-bio1_CY9:A:903,6RCF-bio1_K0K:A:201,2FL2-bio2_N4T:B:605,3CJO-bio2_K30:B:603,4PA0-bio1_2OW:A:1101,1FD7-bio2_AI1:L:104,6RCJ-bio1_K0H:A:201,2I3I-bio1_618:A:501,2GM1-bio4_2AZ:E:371,5NDU-bio1_8V2:A:201,5NCG-bio2_8TB:B:201,3PA8-bio1_621:A:300,1LT6-bio2_GAA:N:104,3ZJX-bio4_BOG:D:1296,6RVP-bio1_MB3:A:403,1EFI-bio1_GAT:D:104
|
data/moad_train.txt
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
data/moad_val.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
1YEC_PNB:H:551,2JKL_CLM:A:1144,2JKL_BRX:A:1143,2JKL_BRX:B:1142,2JKL_BRX:B:1143,2JKL_CLM:B:1144,2JKL_BRX:C:1142,2JKL_CLM:C:1143,2JKL_BRX:E:1142,2JKL_CLM:E:1143,2JKL_BRX:D:1143,2JKL_CLM:D:1145,2JKL_BRX:D:1144,2JKL_BRX:F:1142,2JKL_CLM:F:1143,6V3J_NAG:G:301,3FO2_BZH:A:401,3FO2_BZH:H:401,2DDQ_HRB:L:502,5LN4_CHT:B:201,5LN4_CHT:C:201,1USQ_CLM:A:1143,1USQ_CLM:B:1143,1USQ_CLM:C:1143,1USQ_CLM:D:1142,1USQ_CLM:E:1143,1USQ_CLM:F:1144,2HXW_FLC:B:301,3GMM_PLM:A:301,1B09_PC:B:500,1B09_PC:D:500,1B09_PC:E:500,1B09_PC:C:500,1B09_PC:A:500,3VRI_1KX:A:301,2BMK_PDD:B:1217,2BMK_PDD:L:1215,1OAR_AZN:H:500,1OAR_AZN:I:500,1OAR_AZN:J:500,1OAR_AZN:K:500,2BFV_STG:H:200,2AJV_COC:L:501,1RUM_BEZ:H:1601,6ITP_HCY:B:201,6ITP_HCY:A:201,3CFB_SPB:B:302,3CFB_SPB:L:301,4MRH_2CQ:A:201,6VH6_QX4:B:701,6BE3_NAG:H:301,1UB5_SPB:H:601,1UB5_SPB:B:701,2JKJ_CLM:A:1141,2JKJ_CLM:B:1141,2JKJ_TH8:B:1142,2JKJ_CLM:C:1141,2JKJ_CLM:D:1142,2JKJ_TH8:D:1143,2JKJ_CLM:E:1141,2JKJ_CLM:F:1143,3OAZ_2M5:H:237,3OAZ_2M5:M:232,1X9Q_FLU:A:1341,2PYE_7PE:A:504,1RUA_BEZ:H:601,5ZQK_SAM:A:1001,3BQM_BQM:B:1,3BQM_BQM:C:1,1JGU_HBC:H:3001,1F3D_TPM:J:401,1F3D_TPM:L:400,1WC7_PP3:A:1214,1YEI_PGG:L:551,3FJG_3PG:C:301,3FJG_3PG:D:301,2PCP_1PC:B:227,2PCP_1PC:C:212,1LO3_AN1:Y:500,1LO3_AN1:H:501,35C8_NOX:L:212,1XDD_AAY:A:401,1XDD_AAY:B:501,1I7Z_COC:A:301,1I7Z_COC:C:302,4K3H_1OM:A:201,4K3H_1OM:D:201,4K3H_1OM:E:201,4K3H_1OM:G:201,6NPI_60Q:B:701,6NPI_KW1:B:702,1FE8_NAG:I:553,2OMN_IPH:B:401,2DWE_TBA:C:4001,6LB2_EKG:A:322,6LB2_EKG:C:316,3FO1_BZH:H:401,3FO1_BZH:B:401,3IF1_NGA:B:1,3IF1_NGA:D:213,6LAH_EKG:A:309,6LAH_EKG:C:321,3UPR_1KX:A:277,3UPR_1KX:C:277,6PVD_N18:A:301,6PVD_N18:C:301,1YEK_NPO:H:551,1RUL_BEZ:H:601,5J6H_NCA:A:402,3KDM_TES:H:226,3KDM_TES:B:226,6ITQ_HCY:D:201,6ITQ_HCY:B:201,6ITQ_HCY:A:201,6ITQ_HCY:C:201,1EAP_HEP:B:222,1GHQ_NDG:C:701,2Z93_END:C:1,2AJX_TGN:L:501,6X42_I3C:X:904,1RIU_RBE:H:201,1T66_FLU:L:300,1T66_FLU:C:301,1RIV_OBE:H:201,4MRG_24W:A:201,3VRJ_1KX:A:301,2YK1_NCT:H:300,2CJU_PHX:H:1114,1RD4_L08:A:328,1RD4_L08:B:1328,1RD4_L08:C:2328,1RD4_L08:
D:3328,1LO2_OX1:Y:500,1LO2_OX1:H:501,1UM5_SS1:H:1001,4LCW_1VY:A:302,4LCW_1VY:C:301,6LT6_EKG:A:301,4F8N_PC:A:202,6PVC_P1J:A:402,6PVC_P1J:C:301,2C1P_FNZ:B:1215,2C1P_FNZ:H:1214,5U98_1KX:A:301,5U98_1KX:D:301,43CA_NPO:A:921,43CA_NPO:C:922,43CA_NPO:E:923,43CA_NPO:G:924,2YIO_YIO:A:1141,2YIO_YIO:B:1138,2YIP_YIO:A:1135,2YIP_YIO:B:1135,2YIP_YIO:C:1135,2YIP_YIO:D:1135,2YIP_YIO:E:1135,2YIP_YIO:F:1135,4NP2_2L1:A:201,1QYG_BCG:H:401,1UM4_SH4:H:1001,1MJJ_HAL:A:1001,1MJJ_HAL:H:1002,1MRF_DI:L:901,1LO0_BC1:Y:500,1LO0_BC1:H:501,1KN4_PDE:L:551,6NPP_KWG:B:701,2ICA_2IC:A:1,1MEX_RAC:H:601,1MH5_HAL:B:601,1MH5_HAL:H:602,1XUO_LA1:A:401,4F8L_AES:A:239,1A6V_NPC:H:430,1C1E_ENH:H:703,3F78_ICF:A:1,3F78_ICF:B:2,2CGR_GAS:H:215,1XDG_AB8:A:401,1XDG_AB8:B:501,3VKX_T3:A:301,1BFV_STG:H:200,1FLR_FLU:L:600,1GAF_NPE:H:218,25C8_GEP:L:212,5XQW_8EU:H:301,4L4V_1VY:A:301,4L4V_1VY:C:301,2O5Z_ANO:H:501,1CFV_E3G:H:200,3M6F_BJZ:A:1,1Y0L_HAN:H:501,1Y0L_HAN:A:601,1Y0L_HAN:D:701,1Y0L_HAN:F:801,6NUX_FOF:A:301,2YKL_NLD:H:1213,1JGL_EST:L:911,4IA6_EIC:A:608,1FL3_SPB:L:225,1FL3_SPB:A:226,2AJZ_ECG:L:301,2AJZ_ECG:A:302,6LAM_EKG:A:311,6LAM_EKG:C:316,1MJ7_HAL:H:301,4NP3_2L2:A:201,2BJM_ANF:H:500,3FJ7_PEQ:A:301,3FJ7_PEQ:B:301,1NGP_NPA:H:302,2HVK_TBA:C:4001,6OOY_A7M:C:201,1C5C_TK4:L:1001,1YEG_BPN:L:551,3T0W_DIW:A:204,1DL7_NCH:L:999,1Q72_COC:H:401,7JRA_VGY:B:301,2Z92_ENE:A:1,1YEF_PNC:L:551,2AJY_ECG:L:304,2O7N_2O7:A:1,2R2E_KDE:A:214,1YEE_PNB:H:551,1AJ7_NPE:H:217,1YNK_SC5:H:501,1Q0Y_MOI:H:401,6NPM_KVD:B:701,6DZN_AE3:L:304,1VPO_TES:H:1010,4MRF_2CK:A:203,3FO0_BZH:H:401,4MRE_2C9:A:202,1WZ1_DNS:H:200,2E27_AB0:H:6001,3T0X_DIW:A:201,1A4K_FRA:H:3083,1A4K_FRA:B:3083,1A0Q_HEP:H:214,1D6V_HOP:H:401,1H8S_AIC:A:1000,1F4X_MGS:H:301,1KN2_PNE:L:551,3CFD_SPB:B:302,3CFD_SPB:H:301
|
data/prepare_crossdocked.py
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Prepare the CrossDocked test set and baseline outputs for evaluation.

Usage: python prepare_crossdocked.py <basedir>

Expects <basedir> to contain the 'crossdocked_pocket10' structure folder and
the 'split_by_name.pt' split file. For every test pocket this script:
  * copies the receptor PDB into <basedir>/receptor_pdbs (underscores in the
    name replaced by dashes so '_' can later be used as a field separator),
  * copies/renames the reference ligand SDF into <basedir>/reference_ligands,
  * merges each baseline method's per-pocket 'mol_*.sdf' files (capped at
    100 molecules) into one combined SDF under <basedir>/<method>_processed.
"""
import sys
import torch
import shutil
from pathlib import Path

from rdkit import Chem
from tqdm import tqdm


basedir = sys.argv[1]
structure_dir = Path(basedir, 'crossdocked_pocket10')

# split_by_name.pt maps split names to lists of (receptor, ligand) rel. paths
test_set = torch.load(Path(basedir, 'split_by_name.pt'))['test']

receptor_dir = Path(basedir, 'receptor_pdbs')
receptor_dir.mkdir(exist_ok=True)

ref_ligand_dir = Path(basedir, 'reference_ligands')
ref_ligand_dir.mkdir(exist_ok=True)

methods = ['cvae', 'sbdd', 'p2m']
for method in methods:
    method_lig_dir = Path(basedir, f'{method}_processed')
    method_lig_dir.mkdir(exist_ok=True)

for pocket_idx, (receptor_name, ligand_name) in enumerate(tqdm(test_set)):

    # copy receptor file and remove underscores
    new_rec_name = Path(receptor_name).stem.replace('_', '-')
    shutil.copy(Path(structure_dir, receptor_name),
                Path(receptor_dir, new_rec_name + '.pdb'))

    # copy and rename reference ligands
    new_lig_name = new_rec_name + '_' + Path(ligand_name).stem.replace('_', '-')
    shutil.copy(Path(structure_dir, ligand_name),
                Path(ref_ligand_dir, new_lig_name + '.sdf'))

    for method in methods:

        method_pocket_dir = Path(basedir, method, f'pocket_{pocket_idx}')

        # sanitize=False keeps molecules RDKit would otherwise reject.
        # sorted(): Path.glob yields files in filesystem-dependent order, so
        # sort the names to make the "first 100" truncation below
        # deterministic across runs and machines.
        generated_mols = [Chem.SDMolSupplier(str(file), sanitize=False)[0]
                          for file in sorted(method_pocket_dir.glob('mol_*.sdf'))]

        # only select first 100 molecules
        generated_mols = generated_mols[:100]
        if len(generated_mols) < 1:
            print('No molecule found for this pocket')
            continue
        if len(generated_mols) < 100:
            print('Less than 100 molecules found for this pocket')

        # write a combined sdf file
        sdf_path = Path(basedir, f'{method}_processed',
                        f'{new_rec_name}_mols-pocket-{pocket_idx}.sdf')
        with Chem.SDWriter(str(sdf_path)) as w:
            for mol in generated_mols:
                w.write(mol)
data/timesplit_no_lig_or_rec_overlap_train
ADDED
|
@@ -0,0 +1,14037 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
4lp9
|
| 2 |
+
3dpf
|
| 3 |
+
1me7
|
| 4 |
+
2zy1
|
| 5 |
+
5c0m
|
| 6 |
+
4o2c
|
| 7 |
+
2zv9
|
| 8 |
+
3a2c
|
| 9 |
+
3b2t
|
| 10 |
+
1v2q
|
| 11 |
+
4q87
|
| 12 |
+
4j28
|
| 13 |
+
3efw
|
| 14 |
+
3k8c
|
| 15 |
+
3s54
|
| 16 |
+
2fda
|
| 17 |
+
2q55
|
| 18 |
+
3fzn
|
| 19 |
+
3eoc
|
| 20 |
+
4tn2
|
| 21 |
+
3hfv
|
| 22 |
+
2qo8
|
| 23 |
+
5gg4
|
| 24 |
+
5svi
|
| 25 |
+
2r58
|
| 26 |
+
1cw2
|
| 27 |
+
1ft7
|
| 28 |
+
4hww
|
| 29 |
+
6b1j
|
| 30 |
+
3di6
|
| 31 |
+
5jaz
|
| 32 |
+
5ema
|
| 33 |
+
5am5
|
| 34 |
+
4bnt
|
| 35 |
+
1fq6
|
| 36 |
+
3pqz
|
| 37 |
+
4nw7
|
| 38 |
+
4ahr
|
| 39 |
+
1p1o
|
| 40 |
+
3le8
|
| 41 |
+
3nzk
|
| 42 |
+
1eoc
|
| 43 |
+
3qw8
|
| 44 |
+
4za0
|
| 45 |
+
5l8a
|
| 46 |
+
5teg
|
| 47 |
+
5u6j
|
| 48 |
+
2wp1
|
| 49 |
+
1zyr
|
| 50 |
+
4m6q
|
| 51 |
+
2jq9
|
| 52 |
+
4m5h
|
| 53 |
+
2ll6
|
| 54 |
+
4hw3
|
| 55 |
+
6eq5
|
| 56 |
+
3hqy
|
| 57 |
+
2gv6
|
| 58 |
+
5yjb
|
| 59 |
+
4lwv
|
| 60 |
+
4lm4
|
| 61 |
+
3g30
|
| 62 |
+
5f32
|
| 63 |
+
4dkr
|
| 64 |
+
6b4l
|
| 65 |
+
4j9a
|
| 66 |
+
5w86
|
| 67 |
+
4mpn
|
| 68 |
+
5m51
|
| 69 |
+
3bea
|
| 70 |
+
2wtd
|
| 71 |
+
2o65
|
| 72 |
+
6dqb
|
| 73 |
+
5q0e
|
| 74 |
+
2r03
|
| 75 |
+
4xv9
|
| 76 |
+
4y0a
|
| 77 |
+
4kqq
|
| 78 |
+
5ota
|
| 79 |
+
3u5l
|
| 80 |
+
3m3o
|
| 81 |
+
2w47
|
| 82 |
+
2g5u
|
| 83 |
+
5wzs
|
| 84 |
+
5mky
|
| 85 |
+
3ibc
|
| 86 |
+
2qwf
|
| 87 |
+
3p76
|
| 88 |
+
4np3
|
| 89 |
+
4kb8
|
| 90 |
+
4tkf
|
| 91 |
+
2xvd
|
| 92 |
+
2fai
|
| 93 |
+
5ar8
|
| 94 |
+
2lzg
|
| 95 |
+
3e62
|
| 96 |
+
1p6e
|
| 97 |
+
1p02
|
| 98 |
+
6gjr
|
| 99 |
+
1nhu
|
| 100 |
+
4yyl
|
| 101 |
+
3rdv
|
| 102 |
+
3jzq
|
| 103 |
+
4pce
|
| 104 |
+
4gvu
|
| 105 |
+
4n7g
|
| 106 |
+
3s9e
|
| 107 |
+
3k3b
|
| 108 |
+
1pmn
|
| 109 |
+
1ogd
|
| 110 |
+
2j4i
|
| 111 |
+
1oss
|
| 112 |
+
1v0k
|
| 113 |
+
1d4w
|
| 114 |
+
6cn6
|
| 115 |
+
2btr
|
| 116 |
+
6egw
|
| 117 |
+
4zx1
|
| 118 |
+
4zfi
|
| 119 |
+
2rfn
|
| 120 |
+
5cep
|
| 121 |
+
2xyu
|
| 122 |
+
3eou
|
| 123 |
+
4mme
|
| 124 |
+
4fnn
|
| 125 |
+
4gkh
|
| 126 |
+
6d1l
|
| 127 |
+
6dlj
|
| 128 |
+
1q4l
|
| 129 |
+
5i94
|
| 130 |
+
1eb2
|
| 131 |
+
4qzw
|
| 132 |
+
3fu0
|
| 133 |
+
4aif
|
| 134 |
+
5eec
|
| 135 |
+
2qlj
|
| 136 |
+
5cfb
|
| 137 |
+
5sy3
|
| 138 |
+
1mhw
|
| 139 |
+
2flu
|
| 140 |
+
3e6y
|
| 141 |
+
4wiv
|
| 142 |
+
5oot
|
| 143 |
+
4bae
|
| 144 |
+
3po6
|
| 145 |
+
2wo9
|
| 146 |
+
1rp7
|
| 147 |
+
1l2s
|
| 148 |
+
5ab1
|
| 149 |
+
3pce
|
| 150 |
+
2w6z
|
| 151 |
+
3e85
|
| 152 |
+
3e12
|
| 153 |
+
5arf
|
| 154 |
+
1ikw
|
| 155 |
+
6goo
|
| 156 |
+
4zdu
|
| 157 |
+
3wcb
|
| 158 |
+
2w1h
|
| 159 |
+
4x2i
|
| 160 |
+
4mm5
|
| 161 |
+
6ggv
|
| 162 |
+
5f2u
|
| 163 |
+
1o46
|
| 164 |
+
3iej
|
| 165 |
+
2jnw
|
| 166 |
+
3gpj
|
| 167 |
+
5uex
|
| 168 |
+
1ndw
|
| 169 |
+
1zhk
|
| 170 |
+
4mcd
|
| 171 |
+
4cps
|
| 172 |
+
3gcp
|
| 173 |
+
5try
|
| 174 |
+
4c2v
|
| 175 |
+
3otq
|
| 176 |
+
2yns
|
| 177 |
+
4m0e
|
| 178 |
+
5tkj
|
| 179 |
+
2m3z
|
| 180 |
+
3ftu
|
| 181 |
+
3pa5
|
| 182 |
+
5o0b
|
| 183 |
+
3m58
|
| 184 |
+
5u6k
|
| 185 |
+
4yw6
|
| 186 |
+
3ewh
|
| 187 |
+
3wc7
|
| 188 |
+
1rhk
|
| 189 |
+
3gi4
|
| 190 |
+
2wd7
|
| 191 |
+
3jq8
|
| 192 |
+
5yr5
|
| 193 |
+
2xye
|
| 194 |
+
6fnf
|
| 195 |
+
3hx3
|
| 196 |
+
4mz6
|
| 197 |
+
1ol2
|
| 198 |
+
4aa5
|
| 199 |
+
1olu
|
| 200 |
+
2wk2
|
| 201 |
+
1rry
|
| 202 |
+
1ozv
|
| 203 |
+
5jf6
|
| 204 |
+
4mnq
|
| 205 |
+
5wa5
|
| 206 |
+
3nlb
|
| 207 |
+
3f6e
|
| 208 |
+
2xu5
|
| 209 |
+
3wk7
|
| 210 |
+
4p6w
|
| 211 |
+
2pmc
|
| 212 |
+
3byo
|
| 213 |
+
5ehe
|
| 214 |
+
6c8x
|
| 215 |
+
3pp1
|
| 216 |
+
3ad7
|
| 217 |
+
3qzq
|
| 218 |
+
2z97
|
| 219 |
+
5d1r
|
| 220 |
+
4pxf
|
| 221 |
+
2w1e
|
| 222 |
+
3upk
|
| 223 |
+
4gj8
|
| 224 |
+
1td7
|
| 225 |
+
6erw
|
| 226 |
+
4rhx
|
| 227 |
+
1hxw
|
| 228 |
+
6cbf
|
| 229 |
+
5qal
|
| 230 |
+
1q8w
|
| 231 |
+
4mf0
|
| 232 |
+
5t1s
|
| 233 |
+
1fzk
|
| 234 |
+
4tju
|
| 235 |
+
5o0j
|
| 236 |
+
2v87
|
| 237 |
+
1my2
|
| 238 |
+
1epo
|
| 239 |
+
2r5p
|
| 240 |
+
4pis
|
| 241 |
+
5czm
|
| 242 |
+
3ehy
|
| 243 |
+
2r1x
|
| 244 |
+
2f8i
|
| 245 |
+
3dek
|
| 246 |
+
5wre
|
| 247 |
+
3hp2
|
| 248 |
+
4bcw
|
| 249 |
+
4uau
|
| 250 |
+
5ap3
|
| 251 |
+
2ksa
|
| 252 |
+
5ekg
|
| 253 |
+
5lh8
|
| 254 |
+
2uw7
|
| 255 |
+
3uvq
|
| 256 |
+
2glp
|
| 257 |
+
3m3r
|
| 258 |
+
6bsx
|
| 259 |
+
2ddf
|
| 260 |
+
2mwp
|
| 261 |
+
1h4n
|
| 262 |
+
4jit
|
| 263 |
+
5f4r
|
| 264 |
+
4ybj
|
| 265 |
+
4kz8
|
| 266 |
+
4ogi
|
| 267 |
+
5ufp
|
| 268 |
+
4uiw
|
| 269 |
+
3igv
|
| 270 |
+
5b56
|
| 271 |
+
1bn3
|
| 272 |
+
2xg9
|
| 273 |
+
5oua
|
| 274 |
+
5qaa
|
| 275 |
+
6h7j
|
| 276 |
+
2o7v
|
| 277 |
+
5v7t
|
| 278 |
+
2r02
|
| 279 |
+
2j94
|
| 280 |
+
1o3k
|
| 281 |
+
4mr4
|
| 282 |
+
5am7
|
| 283 |
+
6g35
|
| 284 |
+
1ljt
|
| 285 |
+
3srv
|
| 286 |
+
4bi6
|
| 287 |
+
2yoh
|
| 288 |
+
5tln
|
| 289 |
+
3rkb
|
| 290 |
+
5jiq
|
| 291 |
+
1tkz
|
| 292 |
+
3qrk
|
| 293 |
+
3c45
|
| 294 |
+
3nf3
|
| 295 |
+
2oaz
|
| 296 |
+
3pwk
|
| 297 |
+
3ryy
|
| 298 |
+
2y7k
|
| 299 |
+
6dm8
|
| 300 |
+
5ism
|
| 301 |
+
5i7u
|
| 302 |
+
2qtn
|
| 303 |
+
5n1y
|
| 304 |
+
4kot
|
| 305 |
+
5wa1
|
| 306 |
+
188l
|
| 307 |
+
3h2o
|
| 308 |
+
1sw1
|
| 309 |
+
1zuc
|
| 310 |
+
2nnq
|
| 311 |
+
4l8m
|
| 312 |
+
5j75
|
| 313 |
+
4afe
|
| 314 |
+
5nwi
|
| 315 |
+
2n06
|
| 316 |
+
3l4w
|
| 317 |
+
3ibl
|
| 318 |
+
4l31
|
| 319 |
+
4ruy
|
| 320 |
+
5sxk
|
| 321 |
+
4hnn
|
| 322 |
+
4zij
|
| 323 |
+
3fdn
|
| 324 |
+
5fe6
|
| 325 |
+
3nuj
|
| 326 |
+
5h8x
|
| 327 |
+
5ets
|
| 328 |
+
1w3k
|
| 329 |
+
3ro0
|
| 330 |
+
3zi8
|
| 331 |
+
1nl4
|
| 332 |
+
5mb1
|
| 333 |
+
5cao
|
| 334 |
+
4gjd
|
| 335 |
+
2p3d
|
| 336 |
+
2z8e
|
| 337 |
+
3w2p
|
| 338 |
+
1f28
|
| 339 |
+
2iw6
|
| 340 |
+
1h9l
|
| 341 |
+
4o78
|
| 342 |
+
1soj
|
| 343 |
+
3krw
|
| 344 |
+
5drr
|
| 345 |
+
3t4v
|
| 346 |
+
1sqp
|
| 347 |
+
5o0a
|
| 348 |
+
4c4i
|
| 349 |
+
2vwx
|
| 350 |
+
6miq
|
| 351 |
+
6guk
|
| 352 |
+
5c3k
|
| 353 |
+
5fhn
|
| 354 |
+
3o75
|
| 355 |
+
5kap
|
| 356 |
+
4zo5
|
| 357 |
+
2jko
|
| 358 |
+
3iss
|
| 359 |
+
4eg6
|
| 360 |
+
4fcd
|
| 361 |
+
4bcf
|
| 362 |
+
1ikx
|
| 363 |
+
6f6i
|
| 364 |
+
1om2
|
| 365 |
+
6axb
|
| 366 |
+
3s3n
|
| 367 |
+
1fl6
|
| 368 |
+
4luo
|
| 369 |
+
3zlw
|
| 370 |
+
1qiw
|
| 371 |
+
5f2w
|
| 372 |
+
5tbj
|
| 373 |
+
5k0c
|
| 374 |
+
5ndf
|
| 375 |
+
1ek2
|
| 376 |
+
4zb8
|
| 377 |
+
3r9o
|
| 378 |
+
5qah
|
| 379 |
+
3avb
|
| 380 |
+
5mi6
|
| 381 |
+
6fu5
|
| 382 |
+
4ymq
|
| 383 |
+
3dpo
|
| 384 |
+
4z7q
|
| 385 |
+
5mri
|
| 386 |
+
2byp
|
| 387 |
+
5du8
|
| 388 |
+
1i7z
|
| 389 |
+
1ec3
|
| 390 |
+
5jmx
|
| 391 |
+
4yxi
|
| 392 |
+
3iw7
|
| 393 |
+
3l4v
|
| 394 |
+
3wcf
|
| 395 |
+
5igk
|
| 396 |
+
1wum
|
| 397 |
+
3rv6
|
| 398 |
+
1x7r
|
| 399 |
+
2c57
|
| 400 |
+
6cwh
|
| 401 |
+
3hkq
|
| 402 |
+
2xag
|
| 403 |
+
5nw7
|
| 404 |
+
5qau
|
| 405 |
+
4ocz
|
| 406 |
+
3chg
|
| 407 |
+
6fns
|
| 408 |
+
5oh1
|
| 409 |
+
1hkm
|
| 410 |
+
5e0l
|
| 411 |
+
3mn8
|
| 412 |
+
5ngf
|
| 413 |
+
4bo6
|
| 414 |
+
1b57
|
| 415 |
+
5wzt
|
| 416 |
+
4uzd
|
| 417 |
+
1cyn
|
| 418 |
+
3eys
|
| 419 |
+
4umc
|
| 420 |
+
6guh
|
| 421 |
+
2e99
|
| 422 |
+
2p8o
|
| 423 |
+
2xb8
|
| 424 |
+
5tus
|
| 425 |
+
6ap7
|
| 426 |
+
4ajn
|
| 427 |
+
4od7
|
| 428 |
+
5v40
|
| 429 |
+
5wjj
|
| 430 |
+
3kc0
|
| 431 |
+
3zs1
|
| 432 |
+
4igq
|
| 433 |
+
5e8r
|
| 434 |
+
4ci1
|
| 435 |
+
3qd0
|
| 436 |
+
2ohv
|
| 437 |
+
3uw5
|
| 438 |
+
3lp4
|
| 439 |
+
3ftw
|
| 440 |
+
6c5q
|
| 441 |
+
4hbw
|
| 442 |
+
2yoj
|
| 443 |
+
4b2i
|
| 444 |
+
3mkn
|
| 445 |
+
1i5r
|
| 446 |
+
5tig
|
| 447 |
+
5bqs
|
| 448 |
+
1oz0
|
| 449 |
+
5a2s
|
| 450 |
+
3fkv
|
| 451 |
+
3qnj
|
| 452 |
+
5w5o
|
| 453 |
+
2cht
|
| 454 |
+
5dj5
|
| 455 |
+
5oje
|
| 456 |
+
4hxs
|
| 457 |
+
5t36
|
| 458 |
+
5gmh
|
| 459 |
+
4eh9
|
| 460 |
+
3kr2
|
| 461 |
+
3rk5
|
| 462 |
+
2b54
|
| 463 |
+
2zxa
|
| 464 |
+
4e70
|
| 465 |
+
4huo
|
| 466 |
+
3zrk
|
| 467 |
+
2iw8
|
| 468 |
+
4ge4
|
| 469 |
+
5nwc
|
| 470 |
+
5lmb
|
| 471 |
+
4i6h
|
| 472 |
+
5y6e
|
| 473 |
+
3n87
|
| 474 |
+
5bvn
|
| 475 |
+
4lxd
|
| 476 |
+
3b3s
|
| 477 |
+
5j9f
|
| 478 |
+
3jzp
|
| 479 |
+
2no3
|
| 480 |
+
1ouk
|
| 481 |
+
5epk
|
| 482 |
+
1g7g
|
| 483 |
+
6au2
|
| 484 |
+
4wt6
|
| 485 |
+
3w2q
|
| 486 |
+
5tdr
|
| 487 |
+
4xx9
|
| 488 |
+
1jg0
|
| 489 |
+
2n27
|
| 490 |
+
5lyn
|
| 491 |
+
1p1q
|
| 492 |
+
1hi3
|
| 493 |
+
2wih
|
| 494 |
+
4hym
|
| 495 |
+
4ew3
|
| 496 |
+
5ixq
|
| 497 |
+
1h60
|
| 498 |
+
3ppk
|
| 499 |
+
4x8s
|
| 500 |
+
3ddf
|
| 501 |
+
1kv2
|
| 502 |
+
4djq
|
| 503 |
+
1ze8
|
| 504 |
+
5ia1
|
| 505 |
+
2qcm
|
| 506 |
+
5m9d
|
| 507 |
+
2erz
|
| 508 |
+
4g0l
|
| 509 |
+
5tqg
|
| 510 |
+
2i5j
|
| 511 |
+
1met
|
| 512 |
+
3r42
|
| 513 |
+
3i97
|
| 514 |
+
1alw
|
| 515 |
+
2r5q
|
| 516 |
+
5cav
|
| 517 |
+
5wj6
|
| 518 |
+
2o7n
|
| 519 |
+
2r4f
|
| 520 |
+
5nkn
|
| 521 |
+
2g6p
|
| 522 |
+
1bnu
|
| 523 |
+
4itj
|
| 524 |
+
4uc5
|
| 525 |
+
3kze
|
| 526 |
+
5dg6
|
| 527 |
+
1nym
|
| 528 |
+
6cki
|
| 529 |
+
4kmz
|
| 530 |
+
6hsh
|
| 531 |
+
4zj8
|
| 532 |
+
6ggn
|
| 533 |
+
4qzx
|
| 534 |
+
5ety
|
| 535 |
+
1o5a
|
| 536 |
+
4otf
|
| 537 |
+
1pgp
|
| 538 |
+
5wex
|
| 539 |
+
4h4m
|
| 540 |
+
4j53
|
| 541 |
+
5dva
|
| 542 |
+
1e3v
|
| 543 |
+
4wop
|
| 544 |
+
4jls
|
| 545 |
+
5mxv
|
| 546 |
+
4m7c
|
| 547 |
+
3fvn
|
| 548 |
+
4xu1
|
| 549 |
+
5ivn
|
| 550 |
+
2o4n
|
| 551 |
+
4olh
|
| 552 |
+
1m5b
|
| 553 |
+
3t1a
|
| 554 |
+
1mqi
|
| 555 |
+
5n84
|
| 556 |
+
4q1e
|
| 557 |
+
5ifu
|
| 558 |
+
5lgr
|
| 559 |
+
5unf
|
| 560 |
+
5w5u
|
| 561 |
+
5tq1
|
| 562 |
+
3dkj
|
| 563 |
+
1br8
|
| 564 |
+
3tsd
|
| 565 |
+
6dvn
|
| 566 |
+
1oxg
|
| 567 |
+
4kz5
|
| 568 |
+
5v3h
|
| 569 |
+
4avi
|
| 570 |
+
220l
|
| 571 |
+
2xbv
|
| 572 |
+
3twx
|
| 573 |
+
5zo8
|
| 574 |
+
2w4i
|
| 575 |
+
4q8y
|
| 576 |
+
5acy
|
| 577 |
+
5l7e
|
| 578 |
+
3arp
|
| 579 |
+
4rt1
|
| 580 |
+
4mq6
|
| 581 |
+
6ey7
|
| 582 |
+
3uzd
|
| 583 |
+
2b9a
|
| 584 |
+
5o4z
|
| 585 |
+
3faa
|
| 586 |
+
2ow0
|
| 587 |
+
6f5w
|
| 588 |
+
1x7q
|
| 589 |
+
5agv
|
| 590 |
+
5pzm
|
| 591 |
+
6afh
|
| 592 |
+
4z1e
|
| 593 |
+
5flp
|
| 594 |
+
1b4z
|
| 595 |
+
5f1r
|
| 596 |
+
1uwh
|
| 597 |
+
3bmo
|
| 598 |
+
1p19
|
| 599 |
+
6fbv
|
| 600 |
+
4lrr
|
| 601 |
+
1gi9
|
| 602 |
+
4xuh
|
| 603 |
+
2weo
|
| 604 |
+
5vqx
|
| 605 |
+
5xpi
|
| 606 |
+
4btm
|
| 607 |
+
5h0e
|
| 608 |
+
4gye
|
| 609 |
+
6bgu
|
| 610 |
+
5wlt
|
| 611 |
+
1t49
|
| 612 |
+
5k1v
|
| 613 |
+
1fsw
|
| 614 |
+
6cvx
|
| 615 |
+
6c3e
|
| 616 |
+
6cj5
|
| 617 |
+
5fqc
|
| 618 |
+
3uwl
|
| 619 |
+
3pka
|
| 620 |
+
5l3g
|
| 621 |
+
5jnc
|
| 622 |
+
3ps6
|
| 623 |
+
6csr
|
| 624 |
+
3hau
|
| 625 |
+
3ud9
|
| 626 |
+
6csp
|
| 627 |
+
5lqf
|
| 628 |
+
4bfz
|
| 629 |
+
2xp6
|
| 630 |
+
3sjo
|
| 631 |
+
5os3
|
| 632 |
+
4z8d
|
| 633 |
+
5un1
|
| 634 |
+
6esy
|
| 635 |
+
3kjq
|
| 636 |
+
5mgl
|
| 637 |
+
5dp5
|
| 638 |
+
3n0h
|
| 639 |
+
5fao
|
| 640 |
+
6dxl
|
| 641 |
+
4qw0
|
| 642 |
+
3q92
|
| 643 |
+
3rwf
|
| 644 |
+
2xke
|
| 645 |
+
4gfo
|
| 646 |
+
3t03
|
| 647 |
+
3pd8
|
| 648 |
+
3f18
|
| 649 |
+
4utr
|
| 650 |
+
5n1s
|
| 651 |
+
3wkb
|
| 652 |
+
2zdt
|
| 653 |
+
2yit
|
| 654 |
+
5x54
|
| 655 |
+
5dpa
|
| 656 |
+
4rt0
|
| 657 |
+
2y5h
|
| 658 |
+
3cde
|
| 659 |
+
5lud
|
| 660 |
+
3vva
|
| 661 |
+
2wev
|
| 662 |
+
5mrm
|
| 663 |
+
4ypf
|
| 664 |
+
5ea4
|
| 665 |
+
3fr5
|
| 666 |
+
4tkh
|
| 667 |
+
4n9e
|
| 668 |
+
5tyi
|
| 669 |
+
1db1
|
| 670 |
+
4hxj
|
| 671 |
+
3hu1
|
| 672 |
+
5ftg
|
| 673 |
+
2eh8
|
| 674 |
+
1jiz
|
| 675 |
+
4est
|
| 676 |
+
5nih
|
| 677 |
+
1rr6
|
| 678 |
+
4fp1
|
| 679 |
+
4y24
|
| 680 |
+
3lea
|
| 681 |
+
4lch
|
| 682 |
+
4qjr
|
| 683 |
+
4os1
|
| 684 |
+
4pft
|
| 685 |
+
5eb3
|
| 686 |
+
5qad
|
| 687 |
+
3cpc
|
| 688 |
+
3c94
|
| 689 |
+
5jhb
|
| 690 |
+
6b22
|
| 691 |
+
3vfj
|
| 692 |
+
6hmx
|
| 693 |
+
1kuk
|
| 694 |
+
2gh9
|
| 695 |
+
1il9
|
| 696 |
+
1xh6
|
| 697 |
+
3eyl
|
| 698 |
+
5hng
|
| 699 |
+
3nes
|
| 700 |
+
3i1y
|
| 701 |
+
2anl
|
| 702 |
+
3n2e
|
| 703 |
+
2n0u
|
| 704 |
+
4zt6
|
| 705 |
+
4f6v
|
| 706 |
+
5n7g
|
| 707 |
+
4yc9
|
| 708 |
+
4igr
|
| 709 |
+
3q6w
|
| 710 |
+
2cmb
|
| 711 |
+
3pww
|
| 712 |
+
4cpr
|
| 713 |
+
3t2t
|
| 714 |
+
5d7x
|
| 715 |
+
3muf
|
| 716 |
+
5f95
|
| 717 |
+
5e2w
|
| 718 |
+
2chx
|
| 719 |
+
2pk6
|
| 720 |
+
3bet
|
| 721 |
+
6cje
|
| 722 |
+
5khk
|
| 723 |
+
4eh5
|
| 724 |
+
1i9l
|
| 725 |
+
4q4o
|
| 726 |
+
1kf0
|
| 727 |
+
1llb
|
| 728 |
+
5aep
|
| 729 |
+
2zp0
|
| 730 |
+
3zze
|
| 731 |
+
6dar
|
| 732 |
+
5eyk
|
| 733 |
+
3u7n
|
| 734 |
+
4z0e
|
| 735 |
+
5fh6
|
| 736 |
+
5kj2
|
| 737 |
+
2o4k
|
| 738 |
+
5llg
|
| 739 |
+
3li2
|
| 740 |
+
3of8
|
| 741 |
+
3mvh
|
| 742 |
+
1jut
|
| 743 |
+
1auj
|
| 744 |
+
5awj
|
| 745 |
+
5x73
|
| 746 |
+
1f90
|
| 747 |
+
4crd
|
| 748 |
+
1d2e
|
| 749 |
+
5byz
|
| 750 |
+
3gol
|
| 751 |
+
2h9m
|
| 752 |
+
3vbg
|
| 753 |
+
3rse
|
| 754 |
+
2ydk
|
| 755 |
+
4mp2
|
| 756 |
+
3hhk
|
| 757 |
+
5tur
|
| 758 |
+
2xxr
|
| 759 |
+
2r3w
|
| 760 |
+
4u6e
|
| 761 |
+
4qlt
|
| 762 |
+
3db8
|
| 763 |
+
2bub
|
| 764 |
+
3s8o
|
| 765 |
+
3eg6
|
| 766 |
+
1jaq
|
| 767 |
+
3ua8
|
| 768 |
+
2kfg
|
| 769 |
+
3d3x
|
| 770 |
+
5a5v
|
| 771 |
+
3nuu
|
| 772 |
+
1jh1
|
| 773 |
+
1iy7
|
| 774 |
+
5zo7
|
| 775 |
+
4xnw
|
| 776 |
+
5dlz
|
| 777 |
+
4wpf
|
| 778 |
+
4pvy
|
| 779 |
+
1sve
|
| 780 |
+
4qmy
|
| 781 |
+
5drq
|
| 782 |
+
4leq
|
| 783 |
+
1azg
|
| 784 |
+
3wjw
|
| 785 |
+
2qiq
|
| 786 |
+
5yjp
|
| 787 |
+
2qfu
|
| 788 |
+
1thz
|
| 789 |
+
1oi9
|
| 790 |
+
3w1f
|
| 791 |
+
2v12
|
| 792 |
+
3cd5
|
| 793 |
+
1fpy
|
| 794 |
+
5f37
|
| 795 |
+
1e03
|
| 796 |
+
4q8x
|
| 797 |
+
5uk8
|
| 798 |
+
4v25
|
| 799 |
+
3u8k
|
| 800 |
+
5hlb
|
| 801 |
+
4qfn
|
| 802 |
+
4li7
|
| 803 |
+
4gkm
|
| 804 |
+
6f3e
|
| 805 |
+
4rn4
|
| 806 |
+
3vtb
|
| 807 |
+
5l6i
|
| 808 |
+
4lrh
|
| 809 |
+
4jvq
|
| 810 |
+
5est
|
| 811 |
+
3gtc
|
| 812 |
+
2jbu
|
| 813 |
+
4yx9
|
| 814 |
+
5bs0
|
| 815 |
+
5ea3
|
| 816 |
+
1vyf
|
| 817 |
+
3cyw
|
| 818 |
+
3kku
|
| 819 |
+
3tvx
|
| 820 |
+
6b4d
|
| 821 |
+
4bny
|
| 822 |
+
5j8x
|
| 823 |
+
5tza
|
| 824 |
+
5yg2
|
| 825 |
+
1d6v
|
| 826 |
+
5t4e
|
| 827 |
+
2j4g
|
| 828 |
+
5tci
|
| 829 |
+
1z9y
|
| 830 |
+
2ca8
|
| 831 |
+
4hyf
|
| 832 |
+
2r1y
|
| 833 |
+
4bcq
|
| 834 |
+
1lqf
|
| 835 |
+
6b3v
|
| 836 |
+
5htc
|
| 837 |
+
2qh6
|
| 838 |
+
4pul
|
| 839 |
+
1gj7
|
| 840 |
+
4mmf
|
| 841 |
+
3sfg
|
| 842 |
+
5ap2
|
| 843 |
+
2oi9
|
| 844 |
+
4h81
|
| 845 |
+
3s77
|
| 846 |
+
3s74
|
| 847 |
+
4u5u
|
| 848 |
+
6euz
|
| 849 |
+
6eq3
|
| 850 |
+
4obq
|
| 851 |
+
4u4x
|
| 852 |
+
2r23
|
| 853 |
+
4tjw
|
| 854 |
+
2a4z
|
| 855 |
+
2mwy
|
| 856 |
+
4pzh
|
| 857 |
+
5m7m
|
| 858 |
+
4mzj
|
| 859 |
+
1o3l
|
| 860 |
+
3d62
|
| 861 |
+
2xow
|
| 862 |
+
3rwq
|
| 863 |
+
4fpf
|
| 864 |
+
1f5k
|
| 865 |
+
5c7c
|
| 866 |
+
3q77
|
| 867 |
+
4pnr
|
| 868 |
+
5hz8
|
| 869 |
+
5ho7
|
| 870 |
+
5xmx
|
| 871 |
+
2xpk
|
| 872 |
+
6mdq
|
| 873 |
+
3tge
|
| 874 |
+
5fls
|
| 875 |
+
3oku
|
| 876 |
+
5jr2
|
| 877 |
+
3e0q
|
| 878 |
+
2l98
|
| 879 |
+
5csz
|
| 880 |
+
5yr6
|
| 881 |
+
6m8w
|
| 882 |
+
1ex8
|
| 883 |
+
6ft7
|
| 884 |
+
2bdj
|
| 885 |
+
5l7g
|
| 886 |
+
1cnx
|
| 887 |
+
1mmp
|
| 888 |
+
2ay2
|
| 889 |
+
6fnq
|
| 890 |
+
5eng
|
| 891 |
+
4a6v
|
| 892 |
+
2zq2
|
| 893 |
+
4wag
|
| 894 |
+
3s7a
|
| 895 |
+
4is6
|
| 896 |
+
5tq8
|
| 897 |
+
4w9x
|
| 898 |
+
5lto
|
| 899 |
+
1o86
|
| 900 |
+
5a69
|
| 901 |
+
4zg7
|
| 902 |
+
4unq
|
| 903 |
+
2q88
|
| 904 |
+
2jds
|
| 905 |
+
5c1m
|
| 906 |
+
6bgy
|
| 907 |
+
4fil
|
| 908 |
+
2qi1
|
| 909 |
+
2x7o
|
| 910 |
+
5icx
|
| 911 |
+
6exj
|
| 912 |
+
3nkk
|
| 913 |
+
1q65
|
| 914 |
+
2xxw
|
| 915 |
+
1om9
|
| 916 |
+
4jq7
|
| 917 |
+
1lf2
|
| 918 |
+
4xg4
|
| 919 |
+
3sgx
|
| 920 |
+
1sdt
|
| 921 |
+
4kp8
|
| 922 |
+
1hwr
|
| 923 |
+
2cem
|
| 924 |
+
5nzo
|
| 925 |
+
3oaw
|
| 926 |
+
2wwj
|
| 927 |
+
4g11
|
| 928 |
+
1lf3
|
| 929 |
+
6hm4
|
| 930 |
+
3jq9
|
| 931 |
+
1qx1
|
| 932 |
+
4old
|
| 933 |
+
1g5s
|
| 934 |
+
3tic
|
| 935 |
+
3cs8
|
| 936 |
+
4er4
|
| 937 |
+
5kr0
|
| 938 |
+
2pe0
|
| 939 |
+
5abf
|
| 940 |
+
3v04
|
| 941 |
+
4e6q
|
| 942 |
+
2yc3
|
| 943 |
+
3bys
|
| 944 |
+
5jeo
|
| 945 |
+
4fci
|
| 946 |
+
5eh5
|
| 947 |
+
4r02
|
| 948 |
+
5ef7
|
| 949 |
+
3dt1
|
| 950 |
+
5v5n
|
| 951 |
+
5ehp
|
| 952 |
+
5yr4
|
| 953 |
+
6gl8
|
| 954 |
+
3uvl
|
| 955 |
+
3v0l
|
| 956 |
+
5ne5
|
| 957 |
+
4nvp
|
| 958 |
+
4qok
|
| 959 |
+
4ipj
|
| 960 |
+
4zyi
|
| 961 |
+
1o3j
|
| 962 |
+
1lrt
|
| 963 |
+
5zwe
|
| 964 |
+
4hbn
|
| 965 |
+
2gz2
|
| 966 |
+
6bj2
|
| 967 |
+
5tco
|
| 968 |
+
3gc5
|
| 969 |
+
4b4m
|
| 970 |
+
4l6t
|
| 971 |
+
1zsb
|
| 972 |
+
5mnx
|
| 973 |
+
5m23
|
| 974 |
+
4do3
|
| 975 |
+
2etr
|
| 976 |
+
5gwy
|
| 977 |
+
3b5j
|
| 978 |
+
1oai
|
| 979 |
+
2ath
|
| 980 |
+
2v11
|
| 981 |
+
2ym6
|
| 982 |
+
4mwu
|
| 983 |
+
6g86
|
| 984 |
+
4f7l
|
| 985 |
+
4nan
|
| 986 |
+
4f63
|
| 987 |
+
3m3x
|
| 988 |
+
2bpm
|
| 989 |
+
2wb5
|
| 990 |
+
6fel
|
| 991 |
+
3tdc
|
| 992 |
+
4fgy
|
| 993 |
+
4z1s
|
| 994 |
+
6bh1
|
| 995 |
+
1f8a
|
| 996 |
+
3cyz
|
| 997 |
+
3m8p
|
| 998 |
+
2df6
|
| 999 |
+
3jrs
|
| 1000 |
+
5oah
|
| 1001 |
+
2wmv
|
| 1002 |
+
3d50
|
| 1003 |
+
3aza
|
| 1004 |
+
1jqe
|
| 1005 |
+
4nb3
|
| 1006 |
+
1okw
|
| 1007 |
+
4n98
|
| 1008 |
+
6hzp
|
| 1009 |
+
6aoy
|
| 1010 |
+
4o3u
|
| 1011 |
+
5nev
|
| 1012 |
+
5byy
|
| 1013 |
+
4bo7
|
| 1014 |
+
3su2
|
| 1015 |
+
4hlf
|
| 1016 |
+
3sww
|
| 1017 |
+
2os9
|
| 1018 |
+
5eei
|
| 1019 |
+
4men
|
| 1020 |
+
4qq4
|
| 1021 |
+
2wxd
|
| 1022 |
+
3oqf
|
| 1023 |
+
3eht
|
| 1024 |
+
3dd0
|
| 1025 |
+
2ke1
|
| 1026 |
+
4v1f
|
| 1027 |
+
4e6d
|
| 1028 |
+
1jyi
|
| 1029 |
+
4ob1
|
| 1030 |
+
5x4n
|
| 1031 |
+
5alc
|
| 1032 |
+
2pbw
|
| 1033 |
+
5eym
|
| 1034 |
+
4e28
|
| 1035 |
+
4qtl
|
| 1036 |
+
5jz9
|
| 1037 |
+
3chr
|
| 1038 |
+
2p2a
|
| 1039 |
+
1v1j
|
| 1040 |
+
4o3b
|
| 1041 |
+
5v83
|
| 1042 |
+
5v5e
|
| 1043 |
+
3byu
|
| 1044 |
+
7abp
|
| 1045 |
+
5jga
|
| 1046 |
+
3sw2
|
| 1047 |
+
5jgb
|
| 1048 |
+
4dtk
|
| 1049 |
+
1nje
|
| 1050 |
+
4yrt
|
| 1051 |
+
5ih5
|
| 1052 |
+
6afe
|
| 1053 |
+
4res
|
| 1054 |
+
1sqn
|
| 1055 |
+
4oas
|
| 1056 |
+
5u5l
|
| 1057 |
+
1v79
|
| 1058 |
+
3hp5
|
| 1059 |
+
5ivc
|
| 1060 |
+
3avg
|
| 1061 |
+
3tws
|
| 1062 |
+
1rmz
|
| 1063 |
+
4rse
|
| 1064 |
+
5zae
|
| 1065 |
+
5w2q
|
| 1066 |
+
5glu
|
| 1067 |
+
5b4w
|
| 1068 |
+
5f62
|
| 1069 |
+
4y6r
|
| 1070 |
+
4mha
|
| 1071 |
+
5etk
|
| 1072 |
+
5nzm
|
| 1073 |
+
4o2b
|
| 1074 |
+
3p78
|
| 1075 |
+
1tyn
|
| 1076 |
+
2pj8
|
| 1077 |
+
3ejq
|
| 1078 |
+
3lvw
|
| 1079 |
+
2fw3
|
| 1080 |
+
2f6y
|
| 1081 |
+
6bic
|
| 1082 |
+
4p0x
|
| 1083 |
+
5tkt
|
| 1084 |
+
4acd
|
| 1085 |
+
5aki
|
| 1086 |
+
3zy2
|
| 1087 |
+
4tw7
|
| 1088 |
+
3b3w
|
| 1089 |
+
1nkm
|
| 1090 |
+
5uoo
|
| 1091 |
+
4l7u
|
| 1092 |
+
4iut
|
| 1093 |
+
1pbq
|
| 1094 |
+
4dow
|
| 1095 |
+
3hab
|
| 1096 |
+
1owe
|
| 1097 |
+
4rvm
|
| 1098 |
+
6ft3
|
| 1099 |
+
4y2q
|
| 1100 |
+
2hs1
|
| 1101 |
+
6gu6
|
| 1102 |
+
4j2c
|
| 1103 |
+
1s50
|
| 1104 |
+
5fox
|
| 1105 |
+
3jvs
|
| 1106 |
+
1zub
|
| 1107 |
+
5wxp
|
| 1108 |
+
5oh7
|
| 1109 |
+
4ydg
|
| 1110 |
+
3ovn
|
| 1111 |
+
1h08
|
| 1112 |
+
5wzv
|
| 1113 |
+
2fxs
|
| 1114 |
+
3f37
|
| 1115 |
+
4xtz
|
| 1116 |
+
4mww
|
| 1117 |
+
6gg4
|
| 1118 |
+
4zz3
|
| 1119 |
+
5cf4
|
| 1120 |
+
4twc
|
| 1121 |
+
3zsz
|
| 1122 |
+
4er2
|
| 1123 |
+
4uuh
|
| 1124 |
+
1i8j
|
| 1125 |
+
5opv
|
| 1126 |
+
3voz
|
| 1127 |
+
3d1g
|
| 1128 |
+
3kfc
|
| 1129 |
+
3qqs
|
| 1130 |
+
1oay
|
| 1131 |
+
5os4
|
| 1132 |
+
4abe
|
| 1133 |
+
4jwr
|
| 1134 |
+
1l6m
|
| 1135 |
+
3nf6
|
| 1136 |
+
3qip
|
| 1137 |
+
4udb
|
| 1138 |
+
1nlt
|
| 1139 |
+
3zdg
|
| 1140 |
+
3o84
|
| 1141 |
+
3ehn
|
| 1142 |
+
3k16
|
| 1143 |
+
5szb
|
| 1144 |
+
2j2u
|
| 1145 |
+
2weg
|
| 1146 |
+
1ttm
|
| 1147 |
+
3dz4
|
| 1148 |
+
4jdf
|
| 1149 |
+
3wyy
|
| 1150 |
+
3ifo
|
| 1151 |
+
5j41
|
| 1152 |
+
3fl5
|
| 1153 |
+
1q1m
|
| 1154 |
+
2y80
|
| 1155 |
+
3fr4
|
| 1156 |
+
3o56
|
| 1157 |
+
1yrs
|
| 1158 |
+
2gvv
|
| 1159 |
+
3uxk
|
| 1160 |
+
2zq0
|
| 1161 |
+
5uac
|
| 1162 |
+
2wk6
|
| 1163 |
+
184l
|
| 1164 |
+
6gvz
|
| 1165 |
+
6dik
|
| 1166 |
+
3u8w
|
| 1167 |
+
3rxb
|
| 1168 |
+
3nwe
|
| 1169 |
+
13gs
|
| 1170 |
+
4gpl
|
| 1171 |
+
4rxz
|
| 1172 |
+
4qbm
|
| 1173 |
+
5y9l
|
| 1174 |
+
1okz
|
| 1175 |
+
1mpl
|
| 1176 |
+
4rra
|
| 1177 |
+
5ey8
|
| 1178 |
+
4j03
|
| 1179 |
+
5npc
|
| 1180 |
+
5tt3
|
| 1181 |
+
1uwf
|
| 1182 |
+
4dgr
|
| 1183 |
+
5xzr
|
| 1184 |
+
4mw4
|
| 1185 |
+
3n5k
|
| 1186 |
+
4umq
|
| 1187 |
+
2l6e
|
| 1188 |
+
6drg
|
| 1189 |
+
2ksp
|
| 1190 |
+
1sps
|
| 1191 |
+
5hna
|
| 1192 |
+
6gnp
|
| 1193 |
+
3dxk
|
| 1194 |
+
5eom
|
| 1195 |
+
1e34
|
| 1196 |
+
5ff6
|
| 1197 |
+
2itp
|
| 1198 |
+
2zlf
|
| 1199 |
+
4iva
|
| 1200 |
+
2y4m
|
| 1201 |
+
4o1b
|
| 1202 |
+
2o9v
|
| 1203 |
+
4i7f
|
| 1204 |
+
4ezw
|
| 1205 |
+
1k4h
|
| 1206 |
+
2xgm
|
| 1207 |
+
4nud
|
| 1208 |
+
3m55
|
| 1209 |
+
4x34
|
| 1210 |
+
5fqr
|
| 1211 |
+
2h6k
|
| 1212 |
+
3fmr
|
| 1213 |
+
4kzb
|
| 1214 |
+
3tpx
|
| 1215 |
+
4hpy
|
| 1216 |
+
4mvh
|
| 1217 |
+
2nq7
|
| 1218 |
+
3n5u
|
| 1219 |
+
3rjw
|
| 1220 |
+
3emg
|
| 1221 |
+
2xm9
|
| 1222 |
+
1kzn
|
| 1223 |
+
5faq
|
| 1224 |
+
5f1u
|
| 1225 |
+
1c6y
|
| 1226 |
+
4j8g
|
| 1227 |
+
5lvr
|
| 1228 |
+
1xuo
|
| 1229 |
+
2vgo
|
| 1230 |
+
1grp
|
| 1231 |
+
2rol
|
| 1232 |
+
5cbm
|
| 1233 |
+
6hke
|
| 1234 |
+
5tr6
|
| 1235 |
+
3d6o
|
| 1236 |
+
5awu
|
| 1237 |
+
4wz8
|
| 1238 |
+
5egm
|
| 1239 |
+
1duv
|
| 1240 |
+
6b4n
|
| 1241 |
+
1t7j
|
| 1242 |
+
4jh0
|
| 1243 |
+
4q7s
|
| 1244 |
+
5cpr
|
| 1245 |
+
1bqm
|
| 1246 |
+
4fem
|
| 1247 |
+
4wnk
|
| 1248 |
+
4uv8
|
| 1249 |
+
5hgq
|
| 1250 |
+
6ck3
|
| 1251 |
+
3uzj
|
| 1252 |
+
3u2q
|
| 1253 |
+
1nl9
|
| 1254 |
+
5yzd
|
| 1255 |
+
2i0j
|
| 1256 |
+
4hvh
|
| 1257 |
+
4ool
|
| 1258 |
+
3s0n
|
| 1259 |
+
3b4p
|
| 1260 |
+
2ea4
|
| 1261 |
+
5k76
|
| 1262 |
+
1nvq
|
| 1263 |
+
2xzq
|
| 1264 |
+
1pq6
|
| 1265 |
+
2y81
|
| 1266 |
+
4bhf
|
| 1267 |
+
3f8s
|
| 1268 |
+
2hwh
|
| 1269 |
+
4y4g
|
| 1270 |
+
1q4w
|
| 1271 |
+
4h39
|
| 1272 |
+
3zmh
|
| 1273 |
+
5mkz
|
| 1274 |
+
1ftl
|
| 1275 |
+
4ok5
|
| 1276 |
+
2qcn
|
| 1277 |
+
1g52
|
| 1278 |
+
3i4a
|
| 1279 |
+
5x79
|
| 1280 |
+
2am2
|
| 1281 |
+
2ym7
|
| 1282 |
+
3adu
|
| 1283 |
+
3t6y
|
| 1284 |
+
4pgc
|
| 1285 |
+
1k1j
|
| 1286 |
+
3d14
|
| 1287 |
+
3el4
|
| 1288 |
+
2y0j
|
| 1289 |
+
2w1c
|
| 1290 |
+
5l7k
|
| 1291 |
+
4n6g
|
| 1292 |
+
5cp5
|
| 1293 |
+
2isc
|
| 1294 |
+
5cbs
|
| 1295 |
+
5hvu
|
| 1296 |
+
5mpn
|
| 1297 |
+
2o0u
|
| 1298 |
+
5zoo
|
| 1299 |
+
1t69
|
| 1300 |
+
1oj5
|
| 1301 |
+
5lle
|
| 1302 |
+
4ezk
|
| 1303 |
+
2c6o
|
| 1304 |
+
5vrl
|
| 1305 |
+
5za8
|
| 1306 |
+
1i80
|
| 1307 |
+
6ce2
|
| 1308 |
+
4cfx
|
| 1309 |
+
3m5e
|
| 1310 |
+
2xyn
|
| 1311 |
+
6eea
|
| 1312 |
+
3s3m
|
| 1313 |
+
5t4f
|
| 1314 |
+
6br3
|
| 1315 |
+
3krl
|
| 1316 |
+
3upy
|
| 1317 |
+
2rm0
|
| 1318 |
+
4app
|
| 1319 |
+
7gch
|
| 1320 |
+
5hx8
|
| 1321 |
+
4ok6
|
| 1322 |
+
5g3w
|
| 1323 |
+
1h27
|
| 1324 |
+
1wvc
|
| 1325 |
+
1idg
|
| 1326 |
+
5c8n
|
| 1327 |
+
3w33
|
| 1328 |
+
3rzb
|
| 1329 |
+
4xg6
|
| 1330 |
+
2rib
|
| 1331 |
+
3oz1
|
| 1332 |
+
2uuo
|
| 1333 |
+
1r1h
|
| 1334 |
+
5agu
|
| 1335 |
+
1r58
|
| 1336 |
+
1nlo
|
| 1337 |
+
3g70
|
| 1338 |
+
2ofu
|
| 1339 |
+
4k6z
|
| 1340 |
+
4x13
|
| 1341 |
+
5ihc
|
| 1342 |
+
2yiw
|
| 1343 |
+
6bgw
|
| 1344 |
+
6gji
|
| 1345 |
+
1tve
|
| 1346 |
+
5tq5
|
| 1347 |
+
4ujb
|
| 1348 |
+
4l33
|
| 1349 |
+
6cqz
|
| 1350 |
+
4avh
|
| 1351 |
+
5uga
|
| 1352 |
+
5tzy
|
| 1353 |
+
5op6
|
| 1354 |
+
1m7d
|
| 1355 |
+
4aia
|
| 1356 |
+
4qgh
|
| 1357 |
+
3uuo
|
| 1358 |
+
1me8
|
| 1359 |
+
4jr3
|
| 1360 |
+
1wdy
|
| 1361 |
+
4o4r
|
| 1362 |
+
2c3j
|
| 1363 |
+
2r9m
|
| 1364 |
+
5ual
|
| 1365 |
+
3vfq
|
| 1366 |
+
4c16
|
| 1367 |
+
6h7l
|
| 1368 |
+
3tv6
|
| 1369 |
+
2pyn
|
| 1370 |
+
2bj4
|
| 1371 |
+
4qvy
|
| 1372 |
+
2puy
|
| 1373 |
+
4abk
|
| 1374 |
+
3m53
|
| 1375 |
+
3cwk
|
| 1376 |
+
3d52
|
| 1377 |
+
1a4q
|
| 1378 |
+
4rvl
|
| 1379 |
+
4odp
|
| 1380 |
+
1ogz
|
| 1381 |
+
3luo
|
| 1382 |
+
4ran
|
| 1383 |
+
3uph
|
| 1384 |
+
1hpx
|
| 1385 |
+
3c8b
|
| 1386 |
+
2vtl
|
| 1387 |
+
1fj4
|
| 1388 |
+
2zm3
|
| 1389 |
+
3rxl
|
| 1390 |
+
3uo5
|
| 1391 |
+
1q1y
|
| 1392 |
+
2ql9
|
| 1393 |
+
1bhf
|
| 1394 |
+
2jg8
|
| 1395 |
+
2q2n
|
| 1396 |
+
4aof
|
| 1397 |
+
1heg
|
| 1398 |
+
3ff3
|
| 1399 |
+
4cae
|
| 1400 |
+
6fkq
|
| 1401 |
+
4i9i
|
| 1402 |
+
4msg
|
| 1403 |
+
3ask
|
| 1404 |
+
1dbj
|
| 1405 |
+
4zbf
|
| 1406 |
+
2io6
|
| 1407 |
+
3d1y
|
| 1408 |
+
3kwf
|
| 1409 |
+
1juj
|
| 1410 |
+
3i0r
|
| 1411 |
+
4gui
|
| 1412 |
+
2hy0
|
| 1413 |
+
6g9m
|
| 1414 |
+
3nnx
|
| 1415 |
+
5u48
|
| 1416 |
+
2v2v
|
| 1417 |
+
6f5h
|
| 1418 |
+
5mwh
|
| 1419 |
+
5m0d
|
| 1420 |
+
5fue
|
| 1421 |
+
3k5k
|
| 1422 |
+
5edi
|
| 1423 |
+
4d2p
|
| 1424 |
+
2cm7
|
| 1425 |
+
8cpa
|
| 1426 |
+
3doy
|
| 1427 |
+
1vyg
|
| 1428 |
+
6bmv
|
| 1429 |
+
5m4q
|
| 1430 |
+
5j4n
|
| 1431 |
+
4tpt
|
| 1432 |
+
2jbv
|
| 1433 |
+
5ue4
|
| 1434 |
+
5enj
|
| 1435 |
+
3m35
|
| 1436 |
+
4a9t
|
| 1437 |
+
5khx
|
| 1438 |
+
5f0h
|
| 1439 |
+
4wq3
|
| 1440 |
+
4ktu
|
| 1441 |
+
2qbr
|
| 1442 |
+
1sbr
|
| 1443 |
+
2f6z
|
| 1444 |
+
1z2b
|
| 1445 |
+
6cgt
|
| 1446 |
+
5nra
|
| 1447 |
+
3fed
|
| 1448 |
+
5l2o
|
| 1449 |
+
3uik
|
| 1450 |
+
4bzn
|
| 1451 |
+
1efi
|
| 1452 |
+
3nrm
|
| 1453 |
+
5kjm
|
| 1454 |
+
2cle
|
| 1455 |
+
3h9k
|
| 1456 |
+
5n4s
|
| 1457 |
+
3mfv
|
| 1458 |
+
5yie
|
| 1459 |
+
5yz7
|
| 1460 |
+
5cr7
|
| 1461 |
+
4e35
|
| 1462 |
+
6di0
|
| 1463 |
+
4w9n
|
| 1464 |
+
3vqh
|
| 1465 |
+
4z1j
|
| 1466 |
+
6b98
|
| 1467 |
+
4x6m
|
| 1468 |
+
2pu1
|
| 1469 |
+
4q1s
|
| 1470 |
+
3n7a
|
| 1471 |
+
4qbb
|
| 1472 |
+
6fng
|
| 1473 |
+
5ya5
|
| 1474 |
+
3qiz
|
| 1475 |
+
5awt
|
| 1476 |
+
3sv8
|
| 1477 |
+
3doz
|
| 1478 |
+
6dlx
|
| 1479 |
+
4rkx
|
| 1480 |
+
3h91
|
| 1481 |
+
4uma
|
| 1482 |
+
1lvc
|
| 1483 |
+
5gso
|
| 1484 |
+
4loo
|
| 1485 |
+
5ap0
|
| 1486 |
+
5drc
|
| 1487 |
+
4j5e
|
| 1488 |
+
2o22
|
| 1489 |
+
3rz0
|
| 1490 |
+
5unj
|
| 1491 |
+
5qcl
|
| 1492 |
+
3cii
|
| 1493 |
+
4nw6
|
| 1494 |
+
4qqi
|
| 1495 |
+
3r0i
|
| 1496 |
+
4w9g
|
| 1497 |
+
5uff
|
| 1498 |
+
4lbp
|
| 1499 |
+
5akh
|
| 1500 |
+
2yjb
|
| 1501 |
+
3dx0
|
| 1502 |
+
4dfg
|
| 1503 |
+
3g42
|
| 1504 |
+
5xpp
|
| 1505 |
+
2c4g
|
| 1506 |
+
2bz6
|
| 1507 |
+
3x00
|
| 1508 |
+
4nat
|
| 1509 |
+
4ppc
|
| 1510 |
+
3lpb
|
| 1511 |
+
3tlh
|
| 1512 |
+
3nok
|
| 1513 |
+
3lfs
|
| 1514 |
+
3o9h
|
| 1515 |
+
3we4
|
| 1516 |
+
4xek
|
| 1517 |
+
2kgi
|
| 1518 |
+
4jok
|
| 1519 |
+
5llc
|
| 1520 |
+
2wap
|
| 1521 |
+
5ogb
|
| 1522 |
+
5w8i
|
| 1523 |
+
1a42
|
| 1524 |
+
2llq
|
| 1525 |
+
5j7b
|
| 1526 |
+
2c6l
|
| 1527 |
+
1lt5
|
| 1528 |
+
5ai5
|
| 1529 |
+
3khj
|
| 1530 |
+
6hh3
|
| 1531 |
+
4uac
|
| 1532 |
+
5nob
|
| 1533 |
+
1ynd
|
| 1534 |
+
2on6
|
| 1535 |
+
5ut4
|
| 1536 |
+
2wkz
|
| 1537 |
+
5jvi
|
| 1538 |
+
3gww
|
| 1539 |
+
4uja
|
| 1540 |
+
1pq3
|
| 1541 |
+
4u5v
|
| 1542 |
+
4z83
|
| 1543 |
+
6dh8
|
| 1544 |
+
1h36
|
| 1545 |
+
3vjk
|
| 1546 |
+
1unh
|
| 1547 |
+
3hv7
|
| 1548 |
+
2vxj
|
| 1549 |
+
5lgo
|
| 1550 |
+
1a08
|
| 1551 |
+
1fgi
|
| 1552 |
+
5fqs
|
| 1553 |
+
4o0z
|
| 1554 |
+
1gvx
|
| 1555 |
+
5vqe
|
| 1556 |
+
2j7w
|
| 1557 |
+
3b1m
|
| 1558 |
+
5zah
|
| 1559 |
+
1iwq
|
| 1560 |
+
4zx8
|
| 1561 |
+
3wkd
|
| 1562 |
+
5mtv
|
| 1563 |
+
5myd
|
| 1564 |
+
3t6r
|
| 1565 |
+
2c6n
|
| 1566 |
+
2c6e
|
| 1567 |
+
3opm
|
| 1568 |
+
5n9r
|
| 1569 |
+
6gue
|
| 1570 |
+
3gnv
|
| 1571 |
+
5nyz
|
| 1572 |
+
1fh7
|
| 1573 |
+
5wae
|
| 1574 |
+
1tow
|
| 1575 |
+
1toj
|
| 1576 |
+
5een
|
| 1577 |
+
5fog
|
| 1578 |
+
6e9a
|
| 1579 |
+
5jek
|
| 1580 |
+
3m94
|
| 1581 |
+
1jet
|
| 1582 |
+
5qck
|
| 1583 |
+
6coj
|
| 1584 |
+
1if8
|
| 1585 |
+
5t6g
|
| 1586 |
+
5eje
|
| 1587 |
+
1ezf
|
| 1588 |
+
3twd
|
| 1589 |
+
3mo8
|
| 1590 |
+
4rvt
|
| 1591 |
+
6bbu
|
| 1592 |
+
4ejl
|
| 1593 |
+
1waw
|
| 1594 |
+
4heu
|
| 1595 |
+
6b67
|
| 1596 |
+
2cmc
|
| 1597 |
+
3arv
|
| 1598 |
+
4r4t
|
| 1599 |
+
1j80
|
| 1600 |
+
5ads
|
| 1601 |
+
6abp
|
| 1602 |
+
5a82
|
| 1603 |
+
3el7
|
| 1604 |
+
6fex
|
| 1605 |
+
4i9h
|
| 1606 |
+
4m5o
|
| 1607 |
+
3qgw
|
| 1608 |
+
1k1i
|
| 1609 |
+
5t8o
|
| 1610 |
+
3pcn
|
| 1611 |
+
3nw3
|
| 1612 |
+
5w84
|
| 1613 |
+
3bqn
|
| 1614 |
+
4pin
|
| 1615 |
+
2w3o
|
| 1616 |
+
5yic
|
| 1617 |
+
3vo3
|
| 1618 |
+
4unr
|
| 1619 |
+
4cp5
|
| 1620 |
+
4b73
|
| 1621 |
+
6c6o
|
| 1622 |
+
1xz8
|
| 1623 |
+
5zo9
|
| 1624 |
+
2g1r
|
| 1625 |
+
4xc2
|
| 1626 |
+
1oe8
|
| 1627 |
+
2z94
|
| 1628 |
+
5aic
|
| 1629 |
+
4m12
|
| 1630 |
+
1sld
|
| 1631 |
+
2h96
|
| 1632 |
+
6cn5
|
| 1633 |
+
4uro
|
| 1634 |
+
3bvb
|
| 1635 |
+
5x27
|
| 1636 |
+
3usx
|
| 1637 |
+
2f7i
|
| 1638 |
+
2iit
|
| 1639 |
+
3fzt
|
| 1640 |
+
5vgi
|
| 1641 |
+
5jv1
|
| 1642 |
+
1py1
|
| 1643 |
+
5ktx
|
| 1644 |
+
4b35
|
| 1645 |
+
1m0q
|
| 1646 |
+
5kjk
|
| 1647 |
+
2vey
|
| 1648 |
+
6dkg
|
| 1649 |
+
6gbx
|
| 1650 |
+
3e01
|
| 1651 |
+
1p03
|
| 1652 |
+
4zuq
|
| 1653 |
+
4wbo
|
| 1654 |
+
4rfd
|
| 1655 |
+
5alb
|
| 1656 |
+
3liw
|
| 1657 |
+
2mwo
|
| 1658 |
+
4yjl
|
| 1659 |
+
4ab8
|
| 1660 |
+
4at4
|
| 1661 |
+
1ke0
|
| 1662 |
+
5fpk
|
| 1663 |
+
1zz2
|
| 1664 |
+
5hz6
|
| 1665 |
+
2cll
|
| 1666 |
+
4m2v
|
| 1667 |
+
3ozt
|
| 1668 |
+
4ge7
|
| 1669 |
+
3pkd
|
| 1670 |
+
5t70
|
| 1671 |
+
3ts4
|
| 1672 |
+
3ogm
|
| 1673 |
+
5t18
|
| 1674 |
+
5c7d
|
| 1675 |
+
1ikv
|
| 1676 |
+
4obp
|
| 1677 |
+
4k6y
|
| 1678 |
+
2fb8
|
| 1679 |
+
3vbd
|
| 1680 |
+
5ung
|
| 1681 |
+
4b32
|
| 1682 |
+
2qlq
|
| 1683 |
+
3jzs
|
| 1684 |
+
3tiz
|
| 1685 |
+
1v2k
|
| 1686 |
+
3zzf
|
| 1687 |
+
1hpv
|
| 1688 |
+
5xo2
|
| 1689 |
+
5e2l
|
| 1690 |
+
1jn4
|
| 1691 |
+
5aly
|
| 1692 |
+
3rwp
|
| 1693 |
+
3r7r
|
| 1694 |
+
4url
|
| 1695 |
+
2wm0
|
| 1696 |
+
2brg
|
| 1697 |
+
2lgf
|
| 1698 |
+
5dlx
|
| 1699 |
+
4g2y
|
| 1700 |
+
5jzs
|
| 1701 |
+
5fd2
|
| 1702 |
+
5nqr
|
| 1703 |
+
3tkm
|
| 1704 |
+
5ti0
|
| 1705 |
+
5lxp
|
| 1706 |
+
4l7f
|
| 1707 |
+
3avi
|
| 1708 |
+
5zuj
|
| 1709 |
+
3p3h
|
| 1710 |
+
1g3d
|
| 1711 |
+
3d9v
|
| 1712 |
+
4gs8
|
| 1713 |
+
4dbn
|
| 1714 |
+
1gjc
|
| 1715 |
+
1udu
|
| 1716 |
+
4ir5
|
| 1717 |
+
5yp5
|
| 1718 |
+
5ukj
|
| 1719 |
+
6fgq
|
| 1720 |
+
4iuo
|
| 1721 |
+
4ibk
|
| 1722 |
+
5ali
|
| 1723 |
+
1b5h
|
| 1724 |
+
2y36
|
| 1725 |
+
3f81
|
| 1726 |
+
4e3f
|
| 1727 |
+
3t84
|
| 1728 |
+
4na7
|
| 1729 |
+
2ow7
|
| 1730 |
+
4ob0
|
| 1731 |
+
1qb1
|
| 1732 |
+
2iku
|
| 1733 |
+
3npc
|
| 1734 |
+
1k2v
|
| 1735 |
+
2w68
|
| 1736 |
+
1b2h
|
| 1737 |
+
4dfl
|
| 1738 |
+
3dri
|
| 1739 |
+
4qzs
|
| 1740 |
+
3sqq
|
| 1741 |
+
1d4l
|
| 1742 |
+
4dn0
|
| 1743 |
+
9hvp
|
| 1744 |
+
2lko
|
| 1745 |
+
1lev
|
| 1746 |
+
3aau
|
| 1747 |
+
4mib
|
| 1748 |
+
1lee
|
| 1749 |
+
1o5e
|
| 1750 |
+
4os4
|
| 1751 |
+
4b34
|
| 1752 |
+
3m56
|
| 1753 |
+
4qvw
|
| 1754 |
+
5wf6
|
| 1755 |
+
3u7k
|
| 1756 |
+
4j5d
|
| 1757 |
+
1r4w
|
| 1758 |
+
3buw
|
| 1759 |
+
1eou
|
| 1760 |
+
3vbq
|
| 1761 |
+
6g5j
|
| 1762 |
+
5t4u
|
| 1763 |
+
3fuh
|
| 1764 |
+
2pj4
|
| 1765 |
+
2zn7
|
| 1766 |
+
4n4v
|
| 1767 |
+
2obo
|
| 1768 |
+
1tjp
|
| 1769 |
+
6b1w
|
| 1770 |
+
1ru2
|
| 1771 |
+
1fh9
|
| 1772 |
+
4wcf
|
| 1773 |
+
6hh5
|
| 1774 |
+
5orz
|
| 1775 |
+
4xmb
|
| 1776 |
+
5f94
|
| 1777 |
+
4jfv
|
| 1778 |
+
6ekq
|
| 1779 |
+
4nrt
|
| 1780 |
+
4ura
|
| 1781 |
+
4prn
|
| 1782 |
+
6ev0
|
| 1783 |
+
1v3x
|
| 1784 |
+
2pqz
|
| 1785 |
+
3k5i
|
| 1786 |
+
4pn1
|
| 1787 |
+
5bue
|
| 1788 |
+
3ik1
|
| 1789 |
+
2h5d
|
| 1790 |
+
4yax
|
| 1791 |
+
5nhv
|
| 1792 |
+
2x97
|
| 1793 |
+
2q72
|
| 1794 |
+
2qpj
|
| 1795 |
+
4k6t
|
| 1796 |
+
6b1e
|
| 1797 |
+
3f3e
|
| 1798 |
+
2yog
|
| 1799 |
+
4kio
|
| 1800 |
+
3qem
|
| 1801 |
+
3ued
|
| 1802 |
+
2xtk
|
| 1803 |
+
5mge
|
| 1804 |
+
2bfq
|
| 1805 |
+
3mxs
|
| 1806 |
+
4k1b
|
| 1807 |
+
4p6g
|
| 1808 |
+
3td4
|
| 1809 |
+
5mnh
|
| 1810 |
+
5ien
|
| 1811 |
+
3lk8
|
| 1812 |
+
2zx6
|
| 1813 |
+
1rql
|
| 1814 |
+
3rm4
|
| 1815 |
+
5u4c
|
| 1816 |
+
4kb7
|
| 1817 |
+
5ap1
|
| 1818 |
+
4rj8
|
| 1819 |
+
5tkk
|
| 1820 |
+
4j04
|
| 1821 |
+
6i8b
|
| 1822 |
+
2nxl
|
| 1823 |
+
3zm4
|
| 1824 |
+
4dy6
|
| 1825 |
+
3h0z
|
| 1826 |
+
4zyf
|
| 1827 |
+
3e7o
|
| 1828 |
+
5li3
|
| 1829 |
+
4qjw
|
| 1830 |
+
4aq4
|
| 1831 |
+
3kpu
|
| 1832 |
+
5oq7
|
| 1833 |
+
3h2c
|
| 1834 |
+
5jn9
|
| 1835 |
+
2qnq
|
| 1836 |
+
2l8r
|
| 1837 |
+
3rxk
|
| 1838 |
+
1upf
|
| 1839 |
+
4qh7
|
| 1840 |
+
4w9k
|
| 1841 |
+
1vja
|
| 1842 |
+
4zw7
|
| 1843 |
+
5ov8
|
| 1844 |
+
5wag
|
| 1845 |
+
1x8j
|
| 1846 |
+
5a6b
|
| 1847 |
+
6br2
|
| 1848 |
+
1tok
|
| 1849 |
+
4b6p
|
| 1850 |
+
3drs
|
| 1851 |
+
3tvl
|
| 1852 |
+
5mlo
|
| 1853 |
+
2xaf
|
| 1854 |
+
3h0j
|
| 1855 |
+
4yas
|
| 1856 |
+
1o35
|
| 1857 |
+
5dex
|
| 1858 |
+
2x52
|
| 1859 |
+
3lgs
|
| 1860 |
+
3dcr
|
| 1861 |
+
4y5d
|
| 1862 |
+
4umb
|
| 1863 |
+
3l4y
|
| 1864 |
+
6fkp
|
| 1865 |
+
1ony
|
| 1866 |
+
6b95
|
| 1867 |
+
5ueu
|
| 1868 |
+
5ahj
|
| 1869 |
+
2xel
|
| 1870 |
+
2g1y
|
| 1871 |
+
6iiv
|
| 1872 |
+
3b1t
|
| 1873 |
+
5dht
|
| 1874 |
+
4zup
|
| 1875 |
+
4rlk
|
| 1876 |
+
1axs
|
| 1877 |
+
5fnj
|
| 1878 |
+
1izi
|
| 1879 |
+
4gvm
|
| 1880 |
+
4xmr
|
| 1881 |
+
5yhg
|
| 1882 |
+
3rg2
|
| 1883 |
+
3tao
|
| 1884 |
+
2q7o
|
| 1885 |
+
1pme
|
| 1886 |
+
6gl3
|
| 1887 |
+
4pct
|
| 1888 |
+
2tsr
|
| 1889 |
+
3qtw
|
| 1890 |
+
4uua
|
| 1891 |
+
5abe
|
| 1892 |
+
3ozp
|
| 1893 |
+
3q8d
|
| 1894 |
+
2bu5
|
| 1895 |
+
5kya
|
| 1896 |
+
2q6h
|
| 1897 |
+
1n95
|
| 1898 |
+
3k84
|
| 1899 |
+
3bl1
|
| 1900 |
+
3ui2
|
| 1901 |
+
2cgv
|
| 1902 |
+
4ez5
|
| 1903 |
+
3t3c
|
| 1904 |
+
3puk
|
| 1905 |
+
6css
|
| 1906 |
+
3ezr
|
| 1907 |
+
2gss
|
| 1908 |
+
1n1m
|
| 1909 |
+
3obu
|
| 1910 |
+
4awj
|
| 1911 |
+
5vt4
|
| 1912 |
+
4y62
|
| 1913 |
+
1bcd
|
| 1914 |
+
3w9k
|
| 1915 |
+
3qu0
|
| 1916 |
+
1zxc
|
| 1917 |
+
2vtj
|
| 1918 |
+
2pu2
|
| 1919 |
+
4hej
|
| 1920 |
+
5ive
|
| 1921 |
+
2wo8
|
| 1922 |
+
5i2f
|
| 1923 |
+
6dj7
|
| 1924 |
+
4a6w
|
| 1925 |
+
4q1x
|
| 1926 |
+
2g63
|
| 1927 |
+
1jak
|
| 1928 |
+
5knr
|
| 1929 |
+
5y7z
|
| 1930 |
+
5npb
|
| 1931 |
+
2izx
|
| 1932 |
+
2x38
|
| 1933 |
+
4knr
|
| 1934 |
+
3rwe
|
| 1935 |
+
3daj
|
| 1936 |
+
4mbi
|
| 1937 |
+
5a2k
|
| 1938 |
+
4cr9
|
| 1939 |
+
6ht1
|
| 1940 |
+
1uj5
|
| 1941 |
+
3ljt
|
| 1942 |
+
1kyn
|
| 1943 |
+
5osd
|
| 1944 |
+
5whc
|
| 1945 |
+
2r2l
|
| 1946 |
+
4qws
|
| 1947 |
+
3uwo
|
| 1948 |
+
5n8v
|
| 1949 |
+
2y54
|
| 1950 |
+
4m5n
|
| 1951 |
+
1yy6
|
| 1952 |
+
2j50
|
| 1953 |
+
3lvp
|
| 1954 |
+
2gdo
|
| 1955 |
+
3hyf
|
| 1956 |
+
3zmu
|
| 1957 |
+
2pt9
|
| 1958 |
+
5nxg
|
| 1959 |
+
4qsk
|
| 1960 |
+
5eif
|
| 1961 |
+
5l2z
|
| 1962 |
+
4djr
|
| 1963 |
+
2ceo
|
| 1964 |
+
4apo
|
| 1965 |
+
5gj9
|
| 1966 |
+
1q4x
|
| 1967 |
+
4keq
|
| 1968 |
+
2b17
|
| 1969 |
+
4kzu
|
| 1970 |
+
5bml
|
| 1971 |
+
1bdr
|
| 1972 |
+
4zjr
|
| 1973 |
+
3ftz
|
| 1974 |
+
2c2l
|
| 1975 |
+
4yo8
|
| 1976 |
+
1vj5
|
| 1977 |
+
1w4p
|
| 1978 |
+
1aaq
|
| 1979 |
+
2q9n
|
| 1980 |
+
2yay
|
| 1981 |
+
1szd
|
| 1982 |
+
5acb
|
| 1983 |
+
5ne1
|
| 1984 |
+
1vj6
|
| 1985 |
+
3n5h
|
| 1986 |
+
3arr
|
| 1987 |
+
3v0p
|
| 1988 |
+
2fqw
|
| 1989 |
+
5jf7
|
| 1990 |
+
2n14
|
| 1991 |
+
5mk3
|
| 1992 |
+
2pj6
|
| 1993 |
+
4c68
|
| 1994 |
+
4erw
|
| 1995 |
+
2bba
|
| 1996 |
+
5iz8
|
| 1997 |
+
6iiu
|
| 1998 |
+
3txo
|
| 1999 |
+
4nah
|
| 2000 |
+
4ir6
|
| 2001 |
+
4ufy
|
| 2002 |
+
6fer
|
| 2003 |
+
4zba
|
| 2004 |
+
1jij
|
| 2005 |
+
2piy
|
| 2006 |
+
5td2
|
| 2007 |
+
4yzc
|
| 2008 |
+
5qap
|
| 2009 |
+
5yhl
|
| 2010 |
+
1h1p
|
| 2011 |
+
5ni7
|
| 2012 |
+
4pf3
|
| 2013 |
+
4a6b
|
| 2014 |
+
4uy1
|
| 2015 |
+
2xwy
|
| 2016 |
+
2w71
|
| 2017 |
+
4d7b
|
| 2018 |
+
2jld
|
| 2019 |
+
4yoj
|
| 2020 |
+
2ms4
|
| 2021 |
+
4iwz
|
| 2022 |
+
5ctb
|
| 2023 |
+
3t85
|
| 2024 |
+
4lpg
|
| 2025 |
+
3uri
|
| 2026 |
+
3hfb
|
| 2027 |
+
4mr3
|
| 2028 |
+
3lq8
|
| 2029 |
+
3mlb
|
| 2030 |
+
4qwf
|
| 2031 |
+
4ixv
|
| 2032 |
+
5n34
|
| 2033 |
+
2xvn
|
| 2034 |
+
4njd
|
| 2035 |
+
5tzo
|
| 2036 |
+
5qa9
|
| 2037 |
+
5ai8
|
| 2038 |
+
3o23
|
| 2039 |
+
5i22
|
| 2040 |
+
4o72
|
| 2041 |
+
4ej8
|
| 2042 |
+
1dpu
|
| 2043 |
+
1rv1
|
| 2044 |
+
4ixh
|
| 2045 |
+
1w82
|
| 2046 |
+
3arx
|
| 2047 |
+
3k02
|
| 2048 |
+
5fnt
|
| 2049 |
+
3uf9
|
| 2050 |
+
5wij
|
| 2051 |
+
4oq5
|
| 2052 |
+
1z4o
|
| 2053 |
+
3lq5
|
| 2054 |
+
3u4h
|
| 2055 |
+
4f20
|
| 2056 |
+
4mg5
|
| 2057 |
+
5ufi
|
| 2058 |
+
4n8d
|
| 2059 |
+
6mu1
|
| 2060 |
+
5zag
|
| 2061 |
+
4emv
|
| 2062 |
+
4bqt
|
| 2063 |
+
3t2q
|
| 2064 |
+
1jyc
|
| 2065 |
+
4q81
|
| 2066 |
+
2fqx
|
| 2067 |
+
4kza
|
| 2068 |
+
5k51
|
| 2069 |
+
3u4i
|
| 2070 |
+
5ho8
|
| 2071 |
+
4ciz
|
| 2072 |
+
1yy4
|
| 2073 |
+
2w6p
|
| 2074 |
+
4flh
|
| 2075 |
+
4w4s
|
| 2076 |
+
3i28
|
| 2077 |
+
3oyl
|
| 2078 |
+
2nn1
|
| 2079 |
+
5lrk
|
| 2080 |
+
4e3i
|
| 2081 |
+
4xg9
|
| 2082 |
+
4d8n
|
| 2083 |
+
3lik
|
| 2084 |
+
4aj2
|
| 2085 |
+
5j7w
|
| 2086 |
+
3h2a
|
| 2087 |
+
6bjo
|
| 2088 |
+
3nik
|
| 2089 |
+
5lxc
|
| 2090 |
+
3qce
|
| 2091 |
+
1vrt
|
| 2092 |
+
4qxo
|
| 2093 |
+
5lne
|
| 2094 |
+
5am6
|
| 2095 |
+
5er1
|
| 2096 |
+
3ob1
|
| 2097 |
+
6e2m
|
| 2098 |
+
4lnp
|
| 2099 |
+
3el5
|
| 2100 |
+
1w84
|
| 2101 |
+
4zz1
|
| 2102 |
+
4gm8
|
| 2103 |
+
6hm7
|
| 2104 |
+
3o6t
|
| 2105 |
+
2ycm
|
| 2106 |
+
1g53
|
| 2107 |
+
5ot3
|
| 2108 |
+
5dp8
|
| 2109 |
+
5va9
|
| 2110 |
+
4wx7
|
| 2111 |
+
5kww
|
| 2112 |
+
6bu1
|
| 2113 |
+
1q6t
|
| 2114 |
+
1uto
|
| 2115 |
+
2bks
|
| 2116 |
+
4fmq
|
| 2117 |
+
5opr
|
| 2118 |
+
3ies
|
| 2119 |
+
6hti
|
| 2120 |
+
5u66
|
| 2121 |
+
4qz6
|
| 2122 |
+
6b1o
|
| 2123 |
+
4gy5
|
| 2124 |
+
5mrb
|
| 2125 |
+
4rrv
|
| 2126 |
+
3skk
|
| 2127 |
+
4mss
|
| 2128 |
+
4kju
|
| 2129 |
+
1hti
|
| 2130 |
+
5l6o
|
| 2131 |
+
2am1
|
| 2132 |
+
3t5i
|
| 2133 |
+
1dkd
|
| 2134 |
+
2onc
|
| 2135 |
+
6cvf
|
| 2136 |
+
6bkw
|
| 2137 |
+
3fdt
|
| 2138 |
+
4wh7
|
| 2139 |
+
5i3m
|
| 2140 |
+
1azx
|
| 2141 |
+
3fck
|
| 2142 |
+
5u6c
|
| 2143 |
+
4ded
|
| 2144 |
+
4k0u
|
| 2145 |
+
5xmu
|
| 2146 |
+
3wk9
|
| 2147 |
+
4kz0
|
| 2148 |
+
5a7c
|
| 2149 |
+
6g9j
|
| 2150 |
+
3iw6
|
| 2151 |
+
4ddk
|
| 2152 |
+
4z2o
|
| 2153 |
+
4e3h
|
| 2154 |
+
4c70
|
| 2155 |
+
2y57
|
| 2156 |
+
3dzt
|
| 2157 |
+
5e0g
|
| 2158 |
+
4nrb
|
| 2159 |
+
4qr3
|
| 2160 |
+
5ltn
|
| 2161 |
+
3omg
|
| 2162 |
+
6bod
|
| 2163 |
+
3fzs
|
| 2164 |
+
5edd
|
| 2165 |
+
4rww
|
| 2166 |
+
2oyl
|
| 2167 |
+
2bmc
|
| 2168 |
+
2xqq
|
| 2169 |
+
3kqy
|
| 2170 |
+
1oiy
|
| 2171 |
+
5y13
|
| 2172 |
+
4d1s
|
| 2173 |
+
3ked
|
| 2174 |
+
5djp
|
| 2175 |
+
4bo0
|
| 2176 |
+
4o2a
|
| 2177 |
+
5em6
|
| 2178 |
+
5tfx
|
| 2179 |
+
4d9p
|
| 2180 |
+
3tku
|
| 2181 |
+
4p1r
|
| 2182 |
+
5nwz
|
| 2183 |
+
4bs4
|
| 2184 |
+
3oys
|
| 2185 |
+
5lgs
|
| 2186 |
+
2vxn
|
| 2187 |
+
1y98
|
| 2188 |
+
3ck7
|
| 2189 |
+
1nh0
|
| 2190 |
+
4an0
|
| 2191 |
+
1mcz
|
| 2192 |
+
2h9n
|
| 2193 |
+
3h0s
|
| 2194 |
+
2ycq
|
| 2195 |
+
2x4s
|
| 2196 |
+
4acc
|
| 2197 |
+
5ows
|
| 2198 |
+
3rxa
|
| 2199 |
+
5lhi
|
| 2200 |
+
5l8c
|
| 2201 |
+
3rxi
|
| 2202 |
+
3r00
|
| 2203 |
+
5oq5
|
| 2204 |
+
1jev
|
| 2205 |
+
4ztn
|
| 2206 |
+
4a7j
|
| 2207 |
+
4os7
|
| 2208 |
+
3e3c
|
| 2209 |
+
5hvt
|
| 2210 |
+
4jfj
|
| 2211 |
+
5dxh
|
| 2212 |
+
1d4k
|
| 2213 |
+
5ewk
|
| 2214 |
+
1x6u
|
| 2215 |
+
3g2z
|
| 2216 |
+
1i7c
|
| 2217 |
+
5opb
|
| 2218 |
+
4y85
|
| 2219 |
+
6fii
|
| 2220 |
+
6c7q
|
| 2221 |
+
4jkt
|
| 2222 |
+
5dgz
|
| 2223 |
+
3run
|
| 2224 |
+
4qoc
|
| 2225 |
+
5twz
|
| 2226 |
+
4a4h
|
| 2227 |
+
6gl9
|
| 2228 |
+
4fxz
|
| 2229 |
+
2hu6
|
| 2230 |
+
4ayt
|
| 2231 |
+
2y7x
|
| 2232 |
+
1x8b
|
| 2233 |
+
5i8c
|
| 2234 |
+
2aoi
|
| 2235 |
+
3kgt
|
| 2236 |
+
3gwx
|
| 2237 |
+
5lyr
|
| 2238 |
+
3nk8
|
| 2239 |
+
5orv
|
| 2240 |
+
1mrx
|
| 2241 |
+
1pf8
|
| 2242 |
+
6dj1
|
| 2243 |
+
4jss
|
| 2244 |
+
3uii
|
| 2245 |
+
5cqt
|
| 2246 |
+
1yds
|
| 2247 |
+
3hy9
|
| 2248 |
+
5y5u
|
| 2249 |
+
6dko
|
| 2250 |
+
5hm0
|
| 2251 |
+
3s3o
|
| 2252 |
+
5om7
|
| 2253 |
+
2j9n
|
| 2254 |
+
5wzw
|
| 2255 |
+
1d3v
|
| 2256 |
+
4pvv
|
| 2257 |
+
4i0s
|
| 2258 |
+
4pmm
|
| 2259 |
+
6ftp
|
| 2260 |
+
4hct
|
| 2261 |
+
1c5t
|
| 2262 |
+
5urk
|
| 2263 |
+
5q0f
|
| 2264 |
+
5xv7
|
| 2265 |
+
4bj9
|
| 2266 |
+
1css
|
| 2267 |
+
1hqf
|
| 2268 |
+
5tg1
|
| 2269 |
+
3cvk
|
| 2270 |
+
6awp
|
| 2271 |
+
1t4e
|
| 2272 |
+
2nns
|
| 2273 |
+
5al5
|
| 2274 |
+
3cr5
|
| 2275 |
+
6equ
|
| 2276 |
+
5fl5
|
| 2277 |
+
2yex
|
| 2278 |
+
5lsh
|
| 2279 |
+
4fgz
|
| 2280 |
+
3mv0
|
| 2281 |
+
3juq
|
| 2282 |
+
1q6m
|
| 2283 |
+
5khh
|
| 2284 |
+
5c85
|
| 2285 |
+
1npw
|
| 2286 |
+
5w1e
|
| 2287 |
+
4j0a
|
| 2288 |
+
4mvn
|
| 2289 |
+
5ioz
|
| 2290 |
+
4yve
|
| 2291 |
+
5tbm
|
| 2292 |
+
2fx7
|
| 2293 |
+
4oex
|
| 2294 |
+
1nnu
|
| 2295 |
+
3c4h
|
| 2296 |
+
3vp1
|
| 2297 |
+
1bky
|
| 2298 |
+
5ajc
|
| 2299 |
+
1jcx
|
| 2300 |
+
8hvp
|
| 2301 |
+
4wvs
|
| 2302 |
+
5hkh
|
| 2303 |
+
2llo
|
| 2304 |
+
2jkm
|
| 2305 |
+
1q6p
|
| 2306 |
+
5mf6
|
| 2307 |
+
5afj
|
| 2308 |
+
4l50
|
| 2309 |
+
1o3e
|
| 2310 |
+
6evm
|
| 2311 |
+
1pyw
|
| 2312 |
+
4mnx
|
| 2313 |
+
4fll
|
| 2314 |
+
6hx5
|
| 2315 |
+
3ivc
|
| 2316 |
+
5wf5
|
| 2317 |
+
4c4g
|
| 2318 |
+
1w5w
|
| 2319 |
+
2w26
|
| 2320 |
+
4qk4
|
| 2321 |
+
4an9
|
| 2322 |
+
5n8j
|
| 2323 |
+
6mr5
|
| 2324 |
+
4l7r
|
| 2325 |
+
4aro
|
| 2326 |
+
6g07
|
| 2327 |
+
4hze
|
| 2328 |
+
2cbj
|
| 2329 |
+
5wbr
|
| 2330 |
+
5afx
|
| 2331 |
+
3bh3
|
| 2332 |
+
4ca8
|
| 2333 |
+
6aam
|
| 2334 |
+
1r78
|
| 2335 |
+
1gcz
|
| 2336 |
+
3bjc
|
| 2337 |
+
1o2x
|
| 2338 |
+
1b40
|
| 2339 |
+
4u45
|
| 2340 |
+
4qiy
|
| 2341 |
+
3v30
|
| 2342 |
+
3kyq
|
| 2343 |
+
3unn
|
| 2344 |
+
5elf
|
| 2345 |
+
5hjc
|
| 2346 |
+
2p4s
|
| 2347 |
+
1xoe
|
| 2348 |
+
2oqi
|
| 2349 |
+
5mpz
|
| 2350 |
+
4hxz
|
| 2351 |
+
4rfr
|
| 2352 |
+
3vp3
|
| 2353 |
+
6upj
|
| 2354 |
+
5xyy
|
| 2355 |
+
4wci
|
| 2356 |
+
2hnc
|
| 2357 |
+
3ocz
|
| 2358 |
+
1k4g
|
| 2359 |
+
3v3v
|
| 2360 |
+
5ihh
|
| 2361 |
+
4g0c
|
| 2362 |
+
5ngs
|
| 2363 |
+
2v77
|
| 2364 |
+
3nzs
|
| 2365 |
+
5tyk
|
| 2366 |
+
3dne
|
| 2367 |
+
1m5f
|
| 2368 |
+
2xk8
|
| 2369 |
+
4br3
|
| 2370 |
+
1bgq
|
| 2371 |
+
3fa3
|
| 2372 |
+
2y07
|
| 2373 |
+
4lpb
|
| 2374 |
+
4tlr
|
| 2375 |
+
1ocq
|
| 2376 |
+
4j5c
|
| 2377 |
+
2n3k
|
| 2378 |
+
2y5g
|
| 2379 |
+
4mf1
|
| 2380 |
+
4oho
|
| 2381 |
+
4u3f
|
| 2382 |
+
3tv5
|
| 2383 |
+
2xv1
|
| 2384 |
+
5kmh
|
| 2385 |
+
3n2v
|
| 2386 |
+
3o7u
|
| 2387 |
+
2ieo
|
| 2388 |
+
1wbo
|
| 2389 |
+
5i25
|
| 2390 |
+
4mhz
|
| 2391 |
+
5mmp
|
| 2392 |
+
3qs8
|
| 2393 |
+
5igm
|
| 2394 |
+
3zxz
|
| 2395 |
+
4zji
|
| 2396 |
+
4b71
|
| 2397 |
+
1fpp
|
| 2398 |
+
2y76
|
| 2399 |
+
1i3z
|
| 2400 |
+
5lch
|
| 2401 |
+
5gp7
|
| 2402 |
+
3mmr
|
| 2403 |
+
4qc1
|
| 2404 |
+
3sbi
|
| 2405 |
+
4bsq
|
| 2406 |
+
5yhe
|
| 2407 |
+
5hj9
|
| 2408 |
+
4qw1
|
| 2409 |
+
1q5l
|
| 2410 |
+
4i9z
|
| 2411 |
+
1o5b
|
| 2412 |
+
1ryf
|
| 2413 |
+
3oeu
|
| 2414 |
+
4mw7
|
| 2415 |
+
3nx7
|
| 2416 |
+
4izy
|
| 2417 |
+
5lvf
|
| 2418 |
+
3hll
|
| 2419 |
+
4jbp
|
| 2420 |
+
5icv
|
| 2421 |
+
2yln
|
| 2422 |
+
3n2c
|
| 2423 |
+
5om3
|
| 2424 |
+
1e2l
|
| 2425 |
+
2oyk
|
| 2426 |
+
2c0o
|
| 2427 |
+
5w5v
|
| 2428 |
+
3w8o
|
| 2429 |
+
6cvw
|
| 2430 |
+
6h2z
|
| 2431 |
+
2a3i
|
| 2432 |
+
5jnl
|
| 2433 |
+
6m9d
|
| 2434 |
+
4twp
|
| 2435 |
+
5gmu
|
| 2436 |
+
5wg4
|
| 2437 |
+
2mkr
|
| 2438 |
+
3elc
|
| 2439 |
+
3eb1
|
| 2440 |
+
5wlv
|
| 2441 |
+
4p0v
|
| 2442 |
+
5ahu
|
| 2443 |
+
5two
|
| 2444 |
+
2jf4
|
| 2445 |
+
1mm6
|
| 2446 |
+
3c4c
|
| 2447 |
+
5a2j
|
| 2448 |
+
3ti4
|
| 2449 |
+
3pd9
|
| 2450 |
+
4rx8
|
| 2451 |
+
2vr0
|
| 2452 |
+
6mim
|
| 2453 |
+
2k00
|
| 2454 |
+
3fr2
|
| 2455 |
+
5dgw
|
| 2456 |
+
3tiy
|
| 2457 |
+
4jxs
|
| 2458 |
+
5tpx
|
| 2459 |
+
5xva
|
| 2460 |
+
1o49
|
| 2461 |
+
3v66
|
| 2462 |
+
2mlm
|
| 2463 |
+
4alu
|
| 2464 |
+
1tv6
|
| 2465 |
+
4g2f
|
| 2466 |
+
2xp4
|
| 2467 |
+
2bz8
|
| 2468 |
+
4kbc
|
| 2469 |
+
5tho
|
| 2470 |
+
1wun
|
| 2471 |
+
3m17
|
| 2472 |
+
1slg
|
| 2473 |
+
3pcc
|
| 2474 |
+
1oh4
|
| 2475 |
+
2vio
|
| 2476 |
+
3v6r
|
| 2477 |
+
4n7h
|
| 2478 |
+
3fv8
|
| 2479 |
+
4f08
|
| 2480 |
+
2p33
|
| 2481 |
+
5a5d
|
| 2482 |
+
3n1v
|
| 2483 |
+
3wzp
|
| 2484 |
+
4j4o
|
| 2485 |
+
5ewm
|
| 2486 |
+
3ds4
|
| 2487 |
+
4b3b
|
| 2488 |
+
1sqi
|
| 2489 |
+
2nmx
|
| 2490 |
+
1gai
|
| 2491 |
+
1fax
|
| 2492 |
+
4deg
|
| 2493 |
+
6f6s
|
| 2494 |
+
5ujv
|
| 2495 |
+
1juf
|
| 2496 |
+
4zx3
|
| 2497 |
+
1m48
|
| 2498 |
+
5ttf
|
| 2499 |
+
4man
|
| 2500 |
+
5ljt
|
| 2501 |
+
1y0l
|
| 2502 |
+
6hsk
|
| 2503 |
+
3bi6
|
| 2504 |
+
3fue
|
| 2505 |
+
1c87
|
| 2506 |
+
4byj
|
| 2507 |
+
4r18
|
| 2508 |
+
6bnh
|
| 2509 |
+
3qpn
|
| 2510 |
+
4r4i
|
| 2511 |
+
4e5h
|
| 2512 |
+
4ojq
|
| 2513 |
+
3fu3
|
| 2514 |
+
3d1x
|
| 2515 |
+
4j48
|
| 2516 |
+
5uig
|
| 2517 |
+
5vcz
|
| 2518 |
+
5tzd
|
| 2519 |
+
4hva
|
| 2520 |
+
3mvl
|
| 2521 |
+
6b1c
|
| 2522 |
+
4e96
|
| 2523 |
+
5aeh
|
| 2524 |
+
4uvu
|
| 2525 |
+
3met
|
| 2526 |
+
6dh5
|
| 2527 |
+
4mrh
|
| 2528 |
+
5fqv
|
| 2529 |
+
5t8q
|
| 2530 |
+
6f7c
|
| 2531 |
+
5l17
|
| 2532 |
+
3zim
|
| 2533 |
+
3k83
|
| 2534 |
+
2p3a
|
| 2535 |
+
4wsj
|
| 2536 |
+
4cd1
|
| 2537 |
+
2xk7
|
| 2538 |
+
5w6u
|
| 2539 |
+
4u58
|
| 2540 |
+
6f5m
|
| 2541 |
+
3vw6
|
| 2542 |
+
1o42
|
| 2543 |
+
5uor
|
| 2544 |
+
2ycs
|
| 2545 |
+
1p06
|
| 2546 |
+
2i4z
|
| 2547 |
+
4q1d
|
| 2548 |
+
3ipy
|
| 2549 |
+
5m0s
|
| 2550 |
+
3qup
|
| 2551 |
+
1tr7
|
| 2552 |
+
5xpn
|
| 2553 |
+
2aa6
|
| 2554 |
+
4mk7
|
| 2555 |
+
5uv2
|
| 2556 |
+
3pgl
|
| 2557 |
+
5uir
|
| 2558 |
+
3shj
|
| 2559 |
+
1p57
|
| 2560 |
+
4ezx
|
| 2561 |
+
5csw
|
| 2562 |
+
4ag8
|
| 2563 |
+
3ibn
|
| 2564 |
+
4yht
|
| 2565 |
+
4wkt
|
| 2566 |
+
6ceh
|
| 2567 |
+
3k0k
|
| 2568 |
+
5neb
|
| 2569 |
+
4ysi
|
| 2570 |
+
1bo5
|
| 2571 |
+
1a28
|
| 2572 |
+
1ckb
|
| 2573 |
+
5j6d
|
| 2574 |
+
3hr1
|
| 2575 |
+
6hrq
|
| 2576 |
+
1py5
|
| 2577 |
+
3km4
|
| 2578 |
+
4c66
|
| 2579 |
+
1sr7
|
| 2580 |
+
1rnm
|
| 2581 |
+
3krx
|
| 2582 |
+
3d25
|
| 2583 |
+
5d6e
|
| 2584 |
+
5w4v
|
| 2585 |
+
2x8e
|
| 2586 |
+
4f09
|
| 2587 |
+
5afv
|
| 2588 |
+
6bkx
|
| 2589 |
+
2hug
|
| 2590 |
+
5dnu
|
| 2591 |
+
2woa
|
| 2592 |
+
5xvw
|
| 2593 |
+
3fi3
|
| 2594 |
+
4yho
|
| 2595 |
+
1h1s
|
| 2596 |
+
1sdv
|
| 2597 |
+
5fot
|
| 2598 |
+
5tmn
|
| 2599 |
+
5myk
|
| 2600 |
+
5f29
|
| 2601 |
+
1utl
|
| 2602 |
+
3zls
|
| 2603 |
+
4ff8
|
| 2604 |
+
5f6d
|
| 2605 |
+
2gfs
|
| 2606 |
+
1c86
|
| 2607 |
+
5e5g
|
| 2608 |
+
3zvy
|
| 2609 |
+
5i59
|
| 2610 |
+
2f3r
|
| 2611 |
+
4e5j
|
| 2612 |
+
3l5r
|
| 2613 |
+
5l01
|
| 2614 |
+
2vj8
|
| 2615 |
+
4b9w
|
| 2616 |
+
4i7m
|
| 2617 |
+
4qwu
|
| 2618 |
+
3rpr
|
| 2619 |
+
4i7k
|
| 2620 |
+
2c1n
|
| 2621 |
+
4ret
|
| 2622 |
+
1owk
|
| 2623 |
+
4mxc
|
| 2624 |
+
5huw
|
| 2625 |
+
5w14
|
| 2626 |
+
1f8e
|
| 2627 |
+
4ciy
|
| 2628 |
+
5etm
|
| 2629 |
+
2pwr
|
| 2630 |
+
4r1e
|
| 2631 |
+
4ncg
|
| 2632 |
+
2jbo
|
| 2633 |
+
4abf
|
| 2634 |
+
1my4
|
| 2635 |
+
4jib
|
| 2636 |
+
4nxr
|
| 2637 |
+
3zev
|
| 2638 |
+
1njd
|
| 2639 |
+
1pdq
|
| 2640 |
+
5fi6
|
| 2641 |
+
2vtp
|
| 2642 |
+
5dro
|
| 2643 |
+
1o3b
|
| 2644 |
+
4jln
|
| 2645 |
+
2yis
|
| 2646 |
+
1rt2
|
| 2647 |
+
4feq
|
| 2648 |
+
5za9
|
| 2649 |
+
4gii
|
| 2650 |
+
3gjs
|
| 2651 |
+
5eef
|
| 2652 |
+
2zx8
|
| 2653 |
+
3ujd
|
| 2654 |
+
5eto
|
| 2655 |
+
1c5p
|
| 2656 |
+
5tys
|
| 2657 |
+
6ayo
|
| 2658 |
+
4eh4
|
| 2659 |
+
3mhm
|
| 2660 |
+
2pow
|
| 2661 |
+
1w0y
|
| 2662 |
+
4dbm
|
| 2663 |
+
2ywp
|
| 2664 |
+
5uhi
|
| 2665 |
+
5am3
|
| 2666 |
+
1pxp
|
| 2667 |
+
2bq6
|
| 2668 |
+
1n1g
|
| 2669 |
+
966c
|
| 2670 |
+
3vws
|
| 2671 |
+
2of2
|
| 2672 |
+
2ydf
|
| 2673 |
+
4nbk
|
| 2674 |
+
1l0a
|
| 2675 |
+
4u43
|
| 2676 |
+
5trh
|
| 2677 |
+
5etv
|
| 2678 |
+
1h61
|
| 2679 |
+
2vcb
|
| 2680 |
+
3d1v
|
| 2681 |
+
1xq0
|
| 2682 |
+
1b3h
|
| 2683 |
+
3svj
|
| 2684 |
+
5fun
|
| 2685 |
+
4bw1
|
| 2686 |
+
4jof
|
| 2687 |
+
4izm
|
| 2688 |
+
4phu
|
| 2689 |
+
3jy9
|
| 2690 |
+
5ael
|
| 2691 |
+
4irx
|
| 2692 |
+
3avn
|
| 2693 |
+
5mm9
|
| 2694 |
+
5twl
|
| 2695 |
+
4w9l
|
| 2696 |
+
2ad5
|
| 2697 |
+
4h2o
|
| 2698 |
+
4b4g
|
| 2699 |
+
4yrs
|
| 2700 |
+
4a22
|
| 2701 |
+
5vfc
|
| 2702 |
+
3l3q
|
| 2703 |
+
6fn9
|
| 2704 |
+
4m14
|
| 2705 |
+
5a6h
|
| 2706 |
+
4c35
|
| 2707 |
+
3iob
|
| 2708 |
+
6apr
|
| 2709 |
+
6g3v
|
| 2710 |
+
2xpb
|
| 2711 |
+
3loo
|
| 2712 |
+
3drr
|
| 2713 |
+
3e4a
|
| 2714 |
+
5thi
|
| 2715 |
+
5mgm
|
| 2716 |
+
4i8n
|
| 2717 |
+
5obr
|
| 2718 |
+
6ee4
|
| 2719 |
+
2wzx
|
| 2720 |
+
5j8z
|
| 2721 |
+
2itz
|
| 2722 |
+
4mdn
|
| 2723 |
+
5nw0
|
| 2724 |
+
5l9o
|
| 2725 |
+
3bel
|
| 2726 |
+
4g69
|
| 2727 |
+
5ovv
|
| 2728 |
+
1i9n
|
| 2729 |
+
1hlk
|
| 2730 |
+
2hhn
|
| 2731 |
+
5v13
|
| 2732 |
+
2rcx
|
| 2733 |
+
5zms
|
| 2734 |
+
2ncz
|
| 2735 |
+
1a86
|
| 2736 |
+
5g2n
|
| 2737 |
+
3dpe
|
| 2738 |
+
5d3j
|
| 2739 |
+
2vaq
|
| 2740 |
+
4ui5
|
| 2741 |
+
5g1a
|
| 2742 |
+
3f39
|
| 2743 |
+
4owm
|
| 2744 |
+
1dl7
|
| 2745 |
+
3ce3
|
| 2746 |
+
5azg
|
| 2747 |
+
5l7f
|
| 2748 |
+
6fh7
|
| 2749 |
+
4ifi
|
| 2750 |
+
2xo8
|
| 2751 |
+
5lws
|
| 2752 |
+
5sve
|
| 2753 |
+
3q2h
|
| 2754 |
+
4e3n
|
| 2755 |
+
5eoc
|
| 2756 |
+
6b7h
|
| 2757 |
+
5ekn
|
| 2758 |
+
4erk
|
| 2759 |
+
5fso
|
| 2760 |
+
5e80
|
| 2761 |
+
2bpx
|
| 2762 |
+
1yw8
|
| 2763 |
+
3sc1
|
| 2764 |
+
5o4t
|
| 2765 |
+
5c2o
|
| 2766 |
+
2w17
|
| 2767 |
+
5nxy
|
| 2768 |
+
1oy7
|
| 2769 |
+
6dvm
|
| 2770 |
+
1py2
|
| 2771 |
+
4zx0
|
| 2772 |
+
4m5k
|
| 2773 |
+
4nkt
|
| 2774 |
+
3s1h
|
| 2775 |
+
2ym5
|
| 2776 |
+
6cis
|
| 2777 |
+
3hdm
|
| 2778 |
+
2yj2
|
| 2779 |
+
4ef4
|
| 2780 |
+
3kvw
|
| 2781 |
+
6c1s
|
| 2782 |
+
4j6i
|
| 2783 |
+
6bij
|
| 2784 |
+
5xgh
|
| 2785 |
+
5u2c
|
| 2786 |
+
3m3e
|
| 2787 |
+
3dga
|
| 2788 |
+
1b6k
|
| 2789 |
+
4gd6
|
| 2790 |
+
4ww6
|
| 2791 |
+
1zhy
|
| 2792 |
+
3eky
|
| 2793 |
+
2q38
|
| 2794 |
+
2ymt
|
| 2795 |
+
3gur
|
| 2796 |
+
1njs
|
| 2797 |
+
4na8
|
| 2798 |
+
4msa
|
| 2799 |
+
5nk8
|
| 2800 |
+
1o4k
|
| 2801 |
+
3l6h
|
| 2802 |
+
1i5h
|
| 2803 |
+
3t6b
|
| 2804 |
+
6ck6
|
| 2805 |
+
6gzd
|
| 2806 |
+
2yc5
|
| 2807 |
+
4aa4
|
| 2808 |
+
4ask
|
| 2809 |
+
4ea1
|
| 2810 |
+
3pxz
|
| 2811 |
+
4n4s
|
| 2812 |
+
5wkm
|
| 2813 |
+
4jg0
|
| 2814 |
+
3sie
|
| 2815 |
+
3pkc
|
| 2816 |
+
1w1g
|
| 2817 |
+
5y8c
|
| 2818 |
+
2c6m
|
| 2819 |
+
5ou3
|
| 2820 |
+
6d50
|
| 2821 |
+
5v3y
|
| 2822 |
+
5lzj
|
| 2823 |
+
5tya
|
| 2824 |
+
1sqc
|
| 2825 |
+
3eyu
|
| 2826 |
+
4xtm
|
| 2827 |
+
3agl
|
| 2828 |
+
2brh
|
| 2829 |
+
4mbf
|
| 2830 |
+
5d4a
|
| 2831 |
+
2zz6
|
| 2832 |
+
2gbi
|
| 2833 |
+
1t5f
|
| 2834 |
+
5jiy
|
| 2835 |
+
4x1r
|
| 2836 |
+
3hl7
|
| 2837 |
+
1zd4
|
| 2838 |
+
5fdc
|
| 2839 |
+
3dnj
|
| 2840 |
+
3cct
|
| 2841 |
+
4pm0
|
| 2842 |
+
4gj2
|
| 2843 |
+
5d6f
|
| 2844 |
+
4joj
|
| 2845 |
+
4jju
|
| 2846 |
+
5hn9
|
| 2847 |
+
4tjy
|
| 2848 |
+
6ffn
|
| 2849 |
+
5usf
|
| 2850 |
+
2clk
|
| 2851 |
+
5n70
|
| 2852 |
+
3eka
|
| 2853 |
+
5eq1
|
| 2854 |
+
5t35
|
| 2855 |
+
4qlq
|
| 2856 |
+
4acf
|
| 2857 |
+
4orx
|
| 2858 |
+
3fz1
|
| 2859 |
+
4zhm
|
| 2860 |
+
3rxo
|
| 2861 |
+
1atl
|
| 2862 |
+
5t1k
|
| 2863 |
+
3zt4
|
| 2864 |
+
1bdl
|
| 2865 |
+
4u6y
|
| 2866 |
+
5mob
|
| 2867 |
+
2igy
|
| 2868 |
+
5jlz
|
| 2869 |
+
4flj
|
| 2870 |
+
2zu4
|
| 2871 |
+
2w70
|
| 2872 |
+
5d6y
|
| 2873 |
+
4kip
|
| 2874 |
+
2uw0
|
| 2875 |
+
1r2b
|
| 2876 |
+
4qvm
|
| 2877 |
+
1w4q
|
| 2878 |
+
5okt
|
| 2879 |
+
5op4
|
| 2880 |
+
3al3
|
| 2881 |
+
2rvn
|
| 2882 |
+
3dp1
|
| 2883 |
+
4luz
|
| 2884 |
+
3o87
|
| 2885 |
+
4bf6
|
| 2886 |
+
1w2h
|
| 2887 |
+
1fiv
|
| 2888 |
+
1uh1
|
| 2889 |
+
6c8p
|
| 2890 |
+
4g8m
|
| 2891 |
+
4hai
|
| 2892 |
+
2q2z
|
| 2893 |
+
4yyt
|
| 2894 |
+
4od0
|
| 2895 |
+
5k9w
|
| 2896 |
+
3fyj
|
| 2897 |
+
3zlk
|
| 2898 |
+
5vh0
|
| 2899 |
+
5ale
|
| 2900 |
+
3th8
|
| 2901 |
+
4pvo
|
| 2902 |
+
4whs
|
| 2903 |
+
5hed
|
| 2904 |
+
5qbv
|
| 2905 |
+
2oei
|
| 2906 |
+
4y2s
|
| 2907 |
+
4px6
|
| 2908 |
+
2knh
|
| 2909 |
+
5kns
|
| 2910 |
+
2g96
|
| 2911 |
+
3cm7
|
| 2912 |
+
4gcj
|
| 2913 |
+
1qft
|
| 2914 |
+
3ztd
|
| 2915 |
+
4yzu
|
| 2916 |
+
4kiq
|
| 2917 |
+
1vwl
|
| 2918 |
+
3zyh
|
| 2919 |
+
6mx3
|
| 2920 |
+
1q8t
|
| 2921 |
+
4ue1
|
| 2922 |
+
2y7z
|
| 2923 |
+
3mdz
|
| 2924 |
+
4gtr
|
| 2925 |
+
6fh6
|
| 2926 |
+
4qvq
|
| 2927 |
+
3tti
|
| 2928 |
+
4mbj
|
| 2929 |
+
5ap6
|
| 2930 |
+
3tdj
|
| 2931 |
+
4lvt
|
| 2932 |
+
1xo2
|
| 2933 |
+
1pcg
|
| 2934 |
+
5fiv
|
| 2935 |
+
1g3e
|
| 2936 |
+
6gx3
|
| 2937 |
+
5dhj
|
| 2938 |
+
5ixt
|
| 2939 |
+
1xhm
|
| 2940 |
+
4po7
|
| 2941 |
+
6egs
|
| 2942 |
+
3iw8
|
| 2943 |
+
5qag
|
| 2944 |
+
5brz
|
| 2945 |
+
3mpt
|
| 2946 |
+
3ral
|
| 2947 |
+
1j14
|
| 2948 |
+
5wdj
|
| 2949 |
+
4io3
|
| 2950 |
+
4tpm
|
| 2951 |
+
3f35
|
| 2952 |
+
6mrg
|
| 2953 |
+
2oi3
|
| 2954 |
+
1onp
|
| 2955 |
+
4bjx
|
| 2956 |
+
2i0v
|
| 2957 |
+
4hmk
|
| 2958 |
+
5mat
|
| 2959 |
+
4h2m
|
| 2960 |
+
4ayx
|
| 2961 |
+
5iwg
|
| 2962 |
+
2xez
|
| 2963 |
+
5em7
|
| 2964 |
+
5kdf
|
| 2965 |
+
4y59
|
| 2966 |
+
3ti6
|
| 2967 |
+
5gn6
|
| 2968 |
+
4u7v
|
| 2969 |
+
3rz7
|
| 2970 |
+
1ejn
|
| 2971 |
+
5wxo
|
| 2972 |
+
1o2w
|
| 2973 |
+
4r0i
|
| 2974 |
+
1a30
|
| 2975 |
+
5nz2
|
| 2976 |
+
3rf4
|
| 2977 |
+
6ee6
|
| 2978 |
+
1a9u
|
| 2979 |
+
3iop
|
| 2980 |
+
5x4p
|
| 2981 |
+
3gl6
|
| 2982 |
+
1hqg
|
| 2983 |
+
2oji
|
| 2984 |
+
4ps3
|
| 2985 |
+
2w05
|
| 2986 |
+
1tnh
|
| 2987 |
+
2c9b
|
| 2988 |
+
2flb
|
| 2989 |
+
5itd
|
| 2990 |
+
5k00
|
| 2991 |
+
1t37
|
| 2992 |
+
3g0b
|
| 2993 |
+
2rfh
|
| 2994 |
+
4bdi
|
| 2995 |
+
4ikn
|
| 2996 |
+
1rww
|
| 2997 |
+
4g68
|
| 2998 |
+
5yum
|
| 2999 |
+
4hxl
|
| 3000 |
+
3dbd
|
| 3001 |
+
4hw7
|
| 3002 |
+
5mys
|
| 3003 |
+
4yt6
|
| 3004 |
+
2vvt
|
| 3005 |
+
3r2y
|
| 3006 |
+
5ng9
|
| 3007 |
+
1y2g
|
| 3008 |
+
2vwm
|
| 3009 |
+
3if7
|
| 3010 |
+
2hl4
|
| 3011 |
+
3ffp
|
| 3012 |
+
4qwg
|
| 3013 |
+
3g6z
|
| 3014 |
+
5dhf
|
| 3015 |
+
4y79
|
| 3016 |
+
3m57
|
| 3017 |
+
4oyi
|
| 3018 |
+
3s0b
|
| 3019 |
+
1yvx
|
| 3020 |
+
4md6
|
| 3021 |
+
4g9c
|
| 3022 |
+
3upi
|
| 3023 |
+
2w6m
|
| 3024 |
+
5tt7
|
| 3025 |
+
3pkb
|
| 3026 |
+
2weh
|
| 3027 |
+
5gid
|
| 3028 |
+
2w1g
|
| 3029 |
+
3mxc
|
| 3030 |
+
1ind
|
| 3031 |
+
5k0i
|
| 3032 |
+
4npv
|
| 3033 |
+
2q7q
|
| 3034 |
+
5xaf
|
| 3035 |
+
5hf1
|
| 3036 |
+
2aw1
|
| 3037 |
+
3o9i
|
| 3038 |
+
3dbu
|
| 3039 |
+
4l34
|
| 3040 |
+
5j0d
|
| 3041 |
+
5afl
|
| 3042 |
+
2x7x
|
| 3043 |
+
4qp1
|
| 3044 |
+
2w3i
|
| 3045 |
+
5ew3
|
| 3046 |
+
5e0m
|
| 3047 |
+
5jfu
|
| 3048 |
+
1o4h
|
| 3049 |
+
4w9p
|
| 3050 |
+
4onf
|
| 3051 |
+
4zhl
|
| 3052 |
+
3mj2
|
| 3053 |
+
3k22
|
| 3054 |
+
2xxy
|
| 3055 |
+
5m5q
|
| 3056 |
+
5kab
|
| 3057 |
+
1rsi
|
| 3058 |
+
5c42
|
| 3059 |
+
3tb6
|
| 3060 |
+
4myd
|
| 3061 |
+
2i40
|
| 3062 |
+
5nhj
|
| 3063 |
+
3ds6
|
| 3064 |
+
1lt6
|
| 3065 |
+
3eft
|
| 3066 |
+
2w8g
|
| 3067 |
+
4de3
|
| 3068 |
+
4yrc
|
| 3069 |
+
5yg4
|
| 3070 |
+
5hmh
|
| 3071 |
+
5nw2
|
| 3072 |
+
1k9r
|
| 3073 |
+
3mxf
|
| 3074 |
+
2q1j
|
| 3075 |
+
1u0h
|
| 3076 |
+
4ye3
|
| 3077 |
+
4k60
|
| 3078 |
+
5zop
|
| 3079 |
+
6htg
|
| 3080 |
+
2fx9
|
| 3081 |
+
4q83
|
| 3082 |
+
4y8y
|
| 3083 |
+
4gr0
|
| 3084 |
+
2yaz
|
| 3085 |
+
1k1m
|
| 3086 |
+
4i2w
|
| 3087 |
+
5ovx
|
| 3088 |
+
3db6
|
| 3089 |
+
1v2p
|
| 3090 |
+
5njx
|
| 3091 |
+
1wtg
|
| 3092 |
+
3uvn
|
| 3093 |
+
3h89
|
| 3094 |
+
2q7y
|
| 3095 |
+
4o2f
|
| 3096 |
+
3fv1
|
| 3097 |
+
3pwm
|
| 3098 |
+
1x7b
|
| 3099 |
+
4tmp
|
| 3100 |
+
5ulp
|
| 3101 |
+
6di1
|
| 3102 |
+
4iks
|
| 3103 |
+
5hg8
|
| 3104 |
+
5n9s
|
| 3105 |
+
3tjd
|
| 3106 |
+
6b1x
|
| 3107 |
+
5nw8
|
| 3108 |
+
5yve
|
| 3109 |
+
4uwg
|
| 3110 |
+
1jd5
|
| 3111 |
+
5km5
|
| 3112 |
+
2w4x
|
| 3113 |
+
3umx
|
| 3114 |
+
2ony
|
| 3115 |
+
1bm2
|
| 3116 |
+
2pj9
|
| 3117 |
+
2m41
|
| 3118 |
+
4z8a
|
| 3119 |
+
2yjq
|
| 3120 |
+
6mwe
|
| 3121 |
+
1prl
|
| 3122 |
+
5y1y
|
| 3123 |
+
1c5y
|
| 3124 |
+
5ak6
|
| 3125 |
+
2pv2
|
| 3126 |
+
1w25
|
| 3127 |
+
2fme
|
| 3128 |
+
4ba3
|
| 3129 |
+
1ydt
|
| 3130 |
+
6c2x
|
| 3131 |
+
4ijl
|
| 3132 |
+
3gxy
|
| 3133 |
+
3f3u
|
| 3134 |
+
4io2
|
| 3135 |
+
4drm
|
| 3136 |
+
3cjo
|
| 3137 |
+
5c87
|
| 3138 |
+
5ni8
|
| 3139 |
+
2ll7
|
| 3140 |
+
2bdf
|
| 3141 |
+
2pem
|
| 3142 |
+
2x7d
|
| 3143 |
+
3zc6
|
| 3144 |
+
1m2q
|
| 3145 |
+
4gk2
|
| 3146 |
+
3lqi
|
| 3147 |
+
3c1x
|
| 3148 |
+
2az9
|
| 3149 |
+
1fv0
|
| 3150 |
+
1gfy
|
| 3151 |
+
2nxm
|
| 3152 |
+
4yml
|
| 3153 |
+
4yog
|
| 3154 |
+
2g6q
|
| 3155 |
+
1t79
|
| 3156 |
+
3hdk
|
| 3157 |
+
3uij
|
| 3158 |
+
5vl2
|
| 3159 |
+
4aph
|
| 3160 |
+
6d1u
|
| 3161 |
+
4rn2
|
| 3162 |
+
2p4i
|
| 3163 |
+
1w80
|
| 3164 |
+
2vh0
|
| 3165 |
+
3kqa
|
| 3166 |
+
5jgi
|
| 3167 |
+
3g9n
|
| 3168 |
+
5var
|
| 3169 |
+
4lnf
|
| 3170 |
+
3e9h
|
| 3171 |
+
2xn3
|
| 3172 |
+
2fzz
|
| 3173 |
+
3cfv
|
| 3174 |
+
5h21
|
| 3175 |
+
5fdr
|
| 3176 |
+
5z7b
|
| 3177 |
+
6cj1
|
| 3178 |
+
5tts
|
| 3179 |
+
6bmx
|
| 3180 |
+
1mqd
|
| 3181 |
+
2qhd
|
| 3182 |
+
6e4a
|
| 3183 |
+
4tk0
|
| 3184 |
+
3fur
|
| 3185 |
+
3v1r
|
| 3186 |
+
3q4j
|
| 3187 |
+
4wwn
|
| 3188 |
+
1u9l
|
| 3189 |
+
1oqp
|
| 3190 |
+
1dmp
|
| 3191 |
+
2wnj
|
| 3192 |
+
4i6q
|
| 3193 |
+
4d8s
|
| 3194 |
+
2jb5
|
| 3195 |
+
4p58
|
| 3196 |
+
4usj
|
| 3197 |
+
6eq6
|
| 3198 |
+
3o0j
|
| 3199 |
+
5apj
|
| 3200 |
+
4ty1
|
| 3201 |
+
5avf
|
| 3202 |
+
5k1f
|
| 3203 |
+
1a7c
|
| 3204 |
+
5f2r
|
| 3205 |
+
2c9d
|
| 3206 |
+
3a9i
|
| 3207 |
+
3deh
|
| 3208 |
+
3q3k
|
| 3209 |
+
4xtw
|
| 3210 |
+
2r3l
|
| 3211 |
+
5qa4
|
| 3212 |
+
3wyx
|
| 3213 |
+
4ijp
|
| 3214 |
+
1s5z
|
| 3215 |
+
1y2h
|
| 3216 |
+
1c83
|
| 3217 |
+
5dts
|
| 3218 |
+
5e1d
|
| 3219 |
+
5j9k
|
| 3220 |
+
5o4f
|
| 3221 |
+
2wxi
|
| 3222 |
+
5f04
|
| 3223 |
+
2qmj
|
| 3224 |
+
3kvx
|
| 3225 |
+
5d48
|
| 3226 |
+
2vyt
|
| 3227 |
+
4cfv
|
| 3228 |
+
1qcp
|
| 3229 |
+
4hlm
|
| 3230 |
+
3snl
|
| 3231 |
+
4dzw
|
| 3232 |
+
2brm
|
| 3233 |
+
4q7v
|
| 3234 |
+
5teh
|
| 3235 |
+
5mro
|
| 3236 |
+
3b1u
|
| 3237 |
+
2z9g
|
| 3238 |
+
2rt5
|
| 3239 |
+
3gxz
|
| 3240 |
+
6f7b
|
| 3241 |
+
4qz5
|
| 3242 |
+
5dh5
|
| 3243 |
+
3k0h
|
| 3244 |
+
3l54
|
| 3245 |
+
2hd6
|
| 3246 |
+
5hbe
|
| 3247 |
+
2qi7
|
| 3248 |
+
1jao
|
| 3249 |
+
5ajp
|
| 3250 |
+
3gjq
|
| 3251 |
+
5y20
|
| 3252 |
+
1rwx
|
| 3253 |
+
4ibj
|
| 3254 |
+
5op2
|
| 3255 |
+
3c6u
|
| 3256 |
+
2w0s
|
| 3257 |
+
5yyb
|
| 3258 |
+
3ni5
|
| 3259 |
+
3t8w
|
| 3260 |
+
5ntp
|
| 3261 |
+
1o4e
|
| 3262 |
+
5tqu
|
| 3263 |
+
2xyd
|
| 3264 |
+
5tl9
|
| 3265 |
+
2vpo
|
| 3266 |
+
4ov5
|
| 3267 |
+
4qps
|
| 3268 |
+
4a9s
|
| 3269 |
+
3atk
|
| 3270 |
+
4ms0
|
| 3271 |
+
3fzr
|
| 3272 |
+
1i9m
|
| 3273 |
+
2y71
|
| 3274 |
+
3f33
|
| 3275 |
+
5q0h
|
| 3276 |
+
5x9p
|
| 3277 |
+
5ajo
|
| 3278 |
+
3c56
|
| 3279 |
+
4pnl
|
| 3280 |
+
4fmu
|
| 3281 |
+
3uli
|
| 3282 |
+
5nlk
|
| 3283 |
+
2vo5
|
| 3284 |
+
2fts
|
| 3285 |
+
3sfc
|
| 3286 |
+
3pke
|
| 3287 |
+
4dk5
|
| 3288 |
+
1lor
|
| 3289 |
+
3jya
|
| 3290 |
+
6bqg
|
| 3291 |
+
3p9t
|
| 3292 |
+
6fnj
|
| 3293 |
+
6fmc
|
| 3294 |
+
5c0k
|
| 3295 |
+
1mmq
|
| 3296 |
+
4fck
|
| 3297 |
+
5npe
|
| 3298 |
+
4j59
|
| 3299 |
+
4eol
|
| 3300 |
+
5m57
|
| 3301 |
+
4efk
|
| 3302 |
+
1oiq
|
| 3303 |
+
5dwr
|
| 3304 |
+
3wyk
|
| 3305 |
+
4qf9
|
| 3306 |
+
4c5w
|
| 3307 |
+
5tuq
|
| 3308 |
+
3oqk
|
| 3309 |
+
4xj0
|
| 3310 |
+
5mwg
|
| 3311 |
+
2vwu
|
| 3312 |
+
6dpy
|
| 3313 |
+
3wi6
|
| 3314 |
+
5owf
|
| 3315 |
+
3nw5
|
| 3316 |
+
4fhh
|
| 3317 |
+
3sna
|
| 3318 |
+
2vcg
|
| 3319 |
+
5j7j
|
| 3320 |
+
4gtt
|
| 3321 |
+
5jc1
|
| 3322 |
+
2hz0
|
| 3323 |
+
5l4i
|
| 3324 |
+
1y1m
|
| 3325 |
+
5kj0
|
| 3326 |
+
5vo1
|
| 3327 |
+
4y2v
|
| 3328 |
+
1ecq
|
| 3329 |
+
6eeo
|
| 3330 |
+
5jrs
|
| 3331 |
+
4l23
|
| 3332 |
+
6f34
|
| 3333 |
+
4r4q
|
| 3334 |
+
4quo
|
| 3335 |
+
6dq6
|
| 3336 |
+
4e3k
|
| 3337 |
+
1cet
|
| 3338 |
+
5e2k
|
| 3339 |
+
5jyy
|
| 3340 |
+
5jna
|
| 3341 |
+
4qls
|
| 3342 |
+
4ksq
|
| 3343 |
+
4py4
|
| 3344 |
+
6biy
|
| 3345 |
+
2hqu
|
| 3346 |
+
4yk5
|
| 3347 |
+
4km0
|
| 3348 |
+
5ttu
|
| 3349 |
+
2za3
|
| 3350 |
+
4o5g
|
| 3351 |
+
2y1g
|
| 3352 |
+
4ztr
|
| 3353 |
+
4jps
|
| 3354 |
+
4uin
|
| 3355 |
+
2x7t
|
| 3356 |
+
2q6c
|
| 3357 |
+
6ayr
|
| 3358 |
+
1pwy
|
| 3359 |
+
5lwe
|
| 3360 |
+
5cxi
|
| 3361 |
+
4n9b
|
| 3362 |
+
1fzo
|
| 3363 |
+
5ukm
|
| 3364 |
+
1ork
|
| 3365 |
+
4mk0
|
| 3366 |
+
1jbd
|
| 3367 |
+
1k2i
|
| 3368 |
+
4ifh
|
| 3369 |
+
4yh4
|
| 3370 |
+
3fee
|
| 3371 |
+
5tqe
|
| 3372 |
+
1jj9
|
| 3373 |
+
4li5
|
| 3374 |
+
1lah
|
| 3375 |
+
3ppm
|
| 3376 |
+
4kn7
|
| 3377 |
+
1nz7
|
| 3378 |
+
1m5w
|
| 3379 |
+
3vbx
|
| 3380 |
+
4ie2
|
| 3381 |
+
4r3s
|
| 3382 |
+
4qnu
|
| 3383 |
+
4qmu
|
| 3384 |
+
4os6
|
| 3385 |
+
4o6w
|
| 3386 |
+
2bge
|
| 3387 |
+
5xvg
|
| 3388 |
+
1nfu
|
| 3389 |
+
2zjf
|
| 3390 |
+
5id1
|
| 3391 |
+
5ukl
|
| 3392 |
+
3ccz
|
| 3393 |
+
2d3u
|
| 3394 |
+
5tyj
|
| 3395 |
+
1ggd
|
| 3396 |
+
5ofx
|
| 3397 |
+
4o12
|
| 3398 |
+
4wf4
|
| 3399 |
+
3pck
|
| 3400 |
+
5jt9
|
| 3401 |
+
5ggk
|
| 3402 |
+
6h5x
|
| 3403 |
+
1r5w
|
| 3404 |
+
4zw6
|
| 3405 |
+
5ep7
|
| 3406 |
+
4ps0
|
| 3407 |
+
4drk
|
| 3408 |
+
1los
|
| 3409 |
+
3s6t
|
| 3410 |
+
3be9
|
| 3411 |
+
4ipi
|
| 3412 |
+
3wf7
|
| 3413 |
+
4pow
|
| 3414 |
+
3fun
|
| 3415 |
+
4wk2
|
| 3416 |
+
4y46
|
| 3417 |
+
5izj
|
| 3418 |
+
1d4j
|
| 3419 |
+
4qg7
|
| 3420 |
+
2avm
|
| 3421 |
+
1g85
|
| 3422 |
+
1r1i
|
| 3423 |
+
3g1v
|
| 3424 |
+
4oti
|
| 3425 |
+
4a4o
|
| 3426 |
+
1fvv
|
| 3427 |
+
3uok
|
| 3428 |
+
5n3v
|
| 3429 |
+
4tim
|
| 3430 |
+
4a4g
|
| 3431 |
+
6c0s
|
| 3432 |
+
1w12
|
| 3433 |
+
4jn2
|
| 3434 |
+
4oyp
|
| 3435 |
+
4zt7
|
| 3436 |
+
2ypo
|
| 3437 |
+
3kmc
|
| 3438 |
+
4hwp
|
| 3439 |
+
3q2j
|
| 3440 |
+
4k5l
|
| 3441 |
+
3hwn
|
| 3442 |
+
5i83
|
| 3443 |
+
4ht6
|
| 3444 |
+
4fyh
|
| 3445 |
+
1kfy
|
| 3446 |
+
5g43
|
| 3447 |
+
4bnv
|
| 3448 |
+
5fpu
|
| 3449 |
+
1o48
|
| 3450 |
+
5hv1
|
| 3451 |
+
6ekd
|
| 3452 |
+
2k3w
|
| 3453 |
+
1uze
|
| 3454 |
+
3ksq
|
| 3455 |
+
5wkl
|
| 3456 |
+
4hzz
|
| 3457 |
+
2f8g
|
| 3458 |
+
2uw3
|
| 3459 |
+
6mi6
|
| 3460 |
+
4u91
|
| 3461 |
+
3f7u
|
| 3462 |
+
2qx0
|
| 3463 |
+
2f0z
|
| 3464 |
+
4bcc
|
| 3465 |
+
3qvv
|
| 3466 |
+
2oc1
|
| 3467 |
+
5he3
|
| 3468 |
+
1pun
|
| 3469 |
+
3pp7
|
| 3470 |
+
5o91
|
| 3471 |
+
4mjo
|
| 3472 |
+
3ff6
|
| 3473 |
+
4xnv
|
| 3474 |
+
3tay
|
| 3475 |
+
3ejs
|
| 3476 |
+
4c1w
|
| 3477 |
+
3njq
|
| 3478 |
+
6guc
|
| 3479 |
+
4el9
|
| 3480 |
+
3h0e
|
| 3481 |
+
4pri
|
| 3482 |
+
3pcb
|
| 3483 |
+
4jg8
|
| 3484 |
+
3lbj
|
| 3485 |
+
4a4x
|
| 3486 |
+
2wa3
|
| 3487 |
+
5tcy
|
| 3488 |
+
3wth
|
| 3489 |
+
1ylv
|
| 3490 |
+
5bve
|
| 3491 |
+
4g34
|
| 3492 |
+
5u28
|
| 3493 |
+
5if6
|
| 3494 |
+
5z5f
|
| 3495 |
+
4ot6
|
| 3496 |
+
2g97
|
| 3497 |
+
5n16
|
| 3498 |
+
1m5e
|
| 3499 |
+
3efk
|
| 3500 |
+
4h7q
|
| 3501 |
+
2v5a
|
| 3502 |
+
5vs6
|
| 3503 |
+
2cne
|
| 3504 |
+
1pxn
|
| 3505 |
+
1jqy
|
| 3506 |
+
5j6s
|
| 3507 |
+
4abv
|
| 3508 |
+
6d6t
|
| 3509 |
+
1t4s
|
| 3510 |
+
3zsx
|
| 3511 |
+
1p01
|
| 3512 |
+
2f4j
|
| 3513 |
+
3f78
|
| 3514 |
+
3h0w
|
| 3515 |
+
5d3x
|
| 3516 |
+
2vk2
|
| 3517 |
+
3zst
|
| 3518 |
+
2lbm
|
| 3519 |
+
6fil
|
| 3520 |
+
6ew7
|
| 3521 |
+
2iu0
|
| 3522 |
+
5ai9
|
| 3523 |
+
1tsm
|
| 3524 |
+
5wbm
|
| 3525 |
+
4ark
|
| 3526 |
+
3hlo
|
| 3527 |
+
6dk1
|
| 3528 |
+
5hk2
|
| 3529 |
+
1qhc
|
| 3530 |
+
3v5l
|
| 3531 |
+
2r59
|
| 3532 |
+
4no8
|
| 3533 |
+
5hmy
|
| 3534 |
+
2jql
|
| 3535 |
+
4lv4
|
| 3536 |
+
3kf7
|
| 3537 |
+
5hbh
|
| 3538 |
+
2vr3
|
| 3539 |
+
3wf9
|
| 3540 |
+
4ieh
|
| 3541 |
+
5ar0
|
| 3542 |
+
5dyw
|
| 3543 |
+
4xuz
|
| 3544 |
+
1mmr
|
| 3545 |
+
4h4b
|
| 3546 |
+
5elv
|
| 3547 |
+
2g79
|
| 3548 |
+
4w52
|
| 3549 |
+
3iqh
|
| 3550 |
+
2rnx
|
| 3551 |
+
2az8
|
| 3552 |
+
4lv3
|
| 3553 |
+
4mwb
|
| 3554 |
+
5a83
|
| 3555 |
+
1prm
|
| 3556 |
+
4gg5
|
| 3557 |
+
1rlq
|
| 3558 |
+
5fbn
|
| 3559 |
+
1f47
|
| 3560 |
+
4yfi
|
| 3561 |
+
2v10
|
| 3562 |
+
3pa3
|
| 3563 |
+
4jvi
|
| 3564 |
+
4gmy
|
| 3565 |
+
4cft
|
| 3566 |
+
2v7a
|
| 3567 |
+
1epp
|
| 3568 |
+
2hoc
|
| 3569 |
+
4r5g
|
| 3570 |
+
4k0y
|
| 3571 |
+
5zz2
|
| 3572 |
+
4ng9
|
| 3573 |
+
5hor
|
| 3574 |
+
3qci
|
| 3575 |
+
4msu
|
| 3576 |
+
5hg1
|
| 3577 |
+
1pyn
|
| 3578 |
+
1hmt
|
| 3579 |
+
2lpr
|
| 3580 |
+
4yha
|
| 3581 |
+
5hoa
|
| 3582 |
+
4h3c
|
| 3583 |
+
4lmu
|
| 3584 |
+
3tf6
|
| 3585 |
+
4z2p
|
| 3586 |
+
5ctc
|
| 3587 |
+
2hny
|
| 3588 |
+
5vd1
|
| 3589 |
+
3qcf
|
| 3590 |
+
5ia0
|
| 3591 |
+
3l7g
|
| 3592 |
+
1eld
|
| 3593 |
+
1xh9
|
| 3594 |
+
1h3c
|
| 3595 |
+
4urm
|
| 3596 |
+
4j7i
|
| 3597 |
+
4ly9
|
| 3598 |
+
1nu1
|
| 3599 |
+
5sz5
|
| 3600 |
+
6cdl
|
| 3601 |
+
1uvu
|
| 3602 |
+
2wl4
|
| 3603 |
+
4eo4
|
| 3604 |
+
5lj2
|
| 3605 |
+
3kc1
|
| 3606 |
+
5nqe
|
| 3607 |
+
6gon
|
| 3608 |
+
2xs8
|
| 3609 |
+
5gmj
|
| 3610 |
+
5eyd
|
| 3611 |
+
6c42
|
| 3612 |
+
4wks
|
| 3613 |
+
5d10
|
| 3614 |
+
3ka2
|
| 3615 |
+
4p6e
|
| 3616 |
+
4nnn
|
| 3617 |
+
3zvw
|
| 3618 |
+
4q6r
|
| 3619 |
+
5v8v
|
| 3620 |
+
2jdv
|
| 3621 |
+
5e8f
|
| 3622 |
+
5qcm
|
| 3623 |
+
5y80
|
| 3624 |
+
3ud5
|
| 3625 |
+
4n70
|
| 3626 |
+
4cgi
|
| 3627 |
+
2uw8
|
| 3628 |
+
4ivb
|
| 3629 |
+
4m6u
|
| 3630 |
+
2xu1
|
| 3631 |
+
4ud7
|
| 3632 |
+
5nvh
|
| 3633 |
+
2arm
|
| 3634 |
+
3fmq
|
| 3635 |
+
5jin
|
| 3636 |
+
2nt7
|
| 3637 |
+
1p4r
|
| 3638 |
+
3eqr
|
| 3639 |
+
3hxf
|
| 3640 |
+
4zwx
|
| 3641 |
+
4xx3
|
| 3642 |
+
4yee
|
| 3643 |
+
6aak
|
| 3644 |
+
2xae
|
| 3645 |
+
5t31
|
| 3646 |
+
3bhb
|
| 3647 |
+
5llh
|
| 3648 |
+
5nge
|
| 3649 |
+
4b9h
|
| 3650 |
+
5oxl
|
| 3651 |
+
5ipa
|
| 3652 |
+
1q3w
|
| 3653 |
+
6df1
|
| 3654 |
+
3ole
|
| 3655 |
+
2v16
|
| 3656 |
+
3g2s
|
| 3657 |
+
4led
|
| 3658 |
+
2uz6
|
| 3659 |
+
4pnu
|
| 3660 |
+
3nrz
|
| 3661 |
+
1f7b
|
| 3662 |
+
1j4k
|
| 3663 |
+
5ku9
|
| 3664 |
+
4uia
|
| 3665 |
+
5kv8
|
| 3666 |
+
2rke
|
| 3667 |
+
4e8z
|
| 3668 |
+
5ykp
|
| 3669 |
+
4z1n
|
| 3670 |
+
5sz1
|
| 3671 |
+
4oak
|
| 3672 |
+
3twu
|
| 3673 |
+
2r3p
|
| 3674 |
+
1jq9
|
| 3675 |
+
5cf6
|
| 3676 |
+
5l4j
|
| 3677 |
+
4qpa
|
| 3678 |
+
1u1b
|
| 3679 |
+
2k1q
|
| 3680 |
+
4eny
|
| 3681 |
+
4i0r
|
| 3682 |
+
5fxq
|
| 3683 |
+
3re4
|
| 3684 |
+
6cer
|
| 3685 |
+
6cf5
|
| 3686 |
+
2cbr
|
| 3687 |
+
5ece
|
| 3688 |
+
5t2t
|
| 3689 |
+
1x8r
|
| 3690 |
+
4n3w
|
| 3691 |
+
1klu
|
| 3692 |
+
5nxx
|
| 3693 |
+
5jcb
|
| 3694 |
+
5klt
|
| 3695 |
+
2mov
|
| 3696 |
+
6bid
|
| 3697 |
+
2f94
|
| 3698 |
+
4luv
|
| 3699 |
+
5gu4
|
| 3700 |
+
3wf8
|
| 3701 |
+
5ufo
|
| 3702 |
+
4n5t
|
| 3703 |
+
5uq9
|
| 3704 |
+
5hcx
|
| 3705 |
+
5mtw
|
| 3706 |
+
1o38
|
| 3707 |
+
5z1c
|
| 3708 |
+
4w4z
|
| 3709 |
+
4v27
|
| 3710 |
+
1u3s
|
| 3711 |
+
3jrx
|
| 3712 |
+
5l96
|
| 3713 |
+
1esz
|
| 3714 |
+
2m3m
|
| 3715 |
+
5ivf
|
| 3716 |
+
1o3p
|
| 3717 |
+
4auj
|
| 3718 |
+
2lyb
|
| 3719 |
+
5icy
|
| 3720 |
+
5j3l
|
| 3721 |
+
2fgu
|
| 3722 |
+
3snd
|
| 3723 |
+
2wsx
|
| 3724 |
+
4qfl
|
| 3725 |
+
5iu6
|
| 3726 |
+
1siv
|
| 3727 |
+
4ht2
|
| 3728 |
+
2ltz
|
| 3729 |
+
1o39
|
| 3730 |
+
4p0w
|
| 3731 |
+
3p8p
|
| 3732 |
+
5ypy
|
| 3733 |
+
3zvt
|
| 3734 |
+
2c5y
|
| 3735 |
+
3erk
|
| 3736 |
+
3exh
|
| 3737 |
+
6g4y
|
| 3738 |
+
4ucd
|
| 3739 |
+
3e16
|
| 3740 |
+
4i7p
|
| 3741 |
+
5alv
|
| 3742 |
+
1osg
|
| 3743 |
+
6gw1
|
| 3744 |
+
2oqs
|
| 3745 |
+
5wxg
|
| 3746 |
+
3tvw
|
| 3747 |
+
5hkb
|
| 3748 |
+
2r3n
|
| 3749 |
+
2iog
|
| 3750 |
+
1h1r
|
| 3751 |
+
5eyc
|
| 3752 |
+
1ogx
|
| 3753 |
+
3er3
|
| 3754 |
+
5itp
|
| 3755 |
+
4nuc
|
| 3756 |
+
4w9j
|
| 3757 |
+
4mz4
|
| 3758 |
+
5khj
|
| 3759 |
+
3gsm
|
| 3760 |
+
3hzv
|
| 3761 |
+
6dpx
|
| 3762 |
+
4hvg
|
| 3763 |
+
1s17
|
| 3764 |
+
6arm
|
| 3765 |
+
5jq7
|
| 3766 |
+
2v88
|
| 3767 |
+
5lay
|
| 3768 |
+
5nn4
|
| 3769 |
+
4hj2
|
| 3770 |
+
1rst
|
| 3771 |
+
1pzp
|
| 3772 |
+
3qcq
|
| 3773 |
+
3rx8
|
| 3774 |
+
5ewz
|
| 3775 |
+
5mgx
|
| 3776 |
+
4c1t
|
| 3777 |
+
3cso
|
| 3778 |
+
4wq2
|
| 3779 |
+
4gxl
|
| 3780 |
+
5isz
|
| 3781 |
+
2xj2
|
| 3782 |
+
4y5h
|
| 3783 |
+
3zcw
|
| 3784 |
+
1l8g
|
| 3785 |
+
5vex
|
| 3786 |
+
4tky
|
| 3787 |
+
4elh
|
| 3788 |
+
1d4i
|
| 3789 |
+
2wuf
|
| 3790 |
+
6gw4
|
| 3791 |
+
5ksu
|
| 3792 |
+
4jg6
|
| 3793 |
+
4eqf
|
| 3794 |
+
3hbo
|
| 3795 |
+
5yjy
|
| 3796 |
+
2gv7
|
| 3797 |
+
4x6p
|
| 3798 |
+
4hdf
|
| 3799 |
+
1yi3
|
| 3800 |
+
5edr
|
| 3801 |
+
5nmf
|
| 3802 |
+
4w7p
|
| 3803 |
+
4mb9
|
| 3804 |
+
4fz6
|
| 3805 |
+
1g4j
|
| 3806 |
+
4a2a
|
| 3807 |
+
5kx8
|
| 3808 |
+
5a0b
|
| 3809 |
+
4qta
|
| 3810 |
+
4fn5
|
| 3811 |
+
1gj8
|
| 3812 |
+
1kdw
|
| 3813 |
+
3d94
|
| 3814 |
+
6euw
|
| 3815 |
+
6glb
|
| 3816 |
+
2xaq
|
| 3817 |
+
3vh9
|
| 3818 |
+
1iky
|
| 3819 |
+
5j4v
|
| 3820 |
+
2f1g
|
| 3821 |
+
2r5a
|
| 3822 |
+
1g1e
|
| 3823 |
+
1bzj
|
| 3824 |
+
4abg
|
| 3825 |
+
3cho
|
| 3826 |
+
5t6j
|
| 3827 |
+
4jda
|
| 3828 |
+
5ywy
|
| 3829 |
+
6cdo
|
| 3830 |
+
6fo9
|
| 3831 |
+
1g74
|
| 3832 |
+
3oxi
|
| 3833 |
+
5t8p
|
| 3834 |
+
1fyr
|
| 3835 |
+
5hwu
|
| 3836 |
+
5ai1
|
| 3837 |
+
4e9u
|
| 3838 |
+
4uyn
|
| 3839 |
+
5k05
|
| 3840 |
+
6fgl
|
| 3841 |
+
5iha
|
| 3842 |
+
4aa2
|
| 3843 |
+
1wzy
|
| 3844 |
+
2w85
|
| 3845 |
+
3s2v
|
| 3846 |
+
4yuy
|
| 3847 |
+
6dud
|
| 3848 |
+
3chq
|
| 3849 |
+
3h5s
|
| 3850 |
+
1xdg
|
| 3851 |
+
4hiq
|
| 3852 |
+
3bgs
|
| 3853 |
+
3rqf
|
| 3854 |
+
1jmg
|
| 3855 |
+
4g93
|
| 3856 |
+
6c7b
|
| 3857 |
+
3zso
|
| 3858 |
+
4xjt
|
| 3859 |
+
4rsc
|
| 3860 |
+
5iyv
|
| 3861 |
+
1qxy
|
| 3862 |
+
5n20
|
| 3863 |
+
5c7e
|
| 3864 |
+
4x9v
|
| 3865 |
+
5tjx
|
| 3866 |
+
3tg5
|
| 3867 |
+
1b52
|
| 3868 |
+
1w1v
|
| 3869 |
+
5lg3
|
| 3870 |
+
2bmg
|
| 3871 |
+
5wii
|
| 3872 |
+
5zmq
|
| 3873 |
+
5wew
|
| 3874 |
+
5iua
|
| 3875 |
+
2g00
|
| 3876 |
+
4wef
|
| 3877 |
+
1i7m
|
| 3878 |
+
4hrc
|
| 3879 |
+
6at0
|
| 3880 |
+
4uhg
|
| 3881 |
+
5hvp
|
| 3882 |
+
1ugy
|
| 3883 |
+
5ll4
|
| 3884 |
+
5v6y
|
| 3885 |
+
1ru1
|
| 3886 |
+
2o4l
|
| 3887 |
+
2rc9
|
| 3888 |
+
2fci
|
| 3889 |
+
6evq
|
| 3890 |
+
3rah
|
| 3891 |
+
3g2v
|
| 3892 |
+
5ny1
|
| 3893 |
+
1p10
|
| 3894 |
+
5vsd
|
| 3895 |
+
5vqi
|
| 3896 |
+
5qax
|
| 3897 |
+
4aa0
|
| 3898 |
+
3ddu
|
| 3899 |
+
1pzi
|
| 3900 |
+
3sax
|
| 3901 |
+
4nmr
|
| 3902 |
+
5d26
|
| 3903 |
+
1elr
|
| 3904 |
+
3gc7
|
| 3905 |
+
3c84
|
| 3906 |
+
2y1d
|
| 3907 |
+
2uzo
|
| 3908 |
+
3v8s
|
| 3909 |
+
5mqv
|
| 3910 |
+
4zly
|
| 3911 |
+
3qqa
|
| 3912 |
+
5dlv
|
| 3913 |
+
4g3g
|
| 3914 |
+
3h9f
|
| 3915 |
+
4wr8
|
| 3916 |
+
5win
|
| 3917 |
+
4hzm
|
| 3918 |
+
3rqe
|
| 3919 |
+
5mw6
|
| 3920 |
+
5mwj
|
| 3921 |
+
1xap
|
| 3922 |
+
4zv2
|
| 3923 |
+
1ol1
|
| 3924 |
+
1a69
|
| 3925 |
+
4btt
|
| 3926 |
+
5t4h
|
| 3927 |
+
2oi2
|
| 3928 |
+
6bbs
|
| 3929 |
+
6aaj
|
| 3930 |
+
5i80
|
| 3931 |
+
1hte
|
| 3932 |
+
6cfd
|
| 3933 |
+
5kbe
|
| 3934 |
+
3saz
|
| 3935 |
+
2wej
|
| 3936 |
+
3kqr
|
| 3937 |
+
2b07
|
| 3938 |
+
3iue
|
| 3939 |
+
3bxf
|
| 3940 |
+
1is0
|
| 3941 |
+
5ktu
|
| 3942 |
+
4agd
|
| 3943 |
+
3nf8
|
| 3944 |
+
2nd0
|
| 3945 |
+
6fhq
|
| 3946 |
+
3vbv
|
| 3947 |
+
3f3t
|
| 3948 |
+
1zhl
|
| 3949 |
+
1g9s
|
| 3950 |
+
5vb9
|
| 3951 |
+
3qtr
|
| 3952 |
+
2ay4
|
| 3953 |
+
4u71
|
| 3954 |
+
1q66
|
| 3955 |
+
3qin
|
| 3956 |
+
5mzg
|
| 3957 |
+
5h08
|
| 3958 |
+
3rv7
|
| 3959 |
+
3rv8
|
| 3960 |
+
6ckx
|
| 3961 |
+
2itk
|
| 3962 |
+
4ij1
|
| 3963 |
+
1pxo
|
| 3964 |
+
5nt0
|
| 3965 |
+
5qao
|
| 3966 |
+
3le9
|
| 3967 |
+
4a6s
|
| 3968 |
+
3bls
|
| 3969 |
+
1pus
|
| 3970 |
+
3asx
|
| 3971 |
+
4lng
|
| 3972 |
+
3mvj
|
| 3973 |
+
4q3q
|
| 3974 |
+
3s9t
|
| 3975 |
+
4gfd
|
| 3976 |
+
5wqc
|
| 3977 |
+
3ri1
|
| 3978 |
+
5ih8
|
| 3979 |
+
6g6w
|
| 3980 |
+
3cjf
|
| 3981 |
+
6duh
|
| 3982 |
+
4pda
|
| 3983 |
+
1bp0
|
| 3984 |
+
3cpb
|
| 3985 |
+
4lmn
|
| 3986 |
+
1zm6
|
| 3987 |
+
6fjm
|
| 3988 |
+
2kup
|
| 3989 |
+
3v3b
|
| 3990 |
+
3vru
|
| 3991 |
+
3arg
|
| 3992 |
+
2o64
|
| 3993 |
+
2igx
|
| 3994 |
+
6ayi
|
| 3995 |
+
4qab
|
| 3996 |
+
4xu2
|
| 3997 |
+
5wzu
|
| 3998 |
+
1w6y
|
| 3999 |
+
5eis
|
| 4000 |
+
4ejf
|
| 4001 |
+
5wqd
|
| 4002 |
+
4aw5
|
| 4003 |
+
5f0f
|
| 4004 |
+
3iqg
|
| 4005 |
+
4p90
|
| 4006 |
+
5jfr
|
| 4007 |
+
5eak
|
| 4008 |
+
4n1u
|
| 4009 |
+
5alj
|
| 4010 |
+
6eku
|
| 4011 |
+
1j5i
|
| 4012 |
+
5hln
|
| 4013 |
+
2y7w
|
| 4014 |
+
3be2
|
| 4015 |
+
5uc1
|
| 4016 |
+
3ax5
|
| 4017 |
+
4f3k
|
| 4018 |
+
5qb0
|
| 4019 |
+
4llj
|
| 4020 |
+
4eyr
|
| 4021 |
+
5v3x
|
| 4022 |
+
4fys
|
| 4023 |
+
4crl
|
| 4024 |
+
2zxd
|
| 4025 |
+
2etk
|
| 4026 |
+
3ddp
|
| 4027 |
+
4q0a
|
| 4028 |
+
3el0
|
| 4029 |
+
2ica
|
| 4030 |
+
4jin
|
| 4031 |
+
5enc
|
| 4032 |
+
1pb9
|
| 4033 |
+
5lpm
|
| 4034 |
+
5jur
|
| 4035 |
+
5n1z
|
| 4036 |
+
5ou2
|
| 4037 |
+
4wet
|
| 4038 |
+
4y8d
|
| 4039 |
+
4gby
|
| 4040 |
+
5acw
|
| 4041 |
+
4r76
|
| 4042 |
+
5hrv
|
| 4043 |
+
3ocb
|
| 4044 |
+
4qxj
|
| 4045 |
+
5er5
|
| 4046 |
+
3hu2
|
| 4047 |
+
1v2l
|
| 4048 |
+
3dz5
|
| 4049 |
+
4j8b
|
| 4050 |
+
5vqs
|
| 4051 |
+
5khd
|
| 4052 |
+
5fho
|
| 4053 |
+
2f71
|
| 4054 |
+
5gmm
|
| 4055 |
+
3sd5
|
| 4056 |
+
3lxl
|
| 4057 |
+
3q8h
|
| 4058 |
+
4h36
|
| 4059 |
+
4qmz
|
| 4060 |
+
4ks5
|
| 4061 |
+
4dew
|
| 4062 |
+
1nf8
|
| 4063 |
+
3s73
|
| 4064 |
+
4r3b
|
| 4065 |
+
2oag
|
| 4066 |
+
2w06
|
| 4067 |
+
5j74
|
| 4068 |
+
2a2g
|
| 4069 |
+
4ju7
|
| 4070 |
+
3i81
|
| 4071 |
+
1af2
|
| 4072 |
+
5hda
|
| 4073 |
+
5zla
|
| 4074 |
+
3nu3
|
| 4075 |
+
6das
|
| 4076 |
+
5njz
|
| 4077 |
+
1b74
|
| 4078 |
+
2x6x
|
| 4079 |
+
1sts
|
| 4080 |
+
2co0
|
| 4081 |
+
4mw9
|
| 4082 |
+
5od1
|
| 4083 |
+
3pab
|
| 4084 |
+
2gnh
|
| 4085 |
+
5u06
|
| 4086 |
+
3uig
|
| 4087 |
+
2uw5
|
| 4088 |
+
2vrx
|
| 4089 |
+
1ele
|
| 4090 |
+
4j3m
|
| 4091 |
+
3hp9
|
| 4092 |
+
4mh7
|
| 4093 |
+
2xkc
|
| 4094 |
+
2w3l
|
| 4095 |
+
5hip
|
| 4096 |
+
4aqh
|
| 4097 |
+
4xv2
|
| 4098 |
+
1fch
|
| 4099 |
+
2j4k
|
| 4100 |
+
1o33
|
| 4101 |
+
2v57
|
| 4102 |
+
4a4w
|
| 4103 |
+
5bmm
|
| 4104 |
+
3dcw
|
| 4105 |
+
4i9o
|
| 4106 |
+
4ydn
|
| 4107 |
+
3lbk
|
| 4108 |
+
1gvu
|
| 4109 |
+
5j1r
|
| 4110 |
+
4no1
|
| 4111 |
+
5duf
|
| 4112 |
+
4wnp
|
| 4113 |
+
4djp
|
| 4114 |
+
4yzn
|
| 4115 |
+
4p75
|
| 4116 |
+
1b1h
|
| 4117 |
+
5hwv
|
| 4118 |
+
3rlb
|
| 4119 |
+
1c5f
|
| 4120 |
+
4fqo
|
| 4121 |
+
6b5m
|
| 4122 |
+
1iau
|
| 4123 |
+
4mcb
|
| 4124 |
+
5y93
|
| 4125 |
+
5qa6
|
| 4126 |
+
3wk5
|
| 4127 |
+
6fba
|
| 4128 |
+
2q8z
|
| 4129 |
+
4odl
|
| 4130 |
+
5mo0
|
| 4131 |
+
3wvm
|
| 4132 |
+
4qh8
|
| 4133 |
+
2w66
|
| 4134 |
+
2joa
|
| 4135 |
+
5fqt
|
| 4136 |
+
2viw
|
| 4137 |
+
4o76
|
| 4138 |
+
3cm2
|
| 4139 |
+
3ion
|
| 4140 |
+
1ero
|
| 4141 |
+
4y2u
|
| 4142 |
+
2rip
|
| 4143 |
+
6eiz
|
| 4144 |
+
3ieo
|
| 4145 |
+
2pk5
|
| 4146 |
+
2vgp
|
| 4147 |
+
2f6t
|
| 4148 |
+
1zfq
|
| 4149 |
+
5ypo
|
| 4150 |
+
2nnv
|
| 4151 |
+
4jvj
|
| 4152 |
+
2cej
|
| 4153 |
+
3hqw
|
| 4154 |
+
2boj
|
| 4155 |
+
3bzi
|
| 4156 |
+
5ut6
|
| 4157 |
+
2toh
|
| 4158 |
+
5urm
|
| 4159 |
+
5f3t
|
| 4160 |
+
5ml8
|
| 4161 |
+
2qpu
|
| 4162 |
+
6f5u
|
| 4163 |
+
3t3y
|
| 4164 |
+
2zx5
|
| 4165 |
+
5alu
|
| 4166 |
+
1z4n
|
| 4167 |
+
4w53
|
| 4168 |
+
5cf8
|
| 4169 |
+
4hcz
|
| 4170 |
+
1jik
|
| 4171 |
+
5ul6
|
| 4172 |
+
5l2n
|
| 4173 |
+
4k3q
|
| 4174 |
+
5vio
|
| 4175 |
+
2piz
|
| 4176 |
+
5ta4
|
| 4177 |
+
3jyj
|
| 4178 |
+
5ly2
|
| 4179 |
+
6do4
|
| 4180 |
+
1v2s
|
| 4181 |
+
4ezo
|
| 4182 |
+
3hl8
|
| 4183 |
+
4q1c
|
| 4184 |
+
3sk2
|
| 4185 |
+
6cdj
|
| 4186 |
+
4w4w
|
| 4187 |
+
5lca
|
| 4188 |
+
3rxf
|
| 4189 |
+
4q7w
|
| 4190 |
+
5ceq
|
| 4191 |
+
5aen
|
| 4192 |
+
3kjf
|
| 4193 |
+
2psj
|
| 4194 |
+
4avt
|
| 4195 |
+
5nxo
|
| 4196 |
+
2eg7
|
| 4197 |
+
3qn7
|
| 4198 |
+
1f73
|
| 4199 |
+
6eyz
|
| 4200 |
+
5ory
|
| 4201 |
+
2asu
|
| 4202 |
+
3rxg
|
| 4203 |
+
6ezq
|
| 4204 |
+
2chz
|
| 4205 |
+
3nzc
|
| 4206 |
+
1ilh
|
| 4207 |
+
1o4j
|
| 4208 |
+
6evp
|
| 4209 |
+
2wks
|
| 4210 |
+
2yc0
|
| 4211 |
+
4xy8
|
| 4212 |
+
2jg0
|
| 4213 |
+
1u33
|
| 4214 |
+
5iez
|
| 4215 |
+
2zxb
|
| 4216 |
+
2qnz
|
| 4217 |
+
3ob0
|
| 4218 |
+
1g36
|
| 4219 |
+
4at3
|
| 4220 |
+
5l3e
|
| 4221 |
+
3pd3
|
| 4222 |
+
5jjr
|
| 4223 |
+
4y8x
|
| 4224 |
+
3l4t
|
| 4225 |
+
3s2a
|
| 4226 |
+
4o3f
|
| 4227 |
+
5c28
|
| 4228 |
+
2zns
|
| 4229 |
+
4pp9
|
| 4230 |
+
4zvi
|
| 4231 |
+
5ul1
|
| 4232 |
+
3fwv
|
| 4233 |
+
2vvv
|
| 4234 |
+
2aof
|
| 4235 |
+
5vfd
|
| 4236 |
+
2qcf
|
| 4237 |
+
3bl0
|
| 4238 |
+
3gk1
|
| 4239 |
+
5i2i
|
| 4240 |
+
5w8v
|
| 4241 |
+
4bgk
|
| 4242 |
+
3piy
|
| 4243 |
+
4ptc
|
| 4244 |
+
4ddy
|
| 4245 |
+
2vwo
|
| 4246 |
+
6bt0
|
| 4247 |
+
5l99
|
| 4248 |
+
6f28
|
| 4249 |
+
1zh7
|
| 4250 |
+
5uiu
|
| 4251 |
+
4eh7
|
| 4252 |
+
5epp
|
| 4253 |
+
3iaf
|
| 4254 |
+
1o6h
|
| 4255 |
+
1b05
|
| 4256 |
+
5t5g
|
| 4257 |
+
5hva
|
| 4258 |
+
6b1k
|
| 4259 |
+
4k75
|
| 4260 |
+
3ips
|
| 4261 |
+
5obg
|
| 4262 |
+
2q89
|
| 4263 |
+
5a3o
|
| 4264 |
+
5owc
|
| 4265 |
+
5ggo
|
| 4266 |
+
4xk9
|
| 4267 |
+
3cf9
|
| 4268 |
+
5vrp
|
| 4269 |
+
3a1e
|
| 4270 |
+
1xlz
|
| 4271 |
+
5zz4
|
| 4272 |
+
5k0b
|
| 4273 |
+
2nta
|
| 4274 |
+
5fa7
|
| 4275 |
+
5fou
|
| 4276 |
+
4gvc
|
| 4277 |
+
5i4o
|
| 4278 |
+
4u7o
|
| 4279 |
+
1i5d
|
| 4280 |
+
2pmk
|
| 4281 |
+
3s1g
|
| 4282 |
+
6mvu
|
| 4283 |
+
6dvl
|
| 4284 |
+
5fe9
|
| 4285 |
+
5ur5
|
| 4286 |
+
3sbh
|
| 4287 |
+
2jiw
|
| 4288 |
+
2r43
|
| 4289 |
+
4ibb
|
| 4290 |
+
2v13
|
| 4291 |
+
4io8
|
| 4292 |
+
2vmc
|
| 4293 |
+
5ggn
|
| 4294 |
+
2afx
|
| 4295 |
+
1g3c
|
| 4296 |
+
4xty
|
| 4297 |
+
1xgi
|
| 4298 |
+
3mmf
|
| 4299 |
+
5v1d
|
| 4300 |
+
2uxz
|
| 4301 |
+
4wwo
|
| 4302 |
+
4kpz
|
| 4303 |
+
4xbb
|
| 4304 |
+
5up0
|
| 4305 |
+
2gzl
|
| 4306 |
+
4r5x
|
| 4307 |
+
3iet
|
| 4308 |
+
1oq5
|
| 4309 |
+
6afd
|
| 4310 |
+
1b55
|
| 4311 |
+
1xp0
|
| 4312 |
+
2q5k
|
| 4313 |
+
5ti7
|
| 4314 |
+
5uqx
|
| 4315 |
+
5w94
|
| 4316 |
+
1k1o
|
| 4317 |
+
4lm3
|
| 4318 |
+
1ek1
|
| 4319 |
+
2xxn
|
| 4320 |
+
5lsc
|
| 4321 |
+
4ui7
|
| 4322 |
+
6beh
|
| 4323 |
+
3ud7
|
| 4324 |
+
4zxx
|
| 4325 |
+
6gzh
|
| 4326 |
+
1uu9
|
| 4327 |
+
5jgq
|
| 4328 |
+
5l2y
|
| 4329 |
+
3v3l
|
| 4330 |
+
2gmv
|
| 4331 |
+
5bvo
|
| 4332 |
+
3snb
|
| 4333 |
+
5upf
|
| 4334 |
+
5tq3
|
| 4335 |
+
3gw5
|
| 4336 |
+
4wwp
|
| 4337 |
+
5wbo
|
| 4338 |
+
3ft2
|
| 4339 |
+
3rmf
|
| 4340 |
+
1fhd
|
| 4341 |
+
4flk
|
| 4342 |
+
2r0y
|
| 4343 |
+
2wbb
|
| 4344 |
+
3da6
|
| 4345 |
+
5ech
|
| 4346 |
+
3l4z
|
| 4347 |
+
1v2w
|
| 4348 |
+
4jjq
|
| 4349 |
+
2azb
|
| 4350 |
+
2oxn
|
| 4351 |
+
3f8w
|
| 4352 |
+
3moh
|
| 4353 |
+
5m3a
|
| 4354 |
+
3tz0
|
| 4355 |
+
3zyu
|
| 4356 |
+
2pzy
|
| 4357 |
+
5usy
|
| 4358 |
+
4mx0
|
| 4359 |
+
4cpq
|
| 4360 |
+
4aq6
|
| 4361 |
+
3qbn
|
| 4362 |
+
5gmv
|
| 4363 |
+
6rsa
|
| 4364 |
+
3u5j
|
| 4365 |
+
5h1t
|
| 4366 |
+
1ctr
|
| 4367 |
+
5lub
|
| 4368 |
+
4h5d
|
| 4369 |
+
5iz6
|
| 4370 |
+
5dhr
|
| 4371 |
+
4l2g
|
| 4372 |
+
4n99
|
| 4373 |
+
4r17
|
| 4374 |
+
4cc2
|
| 4375 |
+
4hzw
|
| 4376 |
+
5lbq
|
| 4377 |
+
3ati
|
| 4378 |
+
3pd2
|
| 4379 |
+
1f74
|
| 4380 |
+
1qyg
|
| 4381 |
+
5dhg
|
| 4382 |
+
4mwx
|
| 4383 |
+
5ohy
|
| 4384 |
+
4ob2
|
| 4385 |
+
3c0z
|
| 4386 |
+
2gtv
|
| 4387 |
+
4r75
|
| 4388 |
+
5w2s
|
| 4389 |
+
3wsy
|
| 4390 |
+
1rs2
|
| 4391 |
+
2r6w
|
| 4392 |
+
4gk4
|
| 4393 |
+
4qq5
|
| 4394 |
+
1n7t
|
| 4395 |
+
1ocn
|
| 4396 |
+
1gnj
|
| 4397 |
+
4u79
|
| 4398 |
+
3hmm
|
| 4399 |
+
2vl1
|
| 4400 |
+
1ec1
|
| 4401 |
+
3wk6
|
| 4402 |
+
5f4u
|
| 4403 |
+
4u5j
|
| 4404 |
+
4jfk
|
| 4405 |
+
4btw
|
| 4406 |
+
1vkj
|
| 4407 |
+
4jnc
|
| 4408 |
+
3fyk
|
| 4409 |
+
6f3i
|
| 4410 |
+
1w3l
|
| 4411 |
+
3gv9
|
| 4412 |
+
2ylc
|
| 4413 |
+
2fgi
|
| 4414 |
+
5sz9
|
| 4415 |
+
5gjg
|
| 4416 |
+
2q1q
|
| 4417 |
+
5yia
|
| 4418 |
+
2rny
|
| 4419 |
+
1o30
|
| 4420 |
+
3m1k
|
| 4421 |
+
6dj5
|
| 4422 |
+
1pfu
|
| 4423 |
+
4k42
|
| 4424 |
+
2wcg
|
| 4425 |
+
3oj8
|
| 4426 |
+
5g1n
|
| 4427 |
+
4lq9
|
| 4428 |
+
1jzs
|
| 4429 |
+
6g9h
|
| 4430 |
+
3axm
|
| 4431 |
+
4qp7
|
| 4432 |
+
4mma
|
| 4433 |
+
3zm9
|
| 4434 |
+
5wi0
|
| 4435 |
+
5iep
|
| 4436 |
+
2opy
|
| 4437 |
+
5c4l
|
| 4438 |
+
6eq1
|
| 4439 |
+
4y2j
|
| 4440 |
+
4x1s
|
| 4441 |
+
5mfs
|
| 4442 |
+
1bux
|
| 4443 |
+
4cjn
|
| 4444 |
+
5d2a
|
| 4445 |
+
6cms
|
| 4446 |
+
3g72
|
| 4447 |
+
4wz4
|
| 4448 |
+
2las
|
| 4449 |
+
4his
|
| 4450 |
+
5ohi
|
| 4451 |
+
2v86
|
| 4452 |
+
3wdc
|
| 4453 |
+
3d9m
|
| 4454 |
+
1fzj
|
| 4455 |
+
2ltw
|
| 4456 |
+
4kzl
|
| 4457 |
+
5ixf
|
| 4458 |
+
1fd7
|
| 4459 |
+
5f25
|
| 4460 |
+
5js3
|
| 4461 |
+
4ceb
|
| 4462 |
+
3dst
|
| 4463 |
+
4x7n
|
| 4464 |
+
4cr5
|
| 4465 |
+
4z2b
|
| 4466 |
+
3iph
|
| 4467 |
+
5t37
|
| 4468 |
+
3zqe
|
| 4469 |
+
6m9t
|
| 4470 |
+
3m54
|
| 4471 |
+
2oxx
|
| 4472 |
+
4mlx
|
| 4473 |
+
3uxg
|
| 4474 |
+
3qri
|
| 4475 |
+
4l0v
|
| 4476 |
+
1a07
|
| 4477 |
+
5qay
|
| 4478 |
+
3r0y
|
| 4479 |
+
1wbs
|
| 4480 |
+
6bgz
|
| 4481 |
+
1ecv
|
| 4482 |
+
4c94
|
| 4483 |
+
1ha2
|
| 4484 |
+
2kfx
|
| 4485 |
+
2be2
|
| 4486 |
+
6cjw
|
| 4487 |
+
2p94
|
| 4488 |
+
5tq7
|
| 4489 |
+
5fsc
|
| 4490 |
+
4btb
|
| 4491 |
+
5yov
|
| 4492 |
+
6eox
|
| 4493 |
+
5am2
|
| 4494 |
+
2pvm
|
| 4495 |
+
2ql7
|
| 4496 |
+
5j58
|
| 4497 |
+
1msm
|
| 4498 |
+
2bjm
|
| 4499 |
+
3rt8
|
| 4500 |
+
3ds3
|
| 4501 |
+
1m13
|
| 4502 |
+
4nxo
|
| 4503 |
+
2cfg
|
| 4504 |
+
3fqe
|
| 4505 |
+
3so9
|
| 4506 |
+
2m0v
|
| 4507 |
+
4r5b
|
| 4508 |
+
1v2j
|
| 4509 |
+
4bks
|
| 4510 |
+
4tkj
|
| 4511 |
+
1jld
|
| 4512 |
+
2ojf
|
| 4513 |
+
1z5m
|
| 4514 |
+
3m1s
|
| 4515 |
+
4lpf
|
| 4516 |
+
4zls
|
| 4517 |
+
4af3
|
| 4518 |
+
5eue
|
| 4519 |
+
2kvm
|
| 4520 |
+
5etl
|
| 4521 |
+
5hn0
|
| 4522 |
+
3f6g
|
| 4523 |
+
2jj3
|
| 4524 |
+
3sfh
|
| 4525 |
+
1ong
|
| 4526 |
+
3vb6
|
| 4527 |
+
3cen
|
| 4528 |
+
4rvr
|
| 4529 |
+
2wqp
|
| 4530 |
+
6fnt
|
| 4531 |
+
3ejt
|
| 4532 |
+
5c2h
|
| 4533 |
+
2hob
|
| 4534 |
+
3ml2
|
| 4535 |
+
1ld7
|
| 4536 |
+
1yz3
|
| 4537 |
+
3m2u
|
| 4538 |
+
2wcx
|
| 4539 |
+
1ec2
|
| 4540 |
+
3ccn
|
| 4541 |
+
3r1v
|
| 4542 |
+
3cn0
|
| 4543 |
+
4ks1
|
| 4544 |
+
5ah2
|
| 4545 |
+
3rtf
|
| 4546 |
+
2pvk
|
| 4547 |
+
3ppr
|
| 4548 |
+
4x9w
|
| 4549 |
+
5syn
|
| 4550 |
+
3g6g
|
| 4551 |
+
4hdb
|
| 4552 |
+
4bti
|
| 4553 |
+
1eef
|
| 4554 |
+
6bln
|
| 4555 |
+
4wyy
|
| 4556 |
+
4bw3
|
| 4557 |
+
3mks
|
| 4558 |
+
3iit
|
| 4559 |
+
2axi
|
| 4560 |
+
3aho
|
| 4561 |
+
3q2a
|
| 4562 |
+
2w12
|
| 4563 |
+
6e06
|
| 4564 |
+
5dy7
|
| 4565 |
+
1gi1
|
| 4566 |
+
3az8
|
| 4567 |
+
4ew2
|
| 4568 |
+
3tgs
|
| 4569 |
+
3zc5
|
| 4570 |
+
2ggx
|
| 4571 |
+
4ih3
|
| 4572 |
+
5u00
|
| 4573 |
+
4qkx
|
| 4574 |
+
4oef
|
| 4575 |
+
6gjn
|
| 4576 |
+
2xii
|
| 4577 |
+
5al2
|
| 4578 |
+
1o4p
|
| 4579 |
+
2xck
|
| 4580 |
+
3dcs
|
| 4581 |
+
2g0h
|
| 4582 |
+
5adr
|
| 4583 |
+
1kv1
|
| 4584 |
+
6h33
|
| 4585 |
+
2wl5
|
| 4586 |
+
6h3k
|
| 4587 |
+
3rwj
|
| 4588 |
+
5x4q
|
| 4589 |
+
2xk3
|
| 4590 |
+
3p4f
|
| 4591 |
+
5prc
|
| 4592 |
+
4gxs
|
| 4593 |
+
1qm5
|
| 4594 |
+
4aw8
|
| 4595 |
+
2eep
|
| 4596 |
+
5l15
|
| 4597 |
+
3tk2
|
| 4598 |
+
5heb
|
| 4599 |
+
5aia
|
| 4600 |
+
4nh9
|
| 4601 |
+
5nki
|
| 4602 |
+
3eos
|
| 4603 |
+
4m8y
|
| 4604 |
+
5u49
|
| 4605 |
+
5yh8
|
| 4606 |
+
3nxq
|
| 4607 |
+
2x3t
|
| 4608 |
+
5exl
|
| 4609 |
+
4x0z
|
| 4610 |
+
4rj6
|
| 4611 |
+
5ly1
|
| 4612 |
+
3zo4
|
| 4613 |
+
2nw4
|
| 4614 |
+
4n3r
|
| 4615 |
+
5ek0
|
| 4616 |
+
3otx
|
| 4617 |
+
4r4c
|
| 4618 |
+
1y3n
|
| 4619 |
+
5cal
|
| 4620 |
+
4dld
|
| 4621 |
+
5z89
|
| 4622 |
+
5sys
|
| 4623 |
+
1ybo
|
| 4624 |
+
2cmo
|
| 4625 |
+
1k3q
|
| 4626 |
+
2ima
|
| 4627 |
+
3caj
|
| 4628 |
+
1xug
|
| 4629 |
+
4l2k
|
| 4630 |
+
3gs6
|
| 4631 |
+
5sz4
|
| 4632 |
+
5alg
|
| 4633 |
+
4b4n
|
| 4634 |
+
4hxm
|
| 4635 |
+
2ov4
|
| 4636 |
+
3n9s
|
| 4637 |
+
4n07
|
| 4638 |
+
3zxr
|
| 4639 |
+
3v7x
|
| 4640 |
+
1aid
|
| 4641 |
+
4u8w
|
| 4642 |
+
2zis
|
| 4643 |
+
6e2o
|
| 4644 |
+
5yqn
|
| 4645 |
+
3oc0
|
| 4646 |
+
4qw7
|
| 4647 |
+
1c1r
|
| 4648 |
+
1zsf
|
| 4649 |
+
3cs7
|
| 4650 |
+
2x24
|
| 4651 |
+
5d7c
|
| 4652 |
+
1bji
|
| 4653 |
+
4yl3
|
| 4654 |
+
4k78
|
| 4655 |
+
2f9u
|
| 4656 |
+
3rl7
|
| 4657 |
+
5etu
|
| 4658 |
+
5drs
|
| 4659 |
+
6es0
|
| 4660 |
+
2g8n
|
| 4661 |
+
5ol3
|
| 4662 |
+
2i6a
|
| 4663 |
+
5gvk
|
| 4664 |
+
3sx4
|
| 4665 |
+
4q4r
|
| 4666 |
+
5nq5
|
| 4667 |
+
3ctt
|
| 4668 |
+
5kxc
|
| 4669 |
+
1tyr
|
| 4670 |
+
2uzn
|
| 4671 |
+
4oon
|
| 4672 |
+
3prs
|
| 4673 |
+
1f8c
|
| 4674 |
+
4jvb
|
| 4675 |
+
3eig
|
| 4676 |
+
1dbk
|
| 4677 |
+
4p74
|
| 4678 |
+
4zg9
|
| 4679 |
+
3wtn
|
| 4680 |
+
4cdr
|
| 4681 |
+
4fli
|
| 4682 |
+
2bts
|
| 4683 |
+
1u9q
|
| 4684 |
+
3n9r
|
| 4685 |
+
5ea6
|
| 4686 |
+
6db3
|
| 4687 |
+
4b6o
|
| 4688 |
+
3nnw
|
| 4689 |
+
3p5k
|
| 4690 |
+
2c4f
|
| 4691 |
+
2xuc
|
| 4692 |
+
4hvd
|
| 4693 |
+
3ijz
|
| 4694 |
+
5n4t
|
| 4695 |
+
4jr5
|
| 4696 |
+
6e9w
|
| 4697 |
+
5f63
|
| 4698 |
+
4cjq
|
| 4699 |
+
1b46
|
| 4700 |
+
4j4n
|
| 4701 |
+
4kcx
|
| 4702 |
+
4k4f
|
| 4703 |
+
6bky
|
| 4704 |
+
2bak
|
| 4705 |
+
5csd
|
| 4706 |
+
4mgc
|
| 4707 |
+
4mwv
|
| 4708 |
+
1mzs
|
| 4709 |
+
4ym2
|
| 4710 |
+
5ldo
|
| 4711 |
+
4ztm
|
| 4712 |
+
5epn
|
| 4713 |
+
3vb7
|
| 4714 |
+
5v7w
|
| 4715 |
+
3n49
|
| 4716 |
+
2zlg
|
| 4717 |
+
2kbr
|
| 4718 |
+
5fng
|
| 4719 |
+
2fgv
|
| 4720 |
+
1f4e
|
| 4721 |
+
2gd8
|
| 4722 |
+
3g6h
|
| 4723 |
+
5u0f
|
| 4724 |
+
4j44
|
| 4725 |
+
5v35
|
| 4726 |
+
1c9d
|
| 4727 |
+
6g6y
|
| 4728 |
+
4azi
|
| 4729 |
+
5kpk
|
| 4730 |
+
4ui8
|
| 4731 |
+
1q6j
|
| 4732 |
+
4p3h
|
| 4733 |
+
3gcs
|
| 4734 |
+
5ngu
|
| 4735 |
+
1sqo
|
| 4736 |
+
6bl1
|
| 4737 |
+
1o2j
|
| 4738 |
+
3u2k
|
| 4739 |
+
6mil
|
| 4740 |
+
1n7i
|
| 4741 |
+
3owl
|
| 4742 |
+
3g34
|
| 4743 |
+
1ec9
|
| 4744 |
+
3drg
|
| 4745 |
+
5j9y
|
| 4746 |
+
4crf
|
| 4747 |
+
3hha
|
| 4748 |
+
3qiy
|
| 4749 |
+
4pid
|
| 4750 |
+
2y55
|
| 4751 |
+
5hhx
|
| 4752 |
+
5etf
|
| 4753 |
+
4xar
|
| 4754 |
+
2pvu
|
| 4755 |
+
6b96
|
| 4756 |
+
5ito
|
| 4757 |
+
1li3
|
| 4758 |
+
3dya
|
| 4759 |
+
3ti3
|
| 4760 |
+
1tze
|
| 4761 |
+
6fmf
|
| 4762 |
+
1di9
|
| 4763 |
+
5f4p
|
| 4764 |
+
5sz7
|
| 4765 |
+
1m2p
|
| 4766 |
+
4yz9
|
| 4767 |
+
1uu8
|
| 4768 |
+
4y64
|
| 4769 |
+
3rt6
|
| 4770 |
+
3vs3
|
| 4771 |
+
4bdg
|
| 4772 |
+
5jv0
|
| 4773 |
+
4pge
|
| 4774 |
+
5cxz
|
| 4775 |
+
5ak4
|
| 4776 |
+
2j27
|
| 4777 |
+
4lte
|
| 4778 |
+
2ayr
|
| 4779 |
+
5m6f
|
| 4780 |
+
5dv4
|
| 4781 |
+
6ciy
|
| 4782 |
+
3ekq
|
| 4783 |
+
3c7q
|
| 4784 |
+
2osc
|
| 4785 |
+
2ph9
|
| 4786 |
+
4xqb
|
| 4787 |
+
5jr6
|
| 4788 |
+
2uwl
|
| 4789 |
+
1y3y
|
| 4790 |
+
1i8i
|
| 4791 |
+
2i7c
|
| 4792 |
+
4m1j
|
| 4793 |
+
6rnt
|
| 4794 |
+
3g5y
|
| 4795 |
+
3hwx
|
| 4796 |
+
1mik
|
| 4797 |
+
1sm3
|
| 4798 |
+
3py1
|
| 4799 |
+
5fah
|
| 4800 |
+
4pnn
|
| 4801 |
+
3tza
|
| 4802 |
+
3zmp
|
| 4803 |
+
2wxl
|
| 4804 |
+
5qaz
|
| 4805 |
+
4odf
|
| 4806 |
+
1eix
|
| 4807 |
+
5b2d
|
| 4808 |
+
5kdr
|
| 4809 |
+
3hcm
|
| 4810 |
+
2x91
|
| 4811 |
+
3frg
|
| 4812 |
+
6gch
|
| 4813 |
+
4qz2
|
| 4814 |
+
1hvh
|
| 4815 |
+
1g3m
|
| 4816 |
+
1tnl
|
| 4817 |
+
1vcj
|
| 4818 |
+
3s7f
|
| 4819 |
+
6cpa
|
| 4820 |
+
6g0w
|
| 4821 |
+
6ffh
|
| 4822 |
+
1nj5
|
| 4823 |
+
5epr
|
| 4824 |
+
6afr
|
| 4825 |
+
5kup
|
| 4826 |
+
4o97
|
| 4827 |
+
2i5f
|
| 4828 |
+
2f89
|
| 4829 |
+
5fsn
|
| 4830 |
+
3nex
|
| 4831 |
+
6b2c
|
| 4832 |
+
4zt5
|
| 4833 |
+
3sud
|
| 4834 |
+
5btx
|
| 4835 |
+
3fxb
|
| 4836 |
+
2x4z
|
| 4837 |
+
3l4x
|
| 4838 |
+
5svx
|
| 4839 |
+
5u98
|
| 4840 |
+
3lpr
|
| 4841 |
+
4pnk
|
| 4842 |
+
4ogj
|
| 4843 |
+
5k0s
|
| 4844 |
+
4oba
|
| 4845 |
+
6h7m
|
| 4846 |
+
3aas
|
| 4847 |
+
4zae
|
| 4848 |
+
4n6h
|
| 4849 |
+
4dsy
|
| 4850 |
+
4w9o
|
| 4851 |
+
2qbp
|
| 4852 |
+
5d45
|
| 4853 |
+
5lhu
|
| 4854 |
+
2bro
|
| 4855 |
+
1ugx
|
| 4856 |
+
4tx6
|
| 4857 |
+
6avi
|
| 4858 |
+
2aox
|
| 4859 |
+
4he9
|
| 4860 |
+
5n24
|
| 4861 |
+
2oz5
|
| 4862 |
+
1h8l
|
| 4863 |
+
3dz6
|
| 4864 |
+
3pwd
|
| 4865 |
+
1it6
|
| 4866 |
+
4rvk
|
| 4867 |
+
6b27
|
| 4868 |
+
2qu5
|
| 4869 |
+
4x7j
|
| 4870 |
+
5tyo
|
| 4871 |
+
2ym3
|
| 4872 |
+
5mjn
|
| 4873 |
+
3h26
|
| 4874 |
+
5ufs
|
| 4875 |
+
2wj1
|
| 4876 |
+
1rt1
|
| 4877 |
+
1kz8
|
| 4878 |
+
2zbk
|
| 4879 |
+
4krs
|
| 4880 |
+
5j87
|
| 4881 |
+
3u3u
|
| 4882 |
+
3g1d
|
| 4883 |
+
5bms
|
| 4884 |
+
2z7g
|
| 4885 |
+
4xv1
|
| 4886 |
+
4j8t
|
| 4887 |
+
5d11
|
| 4888 |
+
5c11
|
| 4889 |
+
3f17
|
| 4890 |
+
4alw
|
| 4891 |
+
3q3b
|
| 4892 |
+
3rwd
|
| 4893 |
+
1c70
|
| 4894 |
+
5kv9
|
| 4895 |
+
4wey
|
| 4896 |
+
4ok3
|
| 4897 |
+
2ctc
|
| 4898 |
+
4bcb
|
| 4899 |
+
5v49
|
| 4900 |
+
6e8m
|
| 4901 |
+
3shy
|
| 4902 |
+
4mnw
|
| 4903 |
+
5ut0
|
| 4904 |
+
3lcd
|
| 4905 |
+
5yc3
|
| 4906 |
+
2vwz
|
| 4907 |
+
2xbx
|
| 4908 |
+
1o0o
|
| 4909 |
+
2vj1
|
| 4910 |
+
6b33
|
| 4911 |
+
4hnp
|
| 4912 |
+
4bh3
|
| 4913 |
+
1ahy
|
| 4914 |
+
4rrr
|
| 4915 |
+
4rqv
|
| 4916 |
+
1hih
|
| 4917 |
+
5ovr
|
| 4918 |
+
4ach
|
| 4919 |
+
6c0u
|
| 4920 |
+
4k18
|
| 4921 |
+
1e72
|
| 4922 |
+
4ocq
|
| 4923 |
+
1b11
|
| 4924 |
+
3gds
|
| 4925 |
+
5ivs
|
| 4926 |
+
3bux
|
| 4927 |
+
3qc4
|
| 4928 |
+
5ovc
|
| 4929 |
+
3wzq
|
| 4930 |
+
1pu7
|
| 4931 |
+
1y3w
|
| 4932 |
+
6e7j
|
| 4933 |
+
1uu3
|
| 4934 |
+
5dh3
|
| 4935 |
+
3tu1
|
| 4936 |
+
4r5a
|
| 4937 |
+
4q3s
|
| 4938 |
+
4z16
|
| 4939 |
+
3uyr
|
| 4940 |
+
5wir
|
| 4941 |
+
5kby
|
| 4942 |
+
1bq4
|
| 4943 |
+
4iur
|
| 4944 |
+
4zv1
|
| 4945 |
+
5u8c
|
| 4946 |
+
4gtp
|
| 4947 |
+
6bj3
|
| 4948 |
+
2o8h
|
| 4949 |
+
3wqw
|
| 4950 |
+
5th7
|
| 4951 |
+
5tv3
|
| 4952 |
+
1tfz
|
| 4953 |
+
5ntw
|
| 4954 |
+
3cyy
|
| 4955 |
+
4q6f
|
| 4956 |
+
5w7x
|
| 4957 |
+
5tyl
|
| 4958 |
+
2v00
|
| 4959 |
+
4nks
|
| 4960 |
+
1ets
|
| 4961 |
+
2fq6
|
| 4962 |
+
4r59
|
| 4963 |
+
3bkl
|
| 4964 |
+
2boh
|
| 4965 |
+
2r5d
|
| 4966 |
+
1ikt
|
| 4967 |
+
5alh
|
| 4968 |
+
5szc
|
| 4969 |
+
1cr6
|
| 4970 |
+
4zzy
|
| 4971 |
+
1phw
|
| 4972 |
+
3l9n
|
| 4973 |
+
3eml
|
| 4974 |
+
3nij
|
| 4975 |
+
5amg
|
| 4976 |
+
3bv2
|
| 4977 |
+
2w73
|
| 4978 |
+
1h35
|
| 4979 |
+
3tne
|
| 4980 |
+
3jyr
|
| 4981 |
+
5l9g
|
| 4982 |
+
2xsb
|
| 4983 |
+
4f70
|
| 4984 |
+
2iuz
|
| 4985 |
+
1ml1
|
| 4986 |
+
3ewj
|
| 4987 |
+
4mk9
|
| 4988 |
+
2jiu
|
| 4989 |
+
4es0
|
| 4990 |
+
6dqa
|
| 4991 |
+
3kab
|
| 4992 |
+
5mxk
|
| 4993 |
+
2i0y
|
| 4994 |
+
4zy2
|
| 4995 |
+
3d4f
|
| 4996 |
+
6cqt
|
| 4997 |
+
2i47
|
| 4998 |
+
2i0d
|
| 4999 |
+
5oui
|
| 5000 |
+
2i80
|
| 5001 |
+
1nfw
|
| 5002 |
+
5wmg
|
| 5003 |
+
5uzk
|
| 5004 |
+
1y6a
|
| 5005 |
+
1e2k
|
| 5006 |
+
6ma4
|
| 5007 |
+
1nc6
|
| 5008 |
+
4zyv
|
| 5009 |
+
1wcc
|
| 5010 |
+
1kzk
|
| 5011 |
+
1nms
|
| 5012 |
+
1fwu
|
| 5013 |
+
5djr
|
| 5014 |
+
3a1c
|
| 5015 |
+
1hdq
|
| 5016 |
+
5ml6
|
| 5017 |
+
5l4h
|
| 5018 |
+
4rj3
|
| 5019 |
+
2x4t
|
| 5020 |
+
5nk9
|
| 5021 |
+
2xj1
|
| 5022 |
+
3mfw
|
| 5023 |
+
3gy3
|
| 5024 |
+
5nvv
|
| 5025 |
+
4rh5
|
| 5026 |
+
4x5q
|
| 5027 |
+
4jxv
|
| 5028 |
+
5ivv
|
| 5029 |
+
4axa
|
| 5030 |
+
5cbr
|
| 5031 |
+
5lli
|
| 5032 |
+
3q5h
|
| 5033 |
+
4wmu
|
| 5034 |
+
4lkf
|
| 5035 |
+
1rjk
|
| 5036 |
+
6g93
|
| 5037 |
+
4nni
|
| 5038 |
+
6exs
|
| 5039 |
+
6erv
|
| 5040 |
+
6msy
|
| 5041 |
+
1z3c
|
| 5042 |
+
3r5n
|
| 5043 |
+
1lol
|
| 5044 |
+
3qk5
|
| 5045 |
+
1rhm
|
| 5046 |
+
5avi
|
| 5047 |
+
4jfe
|
| 5048 |
+
3ti1
|
| 5049 |
+
5b5b
|
| 5050 |
+
4fjz
|
| 5051 |
+
1oxq
|
| 5052 |
+
3wk8
|
| 5053 |
+
6bl2
|
| 5054 |
+
3oe8
|
| 5055 |
+
3f7z
|
| 5056 |
+
4j3i
|
| 5057 |
+
5ogl
|
| 5058 |
+
6fob
|
| 5059 |
+
4o1l
|
| 5060 |
+
2uy0
|
| 5061 |
+
1g46
|
| 5062 |
+
5orl
|
| 5063 |
+
1uwu
|
| 5064 |
+
1g3b
|
| 5065 |
+
185l
|
| 5066 |
+
4z07
|
| 5067 |
+
2w8w
|
| 5068 |
+
3jwq
|
| 5069 |
+
1o2z
|
| 5070 |
+
5lsy
|
| 5071 |
+
3iok
|
| 5072 |
+
1sdu
|
| 5073 |
+
4xtp
|
| 5074 |
+
4qt2
|
| 5075 |
+
5fdd
|
| 5076 |
+
5ia2
|
| 5077 |
+
1ux7
|
| 5078 |
+
5jhk
|
| 5079 |
+
4ca4
|
| 5080 |
+
1qvt
|
| 5081 |
+
3kjn
|
| 5082 |
+
3t9t
|
| 5083 |
+
3lpf
|
| 5084 |
+
2bpv
|
| 5085 |
+
3jzr
|
| 5086 |
+
1fcz
|
| 5087 |
+
4p7m
|
| 5088 |
+
4hvb
|
| 5089 |
+
3cp9
|
| 5090 |
+
5jm4
|
| 5091 |
+
2q9y
|
| 5092 |
+
3eid
|
| 5093 |
+
4o4y
|
| 5094 |
+
6b1f
|
| 5095 |
+
3rxe
|
| 5096 |
+
4ap0
|
| 5097 |
+
4mu7
|
| 5098 |
+
1pvn
|
| 5099 |
+
4jfm
|
| 5100 |
+
1rej
|
| 5101 |
+
3ens
|
| 5102 |
+
4o7e
|
| 5103 |
+
3e9b
|
| 5104 |
+
4hby
|
| 5105 |
+
6guf
|
| 5106 |
+
5t1i
|
| 5107 |
+
5upj
|
| 5108 |
+
3c89
|
| 5109 |
+
3pcg
|
| 5110 |
+
3vbw
|
| 5111 |
+
3px8
|
| 5112 |
+
5kit
|
| 5113 |
+
6dh0
|
| 5114 |
+
1ukh
|
| 5115 |
+
5y5t
|
| 5116 |
+
4rxh
|
| 5117 |
+
2ien
|
| 5118 |
+
2upj
|
| 5119 |
+
3gzn
|
| 5120 |
+
3eio
|
| 5121 |
+
2hk5
|
| 5122 |
+
3emh
|
| 5123 |
+
1o3f
|
| 5124 |
+
5ipj
|
| 5125 |
+
6dai
|
| 5126 |
+
3hfj
|
| 5127 |
+
3su5
|
| 5128 |
+
3sh1
|
| 5129 |
+
5e0h
|
| 5130 |
+
4m3q
|
| 5131 |
+
4wf2
|
| 5132 |
+
2pj3
|
| 5133 |
+
2osf
|
| 5134 |
+
2r0z
|
| 5135 |
+
1jrs
|
| 5136 |
+
1i9q
|
| 5137 |
+
2qd8
|
| 5138 |
+
2jfh
|
| 5139 |
+
2aa9
|
| 5140 |
+
5vt1
|
| 5141 |
+
5vsj
|
| 5142 |
+
5l0c
|
| 5143 |
+
4ybk
|
| 5144 |
+
3zmt
|
| 5145 |
+
5kr1
|
| 5146 |
+
2r7b
|
| 5147 |
+
2lh8
|
| 5148 |
+
2wyn
|
| 5149 |
+
6lpr
|
| 5150 |
+
3v2n
|
| 5151 |
+
5oq8
|
| 5152 |
+
6bhd
|
| 5153 |
+
2pym
|
| 5154 |
+
4xaq
|
| 5155 |
+
5m0m
|
| 5156 |
+
3puj
|
| 5157 |
+
3tcp
|
| 5158 |
+
5k5c
|
| 5159 |
+
2rnw
|
| 5160 |
+
5qa8
|
| 5161 |
+
4gtm
|
| 5162 |
+
3uec
|
| 5163 |
+
4ncm
|
| 5164 |
+
5hvs
|
| 5165 |
+
1lxh
|
| 5166 |
+
5aux
|
| 5167 |
+
2h2d
|
| 5168 |
+
4qir
|
| 5169 |
+
1s89
|
| 5170 |
+
4bhz
|
| 5171 |
+
1qwf
|
| 5172 |
+
2avv
|
| 5173 |
+
5knt
|
| 5174 |
+
4odn
|
| 5175 |
+
3o1e
|
| 5176 |
+
3lj7
|
| 5177 |
+
3eov
|
| 5178 |
+
6cpw
|
| 5179 |
+
1qca
|
| 5180 |
+
1jii
|
| 5181 |
+
3lkh
|
| 5182 |
+
3rul
|
| 5183 |
+
4g4p
|
| 5184 |
+
1qk4
|
| 5185 |
+
5nkh
|
| 5186 |
+
1w0z
|
| 5187 |
+
1jlq
|
| 5188 |
+
4meo
|
| 5189 |
+
4pmt
|
| 5190 |
+
2vvc
|
| 5191 |
+
2v2h
|
| 5192 |
+
4lm2
|
| 5193 |
+
1bzs
|
| 5194 |
+
1v0n
|
| 5195 |
+
3d67
|
| 5196 |
+
4uxj
|
| 5197 |
+
4bo2
|
| 5198 |
+
4aa1
|
| 5199 |
+
3ga5
|
| 5200 |
+
5hka
|
| 5201 |
+
3vtd
|
| 5202 |
+
2gm1
|
| 5203 |
+
4x6x
|
| 5204 |
+
6ma2
|
| 5205 |
+
1tpw
|
| 5206 |
+
2uxi
|
| 5207 |
+
3gc4
|
| 5208 |
+
1v0p
|
| 5209 |
+
2uzb
|
| 5210 |
+
5xvq
|
| 5211 |
+
3r7b
|
| 5212 |
+
3b8q
|
| 5213 |
+
2y6c
|
| 5214 |
+
1jd6
|
| 5215 |
+
3bc5
|
| 5216 |
+
2bes
|
| 5217 |
+
4f6x
|
| 5218 |
+
3uvk
|
| 5219 |
+
6m8y
|
| 5220 |
+
4avu
|
| 5221 |
+
5vwi
|
| 5222 |
+
1ms6
|
| 5223 |
+
5xo7
|
| 5224 |
+
3dcq
|
| 5225 |
+
5idn
|
| 5226 |
+
2ygf
|
| 5227 |
+
1zsr
|
| 5228 |
+
4zzx
|
| 5229 |
+
1bl7
|
| 5230 |
+
4k7n
|
| 5231 |
+
5voj
|
| 5232 |
+
3ck8
|
| 5233 |
+
5d6q
|
| 5234 |
+
5y0f
|
| 5235 |
+
3dpk
|
| 5236 |
+
1bjv
|
| 5237 |
+
4kn4
|
| 5238 |
+
6bw5
|
| 5239 |
+
3dow
|
| 5240 |
+
1mzc
|
| 5241 |
+
2nng
|
| 5242 |
+
2xnm
|
| 5243 |
+
3f8e
|
| 5244 |
+
2ndg
|
| 5245 |
+
4lh5
|
| 5246 |
+
3e8r
|
| 5247 |
+
2e9o
|
| 5248 |
+
3uih
|
| 5249 |
+
4hlc
|
| 5250 |
+
5moo
|
| 5251 |
+
2i3h
|
| 5252 |
+
6ew6
|
| 5253 |
+
3ama
|
| 5254 |
+
4l0t
|
| 5255 |
+
1vj9
|
| 5256 |
+
3pup
|
| 5257 |
+
2xct
|
| 5258 |
+
5fh8
|
| 5259 |
+
5ivz
|
| 5260 |
+
1str
|
| 5261 |
+
2xxx
|
| 5262 |
+
5mra
|
| 5263 |
+
4o28
|
| 5264 |
+
1d6n
|
| 5265 |
+
1pr5
|
| 5266 |
+
2cma
|
| 5267 |
+
4qt0
|
| 5268 |
+
2a5c
|
| 5269 |
+
5yy9
|
| 5270 |
+
2x81
|
| 5271 |
+
2cv3
|
| 5272 |
+
5jo0
|
| 5273 |
+
3l0e
|
| 5274 |
+
6hly
|
| 5275 |
+
3p3j
|
| 5276 |
+
1eed
|
| 5277 |
+
5aib
|
| 5278 |
+
3qbc
|
| 5279 |
+
1n7j
|
| 5280 |
+
5fpi
|
| 5281 |
+
6gxw
|
| 5282 |
+
1rw8
|
| 5283 |
+
5zw6
|
| 5284 |
+
4e3d
|
| 5285 |
+
2mji
|
| 5286 |
+
6ayh
|
| 5287 |
+
1w2k
|
| 5288 |
+
3zns
|
| 5289 |
+
4jbo
|
| 5290 |
+
5mty
|
| 5291 |
+
6c3u
|
| 5292 |
+
4ks4
|
| 5293 |
+
3ds9
|
| 5294 |
+
4hld
|
| 5295 |
+
3elj
|
| 5296 |
+
4j45
|
| 5297 |
+
5cgj
|
| 5298 |
+
5v41
|
| 5299 |
+
2yne
|
| 5300 |
+
5nad
|
| 5301 |
+
5lvq
|
| 5302 |
+
3d9p
|
| 5303 |
+
3ske
|
| 5304 |
+
3ip5
|
| 5305 |
+
1w22
|
| 5306 |
+
4kbk
|
| 5307 |
+
5lqq
|
| 5308 |
+
5wyx
|
| 5309 |
+
3ps1
|
| 5310 |
+
5ou1
|
| 5311 |
+
2xru
|
| 5312 |
+
3lq2
|
| 5313 |
+
3t4p
|
| 5314 |
+
3uzp
|
| 5315 |
+
2o2u
|
| 5316 |
+
3dei
|
| 5317 |
+
3u93
|
| 5318 |
+
4z8m
|
| 5319 |
+
3lhs
|
| 5320 |
+
2f81
|
| 5321 |
+
3fn0
|
| 5322 |
+
4iqt
|
| 5323 |
+
5laz
|
| 5324 |
+
1xot
|
| 5325 |
+
1zs0
|
| 5326 |
+
1oau
|
| 5327 |
+
5d47
|
| 5328 |
+
3o4k
|
| 5329 |
+
6dcg
|
| 5330 |
+
1o4q
|
| 5331 |
+
1csi
|
| 5332 |
+
5nwd
|
| 5333 |
+
2wca
|
| 5334 |
+
1u1w
|
| 5335 |
+
5aae
|
| 5336 |
+
4gja
|
| 5337 |
+
2pj2
|
| 5338 |
+
5f2p
|
| 5339 |
+
2yme
|
| 5340 |
+
3cbs
|
| 5341 |
+
2wxg
|
| 5342 |
+
5u0y
|
| 5343 |
+
3uhm
|
| 5344 |
+
5qaw
|
| 5345 |
+
5d3h
|
| 5346 |
+
3sb0
|
| 5347 |
+
5alx
|
| 5348 |
+
5g57
|
| 5349 |
+
6afj
|
| 5350 |
+
5jf5
|
| 5351 |
+
4ccd
|
| 5352 |
+
5mhq
|
| 5353 |
+
5etx
|
| 5354 |
+
4k6u
|
| 5355 |
+
5tb6
|
| 5356 |
+
6g28
|
| 5357 |
+
4zyz
|
| 5358 |
+
1sj0
|
| 5359 |
+
4eh3
|
| 5360 |
+
2r5b
|
| 5361 |
+
5eq0
|
| 5362 |
+
1cil
|
| 5363 |
+
5le1
|
| 5364 |
+
5vm0
|
| 5365 |
+
2ff1
|
| 5366 |
+
3f8f
|
| 5367 |
+
2rka
|
| 5368 |
+
3dda
|
| 5369 |
+
4iu4
|
| 5370 |
+
2vc7
|
| 5371 |
+
5bs4
|
| 5372 |
+
5a0a
|
| 5373 |
+
4fyo
|
| 5374 |
+
1p2a
|
| 5375 |
+
4ps8
|
| 5376 |
+
2uze
|
| 5377 |
+
3iw4
|
| 5378 |
+
3nzu
|
| 5379 |
+
3gi6
|
| 5380 |
+
5dtt
|
| 5381 |
+
1gaf
|
| 5382 |
+
3nii
|
| 5383 |
+
2xm8
|
| 5384 |
+
4att
|
| 5385 |
+
2vex
|
| 5386 |
+
4jx7
|
| 5387 |
+
6h34
|
| 5388 |
+
4tvj
|
| 5389 |
+
3pvw
|
| 5390 |
+
1ai7
|
| 5391 |
+
1kc7
|
| 5392 |
+
2rl5
|
| 5393 |
+
6c0r
|
| 5394 |
+
4rcp
|
| 5395 |
+
1nhv
|
| 5396 |
+
5mw2
|
| 5397 |
+
5hct
|
| 5398 |
+
4mg7
|
| 5399 |
+
1drk
|
| 5400 |
+
4mzh
|
| 5401 |
+
1r6g
|
| 5402 |
+
5xpl
|
| 5403 |
+
4o74
|
| 5404 |
+
3bu8
|
| 5405 |
+
3cgf
|
| 5406 |
+
4nie
|
| 5407 |
+
1xos
|
| 5408 |
+
2wlz
|
| 5409 |
+
1a37
|
| 5410 |
+
5lpj
|
| 5411 |
+
1yvh
|
| 5412 |
+
4xhe
|
| 5413 |
+
2lct
|
| 5414 |
+
3lau
|
| 5415 |
+
1g2l
|
| 5416 |
+
4uiv
|
| 5417 |
+
6b4u
|
| 5418 |
+
3jqf
|
| 5419 |
+
4pvt
|
| 5420 |
+
5lpl
|
| 5421 |
+
3sdk
|
| 5422 |
+
5km3
|
| 5423 |
+
3fmz
|
| 5424 |
+
1t7f
|
| 5425 |
+
4p5z
|
| 5426 |
+
4tk1
|
| 5427 |
+
2j9a
|
| 5428 |
+
3bbt
|
| 5429 |
+
3tjc
|
| 5430 |
+
2qi4
|
| 5431 |
+
4acg
|
| 5432 |
+
5tuz
|
| 5433 |
+
5uuu
|
| 5434 |
+
5ckr
|
| 5435 |
+
2h2e
|
| 5436 |
+
5qai
|
| 5437 |
+
1qxk
|
| 5438 |
+
2wyf
|
| 5439 |
+
3f0r
|
| 5440 |
+
1rd4
|
| 5441 |
+
2ipo
|
| 5442 |
+
1ukt
|
| 5443 |
+
2xx2
|
| 5444 |
+
3hvh
|
| 5445 |
+
5d24
|
| 5446 |
+
4bdk
|
| 5447 |
+
2xu4
|
| 5448 |
+
6alc
|
| 5449 |
+
4r5y
|
| 5450 |
+
3kdm
|
| 5451 |
+
4gk3
|
| 5452 |
+
1x1z
|
| 5453 |
+
4pz5
|
| 5454 |
+
2duv
|
| 5455 |
+
6fnx
|
| 5456 |
+
5u4b
|
| 5457 |
+
4dfn
|
| 5458 |
+
1n9a
|
| 5459 |
+
5mny
|
| 5460 |
+
5ktw
|
| 5461 |
+
4qem
|
| 5462 |
+
3l81
|
| 5463 |
+
4c0r
|
| 5464 |
+
1utp
|
| 5465 |
+
5tuy
|
| 5466 |
+
5fb7
|
| 5467 |
+
3n7r
|
| 5468 |
+
3bze
|
| 5469 |
+
2bvd
|
| 5470 |
+
2fx6
|
| 5471 |
+
5vcw
|
| 5472 |
+
4ibf
|
| 5473 |
+
1zom
|
| 5474 |
+
4f6w
|
| 5475 |
+
1i91
|
| 5476 |
+
3ikd
|
| 5477 |
+
4w9e
|
| 5478 |
+
4ntj
|
| 5479 |
+
4wk7
|
| 5480 |
+
1cx9
|
| 5481 |
+
3f15
|
| 5482 |
+
4u6c
|
| 5483 |
+
1ing
|
| 5484 |
+
3wq6
|
| 5485 |
+
2yb0
|
| 5486 |
+
3wtl
|
| 5487 |
+
5yl2
|
| 5488 |
+
2hz4
|
| 5489 |
+
2gbf
|
| 5490 |
+
3r8z
|
| 5491 |
+
5cso
|
| 5492 |
+
2j95
|
| 5493 |
+
5jxq
|
| 5494 |
+
4ofb
|
| 5495 |
+
2fr3
|
| 5496 |
+
5b4l
|
| 5497 |
+
3ovx
|
| 5498 |
+
6cha
|
| 5499 |
+
4qwl
|
| 5500 |
+
1tl9
|
| 5501 |
+
3mo2
|
| 5502 |
+
3c8e
|
| 5503 |
+
3k5x
|
| 5504 |
+
3fhr
|
| 5505 |
+
1b3f
|
| 5506 |
+
5a8z
|
| 5507 |
+
5bvk
|
| 5508 |
+
2hb1
|
| 5509 |
+
3qxd
|
| 5510 |
+
3i4y
|
| 5511 |
+
6g9d
|
| 5512 |
+
6gop
|
| 5513 |
+
1oar
|
| 5514 |
+
5flt
|
| 5515 |
+
2zhd
|
| 5516 |
+
2xey
|
| 5517 |
+
4ikr
|
| 5518 |
+
4ckr
|
| 5519 |
+
5xvk
|
| 5520 |
+
4lq3
|
| 5521 |
+
1dtq
|
| 5522 |
+
5bpp
|
| 5523 |
+
6fg6
|
| 5524 |
+
3fl9
|
| 5525 |
+
6em7
|
| 5526 |
+
5cnj
|
| 5527 |
+
4os5
|
| 5528 |
+
6awn
|
| 5529 |
+
3atm
|
| 5530 |
+
5msb
|
| 5531 |
+
1odj
|
| 5532 |
+
2h5a
|
| 5533 |
+
2w3k
|
| 5534 |
+
5w1v
|
| 5535 |
+
1pi4
|
| 5536 |
+
4kx8
|
| 5537 |
+
5m25
|
| 5538 |
+
1dhj
|
| 5539 |
+
2qki
|
| 5540 |
+
3oag
|
| 5541 |
+
1n0t
|
| 5542 |
+
5dyo
|
| 5543 |
+
1ql9
|
| 5544 |
+
5u7k
|
| 5545 |
+
4mjr
|
| 5546 |
+
5mtx
|
| 5547 |
+
4u6w
|
| 5548 |
+
3d8z
|
| 5549 |
+
2pnc
|
| 5550 |
+
5em8
|
| 5551 |
+
5cil
|
| 5552 |
+
3iny
|
| 5553 |
+
5l2t
|
| 5554 |
+
5fow
|
| 5555 |
+
2owb
|
| 5556 |
+
1k1y
|
| 5557 |
+
3ijg
|
| 5558 |
+
4ad2
|
| 5559 |
+
4tqn
|
| 5560 |
+
3zzh
|
| 5561 |
+
3qel
|
| 5562 |
+
5jha
|
| 5563 |
+
3rxh
|
| 5564 |
+
5xqx
|
| 5565 |
+
3q2g
|
| 5566 |
+
5wl0
|
| 5567 |
+
4bic
|
| 5568 |
+
1xp9
|
| 5569 |
+
4u7t
|
| 5570 |
+
4ef6
|
| 5571 |
+
5jzb
|
| 5572 |
+
2h8h
|
| 5573 |
+
5sxm
|
| 5574 |
+
2ym8
|
| 5575 |
+
6fmi
|
| 5576 |
+
5f1h
|
| 5577 |
+
4qyg
|
| 5578 |
+
2qtu
|
| 5579 |
+
1vwf
|
| 5580 |
+
1ivp
|
| 5581 |
+
5typ
|
| 5582 |
+
5igq
|
| 5583 |
+
4h1j
|
| 5584 |
+
3sug
|
| 5585 |
+
3h85
|
| 5586 |
+
6hf5
|
| 5587 |
+
2poq
|
| 5588 |
+
6afa
|
| 5589 |
+
4mpc
|
| 5590 |
+
6c0t
|
| 5591 |
+
5dit
|
| 5592 |
+
3h8c
|
| 5593 |
+
2e9v
|
| 5594 |
+
2hfp
|
| 5595 |
+
4u5n
|
| 5596 |
+
5caq
|
| 5597 |
+
4m2w
|
| 5598 |
+
4tv3
|
| 5599 |
+
4ux4
|
| 5600 |
+
3oy8
|
| 5601 |
+
3owk
|
| 5602 |
+
3arn
|
| 5603 |
+
4i6f
|
| 5604 |
+
2dri
|
| 5605 |
+
4ymj
|
| 5606 |
+
5ix0
|
| 5607 |
+
4jr0
|
| 5608 |
+
1nzl
|
| 5609 |
+
5u4a
|
| 5610 |
+
4eg4
|
| 5611 |
+
1wbw
|
| 5612 |
+
3nee
|
| 5613 |
+
5fdp
|
| 5614 |
+
1hvs
|
| 5615 |
+
2w6q
|
| 5616 |
+
1n8v
|
| 5617 |
+
5ct7
|
| 5618 |
+
5dx4
|
| 5619 |
+
2fdd
|
| 5620 |
+
4ydf
|
| 5621 |
+
5c2e
|
| 5622 |
+
3qsb
|
| 5623 |
+
5zk3
|
| 5624 |
+
5vgy
|
| 5625 |
+
5ot8
|
| 5626 |
+
4j70
|
| 5627 |
+
6boe
|
| 5628 |
+
1c29
|
| 5629 |
+
2pjl
|
| 5630 |
+
1jd0
|
| 5631 |
+
2euf
|
| 5632 |
+
3as1
|
| 5633 |
+
3abu
|
| 5634 |
+
3dpd
|
| 5635 |
+
5aku
|
| 5636 |
+
4k6w
|
| 5637 |
+
4j3l
|
| 5638 |
+
4nau
|
| 5639 |
+
3fpm
|
| 5640 |
+
3c3q
|
| 5641 |
+
3mg7
|
| 5642 |
+
3buo
|
| 5643 |
+
3rhx
|
| 5644 |
+
5lyh
|
| 5645 |
+
1b0h
|
| 5646 |
+
4ynd
|
| 5647 |
+
5y0x
|
| 5648 |
+
5enb
|
| 5649 |
+
2a5b
|
| 5650 |
+
5mqe
|
| 5651 |
+
1tl1
|
| 5652 |
+
4iaw
|
| 5653 |
+
3way
|
| 5654 |
+
3gs7
|
| 5655 |
+
3ex3
|
| 5656 |
+
1szm
|
| 5657 |
+
5mxq
|
| 5658 |
+
2cnh
|
| 5659 |
+
4eyj
|
| 5660 |
+
6mj7
|
| 5661 |
+
4ln2
|
| 5662 |
+
4jia
|
| 5663 |
+
4l4z
|
| 5664 |
+
4qr4
|
| 5665 |
+
2er9
|
| 5666 |
+
5vnb
|
| 5667 |
+
5xff
|
| 5668 |
+
4prp
|
| 5669 |
+
2jqk
|
| 5670 |
+
1gi8
|
| 5671 |
+
2ynr
|
| 5672 |
+
5nk2
|
| 5673 |
+
1rsd
|
| 5674 |
+
5umz
|
| 5675 |
+
3jzf
|
| 5676 |
+
4k69
|
| 5677 |
+
6b5o
|
| 5678 |
+
4o4k
|
| 5679 |
+
2afw
|
| 5680 |
+
3nht
|
| 5681 |
+
5xof
|
| 5682 |
+
4f9w
|
| 5683 |
+
3wym
|
| 5684 |
+
1w31
|
| 5685 |
+
5c6o
|
| 5686 |
+
3nim
|
| 5687 |
+
1vso
|
| 5688 |
+
1o37
|
| 5689 |
+
5i8g
|
| 5690 |
+
3lbz
|
| 5691 |
+
4bqs
|
| 5692 |
+
3tfv
|
| 5693 |
+
1bra
|
| 5694 |
+
2k46
|
| 5695 |
+
2w76
|
| 5696 |
+
5jcj
|
| 5697 |
+
3b92
|
| 5698 |
+
3zha
|
| 5699 |
+
2i4x
|
| 5700 |
+
3ess
|
| 5701 |
+
3sap
|
| 5702 |
+
3wbl
|
| 5703 |
+
5trk
|
| 5704 |
+
2n8t
|
| 5705 |
+
3ge7
|
| 5706 |
+
4unn
|
| 5707 |
+
2ria
|
| 5708 |
+
4wgi
|
| 5709 |
+
4bds
|
| 5710 |
+
2y06
|
| 5711 |
+
6bhh
|
| 5712 |
+
1olx
|
| 5713 |
+
4raq
|
| 5714 |
+
5i56
|
| 5715 |
+
2xk4
|
| 5716 |
+
4az0
|
| 5717 |
+
3ov1
|
| 5718 |
+
4ycw
|
| 5719 |
+
5ei4
|
| 5720 |
+
4u1b
|
| 5721 |
+
3bv3
|
| 5722 |
+
5etp
|
| 5723 |
+
3kcf
|
| 5724 |
+
6fod
|
| 5725 |
+
3eq9
|
| 5726 |
+
1eat
|
| 5727 |
+
4giu
|
| 5728 |
+
3vyd
|
| 5729 |
+
2pvh
|
| 5730 |
+
5aaf
|
| 5731 |
+
2hdr
|
| 5732 |
+
1lgw
|
| 5733 |
+
2lsp
|
| 5734 |
+
4l02
|
| 5735 |
+
3ioe
|
| 5736 |
+
4c1m
|
| 5737 |
+
4obv
|
| 5738 |
+
3r6t
|
| 5739 |
+
1uj6
|
| 5740 |
+
1a09
|
| 5741 |
+
4yk6
|
| 5742 |
+
1rzx
|
| 5743 |
+
3cy2
|
| 5744 |
+
4la7
|
| 5745 |
+
1qwe
|
| 5746 |
+
2opb
|
| 5747 |
+
3psl
|
| 5748 |
+
5y5w
|
| 5749 |
+
2j4q
|
| 5750 |
+
5vfn
|
| 5751 |
+
3e37
|
| 5752 |
+
4fzg
|
| 5753 |
+
5qim
|
| 5754 |
+
1bzh
|
| 5755 |
+
6da4
|
| 5756 |
+
1oxr
|
| 5757 |
+
5hgc
|
| 5758 |
+
5ieg
|
| 5759 |
+
6cq0
|
| 5760 |
+
2ioa
|
| 5761 |
+
3lpg
|
| 5762 |
+
4mzo
|
| 5763 |
+
1w7x
|
| 5764 |
+
2aia
|
| 5765 |
+
5abg
|
| 5766 |
+
2xcn
|
| 5767 |
+
5toz
|
| 5768 |
+
5tp0
|
| 5769 |
+
4z22
|
| 5770 |
+
4wlb
|
| 5771 |
+
4pd5
|
| 5772 |
+
3zbx
|
| 5773 |
+
3pb9
|
| 5774 |
+
3f5l
|
| 5775 |
+
6bke
|
| 5776 |
+
4p7s
|
| 5777 |
+
5enh
|
| 5778 |
+
4m3p
|
| 5779 |
+
4jmg
|
| 5780 |
+
1k03
|
| 5781 |
+
4pni
|
| 5782 |
+
4asy
|
| 5783 |
+
1a4r
|
| 5784 |
+
4deu
|
| 5785 |
+
4hy9
|
| 5786 |
+
4g0p
|
| 5787 |
+
3wqv
|
| 5788 |
+
2r3m
|
| 5789 |
+
5ar5
|
| 5790 |
+
1drv
|
| 5791 |
+
4ua8
|
| 5792 |
+
5eh8
|
| 5793 |
+
4wy3
|
| 5794 |
+
2aov
|
| 5795 |
+
6dj2
|
| 5796 |
+
2og8
|
| 5797 |
+
10gs
|
| 5798 |
+
4uw1
|
| 5799 |
+
4hge
|
| 5800 |
+
5qcn
|
| 5801 |
+
6c4d
|
| 5802 |
+
1bai
|
| 5803 |
+
5hfb
|
| 5804 |
+
3i5n
|
| 5805 |
+
6epa
|
| 5806 |
+
3dgq
|
| 5807 |
+
5tzx
|
| 5808 |
+
4h3b
|
| 5809 |
+
1e5a
|
| 5810 |
+
5n93
|
| 5811 |
+
2exg
|
| 5812 |
+
5k4l
|
| 5813 |
+
1qpb
|
| 5814 |
+
1f3e
|
| 5815 |
+
4a0j
|
| 5816 |
+
5ula
|
| 5817 |
+
4cc3
|
| 5818 |
+
2oym
|
| 5819 |
+
1dis
|
| 5820 |
+
3ns7
|
| 5821 |
+
3uz5
|
| 5822 |
+
3ewz
|
| 5823 |
+
1qbo
|
| 5824 |
+
1nd5
|
| 5825 |
+
2qzr
|
| 5826 |
+
4oq3
|
| 5827 |
+
6e9l
|
| 5828 |
+
2n0w
|
| 5829 |
+
2hr6
|
| 5830 |
+
5vse
|
| 5831 |
+
5jsm
|
| 5832 |
+
1ssq
|
| 5833 |
+
3l4u
|
| 5834 |
+
2r3k
|
| 5835 |
+
3e3u
|
| 5836 |
+
4ibi
|
| 5837 |
+
3kgu
|
| 5838 |
+
1tni
|
| 5839 |
+
1dva
|
| 5840 |
+
5mno
|
| 5841 |
+
1qbu
|
| 5842 |
+
3px9
|
| 5843 |
+
5xpm
|
| 5844 |
+
4zjw
|
| 5845 |
+
4gqp
|
| 5846 |
+
3r21
|
| 5847 |
+
3lpp
|
| 5848 |
+
4bdt
|
| 5849 |
+
4ks2
|
| 5850 |
+
2v58
|
| 5851 |
+
3bft
|
| 5852 |
+
1il5
|
| 5853 |
+
5yco
|
| 5854 |
+
4p5e
|
| 5855 |
+
3rhk
|
| 5856 |
+
4q7p
|
| 5857 |
+
4x3h
|
| 5858 |
+
1ro7
|
| 5859 |
+
3lil
|
| 5860 |
+
4mwq
|
| 5861 |
+
3zsw
|
| 5862 |
+
3wf6
|
| 5863 |
+
1tft
|
| 5864 |
+
3hpt
|
| 5865 |
+
5t3n
|
| 5866 |
+
4oo9
|
| 5867 |
+
2b5j
|
| 5868 |
+
2bet
|
| 5869 |
+
3nuy
|
| 5870 |
+
5nea
|
| 5871 |
+
3kdd
|
| 5872 |
+
5ls6
|
| 5873 |
+
6f2n
|
| 5874 |
+
4rac
|
| 5875 |
+
4j47
|
| 5876 |
+
3iut
|
| 5877 |
+
4uvv
|
| 5878 |
+
2i4v
|
| 5879 |
+
5k48
|
| 5880 |
+
2hpa
|
| 5881 |
+
1mrs
|
| 5882 |
+
6fv4
|
| 5883 |
+
1q8u
|
| 5884 |
+
5aqz
|
| 5885 |
+
4ytc
|
| 5886 |
+
1ibc
|
| 5887 |
+
2v95
|
| 5888 |
+
3me9
|
| 5889 |
+
4kn0
|
| 5890 |
+
1ai5
|
| 5891 |
+
1n51
|
| 5892 |
+
1gnm
|
| 5893 |
+
4zur
|
| 5894 |
+
4zsj
|
| 5895 |
+
3v8w
|
| 5896 |
+
4tzm
|
| 5897 |
+
5ih9
|
| 5898 |
+
3gen
|
| 5899 |
+
4r6t
|
| 5900 |
+
5u7i
|
| 5901 |
+
4e81
|
| 5902 |
+
3v01
|
| 5903 |
+
3n4b
|
| 5904 |
+
1jmi
|
| 5905 |
+
4dcx
|
| 5906 |
+
4mzl
|
| 5907 |
+
5tcj
|
| 5908 |
+
2w54
|
| 5909 |
+
4o13
|
| 5910 |
+
5ufc
|
| 5911 |
+
2xs0
|
| 5912 |
+
4flp
|
| 5913 |
+
3o8p
|
| 5914 |
+
3tpu
|
| 5915 |
+
5yft
|
| 5916 |
+
5mql
|
| 5917 |
+
5w6e
|
| 5918 |
+
4pqn
|
| 5919 |
+
4m48
|
| 5920 |
+
2iiv
|
| 5921 |
+
4ohm
|
| 5922 |
+
4a9c
|
| 5923 |
+
6en4
|
| 5924 |
+
3wk4
|
| 5925 |
+
3p50
|
| 5926 |
+
3bpr
|
| 5927 |
+
5wys
|
| 5928 |
+
4oel
|
| 5929 |
+
4u01
|
| 5930 |
+
4ibc
|
| 5931 |
+
4jpy
|
| 5932 |
+
1q41
|
| 5933 |
+
2gvd
|
| 5934 |
+
5aip
|
| 5935 |
+
5i0b
|
| 5936 |
+
4zyt
|
| 5937 |
+
4yxd
|
| 5938 |
+
4qt1
|
| 5939 |
+
4w4y
|
| 5940 |
+
3e5u
|
| 5941 |
+
3kqd
|
| 5942 |
+
4uj2
|
| 5943 |
+
3k4q
|
| 5944 |
+
3b0w
|
| 5945 |
+
3t64
|
| 5946 |
+
1hty
|
| 5947 |
+
5dgm
|
| 5948 |
+
3bjm
|
| 5949 |
+
1b8o
|
| 5950 |
+
4l3p
|
| 5951 |
+
2b1z
|
| 5952 |
+
4yyi
|
| 5953 |
+
5ceh
|
| 5954 |
+
5ny6
|
| 5955 |
+
3ebl
|
| 5956 |
+
4bo8
|
| 5957 |
+
3fup
|
| 5958 |
+
5ot9
|
| 5959 |
+
3eax
|
| 5960 |
+
3vhk
|
| 5961 |
+
3cda
|
| 5962 |
+
5c1y
|
| 5963 |
+
1obx
|
| 5964 |
+
4tzn
|
| 5965 |
+
3qd3
|
| 5966 |
+
3wka
|
| 5967 |
+
3ihz
|
| 5968 |
+
5iis
|
| 5969 |
+
5vo6
|
| 5970 |
+
5tc0
|
| 5971 |
+
3fqk
|
| 5972 |
+
1dm2
|
| 5973 |
+
4y63
|
| 5974 |
+
6mvx
|
| 5975 |
+
4ucu
|
| 5976 |
+
5oht
|
| 5977 |
+
1n8u
|
| 5978 |
+
4xua
|
| 5979 |
+
2ybp
|
| 5980 |
+
2cgf
|
| 5981 |
+
1hn2
|
| 5982 |
+
3d4q
|
| 5983 |
+
4qp2
|
| 5984 |
+
4gsy
|
| 5985 |
+
2psv
|
| 5986 |
+
5lck
|
| 5987 |
+
1xxe
|
| 5988 |
+
5trg
|
| 5989 |
+
5dp6
|
| 5990 |
+
2lnw
|
| 5991 |
+
5kr2
|
| 5992 |
+
4utx
|
| 5993 |
+
1a8t
|
| 5994 |
+
4qw5
|
| 5995 |
+
2itt
|
| 5996 |
+
4m7y
|
| 5997 |
+
3kqo
|
| 5998 |
+
5ku3
|
| 5999 |
+
1oit
|
| 6000 |
+
1w1t
|
| 6001 |
+
4j26
|
| 6002 |
+
4yh3
|
| 6003 |
+
3nzw
|
| 6004 |
+
3vw9
|
| 6005 |
+
1csr
|
| 6006 |
+
2we3
|
| 6007 |
+
6mnf
|
| 6008 |
+
4ijh
|
| 6009 |
+
2jdk
|
| 6010 |
+
4kcg
|
| 6011 |
+
1kmv
|
| 6012 |
+
1mxu
|
| 6013 |
+
4ovh
|
| 6014 |
+
4mg8
|
| 6015 |
+
3lka
|
| 6016 |
+
4twy
|
| 6017 |
+
5vsb
|
| 6018 |
+
4ofl
|
| 6019 |
+
6cdm
|
| 6020 |
+
2f3k
|
| 6021 |
+
4tkg
|
| 6022 |
+
5w4e
|
| 6023 |
+
3zot
|
| 6024 |
+
2yak
|
| 6025 |
+
2pv1
|
| 6026 |
+
3avz
|
| 6027 |
+
2q8g
|
| 6028 |
+
3ilq
|
| 6029 |
+
3zw3
|
| 6030 |
+
1qbr
|
| 6031 |
+
4q08
|
| 6032 |
+
3zpu
|
| 6033 |
+
3rtp
|
| 6034 |
+
4b95
|
| 6035 |
+
2f9b
|
| 6036 |
+
2uz9
|
| 6037 |
+
4ehg
|
| 6038 |
+
4xsz
|
| 6039 |
+
6f22
|
| 6040 |
+
1tet
|
| 6041 |
+
3f3w
|
| 6042 |
+
4abd
|
| 6043 |
+
5jsg
|
| 6044 |
+
4op3
|
| 6045 |
+
6fsy
|
| 6046 |
+
3twv
|
| 6047 |
+
5d25
|
| 6048 |
+
2vto
|
| 6049 |
+
2oa0
|
| 6050 |
+
6csq
|
| 6051 |
+
6dh3
|
| 6052 |
+
6c7e
|
| 6053 |
+
5jq9
|
| 6054 |
+
4tmf
|
| 6055 |
+
4brx
|
| 6056 |
+
5zku
|
| 6057 |
+
2ghg
|
| 6058 |
+
2gvj
|
| 6059 |
+
4wnm
|
| 6060 |
+
5ayf
|
| 6061 |
+
4osf
|
| 6062 |
+
1i90
|
| 6063 |
+
3lzs
|
| 6064 |
+
3gdt
|
| 6065 |
+
6eru
|
| 6066 |
+
5ops
|
| 6067 |
+
2mpm
|
| 6068 |
+
4cc7
|
| 6069 |
+
2vvs
|
| 6070 |
+
1o4d
|
| 6071 |
+
4bkz
|
| 6072 |
+
6ep4
|
| 6073 |
+
4aom
|
| 6074 |
+
1wc6
|
| 6075 |
+
2pl9
|
| 6076 |
+
2p53
|
| 6077 |
+
4zyr
|
| 6078 |
+
6g9a
|
| 6079 |
+
3gqz
|
| 6080 |
+
4mlt
|
| 6081 |
+
1mf4
|
| 6082 |
+
1xxh
|
| 6083 |
+
3p3g
|
| 6084 |
+
4xu3
|
| 6085 |
+
3g0c
|
| 6086 |
+
5dbm
|
| 6087 |
+
5g60
|
| 6088 |
+
4a9u
|
| 6089 |
+
4gj7
|
| 6090 |
+
4nwd
|
| 6091 |
+
4ei4
|
| 6092 |
+
5yjf
|
| 6093 |
+
2won
|
| 6094 |
+
2f2c
|
| 6095 |
+
3mg8
|
| 6096 |
+
3i06
|
| 6097 |
+
2wa4
|
| 6098 |
+
3hzm
|
| 6099 |
+
3wkc
|
| 6100 |
+
4j7e
|
| 6101 |
+
3dxm
|
| 6102 |
+
3m9f
|
| 6103 |
+
4clp
|
| 6104 |
+
5a09
|
| 6105 |
+
5tg5
|
| 6106 |
+
1njf
|
| 6107 |
+
3ce0
|
| 6108 |
+
1tpz
|
| 6109 |
+
6bh0
|
| 6110 |
+
3su1
|
| 6111 |
+
2vtn
|
| 6112 |
+
5yp6
|
| 6113 |
+
4i74
|
| 6114 |
+
5zk8
|
| 6115 |
+
2al5
|
| 6116 |
+
3bh9
|
| 6117 |
+
4djo
|
| 6118 |
+
5m4u
|
| 6119 |
+
1wva
|
| 6120 |
+
3uvx
|
| 6121 |
+
2aod
|
| 6122 |
+
1pqc
|
| 6123 |
+
4n4t
|
| 6124 |
+
5fdi
|
| 6125 |
+
1lcp
|
| 6126 |
+
4ere
|
| 6127 |
+
1ci7
|
| 6128 |
+
2qju
|
| 6129 |
+
1ftj
|
| 6130 |
+
5hk9
|
| 6131 |
+
5y3o
|
| 6132 |
+
6ayn
|
| 6133 |
+
5os1
|
| 6134 |
+
2qm9
|
| 6135 |
+
2x6j
|
| 6136 |
+
5ei2
|
| 6137 |
+
5izc
|
| 6138 |
+
3u18
|
| 6139 |
+
4zek
|
| 6140 |
+
4s1g
|
| 6141 |
+
2wq4
|
| 6142 |
+
1nfy
|
| 6143 |
+
6bnt
|
| 6144 |
+
4jai
|
| 6145 |
+
2jb6
|
| 6146 |
+
5ahw
|
| 6147 |
+
4uit
|
| 6148 |
+
3bmn
|
| 6149 |
+
6gxu
|
| 6150 |
+
4gki
|
| 6151 |
+
3ccb
|
| 6152 |
+
4ezq
|
| 6153 |
+
5v7a
|
| 6154 |
+
4az3
|
| 6155 |
+
4ucr
|
| 6156 |
+
4pnt
|
| 6157 |
+
2wtj
|
| 6158 |
+
5amn
|
| 6159 |
+
4ux6
|
| 6160 |
+
3t8v
|
| 6161 |
+
1i33
|
| 6162 |
+
3r7n
|
| 6163 |
+
5nvy
|
| 6164 |
+
4aq3
|
| 6165 |
+
4d8z
|
| 6166 |
+
3wdz
|
| 6167 |
+
5nib
|
| 6168 |
+
6b8y
|
| 6169 |
+
1v2h
|
| 6170 |
+
5jpt
|
| 6171 |
+
1xge
|
| 6172 |
+
4umn
|
| 6173 |
+
2xrw
|
| 6174 |
+
2vvn
|
| 6175 |
+
3t07
|
| 6176 |
+
2vpp
|
| 6177 |
+
3zcz
|
| 6178 |
+
4ydq
|
| 6179 |
+
2dbl
|
| 6180 |
+
3ho9
|
| 6181 |
+
5w88
|
| 6182 |
+
2g71
|
| 6183 |
+
2lk1
|
| 6184 |
+
4ai5
|
| 6185 |
+
1o41
|
| 6186 |
+
1tsv
|
| 6187 |
+
5al3
|
| 6188 |
+
4fk7
|
| 6189 |
+
2o4z
|
| 6190 |
+
4a51
|
| 6191 |
+
4gwk
|
| 6192 |
+
3own
|
| 6193 |
+
1lcj
|
| 6194 |
+
1hk3
|
| 6195 |
+
3kbz
|
| 6196 |
+
4rhy
|
| 6197 |
+
4yoi
|
| 6198 |
+
3prf
|
| 6199 |
+
3slz
|
| 6200 |
+
2qwg
|
| 6201 |
+
1bdq
|
| 6202 |
+
4emt
|
| 6203 |
+
6bec
|
| 6204 |
+
1qf2
|
| 6205 |
+
2r0u
|
| 6206 |
+
1f2o
|
| 6207 |
+
1b9t
|
| 6208 |
+
2c1p
|
| 6209 |
+
1hn4
|
| 6210 |
+
6c2y
|
| 6211 |
+
4pg3
|
| 6212 |
+
3uw4
|
| 6213 |
+
1wdn
|
| 6214 |
+
3mho
|
| 6215 |
+
4yl1
|
| 6216 |
+
6cex
|
| 6217 |
+
3b7r
|
| 6218 |
+
4pop
|
| 6219 |
+
5f02
|
| 6220 |
+
3d7b
|
| 6221 |
+
4nal
|
| 6222 |
+
2zft
|
| 6223 |
+
4oya
|
| 6224 |
+
3otf
|
| 6225 |
+
2xhm
|
| 6226 |
+
5c20
|
| 6227 |
+
1zrz
|
| 6228 |
+
4rqk
|
| 6229 |
+
5k8o
|
| 6230 |
+
1tvo
|
| 6231 |
+
6fnr
|
| 6232 |
+
4zlo
|
| 6233 |
+
1nyx
|
| 6234 |
+
5aad
|
| 6235 |
+
3kqw
|
| 6236 |
+
3nw7
|
| 6237 |
+
6e59
|
| 6238 |
+
5ti6
|
| 6239 |
+
1ywr
|
| 6240 |
+
2xn5
|
| 6241 |
+
4cy1
|
| 6242 |
+
4rpv
|
| 6243 |
+
1yfz
|
| 6244 |
+
5e3a
|
| 6245 |
+
4a7c
|
| 6246 |
+
5ljq
|
| 6247 |
+
4yje
|
| 6248 |
+
1gi6
|
| 6249 |
+
2b1p
|
| 6250 |
+
1kc5
|
| 6251 |
+
3kpw
|
| 6252 |
+
1q5k
|
| 6253 |
+
4aml
|
| 6254 |
+
4ibg
|
| 6255 |
+
6e49
|
| 6256 |
+
2w8y
|
| 6257 |
+
3kmm
|
| 6258 |
+
2w6o
|
| 6259 |
+
3hdn
|
| 6260 |
+
1u8t
|
| 6261 |
+
4e8y
|
| 6262 |
+
4gj6
|
| 6263 |
+
3vf8
|
| 6264 |
+
4hbm
|
| 6265 |
+
4g3f
|
| 6266 |
+
4eok
|
| 6267 |
+
2rox
|
| 6268 |
+
1t48
|
| 6269 |
+
2xd6
|
| 6270 |
+
2fjn
|
| 6271 |
+
4z9l
|
| 6272 |
+
5ndb
|
| 6273 |
+
4j77
|
| 6274 |
+
3ui7
|
| 6275 |
+
5e73
|
| 6276 |
+
1h1h
|
| 6277 |
+
4ep2
|
| 6278 |
+
3kdc
|
| 6279 |
+
4jnm
|
| 6280 |
+
5h9s
|
| 6281 |
+
3cth
|
| 6282 |
+
2j34
|
| 6283 |
+
1h62
|
| 6284 |
+
6bil
|
| 6285 |
+
3mhc
|
| 6286 |
+
4zw8
|
| 6287 |
+
3g90
|
| 6288 |
+
4f7v
|
| 6289 |
+
4eh8
|
| 6290 |
+
5wkh
|
| 6291 |
+
4ycm
|
| 6292 |
+
4mk8
|
| 6293 |
+
2xj0
|
| 6294 |
+
2tpi
|
| 6295 |
+
4clz
|
| 6296 |
+
4ko8
|
| 6297 |
+
5qaj
|
| 6298 |
+
5dus
|
| 6299 |
+
2uzl
|
| 6300 |
+
1k1l
|
| 6301 |
+
4g95
|
| 6302 |
+
5g10
|
| 6303 |
+
4nnr
|
| 6304 |
+
3e64
|
| 6305 |
+
1fq8
|
| 6306 |
+
4c71
|
| 6307 |
+
4b0c
|
| 6308 |
+
3ft3
|
| 6309 |
+
1ybg
|
| 6310 |
+
4y2t
|
| 6311 |
+
5mme
|
| 6312 |
+
5dk4
|
| 6313 |
+
4dds
|
| 6314 |
+
3ggw
|
| 6315 |
+
5axi
|
| 6316 |
+
3ueo
|
| 6317 |
+
2r9b
|
| 6318 |
+
3ppj
|
| 6319 |
+
1meu
|
| 6320 |
+
4z7o
|
| 6321 |
+
3n8k
|
| 6322 |
+
2jup
|
| 6323 |
+
3p9j
|
| 6324 |
+
3hxe
|
| 6325 |
+
1e06
|
| 6326 |
+
3ika
|
| 6327 |
+
3su3
|
| 6328 |
+
5yyf
|
| 6329 |
+
4kc4
|
| 6330 |
+
2uy4
|
| 6331 |
+
4z7h
|
| 6332 |
+
5hcv
|
| 6333 |
+
5ir1
|
| 6334 |
+
1f9g
|
| 6335 |
+
5law
|
| 6336 |
+
3o99
|
| 6337 |
+
1yci
|
| 6338 |
+
6mu3
|
| 6339 |
+
3g3d
|
| 6340 |
+
3aje
|
| 6341 |
+
2o5d
|
| 6342 |
+
3b7i
|
| 6343 |
+
6af9
|
| 6344 |
+
2gvz
|
| 6345 |
+
4qmt
|
| 6346 |
+
3qj0
|
| 6347 |
+
2y9q
|
| 6348 |
+
5wg8
|
| 6349 |
+
4bzr
|
| 6350 |
+
1k6t
|
| 6351 |
+
3fui
|
| 6352 |
+
1lek
|
| 6353 |
+
5kql
|
| 6354 |
+
5d21
|
| 6355 |
+
1iig
|
| 6356 |
+
5ewh
|
| 6357 |
+
2gh6
|
| 6358 |
+
4x6y
|
| 6359 |
+
4mbl
|
| 6360 |
+
2v54
|
| 6361 |
+
5ug9
|
| 6362 |
+
3hvk
|
| 6363 |
+
3qts
|
| 6364 |
+
3q43
|
| 6365 |
+
4nvq
|
| 6366 |
+
3co9
|
| 6367 |
+
4dwb
|
| 6368 |
+
5jbi
|
| 6369 |
+
2il2
|
| 6370 |
+
1yt9
|
| 6371 |
+
3t6j
|
| 6372 |
+
2zu5
|
| 6373 |
+
5buj
|
| 6374 |
+
2xiy
|
| 6375 |
+
2fs9
|
| 6376 |
+
1al8
|
| 6377 |
+
4cig
|
| 6378 |
+
3cgy
|
| 6379 |
+
4mdq
|
| 6380 |
+
3st6
|
| 6381 |
+
4dzy
|
| 6382 |
+
1m0o
|
| 6383 |
+
1lkk
|
| 6384 |
+
4uu7
|
| 6385 |
+
1zky
|
| 6386 |
+
5d9l
|
| 6387 |
+
1zd3
|
| 6388 |
+
5t6f
|
| 6389 |
+
1g2o
|
| 6390 |
+
2i1m
|
| 6391 |
+
3khv
|
| 6392 |
+
3o88
|
| 6393 |
+
5fi2
|
| 6394 |
+
3idp
|
| 6395 |
+
2pov
|
| 6396 |
+
2qe4
|
| 6397 |
+
3qtz
|
| 6398 |
+
5alm
|
| 6399 |
+
3fts
|
| 6400 |
+
2x4u
|
| 6401 |
+
4o45
|
| 6402 |
+
1tkx
|
| 6403 |
+
2q8h
|
| 6404 |
+
4eop
|
| 6405 |
+
5akk
|
| 6406 |
+
5wtt
|
| 6407 |
+
3h9o
|
| 6408 |
+
3v4x
|
| 6409 |
+
5o0s
|
| 6410 |
+
5b6c
|
| 6411 |
+
4bch
|
| 6412 |
+
5nkg
|
| 6413 |
+
5c4k
|
| 6414 |
+
2br1
|
| 6415 |
+
6ajg
|
| 6416 |
+
3whw
|
| 6417 |
+
6b59
|
| 6418 |
+
5aan
|
| 6419 |
+
6fo5
|
| 6420 |
+
4ddh
|
| 6421 |
+
4pmp
|
| 6422 |
+
5i0l
|
| 6423 |
+
2xk9
|
| 6424 |
+
5mon
|
| 6425 |
+
2xib
|
| 6426 |
+
3c2r
|
| 6427 |
+
5mfr
|
| 6428 |
+
2ra6
|
| 6429 |
+
2xl2
|
| 6430 |
+
4w5a
|
| 6431 |
+
3zm6
|
| 6432 |
+
3zm5
|
| 6433 |
+
3zhz
|
| 6434 |
+
3ogq
|
| 6435 |
+
4rrq
|
| 6436 |
+
4wxi
|
| 6437 |
+
4asd
|
| 6438 |
+
5txy
|
| 6439 |
+
5u0w
|
| 6440 |
+
4xta
|
| 6441 |
+
3t2v
|
| 6442 |
+
2q9m
|
| 6443 |
+
5lyw
|
| 6444 |
+
3hmv
|
| 6445 |
+
6g92
|
| 6446 |
+
1z6f
|
| 6447 |
+
5a46
|
| 6448 |
+
3gnw
|
| 6449 |
+
5k03
|
| 6450 |
+
2bgn
|
| 6451 |
+
3ohi
|
| 6452 |
+
3acw
|
| 6453 |
+
3hng
|
| 6454 |
+
4j06
|
| 6455 |
+
1owj
|
| 6456 |
+
1gbt
|
| 6457 |
+
4fmo
|
| 6458 |
+
4g8v
|
| 6459 |
+
2fzc
|
| 6460 |
+
2vx0
|
| 6461 |
+
2o1c
|
| 6462 |
+
5unp
|
| 6463 |
+
4o7c
|
| 6464 |
+
4rxe
|
| 6465 |
+
5t90
|
| 6466 |
+
5jyo
|
| 6467 |
+
2wzy
|
| 6468 |
+
1tfq
|
| 6469 |
+
3s78
|
| 6470 |
+
2ei6
|
| 6471 |
+
3peq
|
| 6472 |
+
3o9f
|
| 6473 |
+
1mq6
|
| 6474 |
+
1k6v
|
| 6475 |
+
5m39
|
| 6476 |
+
3wz7
|
| 6477 |
+
1nwl
|
| 6478 |
+
3ard
|
| 6479 |
+
5e3d
|
| 6480 |
+
4ogn
|
| 6481 |
+
2woq
|
| 6482 |
+
3iw5
|
| 6483 |
+
2h15
|
| 6484 |
+
3avh
|
| 6485 |
+
2azr
|
| 6486 |
+
5vsk
|
| 6487 |
+
6ckr
|
| 6488 |
+
4up5
|
| 6489 |
+
4n84
|
| 6490 |
+
4eh2
|
| 6491 |
+
4cpw
|
| 6492 |
+
5f91
|
| 6493 |
+
4k3r
|
| 6494 |
+
4dgn
|
| 6495 |
+
2yer
|
| 6496 |
+
5ayt
|
| 6497 |
+
4cpz
|
| 6498 |
+
4y29
|
| 6499 |
+
5tq6
|
| 6500 |
+
3roc
|
| 6501 |
+
5lwm
|
| 6502 |
+
3gjd
|
| 6503 |
+
4zip
|
| 6504 |
+
6exm
|
| 6505 |
+
5qak
|
| 6506 |
+
4u0n
|
| 6507 |
+
5wo4
|
| 6508 |
+
1rrw
|
| 6509 |
+
5hls
|
| 6510 |
+
3pjc
|
| 6511 |
+
1cin
|
| 6512 |
+
3b2w
|
| 6513 |
+
1kmy
|
| 6514 |
+
4ys7
|
| 6515 |
+
4f0c
|
| 6516 |
+
3vqu
|
| 6517 |
+
3uoh
|
| 6518 |
+
1qr3
|
| 6519 |
+
5i8b
|
| 6520 |
+
2aay
|
| 6521 |
+
4jlm
|
| 6522 |
+
3wnr
|
| 6523 |
+
4en4
|
| 6524 |
+
5w0f
|
| 6525 |
+
4lxz
|
| 6526 |
+
4cjr
|
| 6527 |
+
4jj7
|
| 6528 |
+
2h5i
|
| 6529 |
+
3ezv
|
| 6530 |
+
3hkw
|
| 6531 |
+
3uxm
|
| 6532 |
+
5hjd
|
| 6533 |
+
4b5d
|
| 6534 |
+
2vtt
|
| 6535 |
+
2y1n
|
| 6536 |
+
5nin
|
| 6537 |
+
3uvo
|
| 6538 |
+
5anw
|
| 6539 |
+
5nvw
|
| 6540 |
+
4b9k
|
| 6541 |
+
1j4p
|
| 6542 |
+
4io5
|
| 6543 |
+
4dkp
|
| 6544 |
+
2jdt
|
| 6545 |
+
1iem
|
| 6546 |
+
4lm0
|
| 6547 |
+
5nr8
|
| 6548 |
+
6f7t
|
| 6549 |
+
5ort
|
| 6550 |
+
1h1q
|
| 6551 |
+
4twd
|
| 6552 |
+
4anm
|
| 6553 |
+
1e1v
|
| 6554 |
+
4tki
|
| 6555 |
+
4wv6
|
| 6556 |
+
4htx
|
| 6557 |
+
2qw1
|
| 6558 |
+
1t13
|
| 6559 |
+
4euo
|
| 6560 |
+
6gy1
|
| 6561 |
+
4civ
|
| 6562 |
+
5y8z
|
| 6563 |
+
5knv
|
| 6564 |
+
5hn7
|
| 6565 |
+
4kqr
|
| 6566 |
+
5vb5
|
| 6567 |
+
3f5p
|
| 6568 |
+
2kfh
|
| 6569 |
+
4b3u
|
| 6570 |
+
5nf5
|
| 6571 |
+
5mes
|
| 6572 |
+
4w55
|
| 6573 |
+
1sv3
|
| 6574 |
+
3p7a
|
| 6575 |
+
6frf
|
| 6576 |
+
1o2t
|
| 6577 |
+
5afm
|
| 6578 |
+
3cow
|
| 6579 |
+
6f9g
|
| 6580 |
+
3ds1
|
| 6581 |
+
6a6w
|
| 6582 |
+
2g8r
|
| 6583 |
+
6e05
|
| 6584 |
+
5cau
|
| 6585 |
+
1s38
|
| 6586 |
+
1zge
|
| 6587 |
+
1me4
|
| 6588 |
+
3cj4
|
| 6589 |
+
4pr5
|
| 6590 |
+
4gzf
|
| 6591 |
+
1q54
|
| 6592 |
+
5l9i
|
| 6593 |
+
1z6j
|
| 6594 |
+
2ez5
|
| 6595 |
+
3unk
|
| 6596 |
+
3o86
|
| 6597 |
+
4j51
|
| 6598 |
+
6fyi
|
| 6599 |
+
3cj2
|
| 6600 |
+
5ekm
|
| 6601 |
+
3f9y
|
| 6602 |
+
2uy5
|
| 6603 |
+
3u7l
|
| 6604 |
+
2brb
|
| 6605 |
+
5g45
|
| 6606 |
+
5jgd
|
| 6607 |
+
4ikt
|
| 6608 |
+
5opu
|
| 6609 |
+
4mwc
|
| 6610 |
+
1t7d
|
| 6611 |
+
2viq
|
| 6612 |
+
3ocp
|
| 6613 |
+
3wuv
|
| 6614 |
+
4yay
|
| 6615 |
+
2vh6
|
| 6616 |
+
2ra0
|
| 6617 |
+
5fwg
|
| 6618 |
+
1o3c
|
| 6619 |
+
2mip
|
| 6620 |
+
4bi0
|
| 6621 |
+
1kll
|
| 6622 |
+
3qtq
|
| 6623 |
+
3ho2
|
| 6624 |
+
3vw7
|
| 6625 |
+
6h37
|
| 6626 |
+
4rlw
|
| 6627 |
+
6dak
|
| 6628 |
+
3p3t
|
| 6629 |
+
3m11
|
| 6630 |
+
6fdp
|
| 6631 |
+
4c73
|
| 6632 |
+
5ksx
|
| 6633 |
+
1okl
|
| 6634 |
+
4gqq
|
| 6635 |
+
2x2i
|
| 6636 |
+
3sji
|
| 6637 |
+
1hkj
|
| 6638 |
+
5hu9
|
| 6639 |
+
5vom
|
| 6640 |
+
5fdo
|
| 6641 |
+
1uu7
|
| 6642 |
+
5iay
|
| 6643 |
+
5nu5
|
| 6644 |
+
4q06
|
| 6645 |
+
3apc
|
| 6646 |
+
1re8
|
| 6647 |
+
5uey
|
| 6648 |
+
1tcx
|
| 6649 |
+
3w0l
|
| 6650 |
+
3s45
|
| 6651 |
+
3n5j
|
| 6652 |
+
2p59
|
| 6653 |
+
1rhj
|
| 6654 |
+
5d0r
|
| 6655 |
+
2qp6
|
| 6656 |
+
2vqj
|
| 6657 |
+
4d3h
|
| 6658 |
+
2zaz
|
| 6659 |
+
2zir
|
| 6660 |
+
5alt
|
| 6661 |
+
5w5j
|
| 6662 |
+
3ekw
|
| 6663 |
+
4xas
|
| 6664 |
+
5mwy
|
| 6665 |
+
4mwy
|
| 6666 |
+
2z1w
|
| 6667 |
+
1d4t
|
| 6668 |
+
4yk0
|
| 6669 |
+
3hnb
|
| 6670 |
+
6g36
|
| 6671 |
+
1rhr
|
| 6672 |
+
3tzd
|
| 6673 |
+
2r3f
|
| 6674 |
+
6bcr
|
| 6675 |
+
5idp
|
| 6676 |
+
4n6z
|
| 6677 |
+
3qcx
|
| 6678 |
+
4z89
|
| 6679 |
+
4pkb
|
| 6680 |
+
1xoz
|
| 6681 |
+
3d9z
|
| 6682 |
+
2vnp
|
| 6683 |
+
4meq
|
| 6684 |
+
3so6
|
| 6685 |
+
3tiw
|
| 6686 |
+
6h7u
|
| 6687 |
+
3u0d
|
| 6688 |
+
1ur9
|
| 6689 |
+
4lar
|
| 6690 |
+
4css
|
| 6691 |
+
2a3c
|
| 6692 |
+
1h3a
|
| 6693 |
+
4aqc
|
| 6694 |
+
5l2s
|
| 6695 |
+
5vi6
|
| 6696 |
+
6cmr
|
| 6697 |
+
4e3m
|
| 6698 |
+
3vgc
|
| 6699 |
+
3chs
|
| 6700 |
+
5uc4
|
| 6701 |
+
2x6e
|
| 6702 |
+
5ey9
|
| 6703 |
+
4ih5
|
| 6704 |
+
1mto
|
| 6705 |
+
4qwi
|
| 6706 |
+
5nai
|
| 6707 |
+
4ym4
|
| 6708 |
+
3prz
|
| 6709 |
+
2nnp
|
| 6710 |
+
1vij
|
| 6711 |
+
5bvd
|
| 6712 |
+
5swf
|
| 6713 |
+
3pn3
|
| 6714 |
+
2pql
|
| 6715 |
+
3ekv
|
| 6716 |
+
5c7n
|
| 6717 |
+
1els
|
| 6718 |
+
5bqi
|
| 6719 |
+
4k5m
|
| 6720 |
+
1o2k
|
| 6721 |
+
4x69
|
| 6722 |
+
5mts
|
| 6723 |
+
1e9h
|
| 6724 |
+
4ykj
|
| 6725 |
+
6i3s
|
| 6726 |
+
1zpb
|
| 6727 |
+
4u70
|
| 6728 |
+
4wph
|
| 6729 |
+
4xy9
|
| 6730 |
+
4ith
|
| 6731 |
+
5aph
|
| 6732 |
+
5cy3
|
| 6733 |
+
3fat
|
| 6734 |
+
4ly1
|
| 6735 |
+
2vti
|
| 6736 |
+
1f0q
|
| 6737 |
+
4wsy
|
| 6738 |
+
1b7h
|
| 6739 |
+
3ej1
|
| 6740 |
+
3rux
|
| 6741 |
+
2vvu
|
| 6742 |
+
5a3t
|
| 6743 |
+
4qtd
|
| 6744 |
+
5a5s
|
| 6745 |
+
3t7g
|
| 6746 |
+
4gah
|
| 6747 |
+
5flq
|
| 6748 |
+
2hw2
|
| 6749 |
+
4hl5
|
| 6750 |
+
2imd
|
| 6751 |
+
1ndv
|
| 6752 |
+
5yji
|
| 6753 |
+
2hxm
|
| 6754 |
+
4jpx
|
| 6755 |
+
4uix
|
| 6756 |
+
2iv9
|
| 6757 |
+
4dwk
|
| 6758 |
+
4qvp
|
| 6759 |
+
2c1b
|
| 6760 |
+
5cw8
|
| 6761 |
+
3h22
|
| 6762 |
+
2d3z
|
| 6763 |
+
2oc9
|
| 6764 |
+
4ehz
|
| 6765 |
+
1ett
|
| 6766 |
+
1fdq
|
| 6767 |
+
2avo
|
| 6768 |
+
4r0a
|
| 6769 |
+
1zzz
|
| 6770 |
+
3moe
|
| 6771 |
+
5muc
|
| 6772 |
+
3zqi
|
| 6773 |
+
6f08
|
| 6774 |
+
6dif
|
| 6775 |
+
3mf5
|
| 6776 |
+
5qas
|
| 6777 |
+
3fu5
|
| 6778 |
+
3rv9
|
| 6779 |
+
5urj
|
| 6780 |
+
5oxk
|
| 6781 |
+
4y3y
|
| 6782 |
+
4tyb
|
| 6783 |
+
2glm
|
| 6784 |
+
2xnb
|
| 6785 |
+
3tt0
|
| 6786 |
+
6gmq
|
| 6787 |
+
4i5p
|
| 6788 |
+
2wnc
|
| 6789 |
+
1f92
|
| 6790 |
+
3psd
|
| 6791 |
+
5hcy
|
| 6792 |
+
2wot
|
| 6793 |
+
1q6n
|
| 6794 |
+
4tmn
|
| 6795 |
+
1tmn
|
| 6796 |
+
3zep
|
| 6797 |
+
1c88
|
| 6798 |
+
4fea
|
| 6799 |
+
5nwb
|
| 6800 |
+
1ppc
|
| 6801 |
+
5myx
|
| 6802 |
+
4ci3
|
| 6803 |
+
1z6d
|
| 6804 |
+
4kww
|
| 6805 |
+
4ixu
|
| 6806 |
+
1leg
|
| 6807 |
+
3a5y
|
| 6808 |
+
4bo3
|
| 6809 |
+
5cyi
|
| 6810 |
+
5uzj
|
| 6811 |
+
4mw0
|
| 6812 |
+
5j18
|
| 6813 |
+
5ml3
|
| 6814 |
+
2bmk
|
| 6815 |
+
2brc
|
| 6816 |
+
5oxm
|
| 6817 |
+
1bn1
|
| 6818 |
+
4qlv
|
| 6819 |
+
5wxf
|
| 6820 |
+
5yib
|
| 6821 |
+
3ig7
|
| 6822 |
+
4pgb
|
| 6823 |
+
4p73
|
| 6824 |
+
2y4s
|
| 6825 |
+
5tex
|
| 6826 |
+
3b4f
|
| 6827 |
+
4zyx
|
| 6828 |
+
3poa
|
| 6829 |
+
4m0y
|
| 6830 |
+
5e91
|
| 6831 |
+
1yc5
|
| 6832 |
+
3iqi
|
| 6833 |
+
1o2s
|
| 6834 |
+
1pmu
|
| 6835 |
+
4l58
|
| 6836 |
+
4mx1
|
| 6837 |
+
5am4
|
| 6838 |
+
2hs2
|
| 6839 |
+
4yne
|
| 6840 |
+
1ctt
|
| 6841 |
+
2zm1
|
| 6842 |
+
1lxf
|
| 6843 |
+
2f1b
|
| 6844 |
+
4zw5
|
| 6845 |
+
2oh0
|
| 6846 |
+
5n2z
|
| 6847 |
+
3f19
|
| 6848 |
+
2obf
|
| 6849 |
+
6f9t
|
| 6850 |
+
5vee
|
| 6851 |
+
4oyo
|
| 6852 |
+
5moq
|
| 6853 |
+
3ft4
|
| 6854 |
+
6cbg
|
| 6855 |
+
5wuu
|
| 6856 |
+
5f5b
|
| 6857 |
+
4xs2
|
| 6858 |
+
4rwl
|
| 6859 |
+
4hpi
|
| 6860 |
+
4e34
|
| 6861 |
+
2fxr
|
| 6862 |
+
5jy0
|
| 6863 |
+
5e1m
|
| 6864 |
+
5oq6
|
| 6865 |
+
1nhz
|
| 6866 |
+
3pjt
|
| 6867 |
+
5jzi
|
| 6868 |
+
4elf
|
| 6869 |
+
4fxy
|
| 6870 |
+
6dpz
|
| 6871 |
+
3mo0
|
| 6872 |
+
2waj
|
| 6873 |
+
2fqt
|
| 6874 |
+
3mxy
|
| 6875 |
+
1y91
|
| 6876 |
+
6biv
|
| 6877 |
+
5xjm
|
| 6878 |
+
3zmi
|
| 6879 |
+
4wn5
|
| 6880 |
+
2fv5
|
| 6881 |
+
3p3u
|
| 6882 |
+
3ryj
|
| 6883 |
+
3b95
|
| 6884 |
+
3i5r
|
| 6885 |
+
2ajl
|
| 6886 |
+
6c99
|
| 6887 |
+
3bx5
|
| 6888 |
+
2rf2
|
| 6889 |
+
2j2i
|
| 6890 |
+
5g0q
|
| 6891 |
+
4yuw
|
| 6892 |
+
2r3g
|
| 6893 |
+
2bgr
|
| 6894 |
+
4l9i
|
| 6895 |
+
4igk
|
| 6896 |
+
4qfp
|
| 6897 |
+
5ty8
|
| 6898 |
+
4odm
|
| 6899 |
+
3hs8
|
| 6900 |
+
4ele
|
| 6901 |
+
5zia
|
| 6902 |
+
2pvj
|
| 6903 |
+
5w5k
|
| 6904 |
+
4m5l
|
| 6905 |
+
3wz8
|
| 6906 |
+
1nfs
|
| 6907 |
+
5w7j
|
| 6908 |
+
4jfw
|
| 6909 |
+
6eda
|
| 6910 |
+
5m5d
|
| 6911 |
+
3kag
|
| 6912 |
+
4zam
|
| 6913 |
+
6eed
|
| 6914 |
+
2jbp
|
| 6915 |
+
4z3v
|
| 6916 |
+
2pu0
|
| 6917 |
+
4f14
|
| 6918 |
+
1pxh
|
| 6919 |
+
2hwp
|
| 6920 |
+
1u9e
|
| 6921 |
+
4wko
|
| 6922 |
+
5kqx
|
| 6923 |
+
1tog
|
| 6924 |
+
2xf0
|
| 6925 |
+
4uof
|
| 6926 |
+
4io7
|
| 6927 |
+
4ds1
|
| 6928 |
+
5diq
|
| 6929 |
+
3ti5
|
| 6930 |
+
4x8u
|
| 6931 |
+
3avm
|
| 6932 |
+
3kqm
|
| 6933 |
+
4zph
|
| 6934 |
+
2wou
|
| 6935 |
+
4jyu
|
| 6936 |
+
3ip8
|
| 6937 |
+
5ti3
|
| 6938 |
+
4cku
|
| 6939 |
+
3vdb
|
| 6940 |
+
3chd
|
| 6941 |
+
4i5h
|
| 6942 |
+
5i2r
|
| 6943 |
+
3tyv
|
| 6944 |
+
4uwk
|
| 6945 |
+
5j6a
|
| 6946 |
+
3lk0
|
| 6947 |
+
5k13
|
| 6948 |
+
2hj4
|
| 6949 |
+
3bfu
|
| 6950 |
+
4ajk
|
| 6951 |
+
3n45
|
| 6952 |
+
3te5
|
| 6953 |
+
5v9p
|
| 6954 |
+
2bzz
|
| 6955 |
+
3a6t
|
| 6956 |
+
1hvl
|
| 6957 |
+
5a2i
|
| 6958 |
+
4mx9
|
| 6959 |
+
4g8y
|
| 6960 |
+
4dko
|
| 6961 |
+
3kdb
|
| 6962 |
+
1lf9
|
| 6963 |
+
4qmp
|
| 6964 |
+
3w32
|
| 6965 |
+
4d2d
|
| 6966 |
+
4ksp
|
| 6967 |
+
6f8b
|
| 6968 |
+
3h6z
|
| 6969 |
+
5k0h
|
| 6970 |
+
3h2n
|
| 6971 |
+
5ke0
|
| 6972 |
+
5w4w
|
| 6973 |
+
5mxr
|
| 6974 |
+
5igl
|
| 6975 |
+
2h9p
|
| 6976 |
+
4iuu
|
| 6977 |
+
4d8a
|
| 6978 |
+
3dvp
|
| 6979 |
+
1lox
|
| 6980 |
+
3ao4
|
| 6981 |
+
4h38
|
| 6982 |
+
4bdb
|
| 6983 |
+
4j1p
|
| 6984 |
+
1o4b
|
| 6985 |
+
4mro
|
| 6986 |
+
5eel
|
| 6987 |
+
2ay3
|
| 6988 |
+
3e8n
|
| 6989 |
+
6g8m
|
| 6990 |
+
2pyy
|
| 6991 |
+
4m0z
|
| 6992 |
+
2mas
|
| 6993 |
+
4ks3
|
| 6994 |
+
4p72
|
| 6995 |
+
6ccx
|
| 6996 |
+
6aqq
|
| 6997 |
+
6em6
|
| 6998 |
+
4rqz
|
| 6999 |
+
4hkk
|
| 7000 |
+
2q6f
|
| 7001 |
+
5he1
|
| 7002 |
+
2wu6
|
| 7003 |
+
2pj7
|
| 7004 |
+
1bty
|
| 7005 |
+
1ule
|
| 7006 |
+
5ezg
|
| 7007 |
+
3atw
|
| 7008 |
+
3s7b
|
| 7009 |
+
4x48
|
| 7010 |
+
1kug
|
| 7011 |
+
2hdu
|
| 7012 |
+
1bmq
|
| 7013 |
+
3odu
|
| 7014 |
+
3l9m
|
| 7015 |
+
2ihj
|
| 7016 |
+
5mgk
|
| 7017 |
+
2n1g
|
| 7018 |
+
6hk6
|
| 7019 |
+
1mq5
|
| 7020 |
+
4kp5
|
| 7021 |
+
1yyr
|
| 7022 |
+
5al4
|
| 7023 |
+
2z4b
|
| 7024 |
+
6axl
|
| 7025 |
+
5mym
|
| 7026 |
+
4gv8
|
| 7027 |
+
2cni
|
| 7028 |
+
4xub
|
| 7029 |
+
6cjh
|
| 7030 |
+
3ig1
|
| 7031 |
+
4m2r
|
| 7032 |
+
5ime
|
| 7033 |
+
6d6u
|
| 7034 |
+
1utj
|
| 7035 |
+
1dfo
|
| 7036 |
+
5anu
|
| 7037 |
+
2ogz
|
| 7038 |
+
5vad
|
| 7039 |
+
2x9e
|
| 7040 |
+
6epz
|
| 7041 |
+
4yv2
|
| 7042 |
+
2jdh
|
| 7043 |
+
1w13
|
| 7044 |
+
4eos
|
| 7045 |
+
4x1f
|
| 7046 |
+
4gao
|
| 7047 |
+
5c8k
|
| 7048 |
+
4k5n
|
| 7049 |
+
3pj1
|
| 7050 |
+
1x7a
|
| 7051 |
+
1h9z
|
| 7052 |
+
6gjm
|
| 7053 |
+
3ipe
|
| 7054 |
+
3fuj
|
| 7055 |
+
5g11
|
| 7056 |
+
4asj
|
| 7057 |
+
4ybs
|
| 7058 |
+
5gmi
|
| 7059 |
+
3d04
|
| 7060 |
+
2fie
|
| 7061 |
+
3ary
|
| 7062 |
+
4anb
|
| 7063 |
+
6hmg
|
| 7064 |
+
4n9c
|
| 7065 |
+
2lsk
|
| 7066 |
+
2ksb
|
| 7067 |
+
5mwd
|
| 7068 |
+
1gfw
|
| 7069 |
+
5gja
|
| 7070 |
+
2f7o
|
| 7071 |
+
3mzc
|
| 7072 |
+
4nmq
|
| 7073 |
+
4qz7
|
| 7074 |
+
1bug
|
| 7075 |
+
5jf3
|
| 7076 |
+
2qft
|
| 7077 |
+
5jop
|
| 7078 |
+
4jaj
|
| 7079 |
+
3s9i
|
| 7080 |
+
6cvy
|
| 7081 |
+
3oaf
|
| 7082 |
+
5ta6
|
| 7083 |
+
5mae
|
| 7084 |
+
5m2v
|
| 7085 |
+
4zxy
|
| 7086 |
+
1g9r
|
| 7087 |
+
3pch
|
| 7088 |
+
5kkt
|
| 7089 |
+
2fqo
|
| 7090 |
+
5mn1
|
| 7091 |
+
3e3b
|
| 7092 |
+
4jlj
|
| 7093 |
+
4zga
|
| 7094 |
+
4jlg
|
| 7095 |
+
1pf7
|
| 7096 |
+
5ewd
|
| 7097 |
+
4dhf
|
| 7098 |
+
4ywa
|
| 7099 |
+
5fxr
|
| 7100 |
+
4e1k
|
| 7101 |
+
2zmd
|
| 7102 |
+
4jyv
|
| 7103 |
+
6iik
|
| 7104 |
+
3zxe
|
| 7105 |
+
2vx1
|
| 7106 |
+
3hav
|
| 7107 |
+
1trd
|
| 7108 |
+
3u10
|
| 7109 |
+
2a0c
|
| 7110 |
+
3ly2
|
| 7111 |
+
6f6n
|
| 7112 |
+
4rj5
|
| 7113 |
+
2xn6
|
| 7114 |
+
1irs
|
| 7115 |
+
2ntf
|
| 7116 |
+
3rum
|
| 7117 |
+
6iin
|
| 7118 |
+
5hzx
|
| 7119 |
+
1urw
|
| 7120 |
+
4umr
|
| 7121 |
+
5apr
|
| 7122 |
+
5ngt
|
| 7123 |
+
3q71
|
| 7124 |
+
3og7
|
| 7125 |
+
4oeg
|
| 7126 |
+
2ow6
|
| 7127 |
+
3sff
|
| 7128 |
+
3eu7
|
| 7129 |
+
2zju
|
| 7130 |
+
5tx5
|
| 7131 |
+
3v8t
|
| 7132 |
+
1nqc
|
| 7133 |
+
3t2c
|
| 7134 |
+
4afh
|
| 7135 |
+
3vhu
|
| 7136 |
+
2d1x
|
| 7137 |
+
2h2h
|
| 7138 |
+
5nvz
|
| 7139 |
+
4qy8
|
| 7140 |
+
3w2s
|
| 7141 |
+
4w9h
|
| 7142 |
+
4r6v
|
| 7143 |
+
5esq
|
| 7144 |
+
5v5d
|
| 7145 |
+
4q07
|
| 7146 |
+
4jrv
|
| 7147 |
+
2in6
|
| 7148 |
+
3bki
|
| 7149 |
+
6ayd
|
| 7150 |
+
4zyc
|
| 7151 |
+
4ybm
|
| 7152 |
+
6min
|
| 7153 |
+
3ibu
|
| 7154 |
+
5kde
|
| 7155 |
+
1br5
|
| 7156 |
+
1ujj
|
| 7157 |
+
1lan
|
| 7158 |
+
1gwq
|
| 7159 |
+
4av0
|
| 7160 |
+
4qpd
|
| 7161 |
+
4urk
|
| 7162 |
+
4uct
|
| 7163 |
+
5uwj
|
| 7164 |
+
4b8y
|
| 7165 |
+
5nvc
|
| 7166 |
+
6f23
|
| 7167 |
+
5w0e
|
| 7168 |
+
2hdq
|
| 7169 |
+
5c6p
|
| 7170 |
+
2amt
|
| 7171 |
+
2rgu
|
| 7172 |
+
6g9n
|
| 7173 |
+
6hu2
|
| 7174 |
+
3v7d
|
| 7175 |
+
5iz9
|
| 7176 |
+
2vev
|
| 7177 |
+
3dej
|
| 7178 |
+
2hxq
|
| 7179 |
+
4mvx
|
| 7180 |
+
2c3l
|
| 7181 |
+
5c29
|
| 7182 |
+
1we2
|
| 7183 |
+
6f9u
|
| 7184 |
+
2f80
|
| 7185 |
+
2ooz
|
| 7186 |
+
4py2
|
| 7187 |
+
4zcw
|
| 7188 |
+
5kx7
|
| 7189 |
+
4m13
|
| 7190 |
+
4kz6
|
| 7191 |
+
5ypw
|
| 7192 |
+
1qf5
|
| 7193 |
+
2zdn
|
| 7194 |
+
2i4w
|
| 7195 |
+
2xu3
|
| 7196 |
+
1wbn
|
| 7197 |
+
5lt9
|
| 7198 |
+
3zmv
|
| 7199 |
+
4efs
|
| 7200 |
+
7cpa
|
| 7201 |
+
2i72
|
| 7202 |
+
4rpo
|
| 7203 |
+
2h13
|
| 7204 |
+
4obo
|
| 7205 |
+
3f88
|
| 7206 |
+
6g47
|
| 7207 |
+
2y6d
|
| 7208 |
+
3o57
|
| 7209 |
+
5a0c
|
| 7210 |
+
2fl6
|
| 7211 |
+
5mja
|
| 7212 |
+
5g22
|
| 7213 |
+
3g9l
|
| 7214 |
+
1y2a
|
| 7215 |
+
5znp
|
| 7216 |
+
1hi4
|
| 7217 |
+
6ay5
|
| 7218 |
+
3f5k
|
| 7219 |
+
3et7
|
| 7220 |
+
6bvb
|
| 7221 |
+
5u94
|
| 7222 |
+
1m1b
|
| 7223 |
+
3ryv
|
| 7224 |
+
2peh
|
| 7225 |
+
4ppa
|
| 7226 |
+
5e1e
|
| 7227 |
+
3i51
|
| 7228 |
+
5fb0
|
| 7229 |
+
1rtl
|
| 7230 |
+
2i4p
|
| 7231 |
+
1a9m
|
| 7232 |
+
6au5
|
| 7233 |
+
4jyt
|
| 7234 |
+
4k3k
|
| 7235 |
+
2c5o
|
| 7236 |
+
2qu6
|
| 7237 |
+
4hwr
|
| 7238 |
+
6bqk
|
| 7239 |
+
4bqh
|
| 7240 |
+
4dea
|
| 7241 |
+
6gzm
|
| 7242 |
+
3fuk
|
| 7243 |
+
5y3n
|
| 7244 |
+
2ypi
|
| 7245 |
+
1b3l
|
| 7246 |
+
4o44
|
| 7247 |
+
2e1w
|
| 7248 |
+
4zz2
|
| 7249 |
+
4ucs
|
| 7250 |
+
5arg
|
| 7251 |
+
3sl0
|
| 7252 |
+
3p3r
|
| 7253 |
+
6an1
|
| 7254 |
+
3eor
|
| 7255 |
+
2uue
|
| 7256 |
+
1n4h
|
| 7257 |
+
3gk2
|
| 7258 |
+
3oot
|
| 7259 |
+
5i5z
|
| 7260 |
+
3hv3
|
| 7261 |
+
2hzi
|
| 7262 |
+
5yc2
|
| 7263 |
+
1g48
|
| 7264 |
+
5wbq
|
| 7265 |
+
4axm
|
| 7266 |
+
6g91
|
| 7267 |
+
4q0l
|
| 7268 |
+
6df2
|
| 7269 |
+
1d8e
|
| 7270 |
+
1lpg
|
| 7271 |
+
1gmy
|
| 7272 |
+
5yy4
|
| 7273 |
+
6ht8
|
| 7274 |
+
3orn
|
| 7275 |
+
2x5o
|
| 7276 |
+
1pk0
|
| 7277 |
+
1xt3
|
| 7278 |
+
4lkd
|
| 7279 |
+
1v2n
|
| 7280 |
+
4ufl
|
| 7281 |
+
5bsk
|
| 7282 |
+
3uza
|
| 7283 |
+
2w8f
|
| 7284 |
+
4nka
|
| 7285 |
+
6bef
|
| 7286 |
+
5oaj
|
| 7287 |
+
5abw
|
| 7288 |
+
6bkh
|
| 7289 |
+
5l30
|
| 7290 |
+
2clx
|
| 7291 |
+
4an1
|
| 7292 |
+
4ufz
|
| 7293 |
+
5nk6
|
| 7294 |
+
1syi
|
| 7295 |
+
3zsy
|
| 7296 |
+
5ul5
|
| 7297 |
+
3irx
|
| 7298 |
+
5ux4
|
| 7299 |
+
3e2m
|
| 7300 |
+
4bdc
|
| 7301 |
+
5fut
|
| 7302 |
+
3lgp
|
| 7303 |
+
2ivz
|
| 7304 |
+
2ydi
|
| 7305 |
+
3zxv
|
| 7306 |
+
4g2w
|
| 7307 |
+
3o95
|
| 7308 |
+
6gu4
|
| 7309 |
+
2doo
|
| 7310 |
+
3n5e
|
| 7311 |
+
2y5k
|
| 7312 |
+
3g0g
|
| 7313 |
+
4a9r
|
| 7314 |
+
1qfs
|
| 7315 |
+
5mli
|
| 7316 |
+
4o7b
|
| 7317 |
+
3ty0
|
| 7318 |
+
4uil
|
| 7319 |
+
3st5
|
| 7320 |
+
3f36
|
| 7321 |
+
6fqo
|
| 7322 |
+
6ghj
|
| 7323 |
+
1h8y
|
| 7324 |
+
2pq9
|
| 7325 |
+
5em3
|
| 7326 |
+
3juo
|
| 7327 |
+
1rlp
|
| 7328 |
+
1z9g
|
| 7329 |
+
1yvf
|
| 7330 |
+
4cqg
|
| 7331 |
+
3srb
|
| 7332 |
+
6fmk
|
| 7333 |
+
4o6e
|
| 7334 |
+
5xmv
|
| 7335 |
+
1ke9
|
| 7336 |
+
1mfd
|
| 7337 |
+
1a5h
|
| 7338 |
+
4b1j
|
| 7339 |
+
3up2
|
| 7340 |
+
2ay5
|
| 7341 |
+
5mqx
|
| 7342 |
+
3lpu
|
| 7343 |
+
3bmq
|
| 7344 |
+
5alp
|
| 7345 |
+
4qj0
|
| 7346 |
+
4j5b
|
| 7347 |
+
3pz4
|
| 7348 |
+
5ee8
|
| 7349 |
+
1ka7
|
| 7350 |
+
4np2
|
| 7351 |
+
3bwj
|
| 7352 |
+
5a0e
|
| 7353 |
+
5all
|
| 7354 |
+
4jze
|
| 7355 |
+
2xa4
|
| 7356 |
+
3b3x
|
| 7357 |
+
3sm0
|
| 7358 |
+
4yp8
|
| 7359 |
+
4bc5
|
| 7360 |
+
5f08
|
| 7361 |
+
4qw6
|
| 7362 |
+
5ofv
|
| 7363 |
+
4y83
|
| 7364 |
+
4y4j
|
| 7365 |
+
6cfc
|
| 7366 |
+
5eth
|
| 7367 |
+
1oky
|
| 7368 |
+
2ban
|
| 7369 |
+
4blb
|
| 7370 |
+
3f38
|
| 7371 |
+
3g0w
|
| 7372 |
+
2uy3
|
| 7373 |
+
6fdq
|
| 7374 |
+
4qmx
|
| 7375 |
+
4bo5
|
| 7376 |
+
2oph
|
| 7377 |
+
5t9w
|
| 7378 |
+
4av4
|
| 7379 |
+
2wxv
|
| 7380 |
+
5v8p
|
| 7381 |
+
4eqj
|
| 7382 |
+
4aoc
|
| 7383 |
+
5jk3
|
| 7384 |
+
4xtx
|
| 7385 |
+
2aoj
|
| 7386 |
+
1v7a
|
| 7387 |
+
1sqa
|
| 7388 |
+
3lzv
|
| 7389 |
+
3bum
|
| 7390 |
+
3fxw
|
| 7391 |
+
1b6j
|
| 7392 |
+
3f80
|
| 7393 |
+
4or0
|
| 7394 |
+
6dq5
|
| 7395 |
+
5lsx
|
| 7396 |
+
4bdd
|
| 7397 |
+
4x8v
|
| 7398 |
+
4od9
|
| 7399 |
+
3vye
|
| 7400 |
+
2yix
|
| 7401 |
+
2k4i
|
| 7402 |
+
3wci
|
| 7403 |
+
4z0f
|
| 7404 |
+
5dp7
|
| 7405 |
+
5llo
|
| 7406 |
+
3d20
|
| 7407 |
+
2ymd
|
| 7408 |
+
4hn2
|
| 7409 |
+
5n31
|
| 7410 |
+
4ffs
|
| 7411 |
+
4b5b
|
| 7412 |
+
4i73
|
| 7413 |
+
1fkw
|
| 7414 |
+
2pqb
|
| 7415 |
+
2i19
|
| 7416 |
+
1cbx
|
| 7417 |
+
1gsz
|
| 7418 |
+
5ws3
|
| 7419 |
+
5u4g
|
| 7420 |
+
4h3q
|
| 7421 |
+
4uvy
|
| 7422 |
+
4mfe
|
| 7423 |
+
4guj
|
| 7424 |
+
3c8a
|
| 7425 |
+
4ll3
|
| 7426 |
+
4pp0
|
| 7427 |
+
5hk1
|
| 7428 |
+
4rux
|
| 7429 |
+
3coj
|
| 7430 |
+
4kpx
|
| 7431 |
+
3d6p
|
| 7432 |
+
1o79
|
| 7433 |
+
3nti
|
| 7434 |
+
5hl9
|
| 7435 |
+
2e27
|
| 7436 |
+
3wdd
|
| 7437 |
+
3azb
|
| 7438 |
+
1lnm
|
| 7439 |
+
4x24
|
| 7440 |
+
4tsx
|
| 7441 |
+
1bqn
|
| 7442 |
+
4e8w
|
| 7443 |
+
5xyf
|
| 7444 |
+
4uco
|
| 7445 |
+
1eas
|
| 7446 |
+
2b7f
|
| 7447 |
+
4m5u
|
| 7448 |
+
3ipu
|
| 7449 |
+
2xog
|
| 7450 |
+
4p5d
|
| 7451 |
+
4bcp
|
| 7452 |
+
5thj
|
| 7453 |
+
3tam
|
| 7454 |
+
4jsc
|
| 7455 |
+
3ug2
|
| 7456 |
+
3c3r
|
| 7457 |
+
5e7n
|
| 7458 |
+
1ofz
|
| 7459 |
+
3uo4
|
| 7460 |
+
2buc
|
| 7461 |
+
3gr2
|
| 7462 |
+
2m0u
|
| 7463 |
+
6f7q
|
| 7464 |
+
1sme
|
| 7465 |
+
3h3c
|
| 7466 |
+
1xff
|
| 7467 |
+
4oc6
|
| 7468 |
+
3b3c
|
| 7469 |
+
1xa5
|
| 7470 |
+
1fe3
|
| 7471 |
+
5olb
|
| 7472 |
+
5a85
|
| 7473 |
+
2oz2
|
| 7474 |
+
4d8i
|
| 7475 |
+
5swh
|
| 7476 |
+
1inc
|
| 7477 |
+
4obz
|
| 7478 |
+
2clh
|
| 7479 |
+
4i5c
|
| 7480 |
+
1p28
|
| 7481 |
+
5j7s
|
| 7482 |
+
4iq6
|
| 7483 |
+
1hyz
|
| 7484 |
+
2pe2
|
| 7485 |
+
4f3c
|
| 7486 |
+
4ih7
|
| 7487 |
+
4puj
|
| 7488 |
+
4w9i
|
| 7489 |
+
3qrj
|
| 7490 |
+
4dcd
|
| 7491 |
+
4azg
|
| 7492 |
+
6azl
|
| 7493 |
+
6dxx
|
| 7494 |
+
4sga
|
| 7495 |
+
1gja
|
| 7496 |
+
5k4j
|
| 7497 |
+
4yef
|
| 7498 |
+
1sbg
|
| 7499 |
+
3s00
|
| 7500 |
+
5fcw
|
| 7501 |
+
4txe
|
| 7502 |
+
5xxf
|
| 7503 |
+
5jic
|
| 7504 |
+
4abh
|
| 7505 |
+
3nzx
|
| 7506 |
+
3q4l
|
| 7507 |
+
5edl
|
| 7508 |
+
3pcf
|
| 7509 |
+
3qqu
|
| 7510 |
+
1hgj
|
| 7511 |
+
2qch
|
| 7512 |
+
2yim
|
| 7513 |
+
4dtt
|
| 7514 |
+
4l09
|
| 7515 |
+
3f07
|
| 7516 |
+
4hki
|
| 7517 |
+
4cl6
|
| 7518 |
+
5amd
|
| 7519 |
+
1ym1
|
| 7520 |
+
1s64
|
| 7521 |
+
2w6t
|
| 7522 |
+
3tl5
|
| 7523 |
+
1w1y
|
| 7524 |
+
4eoh
|
| 7525 |
+
5i58
|
| 7526 |
+
4z5w
|
| 7527 |
+
6hu1
|
| 7528 |
+
3wav
|
| 7529 |
+
1nxy
|
| 7530 |
+
3n4c
|
| 7531 |
+
5c91
|
| 7532 |
+
2kzu
|
| 7533 |
+
5kre
|
| 7534 |
+
2jkk
|
| 7535 |
+
4hkp
|
| 7536 |
+
4y2y
|
| 7537 |
+
4w54
|
| 7538 |
+
4few
|
| 7539 |
+
1q95
|
| 7540 |
+
4bnu
|
| 7541 |
+
3lp7
|
| 7542 |
+
1gni
|
| 7543 |
+
2idw
|
| 7544 |
+
2xy9
|
| 7545 |
+
2q6b
|
| 7546 |
+
3uxd
|
| 7547 |
+
3ao1
|
| 7548 |
+
3v2o
|
| 7549 |
+
5ewj
|
| 7550 |
+
1i00
|
| 7551 |
+
2rgp
|
| 7552 |
+
2xln
|
| 7553 |
+
6ma5
|
| 7554 |
+
4d2w
|
| 7555 |
+
6eww
|
| 7556 |
+
4lkq
|
| 7557 |
+
4o7a
|
| 7558 |
+
5m4k
|
| 7559 |
+
1wm1
|
| 7560 |
+
4fzj
|
| 7561 |
+
2nyr
|
| 7562 |
+
2mc1
|
| 7563 |
+
2qky
|
| 7564 |
+
6eh2
|
| 7565 |
+
4glw
|
| 7566 |
+
3arb
|
| 7567 |
+
2vin
|
| 7568 |
+
4rss
|
| 7569 |
+
3q7j
|
| 7570 |
+
2h4g
|
| 7571 |
+
2x2c
|
| 7572 |
+
1gt4
|
| 7573 |
+
1fo0
|
| 7574 |
+
1ftk
|
| 7575 |
+
4kby
|
| 7576 |
+
1ch8
|
| 7577 |
+
5fnu
|
| 7578 |
+
3sv7
|
| 7579 |
+
5uah
|
| 7580 |
+
2wpa
|
| 7581 |
+
1a4h
|
| 7582 |
+
5lhg
|
| 7583 |
+
6eeh
|
| 7584 |
+
4mse
|
| 7585 |
+
3nnv
|
| 7586 |
+
6eq4
|
| 7587 |
+
5f1l
|
| 7588 |
+
3q0z
|
| 7589 |
+
2uzd
|
| 7590 |
+
1r0p
|
| 7591 |
+
1j16
|
| 7592 |
+
2qhz
|
| 7593 |
+
5h0b
|
| 7594 |
+
4nyt
|
| 7595 |
+
5ij7
|
| 7596 |
+
4l2l
|
| 7597 |
+
1afl
|
| 7598 |
+
5vd0
|
| 7599 |
+
2ves
|
| 7600 |
+
5tyn
|
| 7601 |
+
3gf2
|
| 7602 |
+
3nuo
|
| 7603 |
+
5w2p
|
| 7604 |
+
4uv9
|
| 7605 |
+
1dmb
|
| 7606 |
+
3b7u
|
| 7607 |
+
4xbd
|
| 7608 |
+
5m6h
|
| 7609 |
+
4zh3
|
| 7610 |
+
5d0c
|
| 7611 |
+
2e7l
|
| 7612 |
+
3kba
|
| 7613 |
+
5nhh
|
| 7614 |
+
1p0y
|
| 7615 |
+
2l7u
|
| 7616 |
+
3wns
|
| 7617 |
+
5u4d
|
| 7618 |
+
3axk
|
| 7619 |
+
4qpl
|
| 7620 |
+
5jt2
|
| 7621 |
+
2rk7
|
| 7622 |
+
5lss
|
| 7623 |
+
3sni
|
| 7624 |
+
4pb1
|
| 7625 |
+
3oe4
|
| 7626 |
+
5m63
|
| 7627 |
+
5xg5
|
| 7628 |
+
3lle
|
| 7629 |
+
4bo4
|
| 7630 |
+
4o10
|
| 7631 |
+
2yac
|
| 7632 |
+
4odk
|
| 7633 |
+
2x95
|
| 7634 |
+
5v9t
|
| 7635 |
+
2ig0
|
| 7636 |
+
2zdl
|
| 7637 |
+
4ay6
|
| 7638 |
+
2c1a
|
| 7639 |
+
1nny
|
| 7640 |
+
1nw4
|
| 7641 |
+
3kah
|
| 7642 |
+
4db7
|
| 7643 |
+
3ifl
|
| 7644 |
+
5f9e
|
| 7645 |
+
4p2t
|
| 7646 |
+
4exh
|
| 7647 |
+
6d2o
|
| 7648 |
+
6eee
|
| 7649 |
+
2oi0
|
| 7650 |
+
3lpl
|
| 7651 |
+
4tk3
|
| 7652 |
+
6eux
|
| 7653 |
+
2ojj
|
| 7654 |
+
3pyy
|
| 7655 |
+
3jy0
|
| 7656 |
+
3r5t
|
| 7657 |
+
3qo9
|
| 7658 |
+
4u2w
|
| 7659 |
+
5nme
|
| 7660 |
+
4mre
|
| 7661 |
+
1jtq
|
| 7662 |
+
4ryg
|
| 7663 |
+
3c14
|
| 7664 |
+
5kzi
|
| 7665 |
+
6bix
|
| 7666 |
+
5ldm
|
| 7667 |
+
1o2v
|
| 7668 |
+
4llx
|
| 7669 |
+
3lw0
|
| 7670 |
+
2op9
|
| 7671 |
+
4nhc
|
| 7672 |
+
5wi1
|
| 7673 |
+
4e20
|
| 7674 |
+
5uit
|
| 7675 |
+
6fjf
|
| 7676 |
+
2x2m
|
| 7677 |
+
5l98
|
| 7678 |
+
3qsd
|
| 7679 |
+
5lcj
|
| 7680 |
+
1adl
|
| 7681 |
+
2wa8
|
| 7682 |
+
4q9y
|
| 7683 |
+
2rkg
|
| 7684 |
+
3f70
|
| 7685 |
+
2xk6
|
| 7686 |
+
3juk
|
| 7687 |
+
5uf0
|
| 7688 |
+
3l1s
|
| 7689 |
+
6evr
|
| 7690 |
+
1lag
|
| 7691 |
+
1awi
|
| 7692 |
+
1pmx
|
| 7693 |
+
4kln
|
| 7694 |
+
5g61
|
| 7695 |
+
3wgg
|
| 7696 |
+
4o9v
|
| 7697 |
+
5n69
|
| 7698 |
+
3kqc
|
| 7699 |
+
1him
|
| 7700 |
+
3cd0
|
| 7701 |
+
2kbs
|
| 7702 |
+
2vgc
|
| 7703 |
+
1moq
|
| 7704 |
+
2f10
|
| 7705 |
+
2zb0
|
| 7706 |
+
2d41
|
| 7707 |
+
4bdh
|
| 7708 |
+
1ebz
|
| 7709 |
+
5lcf
|
| 7710 |
+
5u13
|
| 7711 |
+
4qwk
|
| 7712 |
+
4cd8
|
| 7713 |
+
1tvr
|
| 7714 |
+
3pd4
|
| 7715 |
+
1hq5
|
| 7716 |
+
5tiu
|
| 7717 |
+
1o44
|
| 7718 |
+
4btk
|
| 7719 |
+
2cli
|
| 7720 |
+
6cb5
|
| 7721 |
+
4lp6
|
| 7722 |
+
5afk
|
| 7723 |
+
2o3p
|
| 7724 |
+
1zpc
|
| 7725 |
+
3d1z
|
| 7726 |
+
2z3h
|
| 7727 |
+
1hms
|
| 7728 |
+
5t78
|
| 7729 |
+
5hg5
|
| 7730 |
+
3mxe
|
| 7731 |
+
4i7l
|
| 7732 |
+
5lmk
|
| 7733 |
+
3nu9
|
| 7734 |
+
5om9
|
| 7735 |
+
2qi3
|
| 7736 |
+
2nv7
|
| 7737 |
+
2wkt
|
| 7738 |
+
1aq1
|
| 7739 |
+
3ddq
|
| 7740 |
+
1vyj
|
| 7741 |
+
5ods
|
| 7742 |
+
1b8n
|
| 7743 |
+
4xhl
|
| 7744 |
+
4jft
|
| 7745 |
+
3n7o
|
| 7746 |
+
5j9z
|
| 7747 |
+
5ufr
|
| 7748 |
+
4qjm
|
| 7749 |
+
4uvx
|
| 7750 |
+
2yb9
|
| 7751 |
+
3qaa
|
| 7752 |
+
5mwo
|
| 7753 |
+
2onz
|
| 7754 |
+
2hiw
|
| 7755 |
+
2agv
|
| 7756 |
+
4qgg
|
| 7757 |
+
5x28
|
| 7758 |
+
1o4a
|
| 7759 |
+
3t5u
|
| 7760 |
+
4klv
|
| 7761 |
+
3lgl
|
| 7762 |
+
2wey
|
| 7763 |
+
1qk3
|
| 7764 |
+
1rti
|
| 7765 |
+
3clp
|
| 7766 |
+
4f8h
|
| 7767 |
+
4qwj
|
| 7768 |
+
1bmb
|
| 7769 |
+
5a5z
|
| 7770 |
+
2yiu
|
| 7771 |
+
2z5t
|
| 7772 |
+
3mna
|
| 7773 |
+
4xh2
|
| 7774 |
+
2p95
|
| 7775 |
+
3t09
|
| 7776 |
+
1fmb
|
| 7777 |
+
3s72
|
| 7778 |
+
5ia5
|
| 7779 |
+
4kp6
|
| 7780 |
+
1aqc
|
| 7781 |
+
4xtt
|
| 7782 |
+
1s9t
|
| 7783 |
+
2fhy
|
| 7784 |
+
4del
|
| 7785 |
+
3ao5
|
| 7786 |
+
4xii
|
| 7787 |
+
5cp9
|
| 7788 |
+
3vb4
|
| 7789 |
+
5el2
|
| 7790 |
+
2uup
|
| 7791 |
+
4z0d
|
| 7792 |
+
4jv6
|
| 7793 |
+
3e8u
|
| 7794 |
+
3ubd
|
| 7795 |
+
1tys
|
| 7796 |
+
4p10
|
| 7797 |
+
5ekj
|
| 7798 |
+
4bpi
|
| 7799 |
+
3n23
|
| 7800 |
+
3amb
|
| 7801 |
+
4gue
|
| 7802 |
+
4n8q
|
| 7803 |
+
1a9q
|
| 7804 |
+
2vtq
|
| 7805 |
+
1h1d
|
| 7806 |
+
5uw5
|
| 7807 |
+
4ty9
|
| 7808 |
+
6bfp
|
| 7809 |
+
2yiv
|
| 7810 |
+
3tfu
|
| 7811 |
+
5xyz
|
| 7812 |
+
2q1l
|
| 7813 |
+
4m7j
|
| 7814 |
+
3m8u
|
| 7815 |
+
5kbf
|
| 7816 |
+
3btl
|
| 7817 |
+
4ctj
|
| 7818 |
+
2ajb
|
| 7819 |
+
3unz
|
| 7820 |
+
2xgs
|
| 7821 |
+
5g1c
|
| 7822 |
+
5w19
|
| 7823 |
+
5vqw
|
| 7824 |
+
5yc8
|
| 7825 |
+
1w83
|
| 7826 |
+
6buv
|
| 7827 |
+
4zh2
|
| 7828 |
+
4amw
|
| 7829 |
+
3dnd
|
| 7830 |
+
1nzv
|
| 7831 |
+
6bbx
|
| 7832 |
+
5lgp
|
| 7833 |
+
6ay3
|
| 7834 |
+
1fwv
|
| 7835 |
+
3mg4
|
| 7836 |
+
2z4o
|
| 7837 |
+
3qmk
|
| 7838 |
+
4dma
|
| 7839 |
+
2r3c
|
| 7840 |
+
1d6s
|
| 7841 |
+
6do3
|
| 7842 |
+
1tg5
|
| 7843 |
+
5n7v
|
| 7844 |
+
1g5f
|
| 7845 |
+
1c5q
|
| 7846 |
+
1lb6
|
| 7847 |
+
4pv7
|
| 7848 |
+
3ii5
|
| 7849 |
+
4rab
|
| 7850 |
+
2w10
|
| 7851 |
+
5i86
|
| 7852 |
+
1uho
|
| 7853 |
+
5vsf
|
| 7854 |
+
3g08
|
| 7855 |
+
2zxg
|
| 7856 |
+
3hb4
|
| 7857 |
+
5j4y
|
| 7858 |
+
5lgt
|
| 7859 |
+
6boy
|
| 7860 |
+
5wa8
|
| 7861 |
+
2xdw
|
| 7862 |
+
2i0e
|
| 7863 |
+
4n7j
|
| 7864 |
+
3ptg
|
| 7865 |
+
4yp1
|
| 7866 |
+
5vp9
|
| 7867 |
+
3r02
|
| 7868 |
+
4mw2
|
| 7869 |
+
3djf
|
| 7870 |
+
5t1a
|
| 7871 |
+
5hrw
|
| 7872 |
+
5e2o
|
| 7873 |
+
4xuc
|
| 7874 |
+
3q32
|
| 7875 |
+
6c4g
|
| 7876 |
+
2csn
|
| 7877 |
+
4q9m
|
| 7878 |
+
3vry
|
| 7879 |
+
3fv7
|
| 7880 |
+
5lhh
|
| 7881 |
+
3fh5
|
| 7882 |
+
2qry
|
| 7883 |
+
4f9g
|
| 7884 |
+
4c8r
|
| 7885 |
+
4m6p
|
| 7886 |
+
4riu
|
| 7887 |
+
3dp3
|
| 7888 |
+
2r3i
|
| 7889 |
+
6ew3
|
| 7890 |
+
4bs5
|
| 7891 |
+
4g3e
|
| 7892 |
+
2wvz
|
| 7893 |
+
3wc5
|
| 7894 |
+
3exf
|
| 7895 |
+
4nb6
|
| 7896 |
+
5xmr
|
| 7897 |
+
4ps5
|
| 7898 |
+
4dvi
|
| 7899 |
+
3uvm
|
| 7900 |
+
2xix
|
| 7901 |
+
4c6z
|
| 7902 |
+
5alw
|
| 7903 |
+
3iaw
|
| 7904 |
+
2xnp
|
| 7905 |
+
6ccu
|
| 7906 |
+
6dvo
|
| 7907 |
+
1cgl
|
| 7908 |
+
4xv3
|
| 7909 |
+
5dya
|
| 7910 |
+
3c79
|
| 7911 |
+
2e9a
|
| 7912 |
+
6dil
|
| 7913 |
+
4caf
|
| 7914 |
+
5fov
|
| 7915 |
+
5n3w
|
| 7916 |
+
5ttv
|
| 7917 |
+
6f3b
|
| 7918 |
+
5cas
|
| 7919 |
+
3dz2
|
| 7920 |
+
1xhy
|
| 7921 |
+
5th4
|
| 7922 |
+
2wxn
|
| 7923 |
+
4q3t
|
| 7924 |
+
4yff
|
| 7925 |
+
3bti
|
| 7926 |
+
5vll
|
| 7927 |
+
4hy4
|
| 7928 |
+
6g3y
|
| 7929 |
+
4ykn
|
| 7930 |
+
5wg7
|
| 7931 |
+
1bwb
|
| 7932 |
+
3fnu
|
| 7933 |
+
2gvf
|
| 7934 |
+
6g84
|
| 7935 |
+
6ap6
|
| 7936 |
+
6hpg
|
| 7937 |
+
5if4
|
| 7938 |
+
5wgp
|
| 7939 |
+
3d6q
|
| 7940 |
+
2wer
|
| 7941 |
+
4cfl
|
| 7942 |
+
3uib
|
| 7943 |
+
6fgf
|
| 7944 |
+
5tx3
|
| 7945 |
+
3plu
|
| 7946 |
+
1zzl
|
| 7947 |
+
6db4
|
| 7948 |
+
5tpg
|
| 7949 |
+
3k8o
|
| 7950 |
+
2fxv
|
| 7951 |
+
3c2u
|
| 7952 |
+
1w8l
|
| 7953 |
+
1hiy
|
| 7954 |
+
1di8
|
| 7955 |
+
4cu1
|
| 7956 |
+
5gvl
|
| 7957 |
+
5nvx
|
| 7958 |
+
1bt6
|
| 7959 |
+
6d9x
|
| 7960 |
+
1vjb
|
| 7961 |
+
6b31
|
| 7962 |
+
2qo1
|
| 7963 |
+
3daz
|
| 7964 |
+
5c4t
|
| 7965 |
+
1v0l
|
| 7966 |
+
2qtb
|
| 7967 |
+
5ewy
|
| 7968 |
+
4i9u
|
| 7969 |
+
4afj
|
| 7970 |
+
5hzn
|
| 7971 |
+
4eqc
|
| 7972 |
+
3p8h
|
| 7973 |
+
5y53
|
| 7974 |
+
2za5
|
| 7975 |
+
1kat
|
| 7976 |
+
4kwo
|
| 7977 |
+
3k39
|
| 7978 |
+
4hys
|
| 7979 |
+
4lww
|
| 7980 |
+
3rdq
|
| 7981 |
+
4ega
|
| 7982 |
+
5fsl
|
| 7983 |
+
3ehw
|
| 7984 |
+
5gnk
|
| 7985 |
+
5n8b
|
| 7986 |
+
6h29
|
| 7987 |
+
1dy4
|
| 7988 |
+
3nu6
|
| 7989 |
+
5dxu
|
| 7990 |
+
5jf8
|
| 7991 |
+
2qd9
|
| 7992 |
+
5zaj
|
| 7993 |
+
2xkf
|
| 7994 |
+
1ywi
|
| 7995 |
+
5uiq
|
| 7996 |
+
1xr8
|
| 7997 |
+
4yvz
|
| 7998 |
+
2y82
|
| 7999 |
+
1pkx
|
| 8000 |
+
2qbw
|
| 8001 |
+
1ebg
|
| 8002 |
+
3a4p
|
| 8003 |
+
3ful
|
| 8004 |
+
5qb2
|
| 8005 |
+
6bgx
|
| 8006 |
+
1ela
|
| 8007 |
+
5j59
|
| 8008 |
+
2q63
|
| 8009 |
+
6cmj
|
| 8010 |
+
5nwk
|
| 8011 |
+
3w54
|
| 8012 |
+
4rrg
|
| 8013 |
+
2l1r
|
| 8014 |
+
2wva
|
| 8015 |
+
2vpn
|
| 8016 |
+
1qji
|
| 8017 |
+
4gts
|
| 8018 |
+
6gnw
|
| 8019 |
+
4afg
|
| 8020 |
+
5ty9
|
| 8021 |
+
4q18
|
| 8022 |
+
4ps7
|
| 8023 |
+
1o2p
|
| 8024 |
+
3qak
|
| 8025 |
+
1kak
|
| 8026 |
+
4u5o
|
| 8027 |
+
3h06
|
| 8028 |
+
2b4m
|
| 8029 |
+
4q99
|
| 8030 |
+
4dgb
|
| 8031 |
+
4hw2
|
| 8032 |
+
5uvc
|
| 8033 |
+
1yxd
|
| 8034 |
+
4cix
|
| 8035 |
+
4ehm
|
| 8036 |
+
5his
|
| 8037 |
+
2ihq
|
| 8038 |
+
4kmd
|
| 8039 |
+
5cin
|
| 8040 |
+
5v7i
|
| 8041 |
+
3itz
|
| 8042 |
+
3brn
|
| 8043 |
+
1h4w
|
| 8044 |
+
2ilp
|
| 8045 |
+
5th2
|
| 8046 |
+
1me3
|
| 8047 |
+
5fh7
|
| 8048 |
+
1rgk
|
| 8049 |
+
3iub
|
| 8050 |
+
1njt
|
| 8051 |
+
1ya4
|
| 8052 |
+
2x6i
|
| 8053 |
+
4yl0
|
| 8054 |
+
1at6
|
| 8055 |
+
3pr0
|
| 8056 |
+
4uzh
|
| 8057 |
+
3eyd
|
| 8058 |
+
4j93
|
| 8059 |
+
4dwg
|
| 8060 |
+
5ti2
|
| 8061 |
+
4pcs
|
| 8062 |
+
3qcj
|
| 8063 |
+
4qjo
|
| 8064 |
+
4qer
|
| 8065 |
+
2kp8
|
| 8066 |
+
4o2p
|
| 8067 |
+
4pd6
|
| 8068 |
+
2op3
|
| 8069 |
+
6got
|
| 8070 |
+
2weq
|
| 8071 |
+
5l97
|
| 8072 |
+
6bed
|
| 8073 |
+
4ezt
|
| 8074 |
+
5y8w
|
| 8075 |
+
4rj7
|
| 8076 |
+
2yol
|
| 8077 |
+
5t68
|
| 8078 |
+
2ltx
|
| 8079 |
+
5g17
|
| 8080 |
+
1mxl
|
| 8081 |
+
4c7t
|
| 8082 |
+
3uvu
|
| 8083 |
+
5a3r
|
| 8084 |
+
4yuz
|
| 8085 |
+
4j0r
|
| 8086 |
+
3nf9
|
| 8087 |
+
2fix
|
| 8088 |
+
3pgu
|
| 8089 |
+
4kn1
|
| 8090 |
+
3mi3
|
| 8091 |
+
5dpw
|
| 8092 |
+
3e5a
|
| 8093 |
+
2m3o
|
| 8094 |
+
1y1z
|
| 8095 |
+
5j8i
|
| 8096 |
+
4e5d
|
| 8097 |
+
3cd8
|
| 8098 |
+
2b53
|
| 8099 |
+
4x9r
|
| 8100 |
+
3rtx
|
| 8101 |
+
2zmj
|
| 8102 |
+
1xp6
|
| 8103 |
+
6cks
|
| 8104 |
+
3fw3
|
| 8105 |
+
4znx
|
| 8106 |
+
1q1g
|
| 8107 |
+
5o5m
|
| 8108 |
+
4w4v
|
| 8109 |
+
2w1d
|
| 8110 |
+
5d1j
|
| 8111 |
+
3ipq
|
| 8112 |
+
2ydj
|
| 8113 |
+
1mxo
|
| 8114 |
+
3gv6
|
| 8115 |
+
5l9h
|
| 8116 |
+
3odk
|
| 8117 |
+
6cze
|
| 8118 |
+
6c98
|
| 8119 |
+
2izl
|
| 8120 |
+
2wmx
|
| 8121 |
+
3qa2
|
| 8122 |
+
6gnm
|
| 8123 |
+
3q6z
|
| 8124 |
+
2oqv
|
| 8125 |
+
6g9u
|
| 8126 |
+
5exn
|
| 8127 |
+
3s53
|
| 8128 |
+
5lzg
|
| 8129 |
+
3mo5
|
| 8130 |
+
4x1q
|
| 8131 |
+
3ow3
|
| 8132 |
+
4a4c
|
| 8133 |
+
6ema
|
| 8134 |
+
2h6q
|
| 8135 |
+
2fjm
|
| 8136 |
+
5uez
|
| 8137 |
+
6ep9
|
| 8138 |
+
2q80
|
| 8139 |
+
4nj3
|
| 8140 |
+
4mdr
|
| 8141 |
+
5dri
|
| 8142 |
+
1v2r
|
| 8143 |
+
2vtr
|
| 8144 |
+
1hk4
|
| 8145 |
+
5xfj
|
| 8146 |
+
4z0q
|
| 8147 |
+
4heg
|
| 8148 |
+
5h3q
|
| 8149 |
+
4l7d
|
| 8150 |
+
3nb5
|
| 8151 |
+
3oay
|
| 8152 |
+
4aj4
|
| 8153 |
+
2w9h
|
| 8154 |
+
2iws
|
| 8155 |
+
3oy1
|
| 8156 |
+
2x6d
|
| 8157 |
+
3qpo
|
| 8158 |
+
2hog
|
| 8159 |
+
3rl8
|
| 8160 |
+
5ia4
|
| 8161 |
+
1x70
|
| 8162 |
+
3q5u
|
| 8163 |
+
2i0a
|
| 8164 |
+
2pcp
|
| 8165 |
+
5m77
|
| 8166 |
+
4a50
|
| 8167 |
+
1elb
|
| 8168 |
+
2ydm
|
| 8169 |
+
1rri
|
| 8170 |
+
4k9g
|
| 8171 |
+
2ptz
|
| 8172 |
+
3uo6
|
| 8173 |
+
2qwd
|
| 8174 |
+
5fdz
|
| 8175 |
+
6fiv
|
| 8176 |
+
5ehg
|
| 8177 |
+
1hqh
|
| 8178 |
+
5anv
|
| 8179 |
+
5k5e
|
| 8180 |
+
5aer
|
| 8181 |
+
2zu3
|
| 8182 |
+
6cw4
|
| 8183 |
+
5lwn
|
| 8184 |
+
1i9o
|
| 8185 |
+
1npv
|
| 8186 |
+
4piq
|
| 8187 |
+
2w0j
|
| 8188 |
+
6c5f
|
| 8189 |
+
1w8m
|
| 8190 |
+
5gjd
|
| 8191 |
+
5yql
|
| 8192 |
+
3ns9
|
| 8193 |
+
3zn1
|
| 8194 |
+
4eg7
|
| 8195 |
+
3u4r
|
| 8196 |
+
6hu0
|
| 8197 |
+
4c37
|
| 8198 |
+
4l6q
|
| 8199 |
+
3d9k
|
| 8200 |
+
5ucj
|
| 8201 |
+
3piz
|
| 8202 |
+
3ekt
|
| 8203 |
+
3m2w
|
| 8204 |
+
1mv0
|
| 8205 |
+
6emh
|
| 8206 |
+
1ttv
|
| 8207 |
+
5u12
|
| 8208 |
+
1w1p
|
| 8209 |
+
2i4t
|
| 8210 |
+
4z68
|
| 8211 |
+
1rhq
|
| 8212 |
+
3oxc
|
| 8213 |
+
3adt
|
| 8214 |
+
3r8u
|
| 8215 |
+
1udt
|
| 8216 |
+
1o36
|
| 8217 |
+
4ou3
|
| 8218 |
+
6apz
|
| 8219 |
+
5gmp
|
| 8220 |
+
2l84
|
| 8221 |
+
3o64
|
| 8222 |
+
3uo9
|
| 8223 |
+
6f6r
|
| 8224 |
+
3hy7
|
| 8225 |
+
3cwj
|
| 8226 |
+
3ans
|
| 8227 |
+
1mdl
|
| 8228 |
+
2fsv
|
| 8229 |
+
3p79
|
| 8230 |
+
4q9z
|
| 8231 |
+
6f55
|
| 8232 |
+
1gu1
|
| 8233 |
+
5y6k
|
| 8234 |
+
3ert
|
| 8235 |
+
3ant
|
| 8236 |
+
1azm
|
| 8237 |
+
5a3h
|
| 8238 |
+
3zdv
|
| 8239 |
+
3o1d
|
| 8240 |
+
6ar4
|
| 8241 |
+
2ym4
|
| 8242 |
+
3s3v
|
| 8243 |
+
3fk1
|
| 8244 |
+
3pbb
|
| 8245 |
+
3r6c
|
| 8246 |
+
1xh8
|
| 8247 |
+
5lh4
|
| 8248 |
+
2psu
|
| 8249 |
+
5gty
|
| 8250 |
+
2qi0
|
| 8251 |
+
2nn7
|
| 8252 |
+
5vb6
|
| 8253 |
+
4b8o
|
| 8254 |
+
1zp8
|
| 8255 |
+
5uqv
|
| 8256 |
+
4pci
|
| 8257 |
+
5f5z
|
| 8258 |
+
4rj4
|
| 8259 |
+
6dh4
|
| 8260 |
+
4ea2
|
| 8261 |
+
4y6m
|
| 8262 |
+
5d3p
|
| 8263 |
+
1gsf
|
| 8264 |
+
4l7l
|
| 8265 |
+
1tkt
|
| 8266 |
+
3a29
|
| 8267 |
+
2qci
|
| 8268 |
+
5ljj
|
| 8269 |
+
4gpk
|
| 8270 |
+
3qkv
|
| 8271 |
+
3ern
|
| 8272 |
+
3zhf
|
| 8273 |
+
1f4y
|
| 8274 |
+
6bgg
|
| 8275 |
+
5uz0
|
| 8276 |
+
2znt
|
| 8277 |
+
5du4
|
| 8278 |
+
1utn
|
| 8279 |
+
5ayy
|
| 8280 |
+
1lzo
|
| 8281 |
+
1gym
|
| 8282 |
+
5k8s
|
| 8283 |
+
3kb3
|
| 8284 |
+
3ijy
|
| 8285 |
+
5wio
|
| 8286 |
+
1yei
|
| 8287 |
+
4hlh
|
| 8288 |
+
2eg8
|
| 8289 |
+
5tkd
|
| 8290 |
+
1kpm
|
| 8291 |
+
3uyt
|
| 8292 |
+
3o9l
|
| 8293 |
+
3tcy
|
| 8294 |
+
4qxr
|
| 8295 |
+
5ejv
|
| 8296 |
+
6ekn
|
| 8297 |
+
2fky
|
| 8298 |
+
5vp0
|
| 8299 |
+
4rxa
|
| 8300 |
+
2xpa
|
| 8301 |
+
1kav
|
| 8302 |
+
5vlh
|
| 8303 |
+
4lko
|
| 8304 |
+
4m5r
|
| 8305 |
+
3fhe
|
| 8306 |
+
5sz0
|
| 8307 |
+
3u78
|
| 8308 |
+
3zt3
|
| 8309 |
+
4hkn
|
| 8310 |
+
4h58
|
| 8311 |
+
4f1l
|
| 8312 |
+
2jkh
|
| 8313 |
+
5w13
|
| 8314 |
+
3r88
|
| 8315 |
+
1yvz
|
| 8316 |
+
4qmq
|
| 8317 |
+
5aag
|
| 8318 |
+
1b6m
|
| 8319 |
+
4i47
|
| 8320 |
+
4ca6
|
| 8321 |
+
4ysl
|
| 8322 |
+
4gbz
|
| 8323 |
+
1aj7
|
| 8324 |
+
1wcq
|
| 8325 |
+
3m96
|
| 8326 |
+
6awo
|
| 8327 |
+
5t92
|
| 8328 |
+
6gjl
|
| 8329 |
+
4yxo
|
| 8330 |
+
5l2m
|
| 8331 |
+
4wov
|
| 8332 |
+
3l2y
|
| 8333 |
+
5v5y
|
| 8334 |
+
3a3y
|
| 8335 |
+
2gmx
|
| 8336 |
+
2fo4
|
| 8337 |
+
2h5j
|
| 8338 |
+
2r05
|
| 8339 |
+
1v11
|
| 8340 |
+
5npf
|
| 8341 |
+
1gt5
|
| 8342 |
+
4jfd
|
| 8343 |
+
4l53
|
| 8344 |
+
2qhr
|
| 8345 |
+
2sfp
|
| 8346 |
+
3rx5
|
| 8347 |
+
4e4l
|
| 8348 |
+
4mao
|
| 8349 |
+
3jzj
|
| 8350 |
+
5h9q
|
| 8351 |
+
5one
|
| 8352 |
+
3lp2
|
| 8353 |
+
2i4j
|
| 8354 |
+
5eta
|
| 8355 |
+
4yth
|
| 8356 |
+
3gsg
|
| 8357 |
+
2of4
|
| 8358 |
+
5inh
|
| 8359 |
+
3ed0
|
| 8360 |
+
5llp
|
| 8361 |
+
6esj
|
| 8362 |
+
5c8m
|
| 8363 |
+
3dkf
|
| 8364 |
+
4z6h
|
| 8365 |
+
4x6o
|
| 8366 |
+
2j47
|
| 8367 |
+
1usk
|
| 8368 |
+
3p55
|
| 8369 |
+
5w8j
|
| 8370 |
+
4x1p
|
| 8371 |
+
1xb7
|
| 8372 |
+
1lqe
|
| 8373 |
+
4apr
|
| 8374 |
+
5khm
|
| 8375 |
+
2ga2
|
| 8376 |
+
2ly0
|
| 8377 |
+
4zro
|
| 8378 |
+
4e26
|
| 8379 |
+
4uvw
|
| 8380 |
+
1f5l
|
| 8381 |
+
4ipn
|
| 8382 |
+
4hwt
|
| 8383 |
+
4g90
|
| 8384 |
+
1kds
|
| 8385 |
+
4bck
|
| 8386 |
+
5k6s
|
| 8387 |
+
5eqp
|
| 8388 |
+
1vwn
|
| 8389 |
+
4uye
|
| 8390 |
+
4gtq
|
| 8391 |
+
6fe1
|
| 8392 |
+
6gjj
|
| 8393 |
+
2ynn
|
| 8394 |
+
3hb8
|
| 8395 |
+
2zdm
|
| 8396 |
+
2vu3
|
| 8397 |
+
5alz
|
| 8398 |
+
3eyg
|
| 8399 |
+
4os2
|
| 8400 |
+
1j4q
|
| 8401 |
+
4o36
|
| 8402 |
+
6faf
|
| 8403 |
+
1mx1
|
| 8404 |
+
3lfn
|
| 8405 |
+
2i1r
|
| 8406 |
+
3hzy
|
| 8407 |
+
1a4k
|
| 8408 |
+
4zl4
|
| 8409 |
+
1dub
|
| 8410 |
+
3i90
|
| 8411 |
+
3qgy
|
| 8412 |
+
6gfy
|
| 8413 |
+
1o2u
|
| 8414 |
+
4xyn
|
| 8415 |
+
3rk7
|
| 8416 |
+
6atv
|
| 8417 |
+
2wu7
|
| 8418 |
+
4tnw
|
| 8419 |
+
2uwo
|
| 8420 |
+
3my1
|
| 8421 |
+
5w0l
|
| 8422 |
+
4dli
|
| 8423 |
+
4tw8
|
| 8424 |
+
6b8u
|
| 8425 |
+
5jg1
|
| 8426 |
+
2azm
|
| 8427 |
+
4qw3
|
| 8428 |
+
5ntk
|
| 8429 |
+
5jq8
|
| 8430 |
+
3gcq
|
| 8431 |
+
2zcs
|
| 8432 |
+
2bcd
|
| 8433 |
+
1ph0
|
| 8434 |
+
3nu5
|
| 8435 |
+
1t08
|
| 8436 |
+
4m7b
|
| 8437 |
+
4ge5
|
| 8438 |
+
4w9s
|
| 8439 |
+
5fat
|
| 8440 |
+
5l2w
|
| 8441 |
+
5nx2
|
| 8442 |
+
2gst
|
| 8443 |
+
5myn
|
| 8444 |
+
2xoi
|
| 8445 |
+
4r7m
|
| 8446 |
+
1kf6
|
| 8447 |
+
2on3
|
| 8448 |
+
5wa9
|
| 8449 |
+
5tw3
|
| 8450 |
+
1q4k
|
| 8451 |
+
6fu4
|
| 8452 |
+
5t97
|
| 8453 |
+
4ezy
|
| 8454 |
+
4pyx
|
| 8455 |
+
1fao
|
| 8456 |
+
1n46
|
| 8457 |
+
2ooh
|
| 8458 |
+
1q72
|
| 8459 |
+
3mag
|
| 8460 |
+
6euv
|
| 8461 |
+
4g5f
|
| 8462 |
+
4dgm
|
| 8463 |
+
3ken
|
| 8464 |
+
4d2r
|
| 8465 |
+
5fnq
|
| 8466 |
+
4z7f
|
| 8467 |
+
2o3z
|
| 8468 |
+
4xgz
|
| 8469 |
+
4i2z
|
| 8470 |
+
6g15
|
| 8471 |
+
5ypp
|
| 8472 |
+
2x09
|
| 8473 |
+
5lt6
|
| 8474 |
+
4tln
|
| 8475 |
+
4mrw
|
| 8476 |
+
3c10
|
| 8477 |
+
3sdi
|
| 8478 |
+
4tw6
|
| 8479 |
+
3pa4
|
| 8480 |
+
3q4k
|
| 8481 |
+
5l3a
|
| 8482 |
+
3b9g
|
| 8483 |
+
4ohp
|
| 8484 |
+
4rqi
|
| 8485 |
+
5vfj
|
| 8486 |
+
4x7k
|
| 8487 |
+
2q8s
|
| 8488 |
+
5oh4
|
| 8489 |
+
6ffs
|
| 8490 |
+
4loq
|
| 8491 |
+
4prb
|
| 8492 |
+
5vwk
|
| 8493 |
+
2yhw
|
| 8494 |
+
5eob
|
| 8495 |
+
5dgj
|
| 8496 |
+
2i4d
|
| 8497 |
+
5wlo
|
| 8498 |
+
5aln
|
| 8499 |
+
3p5l
|
| 8500 |
+
4q1y
|
| 8501 |
+
6c4u
|
| 8502 |
+
4l0s
|
| 8503 |
+
1gi7
|
| 8504 |
+
2jkt
|
| 8505 |
+
4yv1
|
| 8506 |
+
3ctj
|
| 8507 |
+
4wmy
|
| 8508 |
+
5c84
|
| 8509 |
+
3ie3
|
| 8510 |
+
5mnn
|
| 8511 |
+
3l8v
|
| 8512 |
+
1o2n
|
| 8513 |
+
4nmv
|
| 8514 |
+
5ok6
|
| 8515 |
+
4yab
|
| 8516 |
+
2vnf
|
| 8517 |
+
3r0w
|
| 8518 |
+
2uyq
|
| 8519 |
+
5j19
|
| 8520 |
+
3nth
|
| 8521 |
+
5ni0
|
| 8522 |
+
4uda
|
| 8523 |
+
4tpk
|
| 8524 |
+
1vyq
|
| 8525 |
+
4txs
|
| 8526 |
+
2wgs
|
| 8527 |
+
5gv2
|
| 8528 |
+
1b3g
|
| 8529 |
+
3kad
|
| 8530 |
+
6hgz
|
| 8531 |
+
3dng
|
| 8532 |
+
4u7q
|
| 8533 |
+
2c5x
|
| 8534 |
+
3m37
|
| 8535 |
+
2aqu
|
| 8536 |
+
5ho6
|
| 8537 |
+
5j5t
|
| 8538 |
+
1ke6
|
| 8539 |
+
5g5f
|
| 8540 |
+
3wff
|
| 8541 |
+
3m36
|
| 8542 |
+
4bb4
|
| 8543 |
+
3ai8
|
| 8544 |
+
2p16
|
| 8545 |
+
5u14
|
| 8546 |
+
4xe1
|
| 8547 |
+
4av5
|
| 8548 |
+
3bgc
|
| 8549 |
+
5i4v
|
| 8550 |
+
3g0d
|
| 8551 |
+
2aei
|
| 8552 |
+
4hup
|
| 8553 |
+
4yv9
|
| 8554 |
+
4uwf
|
| 8555 |
+
2ovy
|
| 8556 |
+
2oo8
|
| 8557 |
+
4dlj
|
| 8558 |
+
5qan
|
| 8559 |
+
4nwc
|
| 8560 |
+
4msc
|
| 8561 |
+
4iti
|
| 8562 |
+
6i8l
|
| 8563 |
+
2yof
|
| 8564 |
+
3qti
|
| 8565 |
+
5nf6
|
| 8566 |
+
5n7x
|
| 8567 |
+
3t0t
|
| 8568 |
+
3oyw
|
| 8569 |
+
4ab9
|
| 8570 |
+
1mfg
|
| 8571 |
+
3aw0
|
| 8572 |
+
3oyq
|
| 8573 |
+
2x2l
|
| 8574 |
+
2lkk
|
| 8575 |
+
3s3i
|
| 8576 |
+
6c28
|
| 8577 |
+
2ofv
|
| 8578 |
+
1b58
|
| 8579 |
+
4cpt
|
| 8580 |
+
4em7
|
| 8581 |
+
1t5a
|
| 8582 |
+
4xyf
|
| 8583 |
+
4j73
|
| 8584 |
+
2isw
|
| 8585 |
+
4xh6
|
| 8586 |
+
1j15
|
| 8587 |
+
3b7j
|
| 8588 |
+
3eqs
|
| 8589 |
+
3tkz
|
| 8590 |
+
3qw5
|
| 8591 |
+
6df4
|
| 8592 |
+
2jjb
|
| 8593 |
+
5auy
|
| 8594 |
+
2v5x
|
| 8595 |
+
2rcu
|
| 8596 |
+
5mqy
|
| 8597 |
+
2aoe
|
| 8598 |
+
1kl5
|
| 8599 |
+
4zsg
|
| 8600 |
+
3e0p
|
| 8601 |
+
3su6
|
| 8602 |
+
5tz3
|
| 8603 |
+
4hco
|
| 8604 |
+
3i6c
|
| 8605 |
+
2h4n
|
| 8606 |
+
2ow1
|
| 8607 |
+
4nue
|
| 8608 |
+
4hod
|
| 8609 |
+
3oik
|
| 8610 |
+
4jsz
|
| 8611 |
+
2viv
|
| 8612 |
+
4gih
|
| 8613 |
+
4wk1
|
| 8614 |
+
1b9s
|
| 8615 |
+
5i40
|
| 8616 |
+
4zb6
|
| 8617 |
+
5fns
|
| 8618 |
+
5hg9
|
| 8619 |
+
1h37
|
| 8620 |
+
1uj0
|
| 8621 |
+
3e9i
|
| 8622 |
+
4igt
|
| 8623 |
+
3i5z
|
| 8624 |
+
1v2v
|
| 8625 |
+
3lpt
|
| 8626 |
+
3k4d
|
| 8627 |
+
3nhi
|
| 8628 |
+
1cbr
|
| 8629 |
+
5uis
|
| 8630 |
+
5nk3
|
| 8631 |
+
3okv
|
| 8632 |
+
3krr
|
| 8633 |
+
3zyf
|
| 8634 |
+
3ivg
|
| 8635 |
+
5d3n
|
| 8636 |
+
4rs0
|
| 8637 |
+
5oxn
|
| 8638 |
+
1owd
|
| 8639 |
+
1w7h
|
| 8640 |
+
5o4s
|
| 8641 |
+
3hrf
|
| 8642 |
+
2r3h
|
| 8643 |
+
1yp9
|
| 8644 |
+
5oyd
|
| 8645 |
+
4uvb
|
| 8646 |
+
1b9j
|
| 8647 |
+
2bgd
|
| 8648 |
+
6giu
|
| 8649 |
+
5eek
|
| 8650 |
+
3tnh
|
| 8651 |
+
3bxs
|
| 8652 |
+
2ay1
|
| 8653 |
+
5cap
|
| 8654 |
+
3mkf
|
| 8655 |
+
5a3n
|
| 8656 |
+
4zx6
|
| 8657 |
+
6b5r
|
| 8658 |
+
1fbm
|
| 8659 |
+
5wou
|
| 8660 |
+
3g76
|
| 8661 |
+
2rkf
|
| 8662 |
+
5lav
|
| 8663 |
+
2rkn
|
| 8664 |
+
3jzo
|
| 8665 |
+
3kr5
|
| 8666 |
+
2vo4
|
| 8667 |
+
4lzr
|
| 8668 |
+
4wbg
|
| 8669 |
+
1vyz
|
| 8670 |
+
3ebi
|
| 8671 |
+
2qbx
|
| 8672 |
+
3gbb
|
| 8673 |
+
3vnt
|
| 8674 |
+
3erd
|
| 8675 |
+
1o34
|
| 8676 |
+
1lbk
|
| 8677 |
+
6czi
|
| 8678 |
+
4d1c
|
| 8679 |
+
1k1p
|
| 8680 |
+
2bmv
|
| 8681 |
+
1jlr
|
| 8682 |
+
5hx6
|
| 8683 |
+
2igw
|
| 8684 |
+
4o0r
|
| 8685 |
+
2isv
|
| 8686 |
+
3kr8
|
| 8687 |
+
4k3m
|
| 8688 |
+
5xiw
|
| 8689 |
+
2z5s
|
| 8690 |
+
5a3s
|
| 8691 |
+
4u03
|
| 8692 |
+
3w5e
|
| 8693 |
+
1zz3
|
| 8694 |
+
4rx9
|
| 8695 |
+
4ty7
|
| 8696 |
+
6g4z
|
| 8697 |
+
4j3e
|
| 8698 |
+
1s3k
|
| 8699 |
+
6ajh
|
| 8700 |
+
2wei
|
| 8701 |
+
5v5o
|
| 8702 |
+
4nbl
|
| 8703 |
+
3qqk
|
| 8704 |
+
4rxd
|
| 8705 |
+
1cp6
|
| 8706 |
+
1r5h
|
| 8707 |
+
4nnw
|
| 8708 |
+
16pk
|
| 8709 |
+
4cga
|
| 8710 |
+
3kb7
|
| 8711 |
+
4phw
|
| 8712 |
+
3zpq
|
| 8713 |
+
4b3c
|
| 8714 |
+
1wv7
|
| 8715 |
+
5hja
|
| 8716 |
+
3jzk
|
| 8717 |
+
1hsh
|
| 8718 |
+
6aro
|
| 8719 |
+
3vyf
|
| 8720 |
+
2v7d
|
| 8721 |
+
5eeq
|
| 8722 |
+
5mos
|
| 8723 |
+
1hyv
|
| 8724 |
+
3ska
|
| 8725 |
+
4bg1
|
| 8726 |
+
4lgg
|
| 8727 |
+
4anw
|
| 8728 |
+
6gr7
|
| 8729 |
+
2x7s
|
| 8730 |
+
1p05
|
| 8731 |
+
4zbi
|
| 8732 |
+
5c6v
|
| 8733 |
+
3qzv
|
| 8734 |
+
5etq
|
| 8735 |
+
1nhw
|
| 8736 |
+
2xx4
|
| 8737 |
+
1uys
|
| 8738 |
+
3zsq
|
| 8739 |
+
4h75
|
| 8740 |
+
3veh
|
| 8741 |
+
4d0w
|
| 8742 |
+
4pml
|
| 8743 |
+
3gyn
|
| 8744 |
+
5usq
|
| 8745 |
+
4e3b
|
| 8746 |
+
2hwi
|
| 8747 |
+
3kl8
|
| 8748 |
+
2vr4
|
| 8749 |
+
3o0e
|
| 8750 |
+
3dab
|
| 8751 |
+
4ynb
|
| 8752 |
+
1kr3
|
| 8753 |
+
4w9d
|
| 8754 |
+
4msl
|
| 8755 |
+
5k8v
|
| 8756 |
+
5zaf
|
| 8757 |
+
6cve
|
| 8758 |
+
4h85
|
| 8759 |
+
3d5m
|
| 8760 |
+
5uox
|
| 8761 |
+
4fcm
|
| 8762 |
+
2zz1
|
| 8763 |
+
4i1r
|
| 8764 |
+
5hkm
|
| 8765 |
+
5he2
|
| 8766 |
+
3n3g
|
| 8767 |
+
3g5v
|
| 8768 |
+
3k3j
|
| 8769 |
+
1utr
|
| 8770 |
+
3ixg
|
| 8771 |
+
4u44
|
| 8772 |
+
4ahs
|
| 8773 |
+
1o2y
|
| 8774 |
+
3fcf
|
| 8775 |
+
3bgm
|
| 8776 |
+
4yhq
|
| 8777 |
+
2nxd
|
| 8778 |
+
4k19
|
| 8779 |
+
5mkx
|
| 8780 |
+
3v3m
|
| 8781 |
+
4ph4
|
| 8782 |
+
1nja
|
| 8783 |
+
5i23
|
| 8784 |
+
5n17
|
| 8785 |
+
4bt5
|
| 8786 |
+
2ojg
|
| 8787 |
+
5x72
|
| 8788 |
+
6eq7
|
| 8789 |
+
3ehx
|
| 8790 |
+
1a5v
|
| 8791 |
+
5c4u
|
| 8792 |
+
3qcs
|
| 8793 |
+
5cy9
|
| 8794 |
+
3bgp
|
| 8795 |
+
3nf7
|
| 8796 |
+
4dmn
|
| 8797 |
+
2dxs
|
| 8798 |
+
4msk
|
| 8799 |
+
4gto
|
| 8800 |
+
2wmu
|
| 8801 |
+
4hws
|
| 8802 |
+
5c2a
|
| 8803 |
+
3cj5
|
| 8804 |
+
3lco
|
| 8805 |
+
4x3u
|
| 8806 |
+
5c7a
|
| 8807 |
+
1pa9
|
| 8808 |
+
3oe0
|
| 8809 |
+
3m67
|
| 8810 |
+
1j7z
|
| 8811 |
+
4kql
|
| 8812 |
+
5l8y
|
| 8813 |
+
4prj
|
| 8814 |
+
5cks
|
| 8815 |
+
4cc5
|
| 8816 |
+
3jzc
|
| 8817 |
+
2qzx
|
| 8818 |
+
4u73
|
| 8819 |
+
2an5
|
| 8820 |
+
1dzj
|
| 8821 |
+
6b97
|
| 8822 |
+
5y6d
|
| 8823 |
+
1qvu
|
| 8824 |
+
4ymx
|
| 8825 |
+
1yej
|
| 8826 |
+
6f3d
|
| 8827 |
+
5tg7
|
| 8828 |
+
2w1i
|
| 8829 |
+
1vyw
|
| 8830 |
+
3sw9
|
| 8831 |
+
4qz3
|
| 8832 |
+
8a3h
|
| 8833 |
+
3o3j
|
| 8834 |
+
5gn9
|
| 8835 |
+
4b7j
|
| 8836 |
+
2i4u
|
| 8837 |
+
3m8q
|
| 8838 |
+
1swi
|
| 8839 |
+
3pz1
|
| 8840 |
+
3q4c
|
| 8841 |
+
2jo9
|
| 8842 |
+
4bzo
|
| 8843 |
+
2x7c
|
| 8844 |
+
5duc
|
| 8845 |
+
3pty
|
| 8846 |
+
5f3i
|
| 8847 |
+
4q9o
|
| 8848 |
+
3e1r
|
| 8849 |
+
4j78
|
| 8850 |
+
4ucv
|
| 8851 |
+
6h38
|
| 8852 |
+
3wtk
|
| 8853 |
+
4lno
|
| 8854 |
+
2h65
|
| 8855 |
+
3odi
|
| 8856 |
+
3s4q
|
| 8857 |
+
3lp0
|
| 8858 |
+
4ajo
|
| 8859 |
+
5maj
|
| 8860 |
+
6hpw
|
| 8861 |
+
5fsb
|
| 8862 |
+
4f6u
|
| 8863 |
+
5h0g
|
| 8864 |
+
2zdk
|
| 8865 |
+
6hoy
|
| 8866 |
+
6bib
|
| 8867 |
+
3rpv
|
| 8868 |
+
1kel
|
| 8869 |
+
4v24
|
| 8870 |
+
4j4v
|
| 8871 |
+
3kai
|
| 8872 |
+
2v59
|
| 8873 |
+
5cs5
|
| 8874 |
+
5jqb
|
| 8875 |
+
6as8
|
| 8876 |
+
5dw2
|
| 8877 |
+
1mq1
|
| 8878 |
+
4gw1
|
| 8879 |
+
4gj9
|
| 8880 |
+
6e2n
|
| 8881 |
+
4qlu
|
| 8882 |
+
5jog
|
| 8883 |
+
4iu1
|
| 8884 |
+
3tyq
|
| 8885 |
+
6bg5
|
| 8886 |
+
3r2b
|
| 8887 |
+
5alr
|
| 8888 |
+
5c4s
|
| 8889 |
+
3pa8
|
| 8890 |
+
2qq7
|
| 8891 |
+
1qwu
|
| 8892 |
+
4qn9
|
| 8893 |
+
1yye
|
| 8894 |
+
1oir
|
| 8895 |
+
6ert
|
| 8896 |
+
5jn8
|
| 8897 |
+
3d7z
|
| 8898 |
+
6dq8
|
| 8899 |
+
4lsj
|
| 8900 |
+
2wxm
|
| 8901 |
+
3pj2
|
| 8902 |
+
1v1m
|
| 8903 |
+
3nwb
|
| 8904 |
+
4hwo
|
| 8905 |
+
3afk
|
| 8906 |
+
6acb
|
| 8907 |
+
6g85
|
| 8908 |
+
1kna
|
| 8909 |
+
4pvx
|
| 8910 |
+
5hex
|
| 8911 |
+
3jqa
|
| 8912 |
+
5xms
|
| 8913 |
+
4an2
|
| 8914 |
+
3rdh
|
| 8915 |
+
2r3y
|
| 8916 |
+
6few
|
| 8917 |
+
4cd4
|
| 8918 |
+
6anl
|
| 8919 |
+
1yyy
|
| 8920 |
+
3r16
|
| 8921 |
+
4al4
|
| 8922 |
+
2ovv
|
| 8923 |
+
4ya8
|
| 8924 |
+
3bgq
|
| 8925 |
+
1qpe
|
| 8926 |
+
5lpr
|
| 8927 |
+
5fap
|
| 8928 |
+
5cei
|
| 8929 |
+
3v5q
|
| 8930 |
+
4jx9
|
| 8931 |
+
4xqa
|
| 8932 |
+
3ghe
|
| 8933 |
+
3fci
|
| 8934 |
+
1zfp
|
| 8935 |
+
2l12
|
| 8936 |
+
4lil
|
| 8937 |
+
6eq2
|
| 8938 |
+
4l0i
|
| 8939 |
+
4zsl
|
| 8940 |
+
5yu9
|
| 8941 |
+
1p04
|
| 8942 |
+
1rnt
|
| 8943 |
+
2chm
|
| 8944 |
+
1nhx
|
| 8945 |
+
3ckb
|
| 8946 |
+
2vot
|
| 8947 |
+
5n1p
|
| 8948 |
+
5ztn
|
| 8949 |
+
4j82
|
| 8950 |
+
3hxb
|
| 8951 |
+
3qvu
|
| 8952 |
+
1hk5
|
| 8953 |
+
3atl
|
| 8954 |
+
3jzi
|
| 8955 |
+
4ad3
|
| 8956 |
+
4b6r
|
| 8957 |
+
3g8o
|
| 8958 |
+
4kc2
|
| 8959 |
+
7upj
|
| 8960 |
+
6do5
|
| 8961 |
+
5wfj
|
| 8962 |
+
2j4a
|
| 8963 |
+
5oy3
|
| 8964 |
+
4o55
|
| 8965 |
+
2qi5
|
| 8966 |
+
4jfs
|
| 8967 |
+
2y68
|
| 8968 |
+
3fv2
|
| 8969 |
+
5v3r
|
| 8970 |
+
3ioc
|
| 8971 |
+
5jyp
|
| 8972 |
+
4mm8
|
| 8973 |
+
4jjm
|
| 8974 |
+
5ccl
|
| 8975 |
+
5umy
|
| 8976 |
+
4n7e
|
| 8977 |
+
4f64
|
| 8978 |
+
3p9h
|
| 8979 |
+
4hz5
|
| 8980 |
+
4kin
|
| 8981 |
+
6cqf
|
| 8982 |
+
3feg
|
| 8983 |
+
3aaq
|
| 8984 |
+
3sl1
|
| 8985 |
+
4oee
|
| 8986 |
+
4i7d
|
| 8987 |
+
5ll5
|
| 8988 |
+
3s9y
|
| 8989 |
+
2p3b
|
| 8990 |
+
2x8i
|
| 8991 |
+
4ui3
|
| 8992 |
+
5ji6
|
| 8993 |
+
4lts
|
| 8994 |
+
5nkb
|
| 8995 |
+
1km3
|
| 8996 |
+
2qnx
|
| 8997 |
+
3myg
|
| 8998 |
+
4nms
|
| 8999 |
+
5ar7
|
| 9000 |
+
2h3e
|
| 9001 |
+
4lp0
|
| 9002 |
+
2ay9
|
| 9003 |
+
5trs
|
| 9004 |
+
6cdg
|
| 9005 |
+
6ful
|
| 9006 |
+
4x8n
|
| 9007 |
+
4w4x
|
| 9008 |
+
2vte
|
| 9009 |
+
5m29
|
| 9010 |
+
4ocx
|
| 9011 |
+
3sov
|
| 9012 |
+
4fxp
|
| 9013 |
+
3ex2
|
| 9014 |
+
4wpn
|
| 9015 |
+
5nwh
|
| 9016 |
+
1vik
|
| 9017 |
+
4uj9
|
| 9018 |
+
5tg4
|
| 9019 |
+
5hrx
|
| 9020 |
+
3wz6
|
| 9021 |
+
1mm7
|
| 9022 |
+
5v37
|
| 9023 |
+
5eol
|
| 9024 |
+
5n21
|
| 9025 |
+
3vhe
|
| 9026 |
+
3cj3
|
| 9027 |
+
4w9q
|
| 9028 |
+
4z93
|
| 9029 |
+
3jqb
|
| 9030 |
+
5f3c
|
| 9031 |
+
5ll9
|
| 9032 |
+
4zts
|
| 9033 |
+
5aei
|
| 9034 |
+
1o9d
|
| 9035 |
+
2iw9
|
| 9036 |
+
5b5g
|
| 9037 |
+
2qta
|
| 9038 |
+
1g54
|
| 9039 |
+
1ez9
|
| 9040 |
+
4c6x
|
| 9041 |
+
2g24
|
| 9042 |
+
1m9n
|
| 9043 |
+
5f60
|
| 9044 |
+
1rt9
|
| 9045 |
+
5jq5
|
| 9046 |
+
3ha6
|
| 9047 |
+
2yj9
|
| 9048 |
+
4nra
|
| 9049 |
+
3el8
|
| 9050 |
+
6afi
|
| 9051 |
+
4kni
|
| 9052 |
+
5v79
|
| 9053 |
+
4fxf
|
| 9054 |
+
4jog
|
| 9055 |
+
4x3s
|
| 9056 |
+
1dif
|
| 9057 |
+
4aa7
|
| 9058 |
+
4d1b
|
| 9059 |
+
5av0
|
| 9060 |
+
2clm
|
| 9061 |
+
4l10
|
| 9062 |
+
2ltv
|
| 9063 |
+
3rwh
|
| 9064 |
+
5yfs
|
| 9065 |
+
5xkm
|
| 9066 |
+
5mjb
|
| 9067 |
+
2i03
|
| 9068 |
+
2v0z
|
| 9069 |
+
3i3b
|
| 9070 |
+
1h1b
|
| 9071 |
+
6esa
|
| 9072 |
+
4ai8
|
| 9073 |
+
5ak3
|
| 9074 |
+
3wto
|
| 9075 |
+
1jjt
|
| 9076 |
+
1jdj
|
| 9077 |
+
2chw
|
| 9078 |
+
3ll8
|
| 9079 |
+
5j79
|
| 9080 |
+
5adq
|
| 9081 |
+
4r74
|
| 9082 |
+
5n53
|
| 9083 |
+
3ljg
|
| 9084 |
+
2lsr
|
| 9085 |
+
3rk9
|
| 9086 |
+
1add
|
| 9087 |
+
5ja0
|
| 9088 |
+
5x02
|
| 9089 |
+
5d12
|
| 9090 |
+
1o6q
|
| 9091 |
+
5f20
|
| 9092 |
+
4lgh
|
| 9093 |
+
5etr
|
| 9094 |
+
3mj5
|
| 9095 |
+
4km2
|
| 9096 |
+
1hmr
|
| 9097 |
+
3are
|
| 9098 |
+
5u9i
|
| 9099 |
+
1lv8
|
| 9100 |
+
5n99
|
| 9101 |
+
4gly
|
| 9102 |
+
1sh9
|
| 9103 |
+
4uns
|
| 9104 |
+
4cpu
|
| 9105 |
+
2rkd
|
| 9106 |
+
4psq
|
| 9107 |
+
3az9
|
| 9108 |
+
5wcm
|
| 9109 |
+
5vsc
|
| 9110 |
+
4nl1
|
| 9111 |
+
4um9
|
| 9112 |
+
4wa9
|
| 9113 |
+
2obj
|
| 9114 |
+
4zw3
|
| 9115 |
+
6htz
|
| 9116 |
+
3n2p
|
| 9117 |
+
2vwy
|
| 9118 |
+
4ge2
|
| 9119 |
+
5d7e
|
| 9120 |
+
4wx4
|
| 9121 |
+
3sgv
|
| 9122 |
+
5w12
|
| 9123 |
+
4gmc
|
| 9124 |
+
4dij
|
| 9125 |
+
1r5g
|
| 9126 |
+
4r3w
|
| 9127 |
+
5hn8
|
| 9128 |
+
4fk6
|
| 9129 |
+
5ggl
|
| 9130 |
+
2ks9
|
| 9131 |
+
1tq4
|
| 9132 |
+
1my8
|
| 9133 |
+
6mv3
|
| 9134 |
+
3kqs
|
| 9135 |
+
3gz9
|
| 9136 |
+
1uv5
|
| 9137 |
+
2bok
|
| 9138 |
+
1b42
|
| 9139 |
+
5mlw
|
| 9140 |
+
4qr5
|
| 9141 |
+
1r6n
|
| 9142 |
+
4pio
|
| 9143 |
+
3mp1
|
| 9144 |
+
4pz8
|
| 9145 |
+
5yfz
|
| 9146 |
+
5zwh
|
| 9147 |
+
4kb9
|
| 9148 |
+
2wmw
|
| 9149 |
+
1nax
|
| 9150 |
+
1o4n
|
| 9151 |
+
2znu
|
| 9152 |
+
5gvp
|
| 9153 |
+
3ma3
|
| 9154 |
+
3k15
|
| 9155 |
+
1gt1
|
| 9156 |
+
2jew
|
| 9157 |
+
5qae
|
| 9158 |
+
1v48
|
| 9159 |
+
1m5d
|
| 9160 |
+
2v2c
|
| 9161 |
+
2ql5
|
| 9162 |
+
3h0y
|
| 9163 |
+
3wpn
|
| 9164 |
+
3pdc
|
| 9165 |
+
2qa8
|
| 9166 |
+
1rs4
|
| 9167 |
+
5tgy
|
| 9168 |
+
2b7d
|
| 9169 |
+
2w8j
|
| 9170 |
+
3tdz
|
| 9171 |
+
2wxj
|
| 9172 |
+
5g2b
|
| 9173 |
+
3gwv
|
| 9174 |
+
1kne
|
| 9175 |
+
3kqe
|
| 9176 |
+
5khg
|
| 9177 |
+
4uiu
|
| 9178 |
+
4kyh
|
| 9179 |
+
1a4m
|
| 9180 |
+
4byi
|
| 9181 |
+
5ngz
|
| 9182 |
+
6fam
|
| 9183 |
+
5lgq
|
| 9184 |
+
2hb3
|
| 9185 |
+
2pnx
|
| 9186 |
+
4qmw
|
| 9187 |
+
1lzq
|
| 9188 |
+
3aid
|
| 9189 |
+
3psb
|
| 9190 |
+
5w6r
|
| 9191 |
+
4c4e
|
| 9192 |
+
4jxw
|
| 9193 |
+
5ngr
|
| 9194 |
+
1utz
|
| 9195 |
+
3r7o
|
| 9196 |
+
3umq
|
| 9197 |
+
1abt
|
| 9198 |
+
1tkc
|
| 9199 |
+
3mbp
|
| 9200 |
+
3i9g
|
| 9201 |
+
1rwq
|
| 9202 |
+
4iz0
|
| 9203 |
+
6dh2
|
| 9204 |
+
4rx7
|
| 9205 |
+
2cn8
|
| 9206 |
+
3bar
|
| 9207 |
+
1cny
|
| 9208 |
+
2bys
|
| 9209 |
+
4y6p
|
| 9210 |
+
4y6s
|
| 9211 |
+
2uxx
|
| 9212 |
+
5cs2
|
| 9213 |
+
4mgv
|
| 9214 |
+
2wxf
|
| 9215 |
+
6fyk
|
| 9216 |
+
1x8s
|
| 9217 |
+
5b25
|
| 9218 |
+
4tk5
|
| 9219 |
+
5nhp
|
| 9220 |
+
4lov
|
| 9221 |
+
1kui
|
| 9222 |
+
3art
|
| 9223 |
+
4d2v
|
| 9224 |
+
1bnv
|
| 9225 |
+
4kov
|
| 9226 |
+
6cyc
|
| 9227 |
+
2j4z
|
| 9228 |
+
2p3c
|
| 9229 |
+
1f0s
|
| 9230 |
+
3dy6
|
| 9231 |
+
3mww
|
| 9232 |
+
1hvk
|
| 9233 |
+
1x76
|
| 9234 |
+
2aez
|
| 9235 |
+
3k48
|
| 9236 |
+
2krd
|
| 9237 |
+
4bbg
|
| 9238 |
+
5otc
|
| 9239 |
+
3kqp
|
| 9240 |
+
4esi
|
| 9241 |
+
2rku
|
| 9242 |
+
2kpl
|
| 9243 |
+
1erb
|
| 9244 |
+
2xx5
|
| 9245 |
+
2fl2
|
| 9246 |
+
4a4v
|
| 9247 |
+
1lqd
|
| 9248 |
+
1lfo
|
| 9249 |
+
3hf6
|
| 9250 |
+
3bm8
|
| 9251 |
+
1a99
|
| 9252 |
+
3zly
|
| 9253 |
+
6cwn
|
| 9254 |
+
3d8y
|
| 9255 |
+
5icz
|
| 9256 |
+
1hgi
|
| 9257 |
+
4qgf
|
| 9258 |
+
3tn8
|
| 9259 |
+
6f3g
|
| 9260 |
+
1n3i
|
| 9261 |
+
3tz2
|
| 9262 |
+
1xws
|
| 9263 |
+
5uov
|
| 9264 |
+
2o9j
|
| 9265 |
+
3coh
|
| 9266 |
+
3h98
|
| 9267 |
+
1mes
|
| 9268 |
+
3ot3
|
| 9269 |
+
6f1x
|
| 9270 |
+
5gs9
|
| 9271 |
+
5fyx
|
| 9272 |
+
2hha
|
| 9273 |
+
1pfy
|
| 9274 |
+
4j3j
|
| 9275 |
+
4rn1
|
| 9276 |
+
4ze6
|
| 9277 |
+
3zpt
|
| 9278 |
+
4f9y
|
| 9279 |
+
4lbu
|
| 9280 |
+
2nno
|
| 9281 |
+
5v42
|
| 9282 |
+
2fyv
|
| 9283 |
+
4j8m
|
| 9284 |
+
6hjk
|
| 9285 |
+
3o9d
|
| 9286 |
+
1izh
|
| 9287 |
+
2r2b
|
| 9288 |
+
2xc0
|
| 9289 |
+
5uv5
|
| 9290 |
+
4ty8
|
| 9291 |
+
2vew
|
| 9292 |
+
3br9
|
| 9293 |
+
1utt
|
| 9294 |
+
6cd8
|
| 9295 |
+
3l8x
|
| 9296 |
+
4fe6
|
| 9297 |
+
5adt
|
| 9298 |
+
4f49
|
| 9299 |
+
5nzq
|
| 9300 |
+
6htt
|
| 9301 |
+
5k0t
|
| 9302 |
+
3pn1
|
| 9303 |
+
4ryc
|
| 9304 |
+
4nga
|
| 9305 |
+
4crc
|
| 9306 |
+
4xcu
|
| 9307 |
+
3f3c
|
| 9308 |
+
3obx
|
| 9309 |
+
6agg
|
| 9310 |
+
6cq5
|
| 9311 |
+
4nzn
|
| 9312 |
+
6hm2
|
| 9313 |
+
3w2r
|
| 9314 |
+
3dsu
|
| 9315 |
+
3zs0
|
| 9316 |
+
4wkv
|
| 9317 |
+
3w07
|
| 9318 |
+
1ntk
|
| 9319 |
+
4dk8
|
| 9320 |
+
4anv
|
| 9321 |
+
4ery
|
| 9322 |
+
4a4l
|
| 9323 |
+
4jzd
|
| 9324 |
+
3tjh
|
| 9325 |
+
4jwk
|
| 9326 |
+
4hyi
|
| 9327 |
+
4kne
|
| 9328 |
+
3u9q
|
| 9329 |
+
5ih2
|
| 9330 |
+
2h02
|
| 9331 |
+
1i8z
|
| 9332 |
+
4c6u
|
| 9333 |
+
5ohj
|
| 9334 |
+
4bid
|
| 9335 |
+
3lmp
|
| 9336 |
+
4j81
|
| 9337 |
+
5dey
|
| 9338 |
+
5k8n
|
| 9339 |
+
1pz5
|
| 9340 |
+
1shd
|
| 9341 |
+
3hhm
|
| 9342 |
+
5n0f
|
| 9343 |
+
3nba
|
| 9344 |
+
4u5s
|
| 9345 |
+
2ndo
|
| 9346 |
+
3vid
|
| 9347 |
+
1ik4
|
| 9348 |
+
1al7
|
| 9349 |
+
1fq7
|
| 9350 |
+
2x4r
|
| 9351 |
+
4kz3
|
| 9352 |
+
3s8l
|
| 9353 |
+
2xh5
|
| 9354 |
+
2aoc
|
| 9355 |
+
2p93
|
| 9356 |
+
5huy
|
| 9357 |
+
1agm
|
| 9358 |
+
1fv9
|
| 9359 |
+
4c72
|
| 9360 |
+
1vfn
|
| 9361 |
+
4xg3
|
| 9362 |
+
5vk0
|
| 9363 |
+
3hl5
|
| 9364 |
+
2xah
|
| 9365 |
+
5n58
|
| 9366 |
+
3mof
|
| 9367 |
+
3r22
|
| 9368 |
+
1utm
|
| 9369 |
+
3p8e
|
| 9370 |
+
1kl3
|
| 9371 |
+
5o5h
|
| 9372 |
+
5lpk
|
| 9373 |
+
3miy
|
| 9374 |
+
2vta
|
| 9375 |
+
1bnq
|
| 9376 |
+
4myh
|
| 9377 |
+
5llm
|
| 9378 |
+
4xsx
|
| 9379 |
+
1q6s
|
| 9380 |
+
1jws
|
| 9381 |
+
5xmp
|
| 9382 |
+
4ahv
|
| 9383 |
+
4e4n
|
| 9384 |
+
4o75
|
| 9385 |
+
3ogp
|
| 9386 |
+
3dk1
|
| 9387 |
+
4bdf
|
| 9388 |
+
2ybs
|
| 9389 |
+
2aj8
|
| 9390 |
+
4ght
|
| 9391 |
+
1e5j
|
| 9392 |
+
4odq
|
| 9393 |
+
1veb
|
| 9394 |
+
4wrq
|
| 9395 |
+
5nsp
|
| 9396 |
+
2wtx
|
| 9397 |
+
2rqu
|
| 9398 |
+
5kpl
|
| 9399 |
+
3g19
|
| 9400 |
+
3gy7
|
| 9401 |
+
4bhi
|
| 9402 |
+
3rjm
|
| 9403 |
+
3f3v
|
| 9404 |
+
4h42
|
| 9405 |
+
4y16
|
| 9406 |
+
1o3g
|
| 9407 |
+
1utc
|
| 9408 |
+
3wzu
|
| 9409 |
+
6b5q
|
| 9410 |
+
1u2r
|
| 9411 |
+
4ygf
|
| 9412 |
+
1nju
|
| 9413 |
+
3drp
|
| 9414 |
+
6azk
|
| 9415 |
+
3r9d
|
| 9416 |
+
4awi
|
| 9417 |
+
1ce5
|
| 9418 |
+
5vd2
|
| 9419 |
+
5kzq
|
| 9420 |
+
2bal
|
| 9421 |
+
1y2f
|
| 9422 |
+
2xxt
|
| 9423 |
+
5hlp
|
| 9424 |
+
2y8c
|
| 9425 |
+
5auu
|
| 9426 |
+
1bwa
|
| 9427 |
+
6gbe
|
| 9428 |
+
2zv2
|
| 9429 |
+
3f7b
|
| 9430 |
+
1e4h
|
| 9431 |
+
6arn
|
| 9432 |
+
3ryw
|
| 9433 |
+
6fr0
|
| 9434 |
+
5wbz
|
| 9435 |
+
1c8v
|
| 9436 |
+
1v41
|
| 9437 |
+
4oew
|
| 9438 |
+
5fbo
|
| 9439 |
+
3l9h
|
| 9440 |
+
5hi7
|
| 9441 |
+
3igg
|
| 9442 |
+
4hy0
|
| 9443 |
+
5enf
|
| 9444 |
+
2a4q
|
| 9445 |
+
1cwc
|
| 9446 |
+
5yjo
|
| 9447 |
+
2jc0
|
| 9448 |
+
186l
|
| 9449 |
+
6epy
|
| 9450 |
+
3qk0
|
| 9451 |
+
6cq4
|
| 9452 |
+
2jkq
|
| 9453 |
+
3gb2
|
| 9454 |
+
5xmt
|
| 9455 |
+
2ybu
|
| 9456 |
+
3arz
|
| 9457 |
+
2o5k
|
| 9458 |
+
5c13
|
| 9459 |
+
4gfm
|
| 9460 |
+
2fm2
|
| 9461 |
+
4jsa
|
| 9462 |
+
4pv0
|
| 9463 |
+
1hnn
|
| 9464 |
+
3bl2
|
| 9465 |
+
5dhp
|
| 9466 |
+
6c7i
|
| 9467 |
+
1inq
|
| 9468 |
+
5pzn
|
| 9469 |
+
1pye
|
| 9470 |
+
3upf
|
| 9471 |
+
3zcl
|
| 9472 |
+
2uyi
|
| 9473 |
+
1usi
|
| 9474 |
+
3dc3
|
| 9475 |
+
1ezq
|
| 9476 |
+
4qz0
|
| 9477 |
+
2vnt
|
| 9478 |
+
5ehy
|
| 9479 |
+
1mqj
|
| 9480 |
+
5gs4
|
| 9481 |
+
5xg4
|
| 9482 |
+
3h59
|
| 9483 |
+
1bv7
|
| 9484 |
+
1br6
|
| 9485 |
+
1mns
|
| 9486 |
+
2ybk
|
| 9487 |
+
1bcj
|
| 9488 |
+
1e02
|
| 9489 |
+
1ykr
|
| 9490 |
+
5vja
|
| 9491 |
+
3r5j
|
| 9492 |
+
5edu
|
| 9493 |
+
3kga
|
| 9494 |
+
6b5t
|
| 9495 |
+
5qaq
|
| 9496 |
+
1jpl
|
| 9497 |
+
5hmi
|
| 9498 |
+
3mjl
|
| 9499 |
+
4n8e
|
| 9500 |
+
1hrn
|
| 9501 |
+
2o4s
|
| 9502 |
+
1agw
|
| 9503 |
+
4wn1
|
| 9504 |
+
3tc5
|
| 9505 |
+
2pjb
|
| 9506 |
+
6ea1
|
| 9507 |
+
5epy
|
| 9508 |
+
3vc4
|
| 9509 |
+
2i0g
|
| 9510 |
+
3ah8
|
| 9511 |
+
6bau
|
| 9512 |
+
4drn
|
| 9513 |
+
1aw1
|
| 9514 |
+
5fl1
|
| 9515 |
+
4prg
|
| 9516 |
+
4ibe
|
| 9517 |
+
2yem
|
| 9518 |
+
3fql
|
| 9519 |
+
5zwf
|
| 9520 |
+
1x0n
|
| 9521 |
+
6ehh
|
| 9522 |
+
1cps
|
| 9523 |
+
5akl
|
| 9524 |
+
3g2t
|
| 9525 |
+
3vsx
|
| 9526 |
+
4dhl
|
| 9527 |
+
1hsg
|
| 9528 |
+
3vhv
|
| 9529 |
+
3ro4
|
| 9530 |
+
5myl
|
| 9531 |
+
4lyw
|
| 9532 |
+
3e92
|
| 9533 |
+
4o91
|
| 9534 |
+
3ipa
|
| 9535 |
+
1imx
|
| 9536 |
+
1u71
|
| 9537 |
+
5dx3
|
| 9538 |
+
4hvi
|
| 9539 |
+
4urn
|
| 9540 |
+
1h5v
|
| 9541 |
+
3p7b
|
| 9542 |
+
2rg5
|
| 9543 |
+
4i8x
|
| 9544 |
+
4bcn
|
| 9545 |
+
4d09
|
| 9546 |
+
5f8y
|
| 9547 |
+
5mwp
|
| 9548 |
+
3n76
|
| 9549 |
+
3gba
|
| 9550 |
+
4lyn
|
| 9551 |
+
6bik
|
| 9552 |
+
1unl
|
| 9553 |
+
3e6v
|
| 9554 |
+
3rsr
|
| 9555 |
+
1nlp
|
| 9556 |
+
5dxg
|
| 9557 |
+
2cji
|
| 9558 |
+
5zwi
|
| 9559 |
+
3o9a
|
| 9560 |
+
4ju4
|
| 9561 |
+
2x2k
|
| 9562 |
+
4n5d
|
| 9563 |
+
5l4f
|
| 9564 |
+
5nya
|
| 9565 |
+
5yqo
|
| 9566 |
+
4gzt
|
| 9567 |
+
3tki
|
| 9568 |
+
5dd0
|
| 9569 |
+
5ivt
|
| 9570 |
+
5mnr
|
| 9571 |
+
3asl
|
| 9572 |
+
2oc0
|
| 9573 |
+
3l3n
|
| 9574 |
+
5izf
|
| 9575 |
+
1i6v
|
| 9576 |
+
2y56
|
| 9577 |
+
2h4k
|
| 9578 |
+
1drj
|
| 9579 |
+
4yy6
|
| 9580 |
+
3hku
|
| 9581 |
+
3kfn
|
| 9582 |
+
1w5v
|
| 9583 |
+
2c3k
|
| 9584 |
+
5oha
|
| 9585 |
+
4myq
|
| 9586 |
+
3new
|
| 9587 |
+
1o5c
|
| 9588 |
+
4k5y
|
| 9589 |
+
5kbi
|
| 9590 |
+
5dhs
|
| 9591 |
+
1y19
|
| 9592 |
+
3hky
|
| 9593 |
+
5nka
|
| 9594 |
+
1wax
|
| 9595 |
+
6h9b
|
| 9596 |
+
4b6c
|
| 9597 |
+
1h3h
|
| 9598 |
+
5xw6
|
| 9599 |
+
4cra
|
| 9600 |
+
3d4l
|
| 9601 |
+
2nwn
|
| 9602 |
+
2zyb
|
| 9603 |
+
3rm9
|
| 9604 |
+
4bbe
|
| 9605 |
+
6gxa
|
| 9606 |
+
5h4j
|
| 9607 |
+
4ljh
|
| 9608 |
+
4a4f
|
| 9609 |
+
4nw5
|
| 9610 |
+
2vi5
|
| 9611 |
+
3eqy
|
| 9612 |
+
4tpp
|
| 9613 |
+
1pi5
|
| 9614 |
+
4bt3
|
| 9615 |
+
5oh2
|
| 9616 |
+
3kiv
|
| 9617 |
+
4xyc
|
| 9618 |
+
1zd2
|
| 9619 |
+
2vur
|
| 9620 |
+
5znr
|
| 9621 |
+
2k2r
|
| 9622 |
+
5j1x
|
| 9623 |
+
3pfp
|
| 9624 |
+
6g37
|
| 9625 |
+
3nsq
|
| 9626 |
+
6eq8
|
| 9627 |
+
5jrq
|
| 9628 |
+
5eci
|
| 9629 |
+
1nmk
|
| 9630 |
+
5ehr
|
| 9631 |
+
4qz1
|
| 9632 |
+
5zkb
|
| 9633 |
+
4f65
|
| 9634 |
+
5sym
|
| 9635 |
+
5ct2
|
| 9636 |
+
3wti
|
| 9637 |
+
4nwk
|
| 9638 |
+
5l8t
|
| 9639 |
+
5k0f
|
| 9640 |
+
1mqh
|
| 9641 |
+
1h3b
|
| 9642 |
+
1wxz
|
| 9643 |
+
3tt4
|
| 9644 |
+
5k0x
|
| 9645 |
+
2ez7
|
| 9646 |
+
5wbp
|
| 9647 |
+
4ddl
|
| 9648 |
+
2ygu
|
| 9649 |
+
3e7a
|
| 9650 |
+
5m7s
|
| 9651 |
+
2odd
|
| 9652 |
+
4eon
|
| 9653 |
+
3vf9
|
| 9654 |
+
1pxm
|
| 9655 |
+
4umt
|
| 9656 |
+
4rlt
|
| 9657 |
+
4f8j
|
| 9658 |
+
5dp9
|
| 9659 |
+
1ung
|
| 9660 |
+
4x7h
|
| 9661 |
+
3max
|
| 9662 |
+
4btu
|
| 9663 |
+
4hyb
|
| 9664 |
+
3nyx
|
| 9665 |
+
3nif
|
| 9666 |
+
5hmk
|
| 9667 |
+
3tz4
|
| 9668 |
+
3qxm
|
| 9669 |
+
6isd
|
| 9670 |
+
6g97
|
| 9671 |
+
5ccm
|
| 9672 |
+
5u0z
|
| 9673 |
+
2wbd
|
| 9674 |
+
3aav
|
| 9675 |
+
5vzu
|
| 9676 |
+
2hai
|
| 9677 |
+
6evn
|
| 9678 |
+
2yga
|
| 9679 |
+
1p17
|
| 9680 |
+
5fus
|
| 9681 |
+
5elw
|
| 9682 |
+
4azt
|
| 9683 |
+
6ea2
|
| 9684 |
+
1xbo
|
| 9685 |
+
2yhy
|
| 9686 |
+
4u0m
|
| 9687 |
+
6b3e
|
| 9688 |
+
5hff
|
| 9689 |
+
3iu8
|
| 9690 |
+
1dqx
|
| 9691 |
+
4o0v
|
| 9692 |
+
4gs6
|
| 9693 |
+
6gn1
|
| 9694 |
+
2j87
|
| 9695 |
+
4hy1
|
| 9696 |
+
5nmg
|
| 9697 |
+
2pgz
|
| 9698 |
+
1wdq
|
| 9699 |
+
4er1
|
| 9700 |
+
2l6j
|
| 9701 |
+
3itu
|
| 9702 |
+
5a7j
|
| 9703 |
+
3su4
|
| 9704 |
+
3r6g
|
| 9705 |
+
5ea7
|
| 9706 |
+
5qb1
|
| 9707 |
+
3grj
|
| 9708 |
+
4xjs
|
| 9709 |
+
2x85
|
| 9710 |
+
3le6
|
| 9711 |
+
5t19
|
| 9712 |
+
5ett
|
| 9713 |
+
2esm
|
| 9714 |
+
3u7s
|
| 9715 |
+
3c6w
|
| 9716 |
+
2clo
|
| 9717 |
+
3d0e
|
| 9718 |
+
2jqi
|
| 9719 |
+
1ekb
|
| 9720 |
+
4ovz
|
| 9721 |
+
3soq
|
| 9722 |
+
3rbm
|
| 9723 |
+
1hef
|
| 9724 |
+
5os0
|
| 9725 |
+
4tk2
|
| 9726 |
+
4j22
|
| 9727 |
+
4etz
|
| 9728 |
+
3gus
|
| 9729 |
+
3s76
|
| 9730 |
+
4pyv
|
| 9731 |
+
3c49
|
| 9732 |
+
2yz3
|
| 9733 |
+
1xnx
|
| 9734 |
+
1elc
|
| 9735 |
+
5i24
|
| 9736 |
+
5t9z
|
| 9737 |
+
5h1e
|
| 9738 |
+
2qt5
|
| 9739 |
+
5ekh
|
| 9740 |
+
3fbr
|
| 9741 |
+
2za0
|
| 9742 |
+
5wej
|
| 9743 |
+
2yel
|
| 9744 |
+
4anx
|
| 9745 |
+
1rtf
|
| 9746 |
+
1r5y
|
| 9747 |
+
1pzo
|
| 9748 |
+
5kam
|
| 9749 |
+
5i13
|
| 9750 |
+
3krj
|
| 9751 |
+
5exw
|
| 9752 |
+
1aj6
|
| 9753 |
+
3qs1
|
| 9754 |
+
2x9f
|
| 9755 |
+
2np8
|
| 9756 |
+
5ci7
|
| 9757 |
+
1m7y
|
| 9758 |
+
1fcx
|
| 9759 |
+
5ok3
|
| 9760 |
+
1hp0
|
| 9761 |
+
4jq8
|
| 9762 |
+
4bhn
|
| 9763 |
+
1i9p
|
| 9764 |
+
4hyh
|
| 9765 |
+
1jil
|
| 9766 |
+
5f2s
|
| 9767 |
+
1bwn
|
| 9768 |
+
1xh7
|
| 9769 |
+
1pmv
|
| 9770 |
+
5kbg
|
| 9771 |
+
4bg6
|
| 9772 |
+
4ycu
|
| 9773 |
+
3lxk
|
| 9774 |
+
4w57
|
| 9775 |
+
4kxl
|
| 9776 |
+
5mk9
|
| 9777 |
+
6er4
|
| 9778 |
+
6h1u
|
| 9779 |
+
3iae
|
| 9780 |
+
3fnm
|
| 9781 |
+
3mxr
|
| 9782 |
+
3c2o
|
| 9783 |
+
5eva
|
| 9784 |
+
4kbi
|
| 9785 |
+
3ogx
|
| 9786 |
+
5n0e
|
| 9787 |
+
5owa
|
| 9788 |
+
6bmr
|
| 9789 |
+
4mgb
|
| 9790 |
+
5ka9
|
| 9791 |
+
2bza
|
| 9792 |
+
3sad
|
| 9793 |
+
4wj5
|
| 9794 |
+
4o1d
|
| 9795 |
+
4pnc
|
| 9796 |
+
4mzf
|
| 9797 |
+
1f9e
|
| 9798 |
+
4whl
|
| 9799 |
+
2imb
|
| 9800 |
+
4llk
|
| 9801 |
+
4ih6
|
| 9802 |
+
3stj
|
| 9803 |
+
5tnt
|
| 9804 |
+
4hg7
|
| 9805 |
+
5dxe
|
| 9806 |
+
1hc9
|
| 9807 |
+
3dcc
|
| 9808 |
+
5azf
|
| 9809 |
+
5c5h
|
| 9810 |
+
4m7x
|
| 9811 |
+
5nve
|
| 9812 |
+
4dcy
|
| 9813 |
+
1r9l
|
| 9814 |
+
3l0k
|
| 9815 |
+
4iuv
|
| 9816 |
+
5j31
|
| 9817 |
+
5ulg
|
| 9818 |
+
2jxr
|
| 9819 |
+
4qwx
|
| 9820 |
+
4joe
|
| 9821 |
+
3sm1
|
| 9822 |
+
4prd
|
| 9823 |
+
5etn
|
| 9824 |
+
1ihy
|
| 9825 |
+
3ut5
|
| 9826 |
+
4gjc
|
| 9827 |
+
5c1x
|
| 9828 |
+
2oxy
|
| 9829 |
+
5can
|
| 9830 |
+
5hz9
|
| 9831 |
+
4oem
|
| 9832 |
+
3nzi
|
| 9833 |
+
4hfz
|
| 9834 |
+
4mn3
|
| 9835 |
+
1svh
|
| 9836 |
+
4zsa
|
| 9837 |
+
5y24
|
| 9838 |
+
5f9b
|
| 9839 |
+
4mka
|
| 9840 |
+
5wf7
|
| 9841 |
+
4o0y
|
| 9842 |
+
5w8h
|
| 9843 |
+
1t1s
|
| 9844 |
+
5w7i
|
| 9845 |
+
4o2e
|
| 9846 |
+
4e3j
|
| 9847 |
+
4jmu
|
| 9848 |
+
3dg8
|
| 9849 |
+
1fq4
|
| 9850 |
+
5u7o
|
| 9851 |
+
5owt
|
| 9852 |
+
5myr
|
| 9853 |
+
5ka3
|
| 9854 |
+
1orw
|
| 9855 |
+
1vea
|
| 9856 |
+
1j01
|
| 9857 |
+
2hkf
|
| 9858 |
+
2fv9
|
| 9859 |
+
1e55
|
| 9860 |
+
3cfs
|
| 9861 |
+
5nhy
|
| 9862 |
+
4x6s
|
| 9863 |
+
4q1w
|
| 9864 |
+
4o5b
|
| 9865 |
+
1b32
|
| 9866 |
+
2fzk
|
| 9867 |
+
2p3g
|
| 9868 |
+
1h24
|
| 9869 |
+
3qfd
|
| 9870 |
+
4xu0
|
| 9871 |
+
5ow1
|
| 9872 |
+
3du8
|
| 9873 |
+
4cby
|
| 9874 |
+
1mtr
|
| 9875 |
+
4uj1
|
| 9876 |
+
4omj
|
| 9877 |
+
2y58
|
| 9878 |
+
1onz
|
| 9879 |
+
3esj
|
| 9880 |
+
1zyj
|
| 9881 |
+
1fig
|
| 9882 |
+
3cd7
|
| 9883 |
+
2hf8
|
| 9884 |
+
1o4f
|
| 9885 |
+
5w0q
|
| 9886 |
+
1ql7
|
| 9887 |
+
3kv2
|
| 9888 |
+
3que
|
| 9889 |
+
2loz
|
| 9890 |
+
4zg6
|
| 9891 |
+
5f4l
|
| 9892 |
+
4mo4
|
| 9893 |
+
3mam
|
| 9894 |
+
2h44
|
| 9895 |
+
5cwa
|
| 9896 |
+
5hyr
|
| 9897 |
+
3wix
|
| 9898 |
+
5tw5
|
| 9899 |
+
4dem
|
| 9900 |
+
5di1
|
| 9901 |
+
6en5
|
| 9902 |
+
4ogv
|
| 9903 |
+
2r6y
|
| 9904 |
+
6c7w
|
| 9905 |
+
3dx4
|
| 9906 |
+
5qa7
|
| 9907 |
+
1os0
|
| 9908 |
+
4tn4
|
| 9909 |
+
5vm6
|
| 9910 |
+
4x5r
|
| 9911 |
+
3k23
|
| 9912 |
+
2q54
|
| 9913 |
+
2pe1
|
| 9914 |
+
4k2y
|
| 9915 |
+
4rfc
|
| 9916 |
+
6hqy
|
| 9917 |
+
4ie3
|
| 9918 |
+
2qbq
|
| 9919 |
+
5o07
|
| 9920 |
+
4mm7
|
| 9921 |
+
3rqg
|
| 9922 |
+
1a85
|
| 9923 |
+
5etb
|
| 9924 |
+
4bty
|
| 9925 |
+
1nvs
|
| 9926 |
+
2c9t
|
| 9927 |
+
2h6b
|
| 9928 |
+
4ivc
|
| 9929 |
+
5ar4
|
| 9930 |
+
1xfv
|
| 9931 |
+
6g6z
|
| 9932 |
+
4e1n
|
| 9933 |
+
2q2y
|
| 9934 |
+
2zwz
|
| 9935 |
+
2xe4
|
| 9936 |
+
3nin
|
| 9937 |
+
4py1
|
| 9938 |
+
4j52
|
| 9939 |
+
4mrf
|
| 9940 |
+
1ngw
|
| 9941 |
+
1m7q
|
| 9942 |
+
5c7b
|
| 9943 |
+
3lq4
|
| 9944 |
+
4m8t
|
| 9945 |
+
4qhp
|
| 9946 |
+
1my3
|
| 9947 |
+
5fgk
|
| 9948 |
+
2ff2
|
| 9949 |
+
5qat
|
| 9950 |
+
3chf
|
| 9951 |
+
4kox
|
| 9952 |
+
5v1b
|
| 9953 |
+
1det
|
| 9954 |
+
4cq0
|
| 9955 |
+
2iwx
|
| 9956 |
+
4isi
|
| 9957 |
+
4bcg
|
| 9958 |
+
1pro
|
| 9959 |
+
4mm6
|
| 9960 |
+
4m5m
|
| 9961 |
+
3v31
|
| 9962 |
+
4yt7
|
| 9963 |
+
3vp4
|
| 9964 |
+
3imy
|
| 9965 |
+
1rbo
|
| 9966 |
+
1u6q
|
| 9967 |
+
2w7y
|
| 9968 |
+
1k9q
|
| 9969 |
+
4erq
|
| 9970 |
+
5tzh
|
| 9971 |
+
2a3x
|
| 9972 |
+
5f61
|
| 9973 |
+
3el1
|
| 9974 |
+
5lkr
|
| 9975 |
+
1mn9
|
| 9976 |
+
4oym
|
| 9977 |
+
2iko
|
| 9978 |
+
1h39
|
| 9979 |
+
5nu1
|
| 9980 |
+
1qxz
|
| 9981 |
+
4qz4
|
| 9982 |
+
1o5p
|
| 9983 |
+
4i9c
|
| 9984 |
+
3wmc
|
| 9985 |
+
4xxh
|
| 9986 |
+
4ajl
|
| 9987 |
+
4zud
|
| 9988 |
+
3kig
|
| 9989 |
+
3wde
|
| 9990 |
+
6fdu
|
| 9991 |
+
4bw2
|
| 9992 |
+
1hk1
|
| 9993 |
+
1pph
|
| 9994 |
+
3wut
|
| 9995 |
+
2ate
|
| 9996 |
+
3nw9
|
| 9997 |
+
5ceo
|
| 9998 |
+
1g2m
|
| 9999 |
+
1o3i
|
| 10000 |
+
5ald
|
| 10001 |
+
2hjb
|
| 10002 |
+
5za1
|
| 10003 |
+
4pnw
|
| 10004 |
+
4uwl
|
| 10005 |
+
5c1w
|
| 10006 |
+
1m0n
|
| 10007 |
+
2nsj
|
| 10008 |
+
4imz
|
| 10009 |
+
4x3k
|
| 10010 |
+
4zgk
|
| 10011 |
+
3pvu
|
| 10012 |
+
2fah
|
| 10013 |
+
6ex0
|
| 10014 |
+
6gz9
|
| 10015 |
+
1ke8
|
| 10016 |
+
3tkw
|
| 10017 |
+
3m40
|
| 10018 |
+
4bo9
|
| 10019 |
+
4cqf
|
| 10020 |
+
5sy2
|
| 10021 |
+
3fh7
|
| 10022 |
+
5u4f
|
| 10023 |
+
5or9
|
| 10024 |
+
1msn
|
| 10025 |
+
5ow8
|
| 10026 |
+
5ekx
|
| 10027 |
+
1dar
|
| 10028 |
+
6iil
|
| 10029 |
+
3tsk
|
| 10030 |
+
3tzm
|
| 10031 |
+
3ttz
|
| 10032 |
+
4ibd
|
| 10033 |
+
4zx5
|
| 10034 |
+
2c4w
|
| 10035 |
+
1qf4
|
| 10036 |
+
2xgo
|
| 10037 |
+
3bm6
|
| 10038 |
+
3h8b
|
| 10039 |
+
4zy1
|
| 10040 |
+
4okg
|
| 10041 |
+
3btc
|
| 10042 |
+
4ivd
|
| 10043 |
+
1o4i
|
| 10044 |
+
3wtm
|
| 10045 |
+
6mxe
|
| 10046 |
+
5ygf
|
| 10047 |
+
4mpe
|
| 10048 |
+
1o2q
|
| 10049 |
+
4qtb
|
| 10050 |
+
3qw6
|
| 10051 |
+
4yb5
|
| 10052 |
+
1b5i
|
| 10053 |
+
1dbb
|
| 10054 |
+
4gj3
|
| 10055 |
+
5y5n
|
| 10056 |
+
5jdi
|
| 10057 |
+
1kv5
|
| 10058 |
+
4der
|
| 10059 |
+
2xc4
|
| 10060 |
+
3s3r
|
| 10061 |
+
5lso
|
| 10062 |
+
1n2v
|
| 10063 |
+
4jje
|
| 10064 |
+
5akw
|
| 10065 |
+
5f3z
|
| 10066 |
+
6dl9
|
| 10067 |
+
5wgd
|
| 10068 |
+
3muk
|
| 10069 |
+
3uw9
|
| 10070 |
+
1o32
|
| 10071 |
+
2zpk
|
| 10072 |
+
3hk1
|
| 10073 |
+
2f1a
|
| 10074 |
+
2haw
|
| 10075 |
+
4i54
|
| 10076 |
+
5d3s
|
| 10077 |
+
5m56
|
| 10078 |
+
2bkz
|
| 10079 |
+
5dxt
|
| 10080 |
+
1qw7
|
| 10081 |
+
4abu
|
| 10082 |
+
5x33
|
| 10083 |
+
5fjw
|
| 10084 |
+
4k5p
|
| 10085 |
+
5als
|
| 10086 |
+
5eqy
|
| 10087 |
+
4de0
|
| 10088 |
+
2a4g
|
| 10089 |
+
3hf8
|
| 10090 |
+
5em9
|
| 10091 |
+
4gr8
|
| 10092 |
+
4ztl
|
| 10093 |
+
2g1q
|
| 10094 |
+
4phv
|
| 10095 |
+
5tdb
|
| 10096 |
+
6dh7
|
| 10097 |
+
3rz3
|
| 10098 |
+
2vww
|
| 10099 |
+
5t4b
|
| 10100 |
+
5j7g
|
| 10101 |
+
1f4f
|
| 10102 |
+
5mev
|
| 10103 |
+
5cgd
|
| 10104 |
+
3vrw
|
| 10105 |
+
2o48
|
| 10106 |
+
4cgj
|
| 10107 |
+
4cbt
|
| 10108 |
+
5ofw
|
| 10109 |
+
3q2m
|
| 10110 |
+
4ce2
|
| 10111 |
+
2qqs
|
| 10112 |
+
4yw7
|
| 10113 |
+
1p93
|
| 10114 |
+
5wip
|
| 10115 |
+
6afl
|
| 10116 |
+
6b16
|
| 10117 |
+
2cgu
|
| 10118 |
+
4o24
|
| 10119 |
+
1z34
|
| 10120 |
+
3fvg
|
| 10121 |
+
1w6j
|
| 10122 |
+
3py0
|
| 10123 |
+
5nkc
|
| 10124 |
+
1os5
|
| 10125 |
+
3t70
|
| 10126 |
+
2nwl
|
| 10127 |
+
6dq4
|
| 10128 |
+
2brp
|
| 10129 |
+
5z95
|
| 10130 |
+
1tx7
|
| 10131 |
+
4yc8
|
| 10132 |
+
1hvr
|
| 10133 |
+
4e5w
|
| 10134 |
+
4f1s
|
| 10135 |
+
1xzx
|
| 10136 |
+
3fty
|
| 10137 |
+
6au9
|
| 10138 |
+
4eu0
|
| 10139 |
+
6fdt
|
| 10140 |
+
4d1a
|
| 10141 |
+
5ap7
|
| 10142 |
+
1sqb
|
| 10143 |
+
3wt5
|
| 10144 |
+
3eqb
|
| 10145 |
+
4o3c
|
| 10146 |
+
1rgl
|
| 10147 |
+
5cxh
|
| 10148 |
+
3r7q
|
| 10149 |
+
1jvu
|
| 10150 |
+
2ay8
|
| 10151 |
+
5ak5
|
| 10152 |
+
6dug
|
| 10153 |
+
3wgw
|
| 10154 |
+
6bfn
|
| 10155 |
+
2fx8
|
| 10156 |
+
4rxc
|
| 10157 |
+
6bny
|
| 10158 |
+
6h36
|
| 10159 |
+
5ugm
|
| 10160 |
+
5trj
|
| 10161 |
+
5xpo
|
| 10162 |
+
1jmf
|
| 10163 |
+
3wp1
|
| 10164 |
+
3ugc
|
| 10165 |
+
4jhz
|
| 10166 |
+
3p44
|
| 10167 |
+
1zp5
|
| 10168 |
+
1v2o
|
| 10169 |
+
5tbe
|
| 10170 |
+
1o2r
|
| 10171 |
+
4wsk
|
| 10172 |
+
5vdk
|
| 10173 |
+
3rni
|
| 10174 |
+
3kc3
|
| 10175 |
+
4pb2
|
| 10176 |
+
5euk
|
| 10177 |
+
5ukk
|
| 10178 |
+
2k2g
|
| 10179 |
+
4a4q
|
| 10180 |
+
4gtv
|
| 10181 |
+
5u0g
|
| 10182 |
+
4zh4
|
| 10183 |
+
3v2p
|
| 10184 |
+
3kqt
|
| 10185 |
+
4im0
|
| 10186 |
+
1ni1
|
| 10187 |
+
3wp0
|
| 10188 |
+
4tkn
|
| 10189 |
+
6bg3
|
| 10190 |
+
1o1s
|
| 10191 |
+
5d3l
|
| 10192 |
+
2qc6
|
| 10193 |
+
5lp6
|
| 10194 |
+
2br6
|
| 10195 |
+
5ku6
|
| 10196 |
+
5d3c
|
| 10197 |
+
5ny3
|
| 10198 |
+
5hjb
|
| 10199 |
+
4txc
|
| 10200 |
+
3waw
|
| 10201 |
+
5b1s
|
| 10202 |
+
3nu4
|
| 10203 |
+
2pwc
|
| 10204 |
+
4x3i
|
| 10205 |
+
3djk
|
| 10206 |
+
3qw7
|
| 10207 |
+
3ivx
|
| 10208 |
+
6b8j
|
| 10209 |
+
1y0x
|
| 10210 |
+
1ros
|
| 10211 |
+
4rwj
|
| 10212 |
+
6bto
|
| 10213 |
+
3ztc
|
| 10214 |
+
1jlx
|
| 10215 |
+
3l0v
|
| 10216 |
+
4ycn
|
| 10217 |
+
5qam
|
| 10218 |
+
4mep
|
| 10219 |
+
1bil
|
| 10220 |
+
5tzc
|
| 10221 |
+
4u6r
|
| 10222 |
+
4g2r
|
| 10223 |
+
4pg9
|
| 10224 |
+
2hzn
|
| 10225 |
+
3r8v
|
| 10226 |
+
1fzm
|
| 10227 |
+
5d9k
|
| 10228 |
+
4gg7
|
| 10229 |
+
3d9n
|
| 10230 |
+
6cct
|
| 10231 |
+
5f27
|
| 10232 |
+
2w6n
|
| 10233 |
+
3k8d
|
| 10234 |
+
4b9z
|
| 10235 |
+
4uiy
|
| 10236 |
+
5fl4
|
| 10237 |
+
2bbb
|
| 10238 |
+
1qon
|
| 10239 |
+
1ydk
|
| 10240 |
+
4jv8
|
| 10241 |
+
2bw7
|
| 10242 |
+
1li2
|
| 10243 |
+
4qag
|
| 10244 |
+
3hyg
|
| 10245 |
+
2qtr
|
| 10246 |
+
2cen
|
| 10247 |
+
4k3n
|
| 10248 |
+
5cxa
|
| 10249 |
+
1dzp
|
| 10250 |
+
3gcu
|
| 10251 |
+
4nbn
|
| 10252 |
+
4ctk
|
| 10253 |
+
1okx
|
| 10254 |
+
5fb1
|
| 10255 |
+
4r5v
|
| 10256 |
+
4det
|
| 10257 |
+
2xm2
|
| 10258 |
+
3exe
|
| 10259 |
+
1jje
|
| 10260 |
+
4wup
|
| 10261 |
+
3sjt
|
| 10262 |
+
6h8s
|
| 10263 |
+
5tqf
|
| 10264 |
+
3h1x
|
| 10265 |
+
4ps1
|
| 10266 |
+
3c72
|
| 10267 |
+
2dua
|
| 10268 |
+
4w97
|
| 10269 |
+
5diu
|
| 10270 |
+
2cbs
|
| 10271 |
+
4zy4
|
| 10272 |
+
4alx
|
| 10273 |
+
2gbg
|
| 10274 |
+
1y6b
|
| 10275 |
+
1tnj
|
| 10276 |
+
2g78
|
| 10277 |
+
5aml
|
| 10278 |
+
2vo7
|
| 10279 |
+
4qye
|
| 10280 |
+
4no9
|
| 10281 |
+
5ct1
|
| 10282 |
+
4elb
|
| 10283 |
+
5sz2
|
| 10284 |
+
4u0x
|
| 10285 |
+
4ztq
|
| 10286 |
+
2nnd
|
| 10287 |
+
4abj
|
| 10288 |
+
3uh4
|
| 10289 |
+
1pop
|
| 10290 |
+
3hv5
|
| 10291 |
+
3zll
|
| 10292 |
+
4rak
|
| 10293 |
+
5u7m
|
| 10294 |
+
5tuo
|
| 10295 |
+
5ur1
|
| 10296 |
+
4iqu
|
| 10297 |
+
3kac
|
| 10298 |
+
1t7r
|
| 10299 |
+
4um1
|
| 10300 |
+
5mwa
|
| 10301 |
+
4o77
|
| 10302 |
+
4r5t
|
| 10303 |
+
4ijq
|
| 10304 |
+
5ld8
|
| 10305 |
+
3hdz
|
| 10306 |
+
4bxn
|
| 10307 |
+
3vb5
|
| 10308 |
+
3rx7
|
| 10309 |
+
2qnp
|
| 10310 |
+
1k9s
|
| 10311 |
+
6cjy
|
| 10312 |
+
4zjj
|
| 10313 |
+
1bbz
|
| 10314 |
+
4rx5
|
| 10315 |
+
4nzm
|
| 10316 |
+
1m5c
|
| 10317 |
+
4djs
|
| 10318 |
+
5ect
|
| 10319 |
+
5ejw
|
| 10320 |
+
6f20
|
| 10321 |
+
1zea
|
| 10322 |
+
2zzu
|
| 10323 |
+
3rxp
|
| 10324 |
+
2z3z
|
| 10325 |
+
1oeb
|
| 10326 |
+
1gux
|
| 10327 |
+
4g8r
|
| 10328 |
+
1hk2
|
| 10329 |
+
5dfp
|
| 10330 |
+
5vc4
|
| 10331 |
+
4g55
|
| 10332 |
+
4o7d
|
| 10333 |
+
6cdp
|
| 10334 |
+
1v16
|
| 10335 |
+
5gvn
|
| 10336 |
+
3l8s
|
| 10337 |
+
4bis
|
| 10338 |
+
2baj
|
| 10339 |
+
1njj
|
| 10340 |
+
5wik
|
| 10341 |
+
5a7i
|
| 10342 |
+
4pnq
|
| 10343 |
+
2a5u
|
| 10344 |
+
5nn0
|
| 10345 |
+
3q6k
|
| 10346 |
+
6eab
|
| 10347 |
+
3s68
|
| 10348 |
+
1yw7
|
| 10349 |
+
4z88
|
| 10350 |
+
4x8p
|
| 10351 |
+
5vgo
|
| 10352 |
+
4yo6
|
| 10353 |
+
4lv1
|
| 10354 |
+
5ose
|
| 10355 |
+
4ogt
|
| 10356 |
+
4k76
|
| 10357 |
+
2pr9
|
| 10358 |
+
1z3j
|
| 10359 |
+
2g72
|
| 10360 |
+
4gdy
|
| 10361 |
+
2l1b
|
| 10362 |
+
6g0q
|
| 10363 |
+
5tri
|
| 10364 |
+
1g7q
|
| 10365 |
+
1o4g
|
| 10366 |
+
4cg8
|
| 10367 |
+
1v2u
|
| 10368 |
+
3wyj
|
| 10369 |
+
4kfq
|
| 10370 |
+
4hrd
|
| 10371 |
+
1no6
|
| 10372 |
+
5lax
|
| 10373 |
+
3f7g
|
| 10374 |
+
5tmp
|
| 10375 |
+
4deh
|
| 10376 |
+
4rad
|
| 10377 |
+
3d78
|
| 10378 |
+
5l72
|
| 10379 |
+
3avj
|
| 10380 |
+
4rg0
|
| 10381 |
+
5nt4
|
| 10382 |
+
2w08
|
| 10383 |
+
3lxo
|
| 10384 |
+
3ljj
|
| 10385 |
+
4kif
|
| 10386 |
+
2jdo
|
| 10387 |
+
1v0o
|
| 10388 |
+
1k3n
|
| 10389 |
+
1rgj
|
| 10390 |
+
4zt8
|
| 10391 |
+
6dub
|
| 10392 |
+
1zdp
|
| 10393 |
+
187l
|
| 10394 |
+
3n6k
|
| 10395 |
+
1tnk
|
| 10396 |
+
2h2g
|
| 10397 |
+
4mjq
|
| 10398 |
+
2gni
|
| 10399 |
+
4pre
|
| 10400 |
+
3bgb
|
| 10401 |
+
4ddm
|
| 10402 |
+
1mwt
|
| 10403 |
+
5nhl
|
| 10404 |
+
6b4w
|
| 10405 |
+
3sou
|
| 10406 |
+
3sm2
|
| 10407 |
+
5qik
|
| 10408 |
+
1loq
|
| 10409 |
+
4wmx
|
| 10410 |
+
4yqh
|
| 10411 |
+
5mi7
|
| 10412 |
+
4ynl
|
| 10413 |
+
2f9v
|
| 10414 |
+
1qbn
|
| 10415 |
+
3uod
|
| 10416 |
+
2w0z
|
| 10417 |
+
1fsy
|
| 10418 |
+
1tmm
|
| 10419 |
+
3va4
|
| 10420 |
+
2adu
|
| 10421 |
+
3ozs
|
| 10422 |
+
5g42
|
| 10423 |
+
5vo2
|
| 10424 |
+
4h3a
|
| 10425 |
+
5tzw
|
| 10426 |
+
1xkk
|
| 10427 |
+
3tdu
|
| 10428 |
+
3efj
|
| 10429 |
+
3as2
|
| 10430 |
+
2ity
|
| 10431 |
+
4zwy
|
| 10432 |
+
2nmy
|
| 10433 |
+
4amy
|
| 10434 |
+
5kad
|
| 10435 |
+
3p58
|
| 10436 |
+
6h1i
|
| 10437 |
+
5x13
|
| 10438 |
+
4d1d
|
| 10439 |
+
6aum
|
| 10440 |
+
1ftm
|
| 10441 |
+
4hla
|
| 10442 |
+
2qoe
|
| 10443 |
+
5em5
|
| 10444 |
+
3u0p
|
| 10445 |
+
3dx2
|
| 10446 |
+
4kn2
|
| 10447 |
+
2pou
|
| 10448 |
+
4ce1
|
| 10449 |
+
4b7r
|
| 10450 |
+
1rth
|
| 10451 |
+
3eq7
|
| 10452 |
+
3i0s
|
| 10453 |
+
2pgl
|
| 10454 |
+
2yj8
|
| 10455 |
+
2z6w
|
| 10456 |
+
6fs0
|
| 10457 |
+
1dhi
|
| 10458 |
+
4hdp
|
| 10459 |
+
5svz
|
| 10460 |
+
4mz5
|
| 10461 |
+
1a1c
|
| 10462 |
+
4p0n
|
| 10463 |
+
3n7s
|
| 10464 |
+
2aow
|
| 10465 |
+
1j19
|
| 10466 |
+
4k3o
|
| 10467 |
+
1m7i
|
| 10468 |
+
3s9z
|
| 10469 |
+
1igb
|
| 10470 |
+
4o9s
|
| 10471 |
+
5xyx
|
| 10472 |
+
4mex
|
| 10473 |
+
5jy3
|
| 10474 |
+
3hxc
|
| 10475 |
+
2c3i
|
| 10476 |
+
4itp
|
| 10477 |
+
4b12
|
| 10478 |
+
3o6m
|
| 10479 |
+
2vd4
|
| 10480 |
+
3tib
|
| 10481 |
+
5yj8
|
| 10482 |
+
1aze
|
| 10483 |
+
4yad
|
| 10484 |
+
3w37
|
| 10485 |
+
5e8z
|
| 10486 |
+
2p4y
|
| 10487 |
+
2xda
|
| 10488 |
+
1g6r
|
| 10489 |
+
4h5e
|
| 10490 |
+
3fuz
|
| 10491 |
+
1f2p
|
| 10492 |
+
1b51
|
| 10493 |
+
5lm6
|
| 10494 |
+
4wn0
|
| 10495 |
+
3h21
|
| 10496 |
+
4bjc
|
| 10497 |
+
3nnu
|
| 10498 |
+
3d32
|
| 10499 |
+
3l9l
|
| 10500 |
+
4kqo
|
| 10501 |
+
5w38
|
| 10502 |
+
2p2i
|
| 10503 |
+
4ohk
|
| 10504 |
+
5l2i
|
| 10505 |
+
4jlh
|
| 10506 |
+
4whh
|
| 10507 |
+
3u6i
|
| 10508 |
+
2khh
|
| 10509 |
+
1ogu
|
| 10510 |
+
1zeo
|
| 10511 |
+
1iq1
|
| 10512 |
+
4knb
|
| 10513 |
+
3lrh
|
| 10514 |
+
5iok
|
| 10515 |
+
5t6p
|
| 10516 |
+
2ow2
|
| 10517 |
+
4dx9
|
| 10518 |
+
2a0t
|
| 10519 |
+
5g3m
|
| 10520 |
+
1c5x
|
| 10521 |
+
1qxw
|
| 10522 |
+
3d83
|
| 10523 |
+
3pwh
|
| 10524 |
+
5g46
|
| 10525 |
+
1yw2
|
| 10526 |
+
4yhm
|
| 10527 |
+
1tka
|
| 10528 |
+
3qpp
|
| 10529 |
+
4g19
|
| 10530 |
+
1pdz
|
| 10531 |
+
3odl
|
| 10532 |
+
5mby
|
| 10533 |
+
3zt1
|
| 10534 |
+
1y3v
|
| 10535 |
+
3tv4
|
| 10536 |
+
2phb
|
| 10537 |
+
1bzf
|
| 10538 |
+
1m2r
|
| 10539 |
+
6g46
|
| 10540 |
+
3hvj
|
| 10541 |
+
3mhl
|
| 10542 |
+
4w9c
|
| 10543 |
+
3o9e
|
| 10544 |
+
4imq
|
| 10545 |
+
5k4i
|
| 10546 |
+
3abt
|
| 10547 |
+
3vuc
|
| 10548 |
+
4oyt
|
| 10549 |
+
5qb3
|
| 10550 |
+
5yv5
|
| 10551 |
+
4n1b
|
| 10552 |
+
1pip
|
| 10553 |
+
5iu8
|
| 10554 |
+
3zke
|
| 10555 |
+
5f1z
|
| 10556 |
+
4zlz
|
| 10557 |
+
5ai0
|
| 10558 |
+
4op2
|
| 10559 |
+
3tfn
|
| 10560 |
+
5cnm
|
| 10561 |
+
6dge
|
| 10562 |
+
1htg
|
| 10563 |
+
3rey
|
| 10564 |
+
1o6r
|
| 10565 |
+
3w5n
|
| 10566 |
+
4l52
|
| 10567 |
+
4b76
|
| 10568 |
+
5qar
|
| 10569 |
+
2kaw
|
| 10570 |
+
3eyh
|
| 10571 |
+
4ezz
|
| 10572 |
+
3hkt
|
| 10573 |
+
5e1b
|
| 10574 |
+
3ufa
|
| 10575 |
+
4btx
|
| 10576 |
+
5mgj
|
| 10577 |
+
5cst
|
| 10578 |
+
5ea5
|
| 10579 |
+
5kgn
|
| 10580 |
+
3jq7
|
| 10581 |
+
1qy2
|
| 10582 |
+
4fpk
|
| 10583 |
+
3hq5
|
| 10584 |
+
1at5
|
| 10585 |
+
1hge
|
| 10586 |
+
5ty1
|
| 10587 |
+
4b3d
|
| 10588 |
+
1lyb
|
| 10589 |
+
5mrd
|
| 10590 |
+
4zt4
|
| 10591 |
+
4ovg
|
| 10592 |
+
4pp7
|
| 10593 |
+
4elg
|
| 10594 |
+
5j71
|
| 10595 |
+
3i6o
|
| 10596 |
+
4mx5
|
| 10597 |
+
4pnm
|
| 10598 |
+
2g0g
|
| 10599 |
+
1x81
|
| 10600 |
+
4wkp
|
| 10601 |
+
4kxm
|
| 10602 |
+
1wht
|
| 10603 |
+
6q3q
|
| 10604 |
+
4gee
|
| 10605 |
+
2f70
|
| 10606 |
+
2wd1
|
| 10607 |
+
6aah
|
| 10608 |
+
4rfm
|
| 10609 |
+
2wvt
|
| 10610 |
+
3dx3
|
| 10611 |
+
5t4v
|
| 10612 |
+
1owh
|
| 10613 |
+
4k7o
|
| 10614 |
+
4z7n
|
| 10615 |
+
5ehw
|
| 10616 |
+
1ohr
|
| 10617 |
+
3c4e
|
| 10618 |
+
4k3h
|
| 10619 |
+
5y8y
|
| 10620 |
+
1pu8
|
| 10621 |
+
2mg5
|
| 10622 |
+
3tfk
|
| 10623 |
+
1t31
|
| 10624 |
+
4iho
|
| 10625 |
+
3i91
|
| 10626 |
+
5fdg
|
| 10627 |
+
5ntq
|
| 10628 |
+
5wa4
|
| 10629 |
+
5ene
|
| 10630 |
+
6ax4
|
| 10631 |
+
4aoi
|
| 10632 |
+
1u3r
|
| 10633 |
+
4b6f
|
| 10634 |
+
3src
|
| 10635 |
+
1sw2
|
| 10636 |
+
5fwa
|
| 10637 |
+
2q7m
|
| 10638 |
+
5g6u
|
| 10639 |
+
1puq
|
| 10640 |
+
3s56
|
| 10641 |
+
3ckt
|
| 10642 |
+
2jaj
|
| 10643 |
+
2qlb
|
| 10644 |
+
1l6y
|
| 10645 |
+
4tzq
|
| 10646 |
+
5u9d
|
| 10647 |
+
4nzb
|
| 10648 |
+
3ccc
|
| 10649 |
+
3isj
|
| 10650 |
+
5kbr
|
| 10651 |
+
5duw
|
| 10652 |
+
3ijh
|
| 10653 |
+
1uti
|
| 10654 |
+
5tw2
|
| 10655 |
+
4wuy
|
| 10656 |
+
3u8n
|
| 10657 |
+
1pl0
|
| 10658 |
+
6flg
|
| 10659 |
+
5w6o
|
| 10660 |
+
2v2q
|
| 10661 |
+
5jf4
|
| 10662 |
+
5a8y
|
| 10663 |
+
2nq6
|
| 10664 |
+
1tsy
|
| 10665 |
+
1rek
|
| 10666 |
+
2gtk
|
| 10667 |
+
4kom
|
| 10668 |
+
6en6
|
| 10669 |
+
3zmz
|
| 10670 |
+
3pb8
|
| 10671 |
+
6fap
|
| 10672 |
+
4whr
|
| 10673 |
+
5j1v
|
| 10674 |
+
1ld8
|
| 10675 |
+
4o37
|
| 10676 |
+
5qa5
|
| 10677 |
+
4c9w
|
| 10678 |
+
2olb
|
| 10679 |
+
4dru
|
| 10680 |
+
2h42
|
| 10681 |
+
5jnn
|
| 10682 |
+
1w0x
|
| 10683 |
+
6chh
|
| 10684 |
+
3av9
|
| 10685 |
+
3uzc
|
| 10686 |
+
1l83
|
| 10687 |
+
2osm
|
| 10688 |
+
5i89
|
| 10689 |
+
5wb6
|
| 10690 |
+
1b4h
|
| 10691 |
+
5e0a
|
| 10692 |
+
1nyy
|
| 10693 |
+
4bqg
|
| 10694 |
+
5axp
|
| 10695 |
+
2w1f
|
| 10696 |
+
4z1q
|
| 10697 |
+
4iax
|
| 10698 |
+
4q4q
|
| 10699 |
+
2bua
|
| 10700 |
+
4alv
|
| 10701 |
+
4jfx
|
| 10702 |
+
5kjn
|
| 10703 |
+
4u0w
|
| 10704 |
+
2k0g
|
| 10705 |
+
3uwk
|
| 10706 |
+
5iq6
|
| 10707 |
+
3o0g
|
| 10708 |
+
6fgg
|
| 10709 |
+
6hjj
|
| 10710 |
+
3iu9
|
| 10711 |
+
4xg8
|
| 10712 |
+
4xya
|
| 10713 |
+
1zfk
|
| 10714 |
+
3jvk
|
| 10715 |
+
5wxh
|
| 10716 |
+
3t0w
|
| 10717 |
+
5yg3
|
| 10718 |
+
1xpz
|
| 10719 |
+
4b60
|
| 10720 |
+
4j2t
|
| 10721 |
+
3ds0
|
| 10722 |
+
2l0i
|
| 10723 |
+
1pxk
|
| 10724 |
+
4da5
|
| 10725 |
+
1z6e
|
| 10726 |
+
4pry
|
| 10727 |
+
8lpr
|
| 10728 |
+
6bh5
|
| 10729 |
+
4bnz
|
| 10730 |
+
5fky
|
| 10731 |
+
2o4h
|
| 10732 |
+
4x2s
|
| 10733 |
+
2vth
|
| 10734 |
+
1i43
|
| 10735 |
+
1m21
|
| 10736 |
+
4xsy
|
| 10737 |
+
2pvn
|
| 10738 |
+
5htl
|
| 10739 |
+
4uwh
|
| 10740 |
+
5o7i
|
| 10741 |
+
3skh
|
| 10742 |
+
5wic
|
| 10743 |
+
1fvt
|
| 10744 |
+
4rio
|
| 10745 |
+
6dnp
|
| 10746 |
+
4ufu
|
| 10747 |
+
2ork
|
| 10748 |
+
5dkr
|
| 10749 |
+
3m59
|
| 10750 |
+
3btr
|
| 10751 |
+
5nps
|
| 10752 |
+
4nxs
|
| 10753 |
+
3r8i
|
| 10754 |
+
1z1h
|
| 10755 |
+
4jik
|
| 10756 |
+
6cjv
|
| 10757 |
+
1ff1
|
| 10758 |
+
1uwb
|
| 10759 |
+
4lgu
|
| 10760 |
+
4x3t
|
| 10761 |
+
5dtk
|
| 10762 |
+
2giu
|
| 10763 |
+
3meu
|
| 10764 |
+
3ove
|
| 10765 |
+
2zcq
|
| 10766 |
+
1qbt
|
| 10767 |
+
4n7y
|
| 10768 |
+
4e4a
|
| 10769 |
+
2ovq
|
| 10770 |
+
2avs
|
| 10771 |
+
4e3l
|
| 10772 |
+
1hbv
|
| 10773 |
+
3tl0
|
| 10774 |
+
3g15
|
| 10775 |
+
6c5t
|
| 10776 |
+
4ayu
|
| 10777 |
+
4aba
|
| 10778 |
+
5mmg
|
| 10779 |
+
5k09
|
| 10780 |
+
3fj7
|
| 10781 |
+
4y3j
|
| 10782 |
+
3dxg
|
| 10783 |
+
5a6n
|
| 10784 |
+
5lvd
|
| 10785 |
+
3c2f
|
| 10786 |
+
4m3m
|
| 10787 |
+
4x6k
|
| 10788 |
+
6h7b
|
| 10789 |
+
5gow
|
| 10790 |
+
2f18
|
| 10791 |
+
4kvm
|
| 10792 |
+
2qhn
|
| 10793 |
+
5y2f
|
| 10794 |
+
3fi2
|
| 10795 |
+
2y3p
|
| 10796 |
+
3vzv
|
| 10797 |
+
5h5q
|
| 10798 |
+
5gic
|
| 10799 |
+
1g6g
|
| 10800 |
+
5j47
|
| 10801 |
+
4tmr
|
| 10802 |
+
4inb
|
| 10803 |
+
1w6h
|
| 10804 |
+
1inf
|
| 10805 |
+
5alk
|
| 10806 |
+
3u8l
|
| 10807 |
+
5bnj
|
| 10808 |
+
5nk4
|
| 10809 |
+
5ug8
|
| 10810 |
+
6bgv
|
| 10811 |
+
3fzc
|
| 10812 |
+
5mgf
|
| 10813 |
+
4gfn
|
| 10814 |
+
2k0x
|
| 10815 |
+
4o0a
|
| 10816 |
+
4m1d
|
| 10817 |
+
2ds1
|
| 10818 |
+
3wnt
|
| 10819 |
+
2iwu
|
| 10820 |
+
2ogy
|
| 10821 |
+
6ffi
|
| 10822 |
+
6eu6
|
| 10823 |
+
3s0e
|
| 10824 |
+
3u1y
|
| 10825 |
+
3r04
|
| 10826 |
+
2nsl
|
| 10827 |
+
4knj
|
| 10828 |
+
1e1x
|
| 10829 |
+
5cs3
|
| 10830 |
+
5hfc
|
| 10831 |
+
5zob
|
| 10832 |
+
3hvi
|
| 10833 |
+
4erz
|
| 10834 |
+
5ant
|
| 10835 |
+
6ft4
|
| 10836 |
+
3th0
|
| 10837 |
+
1ijr
|
| 10838 |
+
4lnw
|
| 10839 |
+
4pyq
|
| 10840 |
+
2r3t
|
| 10841 |
+
5mkr
|
| 10842 |
+
2gz7
|
| 10843 |
+
3fcl
|
| 10844 |
+
1joj
|
| 10845 |
+
1j17
|
| 10846 |
+
3avf
|
| 10847 |
+
3u0t
|
| 10848 |
+
1c85
|
| 10849 |
+
6d4o
|
| 10850 |
+
2qic
|
| 10851 |
+
2tmn
|
| 10852 |
+
4cc6
|
| 10853 |
+
5k5n
|
| 10854 |
+
1zpa
|
| 10855 |
+
5n55
|
| 10856 |
+
3rxj
|
| 10857 |
+
3srg
|
| 10858 |
+
4bgm
|
| 10859 |
+
4m0r
|
| 10860 |
+
5nxv
|
| 10861 |
+
5j1w
|
| 10862 |
+
4bbf
|
| 10863 |
+
5czb
|
| 10864 |
+
4mot
|
| 10865 |
+
1tl7
|
| 10866 |
+
1bju
|
| 10867 |
+
2pmn
|
| 10868 |
+
1k6p
|
| 10869 |
+
5akz
|
| 10870 |
+
4ca7
|
| 10871 |
+
1mfa
|
| 10872 |
+
4qqc
|
| 10873 |
+
5e2q
|
| 10874 |
+
2fxu
|
| 10875 |
+
3ejp
|
| 10876 |
+
1a1b
|
| 10877 |
+
5k0j
|
| 10878 |
+
3h5u
|
| 10879 |
+
2rg6
|
| 10880 |
+
5fe7
|
| 10881 |
+
4pgd
|
| 10882 |
+
4rr6
|
| 10883 |
+
4o70
|
| 10884 |
+
5cfa
|
| 10885 |
+
2cfd
|
| 10886 |
+
2wtw
|
| 10887 |
+
3k3g
|
| 10888 |
+
4ju3
|
| 10889 |
+
2qhm
|
| 10890 |
+
2vqt
|
| 10891 |
+
5q0d
|
| 10892 |
+
3gcv
|
| 10893 |
+
4fmn
|
| 10894 |
+
5kgx
|
| 10895 |
+
2n9e
|
| 10896 |
+
3fc1
|
| 10897 |
+
1ok7
|
| 10898 |
+
4nru
|
| 10899 |
+
6fmp
|
| 10900 |
+
5fxs
|
| 10901 |
+
4dk7
|
| 10902 |
+
4e5f
|
| 10903 |
+
5ml0
|
| 10904 |
+
3v6s
|
| 10905 |
+
5cjf
|
| 10906 |
+
5e1o
|
| 10907 |
+
6bw8
|
| 10908 |
+
3h2f
|
| 10909 |
+
1yly
|
| 10910 |
+
4avs
|
| 10911 |
+
1sri
|
| 10912 |
+
1avn
|
| 10913 |
+
4cfw
|
| 10914 |
+
4jg7
|
| 10915 |
+
3fu6
|
| 10916 |
+
1g7f
|
| 10917 |
+
4l1u
|
| 10918 |
+
3ocg
|
| 10919 |
+
5wal
|
| 10920 |
+
4dq2
|
| 10921 |
+
1uz8
|
| 10922 |
+
4hgc
|
| 10923 |
+
2onb
|
| 10924 |
+
2lyw
|
| 10925 |
+
5xgl
|
| 10926 |
+
1ydr
|
| 10927 |
+
2r0h
|
| 10928 |
+
4q4s
|
| 10929 |
+
4riv
|
| 10930 |
+
4cp7
|
| 10931 |
+
1il4
|
| 10932 |
+
2qaf
|
| 10933 |
+
1idb
|
| 10934 |
+
1hos
|
| 10935 |
+
2xnn
|
| 10936 |
+
5myv
|
| 10937 |
+
5ti4
|
| 10938 |
+
3gi5
|
| 10939 |
+
2wxh
|
| 10940 |
+
4eo6
|
| 10941 |
+
3spk
|
| 10942 |
+
1g3f
|
| 10943 |
+
5iy4
|
| 10944 |
+
5op8
|
| 10945 |
+
5h1v
|
| 10946 |
+
3hig
|
| 10947 |
+
2c92
|
| 10948 |
+
1n7m
|
| 10949 |
+
5dif
|
| 10950 |
+
2py4
|
| 10951 |
+
1nfx
|
| 10952 |
+
4cst
|
| 10953 |
+
3h0v
|
| 10954 |
+
5z7j
|
| 10955 |
+
5ta2
|
| 10956 |
+
5nu3
|
| 10957 |
+
3k3i
|
| 10958 |
+
3zq9
|
| 10959 |
+
5cqj
|
| 10960 |
+
6axk
|
| 10961 |
+
4len
|
| 10962 |
+
4nmp
|
| 10963 |
+
3ch9
|
| 10964 |
+
5djv
|
| 10965 |
+
4hxw
|
| 10966 |
+
4euc
|
| 10967 |
+
1hvy
|
| 10968 |
+
2jk9
|
| 10969 |
+
3ncz
|
| 10970 |
+
3bu1
|
| 10971 |
+
4bb2
|
| 10972 |
+
3wqh
|
| 10973 |
+
6alz
|
| 10974 |
+
3qch
|
| 10975 |
+
4jv7
|
| 10976 |
+
3nan
|
| 10977 |
+
1oth
|
| 10978 |
+
2aie
|
| 10979 |
+
3dln
|
| 10980 |
+
3mv5
|
| 10981 |
+
5egu
|
| 10982 |
+
6dki
|
| 10983 |
+
5am0
|
| 10984 |
+
3vrt
|
| 10985 |
+
6dpt
|
| 10986 |
+
1ew9
|
| 10987 |
+
2w6u
|
| 10988 |
+
2ggu
|
| 10989 |
+
5jsj
|
| 10990 |
+
3zya
|
| 10991 |
+
5uwp
|
| 10992 |
+
2exc
|
| 10993 |
+
5e28
|
| 10994 |
+
2xng
|
| 10995 |
+
6esm
|
| 10996 |
+
1gi4
|
| 10997 |
+
3d2t
|
| 10998 |
+
3ekx
|
| 10999 |
+
2uwp
|
| 11000 |
+
1klg
|
| 11001 |
+
4y2x
|
| 11002 |
+
2dwx
|
| 11003 |
+
5lrq
|
| 11004 |
+
1od8
|
| 11005 |
+
4ci2
|
| 11006 |
+
1nq7
|
| 11007 |
+
4m5i
|
| 11008 |
+
1o9e
|
| 11009 |
+
3ejr
|
| 11010 |
+
4wrs
|
| 11011 |
+
1g1d
|
| 11012 |
+
4eor
|
| 11013 |
+
5ofi
|
| 11014 |
+
4eb9
|
| 11015 |
+
2kce
|
| 11016 |
+
5eko
|
| 11017 |
+
1ndy
|
| 11018 |
+
2nnk
|
| 11019 |
+
5ybe
|
| 11020 |
+
1svg
|
| 11021 |
+
5aes
|
| 11022 |
+
5j5x
|
| 11023 |
+
1cka
|
| 11024 |
+
5ehv
|
| 11025 |
+
3n7h
|
| 11026 |
+
3pxe
|
| 11027 |
+
4kz7
|
| 11028 |
+
1tcw
|
| 11029 |
+
3v5j
|
| 11030 |
+
2xp8
|
| 11031 |
+
5ier
|
| 11032 |
+
4bv2
|
| 11033 |
+
4i6b
|
| 11034 |
+
4oyb
|
| 11035 |
+
1ox9
|
| 11036 |
+
3zvv
|
| 11037 |
+
5f90
|
| 11038 |
+
1rdj
|
| 11039 |
+
4azb
|
| 11040 |
+
3o5n
|
| 11041 |
+
4uub
|
| 11042 |
+
3d1e
|
| 11043 |
+
1n5z
|
| 11044 |
+
6b1y
|
| 11045 |
+
2o7e
|
| 11046 |
+
3o9v
|
| 11047 |
+
4hf4
|
| 11048 |
+
2qi6
|
| 11049 |
+
3gss
|
| 11050 |
+
1c5c
|
| 11051 |
+
5l9l
|
| 11052 |
+
3ifp
|
| 11053 |
+
4fi9
|
| 11054 |
+
1cs4
|
| 11055 |
+
4ode
|
| 11056 |
+
1mui
|
| 11057 |
+
5vp1
|
| 11058 |
+
3vbt
|
| 11059 |
+
3sh0
|
| 11060 |
+
4avw
|
| 11061 |
+
1qf1
|
| 11062 |
+
3hjo
|
| 11063 |
+
1iiq
|
| 11064 |
+
2r2w
|
| 11065 |
+
3wtj
|
| 11066 |
+
4lys
|
| 11067 |
+
4lv2
|
| 11068 |
+
5ii1
|
| 11069 |
+
5d6p
|
| 11070 |
+
4nku
|
| 11071 |
+
1w1d
|
| 11072 |
+
1e37
|
| 11073 |
+
4pte
|
| 11074 |
+
3c7p
|
| 11075 |
+
4glx
|
| 11076 |
+
1hvi
|
| 11077 |
+
5f4n
|
| 11078 |
+
5f6u
|
| 11079 |
+
5ggj
|
| 11080 |
+
3gx0
|
| 11081 |
+
3ztx
|
| 11082 |
+
4uiz
|
| 11083 |
+
4cl9
|
| 11084 |
+
2wnl
|
| 11085 |
+
3ipx
|
| 11086 |
+
1i72
|
| 11087 |
+
3dsz
|
| 11088 |
+
3mhi
|
| 11089 |
+
4ap7
|
| 11090 |
+
2xcg
|
| 11091 |
+
3kwa
|
| 11092 |
+
7prc
|
| 11093 |
+
4e3o
|
| 11094 |
+
4int
|
| 11095 |
+
1zz1
|
| 11096 |
+
5bjt
|
| 11097 |
+
2xas
|
| 11098 |
+
4ny3
|
| 11099 |
+
5fi7
|
| 11100 |
+
1owi
|
| 11101 |
+
3qd4
|
| 11102 |
+
1ew8
|
| 11103 |
+
4hdc
|
| 11104 |
+
3h52
|
| 11105 |
+
5nzp
|
| 11106 |
+
1lbf
|
| 11107 |
+
1b6p
|
| 11108 |
+
5np8
|
| 11109 |
+
3qxt
|
| 11110 |
+
1ksn
|
| 11111 |
+
3twp
|
| 11112 |
+
3ti8
|
| 11113 |
+
6ewe
|
| 11114 |
+
5v82
|
| 11115 |
+
3t19
|
| 11116 |
+
3eu5
|
| 11117 |
+
2flr
|
| 11118 |
+
4acm
|
| 11119 |
+
1oe7
|
| 11120 |
+
2or9
|
| 11121 |
+
1v2m
|
| 11122 |
+
5fjx
|
| 11123 |
+
2c80
|
| 11124 |
+
3nm6
|
| 11125 |
+
4nak
|
| 11126 |
+
5y12
|
| 11127 |
+
2vwn
|
| 11128 |
+
5chk
|
| 11129 |
+
5akg
|
| 11130 |
+
5m7u
|
| 11131 |
+
6axp
|
| 11132 |
+
1upk
|
| 11133 |
+
1j36
|
| 11134 |
+
1uv6
|
| 11135 |
+
3f2a
|
| 11136 |
+
1ov3
|
| 11137 |
+
1yms
|
| 11138 |
+
4o4g
|
| 11139 |
+
3tsz
|
| 11140 |
+
6ma1
|
| 11141 |
+
2hrm
|
| 11142 |
+
5k4x
|
| 11143 |
+
3hzk
|
| 11144 |
+
5w5s
|
| 11145 |
+
6gh9
|
| 11146 |
+
5l26
|
| 11147 |
+
2lsv
|
| 11148 |
+
6eqa
|
| 11149 |
+
5fp0
|
| 11150 |
+
2jk7
|
| 11151 |
+
3h03
|
| 11152 |
+
4mk1
|
| 11153 |
+
1y20
|
| 11154 |
+
5yc1
|
| 11155 |
+
5lx6
|
| 11156 |
+
2er0
|
| 11157 |
+
5x4m
|
| 11158 |
+
3myq
|
| 11159 |
+
3mbl
|
| 11160 |
+
4a7i
|
| 11161 |
+
5koq
|
| 11162 |
+
3cx9
|
| 11163 |
+
4zom
|
| 11164 |
+
4j0s
|
| 11165 |
+
5sz6
|
| 11166 |
+
2vqm
|
| 11167 |
+
1syh
|
| 11168 |
+
1kwr
|
| 11169 |
+
6cyb
|
| 11170 |
+
2zym
|
| 11171 |
+
5fv7
|
| 11172 |
+
3shz
|
| 11173 |
+
6b5j
|
| 11174 |
+
1wn6
|
| 11175 |
+
5vd3
|
| 11176 |
+
1njc
|
| 11177 |
+
5ia3
|
| 11178 |
+
4a4e
|
| 11179 |
+
4iue
|
| 11180 |
+
2ewa
|
| 11181 |
+
5ew9
|
| 11182 |
+
5n8e
|
| 11183 |
+
4kow
|
| 11184 |
+
5w6i
|
| 11185 |
+
5mgg
|
| 11186 |
+
5oh3
|
| 11187 |
+
4hmh
|
| 11188 |
+
4f3i
|
| 11189 |
+
3kd7
|
| 11190 |
+
1w9u
|
| 11191 |
+
4n6y
|
| 11192 |
+
5jid
|
| 11193 |
+
1juy
|
| 11194 |
+
1f0r
|
| 11195 |
+
5iw0
|
| 11196 |
+
4ui4
|
| 11197 |
+
3tkh
|
| 11198 |
+
2gc8
|
| 11199 |
+
4f4p
|
| 11200 |
+
5qc4
|
| 11201 |
+
3wzj
|
| 11202 |
+
5l7h
|
| 11203 |
+
2kdh
|
| 11204 |
+
3sv9
|
| 11205 |
+
4b7q
|
| 11206 |
+
5a6i
|
| 11207 |
+
5om2
|
| 11208 |
+
5lgu
|
| 11209 |
+
5unh
|
| 11210 |
+
5u1q
|
| 11211 |
+
5nw1
|
| 11212 |
+
1uef
|
| 11213 |
+
4ck3
|
| 11214 |
+
1g42
|
| 11215 |
+
2e9d
|
| 11216 |
+
5k4z
|
| 11217 |
+
4l7b
|
| 11218 |
+
1so2
|
| 11219 |
+
6esn
|
| 11220 |
+
1ida
|
| 11221 |
+
4j3d
|
| 11222 |
+
3jzb
|
| 11223 |
+
1o2h
|
| 11224 |
+
5qby
|
| 11225 |
+
1nde
|
| 11226 |
+
6dne
|
| 11227 |
+
1f8d
|
| 11228 |
+
5ult
|
| 11229 |
+
4o15
|
| 11230 |
+
5alo
|
| 11231 |
+
4oar
|
| 11232 |
+
4zy0
|
| 11233 |
+
4pd7
|
| 11234 |
+
2aq7
|
| 11235 |
+
2zga
|
| 11236 |
+
5luu
|
| 11237 |
+
2l8j
|
| 11238 |
+
2wky
|
| 11239 |
+
4yrg
|
| 11240 |
+
2v22
|
| 11241 |
+
2ndf
|
| 11242 |
+
1jsv
|
| 11243 |
+
4u6z
|
| 11244 |
+
5wdl
|
| 11245 |
+
5nxi
|
| 11246 |
+
6i8y
|
| 11247 |
+
4yx4
|
| 11248 |
+
4u6x
|
| 11249 |
+
4mw5
|
| 11250 |
+
3ith
|
| 11251 |
+
4amx
|
| 11252 |
+
1wb0
|
| 11253 |
+
3arq
|
| 11254 |
+
3dp2
|
| 11255 |
+
1lst
|
| 11256 |
+
3fqh
|
| 11257 |
+
2ivu
|
| 11258 |
+
1s5q
|
| 11259 |
+
6ct7
|
| 11260 |
+
3pma
|
| 11261 |
+
2ay6
|
| 11262 |
+
5e90
|
| 11263 |
+
1re1
|
| 11264 |
+
4l32
|
| 11265 |
+
4bw4
|
| 11266 |
+
1pig
|
| 11267 |
+
2xne
|
| 11268 |
+
4qme
|
| 11269 |
+
5ugh
|
| 11270 |
+
1cpi
|
| 11271 |
+
3wd9
|
| 11272 |
+
1o5m
|
| 11273 |
+
5wfc
|
| 11274 |
+
4tun
|
| 11275 |
+
2y77
|
| 11276 |
+
5laq
|
| 11277 |
+
1ax2
|
| 11278 |
+
4ptg
|
| 11279 |
+
3hrb
|
| 11280 |
+
5dah
|
| 11281 |
+
5iza
|
| 11282 |
+
3thb
|
| 11283 |
+
4bi1
|
| 11284 |
+
2c97
|
| 11285 |
+
3arf
|
| 11286 |
+
4gwi
|
| 11287 |
+
6h1h
|
| 11288 |
+
5yzc
|
| 11289 |
+
4ec4
|
| 11290 |
+
5tg6
|
| 11291 |
+
3vap
|
| 11292 |
+
2q2a
|
| 11293 |
+
2fle
|
| 11294 |
+
2vwf
|
| 11295 |
+
1g2k
|
| 11296 |
+
2zz2
|
| 11297 |
+
5xhz
|
| 11298 |
+
4zim
|
| 11299 |
+
5fwr
|
| 11300 |
+
6g9i
|
| 11301 |
+
6bqa
|
| 11302 |
+
1g45
|
| 11303 |
+
4r1v
|
| 11304 |
+
3is9
|
| 11305 |
+
3wt7
|
| 11306 |
+
5vlr
|
| 11307 |
+
5fnr
|
| 11308 |
+
6cf6
|
| 11309 |
+
5t1m
|
| 11310 |
+
3uda
|
| 11311 |
+
3ur9
|
| 11312 |
+
3dp4
|
| 11313 |
+
6c7g
|
| 11314 |
+
5kmf
|
| 11315 |
+
4hev
|
| 11316 |
+
1zc9
|
| 11317 |
+
6d8e
|
| 11318 |
+
4nuf
|
| 11319 |
+
5acx
|
| 11320 |
+
5ouh
|
| 11321 |
+
2fde
|
| 11322 |
+
6c0n
|
| 11323 |
+
4wt2
|
| 11324 |
+
6cgp
|
| 11325 |
+
5dyt
|
| 11326 |
+
2xcs
|
| 11327 |
+
4z1k
|
| 11328 |
+
4b11
|
| 11329 |
+
2i2c
|
| 11330 |
+
5ko5
|
| 11331 |
+
3g2u
|
| 11332 |
+
6hlx
|
| 11333 |
+
1xgj
|
| 11334 |
+
6ayt
|
| 11335 |
+
5vij
|
| 11336 |
+
1ao8
|
| 11337 |
+
1c5z
|
| 11338 |
+
1b9v
|
| 11339 |
+
5toe
|
| 11340 |
+
4pox
|
| 11341 |
+
1gnn
|
| 11342 |
+
5kqg
|
| 11343 |
+
5am1
|
| 11344 |
+
2x4o
|
| 11345 |
+
1i37
|
| 11346 |
+
5uln
|
| 11347 |
+
3gpo
|
| 11348 |
+
2g01
|
| 11349 |
+
3fx6
|
| 11350 |
+
1ke7
|
| 11351 |
+
3my5
|
| 11352 |
+
4inr
|
| 11353 |
+
1jp5
|
| 11354 |
+
5eqe
|
| 11355 |
+
4muf
|
| 11356 |
+
5xhr
|
| 11357 |
+
2wmr
|
| 11358 |
+
3ebb
|
| 11359 |
+
4mwe
|
| 11360 |
+
5ap4
|
| 11361 |
+
5nwg
|
| 11362 |
+
4x8t
|
| 11363 |
+
4oz1
|
| 11364 |
+
3hvc
|
| 11365 |
+
1flr
|
| 11366 |
+
3pz3
|
| 11367 |
+
6hlz
|
| 11368 |
+
4drp
|
| 11369 |
+
4tw9
|
| 11370 |
+
5b4k
|
| 11371 |
+
4o3t
|
| 11372 |
+
4fev
|
| 11373 |
+
1o45
|
| 11374 |
+
4bcj
|
| 11375 |
+
1qkb
|
| 11376 |
+
5nho
|
| 11377 |
+
2x8d
|
| 11378 |
+
1aq7
|
| 11379 |
+
5y21
|
| 11380 |
+
5i12
|
| 11381 |
+
3bsc
|
| 11382 |
+
3s75
|
| 11383 |
+
4wki
|
| 11384 |
+
1ouy
|
| 11385 |
+
4jv9
|
| 11386 |
+
5o9h
|
| 11387 |
+
4ez3
|
| 11388 |
+
3dd8
|
| 11389 |
+
5h7g
|
| 11390 |
+
1sqt
|
| 11391 |
+
4enx
|
| 11392 |
+
1bap
|
| 11393 |
+
6enm
|
| 11394 |
+
5za7
|
| 11395 |
+
3dbs
|
| 11396 |
+
4mvw
|
| 11397 |
+
3vrv
|
| 11398 |
+
2azc
|
| 11399 |
+
3i60
|
| 11400 |
+
5hld
|
| 11401 |
+
2wxq
|
| 11402 |
+
4m8x
|
| 11403 |
+
5f6v
|
| 11404 |
+
4fhi
|
| 11405 |
+
5auv
|
| 11406 |
+
4l7c
|
| 11407 |
+
3wcl
|
| 11408 |
+
3gqo
|
| 11409 |
+
5dhh
|
| 11410 |
+
5u7d
|
| 11411 |
+
6grp
|
| 11412 |
+
1xm6
|
| 11413 |
+
1lpz
|
| 11414 |
+
5cvd
|
| 11415 |
+
3zmj
|
| 11416 |
+
1g6s
|
| 11417 |
+
5eud
|
| 11418 |
+
3wyl
|
| 11419 |
+
4wh9
|
| 11420 |
+
2who
|
| 11421 |
+
2nmz
|
| 11422 |
+
2xiz
|
| 11423 |
+
4hbx
|
| 11424 |
+
6bee
|
| 11425 |
+
3ryx
|
| 11426 |
+
5ans
|
| 11427 |
+
5zvw
|
| 11428 |
+
4zwz
|
| 11429 |
+
3q96
|
| 11430 |
+
4v05
|
| 11431 |
+
5jf2
|
| 11432 |
+
1wbv
|
| 11433 |
+
6c7x
|
| 11434 |
+
5m6u
|
| 11435 |
+
1m0b
|
| 11436 |
+
3qg6
|
| 11437 |
+
2gz8
|
| 11438 |
+
3nyn
|
| 11439 |
+
3lhj
|
| 11440 |
+
3g86
|
| 11441 |
+
1ny0
|
| 11442 |
+
3zj8
|
| 11443 |
+
3o6l
|
| 11444 |
+
2vb8
|
| 11445 |
+
3p9l
|
| 11446 |
+
4bnx
|
| 11447 |
+
5uch
|
| 11448 |
+
3sv6
|
| 11449 |
+
1gvk
|
| 11450 |
+
4ynk
|
| 11451 |
+
223l
|
| 11452 |
+
5orr
|
| 11453 |
+
4o0j
|
| 11454 |
+
3rxm
|
| 11455 |
+
4po0
|
| 11456 |
+
4z84
|
| 11457 |
+
4j21
|
| 11458 |
+
5yjm
|
| 11459 |
+
5n25
|
| 11460 |
+
4bky
|
| 11461 |
+
5svy
|
| 11462 |
+
5gmn
|
| 11463 |
+
5u7l
|
| 11464 |
+
1f1j
|
| 11465 |
+
4aji
|
| 11466 |
+
2oj9
|
| 11467 |
+
3oli
|
| 11468 |
+
4r4o
|
| 11469 |
+
4ql1
|
| 11470 |
+
1skj
|
| 11471 |
+
2wfj
|
| 11472 |
+
1wc1
|
| 11473 |
+
3hqz
|
| 11474 |
+
2r3o
|
| 11475 |
+
3m5a
|
| 11476 |
+
4lwc
|
| 11477 |
+
4j5p
|
| 11478 |
+
3n46
|
| 11479 |
+
3ttp
|
| 11480 |
+
1jjk
|
| 11481 |
+
5vb7
|
| 11482 |
+
2pj0
|
| 11483 |
+
3dog
|
| 11484 |
+
2bve
|
| 11485 |
+
5vfm
|
| 11486 |
+
4l2x
|
| 11487 |
+
3uvp
|
| 11488 |
+
4wno
|
| 11489 |
+
4y38
|
| 11490 |
+
2c69
|
| 11491 |
+
2zx9
|
| 11492 |
+
2alv
|
| 11493 |
+
3ip6
|
| 11494 |
+
3sio
|
| 11495 |
+
4xoe
|
| 11496 |
+
3sn8
|
| 11497 |
+
5y48
|
| 11498 |
+
5u7j
|
| 11499 |
+
5f3g
|
| 11500 |
+
1xh4
|
| 11501 |
+
4j79
|
| 11502 |
+
5ghv
|
| 11503 |
+
2i2b
|
| 11504 |
+
1gah
|
| 11505 |
+
3kr1
|
| 11506 |
+
5npd
|
| 11507 |
+
5mks
|
| 11508 |
+
1jwu
|
| 11509 |
+
4inh
|
| 11510 |
+
1tsl
|
| 11511 |
+
2wq5
|
| 11512 |
+
2e9u
|
| 11513 |
+
1hpo
|
| 11514 |
+
6aff
|
| 11515 |
+
6e99
|
| 11516 |
+
5vlp
|
| 11517 |
+
4u5t
|
| 11518 |
+
2uxu
|
| 11519 |
+
5byi
|
| 11520 |
+
4ris
|
| 11521 |
+
2hwo
|
| 11522 |
+
5e2s
|
| 11523 |
+
5q0g
|
| 11524 |
+
3lj3
|
| 11525 |
+
4qo4
|
| 11526 |
+
4bt9
|
| 11527 |
+
6b5a
|
| 11528 |
+
4mk2
|
| 11529 |
+
4idv
|
| 11530 |
+
5lj1
|
| 11531 |
+
3nal
|
| 11532 |
+
6d1m
|
| 11533 |
+
5kpm
|
| 11534 |
+
5sxn
|
| 11535 |
+
5doh
|
| 11536 |
+
3m6f
|
| 11537 |
+
6bsk
|
| 11538 |
+
1gui
|
| 11539 |
+
4m4q
|
| 11540 |
+
4ow0
|
| 11541 |
+
5ur6
|
| 11542 |
+
5iu7
|
| 11543 |
+
1bma
|
| 11544 |
+
5uxn
|
| 11545 |
+
1oiu
|
| 11546 |
+
4ln7
|
| 11547 |
+
2a5s
|
| 11548 |
+
6afg
|
| 11549 |
+
5e4w
|
| 11550 |
+
1oyq
|
| 11551 |
+
4xtv
|
| 11552 |
+
6eji
|
| 11553 |
+
3at1
|
| 11554 |
+
4q1a
|
| 11555 |
+
1njb
|
| 11556 |
+
4u0g
|
| 11557 |
+
1tou
|
| 11558 |
+
2pj5
|
| 11559 |
+
3avk
|
| 11560 |
+
6fmj
|
| 11561 |
+
2lto
|
| 11562 |
+
6czc
|
| 11563 |
+
3u8m
|
| 11564 |
+
2y5l
|
| 11565 |
+
3fjg
|
| 11566 |
+
5uwi
|
| 11567 |
+
6m9c
|
| 11568 |
+
3jxw
|
| 11569 |
+
4zqt
|
| 11570 |
+
5vc6
|
| 11571 |
+
5knx
|
| 11572 |
+
3pix
|
| 11573 |
+
3e6k
|
| 11574 |
+
4f7j
|
| 11575 |
+
3koo
|
| 11576 |
+
5n3y
|
| 11577 |
+
3jdw
|
| 11578 |
+
4nmo
|
| 11579 |
+
1ody
|
| 11580 |
+
5tpc
|
| 11581 |
+
6bsm
|
| 11582 |
+
2cm8
|
| 11583 |
+
5gn7
|
| 11584 |
+
4gql
|
| 11585 |
+
1yq7
|
| 11586 |
+
2gga
|
| 11587 |
+
4wy7
|
| 11588 |
+
2brn
|
| 11589 |
+
2fmb
|
| 11590 |
+
5n8t
|
| 11591 |
+
3nus
|
| 11592 |
+
5x5g
|
| 11593 |
+
5yls
|
| 11594 |
+
5u3b
|
| 11595 |
+
4x14
|
| 11596 |
+
4puk
|
| 11597 |
+
4nmt
|
| 11598 |
+
3uol
|
| 11599 |
+
4jzf
|
| 11600 |
+
4um3
|
| 11601 |
+
5k7h
|
| 11602 |
+
5x74
|
| 11603 |
+
3ddb
|
| 11604 |
+
1xd0
|
| 11605 |
+
4mm9
|
| 11606 |
+
1ulg
|
| 11607 |
+
3coz
|
| 11608 |
+
5epl
|
| 11609 |
+
3n3l
|
| 11610 |
+
4ear
|
| 11611 |
+
3vzg
|
| 11612 |
+
4qac
|
| 11613 |
+
1ai6
|
| 11614 |
+
3d8w
|
| 11615 |
+
2wtv
|
| 11616 |
+
6ge0
|
| 11617 |
+
4q3r
|
| 11618 |
+
4qb3
|
| 11619 |
+
2vip
|
| 11620 |
+
3pz2
|
| 11621 |
+
3f34
|
| 11622 |
+
3rin
|
| 11623 |
+
3acx
|
| 11624 |
+
5n2t
|
| 11625 |
+
5oqw
|
| 11626 |
+
3p4w
|
| 11627 |
+
2pvl
|
| 11628 |
+
1b0f
|
| 11629 |
+
4p6x
|
| 11630 |
+
3rcj
|
| 11631 |
+
5i9z
|
| 11632 |
+
5ai6
|
| 11633 |
+
3c4f
|
| 11634 |
+
1o4o
|
| 11635 |
+
1sje
|
| 11636 |
+
3alt
|
| 11637 |
+
5f41
|
| 11638 |
+
5yid
|
| 11639 |
+
3bgl
|
| 11640 |
+
4rdn
|
| 11641 |
+
4hso
|
| 11642 |
+
4ttv
|
| 11643 |
+
5yvx
|
| 11644 |
+
3ok9
|
| 11645 |
+
5y0g
|
| 11646 |
+
5twx
|
| 11647 |
+
1wug
|
| 11648 |
+
4a6c
|
| 11649 |
+
4lzs
|
| 11650 |
+
2zx7
|
| 11651 |
+
3gy2
|
| 11652 |
+
3jvr
|
| 11653 |
+
4as9
|
| 11654 |
+
3f1a
|
| 11655 |
+
4cd6
|
| 11656 |
+
4r06
|
| 11657 |
+
5h5s
|
| 11658 |
+
5w0i
|
| 11659 |
+
4wp7
|
| 11660 |
+
5e3g
|
| 11661 |
+
2ydt
|
| 11662 |
+
4abb
|
| 11663 |
+
5ji8
|
| 11664 |
+
4olc
|
| 11665 |
+
5ieo
|
| 11666 |
+
5i2z
|
| 11667 |
+
4zy5
|
| 11668 |
+
4qga
|
| 11669 |
+
3t82
|
| 11670 |
+
4dff
|
| 11671 |
+
3v4v
|
| 11672 |
+
4gw8
|
| 11673 |
+
2hxl
|
| 11674 |
+
4bkt
|
| 11675 |
+
2wj2
|
| 11676 |
+
4xwk
|
| 11677 |
+
1xk9
|
| 11678 |
+
3tv7
|
| 11679 |
+
1ajp
|
| 11680 |
+
1tps
|
| 11681 |
+
3ag9
|
| 11682 |
+
3ls4
|
| 11683 |
+
5wg3
|
| 11684 |
+
1cwb
|
| 11685 |
+
3mea
|
| 11686 |
+
1bjr
|
| 11687 |
+
5auz
|
| 11688 |
+
5l6p
|
| 11689 |
+
2y67
|
| 11690 |
+
2i3i
|
| 11691 |
+
5ivj
|
| 11692 |
+
4l7n
|
| 11693 |
+
5efj
|
| 11694 |
+
4jzi
|
| 11695 |
+
5vlk
|
| 11696 |
+
3rt4
|
| 11697 |
+
3zmm
|
| 11698 |
+
2wn9
|
| 11699 |
+
2a25
|
| 11700 |
+
4hmq
|
| 11701 |
+
5jim
|
| 11702 |
+
4zyy
|
| 11703 |
+
4bi7
|
| 11704 |
+
1f3j
|
| 11705 |
+
2b52
|
| 11706 |
+
2xkd
|
| 11707 |
+
1yys
|
| 11708 |
+
3vtr
|
| 11709 |
+
3zrl
|
| 11710 |
+
1ke5
|
| 11711 |
+
6hwz
|
| 11712 |
+
4d0x
|
| 11713 |
+
4mzs
|
| 11714 |
+
4a6l
|
| 11715 |
+
4eg5
|
| 11716 |
+
1sgu
|
| 11717 |
+
1w11
|
| 11718 |
+
3dkg
|
| 11719 |
+
1s19
|
| 11720 |
+
4msn
|
| 11721 |
+
4wyo
|
| 11722 |
+
3ppp
|
| 11723 |
+
4rlu
|
| 11724 |
+
2jjk
|
| 11725 |
+
5ovp
|
| 11726 |
+
6cd9
|
| 11727 |
+
3qio
|
| 11728 |
+
4wrb
|
| 11729 |
+
5na0
|
| 11730 |
+
1wbt
|
| 11731 |
+
4ael
|
| 11732 |
+
5eu1
|
| 11733 |
+
3nw6
|
| 11734 |
+
1c5s
|
| 11735 |
+
4ual
|
| 11736 |
+
2vsl
|
| 11737 |
+
5vil
|
| 11738 |
+
4o7f
|
| 11739 |
+
1b5j
|
| 11740 |
+
1pw6
|
| 11741 |
+
3rwg
|
| 11742 |
+
5wev
|
| 11743 |
+
2pjc
|
| 11744 |
+
3vi2
|
| 11745 |
+
4xum
|
| 11746 |
+
2zmm
|
| 11747 |
+
3fv3
|
| 11748 |
+
3ukr
|
| 11749 |
+
5myo
|
| 11750 |
+
3lbl
|
| 11751 |
+
4ppb
|
| 11752 |
+
1q0b
|
| 11753 |
+
4abi
|
| 11754 |
+
5ygd
|
| 11755 |
+
4z90
|
| 11756 |
+
4omk
|
| 11757 |
+
5bqh
|
| 11758 |
+
5lrj
|
| 11759 |
+
3ad8
|
| 11760 |
+
5izu
|
| 11761 |
+
4ty6
|
| 11762 |
+
5nut
|
| 11763 |
+
3zps
|
| 11764 |
+
5e2v
|
| 11765 |
+
4k43
|
| 11766 |
+
5e7r
|
| 11767 |
+
3rjc
|
| 11768 |
+
4uvc
|
| 11769 |
+
5up3
|
| 11770 |
+
3uix
|
| 11771 |
+
4rz1
|
| 11772 |
+
2p15
|
| 11773 |
+
3f66
|
| 11774 |
+
1gu3
|
| 11775 |
+
6cw8
|
| 11776 |
+
5ei8
|
| 11777 |
+
4psh
|
| 11778 |
+
4i71
|
| 11779 |
+
6f9v
|
| 11780 |
+
4yoz
|
| 11781 |
+
6hu3
|
| 11782 |
+
5e2m
|
| 11783 |
+
5ap5
|
| 11784 |
+
1uod
|
| 11785 |
+
3e63
|
| 11786 |
+
3mke
|
| 11787 |
+
3io7
|
| 11788 |
+
4whz
|
| 11789 |
+
4mt9
|
| 11790 |
+
4x21
|
| 11791 |
+
1ppi
|
| 11792 |
+
5jjs
|
| 11793 |
+
5icp
|
| 11794 |
+
4g1f
|
| 11795 |
+
1csh
|
| 11796 |
+
4mwr
|
| 11797 |
+
5al1
|
| 11798 |
+
4qxq
|
| 11799 |
+
3o9c
|
| 11800 |
+
2wxo
|
| 11801 |
+
4xmo
|
| 11802 |
+
4ii9
|
| 11803 |
+
4wr7
|
| 11804 |
+
4w9w
|
| 11805 |
+
4xjr
|
| 11806 |
+
5fhm
|
| 11807 |
+
5o2d
|
| 11808 |
+
5osk
|
| 11809 |
+
3i7e
|
| 11810 |
+
4kz4
|
| 11811 |
+
4d4d
|
| 11812 |
+
1sfi
|
| 11813 |
+
5uy8
|
| 11814 |
+
2l11
|
| 11815 |
+
5v2p
|
| 11816 |
+
2vtm
|
| 11817 |
+
5ef8
|
| 11818 |
+
4b2l
|
| 11819 |
+
3g32
|
| 11820 |
+
4x50
|
| 11821 |
+
2oax
|
| 11822 |
+
3ucj
|
| 11823 |
+
4c61
|
| 11824 |
+
1w2g
|
| 11825 |
+
5er2
|
| 11826 |
+
3up7
|
| 11827 |
+
3r01
|
| 11828 |
+
5izq
|
| 11829 |
+
3l6f
|
| 11830 |
+
3p9m
|
| 11831 |
+
4b74
|
| 11832 |
+
6fyz
|
| 11833 |
+
5g1z
|
| 11834 |
+
3t4h
|
| 11835 |
+
4wkc
|
| 11836 |
+
2o4j
|
| 11837 |
+
4bdj
|
| 11838 |
+
5hcl
|
| 11839 |
+
5mek
|
| 11840 |
+
4gvd
|
| 11841 |
+
5qil
|
| 11842 |
+
2e2b
|
| 11843 |
+
4bi2
|
| 11844 |
+
5l0h
|
| 11845 |
+
5ugb
|
| 11846 |
+
2ynd
|
| 11847 |
+
2wzz
|
| 11848 |
+
5wyz
|
| 11849 |
+
5may
|
| 11850 |
+
4eev
|
| 11851 |
+
1t1r
|
| 11852 |
+
4kiw
|
| 11853 |
+
2y7i
|
| 11854 |
+
3s71
|
| 11855 |
+
2qcd
|
| 11856 |
+
1vru
|
| 11857 |
+
1h00
|
| 11858 |
+
4uat
|
| 11859 |
+
3nyd
|
| 11860 |
+
3bkk
|
| 11861 |
+
4n9d
|
| 11862 |
+
1bim
|
| 11863 |
+
5zt1
|
| 11864 |
+
3kce
|
| 11865 |
+
5ml2
|
| 11866 |
+
3fpd
|
| 11867 |
+
6h7o
|
| 11868 |
+
3p8z
|
| 11869 |
+
4ewh
|
| 11870 |
+
3u15
|
| 11871 |
+
4uva
|
| 11872 |
+
5a3q
|
| 11873 |
+
4dai
|
| 11874 |
+
5o55
|
| 11875 |
+
3f69
|
| 11876 |
+
4puz
|
| 11877 |
+
5khi
|
| 11878 |
+
6f05
|
| 11879 |
+
3g35
|
| 11880 |
+
4qij
|
| 11881 |
+
4dxg
|
| 11882 |
+
3n1c
|
| 11883 |
+
4dt2
|
| 11884 |
+
5u2e
|
| 11885 |
+
3er5
|
| 11886 |
+
6c7j
|
| 11887 |
+
4cj4
|
| 11888 |
+
2w7x
|
| 11889 |
+
5iu4
|
| 11890 |
+
5upz
|
| 11891 |
+
3ffg
|
| 11892 |
+
5yba
|
| 11893 |
+
2f35
|
| 11894 |
+
4gsc
|
| 11895 |
+
4qvl
|
| 11896 |
+
1mwn
|
| 11897 |
+
1jeu
|
| 11898 |
+
1c3b
|
| 11899 |
+
3ual
|
| 11900 |
+
1rdn
|
| 11901 |
+
6dh1
|
| 11902 |
+
5iub
|
| 11903 |
+
5mav
|
| 11904 |
+
3t83
|
| 11905 |
+
6cwf
|
| 11906 |
+
2fpz
|
| 11907 |
+
5ml4
|
| 11908 |
+
5f2f
|
| 11909 |
+
1erq
|
| 11910 |
+
5t9u
|
| 11911 |
+
2qlf
|
| 11912 |
+
5jmp
|
| 11913 |
+
4s3f
|
| 11914 |
+
6hm6
|
| 11915 |
+
2qt9
|
| 11916 |
+
5mft
|
| 11917 |
+
5vih
|
| 11918 |
+
3mnu
|
| 11919 |
+
6djc
|
| 11920 |
+
6eqp
|
| 11921 |
+
5gsw
|
| 11922 |
+
4ca5
|
| 11923 |
+
5mar
|
| 11924 |
+
2w77
|
| 11925 |
+
2avq
|
| 11926 |
+
5eqq
|
| 11927 |
+
1tsi
|
| 11928 |
+
2vw2
|
| 11929 |
+
2ggd
|
| 11930 |
+
6f6d
|
| 11931 |
+
1f4g
|
| 11932 |
+
3ndm
|
| 11933 |
+
3l17
|
| 11934 |
+
5fni
|
| 11935 |
+
1c3r
|
| 11936 |
+
5trf
|
| 11937 |
+
1pau
|
| 11938 |
+
4li8
|
| 11939 |
+
4re9
|
| 11940 |
+
4l7o
|
| 11941 |
+
5kbh
|
| 11942 |
+
2oh4
|
| 11943 |
+
1joc
|
| 11944 |
+
1qxl
|
| 11945 |
+
4qge
|
| 11946 |
+
2yfe
|
| 11947 |
+
3g8e
|
| 11948 |
+
6czu
|
| 11949 |
+
2fs8
|
| 11950 |
+
4mk5
|
| 11951 |
+
4i60
|
| 11952 |
+
3fh8
|
| 11953 |
+
4h1m
|
| 11954 |
+
2qd7
|
| 11955 |
+
1uom
|
| 11956 |
+
3oim
|
| 11957 |
+
3f9w
|
| 11958 |
+
6ezi
|
| 11959 |
+
3r17
|
| 11960 |
+
4bcd
|
| 11961 |
+
1l6s
|
| 11962 |
+
2uzv
|
| 11963 |
+
4e1e
|
| 11964 |
+
3d28
|
| 11965 |
+
5kez
|
| 11966 |
+
1bn4
|
| 11967 |
+
6biz
|
| 11968 |
+
4lwt
|
| 11969 |
+
3jpv
|
| 11970 |
+
5fsm
|
| 11971 |
+
4mds
|
| 11972 |
+
4e6c
|
| 11973 |
+
3o9p
|
| 11974 |
+
5fyq
|
| 11975 |
+
4hxq
|
| 11976 |
+
3mj1
|
| 11977 |
+
1zm7
|
| 11978 |
+
1s63
|
| 11979 |
+
4nk9
|
| 11980 |
+
4qmm
|
| 11981 |
+
4zyw
|
| 11982 |
+
3kid
|
| 11983 |
+
2q8i
|
| 11984 |
+
3gfe
|
| 11985 |
+
5ih6
|
| 11986 |
+
4owo
|
| 11987 |
+
5o5f
|
| 11988 |
+
1qbq
|
| 11989 |
+
5du6
|
| 11990 |
+
1ymx
|
| 11991 |
+
1oe0
|
| 11992 |
+
4x5p
|
| 11993 |
+
1fhr
|
| 11994 |
+
1ntv
|
| 11995 |
+
1o5f
|
| 11996 |
+
1ui0
|
| 11997 |
+
3os3
|
| 11998 |
+
3d1f
|
| 11999 |
+
3lxs
|
| 12000 |
+
3lxg
|
| 12001 |
+
1pxl
|
| 12002 |
+
5n87
|
| 12003 |
+
1gz9
|
| 12004 |
+
1b6l
|
| 12005 |
+
5kcx
|
| 12006 |
+
2xni
|
| 12007 |
+
3ig6
|
| 12008 |
+
1p4u
|
| 12009 |
+
3r9h
|
| 12010 |
+
5n2x
|
| 12011 |
+
6aud
|
| 12012 |
+
5nee
|
| 12013 |
+
1bnw
|
| 12014 |
+
6bo6
|
| 12015 |
+
3kaf
|
| 12016 |
+
2fvd
|
| 12017 |
+
2xaj
|
| 12018 |
+
4a95
|
| 12019 |
+
5iv2
|
| 12020 |
+
3n2u
|
| 12021 |
+
4q6e
|
| 12022 |
+
2f34
|
| 12023 |
+
6evo
|
| 12024 |
+
5ksv
|
| 12025 |
+
2e9n
|
| 12026 |
+
5cgc
|
| 12027 |
+
1m51
|
| 12028 |
+
1d4h
|
| 12029 |
+
5kls
|
| 12030 |
+
3s2p
|
| 12031 |
+
1f0u
|
| 12032 |
+
4k3l
|
| 12033 |
+
4tww
|
| 12034 |
+
5f88
|
| 12035 |
+
3fcb
|
| 12036 |
+
4arw
|
| 12037 |
+
5kos
|
| 12038 |
+
4o9w
|
| 12039 |
+
3vzd
|
| 12040 |
+
2ay7
|
| 12041 |
+
5dcz
|
| 12042 |
+
5dqf
|
| 12043 |
+
3ttj
|
| 12044 |
+
3lc3
|
| 12045 |
+
1uml
|
| 12046 |
+
4oty
|
| 12047 |
+
4wzv
|
| 12048 |
+
1hxb
|
| 12049 |
+
5fcz
|
| 12050 |
+
5l87
|
| 12051 |
+
4ah9
|
| 12052 |
+
5ma7
|
| 12053 |
+
4hs8
|
| 12054 |
+
4qgd
|
| 12055 |
+
5zkc
|
| 12056 |
+
4d2t
|
| 12057 |
+
5elz
|
| 12058 |
+
3ara
|
| 12059 |
+
5cuh
|
| 12060 |
+
3jqg
|
| 12061 |
+
5w4s
|
| 12062 |
+
4n7m
|
| 12063 |
+
3tct
|
| 12064 |
+
4n1t
|
| 12065 |
+
5nhf
|
| 12066 |
+
2yge
|
| 12067 |
+
4gs9
|
| 12068 |
+
6bu3
|
| 12069 |
+
5vqr
|
| 12070 |
+
2am4
|
| 12071 |
+
5wr7
|
| 12072 |
+
4j58
|
| 12073 |
+
2bkt
|
| 12074 |
+
4buq
|
| 12075 |
+
3u7m
|
| 12076 |
+
3u51
|
| 12077 |
+
3w55
|
| 12078 |
+
2gfa
|
| 12079 |
+
3fum
|
| 12080 |
+
4hyu
|
| 12081 |
+
4pra
|
| 12082 |
+
4kyk
|
| 12083 |
+
3v2w
|
| 12084 |
+
2pog
|
| 12085 |
+
4li6
|
| 12086 |
+
3qj9
|
| 12087 |
+
5d3t
|
| 12088 |
+
2w67
|
| 12089 |
+
5fs5
|
| 12090 |
+
5qaf
|
| 12091 |
+
4crb
|
| 12092 |
+
6fe0
|
| 12093 |
+
3zmq
|
| 12094 |
+
5t1t
|
| 12095 |
+
5ks7
|
| 12096 |
+
2xm1
|
| 12097 |
+
4oaz
|
| 12098 |
+
6e8k
|
| 12099 |
+
2fys
|
| 12100 |
+
5egs
|
| 12101 |
+
4aac
|
| 12102 |
+
4wym
|
| 12103 |
+
2qbs
|
| 12104 |
+
1lke
|
| 12105 |
+
3smq
|
| 12106 |
+
5eds
|
| 12107 |
+
3wmb
|
| 12108 |
+
5j7q
|
| 12109 |
+
3cz1
|
| 12110 |
+
1s39
|
| 12111 |
+
1n94
|
| 12112 |
+
5lc0
|
| 12113 |
+
5wg5
|
| 12114 |
+
5ttg
|
| 12115 |
+
1ndz
|
| 12116 |
+
6gmx
|
| 12117 |
+
3fsj
|
| 12118 |
+
1cim
|
| 12119 |
+
2f2h
|
| 12120 |
+
4qsh
|
| 12121 |
+
2g9x
|
| 12122 |
+
1o47
|
| 12123 |
+
6aqf
|
| 12124 |
+
2ce9
|
| 12125 |
+
2ycf
|
| 12126 |
+
1t29
|
| 12127 |
+
4k5z
|
| 12128 |
+
5ijr
|
| 12129 |
+
5kks
|
| 12130 |
+
5tg2
|
| 12131 |
+
4yxu
|
| 12132 |
+
4i7c
|
| 12133 |
+
3ftv
|
| 12134 |
+
1ke3
|
| 12135 |
+
4bib
|
| 12136 |
+
3coy
|
| 12137 |
+
5orw
|
| 12138 |
+
5bns
|
| 12139 |
+
4bs0
|
| 12140 |
+
3igp
|
| 12141 |
+
2uw4
|
| 12142 |
+
4avj
|
| 12143 |
+
3qcy
|
| 12144 |
+
5vqz
|
| 12145 |
+
1yqj
|
| 12146 |
+
3a73
|
| 12147 |
+
4uu8
|
| 12148 |
+
4rwk
|
| 12149 |
+
5j5r
|
| 12150 |
+
2rkm
|
| 12151 |
+
1w14
|
| 12152 |
+
4edu
|
| 12153 |
+
3e73
|
| 12154 |
+
5o1s
|
| 12155 |
+
1mnc
|
| 12156 |
+
2ypp
|
| 12157 |
+
3wq5
|
| 12158 |
+
4j74
|
| 12159 |
+
3rz5
|
| 12160 |
+
5upe
|
| 12161 |
+
4eh6
|
| 12162 |
+
1c84
|
| 12163 |
+
4fcf
|
| 12164 |
+
1sc8
|
| 12165 |
+
3agm
|
| 12166 |
+
5w10
|
| 12167 |
+
1gzg
|
| 12168 |
+
4qvv
|
| 12169 |
+
4qp6
|
| 12170 |
+
3ip9
|
| 12171 |
+
5he0
|
| 12172 |
+
4yv5
|
| 12173 |
+
4uyd
|
| 12174 |
+
1nu3
|
| 12175 |
+
3ozr
|
| 12176 |
+
5qin
|
| 12177 |
+
4utv
|
| 12178 |
+
5x9o
|
| 12179 |
+
1okv
|
| 12180 |
+
3cyx
|
| 12181 |
+
4xhv
|
| 12182 |
+
5ll7
|
| 12183 |
+
4gm3
|
| 12184 |
+
2w78
|
| 12185 |
+
3bym
|
| 12186 |
+
1ga8
|
| 12187 |
+
1p5e
|
| 12188 |
+
4c4f
|
| 12189 |
+
2mps
|
| 12190 |
+
4w9f
|
| 12191 |
+
3lmk
|
| 12192 |
+
3sz1
|
| 12193 |
+
4tya
|
| 12194 |
+
3o2m
|
| 12195 |
+
2v85
|
| 12196 |
+
2ael
|
| 12197 |
+
4pv5
|
| 12198 |
+
3i02
|
| 12199 |
+
6fs1
|
| 12200 |
+
3twr
|
| 12201 |
+
2qoa
|
| 12202 |
+
4lk6
|
| 12203 |
+
3qtu
|
| 12204 |
+
3f8c
|
| 12205 |
+
2qhy
|
| 12206 |
+
5jmw
|
| 12207 |
+
3bun
|
| 12208 |
+
4l70
|
| 12209 |
+
6cd4
|
| 12210 |
+
4ymb
|
| 12211 |
+
4bcm
|
| 12212 |
+
6bh2
|
| 12213 |
+
5h0h
|
| 12214 |
+
1ppx
|
| 12215 |
+
6gnr
|
| 12216 |
+
2x6k
|
| 12217 |
+
3f9n
|
| 12218 |
+
4oys
|
| 12219 |
+
3rf5
|
| 12220 |
+
5orx
|
| 12221 |
+
3hub
|
| 12222 |
+
4mqu
|
| 12223 |
+
1i41
|
| 12224 |
+
3c5u
|
| 12225 |
+
4gw5
|
| 12226 |
+
1xp1
|
| 12227 |
+
2r9w
|
| 12228 |
+
5lm4
|
| 12229 |
+
2csm
|
| 12230 |
+
3n1w
|
| 12231 |
+
5nhz
|
| 12232 |
+
4zk5
|
| 12233 |
+
4kai
|
| 12234 |
+
1e6q
|
| 12235 |
+
4cg9
|
| 12236 |
+
4bf1
|
| 12237 |
+
2ycr
|
| 12238 |
+
3hd3
|
| 12239 |
+
4ehe
|
| 12240 |
+
5tku
|
| 12241 |
+
5wle
|
| 12242 |
+
2r64
|
| 12243 |
+
6ft8
|
| 12244 |
+
5ti5
|
| 12245 |
+
4tyt
|
| 12246 |
+
5lma
|
| 12247 |
+
3zt2
|
| 12248 |
+
5ai4
|
| 12249 |
+
3zh8
|
| 12250 |
+
6gjy
|
| 12251 |
+
3fl8
|
| 12252 |
+
3old
|
| 12253 |
+
3fqs
|
| 12254 |
+
4xct
|
| 12255 |
+
4mjp
|
| 12256 |
+
3g2y
|
| 12257 |
+
6g8n
|
| 12258 |
+
5kkr
|
| 12259 |
+
1fjs
|
| 12260 |
+
6dxg
|
| 12261 |
+
3che
|
| 12262 |
+
3qfv
|
| 12263 |
+
4tsz
|
| 12264 |
+
1r5v
|
| 12265 |
+
1pr1
|
| 12266 |
+
3nsn
|
| 12267 |
+
4p1u
|
| 12268 |
+
3szb
|
| 12269 |
+
5u11
|
| 12270 |
+
4o0t
|
| 12271 |
+
4q4i
|
| 12272 |
+
2nww
|
| 12273 |
+
4rsp
|
| 12274 |
+
1fsg
|
| 12275 |
+
1u0g
|
| 12276 |
+
2clv
|
| 12277 |
+
3opp
|
| 12278 |
+
4lg6
|
| 12279 |
+
1hps
|
| 12280 |
+
6bhe
|
| 12281 |
+
6bir
|
| 12282 |
+
5h09
|
| 12283 |
+
3kwj
|
| 12284 |
+
2bed
|
| 12285 |
+
3e51
|
| 12286 |
+
6axj
|
| 12287 |
+
3udd
|
| 12288 |
+
1zvx
|
| 12289 |
+
5o0e
|
| 12290 |
+
4eox
|
| 12291 |
+
2rly
|
| 12292 |
+
3ob2
|
| 12293 |
+
3osi
|
| 12294 |
+
5m7t
|
| 12295 |
+
5vqy
|
| 12296 |
+
5ei6
|
| 12297 |
+
6drt
|
| 12298 |
+
3mbz
|
| 12299 |
+
4f7n
|
| 12300 |
+
3ipb
|
| 12301 |
+
4zx9
|
| 12302 |
+
3nmq
|
| 12303 |
+
6gy5
|
| 12304 |
+
2hh5
|
| 12305 |
+
1vjy
|
| 12306 |
+
4hy5
|
| 12307 |
+
4ts1
|
| 12308 |
+
2qcg
|
| 12309 |
+
5lwd
|
| 12310 |
+
3oyn
|
| 12311 |
+
3rbq
|
| 12312 |
+
1y57
|
| 12313 |
+
3ujc
|
| 12314 |
+
3ekp
|
| 12315 |
+
3p7c
|
| 12316 |
+
3gws
|
| 12317 |
+
3fw4
|
| 12318 |
+
4pov
|
| 12319 |
+
4m0f
|
| 12320 |
+
3qaq
|
| 12321 |
+
4ovf
|
| 12322 |
+
1ps3
|
| 12323 |
+
4gw6
|
| 12324 |
+
5a4c
|
| 12325 |
+
2ovx
|
| 12326 |
+
1gwm
|
| 12327 |
+
2kwn
|
| 12328 |
+
4uce
|
| 12329 |
+
4y2p
|
| 12330 |
+
2q2c
|
| 12331 |
+
5lgn
|
| 12332 |
+
3i4b
|
| 12333 |
+
4qmo
|
| 12334 |
+
2wyg
|
| 12335 |
+
3kqb
|
| 12336 |
+
5eay
|
| 12337 |
+
2b4l
|
| 12338 |
+
2xd9
|
| 12339 |
+
3gjt
|
| 12340 |
+
2b1v
|
| 12341 |
+
3ctq
|
| 12342 |
+
5tzz
|
| 12343 |
+
3jup
|
| 12344 |
+
1h28
|
| 12345 |
+
4b13
|
| 12346 |
+
2aac
|
| 12347 |
+
4wku
|
| 12348 |
+
2fjp
|
| 12349 |
+
5eij
|
| 12350 |
+
4mga
|
| 12351 |
+
5ak2
|
| 12352 |
+
5dp4
|
| 12353 |
+
4inu
|
| 12354 |
+
4nrc
|
| 12355 |
+
1tng
|
| 12356 |
+
1xh3
|
| 12357 |
+
1toi
|
| 12358 |
+
2lya
|
| 12359 |
+
3mpm
|
| 12360 |
+
1qb9
|
| 12361 |
+
5hlw
|
| 12362 |
+
2fvc
|
| 12363 |
+
4muw
|
| 12364 |
+
3ikc
|
| 12365 |
+
3d91
|
| 12366 |
+
3img
|
| 12367 |
+
4op1
|
| 12368 |
+
4s3e
|
| 12369 |
+
3qxp
|
| 12370 |
+
4f5y
|
| 12371 |
+
4qmv
|
| 12372 |
+
3gwu
|
| 12373 |
+
4qtc
|
| 12374 |
+
6tim
|
| 12375 |
+
4h2j
|
| 12376 |
+
1x7e
|
| 12377 |
+
1b6h
|
| 12378 |
+
3opr
|
| 12379 |
+
4gjb
|
| 12380 |
+
5v3o
|
| 12381 |
+
5l44
|
| 12382 |
+
2c6i
|
| 12383 |
+
4ezr
|
| 12384 |
+
5u4e
|
| 12385 |
+
4wke
|
| 12386 |
+
3n3j
|
| 12387 |
+
5eg4
|
| 12388 |
+
2yiq
|
| 12389 |
+
4jmx
|
| 12390 |
+
4q2k
|
| 12391 |
+
2x2r
|
| 12392 |
+
2fzg
|
| 12393 |
+
1ajx
|
| 12394 |
+
3ccw
|
| 12395 |
+
5u0e
|
| 12396 |
+
5j32
|
| 12397 |
+
6gwr
|
| 12398 |
+
6gu7
|
| 12399 |
+
5dxb
|
| 12400 |
+
1hyo
|
| 12401 |
+
3lc5
|
| 12402 |
+
3arw
|
| 12403 |
+
3rz1
|
| 12404 |
+
5jv2
|
| 12405 |
+
4l2f
|
| 12406 |
+
5etj
|
| 12407 |
+
2a3w
|
| 12408 |
+
1h79
|
| 12409 |
+
6ed6
|
| 12410 |
+
5w85
|
| 12411 |
+
4x7l
|
| 12412 |
+
6mub
|
| 12413 |
+
5od5
|
| 12414 |
+
5ejl
|
| 12415 |
+
3ubx
|
| 12416 |
+
2c6k
|
| 12417 |
+
4rn0
|
| 12418 |
+
3pdq
|
| 12419 |
+
4unp
|
| 12420 |
+
3lp1
|
| 12421 |
+
5mng
|
| 12422 |
+
4w5j
|
| 12423 |
+
5afn
|
| 12424 |
+
1ahx
|
| 12425 |
+
2jst
|
| 12426 |
+
1o3h
|
| 12427 |
+
3rz8
|
| 12428 |
+
4na9
|
| 12429 |
+
1f0t
|
| 12430 |
+
4eym
|
| 12431 |
+
5vew
|
| 12432 |
+
4k3p
|
| 12433 |
+
4zs9
|
| 12434 |
+
3b9s
|
| 12435 |
+
5wcl
|
| 12436 |
+
4mmm
|
| 12437 |
+
2l65
|
| 12438 |
+
4lph
|
| 12439 |
+
3frz
|
| 12440 |
+
3rz9
|
| 12441 |
+
4kba
|
| 12442 |
+
5a8x
|
| 12443 |
+
5eps
|
| 12444 |
+
5f1j
|
| 12445 |
+
2oxd
|
| 12446 |
+
4g0k
|
| 12447 |
+
5gr9
|
| 12448 |
+
3uxl
|
| 12449 |
+
3fud
|
| 12450 |
+
4x68
|
| 12451 |
+
3k2f
|
| 12452 |
+
5lsg
|
| 12453 |
+
3zrm
|
| 12454 |
+
5v6u
|
| 12455 |
+
4lke
|
| 12456 |
+
1o4r
|
| 12457 |
+
4fcb
|
| 12458 |
+
4qp8
|
| 12459 |
+
1htf
|
| 12460 |
+
4yc0
|
| 12461 |
+
3q4b
|
| 12462 |
+
5dh4
|
| 12463 |
+
5ivy
|
| 12464 |
+
3sn7
|
| 12465 |
+
4waf
|
| 12466 |
+
3gst
|
| 12467 |
+
5ecv
|
| 12468 |
+
1hvj
|
| 12469 |
+
3m3c
|
| 12470 |
+
2ewb
|
| 12471 |
+
3ryz
|
| 12472 |
+
3a2o
|
| 12473 |
+
4ea3
|
| 12474 |
+
4wx6
|
| 12475 |
+
1i32
|
| 12476 |
+
4lge
|
| 12477 |
+
3cwe
|
| 12478 |
+
1h0r
|
| 12479 |
+
5m9w
|
| 12480 |
+
3jwr
|
| 12481 |
+
2q70
|
| 12482 |
+
4iu0
|
| 12483 |
+
1tl3
|
| 12484 |
+
2gqn
|
| 12485 |
+
3bc3
|
| 12486 |
+
4gk7
|
| 12487 |
+
2wd3
|
| 12488 |
+
4ybt
|
| 12489 |
+
5dms
|
| 12490 |
+
1nhg
|
| 12491 |
+
3f16
|
| 12492 |
+
4mw1
|
| 12493 |
+
3cgo
|
| 12494 |
+
5v4q
|
| 12495 |
+
4c36
|
| 12496 |
+
4x6n
|
| 12497 |
+
3bh8
|
| 12498 |
+
3f6h
|
| 12499 |
+
5d0j
|
| 12500 |
+
3l6x
|
| 12501 |
+
4c1u
|
| 12502 |
+
6fzx
|
| 12503 |
+
4hzx
|
| 12504 |
+
4g8o
|
| 12505 |
+
4mrz
|
| 12506 |
+
4tyo
|
| 12507 |
+
4umj
|
| 12508 |
+
4rfz
|
| 12509 |
+
1rxp
|
| 12510 |
+
2zyn
|
| 12511 |
+
5t54
|
| 12512 |
+
6bh3
|
| 12513 |
+
4cd5
|
| 12514 |
+
4hlk
|
| 12515 |
+
3t0x
|
| 12516 |
+
5dy5
|
| 12517 |
+
1ft4
|
| 12518 |
+
5org
|
| 12519 |
+
5to8
|
| 12520 |
+
6aox
|
| 12521 |
+
2y5f
|
| 12522 |
+
4x7o
|
| 12523 |
+
4y4v
|
| 12524 |
+
1h25
|
| 12525 |
+
5d2r
|
| 12526 |
+
2xch
|
| 12527 |
+
5ndd
|
| 12528 |
+
4nzo
|
| 12529 |
+
3mct
|
| 12530 |
+
5dia
|
| 12531 |
+
1iih
|
| 12532 |
+
3ime
|
| 12533 |
+
3tll
|
| 12534 |
+
1w2x
|
| 12535 |
+
2jle
|
| 12536 |
+
1h26
|
| 12537 |
+
5v88
|
| 12538 |
+
1o2o
|
| 12539 |
+
3e90
|
| 12540 |
+
4j46
|
| 12541 |
+
3dp9
|
| 12542 |
+
5ngb
|
| 12543 |
+
5jh6
|
| 12544 |
+
3rpy
|
| 12545 |
+
2vfz
|
| 12546 |
+
5xn3
|
| 12547 |
+
4xx4
|
| 12548 |
+
3ppo
|
| 12549 |
+
4qfo
|
| 12550 |
+
1o3d
|
| 12551 |
+
3s43
|
| 12552 |
+
6ar2
|
| 12553 |
+
3cyu
|
| 12554 |
+
1qf0
|
| 12555 |
+
4amz
|
| 12556 |
+
4mti
|
| 12557 |
+
6f29
|
| 12558 |
+
5mk1
|
| 12559 |
+
1qsc
|
| 12560 |
+
1uvr
|
| 12561 |
+
4whq
|
| 12562 |
+
4d08
|
| 12563 |
+
6gu3
|
| 12564 |
+
4b6q
|
| 12565 |
+
4agc
|
| 12566 |
+
4uu5
|
| 12567 |
+
3blt
|
| 12568 |
+
2zb1
|
| 12569 |
+
3krd
|
| 12570 |
+
3lm1
|
| 12571 |
+
1t32
|
| 12572 |
+
5kbq
|
| 12573 |
+
1gwr
|
| 12574 |
+
4k6v
|
| 12575 |
+
2vba
|
| 12576 |
+
3zrc
|
| 12577 |
+
4nmx
|
| 12578 |
+
4mbc
|
| 12579 |
+
1ie9
|
| 12580 |
+
3bgz
|
| 12581 |
+
2k62
|
| 12582 |
+
1mqg
|
| 12583 |
+
1qy1
|
| 12584 |
+
2x7u
|
| 12585 |
+
3iqq
|
| 12586 |
+
2g5p
|
| 12587 |
+
4q3u
|
| 12588 |
+
5flo
|
| 12589 |
+
4bo1
|
| 12590 |
+
5tyr
|
| 12591 |
+
5x26
|
| 12592 |
+
2drc
|
| 12593 |
+
4bbh
|
| 12594 |
+
3gkz
|
| 12595 |
+
5e6o
|
| 12596 |
+
4qy3
|
| 12597 |
+
1nq0
|
| 12598 |
+
2gu8
|
| 12599 |
+
1w9v
|
| 12600 |
+
5uci
|
| 12601 |
+
2veu
|
| 12602 |
+
5hvy
|
| 12603 |
+
3ewu
|
| 12604 |
+
1jm4
|
| 12605 |
+
5yas
|
| 12606 |
+
1ols
|
| 12607 |
+
4ojr
|
| 12608 |
+
1h0w
|
| 12609 |
+
3as0
|
| 12610 |
+
4e3g
|
| 12611 |
+
4euv
|
| 12612 |
+
2wly
|
| 12613 |
+
3lir
|
| 12614 |
+
3znr
|
| 12615 |
+
3q3t
|
| 12616 |
+
6ap8
|
| 12617 |
+
3g3n
|
| 12618 |
+
3c52
|
| 12619 |
+
3g2w
|
| 12620 |
+
4iku
|
| 12621 |
+
5bry
|
| 12622 |
+
3neo
|
| 12623 |
+
4wvu
|
| 12624 |
+
1abf
|
| 12625 |
+
1sjh
|
| 12626 |
+
3hc8
|
| 12627 |
+
2liq
|
| 12628 |
+
5dtw
|
| 12629 |
+
5dhu
|
| 12630 |
+
1jvp
|
| 12631 |
+
5jxn
|
| 12632 |
+
4mrd
|
| 12633 |
+
2yjc
|
| 12634 |
+
2o9r
|
| 12635 |
+
5en3
|
| 12636 |
+
5hpm
|
| 12637 |
+
3k05
|
| 12638 |
+
3h0q
|
| 12639 |
+
2a4l
|
| 12640 |
+
4xue
|
| 12641 |
+
1dtt
|
| 12642 |
+
4yzm
|
| 12643 |
+
4q4p
|
| 12644 |
+
3d9o
|
| 12645 |
+
3s8x
|
| 12646 |
+
6ek3
|
| 12647 |
+
2hds
|
| 12648 |
+
5fwj
|
| 12649 |
+
3kme
|
| 12650 |
+
1zog
|
| 12651 |
+
5bvf
|
| 12652 |
+
4c6v
|
| 12653 |
+
1xdd
|
| 12654 |
+
1dud
|
| 12655 |
+
3lzu
|
| 12656 |
+
2er6
|
| 12657 |
+
3f48
|
| 12658 |
+
2xuz
|
| 12659 |
+
3c39
|
| 12660 |
+
1ydd
|
| 12661 |
+
6fqu
|
| 12662 |
+
4fc0
|
| 12663 |
+
5w4r
|
| 12664 |
+
1bzc
|
| 12665 |
+
3pj8
|
| 12666 |
+
2auc
|
| 12667 |
+
4kp0
|
| 12668 |
+
2y7p
|
| 12669 |
+
3h23
|
| 12670 |
+
4ono
|
| 12671 |
+
3nc9
|
| 12672 |
+
4bjb
|
| 12673 |
+
3mz3
|
| 12674 |
+
5edc
|
| 12675 |
+
2xzg
|
| 12676 |
+
6bqj
|
| 12677 |
+
1ga9
|
| 12678 |
+
4oq6
|
| 12679 |
+
5ee7
|
| 12680 |
+
3rj7
|
| 12681 |
+
4y8z
|
| 12682 |
+
5ka1
|
| 12683 |
+
5vii
|
| 12684 |
+
5ele
|
| 12685 |
+
2ew6
|
| 12686 |
+
1s9v
|
| 12687 |
+
2x6w
|
| 12688 |
+
5mnc
|
| 12689 |
+
5eni
|
| 12690 |
+
1wqv
|
| 12691 |
+
4jve
|
| 12692 |
+
3oy0
|
| 12693 |
+
4ish
|
| 12694 |
+
6h41
|
| 12695 |
+
1ebw
|
| 12696 |
+
4zx4
|
| 12697 |
+
4q15
|
| 12698 |
+
6czv
|
| 12699 |
+
2wti
|
| 12700 |
+
5j9l
|
| 12701 |
+
1o5r
|
| 12702 |
+
2bq7
|
| 12703 |
+
4fic
|
| 12704 |
+
2ole
|
| 12705 |
+
3umw
|
| 12706 |
+
1a1e
|
| 12707 |
+
1ax0
|
| 12708 |
+
1fcy
|
| 12709 |
+
4ux9
|
| 12710 |
+
6bhi
|
| 12711 |
+
3pj3
|
| 12712 |
+
5m55
|
| 12713 |
+
2uw6
|
| 12714 |
+
2ea2
|
| 12715 |
+
5dl1
|
| 12716 |
+
6g7f
|
| 12717 |
+
5o8t
|
| 12718 |
+
5t1l
|
| 12719 |
+
1d09
|
| 12720 |
+
3ibi
|
| 12721 |
+
5h22
|
| 12722 |
+
4bgh
|
| 12723 |
+
2j9h
|
| 12724 |
+
2iyf
|
| 12725 |
+
5m28
|
| 12726 |
+
2v0c
|
| 12727 |
+
5b6g
|
| 12728 |
+
2q3z
|
| 12729 |
+
3pcj
|
| 12730 |
+
5y0z
|
| 12731 |
+
3u6h
|
| 12732 |
+
3dtc
|
| 12733 |
+
3cr4
|
| 12734 |
+
2igv
|
| 12735 |
+
4qaa
|
| 12736 |
+
3n35
|
| 12737 |
+
5ylj
|
| 12738 |
+
5dls
|
| 12739 |
+
5h1u
|
| 12740 |
+
4qw4
|
| 12741 |
+
4hv3
|
| 12742 |
+
4jfi
|
| 12743 |
+
2go4
|
| 12744 |
+
2m0o
|
| 12745 |
+
5vzy
|
| 12746 |
+
2h6t
|
| 12747 |
+
4x1n
|
| 12748 |
+
2qnn
|
| 12749 |
+
2aqb
|
| 12750 |
+
1zd5
|
| 12751 |
+
1bnt
|
| 12752 |
+
4z46
|
| 12753 |
+
2p8s
|
| 12754 |
+
3uh2
|
| 12755 |
+
1b2m
|
| 12756 |
+
5eiw
|
| 12757 |
+
4bfy
|
| 12758 |
+
5csx
|
| 12759 |
+
2rjp
|
| 12760 |
+
5o7n
|
| 12761 |
+
3d3p
|
| 12762 |
+
4l0l
|
| 12763 |
+
3zo2
|
| 12764 |
+
3zhx
|
| 12765 |
+
2hah
|
| 12766 |
+
3dx1
|
| 12767 |
+
4dkt
|
| 12768 |
+
3n0n
|
| 12769 |
+
4l4m
|
| 12770 |
+
6asz
|
| 12771 |
+
3hv4
|
| 12772 |
+
5boj
|
| 12773 |
+
3imc
|
| 12774 |
+
3eju
|
| 12775 |
+
5t6z
|
| 12776 |
+
4jaz
|
| 12777 |
+
1gjb
|
| 12778 |
+
5fl0
|
| 12779 |
+
3r9n
|
| 12780 |
+
3sw8
|
| 12781 |
+
5ggp
|
| 12782 |
+
1x8t
|
| 12783 |
+
6m9f
|
| 12784 |
+
3bva
|
| 12785 |
+
2wtc
|
| 12786 |
+
5lom
|
| 12787 |
+
2vwc
|
| 12788 |
+
6bw2
|
| 12789 |
+
4n1z
|
| 12790 |
+
2w16
|
| 12791 |
+
3lzb
|
| 12792 |
+
1fwe
|
| 12793 |
+
2i6b
|
| 12794 |
+
4j8r
|
| 12795 |
+
4je8
|
| 12796 |
+
2xn7
|
| 12797 |
+
3t60
|
| 12798 |
+
2n9x
|
| 12799 |
+
4lwu
|
| 12800 |
+
6ebe
|
| 12801 |
+
6ajj
|
| 12802 |
+
2xp7
|
| 12803 |
+
3ql9
|
| 12804 |
+
1q7a
|
| 12805 |
+
5dkn
|
| 12806 |
+
2gnl
|
| 12807 |
+
3rwc
|
| 12808 |
+
5npr
|
| 12809 |
+
1ajv
|
| 12810 |
+
6e4f
|
| 12811 |
+
5alf
|
| 12812 |
+
2xys
|
| 12813 |
+
6h5w
|
| 12814 |
+
2jfz
|
| 12815 |
+
4bda
|
| 12816 |
+
3rxd
|
| 12817 |
+
1f57
|
| 12818 |
+
9icd
|
| 12819 |
+
3zpr
|
| 12820 |
+
4ciw
|
| 12821 |
+
2zcr
|
| 12822 |
+
2w9r
|
| 12823 |
+
3hnz
|
| 12824 |
+
4r1y
|
| 12825 |
+
3olg
|
| 12826 |
+
1kwq
|
| 12827 |
+
5lrg
|
| 12828 |
+
5f0c
|
| 12829 |
+
3u8j
|
| 12830 |
+
1bl6
|
| 12831 |
+
4b6s
|
| 12832 |
+
1ej4
|
| 12833 |
+
4ht0
|
| 12834 |
+
3u6w
|
| 12835 |
+
4q09
|
| 12836 |
+
5jss
|
| 12837 |
+
2b1i
|
| 12838 |
+
5cls
|
| 12839 |
+
5mrh
|
| 12840 |
+
3nox
|
| 12841 |
+
2qe2
|
| 12842 |
+
3u6j
|
| 12843 |
+
1qka
|
| 12844 |
+
3k5u
|
| 12845 |
+
1ws1
|
| 12846 |
+
6ft9
|
| 12847 |
+
1ajn
|
| 12848 |
+
3zn0
|
| 12849 |
+
2i3z
|
| 12850 |
+
1y3x
|
| 12851 |
+
2r9s
|
| 12852 |
+
5vpm
|
| 12853 |
+
4occ
|
| 12854 |
+
3osw
|
| 12855 |
+
3hy5
|
| 12856 |
+
4j86
|
| 12857 |
+
5f1v
|
| 12858 |
+
5l4e
|
| 12859 |
+
2qve
|
| 12860 |
+
4erf
|
| 12861 |
+
1wss
|
| 12862 |
+
1ajq
|
| 12863 |
+
5h5r
|
| 12864 |
+
1hee
|
| 12865 |
+
3rq7
|
| 12866 |
+
1dxp
|
| 12867 |
+
1vgc
|
| 12868 |
+
3huc
|
| 12869 |
+
5fqb
|
| 12870 |
+
4mue
|
| 12871 |
+
1qbs
|
| 12872 |
+
1ujk
|
| 12873 |
+
4xo8
|
| 12874 |
+
4ww8
|
| 12875 |
+
4qxt
|
| 12876 |
+
2wxp
|
| 12877 |
+
5j3s
|
| 12878 |
+
2h1h
|
| 12879 |
+
3f82
|
| 12880 |
+
4lhm
|
| 12881 |
+
2rjs
|
| 12882 |
+
5nvf
|
| 12883 |
+
3l0n
|
| 12884 |
+
5b5f
|
| 12885 |
+
4oow
|
| 12886 |
+
3ej5
|
| 12887 |
+
1li6
|
| 12888 |
+
5swg
|
| 12889 |
+
6eum
|
| 12890 |
+
3fas
|
| 12891 |
+
5klr
|
| 12892 |
+
2vv9
|
| 12893 |
+
2byr
|
| 12894 |
+
5nr7
|
| 12895 |
+
3suf
|
| 12896 |
+
2xb9
|
| 12897 |
+
1f4x
|
| 12898 |
+
5v86
|
| 12899 |
+
3kl6
|
| 12900 |
+
1qng
|
| 12901 |
+
2a8g
|
| 12902 |
+
2j62
|
| 12903 |
+
3zi0
|
| 12904 |
+
5j5d
|
| 12905 |
+
1h6e
|
| 12906 |
+
3ap7
|
| 12907 |
+
3zj6
|
| 12908 |
+
4hp0
|
| 12909 |
+
1bmk
|
| 12910 |
+
1nj1
|
| 12911 |
+
5xp7
|
| 12912 |
+
4iwd
|
| 12913 |
+
1if7
|
| 12914 |
+
4ia0
|
| 12915 |
+
5yjk
|
| 12916 |
+
5j7f
|
| 12917 |
+
5ake
|
| 12918 |
+
1h2t
|
| 12919 |
+
4ezj
|
| 12920 |
+
4mzk
|
| 12921 |
+
5jfp
|
| 12922 |
+
4an3
|
| 12923 |
+
5v24
|
| 12924 |
+
2xmy
|
| 12925 |
+
2vts
|
| 12926 |
+
2i0h
|
| 12927 |
+
2hwg
|
| 12928 |
+
3ddg
|
| 12929 |
+
2zfs
|
| 12930 |
+
5wfd
|
| 12931 |
+
4lnb
|
| 12932 |
+
4qyy
|
| 12933 |
+
2r3j
|
| 12934 |
+
4eoi
|
| 12935 |
+
3pdh
|
| 12936 |
+
1y3p
|
| 12937 |
+
4g5y
|
| 12938 |
+
3v4t
|
| 12939 |
+
4okp
|
| 12940 |
+
1xpc
|
| 12941 |
+
3avl
|
| 12942 |
+
6gzl
|
| 12943 |
+
2pqc
|
| 12944 |
+
3wab
|
| 12945 |
+
5iop
|
| 12946 |
+
3bex
|
| 12947 |
+
3cdb
|
| 12948 |
+
6afc
|
| 12949 |
+
1v0m
|
| 12950 |
+
3ksl
|
| 12951 |
+
3fvl
|
| 12952 |
+
6c7d
|
| 12953 |
+
1t2v
|
| 12954 |
+
1jn2
|
| 12955 |
+
4k4e
|
| 12956 |
+
1a94
|
| 12957 |
+
1j81
|
| 12958 |
+
3e93
|
| 12959 |
+
1q9d
|
| 12960 |
+
4idt
|
| 12961 |
+
4k2f
|
| 12962 |
+
3snc
|
| 12963 |
+
4otg
|
| 12964 |
+
2lty
|
| 12965 |
+
5bnr
|
| 12966 |
+
6c7r
|
| 12967 |
+
3as3
|
| 12968 |
+
4ge9
|
| 12969 |
+
4xg7
|
| 12970 |
+
1d4y
|
| 12971 |
+
1gno
|
| 12972 |
+
5dqe
|
| 12973 |
+
3rwi
|
| 12974 |
+
4x8o
|
| 12975 |
+
3el9
|
| 12976 |
+
4kup
|
| 12977 |
+
3eks
|
| 12978 |
+
2kff
|
| 12979 |
+
1w5y
|
| 12980 |
+
2vwv
|
| 12981 |
+
3u3f
|
| 12982 |
+
3rvg
|
| 12983 |
+
4oeu
|
| 12984 |
+
3u4o
|
| 12985 |
+
2pv3
|
| 12986 |
+
4i0t
|
| 12987 |
+
3aru
|
| 12988 |
+
4yv0
|
| 12989 |
+
5uv1
|
| 12990 |
+
3vjc
|
| 12991 |
+
5vc5
|
| 12992 |
+
2ito
|
| 12993 |
+
3adv
|
| 12994 |
+
3oe6
|
| 12995 |
+
5mnb
|
| 12996 |
+
5gvm
|
| 12997 |
+
1r1j
|
| 12998 |
+
2qe5
|
| 12999 |
+
2rjr
|
| 13000 |
+
4cfm
|
| 13001 |
+
3c1n
|
| 13002 |
+
6ma3
|
| 13003 |
+
1z9h
|
| 13004 |
+
4jk6
|
| 13005 |
+
6g98
|
| 13006 |
+
1qm4
|
| 13007 |
+
4j24
|
| 13008 |
+
3pju
|
| 13009 |
+
1wvj
|
| 13010 |
+
4csy
|
| 13011 |
+
5cqx
|
| 13012 |
+
2yde
|
| 13013 |
+
4mqp
|
| 13014 |
+
2np9
|
| 13015 |
+
4zyu
|
| 13016 |
+
3ua9
|
| 13017 |
+
3fc2
|
| 13018 |
+
2xp5
|
| 13019 |
+
1umw
|
| 13020 |
+
5jer
|
| 13021 |
+
3dcv
|
| 13022 |
+
4aft
|
| 13023 |
+
4rhu
|
| 13024 |
+
3ahn
|
| 13025 |
+
6hth
|
| 13026 |
+
4ytf
|
| 13027 |
+
5os2
|
| 13028 |
+
5el9
|
| 13029 |
+
4oth
|
| 13030 |
+
2mnz
|
| 13031 |
+
1lkl
|
| 13032 |
+
6ela
|
| 13033 |
+
5vkf
|
| 13034 |
+
2oz6
|
| 13035 |
+
4q1f
|
| 13036 |
+
5usz
|
| 13037 |
+
4fxq
|
| 13038 |
+
3ggu
|
| 13039 |
+
1hiv
|
| 13040 |
+
4d2s
|
| 13041 |
+
2fkf
|
| 13042 |
+
4oz3
|
| 13043 |
+
6fkz
|
| 13044 |
+
2qd6
|
| 13045 |
+
1jyq
|
| 13046 |
+
3uvw
|
| 13047 |
+
4f6s
|
| 13048 |
+
2qk8
|
| 13049 |
+
1lvu
|
| 13050 |
+
11gs
|
| 13051 |
+
3ml5
|
| 13052 |
+
5vkc
|
| 13053 |
+
1wu1
|
| 13054 |
+
1ih0
|
| 13055 |
+
5m17
|
| 13056 |
+
1rne
|
| 13057 |
+
1l2z
|
| 13058 |
+
3s0d
|
| 13059 |
+
4nxq
|
| 13060 |
+
4at5
|
| 13061 |
+
5iu2
|
| 13062 |
+
1x78
|
| 13063 |
+
6bc9
|
| 13064 |
+
5dgu
|
| 13065 |
+
5juz
|
| 13066 |
+
4qvn
|
| 13067 |
+
5i7x
|
| 13068 |
+
5o7e
|
| 13069 |
+
1fl3
|
| 13070 |
+
1enu
|
| 13071 |
+
2clf
|
| 13072 |
+
4zs0
|
| 13073 |
+
4llp
|
| 13074 |
+
5nrf
|
| 13075 |
+
4z0k
|
| 13076 |
+
6cdc
|
| 13077 |
+
6bin
|
| 13078 |
+
4e1z
|
| 13079 |
+
2q64
|
| 13080 |
+
3s0o
|
| 13081 |
+
4z0u
|
| 13082 |
+
1ii5
|
| 13083 |
+
4lk7
|
| 13084 |
+
5ubt
|
| 13085 |
+
5m1z
|
| 13086 |
+
1t4j
|
| 13087 |
+
1g7v
|
| 13088 |
+
4q1n
|
| 13089 |
+
3l08
|
| 13090 |
+
5d7r
|
| 13091 |
+
2emt
|
| 13092 |
+
4y2b
|
| 13093 |
+
4g31
|
| 13094 |
+
5n0d
|
| 13095 |
+
6ecz
|
| 13096 |
+
4gb9
|
| 13097 |
+
4jfl
|
| 13098 |
+
2ow3
|
| 13099 |
+
1ibg
|
| 13100 |
+
3fyz
|
| 13101 |
+
5g5z
|
| 13102 |
+
7lpr
|
| 13103 |
+
4g8n
|
| 13104 |
+
4bie
|
| 13105 |
+
2yir
|
| 13106 |
+
4yps
|
| 13107 |
+
5jdc
|
| 13108 |
+
3p4v
|
| 13109 |
+
2fum
|
| 13110 |
+
5c4o
|
| 13111 |
+
1cj1
|
| 13112 |
+
6f09
|
| 13113 |
+
4ce3
|
| 13114 |
+
4ft2
|
| 13115 |
+
2pcu
|
| 13116 |
+
4mo8
|
| 13117 |
+
2g2r
|
| 13118 |
+
4utn
|
| 13119 |
+
5nxp
|
| 13120 |
+
5t52
|
| 13121 |
+
3iod
|
| 13122 |
+
1h01
|
| 13123 |
+
4deb
|
| 13124 |
+
4jjs
|
| 13125 |
+
5ts0
|
| 13126 |
+
3su0
|
| 13127 |
+
4yhf
|
| 13128 |
+
4u4s
|
| 13129 |
+
3heg
|
| 13130 |
+
2g70
|
| 13131 |
+
3p2h
|
| 13132 |
+
1sqq
|
| 13133 |
+
2f4b
|
| 13134 |
+
1igj
|
| 13135 |
+
3fcq
|
| 13136 |
+
2pja
|
| 13137 |
+
4ase
|
| 13138 |
+
2xyf
|
| 13139 |
+
5ka7
|
| 13140 |
+
5kzp
|
| 13141 |
+
3oob
|
| 13142 |
+
5w6t
|
| 13143 |
+
1h2u
|
| 13144 |
+
4zt3
|
| 13145 |
+
1w96
|
| 13146 |
+
2pzi
|
| 13147 |
+
4qf7
|
| 13148 |
+
3c88
|
| 13149 |
+
4y67
|
| 13150 |
+
2wzm
|
| 13151 |
+
4qtn
|
| 13152 |
+
3sue
|
| 13153 |
+
6fhu
|
| 13154 |
+
1qs4
|
| 13155 |
+
5u6b
|
| 13156 |
+
5edb
|
| 13157 |
+
2p2h
|
| 13158 |
+
5gtr
|
| 13159 |
+
4hbp
|
| 13160 |
+
3tia
|
| 13161 |
+
2cgw
|
| 13162 |
+
3gwt
|
| 13163 |
+
5t8r
|
| 13164 |
+
6fo7
|
| 13165 |
+
1g9t
|
| 13166 |
+
3kr0
|
| 13167 |
+
1gt3
|
| 13168 |
+
5c83
|
| 13169 |
+
2r9x
|
| 13170 |
+
5nk7
|
| 13171 |
+
4cfu
|
| 13172 |
+
5e2r
|
| 13173 |
+
4b4q
|
| 13174 |
+
3uil
|
| 13175 |
+
5fl6
|
| 13176 |
+
1ms7
|
| 13177 |
+
4csj
|
| 13178 |
+
4yat
|
| 13179 |
+
4f1q
|
| 13180 |
+
6fi1
|
| 13181 |
+
4lm1
|
| 13182 |
+
3hj0
|
| 13183 |
+
3oad
|
| 13184 |
+
4bd3
|
| 13185 |
+
6h7n
|
| 13186 |
+
5exm
|
| 13187 |
+
1ctu
|
| 13188 |
+
5l3f
|
| 13189 |
+
2ajd
|
| 13190 |
+
2c94
|
| 13191 |
+
3g3r
|
| 13192 |
+
4dro
|
| 13193 |
+
1uou
|
| 13194 |
+
6bsl
|
| 13195 |
+
1z4u
|
| 13196 |
+
3zyb
|
| 13197 |
+
4o43
|
| 13198 |
+
3wch
|
| 13199 |
+
5orb
|
| 13200 |
+
3kgp
|
| 13201 |
+
3zeb
|
| 13202 |
+
4pyy
|
| 13203 |
+
4b0j
|
| 13204 |
+
1jq8
|
| 13205 |
+
3u4w
|
| 13206 |
+
2xyt
|
| 13207 |
+
5jhd
|
| 13208 |
+
3hmp
|
| 13209 |
+
4dxj
|
| 13210 |
+
5hg7
|
| 13211 |
+
3zlx
|
| 13212 |
+
1zkk
|
| 13213 |
+
1gny
|
| 13214 |
+
4ju6
|
| 13215 |
+
1il3
|
| 13216 |
+
4daf
|
| 13217 |
+
4o0x
|
| 13218 |
+
5ygi
|
| 13219 |
+
6gla
|
| 13220 |
+
3mtw
|
| 13221 |
+
3mg0
|
| 13222 |
+
3d9l
|
| 13223 |
+
5edq
|
| 13224 |
+
4bci
|
| 13225 |
+
4e4x
|
| 13226 |
+
3wuu
|
| 13227 |
+
3udv
|
| 13228 |
+
1lpk
|
| 13229 |
+
4dkq
|
| 13230 |
+
4bfp
|
| 13231 |
+
5i2e
|
| 13232 |
+
3rm8
|
| 13233 |
+
4ahu
|
| 13234 |
+
3wi2
|
| 13235 |
+
4de2
|
| 13236 |
+
5g2g
|
| 13237 |
+
2wuu
|
| 13238 |
+
5nud
|
| 13239 |
+
4hcu
|
| 13240 |
+
3k37
|
| 13241 |
+
6prc
|
| 13242 |
+
2e7f
|
| 13243 |
+
4bb9
|
| 13244 |
+
1amk
|
| 13245 |
+
4l1a
|
| 13246 |
+
5wkf
|
| 13247 |
+
4qsm
|
| 13248 |
+
3sxu
|
| 13249 |
+
5lif
|
| 13250 |
+
4azf
|
| 13251 |
+
5ntt
|
| 13252 |
+
5d75
|
| 13253 |
+
3vsw
|
| 13254 |
+
5fos
|
| 13255 |
+
1qin
|
| 13256 |
+
3e7b
|
| 13257 |
+
3vjm
|
| 13258 |
+
3ny3
|
| 13259 |
+
6aqs
|
| 13260 |
+
1k6c
|
| 13261 |
+
1ydb
|
| 13262 |
+
6fc6
|
| 13263 |
+
2gnj
|
| 13264 |
+
4zy6
|
| 13265 |
+
5dhq
|
| 13266 |
+
2aou
|
| 13267 |
+
4b0b
|
| 13268 |
+
5t2p
|
| 13269 |
+
5efb
|
| 13270 |
+
5u0d
|
| 13271 |
+
5vjp
|
| 13272 |
+
5bui
|
| 13273 |
+
5e13
|
| 13274 |
+
5f5i
|
| 13275 |
+
4ivk
|
| 13276 |
+
7hvp
|
| 13277 |
+
3d7k
|
| 13278 |
+
4vgc
|
| 13279 |
+
5g1p
|
| 13280 |
+
6b1h
|
| 13281 |
+
2br8
|
| 13282 |
+
1cqp
|
| 13283 |
+
4w50
|
| 13284 |
+
4ehv
|
| 13285 |
+
3ws9
|
| 13286 |
+
2yfa
|
| 13287 |
+
5mpk
|
| 13288 |
+
4zz0
|
| 13289 |
+
3str
|
| 13290 |
+
5mrp
|
| 13291 |
+
5nkd
|
| 13292 |
+
5kqy
|
| 13293 |
+
4lw1
|
| 13294 |
+
1avp
|
| 13295 |
+
5xxk
|
| 13296 |
+
4oks
|
| 13297 |
+
4hxr
|
| 13298 |
+
3ur0
|
| 13299 |
+
4pns
|
| 13300 |
+
4i5m
|
| 13301 |
+
2vtd
|
| 13302 |
+
2gph
|
| 13303 |
+
3rme
|
| 13304 |
+
5fqp
|
| 13305 |
+
5hfu
|
| 13306 |
+
2iok
|
| 13307 |
+
4yux
|
| 13308 |
+
2hvx
|
| 13309 |
+
1n0s
|
| 13310 |
+
3s1y
|
| 13311 |
+
1g7p
|
| 13312 |
+
2rr4
|
| 13313 |
+
2x00
|
| 13314 |
+
2b55
|
| 13315 |
+
4wmv
|
| 13316 |
+
3evf
|
| 13317 |
+
4kc1
|
| 13318 |
+
2x39
|
| 13319 |
+
5joh
|
| 13320 |
+
2w0p
|
| 13321 |
+
4az6
|
| 13322 |
+
1o0f
|
| 13323 |
+
3haw
|
| 13324 |
+
1h8s
|
| 13325 |
+
4ge1
|
| 13326 |
+
3phe
|
| 13327 |
+
1evh
|
| 13328 |
+
6b30
|
| 13329 |
+
1g35
|
| 13330 |
+
5gjf
|
| 13331 |
+
6bt6
|
| 13332 |
+
5trr
|
| 13333 |
+
5ly3
|
| 13334 |
+
5eh0
|
| 13335 |
+
5ioy
|
| 13336 |
+
1xt8
|
| 13337 |
+
3c3o
|
| 13338 |
+
5e74
|
| 13339 |
+
3acl
|
| 13340 |
+
5abh
|
| 13341 |
+
5fol
|
| 13342 |
+
3qxv
|
| 13343 |
+
3ttm
|
| 13344 |
+
3iwy
|
| 13345 |
+
5lxb
|
| 13346 |
+
5n18
|
| 13347 |
+
6beb
|
| 13348 |
+
5nxw
|
| 13349 |
+
4muk
|
| 13350 |
+
4mm4
|
| 13351 |
+
4hs6
|
| 13352 |
+
5ncq
|
| 13353 |
+
4i72
|
| 13354 |
+
4hcv
|
| 13355 |
+
4pee
|
| 13356 |
+
1q63
|
| 13357 |
+
1qtn
|
| 13358 |
+
4rfy
|
| 13359 |
+
5dyy
|
| 13360 |
+
3ud8
|
| 13361 |
+
5itf
|
| 13362 |
+
5hey
|
| 13363 |
+
2ko7
|
| 13364 |
+
3oil
|
| 13365 |
+
5tks
|
| 13366 |
+
5o3q
|
| 13367 |
+
4k72
|
| 13368 |
+
4oma
|
| 13369 |
+
3hv6
|
| 13370 |
+
5tq4
|
| 13371 |
+
5uxf
|
| 13372 |
+
6df7
|
| 13373 |
+
5er4
|
| 13374 |
+
4no6
|
| 13375 |
+
1rev
|
| 13376 |
+
6hdq
|
| 13377 |
+
3c43
|
| 13378 |
+
1a7t
|
| 13379 |
+
6fky
|
| 13380 |
+
5n1r
|
| 13381 |
+
3rr4
|
| 13382 |
+
5bqg
|
| 13383 |
+
3b8r
|
| 13384 |
+
2pfy
|
| 13385 |
+
5c7f
|
| 13386 |
+
5l6j
|
| 13387 |
+
1etz
|
| 13388 |
+
3drf
|
| 13389 |
+
3k7f
|
| 13390 |
+
4b6e
|
| 13391 |
+
4kil
|
| 13392 |
+
6asu
|
| 13393 |
+
1fd0
|
| 13394 |
+
4zjc
|
| 13395 |
+
1y3g
|
| 13396 |
+
4joh
|
| 13397 |
+
4c38
|
| 13398 |
+
4h5c
|
| 13399 |
+
5kej
|
| 13400 |
+
6aji
|
| 13401 |
+
4mvy
|
| 13402 |
+
4pms
|
| 13403 |
+
6fx1
|
| 13404 |
+
3ioi
|
| 13405 |
+
2xbj
|
| 13406 |
+
1bnm
|
| 13407 |
+
1yda
|
| 13408 |
+
5mhp
|
| 13409 |
+
4fgt
|
| 13410 |
+
4hwb
|
| 13411 |
+
4aje
|
| 13412 |
+
1v2t
|
| 13413 |
+
3bc4
|
| 13414 |
+
5h85
|
| 13415 |
+
2ew5
|
| 13416 |
+
3uoj
|
| 13417 |
+
2aog
|
| 13418 |
+
3o5x
|
| 13419 |
+
5vr8
|
| 13420 |
+
2o4p
|
| 13421 |
+
1fh8
|
| 13422 |
+
4jz1
|
| 13423 |
+
5oq4
|
| 13424 |
+
3zdh
|
| 13425 |
+
3lnz
|
| 13426 |
+
6ee3
|
| 13427 |
+
3rak
|
| 13428 |
+
6ba7
|
| 13429 |
+
6czb
|
| 13430 |
+
2wyj
|
| 13431 |
+
4qyo
|
| 13432 |
+
3mxd
|
| 13433 |
+
2ri9
|
| 13434 |
+
2zq1
|
| 13435 |
+
3ljo
|
| 13436 |
+
6g2a
|
| 13437 |
+
1gj6
|
| 13438 |
+
5oug
|
| 13439 |
+
1g4o
|
| 13440 |
+
3mvm
|
| 13441 |
+
1iyl
|
| 13442 |
+
2cgx
|
| 13443 |
+
4yrr
|
| 13444 |
+
3v2x
|
| 13445 |
+
5iv4
|
| 13446 |
+
4m2u
|
| 13447 |
+
4q1b
|
| 13448 |
+
3ggc
|
| 13449 |
+
2c68
|
| 13450 |
+
1laf
|
| 13451 |
+
4kxn
|
| 13452 |
+
2ada
|
| 13453 |
+
1om1
|
| 13454 |
+
3wax
|
| 13455 |
+
5eh7
|
| 13456 |
+
4jk5
|
| 13457 |
+
4q93
|
| 13458 |
+
1x11
|
| 13459 |
+
4c4j
|
| 13460 |
+
3bzf
|
| 13461 |
+
4y7r
|
| 13462 |
+
4zow
|
| 13463 |
+
4ui6
|
| 13464 |
+
1pxi
|
| 13465 |
+
5ni5
|
| 13466 |
+
5v4b
|
| 13467 |
+
1o4m
|
| 13468 |
+
1urc
|
| 13469 |
+
3ads
|
| 13470 |
+
5gn5
|
| 13471 |
+
4mrg
|
| 13472 |
+
5ylt
|
| 13473 |
+
5k6a
|
| 13474 |
+
4nld
|
| 13475 |
+
2xl3
|
| 13476 |
+
4auy
|
| 13477 |
+
1zoe
|
| 13478 |
+
5lxd
|
| 13479 |
+
4pum
|
| 13480 |
+
3gbq
|
| 13481 |
+
5xs2
|
| 13482 |
+
4ggl
|
| 13483 |
+
5a14
|
| 13484 |
+
4au8
|
| 13485 |
+
5n49
|
| 13486 |
+
5x8i
|
| 13487 |
+
6c7f
|
| 13488 |
+
4j84
|
| 13489 |
+
3fvk
|
| 13490 |
+
4kqp
|
| 13491 |
+
5y97
|
| 13492 |
+
5mxx
|
| 13493 |
+
3lox
|
| 13494 |
+
2oc7
|
| 13495 |
+
6hsz
|
| 13496 |
+
1ilq
|
| 13497 |
+
3twj
|
| 13498 |
+
5vcx
|
| 13499 |
+
3svv
|
| 13500 |
+
4na4
|
| 13501 |
+
3v5g
|
| 13502 |
+
4p7e
|
| 13503 |
+
2vwl
|
| 13504 |
+
4umu
|
| 13505 |
+
3v4j
|
| 13506 |
+
6gfz
|
| 13507 |
+
6duf
|
| 13508 |
+
5ezh
|
| 13509 |
+
4yti
|
| 13510 |
+
4zyq
|
| 13511 |
+
4lop
|
| 13512 |
+
4nus
|
| 13513 |
+
2a3a
|
| 13514 |
+
2o9i
|
| 13515 |
+
3ia6
|
| 13516 |
+
5nfh
|
| 13517 |
+
1e00
|
| 13518 |
+
3n8n
|
| 13519 |
+
3kxz
|
| 13520 |
+
4fxj
|
| 13521 |
+
1gx0
|
| 13522 |
+
4u90
|
| 13523 |
+
3eq8
|
| 13524 |
+
3q44
|
| 13525 |
+
5e2p
|
| 13526 |
+
1pxj
|
| 13527 |
+
4p6c
|
| 13528 |
+
1zoh
|
| 13529 |
+
4i7j
|
| 13530 |
+
4hlg
|
| 13531 |
+
3wiy
|
| 13532 |
+
4k77
|
| 13533 |
+
1hsl
|
| 13534 |
+
4tjz
|
| 13535 |
+
5l6h
|
| 13536 |
+
6cbh
|
| 13537 |
+
6g38
|
| 13538 |
+
5wep
|
| 13539 |
+
1jwm
|
| 13540 |
+
3wfg
|
| 13541 |
+
5lvl
|
| 13542 |
+
5kqd
|
| 13543 |
+
1fq5
|
| 13544 |
+
5nn6
|
| 13545 |
+
5xag
|
| 13546 |
+
3w69
|
| 13547 |
+
4c4h
|
| 13548 |
+
5mgi
|
| 13549 |
+
5ad1
|
| 13550 |
+
4i7b
|
| 13551 |
+
6cwi
|
| 13552 |
+
1tkb
|
| 13553 |
+
3unj
|
| 13554 |
+
4u54
|
| 13555 |
+
2x0y
|
| 13556 |
+
3isw
|
| 13557 |
+
2xbw
|
| 13558 |
+
6fzu
|
| 13559 |
+
5wgq
|
| 13560 |
+
4kzc
|
| 13561 |
+
5btr
|
| 13562 |
+
1juq
|
| 13563 |
+
2idk
|
| 13564 |
+
2v25
|
| 13565 |
+
4prh
|
| 13566 |
+
5o5a
|
| 13567 |
+
1sle
|
| 13568 |
+
2ieh
|
| 13569 |
+
5ous
|
| 13570 |
+
1p7m
|
| 13571 |
+
3fuf
|
| 13572 |
+
1w10
|
| 13573 |
+
1guw
|
| 13574 |
+
1gbq
|
| 13575 |
+
5thn
|
| 13576 |
+
4xoc
|
| 13577 |
+
5ugc
|
| 13578 |
+
4h71
|
| 13579 |
+
5i43
|
| 13580 |
+
3t0d
|
| 13581 |
+
5eok
|
| 13582 |
+
4kw6
|
| 13583 |
+
4jrg
|
| 13584 |
+
4ra5
|
| 13585 |
+
4azp
|
| 13586 |
+
5sz3
|
| 13587 |
+
5hbj
|
| 13588 |
+
1npa
|
| 13589 |
+
4wi1
|
| 13590 |
+
3biz
|
| 13591 |
+
4bpj
|
| 13592 |
+
3lxe
|
| 13593 |
+
5qav
|
| 13594 |
+
4ge6
|
| 13595 |
+
4k1e
|
| 13596 |
+
3vje
|
| 13597 |
+
4qp9
|
| 13598 |
+
2xg5
|
| 13599 |
+
1w5x
|
| 13600 |
+
3omc
|
| 13601 |
+
5ii2
|
| 13602 |
+
5elq
|
| 13603 |
+
1ec0
|
| 13604 |
+
4x8g
|
| 13605 |
+
3cft
|
| 13606 |
+
4qd6
|
| 13607 |
+
5n1x
|
| 13608 |
+
4ucc
|
| 13609 |
+
5nne
|
| 13610 |
+
5ocj
|
| 13611 |
+
4mul
|
| 13612 |
+
3lok
|
| 13613 |
+
3eql
|
| 13614 |
+
5kyj
|
| 13615 |
+
3hfz
|
| 13616 |
+
2pg2
|
| 13617 |
+
3g3m
|
| 13618 |
+
4fs3
|
| 13619 |
+
1nnk
|
| 13620 |
+
4n9a
|
| 13621 |
+
6cvd
|
| 13622 |
+
4tkb
|
| 13623 |
+
3tfp
|
| 13624 |
+
5nsx
|
| 13625 |
+
6bbv
|
| 13626 |
+
5myg
|
| 13627 |
+
3ws8
|
| 13628 |
+
6ex1
|
| 13629 |
+
5m6m
|
| 13630 |
+
2jkr
|
| 13631 |
+
2vw5
|
| 13632 |
+
4ezl
|
| 13633 |
+
3dp0
|
| 13634 |
+
2a4r
|
| 13635 |
+
5hyq
|
| 13636 |
+
5iyy
|
| 13637 |
+
2oc4
|
| 13638 |
+
5div
|
| 13639 |
+
3p1d
|
| 13640 |
+
2r38
|
| 13641 |
+
3chp
|
| 13642 |
+
3ot8
|
| 13643 |
+
2wqb
|
| 13644 |
+
6bqh
|
| 13645 |
+
3gxl
|
| 13646 |
+
2ayp
|
| 13647 |
+
2pgj
|
| 13648 |
+
6gfx
|
| 13649 |
+
2i3v
|
| 13650 |
+
5yqw
|
| 13651 |
+
5n9t
|
| 13652 |
+
6cc9
|
| 13653 |
+
3p3s
|
| 13654 |
+
3oe9
|
| 13655 |
+
2y1o
|
| 13656 |
+
2o4r
|
| 13657 |
+
1dzm
|
| 13658 |
+
3vp2
|
| 13659 |
+
4oz2
|
| 13660 |
+
1p6d
|
| 13661 |
+
5mih
|
| 13662 |
+
4xt2
|
| 13663 |
+
3tf7
|
| 13664 |
+
5v19
|
| 13665 |
+
6g9k
|
| 13666 |
+
4ipf
|
| 13667 |
+
3bim
|
| 13668 |
+
4nwm
|
| 13669 |
+
2aeb
|
| 13670 |
+
3blu
|
| 13671 |
+
3vtc
|
| 13672 |
+
3nww
|
| 13673 |
+
3mp6
|
| 13674 |
+
1nu8
|
| 13675 |
+
3wzk
|
| 13676 |
+
3gvb
|
| 13677 |
+
4bt4
|
| 13678 |
+
2f6v
|
| 13679 |
+
5m53
|
| 13680 |
+
3iux
|
| 13681 |
+
4b0g
|
| 13682 |
+
4yvc
|
| 13683 |
+
1ai4
|
| 13684 |
+
1jr1
|
| 13685 |
+
1z1r
|
| 13686 |
+
1ryh
|
| 13687 |
+
1rhu
|
| 13688 |
+
3g0i
|
| 13689 |
+
4clb
|
| 13690 |
+
5cte
|
| 13691 |
+
4kmu
|
| 13692 |
+
6f9r
|
| 13693 |
+
3pjg
|
| 13694 |
+
3tww
|
| 13695 |
+
2lbv
|
| 13696 |
+
5w1w
|
| 13697 |
+
1s26
|
| 13698 |
+
3w9r
|
| 13699 |
+
5km0
|
| 13700 |
+
2h03
|
| 13701 |
+
1qnh
|
| 13702 |
+
4cpx
|
| 13703 |
+
4mnv
|
| 13704 |
+
4qo9
|
| 13705 |
+
4tk4
|
| 13706 |
+
5wg6
|
| 13707 |
+
5uwf
|
| 13708 |
+
4cjp
|
| 13709 |
+
3uu1
|
| 13710 |
+
4bxu
|
| 13711 |
+
2gyi
|
| 13712 |
+
4qwr
|
| 13713 |
+
1cnw
|
| 13714 |
+
4fzc
|
| 13715 |
+
4e5i
|
| 13716 |
+
3oq5
|
| 13717 |
+
3wig
|
| 13718 |
+
5my8
|
| 13719 |
+
4aj1
|
| 13720 |
+
4xcb
|
| 13721 |
+
3wf5
|
| 13722 |
+
5htb
|
| 13723 |
+
1ugp
|
| 13724 |
+
5anq
|
| 13725 |
+
1apb
|
| 13726 |
+
1ove
|
| 13727 |
+
1lrh
|
| 13728 |
+
3v7t
|
| 13729 |
+
1xk5
|
| 13730 |
+
4fiv
|
| 13731 |
+
4mcv
|
| 13732 |
+
1mrw
|
| 13733 |
+
4b14
|
| 13734 |
+
5isl
|
| 13735 |
+
5un9
|
| 13736 |
+
3nil
|
| 13737 |
+
1x07
|
| 13738 |
+
6fni
|
| 13739 |
+
4xt9
|
| 13740 |
+
4u2y
|
| 13741 |
+
4hbv
|
| 13742 |
+
6h2t
|
| 13743 |
+
4q6d
|
| 13744 |
+
3b8z
|
| 13745 |
+
2cnf
|
| 13746 |
+
4isu
|
| 13747 |
+
2ok1
|
| 13748 |
+
5u2f
|
| 13749 |
+
2cng
|
| 13750 |
+
4q90
|
| 13751 |
+
4l0b
|
| 13752 |
+
6g3q
|
| 13753 |
+
4ylu
|
| 13754 |
+
3w5t
|
| 13755 |
+
4dei
|
| 13756 |
+
2zjv
|
| 13757 |
+
2x6y
|
| 13758 |
+
3ikg
|
| 13759 |
+
5j7p
|
| 13760 |
+
4mcc
|
| 13761 |
+
3vqs
|
| 13762 |
+
3u9n
|
| 13763 |
+
2g5t
|
| 13764 |
+
3owj
|
| 13765 |
+
5bnm
|
| 13766 |
+
3hkn
|
| 13767 |
+
3qtx
|
| 13768 |
+
3qzt
|
| 13769 |
+
6fag
|
| 13770 |
+
1o43
|
| 13771 |
+
3vby
|
| 13772 |
+
6as6
|
| 13773 |
+
2r7g
|
| 13774 |
+
5vjn
|
| 13775 |
+
3skc
|
| 13776 |
+
3bg8
|
| 13777 |
+
1tc1
|
| 13778 |
+
5bq0
|
| 13779 |
+
5qac
|
| 13780 |
+
3l16
|
| 13781 |
+
5kw2
|
| 13782 |
+
3uj9
|
| 13783 |
+
6byk
|
| 13784 |
+
5kgw
|
| 13785 |
+
4wvt
|
| 13786 |
+
1bv9
|
| 13787 |
+
4ruz
|
| 13788 |
+
1fzq
|
| 13789 |
+
1srg
|
| 13790 |
+
4y73
|
| 13791 |
+
3upv
|
| 13792 |
+
4jj8
|
| 13793 |
+
5xvf
|
| 13794 |
+
1xka
|
| 13795 |
+
4ewn
|
| 13796 |
+
4ewr
|
| 13797 |
+
6g9b
|
| 13798 |
+
4u69
|
| 13799 |
+
1qb6
|
| 13800 |
+
3c6t
|
| 13801 |
+
3qar
|
| 13802 |
+
4k5o
|
| 13803 |
+
2gnf
|
| 13804 |
+
1u3q
|
| 13805 |
+
3g31
|
| 13806 |
+
3oev
|
| 13807 |
+
4uib
|
| 13808 |
+
1eby
|
| 13809 |
+
3uef
|
| 13810 |
+
4k0o
|
| 13811 |
+
5ehi
|
| 13812 |
+
5h63
|
| 13813 |
+
3wcg
|
| 13814 |
+
4jhq
|
| 13815 |
+
1hii
|
| 13816 |
+
3sgt
|
| 13817 |
+
4aua
|
| 13818 |
+
5mmn
|
| 13819 |
+
4hnc
|
| 13820 |
+
3bxh
|
| 13821 |
+
5g5v
|
| 13822 |
+
4j08
|
| 13823 |
+
4anu
|
| 13824 |
+
3hac
|
| 13825 |
+
2f7p
|
| 13826 |
+
5akj
|
| 13827 |
+
4ycl
|
| 13828 |
+
3hs4
|
| 13829 |
+
3s22
|
| 13830 |
+
3obq
|
| 13831 |
+
1o9k
|
| 13832 |
+
4xy2
|
| 13833 |
+
3k9x
|
| 13834 |
+
1xog
|
| 13835 |
+
4m5g
|
| 13836 |
+
4czs
|
| 13837 |
+
5eyr
|
| 13838 |
+
4lm5
|
| 13839 |
+
5xup
|
| 13840 |
+
4rao
|
| 13841 |
+
3qcl
|
| 13842 |
+
3lkj
|
| 13843 |
+
2xno
|
| 13844 |
+
1gjd
|
| 13845 |
+
4bzs
|
| 13846 |
+
4j7d
|
| 13847 |
+
1jfh
|
| 13848 |
+
2cgr
|
| 13849 |
+
4pd8
|
| 13850 |
+
3mz6
|
| 13851 |
+
5obj
|
| 13852 |
+
3gm0
|
| 13853 |
+
4j3u
|
| 13854 |
+
1uvt
|
| 13855 |
+
5ml5
|
| 13856 |
+
5vnd
|
| 13857 |
+
4r3c
|
| 13858 |
+
4kfp
|
| 13859 |
+
5xhs
|
| 13860 |
+
5mwz
|
| 13861 |
+
3n86
|
| 13862 |
+
5qab
|
| 13863 |
+
5zeq
|
| 13864 |
+
1fo3
|
| 13865 |
+
5n8w
|
| 13866 |
+
2exm
|
| 13867 |
+
4klb
|
| 13868 |
+
2qhc
|
| 13869 |
+
4qn7
|
| 13870 |
+
5wlg
|
| 13871 |
+
5nbw
|
| 13872 |
+
4k2g
|
| 13873 |
+
5l4m
|
| 13874 |
+
3qck
|
| 13875 |
+
3zo1
|
| 13876 |
+
4knx
|
| 13877 |
+
6dh6
|
| 13878 |
+
3k8q
|
| 13879 |
+
4kiu
|
| 13880 |
+
5f3e
|
| 13881 |
+
4own
|
| 13882 |
+
5uoy
|
| 13883 |
+
4gr3
|
| 13884 |
+
6cyd
|
| 13885 |
+
2f14
|
| 13886 |
+
5v2q
|
| 13887 |
+
3h2m
|
| 13888 |
+
3ao2
|
| 13889 |
+
1k1n
|
| 13890 |
+
3gfw
|
| 13891 |
+
4q19
|
| 13892 |
+
2b1g
|
| 13893 |
+
4pdk
|
| 13894 |
+
1oxn
|
| 13895 |
+
2pj1
|
| 13896 |
+
4o3a
|
| 13897 |
+
2y6s
|
| 13898 |
+
1inh
|
| 13899 |
+
4u5l
|
| 13900 |
+
4esg
|
| 13901 |
+
4mg6
|
| 13902 |
+
4de5
|
| 13903 |
+
3wke
|
| 13904 |
+
4mp7
|
| 13905 |
+
4de1
|
| 13906 |
+
4z6i
|
| 13907 |
+
2r4b
|
| 13908 |
+
4qyh
|
| 13909 |
+
4b33
|
| 13910 |
+
2koh
|
| 13911 |
+
5y94
|
| 13912 |
+
6fo8
|
| 13913 |
+
4mny
|
| 13914 |
+
5yqx
|
| 13915 |
+
5c26
|
| 13916 |
+
4eb8
|
| 13917 |
+
3lk1
|
| 13918 |
+
3mg6
|
| 13919 |
+
4oru
|
| 13920 |
+
2wpb
|
| 13921 |
+
4k7i
|
| 13922 |
+
5fsy
|
| 13923 |
+
3ava
|
| 13924 |
+
3ex6
|
| 13925 |
+
4b8p
|
| 13926 |
+
3l13
|
| 13927 |
+
3s3k
|
| 13928 |
+
3chc
|
| 13929 |
+
6bvh
|
| 13930 |
+
6b5i
|
| 13931 |
+
3gk4
|
| 13932 |
+
2lcs
|
| 13933 |
+
5fas
|
| 13934 |
+
1lyx
|
| 13935 |
+
6edr
|
| 13936 |
+
1ghz
|
| 13937 |
+
1egh
|
| 13938 |
+
2x6f
|
| 13939 |
+
5o48
|
| 13940 |
+
3znc
|
| 13941 |
+
1czq
|
| 13942 |
+
5f39
|
| 13943 |
+
2bpy
|
| 13944 |
+
5op5
|
| 13945 |
+
5oh9
|
| 13946 |
+
4ory
|
| 13947 |
+
3hu3
|
| 13948 |
+
2ybt
|
| 13949 |
+
1v1k
|
| 13950 |
+
5o22
|
| 13951 |
+
4eo8
|
| 13952 |
+
4bco
|
| 13953 |
+
2p3i
|
| 13954 |
+
3hii
|
| 13955 |
+
2xdm
|
| 13956 |
+
6axq
|
| 13957 |
+
2xhs
|
| 13958 |
+
5cyv
|
| 13959 |
+
1sre
|
| 13960 |
+
2y8o
|
| 13961 |
+
9lpr
|
| 13962 |
+
4l3o
|
| 13963 |
+
1dzk
|
| 13964 |
+
3lnj
|
| 13965 |
+
3fsm
|
| 13966 |
+
4ghi
|
| 13967 |
+
2nqi
|
| 13968 |
+
1i48
|
| 13969 |
+
5mo2
|
| 13970 |
+
6fr2
|
| 13971 |
+
1bnn
|
| 13972 |
+
2jbl
|
| 13973 |
+
1r17
|
| 13974 |
+
4rrf
|
| 13975 |
+
6apw
|
| 13976 |
+
1lgt
|
| 13977 |
+
2wl0
|
| 13978 |
+
3oe5
|
| 13979 |
+
3fjz
|
| 13980 |
+
3hxd
|
| 13981 |
+
6g3o
|
| 13982 |
+
2nqg
|
| 13983 |
+
3fc8
|
| 13984 |
+
2zvj
|
| 13985 |
+
3mw1
|
| 13986 |
+
2c5n
|
| 13987 |
+
3mhw
|
| 13988 |
+
5tdw
|
| 13989 |
+
5n7b
|
| 13990 |
+
4kij
|
| 13991 |
+
5axq
|
| 13992 |
+
3pxy
|
| 13993 |
+
3kjd
|
| 13994 |
+
5cf5
|
| 13995 |
+
5id0
|
| 13996 |
+
5e0j
|
| 13997 |
+
1xr9
|
| 13998 |
+
4kjv
|
| 13999 |
+
4e1m
|
| 14000 |
+
3gy4
|
| 14001 |
+
1xqc
|
| 14002 |
+
2hzy
|
| 14003 |
+
5brn
|
| 14004 |
+
2d0k
|
| 14005 |
+
3s8n
|
| 14006 |
+
5zc5
|
| 14007 |
+
3p5o
|
| 14008 |
+
1xh5
|
| 14009 |
+
4hv7
|
| 14010 |
+
4zt2
|
| 14011 |
+
2b7a
|
| 14012 |
+
1thl
|
| 14013 |
+
1jmq
|
| 14014 |
+
5h7h
|
| 14015 |
+
5mz3
|
| 14016 |
+
3dxj
|
| 14017 |
+
6ays
|
| 14018 |
+
3w2t
|
| 14019 |
+
1bkm
|
| 14020 |
+
3vjl
|
| 14021 |
+
2a8h
|
| 14022 |
+
5tt8
|
| 14023 |
+
5lyx
|
| 14024 |
+
1zaf
|
| 14025 |
+
2fuu
|
| 14026 |
+
4mw6
|
| 14027 |
+
2j6m
|
| 14028 |
+
3qai
|
| 14029 |
+
4kzq
|
| 14030 |
+
2uym
|
| 14031 |
+
2bqw
|
| 14032 |
+
2q8y
|
| 14033 |
+
6ary
|
| 14034 |
+
4daw
|
| 14035 |
+
4i06
|
| 14036 |
+
2wxk
|
| 14037 |
+
1r5n
|
data/timesplit_no_lig_or_rec_overlap_val
ADDED
|
@@ -0,0 +1,1223 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
4mi6
|
| 2 |
+
5ylv
|
| 3 |
+
4ozo
|
| 4 |
+
6gip
|
| 5 |
+
3std
|
| 6 |
+
3g2n
|
| 7 |
+
6ax1
|
| 8 |
+
6h96
|
| 9 |
+
5q0m
|
| 10 |
+
5hh5
|
| 11 |
+
4idz
|
| 12 |
+
6cec
|
| 13 |
+
5wqa
|
| 14 |
+
3k3e
|
| 15 |
+
1ppk
|
| 16 |
+
4og7
|
| 17 |
+
4b5w
|
| 18 |
+
4bgg
|
| 19 |
+
4jgv
|
| 20 |
+
2g9v
|
| 21 |
+
4og3
|
| 22 |
+
5lz2
|
| 23 |
+
6chq
|
| 24 |
+
5aqv
|
| 25 |
+
4k67
|
| 26 |
+
5j3v
|
| 27 |
+
5lz9
|
| 28 |
+
5t2y
|
| 29 |
+
4el5
|
| 30 |
+
2z60
|
| 31 |
+
4zed
|
| 32 |
+
4pks
|
| 33 |
+
5cuu
|
| 34 |
+
5q0x
|
| 35 |
+
4x5z
|
| 36 |
+
3hs9
|
| 37 |
+
3v7s
|
| 38 |
+
2qyn
|
| 39 |
+
5ehn
|
| 40 |
+
1sz0
|
| 41 |
+
4x6i
|
| 42 |
+
4u82
|
| 43 |
+
2vrj
|
| 44 |
+
6g2l
|
| 45 |
+
2bow
|
| 46 |
+
5o9y
|
| 47 |
+
4mji
|
| 48 |
+
6ccs
|
| 49 |
+
1yvm
|
| 50 |
+
3sym
|
| 51 |
+
4fz3
|
| 52 |
+
5tdi
|
| 53 |
+
2ie4
|
| 54 |
+
1n4m
|
| 55 |
+
1c3i
|
| 56 |
+
5eie
|
| 57 |
+
5ye8
|
| 58 |
+
5in9
|
| 59 |
+
4b7z
|
| 60 |
+
5ncz
|
| 61 |
+
5lz4
|
| 62 |
+
6bw4
|
| 63 |
+
3sl4
|
| 64 |
+
4dmy
|
| 65 |
+
4cmt
|
| 66 |
+
5a9u
|
| 67 |
+
1fki
|
| 68 |
+
4mc1
|
| 69 |
+
5o9o
|
| 70 |
+
4b5t
|
| 71 |
+
4m3g
|
| 72 |
+
1pyg
|
| 73 |
+
3b67
|
| 74 |
+
5l8o
|
| 75 |
+
5mkj
|
| 76 |
+
3oyp
|
| 77 |
+
4anq
|
| 78 |
+
5hog
|
| 79 |
+
4de7
|
| 80 |
+
5tkb
|
| 81 |
+
2vle
|
| 82 |
+
3f7i
|
| 83 |
+
3v51
|
| 84 |
+
3l7d
|
| 85 |
+
2v6n
|
| 86 |
+
4qna
|
| 87 |
+
4cd0
|
| 88 |
+
3iog
|
| 89 |
+
4i8w
|
| 90 |
+
2xup
|
| 91 |
+
3t3i
|
| 92 |
+
1db4
|
| 93 |
+
5es1
|
| 94 |
+
5i9i
|
| 95 |
+
6ccm
|
| 96 |
+
2xui
|
| 97 |
+
1q91
|
| 98 |
+
1bgo
|
| 99 |
+
1akt
|
| 100 |
+
1q84
|
| 101 |
+
1yt7
|
| 102 |
+
2l75
|
| 103 |
+
5aac
|
| 104 |
+
6nao
|
| 105 |
+
5iuh
|
| 106 |
+
3oof
|
| 107 |
+
4ona
|
| 108 |
+
5q0n
|
| 109 |
+
3sfi
|
| 110 |
+
2g9q
|
| 111 |
+
1hlf
|
| 112 |
+
5aqj
|
| 113 |
+
1g9c
|
| 114 |
+
1ayu
|
| 115 |
+
6co4
|
| 116 |
+
6bd1
|
| 117 |
+
4yur
|
| 118 |
+
3vw0
|
| 119 |
+
6cea
|
| 120 |
+
4nyf
|
| 121 |
+
3v43
|
| 122 |
+
2ya8
|
| 123 |
+
1b4d
|
| 124 |
+
2ccb
|
| 125 |
+
5q1f
|
| 126 |
+
1fkb
|
| 127 |
+
3bcs
|
| 128 |
+
1h46
|
| 129 |
+
6dgt
|
| 130 |
+
2ftd
|
| 131 |
+
5t2l
|
| 132 |
+
3i7c
|
| 133 |
+
6ckw
|
| 134 |
+
3csl
|
| 135 |
+
1j07
|
| 136 |
+
3omm
|
| 137 |
+
4g17
|
| 138 |
+
3v49
|
| 139 |
+
4fny
|
| 140 |
+
1fkh
|
| 141 |
+
4u0e
|
| 142 |
+
2g9r
|
| 143 |
+
6hd4
|
| 144 |
+
5oss
|
| 145 |
+
2adm
|
| 146 |
+
4b85
|
| 147 |
+
2fm0
|
| 148 |
+
4hgs
|
| 149 |
+
2qn3
|
| 150 |
+
1ddm
|
| 151 |
+
3fal
|
| 152 |
+
5q0y
|
| 153 |
+
5q1a
|
| 154 |
+
3g2k
|
| 155 |
+
2j7b
|
| 156 |
+
6ee2
|
| 157 |
+
2jjr
|
| 158 |
+
5q1i
|
| 159 |
+
6b7a
|
| 160 |
+
5kh7
|
| 161 |
+
6gqm
|
| 162 |
+
3mta
|
| 163 |
+
3g2l
|
| 164 |
+
4i4f
|
| 165 |
+
5iaw
|
| 166 |
+
5q0i
|
| 167 |
+
3h1z
|
| 168 |
+
4jbl
|
| 169 |
+
5lvx
|
| 170 |
+
6chm
|
| 171 |
+
6fdc
|
| 172 |
+
2ax6
|
| 173 |
+
6cnj
|
| 174 |
+
1pwu
|
| 175 |
+
4fht
|
| 176 |
+
3th9
|
| 177 |
+
5db1
|
| 178 |
+
2hvc
|
| 179 |
+
4pp5
|
| 180 |
+
5q15
|
| 181 |
+
3u8h
|
| 182 |
+
4el0
|
| 183 |
+
4jt8
|
| 184 |
+
5std
|
| 185 |
+
2pwd
|
| 186 |
+
5wh6
|
| 187 |
+
3kf4
|
| 188 |
+
1q83
|
| 189 |
+
2xml
|
| 190 |
+
1c2t
|
| 191 |
+
4in9
|
| 192 |
+
2jt5
|
| 193 |
+
1icj
|
| 194 |
+
3oy3
|
| 195 |
+
3g2h
|
| 196 |
+
3qo2
|
| 197 |
+
3tu9
|
| 198 |
+
3s0j
|
| 199 |
+
1e3g
|
| 200 |
+
6d1x
|
| 201 |
+
6mx8
|
| 202 |
+
3aqt
|
| 203 |
+
6aol
|
| 204 |
+
4m8e
|
| 205 |
+
1ado
|
| 206 |
+
5hh6
|
| 207 |
+
4lkt
|
| 208 |
+
2j78
|
| 209 |
+
5q0o
|
| 210 |
+
1d8m
|
| 211 |
+
3suu
|
| 212 |
+
1gyy
|
| 213 |
+
5aqt
|
| 214 |
+
3oap
|
| 215 |
+
4zs3
|
| 216 |
+
2qn1
|
| 217 |
+
2p98
|
| 218 |
+
4cmu
|
| 219 |
+
4ie0
|
| 220 |
+
1w3j
|
| 221 |
+
5y59
|
| 222 |
+
2mpa
|
| 223 |
+
1akw
|
| 224 |
+
5tyh
|
| 225 |
+
5lb7
|
| 226 |
+
1fkf
|
| 227 |
+
3djv
|
| 228 |
+
5nxq
|
| 229 |
+
1kti
|
| 230 |
+
3mrv
|
| 231 |
+
5t2i
|
| 232 |
+
4uuq
|
| 233 |
+
2gfj
|
| 234 |
+
4poh
|
| 235 |
+
6by8
|
| 236 |
+
6b7e
|
| 237 |
+
4u0f
|
| 238 |
+
5lp1
|
| 239 |
+
4jym
|
| 240 |
+
3suv
|
| 241 |
+
6fse
|
| 242 |
+
5e1s
|
| 243 |
+
2eum
|
| 244 |
+
6hai
|
| 245 |
+
6h7f
|
| 246 |
+
3suw
|
| 247 |
+
2gg7
|
| 248 |
+
3np7
|
| 249 |
+
5l13
|
| 250 |
+
6f8x
|
| 251 |
+
2evc
|
| 252 |
+
1z6q
|
| 253 |
+
5o9r
|
| 254 |
+
4pkt
|
| 255 |
+
1haa
|
| 256 |
+
5jan
|
| 257 |
+
4oiv
|
| 258 |
+
3djq
|
| 259 |
+
3p7i
|
| 260 |
+
2j7h
|
| 261 |
+
3v7c
|
| 262 |
+
2fw6
|
| 263 |
+
3diw
|
| 264 |
+
4dpt
|
| 265 |
+
1tu6
|
| 266 |
+
4pku
|
| 267 |
+
1ggn
|
| 268 |
+
2usn
|
| 269 |
+
6baw
|
| 270 |
+
1zxv
|
| 271 |
+
3bl7
|
| 272 |
+
5vdu
|
| 273 |
+
6fsd
|
| 274 |
+
4u0i
|
| 275 |
+
4j8s
|
| 276 |
+
1g49
|
| 277 |
+
4hni
|
| 278 |
+
6e83
|
| 279 |
+
6f0y
|
| 280 |
+
4mra
|
| 281 |
+
1d5r
|
| 282 |
+
3v3q
|
| 283 |
+
6f8w
|
| 284 |
+
1c7e
|
| 285 |
+
4b5s
|
| 286 |
+
4ara
|
| 287 |
+
4glr
|
| 288 |
+
4cff
|
| 289 |
+
2q93
|
| 290 |
+
5i8p
|
| 291 |
+
1apw
|
| 292 |
+
3sur
|
| 293 |
+
4b84
|
| 294 |
+
4duh
|
| 295 |
+
3l7c
|
| 296 |
+
1j4r
|
| 297 |
+
3mrt
|
| 298 |
+
1l7x
|
| 299 |
+
4dv8
|
| 300 |
+
1iep
|
| 301 |
+
1bsk
|
| 302 |
+
1i8h
|
| 303 |
+
5xih
|
| 304 |
+
5knj
|
| 305 |
+
5ick
|
| 306 |
+
2v3d
|
| 307 |
+
1sln
|
| 308 |
+
4k64
|
| 309 |
+
6cck
|
| 310 |
+
4ajw
|
| 311 |
+
2hb9
|
| 312 |
+
6std
|
| 313 |
+
2w87
|
| 314 |
+
4a23
|
| 315 |
+
1ayv
|
| 316 |
+
4bvb
|
| 317 |
+
1nc1
|
| 318 |
+
5q0p
|
| 319 |
+
3zlv
|
| 320 |
+
3okh
|
| 321 |
+
1pwq
|
| 322 |
+
3pp0
|
| 323 |
+
4pgh
|
| 324 |
+
2xbp
|
| 325 |
+
2qrp
|
| 326 |
+
4mho
|
| 327 |
+
6gpb
|
| 328 |
+
1aku
|
| 329 |
+
6b7f
|
| 330 |
+
3pcu
|
| 331 |
+
1fm9
|
| 332 |
+
5ddc
|
| 333 |
+
5q0u
|
| 334 |
+
1biw
|
| 335 |
+
3ery
|
| 336 |
+
2evo
|
| 337 |
+
4cxx
|
| 338 |
+
4g2l
|
| 339 |
+
5h6v
|
| 340 |
+
5yyz
|
| 341 |
+
4nw2
|
| 342 |
+
1em6
|
| 343 |
+
3shv
|
| 344 |
+
6cz3
|
| 345 |
+
4lh2
|
| 346 |
+
2gfk
|
| 347 |
+
2z78
|
| 348 |
+
4yv8
|
| 349 |
+
2jt6
|
| 350 |
+
3kdt
|
| 351 |
+
6chp
|
| 352 |
+
2xba
|
| 353 |
+
5wbl
|
| 354 |
+
5t2d
|
| 355 |
+
3fqa
|
| 356 |
+
5xii
|
| 357 |
+
2j83
|
| 358 |
+
4x6j
|
| 359 |
+
2q94
|
| 360 |
+
1vsn
|
| 361 |
+
1c8k
|
| 362 |
+
4b81
|
| 363 |
+
5y86
|
| 364 |
+
1caq
|
| 365 |
+
1k3t
|
| 366 |
+
2qrm
|
| 367 |
+
4u68
|
| 368 |
+
1exw
|
| 369 |
+
1nlj
|
| 370 |
+
3kdu
|
| 371 |
+
1axr
|
| 372 |
+
1kkq
|
| 373 |
+
3mrx
|
| 374 |
+
4z2h
|
| 375 |
+
3oki
|
| 376 |
+
6chl
|
| 377 |
+
1ppl
|
| 378 |
+
2q95
|
| 379 |
+
5q13
|
| 380 |
+
2j7g
|
| 381 |
+
2w2u
|
| 382 |
+
5aqq
|
| 383 |
+
1ms0
|
| 384 |
+
2b2v
|
| 385 |
+
6bfa
|
| 386 |
+
5kew
|
| 387 |
+
2cbu
|
| 388 |
+
1nli
|
| 389 |
+
6bw3
|
| 390 |
+
4h4e
|
| 391 |
+
2ha5
|
| 392 |
+
2aq9
|
| 393 |
+
1g98
|
| 394 |
+
2pri
|
| 395 |
+
1apv
|
| 396 |
+
1gar
|
| 397 |
+
3szm
|
| 398 |
+
8gpb
|
| 399 |
+
1noi
|
| 400 |
+
5foo
|
| 401 |
+
2z4w
|
| 402 |
+
3r93
|
| 403 |
+
2z7i
|
| 404 |
+
5vdr
|
| 405 |
+
5ylu
|
| 406 |
+
6f8r
|
| 407 |
+
3upx
|
| 408 |
+
3zyr
|
| 409 |
+
2jnp
|
| 410 |
+
2nm1
|
| 411 |
+
4kab
|
| 412 |
+
4n8r
|
| 413 |
+
5z4h
|
| 414 |
+
4f9v
|
| 415 |
+
3l7a
|
| 416 |
+
2am9
|
| 417 |
+
2wec
|
| 418 |
+
1h6h
|
| 419 |
+
3nfl
|
| 420 |
+
3wd1
|
| 421 |
+
4mdt
|
| 422 |
+
3sxf
|
| 423 |
+
6ftn
|
| 424 |
+
3gt9
|
| 425 |
+
1oim
|
| 426 |
+
1ywh
|
| 427 |
+
1u9w
|
| 428 |
+
1w70
|
| 429 |
+
3u1i
|
| 430 |
+
2v3e
|
| 431 |
+
5jap
|
| 432 |
+
3b68
|
| 433 |
+
2qnb
|
| 434 |
+
5hbs
|
| 435 |
+
2ama
|
| 436 |
+
2web
|
| 437 |
+
6e4t
|
| 438 |
+
4xm6
|
| 439 |
+
6hd6
|
| 440 |
+
4og4
|
| 441 |
+
3il6
|
| 442 |
+
4zs2
|
| 443 |
+
2z50
|
| 444 |
+
1nki
|
| 445 |
+
4my6
|
| 446 |
+
3vvy
|
| 447 |
+
3nc4
|
| 448 |
+
2z4y
|
| 449 |
+
2euk
|
| 450 |
+
3g4k
|
| 451 |
+
2y2i
|
| 452 |
+
1usn
|
| 453 |
+
1y6r
|
| 454 |
+
3bu6
|
| 455 |
+
4u0c
|
| 456 |
+
2ces
|
| 457 |
+
5ye7
|
| 458 |
+
2cbv
|
| 459 |
+
5twg
|
| 460 |
+
1f40
|
| 461 |
+
5m2q
|
| 462 |
+
4hvs
|
| 463 |
+
4z2i
|
| 464 |
+
3g0e
|
| 465 |
+
3dct
|
| 466 |
+
6bnl
|
| 467 |
+
5v8q
|
| 468 |
+
1au2
|
| 469 |
+
3h0a
|
| 470 |
+
2z52
|
| 471 |
+
4ie5
|
| 472 |
+
2auz
|
| 473 |
+
2qn2
|
| 474 |
+
4lh3
|
| 475 |
+
5lyy
|
| 476 |
+
4z2l
|
| 477 |
+
2bmz
|
| 478 |
+
2evm
|
| 479 |
+
5wvd
|
| 480 |
+
3o8h
|
| 481 |
+
6e4w
|
| 482 |
+
1syo
|
| 483 |
+
1yk7
|
| 484 |
+
5q11
|
| 485 |
+
2ha0
|
| 486 |
+
6ccn
|
| 487 |
+
1mh5
|
| 488 |
+
2ai8
|
| 489 |
+
3rde
|
| 490 |
+
5q17
|
| 491 |
+
4zsh
|
| 492 |
+
6aom
|
| 493 |
+
1pot
|
| 494 |
+
5i5x
|
| 495 |
+
4cqe
|
| 496 |
+
3l3x
|
| 497 |
+
4ie6
|
| 498 |
+
6cnk
|
| 499 |
+
1c12
|
| 500 |
+
1gfz
|
| 501 |
+
4jal
|
| 502 |
+
4ie7
|
| 503 |
+
5tvn
|
| 504 |
+
2qoh
|
| 505 |
+
4z2g
|
| 506 |
+
6b7b
|
| 507 |
+
5kxi
|
| 508 |
+
6f8v
|
| 509 |
+
1xor
|
| 510 |
+
1aqi
|
| 511 |
+
6q73
|
| 512 |
+
5t27
|
| 513 |
+
5q0r
|
| 514 |
+
1y2k
|
| 515 |
+
5q1b
|
| 516 |
+
5ko1
|
| 517 |
+
1a8i
|
| 518 |
+
3g2j
|
| 519 |
+
5gwz
|
| 520 |
+
1c8l
|
| 521 |
+
4o42
|
| 522 |
+
3r2a
|
| 523 |
+
1d7i
|
| 524 |
+
3ovz
|
| 525 |
+
3l3z
|
| 526 |
+
4i32
|
| 527 |
+
2ych
|
| 528 |
+
5aqp
|
| 529 |
+
4xkc
|
| 530 |
+
4og8
|
| 531 |
+
3g8i
|
| 532 |
+
2z7h
|
| 533 |
+
3kfa
|
| 534 |
+
3vfa
|
| 535 |
+
5q0v
|
| 536 |
+
3k41
|
| 537 |
+
2pwg
|
| 538 |
+
1au0
|
| 539 |
+
1g9d
|
| 540 |
+
4i8z
|
| 541 |
+
6cef
|
| 542 |
+
4mhs
|
| 543 |
+
4xm7
|
| 544 |
+
1c3e
|
| 545 |
+
3kx1
|
| 546 |
+
2gj4
|
| 547 |
+
5q16
|
| 548 |
+
4pli
|
| 549 |
+
2j7e
|
| 550 |
+
2j7d
|
| 551 |
+
6q6y
|
| 552 |
+
4gq6
|
| 553 |
+
3k5v
|
| 554 |
+
4j09
|
| 555 |
+
3bl9
|
| 556 |
+
2xuf
|
| 557 |
+
1e1y
|
| 558 |
+
6gi6
|
| 559 |
+
5z4o
|
| 560 |
+
5q18
|
| 561 |
+
4foc
|
| 562 |
+
6bcy
|
| 563 |
+
4aaw
|
| 564 |
+
2ha6
|
| 565 |
+
4pl5
|
| 566 |
+
3vw1
|
| 567 |
+
6bq0
|
| 568 |
+
2aux
|
| 569 |
+
1mkd
|
| 570 |
+
2q92
|
| 571 |
+
1xon
|
| 572 |
+
3aig
|
| 573 |
+
3oxz
|
| 574 |
+
2r6n
|
| 575 |
+
4mmp
|
| 576 |
+
5hki
|
| 577 |
+
2gg0
|
| 578 |
+
4qip
|
| 579 |
+
3ms2
|
| 580 |
+
5d1t
|
| 581 |
+
2ot1
|
| 582 |
+
2xpc
|
| 583 |
+
4zcs
|
| 584 |
+
5db3
|
| 585 |
+
3t3h
|
| 586 |
+
6cz4
|
| 587 |
+
4h4d
|
| 588 |
+
3h78
|
| 589 |
+
5q10
|
| 590 |
+
4jsr
|
| 591 |
+
4qll
|
| 592 |
+
4e90
|
| 593 |
+
6dry
|
| 594 |
+
5aqu
|
| 595 |
+
5oei
|
| 596 |
+
5hz5
|
| 597 |
+
3kwz
|
| 598 |
+
4m3d
|
| 599 |
+
5xig
|
| 600 |
+
3u3z
|
| 601 |
+
2y2k
|
| 602 |
+
5lz5
|
| 603 |
+
1o8b
|
| 604 |
+
4hlw
|
| 605 |
+
4tq3
|
| 606 |
+
1fkg
|
| 607 |
+
3pkn
|
| 608 |
+
2xb7
|
| 609 |
+
5ov9
|
| 610 |
+
3ggv
|
| 611 |
+
3cke
|
| 612 |
+
4m84
|
| 613 |
+
6cho
|
| 614 |
+
1m6p
|
| 615 |
+
5jal
|
| 616 |
+
1a0q
|
| 617 |
+
4eky
|
| 618 |
+
3vw2
|
| 619 |
+
2y2n
|
| 620 |
+
5q0q
|
| 621 |
+
6gin
|
| 622 |
+
2nmb
|
| 623 |
+
3rik
|
| 624 |
+
1akv
|
| 625 |
+
4m3e
|
| 626 |
+
4kwg
|
| 627 |
+
5yun
|
| 628 |
+
3mqf
|
| 629 |
+
4pkw
|
| 630 |
+
3k3h
|
| 631 |
+
3t3v
|
| 632 |
+
3t1n
|
| 633 |
+
2bdl
|
| 634 |
+
2prj
|
| 635 |
+
5wh5
|
| 636 |
+
4qhc
|
| 637 |
+
4eoy
|
| 638 |
+
2bv4
|
| 639 |
+
1i7g
|
| 640 |
+
5iui
|
| 641 |
+
4og5
|
| 642 |
+
5evk
|
| 643 |
+
2fsa
|
| 644 |
+
3sdg
|
| 645 |
+
3g2i
|
| 646 |
+
1y2d
|
| 647 |
+
1c7f
|
| 648 |
+
1qkn
|
| 649 |
+
2etm
|
| 650 |
+
3o1g
|
| 651 |
+
3t3d
|
| 652 |
+
1bxo
|
| 653 |
+
2z5o
|
| 654 |
+
3bla
|
| 655 |
+
5o9p
|
| 656 |
+
5g3n
|
| 657 |
+
5v1y
|
| 658 |
+
4gq4
|
| 659 |
+
2vvo
|
| 660 |
+
4u0b
|
| 661 |
+
1opi
|
| 662 |
+
3sut
|
| 663 |
+
3wd2
|
| 664 |
+
4xm8
|
| 665 |
+
4kp4
|
| 666 |
+
1hy7
|
| 667 |
+
1g05
|
| 668 |
+
5aaa
|
| 669 |
+
5wmt
|
| 670 |
+
2fj0
|
| 671 |
+
1bxq
|
| 672 |
+
5t2b
|
| 673 |
+
1o6i
|
| 674 |
+
4xdo
|
| 675 |
+
5ez0
|
| 676 |
+
5wqj
|
| 677 |
+
5t8e
|
| 678 |
+
6g22
|
| 679 |
+
3o0u
|
| 680 |
+
2gfd
|
| 681 |
+
5fpp
|
| 682 |
+
1tuf
|
| 683 |
+
4v0i
|
| 684 |
+
4og6
|
| 685 |
+
3g4g
|
| 686 |
+
2std
|
| 687 |
+
1xnz
|
| 688 |
+
2dw7
|
| 689 |
+
4oue
|
| 690 |
+
6ds0
|
| 691 |
+
5jar
|
| 692 |
+
4ibm
|
| 693 |
+
1d5j
|
| 694 |
+
2hrp
|
| 695 |
+
1koj
|
| 696 |
+
1d7j
|
| 697 |
+
4ryl
|
| 698 |
+
2f6j
|
| 699 |
+
4eke
|
| 700 |
+
4btl
|
| 701 |
+
6b7d
|
| 702 |
+
3bwk
|
| 703 |
+
5aqg
|
| 704 |
+
4i80
|
| 705 |
+
1c3x
|
| 706 |
+
2qrq
|
| 707 |
+
1oif
|
| 708 |
+
2p9a
|
| 709 |
+
5f67
|
| 710 |
+
4mc9
|
| 711 |
+
4dpu
|
| 712 |
+
3il5
|
| 713 |
+
6bnk
|
| 714 |
+
4lh7
|
| 715 |
+
6ccl
|
| 716 |
+
4m3b
|
| 717 |
+
6drz
|
| 718 |
+
4ebw
|
| 719 |
+
6et8
|
| 720 |
+
1g9b
|
| 721 |
+
3vvz
|
| 722 |
+
5q12
|
| 723 |
+
1jys
|
| 724 |
+
1g9a
|
| 725 |
+
5q1c
|
| 726 |
+
4mc6
|
| 727 |
+
2gg9
|
| 728 |
+
5t2m
|
| 729 |
+
3gta
|
| 730 |
+
5q0w
|
| 731 |
+
5oa2
|
| 732 |
+
3mt9
|
| 733 |
+
5iql
|
| 734 |
+
5q0t
|
| 735 |
+
2gkl
|
| 736 |
+
1z95
|
| 737 |
+
6c91
|
| 738 |
+
2z4z
|
| 739 |
+
3syr
|
| 740 |
+
4g16
|
| 741 |
+
3qi3
|
| 742 |
+
1z6p
|
| 743 |
+
3p8o
|
| 744 |
+
1qpl
|
| 745 |
+
2pix
|
| 746 |
+
4crj
|
| 747 |
+
2cet
|
| 748 |
+
4wf6
|
| 749 |
+
4qfr
|
| 750 |
+
1y2c
|
| 751 |
+
4gh6
|
| 752 |
+
1ct8
|
| 753 |
+
3guz
|
| 754 |
+
1oyn
|
| 755 |
+
1d8f
|
| 756 |
+
4x6h
|
| 757 |
+
3gp0
|
| 758 |
+
2srt
|
| 759 |
+
4k63
|
| 760 |
+
1pwp
|
| 761 |
+
4k66
|
| 762 |
+
4ql8
|
| 763 |
+
4ie4
|
| 764 |
+
2fm5
|
| 765 |
+
3g4l
|
| 766 |
+
5ix1
|
| 767 |
+
5d1u
|
| 768 |
+
4y8c
|
| 769 |
+
2evl
|
| 770 |
+
5dde
|
| 771 |
+
5y7w
|
| 772 |
+
6clv
|
| 773 |
+
2fu8
|
| 774 |
+
3hg1
|
| 775 |
+
4xe0
|
| 776 |
+
5k1i
|
| 777 |
+
3c9e
|
| 778 |
+
1gpy
|
| 779 |
+
2gg2
|
| 780 |
+
5vdv
|
| 781 |
+
5eyz
|
| 782 |
+
2wc4
|
| 783 |
+
4qlk
|
| 784 |
+
3t3g
|
| 785 |
+
4xrq
|
| 786 |
+
3v5p
|
| 787 |
+
1exv
|
| 788 |
+
1std
|
| 789 |
+
5jjm
|
| 790 |
+
5cc2
|
| 791 |
+
4f9u
|
| 792 |
+
5jao
|
| 793 |
+
5dda
|
| 794 |
+
3eta
|
| 795 |
+
6f6u
|
| 796 |
+
6cee
|
| 797 |
+
4pl6
|
| 798 |
+
3ms9
|
| 799 |
+
4kwf
|
| 800 |
+
5q0s
|
| 801 |
+
5q1e
|
| 802 |
+
5o83
|
| 803 |
+
5lz7
|
| 804 |
+
5kq5
|
| 805 |
+
5xij
|
| 806 |
+
5kh3
|
| 807 |
+
4qfg
|
| 808 |
+
3ebo
|
| 809 |
+
2zdx
|
| 810 |
+
6q74
|
| 811 |
+
5bpe
|
| 812 |
+
4poj
|
| 813 |
+
4qgi
|
| 814 |
+
2n7b
|
| 815 |
+
1ow7
|
| 816 |
+
3sx9
|
| 817 |
+
2e92
|
| 818 |
+
2amv
|
| 819 |
+
4std
|
| 820 |
+
5ur9
|
| 821 |
+
2jdl
|
| 822 |
+
3ktr
|
| 823 |
+
1ogg
|
| 824 |
+
1onh
|
| 825 |
+
4ad6
|
| 826 |
+
3sl5
|
| 827 |
+
5v8o
|
| 828 |
+
4yrd
|
| 829 |
+
4dce
|
| 830 |
+
3rcd
|
| 831 |
+
3g4i
|
| 832 |
+
3zqt
|
| 833 |
+
3olf
|
| 834 |
+
1j1a
|
| 835 |
+
1aqj
|
| 836 |
+
3fq7
|
| 837 |
+
4cmo
|
| 838 |
+
3b66
|
| 839 |
+
4htp
|
| 840 |
+
5vdw
|
| 841 |
+
3l79
|
| 842 |
+
3usn
|
| 843 |
+
4i4e
|
| 844 |
+
3d27
|
| 845 |
+
2qrh
|
| 846 |
+
2wc3
|
| 847 |
+
4djh
|
| 848 |
+
1jif
|
| 849 |
+
3g58
|
| 850 |
+
3mt7
|
| 851 |
+
4yec
|
| 852 |
+
6b7c
|
| 853 |
+
1y2b
|
| 854 |
+
2v3u
|
| 855 |
+
2qlm
|
| 856 |
+
7std
|
| 857 |
+
2vpe
|
| 858 |
+
2qln
|
| 859 |
+
5wbk
|
| 860 |
+
5ftq
|
| 861 |
+
3fei
|
| 862 |
+
2nsx
|
| 863 |
+
4ebv
|
| 864 |
+
4b82
|
| 865 |
+
4pp3
|
| 866 |
+
5ddd
|
| 867 |
+
1l5r
|
| 868 |
+
4psb
|
| 869 |
+
4cnh
|
| 870 |
+
1azl
|
| 871 |
+
5evb
|
| 872 |
+
5dpx
|
| 873 |
+
4k9y
|
| 874 |
+
1jq3
|
| 875 |
+
2ggb
|
| 876 |
+
2gm9
|
| 877 |
+
4g2j
|
| 878 |
+
5lz8
|
| 879 |
+
2ylq
|
| 880 |
+
5kz0
|
| 881 |
+
5t8j
|
| 882 |
+
4cts
|
| 883 |
+
2j75
|
| 884 |
+
3mt8
|
| 885 |
+
1n3z
|
| 886 |
+
4rme
|
| 887 |
+
2gg8
|
| 888 |
+
4z2k
|
| 889 |
+
2ha7
|
| 890 |
+
4mi3
|
| 891 |
+
4k6i
|
| 892 |
+
3nfk
|
| 893 |
+
4pl4
|
| 894 |
+
6ce8
|
| 895 |
+
6bx6
|
| 896 |
+
1xom
|
| 897 |
+
4u0a
|
| 898 |
+
3djp
|
| 899 |
+
4zeb
|
| 900 |
+
5q0j
|
| 901 |
+
2wos
|
| 902 |
+
5vtb
|
| 903 |
+
1u9x
|
| 904 |
+
1g4k
|
| 905 |
+
1nc3
|
| 906 |
+
4gu9
|
| 907 |
+
6e4u
|
| 908 |
+
4b83
|
| 909 |
+
2p99
|
| 910 |
+
5dtj
|
| 911 |
+
3o8g
|
| 912 |
+
2fwp
|
| 913 |
+
4fod
|
| 914 |
+
3np9
|
| 915 |
+
5o9q
|
| 916 |
+
3v9b
|
| 917 |
+
1i1e
|
| 918 |
+
3jsw
|
| 919 |
+
4gu6
|
| 920 |
+
1snk
|
| 921 |
+
1bm6
|
| 922 |
+
4fnz
|
| 923 |
+
4qfs
|
| 924 |
+
4nj9
|
| 925 |
+
2ha2
|
| 926 |
+
4ej2
|
| 927 |
+
6ced
|
| 928 |
+
3jsi
|
| 929 |
+
2d1o
|
| 930 |
+
4mc2
|
| 931 |
+
6g2m
|
| 932 |
+
6e86
|
| 933 |
+
1l5q
|
| 934 |
+
3g0f
|
| 935 |
+
2jal
|
| 936 |
+
5y1u
|
| 937 |
+
2ya7
|
| 938 |
+
5jau
|
| 939 |
+
4c4n
|
| 940 |
+
3tmk
|
| 941 |
+
1t46
|
| 942 |
+
5ddb
|
| 943 |
+
5n6s
|
| 944 |
+
1ppm
|
| 945 |
+
5tbn
|
| 946 |
+
2wbg
|
| 947 |
+
6d28
|
| 948 |
+
5q14
|
| 949 |
+
3ik3
|
| 950 |
+
5w99
|
| 951 |
+
5q1h
|
| 952 |
+
4joa
|
| 953 |
+
5ha1
|
| 954 |
+
3m3z
|
| 955 |
+
4pzv
|
| 956 |
+
5dd9
|
| 957 |
+
2e91
|
| 958 |
+
1mem
|
| 959 |
+
1rdt
|
| 960 |
+
5vds
|
| 961 |
+
2xwd
|
| 962 |
+
5k32
|
| 963 |
+
3g4f
|
| 964 |
+
4x5y
|
| 965 |
+
3mtb
|
| 966 |
+
2cc7
|
| 967 |
+
4pkr
|
| 968 |
+
1gyx
|
| 969 |
+
5jas
|
| 970 |
+
1xoq
|
| 971 |
+
1u9v
|
| 972 |
+
3mtd
|
| 973 |
+
3kwb
|
| 974 |
+
5aqn
|
| 975 |
+
4ac3
|
| 976 |
+
2ylp
|
| 977 |
+
3p0g
|
| 978 |
+
3bz3
|
| 979 |
+
1xow
|
| 980 |
+
3ew2
|
| 981 |
+
1akq
|
| 982 |
+
5da3
|
| 983 |
+
4lh6
|
| 984 |
+
1db5
|
| 985 |
+
1g27
|
| 986 |
+
2ao6
|
| 987 |
+
5z9e
|
| 988 |
+
5zun
|
| 989 |
+
4cwb
|
| 990 |
+
2ccc
|
| 991 |
+
5tbp
|
| 992 |
+
1nl6
|
| 993 |
+
4pkv
|
| 994 |
+
2ww2
|
| 995 |
+
3upz
|
| 996 |
+
5aab
|
| 997 |
+
2ha4
|
| 998 |
+
3mss
|
| 999 |
+
1zkn
|
| 1000 |
+
4y87
|
| 1001 |
+
2pyi
|
| 1002 |
+
2yhd
|
| 1003 |
+
3rw9
|
| 1004 |
+
3f7h
|
| 1005 |
+
4q9s
|
| 1006 |
+
2g9u
|
| 1007 |
+
4jt9
|
| 1008 |
+
5twh
|
| 1009 |
+
4bj8
|
| 1010 |
+
4pl3
|
| 1011 |
+
2y2h
|
| 1012 |
+
4mi9
|
| 1013 |
+
5cdh
|
| 1014 |
+
4n5g
|
| 1015 |
+
7gpb
|
| 1016 |
+
2wr8
|
| 1017 |
+
3i7b
|
| 1018 |
+
1q9m
|
| 1019 |
+
1p2g
|
| 1020 |
+
4kao
|
| 1021 |
+
5l8n
|
| 1022 |
+
1bl4
|
| 1023 |
+
3iad
|
| 1024 |
+
1q6k
|
| 1025 |
+
4i31
|
| 1026 |
+
4fob
|
| 1027 |
+
5mlj
|
| 1028 |
+
5hm3
|
| 1029 |
+
2oz7
|
| 1030 |
+
5ehq
|
| 1031 |
+
4u0d
|
| 1032 |
+
6b2q
|
| 1033 |
+
4m3f
|
| 1034 |
+
3tcg
|
| 1035 |
+
6ccq
|
| 1036 |
+
4x0u
|
| 1037 |
+
1y6q
|
| 1038 |
+
3iof
|
| 1039 |
+
5db0
|
| 1040 |
+
1n4k
|
| 1041 |
+
4wht
|
| 1042 |
+
4dpy
|
| 1043 |
+
4cli
|
| 1044 |
+
3msc
|
| 1045 |
+
2ylo
|
| 1046 |
+
4x7q
|
| 1047 |
+
1g2a
|
| 1048 |
+
4arb
|
| 1049 |
+
5ncy
|
| 1050 |
+
1zaj
|
| 1051 |
+
3qt6
|
| 1052 |
+
3npa
|
| 1053 |
+
5aqh
|
| 1054 |
+
5oku
|
| 1055 |
+
1yon
|
| 1056 |
+
3ekn
|
| 1057 |
+
2bb7
|
| 1058 |
+
1akr
|
| 1059 |
+
5h2u
|
| 1060 |
+
4cfe
|
| 1061 |
+
4why
|
| 1062 |
+
3ril
|
| 1063 |
+
5q1d
|
| 1064 |
+
5aqo
|
| 1065 |
+
4cxw
|
| 1066 |
+
5osy
|
| 1067 |
+
4m8h
|
| 1068 |
+
1h5u
|
| 1069 |
+
5yea
|
| 1070 |
+
5t2g
|
| 1071 |
+
1c50
|
| 1072 |
+
5l3j
|
| 1073 |
+
4cxy
|
| 1074 |
+
6cco
|
| 1075 |
+
1ow8
|
| 1076 |
+
4k4j
|
| 1077 |
+
5q19
|
| 1078 |
+
5oxg
|
| 1079 |
+
3sus
|
| 1080 |
+
3kw9
|
| 1081 |
+
5wqk
|
| 1082 |
+
6f8u
|
| 1083 |
+
4i33
|
| 1084 |
+
4z2j
|
| 1085 |
+
1y2e
|
| 1086 |
+
4xpj
|
| 1087 |
+
6h0b
|
| 1088 |
+
2wor
|
| 1089 |
+
3ldq
|
| 1090 |
+
3ebp
|
| 1091 |
+
1bqo
|
| 1092 |
+
3ook
|
| 1093 |
+
3l7b
|
| 1094 |
+
1ow6
|
| 1095 |
+
5ye9
|
| 1096 |
+
2off
|
| 1097 |
+
1noj
|
| 1098 |
+
2aig
|
| 1099 |
+
1iup
|
| 1100 |
+
5eou
|
| 1101 |
+
5db2
|
| 1102 |
+
4wcu
|
| 1103 |
+
3ewc
|
| 1104 |
+
6ce6
|
| 1105 |
+
5fto
|
| 1106 |
+
4zei
|
| 1107 |
+
4b80
|
| 1108 |
+
3qi4
|
| 1109 |
+
2xi7
|
| 1110 |
+
2bqv
|
| 1111 |
+
5fkj
|
| 1112 |
+
4wj7
|
| 1113 |
+
6ez6
|
| 1114 |
+
1yhm
|
| 1115 |
+
2z92
|
| 1116 |
+
3sz9
|
| 1117 |
+
5ytu
|
| 1118 |
+
6f8t
|
| 1119 |
+
3amv
|
| 1120 |
+
3eyf
|
| 1121 |
+
5iug
|
| 1122 |
+
5d7a
|
| 1123 |
+
4clj
|
| 1124 |
+
5fum
|
| 1125 |
+
3v5t
|
| 1126 |
+
3ms7
|
| 1127 |
+
1yqy
|
| 1128 |
+
3aox
|
| 1129 |
+
4yjn
|
| 1130 |
+
3o4l
|
| 1131 |
+
2ax9
|
| 1132 |
+
5yto
|
| 1133 |
+
2wed
|
| 1134 |
+
3ozj
|
| 1135 |
+
2whp
|
| 1136 |
+
2qrg
|
| 1137 |
+
2gg5
|
| 1138 |
+
1k08
|
| 1139 |
+
2flh
|
| 1140 |
+
1l5s
|
| 1141 |
+
3n51
|
| 1142 |
+
2vpg
|
| 1143 |
+
5jat
|
| 1144 |
+
6drx
|
| 1145 |
+
4ktc
|
| 1146 |
+
4k8a
|
| 1147 |
+
2zof
|
| 1148 |
+
5aa9
|
| 1149 |
+
1kcs
|
| 1150 |
+
1y4z
|
| 1151 |
+
5oa6
|
| 1152 |
+
4du8
|
| 1153 |
+
2xwe
|
| 1154 |
+
3ms4
|
| 1155 |
+
2y2j
|
| 1156 |
+
6chn
|
| 1157 |
+
5q0l
|
| 1158 |
+
5aa8
|
| 1159 |
+
2qdt
|
| 1160 |
+
4a16
|
| 1161 |
+
3u8d
|
| 1162 |
+
5t28
|
| 1163 |
+
4xkb
|
| 1164 |
+
4hgl
|
| 1165 |
+
4l4v
|
| 1166 |
+
2gg3
|
| 1167 |
+
5ddf
|
| 1168 |
+
4ra1
|
| 1169 |
+
3t3u
|
| 1170 |
+
1ciz
|
| 1171 |
+
2j7x
|
| 1172 |
+
1x8d
|
| 1173 |
+
1kvo
|
| 1174 |
+
1b8y
|
| 1175 |
+
4yik
|
| 1176 |
+
1osv
|
| 1177 |
+
2hdx
|
| 1178 |
+
1k06
|
| 1179 |
+
3g1m
|
| 1180 |
+
5aqf
|
| 1181 |
+
1d7x
|
| 1182 |
+
5yf1
|
| 1183 |
+
3b5r
|
| 1184 |
+
3r0h
|
| 1185 |
+
6b41
|
| 1186 |
+
4mic
|
| 1187 |
+
2rin
|
| 1188 |
+
3bpc
|
| 1189 |
+
2e5y
|
| 1190 |
+
1n5r
|
| 1191 |
+
2j77
|
| 1192 |
+
1gag
|
| 1193 |
+
3djo
|
| 1194 |
+
4zec
|
| 1195 |
+
5xwr
|
| 1196 |
+
5d1s
|
| 1197 |
+
1uz1
|
| 1198 |
+
3sl8
|
| 1199 |
+
2j79
|
| 1200 |
+
3r5m
|
| 1201 |
+
3b65
|
| 1202 |
+
2e95
|
| 1203 |
+
3t3e
|
| 1204 |
+
5cj6
|
| 1205 |
+
1nok
|
| 1206 |
+
5wpb
|
| 1207 |
+
1hfs
|
| 1208 |
+
6e5x
|
| 1209 |
+
5evd
|
| 1210 |
+
5ikb
|
| 1211 |
+
5aqr
|
| 1212 |
+
3p8n
|
| 1213 |
+
5q0z
|
| 1214 |
+
1dg9
|
| 1215 |
+
3qt7
|
| 1216 |
+
5jah
|
| 1217 |
+
5ax9
|
| 1218 |
+
2q96
|
| 1219 |
+
2j7f
|
| 1220 |
+
5q1g
|
| 1221 |
+
2y2p
|
| 1222 |
+
5v84
|
| 1223 |
+
4pji
|
data/timesplit_test
ADDED
|
@@ -0,0 +1,363 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
6qqw
|
| 2 |
+
6d08
|
| 3 |
+
6jap
|
| 4 |
+
6np2
|
| 5 |
+
6uvp
|
| 6 |
+
6oxq
|
| 7 |
+
6jsn
|
| 8 |
+
6hzb
|
| 9 |
+
6qrc
|
| 10 |
+
6oio
|
| 11 |
+
6jag
|
| 12 |
+
6moa
|
| 13 |
+
6hld
|
| 14 |
+
6i9a
|
| 15 |
+
6e4c
|
| 16 |
+
6g24
|
| 17 |
+
6jb4
|
| 18 |
+
6s55
|
| 19 |
+
6seo
|
| 20 |
+
6dyz
|
| 21 |
+
5zk5
|
| 22 |
+
6jid
|
| 23 |
+
5ze6
|
| 24 |
+
6qlu
|
| 25 |
+
6a6k
|
| 26 |
+
6qgf
|
| 27 |
+
6e3z
|
| 28 |
+
6te6
|
| 29 |
+
6pka
|
| 30 |
+
6g2o
|
| 31 |
+
6jsf
|
| 32 |
+
5zxk
|
| 33 |
+
6qxd
|
| 34 |
+
6n97
|
| 35 |
+
6jt3
|
| 36 |
+
6qtr
|
| 37 |
+
6oy1
|
| 38 |
+
6n96
|
| 39 |
+
6qzh
|
| 40 |
+
6qqz
|
| 41 |
+
6qmt
|
| 42 |
+
6ibx
|
| 43 |
+
6hmt
|
| 44 |
+
5zk7
|
| 45 |
+
6k3l
|
| 46 |
+
6cjs
|
| 47 |
+
6n9l
|
| 48 |
+
6ibz
|
| 49 |
+
6ott
|
| 50 |
+
6gge
|
| 51 |
+
6hot
|
| 52 |
+
6e3p
|
| 53 |
+
6md6
|
| 54 |
+
6hlb
|
| 55 |
+
6fe5
|
| 56 |
+
6uwp
|
| 57 |
+
6npp
|
| 58 |
+
6g2f
|
| 59 |
+
6mo7
|
| 60 |
+
6bqd
|
| 61 |
+
6nsv
|
| 62 |
+
6i76
|
| 63 |
+
6n53
|
| 64 |
+
6g2c
|
| 65 |
+
6eeb
|
| 66 |
+
6n0m
|
| 67 |
+
6uvy
|
| 68 |
+
6ovz
|
| 69 |
+
6olx
|
| 70 |
+
6v5l
|
| 71 |
+
6hhg
|
| 72 |
+
5zcu
|
| 73 |
+
6dz2
|
| 74 |
+
6mjq
|
| 75 |
+
6efk
|
| 76 |
+
6s9w
|
| 77 |
+
6gdy
|
| 78 |
+
6kqi
|
| 79 |
+
6ueg
|
| 80 |
+
6oxt
|
| 81 |
+
6oy0
|
| 82 |
+
6qr7
|
| 83 |
+
6i41
|
| 84 |
+
6cyg
|
| 85 |
+
6qmr
|
| 86 |
+
6g27
|
| 87 |
+
6ggb
|
| 88 |
+
6g3c
|
| 89 |
+
6n4e
|
| 90 |
+
6fcj
|
| 91 |
+
6quv
|
| 92 |
+
6iql
|
| 93 |
+
6i74
|
| 94 |
+
6qr4
|
| 95 |
+
6rnu
|
| 96 |
+
6jib
|
| 97 |
+
6izq
|
| 98 |
+
6qw8
|
| 99 |
+
6qto
|
| 100 |
+
6qrd
|
| 101 |
+
6hza
|
| 102 |
+
6e5s
|
| 103 |
+
6dz3
|
| 104 |
+
6e6w
|
| 105 |
+
6cyh
|
| 106 |
+
5zlf
|
| 107 |
+
6om4
|
| 108 |
+
6gga
|
| 109 |
+
6pgp
|
| 110 |
+
6qqv
|
| 111 |
+
6qtq
|
| 112 |
+
6gj6
|
| 113 |
+
6os5
|
| 114 |
+
6s07
|
| 115 |
+
6i77
|
| 116 |
+
6hhj
|
| 117 |
+
6ahs
|
| 118 |
+
6oxx
|
| 119 |
+
6mjj
|
| 120 |
+
6hor
|
| 121 |
+
6jb0
|
| 122 |
+
6i68
|
| 123 |
+
6pz4
|
| 124 |
+
6mhb
|
| 125 |
+
6uim
|
| 126 |
+
6jsg
|
| 127 |
+
6i78
|
| 128 |
+
6oxy
|
| 129 |
+
6gbw
|
| 130 |
+
6mo0
|
| 131 |
+
6ggf
|
| 132 |
+
6qge
|
| 133 |
+
6cjr
|
| 134 |
+
6oxp
|
| 135 |
+
6d07
|
| 136 |
+
6i63
|
| 137 |
+
6ten
|
| 138 |
+
6uii
|
| 139 |
+
6qlr
|
| 140 |
+
6sen
|
| 141 |
+
6oxv
|
| 142 |
+
6g2b
|
| 143 |
+
5zr3
|
| 144 |
+
6kjf
|
| 145 |
+
6qr9
|
| 146 |
+
6g9f
|
| 147 |
+
6e6v
|
| 148 |
+
5zk9
|
| 149 |
+
6pnn
|
| 150 |
+
6nri
|
| 151 |
+
6uwv
|
| 152 |
+
6ooz
|
| 153 |
+
6npi
|
| 154 |
+
6oip
|
| 155 |
+
6miv
|
| 156 |
+
6s57
|
| 157 |
+
6p8x
|
| 158 |
+
6hoq
|
| 159 |
+
6qts
|
| 160 |
+
6ggd
|
| 161 |
+
6pnm
|
| 162 |
+
6oy2
|
| 163 |
+
6oi8
|
| 164 |
+
6mhd
|
| 165 |
+
6agt
|
| 166 |
+
6i5p
|
| 167 |
+
6hhr
|
| 168 |
+
6p8z
|
| 169 |
+
6c85
|
| 170 |
+
6g5u
|
| 171 |
+
6j06
|
| 172 |
+
6qsz
|
| 173 |
+
6jbb
|
| 174 |
+
6hhp
|
| 175 |
+
6np5
|
| 176 |
+
6nlj
|
| 177 |
+
6qlp
|
| 178 |
+
6n94
|
| 179 |
+
6e13
|
| 180 |
+
6qls
|
| 181 |
+
6uil
|
| 182 |
+
6st3
|
| 183 |
+
6n92
|
| 184 |
+
6s56
|
| 185 |
+
6hzd
|
| 186 |
+
6uhv
|
| 187 |
+
6k05
|
| 188 |
+
6q36
|
| 189 |
+
6ic0
|
| 190 |
+
6hhi
|
| 191 |
+
6e3m
|
| 192 |
+
6qtx
|
| 193 |
+
6jse
|
| 194 |
+
5zjy
|
| 195 |
+
6o3y
|
| 196 |
+
6rpg
|
| 197 |
+
6rr0
|
| 198 |
+
6gzy
|
| 199 |
+
6qlt
|
| 200 |
+
6ufo
|
| 201 |
+
6o0h
|
| 202 |
+
6o3x
|
| 203 |
+
5zjz
|
| 204 |
+
6i8t
|
| 205 |
+
6ooy
|
| 206 |
+
6oiq
|
| 207 |
+
6od6
|
| 208 |
+
6nrh
|
| 209 |
+
6qra
|
| 210 |
+
6hhh
|
| 211 |
+
6m7h
|
| 212 |
+
6ufn
|
| 213 |
+
6qr0
|
| 214 |
+
6o5u
|
| 215 |
+
6h14
|
| 216 |
+
6jwa
|
| 217 |
+
6ny0
|
| 218 |
+
6jan
|
| 219 |
+
6ftf
|
| 220 |
+
6oxw
|
| 221 |
+
6jon
|
| 222 |
+
6cf7
|
| 223 |
+
6rtn
|
| 224 |
+
6jsz
|
| 225 |
+
6o9c
|
| 226 |
+
6mo8
|
| 227 |
+
6qln
|
| 228 |
+
6qqu
|
| 229 |
+
6i66
|
| 230 |
+
6mja
|
| 231 |
+
6gwe
|
| 232 |
+
6d3z
|
| 233 |
+
6oxr
|
| 234 |
+
6r4k
|
| 235 |
+
6hle
|
| 236 |
+
6h9v
|
| 237 |
+
6hou
|
| 238 |
+
6nv9
|
| 239 |
+
6py0
|
| 240 |
+
6qlq
|
| 241 |
+
6nv7
|
| 242 |
+
6n4b
|
| 243 |
+
6jaq
|
| 244 |
+
6i8m
|
| 245 |
+
6dz0
|
| 246 |
+
6oxs
|
| 247 |
+
6k2n
|
| 248 |
+
6cjj
|
| 249 |
+
6ffg
|
| 250 |
+
6a73
|
| 251 |
+
6qqt
|
| 252 |
+
6a1c
|
| 253 |
+
6oxu
|
| 254 |
+
6qre
|
| 255 |
+
6qtw
|
| 256 |
+
6np4
|
| 257 |
+
6hv2
|
| 258 |
+
6n55
|
| 259 |
+
6e3o
|
| 260 |
+
6kjd
|
| 261 |
+
6sfc
|
| 262 |
+
6qi7
|
| 263 |
+
6hzc
|
| 264 |
+
6k04
|
| 265 |
+
6op0
|
| 266 |
+
6q38
|
| 267 |
+
6n8x
|
| 268 |
+
6np3
|
| 269 |
+
6uvv
|
| 270 |
+
6pgo
|
| 271 |
+
6jbe
|
| 272 |
+
6i75
|
| 273 |
+
6qqq
|
| 274 |
+
6i62
|
| 275 |
+
6j9y
|
| 276 |
+
6g29
|
| 277 |
+
6h7d
|
| 278 |
+
6mo9
|
| 279 |
+
6jao
|
| 280 |
+
6jmf
|
| 281 |
+
6hmy
|
| 282 |
+
6qfe
|
| 283 |
+
5zml
|
| 284 |
+
6i65
|
| 285 |
+
6e7m
|
| 286 |
+
6i61
|
| 287 |
+
6rz6
|
| 288 |
+
6qtm
|
| 289 |
+
6qlo
|
| 290 |
+
6oie
|
| 291 |
+
6miy
|
| 292 |
+
6nrf
|
| 293 |
+
6gj5
|
| 294 |
+
6jad
|
| 295 |
+
6mj4
|
| 296 |
+
6h12
|
| 297 |
+
6d3y
|
| 298 |
+
6qr2
|
| 299 |
+
6qxa
|
| 300 |
+
6o9b
|
| 301 |
+
6ckl
|
| 302 |
+
6oir
|
| 303 |
+
6d40
|
| 304 |
+
6e6j
|
| 305 |
+
6i7a
|
| 306 |
+
6g25
|
| 307 |
+
6oin
|
| 308 |
+
6jam
|
| 309 |
+
6oxz
|
| 310 |
+
6hop
|
| 311 |
+
6rot
|
| 312 |
+
6uhu
|
| 313 |
+
6mji
|
| 314 |
+
6nrj
|
| 315 |
+
6nt2
|
| 316 |
+
6op9
|
| 317 |
+
6pno
|
| 318 |
+
6e4v
|
| 319 |
+
6k1s
|
| 320 |
+
6a87
|
| 321 |
+
6oim
|
| 322 |
+
6cjp
|
| 323 |
+
6pyb
|
| 324 |
+
6h13
|
| 325 |
+
6qrf
|
| 326 |
+
6mhc
|
| 327 |
+
6j9w
|
| 328 |
+
6nrg
|
| 329 |
+
6fff
|
| 330 |
+
6n93
|
| 331 |
+
6jut
|
| 332 |
+
6g2e
|
| 333 |
+
6nd3
|
| 334 |
+
6os6
|
| 335 |
+
6dql
|
| 336 |
+
6inz
|
| 337 |
+
6i67
|
| 338 |
+
6quw
|
| 339 |
+
6qwi
|
| 340 |
+
6npm
|
| 341 |
+
6i64
|
| 342 |
+
6e3n
|
| 343 |
+
6qrg
|
| 344 |
+
6nxz
|
| 345 |
+
6iby
|
| 346 |
+
6gj7
|
| 347 |
+
6qr3
|
| 348 |
+
6qr1
|
| 349 |
+
6s9x
|
| 350 |
+
6q4q
|
| 351 |
+
6hbn
|
| 352 |
+
6nw3
|
| 353 |
+
6tel
|
| 354 |
+
6p8y
|
| 355 |
+
6d5w
|
| 356 |
+
6t6a
|
| 357 |
+
6o5g
|
| 358 |
+
6r7d
|
| 359 |
+
6pya
|
| 360 |
+
6ffe
|
| 361 |
+
6d3x
|
| 362 |
+
6gj8
|
| 363 |
+
6mo2
|
dataset.py
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from itertools import accumulate
|
| 2 |
+
import numpy as np
|
| 3 |
+
import torch
|
| 4 |
+
from torch.utils.data import Dataset
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class ProcessedLigandPocketDataset(Dataset):
    """Ligand-pocket complexes preprocessed into a single .npz archive.

    The archive stores all complexes concatenated along the first axis;
    per-complex boundaries are recovered from the 'lig_mask' and
    'pocket_mask' index arrays.
    """

    def __init__(self, npz_path, center=True, transform=None):
        """Load the archive and split it into per-complex tensors.

        Args:
            npz_path: path to the preprocessed .npz file.
            center: if True, shift each complex so that the joint
                ligand+pocket center of mass sits at the origin.
            transform: optional callable applied to each sample dict.
        """
        self.transform = transform

        with np.load(npz_path, allow_pickle=True) as archive:
            raw = {key: value for key, value in archive.items()}

        # Split the flat per-node arrays into one tensor per complex,
        # using the mask arrays to locate complex boundaries.
        self.data = {}
        for key, flat in raw.items():
            # Per-complex metadata stays as-is (one entry per complex).
            if key in ('names', 'receptors'):
                self.data[key] = flat
                continue

            mask_key = 'lig_mask' if 'lig' in key else 'pocket_mask'
            boundaries = np.where(np.diff(raw[mask_key]))[0] + 1
            self.data[key] = [torch.from_numpy(part)
                              for part in np.split(flat, boundaries)]

            # Cache per-complex node counts for convenience.
            if key == 'lig_mask':
                self.data['num_lig_atoms'] = torch.tensor(
                    [len(part) for part in self.data['lig_mask']])
            elif key == 'pocket_mask':
                self.data['num_pocket_nodes'] = torch.tensor(
                    [len(part) for part in self.data['pocket_mask']])

        if center:
            # Subtract the joint (ligand + pocket) center of mass.
            for i in range(len(self.data['lig_coords'])):
                lig = self.data['lig_coords'][i]
                pocket = self.data['pocket_coords'][i]
                com = (lig.sum(0) + pocket.sum(0)) / (len(lig) + len(pocket))
                self.data['lig_coords'][i] = lig - com
                self.data['pocket_coords'][i] = pocket - com

    def __len__(self):
        return len(self.data['names'])

    def __getitem__(self, idx):
        sample = {key: value[idx] for key, value in self.data.items()}
        if self.transform is not None:
            sample = self.transform(sample)
        return sample

    @staticmethod
    def collate_fn(batch):
        """Merge a list of sample dicts into one batched dict."""
        out = {}
        for prop in batch[0].keys():
            if prop in ('names', 'receptors'):
                out[prop] = [sample[prop] for sample in batch]
            elif prop in ('num_lig_atoms', 'num_pocket_nodes',
                          'num_virtual_atoms'):
                out[prop] = torch.tensor([sample[prop] for sample in batch])
            elif 'mask' in prop:
                # Re-index masks so batch indices start at zero (needed
                # for torch_scatter).
                out[prop] = torch.cat(
                    [i * torch.ones(len(sample[prop]))
                     for i, sample in enumerate(batch)], dim=0)
            else:
                out[prop] = torch.cat([sample[prop] for sample in batch],
                                      dim=0)
        return out
|
environment.yaml
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: diffsbdd
|
| 2 |
+
channels:
|
| 3 |
+
- pyg
|
| 4 |
+
- pytorch
|
| 5 |
+
- nvidia
|
| 6 |
+
- anaconda
|
| 7 |
+
- conda-forge
|
| 8 |
+
- defaults
|
| 9 |
+
dependencies:
|
| 10 |
+
- python=3.10.4
|
| 11 |
+
- pip=24.2
|
| 12 |
+
- pytorch=2.0.1=*cuda11.8*
|
| 13 |
+
- cudatoolkit=11.8
|
| 14 |
+
- pytorch-lightning=1.8.4
|
| 15 |
+
- wandb=0.13.1
|
| 16 |
+
- rdkit=2022.03.2
|
| 17 |
+
- biopython=1.79
|
| 18 |
+
- imageio=2.21.2
|
| 19 |
+
- scipy=1.13.1
|
| 20 |
+
- pytorch-scatter=2.1.2
|
| 21 |
+
- networkx=3.3
|
| 22 |
+
- numpy=1.26.4
|
| 23 |
+
- openbabel=3.1.1
|
| 24 |
+
- pandas=2.2.2
|
| 25 |
+
- seaborn=0.13.2
|
| 26 |
+
- torchmetrics=1.4.2
|
| 27 |
+
- tqdm=4.66.5
|
| 28 |
+
- yaml=0.2.5
|
| 29 |
+
- protobuf=3.20.*
|
equivariant_diffusion/conditional_model.py
ADDED
|
@@ -0,0 +1,746 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import math
|
| 2 |
+
|
| 3 |
+
import numpy as np
|
| 4 |
+
import torch
|
| 5 |
+
import torch.nn.functional as F
|
| 6 |
+
from torch_scatter import scatter_add, scatter_mean
|
| 7 |
+
|
| 8 |
+
import utils
|
| 9 |
+
from equivariant_diffusion.en_diffusion import EnVariationalDiffusion
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class ConditionalDDPM(EnVariationalDiffusion):
|
| 13 |
+
"""
|
| 14 |
+
Conditional Diffusion Module.
|
| 15 |
+
"""
|
| 16 |
+
def __init__(self, *args, **kwargs):
    """Forward all arguments to the EnVariationalDiffusion base class.

    The conditional model keeps the pocket fixed during diffusion, so
    the dynamics network must be configured not to update pocket
    coordinates.
    """
    super().__init__(*args, **kwargs)
    assert not self.dynamics.update_pocket_coords
|
| 19 |
+
|
| 20 |
+
def kl_prior(self, xh_lig, mask_lig, num_nodes):
    """Computes the KL between q(z1 | x) and the prior p(z1) = Normal(0, 1).

    This is essentially a lot of work for something that is in practice
    negligible in the loss. However, you compute it so that you see it when
    you've made a mistake in your noise schedule.

    Args:
        xh_lig: concatenated ligand coordinates and features,
            shape (n_nodes, n_dims + atom_nf).
        mask_lig: batch index per ligand node.
        num_nodes: number of ligand nodes for each sample in the batch.

    Returns:
        Per-sample KL divergence (x-part plus h-part).
    """
    batch_size = len(num_nodes)

    # Compute the last alpha value, alpha_T.
    ones = torch.ones((batch_size, 1), device=xh_lig.device)
    gamma_T = self.gamma(ones)  # gamma at t = 1, the final timestep
    alpha_T = self.alpha(gamma_T, xh_lig)

    # Compute means.
    mu_T_lig = alpha_T[mask_lig] * xh_lig
    mu_T_lig_x, mu_T_lig_h = \
        mu_T_lig[:, :self.n_dims], mu_T_lig[:, self.n_dims:]

    # Compute standard deviations (only batch axis for x-part, inflated for h-part).
    sigma_T_x = self.sigma(gamma_T, mu_T_lig_x).squeeze()
    sigma_T_h = self.sigma(gamma_T, mu_T_lig_h).squeeze()

    # Compute KL for h-part.
    zeros = torch.zeros_like(mu_T_lig_h)
    ones = torch.ones_like(sigma_T_h)
    mu_norm2 = self.sum_except_batch((mu_T_lig_h - zeros) ** 2, mask_lig)
    kl_distance_h = self.gaussian_KL(mu_norm2, sigma_T_h, ones, d=1)

    # Compute KL for x-part. The x-subspace has reduced dimensionality
    # because the center of mass is fixed (see subspace_dimensionality).
    zeros = torch.zeros_like(mu_T_lig_x)
    ones = torch.ones_like(sigma_T_x)
    mu_norm2 = self.sum_except_batch((mu_T_lig_x - zeros) ** 2, mask_lig)
    subspace_d = self.subspace_dimensionality(num_nodes)
    kl_distance_x = self.gaussian_KL(mu_norm2, sigma_T_x, ones, subspace_d)

    return kl_distance_x + kl_distance_h
|
| 57 |
+
|
| 58 |
+
def log_pxh_given_z0_without_constants(self, ligand, z_0_lig, eps_lig,
                                       net_out_lig, gamma_0, epsilon=1e-10):
    """Log-likelihood terms of p(x, h | z_0), without constant factors.

    Args:
        ligand: dict with at least 'one_hot' and 'mask' entries.
        z_0_lig: latent at t=0, shape (n_nodes, n_dims + atom_nf).
        eps_lig: the true noise that produced z_0.
        net_out_lig: the network's noise prediction.
        gamma_0: noise-schedule value at t=0.
        epsilon: numerical-stability floor inside the log.

    Returns:
        Tuple (log p(x | z0) without constants, log p(h | z0)), each
        summed per batch element.
    """
    # Discrete properties are predicted directly from z_t.
    z_h_lig = z_0_lig[:, self.n_dims:]

    # Take only part over x.
    eps_lig_x = eps_lig[:, :self.n_dims]
    net_lig_x = net_out_lig[:, :self.n_dims]

    # Compute sigma_0 and rescale to the integer scale of the data.
    sigma_0 = self.sigma(gamma_0, target_tensor=z_0_lig)
    sigma_0_cat = sigma_0 * self.norm_values[1]

    # Computes the error for the distribution
    # N(x | 1 / alpha_0 z_0 + sigma_0/alpha_0 eps_0, sigma_0 / alpha_0),
    # the weighting in the epsilon parametrization is exactly '1'.
    squared_error = (eps_lig_x - net_lig_x) ** 2
    if self.vnode_idx is not None:
        # coordinates of virtual atoms should not contribute to the error
        squared_error[ligand['one_hot'][:, self.vnode_idx].bool(), :self.n_dims] = 0
    log_p_x_given_z0_without_constants_ligand = -0.5 * (
        self.sum_except_batch(squared_error, ligand['mask'])
    )

    # Compute delta indicator masks.
    # un-normalize
    ligand_onehot = ligand['one_hot'] * self.norm_values[1] + self.norm_biases[1]

    estimated_ligand_onehot = z_h_lig * self.norm_values[1] + self.norm_biases[1]

    # Centered h_cat around 1, since onehot encoded.
    centered_ligand_onehot = estimated_ligand_onehot - 1

    # Compute integrals from 0.5 to 1.5 of the normal distribution
    # N(mean=z_h_cat, stdev=sigma_0_cat)
    log_ph_cat_proportional_ligand = torch.log(
        self.cdf_standard_gaussian((centered_ligand_onehot + 0.5) / sigma_0_cat[ligand['mask']])
        - self.cdf_standard_gaussian((centered_ligand_onehot - 0.5) / sigma_0_cat[ligand['mask']])
        + epsilon  # avoid log(0) when the integral underflows
    )

    # Normalize the distribution over the categories.
    log_Z = torch.logsumexp(log_ph_cat_proportional_ligand, dim=1,
                            keepdim=True)
    log_probabilities_ligand = log_ph_cat_proportional_ligand - log_Z

    # Select the log_prob of the current category using the onehot
    # representation.
    log_ph_given_z0_ligand = self.sum_except_batch(
        log_probabilities_ligand * ligand_onehot, ligand['mask'])

    return log_p_x_given_z0_without_constants_ligand, log_ph_given_z0_ligand
|
| 111 |
+
|
| 112 |
+
def sample_p_xh_given_z0(self, z0_lig, xh0_pocket, lig_mask, pocket_mask,
                         batch_size, fix_noise=False):
    """Samples x ~ p(x|z0).

    Returns un-normalized ligand coordinates and hard (arg-maxed)
    one-hot atom types, plus un-normalized pocket coordinates/features.
    """
    t_zeros = torch.zeros(size=(batch_size, 1), device=z0_lig.device)
    gamma_0 = self.gamma(t_zeros)
    # Computes sqrt(sigma_0^2 / alpha_0^2)
    sigma_x = self.SNR(-0.5 * gamma_0)
    net_out_lig, _ = self.dynamics(
        z0_lig, xh0_pocket, t_zeros, lig_mask, pocket_mask)

    # Compute mu for p(zs | zt).
    mu_x_lig = self.compute_x_pred(net_out_lig, z0_lig, gamma_0, lig_mask)
    xh_lig, xh0_pocket = self.sample_normal_zero_com(
        mu_x_lig, xh0_pocket, sigma_x, lig_mask, pocket_mask, fix_noise)

    # NOTE(review): the feature part is taken from z0_lig, not from the
    # freshly sampled xh_lig — consistent with "discrete properties are
    # predicted directly from z_t" above; confirm this is intended.
    x_lig, h_lig = self.unnormalize(
        xh_lig[:, :self.n_dims], z0_lig[:, self.n_dims:])
    x_pocket, h_pocket = self.unnormalize(
        xh0_pocket[:, :self.n_dims], xh0_pocket[:, self.n_dims:])

    # Convert soft ligand features into hard one-hot atom types.
    h_lig = F.one_hot(torch.argmax(h_lig, dim=1), self.atom_nf)
    # h_pocket = F.one_hot(torch.argmax(h_pocket, dim=1), self.residue_nf)

    return x_lig, h_lig, x_pocket, h_pocket
|
| 136 |
+
|
| 137 |
+
def sample_normal(self, *args):
    """Disabled in the conditional model; use sample_normal_zero_com()."""
    raise NotImplementedError("Has been replaced by sample_normal_zero_com()")
|
| 139 |
+
|
| 140 |
+
def sample_normal_zero_com(self, mu_lig, xh0_pocket, sigma, lig_mask,
                           pocket_mask, fix_noise=False):
    """Samples from a Normal distribution.

    After sampling, the joint ligand+pocket center of mass is removed so
    that coordinates live in the COM-free subspace. The pocket is
    returned as a (possibly translated) copy of ``xh0_pocket``.

    Raises:
        NotImplementedError: if ``fix_noise`` is requested.
    """
    if fix_noise:
        # bs = 1 if fix_noise else mu.size(0)
        raise NotImplementedError("fix_noise option isn't implemented yet")

    eps_lig = self.sample_gaussian(
        size=(len(lig_mask), self.n_dims + self.atom_nf),
        device=lig_mask.device)

    out_lig = mu_lig + sigma[lig_mask] * eps_lig

    # project to COM-free subspace
    xh_pocket = xh0_pocket.detach().clone()
    out_lig[:, :self.n_dims], xh_pocket[:, :self.n_dims] = \
        self.remove_mean_batch(out_lig[:, :self.n_dims],
                               xh0_pocket[:, :self.n_dims],
                               lig_mask, pocket_mask)

    return out_lig, xh_pocket
|
| 161 |
+
|
| 162 |
+
def noised_representation(self, xh_lig, xh0_pocket, lig_mask, pocket_mask,
                          gamma_t):
    """Noise the ligand to timestep t and re-center the system.

    Returns:
        Tuple (z_t_lig, xh_pocket, eps_lig): the noised ligand, the
        correspondingly re-centered pocket copy, and the noise sample.
    """
    # Compute alpha_t and sigma_t from gamma.
    alpha_t = self.alpha(gamma_t, xh_lig)
    sigma_t = self.sigma(gamma_t, xh_lig)

    # Sample zt ~ Normal(alpha_t x, sigma_t)
    eps_lig = self.sample_gaussian(
        size=(len(lig_mask), self.n_dims + self.atom_nf),
        device=lig_mask.device)

    # Sample z_t given x, h for timestep t, from q(z_t | x, h)
    z_t_lig = alpha_t[lig_mask] * xh_lig + sigma_t[lig_mask] * eps_lig

    # project to COM-free subspace
    xh_pocket = xh0_pocket.detach().clone()
    z_t_lig[:, :self.n_dims], xh_pocket[:, :self.n_dims] = \
        self.remove_mean_batch(z_t_lig[:, :self.n_dims],
                               xh_pocket[:, :self.n_dims],
                               lig_mask, pocket_mask)

    return z_t_lig, xh_pocket, eps_lig
|
| 184 |
+
|
| 185 |
+
def log_pN(self, N_lig, N_pocket):
    """Evaluate the size prior log p(N_lig | N_pocket).

    Used for log p(x,h,N) = log p(x,h|N) + log p(N), where
    log p(x,h|N) is the model's output.

    Args:
        N_lig: ligand sizes per sample.
        N_pocket: pocket sizes per sample.
    Returns:
        log p(N_lig | N_pocket) under the empirical size distribution.
    """
    return self.size_distribution.log_prob_n1_given_n2(N_lig, N_pocket)
|
| 197 |
+
|
| 198 |
+
def delta_log_px(self, num_nodes):
    """Log-volume change caused by normalizing x by norm_values[0]."""
    subspace_d = self.subspace_dimensionality(num_nodes)
    return -subspace_d * np.log(self.norm_values[0])
|
| 201 |
+
|
| 202 |
+
def forward(self, ligand, pocket, return_info=False):
    """
    Computes the loss and NLL terms.

    Args:
        ligand: dict with 'x', 'one_hot', 'mask', 'size' entries.
        pocket: dict with the same structure for the fixed pocket.
        return_info: if True, also return a dict of monitoring scalars.

    Returns:
        Tuple of loss terms (delta_log_px, error_t_lig, ..., xh_lig_hat),
        optionally followed by the info dict.
    """
    # Normalize data, take into account volume change in x.
    ligand, pocket = self.normalize(ligand, pocket)

    # Likelihood change due to normalization
    # if self.vnode_idx is not None:
    #     delta_log_px = self.delta_log_px(ligand['size'] - ligand['num_virtual_atoms'] + pocket['size'])
    # else:
    delta_log_px = self.delta_log_px(ligand['size'])

    # Sample a timestep t for each example in batch
    # At evaluation time, loss_0 will be computed separately to decrease
    # variance in the estimator (costs two forward passes)
    lowest_t = 0 if self.training else 1
    t_int = torch.randint(
        lowest_t, self.T + 1, size=(ligand['size'].size(0), 1),
        device=ligand['x'].device).float()
    s_int = t_int - 1  # previous timestep

    # Masks: important to compute log p(x | z0).
    t_is_zero = (t_int == 0).float()
    t_is_not_zero = 1 - t_is_zero

    # Normalize t to [0, 1]. Note that the negative
    # step of s will never be used, since then p(x | z0) is computed.
    s = s_int / self.T
    t = t_int / self.T

    # Compute gamma_s and gamma_t via the network.
    gamma_s = self.inflate_batch_array(self.gamma(s), ligand['x'])
    gamma_t = self.inflate_batch_array(self.gamma(t), ligand['x'])

    # Concatenate x, and h[categorical].
    xh0_lig = torch.cat([ligand['x'], ligand['one_hot']], dim=1)
    xh0_pocket = torch.cat([pocket['x'], pocket['one_hot']], dim=1)

    # Center the input nodes
    xh0_lig[:, :self.n_dims], xh0_pocket[:, :self.n_dims] = \
        self.remove_mean_batch(xh0_lig[:, :self.n_dims],
                               xh0_pocket[:, :self.n_dims],
                               ligand['mask'], pocket['mask'])

    # Find noised representation
    z_t_lig, xh_pocket, eps_t_lig = \
        self.noised_representation(xh0_lig, xh0_pocket, ligand['mask'],
                                   pocket['mask'], gamma_t)

    # Neural net prediction.
    net_out_lig, _ = self.dynamics(
        z_t_lig, xh_pocket, t, ligand['mask'], pocket['mask'])

    # For LJ loss term
    # xh_lig_hat does not need to be zero-centered as it is only used for
    # computing relative distances
    xh_lig_hat = self.xh_given_zt_and_epsilon(z_t_lig, net_out_lig, gamma_t,
                                              ligand['mask'])

    # Compute the L2 error.
    squared_error = (eps_t_lig - net_out_lig) ** 2
    if self.vnode_idx is not None:
        # coordinates of virtual atoms should not contribute to the error
        squared_error[ligand['one_hot'][:, self.vnode_idx].bool(), :self.n_dims] = 0
    error_t_lig = self.sum_except_batch(squared_error, ligand['mask'])

    # Compute weighting with SNR: (1 - SNR(s-t)) for epsilon parametrization
    SNR_weight = (1 - self.SNR(gamma_s - gamma_t)).squeeze(1)
    assert error_t_lig.size() == SNR_weight.size()

    # The _constants_ depending on sigma_0 from the
    # cross entropy term E_q(z0 | x) [log p(x | z0)].
    neg_log_constants = -self.log_constants_p_x_given_z0(
        n_nodes=ligand['size'], device=error_t_lig.device)

    # The KL between q(zT | x) and p(zT) = Normal(0, 1).
    # Should be close to zero.
    kl_prior = self.kl_prior(xh0_lig, ligand['mask'], ligand['size'])

    if self.training:
        # Computes the L_0 term (even if gamma_t is not actually gamma_0)
        # and this will later be selected via masking.
        log_p_x_given_z0_without_constants_ligand, log_ph_given_z0 = \
            self.log_pxh_given_z0_without_constants(
                ligand, z_t_lig, eps_t_lig, net_out_lig, gamma_t)

        loss_0_x_ligand = -log_p_x_given_z0_without_constants_ligand * \
                          t_is_zero.squeeze()
        loss_0_h = -log_ph_given_z0 * t_is_zero.squeeze()

        # apply t_is_zero mask
        error_t_lig = error_t_lig * t_is_not_zero.squeeze()

    else:
        # Compute noise values for t = 0.
        t_zeros = torch.zeros_like(s)
        gamma_0 = self.inflate_batch_array(self.gamma(t_zeros), ligand['x'])

        # Sample z_0 given x, h for timestep t, from q(z_t | x, h)
        z_0_lig, xh_pocket, eps_0_lig = \
            self.noised_representation(xh0_lig, xh0_pocket, ligand['mask'],
                                       pocket['mask'], gamma_0)

        net_out_0_lig, _ = self.dynamics(
            z_0_lig, xh_pocket, t_zeros, ligand['mask'], pocket['mask'])

        log_p_x_given_z0_without_constants_ligand, log_ph_given_z0 = \
            self.log_pxh_given_z0_without_constants(
                ligand, z_0_lig, eps_0_lig, net_out_0_lig, gamma_0)
        loss_0_x_ligand = -log_p_x_given_z0_without_constants_ligand
        loss_0_h = -log_ph_given_z0

    # sample size prior
    log_pN = self.log_pN(ligand['size'], pocket['size'])

    # Monitoring scalars: mean magnitude of the predicted noise.
    info = {
        'eps_hat_lig_x': scatter_mean(
            net_out_lig[:, :self.n_dims].abs().mean(1), ligand['mask'],
            dim=0).mean(),
        'eps_hat_lig_h': scatter_mean(
            net_out_lig[:, self.n_dims:].abs().mean(1), ligand['mask'],
            dim=0).mean(),
    }
    loss_terms = (delta_log_px, error_t_lig, torch.tensor(0.0), SNR_weight,
                  loss_0_x_ligand, torch.tensor(0.0), loss_0_h,
                  neg_log_constants, kl_prior, log_pN,
                  t_int.squeeze(), xh_lig_hat)
    return (*loss_terms, info) if return_info else loss_terms
|
| 331 |
+
|
| 332 |
+
def partially_noised_ligand(self, ligand, pocket, noising_steps):
    """
    Partially noises a ligand to be later denoised.

    Args:
        ligand, pocket: normalized input dicts ('x', 'one_hot', 'mask',
            'size').
        noising_steps: integer number of forward-diffusion steps.
    Returns:
        (z_t_lig, xh_pocket, eps_t_lig) from noised_representation().
    """
    # Inflate timestep into an array
    t_int = torch.ones(size=(ligand['size'].size(0), 1),
                       device=ligand['x'].device).float() * noising_steps

    # Normalize t to [0, 1].
    t = t_int / self.T

    # Compute gamma_s and gamma_t via the network.
    gamma_t = self.inflate_batch_array(self.gamma(t), ligand['x'])

    # Concatenate x, and h[categorical].
    xh0_lig = torch.cat([ligand['x'], ligand['one_hot']], dim=1)
    xh0_pocket = torch.cat([pocket['x'], pocket['one_hot']], dim=1)

    # Center the input nodes
    xh0_lig[:, :self.n_dims], xh0_pocket[:, :self.n_dims] = \
        self.remove_mean_batch(xh0_lig[:, :self.n_dims],
                               xh0_pocket[:, :self.n_dims],
                               ligand['mask'], pocket['mask'])

    # Find noised representation
    z_t_lig, xh_pocket, eps_t_lig = \
        self.noised_representation(xh0_lig, xh0_pocket, ligand['mask'],
                                   pocket['mask'], gamma_t)

    return z_t_lig, xh_pocket, eps_t_lig
|
| 363 |
+
|
| 364 |
+
def diversify(self, ligand, pocket, noising_steps):
    """
    Diversifies a set of ligands via noise-denoising: noise the input
    ligands for ``noising_steps`` steps, then denoise them back.

    Returns:
        (out_lig, out_pocket, lig_mask, pocket_mask) with coordinates
        and features concatenated along dim 1.
    """
    # Normalize data, take into account volume change in x.
    ligand, pocket = self.normalize(ligand, pocket)

    z_lig, xh_pocket, _ = self.partially_noised_ligand(ligand, pocket, noising_steps)

    timesteps = self.T
    n_samples = len(pocket['size'])
    device = pocket['x'].device  # NOTE(review): unused

    # xh0_pocket is the original pocket while xh_pocket might be a
    # translated version of it
    # NOTE(review): xh0_pocket is not used below in this method.
    xh0_pocket = torch.cat([pocket['x'], pocket['one_hot']], dim=1)

    lig_mask = ligand['mask']

    self.assert_mean_zero_with_mask(z_lig[:, :self.n_dims], lig_mask)

    # Iteratively sample p(z_s | z_t) for t = 1, ..., T, with s = t - 1.
    for s in reversed(range(0, noising_steps)):
        s_array = torch.full((n_samples, 1), fill_value=s,
                             device=z_lig.device)
        t_array = s_array + 1
        s_array = s_array / timesteps
        t_array = t_array / timesteps

        z_lig, xh_pocket = self.sample_p_zs_given_zt(
            s_array, t_array, z_lig.detach(), xh_pocket.detach(), lig_mask, pocket['mask'])

    # Finally sample p(x, h | z_0).
    x_lig, h_lig, x_pocket, h_pocket = self.sample_p_xh_given_z0(
        z_lig, xh_pocket, lig_mask, pocket['mask'], n_samples)

    self.assert_mean_zero_with_mask(x_lig, lig_mask)

    # Overwrite last frame with the resulting x and h.
    out_lig = torch.cat([x_lig, h_lig], dim=1)
    out_pocket = torch.cat([x_pocket, h_pocket], dim=1)

    # remove frame dimension if only the final molecule is returned
    return out_lig, out_pocket, lig_mask, pocket['mask']
|
| 410 |
+
|
| 411 |
+
|
| 412 |
+
def xh_given_zt_and_epsilon(self, z_t, epsilon, gamma_t, batch_mask):
    """Recover the data estimate xh from z_t and the predicted noise.

    Implements Equation (7) in the EDM paper:
    xh = z_t / alpha_t - epsilon * sigma_t / alpha_t.
    """
    alpha = self.alpha(gamma_t, z_t)[batch_mask]
    sigma = self.sigma(gamma_t, z_t)[batch_mask]
    return z_t / alpha - epsilon * sigma / alpha
def sample_p_zt_given_zs(self, zs_lig, xh0_pocket, ligand_mask, pocket_mask,
                         gamma_t, gamma_s, fix_noise=False):
    """Forward-noise one step: draw z_t ~ q(z_t | z_s) for the ligand.

    Used e.g. during inpainting resampling to re-noise a combined sample.
    Returns the noised ligand latent and the (possibly shifted) pocket.
    """
    _, sigma_t_given_s, alpha_t_given_s = self.sigma_and_alpha_t_given_s(
        gamma_t, gamma_s, zs_lig)

    mean = alpha_t_given_s[ligand_mask] * zs_lig
    return self.sample_normal_zero_com(mean, xh0_pocket, sigma_t_given_s,
                                       ligand_mask, pocket_mask, fix_noise)
def sample_p_zs_given_zt(self, s, t, zt_lig, xh0_pocket, ligand_mask,
                         pocket_mask, fix_noise=False):
    """Samples from zs ~ p(zs | zt). Only used during sampling.

    One reverse diffusion step t -> s for the ligand; the pocket acts as
    conditioning context. `s` and `t` are normalized time steps of shape
    (n_samples, 1). Returns (zs_lig, xh0_pocket), where xh0_pocket may be
    a translated copy of the input pocket (kept COM-consistent with the
    sampled ligand).
    """
    gamma_s = self.gamma(s)
    gamma_t = self.gamma(t)

    sigma2_t_given_s, sigma_t_given_s, alpha_t_given_s = \
        self.sigma_and_alpha_t_given_s(gamma_t, gamma_s, zt_lig)

    sigma_s = self.sigma(gamma_s, target_tensor=zt_lig)
    sigma_t = self.sigma(gamma_t, target_tensor=zt_lig)

    # Neural net prediction of the noise added at step t.
    eps_t_lig, _ = self.dynamics(
        zt_lig, xh0_pocket, t, ligand_mask, pocket_mask)

    # Compute mu for p(zs | zt).
    # Note: mu_{t->s} = 1 / alpha_{t|s} z_t - sigma_{t|s}^2 / sigma_t / alpha_{t|s} epsilon
    # follows from the definition of mu_{t->s} and Equ. (7) in the EDM paper
    mu_lig = zt_lig / alpha_t_given_s[ligand_mask] - \
             (sigma2_t_given_s / alpha_t_given_s / sigma_t)[ligand_mask] * \
             eps_t_lig

    # Compute sigma for p(zs | zt).
    sigma = sigma_t_given_s * sigma_s / sigma_t

    # Sample zs given the parameters derived from zt.
    zs_lig, xh0_pocket = self.sample_normal_zero_com(
        mu_lig, xh0_pocket, sigma, ligand_mask, pocket_mask, fix_noise)

    # NOTE(review): this asserts zero CoM of the *input* zt_lig, not of the
    # freshly sampled zs_lig — presumably zs_lig is zero-CoM by construction
    # of sample_normal_zero_com; confirm this check is intentional.
    self.assert_mean_zero_with_mask(zt_lig[:, :self.n_dims], ligand_mask)

    return zs_lig, xh0_pocket
def sample_combined_position_feature_noise(self, lig_indices, xh0_pocket,
                                           pocket_indices):
    """
    Samples mean-centered normal noise for z_x, and standard normal noise
    for z_h.

    Deliberately disabled in the conditional model; callers must use
    sample_normal_zero_com() instead.
    """
    msg = "Use sample_normal_zero_com() instead."
    raise NotImplementedError(msg)
def sample(self, *args):
    """Unconditional sampling is unsupported: a pocket is always required."""
    msg = ("Conditional model does not support sampling "
           "without given pocket.")
    raise NotImplementedError(msg)
@torch.no_grad()
def sample_given_pocket(self, pocket, num_nodes_lig, return_frames=1,
                        timesteps=None):
    """
    Draw samples from the generative model. Optionally, return intermediate
    states for visualization purposes.

    Args:
        pocket: dict with 'x', 'one_hot', 'mask', 'size' describing the
            fixed pocket context.
        num_nodes_lig: number of ligand nodes to generate per sample.
        return_frames: number of intermediate frames to keep (1 = only the
            final molecule); must evenly divide `timesteps`.
        timesteps: number of denoising steps (defaults to self.T).

    Returns:
        (out_lig, out_pocket, lig_mask, pocket_mask); the frame dimension
        is squeezed away when return_frames == 1.
    """
    timesteps = self.T if timesteps is None else timesteps
    assert 0 < return_frames <= timesteps
    assert timesteps % return_frames == 0

    n_samples = len(pocket['size'])
    device = pocket['x'].device

    _, pocket = self.normalize(pocket=pocket)

    # xh0_pocket is the original pocket while xh_pocket might be a
    # translated version of it
    xh0_pocket = torch.cat([pocket['x'], pocket['one_hot']], dim=1)

    lig_mask = utils.num_nodes_to_batch_mask(
        n_samples, num_nodes_lig, device)

    # Sample from Normal distribution in the pocket center
    mu_lig_x = scatter_mean(pocket['x'], pocket['mask'], dim=0)
    mu_lig_h = torch.zeros((n_samples, self.atom_nf), device=device)
    mu_lig = torch.cat((mu_lig_x, mu_lig_h), dim=1)[lig_mask]
    sigma = torch.ones_like(pocket['size']).unsqueeze(1)

    z_lig, xh_pocket = self.sample_normal_zero_com(
        mu_lig, xh0_pocket, sigma, lig_mask, pocket['mask'])

    self.assert_mean_zero_with_mask(z_lig[:, :self.n_dims], lig_mask)

    # Pre-allocate output frames (frame 0 holds the final state).
    out_lig = torch.zeros((return_frames,) + z_lig.size(),
                          device=z_lig.device)
    out_pocket = torch.zeros((return_frames,) + xh_pocket.size(),
                             device=device)

    # Iteratively sample p(z_s | z_t) for t = 1, ..., T, with s = t - 1.
    for s in reversed(range(0, timesteps)):
        s_array = torch.full((n_samples, 1), fill_value=s,
                             device=z_lig.device)
        t_array = s_array + 1
        s_array = s_array / timesteps
        t_array = t_array / timesteps

        z_lig, xh_pocket = self.sample_p_zs_given_zt(
            s_array, t_array, z_lig, xh_pocket, lig_mask, pocket['mask'])

        # save frame (every timesteps/return_frames steps)
        if (s * return_frames) % timesteps == 0:
            idx = (s * return_frames) // timesteps
            out_lig[idx], out_pocket[idx] = \
                self.unnormalize_z(z_lig, xh_pocket)

    # Finally sample p(x, h | z_0).
    x_lig, h_lig, x_pocket, h_pocket = self.sample_p_xh_given_z0(
        z_lig, xh_pocket, lig_mask, pocket['mask'], n_samples)

    self.assert_mean_zero_with_mask(x_lig, lig_mask)

    # Correct CoM drift for examples without intermediate states
    if return_frames == 1:
        max_cog = scatter_add(x_lig, lig_mask, dim=0).abs().max().item()
        if max_cog > 5e-2:
            print(f'Warning CoG drift with error {max_cog:.3f}. Projecting '
                  f'the positions down.')
            x_lig, x_pocket = self.remove_mean_batch(
                x_lig, x_pocket, lig_mask, pocket['mask'])

    # Overwrite last frame with the resulting x and h.
    out_lig[0] = torch.cat([x_lig, h_lig], dim=1)
    out_pocket[0] = torch.cat([x_pocket, h_pocket], dim=1)

    # remove frame dimension if only the final molecule is returned
    return out_lig.squeeze(0), out_pocket.squeeze(0), lig_mask, \
           pocket['mask']
@torch.no_grad()
def inpaint(self, ligand, pocket, lig_fixed, resamplings=1, return_frames=1,
            timesteps=None, center='ligand'):
    """
    Draw samples from the generative model while fixing parts of the input.
    Optionally, return intermediate states for visualization purposes.
    Inspired by Algorithm 1 in:
    Lugmayr, Andreas, et al.
    "Repaint: Inpainting using denoising diffusion probabilistic models."
    Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern
    Recognition. 2022.

    Args:
        ligand: dict with 'x', 'one_hot', 'mask', 'size' for the ligands.
        pocket: dict with 'x', 'one_hot', 'mask', 'size' for the pockets.
        lig_fixed: per-ligand-node indicator (1 = keep fixed, 0 = inpaint);
            1-D or (n_nodes, 1).
        resamplings: RePaint resampling iterations per time step.
        return_frames: number of intermediate frames to keep.
        timesteps: number of denoising steps (defaults to self.T).
        center: 'ligand' (COM of the fixed ligand nodes) or 'pocket'.
    """
    timesteps = self.T if timesteps is None else timesteps
    assert 0 < return_frames <= timesteps
    assert timesteps % return_frames == 0

    # Accept both 1-D and column-vector fixed masks.
    if len(lig_fixed.size()) == 1:
        lig_fixed = lig_fixed.unsqueeze(1)

    n_samples = len(ligand['size'])
    device = pocket['x'].device

    # Normalize
    ligand, pocket = self.normalize(ligand, pocket)

    # xh0_pocket is the original pocket while xh_pocket might be a
    # translated version of it
    xh0_pocket = torch.cat([pocket['x'], pocket['one_hot']], dim=1)
    com_pocket_0 = scatter_mean(pocket['x'], pocket['mask'], dim=0)
    xh0_ligand = torch.cat([ligand['x'], ligand['one_hot']], dim=1)
    xh_ligand = xh0_ligand.clone()

    # Center initial system, subtract COM of known parts
    if center == 'ligand':
        mean_known = scatter_mean(ligand['x'][lig_fixed.bool().view(-1)],
                                  ligand['mask'][lig_fixed.bool().view(-1)],
                                  dim=0)
    elif center == 'pocket':
        mean_known = scatter_mean(pocket['x'], pocket['mask'], dim=0)
    else:
        raise NotImplementedError(
            f"Centering option {center} not implemented")

    # Sample from Normal distribution in the ligand center
    mu_lig_x = mean_known
    mu_lig_h = torch.zeros((n_samples, self.atom_nf), device=device)
    mu_lig = torch.cat((mu_lig_x, mu_lig_h), dim=1)[ligand['mask']]
    sigma = torch.ones_like(pocket['size']).unsqueeze(1)

    z_lig, xh_pocket = self.sample_normal_zero_com(
        mu_lig, xh0_pocket, sigma, ligand['mask'], pocket['mask'])

    # Output tensors (frame 0 holds the final state).
    out_lig = torch.zeros((return_frames,) + z_lig.size(),
                          device=z_lig.device)
    out_pocket = torch.zeros((return_frames,) + xh_pocket.size(),
                             device=device)

    # Iteratively sample with resampling iterations
    for s in reversed(range(0, timesteps)):

        # resampling iterations
        for u in range(resamplings):

            # Denoise one time step: t -> s
            s_array = torch.full((n_samples, 1), fill_value=s,
                                 device=device)
            t_array = s_array + 1
            s_array = s_array / timesteps
            t_array = t_array / timesteps

            gamma_t = self.gamma(t_array)
            gamma_s = self.gamma(s_array)

            # sample inpainted part
            z_lig_unknown, xh_pocket = self.sample_p_zs_given_zt(
                s_array, t_array, z_lig, xh_pocket, ligand['mask'],
                pocket['mask'])

            # sample known nodes from the input: shift the clean ligand by
            # the pocket's accumulated translation before noising
            com_pocket = scatter_mean(xh_pocket[:, :self.n_dims],
                                      pocket['mask'], dim=0)
            xh_ligand[:, :self.n_dims] = \
                ligand['x'] + (com_pocket - com_pocket_0)[ligand['mask']]
            z_lig_known, xh_pocket, _ = self.noised_representation(
                xh_ligand, xh_pocket, ligand['mask'], pocket['mask'],
                gamma_s)

            # move center of mass of the noised part to the center of mass
            # of the corresponding denoised part before combining them
            # -> the resulting system should be COM-free
            com_noised = scatter_mean(
                z_lig_known[lig_fixed.bool().view(-1)][:, :self.n_dims],
                ligand['mask'][lig_fixed.bool().view(-1)], dim=0)
            com_denoised = scatter_mean(
                z_lig_unknown[lig_fixed.bool().view(-1)][:, :self.n_dims],
                ligand['mask'][lig_fixed.bool().view(-1)], dim=0)
            dx = com_denoised - com_noised
            z_lig_known[:, :self.n_dims] = z_lig_known[:, :self.n_dims] + dx[ligand['mask']]
            xh_pocket[:, :self.n_dims] = xh_pocket[:, :self.n_dims] + dx[pocket['mask']]

            # combine fixed (known) and generated (unknown) nodes
            z_lig = z_lig_known * lig_fixed + z_lig_unknown * (
                    1 - lig_fixed)

            if u < resamplings - 1:
                # Noise the sample again for the next resampling iteration
                z_lig, xh_pocket = self.sample_p_zt_given_zs(
                    z_lig, xh_pocket, ligand['mask'], pocket['mask'],
                    gamma_t, gamma_s)

            # save frame at the end of a resampling cycle
            if u == resamplings - 1:
                if (s * return_frames) % timesteps == 0:
                    idx = (s * return_frames) // timesteps

                    out_lig[idx], out_pocket[idx] = \
                        self.unnormalize_z(z_lig, xh_pocket)

    # Finally sample p(x, h | z_0).
    x_lig, h_lig, x_pocket, h_pocket = self.sample_p_xh_given_z0(
        z_lig, xh_pocket, ligand['mask'], pocket['mask'], n_samples)

    # Overwrite last frame with the resulting x and h.
    out_lig[0] = torch.cat([x_lig, h_lig], dim=1)
    out_pocket[0] = torch.cat([x_pocket, h_pocket], dim=1)

    # remove frame dimension if only the final molecule is returned
    return out_lig.squeeze(0), out_pocket.squeeze(0), ligand['mask'], \
           pocket['mask']
@classmethod
def remove_mean_batch(cls, x_lig, x_pocket, lig_indices, pocket_indices):
    """Translate each sample so the ligand center of mass sits at the origin.

    Only the ligand (the sampled part) defines the CoM; the identical shift
    is applied to the pocket so relative geometry is preserved.
    """
    com = scatter_mean(x_lig, lig_indices, dim=0)
    return x_lig - com[lig_indices], x_pocket - com[pocket_indices]
# ------------------------------------------------------------------------------
|
| 700 |
+
# The same model without subspace-trick
|
| 701 |
+
# ------------------------------------------------------------------------------
|
| 702 |
+
class SimpleConditionalDDPM(ConditionalDDPM):
    """
    Simpler conditional diffusion module without subspace-trick.
    - rotational equivariance is guaranteed by construction
    - translationally equivariant likelihood is achieved by first mapping
    samples to a space where the context is COM-free and evaluating the
    likelihood there
    - molecule generation is equivariant because we can first sample in the
    space where the context is COM-free and translate the whole system back to
    the original position of the context later
    """

    def subspace_dimensionality(self, input_size):
        """ Override because we don't use the linear subspace anymore. """
        # All n_dims coordinates per node are free dimensions here.
        return input_size * self.n_dims

    @classmethod
    def remove_mean_batch(cls, x_lig, x_pocket, lig_indices, pocket_indices):
        """ Hacky way of removing the centering steps without changing too much
        code. """
        # Centering is done once up-front (see forward / sample_given_pocket),
        # so this is a no-op.
        return x_lig, x_pocket

    @staticmethod
    def assert_mean_zero_with_mask(x, node_mask, eps=1e-10):
        # The zero-CoM invariant is not maintained in this variant.
        return

    def forward(self, ligand, pocket, return_info=False):
        """Shift the whole system into the pocket-COM-free frame, then train
        as usual."""
        com = scatter_mean(pocket['x'], pocket['mask'], dim=0)
        ligand['x'] = ligand['x'] - com[ligand['mask']]
        pocket['x'] = pocket['x'] - com[pocket['mask']]

        return super(SimpleConditionalDDPM, self).forward(
            ligand, pocket, return_info)

    @torch.no_grad()
    def sample_given_pocket(self, pocket, num_nodes_lig, return_frames=1,
                            timesteps=None):
        """Sample in the pocket-COM-free frame."""
        com = scatter_mean(pocket['x'], pocket['mask'], dim=0)
        pocket['x'] = pocket['x'] - com[pocket['mask']]

        return super(SimpleConditionalDDPM, self).sample_given_pocket(
            pocket, num_nodes_lig, return_frames, timesteps)
|
equivariant_diffusion/dynamics.py
ADDED
|
@@ -0,0 +1,187 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
import torch.nn as nn
|
| 3 |
+
import torch.nn.functional as F
|
| 4 |
+
from equivariant_diffusion.egnn_new import EGNN, GNN
|
| 5 |
+
from equivariant_diffusion.en_diffusion import EnVariationalDiffusion
|
| 6 |
+
remove_mean_batch = EnVariationalDiffusion.remove_mean_batch
|
| 7 |
+
import numpy as np
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class EGNNDynamics(nn.Module):
    """Noise-prediction network for the diffusion model.

    Embeds ligand atoms and pocket residues into a shared feature space,
    runs an (E)GNN over the joint graph, and returns per-node coordinate
    "velocities" and decoded features for both node types.
    """

    def __init__(self, atom_nf, residue_nf,
                 n_dims, joint_nf=16, hidden_nf=64, device='cpu',
                 act_fn=torch.nn.SiLU(), n_layers=4, attention=False,
                 condition_time=True, tanh=False, mode='egnn_dynamics',
                 norm_constant=0, inv_sublayers=2, sin_embedding=False,
                 normalization_factor=100, aggregation_method='sum',
                 update_pocket_coords=True, edge_cutoff_ligand=None,
                 edge_cutoff_pocket=None, edge_cutoff_interaction=None,
                 reflection_equivariant=True, edge_embedding_dim=None):
        super().__init__()
        self.mode = mode
        # Distance cutoffs (None = fully connected) per edge category.
        self.edge_cutoff_l = edge_cutoff_ligand
        self.edge_cutoff_p = edge_cutoff_pocket
        self.edge_cutoff_i = edge_cutoff_interaction
        self.edge_nf = edge_embedding_dim

        # Encoders/decoders map atom and residue features into/out of a
        # shared joint_nf-dimensional space.
        self.atom_encoder = nn.Sequential(
            nn.Linear(atom_nf, 2 * atom_nf),
            act_fn,
            nn.Linear(2 * atom_nf, joint_nf)
        )

        self.atom_decoder = nn.Sequential(
            nn.Linear(joint_nf, 2 * atom_nf),
            act_fn,
            nn.Linear(2 * atom_nf, atom_nf)
        )

        self.residue_encoder = nn.Sequential(
            nn.Linear(residue_nf, 2 * residue_nf),
            act_fn,
            nn.Linear(2 * residue_nf, joint_nf)
        )

        self.residue_decoder = nn.Sequential(
            nn.Linear(joint_nf, 2 * residue_nf),
            act_fn,
            nn.Linear(2 * residue_nf, residue_nf)
        )

        # Optional learnable embedding for the 3 edge categories
        # (ligand-pocket / ligand-ligand / pocket-pocket).
        self.edge_embedding = nn.Embedding(3, self.edge_nf) \
            if self.edge_nf is not None else None
        self.edge_nf = 0 if self.edge_nf is None else self.edge_nf

        if condition_time:
            # One extra scalar feature carries the diffusion time step.
            dynamics_node_nf = joint_nf + 1
        else:
            print('Warning: dynamics model is _not_ conditioned on time.')
            dynamics_node_nf = joint_nf

        if mode == 'egnn_dynamics':
            self.egnn = EGNN(
                in_node_nf=dynamics_node_nf, in_edge_nf=self.edge_nf,
                hidden_nf=hidden_nf, device=device, act_fn=act_fn,
                n_layers=n_layers, attention=attention, tanh=tanh,
                norm_constant=norm_constant,
                inv_sublayers=inv_sublayers, sin_embedding=sin_embedding,
                normalization_factor=normalization_factor,
                aggregation_method=aggregation_method,
                reflection_equiv=reflection_equivariant
            )
            self.node_nf = dynamics_node_nf
            self.update_pocket_coords = update_pocket_coords

        elif mode == 'gnn_dynamics':
            # Non-equivariant baseline: coordinates are plain input features.
            self.gnn = GNN(
                in_node_nf=dynamics_node_nf + n_dims, in_edge_nf=self.edge_nf,
                hidden_nf=hidden_nf, out_node_nf=n_dims + dynamics_node_nf,
                device=device, act_fn=act_fn, n_layers=n_layers,
                attention=attention, normalization_factor=normalization_factor,
                aggregation_method=aggregation_method)

        self.device = device
        self.n_dims = n_dims
        self.condition_time = condition_time

    def forward(self, xh_atoms, xh_residues, t, mask_atoms, mask_residues):
        """Predict coordinate updates and feature outputs for both node sets.

        xh_* concatenate coordinates (first n_dims columns) and features;
        mask_* are per-node batch indices. Returns a (ligand, pocket) pair
        of [vel, features] tensors.
        """
        x_atoms = xh_atoms[:, :self.n_dims].clone()
        h_atoms = xh_atoms[:, self.n_dims:].clone()

        x_residues = xh_residues[:, :self.n_dims].clone()
        h_residues = xh_residues[:, self.n_dims:].clone()

        # embed atom features and residue features in a shared space
        h_atoms = self.atom_encoder(h_atoms)
        h_residues = self.residue_encoder(h_residues)

        # combine the two node types (atoms first, residues appended)
        x = torch.cat((x_atoms, x_residues), dim=0)
        h = torch.cat((h_atoms, h_residues), dim=0)
        mask = torch.cat([mask_atoms, mask_residues])

        if self.condition_time:
            if np.prod(t.size()) == 1:
                # t is the same for all elements in batch.
                h_time = torch.empty_like(h[:, 0:1]).fill_(t.item())
            else:
                # t is different over the batch dimension.
                h_time = t[mask]
            h = torch.cat([h, h_time], dim=1)

        # get edges of a complete graph (optionally distance-pruned)
        edges = self.get_edges(mask_atoms, mask_residues, x_atoms, x_residues)
        assert torch.all(mask[edges[0]] == mask[edges[1]])

        # Get edge types
        if self.edge_nf > 0:
            # 0: ligand-pocket, 1: ligand-ligand, 2: pocket-pocket
            edge_types = torch.zeros(edges.size(1), dtype=int, device=edges.device)
            edge_types[(edges[0] < len(mask_atoms)) & (edges[1] < len(mask_atoms))] = 1
            edge_types[(edges[0] >= len(mask_atoms)) & (edges[1] >= len(mask_atoms))] = 2

            # Learnable embedding
            edge_types = self.edge_embedding(edge_types)
        else:
            edge_types = None

        if self.mode == 'egnn_dynamics':
            # When pocket coords are frozen, mask their coordinate updates.
            update_coords_mask = None if self.update_pocket_coords \
                else torch.cat((torch.ones_like(mask_atoms),
                                torch.zeros_like(mask_residues))).unsqueeze(1)
            h_final, x_final = self.egnn(h, x, edges,
                                         update_coords_mask=update_coords_mask,
                                         batch_mask=mask, edge_attr=edge_types)
            vel = (x_final - x)

        elif self.mode == 'gnn_dynamics':
            xh = torch.cat([x, h], dim=1)
            output = self.gnn(xh, edges, node_mask=None, edge_attr=edge_types)
            vel = output[:, :3]
            h_final = output[:, 3:]

        else:
            raise Exception("Wrong mode %s" % self.mode)

        if self.condition_time:
            # Slice off last dimension which represented time.
            h_final = h_final[:, :-1]

        # decode atom and residue features
        h_final_atoms = self.atom_decoder(h_final[:len(mask_atoms)])
        h_final_residues = self.residue_decoder(h_final[len(mask_atoms):])

        if torch.any(torch.isnan(vel)):
            # During training, zero out NaNs so the run survives a bad step;
            # at inference, fail loudly instead.
            if self.training:
                vel[torch.isnan(vel)] = 0.0
            else:
                raise ValueError("NaN detected in EGNN output")

        if self.update_pocket_coords:
            # in case of unconditional joint distribution, include this as in
            # the original code
            vel = remove_mean_batch(vel, mask)

        return torch.cat([vel[:len(mask_atoms)], h_final_atoms], dim=-1), \
               torch.cat([vel[len(mask_atoms):], h_final_residues], dim=-1)

    def get_edges(self, batch_mask_ligand, batch_mask_pocket, x_ligand, x_pocket):
        """Build the joint edge index: fully connected within each sample,
        optionally pruned by per-category distance cutoffs."""
        adj_ligand = batch_mask_ligand[:, None] == batch_mask_ligand[None, :]
        adj_pocket = batch_mask_pocket[:, None] == batch_mask_pocket[None, :]
        adj_cross = batch_mask_ligand[:, None] == batch_mask_pocket[None, :]

        if self.edge_cutoff_l is not None:
            adj_ligand = adj_ligand & (torch.cdist(x_ligand, x_ligand) <= self.edge_cutoff_l)

        if self.edge_cutoff_p is not None:
            adj_pocket = adj_pocket & (torch.cdist(x_pocket, x_pocket) <= self.edge_cutoff_p)

        if self.edge_cutoff_i is not None:
            adj_cross = adj_cross & (torch.cdist(x_ligand, x_pocket) <= self.edge_cutoff_i)

        # Assemble the full block adjacency [[L-L, L-P], [P-L, P-P]].
        adj = torch.cat((torch.cat((adj_ligand, adj_cross), dim=1),
                         torch.cat((adj_cross.T, adj_pocket), dim=1)), dim=0)
        edges = torch.stack(torch.where(adj), dim=0)

        return edges
|
equivariant_diffusion/egnn_new.py
ADDED
|
@@ -0,0 +1,335 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from torch import nn
|
| 2 |
+
import torch
|
| 3 |
+
import math
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class GCL(nn.Module):
    """Graph Convolutional Layer: message passing over node features only
    (coordinates are handled separately by EquivariantUpdate)."""

    def __init__(self, input_nf, output_nf, hidden_nf, normalization_factor, aggregation_method,
                 edges_in_d=0, nodes_att_dim=0, act_fn=nn.SiLU(), attention=False):
        super(GCL, self).__init__()
        input_edge = input_nf * 2
        self.normalization_factor = normalization_factor
        self.aggregation_method = aggregation_method
        self.attention = attention

        # phi_e: builds edge messages from both endpoint features (+ edge attrs)
        self.edge_mlp = nn.Sequential(
            nn.Linear(input_edge + edges_in_d, hidden_nf),
            act_fn,
            nn.Linear(hidden_nf, hidden_nf),
            act_fn)

        # phi_h: updates node features from aggregated messages
        self.node_mlp = nn.Sequential(
            nn.Linear(hidden_nf + input_nf + nodes_att_dim, hidden_nf),
            act_fn,
            nn.Linear(hidden_nf, output_nf))

        if self.attention:
            # Scalar gate in (0, 1) per edge message.
            self.att_mlp = nn.Sequential(
                nn.Linear(hidden_nf, 1),
                nn.Sigmoid())

    def edge_model(self, source, target, edge_attr, edge_mask):
        """Compute (possibly gated/masked) messages for each edge.
        Returns (out, mij) — the gated message and the raw message."""
        if edge_attr is None:  # Unused.
            out = torch.cat([source, target], dim=1)
        else:
            out = torch.cat([source, target, edge_attr], dim=1)
        mij = self.edge_mlp(out)

        if self.attention:
            att_val = self.att_mlp(mij)
            out = mij * att_val
        else:
            out = mij

        if edge_mask is not None:
            out = out * edge_mask
        return out, mij

    def node_model(self, x, edge_index, edge_attr, node_attr):
        """Aggregate incoming edge messages per node and apply a residual
        feature update. Returns (updated features, MLP input)."""
        row, col = edge_index
        # Sum messages arriving at each node (row = receiving node index).
        agg = unsorted_segment_sum(edge_attr, row, num_segments=x.size(0),
                                   normalization_factor=self.normalization_factor,
                                   aggregation_method=self.aggregation_method)
        if node_attr is not None:
            agg = torch.cat([x, agg, node_attr], dim=1)
        else:
            agg = torch.cat([x, agg], dim=1)
        # Residual connection around the node MLP.
        out = x + self.node_mlp(agg)
        return out, agg

    def forward(self, h, edge_index, edge_attr=None, node_attr=None, node_mask=None, edge_mask=None):
        """One message-passing step; returns updated node features and the
        raw edge messages mij."""
        row, col = edge_index
        edge_feat, mij = self.edge_model(h[row], h[col], edge_attr, edge_mask)
        h, agg = self.node_model(h, edge_index, edge_feat, node_attr)
        if node_mask is not None:
            h = h * node_mask
        return h, mij
+
class EquivariantUpdate(nn.Module):
    """E(n)-equivariant coordinate update layer.

    Moves each node along pairwise difference vectors (and, optionally,
    cross-product directions to break reflection symmetry), with scalar
    weights predicted from node/edge features.
    """

    def __init__(self, hidden_nf, normalization_factor, aggregation_method,
                 edges_in_d=1, act_fn=nn.SiLU(), tanh=False, coords_range=10.0,
                 reflection_equiv=True):
        super(EquivariantUpdate, self).__init__()
        self.tanh = tanh
        self.coords_range = coords_range
        # If False, an extra cross-product term makes updates chiral-aware.
        self.reflection_equiv = reflection_equiv
        input_edge = hidden_nf * 2 + edges_in_d
        # Small init gain keeps initial coordinate updates near zero.
        layer = nn.Linear(hidden_nf, 1, bias=False)
        torch.nn.init.xavier_uniform_(layer.weight, gain=0.001)
        self.coord_mlp = nn.Sequential(
            nn.Linear(input_edge, hidden_nf),
            act_fn,
            nn.Linear(hidden_nf, hidden_nf),
            act_fn,
            layer)
        # NOTE(review): the final `layer` module is shared between
        # coord_mlp and cross_product_mlp — presumably intentional weight
        # sharing; confirm.
        self.cross_product_mlp = nn.Sequential(
            nn.Linear(input_edge, hidden_nf),
            act_fn,
            nn.Linear(hidden_nf, hidden_nf),
            act_fn,
            layer
        ) if not self.reflection_equiv else None
        self.normalization_factor = normalization_factor
        self.aggregation_method = aggregation_method

    def coord_model(self, h, coord, edge_index, coord_diff, coord_cross,
                    edge_attr, edge_mask, update_coords_mask=None):
        """Compute the aggregated coordinate displacement for every node.

        coord_diff / coord_cross are precomputed per-edge direction vectors;
        tanh (when enabled) bounds the per-edge step to +/- coords_range.
        """
        row, col = edge_index
        input_tensor = torch.cat([h[row], h[col], edge_attr], dim=1)
        if self.tanh:
            trans = coord_diff * torch.tanh(self.coord_mlp(input_tensor)) * self.coords_range
        else:
            trans = coord_diff * self.coord_mlp(input_tensor)

        if not self.reflection_equiv:
            # Add reflection-breaking component along the cross-product axis.
            phi_cross = self.cross_product_mlp(input_tensor)
            if self.tanh:
                phi_cross = torch.tanh(phi_cross) * self.coords_range
            trans = trans + coord_cross * phi_cross

        if edge_mask is not None:
            trans = trans * edge_mask

        agg = unsorted_segment_sum(trans, row, num_segments=coord.size(0),
                                   normalization_factor=self.normalization_factor,
                                   aggregation_method=self.aggregation_method)

        if update_coords_mask is not None:
            # Zero the displacement for nodes whose coords are frozen.
            agg = update_coords_mask * agg

        coord = coord + agg
        return coord

    def forward(self, h, coord, edge_index, coord_diff, coord_cross,
                edge_attr=None, node_mask=None, edge_mask=None,
                update_coords_mask=None):
        """Apply the equivariant coordinate update; returns new coordinates."""
        coord = self.coord_model(h, coord, edge_index, coord_diff, coord_cross,
                                 edge_attr, edge_mask,
                                 update_coords_mask=update_coords_mask)
        if node_mask is not None:
            coord = coord * node_mask
        return coord
class EquivariantBlock(nn.Module):
    """One EGNN block: `n_layers` invariant GCL message-passing layers
    followed by a single equivariant coordinate update (`EquivariantUpdate`).

    Relies on `GCL`, `EquivariantUpdate`, `coord2diff` and `coord2cross`
    defined elsewhere in this module.
    """

    def __init__(self, hidden_nf, edge_feat_nf=2, device='cpu', act_fn=nn.SiLU(), n_layers=2, attention=True,
                 norm_diff=True, tanh=False, coords_range=15, norm_constant=1, sin_embedding=None,
                 normalization_factor=100, aggregation_method='sum', reflection_equiv=True):
        super(EquivariantBlock, self).__init__()
        self.hidden_nf = hidden_nf
        self.device = device
        self.n_layers = n_layers
        self.coords_range_layer = float(coords_range)
        # NOTE(review): norm_diff is stored but not read in this block's
        # forward — kept for interface compatibility.
        self.norm_diff = norm_diff
        self.norm_constant = norm_constant
        self.sin_embedding = sin_embedding
        self.normalization_factor = normalization_factor
        self.aggregation_method = aggregation_method
        self.reflection_equiv = reflection_equiv

        # Submodule registration order determines state_dict keys:
        # gcl_0 ... gcl_{n_layers-1}, then gcl_equiv.
        for i in range(0, n_layers):
            self.add_module("gcl_%d" % i, GCL(self.hidden_nf, self.hidden_nf, self.hidden_nf, edges_in_d=edge_feat_nf,
                                              act_fn=act_fn, attention=attention,
                                              normalization_factor=self.normalization_factor,
                                              aggregation_method=self.aggregation_method))
        self.add_module("gcl_equiv", EquivariantUpdate(hidden_nf, edges_in_d=edge_feat_nf, act_fn=nn.SiLU(), tanh=tanh,
                                                       coords_range=self.coords_range_layer,
                                                       normalization_factor=self.normalization_factor,
                                                       aggregation_method=self.aggregation_method,
                                                       reflection_equiv=self.reflection_equiv))
        self.to(self.device)

    def forward(self, h, x, edge_index, node_mask=None, edge_mask=None,
                edge_attr=None, update_coords_mask=None, batch_mask=None):
        # Edit Emiel: Remove velocity as input
        # `distances` are squared edge lengths (the `radial` output of
        # coord2diff); `coord_diff` are softly-normalized difference vectors.
        distances, coord_diff = coord2diff(x, edge_index, self.norm_constant)
        if self.reflection_equiv:
            # Cross products break reflection symmetry, so skip them here.
            coord_cross = None
        else:
            coord_cross = coord2cross(x, edge_index, batch_mask,
                                      self.norm_constant)
        if self.sin_embedding is not None:
            distances = self.sin_embedding(distances)
        # Prepend the (possibly embedded) fresh distances to the incoming
        # edge attributes for every invariant layer below.
        edge_attr = torch.cat([distances, edge_attr], dim=1)
        for i in range(0, self.n_layers):
            h, _ = self._modules["gcl_%d" % i](h, edge_index, edge_attr=edge_attr,
                                               node_mask=node_mask, edge_mask=edge_mask)
        x = self._modules["gcl_equiv"](h, x, edge_index, coord_diff, coord_cross, edge_attr,
                                       node_mask, edge_mask, update_coords_mask=update_coords_mask)

        # Important, the bias of the last linear might be non-zero
        if node_mask is not None:
            h = h * node_mask
        return h, x
|
| 185 |
+
|
| 186 |
+
|
| 187 |
+
class EGNN(nn.Module):
    """E(n)-equivariant graph neural network.

    Pipeline: linear input embedding -> `n_layers` EquivariantBlocks
    (each with `inv_sublayers` invariant GCLs + one coordinate update)
    -> linear output embedding. Node features `h` transform invariantly,
    coordinates `x` equivariantly.
    """

    def __init__(self, in_node_nf, in_edge_nf, hidden_nf, device='cpu', act_fn=nn.SiLU(), n_layers=3, attention=False,
                 norm_diff=True, out_node_nf=None, tanh=False, coords_range=15, norm_constant=1, inv_sublayers=2,
                 sin_embedding=False, normalization_factor=100, aggregation_method='sum', reflection_equiv=True):
        super(EGNN, self).__init__()
        if out_node_nf is None:
            out_node_nf = in_node_nf
        self.hidden_nf = hidden_nf
        self.device = device
        self.n_layers = n_layers
        # NOTE(review): coords_range_layer divides the range over the blocks,
        # but the blocks below are constructed with the undivided
        # `coords_range` — confirm which is intended.
        self.coords_range_layer = float(coords_range/n_layers)
        self.norm_diff = norm_diff
        self.normalization_factor = normalization_factor
        self.aggregation_method = aggregation_method
        self.reflection_equiv = reflection_equiv

        if sin_embedding:
            self.sin_embedding = SinusoidsEmbeddingNew()
            # *2 because distances are embedded twice: once in this module's
            # forward (as edge_attr) and once inside each EquivariantBlock.
            edge_feat_nf = self.sin_embedding.dim * 2
        else:
            self.sin_embedding = None
            edge_feat_nf = 2

        edge_feat_nf = edge_feat_nf + in_edge_nf

        self.embedding = nn.Linear(in_node_nf, self.hidden_nf)
        self.embedding_out = nn.Linear(self.hidden_nf, out_node_nf)
        for i in range(0, n_layers):
            self.add_module("e_block_%d" % i, EquivariantBlock(hidden_nf, edge_feat_nf=edge_feat_nf, device=device,
                                                               act_fn=act_fn, n_layers=inv_sublayers,
                                                               attention=attention, norm_diff=norm_diff, tanh=tanh,
                                                               coords_range=coords_range, norm_constant=norm_constant,
                                                               sin_embedding=self.sin_embedding,
                                                               normalization_factor=self.normalization_factor,
                                                               aggregation_method=self.aggregation_method,
                                                               reflection_equiv=self.reflection_equiv))
        self.to(self.device)

    def forward(self, h, x, edge_index, node_mask=None, edge_mask=None, update_coords_mask=None,
                batch_mask=None, edge_attr=None):
        # Edit Emiel: Remove velocity as input
        # Squared edge lengths used as a base edge feature.
        edge_feat, _ = coord2diff(x, edge_index)
        if self.sin_embedding is not None:
            edge_feat = self.sin_embedding(edge_feat)
        if edge_attr is not None:
            edge_feat = torch.cat([edge_feat, edge_attr], dim=1)
        h = self.embedding(h)
        for i in range(0, self.n_layers):
            h, x = self._modules["e_block_%d" % i](
                h, x, edge_index, node_mask=node_mask, edge_mask=edge_mask,
                edge_attr=edge_feat, update_coords_mask=update_coords_mask,
                batch_mask=batch_mask)

        # Important, the bias of the last linear might be non-zero
        h = self.embedding_out(h)
        if node_mask is not None:
            h = h * node_mask
        return h, x
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
class GNN(nn.Module):
    """Plain (non-equivariant) graph neural network over node features only:
    linear embedding -> `n_layers` GCLs -> linear output embedding.

    Relies on `GCL` defined elsewhere in this module; coordinates are not
    used or updated.
    """

    def __init__(self, in_node_nf, in_edge_nf, hidden_nf, aggregation_method='sum', device='cpu',
                 act_fn=nn.SiLU(), n_layers=4, attention=False,
                 normalization_factor=1, out_node_nf=None):
        super(GNN, self).__init__()
        if out_node_nf is None:
            out_node_nf = in_node_nf
        self.hidden_nf = hidden_nf
        self.device = device
        self.n_layers = n_layers
        ### Encoder
        self.embedding = nn.Linear(in_node_nf, self.hidden_nf)
        self.embedding_out = nn.Linear(self.hidden_nf, out_node_nf)
        # Registration order fixes state_dict keys gcl_0 ... gcl_{n-1}.
        for i in range(0, n_layers):
            self.add_module("gcl_%d" % i, GCL(
                self.hidden_nf, self.hidden_nf, self.hidden_nf,
                normalization_factor=normalization_factor,
                aggregation_method=aggregation_method,
                edges_in_d=in_edge_nf, act_fn=act_fn,
                attention=attention))
        self.to(self.device)

    def forward(self, h, edges, edge_attr=None, node_mask=None, edge_mask=None):
        # Edit Emiel: Remove velocity as input
        h = self.embedding(h)
        for i in range(0, self.n_layers):
            h, _ = self._modules["gcl_%d" % i](h, edges, edge_attr=edge_attr, node_mask=node_mask, edge_mask=edge_mask)
        h = self.embedding_out(h)

        # Important, the bias of the last linear might be non-zero
        if node_mask is not None:
            h = h * node_mask
        return h
|
| 280 |
+
|
| 281 |
+
|
| 282 |
+
class SinusoidsEmbeddingNew(nn.Module):
    """Fixed (non-trainable) sinusoidal embedding of squared distances.

    Frequencies grow geometrically by `div_factor` between resolutions
    `max_res` and `min_res`. The input is square-rooted in `forward`,
    so callers pass *squared* distances. Output dimension is `self.dim`.
    """

    def __init__(self, max_res=15., min_res=15. / 2000., div_factor=4):
        super().__init__()
        n_freq = int(math.log(max_res / min_res, div_factor)) + 1
        self.n_frequencies = n_freq
        # NOTE: a plain tensor attribute (not a registered buffer); it is
        # moved to the input's device on every forward call.
        self.frequencies = 2 * math.pi * div_factor ** torch.arange(n_freq) / max_res
        self.dim = 2 * len(self.frequencies)

    def forward(self, x):
        dist = torch.sqrt(x + 1e-8)  # safe sqrt of the squared distance
        phase = dist * self.frequencies[None, :].to(x.device)
        embedding = torch.cat((phase.sin(), phase.cos()), dim=-1)
        # Fixed featurization: no gradients flow through the embedding.
        return embedding.detach()
|
| 294 |
+
|
| 295 |
+
|
| 296 |
+
def coord2diff(x, edge_index, norm_constant=1):
    """Per-edge squared lengths and softly-normalized difference vectors.

    Returns:
        radial: (n_edges, 1) squared distances ||x_row - x_col||^2.
        coord_diff: difference vectors divided by (length + norm_constant),
            so they stay finite even for coincident endpoints.
    """
    src, dst = edge_index
    rel = x[src] - x[dst]
    radial = torch.sum(rel ** 2, 1).unsqueeze(1)
    length = torch.sqrt(radial + 1e-8)  # eps keeps the sqrt differentiable at 0
    return radial, rel / (length + norm_constant)
|
| 303 |
+
|
| 304 |
+
|
| 305 |
+
def coord2cross(x, edge_index, batch_mask, norm_constant=1):
    """Per-edge cross product of the endpoint positions taken relative to
    their graph's mean position, scaled by 1 / (||cross|| + norm_constant).

    `batch_mask` assigns each node to a graph in the batch; the mean is
    computed per graph via `unsorted_segment_sum` in 'mean' mode.
    """
    # Mean position of each graph in the batch.
    com = unsorted_segment_sum(x, batch_mask,
                               num_segments=batch_mask.max() + 1,
                               normalization_factor=None,
                               aggregation_method='mean')
    src, dst = edge_index
    rel_src = x[src] - com[batch_mask[src]]
    rel_dst = x[dst] - com[batch_mask[dst]]
    prod = torch.cross(rel_src, rel_dst, dim=1)
    magnitude = torch.linalg.norm(prod, dim=1, keepdim=True)
    return prod / (magnitude + norm_constant)
|
| 317 |
+
|
| 318 |
+
|
| 319 |
+
def unsorted_segment_sum(data, segment_ids, num_segments, normalization_factor, aggregation_method: str):
    """Scatter rows of `data` into `num_segments` buckets given by
    `segment_ids` (replicates TensorFlow's `unsorted_segment_sum`).

    aggregation_method:
        'sum'  -- divide the summed result by `normalization_factor`;
        'mean' -- divide by the per-segment element count (empty segments
                  are left at zero, not NaN).
    Any other value returns the raw sum.
    """
    n_cols = data.size(1)
    index = segment_ids.unsqueeze(-1).expand(-1, n_cols)
    result = data.new_full((num_segments, n_cols), 0)  # zero-initialized accumulator
    result.scatter_add_(0, index, data)
    if aggregation_method == 'sum':
        result = result / normalization_factor

    if aggregation_method == 'mean':
        counts = data.new_zeros(result.shape)
        counts.scatter_add_(0, index, data.new_ones(data.shape))
        counts[counts == 0] = 1  # avoid division by zero for empty segments
        result = result / counts
    return result
|
equivariant_diffusion/en_diffusion.py
ADDED
|
@@ -0,0 +1,1190 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import math
|
| 2 |
+
from typing import Dict
|
| 3 |
+
|
| 4 |
+
import numpy as np
|
| 5 |
+
import torch
|
| 6 |
+
from torch import nn
|
| 7 |
+
import torch.nn.functional as F
|
| 8 |
+
from torch_scatter import scatter_add, scatter_mean
|
| 9 |
+
|
| 10 |
+
import utils
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class EnVariationalDiffusion(nn.Module):
|
| 14 |
+
"""
|
| 15 |
+
The E(n) Diffusion Module.
|
| 16 |
+
"""
|
| 17 |
+
|
| 18 |
+
    def __init__(
            self,
            dynamics: nn.Module, atom_nf: int, residue_nf: int,
            n_dims: int, size_histogram: Dict,
            timesteps: int = 1000, parametrization='eps',
            noise_schedule='learned', noise_precision=1e-4,
            loss_type='vlb', norm_values=(1., 1.), norm_biases=(None, 0.),
            virtual_node_idx=None):
        """Set up the diffusion model.

        Args:
            dynamics: denoising network predicting eps for ligand and pocket.
            atom_nf: number of ligand atom feature channels (one-hot classes).
            residue_nf: number of pocket residue feature channels.
            n_dims: spatial dimensionality of coordinates (first n_dims
                channels of each z vector).
            size_histogram: histogram used to build the joint prior over
                ligand/pocket sizes (DistributionNodes).
            timesteps: number of diffusion steps T.
            parametrization: only 'eps' is supported (asserted below).
            noise_schedule: 'learned' (GammaNetwork) or a predefined schedule.
            noise_precision: precision for the predefined schedule.
            loss_type: 'vlb' or 'l2'; a learned schedule requires 'vlb'.
            norm_values / norm_biases: (x, h) normalization scale and bias.
            virtual_node_idx: index of the virtual-node class, if any.
        """
        super().__init__()

        assert loss_type in {'vlb', 'l2'}
        self.loss_type = loss_type
        if noise_schedule == 'learned':
            assert loss_type == 'vlb', 'A noise schedule can only be learned' \
                                       ' with a vlb objective.'

        # Only supported parametrization.
        assert parametrization == 'eps'

        if noise_schedule == 'learned':
            self.gamma = GammaNetwork()
        else:
            self.gamma = PredefinedNoiseSchedule(noise_schedule,
                                                 timesteps=timesteps,
                                                 precision=noise_precision)

        # The network that will predict the denoising.
        self.dynamics = dynamics

        self.atom_nf = atom_nf
        self.residue_nf = residue_nf
        self.n_dims = n_dims
        self.num_classes = self.atom_nf

        self.T = timesteps
        self.parametrization = parametrization

        self.norm_values = norm_values
        self.norm_biases = norm_biases
        # Dummy buffer so the module always knows its device/dtype.
        self.register_buffer('buffer', torch.zeros(1))

        # distribution of nodes
        self.size_distribution = DistributionNodes(size_histogram)

        # indicate if virtual nodes are present
        self.vnode_idx = virtual_node_idx

        # A learned gamma has no fixed values at init time, so the sanity
        # check only runs for predefined schedules.
        if noise_schedule != 'learned':
            self.check_issues_norm_values()
|
| 67 |
+
|
| 68 |
+
    def check_issues_norm_values(self, num_stdevs=8):
        """Sanity-check that the h-normalization is compatible with the
        noise level at t=0.

        Raises:
            ValueError: if `num_stdevs` standard deviations of the t=0 noise
                exceed one (un-normalized) category step, 1 / norm_values[1].
        """
        zeros = torch.zeros((1, 1))
        gamma_0 = self.gamma(zeros)
        sigma_0 = self.sigma(gamma_0, target_tensor=zeros).item()

        # Check that 1 / norm_value is still larger than num_stdevs standard
        # deviations of sigma_0.
        norm_value = self.norm_values[1]

        if sigma_0 * num_stdevs > 1. / norm_value:
            raise ValueError(
                f'Value for normalization value {norm_value} probably too '
                f'large with sigma_0 {sigma_0:.5f} and '
                f'1 / norm_value = {1. / norm_value}')
|
| 82 |
+
|
| 83 |
+
    def sigma_and_alpha_t_given_s(self, gamma_t: torch.Tensor,
                                  gamma_s: torch.Tensor,
                                  target_tensor: torch.Tensor):
        """
        Computes sigma t given s, using gamma_t and gamma_s. Used during sampling.
        These are defined as:
            alpha t given s = alpha t / alpha s,
            sigma t given s = sqrt(1 - (alpha t given s) ^2 ).

        All quantities are inflated (broadcast-shaped) to match
        `target_tensor`'s rank via `inflate_batch_array`.
        """
        # sigma^2_{t|s} = 1 - (alpha_t/alpha_s)^2, computed in log space:
        # -expm1(softplus(gamma_s) - softplus(gamma_t)) for numerical
        # stability at extreme gammas.
        sigma2_t_given_s = self.inflate_batch_array(
            -torch.expm1(F.softplus(gamma_s) - F.softplus(gamma_t)), target_tensor
        )

        # alpha_t_given_s = alpha_t / alpha_s
        # (using alpha^2 = sigmoid(-gamma), hence logsigmoid in log space).
        log_alpha2_t = F.logsigmoid(-gamma_t)
        log_alpha2_s = F.logsigmoid(-gamma_s)
        log_alpha2_t_given_s = log_alpha2_t - log_alpha2_s

        alpha_t_given_s = torch.exp(0.5 * log_alpha2_t_given_s)
        alpha_t_given_s = self.inflate_batch_array(
            alpha_t_given_s, target_tensor)

        sigma_t_given_s = torch.sqrt(sigma2_t_given_s)

        return sigma2_t_given_s, sigma_t_given_s, alpha_t_given_s
|
| 108 |
+
|
| 109 |
+
    def kl_prior_with_pocket(self, xh_lig, xh_pocket, mask_lig, mask_pocket,
                             num_nodes):
        """Computes the KL between q(z1 | x) and the prior p(z1) = Normal(0, 1).

        This is essentially a lot of work for something that is in practice
        negligible in the loss. However, you compute it so that you see it when
        you've made a mistake in your noise schedule.

        The x-part (coordinates) and h-part (features) are handled
        separately; the x-part lives in the zero-center-of-mass subspace,
        hence `subspace_dimensionality` for its degrees of freedom.
        """
        batch_size = len(num_nodes)

        # Compute the last alpha value, alpha_T.
        ones = torch.ones((batch_size, 1), device=xh_lig.device)
        gamma_T = self.gamma(ones)
        alpha_T = self.alpha(gamma_T, xh_lig)

        # Compute means (ligand): mu_T = alpha_T * xh, split into x and h parts.
        mu_T_lig = alpha_T[mask_lig] * xh_lig
        mu_T_lig_x, mu_T_lig_h = mu_T_lig[:, :self.n_dims], \
                                 mu_T_lig[:, self.n_dims:]

        # Compute standard deviations (only batch axis for x-part, inflated for h-part).
        sigma_T_x = self.sigma(gamma_T, mu_T_lig_x).squeeze()
        sigma_T_h = self.sigma(gamma_T, mu_T_lig_h).squeeze()

        # Compute means (pocket), split the same way.
        mu_T_pocket = alpha_T[mask_pocket] * xh_pocket
        mu_T_pocket_x, mu_T_pocket_h = mu_T_pocket[:, :self.n_dims], \
                                       mu_T_pocket[:, self.n_dims:]

        # Compute KL for h-part: KL(N(mu_T_h, sigma_T_h) || N(0, 1)).
        zeros_lig = torch.zeros_like(mu_T_lig_h)
        zeros_pocket = torch.zeros_like(mu_T_pocket_h)
        ones = torch.ones_like(sigma_T_h)
        mu_norm2 = self.sum_except_batch((mu_T_lig_h - zeros_lig) ** 2, mask_lig) + \
                   self.sum_except_batch((mu_T_pocket_h - zeros_pocket) ** 2, mask_pocket)
        kl_distance_h = self.gaussian_KL(mu_norm2, sigma_T_h, ones, d=1)

        # Compute KL for x-part, with the reduced (CoM-free) dimensionality.
        zeros_lig = torch.zeros_like(mu_T_lig_x)
        zeros_pocket = torch.zeros_like(mu_T_pocket_x)
        ones = torch.ones_like(sigma_T_x)
        mu_norm2 = self.sum_except_batch((mu_T_lig_x - zeros_lig) ** 2, mask_lig) + \
                   self.sum_except_batch((mu_T_pocket_x - zeros_pocket) ** 2, mask_pocket)
        subspace_d = self.subspace_dimensionality(num_nodes)
        kl_distance_x = self.gaussian_KL(mu_norm2, sigma_T_x, ones, subspace_d)

        return kl_distance_x + kl_distance_h
|
| 156 |
+
|
| 157 |
+
def compute_x_pred(self, net_out, zt, gamma_t, batch_mask):
|
| 158 |
+
"""Commputes x_pred, i.e. the most likely prediction of x."""
|
| 159 |
+
if self.parametrization == 'x':
|
| 160 |
+
x_pred = net_out
|
| 161 |
+
elif self.parametrization == 'eps':
|
| 162 |
+
sigma_t = self.sigma(gamma_t, target_tensor=net_out)
|
| 163 |
+
alpha_t = self.alpha(gamma_t, target_tensor=net_out)
|
| 164 |
+
eps_t = net_out
|
| 165 |
+
x_pred = 1. / alpha_t[batch_mask] * (zt - sigma_t[batch_mask] * eps_t)
|
| 166 |
+
else:
|
| 167 |
+
raise ValueError(self.parametrization)
|
| 168 |
+
|
| 169 |
+
return x_pred
|
| 170 |
+
|
| 171 |
+
    def log_constants_p_x_given_z0(self, n_nodes, device):
        """Computes the constant terms of log p(x|z0).

        These are the Gaussian normalization constants for the x-part,
        evaluated over the zero-CoM subspace (hence
        `subspace_dimensionality`). Returns one value per batch element.
        """

        batch_size = len(n_nodes)
        degrees_of_freedom_x = self.subspace_dimensionality(n_nodes)

        zeros = torch.zeros((batch_size, 1), device=device)
        gamma_0 = self.gamma(zeros)

        # Recall that sigma_x = sqrt(sigma_0^2 / alpha_0^2) = SNR(-0.5 gamma_0).
        log_sigma_x = 0.5 * gamma_0.view(batch_size)

        return degrees_of_freedom_x * (- log_sigma_x - 0.5 * np.log(2 * np.pi))
|
| 184 |
+
|
| 185 |
+
    def log_pxh_given_z0_without_constants(
            self, ligand, z_0_lig, eps_lig, net_out_lig,
            pocket, z_0_pocket, eps_pocket, net_out_pocket,
            gamma_0, epsilon=1e-10):
        """Log-likelihood of data given z_0, without the constant terms
        (those come from `log_constants_p_x_given_z0`).

        The continuous x-part uses the eps-parametrized squared error; the
        categorical h-part integrates a Gaussian over unit bins centered on
        each one-hot category.

        Returns:
            (log p(x|z0) for ligand, log p(x|z0) for pocket,
             combined log p(h|z0) for ligand + pocket), each per batch element.
        """

        # Discrete properties are predicted directly from z_t.
        z_h_lig = z_0_lig[:, self.n_dims:]
        z_h_pocket = z_0_pocket[:, self.n_dims:]

        # Take only part over x.
        eps_lig_x = eps_lig[:, :self.n_dims]
        net_lig_x = net_out_lig[:, :self.n_dims]
        eps_pocket_x = eps_pocket[:, :self.n_dims]
        net_pocket_x = net_out_pocket[:, :self.n_dims]

        # Compute sigma_0 and rescale to the integer scale of the data.
        sigma_0 = self.sigma(gamma_0, target_tensor=z_0_lig)
        sigma_0_cat = sigma_0 * self.norm_values[1]

        # Computes the error for the distribution
        # N(x | 1 / alpha_0 z_0 + sigma_0/alpha_0 eps_0, sigma_0 / alpha_0),
        # the weighting in the epsilon parametrization is exactly '1'.
        log_p_x_given_z0_without_constants_ligand = -0.5 * (
            self.sum_except_batch((eps_lig_x - net_lig_x) ** 2, ligand['mask'])
        )

        log_p_x_given_z0_without_constants_pocket = -0.5 * (
            self.sum_except_batch((eps_pocket_x - net_pocket_x) ** 2,
                                  pocket['mask'])
        )

        # Compute delta indicator masks.
        # un-normalize back to the one-hot {0, 1} scale.
        ligand_onehot = ligand['one_hot'] * self.norm_values[1] + self.norm_biases[1]
        pocket_onehot = pocket['one_hot'] * self.norm_values[1] + self.norm_biases[1]

        estimated_ligand_onehot = z_h_lig * self.norm_values[1] + self.norm_biases[1]
        estimated_pocket_onehot = z_h_pocket * self.norm_values[1] + self.norm_biases[1]

        # Centered h_cat around 1, since onehot encoded.
        centered_ligand_onehot = estimated_ligand_onehot - 1
        centered_pocket_onehot = estimated_pocket_onehot - 1

        # Compute integrals from 0.5 to 1.5 of the normal distribution
        # N(mean=z_h_cat, stdev=sigma_0_cat)
        # (epsilon guards the log against zero probability mass).
        log_ph_cat_proportional_ligand = torch.log(
            self.cdf_standard_gaussian((centered_ligand_onehot + 0.5) / sigma_0_cat[ligand['mask']])
            - self.cdf_standard_gaussian((centered_ligand_onehot - 0.5) / sigma_0_cat[ligand['mask']])
            + epsilon
        )
        log_ph_cat_proportional_pocket = torch.log(
            self.cdf_standard_gaussian((centered_pocket_onehot + 0.5) / sigma_0_cat[pocket['mask']])
            - self.cdf_standard_gaussian((centered_pocket_onehot - 0.5) / sigma_0_cat[pocket['mask']])
            + epsilon
        )

        # Normalize the distribution over the categories.
        log_Z = torch.logsumexp(log_ph_cat_proportional_ligand, dim=1,
                                keepdim=True)
        log_probabilities_ligand = log_ph_cat_proportional_ligand - log_Z

        log_Z = torch.logsumexp(log_ph_cat_proportional_pocket, dim=1,
                                keepdim=True)
        log_probabilities_pocket = log_ph_cat_proportional_pocket - log_Z

        # Select the log_prob of the current category using the onehot
        # representation.
        log_ph_given_z0_ligand = self.sum_except_batch(
            log_probabilities_ligand * ligand_onehot, ligand['mask'])
        log_ph_given_z0_pocket = self.sum_except_batch(
            log_probabilities_pocket * pocket_onehot, pocket['mask'])

        # Combine log probabilities of ligand and pocket for h.
        log_ph_given_z0 = log_ph_given_z0_ligand + log_ph_given_z0_pocket

        return log_p_x_given_z0_without_constants_ligand, \
               log_p_x_given_z0_without_constants_pocket, log_ph_given_z0
|
| 262 |
+
|
| 263 |
+
    def sample_p_xh_given_z0(self, z0_lig, z0_pocket, lig_mask, pocket_mask,
                             batch_size, fix_noise=False):
        """Samples x ~ p(x|z0).

        Returns un-normalized coordinates and hard (argmax) one-hot
        features for both ligand and pocket.
        """
        t_zeros = torch.zeros(size=(batch_size, 1), device=z0_lig.device)
        gamma_0 = self.gamma(t_zeros)
        # Computes sqrt(sigma_0^2 / alpha_0^2)
        sigma_x = self.SNR(-0.5 * gamma_0)
        net_out_lig, net_out_pocket = self.dynamics(
            z0_lig, z0_pocket, t_zeros, lig_mask, pocket_mask)

        # Compute mu for p(zs | zt).
        mu_x_lig = self.compute_x_pred(net_out_lig, z0_lig, gamma_0, lig_mask)
        mu_x_pocket = self.compute_x_pred(net_out_pocket, z0_pocket, gamma_0,
                                          pocket_mask)
        xh_lig, xh_pocket = self.sample_normal(mu_x_lig, mu_x_pocket, sigma_x,
                                               lig_mask, pocket_mask, fix_noise)

        # NOTE(review): the x-part comes from the sampled xh, but the h-part
        # is read off z0 directly (cf. "Discrete properties are predicted
        # directly from z_t" elsewhere in this class) — confirm intended.
        x_lig, h_lig = self.unnormalize(
            xh_lig[:, :self.n_dims], z0_lig[:, self.n_dims:])
        x_pocket, h_pocket = self.unnormalize(
            xh_pocket[:, :self.n_dims], z0_pocket[:, self.n_dims:])

        # Discretize features to hard one-hot vectors.
        h_lig = F.one_hot(torch.argmax(h_lig, dim=1), self.atom_nf)
        h_pocket = F.one_hot(torch.argmax(h_pocket, dim=1), self.residue_nf)

        return x_lig, h_lig, x_pocket, h_pocket
|
| 289 |
+
|
| 290 |
+
def sample_normal(self, mu_lig, mu_pocket, sigma, lig_mask, pocket_mask,
|
| 291 |
+
fix_noise=False):
|
| 292 |
+
"""Samples from a Normal distribution."""
|
| 293 |
+
if fix_noise:
|
| 294 |
+
# bs = 1 if fix_noise else mu.size(0)
|
| 295 |
+
raise NotImplementedError("fix_noise option isn't implemented yet")
|
| 296 |
+
eps_lig, eps_pocket = self.sample_combined_position_feature_noise(
|
| 297 |
+
lig_mask, pocket_mask)
|
| 298 |
+
|
| 299 |
+
return mu_lig + sigma[lig_mask] * eps_lig, \
|
| 300 |
+
mu_pocket + sigma[pocket_mask] * eps_pocket
|
| 301 |
+
|
| 302 |
+
def noised_representation(self, xh_lig, xh_pocket, lig_mask, pocket_mask,
|
| 303 |
+
gamma_t):
|
| 304 |
+
# Compute alpha_t and sigma_t from gamma.
|
| 305 |
+
alpha_t = self.alpha(gamma_t, xh_lig)
|
| 306 |
+
sigma_t = self.sigma(gamma_t, xh_lig)
|
| 307 |
+
|
| 308 |
+
# Sample zt ~ Normal(alpha_t x, sigma_t)
|
| 309 |
+
eps_lig, eps_pocket = self.sample_combined_position_feature_noise(
|
| 310 |
+
lig_mask, pocket_mask)
|
| 311 |
+
|
| 312 |
+
# Sample z_t given x, h for timestep t, from q(z_t | x, h)
|
| 313 |
+
z_t_lig = alpha_t[lig_mask] * xh_lig + sigma_t[lig_mask] * eps_lig
|
| 314 |
+
z_t_pocket = alpha_t[pocket_mask] * xh_pocket + \
|
| 315 |
+
sigma_t[pocket_mask] * eps_pocket
|
| 316 |
+
|
| 317 |
+
return z_t_lig, z_t_pocket, eps_lig, eps_pocket
|
| 318 |
+
|
| 319 |
+
def log_pN(self, N_lig, N_pocket):
    """Prior on the sample size for computing
    log p(x,h,N) = log p(x,h|N) + log p(N), where log p(x,h|N) is the
    model's output.

    Args:
        N_lig: array of ligand sample sizes
        N_pocket: array of pocket sample sizes
    Returns:
        log p(N) under the empirical joint size distribution
    """
    return self.size_distribution.log_prob(N_lig, N_pocket)
|
| 331 |
+
|
| 332 |
+
def delta_log_px(self, num_nodes):
    """Log-likelihood change of x caused by dividing coordinates by
    norm_values[0] (change-of-variables on the COM-free subspace)."""
    subspace_dim = self.subspace_dimensionality(num_nodes)
    return -subspace_dim * np.log(self.norm_values[0])
|
| 335 |
+
|
| 336 |
+
def forward(self, ligand, pocket, return_info=False):
    """
    Computes the loss and NLL terms.

    Args:
        ligand: dict with keys 'x', 'one_hot', 'mask', 'size' (node
            coordinates, categorical features, batch index per node, and
            per-graph node counts).  # assumed from usage below — confirm
        pocket: dict with the same keys for pocket nodes.
        return_info: if True, additionally return a dict of monitoring
            statistics on the predicted noise magnitudes.
    Returns:
        Tuple of loss/NLL terms (see `loss_terms` at the bottom), optionally
        followed by the `info` dict.
    """
    # Normalize data, take into account volume change in x.
    ligand, pocket = self.normalize(ligand, pocket)

    # Likelihood change due to normalization
    delta_log_px = self.delta_log_px(ligand['size'] + pocket['size'])

    # Sample a timestep t for each example in batch
    # At evaluation time, loss_0 will be computed separately to decrease
    # variance in the estimator (costs two forward passes)
    lowest_t = 0 if self.training else 1
    t_int = torch.randint(
        lowest_t, self.T + 1, size=(ligand['size'].size(0), 1),
        device=ligand['x'].device).float()
    s_int = t_int - 1  # previous timestep

    # Masks: important to compute log p(x | z0).
    t_is_zero = (t_int == 0).float()
    t_is_not_zero = 1 - t_is_zero

    # Normalize t to [0, 1]. Note that the negative
    # step of s will never be used, since then p(x | z0) is computed.
    s = s_int / self.T
    t = t_int / self.T

    # Compute gamma_s and gamma_t via the network.
    gamma_s = self.inflate_batch_array(self.gamma(s), ligand['x'])
    gamma_t = self.inflate_batch_array(self.gamma(t), ligand['x'])

    # Concatenate x, and h[categorical].
    xh_lig = torch.cat([ligand['x'], ligand['one_hot']], dim=1)
    xh_pocket = torch.cat([pocket['x'], pocket['one_hot']], dim=1)

    # Find noised representation
    z_t_lig, z_t_pocket, eps_t_lig, eps_t_pocket = \
        self.noised_representation(xh_lig, xh_pocket, ligand['mask'],
                                   pocket['mask'], gamma_t)

    # Neural net prediction.
    net_out_lig, net_out_pocket = self.dynamics(
        z_t_lig, z_t_pocket, t, ligand['mask'], pocket['mask'])

    # For LJ loss term
    # (denoised estimate of the clean data, EDM Eq. (7))
    xh_lig_hat = self.xh_given_zt_and_epsilon(z_t_lig, net_out_lig, gamma_t,
                                              ligand['mask'])

    # Compute the L2 error.
    error_t_lig = self.sum_except_batch((eps_t_lig - net_out_lig) ** 2,
                                        ligand['mask'])

    error_t_pocket = self.sum_except_batch(
        (eps_t_pocket - net_out_pocket) ** 2, pocket['mask'])

    # Compute weighting with SNR: (1 - SNR(s-t)) for epsilon parametrization
    SNR_weight = (1 - self.SNR(gamma_s - gamma_t)).squeeze(1)
    assert error_t_lig.size() == SNR_weight.size()

    # The _constants_ depending on sigma_0 from the
    # cross entropy term E_q(z0 | x) [log p(x | z0)].
    neg_log_constants = -self.log_constants_p_x_given_z0(
        n_nodes=ligand['size'] + pocket['size'], device=error_t_lig.device)

    # The KL between q(zT | x) and p(zT) = Normal(0, 1).
    # Should be close to zero.
    kl_prior = self.kl_prior_with_pocket(
        xh_lig, xh_pocket, ligand['mask'], pocket['mask'],
        ligand['size'] + pocket['size'])

    if self.training:
        # Computes the L_0 term (even if gamma_t is not actually gamma_0)
        # and this will later be selected via masking.
        log_p_x_given_z0_without_constants_ligand, \
        log_p_x_given_z0_without_constants_pocket, log_ph_given_z0 = \
            self.log_pxh_given_z0_without_constants(
                ligand, z_t_lig, eps_t_lig, net_out_lig,
                pocket, z_t_pocket, eps_t_pocket, net_out_pocket, gamma_t)

        # L_0 terms are only active for examples that sampled t == 0.
        loss_0_x_ligand = -log_p_x_given_z0_without_constants_ligand * \
                          t_is_zero.squeeze()
        loss_0_x_pocket = -log_p_x_given_z0_without_constants_pocket * \
                          t_is_zero.squeeze()
        loss_0_h = -log_ph_given_z0 * t_is_zero.squeeze()

        # apply t_is_zero mask
        # (diffusion error terms only count for t > 0)
        error_t_lig = error_t_lig * t_is_not_zero.squeeze()
        error_t_pocket = error_t_pocket * t_is_not_zero.squeeze()

    else:
        # Compute noise values for t = 0.
        # Evaluation: compute L_0 exactly with a dedicated forward pass at
        # t = 0 to reduce estimator variance.
        t_zeros = torch.zeros_like(s)
        gamma_0 = self.inflate_batch_array(self.gamma(t_zeros), ligand['x'])

        # Sample z_0 given x, h for timestep t, from q(z_t | x, h)
        z_0_lig, z_0_pocket, eps_0_lig, eps_0_pocket = \
            self.noised_representation(xh_lig, xh_pocket, ligand['mask'],
                                       pocket['mask'], gamma_0)

        net_out_0_lig, net_out_0_pocket = self.dynamics(
            z_0_lig, z_0_pocket, t_zeros, ligand['mask'], pocket['mask'])

        log_p_x_given_z0_without_constants_ligand, \
        log_p_x_given_z0_without_constants_pocket, log_ph_given_z0 = \
            self.log_pxh_given_z0_without_constants(
                ligand, z_0_lig, eps_0_lig, net_out_0_lig,
                pocket, z_0_pocket, eps_0_pocket, net_out_0_pocket, gamma_0)
        loss_0_x_ligand = -log_p_x_given_z0_without_constants_ligand
        loss_0_x_pocket = -log_p_x_given_z0_without_constants_pocket
        loss_0_h = -log_ph_given_z0

    # sample size prior
    log_pN = self.log_pN(ligand['size'], pocket['size'])

    # Monitoring statistics: per-graph mean absolute predicted noise,
    # split into position (:n_dims) and feature (n_dims:) channels.
    info = {
        'eps_hat_lig_x': scatter_mean(
            net_out_lig[:, :self.n_dims].abs().mean(1), ligand['mask'],
            dim=0).mean(),
        'eps_hat_lig_h': scatter_mean(
            net_out_lig[:, self.n_dims:].abs().mean(1), ligand['mask'],
            dim=0).mean(),
        'eps_hat_pocket_x': scatter_mean(
            net_out_pocket[:, :self.n_dims].abs().mean(1), pocket['mask'],
            dim=0).mean(),
        'eps_hat_pocket_h': scatter_mean(
            net_out_pocket[:, self.n_dims:].abs().mean(1), pocket['mask'],
            dim=0).mean(),
    }
    loss_terms = (delta_log_px, error_t_lig, error_t_pocket, SNR_weight,
                  loss_0_x_ligand, loss_0_x_pocket, loss_0_h,
                  neg_log_constants, kl_prior, log_pN,
                  t_int.squeeze(), xh_lig_hat)
    return (*loss_terms, info) if return_info else loss_terms
|
| 470 |
+
|
| 471 |
+
def xh_given_zt_and_epsilon(self, z_t, epsilon, gamma_t, batch_mask):
    """Estimate the clean data from z_t and predicted noise.

    Implements Equation (7) in the EDM paper:
    xh = z_t / alpha_t - epsilon * sigma_t / alpha_t.
    """
    a_t = self.alpha(gamma_t, z_t)[batch_mask]
    s_t = self.sigma(gamma_t, z_t)[batch_mask]
    return z_t / a_t - epsilon * s_t / a_t
|
| 478 |
+
|
| 479 |
+
def sample_p_zt_given_zs(self, zs_lig, zs_pocket, ligand_mask, pocket_mask,
                         gamma_t, gamma_s, fix_noise=False):
    """One forward (noising) step: sample z_t ~ q(z_t | z_s) for t > s.

    Used by the RePaint resampling loop in `inpaint` to jump back in time.
    Returns the noised ligand and pocket latents, re-projected to be
    COM-free as a joint system.
    """
    sigma2_t_given_s, sigma_t_given_s, alpha_t_given_s = \
        self.sigma_and_alpha_t_given_s(gamma_t, gamma_s, zs_lig)

    # Mean of the transition kernel: scale z_s by alpha_{t|s}.
    mu_lig = alpha_t_given_s[ligand_mask] * zs_lig
    mu_pocket = alpha_t_given_s[pocket_mask] * zs_pocket
    zt_lig, zt_pocket = self.sample_normal(
        mu_lig, mu_pocket, sigma_t_given_s, ligand_mask, pocket_mask,
        fix_noise)

    # Remove center of mass
    # (ligand and pocket positions are centered jointly)
    zt_x = self.remove_mean_batch(
        torch.cat((zt_lig[:, :self.n_dims], zt_pocket[:, :self.n_dims]),
                  dim=0),
        torch.cat((ligand_mask, pocket_mask))
    )
    zt_lig = torch.cat((zt_x[:len(ligand_mask)],
                        zt_lig[:, self.n_dims:]), dim=1)
    zt_pocket = torch.cat((zt_x[len(ligand_mask):],
                           zt_pocket[:, self.n_dims:]), dim=1)

    return zt_lig, zt_pocket
|
| 502 |
+
|
| 503 |
+
def sample_p_zs_given_zt(self, s, t, zt_lig, zt_pocket, ligand_mask,
                         pocket_mask, fix_noise=False):
    """Samples from zs ~ p(zs | zt). Only used during sampling.

    Performs one reverse-diffusion step t -> s: predicts the noise with the
    dynamics network, forms the posterior mean/std, samples, and projects
    positions back onto the COM-free subspace.
    """
    gamma_s = self.gamma(s)
    gamma_t = self.gamma(t)

    sigma2_t_given_s, sigma_t_given_s, alpha_t_given_s = \
        self.sigma_and_alpha_t_given_s(gamma_t, gamma_s, zt_lig)

    sigma_s = self.sigma(gamma_s, target_tensor=zt_lig)
    sigma_t = self.sigma(gamma_t, target_tensor=zt_lig)

    # Neural net prediction.
    eps_t_lig, eps_t_pocket = self.dynamics(
        zt_lig, zt_pocket, t, ligand_mask, pocket_mask)

    # Compute mu for p(zs | zt).
    # Sanity checks: both the latent positions and the predicted noise
    # must be mean-free per graph.
    combined_mask = torch.cat((ligand_mask, pocket_mask))
    self.assert_mean_zero_with_mask(
        torch.cat((zt_lig[:, :self.n_dims],
                   zt_pocket[:, :self.n_dims]), dim=0),
        combined_mask)
    self.assert_mean_zero_with_mask(
        torch.cat((eps_t_lig[:, :self.n_dims],
                   eps_t_pocket[:, :self.n_dims]), dim=0),
        combined_mask)

    # Note: mu_{t->s} = 1 / alpha_{t|s} z_t - sigma_{t|s}^2 / sigma_t / alpha_{t|s} epsilon
    # follows from the definition of mu_{t->s} and Equ. (7) in the EDM paper
    mu_lig = zt_lig / alpha_t_given_s[ligand_mask] - \
             (sigma2_t_given_s / alpha_t_given_s / sigma_t)[ligand_mask] * \
             eps_t_lig
    mu_pocket = zt_pocket / alpha_t_given_s[pocket_mask] - \
                (sigma2_t_given_s / alpha_t_given_s / sigma_t)[pocket_mask] * \
                eps_t_pocket

    # Compute sigma for p(zs | zt).
    sigma = sigma_t_given_s * sigma_s / sigma_t

    # Sample zs given the parameters derived from zt.
    zs_lig, zs_pocket = self.sample_normal(mu_lig, mu_pocket, sigma,
                                           ligand_mask, pocket_mask,
                                           fix_noise)

    # Project down to avoid numerical runaway of the center of gravity.
    zs_x = self.remove_mean_batch(
        torch.cat((zs_lig[:, :self.n_dims],
                   zs_pocket[:, :self.n_dims]), dim=0),
        torch.cat((ligand_mask, pocket_mask))
    )
    zs_lig = torch.cat((zs_x[:len(ligand_mask)],
                        zs_lig[:, self.n_dims:]), dim=1)
    zs_pocket = torch.cat((zs_x[len(ligand_mask):],
                           zs_pocket[:, self.n_dims:]), dim=1)
    return zs_lig, zs_pocket
|
| 558 |
+
|
| 559 |
+
def sample_combined_position_feature_noise(self, lig_indices,
                                           pocket_indices):
    """
    Samples mean-centered normal noise for z_x, and standard normal noise
    for z_h.

    Positions are drawn jointly for ligand + pocket and projected to be
    COM-free per graph; feature noise is plain i.i.d. Gaussian with
    atom_nf / residue_nf channels respectively.
    """
    z_x = self.sample_center_gravity_zero_gaussian_batch(
        size=(len(lig_indices) + len(pocket_indices), self.n_dims),
        lig_indices=lig_indices,
        pocket_indices=pocket_indices
    )
    z_h_lig = self.sample_gaussian(
        size=(len(lig_indices), self.atom_nf),
        device=lig_indices.device)
    # Ligand nodes occupy the first len(lig_indices) rows of z_x.
    z_lig = torch.cat([z_x[:len(lig_indices)], z_h_lig], dim=1)
    z_h_pocket = self.sample_gaussian(
        size=(len(pocket_indices), self.residue_nf),
        device=pocket_indices.device)
    z_pocket = torch.cat([z_x[len(lig_indices):], z_h_pocket], dim=1)
    return z_lig, z_pocket
|
| 579 |
+
|
| 580 |
+
@torch.no_grad()
def sample(self, n_samples, num_nodes_lig, num_nodes_pocket,
           return_frames=1, timesteps=None, device='cpu'):
    """
    Draw samples from the generative model. Optionally, return intermediate
    states for visualization purposes.

    Args:
        n_samples: number of systems to generate.
        num_nodes_lig / num_nodes_pocket: per-sample node counts.
        return_frames: number of intermediate frames to keep (1 = final only).
        timesteps: number of denoising steps (defaults to self.T).
        device: device for the generated tensors.
    Returns:
        (out_lig, out_pocket, lig_mask, pocket_mask); the frame dimension is
        squeezed away when return_frames == 1.
    """
    timesteps = self.T if timesteps is None else timesteps
    assert 0 < return_frames <= timesteps
    assert timesteps % return_frames == 0

    lig_mask = utils.num_nodes_to_batch_mask(n_samples, num_nodes_lig,
                                             device)
    pocket_mask = utils.num_nodes_to_batch_mask(n_samples, num_nodes_pocket,
                                                device)

    combined_mask = torch.cat((lig_mask, pocket_mask))

    # Start from the prior: z_T ~ N(0, I) on the COM-free subspace.
    z_lig, z_pocket = self.sample_combined_position_feature_noise(
        lig_mask, pocket_mask)

    self.assert_mean_zero_with_mask(
        torch.cat((z_lig[:, :self.n_dims], z_pocket[:, :self.n_dims]), dim=0),
        combined_mask
    )

    out_lig = torch.zeros((return_frames,) + z_lig.size(),
                          device=z_lig.device)
    out_pocket = torch.zeros((return_frames,) + z_pocket.size(),
                             device=z_pocket.device)

    # Iteratively sample p(z_s | z_t) for t = 1, ..., T, with s = t - 1.
    for s in reversed(range(0, timesteps)):
        s_array = torch.full((n_samples, 1), fill_value=s,
                             device=z_lig.device)
        t_array = s_array + 1
        s_array = s_array / timesteps
        t_array = t_array / timesteps

        z_lig, z_pocket = self.sample_p_zs_given_zt(
            s_array, t_array, z_lig, z_pocket, lig_mask, pocket_mask)

        # save frame
        if (s * return_frames) % timesteps == 0:
            idx = (s * return_frames) // timesteps
            out_lig[idx], out_pocket[idx] = \
                self.unnormalize_z(z_lig, z_pocket)

    # Finally sample p(x, h | z_0).
    x_lig, h_lig, x_pocket, h_pocket = self.sample_p_xh_given_z0(
        z_lig, z_pocket, lig_mask, pocket_mask, n_samples)

    self.assert_mean_zero_with_mask(
        torch.cat((x_lig, x_pocket), dim=0), combined_mask
    )

    # Correct CoM drift for examples without intermediate states
    if return_frames == 1:
        x = torch.cat((x_lig, x_pocket))
        max_cog = scatter_add(x, combined_mask, dim=0).abs().max().item()
        if max_cog > 5e-2:
            print(f'Warning CoG drift with error {max_cog:.3f}. Projecting '
                  f'the positions down.')
            x = self.remove_mean_batch(x, combined_mask)
            x_lig, x_pocket = x[:len(x_lig)], x[len(x_lig):]

    # Overwrite last frame with the resulting x and h.
    out_lig[0] = torch.cat([x_lig, h_lig], dim=1)
    out_pocket[0] = torch.cat([x_pocket, h_pocket], dim=1)

    # remove frame dimension if only the final molecule is returned
    return out_lig.squeeze(0), out_pocket.squeeze(0), lig_mask, pocket_mask
|
| 652 |
+
|
| 653 |
+
def get_repaint_schedule(self, resamplings, jump_length, timesteps):
    """ Each integer in the schedule list describes how many denoising steps
    need to be applied before jumping back.

    The schedule is built forward in time and then reversed, so the first
    entry corresponds to the start of the reverse-diffusion process.
    """
    schedule = []
    t = 0
    while t < timesteps:
        if t + jump_length < timesteps:
            # A full jump fits: merge the first denoise run into the
            # previous entry, then add (resamplings - 1) resample cycles.
            if schedule:
                schedule[-1] += jump_length
                schedule.extend([jump_length] * (resamplings - 1))
            else:
                schedule.extend([jump_length] * resamplings)
            t += jump_length
        else:
            # Tail segment shorter than a jump: no resampling, just finish.
            remaining = timesteps - t
            if schedule:
                schedule[-1] += remaining
            else:
                schedule.append(remaining)
            t += remaining

    schedule.reverse()
    return schedule
|
| 675 |
+
|
| 676 |
+
@torch.no_grad()
def inpaint(self, ligand, pocket, lig_fixed, pocket_fixed, resamplings=1,
            jump_length=1, return_frames=1, timesteps=None):
    """
    Draw samples from the generative model while fixing parts of the input.
    Optionally, return intermediate states for visualization purposes.
    See:
    Lugmayr, Andreas, et al.
    "Repaint: Inpainting using denoising diffusion probabilistic models."
    Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern
    Recognition. 2022.

    Args:
        ligand, pocket: input dicts with 'x', 'one_hot', 'mask', 'size'.
        lig_fixed, pocket_fixed: per-node 0/1 indicators marking nodes
            whose values are kept fixed (inpainting context).
        resamplings: number of resample cycles per jump (RePaint r).
        jump_length: how far to jump back in time per cycle (RePaint j).
        return_frames: number of intermediate frames to keep.
        timesteps: number of denoising steps (defaults to self.T).
    Returns:
        (out_lig, out_pocket, ligand['mask'], pocket['mask']).
    """
    timesteps = self.T if timesteps is None else timesteps
    assert 0 < return_frames <= timesteps
    assert timesteps % return_frames == 0
    assert jump_length == 1 or return_frames == 1, \
        "Chain visualization is only implemented for jump_length=1"

    # Broadcast 1-D fixed-node indicators to column vectors.
    if len(lig_fixed.size()) == 1:
        lig_fixed = lig_fixed.unsqueeze(1)
    if len(pocket_fixed.size()) == 1:
        pocket_fixed = pocket_fixed.unsqueeze(1)

    ligand, pocket = self.normalize(ligand, pocket)

    n_samples = len(ligand['size'])
    combined_mask = torch.cat((ligand['mask'], pocket['mask']))
    xh0_lig = torch.cat([ligand['x'], ligand['one_hot']], dim=1)
    xh0_pocket = torch.cat([pocket['x'], pocket['one_hot']], dim=1)

    # Center initial system, subtract COM of known parts
    mean_known = scatter_mean(
        torch.cat((ligand['x'][lig_fixed.bool().view(-1)],
                   pocket['x'][pocket_fixed.bool().view(-1)])),
        torch.cat((ligand['mask'][lig_fixed.bool().view(-1)],
                   pocket['mask'][pocket_fixed.bool().view(-1)])),
        dim=0
    )
    xh0_lig[:, :self.n_dims] = \
        xh0_lig[:, :self.n_dims] - mean_known[ligand['mask']]
    xh0_pocket[:, :self.n_dims] = \
        xh0_pocket[:, :self.n_dims] - mean_known[pocket['mask']]

    # Noised representation at step t=T
    z_lig, z_pocket = self.sample_combined_position_feature_noise(
        ligand['mask'], pocket['mask'])

    # Output tensors
    out_lig = torch.zeros((return_frames,) + z_lig.size(),
                          device=z_lig.device)
    out_pocket = torch.zeros((return_frames,) + z_pocket.size(),
                             device=z_pocket.device)

    # Iteratively sample according to a pre-defined schedule
    schedule = self.get_repaint_schedule(resamplings, jump_length, timesteps)
    s = timesteps - 1
    for i, n_denoise_steps in enumerate(schedule):
        for j in range(n_denoise_steps):
            # Denoise one time step: t -> s
            s_array = torch.full((n_samples, 1), fill_value=s,
                                 device=z_lig.device)
            t_array = s_array + 1
            s_array = s_array / timesteps
            t_array = t_array / timesteps

            # sample known nodes from the input
            # (forward-diffuse the clean context to the current noise level)
            gamma_s = self.inflate_batch_array(self.gamma(s_array),
                                               ligand['x'])
            z_lig_known, z_pocket_known, _, _ = self.noised_representation(
                xh0_lig, xh0_pocket, ligand['mask'], pocket['mask'], gamma_s)

            # sample inpainted part
            z_lig_unknown, z_pocket_unknown = self.sample_p_zs_given_zt(
                s_array, t_array, z_lig, z_pocket, ligand['mask'],
                pocket['mask'])

            # move center of mass of the noised part to the center of mass
            # of the corresponding denoised part before combining them
            # -> the resulting system should be COM-free
            com_noised = scatter_mean(
                torch.cat((z_lig_known[:, :self.n_dims][lig_fixed.bool().view(-1)],
                           z_pocket_known[:, :self.n_dims][pocket_fixed.bool().view(-1)])),
                torch.cat((ligand['mask'][lig_fixed.bool().view(-1)],
                           pocket['mask'][pocket_fixed.bool().view(-1)])),
                dim=0
            )
            com_denoised = scatter_mean(
                torch.cat((z_lig_unknown[:, :self.n_dims][lig_fixed.bool().view(-1)],
                           z_pocket_unknown[:, :self.n_dims][pocket_fixed.bool().view(-1)])),
                torch.cat((ligand['mask'][lig_fixed.bool().view(-1)],
                           pocket['mask'][pocket_fixed.bool().view(-1)])),
                dim=0
            )
            z_lig_known[:, :self.n_dims] = \
                z_lig_known[:, :self.n_dims] + (com_denoised - com_noised)[ligand['mask']]
            z_pocket_known[:, :self.n_dims] = \
                z_pocket_known[:, :self.n_dims] + (com_denoised - com_noised)[pocket['mask']]

            # combine
            # (fixed nodes take the noised context, free nodes the sample)
            z_lig = z_lig_known * lig_fixed + \
                    z_lig_unknown * (1 - lig_fixed)
            z_pocket = z_pocket_known * pocket_fixed + \
                       z_pocket_unknown * (1 - pocket_fixed)

            self.assert_mean_zero_with_mask(
                torch.cat((z_lig[:, :self.n_dims],
                           z_pocket[:, :self.n_dims]), dim=0), combined_mask
            )

            # save frame at the end of a resample cycle
            if n_denoise_steps > jump_length or i == len(schedule) - 1:
                if (s * return_frames) % timesteps == 0:
                    idx = (s * return_frames) // timesteps
                    out_lig[idx], out_pocket[idx] = \
                        self.unnormalize_z(z_lig, z_pocket)

            # Noise combined representation
            if j == n_denoise_steps - 1 and i < len(schedule) - 1:
                # Go back jump_length steps
                t = s + jump_length
                t_array = torch.full((n_samples, 1), fill_value=t,
                                     device=z_lig.device)
                t_array = t_array / timesteps

                gamma_s = self.inflate_batch_array(self.gamma(s_array),
                                                   ligand['x'])
                gamma_t = self.inflate_batch_array(self.gamma(t_array),
                                                   ligand['x'])

                z_lig, z_pocket = self.sample_p_zt_given_zs(
                    z_lig, z_pocket, ligand['mask'], pocket['mask'],
                    gamma_t, gamma_s)

                s = t

            s -= 1

    # Finally sample p(x, h | z_0).
    x_lig, h_lig, x_pocket, h_pocket = self.sample_p_xh_given_z0(
        z_lig, z_pocket, ligand['mask'], pocket['mask'], n_samples)

    self.assert_mean_zero_with_mask(
        torch.cat((x_lig, x_pocket), dim=0), combined_mask
    )

    # Correct CoM drift for examples without intermediate states
    if return_frames == 1:
        x = torch.cat((x_lig, x_pocket))
        max_cog = scatter_add(x, combined_mask, dim=0).abs().max().item()
        if max_cog > 5e-2:
            print(f'Warning CoG drift with error {max_cog:.3f}. Projecting '
                  f'the positions down.')
            x = self.remove_mean_batch(x, combined_mask)
            x_lig, x_pocket = x[:len(x_lig)], x[len(x_lig):]

    # Overwrite last frame with the resulting x and h.
    out_lig[0] = torch.cat([x_lig, h_lig], dim=1)
    out_pocket[0] = torch.cat([x_pocket, h_pocket], dim=1)

    # remove frame dimension if only the final molecule is returned
    return out_lig.squeeze(0), out_pocket.squeeze(0), ligand['mask'], \
           pocket['mask']
|
| 838 |
+
|
| 839 |
+
@staticmethod
|
| 840 |
+
def gaussian_KL(q_mu_minus_p_mu_squared, q_sigma, p_sigma, d):
|
| 841 |
+
"""Computes the KL distance between two normal distributions.
|
| 842 |
+
Args:
|
| 843 |
+
q_mu_minus_p_mu_squared: Squared difference between mean of
|
| 844 |
+
distribution q and distribution p: ||mu_q - mu_p||^2
|
| 845 |
+
q_sigma: Standard deviation of distribution q.
|
| 846 |
+
p_sigma: Standard deviation of distribution p.
|
| 847 |
+
d: dimension
|
| 848 |
+
Returns:
|
| 849 |
+
The KL distance
|
| 850 |
+
"""
|
| 851 |
+
return d * torch.log(p_sigma / q_sigma) + \
|
| 852 |
+
0.5 * (d * q_sigma ** 2 + q_mu_minus_p_mu_squared) / \
|
| 853 |
+
(p_sigma ** 2) - 0.5 * d
|
| 854 |
+
|
| 855 |
+
@staticmethod
|
| 856 |
+
def inflate_batch_array(array, target):
|
| 857 |
+
"""
|
| 858 |
+
Inflates the batch array (array) with only a single axis
|
| 859 |
+
(i.e. shape = (batch_size,), or possibly more empty axes
|
| 860 |
+
(i.e. shape (batch_size, 1, ..., 1)) to match the target shape.
|
| 861 |
+
"""
|
| 862 |
+
target_shape = (array.size(0),) + (1,) * (len(target.size()) - 1)
|
| 863 |
+
return array.view(target_shape)
|
| 864 |
+
|
| 865 |
+
def sigma(self, gamma, target_tensor):
    """Computes sigma given gamma (sigma^2 = sigmoid(gamma)), broadcastable
    against target_tensor."""
    sigma_flat = torch.sqrt(torch.sigmoid(gamma))
    return self.inflate_batch_array(sigma_flat, target_tensor)
|
| 869 |
+
|
| 870 |
+
def alpha(self, gamma, target_tensor):
    """Computes alpha given gamma (alpha^2 = sigmoid(-gamma)), broadcastable
    against target_tensor."""
    alpha_flat = torch.sqrt(torch.sigmoid(-gamma))
    return self.inflate_batch_array(alpha_flat, target_tensor)
|
| 874 |
+
|
| 875 |
+
@staticmethod
|
| 876 |
+
def SNR(gamma):
|
| 877 |
+
"""Computes signal to noise ratio (alpha^2/sigma^2) given gamma."""
|
| 878 |
+
return torch.exp(-gamma)
|
| 879 |
+
|
| 880 |
+
def normalize(self, ligand=None, pocket=None):
    """Scale coordinates and shift/scale one-hot features in place.

    Either argument may be None, in which case it is passed through
    unchanged.
    """
    x_scale = self.norm_values[0]
    h_scale = self.norm_values[1]
    h_bias = self.norm_biases[1]

    if ligand is not None:
        ligand['x'] = ligand['x'] / x_scale
        # Casting to float in case h still has long or int type.
        ligand['one_hot'] = (ligand['one_hot'].float() - h_bias) / h_scale

    if pocket is not None:
        pocket['x'] = pocket['x'] / x_scale
        pocket['one_hot'] = (pocket['one_hot'].float() - h_bias) / h_scale

    return ligand, pocket
|
| 896 |
+
|
| 897 |
+
def unnormalize(self, x, h_cat):
    """Invert `normalize` for coordinates and categorical features."""
    x_out = x * self.norm_values[0]
    h_out = h_cat * self.norm_values[1] + self.norm_biases[1]
    return x_out, h_out
|
| 902 |
+
|
| 903 |
+
def unnormalize_z(self, z_lig, z_pocket):
    """Split latents into (x, h) parts, unnormalize both, and re-join."""
    d = self.n_dims
    x_lig, h_lig = self.unnormalize(z_lig[:, :d], z_lig[:, d:])
    x_pocket, h_pocket = self.unnormalize(z_pocket[:, :d], z_pocket[:, d:])
    return (torch.cat([x_lig, h_lig], dim=1),
            torch.cat([x_pocket, h_pocket], dim=1))
|
| 913 |
+
|
| 914 |
+
def subspace_dimensionality(self, input_size):
    """Compute the dimensionality on translation-invariant linear subspace
    where distributions on x are defined (one node's worth of coordinates
    is removed by the COM constraint)."""
    return self.n_dims * (input_size - 1)
|
| 918 |
+
|
| 919 |
+
@staticmethod
def remove_mean_batch(x, indices):
    """Subtract each graph's center of mass from its nodes."""
    com = scatter_mean(x, indices, dim=0)
    return x - com[indices]
|
| 924 |
+
|
| 925 |
+
@staticmethod
def assert_mean_zero_with_mask(x, node_mask, eps=1e-10):
    """Assert every graph in the batch is (approximately) COM-free,
    measured relative to the largest absolute coordinate."""
    largest = x.abs().max().item()
    abs_error = scatter_add(x, node_mask, dim=0).abs().max().item()
    relative = abs_error / (largest + eps)
    assert relative < 1e-2, f'Mean is not zero, relative_error {relative}'
|
| 931 |
+
|
| 932 |
+
@staticmethod
def sample_center_gravity_zero_gaussian_batch(size, lig_indices,
                                              pocket_indices):
    # size must be (total_num_nodes, n_dims) — 2-D only.
    assert len(size) == 2
    x = torch.randn(size, device=lig_indices.device)

    # This projection only works because Gaussian is rotation invariant
    # around zero and samples are independent!
    x_projected = EnVariationalDiffusion.remove_mean_batch(
        x, torch.cat((lig_indices, pocket_indices)))
    return x_projected
|
| 943 |
+
|
| 944 |
+
@staticmethod
def sum_except_batch(x, indices):
    """Sum over feature dims, then aggregate per graph via scatter_add."""
    per_node = x.sum(-1)
    return scatter_add(per_node, indices, dim=0)
|
| 947 |
+
|
| 948 |
+
@staticmethod
|
| 949 |
+
def cdf_standard_gaussian(x):
|
| 950 |
+
return 0.5 * (1. + torch.erf(x / math.sqrt(2)))
|
| 951 |
+
|
| 952 |
+
@staticmethod
|
| 953 |
+
def sample_gaussian(size, device):
|
| 954 |
+
x = torch.randn(size, device=device)
|
| 955 |
+
return x
|
| 956 |
+
|
| 957 |
+
|
| 958 |
+
class DistributionNodes:
    """Empirical joint distribution over (ligand size, pocket size) pairs.

    Built from a 2-D histogram of counts; exposes the joint, both
    conditionals, and corresponding log-probabilities.
    """

    def __init__(self, histogram):
        # Smooth the counts so every (n1, n2) pair has non-zero probability.
        hist = torch.tensor(histogram).float() + 1e-3  # for numerical stability
        prob = hist / hist.sum()

        n1_range, n2_range = prob.shape[0], prob.shape[1]

        # Map between flat categorical indices and (n1, n2) pairs.
        self.idx_to_n_nodes = torch.tensor(
            [[(i, j) for j in range(n2_range)] for i in range(n1_range)]
        ).view(-1, 2)
        self.n_nodes_to_idx = {
            tuple(pair.tolist()): flat_idx
            for flat_idx, pair in enumerate(self.idx_to_n_nodes)
        }

        self.prob = prob
        self.m = torch.distributions.Categorical(self.prob.view(-1),
                                                 validate_args=True)

        # Conditional distributions along each axis (columns/rows are
        # renormalized by Categorical automatically).
        self.n1_given_n2 = [
            torch.distributions.Categorical(prob[:, j], validate_args=True)
            for j in range(n2_range)
        ]
        self.n2_given_n1 = [
            torch.distributions.Categorical(prob[i, :], validate_args=True)
            for i in range(n1_range)
        ]

        entropy = self.m.entropy()
        print("Entropy of n_nodes: H[N]", entropy.item())

    def sample(self, n_samples=1):
        """Draw joint (ligand size, pocket size) samples."""
        flat_idx = self.m.sample((n_samples,))
        num_nodes_lig, num_nodes_pocket = self.idx_to_n_nodes[flat_idx].T
        return num_nodes_lig, num_nodes_pocket

    def sample_conditional(self, n1=None, n2=None):
        """Sample one size conditioned on the other (exactly one given)."""
        assert (n1 is None) ^ (n2 is None), \
            "Exactly one input argument must be None"

        if n2 is not None:
            dists, cond = self.n1_given_n2, n2
        else:
            dists, cond = self.n2_given_n1, n1

        return torch.tensor([dists[c].sample() for c in cond],
                            device=cond.device)

    def log_prob(self, batch_n_nodes_1, batch_n_nodes_2):
        """Joint log p(n1, n2) for a batch of size pairs (1-D tensors)."""
        assert len(batch_n_nodes_1.size()) == 1
        assert len(batch_n_nodes_2.size()) == 1

        flat_idx = torch.tensor(
            [self.n_nodes_to_idx[(a, b)]
             for a, b in zip(batch_n_nodes_1.tolist(),
                             batch_n_nodes_2.tolist())]
        )

        return self.m.log_prob(flat_idx).to(batch_n_nodes_1.device)

    def log_prob_n1_given_n2(self, n1, n2):
        """Element-wise log p(n1 | n2) for two equal-length 1-D tensors."""
        assert len(n1.size()) == 1
        assert len(n2.size()) == 1
        lp = [self.n1_given_n2[c].log_prob(v.cpu()) for v, c in zip(n1, n2)]
        return torch.stack(lp).to(n1.device)

    def log_prob_n2_given_n1(self, n2, n1):
        """Element-wise log p(n2 | n1) for two equal-length 1-D tensors."""
        assert len(n2.size()) == 1
        assert len(n1.size()) == 1
        lp = [self.n2_given_n1[c].log_prob(v.cpu()) for v, c in zip(n2, n1)]
        return torch.stack(lp).to(n2.device)
|
| 1029 |
+
|
| 1030 |
+
|
| 1031 |
+
class PositiveLinear(torch.nn.Module):
    """Linear layer with weights forced to be positive.

    Positivity is enforced at call time by mapping the raw weight through
    softplus, so the effective weight is always > 0 while gradients flow
    through an unconstrained parameter.
    """

    def __init__(self, in_features: int, out_features: int, bias: bool = True,
                 weight_init_offset: int = -2):
        super(PositiveLinear, self).__init__()
        self.in_features = in_features
        self.out_features = out_features
        # Raw (unconstrained) weight; softplus in forward() makes it positive.
        self.weight = torch.nn.Parameter(
            torch.empty((out_features, in_features)))
        if bias:
            self.bias = torch.nn.Parameter(torch.empty(out_features))
        else:
            self.register_parameter('bias', None)
        self.weight_init_offset = weight_init_offset
        self.reset_parameters()

    def reset_parameters(self) -> None:
        """Kaiming-init the raw weight, shift it by the offset, init bias."""
        torch.nn.init.kaiming_uniform_(self.weight, a=math.sqrt(5))

        with torch.no_grad():
            # Negative offset keeps softplus(weight) small at initialization.
            self.weight.add_(self.weight_init_offset)

        if self.bias is not None:
            fan_in, _ = torch.nn.init._calculate_fan_in_and_fan_out(
                self.weight)
            bound = 1 / math.sqrt(fan_in) if fan_in > 0 else 0
            torch.nn.init.uniform_(self.bias, -bound, bound)

    def forward(self, input):
        """Apply the linear map with softplus-positive weights."""
        return F.linear(input, F.softplus(self.weight), self.bias)
|
| 1062 |
+
|
| 1063 |
+
|
| 1064 |
+
class GammaNetwork(torch.nn.Module):
    """Learned monotonically increasing noise schedule gamma(t).

    Construction as in the VDM paper: a monotonic MLP (positive-weight
    layers) is normalized to [0, 1] over t in [0, 1] and then affinely mapped
    to the learnable endpoints [gamma_0, gamma_1].
    """

    def __init__(self):
        super().__init__()

        # Positive-weight layers guarantee the network is non-decreasing in t.
        self.l1 = PositiveLinear(1, 1)
        self.l2 = PositiveLinear(1, 1024)
        self.l3 = PositiveLinear(1024, 1)

        # Learnable schedule endpoints gamma(0) and gamma(1).
        self.gamma_0 = torch.nn.Parameter(torch.tensor([-5.]))
        self.gamma_1 = torch.nn.Parameter(torch.tensor([10.]))
        self.show_schedule()

    def show_schedule(self, num_steps=50):
        """Print gamma on a uniform grid over [0, 1] (debugging aid)."""
        grid = torch.linspace(0, 1, num_steps).view(num_steps, 1)
        values = self.forward(grid)
        print('Gamma schedule:')
        print(values.detach().cpu().numpy().reshape(num_steps))

    def gamma_tilde(self, t):
        """Unnormalized monotonic function: linear skip plus squashed MLP."""
        skip = self.l1(t)
        return skip + self.l3(torch.sigmoid(self.l2(skip)))

    def forward(self, t):
        """Evaluate gamma(t), rescaled to the [gamma_0, gamma_1] range."""
        # Not super efficient: endpoint values are recomputed on every call.
        lo = self.gamma_tilde(torch.zeros_like(t))
        hi = self.gamma_tilde(torch.ones_like(t))
        raw = self.gamma_tilde(t)

        # Normalize to [0, 1], then map affinely onto [gamma_0, gamma_1].
        unit = (raw - lo) / (hi - lo)
        return self.gamma_0 + (self.gamma_1 - self.gamma_0) * unit
|
| 1103 |
+
|
| 1104 |
+
|
| 1105 |
+
def cosine_beta_schedule(timesteps, s=0.008, raise_to_power: float = 1):
    """Cosine noise schedule as proposed in
    https://openreview.net/forum?id=-NEXDKk8gZ

    Note: despite the name, this returns the cumulative product of alphas
    (length timesteps + 1), not the per-step betas.
    """
    steps = timesteps + 2
    grid = np.linspace(0, steps, steps)
    f_t = np.cos(((grid / steps) + s) / (1 + s) * np.pi * 0.5) ** 2
    f_t = f_t / f_t[0]

    # Per-step betas derived from consecutive ratios, clipped for stability.
    betas = np.clip(1 - f_t[1:] / f_t[:-1], a_min=0, a_max=0.999)
    alphas_cumprod = np.cumprod(1. - betas, axis=0)

    if raise_to_power != 1:
        alphas_cumprod = np.power(alphas_cumprod, raise_to_power)

    return alphas_cumprod
|
| 1123 |
+
|
| 1124 |
+
|
| 1125 |
+
def clip_noise_schedule(alphas2, clip_value=0.001):
    """Clip the per-step ratios alpha_t^2 / alpha_{t-1}^2 from below.

    For a noise schedule given by alpha^2, this may help improve stability
    during sampling by preventing vanishing step ratios.
    """
    # Prepend alpha_0^2 = 1 so ratios are defined for the first step.
    padded = np.concatenate([np.ones(1), alphas2], axis=0)
    ratios = np.clip(padded[1:] / padded[:-1], a_min=clip_value, a_max=1.)
    return np.cumprod(ratios, axis=0)
|
| 1139 |
+
|
| 1140 |
+
|
| 1141 |
+
def polynomial_schedule(timesteps: int, s=1e-4, power=3.):
    """Noise schedule alpha^2 based on the polynomial 1 - x^power.

    The squared schedule is ratio-clipped for stability and then rescaled
    into [s, 1 - s] to keep it away from exact 0/1 endpoints.
    """
    steps = timesteps + 1
    frac = np.linspace(0, steps, steps) / steps
    raw = (1 - frac ** power) ** 2

    stabilized = clip_noise_schedule(raw, clip_value=0.001)

    precision = 1 - 2 * s
    return precision * stabilized + s
|
| 1156 |
+
|
| 1157 |
+
|
| 1158 |
+
class PredefinedNoiseSchedule(torch.nn.Module):
    """Fixed (non-learned) noise schedule.

    Precomputes gamma(t) = -log(alpha_t^2 / sigma_t^2) for all integer
    timesteps and serves it as a lookup table.
    """

    def __init__(self, noise_schedule, timesteps, precision):
        super(PredefinedNoiseSchedule, self).__init__()
        self.timesteps = timesteps

        if noise_schedule == 'cosine':
            alphas2 = cosine_beta_schedule(timesteps)
        elif 'polynomial' in noise_schedule:
            # Expected format: 'polynomial_<power>', e.g. 'polynomial_2'.
            parts = noise_schedule.split('_')
            assert len(parts) == 2
            alphas2 = polynomial_schedule(
                timesteps, s=precision, power=float(parts[1]))
        else:
            raise ValueError(noise_schedule)

        sigmas2 = 1 - alphas2

        # gamma = -(log alpha^2 - log sigma^2), i.e. the negative log-SNR.
        log_snr = np.log(alphas2) - np.log(sigmas2)

        # Stored as a frozen Parameter so it moves with the module's device.
        self.gamma = torch.nn.Parameter(
            torch.from_numpy(-log_snr).float(),
            requires_grad=False)

    def forward(self, t):
        """Look up gamma for continuous t in [0, 1] (rounded to a step index)."""
        t_int = torch.round(t * self.timesteps).long()
        return self.gamma[t_int]
|
example/3rfm.pdb
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
example/3rfm_B_CFF.sdf
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
CFF
|
| 2 |
+
ModelServer 0.9.11
|
| 3 |
+
|
| 4 |
+
14 15 0 0 0 0 0 0 0 0 0
|
| 5 |
+
9.1740 -33.1560 -31.8810 N 0 0 0 0 0 0 0 0 0 0 0 0
|
| 6 |
+
9.5110 -33.9730 -32.9480 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 7 |
+
10.1800 -32.7900 -30.8620 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 8 |
+
7.8800 -32.6980 -31.7480 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 9 |
+
8.5600 -34.3290 -33.8820 N 0 0 0 0 0 0 0 0 0 0 0 0
|
| 10 |
+
10.7810 -34.4320 -33.0790 O 0 0 0 0 0 0 0 0 0 0 0 0
|
| 11 |
+
8.9230 -35.2040 -35.0040 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 12 |
+
7.2870 -33.8640 -33.7350 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 13 |
+
6.9250 -33.0680 -32.6990 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 14 |
+
6.2400 -34.1120 -34.5500 N 0 0 0 0 0 0 0 0 0 0 0 0
|
| 15 |
+
7.5520 -31.9080 -30.7050 O 0 0 0 0 0 0 0 0 0 0 0 0
|
| 16 |
+
5.6170 -32.7750 -32.8280 N 0 0 0 0 0 0 0 0 0 0 0 0
|
| 17 |
+
5.2350 -33.4220 -33.9540 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 18 |
+
4.7270 -31.9750 -31.9940 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 19 |
+
1 2 1 0 0 0 0
|
| 20 |
+
1 3 1 0 0 0 0
|
| 21 |
+
1 4 1 0 0 0 0
|
| 22 |
+
2 5 1 0 0 0 0
|
| 23 |
+
2 6 2 0 0 0 0
|
| 24 |
+
4 9 1 0 0 0 0
|
| 25 |
+
4 11 2 0 0 0 0
|
| 26 |
+
5 7 1 0 0 0 0
|
| 27 |
+
5 8 1 0 0 0 0
|
| 28 |
+
8 9 2 0 0 0 0
|
| 29 |
+
8 10 1 0 0 0 0
|
| 30 |
+
9 12 1 0 0 0 0
|
| 31 |
+
10 13 2 0 0 0 0
|
| 32 |
+
12 13 1 0 0 0 0
|
| 33 |
+
12 14 1 0 0 0 0
|
| 34 |
+
M END
|
| 35 |
+
> <model_server_result.job_id>
|
| 36 |
+
yPgH9SanFvk0wz61gS-2-A
|
| 37 |
+
|
| 38 |
+
> <model_server_result.datetime_utc>
|
| 39 |
+
2024-06-19 19:43:15
|
| 40 |
+
|
| 41 |
+
> <model_server_result.server_version>
|
| 42 |
+
0.9.11
|
| 43 |
+
|
| 44 |
+
> <model_server_result.query_name>
|
| 45 |
+
ligand
|
| 46 |
+
|
| 47 |
+
> <model_server_result.source_id>
|
| 48 |
+
pdb-bcif
|
| 49 |
+
|
| 50 |
+
> <model_server_result.entry_id>
|
| 51 |
+
3rfm
|
| 52 |
+
|
| 53 |
+
> <model_server_params.name>
|
| 54 |
+
atom_site
|
| 55 |
+
|
| 56 |
+
> <model_server_params.value>
|
| 57 |
+
{"label_asym_id":"B","auth_seq_id":330}
|
| 58 |
+
|
| 59 |
+
> <model_server_stats.io_time_ms>
|
| 60 |
+
58
|
| 61 |
+
|
| 62 |
+
> <model_server_stats.parse_time_ms>
|
| 63 |
+
37
|
| 64 |
+
|
| 65 |
+
> <model_server_stats.create_model_time_ms>
|
| 66 |
+
3
|
| 67 |
+
|
| 68 |
+
> <model_server_stats.query_time_ms>
|
| 69 |
+
262
|
| 70 |
+
|
| 71 |
+
> <model_server_stats.encode_time_ms>
|
| 72 |
+
0
|
| 73 |
+
|
| 74 |
+
> <model_server_stats.element_count>
|
| 75 |
+
14
|
| 76 |
+
|
| 77 |
+
$$$$
|
example/5ndu.pdb
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
example/5ndu_C_8V2.sdf
ADDED
|
@@ -0,0 +1,236 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
8V2
|
| 2 |
+
ModelServer 0.9.11
|
| 3 |
+
|
| 4 |
+
91 97 0 0 1 0 0 0 0 0 0
|
| 5 |
+
-2.1940 -12.9610 26.5400 O 0 0 0 0 0 0 0 0 0 0 0 0
|
| 6 |
+
-4.8100 -9.4260 21.7910 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 7 |
+
-8.1270 -5.8030 26.7400 O 0 0 0 0 0 0 0 0 0 0 0 0
|
| 8 |
+
-11.9880 -0.1610 25.5730 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 9 |
+
-14.5120 -1.3900 25.7970 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 10 |
+
-12.7190 -1.1730 29.4710 O 0 0 0 0 0 0 0 0 0 0 0 0
|
| 11 |
+
-4.7170 -6.2870 23.5540 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 12 |
+
-3.3950 -12.4570 26.0960 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 13 |
+
-5.4630 -5.7030 26.3340 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 14 |
+
-9.3280 -3.1330 28.0820 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 15 |
+
-10.6630 -2.4060 27.8600 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 16 |
+
-10.9210 -2.1690 26.3410 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 17 |
+
-12.0780 -1.4870 26.0700 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 18 |
+
-13.3390 -2.0460 26.1190 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 19 |
+
-13.1760 0.5220 25.2870 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 20 |
+
-14.4120 -0.0670 25.3540 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 21 |
+
-3.3350 -11.9920 24.7490 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 22 |
+
-5.4890 -7.0560 25.8620 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 23 |
+
-5.0930 -8.6240 24.0310 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 24 |
+
-5.1000 -7.0580 22.3380 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 25 |
+
-4.2420 -4.2060 27.7660 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 26 |
+
-4.2080 -9.5890 24.6460 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 27 |
+
-7.2860 -4.2920 29.7570 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 28 |
+
-5.2240 -4.9870 28.6610 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 29 |
+
-5.9000 -4.2090 29.5940 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 30 |
+
-9.9310 -6.0190 30.2140 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 31 |
+
-3.6910 -13.0290 23.7030 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 32 |
+
-10.4970 -4.9760 29.2580 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 33 |
+
-7.4870 -5.4460 27.7140 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 34 |
+
-11.5960 -0.6690 29.3880 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 35 |
+
-11.3020 0.6340 30.1490 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 36 |
+
-8.7140 -6.3850 29.5490 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 37 |
+
-8.1170 -5.0850 28.9210 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 38 |
+
-5.1950 -12.8950 23.6280 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 39 |
+
-5.3160 -11.3490 23.4180 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 40 |
+
-5.1650 -10.7450 22.1100 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 41 |
+
-4.4700 -8.3770 22.6460 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 42 |
+
-2.3260 -14.3380 26.9150 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 43 |
+
-4.0420 -5.2940 26.7290 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 44 |
+
-4.2660 -10.8780 24.3810 N 0 0 0 0 0 0 0 0 0 0 0 0
|
| 45 |
+
-5.1320 -7.3040 24.6240 N 0 0 0 0 0 0 0 0 0 0 0 0
|
| 46 |
+
-6.1660 -5.4790 27.6010 N 0 0 0 0 0 0 0 0 0 0 0 0
|
| 47 |
+
-10.5890 -1.1050 28.6090 N 0 0 0 0 0 0 0 0 0 0 0 0
|
| 48 |
+
-9.3500 -4.3230 28.7690 N 0 0 0 0 0 0 0 0 0 0 0 0
|
| 49 |
+
-4.3680 -12.2620 26.8450 O 0 0 0 0 0 0 0 0 0 0 0 0
|
| 50 |
+
-8.2700 -2.7530 27.5440 O 0 0 0 0 0 0 0 0 0 0 0 0
|
| 51 |
+
-5.8340 -7.9950 26.6010 O 0 0 0 0 0 0 0 0 0 0 0 0
|
| 52 |
+
-3.3200 -9.2290 25.4600 O 0 0 0 0 0 0 0 0 0 0 0 0
|
| 53 |
+
-13.5690 -3.6750 26.6640 CL 0 0 0 0 0 0 0 0 0 0 0 0
|
| 54 |
+
-4.8050 -9.1860 20.7280 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 55 |
+
-11.0240 0.3430 25.4990 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 56 |
+
-15.4800 -1.8820 25.8960 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 57 |
+
-5.2340 -5.3290 23.6350 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 58 |
+
-3.6350 -6.1450 23.5920 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 59 |
+
-5.8730 -5.0030 25.6060 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 60 |
+
-11.4760 -3.0100 28.2660 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 61 |
+
-10.0840 -1.6100 25.9260 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 62 |
+
-10.9710 -3.1390 25.8500 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 63 |
+
-13.1100 1.5470 24.9230 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 64 |
+
-15.3080 0.5000 25.1020 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 65 |
+
-2.2950 -11.6950 24.6160 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 66 |
+
-6.1150 -8.9750 23.8830 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 67 |
+
-6.1850 -7.1780 22.3120 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 68 |
+
-4.7490 -6.5980 21.4110 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 69 |
+
-4.7440 -3.3520 27.3060 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 70 |
+
-3.3360 -3.8870 28.2870 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 71 |
+
-7.7600 -3.6350 30.4860 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 72 |
+
-4.7660 -5.8510 29.1420 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 73 |
+
-5.3300 -3.5360 30.2340 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 74 |
+
-10.4620 -6.9010 30.4120 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 75 |
+
-9.5090 -5.5750 31.0650 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 76 |
+
-3.3250 -12.6930 22.7310 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 77 |
+
-3.3040 -14.0260 23.9230 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 78 |
+
-10.9230 -5.5090 28.4040 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 79 |
+
-11.2520 -4.3190 29.6970 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 80 |
+
-10.5790 1.2270 29.5880 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 81 |
+
-10.8930 0.3960 31.1310 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 82 |
+
-12.2250 1.2020 30.2670 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 83 |
+
-8.0110 -7.0140 30.1000 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 84 |
+
-9.0890 -6.9500 28.6920 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 85 |
+
-5.7010 -13.4820 22.8600 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 86 |
+
-5.5750 -13.1570 24.6170 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 87 |
+
-6.3170 -11.0600 23.7390 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 88 |
+
-5.4170 -11.3840 21.2630 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 89 |
+
-3.3930 -8.2400 22.7390 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 90 |
+
-1.3630 -14.7140 27.2620 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 91 |
+
-2.6520 -14.9190 26.0530 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 92 |
+
-3.0610 -14.4280 27.7140 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 93 |
+
-3.4060 -5.0410 25.8790 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 94 |
+
-3.5980 -6.1200 27.2860 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 95 |
+
-9.7140 -0.6210 28.6200 H 0 0 0 0 0 0 0 0 0 0 0 0
|
| 96 |
+
1 8 1 0 0 0 0
|
| 97 |
+
1 38 1 0 0 0 0
|
| 98 |
+
2 36 2 0 0 0 0
|
| 99 |
+
2 37 1 0 0 0 0
|
| 100 |
+
2 50 1 0 0 0 0
|
| 101 |
+
3 29 2 0 0 0 0
|
| 102 |
+
4 15 1 0 0 0 0
|
| 103 |
+
4 13 2 0 0 0 0
|
| 104 |
+
4 51 1 0 0 0 0
|
| 105 |
+
5 16 1 0 0 0 0
|
| 106 |
+
5 14 2 0 0 0 0
|
| 107 |
+
5 52 1 0 0 0 0
|
| 108 |
+
6 30 2 0 0 0 0
|
| 109 |
+
7 20 1 0 0 0 0
|
| 110 |
+
7 41 1 0 0 0 0
|
| 111 |
+
7 53 1 0 0 0 0
|
| 112 |
+
7 54 1 0 0 0 0
|
| 113 |
+
8 17 1 0 0 0 0
|
| 114 |
+
8 45 2 0 0 0 0
|
| 115 |
+
9 18 1 0 0 0 0
|
| 116 |
+
9 39 1 0 0 0 0
|
| 117 |
+
9 42 1 0 0 0 0
|
| 118 |
+
9 55 1 0 0 0 0
|
| 119 |
+
10 46 2 0 0 0 0
|
| 120 |
+
10 11 1 0 0 0 0
|
| 121 |
+
10 44 1 0 0 0 0
|
| 122 |
+
11 12 1 0 0 0 0
|
| 123 |
+
11 43 1 0 0 0 0
|
| 124 |
+
11 56 1 0 0 0 0
|
| 125 |
+
12 13 1 0 0 0 0
|
| 126 |
+
12 57 1 0 0 0 0
|
| 127 |
+
12 58 1 0 0 0 0
|
| 128 |
+
13 14 1 0 0 0 0
|
| 129 |
+
14 49 1 0 0 0 0
|
| 130 |
+
15 16 2 0 0 0 0
|
| 131 |
+
15 59 1 0 0 0 0
|
| 132 |
+
16 60 1 0 0 0 0
|
| 133 |
+
17 27 1 0 0 0 0
|
| 134 |
+
17 40 1 0 0 0 0
|
| 135 |
+
17 61 1 0 0 0 0
|
| 136 |
+
18 41 1 0 0 0 0
|
| 137 |
+
18 47 2 0 0 0 0
|
| 138 |
+
19 37 1 0 0 0 0
|
| 139 |
+
19 41 1 0 0 0 0
|
| 140 |
+
19 22 1 0 0 0 0
|
| 141 |
+
19 62 1 0 0 0 0
|
| 142 |
+
20 37 1 0 0 0 0
|
| 143 |
+
20 63 1 0 0 0 0
|
| 144 |
+
20 64 1 0 0 0 0
|
| 145 |
+
21 39 1 0 0 0 0
|
| 146 |
+
21 24 1 0 0 0 0
|
| 147 |
+
21 65 1 0 0 0 0
|
| 148 |
+
21 66 1 0 0 0 0
|
| 149 |
+
22 40 1 0 0 0 0
|
| 150 |
+
22 48 2 0 0 0 0
|
| 151 |
+
23 33 1 0 0 0 0
|
| 152 |
+
23 25 2 0 0 0 0
|
| 153 |
+
23 67 1 0 0 0 0
|
| 154 |
+
24 42 1 0 0 0 0
|
| 155 |
+
24 25 1 0 0 0 0
|
| 156 |
+
24 68 1 0 0 0 0
|
| 157 |
+
25 69 1 0 0 0 0
|
| 158 |
+
26 28 1 0 0 0 0
|
| 159 |
+
26 32 1 0 0 0 0
|
| 160 |
+
26 70 1 0 0 0 0
|
| 161 |
+
26 71 1 0 0 0 0
|
| 162 |
+
27 34 1 0 0 0 0
|
| 163 |
+
27 72 1 0 0 0 0
|
| 164 |
+
27 73 1 0 0 0 0
|
| 165 |
+
28 44 1 0 0 0 0
|
| 166 |
+
28 74 1 0 0 0 0
|
| 167 |
+
28 75 1 0 0 0 0
|
| 168 |
+
29 42 1 0 0 0 0
|
| 169 |
+
29 33 1 0 0 0 0
|
| 170 |
+
30 43 1 0 0 0 0
|
| 171 |
+
30 31 1 0 0 0 0
|
| 172 |
+
31 76 1 0 0 0 0
|
| 173 |
+
31 77 1 0 0 0 0
|
| 174 |
+
31 78 1 0 0 0 0
|
| 175 |
+
32 33 1 0 0 0 0
|
| 176 |
+
32 79 1 0 0 0 0
|
| 177 |
+
32 80 1 0 0 0 0
|
| 178 |
+
33 44 1 0 0 0 0
|
| 179 |
+
34 35 1 0 0 0 0
|
| 180 |
+
34 81 1 0 0 0 0
|
| 181 |
+
34 82 1 0 0 0 0
|
| 182 |
+
35 36 1 0 0 0 0
|
| 183 |
+
35 40 1 0 0 0 0
|
| 184 |
+
35 83 1 0 0 0 0
|
| 185 |
+
36 84 1 0 0 0 0
|
| 186 |
+
37 85 1 0 0 0 0
|
| 187 |
+
38 86 1 0 0 0 0
|
| 188 |
+
38 87 1 0 0 0 0
|
| 189 |
+
38 88 1 0 0 0 0
|
| 190 |
+
39 89 1 0 0 0 0
|
| 191 |
+
39 90 1 0 0 0 0
|
| 192 |
+
43 91 1 0 0 0 0
|
| 193 |
+
M END
|
| 194 |
+
> <model_server_result.job_id>
|
| 195 |
+
7nYBEFm_cwFEDILJPHMr2A
|
| 196 |
+
|
| 197 |
+
> <model_server_result.datetime_utc>
|
| 198 |
+
2024-06-19 19:52:24
|
| 199 |
+
|
| 200 |
+
> <model_server_result.server_version>
|
| 201 |
+
0.9.11
|
| 202 |
+
|
| 203 |
+
> <model_server_result.query_name>
|
| 204 |
+
ligand
|
| 205 |
+
|
| 206 |
+
> <model_server_result.source_id>
|
| 207 |
+
pdb-bcif
|
| 208 |
+
|
| 209 |
+
> <model_server_result.entry_id>
|
| 210 |
+
5ndu
|
| 211 |
+
|
| 212 |
+
> <model_server_params.name>
|
| 213 |
+
atom_site
|
| 214 |
+
|
| 215 |
+
> <model_server_params.value>
|
| 216 |
+
{"label_asym_id":"C","auth_seq_id":201}
|
| 217 |
+
|
| 218 |
+
> <model_server_stats.io_time_ms>
|
| 219 |
+
3
|
| 220 |
+
|
| 221 |
+
> <model_server_stats.parse_time_ms>
|
| 222 |
+
185
|
| 223 |
+
|
| 224 |
+
> <model_server_stats.create_model_time_ms>
|
| 225 |
+
8
|
| 226 |
+
|
| 227 |
+
> <model_server_stats.query_time_ms>
|
| 228 |
+
252
|
| 229 |
+
|
| 230 |
+
> <model_server_stats.encode_time_ms>
|
| 231 |
+
0
|
| 232 |
+
|
| 233 |
+
> <model_server_stats.element_count>
|
| 234 |
+
91
|
| 235 |
+
|
| 236 |
+
$$$$
|
example/fragments.sdf
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
fragments
|
| 2 |
+
PyMOL3.0 3D 0
|
| 3 |
+
|
| 4 |
+
17 18 0 0 1 0 0 0 0 0999 V2000
|
| 5 |
+
-12.0780 -1.4870 26.0700 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 6 |
+
-11.9880 -0.1610 25.5730 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 7 |
+
-13.3390 -2.0460 26.1190 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 8 |
+
-13.1760 0.5220 25.2870 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 9 |
+
-14.5120 -1.3900 25.7970 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 10 |
+
-13.5690 -3.6750 26.6640 Cl 0 0 0 0 0 0 0 0 0 0 0 0
|
| 11 |
+
-14.4120 -0.0670 25.3540 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 12 |
+
-5.4630 -5.7030 26.3340 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 13 |
+
-6.1660 -5.4790 27.6010 N 0 0 0 0 0 0 0 0 0 0 0 0
|
| 14 |
+
-4.0420 -5.2940 26.7290 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 15 |
+
-7.4870 -5.4460 27.7140 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 16 |
+
-5.2240 -4.9870 28.6610 C 0 0 1 0 0 0 0 0 0 0 0 0
|
| 17 |
+
-4.2420 -4.2060 27.7660 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 18 |
+
-8.1170 -5.0850 28.9210 C 0 0 2 0 0 0 0 0 0 0 0 0
|
| 19 |
+
-8.1270 -5.8030 26.7400 O 0 0 0 0 0 0 0 0 0 0 0 0
|
| 20 |
+
-5.9000 -4.2090 29.5940 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 21 |
+
-7.2860 -4.2920 29.7570 C 0 0 0 0 0 0 0 0 0 0 0 0
|
| 22 |
+
1 3 4 0 0 0 0
|
| 23 |
+
1 2 4 0 0 0 0
|
| 24 |
+
2 4 4 0 0 0 0
|
| 25 |
+
3 6 1 0 0 0 0
|
| 26 |
+
4 7 4 0 0 0 0
|
| 27 |
+
3 5 4 0 0 0 0
|
| 28 |
+
5 7 4 0 0 0 0
|
| 29 |
+
8 9 1 0 0 0 0
|
| 30 |
+
8 10 1 0 0 0 0
|
| 31 |
+
9 11 1 0 0 0 0
|
| 32 |
+
11 14 1 0 0 0 0
|
| 33 |
+
9 12 1 0 0 0 0
|
| 34 |
+
12 13 1 0 0 0 0
|
| 35 |
+
12 16 1 0 0 0 0
|
| 36 |
+
10 13 1 0 0 0 0
|
| 37 |
+
11 15 2 0 0 0 0
|
| 38 |
+
14 17 1 0 0 0 0
|
| 39 |
+
16 17 2 0 0 0 0
|
| 40 |
+
M END
|
| 41 |
+
$$$$
|
generate_ligands.py
ADDED
|
@@ -0,0 +1,60 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import argparse
|
| 2 |
+
from pathlib import Path
|
| 3 |
+
|
| 4 |
+
import torch
|
| 5 |
+
from openbabel import openbabel
|
| 6 |
+
openbabel.obErrorLog.StopLogging() # suppress OpenBabel messages
|
| 7 |
+
|
| 8 |
+
import utils
|
| 9 |
+
from lightning_modules import LigandPocketDDPM
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
if __name__ == "__main__":
    # CLI entry point: sample ligands for a given pocket with a trained
    # LigandPocketDDPM checkpoint and write them to a single SDF file.
    parser = argparse.ArgumentParser()
    parser.add_argument('checkpoint', type=Path)
    parser.add_argument('--pdbfile', type=str)
    parser.add_argument('--resi_list', type=str, nargs='+', default=None)
    parser.add_argument('--ref_ligand', type=str, default=None)
    parser.add_argument('--outfile', type=Path)
    parser.add_argument('--n_samples', type=int, default=20)
    parser.add_argument('--batch_size', type=int, default=None)
    parser.add_argument('--num_nodes_lig', type=int, default=None)
    parser.add_argument('--all_frags', action='store_true')
    parser.add_argument('--sanitize', action='store_true')
    parser.add_argument('--relax', action='store_true')
    parser.add_argument('--resamplings', type=int, default=10)
    parser.add_argument('--jump_length', type=int, default=1)
    parser.add_argument('--timesteps', type=int, default=None)
    args = parser.parse_args()

    device = 'cuda' if torch.cuda.is_available() else 'cpu'

    if args.batch_size is None:
        args.batch_size = args.n_samples
    # Validate with an explicit error instead of `assert`: asserts are
    # stripped when Python runs with -O, silently skipping the check.
    if args.n_samples % args.batch_size != 0:
        raise ValueError('--n_samples must be a multiple of --batch_size')

    # Load model
    model = LigandPocketDDPM.load_from_checkpoint(
        args.checkpoint, map_location=device)
    model = model.to(device)

    # Fixed ligand size for every sample, or None to let the model choose.
    if args.num_nodes_lig is not None:
        num_nodes_lig = torch.full((args.n_samples,), args.num_nodes_lig,
                                   dtype=torch.long)
    else:
        num_nodes_lig = None

    molecules = []
    for i in range(args.n_samples // args.batch_size):
        molecules_batch = model.generate_ligands(
            args.pdbfile, args.batch_size, args.resi_list, args.ref_ligand,
            num_nodes_lig, args.sanitize, largest_frag=not args.all_frags,
            relax_iter=(200 if args.relax else 0),
            resamplings=args.resamplings, jump_length=args.jump_length,
            timesteps=args.timesteps)
        molecules.extend(molecules_batch)

    # Make SDF files
    utils.write_sdf_file(args.outfile, molecules)
|
geometry_utils.py
ADDED
|
@@ -0,0 +1,141 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
|
| 3 |
+
from constants import CA_C_DIST, N_CA_DIST, N_CA_C_ANGLE
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def rotation_matrix(angle, axis):
    """Batch of 3x3 rotation matrices about a single coordinate axis.

    Args:
        angle: (n,) rotation angles in radians
        axis: 0=x, 1=y, 2=z
    Returns:
        (n, 3, 3)
    """
    n = len(angle)
    c, s = np.cos(angle), np.sin(angle)
    R = np.eye(3)[None, :, :].repeat(n, axis=0)

    # Fill the 2x2 rotation sub-block in the plane orthogonal to `axis`,
    # using the standard right-handed sign conventions.
    if axis == 0:    # about x: acts on the (y, z) plane
        R[:, 1, 1], R[:, 1, 2] = c, -s
        R[:, 2, 1], R[:, 2, 2] = s, c
    elif axis == 1:  # about y: acts on the (x, z) plane
        R[:, 0, 0], R[:, 0, 2] = c, s
        R[:, 2, 0], R[:, 2, 2] = -s, c
    else:            # about z: acts on the (x, y) plane
        R[:, 0, 0], R[:, 0, 1] = c, -s
        R[:, 1, 0], R[:, 1, 1] = s, c
    return R
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def get_bb_transform(n_xyz, ca_xyz, c_xyz):
    """
    Compute translation and rotation of the canonical backbone frame
    (triangle N-Ca-C) from a position with Ca at the origin, N on the x-axis
    and C in the xy-plane to the global position of the backbone frame.

    The rotation is found by undoing, in sequence, a y-rotation (N into the
    xy-plane), a z-rotation (N onto the x-axis) and an x-rotation (C into
    the xy-plane); the forward transform is the composition Ry @ Rz @ Rx.

    Args:
        n_xyz: (n, 3) global N-atom coordinates
        ca_xyz: (n, 3) global C-alpha coordinates
        c_xyz: (n, 3) global C-atom coordinates

    Returns:
        quaternion represented as array of shape (n, 4)
        translation vector which is an array of shape (n, 3)
    """

    # The Ca position is the frame origin, hence the translation.
    translation = ca_xyz
    n_xyz = n_xyz - translation
    c_xyz = c_xyz - translation

    # Find rotation matrix that aligns the coordinate systems
    # rotate around y-axis to move N into the xy-plane
    theta_y = np.arctan2(n_xyz[:, 2], -n_xyz[:, 0])
    Ry = rotation_matrix(theta_y, 1)
    # Apply the inverse rotation (transpose) to the N coordinates.
    n_xyz = np.einsum('noi,ni->no', Ry.transpose(0, 2, 1), n_xyz)

    # rotate around z-axis to move N onto the x-axis
    theta_z = np.arctan2(n_xyz[:, 1], n_xyz[:, 0])
    Rz = rotation_matrix(theta_z, 2)
    # n_xyz = np.einsum('noi,ni->no', Rz.transpose(0, 2, 1), n_xyz)

    # rotate around x-axis to move C into the xy-plane
    # (Rz^T @ Ry^T is applied to C in one einsum)
    c_xyz = np.einsum('noj,nji,ni->no', Rz.transpose(0, 2, 1),
                      Ry.transpose(0, 2, 1), c_xyz)
    theta_x = np.arctan2(c_xyz[:, 2], c_xyz[:, 1])
    Rx = rotation_matrix(theta_x, 0)

    # Final rotation matrix: composition of the three axis rotations.
    R = np.einsum('nok,nkj,nji->noi', Ry, Rz, Rx)

    # Convert to quaternion
    # q = w + i*u_x + j*u_y + k * u_z
    quaternion = rotation_matrix_to_quaternion(R)

    return quaternion, translation
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def get_bb_coords_from_transform(ca_coords, quaternion):
    """Reconstruct backbone atom positions from Ca positions and rotations.

    Args:
        ca_coords: (n, 3)
        quaternion: (n, 4)
    Returns:
        backbone coords (n*3, 3), order is [N, CA, C]
        backbone atom types as a list of length n*3
    """
    n_res = len(ca_coords)
    rot = quaternion_to_rotation_matrix(quaternion)

    # Canonical local frame: CA at the origin, N on the x-axis,
    # C placed in the xy-plane at the idealized bond geometry.
    local_frame = np.array(
        [[N_CA_DIST, 0, 0],
         [0, 0, 0],
         [CA_C_DIST * np.cos(N_CA_C_ANGLE), CA_C_DIST * np.sin(N_CA_C_ANGLE), 0]])

    template = np.tile(local_frame, [n_res, 1])
    # Rotate each residue's three template atoms, then shift by its Ca.
    rotated = np.einsum('noi,ni->no', rot.repeat(3, axis=0), template)
    coords = rotated + ca_coords.repeat(3, axis=0)
    atom_types = ['N', 'C', 'C'] * n_res

    return coords, atom_types
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def quaternion_to_rotation_matrix(q):
    """Convert unit quaternions to rotation matrices (x_rot = R x).

    Args:
        q: (n, 4) quaternions as (w, x, y, z); normalized internally
    Returns:
        R: (n, 3, 3)
    """
    # Normalize so arbitrary-magnitude quaternions give proper rotations.
    q = q / np.linalg.norm(q, axis=1, keepdims=True)

    # https://en.wikipedia.org/wiki/Rotation_matrix#Quaternion
    w, x, y, z = q.T
    row0 = np.stack([1 - 2 * (y * y + z * z),
                     2 * (x * y - z * w),
                     2 * (x * z + y * w)], axis=1)
    row1 = np.stack([2 * (x * y + z * w),
                     1 - 2 * (x * x + z * z),
                     2 * (y * z - x * w)], axis=1)
    row2 = np.stack([2 * (x * z - y * w),
                     2 * (y * z + x * w),
                     1 - 2 * (x * x + y * y)], axis=1)
    return np.stack([row0, row1, row2], axis=1)
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def rotation_matrix_to_quaternion(R):
    """Convert rotation matrices to (w, x, y, z) quaternions.

    https://en.wikipedia.org/wiki/Rotation_matrix#Quaternion
    NOTE(review): this closed form takes sqrt(1 + trace), which produces NaN
    for rotations with trace < -1 (180-degree rotations under round-off) —
    presumably inputs here stay away from that case; confirm with callers.

    Args:
        R: (n, 3, 3)
    Returns:
        q: (n, 4)
    """

    diag = np.diagonal(R, axis1=1, axis2=2)  # (n, 3)
    d0, d1, d2 = diag[:, 0], diag[:, 1], diag[:, 2]

    w = 0.5 * np.sqrt(1 + d0 + d1 + d2)
    # Component magnitudes come from the diagonal; signs come from the
    # antisymmetric off-diagonal differences.
    x = np.sign(R[:, 2, 1] - R[:, 1, 2]) * np.abs(
        0.5 * np.sqrt(1 + d0 - d1 - d2))
    y = np.sign(R[:, 0, 2] - R[:, 2, 0]) * np.abs(
        0.5 * np.sqrt(1 - d0 + d1 - d2))
    z = np.sign(R[:, 1, 0] - R[:, 0, 1]) * np.abs(
        0.5 * np.sqrt(1 - d0 - d1 + d2))

    return np.stack((w, x, y, z), axis=1)
|
img/overview.png
ADDED
|
Git LFS Details
|
inpaint.py
ADDED
|
@@ -0,0 +1,230 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import argparse
|
| 2 |
+
from pathlib import Path
|
| 3 |
+
|
| 4 |
+
import numpy as np
|
| 5 |
+
import torch
|
| 6 |
+
import torch.nn.functional as F
|
| 7 |
+
from Bio.PDB import PDBParser
|
| 8 |
+
from rdkit import Chem
|
| 9 |
+
from torch_scatter import scatter_mean
|
| 10 |
+
from openbabel import openbabel
|
| 11 |
+
openbabel.obErrorLog.StopLogging() # suppress OpenBabel messages
|
| 12 |
+
|
| 13 |
+
import utils
|
| 14 |
+
from lightning_modules import LigandPocketDDPM
|
| 15 |
+
from constants import FLOAT_TYPE, INT_TYPE
|
| 16 |
+
from analysis.molecule_builder import build_molecule, process_molecule
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def prepare_from_sdf_files(sdf_files, atom_encoder):
    """Read ligand coordinates and one-hot atom types from SDF files.

    Args:
        sdf_files: iterable of SDF file paths (the first molecule of each
            file is used, read without sanitization)
        atom_encoder: mapping from element symbol to atom-type index
    Returns:
        (coords, one_hot): float coordinate tensor and one-hot type tensor,
        concatenated over all files along dim 0
    """
    all_coords = []
    all_one_hot = []
    for sdf_file in sdf_files:
        rdmol = Chem.SDMolSupplier(str(sdf_file), sanitize=False)[0]
        positions = rdmol.GetConformer().GetPositions()
        all_coords.append(torch.from_numpy(positions).float())
        type_idx = torch.tensor(
            [atom_encoder[atom.GetSymbol()] for atom in rdmol.GetAtoms()])
        all_one_hot.append(
            F.one_hot(type_idx, num_classes=len(atom_encoder)))

    return torch.cat(all_coords, dim=0), torch.cat(all_one_hot, dim=0)
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def prepare_ligand_from_pdb(biopython_atoms, atom_encoder):
    """Extract coordinates and one-hot atom types from Bio.PDB atoms.

    Args:
        biopython_atoms: iterable of Bio.PDB Atom objects
        atom_encoder: mapping from element symbol to atom-type index
    Returns:
        (coord, one_hot): coordinate tensor (FLOAT_TYPE) and one-hot
        type tensor for the given atoms
    """
    positions = np.array([atom.get_coord() for atom in biopython_atoms])
    coord = torch.tensor(positions, dtype=FLOAT_TYPE)
    # element symbols are capitalized to match the encoder keys (e.g. 'CL' -> 'Cl')
    type_idx = torch.tensor(
        [atom_encoder[atom.element.capitalize()] for atom in biopython_atoms])
    one_hot = F.one_hot(type_idx, num_classes=len(atom_encoder))

    return coord, one_hot
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
def prepare_substructure(ref_ligand, fix_atoms, pdb_model, atom_encoder=None):
    """Extract coordinates and one-hot types of the fixed ligand substructure.

    Args:
        ref_ligand: reference ligand in <chain>:<resi> format (ignored when
            fix_atoms points to SDF files)
        fix_atoms: either a list of SDF file paths, or a list of atom names
            within the reference ligand (e.g. ['C1', 'N6'])
        pdb_model: Bio.PDB model containing the reference ligand
        atom_encoder: mapping from element symbol to atom-type index. Defaults
            to the module-level `model`'s ligand type encoder for backward
            compatibility (the original implementation relied on that global,
            which only exists when this file runs as a script).
    Returns:
        (coord, one_hot) tensors for the fixed atoms
    """
    if atom_encoder is None:
        # NOTE(review): falls back to the global `model` created in the
        # __main__ block; prefer passing atom_encoder explicitly when
        # importing this module as a library.
        atom_encoder = model.lig_type_encoder

    if fix_atoms[0].endswith(".sdf"):
        # ligand as sdf file
        coord, one_hot = prepare_from_sdf_files(fix_atoms, atom_encoder)

    else:
        # ligand contained in PDB; given in <chain>:<resi> format
        chain, resi = ref_ligand.split(':')
        ligand = utils.get_residue_with_resi(pdb_model[chain], int(resi))
        fixed_atoms = [a for a in ligand.get_atoms()
                       if a.get_name() in set(fix_atoms)]
        coord, one_hot = prepare_ligand_from_pdb(fixed_atoms, atom_encoder)

    return coord, one_hot
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
def inpaint_ligand(model, pdb_file, n_samples, ligand, fix_atoms,
                   add_n_nodes=None, center='ligand', sanitize=False,
                   largest_frag=False, relax_iter=0, timesteps=None,
                   resamplings=1, save_traj=False):
    """
    Generate ligands given a pocket
    Args:
        model: Lightning model
        pdb_file: PDB filename
        n_samples: number of samples
        ligand: reference ligand given in <chain>:<resi> format if the ligand is
            contained in the PDB file, or path to an SDF file that
            contains the ligand; used to define the pocket
        fix_atoms: ligand atoms that should be fixed, e.g. "C1 N6 C5 C12"
        center: 'ligand' or 'pocket'
        add_n_nodes: number of ligand nodes to add, sampled randomly if 'None'
        sanitize: whether to sanitize molecules or not
        largest_frag: only return the largest fragment
        relax_iter: number of force field optimization steps
        timesteps: number of denoising steps, use training value if None
        resamplings: number of resampling iterations
        save_traj: save intermediate states to visualize a denoising trajectory
    Returns:
        list of molecules
    """
    if save_traj and n_samples > 1:
        raise NotImplementedError("Can only visualize trajectory with "
                                  "n_samples=1.")
    # Trajectory mode returns one frame per timestep and disables all
    # post-processing that would alter or drop intermediate states.
    frames = timesteps if save_traj else 1
    sanitize = False if save_traj else sanitize
    relax_iter = 0 if save_traj else relax_iter
    largest_frag = False if save_traj else largest_frag

    # Load PDB
    pdb_model = PDBParser(QUIET=True).get_structure('', pdb_file)[0]

    # Define pocket based on reference ligand
    residues = utils.get_pocket_from_ligand(pdb_model, ligand)
    pocket = model.prepare_pocket(residues, repeats=n_samples)

    # Get fixed ligand substructure
    x_fixed, one_hot_fixed = prepare_substructure(ligand, fix_atoms, pdb_model)
    n_fixed = len(x_fixed)

    if add_n_nodes is None:
        # Sample ligand sizes conditioned on the pocket size, but never
        # smaller than the fixed substructure.
        num_nodes_lig = model.ddpm.size_distribution.sample_conditional(
            n1=None, n2=pocket['size'])
        num_nodes_lig = torch.clamp(num_nodes_lig, min=n_fixed)
    else:
        num_nodes_lig = torch.ones(n_samples, dtype=int) * n_fixed + add_n_nodes

    ligand_mask = utils.num_nodes_to_batch_mask(
        len(num_nodes_lig), num_nodes_lig, model.device)

    # NOTE: the `ligand` parameter (reference ligand spec) is reused here as
    # the sampling input dict; coordinates/types start at zero and the fixed
    # atoms are filled in below.
    ligand = {
        'x': torch.zeros((len(ligand_mask), model.x_dims),
                         device=model.device, dtype=FLOAT_TYPE),
        'one_hot': torch.zeros((len(ligand_mask), model.atom_nf),
                               device=model.device, dtype=FLOAT_TYPE),
        'size': num_nodes_lig,
        'mask': ligand_mask
    }

    # fill in fixed atoms
    # (boolean-mask indexing copies, so each slice is modified and written
    # back; the first n_fixed nodes of every sample hold the substructure)
    lig_fixed = torch.zeros_like(ligand_mask)
    for i in range(n_samples):
        sele = (ligand_mask == i)

        x_new = ligand['x'][sele]
        x_new[:n_fixed] = x_fixed
        ligand['x'][sele] = x_new

        h_new = ligand['one_hot'][sele]
        h_new[:n_fixed] = one_hot_fixed
        ligand['one_hot'][sele] = h_new

        fixed_new = lig_fixed[sele]
        fixed_new[:n_fixed] = 1
        lig_fixed[sele] = fixed_new

    # Pocket's center of mass
    pocket_com_before = scatter_mean(pocket['x'], pocket['mask'], dim=0)

    # Run sampling
    xh_lig, xh_pocket, lig_mask, pocket_mask = model.ddpm.inpaint(
        ligand, pocket, lig_fixed, center=center,
        resamplings=resamplings, timesteps=timesteps, return_frames=frames)

    # Treat intermediate states as molecules for downstream processing
    if save_traj:
        # Frames come out in reverse (denoising) order; flip so index 0 is
        # the final molecule, then flatten (frames, nodes, feat) -> batch.
        xh_lig = utils.reverse_tensor(xh_lig)
        xh_pocket = utils.reverse_tensor(xh_pocket)

        lig_mask = torch.arange(xh_lig.size(0), device=model.device
                                ).repeat_interleave(len(lig_mask))
        pocket_mask = torch.arange(xh_pocket.size(0), device=model.device
                                   ).repeat_interleave(len(pocket_mask))

        xh_lig = xh_lig.view(-1, xh_lig.size(2))
        xh_pocket = xh_pocket.view(-1, xh_pocket.size(2))

    # Move generated molecule back to the original pocket position
    pocket_com_after = scatter_mean(xh_pocket[:, :model.x_dims], pocket_mask, dim=0)

    xh_pocket[:, :model.x_dims] += \
        (pocket_com_before - pocket_com_after)[pocket_mask]
    xh_lig[:, :model.x_dims] += \
        (pocket_com_before - pocket_com_after)[lig_mask]

    # Build mol objects
    x = xh_lig[:, :model.x_dims].detach().cpu()
    atom_type = xh_lig[:, model.x_dims:].argmax(1).detach().cpu()

    molecules = []
    for mol_pc in zip(utils.batch_to_list(x, lig_mask),
                      utils.batch_to_list(atom_type, lig_mask)):

        mol = build_molecule(*mol_pc, model.dataset_info, add_coords=True)
        # process_molecule may return None (e.g. failed sanitization),
        # in which case the sample is dropped.
        mol = process_molecule(mol,
                               add_hydrogens=False,
                               sanitize=sanitize,
                               relax_iter=relax_iter,
                               largest_frag=largest_frag)
        if mol is not None:
            molecules.append(mol)

    return molecules
|
| 190 |
+
|
| 191 |
+
|
| 192 |
+
if __name__ == "__main__":

    # Command-line entry point: inpaint new ligand atoms around a fixed
    # substructure inside a given protein pocket and write results to SDF.
    parser = argparse.ArgumentParser()
    parser.add_argument('checkpoint', type=Path)
    parser.add_argument('--pdbfile', type=str)
    parser.add_argument('--ref_ligand', type=str, default=None)
    parser.add_argument('--fix_atoms', type=str, nargs='+', default=None)
    parser.add_argument('--center', type=str, default='ligand', choices={'ligand', 'pocket'})
    parser.add_argument('--outfile', type=Path)
    parser.add_argument('--n_samples', type=int, default=20)
    parser.add_argument('--add_n_nodes', type=int, default=None)
    parser.add_argument('--relax', action='store_true')
    parser.add_argument('--sanitize', action='store_true')
    parser.add_argument('--resamplings', type=int, default=20)
    parser.add_argument('--timesteps', type=int, default=50)
    parser.add_argument('--save_traj', action='store_true')
    args = parser.parse_args()

    # NOTE(review): pdb_id is computed but never used below.
    pdb_id = Path(args.pdbfile).stem

    device = 'cuda' if torch.cuda.is_available() else 'cpu'

    # Load model
    # (this global `model` is also relied upon by prepare_substructure)
    model = LigandPocketDDPM.load_from_checkpoint(
        args.checkpoint, map_location=device)
    model = model.to(device)

    molecules = inpaint_ligand(model, args.pdbfile, args.n_samples,
                               args.ref_ligand, args.fix_atoms,
                               args.add_n_nodes, center=args.center,
                               sanitize=args.sanitize,
                               largest_frag=False,
                               relax_iter=(200 if args.relax else 0),
                               timesteps=args.timesteps,
                               resamplings=args.resamplings,
                               save_traj=args.save_traj)

    # Make SDF files
    utils.write_sdf_file(args.outfile, molecules)
|
lightning_modules.py
ADDED
|
@@ -0,0 +1,914 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import math
|
| 2 |
+
from argparse import Namespace
|
| 3 |
+
from typing import Optional
|
| 4 |
+
from time import time
|
| 5 |
+
from pathlib import Path
|
| 6 |
+
|
| 7 |
+
import numpy as np
|
| 8 |
+
import torch
|
| 9 |
+
import torch.nn.functional as F
|
| 10 |
+
from torch.utils.data import DataLoader
|
| 11 |
+
import pytorch_lightning as pl
|
| 12 |
+
import wandb
|
| 13 |
+
from torch_scatter import scatter_add, scatter_mean
|
| 14 |
+
from Bio.PDB import PDBParser
|
| 15 |
+
from Bio.PDB.Polypeptide import three_to_one
|
| 16 |
+
|
| 17 |
+
from constants import dataset_params, FLOAT_TYPE, INT_TYPE
|
| 18 |
+
from equivariant_diffusion.dynamics import EGNNDynamics
|
| 19 |
+
from equivariant_diffusion.en_diffusion import EnVariationalDiffusion
|
| 20 |
+
from equivariant_diffusion.conditional_model import ConditionalDDPM, \
|
| 21 |
+
SimpleConditionalDDPM
|
| 22 |
+
from dataset import ProcessedLigandPocketDataset
|
| 23 |
+
import utils
|
| 24 |
+
from analysis.visualization import save_xyz_file, visualize, visualize_chain
|
| 25 |
+
from analysis.metrics import BasicMolecularMetrics, CategoricalDistribution, \
|
| 26 |
+
MoleculeProperties
|
| 27 |
+
from analysis.molecule_builder import build_molecule, process_molecule
|
| 28 |
+
from analysis.docking import smina_score
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class LigandPocketDDPM(pl.LightningModule):
|
| 32 |
+
    def __init__(
            self,
            outdir,
            dataset,
            datadir,
            batch_size,
            lr,
            egnn_params: Namespace,
            diffusion_params,
            num_workers,
            augment_noise,
            augment_rotation,
            clip_grad,
            eval_epochs,
            eval_params,
            visualize_sample_epoch,
            visualize_chain_epoch,
            auxiliary_loss,
            loss_params,
            mode,
            node_histogram,
            pocket_representation='CA',
            virtual_nodes=False
    ):
        """Build the ligand-pocket diffusion Lightning module.

        Sets up the dataset metadata, type encoders/decoders, evaluation
        metrics, the EGNN dynamics network, and the DDPM variant selected
        by `mode` ('joint', 'pocket_conditioning', or
        'pocket_conditioning_simple').
        """
        super(LigandPocketDDPM, self).__init__()
        self.save_hyperparameters()

        # Map training mode to the corresponding diffusion-model class.
        ddpm_models = {'joint': EnVariationalDiffusion,
                       'pocket_conditioning': ConditionalDDPM,
                       'pocket_conditioning_simple': SimpleConditionalDDPM}
        assert mode in ddpm_models
        self.mode = mode
        assert pocket_representation in {'CA', 'full-atom'}
        self.pocket_representation = pocket_representation

        self.dataset_name = dataset
        self.datadir = datadir
        self.outdir = outdir
        self.batch_size = batch_size
        # Fall back to the training batch size if no eval batch size given.
        self.eval_batch_size = eval_params.eval_batch_size \
            if 'eval_batch_size' in eval_params else batch_size
        self.lr = lr
        self.loss_type = diffusion_params.diffusion_loss_type
        self.eval_epochs = eval_epochs
        self.visualize_sample_epoch = visualize_sample_epoch
        self.visualize_chain_epoch = visualize_chain_epoch
        self.eval_params = eval_params
        self.num_workers = num_workers
        self.augment_noise = augment_noise
        self.augment_rotation = augment_rotation
        self.dataset_info = dataset_params[dataset]
        self.T = diffusion_params.diffusion_steps
        self.clip_grad = clip_grad
        if clip_grad:
            # Running queue of recent gradient norms for adaptive clipping.
            self.gradnorm_queue = utils.Queue()
            # Add large value that will be flushed.
            self.gradnorm_queue.add(3000)

        # Ligand nodes are typed by element; pocket nodes by amino acid
        # (CA representation) or by element (full-atom representation).
        self.lig_type_encoder = self.dataset_info['atom_encoder']
        self.lig_type_decoder = self.dataset_info['atom_decoder']
        self.pocket_type_encoder = self.dataset_info['aa_encoder'] \
            if self.pocket_representation == 'CA' \
            else self.dataset_info['atom_encoder']
        self.pocket_type_decoder = self.dataset_info['aa_decoder'] \
            if self.pocket_representation == 'CA' \
            else self.dataset_info['atom_decoder']

        smiles_list = None if eval_params.smiles_file is None \
            else np.load(eval_params.smiles_file)
        self.ligand_metrics = BasicMolecularMetrics(self.dataset_info,
                                                    smiles_list)
        self.molecule_properties = MoleculeProperties()
        self.ligand_type_distribution = CategoricalDistribution(
            self.dataset_info['atom_hist'], self.lig_type_encoder)
        if self.pocket_representation == 'CA':
            self.pocket_type_distribution = CategoricalDistribution(
                self.dataset_info['aa_hist'], self.pocket_type_encoder)
        else:
            self.pocket_type_distribution = None

        # Datasets are lazily created in setup().
        self.train_dataset = None
        self.val_dataset = None
        self.test_dataset = None

        self.virtual_nodes = virtual_nodes
        self.data_transform = None
        self.max_num_nodes = len(node_histogram) - 1
        if virtual_nodes:
            # symbol = 'virtual'
            symbol = 'Ne'  # visualize as Neon atoms
            self.lig_type_encoder[symbol] = len(self.lig_type_encoder)
            self.virtual_atom = self.lig_type_encoder[symbol]
            self.lig_type_decoder.append(symbol)
            self.data_transform = utils.AppendVirtualNodes(
                self.max_num_nodes, self.lig_type_encoder, symbol)

            # Update dataset_info dictionary. This is necessary for using the
            # visualization functions.
            self.dataset_info['atom_encoder'] = self.lig_type_encoder
            self.dataset_info['atom_decoder'] = self.lig_type_decoder

        self.atom_nf = len(self.lig_type_decoder)
        self.aa_nf = len(self.pocket_type_decoder)
        self.x_dims = 3

        # Equivariant denoising network shared by all DDPM variants.
        net_dynamics = EGNNDynamics(
            atom_nf=self.atom_nf,
            residue_nf=self.aa_nf,
            n_dims=self.x_dims,
            joint_nf=egnn_params.joint_nf,
            device=egnn_params.device if torch.cuda.is_available() else 'cpu',
            hidden_nf=egnn_params.hidden_nf,
            act_fn=torch.nn.SiLU(),
            n_layers=egnn_params.n_layers,
            attention=egnn_params.attention,
            tanh=egnn_params.tanh,
            norm_constant=egnn_params.norm_constant,
            inv_sublayers=egnn_params.inv_sublayers,
            sin_embedding=egnn_params.sin_embedding,
            normalization_factor=egnn_params.normalization_factor,
            aggregation_method=egnn_params.aggregation_method,
            # .get() is used because these keys are optional in older configs.
            edge_cutoff_ligand=egnn_params.__dict__.get('edge_cutoff_ligand'),
            edge_cutoff_pocket=egnn_params.__dict__.get('edge_cutoff_pocket'),
            edge_cutoff_interaction=egnn_params.__dict__.get('edge_cutoff_interaction'),
            # Pocket coordinates are only updated in joint mode.
            update_pocket_coords=(self.mode == 'joint'),
            reflection_equivariant=egnn_params.reflection_equivariant,
            edge_embedding_dim=egnn_params.__dict__.get('edge_embedding_dim'),
        )

        self.ddpm = ddpm_models[self.mode](
            dynamics=net_dynamics,
            atom_nf=self.atom_nf,
            residue_nf=self.aa_nf,
            n_dims=self.x_dims,
            timesteps=diffusion_params.diffusion_steps,
            noise_schedule=diffusion_params.diffusion_noise_schedule,
            noise_precision=diffusion_params.diffusion_noise_precision,
            loss_type=diffusion_params.diffusion_loss_type,
            norm_values=diffusion_params.normalize_factors,
            size_histogram=node_histogram,
            # `symbol` only exists when virtual_nodes is True; the
            # conditional guards against a NameError otherwise.
            virtual_node_idx=self.lig_type_encoder[symbol] if virtual_nodes else None
        )

        self.auxiliary_loss = auxiliary_loss
        # Lennard-Jones equilibrium distances per atom-type pair (pm).
        self.lj_rm = self.dataset_info['lennard_jones_rm']
        if self.auxiliary_loss:
            self.clamp_lj = loss_params.clamp_lj
            self.auxiliary_weight_schedule = WeightSchedule(
                T=diffusion_params.diffusion_steps,
                max_weight=loss_params.max_weight, mode=loss_params.schedule)
|
| 182 |
+
|
| 183 |
+
def configure_optimizers(self):
|
| 184 |
+
return torch.optim.AdamW(self.ddpm.parameters(), lr=self.lr,
|
| 185 |
+
amsgrad=True, weight_decay=1e-12)
|
| 186 |
+
|
| 187 |
+
def setup(self, stage: Optional[str] = None):
|
| 188 |
+
if stage == 'fit':
|
| 189 |
+
self.train_dataset = ProcessedLigandPocketDataset(
|
| 190 |
+
Path(self.datadir, 'train.npz'), transform=self.data_transform)
|
| 191 |
+
self.val_dataset = ProcessedLigandPocketDataset(
|
| 192 |
+
Path(self.datadir, 'val.npz'), transform=self.data_transform)
|
| 193 |
+
elif stage == 'test':
|
| 194 |
+
self.test_dataset = ProcessedLigandPocketDataset(
|
| 195 |
+
Path(self.datadir, 'test.npz'), transform=self.data_transform)
|
| 196 |
+
else:
|
| 197 |
+
raise NotImplementedError
|
| 198 |
+
|
| 199 |
+
def train_dataloader(self):
|
| 200 |
+
return DataLoader(self.train_dataset, self.batch_size, shuffle=True,
|
| 201 |
+
num_workers=self.num_workers,
|
| 202 |
+
collate_fn=self.train_dataset.collate_fn,
|
| 203 |
+
pin_memory=True)
|
| 204 |
+
|
| 205 |
+
def val_dataloader(self):
|
| 206 |
+
return DataLoader(self.val_dataset, self.batch_size, shuffle=False,
|
| 207 |
+
num_workers=self.num_workers,
|
| 208 |
+
collate_fn=self.val_dataset.collate_fn,
|
| 209 |
+
pin_memory=True)
|
| 210 |
+
|
| 211 |
+
def test_dataloader(self):
|
| 212 |
+
return DataLoader(self.test_dataset, self.batch_size, shuffle=False,
|
| 213 |
+
num_workers=self.num_workers,
|
| 214 |
+
collate_fn=self.test_dataset.collate_fn,
|
| 215 |
+
pin_memory=True)
|
| 216 |
+
|
| 217 |
+
def get_ligand_and_pocket(self, data):
|
| 218 |
+
ligand = {
|
| 219 |
+
'x': data['lig_coords'].to(self.device, FLOAT_TYPE),
|
| 220 |
+
'one_hot': data['lig_one_hot'].to(self.device, FLOAT_TYPE),
|
| 221 |
+
'size': data['num_lig_atoms'].to(self.device, INT_TYPE),
|
| 222 |
+
'mask': data['lig_mask'].to(self.device, INT_TYPE),
|
| 223 |
+
}
|
| 224 |
+
if self.virtual_nodes:
|
| 225 |
+
ligand['num_virtual_atoms'] = data['num_virtual_atoms'].to(
|
| 226 |
+
self.device, INT_TYPE)
|
| 227 |
+
|
| 228 |
+
pocket = {
|
| 229 |
+
'x': data['pocket_coords'].to(self.device, FLOAT_TYPE),
|
| 230 |
+
'one_hot': data['pocket_one_hot'].to(self.device, FLOAT_TYPE),
|
| 231 |
+
'size': data['num_pocket_nodes'].to(self.device, INT_TYPE),
|
| 232 |
+
'mask': data['pocket_mask'].to(self.device, INT_TYPE)
|
| 233 |
+
}
|
| 234 |
+
return ligand, pocket
|
| 235 |
+
|
| 236 |
+
    def forward(self, data):
        """Compute the (negative log-likelihood) training/eval loss for one
        batch, plus a dict of per-term diagnostics.

        Returns:
            (nll, info): per-sample loss tensor and metrics dictionary.
        """
        ligand, pocket = self.get_ligand_and_pocket(data)

        # Note: \mathcal{L} terms in the paper represent log-likelihoods while
        # our loss terms are a negative(!) log-likelihoods
        delta_log_px, error_t_lig, error_t_pocket, SNR_weight, \
        loss_0_x_ligand, loss_0_x_pocket, loss_0_h, neg_log_const_0, \
        kl_prior, log_pN, t_int, xh_lig_hat, info = \
            self.ddpm(ligand, pocket, return_info=True)

        if self.loss_type == 'l2' and self.training:
            # With virtual nodes the coordinate loss is averaged over real
            # atoms only, while the type loss covers all nodes.
            actual_ligand_size = ligand['size'] - ligand['num_virtual_atoms'] if self.virtual_nodes else ligand['size']

            # normalize loss_t
            denom_lig = self.x_dims * actual_ligand_size + \
                        self.ddpm.atom_nf * ligand['size']
            error_t_lig = error_t_lig / denom_lig
            denom_pocket = (self.x_dims + self.ddpm.residue_nf) * pocket['size']
            error_t_pocket = error_t_pocket / denom_pocket
            loss_t = 0.5 * (error_t_lig + error_t_pocket)

            # normalize loss_0
            loss_0_x_ligand = loss_0_x_ligand / (self.x_dims * actual_ligand_size)
            loss_0_x_pocket = loss_0_x_pocket / (self.x_dims * pocket['size'])
            loss_0 = loss_0_x_ligand + loss_0_x_pocket + loss_0_h

        # VLB objective or evaluation step
        else:
            # Note: SNR_weight should be negative
            loss_t = -self.T * 0.5 * SNR_weight * (error_t_lig + error_t_pocket)
            loss_0 = loss_0_x_ligand + loss_0_x_pocket + loss_0_h
            loss_0 = loss_0 + neg_log_const_0

        nll = loss_t + loss_0 + kl_prior

        # Correct for normalization on x.
        if not (self.loss_type == 'l2' and self.training):
            nll = nll - delta_log_px

            # always the same number of nodes if virtual nodes are added
            if not self.virtual_nodes:
                # Transform conditional nll into joint nll
                # Note:
                # loss = -log p(x,h|N) and log p(x,h,N) = log p(x,h|N) + log p(N)
                # Therefore, log p(x,h|N) = -loss + log p(N)
                # => loss_new = -log p(x,h,N) = loss - log p(N)
                nll = nll - log_pN

        # Add auxiliary loss term
        if self.auxiliary_loss and self.loss_type == 'l2' and self.training:
            x_lig_hat = xh_lig_hat[:, :self.x_dims]
            h_lig_hat = xh_lig_hat[:, self.x_dims:]
            # Lennard-Jones penalty on the predicted structure, weighted by
            # a timestep-dependent schedule (stronger near t=0).
            weighted_lj_potential = \
                self.auxiliary_weight_schedule(t_int.long()) * \
                self.lj_potential(x_lig_hat, h_lig_hat, ligand['mask'])
            nll = nll + weighted_lj_potential
            info['weighted_lj'] = weighted_lj_potential.mean(0)

        info['error_t_lig'] = error_t_lig.mean(0)
        info['error_t_pocket'] = error_t_pocket.mean(0)
        info['SNR_weight'] = SNR_weight.mean(0)
        info['loss_0'] = loss_0.mean(0)
        info['kl_prior'] = kl_prior.mean(0)
        info['delta_log_px'] = delta_log_px.mean(0)
        info['neg_log_const_0'] = neg_log_const_0.mean(0)
        info['log_pN'] = log_pN.mean(0)
        return nll, info
|
| 303 |
+
|
| 304 |
+
    def lj_potential(self, atom_x, atom_one_hot, batch_mask):
        """Per-sample Lennard-Jones potential over all intra-sample atom
        pairs (each unordered pair is counted twice, once per direction).

        Args:
            atom_x: (n, 3) atom coordinates (in normalized model units)
            atom_one_hot: (n, atom_nf) one-hot atom types
            batch_mask: (n,) sample index of each atom
        Returns:
            (batch_size,) summed potential per sample
        """
        # Dense adjacency of atoms that belong to the same sample.
        adj = batch_mask[:, None] == batch_mask[None, :]
        adj = adj ^ torch.diag(torch.diag(adj))  # remove self-edges
        edges = torch.where(adj)

        # Compute pair-wise potentials
        r = torch.sum((atom_x[edges[0]] - atom_x[edges[1]])**2, dim=1).sqrt()

        # Get optimal radii
        lennard_jones_radii = torch.tensor(self.lj_rm, device=r.device)
        # unit conversion pm -> A
        lennard_jones_radii = lennard_jones_radii / 100.0
        # normalization
        lennard_jones_radii = lennard_jones_radii / self.ddpm.norm_values[0]
        atom_type_idx = atom_one_hot.argmax(1)
        # rm is the pairwise equilibrium distance; sigma the zero-crossing.
        rm = lennard_jones_radii[atom_type_idx[edges[0]],
                                 atom_type_idx[edges[1]]]
        sigma = 2 ** (-1 / 6) * rm
        # Standard 12-6 LJ form; note r -> 0 makes this blow up, which is
        # why clamping below may be configured.
        out = 4 * ((sigma / r) ** 12 - (sigma / r) ** 6)

        if self.clamp_lj is not None:
            out = torch.clamp(out, min=None, max=self.clamp_lj)

        # Compute potential per atom
        out = scatter_add(out, edges[0], dim=0, dim_size=len(atom_x))

        # Sum potentials of all atoms
        return scatter_add(out, batch_mask, dim=0)
|
| 332 |
+
|
| 333 |
+
def log_metrics(self, metrics_dict, split, batch_size=None, **kwargs):
    """Log every entry of ``metrics_dict`` under the '<name>/<split>' key."""
    for name, value in metrics_dict.items():
        self.log(f'{name}/{split}', value, batch_size=batch_size, **kwargs)
|
| 336 |
+
|
| 337 |
+
def training_step(self, data, *args):
    """One training step: compute the NLL-based loss and log metrics.

    Returns the info dict (including 'loss') for Lightning, or None if
    the batch was skipped after a single-GPU out-of-memory error.
    """
    # Data augmentation is not implemented for this model; the lines
    # after each `raise` are unreachable placeholders kept for reference.
    if self.augment_noise > 0:
        raise NotImplementedError
        # Add noise eps ~ N(0, augment_noise) around points.
        eps = sample_center_gravity_zero_gaussian(x.size(), x.device)
        x = x + eps * args.augment_noise

    if self.augment_rotation:
        raise NotImplementedError
        x = utils.random_rotation(x).detach()

    try:
        nll, info = self.forward(data)
    except RuntimeError as e:
        # this is not supported for multi-GPU
        # (skipping a batch on only one rank would desynchronize DDP)
        if self.trainer.num_devices < 2 and 'out of memory' in str(e):
            print('WARNING: ran out of memory, skipping to the next batch')
            return None
        else:
            raise e

    # Mean NLL over the batch is the training loss.
    loss = nll.mean(0)

    info['loss'] = loss
    self.log_metrics(info, 'train', batch_size=len(data['num_lig_atoms']))

    return info
|
| 364 |
+
|
| 365 |
+
def _shared_eval(self, data, prefix, *args):
|
| 366 |
+
nll, info = self.forward(data)
|
| 367 |
+
loss = nll.mean(0)
|
| 368 |
+
|
| 369 |
+
info['loss'] = loss
|
| 370 |
+
|
| 371 |
+
self.log_metrics(info, prefix, batch_size=len(data['num_lig_atoms']),
|
| 372 |
+
sync_dist=True)
|
| 373 |
+
|
| 374 |
+
return info
|
| 375 |
+
|
| 376 |
+
def validation_step(self, data, *args):
    """Lightning validation hook: delegate to the shared eval logic ('val')."""
    self._shared_eval(data, 'val', *args)
|
| 378 |
+
|
| 379 |
+
def test_step(self, data, *args):
    """Lightning test hook: delegate to the shared eval logic ('test')."""
    self._shared_eval(data, 'test', *args)
|
| 381 |
+
|
| 382 |
+
def validation_epoch_end(self, validation_step_outputs):
    """Periodically sample molecules for evaluation and visualization.

    Runs on the global-zero rank only. Depending on the current epoch,
    this draws samples for metric computation, saves example molecules,
    and/or saves a denoising chain. The '_given_pocket' method variants
    are used unless the model operates in 'joint' mode.
    """
    # Perform validation on single GPU
    if not self.trainer.is_global_zero:
        return

    suffix = '' if self.mode == 'joint' else '_given_pocket'

    def _due(interval):
        # True every `interval` epochs (1-based).
        return (self.current_epoch + 1) % interval == 0

    if _due(self.eval_epochs):
        tic = time()
        sampling_results = getattr(self, 'sample_and_analyze' + suffix)(
            self.eval_params.n_eval_samples, self.val_dataset,
            batch_size=self.eval_batch_size)
        self.log_metrics(sampling_results, 'val')
        print(f'Evaluation took {time() - tic:.2f} seconds')

    if _due(self.visualize_sample_epoch):
        tic = time()
        getattr(self, 'sample_and_save' + suffix)(
            self.eval_params.n_visualize_samples)
        print(f'Sample visualization took {time() - tic:.2f} seconds')

    if _due(self.visualize_chain_epoch):
        tic = time()
        getattr(self, 'sample_chain_and_save' + suffix)(
            self.eval_params.keep_frames)
        print(f'Chain visualization took {time() - tic:.2f} seconds')
|
| 411 |
+
|
| 412 |
+
@torch.no_grad()
def sample_and_analyze(self, n_samples, dataset=None, batch_size=None):
    """Sample ligand/pocket pairs from the joint model and compute metrics.

    Draws ``n_samples`` molecules in mini-batches and passes them to
    ``analyze_sample`` together with the flattened ligand atom types and
    pocket node types.
    """
    print(f'Analyzing sampled molecules at epoch {self.current_epoch}...')

    batch_size = self.batch_size if batch_size is None else batch_size
    batch_size = min(batch_size, n_samples)

    # each item in molecules is a tuple (position, atom_type_encoded)
    molecules = []
    atom_types = []
    aa_types = []
    while len(molecules) < n_samples:
        n_batch = min(batch_size, n_samples - len(molecules))

        num_nodes_lig, num_nodes_pocket = \
            self.ddpm.size_distribution.sample(n_batch)

        xh_lig, xh_pocket, lig_mask, _ = self.ddpm.sample(
            n_batch, num_nodes_lig, num_nodes_pocket, device=self.device)

        # Split concatenated [coords | one-hot] features and move to CPU.
        coords = xh_lig[:, :self.x_dims].detach().cpu()
        types = xh_lig[:, self.x_dims:].argmax(1).detach().cpu()
        lig_mask = lig_mask.cpu()

        molecules.extend(zip(utils.batch_to_list(coords, lig_mask),
                             utils.batch_to_list(types, lig_mask)))

        atom_types.extend(types.tolist())
        aa_types.extend(
            xh_pocket[:, self.x_dims:].argmax(1).detach().cpu().tolist())

    return self.analyze_sample(molecules, atom_types, aa_types)
|
| 448 |
+
|
| 449 |
+
def analyze_sample(self, molecules, atom_types, aa_types, receptors=None):
    """Compute distribution, validity and property metrics for samples.

    Args:
        molecules: list of (coords, atom_type) graph tuples
        atom_types: flat list of sampled ligand atom-type indices
        aa_types: flat list of sampled pocket node-type indices
        receptors: optional receptor paths; if given, an smina docking
            score is added to the result
    Returns:
        dict of scalar metrics
    """
    # KL divergence between sampled and reference node-type histograms
    # (-1 when no reference distribution is available).
    kl_div_atom = -1
    if self.ligand_type_distribution is not None:
        kl_div_atom = self.ligand_type_distribution.kl_divergence(atom_types)
    kl_div_aa = -1
    if self.pocket_type_distribution is not None:
        kl_div_aa = self.pocket_type_distribution.kl_divergence(aa_types)

    # Convert graphs into RDKit molecules.
    rdmols = [build_molecule(*graph, self.dataset_info)
              for graph in molecules]

    # Basic sanity metrics on all molecules.
    (validity, connectivity, uniqueness, novelty), (_, connected_mols) = \
        self.ligand_metrics.evaluate_rdmols(rdmols)

    # Mean chemical properties on the connected molecules only.
    qed, sa, logp, lipinski, diversity = \
        self.molecule_properties.evaluate_mean(connected_mols)

    out = {
        'kl_div_atom_types': kl_div_atom,
        'kl_div_residue_types': kl_div_aa,
        'Validity': validity,
        'Connectivity': connectivity,
        'Uniqueness': uniqueness,
        'Novelty': novelty,
        'QED': qed,
        'SA': sa,
        'LogP': logp,
        'Lipinski': lipinski,
        'Diversity': diversity,
    }

    # Simple docking score
    if receptors is not None:
        # out['smina_score'] = np.mean(smina_score(rdmols, receptors))
        out['smina_score'] = np.mean(smina_score(connected_mols, receptors))

    return out
|
| 486 |
+
|
| 487 |
+
def get_full_path(self, receptor_name):
    """Map a receptor identifier '<pdb>.<suffix>' to its validation PDB path.

    The PDB code is upper-cased and joined with the suffix as
    '<PDB>-<suffix>.pdb' inside ``<datadir>/val``.
    """
    # Split on the first '.' only, so a suffix that itself contains dots
    # no longer raises ValueError (the original unpacking assumed exactly
    # one dot in the name).
    pdb, suffix = receptor_name.split('.', 1)
    receptor_name = f'{pdb.upper()}-{suffix}.pdb'
    return Path(self.datadir, 'val', receptor_name)
|
| 491 |
+
|
| 492 |
+
@torch.no_grad()
def sample_and_analyze_given_pocket(self, n_samples, dataset=None,
                                    batch_size=None):
    """Sample ligands conditioned on dataset pockets and compute metrics.

    Iterates over ``dataset`` in mini-batches (wrapping around if
    ``n_samples`` exceeds the dataset size), samples one ligand per
    pocket, strips virtual nodes if the model uses them, and forwards
    everything to ``analyze_sample`` together with the receptor paths.
    """
    print(f'Analyzing sampled molecules given pockets at epoch '
          f'{self.current_epoch}...')

    batch_size = self.batch_size if batch_size is None else batch_size
    batch_size = min(batch_size, n_samples)

    # each item in molecules is a tuple (position, atom_type_encoded)
    molecules = []
    atom_types = []
    aa_types = []
    receptors = []
    for i in range(math.ceil(n_samples / batch_size)):

        n_samples_batch = min(batch_size, n_samples - len(molecules))

        # Create a batch (indices wrap around the dataset with modulo)
        batch = dataset.collate_fn(
            [dataset[(i * batch_size + j) % len(dataset)]
             for j in range(n_samples_batch)]
        )

        ligand, pocket = self.get_ligand_and_pocket(batch)
        receptors.extend([self.get_full_path(x) for x in batch['receptors']])

        # With virtual nodes the ligand size is fixed; otherwise it is
        # sampled conditioned on the pocket size.
        if self.virtual_nodes:
            num_nodes_lig = self.max_num_nodes
        else:
            num_nodes_lig = self.ddpm.size_distribution.sample_conditional(
                n1=None, n2=pocket['size'])

        xh_lig, xh_pocket, lig_mask, _ = self.ddpm.sample_given_pocket(
            pocket, num_nodes_lig)

        # Split concatenated [coords | one-hot] features and move to CPU.
        x = xh_lig[:, :self.x_dims].detach().cpu()
        atom_type = xh_lig[:, self.x_dims:].argmax(1).detach().cpu()
        lig_mask = lig_mask.cpu()

        if self.virtual_nodes:
            # Remove virtual nodes for analysis
            vnode_mask = (atom_type == self.virtual_atom)
            x = x[~vnode_mask, :]
            atom_type = atom_type[~vnode_mask]
            lig_mask = lig_mask[~vnode_mask]

        molecules.extend(list(
            zip(utils.batch_to_list(x, lig_mask),
                utils.batch_to_list(atom_type, lig_mask))
        ))

        atom_types.extend(atom_type.tolist())
        aa_types.extend(
            xh_pocket[:, self.x_dims:].argmax(1).detach().cpu().tolist())

    return self.analyze_sample(molecules, atom_types, aa_types,
                               receptors=receptors)
|
| 550 |
+
|
| 551 |
+
def sample_and_save(self, n_samples):
    """Sample ligand/pocket pairs from the joint model and save them as
    per-epoch .xyz files for visualization."""
    num_nodes_lig, num_nodes_pocket = \
        self.ddpm.size_distribution.sample(n_samples)

    xh_lig, xh_pocket, lig_mask, pocket_mask = \
        self.ddpm.sample(n_samples, num_nodes_lig, num_nodes_pocket,
                         device=self.device)

    if self.pocket_representation == 'CA':
        # convert residues into atom representation for visualization
        x_pocket, one_hot_pocket = utils.residues_to_atoms(
            xh_pocket[:, :self.x_dims], self.lig_type_encoder)
    else:
        x_pocket, one_hot_pocket = \
            xh_pocket[:, :self.x_dims], xh_pocket[:, self.x_dims:]
    # Concatenate ligand and pocket nodes so they are saved as one system.
    x = torch.cat((xh_lig[:, :self.x_dims], x_pocket), dim=0)
    one_hot = torch.cat((xh_lig[:, self.x_dims:], one_hot_pocket), dim=0)

    outdir = Path(self.outdir, f'epoch_{self.current_epoch}')
    save_xyz_file(str(outdir) + '/', one_hot, x, self.lig_type_decoder,
                  name='molecule',
                  batch_mask=torch.cat((lig_mask, pocket_mask)))
    # wandb image logging disabled here:
    # visualize(str(outdir), dataset_info=self.dataset_info, wandb=wandb)
    visualize(str(outdir), dataset_info=self.dataset_info, wandb=None)
|
| 575 |
+
|
| 576 |
+
def sample_and_save_given_pocket(self, n_samples):
    """Sample ligands for random validation pockets and save them as
    per-epoch .xyz files for visualization."""
    # Draw n_samples random pockets (with replacement) from the val set.
    batch = self.val_dataset.collate_fn(
        [self.val_dataset[i] for i in torch.randint(len(self.val_dataset),
                                                    size=(n_samples,))]
    )
    ligand, pocket = self.get_ligand_and_pocket(batch)

    # With virtual nodes the ligand size is fixed; otherwise sample it
    # conditioned on the pocket size.
    if self.virtual_nodes:
        num_nodes_lig = self.max_num_nodes
    else:
        num_nodes_lig = self.ddpm.size_distribution.sample_conditional(
            n1=None, n2=pocket['size'])

    xh_lig, xh_pocket, lig_mask, pocket_mask = \
        self.ddpm.sample_given_pocket(pocket, num_nodes_lig)

    if self.pocket_representation == 'CA':
        # convert residues into atom representation for visualization
        x_pocket, one_hot_pocket = utils.residues_to_atoms(
            xh_pocket[:, :self.x_dims], self.lig_type_encoder)
    else:
        x_pocket, one_hot_pocket = \
            xh_pocket[:, :self.x_dims], xh_pocket[:, self.x_dims:]
    # Concatenate ligand and pocket nodes so they are saved as one system.
    x = torch.cat((xh_lig[:, :self.x_dims], x_pocket), dim=0)
    one_hot = torch.cat((xh_lig[:, self.x_dims:], one_hot_pocket), dim=0)

    outdir = Path(self.outdir, f'epoch_{self.current_epoch}')
    save_xyz_file(str(outdir) + '/', one_hot, x, self.lig_type_decoder,
                  name='molecule',
                  batch_mask=torch.cat((lig_mask, pocket_mask)))
    # wandb image logging disabled here:
    # visualize(str(outdir), dataset_info=self.dataset_info, wandb=wandb)
    visualize(str(outdir), dataset_info=self.dataset_info, wandb=None)
|
| 608 |
+
|
| 609 |
+
def sample_chain_and_save(self, keep_frames):
    """Sample one ligand/pocket pair from the joint model and save the
    reversed denoising chain as .xyz frames for visualization.

    Args:
        keep_frames: number of intermediate frames to keep from the chain
    """
    n_samples = 1

    num_nodes_lig, num_nodes_pocket = \
        self.ddpm.size_distribution.sample(n_samples)

    chain_lig, chain_pocket, _, _ = self.ddpm.sample(
        n_samples, num_nodes_lig, num_nodes_pocket,
        return_frames=keep_frames, device=self.device)

    # Reverse so the chain runs from noise to the final sample.
    chain_lig = utils.reverse_tensor(chain_lig)
    chain_pocket = utils.reverse_tensor(chain_pocket)

    # Repeat last frame to see final sample better.
    chain_lig = torch.cat([chain_lig, chain_lig[-1:].repeat(10, 1, 1)],
                          dim=0)
    chain_pocket = torch.cat(
        [chain_pocket, chain_pocket[-1:].repeat(10, 1, 1)], dim=0)

    # Prepare entire chain.
    x_lig = chain_lig[:, :, :self.x_dims]
    one_hot_lig = chain_lig[:, :, self.x_dims:]
    # Discretize the (continuous) type features via argmax.
    one_hot_lig = F.one_hot(
        torch.argmax(one_hot_lig, dim=2),
        num_classes=len(self.lig_type_decoder))
    x_pocket = chain_pocket[:, :, :self.x_dims]
    one_hot_pocket = chain_pocket[:, :, self.x_dims:]
    one_hot_pocket = F.one_hot(
        torch.argmax(one_hot_pocket, dim=2),
        num_classes=len(self.pocket_type_decoder))

    if self.pocket_representation == 'CA':
        # convert residues into atom representation for visualization
        x_pocket, one_hot_pocket = utils.residues_to_atoms(
            x_pocket, self.lig_type_encoder)

    x = torch.cat((x_lig, x_pocket), dim=1)
    one_hot = torch.cat((one_hot_lig, one_hot_pocket), dim=1)

    # flatten (treat frame (chain dimension) as batch for visualization)
    x_flat = x.view(-1, x.size(-1))
    one_hot_flat = one_hot.view(-1, one_hot.size(-1))
    mask_flat = torch.arange(x.size(0)).repeat_interleave(x.size(1))

    outdir = Path(self.outdir, f'epoch_{self.current_epoch}', 'chain')
    save_xyz_file(str(outdir), one_hot_flat, x_flat, self.lig_type_decoder,
                  name='/chain', batch_mask=mask_flat)
    visualize_chain(str(outdir), self.dataset_info, wandb=wandb)
|
| 657 |
+
|
| 658 |
+
def sample_chain_and_save_given_pocket(self, keep_frames):
    """Sample one ligand conditioned on a random validation pocket and
    save the reversed denoising chain as .xyz frames for visualization.

    Args:
        keep_frames: number of intermediate frames to keep from the chain
    """
    n_samples = 1

    batch = self.val_dataset.collate_fn([
        self.val_dataset[torch.randint(len(self.val_dataset), size=(1,))]
    ])
    ligand, pocket = self.get_ligand_and_pocket(batch)

    if self.virtual_nodes:
        num_nodes_lig = self.max_num_nodes
    else:
        num_nodes_lig = self.ddpm.size_distribution.sample_conditional(
            n1=None, n2=pocket['size'])

    chain_lig, chain_pocket, _, _ = self.ddpm.sample_given_pocket(
        pocket, num_nodes_lig, return_frames=keep_frames)

    # Reverse so the chain runs from noise to the final sample.
    chain_lig = utils.reverse_tensor(chain_lig)
    chain_pocket = utils.reverse_tensor(chain_pocket)

    # Repeat last frame to see final sample better.
    chain_lig = torch.cat([chain_lig, chain_lig[-1:].repeat(10, 1, 1)],
                          dim=0)
    chain_pocket = torch.cat(
        [chain_pocket, chain_pocket[-1:].repeat(10, 1, 1)], dim=0)

    # Prepare entire chain.
    x_lig = chain_lig[:, :, :self.x_dims]
    one_hot_lig = chain_lig[:, :, self.x_dims:]
    one_hot_lig = F.one_hot(
        torch.argmax(one_hot_lig, dim=2),
        num_classes=len(self.lig_type_decoder))
    # CONSISTENCY FIX: use self.x_dims for the coordinate/type split
    # instead of the hard-coded 3 used previously; this matches the
    # sibling sample_chain_and_save (identical behavior for x_dims == 3).
    x_pocket = chain_pocket[:, :, :self.x_dims]
    one_hot_pocket = chain_pocket[:, :, self.x_dims:]
    one_hot_pocket = F.one_hot(
        torch.argmax(one_hot_pocket, dim=2),
        num_classes=len(self.pocket_type_decoder))

    if self.pocket_representation == 'CA':
        # convert residues into atom representation for visualization
        x_pocket, one_hot_pocket = utils.residues_to_atoms(
            x_pocket, self.lig_type_encoder)

    x = torch.cat((x_lig, x_pocket), dim=1)
    one_hot = torch.cat((one_hot_lig, one_hot_pocket), dim=1)

    # flatten (treat frame (chain dimension) as batch for visualization)
    x_flat = x.view(-1, x.size(-1))
    one_hot_flat = one_hot.view(-1, one_hot.size(-1))
    mask_flat = torch.arange(x.size(0)).repeat_interleave(x.size(1))

    outdir = Path(self.outdir, f'epoch_{self.current_epoch}', 'chain')
    save_xyz_file(str(outdir), one_hot_flat, x_flat, self.lig_type_decoder,
                  name='/chain', batch_mask=mask_flat)
    visualize_chain(str(outdir), self.dataset_info, wandb=wandb)
|
| 713 |
+
|
| 714 |
+
def prepare_pocket(self, biopython_residues, repeats=1):
    """Build the pocket feature dict expected by the diffusion model.

    Args:
        biopython_residues: Bio.PDB residues that define the pocket
        repeats: number of copies of the pocket (one per sample to draw)
    Returns:
        dict with node coordinates 'x', one-hot types 'one_hot',
        per-copy 'size' and per-node batch index 'mask', on self.device.
    """
    if self.pocket_representation == 'CA':
        # One node per residue, placed at its alpha-carbon.
        pocket_coord = torch.tensor(np.array(
            [res['CA'].get_coord() for res in biopython_residues]),
            device=self.device, dtype=FLOAT_TYPE)
        pocket_types = torch.tensor(
            [self.pocket_type_encoder[three_to_one(res.get_resname())]
             for res in biopython_residues], device=self.device)
    else:
        # One node per heavy atom.
        # BUGFIX: the filter previously used 'or', which admitted every
        # non-hydrogen atom even when its element is missing from
        # self.pocket_type_encoder, so the encoding below raised a
        # KeyError for unknown elements. 'and' keeps only known,
        # non-hydrogen atoms.
        pocket_atoms = [a for res in biopython_residues
                        for a in res.get_atoms()
                        if (a.element.capitalize() in self.pocket_type_encoder
                            and a.element != 'H')]
        pocket_coord = torch.tensor(np.array(
            [a.get_coord() for a in pocket_atoms]),
            device=self.device, dtype=FLOAT_TYPE)
        pocket_types = torch.tensor(
            [self.pocket_type_encoder[a.element.capitalize()]
             for a in pocket_atoms], device=self.device)

    pocket_one_hot = F.one_hot(
        pocket_types, num_classes=len(self.pocket_type_encoder)
    )

    # Replicate the pocket once per requested sample.
    pocket_size = torch.tensor([len(pocket_coord)] * repeats,
                               device=self.device, dtype=INT_TYPE)
    pocket_mask = torch.repeat_interleave(
        torch.arange(repeats, device=self.device, dtype=INT_TYPE),
        len(pocket_coord)
    )

    pocket = {
        'x': pocket_coord.repeat(repeats, 1),
        'one_hot': pocket_one_hot.repeat(repeats, 1),
        'size': pocket_size,
        'mask': pocket_mask
    }

    return pocket
|
| 753 |
+
|
| 754 |
+
def generate_ligands(self, pdb_file, n_samples, pocket_ids=None,
                     ref_ligand=None, num_nodes_lig=None, sanitize=False,
                     largest_frag=False, relax_iter=0, timesteps=None,
                     n_nodes_bias=0, n_nodes_min=0, **kwargs):
    """
    Generate ligands given a pocket
    Args:
        pdb_file: PDB filename
        n_samples: number of samples
        pocket_ids: list of pocket residues in <chain>:<resi> format
        ref_ligand: alternative way of defining the pocket based on a
            reference ligand given in <chain>:<resi> format if the ligand is
            contained in the PDB file, or path to an SDF file that
            contains the ligand
        num_nodes_lig: number of ligand nodes for each sample (list of
            integers), sampled randomly if 'None'
        sanitize: whether to sanitize molecules or not
        largest_frag: only return the largest fragment
        relax_iter: number of force field optimization steps
        timesteps: number of denoising steps, use training value if None
        n_nodes_bias: added to the sampled (or provided) number of nodes
        n_nodes_min: lower bound on the number of sampled nodes
        kwargs: additional inpainting parameters
    Returns:
        list of molecules
    """

    # Exactly one of pocket_ids / ref_ligand must be provided.
    assert (pocket_ids is None) ^ (ref_ligand is None)

    self.ddpm.eval()

    # Load PDB
    pdb_struct = PDBParser(QUIET=True).get_structure('', pdb_file)[0]
    if pocket_ids is not None:
        # define pocket with list of residues
        residues = [
            pdb_struct[x.split(':')[0]][(' ', int(x.split(':')[1]), ' ')]
            for x in pocket_ids]

    else:
        # define pocket with reference ligand
        residues = utils.get_pocket_from_ligand(pdb_struct, ref_ligand)

    pocket = self.prepare_pocket(residues, repeats=n_samples)

    # Pocket's center of mass (saved so the output can be mapped back
    # after the model's internal centering).
    pocket_com_before = scatter_mean(pocket['x'], pocket['mask'], dim=0)

    # Create dummy ligands
    if num_nodes_lig is None:
        num_nodes_lig = self.ddpm.size_distribution.sample_conditional(
            n1=None, n2=pocket['size'])

    # Add bias
    num_nodes_lig = num_nodes_lig + n_nodes_bias

    # Apply minimum ligand size
    num_nodes_lig = torch.clamp(num_nodes_lig, min=n_nodes_min)

    # Use inpainting
    # NOTE(review): exact type checks (not isinstance) are used to
    # dispatch here — presumably because ConditionalDDPM derives from
    # EnVariationalDiffusion; confirm before changing to isinstance.
    if type(self.ddpm) == EnVariationalDiffusion:
        lig_mask = utils.num_nodes_to_batch_mask(
            len(num_nodes_lig), num_nodes_lig, self.device)

        # Placeholder ligand (all zeros) to be filled in by inpainting.
        ligand = {
            'x': torch.zeros((len(lig_mask), self.x_dims),
                             device=self.device, dtype=FLOAT_TYPE),
            'one_hot': torch.zeros((len(lig_mask), self.atom_nf),
                                   device=self.device, dtype=FLOAT_TYPE),
            'size': num_nodes_lig,
            'mask': lig_mask
        }

        # Fix all pocket nodes but sample
        lig_mask_fixed = torch.zeros(len(lig_mask), device=self.device)
        pocket_mask_fixed = torch.ones(len(pocket['mask']),
                                       device=self.device)

        xh_lig, xh_pocket, lig_mask, pocket_mask = self.ddpm.inpaint(
            ligand, pocket, lig_mask_fixed, pocket_mask_fixed,
            timesteps=timesteps, **kwargs)

    # Use conditional generation
    elif type(self.ddpm) == ConditionalDDPM:
        xh_lig, xh_pocket, lig_mask, pocket_mask = \
            self.ddpm.sample_given_pocket(pocket, num_nodes_lig,
                                          timesteps=timesteps)

    else:
        raise NotImplementedError

    # Move generated molecule back to the original pocket position
    pocket_com_after = scatter_mean(
        xh_pocket[:, :self.x_dims], pocket_mask, dim=0)

    xh_pocket[:, :self.x_dims] += \
        (pocket_com_before - pocket_com_after)[pocket_mask]
    xh_lig[:, :self.x_dims] += \
        (pocket_com_before - pocket_com_after)[lig_mask]

    # Build mol objects
    x = xh_lig[:, :self.x_dims].detach().cpu()
    atom_type = xh_lig[:, self.x_dims:].argmax(1).detach().cpu()
    lig_mask = lig_mask.cpu()

    molecules = []
    for mol_pc in zip(utils.batch_to_list(x, lig_mask),
                      utils.batch_to_list(atom_type, lig_mask)):

        mol = build_molecule(*mol_pc, self.dataset_info, add_coords=True)
        # process_molecule may return None (e.g. failed sanitization),
        # in which case the sample is dropped.
        mol = process_molecule(mol,
                               add_hydrogens=False,
                               sanitize=sanitize,
                               relax_iter=relax_iter,
                               largest_frag=largest_frag)
        if mol is not None:
            molecules.append(mol)

    return molecules
|
| 873 |
+
|
| 874 |
+
def configure_gradient_clipping(self, optimizer, optimizer_idx,
                                gradient_clip_val, gradient_clip_algorithm):
    """Adaptive gradient clipping based on a running gradient-norm queue."""
    if not self.clip_grad:
        return

    # Allow gradient norm to be 150% + 2 * stdev of the recent history.
    max_grad_norm = 1.5 * self.gradnorm_queue.mean() + \
                    2 * self.gradnorm_queue.std()

    # Current gradient norm over all optimized parameters.
    params = [p for group in optimizer.param_groups for p in group['params']]
    grad_norm = utils.get_grad_norm(params)

    # Lightning will handle the gradient clipping
    self.clip_gradients(optimizer, gradient_clip_val=max_grad_norm,
                        gradient_clip_algorithm='norm')

    # Record the (possibly clipped) norm in the history and report when
    # clipping actually occurred.
    clipped = float(grad_norm) > max_grad_norm
    self.gradnorm_queue.add(float(max_grad_norm) if clipped
                            else float(grad_norm))
    if clipped:
        print(f'Clipped gradient with value {grad_norm:.1f} '
              f'while allowed {max_grad_norm:.1f}')
|
| 900 |
+
|
| 901 |
+
|
| 902 |
+
class WeightSchedule:
    """Lookup table of per-timestep loss weights for integer t in [0, T]."""

    def __init__(self, T, max_weight, mode='linear'):
        # Precompute one weight per timestep, including both endpoints.
        if mode == 'constant':
            self.weights = max_weight * torch.ones(T + 1)
        elif mode == 'linear':
            # Decays from max_weight at t=0 down to zero at t=T.
            self.weights = torch.linspace(max_weight, 0, T + 1)
        else:
            raise NotImplementedError(f'{mode} weight schedule is not '
                                      f'available.')

    def __call__(self, t_array):
        """ all values in t_array are assumed to be integers in [0, T] """
        return self.weights[t_array].to(t_array.device)
|
optimize.py
ADDED
|
@@ -0,0 +1,249 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import argparse
|
| 2 |
+
from pathlib import Path
|
| 3 |
+
|
| 4 |
+
import numpy as np
|
| 5 |
+
import torch
|
| 6 |
+
import torch.nn.functional as F
|
| 7 |
+
from Bio.PDB import PDBParser
|
| 8 |
+
from rdkit import Chem
|
| 9 |
+
import pandas as pd
|
| 10 |
+
import random
|
| 11 |
+
from torch_scatter import scatter_mean
|
| 12 |
+
from openbabel import openbabel
|
| 13 |
+
openbabel.obErrorLog.StopLogging() # suppress OpenBabel messages
|
| 14 |
+
|
| 15 |
+
import utils
|
| 16 |
+
from lightning_modules import LigandPocketDDPM
|
| 17 |
+
from constants import FLOAT_TYPE, INT_TYPE
|
| 18 |
+
from analysis.molecule_builder import build_molecule, process_molecule
|
| 19 |
+
from analysis.metrics import MoleculeProperties
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def prepare_from_sdf_files(sdf_files, atom_encoder):
    """Concatenate coordinates and one-hot atom types from SDF files.

    Reads the first molecule of each file (without sanitization) and
    stacks all atoms into one (N, 3) coordinate tensor and one
    (N, num_types) one-hot matrix.
    """
    coords, one_hots = [], []
    for sdf_path in sdf_files:
        mol = Chem.SDMolSupplier(str(sdf_path), sanitize=False)[0]
        coords.append(
            torch.from_numpy(mol.GetConformer().GetPositions()).float())
        type_idx = torch.tensor(
            [atom_encoder[atom.GetSymbol()] for atom in mol.GetAtoms()])
        one_hots.append(F.one_hot(type_idx, num_classes=len(atom_encoder)))

    return torch.cat(coords, dim=0), torch.cat(one_hots, dim=0)
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def prepare_ligands_from_mols(mols, atom_encoder, device='cpu'):
    """Pack a list of RDKit molecules into one batched ligand dict.

    Returns a dict with concatenated coordinates ('x'), one-hot atom
    types ('one_hot'), per-molecule atom counts ('size') and a batch
    index per atom ('mask'), all moved to ``device``.
    """
    coords, one_hots, batch_idx, n_atoms = [], [], [], []
    for mol_idx, mol in enumerate(mols):
        pos = torch.tensor(mol.GetConformer().GetPositions(),
                           dtype=FLOAT_TYPE)
        type_idx = torch.tensor(
            [a.GetSymbol() and atom_encoder[a.GetSymbol()]
             for a in mol.GetAtoms()], dtype=INT_TYPE)
        coords.append(pos)
        one_hots.append(F.one_hot(type_idx, num_classes=len(atom_encoder)))
        batch_idx.append(torch.ones(len(type_idx), dtype=INT_TYPE) * mol_idx)
        n_atoms.append(len(type_idx))

    return {
        'x': torch.cat(coords, dim=0).to(device),
        'one_hot': torch.cat(one_hots, dim=0).to(device),
        'size': torch.tensor(n_atoms, dtype=INT_TYPE).to(device),
        'mask': torch.cat(batch_idx, dim=0).to(device),
    }
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def prepare_ligand_from_pdb(biopython_atoms, atom_encoder):
    """Coordinates and one-hot element types for a list of BioPython atoms."""
    coords = torch.tensor(
        np.array([atom.get_coord() for atom in biopython_atoms]),
        dtype=FLOAT_TYPE)
    elem_idx = torch.tensor([atom_encoder[atom.element.capitalize()]
                             for atom in biopython_atoms])
    return coords, F.one_hot(elem_idx, num_classes=len(atom_encoder))
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def prepare_substructure(ref_ligand, fix_atoms, pdb_model):
    """Extract coordinates and one-hot types of the atoms to keep fixed.

    Args:
        ref_ligand: reference ligand in <chain>:<resi> format (used when
            the fixed atoms come from the PDB model)
        fix_atoms: either a list of SDF file paths (detected by the first
            entry ending in '.sdf') or a list of PDB atom names to keep
        pdb_model: BioPython model containing the reference ligand
    Returns:
        (coord, one_hot) tensors for the fixed substructure atoms

    NOTE(review): this function reads the global variable ``model`` for
    its atom encoder instead of receiving it as a parameter — it only
    works when a module-level ``model`` (the loaded LigandPocketDDPM)
    exists; consider passing the encoder explicitly.
    """
    if fix_atoms[0].endswith(".sdf"):
        # ligand as sdf file
        coord, one_hot = prepare_from_sdf_files(fix_atoms, model.lig_type_encoder)

    else:
        # ligand contained in PDB; given in <chain>:<resi> format
        chain, resi = ref_ligand.split(':')
        ligand = utils.get_residue_with_resi(pdb_model[chain], int(resi))
        fixed_atoms = [a for a in ligand.get_atoms() if a.get_name() in set(fix_atoms)]
        coord, one_hot = prepare_ligand_from_pdb(fixed_atoms, model.lig_type_encoder)

    return coord, one_hot
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
def diversify_ligands(model, pocket, mols, timesteps,
                      sanitize=False,
                      largest_frag=False,
                      relax_iter=0):
    """
    Diversify ligands for a specified pocket.

    Parameters:
        model: The model instance used for diversification.
        pocket: The pocket information including coordinates and types.
        mols: List of RDKit molecule objects to be diversified.
        timesteps: Number of denoising steps to apply during diversification.
        sanitize: If True, performs molecule sanitization post-generation (default: False).
        largest_frag: If True, only the largest fragment of the generated molecule is returned (default: False).
        relax_iter: Number of iterations for force field relaxation of the generated molecules (default: 0).

    Returns:
        A list of diversified RDKit molecule objects.
    """
    ligand = prepare_ligands_from_mols(mols, model.lig_type_encoder,
                                       device=model.device)

    lig_mask = ligand['mask']
    pocket_mask = pocket['mask']
    n_dims = model.x_dims

    # Remember the pocket's center of mass so outputs can be moved back
    com_before = scatter_mean(pocket['x'], pocket_mask, dim=0)

    out_lig, out_pocket, _, _ = model.ddpm.diversify(
        ligand, pocket, noising_steps=timesteps)

    # Move generated molecule back to the original pocket position
    com_after = scatter_mean(out_pocket[:, :n_dims], pocket_mask, dim=0)
    shift = com_before - com_after
    out_pocket[:, :n_dims] += shift[pocket_mask]
    out_lig[:, :n_dims] += shift[lig_mask]

    # Convert generated point clouds into RDKit molecule objects
    coords = out_lig[:, :n_dims].detach().cpu()
    types = out_lig[:, n_dims:].argmax(1).detach().cpu()

    per_mol_coords = utils.batch_to_list(coords, lig_mask)
    per_mol_types = utils.batch_to_list(types, lig_mask)

    molecules = []
    for mol_coords, mol_types in zip(per_mol_coords, per_mol_types):
        raw_mol = build_molecule(mol_coords, mol_types, model.dataset_info,
                                 add_coords=True)
        processed = process_molecule(raw_mol,
                                     add_hydrogens=False,
                                     sanitize=sanitize,
                                     relax_iter=relax_iter,
                                     largest_frag=largest_frag)
        if processed is not None:
            molecules.append(processed)

    return molecules
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
if __name__ == "__main__":

    parser = argparse.ArgumentParser()
    parser.add_argument('--checkpoint', type=Path, default='checkpoints/crossdocked_fullatom_cond.ckpt')
    parser.add_argument('--pdbfile', type=str, default='example/5ndu.pdb')
    parser.add_argument('--ref_ligand', type=str, default='example/5ndu_linked_mols.sdf')
    parser.add_argument('--objective', type=str, default='sa', choices={'qed', 'sa'})
    parser.add_argument('--timesteps', type=int, default=100)
    parser.add_argument('--population_size', type=int, default=100)
    parser.add_argument('--evolution_steps', type=int, default=10)
    parser.add_argument('--top_k', type=int, default=7)
    parser.add_argument('--outfile', type=Path, default='output.sdf')
    parser.add_argument('--relax', action='store_true')

    args = parser.parse_args()

    pdb_id = Path(args.pdbfile).stem

    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    population_size = args.population_size
    evolution_steps = args.evolution_steps
    top_k = args.top_k

    # Load model
    model = LigandPocketDDPM.load_from_checkpoint(
        args.checkpoint, map_location=device)
    model = model.to(device)

    # Prepare ligand + pocket
    # Load PDB
    pdb_model = PDBParser(QUIET=True).get_structure('', args.pdbfile)[0]
    # Define pocket based on reference ligand
    residues = utils.get_pocket_from_ligand(pdb_model, args.ref_ligand)
    pocket = model.prepare_pocket(residues, repeats=population_size)

    if args.objective == 'qed':
        objective_function = MoleculeProperties().calculate_qed
    elif args.objective == 'sa':
        objective_function = MoleculeProperties().calculate_sa
    else:
        ### IMPLEMENT YOUR OWN OBJECTIVE
        ### FUNCTIONS HERE
        raise ValueError(f"Objective function {args.objective} not recognized.")

    ref_mol = Chem.SDMolSupplier(args.ref_ligand)[0]

    def _record(df, row):
        # DataFrame.append() was removed in pandas 2.0; pd.concat with
        # ignore_index=True is the supported equivalent.
        return pd.concat([df, pd.DataFrame([row])], ignore_index=True)

    # Store molecules in history dataframe
    # (fix: a missing comma previously fused 'fate' and 'mol' into 'fatemol')
    buffer = pd.DataFrame(columns=['generation', 'score', 'fate', 'mol', 'smiles'])

    # Population initialization with the reference molecule
    buffer = _record(buffer, {'generation': 0,
                              'score': objective_function(ref_mol),
                              'fate': 'initial', 'mol': ref_mol,
                              'smiles': Chem.MolToSmiles(ref_mol)})

    for generation_idx in range(evolution_steps):

        if generation_idx == 0:
            molecules = buffer['mol'].tolist() * population_size
        else:
            # Select top k molecules from previous generation
            previous_gen = buffer[buffer['generation'] == generation_idx]
            survivors = previous_gen.nlargest(top_k, 'score')
            top_k_molecules = survivors['mol'].tolist()
            molecules = top_k_molecules * (population_size // top_k)

            # Update the fate of the selected top k molecules only
            # (fix: previously the entire previous generation was flagged)
            buffer.loc[survivors.index, 'fate'] = 'survived'

        # Ensure the right number of molecules by padding with random picks
        if len(molecules) < population_size:
            molecules += [random.choice(molecules) for _ in range(population_size - len(molecules))]

        # Diversify molecules
        assert len(molecules) == population_size, f"Wrong number of molecules: {len(molecules)} when it should be {population_size}"
        print(f"Generation {generation_idx}, mean score: {np.mean([objective_function(mol) for mol in molecules])}")
        molecules = diversify_ligands(model,
                                      pocket,
                                      molecules,
                                      timesteps=args.timesteps,
                                      sanitize=True,
                                      relax_iter=(200 if args.relax else 0))

        # Evaluate and save molecules
        for mol in molecules:
            buffer = _record(buffer, {'generation': generation_idx + 1,
                                      'score': objective_function(mol),
                                      'fate': 'purged',
                                      'mol': mol,
                                      'smiles': Chem.MolToSmiles(mol)})

    # Make SDF files
    utils.write_sdf_file(args.outfile, molecules)
    # Save buffer without raw RDKit objects
    # (fix: drop() returns a copy; the result was previously discarded)
    buffer = buffer.drop(columns=['mol'])
    buffer.to_csv(args.outfile.with_suffix('.csv'))
|
process_bindingmoad.py
ADDED
|
@@ -0,0 +1,652 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from pathlib import Path
|
| 2 |
+
from time import time
|
| 3 |
+
import random
|
| 4 |
+
from collections import defaultdict
|
| 5 |
+
import argparse
|
| 6 |
+
import warnings
|
| 7 |
+
|
| 8 |
+
from tqdm import tqdm
|
| 9 |
+
import numpy as np
|
| 10 |
+
import torch
|
| 11 |
+
from Bio.PDB import PDBParser
|
| 12 |
+
from Bio.PDB.Polypeptide import three_to_one, is_aa
|
| 13 |
+
from Bio.PDB import PDBIO, Select
|
| 14 |
+
from openbabel import openbabel
|
| 15 |
+
from rdkit import Chem
|
| 16 |
+
from rdkit.Chem import QED
|
| 17 |
+
from scipy.ndimage import gaussian_filter
|
| 18 |
+
|
| 19 |
+
from geometry_utils import get_bb_transform
|
| 20 |
+
from analysis.molecule_builder import build_molecule
|
| 21 |
+
from analysis.metrics import rdmol_to_smiles
|
| 22 |
+
import constants
|
| 23 |
+
from constants import covalent_radii, dataset_params
|
| 24 |
+
import utils
|
| 25 |
+
|
| 26 |
+
# Dataset-specific lookup tables for Binding MOAD.
dataset_info = dataset_params['bindingmoad']
amino_acid_dict = dataset_info['aa_encoder']  # one-letter amino acid -> class index
atom_dict = dataset_info['atom_encoder']  # capitalized element symbol -> class index
atom_decoder = dataset_info['atom_decoder']  # class index -> element symbol
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class Model0(Select):
    """Biopython PDBIO selector that keeps only the model with id 0."""

    def accept_model(self, model):
        # Accept a model exactly when it is the structure's first model (id 0).
        return model.id == 0
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def read_label_file(csv_path):
    """
    Read BindingMOAD's label file
    Args:
        csv_path: path to 'every.csv'
    Returns:
        Nested dictionary with all ligands. First level: EC number,
        Second level: PDB ID, Third level: list of ligands. Each ligand is
        represented as a tuple (ligand name, validity, SMILES string)
    """
    ligand_dict = {}

    with open(csv_path, 'r') as f:
        for line in f:
            fields = line.split(',')

            if fields[0]:
                # row introduces a new protein class
                active_class = fields[0]
                ligand_dict[active_class] = {}
            elif fields[2]:
                # row introduces a new protein
                active_prot = fields[2]
                ligand_dict[active_class][active_prot] = []
            elif fields[3]:
                # row describes a small molecule:
                # (ligand name, validity, SMILES string)
                ligand_dict[active_class][active_prot].append(
                    [fields[3], fields[4], fields[9]])

    return ligand_dict
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def compute_druglikeness(ligand_dict):
    """
    Computes RDKit's QED value and adds it to the dictionary
    Args:
        ligand_dict: nested ligand dictionary
    Returns:
        the same ligand dictionary with additional QED values
    """
    print("Computing QED values...")
    all_entries = [(p, m) for c in ligand_dict for p in ligand_dict[c]
                   for m in ligand_dict[c][p]]
    for p, m in tqdm(all_entries):
        mol = Chem.MolFromSmiles(m[2])
        if mol is None:
            # Fix: identify the molecule by its ligand name m[0] rather than
            # stringifying the whole entry list.
            mol_id = f'{p}_{m[0]}'
            warnings.warn(f"Could not construct molecule {mol_id} from SMILES "
                          f"string '{m[2]}'")
            continue
        # Entries that parse gain a fourth element: the QED score.
        m.append(QED.qed(mol))
    return ligand_dict
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def filter_and_flatten(ligand_dict, qed_thresh, max_occurences, seed):
    """Flatten the nested ligand dict and filter entries.

    Keeps entries that are marked 'valid', carry a QED value above
    qed_thresh, and whose ligand name has not yet reached max_occurences.

    Args:
        ligand_dict: nested dict (EC class -> PDB id -> ligand entries),
            each entry being [name, validity, SMILES, (optional) QED].
        qed_thresh: minimum QED value to keep an entry.
        max_occurences: per-ligand-name cap on kept examples.
        seed: RNG seed used for shuffling before applying the cap.

    Returns:
        List of (EC class, PDB id, ligand entry) tuples.
    """
    all_examples = [(c, p, m) for c in ligand_dict for p in ligand_dict[c]
                    for m in ligand_dict[c][p]]

    # Shuffle so the occurrence cap keeps a random subset of ligands that
    # appear more than max_occurences times.
    random.seed(seed)
    random.shuffle(all_examples)

    kept = []
    seen_per_name = defaultdict(int)
    print("Filtering examples...")
    for example in tqdm(all_examples):
        entry = example[2]
        name, _chain, _resi = entry[0].split(':')
        passes = (entry[1] == 'valid'
                  and len(entry) > 3
                  and entry[3] > qed_thresh)
        if passes and seen_per_name[name] < max_occurences:
            kept.append(example)
            seen_per_name[name] += 1

    return kept
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def split_by_ec_number(data_list, n_val, n_test, ec_level=1):
    """
    Split dataset into training, validation and test sets based on EC numbers
    https://en.wikipedia.org/wiki/Enzyme_Commission_number
    Args:
        data_list: list of ligands
        n_val: number of validation examples
        n_test: number of test examples
        ec_level: level in the EC numbering hierarchy at which the split is
            made, i.e. items with matching EC numbers at this level are put in
            the same set
    Returns:
        dictionary with keys 'train', 'val', and 'test'
    """

    def truncate(ec_number):
        # EC number cut down to the requested hierarchy level
        return '.'.join(ec_number.split('.')[:ec_level])

    counts = defaultdict(int)
    for ec, _, _ in data_list:
        counts[truncate(ec)] += 1
    assert sum(counts.values()) == len(data_list)

    # Largest classes first; ties keep first-seen order (stable sort).
    by_size = sorted(counts.items(), key=lambda kv: kv[1], reverse=True)

    # Greedily fill the validation set without exceeding n_val examples.
    val_classes = set()
    val_total = 0
    for cls, num in by_size:
        if val_total + num <= n_val:
            val_classes.add(cls)
            val_total += num

    # Same for the test set, skipping classes already used for validation.
    test_classes = set()
    test_total = 0
    for cls, num in by_size:
        if cls in val_classes:
            continue
        if test_total + num <= n_test:
            test_classes.add(cls)
            test_total += num

    # Everything else is training data.
    data_split = {
        'train': [x for x in data_list
                  if truncate(x[0]) not in val_classes
                  and truncate(x[0]) not in test_classes],
        'val': [x for x in data_list if truncate(x[0]) in val_classes],
        'test': [x for x in data_list if truncate(x[0]) in test_classes],
    }

    assert len(data_split['train']) + len(data_split['val']) + \
        len(data_split['test']) == len(data_list)

    return data_split
|
| 177 |
+
|
| 178 |
+
|
| 179 |
+
def ligand_list_to_dict(ligand_list):
    """Group (EC class, PDB id, ligand) tuples by PDB id.

    Returns a defaultdict(list) mapping PDB id -> list of ligand entries;
    the EC class component is discarded.
    """
    grouped = defaultdict(list)
    for entry in ligand_list:
        grouped[entry[1]].append(entry[2])
    return grouped
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
def process_ligand_and_pocket(pdb_struct, ligand_name, ligand_chain,
                              ligand_resi, dist_cutoff, ca_only,
                              compute_quaternion=False):
    """Extract one ligand and its binding pocket from a parsed PDB structure.

    Args:
        pdb_struct: Biopython structure; only model 0 is used.
        ligand_name: expected residue name of the ligand (sanity-checked).
        ligand_chain: chain id containing the ligand.
        ligand_resi: residue sequence number of the ligand.
        dist_cutoff: standard amino-acid residues with any atom within this
            distance of any ligand atom form the pocket.
        ca_only: if True, represent the pocket by C-alpha coordinates and
            amino-acid one-hots; otherwise by atoms and element one-hots.
        compute_quaternion: if True, also compute backbone-frame quaternions
            with get_bb_transform and include them in the pocket dict.

    Returns:
        (ligand_data, pocket_data) dicts of numpy arrays (and id strings).

    Raises:
        KeyError: missing chain, or an atom/residue type absent from the
            encoder dictionaries.
        ValueError: NaN values in the computed quaternion.

    NOTE(review): error messages interpolate a `pdbfile` name that is not a
    parameter of this function — presumably a global set by the caller;
    confirm it is defined before these exceptions can fire.
    """
    try:
        # Index residues of the ligand's chain by residue number.
        residues = {obj.id[1]: obj for obj in
                    pdb_struct[0][ligand_chain].get_residues()}
    except KeyError as e:
        raise KeyError(f'Chain {e} not found ({pdbfile}, '
                       f'{ligand_name}:{ligand_chain}:{ligand_resi})')
    ligand = residues[ligand_resi]
    assert ligand.get_resname() == ligand_name, \
        f"{ligand.get_resname()} != {ligand_name}"

    # remove H atoms if not in atom_dict, other atom types that aren't allowed
    # should stay so that the entire ligand can be removed from the dataset
    # (the one-hot encoding below raises KeyError for such atoms)
    lig_atoms = [a for a in ligand.get_atoms()
                 if (a.element.capitalize() in atom_dict or a.element != 'H')]
    lig_coords = np.array([a.get_coord() for a in lig_atoms])

    try:
        lig_one_hot = np.stack([
            np.eye(1, len(atom_dict), atom_dict[a.element.capitalize()]).squeeze()
            for a in lig_atoms
        ])
    except KeyError as e:
        raise KeyError(
            f'Ligand atom {e} not in atom dict ({pdbfile}, '
            f'{ligand_name}:{ligand_chain}:{ligand_resi})')

    # Find interacting pocket residues based on distance cutoff
    pocket_residues = []
    for residue in pdb_struct[0].get_residues():
        res_coords = np.array([a.get_coord() for a in residue.get_atoms()])
        # Minimum pairwise Euclidean distance between residue and ligand atoms.
        if is_aa(residue.get_resname(), standard=True) and \
                (((res_coords[:, None, :] - lig_coords[None, :, :]) ** 2).sum(-1) ** 0.5).min() < dist_cutoff:
            pocket_residues.append(residue)

    # Compute transform of the canonical reference frame
    n_xyz = np.array([res['N'].get_coord() for res in pocket_residues])
    ca_xyz = np.array([res['CA'].get_coord() for res in pocket_residues])
    c_xyz = np.array([res['C'].get_coord() for res in pocket_residues])

    if compute_quaternion:
        quaternion, c_alpha = get_bb_transform(n_xyz, ca_xyz, c_xyz)
        if np.any(np.isnan(quaternion)):
            raise ValueError(
                f'Invalid value in quaternion ({pdbfile}, '
                f'{ligand_name}:{ligand_chain}:{ligand_resi})')
    else:
        c_alpha = ca_xyz

    if ca_only:
        # Coarse pocket representation: one C-alpha point + residue type each.
        pocket_coords = c_alpha
        try:
            pocket_one_hot = np.stack([
                np.eye(1, len(amino_acid_dict),
                       amino_acid_dict[three_to_one(res.get_resname())]).squeeze()
                for res in pocket_residues])
        except KeyError as e:
            raise KeyError(
                f'{e} not in amino acid dict ({pdbfile}, '
                f'{ligand_name}:{ligand_chain}:{ligand_resi})')
    else:
        # Full-atom pocket representation with the same H-filtering rule
        # as the ligand above.
        pocket_atoms = [a for res in pocket_residues for a in res.get_atoms()
                        if (a.element.capitalize() in atom_dict or a.element != 'H')]
        pocket_coords = np.array([a.get_coord() for a in pocket_atoms])
        try:
            pocket_one_hot = np.stack([
                np.eye(1, len(atom_dict), atom_dict[a.element.capitalize()]).squeeze()
                for a in pocket_atoms
            ])
        except KeyError as e:
            raise KeyError(
                f'Pocket atom {e} not in atom dict ({pdbfile}, '
                f'{ligand_name}:{ligand_chain}:{ligand_resi})')

    # Human-readable residue identifiers '<chain>:<resi>'.
    pocket_ids = [f'{res.parent.id}:{res.id[1]}' for res in pocket_residues]

    ligand_data = {
        'lig_coords': lig_coords,
        'lig_one_hot': lig_one_hot,
    }
    pocket_data = {
        'pocket_coords': pocket_coords,
        'pocket_one_hot': pocket_one_hot,
        'pocket_ids': pocket_ids,
    }
    if compute_quaternion:
        pocket_data['pocket_quaternion'] = quaternion
    return ligand_data, pocket_data
|
| 276 |
+
|
| 277 |
+
|
| 278 |
+
def compute_smiles(positions, one_hot, mask):
    """Convert batched atomic point clouds into SMILES strings.

    Args:
        positions: (N, 3) numpy array of atom coordinates for all molecules.
        one_hot: (N, n_types) one-hot atom-type encodings.
        mask: (N,) molecule index per atom — assumed grouped so each
            molecule occupies one contiguous stretch (np.diff splitting
            relies on this); TODO confirm at call sites.

    Returns:
        List of SMILES strings for the molecules that survive RDKit
        sanitization (others are silently skipped).
    """
    print("Computing SMILES ...")

    atom_types = np.argmax(one_hot, axis=-1)

    # Split the flat arrays at every change of the molecule index.
    sections = np.where(np.diff(mask))[0] + 1
    positions = [torch.from_numpy(x) for x in np.split(positions, sections)]
    atom_types = [torch.from_numpy(x) for x in np.split(atom_types, sections)]

    mols_smiles = []

    pbar = tqdm(enumerate(zip(positions, atom_types)),
                total=len(np.unique(mask)))
    for i, (pos, atom_type) in pbar:
        mol = build_molecule(pos, atom_type, dataset_info)

        # BasicMolecularMetrics() computes SMILES after sanitization
        try:
            Chem.SanitizeMol(mol)
        except ValueError:
            # unsanitizable molecules are dropped
            continue

        mol = rdmol_to_smiles(mol)
        if mol is not None:
            mols_smiles.append(mol)
        pbar.set_description(f'{len(mols_smiles)}/{i + 1} successful')

    return mols_smiles
|
| 306 |
+
|
| 307 |
+
|
| 308 |
+
def get_n_nodes(lig_mask, pocket_mask, smooth_sigma=None):
    """Joint histogram of ligand and pocket sizes over all examples.

    Args:
        lig_mask: per-atom example indices of ligand nodes.
        pocket_mask: per-node example indices of pocket nodes; must cover
            exactly the same example ids as lig_mask.
        smooth_sigma: if given, Gaussian-smooth the histogram with this sigma.

    Returns:
        2D numpy array indexed by [n_ligand_nodes, n_pocket_nodes].
    """
    sample_ids_lig, lig_sizes = np.unique(lig_mask, return_counts=True)
    sample_ids_pocket, pocket_sizes = np.unique(pocket_mask,
                                                return_counts=True)
    assert np.all(sample_ids_lig == sample_ids_pocket)

    joint_histogram = np.zeros((np.max(lig_sizes) + 1,
                                np.max(pocket_sizes) + 1))
    # Unbuffered add so repeated (lig, pocket) size pairs accumulate.
    np.add.at(joint_histogram, (lig_sizes, pocket_sizes), 1)

    print(f'Original histogram: {np.count_nonzero(joint_histogram)}/'
          f'{joint_histogram.shape[0] * joint_histogram.shape[1]} bins filled')

    # Optionally smooth the histogram
    if smooth_sigma is not None:
        smoothed = gaussian_filter(
            joint_histogram, sigma=smooth_sigma, order=0, mode='constant',
            cval=0.0, truncate=4.0)

        print(f'Smoothed histogram: {np.count_nonzero(smoothed)}/'
              f'{smoothed.shape[0] * smoothed.shape[1]} bins filled')

        joint_histogram = smoothed

    return joint_histogram
|
| 335 |
+
|
| 336 |
+
|
| 337 |
+
def get_bond_length_arrays(atom_mapping):
    """Build symmetric bond-length matrices for bond orders 1-3.

    Args:
        atom_mapping: dict mapping element symbol -> matrix index.

    Returns:
        List of three (n, n) numpy arrays (single/double/triple bonds) with
        lengths from constants.bonds{1,2,3}; untabulated pairs stay 0.
    """
    n_types = len(atom_mapping)
    matrices = []

    for order in (1, 2, 3):
        length_table = getattr(constants, f'bonds{order}')
        matrix = np.zeros((n_types, n_types))

        for elem_a, row in atom_mapping.items():
            for elem_b, col in atom_mapping.items():
                if elem_a in length_table and elem_b in length_table[elem_a]:
                    matrix[row, col] = length_table[elem_a][elem_b]

        # Bond lengths must be direction-independent.
        assert np.all(matrix == matrix.T)
        matrices.append(matrix)

    return matrices
|
| 354 |
+
|
| 355 |
+
|
| 356 |
+
def get_lennard_jones_rm(atom_mapping):
    """Build the matrix of Lennard-Jones r_m distances for all atom pairs.

    For each pair the shortest tabulated bond length across bond orders 1-3
    is used; pairs without any tabulated bond fall back to the sum of the
    two elements' average covalent radii.

    Args:
        atom_mapping: dict mapping element symbol -> matrix index.

    Returns:
        Symmetric (n, n) numpy array of distances.
    """
    n_types = len(atom_mapping)
    LJ_rm = np.zeros((n_types, n_types))

    for elem_a, row in atom_mapping.items():
        for elem_b, col in atom_mapping.items():
            candidates = [
                getattr(constants, table)[elem_a][elem_b]
                for table in ('bonds1', 'bonds2', 'bonds3')
                if elem_a in getattr(constants, table)
                and elem_b in getattr(constants, table)[elem_a]
            ]
            if candidates:
                # take the shortest possible bond length because slightly
                # larger values aren't penalized as much
                LJ_rm[row, col] = min(candidates)
            else:
                # Replace missing values with sum of average covalent radii
                LJ_rm[row, col] = covalent_radii[elem_a] + covalent_radii[elem_b]

    assert np.all(LJ_rm == LJ_rm.T)
    return LJ_rm
|
| 380 |
+
|
| 381 |
+
|
| 382 |
+
def get_type_histograms(lig_one_hot, pocket_one_hot, atom_encoder, aa_encoder):
    """Count atom types and amino-acid types over the whole dataset.

    Args:
        lig_one_hot: (N, n_atom_types) one-hot ligand atom encodings.
        pocket_one_hot: (M, n_aa_types) one-hot pocket residue encodings.
        atom_encoder: dict whose keys name the atom classes (order matters).
        aa_encoder: dict whose keys name the amino-acid classes.

    Returns:
        (atom_counts, aa_counts) dicts mapping class name -> occurrence count.
    """
    def _count(one_hot, encoder):
        # Map each row's argmax back to its class name and tally.
        decoder = list(encoder.keys())
        counts = {key: 0 for key in encoder}
        for idx in one_hot.argmax(1):
            counts[decoder[idx]] += 1
        return counts

    return _count(lig_one_hot, atom_encoder), _count(pocket_one_hot, aa_encoder)
|
| 395 |
+
|
| 396 |
+
|
| 397 |
+
def saveall(filename, pdb_and_mol_ids, lig_coords, lig_one_hot, lig_mask,
            pocket_coords, pocket_quaternion, pocket_one_hot, pocket_mask):
    """Write all processed arrays for one data split to a single .npz file.

    Returns True on success so callers can assert on the result.
    """
    payload = {
        'names': pdb_and_mol_ids,
        'lig_coords': lig_coords,
        'lig_one_hot': lig_one_hot,
        'lig_mask': lig_mask,
        'pocket_coords': pocket_coords,
        'pocket_quaternion': pocket_quaternion,
        'pocket_one_hot': pocket_one_hot,
        'pocket_mask': pocket_mask,
    }
    np.savez(filename, **payload)
    return True
|
| 411 |
+
|
| 412 |
+
|
| 413 |
+
if __name__ == '__main__':
    # CLI entry point: convert a raw Binding MOAD download into processed
    # train/val/test .npz archives plus dataset statistics.
    parser = argparse.ArgumentParser()
    parser.add_argument('basedir', type=Path)
    parser.add_argument('--outdir', type=Path, default=None)
    parser.add_argument('--qed_thresh', type=float, default=0.3)
    parser.add_argument('--max_occurences', type=int, default=50)
    parser.add_argument('--num_val', type=int, default=300)
    parser.add_argument('--num_test', type=int, default=300)
    parser.add_argument('--dist_cutoff', type=float, default=8.0)
    parser.add_argument('--ca_only', action='store_true')
    parser.add_argument('--random_seed', type=int, default=42)
    parser.add_argument('--make_split', action='store_true')
    args = parser.parse_args()

    pdbdir = args.basedir / 'BindingMOAD_2020/'

    # Make output directory
    if args.outdir is None:
        # NOTE(review): relies on a module-level `atom_dict` defined earlier
        # in this file (not visible here).
        suffix = '' if 'H' in atom_dict else '_noH'
        suffix += '_ca_only' if args.ca_only else '_full'
        processed_dir = Path(args.basedir, f'processed{suffix}')
    else:
        processed_dir = args.outdir

    processed_dir.mkdir(exist_ok=True, parents=True)

    if args.make_split:
        # Process the label file
        csv_path = args.basedir / 'every.csv'
        ligand_dict = read_label_file(csv_path)
        ligand_dict = compute_druglikeness(ligand_dict)
        filtered_examples = filter_and_flatten(
            ligand_dict, args.qed_thresh, args.max_occurences, args.random_seed)
        print(f'{len(filtered_examples)} examples after filtering')

        # Make data split
        data_split = split_by_ec_number(filtered_examples, args.num_val,
                                        args.num_test)

    else:
        # Use precomputed data split
        data_split = {}
        for split in ['test', 'val', 'train']:
            with open(f'data/moad_{split}.txt', 'r') as f:
                pocket_ids = f.read().split(',')
            # (ec-number, protein, molecule tuple)
            data_split[split] = [(None, x.split('_')[0][:4], (x.split('_')[1],))
                                 for x in pocket_ids]

    n_train_before = len(data_split['train'])
    n_val_before = len(data_split['val'])
    n_test_before = len(data_split['test'])

    # Read and process PDB files
    n_samples_after = {}
    for split in data_split.keys():
        lig_coords = []
        lig_one_hot = []
        lig_mask = []
        pocket_coords = []
        pocket_one_hot = []
        pocket_mask = []
        pdb_and_mol_ids = []
        receptors = []
        count = 0

        pdb_sdf_dir = processed_dir / split
        pdb_sdf_dir.mkdir(exist_ok=True)

        n_tot = len(data_split[split])
        pair_dict = ligand_list_to_dict(data_split[split])

        tic = time()
        num_failed = 0
        with tqdm(total=n_tot) as pbar:
            for p in pair_dict:

                pdb_successful = set()

                # try all available .bio files
                for pdbfile in sorted(pdbdir.glob(f"{p.lower()}.bio*")):

                    # Skip if all ligands have been processed already
                    if len(pair_dict[p]) == len(pdb_successful):
                        continue

                    pdb_struct = PDBParser(QUIET=True).get_structure('', pdbfile)
                    # Copy kept so ligands can be detached for the
                    # ligand-free receptor file written below.
                    struct_copy = pdb_struct.copy()

                    n_bio_successful = 0
                    for m in pair_dict[p]:

                        # Skip already processed ligand
                        if m[0] in pdb_successful:
                            continue

                        ligand_name, ligand_chain, ligand_resi = m[0].split(':')
                        ligand_resi = int(ligand_resi)

                        try:
                            ligand_data, pocket_data = process_ligand_and_pocket(
                                pdb_struct, ligand_name, ligand_chain, ligand_resi,
                                dist_cutoff=args.dist_cutoff, ca_only=args.ca_only)
                        except (KeyError, AssertionError, FileNotFoundError,
                                IndexError, ValueError) as e:
                            # print(type(e).__name__, e)
                            continue

                        pdb_and_mol_ids.append(f"{p}_{m[0]}")
                        receptors.append(pdbfile.name)
                        lig_coords.append(ligand_data['lig_coords'])
                        lig_one_hot.append(ligand_data['lig_one_hot'])
                        # Mask arrays assign every node its sample index so the
                        # flat arrays can be split back into molecules later.
                        lig_mask.append(
                            count * np.ones(len(ligand_data['lig_coords'])))
                        pocket_coords.append(pocket_data['pocket_coords'])
                        # pocket_quaternion.append(
                        #     pocket_data['pocket_quaternion'])
                        pocket_one_hot.append(pocket_data['pocket_one_hot'])
                        pocket_mask.append(
                            count * np.ones(len(pocket_data['pocket_coords'])))
                        count += 1

                        pdb_successful.add(m[0])
                        n_bio_successful += 1

                        # Save additional files for affinity analysis
                        if split in {'val', 'test'}:
                        # if split in {'val', 'test', 'train'}:
                            # remove ligand from receptor
                            try:
                                struct_copy[0][ligand_chain].detach_child((f'H_{ligand_name}', ligand_resi, ' '))
                            except KeyError:
                                warnings.warn(f"Could not find ligand {(f'H_{ligand_name}', ligand_resi, ' ')} in {pdbfile}")
                                continue

                            # Create SDF file
                            atom_types = [atom_decoder[np.argmax(i)] for i in ligand_data['lig_one_hot']]
                            xyz_file = Path(pdb_sdf_dir, 'tmp.xyz')
                            utils.write_xyz_file(ligand_data['lig_coords'], atom_types, xyz_file)

                            # Convert the temporary XYZ to SDF via OpenBabel.
                            obConversion = openbabel.OBConversion()
                            obConversion.SetInAndOutFormats("xyz", "sdf")
                            mol = openbabel.OBMol()
                            obConversion.ReadFile(mol, str(xyz_file))
                            xyz_file.unlink()

                            name = f"{p}-{pdbfile.suffix[1:]}_{m[0]}"
                            sdf_file = Path(pdb_sdf_dir, f'{name}.sdf')
                            obConversion.WriteFile(mol, str(sdf_file))

                            # specify pocket residues
                            with open(Path(pdb_sdf_dir, f'{name}.txt'), 'w') as f:
                                f.write(' '.join(pocket_data['pocket_ids']))

                    if split in {'val', 'test'} and n_bio_successful > 0:
                    # if split in {'val', 'test', 'train'} and n_bio_successful > 0:
                        # create receptor PDB file
                        pdb_file_out = Path(pdb_sdf_dir, f'{p}-{pdbfile.suffix[1:]}.pdb')
                        io = PDBIO()
                        io.set_structure(struct_copy)
                        io.save(str(pdb_file_out), select=Model0())

                pbar.update(len(pair_dict[p]))
                num_failed += (len(pair_dict[p]) - len(pdb_successful))
                pbar.set_description(f'#failed: {num_failed}')

        # Flatten the per-molecule lists into single arrays for storage.
        lig_coords = np.concatenate(lig_coords, axis=0)
        lig_one_hot = np.concatenate(lig_one_hot, axis=0)
        lig_mask = np.concatenate(lig_mask, axis=0)
        pocket_coords = np.concatenate(pocket_coords, axis=0)
        pocket_one_hot = np.concatenate(pocket_one_hot, axis=0)
        pocket_mask = np.concatenate(pocket_mask, axis=0)

        np.savez(processed_dir / f'{split}.npz', names=pdb_and_mol_ids,
                 receptors=receptors, lig_coords=lig_coords,
                 lig_one_hot=lig_one_hot, lig_mask=lig_mask,
                 pocket_coords=pocket_coords, pocket_one_hot=pocket_one_hot,
                 pocket_mask=pocket_mask)

        n_samples_after[split] = len(pdb_and_mol_ids)
        print(f"Processing {split} set took {(time() - tic)/60.0:.2f} minutes")

    # --------------------------------------------------------------------------
    # Compute statistics & additional information
    # --------------------------------------------------------------------------
    with np.load(processed_dir / 'train.npz', allow_pickle=True) as data:
        lig_mask = data['lig_mask']
        pocket_mask = data['pocket_mask']
        lig_coords = data['lig_coords']
        lig_one_hot = data['lig_one_hot']
        pocket_one_hot = data['pocket_one_hot']

    # Compute SMILES for all training examples
    train_smiles = compute_smiles(lig_coords, lig_one_hot, lig_mask)
    np.save(processed_dir / 'train_smiles.npy', train_smiles)

    # Joint histogram of number of ligand and pocket nodes
    n_nodes = get_n_nodes(lig_mask, pocket_mask, smooth_sigma=1.0)
    np.save(Path(processed_dir, 'size_distribution.npy'), n_nodes)

    # Convert bond length dictionaries to arrays for batch processing
    bonds1, bonds2, bonds3 = get_bond_length_arrays(atom_dict)

    # Get bond length definitions for Lennard-Jones potential
    rm_LJ = get_lennard_jones_rm(atom_dict)

    # Get histograms of ligand and pocket node types
    atom_hist, aa_hist = get_type_histograms(lig_one_hot, pocket_one_hot,
                                             atom_dict, amino_acid_dict)

    # Create summary string
    summary_string = '# SUMMARY\n\n'
    summary_string += '# Before processing\n'
    summary_string += f'num_samples train: {n_train_before}\n'
    summary_string += f'num_samples val: {n_val_before}\n'
    summary_string += f'num_samples test: {n_test_before}\n\n'
    summary_string += '# After processing\n'
    summary_string += f"num_samples train: {n_samples_after['train']}\n"
    summary_string += f"num_samples val: {n_samples_after['val']}\n"
    summary_string += f"num_samples test: {n_samples_after['test']}\n\n"
    summary_string += '# Info\n'
    summary_string += f"'atom_encoder': {atom_dict}\n"
    summary_string += f"'atom_decoder': {list(atom_dict.keys())}\n"
    summary_string += f"'aa_encoder': {amino_acid_dict}\n"
    summary_string += f"'aa_decoder': {list(amino_acid_dict.keys())}\n"
    summary_string += f"'bonds1': {bonds1.tolist()}\n"
    summary_string += f"'bonds2': {bonds2.tolist()}\n"
    summary_string += f"'bonds3': {bonds3.tolist()}\n"
    summary_string += f"'lennard_jones_rm': {rm_LJ.tolist()}\n"
    summary_string += f"'atom_hist': {atom_hist}\n"
    summary_string += f"'aa_hist': {aa_hist}\n"
    summary_string += f"'n_nodes': {n_nodes.tolist()}\n"

    # Write summary to text file
    with open(processed_dir / 'summary.txt', 'w') as f:
        f.write(summary_string)

    # Print summary
    print(summary_string)
|
process_crossdock.py
ADDED
|
@@ -0,0 +1,443 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from pathlib import Path
|
| 2 |
+
from time import time
|
| 3 |
+
import argparse
|
| 4 |
+
import shutil
|
| 5 |
+
import random
|
| 6 |
+
|
| 7 |
+
import matplotlib.pyplot as plt
|
| 8 |
+
import seaborn as sns
|
| 9 |
+
|
| 10 |
+
from tqdm import tqdm
|
| 11 |
+
import numpy as np
|
| 12 |
+
|
| 13 |
+
from Bio.PDB import PDBParser
|
| 14 |
+
from Bio.PDB.Polypeptide import three_to_one, is_aa
|
| 15 |
+
from rdkit import Chem
|
| 16 |
+
from scipy.ndimage import gaussian_filter
|
| 17 |
+
|
| 18 |
+
import torch
|
| 19 |
+
|
| 20 |
+
from analysis.molecule_builder import build_molecule
|
| 21 |
+
from analysis.metrics import rdmol_to_smiles
|
| 22 |
+
import constants
|
| 23 |
+
from constants import covalent_radii, dataset_params
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def process_ligand_and_pocket(pdbfile, sdffile,
                              atom_dict, dist_cutoff, ca_only):
    """Extract ligand atoms/coordinates and the interacting pocket.

    Args:
        pdbfile: path to the receptor PDB file.
        sdffile: path to the ligand SDF file.
        atom_dict: element symbol -> one-hot index for ligand atoms.
        dist_cutoff: pocket residues are kept if any atom is closer than
            this distance (in the coordinate units of the files) to the ligand.
        ca_only: if True, represent the pocket by C-alpha atoms only.

    Returns:
        (ligand_data, pocket_data) dicts with coordinates, one-hot encodings
        and (for the pocket) residue identifiers.

    Raises:
        KeyError / ValueError for unknown element types or unreadable input,
        so the caller can drop the whole example.
    """
    pdb_struct = PDBParser(QUIET=True).get_structure('', pdbfile)

    try:
        ligand = Chem.SDMolSupplier(str(sdffile))[0]
    except:
        raise Exception(f'cannot read sdf mol ({sdffile})')

    # remove H atoms if not in atom_dict, other atom types that aren't allowed
    # should stay so that the entire ligand can be removed from the dataset.
    # BUGFIX: RDKit atoms have no `.element` attribute (that is Biopython),
    # and the coordinates must be filtered with the same mask as the symbols
    # so that lig_coords and lig_one_hot stay aligned.
    kept_idx = [a.GetIdx() for a in ligand.GetAtoms()
                if (a.GetSymbol().capitalize() in atom_dict
                    or a.GetSymbol() != 'H')]
    lig_atoms = [ligand.GetAtomWithIdx(i).GetSymbol() for i in kept_idx]
    conf = ligand.GetConformer(0)
    lig_coords = np.array([list(conf.GetAtomPosition(i)) for i in kept_idx])

    try:
        lig_one_hot = np.stack([
            np.eye(1, len(atom_dict), atom_dict[a.capitalize()]).squeeze()
            for a in lig_atoms
        ])
    except KeyError as e:
        raise KeyError(
            f'{e} not in atom dict ({sdffile})')

    # Find interacting pocket residues based on distance cutoff
    pocket_residues = []
    for residue in pdb_struct[0].get_residues():
        res_coords = np.array([a.get_coord() for a in residue.get_atoms()])
        if is_aa(residue.get_resname(), standard=True) and \
                (((res_coords[:, None, :] - lig_coords[None, :, :]) ** 2).sum(
                    -1) ** 0.5).min() < dist_cutoff:
            pocket_residues.append(residue)

    pocket_ids = [f'{res.parent.id}:{res.id[1]}' for res in pocket_residues]
    ligand_data = {
        'lig_coords': lig_coords,
        'lig_one_hot': lig_one_hot,
    }
    if ca_only:
        try:
            pocket_one_hot = []
            full_coords = []
            for res in pocket_residues:
                for atom in res.get_atoms():
                    if atom.name == 'CA':
                        pocket_one_hot.append(np.eye(1, len(amino_acid_dict),
                            amino_acid_dict[three_to_one(res.get_resname())]).squeeze())
                        full_coords.append(atom.coord)
            pocket_one_hot = np.stack(pocket_one_hot)
            full_coords = np.stack(full_coords)
        except KeyError as e:
            raise KeyError(
                f'{e} not in amino acid dict ({pdbfile}, {sdffile})')
        pocket_data = {
            'pocket_coords': full_coords,
            'pocket_one_hot': pocket_one_hot,
            'pocket_ids': pocket_ids
        }
    else:
        full_atoms = np.concatenate(
            [np.array([atom.element for atom in res.get_atoms()])
             for res in pocket_residues], axis=0)
        full_coords = np.concatenate(
            [np.array([atom.coord for atom in res.get_atoms()])
             for res in pocket_residues], axis=0)
        try:
            pocket_one_hot = []
            kept = []
            for i, a in enumerate(full_atoms):
                # NOTE(review): membership is tested on the raw element symbol
                # but the index lookup uses `a.capitalize()` — confirm the
                # encoder's key casing matches PDB element fields.
                if a in amino_acid_dict:
                    row = np.eye(1, len(amino_acid_dict),
                                 amino_acid_dict[a.capitalize()]).squeeze()
                elif a != 'H':
                    # Out-of-range index yields an all-zero row that marks
                    # "other" atom types.
                    row = np.eye(1, len(amino_acid_dict),
                                 len(amino_acid_dict)).squeeze()
                else:
                    # BUGFIX: hydrogens without an encoder entry previously
                    # appended a stale row from the last iteration (or raised
                    # NameError on the first atom); skip them explicitly and
                    # drop their coordinates as well to keep arrays aligned.
                    continue
                pocket_one_hot.append(row)
                kept.append(i)
            pocket_one_hot = np.stack(pocket_one_hot)
            full_coords = full_coords[kept]
        except KeyError as e:
            raise KeyError(
                f'{e} not in atom dict ({pdbfile})')
        pocket_data = {
            'pocket_coords': full_coords,
            'pocket_one_hot': pocket_one_hot,
            'pocket_ids': pocket_ids
        }
    return ligand_data, pocket_data
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
def compute_smiles(positions, one_hot, mask):
    """Rebuild an RDKit molecule for every ligand in the flat training arrays
    and return the SMILES strings of those that sanitize successfully."""
    print("Computing SMILES ...")

    type_indices = np.argmax(one_hot, axis=-1)

    # Boundaries between consecutive ligands are where the mask id changes.
    boundaries = np.where(np.diff(mask))[0] + 1
    coords_per_mol = [torch.from_numpy(arr)
                      for arr in np.split(positions, boundaries)]
    types_per_mol = [torch.from_numpy(arr)
                     for arr in np.split(type_indices, boundaries)]

    smiles = []

    progress = tqdm(enumerate(zip(coords_per_mol, types_per_mol)),
                    total=len(np.unique(mask)))
    for mol_idx, (coords, types) in progress:
        candidate = build_molecule(coords, types, dataset_info)

        # BasicMolecularMetrics() computes SMILES after sanitization
        try:
            Chem.SanitizeMol(candidate)
        except ValueError:
            continue

        candidate = rdmol_to_smiles(candidate)
        if candidate is not None:
            smiles.append(candidate)
        progress.set_description(f'{len(smiles)}/{mol_idx + 1} successful')

    return smiles
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def get_n_nodes(lig_mask, pocket_mask, smooth_sigma=None):
    """Joint histogram of (ligand size, pocket size) over all samples.

    Args:
        lig_mask: flat array assigning each ligand node its sample index.
        pocket_mask: flat array assigning each pocket node its sample index.
        smooth_sigma: if given, apply a Gaussian filter with this sigma.

    Returns:
        2D array where entry [i, j] counts samples with i ligand nodes and
        j pocket nodes (smoothed if requested).
    """
    sample_ids_lig, lig_sizes = np.unique(lig_mask, return_counts=True)
    sample_ids_pocket, pocket_sizes = np.unique(pocket_mask, return_counts=True)
    # Every sample must appear in both masks.
    assert np.all(sample_ids_lig == sample_ids_pocket)

    hist = np.zeros((lig_sizes.max() + 1, pocket_sizes.max() + 1))
    for size_lig, size_pocket in zip(lig_sizes, pocket_sizes):
        hist[size_lig, size_pocket] += 1

    print(f'Original histogram: {np.count_nonzero(hist)}/'
          f'{hist.shape[0] * hist.shape[1]} bins filled')

    # Smooth the histogram
    if smooth_sigma is not None:
        smoothed = gaussian_filter(
            hist, sigma=smooth_sigma, order=0, mode='constant',
            cval=0.0, truncate=4.0)

        print(f'Smoothed histogram: {np.count_nonzero(smoothed)}/'
              f'{smoothed.shape[0] * smoothed.shape[1]} bins filled')

        hist = smoothed

    return hist
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
def get_bond_length_arrays(atom_mapping):
    """Convert the three bond-length dictionaries in `constants` (bonds1/2/3)
    into dense symmetric matrices indexed by `atom_mapping`.

    Pairs without a tabulated bond length get 0. Returns a list of three
    (n_types, n_types) arrays, one per bond order.
    """
    n_types = len(atom_mapping)
    tables = []
    for order in (1, 2, 3):
        lengths = getattr(constants, f'bonds{order}')
        table = np.zeros((n_types, n_types))
        for elem_a, row in atom_mapping.items():
            for elem_b, col in atom_mapping.items():
                table[row, col] = lengths.get(elem_a, {}).get(elem_b, 0)

        assert np.all(table == table.T)
        tables.append(table)

    return tables
|
| 190 |
+
|
| 191 |
+
|
| 192 |
+
def get_lennard_jones_rm(atom_mapping):
    """Build the matrix of Lennard-Jones equilibrium distances (r_m).

    For each pair of element types, r_m is the shortest tabulated bond
    length across the single/double/triple bond tables in `constants`.
    Pairs without any tabulated bond fall back to the sum of the average
    covalent radii; pairs involving the 'others' placeholder get 0.
    """
    n_types = len(atom_mapping)
    rm = np.zeros((n_types, n_types))

    for elem_a, row in atom_mapping.items():
        for elem_b, col in atom_mapping.items():
            candidates = []
            for table_name in ('bonds1', 'bonds2', 'bonds3'):
                table = getattr(constants, table_name)
                if elem_a in table and elem_b in table[elem_a]:
                    candidates.append(table[elem_a][elem_b])

            if candidates:
                # take the shortest possible bond length because slightly
                # larger values aren't penalized as much
                dist = min(candidates)
            elif elem_a == 'others' or elem_b == 'others':
                dist = 0
            else:
                # Replace missing values with sum of average covalent radii
                dist = covalent_radii[elem_a] + covalent_radii[elem_b]

            rm[row, col] = dist

    assert np.all(rm == rm.T)
    return rm
|
| 219 |
+
|
| 220 |
+
|
| 221 |
+
def get_type_histograms(lig_one_hot, pocket_one_hot, atom_encoder, aa_encoder):
    """Histogram ligand atom types and pocket node types from one-hot arrays.

    Returns two dicts (label -> count) whose keys follow the encoder order;
    labels that never occur are included with count 0.
    """
    def _histogram(one_hot, labels):
        # The encoder's key order defines the one-hot column order.
        counts = dict.fromkeys(labels, 0)
        for label in (labels[i] for i in one_hot.argmax(1)):
            counts[label] += 1
        return counts

    atom_counts = _histogram(lig_one_hot, list(atom_encoder))
    aa_counts = _histogram(pocket_one_hot, list(aa_encoder))
    return atom_counts, aa_counts
|
| 233 |
+
|
| 234 |
+
|
| 235 |
+
def saveall(filename, pdb_and_mol_ids, lig_coords, lig_one_hot, lig_mask,
            pocket_coords, pocket_one_hot, pocket_mask):
    """Persist one processed data split as a single .npz archive.

    Returns True on success so callers can assert on the result.
    """
    arrays = {
        'names': pdb_and_mol_ids,
        'lig_coords': lig_coords,
        'lig_one_hot': lig_one_hot,
        'lig_mask': lig_mask,
        'pocket_coords': pocket_coords,
        'pocket_one_hot': pocket_one_hot,
        'pocket_mask': pocket_mask,
    }
    np.savez(filename, **arrays)
    return True
|
| 247 |
+
|
| 248 |
+
|
| 249 |
+
if __name__ == '__main__':
    # CLI entry point: convert the CrossDocked pocket dataset into processed
    # train/val/test .npz archives plus dataset statistics.
    parser = argparse.ArgumentParser()
    parser.add_argument('basedir', type=Path)
    parser.add_argument('--outdir', type=Path, default=None)
    parser.add_argument('--no_H', action='store_true')
    parser.add_argument('--ca_only', action='store_true')
    parser.add_argument('--dist_cutoff', type=float, default=8.0)
    parser.add_argument('--random_seed', type=int, default=42)
    args = parser.parse_args()

    datadir = args.basedir / 'crossdocked_pocket10/'

    # Select dataset-specific encoders depending on pocket representation.
    if args.ca_only:
        dataset_info = dataset_params['crossdock']
    else:
        dataset_info = dataset_params['crossdock_full']
    amino_acid_dict = dataset_info['aa_encoder']
    atom_dict = dataset_info['atom_encoder']
    atom_decoder = dataset_info['atom_decoder']

    # Make output directory
    if args.outdir is None:
        suffix = '_crossdock' if 'H' in atom_dict else '_crossdock_noH'
        suffix += '_ca_only_temp' if args.ca_only else '_full_temp'
        processed_dir = Path(args.basedir, f'processed{suffix}')
    else:
        processed_dir = args.outdir

    processed_dir.mkdir(exist_ok=True, parents=True)

    # Read data split
    split_path = Path(args.basedir, 'split_by_name.pt')
    data_split = torch.load(split_path)

    # There is no validation set, copy 300 training examples (the validation set
    # is not very important in this application)
    # Note: before we had a data leak but it should not matter too much as most
    # metrics monitored during training are independent of the pockets
    data_split['val'] = random.sample(data_split['train'], 300)

    n_train_before = len(data_split['train'])
    n_val_before = len(data_split['val'])
    n_test_before = len(data_split['test'])

    # Pairs that could not be parsed/processed; reported at the end.
    failed_save = []

    n_samples_after = {}
    for split in data_split.keys():
        lig_coords = []
        lig_one_hot = []
        lig_mask = []
        pocket_coords = []
        pocket_one_hot = []
        pocket_mask = []
        pdb_and_mol_ids = []
        count_protein = []
        count_ligand = []
        count_total = []
        count = 0

        pdb_sdf_dir = processed_dir / split
        pdb_sdf_dir.mkdir(exist_ok=True)

        tic = time()
        num_failed = 0
        pbar = tqdm(data_split[split])
        pbar.set_description(f'#failed: {num_failed}')
        for pocket_fn, ligand_fn in pbar:

            sdffile = datadir / f'{ligand_fn}'
            pdbfile = datadir / f'{pocket_fn}'

            # Parse the receptor first so unreadable PDBs are skipped early.
            try:
                struct_copy = PDBParser(QUIET=True).get_structure('', pdbfile)
            except:
                num_failed += 1
                failed_save.append((pocket_fn, ligand_fn))
                print(failed_save[-1])
                pbar.set_description(f'#failed: {num_failed}')
                continue

            try:
                ligand_data, pocket_data = process_ligand_and_pocket(
                    pdbfile, sdffile,
                    atom_dict=atom_dict, dist_cutoff=args.dist_cutoff,
                    ca_only=args.ca_only)
            except (KeyError, AssertionError, FileNotFoundError, IndexError,
                    ValueError) as e:
                print(type(e).__name__, e, pocket_fn, ligand_fn)
                num_failed += 1
                pbar.set_description(f'#failed: {num_failed}')
                continue

            pdb_and_mol_ids.append(f"{pocket_fn}_{ligand_fn}")
            lig_coords.append(ligand_data['lig_coords'])
            lig_one_hot.append(ligand_data['lig_one_hot'])
            # Mask arrays assign every node its sample index so the flat
            # arrays can be split back into molecules later.
            lig_mask.append(count * np.ones(len(ligand_data['lig_coords'])))
            pocket_coords.append(pocket_data['pocket_coords'])
            pocket_one_hot.append(pocket_data['pocket_one_hot'])
            pocket_mask.append(
                count * np.ones(len(pocket_data['pocket_coords'])))
            count_protein.append(pocket_data['pocket_coords'].shape[0])
            count_ligand.append(ligand_data['lig_coords'].shape[0])
            count_total.append(pocket_data['pocket_coords'].shape[0] +
                               ligand_data['lig_coords'].shape[0])
            count += 1

            if split in {'val', 'test'}:
                # Copy PDB file
                new_rec_name = Path(pdbfile).stem.replace('_', '-')
                pdb_file_out = Path(pdb_sdf_dir, f"{new_rec_name}.pdb")
                shutil.copy(pdbfile, pdb_file_out)

                # Copy SDF file
                new_lig_name = new_rec_name + '_' + Path(sdffile).stem.replace('_', '-')
                sdf_file_out = Path(pdb_sdf_dir, f'{new_lig_name}.sdf')
                shutil.copy(sdffile, sdf_file_out)

                # specify pocket residues
                with open(Path(pdb_sdf_dir, f'{new_lig_name}.txt'), 'w') as f:
                    f.write(' '.join(pocket_data['pocket_ids']))

        # Flatten the per-molecule lists into single arrays for storage.
        lig_coords = np.concatenate(lig_coords, axis=0)
        lig_one_hot = np.concatenate(lig_one_hot, axis=0)
        lig_mask = np.concatenate(lig_mask, axis=0)
        pocket_coords = np.concatenate(pocket_coords, axis=0)
        pocket_one_hot = np.concatenate(pocket_one_hot, axis=0)
        pocket_mask = np.concatenate(pocket_mask, axis=0)

        saveall(processed_dir / f'{split}.npz', pdb_and_mol_ids, lig_coords,
                lig_one_hot, lig_mask, pocket_coords,
                pocket_one_hot, pocket_mask)

        n_samples_after[split] = len(pdb_and_mol_ids)
        print(f"Processing {split} set took {(time() - tic) / 60.0:.2f} minutes")

    # --------------------------------------------------------------------------
    # Compute statistics & additional information
    # --------------------------------------------------------------------------
    with np.load(processed_dir / 'train.npz', allow_pickle=True) as data:
        lig_mask = data['lig_mask']
        pocket_mask = data['pocket_mask']
        lig_coords = data['lig_coords']
        lig_one_hot = data['lig_one_hot']
        pocket_one_hot = data['pocket_one_hot']

    # Compute SMILES for all training examples
    train_smiles = compute_smiles(lig_coords, lig_one_hot, lig_mask)
    np.save(processed_dir / 'train_smiles.npy', train_smiles)

    # Joint histogram of number of ligand and pocket nodes
    n_nodes = get_n_nodes(lig_mask, pocket_mask, smooth_sigma=1.0)
    np.save(Path(processed_dir, 'size_distribution.npy'), n_nodes)

    # Convert bond length dictionaries to arrays for batch processing
    bonds1, bonds2, bonds3 = get_bond_length_arrays(atom_dict)

    # Get bond length definitions for Lennard-Jones potential
    rm_LJ = get_lennard_jones_rm(atom_dict)

    # Get histograms of ligand and pocket node types
    atom_hist, aa_hist = get_type_histograms(lig_one_hot, pocket_one_hot,
                                             atom_dict, amino_acid_dict)

    # Create summary string
    summary_string = '# SUMMARY\n\n'
    summary_string += '# Before processing\n'
    summary_string += f'num_samples train: {n_train_before}\n'
    summary_string += f'num_samples val: {n_val_before}\n'
    summary_string += f'num_samples test: {n_test_before}\n\n'
    summary_string += '# After processing\n'
    summary_string += f"num_samples train: {n_samples_after['train']}\n"
    summary_string += f"num_samples val: {n_samples_after['val']}\n"
    summary_string += f"num_samples test: {n_samples_after['test']}\n\n"
    summary_string += '# Info\n'
    summary_string += f"'atom_encoder': {atom_dict}\n"
    summary_string += f"'atom_decoder': {list(atom_dict.keys())}\n"
    summary_string += f"'aa_encoder': {amino_acid_dict}\n"
    summary_string += f"'aa_decoder': {list(amino_acid_dict.keys())}\n"
    summary_string += f"'bonds1': {bonds1.tolist()}\n"
    summary_string += f"'bonds2': {bonds2.tolist()}\n"
    summary_string += f"'bonds3': {bonds3.tolist()}\n"
    summary_string += f"'lennard_jones_rm': {rm_LJ.tolist()}\n"
    summary_string += f"'atom_hist': {atom_hist}\n"
    summary_string += f"'aa_hist': {aa_hist}\n"
    summary_string += f"'n_nodes': {n_nodes.tolist()}\n"

    # Write summary to text file
    with open(processed_dir / 'summary.txt', 'w') as f:
        f.write(summary_string)

    # Print summary
    print(summary_string)

    # Report all (pocket, ligand) pairs whose receptor could not be parsed.
    print(failed_save)
|
test.py
ADDED
|
@@ -0,0 +1,176 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import argparse
|
| 2 |
+
import warnings
|
| 3 |
+
from pathlib import Path
|
| 4 |
+
from time import time
|
| 5 |
+
|
| 6 |
+
import torch
|
| 7 |
+
from rdkit import Chem
|
| 8 |
+
from tqdm import tqdm
|
| 9 |
+
|
| 10 |
+
from lightning_modules import LigandPocketDDPM
|
| 11 |
+
from analysis.molecule_builder import process_molecule
|
| 12 |
+
import utils
|
| 13 |
+
|
| 14 |
+
MAXITER = 10  # max sampling rounds per pocket before giving up
MAXNTRIES = 10  # max retries per pocket after a RuntimeError/ValueError


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('checkpoint', type=Path)
    parser.add_argument('--test_dir', type=Path)
    parser.add_argument('--test_list', type=Path, default=None)
    parser.add_argument('--outdir', type=Path)
    parser.add_argument('--n_samples', type=int, default=100)
    parser.add_argument('--all_frags', action='store_true')
    parser.add_argument('--sanitize', action='store_true')
    parser.add_argument('--relax', action='store_true')
    parser.add_argument('--batch_size', type=int, default=120)
    parser.add_argument('--resamplings', type=int, default=10)
    parser.add_argument('--jump_length', type=int, default=1)
    parser.add_argument('--timesteps', type=int, default=None)
    parser.add_argument('--fix_n_nodes', action='store_true')
    parser.add_argument('--n_nodes_bias', type=int, default=0)
    parser.add_argument('--n_nodes_min', type=int, default=0)
    parser.add_argument('--skip_existing', action='store_true')
    args = parser.parse_args()

    device = 'cuda' if torch.cuda.is_available() else 'cpu'

    # Output directories; pre-existing dirs are only tolerated when resuming
    args.outdir.mkdir(exist_ok=args.skip_existing)
    raw_sdf_dir = Path(args.outdir, 'raw')
    raw_sdf_dir.mkdir(exist_ok=args.skip_existing)
    processed_sdf_dir = Path(args.outdir, 'processed')
    processed_sdf_dir.mkdir(exist_ok=args.skip_existing)
    times_dir = Path(args.outdir, 'pocket_times')
    times_dir.mkdir(exist_ok=args.skip_existing)

    # Load model
    model = LigandPocketDDPM.load_from_checkpoint(
        args.checkpoint, map_location=device)
    model = model.to(device)

    # Reference ligand SDF files, optionally restricted to an explicit list
    test_files = list(args.test_dir.glob('[!.]*.sdf'))
    if args.test_list is not None:
        with open(args.test_list, 'r') as f:
            test_list = set(f.read().split(','))
        test_files = [x for x in test_files if x.stem in test_list]

    pbar = tqdm(test_files)
    time_per_pocket = {}
    for sdf_file in pbar:
        ligand_name = sdf_file.stem

        pdb_name, pocket_id, *suffix = ligand_name.split('_')
        pdb_file = Path(sdf_file.parent, f"{pdb_name}.pdb")
        txt_file = Path(sdf_file.parent, f"{ligand_name}.txt")
        sdf_out_file_raw = Path(raw_sdf_dir, f'{ligand_name}_gen.sdf')
        sdf_out_file_processed = Path(processed_sdf_dir,
                                      f'{ligand_name}_gen.sdf')
        time_file = Path(times_dir, f'{ligand_name}.txt')

        # Resume support: reuse previous results if all outputs already exist
        if args.skip_existing and time_file.exists() \
                and sdf_out_file_processed.exists() \
                and sdf_out_file_raw.exists():

            with open(time_file, 'r') as f:
                time_per_pocket[str(sdf_file)] = float(f.read().split()[1])

            continue

        for n_try in range(MAXNTRIES):

            try:
                t_pocket_start = time()

                with open(txt_file, 'r') as f:
                    resi_list = f.read().split()

                if args.fix_n_nodes:
                    # some ligands (e.g. 6JWS_bio1_PT1:A:801) could not be read with sanitize=True
                    suppl = Chem.SDMolSupplier(str(sdf_file), sanitize=False)
                    num_nodes_lig = suppl[0].GetNumAtoms()
                else:
                    num_nodes_lig = None

                all_molecules = []
                valid_molecules = []
                processed_molecules = []  # only used as temporary variable
                n_rounds = 0  # renamed from 'iter' (shadowed the builtin)
                n_generated = 0
                n_valid = 0
                while len(valid_molecules) < args.n_samples:
                    n_rounds += 1
                    if n_rounds > MAXITER:
                        raise RuntimeError('Maximum number of iterations has been exceeded.')

                    num_nodes_lig_inflated = None if num_nodes_lig is None else \
                        torch.ones(args.batch_size, dtype=int) * num_nodes_lig

                    # Turn all filters off first
                    mols_batch = model.generate_ligands(
                        pdb_file, args.batch_size, resi_list,
                        num_nodes_lig=num_nodes_lig_inflated,
                        timesteps=args.timesteps, sanitize=False,
                        largest_frag=False, relax_iter=0,
                        n_nodes_bias=args.n_nodes_bias,
                        n_nodes_min=args.n_nodes_min,
                        resamplings=args.resamplings,
                        jump_length=args.jump_length)

                    all_molecules.extend(mols_batch)

                    # Filter to find valid molecules
                    mols_batch_processed = [
                        process_molecule(m, sanitize=args.sanitize,
                                         relax_iter=(200 if args.relax else 0),
                                         largest_frag=not args.all_frags)
                        for m in mols_batch
                    ]
                    processed_molecules.extend(mols_batch_processed)
                    valid_mols_batch = [m for m in mols_batch_processed if m is not None]

                    n_generated += args.batch_size
                    n_valid += len(valid_mols_batch)
                    valid_molecules.extend(valid_mols_batch)

                # Remove excess molecules from list
                valid_molecules = valid_molecules[:args.n_samples]

                # Reorder raw files: valid molecules first, invalid ones after
                all_molecules = \
                    [all_molecules[i] for i, m in enumerate(processed_molecules)
                     if m is not None] + \
                    [all_molecules[i] for i, m in enumerate(processed_molecules)
                     if m is None]

                # Write SDF files
                utils.write_sdf_file(sdf_out_file_raw, all_molecules)
                utils.write_sdf_file(sdf_out_file_processed, valid_molecules)

                # Time the sampling process
                time_per_pocket[str(sdf_file)] = time() - t_pocket_start
                with open(time_file, 'w') as f:
                    f.write(f"{str(sdf_file)} {time_per_pocket[str(sdf_file)]}")

                pbar.set_description(
                    f'Last processed: {ligand_name}. '
                    f'Validity: {n_valid / n_generated * 100:.2f}%. '
                    f'{(time() - t_pocket_start) / len(valid_molecules):.2f} '
                    f'sec/mol.')

                break  # no more tries needed

            except (RuntimeError, ValueError) as e:
                if n_try >= MAXNTRIES - 1:
                    # chain the original failure so it is not lost
                    raise RuntimeError("Maximum number of retries exceeded") from e
                warnings.warn(f"Attempt {n_try + 1}/{MAXNTRIES} failed with "
                              f"error: '{e}'. Trying again...")

    with open(Path(args.outdir, 'pocket_times.txt'), 'w') as f:
        for k, v in time_per_pocket.items():
            f.write(f"{k} {v}\n")

    times_arr = torch.tensor(list(time_per_pocket.values()))
    # '\pm' was an invalid escape sequence (SyntaxWarning in modern Python);
    # '\\pm' prints the same text without the warning
    print(f"Time per pocket: {times_arr.mean():.3f} \\pm "
          f"{times_arr.std(unbiased=False):.2f}")
|
train.py
ADDED
|
@@ -0,0 +1,122 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import argparse
|
| 2 |
+
from argparse import Namespace
|
| 3 |
+
from pathlib import Path
|
| 4 |
+
import warnings
|
| 5 |
+
|
| 6 |
+
import torch
|
| 7 |
+
import pytorch_lightning as pl
|
| 8 |
+
import yaml
|
| 9 |
+
import numpy as np
|
| 10 |
+
|
| 11 |
+
from lightning_modules import LigandPocketDDPM
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def merge_args_and_yaml(args, config_dict):
    """Copy YAML config entries onto an argparse namespace, in place.

    Config-file values take precedence over command line arguments (a
    warning is emitted for every overwritten key). Nested dictionaries
    are wrapped in argparse.Namespace objects for attribute access.
    Returns the (mutated) `args` namespace.
    """
    arg_dict = vars(args)
    for name, val in config_dict.items():
        if name in arg_dict:
            warnings.warn(f"Command line argument '{name}' (value: "
                          f"{arg_dict[name]}) will be overwritten with value "
                          f"{val} provided in the config file.")
        arg_dict[name] = Namespace(**val) if isinstance(val, dict) else val

    return args
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def merge_configs(config, resume_config):
    """Overwrite entries of `config` with values from a checkpoint config.

    Namespace values in `resume_config` are converted back to plain
    dicts. A warning is emitted whenever an existing entry changes.
    Returns the (mutated) `config` dict.
    """
    for name, new_val in resume_config.items():
        if isinstance(new_val, Namespace):
            new_val = vars(new_val)
        if name in config and config[name] != new_val:
            warnings.warn(f"Config parameter '{name}' (value: "
                          f"{config[name]}) will be overwritten with value "
                          f"{new_val} from the checkpoint.")
        config[name] = new_val
    return config
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
# ------------------------------------------------------------------------------
|
| 42 |
+
# Training
|
| 43 |
+
# ______________________________________________________________________________
|
| 44 |
+
if __name__ == "__main__":
    p = argparse.ArgumentParser()
    p.add_argument('--config', type=str, required=True)
    p.add_argument('--resume', type=str, default=None)
    args = p.parse_args()

    # Load the training configuration from the YAML file
    with open(args.config, 'r') as f:
        config = yaml.safe_load(f)

    # 'resume' is a command-line-only option; it must not be set in the YAML
    assert 'resume' not in config

    # Get main config
    ckpt_path = None if args.resume is None else Path(args.resume)
    if args.resume is not None:
        # When resuming, the hyper-parameters stored in the checkpoint
        # override the YAML config (merge_configs warns on conflicts)
        resume_config = torch.load(
            ckpt_path, map_location=torch.device('cpu'))['hyper_parameters']

        config = merge_configs(config, resume_config)

    # Flatten the merged config onto the argparse namespace
    args = merge_args_and_yaml(args, config)

    out_dir = Path(args.logdir, args.run_name)
    # Ligand-size histogram used as the prior over the number of nodes
    histogram_file = Path(args.datadir, 'size_distribution.npy')
    histogram = np.load(histogram_file).tolist()
    pl_module = LigandPocketDDPM(
        outdir=out_dir,
        dataset=args.dataset,
        datadir=args.datadir,
        batch_size=args.batch_size,
        lr=args.lr,
        egnn_params=args.egnn_params,
        diffusion_params=args.diffusion_params,
        num_workers=args.num_workers,
        augment_noise=args.augment_noise,
        augment_rotation=args.augment_rotation,
        clip_grad=args.clip_grad,
        eval_epochs=args.eval_epochs,
        eval_params=args.eval_params,
        visualize_sample_epoch=args.visualize_sample_epoch,
        visualize_chain_epoch=args.visualize_chain_epoch,
        auxiliary_loss=args.auxiliary_loss,
        loss_params=args.loss_params,
        mode=args.mode,
        node_histogram=histogram,
        pocket_representation=args.pocket_representation,
        virtual_nodes=args.virtual_nodes
    )

    # Weights & Biases logger; run_name doubles as the wandb run id so
    # that resumed runs continue logging to the same wandb run
    logger = pl.loggers.WandbLogger(
        save_dir=args.logdir,
        project='ligand-pocket-ddpm',
        group=args.wandb_params.group,
        name=args.run_name,
        id=args.run_name,
        resume='must' if args.resume is not None else False,
        entity=args.wandb_params.entity,
        mode=args.wandb_params.mode,
    )

    # Keep the single best checkpoint by validation loss, plus the last one
    checkpoint_callback = pl.callbacks.ModelCheckpoint(
        dirpath=Path(out_dir, 'checkpoints'),
        filename="best-model-epoch={epoch:02d}",
        monitor="loss/val",
        save_top_k=1,
        save_last=True,
        mode="min",
    )

    trainer = pl.Trainer(
        max_epochs=args.n_epochs,
        logger=logger,
        callbacks=[checkpoint_callback],
        enable_progress_bar=args.enable_progress_bar,
        num_sanity_val_steps=args.num_sanity_val_steps,
        accelerator='gpu', devices=args.gpus,
        # NOTE(review): strategy=None is only valid on older
        # pytorch-lightning versions — confirm against the pinned version
        strategy=('ddp' if args.gpus > 1 else None)
    )

    trainer.fit(model=pl_module, ckpt_path=ckpt_path)
|
utils.py
ADDED
|
@@ -0,0 +1,234 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Union, Iterable
|
| 2 |
+
|
| 3 |
+
import numpy as np
|
| 4 |
+
import torch
|
| 5 |
+
import torch.nn.functional as F
|
| 6 |
+
from rdkit import Chem
|
| 7 |
+
import networkx as nx
|
| 8 |
+
from networkx.algorithms import isomorphism
|
| 9 |
+
from Bio.PDB.Polypeptide import is_aa
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class Queue():
    """Fixed-capacity buffer with running statistics.

    New items are inserted at the front; once more than `max_len` items
    are stored, the oldest one (at the back) is discarded.
    """

    def __init__(self, max_len=50):
        self.items = []
        self.max_len = max_len

    def __len__(self):
        return len(self.items)

    def add(self, item):
        """Insert `item`, evicting the oldest entry if over capacity."""
        self.items.insert(0, item)
        if len(self.items) > self.max_len:
            del self.items[-1]

    def mean(self):
        """Arithmetic mean of the stored items."""
        return np.mean(self.items)

    def std(self):
        """(Population) standard deviation of the stored items."""
        return np.std(self.items)
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def reverse_tensor(x):
    """Return a copy of `x` with its first dimension reversed.

    Uses torch.flip instead of building an explicit index tensor —
    same result, idiomatic and avoids the intermediate arange.
    """
    return torch.flip(x, dims=(0,))
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
#####
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def get_grad_norm(
        parameters: Union[torch.Tensor, Iterable[torch.Tensor]],
        norm_type: float = 2.0) -> torch.Tensor:
    """Total gradient norm over all parameters that carry a gradient.

    Adapted from: https://pytorch.org/docs/stable/_modules/torch/nn/utils/clip_grad.html#clip_grad_norm_
    Returns a scalar tensor (0. if no parameter has a gradient).
    """
    if isinstance(parameters, torch.Tensor):
        parameters = [parameters]

    norm_type = float(norm_type)
    grads = [p.grad.detach() for p in parameters if p.grad is not None]

    if not grads:
        return torch.tensor(0.)

    # Accumulate per-parameter norms on the first gradient's device
    device = grads[0].device
    per_param_norms = torch.stack(
        [torch.norm(g, norm_type).to(device) for g in grads])
    return torch.norm(per_param_norms, norm_type)
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
def write_xyz_file(coords, atom_types, filename):
    """Write coordinates and element symbols to `filename` in XYZ format.

    `coords` is an (N, 3) array-like; `atom_types` is a matching
    sequence of element symbols.
    """
    assert len(coords) == len(atom_types)
    lines = [f"{len(coords)}\n\n"]
    for symbol, pos in zip(atom_types, coords):
        lines.append(f"{symbol} {pos[0]:.3f} {pos[1]:.3f} {pos[2]:.3f}\n")
    with open(filename, 'w') as f:
        f.write("".join(lines))
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def write_sdf_file(sdf_path, molecules):
    """Write all non-None molecules in `molecules` to an SDF file.

    NOTE Uses an explicit writer instead of the context-manager form so
    the code stays compatible with older rdkit versions whose SDWriter
    does not implement the context-manager protocol.
    """
    w = Chem.SDWriter(str(sdf_path))
    try:
        w.SetKekulize(False)
        for m in molecules:
            if m is not None:
                w.write(m)
    finally:
        # Bug fix: the writer was never closed, so the output file could
        # remain unflushed/truncated until garbage collection.
        w.close()
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
def residues_to_atoms(x_ca, atom_encoder):
    """Reinterpret CA coordinates as carbon atoms.

    Returns the coordinates unchanged together with a one-hot type
    matrix in which every row encodes carbon.
    """
    carbon_idx = torch.tensor(atom_encoder['C'], device=x_ca.device)
    carbon_one_hot = F.one_hot(carbon_idx, num_classes=len(atom_encoder))
    one_hot = carbon_one_hot.repeat(*x_ca.shape[:-1], 1)
    return x_ca, one_hot
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
def get_residue_with_resi(pdb_chain, resi):
    """Return the unique residue in `pdb_chain` whose residue number is `resi`."""
    matching = [r for r in pdb_chain.get_residues() if r.id[1] == resi]
    # exactly one residue is expected to carry this number
    assert len(matching) == 1
    return matching[0]
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
def get_pocket_from_ligand(pdb_model, ligand, dist_cutoff=8.0):
    """Collect standard amino-acid residues close to a ligand.

    Args:
        pdb_model: Bio.PDB model object.
        ligand: either a path to an SDF file or a '<chain>:<resi>'
            reference to a residue inside `pdb_model`.
        dist_cutoff: pocket radius (minimum atom-atom distance, in the
            structure's coordinate units).

    Returns:
        List of Bio.PDB residue objects forming the pocket.
    """
    lig_residue = None
    if ligand.endswith(".sdf"):
        # ligand as sdf file
        rdmol = Chem.SDMolSupplier(str(ligand))[0]
        ligand_coords = torch.from_numpy(rdmol.GetConformer().GetPositions()).float()
    else:
        # ligand contained in PDB; given in <chain>:<resi> format
        chain, resi = ligand.split(':')
        lig_residue = get_residue_with_resi(pdb_model[chain], int(resi))
        ligand_coords = torch.from_numpy(
            np.array([a.get_coord() for a in lig_residue.get_atoms()]))

    pocket_residues = []
    for residue in pdb_model.get_residues():
        # Bug fix: previously this compared residue.id[1] (an int)
        # against the raw string from split(':'), so the check never
        # matched and the ligand was only excluded because is_aa() fails
        # for hetero residues. Comparing object identity skips exactly
        # the ligand residue and never a same-numbered residue of
        # another chain.
        if residue is lig_residue:
            continue  # skip ligand itself

        res_coords = torch.from_numpy(
            np.array([a.get_coord() for a in residue.get_atoms()]))
        if is_aa(residue.get_resname(), standard=True) \
                and torch.cdist(res_coords, ligand_coords).min() < dist_cutoff:
            pocket_residues.append(residue)

    return pocket_residues
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
def batch_to_list(data, batch_mask):
    """Split `data` into per-sample chunks according to `batch_mask`.

    `batch_mask` holds one integer sample index per row of `data`.
    Returns a tuple of tensors, one per distinct sample index, in
    ascending index order.
    """
    # Make sure batch_mask is increasing. A *stable* sort is required so
    # that rows belonging to the same sample keep their original relative
    # order (the default sort may permute equal keys).
    idx = torch.argsort(batch_mask, stable=True)
    batch_mask = batch_mask[idx]
    data = data[idx]

    chunk_sizes = torch.unique(batch_mask, return_counts=True)[1].tolist()
    return torch.split(data, chunk_sizes)
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
def num_nodes_to_batch_mask(n_samples, num_nodes, device):
    """Expand per-sample node counts into a flat sample-index mask.

    `num_nodes` is either a single int (same size for every sample) or a
    length-`n_samples` sequence of per-sample sizes.
    """
    assert isinstance(num_nodes, int) or len(num_nodes) == n_samples

    if isinstance(num_nodes, torch.Tensor):
        num_nodes = num_nodes.to(device)

    sample_ids = torch.arange(n_samples, device=device)
    return torch.repeat_interleave(sample_ids, num_nodes)
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
def rdmol_to_nxgraph(rdmol):
    """Convert an RDKit molecule into a NetworkX graph.

    Atoms become nodes (keyed by atom index, annotated with
    'atom_type' = atomic number); bonds become edges.
    """
    graph = nx.Graph()
    graph.add_nodes_from(
        (atom.GetIdx(), {'atom_type': atom.GetAtomicNum()})
        for atom in rdmol.GetAtoms())
    graph.add_edges_from(
        (bond.GetBeginAtomIdx(), bond.GetEndAtomIdx())
        for bond in rdmol.GetBonds())
    return graph
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
def calc_rmsd(mol_a, mol_b):
    """ Calculate RMSD of two molecules with unknown atom correspondence.

    Enumerates all graph isomorphisms between the two molecular graphs
    (matching atoms by atomic number) and returns the minimum RMSD over
    all mappings, or None if the graphs are not isomorphic.
    """
    graph_a = rdmol_to_nxgraph(mol_a)
    graph_b = rdmol_to_nxgraph(mol_b)

    gm = isomorphism.GraphMatcher(
        graph_a, graph_b,
        node_match=lambda na, nb: na['atom_type'] == nb['atom_type'])

    isomorphisms = list(gm.isomorphisms_iter())
    if len(isomorphisms) < 1:
        return None

    # Hoisted loop invariants: mol_a's atom types/coordinates and both
    # conformers do not depend on the mapping.
    atom_types_a = [atom.GetAtomicNum() for atom in mol_a.GetAtoms()]
    conf_a = mol_a.GetConformer()
    coords_a = np.array([conf_a.GetAtomPosition(i)
                         for i in range(mol_a.GetNumAtoms())])
    conf_b = mol_b.GetConformer()

    all_rmsds = []
    for mapping in isomorphisms:
        atom_types_b = [mol_b.GetAtomWithIdx(mapping[i]).GetAtomicNum()
                        for i in range(mol_b.GetNumAtoms())]
        assert atom_types_a == atom_types_b

        coords_b = np.array([conf_b.GetAtomPosition(mapping[i])
                             for i in range(mol_b.GetNumAtoms())])

        diff = coords_a - coords_b
        rmsd = np.sqrt(np.mean(np.sum(diff * diff, axis=1)))
        all_rmsds.append(rmsd)

    if len(isomorphisms) > 1:
        print("More than one isomorphism found. Returning minimum RMSD.")

    return min(all_rmsds)
|
| 205 |
+
|
| 206 |
+
|
| 207 |
+
class AppendVirtualNodes:
    """Dataset transform that pads a ligand with virtual atoms.

    Virtual coordinates are sampled from a normal distribution centred
    on the ligand's mean coordinate (scaled by the largest per-axis
    standard deviation). A fresh one-hot column for the virtual atom
    type is inserted at index `atom_encoder[symbol]`.
    """

    def __init__(self, max_ligand_size, atom_encoder, symbol):
        self.max_ligand_size = max_ligand_size
        self.atom_encoder = atom_encoder
        self.vidx = atom_encoder[symbol]

    def __call__(self, data):
        n_virt = self.max_ligand_size - data['num_lig_atoms']

        # Sample virtual coordinates around the ligand's centre
        center = data['lig_coords'].mean(0, keepdim=True)
        spread = data['lig_coords'].std(0).max()
        virt_coords = torch.randn(n_virt, 3) * spread + center

        # Insert an all-zero virtual-atom column into the existing
        # one-hot encoding at position vidx
        real_one_hot = data['lig_one_hot']
        zero_col = torch.zeros(data['num_lig_atoms'])[:, None]
        one_hot = torch.cat((real_one_hot[:, :self.vidx],
                             zero_col,
                             real_one_hot[:, self.vidx:]), dim=1)

        virt_one_hot = torch.zeros(n_virt, len(self.atom_encoder))
        virt_one_hot[:, self.vidx] = 1
        # assumes all entries of lig_mask share the same sample index
        virt_mask = torch.ones(n_virt) * data['lig_mask'][0]

        data['lig_coords'] = torch.cat((data['lig_coords'], virt_coords))
        data['lig_one_hot'] = torch.cat((one_hot, virt_one_hot))
        data['num_lig_atoms'] = self.max_ligand_size
        data['lig_mask'] = torch.cat((data['lig_mask'], virt_mask))
        data['num_virtual_atoms'] = n_virt

        return data
|