From 83009bc354890358ae0a6ca5b8752e64789317b2 Mon Sep 17 00:00:00 2001
From: Jurica Seva <seva@informatik.hu-berlin.de>
Date: Thu, 21 Jun 2018 09:35:27 +0200
Subject: [PATCH] My changes

---
 code_jurica/_layers.py                        |  350 +++-
 code_jurica/attentionDecoder.py               |  361 ++++
 code_jurica/loader.py                         |    6 +-
 .../logs/classification_report_test.csv       | 1556 +++++++----------
 code_jurica/seq2seq_attention.py              |   30 +-
 code_jurica/train.sh                          |    2 +-
 6 files changed, 1379 insertions(+), 926 deletions(-)
 create mode 100644 code_jurica/attentionDecoder.py

diff --git a/code_jurica/_layers.py b/code_jurica/_layers.py
index e5befe1..874a99f 100644
--- a/code_jurica/_layers.py
+++ b/code_jurica/_layers.py
@@ -10,7 +10,8 @@ Author: Xifeng Guo, E-mail: `guoxifeng1990@163.com`, Github: `https://github.com
 import tensorflow as tf
 from keras import backend as K, initializers, regularizers, constraints, layers
 from keras.engine.topology import Layer
-from keras.layers import InputSpec
+from keras.layers import InputSpec, Recurrent
+from keras import activations
 
 # https://gist.github.com/cbaziotis/7ef97ccf71cbc14366835198c09809d2
 # https://github.com/cbaziotis/hierarchical-rnn-biocreative-4/blob/master/models/nn_models.py
@@ -378,4 +378,366 @@ class AttentionWithContext(Layer):
 
 
     def compute_output_shape(self, input_shape):
-        return (input_shape[0], input_shape[-1],)
\ No newline at end of file
+        return (input_shape[0], input_shape[-1],)
+
+# ATTENTION DECODER
+# https://github.com/datalogue/keras-attention
+
+def _time_distributed_dense(x, w, b=None, dropout=None,
+                            input_dim=None, output_dim=None,
+                            timesteps=None, training=None):
+    """Apply `y . w + b` for every temporal slice y of x.
+    # Arguments
+        x: input tensor.
+        w: weight matrix.
+        b: optional bias vector.
+        dropout: whether to apply dropout (same dropout mask
+            for every temporal slice of the input).
+        input_dim: integer; optional dimensionality of the input.
+        output_dim: integer; optional dimensionality of the output.
+        timesteps: integer; optional number of timesteps.
+        training: training phase tensor or boolean.
+    # Returns
+        Output tensor.
+    """
+    if not input_dim:
+        input_dim = K.shape(x)[2]
+    if not timesteps:
+        timesteps = K.shape(x)[1]
+    if not output_dim:
+        output_dim = K.shape(w)[1]
+
+    if dropout is not None and 0. < dropout < 1.:
+        # apply the same dropout pattern at every timestep
+        ones = K.ones_like(K.reshape(x[:, 0, :], (-1, input_dim)))
+        dropout_matrix = K.dropout(ones, dropout)
+        expanded_dropout_matrix = K.repeat(dropout_matrix, timesteps)
+        x = K.in_train_phase(x * expanded_dropout_matrix, x, training=training)
+
+    # collapse time dimension and batch dimension together
+    x = K.reshape(x, (-1, input_dim))
+    x = K.dot(x, w)
+    if b is not None:
+        x = K.bias_add(x, b)
+    # reshape to 3D tensor
+    if K.backend() == 'tensorflow':
+        x = K.reshape(x, K.stack([-1, timesteps, output_dim]))
+        x.set_shape([None, None, output_dim])
+    else:
+        x = K.reshape(x, (-1, timesteps, output_dim))
+    return x
+
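+# Example usage (sketch): for x of shape (batch, timesteps, input_dim) and
+# w of shape (input_dim, output_dim),
+#   y = _time_distributed_dense(x, w, b, input_dim=input_dim,
+#                               timesteps=timesteps, output_dim=output_dim)
+# applies the same dense projection at every timestep and returns a tensor of
+# shape (batch, timesteps, output_dim).
+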
+tfPrint = lambda d, T: tf.Print(input_=T, data=[T, tf.shape(T)], message=d)
+
+class AttentionDecoder(Recurrent):
+
+    def __init__(self, units, output_dim,
+                 activation='tanh',
+                 return_probabilities=False,
+                 name='AttentionDecoder',
+                 kernel_initializer='glorot_uniform',
+                 recurrent_initializer='orthogonal',
+                 bias_initializer='zeros',
+                 kernel_regularizer=None,
+                 bias_regularizer=None,
+                 activity_regularizer=None,
+                 kernel_constraint=None,
+                 bias_constraint=None,
+                 **kwargs):
+        """
+        Implements an AttentionDecoder that takes in a sequence encoded by an
+        encoder and outputs the decoded states
+        :param units: dimension of the hidden state and the attention matrices
+        :param output_dim: the number of labels in the output space
+
+        references:
+            Bahdanau, Dzmitry, Kyunghyun Cho, and Yoshua Bengio.
+            "Neural machine translation by jointly learning to align and translate."
+            arXiv preprint arXiv:1409.0473 (2014).
+        """
+        self.units = units
+        self.output_dim = output_dim
+        self.return_probabilities = return_probabilities
+        self.activation = activations.get(activation)
+        self.kernel_initializer = initializers.get(kernel_initializer)
+        self.recurrent_initializer = initializers.get(recurrent_initializer)
+        self.bias_initializer = initializers.get(bias_initializer)
+
+        self.kernel_regularizer = regularizers.get(kernel_regularizer)
+        self.recurrent_regularizer = regularizers.get(kernel_regularizer)
+        self.bias_regularizer = regularizers.get(bias_regularizer)
+        self.activity_regularizer = regularizers.get(activity_regularizer)
+
+        self.kernel_constraint = constraints.get(kernel_constraint)
+        self.recurrent_constraint = constraints.get(kernel_constraint)
+        self.bias_constraint = constraints.get(bias_constraint)
+
+        super(AttentionDecoder, self).__init__(**kwargs)
+        self.name = name
+        self.return_sequences = True  # must return sequences
+
+    def build(self, input_shape):
+        """
+          See Appendix 2 of Bahdanau 2014, arXiv:1409.0473
+          for model details that correspond to the matrices here.
+        """
+
+        self.batch_size, self.timesteps, self.input_dim = input_shape
+
+        if self.stateful:
+            super(AttentionDecoder, self).reset_states()
+
+        self.states = [None, None]  # y, s
+
+        """
+            Matrices for creating the context vector
+        """
+
+        self.V_a = self.add_weight(shape=(self.units,),
+                                   name='V_a',
+                                   initializer=self.kernel_initializer,
+                                   regularizer=self.kernel_regularizer,
+                                   constraint=self.kernel_constraint)
+        self.W_a = self.add_weight(shape=(self.units, self.units),
+                                   name='W_a',
+                                   initializer=self.kernel_initializer,
+                                   regularizer=self.kernel_regularizer,
+                                   constraint=self.kernel_constraint)
+        self.U_a = self.add_weight(shape=(self.input_dim, self.units),
+                                   name='U_a',
+                                   initializer=self.kernel_initializer,
+                                   regularizer=self.kernel_regularizer,
+                                   constraint=self.kernel_constraint)
+        self.b_a = self.add_weight(shape=(self.units,),
+                                   name='b_a',
+                                   initializer=self.bias_initializer,
+                                   regularizer=self.bias_regularizer,
+                                   constraint=self.bias_constraint)
+        """
+            Matrices for the r (reset) gate
+        """
+        self.C_r = self.add_weight(shape=(self.input_dim, self.units),
+                                   name='C_r',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+        self.U_r = self.add_weight(shape=(self.units, self.units),
+                                   name='U_r',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+        self.W_r = self.add_weight(shape=(self.output_dim, self.units),
+                                   name='W_r',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+        self.b_r = self.add_weight(shape=(self.units, ),
+                                   name='b_r',
+                                   initializer=self.bias_initializer,
+                                   regularizer=self.bias_regularizer,
+                                   constraint=self.bias_constraint)
+
+        """
+            Matrices for the z (update) gate
+        """
+        self.C_z = self.add_weight(shape=(self.input_dim, self.units),
+                                   name='C_z',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+        self.U_z = self.add_weight(shape=(self.units, self.units),
+                                   name='U_z',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+        self.W_z = self.add_weight(shape=(self.output_dim, self.units),
+                                   name='W_z',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+        self.b_z = self.add_weight(shape=(self.units, ),
+                                   name='b_z',
+                                   initializer=self.bias_initializer,
+                                   regularizer=self.bias_regularizer,
+                                   constraint=self.bias_constraint)
+        """
+            Matrices for the proposal
+        """
+        self.C_p = self.add_weight(shape=(self.input_dim, self.units),
+                                   name='C_p',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+        self.U_p = self.add_weight(shape=(self.units, self.units),
+                                   name='U_p',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+        self.W_p = self.add_weight(shape=(self.output_dim, self.units),
+                                   name='W_p',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+        self.b_p = self.add_weight(shape=(self.units, ),
+                                   name='b_p',
+                                   initializer=self.bias_initializer,
+                                   regularizer=self.bias_regularizer,
+                                   constraint=self.bias_constraint)
+        """
+            Matrices for making the final prediction vector
+        """
+        self.C_o = self.add_weight(shape=(self.input_dim, self.output_dim),
+                                   name='C_o',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+        self.U_o = self.add_weight(shape=(self.units, self.output_dim),
+                                   name='U_o',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+        self.W_o = self.add_weight(shape=(self.output_dim, self.output_dim),
+                                   name='W_o',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+        self.b_o = self.add_weight(shape=(self.output_dim, ),
+                                   name='b_o',
+                                   initializer=self.bias_initializer,
+                                   regularizer=self.bias_regularizer,
+                                   constraint=self.bias_constraint)
+
+        # For creating the initial state:
+        self.W_s = self.add_weight(shape=(self.input_dim, self.units),
+                                   name='W_s',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+
+        self.input_spec = [
+            InputSpec(shape=(self.batch_size, self.timesteps, self.input_dim))]
+        self.built = True
+
+    def call(self, x):
+        # store the whole sequence so we can "attend" to it at each timestep
+        self.x_seq = x
+
+        # apply a dense layer over the time dimension of the sequence
+        # do it here because it doesn't depend on any previous steps,
+        # therefore we can save computation time:
+        self._uxpb = _time_distributed_dense(self.x_seq, self.U_a, b=self.b_a,
+                                             input_dim=self.input_dim,
+                                             timesteps=self.timesteps,
+                                             output_dim=self.units)
+
+        return super(AttentionDecoder, self).call(x)
+
+    def get_initial_state(self, inputs):
+        print('inputs shape:', inputs.get_shape())
+
+        # apply the matrix on the first time step to get the initial s0.
+        s0 = activations.tanh(K.dot(inputs[:, 0], self.W_s))
+
+        # as in keras.layers.recurrent, initialize a zero vector of shape
+        # (batch_size, output_dim)
+        y0 = K.zeros_like(inputs)  # (samples, timesteps, input_dims)
+        y0 = K.sum(y0, axis=(1, 2))  # (samples, )
+        y0 = K.expand_dims(y0)  # (samples, 1)
+        y0 = K.tile(y0, [1, self.output_dim])
+
+        return [y0, s0]
+
+    def step(self, x, states):
+
+        ytm, stm = states
+
+        # repeat the hidden state to the length of the sequence
+        _stm = K.repeat(stm, self.timesteps)
+
+        # now multiply the repeated hidden state by the weight matrix
+        _Wxstm = K.dot(_stm, self.W_a)
+
+        # calculate the attention probabilities,
+        # i.e. how much each input timestep contributes to this output step.
+        et = K.dot(activations.tanh(_Wxstm + self._uxpb),
+                   K.expand_dims(self.V_a))
+        at = K.exp(et)
+        at_sum = K.sum(at, axis=1)
+        at_sum_repeated = K.repeat(at_sum, self.timesteps)
+        at /= at_sum_repeated  # tensor of shape (batch_size, timesteps, 1)
+
+        # calculate the context vector
+        context = K.squeeze(K.batch_dot(at, self.x_seq, axes=1), axis=1)
+        # ~~~> calculate new hidden state
+        # first calculate the "r" gate:
+
+        rt = activations.sigmoid(
+            K.dot(ytm, self.W_r)
+            + K.dot(stm, self.U_r)
+            + K.dot(context, self.C_r)
+            + self.b_r)
+
+        # now calculate the "z" gate
+        zt = activations.sigmoid(
+            K.dot(ytm, self.W_z)
+            + K.dot(stm, self.U_z)
+            + K.dot(context, self.C_z)
+            + self.b_z)
+
+        # calculate the proposal hidden state:
+        s_tp = activations.tanh(
+            K.dot(ytm, self.W_p)
+            + K.dot((rt * stm), self.U_p)
+            + K.dot(context, self.C_p)
+            + self.b_p)
+
+        # new hidden state:
+        st = (1-zt)*stm + zt * s_tp
+
+        yt = activations.softmax(
+            K.dot(ytm, self.W_o)
+            + K.dot(stm, self.U_o)
+            + K.dot(context, self.C_o)
+            + self.b_o)
+
+        if self.return_probabilities:
+            return at, [yt, st]
+        else:
+            return yt, [yt, st]
+
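+    # The recurrence implemented in step() above follows Bahdanau et al. (2014):
+    #   e_tj = V_a^T tanh(W_a s_{t-1} + U_a h_j + b_a)          (alignment scores)
+    #   a_tj = softmax_j(e_tj),   c_t = sum_j a_tj h_j          (context vector)
+    #   r_t  = sigmoid(W_r y_{t-1} + U_r s_{t-1} + C_r c_t + b_r)
+    #   z_t  = sigmoid(W_z y_{t-1} + U_z s_{t-1} + C_z c_t + b_z)
+    #   s~_t = tanh(W_p y_{t-1} + U_p (r_t * s_{t-1}) + C_p c_t + b_p)
+    #   s_t  = (1 - z_t) * s_{t-1} + z_t * s~_t
+    #   y_t  = softmax(W_o y_{t-1} + U_o s_{t-1} + C_o c_t + b_o)
+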
+    def compute_output_shape(self, input_shape):
+        """
+            For Keras internal compatibility checking
+        """
+        if self.return_probabilities:
+            return (None, self.timesteps, self.timesteps)
+        else:
+            return (None, self.timesteps, self.output_dim)
+
+    def get_config(self):
+        """
+            For rebuilding models at load time.
+        """
+        config = {
+            'output_dim': self.output_dim,
+            'units': self.units,
+            'return_probabilities': self.return_probabilities
+        }
+        base_config = super(AttentionDecoder, self).get_config()
+        return dict(list(base_config.items()) + list(config.items()))
\ No newline at end of file
diff --git a/code_jurica/attentionDecoder.py b/code_jurica/attentionDecoder.py
new file mode 100644
index 0000000..863ef52
--- /dev/null
+++ b/code_jurica/attentionDecoder.py
@@ -0,0 +1,361 @@
+import tensorflow as tf
+from keras import backend as K
+from keras import regularizers, constraints, initializers, activations
+from keras.layers.recurrent import Recurrent
+from keras.engine import InputSpec
+
+import keras.backend as K
+
+def _time_distributed_dense(x, w, b=None, dropout=None,
+                            input_dim=None, output_dim=None,
+                            timesteps=None, training=None):
+    """Apply `y . w + b` for every temporal slice y of x.
+    # Arguments
+        x: input tensor.
+        w: weight matrix.
+        b: optional bias vector.
+        dropout: whether to apply dropout (same dropout mask
+            for every temporal slice of the input).
+        input_dim: integer; optional dimensionality of the input.
+        output_dim: integer; optional dimensionality of the output.
+        timesteps: integer; optional number of timesteps.
+        training: training phase tensor or boolean.
+    # Returns
+        Output tensor.
+    """
+    if not input_dim:
+        input_dim = K.shape(x)[2]
+    if not timesteps:
+        timesteps = K.shape(x)[1]
+    if not output_dim:
+        output_dim = K.shape(w)[1]
+
+    if dropout is not None and 0. < dropout < 1.:
+        # apply the same dropout pattern at every timestep
+        ones = K.ones_like(K.reshape(x[:, 0, :], (-1, input_dim)))
+        dropout_matrix = K.dropout(ones, dropout)
+        expanded_dropout_matrix = K.repeat(dropout_matrix, timesteps)
+        x = K.in_train_phase(x * expanded_dropout_matrix, x, training=training)
+
+    # collapse time dimension and batch dimension together
+    x = K.reshape(x, (-1, input_dim))
+    x = K.dot(x, w)
+    if b is not None:
+        x = K.bias_add(x, b)
+    # reshape to 3D tensor
+    if K.backend() == 'tensorflow':
+        x = K.reshape(x, K.stack([-1, timesteps, output_dim]))
+        x.set_shape([None, None, output_dim])
+    else:
+        x = K.reshape(x, (-1, timesteps, output_dim))
+    return x
+
+tfPrint = lambda d, T: tf.Print(input_=T, data=[T, tf.shape(T)], message=d)
+
+class AttentionDecoder(Recurrent):
+
+    def __init__(self, units, output_dim,
+                 activation='tanh',
+                 return_probabilities=False,
+                 name='AttentionDecoder',
+                 kernel_initializer='glorot_uniform',
+                 recurrent_initializer='orthogonal',
+                 bias_initializer='zeros',
+                 kernel_regularizer=None,
+                 bias_regularizer=None,
+                 activity_regularizer=None,
+                 kernel_constraint=None,
+                 bias_constraint=None,
+                 **kwargs):
+        """
+        Implements an AttentionDecoder that takes in a sequence encoded by an
+        encoder and outputs the decoded states
+        :param units: dimension of the hidden state and the attention matrices
+        :param output_dim: the number of labels in the output space
+
+        references:
+            Bahdanau, Dzmitry, Kyunghyun Cho, and Yoshua Bengio.
+            "Neural machine translation by jointly learning to align and translate."
+            arXiv preprint arXiv:1409.0473 (2014).
+        """
+        self.units = units
+        self.output_dim = output_dim
+        self.return_probabilities = return_probabilities
+        self.activation = activations.get(activation)
+        self.kernel_initializer = initializers.get(kernel_initializer)
+        self.recurrent_initializer = initializers.get(recurrent_initializer)
+        self.bias_initializer = initializers.get(bias_initializer)
+
+        self.kernel_regularizer = regularizers.get(kernel_regularizer)
+        self.recurrent_regularizer = regularizers.get(kernel_regularizer)
+        self.bias_regularizer = regularizers.get(bias_regularizer)
+        self.activity_regularizer = regularizers.get(activity_regularizer)
+
+        self.kernel_constraint = constraints.get(kernel_constraint)
+        self.recurrent_constraint = constraints.get(kernel_constraint)
+        self.bias_constraint = constraints.get(bias_constraint)
+
+        super(AttentionDecoder, self).__init__(**kwargs)
+        self.name = name
+        self.return_sequences = True  # must return sequences
+
+    def build(self, input_shape):
+        """
+          See Appendix 2 of Bahdanau 2014, arXiv:1409.0473
+          for model details that correspond to the matrices here.
+        """
+
+        self.batch_size, self.timesteps, self.input_dim = input_shape
+
+        if self.stateful:
+            super(AttentionDecoder, self).reset_states()
+
+        self.states = [None, None]  # y, s
+
+        """
+            Matrices for creating the context vector
+        """
+
+        self.V_a = self.add_weight(shape=(self.units,),
+                                   name='V_a',
+                                   initializer=self.kernel_initializer,
+                                   regularizer=self.kernel_regularizer,
+                                   constraint=self.kernel_constraint)
+        self.W_a = self.add_weight(shape=(self.units, self.units),
+                                   name='W_a',
+                                   initializer=self.kernel_initializer,
+                                   regularizer=self.kernel_regularizer,
+                                   constraint=self.kernel_constraint)
+        self.U_a = self.add_weight(shape=(self.input_dim, self.units),
+                                   name='U_a',
+                                   initializer=self.kernel_initializer,
+                                   regularizer=self.kernel_regularizer,
+                                   constraint=self.kernel_constraint)
+        self.b_a = self.add_weight(shape=(self.units,),
+                                   name='b_a',
+                                   initializer=self.bias_initializer,
+                                   regularizer=self.bias_regularizer,
+                                   constraint=self.bias_constraint)
+        """
+            Matrices for the r (reset) gate
+        """
+        self.C_r = self.add_weight(shape=(self.input_dim, self.units),
+                                   name='C_r',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+        self.U_r = self.add_weight(shape=(self.units, self.units),
+                                   name='U_r',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+        self.W_r = self.add_weight(shape=(self.output_dim, self.units),
+                                   name='W_r',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+        self.b_r = self.add_weight(shape=(self.units, ),
+                                   name='b_r',
+                                   initializer=self.bias_initializer,
+                                   regularizer=self.bias_regularizer,
+                                   constraint=self.bias_constraint)
+
+        """
+            Matrices for the z (update) gate
+        """
+        self.C_z = self.add_weight(shape=(self.input_dim, self.units),
+                                   name='C_z',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+        self.U_z = self.add_weight(shape=(self.units, self.units),
+                                   name='U_z',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+        self.W_z = self.add_weight(shape=(self.output_dim, self.units),
+                                   name='W_z',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+        self.b_z = self.add_weight(shape=(self.units, ),
+                                   name='b_z',
+                                   initializer=self.bias_initializer,
+                                   regularizer=self.bias_regularizer,
+                                   constraint=self.bias_constraint)
+        """
+            Matrices for the proposal
+        """
+        self.C_p = self.add_weight(shape=(self.input_dim, self.units),
+                                   name='C_p',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+        self.U_p = self.add_weight(shape=(self.units, self.units),
+                                   name='U_p',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+        self.W_p = self.add_weight(shape=(self.output_dim, self.units),
+                                   name='W_p',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+        self.b_p = self.add_weight(shape=(self.units, ),
+                                   name='b_p',
+                                   initializer=self.bias_initializer,
+                                   regularizer=self.bias_regularizer,
+                                   constraint=self.bias_constraint)
+        """
+            Matrices for making the final prediction vector
+        """
+        self.C_o = self.add_weight(shape=(self.input_dim, self.output_dim),
+                                   name='C_o',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+        self.U_o = self.add_weight(shape=(self.units, self.output_dim),
+                                   name='U_o',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+        self.W_o = self.add_weight(shape=(self.output_dim, self.output_dim),
+                                   name='W_o',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+        self.b_o = self.add_weight(shape=(self.output_dim, ),
+                                   name='b_o',
+                                   initializer=self.bias_initializer,
+                                   regularizer=self.bias_regularizer,
+                                   constraint=self.bias_constraint)
+
+        # For creating the initial state:
+        self.W_s = self.add_weight(shape=(self.input_dim, self.units),
+                                   name='W_s',
+                                   initializer=self.recurrent_initializer,
+                                   regularizer=self.recurrent_regularizer,
+                                   constraint=self.recurrent_constraint)
+
+        self.input_spec = [
+            InputSpec(shape=(self.batch_size, self.timesteps, self.input_dim))]
+        self.built = True
+
+    def call(self, x):
+        # store the whole sequence so we can "attend" to it at each timestep
+        self.x_seq = x
+
+        # apply a dense layer over the time dimension of the sequence
+        # do it here because it doesn't depend on any previous steps,
+        # therefore we can save computation time:
+        self._uxpb = _time_distributed_dense(self.x_seq, self.U_a, b=self.b_a,
+                                             input_dim=self.input_dim,
+                                             timesteps=self.timesteps,
+                                             output_dim=self.units)
+
+        return super(AttentionDecoder, self).call(x)
+
+    def get_initial_state(self, inputs):
+        print('inputs shape:', inputs.get_shape())
+
+        # apply the matrix on the first time step to get the initial s0.
+        s0 = activations.tanh(K.dot(inputs[:, 0], self.W_s))
+
+        # as in keras.layers.recurrent, initialize a zero vector of shape
+        # (batch_size, output_dim)
+        y0 = K.zeros_like(inputs)  # (samples, timesteps, input_dims)
+        y0 = K.sum(y0, axis=(1, 2))  # (samples, )
+        y0 = K.expand_dims(y0)  # (samples, 1)
+        y0 = K.tile(y0, [1, self.output_dim])
+
+        return [y0, s0]
+
+    def step(self, x, states):
+
+        ytm, stm = states
+
+        # repeat the hidden state to the length of the sequence
+        _stm = K.repeat(stm, self.timesteps)
+
+        # now multiply the repeated hidden state by the weight matrix
+        _Wxstm = K.dot(_stm, self.W_a)
+
+        # calculate the attention probabilities,
+        # i.e. how much each input timestep contributes to this output step.
+        et = K.dot(activations.tanh(_Wxstm + self._uxpb),
+                   K.expand_dims(self.V_a))
+        at = K.exp(et)
+        at_sum = K.sum(at, axis=1)
+        at_sum_repeated = K.repeat(at_sum, self.timesteps)
+        at /= at_sum_repeated  # tensor of shape (batch_size, timesteps, 1)
+
+        # calculate the context vector
+        context = K.squeeze(K.batch_dot(at, self.x_seq, axes=1), axis=1)
+        # ~~~> calculate new hidden state
+        # first calculate the "r" gate:
+
+        rt = activations.sigmoid(
+            K.dot(ytm, self.W_r)
+            + K.dot(stm, self.U_r)
+            + K.dot(context, self.C_r)
+            + self.b_r)
+
+        # now calculate the "z" gate
+        zt = activations.sigmoid(
+            K.dot(ytm, self.W_z)
+            + K.dot(stm, self.U_z)
+            + K.dot(context, self.C_z)
+            + self.b_z)
+
+        # calculate the proposal hidden state:
+        s_tp = activations.tanh(
+            K.dot(ytm, self.W_p)
+            + K.dot((rt * stm), self.U_p)
+            + K.dot(context, self.C_p)
+            + self.b_p)
+
+        # new hidden state:
+        st = (1-zt)*stm + zt * s_tp
+
+        yt = activations.softmax(
+            K.dot(ytm, self.W_o)
+            + K.dot(stm, self.U_o)
+            + K.dot(context, self.C_o)
+            + self.b_o)
+
+        if self.return_probabilities:
+            return at, [yt, st]
+        else:
+            return yt, [yt, st]
+
+    def compute_output_shape(self, input_shape):
+        """
+            For Keras internal compatibility checking
+        """
+        if self.return_probabilities:
+            return (None, self.timesteps, self.timesteps)
+        else:
+            return (None, self.timesteps, self.output_dim)
+
+    def get_config(self):
+        """
+            For rebuilding models at load time.
+        """
+        config = {
+            'output_dim': self.output_dim,
+            'units': self.units,
+            'return_probabilities': self.return_probabilities
+        }
+        base_config = super(AttentionDecoder, self).get_config()
+        return dict(list(base_config.items()) + list(config.items()))
+
+# check to see if it compiles
+if __name__ == '__main__':
+    from keras.layers import Input, LSTM
+    from keras.models import Model
+    from keras.layers.wrappers import Bidirectional
+    i = Input(shape=(100,104), dtype='float32')
+    enc = Bidirectional(LSTM(64, return_sequences=True), merge_mode='concat')(i)
+    dec = AttentionDecoder(32, 4)(enc)
+    model = Model(inputs=i, outputs=dec)
+    model.summary()
\ No newline at end of file
diff --git a/code_jurica/loader.py b/code_jurica/loader.py
index d8f8076..406144f 100644
--- a/code_jurica/loader.py
+++ b/code_jurica/loader.py
@@ -95,7 +95,8 @@ source_embedding_layer = Embedding(source_embeddings.shape[0],
                                    weights=[source_embeddings],
                                    input_length=source_max_sequence_tokenizer,
                                    trainable=True,
-                                   mask_zero=True)
+                                   mask_zero=True,
+                                   name='source_embedding')
 
 target_embeddings=embedding_matrix(target_vocab)
 target_embedding_layer = Embedding(target_embeddings.shape[0],
@@ -103,7 +104,8 @@ target_embedding_layer = Embedding(target_embeddings.shape[0],
                                    weights=[target_embeddings],
                                    input_length=target_max_sequence_tokenizer,
                                    trainable=True,
-                                   mask_zero=True)
+                                   mask_zero=True,
+                                   name='target_embedding')
 
 #generate train/test split
 source_train, source_val, _, _ = train_test_split(source_corpus, labels, test_size=0.05, random_state=777)
diff --git a/code_jurica/logs/classification_report_test.csv b/code_jurica/logs/classification_report_test.csv
index 85f2b97..70e8143 100644
--- a/code_jurica/logs/classification_report_test.csv
+++ b/code_jurica/logs/classification_report_test.csv
@@ -1,966 +1,708 @@
 Classes,precision,recall,f1-score,support
-A047,0.8,0.67,0.73,6
-A048,1.0,1.0,1.0,2
-A090,0.0,0.0,0.0,2
-A099,0.14,0.5,0.22,2
-A162,0.0,0.0,0.0,4
+A020,0.0,0.0,0.0,1
+A047,0.91,0.77,0.83,13
+A090,1.0,1.0,1.0,2
+A099,0.6,0.6,0.6,5
+A162,0.0,0.0,0.0,2
 A169,0.0,0.0,0.0,0
-A178,0.0,0.0,0.0,1
-A409,0.0,0.0,0.0,3
-A410,0.0,0.0,0.0,3
+A402,0.0,0.0,0.0,2
+A410,1.0,0.5,0.67,2
 A411,0.0,0.0,0.0,1
-A412,0.0,0.0,0.0,1
-A415,0.5,0.25,0.33,4
-A418,0.0,0.0,0.0,1
-A419,0.82,0.5,0.62,152
-A46,0.0,0.0,0.0,3
+A412,1.0,1.0,1.0,1
+A415,0.0,0.0,0.0,1
+A419,0.92,0.95,0.94,76
+A46,0.0,0.0,0.0,1
+A480,0.0,0.0,0.0,0
 A481,0.0,0.0,0.0,1
-A490,0.33,0.5,0.4,2
-A491,0.0,0.0,0.0,0
-A498,0.0,0.0,0.0,0
-B008,0.0,0.0,0.0,0
-B009,0.0,0.0,0.0,1
-B029,0.0,0.0,0.0,0
-B171,0.0,0.0,0.0,1
-B182,0.78,0.7,0.74,10
-B24,1.0,0.33,0.5,3
-B258,0.0,0.0,0.0,1
-B333,0.0,0.0,0.0,0
-B377,0.0,0.0,0.0,2
-B378,0.0,0.0,0.0,1
-B379,0.5,1.0,0.67,1
-B909,0.0,0.0,0.0,1
-B955,0.0,0.0,0.0,0
+A498,0.0,0.0,0.0,1
+A86,0.0,0.0,0.0,1
+B171,0.0,0.0,0.0,0
+B181,1.0,1.0,1.0,1
+B182,0.5,0.33,0.4,3
+B24,0.0,0.0,0.0,1
+B340,0.0,0.0,0.0,1
+B49,0.0,0.0,0.0,0
+B500,1.0,1.0,1.0,1
+B54,0.0,0.0,0.0,1
 B958,0.0,0.0,0.0,0
-B962,0.0,0.0,0.0,0
-B99,0.33,0.5,0.4,6
-C01,0.0,0.0,0.0,1
-C029,1.0,1.0,1.0,4
-C051,0.0,0.0,0.0,1
-C061,0.0,0.0,0.0,0
-C062,0.0,0.0,0.0,1
-C07,1.0,1.0,1.0,1
-C099,1.0,0.5,0.67,2
-C109,0.5,1.0,0.67,1
-C119,1.0,0.5,0.67,2
-C12,0.0,0.0,0.0,1
-C139,1.0,0.5,0.67,2
-C140,0.0,0.0,0.0,1
-C148,0.0,0.0,0.0,1
-C154,0.0,0.0,0.0,1
-C159,0.8,0.5,0.62,8
-C160,1.0,0.33,0.5,3
-C169,0.87,0.81,0.84,16
-C170,0.0,0.0,0.0,1
-C172,0.0,0.0,0.0,2
-C179,0.0,0.0,0.0,1
-C180,1.0,0.33,0.5,3
+B99,0.0,0.0,0.0,0
+C01,0.0,0.0,0.0,2
+C049,0.0,0.0,0.0,1
+C059,0.0,0.0,0.0,1
+C109,0.0,0.0,0.0,1
+C12,1.0,1.0,1.0,1
+C140,1.0,1.0,1.0,4
+C159,0.0,0.0,0.0,4
+C163,0.0,0.0,0.0,1
+C169,0.87,0.93,0.9,14
+C180,0.0,0.0,0.0,1
+C185,0.0,0.0,0.0,1
 C186,0.0,0.0,0.0,1
-C187,1.0,0.25,0.4,4
-C189,0.94,0.31,0.47,48
-C19,1.0,0.25,0.4,4
-C20,1.0,0.56,0.72,16
-C210,0.0,0.0,0.0,1
-C218,0.0,0.0,0.0,0
-C220,0.0,0.0,0.0,19
-C221,1.0,0.93,0.96,14
-C223,0.0,0.0,0.0,1
-C229,0.5,0.45,0.48,11
-C23,1.0,0.33,0.5,6
-C240,0.8,0.8,0.8,5
-C241,0.0,0.0,0.0,2
-C249,0.5,1.0,0.67,1
-C250,0.0,0.0,0.0,5
-C259,1.0,0.25,0.39,53
-C260,0.75,1.0,0.86,3
-C310,0.0,0.0,0.0,1
-C320,0.0,0.0,0.0,3
-C321,0.0,0.0,0.0,1
-C329,1.0,0.67,0.8,6
-C340,0.0,0.0,0.0,2
-C341,1.0,0.5,0.67,4
-C342,1.0,1.0,1.0,1
-C343,1.0,0.33,0.5,3
+C187,1.0,0.2,0.33,5
+C189,0.75,0.12,0.21,25
+C19,1.0,1.0,1.0,1
+C20,0.0,0.0,0.0,11
+C220,1.0,0.43,0.6,7
+C221,1.0,0.67,0.8,3
+C229,0.0,0.0,0.0,4
+C23,1.0,1.0,1.0,1
+C240,0.33,1.0,0.5,1
+C250,0.0,0.0,0.0,6
+C252,1.0,1.0,1.0,1
+C259,1.0,0.2,0.33,25
+C260,0.0,0.0,0.0,0
+C269,1.0,0.5,0.67,2
+C300,0.0,0.0,0.0,1
+C320,0.0,0.0,0.0,1
+C329,0.0,0.0,0.0,2
+C340,0.0,0.0,0.0,1
+C341,0.0,0.0,0.0,6
+C342,0.0,0.0,0.0,1
+C343,1.0,0.5,0.67,2
 C348,0.0,0.0,0.0,1
-C349,0.94,0.58,0.72,142
-C380,0.0,0.0,0.0,0
-C384,1.0,1.0,1.0,2
-C390,0.0,0.0,0.0,0
-C399,1.0,0.25,0.4,4
-C412,0.0,0.0,0.0,0
-C419,0.0,0.0,0.0,0
-C433,0.0,0.0,0.0,1
-C435,0.0,0.0,0.0,1
-C439,0.5,0.4,0.44,5
-C444,0.0,0.0,0.0,0
-C449,0.5,0.5,0.5,2
-C450,1.0,0.5,0.67,4
-C459,0.0,0.0,0.0,1
-C479,0.0,0.0,0.0,1
-C492,0.0,0.0,0.0,0
-C494,0.0,0.0,0.0,1
-C496,1.0,1.0,1.0,1
-C499,0.0,0.0,0.0,1
-C501,0.0,0.0,0.0,1
-C509,0.95,0.7,0.81,54
-C539,0.33,0.5,0.4,2
-C541,1.0,0.14,0.25,7
-C549,0.0,0.0,0.0,1
-C55,1.0,1.0,1.0,3
-C56,0.83,0.33,0.48,15
-C61,0.9,0.9,0.9,30
-C629,0.0,0.0,0.0,1
-C64,1.0,0.6,0.75,10
-C66,0.0,0.0,0.0,1
-C679,0.86,0.75,0.8,24
-C689,0.75,0.43,0.55,7
-C694,0.0,0.0,0.0,1
-C710,1.0,0.2,0.33,5
-C711,0.0,0.0,0.0,1
-C712,0.0,0.0,0.0,1
-C716,1.0,1.0,1.0,1
-C719,0.9,0.75,0.82,12
-C73,1.0,0.5,0.67,2
-C749,0.0,0.0,0.0,1
-C760,1.0,1.0,1.0,1
-C761,0.0,0.0,0.0,0
-C763,1.0,1.0,1.0,1
-C770,0.0,0.0,0.0,1
-C771,1.0,0.5,0.67,2
-C772,0.0,0.0,0.0,1
+C349,1.0,0.29,0.45,85
+C399,1.0,0.5,0.67,2
+C437,0.0,0.0,0.0,1
+C439,0.5,0.5,0.5,4
+C443,0.0,0.0,0.0,4
+C449,0.0,0.0,0.0,1
+C450,0.0,0.0,0.0,3
+C459,0.0,0.0,0.0,2
+C480,0.0,0.0,0.0,0
+C482,0.0,0.0,0.0,2
+C492,1.0,0.5,0.67,2
+C499,0.67,1.0,0.8,2
+C509,0.7,0.3,0.42,23
+C519,1.0,0.5,0.67,2
+C52,0.0,0.0,0.0,1
+C539,0.0,0.0,0.0,7
+C548,0.0,0.0,0.0,1
+C549,0.0,0.0,0.0,5
+C55,1.0,1.0,1.0,1
+C56,1.0,0.2,0.33,5
+C570,0.0,0.0,0.0,1
+C61,0.93,0.72,0.81,18
+C629,0.0,0.0,0.0,2
+C64,0.88,0.88,0.88,8
+C679,0.86,0.86,0.86,7
+C689,0.0,0.0,0.0,1
+C693,0.0,0.0,0.0,1
+C696,0.0,0.0,0.0,1
+C711,1.0,1.0,1.0,1
+C719,0.83,1.0,0.91,5
+C760,0.0,0.0,0.0,0
+C762,1.0,1.0,1.0,1
+C763,0.0,0.0,0.0,1
+C770,1.0,0.75,0.86,4
+C771,0.5,0.5,0.5,2
+C772,1.0,0.5,0.67,2
 C773,0.0,0.0,0.0,1
-C775,1.0,1.0,1.0,1
-C779,0.73,0.57,0.64,14
-C780,0.81,0.28,0.42,46
-C781,0.25,0.5,0.33,2
-C782,0.67,0.18,0.29,11
+C778,0.0,0.0,0.0,0
+C779,1.0,0.23,0.38,13
+C780,0.75,0.12,0.2,26
+C781,0.0,0.0,0.0,4
+C782,0.0,0.0,0.0,5
 C783,0.0,0.0,0.0,1
-C784,0.0,0.0,0.0,1
-C786,0.62,0.25,0.36,20
-C787,0.85,0.36,0.51,64
-C792,0.0,0.0,0.0,1
-C793,0.75,0.19,0.31,31
-C794,0.0,0.0,0.0,5
-C795,0.42,0.39,0.41,33
-C797,0.0,0.0,0.0,3
-C798,0.2,0.33,0.25,3
-C799,0.67,0.45,0.54,69
-C800,0.75,1.0,0.86,3
-C809,0.54,0.88,0.67,109
-C819,0.5,1.0,0.67,1
-C831,1.0,0.5,0.67,2
-C833,0.5,1.0,0.67,1
-C835,0.0,0.0,0.0,1
-C837,1.0,1.0,1.0,1
-C838,0.0,0.0,0.0,0
-C851,0.5,0.5,0.5,2
-C859,0.75,0.86,0.8,14
-C900,1.0,0.5,0.67,16
+C786,0.0,0.0,0.0,12
+C787,1.0,0.08,0.15,37
+C788,0.0,0.0,0.0,2
+C793,1.0,0.05,0.09,21
+C794,0.0,0.0,0.0,3
+C795,0.92,0.55,0.69,20
+C797,0.0,0.0,0.0,1
+C798,0.0,0.0,0.0,5
+C799,0.43,0.29,0.34,35
+C800,0.13,0.67,0.22,3
+C809,0.3,0.95,0.45,75
+C833,1.0,1.0,1.0,3
+C840,0.0,0.0,0.0,1
+C859,0.5,1.0,0.67,2
+C900,1.0,0.6,0.75,5
 C901,0.0,0.0,0.0,1
-C910,0.5,0.33,0.4,3
-C911,1.0,0.5,0.67,6
+C903,0.0,0.0,0.0,0
+C911,1.0,1.0,1.0,1
 C914,0.0,0.0,0.0,1
-C920,1.0,0.5,0.67,12
-C929,0.0,0.0,0.0,1
+C920,0.5,0.5,0.5,2
+C921,1.0,1.0,1.0,1
 C931,0.0,0.0,0.0,1
-C950,0.5,0.33,0.4,3
-C959,0.0,0.0,0.0,1
-C969,0.33,0.33,0.33,3
-D099,0.0,0.0,0.0,0
-D126,0.0,0.0,0.0,1
-D27,0.0,0.0,0.0,1
-D291,0.0,0.0,0.0,1
-D320,1.0,0.5,0.67,2
+C950,0.0,0.0,0.0,3
+C951,0.0,0.0,0.0,1
+C959,1.0,1.0,1.0,1
+D180,1.0,1.0,1.0,1
+D320,0.0,0.0,0.0,2
+D329,1.0,1.0,1.0,1
+D350,0.0,0.0,0.0,1
+D367,0.0,0.0,0.0,1
 D369,0.0,0.0,0.0,0
-D370,0.0,0.0,0.0,0
-D371,0.0,0.0,0.0,1
-D372,0.0,0.0,0.0,1
-D374,0.67,0.5,0.57,4
-D376,0.0,0.0,0.0,1
-D377,0.0,0.0,0.0,3
-D379,0.0,0.0,0.0,1
-D380,0.0,0.0,0.0,1
-D381,1.0,0.2,0.33,5
-D382,1.0,0.5,0.67,2
-D390,1.0,0.33,0.5,3
-D391,1.0,0.5,0.67,2
-D400,0.62,1.0,0.77,5
-D410,1.0,0.5,0.67,2
-D414,0.83,0.83,0.83,6
-D419,0.0,0.0,0.0,0
+D370,0.0,0.0,0.0,1
+D371,0.5,1.0,0.67,1
+D374,0.0,0.0,0.0,4
+D375,0.0,0.0,0.0,3
+D376,0.0,0.0,0.0,3
+D377,0.0,0.0,0.0,4
+D381,0.0,0.0,0.0,5
+D390,1.0,1.0,1.0,1
+D391,0.0,0.0,0.0,1
+D400,0.0,0.0,0.0,0
+D410,0.0,0.0,0.0,1
+D414,1.0,1.0,1.0,3
 D420,0.0,0.0,0.0,1
-D430,1.0,0.5,0.67,2
-D432,0.0,0.0,0.0,0
-D444,0.0,0.0,0.0,1
-D45,0.0,0.0,0.0,0
-D462,1.0,1.0,1.0,1
-D464,0.0,0.0,0.0,1
-D469,0.67,0.33,0.44,12
+D430,0.0,0.0,0.0,2
+D45,1.0,1.0,1.0,1
+D462,0.0,0.0,0.0,2
+D469,1.0,0.25,0.4,4
 D471,0.0,0.0,0.0,1
-D472,0.0,0.0,0.0,0
-D473,0.0,0.0,0.0,1
-D474,1.0,1.0,1.0,1
-D479,1.0,1.0,1.0,1
-D480,0.0,0.0,0.0,0
-D483,0.0,0.0,0.0,1
-D486,0.0,0.0,0.0,2
-D487,1.0,1.0,1.0,4
-D489,0.04,0.5,0.08,2
+D474,1.0,0.67,0.8,3
+D485,0.0,0.0,0.0,1
+D486,0.0,0.0,0.0,1
+D487,0.5,1.0,0.67,1
+D489,0.09,0.75,0.15,4
 D500,0.0,0.0,0.0,1
-D508,0.5,0.5,0.5,2
-D510,1.0,1.0,1.0,1
-D591,0.0,0.0,0.0,1
-D611,0.0,0.0,0.0,5
-D613,1.0,1.0,1.0,1
-D619,0.45,0.83,0.59,6
-D62,1.0,0.5,0.67,2
-D649,0.83,0.75,0.79,32
-D65,1.0,0.5,0.67,4
-D683,0.0,0.0,0.0,1
-D692,0.0,0.0,0.0,1
-D693,0.0,0.0,0.0,0
-D695,0.0,0.0,0.0,1
-D696,0.67,0.67,0.67,3
-D699,0.0,0.0,0.0,1
-D70,1.0,0.5,0.67,2
-D849,0.0,0.0,0.0,2
-E039,0.0,0.0,0.0,2
-E049,0.5,1.0,0.67,1
-E059,0.0,0.0,0.0,1
-E103,0.0,0.0,0.0,1
-E104,0.0,0.0,0.0,1
-E107,0.0,0.0,0.0,1
+D539,0.0,0.0,0.0,0
+D619,1.0,0.5,0.67,2
+D649,0.88,0.93,0.9,15
+D65,1.0,1.0,1.0,1
+D689,0.0,0.0,0.0,1
+D733,0.0,0.0,0.0,1
+D758,0.0,0.0,0.0,1
+D840,0.0,0.0,0.0,0
+D849,1.0,1.0,1.0,1
+E039,0.33,1.0,0.5,1
+E0399,0.0,0.0,0.0,1
+E050,0.0,0.0,0.0,1
+E079,1.0,1.0,1.0,1
+E107,1.0,0.67,0.8,3
 E108,0.0,0.0,0.0,1
-E109,0.56,0.83,0.67,12
-E115,0.0,0.0,0.0,1
-E117,0.0,0.0,0.0,1
-E119,0.71,0.81,0.76,37
+E109,0.89,0.89,0.89,9
+E115,1.0,1.0,1.0,1
+E118,0.0,0.0,0.0,1
+E119,0.91,0.97,0.94,30
+E140,0.0,0.0,0.0,1
 E141,0.0,0.0,0.0,1
-E143,0.0,0.0,0.0,0
-E145,0.5,1.0,0.67,1
-E148,0.0,0.0,0.0,1
-E149,0.76,0.8,0.78,55
-E271,0.0,0.0,0.0,1
-E274,0.0,0.0,0.0,1
-E282,0.0,0.0,0.0,0
-E319,0.0,0.0,0.0,0
-E41,1.0,1.0,1.0,3
-E43,0.0,0.0,0.0,1
-E46,0.67,0.17,0.27,12
-E639,0.0,0.0,0.0,0
-E668,0.0,0.0,0.0,1
-E669,0.75,0.38,0.5,8
-E780,1.0,1.0,1.0,1
-E785,0.0,0.0,0.0,0
-E802,0.0,0.0,0.0,1
-E835,0.0,0.0,0.0,2
+E149,0.81,0.81,0.81,27
+E15,0.0,0.0,0.0,1
+E46,0.0,0.0,0.0,2
+E538,0.0,0.0,0.0,0
+E668,0.0,0.0,0.0,3
+E669,1.0,0.4,0.57,5
+E722,0.0,0.0,0.0,1
+E785,0.0,0.0,0.0,1
+E835,0.0,0.0,0.0,1
 E849,1.0,1.0,1.0,1
-E854,0.0,0.0,0.0,1
-E858,0.0,0.0,0.0,1
-E859,0.0,0.0,0.0,1
-E86,0.57,0.17,0.26,24
-E870,0.0,0.0,0.0,3
-E871,0.0,0.0,0.0,2
-E872,0.86,0.86,0.86,7
-E875,1.0,0.2,0.33,5
-E878,1.0,0.4,0.57,5
-E889,0.5,0.5,0.5,2
-F019,0.85,0.74,0.79,23
-F03,0.91,0.52,0.66,60
-F04,0.0,0.0,0.0,1
-F059,1.0,1.0,1.0,1
-F070,0.0,0.0,0.0,2
-F102,0.38,0.8,0.51,15
-F104,0.0,0.0,0.0,1
-F179,1.0,0.6,0.75,5
-F192,0.0,0.0,0.0,1
-F205,0.0,0.0,0.0,1
-F209,1.0,0.25,0.4,4
-F29,0.0,0.0,0.0,0
-F329,1.0,0.5,0.67,2
-F339,0.0,0.0,0.0,1
-F411,0.0,0.0,0.0,1
-F412,0.0,0.0,0.0,2
-F448,0.0,0.0,0.0,1
-F69,0.0,0.0,0.0,1
-F729,1.0,1.0,1.0,1
-F799,1.0,1.0,1.0,1
-F99,0.0,0.0,0.0,1
-G001,0.0,0.0,0.0,1
+E86,0.0,0.0,0.0,14
+E874,0.0,0.0,0.0,2
+E875,0.0,0.0,0.0,2
+E877,0.0,0.0,0.0,1
+E878,0.0,0.0,0.0,1
+E880,0.0,0.0,0.0,1
+E889,0.0,0.0,0.0,0
+F011,1.0,1.0,1.0,3
+F019,1.0,0.87,0.93,15
+F03,0.82,0.72,0.77,32
+F051,0.0,0.0,0.0,1
+F059,0.0,0.0,0.0,0
+F102,0.46,0.86,0.6,14
+F172,0.0,0.0,0.0,1
+F179,1.0,1.0,1.0,2
+F200,0.0,0.0,0.0,2
+F209,0.0,0.0,0.0,4
+F220,0.0,0.0,0.0,0
+F259,0.0,0.0,0.0,1
+F318,0.0,0.0,0.0,1
+F323,1.0,1.0,1.0,1
+F329,0.0,0.0,0.0,0
 G009,0.0,0.0,0.0,1
-G09,0.0,0.0,0.0,0
-G10,1.0,1.0,1.0,1
-G119,0.0,0.0,0.0,1
-G120,0.0,0.0,0.0,1
-G122,0.46,0.75,0.57,8
-G1229,0.0,0.0,0.0,4
-G20,0.0,0.0,0.0,4
-G200,0.25,0.83,0.38,6
-G2009,0.0,0.0,0.0,13
-G214,0.0,0.0,0.0,0
-G231,0.0,0.0,0.0,1
-G259,0.0,0.0,0.0,0
-G309,0.89,0.95,0.92,43
-G311,0.8,0.8,0.8,5
-G318,1.0,1.0,1.0,3
-G319,0.6,0.5,0.55,6
-G35,0.67,0.5,0.57,4
-G409,1.0,0.6,0.75,10
-G419,1.0,0.8,0.89,5
+G08,0.0,0.0,0.0,1
+G122,1.0,1.0,1.0,2
+G20,1.0,1.0,1.0,8
+G200,0.6,0.75,0.67,4
+G2009,0.0,0.0,0.0,2
+G301,1.0,0.5,0.67,2
+G309,0.78,0.88,0.82,8
+G310,0.0,0.0,0.0,1
+G311,1.0,0.5,0.67,2
+G319,0.2,0.44,0.28,9
+G35,1.0,0.6,0.75,5
+G409,1.0,1.0,1.0,5
+G419,1.0,1.0,1.0,3
 G450,1.0,1.0,1.0,1
-G459,1.0,0.5,0.67,2
-G610,0.0,0.0,0.0,1
-G621,0.0,0.0,0.0,1
-G628,0.0,0.0,0.0,1
-G629,0.0,0.0,0.0,0
-G710,1.0,1.0,1.0,1
-G728,0.0,0.0,0.0,1
-G819,0.9,0.82,0.86,11
-G822,0.0,0.0,0.0,0
-G824,0.0,0.0,0.0,1
-G825,0.0,0.0,0.0,1
-G833,0.0,0.0,0.0,1
-G839,0.0,0.0,0.0,1
-G912,1.0,1.0,1.0,1
+G459,0.5,1.0,0.67,1
+G610,0.0,0.0,0.0,2
+G619,0.0,0.0,0.0,1
+G819,0.33,0.5,0.4,2
+G822,1.0,0.67,0.8,3
+G833,0.0,0.0,0.0,2
+G839,0.0,0.0,0.0,2
 G919,0.0,0.0,0.0,1
-G931,0.81,0.87,0.84,15
-G932,1.0,0.36,0.53,11
-G934,1.0,0.75,0.86,4
-G935,0.88,0.7,0.78,10
-G936,1.0,0.11,0.19,19
-G938,0.0,0.0,0.0,2
-G939,0.2,0.17,0.18,6
-G950,0.0,0.0,0.0,1
-G951,0.0,0.0,0.0,0
-G952,0.0,0.0,0.0,0
-G959,0.0,0.0,0.0,1
-G98,0.0,0.0,0.0,1
-H544,0.0,0.0,0.0,0
-I051,0.0,0.0,0.0,2
-I059,0.0,0.0,0.0,0
-I071,1.0,1.0,1.0,1
-I080,0.75,0.75,0.75,4
-I089,1.0,1.0,1.0,1
-I10,0.9,0.95,0.93,227
-I119,0.9,0.66,0.76,65
-I139,0.0,0.0,0.0,3
+G92,0.0,0.0,0.0,0
+G931,0.75,0.86,0.8,7
+G932,1.0,0.5,0.67,2
+G934,0.5,0.67,0.57,3
+G935,0.83,0.83,0.83,12
+G936,1.0,0.07,0.12,15
+G939,0.0,0.0,0.0,1
+G952,1.0,1.0,1.0,1
+H350,0.0,0.0,0.0,0
+H472,0.0,0.0,0.0,1
+H819,0.0,0.0,0.0,0
+I00,1.0,1.0,1.0,1
+I059,1.0,0.33,0.5,3
+I069,0.0,0.0,0.0,1
+I081,0.0,0.0,0.0,1
+I10,0.96,1.0,0.98,224
+I110,0.0,0.0,0.0,1
+I119,1.0,0.83,0.91,36
+I139,1.0,1.0,1.0,3
+I159,1.0,1.0,1.0,1
 I208,0.0,0.0,0.0,1
-I209,0.67,1.0,0.8,2
-I210,0.0,0.0,0.0,7
+I209,0.5,1.0,0.67,1
+I210,0.0,0.0,0.0,1
 I211,0.0,0.0,0.0,1
-I212,0.0,0.0,0.0,2
-I214,0.0,0.0,0.0,2
-I219,0.27,0.56,0.37,41
-I2199,0.0,0.0,0.0,41
-I229,1.0,1.0,1.0,2
-I240,0.0,0.0,0.0,7
-I248,0.67,0.5,0.57,4
-I249,1.0,0.8,0.89,5
-I250,1.0,0.25,0.4,4
-I251,0.92,0.87,0.89,67
-I252,0.0,0.0,0.0,27
-I253,0.0,0.0,0.0,1
-I255,0.5,0.23,0.32,13
+I212,0.0,0.0,0.0,1
+I214,0.75,1.0,0.86,3
+I219,0.69,0.96,0.8,46
+I2199,0.0,0.0,0.0,9
+I229,0.0,0.0,0.0,2
+I240,0.0,0.0,0.0,0
+I250,0.0,0.0,0.0,1
+I251,0.95,0.96,0.96,79
+I252,1.0,0.03,0.06,33
+I253,1.0,1.0,1.0,2
+I255,0.83,0.83,0.83,6
 I258,1.0,0.5,0.67,4
-I259,0.98,0.79,0.87,174
-I269,0.86,0.37,0.51,49
-I270,1.0,1.0,1.0,2
-I272,0.86,1.0,0.92,6
-I279,0.07,0.89,0.13,9
-I308,1.0,1.0,1.0,1
-I309,0.0,0.0,0.0,0
-I312,1.0,0.5,0.67,2
-I313,1.0,1.0,1.0,1
-I319,0.75,0.75,0.75,4
-I330,0.75,0.6,0.67,5
-I340,0.5,0.43,0.46,7
-I350,0.32,0.73,0.45,15
-I352,0.0,0.0,0.0,1
-I359,0.2,0.67,0.31,3
-I38,0.4,0.5,0.44,4
-I420,0.58,0.69,0.63,16
-I4209,0.0,0.0,0.0,6
-I421,0.0,0.0,0.0,0
+I259,0.98,0.9,0.94,160
+I269,1.0,0.12,0.22,49
+I279,1.0,0.89,0.94,9
+I308,0.0,0.0,0.0,1
+I312,1.0,1.0,1.0,1
+I319,1.0,1.0,1.0,1
+I330,0.5,0.25,0.33,4
+I340,1.0,0.5,0.67,2
+I342,0.0,0.0,0.0,1
+I350,0.37,0.85,0.51,13
+I351,0.0,0.0,0.0,2
+I358,0.0,0.0,0.0,2
+I359,0.67,0.67,0.67,3
+I361,0.0,0.0,0.0,1
+I38,0.12,0.33,0.18,3
+I420,0.92,0.79,0.85,14
+I4209,0.0,0.0,0.0,1
 I422,0.0,0.0,0.0,0
 I4229,0.0,0.0,0.0,1
-I424,0.0,0.0,0.0,1
-I426,0.0,0.0,0.0,2
-I428,0.0,0.0,0.0,0
-I429,0.08,1.0,0.14,1
-I441,0.0,0.0,0.0,0
-I442,1.0,0.67,0.8,3
-I443,0.0,0.0,0.0,1
-I451,0.0,0.0,0.0,0
-I459,0.5,1.0,0.67,1
-I460,0.0,0.0,0.0,6
-I461,0.0,0.0,0.0,4
-I469,0.92,0.71,0.8,297
-I471,0.0,0.0,0.0,0
-I472,0.0,0.0,0.0,0
-I479,0.0,0.0,0.0,1
-I480,0.33,1.0,0.5,1
-I481,1.0,0.5,0.67,2
-I482,0.78,0.64,0.7,11
-I489,0.73,0.82,0.77,50
-I490,1.0,0.88,0.93,8
-I498,1.0,0.5,0.67,2
-I499,0.77,0.74,0.75,27
-I500,0.69,0.83,0.75,59
-I5009,0.0,0.0,0.0,20
-I501,0.86,0.88,0.87,68
-I509,0.93,0.97,0.95,229
-I513,1.0,0.6,0.75,5
-I514,0.33,0.5,0.4,2
-I5140,0.0,0.0,0.0,1
-I515,0.56,1.0,0.71,5
-I5159,0.0,0.0,0.0,2
-I516,0.76,0.95,0.84,20
-I517,0.93,0.93,0.93,30
-I518,0.33,0.5,0.4,2
-I519,0.49,0.97,0.65,98
-I602,0.0,0.0,0.0,2
-I608,0.0,0.0,0.0,2
-I609,0.0,0.0,0.0,3
-I610,0.0,0.0,0.0,2
-I611,0.0,0.0,0.0,4
-I615,0.0,0.0,0.0,2
-I6159,0.0,0.0,0.0,3
-I616,0.0,0.0,0.0,1
-I618,0.0,0.0,0.0,2
-I619,0.0,0.0,0.0,36
-I6199,0.0,0.0,0.0,21
-I620,0.0,0.0,0.0,3
-I629,0.0,0.0,0.0,2
-I630,0.0,0.0,0.0,1
-I6329,0.0,0.0,0.0,1
-I633,0.0,0.0,0.0,2
-I634,0.0,0.0,0.0,0
-I6349,0.0,0.0,0.0,1
-I635,0.0,0.0,0.0,31
-I638,0.0,0.0,0.0,2
-I639,0.47,0.85,0.61,33
+I428,0.0,0.0,0.0,1
+I429,0.25,0.5,0.33,2
+I4290,0.0,0.0,0.0,1
+I440,0.0,0.0,0.0,1
+I442,0.67,1.0,0.8,2
+I447,0.17,1.0,0.29,1
+I450,0.0,0.0,0.0,0
+I458,1.0,1.0,1.0,1
+I459,0.0,0.0,0.0,1
+I460,0.0,0.0,0.0,4
+I461,0.0,0.0,0.0,6
+I469,0.66,0.46,0.54,94
+I482,1.0,0.75,0.86,4
+I489,0.95,0.92,0.93,38
+I490,0.5,1.0,0.67,2
+I493,1.0,1.0,1.0,2
+I498,0.0,0.0,0.0,0
+I499,0.95,0.9,0.92,20
+I500,0.94,0.99,0.96,69
+I5000,0.0,0.0,0.0,1
+I5009,0.0,0.0,0.0,2
+I501,0.98,0.98,0.98,60
+I509,0.97,0.98,0.98,189
+I513,0.0,0.0,0.0,4
+I515,0.93,1.0,0.97,14
+I5159,0.0,0.0,0.0,1
+I516,0.38,1.0,0.55,3
+I517,0.87,1.0,0.93,26
+I518,0.0,0.0,0.0,0
+I519,0.25,0.76,0.38,17
+I608,0.0,0.0,0.0,3
+I609,0.0,0.0,0.0,2
+I612,0.0,0.0,0.0,3
+I613,0.0,0.0,0.0,1
+I614,0.0,0.0,0.0,1
+I615,0.0,0.0,0.0,1
+I6159,0.0,0.0,0.0,1
+I619,0.0,0.0,0.0,28
+I6199,0.0,0.0,0.0,3
+I620,0.67,1.0,0.8,2
+I633,1.0,1.0,1.0,2
+I634,1.0,1.0,1.0,1
+I635,0.0,0.0,0.0,10
+I638,1.0,0.5,0.67,2
+I639,0.7,0.9,0.79,41
 I6390,0.0,0.0,0.0,7
-I6399,0.0,0.0,0.0,16
-I64,0.75,0.6,0.67,5
-I640,0.18,0.67,0.28,24
-I6409,0.0,0.0,0.0,33
-I652,0.5,1.0,0.67,1
-I660,0.0,0.0,0.0,1
-I669,0.29,0.67,0.4,6
-I670,0.0,0.0,0.0,1
-I671,0.0,0.0,0.0,1
-I672,0.96,0.9,0.93,30
-I678,0.77,0.59,0.67,17
-I679,0.94,0.89,0.91,36
-I691,0.0,0.0,0.0,0
-I693,0.5,0.14,0.22,7
-I6930,0.0,0.0,0.0,1
-I694,0.0,0.0,0.0,2
-I6949,0.0,0.0,0.0,7
-I698,0.0,0.0,0.0,1
-I700,1.0,0.33,0.5,3
-I702,1.0,0.25,0.4,12
-I709,0.96,0.98,0.97,223
-I710,0.5,1.0,0.67,1
-I711,0.0,0.0,0.0,1
-I712,1.0,1.0,1.0,1
-I713,0.5,0.5,0.5,2
-I714,0.71,1.0,0.83,5
-I716,1.0,1.0,1.0,1
-I718,0.0,0.0,0.0,2
-I719,1.0,0.4,0.57,5
-I725,0.0,0.0,0.0,2
-I729,0.25,0.33,0.29,3
-I730,0.0,0.0,0.0,1
-I739,0.67,1.0,0.8,4
-I740,0.0,0.0,0.0,2
-I741,1.0,1.0,1.0,1
-I743,0.0,0.0,0.0,7
-I744,0.0,0.0,0.0,1
-I745,0.0,0.0,0.0,1
-I749,0.33,0.25,0.29,4
-I772,0.0,0.0,0.0,0
-I776,0.0,0.0,0.0,2
-I779,1.0,0.33,0.5,3
+I6399,0.0,0.0,0.0,5
+I64,0.92,0.86,0.89,14
+I640,0.25,1.0,0.4,6
+I6409,0.0,0.0,0.0,4
+I651,0.0,0.0,0.0,0
+I652,0.33,1.0,0.5,1
+I669,0.75,0.6,0.67,5
+I671,1.0,1.0,1.0,1
+I672,1.0,0.81,0.89,31
+I678,1.0,0.5,0.67,4
+I679,1.0,0.71,0.83,17
+I693,0.75,0.43,0.55,7
+I694,0.25,1.0,0.4,2
+I6949,0.0,0.0,0.0,1
+I700,1.0,0.5,0.67,2
+I702,0.0,0.0,0.0,7
+I709,0.97,0.99,0.98,229
+I710,1.0,1.0,1.0,2
+I711,1.0,1.0,1.0,1
+I712,0.0,0.0,0.0,1
+I713,1.0,0.33,0.5,3
+I714,1.0,1.0,1.0,2
+I718,0.57,1.0,0.73,4
+I739,0.8,1.0,0.89,4
+I740,1.0,1.0,1.0,1
+I741,0.5,0.33,0.4,3
+I743,0.0,0.0,0.0,3
+I749,0.25,1.0,0.4,1
+I771,0.0,0.0,0.0,1
+I772,0.0,0.0,0.0,1
+I779,1.0,1.0,1.0,1
 I801,0.0,0.0,0.0,3
-I802,1.0,0.71,0.83,7
-I803,0.0,0.0,0.0,2
-I81,0.5,1.0,0.67,1
-I822,1.0,1.0,1.0,1
-I828,0.0,0.0,0.0,1
-I829,0.22,1.0,0.36,2
-I839,1.0,1.0,1.0,1
-I850,0.5,0.33,0.4,3
-I859,1.0,1.0,1.0,1
-I959,1.0,0.25,0.4,4
-I978,0.0,0.0,0.0,1
-I99,0.19,0.82,0.3,17
-J09,1.0,1.0,1.0,3
-J111,1.0,1.0,1.0,1
-J13,1.0,0.5,0.67,2
-J151,1.0,0.67,0.8,3
-J152,0.5,0.5,0.5,2
-J155,1.0,1.0,1.0,1
-J159,0.0,0.0,0.0,17
-J180,1.0,0.89,0.94,54
-J181,0.8,0.89,0.84,9
-J182,1.0,1.0,1.0,16
-J188,1.0,1.0,1.0,1
-J189,0.87,0.58,0.69,147
-J209,0.88,0.35,0.5,20
-J219,0.0,0.0,0.0,2
-J22,1.0,0.67,0.8,3
-J350,0.0,0.0,0.0,1
-J380,0.0,0.0,0.0,1
+I802,1.0,0.67,0.8,12
+I803,0.0,0.0,0.0,1
+I809,0.0,0.0,0.0,1
+I829,0.12,0.5,0.2,2
+I830,0.0,0.0,0.0,1
+I839,0.0,0.0,0.0,1
+I850,0.0,0.0,0.0,3
+I859,0.0,0.0,0.0,3
+I872,0.0,0.0,0.0,1
+I879,0.0,0.0,0.0,1
+I959,0.0,0.0,0.0,1
+I99,0.32,1.0,0.48,6
+J13,1.0,1.0,1.0,2
+J159,0.0,0.0,0.0,16
+J180,0.96,0.9,0.93,29
+J181,1.0,1.0,1.0,2
+J182,0.95,1.0,0.98,21
+J188,0.0,0.0,0.0,1
+J189,0.77,0.18,0.29,97
+J209,0.0,0.0,0.0,25
+J22,1.0,0.75,0.86,4
 J387,0.0,0.0,0.0,0
 J392,0.0,0.0,0.0,1
-J40,0.0,0.0,0.0,3
-J411,0.0,0.0,0.0,2
-J42,0.0,0.0,0.0,12
-J439,0.67,0.11,0.18,19
-J440,0.0,0.0,0.0,1
-J441,0.55,0.6,0.57,10
-J448,0.6,0.78,0.68,32
-J449,0.91,0.84,0.87,62
-J459,0.67,0.8,0.73,5
-J47,0.5,0.33,0.4,3
-J60,0.0,0.0,0.0,1
-J628,0.5,1.0,0.67,1
-J64,0.0,0.0,0.0,2
-J690,1.0,0.85,0.92,60
-J80,1.0,0.64,0.78,11
-J81,0.89,0.63,0.74,54
-J840,0.0,0.0,0.0,1
-J841,0.81,0.72,0.76,18
-J849,0.5,1.0,0.67,3
-J8499,0.0,0.0,0.0,1
-J852,0.0,0.0,0.0,1
-J860,1.0,0.5,0.67,2
-J869,0.0,0.0,0.0,3
-J90,0.73,0.67,0.7,12
-J938,0.0,0.0,0.0,1
-J939,0.0,0.0,0.0,3
-J942,1.0,1.0,1.0,1
-J948,0.43,0.6,0.5,5
+J398,0.0,0.0,0.0,1
+J40,0.0,0.0,0.0,5
+J411,0.75,1.0,0.86,3
+J42,0.5,0.14,0.22,7
+J439,1.0,0.04,0.08,24
+J441,1.0,0.83,0.91,6
+J448,1.0,0.25,0.4,12
+J449,0.85,0.97,0.91,64
+J459,1.0,0.75,0.86,4
+J47,0.0,0.0,0.0,4
+J672,0.0,0.0,0.0,1
+J690,1.0,0.84,0.91,19
+J80,1.0,0.71,0.83,7
+J81,1.0,0.44,0.62,27
+J841,1.0,0.75,0.86,4
+J849,0.5,0.5,0.5,2
+J851,0.0,0.0,0.0,3
+J852,0.0,0.0,0.0,2
+J860,0.0,0.0,0.0,2
+J869,1.0,1.0,1.0,1
+J90,1.0,0.5,0.67,2
+J939,1.0,1.0,1.0,1
+J942,0.0,0.0,0.0,1
+J948,0.78,0.7,0.74,10
+J954,0.0,0.0,0.0,1
 J958,1.0,1.0,1.0,1
-J960,0.94,0.24,0.38,134
-J961,0.76,0.94,0.84,17
-J969,0.76,0.64,0.69,135
-J980,0.33,0.8,0.47,5
-J981,1.0,0.5,0.67,2
-J984,0.0,0.0,0.0,3
-J9840,0.0,0.0,0.0,2
-J985,1.0,0.5,0.67,2
-J988,0.86,1.0,0.92,6
-J989,0.0,0.0,0.0,1
-K114,0.0,0.0,0.0,1
-K219,1.0,1.0,1.0,1
+J960,1.0,0.2,0.33,40
+J961,0.5,0.57,0.53,7
+J969,0.46,0.35,0.4,60
+J980,0.5,1.0,0.67,1
+J981,1.0,0.5,0.67,4
+J984,0.0,0.0,0.0,0
+J9840,0.0,0.0,0.0,1
+J9849,0.0,0.0,0.0,1
+J985,1.0,1.0,1.0,1
+J988,0.0,0.0,0.0,2
+J989,0.2,1.0,0.33,1
+K219,0.0,0.0,0.0,0
 K220,0.0,0.0,0.0,1
 K221,0.0,0.0,0.0,1
-K222,0.0,0.0,0.0,1
-K223,0.0,0.0,0.0,0
-K226,1.0,1.0,1.0,1
-K228,0.0,0.0,0.0,0
-K253,0.0,0.0,0.0,1
-K254,0.0,0.0,0.0,0
+K228,0.0,0.0,0.0,2
+K229,0.0,0.0,0.0,0
+K251,0.0,0.0,0.0,1
+K254,0.0,0.0,0.0,1
+K256,0.0,0.0,0.0,1
 K259,0.0,0.0,0.0,2
-K260,0.0,0.0,0.0,1
-K264,0.0,0.0,0.0,1
-K265,0.0,0.0,0.0,1
-K269,0.0,0.0,0.0,1
-K279,0.0,0.0,0.0,1
-K295,0.0,0.0,0.0,1
-K297,0.0,0.0,0.0,0
-K299,0.0,0.0,0.0,1
-K311,0.0,0.0,0.0,1
-K318,0.0,0.0,0.0,1
-K319,0.0,0.0,0.0,0
-K403,0.0,0.0,0.0,1
-K409,0.0,0.0,0.0,0
-K420,0.0,0.0,0.0,1
-K429,0.0,0.0,0.0,0
-K509,1.0,1.0,1.0,1
-K519,1.0,1.0,1.0,1
-K550,0.0,0.0,0.0,11
-K551,0.0,0.0,0.0,0
-K559,0.0,0.0,0.0,11
-K560,0.0,0.0,0.0,2
+K260,0.0,0.0,0.0,2
+K264,0.0,0.0,0.0,3
+K265,0.0,0.0,0.0,3
+K267,0.0,0.0,0.0,1
+K269,0.0,0.0,0.0,2
+K318,0.0,0.0,0.0,0
+K409,0.0,0.0,0.0,1
+K439,0.0,0.0,0.0,0
+K501,0.0,0.0,0.0,1
+K509,0.0,0.0,0.0,0
+K520,0.0,0.0,0.0,1
+K550,0.5,0.22,0.31,9
+K5500,0.0,0.0,0.0,1
+K5509,0.0,0.0,0.0,1
+K559,0.0,0.0,0.0,7
+K560,1.0,1.0,1.0,1
 K562,0.0,0.0,0.0,1
-K566,0.87,0.93,0.9,28
-K567,0.8,1.0,0.89,4
-K579,0.0,0.0,0.0,2
-K593,0.0,0.0,0.0,1
-K598,0.0,0.0,0.0,0
-K604,0.0,0.0,0.0,0
-K610,0.0,0.0,0.0,1
-K625,1.0,1.0,1.0,1
-K628,0.0,0.0,0.0,0
-K631,0.0,0.0,0.0,0
-K6310,0.0,0.0,0.0,1
-K639,0.0,0.0,0.0,1
-K650,0.75,0.25,0.38,12
-K658,1.0,1.0,1.0,1
-K659,0.43,0.21,0.29,14
-K661,0.25,0.5,0.33,2
+K566,1.0,1.0,1.0,4
+K567,0.86,1.0,0.92,6
+K573,1.0,0.33,0.5,3
+K579,0.0,0.0,0.0,0
+K590,1.0,1.0,1.0,1
+K631,0.0,0.0,0.0,1
+K6310,0.0,0.0,0.0,2
+K639,0.29,1.0,0.44,2
+K650,0.54,0.7,0.61,10
+K659,1.0,0.67,0.8,6
+K660,1.0,1.0,1.0,1
+K668,0.0,0.0,0.0,1
 K700,0.0,0.0,0.0,1
-K701,1.0,1.0,1.0,1
-K703,1.0,0.17,0.3,23
+K703,1.0,0.24,0.38,21
 K709,0.0,0.0,0.0,1
-K717,1.0,1.0,1.0,5
+K717,0.0,0.0,0.0,2
 K719,0.0,0.0,0.0,1
-K720,0.0,0.0,0.0,0
-K7200,0.0,0.0,0.0,3
-K7210,0.0,0.0,0.0,1
-K729,0.91,0.21,0.34,47
-K739,1.0,1.0,1.0,1
-K743,0.0,0.0,0.0,1
-K744,0.0,0.0,0.0,1
-K746,0.73,0.61,0.67,31
-K750,1.0,1.0,1.0,1
-K754,1.0,1.0,1.0,1
-K767,0.88,0.78,0.82,9
-K768,0.0,0.0,0.0,2
-K769,0.5,0.33,0.4,3
-K801,0.0,0.0,0.0,1
-K802,0.0,0.0,0.0,4
-K810,1.0,0.6,0.75,5
-K819,0.0,0.0,0.0,3
-K822,1.0,1.0,1.0,1
-K830,0.8,0.8,0.8,5
-K831,1.0,0.67,0.8,6
-K833,1.0,1.0,1.0,1
-K858,0.0,0.0,0.0,1
-K859,0.8,0.4,0.53,10
-K862,0.0,0.0,0.0,1
-K900,0.0,0.0,0.0,1
-K918,0.0,0.0,0.0,1
-K920,1.0,0.8,0.89,10
-K921,0.0,0.0,0.0,1
-K922,1.0,0.32,0.48,22
-K928,1.0,1.0,1.0,1
+K720,0.0,0.0,0.0,1
+K7200,0.0,0.0,0.0,1
+K729,1.0,0.11,0.2,27
+K739,0.0,0.0,0.0,2
+K740,0.0,0.0,0.0,1
+K743,1.0,1.0,1.0,1
+K744,1.0,1.0,1.0,1
+K746,0.33,0.12,0.18,16
+K750,0.0,0.0,0.0,2
+K760,0.0,0.0,0.0,4
+K767,1.0,0.5,0.67,2
+K768,0.0,0.0,0.0,0
+K802,0.0,0.0,0.0,2
+K803,0.0,0.0,0.0,1
+K810,0.0,0.0,0.0,1
+K819,1.0,1.0,1.0,1
+K829,0.0,0.0,0.0,1
+K830,1.0,0.5,0.67,2
+K838,0.0,0.0,0.0,0
+K859,1.0,0.8,0.89,5
+K869,0.0,0.0,0.0,2
+K921,1.0,0.5,0.67,2
+K922,0.8,0.4,0.53,10
 K929,0.0,0.0,0.0,0
-L021,0.0,0.0,0.0,1
-L022,0.0,0.0,0.0,1
-L029,0.2,1.0,0.33,1
-L089,0.0,0.0,0.0,3
-L120,0.0,0.0,0.0,1
-L892,0.0,0.0,0.0,0
-L899,0.57,0.22,0.32,18
-L909,0.0,0.0,0.0,1
-L942,0.0,0.0,0.0,1
-L97,1.0,0.25,0.4,4
-L988,0.25,1.0,0.4,1
-M009,1.0,0.25,0.4,4
-M051,0.0,0.0,0.0,0
-M052,0.0,0.0,0.0,0
-M069,1.0,1.0,1.0,1
-M082,0.0,0.0,0.0,1
-M109,0.0,0.0,0.0,0
-M130,0.0,0.0,0.0,1
-M138,0.0,0.0,0.0,0
-M139,0.0,0.0,0.0,0
-M159,0.0,0.0,0.0,0
-M169,0.0,0.0,0.0,1
-M219,0.0,0.0,0.0,0
-M311,0.0,0.0,0.0,1
-M316,0.33,1.0,0.5,1
-M331,0.0,0.0,0.0,1
-M341,0.0,0.0,0.0,1
+L022,0.0,0.0,0.0,2
+L029,0.0,0.0,0.0,0
+L049,0.0,0.0,0.0,0
+L089,1.0,0.5,0.67,2
+L893,0.0,0.0,0.0,1
+L899,1.0,0.06,0.11,34
+L97,0.0,0.0,0.0,1
+L984,0.0,0.0,0.0,0
+L988,0.0,0.0,0.0,0
+M009,0.0,0.0,0.0,1
+M069,0.0,0.0,0.0,1
+M139,0.0,0.0,0.0,1
+M159,1.0,1.0,1.0,1
+M199,0.0,0.0,0.0,1
 M349,0.0,0.0,0.0,1
-M359,0.0,0.0,0.0,1
-M439,0.0,0.0,0.0,0
-M462,0.0,0.0,0.0,0
-M463,1.0,1.0,1.0,2
-M485,1.0,0.67,0.8,3
-M512,0.0,0.0,0.0,1
-M513,0.0,0.0,0.0,1
+M359,0.0,0.0,0.0,0
+M418,0.0,0.0,0.0,1
+M419,0.0,0.0,0.0,1
 M623,0.0,0.0,0.0,1
-M628,1.0,1.0,1.0,4
-M819,0.0,0.0,0.0,2
-M844,0.0,0.0,0.0,1
-M869,0.0,0.0,0.0,2
-M895,0.0,0.0,0.0,1
-M898,0.0,0.0,0.0,0
-M899,0.12,0.33,0.18,3
-N059,0.5,1.0,0.67,1
-N10,0.5,0.33,0.4,3
-N111,0.0,0.0,0.0,1
+M628,0.0,0.0,0.0,1
+M726,0.0,0.0,0.0,1
+M819,0.0,0.0,0.0,1
+M869,0.0,0.0,0.0,1
+M899,0.0,0.0,0.0,0
+N039,0.5,1.0,0.67,1
+N10,0.5,0.5,0.5,2
+N119,0.5,0.5,0.5,2
+N12,0.5,1.0,0.67,1
 N133,0.0,0.0,0.0,1
-N138,0.0,0.0,0.0,1
-N179,0.76,0.51,0.61,37
-N184,0.0,0.0,0.0,3
+N135,0.0,0.0,0.0,0
+N139,1.0,1.0,1.0,1
+N179,1.0,0.5,0.67,20
 N185,0.0,0.0,0.0,2
-N189,0.68,0.9,0.77,58
-N19,0.9,0.58,0.7,45
-N209,0.0,0.0,0.0,0
-N26,0.0,0.0,0.0,2
+N189,0.83,0.79,0.81,19
+N19,0.88,0.83,0.86,18
+N200,0.0,0.0,0.0,1
+N201,0.0,0.0,0.0,1
+N26,0.0,0.0,0.0,0
 N288,0.0,0.0,0.0,1
-N289,0.0,0.0,0.0,2
-N300,1.0,1.0,1.0,3
-N309,1.0,1.0,1.0,4
-N319,0.0,0.0,0.0,0
-N328,0.0,0.0,0.0,1
-N390,0.78,0.86,0.82,21
-N399,0.0,0.0,0.0,0
-N40,1.0,1.0,1.0,2
-N419,1.0,1.0,1.0,1
-N428,0.0,0.0,0.0,0
-N429,0.0,0.0,0.0,0
-N459,0.0,0.0,0.0,1
-N649,0.0,0.0,0.0,1
-N739,0.0,0.0,0.0,1
-N768,0.0,0.0,0.0,0
-O670,0.0,0.0,0.0,0
-P012,0.0,0.0,0.0,1
-P015,0.0,0.0,0.0,1
-P059,1.0,1.0,1.0,1
-P070,0.8,0.8,0.8,5
-P071,0.0,0.0,0.0,1
-P073,0.0,0.0,0.0,2
-P251,0.0,0.0,0.0,0
-P271,0.0,0.0,0.0,0
-P285,0.0,0.0,0.0,0
-P298,0.0,0.0,0.0,0
-P369,0.0,0.0,0.0,0
-P38,0.0,0.0,0.0,1
-P399,0.0,0.0,0.0,1
-P524,0.0,0.0,0.0,1
-P788,0.0,0.0,0.0,1
-P916,0.0,0.0,0.0,1
-P918,0.0,0.0,0.0,0
-Q273,0.0,0.0,0.0,1
-Q600,0.0,0.0,0.0,0
-Q613,0.0,0.0,0.0,0
-Q897,0.0,0.0,0.0,0
-R000,0.0,0.0,0.0,2
-R001,1.0,0.5,0.67,2
-R02,0.86,0.67,0.75,9
-R040,0.0,0.0,0.0,1
-R042,0.0,0.0,0.0,3
-R048,0.0,0.0,0.0,2
-R060,1.0,0.8,0.89,10
-R064,1.0,0.5,0.67,2
-R068,1.0,1.0,1.0,1
-R090,0.7,0.64,0.67,11
-R091,0.0,0.0,0.0,1
-R092,0.64,0.45,0.53,320
-R098,0.0,0.0,0.0,1
-R100,1.0,0.5,0.67,2
-R104,0.0,0.0,0.0,1
-R11,1.0,0.67,0.8,3
-R13,0.83,0.36,0.5,14
-R160,0.0,0.0,0.0,1
-R17,0.0,0.0,0.0,4
-R18,0.7,0.64,0.67,11
-R222,0.0,0.0,0.0,0
-R262,0.0,0.0,0.0,0
-R263,0.89,0.75,0.81,52
-R268,0.0,0.0,0.0,1
-R296,1.0,1.0,1.0,1
-R31,0.5,1.0,0.67,1
-R32,0.67,1.0,0.8,2
-R34,0.62,0.83,0.71,6
-R400,0.0,0.0,0.0,0
-R401,1.0,1.0,1.0,1
-R402,0.87,0.62,0.72,73
-R418,0.73,0.89,0.8,18
-R451,1.0,1.0,1.0,1
-R453,0.89,1.0,0.94,8
-R468,1.0,1.0,1.0,1
-R470,0.0,0.0,0.0,0
-R509,1.0,0.75,0.86,4
-R53,0.85,0.34,0.49,50
-R54,0.96,0.92,0.94,50
-R568,1.0,1.0,1.0,2
-R570,1.0,0.64,0.78,67
-R571,1.0,0.11,0.19,19
-R572,0.91,0.94,0.92,31
-R578,0.67,0.4,0.5,10
-R579,0.34,0.84,0.48,37
-R580,0.0,0.0,0.0,5
+N289,0.0,0.0,0.0,0
+N300,1.0,1.0,1.0,1
+N328,0.0,0.0,0.0,0
+N390,0.79,0.92,0.85,12
+N40,0.56,0.71,0.63,7
+N838,0.0,0.0,0.0,0
+N990,0.0,0.0,0.0,1
+NAN,0.0,0.0,0.0,0
+O744,0.0,0.0,0.0,0
+P070,1.0,1.0,1.0,1
+P220,1.0,1.0,1.0,1
+P290,0.0,0.0,0.0,1
+P298,0.0,0.0,0.0,1
+Q211,0.0,0.0,0.0,1
+Q245,0.0,0.0,0.0,1
+Q631,0.0,0.0,0.0,0
+R000,1.0,1.0,1.0,1
+R001,1.0,1.0,1.0,2
+R02,0.71,0.71,0.71,7
+R042,0.0,0.0,0.0,1
+R048,0.0,0.0,0.0,1
+R060,1.0,0.5,0.67,2
+R064,0.67,1.0,0.8,2
+R090,0.33,0.33,0.33,3
+R091,0.0,0.0,0.0,0
+R092,0.88,0.85,0.87,219
+R13,1.0,0.33,0.5,3
+R15,0.0,0.0,0.0,1
+R17,0.0,0.0,0.0,2
+R18,0.82,1.0,0.9,9
+R263,0.9,0.83,0.86,23
+R268,0.0,0.0,0.0,3
+R32,0.0,0.0,0.0,1
+R34,0.0,0.0,0.0,0
+R402,0.62,0.5,0.56,10
+R410,0.0,0.0,0.0,1
+R418,0.6,1.0,0.75,3
+R453,0.6,1.0,0.75,3
+R470,0.0,0.0,0.0,1
+R509,1.0,0.5,0.67,2
+R53,0.88,0.64,0.74,22
+R54,0.87,0.65,0.74,20
+R570,0.91,0.56,0.69,18
+R571,1.0,0.38,0.55,8
+R572,0.83,1.0,0.91,5
+R578,1.0,0.17,0.29,12
+R579,0.31,0.89,0.46,18
+R58,0.0,0.0,0.0,1
+R580,0.0,0.0,0.0,1
 R5800,0.0,0.0,0.0,5
-R590,1.0,1.0,1.0,1
 R599,0.0,0.0,0.0,0
-R600,1.0,0.5,0.67,2
-R601,1.0,1.0,1.0,5
-R609,0.0,0.0,0.0,2
-R630,0.5,0.33,0.4,3
-R633,0.0,0.0,0.0,3
+R601,0.0,0.0,0.0,1
+R609,0.0,0.0,0.0,1
+R630,1.0,1.0,1.0,2
+R633,1.0,0.25,0.4,4
 R634,0.0,0.0,0.0,3
-R64,0.94,0.95,0.95,82
-R651,1.0,1.0,1.0,5
-R659,1.0,1.0,1.0,1
-R680,0.0,0.0,0.0,2
-R688,0.39,0.27,0.32,126
-R91,1.0,0.75,0.86,4
-R932,0.0,0.0,0.0,1
-R933,0.0,0.0,0.0,1
-R950,0.0,0.0,0.0,0
-R959,0.0,0.0,0.0,1
-R960,1.0,1.0,1.0,8
-R97,0.33,0.4,0.36,5
-R98,1.0,1.0,1.0,1
-R99,0.0,0.0,0.0,2
-R999,0.35,0.85,0.49,52
-S021,1.0,1.0,1.0,1
-S029,0.0,0.0,0.0,3
-S062,0.0,0.0,0.0,8
-S063,0.0,0.0,0.0,2
-S065,0.0,0.0,0.0,11
-S066,0.0,0.0,0.0,2
-S068,0.0,0.0,0.0,0
-S069,1.0,0.2,0.33,10
-S099,0.0,0.0,0.0,2
-S109,0.0,0.0,0.0,1
-S122,0.0,0.0,0.0,1
+R64,0.84,0.89,0.86,35
+R651,0.5,0.67,0.57,3
+R688,0.58,0.39,0.47,28
+R770,0.0,0.0,0.0,1
+R798,0.0,0.0,0.0,1
+R945,1.0,1.0,1.0,1
+R950,0.5,1.0,0.67,1
+R960,0.25,0.67,0.36,3
+R97,0.5,0.5,0.5,2
+R98,0.0,0.0,0.0,0
+R99,0.0,0.0,0.0,1
+R999,0.48,0.83,0.61,18
+S019,0.0,0.0,0.0,1
+S020,0.0,0.0,0.0,1
+S021,0.0,0.0,0.0,1
+S027,0.0,0.0,0.0,1
+S029,1.0,0.2,0.33,5
+S061,0.0,0.0,0.0,1
+S062,1.0,0.2,0.33,5
+S063,0.0,0.0,0.0,1
+S065,1.0,0.33,0.5,3
+S066,0.0,0.0,0.0,1
+S068,0.0,0.0,0.0,1
+S069,0.0,0.0,0.0,2
+S097,0.0,0.0,0.0,1
+S099,0.0,0.0,0.0,1
 S129,0.0,0.0,0.0,0
-S141,0.0,0.0,0.0,0
-S199,1.0,0.5,0.67,2
+S199,0.0,0.0,0.0,1
 S220,0.0,0.0,0.0,0
-S224,0.0,0.0,0.0,1
-S241,0.0,0.0,0.0,0
-S250,0.0,0.0,0.0,1
-S270,0.0,0.0,0.0,0
-S271,0.0,0.0,0.0,1
-S273,1.0,1.0,1.0,1
-S299,0.5,1.0,0.67,1
-S318,0.0,0.0,0.0,2
-S320,0.0,0.0,0.0,1
-S328,0.0,0.0,0.0,0
-S350,0.0,0.0,0.0,0
-S361,0.0,0.0,0.0,0
-S429,0.0,0.0,0.0,2
-S529,0.0,0.0,0.0,1
-S684,0.0,0.0,0.0,0
-S720,0.83,0.42,0.56,12
-S721,0.0,0.0,0.0,0
-S729,0.62,0.71,0.67,7
-S822,0.0,0.0,0.0,1
-S929,0.0,0.0,0.0,0
-T029,1.0,1.0,1.0,2
-T07,0.8,0.89,0.84,9
-T08,1.0,0.33,0.5,3
-T090,0.0,0.0,0.0,1
+S223,0.0,0.0,0.0,2
+S280,0.0,0.0,0.0,0
+S321,0.0,0.0,0.0,1
+S400,0.0,0.0,0.0,1
+S423,1.0,0.5,0.67,2
+S499,0.0,0.0,0.0,1
+S720,1.0,0.14,0.25,7
+S721,0.67,0.5,0.57,4
+S729,1.0,0.25,0.4,4
+S798,0.0,0.0,0.0,0
+T012,0.0,0.0,0.0,1
+T019,0.0,0.0,0.0,1
+T025,0.0,0.0,0.0,0
+T029,1.0,1.0,1.0,1
+T07,0.75,1.0,0.86,3
+T08,0.0,0.0,0.0,1
 T093,0.0,0.0,0.0,1
-T140,0.0,0.0,0.0,0
 T141,0.0,0.0,0.0,0
-T144,0.0,0.0,0.0,0
-T149,0.0,0.0,0.0,2
-T179,0.29,0.4,0.33,5
-T402,0.0,0.0,0.0,1
-T486,0.0,0.0,0.0,1
-T58,1.0,1.0,1.0,1
-T699,0.0,0.0,0.0,1
-T71,1.0,0.22,0.36,9
-T751,0.0,0.0,0.0,3
-T782,0.0,0.0,0.0,0
-T794,1.0,0.1,0.18,10
-T795,0.0,0.0,0.0,1
-T796,0.0,0.0,0.0,1
-T811,0.0,0.0,0.0,5
-T813,0.0,0.0,0.0,1
+T149,0.0,0.0,0.0,0
+T175,0.0,0.0,0.0,1
+T179,0.0,0.0,0.0,5
+T300,0.0,0.0,0.0,1
+T435,0.0,0.0,0.0,1
+T443,0.0,0.0,0.0,1
+T71,0.0,0.0,0.0,7
+T751,0.0,0.0,0.0,2
+T754,0.0,0.0,0.0,1
+T794,0.0,0.0,0.0,5
+T811,0.0,0.0,0.0,4
 T814,0.0,0.0,0.0,2
-T827,0.0,0.0,0.0,2
-T828,0.0,0.0,0.0,1
-T835,0.0,0.0,0.0,1
-T845,0.0,0.0,0.0,2
-T856,0.0,0.0,0.0,0
-T889,0.0,0.0,0.0,0
-T931,0.0,0.0,0.0,1
-U847,0.0,0.0,0.0,0
-V031,0.0,0.0,0.0,1
-V184,0.0,0.0,0.0,1
-V189,0.0,0.0,0.0,2
-V234,0.0,0.0,0.0,1
-V299,0.0,0.0,0.0,0
-V435,0.0,0.0,0.0,0
-V486,1.0,1.0,1.0,1
-V499,1.0,1.0,1.0,1
+T826,0.0,0.0,0.0,1
+T827,1.0,0.5,0.67,2
+T828,0.0,0.0,0.0,0
+T836,0.0,0.0,0.0,1
+T857,0.5,1.0,0.67,1
+T876,1.0,1.0,1.0,1
+T886,0.0,0.0,0.0,1
+T905,0.0,0.0,0.0,0
+T931,0.0,0.0,0.0,2
+U837,0.0,0.0,0.0,1
+V475,0.0,0.0,0.0,2
 V892,1.0,1.0,1.0,3
-W10,0.5,1.0,0.67,1
-W18,1.0,1.0,1.0,1
-W180,0.0,0.0,0.0,1
-W19,0.79,0.79,0.79,14
-W190,0.0,0.0,0.0,4
-W199,0.0,0.0,0.0,1
-W204,0.0,0.0,0.0,1
-W673,0.0,0.0,0.0,1
-W69,0.0,0.0,0.0,0
-W74,0.0,0.0,0.0,0
-W77,0.0,0.0,0.0,0
-W78,0.0,0.0,0.0,1
-W79,0.86,0.5,0.63,12
+W010,0.0,0.0,0.0,1
+W019,0.0,0.0,0.0,1
+W131,0.0,0.0,0.0,1
+W17,1.0,1.0,1.0,1
+W180,1.0,1.0,1.0,1
+W19,1.0,1.0,1.0,2
+W190,1.0,1.0,1.0,1
+W79,0.83,1.0,0.91,5
 W790,0.0,0.0,0.0,1
-W81,0.0,0.0,0.0,0
-X00,0.0,0.0,0.0,0
-X09,0.0,0.0,0.0,1
-X31,0.0,0.0,0.0,1
-X314,0.0,0.0,0.0,1
-X42,0.0,0.0,0.0,1
-X44,1.0,0.5,0.67,4
-X49,0.0,0.0,0.0,0
-X59,0.0,0.0,0.0,0
-X599,0.0,0.0,0.0,1
-X61,0.0,0.0,0.0,0
-X70,1.0,1.0,1.0,3
-X700,0.0,0.0,0.0,7
-X704,0.0,0.0,0.0,1
-X708,0.0,0.0,0.0,2
-X71,0.0,0.0,0.0,1
-X74,1.0,0.5,0.67,4
-X81,1.0,1.0,1.0,1
-X818,0.0,0.0,0.0,2
-X84,0.0,0.0,0.0,0
-Y14,0.0,0.0,0.0,1
-Y24,0.0,0.0,0.0,0
-Y423,0.0,0.0,0.0,0
+X310,0.0,0.0,0.0,1
+X44,1.0,1.0,1.0,1
+X599,0.0,0.0,0.0,0
+X64,1.0,1.0,1.0,1
+X640,0.0,0.0,0.0,1
+X70,1.0,0.5,0.67,2
+X700,0.0,0.0,0.0,8
+X707,0.0,0.0,0.0,1
+X802,0.0,0.0,0.0,1
+X808,0.0,0.0,0.0,2
+X815,0.0,0.0,0.0,1
+X990,0.0,0.0,0.0,1
+Y000,0.0,0.0,0.0,1
 Y433,0.0,0.0,0.0,1
-Y442,0.0,0.0,0.0,3
-Y444,0.0,0.0,0.0,1
-Y450,0.0,0.0,0.0,1
-Y458,0.0,0.0,0.0,1
-Y579,0.0,0.0,0.0,0
-Y654,0.0,0.0,0.0,1
-Y66,1.0,0.33,0.5,6
-Y830,0.0,0.0,0.0,1
-Y831,0.0,0.0,0.0,8
-Y832,0.14,0.33,0.2,3
-Y833,0.0,0.0,0.0,2
-Y835,0.33,0.17,0.22,6
-Y836,0.0,0.0,0.0,3
+Y442,1.0,1.0,1.0,1
+Y66,1.0,0.67,0.8,3
+Y831,0.6,0.43,0.5,7
+Y832,0.0,0.0,0.0,1
+Y833,0.0,0.0,0.0,3
+Y834,0.0,0.0,0.0,1
+Y835,0.0,0.0,0.0,5
+Y836,0.0,0.0,0.0,4
 Y838,0.0,0.0,0.0,2
-Y839,0.0,0.0,0.0,16
-Y841,0.0,0.0,0.0,1
-Y842,0.0,0.0,0.0,3
-Y848,0.0,0.0,0.0,1
-Y849,0.0,0.0,0.0,1
-Z450,0.2,0.5,0.29,2
-Z491,0.4,0.67,0.5,3
-Z511,0.0,0.0,0.0,2
-Z512,0.0,0.0,0.0,1
-Z513,0.0,0.0,0.0,1
-Z515,1.0,1.0,1.0,15
-Z518,0.0,0.0,0.0,2
-Z532,0.0,0.0,0.0,1
+Y839,0.0,0.0,0.0,11
+Y842,0.0,0.0,0.0,1
+Z491,0.0,0.0,0.0,0
+Z511,0.0,0.0,0.0,1
+Z515,1.0,1.0,1.0,4
+Z518,0.0,0.0,0.0,0
 Z539,0.0,0.0,0.0,1
-Z599,0.0,0.0,0.0,0
-Z600,0.0,0.0,0.0,1
-Z740,0.0,0.0,0.0,0
-Z741,0.0,0.0,0.0,1
-Z857,0.0,0.0,0.0,0
-Z896,0.0,0.0,0.0,1
-Z897,0.0,0.0,0.0,1
-Z902,0.0,0.0,0.0,0
-Z904,0.0,0.0,0.0,1
-Z908,0.0,0.0,0.0,0
+Z894,0.0,0.0,0.0,1
 Z921,0.0,0.0,0.0,0
-Z922,0.25,1.0,0.4,1
-Z924,0.0,0.0,0.0,1
-Z940,0.0,0.0,0.0,1
-Z948,0.0,0.0,0.0,2
-Z949,0.0,0.0,0.0,0
-Z950,0.0,0.0,0.0,1
-Z952,0.0,0.0,0.0,0
-Z988,0.0,0.0,0.0,0
-Z998,0.0,0.0,0.0,0
-avg/total,0.7,0.58,0.6,7474
+Z948,1.0,1.0,1.0,1
+Z988,0.0,0.0,0.0,1
+Z991,0.0,0.0,0.0,0
+Z998,0.0,0.0,0.0,1
+avg/total,0.73,0.61,0.61,4505
diff --git a/code_jurica/seq2seq_attention.py b/code_jurica/seq2seq_attention.py
index d3688ed..9d17d2f 100644
--- a/code_jurica/seq2seq_attention.py
+++ b/code_jurica/seq2seq_attention.py
@@ -2,7 +2,7 @@
 # experiment = Experiment(api_key="hSd9vTj0EfMu72569YnVEvtvj")
 
 from loader import *
-from _layers import AttentionWithContext, Attention
+from _layers import AttentionWithContext, Attention, AttentionDecoder
 from keras.models import Model, load_model as keras_load_model
 from keras.layers import Input, LSTM, Dense, Embedding, GRU, Activation, dot, concatenate, Bidirectional, TimeDistributed
 from keras.utils import multi_gpu_model
@@ -92,26 +92,26 @@ validation_data_generator = KerasBatchGenerator(batch_size,
 
 print("Lets train some stuff!")
 # Define an input sequence and process it.
-encoder_input = Input(shape=(source_max_sequence_tokenizer, ))
-x = source_embedding_layer(encoder_input)
-encoder_out, state_h, state_c = LSTM(latent_dim, return_sequences=True, unroll=True, return_state=True)(x)
+encoder_input = Input(shape=(source_max_sequence_tokenizer, ), name='encoder_input')
+x_encoder = source_embedding_layer(encoder_input)
+encoder_out, state_h, state_c = LSTM(latent_dim, return_sequences=True, unroll=True, return_state=True, name='encoder_lstm')(x_encoder)
 encoder_states = [state_h, state_c]
 
 # Set up the decoder, using `encoder_states` as initial state.
-decoder_input = Input(shape=(target_max_sequence_tokenizer, ))
+decoder_input = Input(shape=(target_max_sequence_tokenizer, ), name='decoder_input')
 x_decode = target_embedding_layer(decoder_input)
-decoder_LSTM = LSTM(latent_dim, return_sequences=True, return_state = True, unroll=True)
+decoder_LSTM = LSTM(latent_dim, return_sequences=True, return_state=True, unroll=True, name='decoder_lstm')
 decoder, state_h_decode , state_c_decode = decoder_LSTM(x_decode, initial_state=encoder_states)
 
 # Equation (7) with 'dot' score from Section 3.1 in the paper.
 # Note that we reuse Softmax-activation layer instead of writing tensor calculation
-attention = dot([encoder_out, decoder], axes=[2, 2])
-attention = Activation('softmax')(attention)
-context = dot([attention, encoder_out], axes=[1,1])
+attention = dot([encoder_out, decoder], name='attention_dot', axes=[2, 2])
+attention = Activation('softmax', name='attention_activation')(attention)
+context = dot([attention, encoder_out], name='context_dot', axes=[1, 1])
 decoder_combined_context = concatenate([context, decoder])
 print(decoder_combined_context)
 
-decoder_dense = Dense(len(target_vocab)+1, activation='softmax')
+decoder_dense = Dense(len(target_vocab)+1, activation='softmax', name='dense_output')
 decoder_out = decoder_dense(decoder_combined_context) # equation (6) of the paper
 
 # MODEL
@@ -141,10 +141,11 @@ model.fit_generator(
 # INFERENCE MODELS
 # Encoder inference model
 encoder_model_inf = Model(encoder_input, encoder_states)
+encoder_model_inf.summary()
 
 # Decoder inference model
-decoder_state_input_h = Input(shape=(256,))
-decoder_state_input_c = Input(shape=(256,))
+decoder_state_input_h = Input(shape=(256,), name='inference_decoder_input_h')
+decoder_state_input_c = Input(shape=(256,), name='inference_decoder_input_c')
 decoder_input_states = [decoder_state_input_h, decoder_state_input_c]
 
 decoder, decoder_h, decoder_c = decoder_LSTM(x_decode, initial_state=decoder_input_states)
@@ -154,13 +155,14 @@ attention = dot([encoder_out, decoder], axes=[2, 2])
 attention = Activation('softmax')(attention)
 context = dot([attention, encoder_out], axes=[1,1])
 
-print(context, decoder)
+# print(context, decoder)
 decoder_combined_context = concatenate([context, decoder])
-print('decoder_combined_context\t', decoder_combined_context)
+# print('decoder_combined_context\t', decoder_combined_context)
 
 decoder_out = decoder_dense(decoder_combined_context)
 decoder_model_inf = Model(inputs=[decoder_input] + decoder_input_states,
                           outputs=[decoder_out] + decoder_states )
+decoder_model_inf.summary()
 
 def decode_seq(inp_seq):
 
diff --git a/code_jurica/train.sh b/code_jurica/train.sh
index 0165144..cae9235 100644
--- a/code_jurica/train.sh
+++ b/code_jurica/train.sh
@@ -1,4 +1,4 @@
 #!/bin/bash
-#CUDA_VISIBLE_DEVICES=3 /home/sevajuri/anaconda3/bin/python3 /home/sevajuri/projects/clef18/code_jurica/classificationICD10_attention.py
+CUDA_VISIBLE_DEVICES=3 /home/sevajuri/anaconda3/bin/python3 /home/sevajuri/projects/clef18/code_jurica/classificationICD10_attention.py
 CUDA_VISIBLE_DEVICES=3 /home/sevajuri/anaconda3/bin/python3 /home/sevajuri/projects/clef18/code_jurica/seq2seq.py
 CUDA_VISIBLE_DEVICES=3 /home/sevajuri/anaconda3/bin/python3 /home/sevajuri/projects/clef18/code_jurica/test.py
-- 
GitLab