# Training hyperparameters (optimizer, LR schedule, regularization).
# NOTE(review): key set matches SyntaxNet/DRAGNN hyperparameter configs, which are
# protobuf text format rather than YAML — confirm the consumer. This one-key-per-line
# layout with '#' comments is valid in both; the original single-line form was not
# parseable as YAML at all.

# --- Optimizer ---
learning_method: "adam"
adam_beta1: 0.9
# NOTE(review): Adam's conventional beta2 is 0.999; 0.9 here is unusually low —
# confirm this is intentional and not a typo.
adam_beta2: 0.9
# NOTE(review): YAML 1.1 loaders (e.g. default PyYAML) read bare `1e-12` as the
# string "1e-12", not a float; YAML 1.2 and protobuf text read it as a number.
# Verify against the consuming parser before reformatting.
adam_eps: 1e-12

# --- Learning-rate schedule ---
learning_rate: 0.002
decay_base: 0.75
decay_staircase: false
decay_steps: 2500

# --- Regularization ---
dropout_rate: 0.67
recurrent_dropout_rate: 0.75
gradient_clip_norm: 15
l2_regularization_coefficient: 0

# --- Misc ---
use_moving_average: false
seed: 1