from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import numpy as np
def sigmoid(x):
    """Element-wise logistic sigmoid: 1 / (1 + exp(-x)).

    Works on scalars and numpy arrays (np.exp broadcasts).
    NOTE(review): the `def` header was missing from the extracted source;
    reconstructed from the visible body.
    """
    return 1.0 / (1.0 + np.exp(-x))
def tanh(x):
    """Element-wise hyperbolic tangent, expressed via the sigmoid identity
    tanh(x) = 2 * sigmoid(2x) - 1.

    Kept in terms of `sigmoid` (defined above) rather than np.tanh so the
    numerics match the original reference implementation bit-for-bit.
    NOTE(review): the `def` header was missing from the extracted source;
    reconstructed from the visible body.
    """
    return 2.0 * sigmoid(2.0 * x) - 1
def _prepare_rnn(
    t, n, dim_in, create_rnn, outputs_with_grads,
    forget_bias, memory_optim=False,
    forward_only=False, drop_states=False, T=None,
    two_d_initial_states=None, dim_out=None,
    num_states=2,
    **kwargs
):
    """Build a Caffe2 RNN test model and feed its random inputs.

    NOTE(review): the `def` header and several interior lines were lost in
    extraction (gaps in the embedded line numbering); this reconstruction
    fills them from the visible fragments. Every reconstructed line is
    marked below — confirm against the original file.

    Args:
        t: max sequence length; seq_lengths are drawn from [1, t].
        n: batch size.
        dim_in: input feature dimension, forwarded to ``create_rnn``.
        create_rnn: callable that adds the RNN ops to ``model`` and
            returns its output blobs.
        outputs_with_grads: forwarded to ``create_rnn``.
        forget_bias: forwarded to ``create_rnn``.
        memory_optim: forwarded as ``memory_optimization``.
        forward_only: forwarded to ``create_rnn``.
        drop_states: forwarded to ``create_rnn``.
        T: forwarded as ``static_rnn_unroll_size``.
        two_d_initial_states: if truthy, initial states are fed as (n, d);
            otherwise as (1, n, d). When None, chosen at random.
        dim_out: list of per-layer output dims; defaults to ``[dim_in]``.
        num_states: states per layer (e.g. 2 for LSTM hidden+cell).
            NOTE(review): reconstructed parameter — ``num_states`` is used
            in the visible loop but its origin line was lost.

    Returns:
        (outputs, model.net, states + [input_blob])
    """
    if dim_out is None:
        # NOTE(review): reconstructed default — original line missing.
        dim_out = [dim_in]
    print("Dims: ", t, n, dim_in, dim_out)

    model = ModelHelper(name='external')

    if two_d_initial_states is None:
        # Randomly exercise both the 2-D and 3-D initial-state layouts.
        two_d_initial_states = np.random.randint(2)

    def generate_input_state(n, d):
        if two_d_initial_states:
            return np.random.randn(n, d).astype(np.float32)
        return np.random.randn(1, n, d).astype(np.float32)

    states = []  # NOTE(review): reconstructed initialization — line missing.
    for layer_id, d in enumerate(dim_out):
        for i in range(num_states):
            state_name = "state_{}/layer_{}".format(i, layer_id)
            states.append(model.net.AddExternalInput(state_name))
            # NOTE(review): FeedBlob call reconstructed around the visible
            # argument line — confirm against the original.
            workspace.FeedBlob(
                states[-1], generate_input_state(n, d).astype(np.float32))

    # RNN scoping is sensitive to name scopes, so build under one to make
    # sure everything still resolves.
    with scope.NameScope("test_name_scope"):
        input_blob, seq_lengths = model.net.AddScopedExternalInputs(
            'input_blob', 'seq_lengths')

        # NOTE(review): assignment target and ``**kwargs`` forwarding
        # reconstructed — the call's opening line was missing.
        outputs = create_rnn(
            model, input_blob, seq_lengths, states,
            dim_in=dim_in, dim_out=dim_out, scope="external/recurrent",
            outputs_with_grads=outputs_with_grads,
            memory_optimization=memory_optim,
            forget_bias=forget_bias,
            forward_only=forward_only,
            drop_states=drop_states,
            static_rnn_unroll_size=T,
            **kwargs
        )

    workspace.RunNetOnce(model.param_init_net)

    # NOTE(review): FeedBlob wrapper reconstructed around the visible
    # random-lengths expression. Lengths lie in [1, t] inclusive.
    workspace.FeedBlob(
        seq_lengths,
        np.random.randint(1, t + 1, size=(n,)).astype(np.int32)
    )
    return outputs, model.net, states + [input_blob]