john.watson@gmail.com
I'm running this example from the zip file

ch10_rnn/Concept02_rnn.ipynb

It runs fine once, but when I try to re-run the cell I get the following error:

---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-4-197a3e10b3e3> in <module>()
1 if __name__ == '__main__':
----> 2 predictor = SeriesPredictor(input_dim=1, seq_size=4, hidden_dim=10)
3 train_x = [[[1], [2], [5], [6]],
4 [[5], [7], [7], [8]],
5 [[3], [4], [5], [7]]]

<ipython-input-2-0d7a69bd72ca> in __init__(self, input_dim, seq_size, hidden_dim)
15 # Cost optimizer
16 self.cost = tf.reduce_mean(tf.square(self.model() - self.y))
---> 17 self.train_op = tf.train.AdamOptimizer().minimize(self.cost)
18
19 # Auxiliary ops

/anaconda/envs/dl/lib/python3.6/site-packages/tensorflow/python/training/optimizer.py in minimize(self, loss, global_step, var_list, gate_gradients, aggregation_method, colocate_gradients_with_ops, name, grad_loss)
296
297 return self.apply_gradients(grads_and_vars, global_step=global_step,
--> 298 name=name)
299
300 def compute_gradients(self, loss, var_list=None,

/anaconda/envs/dl/lib/python3.6/site-packages/tensorflow/python/training/optimizer.py in apply_gradients(self, grads_and_vars, global_step, name)
410 ([str(v) for _, _, v in converted_grads_and_vars],))
411 with ops.control_dependencies(None):
--> 412 self._create_slots(var_list)
413 update_ops = []
414 with ops.name_scope(name, self._name) as name:

/anaconda/envs/dl/lib/python3.6/site-packages/tensorflow/python/training/adam.py in _create_slots(self, var_list)
117 # Create slots for the first and second moments.
118 for v in var_list:
--> 119 self._zeros_slot(v, "m", self._name)
120 self._zeros_slot(v, "v", self._name)
121

/anaconda/envs/dl/lib/python3.6/site-packages/tensorflow/python/training/optimizer.py in _zeros_slot(self, var, slot_name, op_name)
654 named_slots = self._slot_dict(slot_name)
655 if var not in named_slots:
--> 656 named_slots[var] = slot_creator.create_zeros_slot(var, op_name)
657 return named_slots[var]

/anaconda/envs/dl/lib/python3.6/site-packages/tensorflow/python/training/slot_creator.py in create_zeros_slot(primary, name, dtype, colocate_with_primary)
121 val = array_ops.zeros(primary.get_shape().as_list(), dtype=dtype)
122 return create_slot(primary, val, name,
--> 123 colocate_with_primary=colocate_with_primary)

/anaconda/envs/dl/lib/python3.6/site-packages/tensorflow/python/training/slot_creator.py in create_slot(primary, val, name, colocate_with_primary)
99 if colocate_with_primary:
100 with ops.colocate_with(primary):
--> 101 return _create_slot_var(primary, val, '')
102 else:
103 return _create_slot_var(primary, val, '')

/anaconda/envs/dl/lib/python3.6/site-packages/tensorflow/python/training/slot_creator.py in _create_slot_var(primary, val, scope)
53 current_partitioner = variable_scope.get_variable_scope().partitioner
54 variable_scope.get_variable_scope().set_partitioner(None)
---> 55 slot = variable_scope.get_variable(scope, initializer=val, trainable=False)
56 variable_scope.get_variable_scope().set_partitioner(current_partitioner)
57

/anaconda/envs/dl/lib/python3.6/site-packages/tensorflow/python/ops/variable_scope.py in get_variable(name, shape, dtype, initializer, regularizer, trainable, collections, caching_device, partitioner, validate_shape, custom_getter)
986 collections=collections, caching_device=caching_device,
987 partitioner=partitioner, validate_shape=validate_shape,
--> 988 custom_getter=custom_getter)
989 get_variable_or_local_docstring = (
990 """%s

/anaconda/envs/dl/lib/python3.6/site-packages/tensorflow/python/ops/variable_scope.py in get_variable(self, var_store, name, shape, dtype, initializer, regularizer, trainable, collections, caching_device, partitioner, validate_shape, custom_getter)
888 collections=collections, caching_device=caching_device,
889 partitioner=partitioner, validate_shape=validate_shape,
--> 890 custom_getter=custom_getter)
891
892 def _get_partitioned_variable(self,

/anaconda/envs/dl/lib/python3.6/site-packages/tensorflow/python/ops/variable_scope.py in get_variable(self, name, shape, dtype, initializer, regularizer, reuse, trainable, collections, caching_device, partitioner, validate_shape, custom_getter)
346 reuse=reuse, trainable=trainable, collections=collections,
347 caching_device=caching_device, partitioner=partitioner,
--> 348 validate_shape=validate_shape)
349
350 def _get_partitioned_variable(

/anaconda/envs/dl/lib/python3.6/site-packages/tensorflow/python/ops/variable_scope.py in _true_getter(name, shape, dtype, initializer, regularizer, reuse, trainable, collections, caching_device, partitioner, validate_shape)
331 initializer=initializer, regularizer=regularizer, reuse=reuse,
332 trainable=trainable, collections=collections,
--> 333 caching_device=caching_device, validate_shape=validate_shape)
334
335 if custom_getter is not None:

/anaconda/envs/dl/lib/python3.6/site-packages/tensorflow/python/ops/variable_scope.py in _get_single_variable(self, name, shape, dtype, initializer, regularizer, partition_info, reuse, trainable, collections, caching_device, validate_shape)
655 raise ValueError("Variable %s does not exist, or was not created with "
656 "tf.get_variable(). Did you mean to set reuse=None in "
--> 657 "VarScope?" % name)
658 if not shape.is_fully_defined() and not initializing_from_value:
659 raise ValueError("Shape of a new variable (%s) must be fully defined, "

ValueError: Variable W_out_1/Adam/ does not exist, or was not created with tf.get_variable(). Did you mean to set reuse=None in VarScope?


To re-run it, I have to delete the directory and unzip a fresh copy of everything.
Any ideas?
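
For what it's worth, I suspect re-running the cell rebuilds the model in the same default graph, which still holds the variables from the first run (W_out etc., plus the Adam optimizer's slot variables), and that's what trips the VarScope check. A minimal sketch of a workaround, assuming the TF 1.x graph-mode API and the SeriesPredictor class defined earlier in this notebook:

import tensorflow as tf

# Clear any variables left over from the previous run of this cell
# before rebuilding the model, so names don't collide.
tf.reset_default_graph()
predictor = SeriesPredictor(input_dim=1, seq_size=4, hidden_dim=10)

With that reset at the top of the cell, it should be safe to re-run without unzipping a fresh copy, but I haven't verified this against the exact notebook in the zip.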