Human-Body-Measurements-using-Computer-Vision icon indicating copy to clipboard operation
Human-Body-Measurements-using-Computer-Vision copied to clipboard

Output issue

Open Abdullahkhan5 opened this issue 2 years ago • 1 comments

I get the error below every time (except for the very first run), even when the image and the height input are exactly the same.


ValueError Traceback (most recent call last) in 15 bg_removed = res + (255 - cv2.cvtColor(mask, cv2.COLOR_GRAY2BGR)) 16 ---> 17 main(bg_removed,height,None)

in main(img_path, height, json_path) 124 # renderer = vis_util.SMPLRenderer(face_path='src/tf_smpl/smpl_faces.npy') 125 sess = tf.Session() --> 126 model = RunModel(sess=sess) 127 # cv2.imshow('input image for measurement extraction',img_path) 128 # cv2.waitKey(0)

in init(self, sess) 39 40 ---> 41 self.build_test_model_ief() 42 43 if sess is None:

in build_test_model_ief(self) 59 self.img_feat, self.E_var = img_enc_fn(self.images_pl, 60 is_training=False, ---> 61 reuse=False) 62 63 # Start loop

~\Human-Body-Measurements-using-Computer-Vision\src\models.py in Encoder_resnet(x, is_training, weight_decay, reuse) 46 is_training=is_training, 47 reuse=reuse, ---> 48 scope='resnet_v2_50') 49 net = tf.squeeze(net, axis=[1, 2]) 50 variables = tf.contrib.framework.get_variables('resnet_v2_50')

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\slim\python\slim\nets\resnet_v2.py in resnet_v2_50(inputs, num_classes, is_training, global_pool, output_stride, reuse, scope) 285 include_root_block=True, 286 reuse=reuse, --> 287 scope=scope) 288 289

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\slim\python\slim\nets\resnet_v2.py in resnet_v2(inputs, blocks, num_classes, is_training, global_pool, output_stride, include_root_block, reuse, scope) 212 with arg_scope( 213 [layers_lib.conv2d], activation_fn=None, normalizer_fn=None): --> 214 net = resnet_utils.conv2d_same(net, 64, 7, stride=2, scope='conv1') 215 net = layers.max_pool2d(net, [3, 3], stride=2, scope='pool1') 216 net = resnet_utils.stack_blocks_dense(net, blocks, output_stride)

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\slim\python\slim\nets\resnet_utils.py in conv2d_same(inputs, num_outputs, kernel_size, stride, rate, scope) 144 rate=rate, 145 padding='VALID', --> 146 scope=scope) 147 148

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\framework\python\ops\arg_scope.py in func_with_args(*args, **kwargs) 180 current_args = current_scope[key_func].copy() 181 current_args.update(kwargs) --> 182 return func(*args, **current_args) 183 184 _add_op(func)

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\layers\python\layers\layers.py in convolution2d(inputs, num_outputs, kernel_size, stride, padding, data_format, rate, activation_fn, normalizer_fn, normalizer_params, weights_initializer, weights_regularizer, biases_initializer, biases_regularizer, reuse, variables_collections, outputs_collections, trainable, scope) 1153 trainable, 1154 scope, -> 1155 conv_dims=2) 1156 1157 convolution2d.doc = convolution.doc

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\framework\python\ops\arg_scope.py in func_with_args(*args, **kwargs) 180 current_args = current_scope[key_func].copy() 181 current_args.update(kwargs) --> 182 return func(*args, **current_args) 183 184 _add_op(func)

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\layers\python\layers\layers.py in convolution(inputs, num_outputs, kernel_size, stride, padding, data_format, rate, activation_fn, normalizer_fn, normalizer_params, weights_initializer, weights_regularizer, biases_initializer, biases_regularizer, reuse, variables_collections, outputs_collections, trainable, scope, conv_dims) 1056 _scope=sc, 1057 _reuse=reuse) -> 1058 outputs = layer.apply(inputs) 1059 1060 # Add variables to collections.

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\keras\engine\base_layer.py in apply(self, inputs, *args, **kwargs) 1225 Output tensor(s). 1226 """ -> 1227 return self.call(inputs, *args, **kwargs) 1228 1229 @doc_controls.for_subclass_implementers

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\layers\base.py in call(self, inputs, *args, **kwargs) 528 529 # Actually call layer --> 530 outputs = super(Layer, self).call(inputs, *args, **kwargs) 531 532 if not context.executing_eagerly():

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\keras\engine\base_layer.py in call(self, inputs, *args, **kwargs) 536 if not self.built: 537 # Build layer if applicable (if the build method has been overridden). --> 538 self._maybe_build(inputs) 539 # We must set self.built since user defined build functions are not 540 # constrained to set self.built.

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\keras\engine\base_layer.py in _maybe_build(self, inputs) 1601 # Only call build if the user has manually overridden the build method. 1602 if not hasattr(self.build, '_is_default'): -> 1603 self.build(input_shapes) 1604 1605 def setattr(self, name, value):

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\keras\layers\convolutional.py in build(self, input_shape) 163 constraint=self.kernel_constraint, 164 trainable=True, --> 165 dtype=self.dtype) 166 if self.use_bias: 167 self.bias = self.add_weight(

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\layers\base.py in add_weight(self, name, shape, dtype, initializer, regularizer, trainable, constraint, use_resource, synchronization, aggregation, partitioner) 433 synchronization=synchronization, 434 aggregation=aggregation, --> 435 getter=vs.get_variable) 436 437 if regularizer:

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\keras\engine\base_layer.py in add_weight(self, name, shape, dtype, initializer, regularizer, trainable, constraint, partitioner, use_resource, synchronization, aggregation, **kwargs) 347 collections=collections, 348 synchronization=synchronization, --> 349 aggregation=aggregation) 350 backend.track_variable(variable) 351

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\training\checkpointable\base.py in _add_variable_with_custom_getter(self, name, shape, dtype, initializer, getter, overwrite, **kwargs_for_getter) 605 new_variable = getter( 606 name=name, shape=shape, dtype=dtype, initializer=initializer, --> 607 **kwargs_for_getter) 608 609 # If we set an initializer and the variable processed it, tracking will not

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\ops\variable_scope.py in get_variable(name, shape, dtype, initializer, regularizer, trainable, collections, caching_device, partitioner, validate_shape, use_resource, custom_getter, constraint, synchronization, aggregation) 1477 constraint=constraint, 1478 synchronization=synchronization, -> 1479 aggregation=aggregation) 1480 1481

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\ops\variable_scope.py in get_variable(self, var_store, name, shape, dtype, initializer, regularizer, reuse, trainable, collections, caching_device, partitioner, validate_shape, use_resource, custom_getter, constraint, synchronization, aggregation) 1218 constraint=constraint, 1219 synchronization=synchronization, -> 1220 aggregation=aggregation) 1221 1222 def _get_partitioned_variable(self,

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\ops\variable_scope.py in get_variable(self, name, shape, dtype, initializer, regularizer, reuse, trainable, collections, caching_device, partitioner, validate_shape, use_resource, custom_getter, constraint, synchronization, aggregation) 528 function_utils.has_kwargs(custom_getter)): 529 custom_getter_kwargs["constraint"] = constraint --> 530 return custom_getter(**custom_getter_kwargs) 531 else: 532 return _true_getter(

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\layers\python\layers\layers.py in layer_variable_getter(getter, *args, **kwargs) 1748 def layer_variable_getter(getter, *args, **kwargs): 1749 kwargs['rename'] = rename -> 1750 return _model_variable_getter(getter, *args, **kwargs) 1751 1752 return layer_variable_getter

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\layers\python\layers\layers.py in model_variable_getter(getter, name, shape, dtype, initializer, regularizer, trainable, collections, caching_device, partitioner, rename, use_resource, synchronization, aggregation, **) 1739 use_resource=use_resource, 1740 synchronization=synchronization, -> 1741 aggregation=aggregation) 1742 1743

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\framework\python\ops\arg_scope.py in func_with_args(*args, **kwargs) 180 current_args = current_scope[key_func].copy() 181 current_args.update(kwargs) --> 182 return func(*args, **current_args) 183 184 _add_op(func)

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\framework\python\ops\variables.py in model_variable(name, shape, dtype, initializer, regularizer, trainable, collections, caching_device, device, partitioner, custom_getter, use_resource, synchronization, aggregation) 348 use_resource=use_resource, 349 synchronization=synchronization, --> 350 aggregation=aggregation) 351 return var 352

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\framework\python\ops\arg_scope.py in func_with_args(*args, **kwargs) 180 current_args = current_scope[key_func].copy() 181 current_args.update(kwargs) --> 182 return func(*args, **current_args) 183 184 _add_op(func)

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\framework\python\ops\variables.py in variable(name, shape, dtype, initializer, regularizer, trainable, collections, caching_device, device, partitioner, custom_getter, use_resource, synchronization, aggregation) 275 use_resource=use_resource, 276 synchronization=synchronization, --> 277 aggregation=aggregation) 278 279

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\ops\variable_scope.py in _true_getter(name, shape, dtype, initializer, regularizer, reuse, trainable, collections, caching_device, partitioner, validate_shape, use_resource, constraint, synchronization, aggregation) 497 constraint=constraint, 498 synchronization=synchronization, --> 499 aggregation=aggregation) 500 501 # Set trainable value based on synchronization value.

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\ops\variable_scope.py in _get_single_variable(self, name, shape, dtype, initializer, regularizer, partition_info, reuse, trainable, collections, caching_device, validate_shape, use_resource, constraint, synchronization, aggregation) 846 tb = [x for x in tb if "tensorflow/python" not in x[0]][:3] 847 raise ValueError("%s Originally defined at:\n\n%s" % (err_msg, "".join( --> 848 traceback.format_list(tb)))) 849 found_var = self._vars[name] 850 if not shape.is_compatible_with(found_var.get_shape()):

ValueError: Variable resnet_v2_50/conv1/weights already exists, disallowed. Did you mean to set reuse=True or reuse=tf.AUTO_REUSE in VarScope? Originally defined at:

File "C:\Users\fahad\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\framework\ops.py", line 1801, in init self._traceback = tf_stack.extract_stack() File "C:\Users\fahad\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\framework\ops.py", line 3300, in create_op op_def=op_def) File "C:\Users\fahad\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\util\deprecation.py", line 507, in new_func return func(*args, **kwargs)

Abdullahkhan5 avatar May 03 '23 15:05 Abdullahkhan5

我每次都有以下错误**(第一次运行除外)** 即使图像相同且高度输入相同

ValueError 回溯（最近一次调用最后一次） 15 bg_removed = res + (255 - cv2.cvtColor(mask, cv2.COLOR_GRAY2BGR)) 16 ---> 17 main(bg_removed,height,None)

in main(img_path, height, json_path) 124 # renderer = vis_util.SMPLRenderer(face_path='src/tf_smpl/smpl_faces.npy') 125 sess = tf.Session() --> 126 model = RunModel(sess=sess) 127 # cv2.imshow('用于测量提取的输入图像',img_path) 128 # cv2.waitKey(0)

in init(self, sess) 39 40 ---> 41 self.build_test_model_ief() 42 43 if sess is None:

in build_test_model_ief(self) 59 self.img_feat, self.E_var = img_enc_fn(self.images_pl, 60 is_training=False, ---> 61 reuse=False) 62 63 # 开始循环

~\Human-Body-Measurements-using-Computer-Vision\src\models.py in Encoder_resnet(x, is_training, weight_decay, reuse) 46 is_training=is_training, 47 reuse=reuse, ---> 48 scope='resnet_v2_50') 49 net = tf.squeeze(net, axis=[1, 2]) 50 variables = tf.contrib.framework.get_variables('resnet_v2_50')

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\slim\python\slim\nets\resnet_v2.py in resnet_v2_50(inputs, num_classes, is_training, global_pool, output_stride, reuse, scope) 285 include_root_block=True, 286 reuse=reuse, --> 287 scope=scope) 288 289

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\slim\python\slim\nets\resnet_v2.py in resnet_v2(inputs, blocks, num_classes, is_training, global_pool, output_stride, include_root_block, reuse, scope) 212 with arg_scope( 213 [layers_lib.conv2d], activation_fn=None, normalizer_fn=None): --> 214 net = resnet_utils.conv2d_same(net, 64, 7, stride=2, scope='conv1') 215 net = layers.max_pool2d(net, [3, 3], stride=2, scope='pool1') 216 net = resnet_utils.stack_blocks_dense(net, blocks, output_stride)

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\slim\python\slim\nets\resnet_utils.py in conv2d_same(inputs, num_outputs, kernel_size, stride, rate, scope) 144 rate=rate, 145 padding='VALID', --> 146 scope=scope) 147 148

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\framework\python\ops\arg_scope.py in func_with_args(*args, **kwargs) 180 current_args = current_scope[key_func].copy() 181 current_args.update(kwargs) --> 182 return func(*args, **current_args) 183 184 _add_op(func)

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\layers\python\layers\layers.py in convolution2d(inputs, num_outputs, kernel_size, stride, padding, data_format, rate, activation_fn, normalizer_fn, normalizer_params, weights_initializer, weights_regularizer, biases_initializer, biases_regularizer, reuse, variables_collections, outputs_collections, trainable, scope) 1153 trainable, 1154 scope, -> 1155 conv_dims=2) 1156 1157 convolution2d.doc = convolution.doc

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\framework\python\ops\arg_scope.py in func_with_args(*args, **kwargs) 180 current_args = current_scope[key_func].copy() 181 current_args.update(kwargs) --> 182 return func(*args, **current_args) 183 184 _add_op(func)

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\layers\python\layers\layers.py in convolution(inputs, num_outputs, kernel_size, stride, padding, data_format, rate, activation_fn, normalizer_fn, normalizer_params, weights_initializer, weights_regularizer, biases_initializer, biases_regularizer, reuse, variables_collections, outputs_collections, trainable, scope, conv_dims) 1056 _scope=sc, 1057 _reuse=reuse) -> 1058 outputs = layer.apply(inputs) 1059 1060 # Add variables to collections.

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\keras\engine\base_layer.py in apply(self, inputs, *args, **kwargs) 1225 Output tensor(s). 1226 """ -> 1227 return self.call(inputs, *args, **kwargs) 1228 1229 @doc_controls.for_subclass_implementers

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\layers\base.py in call(self, inputs, *args, **kwargs) 528 529 # Actually call layer --> 530 outputs = super(Layer, self).call(inputs, *args, **kwargs) 531 532 if not context.executing_eagerly():

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\keras\engine\base_layer.py in call(self, inputs, *args, **kwargs) 536 if not self.built: 537 # Build layer if applicable (if the build method has been overridden). --> 538 self._maybe_build(inputs) 539 # We must set self.built since user defined build functions are not 540 # constrained to set self.built.

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\keras\engine\base_layer.py in _maybe_build(self, inputs) 1601 # Only call build if the user has manually overridden the build method. 1602 if not hasattr(self.build, '_is_default'): -> 1603 self.build(input_shapes) 1604 1605 def setattr(self, name, value):

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\keras\layers\convolutional.py in build(self, input_shape) 163 constraint=self.kernel_constraint, 164 trainable=True, --> 165 dtype=self.dtype) 166 if self.use_bias: 167 self.bias = self.add_weight(

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\layers\base.py in add_weight(self, name, shape, dtype, initializer, regularizer, trainable, constraint, use_resource, synchronization, aggregation, partitioner) 433 synchronization=synchronization, 434 aggregation=aggregation, --> 435 getter=vs.get_variable) 436 437 if regularizer:

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\keras\engine\base_layer.py in add_weight(self, name, shape, dtype, initializer, regularizer, trainable, constraint, partitioner, use_resource, synchronization, aggregation, **kwargs) 347 collections=collections, 348 synchronization=synchronization, --> 349 aggregation=aggregation) 350 backend.track_variable(variable) 351

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\training\checkpointable\base.py in _add_variable_with_custom_getter(self, name, shape, dtype, initializer, getter, overwrite, **kwargs_for_getter) 605 new_variable = getter( 606 name=name, shape=shape, dtype=dtype, initializer=initializer, --> 607 **kwargs_for_getter) 608 609 # If we set an initializer and the variable processed it, tracking will not

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\ops\variable_scope.py in get_variable(name, shape, dtype, initializer, regularizer, trainable, collections, caching_device, partitioner, validate_shape, use_resource, custom_getter, constraint, synchronization, aggregation) 1477 constraint=constraint, 1478 synchronization=synchronization, -> 1479 aggregation=aggregation) 1480 1481

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\ops\variable_scope.py in get_variable(self, var_store, name, shape, dtype, initializer, regularizer, reuse, trainable, collections, caching_device, partitioner, validate_shape, use_resource, custom_getter, constraint, synchronization, aggregation) 1218 constraint=constraint, 1219 synchronization=synchronization, -> 1220 aggregation=aggregation) 1221 1222 def _get_partitioned_variable(self,

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\ops\variable_scope.py in get_variable(self, name, shape, dtype, initializer, regularizer, reuse, trainable, collections, caching_device, partitioner, validate_shape, use_resource, custom_getter, constraint, synchronization, aggregation) 528 function_utils.has_kwargs(custom_getter)): 529 custom_getter_kwargs["constraint"] = constraint --> 530 return custom_getter(**custom_getter_kwargs) 531 else: 532 return _true_getter(

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\layers\python\layers\layers.py in layer_variable_getter(getter, *args, **kwargs) 1748 def layer_variable_getter(getter, *args, **kwargs): 1749 kwargs['rename'] = rename -> 1750 return _model_variable_getter(getter, *args, **kwargs) 1751 1752 return layer_variable_getter

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\layers\python\layers\layers.py in model_variable_getter(getter, name, shape, dtype, initializer, regularizer, trainable, collections, caching_device, partitioner, rename, use_resource, synchronization, aggregation, **) 1739 use_resource=use_resource, 1740 synchronization=synchronization, -> 1741 aggregation=aggregation) 1742 1743

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\framework\python\ops\arg_scope.py in func_with_args(*args, **kwargs) 180 current_args = current_scope[key_func].copy() 181 current_args.update(kwargs) --> 182 return func(*args, **current_args) 183 184 _add_op(func)

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\framework\python\ops\variables.py in model_variable(name, shape, dtype, initializer, regularizer, trainable, collections, caching_device, device, partitioner, custom_getter, use_resource, synchronization, aggregation) 348 use_resource=use_resource, 349 synchronization=synchronization, --> 350 aggregation=aggregation) 351 return var 352

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\framework\python\ops\arg_scope.py in func_with_args(*args, **kwargs) 180 current_args = current_scope[key_func].copy() 181 current_args.update(kwargs) --> 182 return func(*args, **current_args) 183 184 _add_op(func)

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\contrib\framework\python\ops\variables.py in variable(name, shape, dtype, initializer, regularizer, trainable, collections, caching_device, device, partitioner, custom_getter, use_resource, synchronization, aggregation) 275 use_resource=use_resource, 276 synchronization=synchronization, --> 277 aggregation=aggregation) 278 279

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\ops\variable_scope.py in _true_getter(name, shape, dtype, initializer, regularizer, reuse, trainable, collections, caching_device, partitioner, validate_shape, use_resource, constraint, synchronization, aggregation) 497 constraint=constraint, 498 synchronization=synchronization, --> 499 aggregation=aggregation) 500 501 # Set trainable value based on synchronization value.

~\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\ops\variable_scope.py in _get_single_variable(self, name, shape, dtype, initializer, regularizer, partition_info, reuse, trainable, collections, caching_device, validate_shape, use_resource, constraint, synchronization, aggregation) 846 tb = [x for x in tb if "tensorflow/python" not in x[0]][:3] 847 raise ValueError("%s Originally defined at:\n\n%s" % (err_msg, "".join( --> 848 traceback.format_list(tb)))) 849 found_var = self._vars[name] 850 if not shape.is_compatible_with(found_var.get_shape()):

ValueError: Variable resnet_v2_50/conv1/weights already exists, disallowed. Did you mean to set reuse=True or reuse=tf.AUTO_REUSE in VarScope? Originally defined at:

File "C:\Users\fahad\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\framework\ops.py", line 1801, in init self._traceback = tf_stack.extract_stack() File "C:\Users\fahad\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\framework\ops.py", line 3300, in create_op op_def=op_def) File "C:\Users\fahad\anaconda3\envs\HMS\lib\site-packages\tensorflow\python\util\deprecation.py", line 507, in new_func return func(*args, **kwargs)

May I ask if you have solved this problem? How did you solve it?

Yomiko11 avatar Nov 21 '23 06:11 Yomiko11