tensorflow AttributeError: object has no attribute 'assign'

wooyq4lh posted on 2022-11-30 in: Other

I am trying to optimize my machine learning model using weight pruning, but no matter what I do I cannot get rid of the error AttributeError: object has no attribute 'assign'.
Below is my pruning code:

# pruning

import tensorflow as tf
import tensorflow_model_optimization as tfmot
import numpy as np

origModelFile = 'modeltest.h5'
model = tf.keras.models.load_model(origModelFile)

prune_low_magnitude = tfmot.sparsity.keras.prune_low_magnitude
epochs = 15
batch_size = 2048
validation_split = 0.1
num_images = x_train.shape[0] * (1 - validation_split)  # x_train is the training data defined earlier in the notebook

end_step = np.ceil(num_images / batch_size).astype(np.int32) * epochs
pruning_params = {
      'pruning_schedule': tfmot.sparsity.keras.PolynomialDecay(initial_sparsity=.95,
                                                               final_sparsity=.8,
                                                               begin_step=0,
                                                               end_step=end_step)
}
model_for_pruning = prune_low_magnitude(model, **pruning_params) #this line gives the error 

model_for_pruning.compile(optimizer='adam',
              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
              metrics=['accuracy'])

pruned_model = tfmot.sparsity.keras.strip_pruning(model_for_pruning)


pruned_model.summary()

Below is the full stack trace of the error:

---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
~\AppData\Local\Temp\ipykernel_33564\3560105584.py in <module>
     13                                                                end_step=end_step)
     14 }
---> 15 model_for_pruning = prune_low_magnitude(model, **pruning_params)
     16 
     17 model_for_pruning.compile(optimizer='adam',

A:\Anaconda\lib\site-packages\tensorflow_model_optimization\python\core\keras\metrics.py in inner(*args, **kwargs)
     72       except Exception as error:
     73         self.bool_gauge.get_cell(MonitorBoolGauge._FAILURE_LABEL).set(True)
---> 74         raise error
     75 
     76     if self.bool_gauge:

A:\Anaconda\lib\site-packages\tensorflow_model_optimization\python\core\keras\metrics.py in inner(*args, **kwargs)
     67     def inner(*args, **kwargs):
     68       try:
---> 69         results = func(*args, **kwargs)
     70         self.bool_gauge.get_cell(MonitorBoolGauge._SUCCESS_LABEL).set(True)
     71         return results

A:\Anaconda\lib\site-packages\tensorflow_model_optimization\python\core\sparsity\keras\prune.py in prune_low_magnitude(to_prune, pruning_schedule, block_size, block_pooling_type, pruning_policy, sparsity_m_by_n, **kwargs)
    208     if pruning_policy:
    209       pruning_policy.ensure_model_supports_pruning(to_prune)
--> 210     return _add_pruning_wrapper(to_prune)
    211   elif is_keras_layer:
    212     params.update(kwargs)

A:\Anaconda\lib\site-packages\tensorflow_model_optimization\python\core\sparsity\keras\prune.py in _add_pruning_wrapper(layer)
    179         raise ValueError('Subclassed models are not supported currently.')
    180 
--> 181       return keras.models.clone_model(
    182           layer, input_tensors=None, clone_function=_add_pruning_wrapper)
    183     if isinstance(layer, pruning_wrapper.PruneLowMagnitude):

A:\Anaconda\lib\site-packages\keras\models\cloning.py in clone_model(model, input_tensors, clone_function)
    446           model, input_tensors=input_tensors, layer_fn=clone_function)
    447     else:
--> 448       return _clone_functional_model(
    449           model, input_tensors=input_tensors, layer_fn=clone_function)
    450 

A:\Anaconda\lib\site-packages\keras\models\cloning.py in _clone_functional_model(model, input_tensors, layer_fn)
    187   # Reconstruct model from the config, using the cloned layers.
    188   input_tensors, output_tensors, created_layers = (
--> 189       functional.reconstruct_from_config(model_configs,
    190                                          created_layers=created_layers))
    191   metrics_names = model.metrics_names

A:\Anaconda\lib\site-packages\keras\engine\functional.py in reconstruct_from_config(config, custom_objects, created_layers)
   1310         while layer_nodes:
   1311           node_data = layer_nodes[0]
-> 1312           if process_node(layer, node_data):
   1313             layer_nodes.pop(0)
   1314           else:

A:\Anaconda\lib\site-packages\keras\engine\functional.py in process_node(layer, node_data)
   1254         input_tensors = (
   1255             base_layer_utils.unnest_if_single_tensor(input_tensors))
-> 1256       output_tensors = layer(input_tensors, **kwargs)
   1257 
   1258       # Update node index map.

A:\Anaconda\lib\site-packages\keras\utils\traceback_utils.py in error_handler(*args, **kwargs)
     65     except Exception as e:  # pylint: disable=broad-except
     66       filtered_tb = _process_traceback_frames(e.__traceback__)
---> 67       raise e.with_traceback(filtered_tb) from None
     68     finally:
     69       del filtered_tb

A:\Anaconda\lib\site-packages\tensorflow_model_optimization\python\core\sparsity\keras\pruning_wrapper.py in tf__call(self, inputs, training, **kwargs)
     71                 update_mask = ag__.converted_call(ag__.ld(utils).smart_cond, (ag__.ld(training), ag__.ld(add_update), ag__.ld(no_op)), None, fscope)
     72                 ag__.converted_call(ag__.ld(self).add_update, (ag__.ld(update_mask),), None, fscope)
---> 73                 ag__.converted_call(ag__.ld(self).add_update, (ag__.converted_call(ag__.ld(self).pruning_obj.weight_mask_op, (), None, fscope),), None, fscope)
     74 
     75                 def get_state_1():

A:\Anaconda\lib\site-packages\tensorflow_model_optimization\python\core\sparsity\keras\pruning_impl.py in tf__weight_mask_op(self)
     11                 try:
     12                     do_return = True
---> 13                     retval_ = ag__.converted_call(ag__.ld(tf).group, (ag__.converted_call(ag__.ld(self)._weight_assign_objs, (), None, fscope),), None, fscope)
     14                 except:
     15                     do_return = False

A:\Anaconda\lib\site-packages\tensorflow_model_optimization\python\core\sparsity\keras\pruning_impl.py in tf___weight_assign_objs(self)
    122                 _ = ag__.Undefined('_')
    123                 masked_weight = ag__.Undefined('masked_weight')
--> 124                 ag__.if_stmt(ag__.converted_call(ag__.ld(tf).distribute.get_replica_context, (), None, fscope), if_body_1, else_body_1, get_state_4, set_state_4, (), 0)
    125                 try:
    126                     do_return = True

A:\Anaconda\lib\site-packages\tensorflow_model_optimization\python\core\sparsity\keras\pruning_impl.py in if_body_1()
     98                     def else_body():
     99                         pass
--> 100                     ag__.if_stmt(ag__.ld(values_and_vars), if_body, else_body, get_state_2, set_state_2, (), 0)
    101 
    102                 def else_body_1():

A:\Anaconda\lib\site-packages\tensorflow_model_optimization\python\core\sparsity\keras\pruning_impl.py in if_body()
     94 
     95                     def if_body():
---> 96                         ag__.converted_call(ag__.ld(assign_objs).append, (ag__.converted_call(ag__.converted_call(ag__.ld(tf).distribute.get_replica_context, (), None, fscope).merge_call, (ag__.ld(update_fn),), dict(args=(ag__.ld(values_and_vars),)), fscope),), None, fscope)
     97 
     98                     def else_body():

A:\Anaconda\lib\site-packages\tensorflow_model_optimization\python\core\sparsity\keras\pruning_impl.py in update_fn(distribution, values_and_vars)
     52                         var = ag__.Undefined('var')
     53                         value = ag__.Undefined('value')
---> 54                         ag__.for_stmt(ag__.ld(values_and_vars), None, loop_body, get_state, set_state, (), {'iterate_names': '(value, var)'})
     55                         try:
     56                             do_return_1 = True

A:\Anaconda\lib\site-packages\tensorflow_model_optimization\python\core\sparsity\keras\pruning_impl.py in loop_body(itr)
     49                         def loop_body(itr):
     50                             (value, var) = itr
---> 51                             ag__.converted_call(ag__.ld(update_objs).append, (ag__.converted_call(ag__.ld(distribution).extended.update, (ag__.ld(var), ag__.ld(update_var)), dict(args=(ag__.ld(value),)), fscope_1),), None, fscope_1)
     52                         var = ag__.Undefined('var')
     53                         value = ag__.Undefined('value')

A:\Anaconda\lib\site-packages\tensorflow_model_optimization\python\core\sparsity\keras\pruning_impl.py in update_var(variable, reduced_value)
     34                                 try:
     35                                     do_return_2 = True
---> 36                                     retval__2 = ag__.converted_call(ag__.ld(tf_compat).assign, (ag__.ld(variable), ag__.ld(reduced_value)), None, fscope_2)
     37                                 except:
     38                                     do_return_2 = False

A:\Anaconda\lib\site-packages\tensorflow_model_optimization\python\core\keras\compat.py in tf__assign(ref, value, name)
     34                         do_return = False
     35                         raise
---> 36                 ag__.if_stmt(ag__.converted_call(ag__.ld(hasattr), (ag__.ld(tf), 'assign'), None, fscope), if_body, else_body, get_state, set_state, ('do_return', 'retval_'), 2)
     37                 return fscope.ret(retval_, do_return)
     38         return tf__assign

A:\Anaconda\lib\site-packages\tensorflow_model_optimization\python\core\keras\compat.py in else_body()
     30                     try:
     31                         do_return = True
---> 32                         retval_ = ag__.converted_call(ag__.ld(ref).assign, (ag__.ld(value),), dict(name=ag__.ld(name)), fscope)
     33                     except:
     34                         do_return = False

AttributeError: Exception encountered when calling layer "prune_low_magnitude_conv2d" (type PruneLowMagnitude).

in user code:

    File "A:\Anaconda\lib\site-packages\tensorflow_model_optimization\python\core\sparsity\keras\pruning_wrapper.py", line 288, in call  *
        self.add_update(self.pruning_obj.weight_mask_op())
    File "A:\Anaconda\lib\site-packages\tensorflow_model_optimization\python\core\sparsity\keras\pruning_impl.py", line 254, in weight_mask_op  *
        return tf.group(self._weight_assign_objs())
    File "A:\Anaconda\lib\site-packages\tensorflow_model_optimization\python\core\sparsity\keras\pruning_impl.py", line 225, in update_var  *
        return tf_compat.assign(variable, reduced_value)
    File "A:\Anaconda\lib\site-packages\tensorflow_model_optimization\python\core\keras\compat.py", line 28, in assign  *
        return ref.assign(value, name=name)

    AttributeError: 'tensorflow.python.framework.ops.EagerTensor' object has no attribute 'assign'

Call arguments received by layer "prune_low_magnitude_conv2d" (type PruneLowMagnitude):
  • inputs=tf.Tensor(shape=(None, 14, 8, 8), dtype=float32)
  • training=False
  • kwargs=<class 'inspect._empty'>

I tried following the example from here, but with my own model.


gudnpqoy1#

## Pruning

import tensorflow as tf
import tensorflow_model_optimization as tfmot
from tensorflow.keras import callbacks, optimizers

base_model = tf.keras.models.load_model('modelclustored2.h5')
base_model.load_weights(pretrained_weights)  # optional but recommended

# Wrap the whole model for pruning with the default pruning schedule.
model_for_pruning = tfmot.sparsity.keras.prune_low_magnitude(base_model)

# UpdatePruningStep is required so the pruning masks are updated during training.
pruning_callbacks = [
    tfmot.sparsity.keras.UpdatePruningStep(),
    # Log sparsity and other metrics in Tensorboard.
    # tfmot.sparsity.keras.PruningSummaries(log_dir=log_dir),
    callbacks.ReduceLROnPlateau(monitor='loss', patience=10),
    callbacks.EarlyStopping(monitor='loss', patience=15, min_delta=1e-4),
]

model_for_pruning.compile(optimizer=optimizers.Adam(5e-4),
                          loss='mean_squared_error',
                          metrics=['mae', 'mse'])

model_for_pruning.summary()

You just need to approach it a bit differently.
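
For reference, here is a minimal training sketch (not part of the original answer) showing how those callbacks are actually used; it assumes x_train from the question plus matching labels y_train, and reuses the question's hyperparameters:

# Minimal sketch, not from the original answer: x_train / y_train and the
# hyperparameter values are assumptions carried over from the question.
model_for_pruning.fit(x_train, y_train,
                      batch_size=2048,
                      epochs=15,
                      validation_split=0.1,
                      callbacks=pruning_callbacks)

# After training, strip the pruning wrappers (as in the question) before saving.
pruned_model = tfmot.sparsity.keras.strip_pruning(model_for_pruning)
pruned_model.summary()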
