python3 try_except_else_finally_assert_raise.py

This article takes an in-depth look at Python's exception-handling mechanism: how to use the try-except-else-finally structure, how to catch and handle several kinds of exceptions, and how to raise exceptions with raise. It also covers using the finally clause to make sure clean-up code runs, and the role of the with statement in resource management.


"""
模块:python3 try_except_else_finally_assert_raise.py
功能:python3 错误和异常。
参考:https://www.runoob.com/python3/python3-errors-execptions.html
知识点:
1.try-except-else-finally
其中,except, else,finally为可选子句。
except,用于捕获异常并处理。
else字句,在没有异常时,会执行。
finally,不管有没有异常都会执行,
如果有异常未被捕获,会先执行finally子句,再抛出异常。

2.一个except子句可以同时处理多个异常,
这些异常将被放在一个括号里成为一个元组,例如:
except (RuntimeError, TypeError, NameError):
        pass
"""
import sys

# # 1.
# while True:
#     try:
#         x = int(input("Enter a number: "))
#         break
#     except ValueError:
#         print("无效数字,请重试!")

# # 2. Command: python cs.py cs3.txt
# print("sys.argv:", sys.argv)
# # sys.argv: ['cs.py', 'cs3.txt']
# for arg in sys.argv[1:]:
#     try:
#         f = open(arg, 'r')
#     except IOError:
#         print('cannot open', arg)
#     else:
#         print(arg, 'has', len(f.readlines()), 'lines')
#         f.close()
# # The try ... except statement also has an optional else clause;
# # if present, it must come after all the except clauses.
# # It runs only when the try clause does not raise any exception.

# # 3. Exception handlers do not handle only exceptions that occur directly
# # in the try clause; they also handle exceptions raised inside functions
# # called (even indirectly) from the try clause.
# def this_fails():
#     x = 1 / 0
#
#
# try:
#     this_fails()
# except ZeroDivisionError as err:
#     print('error:', err)
#     # error: division by zero

# # 4. The single argument to raise names the exception to be raised.
# # It must be an exception instance or an exception class (a subclass of Exception).
# # If you only want to know whether an exception was raised but do not
# # intend to handle it, a bare raise statement re-raises it.
# try:
#     raise NameError('HiThere')
# except NameError:
#     print('An exception!')
#     raise
# # NameError: HiThere

# # 5. The finally clause runs whether or not the try clause raised an exception.
# # If an exception is raised in the try clause (or in an except or else clause)
# # and no except clause catches it,
# # it is re-raised after the finally clause has run.
# def divide(x, y):
#     try:
#         result = x / y
#     except ZeroDivisionError:
#         print("除以零!")
#     else:
#         print("结果:", result)
#     finally:
#         print("执行最后子句。")
#
#
# divide(2, 1)
# # result: 2.0
# # executing the finally clause
# divide(2, 0)
# # division by zero!
# # executing the finally clause
# divide("2", "1")
# # executing the finally clause
# # TypeError: unsupported operand type(s) for /: 'str' and 'str'

# # 6.
# try:
#     raise KeyboardInterrupt
# finally:
#     print('Goodbye, world!')
# # Goodbye, world!
# # KeyboardInterrupt

# # 7. The last except clause may omit the exception name, in which case it acts as a wildcard.
# # You can use it to print an error message and then re-raise the exception.
# try:
#     f = open('cs.txt')
#     s = f.readline()
#     i = int(s.strip())
# except OSError as err:
#     print("OS error: {0}".format(err))
#     # OS error: [Errno 2] No such file or directory: 'cs.txt'
# except ValueError:
#     print("无法将数据转换为整数。")
#     # 无法将数据转换为整数。
#     raise
# except:
#     print("Unexpected error:", sys.exc_info()[0])
#     raise

# # 8. Predefined clean-up actions
# # Some objects define standard clean-up actions that run once the object
# # is no longer needed, whether or not the operation that used it succeeded.
# # The with statement guarantees that objects such as files run their
# # clean-up method promptly after use:
# # even if a problem occurs during processing, the file f is always closed.
# with open("cs.txt", encoding="utf-8") as f:
#     print("f:", f)
#     # f: <_io.TextIOWrapper name='cs.txt' mode='r' encoding='utf-8'>
#     for line in f:
#         # use the file's own newlines.
#         print(line, end="")


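# # 10. assert appears in the file name but has no example above; a minimal
# # sketch follows. assert evaluates a condition and raises AssertionError when
# # it is false; the optional second expression becomes the error message.
# # Note: assertions are skipped entirely when Python runs with the -O flag,
# # so they are for catching programming errors, not for validating user input.
# # The mean() helper is illustrative only.
# def mean(values):
#     assert len(values) > 0, "values must not be empty"
#     return sum(values) / len(values)
#
#
# print(mean([1, 2, 3]))
# # 2.0
# try:
#     mean([])
# except AssertionError as err:
#     print("AssertionError:", err)
#     # AssertionError: values must not be empty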