我搭建的量子神经网络在训练时为什么会报出如下 ValueError?(报错提示 Encoder 数据的第二维应为 0,但实际输入形状是 (64, 16))
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
Cell In[105], line 3
1 for epoch in range(epochs):
2 print(f"epoch {epoch+1}\n-------------------------")
----> 3 train(model,train_ds)
4 test(model,test_ds,loss_fn)
5 print("Done!")
Cell In[104], line 19, in train(model, dataset)
17 model.set_train()
18 for batch,(data,label) in enumerate(dataset.create_tuple_iterator()):
---> 19 loss=train_step(data,label)
21 if batch%100==0:
22 loss,current=loss.asnumpy(),batch
Cell In[104], line 10, in train_step(data, label)
9 def train_step(data,label):
---> 10 (loss,_),grads=grad_fn(data,label)
11 optimizer(grads)
12 return loss
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\ops\composite\base.py:638, in _Grad.__call__.<locals>.after_grad(*args, **kwargs)
637 def after_grad(*args, **kwargs):
--> 638 return grad_(fn_, weights)(*args, **kwargs)
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\common\api.py:187, in _wrap_func.<locals>.wrapper(*arg, **kwargs)
185 @wraps(fn)
186 def wrapper(*arg, **kwargs):
--> 187 results = fn(*arg, **kwargs)
188 return _convert_python_data(results)
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\ops\composite\base.py:610, in _Grad.__call__.<locals>.after_grad(*args, **kwargs)
608 @_wrap_func
609 def after_grad(*args, **kwargs):
--> 610 run_args, res = self._pynative_forward_run(fn, grad_, weights, *args, **kwargs)
611 if self.has_aux:
612 out = _pynative_executor.grad_aux(fn, grad_, weights, grad_position, *run_args)
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\ops\composite\base.py:671, in _Grad._pynative_forward_run(self, fn, grad, weights, *args, **kwargs)
669 _pynative_executor.set_grad_flag(True)
670 _pynative_executor.new_graph(fn, *args, **kwargs)
--> 671 outputs = fn(*args, **kwargs)
672 _pynative_executor.end_graph(fn, outputs, *args, **kwargs)
673 run_forward = True
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\ops\composite\base.py:578, in _Grad.__call__.<locals>.aux_fn(*args, **kwargs)
577 def aux_fn(*args, **kwargs):
--> 578 outputs = fn(*args, **kwargs)
579 if not isinstance(outputs, tuple) or len(outputs) < 2:
580 raise ValueError("When has_aux is True, origin fn requires more than one outputs.")
Cell In[104], line 3, in forward_fn(data, label)
2 def forward_fn(data,label):
----> 3 logits=model(data)
4 loss=loss_fn(logits,label)
5 return loss,logits
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\nn\cell.py:1355, in Cell.__call__(self, *args, **kwargs)
1352 if not (self.requires_grad or self._dynamic_shape_inputs or self.mixed_precision_type):
1353 if not (self._forward_pre_hook or self._forward_hook or self._backward_pre_hook or self._backward_hook or
1354 self._shard_fn or self._recompute_cell or (self.has_bprop and _pynative_executor.requires_grad())):
-> 1355 return self.construct(*args, **kwargs)
1357 return self._run_construct(*args, **kwargs)
1359 return self._complex_call(*args, **kwargs)
Cell In[87], line 16, in Net.construct(self, x)
14 def construct(self,x):
15 x=self.flatten(x)
---> 16 logits=self.conv(x)
17 return logits
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\nn\cell.py:1355, in Cell.__call__(self, *args, **kwargs)
1352 if not (self.requires_grad or self._dynamic_shape_inputs or self.mixed_precision_type):
1353 if not (self._forward_pre_hook or self._forward_hook or self._backward_pre_hook or self._backward_hook or
1354 self._shard_fn or self._recompute_cell or (self.has_bprop and _pynative_executor.requires_grad())):
-> 1355 return self.construct(*args, **kwargs)
1357 return self._run_construct(*args, **kwargs)
1359 return self._complex_call(*args, **kwargs)
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\nn\layer\container.py:295, in SequentialCell.construct(self, input_data)
293 def construct(self, input_data):
294 for cell in self.cell_list:
--> 295 input_data = cell(input_data)
296 return input_data
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\nn\cell.py:1355, in Cell.__call__(self, *args, **kwargs)
1352 if not (self.requires_grad or self._dynamic_shape_inputs or self.mixed_precision_type):
1353 if not (self._forward_pre_hook or self._forward_hook or self._backward_pre_hook or self._backward_hook or
1354 self._shard_fn or self._recompute_cell or (self.has_bprop and _pynative_executor.requires_grad())):
-> 1355 return self.construct(*args, **kwargs)
1357 return self._run_construct(*args, **kwargs)
1359 return self._complex_call(*args, **kwargs)
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindquantum\framework\layer.py:94, in MQLayer.construct(self, arg)
92 def construct(self, arg):
93 """Construct a MQLayer node."""
---> 94 return self.evolution(arg, self.weight)
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\nn\cell.py:1357, in Cell.__call__(self, *args, **kwargs)
1353 if not (self._forward_pre_hook or self._forward_hook or self._backward_pre_hook or self._backward_hook or
1354 self._shard_fn or self._recompute_cell or (self.has_bprop and _pynative_executor.requires_grad())):
1355 return self.construct(*args, **kwargs)
-> 1357 return self._run_construct(*args, **kwargs)
1359 return self._complex_call(*args, **kwargs)
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\nn\cell.py:1063, in Cell._run_construct(self, *args, **kwargs)
1061 output = self._recompute_cell(*args, **kwargs)
1062 elif self.has_bprop:
-> 1063 output = self._call_custom_bprop(*args, **kwargs)
1064 else:
1065 output = self.construct(*args, **kwargs)
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\nn\cell.py:1411, in Cell._call_custom_bprop(self, *args, **kwargs)
1407 """
1408 Call custom bprop for cell bprop.
1409 """
1410 with _no_grad():
-> 1411 output = self.construct(*args, **kwargs)
1412 return _pynative_executor.call_custom_bprop(self, output, *args, **kwargs)
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindquantum\framework\operations.py:134, in MQOps.construct(self, enc_data, ans_data)
132 def construct(self, enc_data, ans_data):
133 """Construct an MQOps node."""
--> 134 check_enc_input_shape(enc_data, self.shape_ops(enc_data), len(self.expectation_with_grad.encoder_params_name))
135 check_ans_input_shape(ans_data, self.shape_ops(ans_data), len(self.expectation_with_grad.ansatz_params_name))
136 fval, g_enc, g_ans = self.expectation_with_grad(enc_data.asnumpy(), ans_data.asnumpy())
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\ops\primitive.py:955, in constexpr.<locals>.decorator.<locals>.ProxyOp.__call__(self, *args, **kwargs)
954 def __call__(self, *args, **kwargs):
--> 955 return fn(*args, **kwargs)
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindquantum\framework\operations.py:38, in check_enc_input_shape(data, encoder_tensor, enc_len)
36 raise TypeError(f"Encoder parameter requires a Tensor but get {type(data)}")
37 if len(encoder_tensor) != 2 or encoder_tensor[1] != enc_len:
---> 38 raise ValueError(
39 'Encoder data requires a two dimension Tensor with second'
40 + f' dimension should be {enc_len}, but get shape {encoder_tensor}'
41 )
ValueError: Encoder data requires a two dimension Tensor with second dimension should be 0, but get shape (64, 16)