---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
Cell In[13], line 6
4 for epoch in range(epochs):
5 train_ds=data_load("MNIST_Data/train",batch_size).take(2500)
----> 6 train_loop(model, train_ds)
7 test_ds=data_load("MNIST_Data/test",batch_size).take(1000)
8 loss,accuracy=test_loop(model, test_ds, loss_fn)
Cell In[12], line 33, in train_loop(model, dataset)
31 # train_step(data, label)
32 for data,label in dataset:
---> 33 loss = train_step(data, label)
34 if step%100==0:
35 print(f"step: {step}, loss: {loss}")
Cell In[12], line 20, in train_step(data, label)
19 def train_step(data, label):
---> 20 (loss, _), grads = grad_fn(data, label)
21 optimizer(grads)
22 # t=count()
23 # if t%10==0:
24 # print("累计100个")
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\ops\composite\base.py:638, in _Grad.__call__.<locals>.after_grad(*args, **kwargs)
637 def after_grad(*args, **kwargs):
--> 638 return grad_(fn_, weights)(*args, **kwargs)
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\common\api.py:187, in _wrap_func.<locals>.wrapper(*arg, **kwargs)
185 @wraps(fn)
186 def wrapper(*arg, **kwargs):
--> 187 results = fn(*arg, **kwargs)
188 return _convert_python_data(results)
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\ops\composite\base.py:610, in _Grad.__call__.<locals>.after_grad(*args, **kwargs)
608 @_wrap_func
609 def after_grad(*args, **kwargs):
--> 610 run_args, res = self._pynative_forward_run(fn, grad_, weights, *args, **kwargs)
611 if self.has_aux:
612 out = _pynative_executor.grad_aux(fn, grad_, weights, grad_position, *run_args)
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\ops\composite\base.py:671, in _Grad._pynative_forward_run(self, fn, grad, weights, *args, **kwargs)
669 _pynative_executor.set_grad_flag(True)
670 _pynative_executor.new_graph(fn, *args, **kwargs)
--> 671 outputs = fn(*args, **kwargs)
672 _pynative_executor.end_graph(fn, outputs, *args, **kwargs)
673 run_forward = True
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\ops\composite\base.py:578, in _Grad.__call__.<locals>.aux_fn(*args, **kwargs)
577 def aux_fn(*args, **kwargs):
--> 578 outputs = fn(*args, **kwargs)
579 if not isinstance(outputs, tuple) or len(outputs) < 2:
580 raise ValueError("When has_aux is True, origin fn requires more than one outputs.")
Cell In[12], line 12, in forward_fn(data, label)
11 def forward_fn(data, label):
---> 12 logits = model(data)
13 loss = loss_fn(logits, label)
14 return loss, logits
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\nn\cell.py:1355, in Cell.__call__(self, *args, **kwargs)
1352 if not (self.requires_grad or self._dynamic_shape_inputs or self.mixed_precision_type):
1353 if not (self._forward_pre_hook or self._forward_hook or self._backward_pre_hook or self._backward_hook or
1354 self._shard_fn or self._recompute_cell or (self.has_bprop and _pynative_executor.requires_grad())):
-> 1355 return self.construct(*args, **kwargs)
1357 return self._run_construct(*args, **kwargs)
1359 return self._complex_call(*args, **kwargs)
Cell In[11], line 123, in QCNN.construct(self, x)
121 x = self.flatten(x) # (B, 200)
122 logits = self.classifier(x)
--> 123 logits=self.qconv(logits)
124 return logits
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\nn\cell.py:1355, in Cell.__call__(self, *args, **kwargs)
1352 if not (self.requires_grad or self._dynamic_shape_inputs or self.mixed_precision_type):
1353 if not (self._forward_pre_hook or self._forward_hook or self._backward_pre_hook or self._backward_hook or
1354 self._shard_fn or self._recompute_cell or (self.has_bprop and _pynative_executor.requires_grad())):
-> 1355 return self.construct(*args, **kwargs)
1357 return self._run_construct(*args, **kwargs)
1359 return self._complex_call(*args, **kwargs)
Cell In[11], line 60, in QuantumConvLayer.construct(self, x)
58 def construct(self, x):
59 # x shape: (B, C, H, W), e.g., (B, 1, 10, 10)
---> 60 b, c, h, w = x.shape
61 k, s = self.k, self.s
63 # 确保尺寸可整除
ValueError: not enough values to unpack (expected 4, got 2)
以上报错是什么意思?

Answer: the `ValueError: not enough values to unpack (expected 4, got 2)` means the tensor reaching `QuantumConvLayer.construct` is 2-dimensional, but the line `b, c, h, w = x.shape` requires a 4-D shape `(B, C, H, W)`. The traceback shows why: in `QCNN.construct`, `x` is first flattened and passed through `self.classifier`, so `logits` has shape `(batch, features)` — only two values — before being fed into `self.qconv(logits)`. To fix it, either reshape the classifier output back to `(B, C, H, W)` before calling `self.qconv`, or apply the quantum convolution earlier in the network, while the activation is still a 4-D feature map.