import mindspore as ms
import mindspore.nn as nn
import mindspore.ops as ops
import numpy as np
from mindquantum import Circuit, Hamiltonian, QubitOperator, Simulator
from mindquantum.core import RX, RY, RZ, H, CNOT
from mindquantum.framework import MQLayer
# Removed the incorrect import: from mindquantum.core.operators import Encoder
class QuantumConvLayer(nn.Cell):
    def __init__(self, kernel_size=3, n_kernels=2, dilation=1, stride=2):
        super().__init__()
        self.kernel_size = kernel_size
        self.n_kernels = n_kernels
        self.dilation = dilation
        self.stride = stride
        self.n_qubits = int(np.ceil(np.log2(kernel_size**2)))
        # Build the quantum circuits and Hamiltonians
        self.circuits, self.hamiltonians = self._build_circuits_and_hams()
        # Create one MindQuantum layer per quantum kernel
        self.mq_layers = nn.CellList()
        self.sim = Simulator('mqvector', self.n_qubits)
        for i in range(self.n_kernels):
            grad_ops = self.sim.get_expectation_with_grad(self.hamiltonians[i], self.circuits[i])
            mq_layer = MQLayer(grad_ops)
            self.mq_layers.append(mq_layer)
    def _build_circuits_and_hams(self):
        circuits = []
        hamiltonians = []
        for _ in range(self.n_kernels):
            circ = Circuit()
            # Key fix: define the encoding parameters directly with an 'alpha_' prefix.
            # The assumption is that MindQuantum will automatically recognize these
            # parameters as requiring external input.
            for j in range(self.kernel_size**2):
                param_name = f'alpha_{j}'  # alpha_j corresponds to input datum x_j
                wire = j % self.n_qubits
                if j % 3 == 0:
                    circ += RX(param_name).on(wire)
                elif j % 3 == 1:
                    circ += RY(param_name).on(wire)
                else:
                    circ += RZ(param_name).on(wire)
            # Add the learnable ansatz parameters (rotation gates);
            # these are the theta_j that combine with the encoding parameters
            for j in range(self.kernel_size**2):
                param_name = f'theta_{j}'  # theta_j is learnable
                wire = j % self.n_qubits
                if j % 3 == 0:
                    circ += RX(param_name).on(wire)
                elif j % 3 == 1:
                    circ += RY(param_name).on(wire)
                else:
                    circ += RZ(param_name).on(wire)
            # Add Hadamard and CNOT gates
            for i in range(self.n_qubits):
                circ += H.on(i)
            for i in range(self.n_qubits - 1):
                if np.random.rand() > 0.3:
                    circ += CNOT.on(i + 1, i)
            circuits.append(circ)
            # Build the Hamiltonian
            ham_str = ''
            for q in range(self.n_qubits):
                ham_str += f'Z{q} '
            ham = Hamiltonian(QubitOperator(ham_str))
            hamiltonians.append(ham)
        return circuits, hamiltonians
    def construct(self, x):
        batch_size, _, height, width = x.shape
        # Standard convolution output-size formula (zero padding)
        out_height = (height + 2*0 - self.dilation*(self.kernel_size-1) - 1)//self.stride + 1
        out_width = (width + 2*0 - self.dilation*(self.kernel_size-1) - 1)//self.stride + 1
        output = ops.zeros((batch_size, self.n_kernels*self.n_qubits, out_height, out_width), ms.float32)
        for b in range(batch_size):
            for i in range(out_height):
                for j in range(out_width):
                    h_start = i * self.stride
                    w_start = j * self.stride
                    patch = x[b, :, h_start:h_start+self.kernel_size*self.dilation:self.dilation,
                              w_start:w_start+self.kernel_size*self.dilation:self.dilation].flatten()
                    for k in range(self.n_kernels):
                        # Prepare the encoder data with shape (1, kernel_size**2)
                        data_encoded = ms.Tensor(patch.reshape(1, -1), ms.float32)
                        # MQLayer now receives the encoder data; its internal
                        # trainable parameters are updated automatically
                        results = self.mq_layers[k](data_encoded)
                        for q in range(self.n_qubits):
                            # the result is 2-D, so take the first row
                            output[b, k*self.n_qubits+q, i, j] = results[0, q]
        return output
class QuantumCNN(nn.Cell):
    def __init__(self, kernel_size=3, n_kernels=2, dilation=1):
        super().__init__()
        self.quantum_conv = QuantumConvLayer(kernel_size, n_kernels, dilation)
        self.flatten = nn.Flatten()
        self.fc = nn.Dense(n_kernels * int(np.ceil(np.log2(kernel_size**2))) * 14 * 14, 10)
        self.softmax = nn.Softmax(axis=1)

    def construct(self, x):
        x = self.quantum_conv(x)
        x = self.flatten(x)
        x = self.fc(x)
        return self.softmax(x)
import mindspore.dataset as ds
from mindspore.train import Model, LossMonitor
from mindspore.dataset.vision import transforms

# Data preprocessing
def create_dataset(data_path, batch_size=32):
    transform = [
        transforms.Rescale(1.0 / 255.0, 0),
        transforms.HWC2CHW()
    ]
    dataset = ds.MnistDataset(data_path)
    dataset = dataset.map(operations=transform, input_columns="image")
    # dataset = dataset.map(lambda x: [x], input_columns='image')
    dataset = dataset.batch(batch_size)
    return dataset
# Create the datasets
train_dataset = create_dataset("MNIST/train")
test_dataset = create_dataset("MNIST/test")

# Initialize the model
model = QuantumCNN(kernel_size=3, n_kernels=2, dilation=2)  # best configuration

# loss = nn.CrossEntropyLoss()
# opt = nn.Adam(params=net.trainable_params(), learning_rate=0.01)
# model = Model(net, loss, opt, metrics={"Accuracy": nn.Accuracy()})

# Instantiate loss function and optimizer
loss_fn = nn.CrossEntropyLoss()
optimizer = nn.Adam(model.trainable_params(), 1e-2)
# 1. Define the forward function
def forward_fn(data, label):
    logits = model(data)
    loss = loss_fn(logits, label)
    return loss, logits

# 2. Get the gradient function
grad_fn = ms.value_and_grad(forward_fn, None, optimizer.parameters, has_aux=True)

# 3. Define the function for one training step
def train_step(data, label):
    (loss, _), grads = grad_fn(data, label)
    optimizer(grads)
    return loss

def train(model, dataset):
    size = dataset.get_dataset_size()
    model.set_train()
    for batch, (data, label) in enumerate(dataset.create_tuple_iterator()):
        loss = train_step(data, label)
        if batch % 100 == 0:
            loss, current = loss.asnumpy(), batch
            print(f"loss: {loss:>7f} [{current:>3d}/{size:>3d}]")

# Training
# model.train(20, train_dataset, callbacks=[LossMonitor(0.01)], dataset_sink_mode=False)
train(model, train_dataset)
Given the code above, why is the following error raised, and how should it be fixed?
ValueError Traceback (most recent call last)
Cell In[1], line 180
175 print(f"loss: {loss:>7f} [{current:>3d}/{size:>3d}]")
178 # 训练
179 # model.train(20, train_dataset, callbacks=[LossMonitor(0.01)], dataset_sink_mode=False)
--> 180 train(model, train_dataset)
Cell In[1], line 171, in train(model, dataset)
169 model.set_train()
170 for batch, (data, label) in enumerate(dataset.create_tuple_iterator()):
--> 171 loss = train_step(data, label)
173 if batch % 100 == 0:
174 loss, current = loss.asnumpy(), batch
Cell In[1], line 163, in train_step(data, label)
162 def train_step(data, label):
--> 163 (loss, _), grads = grad_fn(data, label)
164 optimizer(grads)
165 return loss
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\ops\composite\base.py:638, in _Grad.__call__.<locals>.after_grad(*args, **kwargs)
637 def after_grad(*args, **kwargs):
--> 638 return grad_(fn_, weights)(*args, **kwargs)
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\common\api.py:187, in _wrap_func.<locals>.wrapper(*arg, **kwargs)
185 @wraps(fn)
186 def wrapper(*arg, **kwargs):
--> 187 results = fn(*arg, **kwargs)
188 return _convert_python_data(results)
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\ops\composite\base.py:610, in _Grad.__call__.<locals>.after_grad(*args, **kwargs)
608 @_wrap_func
609 def after_grad(*args, **kwargs):
--> 610 run_args, res = self._pynative_forward_run(fn, grad_, weights, *args, **kwargs)
611 if self.has_aux:
612 out = _pynative_executor.grad_aux(fn, grad_, weights, grad_position, *run_args)
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\ops\composite\base.py:671, in _Grad._pynative_forward_run(self, fn, grad, weights, *args, **kwargs)
669 _pynative_executor.set_grad_flag(True)
670 _pynative_executor.new_graph(fn, *args, **kwargs)
--> 671 outputs = fn(*args, **kwargs)
672 _pynative_executor.end_graph(fn, outputs, *args, **kwargs)
673 run_forward = True
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\ops\composite\base.py:578, in _Grad.__call__.<locals>.aux_fn(*args, **kwargs)
577 def aux_fn(*args, **kwargs):
--> 578 outputs = fn(*args, **kwargs)
579 if not isinstance(outputs, tuple) or len(outputs) < 2:
580 raise ValueError("When has_aux is True, origin fn requires more than one outputs.")
Cell In[1], line 154, in forward_fn(data, label)
153 def forward_fn(data, label):
--> 154 logits = model(data)
155 loss = loss_fn(logits, label)
156 return loss, logits
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\nn\cell.py:1355, in Cell.__call__(self, *args, **kwargs)
1352 if not (self.requires_grad or self._dynamic_shape_inputs or self.mixed_precision_type):
1353 if not (self._forward_pre_hook or self._forward_hook or self._backward_pre_hook or self._backward_hook or
1354 self._shard_fn or self._recompute_cell or (self.has_bprop and _pynative_executor.requires_grad())):
-> 1355 return self.construct(*args, **kwargs)
1357 return self._run_construct(*args, **kwargs)
1359 return self._complex_call(*args, **kwargs)
Cell In[1], line 117, in QuantumCNN.construct(self, x)
116 def construct(self, x):
--> 117 x = self.quantum_conv(x)
118 x = self.flatten(x)
119 x = self.fc(x)
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\nn\cell.py:1355, in Cell.__call__(self, *args, **kwargs)
1352 if not (self.requires_grad or self._dynamic_shape_inputs or self.mixed_precision_type):
1353 if not (self._forward_pre_hook or self._forward_hook or self._backward_pre_hook or self._backward_hook or
1354 self._shard_fn or self._recompute_cell or (self.has_bprop and _pynative_executor.requires_grad())):
-> 1355 return self.construct(*args, **kwargs)
1357 return self._run_construct(*args, **kwargs)
1359 return self._complex_call(*args, **kwargs)
Cell In[1], line 100, in QuantumConvLayer.construct(self, x)
97 data_encoded = ms.Tensor(patch.reshape(1, -1), ms.float32)
99 # MQLayer现在接收编码数据,内部可训练参数会自动更新
--> 100 results = self.mq_layers[k](data_encoded)
102 for q in range(self.n_qubits):
103 output[b, k*self.n_qubits+q, i, j] = results[0, q] # 结果是二维的,取第一行
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\nn\cell.py:1355, in Cell.__call__(self, *args, **kwargs)
1352 if not (self.requires_grad or self._dynamic_shape_inputs or self.mixed_precision_type):
1353 if not (self._forward_pre_hook or self._forward_hook or self._backward_pre_hook or self._backward_hook or
1354 self._shard_fn or self._recompute_cell or (self.has_bprop and _pynative_executor.requires_grad())):
-> 1355 return self.construct(*args, **kwargs)
1357 return self._run_construct(*args, **kwargs)
1359 return self._complex_call(*args, **kwargs)
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindquantum\framework\layer.py:94, in MQLayer.construct(self, arg)
92 def construct(self, arg):
93 """Construct a MQLayer node."""
---> 94 return self.evolution(arg, self.weight)
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\nn\cell.py:1357, in Cell.__call__(self, *args, **kwargs)
1353 if not (self._forward_pre_hook or self._forward_hook or self._backward_pre_hook or self._backward_hook or
1354 self._shard_fn or self._recompute_cell or (self.has_bprop and _pynative_executor.requires_grad())):
1355 return self.construct(*args, **kwargs)
-> 1357 return self._run_construct(*args, **kwargs)
1359 return self._complex_call(*args, **kwargs)
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\nn\cell.py:1063, in Cell._run_construct(self, *args, **kwargs)
1061 output = self._recompute_cell(*args, **kwargs)
1062 elif self.has_bprop:
-> 1063 output = self._call_custom_bprop(*args, **kwargs)
1064 else:
1065 output = self.construct(*args, **kwargs)
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\nn\cell.py:1411, in Cell._call_custom_bprop(self, *args, **kwargs)
1407 """
1408 Call custom bprop for cell bprop.
1409 """
1410 with _no_grad():
-> 1411 output = self.construct(*args, **kwargs)
1412 return _pynative_executor.call_custom_bprop(self, output, *args, **kwargs)
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindquantum\framework\operations.py:134, in MQOps.construct(self, enc_data, ans_data)
132 def construct(self, enc_data, ans_data):
133 """Construct an MQOps node."""
--> 134 check_enc_input_shape(enc_data, self.shape_ops(enc_data), len(self.expectation_with_grad.encoder_params_name))
135 check_ans_input_shape(ans_data, self.shape_ops(ans_data), len(self.expectation_with_grad.ansatz_params_name))
136 fval, g_enc, g_ans = self.expectation_with_grad(enc_data.asnumpy(), ans_data.asnumpy())
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindspore\ops\primitive.py:955, in constexpr.<locals>.decorator.<locals>.ProxyOp.__call__(self, *args, **kwargs)
954 def __call__(self, *args, **kwargs):
--> 955 return fn(*args, **kwargs)
File D:\10_The_Programs\4_The_Codes\00_virtual_environment\DeepLearning\Lib\site-packages\mindquantum\framework\operations.py:38, in check_enc_input_shape(data, encoder_tensor, enc_len)
36 raise TypeError(f"Encoder parameter requires a Tensor but get {type(data)}")
37 if len(encoder_tensor) != 2 or encoder_tensor[1] != enc_len:
---> 38 raise ValueError(
39 'Encoder data requires a two dimension Tensor with second'
40 + f' dimension should be {enc_len}, but get shape {encoder_tensor}'
41 )
ValueError: Encoder data requires a two dimension Tensor with second dimension should be 0, but get shape (1, 9)
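Why the error occurs: the last frame is the key. check_enc_input_shape requires the second dimension of the encoder tensor to equal len(self.expectation_with_grad.encoder_params_name), and that length is 0 here. A name prefix such as 'alpha_' does not make a parameter an encoder parameter; in MindQuantum every parameter defaults to an ansatz (trainable) parameter unless the circuit carrying it is explicitly marked with as_encoder(). The MQLayer therefore expects an encoder input of width 0 and rejects the (1, 9) patch tensor.

Below is a minimal sketch of the likely fix, assuming the alpha_* rotations are the data encoder and the theta_* rotations the trainable ansatz. The per-qubit Hamiltonian list is also an assumption, chosen so the layer output has n_qubits columns and the results[0, q] indexing in construct works; the original QubitOperator('Z0 Z1 ...') is a single product term and would yield only one output column.

from mindquantum import Circuit, Hamiltonian, QubitOperator, Simulator
from mindquantum.core import RX, RY, RZ
from mindquantum.framework import MQLayer

n_qubits = 4   # ceil(log2(3**2)) for a 3x3 kernel
n_params = 9   # kernel_size**2

encoder = Circuit()  # data-encoding sub-circuit: alpha_j values come from the input patch
ansatz = Circuit()   # trainable sub-circuit: theta_j become the MQLayer weights
gates = [RX, RY, RZ]
for j in range(n_params):
    wire = j % n_qubits
    encoder += gates[j % 3](f'alpha_{j}').on(wire)
    ansatz += gates[j % 3](f'theta_{j}').on(wire)

# Without as_encoder(), every parameter is an ansatz parameter and
# encoder_params_name stays empty -- exactly what the error message
# ("second dimension should be 0") is complaining about.
circ = encoder.as_encoder() + ansatz.as_ansatz()

# One Z expectation per qubit, so the layer output has n_qubits columns.
hams = [Hamiltonian(QubitOperator(f'Z{q}')) for q in range(n_qubits)]

sim = Simulator('mqvector', n_qubits)
grad_ops = sim.get_expectation_with_grad(hams, circ)
layer = MQLayer(grad_ops)  # now accepts an encoder tensor of shape (batch, 9)

In _build_circuits_and_hams this amounts to building two circuits instead of one and appending encoder.as_encoder() + ansatz.as_ansatz(); get_expectation_with_grad then reports kernel_size**2 encoder parameters, and the (1, 9) patch tensor passes the shape check.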