101. Symmetric Tree -- DFS (recursive) / BFS (iterative, queue)

This post shows two ways to check whether a binary tree is a mirror of itself, i.e., symmetric around its center: a recursive depth-first search that compares the root's left and right subtrees pair by pair, and an iterative breadth-first search that walks mirrored node pairs with a FIFO queue. Both run in O(n) time; the recursion uses O(h) stack space (h = tree height), while the queue can grow to O(n) in the worst case.

Given a binary tree, check whether it is a mirror of itself (i.e., symmetric around its center).

For example, this binary tree [1,2,2,3,4,4,3] is symmetric:

    1
   / \
  2   2
 / \ / \
3  4 4  3

But the following [1,2,2,null,3,null,3] is not:

    1
   / \
  2   2
   \   \
   3    3

Note:
Bonus points if you could solve it both recursively and iteratively.
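The code below assumes LeetCode's usual scaffolding. So that the snippet compiles on its own, here are the header it needs and a minimal version of the TreeNode definition the judge normally provides:

#include <queue>
using namespace std;

// Minimal binary-tree node, matching LeetCode's standard definition.
struct TreeNode {
    int val;
    TreeNode *left;
    TreeNode *right;
    TreeNode(int x) : val(x), left(nullptr), right(nullptr) {}
};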

class Solution {
public:
    /* // Recursive DFS: view the root's left and right subtrees as two trees
       // that must mirror each other. O(n) time.
    bool isSymmetric(TreeNode* root) {
        if (root == nullptr) return true;
        return dfs(root->left, root->right);
    }
    bool dfs(TreeNode* node1, TreeNode* node2) {
        if (node1 == nullptr && node2 == nullptr) return true;  // both empty: symmetric
        // Both non-empty: the two values must match, and node1->left must mirror
        // node2->right while node1->right mirrors node2->left.
        if (node1 != nullptr && node2 != nullptr)
            return node1->val == node2->val
                && dfs(node1->left, node2->right)
                && dfs(node1->right, node2->left);
        return false;  // exactly one is empty: not symmetric
    } */
    // Iterative BFS using a FIFO queue of mirrored node pairs.
    bool isSymmetric(TreeNode* root) {
        if (root == nullptr) return true;
        queue<pair<TreeNode*, TreeNode*>> q;
        q.push({root->left, root->right});
        while (!q.empty()) {
            pair<TreeNode*, TreeNode*> cur = q.front();
            q.pop();
            if (cur.first == nullptr && cur.second == nullptr) continue;  // both empty: fine
            if (cur.first != nullptr && cur.second != nullptr
                && cur.first->val == cur.second->val) {
                // Enqueue the two mirrored child pairs.
                q.push({cur.first->left, cur.second->right});
                q.push({cur.first->right, cur.second->left});
            } else {
                return false;  // shape or value mismatch
            }
        }
        return true;
    }
};
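A minimal driver to sanity-check both examples from the problem statement (a sketch; the nodes are deliberately leaked for brevity):

#include <iostream>

int main() {
    // Symmetric example: [1,2,2,3,4,4,3]
    TreeNode* a = new TreeNode(1);
    a->left = new TreeNode(2);
    a->right = new TreeNode(2);
    a->left->left = new TreeNode(3);
    a->left->right = new TreeNode(4);
    a->right->left = new TreeNode(4);
    a->right->right = new TreeNode(3);

    // Asymmetric example: [1,2,2,null,3,null,3]
    TreeNode* b = new TreeNode(1);
    b->left = new TreeNode(2);
    b->right = new TreeNode(2);
    b->left->right = new TreeNode(3);
    b->right->right = new TreeNode(3);

    Solution s;
    cout << boolalpha << s.isSymmetric(a) << '\n';  // true
    cout << boolalpha << s.isSymmetric(b) << '\n';  // false
    return 0;
}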

