POJ2251 Dungeon Master BFS广搜

You are trapped in a 3D dungeon and need to find the quickest way out! The dungeon is composed of unit cubes which may or may not be filled with rock. It takes one minute to move one unit north, south, east, west, up or down. You cannot move diagonally and the maze is surrounded by solid rock on all sides.

Is an escape possible? If yes, how long will it take?
Input
The input consists of a number of dungeons. Each dungeon description starts with a line containing three integers L, R and C (all limited to 30 in size).
L is the number of levels making up the dungeon.
R and C are the number of rows and columns making up the plan of each level.
Then there will follow L blocks of R lines each containing C characters. Each character describes one cell of the dungeon. A cell full of rock is indicated by a '#' and empty cells are represented by a '.'. Your starting position is indicated by 'S' and the exit by the letter 'E'. There's a single blank line after each level. Input is terminated by three zeroes for L, R and C.
Output
Each maze generates one line of output. If it is possible to reach the exit, print a line of the form
Escaped in x minute(s).

where x is replaced by the shortest time it takes to escape.
If it is not possible to escape, print the line
Trapped!
解析:三维空间的BFS,只是把x,y两个变量变为x,y,z三个变量,改动位移数组即可。
Sample Input
3 4 5
S....
.###.
.##..
###.#

#####
#####
##.##
##...

#####
#####
#.###
####E

1 3 3
S##
#E#
###

0 0 0
Sample Output
Escaped in 11 minute(s).
Trapped!
 
#include <iostream>
#include <cstdlib>
#include <cstdio>
#include <algorithm>
#include <cstring>
#include <queue>
#include <cmath>
using namespace std;
//#define PI 3.14159265358979323
#define inf 0x3f3f3f3f
#define maxn 8   
// Dungeon dimensions: l = levels, r = rows, c = columns (each limited to 30).
int l,r,c;
// ch[row][col][level]: maze cell — '#' rock, '.' empty, 'S' start, 'E' exit.
char ch[35][35][35];
// The six axis-aligned unit moves, in (row, col, level) deltas.
int a[6][3]={{1,0,0},{-1,0,0},{0,1,0},{0,-1,0},{0,0,1},{0,0,-1}};
// vis[row][col][level] == 1 once the cell has been enqueued by the BFS.
int vis[35][35][35];
// Start (sx,sy,sz) and exit (ex,ey,ez) coordinates, filled while reading input.
int sx,sy,sz,ex,ey,ez;
// One BFS state: position (x = row, y = col, z = level) plus the number of
// minutes (step) taken to reach it.
struct Map
{
    int x,y,z;
    int step;
};
// Returns 1 if cell m lies inside the maze, is not rock, and has not been
// visited yet; returns 0 otherwise.
// Bug fix: the range checks must run BEFORE indexing vis/ch — the original
// code indexed the arrays first, which is undefined behavior for negative
// coordinates (e.g. m.x == -1 after a move off the top row).
int check(Map m)
{
    // Reject out-of-range coordinates first so the array accesses below
    // are always within bounds.
    if(m.x<0||m.x>=r||m.y<0||m.y>=c||m.z<0||m.z>=l)
        return 0;
    if(vis[m.x][m.y][m.z]==1||ch[m.x][m.y][m.z]=='#')
        return 0;
    return 1;
}
int dfs()
{
    queue<Map> q;
    Map now,next;
    now.x=sx;
    now.y=sy;
    now.z=sz;
    now.step=0;
    q.push(now);
    vis[now.x][now.y][now.z]=1;
    while(!q.empty())
    {
        now=q.front();
        q.pop();

        if(now.x==ex&&now.y==ey&&now.z==ez)
            return now.step;
        for(int i=0;i<6;i++)
        {
            next.step=now.step;
            next.x=now.x+a[i][0];
            next.y=now.y+a[i][1];
            next.z=now.z+a[i][2];
            if(!check(next))
            continue;
            next.step=now.step+1;
                q.push(next);
                vis[next.x][next.y][next.z]=1;

        }

    }
    return 0;
}
// Reads dungeons until the "0 0 0" terminator, runs the BFS on each, and
// prints either the escape time or "Trapped!".
int main()
{
    while(cin>>l>>r>>c)
    {
        if(l==0&&r==0&&c==0)
            return 0;
        // Fresh visited set for every dungeon.
        memset(vis,0,sizeof(vis));
        // Input order is level-by-level, row-by-row; store as ch[row][col][level].
        // cin >> char skips ALL whitespace (including the blank line between
        // levels), so no manual getchar() is needed — the original getchar()
        // calls were fragile (they would misfire on CRLF line endings).
        for(int i=0;i<l;i++)          // level
        {
            for(int j=0;j<r;j++)      // row
            {
                for(int k=0;k<c;k++)  // column
                {
                    cin>>ch[j][k][i];
                    if(ch[j][k][i]=='S')
                    {
                        sx=j;
                        sy=k;
                        sz=i;
                    }
                    else if(ch[j][k][i]=='E')
                    {
                        ex=j;
                        ey=k;
                        ez=i;
                    }
                }
            }
        }
        int ans=dfs();
        if(ans==0)
            cout<<"Trapped!"<<"\n";
        else
            cout<<"Escaped in "<<ans<<" minute(s)."<<"\n";
    }

    return 0;
}

内容概要:本文介绍了基于贝叶斯优化的CNN-LSTM混合神经网络在时间序列预测中的应用,并提供了完整的Matlab代码实现。该模型结合了卷积神经网络(CNN)在特征提取方面的优势与长短期记忆网络(LSTM)在处理时序依赖问题上的强大能力,形成一种高效的混合预测架构。通过贝叶斯优化算法自动调参,提升了模型的预测精度与泛化能力,适用于风电、光伏、负荷、交通流等多种复杂非线性系统的预测任务。文中还展示了模型训练流程、参数优化机制及实际预测效果分析,突出其在科研与工程应用中的实用性。; 适合人群:具备一定机器学习基础和Matlab编程经验的高校研究生、科研人员及从事预测建模的工程技术人员,尤其适合关注深度学习与智能优化算法结合应用的研究者。; 使用场景及目标:①解决各类时间序列预测问题,如能源出力预测、电力负荷预测、环境数据预测等;②学习如何将CNN-LSTM模型与贝叶斯优化相结合,提升模型性能;③掌握Matlab环境下深度学习模型搭建与超参数自动优化的技术路线。; 阅读建议:建议读者结合提供的Matlab代码进行实践操作,重点关注贝叶斯优化模块与混合神经网络结构的设计逻辑,通过调整数据集和参数加深对模型工作机制的理解,同时可将其框架迁移至其他预测场景中验证效果。
评论
成就一亿技术人!
拼手气红包6.0元
还能输入1000个字符
 
红包 添加红包
表情包 插入表情
 条评论被折叠 查看
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值