Scene

This article introduces the Scene class in JavaFX: how to create a Scene and set it on a Stage, the concept of the scene graph, how to set the mouse-hover cursor, and the commonly used Cursor types. The focus is on how components are organized into a scene graph and displayed on a Stage.

■ About Scene

A Scene object (javafx.scene.Scene) is the top-level container of everything shown in a window; it is built around a single root node (root).
Simply put, every component that should be visible in the window must be contained in the scene.
Only after the scene has been set on a stage will the components it contains be displayed in the window.

■ Creating a Scene

Label lb = new Label("this is a label");
VBox  vBox  = new VBox(lb);
Scene scene = new Scene(vBox); // the component passed to the Scene constructor becomes the root component; only components inside this root can be displayed

■ Setting the Scene on a Stage

For the scene to become visible, it must be set on a stage.
A scene can be assigned to only one stage at a time, and a stage can display only one scene at a time.

VBox vBox = new VBox(new Label("A JavaFX Label"));
Scene scene = new Scene(vBox);
Stage stage = new Stage();
stage.setScene(scene);
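
Putting the pieces together, here is a minimal runnable sketch (the class name HelloSceneApp is illustrative); the window, and the scene inside it, only becomes visible after stage.show():

import javafx.application.Application;
import javafx.scene.Scene;
import javafx.scene.control.Label;
import javafx.scene.layout.VBox;
import javafx.stage.Stage;

public class HelloSceneApp extends Application {
    @Override
    public void start(Stage stage) {
        VBox root = new VBox(new Label("A JavaFX Label")); // root node of the scene graph
        Scene scene = new Scene(root, 300, 200);           // width/height are optional
        stage.setScene(scene);
        stage.show(); // the scene's contents become visible only after show()
    }

    public static void main(String[] args) {
        launch(args);
    }
}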

■ About the Scene Graph

A window contains many components, and these components contain one another; eventually they are all attached to the scene, whose root node (root) sits above all of them.
The root node and these components form a tree, and this tree is called the scene graph.
The scene graph has exactly one root node, the one set on the scene.
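
For example, a small sketch of such a tree (the control names are illustrative): the VBox passed to the Scene constructor is the root of the scene graph, and every nested container and control becomes a node below it.

Label  title   = new Label("Title");
Button ok      = new Button("OK");
Button cancel  = new Button("Cancel");
HBox   buttons = new HBox(ok, cancel);     // branch node with two leaf children
VBox   root    = new VBox(title, buttons); // root node of the scene graph
Scene  scene   = new Scene(root);
// Resulting tree: root (VBox) -> title (Label)
//                             -> buttons (HBox) -> ok (Button), cancel (Button)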

■ Mouse Cursor

When the mouse hovers over the window, the cursor can take different shapes.
The setCursor() method sets the cursor style shown while the mouse hovers over the scene.

scene.setCursor(Cursor.OPEN_HAND);

The javafx.scene.Cursor class provides many other cursor styles, for example:

  • Cursor.OPEN_HAND
  • Cursor.CLOSED_HAND
  • Cursor.CROSSHAIR
  • Cursor.DEFAULT
  • Cursor.HAND
  • Cursor.WAIT
  • Cursor.H_RESIZE
  • Cursor.V_RESIZE
  • Cursor.MOVE
  • Cursor.TEXT
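
For example, a small sketch (runLongTask() is a hypothetical placeholder for your own work) that shows the WAIT cursor while a task runs and then restores the default:

scene.setCursor(Cursor.WAIT);        // show the busy cursor
try {
    runLongTask();                   // placeholder for some long-running work
} finally {
    scene.setCursor(Cursor.DEFAULT); // always restore the default cursor
}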