tuple_list_dictionary (work in progress...)

This article covers working with Python's dictionary type, including creating dictionaries, iterating over them, and converting strings to dictionaries. The examples demonstrate safe string-to-dictionary conversion with ast.literal_eval, and parsing JSON-formatted strings with json.loads.
Basic characteristics

tuple
==
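
This section is empty in the draft; as a starting point, a minimal sketch of the usual tuple properties (Python 3 syntax): tuples are fixed-length, immutable, and, when their elements are hashable, usable as dict keys.

```python
t = (1, 2, 3)
print(t[0])          # 1 -- indexing and slicing work like a list
print(t + (4,))      # (1, 2, 3, 4) -- "modifying" builds a new tuple

try:
    t[0] = 9         # tuples are immutable
except TypeError as e:
    print(e)         # 'tuple' object does not support item assignment

d = {t: "point"}     # hashable, so a tuple can be a dict key
print(d[(1, 2, 3)])  # point
```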

list
==
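
Likewise for lists, a minimal sketch of their defining trait, mutability:

```python
lst = [3, 1, 2]
lst.append(4)        # in-place growth: [3, 1, 2, 4]
lst[0] = 30          # in-place element assignment: [30, 1, 2, 4]
lst.sort()           # in-place sort: [1, 2, 4, 30]
print(lst)
print(lst[-1])       # 30 -- negative indexes count from the end
```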

dictionary
==
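
And for dictionaries, a minimal sketch: they map hashable keys to arbitrary values, with average O(1) lookup.

```python
d = {'en': 'China', 'cn': '中国'}
d['jp'] = 'Japan'            # insert or update by key
print('en' in d)             # True -- membership tests check keys
print(d.get('fr', 'N/A'))    # N/A -- .get avoids KeyError for missing keys
del d['jp']                  # remove a key
print(len(d))                # 2
```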

Iteration

tuple
==
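
A minimal sketch of iterating a tuple (Python 3 syntax), by element and by index:

```python
t = ('a', 'b', 'c')

for item in t:                # plain element-by-element iteration
    print(item)

for i, item in enumerate(t):  # enumerate yields (index, element) pairs
    print(i, item)
```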

list
==
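
List iteration works the same way; a minimal sketch, including a comprehension:

```python
lst = [10, 20, 30]

for item in lst:
    print(item)

squares = [x * x for x in lst]   # comprehensions iterate too
print(squares)                   # [100, 400, 900]
```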

dictionary
==
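
For dictionaries, iterating the dict itself yields keys, and .items() yields key/value pairs (a minimal Python 3 sketch; Python 2 also offers .iteritems()):

```python
d = {'en': 'China', 'cn': '中国'}

for key in d:                    # iterating a dict yields its keys
    print(key, d[key])

for key, value in d.items():     # or get key/value pairs directly
    print(key, value)
```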

Conversion

str ==> dictionary

Use ast.literal_eval. The transcript below is from Python 2, where string literals are byte strings with a .decode method; the Python 3 differences are noted after the example.

Mac OS X 10.9.2
>>> import ast
>>> en = "China".decode('utf8')
>>> cn = "中国".decode('utf8')
>>> print en
China
>>> print cn
中国
>>> s1 = {'en': en, 'cn': cn}
>>> print s1
{'en': u'China', 'cn': u'\u4e2d\u56fd'}
>>> s2 = "{'en': u'China', 'cn': u'\\u4e2d\\u56fd'}"
>>> ast.literal_eval(s2)
{'en': u'China', 'cn': u'\u4e2d\u56fd'}
>>> print ast.literal_eval(s2)  # convert the string s2 into a dictionary
{'en': u'China', 'cn': u'\u4e2d\u56fd'}
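
For comparison, a minimal Python 3 sketch of the same conversion; there all str values are Unicode, so no .decode step or u'' prefix is involved:

```python
import ast

s2 = "{'en': 'China', 'cn': '中国'}"
d = ast.literal_eval(s2)   # evaluates literals only -- safe, unlike eval()
print(d['cn'])             # 中国
print(type(d))             # <class 'dict'>
```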

JSON string ==> dictionary

Use json.loads; it requires the input string to be valid JSON (double-quoted strings, null rather than None, and so on).

Example 1

Windows 7
>>> import json
>>> ss1 = '[{"name":"sam","cn":"山姆"}]'
>>> json.loads(ss1)
[{u'name': u'sam', u'cn': u'\u027d\u0137'}]

Note the garbled u'\u027d\u0137' instead of u'\u5c71\u59c6': the Windows console supplies the literal as GBK bytes, and Python 2's json.loads decodes byte strings as UTF-8 by default. The GBK bytes for "山姆" happen to form valid UTF-8 sequences, so they silently decode to the wrong code points. Examples 2 and 3 avoid this by calling .decode('gbk') first.

Example 2

Windows 7
>>> ss = '{"one": null,"two":{"a": "1", "b": null, "c": "西"}, "three": "3"}'
>>>
>>> sj = json.loads(ss.decode('gbk'))
>>> print sj
{u'three': u'3', u'two': {u'a': u'1', u'c': u'\u897f', u'b': None}, u'one': None}
>>> sj["two"]["c"]
u'\u897f'
>>> print sj["two"]["c"]
西
>>>
Example 3

Windows 7
>>> ss = '[{"one": null,"two":{"a": "1", "b": null, "c": "西"}, "three": "3"}, {"oo": "欧", "tt": "22"}]'
>>> sj = json.loads(ss.decode('gbk'))
>>> sj[1]["oo"]
u'\u6b27'
>>> print sj[1]["oo"]
欧
>>>
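
Again for comparison, a minimal Python 3 sketch: json.loads accepts str directly, so the .decode('gbk') step above is purely a Python 2 console artifact.

```python
import json

ss = '{"one": null, "two": {"a": "1", "b": null, "c": "西"}, "three": "3"}'
sj = json.loads(ss)        # Python 3 str is already Unicode
print(sj["two"]["c"])      # 西
print(sj["one"] is None)   # True -- JSON null maps to Python None
```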


