Time.now.beginning_of_day  # => 00:00:00 of the current day, in the current time zone

This article covers date handling in Ruby on Rails, including how to use methods such as beginning_of_day for date calculations, with notes that apply across different versions of the Rails framework.
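For readers outside the Rails ecosystem, beginning_of_day simply truncates a timestamp to midnight while keeping its zone. A minimal sketch of the same computation in Python (illustration only; none of these names are part of any Rails API):

# Illustration: what Time.now.beginning_of_day computes, sketched in Python.
from datetime import datetime

now = datetime.now()
start_of_day = now.replace(hour=0, minute=0, second=0, microsecond=0)
print(start_of_day)  # e.g. 2024-01-15 00:00:00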
# E:\AI_System\web_ui\server.py
import sys
import os
import time
import logging
import json
import traceback
import threading
import platform
import psutil
import datetime
import subprocess
from pathlib import Path
from flask import Flask, jsonify, request, render_template
from flask_socketio import SocketIO, emit
from flask_limiter import Limiter
from flask_limiter.util import get_remote_address
from functools import wraps
# Fix: futures raise concurrent.futures.TimeoutError, which before Python 3.11
# is NOT the builtin TimeoutError, so it has to be imported to be caught.
from concurrent.futures import ThreadPoolExecutor, TimeoutError as FuturesTimeoutError
import logging.handlers

# Fix 1: updated list of required packages
REQUIRED_PACKAGES = [
    'flask',
    'flask_socketio',
    'flask_limiter',
    'psutil',
    'eventlet',
    'waitress'  # production dependency
]


def check_dependencies():
    """Stronger dependency check."""
    missing = []
    for package in REQUIRED_PACKAGES:
        try:
            __import__(package)
        except ImportError:
            missing.append(package)
    if missing:
        print(f"❌ Missing required packages: {', '.join(missing)}")
        print("Install them with:")
        print(f"pip install {' '.join(missing)}")
        sys.exit(1)


if __name__ == '__main__':
    check_dependencies()  # verify dependencies before starting


# ========== Configuration ==========
class SystemConfig:
    def __init__(self):
        self.BASE_DIR = Path(__file__).resolve().parent.parent
        self.HOST = '0.0.0.0'
        self.PORT = 5000
        self.LOG_LEVEL = 'DEBUG'
        self.SECRET_KEY = os.getenv('SECRET_KEY', 'your_secret_key_here')
        self.DEBUG = True
        self.USE_GPU = False
        self.DEFAULT_MODEL = 'gpt-3.5-turbo'
        self.MAX_WORKERS = 4

        # Directories
        self.LOG_DIR = self.BASE_DIR / 'logs'
        self.LOG_DIR.mkdir(parents=True, exist_ok=True)
        self.CONFIG_DIR = self.BASE_DIR / 'config'
        self.CONFIG_DIR.mkdir(parents=True, exist_ok=True)
        self.AGENT_PATH = self.BASE_DIR / 'agent'
        self.MODEL_CACHE_DIR = self.BASE_DIR / 'model_cache'
        self.MODEL_CACHE_DIR.mkdir(parents=True, exist_ok=True)
        self.TEMPLATE_DIR = self.BASE_DIR / 'web_ui' / 'templates'

    def __str__(self):
        return f"SystemConfig(HOST={self.HOST}, PORT={self.PORT})"


config = SystemConfig()

# ========== Global coordinator ==========
coordinator = None
executor = ThreadPoolExecutor(max_workers=config.MAX_WORKERS)


def register_coordinator(coord):
    global coordinator
    coordinator = coord
    if coordinator and hasattr(coordinator, 'connect_to_ui'):
        coordinator.connect_to_ui(update_ui)


def update_ui(event):
    # socketio only exists as a module global once the __main__ block has run
    if 'socketio' in globals():
        socketio.emit('system_event', event)


# ========== Thread-safety decorator ==========
def synchronized(lock):
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            with lock:
                return func(*args, **kwargs)
        return wrapper
    return decorator


# ========== Logging ==========
def setup_logger():
    """Tuned logging configuration."""
    logger = logging.getLogger('WebServer')
    logger.setLevel(getattr(logging, config.LOG_LEVEL.upper(), logging.DEBUG))

    # Drop any existing handlers
    for handler in logger.handlers[:]:
        logger.removeHandler(handler)

    log_formatter = logging.Formatter(
        '%(asctime)s [%(levelname)s] %(name)s: %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'
    )

    # File handler (rotates at midnight, keeps 30 days)
    file_handler = logging.handlers.TimedRotatingFileHandler(
        config.LOG_DIR / 'web_server.log',
        when='midnight',
        backupCount=30,
        encoding='utf-8'
    )
    file_handler.setFormatter(log_formatter)
    logger.addHandler(file_handler)

    # Console handler
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(log_formatter)
    logger.addHandler(console_handler)

    # Quieten the Flask and SocketIO internals
    logging.getLogger('werkzeug').setLevel(logging.WARNING)
    logging.getLogger('engineio').setLevel(logging.WARNING)

    return logger


logger = setup_logger()

# Fix: the original passed a fresh threading.Lock() to every @synchronized
# decorator, so the "synchronized" methods never actually excluded each other.
# One shared lock per component restores mutual exclusion.
_ENV_LOCK = threading.Lock()
_INIT_LOCK = threading.Lock()


# ========== Environment manager ==========
class EnvironmentManager:
    """Standalone environment manager."""

    def __init__(self, config):
        self.config = config
        self.state = {
            'temperature': 22.5,
            'humidity': 45.0,
            'light_level': 75,
            'objects': [],
            'last_updated': datetime.datetime.now().isoformat()
        }
        self.healthy = True
        self.lock = threading.Lock()

    @synchronized(_ENV_LOCK)
    def start(self):
        logger.info("Environment manager started")

    @synchronized(_ENV_LOCK)
    def get_state(self):
        # Refresh the simulated readings
        self.state['temperature'] = round(20 + 5 * (time.time() % 10) / 10, 1)
        self.state['humidity'] = round(40 + 10 * (time.time() % 10) / 10, 1)
        self.state['light_level'] = round(70 + 10 * (time.time() % 10) / 10, 1)
        self.state['last_updated'] = datetime.datetime.now().isoformat()
        return self.state

    @synchronized(_ENV_LOCK)
    def execute_action(self, action, params):
        logger.info(f"Executing environment action: {action} params: {params}")
        if action == "adjust_temperature":
            self.state['temperature'] = params.get('value', 22.0)
            return True
        elif action == "adjust_light":
            self.state['light_level'] = params.get('level', 70)
            return True
        return False

    def is_healthy(self):
        return self.healthy


# ========== System initialization ==========
class SystemInitializer:
    def __init__(self):
        self.base_dir = Path(__file__).resolve().parent.parent
        self.ai_core = None
        self.hardware_manager = None
        self.life_scheduler = None
        self.ai_agent = None
        self.start_time = time.time()
        self.environment_manager = None
        self.life_lock = threading.Lock()

    def initialize_system_paths(self):
        sys.path.insert(0, str(self.base_dir))
        logger.info(f"Project root: {self.base_dir}")
        sub_dirs = ['agent', 'core', 'utils', 'config', 'cognitive_arch', 'environment']
        for sub_dir in sub_dirs:
            full_path = self.base_dir / sub_dir
            if full_path.exists():
                sys.path.insert(0, str(full_path))
                logger.info(f"Added path: {full_path}")
            else:
                logger.warning(f"Directory does not exist, skipped: {full_path}")

    def initialize_environment_manager(self):
        try:
            env_config = {'update_interval': 1.0, 'spatial': {'grid_size': 1.0}}
            self.environment_manager = EnvironmentManager(env_config)
            self.environment_manager.start()
            logger.info("✅ Environment manager initialized")
            return self.environment_manager
        except Exception as e:
            logger.error(f"❌ Environment manager initialization failed: {str(e)}")
            logger.warning("⚠️ Environment interaction will be unavailable")
            return None

    def initialize_ai_core(self):
        logger.info("✅ Mock AI core initialized")
        # Fix: mock methods must take self; the original zero-argument lambdas
        # raised TypeError when called as bound methods on the instance.
        self.ai_core = type('AICore', (), {
            'status': 'running',
            'get_state': lambda self: {"status": "running", "model": "gpt-3.5-turbo"}
        })()

    def initialize_hardware_manager(self):
        logger.info("✅ Mock hardware manager initialized")
        self.hardware_manager = type('HardwareManager', (), {
            'get_status': lambda self: {
                "cpu_usage": psutil.cpu_percent(),
                "memory_usage": psutil.virtual_memory().percent,
                "gpu_usage": 0
            }
        })()

    @synchronized(_INIT_LOCK)
    def initialize_life_scheduler(self):
        logger.info("✅ Mock life scheduler initialized")
        self.life_scheduler = type('LifeScheduler', (), {
            'get_status': lambda self: {
                "current_activity": "thinking",
                "next_activity": "learning",
                "energy": 85
            }
        })()

    @synchronized(_INIT_LOCK)
    def initialize_ai_agent(self):
        logger.info("✅ Mock AI agent initialized")
        self.ai_agent = type('AIAgent', (), {
            'process_input': lambda self, input, user_id:
                f"Hello {user_id}, I received your message: '{input}'"
        })()

    def start_evolution_monitor(self):
        logger.info("✅ Mock evolution monitor started")

    def initialize_all(self):
        logger.info("=" * 50)
        logger.info("🚀 Initializing the AI system")
        logger.info("=" * 50)
        self.initialize_system_paths()
        self.initialize_ai_core()
        self.initialize_hardware_manager()
        self.initialize_life_scheduler()
        self.initialize_ai_agent()
        self.initialize_environment_manager()
        self.start_evolution_monitor()
        logger.info("✅ All system components initialized")
        return {
            "ai_core": self.ai_core,
            "hardware_manager": self.hardware_manager,
            "life_scheduler": self.life_scheduler,
            "ai_agent": self.ai_agent,
            "environment_manager": self.environment_manager
        }


# ========== Environment routes ==========
# Note: subscripted decorators such as @app.config['LIMITER'].limit(...)
# require Python >= 3.9.
def register_environment_routes(app):
    @app.route('/environment')
    def environment_view():
        return render_template('environment_view.html')

    @app.route('/api/environment/state', methods=['GET'])
    @app.config['LIMITER'].limit("10 per minute")
    def get_environment_state():
        env_manager = app.config['SYSTEM_COMPONENTS'].get('environment_manager')
        if not env_manager:
            return jsonify({"success": False, "error": "Environment manager not initialized"}), 503
        try:
            state = env_manager.get_state()
            return jsonify(state)
        except Exception as e:
            app.logger.error(f"Failed to read environment state: {traceback.format_exc()}")
            return jsonify({"success": False, "error": str(e)}), 500

    @app.route('/api/environment/action', methods=['POST'])
    @app.config['LIMITER'].limit("5 per minute")
    def execute_environment_action():
        env_manager = app.config['SYSTEM_COMPONENTS'].get('environment_manager')
        if not env_manager:
            return jsonify({"success": False, "error": "Environment manager not initialized"}), 503
        try:
            data = request.json
            action = data.get('action')
            params = data.get('params', {})
            if not action:
                return jsonify({"success": False, "error": "Missing action parameter"}), 400
            success = env_manager.execute_action(action, params)
            return jsonify({"success": success, "action": action})
        except Exception as e:
            app.logger.error(f"Failed to execute environment action: {traceback.format_exc()}")
            return jsonify({"success": False, "error": str(e)}), 500


# ========== Route registration ==========
def register_routes(app):
    register_environment_routes(app)

    # Health check
    @app.route('/health')
    def health_check():
        return jsonify({"status": "healthy",
                        "timestamp": datetime.datetime.now().isoformat()})

    # System status
    @app.route('/status')
    @app.config['LIMITER'].exempt
    def status():
        components = app.config['SYSTEM_COMPONENTS']
        system_info = {
            "uptime": time.time() - app.config['START_TIME'],
            "ai_core_status": components['ai_core'].status if components['ai_core'] else "uninitialized",
            "hardware_status": components['hardware_manager'].get_status() if components['hardware_manager'] else "uninitialized",
            "life_scheduler_status": components['life_scheduler'].get_status() if components['life_scheduler'] else "uninitialized",
            "environment_status": components['environment_manager'].is_healthy() if components['environment_manager'] else "uninitialized",
            "platform": platform.platform(),
            "python_version": sys.version,
            "memory_usage": psutil.virtual_memory().percent,
            "cpu_usage": psutil.cpu_percent(),
            "thread_count": threading.active_count(),
            "process_id": os.getpid()
        }
        return jsonify(system_info)

    # Core system
    @app.route('/api/core/state')
    @app.config['LIMITER'].limit("10 per minute")
    def get_core_state():
        ai_core = app.config['SYSTEM_COMPONENTS'].get('ai_core')
        if not ai_core:
            return jsonify({"error": "AI core not initialized"}), 503
        return jsonify(ai_core.get_state())

    # Life system
    @app.route('/life/dashboard')
    def life_dashboard():
        return render_template('life_dashboard.html')

    @app.route('/api/life/status')
    @app.config['LIMITER'].limit("10 per minute")
    def get_life_status():
        life_scheduler = app.config['SYSTEM_COMPONENTS'].get('life_scheduler')
        if not life_scheduler:
            return jsonify({"error": "Life scheduler not initialized"}), 503
        status = life_scheduler.get_status()
        return jsonify(status)

    # Chat
    @app.route('/chat', methods=['POST'])
    @app.config['LIMITER'].limit("30 per minute")
    def chat():
        components = app.config['SYSTEM_COMPONENTS']
        if not components['ai_agent']:
            return jsonify({"error": "Agent not initialized"}), 503
        try:
            data = request.get_json()
            user_input = data.get('message', '')
            user_id = data.get('user_id', 'default')
            if not user_input:
                return jsonify({"error": "Message must not be empty"}), 400
            app.logger.info(f"Chat request: user={user_id}, length={len(user_input)}")
            # Process asynchronously on the thread pool
            future = executor.submit(components['ai_agent'].process_input, user_input, user_id)
            response = future.result(timeout=10)  # 10-second timeout
            return jsonify({"response": response})
        except FuturesTimeoutError:
            return jsonify({"error": "Processing timed out"}), 504
        except Exception as e:
            app.logger.error(f"Chat handling failed: {traceback.format_exc()}")
            return jsonify({"error": "Chat handling failed", "details": str(e)}), 500

    # Catch-all 404
    @app.route('/<path:path>')
    def catch_all(path):
        return jsonify({"error": "Route not found", "path": path}), 404


def register_error_handlers(app):
    @app.errorhandler(404)
    def not_found_error(error):
        return jsonify({"error": "Resource not found", "message": str(error)}), 404

    @app.errorhandler(500)
    def internal_error(error):
        app.logger.error(f"Internal server error: {str(error)}")
        return jsonify({"error": "Internal server error",
                        "message": "See the logs for details"}), 500


# ========== WebSocket handlers ==========
def setup_websocket_handlers(socketio):
    @socketio.on('connect')
    def handle_connect():
        logger.info('Client connected')
        socketio.emit('system_status', {'status': 'ready'})

    @socketio.on('disconnect')
    def handle_disconnect():
        logger.info('Client disconnected')

    @socketio.on('user_message')
    def handle_user_message(data):
        user_id = data.get('user_id', 'guest')
        message = data.get('message', '')
        logger.info(f"Message from {user_id}: {message}")

        # Handle the message on the thread pool
        def process_message():
            try:
                global coordinator
                if coordinator:
                    return coordinator.process_message(message)
                else:
                    return f"Received your message: {message}"
            except Exception as e:
                logger.error(f"Message handling failed: {str(e)}")
                return "Error while handling the message"

        future = executor.submit(process_message)
        try:
            response = future.result(timeout=10)
            socketio.emit('agent_response', {
                'user_id': user_id,
                'response': response
            })
        except FuturesTimeoutError:
            socketio.emit('agent_response', {
                'user_id': user_id,
                'response': "Timed out, please retry"
            })


# ========== Production launcher ==========
def run_production_server(app):
    try:
        from waitress import serve
        logger.info(f"🚀 Production server: http://{config.HOST}:{config.PORT}")
        logger.warning("⚠️ Running in production mode (Waitress WSGI server)")
        serve(app, host=config.HOST, port=config.PORT, threads=8)
    except ImportError:
        logger.error("❌ Missing production dependency: waitress")
        logger.info("Run: pip install waitress")
        sys.exit(1)


# ========== Flask application factory ==========
def create_app():
    app = Flask(
        __name__,
        template_folder=str(config.TEMPLATE_DIR),
        static_folder='static',
        static_url_path='/static'
    )
    app.secret_key = config.SECRET_KEY

    # Rate limiter
    limiter = Limiter(
        get_remote_address,
        app=app,
        default_limits=["200 per day", "50 per hour"],
        storage_uri="memory://"
    )
    # Fix: LIMITER must be in app.config *before* register_routes() runs,
    # because the route decorators read app.config['LIMITER'] at registration
    # time. The original set it after registration, which raised a KeyError.
    app.config['LIMITER'] = limiter

    system_initializer = SystemInitializer()
    components = system_initializer.initialize_all()
    app.config['SYSTEM_COMPONENTS'] = components
    app.config['START_TIME'] = system_initializer.start_time
    app.config['BASE_DIR'] = system_initializer.base_dir

    # SocketIO async mode (note: ideally eventlet.monkey_patch() would run
    # before any other imports; calling it this late can misbehave)
    async_mode = 'threading'
    try:
        import eventlet
        eventlet.monkey_patch()
        async_mode = 'eventlet'
        logger.info("✅ Using eventlet async mode")
    except ImportError:
        logger.warning("⚠️ eventlet not installed; using threading mode, throughput may suffer")

    socketio = SocketIO(
        app,
        cors_allowed_origins="*",
        async_mode=async_mode,
        logger=True,
        engineio_logger=True
    )
    app.config['SOCKETIO'] = socketio

    # Register routes and error handlers
    register_routes(app)
    register_error_handlers(app)

    return app, socketio


# ========== Entry point ==========
if __name__ == '__main__':
    try:
        app, socketio = create_app()
        setup_websocket_handlers(socketio)
        if config.DEBUG:
            logger.info(f"Development server at http://{config.HOST}:{config.PORT}")
            socketio.run(
                app,
                host=config.HOST,
                port=config.PORT,
                debug=True,
                use_reloader=False,
                log_output=True
            )
        else:
            run_production_server(app)
    except KeyboardInterrupt:
        logger.info("Server shutting down")
        executor.shutdown(wait=False)
    except Exception as e:
        logger.critical(f"Server startup failed: {str(e)}")
        logger.error(traceback.format_exc())
        executor.shutdown(wait=False)
        sys.exit(1)

Go ahead and fix it, then send me back the complete corrected version.
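One quick way to sanity-check the corrected file is Flask's built-in test client, which calls the routes in-process without binding a port. A minimal sketch, assuming the code above is saved as server.py somewhere on the import path; the file name and the 'tester' user id are illustrative, not part of the original code:

# Hypothetical smoke test for server.py using Flask's test client.
from server import create_app

app, socketio = create_app()
client = app.test_client()

# Health check should return {"status": "healthy", ...}
print(client.get('/health').get_json())

# Chat round-trip against the mock agent
reply = client.post('/chat', json={'message': 'hello', 'user_id': 'tester'})
print(reply.get_json())  # {'response': "Hello tester, I received your message: 'hello'"}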
ncdisp('g0085.nc')

Source:
    C:\Users\Owner\Documents\MATLAB\Examples\R2022b\111\g0085.nc
Format:
    classic
Global Attributes:
    WOCE_Version = '3.0'
    Conventions = 'COARDS/WOCE'
    DAC_ID = 'FS006047'
    DOC_01_0000 = 'BODC Document Reference Number 59760 (38 lines)'
    DOC_01_0001 = ' QUALITY ASSESSMENT OF SEA LEVEL DATA'
    DOC_01_0002 = ' by the'
    DOC_01_0003 = ' TOGA SEA LEVEL CENTER/NATIONAL OCEANOGRAPHIC DATA CENTER'
    DOC_01_0004 = ' JOINT ARCHIVE FOR SEA LEVEL'
    DOC_01_0005 = ''
    DOC_01_0006 = 'Station : Kushimoto Latitude: 33 28.0N'
    DOC_01_0007 = 'Country : Japan Longitude: 135 47.0E'
    DOC_01_0008 = 'JASL # : 353A Time Meridian: 135E (GMT + 9 hr)'
    DOC_01_0009 = 'GLOSS # : 085 TOGA #: none NODC #: 13133501'
    DOC_01_0010 = 'Contributor : Japan Ocean. Data Center'
    DOC_01_0011 = ' Hydrographic Dept.'
    DOC_01_0012 = ' Maritime Safety Agency'
    DOC_01_0013 = ' 3-1,Tsukiji, 5-Chome, Chuo-ku'
    DOC_01_0014 = ' Tokyo 104 Japan'
    DOC_01_0015 = 'Originator : same'
    DOC_01_0016 = 'Original Data: unconfirmed'
    DOC_01_0017 = 'Instrmnt Type: FUESS'
    DOC_01_0018 = 'Digitzd Intvl: unconfirmed'
    DOC_01_0019 = 'Present Data : Hourly, daily, and monthly values obtained by:'
    DOC_01_0020 = ' Hourly : unconfirmed'
    DOC_01_0021 = ' Daily : 119-point convolution filter (Bloomfield, 1976)'
    DOC_01_0022 = ' centered on noon applied to the hourly data'
    DOC_01_0023 = ' with respective periods of the 95, 50, and 5%'
    DOC_01_0024 = ' amplitude points at 124.0, 60.2, and 40.2 hours'
    DOC_01_0025 = ' Monthly: Simple average of all daily values; calculated if'
    DOC_01_0026 = ' 7 or fewer days are missing'
    DOC_01_0027 = 'Span of data : 01 Jan 1961 - 30 Nov 1968'
    DOC_01_0028 = 'Gaps > 1 mon : none'
    DOC_01_0029 = 'Time Refernce: GMT (hours 00-23) Space-filler Flag :-9999'
    DOC_01_0030 = 'Units : millimeters Missing Data Flag :99999'
    DOC_01_0031 = 'Refernce Levl: All data are referred to tide gauge zero and linked'
    DOC_01_0032 = ' to historic fixed bench marks.'
    DOC_01_0033 = 'Comment : The daily or monthly means do not reveal'
    DOC_01_0034 = ' any obvious reference level shifts. Most'
    DOC_01_0035 = ' questionable fluctuations are related to'
    DOC_01_0036 = ' atmospheric or oceanographic events. Data'
    DOC_01_0037 = ' spikes of about 10 cm or less have not been'
    DOC_01_0038 = ' corrected.'
    DOC_02_0000 = 'BODC Document Reference Number 59774 (94 lines)'
    DOC_02_0001 = ' Site History: Kushimoto, Japan'
    DOC_02_0002 = ' =============================='
    DOC_02_0003 = ''
    DOC_02_0004 = 'Station name : Kushimoto'
    DOC_02_0005 = 'Country : Japan'
    DOC_02_0006 = 'Latitude : 33 28.0N'
    DOC_02_0007 = 'Longitude : 135 47.0E'
    DOC_02_0008 = 'GLOSS No. : 085'
    DOC_02_0009 = 'Contributor : 1961-1968 TOGA Sea Level Center'
    DOC_02_0010 = ' 1969-1999 Japan Meteorological Agency'
    DOC_02_0011 = 'Instrument Type : Float and stilling well gauge, FUESS'
    DOC_02_0012 = 'Site of Gauge :'
    DOC_02_0013 = 'Observational Periods: 1961 - present'
    DOC_02_0014 = ''
    DOC_02_0015 = ''
    DOC_02_0016 = 'Reference level:'
    DOC_02_0017 = 'All heights have been referred to the station Tide Gauge Zero which is'
    DOC_02_0018 = 'linked to fixed bench marks.'
    DOC_02_0019 = ''
    DOC_02_0020 = 'Tide gauge benchmark (TGBM) description:'
    DOC_02_0021 = 'Japan Met. Agency Benchmark bolt inside tide gauge hut, adjacent to'
    DOC_02_0022 = 'stilling well'
    DOC_02_0023 = ''
    DOC_02_0024 = 'Auxiliary benchmarks:'
    DOC_02_0025 = 'Tide station BM 3.3006m above Tokyo Peil (1969).'
    DOC_02_0026 = ''
    DOC_02_0027 = 'Benchmark relationships:'
    DOC_02_0028 = 'Tide Gauge Zero (TGZ) = 1.642m below Tokyo Peil (Mean Sea Level of Tokyo'
    DOC_02_0029 = 'Bay) TGZ = 5.552m below TGBM (from 1978 onwards)'
    DOC_02_0030 = ''
    DOC_02_0031 = ''
    DOC_02_0032 = 'Quality assessment:'
    DOC_02_0033 = ''
    DOC_02_0034 = 'Comment: The daily or monthly means do not reveal any obvious reference'
    DOC_02_0035 = ' level shifts except for a possible datum shift of +6cm on 12 Jan'
    DOC_02_0036 = ' 1962. Most large fluctuations are related to atmospheric or'
    DOC_02_0037 = ' oceanographic events. Data spikes of about 10 cm or less have'
    DOC_02_0038 = ' not been corrected. Small gaps of 1-2 hours are present throughout'
    DOC_02_0039 = ' the series upto the end of 1995.'
    DOC_02_0040 = ''
    DOC_02_0041 = ''
    DOC_02_0042 = ' CI QI MISSING REPLACED GAPS QUESTIONABLE'
    DOC_02_0043 = 'YEAR (%) (%) DATA OR BAD DATA FLUCTUATIONS'
    DOC_02_0044 = '---- --- --- ---------------- --------------- --------------------'
    DOC_02_0045 = '1961 100 99 none none 258-260'
    DOC_02_0046 = '1962 100 99 none none 239-265'
    DOC_02_0047 = '1963 100 100 none none none'
    DOC_02_0048 = '1964 100 99 none none 2-4,268-269'
    DOC_02_0049 = '1965 100 99 none none 252-253,260'
    DOC_02_0050 = '1966 100 99 none none none'
    DOC_02_0051 = '1967 100 99 none none 234'
    DOC_02_0052 = '1968 92 92 335-365 none none'
    DOC_02_0053 = '1969 80 79 15-21,79-86,89- none 251'
    DOC_02_0054 = ' 90,146-150,166-'
    DOC_02_0055 = ' 179,203-205,212-'
    DOC_02_0056 = ' 243,260,261,297-'
    DOC_02_0057 = ' 300,300-301,321-'
    DOC_02_0058 = ' 323'
    DOC_02_0059 = '1970 94 94 10,12,13,14,15, none none'
    DOC_02_0060 = ' 16,22,26,116-122'
    DOC_02_0061 = ' 267-268,342-349'
    DOC_02_0062 = '1971 100 100 none none none'
    DOC_02_0063 = '1972 100 100 none none none'
    DOC_02_0064 = '1973 100 100 none none none'
    DOC_02_0065 = '1974 99 94 46-50 none 21,112-124,360-365'
    DOC_02_0066 = '1975 100 98 none none 1-3,354-356'
    DOC_02_0067 = '1976 100 100 none none none'
    DOC_02_0068 = '1977 100 100 none none none'
    DOC_02_0069 = '1978 87 87 5-14,14-18,19-21 none none'
    DOC_02_0070 = ' 21-23,27-31,31-'
    DOC_02_0071 = ' 37,37-49,50-51,'
    DOC_02_0072 = ' 52-55,55-56,56-'
    DOC_02_0073 = ' 58'
    DOC_02_0074 = '1979 100 100 none none none'
    DOC_02_0075 = '1980 100 99 none none 46-48'
    DOC_02_0076 = '1981 99 99 126-127 none none'
    DOC_02_0077 = '1982 100 100 none none none'
    DOC_02_0078 = '1983 100 100 none none none'
    DOC_02_0079 = '1984 100 100 none none none'
    DOC_02_0080 = '1985 100 100 none none none'
    DOC_02_0081 = '1986 100 100 none none none'
    DOC_02_0082 = '1987 100 100 none none none'
    DOC_02_0083 = '1988 99 99 249-250 none none'
    DOC_02_0084 = '1989 100 100 none none none'
    DOC_02_0085 = '1990 100 100 none none none'
    DOC_02_0086 = '1991 97 97 302-303,344-352 none none'
    DOC_02_0087 = '1992 99 99 338 none none'
    DOC_02_0088 = '1993 100 100 none none none'
    DOC_02_0089 = '1994 100 100 none none none'
    DOC_02_0090 = '1995 100 100 none none none'
    DOC_02_0091 = '1996 100 99 none none 075'
    DOC_02_0092 = '1997 100 99 none none 171,179'
    DOC_02_0093 = '1998 100 99 none none 265'
    DOC_02_0094 = '1999 100 100 none none none'
    DOC_03_0000 = 'BODC Document Reference Number 63428 (48 lines)'
    DOC_03_0001 = ' General Data Screening carried out by BODC'
    DOC_03_0002 = ''
    DOC_03_0003 = 'BODC screen both the series header qualifying information and the parameter'
    DOC_03_0004 = 'values in the data cycles themselves.'
    DOC_03_0005 = ''
    DOC_03_0006 = 'Header information is inspected for 1) irregularities such as infeasible'
    DOC_03_0007 = 'values; 2) inconsistencies between related information, for example times'
    DOC_03_0008 = 'for instrument deployment and for start/end of data series; length of'
    DOC_03_0009 = 'record or number of data cycles, the cycle interval, originator's comments'
    DOC_03_0010 = 'on meter/mooring performance, data quality and parameters measured and the'
    DOC_03_0011 = 'parameters actually present in the data cycles. Documents are written by'
    DOC_03_0012 = 'BODC highlighting irregularities which cannot be resolved.'
    DOC_03_0013 = ''
    DOC_03_0014 = 'Data cycles are inspected using time or depth series plots of all'
    DOC_03_0015 = 'parameters. Currents are additionally inspected using vector scatter plots'
    DOC_03_0016 = 'and time series plots of North and East velocity components. These'
    DOC_03_0017 = 'presentations undergo intrinsic and extrinsic screening to detect'
    DOC_03_0018 = 'infeasible values within the data cycles themselves and inconsistencies as'
    DOC_03_0019 = 'seen when comparing characteristics of adjacent data sets displaced with'
    DOC_03_0020 = 'respect to depth, position or time. Values suspected of being of non-'
    DOC_03_0021 = 'oceanographic origin may be tagged with the BODC flag denoting suspect'
    DOC_03_0022 = 'value; the data values will not be altered.'
    DOC_03_0023 = ''
    DOC_03_0024 = 'The following types of irregularity, each relying on visual detection in'
    DOC_03_0025 = 'the plot, are amongst those which may be flagged as suspect:'
    DOC_03_0026 = ''
    DOC_03_0027 = ' Spurious data at the start or end of the record.'
    DOC_03_0028 = ' Obvious spikes occurring in periods free from meteorological disturbance.'
    DOC_03_0029 = ' A sequence of constant values in consecutive data cycles.'
    DOC_03_0030 = ''
    DOC_03_0031 = 'If a large percentage of the data is affected by irregularities, deemed'
    DOC_03_0032 = 'abnormal, then instead of flagging the individual suspect values, a caution'
    DOC_03_0033 = 'may be documented. Likewise documents will highlight irregularities seen in'
    DOC_03_0034 = 'the current vector scatter plots such as incongruous centre holes, abnormal'
    DOC_03_0035 = 'asymmetry in tidally dominated records or gaps as when a range of speeds or'
    DOC_03_0036 = 'directions go unregistered due to meter malfunction.'
    DOC_03_0037 = ''
    DOC_03_0038 = 'Inconsistencies between the characteristics of the data set and those of'
    DOC_03_0039 = 'its neighbours are sought and, where necessary, documented. This covers'
    DOC_03_0040 = 'inconsistencies such as the following:'
    DOC_03_0041 = ''
    DOC_03_0042 = ' Maximum and minimum values of parameters (spikes excluded).'
    DOC_03_0043 = ' The occurrence of meteorological events.'
    DOC_03_0044 = ''
    DOC_03_0045 = 'This intrinsic and extrinsic screening of the parameter values seeks to'
    DOC_03_0046 = 'confirm the qualifying information and the source laboratory's comments on'
    DOC_03_0047 = 'the series. In screening and collating information, every care is taken to'
    DOC_03_0048 = 'ensure that errors of BODC making are not introduced.'
    BODC_QC_00 = 'BODC Processing and quality control flag channel Definitions'
    BODC_QC_01 = 'Flag Definition'
    BODC_QC_02 = '---- ----------'
    BODC_QC_03 = ' Unqualified'
    BODC_QC_04 = '< Below detection limit'
    BODC_QC_05 = '> In excess of quoted value'
    BODC_QC_06 = 'B Beginning of CTD Down/Up Cast'
    BODC_QC_07 = 'D Thermometric depth'
    BODC_QC_08 = 'E End of CTD Down/Up Cast'
    BODC_QC_09 = 'K Uncertain/suspect value'
    BODC_QC_10 = 'L Improbable value - originator's quality control'
    BODC_QC_11 = 'M Improbable value - BODC quality control'
    BODC_QC_12 = 'N Null value'
    BODC_QC_13 = 'O Improbable value - user quality control'
    BODC_QC_14 = 'P Trace/calm'
    BODC_QC_15 = 'Q Indeterminate'
    BODC_QC_16 = 'R Replacement value'
    BODC_QC_17 = 'S Estimated value'
    BODC_QC_18 = 'T Interpolated value'
    BODC_QC_19 = 'U Uncalibrated'
    BODC_QC_20 = 'W Control value'
    BODC_QC_21 = 'X Excessive difference'
Dimensions:
    time      = 341103  (UNLIMITED)
    latitude  = 1
    longitude = 1
Variables:
    time
        Size:       341103x1
        Dimensions: time
        Datatype:   double
        Attributes:
            long_name      = 'time'
            units          = 'days since 1900-1-1 0:0:0'
            data_min       = 22280
            data_max       = 36523.9583
            C_format       = '%12.6f'
            FORTRAN_format = 'f12.6'
    latitude
        Size:       1x1
        Dimensions: latitude
        Datatype:   single
        Attributes:
            long_name      = 'latitude'
            units          = 'degrees_N'
            data_min       = 33.4667
            data_max       = 33.4667
            C_format       = '%8.4f'
            FORTRAN_format = 'f8.4'
    longitude
        Size:       1x1
        Dimensions: longitude
        Datatype:   single
        Attributes:
            long_name      = 'longitude'
            units          = 'degrees_E'
            data_min       = 135.7833
            data_max       = 135.7833
            C_format       = '%9.4f'
            FORTRAN_format = 'f9.4'
    woce_date
        Size:       341103x1
        Dimensions: time
        Datatype:   int32
        Attributes:
            long_name      = 'WOCE date'
            units          = 'yyyymmdd UTC'
            data_min       = 19610101
            data_max       = 19991231
            C_format       = '%8d'
            FORTRAN_format = 'i8'
    woce_time
        Size:       341103x1
        Dimensions: time
        Datatype:   single
        Attributes:
            long_name      = 'WOCE time of day'
            units          = 'hhmmss.dd UTC'
            data_min       = 0
            data_max       = 230000
            C_format       = '%9.2f'
            FORTRAN_format = 'f9.2'
    sea_level_1
        Size:       1x1x341103
        Dimensions: longitude,latitude,time
        Datatype:   single
        Attributes:
            long_name      = 'sea level'
            units          = 'metres'
            BODC_Codes     = 'ASLVZZ01'
            _FillValue     = -99
            data_min       = 0.22
            data_max       = 3.02
            C_format       = '%7.3f'
            FORTRAN_format = 'f7.3'
    sea_level_quality_flag_1
        Size:       1x1x341103
        Dimensions: longitude,latitude,time
        Datatype:   char

Please replace the data in the code above with this data.
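The MATLAB script the comment refers to is not included in the thread, so the exact substitution cannot be shown here. As a neutral sketch, the variables listed above can be read from g0085.nc in Python with the netCDF4 package; the file and variable names come from the listing, everything else is an assumption:

# Sketch: read the Kushimoto sea-level series described by the ncdisp listing.
from netCDF4 import Dataset, num2date

with Dataset('g0085.nc') as nc:
    t = nc.variables['time']                    # 'days since 1900-1-1 0:0:0'
    dates = num2date(t[:], units=t.units)
    # squeeze() drops the length-1 latitude/longitude axes whatever the
    # stored dimension order is; _FillValue = -99 is masked automatically.
    sea_level = nc.variables['sea_level_1'][:].squeeze()   # metres

print(dates[0], sea_level[:3])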