4619: [Wf2016]Swap Space

This post presents a greedy solution to a disk-space problem: by simulating the reformatting process drive by drive, the algorithm computes the minimum amount of extra space that must be bought along the way. A complete C++ implementation is included, together with an explanation of how it works.

Problem link

Problem summary: there are n hard drives; reformatting drive i consumes x[i] units of space and yields y[i] units afterwards. Output the minimum amount of extra space needed.

Solution: greedy, essentially the same problem as bzoj3709. Split the drives into two groups: gaining drives (x < y) and losing drives (x >= y). Process the gaining drives first, in increasing order of cost x, so that cheap gains bankroll the more expensive ones; then process the losing drives in decreasing order of yield y. An exchange argument shows this order is optimal: for two adjacent losing drives, putting the one with the larger y first never requires more extra space.

What I got out of this: I ran into a problem I had already solved before... such a sense of accomplishment!

#include <iostream>
#include <cstdio>
#include <algorithm>
using namespace std;
const int M=1000005;
int n,x,y,cnta,cntb;
long long now,ans;//now: free space currently on hand; ans: extra space bought so far

struct data{int c,v;}a[M],b[M];//c: space consumed, v: space gained
bool cmpa(data x,data y){return x.c<y.c;}//gaining drives: cheapest cost first
bool cmpb(data x,data y){return x.v>y.v;}//losing drives: largest yield first

void del(int cost,int val){
    now-=cost;//spend the space this drive's reformat needs
    if(now<0) ans-=now,now=0;//not enough free space: buy the shortfall
    now+=val;//reclaim the drive's new capacity
}

void work()
{
    for(int i=1;i<=cnta;i++) del(a[i].c,a[i].v);//gaining drives first
    for(int i=1;i<=cntb;i++) del(b[i].c,b[i].v);//then losing drives
    printf("%lld\n",ans);
}

void init()
{
    scanf("%d",&n);
    for(int i=1;i<=n;i++){
        scanf("%d%d",&x,&y);
        if(x<y) a[++cnta].c=x,a[cnta].v=y;//gaining drive
        else b[++cntb].c=x,b[cntb].v=y;//losing (or break-even) drive
    }
    sort(a+1,a+1+cnta,cmpa);
    sort(b+1,b+1+cntb,cmpb);
}

int main()
{
    init();
    work();
    return 0;
}
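
As a quick sanity check, here is a hand-traced run on a tiny instance of my own (not official test data). Drive 1 costs 1 and yields 3 (a gaining drive); drive 2 costs 5 and yields 2 (a losing drive):

2
1 3
5 2

The gaining drive goes first: del(1,3) makes now = -1, so we buy 1 (ans = 1, now = 0), then gain 3 (now = 3). Next, del(5,2) makes now = 3 - 5 = -2, so we buy 2 more (ans = 3, now = 0), then gain 2. The program prints 3.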