1. First, check the Tomcat runtime log with tail -f catalina.out:
org.apache.catalina.connector.ClientAbortException: java.io.IOException: Connection reset by peer
at org.apache.catalina.connector.OutputBuffer.realWriteBytes(OutputBuffer.java:356)
at org.apache.catalina.connector.OutputBuffer.flushByteBuffer(OutputBuffer.java:825)
at org.apache.catalina.connector.OutputBuffer.append(OutputBuffer.java:730)
at org.apache.catalina.connector.OutputBuffer.writeBytes(OutputBuffer.java:391)
at org.apache.catalina.connector.OutputBuffer.write(OutputBuffer.java:369)
at org.apache.catalina.connector.CoyoteOutputStream.write(CoyoteOutputStream.java:96)
at com.fasterxml.jackson.core.json.UTF8JsonGenerator._flushBuffer(UTF8JsonGenerator.java:1846)
at com.fasterxml.jackson.core.json.UTF8JsonGenerator._writeStringSegment2(UTF8JsonGenerator.java:1264)
at com.fasterxml.jackson.core.json.UTF8JsonGenerator._writeStringSegment(UTF8JsonGenerator.java:1247)
at javax.servlet.http.HttpServlet.service(HttpServlet.java:661)
at org.springframework.web.servlet.FrameworkServlet.service(FrameworkServlet.java:846)
at javax.servlet.http.HttpServlet.service(HttpServlet.java:742)
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:231)
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:166)
at org.apache.tomcat.websocket.server.WsFilter.doFilter(WsFilter.java:52)
at org.apache.catalina.core.StandardEngineValve.invoke(StandardEngineValve.java:87)
at org.apache.catalina.connector.CoyoteAdapter.service(CoyoteAdapter.java:342)
at org.apache.coyote.http11.Http11Processor.service(Http11Processor.java:800)
at org.apache.coyote.AbstractProcessorLight.process(AbstractProcessorLight.java:66)
at org.apache.coyote.AbstractProtocol$ConnectionHandler.process(AbstractProtocol.java:800)
at org.apache.tomcat.util.net.NioEndpoint$SocketProcessor.doRun(NioEndpoint.java:1471)
at org.apache.tomcat.util.net.SocketProcessorBase.run(SocketProcessorBase.java:49)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at org.apache.tomcat.util.threads.TaskThread$WrappingRunnable.run(TaskThread.java:61)
at java.lang.Thread.run(Thread.java:748)
Caused by: java.io.IOException: Connection reset by peer
at sun.nio.ch.FileDispatcherImpl.write0(Native Method)
at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:47)
at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:93)
at sun.nio.ch.IOUtil.write(IOUtil.java:65)
at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:471)
at org.apache.tomcat.util.net.NioChannel.write(NioChannel.java:134)
at org.apache.tomcat.util.net.NioBlockingSelector.write(NioBlockingSelector.java:101)
at org.apache.tomcat.util.net.NioSelectorPool.write(NioSelectorPool.java:157)
at org.apache.tomcat.util.net.NioEndpoint$NioSocketWrapper.doWrite(NioEndpoint.java:1279)
at org.apache.tomcat.util.net.SocketWrapperBase.doWrite(SocketWrapperBase.java:670)
at org.apache.tomcat.util.net.SocketWrapperBase.writeBlocking(SocketWrapperBase.java:450)
at org.apache.tomcat.util.net.SocketWrapperBase.write(SocketWrapperBase.java:388)
at org.apache.coyote.http11.Http11OutputBuffer$SocketOutputBuffer.doWrite(Http11OutputBuffer.java:623)
at org.apache.coyote.http11.filters.IdentityOutputFilter.doWrite(IdentityOutputFilter.java:127)
at org.apache.coyote.http11.Http11OutputBuffer.doWrite(Http11OutputBuffer.java:225)
at org.apache.coyote.Response.doWrite(Response.java:541)
at org.apache.catalina.connector.OutputBuffer.realWriteBytes(OutputBuffer.java:351)
... 76 more
org.apache.catalina.connector.ClientAbortException: java.io.IOException: Broken pipe
at org.apache.catalina.connector.OutputBuffer.realWriteBytes(OutputBuffer.java:356)
at org.apache.catalina.connector.OutputBuffer.flushByteBuffer(OutputBuffer.java:825)
at org.apache.catalina.connector.OutputBuffer.append(OutputBuffer.java:730)
at org.apache.catalina.connector.OutputBuffer.writeBytes(OutputBuffer.java:391)
at org.apache.catalina.connector.OutputBuffer.write(OutputBuffer.java:369)
at org.apache.catalina.connector.CoyoteOutputStream.write(CoyoteOutputStream.java:96)
at com.fasterxml.jackson.core.json.UTF8JsonGenerator._flushBuffer(UTF8JsonGenerator.java:1846)
at com.fasterxml.jackson.core.json.UTF8JsonGenerator.flush(UTF8JsonGenerator.java:1039)
at org.springframework.web.servlet.FrameworkServlet.service(FrameworkServlet.java:846)
at javax.servlet.http.HttpServlet.service(HttpServlet.java:742)
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:231)
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:166)
at org.apache.tomcat.websocket.server.WsFilter.doFilter(WsFilter.java:52)
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:193)
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:166)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at org.apache.tomcat.util.threads.TaskThread$WrappingRunnable.run(TaskThread.java:61)
at java.lang.Thread.run(Thread.java:748)
Caused by: java.io.IOException: Broken pipe
at sun.nio.ch.FileDispatcherImpl.write0(Native Method)
at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:47)
at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:93)
at sun.nio.ch.IOUtil.write(IOUtil.java:65)
at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:471)
at org.apache.tomcat.util.net.NioChannel.write(NioChannel.java:134)
at org.apache.tomcat.util.net.NioBlockingSelector.write(NioBlockingSelector.java:101)
at org.apache.tomcat.util.net.NioSelectorPool.write(NioSelectorPool.java:157)
at org.apache.tomcat.util.net.NioEndpoint$NioSocketWrapper.doWrite(NioEndpoint.java:1279)
at org.apache.tomcat.util.net.SocketWrapperBase.doWrite(SocketWrapperBase.java:670)
at org.apache.tomcat.util.net.SocketWrapperBase.writeBlocking(SocketWrapperBase.java:450)
From the log we can see that when the service handles an interface returning a large amount of business data, the response is written back to the client through network I/O; when the amount of data read and written is too large and the write takes too long, the connection between client and server is dropped before the response finishes, and Tomcat reports a ClientAbortException ("Connection reset by peer" / "Broken pipe"), indicating that the pipe between the two ends has been broken.
You can use the netstat command to check the server's current network connections:
[root@sdfassd logs]# netstat -n | awk '/^tcp/ {++state[$NF]} END {for(key in state) print key,"\t",state[key]}'
CLOSE_WAIT 3853
ESTABLISHED 285
TIME_WAIT 40
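If the CLOSE_WAIT count is high, it can also help to see which processes are holding those connections. The command below is just one possible way to group them by owning process (netstat -antp needs root privileges to show other users' processes):
[root@sdfassd logs]# netstat -antp | grep CLOSE_WAIT | awk '{print $7}' | sort | uniq -c | sort -rn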
CLOSE_WAIT counts connections that the remote peer has already closed but that the local side has not yet closed. When this number is large, many connections are being held open and wasting network resources. The operating system also applies default parameters to every TCP connection: when a connection is established, the kernel uses these defaults to decide how long an idle connection is kept before it is probed and torn down.
The sysctl command can be used to query these kernel parameters:
[root@sdfassd logs]# sysctl -a |grep keepalive
net.ipv4.tcp_keepalive_time = 7200
net.ipv4.tcp_keepalive_probes = 9
net.ipv4.tcp_keepalive_intvl = 75
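These defaults are quite conservative (an idle connection is only probed after two hours). If dead connections are piling up, the keepalive timers can be shortened with sysctl; the values below are only illustrative and should be tuned for your environment:
[root@sdfassd logs]# sysctl -w net.ipv4.tcp_keepalive_time=1800
[root@sdfassd logs]# sysctl -w net.ipv4.tcp_keepalive_probes=3
[root@sdfassd logs]# sysctl -w net.ipv4.tcp_keepalive_intvl=30
(To persist the change across reboots, add the same keys to /etc/sysctl.conf and run sysctl -p.)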
Because handling the request in Tomcat involves reading and writing a large amount of data and therefore takes a long time, the pipe between the client and Tomcat (the service is built on the CXF framework) breaks.
According to the logs, everything from the database query through generating the JSON data is normal; the error only occurs while the CXF framework is writing the response back to the client. At first I assumed the cause was Tomcat's connection timeout (the default connectionTimeout is 20000 ms, i.e. 20 seconds).
Resolution:
1) Optimize the amount of data queried:
I therefore increased the timeout to 1 minute, which had no effect. So I went back over the code, suspected that the batch query itself was taking too long, and added indexes on the columns used in the WHERE conditions of the corresponding tables.
2) Optimize the Tomcat configuration by modifying Tomcat's server.xml file.
In addition, change Tomcat's connector from the default BIO mode (relatively low performance) to NIO mode to improve request-handling performance.
The default protocol is HTTP/1.1; change it to org.apache.coyote.http11.Http11NioProtocol. The original connector looks like this (an adjusted version is sketched after it):
<Connector port="8760" protocol="HTTP/1.1"
connectionTimeout="20000"
redirectPort="8443" />
Tomcat's HTTP connector has three I/O implementations: BIO, NIO, and APR; in terms of performance, BIO < NIO < APR. (Recent Tomcat versions, 8.5 and later, use NIO by default and no longer ship the BIO connector.)
References:
https://blog.youkuaiyun.com/u014721131/article/details/78903004
https://blog.youkuaiyun.com/zqz_zqz/article/details/52235479