问题描述:
2015-7-16 10:17:26 org.apache.tomcat.util.net.NioEndpoint$Poller run
严重:
java.lang.OutOfMemoryError: Java heap space
at java.util.HashMap.newKeyIterator(HashMap.java:840)
at java.util.HashMap$KeySet.iterator(HashMap.java:874)
at java.util.HashSet.iterator(HashSet.java:153)
at java.util.Collections$UnmodifiableCollection$1.<init>(Collections.java:1007)
at java.util.Collections$UnmodifiableCollection.iterator(Collections.java:1006)
at org.apache.tomcat.util.net.NioEndpoint$Poller.timeout(NioEndpoint.java:1651)
at org.apache.tomcat.util.net.NioEndpoint$Poller.run(NioEndpoint.java:1512)
at java.lang.Thread.run(Thread.java:619)
2015-7-16 10:17:26 org.apache.tomcat.util.net.NioBlockingSelector$BlockPoller run
严重:
java.lang.OutOfMemoryError: Java heap space
at java.util.HashMap.newKeyIterator(HashMap.java:840)
at java.util.HashMap$KeySet.iterator(HashMap.java:874)
at java.util.HashSet.iterator(HashSet.java:153)
at sun.nio.ch.SelectorImpl.processDeregisterQueue(SelectorImpl.java:127)
at sun.nio.ch.WindowsSelectorImpl.doSelect(WindowsSelectorImpl.java:120)
at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:69)
at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:80)
at org.apache.tomcat.util.net.NioBlockingSelector$BlockPoller.run(NioBlockingSelector.java:305)
10:17:26.064 ERROR org.hibernate.search.backend.impl.lucene.PerDPQueueProcessor - Unexpected error in Lucene Backend:
java.lang.OutOfMemoryError: Java heap space
at java.util.HashMap.addEntry(HashMap.java:753) ~[na:1.6.0_10-rc2]
at java.util.HashMap.put(HashMap.java:385) ~[na:1.6.0_10-rc2]
at org.wltea.analyzer.dic.DictSegment.lookforSegment(DictSegment.java:242) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.dic.DictSegment.fillSegment(DictSegment.java:176) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.dic.DictSegment.fillSegment(DictSegment.java:180) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.dic.DictSegment.fillSegment(DictSegment.java:156) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.dic.Dictionary.loadMainDict(Dictionary.java:98) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.dic.Dictionary.<init>(Dictionary.java:72) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.dic.Dictionary.<clinit>(Dictionary.java:42) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.cfg.Configuration.loadSegmenter(Configuration.java:110) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.IKSegmentation.<init>(IKSegmentation.java:54) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.lucene.IKTokenizer.<init>(IKTokenizer.java:44) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.lucene.IKAnalyzer.tokenStream(IKAnalyzer.java:45) ~[IKAnalyzer-3.2.8.jar:na]
at org.hibernate.search.util.ScopedAnalyzer.tokenStream(ScopedAnalyzer.java:63) ~[hibernate-search-3.4.2.jar:3.4.2.Final]
at org.apache.lucene.analysis.Analyzer.reusableTokenStream(Analyzer.java:80) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.apache.lucene.index.DocInverterPerField.processFields(DocInverterPerField.java:126) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.apache.lucene.index.DocFieldProcessorPerThread.processDocument(DocFieldProcessorPerThread.java:278) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.apache.lucene.index.DocumentsWriter.updateDocument(DocumentsWriter.java:766) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.apache.lucene.index.IndexWriter.addDocument(IndexWriter.java:2066) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.hibernate.search.backend.impl.lucene.works.AddWorkDelegate.performWork(AddWorkDelegate.java:76) ~[hibernate-search-3.4.2.jar:3.4.2.Final]
at org.hibernate.search.backend.impl.lucene.PerDPQueueProcessor.run(PerDPQueueProcessor.java:106) ~[hibernate-search-3.4.2.jar:3.4.2.Final]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:441) [na:1.6.0_10-rc2]
at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303) [na:1.6.0_10-rc2]
at java.util.concurrent.FutureTask.run(FutureTask.java:138) [na:1.6.0_10-rc2]
at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886) [na:1.6.0_10-rc2]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:908) [na:1.6.0_10-rc2]
at java.lang.Thread.run(Thread.java:619) [na:1.6.0_10-rc2]
10:17:26.075 ERROR org.hibernate.search.exception.impl.LogErrorHandler - Exception occurred java.lang.OutOfMemoryError: Java heap space
Primary Failure:
Entity com.sencloud.entity.Product Id 4624 Work Type org.hibernate.search.backend.AddLuceneWork
java.lang.OutOfMemoryError: Java heap space
at java.util.HashMap.addEntry(HashMap.java:753) ~[na:1.6.0_10-rc2]
at java.util.HashMap.put(HashMap.java:385) ~[na:1.6.0_10-rc2]
at org.wltea.analyzer.dic.DictSegment.lookforSegment(DictSegment.java:242) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.dic.DictSegment.fillSegment(DictSegment.java:176) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.dic.DictSegment.fillSegment(DictSegment.java:180) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.dic.DictSegment.fillSegment(DictSegment.java:156) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.dic.Dictionary.loadMainDict(Dictionary.java:98) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.dic.Dictionary.<init>(Dictionary.java:72) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.dic.Dictionary.<clinit>(Dictionary.java:42) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.cfg.Configuration.loadSegmenter(Configuration.java:110) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.IKSegmentation.<init>(IKSegmentation.java:54) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.lucene.IKTokenizer.<init>(IKTokenizer.java:44) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.lucene.IKAnalyzer.tokenStream(IKAnalyzer.java:45) ~[IKAnalyzer-3.2.8.jar:na]
at org.hibernate.search.util.ScopedAnalyzer.tokenStream(ScopedAnalyzer.java:63) ~[hibernate-search-3.4.2.jar:3.4.2.Final]
at org.apache.lucene.analysis.Analyzer.reusableTokenStream(Analyzer.java:80) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.apache.lucene.index.DocInverterPerField.processFields(DocInverterPerField.java:126) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.apache.lucene.index.DocFieldProcessorPerThread.processDocument(DocFieldProcessorPerThread.java:278) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.apache.lucene.index.DocumentsWriter.updateDocument(DocumentsWriter.java:766) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.apache.lucene.index.IndexWriter.addDocument(IndexWriter.java:2066) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.hibernate.search.backend.impl.lucene.works.AddWorkDelegate.performWork(AddWorkDelegate.java:76) ~[hibernate-search-3.4.2.jar:3.4.2.Final]
at org.hibernate.search.backend.impl.lucene.PerDPQueueProcessor.run(PerDPQueueProcessor.java:106) ~[hibernate-search-3.4.2.jar:3.4.2.Final]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:441) [na:1.6.0_10-rc2]
at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303) [na:1.6.0_10-rc2]
at java.util.concurrent.FutureTask.run(FutureTask.java:138) [na:1.6.0_10-rc2]
at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886) [na:1.6.0_10-rc2]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:908) [na:1.6.0_10-rc2]
at java.lang.Thread.run(Thread.java:619) [na:1.6.0_10-rc2]
10:19:43.076 ERROR org.hibernate.search.backend.impl.lucene.PerDPQueueProcessor - Unexpected error in Lucene Backend:
java.lang.NoClassDefFoundError: Could not initialize class org.wltea.analyzer.dic.Dictionary
at org.wltea.analyzer.cfg.Configuration.loadSegmenter(Configuration.java:110) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.IKSegmentation.<init>(IKSegmentation.java:54) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.lucene.IKTokenizer.<init>(IKTokenizer.java:44) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.lucene.IKAnalyzer.tokenStream(IKAnalyzer.java:45) ~[IKAnalyzer-3.2.8.jar:na]
at org.hibernate.search.util.ScopedAnalyzer.tokenStream(ScopedAnalyzer.java:63) ~[hibernate-search-3.4.2.jar:3.4.2.Final]
at org.apache.lucene.analysis.Analyzer.reusableTokenStream(Analyzer.java:80) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.apache.lucene.index.DocInverterPerField.processFields(DocInverterPerField.java:126) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.apache.lucene.index.DocFieldProcessorPerThread.processDocument(DocFieldProcessorPerThread.java:278) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.apache.lucene.index.DocumentsWriter.updateDocument(DocumentsWriter.java:766) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.apache.lucene.index.IndexWriter.addDocument(IndexWriter.java:2066) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.hibernate.search.backend.impl.lucene.works.AddWorkDelegate.performWork(AddWorkDelegate.java:76) ~[hibernate-search-3.4.2.jar:3.4.2.Final]
at org.hibernate.search.backend.impl.lucene.PerDPQueueProcessor.run(PerDPQueueProcessor.java:106) ~[hibernate-search-3.4.2.jar:3.4.2.Final]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:441) [na:1.6.0_10-rc2]
at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303) [na:1.6.0_10-rc2]
at java.util.concurrent.FutureTask.run(FutureTask.java:138) [na:1.6.0_10-rc2]
at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886) [na:1.6.0_10-rc2]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:908) [na:1.6.0_10-rc2]
at java.lang.Thread.run(Thread.java:619) [na:1.6.0_10-rc2]
10:19:43.328 ERROR org.hibernate.search.exception.impl.LogErrorHandler - Exception occurred java.lang.NoClassDefFoundError: Could not initialize class org.wltea.analyzer.dic.Dictionary
Primary Failure:
Entity com.sencloud.entity.Product Id 4624 Work Type org.hibernate.search.backend.AddLuceneWork
java.lang.NoClassDefFoundError: Could not initialize class org.wltea.analyzer.dic.Dictionary
at org.wltea.analyzer.cfg.Configuration.loadSegmenter(Configuration.java:110) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.IKSegmentation.<init>(IKSegmentation.java:54) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.lucene.IKTokenizer.<init>(IKTokenizer.java:44) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.lucene.IKAnalyzer.tokenStream(IKAnalyzer.java:45) ~[IKAnalyzer-3.2.8.jar:na]
at org.hibernate.search.util.ScopedAnalyzer.tokenStream(ScopedAnalyzer.java:63) ~[hibernate-search-3.4.2.jar:3.4.2.Final]
at org.apache.lucene.analysis.Analyzer.reusableTokenStream(Analyzer.java:80) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.apache.lucene.index.DocInverterPerField.processFields(DocInverterPerField.java:126) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.apache.lucene.index.DocFieldProcessorPerThread.processDocument(DocFieldProcessorPerThread.java:278) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.apache.lucene.index.DocumentsWriter.updateDocument(DocumentsWriter.java:766) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.apache.lucene.index.IndexWriter.addDocument(IndexWriter.java:2066) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.hibernate.search.backend.impl.lucene.works.AddWorkDelegate.performWork(AddWorkDelegate.java:76) ~[hibernate-search-3.4.2.jar:3.4.2.Final]
at org.hibernate.search.backend.impl.lucene.PerDPQueueProcessor.run(PerDPQueueProcessor.java:106) ~[hibernate-search-3.4.2.jar:3.4.2.Final]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:441) [na:1.6.0_10-rc2]
at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303) [na:1.6.0_10-rc2]
at java.util.concurrent.FutureTask.run(FutureTask.java:138) [na:1.6.0_10-rc2]
at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886) [na:1.6.0_10-rc2]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:908) [na:1.6.0_10-rc2]
at java.lang.Thread.run(Thread.java:619) [na:1.6.0_10-rc2]
10:20:54.272 ERROR org.hibernate.search.backend.impl.lucene.PerDPQueueProcessor - Unexpected error in Lucene Backend:
java.lang.NoClassDefFoundError: Could not initialize class org.wltea.analyzer.dic.Dictionary
at org.wltea.analyzer.cfg.Configuration.loadSegmenter(Configuration.java:110) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.IKSegmentation.<init>(IKSegmentation.java:54) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.lucene.IKTokenizer.<init>(IKTokenizer.java:44) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.lucene.IKAnalyzer.tokenStream(IKAnalyzer.java:45) ~[IKAnalyzer-3.2.8.jar:na]
at org.hibernate.search.util.ScopedAnalyzer.tokenStream(ScopedAnalyzer.java:63) ~[hibernate-search-3.4.2.jar:3.4.2.Final]
at org.apache.lucene.analysis.Analyzer.reusableTokenStream(Analyzer.java:80) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.apache.lucene.index.DocInverterPerField.processFields(DocInverterPerField.java:126) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.apache.lucene.index.DocFieldProcessorPerThread.processDocument(DocFieldProcessorPerThread.java:278) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.apache.lucene.index.DocumentsWriter.updateDocument(DocumentsWriter.java:766) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.apache.lucene.index.IndexWriter.addDocument(IndexWriter.java:2066) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.hibernate.search.backend.impl.lucene.works.AddWorkDelegate.performWork(AddWorkDelegate.java:76) ~[hibernate-search-3.4.2.jar:3.4.2.Final]
at org.hibernate.search.backend.impl.lucene.PerDPQueueProcessor.run(PerDPQueueProcessor.java:106) ~[hibernate-search-3.4.2.jar:3.4.2.Final]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:441) [na:1.6.0_10-rc2]
at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303) [na:1.6.0_10-rc2]
at java.util.concurrent.FutureTask.run(FutureTask.java:138) [na:1.6.0_10-rc2]
at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886) [na:1.6.0_10-rc2]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:908) [na:1.6.0_10-rc2]
at java.lang.Thread.run(Thread.java:619) [na:1.6.0_10-rc2]
10:20:54.280 ERROR org.hibernate.search.exception.impl.LogErrorHandler - Exception occurred java.lang.NoClassDefFoundError: Could not initialize class org.wltea.analyzer.dic.Dictionary
Primary Failure:
Entity com.sencloud.entity.Product Id 4624 Work Type org.hibernate.search.backend.AddLuceneWork
java.lang.NoClassDefFoundError: Could not initialize class org.wltea.analyzer.dic.Dictionary
at org.wltea.analyzer.cfg.Configuration.loadSegmenter(Configuration.java:110) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.IKSegmentation.<init>(IKSegmentation.java:54) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.lucene.IKTokenizer.<init>(IKTokenizer.java:44) ~[IKAnalyzer-3.2.8.jar:na]
at org.wltea.analyzer.lucene.IKAnalyzer.tokenStream(IKAnalyzer.java:45) ~[IKAnalyzer-3.2.8.jar:na]
at org.hibernate.search.util.ScopedAnalyzer.tokenStream(ScopedAnalyzer.java:63) ~[hibernate-search-3.4.2.jar:3.4.2.Final]
at org.apache.lucene.analysis.Analyzer.reusableTokenStream(Analyzer.java:80) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.apache.lucene.index.DocInverterPerField.processFields(DocInverterPerField.java:126) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.apache.lucene.index.DocFieldProcessorPerThread.processDocument(DocFieldProcessorPerThread.java:278) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.apache.lucene.index.DocumentsWriter.updateDocument(DocumentsWriter.java:766) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.apache.lucene.index.IndexWriter.addDocument(IndexWriter.java:2066) ~[lucene-3.5.0.jar:3.5.0 1204988 - simon - 2011-11-22 14:46:51]
at org.hibernate.search.backend.impl.lucene.works.AddWorkDelegate.performWork(AddWorkDelegate.java:76) ~[hibernate-search-3.4.2.jar:3.4.2.Final]
at org.hibernate.search.backend.impl.lucene.PerDPQueueProcessor.run(PerDPQueueProcessor.java:106) ~[hibernate-search-3.4.2.jar:3.4.2.Final]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:441) [na:1.6.0_10-rc2]
at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303) [na:1.6.0_10-rc2]
at java.util.concurrent.FutureTask.run(FutureTask.java:138) [na:1.6.0_10-rc2]
at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886) [na:1.6.0_10-rc2]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:908) [na:1.6.0_10-rc2]
at java.lang.Thread.run(Thread.java:619) [na:1.6.0_10-rc2]
解决方案:
1 查看内存:
workspace/.metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.ui.prefs,在里面加上SHOW_MEMORY_MONITOR=true,重启eclipse,就可以在其左下角看到内存使用情况。
2 修改内存:
方法一:
找到eclipse安装目录下的eclipse.ini,打开,在文件末尾加上(注意:-vmargs 及其后的参数必须放在文件的最后,其后不能再有其他启动参数)
-vmargs
-Xms128m
-Xmx512m
-XX:PermSize=64M
-XX:MaxPermSize=128M
方法二:
打开eclipse,选择Window--Preferences--Java--Installed JREs,在右边选择前面有对勾的JRE,单击Edit,出现一个Edit JRE的对话框,在其中的Default VM Arguments框中输入-Xms128m -Xmx512m。
这样设置Java虚拟机初始堆内存为128M,最大堆内存为512M。需要注意的是:上述异常栈表明OOM发生在运行Tomcat的那个JVM中,以上设置只对由Eclipse(或Eclipse内启动的服务器)使用的JVM生效;若Tomcat是独立启动的,应改为在Tomcat的启动参数(如CATALINA_OPTS或catalina.bat/catalina.sh)中设置-Xms/-Xmx。