Problem 1: Caused by: java.lang.OutOfMemoryError: Java heap space
Diagnostic Messages for this Task:
Error: java.io.IOException: java.lang.reflect.InvocationTargetException
at org.apache.hadoop.hive.io.HiveIOExceptionHandlerChain.handleRecordReaderCreationException(HiveIOExceptionHandlerChain.java:97)
at org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil.handleRecordReaderCreationException(HiveIOExceptionHandlerUtil.java:57)
at org.apache.hadoop.hive.shims.HadoopShimsSecure$CombineFileRecordReader.initNextRecordReader(HadoopShimsSecure.java:266)
at org.apache.hadoop.hive.shims.HadoopShimsSecure$CombineFileRecordReader.<init>(HadoopShimsSecure.java:213)
at org.apache.hadoop.hive.shims.HadoopShimsSecure$CombineFileInputFormatShim.getRecordReader(HadoopShimsSecure.java:333)
at org.apache.hadoop.hive.ql.io.CombineHiveInputFormat.getRecordReader(CombineHiveInputFormat.java:720)
at org.apache.hadoop.mapred.MapTask$TrackedRecordReader.<init>(MapTask.java:169)
at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:429)
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:163)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1656)
at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158)
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.hadoop.hive.shims.HadoopShimsSecure$CombineFileRecordReader.initNextRecordReader(HadoopShimsSecure.java:252)
… 11 more
Caused by: java.lang.OutOfMemoryError: Java heap space
at parquet.hadoop.ParquetFileReader$ConsecutiveChunkList.readAll(ParquetFileReader.java:755)
at parquet.hadoop.ParquetFileReader.readNextRowGroup(ParquetFileReader.java:494)
at parquet.hadoop.InternalParquetRecordReader.checkRead(InternalParquetRecordReader.java:127)
at parquet.hadoop.InternalParquetRecordReader.nextKeyValue(InternalParquetRecordReader.java:208)
at parquet.hadoop.ParquetRecordReader.nextKeyValue(ParquetRecordReader.java:201)
at org.apache.hadoop.hive.ql.io.parquet.read.ParquetRecordReaderWrapper.<init>(ParquetRecordReaderWrapper.java:122)
at org.apache.hadoop.hive.ql.io.parquet.read.ParquetRecordReaderWrapper.<init>(ParquetRecordReaderWrapper.java:85)
at org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat.getRecordReader(MapredParquetInputFormat.java:72)
at org.apache.hadoop.hive.ql.io.CombineHiveRecordReader.<init>(CombineHiveRecordReader.java:67)
Solution
The innermost cause shows the map task running out of heap while ParquetFileReader pulls an entire row group into memory, so the fix is to give the map-task JVM a larger heap.
Temporary fix (set in the Hive session before running the query):
set mapred.child.java.opts=-Xmx2000m;
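On Hadoop 2.x / YARN clusters the same session-level bump can also be expressed with the newer per-task properties. A sketch, assuming the OOM is on the map side; the sizes below are illustrative, not taken from the original job:
set mapreduce.map.memory.mb=3072;        -- size of the YARN container for each map task
set mapreduce.map.java.opts=-Xmx2560m;   -- JVM heap inside that container, kept smaller than the container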
Permanent fix
On every node, edit the configuration files under hadoop/etc/hadoop/.
Add to yarn-site.xml:
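The yarn-site.xml values are cluster-specific; what typically accompanies a larger task heap is a higher container-memory ceiling, sketched below with illustrative numbers (assumptions, adjust to the node's actual RAM):
<property>
  <name>yarn.nodemanager.resource.memory-mb</name>
  <value>8192</value>   <!-- total memory this NodeManager may hand out to containers -->
</property>
<property>
  <name>yarn.scheduler.maximum-allocation-mb</name>
  <value>4096</value>   <!-- largest single container the scheduler will grant -->
</property>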
Add to mapred-site.xml:
<property>
  <name>mapred.child.java.opts</name>
  <value>-Xmx2024m</value>
</property>
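Since mapred.child.java.opts is a per-job property read from the client's mapred-site.xml at submission time, the change applies to the next job once the Hive client picks up the updated file. To confirm the value a session will use, query it in Hive (output format may vary by version):
set mapred.child.java.opts;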