HDFS MapReduce
Docker turns out to solve the virtual-machine problem very conveniently: the whole cluster can run in containers instead of separate VMs.
1. Start the cluster: NameNode, DataNode
   NameNode web UI port: 50070
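A minimal sketch of the start commands, assuming a Hadoop 2.x installation (the 50070 UI port matches 2.x) with its sbin directory on the PATH:
hdfs namenode -format                  # only once, on a brand-new cluster
hadoop-daemon.sh start namenode
hadoop-daemon.sh start datanode
jps                                    # NameNode and DataNode should be listed
# NameNode web UI: http://<namenode-host>:50070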
2. Start YARN to run MapReduce
   ResourceManager
   NodeManager
   YARN web UI port: 8088
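The YARN daemons start the same way (same Hadoop 2.x assumption):
yarn-daemon.sh start resourcemanager
yarn-daemon.sh start nodemanager
# ResourceManager web UI: http://<resourcemanager-host>:8088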
3. Start the job history server (historyserver)
   Internal RPC port: 10020
   Web UI port: 19888
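The history server has its own start script in Hadoop 2.x:
mr-jobhistory-daemon.sh start historyserver
# RPC port 10020, web UI: http://<historyserver-host>:19888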
4. Configuration files behind each service
   HDFS service: core-site.xml, hdfs-site.xml
   YARN service: yarn-site.xml
   Log / job history service: mapred-site.xml (see the sketch below)
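As an illustration, a minimal mapred-site.xml that runs MapReduce on YARN and registers the history server ports above; the hostname hadoop101 is borrowed from the docker run command at the end of these notes and is an assumption about the actual setup:
<configuration>
    <property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
    </property>
    <property>
        <name>mapreduce.jobhistory.address</name>
        <value>hadoop101:10020</value>
    </property>
    <property>
        <name>mapreduce.jobhistory.webapp.address</name>
        <value>hadoop101:19888</value>
    </property>
</configuration>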
log4j.properties (client-side logging configuration):
log4j.rootLogger=INFO, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] - %m%n
log4j.appender.logfile=org.apache.log4j.FileAppender
log4j.appender.logfile.File=target/spring.log
log4j.appender.logfile.layout=org.apache.log4j.PatternLayout
log4j.appender.logfile.layout.ConversionPattern=%d %p [%c] - %m%n
# Java and Maven environment variables (e.g. in ~/.bash_profile)
export JAVA_HOME=/Library/Java/JavaVirtualMachines/jdk1.8.0_40.jdk/Contents/Home
export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
export PATH=$JAVA_HOME/bin:$PATH

# MAVEN_HOME
export MAVEN_HOME=/usr/local/apache-maven-3.3.9
export PATH=$PATH:$MAVEN_HOME/bin
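After editing the profile, reload it and verify the versions (the ~/.bash_profile path is an assumption about where the lines above live):
source ~/.bash_profile
java -version    # expect 1.8.0_40
mvn -version     # expect Apache Maven 3.3.9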
# load the base image and start the Hadoop container, mapping ssh and the web UI ports
docker load --input base_os.tar
docker run -d -p 5000:22 -p 50070:50070 -p 8088:8088 -p 19888:19888 --name hadoop101 --privileged=true myimage/base-os:latest /usr/sbin/init
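Once the container is running, a quick way to check it and get a shell inside; the container name hadoop101 comes from the command above, and logging in over the mapped port 5000 assumes the image actually runs sshd:
docker ps                              # the port mappings above should be listed
docker exec -it hadoop101 /bin/bash    # shell inside the container
ssh root@localhost -p 5000             # alternative, via the mapped ssh port (assumes sshd and root login are enabled)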