1.下载
# Download the Flume tarball; --continue resumes a partial download.
wget --continue http://archive.cloudera.com/cdh5/cdh/5/flume-ng-1.5.0-cdh5.3.3.tar.gz
2.解压
# Unpack the gzipped tarball into /opt/modules/.
tar --extract --gzip --verbose --file flume-ng-1.5.0-cdh5.3.3.tar.gz --directory /opt/modules/
3.配置
# The original note fused four commands onto one line; they are separate steps.
cd /opt/modules/flume-ng-1.5.0-cdh5.3.3/conf
# Create flume-env.sh from the shipped template.
mv flume-env.sh.template flume-env.sh
# Edit flume-env.sh and add the following line so Flume can find the JDK:
#   export JAVA_HOME=/opt/modules/jdk1.7.0_67
vi flume-env.sh
4.flume场景应用(导入一个目录的文件到hdfs中)
# Create the agent configuration (flume.conf, shown below) from the bundled template.
cp flume-conf.properties.template flume.conf
# Agent "a1": spooling-directory source (r2) -> memory channel (c2) -> HDFS sink (k2).
a1.sources=r2
a1.channels=c2
a1.sinks=k2
# Bind the source to channel c2.
a1.sources.r2.channels=c2
# Spooling-directory source: ingests files dropped into spoolDir.
a1.sources.r2.type=spooldir
a1.sources.r2.spoolDir=/opt/datas/flume/spooldir
# Fully-ingested files are renamed with this suffix ...
a1.sources.r2.fileSuffix=.done
# ... and matched here so already-processed files are not read again.
a1.sources.r2.ignorePattern = \.done$
# In-memory channel holding up to 10000 events.
a1.channels.c2.type=memory
a1.channels.c2.capacity=10000
# HDFS sink draining channel c2.
a1.sinks.k2.channel= c2
a1.sinks.k2.type=hdfs
# %Y%m%d yields one HDFS directory per day (needs a timestamp; see useLocalTimeStamp).
a1.sinks.k2.hdfs.path=hdfs://hadoop-ehp.hyman.com:8020/user/ehp/flume/log/%Y%m%d
# Roll the file at ~128 MB — typically set to the HDFS block size.
a1.sinks.k2.hdfs.rollSize=128000000
# Write plain text rather than the default SequenceFile.
a1.sinks.k2.hdfs.fileType=DataStream
a1.sinks.k2.hdfs.writeFormat=Text
a1.sinks.k2.hdfs.filePrefix=hyman-log
# Use the agent host's clock for the %Y%m%d escape (no timestamp header required).
a1.sinks.k2.hdfs.useLocalTimeStamp=true
a1.sinks.k2.hdfs.minBlockReplicas=1
# Disable time- and count-based rolling; roll on size only (plus idleTimeout below).
a1.sinks.k2.hdfs.rollInterval=0
a1.sinks.k2.hdfs.rollCount=0
# Close the current file after 120 s of inactivity; a new one opens on the next event.
a1.sinks.k2.hdfs.idleTimeout=120
# Start the agent in the background. nohup keeps it running after logout
# (a bare '&' alone would leave it exposed to SIGHUP), and stdout/stderr are
# captured to a log file instead of being lost on the terminal.
nohup "$FLUME_HOME"/bin/flume-ng agent \
  --name a1 \
  --conf conf \
  --conf-file conf/flume.conf \
  > flume-agent.out 2>&1 &