// Sqoop import from MySQL to HDFS
./sqoop import \
  --connect jdbc:mysql://localhost:3306/1704e \
  --username root \
  --password 123456 \
  --table tb_jk \
  --target-dir /sqooptest \
  --fields-terminated-by ',' \
  --split-by id \
  -m 2
# --connect: JDBC connection string; --username / --password: MySQL credentials
# --table: source table; --target-dir: output path on HDFS
# --fields-terminated-by: field delimiter in the output files
# --split-by: column used to split the rows across mappers
#   (can be omitted when the table has a primary key)
# -m 2: run two mappers
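To confirm the import worked, list the target directory: a map-only Sqoop job with two mappers typically writes two part files. A minimal check, assuming a standard Hadoop client on the same machine:

# Each mapper writes its own output file under the target directory
hdfs dfs -ls /sqooptest
# Expected: part-m-00000 and part-m-00001 (plus a _SUCCESS marker)
hdfs dfs -cat /sqooptest/part-m-00000 | head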
// Sqoop import from MySQL to Hive
bin/sqoop import \
  --connect jdbc:mysql://localhost:3306/1704e \
  --username root \
  --password 123456 \
  --table tb_jk \
  --hive-import \
  --split-by id
# --hive-import: load the data into a Hive table (named tb_jk by default)
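With --hive-import, Sqoop stages the data on HDFS and then loads it into a Hive table named after the source table, in the default database unless --hive-table / --hive-database say otherwise. A quick check, assuming the hive CLI is available:

# Query the newly created Hive table
hive -e 'SELECT * FROM tb_jk LIMIT 5;'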
Conditional import from MySQL to HDFS
Method 1: filter with --where

bin/sqoop import \
  --connect jdbc:mysql://localhost:3306/1704e \
  --username root \
  --password 123456 \
  --table tb_jk \
  --where "id > 10 or name = 'alice'" \
  --target-dir /Alice \
  --fields-terminated-by ',' \
  --split-by id
# --where: predicate pushed into the SELECT that Sqoop generates against tb_jk
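Under the hood, Sqoop appends the --where predicate to the query each mapper runs, combined with that mapper's slice of the --split-by column. Conceptually the per-mapper query looks like the following sketch (illustrative only; the actual bounds come from the min/max of id):

# Shape of the per-mapper query (not a command to run):
# SELECT ... FROM tb_jk
# WHERE (id > 10 or name = 'alice') AND (id >= <lo> AND id < <hi>)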
Method 2: free-form query with --query

bin/sqoop import \
  --connect jdbc:mysql://localhost:3306/1704e \
  --username root \
  --password 123456 \
  --query 'select id, name from tb_jk where id > 10 and name = "Alice" and $CONDITIONS' \
  --target-dir /Alice02 \
  --fields-terminated-by ',' \
  -m 1
# --query: the query must contain the literal token $CONDITIONS, which Sqoop
#   replaces at runtime with each mapper's split predicate.
# Single-quote the query so the shell does not expand $CONDITIONS.
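With more than one mapper, a free-form query also needs --split-by so Sqoop can compute the split ranges. A sketch reusing the same table, with a hypothetical target directory /Alice03:

# Free-form query with two mappers: --split-by is now mandatory
bin/sqoop import \
  --connect jdbc:mysql://localhost:3306/1704e \
  --username root \
  --password 123456 \
  --query 'select id, name from tb_jk where id > 10 and $CONDITIONS' \
  --split-by id \
  --target-dir /Alice03 \
  --fields-terminated-by ',' \
  -m 2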
// Sqoop export from HDFS/Hive to MySQL
bin/sqoop export \
  --connect jdbc:mysql://localhost:3306/1704e \
  --username root \
  --password 123456 \
  --table t_emp \
  --input-fields-terminated-by ',' \
  --export-dir /user/hive/warehouse/t_emp
# --table: target MySQL table (must already exist)
# --input-fields-terminated-by: delimiter used in the source files on HDFS
# --export-dir: HDFS directory to export (here, a Hive-managed table's location)
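Sqoop export does not create the target table; it must already exist in MySQL with columns matching the exported fields. The notes above don't show the schema, so the columns below are assumptions; the DDL is run through the mysql client to stay in shell:

# Create the target table before exporting (column names are assumptions)
mysql -u root -p123456 1704e -e '
  CREATE TABLE IF NOT EXISTS t_emp (
    id   INT,
    name VARCHAR(100)
  );'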