Pulling Data from Hive into MySQL with Java

mybatis-config.xml (the mappers section registers both mapper files):

<?xml version="1.0" encoding="UTF-8" ?>
<mappers>
    <mapper resource="mapper/mysql-events.xml"></mapper>
    <mapper resource="mapper/hive-events.xml"></mapper>
</mappers>
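
DatabaseUtils (shown further below) chooses between two environment ids in this same mybatis-config.xml: "zjy" for MySQL and "zjy1" for Hive. A minimal sketch of an environments section that would match that selection follows; the driver classes, JDBC URLs, and credentials here are placeholders, not values from the original project:

<environments default="zjy">
    <!-- MySQL environment, selected when DatabaseUtils.getSession("mysql") is called -->
    <environment id="zjy">
        <transactionManager type="JDBC"/>
        <dataSource type="org.example.DruidDataSourceFactory">
            <property name="driver" value="com.mysql.cj.jdbc.Driver"/>
            <property name="url" value="jdbc:mysql://&lt;mysql-host&gt;:3306/&lt;database&gt;"/>
            <property name="username" value="&lt;user&gt;"/>
            <property name="password" value="&lt;password&gt;"/>
        </dataSource>
    </environment>
    <!-- Hive environment, selected for any other argument ("hive") -->
    <environment id="zjy1">
        <transactionManager type="JDBC"/>
        <dataSource type="org.example.DruidDataSourceFactory">
            <property name="driver" value="org.apache.hive.jdbc.HiveDriver"/>
            <property name="url" value="jdbc:hive2://&lt;hive-host&gt;:10000/dwd_intes"/>
            <property name="username" value="&lt;user&gt;"/>
            <property name="password" value=""/>
        </dataSource>
    </environment>
</environments>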

hive-events.xml:



<?xml version="1.0" encoding="UTF-8" ?> select eventid,userid,starttime,city,states,zip,country,lat,lng,features from dwd_intes.tmp where flag=#{flag}

mysql-events.xml:



<?xml version="1.0" encoding="UTF-8" ?> insert into dm_events_bak1 values ( #{eve.eventid},#{eve.userid},#{eve.starttime},#{eve.city}, #{eve.states},#{eve.zip},#{eve.country},#{eve.lat},#{eve.lng},#{eve.features} )

3. Java code



DAO layer:


HiveEventDAO:



package org.dao;

import org.entry.Events;

import java.util.List;

public interface HiveEventDAO {
    public List<Events> findAll(int page);
}


MySQLEventDAO:



package org.dao;

import org.entry.Events;

import java.util.List;

public interface MySQLEventDAO {
    public List<Events> findAll();
    public void batchInsert(List<Events> evs);
}



Entity (entry) layer:


Events:



package org.entry;

public class Events {
    private String eventid;
    private String userid;
    private String starttime;
    private String city;
    private String states;
    private String zip;
    private String country;
    private String lat;
    private String lng;
    private String features;

    public String getEventid() {
        return eventid;
    }

    public void setEventid(String eventid) {
        this.eventid = eventid;
    }

    public String getUserid() {
        return userid;
    }

    public void setUserid(String userid) {
        this.userid = userid;
    }

    public String getStarttime() {
        return starttime;
    }

    public void setStarttime(String starttime) {
        this.starttime = starttime;
    }

    public String getCity() {
        return city;
    }

    public void setCity(String city) {
        this.city = city;
    }

    public String getStates() {
        return states;
    }

    public void setStates(String states) {
        this.states = states;
    }

    public String getZip() {
        return zip;
    }

    public void setZip(String zip) {
        this.zip = zip;
    }

    public String getCountry() {
        return country;
    }

    public void setCountry(String country) {
        this.country = country;
    }

    public String getLat() {
        return lat;
    }

    public void setLat(String lat) {
        this.lat = lat;
    }

    public String getLng() {
        return lng;
    }

    public void setLng(String lng) {
        this.lng = lng;
    }

    public String getFeatures() {
        return features;
    }

    public void setFeatures(String features) {
        this.features = features;
    }
}



Data source util layer:


DruidDataSourceFactory:



package org.example;

import com.alibaba.druid.pool.DruidDataSource;
import org.apache.ibatis.datasource.DataSourceFactory;

import javax.sql.DataSource;
import java.sql.SQLException;
import java.util.Properties;

public class DruidDataSourceFactory implements DataSourceFactory
{
    private Properties prop;

    @Override
    public void setProperties(Properties properties) {
        this.prop = properties;
    }

    @Override
    public DataSource getDataSource() {
        DruidDataSource druid = new DruidDataSource();
        druid.setDriverClassName(this.prop.getProperty("driver"));
        druid.setUrl(this.prop.getProperty("url"));
        druid.setUsername(this.prop.getProperty("username"));
        druid.setPassword(this.prop.getProperty("password"));

//        druid.setMaxActive(Integer.parseInt(this.prop.getProperty("maxactive")));
//        druid.setInitialSize(Integer.parseInt(this.prop.getProperty("initialsize")));
        try {
            druid.init();
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return druid;
    }
}


DatabaseUtils:



package org.example;

import org.apache.ibatis.io.Resources;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;

import java.io.IOException;
import java.io.InputStream;

public class DatabaseUtils {
    private static final String configPath = "mybatis-config.xml";

    public static SqlSession getSession(String db) {
        SqlSession session = null;
        try {
            InputStream is = Resources.getResourceAsStream(configPath);
            // "zjy" is the MySQL environment id, "zjy1" the Hive environment id in mybatis-config.xml
            SqlSessionFactory factory = new SqlSessionFactoryBuilder().build(is, db.equals("mysql") ? "zjy" : "zjy1");
            session = factory.openSession();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return session;
    }

    public static void close(SqlSession session) {
        session.close();
    }
}



Services layer:



package org.services;

import org.apache.ibatis.session.SqlSession;
import org.dao.HiveEventDAO;
import org.dao.MySQLEventDAO;
import org.entry.Events;
import org.example.DatabaseUtils;
import java.util.List;

public class HiveToMySqlService {
    // Look up one page of Hive data through HiveEventDAO
    private List<Events> change(int page) {
        SqlSession hiveSession = DatabaseUtils.getSession("hive");
        HiveEventDAO hivedao = hiveSession.getMapper(HiveEventDAO.class);

        List<Events> lst = hivedao.findAll(page);
        DatabaseUtils.close(hiveSession);   // release the Hive session once the page has been read
        return lst;
    }

    public void fillMySql() {
        // Prepare the MySQL connection
        SqlSession session = DatabaseUtils.getSession("mysql");
        MySQLEventDAO medao = session.getMapper(MySQLEventDAO.class);

        // Call change() to fetch the Hive data page by page and batch-insert each page into MySQL
        for (int i = 0; i <= 627; i++) {
            List<Events> eves = change(i);
            medao.batchInsert(eves);
            session.commit();
        }
        DatabaseUtils.close(session);
    }
}



Run layer (main class):



package org.example;

import org.services.HiveToMySqlService;

public class App
{
    public static void main(String[] args)
    {
        HiveToMySqlService hts = new HiveToMySqlService();
        hts.fillMySql();
    }
}


Because the service layer reads 5,000 rows from Hive per page and then pushes those 5,000 rows into MySQL before fetching the next page, while Hive can deliver rows far faster than MySQL can absorb them, this class was optimized:



package org.services;
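
// Illustrative sketch only, not the original optimized implementation: a reader thread
// pages rows out of Hive while a writer thread drains a BlockingQueue and batch-inserts
// into MySQL, so the fast Hive reads no longer wait on the slower MySQL writes.
// The class name, queue capacity, and poison-pill shutdown below are assumptions.

import org.apache.ibatis.session.SqlSession;
import org.dao.HiveEventDAO;
import org.dao.MySQLEventDAO;
import org.entry.Events;
import org.example.DatabaseUtils;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

public class HiveToMySqlQueueService {

    // Bounded queue decouples the Hive reader from the MySQL writer and throttles
    // the reader if MySQL falls behind (a capacity of 10 pages is an assumption).
    private final BlockingQueue<List<Events>> queue = new LinkedBlockingQueue<>(10);

    // Distinct sentinel list used as a poison pill to tell the writer to stop.
    private static final List<Events> POISON = new ArrayList<>();

    // Same page range as the original single-threaded loop (0..627).
    private static final int LAST_PAGE = 627;

    public void fillMySql() throws InterruptedException {
        Thread reader = new Thread(this::readFromHive, "hive-reader");
        Thread writer = new Thread(this::writeToMySql, "mysql-writer");
        reader.start();
        writer.start();
        reader.join();
        writer.join();
    }

    // Producer: page through Hive and hand each page of rows to the queue.
    private void readFromHive() {
        SqlSession hiveSession = DatabaseUtils.getSession("hive");
        try {
            HiveEventDAO hivedao = hiveSession.getMapper(HiveEventDAO.class);
            for (int i = 0; i <= LAST_PAGE; i++) {
                queue.put(hivedao.findAll(i));
            }
            queue.put(POISON);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        } finally {
            DatabaseUtils.close(hiveSession);
        }
    }

    // Consumer: drain pages from the queue and batch-insert them into MySQL.
    private void writeToMySql() {
        SqlSession session = DatabaseUtils.getSession("mysql");
        try {
            MySQLEventDAO medao = session.getMapper(MySQLEventDAO.class);
            while (true) {
                List<Events> page = queue.take();
                if (page == POISON) {
                    break;                     // reader is done
                }
                if (!page.isEmpty()) {
                    medao.batchInsert(page);
                    session.commit();
                }
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        } finally {
            DatabaseUtils.close(session);
        }
    }
}

In this sketch the bounded queue keeps reading and writing overlapped while preventing the reader from racing too far ahead of MySQL, and the poison pill lets the writer drain the queue and exit cleanly; the page size itself is still whatever the flag-based paging in hive-events.xml returns.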
