Spring Boot integration with Hive, using the Druid connection pool

This article shows how to connect to a Hive server via Beeline and run basic Hive SQL, and then how to configure a Hive data source in a Spring Boot project: the Maven dependencies, the configuration file parameters, and the service class implementation.

  • Hive basics

# Start HiveServer2 in the background
>bin/hiveserver2 &
# Enter beeline
>bin/beeline
# The IP is the server where Hive is deployed; the default port is 10000 and can be changed via the hive.server2.thrift.port property in conf/hive-site.xml (see the snippet after this block)
!connect jdbc:hive2://ip:port
>enter the username (empty by default)
>enter the password (empty by default)
# After connecting you will see the current version, e.g. "Beeline version 2.3.4 by Apache Hive"; it should match the hive-jdbc jar imported below
# List databases
>show databases;
# Use the default database
>use default;
# Now you can list tables, run queries, etc.
>show tables;
>select * from table_name limit 1,10;
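If you need a non-default port, the corresponding entry in conf/hive-site.xml looks roughly like this (10000 is shown only as an example value):

<property>
    <name>hive.server2.thrift.port</name>
    <value>10000</value>
</property>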

 

  • Add hive-jdbc to the pom
 <!-- Hive dependency -->
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>2.3.4</version>
            <exclusions>
                <exclusion>
                    <groupId>org.eclipse.jetty.aggregate</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hive</groupId>
                    <artifactId>hive-shims</artifactId>
                </exclusion>
                <exclusion>
                    <artifactId>jasper-compiler</artifactId>
                    <groupId>tomcat</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>jasper-runtime</artifactId>
                    <groupId>tomcat</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>servlet-api</artifactId>
                    <groupId>javax.servlet</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>log4j-slf4j-impl</artifactId>
                    <groupId>org.apache.logging.log4j</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>slf4j-log4j12</artifactId>
                    <groupId>org.slf4j</groupId>
                </exclusion>
                <exclusion>
                    <groupId>tomcat</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>ch.qos.logback</groupId>
                    <artifactId>logback-classic</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.eclipse.jetty.orbit</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.mortbay.jetty</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>jdk.tools</groupId>
            <artifactId>jdk.tools</artifactId>
            <version>1.8</version>
            <scope>system</scope>
            <systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
        </dependency>
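Because the article uses the Druid pool together with JdbcTemplate, the pom also needs the druid and spring-boot-starter-jdbc dependencies in the same <dependencies> block; they are not shown above, so the following is only a sketch (the druid version is an assumption, use whatever your project standardizes on):

        <!-- Druid connection pool; the version here is only an example -->
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>druid</artifactId>
            <version>1.1.21</version>
        </dependency>
        <!-- Provides JdbcTemplate and Spring's JDBC support -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-jdbc</artifactId>
        </dependency>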
  • Configuration file
# Hive data source
spring.datasource.druid.hive.name=hive
spring.datasource.druid.hive.url=jdbc:hive2://192.168.1.12:10000/default
spring.datasource.druid.hive.type=com.alibaba.druid.pool.DruidDataSource
spring.datasource.druid.hive.username=
spring.datasource.druid.hive.password=
spring.datasource.druid.hive.driver-class-name=org.apache.hive.jdbc.HiveDriver
spring.datasource.druid.hive.initialSize=3
spring.datasource.druid.hive.minIdle=1
spring.datasource.druid.hive.maxActive=20
spring.datasource.druid.hive.maxWait=60000
spring.datasource.druid.hive.timeBetweenEvictionRunsMillis=60000
spring.datasource.druid.hive.minEvictableIdleTimeMillis=30000
spring.datasource.druid.hive.validationQuery=select 1
spring.datasource.druid.hive.testWhileIdle=true
spring.datasource.druid.hive.testOnBorrow=false
spring.datasource.druid.hive.testOnReturn=false
spring.datasource.druid.hive.poolPreparedStatements=true
spring.datasource.druid.hive.maxOpenPreparedStatement=20
# Important: filters must not contain wall, otherwise you get the error: Failed to obtain JDBC Connection: dbType not support
spring.datasource.druid.hive.filters=stat
  • Add the data source in Spring Boot

import com.alibaba.druid.pool.DruidDataSource;
import lombok.Data;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;

import javax.sql.DataSource;
import java.sql.SQLException;

@Configuration
@ConfigurationProperties(prefix = "spring.datasource.druid.hive")
@Data
public class HiveJdbcConfiguration {
    private static Logger logger = LoggerFactory.getLogger(HiveJdbcConfiguration.class);
    private String url;
    private String username;
    private String password;
    private String driverClassName;
    private int initialSize;
    private int minIdle;
    private int maxActive;
    private int maxWait;
    private int timeBetweenEvictionRunsMillis;
    private int minEvictableIdleTimeMillis;
    private String validationQuery;
    private boolean testWhileIdle;
    private boolean testOnBorrow;
    private boolean testOnReturn;
    private boolean poolPreparedStatements;
    private int maxOpenPreparedStatement;
    private String filters;

    /**
     * Hive data source configuration
     *
     * @return the Hive data source
     */
    @Bean(name = "hiveDataSource")
    @Qualifier("hiveDataSource")
    public DataSource hiveDataSource() {
        DruidDataSource datasource = new DruidDataSource();
        datasource.setUrl(url);
        datasource.setUsername(username);
        datasource.setPassword(password);
        datasource.setDriverClassName(driverClassName);

        // pool configuration
        datasource.setInitialSize(initialSize);
        datasource.setMinIdle(minIdle);
        datasource.setMaxActive(maxActive);
        datasource.setMaxWait(maxWait);
        datasource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
        datasource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
        datasource.setValidationQuery(validationQuery);
        datasource.setTestWhileIdle(testWhileIdle);
        datasource.setTestOnBorrow(testOnBorrow);
        datasource.setTestOnReturn(testOnReturn);
        datasource.setPoolPreparedStatements(poolPreparedStatements);
        datasource.setMaxOpenPreparedStatements(maxOpenPreparedStatement);
        try {
            datasource.setFilters(filters);
        } catch (SQLException e) {
            logger.error("Druid configuration initialization filter error.", e);
        }
        return datasource;
    }

    @Bean(name = "hiveJdbcTemplate")
    public JdbcTemplate hiveJdbcTemplate(@Qualifier("hiveDataSource") DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }
}
  • Test

import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.List;
import java.util.Map;

@Service
@Slf4j
public class HiveTestService {
    @Autowired
    @Qualifier("hiveJdbcTemplate")
    private JdbcTemplate jdbcTemplate;

    public void test2() {
        List<Map<String, Object>> datalist = jdbcTemplate.queryForList("select * from t_user limit 1,10");
        // log.info expects a String, so log the size via a format placeholder
        log.info("query returned {} rows", datalist.size());
    }
    
    // Plain JDBC test for comparison
    public void test1() {
        try {
            Class.forName("org.apache.hive.jdbc.HiveDriver");
            Connection conn = DriverManager.getConnection("jdbc:hive2://192.168.1.12:10000/default", "", "");
            Statement stmt = conn.createStatement();
            ResultSet resultSet = stmt.executeQuery(" select * from  t_user limit 1,10");
            int columns = resultSet.getMetaData().getColumnCount();
            int rowIndex = 1;
            while (resultSet.next()) {
                for (int i = 1; i <= columns; i++) {
                    System.out.println("RowIndex: " + rowIndex + ", ColumnIndex: " + i + ", ColumnValue: " + resultSet.getString(i));
                }
                rowIndex++;
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

@EnableTransactionManagement
@SpringBootApplication
public class HiveApplication {
    public static void main(String[] args) {
        SpringApplicationBuilder builder = new SpringApplicationBuilder(HiveApplication.class);
        builder.addCommandLineProperties(false);
        builder.bannerMode(Banner.Mode.LOG).run(args);
        HiveTestService hiveTestService = (HiveTestService) SpringContextHolder.getBean("hiveTestService");
        //hiveTestService.test1();
        hiveTestService.test2();
    }
}

Note: do not pass values to Hive SQL as bind parameters (for example via a parameter map); concatenate them into the SQL string directly, otherwise you get: ERROR c.a.d.p.DruidPooledPreparedStatement - getMaxFieldSize error. See the sketch below.
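A minimal illustration of that note, reusing the hiveJdbcTemplate and the t_user table from the test service above (the offset and row count values are just examples):

// Works: concatenate the values into the SQL string before handing it to the JdbcTemplate
int offset = 1;
int rows = 10;
List<Map<String, Object>> data =
        jdbcTemplate.queryForList("select * from t_user limit " + offset + "," + rows);

// Avoid: binding the values as prepared-statement parameters, e.g.
// jdbcTemplate.queryForList("select * from t_user limit ?,?", offset, rows);
// with the Druid pool this can fail with the getMaxFieldSize error mentioned above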
