Using Phoenix in Code

This article shows how to use Phoenix in application code to query and update HBase: basic JDBC usage, extracting a utility class, integrating with Spring, and a final usage example. It covers the Maven dependency, creating a JDBC connection, wrapping JdbcTemplate, and calling the service from a controller.



1) Basic usage

Maven configuration:

<dependency>
    <groupId>org.apache.phoenix</groupId>
    <artifactId>phoenix-core</artifactId>
    <version>4.6.0-HBase-0.98</version>
</dependency>

 

package com.harvetech.service;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class PhoenixService {

    private static String driver = "org.apache.phoenix.jdbc.PhoenixDriver";

    public static void main(String[] args) throws SQLException {
        // Register the Phoenix JDBC driver.
        try {
            Class.forName(driver);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        }

        // The JDBC URL is "jdbc:phoenix:" followed by the ZooKeeper quorum.
        Connection con = DriverManager.getConnection(
                "jdbc:phoenix:192.168.110.97,192.168.110.98,192.168.110.99:2181");
        Statement stmt = con.createStatement();

        String sql = "select * from \"harve_role\"";
        ResultSet rset = stmt.executeQuery(sql);
        while (rset.next()) {
            System.out.println(rset.getString("name"));
        }

        stmt.close();
        con.close();
    }
}
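Besides queries, Phoenix writes go through UPSERT rather than INSERT/UPDATE, and a plain Phoenix connection does not auto-commit, so changes must be committed explicitly. The fragment below is a minimal sketch using java.sql.PreparedStatement; the "harve_role" rowkey column "ROW" is an illustrative assumption, not taken from the original schema:

// Sketch of an upsert; the rowkey column "ROW" is a hypothetical name.
String upsert = "upsert into \"harve_role\" (\"ROW\", \"name\") values (?, ?)";
try (Connection conn = DriverManager.getConnection(
        "jdbc:phoenix:192.168.110.97,192.168.110.98,192.168.110.99:2181");
     PreparedStatement ps = conn.prepareStatement(upsert)) {
    ps.setString(1, "role_001");
    ps.setString(2, "admin");
    ps.executeUpdate();
    conn.commit(); // Phoenix does not auto-commit by default.
}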

 

2) Extracting a utility class

package com.harvetech.utils;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class HbaseSqlUtils {

    private static String driver = "org.apache.phoenix.jdbc.PhoenixDriver";
    private static String zkAddress = "192.168.110.97,192.168.110.98,192.168.110.99:2181";

    private static Connection _connection = null;
    private static HbaseSqlUtils _instance = null;

    private HbaseSqlUtils() {
        try {
            Class.forName(driver);
            _connection = DriverManager.getConnection("jdbc:phoenix:" + zkAddress);
        } catch (SQLException e) {
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        }
    }

    // Lazily created singleton; the connection is opened on first use.
    public static HbaseSqlUtils instance() {
        if (null == _instance) {
            _instance = new HbaseSqlUtils();
        }
        return _instance;
    }

    private static Connection getConnection() {
        return instance()._connection;
    }

    public static ResultSet sqlQuery(String sql) throws SQLException {
        Statement stmt = getConnection().createStatement();
        ResultSet rset = stmt.executeQuery(sql);
        // The Statement is intentionally left open: closing it here would
        // also invalidate the returned ResultSet (see the note below).
        return rset;
    }

    public static void main(String[] args) throws SQLException {
        System.out.println("===============================");
        String sql = "select * from \"harve_role\"";
        ResultSet rset = HbaseSqlUtils.sqlQuery(sql);
        while (rset.next()) {
            System.out.println(rset.getString("name"));
        }

        System.out.println("-----------------------------");
        String sql2 = "select * from \"harve_user\" limit 3";
        ResultSet rset2 = HbaseSqlUtils.sqlQuery(sql2);
        while (rset2.next()) {
            System.out.println(rset2.getString("name"));
        }
        System.out.println("==============================");
    }
}

Because a ResultSet can no longer be read once its Statement has been closed, the utility class above is not a clean abstraction: sqlQuery has to leak an open Statement. A better design would be a custom HResultSet that carries the ResultSet together with its Statement as a member field and exposes a close() method, so the caller can explicitly close both once it has finished reading.

Because the focus later shifted to the Spring integration, this utility class was not optimized any further; a rough sketch of the idea is shown below.
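For reference, here is a minimal sketch of the HResultSet idea (a hypothetical class, not part of the original project). It is written as a wrapper around ResultSet rather than a subclass, for brevity; HbaseSqlUtils.sqlQuery would then return an HResultSet, and the caller would close it after consuming the rows:

package com.harvetech.utils;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

// Hypothetical wrapper: keeps the Statement next to its ResultSet so the
// caller can close both once the rows have been consumed.
public class HResultSet implements AutoCloseable {

    private final Statement statement;
    private final ResultSet resultSet;

    public HResultSet(Statement statement, ResultSet resultSet) {
        this.statement = statement;
        this.resultSet = resultSet;
    }

    public ResultSet getResultSet() {
        return resultSet;
    }

    @Override
    public void close() throws SQLException {
        resultSet.close();
        statement.close();
    }
}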

 

3) Spring integration

Reference: http://blog.youkuaiyun.com/happy_wu/article/details/74560762

 

 

① POM dependencies

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <groupId>maven</groupId>
  <artifactId>maven-web-redis</artifactId>
  <packaging>war</packaging>
  <version>0.0.1-SNAPSHOT</version>
  <name>maven-web-HDFS Maven Webapp</name>
  <url>http://maven.apache.org</url>

  <properties>
    <commons-lang3.version>3.5</commons-lang3.version>
    <spring.version>3.2.4.RELEASE</spring.version>
    <slf4j.version>1.7.10</slf4j.version>
    <log4j.version>1.2.12</log4j.version>
  </properties>

  <dependencies>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>3.8.1</version>
      <scope>test</scope>
    </dependency>

    <!-- Spring -->
    <dependency>
      <groupId>org.springframework</groupId>
      <artifactId>spring-web</artifactId>
      <version>${spring.version}</version>
    </dependency>
    <dependency>
      <groupId>org.springframework</groupId>
      <artifactId>spring-webmvc</artifactId>
      <version>${spring.version}</version>
    </dependency>
    <dependency>
      <groupId>org.springframework</groupId>
      <artifactId>spring-jdbc</artifactId>
      <version>${spring.version}</version>
    </dependency>

    <!-- Logging -->
    <dependency>
      <groupId>log4j</groupId>
      <artifactId>log4j</artifactId>
      <version>${log4j.version}</version>
    </dependency>
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-api</artifactId>
      <version>${slf4j.version}</version>
    </dependency>
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-log4j12</artifactId>
      <version>${slf4j.version}</version>
    </dependency>

    <!-- Hadoop -->
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-client</artifactId>
      <version>2.5.1</version>
    </dependency>

    <!-- Phoenix -->
    <dependency>
      <groupId>org.apache.phoenix</groupId>
      <artifactId>phoenix-core</artifactId>
      <version>4.6.0-HBase-0.98</version>
    </dependency>

    <!-- DBCP connection pool -->
    <dependency>
      <groupId>commons-dbcp</groupId>
      <artifactId>commons-dbcp</artifactId>
      <version>1.4</version>
    </dependency>
  </dependencies>

  <build>
    <finalName>maven-web-HDFS</finalName>
  </build>
</project>

 

② Spring configuration

<bean id="phoenixJdbcTemplate" class="org.springframework.jdbc.core.JdbcTemplate">
    <constructor-arg ref="phoenixDataSource"/>
    <qualifier value="phoenixJdbcTemplate"/>
</bean>

<bean id="baseInterfacePonenixImpl" class="com.harvetech.service.PhoenixService">
    <property name="jdbcTemplate" ref="phoenixJdbcTemplate"/>
</bean>

<bean id="phoenixDataSource" class="org.apache.commons.dbcp.BasicDataSource">
    <property name="driverClassName" value="org.apache.phoenix.jdbc.PhoenixDriver"/>
    <property name="url" value="jdbc:phoenix:192.168.110.97,192.168.110.98,192.168.110.99:2181"/>
    <property name="username" value=""/>
    <property name="password" value=""/>
    <property name="initialSize" value="20"/>
    <property name="maxActive" value="0"/>
    <!-- Phoenix does not auto-commit data changes, so defaultAutoCommit must be set;
         otherwise updates made through this pool are never committed. -->
    <property name="defaultAutoCommit" value="true"/>
</bean>
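The same wiring can also be expressed with Spring's Java-based configuration. This is only a sketch equivalent to the XML above (the original project uses XML only); bean names mirror the XML ids:

package com.harvetech.config;

import org.apache.commons.dbcp.BasicDataSource;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;

// Hypothetical Java-config equivalent of the XML beans above.
@Configuration
public class PhoenixConfig {

    @Bean(destroyMethod = "close")
    public BasicDataSource phoenixDataSource() {
        BasicDataSource ds = new BasicDataSource();
        ds.setDriverClassName("org.apache.phoenix.jdbc.PhoenixDriver");
        ds.setUrl("jdbc:phoenix:192.168.110.97,192.168.110.98,192.168.110.99:2181");
        ds.setInitialSize(20);
        ds.setMaxActive(0);
        // Phoenix does not auto-commit data changes, so commit by default.
        ds.setDefaultAutoCommit(true);
        return ds;
    }

    @Bean
    public JdbcTemplate phoenixJdbcTemplate() {
        return new JdbcTemplate(phoenixDataSource());
    }
}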

 

③ Wrapping JdbcTemplate

 

package com.harvetech.service;

import java.util.List;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;

public class PhoenixService {

    @Autowired
    private JdbcTemplate jdbcTemplate;

    public PhoenixService(JdbcTemplate template) {
        this.jdbcTemplate = template;
    }

    public PhoenixService() {
        super();
    }

    // Runs a query and maps each row onto an instance of cls.
    public List<Object> query(String querySql, Class cls) {
        System.out.println(querySql);
        return jdbcTemplate.query(querySql, new HarvetechRowMapper(cls));
    }

    // Executes a write statement (UPSERT / DELETE).
    public void update(String querySql) {
        System.out.println(querySql);
        jdbcTemplate.update(querySql);
    }

    public JdbcTemplate getJdbcTemplate() {
        return jdbcTemplate;
    }

    public void setJdbcTemplate(JdbcTemplate jdbcTemplate) {
        this.jdbcTemplate = jdbcTemplate;
    }
}

 

package com.harvetech.service;

import java.lang.reflect.Field;
import java.math.BigDecimal;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.Date;
import java.util.HashMap;

import org.springframework.jdbc.core.RowMapper;

public class HarvetechRowMapper<T> implements RowMapper<T> {

    private Class<?> targetClazz;
    private HashMap<String, Field> fieldMap;

    public HarvetechRowMapper(Class<?> targetClazz) {
        this.targetClazz = targetClazz;
        fieldMap = new HashMap<String, Field>();
        Field[] fields = targetClazz.getDeclaredFields();
        for (Field field : fields) {
            fieldMap.put(field.getName(), field);
        }
    }

    public T mapRow(ResultSet rs, int arg1) throws SQLException {
        T obj = null;
        try {
            obj = (T) targetClazz.newInstance();
            final ResultSetMetaData metaData = rs.getMetaData();
            int columnLength = metaData.getColumnCount();
            String columnName = null;

            for (int i = 1; i <= columnLength; i++) {
                columnName = metaData.getColumnName(i);
                Class fieldClazz = fieldMap.get(columnName).getType();
                Field field = fieldMap.get(columnName);
                field.setAccessible(true);

                // Copy the column value into the matching field, by type.
                if (fieldClazz == int.class || fieldClazz == Integer.class) {
                    field.set(obj, rs.getInt(columnName));
                } else if (fieldClazz == boolean.class || fieldClazz == Boolean.class) {
                    field.set(obj, rs.getBoolean(columnName));
                } else if (fieldClazz == String.class) {
                    field.set(obj, rs.getString(columnName));
                } else if (fieldClazz == float.class || fieldClazz == Float.class) {
                    field.set(obj, rs.getFloat(columnName));
                } else if (fieldClazz == double.class || fieldClazz == Double.class) {
                    field.set(obj, rs.getDouble(columnName));
                } else if (fieldClazz == BigDecimal.class) {
                    field.set(obj, rs.getBigDecimal(columnName));
                } else if (fieldClazz == short.class || fieldClazz == Short.class) {
                    field.set(obj, rs.getShort(columnName));
                } else if (fieldClazz == Date.class) {
                    field.set(obj, rs.getDate(columnName));
                } else if (fieldClazz == Timestamp.class) {
                    // Note: java.sql.Timestamp, not com.sun.jmx.snmp.Timestamp.
                    field.set(obj, rs.getTimestamp(columnName));
                } else if (fieldClazz == long.class || fieldClazz == Long.class) {
                    field.set(obj, rs.getLong(columnName));
                }

                field.setAccessible(false);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }

        return obj;
    }
}
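As an aside, when the result-set column labels already match the bean property names, Spring's built-in org.springframework.jdbc.core.BeanPropertyRowMapper could replace this hand-written mapper. A minimal sketch, assuming a JdbcTemplate named jdbcTemplate as in PhoenixService above and that the "harve_user" columns line up with the User properties defined below:

// Hypothetical alternative: let Spring map columns onto User properties.
List<User> users = jdbcTemplate.query(
        "select * from \"harve_user\" limit 3",
        new BeanPropertyRowMapper<User>(User.class));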

 

package com.harvetech.model;

public class User {

    private String rowkey;
    private String name;
    private String telephone;
    private String roleId;
    private String roleName;

    public String getRowkey() {
        return rowkey;
    }

    public void setRowkey(String rowkey) {
        this.rowkey = rowkey;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getTelephone() {
        return telephone;
    }

    public void setTelephone(String telephone) {
        this.telephone = telephone;
    }

    public String getRoleId() {
        return roleId;
    }

    public void setRoleId(String roleId) {
        this.roleId = roleId;
    }

    public String getRoleName() {
        return roleName;
    }

    public void setRoleName(String roleName) {
        this.roleName = roleName;
    }

    @Override
    public String toString() {
        return "User [rowkey=" + rowkey + ", name=" + name + ", telephone="
                + telephone + ", roleId=" + roleId + ", roleName=" + roleName + "]";
    }
}

 

④ Usage example

@Controller
public class IndexController {

    @Resource
    private PhoenixService phoenixService;

    @RequestMapping("login")
    public String login() {
        System.out.println("-----------------------------");
        String sql2 = "select * from \"harve_user\" limit 3";
        List<Object> userList = phoenixService.query(sql2, User.class);
        for (Object obj : userList) {
            User user = (User) obj;
            System.out.println(user.toString());
        }
        System.out.println("==============================");
        return "login";
    }
}
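Writes go through the same service via its update method. A brief sketch (the "harve_user" column names used here are illustrative, not taken from the original schema); because the data source sets defaultAutoCommit=true, no explicit commit is needed:

// Hypothetical upsert through the wrapped JdbcTemplate; column names are illustrative.
String upsert = "upsert into \"harve_user\" (\"ROW\", \"name\", \"telephone\") "
        + "values ('user_001', 'alice', '13800000000')";
phoenixService.update(upsert);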
