Java (115): Connecting to Hive through the JDBC interface

1. Version information

Hive version: 3.1.2

JDBC driver: hive-jdbc-uber-2.6.5.0-292.jar

Driver download: https://github.com/timveil/hive-jdbc-uber-jar/releases/tag/v1.9-2.6.5

2. pom.xml dependency


<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-jdbc-uber</artifactId>
    <version>2.6.5.0-292</version>
    <scope>system</scope>
    <systemPath>${pom.basedir}/src/main/resources/lib/hive-jdbc-uber-2.6.5.0-292.jar</systemPath>
</dependency>
3. The corresponding Hive table

CREATE TABLE regre_one.hive2_varchar(
    ID                 int,
    aes                varchar(1000),
    sm4                varchar(1000),
    sm4_a              varchar(1000),
    email              varchar(1000),
    phone              varchar(1000),
    ssn                varchar(1000),
    military           varchar(1000),
    passport           varchar(1000),
    intelssn           varchar(1000),
    intelpassport      varchar(1000),
    intelmilitary      varchar(1000),
    intelganghui       varchar(1000),
    inteltaitonei      varchar(1000),
    credit_card_short  varchar(1000),
    credit_card_long   varchar(1000),
    job                varchar(1000)
);
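
The DDL above can also be executed from Java through the same driver instead of from Beeline. A minimal sketch, assuming the uber jar from step 2 is on the classpath and reusing the placeholder host and credentials from the code in step 4:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class CreateHiveTable {
    public static void main(String[] args) throws Exception {
        // Register the Hive JDBC driver (same class name as in step 4).
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        // Host, port, database, user and password are placeholders; adjust to your cluster.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:hive2://10.1.1.196:10000/regre_one", "xxx", "xxx");
             Statement stmt = conn.createStatement()) {
            // IF NOT EXISTS keeps the statement safe to re-run.
            stmt.execute("CREATE TABLE IF NOT EXISTS regre_one.hive2_varchar(" +
                    "ID int, aes varchar(1000), sm4 varchar(1000), sm4_a varchar(1000), " +
                    "email varchar(1000), phone varchar(1000), ssn varchar(1000), " +
                    "military varchar(1000), passport varchar(1000), intelssn varchar(1000), " +
                    "intelpassport varchar(1000), intelmilitary varchar(1000), " +
                    "intelganghui varchar(1000), inteltaitonei varchar(1000), " +
                    "credit_card_short varchar(1000), credit_card_long varchar(1000), " +
                    "job varchar(1000))");
        }
    }
}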

4. Java code that connects to Hive via JDBC

package utils;


import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

/**
 * Description :
 *
 * @author : HMF
 * Date : Created in 15:28 2023/3/21
 * @version :
 */
public class dbUtil {
    public Connection conn;

    // Loads the Hive JDBC driver and opens a connection to HiveServer2.
    public dbUtil(){
        // Driver class shipped in the hive-jdbc-uber jar from step 2
        String driver="org.apache.hive.jdbc.HiveDriver";
        // HiveServer2 endpoint: host, port 10000, default database regre_one
        String url="jdbc:hive2://10.1.1.196:10000/regre_one?useUnicode=true&characterEncoding=UTF-8";
        String user="xxx";
        String password="xxx";

        try{
            Class.forName(driver);
            conn= DriverManager.getConnection(url,user,password);
        }catch (Exception e){
            e.printStackTrace();
            System.exit(1);
        }

    }
    public static void main(String[] args)  {
        dbUtil db=new dbUtil();
        String insertSql="INSERT INTO regre_one.hive2_varchar (ID,aes, sm4, sm4_a, email, phone, ssn, military, passport, intelssn, intelpassport, intelmilitary, intelganghui, inteltaitonei, credit_card_short, credit_card_long, job) VALUES (1,'小芬', '北京xxx技术有限公司', '北京市', '[email protected]', '15652996964', '210302199608124861', '武水电字第3632734号', 'BWP018930705', '210302199608124861', 'BWP018930705', '武水电字第3632734号', 'H21157232', '9839487602', '117', '6227612145830440', '高级测试开发工程师')";
        String selectSql="select ID,aes, sm4, sm4_a, email, phone, ssn, military, passport, intelssn, intelpassport, intelmilitary, intelganghui, inteltaitonei, credit_card_short, credit_card_long, job from regre_one.hive2_varchar";
        String deleteSql="truncate table regre_one.hive2_varchar";
        // Empty the table, insert one row, then read it back.
        db.DBExecute(deleteSql);
        db.DBExecute(insertSql);
        db.DBQuery(selectSql);
        System.exit(0);
    }

    // Runs a statement that returns no result set (TRUNCATE, INSERT, DDL) and logs the SQL.
    void DBExecute(String sqlStr){
        try{
            Statement stmt=conn.createStatement();
            stmt.execute(sqlStr);
            System.out.println("+++++sqlStr:"+sqlStr);
            stmt.close();
        }catch (Exception e){
            e.printStackTrace();
        }
    }

    // Runs a query and prints every row, with columns separated by tabs.
    void DBQuery(String sqlStr){
        try{
             Statement statement=conn.createStatement();
             ResultSet rs=statement.executeQuery(sqlStr);
             int columnCount=rs.getMetaData().getColumnCount();
             System.out.println("+++++sqlStr:"+sqlStr);
             while (rs.next()){
                 String result="";
                 // ResultSet columns are 1-based
                 for(int i=1;i<=columnCount;i++){
                     result+=rs.getString(i)+"\t";
                 }
                 System.out.println(result);
             }
             rs.close();
             statement.close();
        }catch (Exception e){
            e.printStackTrace();
        }
    }
}
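
The class above opens a single connection, never closes it (it relies on System.exit), and splices values straight into the SQL strings. As a variation, the Hive JDBC driver also supports PreparedStatement parameter placeholders, and try-with-resources closes the statement and connection automatically. A minimal sketch using the same placeholder connection details, reading a few columns back by parameterized ID:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

public class HiveQueryExample {
    public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        String url = "jdbc:hive2://10.1.1.196:10000/regre_one";  // placeholder host, as above
        try (Connection conn = DriverManager.getConnection(url, "xxx", "xxx");
             PreparedStatement ps = conn.prepareStatement(
                     "SELECT ID, email, phone FROM regre_one.hive2_varchar WHERE ID = ?")) {
            ps.setInt(1, 1);  // bind the ID instead of concatenating it into the SQL
            try (ResultSet rs = ps.executeQuery()) {
                while (rs.next()) {
                    // Read columns by position (1-based), as in DBQuery above.
                    System.out.println(rs.getInt(1) + "\t" + rs.getString(2) + "\t" + rs.getString(3));
                }
            }
        }
    }
}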
