Contents
- 1. Drop table program
- 2. Data manipulation program
1. Drop table program
Write the drop-table program
hadoop-desktop:~$ vim drop_test_external.java
import java.sql.SQLException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;
public class drop_test_external{
  private static String driverName = "org.apache.hive.jdbc.HiveDriver";
  private static String url = "jdbc:hive2://ddai-master:10000/default";
  private static String user = "hive";
  private static String password = "Dai@123456";
  private static String sql = "DROP TABLE IF EXISTS test_external";
  public static void main(String[] args) throws SQLException {
    try {
      // Register driver and create driver instance
      Class.forName(driverName);
      // get connection
      Connection conn = DriverManager.getConnection(url, user, password);
      // create statement
      Statement stmt = conn.createStatement();
      // execute statement
      stmt.executeUpdate(sql);
      System.out.println("Drop table successful.");
      conn.close();
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
}
Write the launch script
hadoop-desktop:~$ vim drop_test_external.sh
#!/bin/bash
HADOOP_HOME=/opt/hadoop-2.8.5
HIVE_HOME=/opt/apache-hive-2.3.6-bin
CLASSPATH=.:$HIVE_HOME/conf:$(hadoop classpath)
for i in ${HIVE_HOME}/lib/*.jar ; do
    CLASSPATH=$CLASSPATH:$i
done
java -cp $CLASSPATH drop_test_external
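Note that the script only launches an already-compiled class; it does not compile the source itself. Because drop_test_external.java uses only the java.sql interfaces, a plain javac is enough, and the Hive/Hadoop jars are needed only at run time. A minimal sketch, assuming the source file sits in the current directory (the same applies to hive_jdbc.java in the next section):
# compile once before running the launch script; no extra classpath is needed at compile time
javac drop_test_external.java
# then make the launch script executable and run it
chmod +x drop_test_external.sh
./drop_test_external.sh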
2. Data manipulation program
Write the data manipulation program
hadoop-desktop:~$ vim hive_jdbc.java
import java.sql.SQLException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.DriverManager;
public class hive_jdbc {
  private static String driverName = "org.apache.hive.jdbc.HiveDriver";
  private static String url = "jdbc:hive2://ddai-master:10000/default";
  private static String user = "hive";
  private static String passwd = "Dai@123456";
  public static void main(String[] args) throws SQLException {
    try {
      Class.forName(driverName);
    } catch (ClassNotFoundException e) {
      e.printStackTrace();
      System.exit(1);
    }
    Connection con = DriverManager.getConnection(url,user,passwd);
    Statement stmt = con.createStatement();
    String tableName = "test_jdbc";
    // recreate the demo table (two columns, default row format, i.e. \001 field delimiter)
    stmt.execute("drop table if exists " + tableName);
    stmt.execute("create table " + tableName + " (key int, value string)");
    // show tables
    String sql = "show tables '" + tableName + "'";
    System.out.println("Running: " + sql);
    ResultSet res = stmt.executeQuery(sql);
    if (res.next()) {
      System.out.println(res.getString(1));
    }
    // describe table
    sql = "describe " + tableName;
    System.out.println("Running: " + sql);
    res = stmt.executeQuery(sql);
    while (res.next()) {
      System.out.println(res.getString(1) + "\t" + res.getString(2));
    }
    // load the file uploaded by puttxt.sh into the table
    String filepath = "hdfs://ddai-master:9000/tmp/test.txt";
    sql = "load data inpath '" + filepath + "' into table " + tableName;
    System.out.println("Running: " + sql);
    stmt.execute(sql);
    // select * query
    sql = "select * from " + tableName;
    System.out.println("Running: " + sql);
    res = stmt.executeQuery(sql);
    while (res.next()) {
      System.out.println(res.getInt(1) + "\t" + res.getString(2));
    }
  }
}
Write a script that generates the test data and uploads it to HDFS
hadoop-desktop:~$ vim puttxt.sh
#!/bin/bash
echo -e '1\x01Tom' > /tmp/test.txt
echo -e '2\x01Jarry' >> /tmp/test.txt
hdfs dfs -put /tmp/test.txt /tmp
hadoop-desktop:~$ chmod +x puttxt.sh
hadoop-desktop:~$ ./puttxt.sh
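As an optional sanity check, confirm that the file reached HDFS. The \x01 byte written by the script is Hive's default field delimiter, which is why the plain CREATE TABLE in hive_jdbc.java can read the file without a ROW FORMAT clause; the delimiter itself is not printable, so the two fields appear run together in the cat output:
hadoop-desktop:~$ hdfs dfs -ls /tmp/test.txt
hadoop-desktop:~$ hdfs dfs -cat /tmp/test.txt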
Write the launch script
hadoop-desktop:~$ vim hive_jdbc.sh
#!/bin/bash
HADOOP_HOME=/opt/hadoop-2.8.5
HIVE_HOME=/opt/apache-hive-2.3.6-bin
CLASSPATH=.:$HIVE_HOME/conf:$(hadoop classpath)
for i in ${HIVE_HOME}/lib/*.jar ; do
    CLASSPATH=$CLASSPATH:$i
done
java -cp $CLASSPATH hive_jdbc
The script formatting had an issue at first and needed a small fix.

The run succeeded.
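To double-check the result outside the Java program, the same rows can also be queried from Beeline using the connection settings from the code above (a sketch; the password quoting may need adjusting for your shell):
hadoop-desktop:~$ beeline -u jdbc:hive2://ddai-master:10000/default -n hive -p 'Dai@123456' -e 'select * from test_jdbc;'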