Hive query executeQuery() hangs in Java JDBC code

Posted: 2019-07-12 07:44:03

Tags: jdbc hive hive-udf hive-query

I have created a UDTF, and inside it I run the Java Hive JDBC code below to execute a Hive query and fetch its results. I can connect to HiveServer2 successfully, but the code hangs indefinitely at statement.executeQuery() without throwing any exception. What could be the cause? The same code works when run as a standalone class from Eclipse, but it hangs when deployed to the Hadoop cluster as a UDTF.


public class DynamicWhereUDTF extends GenericUDTF {
    private PrimitiveObjectInspector stringOI = null;
    ArrayList<Object[]> results = new ArrayList<Object[]>();

    @Override
    public StructObjectInspector initialize(ObjectInspector[] args)
            throws UDFArgumentException {

        stringOI = (PrimitiveObjectInspector) args[0];
        if (stringOI != null) {
            String name = stringOI.toString();
            System.out.println("param <-------> " + name);
        }

        List<String> fieldNames = new ArrayList<String>();
        try {
            fieldNames = getColumnNames("d_drug");
        } catch (SQLException e) {
            e.printStackTrace();
        }
        System.out.println("fieldNames size ---> " + fieldNames.size());
        List<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();

        for (int i = 0; i < fieldNames.size(); i++) {
            fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        }

        System.out.println("----------ObjectInspectorFactory created------------ ");
        Connection conn = null;
        ResultSet rs = null;
        PreparedStatement statement = null;
        try {
            System.out.println("Processing records 1");
            Class.forName("org.apache.hive.jdbc.HiveDriver");
            System.out.println("Processing records 2");
            Configuration conf = new Configuration();
            conf.set("hadoop.security.authentication", "Kerberos");
            conf.set("fs.hdfs.impl", DistributedFileSystem.class.getName());
            conf.set("fs.file.impl",
                    org.apache.hadoop.fs.LocalFileSystem.class.getName());
            UserGroupInformation.setConfiguration(conf);
            UserGroupInformation.loginUserFromKeytab("abc@CS.MSD",
                    "/tmp/abc.keytab");
            System.out.println("Processing records 3");
            String hiveJdbcUrl = "jdbc:hive2://<host>:10000/demo_db;principal=hive/<host>@CS.MSD";
            conn = DriverManager.getConnection(hiveJdbcUrl, "abc", "");
            System.out.println("conn1 <-------> " + conn);
            statement = conn.prepareStatement("select * from xyz limit 5");
            System.out.println(" statement ----------> " + statement);
            rs = statement.executeQuery();
            System.out.println(" resultset ----------> " + rs);
            ResultSetMetaData rsMetaData = rs.getMetaData();
            int columnCount = rsMetaData.getColumnCount();
            System.out.println("columnCount ---> " + columnCount);
            // ArrayList<Object[]> results = new ArrayList<Object[]>();
            StringBuilder values = new StringBuilder();

            while (rs.next()) {
                values = new StringBuilder();
                for (int i = 0; i < columnCount; i++) {
                    values = values.append(rs.getString(i + 1)).append(",");
                }
                String output = values.toString().substring(0,
                        values.lastIndexOf(","));
                System.out.println("output  -----> " + output);
                results.add(new Object[] { "122556", "52905" });
            }
            System.out.println("------- results forwarded -------");

        } catch (Exception ex) {
            ex.printStackTrace();
        } finally {
            if (conn != null)
                try {
                    conn.close();
                } catch (SQLException e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                }
        }
        return ObjectInspectorFactory.getStandardStructObjectInspector(
                fieldNames, fieldOIs);
    }

    @Override
    public void close() throws HiveException {
        // TODO Auto-generated method stub

    }

    @Override
    public void process(Object[] record) throws HiveException {

        try {

            Iterator<Object[]> it = results.iterator();

            while (it.hasNext()) {
                Object[] r = it.next();
                forward(r);
            }
            System.out.println("------- results forwarded -------");
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }

    public List<String> getColumnNames(String tableName) throws SQLException {
        List<String> fieldNames = new ArrayList<String>();
        fieldNames.add("drug_id");
        fieldNames.add("drug_cd");
        return fieldNames;
    }

}

1 Answer:

Answer 0 (score: 0)

The problem is most likely that you create the connection in the initialize method. Try creating the connection in the configure method instead; you can use the HBase connector as an example. A rough sketch follows.
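
Below is a minimal sketch of that suggestion. It reuses the class name, JDBC URL, credentials, query, and output columns (drug_id, drug_cd) from the question; everything else is illustrative and untested rather than a drop-in fix, and configure(MapredContext) is only invoked when the UDTF actually runs inside a map/reduce task.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.ql.exec.MapredContext;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class DynamicWhereUDTF extends GenericUDTF {

    private Connection conn; // opened at task runtime, not while the outer query is compiled

    // configure() is called by Hive inside the map/reduce task, after the outer
    // query has been planned, so the JDBC work no longer happens in initialize().
    @Override
    public void configure(MapredContext mapredContext) {
        try {
            Class.forName("org.apache.hive.jdbc.HiveDriver");
            // the Kerberos login (UserGroupInformation) from the question would go here too
            String hiveJdbcUrl = "jdbc:hive2://<host>:10000/demo_db;principal=hive/<host>@CS.MSD";
            conn = DriverManager.getConnection(hiveJdbcUrl, "abc", "");
        } catch (Exception e) {
            throw new RuntimeException("could not open Hive connection", e);
        }
    }

    // initialize() now only declares the output schema and does no JDBC work.
    @Override
    public StructObjectInspector initialize(ObjectInspector[] args)
            throws UDFArgumentException {
        List<String> fieldNames = Arrays.asList("drug_id", "drug_cd");
        List<ObjectInspector> fieldOIs = Arrays.<ObjectInspector>asList(
                PrimitiveObjectInspectorFactory.javaStringObjectInspector,
                PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
    }

    // process() runs per input row on the task side, where the connection is usable.
    @Override
    public void process(Object[] record) throws HiveException {
        try (PreparedStatement st = conn.prepareStatement("select * from xyz limit 5");
                ResultSet rs = st.executeQuery()) {
            while (rs.next()) {
                forward(new Object[] { rs.getString(1), rs.getString(2) });
            }
        } catch (Exception e) {
            throw new HiveException(e);
        }
    }

    @Override
    public void close() throws HiveException {
        try {
            if (conn != null) {
                conn.close();
            }
        } catch (Exception e) {
            throw new HiveException(e);
        }
    }
}

The point of the change is that the connection is no longer opened while Hive is still compiling the outer query in initialize(), which is where the question's version appears to block.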