我创建了一个返回数据库连接对象的java类。但它在调用方法中返回null值。当我在调试模式下运行代码时,它不会进入方法本身。需要建议。
package com.cisco.installbase.hiveconnector;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Connection;
public class CreateConnection {
private static Connection instance = null;
static final String drivername = "org.apache.hive.jdbc.HiveDriver";
private CreateConnection() {
try {
Class.forName(drivername);
// instance =
// DriverManager.getConnection("jdbc:hive2://hddev-c01-edge-01:20000/",
// "phodisvc", "B1GD4T4dev");
// for hive 1 use this ------> instance =
// DriverManager.getConnection("thrift://hddev-c01-edge-02:9083");
instance = DriverManager.getConnection("thrift://hddev-c01-edge-02:9083");
System.out.println("get instance"+instance);
Constants.setFlag(true);
} catch (ClassNotFoundException e) {
e.printStackTrace();
Constants.setFlag(false);
} catch (SQLException e) {
e.printStackTrace();
Constants.setFlag(false);
}
}
public static Connection getInstance() {
Constants.setFlag(true);
return instance;
}
}
下面是调用getInstance()方法的代码
package com.cisco.installbase.hiveconnector;
import java.util.Date;
public class MainApp {
private static final String hiveDB = ReadProperties.getInstance().getProperty("hive_db");
private static final String logTable = ReadProperties.getInstance().getProperty("IB_log_table");
private static final String dataGovernanceLogTable = ReadProperties.getInstance().getProperty("SR_DG_table");
private static final String dataGovernanceMasterTable = ReadProperties.getInstance()
.getProperty("SR_DG_master_table");
private static final String count_xxccs_ds_sahdr_core = "select count(*) from " + hiveDB + "."
+ "xxccs_ds_sahdr_core";
private static final String count_mtl_system_items_b = "select count(*) from " + hiveDB + "."
+ "mtl_system_items_b";
private static final String count_xxccs_scdc_product_profile = "select count(*) from " + hiveDB + "."
+ "xxccs_scdc_product_profile";
private static final String count_xxccs_ds_cvdprdline_detail = "select count(*) from " + hiveDB + "."
+ "xxccs_ds_cvdprdline_detail";
private static final String count_xxccs_ds_instance_detail = "select count(*) from " + hiveDB + "."
+ "xxccs_ds_instance_detail";
private static int currentJobID = 0;
private static Date startTime = null;
private static Date stopTime = null;
private static int runTime = 0;
static CommonDBUtilities commonDB = new CommonDBUtilities();
static ShellUtilities shellUtilities = new ShellUtilities();
static SqoopUtility sqoop = new SqoopUtility();
public static void main(String[] args) {
MainApp.startTimeLogger();
System.out.println("Started the Job");
}
public static void startTimeLogger() {
// getting the Job ID and the start time for the log table
if (Constants.isFlag()) {
currentJobID = commonDB.getMaximumJobID();
startTime = commonDB.getTime();
MainApp.importTables();
System.out.println("executing startTimeLogger");
} else {
MainApp.onFailure();
JobMailer.PostMail("IB Load Failed", "Load failed while logging method name startTimeLogger()");
System.out.println("executing startTimeLogger failed");
}
}
public static void importTables() {
// Delete target directory before running the sqoop imports
if (Constants.isFlag()) {
shellUtilities.DeleteDirectory(Constants.getMtlSystems());
shellUtilities.DeleteDirectory(Constants.getProductLine());
shellUtilities.DeleteDirectory(Constants.getInstanceDetail());
shellUtilities.DeleteDirectory(Constants.getProductLine());
shellUtilities.DeleteDirectory(Constants.getHeaderCore());
// Run the sqoop imports to load the data from oracle to hive
sqoop.runSqoop();
MainApp.getCounts();
System.out.println("executing importTables");
} else {
MainApp.onFailure();
JobMailer.PostMail("IB Load Failed", "Load failed while running sqoop import method name importTables()");
System.out.println("executing importTables failed");
}
}
public static void getCounts() {
// Get the record counts for all the IB tables pulled
if (Constants.isFlag()) {
commonDB.getCounts(count_xxccs_ds_instance_detail);
commonDB.getCounts(count_xxccs_ds_cvdprdline_detail);
commonDB.getCounts(count_xxccs_scdc_product_profile);
commonDB.getCounts(count_mtl_system_items_b);
commonDB.getCounts(count_xxccs_ds_sahdr_core);
MainApp.stopTimeLogger();
System.out.println("executing getCounts");
} else {
MainApp.onFailure();
JobMailer.PostMail("IB Load Failed", "Load failed while getting counts method name getCounts()");
System.out.println("executing getCounts failed");
}
}
public static void stopTimeLogger() {
// Get the stop time or end time
if (Constants.isFlag()) {
stopTime = commonDB.getTime();
MainApp.runTimeLogger();
System.out.println("executing stopTimeLogger");
} else {
MainApp.onFailure();
JobMailer.PostMail("IB Load Failed", "Load failed while end logging method name stopTimeLogger()");
System.out.println("executing stopTimeLogger failed");
}
}
public static void runTimeLogger() {
// Get the run time or total time taken
if (Constants.isFlag()) {
runTime = (int) (stopTime.getTime() - startTime.getTime()) / 1000 * 60 * 60 * 24;
MainApp.onSuccess();
MainApp.logGovernance();
System.out.println("executing runTimeLogger");
} else {
MainApp.onFailure();
JobMailer.PostMail("IB Load Failed", "Load failed while runtime logging method name runTimeLogger()");
System.out.println("executing runTimeLogger failed");
}
}
public static void logGovernance() {
// IB Data governance
if (Constants.isFlag()) {
String dataGovernance = "Insert into table " + hiveDB + "." + dataGovernanceLogTable
+ " select Data_Asset_Reference,File_Name,Origin_System,Transfer_System," + startTime
+ ",Column_Reference,Element_Reference,Rule_Priority,Delete_By_Date,Classification,Geographic_Inclusion,Geographic_Restriction,Group_Inclusion,Group_Restriction,Reserved from "
+ hiveDB + "." + dataGovernanceMasterTable;
commonDB.InsertToTable(dataGovernance);
System.out.println("executing logGovernance");
} else {
MainApp.onFailure();
JobMailer.PostMail("IB Load Failed",
"Load failed while inserting into datagovernance method name logGovernance()");
System.out.println("executing logGovernance failed");
}
}
public static void onFailure() {
// Write to log on Failure
String insertOnFailure = "insert into table " + hiveDB + "." + logTable + " select " + currentJobID + ","
+ stopTime + "," + runTime + "," + "FAILED from " + hiveDB + "." + "dual" + " limit 1; ";
commonDB.InsertToTable(insertOnFailure);
JobMailer.PostMail("IB Load Failed", "Load failed");
System.out.println("executing onFailure");
}
public static void onSuccess() {
// Write to log on Success
String insertOnSuccess = "insert into table " + hiveDB + "." + logTable + " select " + currentJobID + ","
+ stopTime + "," + runTime + "," + "SUCCESS from " + hiveDB + "." + "dual" + " limit 1; ";
commonDB.InsertToTable(insertOnSuccess);
JobMailer.PostMail("IB Load Successfully completed", "Load completed");
System.out.println("executing onSuccess");
}
}
答案 0 :(得分:1)
private static Connection instance = null;
static final String drivername = "org.apache.hive.jdbc.HiveDriver";
public static Connection getInstance() {
if(instance==null){
try {
Class.forName(drivername);
instance = DriverManager.getConnection("thrift://hddev-c01-edge-02:9083");
Constants.setFlag(true);
} catch (ClassNotFoundException e) {
e.printStackTrace();
Constants.setFlag(false);
} catch (SQLException e) {
e.printStackTrace();
Constants.setFlag(false);
}
}
return instance;
}
答案 1 :(得分:0)
你的sigleton实现是错误的。 你永远不会调用构造函数。这是实现单例的正确方法。
10.RandomIndexButNotThis(8)
答案 2 :(得分:0)
你永远不会调用构造函数CreateConnection()
(此外,这不应该是构造函数,而是返回连接的实用程序方法)。