Command used to start the process:
./jsvc64/jsvc64 -pidfile ./log/jsvc.pid -outfile ./log/out.txt -errfile ./log/error.txt -Xmx512m -Djava.util.Arrays.useLegacyMergeSort=true -cp :./tools/lib/:./tools/ com.g2us.hbase.cmdlog.monitor.CmdLogHbase ./
SQL:
UPSERT INTO CMDLOG_20130818 (game, role, otime, logtype, passport, subgame, cmdid, exception, moreinfo, pname_0, pname_1, pname_2) VALUES (?,?,?,?,?,?,?,?,?,?,?,?)
The exception below is thrown while upserting 90000 rows. How can I fix it?
Exception in thread "Thread-0" java.lang.OutOfMemoryError: GC overhead limit exceeded
at java.lang.reflect.Method.copy(Method.java:143)
at java.lang.reflect.ReflectAccess.copyMethod(ReflectAccess.java:118)
at sun.reflect.ReflectionFactory.copyMethod(ReflectionFactory.java:282)
at java.lang.Class.copyMethods(Class.java:2748)
at java.lang.Class.getMethods(Class.java:1410)
at org.apache.hadoop.hbase.ipc.Invocation.<init>(Invocation.java:67)
at org.apache.hadoop.hbase.ipc.WritableRpcEngine$Invoker.invoke(WritableRpcEngine.java:86)
at $Proxy8.getClosestRowBefore(Unknown Source)
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegionInMeta(HConnectionManager.java:1019)
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:885)
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:846)
at org.apache.hadoop.hbase.client.HTable.finishSetup(HTable.java:271)
at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:211)
at org.apache.hadoop.hbase.client.MetaScanner.metaScan(MetaScanner.java:160)
at org.apache.hadoop.hbase.client.MetaScanner.access$000(MetaScanner.java:54)
at org.apache.hadoop.hbase.client.MetaScanner$1.connect(MetaScanner.java:133)
at org.apache.hadoop.hbase.client.MetaScanner$1.connect(MetaScanner.java:130)
at org.apache.hadoop.hbase.client.HConnectionManager.execute(HConnectionManager.java:383)
at org.apache.hadoop.hbase.client.MetaScanner.metaScan(MetaScanner.java:130)
at org.apache.hadoop.hbase.client.MetaScanner.metaScan(MetaScanner.java:105)
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.prefetchRegionCache(HConnectionManager.java:947)
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegionInMeta(HConnectionManager.java:1002)
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:889)
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:846)
at org.apache.hadoop.hbase.client.HTable.finishSetup(HTable.java:271)
at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:263)
at com.salesforce.phoenix.query.HTableFactory$HTableFactoryImpl.getTable(HTableFactory.java:60)
at com.salesforce.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:133)
at com.salesforce.phoenix.execute.MutationState.commit(MutationState.java:227)
at com.salesforce.phoenix.jdbc.PhoenixConnection.commit(PhoenixConnection.java:244)
at com.g2us.hbase.phoenix.HBaseHelper.executeUpdate(HBaseHelper.java:62)
at com.g2us.hbase.cmdlog.io.BaseLogPoster.upsertRow(BaseLogPoster.java:153)
Answer (score: 1):
I found the problem and fixed it.
preStat was declared as a class field, so repeated calls to executeQuery() kept replacing it without ever closing the previous PreparedStatement; the leaked statements eventually caused the OutOfMemoryError.
Buggy code:
public class F {
    // preStat is a class field: every call overwrites it and the previous
    // PreparedStatement is never closed.
    PreparedStatement preStat = null;

    public ResultSet executeQuery(String sql, Object... args) throws Exception {
        ResultSet rsResultSet = null;
        Connection conn = null;
        Statement stat = null;
        try {
            conn = HBaseUtility.getConnection();
            preStat = conn.prepareStatement(sql);
            if (args != null) {
                for (int i = 0; i < args.length; i++) {
                    preStat.setObject(i + 1, args[i]);
                }
            }
            rsResultSet = preStat.executeQuery();
        } catch (Exception e) {
            dispos(conn, stat);
            Log.error(Log.DB, "queryerror|", e);
            throw new RuntimeException("hbase query error");
        } finally {
            HBaseUtility.release(conn);
        }
        return rsResultSet;
    }
}
Fixed code:
public class F {
    public ResultSet executeQuery(String sql, Object... args) throws Exception {
        ResultSet rsResultSet = null;
        Connection conn = null;
        Statement stat = null;
        try {
            // Now a local variable instead of a class field, so each call gets
            // its own statement and closes it before returning.
            PreparedStatement preStat = null;
            conn = HBaseUtility.getConnection();
            preStat = conn.prepareStatement(sql);
            if (args != null) {
                for (int i = 0; i < args.length; i++) {
                    preStat.setObject(i + 1, args[i]);
                }
            }
            rsResultSet = preStat.executeQuery();
            preStat.close(); // must be closed, otherwise statements accumulate
        } catch (Exception e) {
            dispos(conn, stat);
            Log.error(Log.DB, "queryerror|", e);
            throw new RuntimeException("hbase query error");
        } finally {
            HBaseUtility.release(conn);
        }
        return rsResultSet;
    }
}
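For reference, the same helper could also be written with try-with-resources, which closes the statement and result set automatically even when an exception is thrown before an explicit close() is reached. The sketch below is only illustrative: it assumes the same HBaseUtility.getConnection()/release() helpers as the original code, and it copies the rows into a CachedRowSet because a plain ResultSet is normally no longer readable once its statement has been closed.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import javax.sql.rowset.CachedRowSet;
import javax.sql.rowset.RowSetProvider;

public class QueryHelper {
    // Sketch only: HBaseUtility is assumed to hand out and release Phoenix
    // JDBC connections exactly as in the original code.
    public CachedRowSet executeQuery(String sql, Object... args) throws Exception {
        Connection conn = HBaseUtility.getConnection();
        try (PreparedStatement preStat = conn.prepareStatement(sql)) {
            if (args != null) {
                for (int i = 0; i < args.length; i++) {
                    preStat.setObject(i + 1, args[i]);
                }
            }
            try (ResultSet rs = preStat.executeQuery()) {
                // Copy the rows into an in-memory row set so the caller can
                // still read them after the statement and result set close.
                CachedRowSet rowSet = RowSetProvider.newFactory().createCachedRowSet();
                rowSet.populate(rs);
                return rowSet;
            }
        } finally {
            // By this point try-with-resources has already closed the
            // statement and the result set.
            HBaseUtility.release(conn);
        }
    }
}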