Command being run:
./jsvc64/jsvc64 -pidfile ./log/jsvc.pid -outfile ./log/out.txt -errfile ./log/error.txt -Xmx512m -Djava.util.Arrays.useLegacyMergeSort=true -cp :./tools/lib/:./tools/ com.g2us.hbase.cmdlog.monitor.CmdLogHbase ./
SQL:
UPSERT INTO CMDLOG_20130818(游戏,roleid,otime,logtype,passport,subgame,cmdid,exception,moreinfo,pname_0,pname_1,pname_2) VALUES(?,?,?,?,?,?,?,?,?,?,?,?)
The job fails with the exception below after upserting roughly 90,000 rows. How can I fix this?
Exception in thread "Thread-0" java.lang.OutOfMemoryError: GC overhead limit exceeded
at java.lang.reflect.Method.copy(Method.java:143)
at java.lang.reflect.ReflectAccess.copyMethod(ReflectAccess.java:118)
at sun.reflect.ReflectionFactory.copyMethod(ReflectionFactory.java:282)
at java.lang.Class.copyMethods(Class.java:2748)
at java.lang.Class.getMethods(Class.java:1410)
at org.apache.hadoop.hbase.ipc.Invocation.<init>(Invocation.java:67)
at org.apache.hadoop.hbase.ipc.WritableRpcEngine$Invoker.invoke(WritableRpcEngine.java:86)
at $Proxy8.getClosestRowBefore(Unknown Source)
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegionInMeta(HConnectionManager.java:1019)
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:885)
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:846)
at org.apache.hadoop.hbase.client.HTable.finishSetup(HTable.java:271)
at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:211)
at org.apache.hadoop.hbase.client.MetaScanner.metaScan(MetaScanner.java:160)
at org.apache.hadoop.hbase.client.MetaScanner.access$000(MetaScanner.java:54)
at org.apache.hadoop.hbase.client.MetaScanner$1.connect(MetaScanner.java:133)
at org.apache.hadoop.hbase.client.MetaScanner$1.connect(MetaScanner.java:130)
at org.apache.hadoop.hbase.client.HConnectionManager.execute(HConnectionManager.java:383)
at org.apache.hadoop.hbase.client.MetaScanner.metaScan(MetaScanner.java:130)
at org.apache.hadoop.hbase.client.MetaScanner.metaScan(MetaScanner.java:105)
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.prefetchRegionCache(HConnectionManager.java:947)
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegionInMeta(HConnectionManager.java:1002)
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:889)
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:846)
at org.apache.hadoop.hbase.client.HTable.finishSetup(HTable.java:271)
at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:263)
at com.salesforce.phoenix.query.HTableFactory$HTableFactoryImpl.getTable(HTableFactory.java:60)
at com.salesforce.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:133)
at com.salesforce.phoenix.execute.MutationState.commit(MutationState.java:227)
at com.salesforce.phoenix.jdbc.PhoenixConnection.commit(PhoenixConnection.java:244)
at com.g2us.hbase.phoenix.HBaseHelper.executeUpdate(HBaseHelper.java:62)
at com.g2us.hbase.cmdlog.io.BaseLogPoster.upsertRow(BaseLogPoster.java:153)
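
For context, here is a minimal sketch of how such an upsert loop is commonly structured with the Phoenix JDBC driver, committing in small batches so the client-side MutationState does not accumulate all ~90,000 rows before a single commit. The ZooKeeper host ("zk-host"), the batch size, the class name CmdLogUpsertSketch, and the loadRows() data source are placeholders, not the actual code from BaseLogPoster.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;
    import java.sql.SQLException;

    public class CmdLogUpsertSketch {
        private static final String UPSERT_SQL =
            "UPSERT INTO CMDLOG_20130818(游戏,roleid,otime,logtype,passport,subgame,"
            + "cmdid,exception,moreinfo,pname_0,pname_1,pname_2) "
            + "VALUES(?,?,?,?,?,?,?,?,?,?,?,?)";

        public static void main(String[] args) throws SQLException {
            // "zk-host" is a placeholder for the real ZooKeeper quorum.
            try (Connection conn = DriverManager.getConnection("jdbc:phoenix:zk-host")) {
                conn.setAutoCommit(false); // Phoenix buffers mutations until commit()
                try (PreparedStatement ps = conn.prepareStatement(UPSERT_SQL)) {
                    int rows = 0;
                    for (String[] row : loadRows()) {       // hypothetical data source
                        for (int i = 0; i < 12; i++) {
                            // All columns bound as strings here for brevity; use the
                            // setXxx method matching each real column type.
                            ps.setString(i + 1, row[i]);
                        }
                        ps.executeUpdate();
                        // Commit periodically so the client only holds a small batch
                        // of mutations in memory at any time.
                        if (++rows % 2000 == 0) {
                            conn.commit();
                        }
                    }
                    conn.commit(); // flush the final partial batch
                }
            }
        }

        // Placeholder for however the log rows are actually produced.
        private static Iterable<String[]> loadRows() {
            return java.util.Collections.<String[]>emptyList();
        }
    }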