I got an error when running k-means clustering with Mahout on Hadoop. After checking the error log, I think it may be caused by the Hadoop native libraries, but when I recompile Hadoop myself and rerun the job, it always dies and dumps an hs_err_pid*.log file with the following contents:
#
# A fatal error has been detected by the Java Runtime Environment:
#
# SIGFPE (0x8) at pc=0x00002aae75c8168f, pid=30832, tid=1076017472
#
# JRE version: 6.0_29-b11
# Java VM: Java HotSpot(TM) 64-Bit Server VM (20.4-b02 mixed mode linux-amd64 compressed oops)
# Problematic frame:
# C [ld-linux-x86-64.so.2+0x868f] double+0xcf
#
# If you would like to submit a bug report, please visit:
# http://java.sun.com/webapps/bugreport/crash.jsp
# The crash happened outside the Java Virtual Machine in native code.
# See problematic frame for where to report the bug.
#
--------------- T H R E A D ---------------
Current thread (0x0000000040115000): JavaThread "main" [_thread_in_native, id=30863, stack(0x000000004012b000,0x000000004022c000)]
siginfo:si_signo=SIGFPE: si_errno=0, si_code=1 (FPE_INTDIV), si_addr=0x00002aae75c8168f
Registers:
RAX=0x000000000f4d007f, RBX=0x0000000000000000, RCX=0x0000000040227d00, RDX=0x0000000000000000
RSP=0x0000000040227ba0, RBP=0x0000000040227d40, RSI=0x000000000f4d007f, RDI=0x00002aaab90008f1
R8 =0x00002aaab8e694c0, R9 =0x0000000000000000, R10=0x0000000000000000, R11=0xffffffffffffffff
R12=0x0000000040227d00, R13=0x00002aaab8e69210, R14=0x0000000000000000, R15=0x00002aaab90002c0
RIP=0x00002aae75c8168f, EFLAGS=0x0000000000010246, CSGSFS=0x0000000000000033, ERR=0x0000000000000000
TRAPNO=0x0000000000000000
Java frames: (J=compiled Java code, j=interpreted, Vv=VM code)
j java.lang.ClassLoader$NativeLibrary.load(Ljava/lang/String;)V+0
j java.lang.ClassLoader.loadLibrary0(Ljava/lang/Class;Ljava/io/File;)Z+300
j java.lang.ClassLoader.loadLibrary(Ljava/lang/Class;Ljava/lang/String;Z)V+347
j java.lang.Runtime.loadLibrary0(Ljava/lang/Class;Ljava/lang/String;)V+54
j java.lang.System.loadLibrary(Ljava/lang/String;)V+7
j org.apache.hadoop.util.NativeCodeLoader.<clinit>()V+25
v ~StubRoutines::call_stub
j org.apache.hadoop.io.compress.zlib.ZlibFactory.<clinit>()V+13
v ~StubRoutines::call_stub
j org.apache.hadoop.io.compress.DefaultCodec.getCompressorType()Ljava/lang/Class;+4
j org.apache.hadoop.io.compress.CodecPool.getCompressor(Lorg/apache/hadoop/io/compress/CompressionCodec;Lorg/apache/hadoop/conf/Configuration;)Lorg/apache/hadoop/io/compress/Compressor;+4
j org.apache.hadoop.io.compress.CodecPool.getCompressor(Lorg/apache/hadoop/io/compress/CompressionCodec;)Lorg/apache/hadoop/io/compress/Compressor;+2
j org.apache.hadoop.io.SequenceFile$Writer.init(Lorg/apache/hadoop/fs/Path;Lorg/apache/hadoop/conf/Configuration;Lorg/apache/hadoop/fs/FSDataOutputStream;Ljava/lang/Class;Ljava/lang/Class;ZLorg/apache/hadoop/io/compress/CompressionCodec;Lorg/apache/hadoop/io/SequenceFile$Metadata;)V+121
j org.apache.hadoop.io.SequenceFile$RecordCompressWriter.<init>(Lorg/apache/hadoop/fs/FileSystem;Lorg/apache/hadoop/conf/Configuration;Lorg/apache/hadoop/fs/Path;Ljava/lang/Class;Ljava/lang/Class;ISJLorg/apache/hadoop/io/compress/CompressionCodec;Lorg/apache/hadoop/util/Progressable;Lorg/apache/hadoop/io/SequenceFile$Metadata;)V+30
j org.apache.hadoop.io.SequenceFile.createWriter(Lorg/apache/hadoop/fs/FileSystem;Lorg/apache/hadoop/conf/Configuration;Lorg/apache/hadoop/fs/Path;Ljava/lang/Class;Ljava/lang/Class;ISJLorg/apache/hadoop/io/SequenceFile$CompressionType;Lorg/apache/hadoop/io/compress/CompressionCodec;Lorg/apache/hadoop/util/Progressable;Lorg/apache/hadoop/io/SequenceFile$Metadata;)Lorg/apache/hadoop/io/SequenceFile$Writer;+100
j org.apache.hadoop.io.SequenceFile.createWriter(Lorg/apache/hadoop/fs/FileSystem;Lorg/apache/hadoop/conf/Configuration;Lorg/apache/hadoop/fs/Path;Ljava/lang/Class;Ljava/lang/Class;Lorg/apache/hadoop/io/SequenceFile$CompressionType;)Lorg/apache/hadoop/io/SequenceFile$Writer;+43
j org.apache.hadoop.io.SequenceFile.createWriter(Lorg/apache/hadoop/fs/FileSystem;Lorg/apache/hadoop/conf/Configuration;Lorg/apache/hadoop/fs/Path;Ljava/lang/Class;Ljava/lang/Class;)Lorg/apache/hadoop/io/SequenceFile$Writer;+10
j org.apache.mahout.clustering.kmeans.RandomSeedGenerator.buildRandom(Lorg/apache/hadoop/conf/Configuration;Lorg/apache/hadoop/fs/Path;Lorg/apache/hadoop/fs/Path;ILorg/apache/mahout/common/distance/DistanceMeasure;)Lorg/apache/hadoop/fs/Path;+101
j org.apache.mahout.clustering.kmeans.KMeansDriver.run([Ljava/lang/String;)I+264
j org.apache.hadoop.util.ToolRunner.run(Lorg/apache/hadoop/conf/Configuration;Lorg/apache/hadoop/util/Tool;[Ljava/lang/String;)I+38
j org.apache.mahout.clustering.kmeans.KMeansDriver.main([Ljava/lang/String;)V+15
v ~StubRoutines::call_stub
j sun.reflect.NativeMethodAccessorImpl.invoke0(Ljava/lang/reflect/Method;Ljava/lang/Object;[Ljava/lang/Object;)Ljava/lang/Object;+0
j sun.reflect.NativeMethodAccessorImpl.invoke(Ljava/lang/Object;[Ljava/lang/Object;)Ljava/lang/Object;+87
j sun.reflect.DelegatingMethodAccessorImpl.invoke(Ljava/lang/Object;[Ljava/lang/Object;)Ljava/lang/Object;+6
j java.lang.reflect.Method.invoke(Ljava/lang/Object;[Ljava/lang/Object;)Ljava/lang/Object;+161
j org.apache.hadoop.util.RunJar.main([Ljava/lang/String;)V+538
v ~StubRoutines::call_stub
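From the stack it looks like the JVM dies inside the dynamic linker (ld-linux-x86-64.so.2, SIGFPE/FPE_INTDIV) while System.loadLibrary is pulling in the native Hadoop library during NativeCodeLoader's static initializer, which is why I suspect my rebuilt libhadoop.so. To isolate it from Hadoop and Mahout, I am thinking of loading the library directly with a tiny test class (a minimal sketch; the class name is mine, and it assumes the rebuilt libhadoop.so is on java.library.path):

    // Hypothetical isolation test: load the rebuilt libhadoop.so directly.
    // If the dynamic linker is at fault, this should crash the same way,
    // without Hadoop or Mahout involved.
    public class LoadHadoopNative {
        public static void main(String[] args) {
            // Resolves libhadoop.so from -Djava.library.path
            System.loadLibrary("hadoop");
            System.out.println("libhadoop loaded OK");
        }
    }

I would run it with something like the following (the native directory name is what my Hadoop build produced; yours may differ):

    java -Djava.library.path=$HADOOP_HOME/lib/native/Linux-amd64-64 LoadHadoopNative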
Can anyone help me or give me some advice?

xianwu