Hadoop Exception in thread "main" java.io.FileNotFoundException: hadoop-mapreduce-client-core-2.6.0.jar even though the file exists

Date: 2015-05-23 09:33:00

Tags: hadoop mapreduce hbase hdfs

I am using hadoop-2.6.0 and hbase-0.98.9. When I run a hadoop job it throws java.io.FileNotFoundException, even though the file exists and is also on the classpath; it keeps looking for it under an hdfs:// path. What could be the problem? I did check here, but that question is about third-party jars, whereas this jar is already on the classpath. Here is the error:

15/05/23 02:08:39 INFO zookeeper.ZooKeeper: Initiating client connection, connectString=localhost:2181 sessionTimeout=90000 watcher=hconnection-0x6737ca, quorum=localhost:2181, baseZNode=/hbase
15/05/23 02:08:39 INFO zookeeper.ClientCnxn: Opening socket connection to server localhost/127.0.0.1:2181. Will not attempt to authenticate using SASL (unknown error)
15/05/23 02:08:39 INFO zookeeper.ClientCnxn: Socket connection established to localhost/127.0.0.1:2181, initiating session
15/05/23 02:08:39 INFO zookeeper.ClientCnxn: Session establishment complete on server localhost/127.0.0.1:2181, sessionid = 0x14d7fd352eb000c, negotiated timeout = 40000
15/05/23 02:08:40 INFO mapreduce.TableOutputFormat: Created table instance for Energy
15/05/23 02:08:40 WARN mapreduce.JobSubmitter: Hadoop command-line option parsing not performed. Implement the Tool interface and execute your application with ToolRunner to remedy this.
15/05/23 02:08:40 INFO mapreduce.JobSubmitter: Cleaning up the staging area file:/home/vijaykumar/hadoop/hadoop_tmpdir/mapred/staging/vijaykumar1706101359/.staging/job_local1706101359_0001
Exception in thread "main" java.io.FileNotFoundException: File does not exist: hdfs://localhost:54310/home/vijaykumar/hadoop/hadoop-2.6.0/share/hadoop/mapreduce/hadoop-mapreduce-client-core-2.6.0.jar
	at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:1122)
	at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:1114)
	at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
	at org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:1114)
	at org.apache.hadoop.mapreduce.filecache.ClientDistributedCacheManager.getFileStatus(ClientDistributedCacheManager.java:288)
	at org.apache.hadoop.mapreduce.filecache.ClientDistributedCacheManager.getFileStatus(ClientDistributedCacheManager.java:224)
	at org.apache.hadoop.mapreduce.filecache.ClientDistributedCacheManager.determineTimestamps(ClientDistributedCacheManager.java:93)
	at org.apache.hadoop.mapreduce.filecache.ClientDistributedCacheManager.determineTimestampsAndCacheVisibilities(ClientDistributedCacheManager.java:57)
	at org.apache.hadoop.mapreduce.JobSubmitter.copyAndConfigureFiles(JobSubmitter.java:269)
	at org.apache.hadoop.mapreduce.JobSubmitter.copyAndConfigureFiles(JobSubmitter.java:390)
	at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:483)
	at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1296)
	at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1293)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:415)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
	at org.apache.hadoop.mapreduce.Job.submit(Job.java:1293)
	at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1314)
	at habseWrite.run(habseWrite.java:142)
	at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
	at habseWrite.main(habseWrite.java:107)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
	at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
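The path in the exception is my local Hadoop install directory, so it looks like a scheme-less jar path is being qualified against fs.defaultFS instead of the local file system. Below is a minimal sketch of that resolution (the jar path and the hdfs://localhost:54310 value are taken from the exception above; Configuration, Path and FileSystem are the usual org.apache.hadoop classes). It only illustrates why the submitter ends up on HDFS, it is not a fix:

Configuration conf = new Configuration();
conf.set("fs.defaultFS", "hdfs://localhost:54310");   // value taken from the exception message
// A Path without a scheme is qualified against fs.defaultFS, which is why
// the local jar path is looked up on HDFS.
Path jar = new Path("/home/vijaykumar/hadoop/hadoop-2.6.0/share/hadoop/mapreduce/hadoop-mapreduce-client-core-2.6.0.jar");
FileSystem fs = jar.getFileSystem(conf);
System.out.println(fs.makeQualified(jar));
// prints hdfs://localhost:54310/home/vijaykumar/hadoop/hadoop-2.6.0/share/hadoop/mapreduce/hadoop-mapreduce-client-core-2.6.0.jar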

Mapper class

public static class WriteMapper extends Mapper<LongWritable, Text, IntWritable, Text> {

    IntWritable k = new IntWritable();
    Text res = new Text();

    public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        // Use the line's hashCode as the map output key and pass the line through unchanged.
        String val = value.toString();
        int row = val.hashCode();
        k.set(row);
        res.set(val);
        context.write(k, res);
    }
}

Reducer code

public static class WriteReducer extends TableReducer<IntWritable, Text, Text> {
    public static final byte[] area = "Area".getBytes();
    public static final byte[] prop = "Property".getBytes();
    private Text rowkey = new Text();
    private int rowCount = 0;

    public void reduce(IntWritable key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
        String X1 = "", X2 = "", X3 = "", X4 = "", X5 = "",
               X6 = "", X7 = "", X8 = "", Y1 = "", Y2 = "";
        // Each iteration overwrites the fields, so only the last value seen for a key is written.
        for (Text val : values) {
            String[] v = val.toString().split("\t");
            X1 = v[0];
            X2 = v[1];
            X3 = v[2];
            X4 = v[3];
            X5 = v[4];
            X6 = v[5];
            X7 = v[6];
            X8 = v[7];
            Y1 = v[8];
            Y2 = v[9];
        }
        String k = "row" + rowCount;
        Put put = new Put(Bytes.toBytes(k));
        put.add(area, "X1".getBytes(), Bytes.toBytes(X1));
        put.add(area, "X5".getBytes(), Bytes.toBytes(X5));
        put.add(area, "X6".getBytes(), Bytes.toBytes(X6));
        put.add(area, "Y1".getBytes(), Bytes.toBytes(Y1));
        put.add(area, "Y2".getBytes(), Bytes.toBytes(Y2));
        put.add(prop, "X2".getBytes(), Bytes.toBytes(X2));
        put.add(prop, "X3".getBytes(), Bytes.toBytes(X3));
        put.add(prop, "X4".getBytes(), Bytes.toBytes(X4));
        put.add(prop, "X7".getBytes(), Bytes.toBytes(X7));
        put.add(prop, "X8".getBytes(), Bytes.toBytes(X8));
        rowCount++;
        rowkey.set(k);
        context.write(rowkey, put);
    }
}
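For reference, the reducer assumes the Energy table already exists with the Area and Property column families (the log above shows the table instance being created). A one-time setup along these lines would create it; this is a hypothetical sketch using the 0.98-era HBaseAdmin API (HBaseConfiguration, HBaseAdmin, HTableDescriptor, HColumnDescriptor and TableName come from org.apache.hadoop.hbase), not part of the job code:

Configuration conf = HBaseConfiguration.create();
HBaseAdmin admin = new HBaseAdmin(conf);
HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("Energy"));
desc.addFamily(new HColumnDescriptor("Area"));      // holds X1, X5, X6, Y1, Y2
desc.addFamily(new HColumnDescriptor("Property"));  // holds X2, X3, X4, X7, X8
if (!admin.tableExists("Energy")) {
    admin.createTable(desc);
}
admin.close();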

Main

public static void main(String[] args) throws Exception {
    int res = ToolRunner.run(new Configuration(), new habseWrite(), args);
    System.exit(res);
}

public int run(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
    Configuration conf = HBaseConfiguration.create();
    String inputPath = args[0];
    Job job = new Job(conf, "HBase_write");
    job.setInputFormatClass(TextInputFormat.class);
    job.setJarByClass(habseWrite.class);
    job.setMapperClass(habseWrite.WriteMapper.class);
    job.setMapOutputKeyClass(IntWritable.class);
    job.setMapOutputValueClass(Text.class);

    TableMapReduceUtil.initTableReducerJob(
            "Energy",              // output table
            WriteReducer.class,    // reducer class
            job);
    job.setNumReduceTasks(1);

    FileInputFormat.setInputPaths(job, inputPath);
    return (job.waitForCompletion(true) ? 0 : 1);
}
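The stack trace goes through ClientDistributedCacheManager, which stats the jars registered for the distributed cache, and as far as I can tell TableMapReduceUtil's addDependencyJars records those jars in the tmpjars property. A small debugging sketch I would drop into run() right after initTableReducerJob to see which paths get registered and which default filesystem they will be resolved against:

// Debugging only, not part of the job: dump the dependency jars and the
// filesystem settings the JobSubmitter will use to resolve them.
System.out.println("tmpjars                  = " + job.getConfiguration().get("tmpjars"));
System.out.println("fs.defaultFS             = " + job.getConfiguration().get("fs.defaultFS"));
System.out.println("mapreduce.framework.name = " + job.getConfiguration().get("mapreduce.framework.name"));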

0 Answers:

No answers yet