我是Hadoop的新手。我遵循了一些教程并尝试在我的hadoop集群中运行该示例,其版本为2.7.0,不幸的是我得到了如下错误:
$ javac *.java
$ jar cvf myjar.jar *.class
$ hadoop jar ./myjar.jar com.packt.ch3.etl.ParseWeblogs /user/hadoop/apache_clf.txt /user/hadoop/apache_clf_tsv
Exception in thread "main" java.lang.ClassNotFoundException: com.packt.ch3.etl.ParseWeblogs
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
at org.apache.hadoop.util.RunJar.run(RunJar.java:214)
at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
我的类路径设置如下:
export CLASSPATH=.:$JAVA_HOME/jre/lib/rt.jar:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar:$ANT_HOME/lib/ant-launcher.jar
export CLASSPATH="/usr/local/hadoop/share/hadoop/common/hadoop-common-2.7.0.jar:/usr/local/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-core-2.7.0.jar:/usr/local/hadoop/share/hadoop/common/lib/commons-cli-1.2.jar:$CLASSPATH"
文件ParseWeblogs.java的源代码如下所示:
package com.packt.ch3.etl;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
/**
 * Driver for a map-only MapReduce job that parses Apache Combined Log Format
 * (CLF) weblog lines into tab-separated records keyed by client IP.
 */
public class ParseWeblogs extends Configured implements Tool {

    /**
     * Configures and submits the weblog-parsing job.
     *
     * @param args args[0] = HDFS input path, args[1] = HDFS output path
     * @return 0 if the job completed successfully, 1 otherwise
     * @throws Exception if job submission or execution fails
     */
    public int run(String[] args) throws Exception {
        Path inputPath = new Path(args[0]);
        Path outputPath = new Path(args[1]);

        Configuration conf = getConf();
        // Job.getInstance(...) replaces the constructor Job(Configuration, String),
        // which is deprecated as of Hadoop 2.x.
        Job weblogJob = Job.getInstance(conf, "Weblog Transformer");
        // Ships the jar containing this class to the task nodes; without it the
        // tasks cannot locate user classes (a common cause of ClassNotFoundException).
        weblogJob.setJarByClass(ParseWeblogs.class);

        // Map-only job: no reducers, mapper output is the job output.
        weblogJob.setNumReduceTasks(0);
        weblogJob.setMapperClass(CLFMapper.class);
        weblogJob.setMapOutputKeyClass(Text.class);
        weblogJob.setMapOutputValueClass(Text.class);
        weblogJob.setOutputKeyClass(Text.class);
        weblogJob.setOutputValueClass(Text.class);
        weblogJob.setInputFormatClass(TextInputFormat.class);
        weblogJob.setOutputFormatClass(TextOutputFormat.class);

        FileInputFormat.setInputPaths(weblogJob, inputPath);
        FileOutputFormat.setOutputPath(weblogJob, outputPath);

        return weblogJob.waitForCompletion(true) ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
        int returnCode = ToolRunner.run(new ParseWeblogs(), args);
        System.exit(returnCode);
    }
}
++++++++++++++++++++++++++++++++++++++++
文件CLFMapper.java的源代码如下所示:
package com.packt.ch3.etl;
import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
/**
 * Parses one Apache Combined Log Format line per call and emits
 * (client IP, "timestamp\tpage\tstatus\tbytes\tuseragent").
 * Lines that do not match the format or carry an unparseable date are skipped.
 */
public class CLFMapper extends Mapper<Object, Text, Text, Text> {

    // Locale.ENGLISH is required so the "MMM" month abbreviations written by
    // Apache ("Jan", "Feb", ...) parse correctly regardless of the JVM's
    // default locale; without it every record is dropped on non-English JVMs.
    // SimpleDateFormat is not thread-safe, but each mapper instance is used by
    // a single task thread, so an instance field is safe here.
    private final SimpleDateFormat dateFormatter =
            new SimpleDateFormat("dd/MMM/yyyy:HH:mm:ss Z", Locale.ENGLISH);

    // Combined Log Format. Capture groups used below:
    // 1 = client IP, 4 = date, 6 = page, 8 = HTTP status, 9 = bytes, 12 = user agent.
    // The bytes field is matched with \S+ rather than \d+ because Apache
    // writes "-" when no response body was sent.
    private static final Pattern LOG_PATTERN = Pattern.compile(
            "^([\\d.]+) (\\S+) (\\S+) \\[([\\w:/]+\\s[+\\-]\\d{4})\\] \"(\\w+) (.+?) (.+?)\" (\\d+) (\\S+) \"([^\"]+|(.+?))\" \"([^\"]+|(.+?))\"",
            Pattern.DOTALL);

    private final Text outputKey = new Text();
    private final Text outputValue = new Text();

    @Override
    protected void map(Object key, Text value, Context context)
            throws IOException, InterruptedException {
        Matcher m = LOG_PATTERN.matcher(value.toString());
        if (!m.matches()) {
            return; // not a valid CLF line: skip silently
        }
        Date date;
        try {
            date = dateFormatter.parse(m.group(4));
        } catch (ParseException ex) {
            return; // unparseable timestamp: skip the record
        }
        outputKey.set(m.group(1)); // client IP
        StringBuilder b = new StringBuilder();
        b.append(date.getTime()); // epoch milliseconds
        b.append('\t');
        b.append(m.group(6)); // requested page
        b.append('\t');
        b.append(m.group(8)); // HTTP status code
        b.append('\t');
        b.append(m.group(9)); // response bytes ("-" when no body)
        b.append('\t');
        b.append(m.group(12)); // user agent
        outputValue.set(b.toString());
        context.write(outputKey, outputValue);
    }
}
有人能帮帮我吗？我尝试了几种解决方案，但问题仍未解决。非常感谢！
答案 0（得分：1）
这个 ClassNotFoundException 的常见根本原因是 jar 包里缺少与包名对应的目录结构：`javac *.java` 不加 `-d` 会把 .class 文件生成在当前目录，随后 `jar cvf myjar.jar *.class` 打出的 jar 中没有 `com/packt/ch3/etl/` 目录，于是 `hadoop jar` 按全限定类名查找时找不到类。请改用 `javac -d . *.java` 编译（生成正确的包目录），再用 `jar cvf myjar.jar com/` 打包。另外，你可以试试下面这个我稍作修改的 Driver 类，看看是否有效。
var difference = (new Date(databaseDate).getTime()-Date.now())/1000;