我正在制作一个运行MapReduce作业的Web服务器,但是我似乎无法让服务器启动。当我尝试运行项目打包出的jar时,我收到以下错误
java.io.IOException: Failed on local exception: com.google.protobuf.InvalidProtocolBufferException: Protocol message tag had invalid wire type.; Host Details : local host is: "hadoop4-rdc.ritis.org/10.1.1.33"; destination host is: "hadoop4-rdc.ritis.org":8888;
at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:764) ~[hadoop-common-2.3.0-cdh5.1.0.jar!/:na]
at org.apache.hadoop.ipc.Client.call(Client.java:1413) ~[hadoop-common-2.3.0-cdh5.1.0.jar!/:na]
at org.apache.hadoop.ipc.Client.call(Client.java:1362) ~[hadoop-common-2.3.0-cdh5.1.0.jar!/:na]
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:206) ~[hadoop-common-2.3.0-cdh5.1.0.jar!/:na]
at com.sun.proxy.$Proxy11.getFileInfo(Unknown Source) ~[na:na]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.7.0_45]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) ~[na:1.7.0_45]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.7.0_45]
at java.lang.reflect.Method.invoke(Method.java:606) ~[na:1.7.0_45]
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:186) ~[hadoop-common-2.3.0-cdh5.1.0.jar!/:na]
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102) ~[hadoop-common-2.3.0-cdh5.1.0.jar!/:na]
at com.sun.proxy.$Proxy11.getFileInfo(Unknown Source) ~[na:na]
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getFileInfo(ClientNamenodeProtocolTranslatorPB.java:701) ~[hadoop-hdfs-2.3.0-cdh5.1.0.jar!/:na]
at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:1758) ~[hadoop-hdfs-2.3.0-cdh5.1.0.jar!/:na]
at org.apache.hadoop.hdfs.DistributedFileSystem$17.doCall(DistributedFileSystem.java:1124) ~[hadoop-hdfs-2.3.0-cdh5.1.0.jar!/:na]
at org.apache.hadoop.hdfs.DistributedFileSystem$17.doCall(DistributedFileSystem.java:1120) ~[hadoop-hdfs-2.3.0-cdh5.1.0.jar!/:na]
at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) ~[hadoop-common-2.3.0-cdh5.1.0.jar!/:na]
at org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:1120) ~[hadoop-hdfs-2.3.0-cdh5.1.0.jar!/:na]
at org.apache.hadoop.fs.FileSystem.exists(FileSystem.java:1398) ~[hadoop-common-2.3.0-cdh5.1.0.jar!/:na]
at org.apache.hadoop.mapreduce.lib.output.FileOutputFormat.checkOutputSpecs(FileOutputFormat.java:145) ~[raptor.jar:na]
at org.apache.hadoop.mapreduce.JobSubmitter.checkSpecs(JobSubmitter.java:458) ~[raptor.jar:na]
at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:343) ~[raptor.jar:na]
at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1295) ~[raptor.jar:na]
at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1292) ~[raptor.jar:na]
at java.security.AccessController.doPrivileged(Native Method) ~[na:1.7.0_45]
at javax.security.auth.Subject.doAs(Subject.java:415) ~[na:1.7.0_45]
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1554) ~[hadoop-common-2.3.0-cdh5.1.0.jar!/:na]
at org.apache.hadoop.mapreduce.Job.submit(Job.java:1292) ~[raptor.jar:na]
at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1313) ~[raptor.jar:na]
at org.springframework.data.hadoop.mapreduce.JobExecutor$2.run(JobExecutor.java:199) ~[spring-data-hadoop-core-2.0.3.RELEASE.jar!/:2.0.3.RELEASE]
at org.springframework.core.task.SyncTaskExecutor.execute(SyncTaskExecutor.java:50) [spring-core-4.1.0.RELEASE.jar!/:4.1.0.RELEASE]
at org.springframework.data.hadoop.mapreduce.JobExecutor.startJobs(JobExecutor.java:170) [spring-data-hadoop-core-2.0.3.RELEASE.jar!/:2.0.3.RELEASE]
at org.springframework.data.hadoop.mapreduce.JobExecutor.startJobs(JobExecutor.java:162) [spring-data-hadoop-core-2.0.3.RELEASE.jar!/:2.0.3.RELEASE]
at org.springframework.data.hadoop.mapreduce.JobRunner.call(JobRunner.java:52) [spring-data-hadoop-core-2.0.3.RELEASE.jar!/:2.0.3.RELEASE]
at org.springframework.data.hadoop.mapreduce.JobRunner.afterPropertiesSet(JobRunner.java:44) [spring-data-hadoop-core-2.0.3.RELEASE.jar!/:2.0.3.RELEASE]
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeInitMethods(AbstractAutowireCapableBeanFactory.java:1627) [spring-beans-4.1.0.RELEASE.jar!/:4.1.0.RELEASE]
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1564) [spring-beans-4.1.0.RELEASE.jar!/:4.1.0.RELEASE]
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:540) [spring-beans-4.1.0.RELEASE.jar!/:4.1.0.RELEASE]
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:476) [spring-beans-4.1.0.RELEASE.jar!/:4.1.0.RELEASE]
at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:302) [spring-beans-4.1.0.RELEASE.jar!/:4.1.0.RELEASE]
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:229) [spring-beans-4.1.0.RELEASE.jar!/:4.1.0.RELEASE]
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:298) [spring-beans-4.1.0.RELEASE.jar!/:4.1.0.RELEASE]
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:193) [spring-beans-4.1.0.RELEASE.jar!/:4.1.0.RELEASE]
at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:725) [spring-beans-4.1.0.RELEASE.jar!/:4.1.0.RELEASE]
at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:762) [spring-context-4.0.7.RELEASE.jar!/:4.0.7.RELEASE]
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:482) [spring-context-4.0.7.RELEASE.jar!/:4.0.7.RELEASE]
at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:197) [spring-context-4.0.7.RELEASE.jar!/:4.0.7.RELEASE]
at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:172) [spring-context-4.0.7.RELEASE.jar!/:4.0.7.RELEASE]
at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:158) [spring-context-4.0.7.RELEASE.jar!/:4.0.7.RELEASE]
at edu.umd.cattlab.hadoop.springboot.hello.HelloApplication.main(HelloApplication.java:25) [raptor.jar:na]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.7.0_45]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) ~[na:1.7.0_45]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.7.0_45]
at java.lang.reflect.Method.invoke(Method.java:606) ~[na:1.7.0_45]
at org.springframework.boot.loader.MainMethodRunner.run(MainMethodRunner.java:53) [raptor.jar:na]
at java.lang.Thread.run(Thread.java:744) [na:1.7.0_45]
此后还有许多错误,但它们都报同样的信息:"协议消息标记的线路类型无效(Protocol message tag had invalid wire type)"。
这是我的application-context.xml:
<?xml version="1.0" encoding="UTF-8"?>
<beans:beans xmlns="http://www.springframework.org/schema/hadoop"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:beans="http://www.springframework.org/schema/beans"
xmlns:context="http://www.springframework.org/schema/context"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd
http://www.springframework.org/schema/hadoop http://www.springframework.org/schema/hadoop/spring-hadoop.xsd">
<configuration>
fs.defaultFS=hdfs://hadoop4-rdc.ritis.org:8888
mapred.job.tracker=hadoop4-rdc.ritis.org:19888
</configuration>
<job
id="michigan_avro"
mapper="edu.umd.cattlab.hadoop.avro.TmcAvroMapper"
output-path="/user/cne1s0n/avro_files/test"
/>
<job-runner
id="runner"
job-ref="michigan_avro"
run-at-startup="true"
/>
</beans:beans>
我正在使用Hadoop 2.3.0 CDH5.1.0和Gradle 2.0构建我的项目,依赖关系如下:
compile 'org.clojure:clojure:1.2.0'
compile 'org.apache.storm:storm-core:0.9.1-incubating'
compile 'org.apache.hadoop:hadoop-common:2.3.0-cdh5.1.0' with no -SNAPSHOT
compile 'org.apache.hbase:hbase-server:0.98.1-cdh5.1.0'
compile 'org.apache.hbase:hbase-client:0.98.1-cdh5.1.0'
compile 'org.apache.hbase:hbase-common:0.96.1.1-cdh5.1.0'
compile 'org.apache.hadoop:hadoop-mapreduce-client-core:2.3.0-cdh5.1.0'
compile 'org.apache.hadoop:hadoop-yarn-common:2.3.0-cdh5.1.0'
compile 'org.apache.hadoop:hadoop-core:2.3.0-cdh5.1.0'
compile 'org.apache.hadoop:hadoop-hdfs:2.3.0-cdh5.1.0'
compile 'org.apache.hive:hive-hbase-handler:0.12.0-cdh5.1.0'
compile 'com.fasterxml.jackson.core:jackson-databind:2.3.3'
compile 'com.googlecode.json-simple:json-simple:1.1.1'
compile 'joda-time:joda-time:2.4'
compile 'org.apache.poi:poi:3.10.1'
compile 'org.apache.poi:poi-ooxml:3.10.1'
compile 'org.apache.xmlbeans:xmlbeans:2.6.0'
compile 'org.apache.poi:poi-ooxml-schemas:3.10.1'
compile 'dom4j:dom4j:1.6.1'
compile 'org.postgresql:postgresql:9.2-1003-jdbc4'
compile 'org.eclipse.jetty.aggregate:jetty-all-server:8.1.15.v20140411'
compile("org.springframework.boot:spring-boot-starter-web:1.1.8.RELEASE")
compile("org.springframework.boot:spring-boot-starter-jetty:1.1.8.RELEASE")
//providedRuntime("org.springframework.boot:spring-boot-starter-jetty:1.1.8.RELEASE")
compile 'org.apache.hadoop:hadoop-mapreduce-client-jobclient:2.3.0'
compile 'org.apache.hadoop:hadoop-mapreduce-client-common:2.3.0'
compile 'org.apache.hadoop:hadoop-mapreduce-client-shuffle:2.3.0'
compile 'org.springframework.data:spring-data-hadoop:2.0.3.RELEASE'
compile files('application-context.xml')
最后,这是我正在尝试运行的应用程序:
@EnableAutoConfiguration
@ComponentScan
@Configuration
public class HelloApplication {
public static void main(String[] args) {
AbstractApplicationContext context = new ClassPathXmlApplicationContext(
"../../../../../../application-context.xml", HelloApplication.class);
context.registerShutdownHook();
context.close();
System.out.println("Inspecting beans provided by Spring Boot:");
String[] beanNames = context.getBeanDefinitionNames();
Arrays.sort(beanNames);
for (String beanName : beanNames) {
System.out.println(beanName);
}
}
}
提前感谢,任何建议或帮助都会很棒。