Server IPC version 9 cannot communicate with client version 4

Asked: 2016-03-09 23:45:26

Tags: java maven hdfs

I am trying to write to HDFS. I am sure my pom.xml pulls in the matching version of the HDFS client, so some other dependency must be causing this problem. Details below. I am using CDH 5.5.1 (Hadoop 2.6.0).

If I just specify

conf.set("fs.defaultFS",
                "localhost:8020");

it throws:

Exception in thread "main" java.lang.IllegalArgumentException: Wrong FS: hdfs://localhost:8020/user/hdfs, expected: file:///

If I specify

conf.set("fs.default.name",
         "localhost:8020");

it throws:

org.apache.hadoop.ipc.RemoteException: Server IPC version 9 cannot communicate with client version 4
    at org.apache.hadoop.ipc.Client.call(Client.java:1066)
    at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:225)
    at com.sun.proxy.$Proxy3.getProtocolVersion(Unknown Source)
    at org.apache.hadoop.ipc.RPC.getProxy(RPC.java:396)
    at org.apache.hadoop.ipc.RPC.getProxy(RPC.java:379)
    at org.apache.hadoop.hdfs.DFSClient.createRPCNamenode(DFSClient.java:118)
    at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:222)
    at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:187)
    at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:89)
    at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:1328)
    at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:65)
    at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:1346)
    at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:244)
    at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:122)

Maven dependency tree:

[INFO] +- junit:junit:jar:4.12:compile
[INFO] |  \- org.hamcrest:hamcrest-core:jar:1.3:compile
[INFO] +- com.googlecode.protobuf-java-format:protobuf-java-format:jar:1.4:compile
[INFO] +- org.apache.logging.log4j:log4j-core:jar:2.3:compile
[INFO] |  \- org.apache.logging.log4j:log4j-api:jar:2.3:compile
[INFO] +- org.apache.avro:avro-tools:jar:1.7.7:compile
[INFO] |  \- org.slf4j:slf4j-api:jar:1.6.4:compile
[INFO] +- org.apache.spark:spark-core_2.11:jar:1.5.0:compile
[INFO] |  +- org.apache.avro:avro-mapred:jar:hadoop2:1.7.7:compile
[INFO] |  |  +- org.apache.avro:avro-ipc:jar:1.7.7:compile
[INFO] |  |  +- org.apache.avro:avro-ipc:jar:tests:1.7.7:compile
[INFO] |  |  +- org.codehaus.jackson:jackson-core-asl:jar:1.9.13:compile
[INFO] |  |  \- org.codehaus.jackson:jackson-mapper-asl:jar:1.9.13:compile
[INFO] |  +- com.twitter:chill_2.11:jar:0.5.0:compile
[INFO] |  |  \- com.esotericsoftware.kryo:kryo:jar:2.21:compile
[INFO] |  |     +- com.esotericsoftware.reflectasm:reflectasm:jar:shaded:1.07:compile
[INFO] |  |     +- com.esotericsoftware.minlog:minlog:jar:1.2:compile
[INFO] |  |     \- org.objenesis:objenesis:jar:1.2:compile
[INFO] |  +- com.twitter:chill-java:jar:0.5.0:compile
[INFO] |  +- org.apache.spark:spark-launcher_2.11:jar:1.5.0:compile
[INFO] |  +- org.apache.spark:spark-network-common_2.11:jar:1.5.0:compile
[INFO] |  +- org.apache.spark:spark-network-shuffle_2.11:jar:1.5.0:compile
[INFO] |  +- org.apache.spark:spark-unsafe_2.11:jar:1.5.0:compile
[INFO] |  +- net.java.dev.jets3t:jets3t:jar:0.7.1:compile
[INFO] |  |  +- commons-codec:commons-codec:jar:1.3:compile
[INFO] |  |  \- commons-httpclient:commons-httpclient:jar:3.1:compile
[INFO] |  +- org.apache.curator:curator-recipes:jar:2.4.0:compile
[INFO] |  |  +- org.apache.curator:curator-framework:jar:2.4.0:compile
[INFO] |  |  \- com.google.guava:guava:jar:14.0.1:compile
[INFO] |  +- org.eclipse.jetty.orbit:javax.servlet:jar:3.0.0.v201112011016:compile
[INFO] |  +- org.apache.commons:commons-lang3:jar:3.3.2:compile
[INFO] |  +- org.apache.commons:commons-math3:jar:3.4.1:compile
[INFO] |  +- com.google.code.findbugs:jsr305:jar:1.3.9:compile
[INFO] |  +- org.slf4j:jul-to-slf4j:jar:1.7.10:compile
[INFO] |  +- org.slf4j:jcl-over-slf4j:jar:1.7.10:compile
[INFO] |  +- log4j:log4j:jar:1.2.17:compile
[INFO] |  +- org.slf4j:slf4j-log4j12:jar:1.7.10:compile
[INFO] |  +- com.ning:compress-lzf:jar:1.0.3:compile
[INFO] |  +- org.xerial.snappy:snappy-java:jar:1.1.1.7:compile
[INFO] |  +- net.jpountz.lz4:lz4:jar:1.3.0:compile
[INFO] |  +- org.roaringbitmap:RoaringBitmap:jar:0.4.5:compile
[INFO] |  +- com.typesafe.akka:akka-remote_2.11:jar:2.3.11:compile
[INFO] |  |  +- com.typesafe.akka:akka-actor_2.11:jar:2.3.11:compile
[INFO] |  |  |  \- com.typesafe:config:jar:1.2.1:compile
[INFO] |  |  +- io.netty:netty:jar:3.8.0.Final:compile
[INFO] |  |  \- org.uncommons.maths:uncommons-maths:jar:1.2.2a:compile
[INFO] |  +- com.typesafe.akka:akka-slf4j_2.11:jar:2.3.11:compile
[INFO] |  +- org.scala-lang:scala-library:jar:2.11.7:compile
[INFO] |  +- org.json4s:json4s-jackson_2.11:jar:3.2.10:compile
[INFO] |  |  \- org.json4s:json4s-core_2.11:jar:3.2.10:compile
[INFO] |  |     +- org.json4s:json4s-ast_2.11:jar:3.2.10:compile
[INFO] |  |     \- org.scala-lang:scalap:jar:2.11.0:compile
[INFO] |  |        \- org.scala-lang:scala-compiler:jar:2.11.0:compile
[INFO] |  |           \- org.scala-lang.modules:scala-xml_2.11:jar:1.0.1:compile
[INFO] |  +- com.sun.jersey:jersey-server:jar:1.9:compile
[INFO] |  |  \- asm:asm:jar:3.1:compile
[INFO] |  +- com.sun.jersey:jersey-core:jar:1.9:compile
[INFO] |  +- org.apache.mesos:mesos:jar:shaded-protobuf:0.21.1:compile
[INFO] |  +- io.netty:netty-all:jar:4.0.29.Final:compile
[INFO] |  +- com.clearspring.analytics:stream:jar:2.7.0:compile
[INFO] |  +- io.dropwizard.metrics:metrics-core:jar:3.1.2:compile
[INFO] |  +- io.dropwizard.metrics:metrics-jvm:jar:3.1.2:compile
[INFO] |  +- io.dropwizard.metrics:metrics-json:jar:3.1.2:compile
[INFO] |  +- io.dropwizard.metrics:metrics-graphite:jar:3.1.2:compile
[INFO] |  +- com.fasterxml.jackson.core:jackson-databind:jar:2.4.4:compile
[INFO] |  |  +- com.fasterxml.jackson.core:jackson-annotations:jar:2.4.0:compile
[INFO] |  |  \- com.fasterxml.jackson.core:jackson-core:jar:2.4.4:compile
[INFO] |  +- com.fasterxml.jackson.module:jackson-module-scala_2.11:jar:2.4.4:compile
[INFO] |  |  +- org.scala-lang:scala-reflect:jar:2.11.2:compile
[INFO] |  |  \- com.thoughtworks.paranamer:paranamer:jar:2.6:compile
[INFO] |  +- org.apache.ivy:ivy:jar:2.4.0:compile
[INFO] |  +- oro:oro:jar:2.0.8:compile
[INFO] |  +- org.tachyonproject:tachyon-client:jar:0.7.1:compile
[INFO] |  |  +- commons-lang:commons-lang:jar:2.4:compile
[INFO] |  |  +- org.apache.curator:curator-client:jar:2.1.0-incubating:compile
[INFO] |  |  +- org.tachyonproject:tachyon-underfs-hdfs:jar:0.7.1:compile
[INFO] |  |  \- org.tachyonproject:tachyon-underfs-local:jar:0.7.1:compile
[INFO] |  +- net.razorvine:pyrolite:jar:4.4:compile
[INFO] |  +- net.sf.py4j:py4j:jar:0.8.2.1:compile
[INFO] |  \- org.spark-project.spark:unused:jar:1.0.0:compile
[INFO] +- org.apache.spark:spark-sql_2.11:jar:1.5.0:provided
[INFO] |  +- org.apache.spark:spark-catalyst_2.11:jar:1.5.0:provided
[INFO] |  |  \- org.codehaus.janino:janino:jar:2.7.8:provided
[INFO] |  |     \- org.codehaus.janino:commons-compiler:jar:2.7.8:provided
[INFO] |  +- org.apache.parquet:parquet-column:jar:1.7.0:provided
[INFO] |  |  +- org.apache.parquet:parquet-common:jar:1.7.0:provided
[INFO] |  |  \- org.apache.parquet:parquet-encoding:jar:1.7.0:provided
[INFO] |  |     \- org.apache.parquet:parquet-generator:jar:1.7.0:provided
[INFO] |  \- org.apache.parquet:parquet-hadoop:jar:1.7.0:provided
[INFO] |     +- org.apache.parquet:parquet-format:jar:2.3.0-incubating:provided
[INFO] |     \- org.apache.parquet:parquet-jackson:jar:1.7.0:provided
[INFO] +- org.bouncycastle:bcprov-jdk15on:jar:1.52:compile
[INFO] +- com.google.protobuf:protobuf-java:jar:3.0.0-beta-2:compile
[INFO] +- com.databricks:spark-avro_2.11:jar:2.0.1:compile
[INFO] |  \- org.apache.avro:avro:jar:1.7.6:compile
[INFO] |     \- org.apache.commons:commons-compress:jar:1.4.1:compile
[INFO] |        \- org.tukaani:xz:jar:1.0:compile
[INFO] +- org.apache.hadoop:hadoop-client:jar:2.6.0-cdh5.5.1:compile
[INFO] |  +- org.apache.hadoop:hadoop-common:jar:2.6.0-cdh5.5.1:compile
[INFO] |  |  +- xmlenc:xmlenc:jar:0.52:compile
[INFO] |  |  +- commons-net:commons-net:jar:3.1:compile
[INFO] |  |  +- commons-collections:commons-collections:jar:3.2.2:compile
[INFO] |  |  +- commons-logging:commons-logging:jar:1.1.3:compile
[INFO] |  |  +- commons-configuration:commons-configuration:jar:1.6:compile
[INFO] |  |  |  +- commons-digester:commons-digester:jar:1.8:compile
[INFO] |  |  |  |  \- commons-beanutils:commons-beanutils:jar:1.7.0:compile
[INFO] |  |  |  \- commons-beanutils:commons-beanutils-core:jar:1.8.0:compile
[INFO] |  |  +- org.apache.hadoop:hadoop-auth:jar:2.6.0-cdh5.5.1:compile
[INFO] |  |  |  +- org.apache.httpcomponents:httpclient:jar:4.2.5:compile
[INFO] |  |  |  |  \- org.apache.httpcomponents:httpcore:jar:4.2.4:compile
[INFO] |  |  |  \- org.apache.directory.server:apacheds-kerberos-codec:jar:2.0.0-M15:compile
[INFO] |  |  |     +- org.apache.directory.server:apacheds-i18n:jar:2.0.0-M15:compile
[INFO] |  |  |     +- org.apache.directory.api:api-asn1-api:jar:1.0.0-M20:compile
[INFO] |  |  |     \- org.apache.directory.api:api-util:jar:1.0.0-M20:compile
[INFO] |  |  \- org.apache.htrace:htrace-core4:jar:4.0.1-incubating:compile
[INFO] |  +- org.apache.hadoop:hadoop-hdfs:jar:2.6.0-cdh5.5.1:compile
[INFO] |  |  +- org.mortbay.jetty:jetty-util:jar:6.1.26.cloudera.4:compile
[INFO] |  |  +- xerces:xercesImpl:jar:2.9.1:compile
[INFO] |  |  |  \- xml-apis:xml-apis:jar:1.3.04:compile
[INFO] |  |  \- org.fusesource.leveldbjni:leveldbjni-all:jar:1.8:compile
[INFO] |  +- org.apache.hadoop:hadoop-mapreduce-client-app:jar:2.6.0-cdh5.5.1:compile
[INFO] |  |  +- org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.6.0-cdh5.5.1:compile
[INFO] |  |  |  +- org.apache.hadoop:hadoop-yarn-client:jar:2.6.0-cdh5.5.1:compile
[INFO] |  |  |  \- org.apache.hadoop:hadoop-yarn-server-common:jar:2.6.0-cdh5.5.1:compile
[INFO] |  |  \- org.apache.hadoop:hadoop-mapreduce-client-shuffle:jar:2.6.0-cdh5.5.1:compile
[INFO] |  +- org.apache.hadoop:hadoop-yarn-api:jar:2.6.0-cdh5.5.1:compile
[INFO] |  +- org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.6.0-cdh5.5.1:compile
[INFO] |  |  \- org.apache.hadoop:hadoop-yarn-common:jar:2.6.0-cdh5.5.1:compile
[INFO] |  |     +- javax.servlet:servlet-api:jar:2.5:compile
[INFO] |  |     +- com.sun.jersey:jersey-client:jar:1.9:compile
[INFO] |  |     +- org.codehaus.jackson:jackson-jaxrs:jar:1.8.8:compile
[INFO] |  |     \- org.codehaus.jackson:jackson-xc:jar:1.8.8:compile
[INFO] |  +- org.apache.hadoop:hadoop-mapreduce-client-jobclient:jar:2.6.0-cdh5.5.1:compile
[INFO] |  +- org.apache.hadoop:hadoop-aws:jar:2.6.0-cdh5.5.1:compile
[INFO] |  |  \- com.amazonaws:aws-java-sdk:jar:1.7.4:compile
[INFO] |  \- org.apache.hadoop:hadoop-annotations:jar:2.6.0-cdh5.5.1:compile
[INFO] +- org.apache.kafka:kafka_2.11:jar:0.9.0-kafka-2.0.0:compile
[INFO] |  +- com.101tec:zkclient:jar:0.7:compile
[INFO] |  +- com.yammer.metrics:metrics-core:jar:2.2.0:compile
[INFO] |  +- net.sf.jopt-simple:jopt-simple:jar:4.9:compile
[INFO] |  +- org.scala-lang.modules:scala-parser-combinators_2.11:jar:1.0.4:compile
[INFO] |  \- org.apache.zookeeper:zookeeper:jar:3.4.6:compile
[INFO] |     \- jline:jline:jar:0.9.94:compile
[INFO] +- org.apache.kafka:kafka-clients:jar:0.9.0-kafka-2.0.0:compile
[INFO] +- commons-io:commons-io:jar:2.4:compile
[INFO] +- javax.xml.bind:jaxb-api:jar:2.2.11:compile
[INFO] +- org.jsoup:jsoup:jar:1.8.3:compile
[INFO] +- jdk.tools:jdk.tools:jar:1.7:compile
[INFO] +- commons-cli:commons-cli:jar:1.3.1:compile
[INFO] \- com.google.code.gson:gson:jar:2.5:compile

How can I fix this?

2 Answers:

Answer 0 (score: 0)

Check that your dependencies and their versions are consistent and correct (2.6.0):

<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-client</artifactId>
    <version>2.6.0</version>
</dependency>

<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-hdfs</artifactId>
    <version>2.6.0</version>
</dependency>

This should fix your error. You can also exclude hadoop-core in your dependencies, as sketched below. For more information on this solution, see this blog.
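A minimal sketch of what such an exclusion could look like. The some.group:some-artifact coordinates are placeholders, not taken from the question; run mvn dependency:tree and replace them with whichever dependency actually drags in the legacy hadoop-core (Hadoop 1.x, IPC client version 4) jar:

<!-- Hypothetical sketch: exclude the legacy Hadoop 1.x client from the offending dependency -->
<dependency>
    <groupId>some.group</groupId>               <!-- placeholder -->
    <artifactId>some-artifact</artifactId>      <!-- placeholder -->
    <version>x.y.z</version>                    <!-- placeholder -->
    <exclusions>
        <exclusion>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-core</artifactId>
        </exclusion>
    </exclusions>
</dependency>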

Answer 1 (score: 0)

Yes, an old hadoop-core.jar somewhere in the dependency hierarchy was causing this problem. After some research, I found something that works:

Use the following dependencies in your pom.xml.

<project xmlns="http://maven.apache.org/POM/4.0.0"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>WordCount</groupId>
    <artifactId>WordCount</artifactId>
    <version>0.0.1-SNAPSHOT</version>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>

    <dependencies>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>3.2.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>3.2.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-yarn-common</artifactId>
            <version>3.2.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-common</artifactId>
            <version>3.2.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-core</artifactId>
            <version>3.2.0</version>
        </dependency>
    </dependencies>


    <build>
        <sourceDirectory>src</sourceDirectory>
        <plugins>
            <plugin>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.8.0</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>

Sample Java code that works well: AccessHDFS.java

package com.gpl.hadoop;

import java.io.InputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class AccessHDFS {
    public static void main(String[] args) {

        // Fully qualified HDFS URI; the hdfs:// scheme selects DistributedFileSystem
        // instead of the local file system.
        String url = "hdfs://localhost:9000/user/hduser/input/test.txt";

        FileSystem fs = null;
        InputStream in = null;
        try {
            Configuration conf = new Configuration();
            // Resolve the FileSystem implementation from the URI scheme.
            fs = FileSystem.get(URI.create(url), conf);

            // Open the file and copy its contents to stdout.
            in = fs.open(new Path(url));
            IOUtils.copyBytes(in, System.out, 4096, false);

        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Close the input stream as well as the FileSystem handle.
            IOUtils.closeStream(in);
            IOUtils.closeStream(fs);
        }
    }
}

Alternatively, you can add the following repository along with the given dependency.

<repositories> 
    <repository> <id>mapr-releases</id>
        <url>http://repository.mapr.com/maven/</url>
        <snapshots><enabled>false</enabled></snapshots>
        <releases><enabled>true</enabled></releases>
    </repository>
</repositories>

<dependencies>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-core</artifactId>
        <version>2.4.1-mapr-1408</version>
    </dependency>
</dependencies>

Make sure you select the Force Update of Snapshots/Releases option when updating the Maven project:

Project -> Maven -> Update Project -> check Force Update of Snapshots/Releases -> OK
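If you build from the command line rather than from Eclipse, running mvn clean install -U should (as far as I know) force the same refresh of snapshots and releases from the remote repositories.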