部署 maven jar 时抛出异常

时间:2017-11-07 11:56:37

标签: maven hadoop pom.xml

我正在尝试使用我的java代码连接Hdfs。

package com.main.CopyToHDFS;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;


public class ConnectHdfs {

    /**
     * Creates an empty file on HDFS through the WebHDFS protocol.
     *
     * <p>Backward-compatible generalization: the WebHDFS endpoint and the
     * target path may be supplied on the command line; when absent, the
     * original hard-coded defaults are used.
     *
     * @param args optional — {@code args[0]} = WebHDFS URL,
     *             {@code args[1]} = file path to create
     * @throws Exception if the filesystem cannot be reached or the file
     *                   cannot be created
     */
    public static void main(String[] args) throws Exception {

        // Defaults preserved from the original code; override via CLI args.
        String webHdfsUrl = args.length > 0 ? args[0] : "webhdfs://192.168.163.151:50070";
        String dir = args.length > 1 ? args[1] : "/tmp/mohit.txt";

        Configuration hdfsConfig = new Configuration();
        // try-with-resources: the original leaked the FileSystem handle
        // (FileSystem implements Closeable and holds client connections).
        try (FileSystem fs = FileSystem.get(java.net.URI.create(webHdfsUrl), hdfsConfig)) {
            fs.createNewFile(new Path(dir));
        }
    }
}

下面是我的pom文件:

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.main</groupId>
    <artifactId>CopyToHdfs</artifactId>
    <version>1.0-SNAPSHOT</version>
    <build>

     <plugins>
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-jar-plugin</artifactId>
            <configuration>
                <archive>
                    <manifest>
                        <addClasspath>true</addClasspath>
                        <mainClass>com.main.CopyToHDFS.ConnectHdfs</mainClass>
                    </manifest>
                </archive>
            </configuration>
        </plugin>
        <plugin>
            <!-- FIX: the assembly plugin was configured but never bound to a
                 lifecycle phase, so "mvn package" never produced the
                 jar-with-dependencies artifact. Running the plain jar then
                 fails with NoClassDefFoundError for Hadoop classes because
                 the dependencies are not on the classpath. After this change,
                 run: java -jar CopyToHdfs-1.0-SNAPSHOT-jar-with-dependencies.jar -->
            <artifactId>maven-assembly-plugin</artifactId>
            <configuration>
                <archive>
                    <manifest>
                        <!-- Makes the fat jar runnable with "java -jar". -->
                        <mainClass>com.main.CopyToHDFS.ConnectHdfs</mainClass>
                    </manifest>
                </archive>
                <descriptorRefs>
                    <descriptorRef>jar-with-dependencies</descriptorRef>
                </descriptorRefs>
            </configuration>
            <executions>
                <execution>
                    <id>make-assembly</id>
                    <phase>package</phase>
                    <goals>
                        <goal>single</goal>
                    </goals>
                </execution>
            </executions>
        </plugin>
    </plugins>

    </build>
    <dependencies>
        <!-- NOTE(review): hadoop-core 0.20.2 predates and conflicts with the
             2.6.0 artifacts below (duplicate/older classes on the classpath).
             hadoop-client 2.6.0 already transitively provides common and hdfs;
             hadoop-core should be removed once confirmed nothing needs it. -->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-core</artifactId>
            <version>0.20.2</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>2.6.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>2.6.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>2.6.0</version>
        </dependency>
<!--        <dependency>
            <groupId>jdk.tools</groupId>
            <artifactId>jdk.tools</artifactId>
            <version>1.7</version> 
        </dependency>-->
    </dependencies>

</project>

在部署jar时,我遇到异常:

Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/hadoop/conf/Configuration
        at com.main.CopyToHDFS.ConnectHdfs.main(ConnectHdfs.java:15)
Caused by: java.lang.ClassNotFoundException: 

我已添加并尝试了各种依赖项,并交叉检查了 Java 主类的路径。我也尝试过将 tools.jar 添加到 maven 结构的 lib 目录中。删除整个 .m2 本地仓库也无效。pom.xml 文件中是否缺少什么内容?

0 个答案:

没有答案