Cannot connect to HDFS from Java
package com.Jambo;

import java.io.IOException;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class App
{
    public static void main(String[] args) throws IOException
    {
        System.out.println("Hello World!");
        System.out.println("---143---");
        // Local file to upload and the target HDFS cluster/directory
        String localPath = "/home/user1/Documents/hdfspract.txt";
        String uri = "hdfs://172.16.32.139:9000";
        String hdfsDir = "hdfs://172.16.32.139:9000/fifo_tbl";
        // Connect to the cluster and copy the local file into /fifo_tbl
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        fs.copyFromLocalFile(new Path(localPath), new Path(hdfsDir));
    }
}
When I try to execute the above code, it gives me the following error:
WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Exception in thread "main" org.apache.hadoop.fs.UnsupportedFileSystemException: No FileSystem for scheme "hdfs"
    at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:3332)
    at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3352)
    at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:124)
    at com.Jambo.App.main(App.java:21)
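From what I have read, "No FileSystem for scheme: hdfs" usually means the jar that registers the hdfs scheme (hadoop-hdfs, via its META-INF/services entry) is missing from the runtime classpath. One workaround I have seen suggested, sketched here under the assumption that hadoop-hdfs is actually on the classpath, is to pin the implementation class on the Configuration:

Configuration conf = new Configuration();
// Workaround sketch: map the "hdfs" scheme to its implementation class directly
// instead of relying on ServiceLoader discovery. This only helps if the
// hadoop-hdfs jar (which contains DistributedFileSystem) is present.
conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
FileSystem fs = FileSystem.get(URI.create("hdfs://172.16.32.139:9000"), conf);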
I would also appreciate any other way of uploading a file into Hadoop with the Java API (one stream-based idea is sketched below).
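For reference, a stream-based upload would look roughly like this. It is only a sketch: the class name StreamUpload and the target file name are made up, the URI and local path are the ones from my snippet above, and it assumes the scheme problem is fixed first.

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.InputStream;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class StreamUpload {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Open the cluster, stream the local file into HDFS, and close everything.
        try (FileSystem fs = FileSystem.get(URI.create("hdfs://172.16.32.139:9000"), conf);
             InputStream in = new BufferedInputStream(new FileInputStream("/home/user1/Documents/hdfspract.txt"));
             FSDataOutputStream out = fs.create(new Path("/fifo_tbl/hdfspract.txt"))) {
            IOUtils.copyBytes(in, out, 4096); // 4 KB copy buffer
        }
    }
}

For completeness, these are the Hadoop-related dependencies in my pom.xml: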
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>3.1.0</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-yarn-common</artifactId>
    <version>2.9.0</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-mapreduce-client-common</artifactId>
    <version>2.9.0</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-mapreduce-client-core</artifactId>
    <version>2.9.0</version>
</dependency>
<dependency>
    <groupId>jdk.tools</groupId>
    <artifactId>jdk.tools</artifactId>
    <version>1.8.0_161</version>
    <scope>system</scope>
    <systemPath>/usr/local/jdk1.8.0_161/lib/tools.jar</systemPath>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-core</artifactId>
    <version>1.2.1</version>
</dependency>
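Could the versions be the problem? hadoop-common 3.1.0 is mixed with 2.9.0 artifacts and the legacy hadoop-core 1.2.1, and no hadoop-hdfs artifact (which normally provides the "hdfs" scheme) is declared. If that is the cause, an aligned pair like the following is what I would try; 2.9.0 is only an assumption to match most of the entries above, and it should really match the cluster version:

<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>2.9.0</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-hdfs</artifactId>
    <version>2.9.0</version>
</dependency>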