1. First, add the required dependencies to pom.xml:
<dependencies>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>2.7.5</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>2.7.5</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>2.7.5</version>
    </dependency>
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>4.12</version>
    </dependency>
</dependencies>
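Note: in Hadoop 2.x, hadoop-client already pulls in hadoop-common and hadoop-hdfs transitively, so declaring all three is redundant but harmless as long as the versions stay in sync. The junit dependency is also usually declared with <scope>test</scope>.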
2. Establish the connection and perform file operations:
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

import java.net.URI;

public class HadoopTestClient {
    public static void main(String[] args) throws Exception {
        // Connect to HDFS and operate as the root user
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://192.168.40.150:9000"), new Configuration(), "root");

        // Recursively list all files under the / path (the second argument enables recursion)
        RemoteIterator<LocatedFileStatus> locatedFileStatusRemoteIterator = fileSystem.listFiles(new Path("/"), true);
        while (locatedFileStatusRemoteIterator.hasNext()) {
            LocatedFileStatus next = locatedFileStatusRemoteIterator.next();
            System.out.println(next.getPath().toString());
        }

        // Create a directory
        fileSystem.mkdirs(new Path("/lgy_test"));

        // Upload a local file to the new directory
        fileSystem.copyFromLocalFile(new Path("D:\\idle\\HadoopTest\\src\\main\\resources\\test.txt"), new Path("/lgy_test"));

        // Download a file from HDFS back to the local filesystem
        fileSystem.copyToLocalFile(new Path("/lgy_test/test.txt"), new Path("D:\\idle\\HadoopTest\\src\\main\\resources"));

        fileSystem.close();
    }
}
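Once the upload works, you can read a file back and clean up with the same FileSystem API. Below is a minimal sketch (the class name HadoopReadDeleteClient is mine, and it assumes the same NameNode address and the test.txt uploaded above): it prints the file's contents with FileSystem.open plus IOUtils.copyBytes, then recursively deletes the test directory with FileSystem.delete.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import java.net.URI;

public class HadoopReadDeleteClient {
    public static void main(String[] args) throws Exception {
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://192.168.40.150:9000"), new Configuration(), "root");

        Path file = new Path("/lgy_test/test.txt");

        // Print the file's contents to stdout; the final true closes the stream when done
        if (fileSystem.exists(file)) {
            FSDataInputStream in = fileSystem.open(file);
            IOUtils.copyBytes(in, System.out, 4096, true);
        }

        // Recursively delete the test directory (the second argument enables recursion)
        fileSystem.delete(new Path("/lgy_test"), true);

        fileSystem.close();
    }
}

One caveat if you run these clients from Windows (as the D:\ paths suggest): the Hadoop client may expect winutils.exe and a HADOOP_HOME environment variable to be set. For downloads, the copyToLocalFile overload that takes a useRawLocalFileSystem flag is a common workaround, since it avoids writing a local .crc checksum file.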