Operating HDFS from Java


resources directory
The project's resources directory must contain hdfs-site.xml so the client can load the cluster configuration from the classpath.
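The exact contents of hdfs-site.xml depend on the cluster. As an illustration only, an HA nameservice such as the nnha referenced by the code below could be described roughly as follows; the NameNode ids (nn1, nn2) and hosts (nn1-host, nn2-host) are placeholders, not values from the original setup.

<?xml version="1.0"?>
<configuration>
    <!-- HA nameservice referenced by fs.defaultFS (hdfs://nnha) -->
    <property>
        <name>dfs.nameservices</name>
        <value>nnha</value>
    </property>
    <property>
        <name>dfs.ha.namenodes.nnha</name>
        <value>nn1,nn2</value>
    </property>
    <property>
        <name>dfs.namenode.rpc-address.nnha.nn1</name>
        <value>nn1-host:8020</value>
    </property>
    <property>
        <name>dfs.namenode.rpc-address.nnha.nn2</name>
        <value>nn2-host:8020</value>
    </property>
    <!-- Client-side failover proxy provider for the HA nameservice -->
    <property>
        <name>dfs.client.failover.proxy.provider.nnha</name>
        <value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
    </property>
</configuration>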

Implementation code

More HDFS operation examples can be found here.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HDFSExample {
    public static void main(String[] args) {
        // Point the Hadoop client at a local winutils installation and set the HDFS user (needed when running on Windows)
        System.setProperty("hadoop.home.dir", "C:/dev/winutils/hadoop-2.8.2");
        System.setProperty("HADOOP_USER_NAME", "hdfs");
        try {
            // Create the configuration object
            Configuration conf = new Configuration();
            conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
            conf.set("fs.defaultFS", "hdfs://nnha");

            // Create the HDFS FileSystem object
            FileSystem fs = FileSystem.get(conf);

            // Create a directory
            Path folderPath = new Path("/user/apophis/test/hdfs_opt");
            if (!fs.exists(folderPath)) {
                fs.mkdirs(folderPath);
                System.out.println("Directory created successfully!");
            } else {
                System.out.println("Directory already exists!");
            }

            // Upload a file
            Path localFilePath = new Path("E:/IdeaProjects/person/example-demo/src/main/resources/localfile.txt");
            Path hdfsFilePath = new Path("/user/apophis/test/hdfs_opt/localfile.txt");
            fs.copyFromLocalFile(localFilePath, hdfsFilePath);
            System.out.println("File uploaded successfully!");

            // Download a file
            Path downloadFilePath = new Path("downloadedfile.txt");
            fs.copyToLocalFile(hdfsFilePath, downloadFilePath);
            System.out.println("File downloaded successfully!");

            // Close the file system object
            fs.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
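Beyond creating a directory, uploading and downloading, the same FileSystem handle covers most day-to-day operations. The sketch below is illustrative only, not part of the linked example collection; it reuses the nnha nameservice and the /user/apophis/test/hdfs_opt path from the example above and shows listing a directory, reading a text file, and deleting it.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HDFSMoreOps {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://nnha");
        // try-with-resources closes the FileSystem automatically
        try (FileSystem fs = FileSystem.get(conf)) {
            Path dir = new Path("/user/apophis/test/hdfs_opt");

            // List the directory contents
            for (FileStatus status : fs.listStatus(dir)) {
                System.out.println(status.getPath() + "\t" + status.getLen() + " bytes");
            }

            // Read a text file line by line
            Path file = new Path(dir, "localfile.txt");
            try (FSDataInputStream in = fs.open(file);
                 BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    System.out.println(line);
                }
            }

            // Delete the file (non-recursive)
            fs.delete(file, false);
        }
    }
}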

pom.xml


<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>${hadoop.version}</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-hdfs</artifactId>
    <version>${hadoop.version}</version>
</dependency>
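<!-- Note: ${hadoop.version} is assumed to be defined in the pom's <properties> section;
     pick the version that matches your cluster (the winutils path in the code suggests 2.8.x). -->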

winutils

  1. Running Hadoop-related code locally
  2. Aliyun Drive: 软件包/windows系统/开发常用软件/winutils/