HDFS Java API

1. Install JDK (1.8)
2. Create a Maven project and configure pom.xml (add the dependencies below)

<dependencies>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>3.2.1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>3.2.1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>3.2.1</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.13</version>
        </dependency>
    </dependencies>

3. In the project's src/main/resources directory, create a new file named "log4j.properties":

# Console appender configuration
log4j.appender.Console=org.apache.log4j.ConsoleAppender
log4j.appender.Console.layout=org.apache.log4j.PatternLayout
log4j.appender.Console.layout.ConversionPattern=%d [%t] %p [%c] - %m%n
# Root logger level and output target
log4j.rootLogger=info,Console

4. Write the code
4.1 Declare the shared fields

    FileSystem fs=null;
    Configuration conf=null;
    URI uri=null;
    Logger log;

4.2 Establish the connection

    @Before
    public void testInit() throws Exception{
        uri=new URI("hdfs://node-1:8020");
        conf=new Configuration();
        String user="xdd";
        log=LogManager.getLogger(hadooptest.class);
        fs=FileSystem.get(uri,conf,user);
        log.error("Initialized successfully!");
    }
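
If you prefer not to pass the URI and user name explicitly, the connection can also be configured through the Configuration object alone. A minimal sketch, assuming the same node-1:8020 address and user "xdd" as above:

    // Alternative: set the default filesystem and the user instead of passing them to get()
    Configuration conf = new Configuration();
    conf.set("fs.defaultFS", "hdfs://node-1:8020");
    System.setProperty("HADOOP_USER_NAME", "xdd"); // picked up when no user is passed explicitly
    FileSystem fs = FileSystem.get(conf);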

4.3 Release resources after the test

    @After
    public void testClose() throws Exception{
        if(fs!=null){
            log.error("Releasing resources...");
            fs.close();
        }
    }
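
Outside of JUnit, where there is no @Before/@After pair, FileSystem implements Closeable, so try-with-resources can take care of the cleanup. A sketch of the pattern (the /tmp/demo path is only for illustration):

    try (FileSystem fs = FileSystem.get(new URI("hdfs://node-1:8020"), new Configuration(), "xdd")) {
        fs.mkdirs(new Path("/tmp/demo")); // hypothetical path, only to show the pattern
    }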

4.4 Create a directory

    // Create a directory with mkdirs
    @Test
    public void hdmkdir() throws Exception{
        boolean flag=fs.mkdirs(new Path("test1"));
        log.error("Created: "+flag);
    }
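
Note that "test1" is a relative path, so HDFS resolves it against the connecting user's home directory; for user "xdd" that is typically /user/xdd/test1 (the exact location depends on the cluster's defaults). A quick check with fs.exists:

    // Verify the directory exists; /user/xdd is the assumed home directory of user "xdd"
    boolean exists = fs.exists(new Path("/user/xdd/test1"));
    System.out.println("exists: " + exists);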

4.5 Delete a file or directory

    // Delete; the second argument (true) removes directories recursively
    @Test
    public void hddel() throws Exception{
        boolean flag=fs.delete(new Path("test1"),true);
        log.error("Deleted: "+flag);
    }

4.6 Upload a file

    // Upload a local file to HDFS
    @Test
    public void hdput() throws Exception{
        fs.copyFromLocalFile(new Path("D:/software/coffe.jpg"),new Path("/zxm1/put.jpg"));
        log.error("Upload finished");
    }
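
copyFromLocalFile also has an overload that controls whether the local source is deleted and whether an existing target may be overwritten. A sketch with the same paths as above:

    // delSrc = false keeps the local file; overwrite = true replaces /zxm1/put.jpg if it already exists
    fs.copyFromLocalFile(false, true, new Path("D:/software/coffe.jpg"), new Path("/zxm1/put.jpg"));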

4.7 Download a file

    // Download a file from HDFS to the local filesystem
    @Test
    public void hddown() throws Exception{
        fs.copyToLocalFile(new Path("/zxm1/put.jpg"),new Path("D:/software/down.jpg"));
        log.error("Download finished!");
    }
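
On Windows, the two-argument copyToLocalFile writes through the local checksum filesystem, which may require the native Hadoop binaries (winutils) and leaves an extra .crc file next to the download. The four-argument overload can write through the raw local filesystem instead; a sketch with the same paths:

    // delSrc = false, useRawLocalFileSystem = true: skip the checksum file on the local side
    fs.copyToLocalFile(false, new Path("/zxm1/put.jpg"), new Path("D:/software/down.jpg"), true);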

4.8 List the root directory and distinguish files from directories

    @Test
    public void hdindAll() throws Exception{
        FileStatus[] ls=fs.listStatus(new Path("/"));
        for (FileStatus fS: ls){
            if(fS.isFile()){
                System.out.println("File: "+fS.getPath().getName());
            }else {
                System.out.println("Directory: "+fS.getPath().getName());
            }
        }
    }
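
listStatus only returns one directory level. To walk the tree recursively (files only), FileSystem also provides listFiles, which yields LocatedFileStatus entries with length and block information (requires importing org.apache.hadoop.fs.LocatedFileStatus and org.apache.hadoop.fs.RemoteIterator). A minimal sketch:

    // Recursively list every file under the root directory
    RemoteIterator<LocatedFileStatus> it = fs.listFiles(new Path("/"), true); // true = recursive
    while (it.hasNext()) {
        LocatedFileStatus f = it.next();
        System.out.println(f.getPath().getName() + "  " + f.getLen() + " bytes");
    }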

5. Complete code
5.1 hadooptest.java

package mytest;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.net.URI;

public class hadooptest {
    FileSystem fs=null;
    Configuration conf=null;
    URI uri=null;
    Logger log=null;
    @Before
    public void testInit() throws Exception{
        uri=new URI("hdfs://node-1:8020");
        conf=new Configuration();
        String user="xdd";
        log=LogManager.getLogger(hadooptest.class);
        fs=FileSystem.get(uri,conf,user);
        log.error("初始化成功!");
    }
    @After
    public void testClose() throws Exception{
        if(fs!=null){
            log.error("释放资源结束!");
            fs.close();
        }
    }
    // Create a directory with mkdirs
    @Test
    public void hdmkdir() throws Exception{
        boolean flag=fs.mkdirs(new Path("test1"));
        log.error("Created: "+flag);
    }

    // Delete; the second argument (true) removes directories recursively
    @Test
    public void hddel() throws Exception{
        boolean flag=fs.delete(new Path("test1"),true);
        log.error("Deleted: "+flag);
    }

    // Upload a local file to HDFS
    @Test
    public void hdput() throws Exception{
        fs.copyFromLocalFile(new Path("D:/software/coffe.jpg"),new Path("/zxm1/put.jpg"));
        log.error("Upload finished");
    }

    // Download a file from HDFS to the local filesystem
    @Test
    public void hddown() throws Exception{
        fs.copyToLocalFile(new Path("/zxm1/put.jpg"),new Path("D:/software/down.jpg"));
        log.error("Download finished!");
    }

    // List the entries under "/" and report whether each is a file or a directory
    @Test
    public void hdindAll() throws Exception{
        FileStatus[] ls=fs.listStatus(new Path("/"));
        for (FileStatus fS: ls){
            if(fS.isFile()){
                System.out.println("File: "+fS.getPath().getName());
            }else {
                System.out.println("Directory: "+fS.getPath().getName());
            }
        }
    }

}

5.2 pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>org.example</groupId>
    <artifactId>hadoop</artifactId>
    <version>1.0-SNAPSHOT</version>

    <dependencies>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>3.2.1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>3.2.1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>3.2.1</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.13</version>
        </dependency>
    </dependencies>

</project>

5.3 log4j.properties

# Console appender configuration
log4j.appender.Console=org.apache.log4j.ConsoleAppender
log4j.appender.Console.layout=org.apache.log4j.PatternLayout
log4j.appender.Console.layout.ConversionPattern=%d [%t] %p [%c] - %m%n
# Root logger level and output target
log4j.rootLogger=info,Console