// HDFS operations via the Java API

package hadoop.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;

/**
 * @Author: ZhangYaotong
 * @Date: 2019/4/2 22:13
 */
/**
 * Demonstrates common HDFS operations through the Hadoop Java client API:
 * file upload/download, directory management, metadata listing, and raw
 * stream-based I/O including seek-based split reads of a large file.
 *
 * <p>Each test connects to the cluster in {@link #init()} and releases the
 * client in {@link #close()}. All tests perform live I/O against the cluster
 * at {@code hdfs://hadoop02:9000} and local paths on drive {@code e:}.
 *
 * @Author: ZhangYaotong
 * @Date: 2019/4/2 22:13
 */
public class HdfsClientDemo {

    /** NameNode URI of the target cluster, shared by every test. */
    private static final String HDFS_URI = "hdfs://hadoop02:9000";
    /** Remote working directory used by the upload/download tests. */
    private static final String HDFS_DIR = HDFS_URI + "/mybigdata/hdfs";
    /** Split size used by the seek-read tests: 128 MiB (the default HDFS block size). */
    private static final long BLOCK_SIZE = 128L * 1024 * 1024;

    private FileSystem fileSystem;

    /** Creates the configuration object and an HDFS client connected as user "root". */
    @Before
    public void init() throws Exception {
        Configuration configuration = new Configuration();
        fileSystem = FileSystem.get(new URI(HDFS_URI), configuration, "root");
    }

    /** Releases the HDFS client after each test. */
    @After
    public void close() throws IOException {
        fileSystem.close();
        System.out.println("over!");
    }

    /** 1. File upload: copy a local file into HDFS. */
    @Test
    public void putFileToHDFS() throws Exception {
        Path src = new Path("e://testdata/hello.txt");
        Path dst = new Path(HDFS_DIR + "/hello.bak.txt");
        fileSystem.copyFromLocalFile(src, dst);
    }

    /** 2. File download: copy an HDFS file to the local filesystem. */
    @Test
    public void getFileFromHDFS() throws Exception {
        Path src = new Path(HDFS_DIR + "/hello.bak.txt");
        Path dst = new Path("e://testdata/hello1.txt");
        fileSystem.copyToLocalFile(src, dst);
    }

    /** 3. Create a directory (missing parents are created too). */
    @Test
    public void mkdirHDFS() throws Exception {
        Path demoFile = new Path(HDFS_DIR + "/demofile");
        fileSystem.mkdirs(demoFile);
    }

    /** 4. Delete a directory recursively. */
    @Test
    public void DeleteHDFS() throws Exception {
        Path demoFile = new Path(HDFS_DIR + "/demofile");
        // delete(Path) is deprecated; pass recursive=true explicitly so a
        // non-empty directory is removed instead of failing.
        fileSystem.delete(demoFile, true);
    }

    /** 5. Rename a file or directory. */
    @Test
    public void renameHDFS() throws Exception {
        Path srcName = new Path(HDFS_DIR + "/hello.bak.txt");
        Path dstName = new Path(HDFS_DIR + "/hello.bak2.txt");
        fileSystem.rename(srcName, dstName);
    }

    /** 6. List the root directory, tagging each entry as file (f--) or directory (d--). */
    @Test
    public void findHDFS() throws Exception {
        // Query the status of every entry directly under "/".
        FileStatus[] listStatus = fileSystem.listStatus(new Path("/"));
        for (FileStatus status : listStatus) {
            String tag = status.isFile() ? "f--" : "d--";
            System.out.println(tag + status.getPath().getName());
        }
    }

    /** 7. Recursively print file details: name, block size, permission, length, and block hosts. */
    @Test
    public void readListFileHDFS() throws Exception {
        // Parameterized iterator type: the original raw RemoteIterator relied
        // on an unchecked implicit cast at next().
        RemoteIterator<LocatedFileStatus> listFiles = fileSystem.listFiles(new Path("/"), true);
        while (listFiles.hasNext()) {
            LocatedFileStatus fileStatus = listFiles.next();
            System.out.println(fileStatus.getPath().getName());
            System.out.println(fileStatus.getBlockSize());
            System.out.println(fileStatus.getPermission());
            System.out.println(fileStatus.getLen());

            BlockLocation[] blockLocations = fileStatus.getBlockLocations();
            for (BlockLocation bl : blockLocations) {
                System.out.println("block-offset:" + bl.getOffset());
                for (String host : bl.getHosts()) {
                    System.out.println(host);
                }
            }
        }
    }

    /** 8. Stream-based upload: local FileInputStream piped into an HDFS output stream. */
    @Test
    public void IOPutFileToHDFS() throws Exception {
        String src = "e://testdata/hello.txt";
        Path dst = new Path(HDFS_DIR + "/hello.bak3.txt");
        // try-with-resources guarantees both streams close even on failure.
        try (FileInputStream inputStream = new FileInputStream(new File(src));
             FSDataOutputStream outputStream = fileSystem.create(dst)) {
            IOUtils.copyBytes(inputStream, outputStream, 4096, false);
        }
    }

    /** 9. Stream-based download: HDFS input stream piped to the console. */
    @Test
    public void IOGetFileFromHDFS() throws Exception {
        Path src = new Path(HDFS_DIR + "/hello.bak3.txt");
        // close=false so copyBytes does not close System.out; the HDFS stream
        // is closed by try-with-resources.
        try (FSDataInputStream inputStream = fileSystem.open(src)) {
            IOUtils.copyBytes(inputStream, System.out, 4096, false);
        }
    }

    /** 10a. Seek-based read: copy only the first 128 MiB block into a local part file. */
    @Test
    public void readFileSeek1() throws Exception {
        Path src = new Path(HDFS_DIR + "/hello.bak3.txt");
        try (FSDataInputStream fis = fileSystem.open(src);
             FileOutputStream fos = new FileOutputStream("e://testdata/hello.txt.part1")) {
            byte[] buf = new byte[1024];
            long remaining = BLOCK_SIZE;
            int read;
            // Honor the byte count returned by read(): the original loop wrote
            // the entire buffer regardless of how much was read, duplicating
            // stale bytes on short reads, and never stopped at end-of-file.
            while (remaining > 0
                    && (read = fis.read(buf, 0, (int) Math.min(buf.length, remaining))) != -1) {
                fos.write(buf, 0, read);
                remaining -= read;
            }
        }
    }

    /** 10b. Seek-based read: skip the first block and copy the remainder of the file. */
    @Test
    public void readFileSeek2() throws Exception {
        Path src = new Path(HDFS_DIR + "/hello.bak3.txt");
        try (FSDataInputStream fis = fileSystem.open(src);
             FileOutputStream fos = new FileOutputStream("e://testdata/hello.txt.part2")) {
            // Position past the first 128 MiB block before copying.
            fis.seek(BLOCK_SIZE);
            IOUtils.copyBytes(fis, fos, 1024);
        }

        // After both parts exist, reassemble them in a Windows command window:
        //   type hello.txt.part2 >> hello.txt.part1
    }
}

// Related topic: Hadoop