// HDFS Java API demo

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Before;
import org.junit.Test;

public class HDFSDemo {
	// Shared HDFS handle; re-initialized before each test by init().
	private FileSystem fs = null;

	/**
	 * Connects to the HDFS NameNode before each test runs.
	 *
	 * @throws IOException          if the filesystem cannot be reached
	 * @throws URISyntaxException   if the HDFS URI is malformed
	 * @throws InterruptedException if the connection attempt is interrupted
	 */
	@Before
	public void init() throws IOException, URISyntaxException, InterruptedException{
		// Connect as user "root" so the tests have permission to modify "/".
		fs = FileSystem.get(new URI("hdfs://192.168.2.129:9000"), new Configuration(),"root");
	}

	/** Deletes /words.txt (recursively) and prints whether the delete succeeded. */
	@Test
	public void  testDel() throws IllegalArgumentException, IOException{
		// Second argument 'true' requests recursive deletion.
		boolean flag = fs.delete(new Path("/words.txt"), true);
		System.out.println(flag);
	}

	/** Creates the directory /itcast88888888 and prints whether it was created. */
	@Test
	public void testMkdir() throws IllegalArgumentException, IOException{
		boolean flag = fs.mkdirs(new Path("/itcast88888888"));
		System.out.println(flag);
	}

	/**
	 * Uploads a local Kafka tarball to /itcast88888888/kaf on HDFS.
	 *
	 * <p>Uses try-with-resources so neither stream leaks when the other fails
	 * to open (the original leaked the HDFS output stream if the local
	 * FileInputStream constructor threw).
	 */
	@Test
	public void testUpload() throws IllegalArgumentException, IOException{
		try (FSDataOutputStream out = fs.create(new Path("/itcast88888888/kaf"));
				FileInputStream in = new FileInputStream(new File("/home/hadoop/Desktop/kafka_2.10-0.8.1.1.tgz"))) {
			// Streams are closed by try-with-resources, so pass close=false here.
			IOUtils.copyBytes(in, out, 2048, false);
		}
	}

	/**
	 * Standalone demo: downloads /jdk.avi from HDFS into the local file
	 * c:/jdk123456.
	 *
	 * <p>Try-with-resources guarantees both streams are closed even when the
	 * local output file cannot be created (the original leaked the HDFS input
	 * stream in that case).
	 */
	public static void main(String[] args) throws IOException, URISyntaxException {
		FileSystem fs = FileSystem.get(new URI("hdfs://itcast01:9000"), new Configuration());
		try (InputStream in = fs.open(new Path("/jdk.avi"));
				FileOutputStream out = new FileOutputStream(new File("c:/jdk123456"))) {
			// Streams are closed by try-with-resources, so pass close=false here.
			IOUtils.copyBytes(in, out, 2048, false);
		}
	}
}

// You may also be interested in: (Big Data)