【Hadoop】HDFS Client Development Example
1、原理、步驟
?
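A minimal sketch of these steps, assuming the NameNode runs at hdfs://HADOOP-NODE1:9000 and the HDFS user is HADOOP (both taken from the test class below); the class name HDFSClientSkeleton is only illustrative:

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HDFSClientSkeleton {
    public static void main(String[] args) throws Exception {
        // 1. Load the client configuration (picks up core-site.xml/hdfs-site.xml if on the classpath)
        Configuration conf = new Configuration();
        // 2. Obtain a FileSystem handle for the cluster, acting as user "HADOOP"
        FileSystem fs = FileSystem.get(new URI("hdfs://HADOOP-NODE1:9000"), conf, "HADOOP");
        // 3. Perform file operations through the handle
        fs.copyToLocalFile(new Path("/jdk-7u60-linux-x64.tar.gz"),
                           new Path("./testdata/skeleton-jdk.tar.gz"));
        // 4. Release the connection
        fs.close();
    }
}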
2. HDFS Client Example Code
The following JUnit test class exercises the common FileSystem operations: streaming download and upload, the copyToLocalFile/copyFromLocalFile convenience methods, mkdirs, recursive delete, and listing.
package com.ares.hadoop.hdfs;

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URISyntaxException;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.log4j.Logger;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

public class HDFSTest {

    private static final Logger LOGGER = Logger.getLogger(HDFSTest.class);

    private FileSystem fs = null;

    @Before
    public void setUp() throws IOException, URISyntaxException, InterruptedException {
        Configuration conf = new Configuration();
        // Alternative: connect with an explicit NameNode URI and user instead of fs.defaultFS
        // conf.set("fs.defaultFS", "hdfs://HADOOP-NODE1:9000");
        // URI uri = new URI("hdfs://HADOOP-NODE1:9000");
        // this.fs = FileSystem.get(uri, conf, "HADOOP");
        this.fs = FileSystem.get(conf);
    }

    @After
    public void tearDown() throws IOException {
        this.fs.close();
    }

    @Test
    public void testGET() throws IOException {
        // Download: open an HDFS input stream and copy it to a local file
        LOGGER.debug("HDFSTest: GET FILE TEST");
        Path path = new Path("hdfs://HADOOP-NODE1:9000/jdk-7u60-linux-x64.tar.gz");
        FSDataInputStream fsDataInputStream = this.fs.open(path);
        FileOutputStream fileOutputStream = new FileOutputStream("./testdata/get-test-jdk.tar.gz");
        IOUtils.copy(fsDataInputStream, fileOutputStream);
    }

    @Test
    public void testPUT() throws IOException {
        // Upload: create an HDFS output stream and copy a local file into it
        LOGGER.debug("HDFSTest: PUT FILE TEST");
        Path path = new Path("hdfs://HADOOP-NODE1:9000/put-test-jdk.tar.gz");
        FSDataOutputStream fsDataOutputStream = this.fs.create(path);
        FileInputStream fileInputStream = new FileInputStream("./testdata/test-jdk.tar.gz");
        IOUtils.copy(fileInputStream, fsDataOutputStream);
    }

    @Test
    public void testGET_NEW() throws IOException {
        // Download using the convenience API
        LOGGER.debug("HDFSTest: GET_NEW FILE TEST");
        Path src = new Path("hdfs://HADOOP-NODE1:9000/jdk-7u60-linux-x64.tar.gz");
        Path dst = new Path("./testdata/get-test-new-jdk.tar.gz");
        this.fs.copyToLocalFile(src, dst);
    }

    @Test
    public void testPUT_NEW() throws IOException {
        // Upload using the convenience API
        LOGGER.debug("HDFSTest: PUT_NEW FILE TEST");
        Path src = new Path("./testdata/test-jdk.tar.gz");
        Path dst = new Path("hdfs://HADOOP-NODE1:9000/put-test-new-jdk.tar.gz");
        this.fs.copyFromLocalFile(src, dst);
    }

    @Test
    public void testMKDIR() throws IOException {
        // mkdirs creates the whole path, including missing parent directories
        LOGGER.debug("HDFSTest: MKDIR TEST");
        Path f = new Path("/mkdir-test/testa/testb");
        this.fs.mkdirs(f);
    }

    @Test
    public void testRM() throws IOException {
        // delete with recursive = true removes the directory and everything under it
        LOGGER.debug("HDFSTest: RM TEST");
        Path f = new Path("/mkdir-test");
        this.fs.delete(f, true);
    }

    @Test
    public void testLIST() throws IOException {
        LOGGER.debug("HDFSTest: LIST TEST");
        Path f = new Path("/");

        // listFiles returns files only (recursively when the second argument is true)
        RemoteIterator<LocatedFileStatus> files = this.fs.listFiles(f, true);
        while (files.hasNext()) {
            LocatedFileStatus file = files.next();
            LOGGER.debug(file.getPath());
            LOGGER.debug(file.getPath().getName());
        }

        // listStatus returns the direct children, both files and directories
        FileStatus[] files2 = this.fs.listStatus(f);
        for (FileStatus fileStatus : files2) {
            LOGGER.debug(fileStatus.getPath().getName());
            LOGGER.debug(fileStatus.isDirectory());
        }
    }
}
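One thing worth noting about testGET and testPUT above: the streams returned by fs.open()/fs.create() and the local FileInputStream/FileOutputStream are never closed. A sketch of the download test with the streams closed via try-with-resources (assuming Java 7 or later; the method name testGetWithClose is only illustrative):

    @Test
    public void testGetWithClose() throws IOException {
        Path path = new Path("hdfs://HADOOP-NODE1:9000/jdk-7u60-linux-x64.tar.gz");
        // try-with-resources closes both streams even if the copy fails
        try (FSDataInputStream in = this.fs.open(path);
             FileOutputStream out = new FileOutputStream("./testdata/get-test-jdk.tar.gz")) {
            IOUtils.copy(in, out);
        }
    }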
Summary
The examples above cover the core FileSystem operations of an HDFS Java client: streaming download and upload with open/create plus IOUtils.copy, the copyToLocalFile and copyFromLocalFile convenience methods, mkdirs, recursive delete, and listing with listFiles and listStatus.