Hadoop Basics (4): HDFS Programming
1. Development Environment Setup

The examples use IntelliJ IDEA 2018 and Maven.
(1) Start the cluster

On the Windows host, start the VMware virtual machines hadoop01, hadoop02, and hadoop03.

Log in to the hadoop01 virtual machine and start HDFS by running:

start-dfs.sh

You can confirm the daemons came up by running jps on each node and checking for the NameNode and DataNode processes.

(2) Check network connectivity from the development machine
From a Windows command prompt (cmd), ping hadoop01's IP (192.168.150.128), then telnet to the HDFS port (9000) on that IP:

ping 192.168.150.128
telnet 192.168.150.128 9000

Note: Windows does not enable the Telnet client by default; enable it first under "Turn Windows features on or off".

If the IP does not respond to ping, configure the VMnet01 adapter in the host's network settings so that the Windows side is assigned an IP on the same network. [Figure: VMnet adapter settings]

If telnet cannot reach the port, the hadoop01 firewall is blocking it; either disable the firewall or open that specific port.
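Once ping and telnet both succeed, a quick way to confirm the HDFS client itself can reach the NameNode is a one-line existence check from Java. This is a minimal sketch, not part of the original project: the class name ConnectivityCheck is our own, and it assumes the same Hadoop dependencies as the Maven project shown below.

package com.mk;

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Minimal connectivity smoke test: if this prints "true", the client
// can reach the NameNode at hdfs://192.168.150.128:9000.
// (Sketch; ConnectivityCheck is our own name, not from the original post.)
public class ConnectivityCheck {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create("hdfs://192.168.150.128:9000/"), conf);
        System.out.println(fs.exists(new Path("/")));  // the root path always exists on a healthy cluster
        fs.close();
    }
}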
2. Project Code

(1) pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <groupId>com.mk</groupId>
  <artifactId>hadoop-test</artifactId>
  <version>1.0-SNAPSHOT</version>

  <name>hadoop-test</name>
  <!-- FIXME change it to the project's website -->
  <url>http://www.example.com</url>

  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <maven.compiler.source>1.7</maven.compiler.source>
    <maven.compiler.target>1.7</maven.compiler.target>
    <hadoop.version>2.6.0</hadoop.version>
  </properties>

  <dependencies>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-client</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.11</version>
      <scope>test</scope>
    </dependency>
  </dependencies>

  <build>
    <pluginManagement>
      <plugins>
        <plugin>
          <artifactId>maven-clean-plugin</artifactId>
          <version>3.1.0</version>
        </plugin>
        <plugin>
          <artifactId>maven-resources-plugin</artifactId>
          <version>3.0.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-compiler-plugin</artifactId>
          <version>3.8.0</version>
        </plugin>
        <plugin>
          <artifactId>maven-surefire-plugin</artifactId>
          <version>2.22.1</version>
        </plugin>
        <plugin>
          <artifactId>maven-jar-plugin</artifactId>
          <version>3.0.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-install-plugin</artifactId>
          <version>2.5.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-deploy-plugin</artifactId>
          <version>2.8.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-site-plugin</artifactId>
          <version>3.7.1</version>
        </plugin>
        <plugin>
          <artifactId>maven-project-info-reports-plugin</artifactId>
          <version>3.0.0</version>
        </plugin>
      </plugins>
    </pluginManagement>
  </build>
</project>
(2) App.java

package com.mk;

import com.mk.hdfs.DirectoryOp;
import com.mk.hdfs.FileOp;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;

import java.net.URI;

public class App {

    public static void main(String[] args) throws Exception {
        String uri = "hdfs://192.168.150.128:9000/";
        Configuration conf = new Configuration();
        FileSystem fileSystem = FileSystem.get(URI.create(uri), conf);

        DirectoryOp directoryOp = new DirectoryOp(conf, fileSystem);

        //directoryOp.list("/");                 // hdfs://192.168.150.128:9000/home

        //directoryOp.create("/mytest");
        //directoryOp.list("/");                 // hdfs://192.168.150.128:9000/home
                                                 // hdfs://192.168.150.128:9000/mytest

        //directoryOp.rename("/mytest", "/my");
        //directoryOp.list("/");                 // hdfs://192.168.150.128:9000/home
                                                 // hdfs://192.168.150.128:9000/my

        //directoryOp.delete("/my");
        //directoryOp.list("/");                 // hdfs://192.168.150.128:9000/home

        FileOp fileOp = new FileOp(conf, fileSystem);

        //fileOp.create("/a.txt");
        //directoryOp.list("/");                 // hdfs://192.168.150.128:9000/a.txt
                                                 // hdfs://192.168.150.128:9000/home

        //fileOp.write("/a.txt", "你好,泰山");
        //fileOp.read("/a.txt");                 // 你好,泰山
        //fileOp.readTextLine("/a.txt");         // 你好,泰山

        //fileOp.rename("/a.txt", "b.txt");
        //directoryOp.list("/");                 // hdfs://192.168.150.128:9000/b.txt
                                                 // hdfs://192.168.150.128:9000/home

        //fileOp.delete("/b.txt");
        //directoryOp.list("/");                 // hdfs://192.168.150.128:9000/home

        //fileOp.localToHdfs("pom.xml", "/pom.xml");
        //directoryOp.list("/");                 // hdfs://192.168.150.128:9000/home
                                                 // hdfs://192.168.150.128:9000/pom.xml

        fileOp.hdfsToLocal("/pom.xml", "/pom2.xml");
        directoryOp.list("/");                   // hdfs://192.168.150.128:9000/home
                                                 // hdfs://192.168.150.128:9000/pom.xml

        fileSystem.close();                      // release the HDFS client connection
    }
}

The commented-out calls were executed one step at a time; the comment next to each directoryOp.list("/") call shows the listing that step produced. Note that when running the client from Windows, Hadoop 2.x local-file operations such as hdfsToLocal may require winutils.exe and HADOOP_HOME to be set.
(3) FileOp.java

package com.mk.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.LineReader;

public class FileOp {

    private Configuration conf;
    private FileSystem fs;

    public FileOp(Configuration conf, FileSystem fs) {
        this.conf = conf;
        this.fs = fs;
    }

    // Create an empty file, creating its parent directories first if needed
    public void create(String file) throws Exception {
        Path path = new Path(file);
        Path parent = path.getParent();
        fs.mkdirs(parent);
        fs.create(path).close();
    }

    public void delete(String file) throws Exception {
        Path path = new Path(file);
        fs.delete(path, true); // true = recursive
    }

    // Rename within the same parent directory
    public void rename(String file, String name) throws Exception {
        Path path = new Path(file);
        Path parent = path.getParent();
        fs.rename(path, new Path(parent, name));
    }

    // Read the whole file into memory in one go.
    // Note: available() is only reliable for small files; see the streaming variant below.
    public void read(String file) throws Exception {
        Path path = new Path(file);
        FSDataInputStream inputStream = fs.open(path);
        byte[] data = new byte[inputStream.available()];
        IOUtils.readFully(inputStream, data, 0, data.length);
        IOUtils.closeStream(inputStream);
        System.out.println(new String(data, "utf-8"));
    }

    // Read the file line by line
    public void readTextLine(String file) throws Exception {
        Path path = new Path(file);
        FSDataInputStream inputStream = fs.open(path);
        Text line = new Text();
        LineReader lineReader = new LineReader(inputStream);
        while (lineReader.readLine(line) > 0) {
            System.out.println(line);
        }
        inputStream.close();
    }

    // Create (or overwrite) the file with the given text
    public void write(String file, String text) throws Exception {
        Path path = new Path(file);
        FSDataOutputStream outputStream = fs.create(path);
        outputStream.write(text.getBytes("utf-8"));
        outputStream.flush();
        IOUtils.closeStream(outputStream);
    }

    // Append to an existing file (HDFS supports append; the checksummed local FS may not)
    public void append(String file, String text) throws Exception {
        Path path = new Path(file);
        FSDataOutputStream outputStream = fs.append(path);
        outputStream.write(text.getBytes("utf-8"));
        outputStream.flush();
        IOUtils.closeStream(outputStream);
    }

    // Upload: keep the local source (delSrc=false), overwrite the HDFS target if it exists
    public void localToHdfs(String localFile, String hdfsFile) throws Exception {
        Path localPath = new Path(localFile);
        Path hdfsPath = new Path(hdfsFile);
        fs.copyFromLocalFile(false, true, localPath, hdfsPath);
    }

    // Download: keep the HDFS source (delSrc=false), write via the raw local FS (no .crc file)
    public void hdfsToLocal(String hdfsFile, String localFile) throws Exception {
        Path localPath = new Path(localFile);
        Path hdfsPath = new Path(hdfsFile);
        fs.copyToLocalFile(false, hdfsPath, localPath, true);
    }
}
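For larger files it is safer to stream the contents than to size a buffer with available(). Below is a minimal sketch of an extra method that could be added to FileOp, reusing its fs field and existing imports; the method name readStreaming is our own, not part of the original class.

    // A streaming alternative to read(): copies the file to stdout in 4 KB
    // chunks, so the whole file never has to fit in a single byte[].
    // (Sketch; readStreaming is our own addition, not from the original post.)
    public void readStreaming(String file) throws Exception {
        FSDataInputStream inputStream = fs.open(new Path(file));
        try {
            // copyBytes(in, out, bufferSize, close): close=false leaves the streams open
            IOUtils.copyBytes(inputStream, System.out, 4096, false);
        } finally {
            IOUtils.closeStream(inputStream);
        }
    }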
(4) DirectoryOp.java

package com.mk.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;

public class DirectoryOp {

    private Configuration conf;
    private FileSystem fs;

    public DirectoryOp(Configuration conf, FileSystem fs) {
        this.conf = conf;
        this.fs = fs;
    }

    public void create(String dir) throws Exception {
        Path path = new Path(dir);
        fs.mkdirs(path);
    }

    public void delete(String dir) throws Exception {
        Path path = new Path(dir);
        fs.delete(path, true); // true = delete recursively
    }

    // Rename within the same parent directory
    public void rename(String dir, String name) throws Exception {
        Path path = new Path(dir);
        Path parent = path.getParent();
        fs.rename(path, new Path(parent, name));
    }

    // List the immediate children of a directory
    public void list(Path path) throws Exception {
        FileStatus[] list = fs.listStatus(path);
        for (FileStatus status : list) {
            System.out.println(status.getPath());
        }
    }

    public void list(String p) throws Exception {
        list(new Path(p));
    }
}
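listStatus only returns one level of the tree. To print every file under a path, FileSystem.listFiles can walk the tree recursively. A sketch of an extra method for DirectoryOp (listRecursive is our own name, not from the original; RemoteIterator and LocatedFileStatus are covered by the existing org.apache.hadoop.fs.* import):

    // Recursively print every file (not directory) under the given path.
    // (Sketch; listRecursive is our own addition, not from the original post.)
    public void listRecursive(String p) throws Exception {
        // listFiles(path, true) walks the whole tree below the path
        RemoteIterator<LocatedFileStatus> it = fs.listFiles(new Path(p), true);
        while (it.hasNext()) {
            LocatedFileStatus status = it.next();
            System.out.println(status.getPath() + "  (" + status.getLen() + " bytes)");
        }
    }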
Summary

This article walked through connecting an IDEA/Maven project to an HDFS cluster and using the FileSystem API to create, list, rename, and delete directories, and to create, read, write, append, rename, delete, upload, and download files.