1. Setting Up the Development Environment
Development uses IntelliJ IDEA 2018 and Maven.
(1) Start the cluster
On Windows, start the VMware virtual machines hadoop01, hadoop02, and hadoop03.
Log in to the hadoop01 virtual machine and start the cluster by running:
start-dfs.sh
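Before going on, it can help to confirm that the HDFS daemons actually came up. Assuming a standard layout (NameNode on hadoop01, DataNodes on the workers), the Java process list and the cluster report should show them:

jps
hdfs dfsadmin -report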
(2) Check the development environment's network
From a Windows command prompt (cmd), ping the IP of hadoop01 [192.168.150.128], then telnet to the IP of hadoop01 [192.168.150.128] on the HDFS port [9000].
Note: Windows does not enable the Telnet client by default; turn it on in Windows Features first.
ping 192.168.150.128
telnet 192.168.150.128 9000
If the IP cannot be pinged, open the computer's network settings, configure the VMnet01 adapter, and assign an IP to the Windows system.
If the telnet to the port fails, the hadoop01 VM's firewall is blocking it; either disable the firewall or open that specific port.
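For example, assuming the VMs run CentOS 7 (adjust for your distribution), you can disable the firewall entirely:

systemctl stop firewalld
systemctl disable firewalld

or open only the HDFS port:

firewall-cmd --permanent --add-port=9000/tcp
firewall-cmd --reload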
2. Project Code
(1) pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <groupId>com.mk</groupId>
  <artifactId>hadoop-test</artifactId>
  <version>1.0-SNAPSHOT</version>

  <name>hadoop-test</name>
  <!-- FIXME change it to the project's website -->
  <url>http://www.example.com</url>

  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <maven.compiler.source>1.7</maven.compiler.source>
    <maven.compiler.target>1.7</maven.compiler.target>
    <hadoop.version>2.6.0</hadoop.version>
  </properties>

  <dependencies>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-client</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.11</version>
      <scope>test</scope>
    </dependency>
  </dependencies>

  <build>
    <pluginManagement>
      <plugins>
        <plugin>
          <artifactId>maven-clean-plugin</artifactId>
          <version>3.1.0</version>
        </plugin>
        <plugin>
          <artifactId>maven-resources-plugin</artifactId>
          <version>3.0.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-compiler-plugin</artifactId>
          <version>3.8.0</version>
        </plugin>
        <plugin>
          <artifactId>maven-surefire-plugin</artifactId>
          <version>2.22.1</version>
        </plugin>
        <plugin>
          <artifactId>maven-jar-plugin</artifactId>
          <version>3.0.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-install-plugin</artifactId>
          <version>2.5.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-deploy-plugin</artifactId>
          <version>2.8.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-site-plugin</artifactId>
          <version>3.7.1</version>
        </plugin>
        <plugin>
          <artifactId>maven-project-info-reports-plugin</artifactId>
          <version>3.0.0</version>
        </plugin>
      </plugins>
    </pluginManagement>
  </build>
</project>
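With the pom.xml in place, a quick sanity check that the Hadoop dependencies resolve is to build from the command line:

mvn clean compile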
(2) App.java
package com.mk;

import com.mk.hdfs.DirectoryOp;
import com.mk.hdfs.FileOp;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;

import java.net.URI;

public class App {

    public static void main(String[] args) throws Exception {
        String uri = "hdfs://192.168.150.128:9000/";
        Configuration conf = new Configuration();
        FileSystem fileSystem = FileSystem.get(URI.create(uri), conf);

        DirectoryOp directoryOp = new DirectoryOp(conf, fileSystem);
        //directoryOp.list("/");                 // hdfs://192.168.150.128:9000/home
        //directoryOp.create("/mytest");
        //directoryOp.list("/");                 // hdfs://192.168.150.128:9000/home
        //                                       // hdfs://192.168.150.128:9000/mytest
        //directoryOp.rename("/mytest", "/my");
        //directoryOp.list("/");                 // hdfs://192.168.150.128:9000/home
        //                                       // hdfs://192.168.150.128:9000/my
        //directoryOp.delete("/my");
        //directoryOp.list("/");                 // hdfs://192.168.150.128:9000/home

        FileOp fileOp = new FileOp(conf, fileSystem);
        //fileOp.create("/a.txt");
        //directoryOp.list("/");                 // hdfs://192.168.150.128:9000/a.txt
        //                                       // hdfs://192.168.150.128:9000/home
        //fileOp.write("/a.txt", "你好,泰山");
        //fileOp.read("/a.txt");                 // 你好,泰山
        //fileOp.readTextLine("/a.txt");         // 你好,泰山
        //fileOp.rename("/a.txt", "b.txt");
        //directoryOp.list("/");                 // hdfs://192.168.150.128:9000/b.txt
        //                                       // hdfs://192.168.150.128:9000/home
        //fileOp.delete("/b.txt");
        //directoryOp.list("/");                 // hdfs://192.168.150.128:9000/home
        //fileOp.localToHdfs("pom.xml", "/pom.xml");
        //directoryOp.list("/");                 // hdfs://192.168.150.128:9000/home
        //                                       // hdfs://192.168.150.128:9000/pom.xml

        fileOp.hdfsToLocal("/pom.xml", "/pom2.xml");
        directoryOp.list("/");                   // hdfs://192.168.150.128:9000/home
                                                 // hdfs://192.168.150.128:9000/pom.xml
    }
}
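When the client runs from Windows, it authenticates against HDFS as the local Windows user, which often lacks write permission and triggers an AccessControlException. A common workaround is to set the HADOOP_USER_NAME property before obtaining the FileSystem; the user name root below is an assumption, use whichever user owns your HDFS directories:

// Assumed HDFS user; must have write permission on the target paths
System.setProperty("HADOOP_USER_NAME", "root");
FileSystem fileSystem = FileSystem.get(URI.create(uri), conf);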
(3) FileOp.java
package com.mk.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.LineReader;

public class FileOp {

    private Configuration conf;
    private FileSystem fs;

    public FileOp(Configuration conf, FileSystem fs) {
        this.conf = conf;
        this.fs = fs;
    }

    // Create an empty file, creating parent directories as needed
    public void create(String file) throws Exception {
        Path path = new Path(file);
        Path parent = path.getParent();
        fs.mkdirs(parent);
        fs.create(path).close();
    }

    // Delete a file (the recursive flag also covers directories)
    public void delete(String file) throws Exception {
        Path path = new Path(file);
        fs.delete(path, true);
    }

    // Rename a file within its parent directory
    public void rename(String file, String name) throws Exception {
        Path path = new Path(file);
        Path parent = path.getParent();
        fs.rename(path, new Path(parent, name));
    }

    // Read the whole file into memory; available() is only an estimate,
    // so this is suitable for small files only
    public void read(String file) throws Exception {
        Path path = new Path(file);
        FSDataInputStream inputStream = fs.open(path);
        byte[] data = new byte[inputStream.available()];
        IOUtils.readFully(inputStream, data, 0, data.length);
        IOUtils.closeStream(inputStream);
        System.out.println(new String(data, "utf-8"));
    }

    // Read the file line by line
    public void readTextLine(String file) throws Exception {
        Path path = new Path(file);
        FSDataInputStream inputStream = fs.open(path);
        Text line = new Text();
        LineReader lineReader = new LineReader(inputStream);
        while (lineReader.readLine(line) > 0) {
            System.out.println(line);
        }
        inputStream.close();
    }

    // Create (or overwrite) the file with the given text
    public void write(String file, String text) throws Exception {
        Path path = new Path(file);
        FSDataOutputStream outputStream = fs.create(path);
        outputStream.write(text.getBytes("utf-8"));
        outputStream.flush();
        IOUtils.closeStream(outputStream);
    }

    // Append text to an existing file
    public void append(String file, String text) throws Exception {
        Path path = new Path(file);
        FSDataOutputStream outputStream = fs.append(path);
        outputStream.write(text.getBytes("utf-8"));
        outputStream.flush();
        IOUtils.closeStream(outputStream);
    }

    // Upload a local file to HDFS (overwrite = true, keep the local copy)
    public void localToHdfs(String localFile, String hdfsFile) throws Exception {
        Path localPath = new Path(localFile);
        Path hdfsPath = new Path(hdfsFile);
        fs.copyFromLocalFile(false, true, localPath, hdfsPath);
    }

    // Download an HDFS file to the local file system (useRawLocalFileSystem = true)
    public void hdfsToLocal(String hdfsFile, String localFile) throws Exception {
        Path localPath = new Path(localFile);
        Path hdfsPath = new Path(hdfsFile);
        fs.copyToLocalFile(false, hdfsPath, localPath, true);
    }
}
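FileOp can also be exercised on its own. A minimal sketch (the NameNode address comes from the setup above; the path /demo.txt is just an example):

Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(URI.create("hdfs://192.168.150.128:9000/"), conf);
FileOp fileOp = new FileOp(conf, fs);
fileOp.create("/demo.txt");
fileOp.write("/demo.txt", "hello");
fileOp.append("/demo.txt", " world"); // append needs cluster-side support; may fail on very small clusters
fileOp.read("/demo.txt");             // expected output: hello world
fs.close();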
(4) DirectoryOp.java
package com.mk.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;

public class DirectoryOp {

    private Configuration conf;
    private FileSystem fs;

    public DirectoryOp(Configuration conf, FileSystem fs) {
        this.conf = conf;
        this.fs = fs;
    }

    // Create a directory, including any missing parents
    public void create(String dir) throws Exception {
        Path path = new Path(dir);
        fs.mkdirs(path);
    }

    // Delete a directory recursively
    public void delete(String dir) throws Exception {
        Path path = new Path(dir);
        fs.delete(path, true);
    }

    // Rename a directory within its parent
    public void rename(String dir, String name) throws Exception {
        Path path = new Path(dir);
        Path parent = path.getParent();
        fs.rename(path, new Path(parent, name));
    }

    // Print the direct children of a path
    public void list(Path path) throws Exception {
        FileStatus[] list = fs.listStatus(path);
        for (FileStatus status : list) {
            System.out.println(status.getPath());
        }
    }

    // String overload delegating to list(Path)
    public void list(String p) throws Exception {
        list(new Path(p));
    }
}