hadoop学习笔记-java操作HDFS
来源:互联网 发布:暗黑修仙数据库 编辑:程序博客网 时间:2024/05/16 03:31
操作HDFS我们必须搭建好环境,这里我们用HA架构来操作HDFS
开发工具使用idea,开发环境我们使用windows
--准备工作
1.我们需要将hadoop解压到windows下,我解压到D:\hadoop-2.5.0;
2.下载对应版本的hadooponwindows-master.zip,解压,将bin目录(包含以下.dll和.exe文件)文件替换原来hadoop目录下的bin目录;
3.配置环境变量:添加HADOOP_HOME环境变量,并将%HADOOP_HOME%\bin追加到Path中(否则winutils.exe等工具无法被找到)
到这里我们hadoop的windows开发环境搭建好了
--创建项目
一.创建项目并添加pom依赖
<?xml version="1.0" encoding="UTF-8"?><project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>com.demo</groupId> <artifactId>hadoop</artifactId> <version>1.0-SNAPSHOT</version> <dependencies> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-common</artifactId> <version>2.5.0</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-hdfs</artifactId> <version>2.5.0</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-client</artifactId> <version>2.5.0</version> </dependency> </dependencies></project>
二.创建Hdfs主类
2.1 创建文件
package com.demo.hadoop;import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.fs.FileSystem;import org.apache.hadoop.fs.Path;import java.io.IOException;public class Hdfs { public static void main(String[] args) throws IOException { Configuration configuration = new Configuration(); configuration.set("fs.defaultFS","hdfs://yangyi:8020"); FileSystem fileSystem = FileSystem.get(configuration); Path path = new Path("/user/yang/demo.txt"); fileSystem.create(path); fileSystem.close(); }}
去文件系统中可以看到已经生成demo.txt文件
2.2 删除文件
package com.demo.hadoop;import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.fs.FileSystem;import org.apache.hadoop.fs.Path;import java.io.IOException;public class Hdfs { public static void main(String[] args) throws IOException { Configuration configuration = new Configuration(); configuration.set("fs.defaultFS","hdfs://yangyi:8020"); FileSystem fileSystem = FileSystem.get(configuration); Path path = new Path("/user/yang/demo.txt"); fileSystem.delete(path,true); fileSystem.close(); }}
此时该文件已经不存在
2.3 写入文件
package com.demo.hadoop;import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.fs.FSDataOutputStream;import org.apache.hadoop.fs.FileSystem;import org.apache.hadoop.fs.Path;import java.io.IOException;public class Hdfs { public static void main(String[] args) throws IOException { Configuration configuration = new Configuration(); configuration.set("fs.defaultFS","hdfs://yangyi:8020"); FileSystem fileSystem = FileSystem.get(configuration); Path path = new Path("/user/yang/demo.out"); FSDataOutputStream out = fileSystem.create(path); out.writeUTF("this is a hdfs demo !\n"); fileSystem.close(); }}
2.4 读取文件
package com.demo.hadoop; import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.fs.FSDataInputStream;import org.apache.hadoop.fs.FileStatus;import org.apache.hadoop.fs.FileSystem;import org.apache.hadoop.fs.Path;import java.io.IOException;public class Hdfs { public static void main(String[] args) throws IOException { Configuration configuration = new Configuration(); configuration.set("fs.defaultFS","hdfs://yangyi:8020"); FileSystem fileSystem = FileSystem.get(configuration); Path path = new Path("/user/yang/demo.out"); FSDataInputStream inputStream = fileSystem.open(path); FileStatus status = fileSystem.getFileStatus(path); byte[] buffer = new byte[(int)status.getLen()]; inputStream.read(buffer); inputStream.close(); String result = new String(buffer); System.out.println(result); fileSystem.close(); }}
2.5 上传文件
package com.demo.hadoop;import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.fs.FileSystem;import org.apache.hadoop.fs.Path;import java.io.IOException;public class Hdfs { public static void main(String[] args) throws IOException { Configuration configuration = new Configuration(); configuration.set("fs.defaultFS","hdfs://yangyi:8020"); FileSystem fileSystem = FileSystem.get(configuration); Path path = new Path("/user/yang/text.txt"); Path localPath = new Path("D://temp/test.txt"); fileSystem.copyFromLocalFile(localPath,path); fileSystem.close(); }}
2.6 递归目录
package com.demo.hadoop;import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.fs.FileStatus;import org.apache.hadoop.fs.FileSystem;import org.apache.hadoop.fs.Path;import java.io.IOException;public class Hdfs { public static void main(String[] args) throws IOException { Configuration configuration = new Configuration(); configuration.set("fs.defaultFS", "hdfs://yangyi:8020"); FileSystem fileSystem = FileSystem.get(configuration); Path path = new Path("/user/yang"); getFileAndDir(path,fileSystem); fileSystem.close(); } private static void getFileAndDir(Path path, FileSystem fileSystem) throws IOException { FileStatus[] fileStatuses = fileSystem.listStatus(path); for (FileStatus fileStatus : fileStatuses) { if (fileStatus.isDirectory()) { Path childPath = fileStatus.getPath(); getFileAndDir(childPath, fileSystem); System.out.println("文件夹\t" + fileStatus.getPath()); } else System.out.println("文件\t" + fileStatus.getPath()); } }}
阅读全文
0 0
- hadoop学习笔记-java操作HDFS
- Hadoop学习笔记(3)-java操作hdfs的API接口
- java操作HDFS------Hadoop学习(3)
- hadoop学习1 java操作HDFS
- hadoop学习(五)----HDFS的java操作
- hadoop java操作hdfs
- hadoop-hdfs学习笔记
- Hadoop学习笔记_操作篇之一:HDFS操作
- hadoop学习笔记1.使用shell和JAVA API操作HDFS
- Hadoop--学习笔记 在Eclipse中操作远程hdfs文件
- hadoop学习笔记3:shell下的hdfs操作
- Hadoop学习笔记0002——HDFS文件操作
- Hadoop学习笔记(五)---HDFS shell操作
- hadoop学习笔记(HDFS的文件操作)
- Hadoop笔记二之java操作hdfs对象
- hadoop java HDFS 读写操作
- JAVA操作HDFS API(hadoop)
- JAVA操作HDFS API(hadoop)
- 使用JQuery选择两个节点之间所有节点办法
- 聊一位倾慕已久的他——致敬图灵
- JS实现复选框的全选和全不选
- oracle 常见错误
- 集中式的内容分发网络本就是错误的互联网设计,账号登录机制更是垃圾
- hadoop学习笔记-java操作HDFS
- leetcode_587.Erect the Fence?待解决
- hdu5950(递推+矩阵快速幂函) 2016亚洲区域赛沈阳站C题(铜牌题)
- HDU1757(矩阵快速幂+简单的矩阵构造)
- Authentication for Hadoop HTTP web-consoles ---Hadoop 1.2.1
- okHttp封装
- hadoop入门六(基础知识入门)
- ACM-10月15日周日周末训练心得
- java根据经纬度获取该经纬度的省市区