HDFS的javaApi
2021/12/22 17:21:40
本文主要介绍HDFS的javaApi,对大家解决编程问题具有一定的参考价值,需要的程序员们随着小编来一起学习吧!
1.安装jdk(1.8)
2.创建maven项目,配置pom.xml(创建对应的依赖)
<!-- Hadoop client-side libraries (3.2.1) plus JUnit 4 for the test code -->
<dependencies>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>3.2.1</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>3.2.1</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>3.2.1</version>
    </dependency>
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>4.13</version>
    </dependency>
</dependencies>
3.在项目的src/main/resources目录下,新建一个文件,命名为“log4j.properties”
# Console appender configuration
# (NOTE: as originally collapsed onto one line, the leading '#' commented out
# the entire file; each key must be on its own line.)
log4j.appender.Console=org.apache.log4j.ConsoleAppender
log4j.appender.Console.layout=org.apache.log4j.PatternLayout
log4j.appender.Console.layout.ConversionPattern=%d [%t] %p [%c] - %m%n
# Root logger: INFO level and above, routed to the Console appender
log4j.rootLogger=info,Console
4.编写代码实现
4.1配置全局变量
// Shared per-test state, initialized in @Before and released in @After.
FileSystem fs = null;       // handle to the remote HDFS filesystem
Configuration conf = null;  // Hadoop client configuration
URI uri = null;             // NameNode address
Logger log;                 // log4j logger for this test class
4.2建立连接
/**
 * Runs before each test: connects to the HDFS NameNode at node-1:8020
 * as user "xdd" and obtains a FileSystem handle.
 */
@Before
public void testInit() throws Exception {
    uri = new URI("hdfs://node-1:8020");
    conf = new Configuration();
    String user = "xdd";
    log = LogManager.getLogger(hadooptest.class);
    fs = FileSystem.get(uri, conf, user);
    // FIX: a success message belongs at INFO, not ERROR;
    // log4j.rootLogger=info already makes INFO visible on the console.
    log.info("初始化成功!");
}
4.3 运行后释放资源
/**
 * Runs after each test: closes the FileSystem handle if it was opened.
 */
@After
public void testClose() throws Exception {
    if (fs != null) {
        // FIX: informational message logged at INFO instead of ERROR.
        log.info("释放资源结束!");
        fs.close();
    }
}
4.3新建文件
//新建文件 @Test public void hdmkdir() throws Exception{ boolean flag=fs.mkdirs(new Path("test1")); log.error("创建成功:"+flag); }
4.4删除文件
//删除文件 @Test public void hddel() throws Exception{ boolean flag=fs.delete(new Path("test1"),true); log.error("成功删除:"+flag); }
4.5上传文件
//上传资源 @Test public void hdput() throws Exception{ fs.copyFromLocalFile(new Path("D:/software/coffe.jpg"),new Path("/zxm1/put.jpg")); log.error("上传成功"); }
4.6下载文件
//下载资源 @Test public void hddown() throws Exception{ fs.copyToLocalFile(new Path("/zxm1/put.jpg"),new Path("D:/software/down.jpg")); log.error("下载成功!"); }
4.7遍历判断是否为文件
// Lists the entries directly under the HDFS root and prints whether
// each one is a file or a directory.
@Test
public void hdindAll() throws Exception {
    for (FileStatus status : fs.listStatus(new Path("/"))) {
        String name = status.getPath().getName();
        if (status.isFile()) {
            System.out.println("文件:" + name);
        } else {
            System.out.println("目录:" + name);
        }
    }
}
5.整体代码
5.1 hadooptest.java
package mytest;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.net.URI;

/**
 * JUnit 4 smoke tests for basic HDFS operations: connect, mkdir, delete,
 * upload, download, and directory listing.
 *
 * <p>FIX applied throughout: informational/success messages were logged at
 * ERROR level; they now use INFO, which log4j.rootLogger=info already shows.
 */
public class hadooptest {
    // Shared per-test state, initialized in @Before and released in @After.
    FileSystem fs = null;       // handle to the remote HDFS filesystem
    Configuration conf = null;  // Hadoop client configuration
    URI uri = null;             // NameNode address
    Logger log = null;          // log4j logger for this test class

    /** Connects to HDFS at node-1:8020 as user "xdd" before each test. */
    @Before
    public void testInit() throws Exception {
        uri = new URI("hdfs://node-1:8020");
        conf = new Configuration();
        String user = "xdd";
        log = LogManager.getLogger(hadooptest.class);
        fs = FileSystem.get(uri, conf, user);
        log.info("初始化成功!");
    }

    /** Closes the FileSystem handle after each test, if it was opened. */
    @After
    public void testClose() throws Exception {
        if (fs != null) {
            log.info("释放资源结束!");
            fs.close();
        }
    }

    // Create a directory; relative path resolves under the user's HDFS home.
    @Test
    public void hdmkdir() throws Exception {
        boolean flag = fs.mkdirs(new Path("test1"));
        log.info("创建成功:" + flag);
    }

    // Delete a directory recursively (second argument true).
    @Test
    public void hddel() throws Exception {
        boolean flag = fs.delete(new Path("test1"), true);
        log.info("成功删除:" + flag);
    }

    // Upload a local file to HDFS.
    // NOTE(review): the local path is hard-coded; adjust before running.
    @Test
    public void hdput() throws Exception {
        fs.copyFromLocalFile(new Path("D:/software/coffe.jpg"), new Path("/zxm1/put.jpg"));
        log.info("上传成功");
    }

    // Download an HDFS file to the local filesystem.
    @Test
    public void hddown() throws Exception {
        fs.copyToLocalFile(new Path("/zxm1/put.jpg"), new Path("D:/software/down.jpg"));
        log.info("下载成功!");
    }

    // List the entries under the HDFS root, reporting file vs. directory.
    @Test
    public void hdindAll() throws Exception {
        FileStatus[] ls = fs.listStatus(new Path("/"));
        for (FileStatus fS : ls) {
            if (fS.isFile()) {
                System.out.println("文件:" + fS.getPath().getName());
            } else {
                System.out.println("目录:" + fS.getPath().getName());
            }
        }
    }
}
5.2 pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>org.example</groupId>
    <artifactId>hadoop</artifactId>
    <version>1.0-SNAPSHOT</version>

    <!-- Hadoop client-side libraries (3.2.1) plus JUnit 4 for the tests -->
    <dependencies>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>3.2.1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>3.2.1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>3.2.1</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.13</version>
        </dependency>
    </dependencies>
</project>
5.3 log4j.properties
# Console appender configuration
# (NOTE: as originally collapsed onto one line, the leading '#' commented out
# the entire file; each key must be on its own line.)
log4j.appender.Console=org.apache.log4j.ConsoleAppender
log4j.appender.Console.layout=org.apache.log4j.PatternLayout
log4j.appender.Console.layout.ConversionPattern=%d [%t] %p [%c] - %m%n
# Root logger: INFO level and above, routed to the Console appender
log4j.rootLogger=info,Console
这篇关于HDFS的javaApi的文章就介绍到这儿,希望我们推荐的文章对大家有所帮助,也希望大家多多支持为之网!
- 2024-06-26结对编程到底难不难?答案在这里
- 2024-06-19《2023版Java工程师》课程升级公告
- 2024-06-15matplotlib作图不显示3D图,怎么办?
- 2024-06-1503-Loki 日志监控
- 2024-06-1504-让LLM理解知识 -Prompt
- 2024-06-05做软件测试需要懂代码吗?
- 2024-06-0514-ShardingSphere的分布式主键实现
- 2024-06-03为什么以及如何要进行架构设计权衡?
- 2024-05-31全网首发第二弹!软考2024年5月《软件设计师》真题+解析+答案!(11-20题)
- 2024-05-31全网首发!软考2024年5月《软件设计师》真题+解析+答案!(21-30题)