測試
-
創建項目添加 pom 依賴
<dependencies>
    <!-- Hadoop client stack, all three artifacts pinned to the same release -->
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>2.9.2</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>2.9.2</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs-client</artifactId>
        <version>2.9.2</version>
    </dependency>
</dependencies>
-
測試代碼連接 hdfs 上傳文件
public class HdfsClient {

    public static void main(String[] args) throws IOException, URISyntaxException, InterruptedException {
        ioStreamPutHdfs();
    }

    /**
     * Uploads a local file to HDFS by copying the raw byte stream.
     *
     * <p>Connects to the NameNode at {@code hdfs://172.16.235.162:8020} as user
     * {@code hadoop}, then streams the local file into {@code /user/zcz/io.xml}.
     *
     * @throws URISyntaxException   if the HDFS URI is malformed
     * @throws IOException          if the local file cannot be read or the HDFS write fails
     * @throws InterruptedException if the connection to HDFS is interrupted
     */
    public static void ioStreamPutHdfs() throws URISyntaxException, IOException, InterruptedException {
        Configuration conf = new Configuration();
        // try-with-resources: the original closed the streams and FileSystem only on
        // the happy path, leaking all three if copyBytes threw. Resources are closed
        // in reverse declaration order (output stream, input stream, then fs).
        try (FileSystem fs = FileSystem.get(new URI("hdfs://172.16.235.162:8020"), conf, "hadoop");
             FileInputStream in = new FileInputStream("/Users/zhangchenzhao/Desktop/local/zcz.xml");
             FSDataOutputStream out = fs.create(new Path("/user/zcz/io.xml"))) {
            IOUtils.copyBytes(in, out, conf);
        }
    }
}
springboot hadoop
-
pom依賴
<dependencies>
    <!-- Spring Boot -->
    <!-- NOTE(review): this is usually spring-boot-starter-test with <scope>test</scope>;
         plain spring-boot-test on the compile classpath looks unintentional — confirm. -->
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-test</artifactId>
    </dependency>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-actuator</artifactId>
    </dependency>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-web</artifactId>
    </dependency>
    <!-- hadoop-hdfs -->
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>2.9.2</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>2.9.2</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs-client</artifactId>
        <version>2.9.2</version>
    </dependency>
</dependencies>
-
配置文件
# Service identity and HTTP port
spring.application.name=hadoop-service
server.port=9100

# HDFS connection settings (consumed by HadoopConf via @Value)
hdfs.user=hadoop
hdfs.defaultFS=hdfs://172.16.235.162:8020
-
配置類
/**
 * Spring configuration that exposes the Hadoop {@link org.apache.hadoop.conf.Configuration}
 * and a connected {@link FileSystem} as singleton beans.
 */
@Configuration
public class HadoopConf {

    // Fixed typo: field was previously misspelled "defautFS" (the property key is unchanged).
    @Value("${hdfs.defaultFS}")
    private String defaultFS;

    @Value("${hdfs.user}")
    private String user;

    /** Hadoop configuration; picks up *-site.xml files found on the classpath. */
    @Bean
    public org.apache.hadoop.conf.Configuration configuration() {
        return new org.apache.hadoop.conf.Configuration();
    }

    /**
     * Connected HDFS client, built from the configured NameNode URI and user.
     *
     * @throws URISyntaxException   if {@code hdfs.defaultFS} is not a valid URI
     * @throws IOException          if the connection cannot be established
     * @throws InterruptedException if the connection attempt is interrupted
     */
    @Bean
    public FileSystem fileSystem(org.apache.hadoop.conf.Configuration configuration)
            throws URISyntaxException, IOException, InterruptedException {
        return FileSystem.get(new URI(defaultFS), configuration, user);
    }
}
-
log4j.properties
# Root logger: INFO level, console output only.
# NOTE(review): the "logfile" appender below is defined but never attached to the
# root logger; append ", logfile" here if file logging is actually intended.
log4j.rootLogger=INFO, stdout

# Console appender
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] - %m%n

# File appender writing to target/spring.log
log4j.appender.logfile=org.apache.log4j.FileAppender
log4j.appender.logfile.File=target/spring.log
log4j.appender.logfile.layout=org.apache.log4j.PatternLayout
log4j.appender.logfile.layout.ConversionPattern=%d %p [%c] - %m%n
-
hadoop配置文件 hdfs-site.xml, core-site.xml, mapred-site.xml, yarn-site.xml 可以放在 resources 目錄下, 其優先級高於 hadoop 集羣上的配置
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
    <!-- Number of block replicas; HDFS default is 3 -->
    <property>
        <name>dfs.replication</name>
        <value>1</value>
    </property>
</configuration>
-
測試
@RestController
@RequestMapping("/hdfs")
public class HdfsController {

    // Singleton HDFS client from HadoopConf — deliberately NOT closed per request.
    @Autowired
    private FileSystem fileSystem;

    @Autowired
    private Configuration configuration;

    /**
     * Uploads the local file at {@code filePath} to {@code /user/zcz/ion.xml} on HDFS.
     *
     * <p>NOTE(review): method name "get" is misleading for a PUT upload endpoint;
     * kept as-is to avoid breaking anything that references it by name.
     *
     * @param filePath absolute path of a file on the server's local disk
     * @throws IOException if the local file cannot be read or the HDFS write fails
     */
    @PutMapping("/upload")
    public void get(@RequestParam("filePath") String filePath) throws IOException {
        // try-with-resources: the original closed both streams only on the happy
        // path and leaked them whenever copyBytes threw.
        try (FileInputStream in = new FileInputStream(filePath);
             FSDataOutputStream out = fileSystem.create(new Path("/user/zcz/ion.xml"))) {
            IOUtils.copyBytes(in, out, configuration);
        }
    }
}
上傳後文件的副本數爲 1, 說明開發環境 resources 目錄下的配置優先級高於 hadoop 集羣配置