用java讀取hdfs的文件
直接擼代碼:
package com.nature.base.util;
import com.nature.component.process.vo.DebugDataResponse;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;

import java.io.*;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.*;
/**
* 操作hdfs文件的工具類
*/
/**
 * Utility class for reading file content from HDFS.
 *
 * <p>Because the {@link FileSystem} is resolved from the path's scheme, this also
 * works for local paths (e.g. {@code file:///...}), not only {@code hdfs://} URIs.
 */
public class HdfsUtils {

    // NOTE(review): LoggerUtil is a project-local helper; assumed to return an SLF4J Logger — confirm.
    private static final Logger logger = LoggerUtil.getLogger();

    /** Non-instantiable utility class. */
    private HdfsUtils() {
    }

    /**
     * Reads the entire text content of the file at the given HDFS path.
     *
     * @param hdfsPath a path such as {@code hdfs://host:port/dir/file}; a local path also works
     * @return the file content with each line terminated by {@code '\n'};
     *         an empty string when the path is blank or an I/O error occurs
     */
    public static String gethdfsData(String hdfsPath) {
        // Guard clause; uses the commons-lang3 StringUtils this file already imports
        // (the original called a fully-qualified project StringUtils inconsistently).
        if (!StringUtils.isNotEmpty(hdfsPath)) {
            return "";
        }
        Path path = new Path(hdfsPath);
        Configuration configuration = new Configuration();
        StringBuilder result = new StringBuilder();
        // try-with-resources closes the reader (and the wrapped stream) even on error,
        // replacing the original's triple-nested finally blocks.
        // The FileSystem instance is deliberately NOT closed: path.getFileSystem()
        // returns a cached, JVM-wide shared instance, and closing it would break
        // every other user of the same filesystem in this process.
        try (FSDataInputStream in = path.getFileSystem(configuration).open(path);
             BufferedReader br = new BufferedReader(
                     new InputStreamReader(in, StandardCharsets.UTF_8))) {
            // Explicit UTF-8 prevents mojibake for non-ASCII (e.g. Chinese) content;
            // the original relied on the platform default charset.
            String line;
            while ((line = br.readLine()) != null) {
                result.append(line).append('\n');
            }
        } catch (IOException e) {
            // Log with the full stack trace instead of printStackTrace();
            // best-effort semantics preserved: return whatever was read so far.
            logger.error("讀取hdfs文件失敗: " + hdfsPath, e);
        }
        logger.debug("文件內容:" + result);
        return result.toString();
    }
}
核心方法:
Path path = new Path("hdfs://192.168.1.111:9000/test/1234log");
Configuration configuration = new Configuration();
FileSystem fileSystem = path.getFileSystem(configuration);
FSDataInputStream fsDataInputStream = fileSystem.open(path);
注意:讀取時應在 InputStreamReader 中顯式指定字符編碼(如 StandardCharsets.UTF_8),否則會使用平臺默認編碼,可能導致中文亂碼!!
這種方式即可以讀取hdfs上面的文件也可以讀取本地文件