HDFS 的訪問方式有兩種:第一種是類似 Linux 命令的 Hadoop Shell;第二種是 Java API 方式。
下面來看第二種。它與第一種完成的功能相同,直接上代碼;代碼中被註釋掉的部分,測試時自行打開即可。
package com.wmg.data.join2; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.DistributedFileSystem; public class HDFSAcess { /** * @param args * @throws IOException */ public static void main(String[] args) { byte[] buffer = new byte[4096]; try { // create a distributed file system instance Configuration conf = new Configuration(); // 配置文件信息在conf/core-site..xml中配置,namenode的地址端口信息 conf.set("fs.default.name", "hdfs://10.20.151.7:9000"); // 初始化文件系統,兩種方式 /* * DistributedFileSystem fs1 = new DistributedFileSystem(); * fs1.initialize(FileSystem.getDefaultUri(conf), conf); */ FileSystem fs = FileSystem.get(conf); String dir = "/user/hadoop/wmg"; String dir1 = "/user/hadoop/wmg"; Path dirPath = new Path(dir); Path dirPath1 = new Path(dir1); // 創建目錄,兩種方式均可 // fs1.mkdirs(dirPath); fs.mkdirs(dirPath1); // hdfs.mkdirs(dirPath); // 創建文件 String dst = "/user/hadoop/wmg/wmg.txt"; String dst1 = "/user/hadoop/wmg/outputwmg.txt"; Path dstPath = new Path(dst); Path dstPath1 = new Path(dst1); byte[] content = "aaaa".getBytes(); FSDataOutputStream outputStream = fs.create(dstPath); outputStream.write(content); outputStream = fs.create(dstPath1); outputStream.close(); System.out.println("success, create a new file in HDFS: " + dst); /* * rename a file in HDFS * */ String src = "/user/hadoop/wmg/newwmg.txt"; Path srcPath = new Path(src); fs.rename(dstPath, srcPath); System.out.println("ok, file: " + dst + " renamed to: " + src); /* * delete a hdfs file * * */ /* * fs.delete(srcPath, false); * System.out.println("ok, delete file: "+srcPath); */ // read data from file FSDataInputStream is = fs.open(new Path( "/user/hadoop/wmg/newwmg.txt")); is.read(buffer); // overwrite hdfs file FSDataOutputStream os_w = fs.create(new Path( "/user/hadoop/wmg/outputwmg.txt"), true); 
os_w.write(buffer); // append data to hdfs file // FSDataOutputStream os_a = fs.append(new Path("/fs_t02")); // os_a.write(buffer); /* * upload the file from local system to HDFS * */ String localsrc = "/home/hadoop/minggang.wumg/localwmg.txt"; String dfst = "/user/hadoop/wmg"; Path localsrcPath = new Path(localsrc); Path dfstPath = new Path(dfst); fs.copyFromLocalFile(localsrcPath, dfstPath); System.out.println("Upload to " + conf.get("fs.default.name")); // flush & close stream os_w.flush(); os_w.close(); // os_a.flush(); // os_a.close(); is.close(); fs.close(); System.out.println(new String(buffer, "US-ASCII")); } catch (IOException e) { System.err.print(e.toString()); } } } |