Hadoop Programming in Practice: HDFS API Programming Examples

{"type":"doc","content":[{"type":"paragraph","attrs":{"indent":0,"number":0,"align":null,"origin":null},"content":[{"type":"text","text":"想了解HDFS API基礎知識,請查閱","attrs":{}},{"type":"link","attrs":{"href":"https://xie.infoq.cn/article/560f9b30720a529852c718144","title":""},"content":[{"type":"text","text":"Hadoop編程實戰:HDFS API編程","attrs":{}}]}]},{"type":"paragraph","attrs":{"indent":0,"number":0,"align":null,"origin":null}},{"type":"paragraph","attrs":{"indent":0,"number":0,"align":null,"origin":null},"content":[{"type":"text","text":"本文將介紹HDFS常用API編程樣例,具體請看代碼。歡迎大家多多指教,哈哈。","attrs":{}}]},{"type":"codeblock","attrs":{"lang":"java"},"content":[{"type":"text","text":"package cdh_dev_demo;\n\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.*;\nimport org.apache.hadoop.fs.permission.FsPermission;\nimport org.apache.hadoop.io.IOUtils;\n\nimport java.io.BufferedReader;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.text.SimpleDateFormat;\nimport java.util.Date;\n\npublic class HdfsMethodDemo {\n public static void main(String args[]) throws IOException {\n HdfsMethodDemo h=new HdfsMethodDemo();\n FileSystem fs = getfs();\n //執行方法\n fs.close();\n }\n\n //創建HDFS Configuration配置項\n private static Configuration getconf(){\n Configuration conf = new Configuration();\n conf.set(\"fs.defaultFS\",\"hdfs://namenode.cdh.com:8020\");\n return conf;\n }\n\n //創建HDFS文件對象\n private static FileSystem getfs() throws IOException{\n Configuration conf = getconf();\n FileSystem fs = FileSystem.get(conf);\n return fs;\n }\n\n //拷貝本地文件到HDFS\n public static void CopyLocalToHdfs(FileSystem fs,String LocalPath,String HdfsPath) throws IOException{\n Path local = new Path(LocalPath);\n Path hdfs = new Path(HdfsPath);\n fs.copyFromLocalFile(local,hdfs);\n //查看文件是否複製成功\n FileStatus files[] = fs.listStatus(hdfs);\n for (FileStatus file:files){\n System.out.println(file.getPath().toString());\n }\n }\n\n //拷貝HDFS文件到本地\n public static void CopyHdfsToLocal(FileSystem fs,String LocalPath,String HdfsPath) throws IOException {\n Path local = new Path(LocalPath);\n Path hdfs = new Path(HdfsPath);\n fs.copyToLocalFile(false,hdfs,local,true);\n }\n\n //創建HDFS目錄\n public static void MkdirHdfsPath(FileSystem fs,String HdfsPath) throws IOException{\n fs.mkdirs(new Path(HdfsPath));\n }\n\n //HDFS系統內文件拷貝\n public static void CopyHdfsToHdfs(FileSystem fs,String SrcHdfsPath,String TargetHdfsPath) throws IOException{\n FSDataInputStream fsin = fs.open(new Path(SrcHdfsPath));\n FSDataOutputStream fsout = fs.create(new Path(TargetHdfsPath));\n IOUtils.copyBytes(fsin,fsout,1024,true);\n }\n\n //創建HDFS文件\n public static void CreateHdfsFile(FileSystem fs,String HdfsPath) throws IOException{\n fs.create(new Path(HdfsPath));\n }\n\n //查看HDFS文件元信息\n public static void SelHdfsMetadata(FileSystem fs,String HdfsPath) throws IOException{\n FileStatus[] stat = fs.listStatus(new Path(HdfsPath));\n for (FileStatus f:stat) {\n long accessTime = f.getAccessTime(); //創建時間\n SimpleDateFormat sdf = new SimpleDateFormat(\"yyyy-MM-dd HH:mm:ss\");\n long blockSize = f.getBlockSize(); //塊大小\n String group = f.getGroup(); //用戶組\n long len = f.getLen(); //文件長度\n long modificationTime = f.getModificationTime(); //修改時間\n String owner = f.getOwner(); //用戶\n Path path = f.getPath(); //路徑\n FsPermission permission = f.getPermission(); //權限\n short replication = f.getReplication(); //副本數\n String string = f.toString(); //f集合所有內容\n boolean directory = f.isDirectory(); //判斷是否目錄\n boolean encrypted = f.isEncrypted(); 
//判斷是否加密\n boolean file = f.isFile(); //判斷是否文件\n\n System.out.println(\"AccessTime:\"+sdf.format(new Date(accessTime)));\n System.out.println(\"BlockSize:\"+blockSize);\n System.out.println(\"Group:\"+group);\n System.out.println(\"Length:\"+len);\n System.out.println(\"ModificationTime:\"+sdf.format(new Date(modificationTime)));\n System.out.println(\"Owner:\"+owner);\n System.out.println(\"Path:\"+path);\n System.out.println(\"Permission:\"+permission);\n System.out.println(\"Replication:\"+replication);\n System.out.println(\"StatusList:\"+string);\n System.out.println(\"IsDirectory:\"+directory);\n System.out.println(\"IsEncrypted:\"+encrypted);\n System.out.println(\"IsFile:\"+file);\n System.out.println(\"\\n\");\n }\n }\n\n //刪除HDFS文件\n public static void DeleteHdfsFile(FileSystem fs,String HdfsPath) throws IOException{\n FileStatus[] stat = fs.listStatus(new Path(HdfsPath));\n for (FileStatus f:stat) {\n System.out.println(\"IsFile?\"+f.isFile());\n System.out.println(\"IsDirectory?\"+f.isDirectory());\n if (f.isFile() == true)\n //刪除文件\n fs.delete(new Path(f.getPath().toString()),false);\n else if (f.isDirectory() == true)\n //刪除目錄及目錄下所有文件\n fs.delete(new Path(f.getPath().toString()),true);\n }\n }\n\n //把HDFS文件內容逐行讀取並輸出到屏幕\n public static void ReadHdfsFile(FileSystem fs,String HdfsPath) throws IOException{\n FSDataInputStream fin = fs.open(new Path(HdfsPath));\n int i=0; //行數\n String s=null;\n BufferedReader br = new BufferedReader(new InputStreamReader(fin));\n while (((s = br.readLine()) != null)){\n i++;\n System.out.println(\"第\"+i+\"行:\"+s);\n }\n }\n\n //把1-10數字逐行寫入到HDFS文件\n public static void WriteHdfsFile(FileSystem fs,String HdfsPath) throws IOException{\n //若文件存在,則進行刪除\n if (fs.exists(new Path(HdfsPath)))\n DeleteHdfsFile(fs,HdfsPath);\n FSDataOutputStream fout = fs.create(new Path(HdfsPath));\n for (int i=0;i<=10;i++){\n byte[] buffer = (i+\"\\n\").getBytes();\n fout.write(buffer);\n }\n }\n}","attrs":{}}]},{"type":"paragraph","attrs":{"indent":0,"number":0,"align":null,"origin":null}}]}
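The `main` method above is left as a stub. As a rough illustration of how these helpers could be wired together, here is a minimal sketch of a driver class that uploads a local file, prints its metadata, and reads it back. It assumes the `HdfsMethodDemo` class above is on the classpath and that the cluster at `hdfs://namenode.cdh.com:8020` is reachable; the local and HDFS paths are made-up placeholders, not paths from the original article.

```java
package cdh_dev_demo;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

import java.io.IOException;

public class HdfsMethodDemoUsage {
    public static void main(String[] args) throws IOException {
        // Same fs.defaultFS as in HdfsMethodDemo; adjust to your own cluster.
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://namenode.cdh.com:8020");
        FileSystem fs = FileSystem.get(conf);

        // Placeholder paths, for illustration only.
        String localFile = "/tmp/sample.txt";
        String hdfsDir = "/user/demo";
        String hdfsFile = hdfsDir + "/sample.txt";

        HdfsMethodDemo.MkdirHdfsPath(fs, hdfsDir);               // create the target directory
        HdfsMethodDemo.CopyLocalToHdfs(fs, localFile, hdfsDir);  // upload the file and list the directory
        HdfsMethodDemo.SelHdfsMetadata(fs, hdfsFile);            // print the uploaded file's metadata
        HdfsMethodDemo.ReadHdfsFile(fs, hdfsFile);               // print the file's contents line by line

        fs.close();
    }
}
```

Since all the helper methods take a `FileSystem` handle as their first argument, a single handle can be opened once, shared across calls, and closed at the end, as in this sketch.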