HDFS Basic Commands and Java Operations

1. Start the Hadoop cluster
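
If the cluster is not already running, it can usually be brought up with the scripts shipped in Hadoop's sbin directory (the commands below assume a standard Hadoop 2.x install with those scripts on the PATH; adjust to your setup):

  start-dfs.sh
  start-yarn.sh
  jps   # should list NameNode, DataNode, ResourceManager, etc.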

2. List a directory to verify that HDFS is available

  hadoop fs -ls /

3. Commands that operate on HDFS take the form hadoop fs -<command>; a short sample session follows the list below

     hadoop fs -help <command>   show help for a command
     hadoop fs -ls <path>        list the contents of an HDFS directory
     hadoop fs -lsr <path>       list an HDFS directory recursively (deprecated; use -ls -R)
     hadoop fs -mkdir <path>     create a directory
     hadoop fs -put <src> <des>  upload a file from Linux to HDFS
     hadoop fs -get <src> <des>  download a file from HDFS to Linux
     hadoop fs -text <path>      print the contents of a file
     hadoop fs -rm <path>        delete a file
     hadoop fs -rmr <path>       delete recursively (deprecated; use -rm -r)
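
A minimal session tying these commands together (the /d3 directory and hello.txt are placeholders, matching the paths used in the Java demo later in this post):

     hadoop fs -mkdir /d3
     hadoop fs -put hello.txt /d3/test
     hadoop fs -ls /d3
     hadoop fs -text /d3/test
     hadoop fs -get /d3/test ./test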

4. Writing Java code to operate on HDFS
Set up an Eclipse + Hadoop development environment.
With the FileSystem API reference at hand, creating, deleting, uploading, and downloading files on HDFS is straightforward; a Maven alternative to manual Eclipse setup is sketched below.
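
If the project is built with Maven instead of adding jars to the Eclipse build path by hand, a single hadoop-client dependency normally pulls in everything the code below uses (the version shown is an assumption; match it to your cluster):

    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>2.7.3</version>
    </dependency>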


package hadoop.hdfs;

import java.io.FileInputStream;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class HdfsCommon {
    private String url;
    private FileSystem fileSystem;

    /**
     * @param url e.g. hdfs://192.168.100.10:9000
     * @throws IOException
     * @throws URISyntaxException
     */
    public HdfsCommon(String url) throws IOException, URISyntaxException {
        this.url = url;
        this.fileSystem = FileSystem.get(new URI(url), new Configuration());
    }

    public FileSystem getFileSystem() {
        return fileSystem;
    }

    /**
     * Create a directory.
     * @param dirPath directory path, e.g. dir/dir/dir
     * @return true if the directory was created
     */
    public boolean createDir(String dirPath) {
        try {
            return fileSystem.mkdirs(new Path(url + "/" + dirPath));
        } catch (IllegalArgumentException | IOException e) {
            e.printStackTrace();
        }
        return false;
    }
    /**
     * Delete a directory.
     * @param dir directory path, e.g. dir/dir/dir
     * @return true if the directory was deleted
     */
    public boolean deleteDir(String dir) {
        try {
            // the second argument enables recursive deletion
            return fileSystem.delete(new Path(url + "/" + dir), true);
        } catch (IllegalArgumentException | IOException e) {
            e.printStackTrace();
        }
        return false;
    }
    /**
     * Upload a local file to HDFS.
     * @param srcFile local source path
     * @param desFile HDFS destination path
     * @return true on success
     */
    public boolean uploadFile(String srcFile, String desFile) {
        try {
            FSDataOutputStream fsout = fileSystem.create(new Path(desFile));
            FileInputStream fis = new FileInputStream(srcFile);
            // the final "true" tells copyBytes to close both streams when done
            IOUtils.copyBytes(fis, fsout, 1024, true);
            return true;
        } catch (IllegalArgumentException | IOException e) {
            e.printStackTrace();
        }
        return false;
    }

    /**
     * Download an HDFS file and print its contents to stdout.
     * @param srcFile HDFS source path
     * @return true on success
     */
    public boolean downloadFile(String srcFile) {
        FSDataInputStream in = null;
        try {
            // open() already returns an FSDataInputStream; no extra wrapping needed
            in = fileSystem.open(new Path(srcFile));
            // pass "false" so copyBytes does not close System.out,
            // which would silence all later printing in this JVM
            IOUtils.copyBytes(in, System.out, 1024, false);
            return true;
        } catch (IllegalArgumentException | IOException e) {
            e.printStackTrace();
        } finally {
            IOUtils.closeStream(in); // null-safe close of the HDFS stream
        }
        return false;
    }

    /**
     * Delete a single file if it exists.
     * @param path HDFS path
     * @return true if the file existed and was deleted
     */
    public boolean deleteFile(String path) {
        try {
            if (fileSystem.exists(new Path(path))) {
                return fileSystem.delete(new Path(path), true);
            }
        } catch (IllegalArgumentException | IOException e) {
            e.printStackTrace();
        }
        return false;
    }
}
package hadoop.hdfs;

import java.io.IOException;
import java.net.URISyntaxException;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsDemo {
    public static void main(String[] args) throws IOException, URISyntaxException {
        String url = "hdfs://192.168.100.10:9000";
        String newDir = "/d3";
        HdfsCommon hdfsCommon = new HdfsCommon(url);

        FileSystem fileSystem = hdfsCommon.getFileSystem();
        // check whether the directory already exists
        boolean isDir = fileSystem.isDirectory(new Path(url + newDir));
        if (!isDir) {
            // create the directory
            boolean createDir = hdfsCommon.createDir(newDir);
            System.out.println(createDir);
        }
        System.out.println(isDir);
        // upload a file into the target directory
        String srcFile = "F:/hello.txt";
        String desFile = "d3/test";
        boolean uploadFile = hdfsCommon.uploadFile(srcFile, "/"+desFile);
        System.out.println(uploadFile);
        // download
//      hdfsCommon.downloadFile("/"+desFile); // without a leading "/" the path is resolved against the user's home directory, e.g. /user/kobe/d3/test
        // delete the file
//      boolean deleteFile = hdfsCommon.deleteFile("/"+desFile);
//      System.out.println(deleteFile);
        boolean exists = hdfsCommon.getFileSystem().exists(new Path("/"+desFile));
        System.out.println("exists:"+exists);

        // list the contents of the directory
        FileStatus[] listfs = fileSystem.listStatus(new Path(newDir));
        for (int i = 0; i < listfs.length; i++) {
            String res = listfs[i].isDirectory() ? "directory" : "file";
            System.out.println(res);
            String per = listfs[i].getPermission().toString();
            long leng = listfs[i].getLen();
            long size = listfs[i].getBlockSize();
            Path path = listfs[i].getPath();
            System.out.println(per + "," + leng + "," + size + "," + path);
        }
    }
}
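
To run the demo against the cluster, one option is to package the two classes into a jar and launch it with the hadoop command, which puts the Hadoop libraries on the classpath for you (the jar name here is a placeholder):

  hadoop jar hdfs-demo.jar hadoop.hdfs.HdfsDemo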