(Help request) I wrote a simple MapReduce program and ran into the problem below. I can't figure it out on my own; any help from the experts would be appreciated.

The error is:

Exception in thread "main" java.io.IOException: The ownership on the staging directory /tmp/hadoop-yarn/staging/root/.staging is not as expected. It is owned by xch. The directory must be owned by the submitter root or by root
    at org.apache.hadoop.mapreduce.JobSubmissionFiles.getStagingDir(JobSubmissionFiles.java:120)
    at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:144)
    at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1290)
    at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1287)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
    at org.apache.hadoop.mapreduce.Job.submit(Job.java:1287)
    at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1308)
    at com.xch.flownumber.Drivers.main(Drivers.java:42)
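
The message says the YARN staging directory /tmp/hadoop-yarn/staging/root/.staging already exists on HDFS and is owned by user xch, while this job is being submitted as root, so the submission-time ownership check fails. Two common ways out are deleting the stale staging directory (hdfs dfs -rm -r /tmp/hadoop-yarn/staging/root/.staging, so it gets recreated with the right owner) or submitting as the user who owns it. A minimal sketch of the latter, assuming the job should run as xch (the property must be set before any Hadoop Configuration is created):

//Hypothetical wrapper: when Hadoop security is off, the HADOOP_USER_NAME
//system property controls which user the job is submitted as.
public class SubmitAsXch {
	public static void main(String[] args) throws Exception {
		System.setProperty("HADOOP_USER_NAME", "xch"); //set before touching any Hadoop classes
		Drivers.main(args);                            //then delegate to the original driver
	}
}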

Mapper:

package com.xch.flownumber;

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class FlowMapper extends Mapper<LongWritable, Text, Text, Flowbean>{
	Flowbean v = new Flowbean();
	Text k = new Text();
	@Override
	protected void map(LongWritable key, Text value,Context context)
			throws IOException, InterruptedException {
		//1. Read one line of input
		String line = value.toString();
		//2. Split it on tabs
		String[] files = line.split("\t");
		
		//3. Populate the bean
		//phone number
		String phonenumber = files[1];
		//upstream and downstream traffic
		long upFlow = Long.parseLong(files[files.length-3]);
		long downFlow = Long.parseLong(files[files.length-2]);
	
		v.set(upFlow, downFlow);
		k.set(phonenumber);
		
		//4. Emit the key/value pair
		context.write(k, v);
		
	}
	

}
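
For reference, the mapper assumes tab-separated input where field 1 is the phone number and the third- and second-from-last fields are the upstream and downstream byte counts. A made-up sample line (illustrative, not from the original dataset):

1363157985066	13726230503	120.196.100.82	i02.c.aliimg.com	24	27	2481	24681	200

Here files[1] is 13726230503, files[files.length-3] is 2481 (upFlow), and files[files.length-2] is 24681 (downFlow).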

Reducer:

package com.xch.flownumber;

import java.io.IOException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class FlowReducer extends Reducer<Text, Flowbean, Text, Flowbean>{
	@Override
	protected void reduce(Text key, Iterable<Flowbean> values, Context context)
			throws IOException, InterruptedException {
		// per phone number: 1372623050   2481	24681	sum
		long sum_upFlow = 0;
		long sum_downFlow = 0;
		for (Flowbean flowbean : values) {
			sum_upFlow += flowbean.getUpFlow();
			sum_downFlow += flowbean.getDownFlow();
		}
		Flowbean flowbean = new Flowbean(sum_upFlow, sum_downFlow);

		//emit the aggregated result
		context.write(key, flowbean);
	}
}
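
As a worked example: if phone number 13726230503 shows up in two map outputs with (upFlow, downFlow) of (2481, 24681) and (100, 200), the reducer emits a single record whose Flowbean.toString() renders as

13726230503	2581	24881	27462

i.e. total up, total down, and their sum. The figures are illustrative only.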

Drivers:

package com.xch.flownumber;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class Drivers {
public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
	
	//1. Build the job configuration
	Configuration conf = new Configuration();
	//default file system the job will access (note: the key is case-sensitive, fs.defaultFS, not fs.defaultFs)
	conf.set("fs.defaultFS", "hdfs://hdp-1:9000");
	//where the job is submitted to run
	conf.set("mapreduce.framework.name", "yarn");
	conf.set("yarn.resourcemanager.hostname", "hdp-1");
	//required when submitting this job from a Windows client (cross-platform submission)
	conf.set("mapreduce.app-submission.cross-platform", "true");
	Job job = Job.getInstance(conf);
	//2. Point the framework at the jar containing this driver
	job.setJarByClass(Drivers.class);
	//3. Wire up the mapper and reducer classes
	job.setMapperClass(FlowMapper.class);
	job.setReducerClass(FlowReducer.class);
	//mapper output key/value types
	job.setMapOutputKeyClass(Text.class);
	job.setMapOutputValueClass(Flowbean.class);
	//final output key/value types
	job.setOutputKeyClass(Text.class);
	job.setOutputValueClass(Flowbean.class);
	//input and output paths
	FileInputFormat.setInputPaths(job, new Path("/wordcount/input"));
	FileOutputFormat.setOutputPath(job, new Path("/wordcount/output"));
	//submit the job and wait for it to finish
	boolean result = job.waitForCompletion(true);
	System.exit(result?0:1);
	
}
}
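
One caveat when launching this driver straight from an IDE instead of with hadoop jar: setJarByClass only helps once the classes are packaged, so remote submission can fail to ship the job jar. A common workaround is to name the built jar explicitly via the standard mapreduce.job.jar property (the path below is a placeholder, not from the original post):

	//hypothetical addition before Job.getInstance(conf):
	conf.set("mapreduce.job.jar", "target/flownumber.jar"); //wherever the project jar is built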

JavaBean (Flowbean):

package com.xch.flownumber;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Writable;

public class Flowbean implements Writable{

	private long upFlow;    //upstream traffic
	private long downFlow;  //downstream traffic
	private long sumFlow;   //total traffic
	
	//no-arg constructor, needed so the framework can instantiate the bean via reflection
	public Flowbean() {
	}
	
	public Flowbean(long upFlow, long downFlow) {
		this.upFlow = upFlow;
		this.downFlow = downFlow;
		this.sumFlow = upFlow + downFlow;
	}
	public void set(long upFlow , long downFlow) {
		this.upFlow = upFlow;
		this.downFlow = downFlow;
		this.sumFlow = upFlow + downFlow;
	}
	//serialization method
	@Override
	public void write(DataOutput out) throws IOException {
		out.writeLong(upFlow);
		out.writeLong(downFlow);
		out.writeLong(sumFlow);
	}
	
	
	//deserialization method
	//NB: fields must be read back in exactly the same order they were written
	@Override
	public void readFields(DataInput in) throws IOException {
		this.upFlow = in.readLong();
		this.downFlow = in.readLong();
		this.sumFlow = in.readLong();
	}

	@Override
	public String toString() { 	
		return upFlow + "\t" + downFlow + "\t" + sumFlow;
	}

	public long getUpFlow() {
		return upFlow;
	}

	public void setUpFlow(long upFlow) {
		this.upFlow = upFlow;
	}

	public long getDownFlow() {
		return downFlow;
	}

	public void setDownFlow(long downFlow) {
		this.downFlow = downFlow;
	}

	public long getSumFlow() {
		return sumFlow;
	}

	public void setSumFlow(long sumFlow) {
		this.sumFlow = sumFlow;
	}
	
	
}
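
A quick way to sanity-check the Writable implementation is a round trip through in-memory streams (a minimal sketch; the test class name is illustrative):

package com.xch.flownumber;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class FlowbeanRoundTrip {
	public static void main(String[] args) throws IOException {
		Flowbean original = new Flowbean(2481, 24681);

		//serialize into a byte buffer
		ByteArrayOutputStream bytes = new ByteArrayOutputStream();
		original.write(new DataOutputStream(bytes));

		//deserialize from the same bytes
		Flowbean copy = new Flowbean();
		copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

		//both lines should print: 2481	24681	27162
		System.out.println(original);
		System.out.println(copy);
	}
}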
