package my.test;

import java.io.IOException;

import org.apache.spark.deploy.SparkSubmit;

import com.huangyueran.spark.utils.Constant;

public class Main {
    public static void main(String[] args) throws IOException {
        // Run as the "hadoop" user so HDFS/YARN accept the submission.
        System.setProperty("HADOOP_USER_NAME", "hadoop");
        System.setProperty("user.name", "hadoop");
        //System.setProperty("HADOOP_CONF_DIR", "C:\\eclipse-workspace\\SparkDemo\\src\\main\\resources");
        //System.setProperty("HADOOP_CONF_DIR", "C:\\my\\soft\\hadoop\\hadoop-2.8.5\\hadoop-2.8.5\\etc\\hadoop");

        // Sanity-check that the Hadoop client configuration is visible.
        System.out.println("------------" + System.getenv("HADOOP_CONF_DIR"));
        System.out.println("------------" + System.getenv("HADOOP_HOME"));

        String appName = "wordCount-yarn-cluster";
        String className = "my.test.WordCount";
        String path = "C:\\eclipse-workspace\\SparkDemo\\target\\SparkDemo-1.0-SNAPSHOT.jar";
        path = Constant.HDFS_FILE_PREX + "/user/zzm/SparkDemo-1.0-SNAPSHOT.jar";

        String[] arg0 = new String[]{
                // "--jars", Constant.HDFS_FILE_PREX + "/user/zzm/spark-lib",
                "--master", "yarn",             // cluster manager; the ResourceManager address is read from HADOOP_CONF_DIR
                "--deploy-mode", "cluster",
                "--name", appName,
                "--class", className,           // main class to run
                //"--spark.yarn.archive", Constant.HDFS_FILE_PREX + "/user/zzm/spark-lib",
                "--executor-memory", "2G",
                "--total-executor-cores", "10", // only honored by standalone/Mesos; on YARN use --num-executors instead
                "--executor-cores", "2",
                path,                           // the application jar on the Linux host; may also be an HDFS path
                // "LR", "20180817111111", "66" // arguments passed to the jar's main method; note the flat argument style
        };
        SparkSubmit.main(arg0);
    }
}
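Invoking SparkSubmit.main directly works, but it is an internal entry point: it runs inside the caller's JVM and calls System.exit on failure. A minimal sketch of the supported programmatic alternative, org.apache.spark.launcher.SparkLauncher (from the spark-launcher artifact), reusing the jar path, class name, and resource settings above; the class name LauncherMain is made up for illustration, and SPARK_HOME must point at a local Spark installation (or be supplied via setSparkHome):

package my.test;

import org.apache.spark.launcher.SparkAppHandle;
import org.apache.spark.launcher.SparkLauncher;

public class LauncherMain {
    public static void main(String[] args) throws Exception {
        // Launches a separate spark-submit process instead of running inside this JVM.
        // Assumes the SPARK_HOME environment variable is set.
        SparkAppHandle handle = new SparkLauncher()
                .setAppResource("hdfs:///user/zzm/SparkDemo-1.0-SNAPSHOT.jar")
                .setMainClass("my.test.WordCount")
                .setMaster("yarn")
                .setDeployMode("cluster")
                .setAppName("wordCount-yarn-cluster")
                .setConf(SparkLauncher.EXECUTOR_MEMORY, "2G")
                .setConf(SparkLauncher.EXECUTOR_CORES, "2")
                .startApplication();

        // Poll until YARN reports a terminal state (FINISHED, FAILED or KILLED).
        while (!handle.getState().isFinal()) {
            Thread.sleep(1000);
        }
        System.out.println("Final state: " + handle.getState());
    }
}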
The submission then kept hanging. Adding the following entry to spark-defaults.conf resolved it:

#spark.yarn.jar hdfs://udp02:8020/user//spark-lib/*.jar
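The hang happens because, when no pre-staged Spark libraries are configured, spark-submit zips everything under $SPARK_HOME/jars and uploads it to HDFS on every submission, which can take minutes on a slow link. A minimal spark-defaults.conf sketch, assuming Spark 2.x (where the property is spark.yarn.jars, plural; spark.yarn.jar is the Spark 1.x spelling) and the same /user/zzm/spark-lib HDFS layout used in the code above:

# Jars staged once on HDFS; the application fetches them from here instead of re-uploading
spark.yarn.jars      hdfs://udp02:8020/user/zzm/spark-lib/*.jar
# Alternatively, a single pre-built archive (set one or the other, not both)
# spark.yarn.archive   hdfs://udp02:8020/user/zzm/spark-lib.zip

Stage the jars once with something like hdfs dfs -mkdir -p /user/zzm/spark-lib followed by hdfs dfs -put $SPARK_HOME/jars/* /user/zzm/spark-lib/.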