hadoop window 遠端提交job到叢集並執行

我就是曹總發表於2019-05-11

1.複製Hadoop的5個配置檔案放到src目錄下面:core-site.xml,hdfs-site.xml,log4j.properties,mapred-site.xml,yarn-site.xml

2.配置mapred-site.xml

<configuration>
    <!-- Run MapReduce jobs on YARN instead of the local/classic framework. -->
    <property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
    </property>
    <!-- Tell the Windows client that the cluster nodes run Linux, so it
         generates Linux-style paths/commands for the remote side. -->
    <property>
        <name>mapred.remote.os</name>
        <value>Linux</value>
    </property>
    <!-- Enable cross-platform submission (Windows client -> Linux cluster). -->
    <property>
        <name>mapreduce.app-submission.cross-platform</name>
        <value>true</value>
    </property>
    <!-- Classpath used by MR tasks on the cluster nodes. Must be absolute
         paths; environment variables like $HADOOP_HOME are not expanded
         here (see the note below this snippet). -->
    <property>
    <name>mapreduce.application.classpath</name>
    <value>
        /opt/hadoop-2.6.0/etc/hadoop,
        /opt/hadoop-2.6.0/share/hadoop/common/*,
        /opt/hadoop-2.6.0/share/hadoop/common/lib/*,
        /opt/hadoop-2.6.0/share/hadoop/hdfs/*,
        /opt/hadoop-2.6.0/share/hadoop/hdfs/lib/*,
        /opt/hadoop-2.6.0/share/hadoop/mapreduce/*,
        /opt/hadoop-2.6.0/share/hadoop/mapreduce/lib/*,
        /opt/hadoop-2.6.0/share/hadoop/yarn/*,
        /opt/hadoop-2.6.0/share/hadoop/yarn/lib/*
    </value>
</property>    
    <!-- Job history server RPC endpoint on the cluster. -->
    <property>
        <name>mapreduce.jobhistory.address</name>
        <value>master:10020</value>
    </property>
    <!-- Job history server web UI endpoint. -->
       <property>
                <name>mapreduce.jobhistory.webapp.address</name>
                <value>master:19888</value>
        </property>
</configuration>



注意mapreduce.application.classpath一定要寫絕對路徑,不要使用$HADOOP_HOME之類的環境變數(客戶端不會展開它們),我這裡用環境變數就會報錯


3:修改yarn-site.xml

<configuration>
<!-- Site specific YARN configuration properties -->
  <!-- Auxiliary shuffle service required by MapReduce on YARN. -->
  <property>
        <name>yarn.nodemanager.aux-services</name>
        <value>mapreduce_shuffle</value>
    </property>
    <!-- ResourceManager RPC address the remote client submits jobs to. -->
    <property>
        <name>yarn.resourcemanager.address</name>
        <value>master:8032</value>
    </property>
<!-- Classpath for YARN containers on the cluster nodes. Must be absolute
     paths; environment variables like $HADOOP_HOME are not expanded here
     (see the note below this snippet). -->
<property>
    <name>yarn.application.classpath</name>
    <value>
        /opt/hadoop-2.6.0/etc/hadoop,
        /opt/hadoop-2.6.0/share/hadoop/common/*,
        /opt/hadoop-2.6.0/share/hadoop/common/lib/*,
        /opt/hadoop-2.6.0/share/hadoop/hdfs/*,
        /opt/hadoop-2.6.0/share/hadoop/hdfs/lib/*,
        /opt/hadoop-2.6.0/share/hadoop/mapreduce/*,
        /opt/hadoop-2.6.0/share/hadoop/mapreduce/lib/*,
        /opt/hadoop-2.6.0/share/hadoop/yarn/*,
        /opt/hadoop-2.6.0/share/hadoop/yarn/lib/*
    </value>
  </property>
</configuration>



注意yarn.application.classpath同樣一定要寫絕對路徑,不要使用$HADOOP_HOME之類的環境變數


4:看下我的程式碼

package com.gaoxing.hadoop;

import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.GenericOptionsParser;

public class WordCount {
    //繼承mapper介面,設定map的輸入型別為<Object,Text>
    //輸出型別為<Text,IntWritable>
    public static class Map extends Mapper<Object,Text,Text,IntWritable>{
        //one表示單詞出現一次
        private static IntWritable one = new IntWritable(1);
        //word儲存切下的單詞
        private Text word = new Text();
        public void map(Object key,Text value,Context context) throws IOException,InterruptedException{
            //對輸入的行切詞
            StringTokenizer st = new StringTokenizer(value.toString());
            while(st.hasMoreTokens()){
                word.set(st.nextToken());//切下的單詞存入word
                context.write(word, one);
            }
        }
    }
    //繼承reducer介面,設定reduce的輸入型別<Text,IntWritable>
    //輸出型別為<Text,IntWritable>
    public static class Reduce extends Reducer<Text,IntWritable,Text,IntWritable>{
        //result記錄單詞的頻數
        private static IntWritable result = new IntWritable();
        public void reduce(Text key,Iterable<IntWritable> values,Context context) throws IOException,InterruptedException{
            int sum = 0;
            //對獲取的<key,value-list>計算value的和
            for(IntWritable val:values){
                sum += val.get();
            }
            //將頻數設定到result
            result.set(sum);
            //收集結果
            context.write(key, result);
        }
    }
    /**
     * @param args
     */
    public static void main(String[] args) throws Exception{
        Configuration conf = new Configuration();
       // conf.set("mapred.remote.os","Linux");
       // conf.set("yarn.resourcemanager.address","master:8032");
       // conf.set("mapreduce.framework.name","yarn");
        conf.set("mapred.jar","D:\\IdeaProjects\\hadooplearn\\out\\artifacts\\hadoo.jar");
        //conf.set("mapreduce.app-submission.cross-platform","true");
        Job job = Job.getInstance(conf);
        job.setJobName("test");
        //配置作業各個類
        job.setJarByClass(WordCount.class);
        job.setMapperClass(Map.class);
        job.setCombinerClass(Reduce.class);
        job.setReducerClass(Reduce.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path("hdfs://master:9000/tmp/hbase-env.sh"));
        FileOutputFormat.setOutputPath(job, new Path("hdfs://master:9000/tmp/out11"));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

}



conf.set("mapred.jar","D:\\IdeaProjects\\hadooplearn\\out\\artifacts\\hadoo.jar");這是最重要的一句:它指定本地構建好的作業jar的路徑,讓客戶端把jar上傳到叢集;不設定的話提交時會報找不到作業jar("No job jar file set")的錯誤


我就是曹總最後編輯於:4年前

內容均為作者獨立觀點,不代表八零IT人立場,如涉及侵權,請及時告知。

相關文章