hadoop 自定義格式化輸出
// Hadoop custom formatted output: demonstrates a user-defined OutputFormat
// that routes records into two files ("out.a" / "out.b") by key length.
import java.io.IOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Driver for the custom-OutputFormat demo job.
 *
 * <p>args[0] = input path, args[1] = output path (must not exist yet).
 */
public class CustomizeOutputFormat {

    static final Log LOG = LogFactory.getLog(CustomizeOutputFormat.class);

    public static void main(String[] args)
            throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        job.setJarByClass(CustomizeOutputFormat.class);
        job.setMapperClass(CustMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        // Plug in the user-defined output format (the point of this demo).
        job.setOutputFormatClass(CustOutputFormat.class);
        String jobName = "Customize outputformat test!";
        job.setJobName(jobName);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        boolean b = job.waitForCompletion(true);
        if (b) {
            LOG.info("Job " + jobName + " is done.");
        } else {
            LOG.info("Job " + jobName + " is going wrong, now exit.");
            // FIX: was System.exit(0) — a failed job must report a
            // non-zero status so callers/schedulers can detect it.
            System.exit(1);
        }
    }
}

/**
 * Splits each input line on a single tab into (key, value).
 *
 * Example input (tab-separated):
 *   boys     girls
 *   friends  goodbye
 *   down     up
 *   fly      to
 *   neibors  that
 */
class CustMapper extends Mapper<LongWritable, Text, Text, Text> {

    String[] textIn = null;
    Text outkey = new Text();
    Text outvalue = new Text();

    @Override
    protected void map(LongWritable key, Text value,
            Mapper<LongWritable, Text, Text, Text>.Context context)
            throws IOException, InterruptedException {
        textIn = value.toString().split("\t");
        // FIX: guard against lines without a tab — the original indexed
        // textIn[1] unconditionally and crashed the task on malformed input.
        if (textIn.length < 2) {
            return;
        }
        outkey.set(textIn[0]);
        outvalue.set(textIn[1]);
        context.write(outkey, outvalue);
    }
}

/**
 * Custom OutputFormat: writes keys of length >= 5 to "out.a" and all
 * shorter keys to "out.b", both under the job's output directory.
 */
class CustOutputFormat extends FileOutputFormat<Text, Text> {

    @Override
    public RecordWriter<Text, Text> getRecordWriter(TaskAttemptContext context)
            throws IOException, InterruptedException {
        Configuration conf = context.getConfiguration();
        // FIX: use the cached FileSystem.get(conf) instead of
        // FileSystem.newInstance(conf) — the latter creates a brand-new
        // FileSystem object that was never closed (resource leak).
        FileSystem fs = FileSystem.get(conf);
        // Resolve the job's configured output directory.
        Path path = CustOutputFormat.getOutputPath(context);
        URI uri = path.toUri();
        // Create the two target files and hand their streams to the writer.
        FSDataOutputStream foa = fs.create(new Path(uri.toString() + "/out.a"));
        FSDataOutputStream fob = fs.create(new Path(uri.toString() + "/out.b"));
        return new CustRecordWriter(foa, fob);
    }

    /** RecordWriter that dispatches each record to one of two streams. */
    class CustRecordWriter extends RecordWriter<Text, Text> {

        FSDataOutputStream foa = null;
        FSDataOutputStream fob = null;

        CustRecordWriter(FSDataOutputStream foa, FSDataOutputStream fob) {
            this.foa = foa;
            this.fob = fob;
        }

        @Override
        public void write(Text key, Text value)
                throws IOException, InterruptedException {
            String mText = key.toString();
            // FIX: the original used writeUTF(), which prefixes every record
            // with a 2-byte modified-UTF-8 length header and corrupts what is
            // meant to be a plain-text file. Write raw UTF-8 bytes instead.
            byte[] line = (mText + "\t" + value.toString() + "\n")
                    .getBytes(StandardCharsets.UTF_8);
            // Route by key length: long keys to out.a, short keys to out.b.
            if (mText.length() >= 5) {
                foa.write(line);
            } else {
                fob.write(line);
            }
        }

        @Override
        public void close(TaskAttemptContext context)
                throws IOException, InterruptedException {
            // Close both output streams when the task finishes.
            if (foa != null) {
                foa.close();
            }
            if (fob != null) {
                fob.close();
            }
        }
    }
}

// ---------------------------------------------------------------------------
// Second source file from the post: using MultipleInputs to feed one job
// from several input paths (each path may use its own InputFormat/Mapper).
// ---------------------------------------------------------------------------
package hgs.multipuleinput;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.MultipleInputs;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import hgs.custsort.SortBean;
import hgs.custsort.SortDriver;
import hgs.custsort.SortMapper;
import hgs.custsort.SortReducer;

/** Driver showing MultipleInputs: /sort and /sort1 both feed SortMapper. */
public class MultipuleInputDriver {

    public static void main(String[] args)
            throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        job.setJarByClass(SortDriver.class);
        job.setMapperClass(SortMapper.class);
        job.setReducerClass(SortReducer.class);
        job.setOutputKeyClass(SortBean.class);
        job.setOutputValueClass(NullWritable.class);
        // Each input path is registered with its own InputFormat and Mapper;
        // here both paths happen to use the same pair.
        MultipleInputs.addInputPath(job, new Path("/sort"),
                TextInputFormat.class, SortMapper.class);
        MultipleInputs.addInputPath(job, new Path("/sort1"),
                TextInputFormat.class, SortMapper.class);
        // FileInputFormat.setInputPaths(job, new Path("/sort"));
        FileOutputFormat.setOutputPath(job, new Path("/sortresult"));
        // FIX: dropped redundant "== true" comparison on the boolean result.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
來自 “ ITPUB部落格 ” ,連結:http://blog.itpub.net/31506529/viewspace-2213389/,如需轉載,請註明出處,否則將追究法律責任。
相關文章
- Hadoop自定義輸出排序方式Hadoop排序
- 表單驗證自定義格式輸出
- 格式化輸出
- 格式化輸入和輸出
- C++格式化輸入輸出C++
- hadoop mapreduce自定義分割槽Hadoop
- hadoop自定義許可權Hadoop
- JavaScript自定義時間日期格式化JavaScript
- pythonformat格式化輸出PythonORM
- 字串拼接格式化輸出字串
- Python格式化輸出Python
- java日期格式化輸出Java
- SQL*PLUS 格式化輸出SQL
- log4j自定義輸出多個檔案
- Artisan 自定義輸出格式
- 自定義UITextView輸入框UITextView
- Hadoop中自定義計數器Hadoop
- hadoop輸出設定Hadoop
- 自定義 ocelot 中介軟體輸出自定義錯誤資訊
- 格式化輸出變數工具變數
- python字串格式化輸出Python字串格式化
- 【Python】格式化字串輸出Python字串
- SD--如何在輸出控制中增加自定義欄位
- VS code 自定義快捷輸入
- quill 富文字編輯器自定義格式化UI
- objc系列譯文(7.5):自定義格式化程式OBJ
- YTUOJ-學生成績輸入和輸出(使用者自定義型別)型別
- Go 語言中的格式化輸出Go
- shell指令碼(3)-格式化輸出指令碼
- Python 字串格式化輸出方式Python字串格式化
- C#數字格式化輸出C#
- AndroidKeyboard(自定義輸入法)Android
- VSCode自定義快捷輸入VSCode
- 使用檢視格式化來自定義SharePoint
- IDEA自定義設定快捷鍵輸出你想要的語句!Idea
- 操作日誌記錄(包括輸出至自定義日誌檔案)
- Java™ 教程(格式化數字列印輸出)Java
- Golang中的格式化時間輸出Golang