hadoop個人理解

瓜瓜東西發表於2014-10-10

 

2 hadoop三種執行方式(本地、偽分散式、完全分散式)

Local:

需要修改 hadoop-site.xml 中的 JAVA_HOME 設定

hadoop-default.xml 是預設配置,不需要更改

命令:

hadoop/bin/hadoop demo.hadoop.HadoopGrep log檔案所在目錄 任意的輸出目錄 grep的字串

3 重寫例子

package com.hadoop.example; 

import java.io.File;
import java.util.ArrayList;
import java.util.List;

import org.apache.catalina.filters.AddDefaultCharsetFilter;

import com.sun.org.apache.bcel.internal.generic.NEW;


/**
 * Walk-through of the main actors in a MapReduce job submission.
 * Instantiates a Mapper/Reducer/InputFormat/OutputFromat and submits a
 * Job to a JobTracker; none of the objects do real work in this demo.
 */
public class HReal {

	public static void main(String[] args) {
		
	}

	/** Wires the demo components together and submits one job. */
	public void work(){
		JobConf conf = null;
		Mapper mapper = new Mapper();
		Reducer reducer = new Reducer();
		InputFormat inputFormat = new InputFormat();
		OutputFromat outputFromat = new OutputFromat();
		String inpathString ="";
		String outpathString="";
		
		JobTracker jobTracker = new JobTracker();
		Job job = new Job();
		jobTracker.addJob(job);
		// BUG FIX: original wrote `if (job.flg = true)`, which ASSIGNS true
		// to the flag instead of comparing, so the branch always ran.
		if(job.flg){
			
		}
		
	}
}

/** Holds one key/value pair and turns it into a SequenceFile record. */
class Mapper{
	// Last key/value pair handed to deal(String, String).
	private String kString;
	private String vString;

	/** Stores the given key/value pair and emits a new SequenceFile. */
	public SequenceFile deal(String kString,String vString){
		this.kString = kString;
		this.vString = vString;
		return new SequenceFile();
	}

	/** Placeholder overload for class-typed input; currently a no-op. */
	public void deal(Class c){
	}

	/** Placeholder overload for raw string input; currently a no-op. */
	private void deal(String s){
	}
}
// Placeholder for Hadoop's SequenceFile: the key/value container that a
// Mapper emits its output into (see Mapper.deal(String, String)).
class SequenceFile{
	
}
/** Collapses intermediate map output into one final result string. */
class Reducer{
	// Final reduced result; stays null until a reduce has produced one.
	private String resultString;
	// Intermediate data carried between merge steps.
	private String middleString;

	/** Returns the reduced result (null when nothing was reduced yet). */
	public  String  deal(){
		return resultString;
	}
}
/** Parses raw input files into records and splits for the map phase. */
class InputFormat{

	/** Creates a fresh reader for pulling records out of a split. */
	public RecordReader newReader(){
		return new RecordReader();
	}

	/** Default formatting: returns the line unchanged. */
	public Object format(String line){
		return line;
	}

	/**
	 * Formats a line according to the requested target type: a Class
	 * target yields the line's runtime class, any other target (including
	 * a String) yields the line itself.
	 */
	public Object  format(String line, Object type){
		return (type instanceof Class) ? line.getClass() : line;
	}

	/** Splits the file at the given path into processable chunks. */
	public FileSplist getSplits(String pathfile){
		File file = new File(pathfile);
		// Split-file handling would happen here (stubbed out).
		return new FileSplist();
	}
}
// Placeholder for one chunk of an input file produced by
// InputFormat.getSplits(). NOTE(review): name looks like a typo for
// "FileSplit"; kept as-is because other classes in this file reference it.
class FileSplist{
	
}
// Placeholder for the output side of a job. NOTE(review): name looks like
// a typo for "OutputFormat"; kept as-is because other classes reference it.
class OutputFromat{
	// Writes one final result string to the job output (no-op stub).
	public void write(String result){
		
	}
}
// Placeholder for HDFS's NameNode (the filesystem metadata master).
class NameNode{
	
}
// Placeholder for HDFS's DataNode (stores the actual file blocks).
class DataNode{
	
}
// The cluster's master role: pairs the MapReduce scheduler (JobTracker)
// with the HDFS metadata service (NameNode).
class Master{
	private JobTracker jobTracker;
	private NameNode nameNode;
}
/**
 * Master-side scheduler: accepts submitted jobs and hands out the next
 * one to run on request from a TaskTracker.
 */
class JobTracker{
	private MapReduce mapReduce;
	private InputFormat inputFormat;
	// Submitted jobs, in submission order.
	List<Job> jobs = new ArrayList<Job>();

	/** Registers a newly submitted job. */
	public void addJob(Job job){
		jobs.add(job);
	}

	/** Looks up the job at the current index, if one exists. */
	public void nextJob(){
		// BUG FIX: original called jobs.get(getCurrentFlg()) unguarded and
		// threw IndexOutOfBoundsException whenever fewer than two jobs had
		// been submitted (getCurrentFlg() is stubbed to return 1).
		int idx = getCurrentFlg();
		if(idx >= 0 && idx < jobs.size()){
			jobs.get(idx);
		}
	}

	/** Index of the job considered "current" (stubbed constant). */
	public int getCurrentFlg(){
		return 1;
	}

	/** Placeholder: report the current job/cluster state. */
	public void currState(){
		
	}
}
// Placeholder representing the MapReduce computation engine itself.
class MapReduce{
	
}
// Placeholder for a job's configuration (paths, mapper/reducer setup, ...).
class JobConf{
	
}
class Job{
	static boolean flg = false;
	public void run(){
		System.out.println("deal");
		flg=true;
	}
}
/** Bundles the input splits assigned to one map task. */
class MapperTask{
	// Splits this task is responsible for, in assignment order.
	List<FileSplist> lst = new ArrayList<FileSplist>();
	InputFormat inputFormat;

	/** Assigns one more split to this task. */
	public void addTask(FileSplist fileSplist){
		lst.add(fileSplist);
	}

	/** Placeholder: read records from the assigned splits (no-op stub). */
	public void read(){
	}
}
/** FIFO holder for pending map tasks awaiting a TaskTracker. */
class Queue{
	// Queued map tasks in arrival order.
	// FIX: was a raw List/ArrayList; now parameterized to avoid unchecked
	// warnings and client-side casts.
	List<MapperTask> lst = new ArrayList<MapperTask>();

	/** Appends a map task to the end of the queue. */
	public void addMapperTask(MapperTask mapperTask){
		lst.add(mapperTask);
	}
}
/**
 * Worker-side agent: executes tasks and periodically reports progress
 * back to the JobTracker.
 */
class TaskTracker{
	// Number of tasks finished in the current cycle (3 == demo job done).
	int taskCount=0;
	JobTracker jobTracker;
	
	/** Asks the JobTracker for the next job to run. */
	public void nextMapReduce(){
		jobTracker.nextJob();
	}

	/** Placeholder for actual task execution (no-op stub). */
	public void deal(){
		
	}

	// Periodically report task progress to the JobTracker, then sleep.
	// NOTE(review): name looks like a typo for "schedule"; kept for
	// compatibility with existing callers.
	public void schele(){
		System.out.println("當前任務完成了多少了");
		jobTracker.currState();
		if(taskCount ==3){
			nextMapReduce();
		}
		try {
			Thread.sleep(10000);
		} catch (InterruptedException e) {
			// BUG FIX: restore the interrupt status instead of only printing
			// the trace, so callers can still observe the interruption.
			Thread.currentThread().interrupt();
			e.printStackTrace();
		}
	}
}
// Placeholder for the reader that iterates records within one split
// (created by InputFormat.newReader()).
class RecordReader{
	
}
/** Runs the reduce phase: sort, merge, reduce, then write the output. */
class ReducerTask{
	Reducer reducer;
	OutputFromat outputFromat;

	/** Placeholder: sort the intermediate map output (no-op stub). */
	public void sort(){
	}

	/** Placeholder: merge sorted runs into one stream (no-op stub). */
	public void merge(){
	}

	/** Executes the full reduce pipeline and writes the result. */
	public void deal(){
		sort();
		merge();
		outputFromat.write(reducer.deal());
	}
}


 

 

相關文章