Win Cluster Source Code

How to debug Hadoop 2.2.0 programs under Windows 7:

1. Environment preparation

1. Eclipse: Juno Service Release (4.2)
2. Operating system: Windows 7
3. Hadoop Eclipse plugin: hadoop-eclipse-plugin-2.2.0.jar
4. Hadoop cluster environment: Linux CentOS 6.5 in a virtual machine, single-node pseudo-distributed mode
5. Test program: Hello World (the word-count example below)

2. Issues to note

The first exception encountered is as follows:

java.io.IOException: Could not locate executable null\bin\winutils.exe in the Hadoop binaries.

Solution:

Hard-code a fixed return value in the checkHadoopHome() method of the org.apache.hadoop.util.Shell class, changing it to the local Hadoop path as follows:

private static String checkHadoopHome() {

    // first check the Dflag hadoop.home.dir with JVM scope
    // System.setProperty("hadoop.home.dir", "...");
    String home = System.getProperty("hadoop.home.dir");

    // fall back to the system/user-global env variable
    if (home == null) {
        home = System.getenv("HADOOP_HOME");
    }

    try {
        // couldn't find either setting for hadoop's home directory
        if (home == null) {
            throw new IOException("HADOOP_HOME or hadoop.home.dir are not set.");
        }

        if (home.startsWith("\"") && home.endsWith("\"")) {
            home = home.substring(1, home.length() - 1);
        }

        // check that the home setting is actually a directory that exists
        File homedir = new File(home);
        if (!homedir.isAbsolute() || !homedir.exists() || !homedir.isDirectory()) {
            throw new IOException("Hadoop home directory " + homedir
                + " does not exist, is not a directory, or is not an absolute path.");
        }

        home = homedir.getCanonicalPath();

    } catch (IOException ioe) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Failed to detect a valid hadoop home directory", ioe);
        }
        home = null;
    }

    // fix: hard-code the hadoop home for this machine
    home = "D:\\hadoop-2.2.0";
    return home;
}
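
Patching the Hadoop source is one way to do it. Note that the method above checks the hadoop.home.dir JVM property before the HADOOP_HOME environment variable, so a less invasive alternative is a one-line property set at program startup. The sketch below is an assumption (the class name and path are illustrative), not part of the original fix:

public class WinDebugBootstrap {

    public static void main(String[] args) {
        // set the property before any Hadoop class (e.g. org.apache.hadoop.util.Shell)
        // is loaded; the directory is assumed to contain bin\winutils.exe
        System.setProperty("hadoop.home.dir", "D:\\hadoop-2.2.0");
        // ... continue with the normal MapReduce job setup ...
    }

}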

The second exception: Could not locate executable d:\Hadoop\tar\hadoop-2.2.0\hadoop-2.2.0\bin\winutils.exe in the Hadoop binaries. The executable cannot be found on Windows; you can obtain it from /src codes/Hadoop-common-2.2.0.

The third exception:

Exception in thread "main" java.lang.IllegalArgumentException: Wrong FS: hdfs://192.168.130.54:19000/user/hmail/output/part-00000, expected: file:///
    at org.apache.hadoop.fs.FileSystem.checkPath(FileSystem.java:310)
    at org.apache.hadoop.fs.RawLocalFileSystem.pathToFile(RawLocalFileSystem.java:47)
    at org.apache.hadoop.fs.RawLocalFileSystem.getFileStatus(RawLocalFileSystem.java:357)
    at org.apache.hadoop.fs.FilterFileSystem.getFileStatus(FilterFileSystem.java:245)
    at org.apache.hadoop.fs.ChecksumFileSystem$ChecksumFSInputChecker.<init>(ChecksumFileSystem.java:125)
    at org.apache.hadoop.fs.ChecksumFileSystem.open(ChecksumFileSystem.java:283)
    at org.apache.hadoop.fs.FileSystem.open(FileSystem.java:356)
    at com.netease.hadoop.HDFSCatWithAPI.main(HDFSCatWithAPI.java:23)

When this exception occurs, the HDFS path is usually the problem. The solution is to copy the core-site.xml and hdfs-site.xml files from the cluster and put them in the src root directory of the Eclipse project.
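
If copying the cluster files is inconvenient, a rough equivalent is to set the default filesystem on the Configuration in code. This is only a sketch; the NameNode address is assumed from the stack trace above and must match your cluster:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class HdfsConfigSketch {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // fs.defaultFS is the 2.x key (fs.default.name is its deprecated 1.x alias);
        // with it set, hdfs:// paths resolve against the cluster instead of the
        // local file:/// default, which is what caused the "Wrong FS" error
        conf.set("fs.defaultFS", "hdfs://192.168.130.54:19000");
        FileSystem fs = FileSystem.get(conf);
        System.out.println(fs.getUri());
    }

}

The complete word-count test program used for debugging follows: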

package com.qin.wordcount;

import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

/***
 * Hadoop 2.2.0 test:
 * word count example
 *
 * @author Qin Dongliang
 *
 * Hadoop technical exchange QQ group: 376932160
 *
 * */
public class MyWordCount {

    /**
     * Mapper
     * **/
    private static class WMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

        private IntWritable count = new IntWritable(1);
        private Text text = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // each input line has the form word#number
            String values[] = value.toString().split("#");
            // System.out.println(values[0] + "========" + values[1]);
            count.set(Integer.parseInt(values[1]));
            text.set(values[0]);
            context.write(text, count);
        }

    }

    /**
     * Reducer
     * **/
    private static class WReducer extends Reducer<Text, IntWritable, Text, Text> {

        private Text t = new Text();

        @Override
        protected void reduce(Text key, Iterable<IntWritable> value, Context context)
                throws IOException, InterruptedException {
            // sum all counts for this word
            int count = 0;
            for (IntWritable i : value) {
                count += i.get();
            }
            t.set(count + "");
            context.write(key, t);
        }

    }

    /**
     * Changes:
     * (1) hard-coded the checkHadoopHome path in the Shell source code
     * (2) line 974, inside FileUtils
     * **/
    public static void main(String[] args) throws Exception {

        // String path1 = System.getenv("HADOOP_HOME");
        // System.out.println(path1);
        // System.exit(0);

        JobConf conf = new JobConf(MyWordCount.class);
        // Configuration conf = new Configuration();
        // conf.set("mapred.job.tracker", "192.168.75.130:9001");
        // read the data fields yourself
        // conf.setJar("tt.jar");
        // note: the line above must come first and be initialized, otherwise an error is reported

        /** job task **/
        Job job = new Job(conf, "test word count");
        job.setJarByClass(MyWordCount.class);
        System.out.println("mode: " + conf.get("mapred.job.tracker"));
        // job.setCombinerClass(PCombine.class);
        // job.setNumReduceTasks(3); // set to 3
        job.setMapperClass(WMapper.class);
        job.setReducerClass(WReducer.class);
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        String path = "hdfs://192.168.46.28:9000/qin/output";
        FileSystem fs = FileSystem.get(conf);
        Path p = new Path(path);
        if (fs.exists(p)) {
            // delete the output path if it already exists, so the job can rerun
            fs.delete(p, true);
            System.out.println("The output path exists and has been deleted!");
        }
        FileInputFormat.setInputPaths(job, "hdfs://192.168.46.28:9000/qin/input");
        FileOutputFormat.setOutputPath(job, p);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

}
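
For reference, here is a hypothetical run inferred from the code above: the mapper splits each input line on "#" and emits (word, number), and the reducer sums the numbers per word, writing tab-separated output.

Input (lines in hdfs://192.168.46.28:9000/qin/input):

hadoop#1
hadoop#2
hdfs#1

Output (part-r-00000):

hadoop	3
hdfs	1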
