How to run a MapReduce program on Windows

2024-12-01 19:04:14
Recommended answer (1)
Answer 1:

Code
// Imports needed by this snippet (Hadoop 1.x / HBase 0.94-era APIs):
import java.security.PrivilegedAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.security.UserGroupInformation;

public static void main(String[] args) {
    // Run the MapReduce job against a remote cluster from Eclipse on a local Windows machine.
    // Create a remote user and submit the job as that user:
    // the job-submission code goes inside the run() method.
    UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hadoop");
    ugi.doAs(new PrivilegedAction<Void>() {
        public Void run() {
            try {
                // Set the separator used for referenced jars. Linux normally uses ',' and ':',
                // while Windows uses ';', so override the local separator here.
                System.setProperty("path.separator", ":");

                Configuration conf = new Configuration();
                // Third-party jars can be shipped with the job if needed:
                //conf.set("tmpjars", "/tmp/jars/hbase-0.94.5.jar,/tmp/jars/protobuf-java-2.4.0a.jar,/tmp/jars/zookeeper-3.4.3.jar");
                conf.set("mapred.job.tracker", "172.168.8.54:9001");
                conf.set("fs.default.name", "hdfs://172.168.8.54:9000");
                conf.set("hadoop.job.ugi", "hadoop");
                conf.set("hbase.zookeeper.quorum", "172.168.8.55,172.168.8.56,172.168.8.57");
                conf.set("hbase.zookeeper.property.clientPort", "2181");

                Job job = new Job(conf);
                job.setJobName("ReadHbaseToHdfsAction");
                job.setJarByClass(ReadHbaseToHdfsAction.class);
                job.setNumReduceTasks(1);
                job.setReducerClass(ReadHbaseToHdfsReduce.class);

                // Remove the output path if it already exists, otherwise the job fails.
                FileSystem fs = FileSystem.get(conf);
                Path outPath = new Path("/tmp/2/1");
                if (fs.exists(outPath)) {
                    fs.delete(outPath, true);
                }
                FileOutputFormat.setOutputPath(job, outPath);

                job.setOutputKeyClass(Text.class);
                job.setOutputValueClass(Text.class);

                // Read the HBase table "misdn_catetory22" with ReadHbaseToHdfsMapper.
                Scan scan = new Scan();
                TableMapReduceUtil.initTableMapperJob("misdn_catetory22", scan,
                        ReadHbaseToHdfsMapper.class, Text.class, Text.class, job);

                job.waitForCompletion(true);
            } catch (Exception e) {
                e.printStackTrace();
            }
            return null;
        }
    });
}
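The answer references ReadHbaseToHdfsMapper and ReadHbaseToHdfsReduce but does not show them. Below is a minimal sketch of what they could look like: the class names and the Text/Text key-value types come from the job setup above, while the column handling (dumping each row as tab-separated qualifier=value pairs) is an illustrative assumption, not the original author's implementation.

import java.io.IOException;

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

// Sketch only: emits (row key, "qualifier=value" pairs) for every row of the scanned table.
// In a real project each class would live in its own .java file.
public class ReadHbaseToHdfsMapper extends TableMapper<Text, Text> {
    @Override
    protected void map(ImmutableBytesWritable rowKey, Result result, Context context)
            throws IOException, InterruptedException {
        StringBuilder sb = new StringBuilder();
        for (KeyValue kv : result.raw()) {   // iterate all cells in the row (HBase 0.94-era API)
            sb.append(Bytes.toString(kv.getQualifier()))
              .append('=')
              .append(Bytes.toString(kv.getValue()))
              .append('\t');
        }
        context.write(new Text(Bytes.toString(rowKey.get())), new Text(sb.toString()));
    }
}

// Sketch only: writes one line per mapper output record to the HDFS output path set in main().
public class ReadHbaseToHdfsReduce extends Reducer<Text, Text, Text, Text> {
    @Override
    protected void reduce(Text key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {
        for (Text value : values) {
            context.write(key, value);
        }
    }
}

Because the job sets setNumReduceTasks(1), the single reducer collects all rows into one output file under /tmp/2/1 on HDFS.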