Hadoop MapReduce V2——找出每个月气温最高的2天
项目目录
MyTQ
package com.henu.tq;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Job driver: for every year-month, output the two hottest days.
 *
 * Pipeline: Tmapper emits (Tq, temperature); TSortComparator orders by
 * year/month ascending and temperature descending; TPartioner routes each
 * year to one reducer; TGroupComparator groups year+month into a single
 * reduce() call; Treducer writes the top-2 distinct days.
 */
public class MyTQ {

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        // 1. Job configuration
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        job.setJarByClass(MyTQ.class);
        job.setJobName("tq");

        // 2. Input/output paths; remove a stale output dir so reruns don't fail
        Path inPath = new Path("/tq/input");
        FileInputFormat.addInputPath(job, inPath);
        Path outPath = new Path("/tq/output");
        if (outPath.getFileSystem(conf).exists(outPath)) {
            outPath.getFileSystem(conf).delete(outPath, true);
        }
        FileOutputFormat.setOutputPath(job, outPath);

        // 3. Mapper and its intermediate key/value types
        job.setMapperClass(Tmapper.class);
        job.setMapOutputKeyClass(Tq.class);
        job.setMapOutputValueClass(IntWritable.class);

        // 4. Sort: year/month ascending, temperature descending
        job.setSortComparatorClass(TSortComparator.class);
        // 5. Partition by year
        job.setPartitionerClass(TPartioner.class);
        // 6. Group records of the same year+month into one reduce() call
        job.setGroupingComparatorClass(TGroupComparator.class);
        // 7. One reduce task per year in the sample data (1949-1951)
        job.setNumReduceTasks(3);

        // 8. Reducer and its FINAL output types (previously missing — the
        //    defaults do not match Treducer's Text/IntWritable output)
        job.setReducerClass(Treducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        // 9. Propagate job success/failure to the shell instead of ignoring it
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
TGroupComparator
package com.henu.tq; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.io.WritableComparator; public class TGroupComparator extends WritableComparator{ Tq t1 = null; Tq t2 = null; public TGroupComparator() { super(Tq.class,true); } @Override public int compare(WritableComparable a, WritableComparable b) { t1 = (Tq) a; t2 = (Tq) b; int c1 = Integer.compare(t1.getYear(), t2.getYear()); if (c1 == 0) { return Integer.compare(t1.getMonth(), t2.getMonth()); } return c1; } } |
Tmapper
package com.henu.tq;

import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.util.StringUtils;

/**
 * Parses one input line of the form "yyyy-MM-dd HH:mm:ss&lt;TAB&gt;&lt;temp&gt;c"
 * (e.g. "1950-01-01 11:21:02\t32c") into a Tq key carrying
 * year/month/day/temperature and an IntWritable temperature value.
 */
public class Tmapper extends Mapper<LongWritable, Text, Tq, IntWritable> {

    private final Tq tkey = new Tq();
    private final IntWritable tval = new IntWritable();
    // Hoisted out of map(): building a SimpleDateFormat per record is wasteful.
    // SimpleDateFormat is not thread-safe, but a map task is single-threaded.
    private final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // words[0] = "1950-01-01 11:21:02", words[1] = "32c"
        String[] words = StringUtils.split(value.toString(), '\t');
        try {
            // Lenient parse consumes only the leading "yyyy-MM-dd" portion.
            Date date = dateFormat.parse(words[0]);
            Calendar cal = Calendar.getInstance();
            cal.setTime(date);
            tkey.setYear(cal.get(Calendar.YEAR));
            tkey.setMonth(cal.get(Calendar.MONTH) + 1); // Calendar.MONTH is 0-based
            tkey.setDay(cal.get(Calendar.DAY_OF_MONTH));

            // "32c" -> 32 (strip the trailing unit character)
            int wd = Integer.parseInt(words[1].substring(0, words[1].lastIndexOf("c")));
            tkey.setWd(wd);
            tval.set(wd);
            context.write(tkey, tval);
        } catch (ParseException e) {
            // Skip the malformed record; stack trace lands in the task log.
            e.printStackTrace();
        }
    }
}
TPartioner
package com.henu.tq; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.mapreduce.Partitioner; public class TPartioner extends Partitioner<Tq, IntWritable>{ @Override public int getPartition(Tq arg0, IntWritable arg1, int arg2) { return arg0.getYear() % arg2; } } |
Tq
package com.henu.tq;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.WritableComparable;

/**
 * Composite weather key: year/month/day plus the temperature (wd).
 * Serialized field order (year, month, day, wd) must match between
 * write() and readFields(). Natural ordering is by date only; the
 * temperature is ordered separately by TSortComparator.
 */
public class Tq implements WritableComparable<Tq> {

    private int year;   // four-digit year, e.g. 1950
    private int month;  // 1..12 (mapper already converts from 0-based Calendar)
    private int day;    // 1..31
    private int wd;     // temperature in degrees, parsed from "NNc"

    public int getYear() {
        return year;
    }

    public void setYear(int year) {
        this.year = year;
    }

    public int getMonth() {
        return month;
    }

    public void setMonth(int month) {
        this.month = month;
    }

    public int getDay() {
        return day;
    }

    public void setDay(int day) {
        this.day = day;
    }

    public int getWd() {
        return wd;
    }

    public void setWd(int wd) {
        this.wd = wd;
    }

    /** Renders the date part only, e.g. "1950-1-1" (no zero padding). */
    @Override
    public String toString() {
        return year + "-" + month + "-" + day;
    }

    /** Deserializes fields in the exact order write() emitted them. */
    @Override
    public void readFields(DataInput in) throws IOException {
        year = in.readInt();
        month = in.readInt();
        day = in.readInt();
        wd = in.readInt();
    }

    /** Serializes year, month, day, wd — keep in sync with readFields(). */
    @Override
    public void write(DataOutput out) throws IOException {
        out.writeInt(year);
        out.writeInt(month);
        out.writeInt(day);
        out.writeInt(wd);
    }

    /**
     * Natural order: year, then month, then day (all ascending).
     * The temperature field does not participate.
     */
    @Override
    public int compareTo(Tq other) {
        int c = Integer.compare(year, other.year);
        if (c != 0) {
            return c;
        }
        c = Integer.compare(month, other.month);
        if (c != 0) {
            return c;
        }
        return Integer.compare(day, other.day);
    }
}
Treducer
package com.henu.tq;

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

/**
 * Emits the two hottest records (on distinct days) of each year-month group.
 *
 * Values arrive sorted hottest-first (TSortComparator), and one reduce() call
 * covers a whole month (TGroupComparator). Hadoop reuses the key object: its
 * day/wd fields mutate as the iterator advances, which is what lets us read
 * the current record's day from {@code key} inside the loop.
 *
 * Output line shape: "1950-1-1\t32".
 *
 * (Removed the stray import of the Hadoop test class
 * org.apache.hadoop.crypto.key.TestCachingKeyProvider — it is not on the
 * runtime classpath and was never used.)
 */
public class Treducer extends Reducer<Tq, IntWritable, Text, IntWritable> {

    private final Text tkey = new Text();
    private final IntWritable tval = new IntWritable();

    @Override
    protected void reduce(Tq key, Iterable<IntWritable> vals, Context context)
            throws IOException, InterruptedException {
        int firstDay = -1; // day of the hottest record already written; -1 = none yet
        for (IntWritable val : vals) {
            if (firstDay == -1) {
                // Hottest record of the month.
                tkey.set(key.toString());
                tval.set(val.get());
                context.write(tkey, tval);
                firstDay = key.getDay();
            } else if (firstDay != key.getDay()) {
                // Second-hottest record that falls on a DIFFERENT day; done.
                tkey.set(key.toString());
                tval.set(val.get());
                context.write(tkey, tval);
                return;
            }
            // Same day as the first record: skip, keep scanning.
        }
    }
}
TSortComparator
package com.henu.tq; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.io.WritableComparator; /** * @author George * 实现天气 年月正序,温度倒序 */ public class TSortComparator extends WritableComparator{ Tq t1 = null; Tq t2 = null; public TSortComparator() { super(Tq.class,true); } @Override public int compare(WritableComparable a, WritableComparable b) { t1 = (Tq) a; t2 = (Tq) b; int c1 = Integer.compare(t1.getYear(), t2.getYear()); if (c1 == 0) { int c2 = Integer.compare(t1.getMonth(), t2.getMonth()); if (c2 == 0) { return -Integer.compare(t1.getWd(), t2.getWd()); } return c2; } return c1; } } |
tq.txt
1949-10-01 14:21:02 34c 1949-10-01 19:21:02 38c 1949-10-02 14:01:02 36c 1950-01-01 11:21:02 32c 1950-10-01 12:21:02 37c 1951-12-01 12:21:02 23c 1950-10-02 12:21:02 41c 1950-10-03 12:21:02 27c 1951-07-01 12:21:02 45c 1951-07-02 12:21:02 46c 1951-07-03 12:21:03 47c |
将项目打包
将打包好的 jar 文件传送到node02虚拟机上,然后用 hadoop jar 命令运行:
如果报出如上错误,别慌,只是版本问题:
这个问题确实是由较高版本的JDK编译的java class文件试图在较低版本的JVM上运行产生的错误。
1、解决措施就是保证jvm(java命令)和jdk(javac命令)版本一致。如果是linux版本,则在命令行中分别输入java -version和javac -version命令来查看版本是否一致。这里假设都是1.7版本。
2、如果都一致,但还是解决不了问题,那么你肯定不是直接在命令行中用javac来编译的,而是用类似于eclipse、netbeans这样的编译器来编译的。因为很多编译器都自带javac,而不是采用操作系统中的编译器。如果你的编译器是eclipse的话,那么需要在项目的属性里设置jdk版本,方法是右击项目-->properties-->java compiler --> Enable project specific settings -->将compiler compliance level设置为1.7,也就是与jvm一致的版本(在命令行中java -version所显示的版本)。
综上,如果你是用编译器来编译的话,请首先确保编译器自带的jdk版本是否和操作系统中的java版本一致。
解决后再次运行:
运行成功,看eclipse
Hadoop MapReduce V2——找出每个月气温最高的2天相关推荐
- hadoop错误: 找不到或无法加载主类 org.apache.hadoop.mapreduce.v2.app.MRAppMaster
错误: 找不到或无法加载主类 org.apache.hadoop.mapreduce.v2.app.MRAppMaster 原创hongxiao2016 最后发布于2019-03-30 21:20:5 ...
- 错误: 找不到或无法加载主类 org.apache.hadoop.mapreduce.v2.app.MRAppMaster
解决方案: ①确保$HADOOP_HOME/etc/hadoop/mapred-site.xml包含下列设置 <property> <name>yarn.app.mapre ...
- 执行work count程序报错Could not find or load main class org.apache.hadoop.mapreduce.v2.app.MRAppMaster
程序执行过程报错:Could not find or load main class org.apache.hadoop.mapreduce.v2.app.MRAppMaster 根据报错提示 找到h ...
- 记录hadoop3.2.2出现Could not find or load main class org.apache.hadoop.mapreduce.v2.app.MRAppMaster问题
问题: hadoop3.2.2运行集群自带wordcount功能时出现以下错误: [xiaobai@hadoop102 hadoop]$ hadoop jar share/hadoop/mapredu ...
- Could not find or load main class org.apache.hadoop.mapreduce.v2.app.MRAppMaster
我发现大部分网络遇到这个问题的时候,是应为org.apache.hadoop.mapreduce.v2.app.MRAppMaster 类没有在启动的时候被加载到,或者说在使用的时候. 不知道有没有人 ...
- Hadoop MapReduce V2 Yarn——WordCount
WordCount 原理图示: 溢写,打错了... 一.首先准备工作: Hadoop的jar包,以及配置文件还有10000行的txt.文档 二.创建MyWC类 package com.henu.mr. ...
- mapreduce应用-找出扣扣共同好友
需求:找出扣扣共同好友 用户:好友,好友2,- A:B,C,D,F,E,O B:A,C,E,K C:F,A,D,I D:A,E,F,L E:B,C,D,M,L F:A,B,C,D,E,O,M G:A, ...
- MapReduce例子——找出QQ共同好友
///\\\\\\\\ fri.txt 如下: person: friend1, friend2, friend3, friend4, -.. A:B,C,D,F,E,O B:A,C,E,K C:F, ...
- 实例掌握Hadoop MapReduce
本文旨在帮您快速了解 MapReduce 的工作机制和开发方法,解决以下几个问题: MapReduce 基本原理是什么? MapReduce 的执行过程是怎么样的? MapReduce 的核心流程细节 ...
最新文章
- c++ 常见问题之string
- delphi 纯虚函数的应用
- 疯子的算法总结(七) 字符串算法之 manacher 算法 O(N)解决回文串
- h5外卖源码php_校园食堂外卖APP走红 更多APP定制开发上一品威客网
- python中的thread_Python中的thread
- 雷林鹏分享:PHP MySQL 创建数据库
- xamarin_如何实现声明性Xamarin表单验证
- mysql 时间戳截断_列的Mysql时间戳数据被截断
- 并发编程常见面试题总结一
- 聚类算法之层次聚类和密度聚类(图文并茂)
- 15天高斯滤波matlab,【3.15】matlab 高斯滤波,快速滤波
- NB-IoT在无线烟感监控系统中的优势
- HTML注册登录页面模板JS
- 鲁百年创新设计思维学习总结
- Kubernetes Pod 健康检查机制 LivenessProbe 与 ReadinessProbe
- 云片短信平台java案例,Java使用云片网API发送短信验证码
- oracle select from dual,代码中误用select xxx from dual案例一则
- Android 代码设置来电铃声
- PHP生成图形验证码
- CUDA——Python基础与实现