Atguigu Big Data Technology: Hadoop (MapReduce) (New) Chapter 3 MapReduce Framework Principles
3.9.2 Data Cleaning Case in Practice - Complex Parsing Version
1. Requirements
Identify and split the individual fields of each record in a Web access log, and remove records that are not valid. Output the filtered data according to the cleaning rules; an illustrative record is shown after the list below.
(1) Input data
web.log
(2) Expected output data
All of the output is valid data.
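The exact contents of web.log are not reproduced here. As an assumption for illustration only, each record is an Nginx-style access-log line similar to the following hypothetical example:

194.237.142.21 - - [18/Sep/2013:06:49:18 +0000] "GET /wp-content/uploads/2013/07/rstudio-git3.png HTTP/1.1" 304 0 "-" "Mozilla/4.0 (compatible;)"

A record of this shape is kept only if it splits into more than 11 space-separated fields and its status code is below 400; everything else is discarded as invalid.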
2. Implementation code
(1) Define a bean to hold the individual fields of a log record
package com.atguigu.mapreduce.log;
public class LogBean {

	private String remote_addr;      // client IP address
	private String remote_user;      // client user name; the value "-" means no user name
	private String time_local;       // access time and time zone
	private String request;          // requested URL and HTTP protocol
	private String status;           // request status; 200 means success
	private String body_bytes_sent;  // size of the response body sent to the client
	private String http_referer;     // page the request was linked from
	private String http_user_agent;  // client browser information

	private boolean valid = true;    // whether the record is valid
public String getRemote_addr() { return remote_addr; }
public void setRemote_addr(String remote_addr) { this.remote_addr = remote_addr; }
public String getRemote_user() { return remote_user; }
public void setRemote_user(String remote_user) { this.remote_user = remote_user; }
public String getTime_local() { return time_local; }
public void setTime_local(String time_local) { this.time_local = time_local; }
public String getRequest() { return request; }
public void setRequest(String request) { this.request = request; }
public String getStatus() { return status; }
public void setStatus(String status) { this.status = status; }
public String getBody_bytes_sent() { return body_bytes_sent; }
public void setBody_bytes_sent(String body_bytes_sent) { this.body_bytes_sent = body_bytes_sent; }
public String getHttp_referer() { return http_referer; }
public void setHttp_referer(String http_referer) { this.http_referer = http_referer; }
public String getHttp_user_agent() { return http_user_agent; }
public void setHttp_user_agent(String http_user_agent) { this.http_user_agent = http_user_agent; }
public boolean isValid() { return valid; }
public void setValid(boolean valid) { this.valid = valid; }
	@Override
	public String toString() {
		StringBuilder sb = new StringBuilder();
		sb.append(this.valid);
		// "\001" (Ctrl-A) separates the fields; this control character is also
		// Hive's default field delimiter, which makes the output easy to load later.
		sb.append("\001").append(this.remote_addr);
		sb.append("\001").append(this.remote_user);
		sb.append("\001").append(this.time_local);
		sb.append("\001").append(this.request);
		sb.append("\001").append(this.status);
		sb.append("\001").append(this.body_bytes_sent);
		sb.append("\001").append(this.http_referer);
		sb.append("\001").append(this.http_user_agent);
		return sb.toString();
	}
}
(2) Write the LogMapper class
package com.atguigu.mapreduce.log;

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
public class LogMapper extends Mapper<LongWritable, Text, Text, NullWritable> {

	Text k = new Text();

	@Override
	protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {

		// 1 Get one line
		String line = value.toString();

		// 2 Parse the line and check whether it is valid
		LogBean bean = parseLog(line);

		if (!bean.isValid()) {
			return;
		}

		k.set(bean.toString());

		// 3 Write the output
		context.write(k, NullWritable.get());
	}

	// Parse one log line into a LogBean
	private LogBean parseLog(String line) {

		LogBean logBean = new LogBean();

		// 1 Split the line on spaces
		String[] fields = line.split(" ");

		if (fields.length > 11) {

			// 2 Populate the bean
			logBean.setRemote_addr(fields[0]);
			logBean.setRemote_user(fields[1]);
			logBean.setTime_local(fields[3].substring(1));
			logBean.setRequest(fields[6]);
			logBean.setStatus(fields[8]);
			logBean.setBody_bytes_sent(fields[9]);
			logBean.setHttp_referer(fields[10]);

			if (fields.length > 12) {
				logBean.setHttp_user_agent(fields[11] + " " + fields[12]);
			} else {
				logBean.setHttp_user_agent(fields[11]);
			}

			// A status code of 400 or above indicates an HTTP error
			if (Integer.parseInt(logBean.getStatus()) >= 400) {
				logBean.setValid(false);
			}
		} else {
			logBean.setValid(false);
		}
		return logBean;
	}
}
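To make the field indices used by parseLog (0, 1, 3, 6, 8, 9, 10 and 11/12) easier to follow, the standalone sketch below splits one access-log record on spaces and prints each index. The class name LogSplitDemo and the sample line are hypothetical and are not part of the course code.

package com.atguigu.mapreduce.log;

// Standalone sketch: shows which array index each part of a space-split
// access-log record lands in. The sample line is made up for illustration.
public class LogSplitDemo {

	public static void main(String[] args) {

		String line = "194.237.142.21 - - [18/Sep/2013:06:49:18 +0000] "
				+ "\"GET /wp-content/uploads/2013/07/rstudio-git3.png HTTP/1.1\" "
				+ "304 0 \"-\" \"Mozilla/4.0 (compatible;)\"";

		String[] fields = line.split(" ");

		for (int i = 0; i < fields.length; i++) {
			// e.g. fields[0] = IP, fields[3] = time (with leading '['),
			// fields[8] = status code, fields[11..12] = user agent
			System.out.println(i + " -> " + fields[i]);
		}
	}
}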
(3) Write the LogDriver class
package com.atguigu.mapreduce.log;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
public class LogDriver {

	public static void main(String[] args) throws Exception {

		// 1 Get the job configuration
		Configuration conf = new Configuration();
		Job job = Job.getInstance(conf);

		// 2 Set the jar containing the driver
		job.setJarByClass(LogDriver.class);

		// 3 Associate the Mapper
		job.setMapperClass(LogMapper.class);

		// 4 Set the final output key and value types
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(NullWritable.class);

		// 5 Set the input and output paths
		FileInputFormat.setInputPaths(job, new Path(args[0]));
		FileOutputFormat.setOutputPath(job, new Path(args[1]));

		// 6 Submit the job
		job.waitForCompletion(true);
	}
}
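A minimal usage sketch, assuming the three classes above are packaged into a jar named logclean.jar and that the HDFS input and output paths below are placeholders:

hadoop jar logclean.jar com.atguigu.mapreduce.log.LogDriver /user/atguigu/input /user/atguigu/output

Because this job carries no Reducer logic of its own, job.setNumReduceTasks(0) could also be added in the driver so that the mapper output is written out directly; the driver above keeps the defaults as in the course code.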