Map phase
package com.taobao;
import java.io.*;
import java.util.*;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.StringUtils;

public class MyMapper {   
      public static class MapClass extends MapReduceBase
        implements Mapper<LongWritable, Text, Text, Text> {
         
          Path[] localFiles = new Path[0];
          HashMap<String, String> dateMap = new HashMap<String, String>();
           
          public void configure(JobConf job) {
               
                    if(job.getBoolean("HadoopDriver.distributedCacheFile", false)) {
                        try {
                            localFiles = DistributedCache.getLocalCacheFiles(job);
                        }
                        catch (IOException ioe) {
                            System.err.println("Caught exception while getting cached files " + StringUtils.stringifyException(ioe));
                        }
                        if(localFiles[0].toString() != null) {
                            try {
                         

Related articles (link titles lost during extraction; only publication dates remain):

  • 2021-05-31
  • 2022-12-23
  • 2022-12-23
  • 2021-09-14
  • 2021-11-11
  • 2021-12-11
  • 2022-12-23
  • 2022-01-05
You may also like:
  • 2021-06-20
  • 2022-01-22
  • 2022-01-10
  • 2021-11-16
  • 2022-12-23
  • 2022-01-09
  • 2021-06-11
Related resources
Similar solutions