java.lang.RuntimeException: java.lang.NoSuchMethodException: Hadoop MapReduce

Posted: 2015-10-23 13:02:52

Tags: hadoop, mapreduce

I am getting a java.lang.NoSuchMethodException. Please help me...

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;


public class Placeholder extends Configured implements Tool  
{
    private static Path[] filepath;
    private static String filename;
    private static String[] calendartablecolumn;
    private static BufferedReader br;
    private static String placeholder;
    private static String output;
    private static HashMap<String,String> map = new HashMap<String,String>();

    public int run(String[] args) throws Exception {

        String placeholdername = args[3];
        String stratposition = args[4];
        String length = args[5];
        String category = args[6];
        String calendarid = args[7];


        Configuration conf = new Configuration();
        conf.set("placeholdername", placeholdername);
        conf.set("stratposition", stratposition);
        conf.set("length", length);
        conf.set("category", category);
        conf.set("calendarid", calendarid);


        Job job = Job.getInstance(conf, "Placeholder");
        DistributedCache.addCacheFile(new Path(args[2]).toUri(), getConf());

        job.setJarByClass(Placeholder.class);
        job.setMapperClass(PlaceholderMapper.class);
        job.setNumReduceTasks(0);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);

        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));


        return job.waitForCompletion(true) ? 0 : 1;

    }


    public static void main(String[] args) throws Exception {

        int res = ToolRunner.run(new Configuration(), new Placeholder(), args);     
        System.exit(res);
    }

    public class PlaceholderMapper extends Mapper<LongWritable,Text,Text,Text>
    {
        protected void setup(Context context) throws IOException,InterruptedException 
        {
            try {

                filepath = DistributedCache.getLocalCacheFiles(getConf());
                filename = filepath[0].getName().toString();
                br = new BufferedReader(new FileReader(filepath[0].toString()));
                String line = null;
                while ((line = br.readLine()) != null) 
                {
                    calendartablecolumn = line.split("\\,");
                }
            } catch (Exception e) {

            }
        }

        public void map(LongWritable keys, Text value, Context context) throws IOException, InterruptedException 
        {
            String str_placeholdername = context.getConfiguration().get("placeholdername");

            String str_stratposition = context.getConfiguration().get("stratposition");
            int int_stratposition = Integer.parseInt(str_stratposition);

            String str_length = context.getConfiguration().get("length");
            int int_length = Integer.parseInt(str_length);

            String str_category = context.getConfiguration().get("category");

            String str_calendarid = context.getConfiguration().get("calendarid");
            int int_calendarid = Integer.parseInt(str_calendarid);

            if (str_category != null)
            {
                if (str_category.equalsIgnoreCase("filename"))
                {
                    placeholder = filename.substring((int_stratposition-1),(int_length-1));

                }else if (str_category.equalsIgnoreCase("startdate"))
                {
                    map.put(calendartablecolumn[0].trim(),calendartablecolumn[3].trim());
                    placeholder = map.get(int_calendarid);


                }else if (str_category.equalsIgnoreCase("enddate"))
                {
                    map.put(calendartablecolumn[0].trim(),calendartablecolumn[4].trim());
                    placeholder = map.get(int_calendarid);

                }else if (str_category.equalsIgnoreCase("currentdate"))
                {
                    DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
                    Date date = new Date();
                    String currentdate = (dateFormat.format(date)).toString();
                    placeholder = currentdate.substring((int_stratposition-1),(int_length-1));

                }else if (str_category.equalsIgnoreCase("isdailydata"))
                {

                }

                output = value+","+placeholder;
            }else
            {
                String line =  value.toString();
                output = line.substring((int_stratposition-1),(int_length-1));
            }

            context.write(null,new Text (output));
        }
    }
}

Here is the error I get:

Error: java.lang.RuntimeException: java.lang.NoSuchMethodException: com.nielsen.GRFE.processor.mapreduce.Placeholder$PlaceholderMapper.<init>()
        at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:131)
        at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:745)
        at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
        at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:163)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:415)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)
        at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158)
Caused by: java.lang.NoSuchMethodException: com.nielsen.GRFE.processor.mapreduce.Placeholder$PlaceholderMapper.<init>()
        at java.lang.Class.getConstructor0(Class.java:2849)
        at java.lang.Class.getDeclaredConstructor(Class.java:2053)
        at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:125)

1 Answer:

Answer 0: (score: 3)

I think the problem is that your PlaceholderMapper class is not static; try making it static. Hadoop instantiates the mapper by reflection through a no-argument constructor, and a non-static inner class has no such constructor because every instance carries a hidden reference to the enclosing Placeholder object. That is exactly what the NoSuchMethodException for PlaceholderMapper.<init>() is reporting. A sketch of the change is shown below.
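
A minimal sketch of that change, assuming everything else in the class stays as posted (the method bodies from the question are elided here):

    // "static" removes the implicit reference to the enclosing Placeholder
    // instance, so the class now has the no-argument constructor that
    // ReflectionUtils.newInstance() requires. Alternatively, move the mapper
    // into its own top-level class file.
    public static class PlaceholderMapper extends Mapper<LongWritable,Text,Text,Text>
    {
        @Override
        protected void setup(Context context) throws IOException, InterruptedException
        {
            // same cache-file reading as in the question, except that a static
            // class can no longer call the outer getConf(); use
            // context.getConfiguration() here instead
        }

        @Override
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException
        {
            // same placeholder-building logic as in the question
        }
    }

Note that once the class is static, the calls to getConf() inside setup() will no longer compile, because getConf() belongs to the enclosing Configured instance; context.getConfiguration() is the usual replacement there.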