1

DistributedCache を動作させようとしています。2 つのノードで Apache Hadoop 1.2.1 を使用しています。

サードパーティの jar を -libjars で使用する方法を説明している Cloudera の投稿 (および、それを単純に補足している他の投稿) を参照しました。

ノート:

私の jar にはライブラリ jar は含まれていません — Hadoop コアも Commons Lang も同梱していません。

コード :

/**
 * Driver for a simple word-count MapReduce job.
 *
 * <p>Extends {@link Configured} and implements {@link Tool} so that
 * {@link ToolRunner} can parse generic Hadoop options (e.g. -libjars)
 * and hand the resulting {@link Configuration} to {@link #run(String[])}.
 *
 * <p>Expected arguments: {@code <job name> <input path> <output path>}.
 */
public class WordCounter extends Configured implements Tool {

    /**
     * Builds and submits the job, blocking until it finishes.
     *
     * @param args args[0] = job name, args[1] = HDFS input path,
     *             args[2] = HDFS output path
     * @return 0 on success, 1 on failure
     */
    @Override
    public int run(String[] args) throws Exception {
        // getConf() returns the Configuration that ToolRunner populated
        // from the generic command-line options.
        Job job = new Job(getConf(), args[0]);

        job.setJarByClass(WordCounter.class);

        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);

        FileInputFormat.setInputPaths(job, new Path(args[1]));
        FileOutputFormat.setOutputPath(job, new Path(args[2]));

        job.setMapperClass(WCMapper.class);
        job.setReducerClass(WCReducer.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        return job.waitForCompletion(true) ? 0 : 1;
    }

    /**
     * Entry point: validates the argument count, then delegates to
     * {@link ToolRunner} so generic options are processed.
     */
    public static void main(String[] args) throws Exception {
        // Guard clause: bail out early when the three positional
        // arguments are missing.
        if (args == null || args.length < 3) {
            System.out.println("The below three arguments are expected");
            System.out
                    .println("<job name> <hdfs path of the input file> <hdfs path of the output file>");
            return;
        }

        System.exit(ToolRunner.run(new Configuration(), new WordCounter(), args));
    }

}

Mapper クラスは単純なもので、(Hadoop のものではなく) Apache Commons Lang の StringUtils を使おうとしているだけです。

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

/**
 * @author 298790
 * 
 */
/**
 * Naive word-count mapper: emits {@code (token, 1)} for every
 * whitespace-separated token of the input line.
 *
 * <p>Also calls Commons Lang 3 {@code StringUtils.upperCase} purely to
 * exercise a third-party jar shipped via the distributed cache.
 *
 * @author 298790
 */
public class WCMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    // Shared constant count written with every token.
    private static IntWritable one = new IntWritable(1);

    @Override
    protected void map(
            LongWritable key,
            Text value,
            org.apache.hadoop.mapreduce.Mapper<LongWritable, Text, Text, IntWritable>.Context context)
            throws IOException, InterruptedException {
        // Reuse a single Text instance across tokens (standard Hadoop
        // idiom to avoid per-token allocations).
        Text word = new Text();
        for (StringTokenizer tokens = new StringTokenizer(value.toString());
                tokens.hasMoreTokens(); ) {
            word.set(tokens.nextToken());
            context.write(word, one);
        }

        // Deliberate use of the third-party library: this is the call
        // that fails with ClassNotFoundException when the jar is not on
        // the task classpath.
        System.out.println("Converting " + value + " to upper case "
                + StringUtils.upperCase(value.toString()));
    }
}

私が使用するコマンド:

bigdata@slave3:~$ export HADOOP_CLASSPATH=dumphere/lib/commons-lang3-3.1.jar
bigdata@slave3:~$
bigdata@slave3:~$ echo $HADOOP_CLASSPATH
dumphere/lib/commons-lang3-3.1.jar
bigdata@slave3:~$
bigdata@slave3:~$ echo $LIBJARS
dumphere/lib/commons-lang3-3.1.jar
bigdata@slave3:~$ hadoop jar dumphere/code/jars/hdp_3rdparty.jar com.hadoop.basics.WordCounter "WordCount" "/input/dumphere/Childhood_days.txt" "/output/dumphere/wc" -libjars ${LIBJARS}

私が得る例外:

Warning: $HADOOP_HOME is deprecated.

14/08/13 21:56:05 INFO input.FileInputFormat: Total input paths to process : 1
14/08/13 21:56:05 INFO util.NativeCodeLoader: Loaded the native-hadoop library
14/08/13 21:56:05 WARN snappy.LoadSnappy: Snappy native library not loaded
14/08/13 21:56:05 INFO mapred.JobClient: Running job: job_201408111719_0190
14/08/13 21:56:06 INFO mapred.JobClient:  map 0% reduce 0%
14/08/13 21:56:37 INFO mapred.JobClient: Task Id : attempt_201408111719_0190_m_000000_0, Status : FAILED
Error: java.lang.ClassNotFoundException: org.apache.commons.lang3.StringUtils
        at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
        at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
        at java.security.AccessController.doPrivileged(Native Method)
        at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        at com.hadoop.basics.WCMapper.map(WCMapper.java:40)
        at com.hadoop.basics.WCMapper.map(WCMapper.java:1)
        at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
        at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:764)
        at org.apache.hadoop.mapred.MapTask.run(MapTask.java:364)
        at org.apache.hadoop.mapred.Child$4.run(Child.java:255)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:415)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1190)
        at org.apache.hadoop.mapred.Child.main(Child.java:249)

14/08/13 21:56:42 INFO mapred.JobClient: Task Id : attempt_201408111719_0190_m_000000_1, Status : FAILED
Error: java.lang.ClassNotFoundException: org.apache.commons.lang3.StringUtils
        at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
        at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
        at java.security.AccessController.doPrivileged(Native Method)
        at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:423)
        at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:356)
        at com.hadoop.basics.WCMapper.map(WCMapper.java:40)
        at com.hadoop.basics.WCMapper.map(WCMapper.java:1)
        at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
        at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:764)
        at org.apache.hadoop.mapred.MapTask.run(MapTask.java:364)
        at org.apache.hadoop.mapred.Child$4.run(Child.java:255)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:415)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1190)
        at org.apache.hadoop.mapred.Child.main(Child.java:249)

Cloudera の投稿では次のように言及されています。

The jar will be placed in distributed cache and will be made available to all of the job’s task attempts. More specifically, you will find the JAR in one of the ${mapred.local.dir}/taskTracker/archive/${user.name}/distcache/… subdirectories on local nodes.

しかし、そのパスでは、 commons-lang3-3.1.jar を見つけることができません

何が足りないのでしょうか?

4

0 に答える 0