public class WrappedMapper.Context extends Mapper.Context
Modifier and Type | Field and Description |
---|---|
protected MapContext<KEYIN,VALUEIN,KEYOUT,VALUEOUT> |
mapContext |
CACHE_ARCHIVES_VISIBILITIES, CACHE_FILE_VISIBILITIES, COMBINE_CLASS_ATTR, INPUT_FORMAT_CLASS_ATTR, JAR_UNPACK_PATTERN, JOB_ACL_MODIFY_JOB, JOB_ACL_VIEW_JOB, JOB_CANCEL_DELEGATION_TOKEN, JOB_NAMENODES, MAP_CLASS_ATTR, MAP_MEMORY_PHYSICAL_MB, MAP_OUTPUT_COLLECTOR_CLASS_ATTR, MAPREDUCE_TASK_CLASSPATH_PRECEDENCE, OUTPUT_FORMAT_CLASS_ATTR, PARTITIONER_CLASS_ATTR, REDUCE_CLASS_ATTR, REDUCE_MEMORY_PHYSICAL_MB, SHUFFLE_CONSUMER_PLUGIN_ATTR, USER_LOG_RETAIN_HOURS
Constructor and Description |
---|
WrappedMapper.Context(MapContext<KEYIN,VALUEIN,KEYOUT,VALUEOUT> mapContext) |
Modifier and Type | Method and Description |
---|---|
org.apache.hadoop.fs.Path[] |
getArchiveClassPaths()
Get the archive entries in classpath as an array of Path
|
java.lang.String[] |
getArchiveTimestamps()
Get the timestamps of the archives.
|
java.net.URI[] |
getCacheArchives()
Get cache archives set in the Configuration
|
java.net.URI[] |
getCacheFiles()
Get cache files set in the Configuration
|
java.lang.Class<? extends Reducer<?,?,?,?>> |
getCombinerClass()
Get the combiner class for the job.
|
org.apache.hadoop.io.RawComparator<?> |
getCombinerKeyGroupingComparator()
Get the user defined
RawComparator comparator for
grouping keys of inputs to the combiner. |
org.apache.hadoop.conf.Configuration |
getConfiguration()
Return the configuration for the job.
|
Counter |
getCounter(java.lang.Enum<?> counterName)
Get the
Counter for the given counterName . |
Counter |
getCounter(java.lang.String groupName,
java.lang.String counterName)
|
org.apache.hadoop.security.Credentials |
getCredentials()
Get credentials for the job.
|
KEYIN |
getCurrentKey()
Get the current key.
|
VALUEIN |
getCurrentValue()
Get the current value.
|
org.apache.hadoop.fs.Path[] |
getFileClassPaths()
Get the file entries in classpath as an array of Path
|
java.lang.String[] |
getFileTimestamps()
Get the timestamps of the files.
|
org.apache.hadoop.io.RawComparator<?> |
getGroupingComparator()
Get the user defined
RawComparator comparator for
grouping keys of inputs to the reduce. |
java.lang.Class<? extends InputFormat<?,?>> |
getInputFormatClass()
Get the
InputFormat class for the job. |
InputSplit |
getInputSplit()
Get the input split for this map.
|
java.lang.String |
getJar()
Get the pathname of the job's jar.
|
JobID |
getJobID()
Get the unique ID for the job.
|
java.lang.String |
getJobName()
Get the user-specified job name.
|
boolean |
getJobSetupCleanupNeeded()
Get whether job-setup and job-cleanup is needed for the job
|
org.apache.hadoop.fs.Path[] |
getLocalCacheArchives()
Return the path array of the localized caches
|
org.apache.hadoop.fs.Path[] |
getLocalCacheFiles()
Return the path array of the localized files
|
java.lang.Class<?> |
getMapOutputKeyClass()
Get the key class for the map output data.
|
java.lang.Class<?> |
getMapOutputValueClass()
Get the value class for the map output data.
|
java.lang.Class<? extends Mapper<?,?,?,?>> |
getMapperClass()
Get the
Mapper class for the job. |
int |
getMaxMapAttempts()
Get the configured number of maximum attempts that will be made to run a map task.
|
int |
getMaxReduceAttempts()
Get the configured number of maximum attempts that will be made to run a reduce task.
|
int |
getNumReduceTasks()
Get the configured number of reduce tasks for this job.
|
OutputCommitter |
getOutputCommitter()
Get the
OutputCommitter for the task-attempt. |
java.lang.Class<? extends OutputFormat<?,?>> |
getOutputFormatClass()
Get the
OutputFormat class for the job. |
java.lang.Class<?> |
getOutputKeyClass()
Get the key class for the job output data.
|
java.lang.Class<?> |
getOutputValueClass()
Get the value class for job outputs.
|
java.lang.Class<? extends Partitioner<?,?>> |
getPartitionerClass()
Get the
Partitioner class for the job. |
boolean |
getProfileEnabled()
Get whether the task profiling is enabled.
|
java.lang.String |
getProfileParams() |
java.lang.Class<? extends Reducer<?,?,?,?>> |
getReducerClass()
Get the
Reducer class for the job. |
org.apache.hadoop.io.RawComparator<?> |
getSortComparator()
Get the
RawComparator comparator used to compare keys. |
java.lang.String |
getStatus()
Get the last set status message.
|
boolean |
getSymlink()
This method checks to see if symlinks are to be created for the
localized cache files in the current working directory
|
TaskAttemptID |
getTaskAttemptID()
Get the unique name for this task attempt.
|
java.lang.String |
getUser()
Get the reported username for this job.
|
org.apache.hadoop.fs.Path |
getWorkingDirectory()
Get the current working directory for the default file system.
|
boolean |
nextKeyValue()
Advance to the next key, value pair, returning false if at end.
|
void |
progress() |
void |
setStatus(java.lang.String msg)
Set the current status of the task to the given string.
|
boolean |
userClassesTakesPrecedence()
Get the boolean value for the property that specifies which classpath
takes precedence when tasks are launched.
|
void |
write(KEYOUT key,
VALUEOUT value)
Generate an output key/value pair.
|
protected MapContext<KEYIN,VALUEIN,KEYOUT,VALUEOUT> mapContext
public WrappedMapper.Context(MapContext<KEYIN,VALUEIN,KEYOUT,VALUEOUT> mapContext)
public InputSplit getInputSplit()
public KEYIN getCurrentKey() throws java.io.IOException, java.lang.InterruptedException
TaskInputOutputContext
java.io.IOException
java.lang.InterruptedException
public VALUEIN getCurrentValue() throws java.io.IOException, java.lang.InterruptedException
TaskInputOutputContext
java.io.IOException
java.lang.InterruptedException
public boolean nextKeyValue() throws java.io.IOException, java.lang.InterruptedException
TaskInputOutputContext
java.io.IOException
java.lang.InterruptedException
public Counter getCounter(java.lang.Enum<?> counterName)
TaskAttemptContext
Get the Counter for the given counterName.
Parameters: counterName - counter name
Returns: the Counter for the given counterName
public Counter getCounter(java.lang.String groupName, java.lang.String counterName)
TaskAttemptContext
Parameters: groupName - counter group name; counterName - counter name
Returns: the Counter for the given groupName and counterName
public OutputCommitter getOutputCommitter()
TaskInputOutputContext
Get the OutputCommitter for the task-attempt.
Returns: the OutputCommitter for the task-attempt
public void write(KEYOUT key, VALUEOUT value) throws java.io.IOException, java.lang.InterruptedException
TaskInputOutputContext
java.io.IOException
java.lang.InterruptedException
public java.lang.String getStatus()
TaskAttemptContext
public TaskAttemptID getTaskAttemptID()
TaskAttemptContext
public void setStatus(java.lang.String msg)
TaskAttemptContext
public org.apache.hadoop.fs.Path[] getArchiveClassPaths()
JobContext
public java.lang.String[] getArchiveTimestamps()
JobContext
public java.net.URI[] getCacheArchives() throws java.io.IOException
JobContext
java.io.IOException
public java.net.URI[] getCacheFiles() throws java.io.IOException
JobContext
java.io.IOException
public java.lang.Class<? extends Reducer<?,?,?,?>> getCombinerClass() throws java.lang.ClassNotFoundException
JobContext
java.lang.ClassNotFoundException
public org.apache.hadoop.conf.Configuration getConfiguration()
JobContext
public org.apache.hadoop.fs.Path[] getFileClassPaths()
JobContext
public java.lang.String[] getFileTimestamps()
JobContext
public org.apache.hadoop.io.RawComparator<?> getCombinerKeyGroupingComparator()
JobContext
Get the user defined RawComparator comparator for grouping keys of inputs to the combiner.
See Also: Job.setCombinerKeyGroupingComparatorClass(Class)
public org.apache.hadoop.io.RawComparator<?> getGroupingComparator()
JobContext
Get the user defined RawComparator comparator for grouping keys of inputs to the reduce.
See Job.setGroupingComparatorClass(Class) for details.
See Also: JobContext.getCombinerKeyGroupingComparator()
public java.lang.Class<? extends InputFormat<?,?>> getInputFormatClass() throws java.lang.ClassNotFoundException
JobContext
Get the InputFormat class for the job.
Returns: the InputFormat class for the job.
Throws: java.lang.ClassNotFoundException
public java.lang.String getJar()
JobContext
public JobID getJobID()
JobContext
public java.lang.String getJobName()
JobContext
public boolean userClassesTakesPrecedence()
JobContext
public boolean getJobSetupCleanupNeeded()
JobContext
public org.apache.hadoop.fs.Path[] getLocalCacheArchives() throws java.io.IOException
JobContext
java.io.IOException
public org.apache.hadoop.fs.Path[] getLocalCacheFiles() throws java.io.IOException
JobContext
java.io.IOException
public java.lang.Class<?> getMapOutputKeyClass()
JobContext
public java.lang.Class<?> getMapOutputValueClass()
JobContext
public java.lang.Class<? extends Mapper<?,?,?,?>> getMapperClass() throws java.lang.ClassNotFoundException
JobContext
Get the Mapper class for the job.
Returns: the Mapper class for the job.
Throws: java.lang.ClassNotFoundException
public int getMaxMapAttempts()
JobContext
public int getMaxReduceAttempts()
JobContext
public int getNumReduceTasks()
JobContext
Returns: the number of reduce tasks for this job, defaulting to 1 if not set.
public java.lang.Class<? extends OutputFormat<?,?>> getOutputFormatClass() throws java.lang.ClassNotFoundException
JobContext
Get the OutputFormat class for the job.
Returns: the OutputFormat class for the job.
Throws: java.lang.ClassNotFoundException
public java.lang.Class<?> getOutputKeyClass()
JobContext
public java.lang.Class<?> getOutputValueClass()
JobContext
public java.lang.Class<? extends Partitioner<?,?>> getPartitionerClass() throws java.lang.ClassNotFoundException
JobContext
Get the Partitioner class for the job.
Returns: the Partitioner class for the job.
Throws: java.lang.ClassNotFoundException
public java.lang.Class<? extends Reducer<?,?,?,?>> getReducerClass() throws java.lang.ClassNotFoundException
JobContext
Get the Reducer class for the job.
Returns: the Reducer class for the job.
Throws: java.lang.ClassNotFoundException
public org.apache.hadoop.io.RawComparator<?> getSortComparator()
JobContext
Get the RawComparator comparator used to compare keys.
Returns: the RawComparator comparator used to compare keys.
public boolean getSymlink()
JobContext
public org.apache.hadoop.fs.Path getWorkingDirectory() throws java.io.IOException
JobContext
java.io.IOException
public void progress()
public boolean getProfileEnabled()
JobContext
public java.lang.String getProfileParams()
public java.lang.String getUser()
JobContext
public org.apache.hadoop.security.Credentials getCredentials()
JobContext
Copyright © 2009 The Apache Software Foundation