public abstract class Task
extends java.lang.Object
implements org.apache.hadoop.io.Writable, org.apache.hadoop.conf.Configurable
| Modifier and Type | Class and Description |
|---|---|
| protected static class | Task.CombineOutputCollector<K,V>: OutputCollector for the combiner. |
| static class | Task.CombinerRunner<K,V> |
| protected static class | Task.CombineValuesIterator<KEY,VALUE> |
| static class | Task.Counter |
| protected static class | Task.NewCombinerRunner<K,V> |
| protected static class | Task.OldCombinerRunner<K,V> |
| class | Task.TaskReporter |
| Modifier and Type | Field and Description |
|---|---|
| protected OutputCommitter | committer |
| protected JobConf | conf |
| static long | DEFAULT_MR_COMBINE_RECORDS_BEFORE_PROGRESS |
| protected static java.lang.String | FILESYSTEM_COUNTER_GROUP: Name of the FileSystem counters' group. |
| protected boolean | jobCleanup |
| protected JobContext | jobContext |
| protected JobStatus.State | jobRunStateForCleanup |
| protected boolean | jobSetup |
| protected org.apache.hadoop.mapred.JvmContext | jvmContext |
| protected org.apache.hadoop.fs.LocalDirAllocator | lDirAlloc |
| protected MapOutputFile | mapOutputFile |
| static java.lang.String | MR_COMBINE_RECORDS_BEFORE_PROGRESS |
| protected OutputFormat<?,?> | outputFormat |
| static int | PROGRESS_INTERVAL: The number of milliseconds between progress reports. |
| protected Counters.Counter | spilledRecordsCounter |
| protected boolean | taskCleanup |
| protected TaskAttemptContext | taskContext |
| protected javax.crypto.SecretKey | tokenSecret |
| protected TaskUmbilicalProtocol | umbilical |
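MR_COMBINE_RECORDS_BEFORE_PROGRESS and DEFAULT_MR_COMBINE_RECORDS_BEFORE_PROGRESS pair a configuration key with a fallback value for how many combiner records may be processed between progress reports. A minimal sketch of reading the setting from a JobConf, assuming (as the naming suggests) that the String constant holds the property key; the helper class and method names are illustrative only:

```java
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Task;

public class CombineProgressSetting {
  /**
   * Illustration only: number of combiner records allowed between progress
   * reports, falling back to the class-provided default when unset.
   */
  static long combineRecordsBeforeProgress(JobConf conf) {
    return conf.getLong(Task.MR_COMBINE_RECORDS_BEFORE_PROGRESS,
                        Task.DEFAULT_MR_COMBINE_RECORDS_BEFORE_PROGRESS);
  }
}
```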
| Constructor and Description |
|---|
| Task() |
| Task(java.lang.String jobFile, TaskAttemptID taskId, int partition, int numSlotsRequired) |
| Modifier and Type | Method and Description |
|---|---|
| protected static <INKEY,INVALUE,OUTKEY,OUTVALUE> Reducer.Context | createReduceContext(Reducer<INKEY,INVALUE,OUTKEY,OUTVALUE> reducer, org.apache.hadoop.conf.Configuration job, TaskAttemptID taskId, RawKeyValueIterator rIter, Counter inputKeyCounter, Counter inputValueCounter, RecordWriter<OUTKEY,OUTVALUE> output, OutputCommitter committer, StatusReporter reporter, org.apache.hadoop.io.RawComparator<INKEY> comparator, java.lang.Class<INKEY> keyClass, java.lang.Class<INVALUE> valueClass) |
| abstract org.apache.hadoop.mapred.TaskRunner | createRunner(TaskTracker tracker, org.apache.hadoop.mapred.TaskTracker.TaskInProgress tip, org.apache.hadoop.mapred.TaskTracker.RunningJob rjob): Return an appropriate thread runner for this task. |
| void | done(TaskUmbilicalProtocol umbilical, Task.TaskReporter reporter) |
| org.apache.hadoop.conf.Configuration | getConf() |
| protected static java.lang.String[] | getFileSystemCounterNames(java.lang.String uriScheme): Counters to measure the usage of the different file systems. |
| java.lang.String | getJobFile() |
| JobID | getJobID(): Get the JobID for this task. |
| javax.crypto.SecretKey | getJobTokenSecret(): Get the job token secret. |
| org.apache.hadoop.mapred.JvmContext | getJvmContext(): Gets the task JvmContext. |
| MapOutputFile | getMapOutputFile() |
| int | getNumSlotsRequired() |
| int | getPartition(): Get the index of this task within the job. |
| TaskStatus.Phase | getPhase(): Return the current phase of the task. |
| org.apache.hadoop.util.Progress | getProgress() |
| org.apache.hadoop.mapred.SortedRanges | getSkipRanges(): Get skipRanges. |
| TaskAttemptID | getTaskID() |
| java.lang.String | getUser(): Get the name of the user running the job/task. |
| void | initialize(JobConf job, JobID id, Reporter reporter, boolean useNewApi) |
| abstract boolean | isMapTask() |
| boolean | isSkipping(): Is Task in skipping mode? |
| void | localizeConfiguration(JobConf conf): Localize the given JobConf to be specific for this task. |
| void | readFields(java.io.DataInput in) |
| void | reportFatalError(TaskAttemptID id, java.lang.Throwable throwable, java.lang.String logMsg): Report a fatal error to the parent (task) tracker. |
| protected void | reportNextRecordRange(TaskUmbilicalProtocol umbilical, long nextRecIndex): Reports the next executing record range to the TaskTracker. |
| abstract void | run(JobConf job, TaskUmbilicalProtocol umbilical): Run this task as a part of the named job. |
| protected void | runJobCleanupTask(TaskUmbilicalProtocol umbilical, Task.TaskReporter reporter) |
| protected void | runJobSetupTask(TaskUmbilicalProtocol umbilical, Task.TaskReporter reporter) |
| protected void | runTaskCleanupTask(TaskUmbilicalProtocol umbilical, Task.TaskReporter reporter) |
| void | setConf(org.apache.hadoop.conf.Configuration conf) |
| void | setJobFile(java.lang.String jobFile) |
| void | setJobTokenSecret(javax.crypto.SecretKey tokenSecret): Set the job token secret. |
| void | setJvmContext(org.apache.hadoop.mapred.JvmContext jvmContext): Set the task JvmContext. |
| protected void | setPhase(TaskStatus.Phase phase): Set the current phase of the task. |
| void | setSkipping(boolean skipping): Sets whether to run Task in skipping mode. |
| void | setSkipRanges(org.apache.hadoop.mapred.SortedRanges skipRanges): Set skipRanges. |
| protected void | setWriteSkipRecs(boolean writeSkipRecs): Set whether to write skip records. |
| protected void | statusUpdate(TaskUmbilicalProtocol umbilical) |
| protected boolean | supportIsolationRunner(JobConf conf) |
| java.lang.String | toString() |
| protected boolean | toWriteSkipRecs(): Get whether to write skip records. |
| void | write(java.io.DataOutput out) |
| void | writeFilesRequiredForRerun(JobConf conf): Write files that the IsolationRunner will need to rerun the task. |
public static final java.lang.String MR_COMBINE_RECORDS_BEFORE_PROGRESS
public static final long DEFAULT_MR_COMBINE_RECORDS_BEFORE_PROGRESS
protected static final java.lang.String FILESYSTEM_COUNTER_GROUP
protected JobStatus.State jobRunStateForCleanup
protected boolean jobCleanup
protected boolean jobSetup
protected boolean taskCleanup
protected JobConf conf
protected MapOutputFile mapOutputFile
protected org.apache.hadoop.fs.LocalDirAllocator lDirAlloc
protected JobContext jobContext
protected TaskAttemptContext taskContext
protected OutputFormat<?,?> outputFormat
protected OutputCommitter committer
protected final Counters.Counter spilledRecordsCounter
protected TaskUmbilicalProtocol umbilical
protected javax.crypto.SecretKey tokenSecret
protected org.apache.hadoop.mapred.JvmContext jvmContext
public static final int PROGRESS_INTERVAL
public Task()
public Task(java.lang.String jobFile, TaskAttemptID taskId, int partition, int numSlotsRequired)
protected static java.lang.String[] getFileSystemCounterNames(java.lang.String uriScheme)
public void setJobFile(java.lang.String jobFile)
public java.lang.String getJobFile()
public TaskAttemptID getTaskID()
public int getNumSlotsRequired()
public JobID getJobID()
public void setJobTokenSecret(javax.crypto.SecretKey tokenSecret)
Parameters: tokenSecret - the secret
public javax.crypto.SecretKey getJobTokenSecret()
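setJobTokenSecret and getJobTokenSecret carry the job token secret as a javax.crypto.SecretKey. Below is a small sketch of wrapping raw key bytes with the JDK's SecretKeySpec and attaching them to a task; in a real cluster the framework derives and installs this secret itself, and the helper class, method name, and algorithm string are purely illustrative:

```java
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;
import org.apache.hadoop.mapred.Task;

public class JobSecretExample {
  /** Illustration only: wrap raw bytes as a SecretKey and attach it to a task. */
  static void installJobSecret(Task task, byte[] rawSecret) {
    SecretKey secret = new SecretKeySpec(rawSecret, "HmacSHA1"); // algorithm name is an assumption
    task.setJobTokenSecret(secret);
    SecretKey readBack = task.getJobTokenSecret(); // expected to hand back the key just set
    assert readBack == secret;
  }
}
```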
public void setJvmContext(org.apache.hadoop.mapred.JvmContext jvmContext)
Parameters: jvmContext
public org.apache.hadoop.mapred.JvmContext getJvmContext()
public int getPartition()
public TaskStatus.Phase getPhase()
protected void setPhase(TaskStatus.Phase phase)
Parameters: phase - task phase
protected boolean toWriteSkipRecs()
protected void setWriteSkipRecs(boolean writeSkipRecs)
public void reportFatalError(TaskAttemptID id, java.lang.Throwable throwable, java.lang.String logMsg)
public org.apache.hadoop.mapred.SortedRanges getSkipRanges()
public void setSkipRanges(org.apache.hadoop.mapred.SortedRanges skipRanges)
public boolean isSkipping()
public void setSkipping(boolean skipping)
Parameters: skipping
public java.lang.String getUser()
public void write(java.io.DataOutput out) throws java.io.IOException
Specified by: write in interface org.apache.hadoop.io.Writable
Throws: java.io.IOException
public void readFields(java.io.DataInput in) throws java.io.IOException
Specified by: readFields in interface org.apache.hadoop.io.Writable
Throws: java.io.IOException
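write(DataOutput) and readFields(DataInput) come from org.apache.hadoop.io.Writable, so a task can be round-tripped through a byte stream. A minimal sketch of the serializing half using only JDK streams; TaskBytes and toBytes are hypothetical helpers, not part of the API:

```java
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.mapred.Task;

public class TaskBytes {
  /** Illustration only: serialize any concrete Task through its Writable contract. */
  static byte[] toBytes(Task task) throws IOException {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    task.write(new DataOutputStream(buffer)); // Writable.write drives the encoding
    return buffer.toByteArray();
  }
}
```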
public java.lang.String toString()
Overrides: toString in class java.lang.Object
public void localizeConfiguration(JobConf conf) throws java.io.IOException
Throws: java.io.IOException
public void writeFilesRequiredForRerun(JobConf conf) throws java.io.IOException
Throws: java.io.IOException
public abstract void run(JobConf job, TaskUmbilicalProtocol umbilical) throws java.io.IOException, java.lang.ClassNotFoundException, java.lang.InterruptedException
Parameters: umbilical - for progress reports
Throws: java.io.IOException, java.lang.ClassNotFoundException, java.lang.InterruptedException
public abstract org.apache.hadoop.mapred.TaskRunner createRunner(TaskTracker tracker, org.apache.hadoop.mapred.TaskTracker.TaskInProgress tip, org.apache.hadoop.mapred.TaskTracker.RunningJob rjob) throws java.io.IOException
Parameters: tip - TODO
Throws: java.io.IOException
public abstract boolean isMapTask()
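run, createRunner, and isMapTask are the abstract members a concrete task must supply; the framework's own implementations are MapTask and ReduceTask. The following is a minimal, hypothetical sketch of that surface against the 0.20/1.x API shown on this page, not a usable task type; it sits in the org.apache.hadoop.mapred package because collaborators such as TaskRunner and TaskTracker.TaskInProgress are not part of the public API:

```java
package org.apache.hadoop.mapred; // same package as Task; some collaborators are package-scoped

import java.io.IOException;

/** Hypothetical illustration of the abstract surface of Task; not a working task type. */
public class NoOpTask extends Task {

  public NoOpTask() {
    super(); // no-arg constructor, as Writable types conventionally provide
  }

  public NoOpTask(String jobFile, TaskAttemptID taskId, int partition, int numSlotsRequired) {
    super(jobFile, taskId, partition, numSlotsRequired);
  }

  @Override
  public boolean isMapTask() {
    return false; // report this as a reduce-side task
  }

  @Override
  public void run(JobConf job, TaskUmbilicalProtocol umbilical)
      throws IOException, ClassNotFoundException, InterruptedException {
    // A real task does its work here, reporting progress over 'umbilical';
    // this sketch only pushes a single status update and returns.
    statusUpdate(umbilical);
  }

  @Override
  public TaskRunner createRunner(TaskTracker tracker,
                                 TaskTracker.TaskInProgress tip,
                                 TaskTracker.RunningJob rjob) throws IOException {
    throw new UnsupportedOperationException("illustration only");
  }
}
```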
public org.apache.hadoop.util.Progress getProgress()
public void initialize(JobConf job, JobID id, Reporter reporter, boolean useNewApi) throws java.io.IOException, java.lang.ClassNotFoundException, java.lang.InterruptedException
Throws: java.io.IOException, java.lang.ClassNotFoundException, java.lang.InterruptedException
protected void reportNextRecordRange(TaskUmbilicalProtocol umbilical, long nextRecIndex) throws java.io.IOException
Parameters: umbilical
nextRecIndex - the record index which would be fed next.
Throws: java.io.IOException
public void done(TaskUmbilicalProtocol umbilical, Task.TaskReporter reporter) throws java.io.IOException, java.lang.InterruptedException
Throws: java.io.IOException, java.lang.InterruptedException
protected void statusUpdate(TaskUmbilicalProtocol umbilical) throws java.io.IOException
Throws: java.io.IOException
protected void runTaskCleanupTask(TaskUmbilicalProtocol umbilical, Task.TaskReporter reporter) throws java.io.IOException, java.lang.InterruptedException
Throws: java.io.IOException, java.lang.InterruptedException
protected void runJobCleanupTask(TaskUmbilicalProtocol umbilical, Task.TaskReporter reporter) throws java.io.IOException, java.lang.InterruptedException
Throws: java.io.IOException, java.lang.InterruptedException
protected boolean supportIsolationRunner(JobConf conf)
protected void runJobSetupTask(TaskUmbilicalProtocol umbilical, Task.TaskReporter reporter) throws java.io.IOException, java.lang.InterruptedException
Throws: java.io.IOException, java.lang.InterruptedException
public void setConf(org.apache.hadoop.conf.Configuration conf)
Specified by: setConf in interface org.apache.hadoop.conf.Configurable
public org.apache.hadoop.conf.Configuration getConf()
Specified by: getConf in interface org.apache.hadoop.conf.Configurable
public MapOutputFile getMapOutputFile()
protected static <INKEY,INVALUE,OUTKEY,OUTVALUE> Reducer.Context createReduceContext(Reducer<INKEY,INVALUE,OUTKEY,OUTVALUE> reducer, org.apache.hadoop.conf.Configuration job, TaskAttemptID taskId, RawKeyValueIterator rIter, Counter inputKeyCounter, Counter inputValueCounter, RecordWriter<OUTKEY,OUTVALUE> output, OutputCommitter committer, StatusReporter reporter, org.apache.hadoop.io.RawComparator<INKEY> comparator, java.lang.Class<INKEY> keyClass, java.lang.Class<INVALUE> valueClass) throws java.io.IOException, java.lang.InterruptedException
Throws: java.io.IOException, java.lang.InterruptedException
Copyright © 2009 The Apache Software Foundation