public abstract class TaskController
extends java.lang.Object
implements org.apache.hadoop.conf.Configurable
Modifier and Type | Class and Description |
---|---|
static class |
TaskController.DeletionContext |
Modifier and Type | Field and Description |
---|---|
protected org.apache.hadoop.fs.LocalDirAllocator |
allocator |
protected static java.lang.String |
COMMAND_FILE |
protected org.apache.hadoop.mapred.TaskTracker.LocalStorage |
localStorage |
static org.apache.commons.logging.Log |
LOG |
static org.apache.hadoop.fs.permission.FsPermission |
TASK_LAUNCH_SCRIPT_PERMISSION |
Constructor and Description |
---|
TaskController() |
Modifier and Type | Method and Description |
---|---|
abstract void |
createLogDir(TaskAttemptID taskID,
boolean isCleanup)
Creates task log dir
|
abstract void |
deleteAsUser(java.lang.String user,
java.lang.String subDir)
Delete the user's files under all of the task tracker root directories.
|
abstract void |
deleteLogAsUser(java.lang.String user,
java.lang.String subDir)
Delete the user's files under the userlogs directory.
|
org.apache.hadoop.conf.Configuration |
getConf() |
java.lang.String[] |
getLocalDirs() |
java.lang.String |
getRunAsUser(JobConf conf)
Returns the local unix user that a given job will run as.
|
abstract void |
initializeJob(java.lang.String user,
java.lang.String jobid,
org.apache.hadoop.fs.Path credentials,
org.apache.hadoop.fs.Path jobConf,
TaskUmbilicalProtocol taskTracker,
java.net.InetSocketAddress ttAddr)
Create all of the directories necessary for the job to start and download
all of the job and private distributed cache files.
|
abstract int |
launchTask(java.lang.String user,
java.lang.String jobId,
java.lang.String attemptId,
java.util.List<java.lang.String> setup,
java.util.List<java.lang.String> jvmArguments,
java.io.File currentWorkDirectory,
java.lang.String stdout,
java.lang.String stderr)
Create all of the directories for the task and launches the child jvm.
|
protected void |
logOutput(java.lang.String output) |
void |
setConf(org.apache.hadoop.conf.Configuration conf) |
abstract void |
setup(org.apache.hadoop.fs.LocalDirAllocator allocator,
org.apache.hadoop.mapred.TaskTracker.LocalStorage localStorage)
Does initialization and setup.
|
abstract void |
signalTask(java.lang.String user,
int taskPid,
ProcessTree.Signal signal)
Send a signal to a task pid as the user.
|
abstract void |
truncateLogsAsUser(java.lang.String user,
java.util.List<Task> allAttempts)
Run the passed command as the user
|
protected static java.lang.String |
writeCommand(java.lang.String cmdLine,
org.apache.hadoop.fs.FileSystem fs,
org.apache.hadoop.fs.Path commandFile) |
public static final org.apache.commons.logging.Log LOG
protected static final java.lang.String COMMAND_FILE
protected org.apache.hadoop.fs.LocalDirAllocator allocator
protected org.apache.hadoop.mapred.TaskTracker.LocalStorage localStorage
public static final org.apache.hadoop.fs.permission.FsPermission TASK_LAUNCH_SCRIPT_PERMISSION
public org.apache.hadoop.conf.Configuration getConf()
getConf
in interface org.apache.hadoop.conf.Configurable
public java.lang.String[] getLocalDirs()
public void setConf(org.apache.hadoop.conf.Configuration conf)
setConf
in interface org.apache.hadoop.conf.Configurable
public abstract void setup(org.apache.hadoop.fs.LocalDirAllocator allocator, org.apache.hadoop.mapred.TaskTracker.LocalStorage localStorage) throws java.io.IOException
allocator
- the local dir allocator to use
localStorage
- TaskTracker's LocalStorage object
java.io.IOException
public abstract void initializeJob(java.lang.String user, java.lang.String jobid, org.apache.hadoop.fs.Path credentials, org.apache.hadoop.fs.Path jobConf, TaskUmbilicalProtocol taskTracker, java.net.InetSocketAddress ttAddr) throws java.io.IOException, java.lang.InterruptedException
user
- the user name
jobid
- the job
credentials
- a filename containing the job secrets
jobConf
- the path to the localized configuration file
taskTracker
- the connection to the task tracker
ttAddr
- the tasktracker's RPC address
java.io.IOException
java.lang.InterruptedException
public abstract int launchTask(java.lang.String user, java.lang.String jobId, java.lang.String attemptId, java.util.List<java.lang.String> setup, java.util.List<java.lang.String> jvmArguments, java.io.File currentWorkDirectory, java.lang.String stdout, java.lang.String stderr) throws java.io.IOException
user
- the user name
jobId
- the jobId in question
attemptId
- the attempt id (cleanup attempts have .cleanup suffix)
setup
- list of shell commands to execute before the jvm
jvmArguments
- list of jvm arguments
currentWorkDirectory
- the full path of the cwd for the task
stdout
- the file to redirect stdout to
stderr
- the file to redirect stderr to
java.io.IOException
public abstract void signalTask(java.lang.String user, int taskPid, ProcessTree.Signal signal) throws java.io.IOException
user
- the user name
taskPid
- the pid of the task
signal
- the id of the signal to send
java.io.IOException
public abstract void deleteAsUser(java.lang.String user, java.lang.String subDir) throws java.io.IOException
user
- the user name
subDir
- the path relative to the user's subdirectory under the task tracker root directories
java.io.IOException
public abstract void createLogDir(TaskAttemptID taskID, boolean isCleanup) throws java.io.IOException
taskID
- ID of the task
isCleanup
- whether the task is a cleanup task or not
java.io.IOException
public abstract void deleteLogAsUser(java.lang.String user, java.lang.String subDir) throws java.io.IOException
user
- the user to work as
subDir
- the path under the userlogs directory
java.io.IOException
public abstract void truncateLogsAsUser(java.lang.String user, java.util.List<Task> allAttempts) throws java.io.IOException
user
- the user to run the command as
allAttempts
- the list of attempts that the JVM ran
java.io.IOException
public java.lang.String getRunAsUser(JobConf conf)
protected static java.lang.String writeCommand(java.lang.String cmdLine, org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path commandFile) throws java.io.IOException
java.io.IOException
protected void logOutput(java.lang.String output)
Copyright © 2009 The Apache Software Foundation