public abstract class SparkJobStats extends JobStats
Nested classes/interfaces inherited from class JobStats: JobStats.JobState
Modifier and Type | Field and Description
---|---
protected org.apache.hadoop.mapred.Counters | counters
static java.lang.String | FS_COUNTER_GROUP
Fields inherited from class JobStats: ALIAS, ALIAS_LOCATION, conf, FAILURE_HEADER, FEATURE, hdfsBytesRead, hdfsBytesWritten, inputs, outputs, state, SUCCESS_HEADER
Modifier | Constructor and Description
---|---
protected | SparkJobStats(int jobId, PigStats.JobGraph plan, org.apache.hadoop.conf.Configuration conf)
protected | SparkJobStats(java.lang.String jobId, PigStats.JobGraph plan, org.apache.hadoop.conf.Configuration conf)
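Both constructors are protected and combineTaskMetrics(...) is abstract, so SparkJobStats is meant to be completed by an engine-specific subclass that folds per-task metrics into a single per-job map. The sketch below is purely illustrative and is not the actual Pig subclass: the TaskMetrics import and the trivial aggregation (counting entries per key) are assumptions.

```java
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.pig.tools.pigstats.PigStats;
import org.apache.pig.tools.pigstats.spark.SparkJobStats;
// Assumption: the TaskMetrics type in the parent's signature is Spark's
// task-level metrics class. Adjust the import to whatever type the parent
// class actually declares.
import org.apache.spark.executor.TaskMetrics;

// Hypothetical concrete subclass, for illustration only.
public class SimpleSparkJobStats extends SparkJobStats {

    protected SimpleSparkJobStats(int jobId, PigStats.JobGraph plan, Configuration conf) {
        super(jobId, plan, conf);
    }

    protected SimpleSparkJobStats(String jobId, PigStats.JobGraph plan, Configuration conf) {
        super(jobId, plan, conf);
    }

    @Override
    protected Map<String, Long> combineTaskMetrics(Map<String, List<TaskMetrics>> jobMetric) {
        // Placeholder aggregation: record how many task-metric entries were
        // collected under each key. A real implementation would sum the
        // individual TaskMetrics fields into named counters.
        Map<String, Long> combined = new HashMap<>();
        for (Map.Entry<String, List<TaskMetrics>> e : jobMetric.entrySet()) {
            combined.put(e.getKey(), (long) e.getValue().size());
        }
        return combined;
    }
}
```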
Modifier and Type | Method and Description
---|---
void | accept(PlanVisitor v): Accept a visitor at this node in the graph.
void | addInputStats(POLoad po, boolean success, boolean singleInput)
void | addOutputInfo(POStore poStore, boolean success, JobStatisticCollector jobStatisticCollector)
void | collectStats(JobStatisticCollector jobStatisticCollector)
protected abstract java.util.Map<java.lang.String,java.lang.Long> | combineTaskMetrics(java.util.Map<java.lang.String,java.util.List<TaskMetrics>> jobMetric)
long | getAvgMapTime()
long | getAvgREduceTime()
java.lang.String | getDisplayString()
org.apache.hadoop.mapred.Counters | getHadoopCounters()
java.lang.String | getJobId()
long | getMapInputRecords()
long | getMapOutputRecords()
long | getMaxMapTime()
long | getMaxReduceTime()
long | getMinMapTime()
long | getMinReduceTime()
java.util.Map<java.lang.String,java.lang.Long> | getMultiInputCounters()
java.util.Map<java.lang.String,java.lang.Long> | getMultiStoreCounters()
int | getNumberMaps()
int | getNumberReduces()
long | getProactiveSpillCountObjects()
long | getProactiveSpillCountRecs()
long | getReduceInputRecords()
long | getReduceOutputRecords()
long | getSMMSpillCount()
java.util.Map<java.lang.String,java.lang.Long> | getStats()
java.util.Map<java.lang.String,SparkCounter<java.util.Map<java.lang.String,java.lang.Long>>> | getWarningCounters()
void | initWarningCounters()
void | setAlias(SparkOperator sparkOperator)
void | setConf(org.apache.hadoop.conf.Configuration conf)
Methods inherited from class JobStats: calculateMedianValue, getAlias, getAliasLocation, getBytesWritten, getErrorMessage, getException, getFeature, getHdfsBytesRead, getHdfsBytesWritten, getInputs, getOutputs, getOutputSize, getRecordWrittern, getState, isEqual, isIndexer, isSampler, isSuccessful, setBackendException, setErrorMsg, setSuccessful
Methods inherited from class Operator: annotate, getAnnotation, getLocation, getName, getPlan, removeAnnotation, setLocation, setPlan
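Once a job has finished, the getters summarized above, together with those inherited from JobStats, can be read off a SparkJobStats instance to report on the job. The snippet below is a minimal sketch: how the instance is obtained (typically from the job graph of the run's PigStats) is outside its scope, and the contents of the getStats() map depend on what the concrete subclass's combineTaskMetrics(...) produced.

```java
import java.util.Map;

import org.apache.pig.tools.pigstats.spark.SparkJobStats;

// Minimal sketch: print a few headline numbers for one Spark job's stats.
public class SparkJobStatsReport {
    public static void report(SparkJobStats job) {
        System.out.println("Job ID:       " + job.getJobId());
        System.out.println("Successful:   " + job.isSuccessful());      // inherited from JobStats
        System.out.println("Map tasks:    " + job.getNumberMaps());
        System.out.println("Reduce tasks: " + job.getNumberReduces());
        System.out.println("HDFS read:    " + job.getHdfsBytesRead());  // inherited from JobStats
        System.out.println("HDFS written: " + job.getHdfsBytesWritten());

        // Combined per-job metrics as produced by combineTaskMetrics(...).
        Map<String, Long> stats = job.getStats();
        if (stats != null) {
            for (Map.Entry<String, Long> e : stats.entrySet()) {
                System.out.println(e.getKey() + " = " + e.getValue());
            }
        }
    }
}
```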
protected org.apache.hadoop.mapred.Counters counters
public static java.lang.String FS_COUNTER_GROUP
protected SparkJobStats(int jobId, PigStats.JobGraph plan, org.apache.hadoop.conf.Configuration conf)
protected SparkJobStats(java.lang.String jobId, PigStats.JobGraph plan, org.apache.hadoop.conf.Configuration conf)
public void setConf(org.apache.hadoop.conf.Configuration conf)
public void addOutputInfo(POStore poStore, boolean success, JobStatisticCollector jobStatisticCollector)
public void addInputStats(POLoad po, boolean success, boolean singleInput)
public void collectStats(JobStatisticCollector jobStatisticCollector)
protected abstract java.util.Map<java.lang.String,java.lang.Long> combineTaskMetrics(java.util.Map<java.lang.String,java.util.List<TaskMetrics>> jobMetric)
public java.util.Map<java.lang.String,java.lang.Long> getStats()
public void accept(PlanVisitor v) throws FrontendException
Description copied from class: Operator
Accept a visitor at this node in the graph.
Specified by: accept in class JobStats
Parameters: v - Visitor to accept.
Throws: FrontendException
public java.lang.String getDisplayString()
Specified by: getDisplayString in class JobStats
public int getNumberMaps()
Specified by: getNumberMaps in class JobStats
public int getNumberReduces()
Specified by: getNumberReduces in class JobStats
public long getMaxMapTime()
Specified by: getMaxMapTime in class JobStats
public long getMinMapTime()
Specified by: getMinMapTime in class JobStats
public long getAvgMapTime()
Specified by: getAvgMapTime in class JobStats
public long getMaxReduceTime()
Specified by: getMaxReduceTime in class JobStats
public long getMinReduceTime()
Specified by: getMinReduceTime in class JobStats
public long getAvgREduceTime()
Specified by: getAvgREduceTime in class JobStats
public long getMapInputRecords()
Specified by: getMapInputRecords in class JobStats
public long getMapOutputRecords()
Specified by: getMapOutputRecords in class JobStats
public long getReduceInputRecords()
Specified by: getReduceInputRecords in class JobStats
public long getReduceOutputRecords()
Specified by: getReduceOutputRecords in class JobStats
public long getSMMSpillCount()
Specified by: getSMMSpillCount in class JobStats
public long getProactiveSpillCountObjects()
Specified by: getProactiveSpillCountObjects in class JobStats
public long getProactiveSpillCountRecs()
Specified by: getProactiveSpillCountRecs in class JobStats
public org.apache.hadoop.mapred.Counters getHadoopCounters()
Specified by: getHadoopCounters in class JobStats
public java.util.Map<java.lang.String,java.lang.Long> getMultiStoreCounters()
Specified by: getMultiStoreCounters in class JobStats
public java.util.Map<java.lang.String,java.lang.Long> getMultiInputCounters()
Specified by: getMultiInputCounters in class JobStats
public void setAlias(SparkOperator sparkOperator)
public java.util.Map<java.lang.String,SparkCounter<java.util.Map<java.lang.String,java.lang.Long>>> getWarningCounters()
public void initWarningCounters()
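initWarningCounters() and getWarningCounters() expose Pig's warning counters as SparkCounter instances keyed by name. The sketch below only inspects the returned map's keys, since the structure of the nested Map<String,Long> values is not described here; the SparkCounter import path and the point in the job lifecycle at which initWarningCounters() should be called are assumptions.

```java
import java.util.Map;

import org.apache.pig.tools.pigstats.spark.SparkCounter;
import org.apache.pig.tools.pigstats.spark.SparkJobStats;

// Sketch: register warning counters before the job runs, then list which
// warning-counter entries were collected afterwards.
public class WarningCounterInspection {
    public static void inspect(SparkJobStats job) {
        job.initWarningCounters();
        // ... job executes and counters are populated ...
        Map<String, SparkCounter<Map<String, Long>>> warnings = job.getWarningCounters();
        if (warnings != null) {
            for (String name : warnings.keySet()) {
                System.out.println("warning counter: " + name);
            }
        }
    }
}
```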
Copyright © 2007-2017 The Apache Software Foundation