public class ScriptTransformationExec
extends org.apache.spark.sql.execution.SparkPlan
implements org.apache.spark.sql.execution.UnaryExecNode, scala.Product, scala.Serializable
Transforms the input by forking and running the specified script.

param: input the set of expressions that should be passed to the script.
param: script the command that should be executed.
param: output the attributes that are produced by the script.
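This operator backs Hive-style TRANSFORM queries. Below is a minimal sketch of a query that Spark plans into this node, assuming a Hive-enabled build; the table src and its key and value columns are hypothetical:

```scala
import org.apache.spark.sql.SparkSession

// Minimal sketch, assuming a Hive-enabled Spark build; the table `src`
// and its (key, value) columns are hypothetical.
val spark = SparkSession.builder()
  .appName("script-transformation-demo")
  .enableHiveSupport()
  .getOrCreate()

// TRANSFORM pipes each input row to the external command's stdin
// ('cat' here) and parses the command's stdout back into rows (k, v).
val df = spark.sql(
  "SELECT TRANSFORM (key, value) USING 'cat' AS (k, v) FROM src")

df.explain() // the physical plan should contain a ScriptTransformation node
```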
Constructor and Description |
---|
ScriptTransformationExec(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> input, String script, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, org.apache.spark.sql.execution.SparkPlan child, HiveScriptIOSchema ioschema) |
Modifier and Type | Method and Description |
---|---|
static org.apache.spark.sql.catalyst.expressions.AttributeSeq | allAttributes() |
static org.apache.spark.sql.catalyst.trees.TreeNode<?> | apply(int number) |
static String | argString() |
static String | asCode() |
abstract static boolean | canEqual(Object that) |
static PlanType | canonicalized() |
org.apache.spark.sql.execution.SparkPlan | child() |
static scala.collection.Seq<org.apache.spark.sql.execution.SparkPlan> | children() |
static <B> scala.collection.Seq<B> | collect(scala.PartialFunction<BaseType,B> pf) |
static <B> scala.Option<B> | collectFirst(scala.PartialFunction<BaseType,B> pf) |
static scala.collection.Seq<BaseType> | collectLeaves() |
static org.apache.spark.sql.internal.SQLConf | conf() |
static scala.collection.immutable.Set<org.apache.spark.sql.catalyst.trees.TreeNode<?>> | containsChild() |
abstract static boolean | equals(Object that) |
static RDD<org.apache.spark.sql.catalyst.InternalRow> | execute() |
static <T> Broadcast<T> | executeBroadcast() |
static org.apache.spark.sql.catalyst.InternalRow[] | executeCollect() |
static Row[] | executeCollectPublic() |
static org.apache.spark.sql.catalyst.InternalRow[] | executeTake(int n) |
static scala.collection.Iterator<org.apache.spark.sql.catalyst.InternalRow> | executeToIterator() |
static scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> | expressions() |
static boolean | fastEquals(org.apache.spark.sql.catalyst.trees.TreeNode<?> other) |
static scala.Option<BaseType> | find(scala.Function1<BaseType,Object> f) |
static <A> scala.collection.Seq<A> | flatMap(scala.Function1<BaseType,scala.collection.TraversableOnce<A>> f) |
static void | foreach(scala.Function1<BaseType,scala.runtime.BoxedUnit> f) |
static void | foreachUp(scala.Function1<BaseType,scala.runtime.BoxedUnit> f) |
static scala.collection.mutable.StringBuilder | generateTreeString(int depth, scala.collection.Seq<Object> lastChildren, scala.collection.mutable.StringBuilder builder, boolean verbose, String prefix, boolean addSuffix) |
static String | generateTreeString$default$5() |
static boolean | generateTreeString$default$6() |
static int | hashCode() |
scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> | input() |
static org.apache.spark.sql.catalyst.expressions.AttributeSet | inputSet() |
HiveScriptIOSchema | ioschema() |
static org.apache.spark.sql.execution.metric.SQLMetric | longMetric(String name) |
static org.apache.spark.sql.execution.SparkPlan | makeCopy(Object[] newArgs) |
static <A> scala.collection.Seq<A> | map(scala.Function1<BaseType,A> f) |
static BaseType | mapChildren(scala.Function1<BaseType,BaseType> f) |
static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> | mapExpressions(scala.Function1<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> f) |
static scala.collection.immutable.Map<String,org.apache.spark.sql.execution.metric.SQLMetric> | metrics() |
static org.apache.spark.sql.catalyst.expressions.AttributeSet | missingInput() |
static String | nodeName() |
static String | numberedTreeString() |
static org.apache.spark.sql.catalyst.trees.Origin | origin() |
scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> | output() |
static scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder> | outputOrdering() |
org.apache.spark.sql.catalyst.plans.physical.Partitioning | outputPartitioning() |
static org.apache.spark.sql.catalyst.expressions.AttributeSet | outputSet() |
static BaseType | p(int number) |
static void | prepare() |
static String | prettyJson() |
static void | printSchema() |
org.apache.spark.sql.catalyst.expressions.AttributeSet | producedAttributes() |
abstract static int | productArity() |
abstract static Object | productElement(int n) |
static scala.collection.Iterator<Object> | productIterator() |
static String | productPrefix() |
static org.apache.spark.sql.catalyst.expressions.AttributeSet | references() |
static scala.collection.Seq<org.apache.spark.sql.catalyst.plans.physical.Distribution> | requiredChildDistribution() |
static scala.collection.Seq<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder>> | requiredChildOrdering() |
static void | resetMetrics() |
static boolean | sameResult(PlanType other) |
static StructType | schema() |
static String | schemaString() |
String | script() |
static int | semanticHash() |
static String | simpleString() |
static SQLContext | sqlContext() |
static boolean | subexpressionEliminationEnabled() |
static scala.collection.Seq<PlanType> | subqueries() |
static String | toJSON() |
static String | toString() |
static BaseType | transform(scala.PartialFunction<BaseType,BaseType> rule) |
static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> | transformAllExpressions(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule) |
static BaseType | transformDown(scala.PartialFunction<BaseType,BaseType> rule) |
static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> | transformExpressions(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule) |
static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> | transformExpressionsDown(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule) |
static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> | transformExpressionsUp(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule) |
static BaseType | transformUp(scala.PartialFunction<BaseType,BaseType> rule) |
static String | treeString() |
static String | treeString(boolean verbose, boolean addSuffix) |
static boolean | treeString$default$2() |
static String | verboseString() |
static String | verboseStringWithSuffix() |
static BaseType | withNewChildren(scala.collection.Seq<BaseType> newChildren) |
Methods inherited from class org.apache.spark.sql.execution.SparkPlan:
doExecuteBroadcast, doPrepare, execute, executeBroadcast, executeCollect, executeCollectIterator, executeCollectPublic, executeQuery, executeTake, executeToIterator, initializeLogIfNecessary, initializeLogIfNecessary, initializeLogIfNecessary$default$2, isTraceEnabled, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning, longMetric, makeCopy, metrics, newMutableProjection, newMutableProjection$default$3, newNaturalAscendingOrdering, newOrdering, newPredicate, org$apache$spark$internal$Logging$$log__$eq, org$apache$spark$internal$Logging$$log_, org$apache$spark$sql$execution$SparkPlan$$decodeUnsafeRows, org$apache$spark$sql$execution$SparkPlan$$runningSubqueries, outputOrdering, prepare, prepareSubqueries, requiredChildDistribution, requiredChildOrdering, resetMetrics, sparkContext, sqlContext, subexpressionEliminationEnabled, waitForSubqueries
Methods inherited from class org.apache.spark.sql.catalyst.plans.QueryPlan:
allAttributes, canEvaluate, canEvaluateWithinJoin, canonicalized, conf, doCanonicalize, expressions, innerChildren, inputSet, isCanonicalizedPlan, mapExpressions, missingInput, normalizeExprId, normalizePredicates, org$apache$spark$sql$catalyst$plans$QueryPlan$$recursiveTransform$1, org$apache$spark$sql$catalyst$plans$QueryPlan$$seqToExpressions$1, outputSet, printSchema, references, replaceAlias, sameResult, schema, schemaString, semanticHash, simpleString, splitConjunctivePredicates, splitDisjunctivePredicates, statePrefix, subqueries, transformAllExpressions, transformExpressions, transformExpressionsDown, transformExpressionsUp, verboseString
Methods inherited from class org.apache.spark.sql.catalyst.trees.TreeNode:
apply, argString, asCode, children, collect, collectFirst, collectLeaves, containsChild, fastEquals, find, flatMap, foreach, foreachUp, generateTreeString, generateTreeString$default$5, generateTreeString$default$6, hashCode, jsonFields, map, mapChildren, mapProductIterator, nodeName, numberedTreeString, org$apache$spark$sql$catalyst$trees$TreeNode$$allChildren, org$apache$spark$sql$catalyst$trees$TreeNode$$collectJsonValue$1, org$apache$spark$sql$catalyst$trees$TreeNode$$getNodeNumbered, org$apache$spark$sql$catalyst$trees$TreeNode$$mapChild$1, org$apache$spark$sql$catalyst$trees$TreeNode$$mapChild$2, org$apache$spark$sql$catalyst$trees$TreeNode$$mapTreeNode$1, org$apache$spark$sql$catalyst$trees$TreeNode$$parseToJson, origin, otherCopyArgs, p, prettyJson, productIterator, productPrefix, stringArgs, toJSON, toString, transform, transformDown, transformUp, treeString, treeString, treeString$default$2, verboseStringWithSuffix, withNewChildren
Methods inherited from interface scala.Product:
productArity, productElement, productIterator, productPrefix
Methods inherited from interface org.apache.spark.internal.Logging:
initializeLogging, log_
public ScriptTransformationExec(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> input, String script, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, org.apache.spark.sql.execution.SparkPlan child, HiveScriptIOSchema ioschema)
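The constructor is normally invoked by Spark's planner rather than by hand. Below is a minimal sketch of reading the arguments back through the accessors documented further down, assuming the df from the earlier sketch:

```scala
import org.apache.spark.sql.hive.execution.ScriptTransformationExec

// Walk the physical plan tree and inspect any script transformation nodes.
df.queryExecution.executedPlan.collect {
  case s: ScriptTransformationExec =>
    println(s.script)   // the external command, e.g. "cat"
    println(s.input)    // expressions written to the script's stdin
    println(s.output)   // attributes parsed from the script's stdout
    println(s.ioschema) // row (de)serialization scheme for the pipe
}
```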
public abstract static boolean canEqual(Object that)
public abstract static boolean equals(Object that)
public abstract static Object productElement(int n)
public abstract static int productArity()
public static scala.collection.Iterator<Object> productIterator()
public static String productPrefix()
public static org.apache.spark.sql.catalyst.trees.Origin origin()
public static scala.collection.immutable.Set<org.apache.spark.sql.catalyst.trees.TreeNode<?>> containsChild()
public static int hashCode()
public static boolean fastEquals(org.apache.spark.sql.catalyst.trees.TreeNode<?> other)
public static scala.Option<BaseType> find(scala.Function1<BaseType,Object> f)
public static void foreach(scala.Function1<BaseType,scala.runtime.BoxedUnit> f)
public static void foreachUp(scala.Function1<BaseType,scala.runtime.BoxedUnit> f)
public static <A> scala.collection.Seq<A> map(scala.Function1<BaseType,A> f)
public static <A> scala.collection.Seq<A> flatMap(scala.Function1<BaseType,scala.collection.TraversableOnce<A>> f)
public static <B> scala.collection.Seq<B> collect(scala.PartialFunction<BaseType,B> pf)
public static scala.collection.Seq<BaseType> collectLeaves()
public static <B> scala.Option<B> collectFirst(scala.PartialFunction<BaseType,B> pf)
public static BaseType withNewChildren(scala.collection.Seq<BaseType> newChildren)
public static BaseType transform(scala.PartialFunction<BaseType,BaseType> rule)
public static BaseType transformDown(scala.PartialFunction<BaseType,BaseType> rule)
public static BaseType transformUp(scala.PartialFunction<BaseType,BaseType> rule)
public static BaseType mapChildren(scala.Function1<BaseType,BaseType> f)
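The transform family is inherited from TreeNode: transform is an alias for transformDown (the rule is applied before recursing), while transformUp rewrites children first. A minimal identity sketch, assuming the df from the earlier sketches:

```scala
import org.apache.spark.sql.execution.SparkPlan
import org.apache.spark.sql.hive.execution.ScriptTransformationExec

val plan: SparkPlan = df.queryExecution.executedPlan

// The rule is a partial function; nodes it does not match are kept as-is.
val rewritten = plan.transformUp {
  case s: ScriptTransformationExec =>
    s // return a modified copy here to actually rewrite the node
}
```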
public static String nodeName()
public static String argString()
public static String verboseStringWithSuffix()
public static String toString()
public static String treeString()
public static String treeString(boolean verbose, boolean addSuffix)
public static String numberedTreeString()
public static org.apache.spark.sql.catalyst.trees.TreeNode<?> apply(int number)
public static BaseType p(int number)
public static scala.collection.mutable.StringBuilder generateTreeString(int depth, scala.collection.Seq<Object> lastChildren, scala.collection.mutable.StringBuilder builder, boolean verbose, String prefix, boolean addSuffix)
public static String asCode()
public static String toJSON()
public static String prettyJson()
public static boolean treeString$default$2()
public static String generateTreeString$default$5()
public static boolean generateTreeString$default$6()
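These are the usual debugging entry points. A minimal sketch using the plan value defined above; numberedTreeString prefixes each node with the number accepted by apply(int) and p(int):

```scala
println(plan.treeString)         // indented tree, one operator per line
println(plan.numberedTreeString) // the same tree with node numbers
val root = plan.apply(0)         // fetch a node by its printed number
println(plan.prettyJson)         // pretty-printed JSON form of the tree
```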
public static org.apache.spark.sql.internal.SQLConf conf()
public static org.apache.spark.sql.catalyst.expressions.AttributeSet outputSet()
public static org.apache.spark.sql.catalyst.expressions.AttributeSet references()
public static org.apache.spark.sql.catalyst.expressions.AttributeSet inputSet()
public static org.apache.spark.sql.catalyst.expressions.AttributeSet missingInput()
public static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> transformExpressions(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule)
public static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> transformExpressionsDown(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule)
public static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> transformExpressionsUp(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule)
public static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> mapExpressions(scala.Function1<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> f)
public static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> transformAllExpressions(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule)
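transformExpressions rewrites the expressions held by this node, while transformAllExpressions applies the rule throughout the whole plan. A minimal identity sketch over the plan value defined above:

```scala
import org.apache.spark.sql.catalyst.expressions.AttributeReference

// Visit every attribute reference in every operator of the plan.
val visited = plan.transformAllExpressions {
  case a: AttributeReference =>
    println(a.name) // inspect, then return the expression unchanged
    a
}
```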
public static final scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> expressions()
public static StructType schema()
public static String schemaString()
public static void printSchema()
public static String simpleString()
public static String verboseString()
public static scala.collection.Seq<PlanType> subqueries()
public static final PlanType canonicalized()
public static final boolean sameResult(PlanType other)
public static final int semanticHash()
public static org.apache.spark.sql.catalyst.expressions.AttributeSeq allAttributes()
public static final SQLContext sqlContext()
public static boolean subexpressionEliminationEnabled()
public static org.apache.spark.sql.execution.SparkPlan makeCopy(Object[] newArgs)
public static scala.collection.immutable.Map<String,org.apache.spark.sql.execution.metric.SQLMetric> metrics()
public static void resetMetrics()
public static org.apache.spark.sql.execution.metric.SQLMetric longMetric(String name)
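A minimal sketch of reading an operator's SQL metrics after an action has run, using the plan value defined above; which metric names exist depends on the operator, so none is assumed here:

```scala
// All metrics registered by this operator, keyed by name.
plan.metrics.foreach { case (name, metric) =>
  println(s"$name = ${metric.value}")
}
plan.resetMetrics() // zero the metrics before re-running the plan
```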
public static scala.collection.Seq<org.apache.spark.sql.catalyst.plans.physical.Distribution> requiredChildDistribution()
public static scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder> outputOrdering()
public static scala.collection.Seq<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder>> requiredChildOrdering()
public static final RDD<org.apache.spark.sql.catalyst.InternalRow> execute()
public static final <T> Broadcast<T> executeBroadcast()
public static final void prepare()
public static org.apache.spark.sql.catalyst.InternalRow[] executeCollect()
public static scala.collection.Iterator<org.apache.spark.sql.catalyst.InternalRow> executeToIterator()
public static Row[] executeCollectPublic()
public static org.apache.spark.sql.catalyst.InternalRow[] executeTake(int n)
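execute() is the low-level entry point and yields internal rows; the executeCollect variants materialize results on the driver. Calling these directly, outside Spark's own execution path, is unusual; the sketch below over the plan value defined above only illustrates the return types:

```scala
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.InternalRow

val internal = plan.execute()                         // RDD[InternalRow], lazy
val firstTen: Array[InternalRow] = plan.executeTake(10)
val decoded: Array[Row] = plan.executeCollectPublic() // external Row objects
```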
public static final scala.collection.Seq<org.apache.spark.sql.execution.SparkPlan> children()
public scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> input()
public String script()
public scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output()
Specified by:
output in class org.apache.spark.sql.catalyst.plans.QueryPlan<org.apache.spark.sql.execution.SparkPlan>
public org.apache.spark.sql.execution.SparkPlan child()
Specified by:
child in interface org.apache.spark.sql.execution.UnaryExecNode
public HiveScriptIOSchema ioschema()
public org.apache.spark.sql.catalyst.expressions.AttributeSet producedAttributes()
Overrides:
producedAttributes in class org.apache.spark.sql.catalyst.plans.QueryPlan<org.apache.spark.sql.execution.SparkPlan>
public org.apache.spark.sql.catalyst.plans.physical.Partitioning outputPartitioning()
Specified by:
outputPartitioning in class org.apache.spark.sql.execution.SparkPlan