public class ScriptTransformation
extends org.apache.spark.sql.execution.SparkPlan
implements org.apache.spark.sql.execution.UnaryExecNode, scala.Product, scala.Serializable
Transforms the input by forking and running the specified script.

param: input the set of expressions that should be passed to the script.
param: script the command that should be executed.
param: output the attributes that are produced by the script.
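This operator is what the Hive planner produces for a SQL `TRANSFORM ... USING 'script'` clause. A minimal sketch of triggering it from SQL, assuming a Hive-enabled session and a hypothetical table `src(key INT, value STRING)` (neither is part of this API's contract):

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder()
  .appName("script-transformation-example")
  .enableHiveSupport() // TRANSFORM requires Hive support
  .getOrCreate()

// input  = the expressions key and value
// script = the command '/bin/cat'
// output = the attributes a and b declared in the AS clause
val transformed = spark.sql(
  "SELECT TRANSFORM (key, value) USING '/bin/cat' AS (a INT, b STRING) FROM src")

transformed.explain() // the physical plan should show a ScriptTransformation node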
| Constructor and Description |
|---|
| ScriptTransformation(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> input, String script, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, org.apache.spark.sql.execution.SparkPlan child, HiveScriptIOSchema ioschema) |
| Modifier and Type | Method and Description |
|---|---|
| static org.apache.spark.sql.catalyst.expressions.AttributeSeq | allAttributes() |
| static org.apache.spark.sql.catalyst.trees.TreeNode<?> | apply(int number) |
| static String | argString() |
| static String | asCode() |
| abstract static boolean | canEqual(Object that) |
| org.apache.spark.sql.execution.SparkPlan | child() |
| static scala.collection.Seq<org.apache.spark.sql.execution.SparkPlan> | children() |
| static <B> scala.collection.Seq<B> | collect(scala.PartialFunction<BaseType,B> pf) |
| static <B> scala.Option<B> | collectFirst(scala.PartialFunction<BaseType,B> pf) |
| static scala.collection.Seq<BaseType> | collectLeaves() |
| static org.apache.spark.sql.catalyst.expressions.ExpressionSet | constraints() |
| static scala.collection.immutable.Set<org.apache.spark.sql.catalyst.trees.TreeNode<?>> | containsChild() |
| abstract static boolean | equals(Object that) |
| static RDD<org.apache.spark.sql.catalyst.InternalRow> | execute() |
| static <T> Broadcast<T> | executeBroadcast() |
| static org.apache.spark.sql.catalyst.InternalRow[] | executeCollect() |
| static Row[] | executeCollectPublic() |
| static org.apache.spark.sql.catalyst.InternalRow[] | executeTake(int n) |
| static scala.collection.Iterator<org.apache.spark.sql.catalyst.InternalRow> | executeToIterator() |
| static scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> | expressions() |
| static boolean | fastEquals(org.apache.spark.sql.catalyst.trees.TreeNode<?> other) |
| static scala.Option<BaseType> | find(scala.Function1<BaseType,Object> f) |
| static <A> scala.collection.Seq<A> | flatMap(scala.Function1<BaseType,scala.collection.TraversableOnce<A>> f) |
| static void | foreach(scala.Function1<BaseType,scala.runtime.BoxedUnit> f) |
| static void | foreachUp(scala.Function1<BaseType,scala.runtime.BoxedUnit> f) |
| static scala.collection.mutable.StringBuilder | generateTreeString(int depth, scala.collection.Seq<Object> lastChildren, scala.collection.mutable.StringBuilder builder, boolean verbose, String prefix) |
| static String | generateTreeString$default$5() |
| static int | hashCode() |
| void | initializeLogging(boolean isInterpreter) |
| scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> | input() |
| static org.apache.spark.sql.catalyst.expressions.AttributeSet | inputSet() |
| HiveScriptIOSchema | ioschema() |
| org.slf4j.Logger | log_() |
| static org.apache.spark.sql.execution.metric.SQLMetric | longMetric(String name) |
| static org.apache.spark.sql.execution.SparkPlan | makeCopy(Object[] newArgs) |
| static <A> scala.collection.Seq<A> | map(scala.Function1<BaseType,A> f) |
| static BaseType | mapChildren(scala.Function1<BaseType,BaseType> f) |
| static scala.collection.immutable.Map<String,String> | metadata() |
| static scala.collection.immutable.Map<String,org.apache.spark.sql.execution.metric.SQLMetric> | metrics() |
| static org.apache.spark.sql.catalyst.expressions.AttributeSet | missingInput() |
| static String | nodeName() |
| static String | numberedTreeString() |
| static org.apache.spark.sql.catalyst.trees.Origin | origin() |
| scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> | output() |
| static scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder> | outputOrdering() |
| org.apache.spark.sql.catalyst.plans.physical.Partitioning | outputPartitioning() |
| static org.apache.spark.sql.catalyst.expressions.AttributeSet | outputSet() |
| static BaseType | p(int number) |
| static void | prepare() |
| static String | prettyJson() |
| static void | printSchema() |
| org.apache.spark.sql.catalyst.expressions.AttributeSet | producedAttributes() |
| abstract static int | productArity() |
| abstract static Object | productElement(int n) |
| static scala.collection.Iterator<Object> | productIterator() |
| static String | productPrefix() |
| static org.apache.spark.sql.catalyst.expressions.AttributeSet | references() |
| static scala.collection.Seq<org.apache.spark.sql.catalyst.plans.physical.Distribution> | requiredChildDistribution() |
| static scala.collection.Seq<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder>> | requiredChildOrdering() |
| static void | resetMetrics() |
| static boolean | sameResult(PlanType plan) |
| static StructType | schema() |
| static String | schemaString() |
| String | script() |
| static String | simpleString() |
| static SQLContext | sqlContext() |
| static boolean | subexpressionEliminationEnabled() |
| static scala.collection.Seq<PlanType> | subqueries() |
| static String | toJSON() |
| static String | toString() |
| static BaseType | transform(scala.PartialFunction<BaseType,BaseType> rule) |
| static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> | transformAllExpressions(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule) |
| static BaseType | transformDown(scala.PartialFunction<BaseType,BaseType> rule) |
| static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> | transformExpressions(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule) |
| static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> | transformExpressionsDown(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule) |
| static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> | transformExpressionsUp(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule) |
| static BaseType | transformUp(scala.PartialFunction<BaseType,BaseType> rule) |
| static String | treeString() |
| static String | treeString(boolean verbose) |
| static String | verboseString() |
| static BaseType | withNewChildren(scala.collection.Seq<BaseType> newChildren) |
Methods inherited from class org.apache.spark.sql.execution.SparkPlan

doExecuteBroadcast, doPrepare, execute, executeBroadcast, executeCollect, executeCollectPublic, executeQuery, executeTake, executeToIterator, initializeLogIfNecessary, isTraceEnabled, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning, longMetric, makeCopy, metadata, metrics, newMutableProjection, newMutableProjection$default$3, newNaturalAscendingOrdering, newOrdering, newPredicate, org$apache$spark$internal$Logging$$log__$eq, org$apache$spark$internal$Logging$$log_, org$apache$spark$sql$execution$SparkPlan$$decodeUnsafeRows, org$apache$spark$sql$execution$SparkPlan$$runningSubqueries, outputOrdering, prepare, prepareSubqueries, requiredChildDistribution, requiredChildOrdering, resetMetrics, sparkContext, sqlContext, subexpressionEliminationEnabled, waitForSubqueries

Methods inherited from class org.apache.spark.sql.catalyst.plans.QueryPlan

allAttributes, canonicalized, cleanArgs, cleanExpression, constraints, expressions, getRelevantConstraints, innerChildren, inputSet, missingInput, org$apache$spark$sql$catalyst$plans$QueryPlan$$aliasMap, org$apache$spark$sql$catalyst$plans$QueryPlan$$cleanArg$1, org$apache$spark$sql$catalyst$plans$QueryPlan$$getConstraintClass, org$apache$spark$sql$catalyst$plans$QueryPlan$$inferIsNotNullConstraints, org$apache$spark$sql$catalyst$plans$QueryPlan$$isRecursiveDeduction, org$apache$spark$sql$catalyst$plans$QueryPlan$$recursiveTransform$1, org$apache$spark$sql$catalyst$plans$QueryPlan$$recursiveTransform$2, org$apache$spark$sql$catalyst$plans$QueryPlan$$scanNullIntolerantAttribute, org$apache$spark$sql$catalyst$plans$QueryPlan$$seqToExpressions$1, outputSet, printSchema, references, sameResult, schema, schemaString, simpleString, statePrefix, subqueries, transformAllExpressions, transformExpressions, transformExpressionsDown, transformExpressionsUp, validConstraints, verboseString

Methods inherited from class org.apache.spark.sql.catalyst.trees.TreeNode

apply, argString, asCode, children, collect, collectFirst, collectLeaves, containsChild, fastEquals, find, flatMap, foreach, foreachUp, generateTreeString, generateTreeString$default$5, hashCode, jsonFields, map, mapChildren, mapProductIterator, nodeName, numberedTreeString, org$apache$spark$sql$catalyst$trees$TreeNode$$allChildren, org$apache$spark$sql$catalyst$trees$TreeNode$$collectJsonValue$1, org$apache$spark$sql$catalyst$trees$TreeNode$$getNodeNumbered, org$apache$spark$sql$catalyst$trees$TreeNode$$parseToJson, origin, otherCopyArgs, p, prettyJson, productIterator, productPrefix, stringArgs, toJSON, toString, transform, transformChildren, transformDown, transformUp, treeString, treeString, withNewChildren
Constructor Detail

public ScriptTransformation(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> input, String script, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, org.apache.spark.sql.execution.SparkPlan child, HiveScriptIOSchema ioschema)
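In normal use the Hive planner builds this node from a TRANSFORM clause rather than calling the constructor directly. A hedged sketch of direct construction, with every value below being illustrative; `???` marks inputs assumed to come from the surrounding plan:

```scala
import org.apache.spark.sql.catalyst.expressions.{AttributeReference, Expression}
import org.apache.spark.sql.execution.SparkPlan
import org.apache.spark.sql.hive.execution.{HiveScriptIOSchema, ScriptTransformation}
import org.apache.spark.sql.types.{IntegerType, StringType}

val key   = AttributeReference("key", IntegerType)()
val value = AttributeReference("value", StringType)()
val outA  = AttributeReference("a", IntegerType)()
val outB  = AttributeReference("b", StringType)()

val childPlan: SparkPlan         = ??? // hypothetical: an already-planned child node
val ioSchema: HiveScriptIOSchema = ??? // hypothetical: the TRANSFORM clause's row format

val node = ScriptTransformation(
  input    = Seq[Expression](key, value), // expressions fed to the script
  script   = "/bin/cat",                  // command to fork and run
  output   = Seq(outA, outB),             // attributes the script produces
  child    = childPlan,
  ioschema = ioSchema)
```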
Method Detail

public abstract static boolean canEqual(Object that)
public abstract static boolean equals(Object that)
public abstract static Object productElement(int n)
public abstract static int productArity()
public static scala.collection.Iterator<Object> productIterator()
public static String productPrefix()
public static org.apache.spark.sql.catalyst.trees.Origin origin()
public static scala.collection.immutable.Set<org.apache.spark.sql.catalyst.trees.TreeNode<?>> containsChild()
public static int hashCode()
public static boolean fastEquals(org.apache.spark.sql.catalyst.trees.TreeNode<?> other)
public static scala.Option<BaseType> find(scala.Function1<BaseType,Object> f)
public static void foreach(scala.Function1<BaseType,scala.runtime.BoxedUnit> f)
public static void foreachUp(scala.Function1<BaseType,scala.runtime.BoxedUnit> f)
public static <A> scala.collection.Seq<A> map(scala.Function1<BaseType,A> f)
public static <A> scala.collection.Seq<A> flatMap(scala.Function1<BaseType,scala.collection.TraversableOnce<A>> f)
public static <B> scala.collection.Seq<B> collect(scala.PartialFunction<BaseType,B> pf)
public static scala.collection.Seq<BaseType> collectLeaves()
public static <B> scala.Option<B> collectFirst(scala.PartialFunction<BaseType,B> pf)
public static BaseType mapChildren(scala.Function1<BaseType,BaseType> f)
public static BaseType withNewChildren(scala.collection.Seq<BaseType> newChildren)
public static BaseType transform(scala.PartialFunction<BaseType,BaseType> rule)
public static BaseType transformDown(scala.PartialFunction<BaseType,BaseType> rule)
public static BaseType transformUp(scala.PartialFunction<BaseType,BaseType> rule)
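A short sketch of the TreeNode transform API inherited here: transform is pre-order (an alias of transformDown), while transformUp rewrites bottom-up. The rewrite rule below is illustrative only:

```scala
import org.apache.spark.sql.execution.SparkPlan
import org.apache.spark.sql.hive.execution.ScriptTransformation

def replaceScript(plan: SparkPlan): SparkPlan = plan.transform {
  // Swap the external command on every ScriptTransformation in the tree;
  // copy() works because the node is a case class.
  case st: ScriptTransformation => st.copy(script = "/bin/cat")
}
```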
public static String nodeName()
public static String argString()
public static String toString()
public static String treeString()
public static String treeString(boolean verbose)
public static String numberedTreeString()
public static org.apache.spark.sql.catalyst.trees.TreeNode<?> apply(int number)
public static BaseType p(int number)
public static scala.collection.mutable.StringBuilder generateTreeString(int depth, scala.collection.Seq<Object> lastChildren, scala.collection.mutable.StringBuilder builder, boolean verbose, String prefix)
public static String asCode()
public static String toJSON()
public static String prettyJson()
public static String generateTreeString$default$5()
public static org.apache.spark.sql.catalyst.expressions.ExpressionSet constraints()
public static org.apache.spark.sql.catalyst.expressions.AttributeSet outputSet()
public static org.apache.spark.sql.catalyst.expressions.AttributeSet references()
public static org.apache.spark.sql.catalyst.expressions.AttributeSet inputSet()
public static org.apache.spark.sql.catalyst.expressions.AttributeSet missingInput()
public static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> transformExpressions(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule)
public static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> transformExpressionsDown(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule)
public static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> transformExpressionsUp(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule)
public static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> transformAllExpressions(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule)
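A hedged sketch of the expression-rewrite variants: transformExpressions rewrites only this node's own expressions, while transformAllExpressions recurses through the whole plan. The constant rewrite here is purely illustrative:

```scala
import org.apache.spark.sql.catalyst.expressions.Literal
import org.apache.spark.sql.execution.SparkPlan
import org.apache.spark.sql.types.IntegerType

def bumpConstant(plan: SparkPlan): SparkPlan =
  plan.transformAllExpressions {
    // Rewrite the integer literal 42 to 43 wherever it appears in the plan.
    case Literal(42, IntegerType) => Literal(43, IntegerType)
  }
```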
public static final scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> expressions()
public static StructType schema()
public static String schemaString()
public static void printSchema()
public static String simpleString()
public static String verboseString()
public static scala.collection.Seq<PlanType> subqueries()
public static boolean sameResult(PlanType plan)
public static org.apache.spark.sql.catalyst.expressions.AttributeSeq allAttributes()
public static final SQLContext sqlContext()
public static boolean subexpressionEliminationEnabled()
public static org.apache.spark.sql.execution.SparkPlan makeCopy(Object[] newArgs)
public static scala.collection.immutable.Map<String,String> metadata()
public static scala.collection.immutable.Map<String,org.apache.spark.sql.execution.metric.SQLMetric> metrics()
public static void resetMetrics()
public static org.apache.spark.sql.execution.metric.SQLMetric longMetric(String name)
public static scala.collection.Seq<org.apache.spark.sql.catalyst.plans.physical.Distribution> requiredChildDistribution()
public static scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder> outputOrdering()
public static scala.collection.Seq<scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder>> requiredChildOrdering()
public static final RDD<org.apache.spark.sql.catalyst.InternalRow> execute()
public static final <T> Broadcast<T> executeBroadcast()
public static final void prepare()
public static org.apache.spark.sql.catalyst.InternalRow[] executeCollect()
public static scala.collection.Iterator<org.apache.spark.sql.catalyst.InternalRow> executeToIterator()
public static Row[] executeCollectPublic()
public static org.apache.spark.sql.catalyst.InternalRow[] executeTake(int n)
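A minimal sketch of the execute* entry points inherited from SparkPlan: execute() lazily produces the RDD of internal rows, executeTake(n) short-circuits a full scan, and executeCollectPublic() converts results to the public Row API. The `plan` argument is assumed to be a fully planned physical tree, e.g. a (hypothetical) df.queryExecution.executedPlan:

```scala
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.execution.SparkPlan

def inspect(plan: SparkPlan): Unit = {
  val rdd: RDD[InternalRow] = plan.execute() // lazy; only builds the RDD
  println(rdd.getNumPartitions)
  val firstTen: Array[InternalRow] = plan.executeTake(10)  // bounded collect
  val public: Array[Row] = plan.executeCollectPublic()     // public Rows
  println(s"took ${firstTen.length}, collected ${public.length}")
}
```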
public static final scala.collection.Seq<org.apache.spark.sql.execution.SparkPlan> children()
public scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> input()
public String script()
public scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output()
Specified by:
output in class org.apache.spark.sql.catalyst.plans.QueryPlan<org.apache.spark.sql.execution.SparkPlan>
public org.apache.spark.sql.execution.SparkPlan child()
Specified by:
child in interface org.apache.spark.sql.execution.UnaryExecNode
public HiveScriptIOSchema ioschema()
public org.apache.spark.sql.catalyst.expressions.AttributeSet producedAttributes()
Overrides:
producedAttributes in class org.apache.spark.sql.catalyst.plans.QueryPlan<org.apache.spark.sql.execution.SparkPlan>
public org.apache.spark.sql.catalyst.plans.physical.Partitioning outputPartitioning()
Specified by:
outputPartitioning in class org.apache.spark.sql.execution.SparkPlan
public org.slf4j.Logger log_()
public void initializeLogging(boolean isInterpreter)
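Putting the pieces together, a hedged sketch that reuses the hypothetical Hive-enabled `spark` session and `src` table from the first example: run a TRANSFORM query, print the plan tree, and locate the ScriptTransformation node with the inherited collectFirst:

```scala
import org.apache.spark.sql.hive.execution.ScriptTransformation

val df = spark.sql(
  "SELECT TRANSFORM (key, value) USING '/bin/cat' AS (a INT, b STRING) FROM src")

val plan = df.queryExecution.executedPlan
println(plan.treeString) // or plan.numberedTreeString, then plan.p(n) to drill in

plan.collectFirst { case st: ScriptTransformation => st }.foreach { st =>
  println(s"script = ${st.script}, output = ${st.output.map(_.name).mkString(",")}")
}
```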