public class CreateHiveTableAsSelectCommand
extends org.apache.spark.sql.catalyst.plans.logical.LeafNode
implements org.apache.spark.sql.execution.command.RunnableCommand, scala.Product, scala.Serializable
param: tableDesc the table description, which may contain serde, storage handler, etc.
param: query the query whose result will be inserted into the new relation
param: ignoreIfExists if true, continue without error when the table already exists; otherwise raise an exception
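In normal use this command is produced by the planner for Hive-format CREATE TABLE ... AS SELECT statements rather than constructed directly. A minimal sketch of the SQL that exercises both settings of ignoreIfExists, assuming a Hive-enabled session and an existing table named `source` (both assumptions of this sketch):

```scala
import org.apache.spark.sql.SparkSession

// Assumption: Hive support is on the classpath and a table `source` exists.
val spark = SparkSession.builder()
  .appName("ctas-sketch")
  .enableHiveSupport()
  .getOrCreate()

// Plans a Hive CTAS with ignoreIfExists = false:
// raises an exception if `target` already exists.
spark.sql("CREATE TABLE target STORED AS PARQUET AS SELECT * FROM source")

// IF NOT EXISTS maps to ignoreIfExists = true: the command completes
// without error (and without writing) when `target` already exists.
spark.sql("CREATE TABLE IF NOT EXISTS target STORED AS PARQUET AS SELECT * FROM source")
```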
Constructor and Description |
---|
CreateHiveTableAsSelectCommand(org.apache.spark.sql.catalyst.catalog.CatalogTable tableDesc, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query, boolean ignoreIfExists) |
Modifier and Type | Method and Description |
---|---|
static org.apache.spark.sql.catalyst.expressions.AttributeSeq | allAttributes() |
static boolean | analyzed() |
static BaseType | apply(int number) |
String | argString() |
static String | asCode() |
abstract static boolean | canEqual(Object that) |
static org.apache.spark.sql.catalyst.plans.logical.LogicalPlan | canonicalized() |
static scala.collection.Seq<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> | children() |
static boolean | childrenResolved() |
static <B> scala.collection.Seq<B> | collect(scala.PartialFunction<BaseType,B> pf) |
static <B> scala.Option<B> | collectFirst(scala.PartialFunction<BaseType,B> pf) |
static scala.collection.Seq<BaseType> | collectLeaves() |
static org.apache.spark.sql.catalyst.expressions.ExpressionSet | constraints() |
static scala.collection.immutable.Set<org.apache.spark.sql.catalyst.trees.TreeNode<?>> | containsChild() |
abstract static boolean | equals(Object that) |
static scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> | expressions() |
static boolean | fastEquals(org.apache.spark.sql.catalyst.trees.TreeNode<?> other) |
static scala.Option<BaseType> | find(scala.Function1<BaseType,Object> f) |
static <A> scala.collection.Seq<A> | flatMap(scala.Function1<BaseType,scala.collection.TraversableOnce<A>> f) |
static void | foreach(scala.Function1<BaseType,scala.runtime.BoxedUnit> f) |
static void | foreachUp(scala.Function1<BaseType,scala.runtime.BoxedUnit> f) |
static scala.collection.mutable.StringBuilder | generateTreeString(int depth, scala.collection.Seq<Object> lastChildren, scala.collection.mutable.StringBuilder builder, boolean verbose, String prefix) |
static String | generateTreeString$default$5() |
static int | hashCode() |
boolean | ignoreIfExists() |
void | initializeLogging(boolean isInterpreter) |
scala.collection.Seq<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> | innerChildren() |
static org.apache.spark.sql.catalyst.expressions.AttributeSet | inputSet() |
static boolean | isStreaming() |
org.slf4j.Logger | log_() |
static BaseType | makeCopy(Object[] newArgs) |
static <A> scala.collection.Seq<A> | map(scala.Function1<BaseType,A> f) |
static BaseType | mapChildren(scala.Function1<BaseType,BaseType> f) |
static scala.Option<Object> | maxRows() |
static org.apache.spark.sql.catalyst.expressions.AttributeSet | missingInput() |
static String | nodeName() |
static String | numberedTreeString() |
static org.apache.spark.sql.catalyst.trees.Origin | origin() |
static scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> | output() |
static org.apache.spark.sql.catalyst.expressions.AttributeSet | outputSet() |
static String | prettyJson() |
static void | printSchema() |
static org.apache.spark.sql.catalyst.expressions.AttributeSet | producedAttributes() |
abstract static int | productArity() |
abstract static Object | productElement(int n) |
static scala.collection.Iterator<Object> | productIterator() |
static String | productPrefix() |
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan | query() |
static org.apache.spark.sql.catalyst.expressions.AttributeSet | references() |
static void | refresh() |
static scala.Option<org.apache.spark.sql.catalyst.expressions.NamedExpression> | resolve(scala.collection.Seq<String> nameParts, scala.Function2<String,String,Object> resolver) |
static scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> | resolve(StructType schema, scala.Function2<String,String,Object> resolver) |
static scala.Option<org.apache.spark.sql.catalyst.expressions.NamedExpression> | resolveChildren(scala.collection.Seq<String> nameParts, scala.Function2<String,String,Object> resolver) |
static boolean | resolved() |
static org.apache.spark.sql.catalyst.plans.logical.LogicalPlan | resolveExpressions(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> r) |
static org.apache.spark.sql.catalyst.plans.logical.LogicalPlan | resolveOperators(scala.PartialFunction<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> rule) |
static scala.Option<org.apache.spark.sql.catalyst.expressions.NamedExpression> | resolveQuoted(String name, scala.Function2<String,String,Object> resolver) |
scala.collection.Seq<Row> | run(SparkSession sparkSession) |
static boolean | sameResult(PlanType plan) |
static StructType | schema() |
static String | schemaString() |
static String | simpleString() |
static org.apache.spark.sql.catalyst.plans.logical.Statistics | statistics() |
static scala.collection.Seq<PlanType> | subqueries() |
org.apache.spark.sql.catalyst.catalog.CatalogTable | tableDesc() |
static String | toJSON() |
static String | toString() |
static BaseType | transform(scala.PartialFunction<BaseType,BaseType> rule) |
static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> | transformAllExpressions(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule) |
static BaseType | transformDown(scala.PartialFunction<BaseType,BaseType> rule) |
static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> | transformExpressions(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule) |
static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> | transformExpressionsDown(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule) |
static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> | transformExpressionsUp(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule) |
static BaseType | transformUp(scala.PartialFunction<BaseType,BaseType> rule) |
static String | treeString() |
static String | treeString(boolean verbose) |
static String | verboseString() |
static BaseType | withNewChildren(scala.collection.Seq<BaseType> newChildren) |
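Most of the methods above are inherited tree and plan utilities from TreeNode, QueryPlan, and LogicalPlan. A brief sketch of how the most common ones behave on any analyzed LogicalPlan, reusing the `spark` session and `source` table assumed in the earlier sketch:

```scala
import org.apache.spark.sql.catalyst.plans.logical.Filter

// queryExecution.analyzed exposes the resolved logical plan of a query.
val plan = spark.sql("SELECT id FROM source WHERE id > 0").queryExecution.analyzed

// TreeNode utilities: render and traverse the operator tree.
println(plan.treeString)          // indented tree rendering
println(plan.numberedTreeString)  // numbered nodes, addressable via apply(n)

// collect applies a partial function to every node, top-down.
val nodeNames: Seq[String] = plan.collect { case p => p.nodeName }

// QueryPlan utilities: schema and attribute bookkeeping.
plan.printSchema()                // prints the StructType returned by schema()

// transform rewrites matching nodes top-down (an alias of transformDown);
// this illustrative rule strips Filter operators from the plan.
val withoutFilters = plan.transform { case Filter(_, child) => child }
```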
Methods inherited from class org.apache.spark.sql.catalyst.plans.logical.LeafNode:
children, producedAttributes
Methods inherited from class org.apache.spark.sql.catalyst.plans.logical.LogicalPlan:
analyzed, canonicalized, childrenResolved, initializeLogIfNecessary, isStreaming, isTraceEnabled, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning, maxRows, org$apache$spark$internal$Logging$$log__$eq, org$apache$spark$internal$Logging$$log_, org$apache$spark$sql$catalyst$plans$logical$LogicalPlan$$name$1, org$apache$spark$sql$catalyst$plans$logical$LogicalPlan$$resolveAsColumn, org$apache$spark$sql$catalyst$plans$logical$LogicalPlan$$resolveAsTableColumn, refresh, resolve, resolve, resolve, resolveChildren, resolved, resolveExpressions, resolveOperators, resolveQuoted, setAnalyzed, statePrefix, statistics
Methods inherited from class org.apache.spark.sql.catalyst.plans.QueryPlan:
allAttributes, cleanArgs, constraints, expressions, getRelevantConstraints, inputSet, missingInput, org$apache$spark$sql$catalyst$plans$QueryPlan$$aliasMap, org$apache$spark$sql$catalyst$plans$QueryPlan$$cleanArg$1, org$apache$spark$sql$catalyst$plans$QueryPlan$$getConstraintClass, org$apache$spark$sql$catalyst$plans$QueryPlan$$isRecursiveDeduction, org$apache$spark$sql$catalyst$plans$QueryPlan$$recursiveTransform$1, org$apache$spark$sql$catalyst$plans$QueryPlan$$recursiveTransform$2, org$apache$spark$sql$catalyst$plans$QueryPlan$$scanNullIntolerantExpr, org$apache$spark$sql$catalyst$plans$QueryPlan$$seqToExpressions$1, output, outputSet, printSchema, references, sameResult, schema, schemaString, simpleString, subqueries, transformAllExpressions, transformExpressions, transformExpressionsDown, transformExpressionsUp, validConstraints, verboseString
Methods inherited from class org.apache.spark.sql.catalyst.trees.TreeNode:
apply, asCode, collect, collectFirst, collectLeaves, containsChild, fastEquals, find, flatMap, foreach, foreachUp, fromJSON, generateTreeString, generateTreeString$default$5, getNodeNumbered, hashCode, jsonFields, makeCopy, map, mapChildren, mapProductIterator, nodeName, numberedTreeString, org$apache$spark$sql$catalyst$trees$TreeNode$$allChildren, org$apache$spark$sql$catalyst$trees$TreeNode$$collectJsonValue$1, org$apache$spark$sql$catalyst$trees$TreeNode$$parseToJson, origin, otherCopyArgs, prettyJson, productIterator, productPrefix, stringArgs, toJSON, toString, transform, transformChildren, transformDown, transformUp, treeString, treeString, withNewChildren
Methods inherited from interface org.apache.spark.sql.execution.command.RunnableCommand:
children, output
public CreateHiveTableAsSelectCommand(org.apache.spark.sql.catalyst.catalog.CatalogTable tableDesc, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query, boolean ignoreIfExists)
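The constructor is normally invoked by the planner; a hypothetical direct construction follows, with every field value below an illustrative assumption (including the org.apache.spark.sql.hive.execution import, the package this class lives in within the spark-hive module):

```scala
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable, CatalogTableType}
import org.apache.spark.sql.hive.execution.CreateHiveTableAsSelectCommand
import org.apache.spark.sql.types.StructType

// Hypothetical table description; the schema is left empty because
// CTAS derives it from the query's output at execution time.
val tableDesc = CatalogTable(
  identifier = TableIdentifier("target", Some("default")),
  tableType = CatalogTableType.MANAGED,
  storage = CatalogStorageFormat.empty,
  schema = new StructType())

// The query whose result will populate the new table.
val query = spark.table("source").queryExecution.analyzed

// ignoreIfExists = true mirrors CREATE TABLE IF NOT EXISTS ... AS SELECT.
val cmd = CreateHiveTableAsSelectCommand(tableDesc, query, ignoreIfExists = true)
```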
public abstract static boolean canEqual(Object that)
public abstract static boolean equals(Object that)
public abstract static Object productElement(int n)
public abstract static int productArity()
public static scala.collection.Iterator<Object> productIterator()
public static String productPrefix()
public static org.apache.spark.sql.catalyst.trees.Origin origin()
public static scala.collection.immutable.Set<org.apache.spark.sql.catalyst.trees.TreeNode<?>> containsChild()
public static int hashCode()
public static boolean fastEquals(org.apache.spark.sql.catalyst.trees.TreeNode<?> other)
public static scala.Option<BaseType> find(scala.Function1<BaseType,Object> f)
public static void foreach(scala.Function1<BaseType,scala.runtime.BoxedUnit> f)
public static void foreachUp(scala.Function1<BaseType,scala.runtime.BoxedUnit> f)
public static <A> scala.collection.Seq<A> map(scala.Function1<BaseType,A> f)
public static <A> scala.collection.Seq<A> flatMap(scala.Function1<BaseType,scala.collection.TraversableOnce<A>> f)
public static <B> scala.collection.Seq<B> collect(scala.PartialFunction<BaseType,B> pf)
public static scala.collection.Seq<BaseType> collectLeaves()
public static <B> scala.Option<B> collectFirst(scala.PartialFunction<BaseType,B> pf)
public static BaseType mapChildren(scala.Function1<BaseType,BaseType> f)
public static BaseType withNewChildren(scala.collection.Seq<BaseType> newChildren)
public static BaseType transform(scala.PartialFunction<BaseType,BaseType> rule)
public static BaseType transformDown(scala.PartialFunction<BaseType,BaseType> rule)
public static BaseType transformUp(scala.PartialFunction<BaseType,BaseType> rule)
public static BaseType makeCopy(Object[] newArgs)
public static String nodeName()
public static String toString()
public static String treeString()
public static String treeString(boolean verbose)
public static String numberedTreeString()
public static BaseType apply(int number)
public static scala.collection.mutable.StringBuilder generateTreeString(int depth, scala.collection.Seq<Object> lastChildren, scala.collection.mutable.StringBuilder builder, boolean verbose, String prefix)
public static String asCode()
public static String toJSON()
public static String prettyJson()
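toJSON and prettyJson serialize the plan tree for inspection; a short sketch on the `plan` value from the earlier traversal example:

```scala
// Compact and pretty-printed JSON renderings of the same plan tree.
val compact: String = plan.toJSON
val pretty: String = plan.prettyJson
```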
public static String generateTreeString$default$5()
public static org.apache.spark.sql.catalyst.expressions.ExpressionSet constraints()
public static org.apache.spark.sql.catalyst.expressions.AttributeSet outputSet()
public static org.apache.spark.sql.catalyst.expressions.AttributeSet references()
public static org.apache.spark.sql.catalyst.expressions.AttributeSet inputSet()
public static org.apache.spark.sql.catalyst.expressions.AttributeSet missingInput()
public static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> transformExpressions(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule)
public static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> transformExpressionsDown(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule)
public static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> transformExpressionsUp(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule)
public static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> transformAllExpressions(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule)
public static final scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> expressions()
public static StructType schema()
public static String schemaString()
public static void printSchema()
public static String simpleString()
public static String verboseString()
public static scala.collection.Seq<PlanType> subqueries()
public static boolean sameResult(PlanType plan)
public static org.apache.spark.sql.catalyst.expressions.AttributeSeq allAttributes()
public static boolean analyzed()
public static boolean isStreaming()
public static org.apache.spark.sql.catalyst.plans.logical.LogicalPlan resolveOperators(scala.PartialFunction<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> rule)
public static org.apache.spark.sql.catalyst.plans.logical.LogicalPlan resolveExpressions(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> r)
public static org.apache.spark.sql.catalyst.plans.logical.Statistics statistics()
public static scala.Option<Object> maxRows()
public static boolean resolved()
public static boolean childrenResolved()
public static org.apache.spark.sql.catalyst.plans.logical.LogicalPlan canonicalized()
public static scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> resolve(StructType schema, scala.Function2<String,String,Object> resolver)
public static scala.Option<org.apache.spark.sql.catalyst.expressions.NamedExpression> resolveChildren(scala.collection.Seq<String> nameParts, scala.Function2<String,String,Object> resolver)
public static scala.Option<org.apache.spark.sql.catalyst.expressions.NamedExpression> resolve(scala.collection.Seq<String> nameParts, scala.Function2<String,String,Object> resolver)
public static scala.Option<org.apache.spark.sql.catalyst.expressions.NamedExpression> resolveQuoted(String name, scala.Function2<String,String,Object> resolver)
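The resolve* family looks attributes up by name, delegating name comparison to a resolver of type (String, String) => Boolean. A sketch with Catalyst's built-in case-insensitive resolver, again assuming the analyzed `plan` from the earlier example:

```scala
import org.apache.spark.sql.catalyst.analysis.caseInsensitiveResolution

// Some(attribute) if the plan outputs a column matching "ID"
// case-insensitively, None otherwise.
val resolvedCol = plan.resolveQuoted("ID", caseInsensitiveResolution)
```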
public static void refresh()
public static org.apache.spark.sql.catalyst.expressions.AttributeSet producedAttributes()
public static final scala.collection.Seq<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> children()
public static scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output()
public org.apache.spark.sql.catalyst.catalog.CatalogTable tableDesc()
public org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query()
public boolean ignoreIfExists()
public scala.collection.Seq<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> innerChildren()
public scala.collection.Seq<Row> run(SparkSession sparkSession)
Specified by: run in interface org.apache.spark.sql.execution.command.RunnableCommand
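run executes the CTAS eagerly against the given session; as with other DDL commands, the returned sequence of rows is empty. A sketch, continuing from the hypothetical `cmd` built in the constructor example:

```scala
// Creates the table and writes the query result; DDL returns no rows.
val rows: Seq[org.apache.spark.sql.Row] = cmd.run(spark)
assert(rows.isEmpty)
```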
public String argString()
Overrides: argString in class org.apache.spark.sql.catalyst.trees.TreeNode<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>
public org.slf4j.Logger log_()
public void initializeLogging(boolean isInterpreter)