public class CreateHiveTableAsSelectCommand
extends org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
implements org.apache.spark.sql.execution.command.DataWritingCommand, scala.Product, scala.Serializable
param: tableDesc the table description, which may contain serde, storage handler, etc.
param: query the query whose result will be inserted into the new relation
param: mode the SaveMode applied when the target table already exists
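For context, this command is the logical node Spark plans for a Hive-serde CREATE TABLE ... AS SELECT statement. A minimal sketch, assuming a SparkSession named spark built with enableHiveSupport() and an existing table src (both names are hypothetical):

```scala
// Planning this statement produces a CreateHiveTableAsSelectCommand node,
// because STORED AS makes the target a Hive-serde table.
spark.sql(
  """CREATE TABLE ctas_target STORED AS PARQUET
    |AS SELECT key, value FROM src""".stripMargin)
```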
| Constructor and Description |
|---|
| CreateHiveTableAsSelectCommand(org.apache.spark.sql.catalyst.catalog.CatalogTable tableDesc, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query, scala.collection.Seq<String> outputColumnNames, SaveMode mode) |
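Because the command is a Scala case class, it can also be constructed directly. This is internal API, so the following is a sketch only, assuming an active Hive-enabled SparkSession spark and a hypothetical source table src:

```scala
import org.apache.spark.sql.SaveMode
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable, CatalogTableType}
import org.apache.spark.sql.hive.execution.CreateHiveTableAsSelectCommand
import org.apache.spark.sql.types.StructType

// The SELECT whose rows will populate the new table (hypothetical table `src`).
val query = spark.table("src").select("key", "value").queryExecution.analyzed

// Minimal table description; serde and storage details would normally be
// filled in by the parser from the CREATE TABLE clause.
val tableDesc = CatalogTable(
  identifier = TableIdentifier("ctas_target", Some("default")),
  tableType = CatalogTableType.MANAGED,
  storage = CatalogStorageFormat.empty,
  schema = new StructType(),          // derived from the query at execution time
  provider = Some("hive"))

// Case-class apply, mirroring the constructor above.
val cmd = CreateHiveTableAsSelectCommand(
  tableDesc,
  query,
  outputColumnNames = query.output.map(_.name),
  mode = SaveMode.ErrorIfExists)
```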
| Modifier and Type | Method and Description |
|---|---|
| abstract static R | apply(T1 v1, T2 v2, T3 v3, T4 v4) |
| String | argString() |
| SaveMode | mode() |
| scala.collection.Seq<String> | outputColumnNames() |
| org.apache.spark.sql.catalyst.plans.logical.LogicalPlan | query() |
| scala.collection.Seq<Row> | run(SparkSession sparkSession, org.apache.spark.sql.execution.SparkPlan child) |
| org.apache.spark.sql.catalyst.catalog.CatalogTable | tableDesc() |
| static String | toString() |
Methods inherited from class org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
analyzed, assertNotAnalysisRule, childrenResolved, constraints, constructIsNotNullConstraints, inferAdditionalConstraints, initializeLogIfNecessary, initializeLogIfNecessary, initializeLogIfNecessary$default$2, invalidateStatsCache, isStreaming, isTraceEnabled, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning, maxRows, maxRowsPerPartition, org$apache$spark$internal$Logging$$log__$eq, org$apache$spark$internal$Logging$$log_, org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$_analyzed_$eq, org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$_analyzed, outputOrdering, refresh, resolve, resolve, resolveChildren, resolved, resolveExpressions, resolveOperators, resolveOperatorsDown, resolveOperatorsUp, resolveQuoted, setAnalyzed, statePrefix, stats, statsCache_$eq, statsCache, transformAllExpressions, transformDown, transformUp, validConstraints, verboseStringWithSuffix

Methods inherited from class org.apache.spark.sql.catalyst.plans.QueryPlan
allAttributes, canEvaluate, canEvaluateWithinJoin, canonicalized, conf, doCanonicalize, expressions, innerChildren, inputSet, isCanonicalizedPlan, mapExpressions, missingInput, normalizeExprId, normalizePredicates, org$apache$spark$sql$catalyst$plans$QueryPlan$$recursiveTransform$1, org$apache$spark$sql$catalyst$plans$QueryPlan$$seqToExpressions$1, output, outputSet, printSchema, producedAttributes, references, replaceAlias, sameResult, schema, schemaString, semanticHash, simpleString, splitConjunctivePredicates, splitDisjunctivePredicates, subqueries, transformExpressions, transformExpressionsDown, transformExpressionsUp, verboseString

Methods inherited from class org.apache.spark.sql.catalyst.trees.TreeNode
apply, asCode, children, collect, collectFirst, collectLeaves, containsChild, fastEquals, find, flatMap, foreach, foreachUp, generateTreeString, generateTreeString$default$5, generateTreeString$default$6, hashCode, jsonFields, makeCopy, map, mapChildren, mapProductIterator, nodeName, numberedTreeString, org$apache$spark$sql$catalyst$trees$TreeNode$$allChildren, org$apache$spark$sql$catalyst$trees$TreeNode$$collectJsonValue$1, org$apache$spark$sql$catalyst$trees$TreeNode$$getNodeNumbered, org$apache$spark$sql$catalyst$trees$TreeNode$$mapChild$1, org$apache$spark$sql$catalyst$trees$TreeNode$$mapChild$2, org$apache$spark$sql$catalyst$trees$TreeNode$$mapTreeNode$1, org$apache$spark$sql$catalyst$trees$TreeNode$$parseToJson, origin, otherCopyArgs, p, prettyJson, productIterator, productPrefix, stringArgs, toJSON, toString, transform, treeString, treeString, treeString$default$2, withNewChildren

Methods inherited from interface org.apache.spark.sql.execution.command.DataWritingCommand
basicWriteJobStatsTracker, children, metrics, outputColumns

Methods inherited from interface scala.Product
productArity, productElement, productIterator, productPrefix

Methods inherited from interface org.apache.spark.internal.Logging
initializeLogging, log_

public CreateHiveTableAsSelectCommand(org.apache.spark.sql.catalyst.catalog.CatalogTable tableDesc,
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query,
scala.collection.Seq<String> outputColumnNames,
SaveMode mode)
public abstract static R apply(T1 v1,
T2 v2,
T3 v3,
T4 v4)
public static String toString()
public org.apache.spark.sql.catalyst.catalog.CatalogTable tableDesc()
public org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query()
Specified by:
query in interface org.apache.spark.sql.execution.command.DataWritingCommand

public scala.collection.Seq<String> outputColumnNames()
Specified by:
outputColumnNames in interface org.apache.spark.sql.execution.command.DataWritingCommand

public SaveMode mode()
public scala.collection.Seq<Row> run(SparkSession sparkSession, org.apache.spark.sql.execution.SparkPlan child)
Specified by:
run in interface org.apache.spark.sql.execution.command.DataWritingCommand

public String argString()
Overrides:
argString in class org.apache.spark.sql.catalyst.trees.TreeNode<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>
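Putting the accessors together: assuming cmd is the instance built in the constructor sketch above, the case-class fields are readable directly, while run() is normally invoked by Spark's DataWritingCommandExec rather than by user code:

```scala
// Assuming `cmd` is the CreateHiveTableAsSelectCommand built earlier.
println(cmd.tableDesc.identifier)   // target table identifier
println(cmd.query.schema)           // schema of the SELECT feeding the table
println(cmd.outputColumnNames)      // column names to write
println(cmd.mode)                   // ErrorIfExists
println(cmd.argString)              // one-line argument summary used in plan trees

// run(sparkSession, child) receives the executed physical plan of `query`;
// calling it by hand would actually create and populate the Hive table.
```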