public class ShuffledHashJoin extends SparkPlan implements HashJoin, scala.Product, scala.Serializable
Constructor and Description |
---|
ShuffledHashJoin(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> leftKeys, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> rightKeys, BuildSide buildSide, SparkPlan left, SparkPlan right) |
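The constructor mirrors the case-class fields listed in the method table below; user code rarely builds this operator directly (the query planner does), but the following hedged Scala sketch shows how the pieces fit together. The package paths for the join classes, the IntegerType import location, and the placeholder children are assumptions made for illustration and are not confirmed by this page:

```scala
import org.apache.spark.sql.catalyst.expressions.AttributeReference
import org.apache.spark.sql.catalyst.types.IntegerType                      // assumed pre-1.3 location
import org.apache.spark.sql.execution.SparkPlan
import org.apache.spark.sql.execution.joins.{BuildRight, ShuffledHashJoin}  // assumed package

// Hypothetical join-key attributes, one per side, both integer-typed.
val leftKey  = AttributeReference("id", IntegerType, nullable = false)()
val rightKey = AttributeReference("id", IntegerType, nullable = false)()

// Placeholder child operators; in practice these are already-planned physical plans.
val leftChild: SparkPlan  = ???
val rightChild: SparkPlan = ???

val join = ShuffledHashJoin(
  leftKeys  = Seq(leftKey),
  rightKeys = Seq(rightKey),
  buildSide = BuildRight,      // build the in-memory hash table from the right child
  left      = leftChild,
  right     = rightChild)

// join.execute() would yield an RDD of joined rows once the children are real plans.
```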
Modifier and Type | Method and Description |
---|---|
BuildSide | buildSide() |
RDD<org.apache.spark.sql.catalyst.expressions.Row> | execute() Runs this query returning the result as an RDD. (See the sketch after this table.) |
SparkPlan | left() |
scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> | leftKeys() |
org.apache.spark.sql.catalyst.plans.physical.Partitioning | outputPartitioning() Specifies how data is partitioned across different nodes in the cluster. |
scala.collection.immutable.List<org.apache.spark.sql.catalyst.plans.physical.ClusteredDistribution> | requiredChildDistribution() |
SparkPlan | right() |
scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> | rightKeys() |
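execute() performs the actual join work per partition: after both children have been shuffled so that matching keys land in the same partition, the build side is loaded into a hash table and the streamed side probes it. The plain-Scala sketch below uses no Spark APIs; the data, names, and inner-join semantics are illustrative assumptions, intended only to show that per-partition build/probe step:

```scala
// Plain-Scala illustration of the per-partition build/probe step of a
// shuffled hash join. All data and key choices are made up for the example.
object HashJoinSketch {
  def main(args: Array[String]): Unit = {
    // Rows that would end up in the same partition because both inputs
    // were hash-partitioned on the join key (an Int here).
    val buildRows  = Seq((1, "dept-a"), (2, "dept-b"))            // build side
    val streamRows = Seq((1, "alice"), (1, "bob"), (3, "carol"))  // streamed side

    // Build phase: key -> all build-side rows with that key.
    val hashTable: Map[Int, Seq[(Int, String)]] = buildRows.groupBy(_._1)

    // Probe phase: stream the other side and emit matches (inner-join style).
    val joined = for {
      (key, name) <- streamRows
      (_, dept)   <- hashTable.getOrElse(key, Seq.empty)
    } yield (key, name, dept)

    joined.foreach(println)   // prints (1,alice,dept-a) and (1,bob,dept-a)
  }
}
```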
Methods inherited from class SparkPlan: codegenEnabled, executeCollect, makeCopy
Methods inherited from class org.apache.spark.sql.catalyst.plans.QueryPlan: expressions, org$apache$spark$sql$catalyst$plans$QueryPlan$$transformExpressionDown$1, org$apache$spark$sql$catalyst$plans$QueryPlan$$transformExpressionUp$1, output, outputSet, printSchema, schema, schemaString, transformAllExpressions, transformExpressions, transformExpressionsDown, transformExpressionsUp
Methods inherited from class org.apache.spark.sql.catalyst.trees.TreeNode: apply, argString, asCode, children, collect, fastEquals, flatMap, foreach, generateTreeString, getNodeNumbered, id, map, mapChildren, nextId, nodeName, numberedTreeString, otherCopyArgs, sameInstance, simpleString, stringArgs, toString, transform, transformChildrenDown, transformChildrenUp, transformDown, transformUp, treeString, withNewChildren
Methods inherited from interface HashJoin: buildKeys, buildPlan, buildSideKeyGenerator, joinIterators, output, streamedKeys, streamedPlan, streamSideKeyGenerator
Methods inherited from interface scala.Product: productArity, productElement, productIterator, productPrefix
Methods inherited from interface org.apache.spark.Logging: initialized, initializeIfNecessary, initializeLogging, initLock, isTraceEnabled, log_, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning
public scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> leftKeys()
public scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> rightKeys()
public org.apache.spark.sql.catalyst.plans.physical.Partitioning outputPartitioning()
Specifies how data is partitioned across different nodes in the cluster.
Overrides:
outputPartitioning in class SparkPlan
public scala.collection.immutable.List<org.apache.spark.sql.catalyst.plans.physical.ClusteredDistribution> requiredChildDistribution()
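The List<ClusteredDistribution> return type above suggests the operator asks for its children's rows to be clustered on the corresponding join keys, which is what triggers the shuffle that gives this join its name. The sketch below is a hedged illustration of that contract, not the confirmed Spark source; the trait name and the choice of reusing the left child's partitioning for outputPartitioning() are assumptions:

```scala
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.plans.physical.{ClusteredDistribution, Partitioning}
import org.apache.spark.sql.execution.SparkPlan

// Hedged sketch of how a shuffled hash join could state its distribution
// requirements: each child must be clustered on its own join keys, so the
// planner shuffles both inputs onto matching partitions before execute() runs.
trait ShuffledJoinContractSketch {
  def leftKeys: Seq[Expression]
  def rightKeys: Seq[Expression]
  def left: SparkPlan

  def requiredChildDistribution: List[ClusteredDistribution] =
    ClusteredDistribution(leftKeys) :: ClusteredDistribution(rightKeys) :: Nil

  // Assumption for illustration: report the left child's partitioning as the output's.
  def outputPartitioning: Partitioning = left.outputPartitioning
}
```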