Spark源码——RDD中join操作的宽窄依赖探究
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
object JoinDemo {
def main(args: Array[String]): Unit = {
val conf = new SparkConf().setAppName(this.getClass.getCanonicalName.init).setMaster("local[*]")
val sc =
原创 · 2021-06-16 15:39:02 · 430 阅读 · 0 评论