package com

import org.apache.spark.rdd.RDD
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

object TestRDD {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("TestRDD").setMaster("local")
    val sc = new SparkContext(conf)

    // map: apply a function to every element; here each value is squared
    val rdd1 = sc.makeRDD(List(1, 4, 3, 7, 5))
    val rdd1_1 = rdd1.map { y => y * y }
    val aList = rdd1_1.collect()
    println("map usage is " + aList.mkString(","))

    // filter: keep only the elements that satisfy the predicate
    val rdd2 = sc.makeRDD(Array(1, 4, 3, 7, 5))
    val rdd2_1 = rdd2.filter { x => x < 5 }
    println("filter usage " + rdd2_1.collect().mkString(","))

    sc.stop()
  }
}
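For reference, running this example in local mode should print output along the following lines (the values follow directly from the code above; Spark's own log output is omitted):

map usage is 1,16,9,49,25
filter usage 1,4,3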
For more, see: https://blog.csdn.net/qq_44596980/article/details/93309621