// SparkContext can convert a local collection into an RDD via parallelize.
/**
 * Entry point: parallelizes a local collection into an RDD and prints its sum.
 *
 * @param args command-line arguments (unused)
 */
def main(args: Array[String]): Unit = {
  // setMaster/setAppName are the idiomatic equivalents of
  // conf.set("spark.master", ...) / conf.set("spark.app.name", ...).
  val conf = new SparkConf()
    .setMaster("local")
    .setAppName("spark demo")
  val sc = new SparkContext(conf)
  try {
    val list = List(1, 2, 3, 4, 5, 6)
    val input = sc.parallelize(list)
    // RDD.sum() returns a Double, so this prints 21.0.
    val sum = input.sum()
    println(sum)
  } finally {
    // Always stop the SparkContext so executor resources are released
    // and the JVM can exit cleanly, even if the job above throws.
    sc.stop()
  }
}