package com

import org.apache.spark.sql.{DataFrame, SparkSession}

case class User(name: String, age: Int)

object DF2RDD {
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession.builder()
      .master("local[*]")
      .appName("DF2RDD")
      .getOrCreate()

    // Read the JSON file into a DataFrame and print the inferred schema
    val df: DataFrame = spark.read.json("E:\\ZJJ_SparkSQL\\demo01\\src\\main\\resources\\users.json")
    df.printSchema()

    // df.rdd yields an RDD[Row]; map each Row into a User.
    // The inferred schema places age at index 0 and name at index 1,
    // hence getLong(0) for age and getString(1) for name.
    val rdd1 = df.rdd.map(row => {
      User(row.getString(1), row.getLong(0).toInt)
    })
    rdd1.collect.foreach(println)

    spark.close()
  }
}
Output:
User(张三,18)
User(李四,15)
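
Accessing Row fields by position depends on the column order of the inferred schema. A small variant, not from the original example, uses Spark's Row.getAs with the field name instead, so the mapping keeps working even if the column order changes. It assumes the same df and a users.json consistent with the output above (one object per line, with "name" and "age" fields), and would be inserted in the same main method after df is defined:

    // Variant sketch (assumption: same df and users.json as above):
    // look up Row fields by name with getAs instead of by position.
    val rdd2 = df.rdd.map(row =>
      User(row.getAs[String]("name"), row.getAs[Long]("age").toInt)
    )
    rdd2.collect.foreach(println)

This prints the same User lines as rdd1, but no longer hard-codes the indices 0 and 1.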
