1. Using the get_json_object function to extract JSON objects one level at a time
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession

object testdf {
  def main(args: Array[String]): Unit = {
    Logger.getLogger("org.apache.spark").setLevel(Level.ERROR)
    val spark = SparkSession.builder().master("local[*]").appName("test app").getOrCreate()
    import spark.implicits._

    // A single-column Dataset[String]; each row holds one raw JSON document.
    val ds = spark.createDataset(Seq("""{"name":"hrr","age":12,"subject":[{"name":"math","level":1}]}"""))
    ds.show(false)
ds prints as:
+-------------------------------------------------------------+
|value |
+-------------------------------------------------------------+
|{"name":"hrr","age":12,"subject":[{"name":"math","level":1}]}|
+-------------------------------------------------------------+
    ds.createOrReplaceTempView("t1")

    // The inner query pulls out the top-level fields; the outer query then
    // drills into the first element of the subject array. Note that
    // get_json_object always returns a string (or null), so age and
    // subject_level come back as strings here.
    val ds_format = spark.sql(
      """
        |select name, age,
        |       get_json_object(subject, '$.name')  as subject_name,
        |       get_json_object(subject, '$.level') as subject_level
        |from (select get_json_object(value, '$.name')       as name,
        |             get_json_object(value, '$.age')        as age,
        |             get_json_object(value, '$.subject[0]') as subject
        |      from t1) t2
      """.stripMargin).toDF("name", "age", "subject_name", "subject_level")

    ds_format.filter("age > 11").show(false) // the string-typed age is implicitly cast for the comparison
ds_format prints as:
+----+---+------------+-------------+
|name|age|subject_name|subject_level|
+----+---+------------+-------------+
|hrr |12 |math |1 |
+----+---+------------+-------------+
    spark.stop()
  }
}
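The same extraction can also be done without the temp view: org.apache.spark.sql.functions.get_json_object takes a Column plus the same JSONPath syntax, and a nested path such as $.subject[0].name reaches into the array element directly, so the two-level subquery above is not required. A minimal sketch of that variant, reusing the ds built above (ds_format2 is a hypothetical name):

import org.apache.spark.sql.functions.get_json_object

val ds_format2 = ds.select(
  get_json_object($"value", "$.name").as("name"),
  get_json_object($"value", "$.age").as("age"),
  // a nested path digs through the array element in one step
  get_json_object($"value", "$.subject[0].name").as("subject_name"),
  get_json_object($"value", "$.subject[0].level").as("subject_level")
)
ds_format2.filter("age > 11").show(false) // same rows as ds_format

Each get_json_object call re-parses the JSON string, so for wide extractions from the same document, json_tuple or from_json with an explicit schema is usually the more economical choice.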