Get a value by key from a JSON array
import org.apache.spark.sql.Row
import org.apache.spark.sql.types._
import org.apache.spark.sql.functions._

// Each line of the input text file is expected to hold one JSON array of {key, value} objects
val df1 = spark.read.format("text").load("file path")
// Schema of one line: an array of {key, value} string pairs
val schema = ArrayType(StructType(Array(
  StructField("key", StringType),
  StructField("value", StringType)
)))
// UDF that collapses the parsed array of {key, value} structs into a single Map column
val arrayToMap = udf[Map[String, String], Seq[Row]] {
  array => array.map { case Row(key: String, value: String) => (key, value) }.toMap
}
val dfJSON = df1
  .withColumn("jsonData", from_json(col("value"), schema))     // parse the JSON array of structs
  .select("jsonData")
  .withColumn("address", arrayToMap(col("jsonData")))           // array of structs -> map
  .withColumn("city", when(col("address.city").isNotNull, col("address.city")).otherwise(lit("")))
  .withColumn("street", when(col("address.street").isNotNull, col("address.street")).otherwise(lit("")))
dfJSON.printSchema()
dfJSON.show(false)
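For context, the UDF assumes each input line looks roughly like [{"key":"city","value":"Berlin"},{"key":"street","value":"Main St"}] (this sample line is an assumption, not from the original): from_json parses it into an array of structs, arrayToMap turns that into a Map, and the address.city / address.street lookups return null for missing keys, which the when/otherwise calls replace with empty strings.

As a side note, on Spark 2.4+ the same collapse into a map can be done without a UDF via the built-in map_from_entries; a minimal sketch under the same assumed input format and schema:

// Alternative to the UDF (Spark 2.4+): map_from_entries builds the map
// directly from the parsed array of {key, value} structs.
val dfBuiltin = df1
  .withColumn("jsonData", from_json(col("value"), schema))
  .withColumn("address", map_from_entries(col("jsonData")))
  .withColumn("city", coalesce(col("address.city"), lit("")))    // empty string when the key is missing
  .withColumn("street", coalesce(col("address.street"), lit("")))

dfBuiltin.show(false)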