Spark Structured Streaming + Kafka + JSON


    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-sql-kafka-0-10_2.11</artifactId>
        <version>2.1.1</version>
    </dependency>


    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-sql_2.11</artifactId>
        <version>2.1.1</version>
    </dependency>

import org.apache.spark.sql.SparkSession

/**
 * Demo: consume JSON messages from a Kafka topic with Spark Structured
 * Streaming and project fields out of the JSON payload with
 * `get_json_object`, printing the result to the console.
 */
object MyTest {
  def main(args: Array[String]): Unit = {
    // Local single-threaded session; suitable for the demo only.
    val spark = SparkSession.builder()
      .appName("test")
      .master("local")
      .getOrCreate()
    spark.sparkContext.setLogLevel("WARN")

    import spark.implicits._

    // Subscribe to the "userInfo" topic, replaying from the earliest offset.
    val df = spark.readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", "10.1.10.97:9092")
      .option("startingOffsets", "earliest")
      .option("subscribe", "userInfo")
      .load()

    df.printSchema()

    // Kafka delivers `value` as binary; cast it to a UTF-8 string so the
    // JSON functions can operate on it.
    val lines = df.selectExpr("CAST(value AS STRING)")
      .as[String]

    lines.createOrReplaceTempView("userInfo")

    // FIX: the JSON path must be "$.field". The original used "\$.field";
    // a triple-quoted Scala string does not process escapes, so the literal
    // backslash made the path invalid and get_json_object returned NULL for
    // every row.
    val querySql =
      """
        |select
        |get_json_object(VALUE,'$.address') as address,
        |get_json_object(VALUE,'$.uname') as uname,
        |get_json_object(VALUE,'$.userId') as userId
        |from userInfo
        |""".stripMargin

    // Run the unbounded query, emitting updated rows to the console.
    // awaitTermination() blocks the driver until the query is stopped.
    spark.sql(querySql)
      .writeStream
      .outputMode("update")
      .format("console")
      .start()
      .awaitTermination()

    // Alternative: print the raw casted lines instead of the projected view.
    /*
        lines.printSchema()
        val query: StreamingQuery = lines.writeStream
          .outputMode("update")
          .format("console")
          .start()

        query.awaitTermination()*/

    // Alternative: keep both key and value as a typed (String, String) pair.
    /*    df.selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")
          .as[(String, String)]
          .writeStream
          .outputMode("update")
          .format("console")
          .start()
          .awaitTermination()*/

  }
}

控制台输出: 

 Spark Structured Streaming + Kafka +Json_第1张图片

你可能感兴趣的:(kafka,spark,spark,kafka,json)