weixin_43734179 (天行见地物)
asked 2018-11-19 01:51

Why does using the SparkContext inside a map operator throw java.io.NotSerializableException: org.apache.spark.SparkContext?

val receiverStream: ReceiverInputDStream[String] = RabbitMQUtils.createStream[String](...)
receiverStream.print()

receiverStream.map(value => {
  //@transient val sc = spark.sparkContext
  val jsonS = JSON.parseFull(value)
  val mapjson: Map[String, String] = regJson(jsonS)
  // Each field is unwrapped from its Option by stripping the "Some(...)" wrapper as a string.
  val alarmContent = mapjson.get("alarmContent").toString.replace("Some(", "").replace(")", "")
  val alarmEventId = mapjson.get("alarmEventId").toString.replace("Some(", "").replace(")", "")
  val alarmLevel = mapjson.get("alarmLevel").toString.replace("Some(", "").replace(")", "")
  val alarmType = mapjson.get("alarmType").toString.replace("Some(", "").replace(")", "")
  val buildingId = mapjson.get("buildingId").toString.replace("Some(", "").replace(")", "")
  val chargesCode = mapjson.get("chargesCode").toString.replace("Some(", "").replace(")", "")
  val createDate = mapjson.get("createDate").toString.replace("Some(", "").replace(")", "").toDouble
  val delFlag = mapjson.get("delFlag").toString.replace("Some(", "").replace(")", "")
  val deviceId = mapjson.get("deviceId").toString.replace("Some(", "").replace(")", "")
  val happenTime = mapjson.get("happenTime").toString.replace("Some(", "").replace(")", "").toDouble
  val isNewRecord = mapjson.get("isNewRecord").toString.replace("Some(", "").replace(")", "").toBoolean
  val page = mapjson.get("page").toString.replace("Some(", "").replace(")", "")
  val producerCode = mapjson.get("producerCode").toString.replace("Some(", "").replace(")", "")
  val sqlMap = mapjson.get("sqlMap").toString.replace("Some(", "").replace(")", "")
  println(alarmEventId)
  val strings: Apple = Apple(alarmContent, alarmEventId, alarmLevel,
    alarmType, buildingId, chargesCode, createDate, delFlag,
    deviceId, happenTime, isNewRecord, page, producerCode, sqlMap)
  val apples: Seq[Apple] = Seq(strings)
  //println("got here!")
  println("logs:" + apples)
  // val appRdd: RDD[Apple] = sc.makeRDD(apples)
  /* value1.foreachPartition(iter => {
    import spark.implicits._
    val frameDF: DataFrame = value1.toDF()
    frameDF.createTempView("t_1")
    frameDF.show()
  }) */
  val value1: RDD[Apple] = sc.parallelize(apples)  // sc is the driver-side SparkContext; capturing it here is what fails
  import spark.implicits._
  val frameDF: DataFrame = value1.toDF()
  frameDF.createTempView("t_1")
  frameDF.show()
}).print()
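
The failing call is sc.parallelize(apples): the function passed to map is serialized and shipped to the executors, and it drags the captured driver-side sc along with it, which the closure cleaner rejects because SparkContext is not serializable. The usual fix is to keep only per-record work inside map and move the RDD/DataFrame work into foreachRDD, whose body runs on the driver, where each batch already is an RDD. A minimal sketch under those assumptions (parseApple is a hypothetical pure helper standing in for the JSON-unpacking above):

import org.apache.spark.sql.SparkSession

// Per-record parsing is pure and serializes fine, so it can stay in map.
// parseApple is a hypothetical helper wrapping the JSON-to-Apple logic above.
val appleStream = receiverStream.map(value => parseApple(value))

appleStream.foreachRDD { rdd =>
  // foreachRDD runs this block on the driver once per batch, so driver-side
  // objects are safe here and there is no need for sc.parallelize at all.
  val spark = SparkSession.builder().config(rdd.sparkContext.getConf).getOrCreate()
  import spark.implicits._
  val frameDF = rdd.toDF()
  // createTempView would throw on the second batch because the view already exists.
  frameDF.createOrReplaceTempView("t_1")
  frameDF.show()
}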

1 reply

  • weixin_43734179 (天行见地物) replied 2018-11-19 01:55

    Error message: Exception in thread "main" org.apache.spark.SparkException: Task not serializable
    at org.apache.spark.util.ClosureCleaner$.ensureSerializable(ClosureCleaner.scala:298)
    at org.apache.spark.util.ClosureCleaner$.org$apache$spark$util$ClosureCleaner$$clean(ClosureCleaner.scala:288)
    at org.apache.spark.util.ClosureCleaner$.clean(ClosureCleaner.scala:108)
    at org.apache.spark.SparkContext.clean(SparkContext.scala:2039)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$map$1.apply(DStream.scala:546)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$map$1.apply(DStream.scala:546)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.SparkContext.withScope(SparkContext.scala:679)
    at org.apache.spark.streaming.StreamingContext.withScope(StreamingContext.scala:264)
    at org.apache.spark.streaming.dstream.DStream.map(DStream.scala:545)
    at example.RabbitMQ2Spark$.main(RabbitMQ2Spark.scala:54)
    at example.RabbitMQ2Spark.main(RabbitMQ2Spark.scala)
    Caused by: java.io.NotSerializableException: org.apache.spark.SparkContext
    Serialization stack:
    - object not serializable (class: org.apache.spark.SparkContext, value: org.apache.spark.SparkContext@185f7840)
    - field (class: example.RabbitMQ2Spark$$anonfun$main$1, name: sc$1, type: class org.apache.spark.SparkContext)
    - object (class example.RabbitMQ2Spark$$anonfun$main$1, )
    at org.apache.spark.serializer.SerializationDebugger$.improveException(SerializationDebugger.scala:40)
    at org.apache.spark.serializer.JavaSerializationStream.writeObject(JavaSerializer.scala:46)
    at org.apache.spark.serializer.JavaSerializerInstance.serialize(JavaSerializer.scala:100)
    at org.apache.spark.util.ClosureCleaner$.ensureSerializable(ClosureCleaner.scala:295)
    ... 12 more
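
The "Serialization stack" section pinpoints the capture: the map closure example.RabbitMQ2Spark$$anonfun$main$1 holds a field sc$1 of type SparkContext, i.e. the closure captured the outer sc variable. The commented-out @transient attempt inside the closure cannot help, since @transient only marks fields of an object being serialized; it does not remove the captured outer reference. When a per-batch RDD-to-RDD step is all that is needed, transform is the other standard hook whose function also runs on the driver; a sketch under the same assumptions as above (appleStream from the earlier sketch, alarmLevel a String field of the Apple case class):

// Like foreachRDD, the function given to transform executes on the driver at
// each batch interval, so referencing driver-side objects there is safe; the
// RDD operations it builds still run on the executors as usual.
val nonEmptyAlarms = appleStream.transform(rdd => rdd.filter(_.alarmLevel.nonEmpty))
nonEmptyAlarms.print()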

