package com.hikcreate.data.common

import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf

/** Mixin providing a preconfigured [[SparkConf]] and Kafka consumer parameters
  * for streaming jobs.
  */
trait Sparking {

  // Base Spark configuration shared by jobs mixing in this trait.
  // NOTE(review): appName/master are hard-coded for local runs — override or
  // externalize these before deploying to a real cluster.
  val conf: SparkConf = new SparkConf()
    .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    // FIX: key was misspelled "hive.exec.dynamici.partition" (extra 'i');
    // Hive silently ignores unknown properties, so dynamic partitioning was
    // never actually enabled. Correct key below.
    .set("hive.exec.dynamic.partition", "true")
    .set("hive.exec.dynamic.partition.mode", "nonstrict")
    .setAppName("test")
    .setMaster("local[*]")

  /** Builds the consumer parameter map for a Kafka direct stream.
    *
    * @param servers comma-separated bootstrap servers ("host:port,host:port")
    * @param groupId Kafka consumer group id
    * @return parameters with String deserializers for key and value;
    *         auto-commit is disabled so the job controls offset commits.
    */
  def getKafkaParams(servers: String, groupId: String): Map[String, Object] =
    Map[String, Object](
      "bootstrap.servers"  -> servers,
      "key.deserializer"   -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id"           -> groupId,
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )
}