Commit 26a16293 by 杜发飞

1 parent aa830172
@@ -132,7 +132,7 @@
<build>
<plugins>
<plugin>
<!--<plugin>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
@@ -146,12 +146,17 @@
<includeScope>runtime</includeScope>
<excludeTransitive>false</excludeTransitive>
</configuration>
</plugin>
</plugin>-->
<plugin>
<groupId>org.scala-tools</groupId>
<artifactId>maven-scala-plugin</artifactId>
<version>2.15.2</version>
<executions><execution><goals><goal>compile</goal></goals></execution>
<executions>
<execution>
<goals>
<goal>compile</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
......
@@ -208,7 +208,7 @@ object IgniteClient {
)
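// Clears every existing cache (rather than destroying it) and then closes the Ignite client.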
def main(args: Array[String]): Unit = {
//ignite.cacheNames().asScala.foreach(x=>ignite.destroyCache(x))
ignite.cacheNames().asScala.foreach(x=>ignite.getOrCreateCache(x).clear())
ignite.close()
}
}
package com.hikcreate.data.common
import org.apache.log4j.{Level, Logger}
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
trait Sparking {
// Suppress unnecessary logging; only show the logs we need in the terminal
Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.WARN)
Logger.getLogger("org.apache.kafka.clients.consumer").setLevel(Level.WARN)
val conf: SparkConf = new SparkConf()
.set("spark.serializer","org.apache.spark.serializer.KryoSerializer")
.set("hive.exec.dynamici.partition","true")
.set("hive.exec.dynamic.partition.mode","nonstrict")
.setAppName("test")
.setMaster("local[*]")
//.setAppName("test")
//.setMaster("local[*]")
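// Kafka consumer parameter map for the given bootstrap servers and consumer group.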
def getKafkaParams(servers:String,groupId: String):Map[String,Object] = {
Map[String,Object](
......
@@ -22,7 +22,8 @@ object SyncIgnite extends Sparking with Logging{
val kafkaParams = getKafkaParams(Const.bootstrap, Const.igniteGroupId)
val offsets = zkManager.getBeginOffset(Const.applicationTopic, Const.igniteGroupId)
val offsetRanges = new ArrayBuffer[OffsetRange]()
val ssc = new StreamingContext(conf, Seconds(1))
val ssc = new StreamingContext(conf, Seconds(2))
//ssc.sparkContext.setLogLevel("WARN")
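// Direct Kafka stream on the application topic, resuming from the offsets previously saved in ZooKeeper.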
val inputStream = KafkaUtils.createDirectStream[String, String](ssc,
LocationStrategies.PreferConsistent,
ConsumerStrategies.Subscribe[String, String](Const.applicationTopic,kafkaParams,offsets))
@@ -43,7 +44,6 @@ object SyncIgnite extends Sparking with Logging{
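// Parses each message as JSON and dispatches on its (msgId, dataType) pair.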
def processRow(iterator:Iterator[String]): Unit = {
iterator.foreach{ x =>
try {
println(x)
val json = JSON.parseObject(x)
TableKey(Option(json.getString("msgId")), Option(json.getString("dataType"))) match {
//vehicle location message
@@ -137,7 +137,7 @@ object SyncIgnite extends Sparking with Logging{
val eventType = infoJson.getString("EVENT_TYPE")
val vehicleInfoOptional = Tools.getVehicleInfo(vehicleNo,vehicleColor)
val alarmInfoOptional = Tools.getAlarmInfo(warnType,eventType)
if(alarmInfoOptional.isDefined){//did we match a record in the alarm basic-info table
if(alarmInfoOptional.isDefined){ //did we match a record in the alarm basic-info table
val useNature = vehicleInfoOptional.map(x=>x.getUseNature).getOrElse("no matching vehicle use nature")
val alarmKey = IgniteClient.getBinaryObject(new PrimaryKey(code._1,code._2,code._3,useNature))
//cumulative driving alarm count
@@ -183,7 +183,7 @@ object SyncIgnite extends Sparking with Logging{
case tableKey if tableKey == TableKey(Some("0x1400"),Some("0x1401")) =>
val vehicleNo = json.getString("vehicleNo")
val vehicleColor = json.getString("vehicleColor")
val superVisionId = json.getString("superVisionId")
val superVisionId = json.getString("supervisionId")
val key = IgniteClient.getBinaryObject(new PrimaryKey(vehicleNo,vehicleColor,superVisionId))
val value = new DailyAlarmDeal(true)
if(!IgniteClient.dailyAlarmDealCache.withKeepBinary().putIfAbsent(key,value)){
@@ -193,6 +193,8 @@ object SyncIgnite extends Sparking with Logging{
}
}catch {
case e:Exception =>
println(x)
println(e.getMessage)
e.printStackTrace()
}
}
......
@@ -8,8 +8,11 @@ import com.hikcreate.data.common.Logging
import com.hikcreate.data.constant.Const
import com.hikcreate.ignite.domain.basic.{AlarmTypeInfo, EnterpriseInfo, VehicleInfo}
import org.apache.ignite.cache.query.SqlQuery
import org.joda.time.{DateTime, Duration}
import scalaj.http.Http
import scala.collection.mutable.ArrayBuffer
object Tools extends Logging{
def addLocation(json:JSONObject): Array[JSONObject] = {
@@ -25,25 +28,29 @@ object Tools extends Logging{
}
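// Reverse-geocodes a single point via the areaCodeAndAddressUrl service and returns (formattedAddress, adcode).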
def getAddressAndLocationCode(lon:Double,lat:Double):(String,String) = {
val json = new JSONObject()
val arr = new JSONArray()
val lonAndLat = new JSONObject()
lonAndLat.put("longitude",lon)
lonAndLat.put("latitude",lat)
arr.add(lonAndLat)
json.put("locations",arr)
val response = Http(Const.areaCodeAndAddressUrl).postData(json.toJSONString)
.header("content-type","application/json")//.charset("ISO-8859-1")
.timeout(connTimeoutMs = 8000, readTimeoutMs = 8000)
.asString
if(response.code == 200){
try{
val json = new JSONObject()
val arr = new JSONArray()
val lonAndLat = new JSONObject()
lonAndLat.put("longitude",lon)
lonAndLat.put("latitude",lat)
arr.add(lonAndLat)
json.put("locations",arr)
val starttime = DateTime.now()
val response = Http(Const.areaCodeAndAddressUrl).postData(json.toJSONString)
.header("content-type","application/json")
//.timeout(connTimeoutMs = 1000,readTimeoutMs = 1000)
.asString
val endtime = DateTime.now()
println("http请求时间:"+new Duration(starttime,endtime).getMillis)
val body = JSON.parseObject(response.body)
val item = body.getJSONObject("result").getJSONArray("regeoItems").getJSONObject(0)
val address = item.getString("formattedAddress")
val locationCode = item.getJSONObject("addressComponent").getString("adcode")
(address,locationCode)
}else{
throw new RuntimeException("http请求城市区编码出错")
}catch{
case e:Exception =>
throw new RuntimeException(e)
}
}
@@ -56,6 +63,39 @@ object Tools extends Logging{
(provinceCode,cityCode,areaCode)
}
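// Batch variant: posts all coordinate pairs in one request and returns an (address, adcode) pair for each input point, in order.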
def getAddressAndLocationCodes(buffer:List[(Double,Double)]): IndexedSeq[(String,String)] = {
val json = new JSONObject()
val arr = new JSONArray()
buffer.foreach{x =>
val lonAndLat = new JSONObject()
lonAndLat.put("longitude",x._1)
lonAndLat.put("latitude",x._2)
arr.add(lonAndLat)
}
json.put("locations",arr)
val startTime = DateTime.now()
val response = Http(Const.areaCodeAndAddressUrl).postData(json.toJSONString).header("content-type","application/json").asString
val endTime = DateTime.now()
println("http请求时间:"+new Duration(startTime,endTime).getMillis)
val body = JSON.parseObject(response.body)
val items = body.getJSONObject("result").getJSONArray("regeoItems")
(0 until items.size()).map{ index =>
val item = items.getJSONObject(index)
val address = item.getString("formattedAddress")
val locationCode = item.getJSONObject("addressComponent").getString("adcode")
(address,locationCode)
}
}
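// Splits each 6-digit adcode into its 2-digit province, city and area segments.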
def getLocationCodes(buffer:List[(Double,Double)]): IndexedSeq[(String,String,String)] = {
getAddressAndLocationCodes(buffer).map(x=>x._2).map{ locationCode =>
val provinceCode = locationCode.substring(0,2)
val cityCode = locationCode.substring(2,4)
val areaCode = locationCode.substring(4,6)
(provinceCode,cityCode,areaCode)
}
}
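// Converts a "key:=value;key:=value;..." info string into a JSONObject,
// e.g. "WARN_TYPE:=1;EVENT_TYPE:=2;" -> {"WARN_TYPE":"1","EVENT_TYPE":"2"}
// (the empty trailing pair produced by a final ';' is stripped by the last replace).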
def getInfoContentJsonobj(infoStr:String):JSONObject = {
val jsonStr=("{\""+infoStr.replace(":=","\":\"").replace(";","\",\"")+"\"}").replace(",\"\"}","}")
val jSONObject = JSON.parseObject(jsonStr)
......