Commit 0b820297 by 杜发飞

1 parent 27c4f5c8
@@ -87,6 +87,11 @@
         </dependency>
         <dependency>
             <groupId>org.apache.ignite</groupId>
+            <artifactId>ignite-log4j2</artifactId>
+            <version>${ignite.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.ignite</groupId>
             <artifactId>ignite-spring</artifactId>
             <version>${ignite.version}</version>
         </dependency>
......
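Note: with the new ignite-log4j2 module on the classpath, the node's logging can be routed through Log4j2 by setting a grid logger on the IgniteConfiguration. A minimal sketch; the config/log4j2.xml path and object name are assumptions, not taken from this commit:

import org.apache.ignite.Ignition
import org.apache.ignite.configuration.IgniteConfiguration
import org.apache.ignite.logger.log4j2.Log4J2Logger

// Sketch only: start a node whose logging goes through Log4j2.
// "config/log4j2.xml" is an assumed path, not part of this repository.
object IgniteLog4j2Sketch {
  def main(args: Array[String]): Unit = {
    val cfg = new IgniteConfiguration()
      .setGridLogger(new Log4J2Logger("config/log4j2.xml"))
    val ignite = Ignition.start(cfg)
    ignite.close()
  }
}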
package com.hikcreate.ignite.domain;

import org.apache.ignite.cache.query.annotations.QuerySqlField;

import java.io.Serializable;

public class ErrorMsg implements Serializable {

    private static final long serialVersionUID = 1L;

    @QuerySqlField
    private String data;  // raw message data

    @QuerySqlField
    private String error; // error reason

    public ErrorMsg(String data, String error) {
        this.data = data;
        this.error = error;
    }
}
@@ -4,7 +4,7 @@ import org.apache.ignite.cache.query.annotations.QuerySqlField;
 import java.io.Serializable;

 /**
  * Daily alarm handling
  */
 public class DailyAlarmDeal implements Serializable {
@@ -17,9 +17,6 @@ public class DailyAlarmDeal implements Serializable {
     private String vehicleColor;  // plate color
     @QuerySqlField
-    private String useNature;     // usage nature (e.g. public transit)
-    @QuerySqlField
     private String supervisionId; // alarm supervision ID
     @QuerySqlField
@@ -28,10 +25,9 @@ public class DailyAlarmDeal implements Serializable {
     @QuerySqlField
     private Boolean isDeal;       // whether the alarm has been handled

-    public DailyAlarmDeal(String vehicleNo, String vehicleColor, String useNature, String supervisionId, String warnTime, Boolean isDeal) {
+    public DailyAlarmDeal(String vehicleNo, String vehicleColor, String supervisionId, String warnTime, Boolean isDeal) {
         this.vehicleNo = vehicleNo;
         this.vehicleColor = vehicleColor;
-        this.useNature = useNature;
         this.supervisionId = supervisionId;
         this.warnTime = warnTime;
         this.isDeal = isDeal;
......
 kafka.bootstrap.servers=39.100.49.76:9092
-#kafka.bootstrap.servers=10.197.236.154:9092
-kafka.zookerper.servers=10.197.236.154:2181
-#,10.197.236.169:2181,10.197.236.184:2181/kafka
 #kafka.zookerper.servers=172.26.111.183:2181,172.26.111.178:2181,172.26.111.186:2181/tbd_kafka
+kafka.zookerper.servers=10.197.236.211:2181
 application.kafka.topic=tbd-transport-data-gathering
 basicsInfo.kafka.topic=transport_basedata_operation
 hive.group.id=hive
-ignite.group.id=ignite
+ignite.group.id=ignite3
-basics.group.id=basics
+basics.group.id=basics2
 hive.driver=org.apache.hive.jdbc.HiveDriver
 hive.url=jdbc:hive2://hadoop02:10000/ods
......
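Note: these keys are presumably read into the Const object referenced by the Scala code below. A minimal sketch of such a loader using java.util.Properties; the resource name and object name are illustrative, not from this commit:

import java.util.Properties

// Hypothetical config loader for the properties shown above.
object ConfigSketch {
  private val props = new Properties()
  props.load(getClass.getClassLoader.getResourceAsStream("conf/app.properties")) // assumed resource name

  val kafkaBootstrapServers: String = props.getProperty("kafka.bootstrap.servers")
  val zkServers: String             = props.getProperty("kafka.zookerper.servers")
  val applicationTopic: String      = props.getProperty("application.kafka.topic")
  val basicsInfoTopic: String       = props.getProperty("basicsInfo.kafka.topic")
  val igniteGroupId: String         = props.getProperty("ignite.group.id")  // "ignite3" after this commit
  val basicsGroupId: String         = props.getProperty("basics.group.id")  // "basics2" after this commit
}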
 package com.hikcreate.data.client

 import java.util.concurrent.TimeUnit
-import com.hikcreate.ignite.domain.PrimaryKey
+import com.hikcreate.ignite.domain.{ErrorMsg, PrimaryKey}
 import com.hikcreate.ignite.domain.alarm._
 import com.hikcreate.ignite.domain.alarm.processor.{DailyAlarmDealUpdate, DailyAlarmUpdate}
 import com.hikcreate.ignite.domain.basic._
@@ -11,6 +12,7 @@ import org.apache.ignite.binary.BinaryObject
 import org.apache.ignite.cache.CacheMode
 import org.apache.ignite.configuration.CacheConfiguration
 import org.apache.ignite.{Ignite, IgniteCache, Ignition}
 import scala.collection.JavaConversions.mapAsJavaMap
 import scala.collection.JavaConverters._

 /**
@@ -31,15 +33,6 @@ object IgniteClient {
     binary.build()
   }

   /********************************* basic information caches *********************************/
-  // platform basic information cache
-  lazy val basicPlatformInfo: IgniteCache[Long, PlatformInfo] = ignite.getOrCreateCache(
-    new CacheConfiguration[Long,PlatformInfo]()
-      .setSqlSchema("BasicInfo")
-      .setName("PlatformInfo")
-      .setDataRegionName("500MB_Region")
-      .setCacheMode(CacheMode.REPLICATED)
-      .setIndexedTypes(classOf[Long],classOf[PlatformInfo])
-  )
   // enterprise basic information cache -- enterprise access status
   lazy val basicEnterpriseInfo: IgniteCache[Long, EnterpriseInfo] = ignite.getOrCreateCache(
     new CacheConfiguration[Long,EnterpriseInfo]()
@@ -67,7 +60,6 @@ object IgniteClient {
       .setCacheMode(CacheMode.REPLICATED)
       .setIndexedTypes(classOf[Long],classOf[AlarmTypeInfo])
   )

   /********************************* commercial vehicle monitoring *********************************/
   /**
    * Today's vehicle online status, cumulative mileage, cumulative safe-driving mileage
@@ -207,8 +199,18 @@ object IgniteClient {
       //.setExpiryPolicyFactory(CreatedExpiryPolicy.factoryOf(new Duration(TimeUnit.HOURS,24)))
   )

+  /********************************* error data *********************************/
+  lazy val errorMsgInfo: IgniteCache[String,ErrorMsg] = ignite.getOrCreateCache(
+    new CacheConfiguration[String,ErrorMsg]()
+      .setSqlSchema("Error")
+      .setName("ErrorMsg")
+      .setDataRegionName("500MB_Region")
+      .setCacheMode(CacheMode.REPLICATED)
+      .setIndexedTypes(classOf[String],classOf[ErrorMsg])
+  )
+
   def main(args: Array[String]): Unit = {
-    ignite.cacheNames().asScala.foreach(x=>ignite.getOrCreateCache(x).clear())
+    ignite.cacheNames().asScala.foreach(println(_))
     ignite.close()
   }
 }
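Note: the new errorMsgInfo cache stores ErrorMsg keyed by String, and the @QuerySqlField annotations expose data and error to Ignite SQL. A minimal usage sketch; the key format and the query are assumptions, not taken from this commit:

import com.hikcreate.data.client.IgniteClient
import com.hikcreate.ignite.domain.ErrorMsg
import org.apache.ignite.cache.query.SqlFieldsQuery
import scala.collection.JavaConverters._

// Assumed usage: record a message that failed processing, then list all stored errors via SQL.
object ErrorMsgSketch {
  def main(args: Array[String]): Unit = {
    IgniteClient.errorMsgInfo.put("msg-001", new ErrorMsg("{bad json}", "parse failure")) // key format is illustrative

    val rows = IgniteClient.errorMsgInfo
      .query(new SqlFieldsQuery("select data, error from ErrorMsg"))
      .getAll.asScala
    rows.foreach(r => println(s"data=${r.get(0)}, error=${r.get(1)}"))
  }
}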
@@ -15,8 +15,8 @@ trait Sparking {
     .set("spark.serializer","org.apache.spark.serializer.KryoSerializer")
     .set("hive.exec.dynamici.partition","true")
     .set("hive.exec.dynamic.partition.mode","nonstrict")
-    //.setAppName("test")
-    //.setMaster("local[*]")
+    .setAppName("test")
+    .setMaster("local[*]")

   def getKafkaParams(servers:String,groupId: String):Map[String,Object] = {
     Map[String,Object](
......
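Note: the body of getKafkaParams is collapsed in this diff. For reference, a typical parameter map for the spark-streaming-kafka-0-10 consumer looks like the sketch below; the exact keys and values used in this project are an assumption:

import org.apache.kafka.common.serialization.StringDeserializer

// Hypothetical reconstruction of the collapsed map; the project's actual entries may differ.
object KafkaParamsSketch {
  def apply(servers: String, groupId: String): Map[String, Object] =
    Map[String, Object](
      "bootstrap.servers"  -> servers,
      "key.deserializer"   -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id"           -> groupId,
      "auto.offset.reset"  -> "earliest",                // assumed policy
      "enable.auto.commit" -> (false: java.lang.Boolean) // offsets appear to be tracked via ZkManager elsewhere
    )
}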
@@ -6,10 +6,9 @@ import com.hikcreate.data.client.IgniteClient
 import com.hikcreate.data.common.{Logging, Sparking}
 import com.hikcreate.data.constant.Const
 import com.hikcreate.data.util.{Tools, ZkManager}
-import com.hikcreate.ignite.domain.basic.{EnterpriseInfo, VehicleInfo, PlatformInfo}
+import com.hikcreate.ignite.domain.basic.{AlarmTypeInfo, EnterpriseInfo, VehicleInfo}
 import org.apache.spark.streaming.kafka010._
 import org.apache.spark.streaming.{Seconds, StreamingContext}
 import scala.collection.mutable.ArrayBuffer

 object SyncBasic extends Sparking with Logging{
@@ -20,8 +19,7 @@ object SyncBasic extends Sparking with Logging{
     val offsets = zkManager.getBeginOffset(Const.basicsInfoTopic,Const.basicsGroupId)
     val offsetRanges = new ArrayBuffer[OffsetRange]()
     val ssc = new StreamingContext(conf,Seconds(1))
-    val inputStream = KafkaUtils.createDirectStream[String,String](
-      ssc,
+    val inputStream = KafkaUtils.createDirectStream[String,String](ssc,
       LocationStrategies.PreferConsistent,
       ConsumerStrategies.Subscribe[String,String](Const.basicsInfoTopic,kafkaParams,offsets))
     inputStream.transform{ rdd =>
@@ -41,52 +39,50 @@ object SyncBasic extends Sparking with Logging{
   def processRow(iterator:Iterator[String]): Unit = {
     iterator.foreach{ x =>
       val json = JSON.parseObject(x)
+      println(x)
       val tableName = json.getString("dataType")
       val operation = json.getString("operationType")
       json.remove("dataType")
       json.remove("operationType")
-      val str = JSON.toJSONString(json,SerializerFeature.WriteMapNullValue)
-      if(tableName.equals("baseIntoPlatformInfo")){ // platform
-        operation match {
-          case "add" =>
-            IgniteClient.basicPlatformInfo.withKeepBinary().put(json.getLong("id"),JSON.parseObject(str,classOf[PlatformInfo]))
-          case "update" =>
-            IgniteClient.basicPlatformInfo.withKeepBinary().put(json.getLong("id"),JSON.parseObject(str,classOf[PlatformInfo]))
-          case "delete" =>
-            IgniteClient.basicPlatformInfo.withKeepBinary().remove(json.getLong("id"))
-        }
-      }
       if(tableName.equals("baseIntoEnterpriseInfo")){ // enterprise
+        val str = JSON.toJSONString(json,SerializerFeature.WriteMapNullValue)
         operation match {
           case "add" =>
-            IgniteClient.basicEnterpriseInfo.withKeepBinary().put(json.getLong("id"),JSON.parseObject(str,classOf[EnterpriseInfo]))
+            IgniteClient.basicEnterpriseInfo.withKeepBinary().put(json.getLong("id"),
+              JSON.parseObject(str,classOf[EnterpriseInfo]))
           case "update" =>
-            IgniteClient.basicEnterpriseInfo.withKeepBinary().put(json.getLong("id"),JSON.parseObject(str,classOf[EnterpriseInfo]))
+            IgniteClient.basicEnterpriseInfo.withKeepBinary().put(json.getLong("id"),
+              JSON.parseObject(str,classOf[EnterpriseInfo]))
          case "delete" =>
            IgniteClient.basicEnterpriseInfo.withKeepBinary().remove(json.getLong("id"))
        }
      }
       if(tableName.equals("baseIntoVehicleInfo")){ // vehicle
-        operation match {
-          case "add" =>
         val companyInfo = Tools.getEnterpriseInfo(json.getString("enterpriseCode"))
         if(companyInfo.isDefined){
-          //json.put("province",companyInfo.get.getProvince)
-          //json.put("city",companyInfo.get.getCity)
+          json.put("province",companyInfo.get.getProvince)
+          json.put("city",companyInfo.get.getCity)
+          json.put("area",companyInfo.get.getArea)
         }
-            IgniteClient.basicVehicleInfo.withKeepBinary().put(json.getLong("id"),JSON.parseObject(str,classOf[VehicleInfo]))
+        val str = JSON.toJSONString(json,SerializerFeature.WriteMapNullValue)
+        operation match {
+          case "add" =>
+            IgniteClient.basicVehicleInfo.withKeepBinary().put(json.getLong("id"),
+              JSON.parseObject(str,classOf[VehicleInfo]))
           case "update" =>
-            IgniteClient.basicVehicleInfo.withKeepBinary().put(json.getLong("id"),JSON.parseObject(str,classOf[VehicleInfo]))
+            IgniteClient.basicVehicleInfo.withKeepBinary().put(json.getLong("id"),
+              JSON.parseObject(str,classOf[VehicleInfo]))
           case "delete" =>
            IgniteClient.basicVehicleInfo.withKeepBinary().remove(json.getLong("id"))
        }
      }
-      if(tableName.equals("baseWarningType")){
+      if(tableName.equals("baseWarningType")){ // alarm type
+        val str = JSON.toJSONString(json,SerializerFeature.WriteMapNullValue)
         operation match {
           case "add" =>
-            IgniteClient.basicAlarmTypeInfo.withKeepBinary().put(json.getLong("id"),JSON.parseObject(str,classOf[VehicleInfo]))
+            IgniteClient.basicAlarmTypeInfo.withKeepBinary().put(json.getLong("id"),JSON.parseObject(str,classOf[AlarmTypeInfo]))
           case "update" =>
-            IgniteClient.basicAlarmTypeInfo.withKeepBinary().put(json.getLong("id"),JSON.parseObject(str,classOf[VehicleInfo]))
+            IgniteClient.basicAlarmTypeInfo.withKeepBinary().put(json.getLong("id"),JSON.parseObject(str,classOf[AlarmTypeInfo]))
           case "delete" =>
            IgniteClient.basicAlarmTypeInfo.withKeepBinary().remove(json.getLong("id"))
        }
......
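Note: the rest of the stream wiring is collapsed in this diff. The usual pattern that ties createDirectStream, offsetRanges and ZkManager together is sketched below; it reuses names from this file (inputStream, offsetRanges, zkManager, processRow, Const), relies on the existing org.apache.spark.streaming.kafka010._ import for HasOffsetRanges, and zkManager.saveEndOffset is a hypothetical method name, not confirmed by this commit:

// Fragment-style sketch of the collapsed offset-tracking loop (assumptions noted above).
inputStream.transform { rdd =>
  offsetRanges.clear()
  offsetRanges ++= rdd.asInstanceOf[HasOffsetRanges].offsetRanges // remember where this batch ends
  rdd
}.map(_.value()).foreachRDD { rdd =>
  rdd.foreachPartition(processRow)                                // write records into the Ignite caches
  zkManager.saveEndOffset(offsetRanges, Const.basicsGroupId)      // hypothetical: persist end offsets to ZooKeeper
}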
@@ -36,13 +36,14 @@ object Tools extends Logging{
     lonAndLat.put("latitude",lat)
     arr.add(lonAndLat)
     json.put("locations",arr)
-    val starttime = DateTime.now()
+    println(json.toJSONString)
+    //val starttime = DateTime.now()
     val response = Http(Const.areaCodeAndAddressUrl).postData(json.toJSONString)
       .header("content-type","application/json")
       //.timeout(connTimeoutMs = 1000,readTimeoutMs = 1000)
       .asString
     val endtime = DateTime.now()
-    println("HTTP request time: "+new Duration(starttime,endtime).getMillis)
+    //println("HTTP request time: "+new Duration(starttime,endtime).getMillis)
     val body = JSON.parseObject(response.body)
     val item = body.getJSONObject("result").getJSONArray("regeoItems").getJSONObject(0)
     val address = item.getString("formattedAddress")
@@ -78,6 +79,7 @@ object Tools extends Logging{
     //val endTime = DateTime.now()
     //println("HTTP request time: "+new Duration(startTime,endTime).getMillis/1000)
     val body = JSON.parseObject(response.body)
+    println(response.body)
     val items = body.getJSONObject("result").getJSONArray("regeoItems")
     (0 until items.size()).map{ index =>
       val item = items.getJSONObject(index)
......