Commit 31d52b5c by 杜发飞

Merge remote-tracking branch 'origin/master'

parents 9662d60b 34f0acb4
......@@ -6,7 +6,7 @@
<groupId>groupId</groupId>
<artifactId>operating-vehicle</artifactId>
-<version>1.0-SNAPSHOT</version>
+<version>1.3-SNAPSHOT</version>
<repositories>
<repository>
......@@ -27,6 +27,16 @@
<dependencies>
+<dependency>
+<groupId>com.ctrip.framework.apollo</groupId>
+<artifactId>apollo-client</artifactId>
+<version>1.3.0</version>
+</dependency>
+<dependency>
+<groupId>com.ctrip.framework.apollo</groupId>
+<artifactId>apollo-core</artifactId>
+<version>1.3.0</version>
+</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
......
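The two new artifacts pull in Ctrip's Apollo configuration client. A minimal sketch of reading one of this project's keys through apollo-client, assuming the usual app.id / apollo.meta bootstrap settings are supplied externally (e.g. via system properties); the object name and default value are illustrative:

import com.ctrip.framework.apollo.{Config, ConfigService}

object ApolloConfigSketch {
  def main(args: Array[String]): Unit = {
    // Fetch the application namespace; requires app.id and the Apollo meta server to be configured.
    val config: Config = ConfigService.getAppConfig
    // Read a key with a fallback default, mirroring a key from conf.properties.
    val bootstrap = config.getProperty("kafka.bootstrap.servers", "localhost:9092")
    println(s"kafka.bootstrap.servers = $bootstrap")
  }
}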
......@@ -4,7 +4,9 @@ kafka.bootstrap.servers=39.100.49.76:9092
kafka.zookerper.servers=10.197.236.154:2181
#,10.197.236.169:2181,10.197.236.184:2181/kafka
#kafka.zookerper.servers=172.26.111.183:2181,172.26.111.178:2181,172.26.111.186:2181/tbd_kafka
#kafka.zookerper.servers=10.197.236.211:2181
window.time=5
compact.kafka.topic=transport_basedata_operation,tbd-transport-data-gathering
application.kafka.topic=tbd-transport-data-gathering
basicsInfo.kafka.topic=transport_basedata_operation
hive.group.id=hive
......@@ -16,10 +18,8 @@ hive.url=jdbc:hive2://hadoop02:10000/ods
hive.username=hive
hive.password=hive
areaCodeAndAddress.Url=http://10.197.236.100:40612/bcpbase/geocode/regeo
warnTypes=0x0064,0x0065,0x0066
-hive.unknown.table=unknown_mes
+hive.unknown.table=KAFKA_UNKNOWN_I
### Link management messages
hive.UP_CONNECT_REQ.table=KAFKA_UP_CONNECT_I
hive.UP_DISCONNECT_REQ.table=KAFKA_UP_DISCONNECT_INFORM_I
......@@ -49,8 +49,8 @@ hive.UP_PREVENTION_EXG_MSG_DEVICE_PARAM.table=KAFKA_UP_PREVENTION_EXG_MSG_DEVICE
### Intelligent video alarm interaction messages
hive.UP_PREVENTION_MSG_FILE_COMPLETE.table=KAFKA_UP_PREVENTION_MSG_FILE_COMPLETE_I
# Basic info Hive tables
-hive.KAFKA_base_into_platform_info.table=KAFKA_base_into_platform_info_I
-hive.KAFKA_base_into_enterprise_info.table=KAFKA_base_into_enterprise_info_I
-hive.KAFKA_base_into_vehicle_info.table=KAFKA_base_into_vehicle_info_I
-hive.KAFKA_base_data_display_config.table=KAFKA_base_data_display_config_I
-hive.KAFKA_base_warning_type.table=KAFKA_base_warning_type_I
\ No newline at end of file
+hive.KAFKA_base_into_platform_info.table=kafka_base_into_platform_info_i
+hive.KAFKA_base_into_enterprise_info.table=kafka_base_into_enterprise_info_i
+hive.KAFKA_base_into_vehicle_info.table=kafka_base_into_vehicle_info_i
+hive.KAFKA_base_data_display_config.table=kafka_base_data_display_config_i
+hive.KAFKA_base_warning_type.table=kafka_base_warning_type_i
\ No newline at end of file
......@@ -6,10 +6,10 @@ import com.hikcreate.data.util.Config
object Const {
  Config.load("conf.properties")
  val windowTime: Int = Config.getInt("window.time")
  val bootstrap: String = Config.getString("kafka.bootstrap.servers")
  val zkKafka: String = Config.getString("kafka.zookerper.servers")
  val compactTopic: Array[String] = Config.getString("compact.kafka.topic").split(",")
  val applicationTopic: Array[String] = Config.getString("application.kafka.topic").split(",")
  val basicsInfoTopic: Array[String] = Config.getString("basicsInfo.kafka.topic").split(",")
  val hiveGroupId: String = Config.getString("hive.group.id")
......@@ -23,6 +23,7 @@ object Const {
  val hivePassword: String = Config.getString("hive.password")
  val areaCodeAndAddressUrl: String = Config.getString("areaCodeAndAddress.Url")
  val unKnownTable: String = Config.getString("hive.unknown.table")
  val hdfsUrl: String = Config.getString("hdfs.url")
  val warnTypes: Array[String] = Config.getString("warnTypes").split(",")
......
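For context, these Const values are the knobs of a Spark Streaming Kafka pipeline. A hedged sketch of how they would typically be wired together, using the literal values from conf.properties above; the object name, batch wiring, and offset settings are illustrative, not part of this commit:

import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}

object StreamWiringSketch {
  def main(args: Array[String]): Unit = {
    // window.time=5 in conf.properties, used here as the batch interval.
    val ssc = new StreamingContext(new SparkConf().setAppName("operating-vehicle"), Seconds(5))
    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "39.100.49.76:9092", // kafka.bootstrap.servers
      "group.id" -> "hive",                       // hive.group.id
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer])
    // Subscribe to the application topic declared in conf.properties.
    val stream = KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Seq("tbd-transport-data-gathering"), kafkaParams))
    stream.map(_.value).print()
    ssc.start()
    ssc.awaitTermination()
  }
}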
package com.hikcreate.data.util
import java.io.{FileSystem => _, _}
import org.apache.hadoop.fs._
import scala.collection.mutable.ListBuffer
object HDFSHelper {
  // Overloaded predicates and creators, accepting either a String or a Path.
  def isDir(hdfs: FileSystem, name: String): Boolean = hdfs.isDirectory(new Path(name))
  def isDir(hdfs: FileSystem, name: Path): Boolean = hdfs.isDirectory(name)
  def isFile(hdfs: FileSystem, name: String): Boolean = hdfs.isFile(new Path(name))
  def isFile(hdfs: FileSystem, name: Path): Boolean = hdfs.isFile(name)
  def createFile(hdfs: FileSystem, name: String): Boolean = hdfs.createNewFile(new Path(name))
  def createFile(hdfs: FileSystem, name: Path): Boolean = hdfs.createNewFile(name)
  def createFolder(hdfs: FileSystem, name: String): Boolean = hdfs.mkdirs(new Path(name))
  def createFolder(hdfs: FileSystem, name: Path): Boolean = hdfs.mkdirs(name)
  def exists(hdfs: FileSystem, name: String): Boolean = hdfs.exists(new Path(name))
  def exists(hdfs: FileSystem, name: Path): Boolean = hdfs.exists(name)
  def transport(inputStream: InputStream, outputStream: OutputStream): Unit = {
    val buffer = new Array[Byte](64 * 1000)
    var len = inputStream.read(buffer)
    while (len != -1) {
      // Write exactly the bytes read; the original `len - 1` silently dropped the last byte of every chunk.
      outputStream.write(buffer, 0, len)
      len = inputStream.read(buffer)
    }
    outputStream.flush()
    inputStream.close()
    outputStream.close()
  }
  class MyPathFilter extends PathFilter {
    override def accept(path: Path): Boolean = true
  }
  /**
   * Create a target local file, creating parent folders if necessary.
   */
  def createLocalFile(fullName: String): File = {
    val target: File = new File(fullName)
    if (!target.exists) {
      val index = fullName.lastIndexOf(File.separator)
      val parentFullName = fullName.substring(0, index)
      val parent: File = new File(parentFullName)
      // mkdirs creates the whole missing chain; the original's `else if (!parent.isDirectory) parent.mkdir`
      // branch could never succeed (a non-directory already occupied the path) and has been dropped.
      if (!parent.exists)
        parent.mkdirs
      target.createNewFile
    }
    target
  }
  /**
   * Delete a file or directory in HDFS.
   * @return true: success, false: failed
   */
  def deleteFile(hdfs: FileSystem, path: String): Boolean = {
    if (isDir(hdfs, path))
      hdfs.delete(new Path(path), true) // true: delete directory contents recursively
    else
      hdfs.delete(new Path(path), false)
  }
  /**
   * Collect the full names of all file descendants of an HDFS dir; directories themselves are not included.
   * @param fullName the HDFS dir's full name
   */
  def listChildren(hdfs: FileSystem, fullName: String, holder: ListBuffer[String]): ListBuffer[String] = {
    val filesStatus = hdfs.listStatus(new Path(fullName), new MyPathFilter)
    for (status <- filesStatus) {
      val filePath: Path = status.getPath
      if (isFile(hdfs, filePath))
        holder += filePath.toString
      else
        listChildren(hdfs, filePath.toString, holder)
    }
    holder
  }
  def copyFile(hdfs: FileSystem, source: String, target: String): Unit = {
    val sourcePath = new Path(source)
    val targetPath = new Path(target)
    if (!exists(hdfs, targetPath))
      createFile(hdfs, targetPath)
    val inputStream: FSDataInputStream = hdfs.open(sourcePath)
    val outputStream: FSDataOutputStream = hdfs.create(targetPath)
    transport(inputStream, outputStream)
  }

  def copyFolder(hdfs: FileSystem, sourceFolder: String, targetFolder: String): Unit = {
    val holder: ListBuffer[String] = new ListBuffer[String]
    val children: List[String] = listChildren(hdfs, sourceFolder, holder).toList
    for (child <- children)
      copyFile(hdfs, child, child.replaceFirst(sourceFolder, targetFolder))
  }

  def copyFileFromLocal(hdfs: FileSystem, localSource: String, hdfsTarget: String): Unit = {
    val targetPath = new Path(hdfsTarget)
    if (!exists(hdfs, targetPath))
      createFile(hdfs, targetPath)
    val inputStream: FileInputStream = new FileInputStream(localSource)
    val outputStream: FSDataOutputStream = hdfs.create(targetPath)
    transport(inputStream, outputStream)
  }

  def copyFileToLocal(hdfs: FileSystem, hdfsSource: String, localTarget: String): Unit = {
    val localFile: File = createLocalFile(localTarget)
    val inputStream: FSDataInputStream = hdfs.open(new Path(hdfsSource))
    val outputStream: FileOutputStream = new FileOutputStream(localFile)
    transport(inputStream, outputStream)
  }

  def copyFolderFromLocal(hdfs: FileSystem, localSource: String, hdfsTarget: String): Unit = {
    val localFolder: File = new File(localSource)
    val allChildren: Array[File] = localFolder.listFiles
    for (child <- allChildren) {
      val fullName = child.getAbsolutePath
      val nameExcludeSource: String = fullName.substring(localSource.length)
      val targetFileFullName: String = hdfsTarget + Path.SEPARATOR + nameExcludeSource
      if (child.isFile)
        copyFileFromLocal(hdfs, fullName, targetFileFullName)
      else
        copyFolderFromLocal(hdfs, fullName, targetFileFullName)
    }
  }

  def copyFolderToLocal(hdfs: FileSystem, hdfsSource: String, localTarget: String): Unit = {
    val holder: ListBuffer[String] = new ListBuffer[String]
    val children: List[String] = listChildren(hdfs, hdfsSource, holder).toList
    val hdfsSourceFullName = hdfs.getFileStatus(new Path(hdfsSource)).getPath.toString
    val index = hdfsSourceFullName.length
    for (child <- children) {
      val nameExcludeSource: String = child.substring(index + 1)
      val targetFileFullName: String = localTarget + File.separator + nameExcludeSource
      copyFileToLocal(hdfs, child, targetFileFullName)
    }
  }
}
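A short usage sketch for HDFSHelper. The FileSystem URI below is a placeholder (in the project it would come from the new hdfs.url key read into Const.hdfsUrl), and the paths are illustrative:

import java.net.URI
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem

object HDFSHelperDemo {
  def main(args: Array[String]): Unit = {
    // Placeholder URI; the project reads this from conf.properties (hdfs.url -> Const.hdfsUrl).
    val hdfs = FileSystem.get(new URI("hdfs://hadoop02:8020"), new Configuration())
    if (!HDFSHelper.exists(hdfs, "/tmp/demo"))
      HDFSHelper.createFolder(hdfs, "/tmp/demo")
    HDFSHelper.copyFileFromLocal(hdfs, "/tmp/local.txt", "/tmp/demo/remote.txt")
    hdfs.close()
  }
}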
......@@ -73,10 +73,10 @@ object Tools extends Logging{
      arr.add(lonAndLat)
    }
    json.put("locations", arr)
-   //val startTime = DateTime.now()
+   val startTime = DateTime.now()
    val response = Http(Const.areaCodeAndAddressUrl).postData(json.toJSONString).header("content-type", "application/json").asString
-   //val endTime = DateTime.now()
-   //println("HTTP request time: " + new Duration(startTime, endTime).getMillis / 1000)
+   val endTime = DateTime.now()
+   // println("lon/lat list size: " + buffer.size + " ===> http response time: " + new Duration(startTime, endTime).getMillis)
    val body = JSON.parseObject(response.body)
    val items = body.getJSONObject("result").getJSONArray("regeoItems")
    (0 until items.size()).map { index =>
......
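For reference, a sketch of the regeo call Tools makes: it POSTs a JSON body with a "locations" array to areaCodeAndAddress.Url and reads result.regeoItems from the response. Only the locations/result/regeoItems names appear in the code above; the longitude/latitude field names inside each location object are an assumption:

import com.alibaba.fastjson.JSON
import scalaj.http.Http

object RegeoSketch {
  def main(args: Array[String]): Unit = {
    // Endpoint value taken from conf.properties (areaCodeAndAddress.Url).
    val url = "http://10.197.236.100:40612/bcpbase/geocode/regeo"
    // Field names inside each location object are assumed for illustration.
    val payload = """{"locations":[{"longitude":106.55,"latitude":29.56}]}"""
    val response = Http(url).postData(payload).header("content-type", "application/json").asString
    val items = JSON.parseObject(response.body).getJSONObject("result").getJSONArray("regeoItems")
    (0 until items.size()).foreach(i => println(items.getJSONObject(i)))
  }
}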