杜发飞 / operating-vehicle

Commit f86c96dc authored Oct 30, 2019 by 王建成
update
parent 26a16293
Showing 8 changed files with 312 additions and 32 deletions (+312 −32):

  pom.xml                                                       +1   −1
  src/main/resources/conf.properties                            +1   −1
  src/main/scala/com/hikcreate/data/common/Sparking.scala       +2   −2
  src/main/scala/com/hikcreate/data/constant/Const.scala        +1   −1
  src/main/scala/com/hikcreate/data/sync/SyncHive.scala         +140 −26
  src/main/scala/com/hikcreate/data/sync/SysncHiveBatch.scala   +0   −0
  src/main/scala/com/hikcreate/data/util/HDFSHelper.scala       +166 −0
  src/main/scala/com/hikcreate/data/util/Tools.scala            +1   −1
pom.xml
```diff
@@ -6,7 +6,7 @@
   <groupId>groupId</groupId>
   <artifactId>operating-vehicle</artifactId>
-  <version>1.0-SNAPSHOT</version>
+  <version>1.2-SNAPSHOT</version>
   <repositories>
     <repository>
   ...
```
src/main/resources/conf.properties
```diff
 kafka.bootstrap.servers=39.100.49.76:9092
 #kafka.zookerper.servers=172.26.111.183:2181,172.26.111.178:2181,172.26.111.186:2181/tbd_kafka
 kafka.zookerper.servers=10.197.236.211:2181
+window.time=5
 application.kafka.topic=tbd-transport-data-gathering
 basicsInfo.kafka.topic=transport_basedata_operation
 hive.group.id=hive
 ...
```
src/main/scala/com/hikcreate/data/common/Sparking.scala
```diff
@@ -15,8 +15,8 @@ trait Sparking {
     .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
     .set("hive.exec.dynamici.partition", "true")
     .set("hive.exec.dynamic.partition.mode", "nonstrict")
-    //.setAppName("test")
-    //.setMaster("local[*]")
+    .setAppName("syshive_local")
+    .setMaster("local[*]")
   def getKafkaParams(servers: String, groupId: String): Map[String, Object] = {
     Map[String, Object](
   ...
```
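Note that the context key `hive.exec.dynamici.partition` is almost certainly a typo for `hive.exec.dynamic.partition`; Hive ignores unrecognized keys, so the setting may have no effect as written. A minimal sketch (not part of the commit) of the builder chain with the assumed-intended key name:

```scala
import org.apache.spark.SparkConf

object SparkingSketch {
  // Mirrors the SparkConf built in the hunk above; "dynamic" is the
  // assumed-intended spelling of the commit's "dynamici" key.
  val conf: SparkConf = new SparkConf()
    .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    .set("hive.exec.dynamic.partition", "true")
    .set("hive.exec.dynamic.partition.mode", "nonstrict")
    .setAppName("syshive_local")
    .setMaster("local[*]")
}
```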
src/main/scala/com/hikcreate/data/constant/Const.scala
```diff
@@ -6,7 +6,7 @@ import com.hikcreate.data.util.Config
 object Const {
   Config.load("conf.properties")
+  val windowTime: Int = Config.getInt("window.time")
   val bootstrap: String = Config.getString("kafka.bootstrap.servers")
   val zkKafka: String = Config.getString("kafka.zookerper.servers")
   ...
```
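The `Config` helper used here is not included in this commit. A hypothetical minimal implementation, assuming it wraps `java.util.Properties` loaded from the classpath (the method names `load`, `getInt`, and `getString` are taken from the call sites above):

```scala
import java.util.Properties

// Hypothetical sketch of the Config helper; its real source is not in this commit.
object Config {
  private val props = new Properties()

  def load(resource: String): Unit = {
    val in = getClass.getClassLoader.getResourceAsStream(resource)
    try props.load(in) finally in.close()
  }

  def getString(key: String): String = props.getProperty(key)

  def getInt(key: String): Int = props.getProperty(key).trim.toInt
}
```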
src/main/scala/com/hikcreate/data/sync/SyncHive.scala
This diff is collapsed.
src/main/scala/com/hikcreate/data/sync/SysncHiveBatch.scala
new file mode 100644
This diff is collapsed.
src/main/scala/com/hikcreate/data/util/HDFSHelper.scala
new file mode 100644
```scala
package com.hikcreate.data.util

import java.io.{FileSystem => _, _}

import org.apache.hadoop.fs._

import scala.collection.mutable.ListBuffer

object HDFSHelper {

  def isDir(hdfs: FileSystem, name: String): Boolean = {
    hdfs.isDirectory(new Path(name))
  }

  def isDir(hdfs: FileSystem, name: Path): Boolean = {
    hdfs.isDirectory(name)
  }

  def isFile(hdfs: FileSystem, name: String): Boolean = {
    hdfs.isFile(new Path(name))
  }

  def isFile(hdfs: FileSystem, name: Path): Boolean = {
    hdfs.isFile(name)
  }

  def createFile(hdfs: FileSystem, name: String): Boolean = {
    hdfs.createNewFile(new Path(name))
  }

  def createFile(hdfs: FileSystem, name: Path): Boolean = {
    hdfs.createNewFile(name)
  }

  def createFolder(hdfs: FileSystem, name: String): Boolean = {
    hdfs.mkdirs(new Path(name))
  }

  def createFolder(hdfs: FileSystem, name: Path): Boolean = {
    hdfs.mkdirs(name)
  }

  def exists(hdfs: FileSystem, name: String): Boolean = {
    hdfs.exists(new Path(name))
  }

  def exists(hdfs: FileSystem, name: Path): Boolean = {
    hdfs.exists(name)
  }

  def transport(inputStream: InputStream, outputStream: OutputStream): Unit = {
    val buffer = new Array[Byte](64 * 1000)
    var len = inputStream.read(buffer)
    while (len != -1) {
      // Write exactly the len bytes read (the committed code wrote len - 1,
      // which silently drops the last byte of every chunk).
      outputStream.write(buffer, 0, len)
      len = inputStream.read(buffer)
    }
    outputStream.flush()
    inputStream.close()
    outputStream.close()
  }

  class MyPathFilter extends PathFilter {
    override def accept(path: Path): Boolean = true
  }

  /**
   * create a target file and provide the parent folder if necessary
   */
  def createLocalFile(fullName: String): File = {
    val target: File = new File(fullName)
    if (!target.exists) {
      val index = fullName.lastIndexOf(File.separator)
      val parentFullName = fullName.substring(0, index)
      val parent: File = new File(parentFullName)
      if (!parent.exists) parent.mkdirs
      else if (!parent.isDirectory) parent.mkdir
      target.createNewFile
    }
    target
  }

  /**
   * delete a file in hdfs
   * @return true: success, false: failed
   */
  def deleteFile(hdfs: FileSystem, path: String): Boolean = {
    if (isDir(hdfs, path))
      hdfs.delete(new Path(path), true) // true: delete files recursively
    else
      hdfs.delete(new Path(path), false)
  }

  /**
   * get the full names of all file children of an hdfs dir;
   * directory children are recursed into, not returned
   * @param fullName the hdfs dir's full name
   */
  def listChildren(hdfs: FileSystem, fullName: String, holder: ListBuffer[String]): ListBuffer[String] = {
    val filesStatus = hdfs.listStatus(new Path(fullName), new MyPathFilter)
    for (status <- filesStatus) {
      val filePath: Path = status.getPath
      if (isFile(hdfs, filePath)) holder += filePath.toString
      else listChildren(hdfs, filePath.toString, holder)
    }
    holder
  }

  def copyFile(hdfs: FileSystem, source: String, target: String): Unit = {
    val sourcePath = new Path(source)
    val targetPath = new Path(target)
    if (!exists(hdfs, targetPath)) createFile(hdfs, targetPath)
    val inputStream: FSDataInputStream = hdfs.open(sourcePath)
    val outputStream: FSDataOutputStream = hdfs.create(targetPath)
    transport(inputStream, outputStream)
  }

  def copyFolder(hdfs: FileSystem, sourceFolder: String, targetFolder: String): Unit = {
    val holder: ListBuffer[String] = new ListBuffer[String]
    val children: List[String] = listChildren(hdfs, sourceFolder, holder).toList
    for (child <- children)
      copyFile(hdfs, child, child.replaceFirst(sourceFolder, targetFolder))
  }

  def copyFileFromLocal(hdfs: FileSystem, localSource: String, hdfsTarget: String): Unit = {
    val targetPath = new Path(hdfsTarget)
    if (!exists(hdfs, targetPath)) createFile(hdfs, targetPath)
    val inputStream: FileInputStream = new FileInputStream(localSource)
    val outputStream: FSDataOutputStream = hdfs.create(targetPath)
    transport(inputStream, outputStream)
  }

  def copyFileToLocal(hdfs: FileSystem, hdfsSource: String, localTarget: String): Unit = {
    val localFile: File = createLocalFile(localTarget)
    val inputStream: FSDataInputStream = hdfs.open(new Path(hdfsSource))
    val outputStream: FileOutputStream = new FileOutputStream(localFile)
    transport(inputStream, outputStream)
  }

  def copyFolderFromLocal(hdfs: FileSystem, localSource: String, hdfsTarget: String): Unit = {
    val localFolder: File = new File(localSource)
    val allChildren: Array[File] = localFolder.listFiles
    for (child <- allChildren) {
      val fullName = child.getAbsolutePath
      val nameExcludeSource: String = fullName.substring(localSource.length)
      val targetFileFullName: String = hdfsTarget + Path.SEPARATOR + nameExcludeSource
      if (child.isFile) copyFileFromLocal(hdfs, fullName, targetFileFullName)
      else copyFolderFromLocal(hdfs, fullName, targetFileFullName)
    }
  }

  def copyFolderToLocal(hdfs: FileSystem, hdfsSource: String, localTarget: String): Unit = {
    val holder: ListBuffer[String] = new ListBuffer[String]
    val children: List[String] = listChildren(hdfs, hdfsSource, holder).toList
    val hdfsSourceFullName = hdfs.getFileStatus(new Path(hdfsSource)).getPath.toString
    val index = hdfsSourceFullName.length
    for (child <- children) {
      val nameExcludeSource: String = child.substring(index + 1)
      val targetFileFullName: String = localTarget + File.separator + nameExcludeSource
      copyFileToLocal(hdfs, child, targetFileFullName)
    }
  }
}
```
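A short usage sketch (not part of the commit) showing how the helper might be driven; the `fs.defaultFS` address and both paths are placeholders:

```scala
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem

object HDFSHelperDemo {
  def main(args: Array[String]): Unit = {
    val conf = new Configuration()
    conf.set("fs.defaultFS", "hdfs://namenode:8020") // placeholder address
    val hdfs = FileSystem.get(conf)
    // Recursively copy a local folder into HDFS using the helper above.
    HDFSHelper.copyFolderFromLocal(hdfs, "/tmp/local-data", "/user/demo/staging")
    hdfs.close()
  }
}
```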
src/main/scala/com/hikcreate/data/util/Tools.scala
```diff
@@ -76,7 +76,7 @@ object Tools extends Logging{
     val startTime = DateTime.now()
     val response = Http(Const.areaCodeAndAddressUrl).postData(json.toJSONString).header("content-type", "application/json").asString
     val endTime = DateTime.now()
-    println("http请求时间:" + new Duration(startTime, endTime).getMillis)
+    println("经纬度列表size:" + buffer.size + "===》http response time:" + new Duration(startTime, endTime).getMillis)
     val body = JSON.parseObject(response.body)
     val items = body.getJSONObject("result").getJSONArray("regeoItems")
     (0 until items.size()).map { index =>
   ...
```
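This hunk replaces a plain request-latency log (the removed string reads "HTTP request time:") with one that also records the size of the coordinate batch (the added string reads "lat/lng list size: ... ===> http response time:"). The joda-time measurement pattern repeated here could be factored into a small reusable wrapper; a sketch under that assumption (the `Timing` object is not part of the project):

```scala
import org.joda.time.{DateTime, Duration}

object Timing {
  // Times any block with joda-time, matching the Duration-based measurement above.
  def timed[A](label: String)(block: => A): A = {
    val start = DateTime.now()
    val result = block
    println(label + " took " + new Duration(start, DateTime.now()).getMillis + " ms")
    result
  }
}
```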