杜发飞 / operating-vehicle / Commits

Commit 9be954d2
Authored Oct 30, 2019 by 王建成
update

Parent: cb8a603c
Showing 2 changed files with 15 additions and 13 deletions (+15 / -13):

  src/main/scala/com/hikcreate/data/sync/SysncHiveBatch.scala   +14 / -12
  src/main/scala/com/hikcreate/data/util/Tools.scala             +1 / -1
src/main/scala/com/hikcreate/data/sync/SysncHiveBatch.scala (view file @ 9be954d2)
@@ -66,6 +66,8 @@ object SysncHiveBatch extends Sparking with Logging {
     }
   }
   def processRow3(x: RDD[(TableKey, Iterable[JSONObject])], ssc: StreamingContext): Unit = {
     println("start process data: " + DateTime.now())
+    DbClient.init(Const.hivePoolName, Const.hiveDriver, Const.hiveUrl, Const.hiveUsername, Const.hivePassword)
+    DbClient.usingDB(Const.hivePoolName) { db =>
       x.foreachPartition { x =>
         x.foreach { x => try {
@@ -75,7 +77,7 @@ object SysncHiveBatch extends Sparking with Logging {
             x._2.foreach { json =>
               jsonArr.append(json)
             }
-            writeUnknown(Const.unKnownTable, jsonArr)
+            writeUnknown(db.conn, Const.unKnownTable, jsonArr)
           } else if (tableKey.msgId == null) { //基础信息 (basic info)
             //x._2.foreach{json=>
             //  writeBaseInfoHive(db.conn,Const.tableMap(tableKey),json)
@@ -154,7 +156,7 @@ object SysncHiveBatch extends Sparking with Logging {
             //writeHiveBatch(db.conn,Const.tableMap(tableKey),jsonArr,"warnTime")
           }
           if (useLess.size > 0 && useLess != null) {
-            writeUnknown(Const.unKnownTable, useLess)
+            writeUnknown(db.conn, Const.unKnownTable, useLess)
           }
         } else { //除了以上几种情况外的消息 (messages other than the cases above)
           var jsonArr = new ArrayBuffer[JSONObject]()
@@ -174,6 +176,9 @@ object SysncHiveBatch extends Sparking with Logging {
           }
         }
       }
     }
   }
   def processRow2(x: Iterator[(TableKey, Iterable[JSONObject])]): Unit = {
     println("start process data: " + DateTime.now())
     DbClient.init(Const.hivePoolName, Const.hiveDriver, Const.hiveUrl, Const.hiveUsername, Const.hivePassword)
@@ -333,19 +338,16 @@ object SysncHiveBatch extends Sparking with Logging {
       stmt.close()
     }
   }
-  def writeUnknown(tableName: String, jsonArr: ArrayBuffer[JSONObject]): Unit = {
+  def writeUnknown(conn: Connection, tableName: String, jsonArr: ArrayBuffer[JSONObject]): Unit = {
     val dateTime = new DateTime().toString("yyyy-MM-dd HH:mm:ss")
     val day = new DateTime().toString("yyyy-MM-dd")
     val results = new StringBuilder()
-    val descTable = sparkSesson.sql(s"desc table ods.$tableName").registerTempTable("descTable")
-    val rdd = sparkSesson.sql("select col_name from descTable").rdd
-    val strings = new ArrayBuffer[String]()
-    val broadcast = sc.broadcast(strings)
-    rdd.coalesce(1, false).foreach { x =>
-      broadcast.value.append(x.getString(0))
-    }
-    broadcast.value.trimEnd(3)
-    broadcast.value.foreach(println(_))
+    val sql =
+      """
+        |select * from
+      """.stripMargin
+    val  = conn.prepareStatement()
     jsonArr.foreach { json =>
       val result = dateTime + "\t" + json.toJSONString + "\t" + day + "\n"
       results.append(result)
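Note on the change above: after this commit, writeUnknown receives the Hive JDBC Connection (the db.conn of the surrounding DbClient.usingDB block) instead of resolving the target table's columns through sparkSesson and a broadcast. The diff does not show the finished SQL or the variable bound to conn.prepareStatement, so the following is only a minimal sketch of how the Connection-based version might look; the insert statement and the three-column layout (insert time, JSON content, day) are assumptions inferred from the tab-separated row built in the existing code, and fastjson is assumed as the JSONObject implementation.

import java.sql.Connection
import scala.collection.mutable.ArrayBuffer
import com.alibaba.fastjson.JSONObject
import org.joda.time.DateTime

object WriteUnknownSketch {
  // Sketch only: the real SQL used after this commit is not visible in the diff.
  def writeUnknown(conn: Connection, tableName: String, jsonArr: ArrayBuffer[JSONObject]): Unit = {
    val dateTime = new DateTime().toString("yyyy-MM-dd HH:mm:ss")
    val day      = new DateTime().toString("yyyy-MM-dd")
    // Hypothetical three STRING columns mirroring the tab-separated row
    // (dateTime \t json \t day) assembled by the original code.
    val sql  = s"insert into table ods.$tableName values (?, ?, ?)"
    val stmt = conn.prepareStatement(sql)
    try {
      jsonArr.foreach { json =>
        stmt.setString(1, dateTime)
        stmt.setString(2, json.toJSONString)
        stmt.setString(3, day)
        stmt.addBatch()
      }
      stmt.executeBatch()
    } finally {
      stmt.close()
    }
  }
}

Call sites then pass the pooled connection exactly as the diff shows: writeUnknown(db.conn, Const.unKnownTable, jsonArr).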
src/main/scala/com/hikcreate/data/util/Tools.scala (view file @ 9be954d2)
@@ -74,7 +74,7 @@ object Tools extends Logging{
       arr.add(lonAndLat)
     }
     json.put("locations", arr)
-    //val startTime = DateTime.now()
+    val startTime = DateTime.now()
     val response = Http(Const.areaCodeAndAddressUrl).postData(json.toJSONString).header("content-type", "application/json").asString
     val endTime = DateTime.now()
     println("经纬度列表size:" + buffer.size + "===》http response time :" + new Duration(startTime, endTime).getMillis)
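The one-line change above uncomments startTime, which the Duration(startTime, endTime) in the println below it depends on, so the HTTP round trip to Const.areaCodeAndAddressUrl is timed again. If this timing pattern were needed in more places, it could be factored into a small helper; the sketch below is hypothetical and not part of the commit, only the joda-time DateTime/Duration usage is taken from the code shown.

import org.joda.time.{DateTime, Duration}

object TimingSketch {
  // Hypothetical helper: runs a block and prints how long it took, using the
  // same DateTime/Duration calls as Tools.scala.
  def timed[T](label: String)(block: => T): T = {
    val startTime = DateTime.now()
    val result    = block
    val endTime   = DateTime.now()
    println(label + " took " + new Duration(startTime, endTime).getMillis + " ms")
    result
  }
}

// Usage, mirroring the call in Tools.scala (Http, Const and json come from
// the surrounding code):
//   val response = TimingSketch.timed("areaCodeAndAddress http") {
//     Http(Const.areaCodeAndAddressUrl)
//       .postData(json.toJSONString)
//       .header("content-type", "application/json")
//       .asString
//   }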