杜发飞 / operating-vehicle / Commits

Commit c1df6dfe authored Nov 04, 2019 by 杜发飞
1 parent ef83fb80
Showing 4 changed files with 39 additions and 4 deletions (+39 -4)
src/main/scala/com/hikcreate/data/common/Sparking.scala                  +2  -0
src/main/scala/com/hikcreate/data/listener/BatchProcessListener.scala    +6  -3
src/main/scala/com/hikcreate/data/listener/LifecycleListener.scala       +27 -0
src/main/scala/com/hikcreate/data/offline/FullSync.scala                 +4  -1
src/main/scala/com/hikcreate/data/common/Sparking.scala    View file @ c1df6dfe

package com.hikcreate.data.common

import com.hikcreate.data.listener.LifecycleListener
import org.apache.log4j.{Level, Logger}
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
...
...
@@ -13,6 +14,7 @@ trait Sparking {

  val conf: SparkConf = new SparkConf()
    .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    .set("spark.extraListeners", classOf[LifecycleListener].getName)
    .set("hive.exec.dynamici.partition", "true")
    .set("hive.exec.dynamic.partition.mode", "nonstrict")
    .setAppName("test")
...
...
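Note: Spark instantiates the classes named in spark.extraListeners reflectively, and each listener needs either a zero-argument constructor or a single-argument constructor accepting a SparkConf; that is why the new LifecycleListener below takes a SparkConf. A minimal sketch of the wiring (NoOpListener and ListenerWiring are illustrative names, not part of this commit):

import org.apache.spark.SparkConf
import org.apache.spark.scheduler.SparkListener

// Illustrative no-op listener; only the SparkConf constructor shape matters here.
class NoOpListener(conf: SparkConf) extends SparkListener

object ListenerWiring {
  // Register the listener by its fully-qualified class name, as Sparking.scala does above.
  val conf: SparkConf = new SparkConf()
    .set("spark.extraListeners", classOf[NoOpListener].getName)
}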
src/main/scala/com/hikcreate/data/util/AppListener.scala → src/main/scala/com/hikcreate/data/listener/BatchProcessListener.scala    View file @ c1df6dfe

- package com.hikcreate.data.util
+ package com.hikcreate.data.listener

  import com.hikcreate.data.common.Logging
  import org.apache.spark.streaming.StreamingContext
  import org.apache.spark.streaming.scheduler.{StreamingListener, StreamingListenerBatchStarted}

- class AppListener(ssc: StreamingContext) extends StreamingListener with Logging {
+ /**
+   * Monitors batch processing time.
+   */
+ class BatchProcessListener(ssc: StreamingContext) extends StreamingListener with Logging {

    val DELAY_MAX = 20

    override def onBatchStarted(batchStarted: StreamingListenerBatchStarted): Unit = {
      // Scheduling delay, in milliseconds
      val Delay_ts = batchStarted.batchInfo.schedulingDelay.get
      /*if(Delay_ts > DELAY_MAX ){
        sendEmail(...)
...
...
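The alert branch is commented out and truncated in the diff, so the notification path is not shown. A minimal sketch of how such a scheduling-delay threshold check could look, assuming a hypothetical sendEmail helper and a threshold expressed in milliseconds (both are assumptions, not part of this commit):

import org.apache.spark.streaming.scheduler.{StreamingListener, StreamingListenerBatchStarted}

// Sketch only: delayMaxMs and sendEmail are placeholders, not values from this repository.
class DelayAlertListener extends StreamingListener {

  val delayMaxMs = 20000L  // assumed threshold, in milliseconds

  // Hypothetical notification hook; the real project would wire in its own mail/alert client.
  def sendEmail(subject: String): Unit = println(s"ALERT: $subject")

  override def onBatchStarted(batchStarted: StreamingListenerBatchStarted): Unit = {
    // BatchInfo.schedulingDelay is an Option[Long] in milliseconds
    val delayMs = batchStarted.batchInfo.schedulingDelay.getOrElse(0L)
    if (delayMs > delayMaxMs) {
      sendEmail(s"Streaming batch scheduling delay ${delayMs} ms exceeded ${delayMaxMs} ms")
    }
  }
}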
src/main/scala/com/hikcreate/data/listener/LifecycleListener.scala    new file 0 → 100644    View file @ c1df6dfe

package com.hikcreate.data.listener

import com.hikcreate.data.common.Logging
import org.apache.spark.SparkConf
import org.apache.spark.scheduler.{SparkListener, SparkListenerApplicationEnd, SparkListenerApplicationStart}
import org.joda.time.DateTime
import scala.collection.mutable.ArrayBuffer

/**
  * Monitors the start and stop of the Spark application.
  */
class LifecycleListener(conf: SparkConf) extends SparkListener with Logging {

  val msg = new ArrayBuffer[String]()

  override def onApplicationStart(applicationStart: SparkListenerApplicationStart): Unit = {
    msg.append("Application ID: " + applicationStart.appId.getOrElse(""))
    msg.append("Application name: " + applicationStart.appName)
    msg.append("Application start time: " + new DateTime(applicationStart.time).toString("yyyy-MM-dd HH:mm:ss"))
    println(msg.toString())
  }

  override def onApplicationEnd(applicationEnd: SparkListenerApplicationEnd): Unit = {
    msg.append("Application end time: " + new DateTime(applicationEnd.time).toString("yyyy-MM-dd HH:mm:ss"))
  }
}
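The listener formats applicationStart.time and applicationEnd.time, which are epoch-millisecond timestamps, with joda-time. A quick standalone check of that formatting (the object name is illustrative, not part of this commit):

import org.joda.time.DateTime

object TimestampFormatCheck extends App {
  // Application start/end events carry their time as epoch milliseconds.
  val millis = System.currentTimeMillis()
  println(new DateTime(millis).toString("yyyy-MM-dd HH:mm:ss"))
}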
src/main/scala/com/hikcreate/data/offline/FullSync.scala    View file @ c1df6dfe

...
...
@@ -10,7 +10,10 @@ import scala.collection.JavaConverters._

object FullSync extends Sparking {

  def main(args: Array[String]): Unit = {
    //IgniteClient.ignite.cacheNames().asScala.foreach(x => IgniteClient.ignite.destroyCache(x))
    /*IgniteClient.basicEnterpriseInfo.destroy()
    IgniteClient.basicVehicleInfo.destroy()
    IgniteClient.basicAlarmTypeInfo.destroy()*/
    val sparkSession = SparkSession.builder().config(conf).getOrCreate()
    // Basic enterprise information table
    sparkSession.sqlContext.read.format("jdbc").options(Map(
...
...
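The options map for the JDBC read is truncated in the diff. For reference, a Spark JDBC read typically takes keys such as url, dbtable, user, password and driver; the connection values below are placeholders, not values from this repository:

import org.apache.spark.sql.{DataFrame, SparkSession}

object JdbcReadSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("jdbc-read-sketch").getOrCreate()

    // Placeholder connection settings; url, table and credentials are assumptions.
    val df: DataFrame = spark.read
      .format("jdbc")
      .options(Map(
        "url"      -> "jdbc:mysql://db-host:3306/basic_info",
        "dbtable"  -> "enterprise_info",
        "user"     -> "user",
        "password" -> "password",
        "driver"   -> "com.mysql.jdbc.Driver"
      ))
      .load()

    df.show(10)
  }
}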