ftp_pic, commit 0e2e5b41
Authored Aug 21, 2019 by 李辅翼

Commit message: v14

Parent: 4a56114c

Showing 12 changed files with 18 additions and 1144 deletions (+18 -1144)
pom.xml                                                                      +0   -43
src/main/java/com/hikcreate/controller/PicController.java                   +1   -22
src/main/java/com/hikcreate/drv_photo_pic/VioPic.java                       +0   -9
src/main/java/com/hikcreate/drv_photo_pic/impl/DrvPhotoImpl.java            +13  -18
src/main/java/com/hikcreate/drv_photo_pic/impl/VehicleImpl.java             +0   -1
src/main/java/com/hikcreate/drv_photo_pic/impl/VioPicImpl.java              +0   -316
src/main/java/com/hikcreate/schedul/PicSchedule.java                        +0   -11
src/main/java/com/hikcreate/service/fdfs/service/impl/FileServiceImpl.java  +4   -3
src/main/java/com/hikcreate/utils/redis/RedisBuilder.java                   +0   -32
src/main/java/com/hikcreate/utils/redis/RedisClient.java                    +0   -539
src/main/java/com/hikcreate/utils/redis/RedisClientUtil.java                +0   -32
src/main/java/com/hikcreate/utils/redis/RedisUtils.java                     +0   -118
pom.xml (view file @ 0e2e5b41)

@@ -59,13 +59,6 @@
            <scope>test</scope>
        </dependency>
-       <!--<dependency>-->
-           <!--<groupId>oracle</groupId>-->
-           <!--<artifactId>ojdbc6</artifactId>-->
-           <!--<version>11.2.0.3</version>-->
-           <!--<scope>system</scope>-->
-           <!--<systemPath>${project.basedir}/src/main/resources/lib/ojdbc6-11.2.0.3.jar</systemPath>-->
-       <!--</dependency>-->
        <dependency>
            <groupId>com.oracle</groupId>

@@ -74,29 +67,6 @@
        </dependency>
-       <dependency>
-           <groupId>org.apache.hive</groupId>
-           <artifactId>hive-jdbc</artifactId>
-           <version>1.1.0-cdh5.15.1</version>
-           <exclusions>
-               <exclusion>
-                   <artifactId>slf4j-log4j12</artifactId>
-                   <groupId>org.slf4j</groupId>
-               </exclusion>
-               <exclusion>
-                   <groupId>javax.servlet</groupId>
-                   <artifactId>servlet-api</artifactId>
-               </exclusion>
-               <exclusion>
-                   <groupId>org.apache.geronimo.specs</groupId>
-                   <artifactId>geronimo-annotation_1.0_spec</artifactId>
-               </exclusion>
-               <exclusion>
-                   <groupId>org.apache.geronimo.specs</groupId>
-                   <artifactId>geronimo-jaspic_1.0_spec</artifactId>
-               </exclusion>
-           </exclusions>
-       </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>

@@ -109,19 +79,6 @@
-       <dependency>
-           <groupId>redis.clients</groupId>
-           <artifactId>jedis</artifactId>
-           <version>2.9.0</version>
-           <exclusions>
-               <exclusion>
-                   <artifactId>log4j-over-slf4j</artifactId>
-                   <groupId>org.slf4j</groupId>
-               </exclusion>
-           </exclusions>
-           <type>jar</type>
-           <scope>compile</scope>
-       </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.51</version>
src/main/java/com/hikcreate/controller/PicController.java (view file @ 0e2e5b41)

@@ -3,22 +3,18 @@ package com.hikcreate.controller;
import com.alibaba.fastjson.JSONObject;
import com.hikcreate.drv_photo_pic.DrvPhoto;
import com.hikcreate.drv_photo_pic.Vehicle;
import com.hikcreate.drv_photo_pic.VioPic;
import com.hikcreate.entity.PicByte;
import com.hikcreate.service.fdfs.service.FileService;
import com.hikcreate.utils.DateUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.client.RestTemplate;
import javax.imageio.stream.FileImageOutputStream;
import javax.servlet.http.HttpServletResponse;
import java.io.*;
import java.util.Date;
import java.util.Map;
import org.springframework.http.*;

@@ -33,8 +29,6 @@ import org.springframework.http.*;
@RequestMapping("/pic")
public class PicController {

    @Autowired
    private VioPic vioPic;
    @Autowired
    private DrvPhoto drvPhoto;

@@ -114,10 +108,7 @@ public class PicController {
    }

    @GetMapping("/testIncVioPic")
    public void testIncVioPic(@RequestParam("start") String start, @RequestParam("end") String end) {
        vioPic.getIncrementVioPic(start, end);
    }

@@ -197,18 +188,6 @@ public class PicController {
        }
    }

    @GetMapping("/testIncVio")
    public void testFastDfs(@RequestParam("past") int past) {
        String date = DateUtil.getDate();
        vioPic.getIncrementVioPic(DateUtil.formatDate(DateUtil.getPastDate(new Date(), -past)), date);
    }

    @GetMapping("/delAllVioPic")
    public void delAllVioPic(@RequestParam("past") int past) {
        vioPic.delAllVioPic();
    }
}
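For context, the /testIncVio handler syncs the window [today minus "past" days, today]. A minimal sketch of that window computation using java.time instead of the project's DateUtil (whose getDate/formatDate/getPastDate signatures and the yyyy-MM-dd format are assumptions here, not confirmed by the commit):

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;

// Hypothetical stand-in for DateUtil.formatDate(DateUtil.getPastDate(new Date(), -past))
// and DateUtil.getDate(); assumes a yyyy-MM-dd wire format.
public final class DateWindow {
    private static final DateTimeFormatter FMT = DateTimeFormatter.ofPattern("yyyy-MM-dd");

    public static String[] pastWindow(int past) {
        LocalDate today = LocalDate.now();
        String start = today.minusDays(past).format(FMT); // "past" days back
        String end = today.format(FMT);                   // today
        return new String[]{start, end};                  // passed to getIncrementVioPic(start, end)
    }
}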
src/main/java/com/hikcreate/drv_photo_pic/VioPic.java (deleted, 100644 → 0; view file @ 4a56114c)

package com.hikcreate.drv_photo_pic;

public interface VioPic {

    void getIncrementVioPic(String startDay, String endDay);

    void delAllVioPic();
}
src/main/java/com/hikcreate/drv_photo_pic/impl/DrvPhotoImpl.java (view file @ 0e2e5b41)

The hunks below interleaved the old and new side of several whitespace-only changes; only the resulting version is shown.

@@ -6,11 +6,9 @@ import com.hikcreate.drv_photo_pic.DrvPhoto;
import com.hikcreate.entity.PicResult;
import com.hikcreate.service.fdfs.service.FileService;
import com.hikcreate.utils.DateUtil;
import com.hikcreate.utils.redis.RedisClientUtil;
import oracle.sql.BLOB;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.slf4j.Logger;

@@ -33,8 +31,6 @@ import java.util.zip.CRC32;
@Service("drvPhotoImpl")
public class DrvPhotoImpl implements DrvPhoto {

    @Autowired
    private RedisClientUtil redisClientUtil;
    private static Logger logger = LoggerFactory.getLogger(DrvPhotoImpl.class);

    @Value("${url}")

@@ -89,7 +85,7 @@ public class DrvPhotoImpl implements DrvPhoto {
        try {
            hbaseConn = ConnectionFactory.createConnection(hbaseConf);
            endTime = time;
            startTime = SqlHelp.getStartTime("select min(GXSJ) mtime from GYJG.DRV_PHOTO", url, username, password);
            // loop to fetch the photos
            String sql;
            String lastTime;

@@ -214,23 +210,23 @@ public class DrvPhotoImpl implements DrvPhoto {
            // check whether the photo has already been stored in FastDFS
            colValue = new String[2];
            String rowkey = sfzmhm;
            PicResult picResult = SqlHelp.getFlag(rowkey, driverPhototable, colValue, gxsj);
            if (picResult.getI() != 0) {
                if (picResult.getI() == 2) {
                    fileService.deleteFile(picResult.getUrl());
                }
                Put put = new Put(rowkey.getBytes());
                byte[] bytes1 = SqlHelp.blobToByteZp(zp);
                String picUrl = fileService.uploadFile(bytes1, sfzmhm + ".jpeg");
                put.addColumn("info".getBytes(), "sfzmhm".getBytes(), (sfzmhm == null ? "null" : sfzmhm).getBytes());
                put.addColumn("info".getBytes(), "picUrl".getBytes(), picUrl.getBytes());
                put.addColumn("info".getBytes(), "gxsj".getBytes(), (gxsj == null ? "null" : gxsj).getBytes());
                put.addColumn("info".getBytes(), "xzqh".getBytes(), (xzqh == null ? "null" : xzqh).getBytes());
                put.addColumn("info".getBytes(), "flag".getBytes(), (flag == null ? "null" : flag).getBytes());
                put.addColumn("info".getBytes(), "xh".getBytes(), (xh == null ? "null" : xh).getBytes());
                put.addColumn("info".getBytes(), "rksj".getBytes(), (rksj == null ? "null" : rksj).getBytes());
                put.addColumn("info".getBytes(), "fzjg".getBytes(), (fzjg == null ? "null" : fzjg).getBytes());
                driverPhototable.put(put);
            }
        }

@@ -246,7 +242,7 @@ public class DrvPhotoImpl implements DrvPhoto {
        }
    }

    private void closeTable() {
        if (driverPhototable != null) {
            try {
                driverPhototable.close();

@@ -275,7 +271,6 @@ public class DrvPhotoImpl implements DrvPhoto {
    }

    public static void main(String[] args) {
        // String url = "http://193.5.103.124:8083/5251522281554187435986/2019/06/07/11/zhbrt193313019kk/11495774795494205.jpg?fid=156008-358BE80027-2E8B5C6-B190C";
        String url = "ftp://vion6:vion6@52.1.113.218:21/193.2.117.66/kk/2019-07-06/18/2019070618541777800022598.jpg";
        String[] arr = url.split("/");
        System.out.println(arr[arr.length - 1].split("jpg")[0] + "jpg");
src/main/java/com/hikcreate/drv_photo_pic/impl/VehicleImpl.java (view file @ 0e2e5b41)

@@ -229,7 +229,6 @@ public class VehicleImpl implements Vehicle {
    }

    private String uploadFile(byte[] file, String fileName) {
        String url;
        try {
src/main/java/com/hikcreate/drv_photo_pic/impl/VioPicImpl.java (deleted, 100644 → 0; view file @ 4a56114c)

package com.hikcreate.drv_photo_pic.impl;

import com.hikcreate.drv_photo_pic.VioPic;
import com.hikcreate.entity.PicByte;
import com.hikcreate.service.fdfs.service.FileService;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.*;
import org.springframework.stereotype.Service;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;

import java.io.IOException;
import java.sql.*;
import java.sql.Connection;
import java.util.Map;

@Service("vioPicImpl")
public class VioPicImpl implements VioPic {

    private static Logger logger = LoggerFactory.getLogger(VioPicImpl.class);

    @Value("${hive.url}")
    private String hiveUrl;
    @Value("${hive.user}")
    private String hiveUser;
    @Value("${hive.password}")
    private String hivePassWord;
    @Value("${increment.vio.pic.sql}")
    private String incrementVioPicSql;
    @Value("${hbase.zookeeper.property.clientPort}")
    private String hbaseAddress;
    @Value("${hbase.zookeeper.quorum}")
    private String hbaseZkQuorum;
    @Value("${hbase.master}")
    private String hbaseMaster;
    @Value("${hbase.vio.table}")
    private String vioTableStr;
    @Value("${ftpUrl}")
    private String ftpUrl;
    @Value("${httpUrl}")
    private String httpUrl;
    @Value("${vio.pic.sql.byrow}")
    private String sqlByRow;
    @Autowired
    private FileService fileService;

    private String rowkey;
    // Mutable per-run state kept in static fields; this only holds up because
    // getIncrementVioPic below is synchronized on the single Spring bean.
    private static TableName vioTableName;
    private static Table vioTable;
    private static Configuration hbaseConf;
    private static org.apache.hadoop.hbase.client.Connection hbaseConn;
    private static boolean isPut = false;
    private static String hphm;
    private static String hpzl;
    private static String wfsj;
    private static String wfxw;
    private static String url1;
    private static String url2;
    private static String url3;
    private static Put put;
    private static MultiValueMap<String, String> params;
    private static HttpEntity<MultiValueMap<String, String>> requestEntity;
    private static HttpHeaders headers;
    private static HttpMethod method;
    private static RestTemplate restTemplate;
    private static String[] colValue;
    /**
     * Incrementally sync violation photos for [startDay, endDay].
     */
    @Override
    public synchronized void getIncrementVioPic(String startDay, String endDay) {
        try {
            String JDBC_DRIVER = "org.apache.hive.jdbc.HiveDriver";
            Class.forName(JDBC_DRIVER);
            Connection connection = DriverManager.getConnection(hiveUrl, hiveUser, hivePassWord);
            PreparedStatement pstm = connection.prepareStatement(incrementVioPicSql);
            pstm.setString(1, endDay);
            pstm.setString(2, startDay);
            ResultSet resultSet = pstm.executeQuery();
            hbaseConf = HBaseConfiguration.create();
            hbaseConf.set("hbase.zookeeper.property.clientPort", hbaseAddress);
            hbaseConf.set("hbase.zookeeper.quorum", hbaseZkQuorum);
            hbaseConn = ConnectionFactory.createConnection(hbaseConf);
            vioTableName = TableName.valueOf(vioTableStr);
            vioTable = hbaseConn.getTable(vioTableName);
            restTemplate = new RestTemplate();
            headers = new HttpHeaders();
            method = HttpMethod.POST;
            headers.setContentType(MediaType.APPLICATION_FORM_URLENCODED);
            long count = 0L;
            loop1:
            while (resultSet.next()) {
                count = count + 1;
                logger.info("数据库查询出东西了------------" + count); // "the query returned a row"
                hphm = resultSet.getString("hphm");
                hpzl = resultSet.getString("hpzl");
                wfsj = resultSet.getString("wfsj");
                wfxw = resultSet.getString("wfxw");
                url1 = resultSet.getString("url1");
                url2 = resultSet.getString("url2");
                url3 = resultSet.getString("url3");
                // NOTE: because ?: binds looser than + and ==, this expression does not
                // build "hphm#hpzl#wfsj#wfxw"; see the sketch after this hunk.
                rowkey = hphm == null ? "null" : hphm + "#" + hpzl == null ? "null" : hpzl + "#" + wfsj == null ? "null" : wfsj.substring(0, 19) + "#" + wfxw == null ? "wfxw" : wfxw;
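The rowkey expression above is an operator-precedence trap: each `x + "#" + y == null` concatenates first and then compares the concatenation (which is never null) against null, so when hphm is non-null the whole chain evaluates to just wfxw. A sketch of the presumed intent, with each null check parenthesized (field names as in the original; the "null" placeholder convention follows DrvPhotoImpl):

// Presumed intent: null-safe "hphm#hpzl#wfsj(first 19 chars)#wfxw" rowkey.
String rowkey =
        (hphm == null ? "null" : hphm) + "#" +
        (hpzl == null ? "null" : hpzl) + "#" +
        (wfsj == null ? "null" : wfsj.substring(0, 19)) + "#" +
        (wfxw == null ? "null" : wfxw);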
                Get get = new Get(rowkey.getBytes());
                put = new Put(rowkey.getBytes());
                get.addColumn(Bytes.toBytes("info"), Bytes.toBytes("ftpUrl1"));
                get.addColumn(Bytes.toBytes("info"), Bytes.toBytes("ftpUrl2"));
                get.addColumn(Bytes.toBytes("info"), Bytes.toBytes("ftpUrl3"));
                get.addColumn(Bytes.toBytes("info"), Bytes.toBytes("ftpUrl4"));
                get.addColumn(Bytes.toBytes("info"), Bytes.toBytes("ftpUrl5"));
                get.addColumn(Bytes.toBytes("info"), Bytes.toBytes("ftpUrl6"));
                Result result = vioTable.get(get);
                colValue = new String[6];
                for (Cell cell : result.rawCells()) {
                    String colName = Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
                    String value = Bytes.toString(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
                    // save the value; NOTE: no break statements, so a match on ftpUrl1
                    // falls through and overwrites every later slot as well
                    switch (colName) {
                        case "ftpUrl1":
                            colValue[0] = value;
                        case "ftpUrl2":
                            colValue[1] = value;
                        case "ftpUrl3":
                            colValue[2] = value;
                        case "ftpUrl4":
                            colValue[3] = value;
                        case "ftpUrl5":
                            colValue[4] = value;
                        case "ftpUrl6":
                            colValue[5] = value;
                    }
                }
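Because every case falls through, a row that matches "ftpUrl1" also clobbers slots 1 through 5, which then defeats the duplicate checks below. A minimal corrected sketch with explicit breaks:

// Sketch: each qualifier fills only its own slot.
switch (colName) {
    case "ftpUrl1": colValue[0] = value; break;
    case "ftpUrl2": colValue[1] = value; break;
    case "ftpUrl3": colValue[2] = value; break;
    case "ftpUrl4": colValue[3] = value; break;
    case "ftpUrl5": colValue[4] = value; break;
    case "ftpUrl6": colValue[5] = value; break;
    default: break; // other qualifiers in "info" are ignored
}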
                // logger.info("get result: " + colValue.toString());
                // insert step: url1
                if (url1 != null && !"".equals(url1)) {
                    // skip if this photo was already inserted, to avoid duplicates
                    if (url1.equals(colValue[0]) || url1.equals(colValue[3])) {
                        logger.info("url1已经插入过了"); // "url1 already inserted"
                        continue loop1;
                    }
                    if (colValue[0] == null || "".equals(colValue[0])) {
                        // store url1 in ftpUrl1
                        if (url1.startsWith("ftp")) {
                            // ftp protocol
                            putData("ftpUrl1", "fastDfsUrl1", url1, ftpUrl, fileService);
                        } else if (url1.startsWith("http")) {
                            // http protocol
                            putData("ftpUrl1", "fastDfsUrl1", url1, httpUrl, fileService);
                        }
                    } else {
                        if (colValue[3] == null || "".equals(colValue[3])) {
                            // store url1 in ftpUrl4
                            if (url1.startsWith("ftp")) {
                                putData("ftpUrl4", "fastDfsUrl4", url1, ftpUrl, fileService);
                            } else if (url1.startsWith("http")) {
                                putData("ftpUrl4", "fastDfsUrl4", url1, httpUrl, fileService);
                            }
                        }
                    }
                }
                if (url2 != null && !"".equals(url2)) {
                    if (url2.equals(colValue[1]) || url2.equals(colValue[4])) {
                        logger.info("url2已经插入过了"); // "url2 already inserted"
                        continue loop1;
                    }
                    if (colValue[1] == null || "".equals(colValue[1])) {
                        // store url2 in ftpUrl2
                        if (url2.startsWith("ftp")) {
                            putData("ftpUrl2", "fastDfsUrl2", url2, ftpUrl, fileService);
                        } else if (url2.startsWith("http")) {
                            putData("ftpUrl2", "fastDfsUrl2", url2, httpUrl, fileService);
                        }
                    } else {
                        if (colValue[4] == null || "".equals(colValue[4])) {
                            // store url2 in ftpUrl5
                            if (url2.startsWith("ftp")) {
                                putData("ftpUrl5", "fastDfsUrl5", url2, ftpUrl, fileService);
                            } else if (url2.startsWith("http")) {
                                putData("ftpUrl5", "fastDfsUrl5", url2, httpUrl, fileService);
                            }
                        }
                    }
                }
                if (url3 != null && !"".equals(url3)) {
                    if (url3.equals(colValue[2]) || url3.equals(colValue[5])) {
                        logger.info("url3已经插入过了"); // "url3 already inserted"
                        continue loop1;
                    }
                    if (colValue[2] == null || "".equals(colValue[2])) {
                        // store url3 in ftpUrl3
                        if (url3.startsWith("ftp")) {
                            putData("ftpUrl3", "fastDfsUrl3", url3, ftpUrl, fileService);
                        } else if (url3.startsWith("http")) {
                            putData("ftpUrl3", "fastDfsUrl3", url3, httpUrl, fileService);
                        }
                    } else {
                        if (colValue[5] == null || "".equals(colValue[5])) {
                            // store url3 in ftpUrl6
                            if (url3.startsWith("ftp")) {
                                putData("ftpUrl6", "fastDfsUrl6", url3, ftpUrl, fileService);
                            } else if (url3.startsWith("http")) {
                                putData("ftpUrl6", "fastDfsUrl6", url3, httpUrl, fileService);
                            }
                        }
                    }
                }
                if (isPut) {
                    vioTable.put(put);
                    isPut = false;
                }
            }
            resultSet.close();
            pstm.close();
            connection.close();
            logger.info("执行完毕:---------------------"); // "finished"
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (vioTable != null) {
                try {
                    vioTable.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            if (hbaseConn != null) {
                try {
                    hbaseConn.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }
    /**
     * Delete all violation photos from FastDFS.
     */
    @Override
    public void delAllVioPic() {
        hbaseConf = HBaseConfiguration.create();
        hbaseConf.set("hbase.zookeeper.property.clientPort", hbaseAddress);
        hbaseConf.set("hbase.zookeeper.quorum", hbaseZkQuorum);
        try {
            hbaseConn = ConnectionFactory.createConnection(hbaseConf);
            vioTableName = TableName.valueOf(vioTableStr);
            vioTable = hbaseConn.getTable(vioTableName);
            Scan scan = new Scan();
            scan.addColumn("info".getBytes(), "fastDfsUrl1".getBytes());
            scan.addColumn("info".getBytes(), "fastDfsUrl2".getBytes());
            scan.addColumn("info".getBytes(), "fastDfsUrl3".getBytes());
            scan.addColumn("info".getBytes(), "fastDfsUrl4".getBytes());
            scan.addColumn("info".getBytes(), "fastDfsUrl5".getBytes());
            scan.addColumn("info".getBytes(), "fastDfsUrl6".getBytes());
            ResultScanner scanner = vioTable.getScanner(scan);
            for (Result result : scanner) {
                for (KeyValue value : result.raw()) {
                    fileService.deleteFile(new String(value.getValue()));
                    // "deleting photo <url> <qualifier>"
                    logger.info("删除图片+++++++" + new String(value.getValue()) + "----------" + new String(value.getQualifier()));
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    private static void putData(String hbaseFtpCol, String hbaseFastCol, String url, String ftpUrl, FileService fileService) {
        // fetch the picture bytes through the transfer service; the FastDFS upload
        // itself is commented out below, so only the source URL is recorded
        params = new LinkedMultiValueMap<String, String>();
        params.add("urls", url);
        requestEntity = new HttpEntity<>(params, headers);
        ResponseEntity<PicByte> resp = restTemplate.exchange(ftpUrl, method, requestEntity, PicByte.class);
        Map<String, byte[]> map = resp.getBody().getMap();
        if (map != null && map.size() > 0) {
            for (Map.Entry<String, byte[]> entry : map.entrySet()) {
                if (url.startsWith("ftp") || url.startsWith("http")) {
                    String[] arr = url.split("/");
                    // String fastDfsUrl = fileService.uploadFile(entry.getValue(), arr[arr.length - 1].split("jpg")[0] + "jpg");
                    // put.addColumn("info".getBytes(), hbaseFastCol.getBytes(), fastDfsUrl.getBytes());
                    put.addColumn("info".getBytes(), hbaseFtpCol.getBytes(), url.getBytes());
                    isPut = true;
                } else {
                    logger.info("图片其他存储方式:" + url); // "photo uses some other storage scheme"
                }
            }
        } else {
            logger.info("此ftp地址数据为空:==========================" + url); // "this ftp address returned no data"
        }
    }
}
src/main/java/com/hikcreate/schedul/PicSchedule.java (view file @ 0e2e5b41)

@@ -2,7 +2,6 @@ package com.hikcreate.schedul;
import com.hikcreate.drv_photo_pic.DrvPhoto;
import com.hikcreate.drv_photo_pic.Vehicle;
import com.hikcreate.drv_photo_pic.VioPic;
import com.hikcreate.utils.DateUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;

@@ -17,8 +16,6 @@ public class PicSchedule {
    @Autowired
    private DrvPhoto drvPhoto;
    @Autowired
    private VioPic vioPic;
    @Autowired
    private Vehicle vehicle;

    @Value("${pastDay}")

@@ -35,14 +32,6 @@ public class PicSchedule {
    }

    /**
     * Sync incremental violation data at 10 a.m. every day
     * (note: the commented-out cron "0 0 11 * * *" would actually fire at 11:00).
     */
    // @Scheduled(cron = "0 0 11 * * *")
    public void getIncrementVioPic() {
        String date = DateUtil.getDate();
        vioPic.getIncrementVioPic(DateUtil.formatDate(DateUtil.getPastDate(new Date(), -pastDay)), date);
    }

    /**
     * Sync incremental vehicle data at 9 a.m. every day
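For reference, Spring's six-field cron format is second, minute, hour, day-of-month, month, day-of-week, so a trigger matching the 10 a.m. comment would look like the following sketch (not part of this commit):

// Fires at 10:00:00 every day; the original "0 0 11 * * *" fires at 11:00.
@Scheduled(cron = "0 0 10 * * *")
public void getIncrementVioPic() {
    // ... body as above
}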
src/main/java/com/hikcreate/service/fdfs/service/impl/FileServiceImpl.java (view file @ 0e2e5b41)

@@ -30,7 +30,7 @@ public class FileServiceImpl implements FileService {
        if (!StringUtils.hasText(path)) {
            logger.error("Upload Img Error");
        }
        logger.info("Upload Img Success. path {}", ("/group" + path.split("group")[1]));
        // logger.info("Upload Img Success. path {}", ("/group" + path.split("group")[1]));
        return "/group" + path.split("group")[1];
    }

@@ -43,10 +43,11 @@ public class FileServiceImpl implements FileService {
    @Override
    public void deleteFile(String path) {
        try {
            logger.info("删除图片:" + path); // "deleting photo: <path>"
            // logger.info("删除图片:" + path);
            fastDFSClient.deleteFile(path);
        } catch (Exception e) {
            deleteFile(path); // retries by recursing, with no bound; see the sketch below
            // e.printStackTrace();
            logger.error(e.toString());
        }
    }
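The catch block above calls deleteFile(path) again with no base case, so a persistently failing delete recurses until the stack overflows. A bounded-retry sketch (fastDFSClient is the project's client; the retry count of 3 is an assumption, not from the commit):

public void deleteFile(String path) {
    final int maxAttempts = 3; // assumption: retry a few times, then give up
    for (int attempt = 1; attempt <= maxAttempts; attempt++) {
        try {
            fastDFSClient.deleteFile(path);
            return; // success
        } catch (Exception e) {
            logger.error("delete attempt {} failed for {}: {}", attempt, path, e.toString());
        }
    }
}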
src/main/java/com/hikcreate/utils/redis/RedisBuilder.java (deleted, 100644 → 0; view file @ 4a56114c)

package com.hikcreate.utils.redis;

import org.springframework.util.StringUtils;
import redis.clients.jedis.JedisCluster;

/**
 * Copyright (C)
 * All rights reserved
 * <p>
 * Project: RTComputation
 * <p>
 * com.zcreator.bigdata.rtc.common.redis
 * <p>
 * created by guangzhong.wgz, 2018/11/22
 **/
public class RedisBuilder {

    /**
     * @param hosts comma-separated host:port list
     * @return a RedisClient backed by the shared JedisCluster
     */
    public static synchronized RedisClient getRedisClient(String hosts) {
        if (StringUtils.isEmpty(hosts)) {
            throw new RuntimeException("hosts is empty");
        }
        JedisCluster jedisCluster = RedisUtils.getClusterClient(hosts);
        return new RedisClient(jedisCluster);
    }
}
src/main/java/com/hikcreate/utils/redis/RedisClient.java (deleted, 100644 → 0; view file @ 4a56114c)

package com.hikcreate.utils.redis;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import redis.clients.jedis.JedisCluster;

import java.util.List;
import java.util.Set;

/**
 * @author lifuyi
 * @date 2019/1/31 10:53
 */
@Component
public class RedisClient {

    @Value("${redis.server}")
    private String redisServer;

    public final static String VIRTUAL_COURSE_PREX = "_lc_vc_";

    private JedisCluster jedisCluster;

    public RedisClient() {
    }

    public RedisClient(JedisCluster jedisCluster) {
        this.jedisCluster = jedisCluster;
    }

    /** Get a single value. */
    public String get(String key) {
        return jedisCall((redis) -> redis.get(key));
    }

    public boolean setList(String key, List<String> deviceIPList) {
        return jedisCall((redis) -> {
            redis.del(key);
            for (String string : deviceIPList) {
                redis.lpush(key, string);
            }
            return true;
        });
    }

    public List<String> getObjectList(String key) {
        return jedisCall((redis) -> redis.lrange(key, 0, -1));
    }

    /**
     * Replace a list value.
     *
     * @param key          key
     * @param value        values
     * @param cacheSeconds TTL in seconds; 0 means no expiry
     */
    public long setList(String key, List<String> value, int cacheSeconds) {
        return jedisCall((redis) -> {
            long result = 0;
            if (redis.exists(key)) {
                redis.del(key);
            }
            String[] strArr = new String[value.size()];
            redis.rpush(key, value.toArray(strArr));
            if (cacheSeconds != 0) {
                redis.expire(key, cacheSeconds);
            }
            return result;
        });
    }

    /** Set a value. */
    public String set(String key, String value) {
        return jedisCall((redis) -> redis.set(key, value));
    }

    /** Increment a counter. */
    public Long incr(String key) {
        return jedisCall((redis) -> redis.incr(key));
    }

    /** Increment a counter by the given amount. */
    public Long incrBy(String key, long value) {
        return jedisCall((redis) -> redis.incrBy(key, value));
    }

    /** Decrement a counter. */
    public Long decr(String key) {
        return jedisCall((redis) -> redis.decr(key));
    }

    /** Decrement a counter by the given amount. */
    public Long decrBy(String key, long value) {
        return jedisCall((redis) -> redis.decrBy(key, value));
    }

    /** Get several values at once. */
    public List<String> mget(String... keys) {
        return jedisCall((redis) -> redis.mget(keys));
    }

    /** Overwrite part of a string value starting at offset. */
    public Long setRange(String key, long offset, String value) {
        return jedisCall((redis) -> redis.setrange(key, offset, value));
    }

    /** Get the substring of the value between the given offsets. */
    public String getRange(String key, int startOffset, int endOffset) {
        return jedisCall((redis) -> redis.getrange(key, startOffset, endOffset));
    }

    /** Set a value with an expiry, in seconds. */
    public String setex(String key, int keepaliveLong, String value) {
        return jedisCall((redis) -> redis.setex(key, keepaliveLong, value));
    }

    /** Push from the left. */
    public Long lpush(String key, String value) {
        return jedisCall((redis) -> redis.lpush(key, value));
    }

    /** Pop from the left. */
    public String lpop(String key) {
        return jedisCall((redis) -> redis.lpop(key));
    }

    /** Get a range, counted from the left. */
    public List<String> lrange(String key, long start, long end) {
        return jedisCall((redis) -> redis.lrange(key, start, end));
    }

    /**
     * Remove occurrences of a value from a list.
     *
     * @param count number of occurrences to remove
     * @param value the value to remove
     */
    public Long lrem(String key, long count, String value) {
        return jedisCall((redis) -> redis.lrem(key, count, value));
    }

    /** Return the list length (despite the name, this wraps LLEN). */
    public Long lrem(String key) {
        return jedisCall((redis) -> redis.llen(key));
    }

    /** Trim the list to the given index range, dropping elements outside it. */
    public String ltrim(String key, long start, long end) {
        return jedisCall((redis) -> redis.ltrim(key, start, end));
    }

    /**
     * Get the element at the given index.
     *
     * @param offSet index position
     */
    public String lindex(String key, long offSet) {
        return jedisCall((redis) -> redis.lindex(key, offSet));
    }

    /** Add a value to a set. */
    public Long sadd(String key, String value) {
        return jedisCall((redis) -> redis.sadd(key, value));
    }

    /** All members of a set. */
    public Set<String> smembers(String key) {
        return jedisCall((redis) -> redis.smembers(key));
    }

    /** Remove a value from a set. */
    public Long srem(String key, String value) {
        return jedisCall((redis) -> redis.srem(key, value));
    }

    /** Whether the value is a member of the set. */
    public Boolean sismember(String key, String value) {
        return jedisCall((redis) -> redis.sismember(key, value));
    }

    /** Set intersection. */
    public Set<String> sinter(String... keys) {
        return jedisCall((redis) -> redis.sinter(keys));
    }

    /** Set union. */
    public Set<String> sunion(String... keys) {
        return jedisCall((redis) -> redis.sunion(keys));
    }

    /** Set difference. */
    public Set<String> sdiff(String... keys) {
        return jedisCall((redis) -> redis.sdiff(keys));
    }

    /**
     * Add an element to a sorted set.
     *
     * @param score the weight
     */
    public Long zadd(String key, double score, String value) {
        return jedisCall((redis) -> redis.zadd(key, score, value));
    }

    /**
     * Range in score order.
     *
     * @param start 0 means the first element
     * @param end   -1 means the last element
     */
    public Set<String> zrange(String key, long start, long end) {
        return jedisCall((redis) -> redis.zrange(key, start, end));
    }

    /** Remove a value from a sorted set. */
    public Long zrem(String key, String value) {
        return jedisCall((redis) -> redis.zrem(key, value));
    }

    /** Number of elements in a sorted set. */
    public Long zcard(String key) {
        return jedisCall((redis) -> redis.zcard(key));
    }

    /** Score of a value in a sorted set. */
    public double zscore(String key, String value) {
        return jedisCall((redis) -> redis.zscore(key, value));
    }

    /** Count sorted-set elements whose score lies within a range, e.g. 1.0 to 5.0. */
    public Long zcount(String key, double min, double max) {
        return jedisCall((redis) -> redis.zcount(key, min, max));
    }

    /** Put a hash field. */
    public Long hset(String key, String field, String value) {
        return jedisCall((redis) -> redis.hset(key, field, value));
    }

    /** Increment an integer hash field. */
    public Long hincrBy(String key, String field, long value) {
        return jedisCall((redis) -> redis.hincrBy(key, field, value));
    }

    /** All values in a hash. */
    public List<String> hvals(String key) {
        return jedisCall((redis) -> redis.hvals(key));
    }

    /** Delete a hash field. */
    public Long hdel(String key, String field) {
        return jedisCall((redis) -> redis.hdel(key, field));
    }

    /* Delete by key. */
    public Long del(String key) {
        return jedisCall((redis) -> redis.del(key));
    }

    /** Whether a hash field exists. */
    public Boolean hexists(String key, String field) {
        return jedisCall((redis) -> redis.hexists(key, field));
    }

    /** Get a hash field. */
    public String hget(String key, String field) {
        return jedisCall((redis) -> redis.hget(key, field));
    }

    /** Get several hash fields. */
    public List<String> hmget(String key, String... field) {
        return jedisCall((redis) -> redis.hmget(key, field));
    }

    /** All keys of a hash. */
    public Set<String> hkeys(String key) {
        return jedisCall((redis) -> redis.hkeys(key));
    }

    /** Set a TTL on a key, in seconds. */
    public Long expire(String key, Integer seconds) {
        return jedisCall((redis) -> redis.expire(key, seconds));
    }

    /** Whether the key exists. */
    public boolean exitsKey(String key) {
        return jedisCall((redis) -> redis.exists(key));
    }

    /** Flush the whole database. Use with caution. */
    public String flushDatabase() {
        return jedisCall((redis) -> redis.flushDB());
    }

    private String buildKey(String bkey) {
        return VIRTUAL_COURSE_PREX + bkey;
    }

    @FunctionalInterface
    interface JedisFunction<R> {
        R call(JedisCluster jedis);
    }

    private <R> R jedisCall(JedisFunction<R> function) {
        if (jedisCluster == null) {
            String hosts = redisServer;
            jedisCluster = RedisUtils.getClusterClient(hosts);
        }
        try {
            return function.call(jedisCluster);
        } finally {
        }
    }
}
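Taken together with RedisBuilder, a caller of this deleted wrapper presumably looked like the following sketch (the host:port pairs and key names are placeholders, not from the commit):

// Hypothetical caller of the deleted Redis wrapper.
RedisClient client = RedisBuilder.getRedisClient("10.1.80.4:6379,10.1.80.5:6379");
client.set("pic:last-sync", "2019-08-21");   // plain string value
Long n = client.incr("pic:sync-count");      // atomic counter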
src/main/java/com/hikcreate/utils/redis/RedisClientUtil.java (deleted, 100644 → 0; view file @ 4a56114c)

package com.hikcreate.utils.redis;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

/**
 * @author lifuyi
 * @date 2019/1/31 10:53
 */
@Component
public class RedisClientUtil {

    // NOTE: Spring does not inject @Value or @Autowired into static fields,
    // so redisServer and redisClient stay null here.
    @Value("${redis.server}")
    private static String redisServer;

    @Autowired
    private static RedisClient redisClient;

    public static RedisClient getRedisClient() {
        if (redisClient == null) {
            synchronized (RedisClientUtil.class) {
                redisClient = RedisBuilder.getRedisClient(redisServer);
            }
        }
        return redisClient;
    }
}
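Since Spring skips static fields during injection, redisServer above is always null and getRedisClient would hit RedisBuilder's "hosts is empty" check. A common workaround, sketched here rather than taken from the project, is to inject through an instance setter that writes the static field:

@Component
public class RedisClientUtil {

    private static String redisServer;
    private static RedisClient redisClient;

    // Instance setter lets Spring inject, then stores into the static field.
    @Value("${redis.server}")
    public void setRedisServer(String server) {
        RedisClientUtil.redisServer = server;
    }

    public static RedisClient getRedisClient() {
        if (redisClient == null) {
            synchronized (RedisClientUtil.class) {
                if (redisClient == null) { // double-check inside the lock
                    redisClient = RedisBuilder.getRedisClient(redisServer);
                }
            }
        }
        return redisClient;
    }
}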
src/main/java/com/hikcreate/utils/redis/RedisUtils.java (deleted, 100644 → 0; view file @ 4a56114c)

package com.hikcreate.utils.redis;

import org.springframework.util.StringUtils;
import redis.clients.jedis.*;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Stream;

public class RedisUtils {

    // plain (non-sharded) pool
    private static JedisPool jedisPool;
    // sentinel pool
    private static JedisSentinelPool jedisSentinelPool;
    // sharded pool
    private static ShardedJedisPool shardedJedisPool;
    private static JedisPoolConfig config;
    private static JedisCluster jedisCluster;

    static {
        config = new JedisPoolConfig();
        config.setMaxTotal(20);
        config.setMaxIdle(5);
        config.setMaxWaitMillis(1000L);
        config.setTestOnBorrow(false);
        config.setJmxEnabled(true);
    }

    public static Jedis getJedisSentinelPool() {
        if (jedisSentinelPool == null) {
            synchronized (RedisUtils.class) {
                if (jedisSentinelPool == null) {
                    // JedisSentinelPool initialization
                    String masterName = "cdh6";
                    // sentinel addresses
                    Set<String> set = new HashSet<>();
                    set.add("10.1.80.4:26379");
                    set.add("10.1.80.5:26379");
                    set.add("10.1.80.7:26379");
                    jedisSentinelPool = new JedisSentinelPool(masterName, set, config);
                }
            }
        }
        return jedisSentinelPool.getResource();
    }

    public static Jedis getJedis() {
        if (jedisPool == null) {
            synchronized (RedisUtils.class) {
                // NOTE: builds a local JedisCluster but never assigns jedisPool,
                // so the return below dereferences null; see the sketch after this file.
                Set<HostAndPort> jedisClusterNodes = new HashSet<>();
                jedisClusterNodes.add(new HostAndPort("10.1.80.8", 6379));
                JedisCluster jc = new JedisCluster(jedisClusterNodes);
            }
        }
        return jedisPool.getResource();
    }

    public static ShardedJedis getShardedJedis() {
        // initialize the sharded pool
        List<JedisShardInfo> shards = new ArrayList<>();
        shards.add(new JedisShardInfo("cd1", 6379, "master"));
        shards.add(new JedisShardInfo("cd2", 6379, "master"));
        shards.add(new JedisShardInfo("cd3", 6379, "master"));
        shards.add(new JedisShardInfo("cd4", 6379, "master"));
        shards.add(new JedisShardInfo("cd5", 6379, "master"));
        shards.add(new JedisShardInfo("cd6", 6379, "master"));
        shards.add(new JedisShardInfo("cd7", 6379, "master"));
        shards.add(new JedisShardInfo("cd8", 6379, "master"));
        shardedJedisPool = new ShardedJedisPool(config, shards);
        ShardedJedis shardedJedis = shardedJedisPool.getResource();
        return shardedJedis;
    }

    public static JedisCluster getClusterClient(String hosts) {
        if (jedisCluster == null) {
            synchronized (RedisUtils.class) {
                if (jedisCluster == null) {
                    if (StringUtils.isEmpty(hosts)) {
                        throw new RuntimeException("hosts is empty");
                    }
                    Set<HostAndPort> jedisClusterNodes = new HashSet<>();
                    String[] hostStr = hosts.split(",");
                    Stream.of(hostStr).forEach(host -> {
                        String args[] = host.split(":");
                        if (args.length != 2) {
                            throw new RuntimeException("host format error");
                        }
                        jedisClusterNodes.add(new HostAndPort(args[0], Integer.parseInt(args[1])));
                    });
                    jedisCluster = new JedisCluster(jedisClusterNodes);
                }
            }
        }
        return jedisCluster;
    }

    public static void getOne() {
        Jedis jedis = new Jedis("cdh1");
        jedis.set("foo", "bar");
        String value = jedis.get("foo");
    }
}
\ No newline at end of file
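Because getJedis never assigns jedisPool (it only builds an unused local JedisCluster), every call ends in a NullPointerException. A sketch of a lazily initialized plain pool under the same double-checked-locking pattern the file already uses (the host is the one hard-coded above):

public static Jedis getJedis() {
    if (jedisPool == null) {
        synchronized (RedisUtils.class) {
            if (jedisPool == null) { // double-check inside the lock
                jedisPool = new JedisPool(config, "10.1.80.8", 6379);
            }
        }
    }
    return jedisPool.getResource();
}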