Commit df86ecb6 by 李辅翼

v14

parent fa4a2747
...@@ -55,12 +55,22 @@ ...@@ -55,12 +55,22 @@
<dependency> <dependency>
<groupId>org.springframework.boot</groupId> <groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-aop</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId> <artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jdbc</artifactId>
</dependency>
<dependency>
<groupId>com.oracle</groupId> <groupId>com.oracle</groupId>
<artifactId>ojdbc6</artifactId> <artifactId>ojdbc6</artifactId>
<version>11.2.0.3</version> <version>11.2.0.3</version>
...@@ -127,7 +137,7 @@ ...@@ -127,7 +137,7 @@
</plugin> </plugin>
</plugins> </plugins>
<!-- <plugins> <!-- <plugins>
<plugin> <plugin>
<groupId>org.springframework.boot</groupId> <groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId> <artifactId>spring-boot-maven-plugin</artifactId>
...@@ -143,6 +153,7 @@ ...@@ -143,6 +153,7 @@
</configuration> </configuration>
</plugin> </plugin>
</plugins>--> </plugins>-->
</build> </build>
</project> </project>
...@@ -3,9 +3,12 @@ package com.hikcreate; ...@@ -3,9 +3,12 @@ package com.hikcreate;
import org.mybatis.spring.annotation.MapperScan; import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication; import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
import org.springframework.scheduling.annotation.EnableScheduling; import org.springframework.scheduling.annotation.EnableScheduling;
@SpringBootApplication @SpringBootApplication(exclude = {
DataSourceAutoConfiguration.class
})
@EnableScheduling @EnableScheduling
@MapperScan("com.hikcreate.dao") @MapperScan("com.hikcreate.dao")
public class FtpPicApplication { public class FtpPicApplication {
......
package com.hikcreate.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a DAO method with the name of the JDBC data source it must run against.
 *
 * <p>Read at runtime by the data-source routing aspect, which binds the named
 * source to the current thread before the method executes. The value must match
 * a key registered in the routing {@code DataSource} (e.g. "hik" or "bokang").
 *
 * Created by lifuyi on 2018/10/24.
 */
@Target({ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
public @interface DBSource {
/** Logical name of the target data source; must match a configured routing key. */
String name();
}
package com.hikcreate.aspect;
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;
/**
 * Routing {@code DataSource} whose lookup key is held in a per-thread variable.
 *
 * <p>Callers bind a source name with {@link #setDataSource(String)} before a DAO
 * call and release it with {@link #clearDataSource()} afterwards; Spring then
 * resolves the actual pool via {@link #determineCurrentLookupKey()}. A thread
 * with no binding falls through to the configured default target data source.
 *
 * Created by lifuyi on 2018/10/24.
 */
public class DynamicDataSource extends AbstractRoutingDataSource {

    /** Per-thread holder for the name of the data source to route to. */
    private static final ThreadLocal<String> CURRENT_SOURCE = new ThreadLocal<>();

    /** Returns this thread's bound source name, or {@code null} for the default. */
    @Override
    protected Object determineCurrentLookupKey() {
        return CURRENT_SOURCE.get();
    }

    /** Binds {@code sourceName} as the routing key for the current thread. */
    static void setDataSource(String sourceName) {
        CURRENT_SOURCE.set(sourceName);
    }

    /** Removes the current thread's binding so later calls use the default source. */
    static void clearDataSource() {
        CURRENT_SOURCE.remove();
    }
}
package com.hikcreate.aspect;
import com.hikcreate.annotation.DBSource;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.After;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.reflect.MethodSignature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
import java.lang.reflect.Method;
/**
 * Aspect that selects the JDBC data source for every DAO call.
 *
 * <p>Before a method under {@code com.hikcreate.dao} runs, the aspect reads the
 * {@link DBSource} annotation from the declared (interface) method and, if
 * present, from the implementing class's method — the implementation-level
 * annotation wins. The resolved name is bound to the current thread via
 * {@link DynamicDataSource#setDataSource(String)} and cleared again after the
 * call so the thread cannot leak a stale binding back into a pool.
 *
 * Created by lifuyi on 2018/10/24.
 */
@Aspect
@Order(-1) // must run before @Transactional so the connection is taken from the right source
@Component
public class DynamicDataSourceAspect {

    private static final Logger logger = LoggerFactory.getLogger(DynamicDataSourceAspect.class);

    /** Single shared pointcut: every method of every type under com.hikcreate.dao. */
    @Pointcut("execution(* com.hikcreate.dao..*.*(..))")
    public void pointCut() {
    }

    /**
     * Resolves the {@link DBSource} name for the intercepted DAO method and
     * binds it to the current thread. No binding is made when neither the
     * interface method nor the implementation method carries the annotation,
     * leaving routing to the default target data source.
     *
     * @param point join point describing the intercepted DAO invocation
     */
    @Before("pointCut()") // reuse pointCut() instead of duplicating the execution expression
    public void changeDataSource(JoinPoint point) throws Throwable {
        // Annotation on the declared (interface) method, if any.
        Method declaredMethod = ((MethodSignature) point.getSignature()).getMethod();
        String sourceName = null;
        if (declaredMethod.isAnnotationPresent(DBSource.class)) {
            sourceName = declaredMethod.getAnnotation(DBSource.class).name();
        }
        // Annotation on the implementing class's method overrides the interface one.
        try {
            Method implMethod = point.getTarget().getClass()
                    .getMethod(point.getSignature().getName(), declaredMethod.getParameterTypes());
            if (implMethod.isAnnotationPresent(DBSource.class)) {
                sourceName = implMethod.getAnnotation(DBSource.class).name();
            }
        } catch (NoSuchMethodException | SecurityException e) {
            // Best effort: fall back to whatever the interface-level lookup found above.
            logger.warn("Cannot inspect target method for @DBSource on {}: {}",
                    point.getSignature(), e.getMessage());
        }
        if (sourceName != null) {
            DynamicDataSource.setDataSource(sourceName);
        }
    }

    /**
     * Clears the thread's data-source binding after the DAO method finishes
     * (runs on both normal return and thrown exception).
     */
    @After("pointCut()")
    public void after(JoinPoint point) {
        DynamicDataSource.clearDataSource();
    }
}
package com.hikcreate.conf;
import com.hikcreate.aspect.DynamicDataSource;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import javax.sql.DataSource;
import java.util.HashMap;
import java.util.Map;
/**
 * Declares the two Oracle connection pools ("hik" and "bokang") and wires them
 * behind a single routing {@link DynamicDataSource} exposed as "dataSource".
 *
 * <p>The inter-bean calls in {@link #dataSource()} are safe here: Spring
 * proxies {@code @Configuration} classes, so each call returns the singleton
 * bean rather than a fresh pool.
 *
 * Created by lifuyi on 2018/10/24.
 */
@Configuration
public class DataSourceConfig {

    /** Concrete pool class from spring.datasource.type (Druid in this project). */
    @Value("${spring.datasource.type}")
    private Class<? extends DataSource> dataSourceType;

    /** Pool for the "hik" Oracle instance; Druid's init/close managed by Spring. */
    @Bean(name="masterDataSource", destroyMethod = "close", initMethod="init")
    @ConfigurationProperties(prefix = "spring.datasource.hik")
    public DataSource masterDataSource() {
        return DataSourceBuilder.create().type(dataSourceType).build();
    }

    /** Pool for the "bokang" Oracle instance; Druid's init/close managed by Spring. */
    @Bean(name="slaveDataSource", destroyMethod = "close", initMethod="init")
    @ConfigurationProperties(prefix = "spring.datasource.bokang")
    public DataSource slaveDataSource() {
        return DataSourceBuilder.create().type(dataSourceType).build();
    }

    /**
     * Thread-routed facade over both pools. Keys "hik" and "bokang" match the
     * names used by @DBSource; unannotated calls fall back to the bokang pool.
     */
    @Bean(name = "dataSource")
    public DataSource dataSource() {
        Map<Object, Object> routes = new HashMap<>();
        routes.put("hik", masterDataSource());
        routes.put("bokang", slaveDataSource());

        DynamicDataSource routing = new DynamicDataSource();
        routing.setTargetDataSources(routes);
        routing.setDefaultTargetDataSource(slaveDataSource());
        return routing;
    }
}
package com.hikcreate.dao; package com.hikcreate.dao;
import com.hikcreate.annotation.DBSource;
import com.hikcreate.entity.DriverPhoto; import com.hikcreate.entity.DriverPhoto;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
...@@ -7,9 +8,10 @@ import java.util.List; ...@@ -7,9 +8,10 @@ import java.util.List;
@Component @Component
public interface DriverPhotoMapper { public interface DriverPhotoMapper {
@DBSource(name = "bokang")
List<DriverPhoto> getIncrePhoto(); List<DriverPhoto> getIncrePhoto();
@DBSource(name = "bokang")
List<DriverPhoto> getIncrePhotoBySfzmhm(String sfzmhm); List<DriverPhoto> getIncrePhotoBySfzmhm(String sfzmhm);
@DBSource(name = "bokang")
List<DriverPhoto> getIncrePhotoStage(String start, String end); List<DriverPhoto> getIncrePhotoStage(String start, String end);
} }
...@@ -36,12 +36,6 @@ public class DrvPhotoImpl implements DrvPhoto { ...@@ -36,12 +36,6 @@ public class DrvPhotoImpl implements DrvPhoto {
private static Logger logger = LoggerFactory.getLogger(DrvPhotoImpl.class); private static Logger logger = LoggerFactory.getLogger(DrvPhotoImpl.class);
@Value("${url}")
private String url;
@Value("${username}")
private String username;
@Value("${password}")
private String password;
@Value("${roundDay}") @Value("${roundDay}")
private int roundDay; private int roundDay;
@Autowired @Autowired
......
...@@ -25,7 +25,7 @@ public class PicSchedule { ...@@ -25,7 +25,7 @@ public class PicSchedule {
/** /**
* 每天早上10点同步驾驶人头像增量数据 * 每天早上10点同步驾驶人头像增量数据
*/ */
@Scheduled(cron = "0 0 10 * * *") // @Scheduled(cron = "0 0 10 * * *")
public void getIncrementDrvPhoto(){ public void getIncrementDrvPhoto(){
drvPhoto.getIncrementDrvPhoto(); drvPhoto.getIncrementDrvPhoto();
} }
...@@ -35,7 +35,7 @@ public class PicSchedule { ...@@ -35,7 +35,7 @@ public class PicSchedule {
/** /**
* 每天早上9点同步机动车的增量数据 * 每天早上9点同步机动车的增量数据
*/ */
@Scheduled(cron = "0 0 9 * * *") // @Scheduled(cron = "0 0 9 * * *")
public void getIncrementVehPic(){ public void getIncrementVehPic(){
vehicle.getIncrementVehPic(); vehicle.getIncrementVehPic();
} }
......
spring.datasource.url=jdbc:oracle:thin:@172.16.17.82:1521:gyjg #数据库驱动
spring.datasource.username=gyjg_zckj spring.datasource.hik.driver-class-name=oracle.jdbc.driver.OracleDriver
spring.datasource.password=zckj2018 #数据源地址
spring.datasource.driver-class-name=oracle.jdbc.driver.OracleDriver spring.datasource.hik.url=jdbc:oracle:thin:@172.16.25.02:1521:orcl
#用户名
spring.datasource.hik.username=zckj
#密码
spring.datasource.hik.password=ZCKJ2018
#博康oracle
spring.datasource.bokang.url=jdbc:oracle:thin:@172.16.17.82:1521:gyjg
spring.datasource.bokang.username=gyjg_zckj
spring.datasource.bokang.password=zckj2018
spring.datasource.bokang.driver-class-name=oracle.jdbc.driver.OracleDriver
spring.datasource.type=com.alibaba.druid.pool.DruidDataSource spring.datasource.type=com.alibaba.druid.pool.DruidDataSource
mybatis.mapper-locations=classpath:mapper/*.xml mybatis.mapper-locations=classpath:mapper/*.xml
mybatis.type-aliases-package=com.hikcreate.entity mybatis.type-aliases-package=com.hikcreate.entity
...@@ -27,7 +38,7 @@ fdfs.tracker-list[1] = 172.16.25.26:22122 ...@@ -27,7 +38,7 @@ fdfs.tracker-list[1] = 172.16.25.26:22122
fdfs.pool.max-total = 200 fdfs.pool.max-total = 200
fdfs.pool.max-wait-millis = 150 fdfs.pool.max-wait-millis = 150
#端口 #端口
server.port=8084 server.port=8085
hbase.zookeeper.property.clientPort=2181 hbase.zookeeper.property.clientPort=2181
hbase.zookeeper.quorum=172.16.25.25,172.16.25.28,172.16.25.24,172.16.25.26,172.16.25.27 hbase.zookeeper.quorum=172.16.25.25,172.16.25.28,172.16.25.24,172.16.25.26,172.16.25.27
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment