Add test module; switching between single and multiple data sources works without issues

lhc
2021-03-03 10:01:05 +08:00
parent 3da32d678c
commit 505c8e3aa5
7 changed files with 295 additions and 10 deletions

View File

@@ -0,0 +1,38 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>parent</artifactId>
        <groupId>com.hcframe</groupId>
        <version>1.0-SNAPSHOT</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>

    <artifactId>hcframe-test</artifactId>

    <properties>
        <maven.compiler.source>8</maven.compiler.source>
        <maven.compiler.target>8</maven.compiler.target>
    </properties>

    <dependencies>
        <dependency>
            <groupId>com.hcframe</groupId>
            <artifactId>hcframe-boot-starter</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <dependency>
            <groupId>com.hcframe</groupId>
            <artifactId>hcframe-base</artifactId>
            <version>1.2.1-SNAPSHOT</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>

View File

@@ -0,0 +1,25 @@
package com.hcframe.test;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
import org.springframework.boot.web.servlet.ServletComponentScan;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.EnableAspectJAutoProxy;
import springfox.documentation.swagger2.annotations.EnableSwagger2;

@SpringBootApplication(exclude = DataSourceAutoConfiguration.class)
@EnableAspectJAutoProxy(proxyTargetClass = true)
@EnableSwagger2
@ServletComponentScan
@EnableCaching
//@EnableDiscoveryClient
//@EnableFeignClients
@ComponentScan(basePackages = {"com.hcframe.**"})
public class TestApplication {

    public static void main(String[] args) {
        SpringApplication.run(TestApplication.class, args);
    }

}

View File

@@ -0,0 +1,77 @@
package com.hcframe.test.config;

import com.hcframe.base.module.auth.dao.FtUserDao;
import com.hcframe.base.module.shiro.service.ShiroType;
import com.hcframe.base.module.shiro.service.SystemRealm;
import org.apache.shiro.authz.SimpleAuthorizationInfo;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.LinkedHashMap;

/**
 * @author lhc
 * @date 2020-07-28
 * @description Shiro configuration class
 */
@Component
public class ShiroRealmConfig implements SystemRealm {

    @Resource
    FtUserDao ftUserDao;

    /**
     * Inject authorization info based on the user.
     *
     * @param user user information
     * @return authorization info
     */
    @Override
    public SimpleAuthorizationInfo setAuthoriztion(Object user) {
        return new SimpleAuthorizationInfo();
    }

    /**
     * Look up the user by id and inject it into Shiro.
     *
     * @param userId user id
     * @return user information
     */
    @Override
    public Object findByUserId(String userId) {
        // TODO implement the user lookup
        return null;
    }

    /**
     * Configure intercepted and permitted paths.
     *
     * @return map of intercepted and permitted paths
     */
    @Override
    public LinkedHashMap<String, String> setShiroUrl() {
        LinkedHashMap<String, String> map = new LinkedHashMap<>();
        // User login
        map.put("/ftUser/login", ShiroType.ANON);
        // Vue static resources
        map.put("/img/**", ShiroType.ANON);
        map.put("/static/**", ShiroType.ANON);
        map.put("/tinymce/**", ShiroType.ANON);
        map.put("/favicon.ico", ShiroType.ANON);
        map.put("/manifest.json", ShiroType.ANON);
        map.put("/robots.txt", ShiroType.ANON);
        map.put("/precache*", ShiroType.ANON);
        map.put("/service-worker.js", ShiroType.ANON);
        // Swagger UI static resources
        map.put("/swagger-ui.html", ShiroType.ANON);
        map.put("/doc.html", ShiroType.ANON);
        map.put("/swagger-resources/**", ShiroType.ANON);
        map.put("/webjars/**", ShiroType.ANON);
        map.put("/v2/api-docs", ShiroType.ANON);
        map.put("/v2/api-docs-ext", ShiroType.ANON);
        map.put("/swagger/**", ShiroType.ANON);
        // Druid resources
        map.put("/druid/**", ShiroType.ANON);
        // All remaining paths (left as ANON here, so nothing is intercepted in this test module)
        map.put("/**", ShiroType.ANON);
        return map;
    }
}
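
setAuthoriztion above returns an empty SimpleAuthorizationInfo, so no roles or permissions are granted yet. The snippet below is a minimal sketch of how roles and permissions could be attached using only the Shiro API; the class name AuthorizationSketch and the role/permission strings are placeholders, not part of this commit, and real values would come from FtUserDao lookups.

import org.apache.shiro.authz.SimpleAuthorizationInfo;

// Illustrative only: shows the Shiro calls used to grant roles and permissions.
// The hard-coded values are placeholders for data that would be loaded per user.
public class AuthorizationSketch {
    public static SimpleAuthorizationInfo buildAuthorization(Object user) {
        SimpleAuthorizationInfo info = new SimpleAuthorizationInfo();
        info.addRole("admin");                 // placeholder role
        info.addStringPermission("user:read"); // placeholder permission
        return info;
    }
}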

View File

@@ -0,0 +1,26 @@
package com.hcframe.test.controller;

import com.hcframe.base.common.ResultVO;
import com.hcframe.base.module.auth.entity.FtUser;
import com.hcframe.base.module.data.module.BaseMapper;
import com.hcframe.base.module.data.module.BaseMapperImpl;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class DataTestController {

    final BaseMapper baseMapper;

    public DataTestController(@Qualifier(BaseMapperImpl.BASE) BaseMapper baseMapper) {
        this.baseMapper = baseMapper;
    }

    @GetMapping("save")
    public ResultVO<Integer> insertTest() {
        return ResultVO.getSuccess(baseMapper.save(FtUser.builder().username("test123").build()));
    }
}
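
With the application.yml below (port 8081, context path /test) and the Shiro URL map above leaving every path anonymous, the new endpoint can be hit without logging in. The standalone checker below is a minimal sketch for a manual smoke test; it is not part of this commit, and the class name SaveEndpointCheck is illustrative.

import java.net.HttpURLConnection;
import java.net.URL;

// Minimal manual check of GET /test/save, assuming the module is running locally
// on port 8081 with context path /test (see application.yml below).
public class SaveEndpointCheck {
    public static void main(String[] args) throws Exception {
        URL url = new URL("http://localhost:8081/test/save");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        // Expect HTTP 200 with a ResultVO payload wrapping the affected row count.
        System.out.println("HTTP " + conn.getResponseCode());
        conn.disconnect();
    }
}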

View File

@@ -0,0 +1,128 @@
server:
  port: 8081
  servlet:
    context-path: /test
pagehelper:
  auto-runtime-dialect: true
spring:
  application:
    name: cloud-config
  # Cache settings
  cache:
    # Redis cache
    type: redis
    redis:
      time-to-live: 1d
      use-key-prefix: true
      cache-null-values: true
      key-prefix: cache.
    # Ehcache cache
    # type: ehcache
    # ehcache:
    #   config: classpath:ehcache.xml
  redis:
    database: 0
    host: 192.168.4.119
    port: 6379
    password:
    lettuce:
      pool:
        # Maximum idle connections in the pool (default 8)
        max-idle: 8
        # Minimum idle connections in the pool (default 0)
        min-idle: 0
        # Maximum connections in the pool (default 8; a negative value means no limit)
        max-active: 8
        # Maximum blocking wait time of the pool (a negative value means no limit; default -1)
        max-wait: -1
    timeout: 30000
  mvc:
    view:
      prefix: classpath*:/templates/
      suffix: .html
    static-path-pattern: classpath*:/static/**
  datasource:
    druid:
      # DM datasource connection. (If a SQLite file is used instead, configure an absolute path
      # and keep the file on the server rather than inside the jar/war package.)
      driver-class-name: dm.jdbc.driver.DmDriver
      url: jdbc:dm://192.168.1.131:5236/COMMON?zeroDateTimeBehavior=convertToNull&useUnicode=true&characterEncoding=utf-8
      username: COMMON
      password: 123456789
      # Druid connection pool settings
      initialSize: 5
      # Pool sizing: initial, minimum, maximum
      minIdle: 5
      maxActive: 20
      # Maximum wait time when acquiring a connection
      maxWait: 60000
      # Interval between checks for idle connections to close, in milliseconds
      timeBetweenEvictionRunsMillis: 60000
      # Minimum time a connection stays idle in the pool, in milliseconds
      minEvictableIdleTimeMillis: 300000
      validationQuery: select 1 from dual
      testWhileIdle: true
      testOnBorrow: false
      testOnReturn: false
      # Filters for monitoring statistics; removing them disables SQL statistics in the monitoring UI, 'wall' enables the firewall
      filters: stat,slf4j
      # connectionProperties enables the mergeSql feature and slow-SQL logging
      connectionProperties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000
mybatis:
  mapper-locations: classpath*:mapping/**/*.xml
  type-aliases-package: com.hcframe.**.entity
  configuration:
    # Enable SQL logging
    log-impl: org.apache.ibatis.logging.stdout.StdOutImpl
# Framework configuration
frame:
  # Whether login sessions use Redis. If enabled, the Redis node and related settings must be configured; if disabled, comment out the Redis configuration.
  isRedisLogin: true
  # Login timeout in hours; must not be 0
  loginTimeout: 4
  # Whether to enable controller log monitoring
  showControllerLog: true
  # Whether to enable multiple data sources
  multi-data-source: false
# Swagger configuration
swagger:
  # Controller package path
  path: com.hcframe.module.**.controller
  # Whether to enable authentication
  enableAuth: false
  # Username
  username: admin
  # Password
  password: admin
# Druid console access configuration
druid:
  # Druid username
  username: test
  # Druid password
  password: test
  # IPs allowed to access Druid (all allowed by default)
  # allow: 127.0.0.1
  # IPs denied access to Druid (none denied by default)
  # deny: 10.0.0.2
feign:
  client:
    config:
      default:
        connectTimeout: 300000
        readTimeout: 30000
        loggerLevel: basic
  okhttp:
    enabled: true
  hystrix:
    enabled: true
# Circuit breaker timeout
hystrix:
  command:
    default:
      execution:
        isolation:
          thread:
            timeoutInMilliseconds: 30000