工作需要,技术选型采用 Springboot + dubbo + ActiveRecord + Sharding-jdbc-core。暂时只做了分表,经过测试已经成功。分享给需要的兄弟。
引入
<!-- https://mvnrepository.com/artifact/org.apache.shardingsphere/sharding-jdbc-core -->
<dependency>
    <groupId>org.apache.shardingsphere</groupId>
    <artifactId>sharding-jdbc-core</artifactId>
    <version>4.0.0-RC1</version>
</dependency>
application.yml配置好数据库url/username/password
新建一个 Sharding-JDBC 的配置类:
import com.jfinal.plugin.druid.DruidPlugin; import org.apache.shardingsphere.api.config.sharding.ShardingRuleConfiguration; import org.apache.shardingsphere.shardingjdbc.api.ShardingDataSourceFactory; import javax.sql.DataSource; import java.sql.SQLException; import java.util.HashMap; import java.util.Map; import java.util.Properties; public class ShardingJdbcConfig { //分表分库的rule ShardingRuleConfiguration shardingRuleConfiguration; //数据源map Map<String, DruidPlugin> druidPlugins; //原数据库连接源map Map<String, DataSource> dataSourceMap; //最终sharding-jdbc封装后的数据库连接源 DataSource dataSource; public ShardingJdbcConfig(ShardingRuleConfiguration shardingRuleConfiguration, Map<String, DruidPlugin> druidPlugins) { this.shardingRuleConfiguration =shardingRuleConfiguration; this.druidPlugins = druidPlugins; dataSourceMap=new HashMap<String,DataSource>(); } public boolean start() { //遍历数据源 ,将数据源加入sharding jdbc for(Map.Entry<String,DruidPlugin>entry:druidPlugins.entrySet()){ entry.getValue().start(); dataSourceMap.put(entry.getKey(),entry.getValue().getDataSource()); } try { //获得数据库连接类 dataSource = ShardingDataSourceFactory.createDataSource(dataSourceMap,shardingRuleConfiguration,new Properties()); } catch (SQLException e) { e.printStackTrace(); } return true; } public boolean stop() { for(Map.Entry<String,DruidPlugin> entry:druidPlugins.entrySet()){ entry.getValue().stop(); dataSourceMap.put(entry.getKey(),entry.getValue().getDataSource()); } return true; } public DataSource getDataSource() { return dataSource; } }
新建 ActiveRecord 的配置类:
import com.facebook.sz.yjzj.model.model._MappingKit; import com.jfinal.plugin.activerecord.ActiveRecordPlugin; import com.jfinal.plugin.druid.DruidPlugin; import org.apache.shardingsphere.api.config.sharding.ShardingRuleConfiguration; import org.apache.shardingsphere.api.config.sharding.TableRuleConfiguration; import org.apache.shardingsphere.api.config.sharding.strategy.InlineShardingStrategyConfiguration; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Configuration; import javax.annotation.PostConstruct; import java.util.HashMap; import java.util.Map; @Configuration public class ActiveRecordConfig { @Value("${spring.datasource.url}") private String jdbcUrl; @Value("${spring.datasource.username}") private String username; @Value("${spring.datasource.password}") private String password; @PostConstruct public void initJDBC() { DruidPlugin dp = new DruidPlugin(jdbcUrl, username, password); Map<String,DruidPlugin> drudMap = new HashMap(); drudMap.put("ds_0",dp); // 配置规则 ShardingRuleConfiguration shardingRuleConfig = new ShardingRuleConfiguration(); // 配置Order表规则 TableRuleConfiguration orderTableRuleConfig = new TableRuleConfiguration("user"); // 配置分表策略 orderTableRuleConfig.setTableShardingStrategyConfig(new InlineShardingStrategyConfiguration("id","user${id% 5}")); shardingRuleConfig.getTableRuleConfigs().add(orderTableRuleConfig); //获得自定义的扩展 ShardingJdbcConfig shardingJdbcConfig = new ShardingJdbcConfig(shardingRuleConfig,drudMap); // 与 jfinal web 环境唯一的不同是要手动调用一次相关插件的start()方法 shardingJdbcConfig.start(); ActiveRecordPlugin arp = new ActiveRecordPlugin(shardingJdbcConfig.dataSource); _MappingKit.mapping(arp); arp.start(); } }
如此即可。启动程序进行测试:本人亲测,单库分表已经没有问题;分库以及多库多表的分片方案有待进一步研究~~