You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
Encounter error "org.apache.shardingsphere.infra.exception.kernel.metadata.TableNotFoundException: Table or view '%s' does not exist" with Read-Write Splitting
#31179
Closed
jonaskahn opened this issue
May 9, 2024
· 5 comments
Which project did you use? ShardingSphere-JDBC or ShardingSphere-Proxy?
ShardingSphere-JDBC
Expected behavior
Read-Write Splitting does work well as version 5.3.2
Actual behavior
I setup 3 datasource (1 master, 2 slave) but it's all 1 database. My current setting (using Kotlin) work pretty well, but when I tried the version 5.4.1 and 5.5.0. It's completely broken, the error throw right after I query to DB while application (Spring boot) is starting: org.apache.shardingsphere.infra.exception.kernel.metadata.TableNotFoundException: Table or view '%s' does not exist.
Here is my config in version 5.3.2:
@Configuration
@ConditionalOnProperty(prefix = "app.datasource", name = ["mode"], havingValue = "multi")
class ShardingSphereDataSourceConfigurer : AbstractDatasourceConfigurer() {
@Value("\${app.datasource.schema:public}")
private val schema: String? = null
@Value("\${app.datasource.database:boffice_mst}")
private val database: String? = null
@Value("\${app.datasource.host:localhost}")
private val host: String? = null
@Value("\${app.datasource.port:5432}")
private val port: String? = null
@Value("\${app.datasource.username:username}")
private val username: String? = null
@Value("\${app.datasource.password:password}")
private val password: String? = null
@Value("\${app.datasource.slave1.host:localhost}")
private val sl1Host: String? = null
@Value("\${app.datasource.slave1.port:5432}")
private val sl1Port: String? = null
@Value("\${app.datasource.slave2.host:localhost}")
private val sl2Host: String? = null
@Value("\${app.datasource.slave2.port:5432}")
private val sl2Port: String? = null
@Value("\${app.datasource.maximumPoolSize}")
private val maximumPoolSize: Int? = null
@Value("\${app.datasource.connectionTimeout}")
private val connectionTimeout: Long? = null
@Value("\${app.datasource.maxLifetime}")
private val maxLifetime: Long? = null
@Bean
fun ignoreJPAReadOnlyTransactionBeanPostProcessor(): IgnoreJPAReadOnlyTransactionBeanPostProcessor {
return IgnoreJPAReadOnlyTransactionBeanPostProcessor()
}
@Bean
@Throws(SQLException::class)
fun dataSource(): DataSource {
return ShardingSphereDataSourceFactory.createDataSource(
"mdo-database",
createModeConfiguration(),
createDataSourceMap(),
createRuleConfiguration(),
createProperties()
)
}
private fun createProperties(): Properties {
val result = Properties()
result.setProperty(ConfigurationPropertyKey.SQL_SHOW.key, "true")
return result
}
private fun createRuleConfiguration(): Collection<RuleConfiguration> {
val result: MutableCollection<RuleConfiguration> = LinkedList()
result.add(createReadwriteSplittingRuleConfiguration())
return result
}
private fun createReadwriteSplittingRuleConfiguration(): ReadwriteSplittingRuleConfiguration {
val dataSourceConfig = ReadwriteSplittingDataSourceRuleConfiguration(
"standalone-read-write-ds",
StaticReadwriteSplittingStrategyConfiguration(
MASTER_SOURCE,
listOf(SLAVE1_SOURCE, SLAVE2_SOURCE)
), null,
"round_robin"
)
return ReadwriteSplittingRuleConfiguration(
setOf(dataSourceConfig),
java.util.Map.of("round_robin", AlgorithmConfiguration("ROUND_ROBIN", null))
)
}
private fun createDataSourceMap(): Map<String, DataSource> {
val result: MutableMap<String, DataSource> = LinkedHashMap()
result[MASTER_SOURCE] = createDataSource(MASTER_SOURCE, host, port)
result[SLAVE1_SOURCE] = createDataSource(SLAVE1_SOURCE, sl1Host, sl1Port)
result[SLAVE2_SOURCE] = createDataSource(SLAVE2_SOURCE, sl2Host, sl2Port)
return result
}
private fun createDataSource(datasourceId: String, host: String?, port: String?): DataSource {
val dataSource = HikariDataSource()
dataSource.driverClassName = "org.postgresql.Driver"
dataSource.jdbcUrl = "jdbc:postgresql://${host}:${port}/${database}?stringtype=unspecified"
dataSource.poolName = "DATASOURCE - $datasourceId"
dataSource.username = username
dataSource.password = password
dataSource.schema = schema
dataSource.maximumPoolSize = maximumPoolSize!!
dataSource.connectionTimeout = connectionTimeout!!
dataSource.maxLifetime = maxLifetime!!
addMetaData(dataSource)
return dataSource
}
private fun createModeConfiguration(): ModeConfiguration {
return ModeConfiguration("Standalone", StandalonePersistRepositoryConfiguration("JDBC", Properties()))
}
companion object {
private const val MASTER_SOURCE = "master"
private const val SLAVE1_SOURCE = "slave1"
private const val SLAVE2_SOURCE = "slave2"
}
}
Here is my config in version 5.4.1 and 5.5.0:
@Configuration
@ConditionalOnProperty(prefix = "app.datasource", name = ["mode"], havingValue = "multi")
class ShardingSphereDataSourceConfigurer : AbstractDatasourceConfigurer() {
@Value("\${app.datasource.schema:public}")
private val schema: String? = null
@Value("\${app.datasource.database:boffice_mst}")
private val database: String? = null
@Value("\${app.datasource.host:localhost}")
private val host: String? = null
@Value("\${app.datasource.port:5432}")
private val port: String? = null
@Value("\${app.datasource.username:username}")
private val username: String? = null
@Value("\${app.datasource.password:password}")
private val password: String? = null
@Value("\${app.datasource.slave1.host:localhost}")
private val sl1Host: String? = null
@Value("\${app.datasource.slave1.port:5432}")
private val sl1Port: String? = null
@Value("\${app.datasource.slave2.host:localhost}")
private val sl2Host: String? = null
@Value("\${app.datasource.slave2.port:5432}")
private val sl2Port: String? = null
@Value("\${app.datasource.maximumPoolSize}")
private val maximumPoolSize: Int? = null
@Value("\${app.datasource.connectionTimeout}")
private val connectionTimeout: Long? = null
@Value("\${app.datasource.maxLifetime}")
private val maxLifetime: Long? = null
@Bean
fun ignoreJPAReadOnlyTransactionBeanPostProcessor(): IgnoreJPAReadOnlyTransactionBeanPostProcessor {
return IgnoreJPAReadOnlyTransactionBeanPostProcessor()
}
@Bean
@Throws(SQLException::class)
fun dataSource(): DataSource {
return ShardingSphereDataSourceFactory.createDataSource(
"mdo-database",
createModeConfiguration(),
createDataSourceMap(),
createRuleConfiguration(),
createProperties()
)
}
private fun createProperties(): Properties {
val result = Properties()
result.setProperty(ConfigurationPropertyKey.SQL_SHOW.key, "true")
return result
}
private fun createRuleConfiguration(): Collection<RuleConfiguration> {
val result: MutableCollection<RuleConfiguration> = LinkedList()
result.add(createReadwriteSplittingRuleConfiguration())
return result
}
private fun createReadwriteSplittingRuleConfiguration(): ReadwriteSplittingRuleConfiguration {
val dataSourceConfig = ReadwriteSplittingDataSourceRuleConfiguration(
"standalone-read-write-ds",
MASTER_SOURCE,
listOf(SLAVE1_SOURCE, SLAVE2_SOURCE),
TransactionalReadQueryStrategy.DYNAMIC,
"round_robin"
)
return ReadwriteSplittingRuleConfiguration(
setOf(dataSourceConfig),
java.util.Map.of("round_robin", AlgorithmConfiguration("ROUND_ROBIN", null))
)
}
private fun createDataSourceMap(): Map<String, DataSource> {
val result: MutableMap<String, DataSource> = LinkedHashMap()
result[MASTER_SOURCE] = createDataSource(MASTER_SOURCE, host, port)
result[SLAVE1_SOURCE] = createDataSource(SLAVE1_SOURCE, sl1Host, sl1Port)
result[SLAVE2_SOURCE] = createDataSource(SLAVE2_SOURCE, sl2Host, sl2Port)
return result
}
private fun createDataSource(datasourceId: String, host: String?, port: String?): DataSource {
val dataSource = HikariDataSource()
dataSource.driverClassName = "org.postgresql.Driver"
dataSource.jdbcUrl = "jdbc:postgresql://${host}:${port}/${database}?stringtype=unspecified"
dataSource.poolName = "DATASOURCE - $datasourceId"
dataSource.username = username
dataSource.password = password
dataSource.schema = schema
dataSource.maximumPoolSize = maximumPoolSize!!
dataSource.connectionTimeout = connectionTimeout!!
dataSource.maxLifetime = maxLifetime!!
addMetaData(dataSource)
return dataSource
}
private fun createModeConfiguration(): ModeConfiguration {
return ModeConfiguration("Standalone", StandalonePersistRepositoryConfiguration("JDBC", Properties()))
}
companion object {
private const val MASTER_SOURCE = "master"
private const val SLAVE1_SOURCE = "slave1"
private const val SLAVE2_SOURCE = "slave2"
}
}
The text was updated successfully, but these errors were encountered:
I confirmed this page will solve the issue.
If you're using Kotlin or Java, you can take a glance on my configuration:
privatefuncreateRuleConfiguration(): Collection<RuleConfiguration> {
val result:MutableCollection<RuleConfiguration> =LinkedList()
// Add new configuration to load single rule
result.add(createSingleRuleConfiguration())
result.add(createReadwriteSplittingRuleConfiguration())
return result
}
privatefuncreateSingleRuleConfiguration(): SingleRuleConfiguration {
val rule =SingleRuleConfiguration()
rule.tables =listOf("*.*")
return rule
}
Bug Report
Which version of ShardingSphere did you use?
Which project did you use? ShardingSphere-JDBC or ShardingSphere-Proxy?
Expected behavior
Actual behavior
Here is my config in version
5.3.2
:Here is my config in version
5.4.1
and5.5.0
:The text was updated successfully, but these errors were encountered: