Upgrading to Quarkus 1.13.2.Final
parent 8255cba930
commit 6d17117f42
15 changed files with 310 additions and 228 deletions
.github/workflows/ci.yml (vendored)
@@ -302,7 +302,7 @@ jobs:
     needs: build
     runs-on: ubuntu-latest
     env:
-      MAVEN_OPTS: -Xmx2048m
+      MAVEN_OPTS: -Xmx1024m
     steps:
       - uses: actions/checkout@v2
      - name: Cache Maven packages

@@ -75,6 +75,6 @@ if [ "$DEBUG_MODE" = "true" ]; then
     fi
 fi

-CLASSPATH_OPTS="$DIRNAME/../lib/quarkus-run.jar:$DIRNAME/../lib/main/*"
+CLASSPATH_OPTS="$DIRNAME/../lib/quarkus-run.jar"

 exec java $JAVA_OPTS $SERVER_OPTS -cp $CLASSPATH_OPTS io.quarkus.bootstrap.runner.QuarkusEntryPoint ${CONFIG_ARGS#?}

@@ -42,6 +42,7 @@ import java.util.NoSuchElementException;
 import java.util.Optional;
 import java.util.Properties;
 import java.util.ServiceLoader;
+import java.util.function.Consumer;
 import java.util.function.Function;
 import java.util.jar.JarEntry;
 import java.util.jar.JarFile;
@@ -50,6 +51,7 @@ import io.quarkus.deployment.IsDevelopment;
 import io.quarkus.deployment.builditem.HotDeploymentWatchedFileBuildItem;
 import io.quarkus.deployment.builditem.IndexDependencyBuildItem;
 import io.quarkus.hibernate.orm.deployment.HibernateOrmConfig;
+import io.quarkus.resteasy.server.common.deployment.ResteasyDeploymentCustomizerBuildItem;
 import io.quarkus.smallrye.health.runtime.SmallRyeHealthHandler;
 import io.quarkus.vertx.http.deployment.RouteBuildItem;
 import io.vertx.core.Handler;
@@ -57,8 +59,10 @@ import io.vertx.ext.web.RoutingContext;
 import org.hibernate.cfg.AvailableSettings;
 import org.hibernate.jpa.boot.spi.PersistenceUnitDescriptor;
 import org.jboss.logging.Logger;
+import org.jboss.resteasy.plugins.server.servlet.ResteasyContextParameters;
 import org.jboss.resteasy.spi.ResteasyDeployment;
 import org.keycloak.Config;
+import org.keycloak.QuarkusKeycloakApplication;
 import org.keycloak.authentication.AuthenticatorSpi;
 import org.keycloak.authentication.authenticators.browser.DeployedScriptAuthenticatorFactory;
 import org.keycloak.authorization.policy.provider.PolicySpi;
@@ -285,6 +289,20 @@ class KeycloakProcessor {
         routes.produce(new RouteBuildItem(KeycloakMetricsHandler.DEFAULT_METRICS_ENDPOINT, metricsHandler));
     }

+    @BuildStep
+    void configureResteasy(BuildProducer<ResteasyDeploymentCustomizerBuildItem> deploymentCustomizerProducer) {
+        deploymentCustomizerProducer.produce(new ResteasyDeploymentCustomizerBuildItem(new Consumer<ResteasyDeployment>() {
+            @Override
+            public void accept(ResteasyDeployment resteasyDeployment) {
+                // we need to explicitly set the application to avoid errors at build time due to the application
+                // from keycloak-services also being added to the index
+                resteasyDeployment.setApplicationClass(QuarkusKeycloakApplication.class.getName());
+                // we need to disable the sanitizer to avoid escaping text/html responses from the server
+                resteasyDeployment.setProperty(ResteasyContextParameters.RESTEASY_DISABLE_HTML_SANITIZER, Boolean.TRUE);
+            }
+        }));
+    }
+
     @BuildStep(onlyIf = IsDevelopment.class)
     void configureDevMode(BuildProducer<HotDeploymentWatchedFileBuildItem> hotFiles) {
         hotFiles.produce(new HotDeploymentWatchedFileBuildItem("META-INF/keycloak.properties"));
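
The new configureResteasy build step registers the Keycloak JAX-RS application and disables RESTEasy's HTML sanitizer at build time, replacing the runtime workaround removed from QuarkusKeycloakApplication later in this commit. Purely as an illustration (not part of the commit, and assuming the same class and imports as the hunk above), the anonymous Consumer could also be written as a lambda:

    @BuildStep
    void configureResteasy(BuildProducer<ResteasyDeploymentCustomizerBuildItem> producer) {
        producer.produce(new ResteasyDeploymentCustomizerBuildItem(deployment -> {
            // same customization as the build step in the hunk above
            deployment.setApplicationClass(QuarkusKeycloakApplication.class.getName());
            deployment.setProperty(ResteasyContextParameters.RESTEASY_DISABLE_HTML_SANITIZER, Boolean.TRUE);
        }));
    }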

@@ -61,6 +61,7 @@ class LiquibaseProcessor {
             } else {
                 classes.addAll(index.getAllKnownSubclasses(DotName.createSimple(c.getName())));
             }
+            filterImplementations(c, classes);
             for (ClassInfo found : classes) {
                 if (Modifier.isAbstract(found.flags()) ||
                         Modifier.isInterface(found.flags()) ||
@@ -83,4 +84,11 @@ class LiquibaseProcessor {

         recorder.configureLiquibase(services);
     }
+
+    private void filterImplementations(Class<?> types, Set<ClassInfo> classes) {
+        if (Database.class.equals(types)) {
+            // removes unsupported databases
+            classes.removeIf(classInfo -> !org.keycloak.configuration.Database.isSupported(classInfo.name().toString()));
+        }
+    }
 }

@@ -1,3 +1,5 @@
 quarkus.http.root-path=/
 quarkus.application.name=Keycloak
 quarkus.banner.enabled=false
+
+quarkus.resteasy.ignore-application-classes=true

@@ -31,14 +31,15 @@
     <packaging>pom</packaging>

     <properties>
-        <quarkus.version>1.12.2.Final</quarkus.version>
+        <quarkus.version>1.13.2.Final</quarkus.version>
         <resteasy.version>4.5.9.Final</resteasy.version>
         <jackson.version>2.12.1</jackson.version>
         <jackson.databind.version>${jackson.version}</jackson.databind.version>
-        <hibernate.version>5.4.28.Final</hibernate.version>
+        <hibernate.version>5.4.29.Final</hibernate.version>
         <mysql-connector-java.version>8.0.23</mysql-connector-java.version>
+        <postgresql.version>42.2.19</postgresql.version>
         <picocli.version>4.6.1</picocli.version>
-        <snakeyaml.version>1.27</snakeyaml.version>
+        <snakeyaml.version>1.28</snakeyaml.version>
         <surefire-plugin.version>3.0.0-M5</surefire-plugin.version>
         <wildfly.common.format.version>1.5.4.Final-format-001</wildfly.common.format.version>
         <maven.compiler.source>1.8</maven.compiler.source>
@@ -99,6 +100,17 @@
             <artifactId>mysql-connector-java</artifactId>
             <version>${mysql-connector-java.version}</version>
         </dependency>
+        <dependency>
+            <groupId>org.postgresql</groupId>
+            <artifactId>postgresql</artifactId>
+            <version>${postgresql.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.checkerframework</groupId>
+                    <artifactId>checker-qual</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
     </dependencies>
 </dependencyManagement>

@@ -1,8 +1,6 @@
 package org.keycloak;

-import java.util.HashSet;
 import java.util.Set;
-import java.util.function.Predicate;
 import java.util.stream.Collectors;

 import javax.enterprise.inject.Instance;
@@ -10,9 +8,6 @@ import javax.inject.Inject;
 import javax.persistence.EntityManagerFactory;
 import javax.ws.rs.ApplicationPath;

-import org.jboss.resteasy.plugins.server.servlet.ResteasyContextParameters;
-import org.jboss.resteasy.spi.ResteasyDeployment;
-import org.keycloak.common.util.Resteasy;
 import org.keycloak.models.utils.PostMigrationEvent;
 import org.keycloak.provider.quarkus.QuarkusPlatform;
 import org.keycloak.services.resources.KeycloakApplication;
@@ -22,9 +17,13 @@ import org.keycloak.services.resources.WelcomeResource;
 @ApplicationPath("/")
 public class QuarkusKeycloakApplication extends KeycloakApplication {

+    private static boolean filterSingletons(Object o) {
+        return !WelcomeResource.class.isInstance(o);
+    }
+
     @Inject
     Instance<EntityManagerFactory> entityManagerFactory;

     @Override
     protected void startup() {
         try {
@@ -38,16 +37,9 @@ public class QuarkusKeycloakApplication extends KeycloakApplication {

     @Override
     public Set<Object> getSingletons() {
-        //TODO: a temporary hack for https://github.com/quarkusio/quarkus/issues/9647, we need to disable the sanitizer to avoid
-        // escaping text/html responses from the server
-        Resteasy.getContextData(ResteasyDeployment.class).setProperty(ResteasyContextParameters.RESTEASY_DISABLE_HTML_SANITIZER, Boolean.TRUE);
-
-        HashSet<Object> singletons = new HashSet<>(super.getSingletons().stream().filter(new Predicate<Object>() {
-            @Override
-            public boolean test(Object o) {
-                return !WelcomeResource.class.isInstance(o);
-            }
-        }).collect(Collectors.toSet()));
-
+        Set<Object> singletons = super.getSingletons().stream()
+                .filter(QuarkusKeycloakApplication::filterSingletons)
+                .collect(Collectors.toSet());
+
         singletons.add(new QuarkusWelcomeResource());

@@ -23,7 +23,13 @@ import java.util.Map;
 import java.util.Optional;
 import java.util.function.Function;

-class Database {
+import liquibase.database.core.H2Database;
+import liquibase.database.core.PostgresDatabase;
+import org.keycloak.connections.jpa.updater.liquibase.PostgresPlusDatabase;
+import org.keycloak.connections.jpa.updater.liquibase.UpdatedMariaDBDatabase;
+import org.keycloak.connections.jpa.updater.liquibase.UpdatedMySqlDatabase;
+
+public class Database {

     private static Map<String, Vendor> DATABASES = new HashMap<>();

@@ -37,7 +43,7 @@ class Database {
         }
     }

-    static boolean isSupported(String alias) {
+    public static boolean isSupported(String alias) {
         return DATABASES.containsKey(alias);
     }

@@ -81,11 +87,13 @@ class Database {
             }
             return "jdbc:h2:mem:keycloakdb${kc.db.url.properties:}";
         }
-    }, "h2-mem", "h2-file"),
+    }, "h2-mem", "h2-file", H2Database.class.getName()),
     MYSQL("com.mysql.cj.jdbc.MysqlXADataSource", "org.hibernate.dialect.MySQL8Dialect",
-            "jdbc:mysql://${kc.db.url.host:localhost}/${kc.db.url.database:keycloak}${kc.db.url.properties:}"),
+            "jdbc:mysql://${kc.db.url.host:localhost}/${kc.db.url.database:keycloak}${kc.db.url.properties:}",
+            UpdatedMySqlDatabase.class.getName()),
     MARIADB("org.mariadb.jdbc.MySQLDataSource", "org.hibernate.dialect.MariaDBDialect",
-            "jdbc:mariadb://${kc.db.url.host:localhost}/${kc.db.url.database:keycloak}${kc.db.url.properties:}"),
+            "jdbc:mariadb://${kc.db.url.host:localhost}/${kc.db.url.database:keycloak}${kc.db.url.properties:}",
+            UpdatedMariaDBDatabase.class.getName()),
     POSTGRES("org.postgresql.xa.PGXADataSource", new Function<String, String>() {
         @Override
         public String apply(String alias) {
@@ -95,7 +103,7 @@ class Database {
             return "io.quarkus.hibernate.orm.runtime.dialect.QuarkusPostgreSQL10Dialect";
         }
     }, "jdbc:postgresql://${kc.db.url.host:localhost}/${kc.db.url.database:keycloak}${kc.db.url.properties:}",
-            "postgres-95", "postgres-10");
+            "postgres-95", "postgres-10", PostgresDatabase.class.getName(), PostgresPlusDatabase.class.getName());

     final String driver;
     final Function<String, String> dialect;
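
The aliases registered for each vendor now also include the Liquibase Database implementation class names, which is what lets the filterImplementations step added to LiquibaseProcessor keep only supported implementations in the index. A hedged sketch of the effect (not part of the commit; the DB2 class is only an example of a vendor Keycloak does not register):

    boolean postgres = Database.isSupported("liquibase.database.core.PostgresDatabase"); // true: alias added above
    boolean db2 = Database.isSupported("liquibase.database.core.DB2Database");           // false: filtered out by LiquibaseProcessor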

@@ -140,7 +140,7 @@ public final class PropertyMappers {
     }

     private static void configureClustering() {
-        createWithDefault("cluster", "kc.spi.connections-infinispan.default.config-file", "default", (value, context) -> "cluster-" + value + ".xml", "Specifies clustering configuration. The specified value points to the infinispan configuration file prefixed with the 'cluster-` "
+        createWithDefault("cluster", "kc.spi.connections-infinispan.quarkus.config-file", "default", (value, context) -> "cluster-" + value + ".xml", "Specifies clustering configuration. The specified value points to the infinispan configuration file prefixed with the 'cluster-` "
                 + "inside the distribution configuration directory. Supported values out of the box are 'local' and 'cluster'. Value 'local' points to the file cluster-local.xml and " +
                 "effectively disables clustering and use infinispan caches in the local mode. Value 'default' points to the file cluster-default.xml, which has clustering enabled for infinispan caches.");
         create("cluster-stack", "kc.spi.connections-infinispan.default.stack", "Specified the default stack to use for cluster communication and node discovery. Possible values are: tcp, udp, kubernetes, ec2, azure, google.");
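
The scope in the mapped property changes from "default" to "quarkus", matching the id of the QuarkusInfinispanConnectionFactory added later in this commit and the updated assertions in ConfigurationTest. A hedged sketch of the resulting lookup (not part of the commit, assuming org.keycloak.Config has been initialized):

    // resolves kc.spi.connections-infinispan.quarkus.config-file, e.g. "cluster-default.xml"
    Config.Scope scope = Config.scope("connectionsInfinispan", "quarkus");
    String configFile = scope.get("configFile");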

@@ -28,15 +28,20 @@ import org.eclipse.microprofile.config.spi.ConfigSource;
  */
 public class SysPropConfigSource implements ConfigSource {

-    public Map<String, String> getProperties() {
-        Map<String, String> output = new TreeMap<>();
+    private final Map<String, String> properties = new TreeMap<>();
+
+    public SysPropConfigSource() {
         for (Map.Entry<Object, Object> entry : System.getProperties().entrySet()) {
             String key = (String) entry.getKey();
             if (key.startsWith(MicroProfileConfigProvider.NS_KEYCLOAK_PREFIX)) {
-                output.put(key, entry.getValue().toString());
+                properties.put(key, entry.getValue().toString());
             }
         }
-        return output;
+    }
+
+    @Override
+    public Map<String, String> getProperties() {
+        return properties;
     }

     public String getValue(final String propertyName) {
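
The kc.* system properties are now copied into the map once, when the config source is constructed, instead of being rescanned on every getProperties() call. A hedged usage sketch (not part of the commit; the property name is only an example):

    System.setProperty("kc.db.url.host", "db.example.com");      // hypothetical kc.* property
    SysPropConfigSource source = new SysPropConfigSource();      // snapshot of kc.* properties taken here
    String host = source.getProperties().get("kc.db.url.host");  // served from the cached map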

@@ -0,0 +1,47 @@
+/*
+ *
+ * * Copyright 2021 Red Hat, Inc. and/or its affiliates
+ * * and other contributors as indicated by the @author tags.
+ * *
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * *
+ * * http://www.apache.org/licenses/LICENSE-2.0
+ * *
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ *
+ */
+
+package org.keycloak.connections.cache;
+
+import org.infinispan.manager.EmbeddedCacheManager;
+import org.keycloak.connections.infinispan.DefaultInfinispanConnectionProviderFactory;
+
+/**
+ * @author <a href="mailto:psilva@redhat.com">Pedro Igor</a>
+ */
+public class QuarkusInfinispanConnectionFactory extends DefaultInfinispanConnectionProviderFactory {
+
+    @Override
+    protected void initContainerManaged(EmbeddedCacheManager cacheManager) {
+        super.initContainerManaged(cacheManager);
+        // force closing the cache manager when stopping the provider
+        // we probably want to refactor the default impl a bit to support this use case
+        containerManaged = false;
+    }
+
+    @Override
+    public int order() {
+        return 100;
+    }
+
+    @Override
+    public String getId() {
+        return "quarkus";
+    }
+}

@@ -18,6 +18,7 @@
 package org.keycloak.connections.jpa;

 import static org.keycloak.connections.liquibase.QuarkusJpaUpdaterProvider.VERIFY_AND_RUN_MASTER_CHANGELOG;
+import static org.keycloak.models.utils.KeycloakModelUtils.runJobInTransaction;

 import java.io.File;
 import java.io.FileInputStream;
@@ -43,7 +44,6 @@ import javax.transaction.Transaction;
 import com.fasterxml.jackson.core.type.TypeReference;
 import io.quarkus.runtime.Quarkus;
 import org.hibernate.internal.SessionFactoryImpl;
-import org.hibernate.internal.SessionImpl;
 import org.jboss.logging.Logger;
 import org.keycloak.Config;
 import org.keycloak.ServerStartupError;
@@ -54,14 +54,12 @@ import org.keycloak.migration.MigrationModelManager;
 import org.keycloak.migration.ModelVersion;
 import org.keycloak.models.KeycloakSession;
 import org.keycloak.models.KeycloakSessionFactory;
-import org.keycloak.models.KeycloakSessionTask;
 import org.keycloak.models.ModelDuplicateException;
 import org.keycloak.models.RealmModel;
 import org.keycloak.models.UserModel;
 import org.keycloak.models.UserProvider;
 import org.keycloak.models.dblock.DBLockManager;
 import org.keycloak.models.dblock.DBLockProvider;
-import org.keycloak.models.utils.KeycloakModelUtils;
 import org.keycloak.models.utils.RepresentationToModel;
 import org.keycloak.provider.ServerInfoAwareProviderFactory;
 import org.keycloak.representations.idm.RealmRepresentation;
@@ -75,10 +73,9 @@ import org.keycloak.util.JsonSerialization;
 /**
  * @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
  */
-public class QuarkusJpaConnectionProviderFactory implements JpaConnectionProviderFactory, ServerInfoAwareProviderFactory {
+public final class QuarkusJpaConnectionProviderFactory implements JpaConnectionProviderFactory, ServerInfoAwareProviderFactory {

     private static final Logger logger = Logger.getLogger(QuarkusJpaConnectionProviderFactory.class);

     private static final String SQL_GET_LATEST_VERSION = "SELECT VERSION FROM %sMIGRATION_MODEL";

     enum MigrationStrategy {
@@ -86,35 +83,14 @@ public class QuarkusJpaConnectionProviderFactory implements JpaConnectionProvide
     }

     private EntityManagerFactory emf;

     private Config.Scope config;

     private Map<String, String> operationalInfo;

-    private boolean jtaEnabled;
-    private JtaTransactionManagerLookup jtaLookup;

     private KeycloakSessionFactory factory;

     @Override
     public JpaConnectionProvider create(KeycloakSession session) {
         logger.trace("Create QuarkusJpaConnectionProvider");
-        EntityManager em;
-        if (!jtaEnabled) {
-            logger.trace("enlisting EntityManager in JpaKeycloakTransaction");
-            em = emf.createEntityManager();
-            try {
-                SessionImpl.class.cast(em).connection().setAutoCommit(false);
-            } catch (SQLException cause) {
-                throw new RuntimeException(cause);
-            }
-        } else {
-
-            em = emf.createEntityManager(SynchronizationType.SYNCHRONIZED);
-        }
-        em = PersistenceExceptionConverter.create(session, em);
-        if (!jtaEnabled) session.getTransactionManager().enlist(new JpaKeycloakTransaction(em));
-        return new DefaultJpaConnectionProvider(em);
+        return new DefaultJpaConnectionProvider(createEntityManager(session));
     }

     @Override
@@ -137,150 +113,9 @@ public class QuarkusJpaConnectionProviderFactory implements JpaConnectionProvide
     @Override
     public void postInit(KeycloakSessionFactory factory) {
         this.factory = factory;
-        checkJtaEnabled(factory);
         lazyInit();
     }

-    protected void checkJtaEnabled(KeycloakSessionFactory factory) {
-        jtaLookup = (JtaTransactionManagerLookup) factory.getProviderFactory(JtaTransactionManagerLookup.class);
-        if (jtaLookup != null) {
-            if (jtaLookup.getTransactionManager() != null) {
-                jtaEnabled = true;
-            }
-        }
-    }
-
-    private String getSchema(String schema) {
-        return schema == null ? "" : schema + ".";
-    }
-
-    private File getDatabaseUpdateFile() {
-        String databaseUpdateFile = config.get("migrationExport", "keycloak-database-update.sql");
-        return new File(databaseUpdateFile);
-    }
-
-    protected void prepareOperationalInfo(Connection connection) {
-        try {
-            operationalInfo = new LinkedHashMap<>();
-            DatabaseMetaData md = connection.getMetaData();
-            operationalInfo.put("databaseUrl", md.getURL());
-            operationalInfo.put("databaseUser", md.getUserName());
-            operationalInfo.put("databaseProduct", md.getDatabaseProductName() + " " + md.getDatabaseProductVersion());
-            operationalInfo.put("databaseDriver", md.getDriverName() + " " + md.getDriverVersion());
-
-            logger.infof("Database info: %s", operationalInfo.toString());
-        } catch (SQLException e) {
-            logger.warn("Unable to prepare operational info due database exception: " + e.getMessage());
-        }
-    }
-
-    void migration(String schema, Connection connection, KeycloakSession session) {
-        MigrationStrategy strategy = getMigrationStrategy();
-        boolean initializeEmpty = config.getBoolean("initializeEmpty", true);
-        File databaseUpdateFile = getDatabaseUpdateFile();
-
-        String version = null;
-
-        try {
-            try (Statement statement = connection.createStatement()) {
-                try (ResultSet rs = statement.executeQuery(String.format(SQL_GET_LATEST_VERSION, getSchema(schema)))) {
-                    if (rs.next()) {
-                        version = rs.getString(1);
-                    }
-                }
-            }
-        } catch (SQLException ignore) {
-            // migration model probably does not exist so we assume the database is empty
-        }
-
-        JpaUpdaterProvider updater = session.getProvider(JpaUpdaterProvider.class);
-
-        boolean requiresMigration = version == null || !version.equals(new ModelVersion(Version.VERSION_KEYCLOAK).toString());
-        session.setAttribute(VERIFY_AND_RUN_MASTER_CHANGELOG, requiresMigration);
-
-        JpaUpdaterProvider.Status status = updater.validate(connection, schema);
-
-        if (status == JpaUpdaterProvider.Status.VALID) {
-            logger.debug("Database is up-to-date");
-        } else if (status == JpaUpdaterProvider.Status.EMPTY) {
-            if (initializeEmpty) {
-                update(connection, schema, session, updater);
-            } else {
-                switch (strategy) {
-                    case UPDATE:
-                        update(connection, schema, session, updater);
-                        break;
-                    case MANUAL:
-                        export(connection, schema, databaseUpdateFile, session, updater);
-                        throw new ServerStartupError("Database not initialized, please initialize database with " + databaseUpdateFile.getAbsolutePath(), false);
-                    case VALIDATE:
-                        throw new ServerStartupError("Database not initialized, please enable database initialization", false);
-                }
-            }
-        } else {
-            switch (strategy) {
-                case UPDATE:
-                    update(connection, schema, session, updater);
-                    break;
-                case MANUAL:
-                    export(connection, schema, databaseUpdateFile, session, updater);
-                    throw new ServerStartupError("Database not up-to-date, please migrate database with " + databaseUpdateFile.getAbsolutePath(), false);
-                case VALIDATE:
-                    throw new ServerStartupError("Database not up-to-date, please enable database migration", false);
-            }
-        }
-
-        ExportImportManager exportImportManager = new ExportImportManager(session);
-
-        if (requiresMigration) {
-            KeycloakModelUtils.runJobInTransaction(factory, new KeycloakSessionTask() {
-                @Override
-                public void run(KeycloakSession session) {
-                    logger.debug("Calling migrateModel");
-                    migrateModel(session);
-
-                    DBLockManager dbLockManager = new DBLockManager(session);
-                    dbLockManager.checkForcedUnlock();
-                    DBLockProvider dbLock = dbLockManager.getDBLock();
-                    dbLock.waitForLock(DBLockProvider.Namespace.KEYCLOAK_BOOT);
-                    try {
-                        createMasterRealm(exportImportManager);
-                    } finally {
-                        dbLock.releaseLock();
-                    }
-                }
-            });
-        }
-
-        if (exportImportManager.isRunExport()) {
-            exportImportManager.runExport();
-            Quarkus.asyncExit();
-        }
-    }
-
-    protected void update(Connection connection, String schema, KeycloakSession session, JpaUpdaterProvider updater) {
-        DBLockManager dbLockManager = new DBLockManager(session);
-        DBLockProvider dbLock2 = dbLockManager.getDBLock();
-        dbLock2.waitForLock(DBLockProvider.Namespace.DATABASE);
-        try {
-            updater.update(connection, schema);
-        } finally {
-            dbLock2.releaseLock();
-        }
-    }
-
-    protected void export(Connection connection, String schema, File databaseUpdateFile, KeycloakSession session,
-            JpaUpdaterProvider updater) {
-        DBLockManager dbLockManager = new DBLockManager(session);
-        DBLockProvider dbLock2 = dbLockManager.getDBLock();
-        dbLock2.waitForLock(DBLockProvider.Namespace.DATABASE);
-        try {
-            updater.export(connection, schema, databaseUpdateFile);
-        } finally {
-            dbLock2.releaseLock();
-        }
-    }
-
     @Override
     public Connection getConnection() {
         SessionFactoryImpl entityManagerFactory = SessionFactoryImpl.class.cast(emf);
@@ -302,6 +137,11 @@ public class QuarkusJpaConnectionProviderFactory implements JpaConnectionProvide
         return operationalInfo;
     }

+    @Override
+    public int order() {
+        return 100;
+    }
+
     private MigrationStrategy getMigrationStrategy() {
         String migrationStrategy = config.get("migrationStrategy");
         if (migrationStrategy == null) {
@@ -325,33 +165,44 @@ public class QuarkusJpaConnectionProviderFactory implements JpaConnectionProvide

         emf = instance.get();

+        KeycloakSession session = factory.create();
+        boolean initSchema;
+
         try (Connection connection = getConnection()) {
-            if (jtaEnabled) {
-                KeycloakModelUtils.suspendJtaTransaction(factory, () -> {
-                    KeycloakSession session = factory.create();
-                    try {
-                        migration(getSchema(), connection, session);
-                    } finally {
-                        session.close();
-                    }
-                });
-            } else {
-                KeycloakModelUtils.runJobInTransaction(factory, session -> {
-                    migration(getSchema(), connection, session);
-                });
-            }
-            prepareOperationalInfo(connection);
+            logDatabaseConnectionInfo(connection);
+            initSchema = createOrUpdateSchema(getSchema(), connection, session);
         } catch (SQLException cause) {
-            throw new RuntimeException("Failed to migrate model", cause);
+            throw new RuntimeException("Failed to update database.", cause);
+        } finally {
+            session.close();
+        }
+
+        if (initSchema) {
+            runJobInTransaction(factory, this::initSchemaOrExport);
         }
     }

-    @Override
-    public int order() {
-        return 100;
+    private void initSchemaOrExport(KeycloakSession session) {
+        ExportImportManager exportImportManager = new ExportImportManager(session);
+        logger.debug("Calling migrateModel");
+        migrateModel(session);
+
+        DBLockManager dbLockManager = new DBLockManager(session);
+        dbLockManager.checkForcedUnlock();
+        DBLockProvider dbLock = dbLockManager.getDBLock();
+        dbLock.waitForLock(DBLockProvider.Namespace.KEYCLOAK_BOOT);
+        try {
+            createMasterRealm(exportImportManager);
+        } finally {
+            dbLock.releaseLock();
+        }
+        if (exportImportManager.isRunExport()) {
+            exportImportManager.runExport();
+            Quarkus.asyncExit();
+        }
     }

-    protected ExportImportManager createMasterRealm(ExportImportManager exportImportManager) {
+    private ExportImportManager createMasterRealm(ExportImportManager exportImportManager) {
         logger.debug("bootstrap");
         KeycloakSession session = factory.create();

@@ -406,7 +257,7 @@ public class QuarkusJpaConnectionProviderFactory implements JpaConnectionProvide
         return exportImportManager;
     }

-    protected void migrateModel(KeycloakSession session) {
+    private void migrateModel(KeycloakSession session) {
         try {
             MigrationModelManager.migrate(session);
         } catch (Exception e) {
@@ -414,7 +265,7 @@ public class QuarkusJpaConnectionProviderFactory implements JpaConnectionProvide
         }
     }

-    public void importRealms() {
+    private void importRealms() {
         String files = System.getProperty("keycloak.import");
         if (files != null) {
             StringTokenizer tokenizer = new StringTokenizer(files, ",");
@@ -431,7 +282,7 @@ public class QuarkusJpaConnectionProviderFactory implements JpaConnectionProvide
         }
     }

-    public void importRealm(RealmRepresentation rep, String from) {
+    private void importRealm(RealmRepresentation rep, String from) {
         KeycloakSession session = factory.create();
         boolean exists = false;
         try {
@@ -465,7 +316,7 @@ public class QuarkusJpaConnectionProviderFactory implements JpaConnectionProvide
         }
     }

-    public void importAddUser() {
+    private void importAddUser() {
         String configDir = System.getProperty("jboss.server.config.dir");
         if (configDir != null) {
             File addUserFile = new File(configDir + File.separator + "keycloak-add-user.json");
@@ -525,4 +376,118 @@ public class QuarkusJpaConnectionProviderFactory implements JpaConnectionProvide
             }
         }
     }
+
+    private String getSchema(String schema) {
+        return schema == null ? "" : schema + ".";
+    }
+
+    private File getDatabaseUpdateFile() {
+        String databaseUpdateFile = config.get("migrationExport", "keycloak-database-update.sql");
+        return new File(databaseUpdateFile);
+    }
+
+    private void logDatabaseConnectionInfo(Connection connection) {
+        try {
+            operationalInfo = new LinkedHashMap<>();
+            DatabaseMetaData md = connection.getMetaData();
+            operationalInfo.put("databaseUrl", md.getURL());
+            operationalInfo.put("databaseUser", md.getUserName());
+            operationalInfo.put("databaseProduct", md.getDatabaseProductName() + " " + md.getDatabaseProductVersion());
+            operationalInfo.put("databaseDriver", md.getDriverName() + " " + md.getDriverVersion());
+            logger.debugf("Database info: %s", operationalInfo.toString());
+        } catch (SQLException e) {
+            logger.warn("Unable to prepare operational info due database exception: " + e.getMessage());
+        }
+    }
+
+    private boolean createOrUpdateSchema(String schema, Connection connection, KeycloakSession session) {
+        MigrationStrategy strategy = getMigrationStrategy();
+        boolean initializeEmpty = config.getBoolean("initializeEmpty", true);
+        File databaseUpdateFile = getDatabaseUpdateFile();
+
+        String version = null;
+
+        try {
+            try (Statement statement = connection.createStatement()) {
+                try (ResultSet rs = statement.executeQuery(String.format(SQL_GET_LATEST_VERSION, getSchema(schema)))) {
+                    if (rs.next()) {
+                        version = rs.getString(1);
+                    }
+                }
+            }
+        } catch (SQLException ignore) {
+            // migration model probably does not exist so we assume the database is empty
+        }
+
+        JpaUpdaterProvider updater = session.getProvider(JpaUpdaterProvider.class);
+
+        boolean requiresMigration = version == null || !version.equals(new ModelVersion(Version.VERSION_KEYCLOAK).toString());
+        session.setAttribute(VERIFY_AND_RUN_MASTER_CHANGELOG, requiresMigration);
+
+        JpaUpdaterProvider.Status status = updater.validate(connection, schema);
+
+        if (status == JpaUpdaterProvider.Status.VALID) {
+            logger.debug("Database is up-to-date");
+        } else if (status == JpaUpdaterProvider.Status.EMPTY) {
+            if (initializeEmpty) {
+                update(connection, schema, session, updater);
+            } else {
+                switch (strategy) {
+                    case UPDATE:
+                        update(connection, schema, session, updater);
+                        break;
+                    case MANUAL:
+                        export(connection, schema, databaseUpdateFile, session, updater);
+                        throw new ServerStartupError("Database not initialized, please initialize database with " + databaseUpdateFile.getAbsolutePath(), false);
+                    case VALIDATE:
+                        throw new ServerStartupError("Database not initialized, please enable database initialization", false);
+                }
+            }
+        } else {
+            switch (strategy) {
+                case UPDATE:
+                    update(connection, schema, session, updater);
+                    break;
+                case MANUAL:
+                    export(connection, schema, databaseUpdateFile, session, updater);
+                    throw new ServerStartupError("Database not up-to-date, please migrate database with " + databaseUpdateFile.getAbsolutePath(), false);
+                case VALIDATE:
+                    throw new ServerStartupError("Database not up-to-date, please enable database migration", false);
+            }
+        }
+
+        return requiresMigration;
+    }
+
+    private void update(Connection connection, String schema, KeycloakSession session, JpaUpdaterProvider updater) {
+        DBLockManager dbLockManager = new DBLockManager(session);
+        DBLockProvider dbLock2 = dbLockManager.getDBLock();
+        dbLock2.waitForLock(DBLockProvider.Namespace.DATABASE);
+        try {
+            updater.update(connection, schema);
+        } finally {
+            dbLock2.releaseLock();
+        }
+    }
+
+    private void export(Connection connection, String schema, File databaseUpdateFile, KeycloakSession session,
+            JpaUpdaterProvider updater) {
+        DBLockManager dbLockManager = new DBLockManager(session);
+        DBLockProvider dbLock2 = dbLockManager.getDBLock();
+        dbLock2.waitForLock(DBLockProvider.Namespace.DATABASE);
+        try {
+            updater.export(connection, schema, databaseUpdateFile);
+        } finally {
+            dbLock2.releaseLock();
+        }
+    }
+
+    private EntityManager createEntityManager(KeycloakSession session) {
+        // we need to auto join the transaction, hence the synchronized type
+        // ideally, we should leverage how hibernate-orm creates the entity manager
+        // but that breaks us, mainly due to flush which is always set to always
+        // as per hibernate guys, we should consider how JTASessionOpener creates entity managers
+        // but that brings lot of details that we need to investigate further
+        return PersistenceExceptionConverter.create(session, emf.createEntityManager(SynchronizationType.SYNCHRONIZED));
+    }
 }
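
After this refactoring, create() always returns a DefaultJpaConnectionProvider backed by a SYNCHRONIZED entity manager wrapped in PersistenceExceptionConverter, so the EntityManager joins whatever transaction is already active. A hedged usage sketch (not part of the commit; sessionFactory and someEntity are placeholders):

    KeycloakModelUtils.runJobInTransaction(sessionFactory, session -> {
        EntityManager em = session.getProvider(JpaConnectionProvider.class).getEntityManager();
        em.persist(someEntity); // joins the surrounding transaction; no manual enlisting or commit needed
    });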

@@ -0,0 +1,20 @@
+#
+# /*
+#  * Copyright 2021 Red Hat, Inc. and/or its affiliates
+#  * and other contributors as indicated by the @author tags.
+#  *
+#  * Licensed under the Apache License, Version 2.0 (the "License");
+#  * you may not use this file except in compliance with the License.
+#  * You may obtain a copy of the License at
+#  *
+#  * http://www.apache.org/licenses/LICENSE-2.0
+#  *
+#  * Unless required by applicable law or agreed to in writing, software
+#  * distributed under the License is distributed on an "AS IS" BASIS,
+#  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  * See the License for the specific language governing permissions and
+#  * limitations under the License.
+#  */
+#
+
+org.keycloak.connections.cache.QuarkusInfinispanConnectionFactory

@@ -232,15 +232,15 @@ public class ConfigurationTest {
     @Test
     public void testClusterConfig() {
         // Cluster enabled by default, but disabled for the "dev" profile
-        Assert.assertEquals("cluster-default.xml", initConfig("connectionsInfinispan", "default").get("configFile"));
+        Assert.assertEquals("cluster-default.xml", initConfig("connectionsInfinispan", "quarkus").get("configFile"));

         // If explicitly set, then it is always used regardless of the profile
         System.clearProperty("kc.profile");
         System.setProperty("kc.config.args", "--cluster=foo");

-        Assert.assertEquals("cluster-foo.xml", initConfig("connectionsInfinispan", "default").get("configFile"));
+        Assert.assertEquals("cluster-foo.xml", initConfig("connectionsInfinispan", "quarkus").get("configFile"));
         System.setProperty("kc.profile", "dev");
-        Assert.assertEquals("cluster-foo.xml", initConfig("connectionsInfinispan", "default").get("configFile"));
+        Assert.assertEquals("cluster-foo.xml", initConfig("connectionsInfinispan", "quarkus").get("configFile"));
     }

     private Config.Scope initConfig(String... scope) {

@@ -15,3 +15,8 @@ quarkus.health.extensions.enabled=false
 # Default transaction timeout
 quarkus.transaction-manager.default-transaction-timeout=300

+# The JAX-RS application is programmatically registered at build time.
+# When indexing classes, both KeycloakApplication and QuarkusKeycloakApplication are indexed and multuple
+# application classes are no longer supported by resteasy extension
+quarkus.resteasy.ignore-application-classes=true
+