Upgrade to Quarkus 3.14.2 (#32519)
Closes #32517 Signed-off-by: Václav Muzikář <vmuzikar@redhat.com>
This commit is contained in:
parent
aec3eb91a8
commit
83c00731c3
21 changed files with 129 additions and 28 deletions
|
@ -265,6 +265,11 @@ As part of this change the following related configuration options for the SPI h
|
|||
|
||||
If you were still making use of these options or the `redirect_uri` parameter for logout, you should implement the link:https://openid.net/specs/openid-connect-rpinitiated-1_0.html[OpenID Connect RP-Initiated Logout specification] instead.
|
||||
|
||||
= Additional validations on the `--optimized` startup option
|
||||
The `--optimized` startup option now requires the optimized server image to be built first. This can be achieved
|
||||
either by running `kc.sh|bat build` first or by any other server commands (like `start`, `export`, `import`)
|
||||
without the `--optimized` flag.
|
||||
|
||||
= New generalized event types for credentials
|
||||
|
||||
There are now generalized events for updating (`UPDATE_CREDENTIAL`) and removing (`REMOVE_CREDENTIAL`) a credential. The credential type is described in the `credential_type` attribute of the events.
|
||||
|
|
|
@ -108,7 +108,10 @@ public class ResourceEntity {
|
|||
@BatchSize(size = 20)
|
||||
private List<ScopeEntity> scopes;
|
||||
|
||||
@OneToMany(cascade = CascadeType.REMOVE, orphanRemoval = true, mappedBy="resource", fetch = FetchType.LAZY)
|
||||
// Explicitly not using OrphanRemoval as we're handling the removal manually through HQL but at the same time we still
|
||||
// want to remove elements from the entity's collection in a manual way. Without this, Hibernate would do a duplicate
|
||||
// delete query.
|
||||
@OneToMany(cascade = CascadeType.REMOVE, orphanRemoval = false, mappedBy="resource", fetch = FetchType.LAZY)
|
||||
@Fetch(FetchMode.SELECT)
|
||||
@BatchSize(size = 20)
|
||||
private Collection<ResourceAttributeEntity> attributes = new LinkedList<>();
|
||||
|
|
|
@ -189,4 +189,10 @@ public class CustomCreateIndexChange extends CreateIndexChange {
|
|||
return changeValidationErrors;
|
||||
}
|
||||
|
||||
// The default impls seem to be just fine, so this is just to remove the
|
||||
// "class does not implement the 'supports(Database)' method" warnings
|
||||
@Override
|
||||
public boolean supports(Database database) {
|
||||
return super.supports(database);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -34,7 +34,9 @@ import org.keycloak.connections.jpa.updater.liquibase.ThreadLocalSessionContext;
|
|||
import org.keycloak.models.KeycloakSession;
|
||||
|
||||
import java.sql.Connection;
|
||||
import java.sql.PreparedStatement;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.Savepoint;
|
||||
import java.sql.Statement;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
@ -95,8 +97,14 @@ public abstract class CustomKeycloakTask implements CustomSqlChange {
|
|||
try {
|
||||
String correctedTableName = database.correctObjectName("REALM", Table.class);
|
||||
if (SnapshotGeneratorFactory.getInstance().has(new Table().setName(correctedTableName), database)) {
|
||||
// We're inside a liquibase managed transaction at this point. Some RDBMS don't like updates to tables
|
||||
// that were queried in the same transaction. So we need to create a savepoint and rollback to it so that
|
||||
// this select is effectively removed from a transaction and doesn't interfere with an update that will come later.
|
||||
Savepoint savepoint = connection.setSavepoint();
|
||||
try (Statement st = connection.createStatement(); ResultSet resultSet = st.executeQuery("SELECT ID FROM " + getTableName(correctedTableName))) {
|
||||
return (resultSet.next());
|
||||
} finally {
|
||||
connection.rollback(savepoint);
|
||||
}
|
||||
} else {
|
||||
return false;
|
||||
|
|
|
@ -447,8 +447,19 @@ public class JpaRealmProvider implements RealmProvider, ClientProvider, ClientSc
|
|||
// Throw model exception to ensure transaction rollback and revert previous operations (removing default roles) as well
|
||||
throw new ModelException("Role not found or trying to remove role from incorrect realm");
|
||||
}
|
||||
String compositeRoleTable = JpaUtils.getTableNameForNativeQuery("COMPOSITE_ROLE", em);
|
||||
em.createNativeQuery("delete from " + compositeRoleTable + " where CHILD_ROLE = :role").setParameter("role", roleEntity.getId()).executeUpdate();
|
||||
|
||||
// Can't use a native query to delete the composite roles mappings because it causes TransientObjectException.
|
||||
// At the same time, can't use the persist cascade type on the compositeRoles field because in that case
|
||||
// we could not still use a native query as a different problem would arise - it may happen that a parent role that
|
||||
// has this role as a composite is present in the persistence context. In that case, the role would be re-created
|
||||
// again after deletion through persist cascade type.
|
||||
// So in any case, native query is not an option. This is not optimal as it executes additional queries but
|
||||
// the alternative of clearing the persistence context is not either as we don't know if something currently present
|
||||
// in the context is not needed later.
|
||||
Stream<RoleEntity> parentRoles = em.createNamedQuery("getParentRolesOfACompositeRole", RoleEntity.class).setParameter("compositeRole", roleEntity).getResultStream();
|
||||
parentRoles.forEach(parentRole -> parentRole.getCompositeRoles().remove(roleEntity));
|
||||
parentRoles.close();
|
||||
|
||||
em.createNamedQuery("deleteClientScopeRoleMappingByRole").setParameter("role", roleEntity).executeUpdate();
|
||||
|
||||
em.flush();
|
||||
|
|
|
@ -65,6 +65,7 @@ import java.util.Set;
|
|||
@NamedQuery(name="searchForRealmRoles", query="select role from RoleEntity role where role.clientRole = false and role.realmId = :realm and ( lower(role.name) like :search or lower(role.description) like :search ) order by role.name"),
|
||||
@NamedQuery(name="getRoleIdsFromIdList", query="select role.id from RoleEntity role where role.realmId = :realm and role.id in :ids order by role.name ASC"),
|
||||
@NamedQuery(name="getRoleIdsByNameContainingFromIdList", query="select role.id from RoleEntity role where role.realmId = :realm and lower(role.name) like lower(concat('%',:search,'%')) and role.id in :ids order by role.name ASC"),
|
||||
@NamedQuery(name="getParentRolesOfACompositeRole", query = "select role from RoleEntity role where :compositeRole member of role.compositeRoles"),
|
||||
})
|
||||
|
||||
public class RoleEntity {
|
||||
|
@ -98,7 +99,10 @@ public class RoleEntity {
|
|||
@JoinTable(name = "COMPOSITE_ROLE", joinColumns = @JoinColumn(name = "COMPOSITE"), inverseJoinColumns = @JoinColumn(name = "CHILD_ROLE"))
|
||||
private Set<RoleEntity> compositeRoles;
|
||||
|
||||
@OneToMany(cascade = CascadeType.REMOVE, orphanRemoval = true, mappedBy="role")
|
||||
// Explicitly not using OrphanRemoval as we're handling the removal manually through HQL but at the same time we still
|
||||
// want to remove elements from the entity's collection in a manual way. Without this, Hibernate would do a duplicate
|
||||
// delete query.
|
||||
@OneToMany(cascade = CascadeType.REMOVE, orphanRemoval = false, mappedBy="role")
|
||||
@Fetch(FetchMode.SELECT)
|
||||
@BatchSize(size = 20)
|
||||
protected List<RoleAttributeEntity> attributes = new LinkedList<>();
|
||||
|
|
|
@ -93,7 +93,10 @@ public class UserEntity {
|
|||
@Column(name = "REALM_ID")
|
||||
protected String realmId;
|
||||
|
||||
@OneToMany(cascade = CascadeType.REMOVE, orphanRemoval = true, mappedBy="user")
|
||||
// Explicitly not using OrphanRemoval as we're handling the removal manually through HQL but at the same time we still
|
||||
// want to remove elements from the entity's collection in a manual way. Without this, Hibernate would do a duplicate
|
||||
// delete query.
|
||||
@OneToMany(cascade = CascadeType.REMOVE, orphanRemoval = false, mappedBy="user")
|
||||
@Fetch(FetchMode.SELECT)
|
||||
@BatchSize(size = 20)
|
||||
protected Collection<UserAttributeEntity> attributes = new LinkedList<>();
|
||||
|
|
18
pom.xml
18
pom.xml
|
@ -51,8 +51,8 @@
|
|||
<jboss.snapshots.repo.id>jboss-snapshots-repository</jboss.snapshots.repo.id>
|
||||
<jboss.snapshots.repo.url>https://s01.oss.sonatype.org/content/repositories/snapshots/</jboss.snapshots.repo.url>
|
||||
|
||||
<quarkus.version>3.13.3</quarkus.version>
|
||||
<quarkus.build.version>3.13.3</quarkus.build.version>
|
||||
<quarkus.version>3.14.2</quarkus.version>
|
||||
<quarkus.build.version>3.14.2</quarkus.build.version>
|
||||
|
||||
<project.build-time>${timestamp}</project.build-time>
|
||||
|
||||
|
@ -92,7 +92,7 @@
|
|||
<bouncycastle.bctls-fips.version>1.0.19</bouncycastle.bctls-fips.version>
|
||||
|
||||
<dom4j.version>2.1.3</dom4j.version>
|
||||
<h2.version>2.2.224</h2.version>
|
||||
<h2.version>2.3.232</h2.version>
|
||||
<hibernate-orm.plugin.version>6.2.13.Final</hibernate-orm.plugin.version>
|
||||
<hibernate.c3p0.version>6.2.13.Final</hibernate.c3p0.version>
|
||||
<infinispan.version>15.0.8.Final</infinispan.version>
|
||||
|
@ -124,7 +124,7 @@
|
|||
<undertow.version>${undertow-legacy.version}</undertow.version>
|
||||
<undertow-legacy.version>2.2.24.Final</undertow-legacy.version>
|
||||
<undertow-jakarta.version>2.3.2.Final</undertow-jakarta.version>
|
||||
<wildfly-elytron.version>2.5.0.Final</wildfly-elytron.version>
|
||||
<wildfly-elytron.version>2.5.2.Final</wildfly-elytron.version>
|
||||
<elytron.undertow-server.version>1.9.0.Final</elytron.undertow-server.version>
|
||||
<woodstox.version>6.0.3</woodstox.version>
|
||||
<wildfly.common.quarkus.aligned.version>1.5.4.Final-format-001</wildfly.common.quarkus.aligned.version>
|
||||
|
@ -149,7 +149,7 @@
|
|||
<com.apicatalog.titanium-json-ld.version>1.3.3</com.apicatalog.titanium-json-ld.version>
|
||||
<io.setl.rdf-urdna.version>1.1</io.setl.rdf-urdna.version>
|
||||
|
||||
<liquibase.version>4.27.0</liquibase.version>
|
||||
<liquibase.version>4.29.1</liquibase.version>
|
||||
<servlet.api.30.version>1.0.2.Final</servlet.api.30.version>
|
||||
<servlet.api.40.version>2.0.0.Final</servlet.api.40.version>
|
||||
<twitter4j.version>4.1.2</twitter4j.version>
|
||||
|
@ -160,15 +160,15 @@
|
|||
<postgresql.version>16</postgresql.version>
|
||||
<aurora-postgresql.version>16.1</aurora-postgresql.version>
|
||||
<aws-jdbc-wrapper.version>2.3.1</aws-jdbc-wrapper.version>
|
||||
<postgresql-jdbc.version>42.7.3</postgresql-jdbc.version>
|
||||
<postgresql-jdbc.version>42.7.4</postgresql-jdbc.version>
|
||||
<mariadb.version>10.11</mariadb.version>
|
||||
<mariadb-jdbc.version>3.4.0</mariadb-jdbc.version>
|
||||
<mariadb-jdbc.version>3.4.1</mariadb-jdbc.version>
|
||||
<mssql.version>2022-latest</mssql.version>
|
||||
<!-- this is the mssql driver version also used in the Quarkus BOM -->
|
||||
<mssql-jdbc.version>12.6.3.jre11</mssql-jdbc.version>
|
||||
<mssql-jdbc.version>12.8.1.jre11</mssql-jdbc.version>
|
||||
<oracledb.version>19.3</oracledb.version>
|
||||
<!-- this is the oracle driver version also used in the Quarkus BOM -->
|
||||
<oracle-jdbc.version>23.3.0.23.09</oracle-jdbc.version>
|
||||
<oracle-jdbc.version>23.5.0.24.07</oracle-jdbc.version>
|
||||
|
||||
<!-- Test -->
|
||||
<greenmail.version>2.1.0-alpha-1</greenmail.version>
|
||||
|
|
|
@ -37,7 +37,8 @@ class KeycloakMetricsConfigurationTest {
|
|||
static final QuarkusUnitTest test = new QuarkusUnitTest()
|
||||
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)
|
||||
.addAsResource("keycloak.conf", "META-INF/keycloak.conf"))
|
||||
.overrideConfigKey("quarkus.micrometer.export.prometheus.path", "/prom/metrics");
|
||||
.overrideConfigKey("quarkus.micrometer.export.prometheus.path", "/prom/metrics")
|
||||
.overrideConfigKey("quarkus.class-loading.removed-artifacts", "io.quarkus:quarkus-jdbc-oracle,io.quarkus:quarkus-jdbc-oracle-deployment"); // config works a bit odd in unit tests, so this is to ensure we exclude Oracle to avoid ClassNotFound ex
|
||||
|
||||
@Test
|
||||
void testMetrics() {
|
||||
|
|
|
@ -37,7 +37,8 @@ public class KeycloakNegativeHealthCheckTest {
|
|||
@RegisterExtension
|
||||
static final QuarkusUnitTest test = new QuarkusUnitTest()
|
||||
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)
|
||||
.addAsResource("keycloak.conf", "META-INF/keycloak.conf"));
|
||||
.addAsResource("keycloak.conf", "META-INF/keycloak.conf"))
|
||||
.overrideConfigKey("quarkus.class-loading.removed-artifacts", "io.quarkus:quarkus-jdbc-oracle,io.quarkus:quarkus-jdbc-oracle-deployment"); // config works a bit odd in unit tests, so this is to ensure we exclude Oracle to avoid ClassNotFound ex
|
||||
|
||||
@Test
|
||||
public void testReadinessDown() {
|
||||
|
|
|
@ -38,7 +38,8 @@ class KeycloakPathConfigurationTest {
|
|||
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)
|
||||
.addAsResource("keycloak.conf", "META-INF/keycloak.conf"))
|
||||
.overrideConfigKey("kc.http-relative-path","/auth")
|
||||
.overrideConfigKey("quarkus.micrometer.export.prometheus.path", "/prom/metrics");
|
||||
.overrideConfigKey("quarkus.micrometer.export.prometheus.path", "/prom/metrics")
|
||||
.overrideConfigKey("quarkus.class-loading.removed-artifacts", "io.quarkus:quarkus-jdbc-oracle,io.quarkus:quarkus-jdbc-oracle-deployment"); // config works a bit odd in unit tests, so this is to ensure we exclude Oracle to avoid ClassNotFound ex
|
||||
|
||||
@Test
|
||||
void testMetrics() {
|
||||
|
|
|
@ -37,7 +37,8 @@ public class KeycloakReadyHealthCheckTest {
|
|||
@RegisterExtension
|
||||
static final QuarkusUnitTest test = new QuarkusUnitTest()
|
||||
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)
|
||||
.addAsResource("keycloak.conf", "META-INF/keycloak.conf"));
|
||||
.addAsResource("keycloak.conf", "META-INF/keycloak.conf"))
|
||||
.overrideConfigKey("quarkus.class-loading.removed-artifacts", "io.quarkus:quarkus-jdbc-oracle,io.quarkus:quarkus-jdbc-oracle-deployment"); // config works a bit odd in unit tests, so this is to ensure we exclude Oracle to avoid ClassNotFound ex
|
||||
|
||||
@Test
|
||||
public void testLivenessUp() {
|
||||
|
|
|
@ -24,6 +24,7 @@ import static org.keycloak.quarkus.runtime.Environment.isNonServerMode;
|
|||
import static org.keycloak.quarkus.runtime.Environment.isTestLaunchMode;
|
||||
import static org.keycloak.quarkus.runtime.cli.Picocli.parseAndRun;
|
||||
import static org.keycloak.quarkus.runtime.cli.command.AbstractStartCommand.OPTIMIZED_BUILD_OPTION_LONG;
|
||||
import static org.keycloak.quarkus.runtime.cli.command.AbstractStartCommand.wasBuildEverRun;
|
||||
import static org.keycloak.quarkus.runtime.cli.command.Start.isDevProfileNotAllowed;
|
||||
|
||||
import java.io.PrintWriter;
|
||||
|
@ -74,6 +75,11 @@ public class KeycloakMain implements QuarkusApplication {
|
|||
cliArgs.add("-h");
|
||||
} else if (isFastStart(cliArgs)) { // fast path for starting the server without bootstrapping CLI
|
||||
|
||||
if (!wasBuildEverRun()) {
|
||||
handleUsageError(Messages.optimizedUsedForFirstStartup());
|
||||
return;
|
||||
}
|
||||
|
||||
if (isDevProfileNotAllowed()) {
|
||||
handleUsageError(Messages.devProfileNotAllowedError(Start.NAME));
|
||||
return;
|
||||
|
|
|
@ -22,6 +22,8 @@ import java.util.Set;
|
|||
import java.util.stream.Collectors;
|
||||
import org.jboss.logging.Logger;
|
||||
|
||||
import org.keycloak.quarkus.runtime.cli.command.AbstractStartCommand;
|
||||
import org.keycloak.quarkus.runtime.cli.command.Build;
|
||||
import picocli.CommandLine;
|
||||
|
||||
public final class Messages {
|
||||
|
@ -47,6 +49,10 @@ public final class Messages {
|
|||
return String.format("You can not '%s' the server in %s mode. Please re-build the server first, using 'kc.sh build' for the default production mode.%n", cmd, Environment.getKeycloakModeFromProfile(org.keycloak.common.util.Environment.DEV_PROFILE_VALUE));
|
||||
}
|
||||
|
||||
public static String optimizedUsedForFirstStartup() {
|
||||
return String.format("The '%s' flag was used for first ever server start. Please don't use this flag for the first startup or use '%s %s' to build the server first.", AbstractStartCommand.OPTIMIZED_BUILD_OPTION_LONG, Environment.getCommand(), Build.NAME);
|
||||
}
|
||||
|
||||
public static String invalidLogLevel(String logLevel) {
|
||||
Set<String> values = Arrays.stream(Logger.Level.values()).map(Logger.Level::name).map(String::toLowerCase).collect(Collectors.toSet());
|
||||
return "Invalid log level: " + logLevel + ". Possible values are: " + String.join(", ", values) + ".";
|
||||
|
|
|
@ -20,7 +20,9 @@ package org.keycloak.quarkus.runtime.cli.command;
|
|||
import org.keycloak.config.OptionCategory;
|
||||
import org.keycloak.quarkus.runtime.Environment;
|
||||
import org.keycloak.quarkus.runtime.KeycloakMain;
|
||||
import org.keycloak.quarkus.runtime.Messages;
|
||||
import org.keycloak.quarkus.runtime.cli.ExecutionExceptionHandler;
|
||||
import org.keycloak.quarkus.runtime.configuration.ConfigArgsConfigSource;
|
||||
import org.keycloak.quarkus.runtime.configuration.mappers.HttpPropertyMappers;
|
||||
|
||||
import java.util.EnumSet;
|
||||
|
@ -29,6 +31,8 @@ import java.util.stream.Collectors;
|
|||
|
||||
import picocli.CommandLine;
|
||||
|
||||
import static org.keycloak.quarkus.runtime.configuration.Configuration.getRawPersistedProperties;
|
||||
|
||||
public abstract class AbstractStartCommand extends AbstractCommand implements Runnable {
|
||||
public static final String OPTIMIZED_BUILD_OPTION_LONG = "--optimized";
|
||||
|
||||
|
@ -39,6 +43,11 @@ public abstract class AbstractStartCommand extends AbstractCommand implements Ru
|
|||
CommandLine cmd = spec.commandLine();
|
||||
HttpPropertyMappers.validateConfig();
|
||||
validateConfig();
|
||||
|
||||
if (ConfigArgsConfigSource.getAllCliArgs().contains(OPTIMIZED_BUILD_OPTION_LONG) && !wasBuildEverRun()) {
|
||||
executionError(spec.commandLine(), Messages.optimizedUsedForFirstStartup());
|
||||
}
|
||||
|
||||
KeycloakMain.start((ExecutionExceptionHandler) cmd.getExecutionExceptionHandler(), cmd.getErr(), cmd.getParseResult().originalArgs().toArray(new String[0]));
|
||||
}
|
||||
|
||||
|
@ -46,6 +55,10 @@ public abstract class AbstractStartCommand extends AbstractCommand implements Ru
|
|||
|
||||
}
|
||||
|
||||
public static boolean wasBuildEverRun() {
|
||||
return !getRawPersistedProperties().isEmpty();
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<OptionCategory> getOptionCategories() {
|
||||
EnumSet<OptionCategory> excludedCategories = excludedCategories();
|
||||
|
|
|
@ -20,6 +20,7 @@ package org.keycloak.quarkus.runtime.configuration;
|
|||
import static org.keycloak.quarkus.runtime.Environment.getProfileOrDefault;
|
||||
import static org.keycloak.quarkus.runtime.cli.Picocli.ARG_PREFIX;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.Properties;
|
||||
|
||||
|
@ -110,6 +111,10 @@ public final class Configuration {
|
|||
return Optional.ofNullable(PersistedConfigSource.getInstance().getValue(name));
|
||||
}
|
||||
|
||||
public static Map<String, String> getRawPersistedProperties() {
|
||||
return PersistedConfigSource.getInstance().getProperties();
|
||||
}
|
||||
|
||||
public static String getRawValue(String propertyName) {
|
||||
return getConfig().getRawValue(propertyName);
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@ final class HealthPropertyMappers {
|
|||
public static PropertyMapper<?>[] getHealthPropertyMappers() {
|
||||
return new PropertyMapper[] {
|
||||
fromOption(HealthOptions.HEALTH_ENABLED)
|
||||
.to("quarkus.health.extensions.enabled")
|
||||
.to("quarkus.smallrye-health.extensions.enabled")
|
||||
.paramLabel(Boolean.TRUE + "|" + Boolean.FALSE)
|
||||
.build()
|
||||
};
|
||||
|
|
|
@ -6,7 +6,7 @@ quarkus.application.name=Keycloak
|
|||
quarkus.banner.enabled=false
|
||||
|
||||
# Disable health checks from extensions, since we provide our own (default is true)
|
||||
quarkus.health.extensions.enabled=false
|
||||
quarkus.smallrye-health.extensions.enabled=false
|
||||
|
||||
# Enables metrics from other extensions if metrics is enabled
|
||||
quarkus.datasource.metrics.enabled=${quarkus.micrometer.enabled:false}
|
||||
|
|
|
@ -418,10 +418,10 @@ public class ConfigurationTest extends AbstractConfigurationTest {
|
|||
public void testResolveHealthOption() {
|
||||
ConfigArgsConfigSource.setCliArgs("--health-enabled=true");
|
||||
SmallRyeConfig config = createConfig();
|
||||
assertEquals("true", config.getConfigValue("quarkus.health.extensions.enabled").getValue());
|
||||
assertEquals("true", config.getConfigValue("quarkus.smallrye-health.extensions.enabled").getValue());
|
||||
ConfigArgsConfigSource.setCliArgs("");
|
||||
config = createConfig();
|
||||
assertEquals("false", config.getConfigValue("quarkus.health.extensions.enabled").getValue());
|
||||
assertEquals("false", config.getConfigValue("quarkus.smallrye-health.extensions.enabled").getValue());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
|
@ -86,7 +86,7 @@ public class OptionsDistTest {
|
|||
@RawDistOnly(reason = "Raw is enough and we avoid issues with including custom conf file in the container")
|
||||
public void testExpressionsInConfigFile(KeycloakDistribution distribution) {
|
||||
distribution.setEnvVar("MY_LOG_LEVEL", "warn");
|
||||
CLIResult result = distribution.run(CONFIG_FILE_LONG_NAME + "=" + Paths.get("src/test/resources/OptionsDistTest/keycloak.conf").toAbsolutePath().normalize(), "start", "--http-enabled=true", "--hostname-strict=false", "--optimized");
|
||||
CLIResult result = distribution.run(CONFIG_FILE_LONG_NAME + "=" + Paths.get("src/test/resources/OptionsDistTest/keycloak.conf").toAbsolutePath().normalize(), "start", "--http-enabled=true", "--hostname-strict=false");
|
||||
result.assertNoMessage("INFO [io.quarkus]");
|
||||
result.assertNoMessage("Listening on:");
|
||||
|
||||
|
|
|
@ -25,7 +25,10 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
|
|||
import static org.keycloak.quarkus.runtime.cli.command.AbstractStartCommand.OPTIMIZED_BUILD_OPTION_LONG;
|
||||
import static org.keycloak.quarkus.runtime.cli.command.Main.CONFIG_FILE_LONG_NAME;
|
||||
|
||||
import org.junit.jupiter.api.MethodOrderer;
|
||||
import org.junit.jupiter.api.Order;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.TestMethodOrder;
|
||||
import org.keycloak.it.junit5.extension.CLIResult;
|
||||
import org.keycloak.it.junit5.extension.DistributionTest;
|
||||
|
||||
|
@ -34,10 +37,15 @@ import io.quarkus.test.junit.main.LaunchResult;
|
|||
import org.keycloak.it.junit5.extension.RawDistOnly;
|
||||
import org.keycloak.it.junit5.extension.WithEnvVars;
|
||||
import org.keycloak.it.utils.KeycloakDistribution;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
|
||||
@DistributionTest
|
||||
public class StartCommandDistTest {
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(StartCommandDistTest.class);
|
||||
|
||||
@Test
|
||||
@Launch({ "start", "--hostname-strict=false" })
|
||||
void failNoTls(LaunchResult result) {
|
||||
|
@ -60,10 +68,13 @@ public class StartCommandDistTest {
|
|||
}
|
||||
|
||||
@Test
|
||||
@Launch({ "start", "--optimized", "--http-enabled=true", "--hostname-strict=false", "--spi-events-listener-jboss-logging-enabled=false" })
|
||||
void warnSpiBuildtimeAtRuntime(LaunchResult result) {
|
||||
assertTrue(result.getOutput().contains("The following build time options have values that differ from what is persisted - the new values will NOT be used until another build is run: kc.spi-events-listener-jboss-logging-enabled"),
|
||||
() -> "The Output:\n" + result.getOutput() + "doesn't contains the expected string.");
|
||||
@RawDistOnly(reason = "Containers are immutable")
|
||||
void warnSpiBuildtimeAtRuntime(KeycloakDistribution dist) {
|
||||
CLIResult result = dist.run("build");
|
||||
result.assertBuild();
|
||||
|
||||
result = dist.run("start", "--optimized", "--http-enabled=true", "--hostname-strict=false", "--spi-events-listener-jboss-logging-enabled=false");
|
||||
result.assertMessage("The following build time options have values that differ from what is persisted - the new values will NOT be used until another build is run: kc.spi-events-listener-jboss-logging-enabled");
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -95,6 +106,22 @@ public class StartCommandDistTest {
|
|||
assertEquals(4, cliResult.getErrorStream().size());
|
||||
}
|
||||
|
||||
@Test
|
||||
@Launch({ "start", "--optimized" })
|
||||
@Order(1)
|
||||
void failIfOptimizedUsedForFirstFastStartup(LaunchResult result) {
|
||||
CLIResult cliResult = (CLIResult) result;
|
||||
cliResult.assertError("The '--optimized' flag was used for first ever server start.");
|
||||
}
|
||||
|
||||
@Test
|
||||
@Launch({ "start", "--optimized", "--http-enabled=true", "--hostname-strict=false" })
|
||||
@Order(2)
|
||||
void failIfOptimizedUsedForFirstStartup(LaunchResult result) {
|
||||
CLIResult cliResult = (CLIResult) result;
|
||||
cliResult.assertError("The '--optimized' flag was used for first ever server start.");
|
||||
}
|
||||
|
||||
@Test
|
||||
@Launch({ "start", "--http-enabled=true" })
|
||||
void failNoHostnameNotSet(LaunchResult result) {
|
||||
|
@ -132,7 +159,7 @@ public class StartCommandDistTest {
|
|||
|
||||
@Test
|
||||
@WithEnvVars({"KC_LOG", "invalid"})
|
||||
@Launch({ "start", "--optimized" })
|
||||
@Launch({ "start" })
|
||||
void testStartUsingOptimizedInvalidEnvOption(LaunchResult result) {
|
||||
CLIResult cliResult = (CLIResult) result;
|
||||
cliResult.assertError("Invalid value for option 'KC_LOG': invalid. Expected values are: console, file, syslog");
|
||||
|
|
Loading…
Reference in a new issue