Upgrade to Quarkus 3.14.2 (#32519)
Closes #32517 Signed-off-by: Václav Muzikář <vmuzikar@redhat.com>
This commit is contained in:
parent
aec3eb91a8
commit
83c00731c3
21 changed files with 129 additions and 28 deletions
|
@ -265,6 +265,11 @@ As part of this change the following related configuration options for the SPI h
|
||||||
|
|
||||||
If you were still making use of these options or the `redirect_uri` parameter for logout you should implement the link:https://openid.net/specs/openid-connect-rpinitiated-1_0.html[OpenID Connect RP-Initiated Logout specification] instead.
|
If you were still making use of these options or the `redirect_uri` parameter for logout you should implement the link:https://openid.net/specs/openid-connect-rpinitiated-1_0.html[OpenID Connect RP-Initiated Logout specification] instead.
|
||||||
|
|
||||||
|
= Additional validations on the `--optimized` startup option
|
||||||
|
The `--optimized` startup option now requires the optimized server image to be built first. This can be achieved
|
||||||
|
either by running `kc.sh|bat build` first or by any other server commands (like `start`, `export`, `import`)
|
||||||
|
without the `--optimized` flag.
|
||||||
|
|
||||||
= New generalized event types for credentials
|
= New generalized event types for credentials
|
||||||
|
|
||||||
There are now generalized events for updating (`UPDATE_CREDENTIAL`) and removing (`REMOVE_CREDENTIAL`) a credential. The credential type is described in the `credential_type` attribute of the events.
|
There are now generalized events for updating (`UPDATE_CREDENTIAL`) and removing (`REMOVE_CREDENTIAL`) a credential. The credential type is described in the `credential_type` attribute of the events.
|
||||||
|
|
|
@ -108,7 +108,10 @@ public class ResourceEntity {
|
||||||
@BatchSize(size = 20)
|
@BatchSize(size = 20)
|
||||||
private List<ScopeEntity> scopes;
|
private List<ScopeEntity> scopes;
|
||||||
|
|
||||||
@OneToMany(cascade = CascadeType.REMOVE, orphanRemoval = true, mappedBy="resource", fetch = FetchType.LAZY)
|
// Explicitly not using OrphanRemoval as we're handling the removal manually through HQL but at the same time we still
|
||||||
|
// want to remove elements from the entity's collection in a manual way. Without this, Hibernate would do a duplicate
|
||||||
|
// delete query.
|
||||||
|
@OneToMany(cascade = CascadeType.REMOVE, orphanRemoval = false, mappedBy="resource", fetch = FetchType.LAZY)
|
||||||
@Fetch(FetchMode.SELECT)
|
@Fetch(FetchMode.SELECT)
|
||||||
@BatchSize(size = 20)
|
@BatchSize(size = 20)
|
||||||
private Collection<ResourceAttributeEntity> attributes = new LinkedList<>();
|
private Collection<ResourceAttributeEntity> attributes = new LinkedList<>();
|
||||||
|
|
|
@ -189,4 +189,10 @@ public class CustomCreateIndexChange extends CreateIndexChange {
|
||||||
return changeValidationErrors;
|
return changeValidationErrors;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// The default impls seems to be just fine, so this is just to remove the
|
||||||
|
// "class does not implement the 'supports(Database)' method" warnings
|
||||||
|
@Override
|
||||||
|
public boolean supports(Database database) {
|
||||||
|
return super.supports(database);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -34,7 +34,9 @@ import org.keycloak.connections.jpa.updater.liquibase.ThreadLocalSessionContext;
|
||||||
import org.keycloak.models.KeycloakSession;
|
import org.keycloak.models.KeycloakSession;
|
||||||
|
|
||||||
import java.sql.Connection;
|
import java.sql.Connection;
|
||||||
|
import java.sql.PreparedStatement;
|
||||||
import java.sql.ResultSet;
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.Savepoint;
|
||||||
import java.sql.Statement;
|
import java.sql.Statement;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
@ -95,8 +97,14 @@ public abstract class CustomKeycloakTask implements CustomSqlChange {
|
||||||
try {
|
try {
|
||||||
String correctedTableName = database.correctObjectName("REALM", Table.class);
|
String correctedTableName = database.correctObjectName("REALM", Table.class);
|
||||||
if (SnapshotGeneratorFactory.getInstance().has(new Table().setName(correctedTableName), database)) {
|
if (SnapshotGeneratorFactory.getInstance().has(new Table().setName(correctedTableName), database)) {
|
||||||
|
// We're inside a liquibase managed transaction at this point. Some RDBMS don't like updates to tables
|
||||||
|
// that were queried in the same transaction. So we need to create a savepoint and rollback to it so that
|
||||||
|
// this select is effectively removed from a transaction and doesn't interfere with an update that will come later.
|
||||||
|
Savepoint savepoint = connection.setSavepoint();
|
||||||
try (Statement st = connection.createStatement(); ResultSet resultSet = st.executeQuery("SELECT ID FROM " + getTableName(correctedTableName))) {
|
try (Statement st = connection.createStatement(); ResultSet resultSet = st.executeQuery("SELECT ID FROM " + getTableName(correctedTableName))) {
|
||||||
return (resultSet.next());
|
return (resultSet.next());
|
||||||
|
} finally {
|
||||||
|
connection.rollback(savepoint);
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
return false;
|
return false;
|
||||||
|
|
|
@ -447,8 +447,19 @@ public class JpaRealmProvider implements RealmProvider, ClientProvider, ClientSc
|
||||||
// Throw model exception to ensure transaction rollback and revert previous operations (removing default roles) as well
|
// Throw model exception to ensure transaction rollback and revert previous operations (removing default roles) as well
|
||||||
throw new ModelException("Role not found or trying to remove role from incorrect realm");
|
throw new ModelException("Role not found or trying to remove role from incorrect realm");
|
||||||
}
|
}
|
||||||
String compositeRoleTable = JpaUtils.getTableNameForNativeQuery("COMPOSITE_ROLE", em);
|
|
||||||
em.createNativeQuery("delete from " + compositeRoleTable + " where CHILD_ROLE = :role").setParameter("role", roleEntity.getId()).executeUpdate();
|
// Can't use a native query to delete the composite roles mappings because it causes TransientObjectException.
|
||||||
|
// At the same time, can't use the persist cascade type on the compositeRoles field because in that case
|
||||||
|
// we still could not use a native query as a different problem would arise - it may happen that a parent role that
|
||||||
|
// has this role as a composite is present in the persistence context. In that case, the role would be re-created
|
||||||
|
// again after deletion through persist cascade type.
|
||||||
|
// So in any case, native query is not an option. This is not optimal as it executes additional queries but
|
||||||
|
// the alternative of clearing the persistence context is not optimal either, as we don't know if something currently present
|
||||||
|
// in the context is not needed later.
|
||||||
|
Stream<RoleEntity> parentRoles = em.createNamedQuery("getParentRolesOfACompositeRole", RoleEntity.class).setParameter("compositeRole", roleEntity).getResultStream();
|
||||||
|
parentRoles.forEach(parentRole -> parentRole.getCompositeRoles().remove(roleEntity));
|
||||||
|
parentRoles.close();
|
||||||
|
|
||||||
em.createNamedQuery("deleteClientScopeRoleMappingByRole").setParameter("role", roleEntity).executeUpdate();
|
em.createNamedQuery("deleteClientScopeRoleMappingByRole").setParameter("role", roleEntity).executeUpdate();
|
||||||
|
|
||||||
em.flush();
|
em.flush();
|
||||||
|
|
|
@ -65,6 +65,7 @@ import java.util.Set;
|
||||||
@NamedQuery(name="searchForRealmRoles", query="select role from RoleEntity role where role.clientRole = false and role.realmId = :realm and ( lower(role.name) like :search or lower(role.description) like :search ) order by role.name"),
|
@NamedQuery(name="searchForRealmRoles", query="select role from RoleEntity role where role.clientRole = false and role.realmId = :realm and ( lower(role.name) like :search or lower(role.description) like :search ) order by role.name"),
|
||||||
@NamedQuery(name="getRoleIdsFromIdList", query="select role.id from RoleEntity role where role.realmId = :realm and role.id in :ids order by role.name ASC"),
|
@NamedQuery(name="getRoleIdsFromIdList", query="select role.id from RoleEntity role where role.realmId = :realm and role.id in :ids order by role.name ASC"),
|
||||||
@NamedQuery(name="getRoleIdsByNameContainingFromIdList", query="select role.id from RoleEntity role where role.realmId = :realm and lower(role.name) like lower(concat('%',:search,'%')) and role.id in :ids order by role.name ASC"),
|
@NamedQuery(name="getRoleIdsByNameContainingFromIdList", query="select role.id from RoleEntity role where role.realmId = :realm and lower(role.name) like lower(concat('%',:search,'%')) and role.id in :ids order by role.name ASC"),
|
||||||
|
@NamedQuery(name="getParentRolesOfACompositeRole", query = "select role from RoleEntity role where :compositeRole member of role.compositeRoles"),
|
||||||
})
|
})
|
||||||
|
|
||||||
public class RoleEntity {
|
public class RoleEntity {
|
||||||
|
@ -98,7 +99,10 @@ public class RoleEntity {
|
||||||
@JoinTable(name = "COMPOSITE_ROLE", joinColumns = @JoinColumn(name = "COMPOSITE"), inverseJoinColumns = @JoinColumn(name = "CHILD_ROLE"))
|
@JoinTable(name = "COMPOSITE_ROLE", joinColumns = @JoinColumn(name = "COMPOSITE"), inverseJoinColumns = @JoinColumn(name = "CHILD_ROLE"))
|
||||||
private Set<RoleEntity> compositeRoles;
|
private Set<RoleEntity> compositeRoles;
|
||||||
|
|
||||||
@OneToMany(cascade = CascadeType.REMOVE, orphanRemoval = true, mappedBy="role")
|
// Explicitly not using OrphanRemoval as we're handling the removal manually through HQL but at the same time we still
|
||||||
|
// want to remove elements from the entity's collection in a manual way. Without this, Hibernate would do a duplicate
|
||||||
|
// delete query.
|
||||||
|
@OneToMany(cascade = CascadeType.REMOVE, orphanRemoval = false, mappedBy="role")
|
||||||
@Fetch(FetchMode.SELECT)
|
@Fetch(FetchMode.SELECT)
|
||||||
@BatchSize(size = 20)
|
@BatchSize(size = 20)
|
||||||
protected List<RoleAttributeEntity> attributes = new LinkedList<>();
|
protected List<RoleAttributeEntity> attributes = new LinkedList<>();
|
||||||
|
|
|
@ -93,7 +93,10 @@ public class UserEntity {
|
||||||
@Column(name = "REALM_ID")
|
@Column(name = "REALM_ID")
|
||||||
protected String realmId;
|
protected String realmId;
|
||||||
|
|
||||||
@OneToMany(cascade = CascadeType.REMOVE, orphanRemoval = true, mappedBy="user")
|
// Explicitly not using OrphanRemoval as we're handling the removal manually through HQL but at the same time we still
|
||||||
|
// want to remove elements from the entity's collection in a manual way. Without this, Hibernate would do a duplicate
|
||||||
|
// delete query.
|
||||||
|
@OneToMany(cascade = CascadeType.REMOVE, orphanRemoval = false, mappedBy="user")
|
||||||
@Fetch(FetchMode.SELECT)
|
@Fetch(FetchMode.SELECT)
|
||||||
@BatchSize(size = 20)
|
@BatchSize(size = 20)
|
||||||
protected Collection<UserAttributeEntity> attributes = new LinkedList<>();
|
protected Collection<UserAttributeEntity> attributes = new LinkedList<>();
|
||||||
|
|
18
pom.xml
18
pom.xml
|
@ -51,8 +51,8 @@
|
||||||
<jboss.snapshots.repo.id>jboss-snapshots-repository</jboss.snapshots.repo.id>
|
<jboss.snapshots.repo.id>jboss-snapshots-repository</jboss.snapshots.repo.id>
|
||||||
<jboss.snapshots.repo.url>https://s01.oss.sonatype.org/content/repositories/snapshots/</jboss.snapshots.repo.url>
|
<jboss.snapshots.repo.url>https://s01.oss.sonatype.org/content/repositories/snapshots/</jboss.snapshots.repo.url>
|
||||||
|
|
||||||
<quarkus.version>3.13.3</quarkus.version>
|
<quarkus.version>3.14.2</quarkus.version>
|
||||||
<quarkus.build.version>3.13.3</quarkus.build.version>
|
<quarkus.build.version>3.14.2</quarkus.build.version>
|
||||||
|
|
||||||
<project.build-time>${timestamp}</project.build-time>
|
<project.build-time>${timestamp}</project.build-time>
|
||||||
|
|
||||||
|
@ -92,7 +92,7 @@
|
||||||
<bouncycastle.bctls-fips.version>1.0.19</bouncycastle.bctls-fips.version>
|
<bouncycastle.bctls-fips.version>1.0.19</bouncycastle.bctls-fips.version>
|
||||||
|
|
||||||
<dom4j.version>2.1.3</dom4j.version>
|
<dom4j.version>2.1.3</dom4j.version>
|
||||||
<h2.version>2.2.224</h2.version>
|
<h2.version>2.3.232</h2.version>
|
||||||
<hibernate-orm.plugin.version>6.2.13.Final</hibernate-orm.plugin.version>
|
<hibernate-orm.plugin.version>6.2.13.Final</hibernate-orm.plugin.version>
|
||||||
<hibernate.c3p0.version>6.2.13.Final</hibernate.c3p0.version>
|
<hibernate.c3p0.version>6.2.13.Final</hibernate.c3p0.version>
|
||||||
<infinispan.version>15.0.8.Final</infinispan.version>
|
<infinispan.version>15.0.8.Final</infinispan.version>
|
||||||
|
@ -124,7 +124,7 @@
|
||||||
<undertow.version>${undertow-legacy.version}</undertow.version>
|
<undertow.version>${undertow-legacy.version}</undertow.version>
|
||||||
<undertow-legacy.version>2.2.24.Final</undertow-legacy.version>
|
<undertow-legacy.version>2.2.24.Final</undertow-legacy.version>
|
||||||
<undertow-jakarta.version>2.3.2.Final</undertow-jakarta.version>
|
<undertow-jakarta.version>2.3.2.Final</undertow-jakarta.version>
|
||||||
<wildfly-elytron.version>2.5.0.Final</wildfly-elytron.version>
|
<wildfly-elytron.version>2.5.2.Final</wildfly-elytron.version>
|
||||||
<elytron.undertow-server.version>1.9.0.Final</elytron.undertow-server.version>
|
<elytron.undertow-server.version>1.9.0.Final</elytron.undertow-server.version>
|
||||||
<woodstox.version>6.0.3</woodstox.version>
|
<woodstox.version>6.0.3</woodstox.version>
|
||||||
<wildfly.common.quarkus.aligned.version>1.5.4.Final-format-001</wildfly.common.quarkus.aligned.version>
|
<wildfly.common.quarkus.aligned.version>1.5.4.Final-format-001</wildfly.common.quarkus.aligned.version>
|
||||||
|
@ -149,7 +149,7 @@
|
||||||
<com.apicatalog.titanium-json-ld.version>1.3.3</com.apicatalog.titanium-json-ld.version>
|
<com.apicatalog.titanium-json-ld.version>1.3.3</com.apicatalog.titanium-json-ld.version>
|
||||||
<io.setl.rdf-urdna.version>1.1</io.setl.rdf-urdna.version>
|
<io.setl.rdf-urdna.version>1.1</io.setl.rdf-urdna.version>
|
||||||
|
|
||||||
<liquibase.version>4.27.0</liquibase.version>
|
<liquibase.version>4.29.1</liquibase.version>
|
||||||
<servlet.api.30.version>1.0.2.Final</servlet.api.30.version>
|
<servlet.api.30.version>1.0.2.Final</servlet.api.30.version>
|
||||||
<servlet.api.40.version>2.0.0.Final</servlet.api.40.version>
|
<servlet.api.40.version>2.0.0.Final</servlet.api.40.version>
|
||||||
<twitter4j.version>4.1.2</twitter4j.version>
|
<twitter4j.version>4.1.2</twitter4j.version>
|
||||||
|
@ -160,15 +160,15 @@
|
||||||
<postgresql.version>16</postgresql.version>
|
<postgresql.version>16</postgresql.version>
|
||||||
<aurora-postgresql.version>16.1</aurora-postgresql.version>
|
<aurora-postgresql.version>16.1</aurora-postgresql.version>
|
||||||
<aws-jdbc-wrapper.version>2.3.1</aws-jdbc-wrapper.version>
|
<aws-jdbc-wrapper.version>2.3.1</aws-jdbc-wrapper.version>
|
||||||
<postgresql-jdbc.version>42.7.3</postgresql-jdbc.version>
|
<postgresql-jdbc.version>42.7.4</postgresql-jdbc.version>
|
||||||
<mariadb.version>10.11</mariadb.version>
|
<mariadb.version>10.11</mariadb.version>
|
||||||
<mariadb-jdbc.version>3.4.0</mariadb-jdbc.version>
|
<mariadb-jdbc.version>3.4.1</mariadb-jdbc.version>
|
||||||
<mssql.version>2022-latest</mssql.version>
|
<mssql.version>2022-latest</mssql.version>
|
||||||
<!-- this is the mssql driver version also used in the Quarkus BOM -->
|
<!-- this is the mssql driver version also used in the Quarkus BOM -->
|
||||||
<mssql-jdbc.version>12.6.3.jre11</mssql-jdbc.version>
|
<mssql-jdbc.version>12.8.1.jre11</mssql-jdbc.version>
|
||||||
<oracledb.version>19.3</oracledb.version>
|
<oracledb.version>19.3</oracledb.version>
|
||||||
<!-- this is the oracle driver version also used in the Quarkus BOM -->
|
<!-- this is the oracle driver version also used in the Quarkus BOM -->
|
||||||
<oracle-jdbc.version>23.3.0.23.09</oracle-jdbc.version>
|
<oracle-jdbc.version>23.5.0.24.07</oracle-jdbc.version>
|
||||||
|
|
||||||
<!-- Test -->
|
<!-- Test -->
|
||||||
<greenmail.version>2.1.0-alpha-1</greenmail.version>
|
<greenmail.version>2.1.0-alpha-1</greenmail.version>
|
||||||
|
|
|
@ -37,7 +37,8 @@ class KeycloakMetricsConfigurationTest {
|
||||||
static final QuarkusUnitTest test = new QuarkusUnitTest()
|
static final QuarkusUnitTest test = new QuarkusUnitTest()
|
||||||
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)
|
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)
|
||||||
.addAsResource("keycloak.conf", "META-INF/keycloak.conf"))
|
.addAsResource("keycloak.conf", "META-INF/keycloak.conf"))
|
||||||
.overrideConfigKey("quarkus.micrometer.export.prometheus.path", "/prom/metrics");
|
.overrideConfigKey("quarkus.micrometer.export.prometheus.path", "/prom/metrics")
|
||||||
|
.overrideConfigKey("quarkus.class-loading.removed-artifacts", "io.quarkus:quarkus-jdbc-oracle,io.quarkus:quarkus-jdbc-oracle-deployment"); // config works a bit oddly in unit tests, so this is to ensure we exclude Oracle to avoid a ClassNotFoundException
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
void testMetrics() {
|
void testMetrics() {
|
||||||
|
|
|
@ -37,7 +37,8 @@ public class KeycloakNegativeHealthCheckTest {
|
||||||
@RegisterExtension
|
@RegisterExtension
|
||||||
static final QuarkusUnitTest test = new QuarkusUnitTest()
|
static final QuarkusUnitTest test = new QuarkusUnitTest()
|
||||||
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)
|
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)
|
||||||
.addAsResource("keycloak.conf", "META-INF/keycloak.conf"));
|
.addAsResource("keycloak.conf", "META-INF/keycloak.conf"))
|
||||||
|
.overrideConfigKey("quarkus.class-loading.removed-artifacts", "io.quarkus:quarkus-jdbc-oracle,io.quarkus:quarkus-jdbc-oracle-deployment"); // config works a bit oddly in unit tests, so this is to ensure we exclude Oracle to avoid a ClassNotFoundException
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testReadinessDown() {
|
public void testReadinessDown() {
|
||||||
|
|
|
@ -38,7 +38,8 @@ class KeycloakPathConfigurationTest {
|
||||||
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)
|
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)
|
||||||
.addAsResource("keycloak.conf", "META-INF/keycloak.conf"))
|
.addAsResource("keycloak.conf", "META-INF/keycloak.conf"))
|
||||||
.overrideConfigKey("kc.http-relative-path","/auth")
|
.overrideConfigKey("kc.http-relative-path","/auth")
|
||||||
.overrideConfigKey("quarkus.micrometer.export.prometheus.path", "/prom/metrics");
|
.overrideConfigKey("quarkus.micrometer.export.prometheus.path", "/prom/metrics")
|
||||||
|
.overrideConfigKey("quarkus.class-loading.removed-artifacts", "io.quarkus:quarkus-jdbc-oracle,io.quarkus:quarkus-jdbc-oracle-deployment"); // config works a bit oddly in unit tests, so this is to ensure we exclude Oracle to avoid a ClassNotFoundException
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
void testMetrics() {
|
void testMetrics() {
|
||||||
|
|
|
@ -37,7 +37,8 @@ public class KeycloakReadyHealthCheckTest {
|
||||||
@RegisterExtension
|
@RegisterExtension
|
||||||
static final QuarkusUnitTest test = new QuarkusUnitTest()
|
static final QuarkusUnitTest test = new QuarkusUnitTest()
|
||||||
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)
|
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)
|
||||||
.addAsResource("keycloak.conf", "META-INF/keycloak.conf"));
|
.addAsResource("keycloak.conf", "META-INF/keycloak.conf"))
|
||||||
|
.overrideConfigKey("quarkus.class-loading.removed-artifacts", "io.quarkus:quarkus-jdbc-oracle,io.quarkus:quarkus-jdbc-oracle-deployment"); // config works a bit oddly in unit tests, so this is to ensure we exclude Oracle to avoid a ClassNotFoundException
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testLivenessUp() {
|
public void testLivenessUp() {
|
||||||
|
|
|
@ -24,6 +24,7 @@ import static org.keycloak.quarkus.runtime.Environment.isNonServerMode;
|
||||||
import static org.keycloak.quarkus.runtime.Environment.isTestLaunchMode;
|
import static org.keycloak.quarkus.runtime.Environment.isTestLaunchMode;
|
||||||
import static org.keycloak.quarkus.runtime.cli.Picocli.parseAndRun;
|
import static org.keycloak.quarkus.runtime.cli.Picocli.parseAndRun;
|
||||||
import static org.keycloak.quarkus.runtime.cli.command.AbstractStartCommand.OPTIMIZED_BUILD_OPTION_LONG;
|
import static org.keycloak.quarkus.runtime.cli.command.AbstractStartCommand.OPTIMIZED_BUILD_OPTION_LONG;
|
||||||
|
import static org.keycloak.quarkus.runtime.cli.command.AbstractStartCommand.wasBuildEverRun;
|
||||||
import static org.keycloak.quarkus.runtime.cli.command.Start.isDevProfileNotAllowed;
|
import static org.keycloak.quarkus.runtime.cli.command.Start.isDevProfileNotAllowed;
|
||||||
|
|
||||||
import java.io.PrintWriter;
|
import java.io.PrintWriter;
|
||||||
|
@ -74,6 +75,11 @@ public class KeycloakMain implements QuarkusApplication {
|
||||||
cliArgs.add("-h");
|
cliArgs.add("-h");
|
||||||
} else if (isFastStart(cliArgs)) { // fast path for starting the server without bootstrapping CLI
|
} else if (isFastStart(cliArgs)) { // fast path for starting the server without bootstrapping CLI
|
||||||
|
|
||||||
|
if (!wasBuildEverRun()) {
|
||||||
|
handleUsageError(Messages.optimizedUsedForFirstStartup());
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
if (isDevProfileNotAllowed()) {
|
if (isDevProfileNotAllowed()) {
|
||||||
handleUsageError(Messages.devProfileNotAllowedError(Start.NAME));
|
handleUsageError(Messages.devProfileNotAllowedError(Start.NAME));
|
||||||
return;
|
return;
|
||||||
|
|
|
@ -22,6 +22,8 @@ import java.util.Set;
|
||||||
import java.util.stream.Collectors;
|
import java.util.stream.Collectors;
|
||||||
import org.jboss.logging.Logger;
|
import org.jboss.logging.Logger;
|
||||||
|
|
||||||
|
import org.keycloak.quarkus.runtime.cli.command.AbstractStartCommand;
|
||||||
|
import org.keycloak.quarkus.runtime.cli.command.Build;
|
||||||
import picocli.CommandLine;
|
import picocli.CommandLine;
|
||||||
|
|
||||||
public final class Messages {
|
public final class Messages {
|
||||||
|
@ -47,6 +49,10 @@ public final class Messages {
|
||||||
return String.format("You can not '%s' the server in %s mode. Please re-build the server first, using 'kc.sh build' for the default production mode.%n", cmd, Environment.getKeycloakModeFromProfile(org.keycloak.common.util.Environment.DEV_PROFILE_VALUE));
|
return String.format("You can not '%s' the server in %s mode. Please re-build the server first, using 'kc.sh build' for the default production mode.%n", cmd, Environment.getKeycloakModeFromProfile(org.keycloak.common.util.Environment.DEV_PROFILE_VALUE));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public static String optimizedUsedForFirstStartup() {
|
||||||
|
return String.format("The '%s' flag was used for first ever server start. Please don't use this flag for the first startup or use '%s %s' to build the server first.", AbstractStartCommand.OPTIMIZED_BUILD_OPTION_LONG, Environment.getCommand(), Build.NAME);
|
||||||
|
}
|
||||||
|
|
||||||
public static String invalidLogLevel(String logLevel) {
|
public static String invalidLogLevel(String logLevel) {
|
||||||
Set<String> values = Arrays.stream(Logger.Level.values()).map(Logger.Level::name).map(String::toLowerCase).collect(Collectors.toSet());
|
Set<String> values = Arrays.stream(Logger.Level.values()).map(Logger.Level::name).map(String::toLowerCase).collect(Collectors.toSet());
|
||||||
return "Invalid log level: " + logLevel + ". Possible values are: " + String.join(", ", values) + ".";
|
return "Invalid log level: " + logLevel + ". Possible values are: " + String.join(", ", values) + ".";
|
||||||
|
|
|
@ -20,7 +20,9 @@ package org.keycloak.quarkus.runtime.cli.command;
|
||||||
import org.keycloak.config.OptionCategory;
|
import org.keycloak.config.OptionCategory;
|
||||||
import org.keycloak.quarkus.runtime.Environment;
|
import org.keycloak.quarkus.runtime.Environment;
|
||||||
import org.keycloak.quarkus.runtime.KeycloakMain;
|
import org.keycloak.quarkus.runtime.KeycloakMain;
|
||||||
|
import org.keycloak.quarkus.runtime.Messages;
|
||||||
import org.keycloak.quarkus.runtime.cli.ExecutionExceptionHandler;
|
import org.keycloak.quarkus.runtime.cli.ExecutionExceptionHandler;
|
||||||
|
import org.keycloak.quarkus.runtime.configuration.ConfigArgsConfigSource;
|
||||||
import org.keycloak.quarkus.runtime.configuration.mappers.HttpPropertyMappers;
|
import org.keycloak.quarkus.runtime.configuration.mappers.HttpPropertyMappers;
|
||||||
|
|
||||||
import java.util.EnumSet;
|
import java.util.EnumSet;
|
||||||
|
@ -29,6 +31,8 @@ import java.util.stream.Collectors;
|
||||||
|
|
||||||
import picocli.CommandLine;
|
import picocli.CommandLine;
|
||||||
|
|
||||||
|
import static org.keycloak.quarkus.runtime.configuration.Configuration.getRawPersistedProperties;
|
||||||
|
|
||||||
public abstract class AbstractStartCommand extends AbstractCommand implements Runnable {
|
public abstract class AbstractStartCommand extends AbstractCommand implements Runnable {
|
||||||
public static final String OPTIMIZED_BUILD_OPTION_LONG = "--optimized";
|
public static final String OPTIMIZED_BUILD_OPTION_LONG = "--optimized";
|
||||||
|
|
||||||
|
@ -39,6 +43,11 @@ public abstract class AbstractStartCommand extends AbstractCommand implements Ru
|
||||||
CommandLine cmd = spec.commandLine();
|
CommandLine cmd = spec.commandLine();
|
||||||
HttpPropertyMappers.validateConfig();
|
HttpPropertyMappers.validateConfig();
|
||||||
validateConfig();
|
validateConfig();
|
||||||
|
|
||||||
|
if (ConfigArgsConfigSource.getAllCliArgs().contains(OPTIMIZED_BUILD_OPTION_LONG) && !wasBuildEverRun()) {
|
||||||
|
executionError(spec.commandLine(), Messages.optimizedUsedForFirstStartup());
|
||||||
|
}
|
||||||
|
|
||||||
KeycloakMain.start((ExecutionExceptionHandler) cmd.getExecutionExceptionHandler(), cmd.getErr(), cmd.getParseResult().originalArgs().toArray(new String[0]));
|
KeycloakMain.start((ExecutionExceptionHandler) cmd.getExecutionExceptionHandler(), cmd.getErr(), cmd.getParseResult().originalArgs().toArray(new String[0]));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -46,6 +55,10 @@ public abstract class AbstractStartCommand extends AbstractCommand implements Ru
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public static boolean wasBuildEverRun() {
|
||||||
|
return !getRawPersistedProperties().isEmpty();
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public List<OptionCategory> getOptionCategories() {
|
public List<OptionCategory> getOptionCategories() {
|
||||||
EnumSet<OptionCategory> excludedCategories = excludedCategories();
|
EnumSet<OptionCategory> excludedCategories = excludedCategories();
|
||||||
|
|
|
@ -20,6 +20,7 @@ package org.keycloak.quarkus.runtime.configuration;
|
||||||
import static org.keycloak.quarkus.runtime.Environment.getProfileOrDefault;
|
import static org.keycloak.quarkus.runtime.Environment.getProfileOrDefault;
|
||||||
import static org.keycloak.quarkus.runtime.cli.Picocli.ARG_PREFIX;
|
import static org.keycloak.quarkus.runtime.cli.Picocli.ARG_PREFIX;
|
||||||
|
|
||||||
|
import java.util.Map;
|
||||||
import java.util.Optional;
|
import java.util.Optional;
|
||||||
import java.util.Properties;
|
import java.util.Properties;
|
||||||
|
|
||||||
|
@ -110,6 +111,10 @@ public final class Configuration {
|
||||||
return Optional.ofNullable(PersistedConfigSource.getInstance().getValue(name));
|
return Optional.ofNullable(PersistedConfigSource.getInstance().getValue(name));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public static Map<String, String> getRawPersistedProperties() {
|
||||||
|
return PersistedConfigSource.getInstance().getProperties();
|
||||||
|
}
|
||||||
|
|
||||||
public static String getRawValue(String propertyName) {
|
public static String getRawValue(String propertyName) {
|
||||||
return getConfig().getRawValue(propertyName);
|
return getConfig().getRawValue(propertyName);
|
||||||
}
|
}
|
||||||
|
|
|
@ -12,7 +12,7 @@ final class HealthPropertyMappers {
|
||||||
public static PropertyMapper<?>[] getHealthPropertyMappers() {
|
public static PropertyMapper<?>[] getHealthPropertyMappers() {
|
||||||
return new PropertyMapper[] {
|
return new PropertyMapper[] {
|
||||||
fromOption(HealthOptions.HEALTH_ENABLED)
|
fromOption(HealthOptions.HEALTH_ENABLED)
|
||||||
.to("quarkus.health.extensions.enabled")
|
.to("quarkus.smallrye-health.extensions.enabled")
|
||||||
.paramLabel(Boolean.TRUE + "|" + Boolean.FALSE)
|
.paramLabel(Boolean.TRUE + "|" + Boolean.FALSE)
|
||||||
.build()
|
.build()
|
||||||
};
|
};
|
||||||
|
|
|
@ -6,7 +6,7 @@ quarkus.application.name=Keycloak
|
||||||
quarkus.banner.enabled=false
|
quarkus.banner.enabled=false
|
||||||
|
|
||||||
# Disable health checks from extensions, since we provide our own (default is true)
|
# Disable health checks from extensions, since we provide our own (default is true)
|
||||||
quarkus.health.extensions.enabled=false
|
quarkus.smallrye-health.extensions.enabled=false
|
||||||
|
|
||||||
# Enables metrics from other extensions if metrics is enabled
|
# Enables metrics from other extensions if metrics is enabled
|
||||||
quarkus.datasource.metrics.enabled=${quarkus.micrometer.enabled:false}
|
quarkus.datasource.metrics.enabled=${quarkus.micrometer.enabled:false}
|
||||||
|
|
|
@ -418,10 +418,10 @@ public class ConfigurationTest extends AbstractConfigurationTest {
|
||||||
public void testResolveHealthOption() {
|
public void testResolveHealthOption() {
|
||||||
ConfigArgsConfigSource.setCliArgs("--health-enabled=true");
|
ConfigArgsConfigSource.setCliArgs("--health-enabled=true");
|
||||||
SmallRyeConfig config = createConfig();
|
SmallRyeConfig config = createConfig();
|
||||||
assertEquals("true", config.getConfigValue("quarkus.health.extensions.enabled").getValue());
|
assertEquals("true", config.getConfigValue("quarkus.smallrye-health.extensions.enabled").getValue());
|
||||||
ConfigArgsConfigSource.setCliArgs("");
|
ConfigArgsConfigSource.setCliArgs("");
|
||||||
config = createConfig();
|
config = createConfig();
|
||||||
assertEquals("false", config.getConfigValue("quarkus.health.extensions.enabled").getValue());
|
assertEquals("false", config.getConfigValue("quarkus.smallrye-health.extensions.enabled").getValue());
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
|
|
@ -86,7 +86,7 @@ public class OptionsDistTest {
|
||||||
@RawDistOnly(reason = "Raw is enough and we avoid issues with including custom conf file in the container")
|
@RawDistOnly(reason = "Raw is enough and we avoid issues with including custom conf file in the container")
|
||||||
public void testExpressionsInConfigFile(KeycloakDistribution distribution) {
|
public void testExpressionsInConfigFile(KeycloakDistribution distribution) {
|
||||||
distribution.setEnvVar("MY_LOG_LEVEL", "warn");
|
distribution.setEnvVar("MY_LOG_LEVEL", "warn");
|
||||||
CLIResult result = distribution.run(CONFIG_FILE_LONG_NAME + "=" + Paths.get("src/test/resources/OptionsDistTest/keycloak.conf").toAbsolutePath().normalize(), "start", "--http-enabled=true", "--hostname-strict=false", "--optimized");
|
CLIResult result = distribution.run(CONFIG_FILE_LONG_NAME + "=" + Paths.get("src/test/resources/OptionsDistTest/keycloak.conf").toAbsolutePath().normalize(), "start", "--http-enabled=true", "--hostname-strict=false");
|
||||||
result.assertNoMessage("INFO [io.quarkus]");
|
result.assertNoMessage("INFO [io.quarkus]");
|
||||||
result.assertNoMessage("Listening on:");
|
result.assertNoMessage("Listening on:");
|
||||||
|
|
||||||
|
|
|
@ -25,7 +25,10 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||||
import static org.keycloak.quarkus.runtime.cli.command.AbstractStartCommand.OPTIMIZED_BUILD_OPTION_LONG;
|
import static org.keycloak.quarkus.runtime.cli.command.AbstractStartCommand.OPTIMIZED_BUILD_OPTION_LONG;
|
||||||
import static org.keycloak.quarkus.runtime.cli.command.Main.CONFIG_FILE_LONG_NAME;
|
import static org.keycloak.quarkus.runtime.cli.command.Main.CONFIG_FILE_LONG_NAME;
|
||||||
|
|
||||||
|
import org.junit.jupiter.api.MethodOrderer;
|
||||||
|
import org.junit.jupiter.api.Order;
|
||||||
import org.junit.jupiter.api.Test;
|
import org.junit.jupiter.api.Test;
|
||||||
|
import org.junit.jupiter.api.TestMethodOrder;
|
||||||
import org.keycloak.it.junit5.extension.CLIResult;
|
import org.keycloak.it.junit5.extension.CLIResult;
|
||||||
import org.keycloak.it.junit5.extension.DistributionTest;
|
import org.keycloak.it.junit5.extension.DistributionTest;
|
||||||
|
|
||||||
|
@ -34,10 +37,15 @@ import io.quarkus.test.junit.main.LaunchResult;
|
||||||
import org.keycloak.it.junit5.extension.RawDistOnly;
|
import org.keycloak.it.junit5.extension.RawDistOnly;
|
||||||
import org.keycloak.it.junit5.extension.WithEnvVars;
|
import org.keycloak.it.junit5.extension.WithEnvVars;
|
||||||
import org.keycloak.it.utils.KeycloakDistribution;
|
import org.keycloak.it.utils.KeycloakDistribution;
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
|
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
|
||||||
@DistributionTest
|
@DistributionTest
|
||||||
public class StartCommandDistTest {
|
public class StartCommandDistTest {
|
||||||
|
|
||||||
|
private static final Logger log = LoggerFactory.getLogger(StartCommandDistTest.class);
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
@Launch({ "start", "--hostname-strict=false" })
|
@Launch({ "start", "--hostname-strict=false" })
|
||||||
void failNoTls(LaunchResult result) {
|
void failNoTls(LaunchResult result) {
|
||||||
|
@ -60,10 +68,13 @@ public class StartCommandDistTest {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
@Launch({ "start", "--optimized", "--http-enabled=true", "--hostname-strict=false", "--spi-events-listener-jboss-logging-enabled=false" })
|
@RawDistOnly(reason = "Containers are immutable")
|
||||||
void warnSpiBuildtimeAtRuntime(LaunchResult result) {
|
void warnSpiBuildtimeAtRuntime(KeycloakDistribution dist) {
|
||||||
assertTrue(result.getOutput().contains("The following build time options have values that differ from what is persisted - the new values will NOT be used until another build is run: kc.spi-events-listener-jboss-logging-enabled"),
|
CLIResult result = dist.run("build");
|
||||||
() -> "The Output:\n" + result.getOutput() + "doesn't contains the expected string.");
|
result.assertBuild();
|
||||||
|
|
||||||
|
result = dist.run("start", "--optimized", "--http-enabled=true", "--hostname-strict=false", "--spi-events-listener-jboss-logging-enabled=false");
|
||||||
|
result.assertMessage("The following build time options have values that differ from what is persisted - the new values will NOT be used until another build is run: kc.spi-events-listener-jboss-logging-enabled");
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -95,6 +106,22 @@ public class StartCommandDistTest {
|
||||||
assertEquals(4, cliResult.getErrorStream().size());
|
assertEquals(4, cliResult.getErrorStream().size());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
@Launch({ "start", "--optimized" })
|
||||||
|
@Order(1)
|
||||||
|
void failIfOptimizedUsedForFirstFastStartup(LaunchResult result) {
|
||||||
|
CLIResult cliResult = (CLIResult) result;
|
||||||
|
cliResult.assertError("The '--optimized' flag was used for first ever server start.");
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
@Launch({ "start", "--optimized", "--http-enabled=true", "--hostname-strict=false" })
|
||||||
|
@Order(2)
|
||||||
|
void failIfOptimizedUsedForFirstStartup(LaunchResult result) {
|
||||||
|
CLIResult cliResult = (CLIResult) result;
|
||||||
|
cliResult.assertError("The '--optimized' flag was used for first ever server start.");
|
||||||
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
@Launch({ "start", "--http-enabled=true" })
|
@Launch({ "start", "--http-enabled=true" })
|
||||||
void failNoHostnameNotSet(LaunchResult result) {
|
void failNoHostnameNotSet(LaunchResult result) {
|
||||||
|
@ -132,7 +159,7 @@ public class StartCommandDistTest {
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
@WithEnvVars({"KC_LOG", "invalid"})
|
@WithEnvVars({"KC_LOG", "invalid"})
|
||||||
@Launch({ "start", "--optimized" })
|
@Launch({ "start" })
|
||||||
void testStartUsingOptimizedInvalidEnvOption(LaunchResult result) {
|
void testStartUsingOptimizedInvalidEnvOption(LaunchResult result) {
|
||||||
CLIResult cliResult = (CLIResult) result;
|
CLIResult cliResult = (CLIResult) result;
|
||||||
cliResult.assertError("Invalid value for option 'KC_LOG': invalid. Expected values are: console, file, syslog");
|
cliResult.assertError("Invalid value for option 'KC_LOG': invalid. Expected values are: console, file, syslog");
|
||||||
|
|
Loading…
Reference in a new issue