KEYCLOAK-3447 Manual upgrade of database schema
parent fa1fb3a3a9
commit c522a20ab9
9 changed files with 313 additions and 177 deletions
@@ -60,7 +60,9 @@
     "connectionsJpa": {
         "default": {
             "dataSource": "java:jboss/datasources/KeycloakDS",
-            "databaseSchema": "update"
+            "initializeEmpty": true,
+            "migrationStrategy": "update",
+            "migrationExport": "${jboss.home.dir}/keycloak-database-update.sql"
         }
     },
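For comparison, a deployment that wants to review and apply the SQL itself could combine the new keys roughly as follows. This is an illustrative sketch in the same keycloak-server.json style, not a hunk from this commit; the export path is only an example location.

    "connectionsJpa": {
        "default": {
            "dataSource": "java:jboss/datasources/KeycloakDS",
            "initializeEmpty": false,
            "migrationStrategy": "manual",
            "migrationExport": "${jboss.home.dir}/keycloak-database-update.sql"
        }
    }

With "manual", the server writes the pending changes to the migrationExport file and stops with a ServerStartupError instead of touching the schema; an administrator applies the SQL and restarts. "initializeEmpty": false means even an empty database is not populated automatically but follows the configured strategy.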
@@ -17,6 +17,7 @@
 package org.keycloak.connections.jpa;
 
+import java.io.File;
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
 import java.sql.DriverManager;

@@ -42,6 +43,7 @@ import org.keycloak.models.dblock.DBLockProvider
 import org.keycloak.models.utils.KeycloakModelUtils;
 import org.keycloak.provider.ServerInfoAwareProviderFactory;
 import org.keycloak.models.dblock.DBLockManager;
+import org.keycloak.ServerStartupError;
 import org.keycloak.timer.TimerProvider;
 
 /**

@@ -51,6 +53,10 @@ public class DefaultJpaConnectionProviderFactory implements JpaConnectionProvide
 
     private static final Logger logger = Logger.getLogger(DefaultJpaConnectionProviderFactory.class);
 
+    enum MigrationStrategy {
+        UPDATE, VALIDATE, MANUAL
+    }
+
     private volatile EntityManagerFactory emf;
 
     private Config.Scope config;
@@ -125,22 +131,9 @@ public class DefaultJpaConnectionProviderFactory implements JpaConnectionProvide
                 properties.put(JpaUtils.HIBERNATE_DEFAULT_SCHEMA, schema);
             }
 
-            String databaseSchema;
-            String databaseSchemaConf = config.get("databaseSchema");
-            if (databaseSchemaConf == null) {
-                throw new RuntimeException("Property 'databaseSchema' needs to be specified in the configuration");
-            }
-
-            if (databaseSchemaConf.equals("development-update")) {
-                properties.put("hibernate.hbm2ddl.auto", "update");
-                databaseSchema = null;
-            } else if (databaseSchemaConf.equals("development-validate")) {
-                properties.put("hibernate.hbm2ddl.auto", "validate");
-                databaseSchema = null;
-            } else {
-                databaseSchema = databaseSchemaConf;
-            }
+            MigrationStrategy migrationStrategy = getMigrationStrategy();
+            boolean initializeEmpty = config.getBoolean("initializeEmpty", true);
+            File databaseUpdateFile = getDatabaseUpdateFile();
 
             properties.put("hibernate.show_sql", config.getBoolean("showSql", false));
             properties.put("hibernate.format_sql", config.getBoolean("formatSql", true));
@@ -153,39 +146,8 @@ public class DefaultJpaConnectionProviderFactory implements JpaConnectionProvide
             if (driverDialect != null) {
                 properties.put("hibernate.dialect", driverDialect);
             }
 
-            if (databaseSchema != null) {
-                logger.trace("Updating database");
-
-                JpaUpdaterProvider updater = session.getProvider(JpaUpdaterProvider.class);
-                if (updater == null) {
-                    throw new RuntimeException("Can't update database: JPA updater provider not found");
-                }
-
-                // Check if having DBLock before trying to initialize hibernate
-                DBLockProvider dbLock = new DBLockManager(session).getDBLock();
-                if (dbLock.hasLock()) {
-                    updateOrValidateDB(databaseSchema, connection, updater, schema);
-                } else {
-                    logger.trace("Don't have DBLock retrieved before upgrade. Needs to acquire lock first in separate transaction");
-
-                    KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), new KeycloakSessionTask() {
-
-                        @Override
-                        public void run(KeycloakSession lockSession) {
-                            DBLockManager dbLockManager = new DBLockManager(lockSession);
-                            DBLockProvider dbLock2 = dbLockManager.getDBLock();
-                            dbLock2.waitForLock();
-                            try {
-                                updateOrValidateDB(databaseSchema, connection, updater, schema);
-                            } finally {
-                                dbLock2.releaseLock();
-                            }
-                        }
-
-                    });
-                }
-            }
+            migration(migrationStrategy, initializeEmpty, schema, databaseUpdateFile, connection, session);
 
             int globalStatsInterval = config.getInt("globalStatsInterval", -1);
             if (globalStatsInterval != -1) {
@@ -199,18 +161,6 @@ public class DefaultJpaConnectionProviderFactory implements JpaConnectionProvide
             if (globalStatsInterval != -1) {
                 startGlobalStats(session, globalStatsInterval);
             }
 
-        } catch (Exception e) {
-            // Safe rollback
-            if (connection != null) {
-                try {
-                    connection.rollback();
-                } catch (SQLException e2) {
-                    logger.warn("Can't rollback connection", e2);
-                }
-            }
-
-            throw e;
         } finally {
             // Close after creating EntityManagerFactory to prevent in-mem databases from closing
             if (connection != null) {
@@ -226,6 +176,11 @@ public class DefaultJpaConnectionProviderFactory implements JpaConnectionProvide
         }
     }
 
+    private File getDatabaseUpdateFile() {
+        String databaseUpdateFile = config.get("migrationExport", "keycloak-database-update.sql");
+        return new File(databaseUpdateFile);
+    }
+
     protected void prepareOperationalInfo(Connection connection) {
         try {
             operationalInfo = new LinkedHashMap<>();
@@ -282,20 +237,82 @@ public class DefaultJpaConnectionProviderFactory implements JpaConnectionProvide
         timer.scheduleTask(new HibernateStatsReporter(emf), globalStatsIntervalSecs * 1000, "ReportHibernateGlobalStats");
     }
 
-    // Needs to be called with acquired DBLock
-    protected void updateOrValidateDB(String databaseSchema, Connection connection, JpaUpdaterProvider updater, String schema) {
-        if (databaseSchema.equals("update")) {
-            updater.update(connection, schema);
-            logger.trace("Database update completed");
-        } else if (databaseSchema.equals("validate")) {
-            updater.validate(connection, schema);
-            logger.trace("Database validation completed");
-        } else {
-            throw new RuntimeException("Invalid value for databaseSchema: " + databaseSchema);
-        }
-    }
+    public void migration(MigrationStrategy strategy, boolean initializeEmpty, String schema, File databaseUpdateFile, Connection connection, KeycloakSession session) {
+        JpaUpdaterProvider updater = session.getProvider(JpaUpdaterProvider.class);
+
+        JpaUpdaterProvider.Status status = updater.validate(connection, schema);
+        if (status == JpaUpdaterProvider.Status.VALID) {
+            logger.debug("Database is up-to-date");
+        } else if (status == JpaUpdaterProvider.Status.EMPTY) {
+            if (initializeEmpty) {
+                update(connection, schema, session, updater);
+            } else {
+                switch (strategy) {
+                    case UPDATE:
+                        update(connection, schema, session, updater);
+                        break;
+                    case MANUAL:
+                        export(connection, schema, databaseUpdateFile, session, updater);
+                        throw new ServerStartupError("Database not initialized, please initialize database with " + databaseUpdateFile.getAbsolutePath(), false);
+                    case VALIDATE:
+                        throw new ServerStartupError("Database not initialized, please enable database initialization", false);
+                }
+            }
+        } else {
+            switch (strategy) {
+                case UPDATE:
+                    update(connection, schema, session, updater);
+                    break;
+                case MANUAL:
+                    export(connection, schema, databaseUpdateFile, session, updater);
+                    throw new ServerStartupError("Database not up-to-date, please migrate database with " + databaseUpdateFile.getAbsolutePath(), false);
+                case VALIDATE:
+                    throw new ServerStartupError("Database not up-to-date, please enable database migration", false);
+            }
+        }
+    }
+
+    protected void update(Connection connection, String schema, KeycloakSession session, JpaUpdaterProvider updater) {
+        DBLockProvider dbLock = new DBLockManager(session).getDBLock();
+        if (dbLock.hasLock()) {
+            updater.update(connection, schema);
+        } else {
+            KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), new KeycloakSessionTask() {
+                @Override
+                public void run(KeycloakSession lockSession) {
+                    DBLockManager dbLockManager = new DBLockManager(lockSession);
+                    DBLockProvider dbLock2 = dbLockManager.getDBLock();
+                    dbLock2.waitForLock();
+                    try {
+                        updater.update(connection, schema);
+                    } finally {
+                        dbLock2.releaseLock();
+                    }
+                }
+            });
+        }
+    }
+
+    protected void export(Connection connection, String schema, File databaseUpdateFile, KeycloakSession session, JpaUpdaterProvider updater) {
+        DBLockProvider dbLock = new DBLockManager(session).getDBLock();
+        if (dbLock.hasLock()) {
+            updater.export(connection, schema, databaseUpdateFile);
+        } else {
+            KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), new KeycloakSessionTask() {
+                @Override
+                public void run(KeycloakSession lockSession) {
+                    DBLockManager dbLockManager = new DBLockManager(lockSession);
+                    DBLockProvider dbLock2 = dbLockManager.getDBLock();
+                    dbLock2.waitForLock();
+                    try {
+                        updater.export(connection, schema, databaseUpdateFile);
+                    } finally {
+                        dbLock2.releaseLock();
+                    }
+                }
+            });
+        }
+    }
 
     @Override
     public Connection getConnection() {
@@ -323,4 +340,18 @@ public class DefaultJpaConnectionProviderFactory implements JpaConnectionProvide
         return operationalInfo;
     }
 
+    private MigrationStrategy getMigrationStrategy() {
+        String migrationStrategy = config.get("migrationStrategy");
+        if (migrationStrategy == null) {
+            // Support 'databaseSchema' for backwards compatibility
+            migrationStrategy = config.get("databaseSchema");
+        }
+
+        if (migrationStrategy != null) {
+            return MigrationStrategy.valueOf(migrationStrategy.toUpperCase());
+        } else {
+            return MigrationStrategy.UPDATE;
+        }
+    }
+
 }
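Taken together, the JPA factory now asks the updater for the schema status (VALID, EMPTY or OUTDATED) and lets the configured strategy decide what happens next. The sketch below restates that decision logic as a standalone Java method so it can be read at a glance; the Action enum and the decide() method are illustrative names, not part of the commit.

    // Illustrative summary of DefaultJpaConnectionProviderFactory.migration():
    // status comes from JpaUpdaterProvider.validate(), strategy from getMigrationStrategy().
    class MigrationDecision {

        enum MigrationStrategy { UPDATE, VALIDATE, MANUAL }
        enum Status { VALID, EMPTY, OUTDATED }
        enum Action { NOTHING, UPDATE_SCHEMA, EXPORT_SQL_AND_STOP, FAIL_STARTUP }

        static Action decide(Status status, MigrationStrategy strategy, boolean initializeEmpty) {
            if (status == Status.VALID) {
                return Action.NOTHING;                    // schema already matches the changelog
            }
            if (status == Status.EMPTY && initializeEmpty) {
                return Action.UPDATE_SCHEMA;              // empty database is initialized directly
            }
            switch (strategy) {
                case UPDATE:
                    return Action.UPDATE_SCHEMA;          // apply changesets in place (under the DB lock)
                case MANUAL:
                    return Action.EXPORT_SQL_AND_STOP;    // write SQL to migrationExport, then throw ServerStartupError
                case VALIDATE:
                default:
                    return Action.FAIL_STARTUP;           // refuse to start until the schema is migrated
            }
        }

        public static void main(String[] args) {
            // An outdated schema with the manual strategy: export the SQL and stop.
            System.out.println(decide(Status.OUTDATED, MigrationStrategy.MANUAL, true));
        }
    }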
@@ -19,6 +19,7 @@ package org.keycloak.connections.jpa.updater;
 
 import org.keycloak.provider.Provider;
 
+import java.io.File;
 import java.sql.Connection;
 
 /**

@@ -26,10 +27,14 @@ import java.sql.Connection;
  */
 public interface JpaUpdaterProvider extends Provider {
 
-    public String FIRST_VERSION = "1.0.0.Final";
+    enum Status {
+        VALID, EMPTY, OUTDATED
+    }
 
-    public void update(Connection connection, String defaultSchema);
+    void update(Connection connection, String defaultSchema);
 
-    public void validate(Connection connection, String defaultSchema);
+    Status validate(Connection connection, String defaultSchema);
+
+    void export(Connection connection, String defaultSchema, File file);
 
 }
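The updater contract now separates three concerns: update() applies pending changes, validate() reports the schema state instead of throwing, and export() writes the pending SQL to a file without executing it. Below is a minimal, hypothetical implementation shape that honors the new return values; it is not the Liquibase provider from this commit and exists only to illustrate the contract.

    import java.io.File;
    import java.io.FileWriter;
    import java.io.IOException;
    import java.sql.Connection;

    // Hypothetical updater that tracks a single flag instead of a real schema.
    class ExampleJpaUpdater {

        enum Status { VALID, EMPTY, OUTDATED }

        private boolean schemaInitialized = false;

        public void update(Connection connection, String defaultSchema) {
            // A real provider runs its changesets here.
            schemaInitialized = true;
        }

        public Status validate(Connection connection, String defaultSchema) {
            // EMPTY: nothing exists yet; OUTDATED: schema exists but changesets are pending; VALID: up-to-date.
            return schemaInitialized ? Status.VALID : Status.EMPTY;
        }

        public void export(Connection connection, String defaultSchema, File file) {
            // Write the SQL that update() would have executed, leaving the database untouched.
            try (FileWriter writer = new FileWriter(file)) {
                writer.write("-- pending schema changes would be written here\n");
            } catch (IOException e) {
                throw new RuntimeException("Failed to write " + file.getAbsolutePath(), e);
            }
        }
    }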
@@ -30,6 +30,9 @@ import org.keycloak.connections.jpa.updater.liquibase.conn.LiquibaseConnectionPr
 import org.keycloak.connections.jpa.util.JpaUtils;
 import org.keycloak.models.KeycloakSession;
 
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
 import java.lang.reflect.Method;
 import java.sql.Connection;
 import java.util.List;

@@ -53,6 +56,15 @@ public class LiquibaseJpaUpdaterProvider implements JpaUpdaterProvider {
 
     @Override
     public void update(Connection connection, String defaultSchema) {
+        update(connection, null, defaultSchema);
+    }
+
+    @Override
+    public void export(Connection connection, String defaultSchema, File file) {
+        update(connection, file, defaultSchema);
+    }
+
+    private void update(Connection connection, File file, String defaultSchema) {
         logger.debug("Starting database update");
 
         // Need ThreadLocal as liquibase doesn't seem to have API to inject custom objects into tasks

@@ -61,7 +73,7 @@ public class LiquibaseJpaUpdaterProvider implements JpaUpdaterProvider {
         try {
             // Run update with keycloak master changelog first
             Liquibase liquibase = getLiquibaseForKeycloakUpdate(connection, defaultSchema);
-            updateChangeSet(liquibase, liquibase.getChangeLogFile());
+            updateChangeSet(liquibase, liquibase.getChangeLogFile(), file);
 
             // Run update for each custom JpaEntityProvider
             Set<JpaEntityProvider> jpaProviders = session.getAllProviders(JpaEntityProvider.class);

@@ -71,7 +83,7 @@ public class LiquibaseJpaUpdaterProvider implements JpaUpdaterProvider {
                     String factoryId = jpaProvider.getFactoryId();
                     String changelogTableName = JpaUtils.getCustomChangelogTableName(factoryId);
                     liquibase = getLiquibaseForCustomProviderUpdate(connection, defaultSchema, customChangelog, jpaProvider.getClass().getClassLoader(), changelogTableName);
-                    updateChangeSet(liquibase, liquibase.getChangeLogFile());
+                    updateChangeSet(liquibase, liquibase.getChangeLogFile(), file);
                 }
             }
         } catch (Exception e) {

@@ -81,7 +93,8 @@ public class LiquibaseJpaUpdaterProvider implements JpaUpdaterProvider {
         }
     }
 
-    protected void updateChangeSet(Liquibase liquibase, String changelog) throws LiquibaseException {
+    protected void updateChangeSet(Liquibase liquibase, String changelog, File exportFile) throws LiquibaseException, IOException {
         List<ChangeSet> changeSets = liquibase.listUnrunChangeSets((Contexts) null);
         if (!changeSets.isEmpty()) {
             List<RanChangeSet> ranChangeSets = liquibase.getDatabase().getRanChangeSetList();

@@ -95,7 +108,12 @@ public class LiquibaseJpaUpdaterProvider implements JpaUpdaterProvider {
                 }
             }
 
-            liquibase.update((Contexts) null);
+            if (exportFile != null) {
+                liquibase.update((Contexts) null, new FileWriter(exportFile));
+            } else {
+                liquibase.update((Contexts) null);
+            }
 
             logger.debugv("Completed database update for changelog {0}", changelog);
         } else {
             logger.debugv("Database is up to date for changelog {0}", changelog);

@@ -107,13 +125,18 @@ public class LiquibaseJpaUpdaterProvider implements JpaUpdaterProvider {
     }
 
     @Override
-    public void validate(Connection connection, String defaultSchema) {
+    public Status validate(Connection connection, String defaultSchema) {
         logger.debug("Validating if database is updated");
         ThreadLocalSessionContext.setCurrentSession(session);
 
         try {
             // Validate with keycloak master changelog first
             Liquibase liquibase = getLiquibaseForKeycloakUpdate(connection, defaultSchema);
-            validateChangeSet(liquibase, liquibase.getChangeLogFile());
+
+            Status status = validateChangeSet(liquibase, liquibase.getChangeLogFile());
+            if (status != Status.VALID) {
+                return status;
+            }
 
             // Validate each custom JpaEntityProvider
             Set<JpaEntityProvider> jpaProviders = session.getAllProviders(JpaEntityProvider.class);

@@ -123,24 +146,30 @@ public class LiquibaseJpaUpdaterProvider implements JpaUpdaterProvider {
                     String factoryId = jpaProvider.getFactoryId();
                     String changelogTableName = JpaUtils.getCustomChangelogTableName(factoryId);
                     liquibase = getLiquibaseForCustomProviderUpdate(connection, defaultSchema, customChangelog, jpaProvider.getClass().getClassLoader(), changelogTableName);
-                    validateChangeSet(liquibase, liquibase.getChangeLogFile());
+                    if (validateChangeSet(liquibase, liquibase.getChangeLogFile()) != Status.VALID) {
+                        return Status.OUTDATED;
+                    }
                 }
             }
 
         } catch (LiquibaseException e) {
             throw new RuntimeException("Failed to validate database", e);
         }
+
+        return Status.VALID;
     }
 
-    protected void validateChangeSet(Liquibase liquibase, String changelog) throws LiquibaseException {
+    protected Status validateChangeSet(Liquibase liquibase, String changelog) throws LiquibaseException {
         List<ChangeSet> changeSets = liquibase.listUnrunChangeSets((Contexts) null);
         if (!changeSets.isEmpty()) {
-            List<RanChangeSet> ranChangeSets = liquibase.getDatabase().getRanChangeSetList();
-            String errorMessage = String.format("Failed to validate database schema. Schema needs updating database from %s to %s. Please change databaseSchema to 'update' or use other database. Used changelog was %s",
-                    ranChangeSets.get(ranChangeSets.size() - 1).getId(), changeSets.get(changeSets.size() - 1).getId(), changelog);
-            throw new RuntimeException(errorMessage);
+            if (changeSets.size() == liquibase.getDatabaseChangeLog().getChangeSets().size()) {
+                return Status.EMPTY;
+            } else {
+                logger.debugf("Validation failed. Database is not up-to-date for changelog %s", changelog);
+                return Status.OUTDATED;
+            }
         } else {
             logger.debugf("Validation passed. Database is up-to-date for changelog %s", changelog);
+            return Status.VALID;
         }
     }
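The Liquibase implementation derives the status from how many changesets are still unrun: none pending means VALID, everything pending means the database has never been initialized (EMPTY), and anything in between means OUTDATED. A hedged restatement of that classification, with hypothetical parameter names:

    // Mirrors validateChangeSet(): compares pending changesets against the full changelog.
    static String classify(int unrunChangeSets, int totalChangeSets) {
        if (unrunChangeSets == 0) {
            return "VALID";     // every changeset has already been applied
        } else if (unrunChangeSets == totalChangeSets) {
            return "EMPTY";     // nothing has ever been applied; the schema does not exist yet
        } else {
            return "OUTDATED";  // partially applied; an upgrade (or manual export) is needed
        }
    }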
@@ -51,6 +51,10 @@ import com.mongodb.ServerAddress;
  */
 public class DefaultMongoConnectionFactoryProvider implements MongoConnectionProviderFactory, ServerInfoAwareProviderFactory {
 
+    enum MigrationStrategy {
+        UPDATE, VALIDATE
+    }
+
     // TODO Make it dynamic
     private String[] entities = new String[]{
             "org.keycloak.models.mongo.keycloak.entities.MongoRealmEntity",

@@ -165,46 +169,34 @@ public class DefaultMongoConnectionFactoryProvider implements MongoConnectionPro
     }
 
     private void update(KeycloakSession session) {
-        String databaseSchema = config.get("databaseSchema");
+        MigrationStrategy strategy = getMigrationStrategy();
 
-        if (databaseSchema == null) {
-            throw new RuntimeException("Property 'databaseSchema' needs to be specified in the configuration of mongo connections");
-        }
-
         MongoUpdaterProvider mongoUpdater = session.getProvider(MongoUpdaterProvider.class);
         if (mongoUpdater == null) {
             throw new RuntimeException("Can't update database: Mongo updater provider not found");
         }
 
         DBLockProvider dbLock = new DBLockManager(session).getDBLock();
         if (dbLock.hasLock()) {
-            updateOrValidateDB(databaseSchema, session, mongoUpdater);
+            updateOrValidateDB(strategy, session, mongoUpdater);
         } else {
             logger.trace("Don't have DBLock retrieved before upgrade. Needs to acquire lock first in separate transaction");
-
             KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), new KeycloakSessionTask() {
 
                 @Override
                 public void run(KeycloakSession lockSession) {
                     DBLockManager dbLockManager = new DBLockManager(lockSession);
                     DBLockProvider dbLock2 = dbLockManager.getDBLock();
                     dbLock2.waitForLock();
                     try {
-                        updateOrValidateDB(databaseSchema, session, mongoUpdater);
+                        updateOrValidateDB(strategy, session, mongoUpdater);
                     } finally {
                         dbLock2.releaseLock();
                     }
                 }
 
             });
         }
-
-        if (databaseSchema.equals("update")) {
-            mongoUpdater.update(session, db);
-        } else if (databaseSchema.equals("validate")) {
-            mongoUpdater.validate(session, db);
-        } else {
-            throw new RuntimeException("Invalid value for databaseSchema: " + databaseSchema);
-        }
     }

@@ -217,13 +209,14 @@ public class DefaultMongoConnectionFactoryProvider implements MongoConnectionPro
         return entityClasses;
     }
 
-    protected void updateOrValidateDB(String databaseSchema, KeycloakSession session, MongoUpdaterProvider mongoUpdater) {
-        if (databaseSchema.equals("update")) {
-            mongoUpdater.update(session, db);
-        } else if (databaseSchema.equals("validate")) {
-            mongoUpdater.validate(session, db);
-        } else {
-            throw new RuntimeException("Invalid value for databaseSchema: " + databaseSchema);
-        }
-    }
+    protected void updateOrValidateDB(MigrationStrategy strategy, KeycloakSession session, MongoUpdaterProvider mongoUpdater) {
+        switch (strategy) {
+            case UPDATE:
+                mongoUpdater.update(session, db);
+                break;
+            case VALIDATE:
+                mongoUpdater.validate(session, db);
+                break;
+        }
+    }

@@ -345,4 +338,18 @@ public class DefaultMongoConnectionFactoryProvider implements MongoConnectionPro
         return operationalInfo;
     }
 
+    private MigrationStrategy getMigrationStrategy() {
+        String migrationStrategy = config.get("migrationStrategy");
+        if (migrationStrategy == null) {
+            // Support 'databaseSchema' for backwards compatibility
+            migrationStrategy = config.get("databaseSchema");
+        }
+
+        if (migrationStrategy != null) {
+            return MigrationStrategy.valueOf(migrationStrategy.toUpperCase());
+        } else {
+            return MigrationStrategy.UPDATE;
+        }
+    }
+
 }
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2016 Red Hat, Inc. and/or its affiliates
+ * and other contributors as indicated by the @author tags.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.keycloak;
+
+/**
+ * Non-recoverable error thrown during server startup
+ *
+ * @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
+ */
+public class ServerStartupError extends Error {
+
+    private final boolean fillStackTrace;
+
+    public ServerStartupError(String message) {
+        super(message);
+        fillStackTrace = true;
+    }
+
+    public ServerStartupError(String message, boolean fillStackTrace) {
+        super(message);
+        this.fillStackTrace = fillStackTrace;
+    }
+
+    @Override
+    public synchronized Throwable fillInStackTrace() {
+        if (fillStackTrace) {
+            return super.fillInStackTrace();
+        } else {
+            return this;
+        }
+    }
+
+}
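ServerStartupError extends Error rather than Exception, so it is not swallowed by catch (Exception ...) blocks on the startup path, and the second constructor argument lets callers suppress the stack trace for failures that are expected and operator-actionable, such as a pending manual migration. A small usage sketch; the StartupCheck class, checkSchema() method and its message are made up for illustration:

    import org.keycloak.ServerStartupError;

    // Throwing with fillStackTrace=false keeps the log to a single, actionable message.
    class StartupCheck {

        static void checkSchema(boolean schemaUpToDate, java.io.File exportFile) {
            if (!schemaUpToDate) {
                throw new ServerStartupError(
                        "Database not up-to-date, please migrate database with " + exportFile.getAbsolutePath(),
                        false); // no stack trace: the cause is a known operational condition
            }
        }

        public static void main(String[] args) {
            checkSchema(false, new java.io.File("keycloak-database-update.sql"));
        }
    }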
@@ -72,69 +72,73 @@ public class KeycloakApplication extends Application {
     protected String contextPath;
 
     public KeycloakApplication(@Context ServletContext context, @Context Dispatcher dispatcher) {
+        try {
            loadConfig();

            this.contextPath = context.getContextPath();
            this.sessionFactory = createSessionFactory();

            dispatcher.getDefaultContextObjects().put(KeycloakApplication.class, this);
            ResteasyProviderFactory.pushContext(KeycloakApplication.class, this); // for injection
            context.setAttribute(KeycloakSessionFactory.class.getName(), this.sessionFactory);

            singletons.add(new ServerVersionResource());
            singletons.add(new RobotsResource());
            singletons.add(new RealmsResource());
            singletons.add(new AdminRoot());
            classes.add(ThemeResource.class);
            classes.add(JsResource.class);

            classes.add(KeycloakTransactionCommitter.class);

            singletons.add(new ObjectMapperResolver(Boolean.parseBoolean(System.getProperty("keycloak.jsonPrettyPrint", "false"))));

            ExportImportManager[] exportImportManager = new ExportImportManager[1];

            KeycloakModelUtils.runJobInTransaction(sessionFactory, new KeycloakSessionTask() {

                @Override
                public void run(KeycloakSession lockSession) {
                    DBLockManager dbLockManager = new DBLockManager(lockSession);
                    dbLockManager.checkForcedUnlock();
                    DBLockProvider dbLock = dbLockManager.getDBLock();
                    dbLock.waitForLock();
                    try {
                        exportImportManager[0] = migrateAndBootstrap();
                    } finally {
                        dbLock.releaseLock();
                    }
                }

            });

            if (exportImportManager[0].isRunExport()) {
                exportImportManager[0].runExport();
            }

            boolean bootstrapAdminUser = false;
            KeycloakSession session = sessionFactory.create();
            try {
                session.getTransactionManager().begin();
                bootstrapAdminUser = new ApplianceBootstrap(session).isNoMasterUser();

                session.getTransactionManager().commit();
            } finally {
                session.close();
            }

            sessionFactory.publish(new PostMigrationEvent());

            singletons.add(new WelcomeResource(bootstrapAdminUser));

            setupScheduledTasks(sessionFactory);
+        } catch (Throwable t) {
+            exit(1);
+            throw t;
+        }
     }
 
     // Migrate model, bootstrap master realm, import realms and create admin user. This is done with acquired dbLock
     protected ExportImportManager migrateAndBootstrap() {
         ExportImportManager exportImportManager;
@@ -185,7 +189,6 @@ public class KeycloakApplication extends Application {
             session.getTransactionManager().commit();
         } catch (Exception e) {
             session.getTransactionManager().rollback();
-            logger.migrationFailure(e);
             throw e;
         } finally {
             session.close();
@@ -386,4 +389,13 @@ public class KeycloakApplication extends Application {
         }
     }
 
+    private void exit(int status) {
+        new Thread() {
+            @Override
+            public void run() {
+                System.exit(status);
+            }
+        }.start();
+    }
+
 }
@@ -90,7 +90,8 @@
     "driverDialect": "${keycloak.connectionsJpa.driverDialect:}",
     "user": "${keycloak.connectionsJpa.user:sa}",
     "password": "${keycloak.connectionsJpa.password:}",
-    "databaseSchema": "${keycloak.connectionsJpa.databaseSchema:update}",
+    "initializeEmpty": true,
+    "migrationStrategy": "update",
     "showSql": "${keycloak.connectionsJpa.showSql:false}",
     "formatSql": "${keycloak.connectionsJpa.formatSql:true}",
     "globalStatsInterval": "${keycloak.connectionsJpa.globalStatsInterval:-1}"

@@ -65,7 +65,8 @@
     "driverDialect": "${keycloak.connectionsJpa.driverDialect:}",
     "user": "${keycloak.connectionsJpa.user:sa}",
     "password": "${keycloak.connectionsJpa.password:}",
-    "databaseSchema": "${keycloak.connectionsJpa.databaseSchema:update}",
+    "initializeEmpty": true,
+    "migrationStrategy": "update",
     "showSql": "${keycloak.connectionsJpa.showSql:false}",
     "formatSql": "${keycloak.connectionsJpa.formatSql:true}",
     "globalStatsInterval": "${keycloak.connectionsJpa.globalStatsInterval:-1}"