File store basis
Fixes: #16676.

Squashed commit containing the following changes: (1) Enhance DefaultModelCriteria; (2) Fix collection handling; (3) Fix delete in CHMKeycloakTransaction; (4) Add HasRealmId interface; (5) Fix EntityFieldDelegate; (6) Support for realm-less entities in providers; (7) Support for realm-less entities in providers (events); (8) File store basis; (9) Add support for writing; (10) Support running KeycloakServer with file store; (11) Add support for file store in model testsuite.

Co-authored-by: vramik <vramik@redhat.com>
This commit is contained in:
parent
4782a85166
commit
f71ab092de
57 changed files with 3447 additions and 625 deletions
|
@ -531,6 +531,14 @@ public class GenerateEntityImplementationsProcessor extends AbstractGenerateEnti
|
|||
pw.println(" return this.entityFieldDelegate;");
|
||||
pw.println(" }");
|
||||
|
||||
pw.println(" @Override public boolean isUpdated() {");
|
||||
pw.println(" return entityFieldDelegate.isUpdated();");
|
||||
pw.println(" }");
|
||||
|
||||
pw.println(" @Override public void clearUpdatedFlag() {");
|
||||
pw.println(" entityFieldDelegate.clearUpdatedFlag();");
|
||||
pw.println(" }");
|
||||
|
||||
getAllAbstractMethods(e)
|
||||
.forEach(ee -> {
|
||||
String originalField = m2field.get(ee);
|
||||
|
@ -540,16 +548,8 @@ public class GenerateEntityImplementationsProcessor extends AbstractGenerateEnti
|
|||
TypeMirror fieldType = determineFieldType(originalField, methodsPerAttribute.get(originalField));
|
||||
String field = fieldsClassName + "." + toEnumConstant(originalField);
|
||||
|
||||
// Dispatch on the abstract method being implemented: the two UpdatableEntity
// methods delegate straight to the field delegate; everything else gets a
// generated accessor body.
if (ee.getReturnType().getKind() == TypeKind.BOOLEAN && "isUpdated".equals(ee.getSimpleName().toString())) {
    pw.println(" return entityFieldDelegate.isUpdated();");
    pw.println(" }");
} else if (ee.getReturnType().getKind() == TypeKind.VOID && "clearUpdatedFlag".equals(ee.getSimpleName().toString())) {
    // Fix: the generated method is declared void, so emitting
    // "return entityFieldDelegate.clearUpdatedFlag();" would produce
    // uncompilable generated source (JLS §14.17 forbids "return <expr>;"
    // in a void method). Emit a bare call instead.
    pw.println(" entityFieldDelegate.clearUpdatedFlag();");
    pw.println(" }");
} else {
    FieldAccessorType fat = FieldAccessorType.determineType(ee, originalField, types, fieldType);
    printMethodBody(pw, fat, ee, field, fieldType);
}
|
||||
FieldAccessorType fat = FieldAccessorType.determineType(ee, originalField, types, fieldType);
|
||||
printMethodBody(pw, fat, ee, field, fieldType);
|
||||
});
|
||||
|
||||
autogenerated.add(" ENTITY_FIELD_DELEGATE_CREATORS.put(" + className + ".class, (EntityFieldDelegateCreator<" + className + ">) " + mapClassName + "::new);");
|
||||
|
@ -649,11 +649,7 @@ public class GenerateEntityImplementationsProcessor extends AbstractGenerateEnti
|
|||
|
||||
getAllAbstractMethods(e)
|
||||
.forEach(ee -> {
|
||||
pw.println(" @Override "
|
||||
+ ee.getModifiers().stream().filter(m -> m != Modifier.ABSTRACT).map(Object::toString).collect(Collectors.joining(" "))
|
||||
+ " " + ee.getReturnType()
|
||||
+ " " + ee.getSimpleName()
|
||||
+ "(" + methodParameters(ee.getParameters()) + ") {");
|
||||
printMethodHeader(pw, ee);
|
||||
String field = m2field.get(ee);
|
||||
field = field == null ? "null" : fieldsClassName + "." + toEnumConstant(field);
|
||||
if (ee.getReturnType().getKind() == TypeKind.BOOLEAN && "isUpdated".equals(ee.getSimpleName().toString())) {
|
||||
|
@ -681,6 +677,14 @@ public class GenerateEntityImplementationsProcessor extends AbstractGenerateEnti
|
|||
}
|
||||
}
|
||||
|
||||
protected void printMethodHeader(final PrintWriter pw, ExecutableElement ee) {
|
||||
pw.println(" @Override "
|
||||
+ ee.getModifiers().stream().filter(m -> m != Modifier.ABSTRACT).map(Object::toString).collect(Collectors.joining(" "))
|
||||
+ " " + ee.getReturnType()
|
||||
+ " " + ee.getSimpleName()
|
||||
+ "(" + methodParameters(ee.getParameters()) + ") {");
|
||||
}
|
||||
|
||||
private class ClonerGenerator implements Generator {
|
||||
|
||||
@Override
|
||||
|
|
|
@ -18,6 +18,7 @@ package org.keycloak.models.map.processor;
|
|||
|
||||
import org.keycloak.models.map.annotations.IgnoreForEntityImplementationGenerator;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashSet;
|
||||
|
@ -46,7 +47,7 @@ import javax.lang.model.util.SimpleTypeVisitor8;
|
|||
public class Util {
|
||||
|
||||
private static final Set<String> LIST_TYPES = Set.of(List.class.getCanonicalName(), ArrayList.class.getCanonicalName(), LinkedList.class.getCanonicalName());
|
||||
private static final Set<String> SET_TYPES = Set.of(Set.class.getCanonicalName(), TreeSet.class.getCanonicalName(), HashSet.class.getCanonicalName(), LinkedHashSet.class.getCanonicalName());
|
||||
private static final Set<String> SET_TYPES = Set.of(Set.class.getCanonicalName(), TreeSet.class.getCanonicalName(), HashSet.class.getCanonicalName(), LinkedHashSet.class.getCanonicalName(), Collection.class.getCanonicalName());
|
||||
private static final Set<String> MAP_TYPES = Set.of(Map.class.getCanonicalName(), HashMap.class.getCanonicalName());
|
||||
|
||||
public static List<TypeMirror> getGenericsDeclaration(TypeMirror fieldType) {
|
||||
|
|
|
@ -28,15 +28,22 @@
|
|||
|
||||
<artifactId>keycloak-model-map-file</artifactId>
|
||||
<name>Keycloak Model Map File</name>
|
||||
|
||||
<properties>
|
||||
<maven.compiler.release>11</maven.compiler.release>
|
||||
<maven.compiler.source>11</maven.compiler.source>
|
||||
<maven.compiler.target>11</maven.compiler.target>
|
||||
</properties>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.keycloak</groupId>
|
||||
<artifactId>keycloak-model-map</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<scope>test</scope>
|
||||
<groupId>org.snakeyaml</groupId>
|
||||
<artifactId>snakeyaml-engine</artifactId>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</project>
|
|
@ -0,0 +1,44 @@
|
|||
/*
|
||||
* Copyright 2023 Red Hat, Inc. and/or its affiliates
|
||||
* and other contributors as indicated by the @author tags.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.keycloak.models.map.storage.file;
|
||||
|
||||
import org.keycloak.models.map.storage.criteria.DescriptiveModelCriteria;
|
||||
import org.keycloak.models.map.storage.criteria.ModelCriteriaNode;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author hmlnarik
|
||||
*/
|
||||
public class FileCriteriaBuilder<M> extends DescriptiveModelCriteria<M, FileCriteriaBuilder<M>> {
|
||||
|
||||
private static final FileCriteriaBuilder<?> INSTANCE = new FileCriteriaBuilder<>(null);
|
||||
|
||||
private FileCriteriaBuilder(ModelCriteriaNode<M> node) {
|
||||
super(node);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public static <M> FileCriteriaBuilder<M> criteria() {
|
||||
return (FileCriteriaBuilder<M>) INSTANCE;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected FileCriteriaBuilder<M> instantiateForNode(ModelCriteriaNode<M> targetNode) {
|
||||
return new FileCriteriaBuilder<>(targetNode);
|
||||
}
|
||||
|
||||
}
|
|
@ -1,95 +0,0 @@
|
|||
/*
|
||||
* Copyright 2022 Red Hat, Inc. and/or its affiliates
|
||||
* and other contributors as indicated by the @author tags.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.keycloak.models.map.storage.file;
|
||||
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import org.keycloak.models.map.common.AbstractEntity;
|
||||
import org.keycloak.models.map.storage.MapKeycloakTransaction;
|
||||
import org.keycloak.models.map.storage.QueryParameters;
|
||||
|
||||
/**
|
||||
* {@link MapKeycloakTransaction} implementation used with the file map storage.
|
||||
*
|
||||
* @author <a href="mailto:sguilhen@redhat.com">Stefan Guilhen</a>
|
||||
*/
|
||||
public class FileKeycloakTransaction<V extends AbstractEntity, M> implements MapKeycloakTransaction<V, M> {
|
||||
|
||||
private boolean active;
|
||||
private boolean rollback;
|
||||
|
||||
@Override
|
||||
public V create(V value) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public V read(String key) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Stream<V> read(QueryParameters<M> queryParameters) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getCount(QueryParameters<M> queryParameters) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean delete(String key) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long delete(QueryParameters<M> queryParameters) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void begin() {
|
||||
active = true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void commit() {
|
||||
if (rollback) {
|
||||
throw new RuntimeException("Rollback only!");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void rollback() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setRollbackOnly() {
|
||||
rollback = true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean getRollbackOnly() {
|
||||
return rollback;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isActive() {
|
||||
return active;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,153 @@
|
|||
/*
 * Copyright 2023 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.keycloak.models.map.storage.file;


import org.keycloak.models.map.common.AbstractEntity;
import org.keycloak.models.map.common.DeepCloner;
import org.keycloak.models.map.common.StringKeyConverter;
import org.keycloak.models.map.common.StringKeyConverter.StringKey;
import org.keycloak.models.map.common.UpdatableEntity;
import org.keycloak.models.map.storage.MapKeycloakTransaction;
import org.keycloak.models.map.storage.ModelEntityUtil;
import org.keycloak.models.map.storage.chm.ConcurrentHashMapKeycloakTransaction;
import org.keycloak.models.map.storage.chm.MapFieldPredicates;
import org.keycloak.models.map.storage.chm.MapModelCriteriaBuilder.UpdatePredicatesFunc;
import org.keycloak.storage.SearchableModelField;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.IdentityHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

/**
 * {@link MapKeycloakTransaction} implementation used with the file map storage.
 * <p>
 * Writes are staged as hidden temp files ({@code .<txId>-<name>}) which are
 * atomically moved into place on {@link #commit()} and deleted on
 * {@link #rollback()}.
 *
 * @author <a href="mailto:sguilhen@redhat.com">Stefan Guilhen</a>
 */
public class FileMapKeycloakTransaction<V extends AbstractEntity & UpdatableEntity, M>
        extends ConcurrentHashMapKeycloakTransaction<String, V, M> {

    // Files created within this transaction (both placeholders from touch()
    // and staged temp files); cleaned up on rollback.
    private final List<Path> pathsToDelete = new LinkedList<>();
    // Staged temp file -> final destination; moves are performed on commit.
    // NOTE(review): IdentityHashMap keys Paths by identity, not equals() —
    // two equal-but-distinct Path instances would not collide. Presumably
    // intentional (each staged write creates its own Path); confirm.
    private final Map<Path, Path> renameOnCommit = new IdentityHashMap<>();

    // Unique ID used to derive collision-free temp file names.
    private final String txId = StringKey.INSTANCE.yieldNewUniqueKey();

    /**
     * Creates a transaction wired to a fresh {@link Crud} instance (the two
     * reference each other, hence the factory method instead of a constructor).
     */
    public static <V extends AbstractEntity & UpdatableEntity, M> FileMapKeycloakTransaction<V, M> newInstance(Class<V> entityClass,
      Function<String, Path> dataDirectoryFunc, Function<V, String[]> suggestedPath,
      boolean isExpirableEntity, Map<SearchableModelField<? super M>, UpdatePredicatesFunc<String, V, M>> fieldPredicates) {
        Crud<V, M> crud = new Crud<>(entityClass, dataDirectoryFunc, suggestedPath, isExpirableEntity, fieldPredicates);
        FileMapKeycloakTransaction<V, M> tx = new FileMapKeycloakTransaction<>(entityClass, crud);
        crud.tx = tx;
        return tx;
    }

    private FileMapKeycloakTransaction(Class<V> entityClass, Crud<V, M> crud) {
        super(
          crud,
          StringKeyConverter.StringKey.INSTANCE,
          DeepCloner.DUMB_CLONER,
          MapFieldPredicates.getPredicates(ModelEntityUtil.getModelType(entityClass)),
          ModelEntityUtil.getRealmIdField(entityClass)
        );
    }

    @Override
    public void rollback() {
        // Discard staged temp files and any files created within this tx.
        this.renameOnCommit.keySet().forEach(FileMapKeycloakTransaction::silentDelete);
        this.pathsToDelete.forEach(FileMapKeycloakTransaction::silentDelete);
        super.rollback();
    }

    @Override
    public void commit() {
        super.commit();
        // Moves run first; the subsequent deletes of already-moved sources are
        // harmless no-ops since silentDelete checks existence.
        this.renameOnCommit.forEach(FileMapKeycloakTransaction::silentMove);
        this.pathsToDelete.forEach(FileMapKeycloakTransaction::silentDelete);
    }

    private static void silentMove(Path from, Path to) {
        try {
            Files.move(from, to, StandardCopyOption.REPLACE_EXISTING);
        } catch (IOException ex) {
            throw new UncheckedIOException(ex);
        }
    }

    private static void silentDelete(Path p) {
        try {
            if (Files.exists(p)) {
                Files.delete(p);
            }
        } catch (IOException e) {
            // Swallow the exception — best-effort cleanup
        }
    }

    /**
     * Creates an empty placeholder file, registering it for deletion on rollback.
     *
     * @throws FileAlreadyExistsException if the file already exists
     */
    public void touch(Path path) throws FileAlreadyExistsException, IOException {
        Files.createFile(path);
        pathsToDelete.add(path);
    }

    /**
     * Schedules {@code path} for removal at transaction end.
     *
     * @return {@code true} if the file existed and was not already scheduled
     */
    public boolean removeIfExists(Path path) {
        final boolean res = ! pathsToDelete.contains(path) && Files.exists(path);
        pathsToDelete.add(path);
        return res;
    }

    void registerRenameOnCommit(Path from, Path to) {
        this.renameOnCommit.put(from, to);
        // The destination is being (re)written, so it must no longer be deleted;
        // the temp source is deleted if the tx rolls back.
        this.pathsToDelete.remove(to);
        this.pathsToDelete.add(from);
    }

    /**
     * CRUD operations that delegate all file-lifecycle bookkeeping to the
     * owning transaction.
     */
    private static class Crud<V extends AbstractEntity & UpdatableEntity, M> extends FileMapStorage.Crud<V, M> {

        // Fix: was a raw FileMapKeycloakTransaction — parameterize to keep
        // generic type information and avoid unchecked warnings.
        private FileMapKeycloakTransaction<V, M> tx;

        public Crud(Class<V> entityClass, Function<String, Path> dataDirectoryFunc, Function<V, String[]> suggestedPath, boolean isExpirableEntity, Map<SearchableModelField<? super M>, UpdatePredicatesFunc<String, V, M>> fieldPredicates) {
            super(entityClass, dataDirectoryFunc, suggestedPath, isExpirableEntity, fieldPredicates);
        }

        @Override
        protected void touch(Path sp) throws IOException {
            tx.touch(sp);
        }

        @Override
        protected void registerRenameOnCommit(Path from, Path to) {
            tx.registerRenameOnCommit(from, to);
        }

        @Override
        protected boolean removeIfExists(Path sp) {
            return tx.removeIfExists(sp);
        }

        @Override
        protected String getTxId() {
            return tx.txId;
        }

    }
}
|
|
@ -1,5 +1,5 @@
|
|||
/*
|
||||
* Copyright 2022 Red Hat, Inc. and/or its affiliates
|
||||
* Copyright 2023 Red Hat, Inc. and/or its affiliates
|
||||
* and other contributors as indicated by the @author tags.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
|
@ -16,20 +16,398 @@
|
|||
*/
|
||||
package org.keycloak.models.map.storage.file;
|
||||
|
||||
import org.keycloak.models.ClientModel;
|
||||
import org.keycloak.models.KeycloakSession;
|
||||
import org.keycloak.models.ModelDuplicateException;
|
||||
import org.keycloak.models.map.common.AbstractEntity;
|
||||
import org.keycloak.models.map.common.ExpirableEntity;
|
||||
import org.keycloak.models.map.common.ExpirationUtils;
|
||||
import org.keycloak.models.map.common.HasRealmId;
|
||||
import org.keycloak.models.map.common.StringKeyConverter.StringKey;
|
||||
import org.keycloak.models.map.realm.MapRealmEntity;
|
||||
import org.keycloak.models.map.common.UpdatableEntity;
|
||||
import org.keycloak.models.map.storage.MapKeycloakTransaction;
|
||||
import org.keycloak.models.map.storage.MapStorage;
|
||||
import org.keycloak.models.map.storage.ModelEntityUtil;
|
||||
import org.keycloak.models.map.storage.QueryParameters;
|
||||
import org.keycloak.models.map.storage.chm.ConcurrentHashMapCrudOperations;
|
||||
import org.keycloak.models.map.storage.chm.MapFieldPredicates;
|
||||
import org.keycloak.models.map.storage.chm.MapModelCriteriaBuilder;
|
||||
import org.keycloak.models.map.storage.chm.MapModelCriteriaBuilder.UpdatePredicatesFunc;
|
||||
import org.keycloak.models.map.storage.file.yaml.YamlParser;
|
||||
import org.keycloak.models.map.storage.file.common.MapEntityContext;
|
||||
import org.keycloak.models.map.storage.file.yaml.PathWriter;
|
||||
import org.keycloak.models.map.storage.file.yaml.YamlWritingMechanism;
|
||||
import org.keycloak.storage.SearchableModelField;
|
||||
import java.io.IOException;
|
||||
import java.io.UncheckedIOException;
|
||||
import java.nio.file.FileAlreadyExistsException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.IdentityHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
import org.jboss.logging.Logger;
|
||||
import org.snakeyaml.engine.v2.api.DumpSettings;
|
||||
import org.snakeyaml.engine.v2.emitter.Emitter;
|
||||
import static org.keycloak.utils.StreamsUtil.paginatedStream;
|
||||
|
||||
/**
|
||||
* A file-based {@link MapStorage}.
|
||||
*
|
||||
* @author <a href="mailto:sguilhen@redhat.com">Stefan Guilhen</a>
|
||||
*/
|
||||
public class FileMapStorage<V extends AbstractEntity, M> implements MapStorage<V, M> {
|
||||
public class FileMapStorage<V extends AbstractEntity & UpdatableEntity, M> implements MapStorage<V, M> {
|
||||
|
||||
private static final Logger LOG = Logger.getLogger(FileMapStorage.class);
|
||||
|
||||
// any REALM_ID field would do, they share the same name
|
||||
private static final String SEARCHABLE_FIELD_REALM_ID_FIELD_NAME = ClientModel.SearchableFields.REALM_ID.getName();
|
||||
private static final String FILE_SUFFIX = ".yaml";
|
||||
|
||||
private final static DumpSettings DUMP_SETTINGS = DumpSettings.builder()
|
||||
.setIndent(4)
|
||||
.setIndicatorIndent(2)
|
||||
.setIndentWithIndicator(false)
|
||||
.build();
|
||||
|
||||
private final Class<V> entityClass;
|
||||
private final Function<String, Path> dataDirectoryFunc;
|
||||
private final Function<V, String[]> suggestedPath;
|
||||
private final boolean isExpirableEntity;
|
||||
private final Map<SearchableModelField<? super M>, UpdatePredicatesFunc<String, V, M>> fieldPredicates;
|
||||
|
||||
// TODO: Add auxiliary directory for indices, locks etc.
|
||||
// private final String auxiliaryFilesDirectory;
|
||||
|
||||
public FileMapStorage(Class<V> entityClass, Function<V, String[]> uniqueHumanReadableField, Function<String, Path> dataDirectoryFunc) {
|
||||
this.entityClass = entityClass;
|
||||
this.fieldPredicates = new IdentityHashMap<>(MapFieldPredicates.getPredicates(ModelEntityUtil.getModelType(entityClass)));
|
||||
this.fieldPredicates.keySet().stream() // Ignore realmId since this is treated in reading differently
|
||||
.filter(f -> Objects.equals(SEARCHABLE_FIELD_REALM_ID_FIELD_NAME, f.getName()))
|
||||
.findAny()
|
||||
.ifPresent(key -> this.fieldPredicates.replace(key, (builder, op, params) -> builder));
|
||||
this.dataDirectoryFunc = dataDirectoryFunc;
|
||||
this.suggestedPath = uniqueHumanReadableField == null ? v -> v.getId() == null ? null : new String[] { v.getId() } : uniqueHumanReadableField;
|
||||
this.isExpirableEntity = ExpirableEntity.class.isAssignableFrom(entityClass);
|
||||
}
|
||||
|
||||
@Override
|
||||
public MapKeycloakTransaction<V, M> createTransaction(KeycloakSession session) {
|
||||
return new FileKeycloakTransaction();
|
||||
@SuppressWarnings("unchecked")
|
||||
MapKeycloakTransaction<V, M> sessionTransaction = session.getAttribute("file-map-transaction-" + hashCode(), MapKeycloakTransaction.class);
|
||||
|
||||
if (sessionTransaction == null) {
|
||||
sessionTransaction = createTransactionInternal(session);
|
||||
session.setAttribute("file-map-transaction-" + hashCode(), sessionTransaction);
|
||||
}
|
||||
return sessionTransaction;
|
||||
}
|
||||
|
||||
public FileMapKeycloakTransaction<V, M> createTransactionInternal(KeycloakSession session) {
|
||||
return FileMapKeycloakTransaction.newInstance(entityClass, dataDirectoryFunc, suggestedPath, isExpirableEntity, fieldPredicates);
|
||||
}
|
||||
|
||||
private static boolean canParseFile(Path p) {
|
||||
final String fn = p.getFileName().toString();
|
||||
try {
|
||||
return Files.isRegularFile(p)
|
||||
&& Files.size(p) > 0L
|
||||
&& ! fn.startsWith(".")
|
||||
&& fn.endsWith(FILE_SUFFIX)
|
||||
&& Files.isReadable(p);
|
||||
} catch (IOException ex) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public static abstract class Crud<V extends AbstractEntity & UpdatableEntity, M> implements ConcurrentHashMapCrudOperations<V, M>, HasRealmId {
|
||||
|
||||
private String defaultRealmId;
|
||||
private final Class<V> entityClass;
|
||||
private final Function<String, Path> dataDirectoryFunc;
|
||||
private final Function<V, String[]> suggestedPath;
|
||||
private final boolean isExpirableEntity;
|
||||
private final Map<SearchableModelField<? super M>, UpdatePredicatesFunc<String, V, M>> fieldPredicates;
|
||||
|
||||
public Crud(Class<V> entityClass, Function<String, Path> dataDirectoryFunc, Function<V, String[]> suggestedPath, boolean isExpirableEntity, Map<SearchableModelField<? super M>, UpdatePredicatesFunc<String, V, M>> fieldPredicates) {
|
||||
this.entityClass = entityClass;
|
||||
this.dataDirectoryFunc = dataDirectoryFunc;
|
||||
this.suggestedPath = suggestedPath;
|
||||
this.isExpirableEntity = isExpirableEntity;
|
||||
|
||||
this.fieldPredicates = new IdentityHashMap<>(fieldPredicates);
|
||||
this.fieldPredicates.keySet().stream() // Ignore realmId since this is treated in reading differently
|
||||
.filter(f -> Objects.equals(SEARCHABLE_FIELD_REALM_ID_FIELD_NAME, f.getName()))
|
||||
.findAny()
|
||||
.ifPresent(key -> this.fieldPredicates.replace(key, (builder, op, params) -> builder));
|
||||
}
|
||||
|
||||
protected Path getPathForSanitizedId(Path sanitizedIdPath) {
|
||||
final Path dataDirectory = getDataDirectory();
|
||||
final Path dataDirectoryWithChildren = dataDirectory.resolve(sanitizedIdPath).getParent();
|
||||
|
||||
if (! Files.isDirectory(dataDirectoryWithChildren)) {
|
||||
try {
|
||||
Files.createDirectories(dataDirectoryWithChildren);
|
||||
} catch (IOException ex) {
|
||||
throw new IllegalStateException("Directory does not exist and cannot be created: " + dataDirectory, ex);
|
||||
}
|
||||
}
|
||||
return dataDirectoryWithChildren.resolve(sanitizedIdPath.getFileName() + FILE_SUFFIX);
|
||||
}
|
||||
|
||||
protected Path getPathForSanitizedId(String sanitizedId) {
|
||||
if (sanitizedId == null) {
|
||||
throw new IllegalStateException("Invalid ID to sanitize");
|
||||
}
|
||||
|
||||
return getPathForSanitizedId(Path.of(sanitizedId));
|
||||
}
|
||||
|
||||
protected String sanitizeId(String id) {
|
||||
Objects.requireNonNull(id, "ID must be non-null");
|
||||
|
||||
// TODO: sanitize
|
||||
// id = id
|
||||
// .replaceAll("=", "=e")
|
||||
// .replaceAll(":", "=c")
|
||||
// .replaceAll("/", "=s")
|
||||
// .replaceAll("\\\\", "=b")
|
||||
// ;
|
||||
final Path pId = Path.of(id);
|
||||
|
||||
// Do not allow absolute paths
|
||||
if (pId.isAbsolute()) {
|
||||
throw new IllegalStateException("Illegal ID requested: " + id);
|
||||
}
|
||||
|
||||
return id;
|
||||
}
|
||||
|
||||
protected String desanitizeId(String sanitizedId) {
|
||||
if (sanitizedId == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return sanitizedId
|
||||
.replaceAll("=c", ":")
|
||||
.replaceAll("=s", "/")
|
||||
.replaceAll("=b", "\\\\")
|
||||
.replaceAll("=e", "=")
|
||||
;
|
||||
|
||||
}
|
||||
|
||||
protected V parse(Path fileName) {
|
||||
final V parsedObject = YamlParser.parse(fileName, new MapEntityContext<>(entityClass));
|
||||
if (parsedObject == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
final String fileNameStr = fileName.getFileName().toString();
|
||||
String id = determineKeyFromValue(parsedObject, false);
|
||||
final String desanitizedId = desanitizeId(fileNameStr.substring(0, fileNameStr.length() - FILE_SUFFIX.length()));
|
||||
if (id == null) {
|
||||
LOG.debugf("Determined ID from filename: %s", desanitizedId);
|
||||
id = desanitizedId;
|
||||
} else if (! id.endsWith(desanitizedId)) {
|
||||
LOG.warnf("Filename \"%s\" does not end with expected id \"%s\". Fix the file name.", fileNameStr, id);
|
||||
}
|
||||
|
||||
parsedObject.setId(id);
|
||||
parsedObject.clearUpdatedFlag();
|
||||
|
||||
return parsedObject;
|
||||
}
|
||||
|
||||
@Override
|
||||
public V create(V value) {
|
||||
// TODO: Lock realm directory for changes (e.g. on realm deletion)
|
||||
// TODO: Sanitize ID
|
||||
String sanitizedId = sanitizeId(value.getId());
|
||||
|
||||
writeYamlContents(getPathForSanitizedId(sanitizedId), value);
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String determineKeyFromValue(V value, boolean forCreate) {
|
||||
final boolean randomId;
|
||||
String[] proposedId = suggestedPath.apply(value);
|
||||
|
||||
if (! forCreate) {
|
||||
return proposedId == null ? null : String.join("/", proposedId);
|
||||
}
|
||||
|
||||
if (proposedId == null || proposedId.length == 0) {
|
||||
randomId = value.getId() == null;
|
||||
proposedId = new String[] { value.getId() == null ? StringKey.INSTANCE.yieldNewUniqueKey() : value.getId() };
|
||||
} else {
|
||||
randomId = false;
|
||||
}
|
||||
|
||||
Path sanitizedId = Path.of(
|
||||
sanitizeId(proposedId[0]),
|
||||
Stream.of(proposedId).skip(1).map(this::sanitizeId).toArray(String[]::new)
|
||||
);
|
||||
|
||||
Path sp = getPathForSanitizedId(sanitizedId);
|
||||
for (int counter = 0; counter < 100; counter++) {
|
||||
LOG.tracef("Attempting to create file %s", sp);
|
||||
try {
|
||||
touch(sp);
|
||||
return String.join("/", proposedId);
|
||||
} catch (FileAlreadyExistsException ex) {
|
||||
if (! randomId) {
|
||||
throw new ModelDuplicateException("File " + sp + " already exists!");
|
||||
}
|
||||
final String lastComponent = StringKey.INSTANCE.yieldNewUniqueKey();
|
||||
proposedId[proposedId.length - 1] = lastComponent;
|
||||
sanitizedId = sanitizedId.resolveSibling(sanitizeId(lastComponent));
|
||||
sp = getPathForSanitizedId(sanitizedId);
|
||||
} catch (IOException ex) {
|
||||
throw new IllegalStateException("Could not create file " + sp, ex);
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public V read(String key) {
|
||||
return Optional.ofNullable(sanitizeId(key))
|
||||
.map(this::getPathForSanitizedId)
|
||||
.filter(Files::isReadable)
|
||||
.map(this::parse)
|
||||
.orElse(null);
|
||||
}
|
||||
|
||||
public MapModelCriteriaBuilder<String, V, M> createCriteriaBuilder() {
|
||||
return new MapModelCriteriaBuilder<>(StringKey.INSTANCE, fieldPredicates);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Stream<V> read(QueryParameters<M> queryParameters) {
|
||||
final List<Path> paths;
|
||||
FileCriteriaBuilder cb = queryParameters.getModelCriteriaBuilder().flashToModelCriteriaBuilder(FileCriteriaBuilder.criteria());
|
||||
String realmId = (String) cb.getSingleRestrictionArgument(SEARCHABLE_FIELD_REALM_ID_FIELD_NAME);
|
||||
setRealmId(realmId);
|
||||
|
||||
final Path dataDirectory = getDataDirectory();
|
||||
if (! Files.isDirectory(dataDirectory)) {
|
||||
return Stream.empty();
|
||||
}
|
||||
|
||||
// We cannot use Files.find since it throws an UncheckedIOException if it lists a file which is removed concurrently
|
||||
// before its BasicAttributes can be retrieved for its BiPredicate parameter
|
||||
try (Stream<Path> dirStream = Files.walk(dataDirectory, entityClass == MapRealmEntity.class ? 1 : 2)) {
|
||||
// The paths list has to be materialized first, otherwise "dirStream" would be closed
|
||||
// before the resulting stream would be read and would return empty result
|
||||
paths = dirStream.collect(Collectors.toList());
|
||||
} catch (IOException | UncheckedIOException ex) {
|
||||
LOG.warnf(ex, "Error listing %s", dataDirectory);
|
||||
return Stream.empty();
|
||||
}
|
||||
Stream<V> res = paths.stream()
|
||||
.filter(FileMapStorage::canParseFile)
|
||||
.map(this::parse).filter(Objects::nonNull);
|
||||
|
||||
MapModelCriteriaBuilder<String,V,M> mcb = queryParameters.getModelCriteriaBuilder().flashToModelCriteriaBuilder(createCriteriaBuilder());
|
||||
|
||||
Predicate<? super String> keyFilter = mcb.getKeyFilter();
|
||||
Predicate<? super V> entityFilter;
|
||||
|
||||
if (isExpirableEntity) {
|
||||
entityFilter = mcb.getEntityFilter().and(ExpirationUtils::isNotExpired);
|
||||
} else {
|
||||
entityFilter = mcb.getEntityFilter();
|
||||
}
|
||||
|
||||
res = res.filter(e -> keyFilter.test(e.getId()) && entityFilter.test(e));
|
||||
|
||||
if (! queryParameters.getOrderBy().isEmpty()) {
|
||||
res = res.sorted(MapFieldPredicates.getComparator(queryParameters.getOrderBy().stream()));
|
||||
}
|
||||
|
||||
return paginatedStream(res, queryParameters.getOffset(), queryParameters.getLimit());
|
||||
}
|
||||
|
||||
@Override
|
||||
public V update(V value) {
|
||||
String proposedId = value.getId();
|
||||
String sanitizedId = sanitizeId(proposedId);
|
||||
|
||||
Path sp = getPathForSanitizedId(sanitizedId);
|
||||
|
||||
// TODO: improve locking
|
||||
synchronized (FileMapStorageProviderFactory.class) {
|
||||
writeYamlContents(sp, value);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean delete(String key) {
|
||||
return Optional.ofNullable(sanitizeId(key))
|
||||
.map(this::getPathForSanitizedId)
|
||||
.map(this::removeIfExists)
|
||||
.orElse(false);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long delete(QueryParameters<M> queryParameters) {
|
||||
return read(queryParameters).map(AbstractEntity::getId).map(this::delete).filter(a -> a).count();
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getCount(QueryParameters<M> queryParameters) {
|
||||
return read(queryParameters).count();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getRealmId() {
|
||||
return defaultRealmId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setRealmId(String realmId) {
|
||||
this.defaultRealmId = realmId;
|
||||
}
|
||||
|
||||
private Path getDataDirectory() {
|
||||
return dataDirectoryFunc.apply(defaultRealmId == null ? null : sanitizeId(defaultRealmId));
|
||||
}
|
||||
|
||||
private void writeYamlContents(Path sp, V value) {
|
||||
Path tempSp = sp.resolveSibling("." + getTxId() + "-" + sp.getFileName());
|
||||
try (PathWriter w = new PathWriter(tempSp)) {
|
||||
final Emitter emitter = new Emitter(DUMP_SETTINGS, w);
|
||||
try (YamlWritingMechanism mech = new YamlWritingMechanism(emitter::emit)) {
|
||||
new MapEntityContext<>(entityClass).writeValue(value, mech);
|
||||
}
|
||||
registerRenameOnCommit(tempSp, sp);
|
||||
} catch (IOException ex) {
|
||||
throw new IllegalStateException("Cannot write " + sp, ex);
|
||||
}
|
||||
}
|
||||
|
||||
protected abstract void touch(Path sp) throws IOException;
|
||||
|
||||
protected abstract boolean removeIfExists(Path sp);
|
||||
|
||||
protected abstract void registerRenameOnCommit(Path tempSp, Path sp);
|
||||
|
||||
protected abstract String getTxId();
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
/*
|
||||
* Copyright 2022 Red Hat, Inc. and/or its affiliates
|
||||
* Copyright 2023 Red Hat, Inc. and/or its affiliates
|
||||
* and other contributors as indicated by the @author tags.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
|
@ -28,12 +28,17 @@ import org.keycloak.models.map.storage.MapStorageProviderFactory;
|
|||
*/
|
||||
public class FileMapStorageProvider implements MapStorageProvider {
|
||||
|
||||
public FileMapStorageProvider() {
|
||||
private final FileMapStorageProviderFactory factory;
|
||||
|
||||
public FileMapStorageProvider(FileMapStorageProviderFactory factory) {
|
||||
this.factory = factory;
|
||||
}
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public <V extends AbstractEntity, M> MapStorage<V, M> getStorage(Class<M> modelType, MapStorageProviderFactory.Flag... flags) {
|
||||
return null;
|
||||
FileMapStorage storage = factory.getStorage(modelType, flags);
|
||||
return (MapStorage<V, M>) storage;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
/*
|
||||
* Copyright 2022 Red Hat, Inc. and/or its affiliates
|
||||
* Copyright 2023 Red Hat, Inc. and/or its affiliates
|
||||
* and other contributors as indicated by the @author tags.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
|
@ -21,9 +21,36 @@ import org.keycloak.common.Profile;
|
|||
import org.keycloak.component.AmphibianProviderFactory;
|
||||
import org.keycloak.models.KeycloakSession;
|
||||
import org.keycloak.models.KeycloakSessionFactory;
|
||||
import org.keycloak.models.RealmModel;
|
||||
import org.keycloak.models.SingleUseObjectValueModel;
|
||||
import org.keycloak.models.map.authorization.entity.MapPermissionTicketEntity;
|
||||
import org.keycloak.models.map.authorization.entity.MapPolicyEntity;
|
||||
import org.keycloak.models.map.authorization.entity.MapResourceEntity;
|
||||
import org.keycloak.models.map.authorization.entity.MapResourceServerEntity;
|
||||
import org.keycloak.models.map.authorization.entity.MapScopeEntity;
|
||||
import org.keycloak.models.map.client.MapClientEntity;
|
||||
import org.keycloak.models.map.clientscope.MapClientScopeEntity;
|
||||
import org.keycloak.models.map.common.AbstractEntity;
|
||||
import org.keycloak.models.map.common.UpdatableEntity;
|
||||
import org.keycloak.models.map.group.MapGroupEntity;
|
||||
import org.keycloak.models.map.realm.MapRealmEntity;
|
||||
import org.keycloak.models.map.role.MapRoleEntity;
|
||||
import org.keycloak.models.map.storage.MapStorageProvider;
|
||||
import org.keycloak.models.map.storage.MapStorageProviderFactory;
|
||||
import org.keycloak.models.map.storage.ModelEntityUtil;
|
||||
import org.keycloak.models.map.user.MapUserEntity;
|
||||
import org.keycloak.provider.EnvironmentDependentProviderFactory;
|
||||
import java.io.File;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ConcurrentModificationException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.function.Function;
|
||||
import java.util.regex.Pattern;
|
||||
import static java.util.Map.entry;
|
||||
import static org.keycloak.models.map.storage.ModelEntityUtil.getModelName;
|
||||
import static org.keycloak.models.map.storage.ModelEntityUtil.getModelNames;
|
||||
|
||||
/**
|
||||
* A {@link MapStorageProviderFactory} that creates file-based {@link MapStorageProvider}s.
|
||||
|
@ -35,11 +62,31 @@ public class FileMapStorageProviderFactory implements AmphibianProviderFactory<M
|
|||
EnvironmentDependentProviderFactory {
|
||||
|
||||
public static final String PROVIDER_ID = "file";
|
||||
private Config.Scope config;
|
||||
private Path rootRealmsDirectory;
|
||||
private final Map<String, Function<String, Path>> rootAreaDirectories = new HashMap<>(); // Function: (realmId) -> path
|
||||
private final Map<Class<?>, FileMapStorage<?, ?>> storages = new HashMap<>();
|
||||
|
||||
private static final Map<Class<?>, Function<?, String[]>> UNIQUE_HUMAN_READABLE_NAME_FIELD = Map.ofEntries(
|
||||
entry(MapClientEntity.class, ((Function<MapClientEntity, String[]>) v -> new String[] { v.getClientId() })),
|
||||
entry(MapClientScopeEntity.class, ((Function<MapClientScopeEntity, String[]>) v -> new String[] { v.getName() })),
|
||||
entry(MapGroupEntity.class, ((Function<MapGroupEntity, String[]>) v -> new String[] { v.getName()})),
|
||||
entry(MapRealmEntity.class, ((Function<MapRealmEntity, String[]>) v -> new String[] { v.getName()})),
|
||||
entry(MapRoleEntity.class, ((Function<MapRoleEntity, String[]>) (v -> v.getClientId() == null
|
||||
? new String[] { v.getName() }
|
||||
: new String[] { v.getClientId(), v.getName() }))),
|
||||
entry(MapUserEntity.class, ((Function<MapUserEntity, String[]>) v -> new String[] { v.getUsername() })),
|
||||
|
||||
// authz
|
||||
entry(MapResourceServerEntity.class, ((Function<MapResourceServerEntity, String[]>) v -> new String[] { v.getClientId() })),
|
||||
entry(MapPolicyEntity.class, ((Function<MapPolicyEntity, String[]>) v -> new String[] { v.getResourceServerId(), "policy", v.getName() })),
|
||||
entry(MapPermissionTicketEntity.class,((Function<MapPermissionTicketEntity, String[]>) v -> new String[] { v.getResourceServerId(), "ticket", v.getId()})),
|
||||
entry(MapResourceEntity.class, ((Function<MapResourceEntity, String[]>) v -> new String[] { v.getResourceServerId(), "resource", v.getName() })),
|
||||
entry(MapScopeEntity.class, ((Function<MapScopeEntity, String[]>) v -> new String[] { v.getResourceServerId(), "scope", v.getName() }))
|
||||
);
|
||||
|
||||
@Override
|
||||
public MapStorageProvider create(KeycloakSession session) {
|
||||
return new FileMapStorageProvider();
|
||||
return new FileMapStorageProvider(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -54,7 +101,42 @@ public class FileMapStorageProviderFactory implements AmphibianProviderFactory<M
|
|||
|
||||
@Override
|
||||
public void init(Config.Scope config) {
|
||||
this.config = config;
|
||||
final String dir = config.get("dir");
|
||||
rootRealmsDirectory = dir == null ? null : Path.of(dir);
|
||||
getModelNames().stream()
|
||||
.filter(n -> ! Objects.equals(n, getModelName(RealmModel.class)))
|
||||
.forEach(n -> rootAreaDirectories.put(n, getRootDir(rootRealmsDirectory, n, config.get("dir." + n))));
|
||||
|
||||
if (rootAreaDirectories != null) {
|
||||
rootAreaDirectories.put(getModelName(RealmModel.class), realmId -> realmId == null ? rootRealmsDirectory : rootRealmsDirectory.resolve(realmId) );
|
||||
}
|
||||
}
|
||||
|
||||
private static final Pattern FORBIDDEN_CHARACTERS = Pattern.compile("[\\.\\" + File.separator + "]");
|
||||
|
||||
private static Function<String, Path> getRootDir(Path rootRealmsDirectory, String areaName, String dirFromConfig) {
|
||||
if (dirFromConfig != null) {
|
||||
Path p = Path.of(dirFromConfig);
|
||||
return realmId -> p;
|
||||
} else {
|
||||
if (rootRealmsDirectory == null) {
|
||||
return p -> { throw new IllegalStateException("Directory for " + areaName + " area not configured."); };
|
||||
}
|
||||
|
||||
String a = areaName.startsWith("authz-") ? "authz" : areaName;
|
||||
|
||||
return realmId -> {
|
||||
if (realmId == null || FORBIDDEN_CHARACTERS.matcher(realmId).find()) {
|
||||
throw new IllegalArgumentException("Realm needed for constructing the path to " + areaName + " but not known or invalid: " + realmId);
|
||||
}
|
||||
|
||||
final Path path = rootRealmsDirectory
|
||||
.resolve(realmId)
|
||||
.resolve(a);
|
||||
|
||||
return path;
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -65,4 +147,23 @@ public class FileMapStorageProviderFactory implements AmphibianProviderFactory<M
|
|||
public String getId() {
|
||||
return PROVIDER_ID;
|
||||
}
|
||||
|
||||
public <V extends AbstractEntity & UpdatableEntity, M> FileMapStorage<V, M> initFileStorage(Class<M> modelType) {
|
||||
String name = getModelName(modelType, modelType.getSimpleName());
|
||||
final Class<V> et = ModelEntityUtil.getEntityType(modelType);
|
||||
@SuppressWarnings("unchecked")
|
||||
FileMapStorage<V, M> res = new FileMapStorage<>(et, (Function<V, String[]>) UNIQUE_HUMAN_READABLE_NAME_FIELD.get(et), rootAreaDirectories.get(name));
|
||||
return res;
|
||||
}
|
||||
|
||||
<M> FileMapStorage getStorage(Class<M> modelType, Flag[] flags) {
|
||||
try {
|
||||
if (modelType == SingleUseObjectValueModel.class) {
|
||||
throw new IllegalArgumentException("Unsupported file storage: " + ModelEntityUtil.getModelName(modelType));
|
||||
}
|
||||
return storages.computeIfAbsent(modelType, n -> initFileStorage(modelType));
|
||||
} catch (ConcurrentModificationException ex) {
|
||||
return storages.get(modelType);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,266 @@
|
|||
/*
|
||||
* Copyright 2023 Red Hat, Inc. and/or its affiliates
|
||||
* and other contributors as indicated by the @author tags.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.keycloak.models.map.storage.file.common;
|
||||
|
||||
import org.keycloak.models.map.common.UndefinedValuesUtils;
|
||||
import org.keycloak.models.map.storage.file.yaml.YamlParser;
|
||||
import java.util.Collection;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.TreeMap;
|
||||
import static org.keycloak.models.map.common.CastUtils.cast;
|
||||
|
||||
/**
|
||||
* A class implementing a {@code BlockContext} interface represents a transformer
|
||||
* from a primitive value / sequence / mapping representation as declared in YAML
|
||||
* format into a Java object of type {@code V}, with ability to produce
|
||||
* the {@link #getResult() resulting instance} of parsing.
|
||||
*
|
||||
* <p>
|
||||
* This transformer handles only a values of a single node in structured file, i.e.
|
||||
* single value (a primitive value, sequence or mapping). The root level
|
||||
* is at the beginning of e.g. YAML or JSON document.
|
||||
* Every mapping key and every sequence value then represents next level of nesting.
|
||||
*
|
||||
* @author hmlnarik
|
||||
* @param <V> Type of the result
|
||||
*/
|
||||
public interface BlockContext<V> {
|
||||
|
||||
/**
|
||||
* Writes the given value using {@link WritingMechanism}.
|
||||
*
|
||||
* @param value
|
||||
* @param mech
|
||||
*/
|
||||
void writeValue(V value, WritingMechanism mech);
|
||||
|
||||
/**
|
||||
* Called after reading a key of map entry in YAML file and before reading its value.
|
||||
* The key of the entry is represented as {@code nameOfSubcontext} parameter, and
|
||||
* provides means to specify a {@code YamlContext} for transforming the mapping value
|
||||
* into appropriate Java object.
|
||||
*
|
||||
* @param nameOfSubcontext Key of the map entry
|
||||
*
|
||||
* @return Context used for transforming the value,
|
||||
* or {@code null} if the default primitive / sequence / mapping context should be used instead.
|
||||
*
|
||||
* @see DefaultObjectContext
|
||||
* @see DefaultListContext
|
||||
* @see DefaultMapContext
|
||||
*/
|
||||
BlockContext<?> getContext(String nameOfSubcontext);
|
||||
|
||||
/**
|
||||
* Modifies the {@link #getResult() result returned} from within this context by
|
||||
* providing the read mapping entry {@code name} to given {@code value}.
|
||||
* <p>
|
||||
* Called after reading a map entry (both key and value) from the YAML file is finished.
|
||||
* The entry is represented as {@code name} parameter (key part of the entry)
|
||||
* and {@code value} (value part of the entry).
|
||||
* <p>
|
||||
* The method is called in the same order as the mapping items appear in the source YAML mapping.
|
||||
*
|
||||
* @param name
|
||||
* @param value
|
||||
*/
|
||||
default void add(String name, Object value) { };
|
||||
|
||||
/**
|
||||
* Modifies the {@link #getResult() result returned} from within this context by
|
||||
* providing the read primitive value or a single sequence item in the {@code value} parameter.
|
||||
* <p>
|
||||
* Called after reading a primitive value or a single sequence item
|
||||
* from the YAML file is finished.
|
||||
* <p>
|
||||
* If the parsed YAML part was a sequence, this method is called in the same order
|
||||
* as the sequence items appear in the source YAML sequence.
|
||||
*
|
||||
* @param value
|
||||
*/
|
||||
default void add(Object value) { };
|
||||
|
||||
/**
|
||||
* Returns the result of parsing the given part of YAML file.
|
||||
* @return
|
||||
*/
|
||||
V getResult();
|
||||
|
||||
Class<?> getScalarType();
|
||||
|
||||
public static class DefaultObjectContext<T> implements BlockContext<T> {
|
||||
|
||||
private final Class<T> objectType;
|
||||
private T result;
|
||||
|
||||
public DefaultObjectContext(Class<T> objectType) {
|
||||
this.objectType = objectType;
|
||||
}
|
||||
|
||||
public static DefaultObjectContext<Object> newDefaultObjectContext() {
|
||||
return new DefaultObjectContext<>(Object.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<T> getScalarType() {
|
||||
return objectType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void add(Object value) {
|
||||
result = (T) value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public T getResult() {
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeValue(Object value, WritingMechanism mech) {
|
||||
if (UndefinedValuesUtils.isUndefined(value)) return;
|
||||
mech.writeObject(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public BlockContext<?> getContext(String nameOfSubcontext) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public static class DefaultListContext<T> implements BlockContext<Collection<T>> {
|
||||
private final List<T> result = new LinkedList<>();
|
||||
|
||||
protected final Class<T> itemClass;
|
||||
|
||||
public static DefaultListContext<Object> newDefaultListContext() {
|
||||
return new DefaultListContext<>(Object.class);
|
||||
}
|
||||
|
||||
public DefaultListContext(Class<T> itemClass) {
|
||||
this.itemClass = itemClass;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<T> getScalarType() {
|
||||
return itemClass;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void add(Object value) {
|
||||
result.add(cast(value, itemClass));
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<T> getResult() {
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public void writeValue(Collection<T> value, WritingMechanism mech) {
|
||||
if (UndefinedValuesUtils.isUndefined(value)) return;
|
||||
mech.writeSequence(() -> value.forEach(v -> getContextByValue(v).writeValue(v, mech)));
|
||||
}
|
||||
|
||||
@Override
|
||||
public BlockContext<?> getContext(String nameOfSubcontext) {
|
||||
return null;
|
||||
}
|
||||
|
||||
private BlockContext getContextByValue(Object value) {
|
||||
BlockContext res = getContext(YamlParser.ARRAY_CONTEXT);
|
||||
if (res != null) {
|
||||
return res;
|
||||
}
|
||||
if (value instanceof Collection) {
|
||||
return new DefaultListContext<>(itemClass);
|
||||
} else if (value instanceof Map) {
|
||||
return DefaultMapContext.newDefaultMapContext();
|
||||
} else {
|
||||
return new DefaultObjectContext<>(itemClass);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static class DefaultMapContext<T> implements BlockContext<Map<String, T>> {
|
||||
private final Map<String, T> result = new LinkedHashMap<>();
|
||||
|
||||
protected final Class<T> itemClass;
|
||||
|
||||
public static DefaultMapContext<Object> newDefaultMapContext() {
|
||||
return new DefaultMapContext<>(Object.class);
|
||||
}
|
||||
|
||||
public DefaultMapContext(Class<T> itemClass) {
|
||||
this.itemClass = itemClass;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<T> getScalarType() {
|
||||
return itemClass;
|
||||
}
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public void add(String name, Object value) {
|
||||
result.put(name, (T) value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, T> getResult() {
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeValue(Map<String, T> value, WritingMechanism mech) {
|
||||
if (UndefinedValuesUtils.isUndefined(value)) return;
|
||||
mech.writeMapping(() -> {
|
||||
final TreeMap<String, Object> sortedMap = new TreeMap<>(value);
|
||||
sortedMap.forEach(
|
||||
(key, val) -> mech.writePair(
|
||||
key,
|
||||
() -> getContext(key, val).writeValue(val, mech)
|
||||
)
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public BlockContext<T> getContext(String nameOfSubcontext) {
|
||||
return null;
|
||||
}
|
||||
|
||||
private BlockContext getContext(String nameOfSubcontext, Object value) {
|
||||
BlockContext res = getContext(nameOfSubcontext);
|
||||
if (res != null) {
|
||||
return res;
|
||||
}
|
||||
if (value instanceof Collection) {
|
||||
return new DefaultListContext<>(itemClass);
|
||||
} else if (value instanceof Map) {
|
||||
return DefaultMapContext.newDefaultMapContext();
|
||||
} else {
|
||||
return new DefaultObjectContext<>(itemClass);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,62 @@
|
|||
/*
|
||||
* Copyright 2023 Red Hat, Inc. and/or its affiliates
|
||||
* and other contributors as indicated by the @author tags.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.keycloak.models.map.storage.file.common;
|
||||
|
||||
import java.util.LinkedList;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
/**
|
||||
* A special stack suited for tracking the parser of a block language, and maintaining
|
||||
* contextual information for block nesting position in the YAML file.
|
||||
* <p>
|
||||
* The intention is as follows:
|
||||
* Initially, it contains a single {@link BlockContext} instance which represents
|
||||
* the root context of the YAML tree. Every sequence item and mapping value
|
||||
* in the YAML file leads to pushing a new {@link BlockContext} onto the stack
|
||||
* which is created by the topmost {@link BlockContext#getContext(java.lang.String)}
|
||||
* method of the topmost {@link BlockContext}. This context is removed from the stack
|
||||
* once parsing of the respective sequence item or mapping pair is finished.
|
||||
*
|
||||
* @author hmlnarik
|
||||
*/
|
||||
public class BlockContextStack extends LinkedList<BlockContext<?>> {
|
||||
|
||||
public BlockContextStack(BlockContext<?> rootElement) {
|
||||
push(rootElement);
|
||||
}
|
||||
|
||||
/**
|
||||
* Pushes the subcontext to the stack.
|
||||
* <p>
|
||||
* The subcontext is created by calling {@link BlockContext#getContext(java.lang.String)}
|
||||
* method. If this method returns {@code null}, the control reverts to producing
|
||||
* the subcontext using {@code nullProducer} which must return a valid {@link BlockContext}
|
||||
* object (it <b>must not</b> return {@code null).
|
||||
*
|
||||
* @param name
|
||||
* @param nullProducer
|
||||
* @return
|
||||
*/
|
||||
public BlockContext<?> push(String name, Supplier<BlockContext<?>> nullProducer) {
|
||||
BlockContext<?> context = peek().getContext(name);
|
||||
if (context == null) {
|
||||
context = nullProducer.get();
|
||||
}
|
||||
push(context);
|
||||
return context;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,284 @@
|
|||
/*
|
||||
* Copyright 2023 Red Hat, Inc. and/or its affiliates
|
||||
* and other contributors as indicated by the @author tags.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.keycloak.models.map.storage.file.common;
|
||||
|
||||
import org.keycloak.models.map.common.AbstractEntity;
|
||||
import org.keycloak.models.map.common.DeepCloner;
|
||||
import org.keycloak.models.map.common.EntityField;
|
||||
import org.keycloak.models.map.common.UndefinedValuesUtils;
|
||||
import org.keycloak.models.map.role.MapRoleEntityFields;
|
||||
import org.keycloak.models.map.storage.ModelEntityUtil;
|
||||
import org.keycloak.models.map.storage.file.common.BlockContext.DefaultListContext;
|
||||
import org.keycloak.models.map.storage.file.common.BlockContext.DefaultMapContext;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.IdentityHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.TreeSet;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.Supplier;
|
||||
import java.util.stream.Collectors;
|
||||
import org.jboss.logging.Logger;
|
||||
import static org.keycloak.models.map.common.CastUtils.cast;
|
||||
|
||||
/**
|
||||
* {@link BlockContext} which handles any entity accompanied with {@link EntityField} field getters and setters,
|
||||
* namely {@code Map*Entity} classes.
|
||||
* @author hmlnarik
|
||||
*/
|
||||
public class MapEntityContext<T> implements BlockContext<T> {
|
||||
|
||||
private static final Logger LOG = Logger.getLogger(MapEntityContext.class);
|
||||
|
||||
private final Map<String, EntityField<?>> nameToEntityField;
|
||||
private final Map<String, Supplier<? extends BlockContext<?>>> contextCreators;
|
||||
|
||||
protected final Class<T> objectClass;
|
||||
protected final T result;
|
||||
private static final Map<Class, Map<String, EntityField<?>>> CACHE_FIELD_TO_EF = new IdentityHashMap<>();
|
||||
private static final Map<Class, Map<String, Supplier<? extends BlockContext<?>>>> CACHE_CLASS_TO_CC = new IdentityHashMap<>();
|
||||
private final boolean topContext;
|
||||
private boolean alreadyReadProperty = false;
|
||||
|
||||
public static final String SCHEMA_VERSION = "schemaVersion";
|
||||
|
||||
public MapEntityContext(Class<T> clazz) {
|
||||
this(clazz, true);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public MapEntityContext(Class<T> clazz, boolean topContext) {
|
||||
this(clazz,
|
||||
CACHE_FIELD_TO_EF.computeIfAbsent(clazz, MapEntityContext::fieldsToEntityField),
|
||||
CACHE_CLASS_TO_CC.computeIfAbsent(clazz, MapEntityContext::fieldsToContextCreators),
|
||||
topContext
|
||||
);
|
||||
}
|
||||
|
||||
protected MapEntityContext(
|
||||
Class<T> clazz,
|
||||
Map<String, EntityField<?>> nameToEntityField,
|
||||
Map<String, Supplier<? extends BlockContext<?>>> contextCreators,
|
||||
boolean topContext) {
|
||||
this.objectClass = clazz;
|
||||
this.result = DeepCloner.DUMB_CLONER.newInstance(clazz);
|
||||
this.nameToEntityField = nameToEntityField;
|
||||
this.contextCreators = contextCreators;
|
||||
this.topContext = topContext;
|
||||
}
|
||||
|
||||
protected static <T> Map<String, Supplier<? extends BlockContext<?>>> fieldsToContextCreators(Class<T> type) {
|
||||
if (! ModelEntityUtil.entityFieldsKnown(type)) {
|
||||
return Collections.emptyMap();
|
||||
}
|
||||
|
||||
return ModelEntityUtil.getEntityFields(type)
|
||||
.map(ef -> Map.entry(ef, Optional.ofNullable(getDefaultContextCreator(ef))))
|
||||
.filter(me -> me.getValue().isPresent())
|
||||
.collect(Collectors.toMap(me -> me.getKey().getNameCamelCase(), me -> me.getValue().get()));
|
||||
}
|
||||
|
||||
private static <T> Supplier<? extends BlockContext<?>> getDefaultContextCreator(EntityField<? super T> ef) {
|
||||
final Class<?> collectionElementClass = ef.getCollectionElementClass();
|
||||
if (collectionElementClass != Void.class) {
|
||||
if (ModelEntityUtil.entityFieldsKnown(collectionElementClass)) {
|
||||
return () -> new MapEntitySequenceYamlContext<>(collectionElementClass);
|
||||
}
|
||||
}
|
||||
|
||||
final Class<?> mapValueClass = ef.getMapValueClass();
|
||||
if (mapValueClass != Void.class) {
|
||||
if (ModelEntityUtil.entityFieldsKnown(mapValueClass)) {
|
||||
return () -> new MapEntityMappingYamlContext<>(mapValueClass);
|
||||
} else if (ATTRIBUTES_NAME.equals(ef.getName())) {
|
||||
return StringListMapContext::new;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
protected static final String ATTRIBUTES_NAME = MapRoleEntityFields.ATTRIBUTES.getName();
|
||||
|
||||
public static <T> Map<String, EntityField<?>> fieldsToEntityField(Class<T> type) {
|
||||
return ModelEntityUtil.getEntityFields(type).collect(Collectors.toUnmodifiableMap(EntityField::getNameCamelCase, Function.identity()));
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public static <T> boolean setEntityField(T result, EntityField<? super T> ef, Object value) {
|
||||
LOG.tracef("Setting %s::%s field", ef, result.getClass());
|
||||
if (ef == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
if (ef.getCollectionElementClass() != Void.class && value instanceof Collection) {
|
||||
Class<?> collectionElementClass = ef.getCollectionElementClass();
|
||||
((Collection) value).forEach(v -> ef.collectionAdd(result, cast(v, collectionElementClass)));
|
||||
} else if (ef.getMapKeyClass() != Void.class && value instanceof Map) {
|
||||
Class<?> mapKeyClass = ef.getMapKeyClass();
|
||||
Class<?> mapValueClass = ef.getMapValueClass();
|
||||
((Map) value).forEach((k, v) -> ef.mapPut(result, cast(k, mapKeyClass), cast(v, mapValueClass)));
|
||||
} else {
|
||||
final Object origValue = ef.get(result);
|
||||
if (origValue != null) {
|
||||
LOG.warnf("Overwriting value of %s field", ef.getNameCamelCase());
|
||||
}
|
||||
ef.set(result, cast(value, ef.getFieldClass()));
|
||||
}
|
||||
} catch (Exception ex) {
|
||||
throw new IllegalArgumentException("Exception thrown while setting " + ef + " field", ex);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void add(String name, Object value) {
|
||||
@SuppressWarnings("unchecked")
|
||||
EntityField<? super T> ef = (EntityField<? super T>) nameToEntityField.get(name);
|
||||
|
||||
if (topContext && name.equals(SCHEMA_VERSION)) {
|
||||
return; // TODO: Check appropriate schema version and potentially update parsing
|
||||
}
|
||||
|
||||
if (! setEntityField(result, ef, value)) {
|
||||
LOG.warnf("Ignoring field %s", name);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<T> getScalarType() {
|
||||
return this.objectClass;
|
||||
}
|
||||
|
||||
@Override
|
||||
public T getResult() {
|
||||
return this.result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BlockContext<?> getContext(String nameOfSubcontext) {
|
||||
if (topContext && nameOfSubcontext.equals(SCHEMA_VERSION)) {
|
||||
if (alreadyReadProperty) {
|
||||
LOG.warnf("%s must be the first property in the object YAML representation", SCHEMA_VERSION);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
alreadyReadProperty = true;
|
||||
Supplier<? extends BlockContext<?>> cc = contextCreators.get(nameOfSubcontext);
|
||||
if (cc != null) {
|
||||
return cc.get();
|
||||
}
|
||||
EntityField<?> ef = nameToEntityField.get(nameOfSubcontext);
|
||||
if (ef != null) {
|
||||
if (ef.getCollectionElementClass() != Void.class) {
|
||||
return contextFor(ef.getCollectionElementClass(), MapEntitySequenceYamlContext::new, DefaultListContext::new);
|
||||
} else if (ef.getMapValueClass() != Void.class) {
|
||||
if (ef.getMapValueClass() == List.class || Collection.class.isAssignableFrom(ef.getMapValueClass())) {
|
||||
return new StringListMapContext();
|
||||
}
|
||||
return contextFor(ef.getMapValueClass(), MapEntityMappingYamlContext::new, DefaultMapContext::new);
|
||||
}
|
||||
return contextFor(ef.getFieldClass(), MapEntityContext::new, DefaultObjectContext::new);
|
||||
}
|
||||
|
||||
LOG.warnf("No special context set for field %s", nameOfSubcontext);
|
||||
return null;
|
||||
}
|
||||
|
||||
private static <T> BlockContext<?> contextFor(Class<T> clazz, Function<Class<T>, BlockContext<?>> mapContextCreator, Function<Class<T>, BlockContext<?>> defaultCreator) {
|
||||
return ModelEntityUtil.entityFieldsKnown(clazz)
|
||||
? mapContextCreator.apply(clazz)
|
||||
: defaultCreator.apply(clazz);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeValue(T entity, WritingMechanism mech) {
|
||||
if (UndefinedValuesUtils.isUndefined(entity)) return;
|
||||
|
||||
mech.writeMapping(() -> {
|
||||
if (topContext) {
|
||||
mech.writePair(SCHEMA_VERSION, () -> mech.writeObject("1.0.Alpha1"));
|
||||
}
|
||||
|
||||
TreeSet<String> contextNames = new TreeSet<>(nameToEntityField.keySet());
|
||||
contextNames.addAll(contextCreators.keySet());
|
||||
|
||||
for (String contextName : contextNames) {
|
||||
@SuppressWarnings("unchecked")
|
||||
EntityField<T> ef = (EntityField<T>) nameToEntityField.get(contextName);
|
||||
if (ef == null) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (topContext && (ef.getNameCamelCase().equals("id") || ef.getNameCamelCase().equals("realmId"))) {
|
||||
continue;
|
||||
}
|
||||
|
||||
Object fieldVal = ef.get(entity);
|
||||
if (fieldVal != null) {
|
||||
BlockContext context = getContext(contextName);
|
||||
if (context != null) {
|
||||
mech.writePair(contextName, () -> context.writeValue(fieldVal, mech));
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
    /**
     * YAML sequence context for collections of map-storage entities. Items of a known
     * entity type are parsed field-by-field via {@link MapEntityContext}. As a fallback,
     * a mapping may appear where a sequence is expected: each mapping key is then
     * interpreted as the {@code id} of its entity value (see {@link #add(String, Object)}).
     */
    public static class MapEntitySequenceYamlContext<T> extends DefaultListContext<T> {

        public MapEntitySequenceYamlContext(Class<T> itemClass) {
            super(itemClass);
        }

        @Override
        public BlockContext<?> getContext(String nameOfSubcontext) {
            // entity items get an entity-aware context; unknown types get no special context
            return ModelEntityUtil.entityFieldsKnown(itemClass)
              ? new MapEntityContext<>(itemClass, false)
              : null;
        }

        @Override
        public void add(String name, Object value) {
            // invoked when the parser finds a mapping instead of a sequence:
            // treat the mapping key as the entity id of the parsed value
            if (value instanceof AbstractEntity) {
                ((AbstractEntity) value).setId(name);
                add(value);
            } else {
                throw new IllegalArgumentException("Sequence expected, mapping with " + name + " key found instead.");
            }
        }
    }
|
||||
|
||||
    /**
     * YAML mapping context whose values are map-storage entities: each value is parsed
     * with an entity-aware {@link MapEntityContext} when the value class is a known entity,
     * otherwise parsing falls back to the default mapping behaviour of the superclass.
     */
    public static class MapEntityMappingYamlContext<T> extends DefaultMapContext<T> {

        public MapEntityMappingYamlContext(Class<T> mapValueClass) {
            super(mapValueClass);
        }

        @Override
        public BlockContext<T> getContext(String nameOfSubcontext) {
            return ModelEntityUtil.entityFieldsKnown(itemClass)
              ? new MapEntityContext<>(itemClass, false)
              : super.getContext(nameOfSubcontext);
        }
    }
|
||||
|
||||
}
|
|
@ -0,0 +1,112 @@
|
|||
/*
|
||||
* Copyright 2023 Red Hat, Inc. and/or its affiliates
|
||||
* and other contributors as indicated by the @author tags.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.keycloak.models.map.storage.file.common;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.LinkedList;
|
||||
import java.util.Map;
|
||||
import java.util.TreeMap;
|
||||
import org.keycloak.models.map.common.UndefinedValuesUtils;
|
||||
import org.keycloak.models.map.storage.file.common.BlockContext.DefaultListContext;
|
||||
import org.keycloak.models.map.storage.file.common.BlockContext.DefaultMapContext;
|
||||
import org.keycloak.models.map.storage.file.yaml.YamlParser;
|
||||
import java.util.List;
|
||||
|
||||
/**
 * Block context suitable for properties stored in a {@code Map<String, List<String>>}
 * which accepts a string mapping key, and where each entry value is recognized both as a
 * plain value (converted to string) and as a list of values.
 *
 * @author hmlnarik
 */
public class StringListMapContext extends DefaultMapContext<Collection<String>> {

    @SuppressWarnings("unchecked")
    public StringListMapContext() {
        super((Class) Collection.class);
    }

    /**
     * Returns a YAML attribute-like context where the key of each element
     * is stored in the YAML file without a given prefix, and in the internal
     * representation each key has that prefix.
     *
     * @param prefix prefix prepended internally to every parsed key
     * @return prefixing variant of this context
     */
    public static StringListMapContext prefixed(String prefix) {
        return new Prefixed(prefix);
    }

    @Override
    public AttributeValueYamlContext getContext(String nameOfSubcontext) {
        // regardless of the key name, the values need to be converted into List<String> which is the purpose of AttributeValueYamlContext
        return new AttributeValueYamlContext();
    }

    /**
     * Writes the map as a YAML mapping. Keys are emitted in natural (sorted) order so
     * that the produced output is deterministic; undefined maps produce no output.
     */
    @Override
    public void writeValue(Map<String, Collection<String>> value, WritingMechanism mech) {
        if (UndefinedValuesUtils.isUndefined(value)) return;
        mech.writeMapping(() -> {
            AttributeValueYamlContext c = getContext(YamlParser.ARRAY_CONTEXT);
            for (Map.Entry<String, Collection<String>> entry : new TreeMap<>(value).entrySet()) {
                Collection<String> attrValues = entry.getValue();
                mech.writePair(entry.getKey(), () -> c.writeValue(attrValues, mech));
            }
        });
    }

    /** Variant of {@link StringListMapContext} that prepends a fixed prefix to every parsed key. */
    private static class Prefixed extends StringListMapContext {

        protected final String prefix;

        public Prefixed(String prefix) {
            this.prefix = prefix;
        }

        @Override
        public void add(String name, Object value) {
            super.add(prefix + name, value);
        }
    }

    /**
     * Context for a single attribute value. On reading, scalars are normalized to strings
     * and collected into a list ({@code null} items are skipped). On writing, a
     * single-element collection is emitted as a plain scalar rather than a one-item sequence.
     */
    public static class AttributeValueYamlContext extends DefaultListContext<String> {

        public AttributeValueYamlContext() {
            super(String.class);
        }

        @Override
        public void writeValue(Collection<String> value, WritingMechanism mech) {
            if (UndefinedValuesUtils.isUndefined(value)) return;
            if (value.size() == 1) {
                // single value is written as a plain scalar
                mech.writeObject(value.iterator().next());
            } else {
                //sequence
                super.writeValue(value, mech);
            }
        }

        @Override
        public void add(Object value) {
            // normalize each item to String; null items are silently skipped
            if (value != null) {
                super.add(String.valueOf(value));
            }
        }
    }

}
|
|
@ -0,0 +1,54 @@
|
|||
/*
|
||||
* Copyright 2023 Red Hat, Inc. and/or its affiliates
|
||||
* and other contributors as indicated by the @author tags.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.keycloak.models.map.storage.file.common;
|
||||
|
||||
/**
 * A class implementing this interface defines a mechanism for writing basic structures:
 * primitive types, sequences and maps.
 */
public interface WritingMechanism {

    /**
     * Writes a value of a primitive type ({@code null}, boolean, number, String).
     *
     * @param value value to write
     * @return this mechanism, for call chaining
     */
    WritingMechanism writeObject(Object value);

    /**
     * Writes a sequence, items of which are written using this mechanism in the {@code task}.
     *
     * @param task callback producing the individual sequence items
     * @return this mechanism, for call chaining
     */
    WritingMechanism writeSequence(Runnable task);

    /**
     * Writes a mapping, items of which are written using this mechanism in the {@code task}.
     *
     * @param task callback producing the individual key/value pairs
     * @return this mechanism, for call chaining
     */
    WritingMechanism writeMapping(Runnable task);

    /**
     * Writes a mapping key/value pair, the value of which is written using this mechanism
     * in the {@code valueTask}.
     *
     * @param key mapping key
     * @param valueTask callback producing the value
     * @return this mechanism, for call chaining
     */
    WritingMechanism writePair(String key, Runnable valueTask);


}
|
|
@ -0,0 +1,70 @@
|
|||
/*
|
||||
* Copyright 2023 Red Hat, Inc. and/or its affiliates
|
||||
* and other contributors as indicated by the @author tags.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.keycloak.models.map.storage.file.yaml;
|
||||
|
||||
import java.io.BufferedWriter;
import java.io.Closeable;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import org.snakeyaml.engine.v2.api.StreamDataWriter;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author hmlnarik
|
||||
*/
|
||||
public class PathWriter implements StreamDataWriter, Closeable {
|
||||
|
||||
private final BufferedWriter writer;
|
||||
|
||||
public PathWriter(Path path) throws IOException {
|
||||
this.writer = Files.newBufferedWriter(path);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(String str) {
|
||||
try {
|
||||
this.writer.write(str);
|
||||
} catch (IOException ex) {
|
||||
throw new RuntimeException(ex);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(String str, int off, int len) {
|
||||
try {
|
||||
this.writer.write(str, off, len);
|
||||
} catch (IOException ex) {
|
||||
throw new RuntimeException(ex);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void flush() {
|
||||
try {
|
||||
this.writer.flush();
|
||||
} catch (IOException ex) {
|
||||
throw new RuntimeException(ex);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
writer.close();
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,70 @@
|
|||
/*
|
||||
* Copyright 2023 Red Hat, Inc. and/or its affiliates
|
||||
* and other contributors as indicated by the @author tags.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.keycloak.models.map.storage.file.yaml;
|
||||
|
||||
import java.util.LinkedList;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
|
||||
/**
 * A {@link Runnable} wrapper that executes its pre-task at most once, regardless of how
 * many times {@link #run()} is invoked, and can afterwards execute a matching post-task —
 * but only when the pre-task has actually run.
 *
 * @author hmlnarik
 */
class RunOnlyOnce implements Runnable {

    private final AtomicBoolean ran = new AtomicBoolean(false);
    private final Runnable preTask;
    private final Runnable postTask;

    public RunOnlyOnce(Runnable preTask, Runnable postTask) {
        this.preTask = preTask;
        this.postTask = postTask;
    }

    /** Runs the pre-task on the first invocation only; any later invocation is a no-op. */
    @Override
    public void run() {
        final boolean firstInvocation = ran.compareAndSet(false, true);
        if (! firstInvocation || preTask == null) {
            return;
        }
        preTask.run();
    }

    /** Runs the post-task, provided that {@link #run()} has been invoked before. */
    public void runPostTask() {
        if (! hasRun() || postTask == null) {
            return;
        }
        postTask.run();
    }

    /** @return whether {@link #run()} has been invoked at least once */
    public boolean hasRun() {
        return ran.get();
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("RunOnlyOnce");
        if (hasRun()) {
            sb.append(" - ran already");
        }
        return sb.append(" ").append(preTask).toString();
    }

    /**
     * {@link LinkedList} of {@link RunOnlyOnce} tasks which fires the post-task of an
     * element whenever that element is removed via {@link #removeLast()}.
     */
    static class List extends LinkedList<RunOnlyOnce> {

        @Override
        public RunOnlyOnce removeLast() {
            final RunOnlyOnce removed = super.removeLast();
            removed.runPostTask();
            return removed;
        }
    }
}
|
|
@ -0,0 +1,268 @@
|
|||
/*
|
||||
* Copyright 2023 Red Hat, Inc. and/or its affiliates
|
||||
* and other contributors as indicated by the @author tags.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.keycloak.models.map.storage.file.yaml;
|
||||
|
||||
import org.keycloak.models.map.storage.file.common.BlockContextStack;
|
||||
import org.keycloak.models.map.storage.file.common.BlockContext.DefaultListContext;
|
||||
import org.keycloak.models.map.storage.file.common.BlockContext.DefaultMapContext;
|
||||
import org.keycloak.models.map.storage.file.common.BlockContext.DefaultObjectContext;
|
||||
import java.io.InputStream;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.EnumMap;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
import java.util.function.Supplier;
|
||||
import org.jboss.logging.Logger;
|
||||
import org.snakeyaml.engine.v2.api.LoadSettings;
|
||||
import org.snakeyaml.engine.v2.api.YamlUnicodeReader;
|
||||
import org.snakeyaml.engine.v2.constructor.StandardConstructor;
|
||||
import org.snakeyaml.engine.v2.events.Event;
|
||||
import org.snakeyaml.engine.v2.events.Event.ID;
|
||||
import org.snakeyaml.engine.v2.events.NodeEvent;
|
||||
import org.snakeyaml.engine.v2.events.ScalarEvent;
|
||||
import org.snakeyaml.engine.v2.exceptions.ConstructorException;
|
||||
import org.snakeyaml.engine.v2.nodes.ScalarNode;
|
||||
import org.snakeyaml.engine.v2.nodes.Tag;
|
||||
import org.snakeyaml.engine.v2.parser.Parser;
|
||||
import org.snakeyaml.engine.v2.parser.ParserImpl;
|
||||
import org.snakeyaml.engine.v2.resolver.JsonScalarResolver;
|
||||
import org.snakeyaml.engine.v2.resolver.ScalarResolver;
|
||||
import org.snakeyaml.engine.v2.scanner.StreamReader;
|
||||
import org.keycloak.models.map.storage.file.common.BlockContext;
|
||||
|
||||
import static org.keycloak.common.util.StackUtil.getShortStackTrace;
|
||||
|
||||
/**
 * Event-driven YAML parser which delegates handling of each YAML block (mapping, sequence
 * or scalar) to a {@link BlockContext}: nested blocks are parsed within the sub-context
 * supplied by their parent context, starting from the initial context passed to the
 * constructor. Only single-document streams are accepted; aliases are not supported.
 *
 * @author hmlnarik
 */
public class YamlParser<E> {

    private static final Logger LOG = Logger.getLogger(YamlParser.class);
    // synthetic sub-context name used for items of a sequence, which have no mapping key
    public static final String ARRAY_CONTEXT = "$@[]@$";

    private static final ScalarResolver RESOLVER = new JsonScalarResolver();
    private final Parser parser;
    // one context per currently open YAML block; top of the stack handles the current block
    private final BlockContextStack contextStack;

    // Leverage SnakeYaml's translation of primitive values
    private static final class MiniConstructor extends StandardConstructor {

        public MiniConstructor() {
            super(SETTINGS);
        }

        // This has been based on SnakeYaml's own org.snakeyaml.engine.v2.constructor.BaseConstructor.constructObjectNoCheck(Node node)
        @SuppressWarnings(value = "unchecked")
        public Object constructStandardJavaInstance(ScalarNode node) {
            return findConstructorFor(node)
              .map(constructor -> constructor.construct(node))
              .orElseThrow(() -> new ConstructorException(null, Optional.empty(), "could not determine a constructor for the tag " + node.getTag(), node.getStartMark()));
        }

        public static final MiniConstructor INSTANCE = new MiniConstructor();
    }

    private static final LoadSettings SETTINGS = LoadSettings.builder()
      .setAllowRecursiveKeys(false)
      .setParseComments(false)
      .build();

    /**
     * Parses the YAML file at the given path.
     *
     * @param <E> type of the parsing result
     * @param path file to parse, must not be {@code null}
     * @param initialContext context handling the root node of the document
     * @return parsed object, or {@code null} when the file is empty or an I/O error occurs
     *         (the error is logged, not rethrown)
     */
    public static <E> E parse(Path path, BlockContext<E> initialContext) {
        LOG.tracef("parse(%s,%s)%s", path, initialContext, getShortStackTrace());

        Objects.requireNonNull(path, "Path invalid");
        try (InputStream is = Files.newInputStream(path)) {
            if (Files.size(path) == 0) {
                // an empty file carries no document at all
                return null;
            }
            Parser p = new ParserImpl(SETTINGS, new StreamReader(SETTINGS, new YamlUnicodeReader(is)));
            return new YamlParser<>(p, initialContext).parse();
        } catch (IOException ex) {
            LOG.warn(ex);
            return null;
        }
    }

    protected YamlParser(Parser p, BlockContext<E> initialContext) {
        this.parser = p;
        this.contextStack = new BlockContextStack(initialContext);
    }

    /**
     * Consumes a complete single-document stream and returns the result of the initial context.
     * NOTE(review): the method-level type parameter {@code <E>} shadows the class-level
     * {@code E} (compiler hiding warning); consider removing it.
     *
     * @return result produced by the initial context
     */
    @SuppressWarnings("unchecked")
    protected <E> E parse() {
        consumeEvent(Event.ID.StreamStart, "Expected a stream");

        if (!parser.checkEvent(Event.ID.StreamEnd)) {
            consumeEvent(Event.ID.DocumentStart, "Expected a document in the stream");
            parseNode();
            consumeEvent(Event.ID.DocumentEnd, "Expected a single document in the stream");
        }

        consumeEvent(Event.ID.StreamEnd, "Expected a single document in the stream");

        return (E) contextStack.pop().getResult();
    }

    /**
     * Parses a single node — scalar, sequence or mapping — in the current context.
     *
     * @return result of the current context after the node has been consumed
     */
    protected Object parseNode() {
        if (parser.checkEvent(Event.ID.Alias)) {
            throw new IllegalStateException("Aliases are not handled at this moment");
        }
        Event ev = parser.next();
        if (!(ev instanceof NodeEvent)) {
            throw new IllegalArgumentException("Invalid event " + ev);
        }
        // if (anchor != null) {
        //     node.setAnchor(anchor);
        //     anchors.put(anchor, node);
        // }
        // try {
        switch (ev.getEventId()) {
            case Scalar:
                return parseScalar((ScalarEvent) ev);
            case SequenceStart:
                return parseSequence();
            case MappingStart:
                return parseMapping();
            default:
                throw new IllegalStateException("Event not expected " + ev);
        }
        // } finally {
        //     anchors.remove(anchor);
        // }
    }

    /**
     * Parses a sequence node inside the current context. Each sequence item is parsed in the context
     * supplied by the current context for the synthetic {@link #ARRAY_CONTEXT} key.
     * @return result of the current context
     */
    protected Object parseSequence() {
        LOG.tracef("Parsing sequence");
        BlockContext context = contextStack.peek();
        while (! parser.checkEvent(Event.ID.SequenceEnd)) {
            context.add(parseNodeInFreshContext(ARRAY_CONTEXT));
        }
        consumeEvent(Event.ID.SequenceEnd, "Expected end of sequence");
        return context.getResult();
    }

    /**
     * Parses a mapping node inside the current context. Each mapping value is parsed in the context
     * supplied by the current context for the mapping key.
     * @return result of the current context
     */
    protected Object parseMapping() {
        LOG.tracef("Parsing mapping");
        BlockContext context = contextStack.peek();
        while (! parser.checkEvent(Event.ID.MappingEnd)) {
            Object key = parseNodeInFreshContext();
            LOG.tracef("Parsed mapping key: %s", key);
            if (! (key instanceof String)) {
                throw new IllegalStateException("Invalid key in map: " + key);
            }
            Object value = parseNodeInFreshContext((String) key);
            LOG.tracef("Parsed mapping value: %s", value);
            context.add((String) key, value);
        }
        consumeEvent(Event.ID.MappingEnd, "Expected end of mapping");
        return context.getResult();
    }

    /**
     * Parses a scalar node inside the current context, converting it to a standard Java
     * object via SnakeYaml's scalar constructors. When the current context requests
     * {@code String} scalars (see {@link #constructTag(Optional, String, boolean, Class)}),
     * the value is forced to be read as a string.
     * @return result of the current context
     */
    protected Object parseScalar(ScalarEvent se) {
        BlockContext context = contextStack.peek();

        boolean implicit = se.getImplicit().canOmitTagInPlainScalar();
        final Tag nodeTag;
        Class ot = context.getScalarType();
        nodeTag = constructTag(se.getTag(), se.getValue(), implicit, ot);

        ScalarNode node = new ScalarNode(nodeTag, true, se.getValue(), se.getScalarStyle(), se.getStartMark(), se.getEndMark());
        final Object value = MiniConstructor.INSTANCE.constructStandardJavaInstance(node);
        context.add(value);
        return context.getResult();
    }

    // fallback context constructors keyed by the type of the upcoming event
    private static final EnumMap<Event.ID, Supplier<BlockContext<?>>> CONTEXT_CONSTRUCTORS = new EnumMap<>(Event.ID.class);
    static {
        CONTEXT_CONSTRUCTORS.put(ID.Scalar, DefaultObjectContext::newDefaultObjectContext);
        CONTEXT_CONSTRUCTORS.put(ID.SequenceStart, DefaultListContext::newDefaultListContext);
        CONTEXT_CONSTRUCTORS.put(ID.MappingStart, DefaultMapContext::newDefaultMapContext);
    }

    /**
     * Ensure that the next event is the expectedEventId, otherwise throw an exception, and consume that event
     */
    private Event consumeEvent(ID expectedEventId, String message) throws IllegalArgumentException {
        if (! parser.checkEvent(expectedEventId)) {
            Event event = parser.next();
            throw new IllegalArgumentException(message + " at " + event.getStartMark());
        }
        return parser.next();
    }

    /** Resolves the node tag from an explicit YAML tag, or infers it from the scalar value. */
    private static Tag constructTag(Optional<String> tag, String value, boolean implicit) {
        // based on org.snakeyaml.engine.v2.composer.Composer.composeScalarNode(Optional<Anchor> anchor, List<CommentLine> blockComments)
        return tag.filter(t -> ! "!".equals(t))
          .map(Tag::new)
          .orElseGet(() -> RESOLVER.resolve(value, implicit));
    }

    /** Like {@link #constructTag(Optional, String, boolean)}, but forces STR when the context expects strings. */
    private Tag constructTag(Optional<String> tag, String value, boolean implicit, Class<?> ot) {
        if (ot == String.class) {
            return Tag.STR;
        } else {
            return constructTag(tag, value, implicit);
        }
    }

    /**
     * Parses the node in a context created for the given {@code key}.
     * @param key mapping key (or {@link #ARRAY_CONTEXT} for sequence items)
     * @return parsed value
     * @throws IllegalStateException when no context can be created for the upcoming event
     */
    private Object parseNodeInFreshContext(String key) throws IllegalStateException {
        Supplier<BlockContext<?>> cc = CONTEXT_CONSTRUCTORS.get(parser.peekEvent().getEventId());
        if (cc == null) {
            throw new IllegalStateException("Invalid value in map with key " + key);
        }
        contextStack.push(key, cc);
        Object value = parseNode();
        contextStack.pop();
        return value;
    }

    /**
     * Parses the node in a fresh context {@link DefaultObjectContext}.
     * @return parsed value
     * @throws IllegalStateException propagated from node parsing
     */
    private Object parseNodeInFreshContext() throws IllegalStateException {
        contextStack.push(DefaultObjectContext.newDefaultObjectContext());
        Object value = parseNode();
        contextStack.pop();
        return value;
    }

}
|
|
@ -0,0 +1,150 @@
|
|||
/*
|
||||
* Copyright 2023 Red Hat, Inc. and/or its affiliates
|
||||
* and other contributors as indicated by the @author tags.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.keycloak.models.map.storage.file.yaml;
|
||||
|
||||
import org.keycloak.models.map.storage.file.common.WritingMechanism;
|
||||
import java.io.Closeable;
|
||||
import java.math.BigDecimal;
|
||||
import java.math.BigInteger;
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedList;
|
||||
import java.util.Optional;
|
||||
import java.util.function.Consumer;
|
||||
import org.snakeyaml.engine.v2.common.FlowStyle;
|
||||
import org.snakeyaml.engine.v2.common.ScalarStyle;
|
||||
import org.snakeyaml.engine.v2.events.DocumentEndEvent;
|
||||
import org.snakeyaml.engine.v2.events.DocumentStartEvent;
|
||||
import org.snakeyaml.engine.v2.events.Event;
|
||||
import org.snakeyaml.engine.v2.events.ImplicitTuple;
|
||||
import org.snakeyaml.engine.v2.events.MappingEndEvent;
|
||||
import org.snakeyaml.engine.v2.events.MappingStartEvent;
|
||||
import org.snakeyaml.engine.v2.events.ScalarEvent;
|
||||
import org.snakeyaml.engine.v2.events.SequenceEndEvent;
|
||||
import org.snakeyaml.engine.v2.events.SequenceStartEvent;
|
||||
import org.snakeyaml.engine.v2.events.StreamEndEvent;
|
||||
import org.snakeyaml.engine.v2.events.StreamStartEvent;
|
||||
import org.snakeyaml.engine.v2.nodes.Tag;
|
||||
|
||||
/**
 * Mechanism which produces {@link Event}s for SnakeYaml v2 {@code Emitter}.
 * <p>
 * Start events of the document and of nested mappings / sequences are produced lazily:
 * every {@code write*} call registers a {@link RunOnlyOnce} pre-task, and pending
 * pre-tasks only fire once a scalar is actually written (see {@link #writeObject(Object)}).
 * The matching end event is the post-task, fired when the pre-task is popped off the
 * stack — hence a mapping or sequence into which nothing was written produces no events.
 *
 * @author vramik
 */
public class YamlWritingMechanism implements WritingMechanism, Closeable {

    private final ImplicitTuple implicitTuple = new ImplicitTuple(true, true);
    // sink for the produced events, typically backed by a SnakeYaml Emitter
    private final Consumer<Event> consumer;
    // guards against re-entrant pre-task execution: a pre-task may itself call
    // writeObject (see writePair, whose pre-task writes the mapping key)
    private boolean runningPreTasks = false;
    // stack of pending start-event emitters; RunOnlyOnce.List fires the matching
    // end event (post-task) upon removeLast()
    private final LinkedList<RunOnlyOnce> preTasks = new RunOnlyOnce.List();

    public YamlWritingMechanism(Consumer<Event> consumer) {
        this.consumer = consumer;
        // document start/end events are handled like any other lazily-emitted block
        this.preTasks.add(new RunOnlyOnce(this::startDocument, this::endDocument));
    }

    /**
     * Emits the end-of-document events.
     * NOTE(review): these events are emitted unconditionally, even when nothing was ever
     * written and therefore no start events were produced — confirm this is intentional.
     */
    @Override
    public void close() {
        endDocument();
    }

    @Override
    public YamlWritingMechanism writeMapping(Runnable task) {
        return writeObject(task, this::startMapping, this::endMapping);
    }

    @Override
    public YamlWritingMechanism writeSequence(Runnable task) {
        return writeObject(task, this::startSequence, this::endSequence);
    }

    @Override
    public YamlWritingMechanism writePair(String key, Runnable task) {
        // the key scalar is itself a lazy pre-task: it is only emitted when the value task writes something
        return writeObject(task, () -> writeObject(key), null);
    }

    @Override
    public YamlWritingMechanism writeObject(Object value) {
        // flush all pending start events before the first scalar of the block is emitted
        if (! runningPreTasks) {
            runningPreTasks = true;
            preTasks.forEach(RunOnlyOnce::run);
            runningPreTasks = false;
        }
        this.consumer.accept(new ScalarEvent(Optional.empty(), determineTag(value), implicitTuple, value == null ? "null" : value.toString(), determineStyle(value)));
        return this;
    }

    private void startDocument() {
        this.consumer.accept(new StreamStartEvent());
        this.consumer.accept(new DocumentStartEvent(false, Optional.empty(), Collections.emptyMap()));
    }

    private void endDocument() {
        this.consumer.accept(new DocumentEndEvent(false));
        this.consumer.accept(new StreamEndEvent());
    }

    /**
     * Runs {@code taskWithOptionalWrite} with {@code preWriteTask} / {@code postWriteTask}
     * registered as the start / end emitters of a new block: the start task fires only if
     * the task actually writes something, and the end task fires on unregistration
     * (via {@link RunOnlyOnce.List#removeLast()}) only when the start task has fired.
     */
    private YamlWritingMechanism writeObject(Runnable taskWithOptionalWrite, Runnable preWriteTask, Runnable postWriteTask) {
        RunOnlyOnce roo = new RunOnlyOnce(preWriteTask, postWriteTask);
        try {
            preTasks.addLast(roo);
            taskWithOptionalWrite.run();
        } finally {
            preTasks.removeLast();
        }
        return this;
    }

    private void startSequence() {
        this.consumer.accept(new SequenceStartEvent(Optional.empty(), Optional.of(Tag.SEQ.getValue()), true, FlowStyle.BLOCK));
    }

    private void endSequence() {
        this.consumer.accept(new SequenceEndEvent());
    }

    private void startMapping() {
        this.consumer.accept(new MappingStartEvent(Optional.empty(), Optional.of(Tag.MAP.getValue()), true, FlowStyle.BLOCK));
    }

    private void endMapping() {
        this.consumer.accept(new MappingEndEvent());
    }

    /** Maps a Java value onto the corresponding YAML core-schema tag; unknown types get no explicit tag. */
    private Optional<String> determineTag(Object value) {
        if (value instanceof String) {
            return Optional.of(Tag.STR.getValue());
        } else if (value instanceof Boolean) {
            return Optional.of(Tag.BOOL.getValue());
        } else if (value instanceof Integer || value instanceof Long || value instanceof BigInteger) {
            return Optional.of(Tag.INT.getValue());
        } else if (value instanceof Float || value instanceof Double || value instanceof BigDecimal) {
            return Optional.of(Tag.FLOAT.getValue());
        } else if (value == null) {
            return Optional.of(Tag.NULL.getValue());
        } else {
            return Optional.empty();
        }
    }

    /**
     * Multi-line strings are written in folded style, everything else as a plain scalar.
     * NOTE(review): {@code lastIndexOf('\n') > 0} means a string whose only newline is its
     * very first character is still written PLAIN — confirm whether that is intended.
     */
    private ScalarStyle determineStyle(Object value) {
        if (value instanceof String && ((String) value).lastIndexOf('\n') > 0) {
            return ScalarStyle.FOLDED;
        }
        return ScalarStyle.PLAIN;
    }

}
|
|
@ -44,4 +44,34 @@ public class AuthenticatedClientSessionReferenceOnlyFieldDelegate implements Ent
|
|||
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T, EF extends java.lang.Enum<? extends org.keycloak.models.map.common.EntityField<org.keycloak.models.map.userSession.MapAuthenticatedClientSessionEntity>> & org.keycloak.models.map.common.EntityField<org.keycloak.models.map.userSession.MapAuthenticatedClientSessionEntity>> void set(EF field, T value) {
|
||||
throw new UnsupportedOperationException("Not supported yet.");
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T, EF extends java.lang.Enum<? extends org.keycloak.models.map.common.EntityField<org.keycloak.models.map.userSession.MapAuthenticatedClientSessionEntity>> & org.keycloak.models.map.common.EntityField<org.keycloak.models.map.userSession.MapAuthenticatedClientSessionEntity>> void collectionAdd(EF field, T value) {
|
||||
throw new UnsupportedOperationException("Not supported yet.");
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T, EF extends java.lang.Enum<? extends org.keycloak.models.map.common.EntityField<org.keycloak.models.map.userSession.MapAuthenticatedClientSessionEntity>> & org.keycloak.models.map.common.EntityField<org.keycloak.models.map.userSession.MapAuthenticatedClientSessionEntity>> Object collectionRemove(EF field, T value) {
|
||||
throw new UnsupportedOperationException("Not supported yet.");
|
||||
}
|
||||
|
||||
@Override
|
||||
public <K, EF extends java.lang.Enum<? extends org.keycloak.models.map.common.EntityField<org.keycloak.models.map.userSession.MapAuthenticatedClientSessionEntity>> & org.keycloak.models.map.common.EntityField<org.keycloak.models.map.userSession.MapAuthenticatedClientSessionEntity>> Object mapGet(EF field, K key) {
|
||||
throw new UnsupportedOperationException("Not supported yet.");
|
||||
}
|
||||
|
||||
@Override
|
||||
public <K, T, EF extends java.lang.Enum<? extends org.keycloak.models.map.common.EntityField<org.keycloak.models.map.userSession.MapAuthenticatedClientSessionEntity>> & org.keycloak.models.map.common.EntityField<org.keycloak.models.map.userSession.MapAuthenticatedClientSessionEntity>> void mapPut(EF field, K key, T value) {
|
||||
throw new UnsupportedOperationException("Not supported yet.");
|
||||
}
|
||||
|
||||
@Override
|
||||
public <K, EF extends java.lang.Enum<? extends org.keycloak.models.map.common.EntityField<org.keycloak.models.map.userSession.MapAuthenticatedClientSessionEntity>> & org.keycloak.models.map.common.EntityField<org.keycloak.models.map.userSession.MapAuthenticatedClientSessionEntity>> Object mapRemove(EF field, K key) {
|
||||
throw new UnsupportedOperationException("Not supported yet.");
|
||||
}
|
||||
}
|
||||
|
|
|
@ -332,4 +332,19 @@ public class LdapRoleEntity extends UpdatableEntity.Impl implements EntityFieldD
|
|||
return consumer.apply(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <K, EF extends java.lang.Enum<? extends org.keycloak.models.map.common.EntityField<org.keycloak.models.map.role.MapRoleEntity>> & org.keycloak.models.map.common.EntityField<org.keycloak.models.map.role.MapRoleEntity>> Object mapGet(EF field, K key) {
|
||||
throw new UnsupportedOperationException("Not supported yet.");
|
||||
}
|
||||
|
||||
@Override
|
||||
public <K, T, EF extends java.lang.Enum<? extends org.keycloak.models.map.common.EntityField<org.keycloak.models.map.role.MapRoleEntity>> & org.keycloak.models.map.common.EntityField<org.keycloak.models.map.role.MapRoleEntity>> void mapPut(EF field, K key, T value) {
|
||||
throw new UnsupportedOperationException("Not supported yet.");
|
||||
}
|
||||
|
||||
@Override
|
||||
public <K, EF extends java.lang.Enum<? extends org.keycloak.models.map.common.EntityField<org.keycloak.models.map.role.MapRoleEntity>> & org.keycloak.models.map.common.EntityField<org.keycloak.models.map.role.MapRoleEntity>> Object mapRemove(EF field, K key) {
|
||||
throw new UnsupportedOperationException("Not supported yet.");
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -23,6 +23,7 @@ import org.keycloak.models.KeycloakSession;
|
|||
import org.keycloak.models.ModelDuplicateException;
|
||||
import org.keycloak.models.RealmModel;
|
||||
import org.keycloak.models.map.common.DeepCloner;
|
||||
import org.keycloak.models.map.common.HasRealmId;
|
||||
import org.keycloak.models.map.common.TimeAdapter;
|
||||
import org.keycloak.models.map.storage.MapKeycloakTransaction;
|
||||
import org.keycloak.models.map.storage.MapStorage;
|
||||
|
@ -54,6 +55,7 @@ public class MapRootAuthenticationSessionProvider implements AuthenticationSessi
|
|||
private final KeycloakSession session;
|
||||
protected final MapKeycloakTransaction<MapRootAuthenticationSessionEntity, RootAuthenticationSessionModel> tx;
|
||||
private int authSessionsLimit;
|
||||
private final boolean txHasRealmId;
|
||||
|
||||
public MapRootAuthenticationSessionProvider(KeycloakSession session,
|
||||
MapStorage<MapRootAuthenticationSessionEntity, RootAuthenticationSessionModel> sessionStore,
|
||||
|
@ -63,12 +65,13 @@ public class MapRootAuthenticationSessionProvider implements AuthenticationSessi
|
|||
this.authSessionsLimit = authSessionsLimit;
|
||||
|
||||
session.getTransactionManager().enlistAfterCompletion(tx);
|
||||
this.txHasRealmId = tx instanceof HasRealmId;
|
||||
}
|
||||
|
||||
private Function<MapRootAuthenticationSessionEntity, RootAuthenticationSessionModel> entityToAdapterFunc(RealmModel realm) {
|
||||
return origEntity -> {
|
||||
if (isExpired(origEntity, true)) {
|
||||
tx.delete(origEntity.getId());
|
||||
txInRealm(realm).delete(origEntity.getId());
|
||||
return null;
|
||||
} else {
|
||||
return new MapRootAuthenticationSessionAdapter(session, realm, origEntity, authSessionsLimit);
|
||||
|
@ -76,6 +79,13 @@ public class MapRootAuthenticationSessionProvider implements AuthenticationSessi
|
|||
};
|
||||
}
|
||||
|
||||
private MapKeycloakTransaction<MapRootAuthenticationSessionEntity, RootAuthenticationSessionModel> txInRealm(RealmModel realm) {
|
||||
if (txHasRealmId) {
|
||||
((HasRealmId) tx).setRealmId(realm == null ? null : realm.getId());
|
||||
}
|
||||
return tx;
|
||||
}
|
||||
|
||||
private Predicate<MapRootAuthenticationSessionEntity> entityRealmFilter(String realmId) {
|
||||
if (realmId == null) {
|
||||
return c -> false;
|
||||
|
@ -105,11 +115,11 @@ public class MapRootAuthenticationSessionProvider implements AuthenticationSessi
|
|||
int authSessionLifespanSeconds = getAuthSessionLifespan(realm);
|
||||
entity.setExpiration(timestamp + TimeAdapter.fromSecondsToMilliseconds(authSessionLifespanSeconds));
|
||||
|
||||
if (id != null && tx.exists(id)) {
|
||||
if (id != null && txInRealm(realm).exists(id)) {
|
||||
throw new ModelDuplicateException("Root authentication session exists: " + entity.getId());
|
||||
}
|
||||
|
||||
entity = tx.create(entity);
|
||||
entity = txInRealm(realm).create(entity);
|
||||
|
||||
return entityToAdapterFunc(realm).apply(entity);
|
||||
}
|
||||
|
@ -123,7 +133,7 @@ public class MapRootAuthenticationSessionProvider implements AuthenticationSessi
|
|||
|
||||
LOG.tracef("getRootAuthenticationSession(%s, %s)%s", realm.getName(), authenticationSessionId, getShortStackTrace());
|
||||
|
||||
MapRootAuthenticationSessionEntity entity = tx.read(authenticationSessionId);
|
||||
MapRootAuthenticationSessionEntity entity = txInRealm(realm).read(authenticationSessionId);
|
||||
return (entity == null || !entityRealmFilter(realm.getId()).test(entity))
|
||||
? null
|
||||
: entityToAdapterFunc(realm).apply(entity);
|
||||
|
@ -132,7 +142,7 @@ public class MapRootAuthenticationSessionProvider implements AuthenticationSessi
|
|||
@Override
|
||||
public void removeRootAuthenticationSession(RealmModel realm, RootAuthenticationSessionModel authenticationSession) {
|
||||
Objects.requireNonNull(authenticationSession, "The provided root authentication session can't be null!");
|
||||
tx.delete(authenticationSession.getId());
|
||||
txInRealm(realm).delete(authenticationSession.getId());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -153,7 +163,7 @@ public class MapRootAuthenticationSessionProvider implements AuthenticationSessi
|
|||
DefaultModelCriteria<RootAuthenticationSessionModel> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId());
|
||||
|
||||
tx.delete(withCriteria(mcb));
|
||||
txInRealm(realm).delete(withCriteria(mcb));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -116,12 +116,14 @@ public class MapAuthorizationStoreFactory implements AmphibianProviderFactory<St
|
|||
authorizationStore.getResourceServerStore().preRemove(realm);
|
||||
} else if (type == RESOURCE_SERVER_BEFORE_REMOVE) {
|
||||
MapAuthorizationStore authorizationStore = (MapAuthorizationStore) session.getProvider(StoreFactory.class);
|
||||
ResourceServer resourceServer = (ResourceServer) params[0];
|
||||
RealmModel realm = (RealmModel) params[0];
|
||||
ResourceServer resourceServer = (ResourceServer) params[1];
|
||||
|
||||
authorizationStore.getScopeStore().preRemove(resourceServer);
|
||||
authorizationStore.getPolicyStore().preRemove(resourceServer);
|
||||
authorizationStore.getResourceStore().preRemove(resourceServer);
|
||||
authorizationStore.getPermissionTicketStore().preRemove(resourceServer);
|
||||
|
||||
authorizationStore.getScopeStore().preRemove(realm, resourceServer);
|
||||
authorizationStore.getPolicyStore().preRemove(realm, resourceServer);
|
||||
authorizationStore.getResourceStore().preRemove(realm, resourceServer);
|
||||
authorizationStore.getPermissionTicketStore().preRemove(realm, resourceServer);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -35,6 +35,7 @@ import org.keycloak.models.RealmModel;
|
|||
import org.keycloak.models.map.authorization.adapter.MapPermissionTicketAdapter;
|
||||
import org.keycloak.models.map.authorization.entity.MapPermissionTicketEntity;
|
||||
import org.keycloak.models.map.common.DeepCloner;
|
||||
import org.keycloak.models.map.common.HasRealmId;
|
||||
import org.keycloak.models.map.storage.MapKeycloakTransaction;
|
||||
import org.keycloak.models.map.storage.MapStorage;
|
||||
import org.keycloak.models.map.storage.ModelCriteriaBuilder.Operator;
|
||||
|
@ -60,17 +61,26 @@ public class MapPermissionTicketStore implements PermissionTicketStore {
|
|||
private static final Logger LOG = Logger.getLogger(MapPermissionTicketStore.class);
|
||||
private final AuthorizationProvider authorizationProvider;
|
||||
final MapKeycloakTransaction<MapPermissionTicketEntity, PermissionTicket> tx;
|
||||
private final boolean txHasRealmId;
|
||||
|
||||
public MapPermissionTicketStore(KeycloakSession session, MapStorage<MapPermissionTicketEntity, PermissionTicket> permissionTicketStore, AuthorizationProvider provider) {
|
||||
this.authorizationProvider = provider;
|
||||
this.tx = permissionTicketStore.createTransaction(session);
|
||||
session.getTransactionManager().enlist(tx);
|
||||
this.txHasRealmId = tx instanceof HasRealmId;
|
||||
}
|
||||
|
||||
private Function<MapPermissionTicketEntity, PermissionTicket> entityToAdapterFunc(RealmModel realm, ResourceServer resourceServer) {
|
||||
return origEntity -> new MapPermissionTicketAdapter(realm, resourceServer, origEntity, authorizationProvider.getStoreFactory());
|
||||
}
|
||||
|
||||
private MapKeycloakTransaction<MapPermissionTicketEntity, PermissionTicket> txInRealm(RealmModel realm) {
|
||||
if (txHasRealmId) {
|
||||
((HasRealmId) tx).setRealmId(realm == null ? null : realm.getId());
|
||||
}
|
||||
return tx;
|
||||
}
|
||||
|
||||
private DefaultModelCriteria<PermissionTicket> forRealmAndResourceServer(RealmModel realm, ResourceServer resourceServer) {
|
||||
final DefaultModelCriteria<PermissionTicket> mcb = DefaultModelCriteria.<PermissionTicket>criteria()
|
||||
.compare(PermissionTicket.SearchableFields.REALM_ID, Operator.EQ, realm.getId());
|
||||
|
@ -88,8 +98,9 @@ public class MapPermissionTicketStore implements PermissionTicketStore {
|
|||
.map(this::filterEntryToDefaultModelCriteria)
|
||||
.toArray(DefaultModelCriteria[]::new)
|
||||
);
|
||||
RealmModel realm = resourceServer.getRealm();
|
||||
|
||||
return tx.getCount(withCriteria(mcb));
|
||||
return txInRealm(realm).getCount(withCriteria(mcb));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -110,7 +121,7 @@ public class MapPermissionTicketStore implements PermissionTicketStore {
|
|||
mcb = mcb.compare(SearchableFields.SCOPE_ID, Operator.EQ, scope.getId());
|
||||
}
|
||||
|
||||
if (tx.exists(withCriteria(mcb))) {
|
||||
if (txInRealm(realm).exists(withCriteria(mcb))) {
|
||||
throw new ModelDuplicateException("Permission ticket for resource server: '" + resourceServer.getId()
|
||||
+ ", Resource: " + resource + ", owner: " + owner + ", scopeId: " + scope + " already exists.");
|
||||
}
|
||||
|
@ -128,7 +139,7 @@ public class MapPermissionTicketStore implements PermissionTicketStore {
|
|||
entity.setResourceServerId(resourceServer.getId());
|
||||
entity.setRealmId(realm.getId());
|
||||
|
||||
entity = tx.create(entity);
|
||||
entity = txInRealm(realm).create(entity);
|
||||
|
||||
return entity == null ? null : entityToAdapterFunc(realm, resourceServer).apply(entity);
|
||||
}
|
||||
|
@ -140,7 +151,7 @@ public class MapPermissionTicketStore implements PermissionTicketStore {
|
|||
PermissionTicket permissionTicket = findById(realm, null, id);
|
||||
if (permissionTicket == null) return;
|
||||
|
||||
tx.delete(id);
|
||||
txInRealm(realm).delete(id);
|
||||
UserManagedPermissionUtil.removePolicy(permissionTicket, authorizationProvider.getStoreFactory());
|
||||
}
|
||||
|
||||
|
@ -150,7 +161,7 @@ public class MapPermissionTicketStore implements PermissionTicketStore {
|
|||
|
||||
if (id == null) return null;
|
||||
|
||||
return tx.read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
return txInRealm(realm).read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
.compare(SearchableFields.ID, Operator.EQ, id)))
|
||||
.findFirst()
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
|
@ -163,7 +174,7 @@ public class MapPermissionTicketStore implements PermissionTicketStore {
|
|||
|
||||
RealmModel realm = resourceServer.getRealm();
|
||||
|
||||
return tx.read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
return txInRealm(realm).read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
.compare(SearchableFields.RESOURCE_ID, Operator.EQ, resource.getId())))
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
.collect(Collectors.toList());
|
||||
|
@ -175,7 +186,7 @@ public class MapPermissionTicketStore implements PermissionTicketStore {
|
|||
|
||||
RealmModel realm = resourceServer.getRealm();
|
||||
|
||||
return tx.read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
return txInRealm(realm).read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
.compare(SearchableFields.SCOPE_ID, Operator.EQ, scope.getId())))
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
.collect(Collectors.toList());
|
||||
|
@ -205,7 +216,7 @@ public class MapPermissionTicketStore implements PermissionTicketStore {
|
|||
.toArray(DefaultModelCriteria[]::new)
|
||||
);
|
||||
|
||||
return tx.read(withCriteria(mcb).pagination(firstResult, maxResult, SearchableFields.ID))
|
||||
return txInRealm(realm).read(withCriteria(mcb).pagination(firstResult, maxResult, SearchableFields.ID))
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
@ -288,7 +299,7 @@ public class MapPermissionTicketStore implements PermissionTicketStore {
|
|||
.findById(realm, resourceServerStore.findById(realm, ticket.getResourceServerId()), ticket.getResourceId());
|
||||
}
|
||||
|
||||
return paginatedStream(tx.read(withCriteria(mcb).orderBy(SearchableFields.RESOURCE_ID, ASCENDING))
|
||||
return paginatedStream(txInRealm(realm).read(withCriteria(mcb).orderBy(SearchableFields.RESOURCE_ID, ASCENDING))
|
||||
.filter(distinctByKey(MapPermissionTicketEntity::getResourceId))
|
||||
.map(ticketResourceMapper)
|
||||
.filter(Objects::nonNull), first, max)
|
||||
|
@ -304,7 +315,7 @@ public class MapPermissionTicketStore implements PermissionTicketStore {
|
|||
ResourceStore resourceStore = authorizationProvider.getStoreFactory().getResourceStore();
|
||||
ResourceServerStore resourceServerStore = authorizationProvider.getStoreFactory().getResourceServerStore();
|
||||
|
||||
return paginatedStream(tx.read(withCriteria(mcb).orderBy(SearchableFields.RESOURCE_ID, ASCENDING))
|
||||
return paginatedStream(txInRealm(realm).read(withCriteria(mcb).orderBy(SearchableFields.RESOURCE_ID, ASCENDING))
|
||||
.filter(distinctByKey(MapPermissionTicketEntity::getResourceId)), firstResult, maxResults)
|
||||
.map(ticket -> resourceStore.findById(realm, resourceServerStore.findById(realm, ticket.getResourceServerId()), ticket.getResourceId()))
|
||||
.collect(Collectors.toList());
|
||||
|
@ -316,12 +327,12 @@ public class MapPermissionTicketStore implements PermissionTicketStore {
|
|||
DefaultModelCriteria<PermissionTicket> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId());
|
||||
|
||||
tx.delete(withCriteria(mcb));
|
||||
txInRealm(realm).delete(withCriteria(mcb));
|
||||
}
|
||||
|
||||
public void preRemove(ResourceServer resourceServer) {
|
||||
LOG.tracef("preRemove(%s)%s", resourceServer, getShortStackTrace());
|
||||
public void preRemove(RealmModel realm, ResourceServer resourceServer) {
|
||||
LOG.tracef("preRemove(%s, %s)%s", realm, resourceServer, getShortStackTrace());
|
||||
|
||||
tx.delete(withCriteria(forRealmAndResourceServer(resourceServer.getRealm(), resourceServer)));
|
||||
txInRealm(realm).delete(withCriteria(forRealmAndResourceServer(resourceServer.getRealm(), resourceServer)));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -31,6 +31,7 @@ import org.keycloak.models.RealmModel;
|
|||
import org.keycloak.models.map.authorization.adapter.MapPolicyAdapter;
|
||||
import org.keycloak.models.map.authorization.entity.MapPolicyEntity;
|
||||
import org.keycloak.models.map.common.DeepCloner;
|
||||
import org.keycloak.models.map.common.HasRealmId;
|
||||
import org.keycloak.models.map.storage.MapKeycloakTransaction;
|
||||
import org.keycloak.models.map.storage.MapStorage;
|
||||
import org.keycloak.models.map.storage.ModelCriteriaBuilder.Operator;
|
||||
|
@ -54,17 +55,26 @@ public class MapPolicyStore implements PolicyStore {
|
|||
private static final Logger LOG = Logger.getLogger(MapPolicyStore.class);
|
||||
private final AuthorizationProvider authorizationProvider;
|
||||
final MapKeycloakTransaction<MapPolicyEntity, Policy> tx;
|
||||
private final boolean txHasRealmId;
|
||||
|
||||
public MapPolicyStore(KeycloakSession session, MapStorage<MapPolicyEntity, Policy> policyStore, AuthorizationProvider provider) {
|
||||
this.authorizationProvider = provider;
|
||||
this.tx = policyStore.createTransaction(session);
|
||||
session.getTransactionManager().enlist(tx);
|
||||
this.txHasRealmId = tx instanceof HasRealmId;
|
||||
}
|
||||
|
||||
private Function<MapPolicyEntity, Policy> entityToAdapterFunc(RealmModel realm, ResourceServer resourceServer) {
|
||||
return origEntity -> new MapPolicyAdapter(realm, resourceServer, origEntity, authorizationProvider.getStoreFactory());
|
||||
}
|
||||
|
||||
private MapKeycloakTransaction<MapPolicyEntity, Policy> txInRealm(RealmModel realm) {
|
||||
if (txHasRealmId) {
|
||||
((HasRealmId) tx).setRealmId(realm == null ? null : realm.getId());
|
||||
}
|
||||
return tx;
|
||||
}
|
||||
|
||||
private DefaultModelCriteria<Policy> forRealmAndResourceServer(RealmModel realm, ResourceServer resourceServer) {
|
||||
DefaultModelCriteria<Policy> mcb = DefaultModelCriteria.<Policy>criteria()
|
||||
.compare(Policy.SearchableFields.REALM_ID, Operator.EQ, realm.getId());
|
||||
|
@ -84,7 +94,7 @@ public class MapPolicyStore implements PolicyStore {
|
|||
DefaultModelCriteria<Policy> mcb = forRealmAndResourceServer(realm, resourceServer)
|
||||
.compare(SearchableFields.NAME, Operator.EQ, representation.getName());
|
||||
|
||||
if (tx.exists(withCriteria(mcb))) {
|
||||
if (txInRealm(realm).exists(withCriteria(mcb))) {
|
||||
throw new ModelDuplicateException("Policy with name '" + representation.getName() + "' for " + resourceServer.getId() + " already exists");
|
||||
}
|
||||
|
||||
|
@ -96,7 +106,7 @@ public class MapPolicyStore implements PolicyStore {
|
|||
entity.setResourceServerId(resourceServer.getId());
|
||||
entity.setRealmId(resourceServer.getRealm().getId());
|
||||
|
||||
entity = tx.create(entity);
|
||||
entity = txInRealm(realm).create(entity);
|
||||
|
||||
return entity == null ? null : entityToAdapterFunc(realm, resourceServer).apply(entity);
|
||||
}
|
||||
|
@ -108,7 +118,7 @@ public class MapPolicyStore implements PolicyStore {
|
|||
Policy policyEntity = findById(realm, null, id);
|
||||
if (policyEntity == null) return;
|
||||
|
||||
tx.delete(id);
|
||||
txInRealm(realm).delete(id);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -117,7 +127,7 @@ public class MapPolicyStore implements PolicyStore {
|
|||
|
||||
if (id == null) return null;
|
||||
|
||||
return tx.read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
return txInRealm(realm).read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
.compare(SearchableFields.ID, Operator.EQ, id)))
|
||||
.findFirst()
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
|
@ -129,7 +139,7 @@ public class MapPolicyStore implements PolicyStore {
|
|||
LOG.tracef("findByName(%s, %s)%s", name, resourceServer, getShortStackTrace());
|
||||
RealmModel realm = resourceServer.getRealm();
|
||||
|
||||
return tx.read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
return txInRealm(realm).read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
.compare(SearchableFields.NAME, Operator.EQ, name)))
|
||||
.findFirst()
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
|
@ -141,7 +151,7 @@ public class MapPolicyStore implements PolicyStore {
|
|||
LOG.tracef("findByResourceServer(%s)%s", resourceServer, getShortStackTrace());
|
||||
RealmModel realm = resourceServer.getRealm();
|
||||
|
||||
return tx.read(withCriteria(forRealmAndResourceServer(realm, resourceServer)))
|
||||
return txInRealm(realm).read(withCriteria(forRealmAndResourceServer(realm, resourceServer)))
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
@ -161,7 +171,7 @@ public class MapPolicyStore implements PolicyStore {
|
|||
mcb = mcb.compare(SearchableFields.OWNER, Operator.NOT_EXISTS);
|
||||
}
|
||||
|
||||
return tx.read(withCriteria(mcb).pagination(firstResult, maxResults, SearchableFields.NAME))
|
||||
return txInRealm(realm).read(withCriteria(mcb).pagination(firstResult, maxResults, SearchableFields.NAME))
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
@ -208,7 +218,7 @@ public class MapPolicyStore implements PolicyStore {
|
|||
public void findByResource(ResourceServer resourceServer, Resource resource, Consumer<Policy> consumer) {
|
||||
LOG.tracef("findByResource(%s, %s, %s)%s", resourceServer, resource, consumer, getShortStackTrace());
|
||||
RealmModel realm = resourceServer.getRealm();
|
||||
tx.read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
txInRealm(realm).read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
.compare(SearchableFields.RESOURCE_ID, Operator.EQ, resource.getId())))
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
.forEach(consumer);
|
||||
|
@ -219,7 +229,7 @@ public class MapPolicyStore implements PolicyStore {
|
|||
LOG.tracef("findByResourceType(%s, %s)%s", resourceServer, type, getShortStackTrace());
|
||||
RealmModel realm = resourceServer.getRealm();
|
||||
|
||||
tx.read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
txInRealm(realm).read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
.compare(SearchableFields.CONFIG, Operator.LIKE, (Object[]) new String[]{"defaultResourceType", type})))
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
.forEach(policyConsumer);
|
||||
|
@ -230,7 +240,7 @@ public class MapPolicyStore implements PolicyStore {
|
|||
LOG.tracef("findByScopes(%s, %s)%s", resourceServer, scopes, getShortStackTrace());
|
||||
RealmModel realm = resourceServer.getRealm();
|
||||
|
||||
return tx.read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
return txInRealm(realm).read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
.compare(SearchableFields.SCOPE_ID, Operator.IN, scopes.stream().map(Scope::getId))))
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
.collect(Collectors.toList());
|
||||
|
@ -252,7 +262,7 @@ public class MapPolicyStore implements PolicyStore {
|
|||
.compare(SearchableFields.CONFIG, Operator.NOT_EXISTS, (Object[]) new String[] {"defaultResourceType"});
|
||||
}
|
||||
|
||||
tx.read(withCriteria(mcb)).map(entityToAdapterFunc(realm, resourceServer)).forEach(consumer);
|
||||
txInRealm(realm).read(withCriteria(mcb)).map(entityToAdapterFunc(realm, resourceServer)).forEach(consumer);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -260,7 +270,7 @@ public class MapPolicyStore implements PolicyStore {
|
|||
LOG.tracef("findByType(%s, %s)%s", resourceServer, type, getShortStackTrace());
|
||||
RealmModel realm = resourceServer.getRealm();
|
||||
|
||||
return tx.read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
return txInRealm(realm).read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
.compare(SearchableFields.TYPE, Operator.EQ, type)))
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
.collect(Collectors.toList());
|
||||
|
@ -269,7 +279,7 @@ public class MapPolicyStore implements PolicyStore {
|
|||
@Override
|
||||
public List<Policy> findDependentPolicies(ResourceServer resourceServer, String id) {
|
||||
RealmModel realm = resourceServer.getRealm();
|
||||
return tx.read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
return txInRealm(realm).read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
.compare(SearchableFields.ASSOCIATED_POLICY_ID, Operator.EQ, id)))
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
.collect(Collectors.toList());
|
||||
|
@ -281,12 +291,12 @@ public class MapPolicyStore implements PolicyStore {
|
|||
DefaultModelCriteria<Policy> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId());
|
||||
|
||||
tx.delete(withCriteria(mcb));
|
||||
txInRealm(realm).delete(withCriteria(mcb));
|
||||
}
|
||||
|
||||
public void preRemove(ResourceServer resourceServer) {
|
||||
LOG.tracef("preRemove(%s)%s", resourceServer, getShortStackTrace());
|
||||
public void preRemove(RealmModel realm, ResourceServer resourceServer) {
|
||||
LOG.tracef("preRemove(%s, %s)%s", realm, resourceServer, getShortStackTrace());
|
||||
|
||||
tx.delete(withCriteria(forRealmAndResourceServer(resourceServer.getRealm(), resourceServer)));
|
||||
txInRealm(realm).delete(withCriteria(forRealmAndResourceServer(resourceServer.getRealm(), resourceServer)));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -30,6 +30,7 @@ import org.keycloak.models.RealmModel;
|
|||
import org.keycloak.models.map.authorization.adapter.MapResourceServerAdapter;
|
||||
import org.keycloak.models.map.authorization.entity.MapResourceServerEntity;
|
||||
import org.keycloak.models.map.common.DeepCloner;
|
||||
import org.keycloak.models.map.common.HasRealmId;
|
||||
import org.keycloak.models.map.storage.MapKeycloakTransaction;
|
||||
import org.keycloak.models.map.storage.MapStorage;
|
||||
import org.keycloak.models.map.storage.ModelCriteriaBuilder.Operator;
|
||||
|
@ -50,17 +51,26 @@ public class MapResourceServerStore implements ResourceServerStore {
|
|||
private static final Logger LOG = Logger.getLogger(MapResourceServerStore.class);
|
||||
private final AuthorizationProvider authorizationProvider;
|
||||
final MapKeycloakTransaction<MapResourceServerEntity, ResourceServer> tx;
|
||||
private final boolean txHasRealmId;
|
||||
|
||||
public MapResourceServerStore(KeycloakSession session, MapStorage<MapResourceServerEntity, ResourceServer> resourceServerStore, AuthorizationProvider provider) {
|
||||
this.tx = resourceServerStore.createTransaction(session);
|
||||
this.authorizationProvider = provider;
|
||||
session.getTransactionManager().enlist(tx);
|
||||
this.txHasRealmId = tx instanceof HasRealmId;
|
||||
}
|
||||
|
||||
private Function<MapResourceServerEntity, ResourceServer> entityToAdapterFunc(RealmModel realmModel) {
|
||||
return origEntity -> new MapResourceServerAdapter(realmModel, origEntity, authorizationProvider.getStoreFactory());
|
||||
}
|
||||
|
||||
private MapKeycloakTransaction<MapResourceServerEntity, ResourceServer> txInRealm(RealmModel realm) {
|
||||
if (txHasRealmId) {
|
||||
((HasRealmId) tx).setRealmId(realm == null ? null : realm.getId());
|
||||
}
|
||||
return tx;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ResourceServer create(ClientModel client) {
|
||||
LOG.tracef("create(%s)%s", client.getClientId(), getShortStackTrace());
|
||||
|
@ -77,11 +87,12 @@ public class MapResourceServerStore implements ResourceServerStore {
|
|||
}
|
||||
|
||||
MapResourceServerEntity entity = DeepCloner.DUMB_CLONER.newInstance(MapResourceServerEntity.class);
|
||||
final RealmModel realm = client.getRealm();
|
||||
entity.setClientId(clientId);
|
||||
entity.setRealmId(client.getRealm().getId());
|
||||
entity.setRealmId(realm.getId());
|
||||
|
||||
entity = tx.create(entity);
|
||||
return entity == null ? null : entityToAdapterFunc(client.getRealm()).apply(entity);
|
||||
entity = txInRealm(realm).create(entity);
|
||||
return entity == null ? null : entityToAdapterFunc(realm).apply(entity);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -91,9 +102,10 @@ public class MapResourceServerStore implements ResourceServerStore {
|
|||
ResourceServer resourceServer = findByClient(client);
|
||||
if (resourceServer == null) return;
|
||||
|
||||
authorizationProvider.getKeycloakSession().invalidate(RESOURCE_SERVER_BEFORE_REMOVE, resourceServer);
|
||||
final RealmModel realm = client.getRealm();
|
||||
authorizationProvider.getKeycloakSession().invalidate(RESOURCE_SERVER_BEFORE_REMOVE, realm, resourceServer);
|
||||
|
||||
tx.delete(resourceServer.getId());
|
||||
txInRealm(realm).delete(resourceServer.getId());
|
||||
|
||||
authorizationProvider.getKeycloakSession().invalidate(RESOURCE_SERVER_AFTER_REMOVE, resourceServer);
|
||||
}
|
||||
|
@ -106,7 +118,7 @@ public class MapResourceServerStore implements ResourceServerStore {
|
|||
return null;
|
||||
}
|
||||
|
||||
MapResourceServerEntity entity = tx.read(id);
|
||||
MapResourceServerEntity entity = txInRealm(realm).read(id);
|
||||
return (entity == null || !Objects.equals(realm.getId(), entity.getRealmId())) ? null : entityToAdapterFunc(realm).apply(entity);
|
||||
}
|
||||
|
||||
|
@ -118,7 +130,8 @@ public class MapResourceServerStore implements ResourceServerStore {
|
|||
mcb = mcb.compare(SearchableFields.CLIENT_ID, Operator.EQ, client.getId());
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, client.getRealm().getId());
|
||||
|
||||
return tx.read(withCriteria(mcb))
|
||||
final RealmModel realm = client.getRealm();
|
||||
return txInRealm(realm).read(withCriteria(mcb))
|
||||
.map(entityToAdapterFunc(client.getRealm()))
|
||||
.findFirst()
|
||||
.orElse(null);
|
||||
|
@ -130,6 +143,6 @@ public class MapResourceServerStore implements ResourceServerStore {
|
|||
DefaultModelCriteria<ResourceServer> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId());
|
||||
|
||||
tx.delete(withCriteria(mcb));
|
||||
txInRealm(realm).delete(withCriteria(mcb));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -30,6 +30,7 @@ import org.keycloak.models.RealmModel;
|
|||
import org.keycloak.models.map.authorization.adapter.MapResourceAdapter;
|
||||
import org.keycloak.models.map.authorization.entity.MapResourceEntity;
|
||||
import org.keycloak.models.map.common.DeepCloner;
|
||||
import org.keycloak.models.map.common.HasRealmId;
|
||||
import org.keycloak.models.map.storage.MapKeycloakTransaction;
|
||||
import org.keycloak.models.map.storage.MapStorage;
|
||||
import org.keycloak.models.map.storage.ModelCriteriaBuilder.Operator;
|
||||
|
@ -53,18 +54,27 @@ public class MapResourceStore implements ResourceStore {
|
|||
private final AuthorizationProvider authorizationProvider;
|
||||
final MapKeycloakTransaction<MapResourceEntity, Resource> tx;
|
||||
private final KeycloakSession session;
|
||||
private final boolean txHasRealmId;
|
||||
|
||||
public MapResourceStore(KeycloakSession session, MapStorage<MapResourceEntity, Resource> resourceStore, AuthorizationProvider provider) {
|
||||
this.tx = resourceStore.createTransaction(session);
|
||||
session.getTransactionManager().enlist(tx);
|
||||
authorizationProvider = provider;
|
||||
this.session = session;
|
||||
this.txHasRealmId = tx instanceof HasRealmId;
|
||||
}
|
||||
|
||||
private Function<MapResourceEntity, Resource> entityToAdapterFunc(RealmModel realm, final ResourceServer resourceServer) {
|
||||
return origEntity -> new MapResourceAdapter(realm, resourceServer, origEntity, authorizationProvider.getStoreFactory());
|
||||
}
|
||||
|
||||
private MapKeycloakTransaction<MapResourceEntity, Resource> txInRealm(RealmModel realm) {
|
||||
if (txHasRealmId) {
|
||||
((HasRealmId) tx).setRealmId(realm == null ? null : realm.getId());
|
||||
}
|
||||
return tx;
|
||||
}
|
||||
|
||||
private DefaultModelCriteria<Resource> forRealmAndResourceServer(RealmModel realm, ResourceServer resourceServer) {
|
||||
DefaultModelCriteria<Resource> mcb = DefaultModelCriteria.<Resource>criteria()
|
||||
.compare(Resource.SearchableFields.REALM_ID, Operator.EQ, realm.getId());
|
||||
|
@ -85,7 +95,7 @@ public class MapResourceStore implements ResourceStore {
|
|||
.compare(SearchableFields.NAME, Operator.EQ, name)
|
||||
.compare(SearchableFields.OWNER, Operator.EQ, owner);
|
||||
|
||||
if (tx.exists(withCriteria(mcb))) {
|
||||
if (txInRealm(realm).exists(withCriteria(mcb))) {
|
||||
throw new ModelDuplicateException("Resource with name '" + name + "' for " + resourceServer.getId() + " already exists for request owner " + owner);
|
||||
}
|
||||
|
||||
|
@ -96,7 +106,7 @@ public class MapResourceStore implements ResourceStore {
|
|||
entity.setOwner(owner);
|
||||
entity.setRealmId(realm.getId());
|
||||
|
||||
entity = tx.create(entity);
|
||||
entity = txInRealm(realm).create(entity);
|
||||
|
||||
return entity == null ? null : entityToAdapterFunc(realm, resourceServer).apply(entity);
|
||||
}
|
||||
|
@ -107,7 +117,7 @@ public class MapResourceStore implements ResourceStore {
|
|||
Resource resource = findById(realm, null, id);
|
||||
if (resource == null) return;
|
||||
|
||||
tx.delete(id);
|
||||
txInRealm(realm).delete(id);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -116,7 +126,7 @@ public class MapResourceStore implements ResourceStore {
|
|||
|
||||
if (id == null) return null;
|
||||
|
||||
return tx.read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
return txInRealm(realm).read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
.compare(SearchableFields.ID, Operator.EQ, id)))
|
||||
.findFirst()
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
|
@ -127,7 +137,7 @@ public class MapResourceStore implements ResourceStore {
|
|||
public void findByOwner(RealmModel realm, ResourceServer resourceServer, String ownerId, Consumer<Resource> consumer) {
|
||||
LOG.tracef("findByOwner(%s, %s, %s)%s", realm, resourceServer, resourceServer, ownerId, getShortStackTrace());
|
||||
|
||||
tx.read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
txInRealm(realm).read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
.compare(SearchableFields.OWNER, Operator.EQ, ownerId)))
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
.forEach(consumer);
|
||||
|
@ -138,7 +148,7 @@ public class MapResourceStore implements ResourceStore {
|
|||
LOG.tracef("findByResourceServer(%s)%s", resourceServer, getShortStackTrace());
|
||||
RealmModel realm = resourceServer.getRealm();
|
||||
|
||||
return tx.read(withCriteria(forRealmAndResourceServer(realm, resourceServer)))
|
||||
return txInRealm(realm).read(withCriteria(forRealmAndResourceServer(realm, resourceServer)))
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
@ -152,7 +162,7 @@ public class MapResourceStore implements ResourceStore {
|
|||
.toArray(DefaultModelCriteria[]::new)
|
||||
);
|
||||
|
||||
return tx.read(withCriteria(mcb).pagination(firstResult, maxResults, SearchableFields.NAME))
|
||||
return txInRealm(realm).read(withCriteria(mcb).pagination(firstResult, maxResults, SearchableFields.NAME))
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
@ -189,7 +199,7 @@ public class MapResourceStore implements ResourceStore {
|
|||
LOG.tracef("findByScope(%s, %s, %s)%s", scopes, resourceServer, consumer, getShortStackTrace());
|
||||
RealmModel realm = resourceServer.getRealm();
|
||||
|
||||
tx.read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
txInRealm(realm).read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
.compare(SearchableFields.SCOPE_ID, Operator.IN, scopes.stream().map(Scope::getId))))
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
.forEach(consumer);
|
||||
|
@ -200,7 +210,7 @@ public class MapResourceStore implements ResourceStore {
|
|||
LOG.tracef("findByName(%s, %s, %s)%s", name, ownerId, resourceServer, getShortStackTrace());
|
||||
RealmModel realm = resourceServer.getRealm();
|
||||
|
||||
return tx.read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
return txInRealm(realm).read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
.compare(SearchableFields.OWNER, Operator.EQ, ownerId)
|
||||
.compare(SearchableFields.NAME, Operator.EQ, name)))
|
||||
.findFirst()
|
||||
|
@ -213,7 +223,7 @@ public class MapResourceStore implements ResourceStore {
|
|||
LOG.tracef("findByType(%s, %s, %s)%s", type, resourceServer, consumer, getShortStackTrace());
|
||||
RealmModel realm = authorizationProvider.getRealm();
|
||||
|
||||
tx.read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
txInRealm(realm).read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
.compare(SearchableFields.TYPE, Operator.EQ, type)))
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
.forEach(consumer);
|
||||
|
@ -231,7 +241,7 @@ public class MapResourceStore implements ResourceStore {
|
|||
mcb = mcb.compare(SearchableFields.OWNER, Operator.EQ, owner);
|
||||
}
|
||||
|
||||
tx.read(withCriteria(mcb))
|
||||
txInRealm(realm).read(withCriteria(mcb))
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
.forEach(consumer);
|
||||
}
|
||||
|
@ -240,7 +250,7 @@ public class MapResourceStore implements ResourceStore {
|
|||
public void findByTypeInstance(ResourceServer resourceServer, String type, Consumer<Resource> consumer) {
|
||||
LOG.tracef("findByTypeInstance(%s, %s, %s)%s", type, resourceServer, consumer, getShortStackTrace());
|
||||
RealmModel realm = resourceServer.getRealm();
|
||||
tx.read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
txInRealm(realm).read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
.compare(SearchableFields.OWNER, Operator.NE, resourceServer.getClientId())
|
||||
.compare(SearchableFields.TYPE, Operator.EQ, type)))
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
|
@ -253,12 +263,12 @@ public class MapResourceStore implements ResourceStore {
|
|||
DefaultModelCriteria<Resource> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId());
|
||||
|
||||
tx.delete(withCriteria(mcb));
|
||||
txInRealm(realm).delete(withCriteria(mcb));
|
||||
}
|
||||
|
||||
public void preRemove(ResourceServer resourceServer) {
|
||||
LOG.tracef("preRemove(%s)%s", resourceServer, getShortStackTrace());
|
||||
public void preRemove(RealmModel realm, ResourceServer resourceServer) {
|
||||
LOG.tracef("preRemove(%s, %s)%s", realm, resourceServer, getShortStackTrace());
|
||||
|
||||
tx.delete(withCriteria(forRealmAndResourceServer(resourceServer.getRealm(), resourceServer)));
|
||||
txInRealm(realm).delete(withCriteria(forRealmAndResourceServer(resourceServer.getRealm(), resourceServer)));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -29,6 +29,7 @@ import org.keycloak.models.RealmModel;
|
|||
import org.keycloak.models.map.authorization.adapter.MapScopeAdapter;
|
||||
import org.keycloak.models.map.authorization.entity.MapScopeEntity;
|
||||
import org.keycloak.models.map.common.DeepCloner;
|
||||
import org.keycloak.models.map.common.HasRealmId;
|
||||
import org.keycloak.models.map.storage.MapKeycloakTransaction;
|
||||
import org.keycloak.models.map.storage.MapStorage;
|
||||
import org.keycloak.models.map.storage.ModelCriteriaBuilder.Operator;
|
||||
|
@ -50,18 +51,27 @@ public class MapScopeStore implements ScopeStore {
|
|||
private final AuthorizationProvider authorizationProvider;
|
||||
final MapKeycloakTransaction<MapScopeEntity, Scope> tx;
|
||||
private final KeycloakSession session;
|
||||
private final boolean txHasRealmId;
|
||||
|
||||
public MapScopeStore(KeycloakSession session, MapStorage<MapScopeEntity, Scope> scopeStore, AuthorizationProvider provider) {
|
||||
this.authorizationProvider = provider;
|
||||
this.tx = scopeStore.createTransaction(session);
|
||||
session.getTransactionManager().enlist(tx);
|
||||
this.session = session;
|
||||
this.txHasRealmId = tx instanceof HasRealmId;
|
||||
}
|
||||
|
||||
private Function<MapScopeEntity, Scope> entityToAdapterFunc(RealmModel realm, ResourceServer resourceServer) {
|
||||
return origEntity -> new MapScopeAdapter(realm, resourceServer, origEntity, authorizationProvider.getStoreFactory());
|
||||
}
|
||||
|
||||
/**
 * Returns the transaction to be used for operations in the given realm.
 * For realm-aware transactions ({@link HasRealmId}) the realm ID is updated to
 * the given realm's ID before the transaction is returned; a {@code null}
 * realm clears the realm binding.
 * @param realm Realm to scope the transaction to; may be {@code null}
 * @return The (possibly realm-scoped) transaction
 */
private MapKeycloakTransaction<MapScopeEntity, Scope> txInRealm(RealmModel realm) {
    if (txHasRealmId) {
        ((HasRealmId) tx).setRealmId(realm == null ? null : realm.getId());
    }
    return tx;
}
|
||||
|
||||
private DefaultModelCriteria<Scope> forRealmAndResourceServer(RealmModel realm, ResourceServer resourceServer) {
|
||||
DefaultModelCriteria<Scope> mcb = DefaultModelCriteria.<Scope>criteria()
|
||||
.compare(Scope.SearchableFields.REALM_ID, Operator.EQ, realm.getId());
|
||||
|
@ -81,7 +91,7 @@ public class MapScopeStore implements ScopeStore {
|
|||
DefaultModelCriteria<Scope> mcb = forRealmAndResourceServer(realm, resourceServer)
|
||||
.compare(SearchableFields.NAME, Operator.EQ, name);
|
||||
|
||||
if (tx.exists(withCriteria(mcb))) {
|
||||
if (txInRealm(realm).exists(withCriteria(mcb))) {
|
||||
throw new ModelDuplicateException("Scope with name '" + name + "' for " + resourceServer.getId() + " already exists");
|
||||
}
|
||||
|
||||
|
@ -91,7 +101,7 @@ public class MapScopeStore implements ScopeStore {
|
|||
entity.setResourceServerId(resourceServer.getId());
|
||||
entity.setRealmId(resourceServer.getRealm().getId());
|
||||
|
||||
entity = tx.create(entity);
|
||||
entity = txInRealm(realm).create(entity);
|
||||
|
||||
return entity == null ? null : entityToAdapterFunc(realm, resourceServer).apply(entity);
|
||||
}
|
||||
|
@ -102,7 +112,7 @@ public class MapScopeStore implements ScopeStore {
|
|||
Scope scope = findById(realm, null, id);
|
||||
if (scope == null) return;
|
||||
|
||||
tx.delete(id);
|
||||
txInRealm(realm).delete(id);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -111,7 +121,7 @@ public class MapScopeStore implements ScopeStore {
|
|||
|
||||
if (id == null) return null;
|
||||
|
||||
return tx.read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
return txInRealm(realm).read(withCriteria(forRealmAndResourceServer(realm, resourceServer)
|
||||
.compare(SearchableFields.ID, Operator.EQ, id)))
|
||||
.findFirst()
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
|
@ -123,7 +133,7 @@ public class MapScopeStore implements ScopeStore {
|
|||
LOG.tracef("findByName(%s, %s)%s", name, resourceServer, getShortStackTrace());
|
||||
RealmModel realm = resourceServer.getRealm();
|
||||
|
||||
return tx.read(withCriteria(forRealmAndResourceServer(realm, resourceServer).compare(SearchableFields.NAME,
|
||||
return txInRealm(realm).read(withCriteria(forRealmAndResourceServer(realm, resourceServer).compare(SearchableFields.NAME,
|
||||
Operator.EQ, name)))
|
||||
.findFirst()
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
|
@ -134,7 +144,7 @@ public class MapScopeStore implements ScopeStore {
|
|||
public List<Scope> findByResourceServer(ResourceServer resourceServer) {
|
||||
LOG.tracef("findByResourceServer(%s)%s", resourceServer, getShortStackTrace());
|
||||
RealmModel realm = resourceServer.getRealm();
|
||||
return tx.read(withCriteria(forRealmAndResourceServer(realm, resourceServer)))
|
||||
return txInRealm(realm).read(withCriteria(forRealmAndResourceServer(realm, resourceServer)))
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
@ -159,7 +169,7 @@ public class MapScopeStore implements ScopeStore {
|
|||
}
|
||||
}
|
||||
|
||||
return tx.read(withCriteria(mcb).pagination(firstResult, maxResults, SearchableFields.NAME))
|
||||
return txInRealm(realm).read(withCriteria(mcb).pagination(firstResult, maxResults, SearchableFields.NAME))
|
||||
.map(entityToAdapterFunc(realm, resourceServer))
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
@ -170,12 +180,12 @@ public class MapScopeStore implements ScopeStore {
|
|||
DefaultModelCriteria<Scope> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId());
|
||||
|
||||
tx.delete(withCriteria(mcb));
|
||||
txInRealm(realm).delete(withCriteria(mcb));
|
||||
}
|
||||
|
||||
public void preRemove(ResourceServer resourceServer) {
|
||||
LOG.tracef("preRemove(%s)%s", resourceServer, getShortStackTrace());
|
||||
public void preRemove(RealmModel realm, ResourceServer resourceServer) {
|
||||
LOG.tracef("preRemove(%s, %s)%s", realm, resourceServer, getShortStackTrace());
|
||||
|
||||
tx.delete(withCriteria(forRealmAndResourceServer(resourceServer.getRealm(), resourceServer)));
|
||||
txInRealm(realm).delete(withCriteria(forRealmAndResourceServer(resourceServer.getRealm(), resourceServer)));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -40,6 +40,7 @@ import org.keycloak.models.ModelDuplicateException;
|
|||
import org.keycloak.models.RealmModel;
|
||||
import org.keycloak.models.RoleModel;
|
||||
import org.keycloak.models.map.common.DeepCloner;
|
||||
import org.keycloak.models.map.common.HasRealmId;
|
||||
import org.keycloak.models.map.common.TimeAdapter;
|
||||
import org.keycloak.models.map.storage.MapKeycloakTransaction;
|
||||
import org.keycloak.models.map.storage.ModelCriteriaBuilder.Operator;
|
||||
|
@ -59,12 +60,14 @@ public class MapClientProvider implements ClientProvider {
|
|||
private final KeycloakSession session;
|
||||
final MapKeycloakTransaction<MapClientEntity, ClientModel> tx;
|
||||
private final ConcurrentMap<String, ConcurrentMap<String, Long>> clientRegisteredNodesStore;
|
||||
private final boolean txHasRealmId;
|
||||
|
||||
/**
 * Creates a client provider backed by the given map storage.
 * The storage transaction is created eagerly and enlisted with the session's
 * transaction manager so it participates in the session lifecycle.
 * @param session Session this provider operates in
 * @param clientStore Underlying map storage for client entities
 * @param clientRegisteredNodesStore Shared concurrent map keyed by client;
 *        presumably tracks registered cluster node timestamps per client -- confirm with callers
 */
public MapClientProvider(KeycloakSession session, MapStorage<MapClientEntity, ClientModel> clientStore, ConcurrentMap<String, ConcurrentMap<String, Long>> clientRegisteredNodesStore) {
    this.session = session;
    this.clientRegisteredNodesStore = clientRegisteredNodesStore;
    this.tx = clientStore.createTransaction(session);
    session.getTransactionManager().enlist(tx);
    // Remember whether the transaction supports per-realm scoping (see txInRealm).
    this.txHasRealmId = tx instanceof HasRealmId;
}
|
||||
|
||||
private ClientUpdatedEvent clientUpdatedEvent(ClientModel c) {
|
||||
|
@ -118,6 +121,13 @@ public class MapClientProvider implements ClientProvider {
|
|||
};
|
||||
}
|
||||
|
||||
/**
 * Returns the transaction to be used for operations in the given realm.
 * If the underlying transaction implements {@link HasRealmId}, its realm ID is
 * set to the given realm's ID (or to {@code null} when {@code realm} is
 * {@code null}) before the transaction is handed out.
 * @param realm Realm to scope the transaction to; may be {@code null}
 * @return The (possibly realm-scoped) transaction
 */
private MapKeycloakTransaction<MapClientEntity, ClientModel> txInRealm(RealmModel realm) {
    if (txHasRealmId) {
        ((HasRealmId) tx).setRealmId(realm == null ? null : realm.getId());
    }
    return tx;
}
|
||||
|
||||
private Predicate<MapClientEntity> entityRealmFilter(RealmModel realm) {
|
||||
if (realm == null || realm.getId() == null) {
|
||||
return c -> false;
|
||||
|
@ -131,7 +141,7 @@ public class MapClientProvider implements ClientProvider {
|
|||
DefaultModelCriteria<ClientModel> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId());
|
||||
|
||||
return tx.read(withCriteria(mcb).pagination(firstResult, maxResults, SearchableFields.CLIENT_ID))
|
||||
return txInRealm(realm).read(withCriteria(mcb).pagination(firstResult, maxResults, SearchableFields.CLIENT_ID))
|
||||
.map(entityToAdapterFunc(realm));
|
||||
}
|
||||
|
||||
|
@ -140,7 +150,7 @@ public class MapClientProvider implements ClientProvider {
|
|||
DefaultModelCriteria<ClientModel> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId());
|
||||
|
||||
return tx.read(withCriteria(mcb).orderBy(SearchableFields.CLIENT_ID, ASCENDING))
|
||||
return txInRealm(realm).read(withCriteria(mcb).orderBy(SearchableFields.CLIENT_ID, ASCENDING))
|
||||
.map(entityToAdapterFunc(realm));
|
||||
}
|
||||
|
||||
|
@ -148,7 +158,7 @@ public class MapClientProvider implements ClientProvider {
|
|||
public ClientModel addClient(RealmModel realm, String id, String clientId) {
|
||||
LOG.tracef("addClient(%s, %s, %s)%s", realm, id, clientId, getShortStackTrace());
|
||||
|
||||
if (id != null && tx.exists(id)) {
|
||||
if (id != null && txInRealm(realm).exists(id)) {
|
||||
throw new ModelDuplicateException("Client with same id exists: " + id);
|
||||
}
|
||||
if (clientId != null && getClientByClientId(realm, clientId) != null) {
|
||||
|
@ -161,7 +171,7 @@ public class MapClientProvider implements ClientProvider {
|
|||
entity.setClientId(clientId);
|
||||
entity.setEnabled(true);
|
||||
entity.setStandardFlowEnabled(true);
|
||||
entity = tx.create(entity);
|
||||
entity = txInRealm(realm).create(entity);
|
||||
if (clientId == null) {
|
||||
clientId = entity.getId();
|
||||
entity.setClientId(clientId);
|
||||
|
@ -180,7 +190,7 @@ public class MapClientProvider implements ClientProvider {
|
|||
DefaultModelCriteria<ClientModel> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.ALWAYS_DISPLAY_IN_CONSOLE, Operator.EQ, Boolean.TRUE);
|
||||
return tx.read(withCriteria(mcb).orderBy(SearchableFields.CLIENT_ID, ASCENDING))
|
||||
return txInRealm(realm).read(withCriteria(mcb).orderBy(SearchableFields.CLIENT_ID, ASCENDING))
|
||||
.map(entityToAdapterFunc(realm));
|
||||
}
|
||||
|
||||
|
@ -205,7 +215,7 @@ public class MapClientProvider implements ClientProvider {
|
|||
|
||||
session.invalidate(CLIENT_BEFORE_REMOVE, realm, client);
|
||||
|
||||
tx.delete(id);
|
||||
txInRealm(realm).delete(id);
|
||||
|
||||
session.invalidate(CLIENT_AFTER_REMOVE, client);
|
||||
|
||||
|
@ -217,7 +227,7 @@ public class MapClientProvider implements ClientProvider {
|
|||
DefaultModelCriteria<ClientModel> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId());
|
||||
|
||||
return tx.getCount(withCriteria(mcb));
|
||||
return txInRealm(realm).getCount(withCriteria(mcb));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -228,7 +238,7 @@ public class MapClientProvider implements ClientProvider {
|
|||
|
||||
LOG.tracef("getClientById(%s, %s)%s", realm, id, getShortStackTrace());
|
||||
|
||||
MapClientEntity entity = tx.read(id);
|
||||
MapClientEntity entity = txInRealm(realm).read(id);
|
||||
return (entity == null || ! entityRealmFilter(realm).test(entity))
|
||||
? null
|
||||
: entityToAdapterFunc(realm).apply(entity);
|
||||
|
@ -245,7 +255,7 @@ public class MapClientProvider implements ClientProvider {
|
|||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.CLIENT_ID, Operator.EQ, clientId);
|
||||
|
||||
return tx.read(withCriteria(mcb))
|
||||
return txInRealm(realm).read(withCriteria(mcb))
|
||||
.map(entityToAdapterFunc(realm))
|
||||
.findFirst()
|
||||
.orElse(null)
|
||||
|
@ -262,7 +272,7 @@ public class MapClientProvider implements ClientProvider {
|
|||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.CLIENT_ID, Operator.ILIKE, "%" + clientId + "%");
|
||||
|
||||
return tx.read(withCriteria(mcb).pagination(firstResult, maxResults, SearchableFields.CLIENT_ID))
|
||||
return txInRealm(realm).read(withCriteria(mcb).pagination(firstResult, maxResults, SearchableFields.CLIENT_ID))
|
||||
.map(entityToAdapterFunc(realm));
|
||||
}
|
||||
|
||||
|
@ -275,14 +285,14 @@ public class MapClientProvider implements ClientProvider {
|
|||
mcb = mcb.compare(SearchableFields.ATTRIBUTE, Operator.EQ, entry.getKey(), entry.getValue());
|
||||
}
|
||||
|
||||
return tx.read(withCriteria(mcb).pagination(firstResult, maxResults, SearchableFields.CLIENT_ID))
|
||||
return txInRealm(realm).read(withCriteria(mcb).pagination(firstResult, maxResults, SearchableFields.CLIENT_ID))
|
||||
.map(entityToAdapterFunc(realm));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addClientScopes(RealmModel realm, ClientModel client, Set<ClientScopeModel> clientScopes, boolean defaultScope) {
|
||||
final String id = client.getId();
|
||||
MapClientEntity entity = tx.read(id);
|
||||
MapClientEntity entity = txInRealm(realm).read(id);
|
||||
|
||||
if (entity == null) return;
|
||||
|
||||
|
@ -303,7 +313,7 @@ public class MapClientProvider implements ClientProvider {
|
|||
@Override
|
||||
public void removeClientScope(RealmModel realm, ClientModel client, ClientScopeModel clientScope) {
|
||||
final String id = client.getId();
|
||||
MapClientEntity entity = tx.read(id);
|
||||
MapClientEntity entity = txInRealm(realm).read(id);
|
||||
|
||||
if (entity == null) return;
|
||||
|
||||
|
@ -315,7 +325,7 @@ public class MapClientProvider implements ClientProvider {
|
|||
@Override
|
||||
public Map<String, ClientScopeModel> getClientScopes(RealmModel realm, ClientModel client, boolean defaultScopes) {
|
||||
final String id = client.getId();
|
||||
MapClientEntity entity = tx.read(id);
|
||||
MapClientEntity entity = txInRealm(realm).read(id);
|
||||
|
||||
if (entity == null) return null;
|
||||
|
||||
|
@ -337,7 +347,7 @@ public class MapClientProvider implements ClientProvider {
|
|||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.ENABLED, Operator.EQ, Boolean.TRUE);
|
||||
|
||||
try (Stream<MapClientEntity> st = tx.read(withCriteria(mcb))) {
|
||||
try (Stream<MapClientEntity> st = txInRealm(realm).read(withCriteria(mcb))) {
|
||||
return st
|
||||
.filter(mce -> mce.getRedirectUris() != null && ! mce.getRedirectUris().isEmpty())
|
||||
.collect(Collectors.toMap(
|
||||
|
@ -352,7 +362,7 @@ public class MapClientProvider implements ClientProvider {
|
|||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.SCOPE_MAPPING_ROLE, Operator.EQ, role.getId());
|
||||
|
||||
try (Stream<MapClientEntity> toRemove = tx.read(withCriteria(mcb))) {
|
||||
try (Stream<MapClientEntity> toRemove = txInRealm(realm).read(withCriteria(mcb))) {
|
||||
toRemove
|
||||
.forEach(clientEntity -> clientEntity.removeScopeMapping(role.getId()));
|
||||
}
|
||||
|
@ -363,7 +373,7 @@ public class MapClientProvider implements ClientProvider {
|
|||
DefaultModelCriteria<ClientModel> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId());
|
||||
|
||||
tx.delete(withCriteria(mcb));
|
||||
txInRealm(realm).delete(withCriteria(mcb));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -30,6 +30,7 @@ import org.keycloak.models.KeycloakSession;
|
|||
import org.keycloak.models.ModelDuplicateException;
|
||||
import org.keycloak.models.RealmModel;
|
||||
import org.keycloak.models.map.common.DeepCloner;
|
||||
import org.keycloak.models.map.common.HasRealmId;
|
||||
import org.keycloak.models.map.storage.MapKeycloakTransaction;
|
||||
import org.keycloak.models.map.storage.MapStorage;
|
||||
import org.keycloak.models.map.storage.ModelCriteriaBuilder.Operator;
|
||||
|
@ -48,11 +49,13 @@ public class MapClientScopeProvider implements ClientScopeProvider {
|
|||
private static final Logger LOG = Logger.getLogger(MapClientScopeProvider.class);
|
||||
private final KeycloakSession session;
|
||||
private final MapKeycloakTransaction<MapClientScopeEntity, ClientScopeModel> tx;
|
||||
private final boolean txHasRealmId;
|
||||
|
||||
public MapClientScopeProvider(KeycloakSession session, MapStorage<MapClientScopeEntity, ClientScopeModel> clientScopeStore) {
|
||||
this.session = session;
|
||||
this.tx = clientScopeStore.createTransaction(session);
|
||||
session.getTransactionManager().enlist(tx);
|
||||
this.txHasRealmId = tx instanceof HasRealmId;
|
||||
}
|
||||
|
||||
private Function<MapClientScopeEntity, ClientScopeModel> entityToAdapterFunc(RealmModel realm) {
|
||||
|
@ -61,6 +64,13 @@ public class MapClientScopeProvider implements ClientScopeProvider {
|
|||
return origEntity -> new MapClientScopeAdapter(session, realm, origEntity);
|
||||
}
|
||||
|
||||
/**
 * Returns the transaction to be used for operations in the given realm.
 * Realm-aware transactions ({@link HasRealmId}) get their realm ID updated to
 * the given realm's ID first; a {@code null} realm clears the realm binding.
 * @param realm Realm to scope the transaction to; may be {@code null}
 * @return The (possibly realm-scoped) transaction
 */
private MapKeycloakTransaction<MapClientScopeEntity, ClientScopeModel> txInRealm(RealmModel realm) {
    if (txHasRealmId) {
        ((HasRealmId) tx).setRealmId(realm == null ? null : realm.getId());
    }
    return tx;
}
|
||||
|
||||
private Predicate<MapClientScopeEntity> entityRealmFilter(RealmModel realm) {
|
||||
if (realm == null || realm.getId() == null) {
|
||||
return c -> false;
|
||||
|
@ -74,7 +84,7 @@ public class MapClientScopeProvider implements ClientScopeProvider {
|
|||
DefaultModelCriteria<ClientScopeModel> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId());
|
||||
|
||||
return tx.read(withCriteria(mcb).orderBy(SearchableFields.NAME, ASCENDING))
|
||||
return txInRealm(realm).read(withCriteria(mcb).orderBy(SearchableFields.NAME, ASCENDING))
|
||||
.map(entityToAdapterFunc(realm));
|
||||
}
|
||||
|
||||
|
@ -84,11 +94,11 @@ public class MapClientScopeProvider implements ClientScopeProvider {
|
|||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.NAME, Operator.EQ, name);
|
||||
|
||||
if (tx.exists(withCriteria(mcb))) {
|
||||
if (txInRealm(realm).exists(withCriteria(mcb))) {
|
||||
throw new ModelDuplicateException("Client scope with name '" + name + "' in realm " + realm.getName());
|
||||
}
|
||||
|
||||
if (id != null && tx.exists(id)) {
|
||||
if (id != null && txInRealm(realm).exists(id)) {
|
||||
throw new ModelDuplicateException("Client scope exists: " + id);
|
||||
}
|
||||
|
||||
|
@ -99,7 +109,7 @@ public class MapClientScopeProvider implements ClientScopeProvider {
|
|||
entity.setRealmId(realm.getId());
|
||||
entity.setName(KeycloakModelUtils.convertClientScopeName(name));
|
||||
|
||||
entity = tx.create(entity);
|
||||
entity = txInRealm(realm).create(entity);
|
||||
return entityToAdapterFunc(realm).apply(entity);
|
||||
}
|
||||
|
||||
|
@ -111,7 +121,7 @@ public class MapClientScopeProvider implements ClientScopeProvider {
|
|||
|
||||
session.invalidate(CLIENT_SCOPE_BEFORE_REMOVE, realm, clientScope);
|
||||
|
||||
tx.delete(id);
|
||||
txInRealm(realm).delete(id);
|
||||
|
||||
session.invalidate(CLIENT_SCOPE_AFTER_REMOVE, clientScope);
|
||||
|
||||
|
@ -136,7 +146,7 @@ public class MapClientScopeProvider implements ClientScopeProvider {
|
|||
|
||||
LOG.tracef("getClientScopeById(%s, %s)%s", realm, id, getShortStackTrace());
|
||||
|
||||
MapClientScopeEntity entity = tx.read(id);
|
||||
MapClientScopeEntity entity = txInRealm(realm).read(id);
|
||||
return (entity == null || ! entityRealmFilter(realm).test(entity))
|
||||
? null
|
||||
: entityToAdapterFunc(realm).apply(entity);
|
||||
|
@ -147,7 +157,7 @@ public class MapClientScopeProvider implements ClientScopeProvider {
|
|||
DefaultModelCriteria<ClientScopeModel> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId());
|
||||
|
||||
tx.delete(withCriteria(mcb));
|
||||
txInRealm(realm).delete(withCriteria(mcb));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -29,6 +29,7 @@ import org.keycloak.provider.EnvironmentDependentProviderFactory;
|
|||
import org.keycloak.provider.InvalidationHandler;
|
||||
import org.keycloak.provider.Provider;
|
||||
import org.keycloak.provider.ProviderFactory;
|
||||
import java.util.Objects;
|
||||
import org.jboss.logging.Logger;
|
||||
|
||||
/**
|
||||
|
@ -120,6 +121,7 @@ public abstract class AbstractMapProviderFactory<T extends Provider, V extends A
|
|||
storageProviderFactory = session.getKeycloakSessionFactory().getProviderFactory(MapStorageProvider.class);
|
||||
} else {
|
||||
storageProviderFactory = session.getKeycloakSessionFactory().getProviderFactory(MapStorageProvider.class, provider);
|
||||
Objects.requireNonNull(storageProviderFactory, "Could not find map storage provider " + provider);
|
||||
}
|
||||
} else {
|
||||
// If this is being implemented, make sure that the factory is being closed eventually.
|
||||
|
|
|
@ -0,0 +1,83 @@
|
|||
/*
|
||||
* Copyright 2023 Red Hat, Inc. and/or its affiliates
|
||||
* and other contributors as indicated by the @author tags.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.keycloak.models.map.common;
|
||||
|
||||
import java.util.Objects;
|
||||
import java.util.function.Function;
|
||||
|
||||
/**
|
||||
* This class contains utility classes for type conversion.
|
||||
*
|
||||
* @author hmlnarik
|
||||
*/
|
||||
public class CastUtils {

    /** Static utility class; prevent instantiation (no public constructor). */
    private CastUtils() {
        // intentionally empty
    }

    /**
     * Converts value to destination class (if it can).
     * @param <T> Type to convert the value to
     * @param value Value to convert; {@code null} is passed through unchanged
     * @param toClass Class to convert value to
     * @return Value converted to the given class, or {@code null} if {@code value} was {@code null}
     * @throws IllegalStateException if the value cannot be converted to the requested class
     */
    @SuppressWarnings("unchecked")
    public static <T> T cast(Object value, Class<T> toClass) {
        return value == null ? null : ((Function<Object, T>) getCastFunc(value.getClass(), toClass)).apply(value);
    }

    /**
     * Provides a function to convert value of a given class to destination class (if it can).
     * Supported conversions: identity / widening reference conversion, anything to
     * {@code String} (via {@link Objects#toString}), {@code String} to
     * {@code Integer} / {@code Long} / {@code Boolean} / any enum constant,
     * and {@code Long} &lt;-&gt; {@code Integer}.
     * @param <E> Helper type parameter for the enum-conversion branch
     * @param fromClass Class to convert value from
     * @param toClass Class to convert value to
     * @return Function {@code fromClass -> toClass} converting values from the {@code fromClass} to the {@code toClass}
     * @throws IllegalStateException if the value cannot be converted to the requested class
     */
    public static <E extends Enum<E>> Function<?, ?> getCastFunc(Class<?> fromClass, Class<?> toClass) {
        if (fromClass == toClass || toClass.isAssignableFrom(fromClass)) {
            return Function.identity();
        }
        if (toClass == String.class) {
            // Objects.toString maps null to "null"; cast(...) never passes null here.
            return Objects::toString;
        }
        if (fromClass == String.class) {
            if (toClass == Integer.class) {
                return (Function<String, ?>) Integer::valueOf;
            } else if (toClass == Long.class) {
                return (Function<String, ?>) Long::valueOf;
            } else if (toClass == Boolean.class) {
                return (Function<String, ?>) Boolean::valueOf;
            } else if (toClass.isEnum()) {
                @SuppressWarnings("unchecked")
                Class<E> enumClass = (Class<E>) toClass;
                return (String value) -> Enum.valueOf(enumClass, value);
            }
        }
        if (fromClass == Long.class) {
            if (toClass == Integer.class) {
                // NOTE(review): Long::intValue silently truncates values outside the
                // int range -- confirm callers never pass such values, or switch to
                // Math::toIntExact to fail fast.
                return (Function<Long, ?>) Long::intValue;
            }
        }
        if (fromClass == Integer.class) {
            if (toClass == Long.class) {
                return (Function<Integer, ?>) Integer::longValue;
            }
        }

        throw new IllegalStateException("Unknown cast: " + fromClass + " -> " + toClass);
    }

}
|
||||
|
|
@ -0,0 +1,36 @@
|
|||
/*
|
||||
* Copyright 2022 Red Hat, Inc. and/or its affiliates
|
||||
* and other contributors as indicated by the @author tags.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.keycloak.models.map.common;
|
||||
|
||||
/**
|
||||
* Interface for all objects which are bound to a realm and retain reference to its ID.
|
||||
* @author hmlnarik
|
||||
*/
|
||||
public interface HasRealmId {

    /**
     * Returns realm ID of the entity.
     * @return Realm ID, or {@code null} if no realm is currently set
     */
    String getRealmId();

    /**
     * Sets the realm ID of this object.
     * @param realmId Realm ID; may be {@code null} (callers use {@code null}
     *        to clear the realm binding)
     */
    void setRealmId(String realmId);
}
|
|
@ -1,56 +1,68 @@
|
|||
package org.keycloak.models.map.common.delegate;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Map;
|
||||
|
||||
import org.keycloak.models.map.common.EntityField;
|
||||
import org.keycloak.models.map.common.UpdatableEntity;
|
||||
|
||||
public interface EntityFieldDelegate<E> extends UpdatableEntity {
|
||||
|
||||
public abstract class WithEntity<E extends UpdatableEntity> implements EntityFieldDelegate<E> {
|
||||
private final E entity;
|
||||
|
||||
public WithEntity(E entity) {
|
||||
this.entity = entity;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <EF extends Enum<? extends EntityField<E>> & EntityField<E>> Object get(EF field) {
|
||||
return field.get(entity);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T, EF extends Enum<? extends EntityField<E>> & EntityField<E>> void set(EF field, T value) {
|
||||
field.set(entity, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <K, EF extends java.lang.Enum<? extends org.keycloak.models.map.common.EntityField<E>> & org.keycloak.models.map.common.EntityField<E>> Object mapRemove(EF field, K key) {
|
||||
return field.mapRemove(entity, key);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <K, T, EF extends java.lang.Enum<? extends org.keycloak.models.map.common.EntityField<E>> & org.keycloak.models.map.common.EntityField<E>> void mapPut(EF field, K key, T value) {
|
||||
field.mapPut(entity, key, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <K, EF extends java.lang.Enum<? extends org.keycloak.models.map.common.EntityField<E>> & org.keycloak.models.map.common.EntityField<E>> Object mapGet(EF field, K key) {
|
||||
return field.mapGet(entity, key);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T, EF extends java.lang.Enum<? extends org.keycloak.models.map.common.EntityField<E>> & org.keycloak.models.map.common.EntityField<E>> Object collectionRemove(EF field, T value) {
|
||||
return field.collectionRemove(entity, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T, EF extends java.lang.Enum<? extends org.keycloak.models.map.common.EntityField<E>> & org.keycloak.models.map.common.EntityField<E>> void collectionAdd(EF field, T value) {
|
||||
field.collectionAdd(entity, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isUpdated() {
|
||||
return entity.isUpdated();
|
||||
}
|
||||
}
|
||||
|
||||
// Non-collection values
|
||||
<EF extends Enum<? extends EntityField<E>> & EntityField<E>> Object get(EF field);
|
||||
default <T, EF extends Enum<? extends EntityField<E>> & EntityField<E>> void set(EF field, T value) {}
|
||||
<T, EF extends Enum<? extends EntityField<E>> & EntityField<E>> void set(EF field, T value);
|
||||
|
||||
default <T, EF extends Enum<? extends EntityField<E>> & EntityField<E>> void collectionAdd(EF field, T value) {
|
||||
@SuppressWarnings("unchecked")
|
||||
Collection<T> c = (Collection<T>) get(field);
|
||||
if (c != null) {
|
||||
c.add(value);
|
||||
}
|
||||
}
|
||||
default <T, EF extends Enum<? extends EntityField<E>> & EntityField<E>> Object collectionRemove(EF field, T value) {
|
||||
Collection<?> c = (Collection<?>) get(field);
|
||||
return c == null ? null : c.remove(value);
|
||||
}
|
||||
<T, EF extends Enum<? extends EntityField<E>> & EntityField<E>> void collectionAdd(EF field, T value);
|
||||
<T, EF extends Enum<? extends EntityField<E>> & EntityField<E>> Object collectionRemove(EF field, T value);
|
||||
|
||||
/**
|
||||
*
|
||||
* @param <K> Key type
|
||||
* @param <T> Value type
|
||||
* @param field Field identifier. Should be one of the generated {@code *Fields} enum constants.
|
||||
* @param key Key
|
||||
* @param valueClass class of the value
|
||||
* @return
|
||||
*/
|
||||
default <K, EF extends Enum<? extends EntityField<E>> & EntityField<E>> Object mapGet(EF field, K key) {
|
||||
@SuppressWarnings("unchecked")
|
||||
Map<K, ?> m = (Map<K, ?>) get(field);
|
||||
return m == null ? null : m.get(key);
|
||||
}
|
||||
default <K, T, EF extends Enum<? extends EntityField<E>> & EntityField<E>> void mapPut(EF field, K key, T value) {
|
||||
@SuppressWarnings("unchecked")
|
||||
Map<K, T> m = (Map<K, T>) get(field);
|
||||
if (m != null) {
|
||||
m.put(key, value);
|
||||
}
|
||||
}
|
||||
default <K, EF extends Enum<? extends EntityField<E>> & EntityField<E>> Object mapRemove(EF field, K key) {
|
||||
@SuppressWarnings("unchecked")
|
||||
Map<K, ?> m = (Map<K, ?>) get(field);
|
||||
if (m != null) {
|
||||
return m.remove(key);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
<K, EF extends Enum<? extends EntityField<E>> & EntityField<E>> Object mapGet(EF field, K key);
|
||||
<K, T, EF extends Enum<? extends EntityField<E>> & EntityField<E>> void mapPut(EF field, K key, T value);
|
||||
<K, EF extends Enum<? extends EntityField<E>> & EntityField<E>> Object mapRemove(EF field, K key);
|
||||
|
||||
}
|
||||
|
|
|
@ -27,7 +27,6 @@ import org.keycloak.models.map.storage.criteria.DefaultModelCriteria;
|
|||
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import static org.keycloak.models.map.storage.ModelCriteriaBuilder.Operator.EQ;
|
||||
|
@ -39,21 +38,18 @@ import static org.keycloak.models.map.storage.QueryParameters.Order.ASCENDING;
|
|||
import static org.keycloak.models.map.storage.QueryParameters.Order.DESCENDING;
|
||||
import static org.keycloak.models.map.storage.criteria.DefaultModelCriteria.criteria;
|
||||
|
||||
public class MapAdminEventQuery implements AdminEventQuery {
|
||||
public abstract class MapAdminEventQuery implements AdminEventQuery {
|
||||
|
||||
private Integer firstResult;
|
||||
private Integer maxResults;
|
||||
private QueryParameters.Order order = DESCENDING;
|
||||
private DefaultModelCriteria<AdminEvent> mcb = criteria();
|
||||
private final Function<QueryParameters<AdminEvent>, Stream<AdminEvent>> resultProducer;
|
||||
|
||||
public MapAdminEventQuery(Function<QueryParameters<AdminEvent>, Stream<AdminEvent>> resultProducer) {
|
||||
this.resultProducer = resultProducer;
|
||||
}
|
||||
protected String realmId;
|
||||
|
||||
@Override
|
||||
public AdminEventQuery realm(String realmId) {
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, EQ, realmId);
|
||||
this.realmId = realmId;
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -137,10 +133,12 @@ public class MapAdminEventQuery implements AdminEventQuery {
|
|||
|
||||
@Override
|
||||
public Stream<AdminEvent> getResultStream() {
|
||||
return resultProducer.apply(QueryParameters.withCriteria(mcb)
|
||||
return read(QueryParameters.withCriteria(mcb)
|
||||
.offset(firstResult)
|
||||
.limit(maxResults)
|
||||
.orderBy(SearchableFields.TIMESTAMP, order)
|
||||
);
|
||||
}
|
||||
|
||||
protected abstract Stream<AdminEvent> read(QueryParameters<AdminEvent> queryParameters);
|
||||
}
|
||||
|
|
|
@ -26,7 +26,6 @@ import org.keycloak.models.map.storage.criteria.DefaultModelCriteria;
|
|||
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import static org.keycloak.models.map.storage.ModelCriteriaBuilder.Operator.EQ;
|
||||
|
@ -37,17 +36,13 @@ import static org.keycloak.models.map.storage.QueryParameters.Order.ASCENDING;
|
|||
import static org.keycloak.models.map.storage.QueryParameters.Order.DESCENDING;
|
||||
import static org.keycloak.models.map.storage.criteria.DefaultModelCriteria.criteria;
|
||||
|
||||
public class MapAuthEventQuery implements EventQuery {
|
||||
public abstract class MapAuthEventQuery implements EventQuery {
|
||||
|
||||
private Integer firstResult;
|
||||
private Integer maxResults;
|
||||
private QueryParameters.Order order = DESCENDING;
|
||||
private DefaultModelCriteria<Event> mcb = criteria();
|
||||
private final Function<QueryParameters<Event>, Stream<Event>> resultProducer;
|
||||
|
||||
public MapAuthEventQuery(Function<QueryParameters<Event>, Stream<Event>> resultProducer) {
|
||||
this.resultProducer = resultProducer;
|
||||
}
|
||||
protected String realmId;
|
||||
|
||||
@Override
|
||||
public EventQuery type(EventType... types) {
|
||||
|
@ -58,6 +53,7 @@ public class MapAuthEventQuery implements EventQuery {
|
|||
@Override
|
||||
public EventQuery realm(String realmId) {
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, EQ, realmId);
|
||||
this.realmId = realmId;
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -117,9 +113,11 @@ public class MapAuthEventQuery implements EventQuery {
|
|||
|
||||
@Override
|
||||
public Stream<Event> getResultStream() {
|
||||
return resultProducer.apply(QueryParameters.withCriteria(mcb)
|
||||
return read(QueryParameters.withCriteria(mcb)
|
||||
.offset(firstResult)
|
||||
.limit(maxResults)
|
||||
.orderBy(SearchableFields.TIMESTAMP, order));
|
||||
}
|
||||
|
||||
protected abstract Stream<Event> read(QueryParameters<Event> queryParameters);
|
||||
}
|
||||
|
|
|
@ -28,13 +28,13 @@ import org.keycloak.models.KeycloakSession;
|
|||
import org.keycloak.models.ModelDuplicateException;
|
||||
import org.keycloak.models.RealmModel;
|
||||
import org.keycloak.models.map.common.ExpirableEntity;
|
||||
import org.keycloak.models.map.common.HasRealmId;
|
||||
import org.keycloak.models.map.storage.MapKeycloakTransaction;
|
||||
import org.keycloak.models.map.storage.MapStorage;
|
||||
import org.keycloak.models.map.storage.ModelCriteriaBuilder;
|
||||
import org.keycloak.models.map.storage.QueryParameters;
|
||||
import org.keycloak.models.map.storage.criteria.DefaultModelCriteria;
|
||||
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import static org.keycloak.common.util.StackUtil.getShortStackTrace;
|
||||
|
@ -47,6 +47,8 @@ public class MapEventStoreProvider implements EventStoreProvider {
|
|||
private final KeycloakSession session;
|
||||
private final MapKeycloakTransaction<MapAuthEventEntity, Event> authEventsTX;
|
||||
private final MapKeycloakTransaction<MapAdminEventEntity, AdminEvent> adminEventsTX;
|
||||
private final boolean adminTxHasRealmId;
|
||||
private final boolean authTxHasRealmId;
|
||||
|
||||
public MapEventStoreProvider(KeycloakSession session, MapStorage<MapAuthEventEntity, Event> loginEventsStore, MapStorage<MapAdminEventEntity, AdminEvent> adminEventsStore) {
|
||||
this.session = session;
|
||||
|
@ -55,6 +57,30 @@ public class MapEventStoreProvider implements EventStoreProvider {
|
|||
|
||||
session.getTransactionManager().enlistAfterCompletion(this.authEventsTX);
|
||||
session.getTransactionManager().enlistAfterCompletion(this.adminEventsTX);
|
||||
this.authTxHasRealmId = this.authEventsTX instanceof HasRealmId;
|
||||
this.adminTxHasRealmId = this.adminEventsTX instanceof HasRealmId;
|
||||
}
|
||||
|
||||
private MapKeycloakTransaction<MapAdminEventEntity, AdminEvent> adminTxInRealm(String realmId) {
|
||||
if (adminTxHasRealmId) {
|
||||
((HasRealmId) adminEventsTX).setRealmId(realmId);
|
||||
}
|
||||
return adminEventsTX;
|
||||
}
|
||||
|
||||
private MapKeycloakTransaction<MapAdminEventEntity, AdminEvent> adminTxInRealm(RealmModel realm) {
|
||||
return adminTxInRealm(realm == null ? null : realm.getId());
|
||||
}
|
||||
|
||||
private MapKeycloakTransaction<MapAuthEventEntity, Event> authTxInRealm(String realmId) {
|
||||
if (authTxHasRealmId) {
|
||||
((HasRealmId) authEventsTX).setRealmId(realmId);
|
||||
}
|
||||
return authEventsTX;
|
||||
}
|
||||
|
||||
private MapKeycloakTransaction<MapAuthEventEntity, Event> authTxInRealm(RealmModel realm) {
|
||||
return authTxInRealm(realm == null ? null : realm.getId());
|
||||
}
|
||||
|
||||
/** LOGIN EVENTS **/
|
||||
|
@ -62,13 +88,13 @@ public class MapEventStoreProvider implements EventStoreProvider {
|
|||
public void onEvent(Event event) {
|
||||
LOG.tracef("onEvent(%s)%s", event, getShortStackTrace());
|
||||
String id = event.getId();
|
||||
String realmId = event.getRealmId();
|
||||
|
||||
if (id != null && authEventsTX.exists(id)) {
|
||||
if (id != null && authTxInRealm(realmId).exists(id)) {
|
||||
throw new ModelDuplicateException("Event already exists: " + id);
|
||||
}
|
||||
|
||||
MapAuthEventEntity entity = modelToEntity(event);
|
||||
String realmId = event.getRealmId();
|
||||
if (realmId != null) {
|
||||
RealmModel realm = session.realms().getRealm(realmId);
|
||||
if (realm != null && realm.getEventsExpiration() > 0) {
|
||||
|
@ -76,45 +102,51 @@ public class MapEventStoreProvider implements EventStoreProvider {
|
|||
}
|
||||
}
|
||||
|
||||
authEventsTX.create(entity);
|
||||
}
|
||||
|
||||
private boolean filterExpired(ExpirableEntity event) {
|
||||
// Check if entity is expired
|
||||
if (isExpired(event, true)) {
|
||||
// Remove entity
|
||||
authEventsTX.delete(event.getId());
|
||||
|
||||
return false; // Do not include entity in the resulting stream
|
||||
}
|
||||
|
||||
return true; // Entity is not expired
|
||||
authTxInRealm(realmId).create(entity);
|
||||
}
|
||||
|
||||
@Override
|
||||
public EventQuery createQuery() {
|
||||
LOG.tracef("createQuery()%s", getShortStackTrace());
|
||||
return new MapAuthEventQuery(((Function<QueryParameters<Event>, Stream<MapAuthEventEntity>>) authEventsTX::read)
|
||||
.andThen(s -> s.filter(this::filterExpired).map(EventUtils::entityToModel)));
|
||||
return new MapAuthEventQuery() {
|
||||
private boolean filterExpired(ExpirableEntity event) {
|
||||
// Check if entity is expired
|
||||
if (isExpired(event, true)) {
|
||||
// Remove entity
|
||||
authTxInRealm(realmId).delete(event.getId());
|
||||
|
||||
return false; // Do not include entity in the resulting stream
|
||||
}
|
||||
|
||||
return true; // Entity is not expired
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Stream<Event> read(QueryParameters<Event> queryParameters) {
|
||||
return authTxInRealm(realmId).read(queryParameters)
|
||||
.filter(this::filterExpired)
|
||||
.map(EventUtils::entityToModel);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clear() {
|
||||
LOG.tracef("clear()%s", getShortStackTrace());
|
||||
authEventsTX.delete(QueryParameters.withCriteria(DefaultModelCriteria.criteria()));
|
||||
authTxInRealm((String) null).delete(QueryParameters.withCriteria(DefaultModelCriteria.criteria()));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clear(RealmModel realm) {
|
||||
LOG.tracef("clear(%s)%s", realm, getShortStackTrace());
|
||||
authEventsTX.delete(QueryParameters.withCriteria(DefaultModelCriteria.<Event>criteria()
|
||||
authTxInRealm(realm).delete(QueryParameters.withCriteria(DefaultModelCriteria.<Event>criteria()
|
||||
.compare(Event.SearchableFields.REALM_ID, ModelCriteriaBuilder.Operator.EQ, realm.getId())));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clear(RealmModel realm, long olderThan) {
|
||||
LOG.tracef("clear(%s, %d)%s", realm, olderThan, getShortStackTrace());
|
||||
authEventsTX.delete(QueryParameters.withCriteria(DefaultModelCriteria.<Event>criteria()
|
||||
authTxInRealm(realm).delete(QueryParameters.withCriteria(DefaultModelCriteria.<Event>criteria()
|
||||
.compare(Event.SearchableFields.REALM_ID, ModelCriteriaBuilder.Operator.EQ, realm.getId())
|
||||
.compare(Event.SearchableFields.TIMESTAMP, ModelCriteriaBuilder.Operator.LT, olderThan)
|
||||
));
|
||||
|
@ -130,47 +162,67 @@ public class MapEventStoreProvider implements EventStoreProvider {
|
|||
|
||||
@Override
|
||||
public void onEvent(AdminEvent event, boolean includeRepresentation) {
|
||||
LOG.tracef("clear(%s, %s)%s", event, includeRepresentation, getShortStackTrace());
|
||||
LOG.tracef("onEvent(%s, %s)%s", event, includeRepresentation, getShortStackTrace());
|
||||
String id = event.getId();
|
||||
if (id != null && authEventsTX.exists(id)) {
|
||||
String realmId = event.getRealmId();
|
||||
if (id != null && adminTxInRealm(realmId).exists(id)) {
|
||||
throw new ModelDuplicateException("Event already exists: " + id);
|
||||
}
|
||||
String realmId = event.getRealmId();
|
||||
MapAdminEventEntity entity = modelToEntity(event,includeRepresentation);
|
||||
if (realmId != null) {
|
||||
RealmModel realm = session.realms().getRealm(realmId);
|
||||
Long expiration = realm.getAttribute("adminEventsExpiration",0L);
|
||||
if (realm != null && expiration > 0) {
|
||||
entity.setExpiration(Time.currentTimeMillis() + (expiration * 1000));
|
||||
if (realm != null) {
|
||||
Long expiration = realm.getAttribute("adminEventsExpiration",0L);
|
||||
if (expiration > 0) {
|
||||
entity.setExpiration(Time.currentTimeMillis() + (expiration * 1000));
|
||||
}
|
||||
}
|
||||
}
|
||||
adminEventsTX.create(entity);
|
||||
adminTxInRealm(realmId).create(entity);
|
||||
}
|
||||
|
||||
@Override
|
||||
public AdminEventQuery createAdminQuery() {
|
||||
LOG.tracef("createAdminQuery()%s", getShortStackTrace());
|
||||
return new MapAdminEventQuery(((Function<QueryParameters<AdminEvent>, Stream<MapAdminEventEntity>>) adminEventsTX::read)
|
||||
.andThen(s -> s.filter(this::filterExpired).map(EventUtils::entityToModel)));
|
||||
return new MapAdminEventQuery() {
|
||||
private boolean filterExpired(ExpirableEntity event) {
|
||||
// Check if entity is expired
|
||||
if (isExpired(event, true)) {
|
||||
// Remove entity
|
||||
authTxInRealm(realmId).delete(event.getId());
|
||||
|
||||
return false; // Do not include entity in the resulting stream
|
||||
}
|
||||
|
||||
return true; // Entity is not expired
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Stream<AdminEvent> read(QueryParameters<AdminEvent> queryParameters) {
|
||||
return adminTxInRealm(realmId).read(queryParameters)
|
||||
.filter(this::filterExpired)
|
||||
.map(EventUtils::entityToModel);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clearAdmin() {
|
||||
LOG.tracef("clearAdmin()%s", getShortStackTrace());
|
||||
adminEventsTX.delete(QueryParameters.withCriteria(DefaultModelCriteria.criteria()));
|
||||
adminTxInRealm((String) null).delete(QueryParameters.withCriteria(DefaultModelCriteria.criteria()));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clearAdmin(RealmModel realm) {
|
||||
LOG.tracef("clear(%s)%s", realm, getShortStackTrace());
|
||||
adminEventsTX.delete(QueryParameters.withCriteria(DefaultModelCriteria.<AdminEvent>criteria()
|
||||
LOG.tracef("clearAdmin(%s)%s", realm, getShortStackTrace());
|
||||
adminTxInRealm(realm).delete(QueryParameters.withCriteria(DefaultModelCriteria.<AdminEvent>criteria()
|
||||
.compare(AdminEvent.SearchableFields.REALM_ID, ModelCriteriaBuilder.Operator.EQ, realm.getId())));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clearAdmin(RealmModel realm, long olderThan) {
|
||||
LOG.tracef("clearAdmin(%s, %d)%s", realm, olderThan, getShortStackTrace());
|
||||
adminEventsTX.delete(QueryParameters.withCriteria(DefaultModelCriteria.<AdminEvent>criteria()
|
||||
adminTxInRealm(realm).delete(QueryParameters.withCriteria(DefaultModelCriteria.<AdminEvent>criteria()
|
||||
.compare(AdminEvent.SearchableFields.REALM_ID, ModelCriteriaBuilder.Operator.EQ, realm.getId())
|
||||
.compare(AdminEvent.SearchableFields.TIMESTAMP, ModelCriteriaBuilder.Operator.LT, olderThan)
|
||||
));
|
||||
|
|
|
@ -17,7 +17,6 @@
|
|||
|
||||
package org.keycloak.models.map.group;
|
||||
|
||||
import java.security.Key;
|
||||
import org.jboss.logging.Logger;
|
||||
import org.keycloak.models.GroupModel;
|
||||
import org.keycloak.models.GroupModel.SearchableFields;
|
||||
|
@ -27,6 +26,7 @@ import org.keycloak.models.ModelDuplicateException;
|
|||
import org.keycloak.models.RealmModel;
|
||||
import org.keycloak.models.RoleModel;
|
||||
import org.keycloak.models.map.common.DeepCloner;
|
||||
import org.keycloak.models.map.common.HasRealmId;
|
||||
import org.keycloak.models.map.storage.MapKeycloakTransaction;
|
||||
import org.keycloak.models.map.storage.MapStorage;
|
||||
|
||||
|
@ -54,11 +54,20 @@ public class MapGroupProvider implements GroupProvider {
|
|||
private static final Logger LOG = Logger.getLogger(MapGroupProvider.class);
|
||||
private final KeycloakSession session;
|
||||
final MapKeycloakTransaction<MapGroupEntity, GroupModel> tx;
|
||||
private final boolean txHasRealmId;
|
||||
|
||||
public MapGroupProvider(KeycloakSession session, MapStorage<MapGroupEntity, GroupModel> groupStore) {
|
||||
this.session = session;
|
||||
this.tx = groupStore.createTransaction(session);
|
||||
session.getTransactionManager().enlist(tx);
|
||||
this.txHasRealmId = tx instanceof HasRealmId;
|
||||
}
|
||||
|
||||
private MapKeycloakTransaction<MapGroupEntity, GroupModel> txInRealm(RealmModel realm) {
|
||||
if (txHasRealmId) {
|
||||
((HasRealmId) tx).setRealmId(realm == null ? null : realm.getId());
|
||||
}
|
||||
return tx;
|
||||
}
|
||||
|
||||
private Function<MapGroupEntity, GroupModel> entityToAdapterFunc(RealmModel realm) {
|
||||
|
@ -73,14 +82,14 @@ public class MapGroupProvider implements GroupProvider {
|
|||
|
||||
@Override
|
||||
public GroupModel getGroupById(RealmModel realm, String id) {
|
||||
if (id == null) {
|
||||
if (id == null || realm == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
LOG.tracef("getGroupById(%s, %s)%s", realm, id, getShortStackTrace());
|
||||
|
||||
MapGroupEntity entity = tx.read(id);
|
||||
String realmId = realm.getId();
|
||||
MapGroupEntity entity = txInRealm(realm).read(id);
|
||||
return (entity == null || ! Objects.equals(realmId, entity.getRealmId()))
|
||||
? null
|
||||
: entityToAdapterFunc(realm).apply(entity);
|
||||
|
@ -105,7 +114,7 @@ public class MapGroupProvider implements GroupProvider {
|
|||
queryParameters = queryParametersModifier.apply(queryParameters);
|
||||
}
|
||||
|
||||
return tx.read(queryParameters)
|
||||
return txInRealm(realm).read(queryParameters)
|
||||
.map(entityToAdapterFunc(realm))
|
||||
;
|
||||
}
|
||||
|
@ -120,7 +129,7 @@ public class MapGroupProvider implements GroupProvider {
|
|||
mcb = mcb.compare(SearchableFields.NAME, Operator.ILIKE, "%" + search + "%");
|
||||
}
|
||||
|
||||
return tx.read(withCriteria(mcb).pagination(first, max, SearchableFields.NAME))
|
||||
return txInRealm(realm).read(withCriteria(mcb).pagination(first, max, SearchableFields.NAME))
|
||||
.map(entityToAdapterFunc(realm));
|
||||
}
|
||||
|
||||
|
@ -134,7 +143,7 @@ public class MapGroupProvider implements GroupProvider {
|
|||
mcb = mcb.compare(SearchableFields.PARENT_ID, Operator.NOT_EXISTS);
|
||||
}
|
||||
|
||||
return tx.getCount(withCriteria(mcb));
|
||||
return txInRealm(realm).getCount(withCriteria(mcb));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -169,6 +178,7 @@ public class MapGroupProvider implements GroupProvider {
|
|||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Stream<GroupModel> searchForGroupByNameStream(RealmModel realm, String search, Boolean exact, Integer firstResult, Integer maxResults) {
|
||||
LOG.tracef("searchForGroupByNameStream(%s, %s, %s, %b, %d, %d)%s", realm, session, search, exact, firstResult, maxResults, getShortStackTrace());
|
||||
|
||||
|
@ -183,7 +193,7 @@ public class MapGroupProvider implements GroupProvider {
|
|||
}
|
||||
|
||||
|
||||
return tx.read(withCriteria(mcb).pagination(firstResult, maxResults, SearchableFields.NAME))
|
||||
return txInRealm(realm).read(withCriteria(mcb).pagination(firstResult, maxResults, SearchableFields.NAME))
|
||||
.map(MapGroupEntity::getId)
|
||||
.map(id -> {
|
||||
GroupModel groupById = session.groups().getGroupById(realm, id);
|
||||
|
@ -202,7 +212,7 @@ public class MapGroupProvider implements GroupProvider {
|
|||
mcb = mcb.compare(GroupModel.SearchableFields.ATTRIBUTE, Operator.EQ, entry.getKey(), entry.getValue());
|
||||
}
|
||||
|
||||
return tx.read(withCriteria(mcb).pagination(firstResult, maxResults, SearchableFields.NAME))
|
||||
return txInRealm(realm).read(withCriteria(mcb).pagination(firstResult, maxResults, SearchableFields.NAME))
|
||||
.map(entityToAdapterFunc(realm));
|
||||
}
|
||||
|
||||
|
@ -218,7 +228,7 @@ public class MapGroupProvider implements GroupProvider {
|
|||
mcb.compare(SearchableFields.PARENT_ID, Operator.NOT_EXISTS) :
|
||||
mcb.compare(SearchableFields.PARENT_ID, Operator.EQ, toParent.getId());
|
||||
|
||||
if (tx.exists(withCriteria(mcb))) {
|
||||
if (txInRealm(realm).exists(withCriteria(mcb))) {
|
||||
throw new ModelDuplicateException("Group with name '" + name + "' in realm " + realm.getName() + " already exists for requested parent" );
|
||||
}
|
||||
|
||||
|
@ -227,10 +237,10 @@ public class MapGroupProvider implements GroupProvider {
|
|||
entity.setRealmId(realm.getId());
|
||||
entity.setName(name);
|
||||
entity.setParentId(toParent == null ? null : toParent.getId());
|
||||
if (id != null && tx.exists(id)) {
|
||||
if (id != null && txInRealm(realm).exists(id)) {
|
||||
throw new ModelDuplicateException("Group exists: " + id);
|
||||
}
|
||||
entity = tx.create(entity);
|
||||
entity = txInRealm(realm).create(entity);
|
||||
|
||||
return entityToAdapterFunc(realm).apply(entity);
|
||||
}
|
||||
|
@ -242,7 +252,7 @@ public class MapGroupProvider implements GroupProvider {
|
|||
|
||||
session.invalidate(GROUP_BEFORE_REMOVE, realm, group);
|
||||
|
||||
tx.delete(group.getId());
|
||||
txInRealm(realm).delete(group.getId());
|
||||
|
||||
session.invalidate(GROUP_AFTER_REMOVE, realm, group);
|
||||
|
||||
|
@ -269,7 +279,7 @@ public class MapGroupProvider implements GroupProvider {
|
|||
mcb.compare(SearchableFields.PARENT_ID, Operator.NOT_EXISTS) :
|
||||
mcb.compare(SearchableFields.PARENT_ID, Operator.EQ, toParent.getId());
|
||||
|
||||
try (Stream<MapGroupEntity> possibleSiblings = tx.read(withCriteria(mcb))) {
|
||||
try (Stream<MapGroupEntity> possibleSiblings = txInRealm(realm).read(withCriteria(mcb))) {
|
||||
if (possibleSiblings.findAny().isPresent()) {
|
||||
throw new ModelDuplicateException("Parent already contains subgroup named '" + group.getName() + "'");
|
||||
}
|
||||
|
@ -318,7 +328,7 @@ public class MapGroupProvider implements GroupProvider {
|
|||
.compare(SearchableFields.PARENT_ID, Operator.EQ, (Object) null)
|
||||
.compare(SearchableFields.NAME, Operator.EQ, subGroup.getName());
|
||||
|
||||
try (Stream<MapGroupEntity> possibleSiblings = tx.read(withCriteria(mcb))) {
|
||||
try (Stream<MapGroupEntity> possibleSiblings = txInRealm(realm).read(withCriteria(mcb))) {
|
||||
if (possibleSiblings.findAny().isPresent()) {
|
||||
throw new ModelDuplicateException("There is already a top level group named '" + subGroup.getName() + "'");
|
||||
}
|
||||
|
@ -332,7 +342,7 @@ public class MapGroupProvider implements GroupProvider {
|
|||
DefaultModelCriteria<GroupModel> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.ASSIGNED_ROLE, Operator.EQ, role.getId());
|
||||
try (Stream<MapGroupEntity> toRemove = tx.read(withCriteria(mcb))) {
|
||||
try (Stream<MapGroupEntity> toRemove = txInRealm(realm).read(withCriteria(mcb))) {
|
||||
toRemove
|
||||
.map(groupEntity -> session.groups().getGroupById(realm, groupEntity.getId()))
|
||||
.forEach(groupModel -> groupModel.deleteRoleMapping(role));
|
||||
|
@ -344,7 +354,7 @@ public class MapGroupProvider implements GroupProvider {
|
|||
DefaultModelCriteria<GroupModel> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId());
|
||||
|
||||
tx.delete(withCriteria(mcb));
|
||||
txInRealm(realm).delete(withCriteria(mcb));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -358,6 +368,6 @@ public class MapGroupProvider implements GroupProvider {
|
|||
.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.PARENT_ID, Operator.EQ, parentId);
|
||||
|
||||
return tx.read(withCriteria(mcb)).map(entityToAdapterFunc(realm));
|
||||
return txInRealm(realm).read(withCriteria(mcb)).map(entityToAdapterFunc(realm));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -31,6 +31,7 @@ import org.keycloak.models.map.storage.MapStorage;
|
|||
import org.keycloak.models.RoleModel.SearchableFields;
|
||||
import org.keycloak.models.RoleProvider;
|
||||
import org.keycloak.models.map.common.DeepCloner;
|
||||
import org.keycloak.models.map.common.HasRealmId;
|
||||
import org.keycloak.models.map.storage.ModelCriteriaBuilder.Operator;
|
||||
import org.keycloak.models.map.storage.criteria.DefaultModelCriteria;
|
||||
|
||||
|
@ -46,11 +47,13 @@ public class MapRoleProvider implements RoleProvider {
|
|||
private static final Logger LOG = Logger.getLogger(MapRoleProvider.class);
|
||||
private final KeycloakSession session;
|
||||
final MapKeycloakTransaction<MapRoleEntity, RoleModel> tx;
|
||||
private final boolean txHasRealmId;
|
||||
|
||||
public MapRoleProvider(KeycloakSession session, MapStorage<MapRoleEntity, RoleModel> roleStore) {
|
||||
this.session = session;
|
||||
this.tx = roleStore.createTransaction(session);
|
||||
session.getTransactionManager().enlist(tx);
|
||||
this.txHasRealmId = tx instanceof HasRealmId;
|
||||
}
|
||||
|
||||
private Function<MapRoleEntity, RoleModel> entityToAdapterFunc(RealmModel realm) {
|
||||
|
@ -58,6 +61,13 @@ public class MapRoleProvider implements RoleProvider {
|
|||
return origEntity -> new MapRoleAdapter(session, realm, origEntity);
|
||||
}
|
||||
|
||||
private MapKeycloakTransaction<MapRoleEntity, RoleModel> txInRealm(RealmModel realm) {
|
||||
if (txHasRealmId) {
|
||||
((HasRealmId) tx).setRealmId(realm == null ? null : realm.getId());
|
||||
}
|
||||
return tx;
|
||||
}
|
||||
|
||||
@Override
|
||||
public RoleModel addRealmRole(RealmModel realm, String id, String name) {
|
||||
if (getRealmRole(realm, name) != null) {
|
||||
|
@ -71,10 +81,10 @@ public class MapRoleProvider implements RoleProvider {
|
|||
entity.setRealmId(realm.getId());
|
||||
entity.setName(name);
|
||||
entity.setClientRole(false);
|
||||
if (entity.getId() != null && tx.exists(entity.getId())) {
|
||||
if (entity.getId() != null && txInRealm(realm).exists(entity.getId())) {
|
||||
throw new ModelDuplicateException("Role exists: " + id);
|
||||
}
|
||||
entity = tx.create(entity);
|
||||
entity = txInRealm(realm).create(entity);
|
||||
return entityToAdapterFunc(realm).apply(entity);
|
||||
}
|
||||
|
||||
|
@ -84,7 +94,7 @@ public class MapRoleProvider implements RoleProvider {
|
|||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.IS_CLIENT_ROLE, Operator.NE, true);
|
||||
|
||||
return tx.read(withCriteria(mcb).pagination(first, max, SearchableFields.NAME))
|
||||
return txInRealm(realm).read(withCriteria(mcb).pagination(first, max, SearchableFields.NAME))
|
||||
.map(entityToAdapterFunc(realm));
|
||||
}
|
||||
|
||||
|
@ -101,7 +111,7 @@ public class MapRoleProvider implements RoleProvider {
|
|||
mcb = mcb.compare(RoleModel.SearchableFields.NAME, Operator.ILIKE, "%" + search + "%");
|
||||
}
|
||||
|
||||
return tx.read(withCriteria(mcb).pagination(first, max, RoleModel.SearchableFields.NAME))
|
||||
return txInRealm(realm).read(withCriteria(mcb).pagination(first, max, RoleModel.SearchableFields.NAME))
|
||||
.map(entityToAdapterFunc(realm));
|
||||
}
|
||||
|
||||
|
@ -111,7 +121,7 @@ public class MapRoleProvider implements RoleProvider {
|
|||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.IS_CLIENT_ROLE, Operator.NE, true);
|
||||
|
||||
return tx.read(withCriteria(mcb).orderBy(SearchableFields.NAME, ASCENDING))
|
||||
return txInRealm(realm).read(withCriteria(mcb).orderBy(SearchableFields.NAME, ASCENDING))
|
||||
.map(entityToAdapterFunc(realm));
|
||||
}
|
||||
|
||||
|
@ -124,36 +134,39 @@ public class MapRoleProvider implements RoleProvider {
|
|||
LOG.tracef("addClientRole(%s, %s, %s)%s", client, id, name, getShortStackTrace());
|
||||
|
||||
MapRoleEntity entity = DeepCloner.DUMB_CLONER.newInstance(MapRoleEntity.class);
|
||||
final RealmModel realm = client.getRealm();
|
||||
entity.setId(id);
|
||||
entity.setRealmId(client.getRealm().getId());
|
||||
entity.setRealmId(realm.getId());
|
||||
entity.setName(name);
|
||||
entity.setClientRole(true);
|
||||
entity.setClientId(client.getId());
|
||||
if (entity.getId() != null && tx.exists(entity.getId())) {
|
||||
if (entity.getId() != null && txInRealm(realm).exists(entity.getId())) {
|
||||
throw new ModelDuplicateException("Role exists: " + id);
|
||||
}
|
||||
entity = tx.create(entity);
|
||||
return entityToAdapterFunc(client.getRealm()).apply(entity);
|
||||
entity = txInRealm(realm).create(entity);
|
||||
return entityToAdapterFunc(realm).apply(entity);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Stream<RoleModel> getClientRolesStream(ClientModel client, Integer first, Integer max) {
|
||||
DefaultModelCriteria<RoleModel> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, client.getRealm().getId())
|
||||
final RealmModel realm = client.getRealm();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.CLIENT_ID, Operator.EQ, client.getId());
|
||||
|
||||
return tx.read(withCriteria(mcb).pagination(first, max, SearchableFields.NAME))
|
||||
.map(entityToAdapterFunc(client.getRealm()));
|
||||
return txInRealm(realm).read(withCriteria(mcb).pagination(first, max, SearchableFields.NAME))
|
||||
.map(entityToAdapterFunc(realm));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Stream<RoleModel> getClientRolesStream(ClientModel client) {
|
||||
DefaultModelCriteria<RoleModel> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, client.getRealm().getId())
|
||||
final RealmModel realm = client.getRealm();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.CLIENT_ID, Operator.EQ, client.getId());
|
||||
|
||||
return tx.read(withCriteria(mcb).orderBy(SearchableFields.NAME, ASCENDING))
|
||||
.map(entityToAdapterFunc(client.getRealm()));
|
||||
return txInRealm(realm).read(withCriteria(mcb).orderBy(SearchableFields.NAME, ASCENDING))
|
||||
.map(entityToAdapterFunc(realm));
|
||||
}
|
||||
@Override
|
||||
public boolean removeRole(RoleModel role) {
|
||||
|
@ -163,7 +176,7 @@ public class MapRoleProvider implements RoleProvider {
|
|||
|
||||
session.invalidate(ROLE_BEFORE_REMOVE, realm, role);
|
||||
|
||||
tx.delete(role.getId());
|
||||
txInRealm(realm).delete(role.getId());
|
||||
|
||||
session.invalidate(ROLE_AFTER_REMOVE, realm, role);
|
||||
|
||||
|
@ -192,7 +205,7 @@ public class MapRoleProvider implements RoleProvider {
|
|||
.compare(SearchableFields.IS_CLIENT_ROLE, Operator.NE, true)
|
||||
.compare(SearchableFields.NAME, Operator.EQ, name);
|
||||
|
||||
return tx.read(withCriteria(mcb))
|
||||
return txInRealm(realm).read(withCriteria(mcb))
|
||||
.map(entityToAdapterFunc(realm))
|
||||
.findFirst()
|
||||
.orElse(null);
|
||||
|
@ -206,12 +219,13 @@ public class MapRoleProvider implements RoleProvider {
|
|||
LOG.tracef("getClientRole(%s, %s)%s", client, name, getShortStackTrace());
|
||||
|
||||
DefaultModelCriteria<RoleModel> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, client.getRealm().getId())
|
||||
final RealmModel realm = client.getRealm();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.CLIENT_ID, Operator.EQ, client.getId())
|
||||
.compare(SearchableFields.NAME, Operator.EQ, name);
|
||||
|
||||
return tx.read(withCriteria(mcb))
|
||||
.map(entityToAdapterFunc(client.getRealm()))
|
||||
return txInRealm(realm).read(withCriteria(mcb))
|
||||
.map(entityToAdapterFunc(realm))
|
||||
.findFirst()
|
||||
.orElse(null);
|
||||
}
|
||||
|
@ -224,7 +238,7 @@ public class MapRoleProvider implements RoleProvider {
|
|||
|
||||
LOG.tracef("getRoleById(%s, %s)%s", realm, id, getShortStackTrace());
|
||||
|
||||
MapRoleEntity entity = tx.read(id);
|
||||
MapRoleEntity entity = txInRealm(realm).read(id);
|
||||
String realmId = realm.getId();
|
||||
// when a store doesn't store information about all realms, it doesn't have the information about
|
||||
return (entity == null || (entity.getRealmId() != null && !Objects.equals(realmId, entity.getRealmId())))
|
||||
|
@ -245,7 +259,7 @@ public class MapRoleProvider implements RoleProvider {
|
|||
mcb.compare(SearchableFields.DESCRIPTION, Operator.ILIKE, "%" + search + "%")
|
||||
);
|
||||
|
||||
return tx.read(withCriteria(mcb).pagination(first, max, SearchableFields.NAME))
|
||||
return txInRealm(realm).read(withCriteria(mcb).pagination(first, max, SearchableFields.NAME))
|
||||
.map(entityToAdapterFunc(realm));
|
||||
}
|
||||
|
||||
|
@ -255,15 +269,16 @@ public class MapRoleProvider implements RoleProvider {
|
|||
return Stream.empty();
|
||||
}
|
||||
DefaultModelCriteria<RoleModel> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, client.getRealm().getId())
|
||||
final RealmModel realm = client.getRealm();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.CLIENT_ID, Operator.EQ, client.getId())
|
||||
.or(
|
||||
mcb.compare(SearchableFields.NAME, Operator.ILIKE, "%" + search + "%"),
|
||||
mcb.compare(SearchableFields.DESCRIPTION, Operator.ILIKE, "%" + search + "%")
|
||||
);
|
||||
|
||||
return tx.read(withCriteria(mcb).pagination(first, max, SearchableFields.NAME))
|
||||
.map(entityToAdapterFunc(client.getRealm()));
|
||||
return txInRealm(realm).read(withCriteria(mcb).pagination(first, max, SearchableFields.NAME))
|
||||
.map(entityToAdapterFunc(realm));
|
||||
}
|
||||
|
||||
public void preRemove(RealmModel realm) {
|
||||
|
@ -271,7 +286,7 @@ public class MapRoleProvider implements RoleProvider {
|
|||
DefaultModelCriteria<RoleModel> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId());
|
||||
|
||||
tx.delete(withCriteria(mcb));
|
||||
txInRealm(realm).delete(withCriteria(mcb));
|
||||
}
|
||||
|
||||
public void preRemove(RealmModel realm, RoleModel role) {
|
||||
|
@ -279,7 +294,7 @@ public class MapRoleProvider implements RoleProvider {
|
|||
DefaultModelCriteria<RoleModel> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.COMPOSITE_ROLE, Operator.EQ, role.getId());
|
||||
tx.read(withCriteria(mcb)).forEach(mapRoleEntity -> mapRoleEntity.removeCompositeRole(role.getId()));
|
||||
txInRealm(realm).read(withCriteria(mcb)).forEach(mapRoleEntity -> mapRoleEntity.removeCompositeRole(role.getId()));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -42,21 +42,30 @@ import org.keycloak.models.map.authorization.entity.MapScopeEntity;
|
|||
import org.keycloak.models.map.client.MapClientEntity;
|
||||
import org.keycloak.models.map.clientscope.MapClientScopeEntity;
|
||||
import org.keycloak.models.map.common.AbstractEntity;
|
||||
import org.keycloak.models.map.common.DeepCloner;
|
||||
import org.keycloak.models.map.common.EntityField;
|
||||
import org.keycloak.models.map.common.UpdatableEntity;
|
||||
import org.keycloak.models.map.common.delegate.EntityFieldDelegate;
|
||||
import org.keycloak.models.map.events.MapAdminEventEntity;
|
||||
import org.keycloak.models.map.events.MapAuthEventEntity;
|
||||
import org.keycloak.models.map.group.MapGroupEntity;
|
||||
import org.keycloak.models.map.loginFailure.MapUserLoginFailureEntity;
|
||||
import org.keycloak.models.map.realm.MapRealmEntity;
|
||||
import org.keycloak.models.map.role.MapRoleEntity;
|
||||
import org.keycloak.models.map.role.MapRoleEntityFields;
|
||||
import org.keycloak.models.map.user.MapUserEntity;
|
||||
import org.keycloak.models.map.userSession.MapAuthenticatedClientSessionEntity;
|
||||
import org.keycloak.models.map.userSession.MapUserSessionEntity;
|
||||
import org.keycloak.sessions.RootAuthenticationSessionModel;
|
||||
import java.util.HashMap;
|
||||
import java.util.IdentityHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
import static org.keycloak.models.map.common.AutogeneratedClasses.ENTITY_FIELDS;
|
||||
|
||||
/**
|
||||
* Utility class covering various aspects of relationship between model and entity classes.
|
||||
|
@ -64,7 +73,7 @@ import java.util.stream.Collectors;
|
|||
*/
|
||||
public class ModelEntityUtil {
|
||||
|
||||
private static final Map<Class<?>, String> MODEL_TO_NAME = new HashMap<>();
|
||||
private static final Map<Class<?>, String> MODEL_TO_NAME = new IdentityHashMap<>();
|
||||
static {
|
||||
MODEL_TO_NAME.put(SingleUseObjectValueModel.class, "single-use-objects");
|
||||
MODEL_TO_NAME.put(ClientScopeModel.class, "client-scopes");
|
||||
|
@ -88,9 +97,9 @@ public class ModelEntityUtil {
|
|||
MODEL_TO_NAME.put(AdminEvent.class, "admin-events");
|
||||
MODEL_TO_NAME.put(Event.class, "auth-events");
|
||||
}
|
||||
private static final Map<String, Class<?>> NAME_TO_MODEL = MODEL_TO_NAME.entrySet().stream().collect(Collectors.toMap(Entry::getValue, Entry::getKey));
|
||||
private static final Map<String, Class<?>> NAME_TO_MODEL = MODEL_TO_NAME.entrySet().stream().collect(Collectors.toUnmodifiableMap(Entry::getValue, Entry::getKey));
|
||||
|
||||
private static final Map<Class<?>, Class<? extends AbstractEntity>> MODEL_TO_ENTITY_TYPE = new HashMap<>();
|
||||
private static final Map<Class<?>, Class<? extends AbstractEntity>> MODEL_TO_ENTITY_TYPE = new IdentityHashMap<>();
|
||||
static {
|
||||
MODEL_TO_ENTITY_TYPE.put(SingleUseObjectValueModel.class, MapSingleUseObjectEntity.class);
|
||||
MODEL_TO_ENTITY_TYPE.put(ClientScopeModel.class, MapClientScopeEntity.class);
|
||||
|
@ -115,7 +124,20 @@ public class ModelEntityUtil {
|
|||
MODEL_TO_ENTITY_TYPE.put(AdminEvent.class, MapAdminEventEntity.class);
|
||||
MODEL_TO_ENTITY_TYPE.put(Event.class, MapAuthEventEntity.class);
|
||||
}
|
||||
private static final Map<Class<?>, Class<?>> ENTITY_TO_MODEL_TYPE = MODEL_TO_ENTITY_TYPE.entrySet().stream().collect(Collectors.toMap(Entry::getValue, Entry::getKey));
|
||||
private static final Map<Class<?>, Class<?>> ENTITY_TO_MODEL_TYPE = MODEL_TO_ENTITY_TYPE.entrySet().stream().collect(Collectors.toUnmodifiableMap(Entry::getValue, Entry::getKey));
|
||||
private static final String ID_FIELD_NAME = MapRoleEntityFields.ID.getName();
|
||||
private static final Map<Class<?>, EntityField<?>> ENTITY_TO_ID_FIELD = ENTITY_FIELDS.entrySet().stream()
|
||||
.filter(me -> Stream.of(me.getValue()).anyMatch(e -> ID_FIELD_NAME.equals(e.getName())))
|
||||
.map(me -> Map.entry(me.getKey(), Stream.of(me.getValue()).filter(e -> ID_FIELD_NAME.equals(e.getName())).findAny().orElse(null)))
|
||||
.filter(me -> me.getValue() != null)
|
||||
.collect(Collectors.toUnmodifiableMap(Entry::getKey, Entry::getValue));
|
||||
|
||||
private static final String REALM_ID_FIELD_NAME = MapRoleEntityFields.REALM_ID.getName();
|
||||
private static final Map<Class<?>, EntityField<?>> ENTITY_TO_REALM_ID_FIELD = ENTITY_FIELDS.entrySet().stream()
|
||||
.filter(me -> Stream.of(me.getValue()).anyMatch(e -> REALM_ID_FIELD_NAME.equals(e.getName())))
|
||||
.map(me -> Map.entry(me.getKey(), Stream.of(me.getValue()).filter(e -> REALM_ID_FIELD_NAME.equals(e.getName())).findAny().orElse(null)))
|
||||
.filter(me -> me.getValue() != null)
|
||||
.collect(Collectors.toUnmodifiableMap(Entry::getKey, Entry::getValue));
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public static <V extends AbstractEntity, M> Class<V> getEntityType(Class<M> modelClass) {
|
||||
|
@ -154,6 +176,55 @@ public class ModelEntityUtil {
|
|||
return (Class<M>) NAME_TO_MODEL.get(key);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public static boolean entityFieldsKnown(Class<?> entityClass) {
|
||||
return ENTITY_FIELDS.containsKey(entityClass);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public static <V> Stream<EntityField<V>> getEntityFields(Class<V> entityClass) {
|
||||
EntityField<V>[] values = (EntityField<V>[]) ENTITY_FIELDS.get(entityClass);
|
||||
return values == null ? Stream.empty() : Stream.of(values);
|
||||
}
|
||||
|
||||
public static <V extends AbstractEntity> Optional<EntityField<V>> getEntityField(Class<V> entityClass, String fieldNameCamelCase) {
|
||||
final Stream<EntityField<V>> s = getEntityFields(entityClass);
|
||||
|
||||
return s
|
||||
.filter(ef -> fieldNameCamelCase.equals(ef.getNameCamelCase()))
|
||||
.findAny();
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public static <V extends AbstractEntity> EntityField<V> getIdField(Class<V> targetEntityClass) {
|
||||
return (EntityField<V>) ENTITY_TO_ID_FIELD.get(targetEntityClass);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public static <V extends AbstractEntity> EntityField<V> getRealmIdField(Class<V> targetEntityClass) {
|
||||
return (EntityField<V>) ENTITY_TO_REALM_ID_FIELD.get(targetEntityClass);
|
||||
}
|
||||
|
||||
public static <T extends AbstractEntity & UpdatableEntity> T supplyReadOnlyFieldValueIfUnset(T entity, EntityField<T> entityField, Object value) {
|
||||
if (entity == null || Objects.equals(entityField.get(entity), value)) {
|
||||
return entity;
|
||||
}
|
||||
return DeepCloner.DUMB_CLONER.entityFieldDelegate(entity, new EntityFieldDelegate.WithEntity<>(entity) {
|
||||
@Override
|
||||
public <EF extends java.lang.Enum<? extends EntityField<T>> & EntityField<T>> Object get(EF field) {
|
||||
if (field == entityField) {
|
||||
return value;
|
||||
}
|
||||
return super.get(field);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <V, EF extends java.lang.Enum<? extends EntityField<T>> & EntityField<T>> void set(EF field, V value) {
|
||||
if (field != entityField) {
|
||||
super.set(field, value);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -2,7 +2,6 @@ package org.keycloak.models.map.storage.chm;
|
|||
|
||||
import org.keycloak.models.map.common.AbstractEntity;
|
||||
import org.keycloak.models.map.common.UpdatableEntity;
|
||||
import org.keycloak.models.map.storage.ModelCriteriaBuilder;
|
||||
import org.keycloak.models.map.storage.QueryParameters;
|
||||
|
||||
|
||||
|
@ -101,4 +100,13 @@ public interface ConcurrentHashMapCrudOperations<V extends AbstractEntity & Upda
|
|||
default boolean exists(QueryParameters<M> queryParameters) {
|
||||
return getCount(queryParameters) > 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines first available key from the value upon creation.
|
||||
* @param value
|
||||
* @return
|
||||
*/
|
||||
default String determineKeyFromValue(V value, boolean forCreate) {
|
||||
return value == null ? null : value.getId();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -19,6 +19,8 @@ package org.keycloak.models.map.storage.chm;
|
|||
import org.keycloak.models.map.common.StringKeyConverter;
|
||||
import org.keycloak.models.map.common.AbstractEntity;
|
||||
import org.keycloak.models.map.common.DeepCloner;
|
||||
import org.keycloak.models.map.common.EntityField;
|
||||
import org.keycloak.models.map.common.HasRealmId;
|
||||
import org.keycloak.models.map.common.UpdatableEntity;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashMap;
|
||||
|
@ -31,12 +33,14 @@ import java.util.stream.Collectors;
|
|||
import java.util.stream.Stream;
|
||||
import org.jboss.logging.Logger;
|
||||
import org.keycloak.models.map.storage.MapKeycloakTransaction;
|
||||
import org.keycloak.models.map.storage.ModelEntityUtil;
|
||||
import org.keycloak.models.map.storage.QueryParameters;
|
||||
import org.keycloak.models.map.storage.chm.MapModelCriteriaBuilder.UpdatePredicatesFunc;
|
||||
import org.keycloak.models.map.storage.criteria.DefaultModelCriteria;
|
||||
import org.keycloak.storage.SearchableModelField;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
public class ConcurrentHashMapKeycloakTransaction<K, V extends AbstractEntity & UpdatableEntity, M> implements MapKeycloakTransaction<V, M> {
|
||||
public class ConcurrentHashMapKeycloakTransaction<K, V extends AbstractEntity & UpdatableEntity, M> implements MapKeycloakTransaction<V, M>, HasRealmId {
|
||||
|
||||
private final static Logger log = Logger.getLogger(ConcurrentHashMapKeycloakTransaction.class);
|
||||
|
||||
|
@ -47,16 +51,25 @@ public class ConcurrentHashMapKeycloakTransaction<K, V extends AbstractEntity &
|
|||
protected final StringKeyConverter<K> keyConverter;
|
||||
protected final DeepCloner cloner;
|
||||
protected final Map<SearchableModelField<? super M>, UpdatePredicatesFunc<K, V, M>> fieldPredicates;
|
||||
protected final EntityField<V> realmIdEntityField;
|
||||
private String realmId;
|
||||
private final boolean mapHasRealmId;
|
||||
|
||||
enum MapOperation {
|
||||
CREATE, UPDATE, DELETE,
|
||||
}
|
||||
|
||||
public ConcurrentHashMapKeycloakTransaction(ConcurrentHashMapCrudOperations<V, M> map, StringKeyConverter<K> keyConverter, DeepCloner cloner, Map<SearchableModelField<? super M>, UpdatePredicatesFunc<K, V, M>> fieldPredicates) {
|
||||
this(map, keyConverter, cloner, fieldPredicates, null);
|
||||
}
|
||||
|
||||
public ConcurrentHashMapKeycloakTransaction(ConcurrentHashMapCrudOperations<V, M> map, StringKeyConverter<K> keyConverter, DeepCloner cloner, Map<SearchableModelField<? super M>, UpdatePredicatesFunc<K, V, M>> fieldPredicates, EntityField<V> realmIdEntityField) {
|
||||
this.map = map;
|
||||
this.keyConverter = keyConverter;
|
||||
this.cloner = cloner;
|
||||
this.fieldPredicates = fieldPredicates;
|
||||
this.realmIdEntityField = realmIdEntityField;
|
||||
this.mapHasRealmId = map instanceof HasRealmId;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -70,9 +83,11 @@ public class ConcurrentHashMapKeycloakTransaction<K, V extends AbstractEntity &
|
|||
throw new RuntimeException("Rollback only!");
|
||||
}
|
||||
|
||||
final Consumer<String> setRealmId = mapHasRealmId ? ((HasRealmId) map)::setRealmId : a -> {};
|
||||
if (! tasks.isEmpty()) {
|
||||
log.tracef("Commit - %s", map);
|
||||
for (MapTaskWithValue value : tasks.values()) {
|
||||
setRealmId.accept(value.getRealmId());
|
||||
value.execute();
|
||||
}
|
||||
}
|
||||
|
@ -99,7 +114,7 @@ public class ConcurrentHashMapKeycloakTransaction<K, V extends AbstractEntity &
|
|||
}
|
||||
|
||||
private MapModelCriteriaBuilder<K, V, M> createCriteriaBuilder() {
|
||||
return new MapModelCriteriaBuilder<K, V, M>(keyConverter, fieldPredicates);
|
||||
return new MapModelCriteriaBuilder<>(keyConverter, fieldPredicates);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -136,13 +151,17 @@ public class ConcurrentHashMapKeycloakTransaction<K, V extends AbstractEntity &
|
|||
try {
|
||||
// TODO: Consider using Optional rather than handling NPE
|
||||
final V entity = read(sKey, map::read);
|
||||
return registerEntityForChanges(entity);
|
||||
if (entity == null) {
|
||||
log.debugf("Could not read object for key %s", sKey);
|
||||
return null;
|
||||
}
|
||||
return postProcess(registerEntityForChanges(entity));
|
||||
} catch (NullPointerException ex) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public V read(String key, Function<String, V> defaultValueFunc) {
|
||||
private V read(String key, Function<String, V> defaultValueFunc) {
|
||||
MapTaskWithValue current = tasks.get(key);
|
||||
// If the key exists, then it has entered the "tasks" after bulk delete that could have
|
||||
// removed it, so looking through bulk deletes is irrelevant
|
||||
|
@ -190,6 +209,15 @@ public class ConcurrentHashMapKeycloakTransaction<K, V extends AbstractEntity &
|
|||
.filter(Objects::nonNull)
|
||||
.map(this::registerEntityForChanges);
|
||||
|
||||
updatedAndNotRemovedObjectsStream = postProcess(updatedAndNotRemovedObjectsStream);
|
||||
|
||||
if (mapMcb != null) {
|
||||
// Add explicit filtering for the case when the map returns raw stream of untested values (ie. realize sequential scan)
|
||||
updatedAndNotRemovedObjectsStream = updatedAndNotRemovedObjectsStream
|
||||
.filter(e -> mapMcb.getKeyFilter().test(keyConverter.fromStringSafe(e.getId())))
|
||||
.filter(mapMcb.getEntityFilter());
|
||||
}
|
||||
|
||||
// In case of created values stored in MapKeycloakTransaction, we need filter those according to the filter
|
||||
Stream<V> res = mapMcb == null
|
||||
? updatedAndNotRemovedObjectsStream
|
||||
|
@ -218,16 +246,18 @@ public class ConcurrentHashMapKeycloakTransaction<K, V extends AbstractEntity &
|
|||
|
||||
@Override
|
||||
public V create(V value) {
|
||||
String key = value.getId();
|
||||
String key = map.determineKeyFromValue(value, true);
|
||||
if (key == null) {
|
||||
K newKey = keyConverter.yieldNewUniqueKey();
|
||||
key = keyConverter.keyToString(newKey);
|
||||
value = cloner.from(key, value);
|
||||
} else if (! key.equals(value.getId())) {
|
||||
value = cloner.from(key, value);
|
||||
} else {
|
||||
value = cloner.from(value);
|
||||
}
|
||||
addTask(key, new CreateOperation(value));
|
||||
return value;
|
||||
return postProcess(value);
|
||||
}
|
||||
|
||||
public V updateIfChanged(V value, Predicate<V> shouldPut) {
|
||||
|
@ -309,7 +339,7 @@ public class ConcurrentHashMapKeycloakTransaction<K, V extends AbstractEntity &
|
|||
private MapTaskWithValue merge(MapTaskWithValue oldValue, MapTaskWithValue newValue) {
|
||||
switch (newValue.getOperation()) {
|
||||
case DELETE:
|
||||
return oldValue.containsCreate() ? null : newValue;
|
||||
return newValue;
|
||||
default:
|
||||
return new MapTaskCompose(oldValue, newValue);
|
||||
}
|
||||
|
@ -317,9 +347,11 @@ public class ConcurrentHashMapKeycloakTransaction<K, V extends AbstractEntity &
|
|||
|
||||
protected abstract class MapTaskWithValue {
|
||||
protected final V value;
|
||||
private final String realmId;
|
||||
|
||||
public MapTaskWithValue(V value) {
|
||||
this.value = value;
|
||||
this.realmId = ConcurrentHashMapKeycloakTransaction.this.realmId;
|
||||
}
|
||||
|
||||
public V getValue() {
|
||||
|
@ -338,6 +370,10 @@ public class ConcurrentHashMapKeycloakTransaction<K, V extends AbstractEntity &
|
|||
return false;
|
||||
}
|
||||
|
||||
public String getRealmId() {
|
||||
return realmId;
|
||||
}
|
||||
|
||||
public abstract MapOperation getOperation();
|
||||
public abstract void execute();
|
||||
}
|
||||
|
@ -440,4 +476,39 @@ public class ConcurrentHashMapKeycloakTransaction<K, V extends AbstractEntity &
|
|||
return map.getCount(queryParameters);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getRealmId() {
|
||||
if (mapHasRealmId) {
|
||||
return ((HasRealmId) map).getRealmId();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public void setRealmId(String realmId) {
|
||||
if (mapHasRealmId) {
|
||||
((HasRealmId) map).setRealmId(realmId);
|
||||
this.realmId = realmId;
|
||||
} else {
|
||||
this.realmId = null;
|
||||
}
|
||||
}
|
||||
|
||||
private V postProcess(V value) {
|
||||
return (realmId == null || value == null)
|
||||
? value
|
||||
: ModelEntityUtil.supplyReadOnlyFieldValueIfUnset(value, realmIdEntityField, realmId);
|
||||
}
|
||||
|
||||
private Stream<V> postProcess(Stream<V> stream) {
|
||||
if (this.realmId == null) {
|
||||
return stream;
|
||||
}
|
||||
|
||||
String localRealmId = this.realmId;
|
||||
return stream.map((V value) -> ModelEntityUtil.supplyReadOnlyFieldValueIfUnset(value, realmIdEntityField, localRealmId));
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
/*
|
||||
* Copyright 2021 Red Hat, Inc. and/or its affiliates
|
||||
* Copyright 2022 Red Hat, Inc. and/or its affiliates
|
||||
* and other contributors as indicated by the @author tags.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
|
@ -16,177 +16,26 @@
|
|||
*/
|
||||
package org.keycloak.models.map.storage.criteria;
|
||||
|
||||
import org.keycloak.models.map.storage.ModelCriteriaBuilder;
|
||||
import org.keycloak.models.map.storage.criteria.ModelCriteriaNode.ExtOperator;
|
||||
import org.keycloak.storage.SearchableModelField;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
/**
|
||||
* Descriptive model criteria implementation which in other words represents a Boolean formula on searchable fields.
|
||||
* Generic instantiable {@link DescriptiveModelCriteria}.
|
||||
* @author hmlnarik
|
||||
*/
|
||||
public class DefaultModelCriteria<M> implements ModelCriteriaBuilder<M, DefaultModelCriteria<M>> {
|
||||
public class DefaultModelCriteria<M> extends DescriptiveModelCriteria<M, DefaultModelCriteria<M>> {
|
||||
|
||||
private static final DefaultModelCriteria<?> INSTANCE = new DefaultModelCriteria<>(null);
|
||||
|
||||
private final ModelCriteriaNode<M> node;
|
||||
|
||||
private DefaultModelCriteria(ModelCriteriaNode<M> node) {
|
||||
this.node = node;
|
||||
super(node);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public static <M> DefaultModelCriteria<M> criteria() {
|
||||
return (DefaultModelCriteria<M>) INSTANCE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public DefaultModelCriteria<M> compare(SearchableModelField<? super M> modelField, Operator op, Object... value) {
|
||||
return compare(new ModelCriteriaNode<>(modelField, op, value));
|
||||
}
|
||||
|
||||
private DefaultModelCriteria<M> compare(final ModelCriteriaNode<M> nodeToAdd) {
|
||||
ModelCriteriaNode<M> targetNode;
|
||||
|
||||
if (isEmpty()) {
|
||||
targetNode = nodeToAdd;
|
||||
} else if (node.getNodeOperator() == ExtOperator.AND) {
|
||||
targetNode = node.cloneTree();
|
||||
targetNode.addChild(nodeToAdd);
|
||||
} else {
|
||||
targetNode = new ModelCriteriaNode<>(ExtOperator.AND);
|
||||
targetNode.addChild(node.cloneTree());
|
||||
targetNode.addChild(nodeToAdd);
|
||||
}
|
||||
|
||||
protected DefaultModelCriteria<M> instantiateForNode(ModelCriteriaNode<M> targetNode) {
|
||||
return new DefaultModelCriteria<>(targetNode);
|
||||
}
|
||||
|
||||
@Override
|
||||
public DefaultModelCriteria<M> and(DefaultModelCriteria<M>... mcbs) {
|
||||
if (mcbs.length == 1) {
|
||||
return compare(mcbs[0].node);
|
||||
}
|
||||
|
||||
final ModelCriteriaNode<M> targetNode = new ModelCriteriaNode<>(ExtOperator.AND);
|
||||
AtomicBoolean hasFalseNode = new AtomicBoolean(false);
|
||||
for (DefaultModelCriteria<M> mcb : mcbs) {
|
||||
final ModelCriteriaNode<M> nodeToAdd = mcb.node;
|
||||
getNodesToAddForAndOr(nodeToAdd, ExtOperator.AND)
|
||||
.filter(ModelCriteriaNode::isNotTrueNode)
|
||||
.peek(n -> { if (n.isFalseNode()) hasFalseNode.lazySet(true); })
|
||||
.map(ModelCriteriaNode::cloneTree)
|
||||
.forEach(targetNode::addChild);
|
||||
|
||||
if (hasFalseNode.get()) {
|
||||
return compare(new ModelCriteriaNode<>(ExtOperator.__FALSE__));
|
||||
}
|
||||
}
|
||||
|
||||
if (targetNode.getChildren().isEmpty()) {
|
||||
// AND on empty set of formulae is TRUE: It does hold that there all formulae are satisfied
|
||||
return compare(new ModelCriteriaNode<>(ExtOperator.__TRUE__));
|
||||
}
|
||||
|
||||
return compare(targetNode);
|
||||
}
|
||||
|
||||
@Override
|
||||
public DefaultModelCriteria<M> or(DefaultModelCriteria<M>... mcbs) {
|
||||
if (mcbs.length == 1) {
|
||||
return compare(mcbs[0].node);
|
||||
}
|
||||
|
||||
final ModelCriteriaNode<M> targetNode = new ModelCriteriaNode<>(ExtOperator.OR);
|
||||
AtomicBoolean hasTrueNode = new AtomicBoolean(false);
|
||||
for (DefaultModelCriteria<M> mcb : mcbs) {
|
||||
final ModelCriteriaNode<M> nodeToAdd = mcb.node;
|
||||
getNodesToAddForAndOr(nodeToAdd, ExtOperator.OR)
|
||||
.filter(ModelCriteriaNode::isNotFalseNode)
|
||||
.peek(n -> { if (n.isTrueNode()) hasTrueNode.lazySet(true); })
|
||||
.map(ModelCriteriaNode::cloneTree)
|
||||
.forEach(targetNode::addChild);
|
||||
|
||||
if (hasTrueNode.get()) {
|
||||
return compare(new ModelCriteriaNode<>(ExtOperator.__TRUE__));
|
||||
}
|
||||
}
|
||||
|
||||
if (targetNode.getChildren().isEmpty()) {
|
||||
// OR on empty set of formulae is FALSE: It does not hold that there is at least one satisfied formula
|
||||
return compare(new ModelCriteriaNode<>(ExtOperator.__FALSE__));
|
||||
}
|
||||
|
||||
return compare(targetNode);
|
||||
}
|
||||
|
||||
@Override
|
||||
public DefaultModelCriteria<M> not(DefaultModelCriteria<M> mcb) {
|
||||
ModelCriteriaNode<M> toBeChild = mcb.node;
|
||||
if (toBeChild.getNodeOperator() == ExtOperator.NOT) {
|
||||
return compare(toBeChild.getChildren().get(0).cloneTree());
|
||||
}
|
||||
|
||||
final ModelCriteriaNode<M> targetNode = new ModelCriteriaNode<>(ExtOperator.NOT);
|
||||
targetNode.addChild(toBeChild.cloneTree());
|
||||
return compare(targetNode);
|
||||
}
|
||||
|
||||
/**
|
||||
* Copies contents of this {@code ModelCriteriaBuilder} into
|
||||
* another {@code ModelCriteriaBuilder}.
|
||||
* @param mcb {@code ModelCriteriaBuilder} to copy the contents onto
|
||||
* @return Updated {@code ModelCriteriaBuilder}
|
||||
*/
|
||||
public <C extends ModelCriteriaBuilder<M, C>> C flashToModelCriteriaBuilder(C mcb) {
|
||||
if (isEmpty()) {
|
||||
return mcb;
|
||||
}
|
||||
return mcb == null ? null : node.flashToModelCriteriaBuilder(mcb);
|
||||
}
|
||||
|
||||
/**
|
||||
* Optimizes this formula into another {@code ModelCriteriaBuilder}, using the values of
|
||||
* {@link ExtOperator#__TRUE__} and {@link ExtOperator#__FALSE__} accordingly.
|
||||
* @return New instance of {@code }
|
||||
*/
|
||||
public DefaultModelCriteria<M> optimize() {
|
||||
return flashToModelCriteriaBuilder(criteria());
|
||||
}
|
||||
|
||||
@FunctionalInterface
|
||||
public interface AtomicFormulaTester<M> {
|
||||
public Boolean test(SearchableModelField<? super M> field, Operator operator, Object[] operatorArguments);
|
||||
}
|
||||
|
||||
public DefaultModelCriteria<M> partiallyEvaluate(AtomicFormulaTester<M> tester) {
|
||||
return new DefaultModelCriteria<>(node.cloneTree((field, operator, operatorArguments) -> {
|
||||
Boolean res = tester.test(field, operator, operatorArguments);
|
||||
if (res == null) {
|
||||
return new ModelCriteriaNode<>(field, operator, operatorArguments);
|
||||
} else {
|
||||
return new ModelCriteriaNode<>(res ? ExtOperator.__TRUE__ : ExtOperator.__FALSE__);
|
||||
}
|
||||
}, ModelCriteriaNode::new));
|
||||
}
|
||||
|
||||
public boolean isEmpty() {
|
||||
return node == null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return isEmpty() ? "" : node.toString();
|
||||
}
|
||||
|
||||
private Stream<ModelCriteriaNode<M>> getNodesToAddForAndOr(ModelCriteriaNode<M> nodeToAdd, ExtOperator operatorBeingAdded) {
|
||||
final ExtOperator op = nodeToAdd.getNodeOperator();
|
||||
|
||||
if (op == operatorBeingAdded) {
|
||||
return nodeToAdd.getChildren().stream();
|
||||
}
|
||||
|
||||
return Stream.of(nodeToAdd);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -0,0 +1,284 @@
|
|||
/*
|
||||
* Copyright 2021 Red Hat, Inc. and/or its affiliates
|
||||
* and other contributors as indicated by the @author tags.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.keycloak.models.map.storage.criteria;
|
||||
|
||||
import org.keycloak.models.map.storage.ModelCriteriaBuilder;
|
||||
import org.keycloak.models.map.storage.criteria.ModelCriteriaNode.ExtOperator;
|
||||
import org.keycloak.storage.SearchableModelField;
|
||||
import java.util.Collection;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
/**
|
||||
* Descriptive model criteria implementation which in other words represents a Boolean formula on searchable fields.
|
||||
* @author hmlnarik
|
||||
*/
|
||||
public abstract class DescriptiveModelCriteria<M, Self extends DescriptiveModelCriteria<M, Self>> implements ModelCriteriaBuilder<M, Self> {
|
||||
|
||||
protected final ModelCriteriaNode<M> node;
|
||||
|
||||
protected DescriptiveModelCriteria(ModelCriteriaNode<M> node) {
|
||||
this.node = node;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Self compare(SearchableModelField<? super M> modelField, Operator op, Object... value) {
|
||||
return compare(new ModelCriteriaNode<>(modelField, op, value));
|
||||
}
|
||||
|
||||
private Self compare(final ModelCriteriaNode<M> nodeToAdd) {
|
||||
ModelCriteriaNode<M> targetNode;
|
||||
|
||||
if (isEmpty()) {
|
||||
targetNode = nodeToAdd;
|
||||
} else if (node.getNodeOperator() == ExtOperator.AND) {
|
||||
targetNode = node.cloneTree();
|
||||
targetNode.addChild(nodeToAdd);
|
||||
} else {
|
||||
targetNode = new ModelCriteriaNode<>(ExtOperator.AND);
|
||||
targetNode.addChild(node.cloneTree());
|
||||
targetNode.addChild(nodeToAdd);
|
||||
}
|
||||
|
||||
return instantiateForNode(targetNode);
|
||||
}
|
||||
|
||||
protected abstract Self instantiateForNode(ModelCriteriaNode<M> targetNode);
|
||||
|
||||
@Override
|
||||
public Self and(Self... mcbs) {
|
||||
if (mcbs.length == 1) {
|
||||
return compare(mcbs[0].node);
|
||||
}
|
||||
|
||||
final ModelCriteriaNode<M> targetNode = new ModelCriteriaNode<>(ExtOperator.AND);
|
||||
AtomicBoolean hasFalseNode = new AtomicBoolean(false);
|
||||
for (Self mcb : mcbs) {
|
||||
final ModelCriteriaNode<M> nodeToAdd = mcb.node;
|
||||
getNodesToAddForAndOr(nodeToAdd, ExtOperator.AND)
|
||||
.filter(ModelCriteriaNode::isNotTrueNode)
|
||||
.peek(n -> { if (n.isFalseNode()) hasFalseNode.lazySet(true); })
|
||||
.map(ModelCriteriaNode::cloneTree)
|
||||
.forEach(targetNode::addChild);
|
||||
|
||||
if (hasFalseNode.get()) {
|
||||
return compare(new ModelCriteriaNode<>(ExtOperator.__FALSE__));
|
||||
}
|
||||
}
|
||||
|
||||
if (targetNode.getChildren().isEmpty()) {
|
||||
// AND on empty set of formulae is TRUE: It does hold that there all formulae are satisfied
|
||||
return compare(new ModelCriteriaNode<>(ExtOperator.__TRUE__));
|
||||
}
|
||||
|
||||
return compare(targetNode);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Self or(Self... mcbs) {
|
||||
if (mcbs.length == 1) {
|
||||
return compare(mcbs[0].node);
|
||||
}
|
||||
|
||||
final ModelCriteriaNode<M> targetNode = new ModelCriteriaNode<>(ExtOperator.OR);
|
||||
AtomicBoolean hasTrueNode = new AtomicBoolean(false);
|
||||
for (Self mcb : mcbs) {
|
||||
final ModelCriteriaNode<M> nodeToAdd = mcb.node;
|
||||
getNodesToAddForAndOr(nodeToAdd, ExtOperator.OR)
|
||||
.filter(ModelCriteriaNode::isNotFalseNode)
|
||||
.peek(n -> { if (n.isTrueNode()) hasTrueNode.lazySet(true); })
|
||||
.map(ModelCriteriaNode::cloneTree)
|
||||
.forEach(targetNode::addChild);
|
||||
|
||||
if (hasTrueNode.get()) {
|
||||
return compare(new ModelCriteriaNode<>(ExtOperator.__TRUE__));
|
||||
}
|
||||
}
|
||||
|
||||
if (targetNode.getChildren().isEmpty()) {
|
||||
// OR on empty set of formulae is FALSE: It does not hold that there is at least one satisfied formula
|
||||
return compare(new ModelCriteriaNode<>(ExtOperator.__FALSE__));
|
||||
}
|
||||
|
||||
return compare(targetNode);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Self not(Self mcb) {
|
||||
ModelCriteriaNode<M> toBeChild = mcb.node;
|
||||
if (toBeChild.getNodeOperator() == ExtOperator.NOT) {
|
||||
return compare(toBeChild.getChildren().get(0).cloneTree());
|
||||
}
|
||||
|
||||
final ModelCriteriaNode<M> targetNode = new ModelCriteriaNode<>(ExtOperator.NOT);
|
||||
targetNode.addChild(toBeChild.cloneTree());
|
||||
return compare(targetNode);
|
||||
}
|
||||
|
||||
/**
|
||||
* Copies contents of this {@code ModelCriteriaBuilder} into
|
||||
* another {@code ModelCriteriaBuilder}.
|
||||
* @param mcb {@code ModelCriteriaBuilder} to copy the contents onto
|
||||
* @return Updated {@code ModelCriteriaBuilder}
|
||||
*/
|
||||
public <C extends ModelCriteriaBuilder<M, C>> C flashToModelCriteriaBuilder(C mcb) {
|
||||
if (isEmpty()) {
|
||||
return mcb;
|
||||
}
|
||||
return mcb == null ? null : node.flashToModelCriteriaBuilder(mcb);
|
||||
}
|
||||
|
||||
@FunctionalInterface
|
||||
public interface AtomicFormulaTester<M> {
|
||||
public Boolean test(SearchableModelField<? super M> field, Operator operator, Object[] operatorArguments);
|
||||
}
|
||||
|
||||
public Self partiallyEvaluate(AtomicFormulaTester<M> tester) {
|
||||
return instantiateForNode(node.cloneTree((field, operator, operatorArguments) -> {
|
||||
Boolean res = tester.test(field, operator, operatorArguments);
|
||||
if (res == null) {
|
||||
return new ModelCriteriaNode<>(field, operator, operatorArguments);
|
||||
} else {
|
||||
return new ModelCriteriaNode<>(res ? ExtOperator.__TRUE__ : ExtOperator.__FALSE__);
|
||||
}
|
||||
}, ModelCriteriaNode::new));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optimizes this formula into another {@code ModelCriteriaBuilder}, using the values of
|
||||
* {@link ExtOperator#__TRUE__} and {@link ExtOperator#__FALSE__} accordingly.
|
||||
* @return New instance of {@code }
|
||||
*/
|
||||
public Self optimize() {
|
||||
return flashToModelCriteriaBuilder(instantiateForNode(null));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the realm ID which limits the results of this criteria.
|
||||
* Does not support formulae which include negation of a condition containing the given field.
|
||||
* Only supports plain equality ({@link Operator#EQ}), ignores all
|
||||
* instances of the field comparison which do not use plain equality.
|
||||
* @return {@code null} if the field is not contained in the formula, there are multiple
|
||||
* mutually different field values in the formula, or the formula contains field check within
|
||||
* a negation.
|
||||
*/
|
||||
public <T extends DescriptiveModelCriteria<?, ?>> Object getSingleRestrictionArgument(String fieldName) {
|
||||
if (node == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// relax all conditions but those which check realmId equality. For this moment,
|
||||
// other operators like NE or IN are disregarded and will be added only if need
|
||||
// arises, since the current queries do not use them.
|
||||
DescriptiveModelCriteria<M, ?> criterionFormula =
|
||||
instantiateForNode(node.cloneTree(n -> {
|
||||
switch (n.getNodeOperator()) {
|
||||
case ATOMIC_FORMULA:
|
||||
if (fieldName.equals(n.getField().getName()) && n.getSimpleOperator() == Operator.EQ) {
|
||||
return new ModelCriteriaNode<>(n.getField(), n.getSimpleOperator(), n.getSimpleOperatorArguments());
|
||||
}
|
||||
return getNotParentsParity(n.getParent(), true)
|
||||
? new ModelCriteriaNode<>(ExtOperator.__TRUE__)
|
||||
: new ModelCriteriaNode<>(ExtOperator.__FALSE__);
|
||||
default:
|
||||
return new ModelCriteriaNode<>(n.getNodeOperator());
|
||||
}
|
||||
}))
|
||||
.optimize();
|
||||
|
||||
final ModelCriteriaNode<M> criterionFormulaRoot = criterionFormula.getNode();
|
||||
if (criterionFormulaRoot.isFalseNode()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (criterionFormulaRoot.isTrueNode()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
ThreadLocal<Object> criterionArgument = new ThreadLocal<>();
|
||||
@SuppressWarnings("unchecked")
|
||||
Optional<ModelCriteriaNode<M>> firstInvalidNode = criterionFormulaRoot.findFirstDfs(n -> {
|
||||
switch (n.getNodeOperator()) {
|
||||
case NOT:
|
||||
return true;
|
||||
|
||||
case ATOMIC_FORMULA: // Atomic formula must be of the form "realmID" EQ ..., see realmIdFormula instatiation
|
||||
Object argument = getSingleArgument(n.getSimpleOperatorArguments());
|
||||
if (argument != null) {
|
||||
Object orig = criterionArgument.get();
|
||||
if (orig != null && ! Objects.equals(argument, orig)) {
|
||||
// Two different realms are not supported
|
||||
return true;
|
||||
}
|
||||
criterionArgument.set(argument);
|
||||
}
|
||||
return false;
|
||||
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
});
|
||||
|
||||
return firstInvalidNode.isPresent() ? null : criterionArgument.get();
|
||||
}
|
||||
|
||||
private static Object getSingleArgument(Object[] arguments) {
|
||||
if (arguments == null || arguments.length != 1) {
|
||||
return null;
|
||||
}
|
||||
|
||||
final Object a0 = arguments[0];
|
||||
if (a0 instanceof Collection) { // Note this cannot be a Stream due to ModelCriteriaNode always converting stream to List
|
||||
final Collection c0 = (Collection) a0;
|
||||
return c0.size() == 1 ? c0.iterator().next() : null;
|
||||
}
|
||||
|
||||
return a0;
|
||||
}
|
||||
|
||||
public boolean isEmpty() {
|
||||
return node == null;
|
||||
}
|
||||
|
||||
public ModelCriteriaNode<M> getNode() {
|
||||
return node;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return isEmpty() ? "" : node.toString();
|
||||
}
|
||||
|
||||
private Stream<ModelCriteriaNode<M>> getNodesToAddForAndOr(ModelCriteriaNode<M> nodeToAdd, ExtOperator operatorBeingAdded) {
|
||||
final ExtOperator op = nodeToAdd.getNodeOperator();
|
||||
|
||||
if (op == operatorBeingAdded) {
|
||||
return nodeToAdd.getChildren().stream();
|
||||
}
|
||||
|
||||
return Stream.of(nodeToAdd);
|
||||
}
|
||||
|
||||
private boolean getNotParentsParity(Optional<ModelCriteriaNode<M>> node, boolean currentValue) {
|
||||
return node
|
||||
.map(n -> getNotParentsParity(n.getParent(), n.getNodeOperator() == ExtOperator.NOT ? ! currentValue : currentValue))
|
||||
.orElse(currentValue);
|
||||
}
|
||||
|
||||
}
|
|
@ -67,7 +67,13 @@ public class ModelCriteriaNode<M> extends DefaultTreeNode<ModelCriteriaNode<M>>
|
|||
},
|
||||
NOT {
|
||||
@Override public <M, C extends ModelCriteriaBuilder<M, C>> C apply(C mcb, ModelCriteriaNode<M> node) {
|
||||
return mcb.not(node.getChildren().iterator().next().flashToModelCriteriaBuilder(mcb));
|
||||
final ModelCriteriaNode<M> child = node.getChildren().iterator().next();
|
||||
return child.isFalseNode()
|
||||
? mcb.and((C[]) Array.newInstance(mcb.getClass(), 0))
|
||||
: (child.isTrueNode()
|
||||
? mcb.or((C[]) Array.newInstance(mcb.getClass(), 0))
|
||||
: mcb.not(child.flashToModelCriteriaBuilder(mcb))
|
||||
);
|
||||
}
|
||||
@Override public String toString(ModelCriteriaNode<?> node) {
|
||||
return "! " + node.getChildren().iterator().next().toString();
|
||||
|
@ -142,18 +148,22 @@ public class ModelCriteriaNode<M> extends DefaultTreeNode<ModelCriteriaNode<M>>
|
|||
this.simpleOperatorArguments = null;
|
||||
}
|
||||
|
||||
/**
 * Internal copy constructor used when duplicating tree nodes; copies all four
 * node attributes verbatim and initializes the tree-node base with no metadata.
 */
private ModelCriteriaNode(ExtOperator nodeOperator, Operator simpleOperator, SearchableModelField<? super M> field, Object[] simpleOperatorArguments) {
    super(Collections.emptyMap());
    this.nodeOperator = nodeOperator;
    this.simpleOperator = simpleOperator;
    this.field = field;
    this.simpleOperatorArguments = simpleOperatorArguments;
}

/** Returns the extended (tree) operator of this node, e.g. AND/OR/NOT or an atomic formula. */
public ExtOperator getNodeOperator() {
    return nodeOperator;
}

/** Returns the simple comparison operator; presumably only set for atomic formulae — TODO confirm. */
public Operator getSimpleOperator() {
    return simpleOperator;
}

/** Returns the searchable field compared by this node; presumably only set for atomic formulae — TODO confirm. */
public SearchableModelField<? super M> getField() {
    return field;
}

/** Returns the operator arguments of this node; may be {@code null} (non-atomic nodes set it to null). */
public Object[] getSimpleOperatorArguments() {
    return simpleOperatorArguments;
}

/**
 * Returns a deep copy of the subtree rooted at this node, cloning each node via the
 * internal copy constructors.
 */
public ModelCriteriaNode<M> cloneTree() {
    return cloneTree(ModelCriteriaNode::new, ModelCriteriaNode::new);
}
|
||||
|
|
|
@ -47,6 +47,7 @@ import org.keycloak.models.UserModel;
|
|||
import org.keycloak.models.UserModel.SearchableFields;
|
||||
import org.keycloak.models.UserProvider;
|
||||
import org.keycloak.models.map.common.DeepCloner;
|
||||
import org.keycloak.models.map.common.HasRealmId;
|
||||
import org.keycloak.models.map.common.TimeAdapter;
|
||||
import org.keycloak.models.map.credential.MapUserCredentialManager;
|
||||
import org.keycloak.models.map.storage.MapKeycloakTransactionWithAuth;
|
||||
|
@ -88,11 +89,13 @@ public class MapUserProvider implements UserProvider {
|
|||
private static final Logger LOG = Logger.getLogger(MapUserProvider.class);
|
||||
private final KeycloakSession session;
|
||||
final MapKeycloakTransaction<MapUserEntity, UserModel> tx;
|
||||
private final boolean txHasRealmId;
|
||||
|
||||
public MapUserProvider(KeycloakSession session, MapStorage<MapUserEntity, UserModel> store) {
|
||||
this.session = session;
|
||||
this.tx = store.createTransaction(session);
|
||||
session.getTransactionManager().enlist(tx);
|
||||
this.txHasRealmId = tx instanceof HasRealmId;
|
||||
}
|
||||
|
||||
private Function<MapUserEntity, UserModel> entityToAdapterFunc(RealmModel realm) {
|
||||
|
@ -115,6 +118,13 @@ public class MapUserProvider implements UserProvider {
|
|||
};
|
||||
}
|
||||
|
||||
/**
 * Returns the user-store transaction, scoped to the given realm when supported.
 * If the transaction implements {@link HasRealmId} (checked once in the constructor
 * via {@code txHasRealmId}), its realm ID is set to the realm's ID first; a
 * {@code null} realm clears the realm scoping. Otherwise the transaction is
 * returned unchanged.
 *
 * @param realm realm to scope the transaction to; may be {@code null}
 * @return the (possibly realm-scoped) transaction
 */
private MapKeycloakTransaction<MapUserEntity, UserModel> txInRealm(RealmModel realm) {
    if (txHasRealmId) {
        ((HasRealmId) tx).setRealmId(realm == null ? null : realm.getId());
    }
    return tx;
}
|
||||
|
||||
private Predicate<MapUserEntity> entityRealmFilter(RealmModel realm) {
|
||||
if (realm == null || realm.getId() == null) {
|
||||
return c -> false;
|
||||
|
@ -129,7 +139,7 @@ public class MapUserProvider implements UserProvider {
|
|||
|
||||
private Optional<MapUserEntity> getEntityById(RealmModel realm, String id) {
|
||||
try {
|
||||
MapUserEntity mapUserEntity = tx.read(id);
|
||||
MapUserEntity mapUserEntity = txInRealm(realm).read(id);
|
||||
if (mapUserEntity != null && entityRealmFilter(realm).test(mapUserEntity)) {
|
||||
return Optional.of(mapUserEntity);
|
||||
}
|
||||
|
@ -176,7 +186,7 @@ public class MapUserProvider implements UserProvider {
|
|||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.IDP_AND_USER, Operator.EQ, socialProvider);
|
||||
|
||||
tx.read(withCriteria(mcb))
|
||||
txInRealm(realm).read(withCriteria(mcb))
|
||||
.forEach(userEntity -> userEntity.removeFederatedIdentity(socialProvider));
|
||||
}
|
||||
|
||||
|
@ -218,7 +228,7 @@ public class MapUserProvider implements UserProvider {
|
|||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.IDP_AND_USER, Operator.EQ, socialLink.getIdentityProvider(), socialLink.getUserId());
|
||||
|
||||
return tx.read(withCriteria(mcb))
|
||||
return txInRealm(realm).read(withCriteria(mcb))
|
||||
.collect(Collectors.collectingAndThen(
|
||||
Collectors.toList(),
|
||||
list -> {
|
||||
|
@ -308,12 +318,12 @@ public class MapUserProvider implements UserProvider {
|
|||
public UserModel getServiceAccount(ClientModel client) {
|
||||
LOG.tracef("getServiceAccount(%s)%s", client.getId(), getShortStackTrace());
|
||||
DefaultModelCriteria<UserModel> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, client.getRealm().getId())
|
||||
final RealmModel realm = client.getRealm();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.SERVICE_ACCOUNT_CLIENT, Operator.EQ, client.getId());
|
||||
|
||||
return tx.read(withCriteria(mcb))
|
||||
.collect(Collectors.collectingAndThen(
|
||||
Collectors.toList(),
|
||||
return txInRealm(realm).read(withCriteria(mcb))
|
||||
.collect(Collectors.collectingAndThen(Collectors.toList(),
|
||||
list -> {
|
||||
if (list.isEmpty()) {
|
||||
return null;
|
||||
|
@ -322,7 +332,7 @@ public class MapUserProvider implements UserProvider {
|
|||
", results=" + list);
|
||||
}
|
||||
|
||||
return entityToAdapterFunc(client.getRealm()).apply(list.get(0));
|
||||
return entityToAdapterFunc(realm).apply(list.get(0));
|
||||
}
|
||||
));
|
||||
}
|
||||
|
@ -336,11 +346,11 @@ public class MapUserProvider implements UserProvider {
|
|||
SearchableFields.USERNAME :
|
||||
SearchableFields.USERNAME_CASE_INSENSITIVE, Operator.EQ, username);
|
||||
|
||||
if (tx.exists(withCriteria(mcb))) {
|
||||
if (txInRealm(realm).exists(withCriteria(mcb))) {
|
||||
throw new ModelDuplicateException("User with username '" + username + "' in realm " + realm.getName() + " already exists" );
|
||||
}
|
||||
|
||||
if (id != null && tx.exists(id)) {
|
||||
if (id != null && txInRealm(realm).exists(id)) {
|
||||
throw new ModelDuplicateException("User exists: " + id);
|
||||
}
|
||||
|
||||
|
@ -351,7 +361,7 @@ public class MapUserProvider implements UserProvider {
|
|||
entity.setUsername(username);
|
||||
entity.setCreatedTimestamp(Time.currentTimeMillis());
|
||||
|
||||
entity = tx.create(entity);
|
||||
entity = txInRealm(realm).create(entity);
|
||||
final UserModel userModel = entityToAdapterFunc(realm).apply(entity);
|
||||
|
||||
if (addDefaultRoles) {
|
||||
|
@ -378,7 +388,7 @@ public class MapUserProvider implements UserProvider {
|
|||
DefaultModelCriteria<UserModel> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId());
|
||||
|
||||
tx.delete(withCriteria(mcb));
|
||||
txInRealm(realm).delete(withCriteria(mcb));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -388,7 +398,7 @@ public class MapUserProvider implements UserProvider {
|
|||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.FEDERATION_LINK, Operator.EQ, storageProviderId);
|
||||
|
||||
tx.delete(withCriteria(mcb));
|
||||
txInRealm(realm).delete(withCriteria(mcb));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -398,7 +408,7 @@ public class MapUserProvider implements UserProvider {
|
|||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.FEDERATION_LINK, Operator.EQ, storageProviderId);
|
||||
|
||||
try (Stream<MapUserEntity> s = tx.read(withCriteria(mcb))) {
|
||||
try (Stream<MapUserEntity> s = txInRealm(realm).read(withCriteria(mcb))) {
|
||||
s.forEach(userEntity -> userEntity.setFederationLink(null));
|
||||
}
|
||||
}
|
||||
|
@ -411,7 +421,7 @@ public class MapUserProvider implements UserProvider {
|
|||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.ASSIGNED_ROLE, Operator.EQ, roleId);
|
||||
|
||||
try (Stream<MapUserEntity> s = tx.read(withCriteria(mcb))) {
|
||||
try (Stream<MapUserEntity> s = txInRealm(realm).read(withCriteria(mcb))) {
|
||||
s.forEach(userEntity -> userEntity.removeRolesMembership(roleId));
|
||||
}
|
||||
}
|
||||
|
@ -424,7 +434,7 @@ public class MapUserProvider implements UserProvider {
|
|||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.ASSIGNED_GROUP, Operator.EQ, groupId);
|
||||
|
||||
try (Stream<MapUserEntity> s = tx.read(withCriteria(mcb))) {
|
||||
try (Stream<MapUserEntity> s = txInRealm(realm).read(withCriteria(mcb))) {
|
||||
s.forEach(userEntity -> userEntity.removeGroupsMembership(groupId));
|
||||
}
|
||||
}
|
||||
|
@ -437,7 +447,7 @@ public class MapUserProvider implements UserProvider {
|
|||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.CONSENT_FOR_CLIENT, Operator.EQ, clientId);
|
||||
|
||||
try (Stream<MapUserEntity> s = tx.read(withCriteria(mcb))) {
|
||||
try (Stream<MapUserEntity> s = txInRealm(realm).read(withCriteria(mcb))) {
|
||||
s.forEach(userEntity -> userEntity.removeUserConsent(clientId));
|
||||
}
|
||||
}
|
||||
|
@ -453,10 +463,11 @@ public class MapUserProvider implements UserProvider {
|
|||
LOG.tracef("preRemove[ClientScopeModel](%s)%s", clientScopeId, getShortStackTrace());
|
||||
|
||||
DefaultModelCriteria<UserModel> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, clientScope.getRealm().getId())
|
||||
final RealmModel realm = clientScope.getRealm();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.CONSENT_WITH_CLIENT_SCOPE, Operator.EQ, clientScopeId);
|
||||
|
||||
try (Stream<MapUserEntity> s = tx.read(withCriteria(mcb))) {
|
||||
try (Stream<MapUserEntity> s = txInRealm(realm).read(withCriteria(mcb))) {
|
||||
s.map(MapUserEntity::getUserConsents)
|
||||
.filter(Objects::nonNull)
|
||||
.flatMap(Collection::stream)
|
||||
|
@ -475,7 +486,7 @@ public class MapUserProvider implements UserProvider {
|
|||
DefaultModelCriteria<UserModel> mcb = criteria();
|
||||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId());
|
||||
|
||||
try (Stream<MapUserEntity> s = tx.read(withCriteria(mcb))) {
|
||||
try (Stream<MapUserEntity> s = txInRealm(realm).read(withCriteria(mcb))) {
|
||||
s.forEach(entity -> entity.addRolesMembership(roleId));
|
||||
}
|
||||
}
|
||||
|
@ -497,7 +508,7 @@ public class MapUserProvider implements UserProvider {
|
|||
SearchableFields.USERNAME_CASE_INSENSITIVE, Operator.EQ, username);
|
||||
|
||||
// there is orderBy used to always return the same user in case multiple users are returned from the store
|
||||
try (Stream<MapUserEntity> s = tx.read(withCriteria(mcb).orderBy(SearchableFields.USERNAME, ASCENDING))) {
|
||||
try (Stream<MapUserEntity> s = txInRealm(realm).read(withCriteria(mcb).orderBy(SearchableFields.USERNAME, ASCENDING))) {
|
||||
List<MapUserEntity> users = s.collect(Collectors.toList());
|
||||
if (users.isEmpty()) return null;
|
||||
if (users.size() != 1) {
|
||||
|
@ -515,7 +526,7 @@ public class MapUserProvider implements UserProvider {
|
|||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.EMAIL, Operator.EQ, email);
|
||||
|
||||
List<MapUserEntity> usersWithEmail = tx.read(withCriteria(mcb)).collect(Collectors.toList());
|
||||
List<MapUserEntity> usersWithEmail = txInRealm(realm).read(withCriteria(mcb)).collect(Collectors.toList());
|
||||
|
||||
if (usersWithEmail.isEmpty()) return null;
|
||||
if (usersWithEmail.size() > 1) {
|
||||
|
@ -547,7 +558,7 @@ public class MapUserProvider implements UserProvider {
|
|||
mcb = mcb.compare(SearchableFields.SERVICE_ACCOUNT_CLIENT, Operator.NOT_EXISTS);
|
||||
}
|
||||
|
||||
return (int) tx.getCount(withCriteria(mcb));
|
||||
return (int) txInRealm(realm).getCount(withCriteria(mcb));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -665,7 +676,7 @@ public class MapUserProvider implements UserProvider {
|
|||
criteria = criteria.compare(SearchableFields.ASSIGNED_GROUP, Operator.IN, authorizedGroups);
|
||||
}
|
||||
|
||||
return tx.read(withCriteria(criteria).pagination(firstResult, maxResults, SearchableFields.USERNAME))
|
||||
return txInRealm(realm).read(withCriteria(criteria).pagination(firstResult, maxResults, SearchableFields.USERNAME))
|
||||
.map(entityToAdapterFunc(realm))
|
||||
.filter(Objects::nonNull);
|
||||
}
|
||||
|
@ -677,7 +688,7 @@ public class MapUserProvider implements UserProvider {
|
|||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.ASSIGNED_GROUP, Operator.EQ, group.getId());
|
||||
|
||||
return tx.read(withCriteria(mcb).pagination(firstResult, maxResults, SearchableFields.USERNAME))
|
||||
return txInRealm(realm).read(withCriteria(mcb).pagination(firstResult, maxResults, SearchableFields.USERNAME))
|
||||
.map(entityToAdapterFunc(realm));
|
||||
}
|
||||
|
||||
|
@ -688,7 +699,7 @@ public class MapUserProvider implements UserProvider {
|
|||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.ATTRIBUTE, Operator.EQ, attrName, attrValue);
|
||||
|
||||
return tx.read(withCriteria(mcb).orderBy(SearchableFields.USERNAME, ASCENDING))
|
||||
return txInRealm(realm).read(withCriteria(mcb).orderBy(SearchableFields.USERNAME, ASCENDING))
|
||||
.map(entityToAdapterFunc(realm));
|
||||
}
|
||||
|
||||
|
@ -699,12 +710,13 @@ public class MapUserProvider implements UserProvider {
|
|||
|
||||
@Override
|
||||
public boolean removeUser(RealmModel realm, UserModel user) {
|
||||
LOG.tracef("removeUser(%s, %s)%s", realm, user, getShortStackTrace());
|
||||
String userId = user.getId();
|
||||
Optional<MapUserEntity> userById = getEntityById(realm, userId);
|
||||
if (userById.isPresent()) {
|
||||
session.invalidate(USER_BEFORE_REMOVE, realm, user);
|
||||
|
||||
tx.delete(userId);
|
||||
txInRealm(realm).delete(userId);
|
||||
|
||||
session.invalidate(USER_AFTER_REMOVE, realm, user);
|
||||
return true;
|
||||
|
@ -720,7 +732,7 @@ public class MapUserProvider implements UserProvider {
|
|||
mcb = mcb.compare(SearchableFields.REALM_ID, Operator.EQ, realm.getId())
|
||||
.compare(SearchableFields.ASSIGNED_ROLE, Operator.EQ, role.getId());
|
||||
|
||||
return tx.read(withCriteria(mcb).pagination(firstResult, maxResults, SearchableFields.USERNAME))
|
||||
return txInRealm(realm).read(withCriteria(mcb).pagination(firstResult, maxResults, SearchableFields.USERNAME))
|
||||
.map(entityToAdapterFunc(realm));
|
||||
}
|
||||
|
||||
|
|
|
@ -28,6 +28,7 @@ import org.keycloak.models.UserModel;
|
|||
import org.keycloak.models.UserSessionModel;
|
||||
import org.keycloak.models.UserSessionProvider;
|
||||
import org.keycloak.models.map.common.DeepCloner;
|
||||
import org.keycloak.models.map.common.HasRealmId;
|
||||
import org.keycloak.models.map.common.TimeAdapter;
|
||||
import org.keycloak.models.map.storage.MapKeycloakTransaction;
|
||||
import org.keycloak.models.map.storage.MapStorage;
|
||||
|
@ -69,12 +70,14 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
* Storage for transient user sessions which lifespan is limited to one request.
|
||||
*/
|
||||
private final Map<String, MapUserSessionEntity> transientUserSessions = new HashMap<>();
|
||||
private final boolean txHasRealmId;
|
||||
|
||||
public MapUserSessionProvider(KeycloakSession session, MapStorage<MapUserSessionEntity, UserSessionModel> userSessionStore) {
|
||||
this.session = session;
|
||||
userSessionTx = userSessionStore.createTransaction(session);
|
||||
|
||||
session.getTransactionManager().enlistAfterCompletion(userSessionTx);
|
||||
this.txHasRealmId = userSessionTx instanceof HasRealmId;
|
||||
}
|
||||
|
||||
private Function<MapUserSessionEntity, UserSessionModel> userEntityToAdapterFunc(RealmModel realm) {
|
||||
|
@ -85,7 +88,7 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
if (TRANSIENT == origEntity.getPersistenceState()) {
|
||||
transientUserSessions.remove(origEntity.getId());
|
||||
} else {
|
||||
userSessionTx.delete(origEntity.getId());
|
||||
txInRealm(realm).delete(origEntity.getId());
|
||||
}
|
||||
return null;
|
||||
} else {
|
||||
|
@ -94,6 +97,13 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
};
|
||||
}
|
||||
|
||||
/**
 * Returns the user-session-store transaction, scoped to the given realm when supported.
 * If the transaction implements {@link HasRealmId} (checked once in the constructor
 * via {@code txHasRealmId}), its realm ID is set to the realm's ID first; a
 * {@code null} realm clears the realm scoping. Otherwise the transaction is
 * returned unchanged.
 *
 * @param realm realm to scope the transaction to; may be {@code null}
 * @return the (possibly realm-scoped) transaction
 */
private MapKeycloakTransaction<MapUserSessionEntity, UserSessionModel> txInRealm(RealmModel realm) {
    if (txHasRealmId) {
        ((HasRealmId) userSessionTx).setRealmId(realm == null ? null : realm.getId());
    }
    return userSessionTx;
}
|
||||
|
||||
@Override
|
||||
public KeycloakSession getKeycloakSession() {
|
||||
return session;
|
||||
|
@ -103,7 +113,7 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
public AuthenticatedClientSessionModel createClientSession(RealmModel realm, ClientModel client, UserSessionModel userSession) {
|
||||
LOG.tracef("createClientSession(%s, %s, %s)%s", realm, client, userSession, getShortStackTrace());
|
||||
|
||||
MapUserSessionEntity userSessionEntity = getUserSessionById(userSession.getId());
|
||||
MapUserSessionEntity userSessionEntity = getUserSessionById(realm, userSession.getId());
|
||||
|
||||
if (userSessionEntity == null) {
|
||||
throw new IllegalStateException("User session entity does not exist: " + userSession.getId());
|
||||
|
@ -158,10 +168,10 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
}
|
||||
transientUserSessions.put(entity.getId(), entity);
|
||||
} else {
|
||||
if (id != null && userSessionTx.exists(id)) {
|
||||
if (id != null && txInRealm(realm).exists(id)) {
|
||||
throw new ModelDuplicateException("User session exists: " + id);
|
||||
}
|
||||
entity = userSessionTx.create(entity);
|
||||
entity = txInRealm(realm).create(entity);
|
||||
}
|
||||
|
||||
entity.setPersistenceState(persistenceState);
|
||||
|
@ -191,7 +201,7 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
DefaultModelCriteria<UserSessionModel> mcb = realmAndOfflineCriteriaBuilder(realm, false)
|
||||
.compare(UserSessionModel.SearchableFields.ID, Operator.EQ, id);
|
||||
|
||||
return userSessionTx.read(withCriteria(mcb))
|
||||
return txInRealm(realm).read(withCriteria(mcb))
|
||||
.findFirst()
|
||||
.map(userEntityToAdapterFunc(realm))
|
||||
.orElse(null);
|
||||
|
@ -204,7 +214,7 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
|
||||
LOG.tracef("getUserSessionsStream(%s, %s)%s", realm, user, getShortStackTrace());
|
||||
|
||||
return userSessionTx.read(withCriteria(mcb))
|
||||
return txInRealm(realm).read(withCriteria(mcb))
|
||||
.map(userEntityToAdapterFunc(realm))
|
||||
.filter(Objects::nonNull);
|
||||
}
|
||||
|
@ -216,7 +226,7 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
|
||||
LOG.tracef("getUserSessionsStream(%s, %s)%s", realm, client, getShortStackTrace());
|
||||
|
||||
return userSessionTx.read(withCriteria(mcb))
|
||||
return txInRealm(realm).read(withCriteria(mcb))
|
||||
.map(userEntityToAdapterFunc(realm))
|
||||
.filter(Objects::nonNull);
|
||||
}
|
||||
|
@ -230,7 +240,7 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
.compare(UserSessionModel.SearchableFields.CLIENT_ID, Operator.EQ, client.getId());
|
||||
|
||||
|
||||
return userSessionTx.read(withCriteria(mcb).pagination(firstResult, maxResults,
|
||||
return txInRealm(realm).read(withCriteria(mcb).pagination(firstResult, maxResults,
|
||||
UserSessionModel.SearchableFields.LAST_SESSION_REFRESH))
|
||||
.map(userEntityToAdapterFunc(realm))
|
||||
.filter(Objects::nonNull);
|
||||
|
@ -243,7 +253,7 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
|
||||
LOG.tracef("getUserSessionByBrokerUserIdStream(%s, %s)%s", realm, brokerUserId, getShortStackTrace());
|
||||
|
||||
return userSessionTx.read(withCriteria(mcb))
|
||||
return txInRealm(realm).read(withCriteria(mcb))
|
||||
.map(userEntityToAdapterFunc(realm))
|
||||
.filter(Objects::nonNull);
|
||||
}
|
||||
|
@ -255,7 +265,7 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
|
||||
LOG.tracef("getUserSessionByBrokerSessionId(%s, %s)%s", realm, brokerSessionId, getShortStackTrace());
|
||||
|
||||
return userSessionTx.read(withCriteria(mcb))
|
||||
return txInRealm(realm).read(withCriteria(mcb))
|
||||
.findFirst()
|
||||
.map(userEntityToAdapterFunc(realm))
|
||||
.orElse(null);
|
||||
|
@ -288,7 +298,7 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
|
||||
LOG.tracef("getActiveUserSessions(%s, %s)%s", realm, client, getShortStackTrace());
|
||||
|
||||
return userSessionTx.getCount(withCriteria(mcb));
|
||||
return txInRealm(realm).getCount(withCriteria(mcb));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -297,7 +307,7 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
|
||||
LOG.tracef("getActiveClientSessionStats(%s, %s)%s", realm, offline, getShortStackTrace());
|
||||
|
||||
return userSessionTx.read(withCriteria(mcb))
|
||||
return txInRealm(realm).read(withCriteria(mcb))
|
||||
.map(userEntityToAdapterFunc(realm))
|
||||
.filter(Objects::nonNull)
|
||||
.map(UserSessionModel::getAuthenticatedClientSessions)
|
||||
|
@ -315,7 +325,7 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
|
||||
LOG.tracef("removeUserSession(%s, %s)%s", realm, session, getShortStackTrace());
|
||||
|
||||
userSessionTx.delete(withCriteria(mcb));
|
||||
txInRealm(realm).delete(withCriteria(mcb));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -326,7 +336,7 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
|
||||
LOG.tracef("removeUserSessions(%s, %s)%s", realm, user, getShortStackTrace());
|
||||
|
||||
userSessionTx.delete(withCriteria(mcb));
|
||||
txInRealm(realm).delete(withCriteria(mcb));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -345,7 +355,7 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
|
||||
LOG.tracef("removeUserSessions(%s)%s", realm, getShortStackTrace());
|
||||
|
||||
userSessionTx.delete(withCriteria(mcb));
|
||||
txInRealm(realm).delete(withCriteria(mcb));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -363,7 +373,8 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
LOG.tracef("createOfflineUserSession(%s)%s", userSession, getShortStackTrace());
|
||||
|
||||
MapUserSessionEntity offlineUserSession = createUserSessionEntityInstance(userSession, true);
|
||||
offlineUserSession = userSessionTx.create(offlineUserSession);
|
||||
RealmModel realm = userSession.getRealm();
|
||||
offlineUserSession = txInRealm(realm).create(offlineUserSession);
|
||||
|
||||
// set a reference for the offline user session to the original online user session
|
||||
userSession.setNote(CORRESPONDING_SESSION_ID, offlineUserSession.getId());
|
||||
|
@ -394,12 +405,12 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
|
||||
DefaultModelCriteria<UserSessionModel> mcb;
|
||||
if (userSession.isOffline()) {
|
||||
userSessionTx.delete(userSession.getId());
|
||||
txInRealm(realm).delete(userSession.getId());
|
||||
} else if (userSession.getNote(CORRESPONDING_SESSION_ID) != null) {
|
||||
String uk = userSession.getNote(CORRESPONDING_SESSION_ID);
|
||||
mcb = realmAndOfflineCriteriaBuilder(realm, true)
|
||||
.compare(UserSessionModel.SearchableFields.ID, Operator.EQ, uk);
|
||||
userSessionTx.delete(withCriteria(mcb));
|
||||
txInRealm(realm).delete(withCriteria(mcb));
|
||||
userSession.removeNote(CORRESPONDING_SESSION_ID);
|
||||
}
|
||||
}
|
||||
|
@ -440,7 +451,7 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
|
||||
LOG.tracef("getOfflineUserSessionsStream(%s, %s)%s", realm, user, getShortStackTrace());
|
||||
|
||||
return userSessionTx.read(withCriteria(mcb))
|
||||
return txInRealm(realm).read(withCriteria(mcb))
|
||||
.map(userEntityToAdapterFunc(realm))
|
||||
.filter(Objects::nonNull);
|
||||
}
|
||||
|
@ -452,7 +463,7 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
|
||||
LOG.tracef("getOfflineUserSessionByBrokerSessionId(%s, %s)%s", realm, brokerSessionId, getShortStackTrace());
|
||||
|
||||
return userSessionTx.read(withCriteria(mcb))
|
||||
return txInRealm(realm).read(withCriteria(mcb))
|
||||
.findFirst()
|
||||
.map(userEntityToAdapterFunc(realm))
|
||||
.orElse(null);
|
||||
|
@ -465,7 +476,7 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
|
||||
LOG.tracef("getOfflineUserSessionByBrokerUserIdStream(%s, %s)%s", realm, brokerUserId, getShortStackTrace());
|
||||
|
||||
return userSessionTx.read(withCriteria(mcb))
|
||||
return txInRealm(realm).read(withCriteria(mcb))
|
||||
.map(userEntityToAdapterFunc(realm))
|
||||
.filter(Objects::nonNull);
|
||||
}
|
||||
|
@ -477,7 +488,7 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
|
||||
LOG.tracef("getOfflineSessionsCount(%s, %s)%s", realm, client, getShortStackTrace());
|
||||
|
||||
return userSessionTx.getCount(withCriteria(mcb));
|
||||
return txInRealm(realm).getCount(withCriteria(mcb));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -488,7 +499,7 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
|
||||
LOG.tracef("getOfflineUserSessionsStream(%s, %s, %s, %s)%s", realm, client, firstResult, maxResults, getShortStackTrace());
|
||||
|
||||
return userSessionTx.read(withCriteria(mcb).pagination(firstResult, maxResults,
|
||||
return txInRealm(realm).read(withCriteria(mcb).pagination(firstResult, maxResults,
|
||||
UserSessionModel.SearchableFields.LAST_SESSION_REFRESH))
|
||||
.map(userEntityToAdapterFunc(realm))
|
||||
.filter(Objects::nonNull);
|
||||
|
@ -539,7 +550,7 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
|
||||
LOG.tracef("removeAllUserSessions(%s)%s", realm, getShortStackTrace());
|
||||
|
||||
userSessionTx.delete(withCriteria(mcb));
|
||||
txInRealm(realm).delete(withCriteria(mcb));
|
||||
}
|
||||
|
||||
private Stream<MapUserSessionEntity> getOfflineUserSessionEntityStream(RealmModel realm, String userSessionId) {
|
||||
|
@ -553,7 +564,7 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
.compare(UserSessionModel.SearchableFields.ID, Operator.EQ, userSessionId);
|
||||
|
||||
// check if it's an offline user session
|
||||
MapUserSessionEntity userSessionEntity = userSessionTx.read(withCriteria(mcb)).findFirst().orElse(null);
|
||||
MapUserSessionEntity userSessionEntity = txInRealm(realm).read(withCriteria(mcb)).findFirst().orElse(null);
|
||||
if (userSessionEntity != null) {
|
||||
if (Boolean.TRUE.equals(userSessionEntity.isOffline())) {
|
||||
return Stream.of(userSessionEntity);
|
||||
|
@ -562,7 +573,7 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
// no session found by the given ID, try to find by corresponding session ID
|
||||
mcb = realmAndOfflineCriteriaBuilder(realm, true)
|
||||
.compare(UserSessionModel.SearchableFields.CORRESPONDING_SESSION_ID, Operator.EQ, userSessionId);
|
||||
return userSessionTx.read(withCriteria(mcb));
|
||||
return txInRealm(realm).read(withCriteria(mcb));
|
||||
}
|
||||
|
||||
// it's online user session so lookup offline user session by corresponding session id reference
|
||||
|
@ -570,7 +581,7 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
if (offlineUserSessionId != null) {
|
||||
mcb = realmAndOfflineCriteriaBuilder(realm, true)
|
||||
.compare(UserSessionModel.SearchableFields.ID, Operator.EQ, offlineUserSessionId);
|
||||
return userSessionTx.read(withCriteria(mcb));
|
||||
return txInRealm(realm).read(withCriteria(mcb));
|
||||
}
|
||||
|
||||
return Stream.empty();
|
||||
|
@ -582,13 +593,13 @@ public class MapUserSessionProvider implements UserSessionProvider {
|
|||
.compare(UserSessionModel.SearchableFields.IS_OFFLINE, Operator.EQ, offline);
|
||||
}
|
||||
|
||||
private MapUserSessionEntity getUserSessionById(String id) {
|
||||
private MapUserSessionEntity getUserSessionById(RealmModel realm, String id) {
|
||||
if (id == null) return null;
|
||||
|
||||
MapUserSessionEntity userSessionEntity = transientUserSessions.get(id);
|
||||
|
||||
if (userSessionEntity == null) {
|
||||
MapUserSessionEntity userSession = userSessionTx.read(id);
|
||||
MapUserSessionEntity userSession = txInRealm(realm).read(id);
|
||||
return userSession;
|
||||
}
|
||||
return userSessionEntity;
|
||||
|
|
|
@ -23,6 +23,7 @@ import java.util.Arrays;
|
|||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import org.junit.Test;
|
||||
import static org.hamcrest.Matchers.hasToString;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
import static org.keycloak.models.ClientModel.SearchableFields.*;
|
||||
import static org.keycloak.models.map.storage.criteria.DefaultModelCriteria.criteria;
|
||||
|
||||
|
@ -206,4 +207,80 @@ public class DefaultModelCriteriaTest {
|
|||
hasToString("(clientId EQ [4] && id EQ [5])"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetFieldCriteriaSingleArgument() {
|
||||
DefaultModelCriteria<ClientModel> v = criteria();
|
||||
assertThat(
|
||||
v.compare(REALM_ID, Operator.EQ, "aa")
|
||||
.getSingleRestrictionArgument(REALM_ID.getName()),
|
||||
hasToString("aa")
|
||||
);
|
||||
|
||||
assertThat(
|
||||
v.not(v.compare(REALM_ID, Operator.EQ, "aa"))
|
||||
.getSingleRestrictionArgument(REALM_ID.getName()),
|
||||
nullValue()
|
||||
);
|
||||
|
||||
assertThat(
|
||||
v.not(v.not(v.compare(REALM_ID, Operator.EQ, "aa")))
|
||||
.getSingleRestrictionArgument(REALM_ID.getName()),
|
||||
hasToString("aa")
|
||||
);
|
||||
|
||||
assertThat(v.or(
|
||||
v.and(
|
||||
v.compare(CLIENT_ID, Operator.EQ, 4),
|
||||
v.compare(REALM_ID, Operator.EQ, "aa")
|
||||
)
|
||||
).getSingleRestrictionArgument(REALM_ID.getName()),
|
||||
hasToString("aa")
|
||||
);
|
||||
|
||||
assertThat(v.or(
|
||||
v.and(
|
||||
v.compare(CLIENT_ID, Operator.EQ, 4),
|
||||
v.compare(REALM_ID, Operator.EQ, "aa")
|
||||
),
|
||||
v.and(
|
||||
v.compare(CLIENT_ID, Operator.EQ, 123),
|
||||
v.compare(REALM_ID, Operator.EQ, "aa")
|
||||
)
|
||||
).getSingleRestrictionArgument(REALM_ID.getName()),
|
||||
hasToString("aa")
|
||||
);
|
||||
|
||||
assertThat(v.or(
|
||||
v.and(
|
||||
v.compare(CLIENT_ID, Operator.EQ, 4),
|
||||
v.compare(REALM_ID, Operator.EQ, "aa")
|
||||
),
|
||||
v.and(
|
||||
v.compare(CLIENT_ID, Operator.EQ, 4),
|
||||
v.compare(REALM_ID, Operator.EQ, "bb")
|
||||
)
|
||||
).getSingleRestrictionArgument(REALM_ID.getName()),
|
||||
nullValue()
|
||||
);
|
||||
|
||||
assertThat(v.or(
|
||||
v.and(
|
||||
v.compare(CLIENT_ID, Operator.EQ, 4),
|
||||
v.compare(REALM_ID, Operator.EQ, "aa")
|
||||
),
|
||||
v.not(v.compare(ID, Operator.EQ, 5))
|
||||
).getSingleRestrictionArgument(REALM_ID.getName()),
|
||||
nullValue()
|
||||
);
|
||||
|
||||
assertThat(v.and(
|
||||
v.and(
|
||||
v.compare(CLIENT_ID, Operator.EQ, 4),
|
||||
v.compare(REALM_ID, Operator.EQ, "aa")
|
||||
),
|
||||
v.not(v.compare(ID, Operator.EQ, 5))
|
||||
).getSingleRestrictionArgument(REALM_ID.getName()),
|
||||
hasToString("aa")
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
7
pom.xml
7
pom.xml
|
@ -123,6 +123,8 @@
|
|||
<org.yaml.snakeyaml.version>1.33</org.yaml.snakeyaml.version>
|
||||
<!-- Override sshd-common to fix CVE-2022-45047 -->
|
||||
<org.apache.sshd.version>2.9.2</org.apache.sshd.version>
|
||||
<org.snakeyaml.snakeyaml-engine.version>2.6</org.snakeyaml.snakeyaml-engine.version>
|
||||
|
||||
<!-- Openshift -->
|
||||
<version.com.openshift.openshift-restclient-java>9.0.5.Final</version.com.openshift.openshift-restclient-java>
|
||||
|
||||
|
@ -307,6 +309,11 @@
|
|||
<artifactId>sshd-common</artifactId>
|
||||
<version>${org.apache.sshd.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.snakeyaml</groupId>
|
||||
<artifactId>snakeyaml-engine</artifactId>
|
||||
<version>${org.snakeyaml.snakeyaml-engine.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.keycloak</groupId>
|
||||
<artifactId>keycloak-dependencies-admin-ui-wrapper</artifactId>
|
||||
|
|
|
@ -338,6 +338,14 @@
|
|||
</properties>
|
||||
</profile>
|
||||
|
||||
<profile>
|
||||
<id>file</id>
|
||||
<properties>
|
||||
<keycloak.profile.feature.map_storage>enabled</keycloak.profile.feature.map_storage>
|
||||
<keycloak.model.parameters>Map,FileMapStorage</keycloak.model.parameters>
|
||||
</properties>
|
||||
</profile>
|
||||
|
||||
<profile>
|
||||
<id>hot-rod</id>
|
||||
<properties>
|
||||
|
|
|
@ -553,7 +553,7 @@ public abstract class KeycloakModelTest {
|
|||
|
||||
what.accept(session, parameter);
|
||||
|
||||
session.getTransactionManager().rollback();
|
||||
session.getTransactionManager().setRollbackOnly();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -73,6 +73,7 @@ public class AdminEventQueryTest extends KeycloakModelTest {
|
|||
withRealm(realmId, (session, realm) -> {
|
||||
EventStoreProvider eventStore = session.getProvider(EventStoreProvider.class);
|
||||
assertThat(eventStore.createAdminQuery()
|
||||
.realm(realmId)
|
||||
.firstResult(2)
|
||||
.getResultStream()
|
||||
.collect(Collectors.counting()),
|
||||
|
@ -92,6 +93,7 @@ public class AdminEventQueryTest extends KeycloakModelTest {
|
|||
eventStore.onEvent(firstEvent, false);
|
||||
eventStore.onEvent(secondEvent, false);
|
||||
List<AdminEvent> adminEventsAsc = eventStore.createAdminQuery()
|
||||
.realm(realmId)
|
||||
.orderByAscTime()
|
||||
.getResultStream()
|
||||
.collect(Collectors.toList());
|
||||
|
@ -100,6 +102,7 @@ public class AdminEventQueryTest extends KeycloakModelTest {
|
|||
assertThat(adminEventsAsc.get(1).getOperationType(), is(OperationType.DELETE));
|
||||
|
||||
List<AdminEvent> adminEventsDesc = eventStore.createAdminQuery()
|
||||
.realm(realmId)
|
||||
.orderByDescTime()
|
||||
.getResultStream()
|
||||
.collect(Collectors.toList());
|
||||
|
|
|
@ -20,11 +20,14 @@ import org.keycloak.common.ClientConnection;
|
|||
import org.keycloak.events.Event;
|
||||
import org.keycloak.events.EventBuilder;
|
||||
import org.keycloak.events.EventStoreProvider;
|
||||
import org.keycloak.events.EventStoreSpi;
|
||||
import org.keycloak.events.EventType;
|
||||
import org.keycloak.events.admin.AdminEvent;
|
||||
import org.keycloak.models.Constants;
|
||||
import org.keycloak.models.KeycloakSession;
|
||||
import org.keycloak.models.RealmModel;
|
||||
import org.keycloak.models.map.events.MapEventStoreProviderFactory;
|
||||
import org.keycloak.models.map.storage.file.FileMapStorageProviderFactory;
|
||||
import org.keycloak.testsuite.model.KeycloakModelTest;
|
||||
import org.keycloak.testsuite.model.RequireProvider;
|
||||
import java.util.List;
|
||||
|
@ -36,6 +39,7 @@ import static org.hamcrest.MatcherAssert.assertThat;
|
|||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.junit.Assume.assumeFalse;
|
||||
|
||||
/**
|
||||
*
|
||||
|
@ -60,6 +64,12 @@ public class EventQueryTest extends KeycloakModelTest {
|
|||
|
||||
@Test
|
||||
public void testClear() {
|
||||
// Skip the test if EventProvider == File
|
||||
String evProvider = CONFIG.getConfig().get(EventStoreSpi.NAME + ".provider");
|
||||
String evMapStorageProvider = CONFIG.getConfig().get(EventStoreSpi.NAME + ".map.storage-auth-events.provider");
|
||||
assumeFalse(MapEventStoreProviderFactory.PROVIDER_ID.equals(evProvider) &&
|
||||
(evMapStorageProvider == null || FileMapStorageProviderFactory.PROVIDER_ID.equals(evMapStorageProvider)));
|
||||
|
||||
inRolledBackTransaction(null, (session, t) -> {
|
||||
EventStoreProvider eventStore = session.getProvider(EventStoreProvider.class);
|
||||
eventStore.clear();
|
||||
|
@ -89,6 +99,7 @@ public class EventQueryTest extends KeycloakModelTest {
|
|||
withRealm(realmId, (session, realm) -> {
|
||||
EventStoreProvider eventStore = session.getProvider(EventStoreProvider.class);
|
||||
assertThat(eventStore.createQuery()
|
||||
.realm(realmId)
|
||||
.firstResult(2)
|
||||
.getResultStream()
|
||||
.collect(Collectors.counting()),
|
||||
|
@ -165,6 +176,7 @@ public class EventQueryTest extends KeycloakModelTest {
|
|||
EventStoreProvider eventStore = session.getProvider(EventStoreProvider.class);
|
||||
|
||||
Set<Event> events = eventStore.createQuery()
|
||||
.realm(realmId)
|
||||
.getResultStream().collect(Collectors.toSet());
|
||||
|
||||
assertThat(events, hasSize(1));
|
||||
|
@ -200,8 +212,8 @@ public class EventQueryTest extends KeycloakModelTest {
|
|||
// Check if events were created
|
||||
inComittedTransaction(session -> {
|
||||
EventStoreProvider eventStore = session.getProvider(EventStoreProvider.class);
|
||||
assertThat(eventStore.createQuery().getResultStream().count(), is(1L));
|
||||
assertThat(eventStore.createAdminQuery().getResultStream().count(), is(1L));
|
||||
assertThat(eventStore.createQuery().realm(newRealmId).getResultStream().count(), is(1L));
|
||||
assertThat(eventStore.createAdminQuery().realm(newRealmId).getResultStream().count(), is(1L));
|
||||
});
|
||||
|
||||
// Remove realm
|
||||
|
@ -210,8 +222,8 @@ public class EventQueryTest extends KeycloakModelTest {
|
|||
// Check events were removed
|
||||
inComittedTransaction(session -> {
|
||||
EventStoreProvider eventStore = session.getProvider(EventStoreProvider.class);
|
||||
assertThat(eventStore.createQuery().getResultStream().count(), is(0L));
|
||||
assertThat(eventStore.createAdminQuery().getResultStream().count(), is(0L));
|
||||
assertThat(eventStore.createQuery().realm(newRealmId).getResultStream().count(), is(0L));
|
||||
assertThat(eventStore.createAdminQuery().realm(newRealmId).getResultStream().count(), is(0L));
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,89 @@
|
|||
/*
|
||||
* Copyright 2023 Red Hat, Inc. and/or its affiliates
|
||||
* and other contributors as indicated by the @author tags.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.keycloak.testsuite.model.parameters;
|
||||
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import java.util.Set;
|
||||
import org.keycloak.authorization.store.StoreFactorySpi;
|
||||
import org.keycloak.events.EventStoreSpi;
|
||||
import org.keycloak.models.DeploymentStateSpi;
|
||||
import org.keycloak.models.SingleUseObjectSpi;
|
||||
import org.keycloak.models.UserLoginFailureSpi;
|
||||
import org.keycloak.models.UserSessionSpi;
|
||||
import org.keycloak.models.map.authSession.MapRootAuthenticationSessionProviderFactory;
|
||||
import org.keycloak.models.map.authorization.MapAuthorizationStoreFactory;
|
||||
import org.keycloak.models.map.client.MapClientProviderFactory;
|
||||
import org.keycloak.models.map.clientscope.MapClientScopeProviderFactory;
|
||||
import org.keycloak.models.map.deploymentState.MapDeploymentStateProviderFactory;
|
||||
import org.keycloak.models.map.events.MapEventStoreProviderFactory;
|
||||
import org.keycloak.models.map.group.MapGroupProviderFactory;
|
||||
import org.keycloak.models.map.keys.MapPublicKeyStorageProviderFactory;
|
||||
import org.keycloak.models.map.loginFailure.MapUserLoginFailureProviderFactory;
|
||||
import org.keycloak.models.map.realm.MapRealmProviderFactory;
|
||||
import org.keycloak.models.map.role.MapRoleProviderFactory;
|
||||
import org.keycloak.models.map.singleUseObject.MapSingleUseObjectProviderFactory;
|
||||
import org.keycloak.models.map.storage.MapStorageSpi;
|
||||
import org.keycloak.models.map.storage.chm.ConcurrentHashMapStorageProviderFactory;
|
||||
import org.keycloak.models.map.storage.file.FileMapStorageProviderFactory;
|
||||
import org.keycloak.models.map.user.MapUserProviderFactory;
|
||||
import org.keycloak.models.map.userSession.MapUserSessionProviderFactory;
|
||||
import org.keycloak.provider.ProviderFactory;
|
||||
import org.keycloak.provider.Spi;
|
||||
import org.keycloak.sessions.AuthenticationSessionSpi;
|
||||
import org.keycloak.testsuite.model.Config;
|
||||
import org.keycloak.testsuite.model.KeycloakModelParameters;
|
||||
|
||||
public class FileMapStorage extends KeycloakModelParameters {
|
||||
|
||||
static final Set<Class<? extends Spi>> ALLOWED_SPIS = ImmutableSet.<Class<? extends Spi>>builder()
|
||||
.build();
|
||||
|
||||
static final Set<Class<? extends ProviderFactory>> ALLOWED_FACTORIES = ImmutableSet.<Class<? extends ProviderFactory>>builder()
|
||||
.add(FileMapStorageProviderFactory.class)
|
||||
.add(ConcurrentHashMapStorageProviderFactory.class)
|
||||
.build();
|
||||
|
||||
public FileMapStorage() {
|
||||
super(ALLOWED_SPIS, ALLOWED_FACTORIES);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void updateConfig(Config cf) {
|
||||
cf.spi(MapStorageSpi.NAME)
|
||||
.provider(FileMapStorageProviderFactory.PROVIDER_ID)
|
||||
.config("dir", "${project.build.directory:target/file}")
|
||||
.provider(ConcurrentHashMapStorageProviderFactory.PROVIDER_ID)
|
||||
.config("dir", "${project.build.directory:target/chm}")
|
||||
|
||||
.spi(AuthenticationSessionSpi.PROVIDER_ID).provider(MapRootAuthenticationSessionProviderFactory.PROVIDER_ID) .config(STORAGE_CONFIG, FileMapStorageProviderFactory.PROVIDER_ID)
|
||||
.spi("client").provider(MapClientProviderFactory.PROVIDER_ID) .config(STORAGE_CONFIG, FileMapStorageProviderFactory.PROVIDER_ID)
|
||||
.spi("clientScope").provider(MapClientScopeProviderFactory.PROVIDER_ID) .config(STORAGE_CONFIG, FileMapStorageProviderFactory.PROVIDER_ID)
|
||||
.spi("group").provider(MapGroupProviderFactory.PROVIDER_ID) .config(STORAGE_CONFIG, FileMapStorageProviderFactory.PROVIDER_ID)
|
||||
.spi("realm").provider(MapRealmProviderFactory.PROVIDER_ID) .config(STORAGE_CONFIG, FileMapStorageProviderFactory.PROVIDER_ID)
|
||||
.spi("role").provider(MapRoleProviderFactory.PROVIDER_ID) .config(STORAGE_CONFIG, FileMapStorageProviderFactory.PROVIDER_ID)
|
||||
.spi(DeploymentStateSpi.NAME).provider(MapDeploymentStateProviderFactory.PROVIDER_ID) .config(STORAGE_CONFIG, ConcurrentHashMapStorageProviderFactory.PROVIDER_ID)
|
||||
.spi(StoreFactorySpi.NAME).provider(MapAuthorizationStoreFactory.PROVIDER_ID) .config(STORAGE_CONFIG, FileMapStorageProviderFactory.PROVIDER_ID)
|
||||
.spi("user").provider(MapUserProviderFactory.PROVIDER_ID) .config(STORAGE_CONFIG, FileMapStorageProviderFactory.PROVIDER_ID)
|
||||
.spi(UserLoginFailureSpi.NAME).provider(MapUserLoginFailureProviderFactory.PROVIDER_ID) .config(STORAGE_CONFIG, FileMapStorageProviderFactory.PROVIDER_ID)
|
||||
.spi(SingleUseObjectSpi.NAME).provider(MapSingleUseObjectProviderFactory.PROVIDER_ID) .config(STORAGE_CONFIG, ConcurrentHashMapStorageProviderFactory.PROVIDER_ID)
|
||||
.spi("publicKeyStorage").provider(MapPublicKeyStorageProviderFactory.PROVIDER_ID) .config(STORAGE_CONFIG, ConcurrentHashMapStorageProviderFactory.PROVIDER_ID)
|
||||
.spi(UserSessionSpi.NAME).provider(MapUserSessionProviderFactory.PROVIDER_ID) .config(STORAGE_CONFIG, FileMapStorageProviderFactory.PROVIDER_ID)
|
||||
.spi(EventStoreSpi.NAME).provider(MapEventStoreProviderFactory.PROVIDER_ID) .config("storage-admin-events.provider", FileMapStorageProviderFactory.PROVIDER_ID)
|
||||
.config("storage-auth-events.provider", FileMapStorageProviderFactory.PROVIDER_ID);
|
||||
}
|
||||
|
||||
}
|
|
@ -26,14 +26,19 @@ import org.keycloak.models.KeycloakTransaction;
|
|||
import org.keycloak.models.RealmModel;
|
||||
import org.keycloak.models.UserSessionModel;
|
||||
import org.keycloak.models.UserSessionProvider;
|
||||
import org.keycloak.models.UserSessionSpi;
|
||||
import org.keycloak.models.map.storage.ModelEntityUtil;
|
||||
import org.keycloak.models.map.storage.file.FileMapStorageProviderFactory;
|
||||
import org.keycloak.models.map.storage.hotRod.HotRodMapStorageProviderFactory;
|
||||
import org.keycloak.models.map.storage.hotRod.connections.HotRodConnectionProvider;
|
||||
import org.keycloak.models.map.userSession.MapUserSessionProviderFactory;
|
||||
import org.keycloak.protocol.oidc.OIDCLoginProtocol;
|
||||
import org.keycloak.testsuite.model.KeycloakModelTest;
|
||||
import org.keycloak.testsuite.model.RequireProvider;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.locks.Lock;
|
||||
import java.util.concurrent.locks.ReentrantLock;
|
||||
import java.util.function.Consumer;
|
||||
|
@ -43,6 +48,7 @@ import static org.hamcrest.MatcherAssert.assertThat;
|
|||
import static org.hamcrest.Matchers.aMapWithSize;
|
||||
import static org.hamcrest.Matchers.anEmptyMap;
|
||||
import static org.hamcrest.Matchers.startsWith;
|
||||
import static org.junit.Assume.assumeFalse;
|
||||
import static org.keycloak.utils.LockObjectsForModification.lockUserSessionsForModification;
|
||||
|
||||
|
||||
|
@ -73,19 +79,32 @@ public class UserSessionConcurrencyTest extends KeycloakModelTest {
|
|||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void cleanEnvironment(KeycloakSession s) {
|
||||
s.realms().removeRealm(realmId);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean isUseSameKeycloakSessionFactoryForAllThreads() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testConcurrentNotesChange() {
|
||||
public void testConcurrentNotesChange() throws InterruptedException {
|
||||
// Defer this one until file locking is available
|
||||
// Skip the test if EventProvider == File
|
||||
String evProvider = CONFIG.getConfig().get(UserSessionSpi.NAME + ".provider");
|
||||
String evMapStorageProvider = CONFIG.getConfig().get(UserSessionSpi.NAME + ".map.storage.provider");
|
||||
assumeFalse(MapUserSessionProviderFactory.PROVIDER_ID.equals(evProvider) &&
|
||||
(evMapStorageProvider == null || FileMapStorageProviderFactory.PROVIDER_ID.equals(evMapStorageProvider)));
|
||||
|
||||
// Create user session
|
||||
String uId = withRealm(this.realmId, (session, realm) -> session.sessions().createUserSession(realm, session.users().getUserByUsername(realm, "user1"), "user1", "127.0.0.1", "form", true, null, null)).getId();
|
||||
|
||||
// Create/Update client session's notes concurrently
|
||||
CountDownLatch cdl = new CountDownLatch(200 * CLIENTS_COUNT);
|
||||
IntStream.range(0, 200 * CLIENTS_COUNT).parallel()
|
||||
.forEach(i -> inComittedTransaction(i, (session, n) -> {
|
||||
.forEach(i -> inComittedTransaction(i, (session, n) -> { try {
|
||||
RealmModel realm = session.realms().getRealm(realmId);
|
||||
ClientModel client = realm.getClientByClientId("client" + (n % CLIENTS_COUNT));
|
||||
|
||||
|
@ -114,8 +133,11 @@ public class UserSessionConcurrencyTest extends KeycloakModelTest {
|
|||
}
|
||||
|
||||
return null;
|
||||
}));
|
||||
} finally {
|
||||
cdl.countDown();
|
||||
}}));
|
||||
|
||||
cdl.await(10, TimeUnit.SECONDS);
|
||||
withRealm(this.realmId, (session, realm) -> {
|
||||
UserSessionModel uSession = session.sessions().getUserSession(realm, uId);
|
||||
assertThat(uSession.getAuthenticatedClientSessions(), aMapWithSize(CLIENTS_COUNT));
|
||||
|
|
|
@ -26,7 +26,10 @@ import org.keycloak.models.Constants;
|
|||
import org.keycloak.models.KeycloakSession;
|
||||
import org.keycloak.models.RealmModel;
|
||||
import org.keycloak.models.SingleUseObjectProvider;
|
||||
import org.keycloak.models.SingleUseObjectProviderFactory;
|
||||
import org.keycloak.models.SingleUseObjectSpi;
|
||||
import org.keycloak.models.UserModel;
|
||||
import org.keycloak.models.map.singleUseObject.MapSingleUseObjectProviderFactory;
|
||||
import org.keycloak.models.map.storage.chm.ConcurrentHashMapStorageProviderFactory;
|
||||
import org.keycloak.models.map.userSession.MapUserSessionProviderFactory;
|
||||
import org.keycloak.testsuite.model.KeycloakModelTest;
|
||||
|
@ -161,11 +164,11 @@ public class SingleUseObjectModelTest extends KeycloakModelTest {
|
|||
|
||||
@Test
|
||||
public void testCluster() throws InterruptedException {
|
||||
// Skip the test if MapUserSessionProvider == CHM
|
||||
String usProvider = CONFIG.getConfig().get("userSessions.provider");
|
||||
String usMapStorageProvider = CONFIG.getConfig().get("userSessions.map.storage.provider");
|
||||
assumeFalse(MapUserSessionProviderFactory.PROVIDER_ID.equals(usProvider) &&
|
||||
(usMapStorageProvider == null || ConcurrentHashMapStorageProviderFactory.PROVIDER_ID.equals(usMapStorageProvider)));
|
||||
// Skip the test if SingleUseObjectProvider == CHM
|
||||
String suProvider = CONFIG.getConfig().get(SingleUseObjectSpi.NAME + ".provider");
|
||||
String suMapStorageProvider = CONFIG.getConfig().get(SingleUseObjectSpi.NAME + ".map.storage.provider");
|
||||
assumeFalse(MapSingleUseObjectProviderFactory.PROVIDER_ID.equals(suProvider) &&
|
||||
(suMapStorageProvider == null || ConcurrentHashMapStorageProviderFactory.PROVIDER_ID.equals(suMapStorageProvider)));
|
||||
|
||||
AtomicInteger index = new AtomicInteger();
|
||||
CountDownLatch afterFirstNodeLatch = new CountDownLatch(1);
|
||||
|
|
|
@ -130,6 +130,17 @@ public class Dict<E> extends UpdatableEntity.Impl implements EntityFieldDelegate
|
|||
return null;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public <T, EF extends java.lang.Enum<? extends org.keycloak.models.map.common.EntityField<E>> & org.keycloak.models.map.common.EntityField<E>> void collectionAdd(EF field, T value) {
|
||||
throw new UnsupportedOperationException("Not supported yet.");
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T, EF extends java.lang.Enum<? extends org.keycloak.models.map.common.EntityField<E>> & org.keycloak.models.map.common.EntityField<E>> Object collectionRemove(EF field, T value) {
|
||||
throw new UnsupportedOperationException("Not supported yet.");
|
||||
}
|
||||
|
||||
protected boolean isKeyAllowed(String key) {
|
||||
return allowedKeys.contains(key);
|
||||
}
|
||||
|
|
|
@ -17,10 +17,10 @@
|
|||
},
|
||||
"map": {
|
||||
"storage-admin-events": {
|
||||
"provider": "${keycloak.adminEventsStore.map.storage.provider:concurrenthashmap}"
|
||||
"provider": "${keycloak.adminEventsStore.map.storage.provider,keycloak.mapStorage.provider.default:concurrenthashmap}"
|
||||
},
|
||||
"storage-auth-events": {
|
||||
"provider": "${keycloak.authEventsStore.map.storage.provider:concurrenthashmap}"
|
||||
"provider": "${keycloak.authEventsStore.map.storage.provider,keycloak.mapStorage.provider.default:concurrenthashmap}"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -40,7 +40,7 @@
|
|||
"provider": "${keycloak.realm.provider:jpa}",
|
||||
"map": {
|
||||
"storage": {
|
||||
"provider": "${keycloak.realm.map.storage.provider:concurrenthashmap}"
|
||||
"provider": "${keycloak.realm.map.storage.provider,keycloak.mapStorage.provider.default:concurrenthashmap}"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -49,7 +49,7 @@
|
|||
"provider": "${keycloak.client.provider:jpa}",
|
||||
"map": {
|
||||
"storage": {
|
||||
"provider": "${keycloak.client.map.storage.provider:concurrenthashmap}"
|
||||
"provider": "${keycloak.client.map.storage.provider,keycloak.mapStorage.provider.default:concurrenthashmap}"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -58,7 +58,7 @@
|
|||
"provider": "${keycloak.clientScope.provider:jpa}",
|
||||
"map": {
|
||||
"storage": {
|
||||
"provider": "${keycloak.clientScope.map.storage.provider:concurrenthashmap}"
|
||||
"provider": "${keycloak.clientScope.map.storage.provider,keycloak.mapStorage.provider.default:concurrenthashmap}"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -67,7 +67,7 @@
|
|||
"provider": "${keycloak.group.provider:jpa}",
|
||||
"map": {
|
||||
"storage": {
|
||||
"provider": "${keycloak.group.map.storage.provider:concurrenthashmap}"
|
||||
"provider": "${keycloak.group.map.storage.provider,keycloak.mapStorage.provider.default:concurrenthashmap}"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -76,7 +76,7 @@
|
|||
"provider": "${keycloak.role.provider:jpa}",
|
||||
"map": {
|
||||
"storage": {
|
||||
"provider": "${keycloak.role.map.storage.provider:concurrenthashmap}"
|
||||
"provider": "${keycloak.role.map.storage.provider,keycloak.mapStorage.provider.default:concurrenthashmap}"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -85,7 +85,7 @@
|
|||
"provider": "${keycloak.authSession.provider:infinispan}",
|
||||
"map": {
|
||||
"storage": {
|
||||
"provider": "${keycloak.authSession.map.storage.provider:concurrenthashmap}"
|
||||
"provider": "${keycloak.authSession.map.storage.provider,keycloak.mapStorage.provider.default:concurrenthashmap}"
|
||||
}
|
||||
},
|
||||
"infinispan": {
|
||||
|
@ -97,7 +97,7 @@
|
|||
"provider": "${keycloak.userSession.provider:infinispan}",
|
||||
"map": {
|
||||
"storage": {
|
||||
"provider": "${keycloak.userSession.map.storage.provider:concurrenthashmap}"
|
||||
"provider": "${keycloak.userSession.map.storage.provider,keycloak.mapStorage.provider.default:concurrenthashmap}"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -106,7 +106,7 @@
|
|||
"provider": "${keycloak.loginFailure.provider:infinispan}",
|
||||
"map": {
|
||||
"storage": {
|
||||
"provider": "${keycloak.loginFailure.map.storage.provider:concurrenthashmap}"
|
||||
"provider": "${keycloak.loginFailure.map.storage.provider,keycloak.mapStorage.provider.default:concurrenthashmap}"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -115,7 +115,7 @@
|
|||
"provider": "${keycloak.singleUseObject.provider:infinispan}",
|
||||
"map": {
|
||||
"storage": {
|
||||
"provider": "${keycloak.singleUseObject.map.storage.provider:concurrenthashmap}"
|
||||
"provider": "${keycloak.singleUseObject.map.storage.provider,keycloak.mapStorage.provider.default:concurrenthashmap}"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -124,7 +124,7 @@
|
|||
"provider": "${keycloak.publicKeyStorage.provider:infinispan}",
|
||||
"map": {
|
||||
"storage": {
|
||||
"provider": "${keycloak.publicKeyStorage.map.storage.provider:concurrenthashmap}"
|
||||
"provider": "${keycloak.publicKeyStorage.map.storage.provider,keycloak.mapStorage.provider.default:concurrenthashmap}"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -165,6 +165,9 @@
|
|||
"mode": "LDAP_ONLY",
|
||||
"use.realm.roles.mapping": "true",
|
||||
"connectionPooling": "true"
|
||||
},
|
||||
"file": {
|
||||
"dir": "${keycloak.group.map.storage.provider.directory:target/file}"
|
||||
}
|
||||
},
|
||||
|
||||
|
@ -172,7 +175,7 @@
|
|||
"provider": "${keycloak.user.provider:jpa}",
|
||||
"map": {
|
||||
"storage": {
|
||||
"provider": "${keycloak.user.map.storage.provider:concurrenthashmap}"
|
||||
"provider": "${keycloak.user.map.storage.provider,keycloak.mapStorage.provider.default:concurrenthashmap}"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -189,7 +192,7 @@
|
|||
"provider": "${keycloak.authorization.provider:jpa}",
|
||||
"map": {
|
||||
"storage": {
|
||||
"provider": "${keycloak.authorization.map.storage.provider:concurrenthashmap}"
|
||||
"provider": "${keycloak.authorization.map.storage.provider,keycloak.mapStorage.provider.default:concurrenthashmap}"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
|
Loading…
Reference in a new issue