KEYCLOAK-16584 Rename map to CRUD operations
* rename putIfAbsent() to create(), get() to read(), put() to update(), remove() to delete() * move ConcurrentHashMapStorage to org.keycloak.models.map.storage.chm package * Add javadoc to MapStorage
This commit is contained in:
parent
8cbfeef5b5
commit
6c07679446
15 changed files with 575 additions and 188 deletions
|
@ -105,7 +105,7 @@ public class MapRootAuthenticationSessionAdapter extends AbstractRootAuthenticat
|
||||||
if (entity.getAuthenticationSessions().isEmpty()) {
|
if (entity.getAuthenticationSessions().isEmpty()) {
|
||||||
MapRootAuthenticationSessionProvider authenticationSessionProvider =
|
MapRootAuthenticationSessionProvider authenticationSessionProvider =
|
||||||
(MapRootAuthenticationSessionProvider) session.authenticationSessions();
|
(MapRootAuthenticationSessionProvider) session.authenticationSessions();
|
||||||
authenticationSessionProvider.tx.remove(entity.getId());
|
authenticationSessionProvider.tx.delete(entity.getId());
|
||||||
} else {
|
} else {
|
||||||
entity.setTimestamp(Time.currentTime());
|
entity.setTimestamp(Time.currentTime());
|
||||||
}
|
}
|
||||||
|
|
|
@ -69,8 +69,8 @@ public class MapRootAuthenticationSessionProvider implements AuthenticationSessi
|
||||||
}
|
}
|
||||||
|
|
||||||
private MapRootAuthenticationSessionEntity registerEntityForChanges(MapRootAuthenticationSessionEntity origEntity) {
|
private MapRootAuthenticationSessionEntity registerEntityForChanges(MapRootAuthenticationSessionEntity origEntity) {
|
||||||
MapRootAuthenticationSessionEntity res = tx.get(origEntity.getId(), id -> Serialization.from(origEntity));
|
MapRootAuthenticationSessionEntity res = tx.read(origEntity.getId(), id -> Serialization.from(origEntity));
|
||||||
tx.putIfChanged(origEntity.getId(), res, MapRootAuthenticationSessionEntity::isUpdated);
|
tx.updateIfChanged(origEntity.getId(), res, MapRootAuthenticationSessionEntity::isUpdated);
|
||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -100,11 +100,11 @@ public class MapRootAuthenticationSessionProvider implements AuthenticationSessi
|
||||||
entity.setRealmId(realm.getId());
|
entity.setRealmId(realm.getId());
|
||||||
entity.setTimestamp(Time.currentTime());
|
entity.setTimestamp(Time.currentTime());
|
||||||
|
|
||||||
if (tx.get(entity.getId(), sessionStore::get) != null) {
|
if (tx.read(entity.getId(), sessionStore::read) != null) {
|
||||||
throw new ModelDuplicateException("Root authentication session exists: " + entity.getId());
|
throw new ModelDuplicateException("Root authentication session exists: " + entity.getId());
|
||||||
}
|
}
|
||||||
|
|
||||||
tx.putIfAbsent(entity.getId(), entity);
|
tx.create(entity.getId(), entity);
|
||||||
|
|
||||||
return entityToAdapterFunc(realm).apply(entity);
|
return entityToAdapterFunc(realm).apply(entity);
|
||||||
}
|
}
|
||||||
|
@ -118,7 +118,7 @@ public class MapRootAuthenticationSessionProvider implements AuthenticationSessi
|
||||||
|
|
||||||
LOG.tracef("getRootAuthenticationSession(%s, %s)%s", realm.getName(), authenticationSessionId, getShortStackTrace());
|
LOG.tracef("getRootAuthenticationSession(%s, %s)%s", realm.getName(), authenticationSessionId, getShortStackTrace());
|
||||||
|
|
||||||
MapRootAuthenticationSessionEntity entity = tx.get(UUID.fromString(authenticationSessionId), sessionStore::get);
|
MapRootAuthenticationSessionEntity entity = tx.read(UUID.fromString(authenticationSessionId), sessionStore::read);
|
||||||
return (entity == null || !entityRealmFilter(realm.getId()).test(entity))
|
return (entity == null || !entityRealmFilter(realm.getId()).test(entity))
|
||||||
? null
|
? null
|
||||||
: entityToAdapterFunc(realm).apply(entity);
|
: entityToAdapterFunc(realm).apply(entity);
|
||||||
|
@ -127,7 +127,7 @@ public class MapRootAuthenticationSessionProvider implements AuthenticationSessi
|
||||||
@Override
|
@Override
|
||||||
public void removeRootAuthenticationSession(RealmModel realm, RootAuthenticationSessionModel authenticationSession) {
|
public void removeRootAuthenticationSession(RealmModel realm, RootAuthenticationSessionModel authenticationSession) {
|
||||||
Objects.requireNonNull(authenticationSession, "The provided root authentication session can't be null!");
|
Objects.requireNonNull(authenticationSession, "The provided root authentication session can't be null!");
|
||||||
tx.remove(UUID.fromString(authenticationSession.getId()));
|
tx.delete(UUID.fromString(authenticationSession.getId()));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@ -145,7 +145,7 @@ public class MapRootAuthenticationSessionProvider implements AuthenticationSessi
|
||||||
|
|
||||||
LOG.debugf("Removed %d expired authentication sessions for realm '%s'", sessionIds.size(), realm.getName());
|
LOG.debugf("Removed %d expired authentication sessions for realm '%s'", sessionIds.size(), realm.getName());
|
||||||
|
|
||||||
sessionIds.forEach(tx::remove);
|
sessionIds.forEach(tx::delete);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@ -155,7 +155,7 @@ public class MapRootAuthenticationSessionProvider implements AuthenticationSessi
|
||||||
.filter(entity -> entityRealmFilter(realm.getId()).test(entity.getValue()))
|
.filter(entity -> entityRealmFilter(realm.getId()).test(entity.getValue()))
|
||||||
.map(Map.Entry::getKey)
|
.map(Map.Entry::getKey)
|
||||||
.collect(Collectors.toList())
|
.collect(Collectors.toList())
|
||||||
.forEach(tx::remove);
|
.forEach(tx::delete);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
|
|
@ -87,8 +87,8 @@ public class MapClientProvider implements ClientProvider {
|
||||||
}
|
}
|
||||||
|
|
||||||
private MapClientEntity registerEntityForChanges(MapClientEntity origEntity) {
|
private MapClientEntity registerEntityForChanges(MapClientEntity origEntity) {
|
||||||
final MapClientEntity res = tx.get(origEntity.getId(), id -> Serialization.from(origEntity));
|
final MapClientEntity res = tx.read(origEntity.getId(), id -> Serialization.from(origEntity));
|
||||||
tx.putIfChanged(origEntity.getId(), res, MapClientEntity::isUpdated);
|
tx.updateIfChanged(origEntity.getId(), res, MapClientEntity::isUpdated);
|
||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -165,10 +165,10 @@ public class MapClientProvider implements ClientProvider {
|
||||||
entity.setClientId(clientId);
|
entity.setClientId(clientId);
|
||||||
entity.setEnabled(true);
|
entity.setEnabled(true);
|
||||||
entity.setStandardFlowEnabled(true);
|
entity.setStandardFlowEnabled(true);
|
||||||
if (tx.get(entity.getId(), clientStore::get) != null) {
|
if (tx.read(entity.getId(), clientStore::read) != null) {
|
||||||
throw new ModelDuplicateException("Client exists: " + id);
|
throw new ModelDuplicateException("Client exists: " + id);
|
||||||
}
|
}
|
||||||
tx.putIfAbsent(entity.getId(), entity);
|
tx.create(entity.getId(), entity);
|
||||||
final ClientModel resource = entityToAdapterFunc(realm).apply(entity);
|
final ClientModel resource = entityToAdapterFunc(realm).apply(entity);
|
||||||
|
|
||||||
// TODO: Sending an event should be extracted to store layer
|
// TODO: Sending an event should be extracted to store layer
|
||||||
|
@ -221,7 +221,7 @@ public class MapClientProvider implements ClientProvider {
|
||||||
});
|
});
|
||||||
// TODO: ^^^^^^^ Up to here
|
// TODO: ^^^^^^^ Up to here
|
||||||
|
|
||||||
tx.remove(UUID.fromString(id));
|
tx.delete(UUID.fromString(id));
|
||||||
|
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
@ -241,7 +241,7 @@ public class MapClientProvider implements ClientProvider {
|
||||||
|
|
||||||
LOG.tracef("getClientById(%s, %s)%s", realm, id, getShortStackTrace());
|
LOG.tracef("getClientById(%s, %s)%s", realm, id, getShortStackTrace());
|
||||||
|
|
||||||
MapClientEntity entity = tx.get(UUID.fromString(id), clientStore::get);
|
MapClientEntity entity = tx.read(UUID.fromString(id), clientStore::read);
|
||||||
return (entity == null || ! entityRealmFilter(realm).test(entity))
|
return (entity == null || ! entityRealmFilter(realm).test(entity))
|
||||||
? null
|
? null
|
||||||
: entityToAdapterFunc(realm).apply(entity);
|
: entityToAdapterFunc(realm).apply(entity);
|
||||||
|
|
|
@ -21,8 +21,11 @@ import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||||
import com.fasterxml.jackson.annotation.PropertyAccessor;
|
import com.fasterxml.jackson.annotation.PropertyAccessor;
|
||||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
|
import com.fasterxml.jackson.databind.ObjectReader;
|
||||||
|
import com.fasterxml.jackson.databind.ObjectWriter;
|
||||||
import com.fasterxml.jackson.databind.SerializationFeature;
|
import com.fasterxml.jackson.databind.SerializationFeature;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
import java.util.concurrent.ConcurrentHashMap;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
*
|
*
|
||||||
|
@ -31,6 +34,8 @@ import java.io.IOException;
|
||||||
public class Serialization {
|
public class Serialization {
|
||||||
|
|
||||||
public static final ObjectMapper MAPPER = new ObjectMapper();
|
public static final ObjectMapper MAPPER = new ObjectMapper();
|
||||||
|
public static final ConcurrentHashMap<Class<?>, ObjectReader> READERS = new ConcurrentHashMap<>();
|
||||||
|
public static final ConcurrentHashMap<Class<?>, ObjectWriter> WRITERS = new ConcurrentHashMap<>();
|
||||||
|
|
||||||
abstract class IgnoreUpdatedMixIn { @JsonIgnore public abstract boolean isUpdated(); }
|
abstract class IgnoreUpdatedMixIn { @JsonIgnore public abstract boolean isUpdated(); }
|
||||||
|
|
||||||
|
@ -49,10 +54,14 @@ public class Serialization {
|
||||||
if (orig == null) {
|
if (orig == null) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
@SuppressWarnings("unchecked")
|
||||||
|
final Class<T> origClass = (Class<T>) orig.getClass();
|
||||||
|
|
||||||
|
// Naive solution but will do.
|
||||||
try {
|
try {
|
||||||
// Naive solution but will do.
|
ObjectReader reader = READERS.computeIfAbsent(origClass, MAPPER::readerFor);
|
||||||
@SuppressWarnings("unchecked")
|
ObjectWriter writer = WRITERS.computeIfAbsent(origClass, MAPPER::writerFor);
|
||||||
final T res = MAPPER.readValue(MAPPER.writeValueAsBytes(orig), (Class<T>) orig.getClass());
|
final T res = reader.readValue(writer.writeValueAsBytes(orig));
|
||||||
return res;
|
return res;
|
||||||
} catch (IOException ex) {
|
} catch (IOException ex) {
|
||||||
throw new IllegalStateException(ex);
|
throw new IllegalStateException(ex);
|
||||||
|
|
|
@ -55,7 +55,7 @@ public class MapGroupProvider implements GroupProvider {
|
||||||
|
|
||||||
private MapGroupEntity registerEntityForChanges(MapGroupEntity origEntity) {
|
private MapGroupEntity registerEntityForChanges(MapGroupEntity origEntity) {
|
||||||
final MapGroupEntity res = Serialization.from(origEntity);
|
final MapGroupEntity res = Serialization.from(origEntity);
|
||||||
tx.putIfChanged(origEntity.getId(), res, MapGroupEntity::isUpdated);
|
tx.updateIfChanged(origEntity.getId(), res, MapGroupEntity::isUpdated);
|
||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -88,7 +88,7 @@ public class MapGroupProvider implements GroupProvider {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
MapGroupEntity entity = tx.get(uid, groupStore::get);
|
MapGroupEntity entity = tx.read(uid, groupStore::read);
|
||||||
return (entity == null || ! entityRealmFilter(realm).test(entity))
|
return (entity == null || ! entityRealmFilter(realm).test(entity))
|
||||||
? null
|
? null
|
||||||
: entityToAdapterFunc(realm).apply(entity);
|
: entityToAdapterFunc(realm).apply(entity);
|
||||||
|
@ -194,10 +194,10 @@ public class MapGroupProvider implements GroupProvider {
|
||||||
MapGroupEntity entity = new MapGroupEntity(entityId, realm.getId());
|
MapGroupEntity entity = new MapGroupEntity(entityId, realm.getId());
|
||||||
entity.setName(name);
|
entity.setName(name);
|
||||||
entity.setParentId(toParent == null ? null : toParent.getId());
|
entity.setParentId(toParent == null ? null : toParent.getId());
|
||||||
if (tx.get(entity.getId(), groupStore::get) != null) {
|
if (tx.read(entity.getId(), groupStore::read) != null) {
|
||||||
throw new ModelDuplicateException("Group exists: " + entityId);
|
throw new ModelDuplicateException("Group exists: " + entityId);
|
||||||
}
|
}
|
||||||
tx.putIfAbsent(entity.getId(), entity);
|
tx.create(entity.getId(), entity);
|
||||||
|
|
||||||
return entityToAdapterFunc(realm).apply(entity);
|
return entityToAdapterFunc(realm).apply(entity);
|
||||||
}
|
}
|
||||||
|
@ -233,7 +233,7 @@ public class MapGroupProvider implements GroupProvider {
|
||||||
|
|
||||||
// TODO: ^^^^^^^ Up to here
|
// TODO: ^^^^^^^ Up to here
|
||||||
|
|
||||||
tx.remove(UUID.fromString(group.getId()));
|
tx.delete(UUID.fromString(group.getId()));
|
||||||
|
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
|
@ -77,7 +77,7 @@ public class MapRoleProvider implements RoleProvider {
|
||||||
|
|
||||||
private MapRoleEntity registerEntityForChanges(MapRoleEntity origEntity) {
|
private MapRoleEntity registerEntityForChanges(MapRoleEntity origEntity) {
|
||||||
final MapRoleEntity res = Serialization.from(origEntity);
|
final MapRoleEntity res = Serialization.from(origEntity);
|
||||||
tx.putIfChanged(origEntity.getId(), res, MapRoleEntity::isUpdated);
|
tx.updateIfChanged(origEntity.getId(), res, MapRoleEntity::isUpdated);
|
||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -119,10 +119,10 @@ public class MapRoleProvider implements RoleProvider {
|
||||||
MapRoleEntity entity = new MapRoleEntity(entityId, realm.getId());
|
MapRoleEntity entity = new MapRoleEntity(entityId, realm.getId());
|
||||||
entity.setName(name);
|
entity.setName(name);
|
||||||
entity.setRealmId(realm.getId());
|
entity.setRealmId(realm.getId());
|
||||||
if (tx.get(entity.getId(), roleStore::get) != null) {
|
if (tx.read(entity.getId(), roleStore::read) != null) {
|
||||||
throw new ModelDuplicateException("Role exists: " + id);
|
throw new ModelDuplicateException("Role exists: " + id);
|
||||||
}
|
}
|
||||||
tx.putIfAbsent(entity.getId(), entity);
|
tx.create(entity.getId(), entity);
|
||||||
return entityToAdapterFunc(realm).apply(entity);
|
return entityToAdapterFunc(realm).apply(entity);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -157,10 +157,10 @@ public class MapRoleProvider implements RoleProvider {
|
||||||
entity.setName(name);
|
entity.setName(name);
|
||||||
entity.setClientRole(true);
|
entity.setClientRole(true);
|
||||||
entity.setClientId(client.getId());
|
entity.setClientId(client.getId());
|
||||||
if (tx.get(entity.getId(), roleStore::get) != null) {
|
if (tx.read(entity.getId(), roleStore::read) != null) {
|
||||||
throw new ModelDuplicateException("Role exists: " + id);
|
throw new ModelDuplicateException("Role exists: " + id);
|
||||||
}
|
}
|
||||||
tx.putIfAbsent(entity.getId(), entity);
|
tx.create(entity.getId(), entity);
|
||||||
return entityToAdapterFunc(client.getRealm()).apply(entity);
|
return entityToAdapterFunc(client.getRealm()).apply(entity);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -233,7 +233,7 @@ public class MapRoleProvider implements RoleProvider {
|
||||||
});
|
});
|
||||||
// TODO: ^^^^^^^ Up to here
|
// TODO: ^^^^^^^ Up to here
|
||||||
|
|
||||||
tx.remove(UUID.fromString(role.getId()));
|
tx.delete(UUID.fromString(role.getId()));
|
||||||
|
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
@ -295,7 +295,7 @@ public class MapRoleProvider implements RoleProvider {
|
||||||
|
|
||||||
LOG.tracef("getRoleById(%s, %s)%s", realm.getName(), id, getShortStackTrace());
|
LOG.tracef("getRoleById(%s, %s)%s", realm.getName(), id, getShortStackTrace());
|
||||||
|
|
||||||
MapRoleEntity entity = tx.get(UUID.fromString(id), roleStore::get);
|
MapRoleEntity entity = tx.read(UUID.fromString(id), roleStore::read);
|
||||||
return (entity == null || ! entityRealmFilter(realm).test(entity))
|
return (entity == null || ! entityRealmFilter(realm).test(entity))
|
||||||
? null
|
? null
|
||||||
: entityToAdapterFunc(realm).apply(entity);
|
: entityToAdapterFunc(realm).apply(entity);
|
||||||
|
|
|
@ -1,132 +0,0 @@
|
||||||
/*
|
|
||||||
* Copyright 2020 Red Hat, Inc. and/or its affiliates
|
|
||||||
* and other contributors as indicated by the @author tags.
|
|
||||||
*
|
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
* you may not use this file except in compliance with the License.
|
|
||||||
* You may obtain a copy of the License at
|
|
||||||
*
|
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
*
|
|
||||||
* Unless required by applicable law or agreed to in writing, software
|
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
* See the License for the specific language governing permissions and
|
|
||||||
* limitations under the License.
|
|
||||||
*/
|
|
||||||
package org.keycloak.models.map.storage;
|
|
||||||
|
|
||||||
import org.keycloak.Config.Scope;
|
|
||||||
import org.keycloak.models.KeycloakSession;
|
|
||||||
import org.keycloak.models.KeycloakSessionFactory;
|
|
||||||
import org.keycloak.models.map.common.AbstractEntity;
|
|
||||||
import org.keycloak.models.map.common.Serialization;
|
|
||||||
import com.fasterxml.jackson.databind.JavaType;
|
|
||||||
import java.io.File;
|
|
||||||
import java.io.IOException;
|
|
||||||
import java.nio.file.Files;
|
|
||||||
import java.util.EnumSet;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.concurrent.ConcurrentHashMap;
|
|
||||||
import java.util.logging.Level;
|
|
||||||
import org.jboss.logging.Logger;
|
|
||||||
|
|
||||||
/**
|
|
||||||
*
|
|
||||||
* @author hmlnarik
|
|
||||||
*/
|
|
||||||
public class ConcurrentHashMapStorageProvider implements MapStorageProvider {
|
|
||||||
|
|
||||||
private static class ConcurrentHashMapStorage<K, V> extends ConcurrentHashMap<K, V> implements MapStorage<K, V> {
|
|
||||||
}
|
|
||||||
|
|
||||||
private static final String PROVIDER_ID = "concurrenthashmap";
|
|
||||||
|
|
||||||
private static final Logger LOG = Logger.getLogger(ConcurrentHashMapStorageProvider.class);
|
|
||||||
|
|
||||||
private final ConcurrentHashMap<String, ConcurrentHashMap<?,?>> storages = new ConcurrentHashMap<>();
|
|
||||||
|
|
||||||
private File storageDirectory;
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public MapStorageProvider create(KeycloakSession session) {
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void init(Scope config) {
|
|
||||||
File f = new File(config.get("dir"));
|
|
||||||
try {
|
|
||||||
this.storageDirectory = f.exists()
|
|
||||||
? f
|
|
||||||
: Files.createTempDirectory("storage-map-chm-").toFile();
|
|
||||||
} catch (IOException ex) {
|
|
||||||
this.storageDirectory = null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void postInit(KeycloakSessionFactory factory) {
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void close() {
|
|
||||||
storages.forEach(this::storeMap);
|
|
||||||
}
|
|
||||||
|
|
||||||
private void storeMap(String fileName, ConcurrentHashMap<?, ?> store) {
|
|
||||||
if (fileName != null) {
|
|
||||||
File f = getFile(fileName);
|
|
||||||
try {
|
|
||||||
if (storageDirectory != null && storageDirectory.exists()) {
|
|
||||||
LOG.debugf("Storing contents to %s", f.getCanonicalPath());
|
|
||||||
Serialization.MAPPER.writeValue(f, store.values());
|
|
||||||
} else {
|
|
||||||
LOG.debugf("Not storing contents of %s because directory %s does not exist", fileName, this.storageDirectory);
|
|
||||||
}
|
|
||||||
} catch (IOException ex) {
|
|
||||||
throw new RuntimeException(ex);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private <K, V extends AbstractEntity<K>> ConcurrentHashMapStorage<?, V> loadMap(String fileName, Class<V> valueType, EnumSet<Flag> flags) {
|
|
||||||
ConcurrentHashMapStorage<K, V> store = new ConcurrentHashMapStorage<>();
|
|
||||||
|
|
||||||
if (! flags.contains(Flag.INITIALIZE_EMPTY)) {
|
|
||||||
final File f = getFile(fileName);
|
|
||||||
if (f != null && f.exists()) {
|
|
||||||
try {
|
|
||||||
LOG.debugf("Restoring contents from %s", f.getCanonicalPath());
|
|
||||||
JavaType type = Serialization.MAPPER.getTypeFactory().constructCollectionType(List.class, valueType);
|
|
||||||
|
|
||||||
List<V> values = Serialization.MAPPER.readValue(f, type);
|
|
||||||
values.forEach((V mce) -> store.put(mce.getId(), mce));
|
|
||||||
} catch (IOException ex) {
|
|
||||||
throw new RuntimeException(ex);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return store;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public String getId() {
|
|
||||||
return PROVIDER_ID;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
@SuppressWarnings("unchecked")
|
|
||||||
public <K, V extends AbstractEntity<K>> MapStorage<K, V> getStorage(String name, Class<K> keyType, Class<V> valueType, Flag... flags) {
|
|
||||||
EnumSet<Flag> f = flags == null || flags.length == 0 ? EnumSet.noneOf(Flag.class) : EnumSet.of(flags[0], flags);
|
|
||||||
return (MapStorage<K, V>) storages.computeIfAbsent(name, n -> loadMap(name, valueType, f));
|
|
||||||
}
|
|
||||||
|
|
||||||
private File getFile(String fileName) {
|
|
||||||
return storageDirectory == null
|
|
||||||
? null
|
|
||||||
: new File(storageDirectory, "map-" + fileName + ".json");
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
|
@ -35,7 +35,7 @@ public class MapKeycloakTransaction<K, V> implements KeycloakTransaction {
|
||||||
@Override
|
@Override
|
||||||
protected <K, V> MapTaskWithValue<K, V> taskFor(K key, V value) {
|
protected <K, V> MapTaskWithValue<K, V> taskFor(K key, V value) {
|
||||||
return new MapTaskWithValue<K, V>(value) {
|
return new MapTaskWithValue<K, V>(value) {
|
||||||
@Override public void execute(MapStorage<K, V> map) { map.putIfAbsent(key, getValue()); }
|
@Override public void execute(MapStorage<K, V> map) { map.create(key, getValue()); }
|
||||||
@Override public MapOperation getOperation() { return CREATE; }
|
@Override public MapOperation getOperation() { return CREATE; }
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -44,7 +44,7 @@ public class MapKeycloakTransaction<K, V> implements KeycloakTransaction {
|
||||||
@Override
|
@Override
|
||||||
protected <K, V> MapTaskWithValue<K, V> taskFor(K key, V value) {
|
protected <K, V> MapTaskWithValue<K, V> taskFor(K key, V value) {
|
||||||
return new MapTaskWithValue<K, V>(value) {
|
return new MapTaskWithValue<K, V>(value) {
|
||||||
@Override public void execute(MapStorage<K, V> map) { map.put(key, getValue()); }
|
@Override public void execute(MapStorage<K, V> map) { map.update(key, getValue()); }
|
||||||
@Override public MapOperation getOperation() { return UPDATE; }
|
@Override public MapOperation getOperation() { return UPDATE; }
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -53,7 +53,7 @@ public class MapKeycloakTransaction<K, V> implements KeycloakTransaction {
|
||||||
@Override
|
@Override
|
||||||
protected <K, V> MapTaskWithValue<K, V> taskFor(K key, V value) {
|
protected <K, V> MapTaskWithValue<K, V> taskFor(K key, V value) {
|
||||||
return new MapTaskWithValue<K, V>(null) {
|
return new MapTaskWithValue<K, V>(null) {
|
||||||
@Override public void execute(MapStorage<K, V> map) { map.remove(key); }
|
@Override public void execute(MapStorage<K, V> map) { map.delete(key); }
|
||||||
@Override public MapOperation getOperation() { return DELETE; }
|
@Override public MapOperation getOperation() { return DELETE; }
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -115,14 +115,14 @@ public class MapKeycloakTransaction<K, V> implements KeycloakTransaction {
|
||||||
* Adds a given task if none exists yet for the given key
|
* Adds a given task if none exists yet for the given key
|
||||||
*/
|
*/
|
||||||
private void addTask(MapOperation op, K key, V value) {
|
private void addTask(MapOperation op, K key, V value) {
|
||||||
log.tracef("Adding operation %s for %s @ %08x", op, key, System.identityHashCode(value));
|
log.tracef("Adding operation %s for %s @ %08x", op, key, System.identityHashCode(value));
|
||||||
|
|
||||||
K taskKey = key;
|
K taskKey = key;
|
||||||
tasks.merge(taskKey, op.taskFor(key, value), MapTaskCompose::new);
|
tasks.merge(taskKey, op.taskFor(key, value), MapTaskCompose::new);
|
||||||
}
|
}
|
||||||
|
|
||||||
// This is for possibility to lookup for session by id, which was created in this transaction
|
// This is for possibility to lookup for session by id, which was created in this transaction
|
||||||
public V get(K key, Function<K, V> defaultValueFunc) {
|
public V read(K key, Function<K, V> defaultValueFunc) {
|
||||||
MapTaskWithValue<K, V> current = tasks.get(key);
|
MapTaskWithValue<K, V> current = tasks.get(key);
|
||||||
if (current != null) {
|
if (current != null) {
|
||||||
return current.getValue();
|
return current.getValue();
|
||||||
|
@ -140,23 +140,23 @@ public class MapKeycloakTransaction<K, V> implements KeycloakTransaction {
|
||||||
return keyDefaultValue.getValue();
|
return keyDefaultValue.getValue();
|
||||||
}
|
}
|
||||||
|
|
||||||
public void put(K key, V value) {
|
public void update(K key, V value) {
|
||||||
addTask(MapOperation.UPDATE, key, value);
|
addTask(MapOperation.UPDATE, key, value);
|
||||||
}
|
}
|
||||||
|
|
||||||
public void putIfAbsent(K key, V value) {
|
public void create(K key, V value) {
|
||||||
addTask(MapOperation.CREATE, key, value);
|
addTask(MapOperation.CREATE, key, value);
|
||||||
}
|
}
|
||||||
|
|
||||||
public void putIfChanged(K key, V value, Predicate<V> shouldPut) {
|
public void updateIfChanged(K key, V value, Predicate<V> shouldPut) {
|
||||||
log.tracef("Adding operation UPDATE_IF_CHANGED for %s @ %08x", key, System.identityHashCode(value));
|
log.tracef("Adding operation UPDATE_IF_CHANGED for %s @ %08x", key, System.identityHashCode(value));
|
||||||
|
|
||||||
K taskKey = key;
|
K taskKey = key;
|
||||||
MapTaskWithValue<K, V> op = new MapTaskWithValue<K, V>(value) {
|
MapTaskWithValue<K, V> op = new MapTaskWithValue<K, V>(value) {
|
||||||
@Override
|
@Override
|
||||||
public void execute(MapStorage<K, V> map) {
|
public void execute(MapStorage<K, V> map) {
|
||||||
if (shouldPut.test(getValue())) {
|
if (shouldPut.test(getValue())) {
|
||||||
map.put(key, getValue());
|
map.update(key, getValue());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@Override public MapOperation getOperation() { return MapOperation.UPDATE; }
|
@Override public MapOperation getOperation() { return MapOperation.UPDATE; }
|
||||||
|
@ -164,7 +164,7 @@ public class MapKeycloakTransaction<K, V> implements KeycloakTransaction {
|
||||||
tasks.merge(taskKey, op, MapKeycloakTransaction::merge);
|
tasks.merge(taskKey, op, MapKeycloakTransaction::merge);
|
||||||
}
|
}
|
||||||
|
|
||||||
public void remove(K key) {
|
public void delete(K key) {
|
||||||
addTask(MapOperation.DELETE, key, null);
|
addTask(MapOperation.DELETE, key, null);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -18,6 +18,7 @@ package org.keycloak.models.map.storage;
|
||||||
|
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
|
import java.util.stream.Stream;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
*
|
*
|
||||||
|
@ -25,14 +26,60 @@ import java.util.Set;
|
||||||
*/
|
*/
|
||||||
public interface MapStorage<K, V> {
|
public interface MapStorage<K, V> {
|
||||||
|
|
||||||
V get(K key);
|
/**
|
||||||
|
* Creates an object in the store identified by given {@code key}.
|
||||||
|
* @param key Key of the object as seen in the logical level
|
||||||
|
* @param value Entity
|
||||||
|
* @return Reference to the entity created in the store
|
||||||
|
* @throws NullPointerException if object or its {@code id} is {@code null}
|
||||||
|
*/
|
||||||
|
V create(K key, V value);
|
||||||
|
|
||||||
V put(K key, V value);
|
/**
|
||||||
|
* Returns object with the given {@code key} from the storage or {@code null} if object does not exist.
|
||||||
|
* @param key Must not be {@code null}.
|
||||||
|
* @return See description
|
||||||
|
*/
|
||||||
|
V read(K key);
|
||||||
|
|
||||||
V putIfAbsent(K key, V value);
|
/**
|
||||||
|
* Returns stream of objects satisfying given {@code criteria} from the storage.
|
||||||
|
* The criteria are specified in the given criteria builder based on model properties.
|
||||||
|
*
|
||||||
|
* @param criteria Criteria restricting the returned objects; must not be {@code null}
|
||||||
|
* @return Stream of objects. Never returns {@code null}.
|
||||||
|
*/
|
||||||
|
Stream<V> read(ModelCriteriaBuilder criteria);
|
||||||
|
|
||||||
V remove(K key);
|
/**
|
||||||
|
* Updates the object with the given {@code key} in the storage if it already exists.
|
||||||
|
* @param key
|
||||||
|
* @throws NullPointerException if object or its {@code id} is {@code null}
|
||||||
|
*/
|
||||||
|
V update(K key, V value);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Deletes the object with the given {@code key} from the storage if it exists; no-op otherwise.
|
||||||
|
* @param key Key of the object to delete; must not be {@code null}
|
||||||
|
*/
|
||||||
|
V delete(K key);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns criteria builder for the storage engine.
|
||||||
|
* The criteria are specified in the given criteria builder based on model properties.
|
||||||
|
* <br>
|
||||||
|
* <b>Note:</b> While the criteria are formulated in terms of model properties,
|
||||||
|
* the storage engine may in turn process them into the best form that suits the
|
||||||
|
* underlying storage engine query language, e.g. to conditions on storage
|
||||||
|
* attributes or REST query parameters.
|
||||||
|
* If possible, do <i>not</i> delay filtering after the models are reconstructed from
|
||||||
|
* storage entities, in most cases this would be highly inefficient.
|
||||||
|
*
|
||||||
|
* @return See description
|
||||||
|
*/
|
||||||
|
ModelCriteriaBuilder createCriteriaBuilder();
|
||||||
|
|
||||||
|
@Deprecated
|
||||||
Set<Map.Entry<K,V>> entrySet();
|
Set<Map.Entry<K,V>> entrySet();
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,176 @@
|
||||||
|
/*
|
||||||
|
* Copyright 2021 Red Hat, Inc. and/or its affiliates
|
||||||
|
* and other contributors as indicated by the @author tags.
|
||||||
|
*
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
package org.keycloak.models.map.storage;
|
||||||
|
|
||||||
|
import org.keycloak.storage.SearchableModelField;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Builder for criteria that can be used to limit results obtained from the store.
|
||||||
|
* This class is used for similar purpose as e.g. JPA's {@code CriteriaBuilder},
|
||||||
|
* however it is much simpler version as it is tailored to very specific needs
|
||||||
|
* of future Keycloak store.
|
||||||
|
* <p>
|
||||||
|
* Implementations are expected to be immutable. The expected use is like this:
|
||||||
|
* <pre>
|
||||||
|
* cb = storage.getCriteriaBuilder();
|
||||||
|
* storage.read(
|
||||||
|
* cb.or(
|
||||||
|
* cb.compare(FIELD1, EQ, 1).compare(FIELD2, EQ, 2),
|
||||||
|
* cb.compare(FIELD1, EQ, 3).compare(FIELD2, EQ, 4)
|
||||||
|
* )
|
||||||
|
* );
|
||||||
|
* </pre>
|
||||||
|
* The above code should read items where
|
||||||
|
* {@code (FIELD1 == 1 && FIELD2 == 2) || (FIELD1 == 3 && FIELD2 == 4)}.
|
||||||
|
*
|
||||||
|
* <p>
|
||||||
|
* It is equivalent to this:
|
||||||
|
* <pre>
|
||||||
|
* cb = storage.getCriteriaBuilder();
|
||||||
|
* storage.read(
|
||||||
|
* cb.or(
|
||||||
|
* cb.and(cb.compare(FIELD1, EQ, 1), cb.compare(FIELD2, EQ, 2)),
|
||||||
|
* cb.and(cb.compare(FIELD1, EQ, 3), cb.compare(FIELD2, EQ, 4))
|
||||||
|
* )
|
||||||
|
* );
|
||||||
|
* </pre>
|
||||||
|
*
|
||||||
|
* @author hmlnarik
|
||||||
|
*/
|
||||||
|
public interface ModelCriteriaBuilder {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The operators are very basic ones for this use case. In the real scenario,
|
||||||
|
* new operators can be added, possibly with different arity, e.g. {@code IN}.
|
||||||
|
* The {@link ModelCriteriaBuilder#compare} method would need an adjustment
|
||||||
|
* then, likely to taky vararg {@code value} instead of single value as it
|
||||||
|
* is now.
|
||||||
|
*/
|
||||||
|
public enum Operator {
|
||||||
|
/** Equals to */
|
||||||
|
EQ,
|
||||||
|
/** Not equals to */
|
||||||
|
NE,
|
||||||
|
/** Less than */
|
||||||
|
LT,
|
||||||
|
/** Less than or equal */
|
||||||
|
LE,
|
||||||
|
/** Greater than */
|
||||||
|
GT,
|
||||||
|
/** Greater than or equal */
|
||||||
|
GE,
|
||||||
|
/** Similar to SQL case-sensitive LIKE Whole string is matched.
|
||||||
|
* Percent sign means "any characters", question mark means "any single character":
|
||||||
|
* <ul>
|
||||||
|
* <li>{@code field LIKE "abc"} means <i>value of the field {@code field} must match exactly {@code abc}</i></li>
|
||||||
|
* <li>{@code field LIKE "abc%"} means <i>value of the field {@code field} must start with {@code abc}</i></li>
|
||||||
|
* <li>{@code field LIKE "%abc"} means <i>value of the field {@code field} must end with {@code abc}</i></li>
|
||||||
|
* <li>{@code field LIKE "%abc%"} means <i>value of the field {@code field} must contain {@code abc}</i></li>
|
||||||
|
* </ul>
|
||||||
|
*/
|
||||||
|
LIKE,
|
||||||
|
/**
|
||||||
|
* Similar to SQL case-insensitive LIKE. Whole string is matched.
|
||||||
|
* Percent sign means "any characters", question mark means "any single character":
|
||||||
|
* <ul>
|
||||||
|
* <li>{@code field ILIKE "abc"} means <i>value of the field {@code field} must match exactly {@code abc}, {@code ABC}, {@code aBc} etc.</i></li>
|
||||||
|
* <li>{@code field ILIKE "abc%"} means <i>value of the field {@code field} must start with {@code abc}, {@code ABC}, {@code aBc} etc.</i></li>
|
||||||
|
* <li>{@code field ILIKE "%abc"} means <i>value of the field {@code field} must end with {@code abc}, {@code ABC}, {@code aBc} etc.</i></li>
|
||||||
|
* <li>{@code field ILIKE "%abc%"} means <i>value of the field {@code field} must contain {@code abc}, {@code ABC}, {@code aBc} etc.</i></li>
|
||||||
|
* </ul>
|
||||||
|
*/
|
||||||
|
ILIKE,
|
||||||
|
/** Operator for belonging into a set of values */
|
||||||
|
IN
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Adds a constraint for the given model field to this criteria builder
|
||||||
|
* and returns a criteria builder that is combined with the the new constraint.
|
||||||
|
* The resulting constraint is a logical conjunction (i.e. AND) of the original
|
||||||
|
* constraint present in this {@link ModelCriteriaBuilder} and the given operator.
|
||||||
|
*
|
||||||
|
* @param modelField Field on the logical <i>model</i> to be constrained
|
||||||
|
* @param op Operator
|
||||||
|
* @param value Additional operands of the operator.
|
||||||
|
* @return
|
||||||
|
*/
|
||||||
|
ModelCriteriaBuilder compare(SearchableModelField modelField, Operator op, Object... value);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates and returns a new instance of {@code ModelCriteriaBuilder} that
|
||||||
|
* combines the given builders with the Boolean AND operator.
|
||||||
|
* <p>
|
||||||
|
* Predicate coming out of {@code and} on an empty array of {@code builders}
|
||||||
|
* (i.e. empty conjunction) is always {@code true}.
|
||||||
|
*
|
||||||
|
* <pre>
|
||||||
|
* cb = storage.getCriteriaBuilder();
|
||||||
|
* storage.read(cb.or(
|
||||||
|
* cb.and(cb.compare(FIELD1, EQ, 1), cb.compare(FIELD2, EQ, 2)),
|
||||||
|
* cb.and(cb.compare(FIELD1, EQ, 3), cb.compare(FIELD2, EQ, 4))
|
||||||
|
* );
|
||||||
|
* </pre>
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
ModelCriteriaBuilder and(ModelCriteriaBuilder... builders);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates and returns a new instance of {@code ModelCriteriaBuilder} that
|
||||||
|
* combines the given builders with the Boolean OR operator.
|
||||||
|
* <p>
|
||||||
|
* Predicate coming out of {@code and} on an empty array of {@code builders}
|
||||||
|
* (i.e. empty disjunction) is always {@code false}.
|
||||||
|
*
|
||||||
|
* <pre>
|
||||||
|
* cb = storage.getCriteriaBuilder();
|
||||||
|
* storage.read(cb.or(
|
||||||
|
* cb.compare(FIELD1, EQ, 1).compare(FIELD2, EQ, 2),
|
||||||
|
* cb.compare(FIELD1, EQ, 3).compare(FIELD2, EQ, 4)
|
||||||
|
* );
|
||||||
|
* </pre>
|
||||||
|
*/
|
||||||
|
ModelCriteriaBuilder or(ModelCriteriaBuilder... builders);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates and returns a new instance of {@code ModelCriteriaBuilder} that
|
||||||
|
* negates the given builder.
|
||||||
|
* <p>
|
||||||
|
* Note that if the {@code builder} has no condition yet, there is nothing
|
||||||
|
* to negate: empty negation is always {@code true}.
|
||||||
|
*
|
||||||
|
* @param builder
|
||||||
|
* @return
|
||||||
|
*/
|
||||||
|
ModelCriteriaBuilder not(ModelCriteriaBuilder builder);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns this object cast to the given class.
|
||||||
|
* @param <T>
|
||||||
|
* @param clazz
|
||||||
|
* @return
|
||||||
|
* @throws ClassCastException When this instance cannot be converted to the given {@code clazz}.
|
||||||
|
*/
|
||||||
|
default <T extends ModelCriteriaBuilder> T unwrap(Class<T> clazz) {
|
||||||
|
if (clazz.isInstance(this)) {
|
||||||
|
return clazz.cast(this);
|
||||||
|
} else {
|
||||||
|
throw new ClassCastException("Incompatible class: " + clazz);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,247 @@
|
||||||
|
/*
|
||||||
|
* Copyright 2020 Red Hat, Inc. and/or its affiliates
|
||||||
|
* and other contributors as indicated by the @author tags.
|
||||||
|
*
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
package org.keycloak.models.map.storage;
|
||||||
|
|
||||||
|
import org.keycloak.Config.Scope;
|
||||||
|
import org.keycloak.models.KeycloakSession;
|
||||||
|
import org.keycloak.models.KeycloakSessionFactory;
|
||||||
|
import org.keycloak.models.map.common.AbstractEntity;
|
||||||
|
import org.keycloak.models.map.common.Serialization;
|
||||||
|
import org.keycloak.storage.SearchableModelField;
|
||||||
|
import com.fasterxml.jackson.databind.JavaType;
|
||||||
|
import java.io.File;
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.nio.file.Files;
|
||||||
|
import java.util.EnumSet;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.Map.Entry;
|
||||||
|
import java.util.Set;
|
||||||
|
import java.util.concurrent.ConcurrentHashMap;
|
||||||
|
import java.util.concurrent.ConcurrentMap;
|
||||||
|
import java.util.function.Predicate;
|
||||||
|
import java.util.stream.Stream;
|
||||||
|
import org.jboss.logging.Logger;
|
||||||
|
|
||||||
|
/**
|
||||||
|
*
|
||||||
|
* @author hmlnarik
|
||||||
|
*/
|
||||||
|
public class ConcurrentHashMapStorageProvider implements MapStorageProvider {
|
||||||
|
|
||||||
|
public static class ConcurrentHashMapStorage<K, V> implements MapStorage<K, V> {
|
||||||
|
|
||||||
|
private final ConcurrentMap<K, V> store = new ConcurrentHashMap<>();
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public V create(K key, V value) {
|
||||||
|
return store.putIfAbsent(key, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public V read(K key) {
|
||||||
|
return store.get(key);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public V update(K key, V value) {
|
||||||
|
return store.replace(key, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public V delete(K key) {
|
||||||
|
return store.remove(key);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public ModelCriteriaBuilder createCriteriaBuilder() {
|
||||||
|
return new MapModelCriteriaBuilder(null);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Set<Entry<K, V>> entrySet() {
|
||||||
|
return store.entrySet();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Stream<V> read(ModelCriteriaBuilder criteria) {
|
||||||
|
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static class MapModelCriteriaBuilder<M> implements ModelCriteriaBuilder {
|
||||||
|
|
||||||
|
@FunctionalInterface
|
||||||
|
public interface TriConsumer<A extends MapModelCriteriaBuilder<?>,B,C> { A apply(A a, B b, C c); }
|
||||||
|
|
||||||
|
private static final Predicate<Object> ALWAYS_TRUE = e -> true;
|
||||||
|
private static final Predicate<Object> ALWAYS_FALSE = e -> false;
|
||||||
|
|
||||||
|
private final Predicate<? super String> indexFilter;
|
||||||
|
private final Predicate<? super M> modelFilter;
|
||||||
|
|
||||||
|
private final Map<String, TriConsumer<MapModelCriteriaBuilder<M>, Operator, Object>> fieldPredicates;
|
||||||
|
|
||||||
|
public MapModelCriteriaBuilder(Map<String, TriConsumer<MapModelCriteriaBuilder<M>, Operator, Object>> fieldPredicates) {
|
||||||
|
this(fieldPredicates, ALWAYS_TRUE, ALWAYS_TRUE);
|
||||||
|
}
|
||||||
|
|
||||||
|
private MapModelCriteriaBuilder(Map<String, TriConsumer<MapModelCriteriaBuilder<M>, Operator, Object>> fieldPredicates,
|
||||||
|
Predicate<? super String> indexReadFilter, Predicate<? super M> sequentialReadFilter) {
|
||||||
|
this.fieldPredicates = fieldPredicates;
|
||||||
|
this.indexFilter = indexReadFilter;
|
||||||
|
this.modelFilter = sequentialReadFilter;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public ModelCriteriaBuilder compare(SearchableModelField modelField, Operator op, Object... value) {
|
||||||
|
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public MapModelCriteriaBuilder<M> and(ModelCriteriaBuilder... builders) {
|
||||||
|
Predicate<? super String> resIndexFilter = Stream.of(builders)
|
||||||
|
.map(MapModelCriteriaBuilder.class::cast)
|
||||||
|
.map(MapModelCriteriaBuilder::getIndexFilter)
|
||||||
|
.reduce(ALWAYS_TRUE, (p1, p2) -> p1.and(p2));
|
||||||
|
Predicate<? super M> resModelFilter = Stream.of(builders)
|
||||||
|
.map(MapModelCriteriaBuilder.class::cast)
|
||||||
|
.map(MapModelCriteriaBuilder::getModelFilter)
|
||||||
|
.reduce(ALWAYS_TRUE, (p1, p2) -> p1.and(p2));
|
||||||
|
return new MapModelCriteriaBuilder<>(fieldPredicates, resIndexFilter, resModelFilter);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public MapModelCriteriaBuilder<M> or(ModelCriteriaBuilder... builders) {
|
||||||
|
Predicate<? super String> resIndexFilter = Stream.of(builders)
|
||||||
|
.map(MapModelCriteriaBuilder.class::cast)
|
||||||
|
.map(MapModelCriteriaBuilder::getIndexFilter)
|
||||||
|
.reduce(ALWAYS_FALSE, (p1, p2) -> p1.or(p2));
|
||||||
|
Predicate<? super M> resModelFilter = Stream.of(builders)
|
||||||
|
.map(MapModelCriteriaBuilder.class::cast)
|
||||||
|
.map(MapModelCriteriaBuilder::getModelFilter)
|
||||||
|
.reduce(ALWAYS_FALSE, (p1, p2) -> p1.or(p2));
|
||||||
|
return new MapModelCriteriaBuilder<>(fieldPredicates, resIndexFilter, resModelFilter);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public MapModelCriteriaBuilder<M> not(ModelCriteriaBuilder builder) {
|
||||||
|
MapModelCriteriaBuilder b = builder.unwrap(MapModelCriteriaBuilder.class);
|
||||||
|
Predicate<? super String> resIndexFilter = b.getIndexFilter() == ALWAYS_TRUE ? ALWAYS_TRUE : b.getIndexFilter().negate();
|
||||||
|
Predicate<? super M> resModelFilter = b.getModelFilter() == ALWAYS_TRUE ? ALWAYS_TRUE : b.getModelFilter().negate();
|
||||||
|
return new MapModelCriteriaBuilder<>(fieldPredicates, resIndexFilter, resModelFilter);
|
||||||
|
}
|
||||||
|
|
||||||
|
public Predicate<? super String> getIndexFilter() {
|
||||||
|
return indexFilter;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Predicate<? super M> getModelFilter() {
|
||||||
|
return modelFilter;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static final String PROVIDER_ID = "concurrenthashmap";
|
||||||
|
|
||||||
|
private static final Logger LOG = Logger.getLogger(ConcurrentHashMapStorageProvider.class);
|
||||||
|
|
||||||
|
private final ConcurrentHashMap<String, ConcurrentHashMapStorage<?,?>> storages = new ConcurrentHashMap<>();
|
||||||
|
|
||||||
|
private File storageDirectory;
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public MapStorageProvider create(KeycloakSession session) {
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void init(Scope config) {
|
||||||
|
File f = new File(config.get("dir"));
|
||||||
|
try {
|
||||||
|
this.storageDirectory = f.exists()
|
||||||
|
? f
|
||||||
|
: Files.createTempDirectory("storage-map-chm-").toFile();
|
||||||
|
} catch (IOException ex) {
|
||||||
|
this.storageDirectory = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void postInit(KeycloakSessionFactory factory) {
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void close() {
|
||||||
|
storages.forEach(this::storeMap);
|
||||||
|
}
|
||||||
|
|
||||||
|
private void storeMap(String fileName, ConcurrentHashMapStorage<?, ?> store) {
|
||||||
|
if (fileName != null) {
|
||||||
|
File f = getFile(fileName);
|
||||||
|
try {
|
||||||
|
if (storageDirectory != null && storageDirectory.exists()) {
|
||||||
|
LOG.debugf("Storing contents to %s", f.getCanonicalPath());
|
||||||
|
Serialization.MAPPER.writeValue(f, store.entrySet().stream().map(Map.Entry::getValue));
|
||||||
|
} else {
|
||||||
|
LOG.debugf("Not storing contents of %s because directory %s does not exist", fileName, this.storageDirectory);
|
||||||
|
}
|
||||||
|
} catch (IOException ex) {
|
||||||
|
throw new RuntimeException(ex);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private <K, V extends AbstractEntity<K>> ConcurrentHashMapStorage<K, V> loadMap(String fileName, Class<V> valueType, EnumSet<Flag> flags) {
|
||||||
|
ConcurrentHashMapStorage<K, V> store = new ConcurrentHashMapStorage<>();
|
||||||
|
|
||||||
|
if (! flags.contains(Flag.INITIALIZE_EMPTY)) {
|
||||||
|
final File f = getFile(fileName);
|
||||||
|
if (f != null && f.exists()) {
|
||||||
|
try {
|
||||||
|
LOG.debugf("Restoring contents from %s", f.getCanonicalPath());
|
||||||
|
JavaType type = Serialization.MAPPER.getTypeFactory().constructCollectionType(List.class, valueType);
|
||||||
|
|
||||||
|
List<V> values = Serialization.MAPPER.readValue(f, type);
|
||||||
|
values.forEach((V mce) -> store.create(mce.getId(), mce));
|
||||||
|
} catch (IOException ex) {
|
||||||
|
throw new RuntimeException(ex);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return store;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String getId() {
|
||||||
|
return PROVIDER_ID;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
@SuppressWarnings("unchecked")
|
||||||
|
public <K, V extends AbstractEntity<K>> ConcurrentHashMapStorage<K, V> getStorage(String name, Class<K> keyType, Class<V> valueType, Flag... flags) {
|
||||||
|
EnumSet<Flag> f = flags == null || flags.length == 0 ? EnumSet.noneOf(Flag.class) : EnumSet.of(flags[0], flags);
|
||||||
|
return (ConcurrentHashMapStorage<K, V>) storages.computeIfAbsent(name, n -> loadMap(name, valueType, f));
|
||||||
|
}
|
||||||
|
|
||||||
|
private File getFile(String fileName) {
|
||||||
|
return storageDirectory == null
|
||||||
|
? null
|
||||||
|
: new File(storageDirectory, "map-" + fileName + ".json");
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -86,8 +86,8 @@ public class MapUserProvider implements UserProvider.Streams, UserCredentialStor
|
||||||
}
|
}
|
||||||
|
|
||||||
private MapUserEntity registerEntityForChanges(MapUserEntity origEntity) {
|
private MapUserEntity registerEntityForChanges(MapUserEntity origEntity) {
|
||||||
MapUserEntity res = tx.get(origEntity.getId(), id -> Serialization.from(origEntity));
|
MapUserEntity res = tx.read(origEntity.getId(), id -> Serialization.from(origEntity));
|
||||||
tx.putIfChanged(origEntity.getId(), res, MapUserEntity::isUpdated);
|
tx.updateIfChanged(origEntity.getId(), res, MapUserEntity::isUpdated);
|
||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -134,7 +134,7 @@ public class MapUserProvider implements UserProvider.Streams, UserCredentialStor
|
||||||
}
|
}
|
||||||
|
|
||||||
private Optional<MapUserEntity> getEntityById(RealmModel realm, UUID id) {
|
private Optional<MapUserEntity> getEntityById(RealmModel realm, UUID id) {
|
||||||
MapUserEntity mapUserEntity = tx.get(id, userStore::get);
|
MapUserEntity mapUserEntity = tx.read(id, userStore::read);
|
||||||
if (mapUserEntity != null && entityRealmFilter(realm).test(mapUserEntity)) {
|
if (mapUserEntity != null && entityRealmFilter(realm).test(mapUserEntity)) {
|
||||||
return Optional.of(mapUserEntity);
|
return Optional.of(mapUserEntity);
|
||||||
}
|
}
|
||||||
|
@ -148,7 +148,7 @@ public class MapUserProvider implements UserProvider.Streams, UserCredentialStor
|
||||||
|
|
||||||
private Stream<MapUserEntity> getNotRemovedUpdatedUsersStream() {
|
private Stream<MapUserEntity> getNotRemovedUpdatedUsersStream() {
|
||||||
Stream<MapUserEntity> updatedAndNotRemovedUsersStream = userStore.entrySet().stream()
|
Stream<MapUserEntity> updatedAndNotRemovedUsersStream = userStore.entrySet().stream()
|
||||||
.map(tx::getUpdated) // If the group has been removed, tx.get will return null, otherwise it will return me.getValue()
|
.map(tx::getUpdated) // If the group has been removed, tx.read will return null, otherwise it will return me.getValue()
|
||||||
.filter(Objects::nonNull);
|
.filter(Objects::nonNull);
|
||||||
return Stream.concat(tx.createdValuesStream(), updatedAndNotRemovedUsersStream);
|
return Stream.concat(tx.createdValuesStream(), updatedAndNotRemovedUsersStream);
|
||||||
}
|
}
|
||||||
|
@ -328,7 +328,7 @@ public class MapUserProvider implements UserProvider.Streams, UserCredentialStor
|
||||||
|
|
||||||
final UUID entityId = id == null ? UUID.randomUUID() : UUID.fromString(id);
|
final UUID entityId = id == null ? UUID.randomUUID() : UUID.fromString(id);
|
||||||
|
|
||||||
if (tx.get(entityId, userStore::get) != null) {
|
if (tx.read(entityId, userStore::read) != null) {
|
||||||
throw new ModelDuplicateException("User exists: " + entityId);
|
throw new ModelDuplicateException("User exists: " + entityId);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -336,7 +336,7 @@ public class MapUserProvider implements UserProvider.Streams, UserCredentialStor
|
||||||
entity.setUsername(username.toLowerCase());
|
entity.setUsername(username.toLowerCase());
|
||||||
entity.setCreatedTimestamp(Time.currentTimeMillis());
|
entity.setCreatedTimestamp(Time.currentTimeMillis());
|
||||||
|
|
||||||
tx.putIfAbsent(entityId, entity);
|
tx.create(entityId, entity);
|
||||||
final UserModel userModel = entityToAdapterFunc(realm).apply(entity);
|
final UserModel userModel = entityToAdapterFunc(realm).apply(entity);
|
||||||
|
|
||||||
if (addDefaultRoles) {
|
if (addDefaultRoles) {
|
||||||
|
@ -362,7 +362,7 @@ public class MapUserProvider implements UserProvider.Streams, UserCredentialStor
|
||||||
LOG.tracef("preRemove[RealmModel](%s)%s", realm, getShortStackTrace());
|
LOG.tracef("preRemove[RealmModel](%s)%s", realm, getShortStackTrace());
|
||||||
getUnsortedUserEntitiesStream(realm)
|
getUnsortedUserEntitiesStream(realm)
|
||||||
.map(MapUserEntity::getId)
|
.map(MapUserEntity::getId)
|
||||||
.forEach(tx::remove);
|
.forEach(tx::delete);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@ -371,7 +371,7 @@ public class MapUserProvider implements UserProvider.Streams, UserCredentialStor
|
||||||
getUnsortedUserEntitiesStream(realm)
|
getUnsortedUserEntitiesStream(realm)
|
||||||
.filter(userEntity -> Objects.equals(userEntity.getFederationLink(), storageProviderId))
|
.filter(userEntity -> Objects.equals(userEntity.getFederationLink(), storageProviderId))
|
||||||
.map(MapUserEntity::getId)
|
.map(MapUserEntity::getId)
|
||||||
.forEach(tx::remove);
|
.forEach(tx::delete);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@ -712,7 +712,7 @@ public class MapUserProvider implements UserProvider.Streams, UserCredentialStor
|
||||||
String userId = user.getId();
|
String userId = user.getId();
|
||||||
Optional<MapUserEntity> userById = getEntityById(realm, userId);
|
Optional<MapUserEntity> userById = getEntityById(realm, userId);
|
||||||
if (userById.isPresent()) {
|
if (userById.isPresent()) {
|
||||||
tx.remove(UUID.fromString(userId));
|
tx.delete(UUID.fromString(userId));
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -15,4 +15,4 @@
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
#
|
#
|
||||||
|
|
||||||
org.keycloak.models.map.storage.ConcurrentHashMapStorageProvider
|
org.keycloak.models.map.storage.chm.ConcurrentHashMapStorageProvider
|
||||||
|
|
|
@ -0,0 +1,40 @@
|
||||||
|
/*
|
||||||
|
* Copyright 2021 Red Hat, Inc. and/or its affiliates
|
||||||
|
* and other contributors as indicated by the @author tags.
|
||||||
|
*
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
package org.keycloak.storage;
|
||||||
|
|
||||||
|
/**
|
||||||
|
*
|
||||||
|
* @author hmlnarik
|
||||||
|
*/
|
||||||
|
/**
 * Descriptor of a model field usable in search criteria, identified by its
 * name and the Java type of its values.
 *
 * @param <M> Type of the model this field belongs to
 * @author hmlnarik
 */
public class SearchableModelField<M> {

    private final String name;
    private final Class<?> fieldClass;

    /**
     * @param name Name of the field
     * @param fieldClass Type of the values of this field
     */
    public SearchableModelField(String name, Class<?> fieldClass) {
        this.name = name;
        this.fieldClass = fieldClass;
    }

    /** Returns the name of this field. */
    public String getName() {
        return this.name;
    }

    /** Returns the type of the values of this field. */
    public Class<?> getFieldType() {
        return fieldClass;
    }

    /** Human-readable form for logs and debugging; the default Object.toString would be opaque here. */
    @Override
    public String toString() {
        return "SearchableModelField[" + name + " of type " + fieldClass + "]";
    }
}
|
|
@ -20,7 +20,7 @@ import org.keycloak.testsuite.model.KeycloakModelParameters;
|
||||||
import org.keycloak.models.map.client.MapClientProviderFactory;
|
import org.keycloak.models.map.client.MapClientProviderFactory;
|
||||||
import org.keycloak.models.map.group.MapGroupProviderFactory;
|
import org.keycloak.models.map.group.MapGroupProviderFactory;
|
||||||
import org.keycloak.models.map.role.MapRoleProviderFactory;
|
import org.keycloak.models.map.role.MapRoleProviderFactory;
|
||||||
import org.keycloak.models.map.storage.ConcurrentHashMapStorageProvider;
|
import org.keycloak.models.map.storage.chm.ConcurrentHashMapStorageProvider;
|
||||||
import org.keycloak.models.map.storage.MapStorageProvider;
|
import org.keycloak.models.map.storage.MapStorageProvider;
|
||||||
import org.keycloak.models.map.storage.MapStorageSpi;
|
import org.keycloak.models.map.storage.MapStorageSpi;
|
||||||
import org.keycloak.provider.ProviderFactory;
|
import org.keycloak.provider.ProviderFactory;
|
||||||
|
|
Loading…
Reference in a new issue